hexsha string | size int64 | ext string | lang string | max_stars_repo_path string | max_stars_repo_name string | max_stars_repo_head_hexsha string | max_stars_repo_licenses list | max_stars_count int64 | max_stars_repo_stars_event_min_datetime string | max_stars_repo_stars_event_max_datetime string | max_issues_repo_path string | max_issues_repo_name string | max_issues_repo_head_hexsha string | max_issues_repo_licenses list | max_issues_count int64 | max_issues_repo_issues_event_min_datetime string | max_issues_repo_issues_event_max_datetime string | max_forks_repo_path string | max_forks_repo_name string | max_forks_repo_head_hexsha string | max_forks_repo_licenses list | max_forks_count int64 | max_forks_repo_forks_event_min_datetime string | max_forks_repo_forks_event_max_datetime string | content string | avg_line_length float64 | max_line_length int64 | alphanum_fraction float64 | qsc_code_num_words_quality_signal int64 | qsc_code_num_chars_quality_signal float64 | qsc_code_mean_word_length_quality_signal float64 | qsc_code_frac_words_unique_quality_signal float64 | qsc_code_frac_chars_top_2grams_quality_signal float64 | qsc_code_frac_chars_top_3grams_quality_signal float64 | qsc_code_frac_chars_top_4grams_quality_signal float64 | qsc_code_frac_chars_dupe_5grams_quality_signal float64 | qsc_code_frac_chars_dupe_6grams_quality_signal float64 | qsc_code_frac_chars_dupe_7grams_quality_signal float64 | qsc_code_frac_chars_dupe_8grams_quality_signal float64 | qsc_code_frac_chars_dupe_9grams_quality_signal float64 | qsc_code_frac_chars_dupe_10grams_quality_signal float64 | qsc_code_frac_chars_replacement_symbols_quality_signal float64 | qsc_code_frac_chars_digital_quality_signal float64 | qsc_code_frac_chars_whitespace_quality_signal float64 | qsc_code_size_file_byte_quality_signal float64 | qsc_code_num_lines_quality_signal float64 | qsc_code_num_chars_line_max_quality_signal float64 | qsc_code_num_chars_line_mean_quality_signal float64 | qsc_code_frac_chars_alphabet_quality_signal float64 | qsc_code_frac_chars_comments_quality_signal float64 | qsc_code_cate_xml_start_quality_signal float64 | qsc_code_frac_lines_dupe_lines_quality_signal float64 | qsc_code_cate_autogen_quality_signal float64 | qsc_code_frac_lines_long_string_quality_signal float64 | qsc_code_frac_chars_string_length_quality_signal float64 | qsc_code_frac_chars_long_word_length_quality_signal float64 | qsc_code_frac_lines_string_concat_quality_signal float64 | qsc_code_cate_encoded_data_quality_signal float64 | qsc_code_frac_chars_hex_words_quality_signal float64 | qsc_code_frac_lines_prompt_comments_quality_signal float64 | qsc_code_frac_lines_assert_quality_signal float64 | qsc_codepython_cate_ast_quality_signal float64 | qsc_codepython_frac_lines_func_ratio_quality_signal float64 | qsc_codepython_cate_var_zero_quality_signal bool | qsc_codepython_frac_lines_pass_quality_signal float64 | qsc_codepython_frac_lines_import_quality_signal float64 | qsc_codepython_frac_lines_simplefunc_quality_signal float64 | qsc_codepython_score_lines_no_logic_quality_signal float64 | qsc_codepython_frac_lines_print_quality_signal float64 | qsc_code_num_words int64 | qsc_code_num_chars int64 | qsc_code_mean_word_length int64 | qsc_code_frac_words_unique null | qsc_code_frac_chars_top_2grams int64 | qsc_code_frac_chars_top_3grams int64 | qsc_code_frac_chars_top_4grams int64 | qsc_code_frac_chars_dupe_5grams int64 | qsc_code_frac_chars_dupe_6grams int64 | qsc_code_frac_chars_dupe_7grams int64 | qsc_code_frac_chars_dupe_8grams int64 | 
qsc_code_frac_chars_dupe_9grams int64 | qsc_code_frac_chars_dupe_10grams int64 | qsc_code_frac_chars_replacement_symbols int64 | qsc_code_frac_chars_digital int64 | qsc_code_frac_chars_whitespace int64 | qsc_code_size_file_byte int64 | qsc_code_num_lines int64 | qsc_code_num_chars_line_max int64 | qsc_code_num_chars_line_mean int64 | qsc_code_frac_chars_alphabet int64 | qsc_code_frac_chars_comments int64 | qsc_code_cate_xml_start int64 | qsc_code_frac_lines_dupe_lines int64 | qsc_code_cate_autogen int64 | qsc_code_frac_lines_long_string int64 | qsc_code_frac_chars_string_length int64 | qsc_code_frac_chars_long_word_length int64 | qsc_code_frac_lines_string_concat null | qsc_code_cate_encoded_data int64 | qsc_code_frac_chars_hex_words int64 | qsc_code_frac_lines_prompt_comments int64 | qsc_code_frac_lines_assert int64 | qsc_codepython_cate_ast int64 | qsc_codepython_frac_lines_func_ratio int64 | qsc_codepython_cate_var_zero int64 | qsc_codepython_frac_lines_pass int64 | qsc_codepython_frac_lines_import int64 | qsc_codepython_frac_lines_simplefunc int64 | qsc_codepython_score_lines_no_logic int64 | qsc_codepython_frac_lines_print int64 | effective string | hits int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
e1735cf465878e51e68b23b8c11b4772b03f40a7 | 172 | py | Python | empower/apps/helloworld2/helloworld2.py | blasf1/empower-runtime | a8099f5aed5fe51fed4502cc8513b6128dc8961a | [
"Apache-2.0"
] | null | null | null | empower/apps/helloworld2/helloworld2.py | blasf1/empower-runtime | a8099f5aed5fe51fed4502cc8513b6128dc8961a | [
"Apache-2.0"
] | null | null | null | empower/apps/helloworld2/helloworld2.py | blasf1/empower-runtime | a8099f5aed5fe51fed4502cc8513b6128dc8961a | [
"Apache-2.0"
] | null | null | null | class HelloWorld(EmpowerApp):
    def loop(self):
        print("Hello! World.")

def launch(tenant_id, every=5000):
    return HelloWorld(tenant_id=tenant_id, every=every) | 28.666667 | 55 | 0.709302 | 23 | 172 | 5.173913 | 0.652174 | 0.201681 | 0.218487 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.027778 | 0.162791 | 172 | 6 | 55 | 28.666667 | 0.798611 | 0 | 0 | 0 | 0 | 0 | 0.075145 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.4 | false | 0 | 0 | 0.2 | 0.8 | 0.2 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 7 |
e1818711a713cb4b6aaa629ca9e7df697a8c7b6b | 2,519 | py | Python | Test-Tree.py | jiayeerawr/my-python-modules | 0c466f83ad3e875dd2b02da1abff5de8fc9d341a | [
"MIT"
] | 1 | 2016-09-23T04:47:27.000Z | 2016-09-23T04:47:27.000Z | Test-Tree.py | jiayeerawr/my-python-modules | 0c466f83ad3e875dd2b02da1abff5de8fc9d341a | [
"MIT"
] | 2 | 2016-07-21T02:52:13.000Z | 2016-07-29T03:15:01.000Z | Test-Tree.py | jiayeerawr/data-structures | 0c466f83ad3e875dd2b02da1abff5de8fc9d341a | [
"MIT"
] | null | null | null | from Trees import *
data_list = []
for p in xrange(16):
    data_list.append(p)
print data_list
my_tree = BinarySearchTree()
print my_tree.is_empty()
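# Insert 8 as the root, then add the remaining values in an order that
# produces the tree sketched in the comment block below.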
my_tree.set_root(TreeNode(data_list[8]))
print "Assert\t",
print my_tree.get_root().get_data() == data_list[8]
print "Weights\t",
print my_tree.get_root().get_weight(),
my_tree.insert(data_list[4])
print my_tree.get_root().get_weight(),
my_tree.insert(data_list[2])
print my_tree.get_root().get_weight(),
my_tree.insert(data_list[1])
print my_tree.get_root().get_weight(),
my_tree.insert(data_list[3])
print my_tree.get_root().get_weight(),
my_tree.insert(data_list[5])
print my_tree.get_root().get_weight(),
my_tree.insert(data_list[6])
print my_tree.get_root().get_weight(),
my_tree.insert(data_list[7])
print my_tree.get_root().get_weight(),
my_tree.insert(data_list[12])
print my_tree.get_root().get_weight(),
my_tree.insert(data_list[10])
print my_tree.get_root().get_weight(),
my_tree.insert(data_list[9])
print my_tree.get_root().get_weight(),
my_tree.insert(data_list[11])
print my_tree.get_root().get_weight(),
my_tree.insert(data_list[15])
print my_tree.get_root().get_weight(),
my_tree.insert(data_list[14])
print my_tree.get_root().get_weight(),
my_tree.insert(data_list[13])
print my_tree.get_root().get_weight()
'''
+---------------8----------------+
+-------4---+ +------12-----------+
+---2---+ 5---+ +---10---+ +--15
1 3 6---+ 9 11 +--14
7 13
'''
print "Inorder\t",
my_tree.print_in_order()
print
# 1
node = my_tree.get_root().get_left().get_left().get_left()
print "Successor of root with data = %d is node with data = %d" % (
node.get_data(), node.get_successor().get_data())
# 7
node = my_tree.get_root().get_left().get_right().get_right().get_right()
print "Successor of root with data = %d is node with data = %d" % (
node.get_data(), node.get_successor().get_data())
# 8
node = my_tree.get_root()
print "Successor of root with data = %d is node with data = %d" % (
node.get_data(), node.get_successor().get_data())
# 15
node = my_tree.get_root().get_right().get_right()
print "Successor of root with data = %d is node with data = %d" % (
node.get_data(),
-1 if node.get_successor() == None else node.get_successor())
for i in xrange(0, 32, 5):
    print i, my_tree.contains(i),
print
my_tree.delete(11)
# my_tree.delete(12)
my_tree.delete(15)
print "Inorder\t",
my_tree.print_in_order()
print
| 28.954023 | 72 | 0.664549 | 424 | 2,519 | 3.653302 | 0.136792 | 0.166559 | 0.116204 | 0.16785 | 0.767592 | 0.756617 | 0.745642 | 0.713363 | 0.678502 | 0.632021 | 0 | 0.028385 | 0.146884 | 2,519 | 86 | 73 | 29.290698 | 0.692415 | 0.010719 | 0 | 0.4375 | 0 | 0 | 0.115594 | 0 | 0 | 0 | 0 | 0 | 0.015625 | 0 | null | null | 0 | 0.015625 | null | null | 0.5 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 7 |
8321b9cb55766fe1c31e015d937b8efdeca84d8d | 15,942 | py | Python | Embedded/test/test_ra.py | gshimansky/omniscidb | 8aac2883f929d5b06ed67d7fadf2c75211f52ce8 | [
"Apache-2.0"
] | 2 | 2021-10-16T10:04:54.000Z | 2022-01-10T18:24:50.000Z | Embedded/test/test_ra.py | gshimansky/omniscidb | 8aac2883f929d5b06ed67d7fadf2c75211f52ce8 | [
"Apache-2.0"
] | 91 | 2020-09-09T09:58:25.000Z | 2022-03-31T15:27:39.000Z | Embedded/test/test_ra.py | gshimansky/omniscidb | 8aac2883f929d5b06ed67d7fadf2c75211f52ce8 | [
"Apache-2.0"
] | 12 | 2020-09-02T10:53:36.000Z | 2022-01-24T16:56:58.000Z | #cython: language_level=3
import pytest
import dbe
import ctypes
import pyarrow as pa
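# Preload the engine shared library with RTLD_GLOBAL so its symbols are
# visible process-wide (assumed to be required by the dbe extension module).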
ctypes._dlopen('libDBEngine.so', ctypes.RTLD_GLOBAL)
def test_init():
    global engine
    engine = dbe.PyDbEngine(
        enable_union=1,
        enable_columnar_output=1,
        enable_lazy_fetch=0,
        null_div_by_zero=1,
    )
    assert bool(engine.closed) == False
engine = None
def test_import_table():
    data = [
        pa.array([1, 1, 2, 2, 3]),
        pa.array([None, None, 2, 1, 3]),
        pa.array([3, None, None, 2, 1])
    ]
    table = pa.Table.from_arrays(data, ['F_a', 'F_b', 'F_c'])
    engine.importArrowTable('test', table)
    assert engine.get_tables() == ['test']
def test_projection():
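    # Scan the test table, project rowid plus all three columns, and keep only
    # rows where F_a == 1 (the CASE guard maps NULL comparisons to false).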
ra = """execute calcite {"rels": [
{"id": "0", "relOp": "EnumerableTableScan", "table": ["omnisci", "test"], "fieldNames": ["F_a", "F_b", "F_c", "rowid"], "inputs": []},
{"id": "1", "relOp": "LogicalProject", "fields": ["F___index__", "F_a", "F_b", "F_c"], "exprs": [{"input": 3}, {"input": 0}, {"input": 1}, {"input": 2}]},
{"id": "2", "relOp": "LogicalFilter", "condition": {"op": "CASE", "operands": [{"op": "IS NULL", "operands": [{"input": 1}], "type": {"type": "BOOLEAN", "nullable": true}},
{"literal": false, "type": "BOOLEAN", "target_type": "BOOLEAN", "scale": -2147483648, "precision": 1, "type_scale": -2147483648, "type_precision": 1},
{"op": "=", "operands": [{"input": 1}, {"literal": 1, "type": "DECIMAL", "target_type": "BIGINT", "scale": 0, "precision": 1, "type_scale": 0, "type_precision": 19}],
"type": {"type": "BOOLEAN", "nullable": true}}], "type": {"type": "BOOLEAN", "nullable": true}}},
{"id": "3", "relOp": "LogicalProject", "fields": ["F___index__", "F_a", "F_b", "F_c"], "exprs": [{"input": 0}, {"input": 1}, {"input": 2}, {"input": 3}]}]}
"""
    target = {'F___index__': [0, 1], 'F_a': [1, 1], 'F_b': [None, None], 'F_c': [3, None]}
    cursor = engine.executeRA(ra)
    batch = cursor.getArrowRecordBatch()
    assert batch.to_pydict() == target
def test_drop():
ra = """execute calcite {"rels": [
{"id": "0", "relOp": "EnumerableTableScan", "table": ["omnisci", "test"], "fieldNames": ["F_a", "F_b", "F_c", "rowid"], "inputs": []},
{"id": "1", "relOp": "LogicalProject", "fields": ["F_a", "F_b", "F_c"], "exprs": [{"input": 0}, {"input": 1}, {"input": 2}]},
{"id": "2", "relOp": "LogicalProject", "fields": ["F_a", "F_b", "F_c"], "exprs": [{"input": 0}, {"input": 1}, {"input": 2}]},
{"id": "3", "relOp": "LogicalAggregate", "fields": ["F_a", "F_b", "F_c"], "group": [0], "aggs": [{"agg": "SUM", "operands": [1],
"distinct": false, "type": {"type": "DOUBLE", "nullable": true}}, {"agg": "SUM", "operands": [2], "distinct": false, "type": {"type": "DOUBLE", "nullable": true}}]},
{"id": "4", "relOp": "LogicalSort", "collation": [{"field": 0, "direction": "ASCENDING", "nulls": "LAST"}]},
{"id": "5", "relOp": "LogicalFilter", "condition": {"op": "CASE", "operands": [{"op": "IS NULL", "operands": [{"input": 1}], "type": {"type": "BOOLEAN", "nullable": true}},
{"literal": false, "type": "BOOLEAN", "target_type": "BOOLEAN", "scale": -2147483648, "precision": 1, "type_scale": -2147483648, "type_precision": 1},
{"op": ">", "operands": [{"input": 1}, {"literal": 1, "type": "DECIMAL", "target_type": "BIGINT", "scale": 0, "precision": 1, "type_scale": 0, "type_precision": 19}],
"type": {"type": "BOOLEAN", "nullable": true}}], "type": {"type": "BOOLEAN", "nullable": true}}}, {"id": "6", "relOp": "LogicalProject",
"fields": ["F_a", "F_b", "F_c"], "exprs": [{"input": 0}, {"input": 1}, {"input": 2}]}]}
"""
    target = {'F_a': [2, 3], '$f1': [3, 3], '$f2': [2, 1]}
    cursor = engine.executeRA(ra)
    batch = cursor.getArrowRecordBatch()
    assert batch.to_pydict() == target
def test_iloc():
ra = """execute calcite {"rels": [
{"id": "0", "relOp": "EnumerableTableScan", "table": ["omnisci", "test"], "fieldNames": ["F_a", "F_b", "F_c", "rowid"], "inputs": []},
{"id": "1", "relOp": "LogicalProject", "fields": ["F___index__", "F_a", "F_b", "F_c"], "exprs": [{"input": 3}, {"input": 0}, {"input": 1}, {"input": 2}]},
{"id": "2", "relOp": "LogicalFilter", "condition": {"op": "CASE", "operands": [{"op": "IS NULL", "operands": [{"op": "+", "operands": [{"input": 1},
{"op": "+", "operands": [{"input": 2}, {"literal": 2, "type": "DECIMAL", "target_type": "BIGINT", "scale": 0, "precision": 1, "type_scale": 0, "type_precision": 19}],
"type": {"type": "DOUBLE", "nullable": true}}], "type": {"type": "DOUBLE", "nullable": true}}], "type": {"type": "BOOLEAN", "nullable": true}},
{"literal": false, "type": "BOOLEAN", "target_type": "BOOLEAN", "scale": -2147483648, "precision": 1, "type_scale": -2147483648, "type_precision": 1},
{"op": ">", "operands": [{"op": "+", "operands": [{"input": 1},
{"op": "+", "operands": [{"input": 2}, {"literal": 2, "type": "DECIMAL", "target_type": "BIGINT", "scale": 0, "precision": 1, "type_scale": 0, "type_precision": 19}],
"type": {"type": "DOUBLE", "nullable": true}}], "type": {"type": "DOUBLE", "nullable": true}},
{"literal": 1, "type": "DECIMAL", "target_type": "BIGINT", "scale": 0, "precision": 1, "type_scale": 0, "type_precision": 19}],
"type": {"type": "BOOLEAN", "nullable": true}}], "type": {"type": "BOOLEAN", "nullable": true}}},
{"id": "3", "relOp": "LogicalProject", "fields": ["F___index__", "F_a", "F_b", "F_c"], "exprs": [{"input": 0},
{"op": "+", "operands": [{"input": 1}, {"literal": 2, "type": "DECIMAL", "target_type": "BIGINT", "scale": 0, "precision": 1, "type_scale": 0, "type_precision": 19}],
"type": {"type": "BIGINT", "nullable": true}},
{"op": "+", "operands": [{"input": 2}, {"literal": 2, "type": "DECIMAL", "target_type": "BIGINT", "scale": 0, "precision": 1, "type_scale": 0, "type_precision": 19}],
"type": {"type": "DOUBLE", "nullable": true}},
{"op": "+", "operands": [{"input": 3}, {"literal": 2, "type": "DECIMAL", "target_type": "BIGINT", "scale": 0, "precision": 1, "type_scale": 0, "type_precision": 19}],
"type": {"type": "DOUBLE", "nullable": true}}]}]}
"""
    target = {'F___index__': [2, 3, 4], 'F_a': [4, 4, 5], 'F_b': [4, 3, 5], 'F_c': [None, 4, 3]}
    cursor = engine.executeRA(ra)
    batch = cursor.getArrowRecordBatch()
    assert batch.to_pydict() == target
def test_empty():
ra = """execute calcite {"rels": [
{"id": "0", "relOp": "EnumerableTableScan", "table": ["omnisci", "test"], "fieldNames": ["F_a", "F_b", "F_c", "rowid"], "inputs": []},
{"id": "1", "relOp": "LogicalProject", "fields": ["F___index__", "F_a", "F_b", "F_c"], "exprs": [{"input": 3}, {"input": 0}, {"input": 1}, {"input": 2}]},
{"id": "2", "relOp": "LogicalFilter", "condition": {"op": "CASE", "operands": [{"op": "IS NULL", "operands": [{"input": 1}], "type": {"type": "BOOLEAN", "nullable": true}},
{"literal": false, "type": "BOOLEAN", "target_type": "BOOLEAN", "scale": -2147483648, "precision": 1, "type_scale": -2147483648, "type_precision": 1},
{"op": "=", "operands": [{"input": 1}, {"literal": 1, "type": "DECIMAL", "target_type": "BIGINT", "scale": 0, "precision": 1, "type_scale": 0, "type_precision": 19}],
"type": {"type": "BOOLEAN", "nullable": true}}], "type": {"type": "BOOLEAN", "nullable": true}}},
{"id": "3", "relOp": "LogicalProject", "fields": ["F___index__", "F_a", "F_b", "F_c"], "exprs": [{"input": 0}, {"input": 1}, {"input": 2}, {"input": 3}]}]}
"""
    target = {'F___index__': [0, 1], 'F_a': [1, 1], 'F_b': [None, None], 'F_c': [3, None]}
    cursor = engine.executeRA(ra)
    batch = cursor.getArrowRecordBatch()
    assert batch.to_pydict() == target
def test_filter():
ra = """execute calcite {"rels": [
{"id": "0", "relOp": "EnumerableTableScan", "table": ["omnisci", "test"], "fieldNames": ["F_a", "F_b", "F_c", "rowid"], "inputs": []},
{"id": "1", "relOp": "LogicalProject", "fields": ["F_a", "F_b", "F_c"], "exprs": [{"input": 0}, {"input": 1}, {"input": 2}]},
{"id": "2", "relOp": "LogicalProject", "fields": ["F_a", "F_b", "F_c"], "exprs": [{"input": 0}, {"input": 1}, {"input": 2}]},
{"id": "3", "relOp": "LogicalAggregate", "fields": ["F_a", "F_b", "F_c"], "group": [0], "aggs": [
{"agg": "SUM", "operands": [1], "distinct": false, "type": {"type": "DOUBLE", "nullable": true}},
{"agg": "SUM", "operands": [2], "distinct": false, "type": {"type": "DOUBLE", "nullable": true}}]},
{"id": "4", "relOp": "LogicalSort", "collation": [{"field": 0, "direction": "ASCENDING", "nulls": "LAST"}]},
{"id": "5", "relOp": "LogicalFilter", "condition": {"op": "CASE", "operands": [{"op": "IS NULL", "operands": [{"input": 1}], "type": {"type": "BOOLEAN", "nullable": true}},
{"literal": false, "type": "BOOLEAN", "target_type": "BOOLEAN", "scale": -2147483648, "precision": 1, "type_scale": -2147483648, "type_precision": 1},
{"op": ">", "operands": [{"input": 1}, {"literal": 1, "type": "DECIMAL", "target_type": "BIGINT", "scale": 0, "precision": 1, "type_scale": 0, "type_precision": 19}],
"type": {"type": "BOOLEAN", "nullable": true}}], "type": {"type": "BOOLEAN", "nullable": true}}},
{"id": "6", "relOp": "LogicalProject", "fields": ["F_a", "F_b", "F_c"], "exprs": [{"input": 0}, {"input": 1}, {"input": 2}]}]}
"""
    target = {'F_a': [2, 3], '$f1': [3, 3], '$f2': [2, 1]}
    cursor = engine.executeRA(ra)
    batch = cursor.getArrowRecordBatch()
    assert batch.to_pydict() == target
def test_filter_with_index():
ra = """execute calcite {"rels": [
{"id": "0", "relOp": "EnumerableTableScan", "table": ["omnisci", "test"], "fieldNames": ["F_a", "F_b", "F_c", "rowid"], "inputs": []},
{"id": "1", "relOp": "LogicalProject", "fields": ["F___index__", "F_a", "F_b", "F_c"], "exprs": [{"input": 3}, {"input": 0}, {"input": 1}, {"input": 2}]},
{"id": "2", "relOp": "LogicalFilter", "condition": {"op": "CASE", "operands": [{"op": "IS NULL", "operands": [{"op": "+", "operands": [{"input": 1},
{"op": "+", "operands": [{"input": 2}, {"literal": 2, "type": "DECIMAL", "target_type": "BIGINT", "scale": 0, "precision": 1, "type_scale": 0, "type_precision": 19}],
"type": {"type": "DOUBLE", "nullable": true}}], "type": {"type": "DOUBLE", "nullable": true}}], "type": {"type": "BOOLEAN", "nullable": true}},
{"literal": false, "type": "BOOLEAN", "target_type": "BOOLEAN", "scale": -2147483648, "precision": 1, "type_scale": -2147483648, "type_precision": 1},
{"op": ">", "operands": [{"op": "+", "operands": [{"input": 1}, {"op": "+", "operands": [{"input": 2},
{"literal": 2, "type": "DECIMAL", "target_type": "BIGINT", "scale": 0, "precision": 1, "type_scale": 0, "type_precision": 19}],
"type": {"type": "DOUBLE", "nullable": true}}], "type": {"type": "DOUBLE", "nullable": true}},
{"literal": 1, "type": "DECIMAL", "target_type": "BIGINT", "scale": 0, "precision": 1, "type_scale": 0, "type_precision": 19}],
"type": {"type": "BOOLEAN", "nullable": true}}], "type": {"type": "BOOLEAN", "nullable": true}}},
{"id": "3", "relOp": "LogicalProject", "fields": ["F___index__", "F_a", "F_b", "F_c"], "exprs": [{"input": 0},
{"op": "+", "operands": [{"input": 1}, {"literal": 2, "type": "DECIMAL", "target_type": "BIGINT", "scale": 0, "precision": 1, "type_scale": 0, "type_precision": 19}], "type": {"type": "BIGINT", "nullable": true}},
{"op": "+", "operands": [{"input": 2}, {"literal": 2, "type": "DECIMAL", "target_type": "BIGINT", "scale": 0, "precision": 1, "type_scale": 0, "type_precision": 19}], "type": {"type": "DOUBLE", "nullable": true}},
{"op": "+", "operands": [{"input": 3}, {"literal": 2, "type": "DECIMAL", "target_type": "BIGINT", "scale": 0, "precision": 1, "type_scale": 0, "type_precision": 19}], "type": {"type": "DOUBLE", "nullable": true}}]}]}
"""
    target = {'F___index__': [2, 3, 4], 'F_a': [4, 4, 5], 'F_b': [4, 3, 5], 'F_c': [None, 4, 3]}
    cursor = engine.executeRA(ra)
    batch = cursor.getArrowRecordBatch()
    assert batch.to_pydict() == target
def test_filter_proj():
ra = """execute calcite {"rels": [
{"id": "0", "relOp": "EnumerableTableScan", "table": ["omnisci", "test"], "fieldNames": ["F_a", "F_b", "F_c", "rowid"], "inputs": []},
{"id": "1", "relOp": "LogicalProject", "fields": ["F___index__", "F_a", "F_b", "F_c"], "exprs": [{"input": 3}, {"input": 0}, {"input": 1}, {"input": 2}]},
{"id": "2", "relOp": "LogicalFilter", "condition": {"op": "CASE", "operands": [{"op": "IS NULL", "operands": [{"input": 1}], "type": {"type": "BOOLEAN", "nullable": true}},
{"literal": true, "type": "BOOLEAN", "target_type": "BOOLEAN", "scale": -2147483648, "precision": 1, "type_scale": -2147483648, "type_precision": 1},
{"op": "<>", "operands": [{"input": 1}, {"literal": 1, "type": "DECIMAL", "target_type": "BIGINT", "scale": 0, "precision": 1, "type_scale": 0, "type_precision": 19}],
"type": {"type": "BOOLEAN", "nullable": true}}], "type": {"type": "BOOLEAN", "nullable": true}}},
{"id": "3", "relOp": "LogicalProject", "fields": ["F___index__", "F_a", "F_b"],
"exprs": [{"input": 0}, {"op": "*", "operands": [{"input": 1}, {"input": 2}], "type": {"type": "DOUBLE", "nullable": true}}, {"input": 2}]}]}
"""
    target = {'F___index__': [2, 3, 4], 'F_a': [4, 2, 9], 'F_b': [2, 1, 3]}
    cursor = engine.executeRA(ra)
    batch = cursor.getArrowRecordBatch()
    assert batch.to_pydict() == target
def test_filter_drop():
ra = """execute calcite {"rels": [
{"id": "0", "relOp": "EnumerableTableScan", "table": ["omnisci", "test"], "fieldNames": ["F_a", "F_b", "F_c", "rowid"], "inputs": []},
{"id": "1", "relOp": "LogicalProject", "fields": ["F___index__", "F_a", "F_b", "F_c"], "exprs": [{"input": 3}, {"input": 0}, {"input": 1}, {"input": 2}]},
{"id": "2", "relOp": "LogicalFilter", "condition": {"op": "CASE", "operands": [{"op": "IS NULL", "operands": [{"input": 1}], "type": {"type": "BOOLEAN", "nullable": true}},
{"literal": true, "type": "BOOLEAN", "target_type": "BOOLEAN", "scale": -2147483648, "precision": 1, "type_scale": -2147483648, "type_precision": 1},
{"op": "<>", "operands": [{"input": 1}, {"literal": 1, "type": "DECIMAL", "target_type": "BIGINT", "scale": 0, "precision": 1, "type_scale": 0, "type_precision": 19}],
"type": {"type": "BOOLEAN", "nullable": true}}], "type": {"type": "BOOLEAN", "nullable": true}}},
{"id": "3", "relOp": "LogicalProject", "fields": ["F___index__", "F_a", "F_b"], "exprs": [{"input": 0},
{"op": "*", "operands": [{"input": 1}, {"input": 2}], "type": {"type": "DOUBLE", "nullable": true}}, {"input": 2}]}]}
"""
    target = {'F___index__': [2, 3, 4], 'F_a': [4, 2, 9], 'F_b': [2, 1, 3]}
    cursor = engine.executeRA(ra)
    batch = cursor.getArrowRecordBatch()
    assert batch.to_pydict() == target
def test_self_join():
    data = [
        pa.array([55, 66, 77]),
        pa.array([5, 6, 7]),
        pa.array([3, 1, 0]),
        pa.array(['aa', 'bb', 'cc'])
    ]
    table = pa.Table.from_arrays(data, ['x', 'w', 'y', 'z'])
    engine.importArrowTable('jtest', table)
    ra = """execute calcite {"rels": [
{"id": "0", "relOp": "LogicalTableScan", "fieldNames": ["x", "w", "y", "z", "rowid"], "table": ["omnisci", "jtest"], "inputs": []},
{"id": "1", "relOp": "LogicalTableScan", "fieldNames": ["x", "w", "y", "z", "rowid"], "table": ["omnisci", "jtest"], "inputs": []},
{"id": "2", "relOp": "LogicalJoin", "condition": {"op": "=", "operands": [{"input": 0}, {"input": 5}], "type": {"type": "BOOLEAN", "nullable": false}}, "joinType": "left", "inputs": ["0", "1"]},
{"id": "3", "relOp": "LogicalProject", "fields": ["x", "w", "y", "z", "x0", "w0", "y0", "z0"], "exprs": [{"input": 0}, {"input": 1}, {"input": 2}, {"input": 3}, {"input": 5 }, {"input": 6}, {"input": 7}, {"input": 8}]}]}
"""
    target = {'x': [55, 66, 77], 'w': [5, 6, 7], 'y': [3, 1, 0], 'z': ['aa', 'bb', 'cc'], 'x0': [55, 66, 77], 'w0': [5, 6, 7], 'y0': [3, 1, 0], 'z0': ['aa', 'bb', 'cc']}
    cursor = engine.executeRA(ra)
    batch = cursor.getArrowRecordBatch()
    assert batch.to_pydict() == target
if __name__ == "__main__":
    pytest.main(["-v", __file__])
| 76.644231 | 220 | 0.54554 | 1,975 | 15,942 | 4.24 | 0.073924 | 0.04299 | 0.010389 | 0.013852 | 0.913542 | 0.910198 | 0.903272 | 0.900525 | 0.900525 | 0.897062 | 0 | 0.040712 | 0.140258 | 15,942 | 207 | 221 | 77.014493 | 0.570261 | 0.001505 | 0 | 0.631016 | 0 | 0.459893 | 0.811522 | 0.011057 | 0 | 0 | 0 | 0 | 0.058824 | 1 | 0.058824 | false | 0 | 0.037433 | 0 | 0.096257 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
8360ae471ffb6bf03bb22fd687b983f8a130c113 | 16,238 | py | Python | scripts/data_processing/process_migration_data_initial.py | mikiec84/delphi | 2e517f21e76e334c7dfb14325d25879ddf26d10d | [
"Apache-2.0"
] | 25 | 2018-03-03T11:57:57.000Z | 2022-01-16T21:19:54.000Z | scripts/data_processing/process_migration_data_initial.py | mikiec84/delphi | 2e517f21e76e334c7dfb14325d25879ddf26d10d | [
"Apache-2.0"
] | 385 | 2018-02-21T16:52:06.000Z | 2022-02-17T07:44:56.000Z | scripts/data_processing/process_migration_data_initial.py | mikiec84/delphi | 2e517f21e76e334c7dfb14325d25879ddf26d10d | [
"Apache-2.0"
] | 19 | 2018-03-20T01:08:11.000Z | 2021-09-29T01:04:49.000Z | import sys
from pathlib import Path
import numpy as np
import pandas as pd
data_dir = Path("data")
def clean_reachjongleijan_data():
df = pd.read_csv(
"data/raw/migration/Initial annotation exercise for migration use case - ReachJongleiJan - dep var.tsv",
sep="\t",
)
df = df[~np.isnan(df["Value count"])]
df.drop(
df.columns[[0, 1, 2, 4, 5, 8, 9, 12, 13, 16, 19, 20]],
axis=1,
inplace=True,
)
d = {
"January": 1.0,
"February": 2.0,
"March": 3.0,
"April": 4.0,
"May": 5.0,
"June": 6.0,
"July": 7.0,
"August": 8.0,
"September": 9.0,
"October": 10.0,
"November": 11.0,
"December": 12.0,
}
df.replace(d, inplace=True)
df["Start year"].fillna(value=-1, inplace=True, downcast="infer")
df["Start month"].fillna(value=0, inplace=True, downcast="infer")
df["End year"].fillna(value=-1, inplace=True, downcast="infer")
df["End month"].fillna(value=0, inplace=True, downcast="infer")
c = {
1: 31,
2: 28,
3: 31,
4: 30,
5: 31,
6: 30,
7: 31,
8: 31,
9: 30,
10: 31,
11: 30,
12: 31,
}
for i in range(1, 13):
df.loc[
(df["Value unit (Amount, Rate, Percentage)"] == "Daily")
& (df["End month"] == i),
"Value count",
] = (
df.loc[
(df["Value unit (Amount, Rate, Percentage)"] == "Daily")
& (df["End month"] == i),
"Value count",
]
* c[i]
)
df["Unit"] = "people"
df.reset_index(drop=True, inplace=True)
df["Variable"] = df["Event trigger text"]
df.loc[0:1, "Variable"] = "Internally Displaced People"
df.loc[
df["Event trigger text"] == "leaving", "Variable"
] = "Outgoing Migrants"
df.loc[
df["Event trigger text"] == "returning", "Variable"
] = "Incoming Migrants"
df["Source country"] = "South Sudan"
df["Source county"] = "None"
df["Source state"] = "None"
df["Destination country"] = "Ethiopia"
df["Destination county"] = "None"
df["Destination state"] = "None"
df.loc[0, "Source state"] = "Jonglei"
df.loc[0, "Destination country"] = "South Sudan"
df.loc[0, "Destination state"] = "Eastern Lakes"
df.loc[0, "Destination county"] = "Awerial South"
df.loc[1, "Source state"] = "Yei River"
df.loc[1, "Source county"] = "Yei"
df.loc[1, "Destination country"] = "South Sudan"
df.loc[1, "Destination state"] = "Jonglei"
df.loc[1, "Destination county"] = "Bor"
df.loc[
df["Variable"] == "Incoming Migrants", "Source country"
] = "Ethiopia"
df.loc[
df["Variable"] == "Incoming Migrants", "Destination country"
] = "South Sudan"
df["Source"] = "Migration Curation Experiment"
df.drop(df.columns[[0, 1, 2, 4]], axis=1, inplace=True)
df.rename(columns={"Value count": "Value"}, inplace=True)
df = df.reindex(
columns=[
"Source country",
"Source state",
"Source county",
"Destination country",
"Destination state",
"Destination county",
"Source",
"Unit",
"Value",
"Variable",
"Start year",
"Start month",
"End year",
"End month",
]
)
return df
def clean_54660_data():
df = pd.read_csv(
"data/raw/migration/Initial annotation exercise for migration use case - 54660 - dep var.tsv",
sep="\t",
)
df = df[~np.isnan(df["Value count"])]
df.drop(
df.columns[[0, 1, 2, 4, 5, 8, 9, 12, 13, 16, 19, 20]],
axis=1,
inplace=True,
)
d = {
"January": 1.0,
"February": 2.0,
"March": 3.0,
"April": 4.0,
"May": 5.0,
"June": 6.0,
"July": 7.0,
"August": 8.0,
"September": 9.0,
"October": 10.0,
"November": 11.0,
"December": 12.0,
}
df.replace(d, inplace=True)
df["Start year"].fillna(value=-1, inplace=True, downcast="infer")
df["Start month"].fillna(value=0, inplace=True, downcast="infer")
df["End year"].fillna(value=-1, inplace=True, downcast="infer")
df["End month"].fillna(value=0, inplace=True, downcast="infer")
c = {
0: 30,
1: 31,
2: 28,
3: 31,
4: 30,
5: 31,
6: 30,
7: 31,
8: 31,
9: 30,
10: 31,
11: 30,
12: 31,
}
for i in range(13):
df.loc[
(df["Value unit (Amount, Rate, Percentage)"] == "Daily")
& (df["End month"] == i),
"Value count",
] = (
df.loc[
(df["Value unit (Amount, Rate, Percentage)"] == "Daily")
& (df["End month"] == i),
"Value count",
]
* c[i]
)
df.loc[5, "Value count"] = df.loc[5, "Value count"] * 31
df["Unit"] = "people"
df.drop([3, 5, 10, 21, 27], inplace=True)
df.loc[7, "Start year"] = 2017
df.loc[7, "Start month"] = 3
df.loc[7, "End year"] = 2017
df.loc[7, "End month"] = 3
df.loc[15:18, "Start year"] = 2016
df.loc[15:18, "Start month"] = 9
df.reset_index(drop=True, inplace=True)
df["Variable"] = "Outgoing Migrants"
df["Source country"] = "South Sudan"
df["Source county"] = "None"
df["Source state"] = "None"
df["Destination country"] = "Ethiopia"
df["Destination county"] = "None"
df["Destination state"] = "Gambella"
df.loc[[5, 6, 12], "Destination state"] = "None"
df["Source"] = "Migration Curation Experiment"
df.drop(df.columns[[0, 1, 2, 4]], axis=1, inplace=True)
df.rename(columns={"Value count": "Value"}, inplace=True)
df = df.reindex(
columns=[
"Source country",
"Source state",
"Source county",
"Destination country",
"Destination state",
"Destination county",
"Source",
"Unit",
"Value",
"Variable",
"Start year",
"Start month",
"End year",
"End month",
]
)
return df
def clean_62801_data():
df = pd.read_csv(
"data/raw/migration/Initial annotation exercise for migration use case - 62801 - dep var.tsv",
sep="\t",
)
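# Row 22 holds a malformed count in the source sheet; set it manually before
# the float cast below (assumed data-entry fix).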
df.loc[22, "Value count"] = 700
df = df[~np.isnan(df["Value count"].astype(float))]
df.drop(
df.columns[[0, 1, 2, 4, 5, 8, 9, 12, 13, 16, 19, 20]],
axis=1,
inplace=True,
)
df["Value count"] = df["Value count"].astype(float)
d = {
"January": 1.0,
"February": 2.0,
"March": 3.0,
"April": 4.0,
"May": 5.0,
"June": 6.0,
"July": 7.0,
"August": 8.0,
"September": 9.0,
"October": 10.0,
"November": 11.0,
"December": 12.0,
}
df.replace(d, inplace=True)
df["Unit"] = "people"
df["Start year"].fillna(value=-1, inplace=True, downcast="infer")
df["Start month"].fillna(value=0, inplace=True, downcast="infer")
df["End year"].fillna(value=-1, inplace=True, downcast="infer")
df["End month"].fillna(value=0, inplace=True, downcast="infer")
df.loc[4, "Value count"] = df.loc[4, "Value count"] * 30
df.loc[7:9, "Value count"] = (df.loc[7:9, "Value count"] * 365653.0) / 100
df.loc[7:9, "End year"] = 2017
df.loc[7:9, "End month"] = 3
df.loc[16:19, "Start year"] = 2016
df.loc[16:19, "Start month"] = 9
df["Variable"] = "Outgoing Migrants"
df.drop([3, 11], inplace=True)
df.reset_index(drop=True, inplace=True)
df["Source country"] = "South Sudan"
df["Source county"] = "None"
df["Source state"] = "None"
df["Destination country"] = "Ethiopia"
df["Destination county"] = "None"
df["Destination state"] = "Gambella"
df.loc[5, "Source state"] = "Upper Nile"
df.loc[6, "Source state"] = "Jonglei"
df.loc[7, "Source state"] = "Unity"
df.loc[15, "Source state"] = "Boma"
df.loc[15, "Source county"] = "Pochala"
df.loc[[3, 5, 6, 7], "Destination state"] = "None"
df["Source"] = "Migration Curation Experiment"
df.drop(df.columns[[0, 1, 2, 4]], axis=1, inplace=True)
df.rename(columns={"Value count": "Value"}, inplace=True)
df = df.reindex(
columns=[
"Source country",
"Source state",
"Source county",
"Destination country",
"Destination state",
"Destination county",
"Source",
"Unit",
"Value",
"Variable",
"Start year",
"Start month",
"End year",
"End month",
]
)
return df
def clean_62803_data():
df = pd.read_csv(
"data/raw/migration/Initial annotation exercise for migration use case - 62803 - dep var.tsv",
sep="\t",
)
df = df[~np.isnan(df["Value count"])]
df.drop(
df.columns[[0, 1, 2, 4, 5, 8, 9, 12, 13, 16, 19, 20]],
axis=1,
inplace=True,
)
d = {
"January": 1.0,
"February": 2.0,
"March": 3.0,
"April": 4.0,
"May": 5.0,
"June": 6.0,
"July": 7.0,
"August": 8.0,
"September": 9.0,
"October": 10.0,
"November": 11.0,
"December": 12.0,
}
df.replace(d, inplace=True)
df["Unit"] = "people"
df["Start year"].fillna(value=-1, inplace=True, downcast="infer")
df["Start month"].fillna(value=0, inplace=True, downcast="infer")
df["End year"].fillna(value=-1, inplace=True, downcast="infer")
df["End month"].fillna(value=0, inplace=True, downcast="infer")
df.loc[3, "Value count"] = df.loc[3, "Value count"] * 30
df.loc[4:6, "Value count"] = (df.loc[4:6, "Value count"] * 361991.0) / 100
df.loc[4:6, "End year"] = 2017
df.loc[4:6, "End month"] = 4
df["Variable"] = "Outgoing Migrants"
df.drop([7, 9], inplace=True)
df.reset_index(drop=True, inplace=True)
df["Source country"] = "South Sudan"
df["Source county"] = "None"
df["Source state"] = "None"
df["Destination country"] = "Ethiopia"
df["Destination county"] = "None"
df["Destination state"] = "Gambella"
df.loc[4, "Source state"] = "Upper Nile"
df.loc[5, "Source state"] = "Jonglei"
df.loc[6, "Source state"] = "Unity"
df.loc[8, "Source state"] = "Boma"
df.loc[8, "Source county"] = "Pochala"
df.loc[3:6, "Destination state"] = "None"
df["Source"] = "Migration Curation Experiment"
df.drop(df.columns[[0, 1, 2, 4]], axis=1, inplace=True)
df.rename(columns={"Value count": "Value"}, inplace=True)
df = df.reindex(
columns=[
"Source country",
"Source state",
"Source county",
"Destination country",
"Destination state",
"Destination county",
"Source",
"Unit",
"Value",
"Variable",
"Start year",
"Start month",
"End year",
"End month",
]
)
return df
def clean_63604_data():
df = pd.read_csv(
"data/raw/migration/Initial annotation exercise for migration use case - 63604 - dep var.tsv",
sep="\t",
)
df = df[~np.isnan(df["Value count"])]
df.drop(
df.columns[[0, 1, 2, 4, 5, 8, 9, 12, 13, 16, 19, 20]],
axis=1,
inplace=True,
)
d = {
"January": 1.0,
"February": 2.0,
"March": 3.0,
"April": 4.0,
"May": 5.0,
"June": 6.0,
"July": 7.0,
"August": 8.0,
"September": 9.0,
"October": 10.0,
"November": 11.0,
"December": 12.0,
}
df.replace(d, inplace=True)
df["Unit"] = "people"
df["Start year"].fillna(value=-1, inplace=True, downcast="infer")
df["Start month"].fillna(value=0, inplace=True, downcast="infer")
df["End year"].fillna(value=-1, inplace=True, downcast="infer")
df["End month"].fillna(value=0, inplace=True, downcast="infer")
df["Variable"] = "Outgoing Migrants"
df.drop([1, 5, 6, 7], inplace=True)
df.reset_index(drop=True, inplace=True)
df["Source country"] = "South Sudan"
df["Source county"] = "None"
df["Source state"] = "None"
df["Destination country"] = "Ethiopia"
df["Destination county"] = "None"
df["Destination state"] = "None"
df.loc[2, "Destination state"] = "Beneshangul Gumuz"
df.loc[3, "Destination state"] = "Gambella"
df["Source"] = "Migration Curation Experiment"
df.drop(df.columns[[0, 1, 2, 4]], axis=1, inplace=True)
df.rename(columns={"Value count": "Value"}, inplace=True)
df = df.reindex(
columns=[
"Source country",
"Source state",
"Source county",
"Destination country",
"Destination state",
"Destination county",
"Source",
"Unit",
"Value",
"Variable",
"Start year",
"Start month",
"End year",
"End month",
]
)
return df
def clean_UNHCR_data():
df = pd.read_csv(
"data/raw/migration/Initial annotation exercise for migration use case - UNHCR - dep var.tsv",
sep="\t",
)
df = df[~np.isnan(df["Value count"])]
df.drop(
df.columns[[0, 1, 2, 4, 5, 8, 9, 12, 13, 16, 19, 20]],
axis=1,
inplace=True,
)
d = {
"January": 1.0,
"February": 2.0,
"March": 3.0,
"April": 4.0,
"May": 5.0,
"June": 6.0,
"July": 7.0,
"August": 8.0,
"September": 9.0,
"October": 10.0,
"November": 11.0,
"December": 12.0,
}
df.replace(d, inplace=True)
df["Unit"] = "people"
df["Start year"].fillna(value=-1, inplace=True, downcast="infer")
df["Start month"].fillna(value=0, inplace=True, downcast="infer")
df["End year"].fillna(value=-1, inplace=True, downcast="infer")
df["End month"].fillna(value=0, inplace=True, downcast="infer")
df["Variable"] = "Outgoing Migrants"
df.loc[5, "Value count"] = df.loc[5, "Value count"] * 31
df.loc[6:8, "Value count"] = (df.loc[6:8, "Value count"] * 361000.0) / 100
df.loc[6:8, "End year"] = 2017
df.loc[6:8, "End month"] = 3
df.loc[17:20, "Start year"] = 2016
df.loc[17:20, "Start month"] = 9
df.drop([0], inplace=True)
df.reset_index(drop=True, inplace=True)
df["Source country"] = "South Sudan"
df["Source county"] = "None"
df["Source state"] = "None"
df["Destination country"] = "Ethiopia"
df["Destination county"] = "None"
df["Destination state"] = "None"
df.loc[4, "Source state"] = "Upper Nile"
df.loc[5, "Source state"] = "Jonglei"
df.loc[6, "Source state"] = "Unity"
df.loc[0:6, "Destination state"] = "Gambella"
df.loc[9:14, "Destination state"] = "Gambella"
df["Source"] = "Migration Curation Experiment"
df.drop(df.columns[[0, 1, 2, 4]], axis=1, inplace=True)
df.rename(columns={"Value count": "Value"}, inplace=True)
df = df.reindex(
columns=[
"Source country",
"Source state",
"Source county",
"Destination country",
"Destination state",
"Destination county",
"Source",
"Unit",
"Value",
"Variable",
"Start year",
"Start month",
"End year",
"End month",
]
)
return df
if __name__ == "__main__":
combined_df = pd.concat([
clean_reachjongleijan_data(),
clean_54660_data(),
clean_62801_data(),
clean_62803_data(),
clean_63604_data(),
clean_UNHCR_data(),
])
combined_df.to_csv(sys.argv[1], sep="\t", index=False)
| 27.018303 | 112 | 0.509607 | 2,030 | 16,238 | 4.052709 | 0.079803 | 0.041935 | 0.047405 | 0.070013 | 0.898505 | 0.837608 | 0.780965 | 0.778169 | 0.778169 | 0.768202 | 0 | 0.060571 | 0.313709 | 16,238 | 600 | 113 | 27.063333 | 0.677674 | 0 | 0 | 0.748032 | 0 | 0 | 0.31576 | 0.009608 | 0 | 0 | 0 | 0 | 0 | 1 | 0.011811 | false | 0 | 0.007874 | 0 | 0.031496 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
369bece0ce9b9f0cbd64681c201fa6aded4099e9 | 133 | py | Python | himsapi/test_connect.py | nexuszix/TBH1INF | f39a6f0f34f5d42233fc5ef54e47152eb3b947e4 | [
"Apache-2.0"
] | null | null | null | himsapi/test_connect.py | nexuszix/TBH1INF | f39a6f0f34f5d42233fc5ef54e47152eb3b947e4 | [
"Apache-2.0"
] | null | null | null | himsapi/test_connect.py | nexuszix/TBH1INF | f39a6f0f34f5d42233fc5ef54e47152eb3b947e4 | [
"Apache-2.0"
] | null | null | null | ibm_db.connect("DATABASE=S103z3mm;HOSTNAME=192.1.1.99;PORT=446;PROTOCOL=TCPIP;UID=BIRD;PWD=BIRDKK;CurrentSchema=TBHPFTEST;", "", "")
| 66.5 | 132 | 0.766917 | 20 | 133 | 5.05 | 0.95 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.107692 | 0.022556 | 133 | 1 | 133 | 133 | 0.669231 | 0 | 0 | 0 | 0 | 1 | 0.796992 | 0.796992 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
36a5d0618c20b1f9da647e2c0e094da771bbed31 | 15,900 | py | Python | train_ditto.py | BarGenossar/FlexER | b2f77de8a0dc505ad55e60c43eb57f96cf3d1642 | [
"Apache-2.0"
] | null | null | null | train_ditto.py | BarGenossar/FlexER | b2f77de8a0dc505ad55e60c43eb57f96cf3d1642 | [
"Apache-2.0"
] | null | null | null | train_ditto.py | BarGenossar/FlexER | b2f77de8a0dc505ad55e60c43eb57f96cf3d1642 | [
"Apache-2.0"
] | null | null | null | import os
import argparse
import json
import sys
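# Make the bundled Snippext_public checkout importable without installation.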
sys.path.insert(0, "Snippext_public")
from ditto.dataset import DittoDataset
from ditto.summarize import Summarizer
from ditto.knowledge import *
if __name__=="__main__":
parser = argparse.ArgumentParser()
parser.add_argument("--task", type=str, default="Structured/Beer")
parser.add_argument("--run_id", type=int, default=0)
parser.add_argument("--batch_size", type=int, default=64)
parser.add_argument("--max_len", type=int, default=256)
parser.add_argument("--lr", type=float, default=3e-5)
parser.add_argument("--n_epochs", type=int, default=20)
parser.add_argument("--finetuning", dest="finetuning", action="store_true")
parser.add_argument("--save_model", dest="save_model", action="store_true")
parser.add_argument("--logdir", type=str, default="checkpoints/")
parser.add_argument("--lm", type=str, default='distilbert')
parser.add_argument("--bert_path", type=str, default=None)
parser.add_argument("--fp16", dest="fp16", action="store_true")
parser.add_argument("--da", type=str, default=None)
parser.add_argument("--alpha_aug", type=float, default=0.8)
parser.add_argument("--dk", type=str, default=None)
parser.add_argument("--summarize", dest="summarize", action="store_true")
parser.add_argument("--balance", dest="balance", action="store_true")
parser.add_argument("--size", type=int, default=None)
parser.add_argument("--intent", type=int, default=1)
parser.add_argument("--intents_num", type=int, default=2)
parser.add_argument("--inference", type=str, default=None)
parser.add_argument("--MCML_inference", type=str, default=None)
parser.add_argument("--seed", type=int, default=1)
hp = parser.parse_args()
# only a single task for baseline
main_task = hp.task
intent = hp.intent
inference = hp.inference
MCML_inference = hp.MCML_inference
seed = hp.seed
if inference is None:
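# Default mode: train and evaluate one single-intent Ditto model per intent.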
for intent in range(hp.intents_num):
print("********************" + str(intent) + "********************")
# create the tag of the run
task = main_task + str(intent)
# run_tag = '%s_lm=%s_da=%s_dk=%s_su=%s_size=%s_id=%d' % (task, hp.lm, hp.da,
# hp.dk, hp.summarize,
# str(hp.size), hp.run_id)
# run_tag = run_tag.replace('/', '_')
# run_tag = main_task + str(intent)
run_tag = 'checkpoints/' + main_task + '/'
# task = main_task + str(intent)
# load task configuration
configs = json.load(open('configs.json'))
configs = {conf['name']: conf for conf in configs}
config = configs[task]
# config['trainset'] = config['trainset'].replace('train.txt', "Intents/" + str(intent) + "/train.txt")
# config['validset'] = config['validset'].replace('valid.txt', "Intents/" + str(intent) + "/valid.txt")
# config['testset'] = config['testset'].replace('test.txt', "Intents/" + str(intent) + "/test.txt")
trainset = config['trainset']
validset = config['validset']
testset = config['testset']
# trainset = trainset.replace('train.txt', "Intent/" + str(intent) + "/train.txt")
# validset = validset.replace('valid.txt', "Intent/" + str(intent) + "/valid.txt")
# testset = testset.replace('test.txt', "Intent/" + str(intent) + "/test.txt")
task_type = config['task_type']
vocab = config['vocab']
tasknames = [task]
# summarize the sequences up to the max sequence length
if hp.summarize:
summarizer = Summarizer(config, lm=hp.lm)
trainset = summarizer.transform_file(trainset, max_len=hp.max_len)
validset = summarizer.transform_file(validset, max_len=hp.max_len)
testset = summarizer.transform_file(testset, max_len=hp.max_len)
if hp.dk is not None:
if hp.dk == 'product':
injector = ProductDKInjector(config, hp.dk)
else:
injector = GeneralDKInjector(config, hp.dk)
trainset = injector.transform_file(trainset)
validset = injector.transform_file(validset)
testset = injector.transform_file(testset)
# load train/dev/test sets
train_dataset = DittoDataset(trainset, vocab, task,
lm=hp.lm, inference=inference,
max_len=hp.max_len,
size=hp.size,
balance=hp.balance)
valid_dataset = DittoDataset(validset, vocab, task, lm=hp.lm, inference=inference)
test_dataset = DittoDataset(testset, vocab, task, lm=hp.lm, inference=inference)
if hp.da is None:
from snippext.baseline import initialize_and_train
initialize_and_train(config,
train_dataset,
valid_dataset,
test_dataset,
hp,
seed,
run_tag, task.split('/')[1],
inference, MCML_inference,
hp.intents_num)
else:
from snippext.mixda import initialize_and_train
augment_dataset = DittoDataset(trainset, vocab, task,
lm=hp.lm, inference=inference,
max_len=hp.max_len,
augment_op=hp.da,
size=hp.size,
balance=hp.balance)
initialize_and_train(config,
train_dataset,
augment_dataset,
valid_dataset,
test_dataset,
hp,
seed,
run_tag, task.split('/')[1],
inference,
MCML_inference,
hp.intents_num)
elif inference == 'Multilabel':
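# Multilabel mode: train a single model that predicts all intents at once.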
# create the tag of the run
task = main_task + '_Multilabel'
# run_tag = '%s_lm=%s_da=%s_dk=%s_su=%s_size=%s_id=%d' % (task, hp.lm, hp.da,
# hp.dk, hp.summarize,
# str(hp.size), hp.run_id)
# run_tag = run_tag.replace('/', '_')
# run_tag = main_task + str(intent)
run_tag = 'checkpoints/' + main_task + '/'
# task = main_task + str(intent)
# load task configuration
configs = json.load(open('configs.json'))
configs = {conf['name']: conf for conf in configs}
config = configs[task]
# config['trainset'] = config['trainset'].replace('train.txt', "Intents/" + str(intent) + "/train.txt")
# config['validset'] = config['validset'].replace('valid.txt', "Intents/" + str(intent) + "/valid.txt")
# config['testset'] = config['testset'].replace('test.txt', "Intents/" + str(intent) + "/test.txt")
trainset = config['trainset']
validset = config['validset']
testset = config['testset']
# trainset = trainset.replace('train.txt', "Intent/" + str(intent) + "/train.txt")
# validset = validset.replace('valid.txt', "Intent/" + str(intent) + "/valid.txt")
# testset = testset.replace('test.txt', "Intent/" + str(intent) + "/test.txt")
task_type = config['task_type']
vocab = config['vocab']
tasknames = [task]
# summarize the sequences up to the max sequence length
if hp.summarize:
summarizer = Summarizer(config, lm=hp.lm)
trainset = summarizer.transform_file(trainset, max_len=hp.max_len)
validset = summarizer.transform_file(validset, max_len=hp.max_len)
testset = summarizer.transform_file(testset, max_len=hp.max_len)
if hp.dk is not None:
if hp.dk == 'product':
injector = ProductDKInjector(config, hp.dk)
else:
injector = GeneralDKInjector(config, hp.dk)
trainset = injector.transform_file(trainset)
validset = injector.transform_file(validset)
testset = injector.transform_file(testset)
# load train/dev/test sets
train_dataset = DittoDataset(trainset, vocab, task,
lm=hp.lm,
inference='Multilabel',
max_len=hp.max_len,
size=hp.size,
balance=hp.balance)
valid_dataset = DittoDataset(validset, vocab, task, lm=hp.lm, inference='Multilabel')
test_dataset = DittoDataset(testset, vocab, task, lm=hp.lm, inference='Multilabel')
if hp.da is None:
from snippext.baseline import initialize_and_train
initialize_and_train(config,
train_dataset,
valid_dataset,
test_dataset,
hp,
seed,
run_tag, task.split('/')[1],
inference='Multilabel',
MCML_inference=MCML_inference,
intents_num=hp.intents_num,
)
else:
from snippext.mixda import initialize_and_train
augment_dataset = DittoDataset(trainset, vocab, task,
lm=hp.lm,
inference=inference,
max_len=hp.max_len,
augment_op=hp.da,
size=hp.size,
balance=hp.balance,
)
initialize_and_train(config,
train_dataset,
augment_dataset,
valid_dataset,
test_dataset,
hp,
seed,
run_tag, task.split('/')[1],
inference='Multilabel',
MCML_inference=MCML_inference,
intents_num=hp.intents_num,
)
elif inference == 'MCML':
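# MCML mode: multilabel training with the extra multilabel head enabled
# (ML_head=True below).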
# create the tag of the run
task = main_task + '_MCML'
# run_tag = '%s_lm=%s_da=%s_dk=%s_su=%s_size=%s_id=%d' % (task, hp.lm, hp.da,
# hp.dk, hp.summarize,
# str(hp.size), hp.run_id)
# run_tag = run_tag.replace('/', '_')
# run_tag = main_task + str(intent)
run_tag = 'checkpoints/' + main_task + '/'
# task = main_task + str(intent)
# load task configuration
configs = json.load(open('configs.json'))
configs = {conf['name']: conf for conf in configs}
config = configs[task]
# config['trainset'] = config['trainset'].replace('train.txt', "Intents/" + str(intent) + "/train.txt")
# config['validset'] = config['validset'].replace('valid.txt', "Intents/" + str(intent) + "/valid.txt")
# config['testset'] = config['testset'].replace('test.txt', "Intents/" + str(intent) + "/test.txt")
trainset = config['trainset']
validset = config['validset']
testset = config['testset']
# trainset = trainset.replace('train.txt', "Intent/" + str(intent) + "/train.txt")
# validset = validset.replace('valid.txt', "Intent/" + str(intent) + "/valid.txt")
# testset = testset.replace('test.txt', "Intent/" + str(intent) + "/test.txt")
task_type = config['task_type']
vocab = config['vocab']
tasknames = [task]
# summarize the sequences up to the max sequence length
if hp.summarize:
summarizer = Summarizer(config, lm=hp.lm)
trainset = summarizer.transform_file(trainset, max_len=hp.max_len)
validset = summarizer.transform_file(validset, max_len=hp.max_len)
testset = summarizer.transform_file(testset, max_len=hp.max_len)
if hp.dk is not None:
if hp.dk == 'product':
injector = ProductDKInjector(config, hp.dk)
else:
injector = GeneralDKInjector(config, hp.dk)
trainset = injector.transform_file(trainset)
validset = injector.transform_file(validset)
testset = injector.transform_file(testset)
# load train/dev/test sets
train_dataset = DittoDataset(trainset, vocab, task,
lm=hp.lm,
inference='Multilabel',
max_len=hp.max_len,
size=hp.size,
balance=hp.balance)
valid_dataset = DittoDataset(validset, vocab, task, lm=hp.lm, inference='Multilabel')
test_dataset = DittoDataset(testset, vocab, task, lm=hp.lm, inference='Multilabel')
if hp.da is None:
from snippext.baseline import initialize_and_train
initialize_and_train(config,
train_dataset,
valid_dataset,
test_dataset,
hp,
seed,
run_tag, task.split('/')[1],
inference='MCML',
MCML_inference=MCML_inference,
intents_num=hp.intents_num,
ML_head=True)
else:
from snippext.mixda import initialize_and_train
augment_dataset = DittoDataset(trainset, vocab, task,
lm=hp.lm,
inference='Multilabel',
max_len=hp.max_len,
augment_op=hp.da,
size=hp.size,
balance=hp.balance)
initialize_and_train(config,
train_dataset,
augment_dataset,
valid_dataset,
test_dataset,
hp,
seed,
run_tag, task.split('/')[1],
inference='MCML',
MCML_inference=MCML_inference,
intents_num=hp.intents_num,
ML_head=True)
| 47.181009 | 115 | 0.48195 | 1,490 | 15,900 | 4.977181 | 0.089933 | 0.025081 | 0.052724 | 0.022249 | 0.860302 | 0.853021 | 0.830232 | 0.813646 | 0.80178 | 0.788835 | 0 | 0.002767 | 0.408931 | 15,900 | 336 | 116 | 47.321429 | 0.786338 | 0.191572 | 0 | 0.760504 | 0 | 0 | 0.058107 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.054622 | 0 | 0.054622 | 0.004202 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
36a876a1ac42068c409fe2d8f0c050848538e439 | 3,051 | py | Python | cultivo/cultivo_main/migrations/0007_auto_20181113_0321.py | amanparmar17/cultivo-1 | 06030116ba47f99fee8f413404777c9dbdb4e92a | [
"MIT"
] | 31 | 2018-12-01T17:06:07.000Z | 2022-02-15T13:23:14.000Z | cultivo/cultivo_main/migrations/0007_auto_20181113_0321.py | amanparmar17/cultivo-1 | 06030116ba47f99fee8f413404777c9dbdb4e92a | [
"MIT"
] | 1 | 2021-12-24T13:22:23.000Z | 2021-12-24T13:23:57.000Z | cultivo/cultivo_main/migrations/0007_auto_20181113_0321.py | amanparmar17/cultivo-1 | 06030116ba47f99fee8f413404777c9dbdb4e92a | [
"MIT"
] | 13 | 2020-08-14T05:19:38.000Z | 2022-01-18T13:55:15.000Z | # Generated by Django 2.1.1 on 2018-11-12 21:51
from django.db import migrations, models
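# Widens a set of monetary columns on the one/pred_one/prod_area models to
# DecimalField(max_digits=12, decimal_places=4).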
class Migration(migrations.Migration):
    dependencies = [
        ('cultivo_main', '0006_auto_20181113_0320'),
    ]

    operations = [
        migrations.AlterField(
            model_name='one',
            name='GPValue1_million_dollar',
            field=models.DecimalField(decimal_places=4, default=0.0, max_digits=12),
        ),
        migrations.AlterField(
            model_name='one',
            name='GPValue1_million_slc',
            field=models.DecimalField(decimal_places=4, default=0.0, max_digits=12),
        ),
        migrations.AlterField(
            model_name='one',
            name='GPValue2_million_dollar',
            field=models.DecimalField(decimal_places=4, default=0.0, max_digits=12),
        ),
        migrations.AlterField(
            model_name='one',
            name='GPValue2_million_slc',
            field=models.DecimalField(decimal_places=4, default=0.0, max_digits=12),
        ),
        migrations.AlterField(
            model_name='one',
            name='GPValue_thousand_dollar',
            field=models.DecimalField(decimal_places=4, default=0.0, max_digits=12),
        ),
        migrations.AlterField(
            model_name='one',
            name='NPValue_thousand_dollar',
            field=models.DecimalField(decimal_places=4, default=0.0, max_digits=12),
        ),
        migrations.AlterField(
            model_name='pred_one',
            name='GPValue1_million_dollar',
            field=models.DecimalField(decimal_places=4, default=0.0, max_digits=12),
        ),
        migrations.AlterField(
            model_name='pred_one',
            name='GPValue1_million_slc',
            field=models.DecimalField(decimal_places=4, default=0.0, max_digits=12),
        ),
        migrations.AlterField(
            model_name='pred_one',
            name='GPValue2_million_dollar',
            field=models.DecimalField(decimal_places=4, default=0.0, max_digits=12),
        ),
        migrations.AlterField(
            model_name='pred_one',
            name='GPValue2_million_slc',
            field=models.DecimalField(decimal_places=4, default=0.0, max_digits=12),
        ),
        migrations.AlterField(
            model_name='pred_one',
            name='GPValue_thousand_dollar',
            field=models.DecimalField(decimal_places=4, default=0.0, max_digits=12),
        ),
        migrations.AlterField(
            model_name='pred_one',
            name='NPValue_thousand_dollar',
            field=models.DecimalField(decimal_places=4, default=0.0, max_digits=12),
        ),
        migrations.AlterField(
            model_name='prod_area',
            name='org_val',
            field=models.DecimalField(decimal_places=4, default=0.0, max_digits=12),
        ),
        migrations.AlterField(
            model_name='prod_area',
            name='pred_val',
            field=models.DecimalField(decimal_places=4, default=0.0, max_digits=12),
        ),
    ]
| 36.321429 | 84 | 0.597509 | 330 | 3,051 | 5.281818 | 0.157576 | 0.160643 | 0.200803 | 0.232932 | 0.907631 | 0.907631 | 0.907631 | 0.907631 | 0.890993 | 0.890993 | 0 | 0.050207 | 0.28843 | 3,051 | 83 | 85 | 36.759036 | 0.752649 | 0.014749 | 0 | 0.883117 | 1 | 0 | 0.13249 | 0.068908 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.012987 | 0 | 0.051948 | 0 | 0 | 0 | 0 | null | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 10 |
36c5b9625b766400d6e4ab7773cb807024c203bf | 34,936 | py | Python | spark_fhir_schemas/r4/resources/molecularsequence.py | icanbwell/SparkFhirSchemas | 8c828313c39850b65f8676e67f526ee92b7d624e | [
"Apache-2.0"
] | null | null | null | spark_fhir_schemas/r4/resources/molecularsequence.py | icanbwell/SparkFhirSchemas | 8c828313c39850b65f8676e67f526ee92b7d624e | [
"Apache-2.0"
] | null | null | null | spark_fhir_schemas/r4/resources/molecularsequence.py | icanbwell/SparkFhirSchemas | 8c828313c39850b65f8676e67f526ee92b7d624e | [
"Apache-2.0"
] | null | null | null | from typing import Union, List, Optional
from pyspark.sql.types import StructType, StructField, StringType, ArrayType, DataType
# This file is auto-generated by generate_schema so do not edit it manually
# noinspection PyPep8Naming
class MolecularSequenceSchema:
    """
    Raw data describing a biological sequence.
    """

    # noinspection PyDefaultArgument
    @staticmethod
    def get_schema(
        max_nesting_depth: Optional[int] = 6,
        nesting_depth: int = 0,
        nesting_list: List[str] = [],
        max_recursion_limit: Optional[int] = 2,
        include_extension: Optional[bool] = False,
        extension_fields: Optional[List[str]] = None,
        extension_depth: int = 0,
        max_extension_depth: Optional[int] = 2,
        include_modifierExtension: Optional[bool] = False,
        use_date_for: Optional[List[str]] = None,
        parent_path: Optional[str] = "",
    ) -> Union[StructType, DataType]:
        """
Raw data describing a biological sequence.
resourceType: This is a MolecularSequence resource
id: The logical id of the resource, as used in the URL for the resource. Once
assigned, this value never changes.
meta: The metadata about the resource. This is content that is maintained by the
infrastructure. Changes to the content might not always be associated with
version changes to the resource.
implicitRules: A reference to a set of rules that were followed when the resource was
constructed, and which must be understood when processing the content. Often,
this is a reference to an implementation guide that defines the special rules
along with other profiles etc.
language: The base language in which the resource is written.
text: A human-readable narrative that contains a summary of the resource and can be
used to represent the content of the resource to a human. The narrative need
not encode all the structured data, but is required to contain sufficient
detail to make it "clinically safe" for a human to just read the narrative.
Resource definitions may define what content should be represented in the
narrative to ensure clinical safety.
contained: These resources do not have an independent existence apart from the resource
that contains them - they cannot be identified independently, and nor can they
have their own independent transaction scope.
extension: May be used to represent additional information that is not part of the basic
definition of the resource. To make the use of extensions safe and manageable,
there is a strict set of governance applied to the definition and use of
extensions. Though any implementer can define an extension, there is a set of
requirements that SHALL be met as part of the definition of the extension.
modifierExtension: May be used to represent additional information that is not part of the basic
definition of the resource and that modifies the understanding of the element
that contains it and/or the understanding of the containing element's
descendants. Usually modifier elements provide negation or qualification. To
make the use of extensions safe and manageable, there is a strict set of
governance applied to the definition and use of extensions. Though any
implementer is allowed to define an extension, there is a set of requirements
that SHALL be met as part of the definition of the extension. Applications
processing a resource are required to check for modifier extensions.
Modifier extensions SHALL NOT change the meaning of any elements on Resource
or DomainResource (including cannot change the meaning of modifierExtension
itself).
identifier: A unique identifier for this particular sequence instance. This is a FHIR-
defined id.
type: Amino Acid Sequence/ DNA Sequence / RNA Sequence.
coordinateSystem: Whether the sequence is numbered starting at 0 (0-based numbering or
coordinates, inclusive start, exclusive end) or starting at 1 (1-based
numbering, inclusive start and inclusive end).
patient: The patient whose sequencing results are described by this resource.
specimen: Specimen used for sequencing.
device: The method for sequencing, for example, chip information.
performer: The organization or lab that should be responsible for this result.
quantity: The number of copies of the sequence of interest. (RNASeq).
referenceSeq: A sequence that is used as a reference to describe variants that are present
in a sequence analyzed.
variant: The definition of variant here originates from Sequence ontology ([variant_of]
(http://www.sequenceontology.org/browser/current_svn/term/variant_of)). This
element can represent amino acid or nucleic sequence change(including
insertion,deletion,SNP,etc.) It can represent some complex mutation or
segment variation with the assist of CIGAR string.
observedSeq: Sequence that was observed. It is the result marked by referenceSeq along with
variant records on referenceSeq. This shall start from
referenceSeq.windowStart and end by referenceSeq.windowEnd.
quality: An experimental feature attribute that defines the quality of the feature in a
quantitative way, such as a phred quality score ([SO:0001686](http://www.seque
nceontology.org/browser/current_svn/term/SO:0001686)).
readCoverage: Coverage (read depth or depth) is the average number of reads representing a
given nucleotide in the reconstructed sequence.
repository: Configurations of the external repository. The repository shall store target's
observedSeq or records related with target's observedSeq.
pointer: Pointer to next atomic sequence which at most contains one variant.
structureVariant: Information about chromosome structure variation.
"""
if extension_fields is None:
extension_fields = [
"valueBoolean",
"valueCode",
"valueDate",
"valueDateTime",
"valueDecimal",
"valueId",
"valueInteger",
"valuePositiveInt",
"valueString",
"valueTime",
"valueUnsignedInt",
"valueUri",
"valueUrl",
"valueReference",
"valueCodeableConcept",
"valueAddress",
]
from spark_fhir_schemas.r4.simple_types.id import idSchema
from spark_fhir_schemas.r4.complex_types.meta import MetaSchema
from spark_fhir_schemas.r4.simple_types.uri import uriSchema
from spark_fhir_schemas.r4.simple_types.code import codeSchema
from spark_fhir_schemas.r4.complex_types.narrative import NarrativeSchema
from spark_fhir_schemas.r4.complex_types.resourcelist import ResourceListSchema
from spark_fhir_schemas.r4.complex_types.extension import ExtensionSchema
from spark_fhir_schemas.r4.complex_types.identifier import IdentifierSchema
from spark_fhir_schemas.r4.simple_types.integer import integerSchema
from spark_fhir_schemas.r4.complex_types.reference import ReferenceSchema
from spark_fhir_schemas.r4.complex_types.quantity import QuantitySchema
from spark_fhir_schemas.r4.complex_types.molecularsequence_referenceseq import (
MolecularSequence_ReferenceSeqSchema,
)
from spark_fhir_schemas.r4.complex_types.molecularsequence_variant import (
MolecularSequence_VariantSchema,
)
from spark_fhir_schemas.r4.complex_types.molecularsequence_quality import (
MolecularSequence_QualitySchema,
)
from spark_fhir_schemas.r4.complex_types.molecularsequence_repository import (
MolecularSequence_RepositorySchema,
)
from spark_fhir_schemas.r4.complex_types.molecularsequence_structurevariant import (
MolecularSequence_StructureVariantSchema,
)
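# If this branch of the schema has already recursed into MolecularSequence up to the
# limit, or the nesting budget is spent, truncate it to a bare "id" field.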
if (
max_recursion_limit
and nesting_list.count("MolecularSequence") >= max_recursion_limit
) or (max_nesting_depth and nesting_depth >= max_nesting_depth):
return StructType([StructField("id", StringType(), True)])
# add my name to recursion list for later
my_nesting_list: List[str] = nesting_list + ["MolecularSequence"]
my_parent_path = (
parent_path + ".molecularsequence" if parent_path else "molecularsequence"
)
schema = StructType(
[
# This is a MolecularSequence resource
StructField("resourceType", StringType(), True),
# The logical id of the resource, as used in the URL for the resource. Once
# assigned, this value never changes.
StructField(
"id",
idSchema.get_schema(
max_nesting_depth=max_nesting_depth,
nesting_depth=nesting_depth + 1,
nesting_list=my_nesting_list,
max_recursion_limit=max_recursion_limit,
include_extension=include_extension,
extension_fields=extension_fields,
extension_depth=extension_depth + 1,
max_extension_depth=max_extension_depth,
include_modifierExtension=include_modifierExtension,
use_date_for=use_date_for,
parent_path=my_parent_path + ".id",
),
True,
),
# The metadata about the resource. This is content that is maintained by the
# infrastructure. Changes to the content might not always be associated with
# version changes to the resource.
StructField(
"meta",
MetaSchema.get_schema(
max_nesting_depth=max_nesting_depth,
nesting_depth=nesting_depth + 1,
nesting_list=my_nesting_list,
max_recursion_limit=max_recursion_limit,
include_extension=include_extension,
extension_fields=extension_fields,
extension_depth=extension_depth + 1,
max_extension_depth=max_extension_depth,
include_modifierExtension=include_modifierExtension,
use_date_for=use_date_for,
parent_path=my_parent_path,
),
True,
),
# A reference to a set of rules that were followed when the resource was
# constructed, and which must be understood when processing the content. Often,
# this is a reference to an implementation guide that defines the special rules
# along with other profiles etc.
StructField(
"implicitRules",
uriSchema.get_schema(
max_nesting_depth=max_nesting_depth,
nesting_depth=nesting_depth + 1,
nesting_list=my_nesting_list,
max_recursion_limit=max_recursion_limit,
include_extension=include_extension,
extension_fields=extension_fields,
extension_depth=extension_depth + 1,
max_extension_depth=max_extension_depth,
include_modifierExtension=include_modifierExtension,
use_date_for=use_date_for,
parent_path=my_parent_path + ".implicitrules",
),
True,
),
# The base language in which the resource is written.
StructField(
"language",
codeSchema.get_schema(
max_nesting_depth=max_nesting_depth,
nesting_depth=nesting_depth + 1,
nesting_list=my_nesting_list,
max_recursion_limit=max_recursion_limit,
include_extension=include_extension,
extension_fields=extension_fields,
extension_depth=extension_depth + 1,
max_extension_depth=max_extension_depth,
include_modifierExtension=include_modifierExtension,
use_date_for=use_date_for,
parent_path=my_parent_path + ".language",
),
True,
),
# A human-readable narrative that contains a summary of the resource and can be
# used to represent the content of the resource to a human. The narrative need
# not encode all the structured data, but is required to contain sufficient
# detail to make it "clinically safe" for a human to just read the narrative.
# Resource definitions may define what content should be represented in the
# narrative to ensure clinical safety.
StructField(
"text",
NarrativeSchema.get_schema(
max_nesting_depth=max_nesting_depth,
nesting_depth=nesting_depth + 1,
nesting_list=my_nesting_list,
max_recursion_limit=max_recursion_limit,
include_extension=include_extension,
extension_fields=extension_fields,
extension_depth=extension_depth + 1,
max_extension_depth=max_extension_depth,
include_modifierExtension=include_modifierExtension,
use_date_for=use_date_for,
parent_path=my_parent_path,
),
True,
),
# These resources do not have an independent existence apart from the resource
# that contains them - they cannot be identified independently, and nor can they
# have their own independent transaction scope.
StructField(
"contained",
ArrayType(
ResourceListSchema.get_schema(
max_nesting_depth=max_nesting_depth,
nesting_depth=nesting_depth + 1,
nesting_list=my_nesting_list,
max_recursion_limit=max_recursion_limit,
include_extension=include_extension,
extension_fields=extension_fields,
extension_depth=extension_depth,
max_extension_depth=max_extension_depth,
include_modifierExtension=include_modifierExtension,
use_date_for=use_date_for,
parent_path=my_parent_path,
)
),
True,
),
# May be used to represent additional information that is not part of the basic
# definition of the resource. To make the use of extensions safe and manageable,
# there is a strict set of governance applied to the definition and use of
# extensions. Though any implementer can define an extension, there is a set of
# requirements that SHALL be met as part of the definition of the extension.
StructField(
"extension",
ArrayType(
ExtensionSchema.get_schema(
max_nesting_depth=max_nesting_depth,
nesting_depth=nesting_depth + 1,
nesting_list=my_nesting_list,
max_recursion_limit=max_recursion_limit,
include_extension=include_extension,
extension_fields=extension_fields,
extension_depth=extension_depth,
max_extension_depth=max_extension_depth,
include_modifierExtension=include_modifierExtension,
use_date_for=use_date_for,
parent_path=my_parent_path,
)
),
True,
),
# May be used to represent additional information that is not part of the basic
# definition of the resource and that modifies the understanding of the element
# that contains it and/or the understanding of the containing element's
# descendants. Usually modifier elements provide negation or qualification. To
# make the use of extensions safe and manageable, there is a strict set of
# governance applied to the definition and use of extensions. Though any
# implementer is allowed to define an extension, there is a set of requirements
# that SHALL be met as part of the definition of the extension. Applications
# processing a resource are required to check for modifier extensions.
#
# Modifier extensions SHALL NOT change the meaning of any elements on Resource
# or DomainResource (including cannot change the meaning of modifierExtension
# itself).
StructField(
"modifierExtension",
ArrayType(
ExtensionSchema.get_schema(
max_nesting_depth=max_nesting_depth,
nesting_depth=nesting_depth + 1,
nesting_list=my_nesting_list,
max_recursion_limit=max_recursion_limit,
include_extension=include_extension,
extension_fields=extension_fields,
extension_depth=extension_depth,
max_extension_depth=max_extension_depth,
include_modifierExtension=include_modifierExtension,
use_date_for=use_date_for,
parent_path=my_parent_path,
)
),
True,
),
# A unique identifier for this particular sequence instance. This is a FHIR-
# defined id.
StructField(
"identifier",
ArrayType(
IdentifierSchema.get_schema(
max_nesting_depth=max_nesting_depth,
nesting_depth=nesting_depth + 1,
nesting_list=my_nesting_list,
max_recursion_limit=max_recursion_limit,
include_extension=include_extension,
extension_fields=extension_fields,
extension_depth=extension_depth,
max_extension_depth=max_extension_depth,
include_modifierExtension=include_modifierExtension,
use_date_for=use_date_for,
parent_path=my_parent_path,
)
),
True,
),
# Amino Acid Sequence/ DNA Sequence / RNA Sequence.
StructField("type", StringType(), True),
# Whether the sequence is numbered starting at 0 (0-based numbering or
# coordinates, inclusive start, exclusive end) or starting at 1 (1-based
# numbering, inclusive start and inclusive end).
StructField(
"coordinateSystem",
integerSchema.get_schema(
max_nesting_depth=max_nesting_depth,
nesting_depth=nesting_depth + 1,
nesting_list=my_nesting_list,
max_recursion_limit=max_recursion_limit,
include_extension=include_extension,
extension_fields=extension_fields,
extension_depth=extension_depth + 1,
max_extension_depth=max_extension_depth,
include_modifierExtension=include_modifierExtension,
use_date_for=use_date_for,
parent_path=my_parent_path + ".coordinatesystem",
),
True,
),
# The patient whose sequencing results are described by this resource.
StructField(
"patient",
ReferenceSchema.get_schema(
max_nesting_depth=max_nesting_depth,
nesting_depth=nesting_depth + 1,
nesting_list=my_nesting_list,
max_recursion_limit=max_recursion_limit,
include_extension=include_extension,
extension_fields=extension_fields,
extension_depth=extension_depth + 1,
max_extension_depth=max_extension_depth,
include_modifierExtension=include_modifierExtension,
use_date_for=use_date_for,
parent_path=my_parent_path,
),
True,
),
# Specimen used for sequencing.
StructField(
"specimen",
ReferenceSchema.get_schema(
max_nesting_depth=max_nesting_depth,
nesting_depth=nesting_depth + 1,
nesting_list=my_nesting_list,
max_recursion_limit=max_recursion_limit,
include_extension=include_extension,
extension_fields=extension_fields,
extension_depth=extension_depth + 1,
max_extension_depth=max_extension_depth,
include_modifierExtension=include_modifierExtension,
use_date_for=use_date_for,
parent_path=my_parent_path,
),
True,
),
# The method for sequencing, for example, chip information.
StructField(
"device",
ReferenceSchema.get_schema(
max_nesting_depth=max_nesting_depth,
nesting_depth=nesting_depth + 1,
nesting_list=my_nesting_list,
max_recursion_limit=max_recursion_limit,
include_extension=include_extension,
extension_fields=extension_fields,
extension_depth=extension_depth + 1,
max_extension_depth=max_extension_depth,
include_modifierExtension=include_modifierExtension,
use_date_for=use_date_for,
parent_path=my_parent_path,
),
True,
),
# The organization or lab that should be responsible for this result.
StructField(
"performer",
ReferenceSchema.get_schema(
max_nesting_depth=max_nesting_depth,
nesting_depth=nesting_depth + 1,
nesting_list=my_nesting_list,
max_recursion_limit=max_recursion_limit,
include_extension=include_extension,
extension_fields=extension_fields,
extension_depth=extension_depth + 1,
max_extension_depth=max_extension_depth,
include_modifierExtension=include_modifierExtension,
use_date_for=use_date_for,
parent_path=my_parent_path,
),
True,
),
# The number of copies of the sequence of interest. (RNASeq).
StructField(
"quantity",
QuantitySchema.get_schema(
max_nesting_depth=max_nesting_depth,
nesting_depth=nesting_depth + 1,
nesting_list=my_nesting_list,
max_recursion_limit=max_recursion_limit,
include_extension=include_extension,
extension_fields=extension_fields,
extension_depth=extension_depth + 1,
max_extension_depth=max_extension_depth,
include_modifierExtension=include_modifierExtension,
use_date_for=use_date_for,
parent_path=my_parent_path,
),
True,
),
# A sequence that is used as a reference to describe variants that are present
# in a sequence analyzed.
StructField(
"referenceSeq",
MolecularSequence_ReferenceSeqSchema.get_schema(
max_nesting_depth=max_nesting_depth,
nesting_depth=nesting_depth + 1,
nesting_list=my_nesting_list,
max_recursion_limit=max_recursion_limit,
include_extension=include_extension,
extension_fields=extension_fields,
extension_depth=extension_depth + 1,
max_extension_depth=max_extension_depth,
include_modifierExtension=include_modifierExtension,
use_date_for=use_date_for,
parent_path=my_parent_path,
),
True,
),
# The definition of variant here originates from Sequence ontology ([variant_of]
# (http://www.sequenceontology.org/browser/current_svn/term/variant_of)). This
# element can represent amino acid or nucleic sequence change(including
# insertion,deletion,SNP,etc.) It can represent some complex mutation or
# segment variation with the assist of CIGAR string.
StructField(
"variant",
ArrayType(
MolecularSequence_VariantSchema.get_schema(
max_nesting_depth=max_nesting_depth,
nesting_depth=nesting_depth + 1,
nesting_list=my_nesting_list,
max_recursion_limit=max_recursion_limit,
include_extension=include_extension,
extension_fields=extension_fields,
extension_depth=extension_depth,
max_extension_depth=max_extension_depth,
include_modifierExtension=include_modifierExtension,
use_date_for=use_date_for,
parent_path=my_parent_path,
)
),
True,
),
# Sequence that was observed. It is the result marked by referenceSeq along with
# variant records on referenceSeq. This shall start from
# referenceSeq.windowStart and end by referenceSeq.windowEnd.
StructField("observedSeq", StringType(), True),
# An experimental feature attribute that defines the quality of the feature in a
# quantitative way, such as a phred quality score ([SO:0001686](http://www.seque
# nceontology.org/browser/current_svn/term/SO:0001686)).
StructField(
"quality",
ArrayType(
MolecularSequence_QualitySchema.get_schema(
max_nesting_depth=max_nesting_depth,
nesting_depth=nesting_depth + 1,
nesting_list=my_nesting_list,
max_recursion_limit=max_recursion_limit,
include_extension=include_extension,
extension_fields=extension_fields,
extension_depth=extension_depth,
max_extension_depth=max_extension_depth,
include_modifierExtension=include_modifierExtension,
use_date_for=use_date_for,
parent_path=my_parent_path,
)
),
True,
),
# Coverage (read depth or depth) is the average number of reads representing a
# given nucleotide in the reconstructed sequence.
StructField(
"readCoverage",
integerSchema.get_schema(
max_nesting_depth=max_nesting_depth,
nesting_depth=nesting_depth + 1,
nesting_list=my_nesting_list,
max_recursion_limit=max_recursion_limit,
include_extension=include_extension,
extension_fields=extension_fields,
extension_depth=extension_depth + 1,
max_extension_depth=max_extension_depth,
include_modifierExtension=include_modifierExtension,
use_date_for=use_date_for,
parent_path=my_parent_path + ".readcoverage",
),
True,
),
# Configurations of the external repository. The repository shall store target's
# observedSeq or records related with target's observedSeq.
StructField(
"repository",
ArrayType(
MolecularSequence_RepositorySchema.get_schema(
max_nesting_depth=max_nesting_depth,
nesting_depth=nesting_depth + 1,
nesting_list=my_nesting_list,
max_recursion_limit=max_recursion_limit,
include_extension=include_extension,
extension_fields=extension_fields,
extension_depth=extension_depth,
max_extension_depth=max_extension_depth,
include_modifierExtension=include_modifierExtension,
use_date_for=use_date_for,
parent_path=my_parent_path,
)
),
True,
),
# Pointer to next atomic sequence which at most contains one variant.
StructField(
"pointer",
ArrayType(
ReferenceSchema.get_schema(
max_nesting_depth=max_nesting_depth,
nesting_depth=nesting_depth + 1,
nesting_list=my_nesting_list,
max_recursion_limit=max_recursion_limit,
include_extension=include_extension,
extension_fields=extension_fields,
extension_depth=extension_depth,
max_extension_depth=max_extension_depth,
include_modifierExtension=include_modifierExtension,
use_date_for=use_date_for,
parent_path=my_parent_path,
)
),
True,
),
# Information about chromosome structure variation.
StructField(
"structureVariant",
ArrayType(
MolecularSequence_StructureVariantSchema.get_schema(
max_nesting_depth=max_nesting_depth,
nesting_depth=nesting_depth + 1,
nesting_list=my_nesting_list,
max_recursion_limit=max_recursion_limit,
include_extension=include_extension,
extension_fields=extension_fields,
extension_depth=extension_depth,
max_extension_depth=max_extension_depth,
include_modifierExtension=include_modifierExtension,
use_date_for=use_date_for,
parent_path=my_parent_path,
)
),
True,
),
]
)
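# When extensions are excluded, collapse the (potentially deeply nested) extension
# and modifierExtension structs to plain string columns to keep the schema small.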
if not include_extension:
schema.fields = [
c
if c.name != "extension"
else StructField("extension", StringType(), True)
for c in schema.fields
]
if not include_modifierExtension:
schema.fields = [
c
if c.name != "modifierExtension"
else StructField("modifierExtension", StringType(), True)
for c in schema.fields
]
return schema
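# --- Editor's note: a minimal usage sketch, not part of the auto-generated module.
# It assumes only that pyspark and this package are importable, then materializes
# the schema and lists its top-level field names.
if __name__ == "__main__":
    _schema = MolecularSequenceSchema.get_schema(
        max_nesting_depth=3,      # keep nested FHIR complex types shallow
        max_recursion_limit=1,    # stop after one recursive MolecularSequence reference
        include_extension=False,  # extension fields collapse to plain strings (see above)
    )
    print([_field.name for _field in _schema.fields])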
| 52.456456 | 104 | 0.553584 | 3,179 | 34,936 | 5.821013 | 0.117018 | 0.060308 | 0.038098 | 0.057066 | 0.838044 | 0.827074 | 0.821021 | 0.793353 | 0.774656 | 0.770116 | 0 | 0.004471 | 0.404597 | 34,936 | 665 | 105 | 52.535338 | 0.88515 | 0.268147 | 0 | 0.715464 | 1 | 0 | 0.023875 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.002062 | false | 0 | 0.037113 | 0 | 0.045361 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
36f511eec4a5eec048a4ace71675b52c6af0066b | 19,308 | py | Python | tests/test_dmd_modes_tuner.py | Jamiree/PyDMD | b4dd8f16e52552d2bd29eda53b12d2eda9de4d89 | [
"MIT"
] | null | null | null | tests/test_dmd_modes_tuner.py | Jamiree/PyDMD | b4dd8f16e52552d2bd29eda53b12d2eda9de4d89 | [
"MIT"
] | null | null | null | tests/test_dmd_modes_tuner.py | Jamiree/PyDMD | b4dd8f16e52552d2bd29eda53b12d2eda9de4d89 | [
"MIT"
] | null | null | null | from pytest import raises
import numpy as np
from pydmd import DMD
from pydmd.dmd_modes_tuner import select_modes, stabilize_modes, ModesSelectors, ModesTuner, selectors
# 15 snapshots with 400 data points each. The matrix is 400x15 and contains
# the following data: f1 + f2, where
# f1 = lambda x,t: sech(x+3)*(1.*np.exp(1j*2.3*t))
# f2 = lambda x,t: (sech(x)*np.tanh(x))*(2.*np.exp(1j*2.8*t))
sample_data = np.load('tests/test_datasets/input_sample.npy')
def test_select_modes():
def stable_modes(dmd_object):
toll = 1e-3
return np.abs(np.abs(dmd_object.eigs) - 1) < toll
dmd = DMD(svd_rank=10)
dmd.fit(sample_data)
exp = dmd.reconstructed_data
select_modes(dmd, stable_modes)
np.testing.assert_array_almost_equal(exp, dmd.reconstructed_data)
def test_select_modes_index():
class FakeDMDOperator:
pass
fake_dmd_operator = FakeDMDOperator()
fake_dmd = DMD()
eigs = np.array([1 + 1e-4, 2, 1 - 1e-2, 5, 1, 1 - 0.5*1e-3])
setattr(fake_dmd_operator, 'eigenvalues', eigs)
setattr(fake_dmd_operator, '_eigenvalues', eigs)
setattr(fake_dmd_operator, '_Lambda', np.zeros(len(eigs)))
# these are DMD eigenvectors, but we do not care in this test
setattr(fake_dmd_operator, '_eigenvectors', np.zeros((1, len(eigs))))
setattr(fake_dmd_operator, '_modes', np.zeros((1, len(eigs))))
setattr(fake_dmd_operator, 'modes', np.zeros((1, len(eigs))))
setattr(fake_dmd, '_Atilde', fake_dmd_operator)
def fake_cmp_amplitudes():
pass
setattr(fake_dmd, '_compute_amplitudes', fake_cmp_amplitudes)
_, idx = select_modes(fake_dmd, ModesSelectors.stable_modes(max_distance_from_unity=1e-3), return_indexes=True)
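# per test_stable_modes_both below, the stable eigenvalues sit at indexes 0, 4 and 5,
# so the indexes returned here are those of the three discarded (unstable) modes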
np.testing.assert_array_equal(idx, [1,2,3])
assert len(fake_dmd.operator._eigenvalues) == 3
assert len(fake_dmd.operator._Lambda) == 3
assert fake_dmd.operator._eigenvectors.shape[1] == 3
assert fake_dmd.operator._modes.shape[1] == 3
def test_select_modes_index_and_deepcopy():
class FakeDMDOperator:
pass
fake_dmd_operator = FakeDMDOperator()
fake_dmd = DMD()
eigs = np.array([1 + 1e-4, 2, 1 - 1e-2, 5, 1, 1 - 0.5*1e-3])
setattr(fake_dmd_operator, '_eigenvalues', eigs)
setattr(fake_dmd_operator, 'eigenvalues', eigs)
setattr(fake_dmd_operator, '_Lambda', np.zeros(len(eigs)))
# these are DMD eigenvectors, but we do not care in this test
setattr(fake_dmd_operator, '_eigenvectors', np.zeros((1, len(eigs))))
setattr(fake_dmd_operator, '_modes', np.zeros((1, len(eigs))))
setattr(fake_dmd_operator, 'modes', np.zeros((1, len(eigs))))
setattr(fake_dmd, '_Atilde', fake_dmd_operator)
def fake_cmp_amplitudes():
pass
setattr(fake_dmd, '_compute_amplitudes', fake_cmp_amplitudes)
dmd2, idx = select_modes(fake_dmd, ModesSelectors.stable_modes(max_distance_from_unity=1e-3), in_place=False, return_indexes=True)
np.testing.assert_array_equal(idx, [1,2,3])
assert len(fake_dmd.operator._eigenvalues) == 6
assert len(fake_dmd.operator._Lambda) == 6
assert fake_dmd.operator._eigenvectors.shape[1] == 6
assert fake_dmd.operator._modes.shape[1] == 6
assert len(dmd2.operator._eigenvalues) == 3
assert len(dmd2.operator._Lambda) == 3
assert dmd2.operator._eigenvectors.shape[1] == 3
assert dmd2.operator._modes.shape[1] == 3
def test_stable_modes_both():
class FakeDMD:
pass
fake_dmd = FakeDMD()
setattr(fake_dmd, 'eigs', np.array([1 + 1e-4, 2, 1 - 1e-2, 5, 1, 1 - 0.5*1e-3]))
expected_result = np.array([False for _ in range(6)])
expected_result[[0, 4, 5]] = True
assert all(ModesSelectors.stable_modes(max_distance_from_unity=1e-3)(fake_dmd) == expected_result)
def test_stable_modes_outside_only():
class FakeDMD:
pass
fake_dmd = FakeDMD()
setattr(fake_dmd, 'eigs', np.array([1 + 1e-4, 2, 1 - 1e-2, 5, 1, 1 - 0.5*1e-3]))
expected_result = np.array([False for _ in range(6)])
expected_result[[0, 2, 4, 5]] = True
assert all(ModesSelectors.stable_modes(max_distance_from_unity_outside=1e-3)(fake_dmd) == expected_result)
def test_stable_modes_inside_only():
class FakeDMD:
pass
fake_dmd = FakeDMD()
setattr(fake_dmd, 'eigs', np.array([1 + 1e-4, 2, 1 - 1e-2, 5, 1, 1 - 0.5*1e-3]))
expected_result = np.array([False for _ in range(6)])
expected_result[[0, 1, 3, 4, 5]] = True
assert all(ModesSelectors.stable_modes(max_distance_from_unity_inside=1e-3)(fake_dmd) == expected_result)
def test_stable_modes_errors():
with raises(ValueError):
ModesSelectors.stable_modes()
with raises(ValueError):
ModesSelectors.stable_modes(max_distance_from_unity=1.e-2, max_distance_from_unity_inside=1.e-3)
with raises(ValueError):
ModesSelectors.stable_modes(max_distance_from_unity=1.e-2, max_distance_from_unity_outside=1.e-3)
def test_threshold():
class FakeDMD:
pass
fake_dmd = FakeDMD()
setattr(fake_dmd, 'eigs', np.array([complex(1, 1e-4), 2, complex(1, 1e-2), 5, 1, complex(1, 5*1e-2)]))
expected_result = np.array([False for _ in range(6)])
expected_result[[1, 5]] = True
assert all(ModesSelectors.threshold(1+1.e-3, 2+1.e-10)(fake_dmd) == expected_result)
def test_compute_integral_contribution():
np.testing.assert_almost_equal(ModesSelectors._compute_integral_contribution(
np.array([5,0,0,1]), np.array([1,-2,3,-5,6])
), 442, decimal=1)
def test_integral_contribution():
class FakeDMD:
pass
fake_dmd = FakeDMD()
setattr(fake_dmd, 'dynamics', np.array([[i for _ in range(10)] for i in range(4)]))
setattr(fake_dmd, 'modes', np.ones((20, 4)))
setattr(fake_dmd, 'dmd_time', None)
setattr(fake_dmd, 'original_time', None)
expected_result = np.array([False for _ in range(4)])
expected_result[[2, 3]] = True
assert all(ModesSelectors.integral_contribution(2)(fake_dmd) == expected_result)
def test_integral_contribution_reconstruction():
dmd = DMD(svd_rank=10)
dmd.fit(sample_data)
exp = dmd.reconstructed_data
select_modes(dmd, ModesSelectors.integral_contribution(2))
np.testing.assert_array_almost_equal(exp, dmd.reconstructed_data)
def test_stabilize_modes():
class FakeDMDOperator:
pass
dmd = DMD()
fake_dmd_operator = FakeDMDOperator()
eigs = np.array([complex(0.3, 0.2), complex(0.8,0.5), 1, complex(1,1.e-2), 2, complex(2,1.e-2)])
amplitudes = np.array([1,2,3,4,5,6], dtype=complex)
setattr(fake_dmd_operator, '_eigenvalues', eigs)
setattr(fake_dmd_operator, 'eigenvalues', eigs)
setattr(dmd, '_Atilde', fake_dmd_operator)
setattr(fake_dmd_operator, 'modes', np.zeros((1, len(eigs))))
setattr(dmd, '_b', amplitudes)
stabilize_modes(dmd, 0.8, 1.2)
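# eigenvalues with modulus inside [0.8, 1.2] (indexes 1, 2 and 3) are projected onto
# the unit circle, and their amplitudes are rescaled by the removed modulus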
np.testing.assert_array_almost_equal(
dmd.operator._eigenvalues,
np.array([complex(0.3, 0.2), complex(0.8,0.5) / abs(complex(0.8,0.5)),
1, complex(1,1.e-2) / abs(complex(1,1.e-2)), 2, complex(2,1.e-2)]))
np.testing.assert_array_almost_equal(
dmd._b,
np.array([1, 2*abs(complex(0.8,0.5)), 3, 4*abs(complex(1,1.e-2)), 5, 6]))
def test_stabilize_modes_index():
class FakeDMDOperator:
pass
dmd = DMD()
fake_dmd_operator = FakeDMDOperator()
eigs = np.array([complex(0.3, 0.2), complex(0.8,0.5), 1, complex(1,1.e-2), 2, complex(2,1.e-2)])
amplitudes = np.array([1,2,3,4,5,6], dtype=complex)
setattr(fake_dmd_operator, '_eigenvalues', eigs)
setattr(fake_dmd_operator, 'eigenvalues', eigs)
setattr(fake_dmd_operator, 'modes', np.zeros((1, len(eigs))))
setattr(dmd, '_Atilde', fake_dmd_operator)
setattr(dmd, '_b', amplitudes)
_, indexes = stabilize_modes(dmd, 0.8, 1.2, return_indexes=True)
np.testing.assert_array_almost_equal(
dmd.operator._eigenvalues,
np.array([complex(0.3, 0.2), complex(0.8,0.5) / abs(complex(0.8,0.5)),
1, complex(1,1.e-2) / abs(complex(1,1.e-2)), 2, complex(2,1.e-2)]))
np.testing.assert_array_almost_equal(
dmd._b,
np.array([1, 2*abs(complex(0.8,0.5)), 3, 4*abs(complex(1,1.e-2)), 5, 6]))
np.testing.assert_almost_equal(indexes, [1,2,3])
def test_stabilize_modes_index_deepcopy():
class FakeDMDOperator:
pass
dmd = DMD()
fake_dmd_operator = FakeDMDOperator()
eigs = np.array([complex(0.3, 0.2), complex(0.8,0.5), 1, complex(1,1.e-2), 2, complex(2,1.e-2)])
amplitudes = np.array([1,2,3,4,5,6], dtype=complex)
setattr(fake_dmd_operator, '_eigenvalues', eigs)
setattr(fake_dmd_operator, 'eigenvalues', eigs)
setattr(fake_dmd_operator, 'modes', np.zeros((1, len(eigs))))
setattr(dmd, '_Atilde', fake_dmd_operator)
setattr(dmd, '_b', amplitudes)
dmd2, indexes = stabilize_modes(dmd, 0.8, 1.2, in_place=False, return_indexes=True)
np.testing.assert_array_almost_equal(
dmd2.operator._eigenvalues,
np.array([complex(0.3, 0.2), complex(0.8,0.5) / abs(complex(0.8,0.5)),
1, complex(1,1.e-2) / abs(complex(1,1.e-2)), 2, complex(2,1.e-2)]))
np.testing.assert_array_almost_equal(
dmd2._b,
np.array([1, 2*abs(complex(0.8,0.5)), 3, 4*abs(complex(1,1.e-2)), 5, 6]))
np.testing.assert_array_almost_equal(
dmd.operator._eigenvalues,
np.array([complex(0.3, 0.2), complex(0.8,0.5),
1, complex(1,1.e-2), 2, complex(2,1.e-2)]))
np.testing.assert_array_almost_equal(
dmd._b,
np.array([1, 2, 3, 4, 5, 6]))
np.testing.assert_almost_equal(indexes, [1,2,3])
# test that the dmd given to ModesTuner is copied with deepcopy
def test_modes_tuner_copy():
class FakeDMD:
pass
fake_dmd = FakeDMD()
setattr(fake_dmd, 'eigs', np.array([complex(1, 1e-4), 2, complex(1, 1e-2), 5, 1, complex(1, 5*1e-2)]))
ModesTuner(fake_dmd)._dmds[0].eigs[1] = 0
assert fake_dmd.eigs[1] == 2
# assert that passing a scalar DMD (i.e. no list) causes ModesTuner to return
# only scalar DMD instances
def test_modes_tuner_scalar_input():
class FakeDMD:
pass
fake_dmd = FakeDMD()
setattr(fake_dmd, 'eigs', np.array([complex(1, 1e-4), 2, complex(1, 1e-2), 5, 1, complex(1, 5*1e-2)]))
mt = ModesTuner(fake_dmd, in_place=True)
assert mt.get() == fake_dmd
assert isinstance(mt.copy(), FakeDMD)
def test_modes_tuner_list_input():
class FakeDMD:
pass
def cook_fake_dmd():
fake_dmd = FakeDMD()
setattr(fake_dmd, 'eigs', np.array([complex(1, 1e-4), 2, complex(1, 1e-2), 5, 1, complex(1, 5*1e-2)]))
return fake_dmd
dmd1 = cook_fake_dmd()
dmd2 = cook_fake_dmd()
mt = ModesTuner([dmd1, dmd2], in_place=True)
assert isinstance(mt.get(), list)
assert mt.get()[0] == dmd1
assert mt.get()[1] == dmd2
assert isinstance(mt.copy(), list)
assert len(mt.copy()) == 2
def test_modes_tuner_get():
class FakeDMD:
pass
fake_dmd = FakeDMD()
setattr(fake_dmd, 'eigs', np.array([complex(1, 1e-4), 2, complex(1, 1e-2), 5, 1, complex(1, 5*1e-2)]))
mtuner = ModesTuner(fake_dmd)
eigs = mtuner.get().eigs
mtuner._dmds[0].eigs[1] = 0
assert eigs[1] == 0
def test_modes_tuner_secure_copy():
class FakeDMD:
pass
fake_dmd = FakeDMD()
setattr(fake_dmd, 'eigs', np.array([complex(1, 1e-4), 2, complex(1, 1e-2), 5, 1, complex(1, 5*1e-2)]))
mtuner = ModesTuner(fake_dmd)
eigs = mtuner.copy().eigs
mtuner._dmds[0].eigs[1] = 0
assert eigs[1] == 2
def test_modes_tuner_inplace():
class FakeDMD:
pass
fake_dmd = FakeDMD()
setattr(fake_dmd, 'eigs', np.array([complex(1, 1e-4), 2, complex(1, 1e-2), 5, 1, complex(1, 5*1e-2)]))
mtuner = ModesTuner(fake_dmd, in_place=True)
assert mtuner.get() == fake_dmd
mtuner._dmds[0].eigs[1] = 0
assert fake_dmd.eigs[1] == 0
def test_modes_tuner_inplace_list():
class FakeDMD:
pass
fake_dmd = FakeDMD()
setattr(fake_dmd, 'eigs', np.array([complex(1, 1e-4), 2, complex(1, 1e-2), 5, 1, complex(1, 5*1e-2)]))
fake_dmd2 = FakeDMD()
setattr(fake_dmd2, 'eigs', np.array([complex(1, 1e-4), 3, complex(1, 1e-2), 5, 1, complex(1, 5*1e-2)]))
mtuner = ModesTuner([fake_dmd, fake_dmd2], in_place=True)
assert mtuner.get()[0] == fake_dmd
assert mtuner.get()[1] == fake_dmd2
mtuner._dmds[0].eigs[1] = 0
assert fake_dmd.eigs[1] == 0
def test_modes_tuner_select_raises():
class FakeDMD:
pass
fake_dmd = FakeDMD()
setattr(fake_dmd, 'eigs', np.array([complex(1, 1e-4), 2, complex(1, 1e-2), 5, 1, complex(1, 5*1e-2)]))
with raises(ValueError):
ModesTuner(fake_dmd).select('ciauu')
with raises(ValueError):
ModesTuner(fake_dmd).select(2)
def test_modes_tuner_select():
class FakeDMDOperator:
pass
fake_dmd_operator = FakeDMDOperator()
fake_dmd = DMD()
eigs = np.array([1 + 1e-4, 2, 1 - 1e-2, 5, 1, 1 - 0.5*1e-3])
setattr(fake_dmd_operator, 'eigenvalues', eigs)
setattr(fake_dmd_operator, '_eigenvalues', eigs)
setattr(fake_dmd_operator, '_Lambda', np.zeros(len(eigs)))
# these are DMD eigenvectors, but we do not care in this test
setattr(fake_dmd_operator, '_eigenvectors', np.zeros((1, len(eigs))))
setattr(fake_dmd_operator, '_modes', np.zeros((1, len(eigs))))
setattr(fake_dmd_operator, 'modes', np.zeros((1, len(eigs))))
setattr(fake_dmd, '_Atilde', fake_dmd_operator)
def fake_cmp_amplitudes():
pass
setattr(fake_dmd, '_compute_amplitudes', fake_cmp_amplitudes)
mtuner = ModesTuner(fake_dmd)
mtuner.select('stable_modes', max_distance_from_unity=1e-3)
dmd = mtuner.get()
assert len(dmd.operator._eigenvalues) == 3
assert len(dmd.operator._Lambda) == 3
assert dmd.operator._eigenvectors.shape[1] == 3
assert dmd.operator._modes.shape[1] == 3
def test_modes_tuner_stabilize():
class FakeDMDOperator:
pass
dmd = DMD()
fake_dmd_operator = FakeDMDOperator()
eigs = np.array([complex(0.3, 0.2), complex(0.8,0.5), 1, complex(1,1.e-2), 2, complex(2,1.e-2)])
amplitudes = np.array([1,2,3,4,5,6], dtype=complex)
setattr(fake_dmd_operator, '_eigenvalues', eigs)
setattr(fake_dmd_operator, 'eigenvalues', eigs)
setattr(fake_dmd_operator, 'modes', np.zeros((1, len(eigs))))
setattr(dmd, '_Atilde', fake_dmd_operator)
setattr(dmd, '_b', amplitudes)
mtuner = ModesTuner(dmd)
mtuner.stabilize(inner_radius=0.8, outer_radius=1.2)
dmd = mtuner.get()
np.testing.assert_array_almost_equal(
dmd.operator._eigenvalues,
np.array([complex(0.3, 0.2), complex(0.8,0.5) / abs(complex(0.8,0.5)),
1, complex(1,1.e-2) / abs(complex(1,1.e-2)), 2, complex(2,1.e-2)]))
np.testing.assert_array_almost_equal(
dmd._b,
np.array([1, 2*abs(complex(0.8,0.5)), 3, 4*abs(complex(1,1.e-2)), 5, 6]))
def test_modes_tuner_stabilize_multiple():
class FakeDMDOperator:
pass
def cook_fake_dmd():
dmd = DMD()
fake_dmd_operator = FakeDMDOperator()
eigs = np.array([complex(0.3, 0.2), complex(0.8,0.5), 1, complex(1,1.e-2), 2, complex(2,1.e-2)])
amplitudes = np.array([1,2,3,4,5,6], dtype=complex)
setattr(fake_dmd_operator, '_eigenvalues', eigs)
setattr(fake_dmd_operator, 'eigenvalues', eigs)
setattr(fake_dmd_operator, 'modes', np.zeros((1, len(eigs))))
setattr(dmd, '_Atilde', fake_dmd_operator)
setattr(dmd, '_b', amplitudes)
return dmd
dmd1 = cook_fake_dmd()
dmd2 = cook_fake_dmd()
dmd3 = cook_fake_dmd()
mtuner = ModesTuner([dmd1, dmd2, dmd3])
mtuner.stabilize(inner_radius=0.8, outer_radius=1.2)
dmds = mtuner.get()
assert isinstance(dmds, list)
for dmd in dmds:
np.testing.assert_array_almost_equal(
dmd.operator._eigenvalues,
np.array([complex(0.3, 0.2), complex(0.8,0.5) / abs(complex(0.8,0.5)),
1, complex(1,1.e-2) / abs(complex(1,1.e-2)), 2, complex(2,1.e-2)]))
np.testing.assert_array_almost_equal(
dmd._b,
np.array([1, 2*abs(complex(0.8,0.5)), 3, 4*abs(complex(1,1.e-2)), 5, 6]))
def test_modes_tuner_subset():
class FakeDMDOperator:
pass
def cook_fake_dmd():
dmd = DMD()
fake_dmd_operator = FakeDMDOperator()
eigs = np.array([complex(0.3, 0.2), complex(0.8,0.5), 1, complex(1,1.e-2), 2, complex(2,1.e-2)])
amplitudes = np.array([1,2,3,4,5,6], dtype=complex)
setattr(fake_dmd_operator, '_eigenvalues', eigs)
setattr(fake_dmd_operator, 'eigenvalues', eigs)
setattr(fake_dmd_operator, 'modes', np.zeros((1, len(eigs))))
setattr(dmd, '_Atilde', fake_dmd_operator)
setattr(dmd, '_b', amplitudes)
return dmd
dmd1 = cook_fake_dmd()
dmd2 = cook_fake_dmd()
dmd3 = cook_fake_dmd()
mtuner = ModesTuner([dmd1, dmd2, dmd3], in_place=True)
assert len(mtuner.subset([0,2]).get()) == 2
assert mtuner.subset([0,2]).get()[0] == dmd1
assert mtuner.subset([0,2]).get()[1] == dmd3
mtuner = ModesTuner([dmd1, dmd2, dmd3], in_place=False)
assert len(mtuner.subset([0,2]).get()) == 2
assert mtuner.subset([0,2]).get()[0] == mtuner._dmds[0]
assert mtuner.subset([0,2]).get()[1] == mtuner._dmds[2]
def test_modes_tuner_stabilize_multiple_subset():
class FakeDMDOperator:
pass
def cook_fake_dmd():
dmd = DMD()
fake_dmd_operator = FakeDMDOperator()
eigs = np.array([complex(0.3, 0.2), complex(0.8,0.5), 1, complex(1,1.e-2), 2, complex(2,1.e-2)])
amplitudes = np.array([1,2,3,4,5,6], dtype=complex)
setattr(fake_dmd_operator, '_eigenvalues', eigs)
setattr(fake_dmd_operator, 'eigenvalues', eigs)
setattr(fake_dmd_operator, 'modes', np.zeros((1, len(eigs))))
setattr(dmd, '_Atilde', fake_dmd_operator)
setattr(dmd, '_b', amplitudes)
return dmd
dmd1 = cook_fake_dmd()
dmd2 = cook_fake_dmd()
dmd3 = cook_fake_dmd()
mtuner = ModesTuner([dmd1, dmd2, dmd3])
mtuner.subset([0,2]).stabilize(inner_radius=0.8, outer_radius=1.2)
dmds = mtuner.get()
assert len(dmds) == 3
for i in range(3):
if i == 1:
continue
np.testing.assert_array_almost_equal(
dmds[i].operator._eigenvalues,
np.array([complex(0.3, 0.2), complex(0.8,0.5) / abs(complex(0.8,0.5)),
1, complex(1,1.e-2) / abs(complex(1,1.e-2)), 2, complex(2,1.e-2)]))
np.testing.assert_array_almost_equal(
dmds[i]._b,
np.array([1, 2*abs(complex(0.8,0.5)), 3, 4*abs(complex(1,1.e-2)), 5, 6]))
np.testing.assert_array_almost_equal(
dmds[1].operator._eigenvalues,
np.array([complex(0.3, 0.2), complex(0.8,0.5), 1, complex(1,1.e-2), 2, complex(2,1.e-2)]))
np.testing.assert_array_almost_equal(
dmds[1]._b,
np.array([1,2,3,4,5,6], dtype=complex))
def test_modes_tuner_selectors():
assert selectors['module_threshold'] == ModesSelectors.threshold
assert selectors['stable_modes'] == ModesSelectors.stable_modes
assert selectors['integral_contribution'] == ModesSelectors.integral_contribution
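# --- Editor's note: a hedged end-to-end sketch, not part of the test suite, showing how
# the tuner API exercised above composes on a real DMD fit: select the quasi-stable
# modes, then snap the surviving near-unit-circle eigenvalues onto the circle.
def example_tuning_pipeline():
    dmd = DMD(svd_rank=10)
    dmd.fit(sample_data)
    tuner = ModesTuner(dmd, in_place=False)  # tune a deep copy, leaving `dmd` intact
    tuner.select('stable_modes', max_distance_from_unity=1e-1)
    tuner.stabilize(inner_radius=0.9, outer_radius=1.1)
    return tuner.get()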
| 33.933216 | 134 | 0.649368 | 3,007 | 19,308 | 3.97273 | 0.055537 | 0.08438 | 0.084129 | 0.071823 | 0.848066 | 0.815252 | 0.789051 | 0.750712 | 0.729449 | 0.720409 | 0 | 0.057749 | 0.191941 | 19,308 | 568 | 135 | 33.992958 | 0.707922 | 0.028486 | 0 | 0.672372 | 0 | 0 | 0.036646 | 0.00304 | 0 | 0 | 0 | 0 | 0.171149 | 1 | 0.08802 | false | 0.06357 | 0.00978 | 0 | 0.166259 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 7 |
7ffabdf1c5c58eee0aa7aa6eb0d2a6cffc1b89d4 | 2,989 | py | Python | craftassist/agent/voxel_models/geoscorer/config_maker.py | kandluis/droidlet | 3851f0bdac7bc63100cfbcf1c206a94658790352 | [
"MIT"
] | null | null | null | craftassist/agent/voxel_models/geoscorer/config_maker.py | kandluis/droidlet | 3851f0bdac7bc63100cfbcf1c206a94658790352 | [
"MIT"
] | null | null | null | craftassist/agent/voxel_models/geoscorer/config_maker.py | kandluis/droidlet | 3851f0bdac7bc63100cfbcf1c206a94658790352 | [
"MIT"
] | null | null | null | """
Copyright (c) Facebook, Inc. and its affiliates.
"""
import json
""" An example:
dataset_config = {
"inst_seg": [
{"drop_perc": -1.0, "ground_type": None, "random_ground_height": False, "prob": 0.1},
{"drop_perc": -1.0, "ground_type": "flat", "random_ground_height": False, "prob": 0.1},
],
"shape_piece": [
{"ground_type": None, "random_ground_height": False, "prob": 0.1},
{"ground_type": "flat", "random_ground_height": True, "prob": 0.1},
{"ground_type": "hilly", "random_ground_height": True, "prob": 0.1},
],
"shape_pair": [
{
"shape_type": "random",
"fixed_size": None,
"max_shift": 6,
"ground_type": "flat",
"random_ground_height": True,
"prob": 0.5,
},
],
}
filename = "dataset_configs/all_datasets_base.json"
"""
dataset_config = {
"shape_pair": [
{
"shape_type": "same",
"fixed_size": 3,
"max_shift": 0,
"ground_type": "flat",
"random_ground_height": False,
"prob": 0.05,
},
{
"shape_type": "same",
"fixed_size": 4,
"max_shift": 0,
"ground_type": "flat",
"random_ground_height": False,
"prob": 0.05,
},
{
"shape_type": "same",
"fixed_size": 5,
"max_shift": 0,
"ground_type": "flat",
"random_ground_height": False,
"prob": 0.05,
},
{
"shape_type": "same",
"fixed_size": 6,
"max_shift": 0,
"ground_type": "flat",
"random_ground_height": False,
"prob": 0.05,
},
{
"shape_type": "random",
"fixed_size": None,
"max_shift": 0,
"ground_type": "flat",
"random_ground_height": True,
"prob": 0.1,
},
{
"shape_type": "random",
"fixed_size": None,
"max_shift": 0,
"ground_type": "hilly",
"random_ground_height": True,
"prob": 0.1,
},
{
"shape_type": "random",
"fixed_size": None,
"max_shift": 5,
"ground_type": "flat",
"random_ground_height": True,
"prob": 0.1,
},
{
"shape_type": "random",
"fixed_size": None,
"max_shift": 5,
"ground_type": "hilly",
"random_ground_height": True,
"prob": 0.1,
},
],
"shape_piece": [{"ground_type": "flat", "random_ground_height": True, "prob": 0.2}],
"inst_seg": [{"ground_type": "flat", "random_ground_height": False, "prob": 0.2}],
}
filename = "dataset_configs/all_good_split.json"
with open(filename, "w+") as f:
json.dump(dataset_config, f)
print("dumped", filename)
| 27.934579 | 95 | 0.4637 | 304 | 2,989 | 4.256579 | 0.190789 | 0.123648 | 0.222566 | 0.170015 | 0.801391 | 0.784389 | 0.769706 | 0.757342 | 0.729521 | 0.633694 | 0 | 0.028117 | 0.369354 | 2,989 | 106 | 96 | 28.198113 | 0.658355 | 0.016059 | 0 | 0.586667 | 0 | 0 | 0.346516 | 0.016478 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.013333 | 0 | 0.013333 | 0.013333 | 0 | 0 | 0 | null | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
3d1114ef4fb1c2cc8445dc58c238ec0cd0c81536 | 38,473 | py | Python | sdk/python/pulumi_rancher2/cluster_v2.py | pulumi/pulumi-rancher2 | 7a98af8cf598b711084a7f46c0fe71b43ed7a8ac | [
"ECL-2.0",
"Apache-2.0"
] | 3 | 2020-03-23T15:59:11.000Z | 2021-01-29T00:37:32.000Z | sdk/python/pulumi_rancher2/cluster_v2.py | pulumi/pulumi-rancher2 | 7a98af8cf598b711084a7f46c0fe71b43ed7a8ac | [
"ECL-2.0",
"Apache-2.0"
] | 76 | 2020-01-16T20:00:25.000Z | 2022-03-31T20:30:08.000Z | sdk/python/pulumi_rancher2/cluster_v2.py | pulumi/pulumi-rancher2 | 7a98af8cf598b711084a7f46c0fe71b43ed7a8ac | [
"ECL-2.0",
"Apache-2.0"
] | 2 | 2020-03-27T17:39:59.000Z | 2020-11-24T23:09:24.000Z | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from . import _utilities
from . import outputs
from ._inputs import *
__all__ = ['ClusterV2Args', 'ClusterV2']
@pulumi.input_type
class ClusterV2Args:
def __init__(__self__, *,
kubernetes_version: pulumi.Input[str],
agent_env_vars: Optional[pulumi.Input[Sequence[pulumi.Input['ClusterV2AgentEnvVarArgs']]]] = None,
annotations: Optional[pulumi.Input[Mapping[str, Any]]] = None,
cloud_credential_secret_name: Optional[pulumi.Input[str]] = None,
default_cluster_role_for_project_members: Optional[pulumi.Input[str]] = None,
default_pod_security_policy_template_name: Optional[pulumi.Input[str]] = None,
enable_network_policy: Optional[pulumi.Input[bool]] = None,
fleet_namespace: Optional[pulumi.Input[str]] = None,
labels: Optional[pulumi.Input[Mapping[str, Any]]] = None,
name: Optional[pulumi.Input[str]] = None,
rke_config: Optional[pulumi.Input['ClusterV2RkeConfigArgs']] = None):
"""
The set of arguments for constructing a ClusterV2 resource.
:param pulumi.Input[str] kubernetes_version: The kubernetes version of the Cluster v2 (list maxitems:1)
:param pulumi.Input[Sequence[pulumi.Input['ClusterV2AgentEnvVarArgs']]] agent_env_vars: Optional Agent Env Vars for Rancher agent (list)
:param pulumi.Input[Mapping[str, Any]] annotations: Annotations for cluster registration token object (map)
:param pulumi.Input[str] cloud_credential_secret_name: Cluster V2 cloud credential secret name (string)
:param pulumi.Input[str] default_cluster_role_for_project_members: Cluster V2 default cluster role for project members (string)
:param pulumi.Input[str] default_pod_security_policy_template_name: Cluster V2 default pod security policy template name (string)
:param pulumi.Input[bool] enable_network_policy: Enable k8s network policy at Cluster V2 (bool)
:param pulumi.Input[str] fleet_namespace: The fleet namespace of the Cluster v2. Default: `\"fleet-default\"` (string)
:param pulumi.Input[Mapping[str, Any]] labels: Labels for cluster registration token object (map)
:param pulumi.Input[str] name: Name of cluster registration token (string)
:param pulumi.Input['ClusterV2RkeConfigArgs'] rke_config: The RKE configuration for `k3s` and `rke2` Clusters v2. (list maxitems:1)
"""
pulumi.set(__self__, "kubernetes_version", kubernetes_version)
if agent_env_vars is not None:
pulumi.set(__self__, "agent_env_vars", agent_env_vars)
if annotations is not None:
pulumi.set(__self__, "annotations", annotations)
if cloud_credential_secret_name is not None:
pulumi.set(__self__, "cloud_credential_secret_name", cloud_credential_secret_name)
if default_cluster_role_for_project_members is not None:
pulumi.set(__self__, "default_cluster_role_for_project_members", default_cluster_role_for_project_members)
if default_pod_security_policy_template_name is not None:
pulumi.set(__self__, "default_pod_security_policy_template_name", default_pod_security_policy_template_name)
if enable_network_policy is not None:
pulumi.set(__self__, "enable_network_policy", enable_network_policy)
if fleet_namespace is not None:
pulumi.set(__self__, "fleet_namespace", fleet_namespace)
if labels is not None:
pulumi.set(__self__, "labels", labels)
if name is not None:
pulumi.set(__self__, "name", name)
if rke_config is not None:
pulumi.set(__self__, "rke_config", rke_config)
@property
@pulumi.getter(name="kubernetesVersion")
def kubernetes_version(self) -> pulumi.Input[str]:
"""
The kubernetes version of the Cluster v2 (list maxitems:1)
"""
return pulumi.get(self, "kubernetes_version")
@kubernetes_version.setter
def kubernetes_version(self, value: pulumi.Input[str]):
pulumi.set(self, "kubernetes_version", value)
@property
@pulumi.getter(name="agentEnvVars")
def agent_env_vars(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['ClusterV2AgentEnvVarArgs']]]]:
"""
Optional Agent Env Vars for Rancher agent (list)
"""
return pulumi.get(self, "agent_env_vars")
@agent_env_vars.setter
def agent_env_vars(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['ClusterV2AgentEnvVarArgs']]]]):
pulumi.set(self, "agent_env_vars", value)
@property
@pulumi.getter
def annotations(self) -> Optional[pulumi.Input[Mapping[str, Any]]]:
"""
Annotations for cluster registration token object (map)
"""
return pulumi.get(self, "annotations")
@annotations.setter
def annotations(self, value: Optional[pulumi.Input[Mapping[str, Any]]]):
pulumi.set(self, "annotations", value)
@property
@pulumi.getter(name="cloudCredentialSecretName")
def cloud_credential_secret_name(self) -> Optional[pulumi.Input[str]]:
"""
Cluster V2 cloud credential secret name (string)
"""
return pulumi.get(self, "cloud_credential_secret_name")
@cloud_credential_secret_name.setter
def cloud_credential_secret_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "cloud_credential_secret_name", value)
@property
@pulumi.getter(name="defaultClusterRoleForProjectMembers")
def default_cluster_role_for_project_members(self) -> Optional[pulumi.Input[str]]:
"""
Cluster V2 default cluster role for project members (string)
"""
return pulumi.get(self, "default_cluster_role_for_project_members")
@default_cluster_role_for_project_members.setter
def default_cluster_role_for_project_members(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "default_cluster_role_for_project_members", value)
@property
@pulumi.getter(name="defaultPodSecurityPolicyTemplateName")
def default_pod_security_policy_template_name(self) -> Optional[pulumi.Input[str]]:
"""
Cluster V2 default pod security policy template name (string)
"""
return pulumi.get(self, "default_pod_security_policy_template_name")
@default_pod_security_policy_template_name.setter
def default_pod_security_policy_template_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "default_pod_security_policy_template_name", value)
@property
@pulumi.getter(name="enableNetworkPolicy")
def enable_network_policy(self) -> Optional[pulumi.Input[bool]]:
"""
Enable k8s network policy at Cluster V2 (bool)
"""
return pulumi.get(self, "enable_network_policy")
@enable_network_policy.setter
def enable_network_policy(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "enable_network_policy", value)
@property
@pulumi.getter(name="fleetNamespace")
def fleet_namespace(self) -> Optional[pulumi.Input[str]]:
"""
The fleet namespace of the Cluster v2. Default: `\"fleet-default\"` (string)
"""
return pulumi.get(self, "fleet_namespace")
@fleet_namespace.setter
def fleet_namespace(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "fleet_namespace", value)
@property
@pulumi.getter
def labels(self) -> Optional[pulumi.Input[Mapping[str, Any]]]:
"""
Labels for cluster registration token object (map)
"""
return pulumi.get(self, "labels")
@labels.setter
def labels(self, value: Optional[pulumi.Input[Mapping[str, Any]]]):
pulumi.set(self, "labels", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
Name of cluster registration token (string)
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter(name="rkeConfig")
def rke_config(self) -> Optional[pulumi.Input['ClusterV2RkeConfigArgs']]:
"""
The RKE configuration for `k3s` and `rke2` Clusters v2. (list maxitems:1)
"""
return pulumi.get(self, "rke_config")
@rke_config.setter
def rke_config(self, value: Optional[pulumi.Input['ClusterV2RkeConfigArgs']]):
pulumi.set(self, "rke_config", value)
@pulumi.input_type
class _ClusterV2State:
def __init__(__self__, *,
agent_env_vars: Optional[pulumi.Input[Sequence[pulumi.Input['ClusterV2AgentEnvVarArgs']]]] = None,
annotations: Optional[pulumi.Input[Mapping[str, Any]]] = None,
cloud_credential_secret_name: Optional[pulumi.Input[str]] = None,
cluster_registration_token: Optional[pulumi.Input['ClusterV2ClusterRegistrationTokenArgs']] = None,
cluster_v1_id: Optional[pulumi.Input[str]] = None,
default_cluster_role_for_project_members: Optional[pulumi.Input[str]] = None,
default_pod_security_policy_template_name: Optional[pulumi.Input[str]] = None,
enable_network_policy: Optional[pulumi.Input[bool]] = None,
fleet_namespace: Optional[pulumi.Input[str]] = None,
kube_config: Optional[pulumi.Input[str]] = None,
kubernetes_version: Optional[pulumi.Input[str]] = None,
labels: Optional[pulumi.Input[Mapping[str, Any]]] = None,
name: Optional[pulumi.Input[str]] = None,
resource_version: Optional[pulumi.Input[str]] = None,
rke_config: Optional[pulumi.Input['ClusterV2RkeConfigArgs']] = None):
"""
Input properties used for looking up and filtering ClusterV2 resources.
:param pulumi.Input[Sequence[pulumi.Input['ClusterV2AgentEnvVarArgs']]] agent_env_vars: Optional Agent Env Vars for Rancher agent (list)
:param pulumi.Input[Mapping[str, Any]] annotations: Annotations for cluster registration token object (map)
:param pulumi.Input[str] cloud_credential_secret_name: Cluster V2 cloud credential secret name (string)
:param pulumi.Input['ClusterV2ClusterRegistrationTokenArgs'] cluster_registration_token: (Computed/Sensitive) Cluster Registration Token generated for the cluster v2 (list maxitems:1)
:param pulumi.Input[str] cluster_v1_id: (Computed) Cluster v1 id for cluster v2. (e.g. to be used with `rancher2_sync`) (string)
:param pulumi.Input[str] default_cluster_role_for_project_members: Cluster V2 default cluster role for project members (string)
:param pulumi.Input[str] default_pod_security_policy_template_name: Cluster V2 default pod security policy template name (string)
:param pulumi.Input[bool] enable_network_policy: Enable k8s network policy at Cluster V2 (bool)
:param pulumi.Input[str] fleet_namespace: The fleet namespace of the Cluster v2. Default: `\"fleet-default\"` (string)
:param pulumi.Input[str] kube_config: (Computed/Sensitive) Kube Config generated for the cluster v2 (string)
:param pulumi.Input[str] kubernetes_version: The kubernetes version of the Cluster v2 (list maxitems:1)
:param pulumi.Input[Mapping[str, Any]] labels: Labels for cluster registration token object (map)
:param pulumi.Input[str] name: Name of cluster registration token (string)
:param pulumi.Input[str] resource_version: (Computed) Cluster v2 k8s resource version (string)
:param pulumi.Input['ClusterV2RkeConfigArgs'] rke_config: The RKE configuration for `k3s` and `rke2` Clusters v2. (list maxitems:1)
"""
if agent_env_vars is not None:
pulumi.set(__self__, "agent_env_vars", agent_env_vars)
if annotations is not None:
pulumi.set(__self__, "annotations", annotations)
if cloud_credential_secret_name is not None:
pulumi.set(__self__, "cloud_credential_secret_name", cloud_credential_secret_name)
if cluster_registration_token is not None:
pulumi.set(__self__, "cluster_registration_token", cluster_registration_token)
if cluster_v1_id is not None:
pulumi.set(__self__, "cluster_v1_id", cluster_v1_id)
if default_cluster_role_for_project_members is not None:
pulumi.set(__self__, "default_cluster_role_for_project_members", default_cluster_role_for_project_members)
if default_pod_security_policy_template_name is not None:
pulumi.set(__self__, "default_pod_security_policy_template_name", default_pod_security_policy_template_name)
if enable_network_policy is not None:
pulumi.set(__self__, "enable_network_policy", enable_network_policy)
if fleet_namespace is not None:
pulumi.set(__self__, "fleet_namespace", fleet_namespace)
if kube_config is not None:
pulumi.set(__self__, "kube_config", kube_config)
if kubernetes_version is not None:
pulumi.set(__self__, "kubernetes_version", kubernetes_version)
if labels is not None:
pulumi.set(__self__, "labels", labels)
if name is not None:
pulumi.set(__self__, "name", name)
if resource_version is not None:
pulumi.set(__self__, "resource_version", resource_version)
if rke_config is not None:
pulumi.set(__self__, "rke_config", rke_config)
@property
@pulumi.getter(name="agentEnvVars")
def agent_env_vars(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['ClusterV2AgentEnvVarArgs']]]]:
"""
Optional Agent Env Vars for Rancher agent (list)
"""
return pulumi.get(self, "agent_env_vars")
@agent_env_vars.setter
def agent_env_vars(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['ClusterV2AgentEnvVarArgs']]]]):
pulumi.set(self, "agent_env_vars", value)
@property
@pulumi.getter
def annotations(self) -> Optional[pulumi.Input[Mapping[str, Any]]]:
"""
Annotations for cluster registration token object (map)
"""
return pulumi.get(self, "annotations")
@annotations.setter
def annotations(self, value: Optional[pulumi.Input[Mapping[str, Any]]]):
pulumi.set(self, "annotations", value)
@property
@pulumi.getter(name="cloudCredentialSecretName")
def cloud_credential_secret_name(self) -> Optional[pulumi.Input[str]]:
"""
Cluster V2 cloud credential secret name (string)
"""
return pulumi.get(self, "cloud_credential_secret_name")
@cloud_credential_secret_name.setter
def cloud_credential_secret_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "cloud_credential_secret_name", value)
@property
@pulumi.getter(name="clusterRegistrationToken")
def cluster_registration_token(self) -> Optional[pulumi.Input['ClusterV2ClusterRegistrationTokenArgs']]:
"""
(Computed/Sensitive) Cluster Registration Token generated for the cluster v2 (list maxitems:1)
"""
return pulumi.get(self, "cluster_registration_token")
@cluster_registration_token.setter
def cluster_registration_token(self, value: Optional[pulumi.Input['ClusterV2ClusterRegistrationTokenArgs']]):
pulumi.set(self, "cluster_registration_token", value)
@property
@pulumi.getter(name="clusterV1Id")
def cluster_v1_id(self) -> Optional[pulumi.Input[str]]:
"""
(Computed) Cluster v1 ID for the cluster v2 (e.g. to be used with `rancher2_sync`) (string)
"""
return pulumi.get(self, "cluster_v1_id")
@cluster_v1_id.setter
def cluster_v1_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "cluster_v1_id", value)
@property
@pulumi.getter(name="defaultClusterRoleForProjectMembers")
def default_cluster_role_for_project_members(self) -> Optional[pulumi.Input[str]]:
"""
Cluster V2 default cluster role for project members (string)
"""
return pulumi.get(self, "default_cluster_role_for_project_members")
@default_cluster_role_for_project_members.setter
def default_cluster_role_for_project_members(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "default_cluster_role_for_project_members", value)
@property
@pulumi.getter(name="defaultPodSecurityPolicyTemplateName")
def default_pod_security_policy_template_name(self) -> Optional[pulumi.Input[str]]:
"""
Cluster V2 default pod security policy template name (string)
"""
return pulumi.get(self, "default_pod_security_policy_template_name")
@default_pod_security_policy_template_name.setter
def default_pod_security_policy_template_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "default_pod_security_policy_template_name", value)
@property
@pulumi.getter(name="enableNetworkPolicy")
def enable_network_policy(self) -> Optional[pulumi.Input[bool]]:
"""
Enable k8s network policy at Cluster V2 (bool)
"""
return pulumi.get(self, "enable_network_policy")
@enable_network_policy.setter
def enable_network_policy(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "enable_network_policy", value)
@property
@pulumi.getter(name="fleetNamespace")
def fleet_namespace(self) -> Optional[pulumi.Input[str]]:
"""
The fleet namespace of the Cluster v2. Default: `"fleet-default"` (string)
"""
return pulumi.get(self, "fleet_namespace")
@fleet_namespace.setter
def fleet_namespace(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "fleet_namespace", value)
@property
@pulumi.getter(name="kubeConfig")
def kube_config(self) -> Optional[pulumi.Input[str]]:
"""
(Computed/Sensitive) Kube Config generated for the cluster v2 (string)
"""
return pulumi.get(self, "kube_config")
@kube_config.setter
def kube_config(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "kube_config", value)
@property
@pulumi.getter(name="kubernetesVersion")
def kubernetes_version(self) -> Optional[pulumi.Input[str]]:
"""
The Kubernetes version of the Cluster v2 (string)
"""
return pulumi.get(self, "kubernetes_version")
@kubernetes_version.setter
def kubernetes_version(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "kubernetes_version", value)
@property
@pulumi.getter
def labels(self) -> Optional[pulumi.Input[Mapping[str, Any]]]:
"""
Labels for cluster registration token object (map)
"""
return pulumi.get(self, "labels")
@labels.setter
def labels(self, value: Optional[pulumi.Input[Mapping[str, Any]]]):
pulumi.set(self, "labels", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
Name of cluster registration token (string)
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter(name="resourceVersion")
def resource_version(self) -> Optional[pulumi.Input[str]]:
"""
(Computed) Cluster v2 k8s resource version (string)
"""
return pulumi.get(self, "resource_version")
@resource_version.setter
def resource_version(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "resource_version", value)
@property
@pulumi.getter(name="rkeConfig")
def rke_config(self) -> Optional[pulumi.Input['ClusterV2RkeConfigArgs']]:
"""
The RKE configuration for `k3s` and `rke2` Clusters v2. (list maxitems:1)
"""
return pulumi.get(self, "rke_config")
@rke_config.setter
def rke_config(self, value: Optional[pulumi.Input['ClusterV2RkeConfigArgs']]):
pulumi.set(self, "rke_config", value)
class ClusterV2(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
agent_env_vars: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['ClusterV2AgentEnvVarArgs']]]]] = None,
annotations: Optional[pulumi.Input[Mapping[str, Any]]] = None,
cloud_credential_secret_name: Optional[pulumi.Input[str]] = None,
default_cluster_role_for_project_members: Optional[pulumi.Input[str]] = None,
default_pod_security_policy_template_name: Optional[pulumi.Input[str]] = None,
enable_network_policy: Optional[pulumi.Input[bool]] = None,
fleet_namespace: Optional[pulumi.Input[str]] = None,
kubernetes_version: Optional[pulumi.Input[str]] = None,
labels: Optional[pulumi.Input[Mapping[str, Any]]] = None,
name: Optional[pulumi.Input[str]] = None,
rke_config: Optional[pulumi.Input[pulumi.InputType['ClusterV2RkeConfigArgs']]] = None,
__props__=None):
"""
Provides a Rancher v2 Cluster v2 resource. This can be used to create RKE2 and K3S Clusters for Rancher v2 environments and retrieve their information. This resource is available as a tech preview in Rancher v2.6.0 and above.
## Example Usage
### Creating Rancher v2 custom cluster v2
```python
import pulumi
import pulumi_rancher2 as rancher2
# Create a new rancher v2 K3S custom Cluster v2
foo = rancher2.ClusterV2("foo",
default_cluster_role_for_project_members="user",
enable_network_policy=False,
fleet_namespace="fleet-ns",
kubernetes_version="v1.21.4+k3s1")
```
**Note** Once created, get the node command from `rancher2_cluster_v2.foo.cluster_registration_token`
## Import
Clusters v2 can be imported using the Rancher Cluster v2 ID, which has the form <FLEET_NAMESPACE>/<CLUSTER_NAME>
```sh
$ pulumi import rancher2:index/clusterV2:ClusterV2 foo <FLEET_NAMESPACE>/<CLUSTER_NAME>
```
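Once the cluster is provisioned, its computed outputs can be exported. A minimal sketch: `kube_config` is a documented output of this resource, while the `node_command` attribute on the registration token is assumed from the token schema.
```python
pulumi.export("kubeconfig", foo.kube_config)
pulumi.export("node_command", foo.cluster_registration_token.node_command)
```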
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['ClusterV2AgentEnvVarArgs']]]] agent_env_vars: Optional Agent Env Vars for Rancher agent (list)
:param pulumi.Input[Mapping[str, Any]] annotations: Annotations for cluster registration token object (map)
:param pulumi.Input[str] cloud_credential_secret_name: Cluster V2 cloud credential secret name (string)
:param pulumi.Input[str] default_cluster_role_for_project_members: Cluster V2 default cluster role for project members (string)
:param pulumi.Input[str] default_pod_security_policy_template_name: Cluster V2 default pod security policy template name (string)
:param pulumi.Input[bool] enable_network_policy: Enable k8s network policy at Cluster V2 (bool)
:param pulumi.Input[str] fleet_namespace: The fleet namespace of the Cluster v2. Default: `"fleet-default"` (string)
:param pulumi.Input[str] kubernetes_version: The Kubernetes version of the Cluster v2 (string)
:param pulumi.Input[Mapping[str, Any]] labels: Labels for cluster registration token object (map)
:param pulumi.Input[str] name: Name of cluster registration token (string)
:param pulumi.Input[pulumi.InputType['ClusterV2RkeConfigArgs']] rke_config: The RKE configuration for `k3s` and `rke2` Clusters v2. (list maxitems:1)
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: ClusterV2Args,
opts: Optional[pulumi.ResourceOptions] = None):
"""
Provides a Rancher v2 Cluster v2 resource. This can be used to create RKE2 and K3S Clusters for Rancher v2 environments and retrieve their information. This resource is available as a tech preview in Rancher v2.6.0 and above.
## Example Usage
### Creating Rancher v2 custom cluster v2
```python
import pulumi
import pulumi_rancher2 as rancher2
# Create a new rancher v2 K3S custom Cluster v2
foo = rancher2.ClusterV2("foo",
default_cluster_role_for_project_members="user",
enable_network_policy=False,
fleet_namespace="fleet-ns",
kubernetes_version="v1.21.4+k3s1")
```
**Note** Once created, get the node command from `rancher2_cluster_v2.foo.cluster_registration_token`
## Import
Clusters v2 can be imported using the Rancher Cluster v2 ID, which has the form <FLEET_NAMESPACE>/<CLUSTER_NAME>
```sh
$ pulumi import rancher2:index/clusterV2:ClusterV2 foo <FLEET_NAMESPACE>/<CLUSTER_NAME>
```
:param str resource_name: The name of the resource.
:param ClusterV2Args args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(ClusterV2Args, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
agent_env_vars: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['ClusterV2AgentEnvVarArgs']]]]] = None,
annotations: Optional[pulumi.Input[Mapping[str, Any]]] = None,
cloud_credential_secret_name: Optional[pulumi.Input[str]] = None,
default_cluster_role_for_project_members: Optional[pulumi.Input[str]] = None,
default_pod_security_policy_template_name: Optional[pulumi.Input[str]] = None,
enable_network_policy: Optional[pulumi.Input[bool]] = None,
fleet_namespace: Optional[pulumi.Input[str]] = None,
kubernetes_version: Optional[pulumi.Input[str]] = None,
labels: Optional[pulumi.Input[Mapping[str, Any]]] = None,
name: Optional[pulumi.Input[str]] = None,
rke_config: Optional[pulumi.Input[pulumi.InputType['ClusterV2RkeConfigArgs']]] = None,
__props__=None):
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = ClusterV2Args.__new__(ClusterV2Args)
__props__.__dict__["agent_env_vars"] = agent_env_vars
__props__.__dict__["annotations"] = annotations
__props__.__dict__["cloud_credential_secret_name"] = cloud_credential_secret_name
__props__.__dict__["default_cluster_role_for_project_members"] = default_cluster_role_for_project_members
__props__.__dict__["default_pod_security_policy_template_name"] = default_pod_security_policy_template_name
__props__.__dict__["enable_network_policy"] = enable_network_policy
__props__.__dict__["fleet_namespace"] = fleet_namespace
if kubernetes_version is None and not opts.urn:
raise TypeError("Missing required property 'kubernetes_version'")
__props__.__dict__["kubernetes_version"] = kubernetes_version
__props__.__dict__["labels"] = labels
__props__.__dict__["name"] = name
__props__.__dict__["rke_config"] = rke_config
__props__.__dict__["cluster_registration_token"] = None
__props__.__dict__["cluster_v1_id"] = None
__props__.__dict__["kube_config"] = None
__props__.__dict__["resource_version"] = None
super(ClusterV2, __self__).__init__(
'rancher2:index/clusterV2:ClusterV2',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None,
agent_env_vars: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['ClusterV2AgentEnvVarArgs']]]]] = None,
annotations: Optional[pulumi.Input[Mapping[str, Any]]] = None,
cloud_credential_secret_name: Optional[pulumi.Input[str]] = None,
cluster_registration_token: Optional[pulumi.Input[pulumi.InputType['ClusterV2ClusterRegistrationTokenArgs']]] = None,
cluster_v1_id: Optional[pulumi.Input[str]] = None,
default_cluster_role_for_project_members: Optional[pulumi.Input[str]] = None,
default_pod_security_policy_template_name: Optional[pulumi.Input[str]] = None,
enable_network_policy: Optional[pulumi.Input[bool]] = None,
fleet_namespace: Optional[pulumi.Input[str]] = None,
kube_config: Optional[pulumi.Input[str]] = None,
kubernetes_version: Optional[pulumi.Input[str]] = None,
labels: Optional[pulumi.Input[Mapping[str, Any]]] = None,
name: Optional[pulumi.Input[str]] = None,
resource_version: Optional[pulumi.Input[str]] = None,
rke_config: Optional[pulumi.Input[pulumi.InputType['ClusterV2RkeConfigArgs']]] = None) -> 'ClusterV2':
"""
Get an existing ClusterV2 resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['ClusterV2AgentEnvVarArgs']]]] agent_env_vars: Optional Agent Env Vars for Rancher agent (list)
:param pulumi.Input[Mapping[str, Any]] annotations: Annotations for cluster registration token object (map)
:param pulumi.Input[str] cloud_credential_secret_name: Cluster V2 cloud credential secret name (string)
:param pulumi.Input[pulumi.InputType['ClusterV2ClusterRegistrationTokenArgs']] cluster_registration_token: (Computed/Sensitive) Cluster Registration Token generated for the cluster v2 (list maxitems:1)
:param pulumi.Input[str] cluster_v1_id: (Computed) Cluster v1 ID for the cluster v2 (e.g. to be used with `rancher2_sync`) (string)
:param pulumi.Input[str] default_cluster_role_for_project_members: Cluster V2 default cluster role for project members (string)
:param pulumi.Input[str] default_pod_security_policy_template_name: Cluster V2 default pod security policy template name (string)
:param pulumi.Input[bool] enable_network_policy: Enable k8s network policy at Cluster V2 (bool)
:param pulumi.Input[str] fleet_namespace: The fleet namespace of the Cluster v2. Default: `"fleet-default"` (string)
:param pulumi.Input[str] kube_config: (Computed/Sensitive) Kube Config generated for the cluster v2 (string)
:param pulumi.Input[str] kubernetes_version: The Kubernetes version of the Cluster v2 (string)
:param pulumi.Input[Mapping[str, Any]] labels: Labels for cluster registration token object (map)
:param pulumi.Input[str] name: Name of cluster registration token (string)
:param pulumi.Input[str] resource_version: (Computed) Cluster v2 k8s resource version (string)
:param pulumi.Input[pulumi.InputType['ClusterV2RkeConfigArgs']] rke_config: The RKE configuration for `k3s` and `rke2` Clusters v2. (list maxitems:1)
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = _ClusterV2State.__new__(_ClusterV2State)
__props__.__dict__["agent_env_vars"] = agent_env_vars
__props__.__dict__["annotations"] = annotations
__props__.__dict__["cloud_credential_secret_name"] = cloud_credential_secret_name
__props__.__dict__["cluster_registration_token"] = cluster_registration_token
__props__.__dict__["cluster_v1_id"] = cluster_v1_id
__props__.__dict__["default_cluster_role_for_project_members"] = default_cluster_role_for_project_members
__props__.__dict__["default_pod_security_policy_template_name"] = default_pod_security_policy_template_name
__props__.__dict__["enable_network_policy"] = enable_network_policy
__props__.__dict__["fleet_namespace"] = fleet_namespace
__props__.__dict__["kube_config"] = kube_config
__props__.__dict__["kubernetes_version"] = kubernetes_version
__props__.__dict__["labels"] = labels
__props__.__dict__["name"] = name
__props__.__dict__["resource_version"] = resource_version
__props__.__dict__["rke_config"] = rke_config
return ClusterV2(resource_name, opts=opts, __props__=__props__)
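# Usage sketch for this lookup (resource name and ID are illustrative):
#   existing = ClusterV2.get("foo-imported", id="fleet-default/foo")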
@property
@pulumi.getter(name="agentEnvVars")
def agent_env_vars(self) -> pulumi.Output[Optional[Sequence['outputs.ClusterV2AgentEnvVar']]]:
"""
Optional Agent Env Vars for Rancher agent (list)
"""
return pulumi.get(self, "agent_env_vars")
@property
@pulumi.getter
def annotations(self) -> pulumi.Output[Mapping[str, Any]]:
"""
Annotations for cluster registration token object (map)
"""
return pulumi.get(self, "annotations")
@property
@pulumi.getter(name="cloudCredentialSecretName")
def cloud_credential_secret_name(self) -> pulumi.Output[Optional[str]]:
"""
Cluster V2 cloud credential secret name (string)
"""
return pulumi.get(self, "cloud_credential_secret_name")
@property
@pulumi.getter(name="clusterRegistrationToken")
def cluster_registration_token(self) -> pulumi.Output['outputs.ClusterV2ClusterRegistrationToken']:
"""
(Computed/Sensitive) Cluster Registration Token generated for the cluster v2 (list maxitems:1)
"""
return pulumi.get(self, "cluster_registration_token")
@property
@pulumi.getter(name="clusterV1Id")
def cluster_v1_id(self) -> pulumi.Output[str]:
"""
(Computed) Cluster v1 ID for the cluster v2 (e.g. to be used with `rancher2_sync`) (string)
"""
return pulumi.get(self, "cluster_v1_id")
@property
@pulumi.getter(name="defaultClusterRoleForProjectMembers")
def default_cluster_role_for_project_members(self) -> pulumi.Output[Optional[str]]:
"""
Cluster V2 default cluster role for project members (string)
"""
return pulumi.get(self, "default_cluster_role_for_project_members")
@property
@pulumi.getter(name="defaultPodSecurityPolicyTemplateName")
def default_pod_security_policy_template_name(self) -> pulumi.Output[Optional[str]]:
"""
Cluster V2 default pod security policy template name (string)
"""
return pulumi.get(self, "default_pod_security_policy_template_name")
@property
@pulumi.getter(name="enableNetworkPolicy")
def enable_network_policy(self) -> pulumi.Output[bool]:
"""
Enable k8s network policy at Cluster V2 (bool)
"""
return pulumi.get(self, "enable_network_policy")
@property
@pulumi.getter(name="fleetNamespace")
def fleet_namespace(self) -> pulumi.Output[Optional[str]]:
"""
The fleet namespace of the Cluster v2. Default: `"fleet-default"` (string)
"""
return pulumi.get(self, "fleet_namespace")
@property
@pulumi.getter(name="kubeConfig")
def kube_config(self) -> pulumi.Output[str]:
"""
(Computed/Sensitive) Kube Config generated for the cluster v2 (string)
"""
return pulumi.get(self, "kube_config")
@property
@pulumi.getter(name="kubernetesVersion")
def kubernetes_version(self) -> pulumi.Output[str]:
"""
The Kubernetes version of the Cluster v2 (string)
"""
return pulumi.get(self, "kubernetes_version")
@property
@pulumi.getter
def labels(self) -> pulumi.Output[Mapping[str, Any]]:
"""
Labels for cluster registration token object (map)
"""
return pulumi.get(self, "labels")
@property
@pulumi.getter
def name(self) -> pulumi.Output[str]:
"""
Name of cluster registration token (string)
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="resourceVersion")
def resource_version(self) -> pulumi.Output[str]:
"""
(Computed) Cluster v2 k8s resource version (string)
"""
return pulumi.get(self, "resource_version")
@property
@pulumi.getter(name="rkeConfig")
def rke_config(self) -> pulumi.Output['outputs.ClusterV2RkeConfig']:
"""
The RKE configuration for `k3s` and `rke2` Clusters v2. (list maxitems:1)
"""
return pulumi.get(self, "rke_config")
| 49.135377 | 233 | 0.682739 | 4,460 | 38,473 | 5.60852 | 0.051794 | 0.080915 | 0.085072 | 0.055409 | 0.913688 | 0.896258 | 0.881546 | 0.867994 | 0.859799 | 0.842168 | 0 | 0.008353 | 0.215866 | 38,473 | 782 | 234 | 49.19821 | 0.820804 | 0.29574 | 0 | 0.778523 | 1 | 0 | 0.151091 | 0.093214 | 0 | 0 | 0 | 0 | 0 | 1 | 0.165548 | false | 0.002237 | 0.01566 | 0 | 0.281879 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
3d301e577072dfafeb61fae3a6f5a7970c123ee8 | 858 | py | Python | fault_tolerant_flight_control_drl/envs/citation/__init__.py | kdally/fault-tolerant-flight-control-drl | 800a1c9319b44ab2b1d17f6e19266c2392d6e57b | [
"MIT"
] | 8 | 2021-02-27T09:49:57.000Z | 2022-03-21T16:28:08.000Z | fault_tolerant_flight_control_drl/envs/citation/__init__.py | kdally/fault-tolerant-flight-control-drl | 800a1c9319b44ab2b1d17f6e19266c2392d6e57b | [
"MIT"
] | null | null | null | fault_tolerant_flight_control_drl/envs/citation/__init__.py | kdally/fault-tolerant-flight-control-drl | 800a1c9319b44ab2b1d17f6e19266c2392d6e57b | [
"MIT"
] | 2 | 2021-03-04T07:24:35.000Z | 2021-11-17T04:21:08.000Z | from fault_tolerant_flight_control_drl.envs.citation.citation import CitationNormal
from fault_tolerant_flight_control_drl.envs.citation.citation import CitationIcing
from fault_tolerant_flight_control_drl.envs.citation.citation import CitationCgShift
from fault_tolerant_flight_control_drl.envs.citation.citation import CitationVertTail
from fault_tolerant_flight_control_drl.envs.citation.citation import CitationHorzTail
from fault_tolerant_flight_control_drl.envs.citation.citation import CitationElevRange
from fault_tolerant_flight_control_drl.envs.citation.citation import CitationAileronEff
from fault_tolerant_flight_control_drl.envs.citation.citation import CitationRudderStuck
from fault_tolerant_flight_control_drl.envs.citation.citation import CitationVerif
from fault_tolerant_flight_control_drl.envs.citation.citation import CitationDistAlpha
| 78 | 88 | 0.918415 | 110 | 858 | 6.8 | 0.172727 | 0.120321 | 0.227273 | 0.307487 | 0.78877 | 0.78877 | 0.78877 | 0.78877 | 0.78877 | 0.78877 | 0 | 0 | 0.04662 | 858 | 10 | 89 | 85.8 | 0.914425 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 1 | 1 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 10 |
3d3b2ff32c561608f1a104672aa1f404d5370db0 | 89 | py | Python | openwisp_network_topology/management/commands/save_snapshot.py | DaffyTheDuck/openwisp-network-topology | a8c9212f0d9cca76f83b41af0e3fc89330f408bb | [
"BSD-3-Clause"
] | 105 | 2017-06-14T06:06:16.000Z | 2022-03-29T18:50:38.000Z | openwisp_network_topology/management/commands/save_snapshot.py | DaffyTheDuck/openwisp-network-topology | a8c9212f0d9cca76f83b41af0e3fc89330f408bb | [
"BSD-3-Clause"
] | 127 | 2017-06-02T08:19:13.000Z | 2022-03-18T00:26:13.000Z | openwisp_network_topology/management/commands/save_snapshot.py | ManishShah120/openwisp-network-topology | 0ed720eff1eb733a00cdbfc83292f16fe7d56e12 | [
"BSD-3-Clause"
] | 62 | 2017-06-21T10:28:10.000Z | 2022-03-31T22:06:09.000Z | from . import BaseSaveSnapshotCommand
class Command(BaseSaveSnapshotCommand):
pass
| 14.833333 | 39 | 0.808989 | 7 | 89 | 10.285714 | 0.857143 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.146067 | 89 | 5 | 40 | 17.8 | 0.947368 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0.333333 | 0.333333 | 0 | 0.666667 | 0 | 1 | 0 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 7 |
3d576c89083e0f12f3be6b9df5cf6768b1cc6699 | 21,226 | py | Python | analysis/my_plot.py | htem/cb2_project_analysis | a677cbadc7e3bf0074975a94ed1d06b4801899c0 | [
"MIT"
] | null | null | null | analysis/my_plot.py | htem/cb2_project_analysis | a677cbadc7e3bf0074975a94ed1d06b4801899c0 | [
"MIT"
] | null | null | null | analysis/my_plot.py | htem/cb2_project_analysis | a677cbadc7e3bf0074975a94ed1d06b4801899c0 | [
"MIT"
] | null | null | null |
import warnings
warnings.filterwarnings("ignore")
from collections import defaultdict
import matplotlib.pyplot as plt
# import pandas as pd
import seaborn as sns
import numpy as np
# import copy
import matplotlib
from matplotlib import rc
# font = {'size' : 16}
# matplotlib.rc('font', **font)
#rc('font',**{'family':'sans-serif','sans-serif':['Helvetica']})
## for Palatino and other serif fonts use:
#rc('font',**{'family':'serif','serif':['Palatino']})
#rc('text', usetex=True)
# change font
matplotlib.rcParams['font.sans-serif'] = "Arial"
matplotlib.rcParams['font.family'] = "sans-serif"
from my_plot_data import MyPlotData
def compute_aspect_ratio(height, width, aspect):
if height and width:
aspect = None
if height is None and width is None:
height = 6
if aspect is None:
aspect = width/height
if height is None:
height = width/aspect
print(f'Height: {height}, Aspect: {aspect}')
return height, aspect
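# Example: compute_aspect_ratio(height=None, width=8, aspect=2.0) returns
# (4.0, 2.0); the missing height is derived as width / aspect.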
def my_box_plot(
mpd,
y,
ylim=None,
y_tick_interval=None,
save_filename=None,
context='paper',
kind='box',
font_scale=None,
x_axis_label='',
y_axis_label='',
height=None,
width=None,
aspect=1/1.6,
):
height, aspect = compute_aspect_ratio(height, width, aspect)
if font_scale is None:
if context == 'talk': font_scale = 1
if context == 'paper': font_scale = 1.4
sns.set_style('whitegrid')
sns.set_context(context, font_scale=font_scale)
g = sns.catplot(
kind=kind,
y=y,
data=mpd.to_dataframe(),
linewidth=1,
height=height, aspect=aspect,
whis=(10, 90),
)
if ylim:
g.ax.set_ylim(ylim)
if y_tick_interval:
lims = g.ax.get_ylim()
g.ax.set_yticks(np.arange(lims[0], lims[1]+0.001, y_tick_interval))
g.set_axis_labels(x_axis_label, y_axis_label)
plt.tight_layout()
if save_filename is None:
plt.show()
else:
plt.savefig(save_filename, bbox_inches='tight')
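# Usage sketch (the column name is illustrative):
# my_box_plot(mpd, y='latency_ms', width=4, y_axis_label='Latency (ms)')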
# color = dict(boxes='black', whiskers='black', medians='red', caps='black')
# whiskerprops = dict(linestyle='-',linewidth=1, color='black')
# meanprops = dict(linestyle='-',linewidth=1, color='black')
def my_catplot(
mpd,
y,
x=None,
hue=None,
kind='bar',
hue_order=None,
ylim=None,
xlim=None,
y_tick_interval=None,
save_filename=None,
context='paper',
font_scale=None,
x_axis_label='',
y_axis_label='',
show=False,
xticklabels=None,
height=None,
width=None,
aspect=1.33,
add_swarm=False,
custom_legend_loc=False,
custom_legend_fn=None,
close=True,
linewidth=1,
add_box=False,
add_strip=False,
add_kwargs={},
add_data=None,
**kwargs,
):
height, aspect = compute_aspect_ratio(height, width, aspect)
if font_scale is None:
if context == 'talk': font_scale = 1
if context == 'paper': font_scale = 1.5
if context == 'paper':
sns.set_style('ticks')
else:
sns.set_style('whitegrid')
sns.set_context(context, font_scale=font_scale)
g = sns.catplot(
kind=kind,
x=x, y=y, hue=hue,
hue_order=hue_order,
data=mpd.to_dataframe(),
linewidth=linewidth,
height=height, aspect=aspect,
**kwargs,
)
if add_data is None:
add_data = mpd
if add_swarm:
sns.swarmplot(
ax=g.ax,
x=x, y=y,
# hue=hue,
data=add_data.to_dataframe(),
color=".25",
)
if add_box:
sns.boxplot(
ax=g.ax,
x=x, y=y,
data=add_data.to_dataframe(),
)
if add_strip:
sns.stripplot(
ax=g.ax,
x=x, y=y,
data=add_data.to_dataframe(),
**add_kwargs,
)
if ylim:
g.ax.set_ylim(ylim)
if xlim:
g.ax.set_xlim(xlim)
if y_tick_interval:
lims = g.ax.get_ylim()
g.ax.set_yticks(np.arange(lims[0], lims[1]+0.001, y_tick_interval))
# if kind != "violin":
plt.tight_layout()
if g.legend:
g.legend.set_title("")
g.set_axis_labels(x_axis_label, y_axis_label)
if xticklabels:
g.set_xticklabels(xticklabels)
if custom_legend_fn:
g._legend.remove()
custom_legend_fn(plt)
if save_filename:
plt.savefig(save_filename, bbox_inches='tight', transparent=True)
if show or save_filename is None:
plt.show()
else:
if close:
plt.close()
return g
def my_cat_bar_plot(*args, **kwargs):
return my_catplot(*args, **kwargs)
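# Usage sketch (column names are illustrative):
# my_cat_bar_plot(mpd, x='condition', y='score', kind='bar', save_filename='fig.svg')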
def my_displot(
mpd,
x,
y=None,
hue=None,
style=None,
size=None,
alpha=1.0,
kind='hist',
hue_order=None,
ylim=None,
xlim=None,
y_tick_interval=None,
save_filename=None,
context='paper',
font_scale=None,
x_axis_label='',
y_axis_label='',
show=False,
xticklabels=None,
height=None,
width=None,
aspect=1.33,
palette=None,
draw_lines=None,
log_scale_x=False,
custom_legend_fn=None,
**kwargs
):
height, aspect = compute_aspect_ratio(height, width, aspect)
if font_scale is None:
if context == 'talk': font_scale = 1
if context == 'paper': font_scale = 1.5
# sns.set_style('whitegrid')
sns.set_context(context, font_scale=font_scale)
if context == 'talk':
sns.set_style('whitegrid')
else:
sns.set_style('ticks')
g = sns.displot(
# kind="point",
kind=kind,
x=x, y=y, hue=hue,
# style=style,
# size=size,
# alpha=alpha,
hue_order=hue_order,
data=mpd.to_dataframe(),
linewidth=1,
height=height, aspect=aspect,
# ci='sd',
palette=palette,
**kwargs
# ci=5,
# whis=(10, 90),
)
if log_scale_x:
for ax in g.axes[0]:
ax.set(xscale='log')
if draw_lines:
my_draw_lines(draw_lines, g.axes[0][0])
if ylim:
g.ax.set_ylim(ylim)
if xlim:
g.ax.set_xlim(xlim)
if y_tick_interval:
lims = g.ax.get_ylim()
g.ax.set_yticks(np.arange(lims[0], lims[1]+0.001, y_tick_interval))
plt.tight_layout()
if g.legend:
g.legend.set_title("")
g.set_axis_labels(x_axis_label, y_axis_label)
if xticklabels:
g.set_xticklabels(xticklabels)
if custom_legend_fn:
g._legend.remove()
custom_legend_fn(plt)
if save_filename:
plt.savefig(save_filename, bbox_inches='tight', transparent=True)
# plt.clf()
if show or save_filename is None:
plt.show()
else:
plt.close()
def my_relplot(
mpd,
x, y, hue=None,
size=None,
alpha=1.0,
kind='line',
hue_order=None,
ylim=None,
xlim=None,
y_tick_interval=None,
save_filename=None,
context='paper',
font_scale=None,
x_axis_label='',
y_axis_label='',
show=False,
xticklabels=None,
height=None,
width=None,
aspect=1.33,
palette=None,
draw_lines=None,
custom_legend_loc=False,
custom_legend_fn=None,
log_scale_x=False,
log_scale_y=False,
xticks=None,
title=None,
old_tight_layout=False,
tight_layout=False,
linewidth=1,
**kwargs
):
height, aspect = compute_aspect_ratio(height, width, aspect)
if font_scale is None:
if context == 'talk': font_scale = 1
if context == 'paper': font_scale = 1.5
if context == 'talk':
sns.set_style('whitegrid')
else:
sns.set_style('ticks')
sns.set_context(context, font_scale=font_scale)
g = sns.relplot(
# kind="point",
kind=kind,
x=x, y=y, hue=hue,
size=size,
alpha=alpha,
hue_order=hue_order,
data=mpd.to_dataframe(),
linewidth=linewidth,
height=height, aspect=aspect,
# ci='sd',
palette=palette,
**kwargs
# ci=5,
# whis=(10, 90),
)
if draw_lines:
my_draw_lines(draw_lines, g.axes[0][0])
if xlim:
g.ax.set_xlim(xlim)
if log_scale_x:
for ax in g.axes[0]:
ax.set(xscale='log')
if log_scale_y:
for ax in g.axes[0]:
ax.set(yscale='log')
if ylim:
g.ax.set_ylim(ylim)
if y_tick_interval:
lims = g.ax.get_ylim()
g.ax.set_yticks(np.arange(lims[0], lims[1]+0.001, y_tick_interval))
if xticks:
for ax in g.axes[0]:
ax.set_xticks(xticks)
# g.set_axis_labels(x_axis_label, y_axis_label, fontname='Arial')
# g.set_axis_labels(x_axis_label, y_axis_label, fontname='Monospace')
g.set_axis_labels(x_axis_label, y_axis_label)
if xticklabels:
g.set_xticklabels(xticklabels)
if title:
for ax in g.axes[0]:
ax.set_title(title)
if tight_layout:
plt.tight_layout()
if custom_legend_loc:
g._legend.remove()
plt.legend(loc=custom_legend_loc)
if custom_legend_fn:
g._legend.remove()
custom_legend_fn(plt)
if g.legend:
g.legend.set_title("")
if old_tight_layout:
plt.tight_layout()
if save_filename:
plt.savefig(save_filename, bbox_inches='tight', transparent=True)
# plt.clf()
if show or save_filename is None:
plt.show()
else:
plt.close()
return g
def my_regplot(
mpd,
x, y,
# hue=None,
# style=None,
# size=None,
# alpha=1.0,
# hue_order=None,
ylim=None,
xlim=None,
y_tick_interval=None,
save_filename=None,
context='paper',
font_scale=None,
x_axis_label='',
y_axis_label='',
show=False,
xticklabels=None,
height=None,
width=None,
aspect=1.33,
# palette=None,
# draw_lines=None,
custom_legend_loc=False,
custom_legend_fn=None,
log_scale_x=False,
log_scale_y=False,
xticks=None,
title=None,
old_tight_layout=False,
tight_layout=False,
# linewidth=1,
**kwargs
):
height, aspect = compute_aspect_ratio(height, width, aspect)
if font_scale is None:
if context == 'talk': font_scale = 1
if context == 'paper': font_scale = 1.5
sns.set_context(context, font_scale=font_scale)
if context == 'talk':
sns.set_style('whitegrid')
else:
sns.set_style('ticks')
g = sns.regplot(
# g = sns.lmplot(
x=x, y=y,
# hue=hue,
# style=style,
# size=size,
# alpha=alpha,
# hue_order=hue_order,
data=mpd.to_dataframe(),
# linewidth=linewidth,
# height=height, aspect=aspect,
# palette=palette,
**kwargs
)
# sns.regplot returns a matplotlib Axes, so operate on it directly.
if log_scale_x:
g.set_xscale('log')
if log_scale_y:
g.set_yscale('log')
if ylim:
g.axes.set_ylim(ylim)
if xlim:
g.axes.set_xlim(xlim)
if y_tick_interval:
lims = g.axes.get_ylim()
g.axes.set_yticks(np.arange(lims[0], lims[1]+0.001, y_tick_interval))
g.axes.set_xlabel(x_axis_label)
g.axes.set_ylabel(y_axis_label)
if xticks:
g.axes.set_xticks(xticks)
if xticklabels:
g.axes.set_xticklabels(xticklabels)
# if title:
# for ax in g.axes[0]:
# ax.set_title(title)
if tight_layout:
plt.tight_layout()
if custom_legend_loc:
if g.get_legend() is not None:
g.get_legend().remove()
plt.legend(loc=custom_legend_loc)
if custom_legend_fn:
if g.get_legend() is not None:
g.get_legend().remove()
custom_legend_fn(plt)
# if g.legend:
# g.legend.set_title("")
# if old_tight_layout:
# plt.tight_layout()
if save_filename:
plt.savefig(save_filename, bbox_inches='tight', transparent=True)
if show or save_filename is None:
plt.show()
else:
plt.close()
return g
def my_lmplot(
mpd,
x, y,
# hue=None,
# style=None,
# size=None,
# alpha=1.0,
# hue_order=None,
ylim=None,
xlim=None,
y_tick_interval=None,
save_filename=None,
context='paper',
font_scale=None,
x_axis_label='',
y_axis_label='',
show=False,
xticklabels=None,
height=None,
width=None,
aspect=1.33,
# palette=None,
# draw_lines=None,
custom_legend_loc=False,
custom_legend_fn=None,
log_scale_x=False,
log_scale_y=False,
xticks=None,
title=None,
old_tight_layout=False,
tight_layout=False,
# linewidth=1,
**kwargs
):
height, aspect = compute_aspect_ratio(height, width, aspect)
if font_scale is None:
if context == 'talk': font_scale = 1
if context == 'paper': font_scale = 1.5
sns.set_context(context, font_scale=font_scale)
if context == 'talk':
sns.set_style('whitegrid')
else:
sns.set_style('ticks')
g = sns.lmplot(
x=x, y=y,
# hue=hue,
# style=style,
# size=size,
# alpha=alpha,
# hue_order=hue_order,
data=mpd.to_dataframe(),
# linewidth=linewidth,
height=height, aspect=aspect,
# palette=palette,
**kwargs
)
# return g
if log_scale_x:
for ax in g.axes[0]:
ax.set(xscale='log')
if log_scale_y:
for ax in g.axes[0]:
ax.set(yscale='log')
if ylim:
g.ax.set_ylim(ylim)
if xlim:
g.ax.set_xlim(xlim)
if y_tick_interval:
lims = g.ax.get_ylim()
g.ax.set_yticks(np.arange(lims[0], lims[1]+0.001, y_tick_interval))
g.ax.set_xlabel(x_axis_label)
g.ax.set_ylabel(y_axis_label)
if xticks:
g.ax.set_xticks(xticks)
if xticklabels:
g.ax.set_xticklabels(xticklabels)
# if title:
# for ax in g.axes[0]:
# ax.set_title(title)
if tight_layout:
plt.tight_layout()
if custom_legend_loc:
g._legend.remove()
plt.legend(loc=custom_legend_loc)
if custom_legend_fn:
g._legend.remove()
custom_legend_fn(plt)
# if g.legend:
# g.legend.set_title("")
# if old_tight_layout:
# plt.tight_layout()
if save_filename:
plt.savefig(save_filename, bbox_inches='tight', transparent=True)
if show or save_filename is None:
plt.show()
else:
plt.close()
return g
def my_lineplot(
mpd,
x, y, hue=None,
style=None,
size=None,
alpha=1.0,
hue_order=None,
ylim=None,
xlim=None,
y_tick_interval=None,
save_filename=None,
context='paper',
font_scale=None,
x_axis_label='',
y_axis_label='',
show=False,
xticklabels=None,
height=None,
width=None,
aspect=1.33,
palette=None,
draw_lines=None,
custom_legend_loc=False,
custom_legend_fn=None,
log_scale_x=False,
log_scale_y=False,
xticks=None,
title=None,
old_tight_layout=False,
tight_layout=False,
linewidth=1,
no_show=False,
**kwargs
):
height, aspect = compute_aspect_ratio(height, width, aspect)
if font_scale is None:
if context == 'talk': font_scale = 1
if context == 'paper': font_scale = 1.5
sns.set_context(context, font_scale=font_scale)
sns.set_style('ticks')
g = sns.lineplot(
# kind="point",
x=x, y=y, hue=hue,
style=style,
size=size,
alpha=alpha,
hue_order=hue_order,
data=mpd.to_dataframe(),
linewidth=linewidth,
# height=height, aspect=aspect,
# ci='sd',
palette=palette,
legend=False,
**kwargs
# ci=5,
# whis=(10, 90),
)
# sns.lineplot returns a matplotlib Axes, so operate on it directly.
if draw_lines:
my_draw_lines(draw_lines, g)
if log_scale_x:
g.set_xscale('log')
if log_scale_y:
g.set_yscale('log')
if ylim:
g.set_ylim(ylim)
if xlim:
g.set_xlim(xlim)
if y_tick_interval:
lims = g.get_ylim()
g.set_yticks(np.arange(lims[0], lims[1]+0.001, y_tick_interval))
if xticks:
g.set_xticks(xticks)
g.set_xlabel(x_axis_label)
g.set_ylabel(y_axis_label)
if xticklabels:
g.set_xticklabels(xticklabels)
if title:
g.set_title(title)
if tight_layout:
plt.tight_layout()
if custom_legend_loc:
if g.get_legend() is not None:
g.get_legend().remove()
plt.legend(loc=custom_legend_loc)
if custom_legend_fn:
if g.get_legend() is not None:
g.get_legend().remove()
custom_legend_fn(plt)
# if g.legend:
# g.legend.set_title("")
# g.legend()
if old_tight_layout:
plt.tight_layout()
if not no_show:
if save_filename:
plt.savefig(save_filename, bbox_inches='tight', transparent=True)
# plt.clf()
if show or save_filename is None:
plt.show()
else:
plt.close()
return g
def my_jointplot(
mpd,
x, y,
hue=None,
kind='scatter',
hue_order=None,
xlim=None,
ylim=None,
y_tick_interval=None,
save_filename=None,
context='paper',
font_scale=None,
x_axis_label='',
y_axis_label='',
show=False,
xticklabels=None,
height=None,
width=None,
aspect=1.33,
draw_lines=None,
log_scale_x=None,
log_scale_y=None,
**kwargs
):
height, aspect = compute_aspect_ratio(height, width, aspect)
if font_scale is None:
if context == 'talk': font_scale = 1
if context == 'paper': font_scale = 1.5
if context == 'paper':
sns.set_style('ticks')
else:
sns.set_style('whitegrid')
sns.set_context(context, font_scale=font_scale)
g = sns.jointplot(
# kind="point",
kind=kind,
x=x, y=y,
hue=hue,
hue_order=hue_order,
data=mpd.to_dataframe(),
# linewidth=1,
height=height,
# aspect=aspect,
# ci='sd',
# ci=5,
xlim=xlim,
ylim=ylim,
# whis=(10, 90),
**kwargs,
)
if draw_lines:
my_draw_lines(draw_lines, g.ax_joint)
if log_scale_x:
g.ax_joint.set_xscale('log')
if log_scale_y:
g.ax_joint.set_yscale('log')
# max_lim = max(g.ax.get_ylim()[1], g.ax.get_xlim()[1])
# g.ax.set_ylim((g.ax.get_ylim()[0], max_lim))
# g.ax.set_xlim((g.ax.get_xlim()[0], max_lim))
# if ylim:
# g.ax.set_ylim(ylim)
# if y_tick_interval:
# lims = g.ax.get_ylim()
# g.ax.set_yticks(np.arange(lims[0], lims[1]+0.001, y_tick_interval))
plt.tight_layout()
# g.legend.set_title("")
g.set_axis_labels(x_axis_label, y_axis_label)
if xticklabels:
g.ax_joint.set_xticklabels(xticklabels)
if save_filename:
plt.savefig(save_filename, bbox_inches='tight', transparent=True)
if show or save_filename is None:
plt.show()
else:
plt.close()
def my_draw_lines(
point_pairs,
ax=None
):
if ax is None:
ax = plt.gca()
for pair in point_pairs:
print(f'Plotting from {pair[0]} to {pair[1]}')
ax.plot(
[pair[0][0], pair[1][0]],
[pair[0][1], pair[1][1]],
# ax.plot(point_pairs,
linestyle='--', linewidth=0.75, color='grey')
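# Usage sketch (coordinates are illustrative): each entry is ((x0, y0), (x1, y1)).
# my_draw_lines([((0, 0), (1, 1)), ((0, 1), (1, 0))])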
def my_scatterplot(
mpd,
ax=None,
xlim=None,
ylim=None,
x_axis_label=None,
y_axis_label=None,
# height=None,
# width=None,
kind='scatter',
context='paper',
font_scale=None,
save_filename=None,
**kwargs,
):
if ax is None:
ax = plt.gca()
if font_scale is None:
if context == 'talk': font_scale = 1
if context == 'paper': font_scale = 1.5
if context == 'paper':
sns.set_style('ticks')
# sns.set_style('whitegrid')
else:
sns.set_style('whitegrid')
sns.set_context(context, font_scale=font_scale)
if kind == 'scatter':
sns.scatterplot(
data=mpd.to_dataframe(),
ax=ax,
**kwargs,
)
elif kind == 'kde':
sns.kdeplot(
data=mpd.to_dataframe(),
ax=ax,
**kwargs,
)
if xlim:
ax.axes.set_xlim(xlim)
if ylim:
ax.axes.set_ylim(ylim)
if x_axis_label:
ax.axes.set_xlabel(x_axis_label)
if y_axis_label:
ax.axes.set_ylabel(y_axis_label)
if save_filename:
plt.savefig(save_filename, bbox_inches='tight', transparent=True)
# if close:
# plt.close()
return ax
| 24.341743 | 77 | 0.575473 | 2,916 | 21,226 | 3.977023 | 0.0631 | 0.041907 | 0.026904 | 0.030784 | 0.850565 | 0.828749 | 0.796413 | 0.7727 | 0.755713 | 0.747521 | 0 | 0.01277 | 0.29902 | 21,226 | 871 | 78 | 24.36969 | 0.766651 | 0.107274 | 0 | 0.809117 | 0 | 0 | 0.027139 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.017094 | false | 0 | 0.011396 | 0.001425 | 0.039886 | 0.01567 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
181beabf4a31d8c45e11ba29bde636afd1d3407c | 20,163 | py | Python | utils/layers.py | BarneyQiao/CondenseNetV2 | c771957cb8fe466d0ecbafe9060e4c342a33fc4d | [
"MIT"
] | 80 | 2021-04-05T18:12:01.000Z | 2022-03-31T03:32:47.000Z | utils/layers.py | MLDL/CondenseNetV2 | c771957cb8fe466d0ecbafe9060e4c342a33fc4d | [
"MIT"
] | 8 | 2021-05-05T03:23:06.000Z | 2022-03-29T02:12:46.000Z | utils/layers.py | MLDL/CondenseNetV2 | c771957cb8fe466d0ecbafe9060e4c342a33fc4d | [
"MIT"
] | 16 | 2021-04-06T06:12:40.000Z | 2021-09-28T12:35:28.000Z | import torch
import torch.nn as nn
import torch.nn.functional as F
class SELayer(nn.Module):
def __init__(self, inplanes, reduction=16):
super(SELayer, self).__init__()
self.avg_pool = nn.AdaptiveAvgPool2d(1)
self.fc = nn.Sequential(
nn.Linear(inplanes, inplanes // reduction, bias=False),
nn.ReLU(inplace=True),
nn.Linear(inplanes // reduction, inplanes, bias=False),
nn.Sigmoid()
)
def forward(self, x):
b, c, _, _ = x.size()
y = self.avg_pool(x).view(b, c)
y = self.fc(y).view(b, c, 1, 1)
return x * y.expand_as(x)
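# SELayer implements squeeze-and-excitation: global-average-pool to (b, c),
# gate through a reduction MLP ending in a sigmoid, then rescale x channel-wise.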
class HS(nn.Module):
def __init__(self):
super(HS, self).__init__()
self.relu6 = nn.ReLU6(inplace=True)
def forward(self, inputs):
return inputs * self.relu6(inputs + 3) / 6
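# HS is the hard-swish activation: h-swish(x) = x * ReLU6(x + 3) / 6.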
class LGC(nn.Module):
global_progress = 0.0
def __init__(self, in_channels, out_channels, kernel_size, stride=1,
padding=0, dilation=1, groups=1, condense_factor=None,
dropout_rate=0., activation='ReLU', bn_momentum=0.1):
super(LGC, self).__init__()
self.norm = nn.BatchNorm2d(in_channels, momentum=bn_momentum)
self.activation_type = activation
if activation == 'ReLU':
self.add_module('activation', nn.ReLU(inplace=True))
elif activation == 'HS':
self.add_module('activation', HS())
else:
raise NotImplementedError
self.dropout_rate = dropout_rate
if self.dropout_rate > 0:
self.drop = nn.Dropout(dropout_rate, inplace=False)
self.conv = nn.Conv2d(in_channels, out_channels, kernel_size, stride,
padding, dilation, groups=1, bias=False)
self.in_channels = in_channels
self.out_channels = out_channels
self.groups = groups
self.condense_factor = condense_factor
if self.condense_factor is None:
self.condense_factor = self.groups
### Registered buffers; these persist in checkpoints and must be handled carefully
self.register_buffer('_count', torch.zeros(1))
self.register_buffer('_stage', torch.zeros(1))
self.register_buffer('_mask', torch.ones(self.conv.weight.size()))
### Check if arguments are valid
assert self.in_channels % self.groups == 0, "input channels must be divisible by the number of groups"
assert self.in_channels % self.condense_factor == 0, "input channels must be divisible by the condensation factor"
assert self.out_channels % self.groups == 0, "output channels must be divisible by the number of groups"
def forward(self, x):
self._check_drop()
x = self.norm(x)
x = self.activation(x)
if self.dropout_rate > 0:
x = self.drop(x)
### Masked output
weight = self.conv.weight * self.mask
return F.conv2d(x, weight, None, self.conv.stride,
self.conv.padding, self.conv.dilation, 1)
def _check_drop(self):
progress = LGC.global_progress
delta = 0
if progress * 2 < (1 + 1e-3):
### Get current stage
for i in range(self.condense_factor - 1):
if progress * 2 < (i + 1) / (self.condense_factor - 1):
stage = i
break
else:
stage = self.condense_factor - 1
### Check for dropping
if not self._reach_stage(stage):
self.stage = stage
delta = self.in_channels // self.condense_factor
if delta > 0:
self._dropping(delta)
return
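# Schedule sketch: stages advance during the first half of training
# (LGC.global_progress in [0, 0.5]); each new stage prunes another
# in_channels // condense_factor input connections per group via self._mask.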
def _dropping(self, delta):
print('LearnedGroupConv dropping')
weight = self.conv.weight * self.mask
### Sum up all kernels
### Assumes this is only applied to 1x1 convs (for speed)
assert weight.size()[-1] == 1
weight = weight.abs().squeeze()
assert weight.size()[0] == self.out_channels
assert weight.size()[1] == self.in_channels
d_out = self.out_channels // self.groups
### Shuffle weight
weight = weight.view(d_out, self.groups, self.in_channels)
weight = weight.transpose(0, 1).contiguous()
weight = weight.view(self.out_channels, self.in_channels)
### Sort and drop
for i in range(self.groups):
wi = weight[i * d_out:(i + 1) * d_out, :]
### Take corresponding delta index
di = wi.sum(0).sort()[1][self.count:self.count + delta]
for d in di.data:
self._mask[i::self.groups, d, :, :].fill_(0)
self.count = self.count + delta
@property
def count(self):
return int(self._count[0])
@count.setter
def count(self, val):
self._count.fill_(val)
@property
def stage(self):
return int(self._stage[0])
@stage.setter
def stage(self, val):
self._stage.fill_(val)
@property
def mask(self):
return self._mask
def _reach_stage(self, stage):
return (self._stage >= stage).all()
@property
def lasso_loss(self):
if self._reach_stage(self.groups - 1):
return 0
weight = self.conv.weight * self.mask
### Assumes this is only applied to 1x1 convs (for speed)
assert weight.size()[-1] == 1
weight = weight.squeeze().pow(2)
d_out = self.out_channels // self.groups
### Shuffle weight
weight = weight.view(d_out, self.groups, self.in_channels)
weight = weight.sum(0).clamp(min=1e-6).sqrt()
return weight.sum()
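# Group-lasso sketch: the squared weights are summed over the d_out kernels of
# each group and square-rooted per input channel, so whole input-channel
# columns are pushed toward zero and can later be masked out.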
class SFR(nn.Module):
global_progress = 0.0
def __init__(self, in_channels, out_channels, kernel_size, stride=1,
padding=0, dilation=1, groups=1, condense_factor=None,
dropout_rate=0., activation='ReLU', bn_momentum=0.1):
super(SFR, self).__init__()
self.norm = nn.BatchNorm2d(in_channels, momentum=bn_momentum)
self.activation_type = activation
if activation == 'ReLU':
self.add_module('activation', nn.ReLU(inplace=True))
elif activation == 'HS':
self.add_module('activation', HS())
else:
raise NotImplementedError
self.dropout_rate = dropout_rate
if self.dropout_rate > 0:
self.drop = nn.Dropout(dropout_rate, inplace=False)
self.conv = nn.Conv2d(in_channels, out_channels, kernel_size, stride,
padding, dilation, groups=1, bias=False)
self.in_channels = in_channels
self.out_channels = out_channels
self.groups = groups
self.condense_factor = condense_factor
if self.condense_factor is None:
self.condense_factor = self.groups
### Registered buffers; these persist in checkpoints and must be handled carefully
self.register_buffer('_count', torch.zeros(1))
self.register_buffer('_stage', torch.zeros(1))
self.register_buffer('_mask', torch.ones(self.conv.weight.size()))
### Check if arguments are valid
assert self.in_channels % self.groups == 0, "input channels must be divisible by the number of groups"
assert self.out_channels % self.condense_factor == 0, "output channels must be divisible by the transpose (condense) factor"
assert self.out_channels % self.groups == 0, "output channels must be divisible by the number of groups"
self._init_weight()
def forward(self, x):
self._check_drop()
x = self.norm(x)
x = self.activation(x)
if self.dropout_rate > 0:
x = self.drop(x)
### Masked output
weight = self.conv.weight * self.mask
return F.conv2d(x, weight, None, self.conv.stride,
self.conv.padding, self.conv.dilation, 1)
def _check_drop(self):
progress = SFR.global_progress
delta = 0
if progress * 2 < (1 + 1e-3):
### Get current stage
for i in range(self.condense_factor - 1):
if progress * 2 < (i + 1) / (self.condense_factor - 1):
stage = i
break
else:
stage = self.condense_factor - 1
### Check for dropping
if not self._reach_stage(stage):
self.stage = stage
delta = self.out_channels // self.condense_factor
if delta > 0:
self._dropping(delta)
return
def _dropping(self, delta):
print('LearnedGroupConvTrans dropping')
weight = self.conv.weight * self.mask
### Sum up all kernels
### Assumes this is only applied to 1x1 convs (for speed)
assert weight.size()[-1] == 1
weight = weight.abs().squeeze()
assert weight.size()[0] == self.out_channels
assert weight.size()[1] == self.in_channels
d_in = self.in_channels // self.groups
### Shuffle weight
weight = weight.view(self.out_channels, d_in, self.groups)
weight = weight.transpose(1, 2).contiguous()
weight = weight.view(self.out_channels, self.in_channels)
### Sort and drop
for i in range(self.groups):
wi = weight[:, i * d_in:(i + 1) * d_in]
### Take corresponding delta index
di = wi.sum(1).sort()[1][self.count:self.count + delta]
for d in di.data:
self._mask[d, i::self.groups, :, :].fill_(0)
self.count = self.count + delta
@property
def count(self):
return int(self._count[0])
@count.setter
def count(self, val):
self._count.fill_(val)
@property
def stage(self):
return int(self._stage[0])
@stage.setter
def stage(self, val):
self._stage.fill_(val)
@property
def mask(self):
return self._mask
def _reach_stage(self, stage):
return (self._stage >= stage).all()
@property
def lasso_loss(self):
if self._reach_stage(self.groups - 1):
return 0
weight = self.conv.weight * self.mask
### Assumes this is only applied to 1x1 convs (for speed)
assert weight.size()[-1] == 1
weight = weight.squeeze().pow(2)
d_in = self.in_channels // self.groups
### Shuffle weight
weight = weight.view(self.out_channels, d_in, self.groups)
weight = weight.sum(1).clamp(min=1e-6).sqrt()
return weight.sum()
def _init_weight(self):
self.norm.weight.data.fill_(0)
self.norm.bias.data.zero_()
class Conv(nn.Sequential):
def __init__(self, in_channels, out_channels, kernel_size,
stride=1, padding=0, groups=1, activation='ReLU', bn_momentum=0.1):
super(Conv, self).__init__()
self.add_module('norm', nn.BatchNorm2d(in_channels, momentum=bn_momentum))
if activation == 'ReLU':
self.add_module('activation', nn.ReLU(inplace=True))
elif activation == 'HS':
self.add_module('activation', HS())
else:
raise NotImplementedError
self.add_module('conv', nn.Conv2d(in_channels, out_channels,
kernel_size=kernel_size,
stride=stride,
padding=padding, bias=False,
groups=groups))
def ShuffleLayer(x, groups):
batchsize, num_channels, height, width = x.data.size()
channels_per_group = num_channels // groups
### reshape
x = x.view(batchsize, groups, channels_per_group, height, width)
### transpose
x = torch.transpose(x, 1, 2).contiguous()
### reshape
x = x.view(batchsize, -1, height, width)
return x
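# Sketch: with num_channels=6 and groups=2, channel order [0, 1, 2, 3, 4, 5]
# becomes [0, 3, 1, 4, 2, 5] (the ShuffleNet-style channel shuffle).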
def ShuffleLayerTrans(x, groups):
batchsize, num_channels, height, width = x.data.size()
channels_per_group = num_channels // groups
### reshape
x = x.view(batchsize, channels_per_group, groups, height, width)
### transpose
x = torch.transpose(x, 1, 2).contiguous()
### reshape
x = x.view(batchsize, -1, height, width)
return x
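# Sketch: the inverse interleave; with num_channels=6 and groups=2, order
# [0, 1, 2, 3, 4, 5] becomes [0, 2, 4, 1, 3, 5], undoing ShuffleLayer above.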
class CondensingLGC(nn.Module):
def __init__(self, model):
super(CondensingLGC, self).__init__()
layer_str = str(model)
type_name = layer_str[:layer_str.find('(')].strip()
self.typename = type_name
self.in_channels = model.conv.in_channels \
* model.groups // model.condense_factor
self.out_channels = model.conv.out_channels
self.groups = model.groups
self.condense_factor = model.condense_factor
self.norm = nn.BatchNorm2d(self.in_channels)
# self.relu = nn.ReLU(inplace=True)
if model.activation_type == 'ReLU':
self.activation = nn.ReLU(inplace=True)
elif model.activation_type == 'HS':
self.activation = HS()
else:
raise NotImplementedError
self.conv = nn.Conv2d(self.in_channels, self.out_channels,
kernel_size=model.conv.kernel_size,
padding=model.conv.padding,
groups=self.groups,
bias=False,
stride=model.conv.stride)
self.register_buffer('index', torch.LongTensor(self.in_channels))
index = 0
mask = model._mask.mean(-1).mean(-1)
## Note: mask.sum(1) = self.groups; the mask is the shuffled weight pattern.
for i in range(self.groups):
for j in range(model.conv.in_channels):
if index < (self.in_channels // self.groups) * (i + 1) and mask[
i, j] == 1: # pattern is same inside group
for k in range(self.out_channels // self.groups):
idx_i = int(k + i * (self.out_channels // self.groups))
idx_j = index % (self.in_channels // self.groups)
self.conv.weight.data[idx_i, idx_j, :, :] = \
model.conv.weight.data[int(i + k * self.groups), j, :, :]
self.norm.weight.data[index] = model.norm.weight.data[j]
self.norm.bias.data[index] = model.norm.bias.data[j]
self.norm.running_mean[index] = model.norm.running_mean[j]
self.norm.running_var[index] = model.norm.running_var[j]
self.index[index] = j
index += 1
def forward(self, x):
x = torch.index_select(x, 1, self.index)
x = self.norm(x)
x = self.activation(x)
x = self.conv(x)
x = ShuffleLayer(x, self.groups)
return x
class CondensingSFR(nn.Module):
def __init__(self, model):
super(CondensingSFR, self).__init__()
layer_str = str(model)
type_name = layer_str[:layer_str.find('(')].strip()
self.typename = type_name
self.in_channels = model.conv.in_channels
self.out_channels = model.conv.out_channels \
* model.groups // model.condense_factor
self.groups = model.groups
self.condense_factor = model.condense_factor
self.norm = nn.BatchNorm2d(self.in_channels)
# self.relu = nn.ReLU(inplace=True)
if model.activation_type == 'ReLU':
self.activation = nn.ReLU(inplace=True)
elif model.activation_type == 'HS':
self.activation = HS()
else:
raise NotImplementedError
self.conv = nn.Conv2d(self.in_channels, self.out_channels,
kernel_size=model.conv.kernel_size,
padding=model.conv.padding,
groups=self.groups,
bias=False,
stride=model.conv.stride)
self.register_buffer('index', torch.zeros(self.out_channels, self.out_channels))
out_index = torch.zeros(self.groups)
mask = model._mask.mean(-1).mean(-1)
for i in range(model.conv.out_channels):
for j in range(self.groups):
if out_index[j] < (self.out_channels // self.groups) and mask[i, j] == 1:
for k in range(self.in_channels // self.groups):
idx_i = int(out_index[j] + j * (self.out_channels // self.groups)) # out_channel
idx_j = k # in_channel
self.conv.weight.data[idx_i, idx_j, :, :] = \
model.conv.weight.data[i, int(j + k * self.groups), :, :]
self.index[idx_i, i] = 1.0
out_index[j] += 1
self.norm.weight.data = model.norm.weight.data
self.norm.bias.data = model.norm.bias.data
self.norm.running_mean = model.norm.running_mean
self.norm.running_var = model.norm.running_var
def forward(self, x):
x = self.norm(x)
x = self.activation(x)
x = ShuffleLayerTrans(x, self.groups)
x = self.conv(x) # SIZE: N, C, H, W
N, C, H, W = x.size()
x = x.view(N, C, H * W)
x = x.transpose(1, 2).contiguous() # SIZE: N, HW, C
x = torch.matmul(x, self.index) # x SIZE: N, HW, C; self.index SIZE: C, C; OUTPUT SIZE: N, HW, C
x = x.transpose(1, 2).contiguous() # SIZE: N, C, HW
x = x.view(N, C, H, W) # SIZE: N, C, HW
return x
class CondenseLGC(nn.Module):
def __init__(self, in_channels, out_channels, kernel_size,
stride=1, padding=0, groups=1, activation='ReLU'):
super(CondenseLGC, self).__init__()
self.in_channels = in_channels
self.out_channels = out_channels
self.groups = groups
self.norm = nn.BatchNorm2d(self.in_channels)
if activation == 'ReLU':
self.activation = nn.ReLU(inplace=True)
elif activation == 'HS':
self.activation = HS()
else:
raise NotImplementedError
self.conv = nn.Conv2d(self.in_channels, self.out_channels,
kernel_size=kernel_size,
stride=stride,
padding=padding,
groups=self.groups,
bias=False)
self.register_buffer('index', torch.LongTensor(self.in_channels))
self.index.fill_(0)
def forward(self, x):
x = torch.index_select(x, 1, self.index)
x = self.norm(x)
x = self.activation(x)
x = self.conv(x)
x = ShuffleLayer(x, self.groups)
return x
class CondenseSFR(nn.Module):
def __init__(self, in_channels, out_channels, kernel_size,
stride=1, padding=0, groups=1, activation='ReLU'):
super(CondenseSFR, self).__init__()
self.in_channels = in_channels
self.out_channels = out_channels
self.groups = groups
self.norm = nn.BatchNorm2d(self.in_channels)
if activation == 'ReLU':
self.activation = nn.ReLU(inplace=True)
elif activation == 'HS':
self.activation = HS()
else:
raise NotImplementedError
self.conv = nn.Conv2d(self.in_channels, self.out_channels,
kernel_size=kernel_size,
padding=padding,
groups=self.groups,
bias=False,
stride=stride)
self.register_buffer('index', torch.zeros(self.out_channels, self.out_channels))
def forward(self, x):
x = self.norm(x)
x = self.activation(x)
x = ShuffleLayerTrans(x, self.groups)
x = self.conv(x) # SIZE: N, C, H, W
N, C, H, W = x.size()
x = x.view(N, C, H * W)
x = x.transpose(1, 2).contiguous() # SIZE: N, HW, C
x = torch.matmul(x, self.index) # x SIZE: N, HW, C; self.index SIZE: C, C; OUTPUT SIZE: N, HW, C
x = x.transpose(1, 2).contiguous() # SIZE: N, C, HW
        x = x.view(N, C, H, W)  # SIZE: N, C, H, W
return x
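# Note: ShuffleLayer and ShuffleLayerTrans are used by the classes above but
# defined elsewhere in this module. A minimal sketch of the channel shuffle
# they are assumed to perform (standard ShuffleNet-style shuffle; illustrative
# only, not the verbatim definitions):
def _shuffle_sketch(x, groups):
    # view as (N, groups, channels_per_group, H, W), swap the two channel
    # axes, then flatten back to (N, C, H, W)
    N, C, H, W = x.size()
    x = x.view(N, groups, C // groups, H, W)
    x = x.transpose(1, 2).contiguous()
    return x.view(N, C, H, W)

def _shuffle_trans_sketch(x, groups):
    # inverse shuffle: start from (N, channels_per_group, groups, H, W)
    N, C, H, W = x.size()
    x = x.view(N, C // groups, groups, H, W)
    x = x.transpose(1, 2).contiguous()
    return x.view(N, C, H, W)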
| 38.849711 | 119 | 0.56316 | 2,498 | 20,163 | 4.393515 | 0.078463 | 0.044647 | 0.045923 | 0.026241 | 0.856583 | 0.848292 | 0.824328 | 0.788884 | 0.779317 | 0.770387 | 0 | 0.012787 | 0.321232 | 20,163 | 518 | 120 | 38.92471 | 0.789128 | 0.053811 | 0 | 0.757075 | 0 | 0 | 0.028888 | 0.001107 | 0 | 0 | 0 | 0 | 0.033019 | 1 | 0.089623 | false | 0 | 0.007075 | 0.021226 | 0.179245 | 0.004717 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
18431e7acaa01955a1fe27c5a4f01b33b4bb63b9 | 95 | py | Python | addons/to_paypal_unsupported_currencies/models/__init__.py | marionumza/vocal_v12 | 480990e919c9410903e06e7813ee92800bd6a569 | [
"Unlicense"
] | null | null | null | addons/to_paypal_unsupported_currencies/models/__init__.py | marionumza/vocal_v12 | 480990e919c9410903e06e7813ee92800bd6a569 | [
"Unlicense"
] | null | null | null | addons/to_paypal_unsupported_currencies/models/__init__.py | marionumza/vocal_v12 | 480990e919c9410903e06e7813ee92800bd6a569 | [
"Unlicense"
] | 1 | 2021-05-05T07:59:08.000Z | 2021-05-05T07:59:08.000Z | from . import account_payment
from . import payment_acquirer
from . import payment_transaction
| 23.75 | 33 | 0.842105 | 12 | 95 | 6.416667 | 0.5 | 0.38961 | 0.441558 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.126316 | 95 | 3 | 34 | 31.666667 | 0.927711 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
186735003557e6fcdb5e2993e1a7a233934ecf24 | 159 | py | Python | damn_index/__init__.py | sueastside/damn-index | d5479c45572316e1065f7027ee0b054e3946d975 | [
"BSD-3-Clause"
] | null | null | null | damn_index/__init__.py | sueastside/damn-index | d5479c45572316e1065f7027ee0b054e3946d975 | [
"BSD-3-Clause"
] | null | null | null | damn_index/__init__.py | sueastside/damn-index | d5479c45572316e1065f7027ee0b054e3946d975 | [
"BSD-3-Clause"
] | null | null | null | """
The DAMN INDEX module.
"""
from __future__ import absolute_import
from damn_index.damnsearch import DAMNSearch
from damn_index.damnindex import DAMNIndex
| 19.875 | 44 | 0.823899 | 21 | 159 | 5.904762 | 0.47619 | 0.217742 | 0.209677 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.119497 | 159 | 7 | 45 | 22.714286 | 0.885714 | 0.138365 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
a108a8940b588ce7c70c61183a8f498df1210938 | 551 | py | Python | sabad-ginti.py | sanjeev8386/Debugging | dd4ad640cf22af1d06d3bdccb74acab29bab47fc | [
"Apache-2.0"
] | 5 | 2019-03-03T06:18:45.000Z | 2019-03-03T06:27:29.000Z | sabad-ginti.py | sanjeev8386/Debugging | dd4ad640cf22af1d06d3bdccb74acab29bab47fc | [
"Apache-2.0"
] | null | null | null | sabad-ginti.py | sanjeev8386/Debugging | dd4ad640cf22af1d06d3bdccb74acab29bab47fc | [
"Apache-2.0"
] | null | null | null | def word_count(str):
counts = dict()
word = str.split()
for word in word:
if word in counts:
counts[word] =+ 1
else:
counts[word] =+ 1
return counts
print(word_count('the quick brown fox jumps over the lazy dog.'))
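
# For comparison: the same counting expressed with the standard library.
# collections.Counter is a dict subclass, so it is interchangeable with the
# hand-rolled dict built above.
from collections import Counter

def word_count_counter(text):
    return Counter(text.split())

print(word_count_counter('the quick brown fox jumps over the lazy dog.'))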
def word_count(text):
    counts = dict()
    words = text.split(' ')  # the original str.split('') raises ValueError: empty separator
    for word in words:
        if word in counts:
            counts[word] += 1
        else:
            counts[word] = 1
    return counts
print(word_count("the quick brown fox jumps over the lazy dog.")) | 19.678571 | 65 | 0.557169 | 76 | 551 | 3.986842 | 0.289474 | 0.079208 | 0.145215 | 0.09901 | 0.963696 | 0.963696 | 0.963696 | 0.963696 | 0.963696 | 0.732673 | 0 | 0.010989 | 0.339383 | 551 | 28 | 66 | 19.678571 | 0.821429 | 0 | 0 | 0.8 | 0 | 0 | 0.07971 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0 | null | null | 0.1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
a10c2709cfc57fbc20a45fc4dd6610f05532e5ca | 13,830 | py | Python | adlmagics/adlmagics/test/testcases/adla_job_submission_magic_test.py | Azure/Azure-Data-Service-Notebook | 6bd28587c9fa0a7c1f9113f638b790b1773c5585 | [
"MIT"
] | 6 | 2018-06-06T08:37:53.000Z | 2020-06-01T13:13:13.000Z | adlmagics/adlmagics/test/testcases/adla_job_submission_magic_test.py | Azure/Azure-Data-Service-Notebook | 6bd28587c9fa0a7c1f9113f638b790b1773c5585 | [
"MIT"
] | 30 | 2018-06-08T02:47:18.000Z | 2018-07-25T07:07:07.000Z | adlmagics/adlmagics/test/testcases/adla_job_submission_magic_test.py | Azure/Azure-Data-Service-Notebook | 6bd28587c9fa0a7c1f9113f638b790b1773c5585 | [
"MIT"
] | 5 | 2018-06-06T08:37:55.000Z | 2021-01-07T09:15:15.000Z | from IPython.core.error import UsageError
from adlmagics.magics.adla.adla_job_submission_magic import AdlaJobSubmissionMagic
from adlmagics.session_consts import session_adla_account, session_job_parallelism, session_job_priority, session_job_runtime, session_null_value
from adlmagics.models.adla_job import AdlaJob
from adlmagics.exceptions import MagicArgumentError, MagicArgumentMissingError
from adlmagics.test.adla_magic_test_base import AdlaMagicTestBase
from adlmagics.test.mocks.mock_result_receiver import MockResultReceiver
class AdlaJobSubmissionMagicTest(AdlaMagicTestBase):
adla_account = "mock_adla_account"
job_name = "mock_job_name"
job_script = "mock job scrpt"
job_parallelism = 5
job_priority = 100
job_runtime = "mock_runtime"
result_job = "submitted_job"
def test_execute_with_correct_arg_string(self):
arg_string = "--account %s --name %s --parallelism %d --priority %d --runtime %s --result_job %s" % (AdlaJobSubmissionMagicTest.adla_account, AdlaJobSubmissionMagicTest.job_name, AdlaJobSubmissionMagicTest.job_parallelism, AdlaJobSubmissionMagicTest.job_priority, AdlaJobSubmissionMagicTest.job_runtime, AdlaJobSubmissionMagicTest.result_job)
self.__magic.execute(arg_string, AdlaJobSubmissionMagicTest.job_script)
self.__validate()
def test_execute_with_incorrect_arg_string(self):
arg_string = "--account_1 %s --name %s --parallelism %d --priority %d --runtime %s --result_job %s" % (AdlaJobSubmissionMagicTest.adla_account, AdlaJobSubmissionMagicTest.job_name, AdlaJobSubmissionMagicTest.job_parallelism, AdlaJobSubmissionMagicTest.job_priority, AdlaJobSubmissionMagicTest.job_runtime, AdlaJobSubmissionMagicTest.result_job)
self.assertRaises(UsageError, self.__magic.execute, arg_string, AdlaJobSubmissionMagicTest.job_script)
arg_string = "--account %s --name_1 %s --parallelism %d --priority %d --runtime %s --result_job %s" % (AdlaJobSubmissionMagicTest.adla_account, AdlaJobSubmissionMagicTest.job_name, AdlaJobSubmissionMagicTest.job_parallelism, AdlaJobSubmissionMagicTest.job_priority, AdlaJobSubmissionMagicTest.job_runtime, AdlaJobSubmissionMagicTest.result_job)
self.assertRaises(UsageError, self.__magic.execute, arg_string, AdlaJobSubmissionMagicTest.job_script)
arg_string = "--account %s --name %s --parallelism_1 %d --priority %d --runtime %s --result_job %s" % (AdlaJobSubmissionMagicTest.adla_account, AdlaJobSubmissionMagicTest.job_name, AdlaJobSubmissionMagicTest.job_parallelism, AdlaJobSubmissionMagicTest.job_priority, AdlaJobSubmissionMagicTest.job_runtime, AdlaJobSubmissionMagicTest.result_job)
self.assertRaises(UsageError, self.__magic.execute, arg_string, AdlaJobSubmissionMagicTest.job_script)
arg_string = "--account %s --name %s --parallelism %d --priority_1 %d --runtime %s --result_job %s" % (AdlaJobSubmissionMagicTest.adla_account, AdlaJobSubmissionMagicTest.job_name, AdlaJobSubmissionMagicTest.job_parallelism, AdlaJobSubmissionMagicTest.job_priority, AdlaJobSubmissionMagicTest.job_runtime, AdlaJobSubmissionMagicTest.result_job)
self.assertRaises(UsageError, self.__magic.execute, arg_string, AdlaJobSubmissionMagicTest.job_script)
arg_string = "--account %s --name %s --parallelism %d --priority %d --runtime_1 %s --result_job %s" % (AdlaJobSubmissionMagicTest.adla_account, AdlaJobSubmissionMagicTest.job_name, AdlaJobSubmissionMagicTest.job_parallelism, AdlaJobSubmissionMagicTest.job_priority, AdlaJobSubmissionMagicTest.job_runtime, AdlaJobSubmissionMagicTest.result_job)
self.assertRaises(UsageError, self.__magic.execute, arg_string, AdlaJobSubmissionMagicTest.job_script)
arg_string = "--account %s --name %s --parallelism %d --priority %d --runtime %s --result_job_1 %s" % (AdlaJobSubmissionMagicTest.adla_account, AdlaJobSubmissionMagicTest.job_name, AdlaJobSubmissionMagicTest.job_parallelism, AdlaJobSubmissionMagicTest.job_priority, AdlaJobSubmissionMagicTest.job_runtime, AdlaJobSubmissionMagicTest.result_job)
self.assertRaises(UsageError, self.__magic.execute, arg_string, AdlaJobSubmissionMagicTest.job_script)
def test_execute_with_missing_account(self):
# account missing in arg_string, but not in session
arg_string = "--name %s --parallelism %d --priority %d --runtime %s --result_job %s" % (AdlaJobSubmissionMagicTest.job_name, AdlaJobSubmissionMagicTest.job_parallelism, AdlaJobSubmissionMagicTest.job_priority, AdlaJobSubmissionMagicTest.job_runtime, AdlaJobSubmissionMagicTest.result_job)
self._session_service.set_session_item(session_adla_account.name, AdlaJobSubmissionMagicTest.adla_account)
self.__magic.execute(arg_string, AdlaJobSubmissionMagicTest.job_script)
self.__validate()
# account missing in both arg_string and session
arg_string = "--name %s --parallelism %d --priority %d --runtime %s" % (AdlaJobSubmissionMagicTest.job_name, AdlaJobSubmissionMagicTest.job_parallelism, AdlaJobSubmissionMagicTest.job_priority, AdlaJobSubmissionMagicTest.job_runtime)
self._session_service.set_session_item(session_adla_account.name, session_null_value)
self.assertRaises(MagicArgumentError, self.__magic.execute, arg_string, AdlaJobSubmissionMagicTest.job_script)
def test_execute_with_missing_name(self):
arg_string = "--account %s --parallelism %d --priority %d --runtime %s" % (AdlaJobSubmissionMagicTest.adla_account, AdlaJobSubmissionMagicTest.job_parallelism, AdlaJobSubmissionMagicTest.job_priority, AdlaJobSubmissionMagicTest.job_runtime)
self.assertRaises(MagicArgumentMissingError, self.__magic.execute, arg_string, AdlaJobSubmissionMagicTest.job_script)
def test_execute_with_missing_parallelism_in_arg_string_but_not_in_session(self):
# parallelism missing in arg_string, but not in session
arg_string = "--account %s --name %s --priority %d --runtime %s --result_job %s" % (AdlaJobSubmissionMagicTest.adla_account, AdlaJobSubmissionMagicTest.job_name, AdlaJobSubmissionMagicTest.job_priority, AdlaJobSubmissionMagicTest.job_runtime, AdlaJobSubmissionMagicTest.result_job)
self._session_service.set_session_item(session_job_parallelism.name, AdlaJobSubmissionMagicTest.job_parallelism)
self.__magic.execute(arg_string, AdlaJobSubmissionMagicTest.job_script)
self.__validate()
# parallelism missing both in arg_string and session
arg_string = "--account %s --name %s --priority %d --runtime %s" % (AdlaJobSubmissionMagicTest.adla_account, AdlaJobSubmissionMagicTest.job_name, AdlaJobSubmissionMagicTest.job_priority, AdlaJobSubmissionMagicTest.job_runtime)
self._session_service.set_session_item(session_job_parallelism.name, session_null_value)
self.assertRaises(MagicArgumentError, self.__magic.execute, arg_string, AdlaJobSubmissionMagicTest.job_script)
def test_execute_with_missing_priority(self):
# priority missing in arg_string, but not in session
arg_string = "--account %s --name %s --parallelism %d --runtime %s --result_job %s" % (AdlaJobSubmissionMagicTest.adla_account, AdlaJobSubmissionMagicTest.job_name, AdlaJobSubmissionMagicTest.job_parallelism, AdlaJobSubmissionMagicTest.job_runtime, AdlaJobSubmissionMagicTest.result_job)
self._session_service.set_session_item(session_job_priority.name, AdlaJobSubmissionMagicTest.job_priority)
self.__magic.execute(arg_string, AdlaJobSubmissionMagicTest.job_script)
self.__validate()
# priority missing in both arg_string and session
arg_string = "--account %s --name %s --parallelism %d --runtime %s" % (AdlaJobSubmissionMagicTest.adla_account, AdlaJobSubmissionMagicTest.job_name, AdlaJobSubmissionMagicTest.job_parallelism, AdlaJobSubmissionMagicTest.job_runtime)
self._session_service.set_session_item(session_job_priority.name, session_null_value)
self.assertRaises(MagicArgumentError, self.__magic.execute, arg_string, AdlaJobSubmissionMagicTest.job_script)
def test_execute_with_boundary_priority_in_arg_string(self):
# Set boundary values to priority in arg_string
self._session_service.set_session_item(session_job_priority.name, session_null_value)
arg_string = "--account %s --name %s --parallelism %d --priority 0 --runtime %s" % (AdlaJobSubmissionMagicTest.adla_account, AdlaJobSubmissionMagicTest.job_name, AdlaJobSubmissionMagicTest.job_parallelism, AdlaJobSubmissionMagicTest.job_runtime)
self.assertRaises(MagicArgumentError, self.__magic.execute, arg_string, AdlaJobSubmissionMagicTest.job_script)
arg_string = "--account %s --name %s --parallelism %d --priority 1 --runtime %s --result_job %s" % (AdlaJobSubmissionMagicTest.adla_account, AdlaJobSubmissionMagicTest.job_name, AdlaJobSubmissionMagicTest.job_parallelism, AdlaJobSubmissionMagicTest.job_runtime, AdlaJobSubmissionMagicTest.result_job)
self.__magic.execute(arg_string, AdlaJobSubmissionMagicTest.job_script)
self.__validate()
arg_string = "--account %s --name %s --parallelism %d --priority 1000 --runtime %s --result_job %s" % (AdlaJobSubmissionMagicTest.adla_account, AdlaJobSubmissionMagicTest.job_name, AdlaJobSubmissionMagicTest.job_parallelism, AdlaJobSubmissionMagicTest.job_runtime, AdlaJobSubmissionMagicTest.result_job)
self.__magic.execute(arg_string, AdlaJobSubmissionMagicTest.job_script)
self.__validate()
arg_string = "--account %s --name %s --parallelism %d --priority 1001 --runtime %s" % (AdlaJobSubmissionMagicTest.adla_account, AdlaJobSubmissionMagicTest.job_name, AdlaJobSubmissionMagicTest.job_parallelism, AdlaJobSubmissionMagicTest.job_runtime)
self.assertRaises(MagicArgumentError, self.__magic.execute, arg_string, AdlaJobSubmissionMagicTest.job_script)
# priority missing in arg_string, set boundary values to priority in session
arg_string = "--account %s --name %s --parallelism %d --runtime %s --result_job %s" % (AdlaJobSubmissionMagicTest.adla_account, AdlaJobSubmissionMagicTest.job_name, AdlaJobSubmissionMagicTest.job_parallelism, AdlaJobSubmissionMagicTest.job_runtime, AdlaJobSubmissionMagicTest.result_job)
self._session_service.set_session_item(session_job_priority.name, 0)
self.assertRaises(MagicArgumentError, self.__magic.execute, arg_string, AdlaJobSubmissionMagicTest.job_script)
self._session_service.set_session_item(session_job_priority.name, 1)
self.__magic.execute(arg_string, AdlaJobSubmissionMagicTest.job_script)
self.__validate()
self._session_service.set_session_item(session_job_priority.name, 1000)
self.__magic.execute(arg_string, AdlaJobSubmissionMagicTest.job_script)
self.__validate()
self._session_service.set_session_item(session_job_priority.name, 1001)
self.assertRaises(MagicArgumentError, self.__magic.execute, arg_string, AdlaJobSubmissionMagicTest.job_script)
def test_execute_with_missing_runtime(self):
# runtime missing in arg_string, but not in session
arg_string = "--account %s --name %s --parallelism %d --priority %d --result_job %s" % (AdlaJobSubmissionMagicTest.adla_account, AdlaJobSubmissionMagicTest.job_name, AdlaJobSubmissionMagicTest.job_parallelism, AdlaJobSubmissionMagicTest.job_priority, AdlaJobSubmissionMagicTest.result_job)
self._session_service.set_session_item(session_job_runtime.name, AdlaJobSubmissionMagicTest.job_runtime)
self.__magic.execute(arg_string, AdlaJobSubmissionMagicTest.job_script)
self.__validate()
# runtime missing in both arg_string and session
arg_string = "--account %s --name %s --parallelism %d --priority %d" % (AdlaJobSubmissionMagicTest.adla_account, AdlaJobSubmissionMagicTest.job_name, AdlaJobSubmissionMagicTest.job_parallelism, AdlaJobSubmissionMagicTest.job_priority)
self._session_service.set_session_item(session_job_runtime.name, session_null_value)
self.assertRaises(MagicArgumentError, self.__magic.execute, arg_string, AdlaJobSubmissionMagicTest.job_script)
def test_execute_with_missing_result_job(self):
arg_string = "--account %s --name %s --parallelism %d --priority %d --runtime %s" % (AdlaJobSubmissionMagicTest.adla_account, AdlaJobSubmissionMagicTest.job_name, AdlaJobSubmissionMagicTest.job_parallelism, AdlaJobSubmissionMagicTest.job_priority, AdlaJobSubmissionMagicTest.job_runtime)
self.__magic.execute(arg_string, AdlaJobSubmissionMagicTest.job_script)
self.assertIsNone(self.__result_receiver.last_received_result_name)
self.assertIsNone(self.__result_receiver.last_received_result_value)
def setUp(self):
super(AdlaJobSubmissionMagicTest, self).setUp()
self.__result_receiver = MockResultReceiver()
self.__magic = AdlaJobSubmissionMagic(self._session_service, self._presenter_factory, self._result_converter, self.__result_receiver, self._adla_service)
def tearDown(self):
self.__magic = None
self.__result_receiver = None
super(AdlaJobSubmissionMagicTest, self).tearDown()
def __validate(self):
# Verify that the magic actually returns something
self.assertIsNotNone(self.__result_receiver.last_received_result_name)
self.assertEqual(self.__result_receiver.last_received_result_name, AdlaJobSubmissionMagicTest.result_job)
self.assertIsNotNone(self.__result_receiver.last_received_result_value)
        self.assertEqual([
"Submitting azure data lake job to account '%s'..." % (AdlaJobSubmissionMagicTest.adla_account),
"Job submitted.",
AdlaJob.__name__], self._presenter_factory.presented_logs)
self._presenter_factory.clear() | 86.4375 | 352 | 0.796963 | 1,436 | 13,830 | 7.314763 | 0.066852 | 0.287129 | 0.038081 | 0.045221 | 0.849772 | 0.840442 | 0.831493 | 0.824257 | 0.803123 | 0.796744 | 0 | 0.002464 | 0.119667 | 13,830 | 160 | 353 | 86.4375 | 0.860287 | 0.040998 | 0 | 0.330357 | 0 | 0.080357 | 0.127075 | 0 | 0 | 0 | 0 | 0 | 0.1875 | 1 | 0.107143 | false | 0 | 0.0625 | 0 | 0.241071 | 0 | 0 | 0 | 0 | null | 1 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
a12667b0ad39cc4ce816e9f1f416b9769e364930 | 155 | py | Python | mlsurvey/sl/workflows/__init__.py | jlaumonier/mlsurvey | 373598d067c7f0930ba13fe8da9756ce26eecbaf | [
"MIT"
] | null | null | null | mlsurvey/sl/workflows/__init__.py | jlaumonier/mlsurvey | 373598d067c7f0930ba13fe8da9756ce26eecbaf | [
"MIT"
] | null | null | null | mlsurvey/sl/workflows/__init__.py | jlaumonier/mlsurvey | 373598d067c7f0930ba13fe8da9756ce26eecbaf | [
"MIT"
] | null | null | null | from . import tasks
from .multiple_learning_workflow import MultipleLearningWorkflow
from .supervised_learning_workflow import SupervisedLearningWorkflow
| 31 | 68 | 0.896774 | 15 | 155 | 9 | 0.6 | 0.237037 | 0.325926 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.083871 | 155 | 4 | 69 | 38.75 | 0.950704 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
a12f7f98fd944920956d76219e142434eef5d2fe | 14,007 | py | Python | attack_cli/navigator.py | cyware-labs/attack-cli | 6ceba9eba17da84b96167c9e31248ccd0a79f3e2 | [
"Apache-2.0"
] | 1 | 2019-04-24T18:39:34.000Z | 2019-04-24T18:39:34.000Z | attack_cli/navigator.py | cyware-labs/attack-cli | 6ceba9eba17da84b96167c9e31248ccd0a79f3e2 | [
"Apache-2.0"
] | null | null | null | attack_cli/navigator.py | cyware-labs/attack-cli | 6ceba9eba17da84b96167c9e31248ccd0a79f3e2 | [
"Apache-2.0"
] | 1 | 2019-04-24T18:40:51.000Z | 2019-04-24T18:40:51.000Z | from .attack_navigation import AttackNavigator
import sys
import os
def execute():
a = AttackNavigator()
a.initialize()
while True:
        print('Hello World! Select from the options provided below:')
print("Enter 1 to list all Tactics")
# print("Enter 2 to Search for a Tactic")
print("Enter 2 to List all Techniques")
# print("Enter 4 to Search a Technique")
print("Enter 3 to list all APT groups")
# print("Enter 6 to search for an APT group")
print("Enter 4 to exit")
inputt = int(input("Enter your choice:: "))
os.system('clear')
        if inputt not in range(1, 5):
print("Invalid Input. Please try again")
continue
if inputt == 1:
while True:
print("{:<20} || {:<10} || {:<30} ||".format('Name', 'ID', 'Url'))
for tactic in a.get_tactics():
print("{:<20} || {:<10} || {:<30} ||".format(tactic['name'], tactic['id'], tactic['url']))
print("Enter 1 to see the details of a Tactic.")
print("Enter 2 to return to the Main Screen.")
print("Enter 3 to exit the CLI.")
try:
inputtt = int(input("Enter your choice:: "))
except:
print("Invalid Input. Please try again")
os.system('clear')
continue
if inputtt not in range(1, 4):
print("Invalid Input. Please try again")
os.system('clear')
continue
if inputtt == 1:
input_id = int(input("Please enter the ID of the tactic you wish to see the details of :: "))
data = a.get_tactic(input_id)
print("Tactic Name")
print("-------------------------------------------------")
print(data['name'])
print("-------------------------------------------------")
print("-------------------------------------------------")
print("Description")
print("-------------------------------------------------")
print(data['description'])
print("-------------------------------------------------")
print("-------------------------------------------------")
print("Techniques associated with this Tactic")
print("-------------------------------------------------")
for technique in data['techniques']:
print(technique['name'])
print("-------------------------------------------------")
print("-------------------------------------------------")
print("-------------------------------------------------")
while True:
print("Enter 1 to return to the Previous Screen.")
print("Enter 2 to exit the CLI.")
inputt = int(input("Enter your choice:: "))
os.system('clear')
if inputt not in range(1, 3):
print("Invalid Input. Please try again")
continue
if inputt == 1:
os.system('clear')
break
if inputt == 2:
os.system('clear')
print("Good Bye!")
quit()
if inputtt == 2:
os.system('clear')
break
if inputtt == 3:
os.system('clear')
print("Good Bye!")
quit()
# if inputt == 2:
# while True:
# search_input = input("Enter the name of tactic you wish to search for ::")
# print("{:<20} || {:<10} || {:<30} ||".format('Name', 'ID', 'Url'))
# for tactic in a.get_tactics(query=search_input):
# print("{:<20} || {:<10} || {:<30} ||".format(tactic['name'], tactic['id'], tactic['url']))
# print("Enter 1 to see the details of a Tactic.")
# print("Enter 2 to return to the Main Screen.")
# print("Enter 3 to exit the CLI.")
# try:
# inputtt = int(input("Enter your choice:: "))
# os.system('clear')
# except:
# print("Invalid Input. Please try again")
# os.system('clear')
# continue
# if inputtt not in range(1, 4):
# print("Invalid Input. Please try again")
# os.system('clear')
# continue
# if inputtt == 1:
# input_id = int(input("Please enter the ID of the tactic you wish to see the details of :: "))
# data = a.get_tactic(input_id)
# print("Tactic Name")
# print("-------------------------------------------------")
# print(data['name'])
# print("-------------------------------------------------")
# print("-------------------------------------------------")
# print("Description")
# print("-------------------------------------------------")
# print(data['description'])
# print("-------------------------------------------------")
# print("-------------------------------------------------")
# print("Techniques associated with this Tactic")
# print("-------------------------------------------------")
# for technique in data['techniques']:
# print(technique['name'])
# print("-------------------------------------------------")
# print("-------------------------------------------------")
# print("-------------------------------------------------")
# while True:
# print("Enter 1 to return to the Previous Screen.")
# print("Enter 2 to exit the CLI.")
# inputt = int(input("Enter your choice:: "))
# os.system('clear')
# if inputt not in range(1, 3):
# print("Invalid Input. Please try again")
# continue
# if inputt == 1:
# os.system('clear')
# break
# if inputt == 2:
# os.system('clear')
# print("Good Bye!")
# quit()
# if inputtt == 2:
# os.system('clear')
# break
# if inputtt == 3:
# os.system('clear')
# print("Good Bye!")
# quit()
if inputt == 2:
while True:
print("{:<60} || {:<10} ||".format('Name', 'ID'))
for technique in a.get_techniques():
print("{:<60} || {:<10} ||".format(technique['name'], technique['id']))
print("Enter 1 to see the details of a Technique.")
print("Enter 2 to return to the Main Screen.")
print("Enter 3 to exit the CLI.")
try:
inputtt = int(input("Enter your choice:: "))
os.system('clear')
except:
print("Invalid Input. Please try again")
os.system('clear')
continue
if inputtt not in range(1, 4):
print("Invalid Input. Please try again")
os.system('clear')
continue
if inputtt == 1:
input_id = int(input("Please enter the ID of the technique you wish to see the details of :: "))
data = a.get_technique(input_id)
print("Technique Name")
print("-------------------------------------------------")
print(data['name'])
print("-------------------------------------------------")
print("-------------------------------------------------")
print("Description")
print("-------------------------------------------------")
print(data['description'])
print("-------------------------------------------------")
print("-------------------------------------------------")
                    if data['tactics']:
                        print("Tactics associated with this Technique")
print("-------------------------------------------------")
for tactic in data['tactics']:
print(tactic['name'])
print("-------------------------------------------------")
print("-------------------------------------------------")
print("-------------------------------------------------")
while True:
print("Enter 1 to return to the Previous Screen.")
print("Enter 2 to exit the CLI.")
inputt = int(input("Enter your choice:: "))
os.system('clear')
if inputt not in range(1, 3):
print("Invalid Input. Please try again")
continue
if inputt == 1:
os.system('clear')
break
if inputt == 2:
os.system('clear')
print("Good Bye!")
quit()
if inputtt == 2:
os.system('clear')
break
if inputtt == 3:
os.system('clear')
print("Good Bye!")
quit()
if inputt == 3:
while True:
print("{:<60} || {:<10} ||".format('Name', 'ID'))
for apt in a.get_apts():
print("{:<60} || {:<10} ||".format(apt['name'], apt['id']))
print("Enter 1 to see the details of an APT group.")
print("Enter 2 to return to the Main Screen.")
print("Enter 3 to exit the CLI.")
try:
inputt = int(input("Enter your choice:: "))
os.system('clear')
except:
print("Invalid Input. Please try again")
os.system('clear')
continue
if inputt not in range(1, 4):
print("Invalid Input. Please try again")
os.system('clear')
continue
if inputt == 1:
input_id = int(input("Please enter the ID of the APT group you wish to see the details of :: "))
data = a.get_apt(input_id)
print("Technique Name")
print("-------------------------------------------------")
print(data['name'])
print("-------------------------------------------------")
print("-------------------------------------------------")
print("Description")
print("-------------------------------------------------")
print(data['description'])
print("-------------------------------------------------")
print("-------------------------------------------------")
if data['techniques']:
print("Tactic associated with this Technique")
print("-------------------------------------------------")
for technique in data['techniques']:
print(technique['name'])
print("-------------------------------------------------")
print("-------------------------------------------------")
print("-------------------------------------------------")
while True:
print("Enter 1 to return to the Previous Screen.")
print("Enter 2 to exit the CLI.")
inputt = int(input("Enter your choice:: "))
os.system('clear')
if inputt not in range(1, 3):
print("Invalid Input. Please try again")
continue
if inputt == 1:
os.system('clear')
break
if inputt == 2:
os.system('clear')
print("Good Bye!")
quit()
if inputt == 2:
os.system('clear')
break
if inputt == 3:
os.system('clear')
print("Good Bye!")
quit()
if inputt == 4:
os.system('clear')
print("Good Bye!")
quit()
# print(a.get_apts())
# print(a.get_apt(0))
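# The menu loops above repeat the same prompt/validate pattern, and the bare
# int(input(...)) calls raise ValueError on non-numeric input. A minimal
# helper that could factor this out (hypothetical refactor sketch, not wired
# into execute() above):
def prompt_choice(valid_range):
    while True:
        try:
            choice = int(input("Enter your choice:: "))
        except ValueError:
            print("Invalid Input. Please try again")
            continue
        if choice not in valid_range:
            print("Invalid Input. Please try again")
            continue
        return choice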
| 50.934545 | 116 | 0.321054 | 1,054 | 14,007 | 4.246679 | 0.086338 | 0.058981 | 0.095845 | 0.066801 | 0.86908 | 0.856122 | 0.845845 | 0.839366 | 0.820599 | 0.792895 | 0 | 0.014198 | 0.441851 | 14,007 | 274 | 117 | 51.120438 | 0.558327 | 0.213322 | 0 | 0.82439 | 0 | 0 | 0.313579 | 0.134234 | 0 | 0 | 0 | 0 | 0 | 1 | 0.004878 | false | 0 | 0.014634 | 0 | 0.019512 | 0.443902 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 8 |
a135a769fb3aa367a568d9ca3c89b2b767fd18ca | 1,769 | py | Python | tests/test_api_key.py | K900/httpx_auth | e8fe9d4b01e84cb707fae0c3624e3e649c602afe | [
"MIT"
] | 2 | 2020-06-02T13:51:22.000Z | 2020-06-02T13:51:48.000Z | tests/test_api_key.py | K900/httpx_auth | e8fe9d4b01e84cb707fae0c3624e3e649c602afe | [
"MIT"
] | null | null | null | tests/test_api_key.py | K900/httpx_auth | e8fe9d4b01e84cb707fae0c3624e3e649c602afe | [
"MIT"
] | null | null | null | import pytest
from pytest_httpx import HTTPXMock
import httpx
import httpx_auth
from tests.auth_helper import get_header
def test_header_api_key_requires_an_api_key():
with pytest.raises(Exception) as exception_info:
httpx_auth.HeaderApiKey(None)
assert str(exception_info.value) == "API Key is mandatory."
def test_query_api_key_requires_an_api_key():
with pytest.raises(Exception) as exception_info:
httpx_auth.QueryApiKey(None)
assert str(exception_info.value) == "API Key is mandatory."
def test_header_api_key_is_sent_in_x_api_key_by_default(httpx_mock: HTTPXMock):
auth = httpx_auth.HeaderApiKey("my_provided_api_key")
assert get_header(httpx_mock, auth).get("X-Api-Key") == "my_provided_api_key"
def test_query_api_key_is_sent_in_api_key_by_default(httpx_mock: HTTPXMock):
auth = httpx_auth.QueryApiKey("my_provided_api_key")
# Mock a dummy response
httpx_mock.add_response(url="http://authorized_only?api_key=my_provided_api_key")
# Send a request to this dummy URL with authentication
httpx.get("http://authorized_only", auth=auth)
def test_header_api_key_can_be_sent_in_a_custom_field_name(httpx_mock: HTTPXMock):
auth = httpx_auth.HeaderApiKey("my_provided_api_key", "X-API-HEADER-KEY")
assert get_header(httpx_mock, auth).get("X-Api-Header-Key") == "my_provided_api_key"
def test_query_api_key_can_be_sent_in_a_custom_field_name(httpx_mock: HTTPXMock):
auth = httpx_auth.QueryApiKey("my_provided_api_key", "X-API-QUERY-KEY")
# Mock a dummy response
httpx_mock.add_response(
url="http://authorized_only?X-API-QUERY-KEY=my_provided_api_key"
)
# Send a request to this dummy URL with authentication
httpx.get("http://authorized_only", auth=auth)
| 36.854167 | 88 | 0.775579 | 279 | 1,769 | 4.516129 | 0.200717 | 0.104762 | 0.08254 | 0.101587 | 0.879365 | 0.83254 | 0.82381 | 0.82381 | 0.82381 | 0.82381 | 0 | 0 | 0.130017 | 1,769 | 47 | 89 | 37.638298 | 0.818713 | 0.084228 | 0 | 0.206897 | 0 | 0 | 0.225387 | 0 | 0 | 0 | 0 | 0 | 0.137931 | 1 | 0.206897 | false | 0 | 0.172414 | 0 | 0.37931 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
a16ac757d89c7201c9111350561799e4ce03dacc | 5,954 | py | Python | tests/thumbnails/tests.py | samuelmaudo/yepes | 1ef9a42d4eaa70d9b3e6e7fa519396c1e1174fcb | [
"BSD-3-Clause"
] | null | null | null | tests/thumbnails/tests.py | samuelmaudo/yepes | 1ef9a42d4eaa70d9b3e6e7fa519396c1e1174fcb | [
"BSD-3-Clause"
] | null | null | null | tests/thumbnails/tests.py | samuelmaudo/yepes | 1ef9a42d4eaa70d9b3e6e7fa519396c1e1174fcb | [
"BSD-3-Clause"
] | null | null | null | # -*- coding:utf-8 -*-
from __future__ import unicode_literals
from base64 import b64encode
from hashlib import md5
import os
from django.test import TestCase
from django.utils.encoding import force_bytes
from yepes.contrib.thumbnails.models import Configuration
from yepes.contrib.thumbnails.proxies import ConfigurationProxy
from yepes.contrib.thumbnails.test_mixins import ThumbnailsMixin
class ThumbnailsTest(ThumbnailsMixin, TestCase):
tempDirPrefix = 'test_thumbnails_'
def test_simple_configuration(self):
configuration = Configuration.objects.create(
key='default',
width=100,
height=50,
)
key = '/'.join((configuration.key, 'wolf.jpg'))
key = md5(force_bytes(key)).digest()
key = b64encode(key, b'ab').decode('ascii')[:6]
path = os.path.join(
self.temp_dir,
'thumbs',
'wolf_{0}.jpg'.format(key),
)
self.assertIsNone(self.source.get_existing_thumbnail('default'))
thumbnail = self.source.get_thumbnail('default')
self.assertEqual(thumbnail.path, path)
self.assertLessEqual(thumbnail.width, 100)
self.assertLessEqual(thumbnail.height, 50)
self.assertEqual(thumbnail.format, 'JPEG')
original_accessed_time = thumbnail.accessed_time
original_created_time = thumbnail.created_time
original_modified_time = thumbnail.modified_time
original_size = thumbnail.size
self.assertFalse(thumbnail.closed)
thumbnail = self.source.get_existing_thumbnail('default')
self.assertEqual(thumbnail.path, path)
self.assertLessEqual(thumbnail.width, 100)
self.assertLessEqual(thumbnail.height, 50)
self.assertEqual(thumbnail.format, 'JPEG')
self.assertGreaterEqual(thumbnail.accessed_time, original_accessed_time)
self.assertEqual(thumbnail.created_time, original_created_time)
self.assertEqual(thumbnail.modified_time, original_modified_time)
self.assertEqual(thumbnail.size, original_size)
self.assertTrue(thumbnail.closed)
thumbnail = self.source.get_thumbnail('default')
self.assertEqual(thumbnail.path, path)
self.assertLessEqual(thumbnail.width, 100)
self.assertLessEqual(thumbnail.height, 50)
self.assertEqual(thumbnail.format, 'JPEG')
self.assertGreaterEqual(thumbnail.accessed_time, original_accessed_time)
self.assertEqual(thumbnail.created_time, original_created_time)
self.assertEqual(thumbnail.modified_time, original_modified_time)
self.assertEqual(thumbnail.size, original_size)
self.assertTrue(thumbnail.closed)
def test_proxy_configuration(self):
configuration = ConfigurationProxy(
width=100,
height=50,
)
self.assertEqual(configuration.key, 'w100_h50')
key = '/'.join((configuration.key, 'wolf.jpg'))
key = md5(force_bytes(key)).digest()
key = b64encode(key, b'ab').decode('ascii')[:6]
path = os.path.join(
self.temp_dir,
'thumbs',
'wolf_{0}.jpg'.format(key),
)
self.assertIsNone(self.source.get_existing_thumbnail(configuration))
thumbnail = self.source.get_thumbnail(configuration)
self.assertEqual(thumbnail.path, path)
self.assertLessEqual(thumbnail.width, configuration.width)
self.assertLessEqual(thumbnail.height, configuration.height)
self.assertEqual(thumbnail.format, 'JPEG')
original_accessed_time = thumbnail.accessed_time
original_created_time = thumbnail.created_time
original_modified_time = thumbnail.modified_time
original_size = thumbnail.size
self.assertFalse(thumbnail.closed)
thumbnail = self.source.get_existing_thumbnail(configuration)
self.assertEqual(thumbnail.path, path)
self.assertLessEqual(thumbnail.width, configuration.width)
self.assertLessEqual(thumbnail.height, configuration.height)
self.assertEqual(thumbnail.format, configuration.format)
self.assertGreaterEqual(thumbnail.accessed_time, original_accessed_time)
self.assertEqual(thumbnail.created_time, original_created_time)
self.assertEqual(thumbnail.modified_time, original_modified_time)
self.assertEqual(thumbnail.size, original_size)
self.assertTrue(thumbnail.closed)
thumbnail = self.source.get_thumbnail(configuration)
self.assertEqual(thumbnail.path, path)
self.assertLessEqual(thumbnail.width, configuration.width)
self.assertLessEqual(thumbnail.height, configuration.height)
self.assertEqual(thumbnail.format, configuration.format)
self.assertGreaterEqual(thumbnail.accessed_time, original_accessed_time)
self.assertEqual(thumbnail.created_time, original_created_time)
self.assertEqual(thumbnail.modified_time, original_modified_time)
self.assertEqual(thumbnail.size, original_size)
self.assertTrue(thumbnail.closed)
def test_render_image_tag(self):
configuration = Configuration.objects.create(
key='default',
width=100,
height=50,
)
self.source.generate_thumbnail(configuration) # New thumbnails cannot be closed because they are based on ContentFile.
thumbnail = self.source.get_existing_thumbnail(configuration)
self.assertTrue(thumbnail.closed)
tag = thumbnail.get_tag()
self.assertTrue(thumbnail.closed)
self.assertTrue(tag.startswith('<img '))
self.assertTrue(tag.endswith('">'))
self.assertEqual(set(tag[1:-1].split()), {
'img',
'src="{0}"'.format(thumbnail.url),
'width="{0}"'.format(thumbnail.width),
'height="{0}"'.format(thumbnail.height),
})
self.assertTrue(thumbnail.closed)
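
# Both tests above derive the expected thumbnail filename with the same key
# scheme; a sketch of that derivation for reference (illustrative helper, not
# part of the application under test):
def expected_thumbnail_key(configuration_key, source_name):
    key = '/'.join((configuration_key, source_name))
    key = md5(force_bytes(key)).digest()
    return b64encode(key, b'ab').decode('ascii')[:6]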
| 41.636364 | 126 | 0.692812 | 614 | 5,954 | 6.557003 | 0.162866 | 0.09687 | 0.14307 | 0.083458 | 0.781421 | 0.77546 | 0.770492 | 0.770492 | 0.756582 | 0.756582 | 0 | 0.011864 | 0.207256 | 5,954 | 142 | 127 | 41.929577 | 0.841102 | 0.015284 | 0 | 0.719008 | 0 | 0 | 0.03277 | 0 | 0 | 0 | 0 | 0 | 0.454545 | 1 | 0.024793 | false | 0 | 0.07438 | 0 | 0.115702 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
a181f90d6151c9cc37b8f73f75ce28e5c016f8c9 | 111 | py | Python | err.py | sushink8/vmresourece | a4e7dddf4149b12931ac9fd7477abbf83d6c532f | [
"MIT"
] | null | null | null | err.py | sushink8/vmresourece | a4e7dddf4149b12931ac9fd7477abbf83d6c532f | [
"MIT"
] | null | null | null | err.py | sushink8/vmresourece | a4e7dddf4149b12931ac9fd7477abbf83d6c532f | [
"MIT"
] | null | null | null | #!/usr/bin/python
# coding:utf8
import sys
def err(*s):
    # No-op stub: the stderr write is intentionally left disabled; uncomment to enable logging.
    # sys.stderr.write(" ".join(s) + "\n")
    pass
| 12.333333 | 39 | 0.558559 | 17 | 111 | 3.647059 | 0.882353 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.011494 | 0.216216 | 111 | 8 | 40 | 13.875 | 0.701149 | 0.576577 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.333333 | true | 0.333333 | 0.333333 | 0 | 0.666667 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 7 |
a182707247ddb73eb042e65fbea8203d60a7ebdd | 362 | py | Python | lib/__init__.py | mdietrichstein/tfhub-exporter | d2c17bdac3d4c3395b59f00c4818a0289ec23211 | [
"BSD-3-Clause"
] | 4 | 2019-05-21T14:50:22.000Z | 2020-07-13T09:34:38.000Z | lib/__init__.py | mdietrichstein/tfhub-exporter | d2c17bdac3d4c3395b59f00c4818a0289ec23211 | [
"BSD-3-Clause"
] | null | null | null | lib/__init__.py | mdietrichstein/tfhub-exporter | d2c17bdac3d4c3395b59f00c4818a0289ec23211 | [
"BSD-3-Clause"
] | null | null | null | from .tf_graph_utils import DEFAULT_TRANSFORMS
from .tf_graph_utils import get_module_info
from .tf_graph_utils import saved_model_to_frozen_graph
from .tf_graph_utils import optimize_graph
from .tf_graph_utils import convert_graph_def_to_saved_model
from .cli_utils import print_outputs
from .cli_utils import print_tensors
from .cli_utils import print_tensor
| 36.2 | 60 | 0.88674 | 61 | 362 | 4.786885 | 0.360656 | 0.30137 | 0.188356 | 0.273973 | 0.64726 | 0.184932 | 0 | 0 | 0 | 0 | 0 | 0 | 0.09116 | 362 | 9 | 61 | 40.222222 | 0.887538 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0.375 | 0 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
a192984792a84dceb47fc2ce97c8e07f3c5bb864 | 5,258 | py | Python | metrics/metric_defaults.py | ayush12gupta/co-mod-gan | 12ced0ed84af464bb467252f6458ab9fd2b89e22 | [
"BSD-2-Clause"
] | 289 | 2021-03-18T19:30:48.000Z | 2022-03-30T11:10:48.000Z | metrics/metric_defaults.py | ayush12gupta/co-mod-gan | 12ced0ed84af464bb467252f6458ab9fd2b89e22 | [
"BSD-2-Clause"
] | 54 | 2021-03-22T16:32:15.000Z | 2022-03-29T23:01:26.000Z | metrics/metric_defaults.py | ayush12gupta/co-mod-gan | 12ced0ed84af464bb467252f6458ab9fd2b89e22 | [
"BSD-2-Clause"
] | 44 | 2021-03-19T06:10:27.000Z | 2022-03-17T13:10:36.000Z | # Copyright (c) 2019, NVIDIA Corporation. All rights reserved.
#
# This work is made available under the Nvidia Source Code License-NC.
# To view a copy of this license, visit
# https://nvlabs.github.io/stylegan2/license.html
"""Default metric definitions."""
from dnnlib import EasyDict
#----------------------------------------------------------------------------
metric_defaults = EasyDict([(args.name, args) for args in [
EasyDict(name='fid200-rt-shoes', func_name='metrics.frechet_inception_distance.FID', num_images=200, minibatch_per_gpu=1, ref_train=True, ref_samples=49825),
EasyDict(name='fid200-rt-handbags', func_name='metrics.frechet_inception_distance.FID', num_images=200, minibatch_per_gpu=1, ref_train=True, ref_samples=138567),
EasyDict(name='fid5k', func_name='metrics.frechet_inception_distance.FID', num_images=5000, minibatch_per_gpu=8),
EasyDict(name='fid10k', func_name='metrics.frechet_inception_distance.FID', num_images=10000, minibatch_per_gpu=8),
EasyDict(name='fid10k-b1', func_name='metrics.frechet_inception_distance.FID', num_images=10000, minibatch_per_gpu=1),
EasyDict(name='fid10k-h0', func_name='metrics.frechet_inception_distance.FID', num_images=10000, minibatch_per_gpu=8, hole_range=[0.0, 0.2]),
EasyDict(name='fid10k-h1', func_name='metrics.frechet_inception_distance.FID', num_images=10000, minibatch_per_gpu=8, hole_range=[0.2, 0.4]),
EasyDict(name='fid10k-h2', func_name='metrics.frechet_inception_distance.FID', num_images=10000, minibatch_per_gpu=8, hole_range=[0.4, 0.6]),
EasyDict(name='fid10k-h3', func_name='metrics.frechet_inception_distance.FID', num_images=10000, minibatch_per_gpu=8, hole_range=[0.6, 0.8]),
EasyDict(name='fid10k-h4', func_name='metrics.frechet_inception_distance.FID', num_images=10000, minibatch_per_gpu=8, hole_range=[0.8, 1.0]),
    EasyDict(name='fid36k5', func_name='metrics.frechet_inception_distance.FID', num_images=36500, minibatch_per_gpu=8),
EasyDict(name='fid36k5-h0', func_name='metrics.frechet_inception_distance.FID', num_images=36500, minibatch_per_gpu=8, hole_range=[0.0, 0.2]),
EasyDict(name='fid36k5-h1', func_name='metrics.frechet_inception_distance.FID', num_images=36500, minibatch_per_gpu=8, hole_range=[0.2, 0.4]),
EasyDict(name='fid36k5-h2', func_name='metrics.frechet_inception_distance.FID', num_images=36500, minibatch_per_gpu=8, hole_range=[0.4, 0.6]),
EasyDict(name='fid36k5-h3', func_name='metrics.frechet_inception_distance.FID', num_images=36500, minibatch_per_gpu=8, hole_range=[0.6, 0.8]),
EasyDict(name='fid36k5-h4', func_name='metrics.frechet_inception_distance.FID', num_images=36500, minibatch_per_gpu=8, hole_range=[0.8, 1.0]),
EasyDict(name='fid50k', func_name='metrics.frechet_inception_distance.FID', num_images=50000, minibatch_per_gpu=8),
    EasyDict(name='ids5k', func_name='metrics.inception_discriminative_score.IDS', num_images=5000, minibatch_per_gpu=8),
EasyDict(name='ids10k', func_name='metrics.inception_discriminative_score.IDS', num_images=10000, minibatch_per_gpu=8),
EasyDict(name='ids10k-b1', func_name='metrics.inception_discriminative_score.IDS', num_images=10000, minibatch_per_gpu=1),
EasyDict(name='ids10k-h0', func_name='metrics.inception_discriminative_score.IDS', num_images=10000, minibatch_per_gpu=8, hole_range=[0.0, 0.2]),
EasyDict(name='ids10k-h1', func_name='metrics.inception_discriminative_score.IDS', num_images=10000, minibatch_per_gpu=8, hole_range=[0.2, 0.4]),
EasyDict(name='ids10k-h2', func_name='metrics.inception_discriminative_score.IDS', num_images=10000, minibatch_per_gpu=8, hole_range=[0.4, 0.6]),
EasyDict(name='ids10k-h3', func_name='metrics.inception_discriminative_score.IDS', num_images=10000, minibatch_per_gpu=8, hole_range=[0.6, 0.8]),
EasyDict(name='ids10k-h4', func_name='metrics.inception_discriminative_score.IDS', num_images=10000, minibatch_per_gpu=8, hole_range=[0.8, 1.0]),
    EasyDict(name='ids36k5', func_name='metrics.inception_discriminative_score.IDS', num_images=36500, minibatch_per_gpu=8),
EasyDict(name='ids36k5-h0', func_name='metrics.inception_discriminative_score.IDS', num_images=36500, minibatch_per_gpu=8, hole_range=[0.0, 0.2]),
EasyDict(name='ids36k5-h1', func_name='metrics.inception_discriminative_score.IDS', num_images=36500, minibatch_per_gpu=8, hole_range=[0.2, 0.4]),
EasyDict(name='ids36k5-h2', func_name='metrics.inception_discriminative_score.IDS', num_images=36500, minibatch_per_gpu=8, hole_range=[0.4, 0.6]),
EasyDict(name='ids36k5-h3', func_name='metrics.inception_discriminative_score.IDS', num_images=36500, minibatch_per_gpu=8, hole_range=[0.6, 0.8]),
EasyDict(name='ids36k5-h4', func_name='metrics.inception_discriminative_score.IDS', num_images=36500, minibatch_per_gpu=8, hole_range=[0.8, 1.0]),
EasyDict(name='ids50k', func_name='metrics.inception_discriminative_score.IDS', num_images=50000, minibatch_per_gpu=8),
EasyDict(name='lpips2k', func_name='metrics.learned_perceptual_image_patch_similarity.LPIPS', num_pairs=2000, minibatch_per_gpu=8),
]])
#----------------------------------------------------------------------------
| 105.16 | 165 | 0.743819 | 772 | 5,258 | 4.774611 | 0.134715 | 0.107434 | 0.134292 | 0.125882 | 0.824742 | 0.824742 | 0.824742 | 0.818231 | 0.818231 | 0.717851 | 0 | 0.080316 | 0.085964 | 5,258 | 49 | 166 | 107.306122 | 0.686642 | 0.075314 | 0 | 0 | 0 | 0 | 0.335258 | 0.274227 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.027778 | 0 | 0.027778 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
a1a3c5bc76c29c323cc955573d2b26a6f7e7bb08 | 785 | py | Python | tests/parser/27-GraphColouring.asp.test.py | veltri/DLV2 | 944aaef803aa75e7ec51d7e0c2b0d964687fdd0e | [
"Apache-2.0"
] | null | null | null | tests/parser/27-GraphColouring.asp.test.py | veltri/DLV2 | 944aaef803aa75e7ec51d7e0c2b0d964687fdd0e | [
"Apache-2.0"
] | null | null | null | tests/parser/27-GraphColouring.asp.test.py | veltri/DLV2 | 944aaef803aa75e7ec51d7e0c2b0d964687fdd0e | [
"Apache-2.0"
] | null | null | null | input = """
% Guess colours.
chosenColour(N,C) | notChosenColour(N,C) :- node(N), colour(C).
% At least one color per node.
:- node(X), not colored(X).
colored(X) :- chosenColour(X,Fv1).
% Only one color per node.
:- chosenColour(N,C1), chosenColour(N,C2), C1!=C2.
% No two adjacent nodes have the same colour.
:- link(X,Y), X<Y, chosenColour(X,C), chosenColour(Y,C).
"""
output = """
% Guess colours.
chosenColour(N,C) | notChosenColour(N,C) :- node(N), colour(C).
% At least one color per node.
:- node(X), not colored(X).
colored(X) :- chosenColour(X,Fv1).
% Only one color per node.
:- chosenColour(N,C1), chosenColour(N,C2), C1!=C2.
% No two adjacent nodes have the same colour.
:- link(X,Y), X<Y, chosenColour(X,C), chosenColour(Y,C).
"""
| 25.322581 | 64 | 0.628025 | 122 | 785 | 4.040984 | 0.254098 | 0.158215 | 0.089249 | 0.121704 | 0.977688 | 0.977688 | 0.977688 | 0.977688 | 0.977688 | 0.977688 | 0 | 0.015552 | 0.180892 | 785 | 30 | 65 | 26.166667 | 0.751166 | 0 | 0 | 0.909091 | 0 | 0.181818 | 0.959157 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
a1e4141fd85385a5601cae5a7ca435d7791e79a2 | 43 | py | Python | helow.py | jamiroq/helow | bba980994417ae11e8161d09a8fd29f35b6cd15e | [
"MIT"
] | null | null | null | helow.py | jamiroq/helow | bba980994417ae11e8161d09a8fd29f35b6cd15e | [
"MIT"
] | null | null | null | helow.py | jamiroq/helow | bba980994417ae11e8161d09a8fd29f35b6cd15e | [
"MIT"
] | null | null | null | def helow(prt):
return "helow, " + prt
| 14.333333 | 26 | 0.581395 | 6 | 43 | 4.166667 | 0.666667 | 0.64 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.255814 | 43 | 2 | 27 | 21.5 | 0.78125 | 0 | 0 | 0 | 0 | 0 | 0.162791 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.5 | false | 0 | 0 | 0.5 | 1 | 0 | 1 | 1 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 7 |
b80e8dfcc96e5145e02d1e561c523c577a7d9f3e | 21,336 | py | Python | tests/sentry/integrations/bitbucket/testutils.py | AlexWayfer/sentry | ef935cda2b2e960bd602fda590540882d1b0712d | [
"BSD-3-Clause"
] | 2 | 2019-03-04T12:45:54.000Z | 2019-03-04T12:45:55.000Z | tests/sentry/integrations/bitbucket/testutils.py | AlexWayfer/sentry | ef935cda2b2e960bd602fda590540882d1b0712d | [
"BSD-3-Clause"
] | 196 | 2019-06-10T08:34:10.000Z | 2022-02-22T01:26:13.000Z | tests/sentry/integrations/bitbucket/testutils.py | AlexWayfer/sentry | ef935cda2b2e960bd602fda590540882d1b0712d | [
"BSD-3-Clause"
] | 1 | 2017-02-09T06:36:57.000Z | 2017-02-09T06:36:57.000Z | from __future__ import absolute_import
from collections import OrderedDict
COMPARE_COMMITS_EXAMPLE = b"""{
"pagelen": 30,
"values":
[{"hash": "e18e4e72de0d824edfbe0d73efe34cbd0d01d301",
"repository": {"links": {"self": {"href": "https://api.bitbucket.org/2.0/repositories/sentryuser/newsdiffs"}, "html": {"href": "https://bitbucket.org/sentryuser/newsdiffs"}, "avatar": {"href": "https://bitbucket.org/sentryuser/newsdiffs/avatar/32/"}}, "type": "repository", "name": "newsdiffs", "full_name": "sentryuser/newsdiffs", "uuid": "{c78dfb25-7882-4550-97b1-4e0d38f32859}"}, "links": {"self": {"href": "https://api.bitbucket.org/2.0/repositories/sentryuser/newsdiffs/commit/e18e4e72de0d824edfbe0d73efe34cbd0d01d301"}, "comments": {"href": "https://api.bitbucket.org/2.0/repositories/sentryuser/newsdiffs/commit/e18e4e72de0d824edfbe0d73efe34cbd0d01d301/comments"}, "patch": {"href": "https://api.bitbucket.org/2.0/repositories/sentryuser/newsdiffs/patch/e18e4e72de0d824edfbe0d73efe34cbd0d01d301"}, "html": {"href": "https://bitbucket.org/sentryuser/newsdiffs/commits/e18e4e72de0d824edfbe0d73efe34cbd0d01d301"}, "diff": {"href": "https://api.bitbucket.org/2.0/repositories/sentryuser/newsdiffs/diff/e18e4e72de0d824edfbe0d73efe34cbd0d01d301"}, "approve": {"href": "https://api.bitbucket.org/2.0/repositories/sentryuser/newsdiffs/commit/e18e4e72de0d824edfbe0d73efe34cbd0d01d301/approve"}, "statuses": {"href": "https://api.bitbucket.org/2.0/repositories/sentryuser/newsdiffs/commit/e18e4e72de0d824edfbe0d73efe34cbd0d01d301/statuses"}},
"author": {
"raw": "Sentry User <sentryuser@getsentry.com>",
"type": "author"
},
"parents": [{"hash": "26de9b63d09aa9c787e899f149c672023e292925", "type": "commit", "links": {"self": {"href": "https://api.bitbucket.org/2.0/repositories/sentryuser/newsdiffs/commit/26de9b63d09aa9c787e899f149c672023e292925"}, "html": {"href": "https://bitbucket.org/sentryuser/newsdiffs/commits/26de9b63d09aa9c787e899f149c672023e292925"}}}], "date": "2017-05-16T23:21:40+00:00", "message": "README.md edited online with Bitbucket", "type": "commit"}],
"next": "https://api.bitbucket.org/2.0/repositories/sentryuser/sentryrepo/commits/e18e4e72de0d824edfbe0d73efe34cbd0d01d301?page=2"
}
"""
GET_LAST_COMMITS_EXAMPLE = b"""{
"pagelen": 30,
"values":
[{"hash": "e18e4e72de0d824edfbe0d73efe34cbd0d01d301",
"repository": {"links": {"self": {"href": "https://api.bitbucket.org/2.0/repositories/sentryuser/newsdiffs"}, "html": {"href": "https://bitbucket.org/sentryuser/newsdiffs"}, "avatar": {"href": "https://bitbucket.org/sentryuser/newsdiffs/avatar/32/"}}, "type": "repository", "name": "newsdiffs", "full_name": "sentryuser/newsdiffs", "uuid": "{c78dfb25-7882-4550-97b1-4e0d38f32859}"}, "links": {"self": {"href": "https://api.bitbucket.org/2.0/repositories/sentryuser/newsdiffs/commit/e18e4e72de0d824edfbe0d73efe34cbd0d01d301"}, "comments": {"href": "https://api.bitbucket.org/2.0/repositories/sentryuser/newsdiffs/commit/e18e4e72de0d824edfbe0d73efe34cbd0d01d301/comments"}, "patch": {"href": "https://api.bitbucket.org/2.0/repositories/sentryuser/newsdiffs/patch/e18e4e72de0d824edfbe0d73efe34cbd0d01d301"}, "html": {"href": "https://bitbucket.org/sentryuser/newsdiffs/commits/e18e4e72de0d824edfbe0d73efe34cbd0d01d301"}, "diff": {"href": "https://api.bitbucket.org/2.0/repositories/sentryuser/newsdiffs/diff/e18e4e72de0d824edfbe0d73efe34cbd0d01d301"}, "approve": {"href": "https://api.bitbucket.org/2.0/repositories/sentryuser/newsdiffs/commit/e18e4e72de0d824edfbe0d73efe34cbd0d01d301/approve"}, "statuses": {"href": "https://api.bitbucket.org/2.0/repositories/sentryuser/newsdiffs/commit/e18e4e72de0d824edfbe0d73efe34cbd0d01d301/statuses"}}, "author": {"raw": "Sentry User <sentryuser@getsentry.com>", "type": "author", "user": {"username": "sentryuser", "display_name": "Sentry User", "type": "user", "uuid": "{b128e0f6-196a-4dde-b72d-f42abc6dc239}", "links": {"self": {"href": "https://api.bitbucket.org/2.0/users/sentryuser"}, "html": {"href": "https://bitbucket.org/sentryuser/"}, "avatar": {"href": "https://bitbucket.org/account/sentryuser/avatar/32/"}}}}, "parents": [{"hash": "26de9b63d09aa9c787e899f149c672023e292925", "type": "commit", "links": {"self": {"href": "https://api.bitbucket.org/2.0/repositories/sentryuser/newsdiffs/commit/26de9b63d09aa9c787e899f149c672023e292925"}, "html": {"href": "https://bitbucket.org/sentryuser/newsdiffs/commits/26de9b63d09aa9c787e899f149c672023e292925"}}}], "date": "2017-05-16T23:21:40+00:00", "message": "README.md edited online with Bitbucket", "type": "commit"}],
"next": "https://api.bitbucket.org/2.0/repositories/sentryuser/newsdiffs/commits/e18e4e72de0d824edfbe0d73efe34cbd0d01d301?page=2"
}
"""
COMMIT_DIFF_PATCH = b"""diff --git a/README.md b/README.md
index 89821ce..9e09a8a 100644
--- a/README.md
+++ b/README.md
@@ -1 +1 @@
-A twitter bot to when words are said by the NYT for the first time.
\ No newline at end of file
+A twitter bot to when words are said by the NYT for the first time.sdfsdf
\ No newline at end of file"""
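
def _example_compare_commits_hash():
    # Illustrative only: these fixtures are raw bytes mirroring Bitbucket API
    # payloads, and tests are assumed to decode them with json.loads.
    import json
    commits = json.loads(COMPARE_COMMITS_EXAMPLE)['values']
    return commits[0]['hash']  # 'e18e4e72de0d824edfbe0d73efe34cbd0d01d301'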
PUSH_EVENT_EXAMPLE = b"""{
"push": {
"changes": [
{
"links": {
"html": {
"href": "https://bitbucket.org/maxbittker/newsdiffs/branches/compare/e0e377d186e4f0e937bdb487a23384fe002df649..8f5952f4dcffd7b311181d48eb0394b0cca21410"
},
"commits": {
"href": "https://api.bitbucket.org/2.0/repositories/maxbittker/newsdiffs/commits?include=e0e377d186e4f0e937bdb487a23384fe002df649&exclude=8f5952f4dcffd7b311181d48eb0394b0cca21410"
},
"diff": {
"href": "https://api.bitbucket.org/2.0/repositories/maxbittker/newsdiffs/diff/e0e377d186e4f0e937bdb487a23384fe002df649..8f5952f4dcffd7b311181d48eb0394b0cca21410"
}
},
"commits": [
{
"links": {
"approve": {
"href": "https://api.bitbucket.org/2.0/repositories/maxbittker/newsdiffs/commit/e0e377d186e4f0e937bdb487a23384fe002df649/approve"
},
"statuses": {
"href": "https://api.bitbucket.org/2.0/repositories/maxbittker/newsdiffs/commit/e0e377d186e4f0e937bdb487a23384fe002df649/statuses"
},
"comments": {
"href": "https://api.bitbucket.org/2.0/repositories/maxbittker/newsdiffs/commit/e0e377d186e4f0e937bdb487a23384fe002df649/comments"
},
"self": {
"href": "https://api.bitbucket.org/2.0/repositories/maxbittker/newsdiffs/commit/e0e377d186e4f0e937bdb487a23384fe002df649"
},
"patch": {
"href": "https://api.bitbucket.org/2.0/repositories/maxbittker/newsdiffs/patch/e0e377d186e4f0e937bdb487a23384fe002df649"
},
"html": {
"href": "https://bitbucket.org/maxbittker/newsdiffs/commits/e0e377d186e4f0e937bdb487a23384fe002df649"
},
"diff": {
"href": "https://api.bitbucket.org/2.0/repositories/maxbittker/newsdiffs/diff/e0e377d186e4f0e937bdb487a23384fe002df649"
}
},
"date": "2017-05-24T01:05:47+00:00",
"hash": "e0e377d186e4f0e937bdb487a23384fe002df649",
"parents": [
{
"type": "commit",
"links": {
"html": {
"href": "https://bitbucket.org/maxbittker/newsdiffs/commits/8f5952f4dcffd7b311181d48eb0394b0cca21410"
},
"self": {
"href": "https://api.bitbucket.org/2.0/repositories/maxbittker/newsdiffs/commit/8f5952f4dcffd7b311181d48eb0394b0cca21410"
}
},
"hash": "8f5952f4dcffd7b311181d48eb0394b0cca21410"
}
],
"type": "commit",
"message": "README.md edited online with Bitbucket",
"author": {
"type": "author",
"user": {
"type": "user",
"display_name": "Max Bittker",
"uuid": "{b128e0f6-196a-4dde-b72d-f42abc6dc239}",
"username": "maxbittker",
"links": {
"html": {
"href": "https://bitbucket.org/maxbittker/"
},
"avatar": {
"href": "https://bitbucket.org/account/maxbittker/avatar/32/"
},
"self": {
"href": "https://api.bitbucket.org/2.0/users/maxbittker"
}
}
},
"raw": "Max Bittker <max@getsentry.com>"
}
}
],
"old": {
"type": "branch",
"links": {
"html": {
"href": "https://bitbucket.org/maxbittker/newsdiffs/branch/master"
},
"commits": {
"href": "https://api.bitbucket.org/2.0/repositories/maxbittker/newsdiffs/commits/master"
},
"self": {
"href": "https://api.bitbucket.org/2.0/repositories/maxbittker/newsdiffs/refs/branches/master"
}
},
"target": {
"links": {
"html": {
"href": "https://bitbucket.org/maxbittker/newsdiffs/commits/8f5952f4dcffd7b311181d48eb0394b0cca21410"
},
"self": {
"href": "https://api.bitbucket.org/2.0/repositories/maxbittker/newsdiffs/commit/8f5952f4dcffd7b311181d48eb0394b0cca21410"
}
},
"date": "2017-05-19T22:53:22+00:00",
"hash": "8f5952f4dcffd7b311181d48eb0394b0cca21410",
"parents": [
{
"type": "commit",
"links": {
"html": {
"href": "https://bitbucket.org/maxbittker/newsdiffs/commits/1cdfa36e62e615cdc73a1d5fcff1c706965b186d"
},
"self": {
"href": "https://api.bitbucket.org/2.0/repositories/maxbittker/newsdiffs/commit/1cdfa36e62e615cdc73a1d5fcff1c706965b186d"
}
},
"hash": "1cdfa36e62e615cdc73a1d5fcff1c706965b186d"
}
],
"type": "commit",
"message": "README.md edited online with Bitbucket",
"author": {
"type": "author",
"raw": "Max Bittker <max@getsentry.com>"
}
},
"name": "master"
},
"truncated": false,
"new": {
"type": "branch",
"links": {
"html": {
"href": "https://bitbucket.org/maxbittker/newsdiffs/branch/master"
},
"commits": {
"href": "https://api.bitbucket.org/2.0/repositories/maxbittker/newsdiffs/commits/master"
},
"self": {
"href": "https://api.bitbucket.org/2.0/repositories/maxbittker/newsdiffs/refs/branches/master"
}
},
"target": {
"links": {
"html": {
"href": "https://bitbucket.org/maxbittker/newsdiffs/commits/e0e377d186e4f0e937bdb487a23384fe002df649"
},
"self": {
"href": "https://api.bitbucket.org/2.0/repositories/maxbittker/newsdiffs/commit/e0e377d186e4f0e937bdb487a23384fe002df649"
}
},
"date": "2017-05-24T01:05:47+00:00",
"hash": "e0e377d186e4f0e937bdb487a23384fe002df649",
"parents": [
{
"type": "commit",
"links": {
"html": {
"href": "https://bitbucket.org/maxbittker/newsdiffs/commits/8f5952f4dcffd7b311181d48eb0394b0cca21410"
},
"self": {
"href": "https://api.bitbucket.org/2.0/repositories/maxbittker/newsdiffs/commit/8f5952f4dcffd7b311181d48eb0394b0cca21410"
}
},
"hash": "8f5952f4dcffd7b311181d48eb0394b0cca21410"
}
],
"type": "commit",
"message": "README.md edited online with Bitbucket",
"author": {
"type": "author",
"raw": "Max Bittker <max@getsentry.com>"
}
},
"name": "master"
},
"created": false,
"forced": false,
"closed": false
}
]
},
"repository": {
"links": {
"html": {
"href": "https://bitbucket.org/maxbittker/newsdiffs"
},
"avatar": {
"href": "https://bitbucket.org/maxbittker/newsdiffs/avatar/32/"
},
"self": {
"href": "https://api.bitbucket.org/2.0/repositories/maxbittker/newsdiffs"
}
},
"full_name": "maxbittker/newsdiffs",
"scm": "git",
"uuid": "{c78dfb25-7882-4550-97b1-4e0d38f32859}",
"type": "repository",
"is_private": false,
"owner": {
"type": "user",
"display_name": "Max Bittker",
"uuid": "{b128e0f6-196a-4dde-b72d-f42abc6dc239}",
"username": "maxbittker",
"links": {
"html": {
"href": "https://bitbucket.org/maxbittker/"
},
"avatar": {
"href": "https://bitbucket.org/account/maxbittker/avatar/32/"
},
"self": {
"href": "https://api.bitbucket.org/2.0/users/maxbittker"
}
}
},
"name": "newsdiffs",
"website": ""
},
"actor": {
"type": "user",
"display_name": "Max Bittker",
"uuid": "{b128e0f6-196a-4dde-b72d-f42abc6dc239}",
"username": "maxbittker",
"links": {
"html": {
"href": "https://bitbucket.org/maxbittker/"
},
"avatar": {
"href": "https://bitbucket.org/account/maxbittker/avatar/32/"
},
"self": {
"href": "https://api.bitbucket.org/2.0/users/maxbittker"
}
}
}
}
"""
REPO = {
u'scm': u'git',
u'website': u'',
u'has_wiki': True,
u'description': u'',
u'links': OrderedDict([
(u'watchers', OrderedDict([(u'href',
u'https://api.bitbucket.org/2.0/repositories/laurynsentry/helloworld/watchers'
)])),
(u'branches', OrderedDict([(u'href',
u'https://api.bitbucket.org/2.0/repositories/laurynsentry/helloworld/refs/branches'
)])),
(u'tags', OrderedDict([(u'href',
u'https://api.bitbucket.org/2.0/repositories/laurynsentry/helloworld/refs/tags'
)])),
(u'commits', OrderedDict([(u'href',
u'https://api.bitbucket.org/2.0/repositories/laurynsentry/helloworld/commits'
)])),
(u'clone', [OrderedDict([(u'href',
u'https://laurynsentry@bitbucket.org/laurynsentry/helloworld.git'
), (u'name', u'https')]), OrderedDict([(u'href',
u'git@bitbucket.org:laurynsentry/helloworld.git'), (u'name',
u'ssh')])]),
(u'self', OrderedDict([(u'href',
u'https://api.bitbucket.org/2.0/repositories/laurynsentry/helloworld'
)])),
(u'source', OrderedDict([(u'href',
u'https://api.bitbucket.org/2.0/repositories/laurynsentry/helloworld/src'
)])),
(u'html', OrderedDict([(u'href',
u'https://bitbucket.org/laurynsentry/helloworld')])),
(u'avatar', OrderedDict([(u'href',
u'https://bytebucket.org/ravatar/%7B2a47ac11-098a-4054-8496-193754cae14b%7D?ts=default'
)])),
(u'hooks', OrderedDict([(u'href',
u'https://api.bitbucket.org/2.0/repositories/laurynsentry/helloworld/hooks'
)])),
(u'forks', OrderedDict([(u'href',
u'https://api.bitbucket.org/2.0/repositories/laurynsentry/helloworld/forks'
)])),
(u'downloads', OrderedDict([(u'href',
u'https://api.bitbucket.org/2.0/repositories/laurynsentry/helloworld/downloads'
)])),
(u'issues', OrderedDict([(u'href',
u'https://api.bitbucket.org/2.0/repositories/laurynsentry/helloworld/issues'
)])),
(u'pullrequests', OrderedDict([(u'href',
u'https://api.bitbucket.org/2.0/repositories/laurynsentry/helloworld/pullrequests'
)])),
]),
u'created_on': u'2018-05-14T23:53:37.377674+00:00',
u'full_name': u'laurynsentry/helloworld',
u'owner': OrderedDict([
(u'username', u'laurynsentry'),
(u'display_name', u'Lauryn Brown'),
(u'account_id', u'5a00066393915e620920e0ae'),
(u'links', OrderedDict([(u'self', OrderedDict([(u'href',
u'https://api.bitbucket.org/2.0/users/laurynsentry')])),
(u'html', OrderedDict([(u'href',
u'https://bitbucket.org/laurynsentry/')])), (u'avatar',
OrderedDict([(u'href',
u'https://bitbucket.org/account/laurynsentry/avatar/')]))])),
(u'type', u'user'),
(u'uuid', u'{e50a27fe-0686-4d75-ba44-d27608bbb718}'),
]),
u'has_issues': True,
u'slug': u'helloworld',
u'is_private': False,
u'size': 221349,
u'name': u'HelloWorld',
u'language': u'',
u'fork_policy': u'allow_forks',
u'uuid': u'{2a47ac11-098a-4054-8496-193754cae14b}',
u'mainbranch': OrderedDict([(u'type', u'branch'), (u'name',
u'master')]),
u'updated_on': u'2018-05-30T18:21:08.780363+00:00',
u'type': u'repository',
}
| 60.613636 | 2,218 | 0.47844 | 1,565 | 21,336 | 6.502236 | 0.126518 | 0.099057 | 0.086871 | 0.102201 | 0.818789 | 0.816627 | 0.792551 | 0.78066 | 0.762579 | 0.742925 | 0 | 0.13464 | 0.380718 | 21,336 | 351 | 2,219 | 60.786325 | 0.63551 | 0 | 0 | 0.422414 | 0 | 0.077586 | 0.859908 | 0.056852 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.005747 | 0 | 0.005747 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
62d51dc359391f62ef4ff2f207b077e5bfa02c5b | 52 | py | Python | samples/src/main/resources/datasets/python/48.py | sritchie/kotlingrad | 8165ed1cd77220a5347c58cded4c6f2bcf22ee30 | [
"Apache-2.0"
] | 11 | 2020-12-19T01:19:44.000Z | 2021-12-25T20:43:33.000Z | src/main/resources/datasets/python/48.py | breandan/katholic | 081c39f3acc73ff41f5865563debe78a36e1038f | [
"Apache-2.0"
] | null | null | null | src/main/resources/datasets/python/48.py | breandan/katholic | 081c39f3acc73ff41f5865563debe78a36e1038f | [
"Apache-2.0"
] | 2 | 2021-01-25T07:59:20.000Z | 2021-08-07T07:13:49.000Z | def bool4(a, b, c, d):
return ((a < b) < c) < d
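# Editorial note: the explicit parentheses make this different from Python's
# chained comparison ``a < b < c < d``. Each inner comparison yields a bool,
# which then compares as 0 or 1 against the next operand, e.g.:
#
#     >>> bool4(1, 2, 3, 4)   # (1 < 2) -> True; True < 3 -> True; True < 4
#     True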
| 17.333333 | 28 | 0.423077 | 11 | 52 | 2 | 0.636364 | 0.181818 | 0.272727 | 0.363636 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.028571 | 0.326923 | 52 | 2 | 29 | 26 | 0.6 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.5 | false | 0 | 0 | 0.5 | 1 | 0 | 1 | 0 | 0 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 7 |
c50b9a8f0357429c457d101be1347113fbc412c5 | 86,990 | py | Python | pynetdicom/dimse_primitives.py | howardpchen/pynetdicom | a67ad0422dbbc8b7c196da2b2c5fd38e7caf8d47 | [
"MIT"
] | null | null | null | pynetdicom/dimse_primitives.py | howardpchen/pynetdicom | a67ad0422dbbc8b7c196da2b2c5fd38e7caf8d47 | [
"MIT"
] | null | null | null | pynetdicom/dimse_primitives.py | howardpchen/pynetdicom | a67ad0422dbbc8b7c196da2b2c5fd38e7caf8d47 | [
"MIT"
] | null | null | null | """
Define the DIMSE-C and DIMSE-N service parameter primitives.
Notes:
* The class member names must match their corresponding DICOM element keyword
in order for the DIMSE messages/primitives to be created correctly.
TODO: Implement properties for DIMSE-N parameters
TODO: Implement status related parameters for DIMSE-N classes
TODO: Add string output for the DIMSE-C classes
"""
import codecs
try:
from collections.abc import MutableSequence
except ImportError:
from collections import MutableSequence
from io import BytesIO
import logging
from pydicom.tag import Tag
from pydicom.uid import UID
from pynetdicom.utils import validate_ae_title
LOGGER = logging.getLogger('pynetdicom.dimse_primitives')
# pylint: disable=invalid-name
# pylint: disable=attribute-defined-outside-init
# pylint: disable=too-many-instance-attributes
# pylint: disable=anomalous-backslash-in-string
class DIMSEPrimitive(object):
"""Base class for the DIMSE primitives."""
STATUS_OPTIONAL_KEYWORDS = ()
REQUEST_KEYWORDS = ()
RESPONSE_KEYWORDS = ('MessageIDBeingRespondedTo', 'Status')
@property
def AffectedSOPClassUID(self):
"""Return the *Affected SOP Class UID*."""
return self._affected_sop_class_uid
@AffectedSOPClassUID.setter
def AffectedSOPClassUID(self, value):
"""Set the *Affected SOP Class UID*.
Parameters
----------
value : pydicom.uid.UID, bytes or str
The value for the Affected SOP Class UID
"""
if isinstance(value, UID):
pass
elif isinstance(value, str):
value = UID(value)
elif isinstance(value, bytes):
value = UID(value.decode('ascii'))
elif value is None:
pass
else:
raise TypeError("Affected SOP Class UID must be a "
"pydicom.uid.UID, str or bytes")
if value is not None and not value.is_valid:
LOGGER.error("Affected SOP Class UID is an invalid UID")
LOGGER.error(value)
#raise ValueError("Affected SOP Class UID is an invalid UID")
self._affected_sop_class_uid = value
@property
def _dataset_variant(self):
"""Return the Dataset-like parameter value.
Used for EventInformation, EventReply, AttributeList,
ActionInformation, ActionReply, DataSet, Identifier and
ModificationList dataset-like parameter values.
Returns
-------
BytesIO or None
"""
return self._dataset
@_dataset_variant.setter
def _dataset_variant(self, value):
"""Set the Dataset-like parameter.
Used for EventInformation, EventReply, AttributeList,
ActionInformation, ActionReply, DataSet, Identifier and
ModificationList dataset-like parameter values.
Parameters
----------
value : tuple
The (dataset, variant name) to set, where dataset is either None
or BytesIO and variant name is str.
"""
if value[0] is None:
self._dataset = value[0]
elif isinstance(value[0], BytesIO):
self._dataset = value[0]
else:
raise TypeError(
"'{}' parameter must be a BytesIO object".format(value[1])
)
@property
def is_valid_request(self):
"""Return True if the request is valid, False otherwise."""
for keyword in self.REQUEST_KEYWORDS:
if getattr(self, keyword) is None:
return False
return True
@property
def is_valid_response(self):
"""Return True if the response is valid, False otherwise."""
for keyword in self.RESPONSE_KEYWORDS:
if getattr(self, keyword) is None:
return False
return True
@property
def MessageID(self):
"""Return the DIMSE *Message ID*."""
return self._message_id
@MessageID.setter
def MessageID(self, value):
"""Set the DIMSE *Message ID*."""
if isinstance(value, int):
if 0 <= value < 2**16:
self._message_id = value
else:
raise ValueError("Message ID must be between 0 and 65535, "
"inclusive")
elif value is None:
self._message_id = value
else:
raise TypeError("Message ID must be an int")
@property
def MessageIDBeingRespondedTo(self):
"""Return the *Message ID Being Responded To*."""
return self._message_id_being_responded_to
@MessageIDBeingRespondedTo.setter
def MessageIDBeingRespondedTo(self, value):
"""Set the *Message ID Being Responded To*."""
if isinstance(value, int):
if 0 <= value < 2**16:
self._message_id_being_responded_to = value
else:
raise ValueError("Message ID Being Responded To must be "
"between 0 and 65535, inclusive")
elif value is None:
self._message_id_being_responded_to = value
else:
raise TypeError("Message ID Being Responded To must be an int")
@property
def Status(self):
"""Return the *Status*."""
return self._status
@Status.setter
def Status(self, value):
"""Set the *Status*."""
if isinstance(value, int) or value is None:
self._status = value
else:
raise TypeError("DIMSE primitive's 'Status' must be an int")
# DIMSE-C Service Primitives
class C_STORE(DIMSEPrimitive):
"""Represents a C-STORE primitive.
+------------------------------------------+---------+----------+
| Parameter | Req/ind | Rsp/conf |
+==========================================+=========+==========+
| Message ID | M | U |
+------------------------------------------+---------+----------+
| Message ID Being Responded To | \- | M |
+------------------------------------------+---------+----------+
| Affected SOP Class UID | M | U(=) |
+------------------------------------------+---------+----------+
| Affected SOP Instance UID | M | U(=) |
+------------------------------------------+---------+----------+
| Priority | M | \- |
+------------------------------------------+---------+----------+
| Move Originator Application Entity Title | U | \- |
+------------------------------------------+---------+----------+
| Move Originator Message ID | U | \- |
+------------------------------------------+---------+----------+
| Data Set | M | \- |
+------------------------------------------+---------+----------+
| Status | \- | M |
+------------------------------------------+---------+----------+
| Offending Element | \- | C |
+------------------------------------------+---------+----------+
| Error Comment | \- | C |
+------------------------------------------+---------+----------+
| (=) - The value of the parameter is equal to the value of the parameter
in the column to the left
| C - The parameter is conditional.
| M - Mandatory
| MF - Mandatory with a fixed value
| U - The use of this parameter is a DIMSE service user option
| UF - User option with a fixed value
Attributes
----------
MessageID : int
Identifies the operation and is used to distinguish this
operation from other notifications or operations that may be in
progress. No two identical values for the Message ID shall be used for
outstanding operations.
MessageIDBeingRespondedTo : int
The Message ID of the operation request/indication to which this
response/confirmation applies.
AffectedSOPClassUID : pydicom.uid.UID, bytes or str
For the request/indication this specifies the SOP Class for
storage. If included in the response/confirmation, it shall be equal
to the value in the request/indication
AffectedSOPInstanceUID : pydicom.uid.UID, bytes or str
For the request/indication this specifies the SOP Instance
for storage. If included in the response/confirmation, it shall be
equal to the value in the request/indication
Priority : int
The priority of the C-STORE operation. It shall be one of the
following:
* 0: Medium
* 1: High
* 2: Low (Default)
MoveOriginatorApplicationEntityTitle : bytes
The DICOM AE Title of the AE that invoked the C-MOVE operation
from which this C-STORE sub-operation is being performed
MoveOriginatorMessageID : int
The Message ID of the C-MOVE request/indication primitive from
which this C-STORE sub-operation is being performed
DataSet : io.BytesIO
The pydicom Dataset containing the Attributes of the Composite
SOP Instance to be stored, encoded as a BytesIO object
Status : int
The error or success notification of the operation.
OffendingElement : list of int or None
An optional status related field containing a list of the
elements in which an error was detected.
ErrorComment : str or None
An optional status related field containing a text description
of the error detected. 64 characters maximum.
"""
STATUS_OPTIONAL_KEYWORDS = ('OffendingElement', 'ErrorComment', )
REQUEST_KEYWORDS = (
'MessageID', 'AffectedSOPClassUID', 'AffectedSOPInstanceUID',
'Priority', 'DataSet'
)
def __init__(self):
# Variable names need to match the corresponding DICOM Element keywords
# in order for the DIMSE Message classes to be built correctly.
# Changes to the variable names can be made provided the DIMSEMessage()
# class' message_to_primitive() and primitive_to_message() methods
# are also changed
self.MessageID = None
self.MessageIDBeingRespondedTo = None
self.AffectedSOPClassUID = None
self.AffectedSOPInstanceUID = None
self.Priority = 0x02
self.MoveOriginatorApplicationEntityTitle = None
self.MoveOriginatorMessageID = None
self.DataSet = None
self.Status = None
# Optional Command Set elements used with specific Status values
# For Warning statuses 0xB000, 0xB006, 0xB007
# For Failure statuses 0xCxxx, 0xA9xx,
self.OffendingElement = None
# For Warning statuses 0xB000, 0xB006, 0xB007
# For Failure statuses 0xCxxx, 0xA9xx, 0xA7xx, 0x0122, 0x0124
self.ErrorComment = None
# For Failure statuses 0x0117
# self.AffectedSOPInstanceUID
@property
def AffectedSOPInstanceUID(self):
"""Return the *Affected SOP Instance UID*."""
return self._affected_sop_instance_uid
@AffectedSOPInstanceUID.setter
def AffectedSOPInstanceUID(self, value):
"""Set the *Affected SOP Instance UID*.
Parameters
----------
value : pydicom.uid.UID, bytes or str
The value for the Affected SOP Class UID
"""
if isinstance(value, UID):
pass
elif isinstance(value, str):
value = UID(value)
elif isinstance(value, bytes):
value = UID(value.decode('ascii'))
elif value is None:
pass
else:
raise TypeError("Affected SOP Instance UID must be a "
"pydicom.uid.UID, str or bytes")
if value is not None and not value.is_valid:
LOGGER.error("Affected SOP Instance UID is an invalid UID")
raise ValueError("Affected SOP Instance UID is an invalid UID")
self._affected_sop_instance_uid = value
@property
def Priority(self):
"""Return the *Priority*."""
return self._priority
@Priority.setter
def Priority(self, value):
"""Set the *Priority*."""
if value in [0, 1, 2]:
self._priority = value
else:
LOGGER.warning("Attempted to set C-STORE Priority parameter to "
"an invalid value")
raise ValueError("C-STORE Priority must be 0, 1, or 2")
@property
def MoveOriginatorApplicationEntityTitle(self):
"""Return the *Move Originator Application Entity Title*."""
return self._move_originator_application_entity_title
@MoveOriginatorApplicationEntityTitle.setter
def MoveOriginatorApplicationEntityTitle(self, value):
"""Set the *Move Originator Application Entity Title*.
Parameters
----------
value : str or bytes
            The Move Originator AE Title as a string or bytes object. Cannot
            be an empty string and will be truncated to 16 characters
"""
if isinstance(value, str):
value = codecs.encode(value, 'ascii')
if value is not None:
self._move_originator_application_entity_title = validate_ae_title(
value
)
else:
self._move_originator_application_entity_title = None
@property
def MoveOriginatorMessageID(self):
"""Return the *Move Originator Message ID*."""
return self._move_originator_message_id
@MoveOriginatorMessageID.setter
def MoveOriginatorMessageID(self, value):
"""Set the *Move Originator Message ID*."""
if isinstance(value, int):
if 0 <= value < 2**16:
self._move_originator_message_id = value
else:
raise ValueError("Move Originator Message ID To must be "
"between 0 and 65535, inclusive")
elif value is None:
self._move_originator_message_id = value
else:
raise TypeError("Move Originator Message ID To must be an int")
@property
def DataSet(self):
"""Return the *Data Set*."""
return self._dataset_variant
@DataSet.setter
def DataSet(self, value):
"""Set the *Data Set*."""
self._dataset_variant = (value, 'DataSet')
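# Illustrative sketch of building a C-STORE request with this primitive; the
# UIDs and encoded dataset bytes are hypothetical placeholders. The Data Set
# parameter must already be encoded (per the negotiated transfer syntax) and
# wrapped in a BytesIO.
#
#     >>> from io import BytesIO
#     >>> req = C_STORE()
#     >>> req.MessageID = 1
#     >>> req.AffectedSOPClassUID = '1.2.840.10008.5.1.4.1.1.2'  # CT Image Storage
#     >>> req.AffectedSOPInstanceUID = '1.2.3.4'
#     >>> req.DataSet = BytesIO(b'...')  # placeholder encoded dataset
#     >>> req.is_valid_request           # Priority already defaults to 0x02
#     True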
class C_FIND(DIMSEPrimitive):
"""Represents a C-FIND primitive.
+-------------------------------+---------+----------+
| Parameter | Req/ind | Rsp/conf |
+===============================+=========+==========+
| Message ID | M | U |
+-------------------------------+---------+----------+
| Message ID Being Responded To | \- | M |
+-------------------------------+---------+----------+
| Affected SOP Class UID | M | U(=) |
+-------------------------------+---------+----------+
| Priority | M | \- |
+-------------------------------+---------+----------+
| Identifier | M | C |
+-------------------------------+---------+----------+
| Status | \- | M |
+-------------------------------+---------+----------+
| Offending Element | \- | C |
+-------------------------------+---------+----------+
| Error Comment | \- | C |
+-------------------------------+---------+----------+
| (=) - The value of the parameter is equal to the value of the parameter
in the column to the left
| C - The parameter is conditional.
| M - Mandatory
| MF - Mandatory with a fixed value
| U - The use of this parameter is a DIMSE service user option
| UF - User option with a fixed value
Attributes
----------
MessageID : int
Identifies the operation and is used to distinguish this
operation from other notifications or operations that may be in
progress. No two identical values for the Message ID shall be used for
outstanding operations.
MessageIDBeingRespondedTo : int
The Message ID of the operation request/indication to which
this response/confirmation applies.
AffectedSOPClassUID : pydicom.uid.UID, bytes or str
        For the request/indication this specifies the SOP Class against
        which the query is to be performed. If included in the
        response/confirmation, it shall be equal to the value in the
        request/indication
    Priority : int
        The priority of the C-FIND operation. It shall be one of the
following:
* 0: Medium
* 1: High
* 2: Low (Default)
Identifier : io.BytesIO
A list of Attributes (in the form of an encoded pydicom
Dataset) to be matched against the values of the Attributes in the
instances of the composite objects known to the performing DIMSE
service-user.
Status : int
The error or success notification of the operation.
OffendingElement : list of int or None
An optional status related field containing a list of the
elements in which an error was detected.
ErrorComment : str or None
An optional status related field containing a text
description of the error detected. 64 characters maximum.
"""
STATUS_OPTIONAL_KEYWORDS = ('OffendingElement', 'ErrorComment', )
REQUEST_KEYWORDS = (
'MessageID', 'AffectedSOPClassUID', 'Priority', 'Identifier'
)
def __init__(self):
# Variable names need to match the corresponding DICOM Element keywords
# in order for the DIMSE Message classes to be built correctly.
# Changes to the variable names can be made provided the DIMSEMessage()
# class' message_to_primitive() and primitive_to_message() methods
# are also changed
self.MessageID = None
self.MessageIDBeingRespondedTo = None
self.AffectedSOPClassUID = None
self.Priority = 0x02
self.Identifier = None
self.Status = None
        # Optional Command Set elements used with specific Status values
# For Failure statuses 0xA900, 0xCxxx
self.OffendingElement = None
# For Failure statuses 0xA900, 0xA700, 0x0122, 0xCxxx
self.ErrorComment = None
@property
def Priority(self):
"""Return the *Priority*."""
return self._priority
@Priority.setter
def Priority(self, value):
"""Set the *Priority*."""
if value in [0, 1, 2]:
self._priority = value
else:
LOGGER.warning("Attempted to set C-FIND Priority parameter to an "
"invalid value")
raise ValueError("Priority must be 0, 1, or 2")
@property
def Identifier(self):
"""Return the *Identifier*."""
return self._dataset_variant
@Identifier.setter
def Identifier(self, value):
"""Set the *Identifier*."""
self._dataset_variant = (value, 'Identifier')
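# Illustrative sketch: like C_STORE.DataSet, the Identifier must be a BytesIO
# holding an already-encoded dataset; anything else raises TypeError via
# _dataset_variant. How the bytes are produced is outside this module.
#
#     >>> find = C_FIND()
#     >>> find.Identifier = BytesIO(b'...')       # accepted
#     >>> find.Identifier = {'PatientName': '*'}  # raises TypeError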
class C_GET(DIMSEPrimitive):
"""Represents a C-GET primitive.
+-------------------------------+---------+----------+
| Parameter | Req/ind | Rsp/conf |
+===============================+=========+==========+
| Message ID | M | U |
+-------------------------------+---------+----------+
| Message ID Being Responded To | \- | M |
+-------------------------------+---------+----------+
| Affected SOP Class UID | M | U(=) |
+-------------------------------+---------+----------+
| Priority | M | \- |
+-------------------------------+---------+----------+
| Identifier | M | U |
+-------------------------------+---------+----------+
| Status | \- | M |
+-------------------------------+---------+----------+
| Number of Remaining Sub-ops | \- | C |
+-------------------------------+---------+----------+
| Number of Completed Sub-ops | \- | C |
+-------------------------------+---------+----------+
| Number of Failed Sub-ops | \- | C |
+-------------------------------+---------+----------+
| Number of Warning Sub-ops | \- | C |
+-------------------------------+---------+----------+
| Offending Element | \- | C |
+-------------------------------+---------+----------+
| Error Comment | \- | C |
+-------------------------------+---------+----------+
| (=) - The value of the parameter is equal to the value of the parameter
in the column to the left
| C - The parameter is conditional.
| M - Mandatory
| MF - Mandatory with a fixed value
| U - The use of this parameter is a DIMSE service user option
| UF - User option with a fixed value
Attributes
----------
MessageID : int
Identifies the operation and is used to distinguish this
operation from other notifications or operations that may be in
progress. No two identical values for the Message ID shall be used for
outstanding operations.
MessageIDBeingRespondedTo : int
The Message ID of the operation request/indication to which
this response/confirmation applies.
AffectedSOPClassUID : pydicom.uid.UID, bytes or str
        For the request/indication this specifies the SOP Class against
        which the retrieval is to be performed. If included in the
        response/confirmation, it shall be equal to the value in the
        request/indication
    Priority : int
        The priority of the C-GET operation. It shall be one of the
following:
* 0: Medium
* 1: High
* 2: Low (Default)
Identifier : io.BytesIO
The pydicom Dataset containing the list of Attributes to be
matched against the values of Attributes of known composite SOP
Instances of the performing DIMSE service-user, encoded as a BytesIO
object. For the list of allowed Attributes and the rules defining their
usage see the section corresponding to the service class in the DICOM
Standard, Part 4.
Status : int
The error or success notification of the operation.
NumberOfRemainingSuboperations : int
The number of remaining C-STORE sub-operations to be invoked
by this C-GET operation. It may be included in any response and shall
be included if the status is Pending
NumberOfCompletedSuboperations : int
The number of C-STORE sub-operations that have completed
successfully. It may be included in any response and shall be included
if the status is Pending
NumberOfFailedSuboperations : int
The number of C-STORE sub-operations that have failed. It may
be included in any response and shall be included if the status is
Pending
NumberOfWarningSuboperations : int
The number of C-STORE operations that generated Warning
responses. It may be included in any response and shall be included if
the status is Pending
OffendingElement : list of int or None
An optional status related field containing a list of the
elements in which an error was detected.
ErrorComment : str or None
An optional status related field containing a text
description of the error detected. 64 characters maximum.
"""
STATUS_OPTIONAL_KEYWORDS = (
'ErrorComment', 'OffendingElement', 'NumberOfRemainingSuboperations',
'NumberOfCompletedSuboperations', 'NumberOfFailedSuboperations',
'NumberOfWarningSuboperations'
)
REQUEST_KEYWORDS = (
'MessageID', 'AffectedSOPClassUID', 'Priority', 'Identifier'
)
def __init__(self):
# Variable names need to match the corresponding DICOM Element keywords
# in order for the DIMSE Message classes to be built correctly.
# Changes to the variable names can be made provided the DIMSEMessage()
# class' message_to_primitive() and primitive_to_message() methods
# are also changed
self.MessageID = None
self.MessageIDBeingRespondedTo = None
self.AffectedSOPClassUID = None
self.Priority = 0x02
self.Identifier = None
self.Status = None
self.NumberOfRemainingSuboperations = None
self.NumberOfCompletedSuboperations = None
self.NumberOfFailedSuboperations = None
self.NumberOfWarningSuboperations = None
# For Failure statuses 0xA701, 0xA900
self.ErrorComment = None
self.OffendingElement = None
# For 0xA702, 0xFE00, 0xB000, 0x0000
# self.NumberOfRemainingSuboperations
# self.NumberOfCompletedSuboperations
# self.NumberOfFailedSuboperations
# self.NumberOfWarningSuboperations
@property
def Priority(self):
"""Return the *Priority*."""
return self._priority
@Priority.setter
def Priority(self, value):
"""Set the *Priority*."""
if value in [0, 1, 2]:
self._priority = value
else:
LOGGER.warning("Attempted to set C-FIND Priority parameter to an "
"invalid value")
raise ValueError("Priority must be 0, 1, or 2")
@property
def Identifier(self):
"""Return the *Identifier*."""
return self._dataset_variant
@Identifier.setter
def Identifier(self, value):
"""Set the *Identifier*."""
self._dataset_variant = (value, 'Identifier')
@property
def NumberOfRemainingSuboperations(self):
"""Return the *Number of Remaining Suboperations*."""
return self._number_of_remaining_suboperations
@NumberOfRemainingSuboperations.setter
def NumberOfRemainingSuboperations(self, value):
"""Set the *Number of Remaining Suboperations*."""
if isinstance(value, int):
if value >= 0:
self._number_of_remaining_suboperations = value
else:
raise ValueError("Number of Remaining Suboperations must be "
"greater than or equal to 0")
elif value is None:
self._number_of_remaining_suboperations = value
else:
raise TypeError("Number of Remaining Suboperations must be an int")
@property
def NumberOfCompletedSuboperations(self):
"""Return the *Number of Completed Suboperations*."""
return self._number_of_completed_suboperations
@NumberOfCompletedSuboperations.setter
def NumberOfCompletedSuboperations(self, value):
"""Set the *Number of Completed Suboperations*."""
if isinstance(value, int):
if value >= 0:
self._number_of_completed_suboperations = value
else:
raise ValueError("Number of Completed Suboperations must be "
"greater than or equal to 0")
elif value is None:
self._number_of_completed_suboperations = value
else:
raise TypeError("Number of Completed Suboperations must be an int")
@property
def NumberOfFailedSuboperations(self):
"""Return the *Number of Failed Suboperations*."""
return self._number_of_failed_suboperations
@NumberOfFailedSuboperations.setter
def NumberOfFailedSuboperations(self, value):
"""Set the *Number of Failed Suboperations*."""
if isinstance(value, int):
if value >= 0:
self._number_of_failed_suboperations = value
else:
raise ValueError("Number of Failed Suboperations must be "
"greater than or equal to 0")
elif value is None:
self._number_of_failed_suboperations = value
else:
raise TypeError("Number of Failed Suboperations must be an int")
@property
def NumberOfWarningSuboperations(self):
"""Return the *Number of Warning Suboperations*."""
return self._number_of_warning_suboperations
@NumberOfWarningSuboperations.setter
def NumberOfWarningSuboperations(self, value):
"""Set the *Number of Warning Suboperations*."""
if isinstance(value, int):
if value >= 0:
self._number_of_warning_suboperations = value
else:
raise ValueError("Number of Warning Suboperations must be "
"greater than or equal to 0")
elif value is None:
self._number_of_warning_suboperations = value
else:
raise TypeError("Number of Warning Suboperations must be an int")
class C_MOVE(DIMSEPrimitive):
"""Represents a C-MOVE primitive.
+-------------------------------+---------+----------+
| Parameter | Req/ind | Rsp/conf |
+===============================+=========+==========+
| Message ID | M | U |
+-------------------------------+---------+----------+
| Message ID Being Responded To | \- | M |
+-------------------------------+---------+----------+
| Affected SOP Class UID | M | U(=) |
+-------------------------------+---------+----------+
| Priority | M | \- |
+-------------------------------+---------+----------+
| Move Destination | M | \- |
+-------------------------------+---------+----------+
| Identifier | M | U |
+-------------------------------+---------+----------+
| Status | \- | M |
+-------------------------------+---------+----------+
| Number of Remaining Sub-ops | \- | C |
+-------------------------------+---------+----------+
| Number of Completed Sub-ops | \- | C |
+-------------------------------+---------+----------+
| Number of Failed Sub-ops | \- | C |
+-------------------------------+---------+----------+
| Number of Warning Sub-ops | \- | C |
+-------------------------------+---------+----------+
| Offending Element | \- | C |
+-------------------------------+---------+----------+
| Error Comment | \- | C |
+-------------------------------+---------+----------+
| (=) - The value of the parameter is equal to the value of the parameter
in the column to the left
| C - The parameter is conditional.
| M - Mandatory
| MF - Mandatory with a fixed value
| U - The use of this parameter is a DIMSE service user option
| UF - User option with a fixed value
Attributes
----------
MessageID : int
Identifies the operation and is used to distinguish this
operation from other notifications or operations that may be in
progress. No two identical values for the Message ID shall be used for
outstanding operations.
MessageIDBeingRespondedTo : int
The Message ID of the operation request/indication to which
this response/confirmation applies.
AffectedSOPClassUID : pydicom.uid.UID, bytes or str
        For the request/indication this specifies the SOP Class against
        which the move is to be performed. If included in the
        response/confirmation, it shall be equal to the value in the
        request/indication
    Priority : int
        The priority of the C-MOVE operation. It shall be one of the
following:
* 0: Medium
* 1: High
* 2: Low (Default)
MoveDestination : bytes or str
Specifies the DICOM AE Title of the destination DICOM AE to
which the C-STORE sub-operations are being performed.
Identifier : io.BytesIO
The pydicom Dataset containing the list of Attributes to be
matched against the values of Attributes of known composite SOP
Instances of the performing DIMSE service-user, encoded as a BytesIO
object. For the list of allowed Attributes and the rules defining their
usage see the section corresponding to the service class in the DICOM
Standard, Part 4.
Status : int
The error or success notification of the operation.
NumberOfRemainingSuboperations : int
The number of remaining C-STORE sub-operations to be invoked
by this C-MOVE operation. It may be included in any response and shall
be included if the status is Pending
NumberOfCompletedSuboperations : int
The number of C-STORE sub-operations that have completed
successfully. It may be included in any response and shall be included
if the status is Pending
NumberOfFailedSuboperations : int
The number of C-STORE sub-operations that have failed. It may
be included in any response and shall be included if the status is
Pending
NumberOfWarningSuboperations : int
The number of C-STORE operations that generated Warning
responses. It may be included in any response and shall be included if
the status is Pending
OffendingElement : list of int or None
An optional status related field containing a list of the
elements in which an error was detected.
ErrorComment : str or None
An optional status related field containing a text
description of the error detected. 64 characters maximum.
"""
STATUS_OPTIONAL_KEYWORDS = (
'ErrorComment', 'OffendingElement', 'NumberOfRemainingSuboperations',
'NumberOfCompletedSuboperations', 'NumberOfFailedSuboperations',
'NumberOfWarningSuboperations'
)
REQUEST_KEYWORDS = (
'MessageID', 'AffectedSOPClassUID', 'Priority', 'Identifier',
'MoveDestination'
)
def __init__(self):
# Variable names need to match the corresponding DICOM Element keywords
# in order for the DIMSE Message classes to be built correctly.
# Changes to the variable names can be made provided the DIMSEMessage()
# class' message_to_primitive() and primitive_to_message() methods
# are also changed
self.MessageID = None
self.MessageIDBeingRespondedTo = None
self.AffectedSOPClassUID = None
self.Priority = 0x02
self.MoveDestination = None
self.Identifier = None
self.Status = None
self.NumberOfRemainingSuboperations = None
self.NumberOfCompletedSuboperations = None
self.NumberOfFailedSuboperations = None
self.NumberOfWarningSuboperations = None
        # Optional Command Set elements used with specific Status values
# For Failure statuses 0xA900
self.OffendingElement = None
# For Failure statuses 0xA801, 0xA701, 0xA702, 0x0122, 0xA900, 0xCxxx
# 0x0124
self.ErrorComment = None
@property
def Priority(self):
"""Return the *Priority*."""
return self._priority
@Priority.setter
def Priority(self, value):
"""Set the *Priority*."""
if value in [0, 1, 2]:
self._priority = value
else:
LOGGER.warning("Attempted to set C-FIND Priority parameter to an "
"invalid value")
raise ValueError("Priority must be 0, 1, or 2")
@property
def MoveDestination(self):
"""Return the *Move Destination*."""
return self._move_destination
@MoveDestination.setter
def MoveDestination(self, value):
"""Set the *Move Destination*.
Parameters
----------
value : str or bytes
            The Move Destination AE Title as a string or bytes object. Cannot
            be an empty string and will be truncated to 16 characters
"""
if isinstance(value, str):
value = codecs.encode(value, 'ascii')
if value is not None:
self._move_destination = validate_ae_title(value)
else:
self._move_destination = None
@property
def Identifier(self):
"""Return the *Identifier*."""
return self._dataset_variant
@Identifier.setter
def Identifier(self, value):
"""Set the *Identifier*."""
self._dataset_variant = (value, 'Identifier')
@property
def NumberOfRemainingSuboperations(self):
"""Return the *Number of Remaining Suboperations*."""
return self._number_of_remaining_suboperations
@NumberOfRemainingSuboperations.setter
def NumberOfRemainingSuboperations(self, value):
"""Set the *Number of Remaining Suboperations*."""
if isinstance(value, int):
if value >= 0:
self._number_of_remaining_suboperations = value
else:
raise ValueError("Number of Remaining Suboperations must be "
"greater than or equal to 0")
elif value is None:
self._number_of_remaining_suboperations = value
else:
raise TypeError("Number of Remaining Suboperations must be an int")
@property
def NumberOfCompletedSuboperations(self):
"""Return the *Number of Completed Suboperations*."""
return self._number_of_completed_suboperations
@NumberOfCompletedSuboperations.setter
def NumberOfCompletedSuboperations(self, value):
"""Set the *Number of Completed Suboperations*."""
if isinstance(value, int):
if value >= 0:
self._number_of_completed_suboperations = value
else:
raise ValueError("Number of Completed Suboperations must be "
"greater than or equal to 0")
elif value is None:
self._number_of_completed_suboperations = value
else:
raise TypeError("Number of Completed Suboperations must be an int")
@property
def NumberOfFailedSuboperations(self):
"""Return the *Number of Failed Suboperations*."""
return self._number_of_failed_suboperations
@NumberOfFailedSuboperations.setter
def NumberOfFailedSuboperations(self, value):
"""Set the *Number of Failed Suboperations*."""
if isinstance(value, int):
if value >= 0:
self._number_of_failed_suboperations = value
else:
raise ValueError("Number of Failed Suboperations must be "
"greater than or equal to 0")
elif value is None:
self._number_of_failed_suboperations = value
else:
raise TypeError("Number of Failed Suboperations must be an int")
@property
def NumberOfWarningSuboperations(self):
"""Return the *Number of Warning Suboperations*."""
return self._number_of_warning_suboperations
@NumberOfWarningSuboperations.setter
def NumberOfWarningSuboperations(self, value):
"""Set the *Number of Warning Suboperations*."""
if isinstance(value, int):
if value >= 0:
self._number_of_warning_suboperations = value
else:
raise ValueError("Number of Warning Suboperations must be "
"greater than or equal to 0")
elif value is None:
self._number_of_warning_suboperations = value
else:
raise TypeError("Number of Warning Suboperations must be an int")
class C_ECHO(DIMSEPrimitive):
"""Represents a C-ECHO primitive.
+-------------------------------+---------+----------+
| Parameter | Req/ind | Rsp/conf |
+===============================+=========+==========+
| Message ID | M | U |
+-------------------------------+---------+----------+
| Message ID Being Responded To | \- | M |
+-------------------------------+---------+----------+
| Affected SOP Class UID | M | U(=) |
+-------------------------------+---------+----------+
| Status | \- | M |
+-------------------------------+---------+----------+
| Error Comment | \- | C |
+-------------------------------+---------+----------+
| (=) - The value of the parameter is equal to the value of the parameter
in the column to the left
| C - The parameter is conditional.
| M - Mandatory
| MF - Mandatory with a fixed value
| U - The use of this parameter is a DIMSE service user option
| UF - User option with a fixed value
Attributes
----------
MessageID : int or None
Identifies the operation and is used to distinguish this
operation from other notifications or operations that may be in
progress. No two identical values for the Message ID shall be used for
outstanding operations.
MessageIDBeingRespondedTo : int or None
The Message ID of the operation request/indication to which this
response/confirmation applies.
AffectedSOPClassUID : pydicom.uid.UID, bytes or str or None
        For the request/indication this specifies the SOP Class of the
        verification (the Verification SOP Class). If included in the
        response/confirmation, it shall be equal to the value in the
        request/indication
Status : int or None
The error or success notification of the operation.
ErrorComment : str or None
An optional status related field containing a text description
of the error detected. 64 characters maximum.
"""
STATUS_OPTIONAL_KEYWORDS = ('ErrorComment', )
REQUEST_KEYWORDS = ('MessageID', 'AffectedSOPClassUID')
def __init__(self):
# Variable names need to match the corresponding DICOM Element keywords
# in order for the DIMSE Message classes to be built correctly.
# Changes to the variable names can be made provided the DIMSEMessage()
# class' message_to_primitive() and primitive_to_message() methods
# are also changed
self.MessageID = None
self.MessageIDBeingRespondedTo = None
self.AffectedSOPClassUID = None
self.Status = None
# (Optional) for Failure status 0x0122
self.ErrorComment = None
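# Minimal request sketch: per REQUEST_KEYWORDS only the Message ID and the
# Affected SOP Class UID are required. The UID shown is the standard
# Verification SOP Class.
#
#     >>> echo = C_ECHO()
#     >>> echo.MessageID = 1
#     >>> echo.AffectedSOPClassUID = '1.2.840.10008.1.1'
#     >>> echo.is_valid_request
#     True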
class C_CANCEL(object):
"""Represents a C-CANCEL primitive.
+-------------------------------+---------+
| Parameter | Req/ind |
+===============================+=========+
| Message ID Being Responded To | M |
+-------------------------------+---------+
| (=) - The value of the parameter is equal to the value of the parameter in
the column to the left
| C - The parameter is conditional.
| M - Mandatory
| MF - Mandatory with a fixed value
| U - The use of this parameter is a DIMSE service user option
| UF - User option with a fixed value
Attributes
----------
MessageIDBeingRespondedTo : int
The Message ID of the operation request/indication to which this
response/confirmation applies.
References
----------
* DICOM Standard, Part 7, Section 9.3.2.3-4
"""
def __init__(self):
"""Initialise the C_CANCEL"""
# Variable names need to match the corresponding DICOM Element keywords
# in order for the DIMSE Message classes to be built correctly.
# Changes to the variable names can be made provided the DIMSEMessage()
# class' message_to_primitive() and primitive_to_message() methods
# are also changed
self.MessageIDBeingRespondedTo = None
@property
def MessageIDBeingRespondedTo(self):
"""Return the *Message ID Being Responded To*."""
return self._message_id_being_responded_to
@MessageIDBeingRespondedTo.setter
def MessageIDBeingRespondedTo(self, value):
"""Set the *Message ID Being Responded To*."""
if isinstance(value, int):
if 0 <= value < 2**16:
self._message_id_being_responded_to = value
else:
raise ValueError("Message ID Being Responded To must be "
"between 0 and 65535, inclusive")
elif value is None:
self._message_id_being_responded_to = value
else:
raise TypeError("Message ID Being Responded To must be an int")
# DIMSE-N Service Primitives
class N_EVENT_REPORT(DIMSEPrimitive):
"""Represents a N-EVENT-REPORT primitive.
+------------------------------------------+---------+----------+
| Parameter | Req/ind | Rsp/conf |
+==========================================+=========+==========+
| Message ID | M | \- |
+------------------------------------------+---------+----------+
| Message ID Being Responded To | \- | M |
+------------------------------------------+---------+----------+
| Affected SOP Class UID | M | U(=) |
+------------------------------------------+---------+----------+
| Affected SOP Instance UID | M | U(=) |
+------------------------------------------+---------+----------+
| Event Type ID | M | C(=) |
+------------------------------------------+---------+----------+
| Event Information | U | \- |
+------------------------------------------+---------+----------+
| Event Reply | \- | C |
+------------------------------------------+---------+----------+
| Status | \- | M |
+------------------------------------------+---------+----------+
| (=) - The value of the parameter is equal to the value of the parameter
in the column to the left
| C - The parameter is conditional.
| M - Mandatory
| MF - Mandatory with a fixed value
| U - The use of this parameter is a DIMSE service user option
| UF - User option with a fixed value
Attributes
----------
MessageID : int
Identifies the operation and is used to distinguish this
operation from other notifications or operations that may be in
progress. No two identical values for the Message ID shall be used for
outstanding operations.
MessageIDBeingRespondedTo : int
The Message ID of the operation request/indication to which this
response/confirmation applies.
AffectedSOPClassUID : pydicom.uid.UID, bytes or str
        For the request/indication this specifies the SOP Class of the
        SOP Instance for which the event is being reported. If included in
        the response/confirmation, it shall be equal to the value in the
        request/indication
    AffectedSOPInstanceUID : pydicom.uid.UID, bytes or str
        For the request/indication this specifies the SOP Instance for
        which the event is being reported. If included in the
        response/confirmation, it shall be equal to the value in the
        request/indication
EventTypeID : int
The type of event being reported, depends on the Service Class
specification. Shall be included if Event Reply is included.
EventInformation : io.BytesIO
Contains information the invoking DIMSE user is able to supply about
the event. An encoded DICOM Dataset containing additional Service
Class specific information related to the operation.
EventReply : io.BytesIO
Contains the optional reply to the event report. An encoded DICOM
Dataset containing additional Service Class specific information.
Status : int
The error or success notification of the operation.
"""
# Optional status element keywords other than 'Status'
STATUS_OPTIONAL_KEYWORDS = (
'AffectedSOPClassUID', 'AffectedSOPInstanceUID', 'EventTypeID',
'EventInformation', 'ErrorComment', 'ErrorID'
)
REQUEST_KEYWORDS = (
'MessageID', 'AffectedSOPClassUID', 'EventTypeID',
'AffectedSOPInstanceUID'
)
def __init__(self):
self.MessageID = None
self.MessageIDBeingRespondedTo = None
self.AffectedSOPClassUID = None
self.AffectedSOPInstanceUID = None
self.EventTypeID = None
self.EventInformation = None
self.EventReply = None
self.Status = None
# Optional status elements
self.ErrorComment = None
self.ErrorID = None
@property
def AffectedSOPInstanceUID(self):
"""Return the *Affected SOP Instance UID*."""
return self._affected_sop_instance_uid
@AffectedSOPInstanceUID.setter
def AffectedSOPInstanceUID(self, value):
"""Set the *Affected SOP Instance UID*.
Parameters
----------
value : pydicom.uid.UID, bytes or str
The value for the Affected SOP Instance UID
"""
if isinstance(value, UID):
pass
elif isinstance(value, str):
value = UID(value)
elif isinstance(value, bytes):
value = UID(value.decode('ascii'))
elif value is None:
pass
else:
raise TypeError("Affected SOP Instance UID must be a "
"pydicom.uid.UID, str or bytes")
if value is not None and not value.is_valid:
LOGGER.error("Affected SOP Instance UID is an invalid UID")
raise ValueError("Affected SOP Instance UID is an invalid UID")
self._affected_sop_instance_uid = value
@property
def EventInformation(self):
"""Return the *Event Information*."""
return self._dataset_variant
@EventInformation.setter
def EventInformation(self, value):
"""Set the *Event Information*."""
self._dataset_variant = (value, 'EventInformation')
@property
def EventReply(self):
"""Return the *Event Reply*."""
return self._dataset_variant
@EventReply.setter
def EventReply(self, value):
"""Set the *Event Reply*."""
self._dataset_variant = (value, 'EventReply')
@property
def EventTypeID(self):
"""Return the *Event Type ID*."""
return self._event_type_id
@EventTypeID.setter
def EventTypeID(self, value):
"""Set the *Event Type ID*."""
if isinstance(value, int) or value is None:
self._event_type_id = value
else:
raise TypeError("'N_EVENT_REPORT.EventTypeID' must be an int.")
class N_GET(DIMSEPrimitive):
"""Represents an N-GET primitive.
+------------------------------------------+---------+----------+
| Parameter | Req/ind | Rsp/conf |
+==========================================+=========+==========+
| Message ID | M | \- |
+------------------------------------------+---------+----------+
| Message ID Being Responded To | \- | M |
+------------------------------------------+---------+----------+
| Requested SOP Class UID | M | \- |
+------------------------------------------+---------+----------+
| Requested SOP Instance UID | M | \- |
+------------------------------------------+---------+----------+
| Attribute Identifier List | U | \- |
+------------------------------------------+---------+----------+
| Affected SOP Class UID | \- | U |
+------------------------------------------+---------+----------+
| Affected SOP Instance UID | \- | U |
+------------------------------------------+---------+----------+
| Attribute List | \- | C |
+------------------------------------------+---------+----------+
| Status | \- | M |
+------------------------------------------+---------+----------+
| (=) - The value of the parameter is equal to the value of the parameter
in the column to the left
| C - The parameter is conditional.
| M - Mandatory
| MF - Mandatory with a fixed value
| U - The use of this parameter is a DIMSE service user option
| UF - User option with a fixed value
Attributes
----------
MessageID : int
Identifies the operation and is used to distinguish this
operation from other notifications or operations that may be in
progress. No two identical values for the Message ID shall be used for
outstanding operations.
MessageIDBeingRespondedTo : int
The Message ID of the operation request/indication to which this
response/confirmation applies.
RequestedSOPClassUID : pydicom.uid.UID, bytes or str
The UID of the SOP Class for which attribute values are to be
retrieved.
RequestedSOPInstanceUID : pydicom.uid.UID, bytes or str
The SOP Instance for which attribute values are to be retrieved.
AttributeIdentifierList : list of pydicom.tag.Tag
A list of attribute tags to be sent to the peer.
AffectedSOPClassUID : pydicom.uid.UID, bytes or str
The SOP Class UID of the SOP Instance for which the attributes were
retrieved.
AffectedSOPInstanceUID : pydicom.uid.UID, bytes or str
The SOP Instance UID of the SOP Instance for which the attributes were
retrieved.
AttributeList : pydicom.dataset.Dataset
A dataset containing elements matching those supplied in
AttributeIdentifierList.
Status : int
The error or success notification of the operation.
"""
STATUS_OPTIONAL_KEYWORDS = ('ErrorComment', 'ErrorID', )
REQUEST_KEYWORDS = (
'MessageID', 'RequestedSOPClassUID', 'RequestedSOPInstanceUID'
)
def __init__(self):
self.MessageID = None
self.MessageIDBeingRespondedTo = None
self.RequestedSOPClassUID = None
self.RequestedSOPInstanceUID = None
self.AttributeIdentifierList = None
self.AffectedSOPClassUID = None
self.AffectedSOPInstanceUID = None
self.AttributeList = None
self.Status = None
# (Optional) elements for specific status values
self.ErrorComment = None
self.ErrorID = None
@property
def AffectedSOPInstanceUID(self):
"""Return the *Affected SOP Instance UID*."""
return self._affected_sop_instance_uid
@AffectedSOPInstanceUID.setter
def AffectedSOPInstanceUID(self, value):
"""Set the *Affected SOP Instance UID*.
Parameters
----------
value : pydicom.uid.UID, bytes or str
The value for the Affected SOP Instance UID
"""
if isinstance(value, UID):
pass
elif isinstance(value, str):
value = UID(value)
elif isinstance(value, bytes):
value = UID(value.decode('ascii'))
elif value is None:
pass
else:
raise TypeError("Affected SOP Instance UID must be a "
"pydicom.uid.UID, str or bytes")
if value is not None and not value.is_valid:
LOGGER.error("Affected SOP Instance UID is an invalid UID")
raise ValueError("Affected SOP Instance UID is an invalid UID")
self._affected_sop_instance_uid = value
@property
def AttributeIdentifierList(self):
"""Return the value of (0000,1005) *Attribute Identifier List*."""
return self._attribute_identifier_list
@AttributeIdentifierList.setter
def AttributeIdentifierList(self, value):
"""Set the value of (0000,1005) *Attribute Identifier List*.
Parameters
----------
value : list of pydicom.tag.Tag
A list of pydicom Tags or any values acceptable for creating a new
pydicom Tag object.
"""
if value:
if not isinstance(value, (list, MutableSequence)):
value = [value]
try:
self._attribute_identifier_list = [Tag(tag) for tag in value]
except (TypeError, ValueError):
raise ValueError(
"Attribute Identifier List must be a list of pydicom Tags"
)
elif value is None:
self._attribute_identifier_list = None
else:
raise ValueError(
"Attribute Identifier List must be a list of pydicom Tags"
)
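    # Sketch (tag values hypothetical): any Tag-compatible values are coerced
    # to pydicom Tags, and a single value is first wrapped into a list.
    #
    #     >>> prim = N_GET()
    #     >>> prim.AttributeIdentifierList = [0x00080016, (0x0008, 0x0018)]
    #     >>> prim.AttributeIdentifierList = 0x00100010  # single value is OK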
@property
def AttributeList(self):
"""Return the *Attribute List*."""
return self._dataset_variant
@AttributeList.setter
def AttributeList(self, value):
"""Set the *Attribute List*."""
self._dataset_variant = (value, 'AttributeList')
@property
def RequestedSOPClassUID(self):
"""Return the *Requested SOP Class UID*."""
return self._requested_sop_class_uid
@RequestedSOPClassUID.setter
def RequestedSOPClassUID(self, value):
"""Set the *Requested SOP Class UID*.
Parameters
----------
value : pydicom.uid.UID, bytes or str
The value for the Requested SOP Class UID
"""
if isinstance(value, UID):
pass
elif isinstance(value, str):
value = UID(value)
elif isinstance(value, bytes):
value = UID(value.decode('ascii'))
elif value is None:
pass
else:
raise TypeError("Requested SOP Class UID must be a "
"pydicom.uid.UID, str or bytes")
if value is not None and not value.is_valid:
LOGGER.error("Requested SOP Class UID is an invalid UID")
raise ValueError("Requested SOP Class UID is an invalid UID")
self._requested_sop_class_uid = value
@property
def RequestedSOPInstanceUID(self):
"""Return the *Requested SOP Instance UID*."""
return self._requested_sop_instance_uid
@RequestedSOPInstanceUID.setter
def RequestedSOPInstanceUID(self, value):
"""Set the *Requested SOP Instance UID*.
Parameters
----------
value : pydicom.uid.UID, bytes or str
The value for the Requested SOP Instance UID
"""
if isinstance(value, UID):
pass
elif isinstance(value, str):
value = UID(value)
elif isinstance(value, bytes):
value = UID(value.decode('ascii'))
elif value is None:
pass
else:
raise TypeError("Requested SOP Instance UID must be a "
"pydicom.uid.UID, str or bytes")
if value is not None and not value.is_valid:
LOGGER.error("Requested SOP Instance UID is an invalid UID")
raise ValueError("Requested SOP Instance UID is an invalid UID")
self._requested_sop_instance_uid = value
class N_SET(DIMSEPrimitive):
"""Represents a N-SET primitive.
+------------------------------------------+---------+----------+
| Parameter | Req/ind | Rsp/conf |
+==========================================+=========+==========+
| Message ID | M | \- |
+------------------------------------------+---------+----------+
| Message ID Being Responded To | \- | M |
+------------------------------------------+---------+----------+
| Requested SOP Class UID | M | \- |
+------------------------------------------+---------+----------+
| Requested SOP Instance UID | M | \- |
+------------------------------------------+---------+----------+
| Modification List | M | \- |
+------------------------------------------+---------+----------+
| Attribute List | \- | U |
+------------------------------------------+---------+----------+
| Affected SOP Class UID | \- | U |
+------------------------------------------+---------+----------+
| Affected SOP Instance UID | \- | U |
+------------------------------------------+---------+----------+
| Status | \- | M |
+------------------------------------------+---------+----------+
| (=) - The value of the parameter is equal to the value of the parameter
in the column to the left
| C - The parameter is conditional.
| M - Mandatory
| MF - Mandatory with a fixed value
| U - The use of this parameter is a DIMSE service user option
| UF - User option with a fixed value
Attributes
----------
MessageID : int
Identifies the operation and is used to distinguish this
operation from other notifications or operations that may be in
progress. No two identical values for the Message ID shall be used for
outstanding operations.
MessageIDBeingRespondedTo : int
The Message ID of the operation request/indication to which this
response/confirmation applies.
RequestedSOPClassUID : pydicom.uid.UID, bytes or str
The UID of the SOP Class for which attribute values are to be
modified.
RequestedSOPInstanceUID : pydicom.uid.UID, bytes or str
The SOP Instance for which attribute values are to be modified.
ModificationList : pydicom.dataset.Dataset
A dataset containing the attributes and values that are to be used
to modify the SOP Instance.
AttributeList : pydicom.dataset.Dataset
A dataset containing the attributes and values that were used to
modify the SOP Instance.
AffectedSOPClassUID : pydicom.uid.UID, bytes or str
The SOP Class UID of the modified SOP Instance.
AffectedSOPInstanceUID : pydicom.uid.UID, bytes or str
The SOP Instance UID of the modified SOP Instance.
Status : int
The error or success notification of the operation.
"""
STATUS_OPTIONAL_KEYWORDS = (
'ErrorComment', 'ErrorID', 'AttributeIdentifierList'
)
REQUEST_KEYWORDS = (
'MessageID', 'RequestedSOPClassUID', 'RequestedSOPInstanceUID',
'ModificationList'
)
def __init__(self):
self.MessageID = None
self.MessageIDBeingRespondedTo = None
self.RequestedSOPClassUID = None
self.RequestedSOPInstanceUID = None
self.ModificationList = None
self.AttributeList = None
self.AffectedSOPClassUID = None
self.AffectedSOPInstanceUID = None
self.Status = None
# Optional
self.ErrorComment = None
self.ErrorID = None
self.AttributeIdentifierList = None
@property
def AffectedSOPInstanceUID(self):
"""Return the *Affected SOP Instance UID*."""
return self._affected_sop_instance_uid
@AffectedSOPInstanceUID.setter
def AffectedSOPInstanceUID(self, value):
"""Set the *Affected SOP Instance UID*.
Parameters
----------
value : pydicom.uid.UID, bytes or str
The value for the Affected SOP Instance UID
"""
if isinstance(value, UID):
pass
elif isinstance(value, str):
value = UID(value)
elif isinstance(value, bytes):
value = UID(value.decode('ascii'))
elif value is None:
pass
else:
raise TypeError("Affected SOP Instance UID must be a "
"pydicom.uid.UID, str or bytes")
if value is not None and not value.is_valid:
LOGGER.error("Affected SOP Instance UID is an invalid UID")
raise ValueError("Affected SOP Instance UID is an invalid UID")
self._affected_sop_instance_uid = value
@property
def AttributeList(self):
"""Return the *Attribute List*."""
return self._dataset_variant
@AttributeList.setter
def AttributeList(self, value):
"""Set the *Attribute List*."""
self._dataset_variant = (value, 'AttributeList')
@property
def ModificationList(self):
"""Return the *Modification List*."""
return self._dataset_variant
@ModificationList.setter
def ModificationList(self, value):
"""Set the *Modification List*."""
self._dataset_variant = (value, 'ModificationList')
@property
def RequestedSOPClassUID(self):
"""Return the *Requested SOP Class UID*."""
return self._requested_sop_class_uid
@RequestedSOPClassUID.setter
def RequestedSOPClassUID(self, value):
"""Set the *Requested SOP Class UID*.
Parameters
----------
value : pydicom.uid.UID, bytes or str
The value for the Requested SOP Class UID
"""
if isinstance(value, UID):
pass
elif isinstance(value, str):
value = UID(value)
elif isinstance(value, bytes):
value = UID(value.decode('ascii'))
elif value is None:
pass
else:
raise TypeError("Requested SOP Class UID must be a "
"pydicom.uid.UID, str or bytes")
if value is not None and not value.is_valid:
LOGGER.error("Requested SOP Class UID is an invalid UID")
raise ValueError("Requested SOP Class UID is an invalid UID")
self._requested_sop_class_uid = value
@property
def RequestedSOPInstanceUID(self):
"""Return the *Requested SOP Instance UID*."""
return self._requested_sop_instance_uid
@RequestedSOPInstanceUID.setter
def RequestedSOPInstanceUID(self, value):
"""Set the *Requested SOP Instance UID*.
Parameters
----------
value : pydicom.uid.UID, bytes or str
The value for the Requested SOP Instance UID
"""
if isinstance(value, UID):
pass
elif isinstance(value, str):
value = UID(value)
elif isinstance(value, bytes):
value = UID(value.decode('ascii'))
elif value is None:
pass
else:
raise TypeError("Requested SOP Instance UID must be a "
"pydicom.uid.UID, str or bytes")
if value is not None and not value.is_valid:
LOGGER.error("Requested SOP Instance UID is an invalid UID")
raise ValueError("Requested SOP Instance UID is an invalid UID")
self._requested_sop_instance_uid = value
class N_ACTION(DIMSEPrimitive):
"""Represents a N-ACTION primitive.
+------------------------------------------+---------+----------+
| Parameter | Req/ind | Rsp/conf |
+==========================================+=========+==========+
| Message ID | M | \- |
+------------------------------------------+---------+----------+
| Message ID Being Responded To | \- | M |
+------------------------------------------+---------+----------+
| Requested SOP Class UID | M | \- |
+------------------------------------------+---------+----------+
| Requested SOP Instance UID | M | \- |
+------------------------------------------+---------+----------+
| Action Type ID | M | C(=) |
+------------------------------------------+---------+----------+
| Action Information | U | \- |
+------------------------------------------+---------+----------+
| Affected SOP Class UID | \- | U |
+------------------------------------------+---------+----------+
| Affected SOP Instance UID | \- | U |
+------------------------------------------+---------+----------+
| Action Reply | \- | C |
+------------------------------------------+---------+----------+
| Status | \- | M |
+------------------------------------------+---------+----------+
| (=) - The value of the parameter is equal to the value of the parameter
in the column to the left
| C - The parameter is conditional.
| M - Mandatory
| MF - Mandatory with a fixed value
| U - The use of this parameter is a DIMSE service user option
| UF - User option with a fixed value
Attributes
----------
MessageID : int
Identifies the operation and is used to distinguish this
operation from other notifications or operations that may be in
progress. No two identical values for the Message ID shall be used for
outstanding operations.
MessageIDBeingRespondedTo : int
The Message ID of the operation request/indication to which this
response/confirmation applies.
RequestedSOPClassUID : pydicom.uid.UID, bytes or str
The SOP Class for which the action is to be performed.
RequestedSOPInstanceUID : pydicom.uid.UID, bytes or str
The SOP Instance for which the action is to be performed.
ActionTypeID : int
The type of action that is to be performed.
ActionInformation : pydicom.dataset.Dataset
Extra information required to perform the action.
AffectedSOPClassUID : pydicom.uid.UID, bytes or str
For the request/indication this specifies the SOP Class for
storage. If included in the response/confirmation, it shall be equal
to the value in the request/indication
AffectedSOPInstanceUID : pydicom.uid.UID, bytes or str
For the request/indication this specifies the SOP Instance for
storage. If included in the response/confirmation, it shall be equal
to the value in the request/indication
ActionReply : pydicom.dataset.Dataset
The reply to the action.
Status : int
The error or success notification of the operation.
"""
STATUS_OPTIONAL_KEYWORDS = (
'ErrorComment', 'ErrorID', 'AttributeIdentifierList'
)
REQUEST_KEYWORDS = (
'MessageID', 'RequestedSOPClassUID', 'RequestedSOPInstanceUID',
'ActionTypeID'
)
def __init__(self):
self.MessageID = None
self.MessageIDBeingRespondedTo = None
self.RequestedSOPClassUID = None
self.RequestedSOPInstanceUID = None
self.ActionTypeID = None
self.ActionInformation = None
self.AffectedSOPClassUID = None
self.AffectedSOPInstanceUID = None
self.ActionReply = None
self.Status = None
# Optional status elements
self.ErrorComment = None
self.ErrorID = None
@property
def AffectedSOPInstanceUID(self):
"""Return the *Affected SOP Instance UID*."""
return self._affected_sop_instance_uid
@AffectedSOPInstanceUID.setter
def AffectedSOPInstanceUID(self, value):
"""Set the *Affected SOP Instance UID*.
Parameters
----------
value : pydicom.uid.UID, bytes or str
The value for the Affected SOP Instance UID
"""
if isinstance(value, UID):
pass
elif isinstance(value, str):
value = UID(value)
elif isinstance(value, bytes):
value = UID(value.decode('ascii'))
elif value is None:
pass
else:
raise TypeError("Affected SOP Instance UID must be a "
"pydicom.uid.UID, str or bytes")
if value is not None and not value.is_valid:
LOGGER.error("Affected SOP Instance UID is an invalid UID")
raise ValueError("Affected SOP Instance UID is an invalid UID")
self._affected_sop_instance_uid = value
@property
def ActionInformation(self):
"""Return the *Action Information*."""
return self._dataset_variant
@ActionInformation.setter
def ActionInformation(self, value):
"""Set the *Action Information*."""
self._dataset_variant = (value, 'ActionInformation')
@property
def ActionReply(self):
"""Return the *Action Reply*."""
return self._dataset_variant
@ActionReply.setter
def ActionReply(self, value):
"""Set the *Action Reply List*."""
self._dataset_variant = (value, 'ActionReply')
@property
def RequestedSOPClassUID(self):
"""Return the *Requested SOP Class UID*."""
return self._requested_sop_class_uid
@RequestedSOPClassUID.setter
def RequestedSOPClassUID(self, value):
"""Set the *Requested SOP Class UID*.
Parameters
----------
value : pydicom.uid.UID, bytes or str
The value for the Requested SOP Class UID
"""
if isinstance(value, UID):
pass
elif isinstance(value, str):
value = UID(value)
elif isinstance(value, bytes):
value = UID(value.decode('ascii'))
elif value is None:
pass
else:
raise TypeError("Requested SOP Class UID must be a "
"pydicom.uid.UID, str or bytes")
if value is not None and not value.is_valid:
LOGGER.error("Requested SOP Class UID is an invalid UID")
raise ValueError("Requested SOP Class UID is an invalid UID")
self._requested_sop_class_uid = value
@property
def RequestedSOPInstanceUID(self):
"""Return the *Requested SOP Instance UID*."""
return self._requested_sop_instance_uid
@RequestedSOPInstanceUID.setter
def RequestedSOPInstanceUID(self, value):
"""Set the *Requested SOP Instance UID*.
Parameters
----------
value : pydicom.uid.UID, bytes or str
The value for the Requested SOP Instance UID
"""
if isinstance(value, UID):
pass
elif isinstance(value, str):
value = UID(value)
elif isinstance(value, bytes):
value = UID(value.decode('ascii'))
elif value is None:
pass
else:
raise TypeError("Requested SOP Instance UID must be a "
"pydicom.uid.UID, str or bytes")
if value is not None and not value.is_valid:
LOGGER.error("Requested SOP Instance UID is an invalid UID")
raise ValueError("Requested SOP Instance UID is an invalid UID")
self._requested_sop_instance_uid = value
@property
def ActionTypeID(self):
"""Return the *Action Type ID*."""
return self._action_type_id
@ActionTypeID.setter
def ActionTypeID(self, value):
"""Set the *Action Type ID*."""
if isinstance(value, int) or value is None:
self._action_type_id = value
else:
raise TypeError("'N_ACTION.ActionTypeID' must be an int.")
class N_CREATE(DIMSEPrimitive):
"""Represents a N-CREATE primitive.
+------------------------------------------+---------+----------+
| Parameter | Req/ind | Rsp/conf |
+==========================================+=========+==========+
| Message ID | M | \- |
+------------------------------------------+---------+----------+
+------------------------------------------+---------+----------+
| Message ID Being Responded To | \- | M |
| Affected SOP Class UID | M | U(=) |
+------------------------------------------+---------+----------+
| Affected SOP Instance UID | U | C |
+------------------------------------------+---------+----------+
| Affected SOP Instance UID | U | U |
+------------------------------------------+---------+----------+
| Status | \- | M |
+------------------------------------------+---------+----------+
| (=) - The value of the parameter is equal to the value of the parameter
in the column to the left
| C - The parameter is conditional.
| M - Mandatory
| MF - Mandatory with a fixed value
| U - The use of this parameter is a DIMSE service user option
| UF - User option with a fixed value
Attributes
----------
MessageID : int
Identifies the operation and is used to distinguish this
operation from other notifications or operations that may be in
progress. No two identical values for the Message ID shall be used for
outstanding operations.
MessageIDBeingRespondedTo : int
The Message ID of the operation request/indication to which this
response/confirmation applies.
AffectedSOPClassUID : pydicom.uid.UID, bytes or str
For the request/indication this specifies the SOP Class for
storage. If included in the response/confirmation, it shall be equal
to the value in the request/indication
AffectedSOPInstanceUID : pydicom.uid.UID, bytes or str
For the request/indication this specifies the SOP Instance for
storage. If included in the response/confirmation, it shall be equal
to the value in the request/indication
AttributeList : pydicom.dataset.Dataset
A set of attributes and values that are to be assigned to the new
SOP Instance.
Status : int
The error or success notification of the operation. It shall be
one of the following values:
"""
STATUS_OPTIONAL_KEYWORDS = ('ErrorComment', 'ErrorID', )
REQUEST_KEYWORDS = ('MessageID', 'AffectedSOPClassUID')
def __init__(self):
self.MessageID = None
self.MessageIDBeingRespondedTo = None
self.AffectedSOPClassUID = None
self.AffectedSOPInstanceUID = None
self.AttributeList = None
self.Status = None
# Optional elements
self.ErrorComment = None
self.ErrorID = None
@property
def AffectedSOPInstanceUID(self):
"""Return the *Affected SOP Instance UID*."""
return self._affected_sop_instance_uid
@AffectedSOPInstanceUID.setter
def AffectedSOPInstanceUID(self, value):
"""Set the *Affected SOP Instance UID*.
Parameters
----------
value : pydicom.uid.UID, bytes or str
The value for the Affected SOP Instance UID
"""
if isinstance(value, UID):
pass
elif isinstance(value, str):
value = UID(value)
elif isinstance(value, bytes):
value = UID(value.decode('ascii'))
elif value is None:
pass
else:
raise TypeError("Affected SOP Instance UID must be a "
"pydicom.uid.UID, str or bytes")
if value is not None and not value.is_valid:
LOGGER.error("Affected SOP Instance UID is an invalid UID")
raise ValueError("Affected SOP Instance UID is an invalid UID")
self._affected_sop_instance_uid = value
@property
def AttributeList(self):
"""Return the *Attribute List*."""
return self._dataset_variant
@AttributeList.setter
def AttributeList(self, value):
"""Set the *Attribute List*."""
self._dataset_variant = (value, 'AttributeList')
class N_DELETE(DIMSEPrimitive):
"""Represents a N-DELETE primitive.
+------------------------------------------+---------+----------+
| Parameter | Req/ind | Rsp/conf |
+==========================================+=========+==========+
| Message ID | M | \- |
+------------------------------------------+---------+----------+
| Message ID Being Responded To | \- | M |
+------------------------------------------+---------+----------+
| Requested SOP Class UID | M | \- |
+------------------------------------------+---------+----------+
| Requested SOP Instance UID | M | \- |
+------------------------------------------+---------+----------+
| Affected SOP Class UID | \- | U |
+------------------------------------------+---------+----------+
| Affected SOP Instance UID | \- | U |
+------------------------------------------+---------+----------+
| Status | \- | M |
+------------------------------------------+---------+----------+
| (=) - The value of the parameter is equal to the value of the parameter
in the column to the left
| C - The parameter is conditional.
| M - Mandatory
| MF - Mandatory with a fixed value
| U - The use of this parameter is a DIMSE service user option
| UF - User option with a fixed value
Attributes
----------
MessageID : int
Identifies the operation and is used to distinguish this
operation from other notifications or operations that may be in
progress. No two identical values for the Message ID shall be used for
outstanding operations.
MessageIDBeingRespondedTo : int
The Message ID of the operation request/indication to which this
response/confirmation applies.
RequestedSOPClassUID : pydicom.uid.UID, bytes or str
The UID of the SOP Class to be deleted.
RequestedSOPInstanceUID : pydicom.uid.UID, bytes or str
The SOP Instance to be deleted.
AffectedSOPClassUID : pydicom.uid.UID, bytes or str
For the request/indication this specifies the SOP Class for
storage. If included in the response/confirmation, it shall be equal
to the value in the request/indication
AffectedSOPInstanceUID : pydicom.uid.UID, bytes or str
For the request/indication this specifies the SOP Instance for
storage. If included in the response/confirmation, it shall be equal
to the value in the request/indication
Status : int
The error or success notification of the operation.
"""
STATUS_OPTIONAL_KEYWORDS = ('ErrorComment', 'ErrorID', )
REQUEST_KEYWORDS = (
'MessageID', 'RequestedSOPClassUID', 'RequestedSOPInstanceUID'
)
def __init__(self):
self.MessageID = None
self.MessageIDBeingRespondedTo = None
self.RequestedSOPClassUID = None
self.RequestedSOPInstanceUID = None
self.AffectedSOPClassUID = None
self.AffectedSOPInstanceUID = None
self.Status = None
# Optional
self.ErrorComment = None
self.ErrorID = None
@property
def AffectedSOPInstanceUID(self):
"""Return the *Affected SOP Instance UID*."""
return self._affected_sop_instance_uid
@AffectedSOPInstanceUID.setter
def AffectedSOPInstanceUID(self, value):
"""Set the *Affected SOP Instance UID*.
Parameters
----------
value : pydicom.uid.UID, bytes or str
The value for the Affected SOP Instance UID
"""
if isinstance(value, UID):
pass
elif isinstance(value, str):
value = UID(value)
elif isinstance(value, bytes):
value = UID(value.decode('ascii'))
elif value is None:
pass
else:
raise TypeError("Affected SOP Instance UID must be a "
"pydicom.uid.UID, str or bytes")
if value is not None and not value.is_valid:
LOGGER.error("Affected SOP Instance UID is an invalid UID")
raise ValueError("Affected SOP Instance UID is an invalid UID")
self._affected_sop_instance_uid = value
@property
def RequestedSOPClassUID(self):
"""Return the *Requested SOP Class UID*."""
return self._requested_sop_class_uid
@RequestedSOPClassUID.setter
def RequestedSOPClassUID(self, value):
"""Set the *Requested SOP Class UID*.
Parameters
----------
value : pydicom.uid.UID, bytes or str
The value for the Requested SOP Class UID
"""
if isinstance(value, UID):
pass
elif isinstance(value, str):
value = UID(value)
elif isinstance(value, bytes):
value = UID(value.decode('ascii'))
elif value is None:
pass
else:
raise TypeError("Requested SOP Class UID must be a "
"pydicom.uid.UID, str or bytes")
if value is not None and not value.is_valid:
LOGGER.error("Requested SOP Class UID is an invalid UID")
raise ValueError("Requested SOP Class UID is an invalid UID")
self._requested_sop_class_uid = value
@property
def RequestedSOPInstanceUID(self):
"""Return the *Requested SOP Instance UID*."""
return self._requested_sop_instance_uid
@RequestedSOPInstanceUID.setter
def RequestedSOPInstanceUID(self, value):
"""Set the *Requested SOP Instance UID*.
Parameters
----------
value : pydicom.uid.UID, bytes or str
The value for the Requested SOP Instance UID
"""
if isinstance(value, UID):
pass
elif isinstance(value, str):
value = UID(value)
elif isinstance(value, bytes):
value = UID(value.decode('ascii'))
elif value is None:
pass
else:
raise TypeError("Requested SOP Instance UID must be a "
"pydicom.uid.UID, str or bytes")
if value is not None and not value.is_valid:
LOGGER.error("Requested SOP Instance UID is an invalid UID")
raise ValueError("Requested SOP Instance UID is an invalid UID")
self._requested_sop_instance_uid = value
| 39.75777 | 80 | 0.548638 | 8,673 | 86,990 | 5.441024 | 0.041969 | 0.027506 | 0.029964 | 0.029371 | 0.8701 | 0.851409 | 0.83577 | 0.825154 | 0.810553 | 0.807947 | 0 | 0.004497 | 0.302161 | 86,990 | 2,187 | 81 | 39.775949 | 0.772869 | 0.517933 | 0 | 0.819068 | 0 | 0 | 0.153469 | 0.014308 | 0 | 0 | 0.000426 | 0.001372 | 0 | 1 | 0.125677 | false | 0.03467 | 0.009751 | 0 | 0.236186 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
c518acbdf05b6974f6fdec63d804fc54e76aaf05 | 22091 | py | Python | app/tests/api/observable_instance/test_create.py | hollyfoxx/ace2-ams-api | 08ecf3f3dc8ac9abd224465731458950d4f78b7d | ["Apache-2.0"] | null | null | null | app/tests/api/observable_instance/test_create.py | hollyfoxx/ace2-ams-api | 08ecf3f3dc8ac9abd224465731458950d4f78b7d | ["Apache-2.0"] | null | null | null | app/tests/api/observable_instance/test_create.py | hollyfoxx/ace2-ams-api | 08ecf3f3dc8ac9abd224465731458950d4f78b7d | ["Apache-2.0"] | null | null | null |
import pytest
import uuid
from fastapi import status
from tests.api.node import (
INVALID_CREATE_FIELDS,
NONEXISTENT_FIELDS,
VALID_DIRECTIVES,
VALID_TAGS,
VALID_THREAT_ACTOR,
VALID_THREATS,
)
from tests.helpers import create_alert
#
# INVALID TESTS
#
@pytest.mark.parametrize(
"key,value",
[
("alert_uuid", 123),
("alert_uuid", None),
("alert_uuid", ""),
("alert_uuid", "abc"),
("context", 123),
("context", ""),
("parent_analysis_uuid", 123),
("parent_analysis_uuid", None),
("parent_analysis_uuid", ""),
("parent_analysis_uuid", "abc"),
("performed_analysis_uuids", 123),
("performed_analysis_uuids", "abc"),
("performed_analysis_uuids", [123]),
("performed_analysis_uuids", [None]),
("performed_analysis_uuids", [""]),
("performed_analysis_uuids", ["abc"]),
("redirection_uuid", 123),
("redirection_uuid", ""),
("redirection_uuid", "abc"),
("time", None),
("time", ""),
("time", "Monday"),
("time", "2022-01-01"),
("type", 123),
("type", None),
("type", ""),
("uuid", 123),
("uuid", None),
("uuid", ""),
("uuid", "abc"),
("value", 123),
("value", None),
("value", ""),
],
)
def test_create_invalid_fields(client_valid_access_token, key, value):
create_json = {
"alert_uuid": str(uuid.uuid4()),
"parent_analysis_uuid": str(uuid.uuid4()),
"type": "test_type",
"value": "test",
}
create_json[key] = value
create = client_valid_access_token.post("/api/observable/instance/", json=[create_json])
assert create.status_code == status.HTTP_422_UNPROCESSABLE_ENTITY
assert key in create.text
@pytest.mark.parametrize(
"key,value",
INVALID_CREATE_FIELDS,
)
def test_create_invalid_node_fields(client_valid_access_token, key, value):
create_json = {
"alert_uuid": str(uuid.uuid4()),
"parent_analysis_uuid": str(uuid.uuid4()),
"type": "test_type",
"value": "test",
}
create_json[key] = value
create = client_valid_access_token.post("/api/observable/instance/", json=[create_json])
assert create.status_code == status.HTTP_422_UNPROCESSABLE_ENTITY
def test_create_duplicate_uuid(client_valid_access_token):
# Create an alert
alert_uuid, analysis_uuid = create_alert(client_valid_access_token=client_valid_access_token)
# Create an observable type
client_valid_access_token.post("/api/observable/type/", json={"value": "test_type"})
# Create an object
create_json = {
"uuid": str(uuid.uuid4()),
"alert_uuid": alert_uuid,
"parent_analysis_uuid": analysis_uuid,
"type": "test_type",
"value": "test",
}
client_valid_access_token.post("/api/observable/instance/", json=[create_json])
# Ensure you cannot create another object with the same UUID
create2 = client_valid_access_token.post("/api/observable/instance/", json=[create_json])
assert create2.status_code == status.HTTP_409_CONFLICT
def test_create_nonexistent_alert(client_valid_access_token):
# Create an alert
_, analysis_uuid = create_alert(client_valid_access_token=client_valid_access_token)
alert_uuid = str(uuid.uuid4())
# Create an observable type
client_valid_access_token.post("/api/observable/type/", json={"value": "test_type"})
# Ensure you cannot create an observable instance with a nonexistent alert
create_json = {
"alert_uuid": alert_uuid,
"parent_analysis_uuid": analysis_uuid,
"type": "test_type",
"value": "test",
}
create = client_valid_access_token.post("/api/observable/instance/", json=[create_json])
assert create.status_code == status.HTTP_404_NOT_FOUND
assert alert_uuid in create.text
def test_create_nonexistent_analysis(client_valid_access_token):
# Create an alert
alert_uuid, _ = create_alert(client_valid_access_token=client_valid_access_token)
analysis_uuid = str(uuid.uuid4())
# Create an observable type
client_valid_access_token.post("/api/observable/type/", json={"value": "test_type"})
# Ensure you cannot create an observable instance with a nonexistent analysis
create_json = {
"alert_uuid": alert_uuid,
"parent_analysis_uuid": analysis_uuid,
"type": "test_type",
"value": "test",
}
create = client_valid_access_token.post("/api/observable/instance/", json=[create_json])
assert create.status_code == status.HTTP_404_NOT_FOUND
assert analysis_uuid in create.text
def test_create_nonexistent_performed_analysis(client_valid_access_token):
# Create an alert
alert_uuid, analysis_uuid = create_alert(client_valid_access_token=client_valid_access_token)
# Create an observable type
client_valid_access_token.post("/api/observable/type/", json={"value": "test_type"})
# Ensure you cannot create an observable instance with a nonexistent performed analysis
performed_analysis_uuid = str(uuid.uuid4())
create_json = {
"alert_uuid": alert_uuid,
"parent_analysis_uuid": analysis_uuid,
"performed_analysis_uuids": [performed_analysis_uuid],
"type": "test_type",
"value": "test",
}
create = client_valid_access_token.post("/api/observable/instance/", json=[create_json])
assert create.status_code == status.HTTP_404_NOT_FOUND
assert performed_analysis_uuid in create.text
def test_create_nonexistent_redirection(client_valid_access_token):
# Create an alert
alert_uuid, analysis_uuid = create_alert(client_valid_access_token=client_valid_access_token)
# Create an observable type
client_valid_access_token.post("/api/observable/type/", json={"value": "test_type"})
# Ensure you cannot create an observable instance with a nonexistent redirection target
redirection_uuid = str(uuid.uuid4())
create_json = {
"alert_uuid": alert_uuid,
"parent_analysis_uuid": analysis_uuid,
"redirection_uuid": redirection_uuid,
"type": "test_type",
"value": "test",
}
create = client_valid_access_token.post("/api/observable/instance/", json=[create_json])
assert create.status_code == status.HTTP_404_NOT_FOUND
assert redirection_uuid in create.text
def test_create_nonexistent_type(client_valid_access_token):
# Create an alert
alert_uuid, analysis_uuid = create_alert(client_valid_access_token=client_valid_access_token)
# Create an observable type
client_valid_access_token.post("/api/observable/type/", json={"value": "test_type"})
# Ensure you cannot create an observable instance with a nonexistent type
create_json = {
"alert_uuid": alert_uuid,
"parent_analysis_uuid": analysis_uuid,
"type": "abc",
"value": "test",
}
create = client_valid_access_token.post("/api/observable/instance/", json=[create_json])
assert create.status_code == status.HTTP_404_NOT_FOUND
assert "abc" in create.text
@pytest.mark.parametrize(
"key,value",
NONEXISTENT_FIELDS,
)
def test_create_nonexistent_node_fields(client_valid_access_token, key, value):
# Create an alert
alert_uuid, analysis_uuid = create_alert(client_valid_access_token=client_valid_access_token)
# Create an observable type
client_valid_access_token.post("/api/observable/type/", json={"value": "test_type"})
    # Ensure you cannot create an observable instance with nonexistent node fields
create_json = {
"alert_uuid": alert_uuid,
"parent_analysis_uuid": analysis_uuid,
"type": "test_type",
"value": "test",
}
create_json[key] = value
create = client_valid_access_token.post("/api/observable/instance/", json=[create_json])
assert create.status_code == status.HTTP_404_NOT_FOUND
#
# VALID TESTS
#
def test_create_bulk(client_valid_access_token):
# Create an alert
alert_uuid, analysis_uuid = create_alert(client_valid_access_token=client_valid_access_token)
# Read the alert back to get its current version
# TODO: Fix this hardcoded URL
get_alert = client_valid_access_token.get(f"http://testserver/api/alert/{alert_uuid}")
initial_alert_version = get_alert.json()["version"]
# Read the analysis back to get its current version
# TODO: Fix this hardcoded URL
get_analysis = client_valid_access_token.get(f"http://testserver/api/analysis/{analysis_uuid}")
initial_analysis_version = get_analysis.json()["version"]
# Create an observable type
client_valid_access_token.post("/api/observable/type/", json={"value": "test_type"})
# Create some observable instances
observable_instances = []
for i in range(3):
observable_uuid = str(uuid.uuid4())
observable_instances.append(
{
"alert_uuid": alert_uuid,
"parent_analysis_uuid": analysis_uuid,
"type": "test_type",
"uuid": observable_uuid,
"value": f"test{i}",
}
)
create = client_valid_access_token.post("/api/observable/instance/", json=observable_instances)
assert create.status_code == status.HTTP_201_CREATED
# Read the parent analysis back. There should be 3 discovered observable instance UUIDs.
# TODO: Fix this hardcoded URL
get_analysis = client_valid_access_token.get(f"http://testserver/api/analysis/{analysis_uuid}")
assert len(get_analysis.json()["discovered_observable_uuids"]) == 3
# Additionally, creating an observable instance should trigger the alert and analysis to get a new version.
# TODO: Fix this hardcoded URL
get_alert = client_valid_access_token.get(f"http://testserver/api/alert/{alert_uuid}")
assert get_alert.json()["version"] != initial_alert_version
assert get_analysis.json()["version"] != initial_analysis_version
@pytest.mark.parametrize(
"key,value",
[
("context", None),
("context", "test"),
("time", 1640995200),
("time", "2022-01-01T00:00:00Z"),
("time", "2022-01-01 00:00:00"),
("time", "2022-01-01 00:00:00.000000"),
("time", "2021-12-31 19:00:00-05:00"),
("uuid", str(uuid.uuid4())),
],
)
def test_create_valid_optional_fields(client_valid_access_token, key, value):
# Create an alert
alert_uuid, analysis_uuid = create_alert(client_valid_access_token=client_valid_access_token)
# Create an observable type
client_valid_access_token.post("/api/observable/type/", json={"value": "test_type"})
# Create the observable instance
create_json = {
"alert_uuid": alert_uuid,
"parent_analysis_uuid": analysis_uuid,
"type": "test_type",
"value": "test",
}
create_json[key] = value
create = client_valid_access_token.post("/api/observable/instance/", json=[create_json])
# Read it back
get = client_valid_access_token.get(create.headers["Content-Location"])
# If the test is for time, make sure that the retrieved value matches the proper UTC timestamp
if key == "time" and value:
assert get.json()[key] == "2022-01-01T00:00:00+00:00"
else:
assert get.json()[key] == value
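# (A note on the "time" parametrization above: all of those inputs denote the same
# instant. 1640995200 is the Unix timestamp for 2022-01-01 00:00:00 UTC, and
# 2021-12-31 19:00:00-05:00 is that moment written with a UTC-5 offset, which is
# why the test expects the single normalized value "2022-01-01T00:00:00+00:00".)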
def test_create_valid_performed_analysis_uuids(client_valid_access_token):
# Create an alert
alert_uuid, analysis_uuid = create_alert(client_valid_access_token=client_valid_access_token)
# Create an observable type
client_valid_access_token.post("/api/observable/type/", json={"value": "test_type"})
# Create a child analysis for the observable instance
child_analysis_uuid = str(uuid.uuid4())
analysis_create = client_valid_access_token.post("/api/analysis/", json={"uuid": child_analysis_uuid})
# Read the analysis back to get its current version
get_analysis = client_valid_access_token.get(analysis_create.headers["Content-Location"])
initial_version = get_analysis.json()["version"]
# Create the observable instance
create_json = {
"alert_uuid": alert_uuid,
"parent_analysis_uuid": analysis_uuid,
"performed_analysis_uuids": [child_analysis_uuid],
"type": "test_type",
"value": "test",
}
create = client_valid_access_token.post("/api/observable/instance/", json=[create_json])
# Read it back
get = client_valid_access_token.get(create.headers["Content-Location"])
assert get.json()["performed_analysis_uuids"] == [child_analysis_uuid]
# Read the analysis back. By creating the observable instance and setting its performed_analysis_uuids, you should
# be able to read the analysis back and see the observable instance listed as its parent_observable_uuid even
# though it was not explicitly added.
get_analysis = client_valid_access_token.get(analysis_create.headers["Content-Location"])
assert get_analysis.json()["parent_observable_uuid"] == get.json()["uuid"]
# Additionally, adding the observable instance as the parent should trigger the analysis to have a new version.
assert get_analysis.json()["version"] != initial_version
def test_create_valid_redirection(client_valid_access_token):
# Create an alert
alert_uuid, analysis_uuid = create_alert(client_valid_access_token=client_valid_access_token)
# Create an observable type
client_valid_access_token.post("/api/observable/type/", json={"value": "test_type"})
# Create an observable instance
observable_instance_uuid = str(uuid.uuid4())
create_json = {
"alert_uuid": alert_uuid,
"parent_analysis_uuid": analysis_uuid,
"type": "test_type",
"uuid": observable_instance_uuid,
"value": "test",
}
client_valid_access_token.post("/api/observable/instance/", json=[create_json])
# Create another observable instance that redirects to the previously created one
create_json = {
"alert_uuid": alert_uuid,
"parent_analysis_uuid": analysis_uuid,
"redirection_uuid": observable_instance_uuid,
"type": "test_type",
"value": "test",
}
create = client_valid_access_token.post("/api/observable/instance/", json=[create_json])
# Read it back
get = client_valid_access_token.get(create.headers["Content-Location"])
assert get.json()["redirection_uuid"] == observable_instance_uuid
def test_create_valid_required_fields(client_valid_access_token):
# Create an alert
alert_uuid, analysis_uuid = create_alert(client_valid_access_token=client_valid_access_token)
# Read the alert back to get its current version
# TODO: Fix this hardcoded URL
get_alert = client_valid_access_token.get(f"http://testserver/api/alert/{alert_uuid}")
initial_alert_version = get_alert.json()["version"]
# Read the analysis back to get its current version
# TODO: Fix this hardcoded URL
get_analysis = client_valid_access_token.get(f"http://testserver/api/analysis/{analysis_uuid}")
initial_analysis_version = get_analysis.json()["version"]
# Create an observable type
client_valid_access_token.post("/api/observable/type/", json={"value": "test_type"})
# Create an observable instance
observable_uuid = str(uuid.uuid4())
create_json = {
"alert_uuid": alert_uuid,
"parent_analysis_uuid": analysis_uuid,
"type": "test_type",
"uuid": observable_uuid,
"value": "test",
}
create = client_valid_access_token.post("/api/observable/instance/", json=[create_json])
assert create.status_code == status.HTTP_201_CREATED
# Read it back
get = client_valid_access_token.get(create.headers["Content-Location"])
    assert get.status_code == status.HTTP_200_OK
assert get.json()["alert_uuid"] == alert_uuid
assert get.json()["parent_analysis_uuid"] == analysis_uuid
assert get.json()["observable"]["type"]["value"] == "test_type"
assert get.json()["uuid"] == observable_uuid
assert get.json()["observable"]["value"] == "test"
# Read the parent analysis back. You should see this observable instance in its discovered_observable_uuids list
# even though it was not explicitly added.
# TODO: Fix this hardcoded URL
get_analysis = client_valid_access_token.get(f"http://testserver/api/analysis/{analysis_uuid}")
assert get_analysis.json()["discovered_observable_uuids"] == [observable_uuid]
# Additionally, creating an observable instance should trigger the alert and analysis to get a new version.
# TODO: Fix this hardcoded URL
get_alert = client_valid_access_token.get(f"http://testserver/api/alert/{alert_uuid}")
assert get_alert.json()["version"] != initial_alert_version
assert get_analysis.json()["version"] != initial_analysis_version
@pytest.mark.parametrize(
"values",
VALID_DIRECTIVES,
)
def test_create_valid_node_directives(client_valid_access_token, values):
# Create the directives. Need to only create unique values, otherwise the database will return a 409
# conflict exception and will roll back the test's database session (causing the test to fail).
for value in list(set(values)):
client_valid_access_token.post("/api/node/directive/", json={"value": value})
# Create an alert
alert_uuid, analysis_uuid = create_alert(client_valid_access_token=client_valid_access_token)
# Create an observable type
client_valid_access_token.post("/api/observable/type/", json={"value": "test_type"})
# Create an observable instance
create_json = {
"alert_uuid": alert_uuid,
"parent_analysis_uuid": analysis_uuid,
"type": "test_type",
"value": "test",
"directives": values,
}
create = client_valid_access_token.post("/api/observable/instance/", json=[create_json])
assert create.status_code == status.HTTP_201_CREATED
# Read it back
get = client_valid_access_token.get(create.headers["Content-Location"])
assert len(get.json()["directives"]) == len(list(set(values)))
@pytest.mark.parametrize(
"values",
VALID_TAGS,
)
def test_create_valid_node_tags(client_valid_access_token, values):
# Create the tags. Need to only create unique values, otherwise the database will return a 409
# conflict exception and will roll back the test's database session (causing the test to fail).
for value in list(set(values)):
client_valid_access_token.post("/api/node/tag/", json={"value": value})
# Create an alert
alert_uuid, analysis_uuid = create_alert(client_valid_access_token=client_valid_access_token)
# Create an observable type
client_valid_access_token.post("/api/observable/type/", json={"value": "test_type"})
# Create an observable instance
create_json = {
"alert_uuid": alert_uuid,
"parent_analysis_uuid": analysis_uuid,
"type": "test_type",
"value": "test",
"tags": values,
}
create = client_valid_access_token.post("/api/observable/instance/", json=[create_json])
assert create.status_code == status.HTTP_201_CREATED
# Read it back
get = client_valid_access_token.get(create.headers["Content-Location"])
assert len(get.json()["tags"]) == len(list(set(values)))
@pytest.mark.parametrize(
"value",
VALID_THREAT_ACTOR,
)
def test_create_valid_node_threat_actor(client_valid_access_token, value):
# Create the threat actor. Need to only create unique values, otherwise the database will return a 409
# conflict exception and will roll back the test's database session (causing the test to fail).
if value:
client_valid_access_token.post("/api/node/threat_actor/", json={"value": value})
# Create an alert
alert_uuid, analysis_uuid = create_alert(client_valid_access_token=client_valid_access_token)
# Create an observable type
client_valid_access_token.post("/api/observable/type/", json={"value": "test_type"})
# Create an observable instance
create_json = {
"alert_uuid": alert_uuid,
"parent_analysis_uuid": analysis_uuid,
"type": "test_type",
"value": "test",
"threat_actor": value,
}
create = client_valid_access_token.post("/api/observable/instance/", json=[create_json])
assert create.status_code == status.HTTP_201_CREATED
# Read it back
get = client_valid_access_token.get(create.headers["Content-Location"])
if value:
assert get.json()["threat_actor"]["value"] == value
else:
assert get.json()["threat_actor"] is None
@pytest.mark.parametrize(
"values",
VALID_THREATS,
)
def test_create_valid_node_threats(client_valid_access_token, values):
# Create a threat type
client_valid_access_token.post("/api/node/threat/type/", json={"value": "test_type"})
# Create the threats. Need to only create unique values, otherwise the database will return a 409
# conflict exception and will roll back the test's database session (causing the test to fail).
for value in list(set(values)):
client_valid_access_token.post("/api/node/threat/", json={"types": ["test_type"], "value": value})
# Create an alert
alert_uuid, analysis_uuid = create_alert(client_valid_access_token=client_valid_access_token)
# Create an observable type
client_valid_access_token.post("/api/observable/type/", json={"value": "test_type"})
# Create an observable instance
create_json = {
"alert_uuid": alert_uuid,
"parent_analysis_uuid": analysis_uuid,
"type": "test_type",
"value": "test",
"threats": values,
}
create = client_valid_access_token.post("/api/observable/instance/", json=[create_json])
assert create.status_code == status.HTTP_201_CREATED
# Read it back
get = client_valid_access_token.get(create.headers["Content-Location"])
assert len(get.json()["threats"]) == len(list(set(values)))
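# Note: every test above repeats the same setup (create an alert, then create the
# "test_type" observable type). A minimal sketch of a shared pytest fixture that
# could factor that setup out follows; the name `alert_and_type` is hypothetical
# and not part of the original suite:
#
# @pytest.fixture
# def alert_and_type(client_valid_access_token):
#     alert_uuid, analysis_uuid = create_alert(client_valid_access_token=client_valid_access_token)
#     client_valid_access_token.post("/api/observable/type/", json={"value": "test_type"})
#     return alert_uuid, analysis_uuid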
| 37.957045 | 118 | 0.695849 | 2,805 | 22,091 | 5.184314 | 0.062389 | 0.083207 | 0.128593 | 0.166415 | 0.837574 | 0.79989 | 0.786481 | 0.755604 | 0.731811 | 0.718677 | 0 | 0.011595 | 0.18795 | 22,091 | 581 | 119 | 38.022375 | 0.799041 | 0.177538 | 0 | 0.591623 | 0 | 0 | 0.210448 | 0.066294 | 0 | 0 | 0 | 0.001721 | 0.115183 | 1 | 0.04712 | false | 0 | 0.013089 | 0 | 0.060209 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
c528fab609bd7ac6af379d81cbe61013e82e97b8 | 70 | py | Python | lib/crypto.py | sn0wfa11/Red-Team-Toolkit | e21f539425cc7653c6232fbbe15c8fa75ddf6cbb | ["MIT"] | 3 | 2019-03-31T19:48:45.000Z | 2019-04-03T02:23:37.000Z | lib/crypto.py | sn0wfa11/Red-Team-Toolkit | e21f539425cc7653c6232fbbe15c8fa75ddf6cbb | ["MIT"] | null | null | null | lib/crypto.py | sn0wfa11/Red-Team-Toolkit | e21f539425cc7653c6232fbbe15c8fa75ddf6cbb | ["MIT"] | null | null | null |
import hashlib
def md5(data):
    """Return the hex-encoded MD5 digest of data (a bytes-like object, as hashlib requires)."""
    return hashlib.md5(data).hexdigest()
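# Example usage (assumes a bytes input, since hashlib digests operate on bytes):
# >>> md5(b"hello")
# '5d41402abc4b2a76b9719d911017c592'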
| 14 | 38 | 0.742857 | 10 | 70 | 5.2 | 0.7 | 0.269231 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.032787 | 0.128571 | 70 | 4 | 39 | 17.5 | 0.819672 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.333333 | false | 0 | 0.333333 | 0.333333 | 1 | 0 | 1 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 1 | 1 | 0 | 0 | 7 |
c5502ae7bb7a650b69834ebc64ade285ced5606d | 87 | py | Python | examples/set.update/ex1.py | mcorne/python-by-example | 15339c0909c84b51075587a6a66391100971c033 | ["MIT"] | null | null | null | examples/set.update/ex1.py | mcorne/python-by-example | 15339c0909c84b51075587a6a66391100971c033 | ["MIT"] | null | null | null | examples/set.update/ex1.py | mcorne/python-by-example | 15339c0909c84b51075587a6a66391100971c033 | ["MIT"] | null | null | null |
# s.update({4, 5, 6}, {7, 8, 9})
s = {1, 2, 3}
s.update({4, 5, 6}, {7, 8, 9})
print(s)
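# update() merges the other sets into s in place; in CPython this prints:
# {1, 2, 3, 4, 5, 6, 7, 8, 9}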
| 17.4 | 32 | 0.413793 | 22 | 87 | 1.636364 | 0.545455 | 0.388889 | 0.444444 | 0.5 | 0.722222 | 0.722222 | 0.722222 | 0.722222 | 0 | 0 | 0 | 0.220588 | 0.218391 | 87 | 4 | 33 | 21.75 | 0.308824 | 0.344828 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0.333333 | 1 | 0 | 1 | null | 1 | 1 | 1 | 0 | 1 | 1 | 1 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 11 |
c57654ab8d1e0e7ea47a6baf8ee4b16f27cb9f55 | 2702 | py | Python | project/tests/api/test_register_api.py | sunday-ucheawaji/API- | 07fb4b596cfe8e85b8575a8e70a8c886d3ab627a | ["MIT"] | null | null | null | project/tests/api/test_register_api.py | sunday-ucheawaji/API- | 07fb4b596cfe8e85b8575a8e70a8c886d3ab627a | ["MIT"] | null | null | null | project/tests/api/test_register_api.py | sunday-ucheawaji/API- | 07fb4b596cfe8e85b8575a8e70a8c886d3ab627a | ["MIT"] | 1 | 2022-02-09T14:13:20.000Z | 2022-02-09T14:13:20.000Z |
from django.test import TestCase
from django.urls import reverse
from rest_framework.test import APIClient
from rest_framework import status
# Create your tests here.
class RegisterAPITest(TestCase):
def setUp(self):
self.client = APIClient()
def test_api_can_create_user(self):
valid_user_data = {
'first_name': 'Tolani',
'last_name': 'Akinola',
'email': 'augustine.jibunoh@decagon.dev',
'password': 'wiTTy007waTT'
}
self.response = self.client.post(
reverse('register'),
valid_user_data,
format='json'
)
self.assertEqual(self.response.status_code, status.HTTP_201_CREATED)
def test_api_firstname_field_may_not_be_blank(self):
invalid_user_data = {
'first_name': '',
'last_name': 'Akinola',
'email': 'augustine.jibunoh@decagon.dev',
'password': 'wiTTy007waTT'
}
self.response = self.client.post(
reverse('register'),
invalid_user_data,
format='json'
)
self.assertEqual(self.response.status_code,
status.HTTP_400_BAD_REQUEST)
def test_api_lastname_field_may_not_be_blank(self):
invalid_user_data = {
'first_name': 'Tolani',
'last_name': '',
'email': 'augustine.jibunoh@decagon.dev',
'password': 'wiTTy007waTT'
}
self.response = self.client.post(
reverse('register'),
invalid_user_data,
format='json'
)
self.assertEqual(self.response.status_code,
status.HTTP_400_BAD_REQUEST)
def test_api_email_field_may_not_be_blank(self):
invalid_user_data = {
'first_name': 'Tolani',
'last_name': 'Akinola',
'email': '',
'password': 'wiTTy007waTT'
}
self.response = self.client.post(
reverse('register'),
invalid_user_data,
format='json'
)
self.assertEqual(self.response.status_code,
status.HTTP_400_BAD_REQUEST)
def test_api_password_field_may_not_be_blank(self):
invalid_user_data = {
'first_name': 'Tolani',
'last_name': 'Akinola',
'email': 'augustine.jibunoh@decagon.dev',
'password': ''
}
self.response = self.client.post(
reverse('register'),
invalid_user_data,
format='json'
)
self.assertEqual(self.response.status_code,
status.HTTP_400_BAD_REQUEST)
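# Note: the four blank-field tests above share the same request/assert shape.
# A minimal sketch of a consolidated version using unittest's subTest follows;
# the `valid_user_data` payload is a hypothetical helper, not part of the
# original file:
#
# def test_api_fields_may_not_be_blank(self):
#     valid_user_data = {
#         'first_name': 'Tolani',
#         'last_name': 'Akinola',
#         'email': 'augustine.jibunoh@decagon.dev',
#         'password': 'wiTTy007waTT',
#     }
#     for field in ('first_name', 'last_name', 'email', 'password'):
#         with self.subTest(field=field):
#             data = {**valid_user_data, field: ''}
#             response = self.client.post(reverse('register'), data, format='json')
#             self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)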
| 31.788235 | 76 | 0.561806 | 268 | 2,702 | 5.354478 | 0.220149 | 0.055749 | 0.083624 | 0.059233 | 0.801394 | 0.801394 | 0.801394 | 0.801394 | 0.801394 | 0.801394 | 0 | 0.015 | 0.333827 | 2,702 | 84 | 77 | 32.166667 | 0.782222 | 0.008512 | 0 | 0.618421 | 0 | 0 | 0.162869 | 0.043332 | 0 | 0 | 0 | 0 | 0.065789 | 1 | 0.078947 | false | 0.078947 | 0.052632 | 0 | 0.144737 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 7 |
3d87ed75ed5e33648f224f3a87d83dc2c0cd471b | 79136 | py | Python | tests/test_utils.py | K-Mike/deep_ner | ffe1bcd64f7e38066866daa0cdd943300ba9ed4e | ["Apache-2.0"] | null | null | null | tests/test_utils.py | K-Mike/deep_ner | ffe1bcd64f7e38066866daa0cdd943300ba9ed4e | ["Apache-2.0"] | null | null | null | tests/test_utils.py | K-Mike/deep_ner | ffe1bcd64f7e38066866daa0cdd943300ba9ed4e | ["Apache-2.0"] | null | null | null |
import os
import sys
import unittest
try:
from deep_ner.utils import load_dataset
from deep_ner.utils import load_tokens_from_factrueval2016_by_paragraphs
from deep_ner.utils import load_tokens_from_factrueval2016_by_sentences
except ImportError:  # fall back to importing from the repository root when deep_ner is not installed
sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
from deep_ner.utils import load_dataset
from deep_ner.utils import load_tokens_from_factrueval2016_by_paragraphs
from deep_ner.utils import load_tokens_from_factrueval2016_by_sentences
class TestUtils(unittest.TestCase):
def test_load_tokens_from_factrueval2016_by_sentences(self):
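        # Expected token map: token ID -> (start offset, end offset, token text),
        # where the offsets index into the reconstructed document text checked below.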
true_tokens = {
1802186: (0, 8, 'Назначен'),
1802187: (9, 16, 'куратор'),
1802188: (17, 30, 'строительства'),
1802189: (31, 41, 'российской'),
1802190: (42, 52, 'Кремниевой'),
1802191: (53, 59, 'долины'),
1802192: (61, 68, 'Дмитрий'),
1802193: (69, 77, 'Медведев'),
1802194: (78, 85, 'доверил'),
1802195: (86, 90, 'пост'),
1802196: (91, 103, 'руководителя'),
1802197: (104, 113, 'иннограда'),
1802198: (114, 125, 'миллиардеру'),
1802199: (126, 133, 'Виктору'),
1802200: (134, 146, 'Вексельбергу'),
1802201: (146, 147, '.'),
1802202: (149, 152, 'Всё'),
1802203: (153, 159, 'меньше'),
1802204: (160, 168, 'остаётся'),
1802205: (169, 179, 'нерешённых'),
1802206: (180, 188, 'вопросов'),
1802207: (188, 189, ','),
1802208: (190, 200, 'касающихся'),
1802209: (201, 211, 'возведения'),
1802210: (212, 213, 'в'),
1802211: (214, 220, 'России'),
1802212: (221, 232, 'уникального'),
1802213: (233, 239, 'Центра'),
1802214: (240, 242, 'по'),
1802215: (243, 253, 'разработке'),
1802216: (254, 255, 'и'),
1802217: (256, 272, 'коммерциализации'),
1802218: (273, 278, 'новых'),
1802219: (279, 289, 'технологий'),
1802220: (289, 290, '.'),
1802221: (291, 297, 'Власти'),
1802222: (298, 301, 'уже'),
1802223: (302, 304, 'не'),
1802224: (305, 311, 'только'),
1802225: (312, 319, 'выбрали'),
1802226: (320, 328, 'площадку'),
1802227: (329, 332, 'для'),
1802228: (333, 346, 'строительства'),
1802229: (347, 360, 'отечественной'),
1802230: (361, 371, 'Кремниевой'),
1802231: (372, 378, 'долины'),
1802232: (379, 380, 'в'),
1802233: (381, 393, 'подмосковном'),
1802234: (394, 402, 'Сколково'),
1802235: (402, 403, ','),
1802236: (404, 405, 'а'),
1802237: (406, 411, 'также'),
1802238: (412, 420, 'частично'),
1802239: (421, 429, 'одобрили'),
1802240: (430, 439, 'концепцию'),
1802241: (440, 450, 'наукограда'),
1802242: (450, 451, ','),
1802243: (452, 454, 'но'),
1802244: (455, 456, 'и'),
1802245: (457, 467, 'определили'),
1802246: (468, 476, 'куратора'),
1802247: (477, 484, 'большой'),
1802248: (485, 498, 'инновационной'),
1802249: (499, 506, 'стройки'),
1802250: (506, 507, '.'),
1802251: (508, 509, '«'),
1802252: (509, 513, 'Были'),
1802253: (514, 523, 'проведены'),
1802254: (524, 536, 'определённые'),
1802255: (537, 549, 'консультации'),
1802256: (550, 552, 'по'),
1802257: (553, 559, 'поводу'),
1802258: (560, 564, 'того'),
1802259: (564, 565, ','),
1802260: (566, 569, 'кто'),
1802261: (570, 579, 'конкретно'),
1802262: (580, 583, 'мог'),
1802263: (584, 586, 'бы'),
1802264: (587, 599, 'осуществлять'),
1802265: (600, 606, 'такого'),
1802266: (607, 611, 'рода'),
1802267: (612, 618, 'работу'),
1802268: (618, 619, '.'),
1802269: (620, 624, 'Мною'),
1802270: (625, 632, 'принято'),
1802271: (633, 640, 'решение'),
1802272: (640, 641, ','),
1802273: (642, 645, 'что'),
1802274: (646, 656, 'российскую'),
1802275: (657, 662, 'часть'),
1802276: (663, 667, 'этой'),
1802277: (668, 682, 'координирующей'),
1802278: (683, 692, 'структуры'),
1802279: (692, 693, ','),
1802280: (694, 701, 'которую'),
1802281: (702, 704, 'мы'),
1802282: (705, 713, 'создадим'),
1802283: (713, 714, ','),
1802284: (715, 724, 'возглавит'),
1802285: (725, 731, 'Виктор'),
1802286: (732, 742, 'Феликсович'),
1802287: (743, 754, 'Вексельберг'),
1802288: (754, 755, '»'),
1802289: (755, 756, ','),
1802290: (757, 758, '—'),
1802291: (759, 767, 'цитирует'),
1802292: (768, 769, '«'),
1802293: (769, 775, 'Взгляд'),
1802294: (775, 776, '»'),
1802295: (777, 784, 'Дмитрия'),
1802296: (785, 794, 'Медведева'),
1802297: (794, 795, '.')
}
true_text = 'Назначен куратор строительства российской Кремниевой долины Дмитрий Медведев доверил пост ' \
'руководителя иннограда миллиардеру Виктору Вексельбергу. Всё меньше остаётся нерешённых ' \
'вопросов, касающихся возведения в России уникального Центра по разработке и коммерциализации ' \
'новых технологий. Власти уже не только выбрали площадку для строительства отечественной ' \
'Кремниевой долины в подмосковном Сколково, а также частично одобрили концепцию наукограда, но и ' \
'определили куратора большой инновационной стройки. «Были проведены определённые консультации по ' \
'поводу того, кто конкретно мог бы осуществлять такого рода работу. Мною принято решение, что ' \
'российскую часть этой координирующей структуры, которую мы создадим, возглавит Виктор Феликсович' \
' Вексельберг», — цитирует «Взгляд» Дмитрия Медведева.'
true_bounds_of_sentences = ((0, 59), (61, 147), (149, 290), (291, 507), (508, 619), (620, 756), (757, 795))
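        # Each bounds pair is a half-open [start, end) span into true_text;
        # e.g. true_text[0:59] is the headline sentence.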
loaded_tokens, loaded_text, loaded_sentence_bounds = load_tokens_from_factrueval2016_by_sentences(
os.path.join(os.path.dirname(__file__), 'testdata', 'factrueval_data', 'book_3543.tokens')
)
self.assertIsInstance(loaded_tokens, dict)
self.assertEqual(set(true_tokens.keys()), set(loaded_tokens.keys()))
for token_ID in true_tokens:
self.assertEqual(true_tokens[token_ID], loaded_tokens[token_ID])
self.assertEqual(true_text, loaded_text)
self.assertEqual(true_bounds_of_sentences, loaded_sentence_bounds)
def test_load_tokens_from_factrueval2016_by_paragraphs(self):
true_tokens = {
1802186: (0, 8, 'Назначен'),
1802187: (9, 16, 'куратор'),
1802188: (17, 30, 'строительства'),
1802189: (31, 41, 'российской'),
1802190: (42, 52, 'Кремниевой'),
1802191: (53, 59, 'долины'),
1802192: (61, 68, 'Дмитрий'),
1802193: (69, 77, 'Медведев'),
1802194: (78, 85, 'доверил'),
1802195: (86, 90, 'пост'),
1802196: (91, 103, 'руководителя'),
1802197: (104, 113, 'иннограда'),
1802198: (114, 125, 'миллиардеру'),
1802199: (126, 133, 'Виктору'),
1802200: (134, 146, 'Вексельбергу'),
1802201: (146, 147, '.'),
1802202: (149, 152, 'Всё'),
1802203: (153, 159, 'меньше'),
1802204: (160, 168, 'остаётся'),
1802205: (169, 179, 'нерешённых'),
1802206: (180, 188, 'вопросов'),
1802207: (188, 189, ','),
1802208: (190, 200, 'касающихся'),
1802209: (201, 211, 'возведения'),
1802210: (212, 213, 'в'),
1802211: (214, 220, 'России'),
1802212: (221, 232, 'уникального'),
1802213: (233, 239, 'Центра'),
1802214: (240, 242, 'по'),
1802215: (243, 253, 'разработке'),
1802216: (254, 255, 'и'),
1802217: (256, 272, 'коммерциализации'),
1802218: (273, 278, 'новых'),
1802219: (279, 289, 'технологий'),
1802220: (289, 290, '.'),
1802221: (291, 297, 'Власти'),
1802222: (298, 301, 'уже'),
1802223: (302, 304, 'не'),
1802224: (305, 311, 'только'),
1802225: (312, 319, 'выбрали'),
1802226: (320, 328, 'площадку'),
1802227: (329, 332, 'для'),
1802228: (333, 346, 'строительства'),
1802229: (347, 360, 'отечественной'),
1802230: (361, 371, 'Кремниевой'),
1802231: (372, 378, 'долины'),
1802232: (379, 380, 'в'),
1802233: (381, 393, 'подмосковном'),
1802234: (394, 402, 'Сколково'),
1802235: (402, 403, ','),
1802236: (404, 405, 'а'),
1802237: (406, 411, 'также'),
1802238: (412, 420, 'частично'),
1802239: (421, 429, 'одобрили'),
1802240: (430, 439, 'концепцию'),
1802241: (440, 450, 'наукограда'),
1802242: (450, 451, ','),
1802243: (452, 454, 'но'),
1802244: (455, 456, 'и'),
1802245: (457, 467, 'определили'),
1802246: (468, 476, 'куратора'),
1802247: (477, 484, 'большой'),
1802248: (485, 498, 'инновационной'),
1802249: (499, 506, 'стройки'),
1802250: (506, 507, '.'),
1802251: (508, 509, '«'),
1802252: (509, 513, 'Были'),
1802253: (514, 523, 'проведены'),
1802254: (524, 536, 'определённые'),
1802255: (537, 549, 'консультации'),
1802256: (550, 552, 'по'),
1802257: (553, 559, 'поводу'),
1802258: (560, 564, 'того'),
1802259: (564, 565, ','),
1802260: (566, 569, 'кто'),
1802261: (570, 579, 'конкретно'),
1802262: (580, 583, 'мог'),
1802263: (584, 586, 'бы'),
1802264: (587, 599, 'осуществлять'),
1802265: (600, 606, 'такого'),
1802266: (607, 611, 'рода'),
1802267: (612, 618, 'работу'),
1802268: (618, 619, '.'),
1802269: (620, 624, 'Мною'),
1802270: (625, 632, 'принято'),
1802271: (633, 640, 'решение'),
1802272: (640, 641, ','),
1802273: (642, 645, 'что'),
1802274: (646, 656, 'российскую'),
1802275: (657, 662, 'часть'),
1802276: (663, 667, 'этой'),
1802277: (668, 682, 'координирующей'),
1802278: (683, 692, 'структуры'),
1802279: (692, 693, ','),
1802280: (694, 701, 'которую'),
1802281: (702, 704, 'мы'),
1802282: (705, 713, 'создадим'),
1802283: (713, 714, ','),
1802284: (715, 724, 'возглавит'),
1802285: (725, 731, 'Виктор'),
1802286: (732, 742, 'Феликсович'),
1802287: (743, 754, 'Вексельберг'),
1802288: (754, 755, '»'),
1802289: (755, 756, ','),
1802290: (757, 758, '—'),
1802291: (759, 767, 'цитирует'),
1802292: (768, 769, '«'),
1802293: (769, 775, 'Взгляд'),
1802294: (775, 776, '»'),
1802295: (777, 784, 'Дмитрия'),
1802296: (785, 794, 'Медведева'),
1802297: (794, 795, '.')
}
true_text = 'Назначен куратор строительства российской Кремниевой долины Дмитрий Медведев доверил пост ' \
'руководителя иннограда миллиардеру Виктору Вексельбергу. Всё меньше остаётся нерешённых ' \
'вопросов, касающихся возведения в России уникального Центра по разработке и коммерциализации ' \
'новых технологий. Власти уже не только выбрали площадку для строительства отечественной ' \
'Кремниевой долины в подмосковном Сколково, а также частично одобрили концепцию наукограда, но и ' \
'определили куратора большой инновационной стройки. «Были проведены определённые консультации по ' \
'поводу того, кто конкретно мог бы осуществлять такого рода работу. Мною принято решение, что ' \
'российскую часть этой координирующей структуры, которую мы создадим, возглавит Виктор Феликсович' \
' Вексельберг», — цитирует «Взгляд» Дмитрия Медведева.'
true_bounds_of_paragraphs = ((0, 59), (61, 147), (149, 795))
loaded_tokens, loaded_text, loaded_paragraph_bounds = load_tokens_from_factrueval2016_by_paragraphs(
os.path.join(os.path.dirname(__file__), 'testdata', 'factrueval_data', 'book_3543.txt'),
os.path.join(os.path.dirname(__file__), 'testdata', 'factrueval_data', 'book_3543.tokens')
)
self.assertIsInstance(loaded_tokens, dict)
self.assertEqual(set(true_tokens.keys()), set(loaded_tokens.keys()))
for token_ID in true_tokens:
self.assertEqual(true_tokens[token_ID], loaded_tokens[token_ID])
self.assertEqual(true_text, loaded_text)
self.assertEqual(true_bounds_of_paragraphs, loaded_paragraph_bounds)
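# load_dataset() returns a pair (X, y): X is a list of plain-text samples and
# y is a parallel list of dicts mapping an entity type ("PERSON", "LOCATION",
# "ORG") to a list of (start, end) character spans within the corresponding
# sample. A sketch of the expected shape, using the first fixture sample below:
#   X[0] == 'Александр Вертинский. «Я не знаю, зачем и кому это нужно…»'
#   y[0] == {"PERSON": [(0, 20)]}   # spans 'Александр Вертинский'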
def test_load_dataset_positive01(self):
base_dir = os.path.join(os.path.dirname(__file__), 'testdata')
file_name = os.path.join(base_dir, 'dataset_with_paragraphs.json')
X_true = [
'Александр Вертинский. «Я не знаю, зачем и кому это нужно…»',
'21 марта 1889 года родился главный русский шансонье XX века, печальный Пьеро, вписавший свою судьбу в '
'историю отечественной культуры',
'Жизнь с самого начала оставляла для Александра Вертинского слишком много вопросов без ответов. Слишком '
'много «пустого» пространства. И он научился заполнять его вымыслом. Создал собственный театр с безумным '
'множеством персонажей, каждый из которых — от сироток-калек и безымянных кокаинеточек до гениальных '
'скрипачей и кинодив — был им самим.',
'Трехкомнатная квартира на последнем этаже дома на углу Тверской и Козицкого переулка в Москве и сегодня '
'выглядит так, словно ее хозяин вот-вот вернется. В просторном кабинете все те же большие книжные шкафы, '
'все тот же гигантский письменный стол с наполеоновским вензелем и бюстом Вольтера.',
'Сейчас в кабинете все чаще бывает лишь вдова Вертинского. Вновь и вновь перечитывает его письма, '
'рукописи. Он смотрит на нее с фотографий, развешанных на стенах, расставленных на столе, и словно '
'возвращает в те пятнадцать лет неизбывного счастья, когда по квартире витает запах табака и лаванды, дом '
'полон гостей и шумные застолья длятся допоздна. И все это — будто здесь и сейчас. Нет, время не '
'остановилось, оно сомкнуло объятия, чтобы вновь и вновь перечитывать эту странную, загадочную судьбу.',
'Считается, что свой голос Георгий Иванов обрёл в эмиграции и благодаря эмиграции. Мол, утрата родины '
'стала для него тем «простым человеческим горем», которого так не хватало по форме безупречным его стихам, '
'чтобы они наполнились содержанием. На самом деле это не совсем так, потому что точка сборки Георгия '
'Иванова была смещена ещё в Петербурге.',
'Георгий Иванов. На грани музыки и сна',
'Первое детское воспоминание Вертинского — о смерти матери. Трехлетний Саша сидит на горшке и выковыривает '
'глаза у плюшевого медвежонка. Горничная Лизка отрывает мальчика от увлекательного занятия: «Вставай, твоя '
'мама умерла!» Мать лежит в серебристом гробу на столе, тело ее скрывают цветы; у изголовья стоят '
'серебряные подсвечники и маленькая табуретка. В руке Саша сжимает шоколадку, он бросается к матери, чтобы '
'угостить. Но мать не раскрывает рта…',
'Через два года от чахотки умер отец. Однажды ранней весной его нашли без чувств на могиле супруги. '
'Оправиться от болезни он уже не смог. Когда кровь хлынула горлом, рядом с ним была только десятилетняя '
'дочь Надя, не знавшая, как помочь. Обессиленный отец упал на подушку и захлебнулся кровью.',
'Старшая сестра матери забрала Надю к себе в Ковно. Саша остался жить в Киеве с другой сестрой матери, '
'которая уверила мальчика в том, что его сестра умерла. То же самое было сказано Наде о брате. Спустя годы '
'Александр случайно обнаружит упоминание о Н. Н. Вертинской в журнале «Театр и искусство», напишет ей, и '
'выяснится, что это его сестра. Во время Первой мировой Вертинскому сообщат, что Надя покончила с собой. '
'Только после смерти Вертинского его вдова выяснит, что Надежда Николаевна живет в Ленинграде.',
'Смерть причудливо и неотвратимо вписалась в его жизнь. Смерть была тем миром, где кончались тщета '
'мальчика Мая и тревоги Безноженьки и наступал долгожданный покой.',
'Александр Вертинский появился на свет «незаконнорожденным». Родственники отца и матери не одобряли союз '
'Николая Вертинского с Евгенией Скалацкой (Сколацкой) даже тогда, когда родились Надя и Саша. Евгения '
'Степановна происходила из дворянского рода, а Николай Петрович был присяжным поверенным. Первая жена отца '
'по настоянию родственников Николая Вертинского не давала ему развода. Так что пришлось усыновить '
'собственных детей.',
'Жизнь с самого начала оставляла для Александра Вертинского слишком много вопросов без ответов. Слишком '
'много «пустого» пространства. И он научился заполнять его вымыслом. Создал собственный театр с безумным '
'множеством персонажей, каждый из которых — от сироток-калек и безымянных кокаинеточек до гениальных '
'скрипачей и кинодив — был им самим.',
'Театр стал маниакальной страстью Вертинского еще с гимназических лет. Он любыми способами проникал на '
'спектакли, оперы, концерты, выступал в любительских постановках в контрактовом зале на киевском Подоле и '
'подвизался статистом в Соловцовском театре — разумеется, бесплатно. А чтобы не умереть с голоду, брался '
'за любую работу — пописывал рецензии на выступления гастролеров, служил корректором в типографии, '
'нанимался помощником бухгалтера в гостиницу, продавал открытки, грузил арбузы на барках и даже '
'подворовывал у двоюродной сестры безделушки, чтобы сбыть их на толкучке.',
'С армией Колчака бежала из Владивостока семья цыган Димитриевичей, на пароходах генерала Врангеля '
'спасались Александр Вертинский и Надежда Плевицкая, уходили куда угодно, лишь бы подальше от Советов, '
'многие звёзды и звёздочки... Да, в первой эмиграции оказалось немало творческих личностей, работавших в '
'интересующем нас жанре русской песни, но даже самые яркие их имена блекнут рядом со сверкающей снежной '
'шапкой Монблана в лице Фёдора Ивановича Шаляпина.',
'Живой бог русской музыки',
'В 1911–1912 годах журналы «Киевская неделя» и «Лукоморье» опубликовали первые рассказы Вертинского: '
'«Красные бабочки» и «Моя невеста» — декадентские, но с бунинской интонацией. «Красные бабочки» — о '
'мальчике-сироте, случайно погубившем красных бабочек, вышитых на черном платье. Мальчик наказан суровой '
'теткой, но бабочки являются ему во сне, чтобы отомстить за погибших сестер. «Моя невеста» — о сумасшедшей '
'бездомной, читающей стихи на эстраде опустевшего осеннего парка. Эта «светлая малютка-невеста» при '
'ближайшем рассмотрении оказывается «маленьким уродливым существом» с «длинным, острым, серо-зеленого '
'цвета лицом», «черно-синими припухшими губами», «без бровей, без ресниц, с глубоко вдавленными в череп '
'глазами».',
'Свободное от литературных посиделок и работы время Вертинский коротал с киевской богемной молодежью в '
'подвальном кабачке, закусывая дешевое вино дешевым сыром. В приобретенном на толкучке подержанном фраке, '
'всегда с живым цветком в петлице, всегда презрительный и надменный, он сыпал заранее продуманными '
'афоризмами и производил на окружающих впечатление большого оригинала. Но прекрасно понимал, что вечно так '
'продолжаться не может.',
'Скопив 25 рублей и подыскав компаньона с театральным гардеробчиком (без собственных костюмов в театрах '
'тогда статистов не брали), Вертинский подался в Москву.',
'Здесь он играл небольшие роли в любительских студиях, поступил в театр миниатюр Марьи Арцыбушевой, где '
'служил за котлеты и борщ, соглашался на любые роли в кино, показывался во МХАТе — но из-за своего '
'грассирующего «р» был отвергнут Станиславским.',
'А внутри бурлило и клокотало, требовало выхода и не находило его. Слишком много вокруг было никому '
'неизвестных талантов и знаменитых бездарностей. Столицы захлестнула эпидемия увлечения кокаином. Его '
'покупали сначала в аптеках, затем с рук, носили в пудреницах и портсигарах, щедро одалживали и '
'одалживались. Однажды выглянув из выходившего на крышу окна мансарды, которую Вертинский снимал, он '
'обнаружил, что весь скат усеян пустыми коричневыми бутылочками из-под кокаина.',
'Вертинский отправился к психиатру, профессору Баженову, и, подойдя к трамвайной остановке, увидел, как '
'Пушкин сошел со своего пьедестала, оставив на нем четкий след. Александр Сергеевич сел вместе с '
'Вертинским в трамвай и достал большой старинный медный пятак — для оплаты.',
'Справиться с пристрастием к кокаину Вертинскому помогла война. Под именем Брат Пьеро он записался в '
'санитарный поезд, курсировавший от Москвы к фронту и обратно. Почти два года Вертинский перевязывал '
'раненых, читал им письма от родных, пел и даже, по его уверению, оперировал.',
'В 1915 году Вертинский вернулся в театр миниатюр Арцыбушевой с собственным номером — «Ариетки Пьеро». На '
'фоне черного занавеса в лунном луче прожектора на сцене появлялся высокий молодой человек. На его густо '
'покрытом белилами лице резко выделялись ярко-красный рот, обведенные тушью большие глаза и печально '
'вздернутые нарисованные брови. После вступления рояля этот странный юноша взмахивал руками и тихо '
'начинал:',
'Я люблю Вас, моя сегоглазочка, Золотая ошибка моя! Вы — вечегняя жуткая сказочка, Вы — цветок из кагтины '
'Гойя.',
'После бесконечных ямщиков и соловьев, аллей и ночей, дышащих сладострастьем, с одной стороны, а с другой '
'с другой — на фоне бравад футуристов, претенциозных поэз Игоря Северянина и одесской шансоньетки Изы '
'Кремер с ее занзибарами-кларами, — печальный Пьеро Вертинского стал сенсацией. Ему удалось невозможное: '
'вписать богемную экзотику — всех этих маленьких креольчиков, смуглых принцев с Антильских островов, '
'китайчат Ли, лиловых негров — в живописный ландшафт одинокой и беззащитной души; превратить ироничную '
'игру культурными символами в откровение глубокой печали.',
'Так певец без выдающихся вокальных данных, композитор, не знавший нотной грамоты, актер с дефектом дикции '
'стал всероссийским кумиром. Издательство «Прогрессивные новости» Б. Андржеевского огромными тиражами '
'выпускало «Песенки Вертинского», которые впечатлительные курсистки развозили по всей стране.',
'Начались гастроли и бенефисы, от восторженной и возмущенной публики нередко приходилось спасаться через '
'черный ход. Посыпались приглашения в кино. Популярность Вертинского была столь велика, что в феврале 1917 '
'года Александра Керенского называли «печальным Пьеро российской революции».',
'Как и подавляющее большинство представителей русской интеллигенции, Вертинский связывал с Февральской '
'революцией опьяняющие надежды на обновление и очищение. Октябрьский переворот заставил протрезветь. Под '
'впечатлением гибели московских юнкеров, убитых большевиками, Вертинский написал знаменитых «Юнкеров»:',
'Я не знаю, зачем и кому это нужно, Кто послал их на смерть недрожавшей рукой, Только так беспощадно, так '
'зло и ненужно Опустили их в вечный покой.',
'Песня стала настоящим белогвардейским гимном — с нею шли в бой и умирали русские офицеры и юнкера. '
'Существует легенда, что Вертинского вызывали в ЧК для дачи объяснений по поводу контрреволюционной песни. '
'Артист возмутился: «Но вы же не можете запретить мне их жалеть!» И в ответ услышал: «Дышать запретим, '
'если потребуется».',
'Как и многие эпизоды из жизни Вертинского, допрос в ЧК не имеет документальных подтверждений. Тем не '
'менее факт остается фактом: вслед за отступающей белой армией, как и многие российские артисты, '
'Вертинский подался на юг, где все еще верили в счастливую развязку и мучились тяжелым предчувствием, что '
'ее никогда не будет.',
'В 1920 году на пароходе «Великий князь Александр Михайлович», увозящем барона Врангеля, Вертинский '
'покинул Россию, отправившись в добровольное изгнание на 23 года.',
'Его одиссея началась с Константинополя, где он пел разноязыким эмигрантам цыганские романсы и раздобыл '
'греческий паспорт на имя Александра Вертидиса. Закружилась круговерть авантюр, лиц, городов, стран. '
'Румыния, Польша, Германия, Австрия, Венгрия, Палестина, Египет, Ливия, Франция, США… Выступления в '
'ресторанах и кабаках — между горячим и десертом; в мюзик-холлах и фешенебельных отелях — для королей '
'Густава Шведского, Альфонса Испанского, принца Уэльского, для Вандербильтов и Ротшильдов.',
'В Бессарабии его арестовали по обвинению в просоветской пропаганде песней «В степи молдаванской» — в '
'особенности строками «О, как сладко, как больно сквозь слезы / Хоть взглянуть на родную страну…» '
'Естественно, в деятельности Вертинского усмотрели происки НКВД. С тех пор слава чекистского агента '
'бросает тень на его репутацию по сей день — как будто агент НКВД не может быть великим артистом…',
'Все двадцать с лишним лет, где бы Вертинский ни выступал, он пел только на русском (исключение делал лишь '
'для любимой Франции, где исполнял несколько своих песенок по-французски). Его основной аудиторией, '
'конечно же, была русская эмиграция, для которой печальный Пьеро являлся не просто символом утраченной '
'России, но, по выражению Шаляпина, «сказителем земли русской».',
'Уже с начала 1920-х Вертинский просил разрешения вернуться — через советское консульство, через Анатолия '
'Луначарского, возглавившего советскую делегацию в Берлине, — но неизменно получал отказ.',
'В конце 1935 года он приехал в Китай — в Шанхае и Харбине была довольно обширная русская община. В Шанхае '
'артист дал двадцать аншлаговых концертов (даже Шаляпину здесь сумели организовать только два '
'выступления), однако бесконечно петь для одной и той же аудитории невозможно, и Вертинский намеревался '
'через какое-то время вернуться в Европу. Но в 1937 году его вдруг пригласили в СССР — без всяких просьб '
'со стороны артиста. Вертинский остался в Китае, ожидая, когда организуют возвращение. Он ждал пять лет.',
'Что побудило Сталина позвать Вертинского? Рассказывали, что генералиссимус любил слушать ариетки Брата '
'Пьеро в часы отдыха — особенно песню «В синем и далеком океане». Легенда приписывает также Сталину '
'известную фразу «Дадим артисту Вертинскому спокойно дожить на Родине», произнесенную после того, как '
'«отец всех народов» лично вычеркнул артиста из ждановского постановления, громившего Дмитрия Шостаковича '
'и Сергея Прокофьева. Нравился Сталину Вертинский или нет, несомненно одно — возвращение «соловья '
'белоэмиграции», мировой знаменитости было идеологически выгодно советскому режиму, тем более в 1943 году, '
'когда открылся союзный фронт и в стране бродили оттепельные настроения.',
'Вертинский же всегда и всем говорил о том, что возвращается, чтобы «рассказать о страданиях эмиграции» и '
'«помирить Родину с ней». «Шанхайская Тэффи» Наталия Ильина не преминула по этому поводу съязвить в '
'автобиографическом романе «Возвращение». Ее Джордж Эрмин (Георгий Еремин), подозрительно похожий на '
'Вертинского, прочитав Конституцию СССР, перекрестился и изрек: «Я подумал, что же это — Китеж, '
'воскресающий без нас!»',
'Ранним утром 4 ноября 1943 года на пароходе «Дайрен-Мару» Вертинский покинул Шанхай. С ним были его '
'двадцатилетняя жена Лидия и ее мать, на руках он держал трехмесячную дочь Марианну. Необходимость '
'содержать семью была не самой последней причиной переезда в СССР. Шла война, зверствовала инфляция, '
'иностранные конторы в Китае закрывались, русские эмигранты спасались от японской оккупации. Выступать '
'становилось все труднее. Вертинский пускался в рискованные финансовые авантюры, не имевшие успеха. Его '
'самой удачной коммерческой операцией была закупка пяти бутылей водки накануне рождения ребенка. Продав '
'их после повышения цен, Вертинский оплатил счета за услуги роддома.',
'Первым советским городом на их пути стала Чита. Стоял жуткий мороз, семью Вертинского поселили в '
'гостинице, где практически не топили, а по стенам ползали клопы. А в местной филармонии артиста уже '
'поджидала телеграмма из Москвы с распоряжением дать в Чите несколько концертов. Родина встречала блудного '
'сына.',
'О его возвращении ходили анекдоты. В одном из них рассказывалось, как Вертинский, приехав в СССР, выходит '
'из вагона с двумя чемоданами, ставит их, целует землю и смотрит вокруг: «Не узнаю тебя, Россия!» '
'Обернувшись, обнаруживает, что чемоданов нет. «Узнаю тебя, Россия!» — восклицает артист. В другом '
'повествовалось о приеме, устроенном в честь Вертинского «пролетарским графом» Алексеем Николаевичем '
'Толстым. Гости долго томятся, ожидая, когда их пригласят к столу. Кто-то из присутствующих, оглядев '
'собравшееся общество — граф Толстой, граф Игнатьев, митрополит Николай Крутицкий, Александр Вертинский, —'
' спрашивает: «Кого ждем?» Остроумец-куплетист Смирнов-Сокольский отвечает: «Государя!»',
'Первой советской киноролью Вертинского стал кардинал Бирнч в фильме Михаила Калатозова «Заговор '
'обреченных». Актер сыграл изысканного, сладкоречивого патриция со следами былого донжуанства. Так и '
'должен выглядеть настоящий враг советского режима — образованный, воспитанный, обвораживающий своим '
'лоском. Только такие и могут строить заговоры и вынашивать планы государственного переворота. Сталинская '
'премия за роль кардинала свидетельствовала о высочайшем одобрении этой трактовки.',
'Такого же двуликого Януса Вертинский исполнил в помпезном фильме Сергея Юткевича «Великий воин '
'Скандербег». Возможно, он играл бы маскирующихся иродов и дальше, если бы Исидор Анненский не предложил '
'ему роль князя в экранизации чеховской «Анны на шее». Одним своим появлением на экране Вертинский, этот '
'обломок царской России, воскрешал шик дворянских собраний и балов при дворе.',
'Положение «советского артиста» Вертинского было довольно странным. С одной стороны, явное благоволение '
'властей: его с семьей поселили в «Метрополе», затем выделили квартиру, наградили высшей государственной '
'премией. Правда, семья в течение трех лет обитала в «Метрополе» не от хорошей жизни. Съехать было просто '
'некуда, потому что выделенная квартира находилась на первом этаже двухэтажного дома на Хорошевском шоссе. '
'Артист опасался поселяться в ней и с помощью сложных маневров обменял ее на квартиру на улице Горького, '
'которая была в таком жутком состоянии, что нуждалась в капитальном ремонте. Опасения Вертинского, как '
'выяснилось позже, были не напрасны — квартира на Хорошевском шоссе подверглась налету знаменитой «Черной '
'кошки».',
'С другой стороны, из ста с лишним песен к исполнению было разрешено не более тридцати (авторство текстов '
'Георгия Иванова и Николая Гумилева Вертинскому пришлось приписать себе), единственная прижизненная '
'пластинка вышла в 1944 году, о концертах — ни строчки в прессе. «Я существую на правах публичного дома, —'
' горько шутил Вертинский, — все ходят, но в обществе говорить об этом не принято».',
'Из эмиграции Вертинский вернулся практически с пустыми карманами, вскоре родилась вторая дочь, Настя. '
'Гастрольбюро обеспечило артисту по 20–25 концертов в месяц по всей стране от Средней Азии до Дальнего '
'Востока — в нетопленных, неприспособленных для выступлений залах с расстроенными роялями и пьяной '
'публикой. Но концертная жизнь в европейских кабаках приучила его работать в любых условиях.',
'Платили Вертинскому по самому низкому тарифу, поскольку у него не было никаких званий. За концерт артист '
'получал около 800 рублей, при этом его выступления всегда проходили при аншлагах и собирали десятки тысяч '
'рублей. Приходилось соглашаться на все, давать левые концерты, выкручиваться, объясняться… Вместе с '
'аккомпаниатором Михаилом Брохесом он вдоль и поперек исколесил всю страну по нескольку раз, дав около '
'трех тысяч концертов. Написал два десятка стихов, работал над мемуарами, которые не успел закончить. 14 '
'лет на Родине превратили бодрого, моложавого мужчину в глубокого старика.',
'Он не хотел умереть дома, не желал, чтобы родные видели «кухню смерти». 21 мая 1957 года Вертинский '
'готовился к концерту в Ленинграде, был сдержан и немногословен. Он находился в своем 208-м номере '
'«Астории», когда начался сердечный приступ. Лекарства под рукой не оказалось. Как выяснилось позже — оно '
'бы уже не помогло. При вскрытии сосуды рассыпались, как хрупкое стекло',
'Назначен куратор строительства российской Кремниевой долины',
'Дмитрий Медведев доверил пост руководителя иннограда миллиардеру Виктору Вексельбергу.',
'Всё меньше остаётся нерешённых вопросов, касающихся возведения в России уникального Центра по разработке '
'и коммерциализации новых технологий. Власти уже не только выбрали площадку для строительства '
'отечественной Кремниевой долины в подмосковном Сколково, а также частично одобрили концепцию наукограда, '
'но и определили куратора большой инновационной стройки. «Были проведены определённые консультации по '
'поводу того, кто конкретно мог бы осуществлять такого рода работу. Мною принято решение, что российскую '
'часть этой координирующей структуры, которую мы создадим, возглавит Виктор Феликсович Вексельберг», — '
'цитирует «Взгляд» Дмитрия Медведева.',
'Исходя из заявления президента, понятно, что у проекта будут не только российские инвесторы, но и '
'иностранные партнёры, в числе которых, по словам главы государства, будут и представители иностранных '
'научных кругов. Именно на базе взаимодействия науки и бизнеса должен появиться и работать инноград. «Всё '
'это затеяли не ради того, чтобы построить определённое количество коттеджей или же создать там нормальные '
'производственные условия, лаборатории. Это всё важно, но это всё инфраструктура. Самое главное, чтобы '
'там появились люди. Для того чтобы люди появились, должна быть внятная система управления. Эта система '
'управления зависит от нас. Я думаю, что с учётом масштабности этого проекта, а с другой стороны, того, '
'что в реализации этого проекта должны быть заинтересованы не только государственные структуры, но, '
'прежде всего, российский бизнес, я считаю, что координацией российский бизнес и мог бы заняться», — '
'заявил Дмитрий Медведев.',
'Это выступление президента вполне объясняет выбор руководителя проекта. Виктор Вексельберг — бизнесмен с '
'30-летним стажем, капитал которого оценивается в 6,4 млрд долларов. Вексельберг является главой правления '
'ОАО «Тюменская нефтяная компания» (ТНК) и президентом ЗАО «Ренова». Именно он является владельцем '
'значительной части российского титана и алюминиевого бизнеса.',
'О том, почему площадкой для строительства Кремниевой долины выбрано Подмосковье, читайте в статье '
'Частного корреспондента «Сколково назначили Кремниевой долиной»'
]
y_true = [
{
"PERSON": [(0, 20)]
},
{
"PERSON": [(71, 76)]
},
{
"PERSON": [(36, 58)]
},
{
"LOCATION": [(55, 63), (66, 84), (87, 93)],
"PERSON": [(281, 289)]
},
{
"PERSON": [(45, 56)]
},
{
"LOCATION": [(334, 344)],
"PERSON": [(26, 40), (299, 314)]
},
{
"PERSON": [(0, 14)]
},
{
"PERSON": [(28, 39), (70, 74), (146, 151), (362, 366)]
},
{
"PERSON": [(207, 211)]
},
{
"PERSON": [(30, 34), (51, 55), (182, 186), (208, 217), (250, 266), (367, 378), (392, 396), (436, 447),
(471, 489)],
"LOCATION": [(44, 49), (71, 76), (498, 508)],
"ORG": [(269, 295)]
},
{
"PERSON": [(107, 110), (121, 132)]
},
{
"PERSON": [(0, 20), (104, 123), (126, 144), (146, 155), (184, 188), (191, 195), (197, 215), (251, 267),
(338, 357)]
},
{
"PERSON": [(36, 58)]
},
{
"PERSON": [(33, 44)],
"LOCATION": [(168, 185), (189, 197), (198, 204)],
"ORG": [(230, 249)]
},
{
"PERSON": [(9, 16), (89, 97), (108, 128), (131, 148), (430, 455)],
"LOCATION": [(27, 39), (191, 198), (414, 422)],
"ORG": [(2, 16)]
},
dict(),
{
"PERSON": [(87, 98)],
"ORG": [(18, 56)]
},
{
"LOCATION": [(72, 80)],
"PERSON": [(51, 61)]
},
{
"LOCATION": [(151, 157)],
"PERSON": [(130, 140)]
},
{
"PERSON": [(80, 97), (233, 246)],
"ORG": [(65, 97), (177, 182)]
},
{
"PERSON": [(373, 383)]
},
{
"PERSON": [(0, 10), (46, 54), (103, 109), (166, 185), (199, 209)]
},
{
"LOCATION": [(135, 141)],
"PERSON": [(36, 47), (79, 84), (177, 187)]
},
{
"PERSON": [(12, 22), (49, 60), (94, 99)],
"ORG": [(34, 60)]
},
{
"PERSON": [(105, 109)]
},
{
"LOCATION": [(389, 408)],
"PERSON": [(162, 178), (202, 212), (251, 256), (257, 268)]
},
{
"PERSON": [(171, 187), (226, 237)],
"ORG": [(134, 169)]
},
{
"PERSON": [(160, 171), (215, 236), (257, 262)]
},
{
"PERSON": [(68, 78), (267, 277)]
},
dict(),
{
"PERSON": [(123, 134)],
"ORG": [(146, 148)]
},
{
"PERSON": [(30, 41), (197, 207)],
"ORG": [(52, 54)]
},
{
"LOCATION": [(107, 113)],
"PERSON": [(39, 59), (78, 86), (88, 98)]
},
{
"LOCATION": [(23, 38), (203, 210), (212, 218), (220, 228), (230, 237), (239, 246), (248, 257),
(259, 265), (267, 272), (274, 281), (283, 286)],
"PERSON": [(128, 148), (403, 420), (422, 441), (450, 459)]
},
{
"PERSON": [(226, 237)],
"LOCATION": [(2, 12)],
"ORG": [(256, 260), (357, 361)]
},
{
"LOCATION": [(118, 125), (307, 313)],
"PERSON": [(34, 44), (263, 268), (332, 340)]
},
{
"PERSON": [(20, 30), (96, 117)],
"LOCATION": [(155, 162)],
"ORG": [(67, 88), (133, 152)]
},
{
"LOCATION": [(31, 36), (41, 47), (50, 57), (99, 105), (335, 341), (381, 385), (447, 452)],
"PERSON": [(153, 161), (279, 289), (426, 436)]
},
{
"PERSON": [(13, 20), (29, 40), (103, 108), (194, 201), (233, 244), (388, 407), (410, 427), (438, 445),
(446, 456)]
},
{
"LOCATION": [(338, 342), (392, 397)],
"PERSON": [(0, 10), (142, 147), (149, 163), (248, 260), (262, 276), (304, 315)]
},
{
"LOCATION": [(45, 51), (52, 56), (77, 83), (258, 262), (320, 325)],
"PERSON": [(58, 68), (120, 125), (174, 182), (425, 435), (630, 640)]
},
{
"LOCATION": [(42, 46), (221, 227), (251, 255)],
"PERSON": [(74, 85)]
},
{
"LOCATION": [(92, 96), (194, 200), (262, 268)],
"PERSON": [(70, 80), (345, 356), (379, 408), (529, 536), (543, 551), (564, 581), (583, 603), (652, 670)]
},
{
"PERSON": [(27, 38), (53, 58), (68, 86)]
},
{
"LOCATION": [(319, 325)],
"PERSON": [(20, 25), (26, 36), (65, 80), (95, 105), (169, 185), (239, 243), (286, 296)]
},
{
"PERSON": [(31, 42), (607, 618)],
"LOCATION": [(137, 146), (260, 269), (399, 416), (506, 520), (673, 690)],
"ORG": [(722, 734)]
},
{
"PERSON": [(105, 120), (123, 139), (140, 151), (323, 333)]
},
{
"PERSON": [(13, 23), (95, 100)],
"LOCATION": [(179, 191), (195, 211)],
"ORG": [(102, 114)]
},
{
"PERSON": [(8, 19), (327, 344)]
},
{
"LOCATION": [(123, 133), (199, 206)],
"PERSON": [(89, 99)]
},
{
"LOCATION": [(31, 59)]
},
{
"LOCATION": [(43, 52)],
"PERSON": [(0, 16), (65, 85)]
},
{
"ORG": [(84, 140), (620, 626)],
"LOCATION": [(65, 71), (212, 229), (232, 253), (291, 301)],
"PERSON": [(576, 605), (628, 645)]
},
{
"LOCATION": [(153, 164), (290, 298)],
"PERSON": [(925, 941)]
},
{
"ORG": [(201, 243), (246, 249), (265, 276)],
"PERSON": [(72, 90), (173, 184)]
},
{
"LOCATION": [(42, 59), (68, 79), (123, 131), (142, 160)],
"ORG": [(98, 121)]
}
]
X_loaded, y_loaded = load_dataset(file_name)
self.assertIsInstance(X_loaded, list)
self.assertIsInstance(y_loaded, list)
self.assertEqual(len(X_true), len(X_loaded))
self.assertEqual(len(y_true), len(y_loaded))
for sample_idx in range(len(X_true)):
self.assertEqual(X_true[sample_idx], X_loaded[sample_idx])
self.assertIsInstance(y_loaded[sample_idx], dict)
self.assertEqual(set(y_true[sample_idx]), set(y_loaded[sample_idx]))
for ne_type in y_true[sample_idx]:
self.assertIsInstance(y_loaded[sample_idx][ne_type], list)
self.assertEqual(len(y_true[sample_idx][ne_type]), len(y_loaded[sample_idx][ne_type]),
msg='Sample {0}'.format(sample_idx))
for entity_idx in range(len(y_true[sample_idx][ne_type])):
self.assertEqual(y_true[sample_idx][ne_type][entity_idx], y_loaded[sample_idx][ne_type][entity_idx])
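# Same contract as test_load_dataset_positive01, but the JSON fixture was
# serialized without paragraph markup; the expected texts and entity spans
# are identical, so only the on-disk representation differs.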
def test_load_dataset_positive02(self):
base_dir = os.path.join(os.path.dirname(__file__), 'testdata')
file_name = os.path.join(base_dir, 'dataset_without_paragraphs.json')
X_true = [
'Александр Вертинский. «Я не знаю, зачем и кому это нужно…»',
'21 марта 1889 года родился главный русский шансонье XX века, печальный Пьеро, вписавший свою судьбу в '
'историю отечественной культуры',
'Жизнь с самого начала оставляла для Александра Вертинского слишком много вопросов без ответов. Слишком '
'много «пустого» пространства. И он научился заполнять его вымыслом. Создал собственный театр с безумным '
'множеством персонажей, каждый из которых — от сироток-калек и безымянных кокаинеточек до гениальных '
'скрипачей и кинодив — был им самим.',
'Трехкомнатная квартира на последнем этаже дома на углу Тверской и Козицкого переулка в Москве и сегодня '
'выглядит так, словно ее хозяин вот-вот вернется. В просторном кабинете все те же большие книжные шкафы, '
'все тот же гигантский письменный стол с наполеоновским вензелем и бюстом Вольтера.',
'Сейчас в кабинете все чаще бывает лишь вдова Вертинского. Вновь и вновь перечитывает его письма, '
'рукописи. Он смотрит на нее с фотографий, развешанных на стенах, расставленных на столе, и словно '
'возвращает в те пятнадцать лет неизбывного счастья, когда по квартире витает запах табака и лаванды, дом '
'полон гостей и шумные застолья длятся допоздна. И все это — будто здесь и сейчас. Нет, время не '
'остановилось, оно сомкнуло объятия, чтобы вновь и вновь перечитывать эту странную, загадочную судьбу.',
'Считается, что свой голос Георгий Иванов обрёл в эмиграции и благодаря эмиграции. Мол, утрата родины '
'стала для него тем «простым человеческим горем», которого так не хватало по форме безупречным его стихам, '
'чтобы они наполнились содержанием. На самом деле это не совсем так, потому что точка сборки Георгия '
'Иванова была смещена ещё в Петербурге.',
'Георгий Иванов. На грани музыки и сна',
'Первое детское воспоминание Вертинского — о смерти матери. Трехлетний Саша сидит на горшке и выковыривает '
'глаза у плюшевого медвежонка. Горничная Лизка отрывает мальчика от увлекательного занятия: «Вставай, твоя '
'мама умерла!» Мать лежит в серебристом гробу на столе, тело ее скрывают цветы; у изголовья стоят '
'серебряные подсвечники и маленькая табуретка. В руке Саша сжимает шоколадку, он бросается к матери, чтобы '
'угостить. Но мать не раскрывает рта…',
'Через два года от чахотки умер отец. Однажды ранней весной его нашли без чувств на могиле супруги. '
'Оправиться от болезни он уже не смог. Когда кровь хлынула горлом, рядом с ним была только десятилетняя '
'дочь Надя, не знавшая, как помочь. Обессиленный отец упал на подушку и захлебнулся кровью.',
'Старшая сестра матери забрала Надю к себе в Ковно. Саша остался жить в Киеве с другой сестрой матери, '
'которая уверила мальчика в том, что его сестра умерла. То же самое было сказано Наде о брате. Спустя годы '
'Александр случайно обнаружит упоминание о Н. Н. Вертинской в журнале «Театр и искусство», напишет ей, и '
'выяснится, что это его сестра. Во время Первой мировой Вертинскому сообщат, что Надя покончила с собой. '
'Только после смерти Вертинского его вдова выяснит, что Надежда Николаевна живет в Ленинграде.',
'Смерть причудливо и неотвратимо вписалась в его жизнь. Смерть была тем миром, где кончались тщета '
'мальчика Мая и тревоги Безноженьки и наступал долгожданный покой.',
'Александр Вертинский появился на свет «незаконнорожденным». Родственники отца и матери не одобряли союз '
'Николая Вертинского с Евгенией Скалацкой (Сколацкой) даже тогда, когда родились Надя и Саша. Евгения '
'Степановна происходила из дворянского рода, а Николай Петрович был присяжным поверенным. Первая жена отца '
'по настоянию родственников Николая Вертинского не давала ему развода. Так что пришлось усыновить '
'собственных детей.',
'Жизнь с самого начала оставляла для Александра Вертинского слишком много вопросов без ответов. Слишком '
'много «пустого» пространства. И он научился заполнять его вымыслом. Создал собственный театр с безумным '
'множеством персонажей, каждый из которых — от сироток-калек и безымянных кокаинеточек до гениальных '
'скрипачей и кинодив — был им самим.',
'Театр стал маниакальной страстью Вертинского еще с гимназических лет. Он любыми способами проникал на '
'спектакли, оперы, концерты, выступал в любительских постановках в контрактовом зале на киевском Подоле и '
'подвизался статистом в Соловцовском театре — разумеется, бесплатно. А чтобы не умереть с голоду, брался '
'за любую работу — пописывал рецензии на выступления гастролеров, служил корректором в типографии, '
'нанимался помощником бухгалтера в гостиницу, продавал открытки, грузил арбузы на барках и даже '
'подворовывал у двоюродной сестры безделушки, чтобы сбыть их на толкучке.',
'С армией Колчака бежала из Владивостока семья цыган Димитриевичей, на пароходах генерала Врангеля '
'спасались Александр Вертинский и Надежда Плевицкая, уходили куда угодно, лишь бы подальше от Советов, '
'многие звёзды и звёздочки... Да, в первой эмиграции оказалось немало творческих личностей, работавших в '
'интересующем нас жанре русской песни, но даже самые яркие их имена блекнут рядом со сверкающей снежной '
'шапкой Монблана в лице Фёдора Ивановича Шаляпина.',
'Живой бог русской музыки',
'В 1911–1912 годах журналы «Киевская неделя» и «Лукоморье» опубликовали первые рассказы Вертинского: '
'«Красные бабочки» и «Моя невеста» — декадентские, но с бунинской интонацией. «Красные бабочки» — о '
'мальчике-сироте, случайно погубившем красных бабочек, вышитых на черном платье. Мальчик наказан суровой '
'теткой, но бабочки являются ему во сне, чтобы отомстить за погибших сестер. «Моя невеста» — о сумасшедшей '
'бездомной, читающей стихи на эстраде опустевшего осеннего парка. Эта «светлая малютка-невеста» при '
'ближайшем рассмотрении оказывается «маленьким уродливым существом» с «длинным, острым, серо-зеленого '
'цвета лицом», «черно-синими припухшими губами», «без бровей, без ресниц, с глубоко вдавленными в череп '
'глазами».',
'Свободное от литературных посиделок и работы время Вертинский коротал с киевской богемной молодежью в '
'подвальном кабачке, закусывая дешевое вино дешевым сыром. В приобретенном на толкучке подержанном фраке, '
'всегда с живым цветком в петлице, всегда презрительный и надменный, он сыпал заранее продуманными '
'афоризмами и производил на окружающих впечатление большого оригинала. Но прекрасно понимал, что вечно так '
'продолжаться не может.',
'Скопив 25 рублей и подыскав компаньона с театральным гардеробчиком (без собственных костюмов в театрах '
'тогда статистов не брали), Вертинский подался в Москву.',
'Здесь он играл небольшие роли в любительских студиях, поступил в театр миниатюр Марьи Арцыбушевой, где '
'служил за котлеты и борщ, соглашался на любые роли в кино, показывался во МХАТе — но из-за своего '
'грассирующего «р» был отвергнут Станиславским.',
'А внутри бурлило и клокотало, требовало выхода и не находило его. Слишком много вокруг было никому '
'неизвестных талантов и знаменитых бездарностей. Столицы захлестнула эпидемия увлечения кокаином. Его '
'покупали сначала в аптеках, затем с рук, носили в пудреницах и портсигарах, щедро одалживали и '
'одалживались. Однажды выглянув из выходившего на крышу окна мансарды, которую Вертинский снимал, он '
'обнаружил, что весь скат усеян пустыми коричневыми бутылочками из-под кокаина.',
'Вертинский отправился к психиатру, профессору Баженову, и, подойдя к трамвайной остановке, увидел, как '
'Пушкин сошел со своего пьедестала, оставив на нем четкий след. Александр Сергеевич сел вместе с '
'Вертинским в трамвай и достал большой старинный медный пятак — для оплаты.',
'Справиться с пристрастием к кокаину Вертинскому помогла война. Под именем Брат Пьеро он записался в '
'санитарный поезд, курсировавший от Москвы к фронту и обратно. Почти два года Вертинский перевязывал '
'раненых, читал им письма от родных, пел и даже, по его уверению, оперировал.',
'В 1915 году Вертинский вернулся в театр миниатюр Арцыбушевой с собственным номером — «Ариетки Пьеро». На '
'фоне черного занавеса в лунном луче прожектора на сцене появлялся высокий молодой человек. На его густо '
'покрытом белилами лице резко выделялись ярко-красный рот, обведенные тушью большие глаза и печально '
'вздернутые нарисованные брови. После вступления рояля этот странный юноша взмахивал руками и тихо '
'начинал:',
'Я люблю Вас, моя сегоглазочка, Золотая ошибка моя! Вы — вечегняя жуткая сказочка, Вы — цветок из кагтины '
'Гойя.',
'После бесконечных ямщиков и соловьев, аллей и ночей, дышащих сладострастьем, с одной стороны, а с другой '
'с другой — на фоне бравад футуристов, претенциозных поэз Игоря Северянина и одесской шансоньетки Изы '
'Кремер с ее занзибарами-кларами, — печальный Пьеро Вертинского стал сенсацией. Ему удалось невозможное: '
'вписать богемную экзотику — всех этих маленьких креольчиков, смуглых принцев с Антильских островов, '
'китайчат Ли, лиловых негров — в живописный ландшафт одинокой и беззащитной души; превратить ироничную '
'игру культурными символами в откровение глубокой печали.',
'Так певец без выдающихся вокальных данных, композитор, не знавший нотной грамоты, актер с дефектом дикции '
'стал всероссийским кумиром. Издательство «Прогрессивные новости» Б. Андржеевского огромными тиражами '
'выпускало «Песенки Вертинского», которые впечатлительные курсистки развозили по всей стране.',
'Начались гастроли и бенефисы, от восторженной и возмущенной публики нередко приходилось спасаться через '
'черный ход. Посыпались приглашения в кино. Популярность Вертинского была столь велика, что в феврале 1917 '
'года Александра Керенского называли «печальным Пьеро российской революции».',
'Как и подавляющее большинство представителей русской интеллигенции, Вертинский связывал с Февральской '
'революцией опьяняющие надежды на обновление и очищение. Октябрьский переворот заставил протрезветь. Под '
'впечатлением гибели московских юнкеров, убитых большевиками, Вертинский написал знаменитых «Юнкеров»:',
'Я не знаю, зачем и кому это нужно, Кто послал их на смерть недрожавшей рукой, Только так беспощадно, так '
'зло и ненужно Опустили их в вечный покой.',
'Песня стала настоящим белогвардейским гимном — с нею шли в бой и умирали русские офицеры и юнкера. '
'Существует легенда, что Вертинского вызывали в ЧК для дачи объяснений по поводу контрреволюционной песни. '
'Артист возмутился: «Но вы же не можете запретить мне их жалеть!» И в ответ услышал: «Дышать запретим, '
'если потребуется».',
'Как и многие эпизоды из жизни Вертинского, допрос в ЧК не имеет документальных подтверждений. Тем не '
'менее факт остается фактом: вслед за отступающей белой армией, как и многие российские артисты, '
'Вертинский подался на юг, где все еще верили в счастливую развязку и мучились тяжелым предчувствием, что '
'ее никогда не будет.',
'В 1920 году на пароходе «Великий князь Александр Михайлович», увозящем барона Врангеля, Вертинский '
'покинул Россию, отправившись в добровольное изгнание на 23 года.',
'Его одиссея началась с Константинополя, где он пел разноязыким эмигрантам цыганские романсы и раздобыл '
'греческий паспорт на имя Александра Вертидиса. Закружилась круговерть авантюр, лиц, городов, стран. '
'Румыния, Польша, Германия, Австрия, Венгрия, Палестина, Египет, Ливия, Франция, США… Выступления в '
'ресторанах и кабаках — между горячим и десертом; в мюзик-холлах и фешенебельных отелях — для королей '
'Густава Шведского, Альфонса Испанского, принца Уэльского, для Вандербильтов и Ротшильдов.',
'В Бессарабии его арестовали по обвинению в просоветской пропаганде песней «В степи молдаванской» — в '
'особенности строками «О, как сладко, как больно сквозь слезы / Хоть взглянуть на родную страну…» '
'Естественно, в деятельности Вертинского усмотрели происки НКВД. С тех пор слава чекистского агента '
'бросает тень на его репутацию по сей день — как будто агент НКВД не может быть великим артистом…',
'Все двадцать с лишним лет, где бы Вертинский ни выступал, он пел только на русском (исключение делал лишь '
'для любимой Франции, где исполнял несколько своих песенок по-французски). Его основной аудиторией, '
'конечно же, была русская эмиграция, для которой печальный Пьеро являлся не просто символом утраченной '
'России, но, по выражению Шаляпина, «сказителем земли русской».',
'Уже с начала 1920-х Вертинский просил разрешения вернуться — через советское консульство, через Анатолия '
'Луначарского, возглавившего советскую делегацию в Берлине, — но неизменно получал отказ.',
'В конце 1935 года он приехал в Китай — в Шанхае и Харбине была довольно обширная русская община. В Шанхае '
'артист дал двадцать аншлаговых концертов (даже Шаляпину здесь сумели организовать только два '
'выступления), однако бесконечно петь для одной и той же аудитории невозможно, и Вертинский намеревался '
'через какое-то время вернуться в Европу. Но в 1937 году его вдруг пригласили в СССР — без всяких просьб '
'со стороны артиста. Вертинский остался в Китае, ожидая, когда организуют возвращение. Он ждал пять лет.',
'Что побудило Сталина позвать Вертинского? Рассказывали, что генералиссимус любил слушать ариетки Брата '
'Пьеро в часы отдыха — особенно песню «В синем и далеком океане». Легенда приписывает также Сталину '
'известную фразу «Дадим артисту Вертинскому спокойно дожить на Родине», произнесенную после того, как '
'«отец всех народов» лично вычеркнул артиста из ждановского постановления, громившего Дмитрия Шостаковича '
'и Сергея Прокофьева. Нравился Сталину Вертинский или нет, несомненно одно — возвращение «соловья '
'белоэмиграции», мировой знаменитости было идеологически выгодно советскому режиму, тем более в 1943 году, '
'когда открылся союзный фронт и в стране бродили оттепельные настроения.',
'Вертинский же всегда и всем говорил о том, что возвращается, чтобы «рассказать о страданиях эмиграции» и '
'«помирить Родину с ней». «Шанхайская Тэффи» Наталия Ильина не преминула по этому поводу съязвить в '
'автобиографическом романе «Возвращение». Ее Джордж Эрмин (Георгий Еремин), подозрительно похожий на '
'Вертинского, прочитав Конституцию СССР, перекрестился и изрек: «Я подумал, что же это — Китеж, '
'воскресающий без нас!»',
'Ранним утром 4 ноября 1943 года на пароходе «Дайрен-Мару» Вертинский покинул Шанхай. С ним были его '
'двадцатилетняя жена Лидия и ее мать, на руках он держал трехмесячную дочь Марианну. Необходимость '
'содержать семью была не самой последней причиной переезда в СССР. Шла война, зверствовала инфляция, '
'иностранные конторы в Китае закрывались, русские эмигранты спасались от японской оккупации. Выступать '
'становилось все труднее. Вертинский пускался в рискованные финансовые авантюры, не имевшие успеха. Его '
'самой удачной коммерческой операцией была закупка пяти бутылей водки накануне рождения ребенка. Продав '
'их после повышения цен, Вертинский оплатил счета за услуги роддома.',
'Первым советским городом на их пути стала Чита. Стоял жуткий мороз, семью Вертинского поселили в '
'гостинице, где практически не топили, а по стенам ползали клопы. А в местной филармонии артиста уже '
'поджидала телеграмма из Москвы с распоряжением дать в Чите несколько концертов. Родина встречала блудного '
'сына.',
'О его возвращении ходили анекдоты. В одном из них рассказывалось, как Вертинский, приехав в СССР, выходит '
'из вагона с двумя чемоданами, ставит их, целует землю и смотрит вокруг: «Не узнаю тебя, Россия!» '
'Обернувшись, обнаруживает, что чемоданов нет. «Узнаю тебя, Россия!» — восклицает артист. В другом '
'повествовалось о приеме, устроенном в честь Вертинского «пролетарским графом» Алексеем Николаевичем '
'Толстым. Гости долго томятся, ожидая, когда их пригласят к столу. Кто-то из присутствующих, оглядев '
'собравшееся общество — граф Толстой, граф Игнатьев, митрополит Николай Крутицкий, Александр Вертинский, —'
' спрашивает: «Кого ждем?» Остроумец-куплетист Смирнов-Сокольский отвечает: «Государя!»',
'Первой советской киноролью Вертинского стал кардинал Бирнч в фильме Михаила Калатозова «Заговор '
'обреченных». Актер сыграл изысканного, сладкоречивого патриция со следами былого донжуанства. Так и '
'должен выглядеть настоящий враг советского режима — образованный, воспитанный, обвораживающий своим '
'лоском. Только такие и могут строить заговоры и вынашивать планы государственного переворота. Сталинская '
'премия за роль кардинала свидетельствовала о высочайшем одобрении этой трактовки.',
'Такого же двуликого Януса Вертинский исполнил в помпезном фильме Сергея Юткевича «Великий воин '
'Скандербег». Возможно, он играл бы маскирующихся иродов и дальше, если бы Исидор Анненский не предложил '
'ему роль князя в экранизации чеховской «Анны на шее». Одним своим появлением на экране Вертинский, этот '
'обломок царской России, воскрешал шик дворянских собраний и балов при дворе.',
'Положение «советского артиста» Вертинского было довольно странным. С одной стороны, явное благоволение '
'властей: его с семьей поселили в «Метрополе», затем выделили квартиру, наградили высшей государственной '
'премией. Правда, семья в течение трех лет обитала в «Метрополе» не от хорошей жизни. Съехать было просто '
'некуда, потому что выделенная квартира находилась на первом этаже двухэтажного дома на Хорошевском шоссе. '
'Артист опасался поселяться в ней и с помощью сложных маневров обменял ее на квартиру на улице Горького, '
'которая была в таком жутком состоянии, что нуждалась в капитальном ремонте. Опасения Вертинского, как '
'выяснилось позже, были не напрасны — квартира на Хорошевском шоссе подверглась налету знаменитой «Черной '
'кошки».',
'С другой стороны, из ста с лишним песен к исполнению было разрешено не более тридцати (авторство текстов '
'Георгия Иванова и Николая Гумилева Вертинскому пришлось приписать себе), единственная прижизненная '
'пластинка вышла в 1944 году, о концертах — ни строчки в прессе. «Я существую на правах публичного дома, —'
' горько шутил Вертинский, — все ходят, но в обществе говорить об этом не принято».',
'Из эмиграции Вертинский вернулся практически с пустыми карманами, вскоре родилась вторая дочь, Настя. '
'Гастрольбюро обеспечило артисту по 20–25 концертов в месяц по всей стране от Средней Азии до Дальнего '
'Востока — в нетопленных, неприспособленных для выступлений залах с расстроенными роялями и пьяной '
'публикой. Но концертная жизнь в европейских кабаках приучила его работать в любых условиях.',
'Платили Вертинскому по самому низкому тарифу, поскольку у него не было никаких званий. За концерт артист '
'получал около 800 рублей, при этом его выступления всегда проходили при аншлагах и собирали десятки тысяч '
'рублей. Приходилось соглашаться на все, давать левые концерты, выкручиваться, объясняться… Вместе с '
'аккомпаниатором Михаилом Брохесом он вдоль и поперек исколесил всю страну по нескольку раз, дав около '
'трех тысяч концертов. Написал два десятка стихов, работал над мемуарами, которые не успел закончить. 14 '
'лет на Родине превратили бодрого, моложавого мужчину в глубокого старика.',
'Он не хотел умереть дома, не желал, чтобы родные видели «кухню смерти». 21 мая 1957 года Вертинский '
'готовился к концерту в Ленинграде, был сдержан и немногословен. Он находился в своем 208-м номере '
'«Астории», когда начался сердечный приступ. Лекарства под рукой не оказалось. Как выяснилось позже — оно '
'бы уже не помогло. При вскрытии сосуды рассыпались, как хрупкое стекло',
'Назначен куратор строительства российской Кремниевой долины',
'Дмитрий Медведев доверил пост руководителя иннограда миллиардеру Виктору Вексельбергу.',
'Всё меньше остаётся нерешённых вопросов, касающихся возведения в России уникального Центра по разработке '
'и коммерциализации новых технологий. Власти уже не только выбрали площадку для строительства '
'отечественной Кремниевой долины в подмосковном Сколково, а также частично одобрили концепцию наукограда, '
'но и определили куратора большой инновационной стройки. «Были проведены определённые консультации по '
'поводу того, кто конкретно мог бы осуществлять такого рода работу. Мною принято решение, что российскую '
'часть этой координирующей структуры, которую мы создадим, возглавит Виктор Феликсович Вексельберг», — '
'цитирует «Взгляд» Дмитрия Медведева.',
'Исходя из заявления президента, понятно, что у проекта будут не только российские инвесторы, но и '
'иностранные партнёры, в числе которых, по словам главы государства, будут и представители иностранных '
'научных кругов. Именно на базе взаимодействия науки и бизнеса должен появиться и работать инноград. «Всё '
'это затеяли не ради того, чтобы построить определённое количество коттеджей или же создать там нормальные '
'производственные условия, лаборатории. Это всё важно, но это всё инфраструктура. Самое главное, чтобы '
'там появились люди. Для того чтобы люди появились, должна быть внятная система управления. Эта система '
'управления зависит от нас. Я думаю, что с учётом масштабности этого проекта, а с другой стороны, того, '
'что в реализации этого проекта должны быть заинтересованы не только государственные структуры, но, '
'прежде всего, российский бизнес, я считаю, что координацией российский бизнес и мог бы заняться», — '
'заявил Дмитрий Медведев.',
'Это выступление президента вполне объясняет выбор руководителя проекта. Виктор Вексельберг — бизнесмен с '
'30-летним стажем, капитал которого оценивается в 6,4 млрд долларов. Вексельберг является главой правления '
'ОАО «Тюменская нефтяная компания» (ТНК) и президентом ЗАО «Ренова». Именно он является владельцем '
'значительной части российского титана и алюминиевого бизнеса.',
'О том, почему площадкой для строительства Кремниевой долины выбрано Подмосковье, читайте в статье '
'Частного корреспондента «Сколково назначили Кремниевой долиной»'
]
y_true = [
{
"PERSON": [(0, 20)]
},
{
"PERSON": [(71, 76)]
},
{
"PERSON": [(36, 58)]
},
{
"LOCATION": [(55, 63), (66, 84), (87, 93)],
"PERSON": [(281, 289)]
},
{
"PERSON": [(45, 56)]
},
{
"LOCATION": [(334, 344)],
"PERSON": [(26, 40), (299, 314)]
},
{
"PERSON": [(0, 14)]
},
{
"PERSON": [(28, 39), (70, 74), (146, 151), (362, 366)]
},
{
"PERSON": [(207, 211)]
},
{
"PERSON": [(30, 34), (51, 55), (182, 186), (208, 217), (250, 266), (367, 378), (392, 396), (436, 447),
(471, 489)],
"LOCATION": [(44, 49), (71, 76), (498, 508)],
"ORG": [(269, 295)]
},
{
"PERSON": [(107, 110), (121, 132)]
},
{
"PERSON": [(0, 20), (104, 123), (126, 144), (146, 155), (184, 188), (191, 195), (197, 215), (251, 267),
(338, 357)]
},
{
"PERSON": [(36, 58)]
},
{
"PERSON": [(33, 44)],
"LOCATION": [(168, 185), (189, 197), (198, 204)],
"ORG": [(230, 249)]
},
{
"PERSON": [(9, 16), (89, 97), (108, 128), (131, 148), (430, 455)],
"LOCATION": [(27, 39), (191, 198), (414, 422)],
"ORG": [(2, 16)]
},
dict(),
{
"PERSON": [(87, 98)],
"ORG": [(18, 56)]
},
{
"LOCATION": [(72, 80)],
"PERSON": [(51, 61)]
},
{
"LOCATION": [(151, 157)],
"PERSON": [(130, 140)]
},
{
"PERSON": [(80, 97), (233, 246)],
"ORG": [(65, 97), (177, 182)]
},
{
"PERSON": [(373, 383)]
},
{
"PERSON": [(0, 10), (46, 54), (103, 109), (166, 185), (199, 209)]
},
{
"LOCATION": [(135, 141)],
"PERSON": [(36, 47), (79, 84), (177, 187)]
},
{
"PERSON": [(12, 22), (49, 60), (94, 99)],
"ORG": [(34, 60)]
},
{
"PERSON": [(105, 109)]
},
{
"LOCATION": [(389, 408)],
"PERSON": [(162, 178), (202, 212), (251, 256), (257, 268)]
},
{
"PERSON": [(171, 187), (226, 237)],
"ORG": [(134, 169)]
},
{
"PERSON": [(160, 171), (215, 236), (257, 262)]
},
{
"PERSON": [(68, 78), (267, 277)]
},
dict(),
{
"PERSON": [(123, 134)],
"ORG": [(146, 148)]
},
{
"PERSON": [(30, 41), (197, 207)],
"ORG": [(52, 54)]
},
{
"LOCATION": [(107, 113)],
"PERSON": [(39, 59), (78, 86), (88, 98)]
},
{
"LOCATION": [(23, 38), (203, 210), (212, 218), (220, 228), (230, 237), (239, 246), (248, 257),
(259, 265), (267, 272), (274, 281), (283, 286)],
"PERSON": [(128, 148), (403, 420), (422, 441), (450, 459)]
},
{
"PERSON": [(226, 237)],
"LOCATION": [(2, 12)],
"ORG": [(256, 260), (357, 361)]
},
{
"LOCATION": [(118, 125), (307, 313)],
"PERSON": [(34, 44), (263, 268), (332, 340)]
},
{
"PERSON": [(20, 30), (96, 117)],
"LOCATION": [(155, 162)],
"ORG": [(67, 88), (133, 152)]
},
{
"LOCATION": [(31, 36), (41, 47), (50, 57), (99, 105), (335, 341), (381, 385), (447, 452)],
"PERSON": [(153, 161), (279, 289), (426, 436)]
},
{
"PERSON": [(13, 20), (29, 40), (103, 108), (194, 201), (233, 244), (388, 407), (410, 427), (438, 445),
(446, 456)]
},
{
"LOCATION": [(338, 342), (392, 397)],
"PERSON": [(0, 10), (142, 147), (149, 163), (248, 260), (262, 276), (304, 315)]
},
{
"LOCATION": [(45, 51), (52, 56), (77, 83), (258, 262), (320, 325)],
"PERSON": [(58, 68), (120, 125), (174, 182), (425, 435), (630, 640)]
},
{
"LOCATION": [(42, 46), (221, 227), (251, 255)],
"PERSON": [(74, 85)]
},
{
"LOCATION": [(92, 96), (194, 200), (262, 268)],
"PERSON": [(70, 80), (345, 356), (379, 408), (529, 536), (543, 551), (564, 581), (583, 603), (652, 670)]
},
{
"PERSON": [(27, 38), (53, 58), (68, 86)]
},
{
"LOCATION": [(319, 325)],
"PERSON": [(20, 25), (26, 36), (65, 80), (95, 105), (169, 185), (239, 243), (286, 296)]
},
{
"PERSON": [(31, 42), (607, 618)],
"LOCATION": [(137, 146), (260, 269), (399, 416), (506, 520), (673, 690)],
"ORG": [(722, 734)]
},
{
"PERSON": [(105, 120), (123, 139), (140, 151), (323, 333)]
},
{
"PERSON": [(13, 23), (95, 100)],
"LOCATION": [(179, 191), (195, 211)],
"ORG": [(102, 114)]
},
{
"PERSON": [(8, 19), (327, 344)]
},
{
"LOCATION": [(123, 133), (199, 206)],
"PERSON": [(89, 99)]
},
{
"LOCATION": [(31, 59)]
},
{
"LOCATION": [(43, 52)],
"PERSON": [(0, 16), (65, 85)]
},
{
"ORG": [(84, 140), (620, 626)],
"LOCATION": [(65, 71), (212, 229), (232, 253), (291, 301)],
"PERSON": [(576, 605), (628, 645)]
},
{
"LOCATION": [(153, 164), (290, 298)],
"PERSON": [(925, 941)]
},
{
"ORG": [(201, 243), (246, 249), (265, 276)],
"PERSON": [(72, 90), (173, 184)]
},
{
"LOCATION": [(42, 59), (68, 79), (123, 131), (142, 160)],
"ORG": [(98, 121)]
}
]
X_loaded, y_loaded = load_dataset(file_name)
self.assertIsInstance(X_loaded, list)
self.assertIsInstance(y_loaded, list)
self.assertEqual(len(X_true), len(X_loaded))
self.assertEqual(len(y_true), len(y_loaded))
for sample_idx in range(len(X_true)):
self.assertEqual(X_true[sample_idx], X_loaded[sample_idx])
self.assertIsInstance(y_loaded[sample_idx], dict)
self.assertEqual(set(y_true[sample_idx]), set(y_loaded[sample_idx]))
for ne_type in y_true[sample_idx]:
self.assertIsInstance(y_loaded[sample_idx][ne_type], list)
self.assertEqual(len(y_true[sample_idx][ne_type]), len(y_loaded[sample_idx][ne_type]),
msg='Sample {0}'.format(sample_idx))
for entity_idx in range(len(y_true[sample_idx][ne_type])):
self.assertEqual(y_true[sample_idx][ne_type][entity_idx], y_loaded[sample_idx][ne_type][entity_idx])
if __name__ == '__main__':
unittest.main(verbosity=2)
| 65.13251 | 120 | 0.59466 | 9,008 | 79,136 | 5.240009 | 0.261878 | 0.005148 | 0.003305 | 0.003813 | 0.993009 | 0.992543 | 0.988899 | 0.986759 | 0.986759 | 0.986759 | 0 | 0.103164 | 0.304754 | 79,136 | 1,214 | 121 | 65.186161 | 0.747596 | 0 | 0 | 0.793361 | 0 | 0.036515 | 0.580861 | 0.001276 | 0 | 0 | 0 | 0 | 0.024896 | 1 | 0.00332 | false | 0 | 0.007469 | 0 | 0.011618 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
9a880ada6e4a60d2a950d6833b627c4b7bbb4fdf | 159,950 | py | Python | examples/gmplib_dev/pygmplib/__init__.py | ISoirar/pypp11 | 7f929064766a48d9cb3f3b29c93fdc938b83bac5 | ["BSL-1.0"] | 9 | 2016-06-07T19:14:53.000Z | 2020-02-28T09:06:19.000Z | examples/gmplib_dev/pygmplib/__init__.py | asford/pyplusplus | 18485e9013e30b1f7776b6039eeaa2fbdb73f183 | ["BSL-1.0"] | 1 | 2018-08-15T11:33:40.000Z | 2018-08-15T11:33:40.000Z | examples/gmplib_dev/pygmplib/__init__.py | ISoirar/pypp11 | 7f929064766a48d9cb3f3b29c93fdc938b83bac5 | ["BSL-1.0"] | 5 | 2016-06-23T09:37:00.000Z | 2019-12-18T13:51:29.000Z | # This file has been generated by Py++.
import ctypes
import ctypes_utils
libgmp_lib = ctypes.CDLL(r"/usr/lib/libgmp.so.3.5.0")
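# Note that the path above is absolute and pinned to one libgmp version, so it
# is machine-specific. A usage sketch (an illustration, not emitted by Py++):
# the exported GMP symbols listed below can be called straight off libgmp_lib
# once the standard __mpz_struct layout is declared via ctypes, e.g.:
#
#     class _mpz(ctypes.Structure):
#         _fields_ = [('_mp_alloc', ctypes.c_int),
#                     ('_mp_size', ctypes.c_int),
#                     ('_mp_d', ctypes.c_void_p)]
#
#     z = _mpz()
#     libgmp_lib.__gmpz_init(ctypes.byref(z))
#     libgmp_lib.__gmpz_set_str(ctypes.byref(z), b'123456789', 10)
#     libgmp_lib.__gmpz_mul(ctypes.byref(z), ctypes.byref(z), ctypes.byref(z))
#     libgmp_lib.__gmpz_get_str.restype = ctypes.c_char_p
#     print(libgmp_lib.__gmpz_get_str(None, 10, ctypes.byref(z)))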
libgmp_lib.undecorated_names = {  # mapping between decorated and undecorated names
"extern double __gmpf_get_d(mpf_srcptr arg0) [free function]" : "__gmpf_get_d",
"extern int __gmpf_cmp_ui(mpf_srcptr arg0, long unsigned int arg1) [free function]" : "__gmpf_cmp_ui",
"extern void __gmpz_mul_ui(mpz_ptr arg0, mpz_srcptr arg1, long unsigned int arg2) [free function]" : "__gmpz_mul_ui",
"extern void __gmpz_and(mpz_ptr arg0, mpz_srcptr arg1, mpz_srcptr arg2) [free function]" : "__gmpz_and",
"extern void __gmpf_urandomb(__mpf_struct * arg0, __gmp_randstate_struct * arg1, long unsigned int arg2) [free function]" : "__gmpf_urandomb",
"extern long unsigned int __gmpz_tdiv_q_ui(mpz_ptr arg0, mpz_srcptr arg1, long unsigned int arg2) [free function]" : "__gmpz_tdiv_q_ui",
"extern void __gmpz_clrbit(mpz_ptr arg0, long unsigned int arg1) [free function]" : "__gmpz_clrbit",
"extern void __gmpz_cdiv_r_2exp(mpz_ptr arg0, mpz_srcptr arg1, long unsigned int arg2) [free function]" : "__gmpz_cdiv_r_2exp",
"extern void __gmpz_lcm(mpz_ptr arg0, mpz_srcptr arg1, mpz_srcptr arg2) [free function]" : "__gmpz_lcm",
"extern double __gmpf_get_d_2exp(long int * arg0, mpf_srcptr arg1) [free function]" : "__gmpf_get_d_2exp",
"extern int __gmpz_divisible_2exp_p(mpz_srcptr arg0, long unsigned int arg1) [free function]" : "__gmpz_divisible_2exp_p",
"extern int __gmpz_congruent_2exp_p(mpz_srcptr arg0, mpz_srcptr arg1, long unsigned int arg2) [free function]" : "__gmpz_congruent_2exp_p",
"extern void __gmpz_pow_ui(mpz_ptr arg0, mpz_srcptr arg1, long unsigned int arg2) [free function]" : "__gmpz_pow_ui",
"void __gmpq_neg(mpq_ptr __gmp_w, mpq_srcptr __gmp_u) [free function]" : "__gmpq_neg",
"extern void __gmpf_reldiff(mpf_ptr arg0, mpf_srcptr arg1, mpf_srcptr arg2) [free function]" : "__gmpf_reldiff",
"extern void __gmpz_import(mpz_ptr arg0, size_t arg1, int arg2, size_t arg3, int arg4, size_t arg5, void const * arg6) [free function]" : "__gmpz_import",
"extern void __gmpz_fac_ui(mpz_ptr arg0, long unsigned int arg1) [free function]" : "__gmpz_fac_ui",
"extern int __gmpz_root(mpz_ptr arg0, mpz_srcptr arg1, long unsigned int arg2) [free function]" : "__gmpz_root",
"extern void __gmpz_fdiv_q(mpz_ptr arg0, mpz_srcptr arg1, mpz_srcptr arg2) [free function]" : "__gmpz_fdiv_q",
"extern void __gmpz_fdiv_r(mpz_ptr arg0, mpz_srcptr arg1, mpz_srcptr arg2) [free function]" : "__gmpz_fdiv_r",
"extern void __gmp_set_memory_functions(void * (*)( ::size_t ) * arg0, void * (*)( void *,::size_t,::size_t ) * arg1, void (*)( void *,::size_t ) * arg2) [free function]" : "__gmp_set_memory_functions",
"extern long unsigned int __gmpz_tdiv_r_ui(mpz_ptr arg0, mpz_srcptr arg1, long unsigned int arg2) [free function]" : "__gmpz_tdiv_r_ui",
"extern long unsigned int __gmpz_cdiv_r_ui(mpz_ptr arg0, mpz_srcptr arg1, long unsigned int arg2) [free function]" : "__gmpz_cdiv_r_ui",
"extern void __gmpz_realloc2(mpz_ptr arg0, long unsigned int arg1) [free function]" : "__gmpz_realloc2",
"extern void __gmpn_tdiv_qr(mp_ptr arg0, mp_ptr arg1, mp_size_t arg2, mp_srcptr arg3, mp_size_t arg4, mp_srcptr arg5, mp_size_t arg6) [free function]" : "__gmpn_tdiv_qr",
"extern void __gmpz_fdiv_r_2exp(mpz_ptr arg0, mpz_srcptr arg1, long unsigned int arg2) [free function]" : "__gmpz_fdiv_r_2exp",
"extern void __gmpz_sqrt(mpz_ptr arg0, mpz_srcptr arg1) [free function]" : "__gmpz_sqrt",
"extern void __gmpq_add(mpq_ptr arg0, mpq_srcptr arg1, mpq_srcptr arg2) [free function]" : "__gmpq_add",
"extern void __gmpq_div(mpq_ptr arg0, mpq_srcptr arg1, mpq_srcptr arg2) [free function]" : "__gmpq_div",
"extern long unsigned int __gmpf_get_default_prec() [free function]" : "__gmpf_get_default_prec",
"extern void __gmpq_sub(mpq_ptr arg0, mpq_srcptr arg1, mpq_srcptr arg2) [free function]" : "__gmpq_sub",
"extern void __gmpf_set_ui(mpf_ptr arg0, long unsigned int arg1) [free function]" : "__gmpf_set_ui",
"extern double __gmpz_get_d(mpz_srcptr arg0) [free function]" : "__gmpz_get_d",
"extern void __gmpz_add(mpz_ptr arg0, mpz_srcptr arg1, mpz_srcptr arg2) [free function]" : "__gmpz_add",
"int __gmpn_cmp(mp_srcptr __gmp_xp, mp_srcptr __gmp_yp, mp_size_t __gmp_size) [free function]" : "__gmpn_cmp",
"extern void __gmpz_divexact_ui(mpz_ptr arg0, mpz_srcptr arg1, long unsigned int arg2) [free function]" : "__gmpz_divexact_ui",
"extern long unsigned int __gmpz_gcd_ui(mpz_ptr arg0, mpz_srcptr arg1, long unsigned int arg2) [free function]" : "__gmpz_gcd_ui",
"extern size_t __gmpz_inp_str(mpz_ptr arg0, FILE * arg1, int arg2) [free function]" : "__gmpz_inp_str",
"extern int __gmp_snprintf(char * arg0, size_t arg1, char const * arg2, ...) [free function]" : "__gmp_snprintf",
"extern void __gmpf_set_prec_raw(mpf_ptr arg0, long unsigned int arg1) [free function]" : "__gmpf_set_prec_raw",
"extern void __gmpz_cdiv_q_2exp(mpz_ptr arg0, mpz_srcptr arg1, long unsigned int arg2) [free function]" : "__gmpz_cdiv_q_2exp",
"extern int __gmpz_fits_sshort_p(mpz_srcptr arg0) [free function]" : "__gmpz_fits_sshort_p",
"extern mp_limb_t __gmpn_divrem(mp_ptr arg0, mp_size_t arg1, mp_ptr arg2, mp_size_t arg3, mp_srcptr arg4, mp_size_t arg5) [free function]" : "__gmpn_divrem",
"extern void __gmpz_submul(mpz_ptr arg0, mpz_srcptr arg1, mpz_srcptr arg2) [free function]" : "__gmpz_submul",
"extern void __gmpz_init_set(mpz_ptr arg0, mpz_srcptr arg1) [free function]" : "__gmpz_init_set",
"extern void __gmpz_xor(mpz_ptr arg0, mpz_srcptr arg1, mpz_srcptr arg2) [free function]" : "__gmpz_xor",
"extern void __gmpz_init_set_d(mpz_ptr arg0, double arg1) [free function]" : "__gmpz_init_set_d",
"int __gmpz_fits_ushort_p(mpz_srcptr __gmp_z) [free function]" : "__gmpz_fits_ushort_p",
"extern int __gmp_sscanf(char const * arg0, char const * arg1, ...) [free function]" : "__gmp_sscanf",
"extern void __gmpz_mul_2exp(mpz_ptr arg0, mpz_srcptr arg1, long unsigned int arg2) [free function]" : "__gmpz_mul_2exp",
"extern void __gmpz_sub(mpz_ptr arg0, mpz_srcptr arg1, mpz_srcptr arg2) [free function]" : "__gmpz_sub",
"extern int __gmpf_fits_ulong_p(mpf_srcptr arg0) [free function]" : "__gmpf_fits_ulong_p",
"extern void __gmpz_ui_pow_ui(mpz_ptr arg0, long unsigned int arg1, long unsigned int arg2) [free function]" : "__gmpz_ui_pow_ui",
"extern long unsigned int __gmp_urandomm_ui(__gmp_randstate_struct * arg0, long unsigned int arg1) [free function]" : "__gmp_urandomm_ui",
"long unsigned int __gmpz_get_ui(mpz_srcptr __gmp_z) [free function]" : "__gmpz_get_ui",
"extern int __gmpz_cmpabs_ui(mpz_srcptr arg0, long unsigned int arg1) [free function]" : "__gmpz_cmpabs_ui",
"extern void __gmpz_tdiv_q_2exp(mpz_ptr arg0, mpz_srcptr arg1, long unsigned int arg2) [free function]" : "__gmpz_tdiv_q_2exp",
"int __gmpz_perfect_square_p(mpz_srcptr __gmp_a) [free function]" : "__gmpz_perfect_square_p",
"extern void __gmpq_set_d(mpq_ptr arg0, double arg1) [free function]" : "__gmpq_set_d",
"extern int __gmpz_cmp_d(mpz_srcptr arg0, double arg1) [free function]" : "__gmpz_cmp_d",
"extern void __gmpz_cdiv_qr(mpz_ptr arg0, mpz_ptr arg1, mpz_srcptr arg2, mpz_srcptr arg3) [free function]" : "__gmpz_cdiv_qr",
"extern void __gmpf_add(mpf_ptr arg0, mpf_srcptr arg1, mpf_srcptr arg2) [free function]" : "__gmpf_add",
"extern int __gmpz_probab_prime_p(mpz_srcptr arg0, int arg1) [free function]" : "__gmpz_probab_prime_p",
"extern mp_limb_t __gmpn_rshift(mp_ptr arg0, mp_srcptr arg1, mp_size_t arg2, unsigned int arg3) [free function]" : "__gmpn_rshift",
"extern void __gmpz_array_init(mpz_ptr arg0, mp_size_t arg1, mp_size_t arg2) [free function]" : "__gmpz_array_init",
"int __gmpz_fits_uint_p(mpz_srcptr __gmp_z) [free function]" : "__gmpz_fits_uint_p",
"extern void __gmpf_random2(mpf_ptr arg0, mp_size_t arg1, mp_exp_t arg2) [free function]" : "__gmpf_random2",
"extern void __gmp_randinit_set(__gmp_randstate_struct * arg0, __gmp_randstate_struct const * arg1) [free function]" : "__gmp_randinit_set",
"extern void __gmpz_tdiv_qr(mpz_ptr arg0, mpz_ptr arg1, mpz_srcptr arg2, mpz_srcptr arg3) [free function]" : "__gmpz_tdiv_qr",
"extern mp_size_t __gmpn_set_str(mp_ptr arg0, unsigned char const * arg1, size_t arg2, int arg3) [free function]" : "__gmpn_set_str",
"extern long unsigned int __gmpn_scan0(mp_srcptr arg0, long unsigned int arg1) [free function]" : "__gmpn_scan0",
"extern void __gmpz_cdiv_r(mpz_ptr arg0, mpz_srcptr arg1, mpz_srcptr arg2) [free function]" : "__gmpz_cdiv_r",
"extern long unsigned int __gmpz_fdiv_qr_ui(mpz_ptr arg0, mpz_ptr arg1, mpz_srcptr arg2, long unsigned int arg3) [free function]" : "__gmpz_fdiv_qr_ui",
"extern void __gmpf_init_set_ui(mpf_ptr arg0, long unsigned int arg1) [free function]" : "__gmpf_init_set_ui",
"extern void __gmpn_mul_n(mp_ptr arg0, mp_srcptr arg1, mp_srcptr arg2, mp_size_t arg3) [free function]" : "__gmpn_mul_n",
"extern int __gmpq_cmp_ui(mpq_srcptr arg0, long unsigned int arg1, long unsigned int arg2) [free function]" : "__gmpq_cmp_ui",
"extern void __gmpz_mul_si(mpz_ptr arg0, mpz_srcptr arg1, long int arg2) [free function]" : "__gmpz_mul_si",
"extern void __gmpq_set_si(mpq_ptr arg0, long int arg1, long unsigned int arg2) [free function]" : "__gmpq_set_si",
"extern void __gmpq_set_ui(mpq_ptr arg0, long unsigned int arg1, long unsigned int arg2) [free function]" : "__gmpq_set_ui",
"extern void __gmpf_sqrt_ui(mpf_ptr arg0, long unsigned int arg1) [free function]" : "__gmpf_sqrt_ui",
"extern size_t __gmpq_inp_str(mpq_ptr arg0, FILE * arg1, int arg2) [free function]" : "__gmpq_inp_str",
"extern int __gmpf_fits_sint_p(mpf_srcptr arg0) [free function]" : "__gmpf_fits_sint_p",
"extern void __gmpq_swap(mpq_ptr arg0, mpq_ptr arg1) [free function]" : "__gmpq_swap",
"extern int __gmpf_set_str(mpf_ptr arg0, char const * arg1, int arg2) [free function]" : "__gmpf_set_str",
"extern void __gmpz_sub_ui(mpz_ptr arg0, mpz_srcptr arg1, long unsigned int arg2) [free function]" : "__gmpz_sub_ui",
"extern void __gmpz_divexact(mpz_ptr arg0, mpz_srcptr arg1, mpz_srcptr arg2) [free function]" : "__gmpz_divexact",
"extern void __gmpz_com(mpz_ptr arg0, mpz_srcptr arg1) [free function]" : "__gmpz_com",
"extern void __gmpz_ior(mpz_ptr arg0, mpz_srcptr arg1, mpz_srcptr arg2) [free function]" : "__gmpz_ior",
"extern int __gmpz_fits_slong_p(mpz_srcptr arg0) [free function]" : "__gmpz_fits_slong_p",
"extern int __gmp_asprintf(char * * arg0, char const * arg1, ...) [free function]" : "__gmp_asprintf",
"__gmp_bits_per_limb [variable]" : "__gmp_bits_per_limb",
"extern void __gmpf_set_prec(mpf_ptr arg0, long unsigned int arg1) [free function]" : "__gmpf_set_prec",
"extern int __gmpz_init_set_str(mpz_ptr arg0, char const * arg1, int arg2) [free function]" : "__gmpz_init_set_str",
"mp_limb_t __gmpn_sub_1(mp_ptr __gmp_dst, mp_srcptr __gmp_src, mp_size_t __gmp_size, mp_limb_t __gmp_n) [free function]" : "__gmpn_sub_1",
"extern int __gmpz_millerrabin(mpz_srcptr arg0, int arg1) [free function]" : "__gmpz_millerrabin",
"extern void __gmpz_mod(mpz_ptr arg0, mpz_srcptr arg1, mpz_srcptr arg2) [free function]" : "__gmpz_mod",
"extern int __gmpz_invert(mpz_ptr arg0, mpz_srcptr arg1, mpz_srcptr arg2) [free function]" : "__gmpz_invert",
"extern void __gmp_randinit_mt(__gmp_randstate_struct * arg0) [free function]" : "__gmp_randinit_mt",
"extern void __gmpf_set_d(mpf_ptr arg0, double arg1) [free function]" : "__gmpf_set_d",
"extern void __gmpf_sub(mpf_ptr arg0, mpf_srcptr arg1, mpf_srcptr arg2) [free function]" : "__gmpf_sub",
"extern mp_limb_t __gmpn_addmul_1(mp_ptr arg0, mp_srcptr arg1, mp_size_t arg2, mp_limb_t arg3) [free function]" : "__gmpn_addmul_1",
"extern void __gmpf_set_z(mpf_ptr arg0, mpz_srcptr arg1) [free function]" : "__gmpf_set_z",
"extern void __gmpz_ui_sub(mpz_ptr arg0, long unsigned int arg1, mpz_srcptr arg2) [free function]" : "__gmpz_ui_sub",
"extern void __gmpf_div(mpf_ptr arg0, mpf_srcptr arg1, mpf_srcptr arg2) [free function]" : "__gmpf_div",
"extern void __gmpn_random2(mp_ptr arg0, mp_size_t arg1) [free function]" : "__gmpn_random2",
"extern mp_limb_t __gmpn_divexact_by3c(mp_ptr arg0, mp_srcptr arg1, mp_size_t arg2, mp_limb_t arg3) [free function]" : "__gmpn_divexact_by3c",
"extern void __gmpz_lucnum_ui(mpz_ptr arg0, long unsigned int arg1) [free function]" : "__gmpz_lucnum_ui",
"extern void __gmpf_set_q(mpf_ptr arg0, mpq_srcptr arg1) [free function]" : "__gmpf_set_q",
"extern void __gmpz_random(mpz_ptr arg0, mp_size_t arg1) [free function]" : "__gmpz_random",
"extern int __gmp_scanf(char const * arg0, ...) [free function]" : "__gmp_scanf",
"extern mp_size_t __gmpn_sqrtrem(mp_ptr arg0, mp_ptr arg1, mp_srcptr arg2, mp_size_t arg3) [free function]" : "__gmpn_sqrtrem",
"extern int __gmpq_set_str(mpq_ptr arg0, char const * arg1, int arg2) [free function]" : "__gmpq_set_str",
"extern int __gmpf_fits_slong_p(mpf_srcptr arg0) [free function]" : "__gmpf_fits_slong_p",
"extern void __gmpz_setbit(mpz_ptr arg0, long unsigned int arg1) [free function]" : "__gmpz_setbit",
"extern void __gmp_randinit_lc_2exp(__gmp_randstate_struct * arg0, mpz_srcptr arg1, long unsigned int arg2, long unsigned int arg3) [free function]" : "__gmp_randinit_lc_2exp",
"extern int __gmp_randinit_lc_2exp_size(__gmp_randstate_struct * arg0, long unsigned int arg1) [free function]" : "__gmp_randinit_lc_2exp_size",
"extern void __gmpz_set_d(mpz_ptr arg0, double arg1) [free function]" : "__gmpz_set_d",
"extern int __gmpz_jacobi(mpz_srcptr arg0, mpz_srcptr arg1) [free function]" : "__gmpz_jacobi",
"extern void __gmpz_set_f(mpz_ptr arg0, mpf_srcptr arg1) [free function]" : "__gmpz_set_f",
"extern size_t __gmpf_out_str(FILE * arg0, int arg1, size_t arg2, mpf_srcptr arg3) [free function]" : "__gmpf_out_str",
"extern int __gmpf_fits_sshort_p(mpf_srcptr arg0) [free function]" : "__gmpf_fits_sshort_p",
"extern void __gmpq_div_2exp(mpq_ptr arg0, mpq_srcptr arg1, long unsigned int arg2) [free function]" : "__gmpq_div_2exp",
"extern long unsigned int __gmpf_get_prec(mpf_srcptr arg0) [free function]" : "__gmpf_get_prec",
"extern int __gmpz_kronecker_si(mpz_srcptr arg0, long int arg1) [free function]" : "__gmpz_kronecker_si",
"extern void __gmpf_floor(mpf_ptr arg0, mpf_srcptr arg1) [free function]" : "__gmpf_floor",
"extern int __gmpq_cmp(mpq_srcptr arg0, mpq_srcptr arg1) [free function]" : "__gmpq_cmp",
"extern int __gmpf_integer_p(mpf_srcptr arg0) [free function]" : "__gmpf_integer_p",
"extern void __gmpz_powm(mpz_ptr arg0, mpz_srcptr arg1, mpz_srcptr arg2, mpz_srcptr arg3) [free function]" : "__gmpz_powm",
"extern long unsigned int __gmpz_hamdist(mpz_srcptr arg0, mpz_srcptr arg1) [free function]" : "__gmpz_hamdist",
"extern void __gmpz_fib_ui(mpz_ptr arg0, long unsigned int arg1) [free function]" : "__gmpz_fib_ui",
"extern int __gmpz_cmp_ui(mpz_srcptr arg0, long unsigned int arg1) [free function]" : "__gmpz_cmp_ui",
"extern mp_limb_t __gmpn_submul_1(mp_ptr arg0, mp_srcptr arg1, mp_size_t arg2, mp_limb_t arg3) [free function]" : "__gmpn_submul_1",
"extern void __gmpf_init2(mpf_ptr arg0, long unsigned int arg1) [free function]" : "__gmpf_init2",
"extern mp_limb_t __gmpn_mul_1(mp_ptr arg0, mp_srcptr arg1, mp_size_t arg2, mp_limb_t arg3) [free function]" : "__gmpn_mul_1",
"extern mp_limb_t __gmpn_mod_1(mp_srcptr arg0, mp_size_t arg1, mp_limb_t arg2) [free function]" : "__gmpn_mod_1",
"size_t __gmpz_size(mpz_srcptr __gmp_z) [free function]" : "__gmpz_size",
"extern void __gmpq_get_den(mpz_ptr arg0, mpq_srcptr arg1) [free function]" : "__gmpq_get_den",
"extern mp_limb_t __gmpn_preinv_mod_1(mp_srcptr arg0, mp_size_t arg1, mp_limb_t arg2, mp_limb_t arg3) [free function]" : "__gmpn_preinv_mod_1",
"extern long unsigned int __gmpz_tdiv_ui(mpz_srcptr arg0, long unsigned int arg1) [free function]" : "__gmpz_tdiv_ui",
"extern mp_limb_t __gmpn_gcd_1(mp_srcptr arg0, mp_size_t arg1, mp_limb_t arg2) [free function]" : "__gmpn_gcd_1",
"extern void __gmp_randinit(__gmp_randstate_struct * arg0, gmp_randalg_t arg1, ...) [free function]" : "__gmp_randinit",
"extern void __gmpf_init(mpf_ptr arg0) [free function]" : "__gmpf_init",
"extern void __gmpz_mul(mpz_ptr arg0, mpz_srcptr arg1, mpz_srcptr arg2) [free function]" : "__gmpz_mul",
"extern long unsigned int __gmpn_scan1(mp_srcptr arg0, long unsigned int arg1) [free function]" : "__gmpn_scan1",
"extern void __gmpq_set(mpq_ptr arg0, mpq_srcptr arg1) [free function]" : "__gmpq_set",
"extern int __gmpz_fits_sint_p(mpz_srcptr arg0) [free function]" : "__gmpz_fits_sint_p",
"extern long unsigned int __gmpz_cdiv_qr_ui(mpz_ptr arg0, mpz_ptr arg1, mpz_srcptr arg2, long unsigned int arg3) [free function]" : "__gmpz_cdiv_qr_ui",
"extern void __gmpz_clear(mpz_ptr arg0) [free function]" : "__gmpz_clear",
"extern mp_limb_t __gmpn_mul(mp_ptr arg0, mp_srcptr arg1, mp_size_t arg2, mp_srcptr arg3, mp_size_t arg4) [free function]" : "__gmpn_mul",
"extern void __gmpz_init_set_si(mpz_ptr arg0, long int arg1) [free function]" : "__gmpz_init_set_si",
"extern int __gmpz_divisible_p(mpz_srcptr arg0, mpz_srcptr arg1) [free function]" : "__gmpz_divisible_p",
"__gmp_errno [variable]" : "__gmp_errno",
"extern void __gmpf_sub_ui(mpf_ptr arg0, mpf_srcptr arg1, long unsigned int arg2) [free function]" : "__gmpf_sub_ui",
"extern void __gmpz_swap(mpz_ptr arg0, mpz_ptr arg1) [free function]" : "__gmpz_swap",
"extern int __gmpz_cmp(mpz_srcptr arg0, mpz_srcptr arg1) [free function]" : "__gmpz_cmp",
"extern void __gmpf_init_set_si(mpf_ptr arg0, long int arg1) [free function]" : "__gmpf_init_set_si",
"extern mp_limb_t __gmpn_lshift(mp_ptr arg0, mp_srcptr arg1, mp_size_t arg2, unsigned int arg3) [free function]" : "__gmpn_lshift",
"extern int __gmpq_cmp_si(mpq_srcptr arg0, long int arg1, long unsigned int arg2) [free function]" : "__gmpq_cmp_si",
"void __gmpz_abs(mpz_ptr __gmp_w, mpz_srcptr __gmp_u) [free function]" : "__gmpz_abs",
"extern int __gmp_fprintf(FILE * arg0, char const * arg1, ...) [free function]" : "__gmp_fprintf",
"extern void __gmpf_set(mpf_ptr arg0, mpf_srcptr arg1) [free function]" : "__gmpf_set",
"extern int __gmpz_divisible_ui_p(mpz_srcptr arg0, long unsigned int arg1) [free function]" : "__gmpz_divisible_ui_p",
"extern int __gmpf_cmp_d(mpf_srcptr arg0, double arg1) [free function]" : "__gmpf_cmp_d",
"extern char * __gmpf_get_str(char * arg0, mp_exp_t * arg1, int arg2, size_t arg3, mpf_srcptr arg4) [free function]" : "__gmpf_get_str",
"extern long unsigned int __gmpz_fdiv_q_ui(mpz_ptr arg0, mpz_srcptr arg1, long unsigned int arg2) [free function]" : "__gmpz_fdiv_q_ui",
"extern void __gmpz_urandomb(mpz_ptr arg0, __gmp_randstate_struct * arg1, long unsigned int arg2) [free function]" : "__gmpz_urandomb",
"extern char * __gmpz_get_str(char * arg0, int arg1, mpz_srcptr arg2) [free function]" : "__gmpz_get_str",
"extern void __gmpz_tdiv_r(mpz_ptr arg0, mpz_srcptr arg1, mpz_srcptr arg2) [free function]" : "__gmpz_tdiv_r",
"extern void __gmpz_urandomm(mpz_ptr arg0, __gmp_randstate_struct * arg1, mpz_srcptr arg2) [free function]" : "__gmpz_urandomm",
"extern void __gmpq_mul(mpq_ptr arg0, mpq_srcptr arg1, mpq_srcptr arg2) [free function]" : "__gmpq_mul",
"extern void __gmpz_tdiv_q(mpz_ptr arg0, mpz_srcptr arg1, mpz_srcptr arg2) [free function]" : "__gmpz_tdiv_q",
"extern int __gmpf_fits_uint_p(mpf_srcptr arg0) [free function]" : "__gmpf_fits_uint_p",
"extern void * __gmpz_realloc(mpz_ptr arg0, mp_size_t arg1) [free function]" : "__gmpz_realloc",
"extern long unsigned int __gmp_urandomb_ui(__gmp_randstate_struct * arg0, long unsigned int arg1) [free function]" : "__gmp_urandomb_ui",
"extern int __gmpz_perfect_power_p(mpz_srcptr arg0) [free function]" : "__gmpz_perfect_power_p",
"extern char * __gmpq_get_str(char * arg0, int arg1, mpq_srcptr arg2) [free function]" : "__gmpq_get_str",
"extern int __gmpn_perfect_square_p(mp_srcptr arg0, mp_size_t arg1) [free function]" : "__gmpn_perfect_square_p",
"extern void __gmpz_addmul(mpz_ptr arg0, mpz_srcptr arg1, mpz_srcptr arg2) [free function]" : "__gmpz_addmul",
"extern long unsigned int __gmpz_fdiv_ui(mpz_srcptr arg0, long unsigned int arg1) [free function]" : "__gmpz_fdiv_ui",
"extern int __gmp_sprintf(char * arg0, char const * arg1, ...) [free function]" : "__gmp_sprintf",
"extern void __gmpz_bin_uiui(mpz_ptr arg0, long unsigned int arg1, long unsigned int arg2) [free function]" : "__gmpz_bin_uiui",
"void __gmpz_set_q(mpz_ptr __gmp_w, mpq_srcptr __gmp_u) [free function]" : "__gmpz_set_q",
"mp_limb_t __gmpn_neg_n(mp_ptr __gmp_rp, mp_srcptr __gmp_up, mp_size_t __gmp_n) [free function]" : "__gmpn_neg_n",
"extern void __gmpf_neg(mpf_ptr arg0, mpf_srcptr arg1) [free function]" : "__gmpf_neg",
"extern void __gmp_randseed(__gmp_randstate_struct * arg0, mpz_srcptr arg1) [free function]" : "__gmp_randseed",
"extern long unsigned int __gmpz_scan1(mpz_srcptr arg0, long unsigned int arg1) [free function]" : "__gmpz_scan1",
"extern void __gmpz_nextprime(mpz_ptr arg0, mpz_srcptr arg1) [free function]" : "__gmpz_nextprime",
"extern int __gmpz_si_kronecker(long int arg0, mpz_srcptr arg1) [free function]" : "__gmpz_si_kronecker",
"extern int __gmpz_congruent_ui_p(mpz_srcptr arg0, long unsigned int arg1, long unsigned int arg2) [free function]" : "__gmpz_congruent_ui_p",
"extern long unsigned int __gmpz_cdiv_ui(mpz_srcptr arg0, long unsigned int arg1) [free function]" : "__gmpz_cdiv_ui",
"extern void __gmpz_init_set_ui(mpz_ptr arg0, long unsigned int arg1) [free function]" : "__gmpz_init_set_ui",
"extern mp_limb_t __gmpn_sub_n(mp_ptr arg0, mp_srcptr arg1, mp_srcptr arg2, mp_size_t arg3) [free function]" : "__gmpn_sub_n",
"extern void __gmpq_set_num(mpq_ptr arg0, mpz_srcptr arg1) [free function]" : "__gmpq_set_num",
"extern int __gmpz_kronecker_ui(mpz_srcptr arg0, long unsigned int arg1) [free function]" : "__gmpz_kronecker_ui",
"extern void __gmpf_add_ui(mpf_ptr arg0, mpf_srcptr arg1, long unsigned int arg2) [free function]" : "__gmpf_add_ui",
"extern void __gmpz_gcdext(mpz_ptr arg0, mpz_ptr arg1, mpz_ptr arg2, mpz_srcptr arg3, mpz_srcptr arg4) [free function]" : "__gmpz_gcdext",
"extern void __gmpq_get_num(mpz_ptr arg0, mpq_srcptr arg1) [free function]" : "__gmpq_get_num",
"extern size_t __gmpf_size(mpf_srcptr arg0) [free function]" : "__gmpf_size",
"extern void __gmpq_inv(mpq_ptr arg0, mpq_srcptr arg1) [free function]" : "__gmpq_inv",
"extern mp_limb_t __gmpn_divrem_1(mp_ptr arg0, mp_size_t arg1, mp_srcptr arg2, mp_size_t arg3, mp_limb_t arg4) [free function]" : "__gmpn_divrem_1",
"extern void __gmpq_canonicalize(mpq_ptr arg0) [free function]" : "__gmpq_canonicalize",
"long unsigned int __gmpz_popcount(mpz_srcptr __gmp_u) [free function]" : "__gmpz_popcount",
"extern void __gmpf_ui_sub(mpf_ptr arg0, long unsigned int arg1, mpf_srcptr arg2) [free function]" : "__gmpf_ui_sub",
"extern int __gmpz_cmp_si(mpz_srcptr arg0, long int arg1) [free function]" : "__gmpz_cmp_si",
"extern int __gmpz_set_str(mpz_ptr arg0, char const * arg1, int arg2) [free function]" : "__gmpz_set_str",
"extern int __gmpz_tstbit(mpz_srcptr arg0, long unsigned int arg1) [free function]" : "__gmpz_tstbit",
"extern void __gmpz_set_si(mpz_ptr arg0, long int arg1) [free function]" : "__gmpz_set_si",
"extern void __gmpq_init(mpq_ptr arg0) [free function]" : "__gmpq_init",
"extern size_t __gmpz_out_raw(FILE * arg0, mpz_srcptr arg1) [free function]" : "__gmpz_out_raw",
"extern void __gmpf_trunc(mpf_ptr arg0, mpf_srcptr arg1) [free function]" : "__gmpf_trunc",
"extern mp_limb_t __gmpn_gcdext_1(mp_ptr arg0, mp_ptr arg1, mp_limb_t arg2, mp_limb_t arg3) [free function]" : "__gmpn_gcdext_1",
"extern int __gmpz_cmpabs_d(mpz_srcptr arg0, double arg1) [free function]" : "__gmpz_cmpabs_d",
"extern void * __gmpz_export(void * arg0, size_t * arg1, int arg2, size_t arg3, int arg4, size_t arg5, mpz_srcptr arg6) [free function]" : "__gmpz_export",
"extern double __gmpz_get_d_2exp(long int * arg0, mpz_srcptr arg1) [free function]" : "__gmpz_get_d_2exp",
"extern void __gmpz_add_ui(mpz_ptr arg0, mpz_srcptr arg1, long unsigned int arg2) [free function]" : "__gmpz_add_ui",
"extern mp_size_t __gmpn_gcdext(mp_ptr arg0, mp_ptr arg1, mp_size_t * arg2, mp_ptr arg3, mp_size_t arg4, mp_ptr arg5, mp_size_t arg6) [free function]" : "__gmpn_gcdext",
"extern void __gmpf_sqrt(mpf_ptr arg0, mpf_srcptr arg1) [free function]" : "__gmpf_sqrt",
"extern size_t __gmpz_sizeinbase(mpz_srcptr arg0, int arg1) [free function]" : "__gmpz_sizeinbase",
"extern long unsigned int __gmpz_fdiv_r_ui(mpz_ptr arg0, mpz_srcptr arg1, long unsigned int arg2) [free function]" : "__gmpz_fdiv_r_ui",
"extern void __gmp_randinit_default(__gmp_randstate_struct * arg0) [free function]" : "__gmp_randinit_default",
"mp_limb_t __gmpz_getlimbn(mpz_srcptr __gmp_z, mp_size_t __gmp_n) [free function]" : "__gmpz_getlimbn",
"extern long int __gmpf_get_si(mpf_srcptr arg0) [free function]" : "__gmpf_get_si",
"extern void __gmpz_init(mpz_ptr arg0) [free function]" : "__gmpz_init",
"extern void __gmpf_div_2exp(mpf_ptr arg0, mpf_srcptr arg1, long unsigned int arg2) [free function]" : "__gmpf_div_2exp",
"extern void __gmpf_set_si(mpf_ptr arg0, long int arg1) [free function]" : "__gmpf_set_si",
"extern int __gmpq_equal(mpq_srcptr arg0, mpq_srcptr arg1) [free function]" : "__gmpq_equal",
"extern void __gmpz_rrandomb(mpz_ptr arg0, __gmp_randstate_struct * arg1, long unsigned int arg2) [free function]" : "__gmpz_rrandomb",
"extern int __gmpf_cmp_si(mpf_srcptr arg0, long int arg1) [free function]" : "__gmpf_cmp_si",
"extern long unsigned int __gmpz_scan0(mpz_srcptr arg0, long unsigned int arg1) [free function]" : "__gmpz_scan0",
"extern void __gmpz_init2(mpz_ptr arg0, long unsigned int arg1) [free function]" : "__gmpz_init2",
"extern void __gmpz_random2(mpz_ptr arg0, mp_size_t arg1) [free function]" : "__gmpz_random2",
"extern mp_size_t __gmpn_pow_1(mp_ptr arg0, mp_srcptr arg1, mp_size_t arg2, mp_limb_t arg3, mp_ptr arg4) [free function]" : "__gmpn_pow_1",
"extern void __gmpz_gcd(mpz_ptr arg0, mpz_srcptr arg1, mpz_srcptr arg2) [free function]" : "__gmpz_gcd",
"extern void __gmpf_mul_2exp(mpf_ptr arg0, mpf_srcptr arg1, long unsigned int arg2) [free function]" : "__gmpf_mul_2exp",
"extern double __gmpq_get_d(mpq_srcptr arg0) [free function]" : "__gmpq_get_d",
"extern void __gmpf_mul(mpf_ptr arg0, mpf_srcptr arg1, mpf_srcptr arg2) [free function]" : "__gmpf_mul",
"extern void __gmpf_div_ui(mpf_ptr arg0, mpf_srcptr arg1, long unsigned int arg2) [free function]" : "__gmpf_div_ui",
"extern void __gmpq_mul_2exp(mpq_ptr arg0, mpq_srcptr arg1, long unsigned int arg2) [free function]" : "__gmpq_mul_2exp",
"extern size_t __gmpz_out_str(FILE * arg0, int arg1, mpz_srcptr arg2) [free function]" : "__gmpz_out_str",
"extern mp_limb_t __gmpn_divrem_2(mp_ptr arg0, mp_size_t arg1, mp_ptr arg2, mp_size_t arg3, mp_srcptr arg4) [free function]" : "__gmpn_divrem_2",
"extern int __gmpz_cmpabs(mpz_srcptr arg0, mpz_srcptr arg1) [free function]" : "__gmpz_cmpabs",
"extern void __gmpz_powm_ui(mpz_ptr arg0, mpz_srcptr arg1, long unsigned int arg2, mpz_srcptr arg3) [free function]" : "__gmpz_powm_ui",
"extern size_t __gmpq_out_str(FILE * arg0, int arg1, mpq_srcptr arg2) [free function]" : "__gmpq_out_str",
"void __gmpz_neg(mpz_ptr __gmp_w, mpz_srcptr __gmp_u) [free function]" : "__gmpz_neg",
"extern void __gmpf_swap(mpf_ptr arg0, mpf_ptr arg1) [free function]" : "__gmpf_swap",
"extern void __gmp_randseed_ui(__gmp_randstate_struct * arg0, long unsigned int arg1) [free function]" : "__gmp_randseed_ui",
"extern void __gmpz_sqrtrem(mpz_ptr arg0, mpz_ptr arg1, mpz_srcptr arg2) [free function]" : "__gmpz_sqrtrem",
"extern long unsigned int __gmpz_tdiv_qr_ui(mpz_ptr arg0, mpz_ptr arg1, mpz_srcptr arg2, long unsigned int arg3) [free function]" : "__gmpz_tdiv_qr_ui",
"extern mp_limb_t __gmpn_bdivmod(mp_ptr arg0, mp_ptr arg1, mp_size_t arg2, mp_srcptr arg3, mp_size_t arg4, long unsigned int arg5) [free function]" : "__gmpn_bdivmod",
"extern void __gmpn_random(mp_ptr arg0, mp_size_t arg1) [free function]" : "__gmpn_random",
"extern void __gmpq_set_z(mpq_ptr arg0, mpz_srcptr arg1) [free function]" : "__gmpq_set_z",
"extern void __gmpz_cdiv_q(mpz_ptr arg0, mpz_srcptr arg1, mpz_srcptr arg2) [free function]" : "__gmpz_cdiv_q",
"extern long int __gmpz_get_si(mpz_srcptr arg0) [free function]" : "__gmpz_get_si",
"extern void __gmpf_init_set(mpf_ptr arg0, mpf_srcptr arg1) [free function]" : "__gmpf_init_set",
"extern void __gmpf_init_set_d(mpf_ptr arg0, double arg1) [free function]" : "__gmpf_init_set_d",
"extern int __gmpf_cmp(mpf_srcptr arg0, mpf_srcptr arg1) [free function]" : "__gmpf_cmp",
"extern int __gmpf_eq(mpf_srcptr arg0, mpf_srcptr arg1, long unsigned int arg2) [free function]" : "__gmpf_eq",
"extern long unsigned int __gmpn_popcount(mp_srcptr arg0, mp_size_t arg1) [free function]" : "__gmpn_popcount",
"extern void __gmpf_ceil(mpf_ptr arg0, mpf_srcptr arg1) [free function]" : "__gmpf_ceil",
"mp_limb_t __gmpn_add_1(mp_ptr __gmp_dst, mp_srcptr __gmp_src, mp_size_t __gmp_size, mp_limb_t __gmp_n) [free function]" : "__gmpn_add_1",
"extern void __gmpz_fib2_ui(mpz_ptr arg0, mpz_ptr arg1, long unsigned int arg2) [free function]" : "__gmpz_fib2_ui",
"extern int __gmp_printf(char const * arg0, ...) [free function]" : "__gmp_printf",
"extern void __gmpq_set_f(mpq_ptr arg0, mpf_srcptr arg1) [free function]" : "__gmpq_set_f",
"extern void __gmpf_clear(mpf_ptr arg0) [free function]" : "__gmpf_clear",
"extern size_t __gmpn_get_str(unsigned char * arg0, int arg1, mp_ptr arg2, mp_size_t arg3) [free function]" : "__gmpn_get_str",
"extern int __gmp_fscanf(FILE * arg0, char const * arg1, ...) [free function]" : "__gmp_fscanf",
"extern int __gmpz_ui_kronecker(long unsigned int arg0, mpz_srcptr arg1) [free function]" : "__gmpz_ui_kronecker",
"mp_limb_t __gmpn_add(mp_ptr __gmp_wp, mp_srcptr __gmp_xp, mp_size_t __gmp_xsize, mp_srcptr __gmp_yp, mp_size_t __gmp_ysize) [free function]" : "__gmpn_add",
"mp_limb_t __gmpn_sub(mp_ptr __gmp_wp, mp_srcptr __gmp_xp, mp_size_t __gmp_xsize, mp_srcptr __gmp_yp, mp_size_t __gmp_ysize) [free function]" : "__gmpn_sub",
"extern void __gmpz_bin_ui(mpz_ptr arg0, mpz_srcptr arg1, long unsigned int arg2) [free function]" : "__gmpz_bin_ui",
"extern void __gmpz_fdiv_q_2exp(mpz_ptr arg0, mpz_srcptr arg1, long unsigned int arg2) [free function]" : "__gmpz_fdiv_q_2exp",
"extern void __gmpf_dump(mpf_srcptr arg0) [free function]" : "__gmpf_dump",
"extern void __gmpz_tdiv_r_2exp(mpz_ptr arg0, mpz_srcptr arg1, long unsigned int arg2) [free function]" : "__gmpz_tdiv_r_2exp",
"extern void __gmpz_submul_ui(mpz_ptr arg0, mpz_srcptr arg1, long unsigned int arg2) [free function]" : "__gmpz_submul_ui",
"extern long unsigned int __gmpz_cdiv_q_ui(mpz_ptr arg0, mpz_srcptr arg1, long unsigned int arg2) [free function]" : "__gmpz_cdiv_q_ui",
"extern void __gmpz_dump(mpz_srcptr arg0) [free function]" : "__gmpz_dump",
"extern void __gmp_randclear(__gmp_randstate_struct * arg0) [free function]" : "__gmp_randclear",
"__gmp_version [variable]" : "__gmp_version",
"extern long unsigned int __gmpz_remove(mpz_ptr arg0, mpz_srcptr arg1, mpz_srcptr arg2) [free function]" : "__gmpz_remove",
"extern void __gmpf_set_default_prec(long unsigned int arg0) [free function]" : "__gmpf_set_default_prec",
"extern int __gmpz_congruent_p(mpz_srcptr arg0, mpz_srcptr arg1, mpz_srcptr arg2) [free function]" : "__gmpz_congruent_p",
"extern void __gmpf_pow_ui(mpf_ptr arg0, mpf_srcptr arg1, long unsigned int arg2) [free function]" : "__gmpf_pow_ui",
"extern void __gmpz_lcm_ui(mpz_ptr arg0, mpz_srcptr arg1, long unsigned int arg2) [free function]" : "__gmpz_lcm_ui",
"extern void __gmpz_rootrem(mpz_ptr arg0, mpz_ptr arg1, mpz_srcptr arg2, long unsigned int arg3) [free function]" : "__gmpz_rootrem",
"extern void __gmpz_lucnum2_ui(mpz_ptr arg0, mpz_ptr arg1, long unsigned int arg2) [free function]" : "__gmpz_lucnum2_ui",
"extern void __gmpz_set_ui(mpz_ptr arg0, long unsigned int arg1) [free function]" : "__gmpz_set_ui",
"void __gmpq_abs(mpq_ptr __gmp_w, mpq_srcptr __gmp_u) [free function]" : "__gmpq_abs",
"extern long unsigned int __gmpn_hamdist(mp_srcptr arg0, mp_srcptr arg1, mp_size_t arg2) [free function]" : "__gmpn_hamdist",
"extern int __gmpf_fits_ushort_p(mpf_srcptr arg0) [free function]" : "__gmpf_fits_ushort_p",
"extern void __gmpz_set(mpz_ptr arg0, mpz_srcptr arg1) [free function]" : "__gmpz_set",
"extern void __gmpq_set_den(mpq_ptr arg0, mpz_srcptr arg1) [free function]" : "__gmpq_set_den",
"extern void __gmpf_abs(mpf_ptr arg0, mpf_srcptr arg1) [free function]" : "__gmpf_abs",
"extern void __gmp_get_memory_functions(void * (*)( ::size_t ) * * arg0, void * (*)( void *,::size_t,::size_t ) * * arg1, void (*)( void *,::size_t ) * * arg2) [free function]" : "__gmp_get_memory_functions",
"extern void __gmpf_ui_div(mpf_ptr arg0, long unsigned int arg1, mpf_srcptr arg2) [free function]" : "__gmpf_ui_div",
"extern mp_size_t __gmpn_gcd(mp_ptr arg0, mp_ptr arg1, mp_size_t arg2, mp_ptr arg3, mp_size_t arg4) [free function]" : "__gmpn_gcd",
"extern mp_limb_t __gmpn_add_n(mp_ptr arg0, mp_srcptr arg1, mp_srcptr arg2, mp_size_t arg3) [free function]" : "__gmpn_add_n",
"extern size_t __gmpz_inp_raw(mpz_ptr arg0, FILE * arg1) [free function]" : "__gmpz_inp_raw",
"int __gmpz_fits_ulong_p(mpz_srcptr __gmp_z) [free function]" : "__gmpz_fits_ulong_p",
"extern void __gmpq_clear(mpq_ptr arg0) [free function]" : "__gmpq_clear",
"extern long unsigned int __gmpf_get_ui(mpf_srcptr arg0) [free function]" : "__gmpf_get_ui",
"extern void __gmpz_fdiv_qr(mpz_ptr arg0, mpz_ptr arg1, mpz_srcptr arg2, mpz_srcptr arg3) [free function]" : "__gmpz_fdiv_qr",
"extern void __gmpf_mul_ui(mpf_ptr arg0, mpf_srcptr arg1, long unsigned int arg2) [free function]" : "__gmpf_mul_ui",
"extern void __gmpz_combit(mpz_ptr arg0, long unsigned int arg1) [free function]" : "__gmpz_combit",
"extern void __gmpz_addmul_ui(mpz_ptr arg0, mpz_srcptr arg1, long unsigned int arg2) [free function]" : "__gmpz_addmul_ui",
"extern size_t __gmpf_inp_str(mpf_ptr arg0, FILE * arg1, int arg2) [free function]" : "__gmpf_inp_str",
"extern int __gmpf_init_set_str(mpf_ptr arg0, char const * arg1, int arg2) [free function]" : "__gmpf_init_set_str",
"__gmpf_get_d" : "extern double __gmpf_get_d(mpf_srcptr arg0) [free function]",
"__gmpf_cmp_ui" : "extern int __gmpf_cmp_ui(mpf_srcptr arg0, long unsigned int arg1) [free function]",
"__gmpz_mul_ui" : "extern void __gmpz_mul_ui(mpz_ptr arg0, mpz_srcptr arg1, long unsigned int arg2) [free function]",
"__gmpz_and" : "extern void __gmpz_and(mpz_ptr arg0, mpz_srcptr arg1, mpz_srcptr arg2) [free function]",
"__gmpf_urandomb" : "extern void __gmpf_urandomb(__mpf_struct * arg0, __gmp_randstate_struct * arg1, long unsigned int arg2) [free function]",
"__gmpz_tdiv_q_ui" : "extern long unsigned int __gmpz_tdiv_q_ui(mpz_ptr arg0, mpz_srcptr arg1, long unsigned int arg2) [free function]",
"__gmpz_clrbit" : "extern void __gmpz_clrbit(mpz_ptr arg0, long unsigned int arg1) [free function]",
"__gmpz_cdiv_r_2exp" : "extern void __gmpz_cdiv_r_2exp(mpz_ptr arg0, mpz_srcptr arg1, long unsigned int arg2) [free function]",
"__gmpz_lcm" : "extern void __gmpz_lcm(mpz_ptr arg0, mpz_srcptr arg1, mpz_srcptr arg2) [free function]",
"__gmpf_get_d_2exp" : "extern double __gmpf_get_d_2exp(long int * arg0, mpf_srcptr arg1) [free function]",
"__gmpz_divisible_2exp_p" : "extern int __gmpz_divisible_2exp_p(mpz_srcptr arg0, long unsigned int arg1) [free function]",
"__gmpz_congruent_2exp_p" : "extern int __gmpz_congruent_2exp_p(mpz_srcptr arg0, mpz_srcptr arg1, long unsigned int arg2) [free function]",
"__gmpz_pow_ui" : "extern void __gmpz_pow_ui(mpz_ptr arg0, mpz_srcptr arg1, long unsigned int arg2) [free function]",
"__gmpq_neg" : "void __gmpq_neg(mpq_ptr __gmp_w, mpq_srcptr __gmp_u) [free function]",
"__gmpf_reldiff" : "extern void __gmpf_reldiff(mpf_ptr arg0, mpf_srcptr arg1, mpf_srcptr arg2) [free function]",
"__gmpz_import" : "extern void __gmpz_import(mpz_ptr arg0, size_t arg1, int arg2, size_t arg3, int arg4, size_t arg5, void const * arg6) [free function]",
"__gmpz_fac_ui" : "extern void __gmpz_fac_ui(mpz_ptr arg0, long unsigned int arg1) [free function]",
"__gmpz_root" : "extern int __gmpz_root(mpz_ptr arg0, mpz_srcptr arg1, long unsigned int arg2) [free function]",
"__gmpz_fdiv_q" : "extern void __gmpz_fdiv_q(mpz_ptr arg0, mpz_srcptr arg1, mpz_srcptr arg2) [free function]",
"__gmpz_fdiv_r" : "extern void __gmpz_fdiv_r(mpz_ptr arg0, mpz_srcptr arg1, mpz_srcptr arg2) [free function]",
"__gmp_set_memory_functions" : "extern void __gmp_set_memory_functions(void * (*)( ::size_t ) * arg0, void * (*)( void *,::size_t,::size_t ) * arg1, void (*)( void *,::size_t ) * arg2) [free function]",
"__gmpz_tdiv_r_ui" : "extern long unsigned int __gmpz_tdiv_r_ui(mpz_ptr arg0, mpz_srcptr arg1, long unsigned int arg2) [free function]",
"__gmpz_cdiv_r_ui" : "extern long unsigned int __gmpz_cdiv_r_ui(mpz_ptr arg0, mpz_srcptr arg1, long unsigned int arg2) [free function]",
"__gmpz_realloc2" : "extern void __gmpz_realloc2(mpz_ptr arg0, long unsigned int arg1) [free function]",
"__gmpn_tdiv_qr" : "extern void __gmpn_tdiv_qr(mp_ptr arg0, mp_ptr arg1, mp_size_t arg2, mp_srcptr arg3, mp_size_t arg4, mp_srcptr arg5, mp_size_t arg6) [free function]",
"__gmpz_fdiv_r_2exp" : "extern void __gmpz_fdiv_r_2exp(mpz_ptr arg0, mpz_srcptr arg1, long unsigned int arg2) [free function]",
"__gmpz_sqrt" : "extern void __gmpz_sqrt(mpz_ptr arg0, mpz_srcptr arg1) [free function]",
"__gmpq_add" : "extern void __gmpq_add(mpq_ptr arg0, mpq_srcptr arg1, mpq_srcptr arg2) [free function]",
"__gmpq_div" : "extern void __gmpq_div(mpq_ptr arg0, mpq_srcptr arg1, mpq_srcptr arg2) [free function]",
"__gmpf_get_default_prec" : "extern long unsigned int __gmpf_get_default_prec() [free function]",
"__gmpq_sub" : "extern void __gmpq_sub(mpq_ptr arg0, mpq_srcptr arg1, mpq_srcptr arg2) [free function]",
"__gmpf_set_ui" : "extern void __gmpf_set_ui(mpf_ptr arg0, long unsigned int arg1) [free function]",
"__gmpz_get_d" : "extern double __gmpz_get_d(mpz_srcptr arg0) [free function]",
"__gmpz_add" : "extern void __gmpz_add(mpz_ptr arg0, mpz_srcptr arg1, mpz_srcptr arg2) [free function]",
"__gmpn_cmp" : "int __gmpn_cmp(mp_srcptr __gmp_xp, mp_srcptr __gmp_yp, mp_size_t __gmp_size) [free function]",
"__gmpz_divexact_ui" : "extern void __gmpz_divexact_ui(mpz_ptr arg0, mpz_srcptr arg1, long unsigned int arg2) [free function]",
"__gmpz_gcd_ui" : "extern long unsigned int __gmpz_gcd_ui(mpz_ptr arg0, mpz_srcptr arg1, long unsigned int arg2) [free function]",
"__gmpz_inp_str" : "extern size_t __gmpz_inp_str(mpz_ptr arg0, FILE * arg1, int arg2) [free function]",
"__gmp_snprintf" : "extern int __gmp_snprintf(char * arg0, size_t arg1, char const * arg2, ...) [free function]",
"__gmpf_set_prec_raw" : "extern void __gmpf_set_prec_raw(mpf_ptr arg0, long unsigned int arg1) [free function]",
"__gmpz_cdiv_q_2exp" : "extern void __gmpz_cdiv_q_2exp(mpz_ptr arg0, mpz_srcptr arg1, long unsigned int arg2) [free function]",
"__gmpz_fits_sshort_p" : "extern int __gmpz_fits_sshort_p(mpz_srcptr arg0) [free function]",
"__gmpn_divrem" : "extern mp_limb_t __gmpn_divrem(mp_ptr arg0, mp_size_t arg1, mp_ptr arg2, mp_size_t arg3, mp_srcptr arg4, mp_size_t arg5) [free function]",
"__gmpz_submul" : "extern void __gmpz_submul(mpz_ptr arg0, mpz_srcptr arg1, mpz_srcptr arg2) [free function]",
"__gmpz_init_set" : "extern void __gmpz_init_set(mpz_ptr arg0, mpz_srcptr arg1) [free function]",
"__gmpz_xor" : "extern void __gmpz_xor(mpz_ptr arg0, mpz_srcptr arg1, mpz_srcptr arg2) [free function]",
"__gmpz_init_set_d" : "extern void __gmpz_init_set_d(mpz_ptr arg0, double arg1) [free function]",
"__gmpz_fits_ushort_p" : "int __gmpz_fits_ushort_p(mpz_srcptr __gmp_z) [free function]",
"__gmp_sscanf" : "extern int __gmp_sscanf(char const * arg0, char const * arg1, ...) [free function]",
"__gmpz_mul_2exp" : "extern void __gmpz_mul_2exp(mpz_ptr arg0, mpz_srcptr arg1, long unsigned int arg2) [free function]",
"__gmpz_sub" : "extern void __gmpz_sub(mpz_ptr arg0, mpz_srcptr arg1, mpz_srcptr arg2) [free function]",
"__gmpf_fits_ulong_p" : "extern int __gmpf_fits_ulong_p(mpf_srcptr arg0) [free function]",
"__gmpz_ui_pow_ui" : "extern void __gmpz_ui_pow_ui(mpz_ptr arg0, long unsigned int arg1, long unsigned int arg2) [free function]",
"__gmp_urandomm_ui" : "extern long unsigned int __gmp_urandomm_ui(__gmp_randstate_struct * arg0, long unsigned int arg1) [free function]",
"__gmpz_get_ui" : "long unsigned int __gmpz_get_ui(mpz_srcptr __gmp_z) [free function]",
"__gmpz_cmpabs_ui" : "extern int __gmpz_cmpabs_ui(mpz_srcptr arg0, long unsigned int arg1) [free function]",
"__gmpz_tdiv_q_2exp" : "extern void __gmpz_tdiv_q_2exp(mpz_ptr arg0, mpz_srcptr arg1, long unsigned int arg2) [free function]",
"__gmpz_perfect_square_p" : "int __gmpz_perfect_square_p(mpz_srcptr __gmp_a) [free function]",
"__gmpq_set_d" : "extern void __gmpq_set_d(mpq_ptr arg0, double arg1) [free function]",
"__gmpz_cmp_d" : "extern int __gmpz_cmp_d(mpz_srcptr arg0, double arg1) [free function]",
"__gmpz_cdiv_qr" : "extern void __gmpz_cdiv_qr(mpz_ptr arg0, mpz_ptr arg1, mpz_srcptr arg2, mpz_srcptr arg3) [free function]",
"__gmpf_add" : "extern void __gmpf_add(mpf_ptr arg0, mpf_srcptr arg1, mpf_srcptr arg2) [free function]",
"__gmpz_probab_prime_p" : "extern int __gmpz_probab_prime_p(mpz_srcptr arg0, int arg1) [free function]",
"__gmpn_rshift" : "extern mp_limb_t __gmpn_rshift(mp_ptr arg0, mp_srcptr arg1, mp_size_t arg2, unsigned int arg3) [free function]",
"__gmpz_array_init" : "extern void __gmpz_array_init(mpz_ptr arg0, mp_size_t arg1, mp_size_t arg2) [free function]",
"__gmpz_fits_uint_p" : "int __gmpz_fits_uint_p(mpz_srcptr __gmp_z) [free function]",
"__gmpf_random2" : "extern void __gmpf_random2(mpf_ptr arg0, mp_size_t arg1, mp_exp_t arg2) [free function]",
"__gmp_randinit_set" : "extern void __gmp_randinit_set(__gmp_randstate_struct * arg0, __gmp_randstate_struct const * arg1) [free function]",
"__gmpz_tdiv_qr" : "extern void __gmpz_tdiv_qr(mpz_ptr arg0, mpz_ptr arg1, mpz_srcptr arg2, mpz_srcptr arg3) [free function]",
"__gmpn_set_str" : "extern mp_size_t __gmpn_set_str(mp_ptr arg0, unsigned char const * arg1, size_t arg2, int arg3) [free function]",
"__gmpn_scan0" : "extern long unsigned int __gmpn_scan0(mp_srcptr arg0, long unsigned int arg1) [free function]",
"__gmpz_cdiv_r" : "extern void __gmpz_cdiv_r(mpz_ptr arg0, mpz_srcptr arg1, mpz_srcptr arg2) [free function]",
"__gmpz_fdiv_qr_ui" : "extern long unsigned int __gmpz_fdiv_qr_ui(mpz_ptr arg0, mpz_ptr arg1, mpz_srcptr arg2, long unsigned int arg3) [free function]",
"__gmpf_init_set_ui" : "extern void __gmpf_init_set_ui(mpf_ptr arg0, long unsigned int arg1) [free function]",
"__gmpn_mul_n" : "extern void __gmpn_mul_n(mp_ptr arg0, mp_srcptr arg1, mp_srcptr arg2, mp_size_t arg3) [free function]",
"__gmpq_cmp_ui" : "extern int __gmpq_cmp_ui(mpq_srcptr arg0, long unsigned int arg1, long unsigned int arg2) [free function]",
"__gmpz_mul_si" : "extern void __gmpz_mul_si(mpz_ptr arg0, mpz_srcptr arg1, long int arg2) [free function]",
"__gmpq_set_si" : "extern void __gmpq_set_si(mpq_ptr arg0, long int arg1, long unsigned int arg2) [free function]",
"__gmpq_set_ui" : "extern void __gmpq_set_ui(mpq_ptr arg0, long unsigned int arg1, long unsigned int arg2) [free function]",
"__gmpf_sqrt_ui" : "extern void __gmpf_sqrt_ui(mpf_ptr arg0, long unsigned int arg1) [free function]",
"__gmpq_inp_str" : "extern size_t __gmpq_inp_str(mpq_ptr arg0, FILE * arg1, int arg2) [free function]",
"__gmpf_fits_sint_p" : "extern int __gmpf_fits_sint_p(mpf_srcptr arg0) [free function]",
"__gmpq_swap" : "extern void __gmpq_swap(mpq_ptr arg0, mpq_ptr arg1) [free function]",
"__gmpf_set_str" : "extern int __gmpf_set_str(mpf_ptr arg0, char const * arg1, int arg2) [free function]",
"__gmpz_sub_ui" : "extern void __gmpz_sub_ui(mpz_ptr arg0, mpz_srcptr arg1, long unsigned int arg2) [free function]",
"__gmpz_divexact" : "extern void __gmpz_divexact(mpz_ptr arg0, mpz_srcptr arg1, mpz_srcptr arg2) [free function]",
"__gmpz_com" : "extern void __gmpz_com(mpz_ptr arg0, mpz_srcptr arg1) [free function]",
"__gmpz_ior" : "extern void __gmpz_ior(mpz_ptr arg0, mpz_srcptr arg1, mpz_srcptr arg2) [free function]",
"__gmpz_fits_slong_p" : "extern int __gmpz_fits_slong_p(mpz_srcptr arg0) [free function]",
"__gmp_asprintf" : "extern int __gmp_asprintf(char * * arg0, char const * arg1, ...) [free function]",
"__gmp_bits_per_limb" : "__gmp_bits_per_limb [variable]",
"__gmpf_set_prec" : "extern void __gmpf_set_prec(mpf_ptr arg0, long unsigned int arg1) [free function]",
"__gmpz_init_set_str" : "extern int __gmpz_init_set_str(mpz_ptr arg0, char const * arg1, int arg2) [free function]",
"__gmpn_sub_1" : "mp_limb_t __gmpn_sub_1(mp_ptr __gmp_dst, mp_srcptr __gmp_src, mp_size_t __gmp_size, mp_limb_t __gmp_n) [free function]",
"__gmpz_millerrabin" : "extern int __gmpz_millerrabin(mpz_srcptr arg0, int arg1) [free function]",
"__gmpz_mod" : "extern void __gmpz_mod(mpz_ptr arg0, mpz_srcptr arg1, mpz_srcptr arg2) [free function]",
"__gmpz_invert" : "extern int __gmpz_invert(mpz_ptr arg0, mpz_srcptr arg1, mpz_srcptr arg2) [free function]",
"__gmp_randinit_mt" : "extern void __gmp_randinit_mt(__gmp_randstate_struct * arg0) [free function]",
"__gmpf_set_d" : "extern void __gmpf_set_d(mpf_ptr arg0, double arg1) [free function]",
"__gmpf_sub" : "extern void __gmpf_sub(mpf_ptr arg0, mpf_srcptr arg1, mpf_srcptr arg2) [free function]",
"__gmpn_addmul_1" : "extern mp_limb_t __gmpn_addmul_1(mp_ptr arg0, mp_srcptr arg1, mp_size_t arg2, mp_limb_t arg3) [free function]",
"__gmpf_set_z" : "extern void __gmpf_set_z(mpf_ptr arg0, mpz_srcptr arg1) [free function]",
"__gmpz_ui_sub" : "extern void __gmpz_ui_sub(mpz_ptr arg0, long unsigned int arg1, mpz_srcptr arg2) [free function]",
"__gmpf_div" : "extern void __gmpf_div(mpf_ptr arg0, mpf_srcptr arg1, mpf_srcptr arg2) [free function]",
"__gmpn_random2" : "extern void __gmpn_random2(mp_ptr arg0, mp_size_t arg1) [free function]",
"__gmpn_divexact_by3c" : "extern mp_limb_t __gmpn_divexact_by3c(mp_ptr arg0, mp_srcptr arg1, mp_size_t arg2, mp_limb_t arg3) [free function]",
"__gmpz_lucnum_ui" : "extern void __gmpz_lucnum_ui(mpz_ptr arg0, long unsigned int arg1) [free function]",
"__gmpf_set_q" : "extern void __gmpf_set_q(mpf_ptr arg0, mpq_srcptr arg1) [free function]",
"__gmpz_random" : "extern void __gmpz_random(mpz_ptr arg0, mp_size_t arg1) [free function]",
"__gmp_scanf" : "extern int __gmp_scanf(char const * arg0, ...) [free function]",
"__gmpn_sqrtrem" : "extern mp_size_t __gmpn_sqrtrem(mp_ptr arg0, mp_ptr arg1, mp_srcptr arg2, mp_size_t arg3) [free function]",
"__gmpq_set_str" : "extern int __gmpq_set_str(mpq_ptr arg0, char const * arg1, int arg2) [free function]",
"__gmpf_fits_slong_p" : "extern int __gmpf_fits_slong_p(mpf_srcptr arg0) [free function]",
"__gmpz_setbit" : "extern void __gmpz_setbit(mpz_ptr arg0, long unsigned int arg1) [free function]",
"__gmp_randinit_lc_2exp" : "extern void __gmp_randinit_lc_2exp(__gmp_randstate_struct * arg0, mpz_srcptr arg1, long unsigned int arg2, long unsigned int arg3) [free function]",
"__gmp_randinit_lc_2exp_size" : "extern int __gmp_randinit_lc_2exp_size(__gmp_randstate_struct * arg0, long unsigned int arg1) [free function]",
"__gmpz_set_d" : "extern void __gmpz_set_d(mpz_ptr arg0, double arg1) [free function]",
"__gmpz_jacobi" : "extern int __gmpz_jacobi(mpz_srcptr arg0, mpz_srcptr arg1) [free function]",
"__gmpz_set_f" : "extern void __gmpz_set_f(mpz_ptr arg0, mpf_srcptr arg1) [free function]",
"__gmpf_out_str" : "extern size_t __gmpf_out_str(FILE * arg0, int arg1, size_t arg2, mpf_srcptr arg3) [free function]",
"__gmpf_fits_sshort_p" : "extern int __gmpf_fits_sshort_p(mpf_srcptr arg0) [free function]",
"__gmpq_div_2exp" : "extern void __gmpq_div_2exp(mpq_ptr arg0, mpq_srcptr arg1, long unsigned int arg2) [free function]",
"__gmpf_get_prec" : "extern long unsigned int __gmpf_get_prec(mpf_srcptr arg0) [free function]",
"__gmpz_kronecker_si" : "extern int __gmpz_kronecker_si(mpz_srcptr arg0, long int arg1) [free function]",
"__gmpf_floor" : "extern void __gmpf_floor(mpf_ptr arg0, mpf_srcptr arg1) [free function]",
"__gmpq_cmp" : "extern int __gmpq_cmp(mpq_srcptr arg0, mpq_srcptr arg1) [free function]",
"__gmpf_integer_p" : "extern int __gmpf_integer_p(mpf_srcptr arg0) [free function]",
"__gmpz_powm" : "extern void __gmpz_powm(mpz_ptr arg0, mpz_srcptr arg1, mpz_srcptr arg2, mpz_srcptr arg3) [free function]",
"__gmpz_hamdist" : "extern long unsigned int __gmpz_hamdist(mpz_srcptr arg0, mpz_srcptr arg1) [free function]",
"__gmpz_fib_ui" : "extern void __gmpz_fib_ui(mpz_ptr arg0, long unsigned int arg1) [free function]",
"__gmpz_cmp_ui" : "extern int __gmpz_cmp_ui(mpz_srcptr arg0, long unsigned int arg1) [free function]",
"__gmpn_submul_1" : "extern mp_limb_t __gmpn_submul_1(mp_ptr arg0, mp_srcptr arg1, mp_size_t arg2, mp_limb_t arg3) [free function]",
"__gmpf_init2" : "extern void __gmpf_init2(mpf_ptr arg0, long unsigned int arg1) [free function]",
"__gmpn_mul_1" : "extern mp_limb_t __gmpn_mul_1(mp_ptr arg0, mp_srcptr arg1, mp_size_t arg2, mp_limb_t arg3) [free function]",
"__gmpn_mod_1" : "extern mp_limb_t __gmpn_mod_1(mp_srcptr arg0, mp_size_t arg1, mp_limb_t arg2) [free function]",
"__gmpz_size" : "size_t __gmpz_size(mpz_srcptr __gmp_z) [free function]",
"__gmpq_get_den" : "extern void __gmpq_get_den(mpz_ptr arg0, mpq_srcptr arg1) [free function]",
"__gmpn_preinv_mod_1" : "extern mp_limb_t __gmpn_preinv_mod_1(mp_srcptr arg0, mp_size_t arg1, mp_limb_t arg2, mp_limb_t arg3) [free function]",
"__gmpz_tdiv_ui" : "extern long unsigned int __gmpz_tdiv_ui(mpz_srcptr arg0, long unsigned int arg1) [free function]",
"__gmpn_gcd_1" : "extern mp_limb_t __gmpn_gcd_1(mp_srcptr arg0, mp_size_t arg1, mp_limb_t arg2) [free function]",
"__gmp_randinit" : "extern void __gmp_randinit(__gmp_randstate_struct * arg0, gmp_randalg_t arg1, ...) [free function]",
"__gmpf_init" : "extern void __gmpf_init(mpf_ptr arg0) [free function]",
"__gmpz_mul" : "extern void __gmpz_mul(mpz_ptr arg0, mpz_srcptr arg1, mpz_srcptr arg2) [free function]",
"__gmpn_scan1" : "extern long unsigned int __gmpn_scan1(mp_srcptr arg0, long unsigned int arg1) [free function]",
"__gmpq_set" : "extern void __gmpq_set(mpq_ptr arg0, mpq_srcptr arg1) [free function]",
"__gmpz_fits_sint_p" : "extern int __gmpz_fits_sint_p(mpz_srcptr arg0) [free function]",
"__gmpz_cdiv_qr_ui" : "extern long unsigned int __gmpz_cdiv_qr_ui(mpz_ptr arg0, mpz_ptr arg1, mpz_srcptr arg2, long unsigned int arg3) [free function]",
"__gmpz_clear" : "extern void __gmpz_clear(mpz_ptr arg0) [free function]",
"__gmpn_mul" : "extern mp_limb_t __gmpn_mul(mp_ptr arg0, mp_srcptr arg1, mp_size_t arg2, mp_srcptr arg3, mp_size_t arg4) [free function]",
"__gmpz_init_set_si" : "extern void __gmpz_init_set_si(mpz_ptr arg0, long int arg1) [free function]",
"__gmpz_divisible_p" : "extern int __gmpz_divisible_p(mpz_srcptr arg0, mpz_srcptr arg1) [free function]",
"__gmp_errno" : "__gmp_errno [variable]",
"__gmpf_sub_ui" : "extern void __gmpf_sub_ui(mpf_ptr arg0, mpf_srcptr arg1, long unsigned int arg2) [free function]",
"__gmpz_swap" : "extern void __gmpz_swap(mpz_ptr arg0, mpz_ptr arg1) [free function]",
"__gmpz_cmp" : "extern int __gmpz_cmp(mpz_srcptr arg0, mpz_srcptr arg1) [free function]",
"__gmpf_init_set_si" : "extern void __gmpf_init_set_si(mpf_ptr arg0, long int arg1) [free function]",
"__gmpn_lshift" : "extern mp_limb_t __gmpn_lshift(mp_ptr arg0, mp_srcptr arg1, mp_size_t arg2, unsigned int arg3) [free function]",
"__gmpq_cmp_si" : "extern int __gmpq_cmp_si(mpq_srcptr arg0, long int arg1, long unsigned int arg2) [free function]",
"__gmpz_abs" : "void __gmpz_abs(mpz_ptr __gmp_w, mpz_srcptr __gmp_u) [free function]",
"__gmp_fprintf" : "extern int __gmp_fprintf(FILE * arg0, char const * arg1, ...) [free function]",
"__gmpf_set" : "extern void __gmpf_set(mpf_ptr arg0, mpf_srcptr arg1) [free function]",
"__gmpz_divisible_ui_p" : "extern int __gmpz_divisible_ui_p(mpz_srcptr arg0, long unsigned int arg1) [free function]",
"__gmpf_cmp_d" : "extern int __gmpf_cmp_d(mpf_srcptr arg0, double arg1) [free function]",
"__gmpf_get_str" : "extern char * __gmpf_get_str(char * arg0, mp_exp_t * arg1, int arg2, size_t arg3, mpf_srcptr arg4) [free function]",
"__gmpz_fdiv_q_ui" : "extern long unsigned int __gmpz_fdiv_q_ui(mpz_ptr arg0, mpz_srcptr arg1, long unsigned int arg2) [free function]",
"__gmpz_urandomb" : "extern void __gmpz_urandomb(mpz_ptr arg0, __gmp_randstate_struct * arg1, long unsigned int arg2) [free function]",
"__gmpz_get_str" : "extern char * __gmpz_get_str(char * arg0, int arg1, mpz_srcptr arg2) [free function]",
"__gmpz_tdiv_r" : "extern void __gmpz_tdiv_r(mpz_ptr arg0, mpz_srcptr arg1, mpz_srcptr arg2) [free function]",
"__gmpz_urandomm" : "extern void __gmpz_urandomm(mpz_ptr arg0, __gmp_randstate_struct * arg1, mpz_srcptr arg2) [free function]",
"__gmpq_mul" : "extern void __gmpq_mul(mpq_ptr arg0, mpq_srcptr arg1, mpq_srcptr arg2) [free function]",
"__gmpz_tdiv_q" : "extern void __gmpz_tdiv_q(mpz_ptr arg0, mpz_srcptr arg1, mpz_srcptr arg2) [free function]",
"__gmpf_fits_uint_p" : "extern int __gmpf_fits_uint_p(mpf_srcptr arg0) [free function]",
"__gmpz_realloc" : "extern void * __gmpz_realloc(mpz_ptr arg0, mp_size_t arg1) [free function]",
"__gmp_urandomb_ui" : "extern long unsigned int __gmp_urandomb_ui(__gmp_randstate_struct * arg0, long unsigned int arg1) [free function]",
"__gmpz_perfect_power_p" : "extern int __gmpz_perfect_power_p(mpz_srcptr arg0) [free function]",
"__gmpq_get_str" : "extern char * __gmpq_get_str(char * arg0, int arg1, mpq_srcptr arg2) [free function]",
"__gmpn_perfect_square_p" : "extern int __gmpn_perfect_square_p(mp_srcptr arg0, mp_size_t arg1) [free function]",
"__gmpz_addmul" : "extern void __gmpz_addmul(mpz_ptr arg0, mpz_srcptr arg1, mpz_srcptr arg2) [free function]",
"__gmpz_fdiv_ui" : "extern long unsigned int __gmpz_fdiv_ui(mpz_srcptr arg0, long unsigned int arg1) [free function]",
"__gmp_sprintf" : "extern int __gmp_sprintf(char * arg0, char const * arg1, ...) [free function]",
"__gmpz_bin_uiui" : "extern void __gmpz_bin_uiui(mpz_ptr arg0, long unsigned int arg1, long unsigned int arg2) [free function]",
"__gmpz_set_q" : "void __gmpz_set_q(mpz_ptr __gmp_w, mpq_srcptr __gmp_u) [free function]",
"__gmpn_neg_n" : "mp_limb_t __gmpn_neg_n(mp_ptr __gmp_rp, mp_srcptr __gmp_up, mp_size_t __gmp_n) [free function]",
"__gmpf_neg" : "extern void __gmpf_neg(mpf_ptr arg0, mpf_srcptr arg1) [free function]",
"__gmp_randseed" : "extern void __gmp_randseed(__gmp_randstate_struct * arg0, mpz_srcptr arg1) [free function]",
"__gmpz_scan1" : "extern long unsigned int __gmpz_scan1(mpz_srcptr arg0, long unsigned int arg1) [free function]",
"__gmpz_nextprime" : "extern void __gmpz_nextprime(mpz_ptr arg0, mpz_srcptr arg1) [free function]",
"__gmpz_si_kronecker" : "extern int __gmpz_si_kronecker(long int arg0, mpz_srcptr arg1) [free function]",
"__gmpz_congruent_ui_p" : "extern int __gmpz_congruent_ui_p(mpz_srcptr arg0, long unsigned int arg1, long unsigned int arg2) [free function]",
"__gmpz_cdiv_ui" : "extern long unsigned int __gmpz_cdiv_ui(mpz_srcptr arg0, long unsigned int arg1) [free function]",
"__gmpz_init_set_ui" : "extern void __gmpz_init_set_ui(mpz_ptr arg0, long unsigned int arg1) [free function]",
"__gmpn_sub_n" : "extern mp_limb_t __gmpn_sub_n(mp_ptr arg0, mp_srcptr arg1, mp_srcptr arg2, mp_size_t arg3) [free function]",
"__gmpq_set_num" : "extern void __gmpq_set_num(mpq_ptr arg0, mpz_srcptr arg1) [free function]",
"__gmpz_kronecker_ui" : "extern int __gmpz_kronecker_ui(mpz_srcptr arg0, long unsigned int arg1) [free function]",
"__gmpf_add_ui" : "extern void __gmpf_add_ui(mpf_ptr arg0, mpf_srcptr arg1, long unsigned int arg2) [free function]",
"__gmpz_gcdext" : "extern void __gmpz_gcdext(mpz_ptr arg0, mpz_ptr arg1, mpz_ptr arg2, mpz_srcptr arg3, mpz_srcptr arg4) [free function]",
"__gmpq_get_num" : "extern void __gmpq_get_num(mpz_ptr arg0, mpq_srcptr arg1) [free function]",
"__gmpf_size" : "extern size_t __gmpf_size(mpf_srcptr arg0) [free function]",
"__gmpq_inv" : "extern void __gmpq_inv(mpq_ptr arg0, mpq_srcptr arg1) [free function]",
"__gmpn_divrem_1" : "extern mp_limb_t __gmpn_divrem_1(mp_ptr arg0, mp_size_t arg1, mp_srcptr arg2, mp_size_t arg3, mp_limb_t arg4) [free function]",
"__gmpq_canonicalize" : "extern void __gmpq_canonicalize(mpq_ptr arg0) [free function]",
"__gmpz_popcount" : "long unsigned int __gmpz_popcount(mpz_srcptr __gmp_u) [free function]",
"__gmpf_ui_sub" : "extern void __gmpf_ui_sub(mpf_ptr arg0, long unsigned int arg1, mpf_srcptr arg2) [free function]",
"__gmpz_cmp_si" : "extern int __gmpz_cmp_si(mpz_srcptr arg0, long int arg1) [free function]",
"__gmpz_set_str" : "extern int __gmpz_set_str(mpz_ptr arg0, char const * arg1, int arg2) [free function]",
"__gmpz_tstbit" : "extern int __gmpz_tstbit(mpz_srcptr arg0, long unsigned int arg1) [free function]",
"__gmpz_set_si" : "extern void __gmpz_set_si(mpz_ptr arg0, long int arg1) [free function]",
"__gmpq_init" : "extern void __gmpq_init(mpq_ptr arg0) [free function]",
"__gmpz_out_raw" : "extern size_t __gmpz_out_raw(FILE * arg0, mpz_srcptr arg1) [free function]",
"__gmpf_trunc" : "extern void __gmpf_trunc(mpf_ptr arg0, mpf_srcptr arg1) [free function]",
"__gmpn_gcdext_1" : "extern mp_limb_t __gmpn_gcdext_1(mp_ptr arg0, mp_ptr arg1, mp_limb_t arg2, mp_limb_t arg3) [free function]",
"__gmpz_cmpabs_d" : "extern int __gmpz_cmpabs_d(mpz_srcptr arg0, double arg1) [free function]",
"__gmpz_export" : "extern void * __gmpz_export(void * arg0, size_t * arg1, int arg2, size_t arg3, int arg4, size_t arg5, mpz_srcptr arg6) [free function]",
"__gmpz_get_d_2exp" : "extern double __gmpz_get_d_2exp(long int * arg0, mpz_srcptr arg1) [free function]",
"__gmpz_add_ui" : "extern void __gmpz_add_ui(mpz_ptr arg0, mpz_srcptr arg1, long unsigned int arg2) [free function]",
"__gmpn_gcdext" : "extern mp_size_t __gmpn_gcdext(mp_ptr arg0, mp_ptr arg1, mp_size_t * arg2, mp_ptr arg3, mp_size_t arg4, mp_ptr arg5, mp_size_t arg6) [free function]",
"__gmpf_sqrt" : "extern void __gmpf_sqrt(mpf_ptr arg0, mpf_srcptr arg1) [free function]",
"__gmpz_sizeinbase" : "extern size_t __gmpz_sizeinbase(mpz_srcptr arg0, int arg1) [free function]",
"__gmpz_fdiv_r_ui" : "extern long unsigned int __gmpz_fdiv_r_ui(mpz_ptr arg0, mpz_srcptr arg1, long unsigned int arg2) [free function]",
"__gmp_randinit_default" : "extern void __gmp_randinit_default(__gmp_randstate_struct * arg0) [free function]",
"__gmpz_getlimbn" : "mp_limb_t __gmpz_getlimbn(mpz_srcptr __gmp_z, mp_size_t __gmp_n) [free function]",
"__gmpf_get_si" : "extern long int __gmpf_get_si(mpf_srcptr arg0) [free function]",
"__gmpz_init" : "extern void __gmpz_init(mpz_ptr arg0) [free function]",
"__gmpf_div_2exp" : "extern void __gmpf_div_2exp(mpf_ptr arg0, mpf_srcptr arg1, long unsigned int arg2) [free function]",
"__gmpf_set_si" : "extern void __gmpf_set_si(mpf_ptr arg0, long int arg1) [free function]",
"__gmpq_equal" : "extern int __gmpq_equal(mpq_srcptr arg0, mpq_srcptr arg1) [free function]",
"__gmpz_rrandomb" : "extern void __gmpz_rrandomb(mpz_ptr arg0, __gmp_randstate_struct * arg1, long unsigned int arg2) [free function]",
"__gmpf_cmp_si" : "extern int __gmpf_cmp_si(mpf_srcptr arg0, long int arg1) [free function]",
"__gmpz_scan0" : "extern long unsigned int __gmpz_scan0(mpz_srcptr arg0, long unsigned int arg1) [free function]",
"__gmpz_init2" : "extern void __gmpz_init2(mpz_ptr arg0, long unsigned int arg1) [free function]",
"__gmpz_random2" : "extern void __gmpz_random2(mpz_ptr arg0, mp_size_t arg1) [free function]",
"__gmpn_pow_1" : "extern mp_size_t __gmpn_pow_1(mp_ptr arg0, mp_srcptr arg1, mp_size_t arg2, mp_limb_t arg3, mp_ptr arg4) [free function]",
"__gmpz_gcd" : "extern void __gmpz_gcd(mpz_ptr arg0, mpz_srcptr arg1, mpz_srcptr arg2) [free function]",
"__gmpf_mul_2exp" : "extern void __gmpf_mul_2exp(mpf_ptr arg0, mpf_srcptr arg1, long unsigned int arg2) [free function]",
"__gmpq_get_d" : "extern double __gmpq_get_d(mpq_srcptr arg0) [free function]",
"__gmpf_mul" : "extern void __gmpf_mul(mpf_ptr arg0, mpf_srcptr arg1, mpf_srcptr arg2) [free function]",
"__gmpf_div_ui" : "extern void __gmpf_div_ui(mpf_ptr arg0, mpf_srcptr arg1, long unsigned int arg2) [free function]",
"__gmpq_mul_2exp" : "extern void __gmpq_mul_2exp(mpq_ptr arg0, mpq_srcptr arg1, long unsigned int arg2) [free function]",
"__gmpz_out_str" : "extern size_t __gmpz_out_str(FILE * arg0, int arg1, mpz_srcptr arg2) [free function]",
"__gmpn_divrem_2" : "extern mp_limb_t __gmpn_divrem_2(mp_ptr arg0, mp_size_t arg1, mp_ptr arg2, mp_size_t arg3, mp_srcptr arg4) [free function]",
"__gmpz_cmpabs" : "extern int __gmpz_cmpabs(mpz_srcptr arg0, mpz_srcptr arg1) [free function]",
"__gmpz_powm_ui" : "extern void __gmpz_powm_ui(mpz_ptr arg0, mpz_srcptr arg1, long unsigned int arg2, mpz_srcptr arg3) [free function]",
"__gmpq_out_str" : "extern size_t __gmpq_out_str(FILE * arg0, int arg1, mpq_srcptr arg2) [free function]",
"__gmpz_neg" : "void __gmpz_neg(mpz_ptr __gmp_w, mpz_srcptr __gmp_u) [free function]",
"__gmpf_swap" : "extern void __gmpf_swap(mpf_ptr arg0, mpf_ptr arg1) [free function]",
"__gmp_randseed_ui" : "extern void __gmp_randseed_ui(__gmp_randstate_struct * arg0, long unsigned int arg1) [free function]",
"__gmpz_sqrtrem" : "extern void __gmpz_sqrtrem(mpz_ptr arg0, mpz_ptr arg1, mpz_srcptr arg2) [free function]",
"__gmpz_tdiv_qr_ui" : "extern long unsigned int __gmpz_tdiv_qr_ui(mpz_ptr arg0, mpz_ptr arg1, mpz_srcptr arg2, long unsigned int arg3) [free function]",
"__gmpn_bdivmod" : "extern mp_limb_t __gmpn_bdivmod(mp_ptr arg0, mp_ptr arg1, mp_size_t arg2, mp_srcptr arg3, mp_size_t arg4, long unsigned int arg5) [free function]",
"__gmpn_random" : "extern void __gmpn_random(mp_ptr arg0, mp_size_t arg1) [free function]",
"__gmpq_set_z" : "extern void __gmpq_set_z(mpq_ptr arg0, mpz_srcptr arg1) [free function]",
"__gmpz_cdiv_q" : "extern void __gmpz_cdiv_q(mpz_ptr arg0, mpz_srcptr arg1, mpz_srcptr arg2) [free function]",
"__gmpz_get_si" : "extern long int __gmpz_get_si(mpz_srcptr arg0) [free function]",
"__gmpf_init_set" : "extern void __gmpf_init_set(mpf_ptr arg0, mpf_srcptr arg1) [free function]",
"__gmpf_init_set_d" : "extern void __gmpf_init_set_d(mpf_ptr arg0, double arg1) [free function]",
"__gmpf_cmp" : "extern int __gmpf_cmp(mpf_srcptr arg0, mpf_srcptr arg1) [free function]",
"__gmpf_eq" : "extern int __gmpf_eq(mpf_srcptr arg0, mpf_srcptr arg1, long unsigned int arg2) [free function]",
"__gmpn_popcount" : "extern long unsigned int __gmpn_popcount(mp_srcptr arg0, mp_size_t arg1) [free function]",
"__gmpf_ceil" : "extern void __gmpf_ceil(mpf_ptr arg0, mpf_srcptr arg1) [free function]",
"__gmpn_add_1" : "mp_limb_t __gmpn_add_1(mp_ptr __gmp_dst, mp_srcptr __gmp_src, mp_size_t __gmp_size, mp_limb_t __gmp_n) [free function]",
"__gmpz_fib2_ui" : "extern void __gmpz_fib2_ui(mpz_ptr arg0, mpz_ptr arg1, long unsigned int arg2) [free function]",
"__gmp_printf" : "extern int __gmp_printf(char const * arg0, ...) [free function]",
"__gmpq_set_f" : "extern void __gmpq_set_f(mpq_ptr arg0, mpf_srcptr arg1) [free function]",
"__gmpf_clear" : "extern void __gmpf_clear(mpf_ptr arg0) [free function]",
"__gmpn_get_str" : "extern size_t __gmpn_get_str(unsigned char * arg0, int arg1, mp_ptr arg2, mp_size_t arg3) [free function]",
"__gmp_fscanf" : "extern int __gmp_fscanf(FILE * arg0, char const * arg1, ...) [free function]",
"__gmpz_ui_kronecker" : "extern int __gmpz_ui_kronecker(long unsigned int arg0, mpz_srcptr arg1) [free function]",
"__gmpn_add" : "mp_limb_t __gmpn_add(mp_ptr __gmp_wp, mp_srcptr __gmp_xp, mp_size_t __gmp_xsize, mp_srcptr __gmp_yp, mp_size_t __gmp_ysize) [free function]",
"__gmpn_sub" : "mp_limb_t __gmpn_sub(mp_ptr __gmp_wp, mp_srcptr __gmp_xp, mp_size_t __gmp_xsize, mp_srcptr __gmp_yp, mp_size_t __gmp_ysize) [free function]",
"__gmpz_bin_ui" : "extern void __gmpz_bin_ui(mpz_ptr arg0, mpz_srcptr arg1, long unsigned int arg2) [free function]",
"__gmpz_fdiv_q_2exp" : "extern void __gmpz_fdiv_q_2exp(mpz_ptr arg0, mpz_srcptr arg1, long unsigned int arg2) [free function]",
"__gmpf_dump" : "extern void __gmpf_dump(mpf_srcptr arg0) [free function]",
"__gmpz_tdiv_r_2exp" : "extern void __gmpz_tdiv_r_2exp(mpz_ptr arg0, mpz_srcptr arg1, long unsigned int arg2) [free function]",
"__gmpz_submul_ui" : "extern void __gmpz_submul_ui(mpz_ptr arg0, mpz_srcptr arg1, long unsigned int arg2) [free function]",
"__gmpz_cdiv_q_ui" : "extern long unsigned int __gmpz_cdiv_q_ui(mpz_ptr arg0, mpz_srcptr arg1, long unsigned int arg2) [free function]",
"__gmpz_dump" : "extern void __gmpz_dump(mpz_srcptr arg0) [free function]",
"__gmp_randclear" : "extern void __gmp_randclear(__gmp_randstate_struct * arg0) [free function]",
"__gmp_version" : "__gmp_version [variable]",
"__gmpz_remove" : "extern long unsigned int __gmpz_remove(mpz_ptr arg0, mpz_srcptr arg1, mpz_srcptr arg2) [free function]",
"__gmpf_set_default_prec" : "extern void __gmpf_set_default_prec(long unsigned int arg0) [free function]",
"__gmpz_congruent_p" : "extern int __gmpz_congruent_p(mpz_srcptr arg0, mpz_srcptr arg1, mpz_srcptr arg2) [free function]",
"__gmpf_pow_ui" : "extern void __gmpf_pow_ui(mpf_ptr arg0, mpf_srcptr arg1, long unsigned int arg2) [free function]",
"__gmpz_lcm_ui" : "extern void __gmpz_lcm_ui(mpz_ptr arg0, mpz_srcptr arg1, long unsigned int arg2) [free function]",
"__gmpz_rootrem" : "extern void __gmpz_rootrem(mpz_ptr arg0, mpz_ptr arg1, mpz_srcptr arg2, long unsigned int arg3) [free function]",
"__gmpz_lucnum2_ui" : "extern void __gmpz_lucnum2_ui(mpz_ptr arg0, mpz_ptr arg1, long unsigned int arg2) [free function]",
"__gmpz_set_ui" : "extern void __gmpz_set_ui(mpz_ptr arg0, long unsigned int arg1) [free function]",
"__gmpq_abs" : "void __gmpq_abs(mpq_ptr __gmp_w, mpq_srcptr __gmp_u) [free function]",
"__gmpn_hamdist" : "extern long unsigned int __gmpn_hamdist(mp_srcptr arg0, mp_srcptr arg1, mp_size_t arg2) [free function]",
"__gmpf_fits_ushort_p" : "extern int __gmpf_fits_ushort_p(mpf_srcptr arg0) [free function]",
"__gmpz_set" : "extern void __gmpz_set(mpz_ptr arg0, mpz_srcptr arg1) [free function]",
"__gmpq_set_den" : "extern void __gmpq_set_den(mpq_ptr arg0, mpz_srcptr arg1) [free function]",
"__gmpf_abs" : "extern void __gmpf_abs(mpf_ptr arg0, mpf_srcptr arg1) [free function]",
"__gmp_get_memory_functions" : "extern void __gmp_get_memory_functions(void * (*)( ::size_t ) * * arg0, void * (*)( void *,::size_t,::size_t ) * * arg1, void (*)( void *,::size_t ) * * arg2) [free function]",
"__gmpf_ui_div" : "extern void __gmpf_ui_div(mpf_ptr arg0, long unsigned int arg1, mpf_srcptr arg2) [free function]",
"__gmpn_gcd" : "extern mp_size_t __gmpn_gcd(mp_ptr arg0, mp_ptr arg1, mp_size_t arg2, mp_ptr arg3, mp_size_t arg4) [free function]",
"__gmpn_add_n" : "extern mp_limb_t __gmpn_add_n(mp_ptr arg0, mp_srcptr arg1, mp_srcptr arg2, mp_size_t arg3) [free function]",
"__gmpz_inp_raw" : "extern size_t __gmpz_inp_raw(mpz_ptr arg0, FILE * arg1) [free function]",
"__gmpz_fits_ulong_p" : "int __gmpz_fits_ulong_p(mpz_srcptr __gmp_z) [free function]",
"__gmpq_clear" : "extern void __gmpq_clear(mpq_ptr arg0) [free function]",
"__gmpf_get_ui" : "extern long unsigned int __gmpf_get_ui(mpf_srcptr arg0) [free function]",
"__gmpz_fdiv_qr" : "extern void __gmpz_fdiv_qr(mpz_ptr arg0, mpz_ptr arg1, mpz_srcptr arg2, mpz_srcptr arg3) [free function]",
"__gmpf_mul_ui" : "extern void __gmpf_mul_ui(mpf_ptr arg0, mpf_srcptr arg1, long unsigned int arg2) [free function]",
"__gmpz_combit" : "extern void __gmpz_combit(mpz_ptr arg0, long unsigned int arg1) [free function]",
"__gmpz_addmul_ui" : "extern void __gmpz_addmul_ui(mpz_ptr arg0, mpz_srcptr arg1, long unsigned int arg2) [free function]",
"__gmpf_inp_str" : "extern size_t __gmpf_inp_str(mpf_ptr arg0, FILE * arg1, int arg2) [free function]",
"__gmpf_init_set_str" : "extern int __gmpf_init_set_str(mpf_ptr arg0, char const * arg1, int arg2) [free function]",
}
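# The table above maps each exported GMP symbol name to its undecorated C
# signature string; the ctypes declarations below use those same signature
# strings as keys into libgmp_lib.undecorated_names to resolve the actual
# library symbols.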
class gmp_randalg_t( ctypes_utils.Enumeration ):
GMP_RAND_ALG_DEFAULT = 0
GMP_RAND_ALG_LC = 0
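# Mirrors gmp_randalg_t from gmp.h, where GMP_RAND_ALG_LC (linear
# congruential) is declared as an alias of GMP_RAND_ALG_DEFAULT, so both
# members deliberately share the value 0.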
class _IO_FILE(ctypes.Structure):
"""class _IO_FILE"""
def __init__( self, *args, **keywd ):
raise RuntimeError( "Unable to create instance of opaque type." )
class _IO_marker(ctypes.Structure):
"""class _IO_marker"""
def __init__( self, *args, **keywd ):
raise RuntimeError( "Unable to create instance of opaque type." )
class __mpz_struct(ctypes.Structure):
"""class __mpz_struct"""
class __gmp_randstate_struct(ctypes.Structure):
"""class __gmp_randstate_struct"""
class _(ctypes.Union):
"""class __gmp_randstate_struct"""
class __mpf_struct(ctypes.Structure):
"""class __mpf_struct"""
class __mpq_struct(ctypes.Structure):
"""class __mpq_struct"""
__mpz_struct._fields_ = [ #class __mpz_struct
("_mp_alloc", ctypes.c_int),
("_mp_size", ctypes.c_int),
("_mp_d", ctypes.POINTER( ctypes.c_ulong )),
]
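# Field semantics (per gmp.h): _mp_alloc is the number of limbs allocated
# at _mp_d, _mp_size is the number of limbs in use with its sign carrying
# the sign of the integer, and _mp_d points to the limb array,
# least-significant limb first.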
__gmp_randstate_struct._._fields_ = [ #class __gmp_randstate_struct
("_mp_lc", ctypes.c_void_p),
]
__gmp_randstate_struct._anonymous_ = ["_mp_algdata"]
__gmp_randstate_struct._fields_ = [ #class __gmp_randstate_struct
("_mp_seed", ( __mpz_struct * 1 )),
("_mp_alg", gmp_randalg_t),
("_mp_algdata", __gmp_randstate_struct._),
]
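# Because "_mp_algdata" is listed in _anonymous_, ctypes promotes the
# union's members onto the struct itself, so state._mp_lc is reachable
# directly on a __gmp_randstate_struct instance, matching the anonymous
# union in gmp.h.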
__mpf_struct._fields_ = [ #class __mpf_struct
("_mp_prec", ctypes.c_int),
("_mp_size", ctypes.c_int),
("_mp_exp", ctypes.c_long),
("_mp_d", ctypes.POINTER( ctypes.c_ulong )),
]
__mpq_struct._fields_ = [ #class __mpq_struct
("_mp_num", __mpz_struct),
("_mp_den", __mpz_struct),
]
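# In gmp.h the public handles are one-element arrays of these structs
# (typedef __mpz_struct mpz_t[1]; and likewise for mpq_t and mpf_t).  An
# equivalent ctypes spelling, shown only for illustration, would be:
#   mpz_t = __mpz_struct * 1
#   mpq_t = __mpq_struct * 1
#   mpf_t = __mpf_struct * 1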
gmpq_add_type = ctypes.CFUNCTYPE( None, ctypes.POINTER( __mpq_struct ), ctypes.POINTER( __mpq_struct ), ctypes.POINTER( __mpq_struct ) )
gmpq_add = gmpq_add_type( ( libgmp_lib.undecorated_names["extern void __gmpq_add(mpq_ptr arg0, mpq_srcptr arg1, mpq_srcptr arg2) [free function]"], libgmp_lib ) )
gmpq_canonicalize_type = ctypes.CFUNCTYPE( None, ctypes.POINTER( __mpq_struct ) )
gmpq_canonicalize = gmpq_canonicalize_type( ( libgmp_lib.undecorated_names["extern void __gmpq_canonicalize(mpq_ptr arg0) [free function]"], libgmp_lib ) )
gmpq_sub_type = ctypes.CFUNCTYPE( None, ctypes.POINTER( __mpq_struct ), ctypes.POINTER( __mpq_struct ), ctypes.POINTER( __mpq_struct ) )
gmpq_sub = gmpq_sub_type( ( libgmp_lib.undecorated_names["extern void __gmpq_sub(mpq_ptr arg0, mpq_srcptr arg1, mpq_srcptr arg2) [free function]"], libgmp_lib ) )
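# --- illustrative usage sketch (hand-written, not generator output) ---
# Shows 1/3 + 1/6 == 1/2 through the raw mpq bindings.  gmpq_init is
# assumed to be bound elsewhere in this module, since __gmpq_init appears
# in the name table above; every other call used here is bound in this file.
def _example_mpq_add():
    a, b, r = __mpq_struct(), __mpq_struct(), __mpq_struct()
    for q in (a, b, r):
        gmpq_init(ctypes.byref(q))          # assumed binding, see note above
    gmpq_set_si(ctypes.byref(a), 1, 3)      # a = 1/3 (already canonical)
    gmpq_set_si(ctypes.byref(b), 1, 6)      # b = 1/6
    gmpq_add(ctypes.byref(r), ctypes.byref(a), ctypes.byref(b))
    value = gmpq_get_d(ctypes.byref(r))     # 0.5
    for q in (a, b, r):
        gmpq_clear(ctypes.byref(q))
    return value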
gmpz_tdiv_q_ui_type = ctypes.CFUNCTYPE( ctypes.c_ulong, ctypes.POINTER( __mpz_struct ), ctypes.POINTER( __mpz_struct ), ctypes.c_ulong )
gmpz_tdiv_q_ui = gmpz_tdiv_q_ui_type( ( libgmp_lib.undecorated_names["extern long unsigned int __gmpz_tdiv_q_ui(mpz_ptr arg0, mpz_srcptr arg1, long unsigned int arg2) [free function]"], libgmp_lib ) )
gmpz_hamdist_type = ctypes.CFUNCTYPE( ctypes.c_ulong, ctypes.POINTER( __mpz_struct ), ctypes.POINTER( __mpz_struct ) )
gmpz_hamdist = gmpz_hamdist_type( ( libgmp_lib.undecorated_names["extern long unsigned int __gmpz_hamdist(mpz_srcptr arg0, mpz_srcptr arg1) [free function]"], libgmp_lib ) )
gmpn_gcdext_type = ctypes.CFUNCTYPE( ctypes.c_long, ctypes.POINTER( ctypes.c_ulong ), ctypes.POINTER( ctypes.c_ulong ), ctypes.POINTER( ctypes.c_long ), ctypes.POINTER( ctypes.c_ulong ), ctypes.c_long, ctypes.POINTER( ctypes.c_ulong ), ctypes.c_long )
gmpn_gcdext = gmpn_gcdext_type( ( libgmp_lib.undecorated_names["extern mp_size_t __gmpn_gcdext(mp_ptr arg0, mp_ptr arg1, mp_size_t * arg2, mp_ptr arg3, mp_size_t arg4, mp_ptr arg5, mp_size_t arg6) [free function]"], libgmp_lib ) )
gmpn_scan0_type = ctypes.CFUNCTYPE( ctypes.c_ulong, ctypes.POINTER( ctypes.c_ulong ), ctypes.c_ulong )
gmpn_scan0 = gmpn_scan0_type( ( libgmp_lib.undecorated_names["extern long unsigned int __gmpn_scan0(mp_srcptr arg0, long unsigned int arg1) [free function]"], libgmp_lib ) )
gmpn_scan1_type = ctypes.CFUNCTYPE( ctypes.c_ulong, ctypes.POINTER( ctypes.c_ulong ), ctypes.c_ulong )
gmpn_scan1 = gmpn_scan1_type( ( libgmp_lib.undecorated_names["extern long unsigned int __gmpn_scan1(mp_srcptr arg0, long unsigned int arg1) [free function]"], libgmp_lib ) )
gmpn_gcdext_1_type = ctypes.CFUNCTYPE( ctypes.c_ulong, ctypes.POINTER( ctypes.c_ulong ), ctypes.POINTER( ctypes.c_ulong ), ctypes.c_ulong, ctypes.c_ulong )
gmpn_gcdext_1 = gmpn_gcdext_1_type( ( libgmp_lib.undecorated_names["extern mp_limb_t __gmpn_gcdext_1(mp_ptr arg0, mp_ptr arg1, mp_limb_t arg2, mp_limb_t arg3) [free function]"], libgmp_lib ) )
gmpz_init_set_d_type = ctypes.CFUNCTYPE( None, ctypes.POINTER( __mpz_struct ), ctypes.c_double )
gmpz_init_set_d = gmpz_init_set_d_type( ( libgmp_lib.undecorated_names["extern void __gmpz_init_set_d(mpz_ptr arg0, double arg1) [free function]"], libgmp_lib ) )
gmpz_popcount_type = ctypes.CFUNCTYPE( ctypes.c_ulong, ctypes.POINTER( __mpz_struct ) )
gmpz_popcount = gmpz_popcount_type( ( libgmp_lib.undecorated_names["long unsigned int __gmpz_popcount(mpz_srcptr __gmp_u) [free function]"], libgmp_lib ) )
gmpz_get_d_2exp_type = ctypes.CFUNCTYPE( ctypes.c_double, ctypes.POINTER( ctypes.c_long ), ctypes.POINTER( __mpz_struct ) )
gmpz_get_d_2exp = gmpz_get_d_2exp_type( ( libgmp_lib.undecorated_names["extern double __gmpz_get_d_2exp(long int * arg0, mpz_srcptr arg1) [free function]"], libgmp_lib ) )
gmpz_powm_ui_type = ctypes.CFUNCTYPE( None, ctypes.POINTER( __mpz_struct ), ctypes.POINTER( __mpz_struct ), ctypes.c_ulong, ctypes.POINTER( __mpz_struct ) )
gmpz_powm_ui = gmpz_powm_ui_type( ( libgmp_lib.undecorated_names["extern void __gmpz_powm_ui(mpz_ptr arg0, mpz_srcptr arg1, long unsigned int arg2, mpz_srcptr arg3) [free function]"], libgmp_lib ) )
gmpn_add_type = ctypes.CFUNCTYPE( ctypes.c_ulong, ctypes.POINTER( ctypes.c_ulong ), ctypes.POINTER( ctypes.c_ulong ), ctypes.c_long, ctypes.POINTER( ctypes.c_ulong ), ctypes.c_long )
gmpn_add = gmpn_add_type( ( libgmp_lib.undecorated_names["mp_limb_t __gmpn_add(mp_ptr __gmp_wp, mp_srcptr __gmp_xp, mp_size_t __gmp_xsize, mp_srcptr __gmp_yp, mp_size_t __gmp_ysize) [free function]"], libgmp_lib ) )
gmpz_ui_pow_ui_type = ctypes.CFUNCTYPE( None, ctypes.POINTER( __mpz_struct ), ctypes.c_ulong, ctypes.c_ulong )
gmpz_ui_pow_ui = gmpz_ui_pow_ui_type( ( libgmp_lib.undecorated_names["extern void __gmpz_ui_pow_ui(mpz_ptr arg0, long unsigned int arg1, long unsigned int arg2) [free function]"], libgmp_lib ) )
gmpz_nextprime_type = ctypes.CFUNCTYPE( None, ctypes.POINTER( __mpz_struct ), ctypes.POINTER( __mpz_struct ) )
gmpz_nextprime = gmpz_nextprime_type( ( libgmp_lib.undecorated_names["extern void __gmpz_nextprime(mpz_ptr arg0, mpz_srcptr arg1) [free function]"], libgmp_lib ) )
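# --- illustrative usage sketch (hand-written, not generator output) ---
# Finds the smallest prime above 90 with gmpz_nextprime; gmpz_init_set_d
# (bound above) doubles as the initialiser, so no separate gmpz_init call
# is needed.
def _example_nextprime():
    n, p = __mpz_struct(), __mpz_struct()
    gmpz_init_set_d(ctypes.byref(n), 90.0)
    gmpz_init_set_d(ctypes.byref(p), 0.0)
    gmpz_nextprime(ctypes.byref(p), ctypes.byref(n))
    result = gmpz_get_ui(ctypes.byref(p))   # 97
    for z in (n, p):
        gmpz_clear(ctypes.byref(z))
    return result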
gmpz_size_type = ctypes.CFUNCTYPE( ctypes.c_uint, ctypes.POINTER( __mpz_struct ) )
gmpz_size = gmpz_size_type( ( libgmp_lib.undecorated_names["size_t __gmpz_size(mpz_srcptr __gmp_z) [free function]"], libgmp_lib ) )
gmpf_sqrt_type = ctypes.CFUNCTYPE( None, ctypes.POINTER( __mpf_struct ), ctypes.POINTER( __mpf_struct ) )
gmpf_sqrt = gmpf_sqrt_type( ( libgmp_lib.undecorated_names["extern void __gmpf_sqrt(mpf_ptr arg0, mpf_srcptr arg1) [free function]"], libgmp_lib ) )
gmpz_ui_sub_type = ctypes.CFUNCTYPE( None, ctypes.POINTER( __mpz_struct ), ctypes.c_ulong, ctypes.POINTER( __mpz_struct ) )
gmpz_ui_sub = gmpz_ui_sub_type( ( libgmp_lib.undecorated_names["extern void __gmpz_ui_sub(mpz_ptr arg0, long unsigned int arg1, mpz_srcptr arg2) [free function]"], libgmp_lib ) )
gmp_errno = ctypes.c_int.in_dll( libgmp_lib, libgmp_lib.undecorated_names["__gmp_errno [variable]"] )
gmpn_bdivmod_type = ctypes.CFUNCTYPE( ctypes.c_ulong, ctypes.POINTER( ctypes.c_ulong ), ctypes.POINTER( ctypes.c_ulong ), ctypes.c_long, ctypes.POINTER( ctypes.c_ulong ), ctypes.c_long, ctypes.c_ulong )
gmpn_bdivmod = gmpn_bdivmod_type( ( libgmp_lib.undecorated_names["extern mp_limb_t __gmpn_bdivmod(mp_ptr arg0, mp_ptr arg1, mp_size_t arg2, mp_srcptr arg3, mp_size_t arg4, long unsigned int arg5) [free function]"], libgmp_lib ) )
gmpz_cdiv_q_ui_type = ctypes.CFUNCTYPE( ctypes.c_ulong, ctypes.POINTER( __mpz_struct ), ctypes.POINTER( __mpz_struct ), ctypes.c_ulong )
gmpz_cdiv_q_ui = gmpz_cdiv_q_ui_type( ( libgmp_lib.undecorated_names["extern long unsigned int __gmpz_cdiv_q_ui(mpz_ptr arg0, mpz_srcptr arg1, long unsigned int arg2) [free function]"], libgmp_lib ) )
gmpz_tdiv_r_ui_type = ctypes.CFUNCTYPE( ctypes.c_ulong, ctypes.POINTER( __mpz_struct ), ctypes.POINTER( __mpz_struct ), ctypes.c_ulong )
gmpz_tdiv_r_ui = gmpz_tdiv_r_ui_type( ( libgmp_lib.undecorated_names["extern long unsigned int __gmpz_tdiv_r_ui(mpz_ptr arg0, mpz_srcptr arg1, long unsigned int arg2) [free function]"], libgmp_lib ) )
gmpz_export_type = ctypes.CFUNCTYPE( ctypes.c_void_p, ctypes.c_void_p, ctypes.POINTER( ctypes.c_uint ), ctypes.c_int, ctypes.c_uint, ctypes.c_int, ctypes.c_uint, ctypes.POINTER( __mpz_struct ) )
gmpz_export = gmpz_export_type( ( libgmp_lib.undecorated_names["extern void * __gmpz_export(void * arg0, size_t * arg1, int arg2, size_t arg3, int arg4, size_t arg5, mpz_srcptr arg6) [free function]"], libgmp_lib ) )
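# --- illustrative usage sketch (hand-written, not generator output) ---
# Serialises an mpz into big-endian bytes with gmpz_export: order=1 puts
# the most-significant word first, word size is 1 byte, endian=0 means
# native (irrelevant for 1-byte words), and there are 0 nail bits.
def _example_export():
    z = __mpz_struct()
    gmpz_init_set_d(ctypes.byref(z), 65534.0)
    buf = (ctypes.c_ubyte * 8)()
    count = ctypes.c_uint(0)
    gmpz_export(ctypes.cast(buf, ctypes.c_void_p), ctypes.byref(count),
                1, 1, 0, 0, ctypes.byref(z))
    data = bytes(buf[:count.value])         # b"\xff\xfe"
    gmpz_clear(ctypes.byref(z))
    return data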
gmp_randseed_ui_type = ctypes.CFUNCTYPE( None, ctypes.POINTER( __gmp_randstate_struct ), ctypes.c_ulong )
gmp_randseed_ui = gmp_randseed_ui_type( ( libgmp_lib.undecorated_names["extern void __gmp_randseed_ui(__gmp_randstate_struct * arg0, long unsigned int arg1) [free function]"], libgmp_lib ) )
gmpf_mul_2exp_type = ctypes.CFUNCTYPE( None, ctypes.POINTER( __mpf_struct ), ctypes.POINTER( __mpf_struct ), ctypes.c_ulong )
gmpf_mul_2exp = gmpf_mul_2exp_type( ( libgmp_lib.undecorated_names["extern void __gmpf_mul_2exp(mpf_ptr arg0, mpf_srcptr arg1, long unsigned int arg2) [free function]"], libgmp_lib ) )
gmpn_sub_1_type = ctypes.CFUNCTYPE( ctypes.c_ulong, ctypes.POINTER( ctypes.c_ulong ), ctypes.POINTER( ctypes.c_ulong ), ctypes.c_long, ctypes.c_ulong )
gmpn_sub_1 = gmpn_sub_1_type( ( libgmp_lib.undecorated_names["mp_limb_t __gmpn_sub_1(mp_ptr __gmp_dst, mp_srcptr __gmp_src, mp_size_t __gmp_size, mp_limb_t __gmp_n) [free function]"], libgmp_lib ) )
gmpq_cmp_ui_type = ctypes.CFUNCTYPE( ctypes.c_int, ctypes.POINTER( __mpq_struct ), ctypes.c_ulong, ctypes.c_ulong )
gmpq_cmp_ui = gmpq_cmp_ui_type( ( libgmp_lib.undecorated_names["extern int __gmpq_cmp_ui(mpq_srcptr arg0, long unsigned int arg1, long unsigned int arg2) [free function]"], libgmp_lib ) )
gmpq_out_str_type = ctypes.CFUNCTYPE( ctypes.c_uint, ctypes.POINTER( _IO_FILE ), ctypes.c_int, ctypes.POINTER( __mpq_struct ) )
gmpq_out_str = gmpq_out_str_type( ( libgmp_lib.undecorated_names["extern size_t __gmpq_out_str(FILE * arg0, int arg1, mpq_srcptr arg2) [free function]"], libgmp_lib ) )
gmpn_hamdist_type = ctypes.CFUNCTYPE( ctypes.c_ulong, ctypes.POINTER( ctypes.c_ulong ), ctypes.POINTER( ctypes.c_ulong ), ctypes.c_long )
gmpn_hamdist = gmpn_hamdist_type( ( libgmp_lib.undecorated_names["extern long unsigned int __gmpn_hamdist(mp_srcptr arg0, mp_srcptr arg1, mp_size_t arg2) [free function]"], libgmp_lib ) )
gmpz_submul_type = ctypes.CFUNCTYPE( None, ctypes.POINTER( __mpz_struct ), ctypes.POINTER( __mpz_struct ), ctypes.POINTER( __mpz_struct ) )
gmpz_submul = gmpz_submul_type( ( libgmp_lib.undecorated_names["extern void __gmpz_submul(mpz_ptr arg0, mpz_srcptr arg1, mpz_srcptr arg2) [free function]"], libgmp_lib ) )
gmp_randseed_type = ctypes.CFUNCTYPE( None, ctypes.POINTER( __gmp_randstate_struct ), ctypes.POINTER( __mpz_struct ) )
gmp_randseed = gmp_randseed_type( ( libgmp_lib.undecorated_names["extern void __gmp_randseed(__gmp_randstate_struct * arg0, mpz_srcptr arg1) [free function]"], libgmp_lib ) )
gmpz_divisible_p_type = ctypes.CFUNCTYPE( ctypes.c_int, ctypes.POINTER( __mpz_struct ), ctypes.POINTER( __mpz_struct ) )
gmpz_divisible_p = gmpz_divisible_p_type( ( libgmp_lib.undecorated_names["extern int __gmpz_divisible_p(mpz_srcptr arg0, mpz_srcptr arg1) [free function]"], libgmp_lib ) )
gmpz_get_ui_type = ctypes.CFUNCTYPE( ctypes.c_ulong, ctypes.POINTER( __mpz_struct ) )
gmpz_get_ui = gmpz_get_ui_type( ( libgmp_lib.undecorated_names["long unsigned int __gmpz_get_ui(mpz_srcptr __gmp_z) [free function]"], libgmp_lib ) )
gmpz_getlimbn_type = ctypes.CFUNCTYPE( ctypes.c_ulong, ctypes.POINTER( __mpz_struct ), ctypes.c_long )
gmpz_getlimbn = gmpz_getlimbn_type( ( libgmp_lib.undecorated_names["mp_limb_t __gmpz_getlimbn(mpz_srcptr __gmp_z, mp_size_t __gmp_n) [free function]"], libgmp_lib ) )
gmp_randinit_lc_2exp_size_type = ctypes.CFUNCTYPE( ctypes.c_int, ctypes.POINTER( __gmp_randstate_struct ), ctypes.c_ulong )
gmp_randinit_lc_2exp_size = gmp_randinit_lc_2exp_size_type( ( libgmp_lib.undecorated_names["extern int __gmp_randinit_lc_2exp_size(__gmp_randstate_struct * arg0, long unsigned int arg1) [free function]"], libgmp_lib ) )
gmpz_inp_raw_type = ctypes.CFUNCTYPE( ctypes.c_uint, ctypes.POINTER( __mpz_struct ), ctypes.POINTER( _IO_FILE ) )
gmpz_inp_raw = gmpz_inp_raw_type( ( libgmp_lib.undecorated_names["extern size_t __gmpz_inp_raw(mpz_ptr arg0, FILE * arg1) [free function]"], libgmp_lib ) )
gmp_version = ctypes.c_char_p.in_dll( libgmp_lib, libgmp_lib.undecorated_names["__gmp_version [variable]"] )
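# gmp_version wraps the library's __gmp_version symbol as a c_char_p;
# reading gmp_version.value yields the running library's version string as
# bytes (e.g. b"6.2.1", depending on the installed libgmp).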
gmpz_tdiv_qr_ui_type = ctypes.CFUNCTYPE( ctypes.c_ulong, ctypes.POINTER( __mpz_struct ), ctypes.POINTER( __mpz_struct ), ctypes.POINTER( __mpz_struct ), ctypes.c_ulong )
gmpz_tdiv_qr_ui = gmpz_tdiv_qr_ui_type( ( libgmp_lib.undecorated_names["extern long unsigned int __gmpz_tdiv_qr_ui(mpz_ptr arg0, mpz_ptr arg1, mpz_srcptr arg2, long unsigned int arg3) [free function]"], libgmp_lib ) )
gmpz_tdiv_ui_type = ctypes.CFUNCTYPE( ctypes.c_ulong, ctypes.POINTER( __mpz_struct ), ctypes.c_ulong )
gmpz_tdiv_ui = gmpz_tdiv_ui_type( ( libgmp_lib.undecorated_names["extern long unsigned int __gmpz_tdiv_ui(mpz_srcptr arg0, long unsigned int arg1) [free function]"], libgmp_lib ) )
gmpq_set_num_type = ctypes.CFUNCTYPE( None, ctypes.POINTER( __mpq_struct ), ctypes.POINTER( __mpz_struct ) )
gmpq_set_num = gmpq_set_num_type( ( libgmp_lib.undecorated_names["extern void __gmpq_set_num(mpq_ptr arg0, mpz_srcptr arg1) [free function]"], libgmp_lib ) )
gmpz_mul_2exp_type = ctypes.CFUNCTYPE( None, ctypes.POINTER( __mpz_struct ), ctypes.POINTER( __mpz_struct ), ctypes.c_ulong )
gmpz_mul_2exp = gmpz_mul_2exp_type( ( libgmp_lib.undecorated_names["extern void __gmpz_mul_2exp(mpz_ptr arg0, mpz_srcptr arg1, long unsigned int arg2) [free function]"], libgmp_lib ) )
gmpf_mul_type = ctypes.CFUNCTYPE( None, ctypes.POINTER( __mpf_struct ), ctypes.POINTER( __mpf_struct ), ctypes.POINTER( __mpf_struct ) )
gmpf_mul = gmpf_mul_type( ( libgmp_lib.undecorated_names["extern void __gmpf_mul(mpf_ptr arg0, mpf_srcptr arg1, mpf_srcptr arg2) [free function]"], libgmp_lib ) )
gmpz_submul_ui_type = ctypes.CFUNCTYPE( None, ctypes.POINTER( __mpz_struct ), ctypes.POINTER( __mpz_struct ), ctypes.c_ulong )
gmpz_submul_ui = gmpz_submul_ui_type( ( libgmp_lib.undecorated_names["extern void __gmpz_submul_ui(mpz_ptr arg0, mpz_srcptr arg1, long unsigned int arg2) [free function]"], libgmp_lib ) )
gmpf_div_ui_type = ctypes.CFUNCTYPE( None, ctypes.POINTER( __mpf_struct ), ctypes.POINTER( __mpf_struct ), ctypes.c_ulong )
gmpf_div_ui = gmpf_div_ui_type( ( libgmp_lib.undecorated_names["extern void __gmpf_div_ui(mpf_ptr arg0, mpf_srcptr arg1, long unsigned int arg2) [free function]"], libgmp_lib ) )
gmpf_size_type = ctypes.CFUNCTYPE( ctypes.c_uint, ctypes.POINTER( __mpf_struct ) )
gmpf_size = gmpf_size_type( ( libgmp_lib.undecorated_names["extern size_t __gmpf_size(mpf_srcptr arg0) [free function]"], libgmp_lib ) )
gmpn_mul_n_type = ctypes.CFUNCTYPE( None, ctypes.POINTER( ctypes.c_ulong ), ctypes.POINTER( ctypes.c_ulong ), ctypes.POINTER( ctypes.c_ulong ), ctypes.c_long )
gmpn_mul_n = gmpn_mul_n_type( ( libgmp_lib.undecorated_names["extern void __gmpn_mul_n(mp_ptr arg0, mp_srcptr arg1, mp_srcptr arg2, mp_size_t arg3) [free function]"], libgmp_lib ) )
gmpz_addmul_type = ctypes.CFUNCTYPE( None, ctypes.POINTER( __mpz_struct ), ctypes.POINTER( __mpz_struct ), ctypes.POINTER( __mpz_struct ) )
gmpz_addmul = gmpz_addmul_type( ( libgmp_lib.undecorated_names["extern void __gmpz_addmul(mpz_ptr arg0, mpz_srcptr arg1, mpz_srcptr arg2) [free function]"], libgmp_lib ) )
gmp_set_memory_functions_type = ctypes.CFUNCTYPE( None, ctypes.CFUNCTYPE( ctypes.c_void_p, ctypes.c_uint ), ctypes.CFUNCTYPE( ctypes.c_void_p, ctypes.c_void_p, ctypes.c_uint, ctypes.c_uint ), ctypes.CFUNCTYPE( None, ctypes.c_void_p, ctypes.c_uint ) )
gmp_set_memory_functions = gmp_set_memory_functions_type( ( libgmp_lib.undecorated_names["extern void __gmp_set_memory_functions(void * (*)( ::size_t ) * arg0, void * (*)( void *,::size_t,::size_t ) * arg1, void (*)( void *,::size_t ) * arg2) [free function]"], libgmp_lib ) )
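# Caution when calling gmp_set_memory_functions from Python: the three
# CFUNCTYPE callback objects passed in must stay referenced on the Python
# side for as long as libgmp may invoke them; if they are garbage-collected,
# the C library is left holding dangling function pointers.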
gmpz_sqrt_type = ctypes.CFUNCTYPE( None, ctypes.POINTER( __mpz_struct ), ctypes.POINTER( __mpz_struct ) )
gmpz_sqrt = gmpz_sqrt_type( ( libgmp_lib.undecorated_names["extern void __gmpz_sqrt(mpz_ptr arg0, mpz_srcptr arg1) [free function]"], libgmp_lib ) )
gmpf_sqrt_ui_type = ctypes.CFUNCTYPE( None, ctypes.POINTER( __mpf_struct ), ctypes.c_ulong )
gmpf_sqrt_ui = gmpf_sqrt_ui_type( ( libgmp_lib.undecorated_names["extern void __gmpf_sqrt_ui(mpf_ptr arg0, long unsigned int arg1) [free function]"], libgmp_lib ) )
gmpn_sub_type = ctypes.CFUNCTYPE( ctypes.c_ulong, ctypes.POINTER( ctypes.c_ulong ), ctypes.POINTER( ctypes.c_ulong ), ctypes.c_long, ctypes.POINTER( ctypes.c_ulong ), ctypes.c_long )
gmpn_sub = gmpn_sub_type( ( libgmp_lib.undecorated_names["mp_limb_t __gmpn_sub(mp_ptr __gmp_wp, mp_srcptr __gmp_xp, mp_size_t __gmp_xsize, mp_srcptr __gmp_yp, mp_size_t __gmp_ysize) [free function]"], libgmp_lib ) )
gmpz_mod_type = ctypes.CFUNCTYPE( None, ctypes.POINTER( __mpz_struct ), ctypes.POINTER( __mpz_struct ), ctypes.POINTER( __mpz_struct ) )
gmpz_mod = gmpz_mod_type( ( libgmp_lib.undecorated_names["extern void __gmpz_mod(mpz_ptr arg0, mpz_srcptr arg1, mpz_srcptr arg2) [free function]"], libgmp_lib ) )
gmpq_get_d_type = ctypes.CFUNCTYPE( ctypes.c_double, ctypes.POINTER( __mpq_struct ) )
gmpq_get_d = gmpq_get_d_type( ( libgmp_lib.undecorated_names["extern double __gmpq_get_d(mpq_srcptr arg0) [free function]"], libgmp_lib ) )
gmpz_set_str_type = ctypes.CFUNCTYPE( ctypes.c_int, ctypes.POINTER( __mpz_struct ), ctypes.c_char_p, ctypes.c_int )
gmpz_set_str = gmpz_set_str_type( ( libgmp_lib.undecorated_names["extern int __gmpz_set_str(mpz_ptr arg0, char const * arg1, int arg2) [free function]"], libgmp_lib ) )
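# --- illustrative usage sketch (hand-written, not generator output) ---
# Round-trips a decimal literal.  gmpz_set_str returns 0 on success and -1
# on a parse error.  With a NULL first argument, gmpz_get_str (bound below)
# allocates the digits with GMP's allocator; ctypes copies them into a
# Python bytes object, and the C buffer itself is not freed here (see the
# gmp_get_memory_functions binding below for the matching deallocator).
def _example_set_str():
    z = __mpz_struct()
    gmpz_init_set_d(ctypes.byref(z), 0.0)
    assert gmpz_set_str(ctypes.byref(z), b"123456789123456789", 10) == 0
    text = gmpz_get_str(None, 10, ctypes.byref(z))
    gmpz_clear(ctypes.byref(z))
    return text                             # b"123456789123456789"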
gmpq_inv_type = ctypes.CFUNCTYPE( None, ctypes.POINTER( __mpq_struct ), ctypes.POINTER( __mpq_struct ) )
gmpq_inv = gmpq_inv_type( ( libgmp_lib.undecorated_names["extern void __gmpq_inv(mpq_ptr arg0, mpq_srcptr arg1) [free function]"], libgmp_lib ) )
gmpz_rootrem_type = ctypes.CFUNCTYPE( None, ctypes.POINTER( __mpz_struct ), ctypes.POINTER( __mpz_struct ), ctypes.POINTER( __mpz_struct ), ctypes.c_ulong )
gmpz_rootrem = gmpz_rootrem_type( ( libgmp_lib.undecorated_names["extern void __gmpz_rootrem(mpz_ptr arg0, mpz_ptr arg1, mpz_srcptr arg2, long unsigned int arg3) [free function]"], libgmp_lib ) )
gmpf_ceil_type = ctypes.CFUNCTYPE( None, ctypes.POINTER( __mpf_struct ), ctypes.POINTER( __mpf_struct ) )
gmpf_ceil = gmpf_ceil_type( ( libgmp_lib.undecorated_names["extern void __gmpf_ceil(mpf_ptr arg0, mpf_srcptr arg1) [free function]"], libgmp_lib ) )
gmpf_fits_sshort_p_type = ctypes.CFUNCTYPE( ctypes.c_int, ctypes.POINTER( __mpf_struct ) )
gmpf_fits_sshort_p = gmpf_fits_sshort_p_type( ( libgmp_lib.undecorated_names["extern int __gmpf_fits_sshort_p(mpf_srcptr arg0) [free function]"], libgmp_lib ) )
gmpz_fits_ushort_p_type = ctypes.CFUNCTYPE( ctypes.c_int, ctypes.POINTER( __mpz_struct ) )
gmpz_fits_ushort_p = gmpz_fits_ushort_p_type( ( libgmp_lib.undecorated_names["int __gmpz_fits_ushort_p(mpz_srcptr __gmp_z) [free function]"], libgmp_lib ) )
gmpz_sub_type = ctypes.CFUNCTYPE( None, ctypes.POINTER( __mpz_struct ), ctypes.POINTER( __mpz_struct ), ctypes.POINTER( __mpz_struct ) )
gmpz_sub = gmpz_sub_type( ( libgmp_lib.undecorated_names["extern void __gmpz_sub(mpz_ptr arg0, mpz_srcptr arg1, mpz_srcptr arg2) [free function]"], libgmp_lib ) )
gmpz_cdiv_r_2exp_type = ctypes.CFUNCTYPE( None, ctypes.POINTER( __mpz_struct ), ctypes.POINTER( __mpz_struct ), ctypes.c_ulong )
gmpz_cdiv_r_2exp = gmpz_cdiv_r_2exp_type( ( libgmp_lib.undecorated_names["extern void __gmpz_cdiv_r_2exp(mpz_ptr arg0, mpz_srcptr arg1, long unsigned int arg2) [free function]"], libgmp_lib ) )
gmp_randinit_default_type = ctypes.CFUNCTYPE( None, ctypes.POINTER( __gmp_randstate_struct ) )
gmp_randinit_default = gmp_randinit_default_type( ( libgmp_lib.undecorated_names["extern void __gmp_randinit_default(__gmp_randstate_struct * arg0) [free function]"], libgmp_lib ) )
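# --- illustrative usage sketch (hand-written, not generator output) ---
# Seeds a default random state and draws a 64-bit value with gmpz_rrandomb,
# then counts its set bits via gmpz_popcount; every call used here is bound
# in this module.
def _example_random_mpz():
    state = __gmp_randstate_struct()
    gmp_randinit_default(ctypes.byref(state))
    gmp_randseed_ui(ctypes.byref(state), 42)
    z = __mpz_struct()
    gmpz_init_set_d(ctypes.byref(z), 0.0)
    gmpz_rrandomb(ctypes.byref(z), ctypes.byref(state), 64)
    bits = gmpz_popcount(ctypes.byref(z))
    gmpz_clear(ctypes.byref(z))
    gmp_randclear(ctypes.byref(state))
    return bits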
gmpf_integer_p_type = ctypes.CFUNCTYPE( ctypes.c_int, ctypes.POINTER( __mpf_struct ) )
gmpf_integer_p = gmpf_integer_p_type( ( libgmp_lib.undecorated_names["extern int __gmpf_integer_p(mpf_srcptr arg0) [free function]"], libgmp_lib ) )
gmpz_congruent_p_type = ctypes.CFUNCTYPE( ctypes.c_int, ctypes.POINTER( __mpz_struct ), ctypes.POINTER( __mpz_struct ), ctypes.POINTER( __mpz_struct ) )
gmpz_congruent_p = gmpz_congruent_p_type( ( libgmp_lib.undecorated_names["extern int __gmpz_congruent_p(mpz_srcptr arg0, mpz_srcptr arg1, mpz_srcptr arg2) [free function]"], libgmp_lib ) )
gmpf_ui_div_type = ctypes.CFUNCTYPE( None, ctypes.POINTER( __mpf_struct ), ctypes.c_ulong, ctypes.POINTER( __mpf_struct ) )
gmpf_ui_div = gmpf_ui_div_type( ( libgmp_lib.undecorated_names["extern void __gmpf_ui_div(mpf_ptr arg0, long unsigned int arg1, mpf_srcptr arg2) [free function]"], libgmp_lib ) )
gmpz_mul_si_type = ctypes.CFUNCTYPE( None, ctypes.POINTER( __mpz_struct ), ctypes.POINTER( __mpz_struct ), ctypes.c_long )
gmpz_mul_si = gmpz_mul_si_type( ( libgmp_lib.undecorated_names["extern void __gmpz_mul_si(mpz_ptr arg0, mpz_srcptr arg1, long int arg2) [free function]"], libgmp_lib ) )
gmpq_div_2exp_type = ctypes.CFUNCTYPE( None, ctypes.POINTER( __mpq_struct ), ctypes.POINTER( __mpq_struct ), ctypes.c_ulong )
gmpq_div_2exp = gmpq_div_2exp_type( ( libgmp_lib.undecorated_names["extern void __gmpq_div_2exp(mpq_ptr arg0, mpq_srcptr arg1, long unsigned int arg2) [free function]"], libgmp_lib ) )
gmpq_neg_type = ctypes.CFUNCTYPE( None, ctypes.POINTER( __mpq_struct ), ctypes.POINTER( __mpq_struct ) )
gmpq_neg = gmpq_neg_type( ( libgmp_lib.undecorated_names["void __gmpq_neg(mpq_ptr __gmp_w, mpq_srcptr __gmp_u) [free function]"], libgmp_lib ) )
gmpz_import_type = ctypes.CFUNCTYPE( None, ctypes.POINTER( __mpz_struct ), ctypes.c_uint, ctypes.c_int, ctypes.c_uint, ctypes.c_int, ctypes.c_uint, ctypes.c_void_p )
gmpz_import = gmpz_import_type( ( libgmp_lib.undecorated_names["extern void __gmpz_import(mpz_ptr arg0, size_t arg1, int arg2, size_t arg3, int arg4, size_t arg5, void const * arg6) [free function]"], libgmp_lib ) )
gmpz_set_type = ctypes.CFUNCTYPE( None, ctypes.POINTER( __mpz_struct ), ctypes.POINTER( __mpz_struct ) )
gmpz_set = gmpz_set_type( ( libgmp_lib.undecorated_names["extern void __gmpz_set(mpz_ptr arg0, mpz_srcptr arg1) [free function]"], libgmp_lib ) )
gmpz_mul_ui_type = ctypes.CFUNCTYPE( None, ctypes.POINTER( __mpz_struct ), ctypes.POINTER( __mpz_struct ), ctypes.c_ulong )
gmpz_mul_ui = gmpz_mul_ui_type( ( libgmp_lib.undecorated_names["extern void __gmpz_mul_ui(mpz_ptr arg0, mpz_srcptr arg1, long unsigned int arg2) [free function]"], libgmp_lib ) )
gmpq_get_num_type = ctypes.CFUNCTYPE( None, ctypes.POINTER( __mpz_struct ), ctypes.POINTER( __mpq_struct ) )
gmpq_get_num = gmpq_get_num_type( ( libgmp_lib.undecorated_names["extern void __gmpq_get_num(mpz_ptr arg0, mpq_srcptr arg1) [free function]"], libgmp_lib ) )
gmpq_set_f_type = ctypes.CFUNCTYPE( None, ctypes.POINTER( __mpq_struct ), ctypes.POINTER( __mpf_struct ) )
gmpq_set_f = gmpq_set_f_type( ( libgmp_lib.undecorated_names["extern void __gmpq_set_f(mpq_ptr arg0, mpf_srcptr arg1) [free function]"], libgmp_lib ) )
gmpz_random_type = ctypes.CFUNCTYPE( None, ctypes.POINTER( __mpz_struct ), ctypes.c_long )
gmpz_random = gmpz_random_type( ( libgmp_lib.undecorated_names["extern void __gmpz_random(mpz_ptr arg0, mp_size_t arg1) [free function]"], libgmp_lib ) )
gmpz_sub_ui_type = ctypes.CFUNCTYPE( None, ctypes.POINTER( __mpz_struct ), ctypes.POINTER( __mpz_struct ), ctypes.c_ulong )
gmpz_sub_ui = gmpz_sub_ui_type( ( libgmp_lib.undecorated_names["extern void __gmpz_sub_ui(mpz_ptr arg0, mpz_srcptr arg1, long unsigned int arg2) [free function]"], libgmp_lib ) )
gmpz_fdiv_qr_type = ctypes.CFUNCTYPE( None, ctypes.POINTER( __mpz_struct ), ctypes.POINTER( __mpz_struct ), ctypes.POINTER( __mpz_struct ), ctypes.POINTER( __mpz_struct ) )
gmpz_fdiv_qr = gmpz_fdiv_qr_type( ( libgmp_lib.undecorated_names["extern void __gmpz_fdiv_qr(mpz_ptr arg0, mpz_ptr arg1, mpz_srcptr arg2, mpz_srcptr arg3) [free function]"], libgmp_lib ) )
gmpz_rrandomb_type = ctypes.CFUNCTYPE( None, ctypes.POINTER( __mpz_struct ), ctypes.POINTER( __gmp_randstate_struct ), ctypes.c_ulong )
gmpz_rrandomb = gmpz_rrandomb_type( ( libgmp_lib.undecorated_names["extern void __gmpz_rrandomb(mpz_ptr arg0, __gmp_randstate_struct * arg1, long unsigned int arg2) [free function]"], libgmp_lib ) )
gmpn_set_str_type = ctypes.CFUNCTYPE( ctypes.c_long, ctypes.POINTER( ctypes.c_ulong ), ctypes.POINTER( ctypes.c_ubyte ), ctypes.c_uint, ctypes.c_int )
gmpn_set_str = gmpn_set_str_type( ( libgmp_lib.undecorated_names["extern mp_size_t __gmpn_set_str(mp_ptr arg0, unsigned char const * arg1, size_t arg2, int arg3) [free function]"], libgmp_lib ) )
gmpz_root_type = ctypes.CFUNCTYPE( ctypes.c_int, ctypes.POINTER( __mpz_struct ), ctypes.POINTER( __mpz_struct ), ctypes.c_ulong )
gmpz_root = gmpz_root_type( ( libgmp_lib.undecorated_names["extern int __gmpz_root(mpz_ptr arg0, mpz_srcptr arg1, long unsigned int arg2) [free function]"], libgmp_lib ) )
gmpz_fdiv_q_ui_type = ctypes.CFUNCTYPE( ctypes.c_ulong, ctypes.POINTER( __mpz_struct ), ctypes.POINTER( __mpz_struct ), ctypes.c_ulong )
gmpz_fdiv_q_ui = gmpz_fdiv_q_ui_type( ( libgmp_lib.undecorated_names["extern long unsigned int __gmpz_fdiv_q_ui(mpz_ptr arg0, mpz_srcptr arg1, long unsigned int arg2) [free function]"], libgmp_lib ) )
gmpz_fdiv_ui_type = ctypes.CFUNCTYPE( ctypes.c_ulong, ctypes.POINTER( __mpz_struct ), ctypes.c_ulong )
gmpz_fdiv_ui = gmpz_fdiv_ui_type( ( libgmp_lib.undecorated_names["extern long unsigned int __gmpz_fdiv_ui(mpz_srcptr arg0, long unsigned int arg1) [free function]"], libgmp_lib ) )
gmpz_cmpabs_ui_type = ctypes.CFUNCTYPE( ctypes.c_int, ctypes.POINTER( __mpz_struct ), ctypes.c_ulong )
gmpz_cmpabs_ui = gmpz_cmpabs_ui_type( ( libgmp_lib.undecorated_names["extern int __gmpz_cmpabs_ui(mpz_srcptr arg0, long unsigned int arg1) [free function]"], libgmp_lib ) )
gmpz_cmpabs_d_type = ctypes.CFUNCTYPE( ctypes.c_int, ctypes.POINTER( __mpz_struct ), ctypes.c_double )
gmpz_cmpabs_d = gmpz_cmpabs_d_type( ( libgmp_lib.undecorated_names["extern int __gmpz_cmpabs_d(mpz_srcptr arg0, double arg1) [free function]"], libgmp_lib ) )
gmpz_cmp_si_type = ctypes.CFUNCTYPE( ctypes.c_int, ctypes.POINTER( __mpz_struct ), ctypes.c_long )
gmpz_cmp_si = gmpz_cmp_si_type( ( libgmp_lib.undecorated_names["extern int __gmpz_cmp_si(mpz_srcptr arg0, long int arg1) [free function]"], libgmp_lib ) )
gmpz_cdiv_qr_ui_type = ctypes.CFUNCTYPE( ctypes.c_ulong, ctypes.POINTER( __mpz_struct ), ctypes.POINTER( __mpz_struct ), ctypes.POINTER( __mpz_struct ), ctypes.c_ulong )
gmpz_cdiv_qr_ui = gmpz_cdiv_qr_ui_type( ( libgmp_lib.undecorated_names["extern long unsigned int __gmpz_cdiv_qr_ui(mpz_ptr arg0, mpz_ptr arg1, mpz_srcptr arg2, long unsigned int arg3) [free function]"], libgmp_lib ) )
gmpz_cmp_ui_type = ctypes.CFUNCTYPE( ctypes.c_int, ctypes.POINTER( __mpz_struct ), ctypes.c_ulong )
gmpz_cmp_ui = gmpz_cmp_ui_type( ( libgmp_lib.undecorated_names["extern int __gmpz_cmp_ui(mpz_srcptr arg0, long unsigned int arg1) [free function]"], libgmp_lib ) )
gmpz_clear_type = ctypes.CFUNCTYPE( None, ctypes.POINTER( __mpz_struct ) )
gmpz_clear = gmpz_clear_type( ( libgmp_lib.undecorated_names["extern void __gmpz_clear(mpz_ptr arg0) [free function]"], libgmp_lib ) )
gmpq_set_z_type = ctypes.CFUNCTYPE( None, ctypes.POINTER( __mpq_struct ), ctypes.POINTER( __mpz_struct ) )
gmpq_set_z = gmpq_set_z_type( ( libgmp_lib.undecorated_names["extern void __gmpq_set_z(mpq_ptr arg0, mpz_srcptr arg1) [free function]"], libgmp_lib ) )
gmpz_bin_uiui_type = ctypes.CFUNCTYPE( None, ctypes.POINTER( __mpz_struct ), ctypes.c_ulong, ctypes.c_ulong )
gmpz_bin_uiui = gmpz_bin_uiui_type( ( libgmp_lib.undecorated_names["extern void __gmpz_bin_uiui(mpz_ptr arg0, long unsigned int arg1, long unsigned int arg2) [free function]"], libgmp_lib ) )
gmpz_abs_type = ctypes.CFUNCTYPE( None, ctypes.POINTER( __mpz_struct ), ctypes.POINTER( __mpz_struct ) )
gmpz_abs = gmpz_abs_type( ( libgmp_lib.undecorated_names["void __gmpz_abs(mpz_ptr __gmp_w, mpz_srcptr __gmp_u) [free function]"], libgmp_lib ) )
gmpz_probab_prime_p_type = ctypes.CFUNCTYPE( ctypes.c_int, ctypes.POINTER( __mpz_struct ), ctypes.c_int )
gmpz_probab_prime_p = gmpz_probab_prime_p_type( ( libgmp_lib.undecorated_names["extern int __gmpz_probab_prime_p(mpz_srcptr arg0, int arg1) [free function]"], libgmp_lib ) )
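# --- illustrative usage sketch (hand-written, not generator output) ---
# Primality test through gmpz_probab_prime_p: the return value is 2 for
# definitely prime, 1 for probably prime, 0 for composite; the second
# argument is the number of Miller-Rabin rounds.
def _example_probab_prime():
    z = __mpz_struct()
    gmpz_init_set_d(ctypes.byref(z), 97.0)
    verdict = gmpz_probab_prime_p(ctypes.byref(z), 25)
    gmpz_clear(ctypes.byref(z))
    return verdict                          # 2 expected: 97 is prime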
gmpz_tdiv_q_type = ctypes.CFUNCTYPE( None, ctypes.POINTER( __mpz_struct ), ctypes.POINTER( __mpz_struct ), ctypes.POINTER( __mpz_struct ) )
gmpz_tdiv_q = gmpz_tdiv_q_type( ( libgmp_lib.undecorated_names["extern void __gmpz_tdiv_q(mpz_ptr arg0, mpz_srcptr arg1, mpz_srcptr arg2) [free function]"], libgmp_lib ) )
gmpz_tdiv_r_type = ctypes.CFUNCTYPE( None, ctypes.POINTER( __mpz_struct ), ctypes.POINTER( __mpz_struct ), ctypes.POINTER( __mpz_struct ) )
gmpz_tdiv_r = gmpz_tdiv_r_type( ( libgmp_lib.undecorated_names["extern void __gmpz_tdiv_r(mpz_ptr arg0, mpz_srcptr arg1, mpz_srcptr arg2) [free function]"], libgmp_lib ) )
gmpz_add_type = ctypes.CFUNCTYPE( None, ctypes.POINTER( __mpz_struct ), ctypes.POINTER( __mpz_struct ), ctypes.POINTER( __mpz_struct ) )
gmpz_add = gmpz_add_type( ( libgmp_lib.undecorated_names["extern void __gmpz_add(mpz_ptr arg0, mpz_srcptr arg1, mpz_srcptr arg2) [free function]"], libgmp_lib ) )
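# --- illustrative usage sketch (hand-written, not generator output) ---
# Computes r = a + b through the raw mpz bindings; ctypes.byref() passes a
# pointer to the underlying __mpz_struct, matching the
# POINTER(__mpz_struct) parameters declared above.
def _example_mpz_add():
    a, b, r = __mpz_struct(), __mpz_struct(), __mpz_struct()
    gmpz_init_set_d(ctypes.byref(a), 7.0)
    gmpz_init_set_d(ctypes.byref(b), 5.0)
    gmpz_init_set_d(ctypes.byref(r), 0.0)
    gmpz_add(ctypes.byref(r), ctypes.byref(a), ctypes.byref(b))
    total = gmpz_get_ui(ctypes.byref(r))    # 12
    for z in (a, b, r):
        gmpz_clear(ctypes.byref(z))
    return total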
gmpf_set_str_type = ctypes.CFUNCTYPE( ctypes.c_int, ctypes.POINTER( __mpf_struct ), ctypes.c_char_p, ctypes.c_int )
gmpf_set_str = gmpf_set_str_type( ( libgmp_lib.undecorated_names["extern int __gmpf_set_str(mpf_ptr arg0, char const * arg1, int arg2) [free function]"], libgmp_lib ) )
gmpf_set_q_type = ctypes.CFUNCTYPE( None, ctypes.POINTER( __mpf_struct ), ctypes.POINTER( __mpq_struct ) )
gmpf_set_q = gmpf_set_q_type( ( libgmp_lib.undecorated_names["extern void __gmpf_set_q(mpf_ptr arg0, mpq_srcptr arg1) [free function]"], libgmp_lib ) )
gmpq_set_d_type = ctypes.CFUNCTYPE( None, ctypes.POINTER( __mpq_struct ), ctypes.c_double )
gmpq_set_d = gmpq_set_d_type( ( libgmp_lib.undecorated_names["extern void __gmpq_set_d(mpq_ptr arg0, double arg1) [free function]"], libgmp_lib ) )
gmpn_neg_n_type = ctypes.CFUNCTYPE( ctypes.c_ulong, ctypes.POINTER( ctypes.c_ulong ), ctypes.POINTER( ctypes.c_ulong ), ctypes.c_long )
gmpn_neg_n = gmpn_neg_n_type( ( libgmp_lib.undecorated_names["mp_limb_t __gmpn_neg_n(mp_ptr __gmp_rp, mp_srcptr __gmp_up, mp_size_t __gmp_n) [free function]"], libgmp_lib ) )
gmpn_random2_type = ctypes.CFUNCTYPE( None, ctypes.POINTER( ctypes.c_ulong ), ctypes.c_long )
gmpn_random2 = gmpn_random2_type( ( libgmp_lib.undecorated_names["extern void __gmpn_random2(mp_ptr arg0, mp_size_t arg1) [free function]"], libgmp_lib ) )
gmpz_lcm_type = ctypes.CFUNCTYPE( None, ctypes.POINTER( __mpz_struct ), ctypes.POINTER( __mpz_struct ), ctypes.POINTER( __mpz_struct ) )
gmpz_lcm = gmpz_lcm_type( ( libgmp_lib.undecorated_names["extern void __gmpz_lcm(mpz_ptr arg0, mpz_srcptr arg1, mpz_srcptr arg2) [free function]"], libgmp_lib ) )
gmpn_rshift_type = ctypes.CFUNCTYPE( ctypes.c_ulong, ctypes.POINTER( ctypes.c_ulong ), ctypes.POINTER( ctypes.c_ulong ), ctypes.c_long, ctypes.c_uint )
gmpn_rshift = gmpn_rshift_type( ( libgmp_lib.undecorated_names["extern mp_limb_t __gmpn_rshift(mp_ptr arg0, mp_srcptr arg1, mp_size_t arg2, unsigned int arg3) [free function]"], libgmp_lib ) )
gmpf_set_z_type = ctypes.CFUNCTYPE( None, ctypes.POINTER( __mpf_struct ), ctypes.POINTER( __mpz_struct ) )
gmpf_set_z = gmpf_set_z_type( ( libgmp_lib.undecorated_names["extern void __gmpf_set_z(mpf_ptr arg0, mpz_srcptr arg1) [free function]"], libgmp_lib ) )
gmpq_set_den_type = ctypes.CFUNCTYPE( None, ctypes.POINTER( __mpq_struct ), ctypes.POINTER( __mpz_struct ) )
gmpq_set_den = gmpq_set_den_type( ( libgmp_lib.undecorated_names["extern void __gmpq_set_den(mpq_ptr arg0, mpz_srcptr arg1) [free function]"], libgmp_lib ) )
gmpz_get_str_type = ctypes.CFUNCTYPE( ctypes.c_char_p, ctypes.c_char_p, ctypes.c_int, ctypes.POINTER( __mpz_struct ) )
gmpz_get_str = gmpz_get_str_type( ( libgmp_lib.undecorated_names["extern char * __gmpz_get_str(char * arg0, int arg1, mpz_srcptr arg2) [free function]"], libgmp_lib ) )
gmpf_init_set_type = ctypes.CFUNCTYPE( None, ctypes.POINTER( __mpf_struct ), ctypes.POINTER( __mpf_struct ) )
gmpf_init_set = gmpf_init_set_type( ( libgmp_lib.undecorated_names["extern void __gmpf_init_set(mpf_ptr arg0, mpf_srcptr arg1) [free function]"], libgmp_lib ) )
gmpz_ui_kronecker_type = ctypes.CFUNCTYPE( ctypes.c_int, ctypes.c_ulong, ctypes.POINTER( __mpz_struct ) )
gmpz_ui_kronecker = gmpz_ui_kronecker_type( ( libgmp_lib.undecorated_names["extern int __gmpz_ui_kronecker(long unsigned int arg0, mpz_srcptr arg1) [free function]"], libgmp_lib ) )
gmpf_neg_type = ctypes.CFUNCTYPE( None, ctypes.POINTER( __mpf_struct ), ctypes.POINTER( __mpf_struct ) )
gmpf_neg = gmpf_neg_type( ( libgmp_lib.undecorated_names["extern void __gmpf_neg(mpf_ptr arg0, mpf_srcptr arg1) [free function]"], libgmp_lib ) )
gmpf_get_si_type = ctypes.CFUNCTYPE( ctypes.c_long, ctypes.POINTER( __mpf_struct ) )
gmpf_get_si = gmpf_get_si_type( ( libgmp_lib.undecorated_names["extern long int __gmpf_get_si(mpf_srcptr arg0) [free function]"], libgmp_lib ) )
gmp_randinit_lc_2exp_type = ctypes.CFUNCTYPE( None, ctypes.POINTER( __gmp_randstate_struct ), ctypes.POINTER( __mpz_struct ), ctypes.c_ulong, ctypes.c_ulong )
gmp_randinit_lc_2exp = gmp_randinit_lc_2exp_type( ( libgmp_lib.undecorated_names["extern void __gmp_randinit_lc_2exp(__gmp_randstate_struct * arg0, mpz_srcptr arg1, long unsigned int arg2, long unsigned int arg3) [free function]"], libgmp_lib ) )
gmpz_and_type = ctypes.CFUNCTYPE( None, ctypes.POINTER( __mpz_struct ), ctypes.POINTER( __mpz_struct ), ctypes.POINTER( __mpz_struct ) )
gmpz_and = gmpz_and_type( ( libgmp_lib.undecorated_names["extern void __gmpz_and(mpz_ptr arg0, mpz_srcptr arg1, mpz_srcptr arg2) [free function]"], libgmp_lib ) )
gmpf_get_ui_type = ctypes.CFUNCTYPE( ctypes.c_ulong, ctypes.POINTER( __mpf_struct ) )
gmpf_get_ui = gmpf_get_ui_type( ( libgmp_lib.undecorated_names["extern long unsigned int __gmpf_get_ui(mpf_srcptr arg0) [free function]"], libgmp_lib ) )
gmpz_divisible_ui_p_type = ctypes.CFUNCTYPE( ctypes.c_int, ctypes.POINTER( __mpz_struct ), ctypes.c_ulong )
gmpz_divisible_ui_p = gmpz_divisible_ui_p_type( ( libgmp_lib.undecorated_names["extern int __gmpz_divisible_ui_p(mpz_srcptr arg0, long unsigned int arg1) [free function]"], libgmp_lib ) )
gmpz_fib2_ui_type = ctypes.CFUNCTYPE( None, ctypes.POINTER( __mpz_struct ), ctypes.POINTER( __mpz_struct ), ctypes.c_ulong )
gmpz_fib2_ui = gmpz_fib2_ui_type( ( libgmp_lib.undecorated_names["extern void __gmpz_fib2_ui(mpz_ptr arg0, mpz_ptr arg1, long unsigned int arg2) [free function]"], libgmp_lib ) )
gmpf_cmp_type = ctypes.CFUNCTYPE( ctypes.c_int, ctypes.POINTER( __mpf_struct ), ctypes.POINTER( __mpf_struct ) )
gmpf_cmp = gmpf_cmp_type( ( libgmp_lib.undecorated_names["extern int __gmpf_cmp(mpf_srcptr arg0, mpf_srcptr arg1) [free function]"], libgmp_lib ) )
gmpq_cmp_si_type = ctypes.CFUNCTYPE( ctypes.c_int, ctypes.POINTER( __mpq_struct ), ctypes.c_long, ctypes.c_ulong )
gmpq_cmp_si = gmpq_cmp_si_type( ( libgmp_lib.undecorated_names["extern int __gmpq_cmp_si(mpq_srcptr arg0, long int arg1, long unsigned int arg2) [free function]"], libgmp_lib ) )
gmpz_divexact_type = ctypes.CFUNCTYPE( None, ctypes.POINTER( __mpz_struct ), ctypes.POINTER( __mpz_struct ), ctypes.POINTER( __mpz_struct ) )
gmpz_divexact = gmpz_divexact_type( ( libgmp_lib.undecorated_names["extern void __gmpz_divexact(mpz_ptr arg0, mpz_srcptr arg1, mpz_srcptr arg2) [free function]"], libgmp_lib ) )
gmpz_fib_ui_type = ctypes.CFUNCTYPE( None, ctypes.POINTER( __mpz_struct ), ctypes.c_ulong )
gmpz_fib_ui = gmpz_fib_ui_type( ( libgmp_lib.undecorated_names["extern void __gmpz_fib_ui(mpz_ptr arg0, long unsigned int arg1) [free function]"], libgmp_lib ) )
gmpn_get_str_type = ctypes.CFUNCTYPE( ctypes.c_uint, ctypes.POINTER( ctypes.c_ubyte ), ctypes.c_int, ctypes.POINTER( ctypes.c_ulong ), ctypes.c_long )
gmpn_get_str = gmpn_get_str_type( ( libgmp_lib.undecorated_names["extern size_t __gmpn_get_str(unsigned char * arg0, int arg1, mp_ptr arg2, mp_size_t arg3) [free function]"], libgmp_lib ) )
gmpz_cdiv_r_ui_type = ctypes.CFUNCTYPE( ctypes.c_ulong, ctypes.POINTER( __mpz_struct ), ctypes.POINTER( __mpz_struct ), ctypes.c_ulong )
gmpz_cdiv_r_ui = gmpz_cdiv_r_ui_type( ( libgmp_lib.undecorated_names["extern long unsigned int __gmpz_cdiv_r_ui(mpz_ptr arg0, mpz_srcptr arg1, long unsigned int arg2) [free function]"], libgmp_lib ) )
gmpf_ui_sub_type = ctypes.CFUNCTYPE( None, ctypes.POINTER( __mpf_struct ), ctypes.c_ulong, ctypes.POINTER( __mpf_struct ) )
gmpf_ui_sub = gmpf_ui_sub_type( ( libgmp_lib.undecorated_names["extern void __gmpf_ui_sub(mpf_ptr arg0, long unsigned int arg1, mpf_srcptr arg2) [free function]"], libgmp_lib ) )
gmpz_random2_type = ctypes.CFUNCTYPE( None, ctypes.POINTER( __mpz_struct ), ctypes.c_long )
gmpz_random2 = gmpz_random2_type( ( libgmp_lib.undecorated_names["extern void __gmpz_random2(mpz_ptr arg0, mp_size_t arg1) [free function]"], libgmp_lib ) )
gmpn_add_1_type = ctypes.CFUNCTYPE( ctypes.c_ulong, ctypes.POINTER( ctypes.c_ulong ), ctypes.POINTER( ctypes.c_ulong ), ctypes.c_long, ctypes.c_ulong )
gmpn_add_1 = gmpn_add_1_type( ( libgmp_lib.undecorated_names["mp_limb_t __gmpn_add_1(mp_ptr __gmp_dst, mp_srcptr __gmp_src, mp_size_t __gmp_size, mp_limb_t __gmp_n) [free function]"], libgmp_lib ) )
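# --- illustrative usage sketch (hand-written, not generator output) ---
# The low-level mpn interface works on bare limb arrays (least-significant
# limb first) with no header struct; here a two-limb number whose low limb
# is all ones is incremented, which carries into the high limb.
def _example_mpn_add_1():
    ones = ctypes.c_ulong(-1).value         # all-bits-set limb, any limb size
    src = (ctypes.c_ulong * 2)(ones, 0)
    dst = (ctypes.c_ulong * 2)()
    carry = gmpn_add_1(dst, src, 2, 1)      # dst = src + 1
    return list(dst), carry                 # ([0, 1], 0)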
gmp_randinit_set_type = ctypes.CFUNCTYPE( None, ctypes.POINTER( __gmp_randstate_struct ), ctypes.POINTER( __gmp_randstate_struct ) )
gmp_randinit_set = gmp_randinit_set_type( ( libgmp_lib.undecorated_names["extern void __gmp_randinit_set(__gmp_randstate_struct * arg0, __gmp_randstate_struct const * arg1) [free function]"], libgmp_lib ) )
gmpn_add_n_type = ctypes.CFUNCTYPE( ctypes.c_ulong, ctypes.POINTER( ctypes.c_ulong ), ctypes.POINTER( ctypes.c_ulong ), ctypes.POINTER( ctypes.c_ulong ), ctypes.c_long )
gmpn_add_n = gmpn_add_n_type( ( libgmp_lib.undecorated_names["extern mp_limb_t __gmpn_add_n(mp_ptr arg0, mp_srcptr arg1, mp_srcptr arg2, mp_size_t arg3) [free function]"], libgmp_lib ) )
gmp_randinit_mt_type = ctypes.CFUNCTYPE( None, ctypes.POINTER( __gmp_randstate_struct ) )
gmp_randinit_mt = gmp_randinit_mt_type( ( libgmp_lib.undecorated_names["extern void __gmp_randinit_mt(__gmp_randstate_struct * arg0) [free function]"], libgmp_lib ) )
gmpf_get_str_type = ctypes.CFUNCTYPE( ctypes.c_char_p, ctypes.c_char_p, ctypes.POINTER( ctypes.c_long ), ctypes.c_int, ctypes.c_uint, ctypes.POINTER( __mpf_struct ) )
gmpf_get_str = gmpf_get_str_type( ( libgmp_lib.undecorated_names["extern char * __gmpf_get_str(char * arg0, mp_exp_t * arg1, int arg2, size_t arg3, mpf_srcptr arg4) [free function]"], libgmp_lib ) )
gmp_get_memory_functions_type = ctypes.CFUNCTYPE( None, ctypes.POINTER( ctypes.CFUNCTYPE( ctypes.c_void_p, ctypes.c_uint ) ), ctypes.POINTER( ctypes.CFUNCTYPE( ctypes.c_void_p, ctypes.c_void_p, ctypes.c_uint, ctypes.c_uint ) ), ctypes.POINTER( ctypes.CFUNCTYPE( None, ctypes.c_void_p, ctypes.c_uint ) ) )
gmp_get_memory_functions = gmp_get_memory_functions_type( ( libgmp_lib.undecorated_names["extern void __gmp_get_memory_functions(void * (*)( ::size_t ) * * arg0, void * (*)( void *,::size_t,::size_t ) * * arg1, void (*)( void *,::size_t ) * * arg2) [free function]"], libgmp_lib ) )
gmpz_out_raw_type = ctypes.CFUNCTYPE( ctypes.c_uint, ctypes.POINTER( _IO_FILE ), ctypes.POINTER( __mpz_struct ) )
gmpz_out_raw = gmpz_out_raw_type( ( libgmp_lib.undecorated_names["extern size_t __gmpz_out_raw(FILE * arg0, mpz_srcptr arg1) [free function]"], libgmp_lib ) )
gmpz_congruent_ui_p_type = ctypes.CFUNCTYPE( ctypes.c_int, ctypes.POINTER( __mpz_struct ), ctypes.c_ulong, ctypes.c_ulong )
gmpz_congruent_ui_p = gmpz_congruent_ui_p_type( ( libgmp_lib.undecorated_names["extern int __gmpz_congruent_ui_p(mpz_srcptr arg0, long unsigned int arg1, long unsigned int arg2) [free function]"], libgmp_lib ) )
gmpz_congruent_2exp_p_type = ctypes.CFUNCTYPE( ctypes.c_int, ctypes.POINTER( __mpz_struct ), ctypes.POINTER( __mpz_struct ), ctypes.c_ulong )
gmpz_congruent_2exp_p = gmpz_congruent_2exp_p_type( ( libgmp_lib.undecorated_names["extern int __gmpz_congruent_2exp_p(mpz_srcptr arg0, mpz_srcptr arg1, long unsigned int arg2) [free function]"], libgmp_lib ) )
gmpq_get_den_type = ctypes.CFUNCTYPE( None, ctypes.POINTER( __mpz_struct ), ctypes.POINTER( __mpq_struct ) )
gmpq_get_den = gmpq_get_den_type( ( libgmp_lib.undecorated_names["extern void __gmpq_get_den(mpz_ptr arg0, mpq_srcptr arg1) [free function]"], libgmp_lib ) )
gmpn_random_type = ctypes.CFUNCTYPE( None, ctypes.POINTER( ctypes.c_ulong ), ctypes.c_long )
gmpn_random = gmpn_random_type( ( libgmp_lib.undecorated_names["extern void __gmpn_random(mp_ptr arg0, mp_size_t arg1) [free function]"], libgmp_lib ) )
gmp_randclear_type = ctypes.CFUNCTYPE( None, ctypes.POINTER( __gmp_randstate_struct ) )
gmp_randclear = gmp_randclear_type( ( libgmp_lib.undecorated_names["extern void __gmp_randclear(__gmp_randstate_struct * arg0) [free function]"], libgmp_lib ) )
gmpn_pow_1_type = ctypes.CFUNCTYPE( ctypes.c_long, ctypes.POINTER( ctypes.c_ulong ), ctypes.POINTER( ctypes.c_ulong ), ctypes.c_long, ctypes.c_ulong, ctypes.POINTER( ctypes.c_ulong ) )
gmpn_pow_1 = gmpn_pow_1_type( ( libgmp_lib.undecorated_names["extern mp_size_t __gmpn_pow_1(mp_ptr arg0, mp_srcptr arg1, mp_size_t arg2, mp_limb_t arg3, mp_ptr arg4) [free function]"], libgmp_lib ) )
gmpz_inp_str_type = ctypes.CFUNCTYPE( ctypes.c_uint, ctypes.POINTER( __mpz_struct ), ctypes.POINTER( _IO_FILE ), ctypes.c_int )
gmpz_inp_str = gmpz_inp_str_type( ( libgmp_lib.undecorated_names["extern size_t __gmpz_inp_str(mpz_ptr arg0, FILE * arg1, int arg2) [free function]"], libgmp_lib ) )
gmpf_get_default_prec_type = ctypes.CFUNCTYPE( ctypes.c_ulong )
gmpf_get_default_prec = gmpf_get_default_prec_type( ( libgmp_lib.undecorated_names["extern long unsigned int __gmpf_get_default_prec() [free function]"], libgmp_lib ) )
gmpn_mod_1_type = ctypes.CFUNCTYPE( ctypes.c_ulong, ctypes.POINTER( ctypes.c_ulong ), ctypes.c_long, ctypes.c_ulong )
gmpn_mod_1 = gmpn_mod_1_type( ( libgmp_lib.undecorated_names["extern mp_limb_t __gmpn_mod_1(mp_srcptr arg0, mp_size_t arg1, mp_limb_t arg2) [free function]"], libgmp_lib ) )
gmpz_perfect_square_p_type = ctypes.CFUNCTYPE( ctypes.c_int, ctypes.POINTER( __mpz_struct ) )
gmpz_perfect_square_p = gmpz_perfect_square_p_type( ( libgmp_lib.undecorated_names["int __gmpz_perfect_square_p(mpz_srcptr __gmp_a) [free function]"], libgmp_lib ) )
gmpz_add_ui_type = ctypes.CFUNCTYPE( None, ctypes.POINTER( __mpz_struct ), ctypes.POINTER( __mpz_struct ), ctypes.c_ulong )
gmpz_add_ui = gmpz_add_ui_type( ( libgmp_lib.undecorated_names["extern void __gmpz_add_ui(mpz_ptr arg0, mpz_srcptr arg1, long unsigned int arg2) [free function]"], libgmp_lib ) )
gmpf_urandomb_type = ctypes.CFUNCTYPE( None, ctypes.POINTER( __mpf_struct ), ctypes.POINTER( __gmp_randstate_struct ), ctypes.c_ulong )
gmpf_urandomb = gmpf_urandomb_type( ( libgmp_lib.undecorated_names["extern void __gmpf_urandomb(__mpf_struct * arg0, __gmp_randstate_struct * arg1, long unsigned int arg2) [free function]"], libgmp_lib ) )
gmp_bits_per_limb = ctypes.c_int.in_dll( libgmp_lib, libgmp_lib.undecorated_names["__gmp_bits_per_limb [variable]"] )
gmpf_set_prec_type = ctypes.CFUNCTYPE( None, ctypes.POINTER( __mpf_struct ), ctypes.c_ulong )
gmpf_set_prec = gmpf_set_prec_type( ( libgmp_lib.undecorated_names["extern void __gmpf_set_prec(mpf_ptr arg0, long unsigned int arg1) [free function]"], libgmp_lib ) )
gmpq_set_si_type = ctypes.CFUNCTYPE( None, ctypes.POINTER( __mpq_struct ), ctypes.c_long, ctypes.c_ulong )
gmpq_set_si = gmpq_set_si_type( ( libgmp_lib.undecorated_names["extern void __gmpq_set_si(mpq_ptr arg0, long int arg1, long unsigned int arg2) [free function]"], libgmp_lib ) )
gmpq_mul_type = ctypes.CFUNCTYPE( None, ctypes.POINTER( __mpq_struct ), ctypes.POINTER( __mpq_struct ), ctypes.POINTER( __mpq_struct ) )
gmpq_mul = gmpq_mul_type( ( libgmp_lib.undecorated_names["extern void __gmpq_mul(mpq_ptr arg0, mpq_srcptr arg1, mpq_srcptr arg2) [free function]"], libgmp_lib ) )
gmpf_clear_type = ctypes.CFUNCTYPE( None, ctypes.POINTER( __mpf_struct ) )
gmpf_clear = gmpf_clear_type( ( libgmp_lib.undecorated_names["extern void __gmpf_clear(mpf_ptr arg0) [free function]"], libgmp_lib ) )
gmpq_set_str_type = ctypes.CFUNCTYPE( ctypes.c_int, ctypes.POINTER( __mpq_struct ), ctypes.c_char_p, ctypes.c_int )
gmpq_set_str = gmpq_set_str_type( ( libgmp_lib.undecorated_names["extern int __gmpq_set_str(mpq_ptr arg0, char const * arg1, int arg2) [free function]"], libgmp_lib ) )
gmpq_set_ui_type = ctypes.CFUNCTYPE( None, ctypes.POINTER( __mpq_struct ), ctypes.c_ulong, ctypes.c_ulong )
gmpq_set_ui = gmpq_set_ui_type( ( libgmp_lib.undecorated_names["extern void __gmpq_set_ui(mpq_ptr arg0, long unsigned int arg1, long unsigned int arg2) [free function]"], libgmp_lib ) )
gmpf_set_si_type = ctypes.CFUNCTYPE( None, ctypes.POINTER( __mpf_struct ), ctypes.c_long )
gmpf_set_si = gmpf_set_si_type( ( libgmp_lib.undecorated_names["extern void __gmpf_set_si(mpf_ptr arg0, long int arg1) [free function]"], libgmp_lib ) )
gmpf_init_set_d_type = ctypes.CFUNCTYPE( None, ctypes.POINTER( __mpf_struct ), ctypes.c_double )
gmpf_init_set_d = gmpf_init_set_d_type( ( libgmp_lib.undecorated_names["extern void __gmpf_init_set_d(mpf_ptr arg0, double arg1) [free function]"], libgmp_lib ) )
gmpz_jacobi_type = ctypes.CFUNCTYPE( ctypes.c_int, ctypes.POINTER( __mpz_struct ), ctypes.POINTER( __mpz_struct ) )
gmpz_jacobi = gmpz_jacobi_type( ( libgmp_lib.undecorated_names["extern int __gmpz_jacobi(mpz_srcptr arg0, mpz_srcptr arg1) [free function]"], libgmp_lib ) )
gmpf_set_ui_type = ctypes.CFUNCTYPE( None, ctypes.POINTER( __mpf_struct ), ctypes.c_ulong )
gmpf_set_ui = gmpf_set_ui_type( ( libgmp_lib.undecorated_names["extern void __gmpf_set_ui(mpf_ptr arg0, long unsigned int arg1) [free function]"], libgmp_lib ) )
gmpf_get_d_2exp_type = ctypes.CFUNCTYPE( ctypes.c_double, ctypes.POINTER( ctypes.c_long ), ctypes.POINTER( __mpf_struct ) )
gmpf_get_d_2exp = gmpf_get_d_2exp_type( ( libgmp_lib.undecorated_names["extern double __gmpf_get_d_2exp(long int * arg0, mpf_srcptr arg1) [free function]"], libgmp_lib ) )
gmpf_init_set_si_type = ctypes.CFUNCTYPE( None, ctypes.POINTER( __mpf_struct ), ctypes.c_long )
gmpf_init_set_si = gmpf_init_set_si_type( ( libgmp_lib.undecorated_names["extern void __gmpf_init_set_si(mpf_ptr arg0, long int arg1) [free function]"], libgmp_lib ) )
gmpf_random2_type = ctypes.CFUNCTYPE( None, ctypes.POINTER( __mpf_struct ), ctypes.c_long, ctypes.c_long )
gmpf_random2 = gmpf_random2_type( ( libgmp_lib.undecorated_names["extern void __gmpf_random2(mpf_ptr arg0, mp_size_t arg1, mp_exp_t arg2) [free function]"], libgmp_lib ) )
gmpf_init_set_ui_type = ctypes.CFUNCTYPE( None, ctypes.POINTER( __mpf_struct ), ctypes.c_ulong )
gmpf_init_set_ui = gmpf_init_set_ui_type( ( libgmp_lib.undecorated_names["extern void __gmpf_init_set_ui(mpf_ptr arg0, long unsigned int arg1) [free function]"], libgmp_lib ) )
gmpq_swap_type = ctypes.CFUNCTYPE( None, ctypes.POINTER( __mpq_struct ), ctypes.POINTER( __mpq_struct ) )
gmpq_swap = gmpq_swap_type( ( libgmp_lib.undecorated_names["extern void __gmpq_swap(mpq_ptr arg0, mpq_ptr arg1) [free function]"], libgmp_lib ) )
gmpq_clear_type = ctypes.CFUNCTYPE( None, ctypes.POINTER( __mpq_struct ) )
gmpq_clear = gmpq_clear_type( ( libgmp_lib.undecorated_names["extern void __gmpq_clear(mpq_ptr arg0) [free function]"], libgmp_lib ) )
gmpn_mul_type = ctypes.CFUNCTYPE( ctypes.c_ulong, ctypes.POINTER( ctypes.c_ulong ), ctypes.POINTER( ctypes.c_ulong ), ctypes.c_long, ctypes.POINTER( ctypes.c_ulong ), ctypes.c_long )
gmpn_mul = gmpn_mul_type( ( libgmp_lib.undecorated_names["extern mp_limb_t __gmpn_mul(mp_ptr arg0, mp_srcptr arg1, mp_size_t arg2, mp_srcptr arg3, mp_size_t arg4) [free function]"], libgmp_lib ) )
gmpz_ior_type = ctypes.CFUNCTYPE( None, ctypes.POINTER( __mpz_struct ), ctypes.POINTER( __mpz_struct ), ctypes.POINTER( __mpz_struct ) )
gmpz_ior = gmpz_ior_type( ( libgmp_lib.undecorated_names["extern void __gmpz_ior(mpz_ptr arg0, mpz_srcptr arg1, mpz_srcptr arg2) [free function]"], libgmp_lib ) )
gmpq_cmp_type = ctypes.CFUNCTYPE( ctypes.c_int, ctypes.POINTER( __mpq_struct ), ctypes.POINTER( __mpq_struct ) )
gmpq_cmp = gmpq_cmp_type( ( libgmp_lib.undecorated_names["extern int __gmpq_cmp(mpq_srcptr arg0, mpq_srcptr arg1) [free function]"], libgmp_lib ) )
gmpz_cdiv_r_type = ctypes.CFUNCTYPE( None, ctypes.POINTER( __mpz_struct ), ctypes.POINTER( __mpz_struct ), ctypes.POINTER( __mpz_struct ) )
gmpz_cdiv_r = gmpz_cdiv_r_type( ( libgmp_lib.undecorated_names["extern void __gmpz_cdiv_r(mpz_ptr arg0, mpz_srcptr arg1, mpz_srcptr arg2) [free function]"], libgmp_lib ) )
gmpn_divrem_1_type = ctypes.CFUNCTYPE( ctypes.c_ulong, ctypes.POINTER( ctypes.c_ulong ), ctypes.c_long, ctypes.POINTER( ctypes.c_ulong ), ctypes.c_long, ctypes.c_ulong )
gmpn_divrem_1 = gmpn_divrem_1_type( ( libgmp_lib.undecorated_names["extern mp_limb_t __gmpn_divrem_1(mp_ptr arg0, mp_size_t arg1, mp_srcptr arg2, mp_size_t arg3, mp_limb_t arg4) [free function]"], libgmp_lib ) )
gmpf_pow_ui_type = ctypes.CFUNCTYPE( None, ctypes.POINTER( __mpf_struct ), ctypes.POINTER( __mpf_struct ), ctypes.c_ulong )
gmpf_pow_ui = gmpf_pow_ui_type( ( libgmp_lib.undecorated_names["extern void __gmpf_pow_ui(mpf_ptr arg0, mpf_srcptr arg1, long unsigned int arg2) [free function]"], libgmp_lib ) )
gmpz_sizeinbase_type = ctypes.CFUNCTYPE( ctypes.c_size_t, ctypes.POINTER( __mpz_struct ), ctypes.c_int )  # returns size_t; c_uint would truncate on LP64, where this file maps mp_limb_t to c_ulong
gmpz_sizeinbase = gmpz_sizeinbase_type( ( libgmp_lib.undecorated_names["extern size_t __gmpz_sizeinbase(mpz_srcptr arg0, int arg1) [free function]"], libgmp_lib ) )
gmpn_submul_1_type = ctypes.CFUNCTYPE( ctypes.c_ulong, ctypes.POINTER( ctypes.c_ulong ), ctypes.POINTER( ctypes.c_ulong ), ctypes.c_long, ctypes.c_ulong )
gmpn_submul_1 = gmpn_submul_1_type( ( libgmp_lib.undecorated_names["extern mp_limb_t __gmpn_submul_1(mp_ptr arg0, mp_srcptr arg1, mp_size_t arg2, mp_limb_t arg3) [free function]"], libgmp_lib ) )
gmpz_invert_type = ctypes.CFUNCTYPE( ctypes.c_int, ctypes.POINTER( __mpz_struct ), ctypes.POINTER( __mpz_struct ), ctypes.POINTER( __mpz_struct ) )
gmpz_invert = gmpz_invert_type( ( libgmp_lib.undecorated_names["extern int __gmpz_invert(mpz_ptr arg0, mpz_srcptr arg1, mpz_srcptr arg2) [free function]"], libgmp_lib ) )
gmpf_inp_str_type = ctypes.CFUNCTYPE( ctypes.c_size_t, ctypes.POINTER( __mpf_struct ), ctypes.POINTER( _IO_FILE ), ctypes.c_int )
gmpf_inp_str = gmpf_inp_str_type( ( libgmp_lib.undecorated_names["extern size_t __gmpf_inp_str(mpf_ptr arg0, FILE * arg1, int arg2) [free function]"], libgmp_lib ) )
gmpz_init_set_ui_type = ctypes.CFUNCTYPE( None, ctypes.POINTER( __mpz_struct ), ctypes.c_ulong )
gmpz_init_set_ui = gmpz_init_set_ui_type( ( libgmp_lib.undecorated_names["extern void __gmpz_init_set_ui(mpz_ptr arg0, long unsigned int arg1) [free function]"], libgmp_lib ) )
gmpz_powm_type = ctypes.CFUNCTYPE( None, ctypes.POINTER( __mpz_struct ), ctypes.POINTER( __mpz_struct ), ctypes.POINTER( __mpz_struct ), ctypes.POINTER( __mpz_struct ) )
gmpz_powm = gmpz_powm_type( ( libgmp_lib.undecorated_names["extern void __gmpz_powm(mpz_ptr arg0, mpz_srcptr arg1, mpz_srcptr arg2, mpz_srcptr arg3) [free function]"], libgmp_lib ) )
gmpf_set_prec_raw_type = ctypes.CFUNCTYPE( None, ctypes.POINTER( __mpf_struct ), ctypes.c_ulong )
gmpf_set_prec_raw = gmpf_set_prec_raw_type( ( libgmp_lib.undecorated_names["extern void __gmpf_set_prec_raw(mpf_ptr arg0, long unsigned int arg1) [free function]"], libgmp_lib ) )
gmpf_cmp_si_type = ctypes.CFUNCTYPE( ctypes.c_int, ctypes.POINTER( __mpf_struct ), ctypes.c_long )
gmpf_cmp_si = gmpf_cmp_si_type( ( libgmp_lib.undecorated_names["extern int __gmpf_cmp_si(mpf_srcptr arg0, long int arg1) [free function]"], libgmp_lib ) )
gmpz_get_d_type = ctypes.CFUNCTYPE( ctypes.c_double, ctypes.POINTER( __mpz_struct ) )
gmpz_get_d = gmpz_get_d_type( ( libgmp_lib.undecorated_names["extern double __gmpz_get_d(mpz_srcptr arg0) [free function]"], libgmp_lib ) )
gmpz_cdiv_q_2exp_type = ctypes.CFUNCTYPE( None, ctypes.POINTER( __mpz_struct ), ctypes.POINTER( __mpz_struct ), ctypes.c_ulong )
gmpz_cdiv_q_2exp = gmpz_cdiv_q_2exp_type( ( libgmp_lib.undecorated_names["extern void __gmpz_cdiv_q_2exp(mpz_ptr arg0, mpz_srcptr arg1, long unsigned int arg2) [free function]"], libgmp_lib ) )
gmpf_div_type = ctypes.CFUNCTYPE( None, ctypes.POINTER( __mpf_struct ), ctypes.POINTER( __mpf_struct ), ctypes.POINTER( __mpf_struct ) )
gmpf_div = gmpf_div_type( ( libgmp_lib.undecorated_names["extern void __gmpf_div(mpf_ptr arg0, mpf_srcptr arg1, mpf_srcptr arg2) [free function]"], libgmp_lib ) )
gmp_urandomm_ui_type = ctypes.CFUNCTYPE( ctypes.c_ulong, ctypes.POINTER( __gmp_randstate_struct ), ctypes.c_ulong )
gmp_urandomm_ui = gmp_urandomm_ui_type( ( libgmp_lib.undecorated_names["extern long unsigned int __gmp_urandomm_ui(__gmp_randstate_struct * arg0, long unsigned int arg1) [free function]"], libgmp_lib ) )
gmpn_gcd_1_type = ctypes.CFUNCTYPE( ctypes.c_ulong, ctypes.POINTER( ctypes.c_ulong ), ctypes.c_long, ctypes.c_ulong )
gmpn_gcd_1 = gmpn_gcd_1_type( ( libgmp_lib.undecorated_names["extern mp_limb_t __gmpn_gcd_1(mp_srcptr arg0, mp_size_t arg1, mp_limb_t arg2) [free function]"], libgmp_lib ) )
gmpn_sub_n_type = ctypes.CFUNCTYPE( ctypes.c_ulong, ctypes.POINTER( ctypes.c_ulong ), ctypes.POINTER( ctypes.c_ulong ), ctypes.POINTER( ctypes.c_ulong ), ctypes.c_long )
gmpn_sub_n = gmpn_sub_n_type( ( libgmp_lib.undecorated_names["extern mp_limb_t __gmpn_sub_n(mp_ptr arg0, mp_srcptr arg1, mp_srcptr arg2, mp_size_t arg3) [free function]"], libgmp_lib ) )
gmpz_setbit_type = ctypes.CFUNCTYPE( None, ctypes.POINTER( __mpz_struct ), ctypes.c_ulong )
gmpz_setbit = gmpz_setbit_type( ( libgmp_lib.undecorated_names["extern void __gmpz_setbit(mpz_ptr arg0, long unsigned int arg1) [free function]"], libgmp_lib ) )
gmpz_lucnum2_ui_type = ctypes.CFUNCTYPE( None, ctypes.POINTER( __mpz_struct ), ctypes.POINTER( __mpz_struct ), ctypes.c_ulong )
gmpz_lucnum2_ui = gmpz_lucnum2_ui_type( ( libgmp_lib.undecorated_names["extern void __gmpz_lucnum2_ui(mpz_ptr arg0, mpz_ptr arg1, long unsigned int arg2) [free function]"], libgmp_lib ) )
gmpz_remove_type = ctypes.CFUNCTYPE( ctypes.c_ulong, ctypes.POINTER( __mpz_struct ), ctypes.POINTER( __mpz_struct ), ctypes.POINTER( __mpz_struct ) )
gmpz_remove = gmpz_remove_type( ( libgmp_lib.undecorated_names["extern long unsigned int __gmpz_remove(mpz_ptr arg0, mpz_srcptr arg1, mpz_srcptr arg2) [free function]"], libgmp_lib ) )
gmpq_mul_2exp_type = ctypes.CFUNCTYPE( None, ctypes.POINTER( __mpq_struct ), ctypes.POINTER( __mpq_struct ), ctypes.c_ulong )
gmpq_mul_2exp = gmpq_mul_2exp_type( ( libgmp_lib.undecorated_names["extern void __gmpq_mul_2exp(mpq_ptr arg0, mpq_srcptr arg1, long unsigned int arg2) [free function]"], libgmp_lib ) )
gmpz_gcd_ui_type = ctypes.CFUNCTYPE( ctypes.c_ulong, ctypes.POINTER( __mpz_struct ), ctypes.POINTER( __mpz_struct ), ctypes.c_ulong )
gmpz_gcd_ui = gmpz_gcd_ui_type( ( libgmp_lib.undecorated_names["extern long unsigned int __gmpz_gcd_ui(mpz_ptr arg0, mpz_srcptr arg1, long unsigned int arg2) [free function]"], libgmp_lib ) )
gmpn_popcount_type = ctypes.CFUNCTYPE( ctypes.c_ulong, ctypes.POINTER( ctypes.c_ulong ), ctypes.c_long )
gmpn_popcount = gmpn_popcount_type( ( libgmp_lib.undecorated_names["extern long unsigned int __gmpn_popcount(mp_srcptr arg0, mp_size_t arg1) [free function]"], libgmp_lib ) )
gmpz_init_set_str_type = ctypes.CFUNCTYPE( ctypes.c_int, ctypes.POINTER( __mpz_struct ), ctypes.c_char_p, ctypes.c_int )
gmpz_init_set_str = gmpz_init_set_str_type( ( libgmp_lib.undecorated_names["extern int __gmpz_init_set_str(mpz_ptr arg0, char const * arg1, int arg2) [free function]"], libgmp_lib ) )
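# A minimal parsing sketch (added; not part of the generated bindings). It assumes
# the __mpz_struct layout defined earlier in this module matches the loaded libgmp.
# __gmpz_init_set_str returns 0 on success and -1 if the string is not a valid
# number in the given base; ctypes.c_char_p requires a bytes argument. The mpz is
# leaked here for brevity; a full example would call the gmpz_clear binding.
def _parse_mpz_demo(text=b"123456789123456789", base=10):
    z = __mpz_struct()
    if gmpz_init_set_str(ctypes.byref(z), text, base) != 0:
        raise ValueError("not a valid base-%d integer" % base)
    return gmpz_sizeinbase(ctypes.byref(z), 2)  # bit length of the parsed value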
gmpz_cmp_d_type = ctypes.CFUNCTYPE( ctypes.c_int, ctypes.POINTER( __mpz_struct ), ctypes.c_double )
gmpz_cmp_d = gmpz_cmp_d_type( ( libgmp_lib.undecorated_names["extern int __gmpz_cmp_d(mpz_srcptr arg0, double arg1) [free function]"], libgmp_lib ) )
gmpz_xor_type = ctypes.CFUNCTYPE( None, ctypes.POINTER( __mpz_struct ), ctypes.POINTER( __mpz_struct ), ctypes.POINTER( __mpz_struct ) )
gmpz_xor = gmpz_xor_type( ( libgmp_lib.undecorated_names["extern void __gmpz_xor(mpz_ptr arg0, mpz_srcptr arg1, mpz_srcptr arg2) [free function]"], libgmp_lib ) )
gmpf_fits_sint_p_type = ctypes.CFUNCTYPE( ctypes.c_int, ctypes.POINTER( __mpf_struct ) )
gmpf_fits_sint_p = gmpf_fits_sint_p_type( ( libgmp_lib.undecorated_names["extern int __gmpf_fits_sint_p(mpf_srcptr arg0) [free function]"], libgmp_lib ) )
gmpz_cdiv_q_type = ctypes.CFUNCTYPE( None, ctypes.POINTER( __mpz_struct ), ctypes.POINTER( __mpz_struct ), ctypes.POINTER( __mpz_struct ) )
gmpz_cdiv_q = gmpz_cdiv_q_type( ( libgmp_lib.undecorated_names["extern void __gmpz_cdiv_q(mpz_ptr arg0, mpz_srcptr arg1, mpz_srcptr arg2) [free function]"], libgmp_lib ) )
gmpz_sqrtrem_type = ctypes.CFUNCTYPE( None, ctypes.POINTER( __mpz_struct ), ctypes.POINTER( __mpz_struct ), ctypes.POINTER( __mpz_struct ) )
gmpz_sqrtrem = gmpz_sqrtrem_type( ( libgmp_lib.undecorated_names["extern void __gmpz_sqrtrem(mpz_ptr arg0, mpz_ptr arg1, mpz_srcptr arg2) [free function]"], libgmp_lib ) )
gmpz_mul_type = ctypes.CFUNCTYPE( None, ctypes.POINTER( __mpz_struct ), ctypes.POINTER( __mpz_struct ), ctypes.POINTER( __mpz_struct ) )
gmpz_mul = gmpz_mul_type( ( libgmp_lib.undecorated_names["extern void __gmpz_mul(mpz_ptr arg0, mpz_srcptr arg1, mpz_srcptr arg2) [free function]"], libgmp_lib ) )
gmpn_tdiv_qr_type = ctypes.CFUNCTYPE( None, ctypes.POINTER( ctypes.c_ulong ), ctypes.POINTER( ctypes.c_ulong ), ctypes.c_long, ctypes.POINTER( ctypes.c_ulong ), ctypes.c_long, ctypes.POINTER( ctypes.c_ulong ), ctypes.c_long )
gmpn_tdiv_qr = gmpn_tdiv_qr_type( ( libgmp_lib.undecorated_names["extern void __gmpn_tdiv_qr(mp_ptr arg0, mp_ptr arg1, mp_size_t arg2, mp_srcptr arg3, mp_size_t arg4, mp_srcptr arg5, mp_size_t arg6) [free function]"], libgmp_lib ) )
gmpz_tdiv_qr_type = ctypes.CFUNCTYPE( None, ctypes.POINTER( __mpz_struct ), ctypes.POINTER( __mpz_struct ), ctypes.POINTER( __mpz_struct ), ctypes.POINTER( __mpz_struct ) )
gmpz_tdiv_qr = gmpz_tdiv_qr_type( ( libgmp_lib.undecorated_names["extern void __gmpz_tdiv_qr(mpz_ptr arg0, mpz_ptr arg1, mpz_srcptr arg2, mpz_srcptr arg3) [free function]"], libgmp_lib ) )
gmpn_perfect_square_p_type = ctypes.CFUNCTYPE( ctypes.c_int, ctypes.POINTER( ctypes.c_ulong ), ctypes.c_long )
gmpn_perfect_square_p = gmpn_perfect_square_p_type( ( libgmp_lib.undecorated_names["extern int __gmpn_perfect_square_p(mp_srcptr arg0, mp_size_t arg1) [free function]"], libgmp_lib ) )
gmpf_get_prec_type = ctypes.CFUNCTYPE( ctypes.c_ulong, ctypes.POINTER( __mpf_struct ) )
gmpf_get_prec = gmpf_get_prec_type( ( libgmp_lib.undecorated_names["extern long unsigned int __gmpf_get_prec(mpf_srcptr arg0) [free function]"], libgmp_lib ) )
gmpz_get_si_type = ctypes.CFUNCTYPE( ctypes.c_long, ctypes.POINTER( __mpz_struct ) )
gmpz_get_si = gmpz_get_si_type( ( libgmp_lib.undecorated_names["extern long int __gmpz_get_si(mpz_srcptr arg0) [free function]"], libgmp_lib ) )
gmpf_set_default_prec_type = ctypes.CFUNCTYPE( None, ctypes.c_ulong )
gmpf_set_default_prec = gmpf_set_default_prec_type( ( libgmp_lib.undecorated_names["extern void __gmpf_set_default_prec(long unsigned int arg0) [free function]"], libgmp_lib ) )
gmp_urandomb_ui_type = ctypes.CFUNCTYPE( ctypes.c_ulong, ctypes.POINTER( __gmp_randstate_struct ), ctypes.c_ulong )
gmp_urandomb_ui = gmp_urandomb_ui_type( ( libgmp_lib.undecorated_names["extern long unsigned int __gmp_urandomb_ui(__gmp_randstate_struct * arg0, long unsigned int arg1) [free function]"], libgmp_lib ) )
gmpf_init_set_str_type = ctypes.CFUNCTYPE( ctypes.c_int, ctypes.POINTER( __mpf_struct ), ctypes.c_char_p, ctypes.c_int )
gmpf_init_set_str = gmpf_init_set_str_type( ( libgmp_lib.undecorated_names["extern int __gmpf_init_set_str(mpf_ptr arg0, char const * arg1, int arg2) [free function]"], libgmp_lib ) )
gmpz_tstbit_type = ctypes.CFUNCTYPE( ctypes.c_int, ctypes.POINTER( __mpz_struct ), ctypes.c_ulong )
gmpz_tstbit = gmpz_tstbit_type( ( libgmp_lib.undecorated_names["extern int __gmpz_tstbit(mpz_srcptr arg0, long unsigned int arg1) [free function]"], libgmp_lib ) )
gmpz_fits_sint_p_type = ctypes.CFUNCTYPE( ctypes.c_int, ctypes.POINTER( __mpz_struct ) )
gmpz_fits_sint_p = gmpz_fits_sint_p_type( ( libgmp_lib.undecorated_names["extern int __gmpz_fits_sint_p(mpz_srcptr arg0) [free function]"], libgmp_lib ) )
gmpz_urandomb_type = ctypes.CFUNCTYPE( None, ctypes.POINTER( __mpz_struct ), ctypes.POINTER( __gmp_randstate_struct ), ctypes.c_ulong )
gmpz_urandomb = gmpz_urandomb_type( ( libgmp_lib.undecorated_names["extern void __gmpz_urandomb(mpz_ptr arg0, __gmp_randstate_struct * arg1, long unsigned int arg2) [free function]"], libgmp_lib ) )
gmpz_urandomm_type = ctypes.CFUNCTYPE( None, ctypes.POINTER( __mpz_struct ), ctypes.POINTER( __gmp_randstate_struct ), ctypes.POINTER( __mpz_struct ) )
gmpz_urandomm = gmpz_urandomm_type( ( libgmp_lib.undecorated_names["extern void __gmpz_urandomm(mpz_ptr arg0, __gmp_randstate_struct * arg1, mpz_srcptr arg2) [free function]"], libgmp_lib ) )
gmpf_set_type = ctypes.CFUNCTYPE( None, ctypes.POINTER( __mpf_struct ), ctypes.POINTER( __mpf_struct ) )
gmpf_set = gmpf_set_type( ( libgmp_lib.undecorated_names["extern void __gmpf_set(mpf_ptr arg0, mpf_srcptr arg1) [free function]"], libgmp_lib ) )
gmpf_reldiff_type = ctypes.CFUNCTYPE( None, ctypes.POINTER( __mpf_struct ), ctypes.POINTER( __mpf_struct ), ctypes.POINTER( __mpf_struct ) )
gmpf_reldiff = gmpf_reldiff_type( ( libgmp_lib.undecorated_names["extern void __gmpf_reldiff(mpf_ptr arg0, mpf_srcptr arg1, mpf_srcptr arg2) [free function]"], libgmp_lib ) )
gmpz_gcd_type = ctypes.CFUNCTYPE( None, ctypes.POINTER( __mpz_struct ), ctypes.POINTER( __mpz_struct ), ctypes.POINTER( __mpz_struct ) )
gmpz_gcd = gmpz_gcd_type( ( libgmp_lib.undecorated_names["extern void __gmpz_gcd(mpz_ptr arg0, mpz_srcptr arg1, mpz_srcptr arg2) [free function]"], libgmp_lib ) )
gmpz_set_d_type = ctypes.CFUNCTYPE( None, ctypes.POINTER( __mpz_struct ), ctypes.c_double )
gmpz_set_d = gmpz_set_d_type( ( libgmp_lib.undecorated_names["extern void __gmpz_set_d(mpz_ptr arg0, double arg1) [free function]"], libgmp_lib ) )
gmpz_set_f_type = ctypes.CFUNCTYPE( None, ctypes.POINTER( __mpz_struct ), ctypes.POINTER( __mpf_struct ) )
gmpz_set_f = gmpz_set_f_type( ( libgmp_lib.undecorated_names["extern void __gmpz_set_f(mpz_ptr arg0, mpf_srcptr arg1) [free function]"], libgmp_lib ) )
gmpz_set_q_type = ctypes.CFUNCTYPE( None, ctypes.POINTER( __mpz_struct ), ctypes.POINTER( __mpq_struct ) )
gmpz_set_q = gmpz_set_q_type( ( libgmp_lib.undecorated_names["void __gmpz_set_q(mpz_ptr __gmp_w, mpq_srcptr __gmp_u) [free function]"], libgmp_lib ) )
gmpn_addmul_1_type = ctypes.CFUNCTYPE( ctypes.c_ulong, ctypes.POINTER( ctypes.c_ulong ), ctypes.POINTER( ctypes.c_ulong ), ctypes.c_long, ctypes.c_ulong )
gmpn_addmul_1 = gmpn_addmul_1_type( ( libgmp_lib.undecorated_names["extern mp_limb_t __gmpn_addmul_1(mp_ptr arg0, mp_srcptr arg1, mp_size_t arg2, mp_limb_t arg3) [free function]"], libgmp_lib ) )
gmpz_millerrabin_type = ctypes.CFUNCTYPE( ctypes.c_int, ctypes.POINTER( __mpz_struct ), ctypes.c_int )
gmpz_millerrabin = gmpz_millerrabin_type( ( libgmp_lib.undecorated_names["extern int __gmpz_millerrabin(mpz_srcptr arg0, int arg1) [free function]"], libgmp_lib ) )
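# A hedged usage sketch for the binding above: __gmpz_millerrabin is an internal
# libgmp entry point that returns nonzero when the operand passes `reps` rounds
# of Miller-Rabin (mpz_probab_prime_p is the documented wrapper). Assumes the
# struct layout defined earlier in this module is correct for the loaded library.
def _probably_prime_demo(n=104729, reps=25):
    z = __mpz_struct()
    gmpz_init_set_ui(ctypes.byref(z), n)  # bound above
    return bool(gmpz_millerrabin(ctypes.byref(z), reps))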
gmpz_init2_type = ctypes.CFUNCTYPE( None, ctypes.POINTER( __mpz_struct ), ctypes.c_ulong )
gmpz_init2 = gmpz_init2_type( ( libgmp_lib.undecorated_names["extern void __gmpz_init2(mpz_ptr arg0, long unsigned int arg1) [free function]"], libgmp_lib ) )
gmpf_eq_type = ctypes.CFUNCTYPE( ctypes.c_int, ctypes.POINTER( __mpf_struct ), ctypes.POINTER( __mpf_struct ), ctypes.c_ulong )
gmpf_eq = gmpf_eq_type( ( libgmp_lib.undecorated_names["extern int __gmpf_eq(mpf_srcptr arg0, mpf_srcptr arg1, long unsigned int arg2) [free function]"], libgmp_lib ) )
gmpn_lshift_type = ctypes.CFUNCTYPE( ctypes.c_ulong, ctypes.POINTER( ctypes.c_ulong ), ctypes.POINTER( ctypes.c_ulong ), ctypes.c_long, ctypes.c_uint )
gmpn_lshift = gmpn_lshift_type( ( libgmp_lib.undecorated_names["extern mp_limb_t __gmpn_lshift(mp_ptr arg0, mp_srcptr arg1, mp_size_t arg2, unsigned int arg3) [free function]"], libgmp_lib ) )
gmpn_divrem_2_type = ctypes.CFUNCTYPE( ctypes.c_ulong, ctypes.POINTER( ctypes.c_ulong ), ctypes.c_long, ctypes.POINTER( ctypes.c_ulong ), ctypes.c_long, ctypes.POINTER( ctypes.c_ulong ) )
gmpn_divrem_2 = gmpn_divrem_2_type( ( libgmp_lib.undecorated_names["extern mp_limb_t __gmpn_divrem_2(mp_ptr arg0, mp_size_t arg1, mp_ptr arg2, mp_size_t arg3, mp_srcptr arg4) [free function]"], libgmp_lib ) )
gmpf_fits_ushort_p_type = ctypes.CFUNCTYPE( ctypes.c_int, ctypes.POINTER( __mpf_struct ) )
gmpf_fits_ushort_p = gmpf_fits_ushort_p_type( ( libgmp_lib.undecorated_names["extern int __gmpf_fits_ushort_p(mpf_srcptr arg0) [free function]"], libgmp_lib ) )
gmpz_init_type = ctypes.CFUNCTYPE( None, ctypes.POINTER( __mpz_struct ) )
gmpz_init = gmpz_init_type( ( libgmp_lib.undecorated_names["extern void __gmpz_init(mpz_ptr arg0) [free function]"], libgmp_lib ) )
gmpf_init_type = ctypes.CFUNCTYPE( None, ctypes.POINTER( __mpf_struct ) )
gmpf_init = gmpf_init_type( ( libgmp_lib.undecorated_names["extern void __gmpf_init(mpf_ptr arg0) [free function]"], libgmp_lib ) )
gmpz_out_str_type = ctypes.CFUNCTYPE( ctypes.c_size_t, ctypes.POINTER( _IO_FILE ), ctypes.c_int, ctypes.POINTER( __mpz_struct ) )
gmpz_out_str = gmpz_out_str_type( ( libgmp_lib.undecorated_names["extern size_t __gmpz_out_str(FILE * arg0, int arg1, mpz_srcptr arg2) [free function]"], libgmp_lib ) )
gmpz_swap_type = ctypes.CFUNCTYPE( None, ctypes.POINTER( __mpz_struct ), ctypes.POINTER( __mpz_struct ) )
gmpz_swap = gmpz_swap_type( ( libgmp_lib.undecorated_names["extern void __gmpz_swap(mpz_ptr arg0, mpz_ptr arg1) [free function]"], libgmp_lib ) )
gmpf_div_2exp_type = ctypes.CFUNCTYPE( None, ctypes.POINTER( __mpf_struct ), ctypes.POINTER( __mpf_struct ), ctypes.c_ulong )
gmpf_div_2exp = gmpf_div_2exp_type( ( libgmp_lib.undecorated_names["extern void __gmpf_div_2exp(mpf_ptr arg0, mpf_srcptr arg1, long unsigned int arg2) [free function]"], libgmp_lib ) )
gmpz_cmpabs_type = ctypes.CFUNCTYPE( ctypes.c_int, ctypes.POINTER( __mpz_struct ), ctypes.POINTER( __mpz_struct ) )
gmpz_cmpabs = gmpz_cmpabs_type( ( libgmp_lib.undecorated_names["extern int __gmpz_cmpabs(mpz_srcptr arg0, mpz_srcptr arg1) [free function]"], libgmp_lib ) )
gmpq_inp_str_type = ctypes.CFUNCTYPE( ctypes.c_size_t, ctypes.POINTER( __mpq_struct ), ctypes.POINTER( _IO_FILE ), ctypes.c_int )
gmpq_inp_str = gmpq_inp_str_type( ( libgmp_lib.undecorated_names["extern size_t __gmpq_inp_str(mpq_ptr arg0, FILE * arg1, int arg2) [free function]"], libgmp_lib ) )
gmpq_get_str_type = ctypes.CFUNCTYPE( ctypes.c_char_p, ctypes.c_char_p, ctypes.c_int, ctypes.POINTER( __mpq_struct ) )
gmpq_get_str = gmpq_get_str_type( ( libgmp_lib.undecorated_names["extern char * __gmpq_get_str(char * arg0, int arg1, mpq_srcptr arg2) [free function]"], libgmp_lib ) )
gmpf_abs_type = ctypes.CFUNCTYPE( None, ctypes.POINTER( __mpf_struct ), ctypes.POINTER( __mpf_struct ) )
gmpf_abs = gmpf_abs_type( ( libgmp_lib.undecorated_names["extern void __gmpf_abs(mpf_ptr arg0, mpf_srcptr arg1) [free function]"], libgmp_lib ) )
gmpz_fdiv_q_2exp_type = ctypes.CFUNCTYPE( None, ctypes.POINTER( __mpz_struct ), ctypes.POINTER( __mpz_struct ), ctypes.c_ulong )
gmpz_fdiv_q_2exp = gmpz_fdiv_q_2exp_type( ( libgmp_lib.undecorated_names["extern void __gmpz_fdiv_q_2exp(mpz_ptr arg0, mpz_srcptr arg1, long unsigned int arg2) [free function]"], libgmp_lib ) )
gmpn_preinv_mod_1_type = ctypes.CFUNCTYPE( ctypes.c_ulong, ctypes.POINTER( ctypes.c_ulong ), ctypes.c_long, ctypes.c_ulong, ctypes.c_ulong )
gmpn_preinv_mod_1 = gmpn_preinv_mod_1_type( ( libgmp_lib.undecorated_names["extern mp_limb_t __gmpn_preinv_mod_1(mp_srcptr arg0, mp_size_t arg1, mp_limb_t arg2, mp_limb_t arg3) [free function]"], libgmp_lib ) )
gmpf_add_type = ctypes.CFUNCTYPE( None, ctypes.POINTER( __mpf_struct ), ctypes.POINTER( __mpf_struct ), ctypes.POINTER( __mpf_struct ) )
gmpf_add = gmpf_add_type( ( libgmp_lib.undecorated_names["extern void __gmpf_add(mpf_ptr arg0, mpf_srcptr arg1, mpf_srcptr arg2) [free function]"], libgmp_lib ) )
gmpn_cmp_type = ctypes.CFUNCTYPE( ctypes.c_int, ctypes.POINTER( ctypes.c_ulong ), ctypes.POINTER( ctypes.c_ulong ), ctypes.c_long )
gmpn_cmp = gmpn_cmp_type( ( libgmp_lib.undecorated_names["int __gmpn_cmp(mp_srcptr __gmp_xp, mp_srcptr __gmp_yp, mp_size_t __gmp_size) [free function]"], libgmp_lib ) )
gmpz_bin_ui_type = ctypes.CFUNCTYPE( None, ctypes.POINTER( __mpz_struct ), ctypes.POINTER( __mpz_struct ), ctypes.c_ulong )
gmpz_bin_ui = gmpz_bin_ui_type( ( libgmp_lib.undecorated_names["extern void __gmpz_bin_ui(mpz_ptr arg0, mpz_srcptr arg1, long unsigned int arg2) [free function]"], libgmp_lib ) )
gmpf_sub_type = ctypes.CFUNCTYPE( None, ctypes.POINTER( __mpf_struct ), ctypes.POINTER( __mpf_struct ), ctypes.POINTER( __mpf_struct ) )
gmpf_sub = gmpf_sub_type( ( libgmp_lib.undecorated_names["extern void __gmpf_sub(mpf_ptr arg0, mpf_srcptr arg1, mpf_srcptr arg2) [free function]"], libgmp_lib ) )
gmpz_gcdext_type = ctypes.CFUNCTYPE( None, ctypes.POINTER( __mpz_struct ), ctypes.POINTER( __mpz_struct ), ctypes.POINTER( __mpz_struct ), ctypes.POINTER( __mpz_struct ), ctypes.POINTER( __mpz_struct ) )
gmpz_gcdext = gmpz_gcdext_type( ( libgmp_lib.undecorated_names["extern void __gmpz_gcdext(mpz_ptr arg0, mpz_ptr arg1, mpz_ptr arg2, mpz_srcptr arg3, mpz_srcptr arg4) [free function]"], libgmp_lib ) )
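# A usage sketch for the extended-GCD binding (added, illustrative only): computes
# g = gcd(a, b) and Bezout coefficients s, t with a*s + b*t = g, then reads the
# small results back through __gmpz_get_si. Operands are leaked for brevity.
def _gcdext_demo(a=240, b=46):
    g, s, t, za, zb = (__mpz_struct() for _ in range(5))
    for z in (g, s, t):
        gmpz_init(ctypes.byref(z))
    gmpz_init_set_ui(ctypes.byref(za), a)
    gmpz_init_set_ui(ctypes.byref(zb), b)
    gmpz_gcdext(ctypes.byref(g), ctypes.byref(s), ctypes.byref(t),
                ctypes.byref(za), ctypes.byref(zb))
    return (gmpz_get_si(ctypes.byref(g)),
            gmpz_get_si(ctypes.byref(s)),
            gmpz_get_si(ctypes.byref(t)))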
gmpz_cdiv_qr_type = ctypes.CFUNCTYPE( None, ctypes.POINTER( __mpz_struct ), ctypes.POINTER( __mpz_struct ), ctypes.POINTER( __mpz_struct ), ctypes.POINTER( __mpz_struct ) )
gmpz_cdiv_qr = gmpz_cdiv_qr_type( ( libgmp_lib.undecorated_names["extern void __gmpz_cdiv_qr(mpz_ptr arg0, mpz_ptr arg1, mpz_srcptr arg2, mpz_srcptr arg3) [free function]"], libgmp_lib ) )
gmpz_realloc_type = ctypes.CFUNCTYPE( ctypes.c_void_p, ctypes.POINTER( __mpz_struct ), ctypes.c_long )
gmpz_realloc = gmpz_realloc_type( ( libgmp_lib.undecorated_names["extern void * __gmpz_realloc(mpz_ptr arg0, mp_size_t arg1) [free function]"], libgmp_lib ) )
gmpn_divexact_by3c_type = ctypes.CFUNCTYPE( ctypes.c_ulong, ctypes.POINTER( ctypes.c_ulong ), ctypes.POINTER( ctypes.c_ulong ), ctypes.c_long, ctypes.c_ulong )
gmpn_divexact_by3c = gmpn_divexact_by3c_type( ( libgmp_lib.undecorated_names["extern mp_limb_t __gmpn_divexact_by3c(mp_ptr arg0, mp_srcptr arg1, mp_size_t arg2, mp_limb_t arg3) [free function]"], libgmp_lib ) )
gmpn_mul_1_type = ctypes.CFUNCTYPE( ctypes.c_ulong, ctypes.POINTER( ctypes.c_ulong ), ctypes.POINTER( ctypes.c_ulong ), ctypes.c_long, ctypes.c_ulong )
gmpn_mul_1 = gmpn_mul_1_type( ( libgmp_lib.undecorated_names["extern mp_limb_t __gmpn_mul_1(mp_ptr arg0, mp_srcptr arg1, mp_size_t arg2, mp_limb_t arg3) [free function]"], libgmp_lib ) )
gmpz_cdiv_ui_type = ctypes.CFUNCTYPE( ctypes.c_ulong, ctypes.POINTER( __mpz_struct ), ctypes.c_ulong )
gmpz_cdiv_ui = gmpz_cdiv_ui_type( ( libgmp_lib.undecorated_names["extern long unsigned int __gmpz_cdiv_ui(mpz_srcptr arg0, long unsigned int arg1) [free function]"], libgmp_lib ) )
gmpf_get_d_type = ctypes.CFUNCTYPE( ctypes.c_double, ctypes.POINTER( __mpf_struct ) )
gmpf_get_d = gmpf_get_d_type( ( libgmp_lib.undecorated_names["extern double __gmpf_get_d(mpf_srcptr arg0) [free function]"], libgmp_lib ) )
gmpz_fdiv_qr_ui_type = ctypes.CFUNCTYPE( ctypes.c_ulong, ctypes.POINTER( __mpz_struct ), ctypes.POINTER( __mpz_struct ), ctypes.POINTER( __mpz_struct ), ctypes.c_ulong )
gmpz_fdiv_qr_ui = gmpz_fdiv_qr_ui_type( ( libgmp_lib.undecorated_names["extern long unsigned int __gmpz_fdiv_qr_ui(mpz_ptr arg0, mpz_ptr arg1, mpz_srcptr arg2, long unsigned int arg3) [free function]"], libgmp_lib ) )
gmpq_div_type = ctypes.CFUNCTYPE( None, ctypes.POINTER( __mpq_struct ), ctypes.POINTER( __mpq_struct ), ctypes.POINTER( __mpq_struct ) )
gmpq_div = gmpq_div_type( ( libgmp_lib.undecorated_names["extern void __gmpq_div(mpq_ptr arg0, mpq_srcptr arg1, mpq_srcptr arg2) [free function]"], libgmp_lib ) )
gmpz_fits_ulong_p_type = ctypes.CFUNCTYPE( ctypes.c_int, ctypes.POINTER( __mpz_struct ) )
gmpz_fits_ulong_p = gmpz_fits_ulong_p_type( ( libgmp_lib.undecorated_names["int __gmpz_fits_ulong_p(mpz_srcptr __gmp_z) [free function]"], libgmp_lib ) )
gmpz_fits_uint_p_type = ctypes.CFUNCTYPE( ctypes.c_int, ctypes.POINTER( __mpz_struct ) )
gmpz_fits_uint_p = gmpz_fits_uint_p_type( ( libgmp_lib.undecorated_names["int __gmpz_fits_uint_p(mpz_srcptr __gmp_z) [free function]"], libgmp_lib ) )
gmpz_fac_ui_type = ctypes.CFUNCTYPE( None, ctypes.POINTER( __mpz_struct ), ctypes.c_ulong )
gmpz_fac_ui = gmpz_fac_ui_type( ( libgmp_lib.undecorated_names["extern void __gmpz_fac_ui(mpz_ptr arg0, long unsigned int arg1) [free function]"], libgmp_lib ) )
gmpf_swap_type = ctypes.CFUNCTYPE( None, ctypes.POINTER( __mpf_struct ), ctypes.POINTER( __mpf_struct ) )
gmpf_swap = gmpf_swap_type( ( libgmp_lib.undecorated_names["extern void __gmpf_swap(mpf_ptr arg0, mpf_ptr arg1) [free function]"], libgmp_lib ) )
gmpz_init_set_type = ctypes.CFUNCTYPE( None, ctypes.POINTER( __mpz_struct ), ctypes.POINTER( __mpz_struct ) )
gmpz_init_set = gmpz_init_set_type( ( libgmp_lib.undecorated_names["extern void __gmpz_init_set(mpz_ptr arg0, mpz_srcptr arg1) [free function]"], libgmp_lib ) )
gmpz_lcm_ui_type = ctypes.CFUNCTYPE( None, ctypes.POINTER( __mpz_struct ), ctypes.POINTER( __mpz_struct ), ctypes.c_ulong )
gmpz_lcm_ui = gmpz_lcm_ui_type( ( libgmp_lib.undecorated_names["extern void __gmpz_lcm_ui(mpz_ptr arg0, mpz_srcptr arg1, long unsigned int arg2) [free function]"], libgmp_lib ) )
gmpn_gcd_type = ctypes.CFUNCTYPE( ctypes.c_long, ctypes.POINTER( ctypes.c_ulong ), ctypes.POINTER( ctypes.c_ulong ), ctypes.c_long, ctypes.POINTER( ctypes.c_ulong ), ctypes.c_long )
gmpn_gcd = gmpn_gcd_type( ( libgmp_lib.undecorated_names["extern mp_size_t __gmpn_gcd(mp_ptr arg0, mp_ptr arg1, mp_size_t arg2, mp_ptr arg3, mp_size_t arg4) [free function]"], libgmp_lib ) )
gmpz_addmul_ui_type = ctypes.CFUNCTYPE( None, ctypes.POINTER( __mpz_struct ), ctypes.POINTER( __mpz_struct ), ctypes.c_ulong )
gmpz_addmul_ui = gmpz_addmul_ui_type( ( libgmp_lib.undecorated_names["extern void __gmpz_addmul_ui(mpz_ptr arg0, mpz_srcptr arg1, long unsigned int arg2) [free function]"], libgmp_lib ) )
gmpf_trunc_type = ctypes.CFUNCTYPE( None, ctypes.POINTER( __mpf_struct ), ctypes.POINTER( __mpf_struct ) )
gmpf_trunc = gmpf_trunc_type( ( libgmp_lib.undecorated_names["extern void __gmpf_trunc(mpf_ptr arg0, mpf_srcptr arg1) [free function]"], libgmp_lib ) )
gmpz_fits_slong_p_type = ctypes.CFUNCTYPE( ctypes.c_int, ctypes.POINTER( __mpz_struct ) )
gmpz_fits_slong_p = gmpz_fits_slong_p_type( ( libgmp_lib.undecorated_names["extern int __gmpz_fits_slong_p(mpz_srcptr arg0) [free function]"], libgmp_lib ) )
gmpf_floor_type = ctypes.CFUNCTYPE( None, ctypes.POINTER( __mpf_struct ), ctypes.POINTER( __mpf_struct ) )
gmpf_floor = gmpf_floor_type( ( libgmp_lib.undecorated_names["extern void __gmpf_floor(mpf_ptr arg0, mpf_srcptr arg1) [free function]"], libgmp_lib ) )
gmpf_out_str_type = ctypes.CFUNCTYPE( ctypes.c_size_t, ctypes.POINTER( _IO_FILE ), ctypes.c_int, ctypes.c_size_t, ctypes.POINTER( __mpf_struct ) )
gmpf_out_str = gmpf_out_str_type( ( libgmp_lib.undecorated_names["extern size_t __gmpf_out_str(FILE * arg0, int arg1, size_t arg2, mpf_srcptr arg3) [free function]"], libgmp_lib ) )
gmpn_divrem_type = ctypes.CFUNCTYPE( ctypes.c_ulong, ctypes.POINTER( ctypes.c_ulong ), ctypes.c_long, ctypes.POINTER( ctypes.c_ulong ), ctypes.c_long, ctypes.POINTER( ctypes.c_ulong ), ctypes.c_long )
gmpn_divrem = gmpn_divrem_type( ( libgmp_lib.undecorated_names["extern mp_limb_t __gmpn_divrem(mp_ptr arg0, mp_size_t arg1, mp_ptr arg2, mp_size_t arg3, mp_srcptr arg4, mp_size_t arg5) [free function]"], libgmp_lib ) )
gmpz_set_si_type = ctypes.CFUNCTYPE( None, ctypes.POINTER( __mpz_struct ), ctypes.c_long )
gmpz_set_si = gmpz_set_si_type( ( libgmp_lib.undecorated_names["extern void __gmpz_set_si(mpz_ptr arg0, long int arg1) [free function]"], libgmp_lib ) )
gmpz_combit_type = ctypes.CFUNCTYPE( None, ctypes.POINTER( __mpz_struct ), ctypes.c_ulong )
gmpz_combit = gmpz_combit_type( ( libgmp_lib.undecorated_names["extern void __gmpz_combit(mpz_ptr arg0, long unsigned int arg1) [free function]"], libgmp_lib ) )
gmpz_clrbit_type = ctypes.CFUNCTYPE( None, ctypes.POINTER( __mpz_struct ), ctypes.c_ulong )
gmpz_clrbit = gmpz_clrbit_type( ( libgmp_lib.undecorated_names["extern void __gmpz_clrbit(mpz_ptr arg0, long unsigned int arg1) [free function]"], libgmp_lib ) )
gmpf_fits_ulong_p_type = ctypes.CFUNCTYPE( ctypes.c_int, ctypes.POINTER( __mpf_struct ) )
gmpf_fits_ulong_p = gmpf_fits_ulong_p_type( ( libgmp_lib.undecorated_names["extern int __gmpf_fits_ulong_p(mpf_srcptr arg0) [free function]"], libgmp_lib ) )
gmpf_fits_uint_p_type = ctypes.CFUNCTYPE( ctypes.c_int, ctypes.POINTER( __mpf_struct ) )
gmpf_fits_uint_p = gmpf_fits_uint_p_type( ( libgmp_lib.undecorated_names["extern int __gmpf_fits_uint_p(mpf_srcptr arg0) [free function]"], libgmp_lib ) )
gmpz_set_ui_type = ctypes.CFUNCTYPE( None, ctypes.POINTER( __mpz_struct ), ctypes.c_ulong )
gmpz_set_ui = gmpz_set_ui_type( ( libgmp_lib.undecorated_names["extern void __gmpz_set_ui(mpz_ptr arg0, long unsigned int arg1) [free function]"], libgmp_lib ) )
gmpf_cmp_d_type = ctypes.CFUNCTYPE( ctypes.c_int, ctypes.POINTER( __mpf_struct ), ctypes.c_double )
gmpf_cmp_d = gmpf_cmp_d_type( ( libgmp_lib.undecorated_names["extern int __gmpf_cmp_d(mpf_srcptr arg0, double arg1) [free function]"], libgmp_lib ) )
gmpz_scan0_type = ctypes.CFUNCTYPE( ctypes.c_ulong, ctypes.POINTER( __mpz_struct ), ctypes.c_ulong )
gmpz_scan0 = gmpz_scan0_type( ( libgmp_lib.undecorated_names["extern long unsigned int __gmpz_scan0(mpz_srcptr arg0, long unsigned int arg1) [free function]"], libgmp_lib ) )
gmpz_scan1_type = ctypes.CFUNCTYPE( ctypes.c_ulong, ctypes.POINTER( __mpz_struct ), ctypes.c_ulong )
gmpz_scan1 = gmpz_scan1_type( ( libgmp_lib.undecorated_names["extern long unsigned int __gmpz_scan1(mpz_srcptr arg0, long unsigned int arg1) [free function]"], libgmp_lib ) )
gmpz_fdiv_r_ui_type = ctypes.CFUNCTYPE( ctypes.c_ulong, ctypes.POINTER( __mpz_struct ), ctypes.POINTER( __mpz_struct ), ctypes.c_ulong )
gmpz_fdiv_r_ui = gmpz_fdiv_r_ui_type( ( libgmp_lib.undecorated_names["extern long unsigned int __gmpz_fdiv_r_ui(mpz_ptr arg0, mpz_srcptr arg1, long unsigned int arg2) [free function]"], libgmp_lib ) )
gmpz_divexact_ui_type = ctypes.CFUNCTYPE( None, ctypes.POINTER( __mpz_struct ), ctypes.POINTER( __mpz_struct ), ctypes.c_ulong )
gmpz_divexact_ui = gmpz_divexact_ui_type( ( libgmp_lib.undecorated_names["extern void __gmpz_divexact_ui(mpz_ptr arg0, mpz_srcptr arg1, long unsigned int arg2) [free function]"], libgmp_lib ) )
gmpz_fdiv_r_2exp_type = ctypes.CFUNCTYPE( None, ctypes.POINTER( __mpz_struct ), ctypes.POINTER( __mpz_struct ), ctypes.c_ulong )
gmpz_fdiv_r_2exp = gmpz_fdiv_r_2exp_type( ( libgmp_lib.undecorated_names["extern void __gmpz_fdiv_r_2exp(mpz_ptr arg0, mpz_srcptr arg1, long unsigned int arg2) [free function]"], libgmp_lib ) )
gmpz_tdiv_q_2exp_type = ctypes.CFUNCTYPE( None, ctypes.POINTER( __mpz_struct ), ctypes.POINTER( __mpz_struct ), ctypes.c_ulong )
gmpz_tdiv_q_2exp = gmpz_tdiv_q_2exp_type( ( libgmp_lib.undecorated_names["extern void __gmpz_tdiv_q_2exp(mpz_ptr arg0, mpz_srcptr arg1, long unsigned int arg2) [free function]"], libgmp_lib ) )
gmpz_lucnum_ui_type = ctypes.CFUNCTYPE( None, ctypes.POINTER( __mpz_struct ), ctypes.c_ulong )
gmpz_lucnum_ui = gmpz_lucnum_ui_type( ( libgmp_lib.undecorated_names["extern void __gmpz_lucnum_ui(mpz_ptr arg0, long unsigned int arg1) [free function]"], libgmp_lib ) )
gmpf_fits_slong_p_type = ctypes.CFUNCTYPE( ctypes.c_int, ctypes.POINTER( __mpf_struct ) )
gmpf_fits_slong_p = gmpf_fits_slong_p_type( ( libgmp_lib.undecorated_names["extern int __gmpf_fits_slong_p(mpf_srcptr arg0) [free function]"], libgmp_lib ) )
gmpz_neg_type = ctypes.CFUNCTYPE( None, ctypes.POINTER( __mpz_struct ), ctypes.POINTER( __mpz_struct ) )
gmpz_neg = gmpz_neg_type( ( libgmp_lib.undecorated_names["void __gmpz_neg(mpz_ptr __gmp_w, mpz_srcptr __gmp_u) [free function]"], libgmp_lib ) )
gmpq_init_type = ctypes.CFUNCTYPE( None, ctypes.POINTER( __mpq_struct ) )
gmpq_init = gmpq_init_type( ( libgmp_lib.undecorated_names["extern void __gmpq_init(mpq_ptr arg0) [free function]"], libgmp_lib ) )
gmpn_sqrtrem_type = ctypes.CFUNCTYPE( ctypes.c_long, ctypes.POINTER( ctypes.c_ulong ), ctypes.POINTER( ctypes.c_ulong ), ctypes.POINTER( ctypes.c_ulong ), ctypes.c_long )
gmpn_sqrtrem = gmpn_sqrtrem_type( ( libgmp_lib.undecorated_names["extern mp_size_t __gmpn_sqrtrem(mp_ptr arg0, mp_ptr arg1, mp_srcptr arg2, mp_size_t arg3) [free function]"], libgmp_lib ) )
gmpz_com_type = ctypes.CFUNCTYPE( None, ctypes.POINTER( __mpz_struct ), ctypes.POINTER( __mpz_struct ) )
gmpz_com = gmpz_com_type( ( libgmp_lib.undecorated_names["extern void __gmpz_com(mpz_ptr arg0, mpz_srcptr arg1) [free function]"], libgmp_lib ) )
gmpz_divisible_2exp_p_type = ctypes.CFUNCTYPE( ctypes.c_int, ctypes.POINTER( __mpz_struct ), ctypes.c_ulong )
gmpz_divisible_2exp_p = gmpz_divisible_2exp_p_type( ( libgmp_lib.undecorated_names["extern int __gmpz_divisible_2exp_p(mpz_srcptr arg0, long unsigned int arg1) [free function]"], libgmp_lib ) )
gmpz_perfect_power_p_type = ctypes.CFUNCTYPE( ctypes.c_int, ctypes.POINTER( __mpz_struct ) )
gmpz_perfect_power_p = gmpz_perfect_power_p_type( ( libgmp_lib.undecorated_names["extern int __gmpz_perfect_power_p(mpz_srcptr arg0) [free function]"], libgmp_lib ) )
gmpz_cmp_type = ctypes.CFUNCTYPE( ctypes.c_int, ctypes.POINTER( __mpz_struct ), ctypes.POINTER( __mpz_struct ) )
gmpz_cmp = gmpz_cmp_type( ( libgmp_lib.undecorated_names["extern int __gmpz_cmp(mpz_srcptr arg0, mpz_srcptr arg1) [free function]"], libgmp_lib ) )
gmpz_dump_type = ctypes.CFUNCTYPE( None, ctypes.POINTER( __mpz_struct ) )
gmpz_dump = gmpz_dump_type( ( libgmp_lib.undecorated_names["extern void __gmpz_dump(mpz_srcptr arg0) [free function]"], libgmp_lib ) )
gmpz_init_set_si_type = ctypes.CFUNCTYPE( None, ctypes.POINTER( __mpz_struct ), ctypes.c_long )
gmpz_init_set_si = gmpz_init_set_si_type( ( libgmp_lib.undecorated_names["extern void __gmpz_init_set_si(mpz_ptr arg0, long int arg1) [free function]"], libgmp_lib ) )
gmpf_mul_ui_type = ctypes.CFUNCTYPE( None, ctypes.POINTER( __mpf_struct ), ctypes.POINTER( __mpf_struct ), ctypes.c_ulong )
gmpf_mul_ui = gmpf_mul_ui_type( ( libgmp_lib.undecorated_names["extern void __gmpf_mul_ui(mpf_ptr arg0, mpf_srcptr arg1, long unsigned int arg2) [free function]"], libgmp_lib ) )
gmpq_set_type = ctypes.CFUNCTYPE( None, ctypes.POINTER( __mpq_struct ), ctypes.POINTER( __mpq_struct ) )
gmpq_set = gmpq_set_type( ( libgmp_lib.undecorated_names["extern void __gmpq_set(mpq_ptr arg0, mpq_srcptr arg1) [free function]"], libgmp_lib ) )
gmpq_equal_type = ctypes.CFUNCTYPE( ctypes.c_int, ctypes.POINTER( __mpq_struct ), ctypes.POINTER( __mpq_struct ) )
gmpq_equal = gmpq_equal_type( ( libgmp_lib.undecorated_names["extern int __gmpq_equal(mpq_srcptr arg0, mpq_srcptr arg1) [free function]"], libgmp_lib ) )
gmpz_pow_ui_type = ctypes.CFUNCTYPE( None, ctypes.POINTER( __mpz_struct ), ctypes.POINTER( __mpz_struct ), ctypes.c_ulong )
gmpz_pow_ui = gmpz_pow_ui_type( ( libgmp_lib.undecorated_names["extern void __gmpz_pow_ui(mpz_ptr arg0, mpz_srcptr arg1, long unsigned int arg2) [free function]"], libgmp_lib ) )
gmpf_sub_ui_type = ctypes.CFUNCTYPE( None, ctypes.POINTER( __mpf_struct ), ctypes.POINTER( __mpf_struct ), ctypes.c_ulong )
gmpf_sub_ui = gmpf_sub_ui_type( ( libgmp_lib.undecorated_names["extern void __gmpf_sub_ui(mpf_ptr arg0, mpf_srcptr arg1, long unsigned int arg2) [free function]"], libgmp_lib ) )
gmpz_realloc2_type = ctypes.CFUNCTYPE( None, ctypes.POINTER( __mpz_struct ), ctypes.c_ulong )
gmpz_realloc2 = gmpz_realloc2_type( ( libgmp_lib.undecorated_names["extern void __gmpz_realloc2(mpz_ptr arg0, long unsigned int arg1) [free function]"], libgmp_lib ) )
gmpq_abs_type = ctypes.CFUNCTYPE( None, ctypes.POINTER( __mpq_struct ), ctypes.POINTER( __mpq_struct ) )
gmpq_abs = gmpq_abs_type( ( libgmp_lib.undecorated_names["void __gmpq_abs(mpq_ptr __gmp_w, mpq_srcptr __gmp_u) [free function]"], libgmp_lib ) )
gmpf_set_d_type = ctypes.CFUNCTYPE( None, ctypes.POINTER( __mpf_struct ), ctypes.c_double )
gmpf_set_d = gmpf_set_d_type( ( libgmp_lib.undecorated_names["extern void __gmpf_set_d(mpf_ptr arg0, double arg1) [free function]"], libgmp_lib ) )
gmpz_si_kronecker_type = ctypes.CFUNCTYPE( ctypes.c_int, ctypes.c_long, ctypes.POINTER( __mpz_struct ) )
gmpz_si_kronecker = gmpz_si_kronecker_type( ( libgmp_lib.undecorated_names["extern int __gmpz_si_kronecker(long int arg0, mpz_srcptr arg1) [free function]"], libgmp_lib ) )
gmpz_array_init_type = ctypes.CFUNCTYPE( None, ctypes.POINTER( __mpz_struct ), ctypes.c_long, ctypes.c_long )
gmpz_array_init = gmpz_array_init_type( ( libgmp_lib.undecorated_names["extern void __gmpz_array_init(mpz_ptr arg0, mp_size_t arg1, mp_size_t arg2) [free function]"], libgmp_lib ) )
gmpz_fits_sshort_p_type = ctypes.CFUNCTYPE( ctypes.c_int, ctypes.POINTER( __mpz_struct ) )
gmpz_fits_sshort_p = gmpz_fits_sshort_p_type( ( libgmp_lib.undecorated_names["extern int __gmpz_fits_sshort_p(mpz_srcptr arg0) [free function]"], libgmp_lib ) )
gmpz_kronecker_si_type = ctypes.CFUNCTYPE( ctypes.c_int, ctypes.POINTER( __mpz_struct ), ctypes.c_long )
gmpz_kronecker_si = gmpz_kronecker_si_type( ( libgmp_lib.undecorated_names["extern int __gmpz_kronecker_si(mpz_srcptr arg0, long int arg1) [free function]"], libgmp_lib ) )
gmpf_init2_type = ctypes.CFUNCTYPE( None, ctypes.POINTER( __mpf_struct ), ctypes.c_ulong )
gmpf_init2 = gmpf_init2_type( ( libgmp_lib.undecorated_names["extern void __gmpf_init2(mpf_ptr arg0, long unsigned int arg1) [free function]"], libgmp_lib ) )
gmpz_kronecker_ui_type = ctypes.CFUNCTYPE( ctypes.c_int, ctypes.POINTER( __mpz_struct ), ctypes.c_ulong )
gmpz_kronecker_ui = gmpz_kronecker_ui_type( ( libgmp_lib.undecorated_names["extern int __gmpz_kronecker_ui(mpz_srcptr arg0, long unsigned int arg1) [free function]"], libgmp_lib ) )
gmpf_cmp_ui_type = ctypes.CFUNCTYPE( ctypes.c_int, ctypes.POINTER( __mpf_struct ), ctypes.c_ulong )
gmpf_cmp_ui = gmpf_cmp_ui_type( ( libgmp_lib.undecorated_names["extern int __gmpf_cmp_ui(mpf_srcptr arg0, long unsigned int arg1) [free function]"], libgmp_lib ) )
gmpz_fdiv_q_type = ctypes.CFUNCTYPE( None, ctypes.POINTER( __mpz_struct ), ctypes.POINTER( __mpz_struct ), ctypes.POINTER( __mpz_struct ) )
gmpz_fdiv_q = gmpz_fdiv_q_type( ( libgmp_lib.undecorated_names["extern void __gmpz_fdiv_q(mpz_ptr arg0, mpz_srcptr arg1, mpz_srcptr arg2) [free function]"], libgmp_lib ) )
gmpz_fdiv_r_type = ctypes.CFUNCTYPE( None, ctypes.POINTER( __mpz_struct ), ctypes.POINTER( __mpz_struct ), ctypes.POINTER( __mpz_struct ) )
gmpz_fdiv_r = gmpz_fdiv_r_type( ( libgmp_lib.undecorated_names["extern void __gmpz_fdiv_r(mpz_ptr arg0, mpz_srcptr arg1, mpz_srcptr arg2) [free function]"], libgmp_lib ) )
gmpf_dump_type = ctypes.CFUNCTYPE( None, ctypes.POINTER( __mpf_struct ) )
gmpf_dump = gmpf_dump_type( ( libgmp_lib.undecorated_names["extern void __gmpf_dump(mpf_srcptr arg0) [free function]"], libgmp_lib ) )
gmpz_tdiv_r_2exp_type = ctypes.CFUNCTYPE( None, ctypes.POINTER( __mpz_struct ), ctypes.POINTER( __mpz_struct ), ctypes.c_ulong )
gmpz_tdiv_r_2exp = gmpz_tdiv_r_2exp_type( ( libgmp_lib.undecorated_names["extern void __gmpz_tdiv_r_2exp(mpz_ptr arg0, mpz_srcptr arg1, long unsigned int arg2) [free function]"], libgmp_lib ) )
gmpf_add_ui_type = ctypes.CFUNCTYPE( None, ctypes.POINTER( __mpf_struct ), ctypes.POINTER( __mpf_struct ), ctypes.c_ulong )
gmpf_add_ui = gmpf_add_ui_type( ( libgmp_lib.undecorated_names["extern void __gmpf_add_ui(mpf_ptr arg0, mpf_srcptr arg1, long unsigned int arg2) [free function]"], libgmp_lib ) )
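# A modular-exponentiation sketch tying several of the bindings above together
# (added, illustrative; not part of the generated file): computes
# pow(base, exp, mod) via __gmpz_powm and reads the small result back with
# __gmpz_get_si. Operands are leaked for brevity; a complete example would call
# the gmpz_clear binding defined elsewhere in this module.
def _powm_demo(base=7, exp=560, mod=561):
    rop, zb, ze, zm = (__mpz_struct() for _ in range(4))
    gmpz_init(ctypes.byref(rop))
    gmpz_init_set_ui(ctypes.byref(zb), base)
    gmpz_init_set_ui(ctypes.byref(ze), exp)
    gmpz_init_set_ui(ctypes.byref(zm), mod)
    gmpz_powm(ctypes.byref(rop), ctypes.byref(zb), ctypes.byref(ze), ctypes.byref(zm))
    return gmpz_get_si(ctypes.byref(rop))  # result < mod, so it fits a C long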
# --- next file in this dump: benchmarks/SimResults/_bigLittle_hrrs_spec_tugberk_ml/ratio_based_results/EightThreads_omnetpp/power.py
# --- repo TugberkArkose/MLScheduler @ 9aa0476e4a1d9f696e7921828f93291f9bf337ac (Unlicense); per-file dataset metric columns dropped
power = {'BUSES': {'Area': 1.33155,
'Bus/Area': 1.33155,
'Bus/Gate Leakage': 0.00662954,
'Bus/Peak Dynamic': 0.0,
'Bus/Runtime Dynamic': 0.0,
'Bus/Subthreshold Leakage': 0.0691322,
'Bus/Subthreshold Leakage with power gating': 0.0259246,
'Gate Leakage': 0.00662954,
'Peak Dynamic': 0.0,
'Runtime Dynamic': 0.0,
'Subthreshold Leakage': 0.0691322,
'Subthreshold Leakage with power gating': 0.0259246},
'Core': [{'Area': 32.6082,
'Execution Unit/Area': 8.2042,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 0.000148316,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.202805,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 0.00200126,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.122718,
'Execution Unit/Instruction Scheduler/Area': 2.17927,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.328073,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.00115349,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.20978,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.188557,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.017004,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00962066,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00730101,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 1.00996,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00529112,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 2.07911,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.326513,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0800117,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0455351,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 4.84781,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.841232,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.000856399,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.55892,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.187264,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.0178624,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00897339,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 0.702334,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.114878,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.0641291,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.186074,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 5.23068,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.000378081,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.00683535,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.0494348,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.0505516,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.0498129,
'Execution Unit/Register Files/Runtime Dynamic': 0.0573869,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0442632,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00607074,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.119495,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.31365,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.0920413,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0345155,
'Execution Unit/Runtime Dynamic': 1.68155,
'Execution Unit/Subthreshold Leakage': 1.83518,
'Execution Unit/Subthreshold Leakage with power gating': 0.709678,
'Gate Leakage': 0.372997,
'Instruction Fetch Unit/Area': 5.86007,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.00212108,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.00212108,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.00184648,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 0.000714271,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.000726177,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.00681483,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.0203716,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0590479,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.0485965,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 3.09115,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.190869,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.165056,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 5.46199,
'Instruction Fetch Unit/Runtime Dynamic': 0.431708,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932587,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.408542,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.077678,
'L2/Runtime Dynamic': 0.0223727,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80969,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 2.96843,
'Load Store Unit/Data Cache/Runtime Dynamic': 0.864489,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0351387,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.0560121,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.0560122,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 3.23401,
'Load Store Unit/Runtime Dynamic': 1.19673,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.138116,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.276233,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591622,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283406,
'Memory Management Unit/Area': 0.434579,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.0490179,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.0501455,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00813591,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.192197,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.0314062,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.435438,
'Memory Management Unit/Runtime Dynamic': 0.0815517,
'Memory Management Unit/Subthreshold Leakage': 0.0769113,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0399462,
'Peak Dynamic': 19.0015,
'Renaming Unit/Area': 0.369768,
'Renaming Unit/FP Front End RAT/Area': 0.168486,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00489731,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 3.33511,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.00131894,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0437281,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.024925,
'Renaming Unit/Free List/Area': 0.0414755,
'Renaming Unit/Free List/Gate Leakage': 4.15911e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0401324,
'Renaming Unit/Free List/Runtime Dynamic': 0.00965764,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000670426,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000377987,
'Renaming Unit/Gate Leakage': 0.00863632,
'Renaming Unit/Int Front End RAT/Area': 0.114751,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.00038343,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.86945,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.0974755,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00611897,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00348781,
'Renaming Unit/Peak Dynamic': 4.56169,
'Renaming Unit/Runtime Dynamic': 0.108452,
'Renaming Unit/Subthreshold Leakage': 0.070483,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0362779,
'Runtime Dynamic': 3.52237,
'Subthreshold Leakage': 6.21877,
'Subthreshold Leakage with power gating': 2.58311},
{'Area': 32.0201,
'Execution Unit/Area': 7.68434,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 5.38473e-05,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.202731,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 0.000746361,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.120359,
'Execution Unit/Instruction Scheduler/Area': 1.66526,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.275653,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.000977433,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.04181,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.0616127,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.0143453,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00810519,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00568913,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 0.805223,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00414562,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 1.6763,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.099379,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0625755,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0355964,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 3.82262,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.584388,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.00056608,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.10451,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.0501632,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.00906853,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00364446,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 0.211155,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.0859892,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.047346,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.0703534,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 3.96143,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.000141004,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.00258431,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.0186897,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.0191126,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.0188307,
'Execution Unit/Register Files/Runtime Dynamic': 0.0216969,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0390912,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00537402,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.0393874,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.10342,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.081478,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0305543,
'Execution Unit/Runtime Dynamic': 0.94438,
'Execution Unit/Subthreshold Leakage': 1.79543,
'Execution Unit/Subthreshold Leakage with power gating': 0.688821,
'Gate Leakage': 0.368936,
'Instruction Fetch Unit/Area': 5.85939,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.000895998,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.000895998,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.000805511,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 0.000325553,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.000274554,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.00287206,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.00769405,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0589979,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.0183734,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 1.16871,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.0721338,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.0624044,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 3.44394,
'Instruction Fetch Unit/Runtime Dynamic': 0.163478,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932286,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.40843,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.0298131,
'L2/Runtime Dynamic': 0.00900874,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80901,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 1.89219,
'Load Store Unit/Data Cache/Runtime Dynamic': 0.328005,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0350888,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.021193,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.0211931,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 1.99227,
'Load Store Unit/Runtime Dynamic': 0.453715,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.0522584,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.104517,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591321,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283293,
'Memory Management Unit/Area': 0.4339,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.0185467,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.0189794,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00808595,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.0726658,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.0118701,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.260635,
'Memory Management Unit/Runtime Dynamic': 0.0308495,
'Memory Management Unit/Subthreshold Leakage': 0.0766103,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0398333,
'Peak Dynamic': 13.2776,
'Renaming Unit/Area': 0.303608,
'Renaming Unit/FP Front End RAT/Area': 0.131045,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00351123,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 2.51468,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.000371004,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0308571,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.0175885,
'Renaming Unit/Free List/Area': 0.0340654,
'Renaming Unit/Free List/Gate Leakage': 2.5481e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0306032,
'Renaming Unit/Free List/Runtime Dynamic': 0.00278431,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000370144,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000201064,
'Renaming Unit/Gate Leakage': 0.00708398,
'Renaming Unit/Int Front End RAT/Area': 0.0941223,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.000283242,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.731965,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.0312178,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00435488,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00248228,
'Renaming Unit/Peak Dynamic': 3.58947,
'Renaming Unit/Runtime Dynamic': 0.0343731,
'Renaming Unit/Subthreshold Leakage': 0.0552466,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0276461,
'Runtime Dynamic': 1.6358,
'Subthreshold Leakage': 6.16288,
'Subthreshold Leakage with power gating': 2.55328},
{'Area': 32.0201,
'Execution Unit/Area': 7.68434,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 4.15664e-05,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.202721,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 0.000569258,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.120359,
'Execution Unit/Instruction Scheduler/Area': 1.66526,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.275653,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.000977433,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.04181,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.0459805,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.0143453,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00810519,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00568913,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 0.805223,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00414562,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 1.6763,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.0741648,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0625755,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0355964,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 3.82262,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.584388,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.00056608,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.10451,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.0374359,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.00906853,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00364446,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 0.157581,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.0859892,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.047346,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.0525003,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 3.92623,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.000107545,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.00192863,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.0139478,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.0142634,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.0140553,
'Execution Unit/Register Files/Runtime Dynamic': 0.016192,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0390912,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00537402,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.0293944,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.0771684,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.081478,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0305543,
'Execution Unit/Runtime Dynamic': 0.85904,
'Execution Unit/Subthreshold Leakage': 1.79543,
'Execution Unit/Subthreshold Leakage with power gating': 0.688821,
'Gate Leakage': 0.368936,
'Instruction Fetch Unit/Area': 5.85939,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.00066911,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.00066911,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.000601491,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 0.000243073,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.000204895,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.00214461,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.00574737,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0589979,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.0137118,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 0.872185,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.0538767,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.0465713,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 3.13303,
'Instruction Fetch Unit/Runtime Dynamic': 0.122052,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932286,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.40843,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.022398,
'L2/Runtime Dynamic': 0.0069425,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80901,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 1.72561,
'Load Store Unit/Data Cache/Runtime Dynamic': 0.244943,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0350888,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.0158039,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.0158038,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 1.80024,
'Load Store Unit/Runtime Dynamic': 0.338686,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.0389698,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.0779391,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591321,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283293,
'Memory Management Unit/Area': 0.4339,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.0138305,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.0141556,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00808595,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.0542293,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.00886549,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.234096,
'Memory Management Unit/Runtime Dynamic': 0.0230211,
'Memory Management Unit/Subthreshold Leakage': 0.0766103,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0398333,
'Peak Dynamic': 12.7055,
'Renaming Unit/Area': 0.303608,
'Renaming Unit/FP Front End RAT/Area': 0.131045,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00351123,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 2.51468,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.000282339,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0308571,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.0175885,
'Renaming Unit/Free List/Area': 0.0340654,
'Renaming Unit/Free List/Gate Leakage': 2.5481e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0306032,
'Renaming Unit/Free List/Runtime Dynamic': 0.00207795,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000370144,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000201064,
'Renaming Unit/Gate Leakage': 0.00708398,
'Renaming Unit/Int Front End RAT/Area': 0.0941223,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.000283242,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.731965,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.0232957,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00435488,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00248228,
'Renaming Unit/Peak Dynamic': 3.58947,
'Renaming Unit/Runtime Dynamic': 0.025656,
'Renaming Unit/Subthreshold Leakage': 0.0552466,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0276461,
'Runtime Dynamic': 1.3754,
'Subthreshold Leakage': 6.16288,
'Subthreshold Leakage with power gating': 2.55328},
{'Area': 32.0201,
'Execution Unit/Area': 7.68434,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 3.49535e-05,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.202716,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 0.000473117,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.120359,
'Execution Unit/Instruction Scheduler/Area': 1.66526,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.275653,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.000977433,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.04181,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.0386929,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.0143453,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00810519,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00568913,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 0.805223,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00414562,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 1.6763,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.0624102,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0625755,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0355964,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 3.82262,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.584388,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.00056608,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.10451,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.0315025,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.00906853,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00364446,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 0.132606,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.0859892,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.047346,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.04418,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 3.9098,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 8.93819e-05,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.00162295,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.0117373,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.0120027,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.0118267,
'Execution Unit/Register Files/Runtime Dynamic': 0.0136257,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0390912,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00537402,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.0247359,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.0649373,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.081478,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0305543,
'Execution Unit/Runtime Dynamic': 0.819262,
'Execution Unit/Subthreshold Leakage': 1.79543,
'Execution Unit/Subthreshold Leakage with power gating': 0.688821,
'Gate Leakage': 0.368936,
'Instruction Fetch Unit/Area': 5.85939,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.000562967,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.000562967,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.000506072,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 0.000204511,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.00017242,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.00180443,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.00483575,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0589979,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.0115385,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 0.733948,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.0453187,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.03919,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 2.98808,
'Instruction Fetch Unit/Runtime Dynamic': 0.102687,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932286,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.40843,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.0189678,
'L2/Runtime Dynamic': 0.00595475,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80901,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 1.64827,
'Load Store Unit/Data Cache/Runtime Dynamic': 0.206331,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0350888,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.0133017,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.0133018,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 1.71109,
'Load Store Unit/Runtime Dynamic': 0.285233,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.0327998,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.0656001,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591321,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283293,
'Memory Management Unit/Area': 0.4339,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.0116408,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.0119166,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00808595,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.0456342,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.00745639,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.22174,
'Memory Management Unit/Runtime Dynamic': 0.019373,
'Memory Management Unit/Subthreshold Leakage': 0.0766103,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0398333,
'Peak Dynamic': 12.4391,
'Renaming Unit/Area': 0.303608,
'Renaming Unit/FP Front End RAT/Area': 0.131045,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00351123,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 2.51468,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.000235542,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0308571,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.0175885,
'Renaming Unit/Free List/Area': 0.0340654,
'Renaming Unit/Free List/Gate Leakage': 2.5481e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0306032,
'Renaming Unit/Free List/Runtime Dynamic': 0.00174858,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000370144,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000201064,
'Renaming Unit/Gate Leakage': 0.00708398,
'Renaming Unit/Int Front End RAT/Area': 0.0941223,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.000283242,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.731965,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.0196037,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00435488,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00248228,
'Renaming Unit/Peak Dynamic': 3.58947,
'Renaming Unit/Runtime Dynamic': 0.0215879,
'Renaming Unit/Subthreshold Leakage': 0.0552466,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0276461,
'Runtime Dynamic': 1.2541,
'Subthreshold Leakage': 6.16288,
'Subthreshold Leakage with power gating': 2.55328}],
'DRAM': {'Area': 0,
'Gate Leakage': 0,
'Peak Dynamic': 4.940640148578696,
'Runtime Dynamic': 4.940640148578696,
'Subthreshold Leakage': 4.252,
'Subthreshold Leakage with power gating': 4.252},
'L3': [{'Area': 61.9075,
'Gate Leakage': 0.0484137,
'Peak Dynamic': 0.284496,
'Runtime Dynamic': 0.129592,
'Subthreshold Leakage': 6.80085,
'Subthreshold Leakage with power gating': 3.32364}],
'Processor': {'Area': 191.908,
'Gate Leakage': 1.53485,
'Peak Dynamic': 57.7082,
'Peak Power': 90.8204,
'Runtime Dynamic': 7.91726,
'Subthreshold Leakage': 31.5774,
'Subthreshold Leakage with power gating': 13.9484,
'Total Cores/Area': 128.669,
'Total Cores/Gate Leakage': 1.4798,
'Total Cores/Peak Dynamic': 57.4237,
'Total Cores/Runtime Dynamic': 7.78767,
'Total Cores/Subthreshold Leakage': 24.7074,
'Total Cores/Subthreshold Leakage with power gating': 10.2429,
'Total L3s/Area': 61.9075,
'Total L3s/Gate Leakage': 0.0484137,
'Total L3s/Peak Dynamic': 0.284496,
'Total L3s/Runtime Dynamic': 0.129592,
'Total L3s/Subthreshold Leakage': 6.80085,
'Total L3s/Subthreshold Leakage with power gating': 3.32364,
'Total Leakage': 33.1122,
'Total NoCs/Area': 1.33155,
'Total NoCs/Gate Leakage': 0.00662954,
'Total NoCs/Peak Dynamic': 0.0,
'Total NoCs/Runtime Dynamic': 0.0,
'Total NoCs/Subthreshold Leakage': 0.0691322,
'Total NoCs/Subthreshold Leakage with power gating': 0.0259246}} | 75.179431 | 124 | 0.68251 | 8,086 | 68,714 | 5.793965 | 0.067648 | 0.123287 | 0.1127 | 0.093234 | 0.93682 | 0.928965 | 0.916606 | 0.888687 | 0.861089 | 0.840491 | 0 | 0.133507 | 0.223986 | 68,714 | 914 | 125 | 75.179431 | 0.745101 | 0 | 0 | 0.642232 | 0 | 0 | 0.656407 | 0.048024 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
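A quick consistency check on the statistics above: the four per-core 'Runtime Dynamic' values (3.52237, 1.6358, 1.3754, 1.2541) sum to the reported 'Total Cores/Runtime Dynamic' of 7.78767, and the processor's 'Peak Power' is 'Peak Dynamic' plus 'Total Leakage' (57.7082 + 33.1122 = 90.8204). A minimal sketch of that check, assuming the nested dict is bound to a variable named mcpat_stats and that the per-core list sits under a 'Core' key (both names are assumptions; the enclosing file is not shown here):

def check_totals(mcpat_stats, tol=1e-3):
    # 'Core' is an assumed key for the list of per-core dicts shown above.
    cores = mcpat_stats['Core']
    processor = mcpat_stats['Processor']
    # Per-core runtime dynamic power should sum to the processor-level total.
    core_runtime = sum(core['Runtime Dynamic'] for core in cores)
    assert abs(core_runtime - processor['Total Cores/Runtime Dynamic']) < tol
    # Peak power = peak dynamic + total leakage (90.8204 = 57.7082 + 33.1122).
    total_peak = processor['Peak Dynamic'] + processor['Total Leakage']
    assert abs(total_peak - processor['Peak Power']) < tol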
9aaff9e46df8de02630e03325d431d2cdc5d070b | 11,177 | py | Python | crankycoin/test/test_node.py | CompeteLeak/crankycoin | 9376fbd3095429f2d46a3e4436023f814bb2e36a | ["MIT"] | null | null | null | crankycoin/test/test_node.py | CompeteLeak/crankycoin | 9376fbd3095429f2d46a3e4436023f814bb2e36a | ["MIT"] | null | null | null | crankycoin/test/test_node.py | CompeteLeak/crankycoin | 9376fbd3095429f2d46a3e4436023f814bb2e36a | ["MIT"] | null | null | null | import unittest
from mock import patch, Mock, MagicMock, call, PropertyMock
from crankycoin.node import *
class TestNode(unittest.TestCase):
def test_request_nodes_whenValidNode_thenRequestsNodes(self):
mock_response = Mock()
mock_response.status_code = 200
mock_response.json.return_value = {"full_nodes": ["127.0.0.2", "127.0.0.1", "127.0.0.3"]}
with patch.object(FullNode, '__init__', return_value=None) as patched_init, \
patch("crankycoin.requests.get", return_value=mock_response) as patched_requests:
node = FullNode("127.0.0.1", "reward_address")
nodes = node.request_nodes("127.0.0.2", "30013")
self.assertIsNotNone(nodes)
self.assertEqual(nodes, {"full_nodes": ["127.0.0.2", "127.0.0.1", "127.0.0.3"]})
patched_requests.assert_called_once_with('http://127.0.0.2:30013/nodes')
def test_request_nodes_whenNon200Status_thenReturnsNone(self):
mock_response = Mock()
mock_response.status_code = 404
with patch.object(FullNode, '__init__', return_value=None) as patched_init, \
patch("crankycoin.requests.get", return_value=mock_response) as patched_requests:
node = FullNode("127.0.0.1", "reward_address")
nodes = node.request_nodes("127.0.0.2", "30013")
self.assertIsNone(nodes)
patched_requests.assert_called_once_with('http://127.0.0.2:30013/nodes')
def test_request_nodes_whenRequestError_thenReturnsNone(self):
with patch.object(FullNode, '__init__', return_value=None) as patched_init, \
patch("crankycoin.requests.get", side_effect=requests.exceptions.RequestException()) as patched_requests:
node = FullNode("127.0.0.1", "reward_address")
nodes = node.request_nodes("127.0.0.2", "30013")
self.assertIsNone(nodes)
patched_requests.assert_called_once_with('http://127.0.0.2:30013/nodes')
def test_request_nodes_from_all_SetsFullNodesPropertyOnClass(self):
nodes_one = {"full_nodes": ["127.0.0.2", "127.0.0.1", "127.0.0.4"]}
nodes_two = {"full_nodes": ["127.0.0.2", "127.0.0.3", "127.0.0.5"]}
nodes_three = {"full_nodes": ["127.0.0.1", "127.0.0.3", "127.0.0.4"]}
with patch.object(FullNode, '__init__', return_value=None) as patched_init, \
patch.object(FullNode, 'request_nodes', side_effect=[nodes_one, nodes_two, nodes_three]) as patched_request_nodes:
node = FullNode("127.0.0.1", "reward_address")
node.full_nodes = {"127.0.0.1", "127.0.1.1", "127.0.1.2"}
node.request_nodes_from_all()
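            # full_nodes should become the union of the node's own set and every peer list returned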
self.assertEqual(node.full_nodes, {"127.0.0.2", "127.0.0.1", "127.0.0.3", "127.0.0.4", "127.0.0.5", "127.0.1.1", "127.0.1.2"})
def test_broadcast_transaction_thenBroadcastsToAllNodes(self):
with patch.object(FullNode, '__init__', return_value=None) as patched_init, \
patch.object(FullNode, 'request_nodes_from_all') as patched_request_nodes_from_all, \
patch("crankycoin.time.time", return_value="1508823223") as patched_time_time, \
patch("crankycoin.requests.post") as patched_requests:
transaction = Transaction("source", "destination", 0, 0)
node = FullNode("127.0.0.1", "reward_address")
node.full_nodes = {"127.0.0.1", "127.0.0.2", "127.0.0.3"}
node.broadcast_transaction(transaction)
patched_request_nodes_from_all.assert_called_once()
patched_requests.assert_has_calls([
call("http://127.0.0.1:30013/transactions", json={'transaction': '{"amount": 0, "destination": "destination", "fee": 0, "signature": null, "source": "source", "timestamp": 1508823223, "tx_hash": null}'}),
call("http://127.0.0.2:30013/transactions", json={'transaction': '{"amount": 0, "destination": "destination", "fee": 0, "signature": null, "source": "source", "timestamp": 1508823223, "tx_hash": null}'}),
call("http://127.0.0.3:30013/transactions", json={'transaction': '{"amount": 0, "destination": "destination", "fee": 0, "signature": null, "source": "source", "timestamp": 1508823223, "tx_hash": null}'})
            ], any_order=True)
def test_broadcast_transaction_whenRequestException_thenFailsGracefully(self):
with patch.object(FullNode, '__init__', return_value=None) as patched_init, \
patch.object(FullNode, 'request_nodes_from_all') as patched_request_nodes_from_all, \
patch("crankycoin.time.time", return_value="1508823223") as patched_time_time, \
patch("crankycoin.requests.post", side_effect=requests.exceptions.RequestException()) as patched_requests:
transaction = Transaction("source", "destination", 0, 0)
node = FullNode("127.0.0.1", "reward_address")
node.full_nodes = {"127.0.0.1", "127.0.0.2", "127.0.0.3"}
node.broadcast_transaction(transaction)
patched_request_nodes_from_all.assert_called_once()
patched_requests.assert_has_calls([
call("http://127.0.0.1:30013/transactions", json={'transaction': '{"amount": 0, "destination": "destination", "fee": 0, "signature": null, "source": "source", "timestamp": 1508823223, "tx_hash": null}'}),
call("http://127.0.0.2:30013/transactions", json={'transaction': '{"amount": 0, "destination": "destination", "fee": 0, "signature": null, "source": "source", "timestamp": 1508823223, "tx_hash": null}'}),
call("http://127.0.0.3:30013/transactions", json={'transaction': '{"amount": 0, "destination": "destination", "fee": 0, "signature": null, "source": "source", "timestamp": 1508823223, "tx_hash": null}'})
            ], any_order=True)
def test_request_block_whenIndexIsLatest_thenRequestsLatestBlockFromNode(self):
mock_response = Mock()
mock_response.status_code = 200
mock_response.json.return_value = '{"nonce": 12345, "index": 35, "transactions": [{"amount": 0, "destination": "destination", "fee": 0, "signature": "signature", "source": "source", "timestamp": 1508823223, "tx_hash": null}], "timestamp": 1234567890, "current_hash": "current_hash", "previous_hash": "previous_hash"}'
transaction = Transaction("source", "destination", 0, 0, "signature")
with patch.object(FullNode, '__init__', return_value=None) as patched_init, \
patch("crankycoin.node.Block.current_hash", new_callable=PropertyMock) as patched_block_current_hash, \
patch("crankycoin.requests.get", return_value=mock_response) as patched_requests:
patched_block_current_hash.return_value = "current_hash"
node = FullNode("127.0.0.1", "reward_address")
block = node.request_block("127.0.0.2", "30013", "latest")
self.assertIsNotNone(block)
self.assertEqual(block.index, 35)
self.assertEqual(block.transactions, [transaction])
self.assertEqual(block.block_header.previous_hash, "previous_hash")
self.assertEqual(block.current_hash, "current_hash")
self.assertEqual(block.block_header.timestamp, 1234567890)
self.assertEqual(block.block_header.nonce, 12345)
patched_requests.assert_called_once_with('http://127.0.0.2:30013/block/latest')
def test_request_block_whenIndexIsNumeric_thenRequestsCorrectBlockFromNode(self):
mock_response = Mock()
mock_response.status_code = 200
mock_response.json.return_value = '{"nonce": 12345, "index": 29, "transactions": [{"amount": 0, "destination": "destination", "fee": 0, "signature": "signature", "source": "source", "timestamp": 1508823223, "tx_hash": null}], "timestamp": 1234567890, "current_hash": "current_hash", "previous_hash": "previous_hash"}'
transaction = Transaction("source", "destination", 0, 0, "signature")
with patch.object(FullNode, '__init__', return_value=None) as patched_init, \
patch("crankycoin.node.Block.current_hash", new_callable=PropertyMock) as patched_block_current_hash, \
patch("crankycoin.requests.get", return_value=mock_response) as patched_requests:
patched_block_current_hash.return_value = "current_hash"
node = FullNode("127.0.0.1", "reward_address")
block = node.request_block("127.0.0.2", "30013", 29)
self.assertIsNotNone(block)
self.assertEqual(block.index, 29)
self.assertEqual(block.transactions, [transaction])
self.assertEqual(block.block_header.previous_hash, "previous_hash")
self.assertEqual(block.current_hash, "current_hash")
self.assertEqual(block.block_header.timestamp, 1234567890)
self.assertEqual(block.block_header.nonce, 12345)
patched_requests.assert_called_once_with('http://127.0.0.2:30013/block/29')
def test_request_block_whenRequestException_thenReturnsNone(self):
with patch.object(FullNode, '__init__', return_value=None) as patched_init, \
patch("crankycoin.requests.get", side_effect=requests.exceptions.RequestException()) as patched_requests:
node = FullNode("127.0.0.1", "reward_address")
block = node.request_block("127.0.0.2", "30013", "latest")
self.assertIsNone(block)
patched_requests.assert_called_once_with('http://127.0.0.2:30013/block/latest')
def test_request_block_from_all_whenIndexIsLatest_thenReturnsLatestBlockFromAll(self):
block = Mock(Block)
with patch.object(FullNode, '__init__', return_value=None) as patched_init, \
patch.object(FullNode, 'request_block', side_effect=[block, block, block]) as patched_request_block:
node = FullNode("127.0.0.1", "reward_address")
node.full_nodes = {"127.0.0.1", "127.0.0.2", "127.0.0.3"}
blocks = node.request_block_from_all("latest")
self.assertEqual(blocks, [block, block, block])
patched_request_block.assert_has_calls([
call("127.0.0.1", 30013, "latest"),
call("127.0.0.2", 30013, "latest"),
call("127.0.0.3", 30013, "latest")
            ], any_order=True)
def test_request_blocks_range(self):
pass
def test_request_blockchain(self):
pass
def test_mine(self):
pass
def test_broadcast_block(self):
pass
def test_add_node(self):
pass
def test_broadcast_node(self):
pass
def test_load_blockchain(self):
pass
def test_synchronize(self):
pass
def test_generate_ecc_instance(self):
pass
def test_get_pubkey(self):
pass
def test_get_privkey(self):
pass
def test_sign(self):
pass
def test_verify(self):
pass
def test_get_balance(self):
pass
def test_create_transaction(self):
pass
def test_calculate_transaction_hash(self):
pass
def test_generate_signable_transaction(self):
pass
| 51.506912 | 325 | 0.649459 | 1,368 | 11,177 | 5.059942 | 0.091374 | 0.037561 | 0.044062 | 0.019936 | 0.825484 | 0.791823 | 0.785322 | 0.77102 | 0.755562 | 0.751372 | 0 | 0.079531 | 0.20578 | 11,177 | 216 | 326 | 51.74537 | 0.700237 | 0 | 0 | 0.638037 | 0 | 0.04908 | 0.284871 | 0.026662 | 0 | 0 | 0 | 0 | 0.196319 | 1 | 0.165644 | false | 0.104294 | 0.018405 | 0 | 0.190184 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 7 |
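The tests above fully determine the expected behavior of FullNode.request_nodes: issue a GET to http://<host>:<port>/nodes, return the parsed JSON body on HTTP 200, and return None on any other status or on a requests exception. A minimal sketch consistent with those assertions (reconstructed from the tests, not taken from the crankycoin source):

import requests

def request_nodes(self, host, port):
    # The tests expect exactly 'http://<host>:<port>/nodes'.
    url = "http://{}:{}/nodes".format(host, port)
    try:
        response = requests.get(url)
        if response.status_code == 200:
            return response.json()  # e.g. {"full_nodes": ["127.0.0.2", ...]}
    except requests.exceptions.RequestException:
        pass
    # Non-200 responses and request failures both yield None.
    return None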
9ac25e045f6249a7ee8c539a064242ce6e626721 | 5,028 | py | Python | scripts/multi-label.py | rhong3/GYN-HE-IHC | fa5ae2a8becd8afeea5dba16b16f953f0e367fe1 | ["MIT"] | null | null | null | scripts/multi-label.py | rhong3/GYN-HE-IHC | fa5ae2a8becd8afeea5dba16b16f953f0e367fe1 | ["MIT"] | null | null | null | scripts/multi-label.py | rhong3/GYN-HE-IHC | fa5ae2a8becd8afeea5dba16b16f953f0e367fe1 | ["MIT"] | null | null | null | # Match IHC-labeled tiles
import pandas as pd


def merge_pair(patient, level, he_id, marker_a, marker_b):
    # Read the per-tile label files for two IHC markers and combine them:
    # summing the 0/1 labels and clipping to [0, 1] is a logical OR.
    path = '../tiles/{}/level{}/{}_{}_label.csv'
    df_a = pd.read_csv(path.format(patient, level, he_id, marker_a), header=0)
    df_a = df_a.drop(columns=['ratio'])
    df_b = pd.read_csv(path.format(patient, level, he_id, marker_b),
                       header=0, usecols=['Num', 'label'])
    out = df_a.merge(df_b, how='inner', on='Num',
                     suffixes=['_' + marker_a, '_' + marker_b])
    out['label'] = (out['label_' + marker_a] + out['label_' + marker_b]).clip(lower=0, upper=1)
    return out[['Num', 'X_pos', 'Y_pos', 'X', 'Y', 'Loc', 'label']]


if __name__ == '__main__':
    ref = pd.read_csv('../align/final_summary_full.csv', header=0)
    out_path = '../tiles/{}/level{}/{}_{}_label.csv'
    for idx, row in ref.iterrows():
        for level in range(1, 4):
            # PMS2 + MLH1 pair
            try:
                out_dicta = merge_pair(row['Patient_ID'], level, row['H&E_ID'], 'PMS2', 'MLH1')
                out_dicta.to_csv(out_path.format(row['Patient_ID'], level, row['H&E_ID'], 'PMS2-MLH1'),
                                 index=False)
            except FileNotFoundError as e:
                print(e)
            # MSH2 + MSH6 pair
            try:
                out_dictb = merge_pair(row['Patient_ID'], level, row['H&E_ID'], 'MSH2', 'MSH6')
                out_dictb.to_csv(out_path.format(row['Patient_ID'], level, row['H&E_ID'], 'MSH2-MSH6'),
                                 index=False)
            except FileNotFoundError as e:
                print(e)
            # All four markers combined; recomputed from scratch so a failure
            # in either pair above does not leave stale frames behind.
            try:
                out_dicta = merge_pair(row['Patient_ID'], level, row['H&E_ID'], 'PMS2', 'MLH1')
                out_dictb = merge_pair(row['Patient_ID'], level, row['H&E_ID'], 'MSH2', 'MSH6')
                out_dict = out_dicta.merge(out_dictb, how='inner',
                                           on=['Num', 'X_pos', 'Y_pos', 'X', 'Y', 'Loc'],
                                           suffixes=['_a', '_b'])
                out_dict['label'] = (out_dict['label_a'] + out_dict['label_b']).clip(lower=0, upper=1)
                out_dict = out_dict[['Num', 'X_pos', 'Y_pos', 'X', 'Y', 'Loc', 'label']]
                out_dict.to_csv(out_path.format(row['Patient_ID'], level, row['H&E_ID'], 'PMS2-MLH1-MSH2-MSH6'),
                                index=False)
            except FileNotFoundError as e:
                print(e)
| 65.298701 | 123 | 0.444113 | 556 | 5,028 | 3.758993 | 0.124101 | 0.0689 | 0.068421 | 0.089474 | 0.898565 | 0.898565 | 0.878469 | 0.845455 | 0.837799 | 0.837799 | 0 | 0.02464 | 0.378481 | 5,028 | 76 | 124 | 66.157895 | 0.64416 | 0.004574 | 0 | 0.764706 | 0 | 0 | 0.216357 | 0.092581 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.014706 | 0 | 0.014706 | 0.044118 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
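The label-combination step in the script above (sum two 0/1 columns, then clip to [0, 1]) is a logical OR over binary tile labels. A small self-contained pandas sketch of just that rule, with illustrative column names:

# The sum-then-clip idiom: result is 1 when either marker labels the tile.
import pandas as pd

a = pd.DataFrame({"Num": [1, 2, 3, 4], "label_A": [0, 0, 1, 1]})
b = pd.DataFrame({"Num": [1, 2, 3, 4], "label_B": [0, 1, 0, 1]})

merged = a.merge(b, how="inner", on="Num")
merged["label"] = (merged["label_A"] + merged["label_B"]).clip(lower=0, upper=1)
print(merged)  # label column: 0, 1, 1, 1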
9ae7b2d6f15efe43e1ca1e008f4053cb7fc97257 | 5,016 | py | Python | W2_P3/W2_P3b.py | ParallelVoid/ptc-coding-challenge | 5e414869de1657f395d3d5ecfe69b07c55b2f804 | ["MIT"] | null | null | null | W2_P3/W2_P3b.py | ParallelVoid/ptc-coding-challenge | 5e414869de1657f395d3d5ecfe69b07c55b2f804 | ["MIT"] | null | null | null | W2_P3/W2_P3b.py | ParallelVoid/ptc-coding-challenge | 5e414869de1657f395d3d5ecfe69b07c55b2f804 | ["MIT"] | null | null | null | import sys
def find_move(board, mark):
    # Scan every row, column, and diagonal; if a line already holds two of
    # `mark` and one empty cell, return that cell as (x, y) = (col, row).
    # (The original column scan reused the leaked `row` loop variable when
    # indexing the board and could return a coordinate it never found.)
    lines = []
    for i in range(3):
        lines.append([(i, 0), (i, 1), (i, 2)])    # row i
        lines.append([(0, i), (1, i), (2, i)])    # column i
    lines.append([(0, 0), (1, 1), (2, 2)])        # main diagonal
    lines.append([(0, 2), (1, 1), (2, 0)])        # anti-diagonal
    for line in lines:
        values = [board[r][c] for (r, c) in line]
        if values.count(mark) == 2 and values.count('.') == 1:
            r, c = line[values.index('.')]
            return (c, r)
    return (-1, -1)


def doWin(board):
    # Return the square that completes a line of 'x', or (-1, -1).
    return find_move(board, 'x')


def doBlock(board):
    # Return the square that stops 'o' from completing a line, or (-1, -1).
    return find_move(board, 'o')
board = []
with open(sys.argv[1]) as file:
    result = [list(line.rstrip()) for line in file]
if len(result) != 3:
    sys.exit()
command = ''.join(result[2])
board = result[0:]

xy = doWin(board)
if xy[0] != -1 and xy[1] != -1:
    print("{} {}".format(xy[0] + 1, xy[1] + 1))
else:
    # Prints "0 0" when neither player threatens a completed line.
    xy = doBlock(board)
    print("{} {}".format(xy[0] + 1, xy[1] + 1))
| 25.989637 | 69 | 0.386762 | 625 | 5,016 | 3.104 | 0.0736 | 0.115464 | 0.16701 | 0.123711 | 0.902062 | 0.902062 | 0.902062 | 0.9 | 0.9 | 0.9 | 0 | 0.068468 | 0.44677 | 5,016 | 192 | 70 | 26.125 | 0.630631 | 0.041268 | 0 | 0.802817 | 0 | 0 | 0.009591 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.014085 | false | 0 | 0.007042 | 0 | 0.147887 | 0.014085 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
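A quick sanity check of the refactored move finder above; this snippet assumes it runs in the same module so find_move is in scope, and the board literal is purely illustrative.

# Two 'x' marks in the top row: x wins by taking column 3 (printed 1-based as "3 1").
board = [list("xx."),
         list("o.."),
         list("o..")]
assert find_move(board, 'x') == (2, 0)    # zero-based (x, y): column 2, row 0
assert find_move(board, 'o') == (-1, -1)  # no immediate winning square for 'o'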
b1003f151e88e54c19a84274797bba7057fad6f5 | 6,482 | py | Python | models.py | nildip/DeepLearn_NUMBERS | 57171abc5d922df3aae24b8b86bc4674371608e9 | ["MIT"] | 1 | 2018-08-23T06:31:31.000Z | 2018-08-23T06:31:31.000Z | models.py | nildip/DeepLearn_NUMBERS | 57171abc5d922df3aae24b8b86bc4674371608e9 | ["MIT"] | null | null | null | models.py | nildip/DeepLearn_NUMBERS | 57171abc5d922df3aae24b8b86bc4674371608e9 | ["MIT"] | 1 | 2020-05-25T13:50:14.000Z | 2020-05-25T13:50:14.000Z | from keras.models import Model
from keras.layers import Input, Dense, Dropout, Flatten, concatenate
from keras.layers.convolutional import Conv2D, MaxPooling2D, Conv1D, MaxPooling1D
# Multilayer perceptron
def simple_nn(n_class, n_col):
# input shape
input_shape = Input(shape = (n_col,))
# fully connected hidden layers
fc_layer = Dense(500, activation = 'relu')(input_shape)
fc_layer = Dropout(0.5)(fc_layer)
fc_layer = Dense(100, activation = 'relu')(fc_layer)
fc_layer = Dropout(0.4)(fc_layer)
fc_layer = Dense(25, activation = 'relu')(fc_layer)
fc_layer = Dropout(0.3)(fc_layer)
# output layer
if n_class > 2:
out = Dense(n_class, activation = 'softmax')(fc_layer)
else:
out = Dense(n_class, activation = 'sigmoid')(fc_layer)
model = Model(input_shape, out)
return model
#Multilayer 1D CNN
def simple_1Dcnn(n_class, n_col):
# input shape
input_shape = Input(shape=(n_col,1))
# 1D cnn layer
    cnn_layer = Conv1D(filters = 5, kernel_size = 15, strides = 1, activation = 'relu')(input_shape)
cnn_layer = Dropout(0.5)(cnn_layer)
cnn_layer = (MaxPooling1D(pool_size = 2))(cnn_layer)
# reshaping for fully connected layers
cnn_flat = Flatten()(cnn_layer)
# fully connected hidden layers
fc_layer = Dense(500, activation = 'relu')(cnn_flat)
fc_layer = Dropout(0.5)(fc_layer)
fc_layer = Dense(100, activation = 'relu')(fc_layer)
fc_layer = Dropout(0.4)(fc_layer)
# output layer
if n_class > 2:
out = Dense(n_class, activation = 'softmax')(fc_layer)
else:
out = Dense(n_class, activation = 'sigmoid')(fc_layer)
model = Model(input_shape, out)
return model
#Multilayer 2D CNN
def simple_2Dcnn(n_class, image_height, image_width):
# input shape
input_shape = Input(shape=(image_height, image_width, 1))
# cnn layer
    cnn_layer = Conv2D(filters = 5, kernel_size = 15, strides = 1, activation = 'relu')(input_shape)
cnn_layer = Dropout(0.5)(cnn_layer)
cnn_layer = (MaxPooling2D(pool_size = 2))(cnn_layer)
# reshaping for fully connected layers
cnn_flat = Flatten()(cnn_layer)
# fully connected hidden layers
fc_layer = Dense(500, activation = 'relu')(cnn_flat)
fc_layer = Dropout(0.5)(fc_layer)
fc_layer = Dense(100, activation = 'relu')(fc_layer)
fc_layer = Dropout(0.4)(fc_layer)
# output layer
if n_class > 2:
out = Dense(n_class, activation = 'softmax')(fc_layer)
else:
out = Dense(n_class, activation = 'sigmoid')(fc_layer)
model = Model(input_shape, out)
return model
#Multilayer Stacked 1D CNN
def stacked_1Dcnn(n_class, n_col):
# input shape
input_shape = Input(shape=(n_col,1))
# 1D cnn layer
    cnn_layer = Conv1D(filters = 5, kernel_size = 15, strides = 1, activation = 'relu')(input_shape)
    cnn_layer = Dropout(0.5)(cnn_layer)
    cnn_layer = (MaxPooling1D(pool_size = 2))(cnn_layer)
    # feed each block the previous block's output so the convolutions actually stack
    cnn_layer = Conv1D(filters = 4, kernel_size = 10, strides = 1, activation = 'relu')(cnn_layer)
    cnn_layer = Dropout(0.4)(cnn_layer)
    cnn_layer = (MaxPooling1D(pool_size = 2))(cnn_layer)
    cnn_layer = Conv1D(filters = 4, kernel_size = 5, strides = 1, activation = 'relu')(cnn_layer)
    cnn_layer = Dropout(0.3)(cnn_layer)
    cnn_layer = (MaxPooling1D(pool_size = 2))(cnn_layer)
# reshaping for fully connected layers
cnn_flat = Flatten()(cnn_layer)
# fully connected hidden layers
fc_layer = Dense(500, activation = 'relu')(cnn_flat)
fc_layer = Dropout(0.5)(fc_layer)
fc_layer = Dense(100, activation = 'relu')(fc_layer)
fc_layer = Dropout(0.4)(fc_layer)
# output layer
if n_class > 2:
out = Dense(n_class, activation = 'softmax')(fc_layer)
else:
out = Dense(n_class, activation = 'sigmoid')(fc_layer)
model = Model(input_shape, out)
return model
#Multilayer Stacked 2D CNN
def stacked_2Dcnn(n_class, image_height, image_width):
# input shape
input_shape = Input(shape=(image_height, image_width, 1))
# cnn layers
    cnn_layer = Conv2D(filters = 5, kernel_size = 15, strides = 1, activation = 'relu')(input_shape)
    cnn_layer = Dropout(0.5)(cnn_layer)
    cnn_layer = (MaxPooling2D(pool_size = 2))(cnn_layer)
    # feed each block the previous block's output so the convolutions actually stack
    cnn_layer = Conv2D(filters = 4, kernel_size = 10, strides = 1, activation = 'relu')(cnn_layer)
    cnn_layer = Dropout(0.4)(cnn_layer)
    cnn_layer = (MaxPooling2D(pool_size = 2))(cnn_layer)
    cnn_layer = Conv2D(filters = 4, kernel_size = 5, strides = 1, activation = 'relu')(cnn_layer)
    cnn_layer = Dropout(0.3)(cnn_layer)
    cnn_layer = (MaxPooling2D(pool_size = 2))(cnn_layer)
# reshaping for fully connected layers
cnn_flat = Flatten()(cnn_layer)
# fully connected hidden layers
fc_layer = Dense(500, activation = 'relu')(cnn_flat)
fc_layer = Dropout(0.5)(fc_layer)
fc_layer = Dense(100, activation = 'relu')(fc_layer)
fc_layer = Dropout(0.4)(fc_layer)
# output layer
if n_class > 2:
out = Dense(n_class, activation = 'softmax')(fc_layer)
else:
out = Dense(n_class, activation = 'sigmoid')(fc_layer)
model = Model(input_shape, out)
return model
#Multilayer Multitowered 2D CNN
def multitower_2Dcnn(n_class, image_height, image_width):
# input shape
input_shape = Input(shape=(image_height, image_width, 1))
# cnn-tower 1
    tower1 = Conv2D(filters = 4, kernel_size = 15, strides = 1, activation = 'relu')(input_shape)
    tower1 = Dropout(0.5)(tower1)
    tower1 = Flatten()(tower1)
    # cnn-tower 2
    tower2 = Conv2D(filters = 4, kernel_size = 15, strides = 1, activation = 'relu')(input_shape)
    tower2 = Dropout(0.5)(tower2)
    tower2 = Flatten()(tower2)
    # cnn-tower 3
    tower3 = Conv2D(filters = 4, kernel_size = 15, strides = 1, activation = 'relu')(input_shape)
    tower3 = Dropout(0.5)(tower3)
    tower3 = Flatten()(tower3)
# reshaping for fully connected layers
cnn_flat = concatenate([tower1, tower2, tower3], axis=1)
# fully connected hidden layers
fc_layer = Dense(500, activation = 'relu')(cnn_flat)
fc_layer = Dropout(0.5)(fc_layer)
fc_layer = Dense(100, activation = 'relu')(fc_layer)
fc_layer = Dropout(0.4)(fc_layer)
# output layer
if n_class > 2:
out = Dense(n_class, activation = 'softmax')(fc_layer)
else:
out = Dense(n_class, activation = 'sigmoid')(fc_layer)
model = Model(input_shape, out)
return model
| 41.025316 | 102 | 0.674483 | 927 | 6,482 | 4.485437 | 0.080906 | 0.097643 | 0.065657 | 0.05772 | 0.883117 | 0.880231 | 0.880231 | 0.869649 | 0.860991 | 0.860991 | 0 | 0.039481 | 0.202869 | 6,482 | 157 | 103 | 41.286624 | 0.765241 | 0.113237 | 0 | 0.765217 | 0 | 0 | 0.031518 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.052174 | false | 0 | 0.026087 | 0 | 0.130435 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
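None of the builders above compile or train the model they return. A typical follow-on under the same Keras 2 API might look like the sketch below; the optimizer, loss, and shapes are illustrative assumptions, not taken from this repository.

# Illustrative usage only -- hyperparameters here are assumptions.
model = simple_nn(n_class=10, n_col=784)
model.compile(optimizer='adam',
              loss='categorical_crossentropy',  # matches the softmax head used when n_class > 2
              metrics=['accuracy'])
model.summary()
# model.fit(x_train, y_train, batch_size=32, epochs=5)  # expects one-hot labels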
b10d219deb4d34d062380763773cc6229e3c1672 | 104,141 | py | Python | build/parsetab.py | oftcrash/martian-town-names | 4b51f04981ed2d1a3bb969093f8f7f2320fd418b | ["CC-BY-3.0"] | 1 | 2017-10-24T09:46:09.000Z | 2017-10-24T09:46:09.000Z | build/parsetab.py | oftcrash/martian-town-names | 4b51f04981ed2d1a3bb969093f8f7f2320fd418b | ["CC-BY-3.0"] | null | null | null | build/parsetab.py | oftcrash/martian-town-names | 4b51f04981ed2d1a3bb969093f8f7f2320fd418b | ["CC-BY-3.0"] | null | null | null |
# parsetab.py
# This file is automatically generated. Do not edit.
_tabversion = '3.2'
_lr_method = 'LALR'
_lr_signature = '\x93vL\xeaL\xb4\xf5\x87\xe8\x92\xcf\x87`em1'
_lr_action_items = {'REPLACESPRITE':([0,2,3,11,13,14,19,22,23,24,25,26,27,32,34,36,38,39,41,44,46,49,50,51,52,57,58,64,67,68,70,71,72,78,81,82,105,137,186,245,256,266,271,276,281,299,305,313,321,323,327,329,330,331,344,345,350,351,367,368,372,376,378,390,405,425,427,435,440,443,444,451,452,454,455,459,460,463,466,467,471,479,487,488,489,493,494,499,511,],[-2,35,-3,-35,-13,-9,-15,-18,-34,-26,-31,-5,-30,-119,-32,-14,-28,-23,-27,-10,-12,-21,-33,-11,-8,-19,-20,-7,-16,-17,-24,-25,-6,-29,-22,-4,-120,-209,-2,-199,-104,-111,-208,35,-190,-179,-193,-206,-200,-166,-212,-192,-210,-2,-207,-121,-191,-2,-211,-194,-167,-2,-139,35,35,35,-158,-189,-103,-165,-136,-2,-156,-124,-160,-125,-162,-164,-178,-112,-122,-159,35,-157,-161,-163,-149,-168,-123,]),'DIVIDE':([9,37,47,60,65,74,79,80,88,90,94,106,149,156,179,190,191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,213,226,227,232,234,237,242,250,252,263,270,283,287,308,309,315,332,333,338,343,346,349,371,389,415,422,426,438,445,458,482,495,496,502,507,513,524,529,530,543,547,],[-38,122,-40,-36,-42,-39,-41,-37,122,122,-65,-66,122,-71,122,122,122,122,122,122,122,122,122,122,122,122,122,-50,-49,-48,122,122,122,122,122,122,122,122,122,122,-43,122,122,-40,122,122,122,-69,-70,-44,-89,122,122,122,122,122,122,122,122,122,122,-45,122,122,122,122,122,122,122,122,-71,122,122,122,122,122,122,]),'BINARY_NOT':([0,2,3,5,7,11,13,14,15,19,22,23,24,25,26,27,32,33,34,36,38,39,41,44,46,49,50,51,52,57,58,64,67,68,70,71,72,73,78,81,82,84,95,96,97,99,100,101,103,105,107,109,110,111,112,113,114,115,116,117,118,119,120,121,122,123,124,125,126,127,128,129,130,131,132,133,135,136,137,138,141,143,144,145,147,148,151,152,153,155,157,164,174,175,176,181,186,189,215,219,238,245,248,249,251,256,258,259,262,264,266,267,268,271,274,276,277,280,281,299,305,307,313,314,319,320,321,323,326,327,328,329,330,331,335,336,337,344,345,350,351,354,360,366,367,368,372,376,377,378,383,386,388,390,396,401,405,408,425,427,430,433,435,437,439,440,443,444,451,452,454,455,459,460,463,464,465,466,467,471,472,473,476,479,483,486,487,488,489,490,491,493,494,499,500,503,505,508,509,510,511,512,514,517,518,519,523,525,526,528,534,536,537,538,539,542,544,545,546,548,],[-2,33,-3,33,33,-35,-13,-9,33,-15,-18,-34,-26,-31,-5,-30,-119,33,-32,-14,-28,-23,-27,-10,-12,-21,-33,-11,-8,-19,-20,-7,-16,-17,-24,-25,-6,33,-29,-22,-4,33,33,33,33,33,33,33,33,-120,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,-209,33,33,33,33,33,33,33,33,-101,33,33,33,33,33,-117,33,33,-2,33,33,33,33,-199,-213,-216,33,-104,33,33,-134,-118,-111,-135,33,-208,33,33,33,33,-190,-179,-193,33,-206,-102,-101,33,-200,-166,-214,-212,33,-192,-210,-2,-137,-133,-116,-207,-121,-191,-2,-128,33,33,-211,-194,-167,-2,33,-139,33,33,33,33,33,-76,33,33,33,-158,-142,33,-189,33,33,-103,-165,-136,-2,-156,-124,-160,-125,-162,-164,33,33,-178,-112,-122,-100,33,33,-159,33,33,33,-157,-161,33,33,-163,-149,-168,-143,-142,33,-215,-138,33,-123,-129,33,33,33,33,33,-86,33,-130,-87,-131,33,-144,33,-132,-88,-146,33,-145,]),'RETURN':([101,175,176,262,264,267,268,336,337,354,366,408,483,490,512,514,526,528,536,537,542,],[174,-117,174,-134,-118,-135,174,-133,-116,-128,174,174,174,174,-129,174,174,-130,-131,174,-132,]),'RECOLOUR_SPRITE':([317,334,347,352,355,359,379,381,382,385,395,403,404,406,407,410,411,412,414,428,429,432,434,453,456,461,462,474,501,506,520,531,532,],[380,380,380,380,380,380,380,-152,-153,380,380,380,380,380,380,380,380,380,380,-154,-155,475,380,380,380,380,380,-140,-147,-150,-141,-148,-151,]),'TOWN_NAMES':(
[0,2,3,11,13,14,19,22,23,24,25,26,27,32,34,36,38,39,41,44,46,49,50,51,52,57,58,64,67,68,70,71,72,78,81,82,105,137,186,222,245,256,266,271,276,281,299,305,313,321,323,327,329,330,331,344,345,350,351,361,367,368,372,376,378,390,405,425,427,435,440,443,444,451,452,454,455,459,460,463,466,467,471,479,487,488,489,493,494,499,511,],[-2,59,-3,-35,-13,-9,-15,-18,-34,-26,-31,-5,-30,-119,-32,-14,-28,-23,-27,-10,-12,-21,-33,-11,-8,-19,-20,-7,-16,-17,-24,-25,-6,-29,-22,-4,-120,-209,-2,296,-199,-104,-111,-208,59,-190,-179,-193,-206,-200,-166,-212,-192,-210,-2,-207,-121,-191,-2,296,-211,-194,-167,-2,-139,59,59,59,-158,-189,-103,-165,-136,-2,-156,-124,-160,-125,-162,-164,-178,-112,-122,-159,59,-157,-161,-163,-149,-168,-123,]),'NUMBER':([0,2,3,5,7,11,13,14,15,19,22,23,24,25,26,27,32,33,34,36,38,39,41,44,46,49,50,51,52,57,58,64,67,68,70,71,72,73,78,81,82,84,95,96,97,98,99,100,101,103,105,107,109,110,111,112,113,114,115,116,117,118,119,120,121,122,123,124,125,126,127,128,129,130,131,132,133,135,136,137,138,141,143,144,145,147,148,151,152,153,155,157,164,168,170,174,175,176,181,186,189,215,219,238,245,248,249,251,256,257,258,259,262,264,266,267,268,271,274,276,277,280,281,299,305,307,313,314,319,320,321,323,326,327,328,329,330,331,335,336,337,344,345,350,351,354,360,366,367,368,372,376,377,378,383,386,388,390,391,393,396,401,405,408,425,427,430,433,435,437,439,440,441,442,443,444,451,452,454,455,459,460,463,464,465,466,467,471,472,473,476,479,483,486,487,488,489,490,491,493,494,499,500,503,505,508,509,510,511,512,514,517,518,519,523,525,526,528,534,536,537,538,539,542,544,545,546,548,],[-2,60,-3,60,60,-35,-13,-9,60,-15,-18,-34,-26,-31,-5,-30,-119,60,-32,-14,-28,-23,-27,-10,-12,-21,-33,-11,-8,-19,-20,-7,-16,-17,-24,-25,-6,60,-29,-22,-4,60,60,60,60,169,60,60,60,60,-120,60,60,60,60,60,60,60,60,60,60,60,60,60,60,60,60,60,60,60,60,60,60,60,60,60,60,60,60,-209,60,60,60,60,60,60,60,60,-101,60,60,60,60,169,-105,60,-117,60,60,-2,60,60,60,60,-199,-213,-216,60,-104,-106,60,60,-134,-118,-111,-135,60,-208,60,60,60,60,-190,-179,-193,60,-206,-102,-101,60,-200,-166,-214,-212,60,-192,-210,-2,-137,-133,-116,-207,-121,-191,-2,-128,60,60,-211,-194,-167,-2,60,-139,60,60,60,60,-109,-107,60,-76,60,60,60,-158,-142,60,-189,60,60,-103,-110,-108,-165,-136,-2,-156,-124,-160,-125,-162,-164,60,60,-178,-112,-122,-100,60,60,-159,60,60,60,-157,-161,60,60,-163,-149,-168,-143,-142,60,-215,-138,60,-123,-129,60,60,60,60,60,-86,60,-130,-87,-131,60,-144,60,-132,-88,-146,60,-145,]),'ELSE':([32,471,511,],[104,-122,-123,]),'LIVERYOVERRIDE':([0,2,3,11,13,14,19,22,23,24,25,26,27,32,34,36,38,39,41,44,46,49,50,51,52,57,58,64,67,68,70,71,72,78,81,82,105,137,186,245,256,266,271,276,281,299,305,313,321,323,327,329,330,331,344,345,350,351,367,368,372,376,378,390,405,425,427,435,440,443,444,451,452,454,455,459,460,463,466,467,471,479,487,488,489,493,494,499,511,],[-2,61,-3,-35,-13,-9,-15,-18,-34,-26,-31,-5,-30,-119,-32,-14,-28,-23,-27,-10,-12,-21,-33,-11,-8,-19,-20,-7,-16,-17,-24,-25,-6,-29,-22,-4,-120,-209,-2,-199,-104,-111,-208,61,-190,-179,-193,-206,-200,-166,-212,-192,-210,-2,-207,-121,-191,-2,-211,-194,-167,-2,-139,61,61,61,-158,-189,-103,-165,-136,-2,-156,-124,-160,-125,-162,-164,-178,-112,-122,-159,61,-157,-161,-163,-149,-168,-123,]),'LBRACKET':([0,2,3,5,7,11,13,14,15,19,22,23,24,25,26,27,32,33,34,36,38,39,41,42,44,46,49,50,51,52,57,58,64,66,67,68,70,71,72,73,78,81,82,84,95,96,97,99,100,101,103,105,107,109,110,111,112,113,114,115,116,117,118,119,120,121,122,123,124,125,126,127,128,129,130,131,132,133,135,136,137,138,141,143,144,145,147,148,151,152,153,
155,157,164,174,175,176,181,186,189,215,219,238,244,245,248,249,251,256,258,259,262,264,266,267,268,271,274,276,277,280,281,299,305,307,313,314,317,319,320,321,323,326,327,328,329,330,331,334,335,336,337,344,345,347,350,351,352,354,355,359,360,366,367,368,372,376,377,378,379,381,382,383,385,386,388,390,395,396,401,403,404,405,406,407,408,410,411,412,414,425,427,428,429,430,432,433,434,435,437,439,440,443,444,451,452,453,454,455,456,459,460,461,462,463,464,465,466,467,471,472,473,474,476,479,483,486,487,488,489,490,491,493,494,499,500,501,503,505,506,508,509,510,511,512,514,517,518,519,520,523,525,526,528,531,532,534,536,537,538,539,542,544,545,546,548,],[-2,5,-3,5,5,-35,-13,-9,5,-15,-18,-34,-26,-31,-5,-30,-119,5,-32,-14,-28,-23,-27,131,-10,-12,-21,-33,-11,-8,-19,-20,-7,147,-16,-17,-24,-25,-6,5,-29,-22,-4,5,5,5,5,5,5,5,5,-120,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,-209,5,5,5,5,5,5,5,5,-101,5,5,5,5,5,-117,5,5,-2,5,5,5,5,320,-199,-213,-216,5,-104,5,5,-134,-118,-111,-135,5,-208,5,5,5,5,-190,-179,-193,5,-206,-102,383,-101,5,-200,-166,-214,-212,5,-192,-210,-2,383,-137,-133,-116,-207,-121,383,-191,-2,383,-128,383,383,5,5,-211,-194,-167,-2,5,-139,383,-152,-153,5,383,5,437,5,383,5,-76,383,383,5,383,383,5,383,383,383,383,5,-158,-154,-155,-142,476,5,383,-189,5,5,-103,-165,-136,-2,-156,383,-124,-160,383,-125,-162,383,383,-164,5,5,-178,-112,-122,-100,5,-140,5,-159,5,5,5,-157,-161,5,5,-163,-149,-168,-143,-147,-142,5,-150,-215,-138,5,-123,-129,5,5,5,5,-141,5,-86,5,-130,-148,-151,-87,-131,5,-144,5,-132,-88,-146,5,-145,]),'COMP_EQ':([9,37,47,60,65,74,79,80,88,90,94,106,149,156,179,190,191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,213,226,227,232,234,237,242,250,252,263,270,283,287,308,309,315,332,333,338,343,346,349,371,389,415,422,426,438,445,458,482,495,496,502,507,513,524,529,530,543,547,],[-38,110,-40,-36,-42,-39,-41,-37,110,-68,-65,-66,110,-71,110,-60,-57,110,-56,110,-47,-62,-59,-46,-58,-61,110,-50,-49,-48,110,110,-55,-54,110,110,110,110,110,110,-43,110,110,-40,110,110,110,-69,-70,-44,-89,110,110,110,110,110,110,110,110,110,110,-45,110,110,110,110,110,110,110,110,-71,110,110,110,110,110,110,]),'WHILE':([0,2,3,11,13,14,19,22,23,24,25,26,27,32,34,36,38,39,41,44,46,49,50,51,52,57,58,64,67,68,70,71,72,78,81,82,105,137,186,245,256,266,271,276,281,299,305,313,321,323,327,329,330,331,344,345,350,351,367,368,372,376,378,390,405,425,427,435,440,443,444,451,452,454,455,459,460,463,466,467,471,479,487,488,489,493,494,499,511,],[-2,43,-3,-35,-13,-9,-15,-18,-34,-26,-31,-5,-30,-119,-32,-14,-28,-23,-27,-10,-12,-21,-33,-11,-8,-19,-20,-7,-16,-17,-24,-25,-6,-29,-22,-4,-120,-209,-2,-199,-104,-111,-208,43,-190,-179,-193,-206,-200,-166,-212,-192,-210,-2,-207,-121,-191,-2,-211,-194,-167,-2,-139,43,43,43,-158,-189,-103,-165,-136,-2,-156,-124,-160,-125,-162,-164,-178,-112,-122,-159,43,-157,-161,-163,-149,-168,-123,]),'SNOWLINE':([0,2,3,11,13,14,19,22,23,24,25,26,27,32,34,36,38,39,41,44,46,49,50,51,52,57,58,64,67,68,70,71,72,78,81,82,105,137,186,245,256,266,271,276,281,299,305,313,321,323,327,329,330,331,344,345,350,351,367,368,372,376,378,390,405,425,427,435,440,443,444,451,452,454,455,459,460,463,466,467,471,479,487,488,489,493,494,499,511,],[-2,6,-3,-35,-13,-9,-15,-18,-34,-26,-31,-5,-30,-119,-32,-14,-28,-23,-27,-10,-12,-21,-33,-11,-8,-19,-20,-7,-16,-17,-24,-25,-6,-29,-22,-4,-120,-209,-2,-199,-104,-111,-208,6,-190,-179,-193,-206,-200,-166,-212,-192,-210,-2,-207,-121,-191,-2,-211,-194,-167,-2,-139,6,6,6,-158,-189,-103,-165,-136,-2,-156,-124,-160,-125,-162,-164,-178,-112,-122,
-159,6,-157,-161,-163,-149,-168,-123,]),'LOGICAL_AND':([9,37,47,60,65,74,79,80,88,90,94,106,149,156,179,190,191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,213,226,227,232,234,237,242,250,252,263,270,283,287,308,309,315,332,333,338,343,346,349,371,389,415,422,426,438,445,458,482,495,496,502,507,513,524,529,530,543,547,],[-38,111,-40,-36,-42,-39,-41,-37,111,-68,-65,-66,111,-71,111,-60,-57,-63,-56,-53,-47,-62,-59,-46,-58,-61,111,-50,-49,-48,111,-51,-55,-54,111,-52,111,111,111,111,-43,111,111,-40,111,111,111,-69,-70,-44,-89,111,111,111,111,111,111,111,111,111,111,-45,111,111,111,111,111,111,111,111,-71,111,111,111,111,111,111,]),'SORT_VEHICLES':([0,2,3,11,13,14,19,22,23,24,25,26,27,32,34,36,38,39,41,44,46,49,50,51,52,57,58,64,67,68,70,71,72,78,81,82,105,137,186,245,256,266,271,276,281,299,305,313,321,323,327,329,330,331,344,345,350,351,367,368,372,376,378,390,405,425,427,435,440,443,444,451,452,454,455,459,460,463,466,467,471,479,487,488,489,493,494,499,511,],[-2,62,-3,-35,-13,-9,-15,-18,-34,-26,-31,-5,-30,-119,-32,-14,-28,-23,-27,-10,-12,-21,-33,-11,-8,-19,-20,-7,-16,-17,-24,-25,-6,-29,-22,-4,-120,-209,-2,-199,-104,-111,-208,62,-190,-179,-193,-206,-200,-166,-212,-192,-210,-2,-207,-121,-191,-2,-211,-194,-167,-2,-139,62,62,62,-158,-189,-103,-165,-136,-2,-156,-124,-160,-125,-162,-164,-178,-112,-122,-159,62,-157,-161,-163,-149,-168,-123,]),'SHIFTU_RIGHT':([9,37,47,60,65,74,79,80,88,90,94,106,149,156,179,190,191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,213,226,227,232,234,237,242,250,252,263,270,283,287,308,309,315,332,333,338,343,346,349,371,389,415,422,426,438,445,458,482,495,496,502,507,513,524,529,530,543,547,],[-38,112,-40,-36,-42,-39,-41,-37,112,-68,-65,-66,112,-71,112,112,112,112,-56,112,-47,112,112,-46,112,112,112,-50,-49,-48,112,112,-55,-54,112,112,112,112,112,112,-43,112,112,-40,112,112,112,-69,-70,-44,-89,112,112,112,112,112,112,112,112,112,112,-45,112,112,112,112,112,112,112,112,-71,112,112,112,112,112,112,]),'TIMES':([9,37,47,60,65,74,79,80,88,90,94,106,149,156,179,190,191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,213,226,227,232,234,237,242,250,252,263,270,283,287,308,309,315,332,333,338,343,346,349,371,389,415,422,426,438,445,458,482,495,496,502,507,513,524,529,530,543,547,],[-38,123,-40,-36,-42,-39,-41,-37,123,123,-65,-66,123,-71,123,123,123,123,123,123,123,123,123,123,123,123,123,-50,-49,-48,123,123,123,123,123,123,123,123,123,123,-43,123,123,-40,123,123,123,-69,-70,-44,-89,123,123,123,123,123,123,123,123,123,123,-45,123,123,123,123,123,123,123,123,-71,123,123,123,123,123,123,]),'MODULO':([9,37,47,60,65,74,79,80,88,90,94,106,149,156,179,190,191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,213,226,227,232,234,237,242,250,252,263,270,283,287,308,309,315,332,333,338,343,346,349,371,389,415,422,426,438,445,458,482,495,496,502,507,513,524,529,530,543,547,],[-38,121,-40,-36,-42,-39,-41,-37,121,121,-65,-66,121,-71,121,121,121,121,121,121,121,121,121,121,121,121,121,-50,-49,-48,121,121,121,121,121,121,121,121,121,121,-43,121,121,-40,121,121,121,-69,-70,-44,-89,121,121,121,121,121,121,121,121,121,121,-45,121,121,121,121,121,121,121,121,-71,121,121,121,121,121,121,]),'CARGOTABLE':([0,2,3,11,13,14,19,22,23,24,25,26,27,32,34,36,38,39,41,44,46,49,50,51,52,57,58,64,67,68,70,71,72,78,81,82,105,137,186,245,256,266,271,276,281,299,305,313,321,323,327,329,330,331,344,345,350,351,367,368,372,376,378,390,405,425,427,435,440,443,444,451,452,454,455,459,460,463,466,467,471
,479,487,488,489,493,494,499,511,],[-2,63,-3,-35,-13,-9,-15,-18,-34,-26,-31,-5,-30,-119,-32,-14,-28,-23,-27,-10,-12,-21,-33,-11,-8,-19,-20,-7,-16,-17,-24,-25,-6,-29,-22,-4,-120,-209,-2,-199,-104,-111,-208,63,-190,-179,-193,-206,-200,-166,-212,-192,-210,-2,-207,-121,-191,-2,-211,-194,-167,-2,-139,63,63,63,-158,-189,-103,-165,-136,-2,-156,-124,-160,-125,-162,-164,-178,-112,-122,-159,63,-157,-161,-163,-149,-168,-123,]),'PARAMETER':([0,2,3,5,7,11,13,14,15,19,22,23,24,25,26,27,32,33,34,36,38,39,41,44,46,49,50,51,52,57,58,64,67,68,70,71,72,73,78,81,82,84,95,96,97,99,100,101,102,103,105,107,109,110,111,112,113,114,115,116,117,118,119,120,121,122,123,124,125,126,127,128,129,130,131,132,133,135,136,137,138,141,143,144,145,147,148,151,152,153,155,157,164,174,175,176,180,181,182,183,186,189,215,219,238,245,248,249,251,256,258,259,262,264,266,267,268,271,272,273,274,276,277,280,281,299,305,307,313,314,319,320,321,323,326,327,328,329,330,331,335,336,337,344,345,350,351,354,360,366,367,368,372,376,377,378,383,386,388,390,396,397,401,405,408,425,427,430,433,435,437,439,440,443,444,450,451,452,454,455,459,460,463,464,465,466,467,471,472,473,476,479,483,486,487,488,489,490,491,493,494,499,500,503,505,508,509,510,511,512,514,517,518,519,523,525,526,528,534,536,537,538,539,542,544,545,546,548,],[-2,66,-3,66,66,-35,-13,-9,66,-15,-18,-34,-26,-31,-5,-30,-119,66,-32,-14,-28,-23,-27,-10,-12,-21,-33,-11,-8,-19,-20,-7,-16,-17,-24,-25,-6,66,-29,-22,-4,66,66,66,66,66,66,66,181,66,-120,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,-209,66,66,66,66,66,66,66,66,-101,66,66,66,66,66,-117,66,-72,66,-73,181,-2,66,66,66,66,-199,-213,-216,66,-104,66,66,-134,-118,-111,-135,66,-208,-74,-75,66,66,66,66,-190,-179,-193,66,-206,-102,-101,66,-200,-166,-214,-212,66,-192,-210,-2,-137,-133,-116,-207,-121,-191,-2,-128,66,66,-211,-194,-167,-2,66,-139,66,66,66,66,66,-78,-76,66,66,66,-158,-142,66,-189,66,66,-103,-165,-136,-77,-2,-156,-124,-160,-125,-162,-164,66,66,-178,-112,-122,-100,66,66,-159,66,66,66,-157,-161,66,66,-163,-149,-168,-143,-142,66,-215,-138,66,-123,-129,66,66,66,66,66,-86,66,-130,-87,-131,66,-144,66,-132,-88,-146,66,-145,]),'MINUS':([0,2,3,5,7,9,11,13,14,15,19,22,23,24,25,26,27,32,33,34,36,37,38,39,41,44,46,47,49,50,51,52,57,58,60,64,65,67,68,70,71,72,73,74,78,79,80,81,82,84,88,90,94,95,96,97,99,100,101,103,105,106,107,109,110,111,112,113,114,115,116,117,118,119,120,121,122,123,124,125,126,127,128,129,130,131,132,133,135,136,137,138,141,143,144,145,147,148,149,151,152,153,155,156,157,164,174,175,176,179,181,186,189,190,191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,213,215,219,226,227,232,234,237,238,242,245,248,249,250,251,252,256,258,259,262,263,264,266,267,268,270,271,274,276,277,280,281,283,287,299,305,307,308,309,313,314,315,319,320,321,323,326,327,328,329,330,331,332,333,335,336,337,338,343,344,345,346,349,350,351,354,360,366,367,368,371,372,376,377,378,383,386,388,389,390,396,401,405,408,415,422,425,426,427,430,433,435,437,438,439,440,443,444,445,451,452,454,455,458,459,460,463,464,465,466,467,471,472,473,476,479,482,483,486,487,488,489,490,491,493,494,495,496,499,500,502,503,505,507,508,509,510,511,512,513,514,517,518,519,523,524,525,526,528,529,530,534,536,537,538,539,542,543,544,545,546,547,548,],[-2,7,-3,7,7,-38,-35,-13,-9,7,-15,-18,-34,-26,-31,-5,-30,-119,7,-32,-14,114,-28,-23,-27,-10,-12,-40,-21,-33,-11,-8,-19,-20,-36,-7,-42,-16,-17,-24,-25,-6,7,-39,-29,-41,-37,-22,-4,7,114,-68,-65,7,7,7,7,7,7,7,-120,-66,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,
7,7,7,7,7,7,7,7,7,-209,7,7,7,7,7,7,7,114,7,-101,7,7,-71,7,7,7,-117,7,114,7,-2,7,114,114,114,114,114,-47,114,114,-46,114,114,114,-50,-49,-48,114,114,114,114,114,114,114,7,7,114,114,114,-43,114,7,114,-199,-213,-216,-40,7,114,-104,7,7,-134,114,-118,-111,-135,7,114,-208,7,7,7,7,-190,-69,-70,-179,-193,7,-44,-89,-206,-102,114,-101,7,-200,-166,-214,-212,7,-192,-210,-2,114,114,-137,-133,-116,114,114,-207,-121,114,114,-191,-2,-128,7,7,-211,-194,114,-167,-2,7,-139,7,7,7,114,7,7,-76,7,7,114,-45,7,114,-158,-142,7,-189,7,114,7,-103,-165,-136,114,-2,-156,-124,-160,114,-125,-162,-164,7,7,-178,-112,-122,-100,7,7,-159,114,7,7,7,-157,-161,7,7,-163,-149,114,114,-168,-143,114,-142,7,-71,-215,-138,7,-123,-129,114,7,7,7,7,7,114,-86,7,-130,114,114,-87,-131,7,-144,7,-132,114,-88,-146,7,114,-145,]),'COMP_GT':([9,37,47,60,65,74,79,80,88,90,94,106,149,156,179,190,191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,213,226,227,232,234,237,242,250,252,263,270,283,287,308,309,315,332,333,338,343,346,349,371,389,415,422,426,438,445,458,482,495,496,502,507,513,524,529,530,543,547,],[-38,115,-40,-36,-42,-39,-41,-37,115,-68,-65,-66,115,-71,115,-60,-57,115,-56,115,-47,-62,-59,-46,-58,-61,115,-50,-49,-48,115,115,-55,-54,115,115,115,115,115,115,-43,115,115,-40,115,115,115,-69,-70,-44,-89,115,115,115,115,115,115,115,115,115,115,-45,115,115,115,115,115,115,115,115,-71,115,115,115,115,115,115,]),'RBRACE':([3,11,13,14,19,22,23,24,25,26,27,32,34,36,38,39,41,44,46,49,50,51,52,57,58,64,67,68,70,71,72,78,81,82,105,137,152,159,160,161,162,163,168,170,175,176,177,178,180,182,183,186,223,224,229,230,231,235,238,245,246,247,248,249,251,256,257,262,264,265,266,267,271,272,273,276,281,295,297,299,300,305,306,310,313,314,319,321,322,323,324,326,327,329,330,331,335,336,337,339,340,344,345,350,351,354,361,362,367,368,369,370,372,373,376,378,379,381,382,386,390,391,393,395,396,397,398,400,401,403,405,406,408,409,410,414,416,417,419,420,421,424,425,427,428,429,430,434,435,436,440,441,442,443,444,446,447,449,450,451,452,453,454,455,456,457,459,460,461,462,463,466,467,468,469,470,471,472,473,474,479,481,484,485,487,488,489,493,494,497,498,499,500,501,503,506,508,509,511,512,515,516,519,520,522,523,525,528,531,532,534,536,538,540,542,544,545,548,],[-3,-35,-13,-9,-15,-18,-34,-26,-31,-5,-30,-119,-32,-14,-28,-23,-27,-10,-12,-21,-33,-11,-8,-19,-20,-7,-16,-17,-24,-25,-6,-29,-22,-4,-120,-209,-101,-201,-203,245,-204,-169,256,-105,-117,-113,-115,266,-72,-73,271,-2,-180,299,305,-195,-196,-173,313,-199,321,323,-213,-216,327,-104,-106,-134,-118,-114,-111,-135,-208,-74,-75,345,-190,-185,362,-179,-181,-193,368,372,-206,-102,-101,-200,-202,-166,-170,-214,-212,-192,-210,-2,-137,-133,-116,-79,397,-207,-121,-191,-2,-128,417,-183,-211,-194,-197,-198,-167,-174,-2,-139,427,-152,-153,435,440,-109,-107,443,444,-78,-80,450,-76,452,454,455,-127,459,460,463,-186,-184,-182,466,467,-173,471,-158,-154,-155,-142,479,-189,-205,-103,-110,-108,-165,-136,484,-84,-82,-77,-2,-156,488,-124,-160,489,-126,-125,-162,493,494,-164,-178,-112,-176,497,499,-122,-100,501,-140,-159,-172,-81,-83,511,-157,-161,-163,-149,-175,-177,-168,-143,-147,-142,-150,-215,-138,-123,-129,-187,-188,531,-141,-171,533,-86,-130,-148,-151,-87,-131,-144,-85,-132,-88,-146,-145,]),'RAILTYPETABLE':([0,2,3,11,13,14,19,22,23,24,25,26,27,32,34,36,38,39,41,44,46,49,50,51,52,57,58,64,67,68,70,71,72,78,81,82,105,137,186,245,256,266,271,276,281,299,305,313,321,323,327,329,330,331,344,345,350,351,367,368,372,376,378,390,405,425,427,435,440,443,444,451,452,454,455,459,460,463,466,467,471,479,487,488
,489,493,494,499,511,],[-2,8,-3,-35,-13,-9,-15,-18,-34,-26,-31,-5,-30,-119,-32,-14,-28,-23,-27,-10,-12,-21,-33,-11,-8,-19,-20,-7,-16,-17,-24,-25,-6,-29,-22,-4,-120,-209,-2,-199,-104,-111,-208,8,-190,-179,-193,-206,-200,-166,-212,-192,-210,-2,-207,-121,-191,-2,-211,-194,-167,-2,-139,8,8,8,-158,-189,-103,-165,-136,-2,-156,-124,-160,-125,-162,-164,-178,-112,-122,-159,8,-157,-161,-163,-149,-168,-123,]),'BASE_GRAPHICS':([0,2,3,11,13,14,19,22,23,24,25,26,27,32,34,36,38,39,41,44,46,49,50,51,52,57,58,64,67,68,70,71,72,78,81,82,105,137,186,245,256,266,271,276,281,299,305,313,321,323,327,329,330,331,344,345,350,351,367,368,372,376,378,390,405,425,427,435,440,443,444,451,452,454,455,459,460,463,466,467,471,479,487,488,489,493,494,499,511,],[-2,45,-3,-35,-13,-9,-15,-18,-34,-26,-31,-5,-30,-119,-32,-14,-28,-23,-27,-10,-12,-21,-33,-11,-8,-19,-20,-7,-16,-17,-24,-25,-6,-29,-22,-4,-120,-209,-2,-199,-104,-111,-208,45,-190,-179,-193,-206,-200,-166,-212,-192,-210,-2,-207,-121,-191,-2,-211,-194,-167,-2,-139,45,45,45,-158,-189,-103,-165,-136,-2,-156,-124,-160,-125,-162,-164,-178,-112,-122,-159,45,-157,-161,-163,-149,-168,-123,]),'SEMICOLON':([9,47,53,60,65,74,79,80,90,94,106,156,174,179,190,191,192,193,194,195,196,197,198,199,200,202,203,204,205,206,207,208,209,210,234,253,254,263,275,282,283,287,304,308,309,316,332,333,338,343,349,364,392,394,422,426,438,458,482,507,529,533,541,543,547,],[-38,-40,137,-36,-42,-39,-41,-37,-68,-65,-66,-71,262,267,-60,-57,-63,-56,-53,-47,-62,-59,-46,-58,-61,-50,-49,-48,281,-51,-55,-54,-64,-52,-43,329,330,336,344,350,-69,-70,367,-44,-89,378,391,393,267,401,-67,419,441,442,-45,472,481,267,508,522,538,540,544,545,548,]),'LOGICAL_OR':([9,37,47,60,65,74,79,80,88,90,94,106,149,156,179,190,191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,213,226,227,232,234,237,242,250,252,263,270,283,287,308,309,315,332,333,338,343,346,349,371,389,415,422,426,438,445,458,482,495,496,502,507,513,524,529,530,543,547,],[-38,128,-40,-36,-42,-39,-41,-37,128,-68,-65,-66,128,-71,128,-60,-57,-63,-56,-53,-47,-62,-59,-46,-58,-61,128,-50,-49,-48,128,-51,-55,-54,-64,-52,128,128,128,128,-43,128,128,-40,128,128,128,-69,-70,-44,-89,128,128,128,128,128,128,128,128,128,128,-45,128,128,128,128,128,128,128,128,-71,128,128,128,128,128,128,]),'SHIFT_LEFT':([9,37,47,60,65,74,79,80,88,90,94,106,149,156,179,190,191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,213,226,227,232,234,237,242,250,252,263,270,283,287,308,309,315,332,333,338,343,346,349,371,389,415,422,426,438,445,458,482,495,496,502,507,513,524,529,530,543,547,],[-38,127,-40,-36,-42,-39,-41,-37,127,-68,-65,-66,127,-71,127,127,127,127,-56,127,-47,127,127,-46,127,127,127,-50,-49,-48,127,127,-55,-54,127,127,127,127,127,127,-43,127,127,-40,127,127,127,-69,-70,-44,-89,127,127,127,127,127,127,127,127,127,127,-45,127,127,127,127,127,127,127,127,-71,127,127,127,127,127,127,]),'COMP_LE':([9,37,47,60,65,74,79,80,88,90,94,106,149,156,179,190,191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,213,226,227,232,234,237,242,250,252,263,270,283,287,308,309,315,332,333,338,343,346,349,371,389,415,422,426,438,445,458,482,495,496,502,507,513,524,529,530,543,547,],[-38,116,-40,-36,-42,-39,-41,-37,116,-68,-65,-66,116,-71,116,-60,-57,116,-56,116,-47,-62,-59,-46,-58,-61,116,-50,-49,-48,116,116,-55,-54,116,116,116,116,116,116,-43,116,116,-40,116,116,116,-69,-70,-44,-89,116,116,116,116,116,116,116,116,116,116,-45,116,116,116,116,116,116,116,116,-71,116,116,116,116,116,116,]),'SPRITELAYOUT':([0,2,3,11,13,1
4,19,22,23,24,25,26,27,32,34,36,38,39,41,44,46,49,50,51,52,57,58,64,67,68,70,71,72,78,81,82,105,137,186,245,256,266,271,276,281,299,305,313,321,323,327,329,330,331,344,345,350,351,367,368,372,376,378,390,405,425,427,435,440,443,444,451,452,454,455,459,460,463,466,467,471,479,487,488,489,493,494,499,511,],[-2,75,-3,-35,-13,-9,-15,-18,-34,-26,-31,-5,-30,-119,-32,-14,-28,-23,-27,-10,-12,-21,-33,-11,-8,-19,-20,-7,-16,-17,-24,-25,-6,-29,-22,-4,-120,-209,-2,-199,-104,-111,-208,75,-190,-179,-193,-206,-200,-166,-212,-192,-210,-2,-207,-121,-191,-2,-211,-194,-167,-2,-139,75,75,75,-158,-189,-103,-165,-136,-2,-156,-124,-160,-125,-162,-164,-178,-112,-122,-159,75,-157,-161,-163,-149,-168,-123,]),'SWITCH':([0,2,3,11,13,14,19,22,23,24,25,26,27,32,34,36,38,39,41,44,46,49,50,51,52,57,58,64,67,68,70,71,72,78,81,82,105,137,186,245,256,266,271,276,281,299,305,313,321,323,327,329,330,331,344,345,350,351,367,368,372,376,378,390,405,425,427,435,440,443,444,451,452,454,455,459,460,463,466,467,471,479,487,488,489,493,494,499,511,],[-2,48,-3,-35,-13,-9,-15,-18,-34,-26,-31,-5,-30,-119,-32,-14,-28,-23,-27,-10,-12,-21,-33,-11,-8,-19,-20,-7,-16,-17,-24,-25,-6,-29,-22,-4,-120,-209,-2,-199,-104,-111,-208,48,-190,-179,-193,-206,-200,-166,-212,-192,-210,-2,-207,-121,-191,-2,-211,-194,-167,-2,-139,48,48,48,-158,-189,-103,-165,-136,-2,-156,-124,-160,-125,-162,-164,-178,-112,-122,-159,48,-157,-161,-163,-149,-168,-123,]),'COLON':([9,47,60,65,74,79,80,90,94,106,156,160,169,171,179,184,190,191,192,193,194,195,196,197,198,199,200,201,202,203,204,206,207,208,209,210,225,234,250,283,287,308,309,315,325,349,384,389,422,445,448,458,492,502,513,524,527,530,],[-38,-40,-36,-42,-39,-41,-37,-68,-65,-66,-71,244,258,259,268,274,-60,-57,-63,-56,-53,-47,-62,-59,-46,-58,-61,280,-50,-49,-48,-51,-55,-54,-64,-52,301,-43,274,-69,-70,-44,-89,377,388,-67,432,439,-45,483,486,490,514,517,526,535,537,539,]),'PLUS':([9,37,47,60,65,74,79,80,88,90,94,106,149,156,179,190,191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,213,226,227,232,234,237,242,250,252,263,270,283,287,308,309,315,332,333,338,343,346,349,371,389,415,422,426,438,445,458,482,495,496,502,507,513,524,529,530,543,547,],[-38,117,-40,-36,-42,-39,-41,-37,117,-68,-65,-66,117,-71,117,117,117,117,117,117,-47,117,117,-46,117,117,117,-50,-49,-48,117,117,117,117,117,117,117,117,117,117,-43,117,117,-40,117,117,117,-69,-70,-44,-89,117,117,117,117,117,117,117,117,117,117,-45,117,117,117,117,117,117,117,117,-71,117,117,117,117,117,117,]),'SPRITEGROUP':([0,2,3,11,13,14,19,22,23,24,25,26,27,32,34,36,38,39,41,44,46,49,50,51,52,57,58,64,67,68,70,71,72,78,81,82,105,137,186,245,256,266,271,276,281,299,305,313,321,323,327,329,330,331,344,345,350,351,367,368,372,376,378,390,405,425,427,435,440,443,444,451,452,454,455,459,460,463,466,467,471,479,487,488,489,493,494,499,511,],[-2,10,-3,-35,-13,-9,-15,-18,-34,-26,-31,-5,-30,-119,-32,-14,-28,-23,-27,-10,-12,-21,-33,-11,-8,-19,-20,-7,-16,-17,-24,-25,-6,-29,-22,-4,-120,-209,-2,-199,-104,-111,-208,10,-190,-179,-193,-206,-200,-166,-212,-192,-210,-2,-207,-121,-191,-2,-211,-194,-167,-2,-139,10,10,10,-158,-189,-103,-165,-136,-2,-156,-124,-160,-125,-162,-164,-178,-112,-122,-159,10,-157,-161,-163,-149,-168,-123,]),'COMP_NEQ':([9,37,47,60,65,74,79,80,88,90,94,106,149,156,179,190,191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,213,226,227,232,234,237,242,250,252,263,270,283,287,308,309,315,332,333,338,343,346,349,371,389,415,422,426,438,445,458,482,495,496,502,507,513,524,529,530,543,547,],[-38,118,-40,-36,-42,-39,-41,
-37,118,-68,-65,-66,118,-71,118,-60,-57,118,-56,118,-47,-62,-59,-46,-58,-61,118,-50,-49,-48,118,118,-55,-54,118,118,118,118,118,118,-43,118,118,-40,118,118,118,-69,-70,-44,-89,118,118,118,118,118,118,118,118,118,118,-45,118,118,118,118,118,118,118,118,-71,118,118,118,118,118,118,]),'$end':([0,1,2,3,11,13,14,19,22,23,24,25,26,27,32,34,36,38,39,41,44,46,49,50,51,52,57,58,64,67,68,70,71,72,78,81,82,105,137,245,256,266,271,281,299,305,313,321,323,327,329,330,344,345,350,367,368,372,378,427,435,440,443,444,452,454,455,459,460,463,466,467,471,479,488,489,493,494,499,511,],[-2,0,-1,-3,-35,-13,-9,-15,-18,-34,-26,-31,-5,-30,-119,-32,-14,-28,-23,-27,-10,-12,-21,-33,-11,-8,-19,-20,-7,-16,-17,-24,-25,-6,-29,-22,-4,-120,-209,-199,-104,-111,-208,-190,-179,-193,-206,-200,-166,-212,-192,-210,-207,-121,-191,-211,-194,-167,-139,-158,-189,-103,-165,-136,-156,-124,-160,-125,-162,-164,-178,-112,-122,-159,-157,-161,-163,-149,-168,-123,]),'TERNARY_OPEN':([9,37,47,60,65,74,79,80,88,90,94,106,149,156,179,190,191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,213,226,227,232,234,237,242,250,252,263,270,283,287,308,309,315,332,333,338,343,346,349,371,389,415,422,426,438,445,458,482,495,496,502,507,513,524,529,530,543,547,],[-38,120,-40,-36,-42,-39,-41,-37,120,-68,-65,-66,120,-71,120,-60,-57,-63,-56,-53,-47,-62,-59,-46,-58,-61,120,-50,-49,-48,120,-51,-55,-54,-64,-52,120,120,120,120,-43,120,120,-40,120,120,120,-69,-70,-44,-89,120,120,120,120,120,120,120,120,120,120,-45,120,120,120,120,120,120,120,120,-71,120,120,120,120,120,120,]),'COMP_LT':([9,37,47,60,65,74,79,80,88,90,94,106,149,156,179,190,191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,213,226,227,232,234,237,242,250,252,263,270,283,287,308,309,315,332,333,338,343,346,349,371,389,415,422,426,438,445,458,482,495,496,502,507,513,524,529,530,543,547,],[-38,119,-40,-36,-42,-39,-41,-37,119,-68,-65,-66,119,-71,119,-60,-57,119,-56,119,-47,-62,-59,-46,-58,-61,119,-50,-49,-48,119,119,-55,-54,119,119,119,119,119,119,-43,119,119,-40,119,119,119,-69,-70,-44,-89,119,119,119,119,119,119,119,119,119,119,-45,119,119,119,119,119,119,119,119,-71,119,119,119,119,119,119,]),'XOR':([9,37,47,60,65,74,79,80,88,90,94,106,149,156,179,190,191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,213,226,227,232,234,237,242,250,252,263,270,283,287,308,309,315,332,333,338,343,346,349,371,389,415,422,426,438,445,458,482,495,496,502,507,513,524,529,530,543,547,],[-38,113,-40,-36,-42,-39,-41,-37,113,-68,-65,-66,113,-71,113,-60,-57,113,-56,-53,-47,-62,-59,-46,-58,-61,113,-50,-49,-48,113,-51,-55,-54,113,113,113,113,113,113,-43,113,113,-40,113,113,113,-69,-70,-44,-89,113,113,113,113,113,113,113,113,113,113,-45,113,113,113,113,113,113,113,113,-71,113,113,113,113,113,113,]),'STRING':([0,2,3,5,7,11,13,14,15,19,22,23,24,25,26,27,32,33,34,36,38,39,41,44,46,49,50,51,52,57,58,64,67,68,70,71,72,73,78,81,82,84,95,96,97,99,100,101,103,105,107,109,110,111,112,113,114,115,116,117,118,119,120,121,122,123,124,125,126,127,128,129,130,131,132,133,135,136,137,138,141,143,144,145,147,148,151,152,153,155,157,164,174,175,176,181,186,189,215,219,238,245,248,249,251,256,258,259,262,264,266,267,268,271,274,276,277,280,281,299,301,305,307,313,314,319,320,321,323,326,327,328,329,330,331,335,336,337,344,345,350,351,354,360,366,367,368,372,376,377,378,383,386,388,390,396,401,405,408,425,427,430,433,435,437,439,440,443,444,451,452,454,455,459,460,463,464,465,466,467,471,472,473,476,479,483,486,487,488,489,490,491,493,494,499,500,503,505,508,50
9,510,511,512,514,517,518,519,523,525,526,528,534,535,536,537,538,539,542,544,545,546,548,],[-2,69,-3,69,69,-35,-13,-9,69,-15,-18,-34,-26,-31,-5,-30,-119,69,-32,-14,-28,-23,-27,-10,-12,-21,-33,-11,-8,-19,-20,-7,-16,-17,-24,-25,-6,69,-29,-22,-4,69,69,69,69,69,69,69,69,-120,69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,-209,69,69,69,69,69,69,69,69,-101,69,69,69,69,69,-117,69,69,-2,69,69,69,69,-199,-213,-216,69,-104,69,69,-134,-118,-111,-135,69,-208,69,69,69,69,-190,-179,69,-193,69,-206,-102,-101,69,-200,-166,-214,-212,69,-192,-210,-2,-137,-133,-116,-207,-121,-191,-2,-128,69,69,-211,-194,-167,-2,69,-139,69,69,69,69,69,-76,69,69,69,-158,-142,69,-189,69,69,-103,-165,-136,-2,-156,-124,-160,-125,-162,-164,69,69,-178,-112,-122,-100,69,69,-159,69,69,69,-157,-161,69,69,-163,-149,-168,-143,-142,69,-215,-138,69,-123,-129,69,69,69,69,69,-86,69,-130,-87,69,-131,69,-144,69,-132,-88,-146,69,-145,]),'SKIP_ALL':([0,2,3,11,13,14,19,22,23,24,25,26,27,32,34,36,38,39,41,44,46,49,50,51,52,57,58,64,67,68,70,71,72,78,81,82,105,137,186,245,256,266,271,276,281,299,305,313,321,323,327,329,330,331,344,345,350,351,367,368,372,376,378,390,405,425,427,435,440,443,444,451,452,454,455,459,460,463,466,467,471,479,487,488,489,493,494,499,511,],[-2,53,-3,-35,-13,-9,-15,-18,-34,-26,-31,-5,-30,-119,-32,-14,-28,-23,-27,-10,-12,-21,-33,-11,-8,-19,-20,-7,-16,-17,-24,-25,-6,-29,-22,-4,-120,-209,-2,-199,-104,-111,-208,53,-190,-179,-193,-206,-200,-166,-212,-192,-210,-2,-207,-121,-191,-2,-211,-194,-167,-2,-139,53,53,53,-158,-189,-103,-165,-136,-2,-156,-124,-160,-125,-162,-164,-178,-112,-122,-159,53,-157,-161,-163,-149,-168,-123,]),'REPLACENEWSPRITE':([0,2,3,11,13,14,19,22,23,24,25,26,27,32,34,36,38,39,41,44,46,49,50,51,52,57,58,64,67,68,70,71,72,78,81,82,105,137,186,245,256,266,271,276,281,299,305,313,321,323,327,329,330,331,344,345,350,351,367,368,372,376,378,390,405,425,427,435,440,443,444,451,452,454,455,459,460,463,466,467,471,479,487,488,489,493,494,499,511,],[-2,4,-3,-35,-13,-9,-15,-18,-34,-26,-31,-5,-30,-119,-32,-14,-28,-23,-27,-10,-12,-21,-33,-11,-8,-19,-20,-7,-16,-17,-24,-25,-6,-29,-22,-4,-120,-209,-2,-199,-104,-111,-208,4,-190,-179,-193,-206,-200,-166,-212,-192,-210,-2,-207,-121,-191,-2,-211,-194,-167,-2,-139,4,4,4,-158,-189,-103,-165,-136,-2,-156,-124,-160,-125,-162,-164,-178,-112,-122,-159,4,-157,-161,-163,-149,-168,-123,]),'BASECOST':([0,2,3,11,13,14,19,22,23,24,25,26,27,32,34,36,38,39,41,44,46,49,50,51,52,57,58,64,67,68,70,71,72,78,81,82,105,137,186,245,256,266,271,276,281,299,305,313,321,323,327,329,330,331,344,345,350,351,367,368,372,376,378,390,405,425,427,435,440,443,444,451,452,454,455,459,460,463,466,467,471,479,487,488,489,493,494,499,511,],[-2,77,-3,-35,-13,-9,-15,-18,-34,-26,-31,-5,-30,-119,-32,-14,-28,-23,-27,-10,-12,-21,-33,-11,-8,-19,-20,-7,-16,-17,-24,-25,-6,-29,-22,-4,-120,-209,-2,-199,-104,-111,-208,77,-190,-179,-193,-206,-200,-166,-212,-192,-210,-2,-207,-121,-191,-2,-211,-194,-167,-2,-139,77,77,77,-158,-189,-103,-165,-136,-2,-156,-124,-160,-125,-162,-164,-178,-112,-122,-159,77,-157,-161,-163,-149,-168,-123,]),'RANDOMSWITCH':([0,2,3,11,13,14,19,22,23,24,25,26,27,32,34,36,38,39,41,44,46,49,50,51,52,57,58,64,67,68,70,71,72,78,81,82,105,137,186,245,256,266,271,276,281,299,305,313,321,323,327,329,330,331,344,345,350,351,367,368,372,376,378,390,405,425,427,435,440,443,444,451,452,454,455,459,460,463,466,467,471,479,487,488,489,493,494,499,511,],[-2,28,-3,-35,-13,-9,-15,-18,-34,-26,-31,-5,-30,-119,-32,-14,-28,-23,-27,-10,-12,-21,-33,-11,-8,-19,-20,-7,-16,-17,-24,-25,-6,-29,-2
2,-4,-120,-209,-2,-199,-104,-111,-208,28,-190,-179,-193,-206,-200,-166,-212,-192,-210,-2,-207,-121,-191,-2,-211,-194,-167,-2,-139,28,28,28,-158,-189,-103,-165,-136,-2,-156,-124,-160,-125,-162,-164,-178,-112,-122,-159,28,-157,-161,-163,-149,-168,-123,]),'RANGE':([9,47,60,65,74,79,80,90,94,106,156,190,191,192,193,194,195,196,197,198,199,200,202,203,204,206,207,208,209,210,234,283,287,308,309,349,422,458,502,543,],[-38,-40,-36,-42,-39,-41,-37,-68,-65,-66,-71,-60,-57,-63,-56,-53,-47,-62,-59,-46,-58,-61,-50,-49,-48,-51,-55,-54,-64,-52,-43,-69,-70,-44,-89,-67,-45,491,518,546,]),'TILELAYOUT':([0,2,3,11,13,14,19,22,23,24,25,26,27,32,34,36,38,39,41,44,46,49,50,51,52,57,58,64,67,68,70,71,72,78,81,82,105,137,186,245,256,266,271,276,281,299,305,313,321,323,327,329,330,331,344,345,350,351,367,368,372,376,378,390,405,425,427,435,440,443,444,451,452,454,455,459,460,463,466,467,471,479,487,488,489,493,494,499,511,],[-2,12,-3,-35,-13,-9,-15,-18,-34,-26,-31,-5,-30,-119,-32,-14,-28,-23,-27,-10,-12,-21,-33,-11,-8,-19,-20,-7,-16,-17,-24,-25,-6,-29,-22,-4,-120,-209,-2,-199,-104,-111,-208,12,-190,-179,-193,-206,-200,-166,-212,-192,-210,-2,-207,-121,-191,-2,-211,-194,-167,-2,-139,12,12,12,-158,-189,-103,-165,-136,-2,-156,-124,-160,-125,-162,-164,-178,-112,-122,-159,12,-157,-161,-163,-149,-168,-123,]),'SHIFT_RIGHT':([9,37,47,60,65,74,79,80,88,90,94,106,149,156,179,190,191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,213,226,227,232,234,237,242,250,252,263,270,283,287,308,309,315,332,333,338,343,346,349,371,389,415,422,426,438,445,458,482,495,496,502,507,513,524,529,530,543,547,],[-38,126,-40,-36,-42,-39,-41,-37,126,-68,-65,-66,126,-71,126,126,126,126,-56,126,-47,126,126,-46,126,126,126,-50,-49,-48,126,126,-55,-54,126,126,126,126,126,126,-43,126,126,-40,126,126,126,-69,-70,-44,-89,126,126,126,126,126,126,126,126,126,126,-45,126,126,126,126,126,126,126,126,-71,126,126,126,126,126,126,]),'LPAREN':([0,2,3,4,5,6,7,11,13,14,15,16,17,18,19,21,22,23,24,25,26,27,28,31,32,33,34,35,36,38,39,40,41,43,44,45,46,47,48,49,50,51,52,54,56,57,58,59,61,62,64,67,68,69,70,71,72,73,76,78,81,82,83,84,85,95,96,97,99,100,101,103,105,107,108,109,110,111,112,113,114,115,116,117,118,119,120,121,122,123,124,125,126,127,128,129,130,131,132,133,134,135,136,137,138,139,140,141,143,144,145,147,148,150,151,152,153,155,157,164,174,175,176,181,186,187,189,215,219,238,245,248,249,250,251,256,258,259,262,264,266,267,268,271,274,276,277,280,281,296,298,299,305,307,313,314,319,320,321,323,326,327,328,329,330,331,335,336,337,344,345,350,351,354,360,366,367,368,372,376,377,378,383,384,386,388,390,396,401,405,408,425,427,430,433,435,437,439,440,443,444,451,452,454,455,459,460,463,464,465,466,467,471,472,473,476,477,479,483,486,487,488,489,490,491,493,494,499,500,503,505,508,509,510,511,512,514,517,518,519,523,525,526,528,534,536,537,538,539,542,544,545,546,548,],[-2,73,-3,84,73,89,73,-35,-13,-9,73,95,96,97,-15,99,-18,-34,-26,-31,-5,-30,100,103,-119,73,-32,107,-14,-28,-23,130,-27,132,-10,133,-12,135,136,-21,-33,-11,-8,138,141,-19,-20,143,144,145,-7,-16,-17,148,-24,-25,-6,73,151,-29,-22,-4,153,73,155,73,73,73,73,73,73,73,-120,73,189,73,73,73,73,73,73,73,73,73,73,73,73,73,73,73,73,73,73,73,73,73,73,73,73,73,215,73,73,-209,73,219,220,73,73,73,73,73,73,236,73,-101,73,73,73,73,73,-117,73,73,-2,277,73,73,73,73,-199,-213,-216,135,73,-104,73,73,-134,-118,-111,-135,73,-208,73,73,73,73,-190,360,363,-179,-193,73,-206,-102,-101,73,-200,-166,-214,-212,73,-192,-210,-2,-137,-133,-116,-207,-121,-191,-2,-128,73,73,-211,-194,-167,-2,73,-139,73,43
3,73,73,73,73,-76,73,73,73,-158,-142,73,-189,73,73,-103,-165,-136,-2,-156,-124,-160,-125,-162,-164,73,73,-178,-112,-122,-100,73,73,505,-159,73,73,73,-157,-161,73,73,-163,-149,-168,-143,-142,73,-215,-138,73,-123,-129,73,73,73,73,73,-86,73,-130,-87,-131,73,-144,73,-132,-88,-146,73,-145,]),'GRAPHICS':([0,2,3,11,13,14,19,22,23,24,25,26,27,32,34,36,38,39,41,44,46,49,50,51,52,57,58,64,67,68,70,71,72,78,81,82,105,137,186,245,256,266,271,276,281,299,305,313,321,323,327,329,330,331,344,345,350,351,367,368,372,376,378,390,405,425,427,435,440,443,444,451,452,454,455,459,460,463,466,467,471,479,487,488,489,493,494,499,511,],[-2,29,-3,-35,-13,-9,-15,-18,-34,-26,-31,-5,-30,-119,-32,-14,-28,-23,-27,-10,-12,-21,-33,-11,-8,-19,-20,-7,-16,-17,-24,-25,-6,-29,-22,-4,-120,-209,-2,-199,-104,-111,-208,29,-190,-179,-193,-206,-200,-166,-212,-192,-210,-2,-207,-121,-191,-2,-211,-194,-167,-2,-139,29,29,29,-158,-189,-103,-165,-136,-2,-156,-124,-160,-125,-162,-164,-178,-112,-122,-159,29,-157,-161,-163,-149,-168,-123,]),'VARIABLE':([0,2,3,5,7,11,13,14,15,19,22,23,24,25,26,27,32,33,34,36,38,39,41,44,46,49,50,51,52,57,58,64,67,68,70,71,72,73,78,81,82,84,95,96,97,99,100,101,103,105,107,109,110,111,112,113,114,115,116,117,118,119,120,121,122,123,124,125,126,127,128,129,130,131,132,133,135,136,137,138,141,143,144,145,147,148,151,152,153,155,157,164,174,175,176,181,186,189,215,219,238,245,248,249,251,256,258,259,262,264,266,267,268,271,274,276,277,280,281,299,305,307,313,314,319,320,321,323,326,327,328,329,330,331,335,336,337,344,345,350,351,354,360,366,367,368,372,376,377,378,383,386,388,390,396,401,405,408,425,427,430,433,435,437,439,440,443,444,451,452,454,455,459,460,463,464,465,466,467,471,472,473,476,479,483,486,487,488,489,490,491,493,494,499,500,503,505,508,509,510,511,512,514,517,518,519,523,525,526,528,534,536,537,538,539,542,544,545,546,548,],[-2,42,-3,42,42,-35,-13,-9,42,-15,-18,-34,-26,-31,-5,-30,-119,42,-32,-14,-28,-23,-27,-10,-12,-21,-33,-11,-8,-19,-20,-7,-16,-17,-24,-25,-6,42,-29,-22,-4,42,42,42,42,42,42,42,42,-120,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,-209,42,42,42,42,42,42,42,42,-101,42,42,42,42,42,-117,42,42,-2,42,42,42,42,-199,-213,-216,42,-104,42,42,-134,-118,-111,-135,42,-208,42,42,42,42,-190,-179,-193,42,-206,-102,-101,42,-200,-166,-214,-212,42,-192,-210,-2,-137,-133,-116,-207,-121,-191,-2,-128,42,42,-211,-194,-167,-2,42,-139,42,42,42,42,42,-76,42,42,42,-158,-142,42,-189,42,42,-103,-165,-136,-2,-156,-124,-160,-125,-162,-164,42,42,-178,-112,-122,-100,42,42,-159,42,42,42,-157,-161,42,42,-163,-149,-168,-143,-142,42,-215,-138,42,-123,-129,42,42,42,42,42,-86,42,-130,-87,-131,42,-144,42,-132,-88,-146,42,-145,]),'RPAREN':([9,47,60,65,74,79,80,84,87,88,90,94,95,96,97,99,100,103,106,107,130,133,135,136,138,141,145,148,149,153,154,155,156,157,158,165,166,167,172,173,185,188,189,190,191,192,193,194,195,196,197,198,199,200,202,203,204,206,207,208,209,210,211,213,214,215,216,217,218,219,220,221,226,227,228,233,234,236,237,239,241,242,279,283,286,287,290,291,292,293,308,309,311,346,349,358,413,422,433,478,495,496,505,521,],[-38,-40,-36,-42,-39,-41,-37,-92,-93,-90,-68,-65,-92,-92,-92,-92,-92,-92,-66,-92,-92,-92,-92,-92,-92,-92,-92,-92,234,-92,240,-92,-71,-94,243,253,254,255,260,261,275,278,-92,-60,-57,-63,-56,-53,-47,-62,-59,-46,-58,-61,-50,-49,-48,-51,-55,-54,-64,-52,282,284,285,-92,287,288,289,-92,-97,294,302,303,304,309,-43,-97,312,316,318,-91,348,-69,353,-70,356,357,-98,-95,-44,-89,375,402,-67,-99,-96,-45,-92,506,515,516,-92,532,]),'COMP_GE':([9,37,47,60,65,74,79,80,88,
90,94,106,149,156,179,190,191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,213,226,227,232,234,237,242,250,252,263,270,283,287,308,309,315,332,333,338,343,346,349,371,389,415,422,426,438,445,458,482,495,496,502,507,513,524,529,530,543,547,],[-38,109,-40,-36,-42,-39,-41,-37,109,-68,-65,-66,109,-71,109,-60,-57,109,-56,109,-47,-62,-59,-46,-58,-61,109,-50,-49,-48,109,109,-55,-54,109,109,109,109,109,109,-43,109,109,-40,109,109,109,-69,-70,-44,-89,109,109,109,109,109,109,109,109,109,109,-45,109,109,109,109,109,109,109,109,-71,109,109,109,109,109,109,]),'EQ':([9,37,47,60,65,74,79,80,90,94,106,156,190,191,192,193,194,195,196,197,198,199,200,202,203,204,206,207,208,209,210,234,283,287,308,309,349,422,],[-38,124,-40,-36,-42,-39,-41,-37,-68,-65,-66,-71,-60,-57,-63,-56,-53,-47,-62,-59,-46,-58,-61,-50,-49,-48,-51,-55,-54,-64,-52,-43,-69,-70,-44,-89,-67,-45,]),'ID':([0,2,3,4,5,7,10,11,12,13,14,15,19,22,23,24,25,26,27,32,33,34,35,36,38,39,41,44,45,46,49,50,51,52,54,55,57,58,64,67,68,70,71,72,73,75,78,81,82,84,89,91,95,96,97,98,99,100,101,102,103,105,107,109,110,111,112,113,114,115,116,117,118,119,120,121,122,123,124,125,126,127,128,129,130,131,132,133,135,136,137,138,141,142,143,144,145,146,147,148,151,152,153,155,157,163,164,168,170,174,175,176,180,181,182,183,186,189,215,219,220,222,223,224,235,236,238,245,246,247,248,249,251,256,257,258,259,262,264,266,267,268,269,271,272,273,274,276,277,280,281,299,300,305,306,307,310,313,314,317,319,320,321,323,324,326,327,328,329,330,331,334,335,336,337,339,340,342,344,345,347,350,351,352,354,355,358,359,360,361,362,365,366,367,368,372,373,376,377,378,379,381,382,383,385,386,388,390,391,393,395,396,397,398,399,400,401,403,404,405,406,407,408,410,411,412,414,417,419,420,423,424,425,427,428,429,430,432,433,434,435,437,439,440,441,442,443,444,446,447,449,450,451,452,453,454,455,456,459,460,461,462,463,464,465,466,467,468,469,470,471,472,473,474,476,479,481,483,484,485,486,487,488,489,490,491,493,494,497,498,499,500,501,503,505,506,508,509,510,511,512,514,517,518,519,520,522,523,525,526,528,531,532,534,536,537,538,539,540,542,544,545,546,548,],[-2,47,-3,85,47,47,92,-35,93,-13,-9,47,-15,-18,-34,-26,-31,-5,-30,-119,47,-32,108,-14,-28,-23,-27,-10,134,-12,-21,-33,-11,-8,139,140,-19,-20,-7,-16,-17,-24,-25,-6,47,150,-29,-22,-4,47,158,160,47,47,47,171,47,47,47,184,47,-120,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,-209,47,47,225,47,47,47,230,47,47,47,-101,47,47,47,-169,250,171,-105,47,-117,47,-72,47,-73,184,-2,47,47,47,293,298,-180,225,-173,293,47,-199,160,325,-213,-216,250,-104,-106,47,47,-134,-118,-111,-135,47,341,-208,-74,-75,47,47,47,47,-190,-179,-181,-193,369,47,374,-206,-102,384,-101,47,-200,-166,-170,-214,-212,47,-192,-210,-2,384,-137,-133,-116,-79,341,341,-207,-121,384,-191,-2,384,-128,384,413,384,47,298,-183,225,47,-211,-194,-167,-174,-2,47,-139,384,-152,-153,47,384,47,47,47,-109,-107,384,47,-78,-80,448,341,-76,384,384,47,384,384,47,384,384,384,384,-184,-182,225,184,-173,47,-158,-154,-155,-142,477,47,384,-189,47,47,-103,-110,-108,-165,-136,448,-84,-82,-77,-2,-156,384,-124,-160,384,-125,-162,384,384,-164,47,47,-178,-112,-176,184,374,-122,-100,47,-140,47,-159,-172,47,-81,-83,47,47,-157,-161,47,47,-163,-149,-175,-177,-168,-143,-147,-142,47,-150,-215,-138,47,-123,-129,47,47,47,47,-141,-171,47,-86,47,-130,-148,-151,-87,-131,47,-144,47,-85,-132,-88,-146,47,-145,]),'UNIT':([9,47,60,65,74,79,80,90,94,106,156,190,191,192,193,194,195,196,197,198,199,200,202,203,204,206,207,208,209,210,234,283,287,308,309,33
2,333,349,422,458,513,],[-38,-40,-36,-42,-39,-41,-37,-68,-65,-66,-71,-60,-57,-63,-56,-53,-47,-62,-59,-46,-58,-61,-50,-49,-48,-51,-55,-54,-64,-52,-43,-69,-70,-44,-89,392,394,-67,-45,492,527,]),'IF':([0,2,3,11,13,14,19,22,23,24,25,26,27,32,34,36,38,39,41,44,46,49,50,51,52,57,58,64,67,68,70,71,72,78,81,82,104,105,137,186,245,256,266,271,276,281,299,305,313,321,323,327,329,330,331,344,345,350,351,367,368,372,376,378,390,405,425,427,435,440,443,444,451,452,454,455,459,460,463,466,467,471,479,487,488,489,493,494,499,511,],[-2,76,-3,-35,-13,-9,-15,-18,-34,-26,-31,-5,-30,-119,-32,-14,-28,-23,-27,-10,-12,-21,-33,-11,-8,-19,-20,-7,-16,-17,-24,-25,-6,-29,-22,-4,187,-120,-209,-2,-199,-104,-111,-208,76,-190,-179,-193,-206,-200,-166,-212,-192,-210,-2,-207,-121,-191,-2,-211,-194,-167,-2,-139,76,76,76,-158,-189,-103,-165,-136,-2,-156,-124,-160,-125,-162,-164,-178,-112,-122,-159,76,-157,-161,-163,-149,-168,-123,]),'AND':([9,37,47,60,65,74,79,80,88,90,94,106,149,156,179,190,191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,213,226,227,232,234,237,242,250,252,263,270,283,287,308,309,315,332,333,338,343,346,349,371,389,415,422,426,438,445,458,482,495,496,502,507,513,524,529,530,543,547,],[-38,125,-40,-36,-42,-39,-41,-37,125,-68,-65,-66,125,-71,125,-60,-57,125,-56,125,-47,-62,-59,-46,-58,-61,125,-50,-49,-48,125,-51,-55,-54,125,125,125,125,125,125,-43,125,125,-40,125,125,125,-69,-70,-44,-89,125,125,125,125,125,125,125,125,125,125,-45,125,125,125,125,125,125,125,125,-71,125,125,125,125,125,125,]),'GRF':([0,2,3,11,13,14,19,22,23,24,25,26,27,32,34,36,38,39,41,44,46,49,50,51,52,57,58,64,67,68,70,71,72,78,81,82,105,137,186,245,256,266,271,276,281,299,305,313,321,323,327,329,330,331,344,345,350,351,367,368,372,376,378,390,405,425,427,435,440,443,444,451,452,454,455,459,460,463,466,467,471,479,487,488,489,493,494,499,511,],[-2,30,-3,-35,-13,-9,-15,-18,-34,-26,-31,-5,-30,-119,-32,-14,-28,-23,-27,-10,-12,-21,-33,-11,-8,-19,-20,-7,-16,-17,-24,-25,-6,-29,-22,-4,-120,-209,-2,-199,-104,-111,-208,30,-190,-179,-193,-206,-200,-166,-212,-192,-210,-2,-207,-121,-191,-2,-211,-194,-167,-2,-139,30,30,30,-158,-189,-103,-165,-136,-2,-156,-124,-160,-125,-162,-164,-178,-112,-122,-159,30,-157,-161,-163,-149,-168,-123,]),'LBRACE':([8,9,20,29,30,47,59,60,63,65,74,77,79,80,90,92,93,94,104,106,142,150,156,181,190,191,192,193,194,195,196,197,198,199,200,202,203,204,206,207,208,209,210,223,224,234,240,243,255,260,261,270,278,283,284,285,287,288,289,294,300,302,303,308,309,312,318,341,348,349,353,356,357,362,365,374,375,380,402,417,419,420,422,475,486,],[91,-38,98,101,102,-40,142,-36,146,-42,-39,152,-41,-37,-68,163,164,-65,186,-66,222,235,-71,269,-60,-57,-63,-56,-53,-47,-62,-59,-46,-58,-61,-50,-49,-48,-51,-55,-54,-64,-52,-180,222,-43,317,319,331,334,335,342,347,-69,351,352,-70,354,355,359,-181,365,366,-44,-89,376,385,399,404,-67,407,411,412,-183,222,423,424,430,451,-184,-182,222,-45,503,510,]),'LOGICAL_NOT':([0,2,3,5,7,11,13,14,15,19,22,23,24,25,26,27,32,33,34,36,38,39,41,44,46,49,50,51,52,57,58,64,67,68,70,71,72,73,78,81,82,84,95,96,97,99,100,101,103,105,107,109,110,111,112,113,114,115,116,117,118,119,120,121,122,123,124,125,126,127,128,129,130,131,132,133,135,136,137,138,141,143,144,145,147,148,151,152,153,155,157,164,174,175,176,181,186,189,215,219,238,245,248,249,251,256,258,259,262,264,266,267,268,271,274,276,277,280,281,299,305,307,313,314,319,320,321,323,326,327,328,329,330,331,335,336,337,344,345,350,351,354,360,366,367,368,372,376,377,378,383,386,388,390,396,401,405,408,425,427,430,433,435,437,439,440,443,4
44,451,452,454,455,459,460,463,464,465,466,467,471,472,473,476,479,483,486,487,488,489,490,491,493,494,499,500,503,505,508,509,510,511,512,514,517,518,519,523,525,526,528,534,536,537,538,539,542,544,545,546,548,],[-2,15,-3,15,15,-35,-13,-9,15,-15,-18,-34,-26,-31,-5,-30,-119,15,-32,-14,-28,-23,-27,-10,-12,-21,-33,-11,-8,-19,-20,-7,-16,-17,-24,-25,-6,15,-29,-22,-4,15,15,15,15,15,15,15,15,-120,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,-209,15,15,15,15,15,15,15,15,-101,15,15,15,15,15,-117,15,15,-2,15,15,15,15,-199,-213,-216,15,-104,15,15,-134,-118,-111,-135,15,-208,15,15,15,15,-190,-179,-193,15,-206,-102,-101,15,-200,-166,-214,-212,15,-192,-210,-2,-137,-133,-116,-207,-121,-191,-2,-128,15,15,-211,-194,-167,-2,15,-139,15,15,15,15,15,-76,15,15,15,-158,-142,15,-189,15,15,-103,-165,-136,-2,-156,-124,-160,-125,-162,-164,15,15,-178,-112,-122,-100,15,15,-159,15,15,15,-157,-161,15,15,-163,-149,-168,-143,-142,15,-215,-138,15,-123,-129,15,15,15,15,15,-86,15,-130,-87,-131,15,-144,15,-132,-88,-146,15,-145,]),'STRING_LITERAL':([0,2,3,5,7,11,13,14,15,19,22,23,24,25,26,27,32,33,34,36,38,39,41,44,46,49,50,51,52,57,58,64,67,68,70,71,72,73,78,81,82,84,91,95,96,97,99,100,101,103,105,107,109,110,111,112,113,114,115,116,117,118,119,120,121,122,123,124,125,126,127,128,129,130,131,132,133,135,136,137,138,141,143,144,145,146,147,148,151,152,153,155,157,164,174,175,176,181,186,189,215,219,238,245,246,248,249,251,256,258,259,262,264,266,267,268,271,274,276,277,280,281,299,305,306,307,313,314,319,320,321,323,326,327,328,329,330,331,335,336,337,344,345,350,351,354,360,363,366,367,368,372,376,377,378,383,386,388,390,396,401,405,408,425,427,430,433,435,437,439,440,443,444,451,452,454,455,459,460,463,464,465,466,467,471,472,473,476,479,483,486,487,488,489,490,491,493,494,499,500,503,505,508,509,510,511,512,514,517,518,519,523,525,526,528,534,536,537,538,539,542,544,545,546,548,],[-2,79,-3,79,79,-35,-13,-9,79,-15,-18,-34,-26,-31,-5,-30,-119,79,-32,-14,-28,-23,-27,-10,-12,-21,-33,-11,-8,-19,-20,-7,-16,-17,-24,-25,-6,79,-29,-22,-4,79,162,79,79,79,79,79,79,79,-120,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,-209,79,79,79,79,79,231,79,79,79,-101,79,79,79,79,79,-117,79,79,-2,79,79,79,79,-199,162,-213,-216,79,-104,79,79,-134,-118,-111,-135,79,-208,79,79,79,79,-190,-179,-193,370,79,-206,-102,-101,79,-200,-166,-214,-212,79,-192,-210,-2,-137,-133,-116,-207,-121,-191,-2,-128,79,418,79,-211,-194,-167,-2,79,-139,79,79,79,79,79,-76,79,79,79,-158,-142,79,-189,79,79,-103,-165,-136,-2,-156,-124,-160,-125,-162,-164,79,79,-178,-112,-122,-100,79,79,-159,79,79,79,-157,-161,79,79,-163,-149,-168,-143,-142,79,-215,-138,79,-123,-129,79,79,79,79,79,-86,79,-130,-87,-131,79,-144,79,-132,-88,-146,79,-145,]),'FONTGLYPH':([0,2,3,11,13,14,19,22,23,24,25,26,27,32,34,36,38,39,41,44,46,49,50,51,52,57,58,64,67,68,70,71,72,78,81,82,105,137,186,245,256,266,271,276,281,299,305,313,321,323,327,329,330,331,344,345,350,351,367,368,372,376,378,390,405,425,427,435,440,443,444,451,452,454,455,459,460,463,466,467,471,479,487,488,489,493,494,499,511,],[-2,54,-3,-35,-13,-9,-15,-18,-34,-26,-31,-5,-30,-119,-32,-14,-28,-23,-27,-10,-12,-21,-33,-11,-8,-19,-20,-7,-16,-17,-24,-25,-6,-29,-22,-4,-120,-209,-2,-199,-104,-111,-208,54,-190,-179,-193,-206,-200,-166,-212,-192,-210,-2,-207,-121,-191,-2,-211,-194,-167,-2,-139,54,54,54,-158,-189,-103,-165,-136,-2,-156,-124,-160,-125,-162,-164,-178,-112,-122,-159,54,-157,-161,-163,-149,-168,-123,]),'DEACTIVATE':([0,2,3,11,13,14,19,22,23,24,25,26,27,32,34,36,38,
39,41,44,46,49,50,51,52,57,58,64,67,68,70,71,72,78,81,82,105,137,186,245,256,266,271,276,281,299,305,313,321,323,327,329,330,331,344,345,350,351,367,368,372,376,378,390,405,425,427,435,440,443,444,451,452,454,455,459,460,463,466,467,471,479,487,488,489,493,494,499,511,],[-2,31,-3,-35,-13,-9,-15,-18,-34,-26,-31,-5,-30,-119,-32,-14,-28,-23,-27,-10,-12,-21,-33,-11,-8,-19,-20,-7,-16,-17,-24,-25,-6,-29,-22,-4,-120,-209,-2,-199,-104,-111,-208,31,-190,-179,-193,-206,-200,-166,-212,-192,-210,-2,-207,-121,-191,-2,-211,-194,-167,-2,-139,31,31,31,-158,-189,-103,-165,-136,-2,-156,-124,-160,-125,-162,-164,-178,-112,-122,-159,31,-157,-161,-163,-149,-168,-123,]),'DISABLE_ITEM':([0,2,3,11,13,14,19,22,23,24,25,26,27,32,34,36,38,39,41,44,46,49,50,51,52,57,58,64,67,68,70,71,72,78,81,82,105,137,186,245,256,266,271,276,281,299,305,313,321,323,327,329,330,331,344,345,350,351,367,368,372,376,378,390,405,425,427,435,440,443,444,451,452,454,455,459,460,463,466,467,471,479,487,488,489,493,494,499,511,],[-2,16,-3,-35,-13,-9,-15,-18,-34,-26,-31,-5,-30,-119,-32,-14,-28,-23,-27,-10,-12,-21,-33,-11,-8,-19,-20,-7,-16,-17,-24,-25,-6,-29,-22,-4,-120,-209,-2,-199,-104,-111,-208,16,-190,-179,-193,-206,-200,-166,-212,-192,-210,-2,-207,-121,-191,-2,-211,-194,-167,-2,-139,16,16,16,-158,-189,-103,-165,-136,-2,-156,-124,-160,-125,-162,-164,-178,-112,-122,-159,16,-157,-161,-163,-149,-168,-123,]),'FLOAT':([0,2,3,5,7,11,13,14,15,19,22,23,24,25,26,27,32,33,34,36,38,39,41,44,46,49,50,51,52,57,58,64,67,68,70,71,72,73,78,81,82,84,95,96,97,99,100,101,103,105,107,109,110,111,112,113,114,115,116,117,118,119,120,121,122,123,124,125,126,127,128,129,130,131,132,133,135,136,137,138,141,143,144,145,147,148,151,152,153,155,157,164,174,175,176,181,186,189,215,219,238,245,248,249,251,256,258,259,262,264,266,267,268,271,274,276,277,280,281,299,305,307,313,314,319,320,321,323,326,327,328,329,330,331,335,336,337,344,345,350,351,354,360,366,367,368,372,376,377,378,383,386,388,390,396,401,405,408,425,427,430,433,435,437,439,440,443,444,451,452,454,455,459,460,463,464,465,466,467,471,472,473,476,479,483,486,487,488,489,490,491,493,494,499,500,503,505,508,509,510,511,512,514,517,518,519,523,525,526,528,534,536,537,538,539,542,544,545,546,548,],[-2,80,-3,80,80,-35,-13,-9,80,-15,-18,-34,-26,-31,-5,-30,-119,80,-32,-14,-28,-23,-27,-10,-12,-21,-33,-11,-8,-19,-20,-7,-16,-17,-24,-25,-6,80,-29,-22,-4,80,80,80,80,80,80,80,80,-120,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,-209,80,80,80,80,80,80,80,80,-101,80,80,80,80,80,-117,80,80,-2,80,80,80,80,-199,-213,-216,80,-104,80,80,-134,-118,-111,-135,80,-208,80,80,80,80,-190,-179,-193,80,-206,-102,-101,80,-200,-166,-214,-212,80,-192,-210,-2,-137,-133,-116,-207,-121,-191,-2,-128,80,80,-211,-194,-167,-2,80,-139,80,80,80,80,80,-76,80,80,80,-158,-142,80,-189,80,80,-103,-165,-136,-2,-156,-124,-160,-125,-162,-164,80,80,-178,-112,-122,-100,80,80,-159,80,80,80,-157,-161,80,80,-163,-149,-168,-143,-142,80,-215,-138,80,-123,-129,80,80,80,80,80,-86,80,-130,-87,-131,80,-144,80,-132,-88,-146,80,-145,]),'ENGINE_OVERRIDE':([0,2,3,11,13,14,19,22,23,24,25,26,27,32,34,36,38,39,41,44,46,49,50,51,52,57,58,64,67,68,70,71,72,78,81,82,105,137,186,245,256,266,271,276,281,299,305,313,321,323,327,329,330,331,344,345,350,351,367,368,372,376,378,390,405,425,427,435,440,443,444,451,452,454,455,459,460,463,466,467,471,479,487,488,489,493,494,499,511,],[-2,17,-3,-35,-13,-9,-15,-18,-34,-26,-31,-5,-30,-119,-32,-14,-28,-23,-27,-10,-12,-21,-33,-11,-8,-19,-20,-7,-16,-17,-24,-25,-6,-29,-22,-4,-120,-209,-2,-199,-104,-1
11,-208,17,-190,-179,-193,-206,-200,-166,-212,-192,-210,-2,-207,-121,-191,-2,-211,-194,-167,-2,-139,17,17,17,-158,-189,-103,-165,-136,-2,-156,-124,-160,-125,-162,-164,-178,-112,-122,-159,17,-157,-161,-163,-149,-168,-123,]),'ERROR':([0,2,3,11,13,14,19,22,23,24,25,26,27,32,34,36,38,39,41,44,46,49,50,51,52,57,58,64,67,68,70,71,72,78,81,82,105,137,186,245,256,266,271,276,281,299,305,313,321,323,327,329,330,331,344,345,350,351,367,368,372,376,378,390,405,425,427,435,440,443,444,451,452,454,455,459,460,463,466,467,471,479,487,488,489,493,494,499,511,],[-2,40,-3,-35,-13,-9,-15,-18,-34,-26,-31,-5,-30,-119,-32,-14,-28,-23,-27,-10,-12,-21,-33,-11,-8,-19,-20,-7,-16,-17,-24,-25,-6,-29,-22,-4,-120,-209,-2,-199,-104,-111,-208,40,-190,-179,-193,-206,-200,-166,-212,-192,-210,-2,-207,-121,-191,-2,-211,-194,-167,-2,-139,40,40,40,-158,-189,-103,-165,-136,-2,-156,-124,-160,-125,-162,-164,-178,-112,-122,-159,40,-157,-161,-163,-149,-168,-123,]),'OR':([9,37,47,60,65,74,79,80,88,90,94,106,149,156,179,190,191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,213,226,227,232,234,237,242,250,252,263,270,283,287,308,309,315,332,333,338,343,346,349,371,389,415,422,426,438,445,458,482,495,496,502,507,513,524,529,530,543,547,],[-38,129,-40,-36,-42,-39,-41,-37,129,-68,-65,-66,129,-71,129,-60,-57,129,-56,-53,-47,-62,-59,-46,-58,-61,129,-50,-49,-48,129,-51,-55,-54,129,-52,129,129,129,129,-43,129,129,-40,129,129,129,-69,-70,-44,-89,129,129,129,129,129,129,129,129,129,129,-45,129,129,129,129,129,129,129,129,-71,129,129,129,129,129,129,]),'ITEM':([0,2,3,11,13,14,19,22,23,24,25,26,27,32,34,36,38,39,41,44,46,49,50,51,52,57,58,64,67,68,70,71,72,78,81,82,105,137,186,245,256,266,271,276,281,299,305,313,321,323,327,329,330,331,344,345,350,351,367,368,372,376,378,390,405,425,427,435,440,443,444,451,452,454,455,459,460,463,466,467,471,479,487,488,489,493,494,499,511,],[-2,18,-3,-35,-13,-9,-15,-18,-34,-26,-31,-5,-30,-119,-32,-14,-28,-23,-27,-10,-12,-21,-33,-11,-8,-19,-20,-7,-16,-17,-24,-25,-6,-29,-22,-4,-120,-209,-2,-199,-104,-111,-208,18,-190,-179,-193,-206,-200,-166,-212,-192,-210,-2,-207,-121,-191,-2,-211,-194,-167,-2,-139,18,18,18,-158,-189,-103,-165,-136,-2,-156,-124,-160,-125,-162,-164,-178,-112,-122,-159,18,-157,-161,-163,-149,-168,-123,]),'COMMA':([9,47,60,65,74,79,80,87,88,90,94,106,156,159,160,161,162,190,191,192,193,194,195,196,197,198,199,200,202,203,204,206,207,208,209,210,229,230,231,232,234,242,250,252,283,287,292,293,295,297,308,309,322,349,369,370,413,415,416,418,422,436,515,516,],[-38,-40,-36,-42,-39,-41,-37,157,-90,-68,-65,-66,-71,-201,-203,246,-204,-60,-57,-63,-56,-53,-47,-62,-59,-46,-58,-61,-50,-49,-48,-51,-55,-54,-64,-52,306,-195,-196,307,-43,-91,-40,328,-69,-70,358,-95,-185,361,-44,-89,-202,-67,-197,-198,-96,464,-186,465,-45,-205,-187,-188,]),'PRODUCE':([0,2,3,11,13,14,19,22,23,24,25,26,27,32,34,36,38,39,41,44,46,49,50,51,52,57,58,64,67,68,70,71,72,78,81,82,105,137,186,245,256,266,271,276,281,299,305,313,321,323,327,329,330,331,344,345,350,351,367,368,372,376,378,390,405,425,427,435,440,443,444,451,452,454,455,459,460,463,466,467,471,479,487,488,489,493,494,499,511,],[-2,83,-3,-35,-13,-9,-15,-18,-34,-26,-31,-5,-30,-119,-32,-14,-28,-23,-27,-10,-12,-21,-33,-11,-8,-19,-20,-7,-16,-17,-24,-25,-6,-29,-22,-4,-120,-209,-2,-199,-104,-111,-208,83,-190,-179,-193,-206,-200,-166,-212,-192,-210,-2,-207,-121,-191,-2,-211,-194,-167,-2,-139,83,83,83,-158,-189,-103,-165,-136,-2,-156,-124,-160,-125,-162,-164,-178,-112,-122,-159,83,-157,-161,-163,-149,-168,-123,]),'TEMPLATE':([0,2,3,11,13,14,19,22,23,24,25,
26,27,32,34,36,38,39,41,44,46,49,50,51,52,57,58,64,67,68,70,71,72,78,81,82,105,137,186,245,256,266,271,276,281,299,305,313,321,323,327,329,330,331,344,345,350,351,367,368,372,376,378,390,405,425,427,435,440,443,444,451,452,454,455,459,460,463,466,467,471,479,487,488,489,493,494,499,511,],[-2,55,-3,-35,-13,-9,-15,-18,-34,-26,-31,-5,-30,-119,-32,-14,-28,-23,-27,-10,-12,-21,-33,-11,-8,-19,-20,-7,-16,-17,-24,-25,-6,-29,-22,-4,-120,-209,-2,-199,-104,-111,-208,55,-190,-179,-193,-206,-200,-166,-212,-192,-210,-2,-207,-121,-191,-2,-211,-194,-167,-2,-139,55,55,55,-158,-189,-103,-165,-136,-2,-156,-124,-160,-125,-162,-164,-178,-112,-122,-159,55,-157,-161,-163,-149,-168,-123,]),'RBRACKET':([5,9,47,60,65,74,79,80,86,87,88,90,94,106,131,156,157,190,191,192,193,194,195,196,197,198,199,200,202,203,204,206,207,208,209,210,212,232,234,242,283,287,308,309,320,349,371,383,387,422,431,437,476,480,504,],[-92,-38,-40,-36,-42,-39,-41,-37,156,-93,-90,-68,-65,-66,-92,-71,-94,-60,-57,-63,-56,-53,-47,-62,-59,-46,-58,-61,-50,-49,-48,-51,-55,-54,-64,-52,283,308,-43,-91,-69,-70,-44,-89,-92,-67,422,-92,436,-45,474,-92,-92,507,520,]),'PROPERTY':([0,2,3,11,13,14,19,22,23,24,25,26,27,32,34,36,38,39,41,44,46,49,50,51,52,57,58,64,67,68,70,71,72,78,81,82,105,137,186,245,256,266,271,276,281,299,305,313,321,323,327,329,330,331,344,345,350,351,367,368,372,376,378,390,405,425,427,435,440,443,444,451,452,454,455,459,460,463,466,467,471,479,487,488,489,493,494,499,511,],[-2,20,-3,-35,-13,-9,-15,-18,-34,-26,-31,-5,-30,-119,-32,-14,-28,-23,-27,-10,-12,-21,-33,-11,-8,-19,-20,-7,-16,-17,-24,-25,-6,-29,-22,-4,-120,-209,-2,-199,-104,-111,-208,20,-190,-179,-193,-206,-200,-166,-212,-192,-210,-2,-207,-121,-191,-2,-211,-194,-167,-2,-139,20,20,20,-158,-189,-103,-165,-136,-2,-156,-124,-160,-125,-162,-164,-178,-112,-122,-159,20,-157,-161,-163,-149,-168,-123,]),'SPRITESET':([0,2,3,11,13,14,19,22,23,24,25,26,27,32,34,36,38,39,41,44,46,49,50,51,52,57,58,64,67,68,70,71,72,78,81,82,105,137,186,245,256,266,271,276,281,299,305,313,321,323,327,329,330,331,344,345,350,351,367,368,372,376,378,390,405,425,427,435,440,443,444,451,452,454,455,459,460,463,466,467,471,479,487,488,489,493,494,499,511,],[-2,21,-3,-35,-13,-9,-15,-18,-34,-26,-31,-5,-30,-119,-32,-14,-28,-23,-27,-10,-12,-21,-33,-11,-8,-19,-20,-7,-16,-17,-24,-25,-6,-29,-22,-4,-120,-209,-2,-199,-104,-111,-208,21,-190,-179,-193,-206,-200,-166,-212,-192,-210,-2,-207,-121,-191,-2,-211,-194,-167,-2,-139,21,21,21,-158,-189,-103,-165,-136,-2,-156,-124,-160,-125,-162,-164,-178,-112,-122,-159,21,-157,-161,-163,-149,-168,-123,]),'ALT_SPRITES':([0,2,3,11,13,14,19,22,23,24,25,26,27,32,34,36,38,39,41,44,46,49,50,51,52,57,58,64,67,68,70,71,72,78,81,82,105,137,186,245,256,266,271,276,281,299,305,313,321,323,327,329,330,331,344,345,350,351,367,368,372,376,378,390,405,425,427,435,440,443,444,451,452,454,455,459,460,463,466,467,471,479,487,488,489,493,494,499,511,],[-2,56,-3,-35,-13,-9,-15,-18,-34,-26,-31,-5,-30,-119,-32,-14,-28,-23,-27,-10,-12,-21,-33,-11,-8,-19,-20,-7,-16,-17,-24,-25,-6,-29,-22,-4,-120,-209,-2,-199,-104,-111,-208,56,-190,-179,-193,-206,-200,-166,-212,-192,-210,-2,-207,-121,-191,-2,-211,-194,-167,-2,-139,56,56,56,-158,-189,-103,-165,-136,-2,-156,-124,-160,-125,-162,-164,-178,-112,-122,-159,56,-157,-161,-163,-149,-168,-123,]),}
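# _lr_action_items above is PLY's compact encoding of the LR action table:
# each terminal maps to a pair of parallel lists ([states], [actions]).
# The loop below inverts it into _lr_action[state][terminal] = action,
# where a positive action means "shift and go to that state", a negative
# action means "reduce by production abs(action) in _lr_productions", and
# 0 means "accept".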
_lr_action = {}
for _k, _v in _lr_action_items.items():
    for _x, _y in zip(_v[0], _v[1]):
        if _x not in _lr_action:
            _lr_action[_x] = {}
        _lr_action[_x][_k] = _y
del _lr_action_items
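# A minimal illustrative helper (hypothetical; not part of PLY's generated
# output) showing how ply.yacc's LR driver interprets an entry from this
# table. The real parse loop lives in ply.yacc.LRParser.parse().
def _describe_action(state, token_type):
    """Describe the parser action for a (state, lookahead) pair (sketch)."""
    t = _lr_action.get(state, {}).get(token_type)
    if t is None:
        return 'syntax error: no action for %r in state %d' % (token_type, state)
    if t > 0:
        return 'shift, go to state %d' % t
    if t < 0:
        # _lr_productions is defined further down in this module; it is
        # bound by the time this helper could be called.
        return 'reduce by rule %d: %s' % (-t, _lr_productions[-t][0])
    return 'accept'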
_lr_goto_items = {'replace_new':([2,276,390,405,425,487,],[38,38,38,38,38,38,]),'setting_value_list':([399,],[446,]),'generic_assignment_list':([152,319,],[238,386,]),'layout_sprite':([310,470,],[373,373,]),'skip_all':([2,276,390,405,425,487,],[68,68,68,68,68,68,]),'switch_ranges':([354,],[408,]),'conditional':([2,276,390,405,425,487,],[22,22,22,22,22,22,]),'recolour_assignment_list':([430,503,],[473,519,]),'recolour_assignment':([473,519,],[500,500,]),'replace':([2,276,390,405,425,487,],[41,41,41,41,41,41,]),'id_list':([220,236,],[291,311,]),'graphics_assignment':([101,176,366,],[175,264,175,]),'sort_vehicles':([2,276,390,405,425,487,],[23,23,23,23,23,23,]),'property_assignment':([98,168,],[170,257,]),'template_declaration':([2,276,390,405,425,487,],[44,44,44,44,44,44,]),'setting_list':([269,342,],[340,400,]),'spriteset':([2,276,390,405,425,487,],[64,64,64,64,64,64,]),'layout_sprite_list':([235,424,],[310,470,]),'else_block':([32,],[105,]),'graphics_list':([101,366,],[178,421,]),'spriteset_contents':([317,334,347,352,355,359,385,404,407,411,412,],[379,395,403,406,410,414,434,453,456,461,462,]),'random_body':([335,],[396,]),'script':([0,186,331,351,376,451,],[2,276,390,405,425,487,]),'deactivate':([2,276,390,405,425,487,],[24,24,24,24,24,24,]),'main_script':([0,],[1,]),'cargotable_list':([146,],[229,]),'town_names':([2,276,390,405,425,487,],[46,46,46,46,46,46,]),'param_assignment':([2,276,390,405,425,487,],[67,67,67,67,67,67,]),'param':([2,5,7,15,33,73,84,95,96,97,99,100,101,103,107,109,110,111,112,113,114,115,116,117,118,119,120,121,122,123,124,125,126,127,128,129,130,131,132,133,135,136,138,141,143,144,145,147,148,151,153,155,157,164,174,176,181,189,215,219,238,251,258,259,268,274,276,277,280,307,320,328,360,366,377,383,386,388,390,396,405,408,425,433,437,439,464,465,473,476,483,486,487,490,491,505,510,514,517,518,519,523,526,537,539,546,],[9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,]),'property_block':([2,276,390,405,425,487,],[49,49,49,49,49,49,]),'non_empty_id_list':([220,236,],[292,292,]),'layout_param_list':([423,],[469,]),'switch_body':([354,],[409,]),'tilelayout_item':([164,251,],[248,326,]),'graphics_assignment_list':([101,366,],[176,176,]),'alt_sprites':([2,276,390,405,425,487,],[25,25,25,25,25,25,]),'switch_value':([101,176,268,366,408,483,490,514,526,537,],[177,265,337,177,457,509,512,528,536,542,]),'random_switch':([2,276,390,405,425,487,],[26,26,26,26,26,26,]),'main_block':([2,276,390,405,425,487,],[3,3,3,3,3,3,]),'template_usage':([317,334,347,352,355,359,379,385,395,403,404,406,407,410,411,412,414,434,453,456,461,462,],[382,382,382,382,382,382,429,382,429,429,382,429,382,429,382,382,429,429,429,429,429,429,]),'string':([2,5,7,15,33,73,84,95,96,97,99,100,101,103,107,109,110,111,112,113,114,115,116,117,118,119,120,121,122,123,124,125,126,127,128,129,130,131,132,133,135,136,138,141,143,144,145,147,148,151,153,155,157,164,174,176,181,189,215,219,238,251,258,259,268,274,276,277,280,301,307,320,328,360,366,377,383,386,388,390,396,405,408,425,433,437,439,464,465,473,476,483,486,487,490,491,505,510,514,517,518,519,523,526,535,537,539,546,],[65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,364,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,
65,65,65,65,65,65,65,65,65,65,65,541,65,65,65,]),'font_glyph':([2,276,390,405,425,487,],[27,27,27,27,27,27,]),'error_block':([2,276,390,405,425,487,],[70,70,70,70,70,70,]),'assignment':([102,164,183,251,399,423,446,469,],[180,249,272,249,447,468,447,498,]),'disable_item':([2,276,390,405,425,487,],[71,71,71,71,71,71,]),'setting':([269,340,342,400,],[339,398,339,398,]),'railtypetable_item':([91,246,],[159,322,]),'name_string_list':([510,],[523,]),'generic_assignment':([238,386,],[314,314,]),'basecost':([2,276,390,405,425,487,],[11,11,11,11,11,11,]),'engine_override':([2,276,390,405,425,487,],[50,50,50,50,50,50,]),'produce':([2,276,390,405,425,487,],[72,72,72,72,72,72,]),'tilelayout':([2,276,390,405,425,487,],[51,51,51,51,51,51,]),'town_names_param':([142,224,365,420,],[223,300,223,300,]),'town_names_part_list':([222,],[297,]),'name_string_item':([510,523,],[525,534,]),'variable':([2,5,7,15,33,73,84,95,96,97,99,100,101,103,107,109,110,111,112,113,114,115,116,117,118,119,120,121,122,123,124,125,126,127,128,129,130,131,132,133,135,136,138,141,143,144,145,147,148,151,153,155,157,164,174,176,181,189,215,219,238,251,258,259,268,274,276,277,280,307,320,328,360,366,377,383,386,388,390,396,405,408,425,433,437,439,464,465,473,476,483,486,487,490,491,505,510,514,517,518,519,523,526,537,539,546,],[74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,]),'town_names_part':([222,361,],[295,416,]),'cargotable':([2,276,390,405,425,487,],[13,13,13,13,13,13,]),'spritegroup':([2,276,390,405,425,487,],[52,52,52,52,52,52,]),'spritelayout':([2,276,390,405,425,487,],[14,14,14,14,14,14,]),'spriteview_list':([163,],[247,]),'tilelayout_list':([164,],[251,]),'param_desc':([102,183,],[182,273,]),'base_graphics':([2,276,390,405,425,487,],[78,78,78,78,78,78,]),'real_sprite':([317,334,347,352,355,359,379,385,395,403,404,406,407,410,411,412,414,434,453,456,461,462,],[381,381,381,381,381,381,428,381,428,428,381,428,381,428,381,381,428,428,428,428,428,428,]),'railtypetable_list':([91,],[161,]),'expression_list':([5,84,95,96,97,99,100,103,107,130,131,133,135,136,138,141,145,148,153,155,189,215,219,320,383,433,437,476,505,],[86,154,165,166,167,172,173,185,188,211,212,214,216,217,218,221,228,233,239,241,279,286,290,387,431,478,480,504,521,]),'property_list':([98,],[168,]),'if_else_parts':([2,276,390,405,425,487,],[32,32,32,32,32,32,]),'non_empty_expression_list':([5,84,95,96,97,99,100,103,107,130,131,133,135,136,138,141,145,148,153,155,189,215,219,320,383,433,437,476,505,],[87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,]),'graphics_block':([2,276,390,405,425,487,],[81,81,81,81,81,81,]),'town_names_param_list':([142,365,],[224,420,]),'item':([2,276,390,405,425,487,],[58,58,58,58,58,58,]),'switch':([2,276,390,405,425,487,],[82,82,82,82,82,82,]),'snowline':([2,276,390,405,425,487,],[34,34,34,34,34,34,]),'grf_block':([2,276,390,405,425,487,],[19,19,19,19,19,19,]),'assignment_list':([102,],[183,]),'spriteview':([247,],[324,]),'railtype':([2,276,390,405,425,487,],[36,36,36,36,36,36,]),'liveryoverride_block':([2,276,390,405,425,487,],[39,39,39,39,39,39,]),'expression':([2,5,7,15,33,73,84,95,96,97,99,100,101,103,107,109,110,111,112,113,114,115,116,117,118,119,120,121,122,123,124,125,126,127,128,129,130,131,132,133,135,136,138,141,143,144
,145,147,148,151,153,155,157,164,174,176,181,189,215,219,238,251,258,259,268,274,276,277,280,307,320,328,360,366,377,383,386,388,390,396,405,408,425,433,437,439,464,465,473,476,483,486,487,490,491,505,510,514,517,518,519,523,526,537,539,546,],[37,88,90,94,106,149,88,88,88,88,88,88,179,88,88,190,191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,88,88,213,88,88,88,88,88,226,227,88,232,88,237,88,88,242,252,263,179,270,88,88,88,315,252,332,333,338,343,37,346,349,371,88,389,415,179,426,88,315,438,37,445,37,458,37,88,88,482,495,496,502,88,338,343,37,338,513,88,524,338,529,530,502,524,338,338,543,547,]),'loop':([2,276,390,405,425,487,],[57,57,57,57,57,57,]),'setting_value':([399,446,],[449,485,]),}
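# _lr_goto_items uses the same compact ([states], [next_states]) encoding
# as _lr_action_items, but is keyed by nonterminal instead of terminal; the
# loop below expands it into _lr_goto[state][nonterminal] = next_state.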
_lr_goto = {}
for _k, _v in _lr_goto_items.items():
    for _x, _y in zip(_v[0], _v[1]):
        if _x not in _lr_goto:
            _lr_goto[_x] = {}
        _lr_goto[_x][_k] = _y
del _lr_goto_items
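# After a reduction the driver pops len(rhs) states from the stack, then
# consults _lr_goto[exposed_state][lhs_nonterminal] for the state to push.
# Each entry of _lr_productions below is a 6-tuple:
#   (rule as a string, lhs name, rhs length, p_* handler name, file, line)
# Entry 0 is the augmented start rule S' -> main_script added by PLY.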
_lr_productions = [
("S' -> main_script","S'",1,None,None,None),
('main_script -> script','main_script',1,'p_main_script','build/bdist.macosx-10.6-universal/egg/nml/parser.py',60),
('script -> <empty>','script',0,'p_script','build/bdist.macosx-10.6-universal/egg/nml/parser.py',64),
('script -> script main_block','script',2,'p_script','build/bdist.macosx-10.6-universal/egg/nml/parser.py',65),
('main_block -> switch','main_block',1,'p_main_block','build/bdist.macosx-10.6-universal/egg/nml/parser.py',70),
('main_block -> random_switch','main_block',1,'p_main_block','build/bdist.macosx-10.6-universal/egg/nml/parser.py',71),
('main_block -> produce','main_block',1,'p_main_block','build/bdist.macosx-10.6-universal/egg/nml/parser.py',72),
('main_block -> spriteset','main_block',1,'p_main_block','build/bdist.macosx-10.6-universal/egg/nml/parser.py',73),
('main_block -> spritegroup','main_block',1,'p_main_block','build/bdist.macosx-10.6-universal/egg/nml/parser.py',74),
('main_block -> spritelayout','main_block',1,'p_main_block','build/bdist.macosx-10.6-universal/egg/nml/parser.py',75),
('main_block -> template_declaration','main_block',1,'p_main_block','build/bdist.macosx-10.6-universal/egg/nml/parser.py',76),
('main_block -> tilelayout','main_block',1,'p_main_block','build/bdist.macosx-10.6-universal/egg/nml/parser.py',77),
('main_block -> town_names','main_block',1,'p_main_block','build/bdist.macosx-10.6-universal/egg/nml/parser.py',78),
('main_block -> cargotable','main_block',1,'p_main_block','build/bdist.macosx-10.6-universal/egg/nml/parser.py',79),
('main_block -> railtype','main_block',1,'p_main_block','build/bdist.macosx-10.6-universal/egg/nml/parser.py',80),
('main_block -> grf_block','main_block',1,'p_main_block','build/bdist.macosx-10.6-universal/egg/nml/parser.py',81),
('main_block -> param_assignment','main_block',1,'p_main_block','build/bdist.macosx-10.6-universal/egg/nml/parser.py',82),
('main_block -> skip_all','main_block',1,'p_main_block','build/bdist.macosx-10.6-universal/egg/nml/parser.py',83),
('main_block -> conditional','main_block',1,'p_main_block','build/bdist.macosx-10.6-universal/egg/nml/parser.py',84),
('main_block -> loop','main_block',1,'p_main_block','build/bdist.macosx-10.6-universal/egg/nml/parser.py',85),
('main_block -> item','main_block',1,'p_main_block','build/bdist.macosx-10.6-universal/egg/nml/parser.py',86),
('main_block -> property_block','main_block',1,'p_main_block','build/bdist.macosx-10.6-universal/egg/nml/parser.py',87),
('main_block -> graphics_block','main_block',1,'p_main_block','build/bdist.macosx-10.6-universal/egg/nml/parser.py',88),
('main_block -> liveryoverride_block','main_block',1,'p_main_block','build/bdist.macosx-10.6-universal/egg/nml/parser.py',89),
('main_block -> error_block','main_block',1,'p_main_block','build/bdist.macosx-10.6-universal/egg/nml/parser.py',90),
('main_block -> disable_item','main_block',1,'p_main_block','build/bdist.macosx-10.6-universal/egg/nml/parser.py',91),
('main_block -> deactivate','main_block',1,'p_main_block','build/bdist.macosx-10.6-universal/egg/nml/parser.py',92),
('main_block -> replace','main_block',1,'p_main_block','build/bdist.macosx-10.6-universal/egg/nml/parser.py',93),
('main_block -> replace_new','main_block',1,'p_main_block','build/bdist.macosx-10.6-universal/egg/nml/parser.py',94),
('main_block -> base_graphics','main_block',1,'p_main_block','build/bdist.macosx-10.6-universal/egg/nml/parser.py',95),
('main_block -> font_glyph','main_block',1,'p_main_block','build/bdist.macosx-10.6-universal/egg/nml/parser.py',96),
('main_block -> alt_sprites','main_block',1,'p_main_block','build/bdist.macosx-10.6-universal/egg/nml/parser.py',97),
('main_block -> snowline','main_block',1,'p_main_block','build/bdist.macosx-10.6-universal/egg/nml/parser.py',98),
('main_block -> engine_override','main_block',1,'p_main_block','build/bdist.macosx-10.6-universal/egg/nml/parser.py',99),
('main_block -> sort_vehicles','main_block',1,'p_main_block','build/bdist.macosx-10.6-universal/egg/nml/parser.py',100),
('main_block -> basecost','main_block',1,'p_main_block','build/bdist.macosx-10.6-universal/egg/nml/parser.py',101),
('expression -> NUMBER','expression',1,'p_expression','build/bdist.macosx-10.6-universal/egg/nml/parser.py',108),
('expression -> FLOAT','expression',1,'p_expression','build/bdist.macosx-10.6-universal/egg/nml/parser.py',109),
('expression -> param','expression',1,'p_expression','build/bdist.macosx-10.6-universal/egg/nml/parser.py',110),
('expression -> variable','expression',1,'p_expression','build/bdist.macosx-10.6-universal/egg/nml/parser.py',111),
('expression -> ID','expression',1,'p_expression','build/bdist.macosx-10.6-universal/egg/nml/parser.py',112),
('expression -> STRING_LITERAL','expression',1,'p_expression','build/bdist.macosx-10.6-universal/egg/nml/parser.py',113),
('expression -> string','expression',1,'p_expression','build/bdist.macosx-10.6-universal/egg/nml/parser.py',114),
('expression -> LPAREN expression RPAREN','expression',3,'p_parenthesed_expression','build/bdist.macosx-10.6-universal/egg/nml/parser.py',118),
('param -> PARAMETER LBRACKET expression RBRACKET','param',4,'p_parameter','build/bdist.macosx-10.6-universal/egg/nml/parser.py',122),
('param -> PARAMETER LBRACKET expression COMMA expression RBRACKET','param',6,'p_parameter_other_grf','build/bdist.macosx-10.6-universal/egg/nml/parser.py',126),
('expression -> expression PLUS expression','expression',3,'p_binop','build/bdist.macosx-10.6-universal/egg/nml/parser.py',152),
('expression -> expression MINUS expression','expression',3,'p_binop','build/bdist.macosx-10.6-universal/egg/nml/parser.py',153),
('expression -> expression TIMES expression','expression',3,'p_binop','build/bdist.macosx-10.6-universal/egg/nml/parser.py',154),
('expression -> expression DIVIDE expression','expression',3,'p_binop','build/bdist.macosx-10.6-universal/egg/nml/parser.py',155),
('expression -> expression MODULO expression','expression',3,'p_binop','build/bdist.macosx-10.6-universal/egg/nml/parser.py',156),
('expression -> expression AND expression','expression',3,'p_binop','build/bdist.macosx-10.6-universal/egg/nml/parser.py',157),
('expression -> expression OR expression','expression',3,'p_binop','build/bdist.macosx-10.6-universal/egg/nml/parser.py',158),
('expression -> expression XOR expression','expression',3,'p_binop','build/bdist.macosx-10.6-universal/egg/nml/parser.py',159),
('expression -> expression SHIFT_LEFT expression','expression',3,'p_binop','build/bdist.macosx-10.6-universal/egg/nml/parser.py',160),
('expression -> expression SHIFT_RIGHT expression','expression',3,'p_binop','build/bdist.macosx-10.6-universal/egg/nml/parser.py',161),
('expression -> expression SHIFTU_RIGHT expression','expression',3,'p_binop','build/bdist.macosx-10.6-universal/egg/nml/parser.py',162),
('expression -> expression COMP_EQ expression','expression',3,'p_binop','build/bdist.macosx-10.6-universal/egg/nml/parser.py',163),
('expression -> expression COMP_NEQ expression','expression',3,'p_binop','build/bdist.macosx-10.6-universal/egg/nml/parser.py',164),
('expression -> expression COMP_LE expression','expression',3,'p_binop','build/bdist.macosx-10.6-universal/egg/nml/parser.py',165),
('expression -> expression COMP_GE expression','expression',3,'p_binop','build/bdist.macosx-10.6-universal/egg/nml/parser.py',166),
('expression -> expression COMP_LT expression','expression',3,'p_binop','build/bdist.macosx-10.6-universal/egg/nml/parser.py',167),
('expression -> expression COMP_GT expression','expression',3,'p_binop','build/bdist.macosx-10.6-universal/egg/nml/parser.py',168),
('expression -> expression LOGICAL_AND expression','expression',3,'p_binop_logical','build/bdist.macosx-10.6-universal/egg/nml/parser.py',172),
('expression -> expression LOGICAL_OR expression','expression',3,'p_binop_logical','build/bdist.macosx-10.6-universal/egg/nml/parser.py',173),
('expression -> LOGICAL_NOT expression','expression',2,'p_logical_not','build/bdist.macosx-10.6-universal/egg/nml/parser.py',177),
('expression -> BINARY_NOT expression','expression',2,'p_binary_not','build/bdist.macosx-10.6-universal/egg/nml/parser.py',181),
('expression -> expression TERNARY_OPEN expression COLON expression','expression',5,'p_ternary_op','build/bdist.macosx-10.6-universal/egg/nml/parser.py',185),
('expression -> MINUS expression','expression',2,'p_unary_minus','build/bdist.macosx-10.6-universal/egg/nml/parser.py',189),
('variable -> VARIABLE LBRACKET expression_list RBRACKET','variable',4,'p_variable','build/bdist.macosx-10.6-universal/egg/nml/parser.py',193),
('expression -> ID LPAREN expression_list RPAREN','expression',4,'p_function','build/bdist.macosx-10.6-universal/egg/nml/parser.py',198),
('expression -> LBRACKET expression_list RBRACKET','expression',3,'p_array','build/bdist.macosx-10.6-universal/egg/nml/parser.py',202),
('assignment_list -> assignment','assignment_list',1,'p_assignment_list','build/bdist.macosx-10.6-universal/egg/nml/parser.py',210),
('assignment_list -> param_desc','assignment_list',1,'p_assignment_list','build/bdist.macosx-10.6-universal/egg/nml/parser.py',211),
('assignment_list -> assignment_list assignment','assignment_list',2,'p_assignment_list','build/bdist.macosx-10.6-universal/egg/nml/parser.py',212),
('assignment_list -> assignment_list param_desc','assignment_list',2,'p_assignment_list','build/bdist.macosx-10.6-universal/egg/nml/parser.py',213),
('assignment -> ID COLON expression SEMICOLON','assignment',4,'p_assignment','build/bdist.macosx-10.6-universal/egg/nml/parser.py',218),
('param_desc -> PARAMETER expression LBRACE setting_list RBRACE','param_desc',5,'p_param_desc','build/bdist.macosx-10.6-universal/egg/nml/parser.py',222),
('param_desc -> PARAMETER LBRACE setting_list RBRACE','param_desc',4,'p_param_desc','build/bdist.macosx-10.6-universal/egg/nml/parser.py',223),
('setting_list -> setting','setting_list',1,'p_setting_list','build/bdist.macosx-10.6-universal/egg/nml/parser.py',228),
('setting_list -> setting_list setting','setting_list',2,'p_setting_list','build/bdist.macosx-10.6-universal/egg/nml/parser.py',229),
('setting -> ID LBRACE setting_value_list RBRACE','setting',4,'p_setting','build/bdist.macosx-10.6-universal/egg/nml/parser.py',234),
('setting_value_list -> setting_value','setting_value_list',1,'p_setting_value_list','build/bdist.macosx-10.6-universal/egg/nml/parser.py',238),
('setting_value_list -> setting_value_list setting_value','setting_value_list',2,'p_setting_value_list','build/bdist.macosx-10.6-universal/egg/nml/parser.py',239),
('setting_value -> assignment','setting_value',1,'p_setting_value','build/bdist.macosx-10.6-universal/egg/nml/parser.py',244),
('setting_value -> ID COLON LBRACE name_string_list RBRACE SEMICOLON','setting_value',6,'p_names_setting_value','build/bdist.macosx-10.6-universal/egg/nml/parser.py',248),
('name_string_list -> name_string_item','name_string_list',1,'p_name_string_list','build/bdist.macosx-10.6-universal/egg/nml/parser.py',252),
('name_string_list -> name_string_list name_string_item','name_string_list',2,'p_name_string_list','build/bdist.macosx-10.6-universal/egg/nml/parser.py',253),
('name_string_item -> expression COLON string SEMICOLON','name_string_item',4,'p_name_string_item','build/bdist.macosx-10.6-universal/egg/nml/parser.py',258),
('string -> STRING LPAREN expression_list RPAREN','string',4,'p_string','build/bdist.macosx-10.6-universal/egg/nml/parser.py',262),
('non_empty_expression_list -> expression','non_empty_expression_list',1,'p_non_empty_expression_list','build/bdist.macosx-10.6-universal/egg/nml/parser.py',266),
('non_empty_expression_list -> non_empty_expression_list COMMA expression','non_empty_expression_list',3,'p_non_empty_expression_list','build/bdist.macosx-10.6-universal/egg/nml/parser.py',267),
('expression_list -> <empty>','expression_list',0,'p_expression_list','build/bdist.macosx-10.6-universal/egg/nml/parser.py',272),
('expression_list -> non_empty_expression_list','expression_list',1,'p_expression_list','build/bdist.macosx-10.6-universal/egg/nml/parser.py',273),
('expression_list -> non_empty_expression_list COMMA','expression_list',2,'p_expression_list','build/bdist.macosx-10.6-universal/egg/nml/parser.py',274),
('non_empty_id_list -> ID','non_empty_id_list',1,'p_non_empty_id_list','build/bdist.macosx-10.6-universal/egg/nml/parser.py',278),
('non_empty_id_list -> non_empty_id_list COMMA ID','non_empty_id_list',3,'p_non_empty_id_list','build/bdist.macosx-10.6-universal/egg/nml/parser.py',279),
('id_list -> <empty>','id_list',0,'p_id_list','build/bdist.macosx-10.6-universal/egg/nml/parser.py',284),
('id_list -> non_empty_id_list','id_list',1,'p_id_list','build/bdist.macosx-10.6-universal/egg/nml/parser.py',285),
('id_list -> non_empty_id_list COMMA','id_list',2,'p_id_list','build/bdist.macosx-10.6-universal/egg/nml/parser.py',286),
('generic_assignment -> expression COLON expression SEMICOLON','generic_assignment',4,'p_generic_assignment','build/bdist.macosx-10.6-universal/egg/nml/parser.py',290),
('generic_assignment_list -> <empty>','generic_assignment_list',0,'p_generic_assignment_list','build/bdist.macosx-10.6-universal/egg/nml/parser.py',294),
('generic_assignment_list -> generic_assignment_list generic_assignment','generic_assignment_list',2,'p_generic_assignment_list','build/bdist.macosx-10.6-universal/egg/nml/parser.py',295),
('item -> ITEM LPAREN expression_list RPAREN LBRACE script RBRACE','item',7,'p_item','build/bdist.macosx-10.6-universal/egg/nml/parser.py',302),
('property_block -> PROPERTY LBRACE property_list RBRACE','property_block',4,'p_property_block','build/bdist.macosx-10.6-universal/egg/nml/parser.py',306),
('property_list -> property_assignment','property_list',1,'p_property_list','build/bdist.macosx-10.6-universal/egg/nml/parser.py',310),
('property_list -> property_list property_assignment','property_list',2,'p_property_list','build/bdist.macosx-10.6-universal/egg/nml/parser.py',311),
('property_assignment -> ID COLON expression SEMICOLON','property_assignment',4,'p_property_assignment','build/bdist.macosx-10.6-universal/egg/nml/parser.py',316),
('property_assignment -> ID COLON expression UNIT SEMICOLON','property_assignment',5,'p_property_assignment','build/bdist.macosx-10.6-universal/egg/nml/parser.py',317),
('property_assignment -> NUMBER COLON expression SEMICOLON','property_assignment',4,'p_property_assignment','build/bdist.macosx-10.6-universal/egg/nml/parser.py',318),
('property_assignment -> NUMBER COLON expression UNIT SEMICOLON','property_assignment',5,'p_property_assignment','build/bdist.macosx-10.6-universal/egg/nml/parser.py',319),
('graphics_block -> GRAPHICS LBRACE graphics_list RBRACE','graphics_block',4,'p_graphics_block','build/bdist.macosx-10.6-universal/egg/nml/parser.py',325),
('liveryoverride_block -> LIVERYOVERRIDE LPAREN expression RPAREN LBRACE graphics_list RBRACE','liveryoverride_block',7,'p_liveryoverride_block','build/bdist.macosx-10.6-universal/egg/nml/parser.py',329),
('graphics_list -> graphics_assignment_list','graphics_list',1,'p_graphics_list','build/bdist.macosx-10.6-universal/egg/nml/parser.py',333),
('graphics_list -> graphics_assignment_list switch_value','graphics_list',2,'p_graphics_list','build/bdist.macosx-10.6-universal/egg/nml/parser.py',334),
('graphics_list -> switch_value','graphics_list',1,'p_graphics_list','build/bdist.macosx-10.6-universal/egg/nml/parser.py',335),
('graphics_assignment -> expression COLON switch_value','graphics_assignment',3,'p_graphics_assignment','build/bdist.macosx-10.6-universal/egg/nml/parser.py',346),
('graphics_assignment_list -> graphics_assignment','graphics_assignment_list',1,'p_graphics_assignment_list','build/bdist.macosx-10.6-universal/egg/nml/parser.py',350),
('graphics_assignment_list -> graphics_assignment_list graphics_assignment','graphics_assignment_list',2,'p_graphics_assignment_list','build/bdist.macosx-10.6-universal/egg/nml/parser.py',351),
('conditional -> if_else_parts','conditional',1,'p_conditional','build/bdist.macosx-10.6-universal/egg/nml/parser.py',359),
('conditional -> if_else_parts else_block','conditional',2,'p_conditional','build/bdist.macosx-10.6-universal/egg/nml/parser.py',360),
('else_block -> ELSE LBRACE script RBRACE','else_block',4,'p_else_block','build/bdist.macosx-10.6-universal/egg/nml/parser.py',367),
('if_else_parts -> IF LPAREN expression RPAREN LBRACE script RBRACE','if_else_parts',7,'p_if_else_parts','build/bdist.macosx-10.6-universal/egg/nml/parser.py',371),
('if_else_parts -> if_else_parts ELSE IF LPAREN expression RPAREN LBRACE script RBRACE','if_else_parts',9,'p_if_else_parts','build/bdist.macosx-10.6-universal/egg/nml/parser.py',372),
('loop -> WHILE LPAREN expression RPAREN LBRACE script RBRACE','loop',7,'p_loop','build/bdist.macosx-10.6-universal/egg/nml/parser.py',377),
('switch -> SWITCH LPAREN expression_list RPAREN LBRACE switch_body RBRACE','switch',7,'p_switch','build/bdist.macosx-10.6-universal/egg/nml/parser.py',384),
('switch_body -> switch_ranges switch_value','switch_body',2,'p_switch_body','build/bdist.macosx-10.6-universal/egg/nml/parser.py',388),
('switch_body -> switch_ranges','switch_body',1,'p_switch_body','build/bdist.macosx-10.6-universal/egg/nml/parser.py',389),
('switch_ranges -> <empty>','switch_ranges',0,'p_switch_ranges','build/bdist.macosx-10.6-universal/egg/nml/parser.py',393),
('switch_ranges -> switch_ranges expression COLON switch_value','switch_ranges',4,'p_switch_ranges','build/bdist.macosx-10.6-universal/egg/nml/parser.py',394),
('switch_ranges -> switch_ranges expression UNIT COLON switch_value','switch_ranges',5,'p_switch_ranges','build/bdist.macosx-10.6-universal/egg/nml/parser.py',395),
('switch_ranges -> switch_ranges expression RANGE expression COLON switch_value','switch_ranges',6,'p_switch_ranges','build/bdist.macosx-10.6-universal/egg/nml/parser.py',396),
('switch_ranges -> switch_ranges expression RANGE expression UNIT COLON switch_value','switch_ranges',7,'p_switch_ranges','build/bdist.macosx-10.6-universal/egg/nml/parser.py',397),
('switch_value -> RETURN expression SEMICOLON','switch_value',3,'p_switch_value','build/bdist.macosx-10.6-universal/egg/nml/parser.py',405),
('switch_value -> RETURN SEMICOLON','switch_value',2,'p_switch_value','build/bdist.macosx-10.6-universal/egg/nml/parser.py',406),
('switch_value -> expression SEMICOLON','switch_value',2,'p_switch_value','build/bdist.macosx-10.6-universal/egg/nml/parser.py',407),
('random_switch -> RANDOMSWITCH LPAREN expression_list RPAREN LBRACE random_body RBRACE','random_switch',7,'p_random_switch','build/bdist.macosx-10.6-universal/egg/nml/parser.py',413),
('random_body -> <empty>','random_body',0,'p_random_body','build/bdist.macosx-10.6-universal/egg/nml/parser.py',417),
('random_body -> random_body expression COLON switch_value','random_body',4,'p_random_body','build/bdist.macosx-10.6-universal/egg/nml/parser.py',418),
('produce -> PRODUCE LPAREN expression_list RPAREN SEMICOLON','produce',5,'p_produce','build/bdist.macosx-10.6-universal/egg/nml/parser.py',423),
('real_sprite -> LBRACKET expression_list RBRACKET','real_sprite',3,'p_real_sprite','build/bdist.macosx-10.6-universal/egg/nml/parser.py',430),
('real_sprite -> ID COLON LBRACKET expression_list RBRACKET','real_sprite',5,'p_real_sprite','build/bdist.macosx-10.6-universal/egg/nml/parser.py',431),
('recolour_assignment_list -> <empty>','recolour_assignment_list',0,'p_recolour_assignment_list','build/bdist.macosx-10.6-universal/egg/nml/parser.py',438),
('recolour_assignment_list -> recolour_assignment_list recolour_assignment','recolour_assignment_list',2,'p_recolour_assignment_list','build/bdist.macosx-10.6-universal/egg/nml/parser.py',439),
('recolour_assignment -> expression COLON expression SEMICOLON','recolour_assignment',4,'p_recolour_assignment_1','build/bdist.macosx-10.6-universal/egg/nml/parser.py',443),
('recolour_assignment -> expression RANGE expression COLON expression RANGE expression SEMICOLON','recolour_assignment',8,'p_recolour_assignment_2','build/bdist.macosx-10.6-universal/egg/nml/parser.py',447),
('recolour_assignment -> expression RANGE expression COLON expression SEMICOLON','recolour_assignment',6,'p_recolour_assignment_3','build/bdist.macosx-10.6-universal/egg/nml/parser.py',451),
('real_sprite -> RECOLOUR_SPRITE LBRACE recolour_assignment_list RBRACE','real_sprite',4,'p_recolour_sprite','build/bdist.macosx-10.6-universal/egg/nml/parser.py',455),
('real_sprite -> ID COLON RECOLOUR_SPRITE LBRACE recolour_assignment_list RBRACE','real_sprite',6,'p_recolour_sprite','build/bdist.macosx-10.6-universal/egg/nml/parser.py',456),
('template_declaration -> TEMPLATE ID LPAREN id_list RPAREN LBRACE spriteset_contents RBRACE','template_declaration',8,'p_template_declaration','build/bdist.macosx-10.6-universal/egg/nml/parser.py',463),
('template_usage -> ID LPAREN expression_list RPAREN','template_usage',4,'p_template_usage','build/bdist.macosx-10.6-universal/egg/nml/parser.py',467),
('template_usage -> ID COLON ID LPAREN expression_list RPAREN','template_usage',6,'p_template_usage','build/bdist.macosx-10.6-universal/egg/nml/parser.py',468),
('spriteset_contents -> real_sprite','spriteset_contents',1,'p_spriteset_contents','build/bdist.macosx-10.6-universal/egg/nml/parser.py',475),
('spriteset_contents -> template_usage','spriteset_contents',1,'p_spriteset_contents','build/bdist.macosx-10.6-universal/egg/nml/parser.py',476),
('spriteset_contents -> spriteset_contents real_sprite','spriteset_contents',2,'p_spriteset_contents','build/bdist.macosx-10.6-universal/egg/nml/parser.py',477),
('spriteset_contents -> spriteset_contents template_usage','spriteset_contents',2,'p_spriteset_contents','build/bdist.macosx-10.6-universal/egg/nml/parser.py',478),
('replace -> REPLACESPRITE LPAREN expression_list RPAREN LBRACE spriteset_contents RBRACE','replace',7,'p_replace','build/bdist.macosx-10.6-universal/egg/nml/parser.py',483),
('replace -> REPLACESPRITE ID LPAREN expression_list RPAREN LBRACE spriteset_contents RBRACE','replace',8,'p_replace','build/bdist.macosx-10.6-universal/egg/nml/parser.py',484),
('replace_new -> REPLACENEWSPRITE LPAREN expression_list RPAREN LBRACE spriteset_contents RBRACE','replace_new',7,'p_replace_new','build/bdist.macosx-10.6-universal/egg/nml/parser.py',489),
('replace_new -> REPLACENEWSPRITE ID LPAREN expression_list RPAREN LBRACE spriteset_contents RBRACE','replace_new',8,'p_replace_new','build/bdist.macosx-10.6-universal/egg/nml/parser.py',490),
('base_graphics -> BASE_GRAPHICS LPAREN expression_list RPAREN LBRACE spriteset_contents RBRACE','base_graphics',7,'p_base_graphics','build/bdist.macosx-10.6-universal/egg/nml/parser.py',495),
('base_graphics -> BASE_GRAPHICS ID LPAREN expression_list RPAREN LBRACE spriteset_contents RBRACE','base_graphics',8,'p_base_graphics','build/bdist.macosx-10.6-universal/egg/nml/parser.py',496),
('font_glyph -> FONTGLYPH LPAREN expression_list RPAREN LBRACE spriteset_contents RBRACE','font_glyph',7,'p_font_glyph','build/bdist.macosx-10.6-universal/egg/nml/parser.py',501),
('font_glyph -> FONTGLYPH ID LPAREN expression_list RPAREN LBRACE spriteset_contents RBRACE','font_glyph',8,'p_font_glyph','build/bdist.macosx-10.6-universal/egg/nml/parser.py',502),
('alt_sprites -> ALT_SPRITES LPAREN expression_list RPAREN LBRACE spriteset_contents RBRACE','alt_sprites',7,'p_alt_sprites','build/bdist.macosx-10.6-universal/egg/nml/parser.py',507),
('spriteset -> SPRITESET LPAREN expression_list RPAREN LBRACE spriteset_contents RBRACE','spriteset',7,'p_spriteset','build/bdist.macosx-10.6-universal/egg/nml/parser.py',515),
('spritegroup -> SPRITEGROUP ID LBRACE spriteview_list RBRACE','spritegroup',5,'p_spritegroup_normal','build/bdist.macosx-10.6-universal/egg/nml/parser.py',519),
('spritelayout -> SPRITELAYOUT ID LBRACE layout_sprite_list RBRACE','spritelayout',5,'p_spritelayout','build/bdist.macosx-10.6-universal/egg/nml/parser.py',523),
('spritelayout -> SPRITELAYOUT ID LPAREN id_list RPAREN LBRACE layout_sprite_list RBRACE','spritelayout',8,'p_spritelayout','build/bdist.macosx-10.6-universal/egg/nml/parser.py',524),
('spriteview_list -> <empty>','spriteview_list',0,'p_spriteview_list','build/bdist.macosx-10.6-universal/egg/nml/parser.py',531),
('spriteview_list -> spriteview_list spriteview','spriteview_list',2,'p_spriteview_list','build/bdist.macosx-10.6-universal/egg/nml/parser.py',532),
('spriteview -> ID COLON LBRACKET expression_list RBRACKET SEMICOLON','spriteview',6,'p_spriteview','build/bdist.macosx-10.6-universal/egg/nml/parser.py',537),
('spriteview -> ID COLON expression SEMICOLON','spriteview',4,'p_spriteview','build/bdist.macosx-10.6-universal/egg/nml/parser.py',538),
('layout_sprite_list -> <empty>','layout_sprite_list',0,'p_layout_sprite_list','build/bdist.macosx-10.6-universal/egg/nml/parser.py',543),
('layout_sprite_list -> layout_sprite_list layout_sprite','layout_sprite_list',2,'p_layout_sprite_list','build/bdist.macosx-10.6-universal/egg/nml/parser.py',544),
('layout_sprite -> ID LBRACE layout_param_list RBRACE','layout_sprite',4,'p_layout_sprite','build/bdist.macosx-10.6-universal/egg/nml/parser.py',549),
('layout_param_list -> assignment','layout_param_list',1,'p_layout_param_list','build/bdist.macosx-10.6-universal/egg/nml/parser.py',553),
('layout_param_list -> layout_param_list assignment','layout_param_list',2,'p_layout_param_list','build/bdist.macosx-10.6-universal/egg/nml/parser.py',554),
('town_names -> TOWN_NAMES LPAREN expression RPAREN LBRACE town_names_param_list RBRACE','town_names',7,'p_town_names','build/bdist.macosx-10.6-universal/egg/nml/parser.py',562),
('town_names -> TOWN_NAMES LBRACE town_names_param_list RBRACE','town_names',4,'p_town_names','build/bdist.macosx-10.6-universal/egg/nml/parser.py',563),
('town_names_param_list -> town_names_param','town_names_param_list',1,'p_town_names_param_list','build/bdist.macosx-10.6-universal/egg/nml/parser.py',568),
('town_names_param_list -> town_names_param_list town_names_param','town_names_param_list',2,'p_town_names_param_list','build/bdist.macosx-10.6-universal/egg/nml/parser.py',569),
('town_names_param -> ID COLON string SEMICOLON','town_names_param',4,'p_town_names_param','build/bdist.macosx-10.6-universal/egg/nml/parser.py',574),
('town_names_param -> LBRACE town_names_part_list RBRACE','town_names_param',3,'p_town_names_param','build/bdist.macosx-10.6-universal/egg/nml/parser.py',575),
('town_names_param -> LBRACE town_names_part_list COMMA RBRACE','town_names_param',4,'p_town_names_param','build/bdist.macosx-10.6-universal/egg/nml/parser.py',576),
('town_names_part_list -> town_names_part','town_names_part_list',1,'p_town_names_part_list','build/bdist.macosx-10.6-universal/egg/nml/parser.py',581),
('town_names_part_list -> town_names_part_list COMMA town_names_part','town_names_part_list',3,'p_town_names_part_list','build/bdist.macosx-10.6-universal/egg/nml/parser.py',582),
('town_names_part -> TOWN_NAMES LPAREN expression COMMA expression RPAREN','town_names_part',6,'p_town_names_part','build/bdist.macosx-10.6-universal/egg/nml/parser.py',587),
('town_names_part -> ID LPAREN STRING_LITERAL COMMA expression RPAREN','town_names_part',6,'p_town_names_part','build/bdist.macosx-10.6-universal/egg/nml/parser.py',588),
('snowline -> SNOWLINE LPAREN ID RPAREN LBRACE generic_assignment_list RBRACE','snowline',7,'p_snowline','build/bdist.macosx-10.6-universal/egg/nml/parser.py',596),
('param_assignment -> expression EQ expression SEMICOLON','param_assignment',4,'p_param_assignment','build/bdist.macosx-10.6-universal/egg/nml/parser.py',603),
('error_block -> ERROR LPAREN expression_list RPAREN SEMICOLON','error_block',5,'p_error_block','build/bdist.macosx-10.6-universal/egg/nml/parser.py',607),
('disable_item -> DISABLE_ITEM LPAREN expression_list RPAREN SEMICOLON','disable_item',5,'p_disable_item','build/bdist.macosx-10.6-universal/egg/nml/parser.py',611),
('cargotable -> CARGOTABLE LBRACE cargotable_list RBRACE','cargotable',4,'p_cargotable','build/bdist.macosx-10.6-universal/egg/nml/parser.py',615),
('cargotable -> CARGOTABLE LBRACE cargotable_list COMMA RBRACE','cargotable',5,'p_cargotable','build/bdist.macosx-10.6-universal/egg/nml/parser.py',616),
('cargotable_list -> ID','cargotable_list',1,'p_cargotable_list','build/bdist.macosx-10.6-universal/egg/nml/parser.py',620),
('cargotable_list -> STRING_LITERAL','cargotable_list',1,'p_cargotable_list','build/bdist.macosx-10.6-universal/egg/nml/parser.py',621),
('cargotable_list -> cargotable_list COMMA ID','cargotable_list',3,'p_cargotable_list','build/bdist.macosx-10.6-universal/egg/nml/parser.py',622),
('cargotable_list -> cargotable_list COMMA STRING_LITERAL','cargotable_list',3,'p_cargotable_list','build/bdist.macosx-10.6-universal/egg/nml/parser.py',623),
('railtype -> RAILTYPETABLE LBRACE railtypetable_list RBRACE','railtype',4,'p_railtypetable','build/bdist.macosx-10.6-universal/egg/nml/parser.py',628),
('railtype -> RAILTYPETABLE LBRACE railtypetable_list COMMA RBRACE','railtype',5,'p_railtypetable','build/bdist.macosx-10.6-universal/egg/nml/parser.py',629),
('railtypetable_list -> railtypetable_item','railtypetable_list',1,'p_railtypetable_list','build/bdist.macosx-10.6-universal/egg/nml/parser.py',633),
('railtypetable_list -> railtypetable_list COMMA railtypetable_item','railtypetable_list',3,'p_railtypetable_list','build/bdist.macosx-10.6-universal/egg/nml/parser.py',634),
('railtypetable_item -> ID','railtypetable_item',1,'p_railtypetable_item','build/bdist.macosx-10.6-universal/egg/nml/parser.py',639),
('railtypetable_item -> STRING_LITERAL','railtypetable_item',1,'p_railtypetable_item','build/bdist.macosx-10.6-universal/egg/nml/parser.py',640),
('railtypetable_item -> ID COLON LBRACKET expression_list RBRACKET','railtypetable_item',5,'p_railtypetable_item','build/bdist.macosx-10.6-universal/egg/nml/parser.py',641),
('basecost -> BASECOST LBRACE generic_assignment_list RBRACE','basecost',4,'p_basecost','build/bdist.macosx-10.6-universal/egg/nml/parser.py',646),
('deactivate -> DEACTIVATE LPAREN expression_list RPAREN SEMICOLON','deactivate',5,'p_deactivate','build/bdist.macosx-10.6-universal/egg/nml/parser.py',650),
('grf_block -> GRF LBRACE assignment_list RBRACE','grf_block',4,'p_grf_block','build/bdist.macosx-10.6-universal/egg/nml/parser.py',654),
('skip_all -> SKIP_ALL SEMICOLON','skip_all',2,'p_skip_all','build/bdist.macosx-10.6-universal/egg/nml/parser.py',658),
('engine_override -> ENGINE_OVERRIDE LPAREN expression_list RPAREN SEMICOLON','engine_override',5,'p_engine_override','build/bdist.macosx-10.6-universal/egg/nml/parser.py',662),
('sort_vehicles -> SORT_VEHICLES LPAREN expression_list RPAREN SEMICOLON','sort_vehicles',5,'p_sort_vehicles','build/bdist.macosx-10.6-universal/egg/nml/parser.py',666),
('tilelayout -> TILELAYOUT ID LBRACE tilelayout_list RBRACE','tilelayout',5,'p_tilelayout','build/bdist.macosx-10.6-universal/egg/nml/parser.py',670),
('tilelayout_list -> tilelayout_item','tilelayout_list',1,'p_tilelayout_list','build/bdist.macosx-10.6-universal/egg/nml/parser.py',674),
('tilelayout_list -> tilelayout_list tilelayout_item','tilelayout_list',2,'p_tilelayout_list','build/bdist.macosx-10.6-universal/egg/nml/parser.py',675),
('tilelayout_item -> expression COMMA expression COLON expression SEMICOLON','tilelayout_item',6,'p_tilelayout_item_tile','build/bdist.macosx-10.6-universal/egg/nml/parser.py',680),
('tilelayout_item -> assignment','tilelayout_item',1,'p_tilelayout_item_prop','build/bdist.macosx-10.6-universal/egg/nml/parser.py',684),
]
| 423.337398 | 63,709 | 0.688701 | 23,082 | 104,141 | 3.054675 | 0.032969 | 0.030635 | 0.049016 | 0.055143 | 0.752354 | 0.707211 | 0.663485 | 0.646168 | 0.618582 | 0.602839 | 0 | 0.453816 | 0.01593 | 104,141 | 245 | 63,710 | 425.065306 | 0.234187 | 0.000595 | 0 | 0.008475 | 1 | 0 | 0.276675 | 0.121268 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 1 | 1 | 1 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
b14c1f7159b215b10ca183cb945aa1f830e0700b | 42 | py | Python | ui/ex5.py | jonaslindemann/compute-course-public | b8f55595ebbd790d79b525efdff17b8517154796 | [
"MIT"
] | 4 | 2021-09-12T12:07:01.000Z | 2021-09-29T17:38:34.000Z | ui/ex5.py | jonaslindemann/compute-course-public | b8f55595ebbd790d79b525efdff17b8517154796 | [
"MIT"
] | null | null | null | ui/ex5.py | jonaslindemann/compute-course-public | b8f55595ebbd790d79b525efdff17b8517154796 | [
"MIT"
] | 5 | 2020-10-24T16:02:31.000Z | 2021-09-28T20:57:46.000Z | print(3*3.75/1.5)
print(7.0/2)
print(7/2)
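# Note (added comment): in Python 3, "/" always performs true division,
# so all three expressions print floats (7.5, 3.5, 3.5).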
| 10.5 | 17 | 0.619048 | 13 | 42 | 2 | 0.615385 | 0.461538 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.282051 | 0.071429 | 42 | 3 | 18 | 14 | 0.384615 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 8 |
b15a82abf16e78bf965c517927d2e39ff84d7d78 | 132,617 | py | Python | i18n/tests/test_transifex.py | AbhigyaShridhar/cc-licenses | fe436a1ee5bdc86f764e88c5fb2182396dad462e | [
"MIT"
] | 23 | 2020-08-10T15:04:58.000Z | 2021-10-05T02:02:15.000Z | i18n/tests/test_transifex.py | hninsin/cc-legal-tools-app | 1dd22ac315aaa8df0eac2a320154dd0a01d35c6d | [
"MIT"
] | 80 | 2020-06-23T17:18:25.000Z | 2021-11-19T14:56:34.000Z | i18n/tests/test_transifex.py | hninsin/cc-legal-tools-app | 1dd22ac315aaa8df0eac2a320154dd0a01d35c6d | [
"MIT"
] | 23 | 2020-09-30T11:14:59.000Z | 2021-11-11T18:04:43.000Z | # Standard library
import datetime
from copy import deepcopy
from unittest import mock
# Third-party
import dateutil.parser
import polib
from dateutil.tz import tzutc
from django.conf import settings
from django.test import TestCase, override_settings
# First-party/Local
from i18n.transifex import (
LEGALCODES_KEY,
TransifexHelper,
_empty_branch_object,
)
from i18n.utils import get_pofile_content
from legal_tools.models import LegalCode
from legal_tools.tests.factories import LegalCodeFactory, ToolFactory
TEST_PROJ_SLUG = "x_proj_x"
TEST_ORG_SLUG = "x_org_x"
TEST_TOKEN = "x_token_x"
TEST_TEAM_ID = "x_team_id_x"
TEST_TRANSIFEX_SETTINGS = {
"ORGANIZATION_SLUG": TEST_ORG_SLUG,
"PROJECT_SLUG": TEST_PROJ_SLUG,
"API_TOKEN": TEST_TOKEN,
"TEAM_ID": TEST_TEAM_ID,
}
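# Minimal PO file fixture: a metadata header plus two entries,
# "license_medium" and "english text", reused across the tests below.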
POFILE_CONTENT = fr"""
msgid ""
msgstr ""
"Project-Id-Version: by-nd_40\n"
"Language-Team: https://www.transifex.com/{TEST_ORG_SLUG}/{TEST_PROJ_SLUG}/\n"
"Language: en\n"
"Language-Django: en\n"
"Language-Transifex: en\n"
"MIME-Version: 1.0\n"
"Content-Type: text/plain; charset=utf-8\n"
"Content-Transfer-Encoding: 8bit\n"
msgid "license_medium"
msgstr "Attribution-NoDerivatives 4.0 International"
msgid "english text"
msgstr "english text"
"""
class DummyRepo:
def __init__(self, path):
self.index = mock.MagicMock()
self.remotes = mock.MagicMock()
self.branches = mock.MagicMock()
self.heads = mock.MagicMock()
# def __str__(self):
# return "a dummy repo"
def __enter__(self):
return self
def __exit__(self, *a, **k):
pass
def is_dirty(self):
return False
def delete_head(self, name, force):
pass
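# TransifexHelper is constructed in setUp() with the Transifex API fully
# mocked, so no test in this class performs network I/O.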
@override_settings(
TRANSIFEX=TEST_TRANSIFEX_SETTINGS,
)
class TestTransifex(TestCase):
def setUp(self):
project_xa = mock.Mock(id="o:XA:p:XA", attributes={"slug": "XA"})
project_xa.__str__ = mock.Mock(return_value=project_xa.id)
project_xb = mock.Mock(id="o:XB:p:XB", attributes={"slug": "XB"})
project_xb.__str__ = mock.Mock(return_value=project_xb.id)
project_cc = mock.Mock(
id=f"o:{TEST_ORG_SLUG}:p:{TEST_PROJ_SLUG}",
attributes={"slug": TEST_PROJ_SLUG},
)
project_cc.__str__ = mock.Mock(return_value=project_cc.id)
project_xd = mock.Mock(id="o:XD:p:XD", attributes={"slug": "XD"})
project_xd.__str__ = mock.Mock(return_value=project_xd.id)
organization = mock.Mock(
id=f"o:{TEST_ORG_SLUG}",
attributes={"slug": TEST_ORG_SLUG},
)
organization.__str__ = mock.Mock(return_value=organization.id)
organization.fetch = mock.Mock(
return_value=[project_xa, project_xb, project_cc, project_xd]
)
i18n_format_xa = mock.Mock(id="XA")
i18n_format_xa.__str__ = mock.Mock(return_value=i18n_format_xa.id)
i18n_format_xb = mock.Mock(id="XB")
i18n_format_xb.__str__ = mock.Mock(return_value=i18n_format_xb.id)
i18n_format_po = mock.Mock(id="PO")
i18n_format_po.__str__ = mock.Mock(return_value=i18n_format_po.id)
i18n_format_xd = mock.Mock(id="XD")
i18n_format_xd.__str__ = mock.Mock(return_value=i18n_format_xd.id)
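        # Patch the transifex_api module while TransifexHelper is built so
        # its setup calls hit the mocks above instead of the real API.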
with mock.patch("i18n.transifex.transifex_api") as api:
api.Organization.get = mock.Mock(return_value=organization)
api.I18nFormat.filter = mock.Mock(
return_value=[
i18n_format_xa,
i18n_format_xb,
i18n_format_po,
i18n_format_xd,
]
)
self.helper = TransifexHelper(dryrun=False)
api.Organization.get.assert_called_once()
organization.fetch.assert_called_once()
api.I18nFormat.filter.assert_called_once()
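    # Test: _empty_branch_object ############################################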
def test__empty_branch_object(self):
empty = _empty_branch_object()
        self.assertEqual(empty, {LEGALCODES_KEY: []})
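    # Test: resource_stats ##################################################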
def test_resource_stats(self):
resources = [
mock.Mock(
id=f"o:{TEST_ORG_SLUG}:p:{TEST_PROJ_SLUG}:r:cc-search",
attributes={
"slug": "cc-search",
},
),
mock.Mock(
id=f"o:{TEST_ORG_SLUG}:p:{TEST_PROJ_SLUG}:r:deeds-choosers",
attributes={
"slug": "deeds-choosers",
},
),
mock.Mock(
id=f"o:{TEST_ORG_SLUG}:p:{TEST_PROJ_SLUG}:r:by-nc-nd_40",
attributes={
"accept_translations": True,
"datetime_created": "2020-09-21T15:22:49Z",
"datetime_modified": "2020-10-05T13:23:22Z",
"i18n_type": "PO",
"i18n_version": 2,
"name": "CC BY-NC-ND 4.0",
"priority": "high",
"slug": "by-nc-nd_40",
"string_count": 74,
"word_count": 2038,
},
),
mock.Mock(
id=f"o:{TEST_ORG_SLUG}:p:{TEST_PROJ_SLUG}:r:by-nc-sa_40",
attributes={
"accept_translations": True,
"datetime_created": "2020-10-05T13:40:25Z",
"datetime_modified": "2020-10-05T13:40:25Z",
"i18n_type": "PO",
"i18n_version": 2,
"name": "CC BY-NC-SA 4.0",
"priority": "high",
"slug": "by-nc-sa_40",
"string_count": 84,
"word_count": 2289,
},
),
]
all_resources = mock.Mock(return_value=resources)
self.helper.api_project.fetch = mock.Mock(
return_value=mock.Mock(all=all_resources)
)
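        # Read the property twice; the assert below verifies the API fetch
        # happens only once (the result is cached).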
# With _resource_stats empty
stats = self.helper.resource_stats
# With _resource_stats populated
stats = self.helper.resource_stats
all_resources.assert_called_once()
self.assertNotIn("cc-search", stats)
self.assertNotIn("deeds-choosers", stats)
self.assertIn("by-nc-nd_40", stats)
self.assertEqual(
"2020-09-21T15:22:49Z", stats["by-nc-nd_40"]["datetime_created"]
)
self.assertIn("by-nc-sa_40", stats)
self.assertEqual(2289, stats["by-nc-sa_40"]["word_count"])
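    # Test: translation_stats ###############################################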
    def test_translation_stats(self):
languages_stats = [
mock.Mock(
id=f"o:{TEST_ORG_SLUG}:p:{TEST_PROJ_SLUG}:r:cc-search:l:es",
attributes={
"last_proofread_update": None,
"last_review_update": "2018-04-15T12:50:40Z",
"last_translation_update": "2018-04-15T12:50:33Z",
"last_update": "2018-04-15T12:50:40Z",
"proofread_strings": 0,
"proofread_words": 0,
"reviewed_strings": 22,
"reviewed_words": 189,
"total_strings": 22,
"total_words": 189,
"translated_strings": 22,
"translated_words": 189,
"untranslated_strings": 0,
"untranslated_words": 0,
},
related={
"language": mock.Mock(id="l:es"),
"resource": mock.Mock(
id=f"o:{TEST_ORG_SLUG}:p:{TEST_PROJ_SLUG}:r:cc-search"
),
},
),
mock.Mock(
id=f"o:{TEST_ORG_SLUG}:p:{TEST_PROJ_SLUG}:r:deeds-choosers"
":l:nl",
attributes={
"last_proofread_update": None,
"last_review_update": "2020-10-02T06:47:38Z",
"last_translation_update": "2020-10-02T06:47:38Z",
"last_update": "2020-10-02T06:47:38Z",
"proofread_strings": 0,
"proofread_words": 0,
"reviewed_strings": 572,
"reviewed_words": 8124,
"total_strings": 575,
"total_words": 8128,
"translated_strings": 575,
"translated_words": 8128,
"untranslated_strings": 0,
"untranslated_words": 0,
},
related={
"language": mock.Mock(id="l:nl"),
"resource": mock.Mock(
id=f"o:{TEST_ORG_SLUG}:p:{TEST_PROJ_SLUG}:"
"r:deeds-choosers"
),
},
),
mock.Mock(
id=f"o:{TEST_ORG_SLUG}:p:{TEST_PROJ_SLUG}:r:deeds_ux:l:id",
attributes={
"last_proofread_update": None,
"last_review_update": None,
"last_translation_update": "2020-06-29T12:54:48Z",
"last_update": "2021-07-28T15:04:31Z",
"proofread_strings": 0,
"proofread_words": 0,
"reviewed_strings": 0,
"reviewed_words": 0,
"total_strings": 112,
"total_words": 2388,
"translated_strings": 0,
"translated_words": 0,
"untranslated_strings": 112,
"untranslated_words": 2388,
},
related={
"language": mock.Mock(id="l:id"),
"resource": mock.Mock(
id=f"o:{TEST_ORG_SLUG}:p:{TEST_PROJ_SLUG}:r:deeds_ux"
),
},
),
mock.Mock(
id=f"o:{TEST_ORG_SLUG}:p:{TEST_PROJ_SLUG}:r:deeds_ux:l:is",
attributes={
"last_proofread_update": None,
"last_review_update": None,
"last_translation_update": "2020-09-18T09:46:58Z",
"last_update": "2021-07-28T15:04:31Z",
"proofread_strings": 0,
"proofread_words": 0,
"reviewed_strings": 0,
"reviewed_words": 0,
"total_strings": 112,
"total_words": 2388,
"translated_strings": 30,
"translated_words": 74,
"untranslated_strings": 82,
"untranslated_words": 2314,
},
related={
"language": mock.Mock(id="l:is"),
"resource": mock.Mock(
id=f"o:{TEST_ORG_SLUG}:p:{TEST_PROJ_SLUG}:r:deeds_ux"
),
},
),
mock.Mock(
id=f"o:{TEST_ORG_SLUG}:p:{TEST_PROJ_SLUG}:r:deeds_ux:l:it",
attributes={
"last_proofread_update": None,
"last_review_update": None,
"last_translation_update": "2020-10-28T16:00:16Z",
"last_update": "2021-07-28T15:04:31Z",
"proofread_strings": 0,
"proofread_words": 0,
"reviewed_strings": 0,
"reviewed_words": 0,
"total_strings": 112,
"total_words": 2388,
"translated_strings": 50,
"translated_words": 500,
"untranslated_strings": 62,
"untranslated_words": 1888,
},
related={
"language": mock.Mock(id="l:it"),
"resource": mock.Mock(
id=f"o:{TEST_ORG_SLUG}:p:{TEST_PROJ_SLUG}:r:deeds_ux"
),
},
),
]
all_lang_stats = mock.Mock(return_value=languages_stats)
self.helper.api.ResourceLanguageStats.filter = mock.Mock(
return_value=mock.Mock(all=all_lang_stats)
)
        # With _translation_stats empty
stats = self.helper.translation_stats
        # With _translation_stats populated
stats = self.helper.translation_stats
all_lang_stats.assert_called_once()
self.assertNotIn("cc-search", stats)
self.assertNotIn("deeds-choosers", stats)
self.assertIn("deeds_ux", stats)
self.assertIn("id", stats["deeds_ux"])
self.assertIn("is", stats["deeds_ux"])
self.assertIn("it", stats["deeds_ux"])
self.assertEqual(
0, stats["deeds_ux"]["id"].get("translated_strings", 0)
)
self.assertEqual(
30, stats["deeds_ux"]["is"].get("translated_strings", 0)
)
self.assertEqual(
50, stats["deeds_ux"]["it"].get("translated_strings", 0)
)
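    # Test: transifex_get_pofile_content ####################################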
def test_transifex_get_pofile_content_bad_i18n_type(self):
api = self.helper.api
resource_slug = "x_resource_x"
transifex_code = "en"
resource = mock.Mock(
id=f"o:{TEST_ORG_SLUG}:p:{TEST_PROJ_SLUG}:r:{resource_slug}",
attributes={"i18n_type": "XA"},
)
self.helper.api.Resource.get = mock.Mock(return_value=resource)
with mock.patch("requests.get") as request:
with self.assertRaises(ValueError) as cm:
self.helper.transifex_get_pofile_content(
resource_slug, transifex_code
)
self.assertEqual(
f"Transifex {resource_slug} file format is not 'PO'. It is: XA",
str(cm.exception),
)
api.ResourceStringsAsyncDownload.download.assert_not_called()
api.ResourceTranslationsAsyncDownload.download.assert_not_called()
request.assert_not_called()
def test_transifex_get_pofile_content_source(self):
api = self.helper.api
resource_slug = "x_resource_x"
transifex_code = "en"
resource = mock.Mock(
id=f"o:{TEST_ORG_SLUG}:p:{TEST_PROJ_SLUG}:r:{resource_slug}",
attributes={"i18n_type": "PO"},
)
self.helper.api.Resource.get = mock.Mock(return_value=resource)
with mock.patch("requests.get") as request:
request.return_value = mock.MagicMock(content=b"xxxxxx")
result = self.helper.transifex_get_pofile_content(
resource_slug, transifex_code
)
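        # The source language ("en") must use the strings download endpoint,
        # never the translations endpoint.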
api.ResourceStringsAsyncDownload.download.assert_called_once()
api.ResourceTranslationsAsyncDownload.download.assert_not_called()
self.assertEqual(result, b"xxxxxx")
def test_transifex_get_pofile_content_translation(self):
api = self.helper.api
resource_slug = "x_resource_x"
transifex_code = "nl"
resource = mock.Mock(
id=f"o:{TEST_ORG_SLUG}:p:{TEST_PROJ_SLUG}:r:{resource_slug}",
attributes={"i18n_type": "PO"},
)
self.helper.api.Resource.get = mock.Mock(return_value=resource)
with mock.patch("requests.get") as request:
request.return_value = mock.MagicMock(content=b"yyyyyy")
result = self.helper.transifex_get_pofile_content(
resource_slug, transifex_code
)
        api.ResourceStringsAsyncDownload.download.assert_not_called()
api.ResourceTranslationsAsyncDownload.download.assert_called_once()
self.assertEqual(result, b"yyyyyy")
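    # Test: clear_transifex_stats ###########################################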
def test_clear_transifex_stats(self):
        with self.assertRaises(AttributeError):
            self.helper._resource_stats
        with self.assertRaises(AttributeError):
            self.helper._translation_stats
self.helper.clear_transifex_stats()
self.helper._resource_stats = 1
self.helper._translation_stats = 1
self.helper.clear_transifex_stats()
        with self.assertRaises(AttributeError):
            self.helper._resource_stats
        with self.assertRaises(AttributeError):
            self.helper._translation_stats
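    # Test: check_data_repo_is_clean ########################################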
def test_check_data_repo_is_clean_true(self):
mock_repo = mock.Mock(
__str__=mock.Mock(return_value="mock_repo"),
is_dirty=mock.Mock(return_value=False),
)
with mock.patch("git.Repo") as git_repo:
result = self.helper.check_data_repo_is_clean(mock_repo)
git_repo.assert_not_called()
self.assertTrue(result)
@override_settings(DATA_REPOSITORY_DIR="/trans/repo")
def test_check_data_repo_is_clean_false(self):
mock_repo = mock.Mock(
__str__=mock.Mock(return_value="mock_repo"),
is_dirty=mock.Mock(return_value=True),
)
with mock.patch("git.Repo") as git_repo:
git_repo.return_value.__enter__.return_value = mock_repo
result = self.helper.check_data_repo_is_clean()
git_repo.assert_called_once()
self.assertFalse(result)
# Test: get_local_data ###################################################
@override_settings(
DEEDS_UX_PO_FILE_INFO={
"af": {
"creation_date": datetime.datetime(
2020, 6, 29, 12, 54, 48, tzinfo=tzutc()
),
"revision_date": datetime.datetime(
2021, 7, 28, 15, 4, 31, tzinfo=tzutc()
),
},
}
)
def test_get_local_data_all(self):
limit_domain = None
limit_language = None
deeds_ux = settings.DEEDS_UX_PO_FILE_INFO
tool = ToolFactory(unit="by", version="4.0")
LegalCodeFactory(tool=tool, language_code=settings.LANGUAGE_CODE)
LegalCodeFactory(tool=tool, language_code="de")
legal_codes = list(
LegalCode.objects.valid()
.translated()
.exclude(language_code=settings.LANGUAGE_CODE)
)
self.helper.build_local_data = mock.Mock()
self.helper.get_local_data(limit_domain, limit_language)
self.helper.build_local_data.assert_called_once()
self.helper.build_local_data.assert_called_with(deeds_ux, legal_codes)
@override_settings(
DEEDS_UX_PO_FILE_INFO={
"es": {
"creation_date": datetime.datetime(
2020, 6, 29, 12, 54, 48, tzinfo=tzutc()
),
"revision_date": datetime.datetime(
2021, 7, 28, 15, 4, 31, tzinfo=tzutc()
),
},
}
)
def test_get_local_data_limit_to_deeds_ux(self):
limit_domain = "deeds_ux"
limit_language = None
tool = ToolFactory(unit="by", version="4.0")
LegalCodeFactory(tool=tool, language_code=settings.LANGUAGE_CODE)
LegalCodeFactory(tool=tool, language_code="de")
deeds_ux = settings.DEEDS_UX_PO_FILE_INFO
legal_codes = []
self.helper.build_local_data = mock.Mock()
self.helper.get_local_data(limit_domain, limit_language)
self.helper.build_local_data.assert_called_once()
self.helper.build_local_data.assert_called_with(deeds_ux, legal_codes)
@override_settings(
DEEDS_UX_PO_FILE_INFO={
"es": {
"creation_date": datetime.datetime(
2020, 6, 29, 12, 54, 48, tzinfo=tzutc()
),
"revision_date": datetime.datetime(
2021, 7, 28, 15, 4, 31, tzinfo=tzutc()
),
},
}
)
def test_get_local_data_limit_to_legal_code(self):
limit_domain = "legal_code"
limit_language = None
tool = ToolFactory(unit="by", version="4.0")
LegalCodeFactory(tool=tool, language_code=settings.LANGUAGE_CODE)
LegalCodeFactory(tool=tool, language_code="es")
deeds_ux = {}
legal_codes = list(
LegalCode.objects.valid()
.translated()
.exclude(language_code=settings.LANGUAGE_CODE)
)
self.helper.build_local_data = mock.Mock()
self.helper.get_local_data(limit_domain, limit_language)
self.helper.build_local_data.assert_called_once()
self.helper.build_local_data.assert_called_with(deeds_ux, legal_codes)
@override_settings(
DEEDS_UX_PO_FILE_INFO={
"es": {
"creation_date": datetime.datetime(
2020, 6, 29, 12, 54, 48, tzinfo=tzutc()
),
"revision_date": datetime.datetime(
2021, 7, 28, 15, 4, 31, tzinfo=tzutc()
),
},
"nl": {
"creation_date": datetime.datetime(
2020, 6, 29, 12, 54, 48, tzinfo=tzutc()
),
"revision_date": datetime.datetime(
2021, 7, 28, 15, 4, 31, tzinfo=tzutc()
),
},
}
)
def test_get_local_data_limit_to_deeds_ux_nl(self):
limit_domain = "deeds_ux"
limit_language = "nl"
tool = ToolFactory(unit="by", version="4.0")
LegalCodeFactory(tool=tool, language_code=settings.LANGUAGE_CODE)
LegalCodeFactory(tool=tool, language_code="es")
LegalCodeFactory(tool=tool, language_code="nl")
tool = ToolFactory(unit="by-sa", version="4.0")
LegalCodeFactory(tool=tool, language_code=settings.LANGUAGE_CODE)
LegalCodeFactory(tool=tool, language_code="es")
LegalCodeFactory(tool=tool, language_code="nl")
deeds_ux = {"nl": settings.DEEDS_UX_PO_FILE_INFO["nl"]}
legal_codes = []
self.helper.build_local_data = mock.Mock()
self.helper.get_local_data(limit_domain, limit_language)
self.helper.build_local_data.assert_called_once()
self.helper.build_local_data.assert_called_with(deeds_ux, legal_codes)
@override_settings(
DEEDS_UX_PO_FILE_INFO={
"es": {
"creation_date": datetime.datetime(
2020, 6, 29, 12, 54, 48, tzinfo=tzutc()
),
"revision_date": datetime.datetime(
2021, 7, 28, 15, 4, 31, tzinfo=tzutc()
),
},
"nl": {
"creation_date": datetime.datetime(
2020, 6, 29, 12, 54, 48, tzinfo=tzutc()
),
"revision_date": datetime.datetime(
2021, 7, 28, 15, 4, 31, tzinfo=tzutc()
),
},
}
)
def test_get_local_data_limit_to_by_40_nl(self):
limit_domain = "by_40"
limit_language = "nl"
tool = ToolFactory(unit="by", version="4.0")
LegalCodeFactory(tool=tool, language_code=settings.LANGUAGE_CODE)
LegalCodeFactory(tool=tool, language_code="es")
LegalCodeFactory(tool=tool, language_code="nl")
tool = ToolFactory(unit="by-sa", version="4.0")
LegalCodeFactory(tool=tool, language_code=settings.LANGUAGE_CODE)
LegalCodeFactory(tool=tool, language_code="es")
LegalCodeFactory(tool=tool, language_code="nl")
deeds_ux = {}
legal_codes = list(
LegalCode.objects.valid()
.translated()
.filter(
language_code=limit_language,
tool__unit="by",
tool__version="4.0",
)
)
self.helper.build_local_data = mock.Mock()
self.helper.get_local_data(limit_domain, limit_language)
self.helper.build_local_data.assert_called_once()
self.helper.build_local_data.assert_called_with(deeds_ux, legal_codes)
# Test: build_local_data #################################################
@override_settings(
DEEDS_UX_PO_FILE_INFO={
"af": {
"creation_date": datetime.datetime(
2020, 6, 29, 12, 54, 48, tzinfo=tzutc()
),
"revision_date": datetime.datetime(
2021, 7, 28, 15, 4, 31, tzinfo=tzutc()
),
},
"en": {
"creation_date": datetime.datetime(
2020, 6, 29, 12, 54, 48, tzinfo=tzutc()
),
"revision_date": datetime.datetime(
2021, 7, 28, 15, 4, 31, tzinfo=tzutc()
),
},
}
)
def test_build_local_data(self):
deeds_ux = settings.DEEDS_UX_PO_FILE_INFO
tool = ToolFactory(unit="by", version="4.0")
LegalCodeFactory(tool=tool, language_code="en")
LegalCodeFactory(tool=tool, language_code="es")
LegalCodeFactory(tool=tool, language_code="nl")
legal_codes = list(LegalCode.objects.valid().translated())
local_data = self.helper.build_local_data(deeds_ux, legal_codes)
self.assertIn("by_40", local_data)
self.assertIn("name", local_data["by_40"])
self.assertEqual(local_data["by_40"]["name"], "CC BY 4.0")
self.assertEqual(
list(local_data["by_40"]["translations"].keys()), ["es", "nl"]
)
self.assertIn("deeds_ux", local_data)
self.assertIn("name", local_data["deeds_ux"])
self.assertEqual(local_data["deeds_ux"]["name"], "Deeds & UX")
self.assertIn("translations", local_data["deeds_ux"])
self.assertEqual(
list(local_data["deeds_ux"]["translations"].keys()), ["af"]
)
@override_settings(
DEEDS_UX_PO_FILE_INFO={
"af": {
"creation_date": datetime.datetime(
2020, 6, 29, 12, 54, 48, tzinfo=tzutc()
),
"revision_date": datetime.datetime(
2021, 7, 28, 15, 4, 31, tzinfo=tzutc()
),
},
"be": {
"creation_date": datetime.datetime(
2020, 6, 29, 12, 54, 48, tzinfo=tzutc()
),
"revision_date": datetime.datetime(
2021, 7, 28, 15, 4, 31, tzinfo=tzutc()
),
},
}
)
def test_build_local_data_limit_to_deeds_ux(self):
deeds_ux = settings.DEEDS_UX_PO_FILE_INFO
legal_codes = []
local_data = self.helper.build_local_data(deeds_ux, legal_codes)
self.assertNotIn("by_40", local_data)
self.assertIn("deeds_ux", local_data)
self.assertIn("name", local_data["deeds_ux"])
self.assertEqual(local_data["deeds_ux"]["name"], "Deeds & UX")
self.assertIn("translations", local_data["deeds_ux"])
self.assertEqual(
list(local_data["deeds_ux"]["translations"].keys()), ["af", "be"]
)
@override_settings(DEEDS_UX_PO_FILE_INFO={})
def test_build_local_data_limit_to_legal_code(self):
deeds_ux = settings.DEEDS_UX_PO_FILE_INFO
tool = ToolFactory(unit="by", version="4.0")
LegalCodeFactory(tool=tool, language_code=settings.LANGUAGE_CODE)
LegalCodeFactory(tool=tool, language_code="es")
LegalCodeFactory(tool=tool, language_code="nl")
legal_codes = list(
LegalCode.objects.valid()
.translated()
.exclude(language_code=settings.LANGUAGE_CODE)
)
local_data = self.helper.build_local_data(deeds_ux, legal_codes)
self.assertIn("by_40", local_data)
self.assertIn("name", local_data["by_40"])
self.assertEqual(local_data["by_40"]["name"], "CC BY 4.0")
self.assertIn("translations", local_data["by_40"])
self.assertEqual(
list(local_data["by_40"]["translations"].keys()), ["es", "nl"]
)
self.assertNotIn("deeds_ux", local_data)
# Test: resource_present #################################################
def test_resource_present_false(self):
resource_slug = "x_slug_x"
resource_name = "x_name_x"
self.helper._resource_stats = {}
with self.assertLogs(self.helper.log) as log_context:
result = self.helper.resource_present(resource_slug, resource_name)
self.assertTrue(log_context.output[0].startswith("CRITICAL:"))
self.assertIn("Aborting resource processing.", log_context.output[0])
self.assertFalse(result)
def test_resource_present_true(self):
resource_slug = "x_slug_x"
resource_name = "x_name_x"
self.helper._resource_stats = {resource_slug: {}}
result = self.helper.resource_present(resource_slug, resource_name)
self.assertTrue(result)
# Test: translation_supported ############################################
def test_translation_supported_false(self):
resource_slug = "x_slug_x"
resource_name = "x_name_x"
transifex_code = "x_trans_code_x"
self.helper._translation_stats = {resource_slug: {}}
with self.assertLogs(self.helper.log) as log_context:
result = self.helper.translation_supported(
resource_slug, resource_name, transifex_code
)
self.assertTrue(log_context.output[0].startswith("CRITICAL:"))
self.assertIn(
"Aborting translation language processing.", log_context.output[0]
)
self.assertFalse(result)
def test_translation_supported_true(self):
resource_slug = "x_slug_x"
resource_name = "x_name_x"
transifex_code = "x_trans_code_x"
self.helper._translation_stats = {resource_slug: {transifex_code: {}}}
result = self.helper.translation_supported(
resource_slug, resource_name, transifex_code
)
self.assertTrue(result)
# Test: resources_metadata_identical #####################################
def test_resources_metadata_identical_false_differ_creation(self):
transifex_code = "x_trans_code_x"
resource_slug = "x_slug_x"
resource_name = "x_name_x"
pofile_path = "x_path_x"
pofile_creation = dateutil.parser.isoparse("2021-01-01 01:01:01+00:00")
pofile_revision = dateutil.parser.isoparse("2021-03-03 03:03:03+00:00")
pofile_string_count = 1
transifex_creation = dateutil.parser.isoparse(
"2021-02-02 02:02:02+00:00"
)
transifex_revision = pofile_revision
transifex_string_count = pofile_string_count
with self.assertLogs(self.helper.log) as log_context:
result = self.helper.resources_metadata_identical(
transifex_code,
resource_slug,
resource_name,
pofile_path,
pofile_creation,
pofile_revision,
pofile_string_count,
transifex_creation,
transifex_revision,
transifex_string_count,
)
self.assertTrue(log_context.output[0].startswith("ERROR:"))
self.assertIn("creation:", log_context.output[0])
self.assertNotIn("revision:", log_context.output[0])
self.assertNotIn("string count:", log_context.output[0])
self.assertFalse(result)
def test_resources_metadata_identical_false_differ_revision(self):
transifex_code = "x_trans_code_x"
resource_slug = "x_slug_x"
resource_name = "x_name_x"
pofile_path = "x_path_x"
pofile_creation = dateutil.parser.isoparse("2021-01-01 01:01:01+00:00")
pofile_revision = dateutil.parser.isoparse("2021-02-02 02:02:02+00:00")
pofile_string_count = 1
transifex_creation = pofile_creation
transifex_revision = dateutil.parser.isoparse(
"2021-03-03 03:03:03+00:00"
)
transifex_string_count = pofile_string_count
with self.assertLogs(self.helper.log) as log_context:
result = self.helper.resources_metadata_identical(
transifex_code,
resource_slug,
resource_name,
pofile_path,
pofile_creation,
pofile_revision,
pofile_string_count,
transifex_creation,
transifex_revision,
transifex_string_count,
)
self.assertTrue(log_context.output[0].startswith("ERROR:"))
self.assertNotIn("creation:", log_context.output[0])
self.assertIn("revision:", log_context.output[0])
self.assertNotIn("string count:", log_context.output[0])
self.assertFalse(result)
def test_resources_metadata_identical_false_differ_string_count(self):
transifex_code = "x_trans_code_x"
resource_slug = "x_slug_x"
resource_name = "x_name_x"
pofile_path = "x_path_x"
pofile_creation = dateutil.parser.isoparse("2021-01-01 01:01:01+00:00")
pofile_revision = dateutil.parser.isoparse("2021-02-02 02:02:02+00:00")
pofile_string_count = 1
transifex_creation = pofile_creation
transifex_revision = pofile_revision
transifex_string_count = 2
with self.assertLogs(self.helper.log) as log_context:
result = self.helper.resources_metadata_identical(
transifex_code,
resource_slug,
resource_name,
pofile_path,
pofile_creation,
pofile_revision,
pofile_string_count,
transifex_creation,
transifex_revision,
transifex_string_count,
)
self.assertTrue(log_context.output[0].startswith("ERROR:"))
self.assertNotIn("creation:", log_context.output[0])
self.assertNotIn("revision:", log_context.output[0])
self.assertIn("string count:", log_context.output[0])
self.assertFalse(result)
def test_resources_metadata_identical_false_differ_all(self):
transifex_code = "x_trans_code_x"
resource_slug = "x_slug_x"
resource_name = "x_name_x"
pofile_path = "x_path_x"
pofile_creation = dateutil.parser.isoparse("2021-01-01 01:01:01+00:00")
pofile_revision = dateutil.parser.isoparse("2021-02-02 02:02:02+00:00")
pofile_string_count = 1
transifex_creation = dateutil.parser.isoparse(
"2021-03-03 03:03:03+00:00"
)
transifex_revision = dateutil.parser.isoparse(
"2021-04-04 04:04:04+00:00"
)
transifex_string_count = 2
with self.assertLogs(self.helper.log) as log_context:
result = self.helper.resources_metadata_identical(
transifex_code,
resource_slug,
resource_name,
pofile_path,
pofile_creation,
pofile_revision,
pofile_string_count,
transifex_creation,
transifex_revision,
transifex_string_count,
)
self.assertTrue(log_context.output[0].startswith("ERROR:"))
self.assertIn("creation:", log_context.output[0])
self.assertIn("revision:", log_context.output[0])
self.assertIn("string count:", log_context.output[0])
self.assertFalse(result)
def test_resources_metadata_identical_true(self):
transifex_code = "x_trans_code_x"
resource_slug = "x_slug_x"
resource_name = "x_name_x"
pofile_path = "x_path_x"
pofile_creation = dateutil.parser.isoparse("2021-01-01 01:01:01+00:00")
pofile_revision = dateutil.parser.isoparse("2021-02-02 02:02:02+00:00")
pofile_string_count = 1
transifex_creation = pofile_creation
transifex_revision = pofile_revision
transifex_string_count = pofile_string_count
with self.assertLogs(self.helper.log, level="DEBUG") as log_context:
result = self.helper.resources_metadata_identical(
transifex_code,
resource_slug,
resource_name,
pofile_path,
pofile_creation,
pofile_revision,
pofile_string_count,
transifex_creation,
transifex_revision,
transifex_string_count,
)
self.assertTrue(log_context.output[0].startswith("DEBUG:"))
self.assertNotIn("creation:", log_context.output[0])
self.assertNotIn("revision:", log_context.output[0])
self.assertNotIn("string count:", log_context.output[0])
self.assertTrue(result)
# Test: translations_metadata_identical ##################################
def test_translations_metadata_identical_false_differ_creation(self):
transifex_code = "x_trans_code_x"
resource_slug = "x_slug_x"
resource_name = "x_name_x"
pofile_path = "x_path_x"
pofile_creation = dateutil.parser.isoparse("2021-01-01 01:01:01+00:00")
pofile_revision = dateutil.parser.isoparse("2021-03-03 03:03:03+00:00")
pofile_translated = 1
transifex_creation = dateutil.parser.isoparse(
"2021-02-02 02:02:02+00:00"
)
transifex_revision = pofile_revision
transifex_translated = pofile_translated
with self.assertLogs(self.helper.log) as log_context:
result = self.helper.translations_metadata_identical(
transifex_code,
resource_slug,
resource_name,
pofile_path,
pofile_creation,
pofile_revision,
pofile_translated,
transifex_creation,
transifex_revision,
transifex_translated,
)
self.assertTrue(log_context.output[0].startswith("ERROR:"))
self.assertIn("creation:", log_context.output[0])
self.assertNotIn("revision:", log_context.output[0])
self.assertNotIn("translated entries:", log_context.output[0])
self.assertFalse(result)
def test_translations_metadata_identical_false_differ_revision(self):
transifex_code = "x_trans_code_x"
resource_slug = "x_slug_x"
resource_name = "x_name_x"
pofile_path = "x_path_x"
pofile_creation = dateutil.parser.isoparse("2021-01-01 01:01:01+00:00")
pofile_revision = dateutil.parser.isoparse("2021-02-02 02:02:02+00:00")
pofile_translated = 1
transifex_creation = pofile_creation
transifex_revision = dateutil.parser.isoparse(
"2021-03-03 03:03:03+00:00"
)
transifex_translated = pofile_translated
with self.assertLogs(self.helper.log) as log_context:
result = self.helper.translations_metadata_identical(
transifex_code,
resource_slug,
resource_name,
pofile_path,
pofile_creation,
pofile_revision,
pofile_translated,
transifex_creation,
transifex_revision,
transifex_translated,
)
self.assertTrue(log_context.output[0].startswith("ERROR:"))
self.assertNotIn("creation:", log_context.output[0])
self.assertIn("revision:", log_context.output[0])
self.assertNotIn("translated entries:", log_context.output[0])
self.assertFalse(result)
def test_translations_metadata_identical_false_differ_translated(self):
transifex_code = "x_trans_code_x"
resource_slug = "x_slug_x"
resource_name = "x_name_x"
pofile_path = "x_path_x"
pofile_creation = dateutil.parser.isoparse("2021-01-01 01:01:01+00:00")
pofile_revision = dateutil.parser.isoparse("2021-02-02 02:02:02+00:00")
pofile_translated = 1
transifex_creation = pofile_creation
transifex_revision = pofile_revision
transifex_translated = 2
with self.assertLogs(self.helper.log) as log_context:
result = self.helper.translations_metadata_identical(
transifex_code,
resource_slug,
resource_name,
pofile_path,
pofile_creation,
pofile_revision,
pofile_translated,
transifex_creation,
transifex_revision,
transifex_translated,
)
self.assertTrue(log_context.output[0].startswith("ERROR:"))
self.assertNotIn("creation:", log_context.output[0])
self.assertNotIn("revision:", log_context.output[0])
self.assertIn("translated entries:", log_context.output[0])
self.assertFalse(result)
def test_translations_metadata_identical_false_differ_all(self):
transifex_code = "x_trans_code_x"
resource_slug = "x_slug_x"
resource_name = "x_name_x"
pofile_path = "x_path_x"
pofile_creation = dateutil.parser.isoparse("2021-01-01 01:01:01+00:00")
pofile_revision = dateutil.parser.isoparse("2021-02-02 02:02:02+00:00")
pofile_translated = 1
transifex_creation = dateutil.parser.isoparse(
"2021-03-03 03:03:03+00:00"
)
transifex_revision = dateutil.parser.isoparse(
"2021-04-04 04:04:04+00:00"
)
transifex_translated = 2
with self.assertLogs(self.helper.log) as log_context:
result = self.helper.translations_metadata_identical(
transifex_code,
resource_slug,
resource_name,
pofile_path,
pofile_creation,
pofile_revision,
pofile_translated,
transifex_creation,
transifex_revision,
transifex_translated,
)
self.assertTrue(log_context.output[0].startswith("ERROR:"))
self.assertIn("creation:", log_context.output[0])
self.assertIn("revision:", log_context.output[0])
self.assertIn("translated entries:", log_context.output[0])
self.assertFalse(result)
def test_translations_metadata_identical_true(self):
transifex_code = "x_trans_code_x"
resource_slug = "x_slug_x"
resource_name = "x_name_x"
pofile_path = "x_path_x"
pofile_creation = dateutil.parser.isoparse("2021-01-01 01:01:01+00:00")
pofile_revision = dateutil.parser.isoparse("2021-02-02 02:02:02+00:00")
pofile_translated = 1
transifex_creation = pofile_creation
transifex_revision = pofile_revision
transifex_translated = pofile_translated
with self.assertLogs(self.helper.log, level="DEBUG") as log_context:
result = self.helper.translations_metadata_identical(
transifex_code,
resource_slug,
resource_name,
pofile_path,
pofile_creation,
pofile_revision,
pofile_translated,
transifex_creation,
transifex_revision,
transifex_translated,
)
self.assertTrue(log_context.output[0].startswith("DEBUG:"))
self.assertNotIn("creation:", log_context.output[0])
self.assertNotIn("revision:", log_context.output[0])
self.assertNotIn("translated entries:", log_context.output[0])
self.assertTrue(result)
# Test: safesync_translation #############################################
def test_safesync_translation_mismatched_msgids(self):
api = self.helper.api
language_code = "x_lang_code_x"
transifex_code = "x_trans_code_x"
resource_slug = "x_slug_x"
pofile_path = "x_path_x"
pofile_obj = polib.pofile(pofile=POFILE_CONTENT)
language = mock.Mock(
id=f"l:{transifex_code}",
)
api.Language.get = mock.Mock(return_value=language)
resource = mock.Mock(
id=f"o:{TEST_ORG_SLUG}:p:{TEST_PROJ_SLUG}:r:{resource_slug}",
attributes={"i18n_type": "PO"},
)
api.Resource.get = mock.Mock(return_value=resource)
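        # The first mock translation's source string matches no local msgid,
        # so the helper must log CRITICAL and sync nothing in either direction.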
translations = [
mock.Mock(
resource_string=mock.Mock(
strings={"other": "XXXXXXXXXXXXXXXXXXXXXXX"}
),
strings={"other": pofile_obj[0].msgstr},
save=mock.Mock(),
),
mock.Mock(
resource_string=mock.Mock(
strings={"other": pofile_obj[1].msgid}
),
strings={"other": pofile_obj[1].msgstr},
save=mock.Mock(),
),
]
api.ResourceTranslation.filter = mock.Mock(
return_value=mock.Mock(
include=mock.Mock(
return_value=mock.Mock(
all=mock.Mock(return_value=translations)
),
),
),
)
self.helper.clear_transifex_stats = mock.Mock()
with self.assertLogs(self.helper.log) as log_context:
with mock.patch.object(polib.POFile, "save") as mock_pofile_save:
pofile_obj_new = self.helper.safesync_translation(
language_code,
transifex_code,
resource_slug,
pofile_path,
pofile_obj,
)
self.assertEqual(pofile_obj_new, pofile_obj)
self.assertTrue(log_context.output[0].startswith("CRITICAL:"))
self.assertIn(
"Local PO File msgid and Transifex msgid do not match",
log_context.output[0],
)
translations[0].save.assert_not_called()
translations[1].save.assert_not_called()
self.helper.clear_transifex_stats.assert_not_called()
        mock_pofile_save.assert_not_called()
def test_safesync_translation_with_transifex_changes(self):
api = self.helper.api
language_code = "x_lang_code_x"
transifex_code = "x_trans_code_x"
resource_slug = "x_slug_x"
pofile_path = "x_path_x"
pofile_obj = polib.pofile(pofile=POFILE_CONTENT)
language = mock.Mock(
id=f"l:{transifex_code}",
)
api.Language.get = mock.Mock(return_value=language)
resource = mock.Mock(
id=f"o:{TEST_ORG_SLUG}:p:{TEST_PROJ_SLUG}:r:{resource_slug}",
attributes={"i18n_type": "PO"},
)
api.Resource.get = mock.Mock(return_value=resource)
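        # Entry 0 is untranslated on Transifex (strings=None), so the local
        # msgstr should be pushed; entry 1 already differs and is left alone.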
translations = [
mock.Mock(
resource_string=mock.Mock(
strings={"other": pofile_obj[0].msgid}
),
strings=None,
save=mock.Mock(),
),
mock.Mock(
resource_string=mock.Mock(
strings={"other": pofile_obj[1].msgid}
),
strings={
"other": pofile_obj[1].msgstr.replace(
'msgstr "english text"',
'msgstr "english text!!!!!!"',
),
},
save=mock.Mock(),
),
]
api.ResourceTranslation.filter = mock.Mock(
return_value=mock.Mock(
include=mock.Mock(
return_value=mock.Mock(
all=mock.Mock(return_value=translations)
),
),
),
)
self.helper.clear_transifex_stats = mock.Mock()
with self.assertLogs(self.helper.log) as log_context:
with mock.patch.object(polib.POFile, "save") as mock_pofile_save:
pofile_obj_new = self.helper.safesync_translation(
language_code,
transifex_code,
resource_slug,
pofile_path,
pofile_obj,
)
self.assertEqual(pofile_obj_new, pofile_obj)
self.assertTrue(log_context.output[0].startswith("INFO:"))
self.assertIn(
"Adding translation from PO File to Transifex",
log_context.output[0],
)
self.assertIn(" msgid 0: 'license_medium'", log_context.output[0])
self.assertNotIn(" msgid 1: 'english text'", log_context.output[0])
translations[0].save.assert_called()
translations[1].save.assert_not_called()
self.helper.clear_transifex_stats.assert_called()
mock_pofile_save.assert_not_called()
def test_safesync_translation_with_pofile_changes(self):
api = self.helper.api
language_code = "x_lang_code_x"
transifex_code = "x_trans_code_x"
resource_slug = "x_slug_x"
pofile_path = "x_path_x"
pofile_obj = polib.pofile(pofile=POFILE_CONTENT)
language = mock.Mock(
id=f"l:{transifex_code}",
)
api.Language.get = mock.Mock(return_value=language)
resource = mock.Mock(
id=f"o:{TEST_ORG_SLUG}:p:{TEST_PROJ_SLUG}:r:{resource_slug}",
attributes={"i18n_type": "PO"},
)
api.Resource.get = mock.Mock(return_value=resource)
translations = [
mock.Mock(
resource_string=mock.Mock(
strings={"other": pofile_obj[0].msgid}
),
strings={
"other": pofile_obj[0].msgstr.replace(
"Attribution", "XXXXXXXXXXX"
),
},
save=mock.Mock(),
),
mock.Mock(
resource_string=mock.Mock(
strings={"other": pofile_obj[1].msgid}
),
strings={
"other": pofile_obj[1].msgstr.replace(
'msgstr "english text"',
'msgstr "english text!!!!!!"',
),
},
save=mock.Mock(),
),
]
api.ResourceTranslation.filter = mock.Mock(
return_value=mock.Mock(
include=mock.Mock(
return_value=mock.Mock(
all=mock.Mock(return_value=translations)
),
),
),
)
self.helper.clear_transifex_stats = mock.Mock()
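        # Blank the local msgstr for entry 0 so the Transifex translation is
        # the only value and gets written back into the PO file.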
pofile_obj[0].msgstr = ""
with self.assertLogs(self.helper.log) as log_context:
with mock.patch.object(polib.POFile, "save") as mock_pofile_save:
pofile_obj_new = self.helper.safesync_translation(
language_code,
transifex_code,
resource_slug,
pofile_path,
pofile_obj,
)
self.assertEqual(
pofile_obj_new[0].msgstr,
"XXXXXXXXXXX-NoDerivatives 4.0 International",
)
self.assertTrue(log_context.output[0].startswith("INFO:"))
self.assertIn(
"Adding translation from Transifex to PO File",
log_context.output[0],
)
self.assertIn(" msgid 0: 'license_medium'", log_context.output[0])
self.assertNotIn(" msgid 1: 'english text'", log_context.output[0])
translations[0].save.assert_not_called()
translations[1].save.assert_not_called()
self.helper.clear_transifex_stats.assert_not_called()
mock_pofile_save.assert_called_once()
def test_safesync_translation_with_both_changes(self):
api = self.helper.api
language_code = "x_lang_code_x"
transifex_code = "x_trans_code_x"
resource_slug = "x_slug_x"
pofile_path = "x_path_x"
pofile_obj = polib.pofile(pofile=POFILE_CONTENT)
language = mock.Mock(
id=f"l:{transifex_code}",
)
api.Language.get = mock.Mock(return_value=language)
resource = mock.Mock(
id=f"o:{TEST_ORG_SLUG}:p:{TEST_PROJ_SLUG}:r:{resource_slug}",
attributes={"i18n_type": "PO"},
)
api.Resource.get = mock.Mock(return_value=resource)
translations = [
mock.Mock(
resource_string=mock.Mock(
strings={"other": pofile_obj[0].msgid}
),
strings={
"other": pofile_obj[0].msgstr.replace(
"Attribution", "XXXXXXXXXXX"
),
},
save=mock.Mock(),
),
mock.Mock(
resource_string=mock.Mock(
strings={"other": pofile_obj[1].msgid}
),
strings={"other": ""},
save=mock.Mock(),
),
]
api.ResourceTranslation.filter = mock.Mock(
return_value=mock.Mock(
include=mock.Mock(
return_value=mock.Mock(
all=mock.Mock(return_value=translations)
),
),
),
)
self.helper.clear_transifex_stats = mock.Mock()
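        # Entry 0 is blanked locally (pulled from Transifex) while entry 1 is
        # blank on Transifex (pushed from the PO file).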
pofile_obj[0].msgstr = ""
with self.assertLogs(self.helper.log) as log_context:
with mock.patch.object(polib.POFile, "save") as mock_pofile_save:
pofile_obj_new = self.helper.safesync_translation(
language_code,
transifex_code,
resource_slug,
pofile_path,
pofile_obj,
)
self.assertEqual(
pofile_obj_new[0].msgstr,
"XXXXXXXXXXX-NoDerivatives 4.0 International",
)
self.assertTrue(log_context.output[0].startswith("INFO:"))
self.assertIn(
"Adding translation from PO File to Transifex",
log_context.output[0],
)
self.assertNotIn(
" msgid 0: 'license_medium'", log_context.output[0]
)
self.assertIn(" msgid 1: 'english text'", log_context.output[0])
self.assertTrue(log_context.output[1].startswith("INFO:"))
self.assertIn(
"Adding translation from Transifex to PO File",
log_context.output[1],
)
self.assertIn(" msgid 0: 'license_medium'", log_context.output[1])
self.assertNotIn(" msgid 1: 'english text'", log_context.output[1])
translations[0].save.assert_not_called()
translations[1].save.assert_called()
self.helper.clear_transifex_stats.assert_called()
mock_pofile_save.assert_called_once()
def test_safesync_translation_with_both_changes_dryrun(self):
api = self.helper.api
self.helper.dryrun = True
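        # With dryrun enabled the helper must log what it would do without
        # saving to Transifex or to the local PO file.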
language_code = "x_lang_code_x"
transifex_code = "x_trans_code_x"
resource_slug = "x_slug_x"
pofile_path = "x_path_x"
pofile_obj = polib.pofile(pofile=POFILE_CONTENT)
language = mock.Mock(
id=f"l:{transifex_code}",
)
api.Language.get = mock.Mock(return_value=language)
resource = mock.Mock(
id=f"o:{TEST_ORG_SLUG}:p:{TEST_PROJ_SLUG}:r:{resource_slug}",
attributes={"i18n_type": "PO"},
)
api.Resource.get = mock.Mock(return_value=resource)
translations = [
mock.Mock(
resource_string=mock.Mock(
strings={"other": pofile_obj[0].msgid}
),
strings={
"other": pofile_obj[0].msgstr.replace(
"Attribution", "XXXXXXXXXXX"
),
},
save=mock.Mock(),
),
mock.Mock(
resource_string=mock.Mock(
strings={"other": pofile_obj[1].msgid}
),
strings={"other": ""},
save=mock.Mock(),
),
]
api.ResourceTranslation.filter = mock.Mock(
return_value=mock.Mock(
include=mock.Mock(
return_value=mock.Mock(
all=mock.Mock(return_value=translations)
),
),
),
)
self.helper.clear_transifex_stats = mock.Mock()
pofile_obj[0].msgstr = ""
with self.assertLogs(self.helper.log) as log_context:
with mock.patch.object(polib.POFile, "save") as mock_pofile_save:
pofile_obj_new = self.helper.safesync_translation(
language_code,
transifex_code,
resource_slug,
pofile_path,
pofile_obj,
)
self.assertEqual(pofile_obj_new, pofile_obj)
self.assertTrue(log_context.output[0].startswith("INFO:"))
self.assertIn(
"Adding translation from PO File to Transifex",
log_context.output[0],
)
self.assertNotIn(
" msgid 0: 'license_medium'", log_context.output[0]
)
self.assertIn(" msgid 1: 'english text'", log_context.output[0])
self.assertTrue(log_context.output[1].startswith("INFO:"))
self.assertIn(
"Adding translation from Transifex to PO File",
log_context.output[1],
)
self.assertIn(" msgid 0: 'license_medium'", log_context.output[1])
self.assertNotIn(" msgid 1: 'english text'", log_context.output[1])
translations[0].save.assert_not_called()
translations[1].save.assert_not_called()
self.helper.clear_transifex_stats.assert_not_called()
mock_pofile_save.assert_not_called()
# Test: diff_entry #######################################################
def test_diff_entry(self):
resource_name = "x_name_x"
resource_slug = "x_slug_x"
language_code = "x_lang_code_x"
transifex_code = "x_trans_code_x"
pofile_path = "x_path_x"
pofile_obj = polib.pofile(pofile=POFILE_CONTENT)
pofile_entry = pofile_obj[0]
transifex_obj = polib.pofile(pofile=POFILE_CONTENT)
transifex_entry = transifex_obj[0]
transifex_entry.msgstr = transifex_entry.msgstr.replace(
"Attribution", "XXXXXXXXXXX"
)
with self.assertLogs(self.helper.log) as log_context:
self.helper.diff_entry(
resource_name,
resource_slug,
language_code,
transifex_code,
pofile_path,
pofile_entry,
transifex_entry,
)
self.assertTrue(log_context.output[0].startswith("WARNING:"))
self.assertIn(
f"--- {resource_name} PO File {pofile_path}\n\n"
f"+++ {resource_name} Transifex {resource_slug} {language_code}"
f" ({transifex_code})\n\n",
log_context.output[0],
)
self.assertIn(
'-msgstr "Attribution-NoDerivatives 4.0 International"\n'
'+msgstr "XXXXXXXXXXX-NoDerivatives 4.0 International"\n',
log_context.output[0],
)
# Test: compare_entries ##################################################
def test_compare_entries_translation_differences(self):
resource_name = "x_name_x"
resource_slug = "x_slug_x"
language_code = "x_lang_code_x"
transifex_code = "x_trans_code_x"
pofile_path = "x_path_x"
pofile_obj = polib.pofile(pofile=POFILE_CONTENT)
colordiff = False
resource = False
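        # resource=False compares translations (msgstr values); the resource
        # comparison tests below use resource=True to compare msgid values.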
pofile_entry = pofile_obj[0]
transifex_entry = deepcopy(pofile_obj[0])
transifex_entry.msgstr = transifex_entry.msgstr.replace(
"Attribution", "XXXXXXXXXXX"
)
self.helper.transifex_get_pofile_content = mock.Mock(
return_value=POFILE_CONTENT.replace(
"Attribution", "XXXXXXXXXXX"
).encode("utf-8"),
)
self.helper.diff_entry = mock.Mock()
self.helper.compare_entries(
resource_name,
resource_slug,
language_code,
transifex_code,
pofile_path,
pofile_obj,
colordiff,
resource,
)
self.helper.transifex_get_pofile_content.assert_called_once()
self.helper.transifex_get_pofile_content.assert_called_with(
resource_slug, transifex_code
)
self.helper.diff_entry.assert_called_once()
self.helper.diff_entry.assert_called_with(
resource_name,
resource_slug,
language_code,
transifex_code,
pofile_path,
pofile_entry,
transifex_entry,
colordiff,
)
def test_compare_entries_translation_same(self):
resource_name = "x_name_x"
resource_slug = "x_slug_x"
language_code = "x_lang_code_x"
transifex_code = "x_trans_code_x"
pofile_path = "x_path_x"
pofile_obj = polib.pofile(pofile=POFILE_CONTENT)
colordiff = False
resource = False
self.helper.transifex_get_pofile_content = mock.Mock(
return_value=POFILE_CONTENT.encode("utf-8")
)
self.helper.diff_entry = mock.Mock()
self.helper.compare_entries(
resource_name,
resource_slug,
language_code,
transifex_code,
pofile_path,
pofile_obj,
colordiff,
resource,
)
self.helper.transifex_get_pofile_content.assert_called_once()
self.helper.transifex_get_pofile_content.assert_called_with(
resource_slug, transifex_code
)
self.helper.diff_entry.assert_not_called()
def test_compare_entries_resource_differences(self):
resource_name = "x_name_x"
resource_slug = "x_slug_x"
language_code = "x_lang_code_x"
transifex_code = "x_trans_code_x"
pofile_path = "x_path_x"
pofile_obj = polib.pofile(pofile=POFILE_CONTENT)
colordiff = False
resource = True
pofile_entry = pofile_obj[0]
pofile_entry.msgstr = ""
transifex_entry = deepcopy(pofile_obj[0])
transifex_entry.msgid = transifex_entry.msgid.replace(
"license_medium", "YYYYYYYYYYY"
)
transifex_entry.msgstr = ""
self.helper.transifex_get_pofile_content = mock.Mock(
return_value=POFILE_CONTENT.replace(
"license_medium", "YYYYYYYYYYY"
).encode("utf-8"),
)
self.helper.diff_entry = mock.Mock()
self.helper.compare_entries(
resource_name,
resource_slug,
language_code,
transifex_code,
pofile_path,
pofile_obj,
colordiff,
resource,
)
self.helper.transifex_get_pofile_content.assert_called_once()
self.helper.transifex_get_pofile_content.assert_called_with(
resource_slug, transifex_code
)
self.helper.diff_entry.assert_called_once()
self.helper.diff_entry.assert_called_with(
resource_name,
resource_slug,
language_code,
transifex_code,
pofile_path,
pofile_entry,
transifex_entry,
colordiff,
)
def test_compare_entries_resource_same(self):
resource_name = "x_name_x"
resource_slug = "x_slug_x"
language_code = "x_lang_code_x"
transifex_code = "x_trans_code_x"
pofile_path = "x_path_x"
pofile_obj = polib.pofile(pofile=POFILE_CONTENT)
colordiff = False
resource = True
        # The resources should compare as identical, because resource
        # comparison only looks at msgid values (not msgstr values).
self.helper.transifex_get_pofile_content = mock.Mock(
return_value=POFILE_CONTENT.replace(
"Attribution", "XXXXXXXXXXX"
).encode("utf-8"),
)
self.helper.diff_entry = mock.Mock()
self.helper.compare_entries(
resource_name,
resource_slug,
language_code,
transifex_code,
pofile_path,
pofile_obj,
colordiff,
resource,
)
self.helper.transifex_get_pofile_content.assert_called_once()
self.helper.transifex_get_pofile_content.assert_called_with(
resource_slug, transifex_code
)
self.helper.diff_entry.assert_not_called()
# Test: save_transifex_to_pofile #########################################
def test_save_transifex_to_pofile(self):
resource_slug = "x_slug_x"
language_code = "x_lang_code_x"
transifex_code = "x_trans_code_x"
pofile_path = "x_path_x"
pofile_obj = "x_pofile_obj_x"
self.helper.transifex_get_pofile_content = mock.Mock(
return_value=POFILE_CONTENT.encode("utf-8")
)
with self.assertLogs(self.helper.log) as log_context:
with mock.patch.object(polib.POFile, "save") as mock_pofile_save:
self.helper.save_transifex_to_pofile(
resource_slug,
language_code,
transifex_code,
pofile_path,
pofile_obj,
)
self.assertTrue(log_context.output[0].startswith("INFO:"))
mock_pofile_save.assert_called_once()
def test_save_transifex_to_pofile_dryrun(self):
self.helper.dryrun = True
resource_slug = "x_slug_x"
language_code = "x_lang_code_x"
transifex_code = "x_trans_code_x"
pofile_path = "x_path_x"
pofile_obj = "x_pofile_obj_x"
self.helper.transifex_get_pofile_content = mock.Mock(
return_value=POFILE_CONTENT.encode("utf-8")
)
with self.assertLogs(self.helper.log) as log_context:
with mock.patch.object(polib.POFile, "save") as mock_pofile_save:
self.helper.save_transifex_to_pofile(
resource_slug,
language_code,
transifex_code,
pofile_path,
pofile_obj,
)
self.assertTrue(log_context.output[0].startswith("INFO:"))
mock_pofile_save.assert_not_called()
# Test: upload_resource_to_transifex #####################################
def test_upload_resource_to_transifex_present(self):
resource_slug = "x_slug_x"
language_code = "x_lang_code_x"
transifex_code = "x_trans_code_x"
resource_name = "x_name_x"
pofile_path = "x_path_x"
pofile_obj = "x_pofile_obj_x"
push_overwrite = False
self.helper._resource_stats = {"x_slug_x": None}
self.helper.upload_resource_to_transifex(
resource_slug,
language_code,
transifex_code,
resource_name,
pofile_path,
pofile_obj,
push_overwrite,
)
self.helper.api.Resource.create.assert_not_called()
self.helper.api.Resource.get.assert_not_called()
self.helper.api.ResourceStringsAsyncUpload.upload.assert_not_called()
def test_upload_resource_to_transifex_missing_created(self):
api = self.helper.api
resource_slug = "x_slug_x"
language_code = "x_lang_code_x"
transifex_code = "x_trans_code_x"
resource_name = "x_name_x"
pofile_path = "x_path_x"
pofile_obj = polib.pofile(pofile=POFILE_CONTENT)
push_overwrite = False
resource = mock.Mock(
id=f"o:{TEST_ORG_SLUG}:p:{TEST_PROJ_SLUG}:r:{resource_slug}",
attributes={"i18n_type": "PO"},
)
api.Resource.get = mock.Mock(return_value=resource)
api.ResourceStringsAsyncUpload.upload = mock.Mock(
return_value={"strings_created": 1, "strings_skipped": 0}
)
pofile_content = get_pofile_content(pofile_obj)
self.helper._resource_stats = {}
self.helper.clear_transifex_stats = mock.Mock()
with self.assertLogs(self.helper.log) as log_context:
self.helper.upload_resource_to_transifex(
resource_slug,
language_code,
transifex_code,
resource_name,
pofile_path,
pofile_obj,
push_overwrite,
)
api.Resource.create.assert_called_once()
api.Resource.create.assert_called_with(
name=resource_name,
slug=resource_slug,
relationships={
"i18n_format": self.helper.api_i18n_format,
"project": self.helper.api_project,
},
)
api.Resource.get.assert_called_once()
api.ResourceStringsAsyncUpload.upload.assert_called_once()
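        # The uploaded source content must have all msgstr values stripped;
        # only the msgid strings are sent to Transifex.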
api.ResourceStringsAsyncUpload.upload.assert_called_with(
resource=resource,
content=pofile_content.replace(
'msgstr "Attribution-NoDerivatives 4.0 International"',
'msgstr ""',
).replace('msgstr "english text"', 'msgstr ""'),
)
self.assertTrue(log_context.output[0].startswith("WARNING:"))
self.assertIn("Uploading resource to Transifex", log_context.output[0])
self.assertTrue(log_context.output[1].startswith("INFO:"))
self.assertIn("Resource upload results", log_context.output[1])
self.helper.clear_transifex_stats.assert_called_once()
def test_upload_resource_to_transifex_missing_failed(self):
api = self.helper.api
resource_slug = "x_slug_x"
language_code = "x_lang_code_x"
transifex_code = "x_trans_code_x"
resource_name = "x_name_x"
pofile_path = "x_path_x"
pofile_obj = polib.pofile(pofile=POFILE_CONTENT)
push_overwrite = False
resource = mock.Mock(
id=f"o:{TEST_ORG_SLUG}:p:{TEST_PROJ_SLUG}:r:{resource_slug}",
attributes={"i18n_type": "PO"},
)
api.Resource.get = mock.Mock(return_value=resource)
api.ResourceStringsAsyncUpload.upload = mock.Mock(
return_value={"strings_created": 0, "strings_skipped": 0}
)
pofile_content = get_pofile_content(pofile_obj)
self.helper._resource_stats = {}
self.helper.clear_transifex_stats = mock.Mock()
with self.assertLogs(self.helper.log) as log_context:
self.helper.upload_resource_to_transifex(
resource_slug,
language_code,
transifex_code,
resource_name,
pofile_path,
pofile_obj,
push_overwrite,
)
api.Resource.create.assert_called_once()
api.Resource.create.assert_called_with(
name=resource_name,
slug=resource_slug,
relationships={
"i18n_format": self.helper.api_i18n_format,
"project": self.helper.api_project,
},
)
api.Resource.get.assert_called_once()
api.ResourceStringsAsyncUpload.upload.assert_called_once()
api.ResourceStringsAsyncUpload.upload.assert_called_with(
resource=resource,
content=pofile_content.replace(
'msgstr "Attribution-NoDerivatives 4.0 International"',
'msgstr ""',
).replace('msgstr "english text"', 'msgstr ""'),
)
self.assertTrue(log_context.output[0].startswith("WARNING:"))
self.assertIn("Uploading resource to Transifex", log_context.output[0])
self.assertTrue(log_context.output[1].startswith("INFO:"))
self.assertIn("Resource upload results", log_context.output[1])
self.assertTrue(log_context.output[2].startswith("CRITICAL:"))
self.assertIn("Resource upload failed", log_context.output[2])
self.helper.clear_transifex_stats.assert_not_called()
def test_upload_resource_to_transifex_missing_some_skipped(self):
api = self.helper.api
resource_slug = "x_slug_x"
language_code = "x_lang_code_x"
transifex_code = "x_trans_code_x"
resource_name = "x_name_x"
pofile_path = "x_path_x"
pofile_obj = polib.pofile(pofile=POFILE_CONTENT)
push_overwrite = False
resource = mock.Mock(
id=f"o:{TEST_ORG_SLUG}:p:{TEST_PROJ_SLUG}:r:{resource_slug}",
attributes={"i18n_type": "PO"},
)
api.Resource.get = mock.Mock(return_value=resource)
api.ResourceStringsAsyncUpload.upload = mock.Mock(
return_value={"strings_created": 1, "strings_skipped": 1}
)
pofile_content = get_pofile_content(pofile_obj)
self.helper._resource_stats = {}
self.helper.clear_transifex_stats = mock.Mock()
with self.assertLogs(self.helper.log) as log_context:
self.helper.upload_resource_to_transifex(
resource_slug,
language_code,
transifex_code,
resource_name,
pofile_path,
pofile_obj,
push_overwrite,
)
api.Resource.create.assert_called_once()
api.Resource.create.assert_called_with(
name=resource_name,
slug=resource_slug,
relationships={
"i18n_format": self.helper.api_i18n_format,
"project": self.helper.api_project,
},
)
api.Resource.get.assert_called_once()
api.ResourceStringsAsyncUpload.upload.assert_called_once()
api.ResourceStringsAsyncUpload.upload.assert_called_with(
resource=resource,
content=pofile_content.replace(
'msgstr "Attribution-NoDerivatives 4.0 International"',
'msgstr ""',
).replace('msgstr "english text"', 'msgstr ""'),
)
self.assertTrue(log_context.output[0].startswith("WARNING:"))
self.assertIn("Uploading resource to Transifex", log_context.output[0])
self.assertTrue(log_context.output[1].startswith("INFO:"))
self.assertIn("Resource upload results", log_context.output[1])
self.assertTrue(log_context.output[2].startswith("WARNING:"))
self.assertIn("Resource strings skipped", log_context.output[2])
self.helper.clear_transifex_stats.assert_called_once()
def test_upload_resource_to_transifex_dryrun(self):
self.helper.dryrun = True
resource_slug = "x_slug_x"
language_code = "x_lang_code_x"
transifex_code = "x_trans_code_x"
resource_name = "x_name_x"
pofile_path = "x_path_x"
pofile_obj = "x_pofile_obj_x"
push_overwrite = False
self.helper._resource_stats = {}
self.helper.upload_resource_to_transifex(
resource_slug,
language_code,
transifex_code,
resource_name,
pofile_path,
pofile_obj,
push_overwrite,
)
self.helper.api.Resource.create.assert_not_called()
self.helper.api.Resource.get.assert_not_called()
self.helper.api.ResourceStringsAsyncUpload.upload.assert_not_called()
def test_upload_resource_to_transifex_present_push_overwrite(self):
api = self.helper.api
resource_slug = "x_slug_x"
language_code = "x_lang_code_x"
transifex_code = "x_trans_code_x"
resource_name = "x_name_x"
pofile_path = "x_path_x"
pofile_obj = polib.pofile(pofile=POFILE_CONTENT)
push_overwrite = True
resource = mock.Mock(
id=f"o:{TEST_ORG_SLUG}:p:{TEST_PROJ_SLUG}:r:{resource_slug}",
attributes={"i18n_type": "PO"},
)
api.Resource.get = mock.Mock(return_value=resource)
api.ResourceStringsAsyncUpload.upload = mock.Mock(
return_value={
"strings_created": 2,
"strings_updated": 5,
"strings_skipped": 0,
"strings_deleted": 5,
}
)
pofile_content = get_pofile_content(pofile_obj)
self.helper._resource_stats = {"x_slug_x": None}
self.helper.clear_transifex_stats = mock.Mock()
with self.assertLogs(self.helper.log) as log_context:
self.helper.upload_resource_to_transifex(
resource_slug,
language_code,
transifex_code,
resource_name,
pofile_path,
pofile_obj,
push_overwrite,
)
api.Resource.create.assert_not_called()
api.Resource.get.assert_called_once()
api.ResourceStringsAsyncUpload.upload.assert_called_once()
api.ResourceStringsAsyncUpload.upload.assert_called_with(
resource=resource,
content=pofile_content.replace(
'msgstr "Attribution-NoDerivatives 4.0 International"',
'msgstr ""',
).replace('msgstr "english text"', 'msgstr ""'),
)
self.assertTrue(log_context.output[0].startswith("WARNING:"))
self.assertIn("Uploading resource to Transifex", log_context.output[0])
self.assertTrue(log_context.output[1].startswith("INFO:"))
self.assertIn("Resource upload results", log_context.output[1])
self.helper.clear_transifex_stats.assert_called_once()
# Test: upload_translation_to_transifex_resource #########################
def test_upload_translation_to_transifex_resource_is_source(self):
api = self.helper.api
resource_slug = "x_slug_x"
language_code = settings.LANGUAGE_CODE
transifex_code = settings.LANGUAGE_CODE
pofile_path = "x_path_x"
pofile_obj = "x_pofile_obj_x"
push_overwrite = False
self.helper._resource_stats = {}
self.helper._translation_stats = {}
with self.assertRaises(ValueError) as cm:
self.helper.upload_translation_to_transifex_resource(
resource_slug,
language_code,
transifex_code,
pofile_path,
pofile_obj,
push_overwrite,
)
self.assertIn(
f"{resource_slug} {language_code} ({transifex_code}):",
str(cm.exception),
)
self.assertIn("is for translations, not sources.", str(cm.exception))
api.Language.get.assert_not_called()
api.Resource.get.assert_not_called()
api.ResourceTranslationsAsyncUpload.upload.assert_not_called()
def test_upload_translation_to_transifex_resource_missing_source(self):
api = self.helper.api
resource_slug = "x_slug_x"
language_code = "x_lang_code_x"
transifex_code = "x_trans_code_x"
pofile_path = "x_path_x"
pofile_obj = "x_pofile_obj_x"
push_overwrite = False
self.helper._resource_stats = {}
self.helper._translation_stats = {}
with self.assertRaises(ValueError) as cm:
self.helper.upload_translation_to_transifex_resource(
resource_slug,
language_code,
transifex_code,
pofile_path,
pofile_obj,
push_overwrite,
)
self.assertIn(
f"{resource_slug} {language_code} ({transifex_code}):",
str(cm.exception),
)
self.assertIn(
"Transifex does not yet contain resource.", str(cm.exception)
)
api.Language.get.assert_not_called()
api.Resource.get.assert_not_called()
api.ResourceTranslationsAsyncUpload.upload.assert_not_called()
def test_upload_translation_to_transifex_resource_present(self):
api = self.helper.api
resource_slug = "x_slug_x"
language_code = "x_lang_code_x"
transifex_code = "x_trans_code_x"
pofile_path = "x_path_x"
pofile_obj = polib.pofile(pofile=POFILE_CONTENT)
push_overwrite = False
self.helper._resource_stats = {resource_slug: None}
self.helper._translation_stats = {
resource_slug: {transifex_code: {"translated_strings": 99}}
}
self.helper.upload_translation_to_transifex_resource(
resource_slug,
language_code,
transifex_code,
pofile_path,
pofile_obj,
push_overwrite,
)
api.Language.get.assert_not_called()
api.Resource.get.assert_not_called()
api.ResourceTranslationsAsyncUpload.upload.assert_not_called()
def test_upload_translation_to_transifex_resource_dryrun(self):
api = self.helper.api
self.helper.dryrun = True
resource_slug = "x_slug_x"
language_code = "x_lang_code_x"
transifex_code = "x_trans_code_x"
pofile_path = "x_path_x"
pofile_obj = polib.pofile(pofile=POFILE_CONTENT)
push_overwrite = False
self.helper._resource_stats = {resource_slug: None}
self.helper._translation_stats = {resource_slug: {}}
self.helper.upload_translation_to_transifex_resource(
resource_slug,
language_code,
transifex_code,
pofile_path,
pofile_obj,
push_overwrite,
)
api.Language.get.assert_called_once()
api.Resource.get.assert_called_once()
api.ResourceTranslationsAsyncUpload.upload.assert_not_called()
def test_upload_translation_to_transifex_resource_miss_with_changes(self):
api = self.helper.api
resource_slug = "x_slug_x"
language_code = "x_lang_code_x"
transifex_code = "x_trans_code_x"
pofile_path = "x_path_x"
pofile_obj = polib.pofile(pofile=POFILE_CONTENT)
push_overwrite = False
pofile_content = get_pofile_content(pofile_obj)
self.helper._resource_stats = {resource_slug: {}}
self.helper._translation_stats = {resource_slug: {}}
language = mock.Mock(
id=f"l:{transifex_code}",
)
self.helper.api.Language.get = mock.Mock(return_value=language)
resource = mock.Mock(
id=f"o:{TEST_ORG_SLUG}:p:{TEST_PROJ_SLUG}:r:{resource_slug}",
attributes={"i18n_type": "PO"},
)
self.helper.api.Resource.get = mock.Mock(return_value=resource)
api.ResourceTranslationsAsyncUpload.upload.return_value = {
"translations_created": 1,
"translations_updated": 1,
}
self.helper.clear_transifex_stats = mock.Mock()
self.helper.upload_translation_to_transifex_resource(
resource_slug,
language_code,
transifex_code,
pofile_path,
pofile_obj,
push_overwrite,
)
api.Language.get.assert_called_once()
api.Resource.get.assert_called_once()
api.ResourceTranslationsAsyncUpload.upload.assert_called_once()
api.ResourceTranslationsAsyncUpload.upload.assert_called_with(
resource=resource,
content=pofile_content,
language=language.id,
)
self.helper.clear_transifex_stats.assert_called_once()
def test_upload_translation_to_transifex_resource_push(self):
api = self.helper.api
resource_slug = "x_slug_x"
language_code = "x_lang_code_x"
transifex_code = "x_trans_code_x"
pofile_path = "x_path_x"
pofile_obj = polib.pofile(pofile=POFILE_CONTENT)
push_overwrite = True
pofile_content = get_pofile_content(pofile_obj)
self.helper._resource_stats = {}
self.helper._translation_stats = {}
language = mock.Mock(
id=f"l:{transifex_code}",
)
self.helper.api.Language.get = mock.Mock(return_value=language)
resource = mock.Mock(
id=f"o:{TEST_ORG_SLUG}:p:{TEST_PROJ_SLUG}:r:{resource_slug}",
attributes={"i18n_type": "PO"},
)
self.helper.api.Resource.get = mock.Mock(return_value=resource)
api.ResourceTranslationsAsyncUpload.upload.return_value = {
"translations_created": 1,
"translations_updated": 1,
}
self.helper.clear_transifex_stats = mock.Mock()
self.helper.upload_translation_to_transifex_resource(
resource_slug,
language_code,
transifex_code,
pofile_path,
pofile_obj,
push_overwrite,
)
api.Language.get.assert_called_once()
api.Resource.get.assert_called_once()
api.ResourceTranslationsAsyncUpload.upload.assert_called_once()
api.ResourceTranslationsAsyncUpload.upload.assert_called_with(
resource=resource,
content=pofile_content,
language=language.id,
)
self.helper.clear_transifex_stats.assert_called_once()
def test_upload_translation_to_transifex_resource_no_changes(self):
api = self.helper.api
resource_slug = "x_slug_x"
language_code = "x_lang_code_x"
transifex_code = "x_trans_code_x"
pofile_path = "x_path_x"
pofile_obj = polib.pofile(pofile=POFILE_CONTENT)
push_overwrite = False
pofile_content = get_pofile_content(pofile_obj)
self.helper._resource_stats = {resource_slug: {}}
self.helper._translation_stats = {resource_slug: {}}
language = mock.Mock(
id=f"l:{transifex_code}",
)
self.helper.api.Language.get = mock.Mock(return_value=language)
resource = mock.Mock(
id=f"o:{TEST_ORG_SLUG}:p:{TEST_PROJ_SLUG}:r:{resource_slug}",
attributes={"i18n_type": "PO"},
)
self.helper.api.Resource.get = mock.Mock(return_value=resource)
api.ResourceTranslationsAsyncUpload.upload.return_value = {
"translations_created": 0,
"translations_updated": 0,
}
self.helper.clear_transifex_stats = mock.Mock()
with self.assertLogs(self.helper.log) as log_context:
self.helper.upload_translation_to_transifex_resource(
resource_slug,
language_code,
transifex_code,
pofile_path,
pofile_obj,
push_overwrite,
)
api.Language.get.assert_called_once()
api.Resource.get.assert_called_once()
api.ResourceTranslationsAsyncUpload.upload.assert_called_once()
api.ResourceTranslationsAsyncUpload.upload.assert_called_with(
resource=resource,
content=pofile_content,
language=language.id,
)
self.assertTrue(log_context.output[2].startswith("CRITICAL:"))
self.assertIn("Translation upload failed", log_context.output[2])
self.helper.clear_transifex_stats.assert_not_called()
# Test: normalize_pofile_language ########################################
def test_normalize_pofile_language_correct(self):
language_code = "en"
transifex_code = "en"
resource_slug = "x_slug_x"
resource_name = "x_name_x"
pofile_path = "x_path_x"
pofile_obj = polib.pofile(pofile=POFILE_CONTENT)
with mock.patch.object(polib.POFile, "save") as mock_pofile_save:
self.helper.normalize_pofile_language(
language_code,
transifex_code,
resource_slug,
resource_name,
pofile_path,
pofile_obj,
)
mock_pofile_save.assert_not_called()
def test_normalize_pofile_language_dryrun(self):
self.helper.dryrun = True
language_code = "en"
transifex_code = "x_trans_code_x"
resource_slug = "x_slug_x"
resource_name = "x_name_x"
pofile_path = "x_path_x"
pofile_obj = polib.pofile(pofile=POFILE_CONTENT)
with mock.patch.object(polib.POFile, "save") as mock_pofile_save:
self.helper.normalize_pofile_language(
language_code,
transifex_code,
resource_slug,
resource_name,
pofile_path,
pofile_obj,
)
mock_pofile_save.assert_not_called()
def test_normalize_pofile_language_missing(self):
language_code = "x_lang_code_x"
transifex_code = "x_trans_code_x"
resource_slug = "x_slug_x"
resource_name = "x_name_x"
pofile_path = "x_path_x"
pofile_obj = polib.pofile(pofile=POFILE_CONTENT)
pofile_obj.metadata.pop("Language", None)
pofile_obj.metadata.pop("Language-Django", None)
pofile_obj.metadata.pop("Language-Transifex", None)
with mock.patch.object(polib.POFile, "save") as mock_pofile_save:
new_pofile_obj = self.helper.normalize_pofile_language(
language_code,
transifex_code,
resource_slug,
resource_name,
pofile_path,
pofile_obj,
)
mock_pofile_save.assert_called()
self.assertIn("Language", new_pofile_obj.metadata)
self.assertEqual(new_pofile_obj.metadata["Language"], transifex_code)
def test_normalize_pofile_language_incorrect(self):
language_code = "x_lang_code_x"
transifex_code = "x_trans_code_x"
resource_slug = "x_slug_x"
resource_name = "x_name_x"
pofile_path = "x_path_x"
pofile_obj = polib.pofile(pofile=POFILE_CONTENT)
with mock.patch.object(polib.POFile, "save") as mock_pofile_save:
new_pofile_obj = self.helper.normalize_pofile_language(
language_code,
transifex_code,
resource_slug,
resource_name,
pofile_path,
pofile_obj,
)
mock_pofile_save.assert_called()
self.assertEqual(new_pofile_obj.metadata["Language"], transifex_code)
# Test: normalize_pofile_language_team ###################################
def test_normalize_pofile_language_team_source_correct(self):
transifex_code = "en"
resource_slug = "x_slug_x"
resource_name = "x_name_x"
pofile_path = "x_path_x"
pofile_obj = polib.pofile(pofile=POFILE_CONTENT)
with mock.patch.object(polib.POFile, "save") as mock_pofile_save:
self.helper.normalize_pofile_language_team(
transifex_code,
resource_slug,
resource_name,
pofile_path,
pofile_obj,
)
mock_pofile_save.assert_not_called()
def test_normalize_pofile_language_team_translation_correct(self):
transifex_code = "x_trans_code_x"
resource_slug = "x_slug_x"
resource_name = "x_name_x"
pofile_path = "x_path_x"
pofile_obj = polib.pofile(pofile=POFILE_CONTENT)
pofile_obj.metadata["Language-Team"] = (
f"https://www.transifex.com/{TEST_ORG_SLUG}/teams/{TEST_TEAM_ID}"
f"/{transifex_code}/"
)
with mock.patch.object(polib.POFile, "save") as mock_pofile_save:
self.helper.normalize_pofile_language_team(
transifex_code,
resource_slug,
resource_name,
pofile_path,
pofile_obj,
)
mock_pofile_save.assert_not_called()
def test_normalize_pofile_language_team_dryrun(self):
self.helper.dryrun = True
transifex_code = "x_trans_code_x"
resource_slug = "x_slug_x"
resource_name = "x_name_x"
pofile_path = "x_path_x"
pofile_obj = polib.pofile(pofile=POFILE_CONTENT)
with mock.patch.object(polib.POFile, "save") as mock_pofile_save:
self.helper.normalize_pofile_language_team(
transifex_code,
resource_slug,
resource_name,
pofile_path,
pofile_obj,
)
mock_pofile_save.assert_not_called()
def test_normalize_pofile_language_team_incorrect(self):
transifex_code = "x_trans_code_x"
resource_slug = "x_slug_x"
resource_name = "x_name_x"
pofile_path = "x_path_x"
pofile_obj = polib.pofile(pofile=POFILE_CONTENT)
with mock.patch.object(polib.POFile, "save") as mock_pofile_save:
self.helper.normalize_pofile_language_team(
transifex_code,
resource_slug,
resource_name,
pofile_path,
pofile_obj,
)
mock_pofile_save.assert_called()
def test_normalize_pofile_language_team_missing(self):
transifex_code = "x_trans_code_x"
resource_slug = "x_slug_x"
resource_name = "x_name_x"
pofile_path = "x_path_x"
pofile_obj = polib.pofile(pofile=POFILE_CONTENT)
pofile_obj.metadata.pop("Language-Team", None)
with mock.patch.object(polib.POFile, "save") as mock_pofile_save:
new_pofile_obj = self.helper.normalize_pofile_language_team(
transifex_code,
resource_slug,
resource_name,
pofile_path,
pofile_obj,
)
mock_pofile_save.assert_called()
self.assertIn("Language-Team", new_pofile_obj.metadata)
# Test: normalize_pofile_last_translator #################################
def test_normalize_pofile_last_translator_missing(self):
transifex_code = "x_trans_code_x"
resource_slug = "x_slug_x"
resource_name = "x_name_x"
pofile_path = "x_path_x"
pofile_obj = polib.pofile(pofile=POFILE_CONTENT)
pofile_obj.metadata.pop("Last-Translator", None)
with mock.patch.object(polib.POFile, "save") as mock_pofile_save:
new_pofile_obj = self.helper.normalize_pofile_last_translator(
transifex_code,
resource_slug,
resource_name,
pofile_path,
pofile_obj,
)
mock_pofile_save.assert_not_called()
self.assertNotIn("Last-Translator", new_pofile_obj.metadata)
def test_normalize_pofile_last_translator_correct(self):
transifex_code = "x_trans_code_x"
resource_slug = "x_slug_x"
resource_name = "x_name_x"
pofile_path = "x_path_x"
pofile_obj = polib.pofile(pofile=POFILE_CONTENT)
pofile_obj.metadata["Last-Translator"] = "valid_email@example.com"
with mock.patch.object(polib.POFile, "save") as mock_pofile_save:
self.helper.normalize_pofile_last_translator(
transifex_code,
resource_slug,
resource_name,
pofile_path,
pofile_obj,
)
mock_pofile_save.assert_not_called()
def test_normalize_pofile_last_translator_dryrun(self):
self.helper.dryrun = True
transifex_code = "x_trans_code_x"
resource_slug = "x_slug_x"
resource_name = "x_name_x"
pofile_path = "x_path_x"
pofile_obj = polib.pofile(pofile=POFILE_CONTENT)
pofile_obj.metadata["Last-Translator"] = "FULL NAME <EMAIL@ADDRESS>"
with mock.patch.object(polib.POFile, "save") as mock_pofile_save:
self.helper.normalize_pofile_last_translator(
transifex_code,
resource_slug,
resource_name,
pofile_path,
pofile_obj,
)
mock_pofile_save.assert_not_called()
def test_normalize_pofile_last_translator_incorrect(self):
transifex_code = "x_trans_code_x"
resource_slug = "x_slug_x"
resource_name = "x_name_x"
pofile_path = "x_path_x"
pofile_obj = polib.pofile(pofile=POFILE_CONTENT)
pofile_obj.metadata["Last-Translator"] = "FULL NAME <EMAIL@ADDRESS>"
with mock.patch.object(polib.POFile, "save") as mock_pofile_save:
new_pofile_obj = self.helper.normalize_pofile_last_translator(
transifex_code,
resource_slug,
resource_name,
pofile_path,
pofile_obj,
)
mock_pofile_save.assert_called()
self.assertNotIn("Last-Translator", new_pofile_obj.metadata)
# Test: normalize_pofile_project_id ######################################
def test_normalize_pofile_project_id_correct(self):
transifex_code = "x_trans_code_x"
resource_slug = "x_slug_x"
resource_name = "x_name_x"
pofile_path = "x_path_x"
pofile_obj = polib.pofile(pofile=POFILE_CONTENT)
pofile_obj.metadata["Project-Id-Version"] = resource_slug
with mock.patch.object(polib.POFile, "save") as mock_pofile_save:
self.helper.normalize_pofile_project_id(
transifex_code,
resource_slug,
resource_name,
pofile_path,
pofile_obj,
)
mock_pofile_save.assert_not_called()
def test_normalize_pofile_project_id_dryrun(self):
self.helper.dryrun = True
transifex_code = "x_trans_code_x"
resource_slug = "x_slug_x"
resource_name = "x_name_x"
pofile_path = "x_path_x"
pofile_obj = polib.pofile(pofile=POFILE_CONTENT)
pofile_obj.metadata["Project-Id-Version"] = "PACKAGE VERSION"
with mock.patch.object(polib.POFile, "save") as mock_pofile_save:
self.helper.normalize_pofile_project_id(
transifex_code,
resource_slug,
resource_name,
pofile_path,
pofile_obj,
)
mock_pofile_save.assert_not_called()
def test_normalize_pofile_project_id_incorrect(self):
transifex_code = "x_trans_code_x"
resource_slug = "x_slug_x"
resource_name = "x_name_x"
pofile_path = "x_path_x"
pofile_obj = polib.pofile(pofile=POFILE_CONTENT)
pofile_obj.metadata["Project-Id-Version"] = "PACKAGE VERSION"
with mock.patch.object(polib.POFile, "save") as mock_pofile_save:
new_pofile_obj = self.helper.normalize_pofile_project_id(
transifex_code,
resource_slug,
resource_name,
pofile_path,
pofile_obj,
)
mock_pofile_save.assert_called()
self.assertIn("Project-Id-Version", new_pofile_obj.metadata)
self.assertEqual(
resource_slug, new_pofile_obj.metadata["Project-Id-Version"]
)
def test_normalize_pofile_project_id_missing(self):
transifex_code = "x_trans_code_x"
resource_slug = "x_slug_x"
resource_name = "x_name_x"
pofile_path = "x_path_x"
pofile_obj = polib.pofile(pofile=POFILE_CONTENT)
pofile_obj.metadata.pop("Project-Id-Version", None)
with mock.patch.object(polib.POFile, "save") as mock_pofile_save:
new_pofile_obj = self.helper.normalize_pofile_project_id(
transifex_code,
resource_slug,
resource_name,
pofile_path,
pofile_obj,
)
mock_pofile_save.assert_called()
self.assertIn("Project-Id-Version", new_pofile_obj.metadata)
self.assertEqual(
resource_slug, new_pofile_obj.metadata["Project-Id-Version"]
)
# Test: normalize_pofile_metadata ########################################
def test_normalize_pofile_metadata(self):
self.helper.dryrun = True
language_code = "x_lang_code_x"
transifex_code = "x_trans_code_x"
resource_slug = "x_slug_x"
resource_name = "x_name_x"
pofile_path = "x_path_x"
pofile_obj = polib.pofile(pofile=POFILE_CONTENT)
with mock.patch.object(polib.POFile, "save") as mock_pofile_save:
new_pofile_obj = self.helper.normalize_pofile_metadata(
language_code,
transifex_code,
resource_slug,
resource_name,
pofile_path,
pofile_obj,
)
mock_pofile_save.assert_not_called()
self.assertEqual(pofile_obj, new_pofile_obj)
# Test: update_pofile_creation_datetime ##################################
def test_update_pofile_creation_datetime_dryrun(self):
self.helper.dryrun = True
resource_slug = "x_slug_x"
language_code = "x_lang_code_x"
transifex_code = "x_trans_code_x"
pofile_path = "x_path_x"
pofile_obj = polib.pofile(pofile=POFILE_CONTENT)
pofile_creation = "2021-01-01 01:01:01+00:00"
pofile_obj.metadata["POT-Creation-Date"] = pofile_creation
transifex_creation = "2021-02-02 02:02:02+00:00"
with mock.patch.object(polib.POFile, "save") as mock_pofile_save:
self.helper.update_pofile_creation_datetime(
resource_slug,
language_code,
transifex_code,
pofile_path,
pofile_obj,
pofile_creation,
transifex_creation,
)
mock_pofile_save.assert_not_called()
def test_update_pofile_creation_datetime_save(self):
resource_slug = "x_slug_x"
language_code = "x_lang_code_x"
transifex_code = "x_trans_code_x"
pofile_path = "x_path_x"
pofile_obj = polib.pofile(pofile=POFILE_CONTENT)
pofile_creation = "2021-01-01 01:01:01+00:00"
pofile_obj.metadata["POT-Creation-Date"] = pofile_creation
transifex_creation = "2021-02-02 02:02:02+00:00"
with mock.patch.object(polib.POFile, "save") as mock_pofile_save:
new_pofile_obj = self.helper.update_pofile_creation_datetime(
resource_slug,
language_code,
transifex_code,
pofile_path,
pofile_obj,
pofile_creation,
transifex_creation,
)
mock_pofile_save.assert_called()
self.assertEqual(
new_pofile_obj.metadata["POT-Creation-Date"], transifex_creation
)
# Test: update_pofile_revision_datetime ##################################
def test_update_pofile_revision_datetime_dryrun(self):
self.helper.dryrun = True
resource_slug = "x_slug_x"
language_code = "x_lang_code_x"
transifex_code = "x_trans_code_x"
pofile_path = "x_path_x"
pofile_obj = polib.pofile(pofile=POFILE_CONTENT)
pofile_revision = "2021-01-01 01:01:01+00:00"
pofile_obj.metadata["PO-Revision-Date"] = pofile_revision
transifex_revision = "2021-02-02 02:02:02+00:00"
with mock.patch.object(polib.POFile, "save") as mock_pofile_save:
self.helper.update_pofile_revision_datetime(
resource_slug,
language_code,
transifex_code,
pofile_path,
pofile_obj,
pofile_revision,
transifex_revision,
)
mock_pofile_save.assert_not_called()
def test_update_pofile_revision_datetime_save(self):
resource_slug = "x_slug_x"
language_code = "x_lang_code_x"
transifex_code = "x_trans_code_x"
pofile_path = "x_path_x"
pofile_obj = polib.pofile(pofile=POFILE_CONTENT)
pofile_revision = dateutil.parser.isoparse("2021-01-01 01:01:01+00:00")
pofile_obj.metadata["PO-Revision-Date"] = str(pofile_revision)
transifex_revision = dateutil.parser.isoparse(
"2021-02-02 02:02:02+00:00"
)
with mock.patch.object(polib.POFile, "save") as mock_pofile_save:
new_pofile_obj = self.helper.update_pofile_revision_datetime(
resource_slug,
language_code,
transifex_code,
pofile_path,
pofile_obj,
pofile_revision,
transifex_revision,
)
mock_pofile_save.assert_called()
self.assertEqual(
new_pofile_obj.metadata["PO-Revision-Date"],
str(transifex_revision),
)
# Test: normalize_pofile_dates ###########################################
def test_normalize_pofile_dates_update_pofile_dates_missing(self):
resource_slug = "x_slug_x"
language_code = "x_lang_code_x"
transifex_code = "x_trans_code_x"
transifex_creation = dateutil.parser.isoparse(
"2021-01-01 01:01:01+00:00"
)
transifex_revision = dateutil.parser.isoparse(
"2021-02-02 02:02:02+00:00"
)
pofile_path = "x_path_x"
pofile_obj = polib.pofile(pofile=POFILE_CONTENT)
pofile_creation = None
pofile_revision = None
pofile_obj.metadata.pop("POT-Creation-Date", None)
pofile_obj.metadata.pop("PO-Revision-Date", None)
self.helper._resource_stats = {
resource_slug: {
"datetime_created": str(transifex_creation),
"datetime_modified": str(transifex_revision),
},
}
self.helper._translation_stats = {}
with mock.patch.object(polib.POFile, "save") as mock_pofile_save:
new_pofile_obj = self.helper.normalize_pofile_dates(
resource_slug,
language_code,
transifex_code,
pofile_path,
pofile_obj,
pofile_creation,
pofile_revision,
transifex_creation,
transifex_revision,
)
mock_pofile_save.assert_called()
self.assertEqual(
new_pofile_obj.metadata["POT-Creation-Date"],
str(transifex_creation),
)
self.assertEqual(
new_pofile_obj.metadata["PO-Revision-Date"],
str(transifex_revision),
)
def test_normalize_pofile_dates_update_pofile_creation_differs(self):
resource_slug = "x_slug_x"
language_code = "x_lang_code_x"
transifex_code = "x_trans_code_x"
transifex_creation = dateutil.parser.isoparse(
"2021-01-01 01:01:01+00:00"
)
transifex_revision = dateutil.parser.isoparse(
"2021-02-02 02:02:02+00:00"
)
pofile_path = "x_path_x"
pofile_obj = polib.pofile(pofile=POFILE_CONTENT)
pofile_creation = dateutil.parser.isoparse("2021-03-03 03:03:03+00:00")
pofile_revision = transifex_revision
pofile_obj.metadata["POT-Creation-Date"] = str(pofile_creation)
pofile_obj.metadata["PO-Revision-Date"] = str(pofile_revision)
self.helper._resource_stats = {
resource_slug: {
"datetime_created": str(transifex_creation),
"datetime_modified": str(transifex_revision),
},
}
self.helper._translation_stats = {}
with mock.patch.object(polib.POFile, "save") as mock_pofile_save:
new_pofile_obj = self.helper.normalize_pofile_dates(
resource_slug,
language_code,
transifex_code,
pofile_path,
pofile_obj,
pofile_creation,
pofile_revision,
transifex_creation,
transifex_revision,
)
mock_pofile_save.assert_called_once()
self.assertEqual(
new_pofile_obj.metadata["POT-Creation-Date"],
str(transifex_creation),
)
def test_normalize_pofile_dates_update_revisions_differ_entries_same(self):
resource_slug = "x_slug_x"
language_code = "x_lang_code_x"
transifex_code = "x_trans_code_x"
transifex_creation = dateutil.parser.isoparse(
"2021-01-01 01:01:01+00:00"
)
transifex_revision = dateutil.parser.isoparse(
"2021-02-02 02:02:02+00:00"
)
pofile_path = "x_path_x"
pofile_obj = polib.pofile(pofile=POFILE_CONTENT)
pofile_creation = transifex_creation
pofile_revision = dateutil.parser.isoparse("2021-03-03 03:03:03+00:00")
pofile_obj.metadata["POT-Creation-Date"] = str(pofile_creation)
pofile_obj.metadata["PO-Revision-Date"] = str(pofile_revision)
self.helper._resource_stats = {
resource_slug: {
"datetime_created": str(transifex_creation),
"datetime_modified": str(transifex_revision),
},
}
self.helper._translation_stats = {}
with mock.patch.object(
self.helper, "transifex_get_pofile_content"
) as mock_transifex_content:
mock_transifex_content.return_value = POFILE_CONTENT.encode(
"utf-8"
)
with mock.patch.object(polib.POFile, "save") as mock_pofile_save:
new_pofile_obj = self.helper.normalize_pofile_dates(
resource_slug,
language_code,
transifex_code,
pofile_path,
pofile_obj,
pofile_creation,
pofile_revision,
transifex_creation,
transifex_revision,
)
mock_pofile_save.assert_called_once()
self.assertEqual(
new_pofile_obj.metadata["PO-Revision-Date"],
str(transifex_revision),
)
def test_normalize_pofile_dates_update_revisions_differ_entries_differ(
self,
):
resource_slug = "x_slug_x"
language_code = "x_lang_code_x"
transifex_code = "x_trans_code_x"
transifex_creation = dateutil.parser.isoparse(
"2021-01-01 01:01:01+00:00"
)
transifex_revision = dateutil.parser.isoparse(
"2021-02-02 02:02:02+00:00"
)
pofile_path = "x_path_x"
pofile_obj = polib.pofile(
pofile=POFILE_CONTENT.replace("International", "Intergalactic")
)
pofile_creation = transifex_creation
pofile_revision = dateutil.parser.isoparse("2021-03-03 03:03:03+00:00")
pofile_obj.metadata["POT-Creation-Date"] = str(pofile_creation)
pofile_obj.metadata["PO-Revision-Date"] = str(pofile_revision)
self.helper._resource_stats = {
resource_slug: {
"datetime_created": str(transifex_creation),
"datetime_modified": str(transifex_revision),
},
}
self.helper._translation_stats = {
resource_slug: {
transifex_code: {
"untranslated_strings": 1,
"translated_strings": 1,
},
},
}
with self.assertLogs(self.helper.log) as log_context:
with mock.patch.object(
self.helper, "transifex_get_pofile_content"
) as mock_transifex_content:
mock_transifex_content.return_value = POFILE_CONTENT.encode(
"utf-8"
)
with mock.patch.object(
polib.POFile, "save"
) as mock_pofile_save:
self.helper.normalize_pofile_dates(
resource_slug,
language_code,
transifex_code,
pofile_path,
pofile_obj,
pofile_creation,
pofile_revision,
transifex_creation,
transifex_revision,
)
mock_pofile_save.assert_not_called()
self.assertTrue(log_context.output[0].startswith("ERROR:"))
self.assertIn("'PO-Revision-Date' mismatch", log_context.output[0])
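# The commented-out tests below exercise the older request20/request25
# helpers (Transifex API 2.0/2.5 endpoints such as
# "project/proj/resource/slug/content/") and appear to be retained for
# reference only; the active tests above cover the current client exposed
# as self.helper.api.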
# def test_update_source_messages(self):
# with mock.patch.object(self.helper, "request20") as mock_request:
# self.helper.update_source_messages(
# "slug", "pofilename", "pofilecontent"
# )
# mock_request.assert_called_with(
# "put",
# "project/proj/resource/slug/content/",
# files=[
# (
# "content",
# (
# "pofilename",
# "pofilecontent",
# "application/octet-stream",
# ),
# )
# ],
# )
# def test_update_translations(self):
# with mock.patch.object(self.helper, "request20") as mock_request:
# self.helper.update_translations(
# "slug", "lang", "pofilename", "pofilecontent"
# )
# mock_request.assert_called_with(
# "put",
# "project/proj/resource/slug/translation/lang/",
# files=[
# (
# "file",
# (
# "pofilename",
# "pofilecontent",
# "application/octet-stream",
# ),
# )
# ],
# )
# def test_upload_resource_to_transifex_no_resource_yet_not_english(self):
# # Must be English or we can't create the resource
# # If we try this with a non-english language and there's no resource,
# # we should get an error.
# legal_code = LegalCodeFactory(language_code="es")
# test_pofile = polib.POFile()
#
# with mock.patch.object(
# self.helper, "get_transifex_resource_stats"
# ) as mock_gtr:
# mock_gtr.return_value = []
# with mock.patch.object(legal_code, "get_pofile") as mock_gpwem:
# mock_gpwem.return_value = test_pofile
# with self.assertRaisesMessage(
# ValueError, "Must upload English first"
# ):
# self.helper.upload_resource_to_transifex(legal_code)
#
# mock_gtr.assert_called_with()
# mock_gpwem.assert_called_with()
# def test_upload_messages_english_resource_exists(self):
# # English because it's the source messages and is handled differently
# tool = ToolFactory(unit="by-nd", version="4.0")
# legal_code = LegalCodeFactory(
# tool=tool,
# language_code=settings.LANGUAGE_CODE,
# )
# test_resources = [
# {
# "slug": tool.resource_slug,
# }
# ]
# test_pofile = polib.POFile()
# with mock.patch.object(
# self.helper, "get_transifex_resource_stats"
# ) as mock_gtr:
# mock_gtr.return_value = test_resources
# with mock.patch(
# "i18n.transifex.get_pofile_content"
# ) as mock_gpc:
# mock_gpc.return_value = "not really"
# with mock.patch.object(
# self.helper, "update_source_messages"
# ) as mock_usm:
# self.helper.upload_resource_to_transifex(
# legal_code, test_pofile
# )
#
# mock_gtr.assert_called_with()
# mock_gpc.assert_called_with(test_pofile)
# mock_usm.assert_called_with(
# "by-nd_40",
# "/trans/repo/legalcode/en/LC_MESSAGES/by-nd_40.po",
# "not really",
# )
# def test_upload_messages_non_english_resource_exists(self):
# # non-English because it's not the source messages and is handled
# # differently
# tool = ToolFactory(unit="by-nd", version="4.0")
# legal_code = LegalCodeFactory(tool=tool, language_code="fr")
# test_resources = [
# {
# "slug": tool.resource_slug,
# }
# ]
# test_pofile = mock.MagicMock()
# with mock.patch.object(
# self.helper, "get_transifex_resource_stats"
# ) as mock_gtr:
# mock_gtr.return_value = test_resources
# with mock.patch(
# "i18n.transifex.get_pofile_content"
# ) as mock_gpc:
# mock_gpc.return_value = "not really"
# with mock.patch.object(
# self.helper, "update_translations"
# ) as mock_ut:
# self.helper.upload_resource_to_transifex(
# legal_code, test_pofile
# )
#
# mock_gtr.assert_called_with()
# mock_gpc.assert_called_with(test_pofile)
# mock_ut.assert_called_with(
# "by-nd_40",
# "fr",
# "/trans/repo/legalcode/fr/LC_MESSAGES/by-nd_40.po",
# "not really",
# )
# def test_get_transifex_resource_stats(self):
# # First call returns a response whose json value is a list of dicts
# # with slug keys
# call0_response = mock.MagicMock()
# call0_response.json.return_value = [{"slug": "slug0"}]
#
# # second call is more data about slug0 - FIXME
# call1_response = mock.MagicMock()
# call1_response.json.return_value = {"stats": "stats1"}
# with mock.patch.object(self.helper, "request25") as mock_request25:
# # Return values for each call to request25
# mock_request25.side_effect = [
# call0_response,
# call1_response,
# ]
# result = self.helper.get_transifex_resource_stats()
# calls = mock_request25.call_args_list
# self.assertEqual(
# [
# call("get", "organizations/org/projects/proj/resources/"),
# call(
# "get", "organizations/org/projects/proj/resources/slug0"
# ),
# ],
# calls,
# )
# self.assertEqual({"slug0": "stats1"}, result)
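# A minimal sketch of the two-request aggregation the test above pins down
# (an illustration inferred from the mocked calls, not the project's actual
# implementation):
#
# def get_transifex_resource_stats(self):
#     stats = {}
#     base = "organizations/org/projects/proj/resources/"
#     for resource in self.request25("get", base).json():
#         slug = resource["slug"]
#         detail = self.request25("get", f"{base}{slug}").json()
#         stats[slug] = detail["stats"]
#     return stats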
# @override_settings(
# DATA_REPOSITORY_DIR="/trans/repo",
# )
# class CheckForTranslationUpdatesTest(TestCase):
# def test_check_for_translation_updates_with_dirty_repo(self):
# mock_repo = mock.MagicMock()
# mock_repo.__str__.return_value = "mock_repo"
# mock_repo.is_dirty.return_value = True
# with mock.patch.object(git, "Repo") as mock_Repo:
# mock_Repo.return_value.__enter__.return_value = mock_repo
# helper = TransifexHelper()
# with self.assertRaisesMessage(
# Exception, "is dirty. We cannot continue."
# ):
# helper.check_for_translation_updates()
#
# def test_check_for_translation_updates_with_no_legal_codes(self):
# mock_repo = mock.MagicMock()
# mock_repo.__str__.return_value = "mock_repo"
# mock_repo.is_dirty.return_value = False
# with mock.patch.object(git, "Repo") as mock_Repo:
# mock_Repo.return_value.__enter__.return_value = mock_repo
# with mock.patch.object(
# TransifexHelper, "get_transifex_resource_stats"
# ) as mock_get_transifex_resource_stats:
# mock_get_transifex_resource_stats.return_value = {}
# helper = TransifexHelper()
# helper.check_for_translation_updates()
#
# def test_check_for_translation_updates_first_time(self):
# # We don't have a 'translation_last_update' yet to compare to.
# self.help_test_check_for_translation_updates(
# first_time=True, changed=None
# )
#
# def test_check_for_translation_updates_unchanged(self):
# # The translation update timestamp has not changed
# self.help_test_check_for_translation_updates(
# first_time=False, changed=False
# )
#
# def test_check_for_translation_updates_changed(self):
# # 'translation' is newer than translation_last_update
# self.help_test_check_for_translation_updates(
# first_time=False, changed=True
# )
#
# def test_check_for_translation_updates_upload_language(self):
# # The language isn't (yet) on transifex
# self.help_test_check_for_translation_updates(
# first_time=False, changed=True, language_exists=False
# )
#
# def help_test_check_for_translation_updates(
# self,
# first_time,
# changed,
# resource_exists=True,
# language_exists=True,
# ):
# """
# Helper to test several conditions, since all the setup is so
# convoluted.
# """
# language_code = "zh-Hans"
# tool = ToolFactory(version="4.0", unit="by-nd")
#
# first_translation_update_datetime = datetime.datetime(
# 2007, 1, 25, 12, 0, 0, tzinfo=utc
# )
# changed_translation_update_datetime = datetime.datetime(
# 2020, 9, 30, 13, 11, 52, tzinfo=utc
# )
#
# if first_time:
# # We don't yet know when the last update was.
# legal_code_last_update = None
# else:
# # The last update we know of was at this time.
# legal_code_last_update = first_translation_update_datetime
#
# legal_code = LegalCodeFactory(
# tool=tool,
# language_code=language_code,
# translation_last_update=legal_code_last_update,
# )
# resource_slug = tool.resource_slug
#
# # Will need an English legal_code if we need to create the resource
# if not resource_exists and language_code != settings.LANGUAGE_CODE:
# LegalCodeFactory(
# tool=tool,
# language_code=settings.LANGUAGE_CODE,
# )
#
# # 'timestamp' is the value returned in the translation stats from Transifex
# if changed:
# # now it's the newer time
# timestamp = changed_translation_update_datetime.isoformat()
# else:
# # it's still the first time
# timestamp = first_translation_update_datetime.isoformat()
#
# mock_repo = mock.MagicMock()
# mock_repo.is_dirty.return_value = False
#
# legal_codes = [legal_code]
# dummy_repo = DummyRepo("/trans/repo")
#
# # A couple of places use git.Repo(path) to get a git repo object.
# # Have them all get back our same dummy repo.
# def dummy_repo_factory(path):
# return dummy_repo
#
# helper = TransifexHelper()
#
# with mock.patch.object(
# helper, "handle_legal_codes_with_updated_translations"
# ) as mock_handle_legal_codes, mock.patch.object(
# helper, "get_transifex_resource_stats"
# ) as mock_get_transifex_resource_stats, mock.patch.object(
# helper, "upload_resource_to_transifex"
# ) as mock_upload_resource_to_transifex, mock.patch.object(
# LegalCode, "get_pofile"
# ) as mock_get_pofile, mock.patch.object(
# helper, "upload_resource_to_transifex"
# ) as mock_upload:
# if resource_exists:
# if language_exists:
# mock_get_transifex_resource_stats.return_value = {
# resource_slug: {
# language_code: {
# "translated": {
# "last_activity": timestamp,
# }
# }
# }
# }
# else:
# # language does not exist 1st time, does the 2nd time
# mock_get_transifex_resource_stats.side_effect = [
# {resource_slug: {}},
# {
# resource_slug: {
# language_code: {
# "translated": {
# "last_activity": timestamp,
# }
# }
# }
# },
# ]
# else:
# # First time does not exist, second time does
# mock_get_transifex_resource_stats.side_effect = [
# {},
# {
# resource_slug: {
# language_code: {
# "translated": {
# "last_activity": timestamp,
# }
# }
# }
# },
# ]
# # Will need pofile
# mock_get_pofile.return_value = polib.POFile()
# helper.check_for_translation_updates_with_repo_and_legal_codes(
# dummy_repo, legal_codes
# )
#
# if not resource_exists:
# # Should have tried to create resource
# mock_upload_resource_to_transifex.assert_called_with(
# language_code=legal_code.language_code,
# resource_slug=resource_slug,
# resource_name=legal_code.tool.identifier(),
# pofile_path=legal_code.translation_filename(),
# pofile_obj=mock_get_pofile,
# )
# else:
# # Should not have tried to create the resource
# mock_upload_resource_to_transifex.assert_not_called()
#
# if language_exists:
# mock_upload.assert_not_called()
# else:
# mock_upload.assert_called()
#
# mock_get_transifex_resource_stats.assert_called_with()
# legal_code.refresh_from_db()
# if changed:
# # we mocked the actual processing, so...
# self.assertEqual(
# first_translation_update_datetime,
# legal_code.translation_last_update,
# )
# mock_handle_legal_codes.assert_called_with(
# dummy_repo, [legal_code]
# )
# else:
# self.assertEqual(
# first_translation_update_datetime,
# legal_code.translation_last_update,
# )
# mock_handle_legal_codes.assert_called_with(dummy_repo, [])
# return
#
# def test_handle_legal_codes_with_updated_translations(self):
# helper = TransifexHelper()
# dummy_repo = DummyRepo("/trans/repo")
#
# # No legal_codes, shouldn't call anything or return anything
# result = helper.handle_legal_codes_with_updated_translations(
# dummy_repo, []
# )
# self.assertEqual([], result)
#
# # legal_codes for two branches
# legal_code1 = LegalCodeFactory(
# tool__version="4.0",
# tool__unit="by-nc",
# language_code="fr",
# )
# legal_code2 = LegalCodeFactory(
# tool__version="4.0",
# tool__unit="by-nd",
# language_code="de",
# )
# with mock.patch.object(
# helper, "handle_updated_translation_branch"
# ) as mock_handle:
# result = helper.handle_legal_codes_with_updated_translations(
# dummy_repo, [legal_code1, legal_code2]
# )
# self.assertEqual(
# [legal_code1.branch_name(), legal_code2.branch_name()], result
# )
# self.assertEqual(
# [
# mock.call(dummy_repo, [legal_code1]),
# mock.call(dummy_repo, [legal_code2]),
# ],
# mock_handle.call_args_list,
# )
#
# def test_handle_updated_translation_branch(self):
# helper = TransifexHelper()
# dummy_repo = DummyRepo("/trans/repo")
# result = helper.handle_updated_translation_branch(dummy_repo, [])
# self.assertIsNone(result)
# legal_code1 = LegalCodeFactory(
# tool__version="4.0",
# tool__unit="by-nc",
# language_code="fr",
# )
# legal_code2 = LegalCodeFactory(
# tool__version="4.0",
# tool__unit="by-nd",
# language_code="fr",
# )
# with mock.patch(
# "i18n.transifex.setup_local_branch"
# ) as mock_setup, mock.patch.object(
# helper, "update_branch_for_legal_code"
# ) as mock_update_branch, mock.patch(
# "i18n.transifex.call_command"
# ) as mock_call_command, mock.patch(
# "i18n.transifex.commit_and_push_changes"
# ) as mock_commit:
# # setup_local_branch
# # update_branch_for_legal_code
# # commit_and_push_changes
# # branch_object.save()
# result = helper.handle_updated_translation_branch(
# dummy_repo, [legal_code1, legal_code2]
# )
# self.assertIsNone(result)
# mock_setup.assert_called_with(dummy_repo, legal_code1.branch_name())
# # Should have published static files for this branch
# expected = [
# mock.call("publish", branch_name=legal_code1.branch_name()),
# ]
# self.assertEqual(expected, mock_call_command.call_args_list)
# trb = TranslationBranch.objects.get()
# expected = [
# mock.call(dummy_repo, legal_code1, trb),
# mock.call(dummy_repo, legal_code2, trb),
# ]
# self.assertEqual(expected, mock_update_branch.call_args_list)
# mock_commit.assert_called_with(
# dummy_repo, "Translation changes from Transifex.", "", push=True
# )
#
# def test_update_branch_for_legal_code(self):
# helper = TransifexHelper()
# dummy_repo = DummyRepo("/trans/repo")
# legal_code = LegalCodeFactory(
# tool__version="4.0",
# tool__unit="by-nc",
# language_code="fr",
# )
# helper._stats = {
# legal_code.tool.resource_slug: {
# legal_code.language_code: {
# "translated": {
# "last_activity": now().isoformat(),
# }
# }
# }
# }
# trb = TranslationBranch.objects.create(
# branch_name=legal_code.branch_name(),
# version=legal_code.tool.version,
# language_code=legal_code.language_code,
# complete=False,
# )
# content = b"wxyz"
# # transifex_get_pofile_content
# # save_content_as_pofile_and_mofile
# with mock.patch.object(
# helper, "transifex_get_pofile_content"
# ) as mock_get_content, mock.patch(
# "i18n.transifex.save_content_as_pofile_and_mofile"
# ) as mock_save:
# mock_get_content.return_value = content
# mock_save.return_value = [legal_code.translation_filename()]
# result = helper.update_branch_for_legal_code(
# dummy_repo, legal_code, trb
# )
# self.assertIsNone(result)
# mock_get_content.assert_called_with(
# legal_code.tool.resource_slug, legal_code.language_code
# )
# mock_save.assert_called_with(
# legal_code.translation_filename(), content
# )
# self.assertEqual({legal_code}, set(trb.legal_codes.all()))
# relpath = os.path.relpath(
# legal_code.translation_filename(),
# settings.DATA_REPOSITORY_DIR,
# )
# dummy_repo.index.add.assert_called_with([relpath])
#
| 38.031833 | 79 | 0.575643 | 14,024 | 132,617 | 5.098688 | 0.037293 | 0.039718 | 0.022153 | 0.018307 | 0.868819 | 0.848442 | 0.816122 | 0.795298 | 0.776754 | 0.765538 | 0 | 0.02198 | 0.322787 | 132,617 | 3,486 | 80 | 38.042742 | 0.77419 | 0.145162 | 0 | 0.756453 | 0 | 0 | 0.124863 | 0.019339 | 0 | 0 | 0 | 0.000287 | 0.132061 | 1 | 0.032922 | false | 0.000748 | 0.004489 | 0.000748 | 0.038908 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
b16efa947eb8131adf7c3b0729b04868de39d1d5 | 691 | py | Python | hosts_pkg/config_hosts_blocklists.py | undebuggable/iolaus | d7fa8d52acb72d20810fddaf661458bc7aaf3b3b | [
"MIT"
] | null | null | null | hosts_pkg/config_hosts_blocklists.py | undebuggable/iolaus | d7fa8d52acb72d20810fddaf661458bc7aaf3b3b | [
"MIT"
] | null | null | null | hosts_pkg/config_hosts_blocklists.py | undebuggable/iolaus | d7fa8d52acb72d20810fddaf661458bc7aaf3b3b | [
"MIT"
] | null | null | null | URL_HOST_FILES = [
"https://raw.githubusercontent.com/jmdugan/blocklists/master/corporations/cloudflare/all",
"https://raw.githubusercontent.com/jmdugan/blocklists/master/corporations/facebook/all",
"https://raw.githubusercontent.com/jmdugan/blocklists/master/corporations/google/localized",
"https://raw.githubusercontent.com/jmdugan/blocklists/master/corporations/google/non_localized",
"https://raw.githubusercontent.com/jmdugan/blocklists/master/corporations/microsoft/all",
"https://raw.githubusercontent.com/jmdugan/blocklists/master/corporations/mozilla/all.txt",
"https://raw.githubusercontent.com/jmdugan/blocklists/master/corporations/pinterest/all",
]
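# A minimal consumption sketch (an assumption for illustration; this config
# module itself only defines URL_HOST_FILES). It treats each blocklist as
# plain text with one entry per line, skipping blanks and comment lines:
import urllib.request


def fetch_blocked_hosts():
    hosts = set()
    for url in URL_HOST_FILES:
        with urllib.request.urlopen(url) as response:
            for raw_line in response.read().decode("utf-8").splitlines():
                line = raw_line.strip()
                if line and not line.startswith("#"):
                    # Lines may be bare hostnames or "0.0.0.0 host" pairs;
                    # keep the last whitespace-separated token either way.
                    hosts.add(line.split()[-1])
    return hosts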
| 69.1 | 100 | 0.795948 | 75 | 691 | 7.293333 | 0.28 | 0.102377 | 0.319927 | 0.358318 | 0.877514 | 0.877514 | 0.877514 | 0.877514 | 0.647166 | 0 | 0 | 0 | 0.05644 | 691 | 9 | 101 | 76.777778 | 0.838957 | 0 | 0 | 0 | 0 | 0.111111 | 0.888567 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
b182316158241d7b901009955f986b862309091b | 9,477 | py | Python | fusion_tcv/rewards_used.py | mkuiper/deepmind-research | 1642ae3499c8d1135ec6fe620a68911091dd25ef | [
"Apache-2.0"
] | 2 | 2022-03-14T18:36:23.000Z | 2022-03-14T22:35:20.000Z | fusion_tcv/rewards_used.py | sunjinhao123/deepmind-research | 1642ae3499c8d1135ec6fe620a68911091dd25ef | [
"Apache-2.0"
] | null | null | null | fusion_tcv/rewards_used.py | sunjinhao123/deepmind-research | 1642ae3499c8d1135ec6fe620a68911091dd25ef | [
"Apache-2.0"
] | null | null | null | # Copyright 2021 DeepMind Technologies Limited.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""The rewards used in our experiments."""
from fusion_tcv import combiners
from fusion_tcv import rewards
from fusion_tcv import targets
from fusion_tcv import transforms
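# Reading guide (an informal summary inferred from how these pieces compose
# below, not authoritative documentation): each Component maps a target's
# error values through its transforms into roughly [0, 1] quality scores
# (Sigmoid/SoftPlus score near 1 at the "good" error and fall toward 0 by
# the "bad" error), and a combiner merges scores across components.
# SmoothMax(alpha) is a Boltzmann-weighted mean, approximately
#   sum(x_i * exp(alpha * x_i)) / sum(exp(alpha * x_i)),
# so a negative alpha behaves like a soft minimum, letting the worst-scoring
# component dominate the combined reward.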
# Used in TCV#70915
FUNDAMENTAL_CAPABILITY = rewards.Reward([
rewards.Component(
target=targets.ShapeLCFSDistance(),
transforms=[transforms.SoftPlus(good=0.005, bad=0.05),
combiners.SmoothMax(-1)]),
rewards.Component(
target=targets.XPointFar(),
transforms=[transforms.Sigmoid(good=0.3, bad=0.1),
combiners.SmoothMax(-5)]),
rewards.Component(
target=targets.LimitPoint(),
transforms=[transforms.Sigmoid(bad=0.2, good=0.1)]),
rewards.Component(
target=targets.XPointNormalizedFlux(num_points=1),
transforms=[transforms.SoftPlus(bad=0.08)]),
rewards.Component(
target=targets.XPointDistance(num_points=1),
transforms=[transforms.Sigmoid(good=0.01, bad=0.15)]),
rewards.Component(
target=targets.XPointFluxGradient(num_points=1),
transforms=[transforms.SoftPlus(bad=3)],
weight=0.5),
rewards.Component(
target=targets.Ip(),
transforms=[transforms.SoftPlus(good=500, bad=20000)]),
rewards.Component(
target=targets.OHCurrentsClose(),
transforms=[transforms.SoftPlus(good=50, bad=1050)]),
], combiners.SmoothMax(-0.5))
# Used in TCV#70920
ELONGATION = rewards.Reward([
rewards.Component(
target=targets.ShapeLCFSDistance(),
transforms=[transforms.SoftPlus(good=0.003, bad=0.03),
combiners.SmoothMax(-1)],
weight=3),
rewards.Component(
target=targets.ShapeRadius(),
transforms=[transforms.SoftPlus(good=0.002, bad=0.02)]),
rewards.Component(
target=targets.ShapeElongation(),
transforms=[transforms.SoftPlus(good=0.005, bad=0.2)]),
rewards.Component(
target=targets.ShapeTriangularity(),
transforms=[transforms.SoftPlus(good=0.005, bad=0.2)]),
rewards.Component(
target=targets.XPointCount(),
transforms=[transforms.Equal()]),
rewards.Component(
target=targets.LimitPoint(), # Stay away from the top/baffles.
transforms=[transforms.Sigmoid(bad=0.3, good=0.2)]),
rewards.Component(
target=targets.Ip(),
transforms=[transforms.SoftPlus(good=500, bad=30000)]),
rewards.Component(
target=targets.VoltageOOB(),
transforms=[combiners.Mean(), transforms.SoftPlus(bad=1)]),
rewards.Component(
target=targets.OHCurrentsClose(),
transforms=[transforms.ClippedLinear(good=50, bad=1050)]),
rewards.Component(
name="CurrentsFarFromZero",
target=targets.EFCurrents(),
transforms=[transforms.Abs(),
transforms.SoftPlus(good=100, bad=50),
combiners.GeometricMean()]),
], combiner=combiners.SmoothMax(-5))
# Used in TCV#70600
ITER = rewards.Reward([
rewards.Component(
target=targets.ShapeLCFSDistance(),
transforms=[transforms.SoftPlus(good=0.005, bad=0.05),
combiners.SmoothMax(-1)],
weight=3),
rewards.Component(
target=targets.Diverted(),
transforms=[transforms.Equal()]),
rewards.Component(
target=targets.XPointNormalizedFlux(num_points=2),
transforms=[transforms.SoftPlus(bad=0.08)],
weight=[1] * 2),
rewards.Component(
target=targets.XPointDistance(num_points=2),
transforms=[transforms.Sigmoid(good=0.01, bad=0.15)],
weight=[0.5] * 2),
rewards.Component(
target=targets.XPointFluxGradient(num_points=2),
transforms=[transforms.SoftPlus(bad=3)],
weight=[0.5] * 2),
rewards.Component(
target=targets.LegsNormalizedFlux(),
transforms=[transforms.Sigmoid(good=0.1, bad=0.3),
combiners.SmoothMax(-5)],
weight=2),
rewards.Component(
target=targets.Ip(),
transforms=[transforms.SoftPlus(good=500, bad=20000)],
weight=2),
rewards.Component(
target=targets.VoltageOOB(),
transforms=[combiners.Mean(), transforms.SoftPlus(bad=1)]),
rewards.Component(
target=targets.OHCurrentsClose(),
transforms=[transforms.ClippedLinear(good=50, bad=1050)]),
rewards.Component(
name="CurrentsFarFromZero",
target=targets.EFCurrents(),
transforms=[transforms.Abs(),
transforms.SoftPlus(good=100, bad=50),
combiners.GeometricMean()]),
], combiner=combiners.SmoothMax(-5))
# Used in TCV#70755
SNOWFLAKE = rewards.Reward([
rewards.Component(
target=targets.ShapeLCFSDistance(),
transforms=[transforms.SoftPlus(good=0.005, bad=0.05),
combiners.SmoothMax(-1)],
weight=3),
rewards.Component(
target=targets.LimitPoint(),
transforms=[transforms.Sigmoid(bad=0.2, good=0.1)]),
rewards.Component(
target=targets.XPointNormalizedFlux(num_points=2),
transforms=[transforms.SoftPlus(bad=0.08)],
weight=[1] * 2),
rewards.Component(
target=targets.XPointDistance(num_points=2),
transforms=[transforms.Sigmoid(good=0.01, bad=0.15)],
weight=[0.5] * 2),
rewards.Component(
target=targets.XPointFluxGradient(num_points=2),
transforms=[transforms.SoftPlus(bad=3)],
weight=[0.5] * 2),
rewards.Component(
target=targets.LegsNormalizedFlux(),
transforms=[transforms.Sigmoid(good=0.1, bad=0.3),
combiners.SmoothMax(-5)],
weight=2),
rewards.Component(
target=targets.Ip(),
transforms=[transforms.SoftPlus(good=500, bad=20000)],
weight=2),
rewards.Component(
target=targets.VoltageOOB(),
transforms=[combiners.Mean(), transforms.SoftPlus(bad=1)]),
rewards.Component(
target=targets.OHCurrentsClose(),
transforms=[transforms.ClippedLinear(good=50, bad=1050)]),
rewards.Component(
name="CurrentsFarFromZero",
target=targets.EFCurrents(),
transforms=[transforms.Abs(),
transforms.SoftPlus(good=100, bad=50),
combiners.GeometricMean()]),
], combiner=combiners.SmoothMax(-5))
# Used in TCV#70457
NEGATIVE_TRIANGULARITY = rewards.Reward([
rewards.Component(
target=targets.ShapeLCFSDistance(),
transforms=[transforms.SoftPlus(good=0.005, bad=0.05),
combiners.SmoothMax(-1)],
weight=3),
rewards.Component(
target=targets.ShapeRadius(),
transforms=[transforms.SoftPlus(bad=0.04)]),
rewards.Component(
target=targets.ShapeElongation(),
transforms=[transforms.SoftPlus(bad=0.5)]),
rewards.Component(
target=targets.ShapeTriangularity(),
transforms=[transforms.SoftPlus(bad=0.5)]),
rewards.Component(
target=targets.Diverted(),
transforms=[transforms.Equal()]),
rewards.Component(
target=targets.XPointNormalizedFlux(num_points=2),
transforms=[transforms.SoftPlus(bad=0.08)],
weight=[1] * 2),
rewards.Component(
target=targets.XPointDistance(num_points=2),
transforms=[transforms.Sigmoid(good=0.02, bad=0.15)],
weight=[0.5] * 2),
rewards.Component(
target=targets.XPointFluxGradient(num_points=2),
transforms=[transforms.SoftPlus(bad=3)],
weight=[0.5] * 2),
rewards.Component(
target=targets.Ip(),
transforms=[transforms.SoftPlus(good=500, bad=20000)],
weight=2),
rewards.Component(
target=targets.VoltageOOB(),
transforms=[combiners.Mean(), transforms.SoftPlus(bad=1)]),
rewards.Component(
target=targets.OHCurrentsClose(),
transforms=[transforms.ClippedLinear(good=50, bad=1050)]),
rewards.Component(
name="CurrentsFarFromZero",
target=targets.EFCurrents(),
transforms=[transforms.Abs(),
transforms.SoftPlus(good=100, bad=50),
combiners.GeometricMean()]),
], combiner=combiners.SmoothMax(-0.5))
# Used in TCV#69545
DROPLETS = rewards.Reward([
rewards.Component(
target=targets.R(indices=[0, 1]),
transforms=[transforms.Sigmoid(good=0.02, bad=0.5)],
weight=[1, 1]),
rewards.Component(
target=targets.Z(indices=[0, 1]),
transforms=[transforms.Sigmoid(good=0.02, bad=0.2)],
weight=[1, 1]),
rewards.Component(
target=targets.Ip(indices=[0, 1]),
transforms=[transforms.Sigmoid(good=2000, bad=20000)],
weight=[1, 1]),
rewards.Component(
target=targets.OHCurrentsClose(),
transforms=[transforms.ClippedLinear(good=50, bad=1050)]),
], combiner=combiners.GeometricMean())
| 37.311024 | 74 | 0.641237 | 1,008 | 9,477 | 6.010913 | 0.149802 | 0.142598 | 0.181548 | 0.239313 | 0.855917 | 0.837102 | 0.829345 | 0.760026 | 0.732629 | 0.726027 | 0 | 0.048076 | 0.218635 | 9,477 | 253 | 75 | 37.458498 | 0.770155 | 0.078084 | 0 | 0.827273 | 0 | 0 | 0.00873 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.018182 | 0 | 0.018182 | 0 | 0 | 0 | 0 | null | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 9 |
492a6fc99c7bf0e9fdead71c24eff66f776194ab | 110 | py | Python | training/scalable_shampoo/__init__.py | LAION-AI/watermark-detection | b6a22c51cae9ffd50421ae251794f3521e45bf4c | [
"MIT"
] | 2 | 2022-01-06T05:55:33.000Z | 2022-01-14T02:01:06.000Z | training/scalable_shampoo/__init__.py | LAION-AI/watermark-detection | b6a22c51cae9ffd50421ae251794f3521e45bf4c | [
"MIT"
] | null | null | null | training/scalable_shampoo/__init__.py | LAION-AI/watermark-detection | b6a22c51cae9ffd50421ae251794f3521e45bf4c | [
"MIT"
] | null | null | null | import scalable_shampoo.matrix_functions
import scalable_shampoo.shampoo_utils
from .shampoo import Shampoo
| 18.333333 | 40 | 0.881818 | 14 | 110 | 6.642857 | 0.5 | 0.301075 | 0.451613 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.090909 | 110 | 5 | 41 | 22 | 0.93 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
498ee6e27187458cbb2eceba3b8409e73dff4d28 | 3,764 | py | Python | modules/tests/photons_transport_tests/comms/test_base_helpers.py | Djelibeybi/photons | bc0aa91771d8e88fd3c691fb58f18cb876f292ec | [
"MIT"
] | 51 | 2020-07-03T08:34:48.000Z | 2022-03-16T10:56:08.000Z | modules/tests/photons_transport_tests/comms/test_base_helpers.py | delfick/photons | bc0aa91771d8e88fd3c691fb58f18cb876f292ec | [
"MIT"
] | 81 | 2020-07-03T08:13:59.000Z | 2022-03-31T23:02:54.000Z | modules/tests/photons_transport_tests/comms/test_base_helpers.py | Djelibeybi/photons | bc0aa91771d8e88fd3c691fb58f18cb876f292ec | [
"MIT"
] | 8 | 2020-07-24T23:48:20.000Z | 2021-05-24T17:20:16.000Z | # coding: spec
from photons_transport.comms.base import timeout_task
from photons_app.errors import TimedOut
from photons_app import helpers as hp
from delfick_project.errors_pytest import assertRaises
from unittest import mock
import asyncio
describe "timeout_task":
async it "does nothing if the task has a result":
async def doit():
return 1
task = hp.async_as_background(doit())
await task
errf = hp.create_future()
timeout_task(task, errf, 1)
assert not errf.done()
async it "does nothing if the task has an exception":
async def doit():
raise Exception("NOPE")
task = hp.async_as_background(doit())
with assertRaises(Exception, "NOPE"):
await task
errf = hp.create_future()
timeout_task(task, errf, 1)
assert not errf.done()
async it "does nothing if the task was cancelled":
async def doit():
return 1
task = hp.async_as_background(doit())
task.cancel()
try:
await task
except asyncio.CancelledError:
pass
assert task.cancelled()
errf = hp.create_future()
timeout_task(task, errf, 1)
assert not errf.done()
async it "cancels the task if it's not done":
called = []
async def doit():
called.append("sleep")
await asyncio.sleep(10)
called.append("slept")
task = hp.async_as_background(doit())
errf = hp.create_future()
serial = mock.Mock(name="serial")
timeout_task(task, errf, serial)
try:
await task
except asyncio.CancelledError:
pass
assert task.cancelled()
assert errf.done()
msg = "Waiting for reply to a packet"
with assertRaises(TimedOut, msg, serial=serial):
await errf
async it "does not set exception on errf if it's already done":
called = []
async def doit():
called.append("sleep")
await asyncio.sleep(10)
called.append("slept")
task = hp.async_as_background(doit())
errf = hp.create_future()
errf.set_result(1)
serial = mock.Mock(name="serial")
timeout_task(task, errf, serial)
try:
await task
except asyncio.CancelledError:
pass
assert task.cancelled()
assert errf.done()
assert await errf == 1
async it "does not set exception on errf already has an exception":
called = []
async def doit():
called.append("sleep")
await asyncio.sleep(10)
called.append("slept")
task = hp.async_as_background(doit())
errf = hp.create_future()
errf.set_exception(ValueError("NOPE"))
serial = mock.Mock(name="serial")
timeout_task(task, errf, serial)
try:
await task
except asyncio.CancelledError:
pass
assert task.cancelled()
assert errf.done()
with assertRaises(ValueError, "NOPE"):
await errf
async it "does not set exception on errf already cancelled":
called = []
async def doit():
called.append("sleep")
await asyncio.sleep(10)
called.append("slept")
task = hp.async_as_background(doit())
errf = hp.create_future()
errf.cancel()
serial = mock.Mock(name="serial")
timeout_task(task, errf, serial)
try:
await task
except asyncio.CancelledError:
pass
assert task.cancelled()
assert errf.cancelled()
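# Not part of photons itself: a minimal sketch of what the timeout_task
# helper under test is assumed to do, inferred from the assertions above
# (TimedOut is the class imported at the top of this file):
def _sketch_timeout_task(task, errf, serial):
    if task.done():
        return  # finished, failed or cancelled tasks are left alone
    task.cancel()
    if not errf.done():
        errf.set_exception(
            TimedOut("Waiting for reply to a packet", serial=serial))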
| 23.092025 | 71 | 0.569872 | 430 | 3,764 | 4.902326 | 0.176744 | 0.046964 | 0.039848 | 0.043169 | 0.745731 | 0.745731 | 0.732922 | 0.732922 | 0.717268 | 0.695446 | 0 | 0.006044 | 0.340595 | 3,764 | 162 | 72 | 23.234568 | 0.843272 | 0.003188 | 0 | 0.772727 | 0 | 0 | 0.113067 | 0 | 0 | 0 | 0 | 0 | 0.154545 | 0 | null | null | 0.045455 | 0.054545 | null | null | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
b8ef28584a352fecc038a2a7f95785f319070b61 | 231 | py | Python | convex_contracts/contracts/__init__.py | datacraft-dsc/convex-contracts | 16dd54a5ad83bfb261ba4cedd100767593427e56 | [
"Apache-2.0"
] | 1 | 2020-10-19T10:06:52.000Z | 2020-10-19T10:06:52.000Z | convex_contracts/contracts/__init__.py | DEX-Company/convex-contracts | a323839813e7b4a31a927e63821a1c816c36beae | [
"Apache-2.0"
] | null | null | null | convex_contracts/contracts/__init__.py | DEX-Company/convex-contracts | a323839813e7b4a31a927e63821a1c816c36beae | [
"Apache-2.0"
] | null | null | null |
from convex_contracts.contracts.did_registry_contract import DIDRegistryContract # noqa: F401
from convex_contracts.contracts.provenance_contract import ProvenanceContract # noqa: F401
| 57.75 | 114 | 0.705628 | 21 | 231 | 7.52381 | 0.571429 | 0.126582 | 0.240506 | 0.35443 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.035088 | 0.25974 | 231 | 3 | 115 | 77 | 0.888889 | 0.090909 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
6201030d0ab11d5bbe4b21ed43b94c5e4da2fd66 | 7,860 | py | Python | data/train/python/6201030d0ab11d5bbe4b21ed43b94c5e4da2fd66test_selectioncontroller.py | harshp8l/deep-learning-lang-detection | 2a54293181c1c2b1a2b840ddee4d4d80177efb33 | [
"MIT"
] | 84 | 2017-10-25T15:49:21.000Z | 2021-11-28T21:25:54.000Z | data/train/python/6201030d0ab11d5bbe4b21ed43b94c5e4da2fd66test_selectioncontroller.py | vassalos/deep-learning-lang-detection | cbb00b3e81bed3a64553f9c6aa6138b2511e544e | [
"MIT"
] | 5 | 2018-03-29T11:50:46.000Z | 2021-04-26T13:33:18.000Z | data/train/python/6201030d0ab11d5bbe4b21ed43b94c5e4da2fd66test_selectioncontroller.py | vassalos/deep-learning-lang-detection | cbb00b3e81bed3a64553f9c6aa6138b2511e544e | [
"MIT"
] | 24 | 2017-11-22T08:31:00.000Z | 2022-03-27T01:22:31.000Z | # -*- coding: utf-8 -*-
import unittest
import sys
import audioselector
class TestSelectionController(unittest.TestCase):
def setUp(self):
self.controller = audioselector.SelectionController(0, 100)
def test_create_partition(self):
self.assertEqual(0, self.controller.create_partition(50))
self.assertEqual(1, self.controller.create_partition(75))
self.assertEqual(2, self.controller.create_partition(100))
self.assertEqual(0, self.controller.create_partition(25))
self.assertEqual(0, self.controller.create_partition(10))
self.assertEqual(1, self.controller.create_partition(20))
self.assertEqual(3, self.controller.create_partition(40))
def test_get_partitions(self):
self.assertEqual([], self.controller.get_partitions())
self.assertEqual(0, self.controller.create_partition(90))
self.assertEqual([90], self.controller.get_partitions())
self.assertEqual(0, self.controller.create_partition(10))
self.assertEqual([10, 90], self.controller.get_partitions())
self.assertEqual(1, self.controller.create_partition(50))
self.assertEqual([10, 50, 90], self.controller.get_partitions())
self.assertEqual(2, self.controller.create_partition(50))
self.assertEqual([10, 50, 50, 90], self.controller.get_partitions())
def test_create_partition_out_of_range(self):
self.assertRaises(ValueError, self.controller.create_partition, -1)
self.assertRaises(ValueError, self.controller.create_partition, 100.001)
def test_partition_callbacks(self):
cbs = []
self.controller.on_partition_created(lambda i: cbs.append(i))
self.controller.create_partition(10)
self.assertEqual([0], cbs)
self.controller.create_partition(90)
self.assertEqual([0, 1], cbs)
self.controller.create_partition(50)
self.assertEqual([0, 1, 1], cbs)
self.controller.create_partition(75)
self.assertEqual([0, 1, 1, 2], cbs)
self.controller.create_partition(5)
self.assertEqual([0, 1, 1, 2, 0], cbs)
self.controller.create_partition(95)
self.assertEqual([0, 1, 1, 2, 0, 5], cbs)
def test_remove_partition(self):
self.controller.create_partition(40)
self.controller.create_partition(60)
self.controller.create_partition(20)
self.controller.create_partition(80)
self.controller.create_partition(90)
self.controller.create_partition(66.7)
self.assertEqual([20, 40, 60, 66.7, 80, 90],
self.controller.get_partitions())
self.assertRaises(IndexError, self.controller.remove_partition, 6)
self.controller.remove_partition(1)
self.assertEqual([20, 60, 66.7, 80, 90],
self.controller.get_partitions())
self.controller.remove_partition(4)
self.assertEqual([20, 60, 66.7, 80],
self.controller.get_partitions())
self.controller.remove_partition(0)
self.assertEqual([60, 66.7, 80],
self.controller.get_partitions())
self.controller.remove_partition(1)
self.assertEqual([60, 80],
self.controller.get_partitions())
self.controller.remove_partition(0)
self.assertEqual([80],
self.controller.get_partitions())
self.controller.remove_partition(0)
self.assertEqual([],
self.controller.get_partitions())
self.assertRaises(IndexError, self.controller.remove_partition, 0)
self.assertRaises(IndexError, self.controller.remove_partition, 1)
def test_create_selection(self):
# no overlaps
self.assertEqual(0, self.controller.create_selection(50, 75))
self.assertEqual(1, self.controller.create_selection(80, 90))
self.assertEqual(2, self.controller.create_selection(95, 100))
self.assertEqual(0, self.controller.create_selection(10, 20))
# order doesn't matter
self.assertEqual(0, self.controller.create_selection(2, 1))
self.assertEqual(2, self.controller.create_selection(40, 30))
# new contained in old
self.assertEqual(2, self.controller.create_selection(31, 39))
self.assertEqual(2, self.controller.create_selection(31, 40))
self.assertEqual(2, self.controller.create_selection(30, 39))
self.assertEqual(0, self.controller.create_selection(1.1, 1.9))
self.assertEqual(5, self.controller.create_selection(95, 100))
# new overlaps old (partially or completely)
self.assertEqual(0, self.controller.create_selection(0, 1))
self.assertEqual(5, self.controller.create_selection(91, 96))
self.assertEqual(1, self.controller.create_selection(19, 22))
self.assertEqual(2, self.controller.create_selection(29, 41))
def test_create_selection_out_of_range(self):
self.assertRaises(ValueError, self.controller.create_selection, -1, 4)
self.assertRaises(ValueError, self.controller.create_selection, 98, 101)
self.assertRaises(ValueError, self.controller.create_selection, -1, 101)
def test_get_selections(self):
self.assertEqual([], self.controller.get_selections())
self.assertEqual(0, self.controller.create_selection(40, 60))
self.assertEqual([(40, 60)],
self.controller.get_selections())
self.assertEqual(1, self.controller.create_selection(70, 80))
self.assertEqual([(40, 60), (70, 80)],
self.controller.get_selections())
self.assertEqual(0, self.controller.create_selection(20, 30))
self.assertEqual([(20, 30), (40, 60), (70, 80)],
self.controller.get_selections())
self.assertEqual(0, self.controller.create_selection(25, 35))
self.assertEqual([(20, 35), (40, 60), (70, 80)],
self.controller.get_selections())
self.assertEqual(2, self.controller.create_selection(65, 70))
self.assertEqual([(20, 35), (40, 60), (65, 80)],
self.controller.get_selections())
self.assertEqual(1, self.controller.create_selection(39, 61))
self.assertEqual([(20, 35), (39, 61), (65, 80)],
self.controller.get_selections())
self.assertEqual(0, self.controller.create_selection(21, 34))
self.assertEqual([(20, 35), (39, 61), (65, 80)],
self.controller.get_selections())
def test_remove_selection(self):
self.controller.create_selection(10, 15)
self.controller.create_selection(30, 48)
self.controller.create_selection(48, 50)
self.controller.create_selection(56, 57)
self.controller.create_selection(99, 100)
self.assertEqual([(10, 15), (30, 50), (56, 57), (99, 100)],
self.controller.get_selections())
self.assertRaises(IndexError, self.controller.remove_selection, 4)
self.controller.remove_selection(1)
self.assertEqual([(10, 15), (56, 57), (99, 100)],
self.controller.get_selections())
self.controller.remove_selection(2)
self.assertEqual([(10, 15), (56, 57)],
self.controller.get_selections())
self.controller.remove_selection(0)
self.assertEqual([(56, 57)],
self.controller.get_selections())
self.assertRaises(IndexError, self.controller.remove_selection, 1)
self.controller.remove_selection(0)
self.assertEqual([], self.controller.get_selections())
self.assertRaises(IndexError, self.controller.remove_selection, 0)
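# Illustrative only (audioselector.SelectionController itself is not shown):
# the tests above imply create_selection normalises the endpoint order,
# absorbs every overlapping or touching selection, and returns the index of
# the merged range. A minimal standalone sketch of that merge step:
def _merge_selection(selections, start, end):
    lo, hi = min(start, end), max(start, end)
    kept = []
    for s, e in selections:
        if e < lo or s > hi:   # disjoint: keep untouched
            kept.append((s, e))
        else:                  # overlapping: absorb into the new range
            lo, hi = min(lo, s), max(hi, e)
    kept.append((lo, hi))
    kept.sort()
    return kept, kept.index((lo, hi))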
if __name__ == "__main__":
unittest.main()
| 45.964912 | 80 | 0.650636 | 906 | 7,860 | 5.501104 | 0.107064 | 0.275281 | 0.220706 | 0.174559 | 0.81561 | 0.7811 | 0.724518 | 0.419342 | 0.365169 | 0.335273 | 0 | 0.069977 | 0.223664 | 7,860 | 170 | 81 | 46.235294 | 0.746804 | 0.015013 | 0 | 0.23741 | 0 | 0 | 0.001034 | 0 | 0 | 0 | 0 | 0 | 0.539568 | 1 | 0.071942 | false | 0 | 0.021583 | 0 | 0.100719 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
621d435131610ecfab5c6dc2032e3af3a18c30dd | 163 | py | Python | src/mykrobe/predict/__init__.py | chamilaadikaram/mykrobe | 2bcebf7b37f1c1416f397374da6ebfd02ce1aead | [
"MIT"
] | 1 | 2020-01-10T06:43:22.000Z | 2020-01-10T06:43:22.000Z | src/mykrobe/predict/__init__.py | chamilaadikaram/mykrobe | 2bcebf7b37f1c1416f397374da6ebfd02ce1aead | [
"MIT"
] | null | null | null | src/mykrobe/predict/__init__.py | chamilaadikaram/mykrobe | 2bcebf7b37f1c1416f397374da6ebfd02ce1aead | [
"MIT"
] | null | null | null | from mykrobe.predict.amr import TBPredictor
from mykrobe.predict.amr import StaphPredictor
from mykrobe.predict.models import MykrobePredictorSusceptibilityResult
| 40.75 | 71 | 0.889571 | 18 | 163 | 8.055556 | 0.5 | 0.227586 | 0.372414 | 0.289655 | 0.372414 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.07362 | 163 | 3 | 72 | 54.333333 | 0.960265 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 8 |
6241435c533de6ed03ad2fafc7ce774f839c2051 | 99,507 | py | Python | serial_scripts/max_flows/test_max_flows.py | atsgen/tf-test | 2748fcd81491450c75dadc71849d2a1c11061029 | [
"Apache-2.0"
] | null | null | null | serial_scripts/max_flows/test_max_flows.py | atsgen/tf-test | 2748fcd81491450c75dadc71849d2a1c11061029 | [
"Apache-2.0"
] | null | null | null | serial_scripts/max_flows/test_max_flows.py | atsgen/tf-test | 2748fcd81491450c75dadc71849d2a1c11061029 | [
"Apache-2.0"
] | null | null | null | from __future__ import division
# Need to import path to test/fixtures and test/scripts/
# Ex : export PYTHONPATH='$PATH:/root/test/fixtures/:/root/test/scripts/'
#
# To run tests, you can do 'python -m testtools.run tests'. To run specific tests,
# You can do 'python -m testtools.run -l tests'
# Set the env variable PARAMS_FILE to point to your ini file. Else it will try to pick params.ini in PWD
from common.max_flows.base import BaseMaxFlowsTest
from builtins import str
from builtins import range
from past.utils import old_div
from tcutils.wrappers import preposttest_wrapper
from tcutils.agent import *
from common.max_flows.verify import VerifyMaxFlows
import test
from tcutils.util import skip_because
import time
from tcutils.traffic_utils.scapy_traffic_gen import ScapyTraffic
from tcutils.traffic_utils.traffic_analyzer import TrafficAnalyzer
from compute_node_test import ComputeNodeFixture
class TestMaxFlows(VerifyMaxFlows, BaseMaxFlowsTest):
setup_fixtures = {}
DEFAULT_FLOW_TIMEOUT = 120
@classmethod
def setUpClass(cls):
super(TestMaxFlows, cls).setUpClass(flow_timeout=120)
@classmethod
def tearDownClass(cls):
super(TestMaxFlows, cls).tearDownClass()
def runTest(self):
pass
#end runTest
def waiting_for_flow_timeout(self):
self.logger.info("Sleeping for flow timeout (%d seconds)..." % (self.DEFAULT_FLOW_TIMEOUT))
time.sleep(self.DEFAULT_FLOW_TIMEOUT)
self.logger.info("Sleeping for flow timeout (%d seconds)...Completed" % (self.DEFAULT_FLOW_TIMEOUT))
@preposttest_wrapper
def test_max_flows_vn_level(self):
'''
Description:
Verify max_flows functionality at VN level
Test steps:
1.Create virtual networks vn1 and vn2
2.Configure max_flows as 1000 @ vn1 (knob usage is sketched below)
3.Launch vm11, vm12 and vm13 on vn1 network
4.Launch vm21 and vm22 on vn2 network
5.Verify traffic between the VMs
6.Send 2000 flows of traffic from vm11 to vm12; it should allow only 1000 flows
7.Send 2000 flows of traffic from vm11 to vm13; it should allow only 1000 flows
8.Modify the max_flows value to 1500 and verify traffic between vm11 and vm12
9.Send 2000 flows of traffic from vm21 to vm22; it should allow all the traffic
10.Delete the max_flows @ vn1 (by setting the value to 0)
11.Send 2000 flows of traffic from vm11 to vm12; it should allow all the traffic
Pass criteria:
Number of flows should be created as per max_flows configured at VN level.
Other VNs should not be impacted.
After deleting max_flows configuration, it should allow all the flows.
Maintainer : mmohan@juniper.net
'''
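# The knob under test, as exercised throughout this method (values are the
# test's own; 0 removes the limit):
#   vn1_fix.set_max_flows(max_flows=1000)   # cap flow entries for the whole VN
#   vn1_fix.set_max_flows(max_flows=0)      # back to unlimited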
vn = {'count':2, # VN count
'vn1':{'subnet':'21.0.0.0/24'},
'vn2':{'subnet':'22.0.0.0/24'},
}
vmi = {'count':5, # VMI Count
'vmi11':{'vn': 'vn1'}, # VMI details
'vmi12':{'vn': 'vn1'}, # VMI details
'vmi13':{'vn': 'vn1'}, # VMI details
'vmi21':{'vn': 'vn2'}, # VMI details
'vmi22':{'vn': 'vn2'}, # VMI details
}
# Input Variables
compute_nodes = self.orch.get_hosts()
assert len(compute_nodes) >= 2 , "Required Minimum 2 Compute Nodes"
vm = {'count':5,
'launch_mode':'distribute',
'vm11':{'vn':['vn1'], 'vmi':['vmi11'],'node': compute_nodes[0]}, # VM Details
'vm12':{'vn':['vn1'], 'vmi':['vmi12'],'node': compute_nodes[1]}, # VM Details
'vm13':{'vn':['vn1'], 'vmi':['vmi13'],'node': compute_nodes[0]}, # VM Details
'vm21':{'vn':['vn2'], 'vmi':['vmi21'],'node': compute_nodes[0]}, # VM Details
'vm22':{'vn':['vn2'], 'vmi':['vmi22'],'node': compute_nodes[1]}, # VM Details
}
# Create Virtual Networks (VNs)
vn_fixtures = self.setup_vns(vn=vn)
vn1_fix = vn_fixtures['vn1']
vn2_fix = vn_fixtures['vn2']
# Setting MAX Flows only on VN-1
vn1_max_flows = 1000
vn1_fix.set_max_flows(max_flows=vn1_max_flows)
# Create VMIs
vmi_fixtures = self.setup_vmis(vn_fixtures, vmi=vmi)
# Create VMs
vm_fixtures = self.setup_vms(vn_fixtures, vmi_fixtures, vm=vm, image_name='ubuntu-traffic')
vm11_fix = vm_fixtures['vm11']
vm12_fix = vm_fixtures['vm12']
vm13_fix = vm_fixtures['vm13']
vm21_fix = vm_fixtures['vm21']
vm22_fix = vm_fixtures['vm22']
# Creating ComputeNode/Vrouter Fixtures
vm11_vrouter_fixture = ComputeNodeFixture(self.connections, vm11_fix.vm_node_ip)
vm12_vrouter_fixture = ComputeNodeFixture(self.connections, vm12_fix.vm_node_ip)
vm21_vrouter_fixture = ComputeNodeFixture(self.connections, vm11_fix.vm_node_ip)
vm22_vrouter_fixture = ComputeNodeFixture(self.connections, vm12_fix.vm_node_ip)
self.logger.info("Verify Traffic within VMs in VN-1")
send_vm_fixture = vm11_fix
recv_vm_fixture = vm12_fix
traffic_result = self.verify_traffic(sender_vm=send_vm_fixture, receiver_vm=recv_vm_fixture,
proto='udp', sport=1500, dport=10001, count=100)
self.logger.info("Traffic Tx-Pkts: %d Rx-Pkts: %d" % (traffic_result[0],traffic_result[1]))
assert traffic_result[0] == traffic_result[1], "Traffic between VMs Failed"
self.logger.info("Verify Traffic within VMs in VN-2")
send_vm_fixture = vm21_fix
recv_vm_fixture = vm22_fix
traffic_result = self.verify_traffic(sender_vm=send_vm_fixture, receiver_vm=recv_vm_fixture,
proto='udp', sport=1500, dport=10001, count=100)
self.logger.info("Traffic Tx-Pkts: %d Rx-Pkts: %d" % (traffic_result[0],traffic_result[1]))
assert traffic_result[0] == traffic_result[1], "Traffic between VMs Failed"
self.logger.info("Sleeping for dns/metadata flows to timeout...")
self.waiting_for_flow_timeout()
# Verify Max_flows functionality on VN level
#import pdb; pdb.set_trace()
send_flow_count = self.send_traffic(
src=str(vm11_fix.vm_ip),
dst=str(vm12_fix.vm_ip),
max_flows=vn1_max_flows,
vm_fix=vm11_fix
)
total_flow_count = self.get_total_flow_count(
source_ip=str(vm11_fix.vm_ip),
dest_ip=str(vm12_fix.vm_ip),
vrf_id=vm11_vrouter_fixture.get_vrf_id(vn1_fix.vn_fq_name),
metadata_ip=str(vm11_fix.get_local_ip()),
vrouter_fixture=vm11_vrouter_fixture
)
self.logger.info("Total Obtained Flow Count: %d"% (total_flow_count))
self.logger.info("Total Expected Flow Count: %d" % (vn1_max_flows))
#import pdb; pdb.set_trace()
if total_flow_count == vn1_max_flows:
self.logger.info("VN level Max Flows Provisioning is working fine")
assert total_flow_count == vn1_max_flows, "VN level Max Flows Provisioning is not working"
self.waiting_for_flow_timeout()
# Source and Destination VMs part of the same Compute Node
send_flow_count = self.send_traffic(
src=str(vm11_fix.vm_ip),
dst=str(vm13_fix.vm_ip),
max_flows=vn1_max_flows,
vm_fix=vm11_fix
)
total_flow_count = self.get_total_flow_count(
source_ip=str(vm11_fix.vm_ip),
dest_ip=str(vm13_fix.vm_ip),
vrf_id=vm11_vrouter_fixture.get_vrf_id(vn1_fix.vn_fq_name),
metadata_ip=str(vm11_fix.get_local_ip()),
vrouter_fixture=vm11_vrouter_fixture
)
self.logger.info("Total Obtained Flow Count: %d"% (total_flow_count))
self.logger.info("Total Expected Flow Count: %d" % (vn1_max_flows))
#import pdb; pdb.set_trace()
if total_flow_count == vn1_max_flows:
self.logger.info("VMs are the part of the same Compute - VN level Max Flows Provisioning is working fine")
assert total_flow_count == vn1_max_flows, "VMs are the part of the same Compute - VN level Max Flows Provisioning is not working"
self.waiting_for_flow_timeout()
# Modify max flows to different values
# Setting MAX Flows on VN-1
vn1_max_flows_new = vn1_max_flows + 500
vn1_fix.set_max_flows(max_flows=vn1_max_flows_new)
send_flow_count = self.send_traffic(
src=str(vm11_fix.vm_ip),
dst=str(vm12_fix.vm_ip),
max_flows=vn1_max_flows,
vm_fix=vm11_fix
)
total_flow_count = self.get_total_flow_count(
source_ip=str(vm11_fix.vm_ip),
dest_ip=str(vm12_fix.vm_ip),
vrf_id=vm11_vrouter_fixture.get_vrf_id(vn1_fix.vn_fq_name),
metadata_ip=str(vm11_fix.get_local_ip()),
vrouter_fixture=vm11_vrouter_fixture
)
self.logger.info("Total Obtained Flow Count: %d"% (total_flow_count))
self.logger.info("Total Expected Flow Count: %d" % (vn1_max_flows_new))
if total_flow_count == vn1_max_flows_new:
self.logger.info("VN level Max Flows Provisioning is working fine as per modified value")
assert total_flow_count == vn1_max_flows_new, "VN level Max Flows is not working as per modified value"
# check other VN-2 should allow all the flows
send_flow_count = self.send_traffic(
src=str(vm21_fix.vm_ip),
dst=str(vm22_fix.vm_ip),
max_flows=vn1_max_flows,
vm_fix=vm21_fix
)
total_flow_count = self.get_total_flow_count(
source_ip=str(vm21_fix.vm_ip),
dest_ip=str(vm22_fix.vm_ip),
vrf_id=vm11_vrouter_fixture.get_vrf_id(vn2_fix.vn_fq_name),
metadata_ip=str(vm21_fix.get_local_ip()),
vrouter_fixture=vm11_vrouter_fixture
)
self.logger.info("Total Obtained Flow Count: %d"% (total_flow_count))
self.logger.info("Total Expected Flow Count: %s" % ('Not Configured'))
#import pdb; pdb.set_trace()
if total_flow_count >= send_flow_count:
self.logger.info("VN-2 is allowing all the flows")
assert total_flow_count >= send_flow_count, "Other VN (VN-2) impacted due to VN level max flows configuration @ VN-1"
self.waiting_for_flow_timeout()
# Reset the VN level Max flows to default value (0)
vn1_fix.set_max_flows(max_flows=0)
send_flow_count = self.send_traffic(
src=str(vm11_fix.vm_ip),
dst=str(vm12_fix.vm_ip),
max_flows=vn1_max_flows,
vm_fix=vm11_fix
)
total_flow_count = self.get_total_flow_count(
source_ip=str(vm11_fix.vm_ip),
dest_ip=str(vm12_fix.vm_ip),
vrf_id=vm11_vrouter_fixture.get_vrf_id(vn1_fix.vn_fq_name),
metadata_ip=str(vm11_fix.get_local_ip()),
vrouter_fixture=vm11_vrouter_fixture
)
self.logger.info("Total Obtained Flow Count: %d"% (total_flow_count))
self.logger.info("Total Expected Flow Count: %s" % ('Deleted - It should allow the flows'))
if total_flow_count >= send_flow_count:
self.logger.info("VN level Max Flows Provisioning is deleted properly")
assert total_flow_count >= send_flow_count, "VN level Max Flows Provisioning is not deleted properly"
def send_traffic(self,**kwargs):
vm_fix = kwargs.get('vm_fix', None)
src_ip = kwargs.get('src', None)
dst_ip = kwargs.get('dst', None)
max_flows = kwargs.get('max_flows', None)
flow_count = kwargs.get('flow_count', max_flows)
sport = kwargs.get('sport', 1500)
dport_start = kwargs.get('dport_start', 10001)
dport_end = kwargs.get('dport_end', dport_start+flow_count-1)
dport_range = kwargs.get('dport_range', (dport_start, dport_end))
params = {}
params['ip'] = {'src': src_ip , 'dst': dst_ip}
params['udp'] = {'sport': sport, 'dport': dport_range}
params['count'] = 1
params['interval'] = 0
params['mode'] = 'L3'
scapy_obj = ScapyTraffic(vm_fix, **params)
scapy_obj.start()
sleep_time = int(old_div(flow_count,25))
self.logger.info("Started Traffic...sleeping for %d secs..." % sleep_time )
time.sleep(sleep_time)
flow_count = dport_range[1]-dport_range[0]+1
return flow_count
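# Usage sketch (hypothetical addresses, not from the test above): with
# flow_count=1000 and dport_start=10001 the destination port sweeps
# 10001..11000, i.e. 1000 distinct UDP 5-tuples from one sender:
#   sent = self.send_traffic(src='21.0.0.3', dst='21.0.0.4',
#                            max_flows=1000, vm_fix=vm11_fix)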
def get_total_flow_count(self,**kwargs):
source_ip = kwargs.get('source_ip', None)
dest_ip = kwargs.get('dest_ip', None)
vrf_id = kwargs.get('vrf_id', None)
metadata_ip = kwargs.get('metadata_ip', None)
vrouter_fixture = kwargs.get('vrouter_fixture', None)
flow_table = vrouter_fixture.get_flow_table()
if dest_ip == None:
(ff_count, rf_count) = vrouter_fixture.get_flow_count(
flow_table=flow_table,
refresh=False,
source_ip=source_ip,
proto='udp',
vrf_id=vrf_id
)
elif source_ip == None:
(ff_count, rf_count) = vrouter_fixture.get_flow_count(
flow_table=flow_table,
refresh=False,
dest_ip=dest_ip,
proto='udp',
vrf_id=vrf_id
)
else:
(ff_count, rf_count) = vrouter_fixture.get_flow_count(
flow_table=flow_table,
refresh=False,
source_ip=source_ip,
dest_ip=dest_ip,
proto='udp',
vrf_id=vrf_id
)
self.logger.info("Flow Count Forward: %d Reverse: %d" % (ff_count, rf_count))
(ff_dns_count, rf_dns_count) = vrouter_fixture.get_flow_count(
flow_table=flow_table,
refresh=False,
source_ip=source_ip,
dest_port=53,
proto='udp',
vrf_id=vrf_id
)
self.logger.info("DNS Flow Count Forward: %d Reverse: %d" % (ff_dns_count, rf_dns_count))
(ff_meta_ip, rf_meta_ip) = vrouter_fixture.get_flow_count(
flow_table=flow_table,
refresh=False,
dest_ip=metadata_ip
)
self.logger.info("Meta Data Flow Count Forward: %d Reverse: %d" % (ff_meta_ip, rf_meta_ip))
total_flow_count = ff_count + rf_count + ff_dns_count + rf_dns_count + ff_meta_ip + rf_meta_ip
return total_flow_count
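# Worked example (assumed vrouter behaviour): each UDP 5-tuple sets up a
# forward and a reverse flow entry, so 1000 distinct dports from one sender
# would yield ~2000 entries if uncapped; the DNS and metadata counts above
# fold in the handful of system flows a VM creates on its own. With a VN
# level max_flows of 1000 the total is capped at 1000:
#   min(1000 * 2 + dns_flows + metadata_flows, 1000) == 1000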
@preposttest_wrapper
def test_max_flows_vmi_level(self):
'''
Description:
Verify max_flows functionality at VMI level
Test steps:
1.Create a virtual network (vn1)
2.Launch vm11, vm12 and vm13 on vn1 network
3.Configure VMI level max_flows as 100, 200 and 300 on vmi11, vmi12 and vmi13 respectively
4.Verify traffic between the VMs
5.Send traffic from vm11 to vm13; it should allow only 100 flows @ vmi11
6.Send traffic from vm12 to vm13; it should allow only 200 flows @ vmi12
7.Modify the max_flows value to 150 @ vmi11
8.Verify traffic between vm11 and vm13; it should now allow 150 flows
9.Delete the max_flows @ all VMIs (by setting the value to 0)
10.Send traffic across vm11, vm12 and vm13; it should allow all the traffic
Pass criteria:
Number of flows should be created as per max_flows configured at VMI level.
After modification, it should work as per modified value
After deleting max_flows configuration, it should allow all the flows.
Maintainer : mmohan@juniper.net
'''
vn = {'count':1, # VN count
'vn1':{'subnet':'21.0.0.0/24'},
}
vmi = {'count':3, # VMI Count
'vmi11':{'vn': 'vn1'}, # VMI details
'vmi12':{'vn': 'vn1'}, # VMI details
'vmi13':{'vn': 'vn1'}, # VMI details
}
# Input Variables
compute_nodes = self.orch.get_hosts()
assert len(compute_nodes) >= 2 , "Required Minimum 2 Compute Nodes"
vm = {'count':3,
'launch_mode':'distribute',
'vm11':{'vn':['vn1'], 'vmi':['vmi11'],'node': compute_nodes[0]}, # VM Details
'vm12':{'vn':['vn1'], 'vmi':['vmi12'],'node': compute_nodes[1]}, # VM Details
'vm13':{'vn':['vn1'], 'vmi':['vmi13'],'node': compute_nodes[0]}, # VM Details
}
# Create Virtual Networks (VNs)
vn_fixtures = self.setup_vns(vn=vn)
vn1_fix = vn_fixtures['vn1']
#import pdb; pdb.set_trace()
# Create VMIs
vmi_fixtures = self.setup_vmis(vn_fixtures, vmi=vmi)
vmi11_fix = vmi_fixtures['vmi11']
vmi12_fix = vmi_fixtures['vmi12']
vmi13_fix = vmi_fixtures['vmi13']
#import pdb; pdb.set_trace()
# Setting MAX Flows only on VMI 11
vmi11_max_flows = 100
vmi11_fix.set_max_flows(max_flows=vmi11_max_flows)
vmi12_max_flows = 200
vmi12_fix.set_max_flows(max_flows=vmi12_max_flows)
vmi13_max_flows = 300
vmi13_fix.set_max_flows(max_flows=vmi13_max_flows)
# Create VMs
vm_fixtures = self.setup_vms(vn_fixtures, vmi_fixtures, vm=vm, image_name='ubuntu-traffic')
vm11_fix = vm_fixtures['vm11']
vm12_fix = vm_fixtures['vm12']
vm13_fix = vm_fixtures['vm13']
# Creating ComputeNode/Vrouter Fixtures
vm11_vrouter_fixture = ComputeNodeFixture(self.connections, vm11_fix.vm_node_ip)
vm12_vrouter_fixture = ComputeNodeFixture(self.connections, vm12_fix.vm_node_ip)
vm13_vrouter_fixture = ComputeNodeFixture(self.connections, vm13_fix.vm_node_ip)
self.logger.info("Verify Traffic within VMs in VN-1")
send_vm_fixture = vm11_fix
recv_vm_fixture = vm13_fix
traffic_result = self.verify_traffic(sender_vm=send_vm_fixture, receiver_vm=recv_vm_fixture,
proto='udp', sport=1500, dport=10001, count=100)
self.logger.info("Traffic Tx-Pkts: %d Rx-Pkts: %d" % (traffic_result[0],traffic_result[1]))
assert traffic_result[0] == traffic_result[1], "Basic Traffic Validation Failed between VMs ( VN-1)"
send_vm_fixture = vm12_fix
recv_vm_fixture = vm13_fix
traffic_result = self.verify_traffic(sender_vm=send_vm_fixture, receiver_vm=recv_vm_fixture,
proto='udp', sport=1500, dport=10001, count=100)
self.logger.info("Traffic Tx-Pkts: %d Rx-Pkts: %d" % (traffic_result[0],traffic_result[1]))
assert traffic_result[0] == traffic_result[1], "Basic Traffic Validation Failed between VMs ( VN-1)"
#import pdb; pdb.set_trace()
vm11_inspect = self.agent_inspect[vm11_fix.vm_node_ip]
vm12_inspect = self.agent_inspect[vm12_fix.vm_node_ip]
vm13_inspect = self.agent_inspect[vm13_fix.vm_node_ip]
vm11_tap_intf = vm11_inspect.get_vna_tap_interface_by_ip(vm11_fix.vm_ip)
self.logger.info("drop_new_flows flag values @ vifs before sending traffics...")
vm11_drop_new_flows = vm11_inspect.get_vna_tap_interface_by_ip(vm11_fix.vm_ip)[0]['drop_new_flows']
self.logger.info("drop_new_flows flag value @ vm11: %s " % (vm11_drop_new_flows))
vm12_drop_new_flows = vm12_inspect.get_vna_tap_interface_by_ip(vm12_fix.vm_ip)[0]['drop_new_flows']
self.logger.info("drop_new_flows flag value @ vm12: %s " % (vm12_drop_new_flows))
vm13_drop_new_flows = vm13_inspect.get_vna_tap_interface_by_ip(vm13_fix.vm_ip)[0]['drop_new_flows']
self.logger.info("drop_new_flows flag value @ vm13: %s " % (vm13_drop_new_flows))
if vm11_drop_new_flows != 'false' or vm12_drop_new_flows != 'false' or vm13_drop_new_flows != 'false':
assert False, "drop_new_flows flag is set even before sending traffic..."
self.logger.info("Sleeping for dns/meta flows timeout seconds...")
self.waiting_for_flow_timeout()
# Verify Max_flows functionality on VMI level
#import pdb; pdb.set_trace()
send_flow_count_vm11 = self.send_traffic(
src=str(vm11_fix.vm_ip),
dst=str(vm13_fix.vm_ip),
max_flows=vmi11_max_flows,
vm_fix=vm11_fix
)
total_flow_count_vm11 = self.get_total_flow_count(
source_ip=str(vm11_fix.vm_ip),
dest_ip=str(vm13_fix.vm_ip),
vrf_id=vm11_vrouter_fixture.get_vrf_id(vn1_fix.vn_fq_name),
metadata_ip=str(vm11_fix.get_local_ip()),
vrouter_fixture=vm11_vrouter_fixture
)
send_flow_count_vm12 = self.send_traffic(
src=str(vm12_fix.vm_ip),
dst=str(vm13_fix.vm_ip),
max_flows=vmi12_max_flows,
vm_fix=vm12_fix
)
total_flow_count_vm12 = self.get_total_flow_count(
source_ip=str(vm12_fix.vm_ip),
dest_ip=str(vm13_fix.vm_ip),
vrf_id=vm12_vrouter_fixture.get_vrf_id(vn1_fix.vn_fq_name),
metadata_ip=str(vm12_fix.get_local_ip()),
vrouter_fixture=vm12_vrouter_fixture
)
send_flow_count_vm13 = self.send_traffic(
src=str(vm13_fix.vm_ip),
dst=str(vm11_fix.vm_ip),
max_flows=vmi13_max_flows,
vm_fix=vm13_fix
)
total_flow_count_vm13 = self.get_total_flow_count(
dest_ip=str(vm13_fix.vm_ip),
vrf_id=vm13_vrouter_fixture.get_vrf_id(vn1_fix.vn_fq_name),
metadata_ip=str(vm13_fix.get_local_ip()),
vrouter_fixture=vm13_vrouter_fixture
)
self.logger.info("drop_new_flows flag values @ vifs after max_flows exceeds..")
vm11_drop_new_flows = vm11_inspect.get_vna_tap_interface_by_ip(vm11_fix.vm_ip)[0]['drop_new_flows']
self.logger.info("drop_new_flows flag value @ vm11: %s " % (vm11_drop_new_flows))
vm12_drop_new_flows = vm12_inspect.get_vna_tap_interface_by_ip(vm12_fix.vm_ip)[0]['drop_new_flows']
self.logger.info("drop_new_flows flag value @ vm12: %s " % (vm12_drop_new_flows))
vm13_drop_new_flows = vm13_inspect.get_vna_tap_interface_by_ip(vm13_fix.vm_ip)[0]['drop_new_flows']
self.logger.info("drop_new_flows flag value @ vm13: %s " % (vm13_drop_new_flows))
if vm11_drop_new_flows != 'true' or vm12_drop_new_flows != 'true' or vm13_drop_new_flows != 'true':
assert False, "drop_new_flows flag is NOT set even after max_flows exceeded..."
self.logger.info("Total Obtained Flow Count @ vm11: %d"% (total_flow_count_vm11))
self.logger.info("Total Expected Flow Count @ vm11: %d" % (vmi11_max_flows))
self.logger.info("Total Obtained Flow Count @ vm12: %d"% (total_flow_count_vm12))
self.logger.info("Total Expected Flow Count @ vm12: %d" % (vmi12_max_flows))
self.logger.info("Total Obtained Flow Count @ vm13: %d"% (total_flow_count_vm13))
self.logger.info("Total Expected Flow Count @ vm13: %d" % (vmi13_max_flows))
#import pdb; pdb.set_trace()
if total_flow_count_vm11 == vmi11_max_flows:
self.logger.info("VMI level (vm11) Max Flows Provisioning is working fine")
assert total_flow_count_vm11 == vmi11_max_flows, "VMI (vm11) level Provisioning is not working"
if total_flow_count_vm12 == vmi12_max_flows:
self.logger.info("VMI level (vm12) Max Flows Provisioning is working fine")
assert total_flow_count_vm12 == vmi12_max_flows, "VMI (vm12) level Provisioning is not working"
vmi13_max_flows_low = vmi13_max_flows - 10
vmi13_max_flows_high = vmi13_max_flows + 10
if total_flow_count_vm13 >= vmi13_max_flows_low and total_flow_count_vm13 <= vmi13_max_flows_high:
self.logger.info("VMI level (vm13) Max Flows Provisioning is working fine")
else:
assert False, "VMI (vm13) level Provisioning is not working"
self.waiting_for_flow_timeout()
self.logger.info("drop_new_flows flag values @ vifs after flows timedout...")
vm11_drop_new_flows = vm11_inspect.get_vna_tap_interface_by_ip(vm11_fix.vm_ip)[0]['drop_new_flows']
self.logger.info("drop_new_flows flag value @ vm11: %s " % (vm11_drop_new_flows))
vm12_drop_new_flows = vm12_inspect.get_vna_tap_interface_by_ip(vm12_fix.vm_ip)[0]['drop_new_flows']
self.logger.info("drop_new_flows flag value @ vm12: %s " % (vm12_drop_new_flows))
vm13_drop_new_flows = vm13_inspect.get_vna_tap_interface_by_ip(vm13_fix.vm_ip)[0]['drop_new_flows']
self.logger.info("drop_new_flows flag value @ vm13: %s " % (vm13_drop_new_flows))
if vm11_drop_new_flows != 'false' or vm12_drop_new_flows != 'false' or vm13_drop_new_flows != 'false':
assert False, "drop_new_flows flag is set even after flows timed out..."
# Modify max flows to different values
# Setting MAX Flows on VMI 11
vmi11_max_flows_new = vmi11_max_flows + 50
vmi11_fix.set_max_flows(max_flows=vmi11_max_flows_new)
send_flow_count_vm11 = self.send_traffic(
src=str(vm11_fix.vm_ip),
dst=str(vm13_fix.vm_ip),
max_flows=vmi11_max_flows,
vm_fix=vm11_fix
)
total_flow_count_vm11 = self.get_total_flow_count(
source_ip=str(vm11_fix.vm_ip),
dest_ip=str(vm13_fix.vm_ip),
vrf_id=vm11_vrouter_fixture.get_vrf_id(vn1_fix.vn_fq_name),
metadata_ip=str(vm11_fix.get_local_ip()),
vrouter_fixture=vm11_vrouter_fixture
)
send_flow_count_vm12 = self.send_traffic(
src=str(vm12_fix.vm_ip),
dst=str(vm13_fix.vm_ip),
max_flows=vmi12_max_flows,
vm_fix=vm12_fix
)
total_flow_count_vm12 = self.get_total_flow_count(
source_ip=str(vm12_fix.vm_ip),
dest_ip=str(vm13_fix.vm_ip),
vrf_id=vm12_vrouter_fixture.get_vrf_id(vn1_fix.vn_fq_name),
metadata_ip=str(vm12_fix.get_local_ip()),
vrouter_fixture=vm12_vrouter_fixture
)
total_flow_count_vm13 = self.get_total_flow_count(
dest_ip=str(vm13_fix.vm_ip),
vrf_id=vm13_vrouter_fixture.get_vrf_id(vn1_fix.vn_fq_name),
metadata_ip=str(vm13_fix.get_local_ip()),
vrouter_fixture=vm13_vrouter_fixture
)
self.logger.info("Total Obtained Flow Count @ vm11: %d"% (total_flow_count_vm11))
self.logger.info("Total Expected Flow Count @ vm11: %d" % (vmi11_max_flows_new))
self.logger.info("Total Obtained Flow Count @ vm12: %d"% (total_flow_count_vm12))
self.logger.info("Total Expected Flow Count @ vm12: %d" % (vmi12_max_flows))
self.logger.info("Total Obtained Flow Count @ vm13: %d"% (total_flow_count_vm13))
self.logger.info("Total Expected Flow Count @ vm13: %d" % (vmi13_max_flows))
#import pdb; pdb.set_trace()
if total_flow_count_vm11 == vmi11_max_flows_new:
self.logger.info("VMI(vm11) level Max Flows Provisioning is working fine as per modified value")
assert total_flow_count_vm11 == vmi11_max_flows_new, "VMI(vm11) level Max Flows is not working as per modified value"
if total_flow_count_vm12 == vmi12_max_flows:
self.logger.info("VMI level (vm12) Max Flows Provisioning is working fine")
assert total_flow_count_vm12 == vmi12_max_flows, "VMI (vm12) level Provisioning is not working"
vmi13_max_flows_low = vmi13_max_flows - 10
vmi13_max_flows_high = vmi13_max_flows + 10
if total_flow_count_vm13 >= vmi13_max_flows_low and total_flow_count_vm13 <= vmi13_max_flows_high :
self.logger.info("VMI level (vm13) Max Flows Provisioning is working fine")
else:
assert False, "VMI (vm13) level Provisioning is not working"
self.logger.info("Sleeping for dns/meta flows timeout seconds...")
self.waiting_for_flow_timeout()
vmi11_fix.set_max_flows(max_flows=vmi11_max_flows)
# Verify Max_flows functionality on VMI level
#import pdb; pdb.set_trace()
send_flow_count_vm11 = self.send_traffic(
src=str(vm11_fix.vm_ip),
dst=str(vm13_fix.vm_ip),
max_flows=vmi11_max_flows,
vm_fix=vm11_fix
)
total_flow_count_vm11 = self.get_total_flow_count(
source_ip=str(vm11_fix.vm_ip),
dest_ip=str(vm13_fix.vm_ip),
vrf_id=vm11_vrouter_fixture.get_vrf_id(vn1_fix.vn_fq_name),
metadata_ip=str(vm11_fix.get_local_ip()),
vrouter_fixture=vm11_vrouter_fixture
)
send_flow_count_vm12 = self.send_traffic(
src=str(vm12_fix.vm_ip),
dst=str(vm13_fix.vm_ip),
max_flows=vmi12_max_flows,
vm_fix=vm12_fix
)
total_flow_count_vm12 = self.get_total_flow_count(
source_ip=str(vm12_fix.vm_ip),
dest_ip=str(vm13_fix.vm_ip),
vrf_id=vm12_vrouter_fixture.get_vrf_id(vn1_fix.vn_fq_name),
metadata_ip=str(vm12_fix.get_local_ip()),
vrouter_fixture=vm12_vrouter_fixture
)
total_flow_count_vm13 = self.get_total_flow_count(
dest_ip=str(vm13_fix.vm_ip),
vrf_id=vm13_vrouter_fixture.get_vrf_id(vn1_fix.vn_fq_name),
metadata_ip=str(vm13_fix.get_local_ip()),
vrouter_fixture=vm13_vrouter_fixture
)
self.logger.info("Total Obtained Flow Count @ vm11: %d"% (total_flow_count_vm11))
self.logger.info("Total Expected Flow Count @ vm11: %d" % (vmi11_max_flows))
self.logger.info("Total Obtained Flow Count @ vm12: %d"% (total_flow_count_vm12))
self.logger.info("Total Expected Flow Count @ vm12: %d" % (vmi12_max_flows))
self.logger.info("Total Obtained Flow Count @ vm13: %d"% (total_flow_count_vm13))
self.logger.info("Total Expected Flow Count @ vm13: %d" % (vmi13_max_flows))
#import pdb; pdb.set_trace()
if total_flow_count_vm11 == vmi11_max_flows:
self.logger.info("VMI level (vm11) Max Flows Provisioning is working fine")
assert total_flow_count_vm11 == vmi11_max_flows, "VMI (vm11) level Provisioning is not working"
if total_flow_count_vm12 == vmi12_max_flows:
self.logger.info("VMI level (vm12) Max Flows Provisioning is working fine")
assert total_flow_count_vm12 == vmi12_max_flows, "VMI (vm12) level Provisioning is not working"
vmi13_max_flows_low = vmi13_max_flows - 10
vmi13_max_flows_high = vmi13_max_flows + 10
if total_flow_count_vm13 >= vmi13_max_flows_low and total_flow_count_vm13 <= vmi13_max_flows_high:
self.logger.info("VMI level (vm13) Max Flows Provisioning is working fine")
else:
assert False, "VMI (vm13) level Provisioning is not working"
self.waiting_for_flow_timeout()
time.sleep(10)
# Reset the VN level Max flows to default value (0)
vmi11_fix.set_max_flows(max_flows=0)
vmi12_fix.set_max_flows(max_flows=0)
vmi13_fix.set_max_flows(max_flows=0)
send_flow_count_vm11 = self.send_traffic(
src=str(vm11_fix.vm_ip),
dst=str(vm13_fix.vm_ip),
max_flows=vmi11_max_flows,
vm_fix=vm11_fix
)
time.sleep(10)
total_flow_count_vm11 = self.get_total_flow_count(
source_ip=str(vm11_fix.vm_ip),
dest_ip=str(vm13_fix.vm_ip),
vrf_id=vm11_vrouter_fixture.get_vrf_id(vn1_fix.vn_fq_name),
metadata_ip=str(vm11_fix.get_local_ip()),
vrouter_fixture=vm11_vrouter_fixture
)
send_flow_count_vm12 = self.send_traffic(
src=str(vm12_fix.vm_ip),
dst=str(vm13_fix.vm_ip),
max_flows=vmi12_max_flows,
vm_fix=vm12_fix
)
total_flow_count_vm12 = self.get_total_flow_count(
source_ip=str(vm12_fix.vm_ip),
dest_ip=str(vm13_fix.vm_ip),
vrf_id=vm12_vrouter_fixture.get_vrf_id(vn1_fix.vn_fq_name),
metadata_ip=str(vm12_fix.get_local_ip()),
vrouter_fixture=vm12_vrouter_fixture
)
send_flow_count_vm13 = send_flow_count_vm11+send_flow_count_vm12
total_flow_count_vm13 = self.get_total_flow_count(
dest_ip=str(vm13_fix.vm_ip),
vrf_id=vm13_vrouter_fixture.get_vrf_id(vn1_fix.vn_fq_name),
metadata_ip=str(vm13_fix.get_local_ip()),
vrouter_fixture=vm13_vrouter_fixture
)
self.logger.info("Total Obtained Flow Count @ vm11: %d"% (total_flow_count_vm11))
self.logger.info("Total Expected Flow Count @ vm11: %d" % (send_flow_count_vm11*2))
self.logger.info("Total Obtained Flow Count @ vm12: %d"% (total_flow_count_vm12))
self.logger.info("Total Expected Flow Count @ vm12: %d" % (send_flow_count_vm12*2))
self.logger.info("Total Obtained Flow Count @ vm13: %d"% (total_flow_count_vm13))
self.logger.info("Total Expected Flow Count @ vm13: %d" % (send_flow_count_vm13*2))
#import pdb; pdb.set_trace()
if total_flow_count_vm11 >= send_flow_count_vm11*2:
self.logger.info("VMI(vm11) level Max Flows Provisioning is deleted properly")
assert total_flow_count_vm11 >= send_flow_count_vm11*2, "VMI (vm11) level Provisioning is not deleted properly"
if total_flow_count_vm12 >= send_flow_count_vm12*2:
self.logger.info("VMI(vm12) level Max Flows Provisioning is deleted properly")
assert total_flow_count_vm12 >= send_flow_count_vm12*2, "VMI (vm12) level Provisioning is not deleted properly"
if total_flow_count_vm13 >= send_flow_count_vm13*2:
self.logger.info("VMI(vm13) level Max Flows Provisioning is deleted properly")
assert total_flow_count_vm13 >= send_flow_count_vm13*2, "VMI (vm13) level Provisioning is not deleted properly"
@preposttest_wrapper
def test_max_flows_precedence(self):
'''
Description:
Verify precedence order between VMI level and VN level configuration
Test steps:
1.Create a virtual network (vn1)
2.Launch vm11, vm12 and vm13 on vn1 network
3.Configure max_flows as 400 @ VN level (vn1)
4.Configure max_flows as 1000 and 2000 on VMIs (vmi11 and vmi13) respectively
5.Send traffic between vm11 and vm13; it should allow only 1000 flows
6.Send traffic between vm12 and vm13; it should allow only 400 flows
7.Delete VMI level configuration @ vmi11 (max_flows=0)
8.Now send traffic between vm11 and vm13; it should allow only 400 flows
Pass criteria:
VMI level is preferred when max_flows is configured @ both VMI and VN level (see the sketch below)
After removing the configuration from VMI level, it should use the VN level value
Maintainer : mmohan@juniper.net
'''
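# Precedence rule under test, expressed as a hypothetical helper
# (0 means "not configured" at that level):
def _effective_max_flows(vmi_max_flows, vn_max_flows):
    return vmi_max_flows if vmi_max_flows else vn_max_flows
# e.g. _effective_max_flows(1000, 400) == 1000 (VMI wins)
#      _effective_max_flows(0, 400) == 400     (falls back to VN)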
vn = {'count':1, # VN count
'vn1':{'subnet':'21.0.0.0/24'},
}
vmi = {'count':3, # VMI Count
'vmi11':{'vn': 'vn1'}, # VMI details
'vmi12':{'vn': 'vn1'}, # VMI details
'vmi13':{'vn': 'vn1'}, # VMI details
}
# Input Variables
compute_nodes = self.orch.get_hosts()
assert len(compute_nodes) >= 2 , "Required Minimum 2 Compute Nodes"
vm = {'count':3,
'launch_mode':'distribute',
'vm11':{'vn':['vn1'], 'vmi':['vmi11'],'node': compute_nodes[0]}, # VM Details
'vm12':{'vn':['vn1'], 'vmi':['vmi12'],'node': compute_nodes[1]}, # VM Details
'vm13':{'vn':['vn1'], 'vmi':['vmi13'],'node': compute_nodes[0]}, # VM Details
}
# Create Virtual Networks (VNs)
vn_fixtures = self.setup_vns(vn=vn)
vn1_fix = vn_fixtures['vn1']
#import pdb; pdb.set_trace()
# Create VMIs
vmi_fixtures = self.setup_vmis(vn_fixtures, vmi=vmi)
vmi11_fix = vmi_fixtures['vmi11']
vmi12_fix = vmi_fixtures['vmi12']
vmi13_fix = vmi_fixtures['vmi13']
#import pdb; pdb.set_trace()
# Setting MAX Flows only on VMI 11
vn1_max_flows = 400
vmi11_max_flows = 1000
vmi13_max_flows = 2000
vn1_fix.set_max_flows(max_flows=vn1_max_flows)
vmi11_fix.set_max_flows(max_flows=vmi11_max_flows)
vmi13_fix.set_max_flows(max_flows=vmi13_max_flows)
# Create VMs
vm_fixtures = self.setup_vms(vn_fixtures, vmi_fixtures, vm=vm, image_name='ubuntu-traffic')
vm11_fix = vm_fixtures['vm11']
vm12_fix = vm_fixtures['vm12']
vm13_fix = vm_fixtures['vm13']
# Creating ComputeNode/Vrouter Fixtures
vm11_vrouter_fixture = ComputeNodeFixture(self.connections, vm11_fix.vm_node_ip)
vm12_vrouter_fixture = ComputeNodeFixture(self.connections, vm12_fix.vm_node_ip)
self.logger.info("Verify Traffic within VMs in VN-1")
send_vm_fixture = vm11_fix
recv_vm_fixture = vm13_fix
traffic_result = self.verify_traffic(sender_vm=send_vm_fixture, receiver_vm=recv_vm_fixture,
proto='udp', sport=1500, dport=10001, count=100)
self.logger.info("Traffic Tx-Pkts: %d Rx-Pkts: %d" % (traffic_result[0],traffic_result[1]))
assert traffic_result[0] == traffic_result[1], "Basic Traffic Validation Failed between VMs ( VN-1)"
self.logger.info("Sleeping for dns/meta flows timeout seconds...")
self.waiting_for_flow_timeout()
# Verify Max_flows functionality on VMI level
#import pdb; pdb.set_trace()
send_flow_count = self.send_traffic(
src=str(vm11_fix.vm_ip),
dst=str(vm13_fix.vm_ip),
max_flows=vmi11_max_flows,
vm_fix=vm11_fix
)
total_flow_count = self.get_total_flow_count(
source_ip=str(vm11_fix.vm_ip),
dest_ip=str(vm13_fix.vm_ip),
vrf_id=vm11_vrouter_fixture.get_vrf_id(vn1_fix.vn_fq_name),
metadata_ip=str(vm11_fix.get_local_ip()),
vrouter_fixture=vm11_vrouter_fixture
)
self.logger.info("Total Obtained Flow Count: %d"% (total_flow_count))
self.logger.info("Total Expected Flow Count: %d" % (vmi11_max_flows))
#import pdb; pdb.set_trace()
if total_flow_count == vmi11_max_flows:
self.logger.info("VMI level Max Flows Provisioning is working fine")
assert total_flow_count == vmi11_max_flows, "VMI level Max Flows Provisioning is not working"
send_flow_count = self.send_traffic(
src=str(vm12_fix.vm_ip),
dst=str(vm13_fix.vm_ip),
max_flows=vmi11_max_flows,
vm_fix=vm12_fix
)
total_flow_count = self.get_total_flow_count(
source_ip=str(vm12_fix.vm_ip),
dest_ip=str(vm13_fix.vm_ip),
vrf_id=vm11_vrouter_fixture.get_vrf_id(vn1_fix.vn_fq_name),
metadata_ip=str(vm12_fix.get_local_ip()),
vrouter_fixture=vm12_vrouter_fixture
)
self.logger.info("Total Obtained Flow Count: %d"% (total_flow_count))
self.logger.info("Total Expected Flow Count: %s" % ('Not VMI level Configured - Should use VN level value'))
self.logger.info("Total Expected Flow Count: %d" % (vn1_max_flows))
# import pdb; pdb.set_trace()
if total_flow_count == vn1_max_flows:
self.logger.info("No VMI level configuration - VN level max flows is used correctly")
assert total_flow_count == vn1_max_flows, "Other VMI ( VMI 12) are not taking VN level value"
self.waiting_for_flow_timeout()
# Reset the VN level Max flows to default value (0)
vmi11_fix.set_max_flows(max_flows=0)
send_flow_count = self.send_traffic(
src=str(vm11_fix.vm_ip),
dst=str(vm13_fix.vm_ip),
max_flows=vmi11_max_flows,
vm_fix=vm11_fix
)
total_flow_count = self.get_total_flow_count(
source_ip=str(vm11_fix.vm_ip),
dest_ip=str(vm13_fix.vm_ip),
vrf_id=vm11_vrouter_fixture.get_vrf_id(vn1_fix.vn_fq_name),
metadata_ip=str(vm11_fix.get_local_ip()),
vrouter_fixture=vm11_vrouter_fixture
)
self.logger.info("Total Obtained Flow Count: %d"% (total_flow_count))
self.logger.info("Total Expected Flow Count: %s" % ('VMI level Deleted - Should use VN level'))
self.logger.info("Total Expected Flow Count: %d" % (vn1_max_flows))
if total_flow_count == vn1_max_flows:
self.logger.info("VMI level deleted properly and it uses VN level Value")
assert total_flow_count == vn1_max_flows, "VMI level Max Flows Provisioning is not deleted properly"
@preposttest_wrapper
def test_max_flows_precedence_with_max_vm_flows(self):
'''
Description:
Verify precedence order between VMI level and VN level configuration
Test steps:
1.Create a virtual network (vn1)
2.Launch vm11, vm12 and vm13 on vn1 network
3.Configure max_flows as 400 @ VN level (vn1)
4.Configure max_flows as 1000 and 2000 on VMIs (vmi11 and vmi13) respectively
5.Configure max_vm_flows as 0.02% on vm11's compute node (VM level)
6.Send traffic between vm11 and vm13; it should allow only 1000 flows
7.Send traffic between vm12 and vm13; it should allow only 400 flows
8.Delete VMI level configuration @ vmi11 (max_flows=0)
9.Now send traffic between vm11 and vm13
10.It should allow only 400 flows, as per VN level configuration
11.Delete VN level configuration @ vn1 (max_flows=0)
12.Now send traffic between vm11 and vm13
13.It should allow only ~105 flows (0.02% of the 512K flow table), as per max_vm_flows
Pass criteria:
Precedence order: VMI > VN > max_vm_flows (see the sketch below)
After removing the configuration from VMI level, it should use the VN level value
After removing the VN level, it should use the VM level (max_vm_flows)
Maintainer : mmohan@juniper.net
'''
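# Extended precedence sketch (hypothetical helper): VMI > VN > per-VM
# percentage of the vrouter flow table (assumed default of 512K entries);
# 0 means "not configured" at that level.
def _effective_limit(vmi_max_flows, vn_max_flows, vm_pct, table_size=512 * 1024):
    if vmi_max_flows:
        return vmi_max_flows
    if vn_max_flows:
        return vn_max_flows
    return int(vm_pct / 100.0 * table_size)   # 0.02% of 512K ~= 104 entries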
vn = {'count':1, # VN count
'vn1':{'subnet':'21.0.0.0/24'},
}
vmi = {'count':3, # VMI Count
'vmi11':{'vn': 'vn1'}, # VMI details
'vmi12':{'vn': 'vn1'}, # VMI details
'vmi13':{'vn': 'vn1'}, # VMI details
}
# Input Variables
compute_nodes = self.orch.get_hosts()
assert len(compute_nodes) >= 2 , "Required Minimum 2 Compute Nodes"
vm = {'count':3,
'launch_mode':'distribute',
'vm11':{'vn':['vn1'], 'vmi':['vmi11'],'node': compute_nodes[0]}, # VM Details
'vm12':{'vn':['vn1'], 'vmi':['vmi12'],'node': compute_nodes[1]}, # VM Details
'vm13':{'vn':['vn1'], 'vmi':['vmi13'],'node': compute_nodes[0]}, # VM Details
}
# Create Virtual Networks (VNs)
vn_fixtures = self.setup_vns(vn=vn)
vn1_fix = vn_fixtures['vn1']
#import pdb; pdb.set_trace()
# Create VMIs
vmi_fixtures = self.setup_vmis(vn_fixtures, vmi=vmi)
vmi11_fix = vmi_fixtures['vmi11']
vmi12_fix = vmi_fixtures['vmi12']
vmi13_fix = vmi_fixtures['vmi13']
#import pdb; pdb.set_trace()
# Setting MAX Flows only on VMI 11
vn1_max_flows = 400
vmi11_max_flows = 1000
vmi13_max_flows = 2000
vn1_fix.set_max_flows(max_flows=vn1_max_flows)
vmi11_fix.set_max_flows(max_flows=vmi11_max_flows)
vmi13_fix.set_max_flows(max_flows=vmi13_max_flows)
# Create VMs
vm_fixtures = self.setup_vms(vn_fixtures, vmi_fixtures, vm=vm, image_name='ubuntu-traffic')
vm11_fix = vm_fixtures['vm11']
vm12_fix = vm_fixtures['vm12']
vm13_fix = vm_fixtures['vm13']
# Creating ComputeNode/Vrouter Fixtures
vm11_vrouter_fixture = ComputeNodeFixture(self.connections, vm11_fix.vm_node_ip)
vm12_vrouter_fixture = ComputeNodeFixture(self.connections, vm12_fix.vm_node_ip)
vm11_vrouter_fixture.set_per_vm_flow_limit(0.02)
#import pdb; pdb.set_trace()
self.logger.info("Sleeping for 360 secs...")
time.sleep(360)
self.logger.info("Sleeping for 360 secs...Completed")
self.addCleanup(self.cleanup_test_max_vm_flows_vrouter_config, [vm11_vrouter_fixture])
self.logger.info("Verify Traffic within VMs in VN-1")
send_vm_fixture = vm11_fix
recv_vm_fixture = vm13_fix
traffic_result = self.verify_traffic(sender_vm=send_vm_fixture, receiver_vm=recv_vm_fixture,
proto='udp', sport=1500, dport=10001, count=100)
self.logger.info("Traffic Tx-Pkts: %d Rx-Pkts: %d" % (traffic_result[0],traffic_result[1]))
assert traffic_result[0] == traffic_result[1], "Basic Traffic Validation Failed between VMs ( VN-1)"
self.logger.info("Sleeping for dns/meta flows timeout seconds...")
self.waiting_for_flow_timeout()
# Verify Max_flows functionality on VMI level
#import pdb; pdb.set_trace()
send_flow_count = self.send_traffic(
src=str(vm11_fix.vm_ip),
dst=str(vm13_fix.vm_ip),
max_flows=vmi11_max_flows,
vm_fix=vm11_fix
)
total_flow_count = self.get_total_flow_count(
source_ip=str(vm11_fix.vm_ip),
dest_ip=str(vm13_fix.vm_ip),
vrf_id=vm11_vrouter_fixture.get_vrf_id(vn1_fix.vn_fq_name),
metadata_ip=str(vm11_fix.get_local_ip()),
vrouter_fixture=vm11_vrouter_fixture
)
self.logger.info("Total Obtained Flow Count: %d"% (total_flow_count))
self.logger.info("Total Expected Flow Count: %d" % (vmi11_max_flows))
#import pdb; pdb.set_trace()
if total_flow_count == vmi11_max_flows:
self.logger.info("VMI level Max Flows Provisioning is working fine")
assert total_flow_count == vmi11_max_flows, "VMI level Max Flows Provisioning is not working"
send_flow_count = self.send_traffic(
src=str(vm12_fix.vm_ip),
dst=str(vm13_fix.vm_ip),
max_flows=vmi11_max_flows,
vm_fix=vm12_fix
)
total_flow_count = self.get_total_flow_count(
source_ip=str(vm12_fix.vm_ip),
dest_ip=str(vm13_fix.vm_ip),
vrf_id=vm11_vrouter_fixture.get_vrf_id(vn1_fix.vn_fq_name),
metadata_ip=str(vm12_fix.get_local_ip()),
vrouter_fixture=vm12_vrouter_fixture
)
self.logger.info("Total Obtained Flow Count: %d"% (total_flow_count))
self.logger.info("Total Expected Flow Count: %s" % ('Not VMI level Configured - Should use VN level value'))
self.logger.info("Total Expected Flow Count: %d" % (vn1_max_flows))
# import pdb; pdb.set_trace()
if total_flow_count == vn1_max_flows:
self.logger.info("No VMI level configuration - VN level max flows is used correctly")
assert total_flow_count == vn1_max_flows, "Other VMI ( VMI 12) are not taking VN level value"
self.waiting_for_flow_timeout()
# Reset the VN level Max flows to default value (0)
vmi11_fix.set_max_flows(max_flows=0)
send_flow_count = self.send_traffic(
src=str(vm11_fix.vm_ip),
dst=str(vm13_fix.vm_ip),
max_flows=vmi11_max_flows,
vm_fix=vm11_fix
)
total_flow_count = self.get_total_flow_count(
source_ip=str(vm11_fix.vm_ip),
dest_ip=str(vm13_fix.vm_ip),
vrf_id=vm11_vrouter_fixture.get_vrf_id(vn1_fix.vn_fq_name),
metadata_ip=str(vm11_fix.get_local_ip()),
vrouter_fixture=vm11_vrouter_fixture
)
self.logger.info("Total Obtained Flow Count: %d"% (total_flow_count))
self.logger.info("Total Expected Flow Count: %s" % ('VMI level Deleted - Should use VN level'))
self.logger.info("Total Expected Flow Count: %d" % (vn1_max_flows))
if total_flow_count == vn1_max_flows:
self.logger.info("VMI level deleted properly and it uses VN level Value")
assert total_flow_count == vn1_max_flows, "VMI level Max Flows Provisioning is not deleted properly"
vn1_fix.set_max_flows(max_flows=0)
self.waiting_for_flow_timeout()
send_flow_count = self.send_traffic(
src=str(vm11_fix.vm_ip),
dst=str(vm13_fix.vm_ip),
max_flows=vmi11_max_flows,
vm_fix=vm11_fix
)
total_flow_count = self.get_total_flow_count(
source_ip=str(vm11_fix.vm_ip),
dest_ip=str(vm13_fix.vm_ip),
vrf_id=vm11_vrouter_fixture.get_vrf_id(vn1_fix.vn_fq_name),
metadata_ip=str(vm11_fix.get_local_ip()),
vrouter_fixture=vm11_vrouter_fixture
)
self.logger.info("Total Obtained Flow Count: %d"% (total_flow_count))
self.logger.info("Total Expected Flow Count: %s" % ('VMI level Deleted - Should use VN level'))
self.logger.info("Total Expected Flow Count: ~120 flows(0.02*512K)")
if total_flow_count < 130 and total_flow_count > 100:
self.logger.info("VN level is deleted properly and it uses max_vm_flows Value")
else:
assert False, "VN level Max Flows Provisioning is not deleted properly, it should use max_vm_flows level value"
@preposttest_wrapper
def test_max_flows_vn_level_already_has_some_vmis(self):
'''
Description:
Verify max_flows functionality at a VN which already has some VMIs
Test steps:
1.Create a virtual network (vn1)
2.Launch vm11 and vm12 on vn1 network
3.Configure max_flows as 1000 @ vn1
4.Send traffic between vm11 and vm12; it should allow only 1000 flows
5.Launch one more VM (vm13) on vn1 network
6.Verify traffic between vm13 and vm12; it should allow only 1000 flows
Pass criteria:
Number of flows should be created as per max_flows configured at VN level.
Maintainer : mmohan@juniper.net
'''
vn = {'count':1, # VN count
'vn1':{'subnet':'21.0.0.0/24'},
}
vmi = {'count':2, # VMI Count
'vmi11':{'vn': 'vn1'}, # VMI details
'vmi12':{'vn': 'vn1'}, # VMI details
}
vmi_3 = {'count':1, # VMI Count
'vmi13':{'vn': 'vn1'}, # VMI details
}
# Input Variables
compute_nodes = self.orch.get_hosts()
assert len(compute_nodes) >= 2 , "Required Minimum 2 Compute Nodes"
vm = {'count':2,
'launch_mode':'distribute',
'vm11':{'vn':['vn1'], 'vmi':['vmi11'],'node': compute_nodes[0]}, # VM Details
'vm12':{'vn':['vn1'], 'vmi':['vmi12'],'node': compute_nodes[1]}, # VM Details
}
vm_3 = {'count':1,
'launch_mode':'distribute',
'vm13':{'vn':['vn1'], 'vmi':['vmi13'],'node': compute_nodes[0]}, # VM Details
}
# Create Virtual Networks (VNs)
vn_fixtures = self.setup_vns(vn=vn)
vn1_fix = vn_fixtures['vn1']
# Create VMIs
vmi_fixtures = self.setup_vmis(vn_fixtures, vmi=vmi)
# Create VMs
vm_fixtures = self.setup_vms(vn_fixtures, vmi_fixtures, vm=vm, image_name='ubuntu-traffic')
vm11_fix = vm_fixtures['vm11']
vm12_fix = vm_fixtures['vm12']
# Creating ComputeNode/Vrouter Fixtures
vm11_vrouter_fixture = ComputeNodeFixture(self.connections, vm11_fix.vm_node_ip)
vm12_vrouter_fixture = ComputeNodeFixture(self.connections, vm12_fix.vm_node_ip)
self.logger.info("Verify Traffic within VMs in VN-1")
send_vm_fixture = vm11_fix
recv_vm_fixture = vm12_fix
traffic_result = self.verify_traffic(sender_vm=send_vm_fixture, receiver_vm=recv_vm_fixture,
proto='udp', sport=1500, dport=10001, count=100)
self.logger.info("Traffic Tx-Pkts: %d Rx-Pkts: %d" % (traffic_result[0],traffic_result[1]))
assert traffic_result[0] == traffic_result[1], "Traffic between VMs Failed"
# Setting MAX Flows only on VN-1
vn1_max_flows = 1000
vn1_fix.set_max_flows(max_flows=vn1_max_flows)
self.logger.info("Sleeping for dns/metadata flows to timeout 180 seconds...")
self.waiting_for_flow_timeout()
# Verify Max_flows functionality on VN level
#import pdb; pdb.set_trace()
send_flow_count = self.send_traffic(
src=str(vm11_fix.vm_ip),
dst=str(vm12_fix.vm_ip),
max_flows=vn1_max_flows,
vm_fix=vm11_fix
)
total_flow_count = self.get_total_flow_count(
source_ip=str(vm11_fix.vm_ip),
dest_ip=str(vm12_fix.vm_ip),
vrf_id=vm11_vrouter_fixture.get_vrf_id(vn1_fix.vn_fq_name),
metadata_ip=str(vm11_fix.get_local_ip()),
vrouter_fixture=vm11_vrouter_fixture
)
self.logger.info("Total Obtained Flow Count: %d"% (total_flow_count))
self.logger.info("Total Expected Flow Count: %d" % (vn1_max_flows))
#import pdb; pdb.set_trace()
if total_flow_count == vn1_max_flows:
self.logger.info("VN level Max Flows Provisioning is working fine")
assert total_flow_count == vn1_max_flows, "VN level Max Flows Provisioning is not working"
self.waiting_for_flow_timeout()
self.logger.info("Creating 1 more VM on the same VN....")
# Create VMIs
vmi_fixtures_3 = self.setup_vmis(vn_fixtures, vmi=vmi_3)
# Create VMs
vm_fixtures_3 = self.setup_vms(vn_fixtures, vmi_fixtures_3, vm=vm_3, image_name='ubuntu-traffic')
vm13_fix = vm_fixtures_3['vm13']
vm13_vrouter_fixture = ComputeNodeFixture(self.connections, vm13_fix.vm_node_ip)
self.logger.info("Sleeping for dns/metadata flows to timeout 180 seconds...")
self.waiting_for_flow_timeout()
# Verify Max_flows functionality on VN level
#import pdb; pdb.set_trace()
send_flow_count = self.send_traffic(
src=str(vm13_fix.vm_ip),
dst=str(vm12_fix.vm_ip),
max_flows=vn1_max_flows,
vm_fix=vm13_fix
)
total_flow_count = self.get_total_flow_count(
source_ip=str(vm13_fix.vm_ip),
dest_ip=str(vm12_fix.vm_ip),
vrf_id=vm13_vrouter_fixture.get_vrf_id(vn1_fix.vn_fq_name),
metadata_ip=str(vm13_fix.get_local_ip()),
vrouter_fixture=vm13_vrouter_fixture
)
self.logger.info("Total Obtained Flow Count: %d"% (total_flow_count))
self.logger.info("Total Expected Flow Count: %d" % (vn1_max_flows))
#import pdb; pdb.set_trace()
if total_flow_count == vn1_max_flows:
self.logger.info("VN level Max Flows Provisioning is working fine")
assert total_flow_count == vn1_max_flows, "VN level Max Flows Provisioning is not working"
@preposttest_wrapper
def test_drop_new_flows_flag(self):
'''
Description:
Verify the drop_new_flows flag is set once the flow count exceeds max_flows
and is cleared after the flow count falls below 90% of max_flows
Test steps:
1.Create a virtual network (vn1)
2.Launch VMs (vm11, vm12) on vn1 network
3.Configure max_flows as 1000 @ VMI (vmi11)
4.Send traffic (2000 flows) between vm11 and vm12
5.It should allow only 1000 flows and drop_new_flows should be set (true)
6.Keep sending 90% of flows every 60 seconds
7.Wait for flow timeout to happen
8.drop_new_flows flag should be cleared (false) once flows reduce below 90%
Pass criteria:
drop_new_flows flag should be set when the flow count exceeds max_flows
and cleared once the number of flows reduces to <=90% of max_flows
Maintainer : mmohan@juniper.net
'''
vn = {'count':1, # VN count
'vn1':{'subnet':'21.0.0.0/24'},
}
vmi = {'count':2, # VMI Count
'vmi11':{'vn': 'vn1'}, # VMI details
'vmi12':{'vn': 'vn1'}, # VMI details
}
# Input Variables
compute_nodes = self.orch.get_hosts()
assert len(compute_nodes) >= 2 , "Required Minimum 2 Compute Nodes"
vm = {'count':2,
'launch_mode':'distribute',
'vm11':{'vn':['vn1'], 'vmi':['vmi11'],'node': compute_nodes[0]}, # VM Details
'vm12':{'vn':['vn1'], 'vmi':['vmi12'],'node': compute_nodes[1]}, # VM Details
}
# Create Virtual Networks (VNs)
vn_fixtures = self.setup_vns(vn=vn)
vn1_fix = vn_fixtures['vn1']
#import pdb; pdb.set_trace()
# Create VMIs
vmi_fixtures = self.setup_vmis(vn_fixtures, vmi=vmi)
vmi11_fix = vmi_fixtures['vmi11']
vmi12_fix = vmi_fixtures['vmi12']
#import pdb; pdb.set_trace()
# Setting MAX Flows only on VMI 11
vmi11_max_flows = 1000
vmi11_fix.set_max_flows(max_flows=vmi11_max_flows)
# Create VMs
vm_fixtures = self.setup_vms(vn_fixtures, vmi_fixtures, vm=vm, image_name='ubuntu-traffic')
vm11_fix = vm_fixtures['vm11']
vm12_fix = vm_fixtures['vm12']
# Creating ComputeNode/Vrouter Fixtures
vm11_vrouter_fixture = ComputeNodeFixture(self.connections, vm11_fix.vm_node_ip)
vm12_vrouter_fixture = ComputeNodeFixture(self.connections, vm12_fix.vm_node_ip)
self.logger.info("Verify Traffic within VMs in VN-1")
send_vm_fixture = vm11_fix
recv_vm_fixture = vm12_fix
traffic_result = self.verify_traffic(sender_vm=send_vm_fixture, receiver_vm=recv_vm_fixture,
proto='udp', sport=1500, dport=10001, count=100)
self.logger.info("Traffic Tx-Pkts: %d Rx-Pkts: %d" % (traffic_result[0],traffic_result[1]))
assert traffic_result[0] == traffic_result[1], "Basic Traffic Validation Failed between VMs ( VN-1)"
#import pdb; pdb.set_trace()
vm11_inspect = self.agent_inspect[vm11_fix.vm_node_ip]
vm11_tap_intf = vm11_inspect.get_vna_tap_interface_by_ip(vm11_fix.vm_ip)
self.logger.info("drop_new_flows flag values @ vifs before sending traffics...")
vm11_drop_new_flows = vm11_inspect.get_vna_tap_interface_by_ip(vm11_fix.vm_ip)[0]['drop_new_flows']
self.logger.info("drop_new_flows flag value @ vm11: %s " % (vm11_drop_new_flows))
if vm11_drop_new_flows != 'false':
assert False, "drop_new_flows flag is set even before sending traffics..."
self.logger.info("Sleeping for dns/meta flows timeout seconds...")
self.waiting_for_flow_timeout()
# Verify Max_flows functionality on VMI level
#import pdb; pdb.set_trace()
send_flow_count_vm11 = self.send_traffic(
src=str(vm11_fix.vm_ip),
dst=str(vm12_fix.vm_ip),
max_flows=vmi11_max_flows,
vm_fix=vm11_fix
)
total_flow_count_vm11 = self.get_total_flow_count(
source_ip=str(vm11_fix.vm_ip),
dest_ip=str(vm12_fix.vm_ip),
vrf_id=vm11_vrouter_fixture.get_vrf_id(vn1_fix.vn_fq_name),
metadata_ip=str(vm11_fix.get_local_ip()),
vrouter_fixture=vm11_vrouter_fixture
)
self.logger.info("drop_new_flows flag values @ vifs after max_flows exceeds..")
vm11_drop_new_flows = vm11_inspect.get_vna_tap_interface_by_ip(vm11_fix.vm_ip)[0]['drop_new_flows']
self.logger.info("drop_new_flows flag value @ vm11: %s " % (vm11_drop_new_flows))
if vm11_drop_new_flows != 'true':
assert False, "drop_new_flows flag is NOT set even after max_flows execeeded.."
self.logger.info("Total Obtained Flow Count @ vm11: %d"% (total_flow_count_vm11))
self.logger.info("Total Expected Flow Count @ vm11: %d" % (vmi11_max_flows))
#import pdb; pdb.set_trace()
if total_flow_count_vm11 == vmi11_max_flows:
self.logger.info("VMI level (vm11) Max Flows Provisioning is working fine")
assert total_flow_count_vm11 == vmi11_max_flows, "VMI (vm11) level Provisioning is not working"
vmi11_max_flows_90_percentage = int(vmi11_max_flows*0.9)-20
flow_timeout = self.DEFAULT_FLOW_TIMEOUT
while (flow_timeout > 0):
if flow_timeout < 60:
time.sleep(flow_timeout)
flow_timeout = 0
else:
time.sleep(60)
flow_timeout = flow_timeout - 60
send_flow_count_vm11 = self.send_traffic(
src=str(vm11_fix.vm_ip),
dst=str(vm12_fix.vm_ip),
max_flows=old_div(vmi11_max_flows_90_percentage,2),
vm_fix=vm11_fix
)
total_flow_count_vm11 = self.get_total_flow_count(
source_ip=str(vm11_fix.vm_ip),
dest_ip=str(vm12_fix.vm_ip),
vrf_id=vm11_vrouter_fixture.get_vrf_id(vn1_fix.vn_fq_name),
metadata_ip=str(vm11_fix.get_local_ip()),
vrouter_fixture=vm11_vrouter_fixture
)
self.logger.info("Total Obtained Flow Count @ vm11: %d"% (total_flow_count_vm11))
self.logger.info("Total Expected Flow Count @ vm11: %d" % (vmi11_max_flows_90_percentage))
#import pdb; pdb.set_trace()
vmi11_max_flows_90_percentage_high = vmi11_max_flows_90_percentage + 20
vmi11_max_flows_90_percentage_low = vmi11_max_flows_90_percentage - 20
if total_flow_count_vm11 <= vmi11_max_flows_90_percentage_high and total_flow_count_vm11 >= vmi11_max_flows_90_percentage_low:
self.logger.info("VMI level (vm11) Max Flows Provisioning is working fine")
else:
assert False, "VMI (vm11) level Provisioning is not working"
self.logger.info("drop_new_flows flag values @ vifs after flowcount to reduces < 90% of max_flows")
vm11_drop_new_flows = vm11_inspect.get_vna_tap_interface_by_ip(vm11_fix.vm_ip)[0]['drop_new_flows']
self.logger.info("drop_new_flows flag value @ vm11: %s " % (vm11_drop_new_flows))
if vm11_drop_new_flows != 'false':
assert False, "drop_new_flows flag is set even after flowcount to reduces < 90% of max_flows"
self.waiting_for_flow_timeout()
self.logger.info("drop_new_flows flag values @ vifs after all flows timed out...")
vm11_drop_new_flows = vm11_inspect.get_vna_tap_interface_by_ip(vm11_fix.vm_ip)[0]['drop_new_flows']
self.logger.info("drop_new_flows flag value @ vm11: %s " % (vm11_drop_new_flows))
if vm11_drop_new_flows != 'false':
assert False, "drop_new_flows flag is set even after all flows timedout..."
@preposttest_wrapper
def test_dropstats(self):
'''
Description:
Verify VN max_flows with values as -1
Verify VMI max_flows with values as -1
Verify vrouter drop_stats are incremented properly after exceeding max_flows limit
Verify clearing dropstats counter
Test steps:
1.Create a virtual network (vn1)
2.Launch VMs (vm11, vm12) on vn1 network
3.Try configuring max_flows as -1 @ VN level
4.Try configuring max_flows as -1 @ VMI level
5.Configure VMI level max_flows as 1000 @ vmi11
6.Send traffic (1000 flows) between vm11 and vm12
7.Verify dropstats (New Flow Drops); it should not be incremented
8.Send traffic (2000 flows) between vm11 and vm12
9.Verify dropstats (New Flow Drops); it should be incremented
10.Clear dropstats values using dropstats --clear
11.Verify the counter (New Flow Drops) value becomes 0
Pass criteria:
It should NOT allow configuration of negative values (-1)
dropstats(New Flow Drop) should not be incremented when flows are <= max_flows
dropstats(New Flow Drop) should be incremented when flows are > max_flows
Counter value should become 0 after doing clear dropstats
Maintainer : mmohan@juniper.net
'''
vn = {'count':1, # VN count
'vn1':{'subnet':'21.0.0.0/24'},
}
vmi = {'count':2, # VMI Count
'vmi11':{'vn': 'vn1'}, # VMI details
'vmi12':{'vn': 'vn1'}, # VMI details
}
# Input Variables
compute_nodes = self.orch.get_hosts()
assert len(compute_nodes) >= 2 , "Required Minimum 2 Compute Nodes"
vm = {'count':2,
'launch_mode':'distribute',
'vm11':{'vn':['vn1'], 'vmi':['vmi11'],'node': compute_nodes[0]}, # VM Details
'vm12':{'vn':['vn1'], 'vmi':['vmi12'],'node': compute_nodes[1]}, # VM Details
}
# Create Virtual Networks (VNs)
vn_fixtures = self.setup_vns(vn=vn)
vn1_fix = vn_fixtures['vn1']
#import pdb; pdb.set_trace()
# Create VMIs
vmi_fixtures = self.setup_vmis(vn_fixtures, vmi=vmi)
vmi11_fix = vmi_fixtures['vmi11']
vmi12_fix = vmi_fixtures['vmi12']
#import pdb; pdb.set_trace()
# Setting MAX Flows only on VMI 11
# Try configuring negative value @ VN level ...
self.logger.info("Try Configuring negative value @ VN level ...")
try:
vn1_fix.set_max_flows(max_flows=-1)
except Exception as exp :
self.logger.info(str(exp))
self.logger.info("Not able to configure negative value (-1) @ VN level max_flows")
else:
assert False, self.logger.info("Able to configure negative value (-1) @ VN level max_flows")
self.logger.info("Try Configuring negative value @ VMI level ...")
try:
vmi11_fix.set_max_flows(max_flows=-1)
except Exception as exp :
self.logger.info(str(exp))
self.logger.info("Not able to configure negative value (-1) @ VMI level max_flows")
else:
assert False, self.logger.info("Able to configure negative value (-1) @ VMI level max_flows")
vmi11_max_flows = 1000
vmi11_fix.set_max_flows(max_flows=vmi11_max_flows)
# Create VMs
vm_fixtures = self.setup_vms(vn_fixtures, vmi_fixtures, vm=vm, image_name='ubuntu-traffic')
vm11_fix = vm_fixtures['vm11']
vm12_fix = vm_fixtures['vm12']
# Creating ComputeNode/Vrouter Fixtures
vm11_vrouter_fixture = ComputeNodeFixture(self.connections, vm11_fix.vm_node_ip)
vm12_vrouter_fixture = ComputeNodeFixture(self.connections, vm12_fix.vm_node_ip)
self.logger.info("Verify Traffic within VMs in VN-1")
send_vm_fixture = vm11_fix
recv_vm_fixture = vm12_fix
traffic_result = self.verify_traffic(sender_vm=send_vm_fixture, receiver_vm=recv_vm_fixture,
proto='udp', sport=1500, dport=10001, count=100)
self.logger.info("Traffic Tx-Pkts: %d Rx-Pkts: %d" % (traffic_result[0],traffic_result[1]))
assert traffic_result[0] == traffic_result[1], "Basic Traffic Validation Failed between VMs ( VN-1)"
vm11_inspect = self.agent_inspect[vm11_fix.vm_node_ip]
vm11_tap_intf = vm11_inspect.get_vna_tap_interface_by_ip(vm11_fix.vm_ip)
#import pdb; pdb.set_trace()
vm11_dropstats = vm11_inspect.get_agent_vrouter_drop_stats()
vm11_drop_new_flow_before = int(vm11_dropstats['ds_drop_new_flow'])
self.logger.info("New Flow Drops stats value before traffic @ %s : %d" % (vm11_fix.vm_node_ip,vm11_drop_new_flow_before))
self.logger.info("drop_new_flows flag values @ vifs before sending traffics...")
vm11_drop_new_flows = vm11_inspect.get_vna_tap_interface_by_ip(vm11_fix.vm_ip)[0]['drop_new_flows']
self.logger.info("drop_new_flows flag value @ vm11: %s " % (vm11_drop_new_flows))
if vm11_drop_new_flows != 'false':
assert False, "drop_new_flows flag is set even before sending traffics..."
self.logger.info("Sleeping for dns/meta flows timeout seconds...")
self.waiting_for_flow_timeout()
vmi11_max_flows_exact = (old_div(vmi11_max_flows,2))-10
# Verify Max_flows functionality on VMI level
#import pdb; pdb.set_trace()
send_flow_count_vm11 = self.send_traffic(
src=str(vm11_fix.vm_ip),
dst=str(vm12_fix.vm_ip),
max_flows=vmi11_max_flows_exact,
vm_fix=vm11_fix
)
total_flow_count_vm11 = self.get_total_flow_count(
source_ip=str(vm11_fix.vm_ip),
dest_ip=str(vm12_fix.vm_ip),
vrf_id=vm11_vrouter_fixture.get_vrf_id(vn1_fix.vn_fq_name),
metadata_ip=str(vm11_fix.get_local_ip()),
vrouter_fixture=vm11_vrouter_fixture
)
self.logger.info("drop_new_flows flag values @ vifs after max_flows exceeds..")
vm11_drop_new_flows = vm11_inspect.get_vna_tap_interface_by_ip(vm11_fix.vm_ip)[0]['drop_new_flows']
self.logger.info("drop_new_flows flag value @ vm11: %s " % (vm11_drop_new_flows))
if vm11_drop_new_flows != 'false':
assert False, "drop_new_flows flag is set even before max_flows execeeded.."
self.logger.info("Total Obtained Flow Count @ vm11: %d"% (total_flow_count_vm11))
self.logger.info("Total Expected Flow Count +/- 10 @ vm11: %d" % (vmi11_max_flows_exact*2))
#import pdb; pdb.set_trace()
vmi11_max_flows_high = (vmi11_max_flows_exact*2) + 20
vmi11_max_flows_low = (vmi11_max_flows_exact*2) - 20
if total_flow_count_vm11 <= vmi11_max_flows_high and total_flow_count_vm11 >= vmi11_max_flows_low:
self.logger.info("VMI level (vm11) Max Flows Provisioning is working fine")
else:
assert False, "VMI (vm11) level Provisioning is not working"
vm11_dropstats = vm11_inspect.get_agent_vrouter_drop_stats()
vm11_drop_new_flow_after = int(vm11_dropstats['ds_drop_new_flow'])
self.logger.info("New Flow Drops stats value after traffic @ %s : %d" % (vm11_fix.vm_node_ip,vm11_drop_new_flow_after))
if vm11_drop_new_flow_before == vm11_drop_new_flow_after:
self.logger.info("Dropstats for 'New Flow Drops' is not incremented when num of flows <= max_flows")
else:
assert False, "Dropstats for 'New Flow Drops' is incremented when num of flows <= max_flows"
vm11_dropstats = vm11_inspect.get_agent_vrouter_drop_stats()
vm11_drop_new_flow_before = int(vm11_dropstats['ds_drop_new_flow'])
self.logger.info("New Flow Drops stats value before traffic @ %s : %d" % (vm11_fix.vm_node_ip,vm11_drop_new_flow_before))
send_flow_count_vm11 = self.send_traffic(
src=str(vm11_fix.vm_ip),
dst=str(vm12_fix.vm_ip),
max_flows=vmi11_max_flows,
vm_fix=vm11_fix
)
total_flow_count_vm11 = self.get_total_flow_count(
source_ip=str(vm11_fix.vm_ip),
dest_ip=str(vm12_fix.vm_ip),
vrf_id=vm11_vrouter_fixture.get_vrf_id(vn1_fix.vn_fq_name),
metadata_ip=str(vm11_fix.get_local_ip()),
vrouter_fixture=vm11_vrouter_fixture
)
self.logger.info("Total Obtained Flow Count @ vm11: %d"% (total_flow_count_vm11))
self.logger.info("Total Expected Flow Count @ vm11: %d" % (vmi11_max_flows))
#import pdb; pdb.set_trace()
if total_flow_count_vm11 == vmi11_max_flows:
self.logger.info("VMI level (vm11) Max Flows Provisioning is working fine")
else:
assert False, "VMI (vm11) level Provisioning is not working"
self.logger.info("drop_new_flows flag values @ vifs after max_flows exceeds..")
vm11_drop_new_flows = vm11_inspect.get_vna_tap_interface_by_ip(vm11_fix.vm_ip)[0]['drop_new_flows']
self.logger.info("drop_new_flows flag value @ vm11: %s " % (vm11_drop_new_flows))
if vm11_drop_new_flows != 'true':
assert False, "drop_new_flows flag is NOT set even after max_flows execeeded.."
expected_drop_high = (old_div(vmi11_max_flows,2)) + 10
expected_drop_low = (old_div(vmi11_max_flows,2)) - 10
for i in range(1,8):
time.sleep(5)
vm11_dropstats = vm11_inspect.get_agent_vrouter_drop_stats()
vm11_drop_new_flow_after = int(vm11_dropstats['ds_drop_new_flow'])
vm11_drop_new_flow_diff = vm11_drop_new_flow_after - vm11_drop_new_flow_before
if vm11_drop_new_flow_diff >= expected_drop_low and vm11_drop_new_flow_diff <= expected_drop_high:
self.logger.info("New Flow Drops stats value detected after seconds %d" % (5*i))
break
self.logger.info("New Flow Drops stats value after traffic @ %s : %d" % (vm11_fix.vm_node_ip,vm11_drop_new_flow_after))
self.logger.info("New Flow Drops stats value difference (dropped pkts) @ %s : %d" % (vm11_fix.vm_node_ip,vm11_drop_new_flow_diff))
if vm11_drop_new_flow_diff >= expected_drop_low and vm11_drop_new_flow_diff <= expected_drop_high:
self.logger.info("Dropstats for 'New Flow Drops' is incremented when num of flows execeeds max_flows")
else:
assert False, "Dropstats for 'New Flow Drops' is NOT incremented when num of flows exceeds max_flows"
# Verify clearing drop stats
vm11_dropstats = vm11_inspect.get_agent_vrouter_drop_stats()
vm11_drop_new_flow_before = int(vm11_dropstats['ds_drop_new_flow'])
self.logger.info("Dropstats value before executing clear command: %d " % (vm11_drop_new_flow_before))
self.inputs.run_cmd_on_server(vm11_fix.vm_node_ip, "contrail-tools dropstats --clear")
for i in range(1,12):
time.sleep(5)
vm11_dropstats = vm11_inspect.get_agent_vrouter_drop_stats()
vm11_drop_new_flow_after = int(vm11_dropstats['ds_drop_new_flow'])
if vm11_drop_new_flow_after <= 10:
self.logger.info("New Flow Drops stats value detected after seconds %d" % (5*i))
break
self.logger.info("Dropstats value before doing clear: %d " % (vm11_drop_new_flow_before))
self.logger.info("Dropstats value after doing clear: %d " % (vm11_drop_new_flow_after))
if vm11_drop_new_flow_after <= 10:
self.logger.info("Dropstats(Drop New Flows) is cleared properly, becomes zero(0) after execting 'dropstats --clear'")
else:
assert False, "Dropstats(Drop New Flows) is NOT cleared properly, even after executing 'dropstats --clear'"
@preposttest_wrapper
def test_restart_vrouter_agent(self):
'''
Description:
Verify VMI level functionality after restart of Vrouter Agent
Test steps:
1.Create a virtual network (vn1)
2.Launch vm11, vm12 and vm13 on vn1 network
3.Configure max_flows as 400 @ vn1, 1000 @ vmi11 and 2000 @ vmi13
4.Verify traffic between the VMs
5.Send traffic from vm11 to vm13; it should allow only 1000 flows (vmi11 value)
6.Send traffic from vm12 to vm13; it should allow only 400 flows (VN level value)
7.Restart the vrouter agent on both compute nodes and repeat steps 5 and 6
8.Reset max_flows to 0 @ vmi11; traffic from vm11 should now honor the VN level value
9.Restart the vrouter agent again; both VMIs should still honor the VN level value
Pass criteria:
Number of flows should be created as per max_flows configured at VMI/VN level,
both before and after the vrouter agent restarts.
Resetting the VMI level value to 0 should fall back to the VN level value.
Maintainer : mmohan@juniper.net
'''
vn = {'count':1, # VN count
'vn1':{'subnet':'21.0.0.0/24'},
}
vmi = {'count':3, # VMI Count
'vmi11':{'vn': 'vn1'}, # VMI details
'vmi12':{'vn': 'vn1'}, # VMI details
'vmi13':{'vn': 'vn1'}, # VMI details
}
# Input Variables
compute_nodes = self.orch.get_hosts()
assert len(compute_nodes) >= 2 , "Required Minimum 2 Compute Nodes"
vm = {'count':3,
'launch_mode':'distribute',
'vm11':{'vn':['vn1'], 'vmi':['vmi11'],'node': compute_nodes[0]}, # VM Details
'vm12':{'vn':['vn1'], 'vmi':['vmi12'],'node': compute_nodes[1]}, # VM Details
'vm13':{'vn':['vn1'], 'vmi':['vmi13'],'node': compute_nodes[0]}, # VM Details
}
# Create Virtual Networks (VNs)
vn_fixtures = self.setup_vns(vn=vn)
vn1_fix = vn_fixtures['vn1']
#import pdb; pdb.set_trace()
# Create VMIs
vmi_fixtures = self.setup_vmis(vn_fixtures, vmi=vmi)
vmi11_fix = vmi_fixtures['vmi11']
vmi12_fix = vmi_fixtures['vmi12']
vmi13_fix = vmi_fixtures['vmi13']
#import pdb; pdb.set_trace()
# Setting MAX Flows only on VMI 11
vn1_max_flows = 400
vmi11_max_flows = 1000
vmi13_max_flows = 2000
vn1_fix.set_max_flows(max_flows=vn1_max_flows)
vmi11_fix.set_max_flows(max_flows=vmi11_max_flows)
vmi13_fix.set_max_flows(max_flows=vmi13_max_flows)
# Create VMs
vm_fixtures = self.setup_vms(vn_fixtures, vmi_fixtures, vm=vm, image_name='ubuntu-traffic')
vm11_fix = vm_fixtures['vm11']
vm12_fix = vm_fixtures['vm12']
vm13_fix = vm_fixtures['vm13']
# Creating ComputeNode/Vrouter Fixtures
vm11_vrouter_fixture = ComputeNodeFixture(self.connections, vm11_fix.vm_node_ip)
vm12_vrouter_fixture = ComputeNodeFixture(self.connections, vm12_fix.vm_node_ip)
self.logger.info("Verify Traffic within VMs in VN-1")
send_vm_fixture = vm11_fix
recv_vm_fixture = vm12_fix
traffic_result = self.verify_traffic(sender_vm=send_vm_fixture, receiver_vm=recv_vm_fixture,
proto='udp', sport=1500, dport=10001, count=100)
self.logger.info("Traffic Tx-Pkts: %d Rx-Pkts: %d" % (traffic_result[0],traffic_result[1]))
assert traffic_result[0] == traffic_result[1], "Basic Traffic Validation Failed between VMs ( VN-1)"
self.logger.info("Sleeping for dns/meta flows timeout seconds...")
self.waiting_for_flow_timeout()
# Verify Max_flows functionality on VMI level
#import pdb; pdb.set_trace()
send_flow_count = self.send_traffic(
src=str(vm11_fix.vm_ip),
dst=str(vm13_fix.vm_ip),
max_flows=vmi11_max_flows,
vm_fix=vm11_fix
)
total_flow_count = self.get_total_flow_count(
source_ip=str(vm11_fix.vm_ip),
dest_ip=str(vm13_fix.vm_ip),
vrf_id=vm11_vrouter_fixture.get_vrf_id(vn1_fix.vn_fq_name),
metadata_ip=str(vm11_fix.get_local_ip()),
vrouter_fixture=vm11_vrouter_fixture
)
self.logger.info("Total Obtained Flow Count: %d"% (total_flow_count))
self.logger.info("Total Expected Flow Count: %d" % (vmi11_max_flows))
#import pdb; pdb.set_trace()
if total_flow_count == vmi11_max_flows:
self.logger.info("VMI level Max Flows Provisioning is working fine")
assert total_flow_count == vmi11_max_flows, "VMI level Max Flows Provisioning is not working"
# check other VMIs part of that same VN should allow all the flows
send_flow_count = self.send_traffic(
src=str(vm12_fix.vm_ip),
dst=str(vm13_fix.vm_ip),
max_flows=vmi11_max_flows,
vm_fix=vm12_fix
)
total_flow_count = self.get_total_flow_count(
source_ip=str(vm12_fix.vm_ip),
dest_ip=str(vm13_fix.vm_ip),
vrf_id=vm11_vrouter_fixture.get_vrf_id(vn1_fix.vn_fq_name),
metadata_ip=str(vm12_fix.get_local_ip()),
vrouter_fixture=vm12_vrouter_fixture
)
self.logger.info("Total Obtained Flow Count: %d"% (total_flow_count))
self.logger.info("Total Expected Flow Count: %s" % ('Not VMI level Configured - Should use VN level value'))
self.logger.info("Total Expected Flow Count: %d" % (vn1_max_flows))
# import pdb; pdb.set_trace()
if total_flow_count == vn1_max_flows:
self.logger.info("No VMI level configuration - VN level max flows is used correctly")
assert total_flow_count == vn1_max_flows, "Other VMI ( VMI 12) are not taking VN level value"
self.logger.info("Restarting Vrouter Agent...")
self.inputs.restart_service('contrail-vrouter-agent', [vm11_fix.vm_node_ip], container='agent', verify_service=True)
self.inputs.restart_service('contrail-vrouter-agent', [vm12_fix.vm_node_ip], container='agent', verify_service=True)
self.logger.info("After Agent restart, Sleeping for 180 secs...")
time.sleep(180)
# Verify Max_flows functionality on VMI level
#import pdb; pdb.set_trace()
send_flow_count = self.send_traffic(
src=str(vm11_fix.vm_ip),
dst=str(vm13_fix.vm_ip),
max_flows=vmi11_max_flows,
vm_fix=vm11_fix
)
total_flow_count = self.get_total_flow_count(
source_ip=str(vm11_fix.vm_ip),
dest_ip=str(vm13_fix.vm_ip),
vrf_id=vm11_vrouter_fixture.get_vrf_id(vn1_fix.vn_fq_name),
metadata_ip=str(vm11_fix.get_local_ip()),
vrouter_fixture=vm11_vrouter_fixture
)
self.logger.info("Total Obtained Flow Count: %d"% (total_flow_count))
self.logger.info("Total Expected Flow Count: %d" % (vmi11_max_flows))
#import pdb; pdb.set_trace()
if total_flow_count == vmi11_max_flows:
self.logger.info("After Agrent restarting, VMI level Max Flows Provisioning is working fine")
assert total_flow_count == vmi11_max_flows, "After Agenr restart, VMI level Max Flows Provisioning is not working"
# check other VMIs part of that same VN should allow all the flows
send_flow_count = self.send_traffic(
src=str(vm12_fix.vm_ip),
dst=str(vm13_fix.vm_ip),
max_flows=vmi11_max_flows,
vm_fix=vm12_fix
)
total_flow_count = self.get_total_flow_count(
source_ip=str(vm12_fix.vm_ip),
dest_ip=str(vm13_fix.vm_ip),
vrf_id=vm11_vrouter_fixture.get_vrf_id(vn1_fix.vn_fq_name),
metadata_ip=str(vm12_fix.get_local_ip()),
vrouter_fixture=vm12_vrouter_fixture
)
self.logger.info("Total Obtained Flow Count: %d"% (total_flow_count))
self.logger.info("Total Expected Flow Count: %s" % ('Not VMI level Configured - Should use VN level value'))
self.logger.info("Total Expected Flow Count: %d" % (vn1_max_flows))
# import pdb; pdb.set_trace()
if total_flow_count == vn1_max_flows:
self.logger.info("After Agenr restart, No VMI level configuration - VN level max flows is used correctly")
assert total_flow_count == vn1_max_flows, "After Agrent Restart, Other VMI ( VMI 12) are not taking VN level value"
self.logger.info("Sleeping for dns/meta flows timeout seconds...")
self.waiting_for_flow_timeout()
# Reset the VN level Max flows to default value (0)
vmi11_fix.set_max_flows(max_flows=0)
send_flow_count = self.send_traffic(
src=str(vm11_fix.vm_ip),
dst=str(vm13_fix.vm_ip),
max_flows=vmi11_max_flows,
vm_fix=vm11_fix
)
total_flow_count = self.get_total_flow_count(
source_ip=str(vm11_fix.vm_ip),
dest_ip=str(vm13_fix.vm_ip),
vrf_id=vm11_vrouter_fixture.get_vrf_id(vn1_fix.vn_fq_name),
metadata_ip=str(vm11_fix.get_local_ip()),
vrouter_fixture=vm11_vrouter_fixture
)
self.logger.info("Total Obtained Flow Count: %d"% (total_flow_count))
self.logger.info("Total Expected Flow Count: %s" % ('VMI level Deleted - Should use VN level'))
self.logger.info("Total Expected Flow Count: %d" % (vn1_max_flows))
if total_flow_count == vn1_max_flows:
self.logger.info("VMI level deleted properly and it uses VN level Value")
assert total_flow_count == vn1_max_flows, "VMI level deleted properly and it uses VN level Value"
self.logger.info("Restarting Vrouter Agent...")
self.inputs.restart_service('contrail-vrouter-agent', [vm11_fix.vm_node_ip], container='agent', verify_service=True)
self.inputs.restart_service('contrail-vrouter-agent', [vm12_fix.vm_node_ip], container='agent', verify_service=True)
self.logger.info("After Agent restart, Sleeping for 180 secs...")
time.sleep(180)
send_flow_count = self.send_traffic(
src=str(vm11_fix.vm_ip),
dst=str(vm13_fix.vm_ip),
max_flows=vmi11_max_flows,
vm_fix=vm11_fix
)
total_flow_count = self.get_total_flow_count(
source_ip=str(vm11_fix.vm_ip),
dest_ip=str(vm13_fix.vm_ip),
vrf_id=vm11_vrouter_fixture.get_vrf_id(vn1_fix.vn_fq_name),
metadata_ip=str(vm11_fix.get_local_ip()),
vrouter_fixture=vm11_vrouter_fixture
)
self.logger.info("Total Obtained Flow Count: %d"% (total_flow_count))
self.logger.info("Total Expected Flow Count: %s" % ('VMI level set as 0 - Should use VN level'))
self.logger.info("Total Expected Flow Count: %d" % (vn1_max_flows))
if total_flow_count == vn1_max_flows:
self.logger.info("After Vrouter Agent Restart : VMI level set as 0 and it uses VN level Value")
assert total_flow_count == vn1_max_flows, "After Vrouter Agent Restart : It should use VN level Value"
send_flow_count = self.send_traffic(
src=str(vm12_fix.vm_ip),
dst=str(vm13_fix.vm_ip),
max_flows=vmi11_max_flows,
vm_fix=vm12_fix
)
total_flow_count = self.get_total_flow_count(
source_ip=str(vm12_fix.vm_ip),
dest_ip=str(vm13_fix.vm_ip),
vrf_id=vm11_vrouter_fixture.get_vrf_id(vn1_fix.vn_fq_name),
metadata_ip=str(vm12_fix.get_local_ip()),
vrouter_fixture=vm12_vrouter_fixture
)
self.logger.info("Total Obtained Flow Count: %d"% (total_flow_count))
self.logger.info("Total Expected Flow Count: %s" % ('Not VMI level Configured - Should use VN level value'))
self.logger.info("Total Expected Flow Count: %d" % (vn1_max_flows))
# import pdb; pdb.set_trace()
if total_flow_count == vn1_max_flows:
self.logger.info("After Vrouter-Agent Restart: No VMI level configuration - VN level max flows is used correctly")
assert total_flow_count == vn1_max_flows, "After Agent restart, Other VMI ( VMI 12) are not taking VN level value"
@preposttest_wrapper
def test_restart_vm(self):
'''
Description:
Verify VMI level functionality after restart of VM
Test steps:
1.Create a virtual network (vn1)
2.Launch vm11, vm12 and vm13 on the vn1 network
3.Configure max_flows as 400 @ vn1, 1000 @ vmi11 and 2000 @ vmi13
4.Verify traffic between the VMs
5.Send traffic from vm11 to vm13; it should allow only 1000 flows (vmi11 value)
6.Send traffic from vm12 to vm13; it should allow only 400 flows (VN level value)
7.Reboot vm11 and vm12 and wait for them to come back up
8.Repeat steps 5 and 6; the same limits should still apply
Pass criteria:
Number of flows should be created as per max_flows configured at VMI/VN level,
both before and after the VM restarts.
Maintainer : mmohan@juniper.net
'''
vn = {'count':1, # VN count
'vn1':{'subnet':'21.0.0.0/24'},
}
vmi = {'count':3, # VMI Count
'vmi11':{'vn': 'vn1'}, # VMI details
'vmi12':{'vn': 'vn1'}, # VMI details
'vmi13':{'vn': 'vn1'}, # VMI details
}
# Input Variables
compute_nodes = self.orch.get_hosts()
assert len(compute_nodes) >= 2 , "Required Minimum 2 Compute Nodes"
vm = {'count':3,
'launch_mode':'distribute',
'vm11':{'vn':['vn1'], 'vmi':['vmi11'],'node': compute_nodes[0]}, # VM Details
'vm12':{'vn':['vn1'], 'vmi':['vmi12'],'node': compute_nodes[1]}, # VM Details
'vm13':{'vn':['vn1'], 'vmi':['vmi13'],'node': compute_nodes[0]}, # VM Details
}
# Create Virtual Networks (VNs)
vn_fixtures = self.setup_vns(vn=vn)
vn1_fix = vn_fixtures['vn1']
#import pdb; pdb.set_trace()
# Create VMIs
vmi_fixtures = self.setup_vmis(vn_fixtures, vmi=vmi)
vmi11_fix = vmi_fixtures['vmi11']
vmi12_fix = vmi_fixtures['vmi12']
vmi13_fix = vmi_fixtures['vmi13']
#import pdb; pdb.set_trace()
# Setting MAX Flows only on VMI 11
vn1_max_flows = 400
vmi11_max_flows = 1000
vmi13_max_flows = 2000
vn1_fix.set_max_flows(max_flows=vn1_max_flows)
vmi11_fix.set_max_flows(max_flows=vmi11_max_flows)
vmi13_fix.set_max_flows(max_flows=vmi13_max_flows)
# Create VMs
vm_fixtures = self.setup_vms(vn_fixtures, vmi_fixtures, vm=vm, image_name='ubuntu-traffic')
vm11_fix = vm_fixtures['vm11']
vm12_fix = vm_fixtures['vm12']
vm13_fix = vm_fixtures['vm13']
# Creating ComputeNode/Vrouter Fixtures
vm11_vrouter_fixture = ComputeNodeFixture(self.connections, vm11_fix.vm_node_ip)
vm12_vrouter_fixture = ComputeNodeFixture(self.connections, vm12_fix.vm_node_ip)
self.logger.info("Verify Traffic within VMs in VN-1")
send_vm_fixture = vm11_fix
recv_vm_fixture = vm12_fix
traffic_result = self.verify_traffic(sender_vm=send_vm_fixture, receiver_vm=recv_vm_fixture,
proto='udp', sport=1500, dport=10001, count=100)
self.logger.info("Traffic Tx-Pkts: %d Rx-Pkts: %d" % (traffic_result[0],traffic_result[1]))
assert traffic_result[0] == traffic_result[1], "Basic Traffic Validation Failed between VMs ( VN-1)"
self.logger.info("Sleeping for dns/meta flows timeout seconds...")
self.waiting_for_flow_timeout()
# Verify Max_flows functionality on VMI level
#import pdb; pdb.set_trace()
send_flow_count = self.send_traffic(
src=str(vm11_fix.vm_ip),
dst=str(vm13_fix.vm_ip),
max_flows=vmi11_max_flows,
vm_fix=vm11_fix
)
total_flow_count = self.get_total_flow_count(
source_ip=str(vm11_fix.vm_ip),
dest_ip=str(vm13_fix.vm_ip),
vrf_id=vm11_vrouter_fixture.get_vrf_id(vn1_fix.vn_fq_name),
metadata_ip=str(vm11_fix.get_local_ip()),
vrouter_fixture=vm11_vrouter_fixture
)
self.logger.info("Total Obtained Flow Count: %d"% (total_flow_count))
self.logger.info("Total Expected Flow Count: %d" % (vmi11_max_flows))
#import pdb; pdb.set_trace()
if total_flow_count == vmi11_max_flows:
self.logger.info("VMI level Max Flows Provisioning is working fine")
assert total_flow_count == vmi11_max_flows, "VMI level Max Flows Provisioning is not working"
# check other VMIs part of that same VN should allow all the flows
send_flow_count = self.send_traffic(
src=str(vm12_fix.vm_ip),
dst=str(vm13_fix.vm_ip),
max_flows=vmi11_max_flows,
vm_fix=vm12_fix
)
total_flow_count = self.get_total_flow_count(
source_ip=str(vm12_fix.vm_ip),
dest_ip=str(vm13_fix.vm_ip),
vrf_id=vm11_vrouter_fixture.get_vrf_id(vn1_fix.vn_fq_name),
metadata_ip=str(vm12_fix.get_local_ip()),
vrouter_fixture=vm12_vrouter_fixture
)
self.logger.info("Total Obtained Flow Count: %d"% (total_flow_count))
self.logger.info("Total Expected Flow Count: %s" % ('Not VMI level Configured - Should use VN level value'))
self.logger.info("Total Expected Flow Count: %d" % (vn1_max_flows))
# import pdb; pdb.set_trace()
if total_flow_count == vn1_max_flows:
self.logger.info("No VMI level configuration - VN level max flows is used correctly")
assert total_flow_count == vn1_max_flows, "Other VMI ( VMI 12) are not taking VN level value"
self.logger.info("Restarting Virtual Machines (vm11 and vm12)...")
# Restart the VM here
self.logger.info('Rebooting the VMs...')
cmd_to_reboot_vm = ['sudo reboot']
vm11_fix.run_cmd_on_vm(cmds=cmd_to_reboot_vm)
vm12_fix.run_cmd_on_vm(cmds=cmd_to_reboot_vm)
vm11_fix.wait_till_vm_boots()
vm12_fix.wait_till_vm_boots()
assert vm11_fix.verify_on_setup()
assert vm12_fix.verify_on_setup()
self.logger.info("After VM restart, Sleeping for 180 secs...")
#self.waiting_for_flow_timeout()
time.sleep(240)
# Verify Max_flows functionality on VMI level
#import pdb; pdb.set_trace()
send_flow_count = self.send_traffic(
src=str(vm11_fix.vm_ip),
dst=str(vm13_fix.vm_ip),
max_flows=vmi11_max_flows,
vm_fix=vm11_fix
)
total_flow_count = self.get_total_flow_count(
source_ip=str(vm11_fix.vm_ip),
dest_ip=str(vm13_fix.vm_ip),
vrf_id=vm11_vrouter_fixture.get_vrf_id(vn1_fix.vn_fq_name),
metadata_ip=str(vm11_fix.get_local_ip()),
vrouter_fixture=vm11_vrouter_fixture
)
self.logger.info("Total Obtained Flow Count: %d"% (total_flow_count))
self.logger.info("Total Expected Flow Count: %d" % (vmi11_max_flows))
#import pdb; pdb.set_trace()
if total_flow_count == vmi11_max_flows:
self.logger.info("After VM restarting, VMI level Max Flows Provisioning is working fine")
assert total_flow_count == vmi11_max_flows, "After VM restart, VMI level Max Flows Provisioning is not working"
# check other VMIs part of that same VN should allow all the flows
send_flow_count = self.send_traffic(
src=str(vm12_fix.vm_ip),
dst=str(vm13_fix.vm_ip),
max_flows=vmi11_max_flows,
vm_fix=vm12_fix
)
total_flow_count = self.get_total_flow_count(
source_ip=str(vm12_fix.vm_ip),
dest_ip=str(vm13_fix.vm_ip),
vrf_id=vm11_vrouter_fixture.get_vrf_id(vn1_fix.vn_fq_name),
metadata_ip=str(vm12_fix.get_local_ip()),
vrouter_fixture=vm12_vrouter_fixture
)
self.logger.info("Total Obtained Flow Count: %d"% (total_flow_count))
self.logger.info("Total Expected Flow Count: %s" % ('Not VMI level Configured - Should use VN level value'))
self.logger.info("Total Expected Flow Count: %d" % (vn1_max_flows))
# import pdb; pdb.set_trace()
if total_flow_count == vn1_max_flows:
self.logger.info("After VM restart, No VMI level configuration - VN level max flows is used correctly")
assert total_flow_count == vn1_max_flows, "After VM Restart, Other VMI ( VMI 12) are not taking VN level value"
| 45.624484 | 139 | 0.625283 | 13,603 | 99,507 | 4.26994 | 0.030508 | 0.064734 | 0.055678 | 0.030749 | 0.920598 | 0.905258 | 0.890676 | 0.875801 | 0.860771 | 0.847893 | 0 | 0.049247 | 0.280061 | 99,507 | 2,180 | 140 | 45.645413 | 0.761541 | 0.139749 | 0 | 0.758076 | 0 | 0.000687 | 0.196735 | 0.001048 | 0 | 0 | 0 | 0 | 0.053608 | 1 | 0.010309 | false | 0.000687 | 0.009622 | 0 | 0.023368 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
6556eab5fdb78d3a340de1deb1adf3d5c39a8d29 | 20,302 | py | Python | Continual/datasets/task_sampler.py | huang50213/AIM-Fewshot-Continual | 89a7479b21d32b49e0789c6db922a37328ea95f5 | [
"MIT"
] | 17 | 2021-07-23T06:16:54.000Z | 2022-03-22T06:35:07.000Z | Continual/datasets/task_sampler.py | huang50213/AIM-Fewshot-Continual | 89a7479b21d32b49e0789c6db922a37328ea95f5 | [
"MIT"
] | 1 | 2022-03-12T16:26:27.000Z | 2022-03-12T16:26:27.000Z | Continual/datasets/task_sampler.py | huang50213/AIM-Fewshot-Continual | 89a7479b21d32b49e0789c6db922a37328ea95f5 | [
"MIT"
] | 7 | 2021-08-21T11:28:32.000Z | 2022-03-04T07:01:48.000Z | import copy
import logging
import numpy as np
import torch
logger = logging.getLogger("experiment")
class SamplerFactory:
def __init__(self):
pass
@staticmethod
def get_sampler(dataset, tasks, trainset, testset=None, capacity=None):
print(dataset)
if "omni" in dataset:
return OmniglotSampler(tasks, trainset, testset)
elif "Sin" == dataset:
return SineSampler(tasks, capacity=capacity)
elif "SinBaseline" in dataset:
# assert(False)
return SineBaselineSampler(tasks, capacity=capacity)
elif "cifar" in dataset:
return CIFAR100Sampler(tasks, trainset, testset)
elif "imagenet" in dataset:
return ImagenetSampler(tasks, trainset, testset)
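# Example usage of the factory above (a sketch; `trainset`/`testset` stand in
# for dataset objects such as torchvision Omniglot splits, and 963 for the
# number of character classes -- both are assumptions for illustration):
#
#   sampler = SamplerFactory.get_sampler("omni", list(range(963)), trainset, testset)
#   iterator = sampler.sample_task([0], train=True)  # DataLoader over class 0
#   random_it = sampler.sample_random()              # DataLoader over a random class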
class SineSampler:
def __init__(self, tasks, capacity):
self.capacity = capacity
self.tasks = tasks
self.task_sampler = SampleSine(capacity)
self.task_sampler.add_complete_iteraetor(self.tasks)
self.sample_batch = self.task_sampler.sample_batch
self.sample_trajectory = self.task_sampler.sample_trajectory
def get_complete_iterator(self):
return self.task_sampler.complete_iterator
def sample_random(self):
return self.task_sampler.get([np.random.choice(self.tasks)])
def filter_upto(self, task):
return self.task_sampler.filter_upto(task)
def sample_task(self, t):
return self.task_sampler.get(t)
def sample_tasks(self, t):
return self.task_sampler.get_task_trainset(t)
class SineBaselineSampler:
def __init__(self, tasks, capacity):
self.capacity = capacity
self.tasks = tasks
self.task_sampler = SampleSineBaseline(capacity)
self.task_sampler.add_complete_iteraetor(self.tasks)
self.sample_batch = self.task_sampler.sample_batch
self.sample_trajectory = self.task_sampler.sample_trajectory
def get_complete_iterator(self):
return self.task_sampler.complete_iterator
def sample_random(self):
return self.task_sampler.get([np.random.choice(self.tasks)])
def filter_upto(self, task):
return self.task_sampler.filter_upto(task)
def sample_task(self, t):
return self.task_sampler.get(t)
def sample_tasks(self, t):
return self.task_sampler.get_task_trainset(t)
class SampleSineBaseline:
def __init__(self, capacity):
self.task_iterators = []
self.iterators = {}
self.capacity = capacity
def add_complete_iteraetor(self, tasks):
pass
def add_task_iterator(self, task):
amplitude = (np.random.rand() + 0.02) * (5)
phase = np.random.rand() * np.pi
decay = np.random.rand() * 0.4
frequency = np.random.rand() * 2 + 1.0
self.iterators[task] = {'id': task, 'phase': phase, 'amplitude': amplitude, 'decay': decay,
'frequency': frequency}
logger.info("Task %d has been added to the list with phase %f and amp %f", task, phase, amplitude)
return self.iterators[task]
def get(self, tasks):
for task in tasks:
if task in self.iterators:
return self.iterators[task]
else:
return self.add_task_iterator(task)
def sample_batch(self, task, task_id, samples=10):
x_samples = np.random.rand(samples) * 10 - 5
x = np.zeros((samples, 11))
x[:, 10] = x_samples
x[:, task_id % 10] = 1
targets = np.zeros((len(x_samples), 2))
targets[:, 0] = task['amplitude'] * np.sin(x_samples + task['phase'])
targets[:, 1] = int(float(task_id) / 10)
return torch.tensor(x).float(), torch.tensor(targets).float()
def sample_trajectory(self, task, len, test=False):
xs = []
ys = []
for t in range(0, len):
x = float(t) / 20
y = task['amplitude'] * np.e ** (-x * task['decay']) * np.sin(
2 * np.pi * x / task['frequency'] + task['phase'])
xs.append(x)
ys.append(y)
return torch.tensor(xs).float(), torch.tensor(ys).float()
class SampleSine:
# Task sampler for the trainset (PyTorch really needs to fix the hard-coded "trainset" variable and replace it with a dictionary that takes "train"/"test" as an argument)
def __init__(self, capacity):
self.task_iterators = []
self.iterators = {}
self.capacity = capacity
def add_complete_iteraetor(self, tasks):
pass
def add_task_iterator(self, task):
amplitude = (np.random.rand() + 0.02) * (5)
phase = np.random.rand() * np.pi
decay = np.random.rand() * 0.4
frequency = np.random.rand() * 2 + 1.0
self.iterators[task] = {'id': task, 'phase': phase, 'amplitude': amplitude, 'decay': decay,
'frequency': frequency}
# logger.info("Task %d has been added to the list with phase %f and amp %f", task, phase, amplitude)
return self.iterators[task]
# def sample_batch(self, tasks):
def get(self, tasks):
for task in tasks:
if task in self.iterators:
return self.iterators[task]
else:
return self.add_task_iterator(task)
def sample_batch(self, task, task_id, samples=10):
x_samples = np.random.rand(samples) * 10 - 5
# x holds a one-hot task id plus the raw input value in the last column
x = np.zeros((samples, self.capacity))
x[:, self.capacity - 1] = x_samples
assert (task_id <= self.capacity - 1)
x[:, task_id] = 1
targets = np.zeros((len(x_samples), 2))
targets[:, 0] = task['amplitude'] * np.sin(x_samples + task['phase'])
targets[:, 1] = 0
return torch.tensor(x).float(), torch.tensor(targets).float()
def sample_trajectory(self, task, len, test=False):
xs = []
ys = []
for t in range(0, len):
x = float(t) / 20
y = task['amplitude'] * np.e ** (-x * task['decay']) * np.sin(
2 * np.pi * x / task['frequency'] + task['phase'])
xs.append(x)
ys.append(y)
return torch.tensor(xs).float(), torch.tensor(ys).float()
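# sample_trajectory above evaluates a damped sinusoid,
#     y(x) = amplitude * e**(-x * decay) * sin(2*pi*x / frequency + phase)
# at x = t / 20 for t = 0 .. len-1. A standalone check of a single point
# (illustrative parameter values, independent of the classes above):
def _demo_damped_sine_point(t=1, amplitude=2.0, decay=0.1, frequency=1.5, phase=0.3):
    x = float(t) / 20
    return amplitude * np.e ** (-x * decay) * np.sin(2 * np.pi * x / frequency + phase)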
class OmniglotSampler:
# Class to sample tasks
def __init__(self, tasks, trainset, testset):
self.tasks = tasks
self.task_sampler = SampleOmni(trainset, testset)
self.task_sampler.add_complete_iteraetor(self.tasks)
def get_complete_iterator(self):
return self.task_sampler.complete_iterator
def get_another_complete_iterator(self):
return self.task_sampler.another_complete_iterator
def sample_random(self):
return self.task_sampler.get([np.random.choice(self.tasks)], True)  # get() requires the train flag
def filter_upto(self, task):
return self.task_sampler.filter_upto(task)
def sample_task(self, t, train=True):
return self.task_sampler.get(t, train)
def sample_tasks(self, t, train=False):
# assert(false)
dataset = self.task_sampler.get_task_trainset(t, train)
train_iterator = torch.utils.data.DataLoader(dataset,
batch_size=1,
shuffle=True, num_workers=1)
return train_iterator
class SampleOmni:
def __init__(self, trainset, testset):
self.task_iterators = []
self.trainset = trainset
self.testset = testset
self.iterators = {}
self.test_iterators = {}
def add_complete_iteraetor(self, tasks):
dataset = self.get_task_trainset(tasks, True)
# dataset = self.get_task_testset(tasks)
train_iterator = torch.utils.data.DataLoader(dataset,
batch_size=64,
shuffle=True, num_workers=1)
self.complete_iterator = train_iterator
logger.info("Len of complete iterator = %d", len(self.complete_iterator) * 64)
train_iterator2 = torch.utils.data.DataLoader(dataset,
batch_size=1,
shuffle=True, num_workers=1)
self.another_complete_iterator = train_iterator2
def add_task_iterator(self, task, train):
dataset = self.get_task_trainset([task], train)
train_iterator = torch.utils.data.DataLoader(dataset,
batch_size=1,
shuffle=True, num_workers=1)
self.iterators[task] = train_iterator
print("Task %d has been added to the list" % task)
return train_iterator
def get(self, tasks, train):
if train:
for task in tasks:
if task in self.iterators:
return self.iterators[task]
else:
return self.add_task_iterator(task, True)
else:
for task in tasks:
if task in self.test_iterators:  # check the single task, not the whole list
return self.test_iterators[task]
else:
return self.add_task_iterator(task, False)
def get_task_trainset(self, task, train):
if train:
trainset = copy.deepcopy(self.trainset)
else:
trainset = copy.deepcopy(self.testset)
class_labels = np.array([x[1] for x in trainset._flat_character_images])
indices = np.zeros_like(class_labels)
for a in task:
indices = indices + (class_labels == a).astype(int)
indices = np.nonzero(indices)
trainset._flat_character_images = [trainset._flat_character_images[i] for i in indices[0]]
trainset.data = [trainset.data[i] for i in indices[0]]
trainset.targets = [trainset.targets[i] for i in indices[0]]
return trainset
def get_task_testset(self, task):
trainset = copy.deepcopy(self.testset)
class_labels = np.array([x[1] for x in trainset._flat_character_images])
indices = np.zeros_like(class_labels)
for a in task:
indices = indices + (class_labels == a).astype(int)
indices = np.nonzero(indices)
trainset._flat_character_images = [trainset._flat_character_images[i] for i in indices[0]]
trainset.data = [trainset.data[i] for i in indices[0]]
trainset.targets = [trainset.targets[i] for i in indices[0]]
return trainset
def filter_upto(self, task):
trainset = copy.deepcopy(self.trainset)
trainset.data = trainset.data[trainset.data['target'] <= task]
return trainset
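# get_task_trainset above keeps only the samples whose label appears in
# `task` by OR-ing per-class boolean masks and taking np.nonzero. The core
# filtering step in isolation (a sketch with illustrative data):
def _demo_class_filter():
    class_labels = np.array([0, 2, 1, 2, 0])
    task = [0, 2]
    mask = np.zeros_like(class_labels)
    for a in task:
        mask = mask + (class_labels == a).astype(int)
    indices = np.nonzero(mask)
    assert list(indices[0]) == [0, 1, 3, 4]  # positions of classes 0 and 2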
class CIFAR100Sampler:
# Class to sample tasks
def __init__(self, tasks, trainset, testset):
self.tasks = tasks
self.task_sampler = SampleCIFAR100(trainset, testset)
self.task_sampler.add_complete_iteraetor(self.tasks)
def get_complete_iterator(self):
return self.task_sampler.complete_iterator
def get_another_complete_iterator(self):
return self.task_sampler.another_complete_iterator
def sample_random(self):
return self.task_sampler.get([np.random.choice(self.tasks)], True)  # get() requires the train flag
def filter_upto(self, task):
return self.task_sampler.filter_upto(task)
def sample_task(self, t, train=True):
train_iterator = self.task_sampler.get(t, train)
return self.shuffle_iterator(train_iterator)
def sample_tasks(self, t, train=False):
# assert(false)
dataset = self.task_sampler.get_task_trainset(t, train)
train_iterator = torch.utils.data.DataLoader(dataset,
batch_size=128,
shuffle=False, num_workers=1)
return self.shuffle_iterator(train_iterator)
def shuffle_iterator(self, iterator):
indices = np.arange(len(iterator.dataset.targets))
np.random.shuffle(indices)
iterator.dataset.data = np.array(iterator.dataset.data)[indices]
iterator.dataset.targets = np.array(iterator.dataset.targets)[indices]
return iterator
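# shuffle_iterator above draws one random permutation and applies it to both
# data and targets so (sample, label) pairs stay aligned. The core idea in
# isolation (a sketch with illustrative arrays):
def _demo_aligned_shuffle():
    data = np.array([[1], [2], [3]])
    targets = np.array([10, 20, 30])
    indices = np.arange(len(targets))
    np.random.shuffle(indices)
    return data[indices], targets[indices]  # same order applied to both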
class SampleCIFAR100:
def __init__(self, trainset, testset):
self.task_iterators = []
self.trainset = trainset
self.testset = testset
self.iterators = {}
self.test_iterators = {}
def add_complete_iteraetor(self, tasks):
dataset = self.get_task_trainset(tasks, True)
# dataset = self.get_task_testset(tasks)
train_iterator = torch.utils.data.DataLoader(dataset,
batch_size=64,
shuffle=True, num_workers=1)
self.complete_iterator = train_iterator
logger.info("Len of complete iterator = %d", len(self.complete_iterator) * 64)
train_iterator2 = torch.utils.data.DataLoader(dataset,
batch_size=1,
shuffle=False, num_workers=1)
self.another_complete_iterator = train_iterator2
def add_task_iterator(self, task, train):
dataset = self.get_task_trainset([task], train)
train_iterator = torch.utils.data.DataLoader(dataset,
batch_size=1,
shuffle=False, num_workers=1)
self.iterators[task] = train_iterator
print("Task %d has been added to the list" % task)
return train_iterator
def get(self, tasks, train):
if train:
for task in tasks:
if task in self.iterators:
return self.iterators[task]
else:
return self.add_task_iterator(task, True)
else:
for task in tasks:
if task in self.test_iterators:  # check the single task, not the whole list
return self.test_iterators[task]
else:
return self.add_task_iterator(task, False)
def get_task_trainset(self, task, train):
if train:
trainset = copy.deepcopy(self.trainset)
else:
trainset = copy.deepcopy(self.testset)
class_labels = np.array(trainset.targets)
indices = np.zeros_like(class_labels)
for a in task:
indices = indices + (class_labels == a).astype(int)
indices = np.nonzero(indices)
trainset.data = [trainset.data[i] for i in indices[0]]
trainset.targets = [trainset.targets[i] for i in indices[0]]
return trainset
def get_task_testset(self, task):
trainset = copy.deepcopy(self.testset)
class_labels = np.array([x[1] for x in trainset._flat_character_images])
indices = np.zeros_like(class_labels)
for a in task:
indices = indices + (class_labels == a).astype(int)
indices = np.nonzero(indices)
trainset._flat_character_images = [trainset._flat_character_images[i] for i in indices[0]]
trainset.data = [trainset.data[i] for i in indices[0]]
trainset.targets = [trainset.targets[i] for i in indices[0]]
return trainset
def filter_upto(self, task):
trainset = copy.deepcopy(self.trainset)
trainset.data = trainset.data[trainset.data['target'] <= task]
return trainset
class ImagenetSampler:
# Class to sample tasks
def __init__(self, tasks, trainset, testset):
self.tasks = tasks
self.task_sampler = SampleImagenet(trainset, testset)
self.task_sampler.add_complete_iteraetor(list(range(0, int(len(self.tasks)))))
def get_complete_iterator(self):
return self.task_sampler.complete_iterator
def get_another_complete_iterator(self):
return self.task_sampler.another_complete_iterator
def sample_random(self):
return self.task_sampler.get([np.random.choice(self.tasks)], True)  # get() requires the train flag
def filter_upto(self, task):
return self.task_sampler.filter_upto(task)
def sample_task(self, t, train=True):
return self.task_sampler.get(t, train)
def sample_tasks(self, t, train=False):
# assert(false)
dataset = self.task_sampler.get_task_trainset(t, train)
train_iterator = torch.utils.data.DataLoader(dataset,
batch_size=1,
shuffle=True, num_workers=1)
return train_iterator
class SampleImagenet:
def __init__(self, trainset, testset):
self.task_iterators = []
self.trainset = trainset
self.testset = testset
self.iterators = {}
self.test_iterators = {}
def add_complete_iteraetor(self, tasks):
dataset = self.get_task_trainset(tasks, True)
# dataset = self.get_task_testset(tasks)
train_iterator = torch.utils.data.DataLoader(dataset,
batch_size=64,
shuffle=True, num_workers=1)
self.complete_iterator = train_iterator
logger.info("Len of complete iterator = %d", len(self.complete_iterator) * 256)
train_iterator2 = torch.utils.data.DataLoader(dataset,
batch_size=1,
shuffle=True, num_workers=1)
self.another_complete_iterator = train_iterator2
def add_task_iterator(self, task, train):
dataset = self.get_task_trainset([task], train)
train_iterator = torch.utils.data.DataLoader(dataset,
batch_size=1,
shuffle=True, num_workers=1)
self.iterators[task] = train_iterator
print("Task %d has been added to the list" % task)
return train_iterator
def get(self, tasks, train):
if train:
for task in tasks:
if task in self.iterators:
return self.iterators[task]
else:
return self.add_task_iterator(task, True)
else:
for task in tasks:
if task in self.test_iterators:  # check the single task, not the whole list
return self.test_iterators[task]
else:
return self.add_task_iterator(task, False)
def get_task_trainset(self, task, train):
if train:
trainset = copy.deepcopy(self.trainset)
else:
trainset = copy.deepcopy(self.testset)
# class_labels = np.array([x[1] for x in trainset._flat_character_images])
# indices = np.zeros_like(class_labels)
data_temp = []
labels_temp = []
for a in task:
data_temp.append(trainset.data[a])
labels_temp.append(trainset.labels[a])
# labels_temp.append(trainset.)
# indices = indices + (class_labels == a).astype(int)
# indices = np.nonzero(indices)
#
# trainset._flat_character_images = [trainset._flat_character_images[i] for i in indices[0]]
trainset.data = data_temp
trainset.labels = labels_temp
# trainset.targets = [trainset.targets[i] for i in indices[0]]
return trainset
def get_task_testset(self, task):
trainset = copy.deepcopy(self.testset)
class_labels = np.array([x[1] for x in trainset._flat_character_images])
indices = np.zeros_like(class_labels)
for a in task:
indices = indices + (class_labels == a).astype(int)
indices = np.nonzero(indices)
trainset._flat_character_images = [trainset._flat_character_images[i] for i in indices[0]]
trainset.data = [trainset.data[i] for i in indices[0]]
trainset.targets = [trainset.targets[i] for i in indices[0]]
return trainset
def filter_upto(self, task):
trainset = copy.deepcopy(self.trainset)
trainset.data = trainset.data[trainset.data['target'] <= task]
return trainset | 34.883162 | 172 | 0.592996 | 2,373 | 20,302 | 4.897598 | 0.063211 | 0.048184 | 0.054208 | 0.043366 | 0.884615 | 0.877044 | 0.873946 | 0.867149 | 0.862846 | 0.862846 | 0 | 0.009417 | 0.309575 | 20,302 | 582 | 173 | 34.883162 | 0.819719 | 0.044823 | 0 | 0.847087 | 0 | 0 | 0.023282 | 0 | 0 | 0 | 0 | 0 | 0.002427 | 1 | 0.167476 | false | 0.007282 | 0.009709 | 0.058252 | 0.368932 | 0.009709 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
0298c309bebdf6ba9ce0b82bf5b8c05a550be290 | 7,574 | py | Python | rdr_service/alembic/versions/f2aa951ca1a7_add_summary_columns.py | all-of-us/raw-data-repository | d28ad957557587b03ff9c63d55dd55e0508f91d8 | [
"BSD-3-Clause"
] | 39 | 2017-10-13T19:16:27.000Z | 2021-09-24T16:58:21.000Z | rdr_service/alembic/versions/f2aa951ca1a7_add_summary_columns.py | all-of-us/raw-data-repository | d28ad957557587b03ff9c63d55dd55e0508f91d8 | [
"BSD-3-Clause"
] | 312 | 2017-09-08T15:42:13.000Z | 2022-03-23T18:21:40.000Z | rdr_service/alembic/versions/f2aa951ca1a7_add_summary_columns.py | all-of-us/raw-data-repository | d28ad957557587b03ff9c63d55dd55e0508f91d8 | [
"BSD-3-Clause"
] | 19 | 2017-09-15T13:58:00.000Z | 2022-02-07T18:33:20.000Z | """Add summary columns
Revision ID: f2aa951ca1a7
Revises: 7e250583b9cb
Create Date: 2017-10-23 16:50:06.586388
"""
import model.utils
import sqlalchemy as sa
from alembic import op
from rdr_service.participant_enums import OrderStatus
# revision identifiers, used by Alembic.
revision = "f2aa951ca1a7"
down_revision = "7e250583b9cb"
branch_labels = None
depends_on = None
def upgrade(engine_name):
globals()["upgrade_%s" % engine_name]()
def downgrade(engine_name):
globals()["downgrade_%s" % engine_name]()
def upgrade_metrics():
# ### commands auto generated by Alembic - please adjust! ###
pass
# ### end Alembic commands ###
def downgrade_metrics():
# ### commands auto generated by Alembic - please adjust! ###
pass
# ### end Alembic commands ###
def upgrade_rdr():
# ### commands auto generated by Alembic - please adjust! ###
op.add_column("participant_summary", sa.Column("biospecimen_collected_site_id", sa.Integer(), nullable=True))
op.add_column("participant_summary", sa.Column("biospecimen_finalized_site_id", sa.Integer(), nullable=True))
op.add_column("participant_summary", sa.Column("biospecimen_order_time", model.utils.UTCDateTime(), nullable=True))
op.add_column("participant_summary", sa.Column("biospecimen_processed_site_id", sa.Integer(), nullable=True))
op.add_column("participant_summary", sa.Column("biospecimen_source_site_id", sa.Integer(), nullable=True))
op.add_column("participant_summary", sa.Column("biospecimen_status", model.utils.Enum(OrderStatus), nullable=True))
op.add_column(
"participant_summary", sa.Column("physical_measurements_created_site_id", sa.Integer(), nullable=True)
)
op.add_column(
"participant_summary", sa.Column("physical_measurements_finalized_site_id", sa.Integer(), nullable=True)
)
op.add_column(
"participant_summary",
sa.Column("physical_measurements_finalized_time", model.utils.UTCDateTime(), nullable=True),
)
op.add_column(
"participant_summary", sa.Column("sample_order_status_1ed04", model.utils.Enum(OrderStatus), nullable=True)
)
op.add_column(
"participant_summary", sa.Column("sample_order_status_1ed04_time", model.utils.UTCDateTime(), nullable=True)
)
op.add_column(
"participant_summary", sa.Column("sample_order_status_1ed10", model.utils.Enum(OrderStatus), nullable=True)
)
op.add_column(
"participant_summary", sa.Column("sample_order_status_1ed10_time", model.utils.UTCDateTime(), nullable=True)
)
op.add_column(
"participant_summary", sa.Column("sample_order_status_1hep4", model.utils.Enum(OrderStatus), nullable=True)
)
op.add_column(
"participant_summary", sa.Column("sample_order_status_1hep4_time", model.utils.UTCDateTime(), nullable=True)
)
op.add_column(
"participant_summary", sa.Column("sample_order_status_1pst8", model.utils.Enum(OrderStatus), nullable=True)
)
op.add_column(
"participant_summary", sa.Column("sample_order_status_1pst8_time", model.utils.UTCDateTime(), nullable=True)
)
op.add_column(
"participant_summary", sa.Column("sample_order_status_1sal", model.utils.Enum(OrderStatus), nullable=True)
)
op.add_column(
"participant_summary", sa.Column("sample_order_status_1sal_time", model.utils.UTCDateTime(), nullable=True)
)
op.add_column(
"participant_summary", sa.Column("sample_order_status_1sst8", model.utils.Enum(OrderStatus), nullable=True)
)
op.add_column(
"participant_summary", sa.Column("sample_order_status_1sst8_time", model.utils.UTCDateTime(), nullable=True)
)
op.add_column(
"participant_summary", sa.Column("sample_order_status_1ur10", model.utils.Enum(OrderStatus), nullable=True)
)
op.add_column(
"participant_summary", sa.Column("sample_order_status_1ur10_time", model.utils.UTCDateTime(), nullable=True)
)
op.add_column(
"participant_summary", sa.Column("sample_order_status_2ed10", model.utils.Enum(OrderStatus), nullable=True)
)
op.add_column(
"participant_summary", sa.Column("sample_order_status_2ed10_time", model.utils.UTCDateTime(), nullable=True)
)
op.create_foreign_key(None, "participant_summary", "site", ["biospecimen_processed_site_id"], ["site_id"])
op.create_foreign_key(
None, "participant_summary", "site", ["physical_measurements_finalized_site_id"], ["site_id"]
)
op.create_foreign_key(None, "participant_summary", "site", ["physical_measurements_created_site_id"], ["site_id"])
op.create_foreign_key(None, "participant_summary", "site", ["biospecimen_collected_site_id"], ["site_id"])
op.create_foreign_key(None, "participant_summary", "site", ["biospecimen_source_site_id"], ["site_id"])
op.create_foreign_key(None, "participant_summary", "site", ["biospecimen_finalized_site_id"], ["site_id"])
op.add_column("physical_measurements", sa.Column("finalized", model.utils.UTCDateTime(), nullable=True))
# ### end Alembic commands ###
def downgrade_rdr():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_column("physical_measurements", "finalized")
op.drop_constraint(None, "participant_summary", type_="foreignkey")
op.drop_constraint(None, "participant_summary", type_="foreignkey")
op.drop_constraint(None, "participant_summary", type_="foreignkey")
op.drop_constraint(None, "participant_summary", type_="foreignkey")
op.drop_constraint(None, "participant_summary", type_="foreignkey")
op.drop_constraint(None, "participant_summary", type_="foreignkey")
op.drop_column("participant_summary", "sample_order_status_2ed10_time")
op.drop_column("participant_summary", "sample_order_status_2ed10")
op.drop_column("participant_summary", "sample_order_status_1ur10_time")
op.drop_column("participant_summary", "sample_order_status_1ur10")
op.drop_column("participant_summary", "sample_order_status_1sst8_time")
op.drop_column("participant_summary", "sample_order_status_1sst8")
op.drop_column("participant_summary", "sample_order_status_1sal_time")
op.drop_column("participant_summary", "sample_order_status_1sal")
op.drop_column("participant_summary", "sample_order_status_1pst8_time")
op.drop_column("participant_summary", "sample_order_status_1pst8")
op.drop_column("participant_summary", "sample_order_status_1hep4_time")
op.drop_column("participant_summary", "sample_order_status_1hep4")
op.drop_column("participant_summary", "sample_order_status_1ed10_time")
op.drop_column("participant_summary", "sample_order_status_1ed10")
op.drop_column("participant_summary", "sample_order_status_1ed04_time")
op.drop_column("participant_summary", "sample_order_status_1ed04")
op.drop_column("participant_summary", "physical_measurements_finalized_time")
op.drop_column("participant_summary", "physical_measurements_finalized_site_id")
op.drop_column("participant_summary", "physical_measurements_created_site_id")
op.drop_column("participant_summary", "biospecimen_status")
op.drop_column("participant_summary", "biospecimen_source_site_id")
op.drop_column("participant_summary", "biospecimen_processed_site_id")
op.drop_column("participant_summary", "biospecimen_order_time")
op.drop_column("participant_summary", "biospecimen_finalized_site_id")
op.drop_column("participant_summary", "biospecimen_collected_site_id")
# ### end Alembic commands ###
| 49.181818 | 119 | 0.746633 | 918 | 7,574 | 5.784314 | 0.095861 | 0.210169 | 0.225989 | 0.103578 | 0.885311 | 0.867797 | 0.844821 | 0.834087 | 0.759322 | 0.570057 | 0 | 0.018706 | 0.124769 | 7,574 | 153 | 120 | 49.503268 | 0.78232 | 0.060074 | 0 | 0.219512 | 0 | 0 | 0.425999 | 0.226963 | 0 | 0 | 0 | 0 | 0 | 1 | 0.04878 | false | 0.01626 | 0.03252 | 0 | 0.081301 | 0 | 0 | 0 | 0 | null | 1 | 1 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
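
# Note on the downgrade above: op.drop_constraint(None, ...) relies on Alembic
# resolving an unnamed constraint, which fails on backends such as MySQL. A minimal
# sketch of the usual fix -- naming the foreign keys explicitly so they can later be
# dropped by name. The constraint name below is hypothetical, not this repository's
# actual convention.
from alembic import op

def upgrade_with_named_fk():
    op.create_foreign_key(
        "fk_summary_biospecimen_collected_site",  # hypothetical explicit name
        "participant_summary", "site", ["biospecimen_collected_site_id"], ["site_id"]
    )

def downgrade_with_named_fk():
    # Dropping by the explicit name is unambiguous on every backend.
    op.drop_constraint(
        "fk_summary_biospecimen_collected_site", "participant_summary", type_="foreignkey"
    )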
02df56a6955395bf6f9a57e9ac618338dca0a29c | 50,973 | py | Python | tests/tests.py | pje3110/user-statistician | 8b5a6d90ca880b653ee2fcfde1a1d688359ced43 | [
"MIT"
] | null | null | null | tests/tests.py | pje3110/user-statistician | 8b5a6d90ca880b653ee2fcfde1a1d688359ced43 | [
"MIT"
] | null | null | null | tests/tests.py | pje3110/user-statistician | 8b5a6d90ca880b653ee2fcfde1a1d688359ced43 | [
"MIT"
] | null | null | null | # user-statistician: Github action for generating a user stats card
#
# Copyright (c) 2021 Vincent A Cicirello
# https://www.cicirello.org/
#
# MIT License
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
#
import unittest
import sys
sys.path.insert(0,'src')
from Statistician import *
from StatsImageGenerator import StatsImageGenerator
from UserStatistician import writeImageToFile
from Colors import *
from StatConfig import *
from ColorUtil import isValidColor, _namedColors, highContrastingColor, contrastRatio
from TextLength import *
import copy
executedQueryResultsOriginal = [
{'data': {'user': {'contributionsCollection': {'totalCommitContributions': 3602, 'totalIssueContributions': 79, 'totalPullRequestContributions': 289, 'totalPullRequestReviewContributions': 315, 'totalRepositoryContributions': 18, 'restrictedContributionsCount': 105, 'contributionYears': [2021, 2020, 2019, 2018, 2017, 2016, 2015, 2014, 2013, 2012, 2011]}, 'followers': {'totalCount': 9}, 'following': {'totalCount': 7}, 'issues': {'totalCount': 81}, 'login': 'someuser', 'name': 'Firstname M. Lastname', 'pullRequests': {'totalCount': 289}, 'repositoriesContributedTo': {'totalCount': 3}, 'sponsorshipsAsMaintainer': {'totalCount': 7}, 'sponsorshipsAsSponsor': {'totalCount': 5}}}},
[{'data': {'user': {'repositories': {'totalCount': 31, 'nodes': [{'stargazerCount': 0, 'forkCount': 0, 'isArchived': True, 'isFork': False, 'isPrivate': False, 'isTemplate': False, 'name': 'repo1', 'watchers': {'totalCount': 1}, 'languages': {'totalCount': 1, 'totalSize': 7139, 'edges': [{'size': 7139, 'node': {'color': '#b07219', 'name': 'Java'}}]}}, {'stargazerCount': 0, 'forkCount': 0, 'isArchived': False, 'isFork': False, 'isPrivate': False, 'isTemplate': False, 'name': 'repo2', 'watchers': {'totalCount': 1}, 'languages': {'totalCount': 4, 'totalSize': 1479512, 'edges': [{'size': 1309108, 'node': {'color': '#e34c26', 'name': 'HTML'}}, {'size': 168479, 'node': {'color': '#3D6117', 'name': 'TeX'}}, {'size': 1721, 'node': {'color': '#563d7c', 'name': 'CSS'}}, {'size': 204, 'node': {'color': '#f1e05a', 'name': 'JavaScript'}}]}}, {'stargazerCount': 0, 'forkCount': 0, 'isArchived': False, 'isFork': False, 'isPrivate': False, 'isTemplate': False, 'name': 'repo3', 'watchers': {'totalCount': 1}, 'languages': {'totalCount': 1, 'totalSize': 5842, 'edges': [{'size': 5842, 'node': {'color': '#b07219', 'name': 'Java'}}]}}, {'stargazerCount': 3, 'forkCount': 0, 'isArchived': False, 'isFork': False, 'isPrivate': False, 'isTemplate': False, 'name': 'repo4', 'watchers': {'totalCount': 1}, 'languages': {'totalCount': 2, 'totalSize': 45961, 'edges': [{'size': 44035, 'node': {'color': '#b07219', 'name': 'Java'}}, {'size': 1926, 'node': {'color': '#89e051', 'name': 'Shell'}}]}}, {'stargazerCount': 0, 'forkCount': 0, 'isArchived': True, 'isFork': False, 'isPrivate': False, 'isTemplate': False, 'name': 'repo5', 'watchers': {'totalCount': 1}, 'languages': {'totalCount': 1, 'totalSize': 7717, 'edges': [{'size': 7717, 'node': {'color': '#b07219', 'name': 'Java'}}]}}, {'stargazerCount': 0, 'forkCount': 0, 'isArchived': False, 'isFork': False, 'isPrivate': False, 'isTemplate': False, 'name': 'repo6', 'watchers': {'totalCount': 1}, 'languages': {'totalCount': 1, 'totalSize': 8491, 'edges': [{'size': 8491, 'node': {'color': '#b07219', 'name': 'Java'}}]}}, {'stargazerCount': 0, 'forkCount': 3, 'isArchived': False, 'isFork': False, 'isPrivate': False, 'isTemplate': False, 'name': 'repo7', 'watchers': {'totalCount': 2}, 'languages': {'totalCount': 1, 'totalSize': 74003, 'edges': [{'size': 74003, 'node': {'color': '#3572A5', 'name': 'Python'}}]}}, {'stargazerCount': 3, 'forkCount': 2, 'isArchived': False, 'isFork': False, 'isPrivate': False, 'isTemplate': False, 'name': 'repo8', 'watchers': {'totalCount': 1}, 'languages': {'totalCount': 1, 'totalSize': 739339, 'edges': [{'size': 739339, 'node': {'color': '#b07219', 'name': 'Java'}}]}}, {'stargazerCount': 2, 'forkCount': 0, 'isArchived': False, 'isFork': False, 'isPrivate': False, 'isTemplate': False, 'name': 'repo9', 'watchers': {'totalCount': 1}, 'languages': {'totalCount': 1, 'totalSize': 52285, 'edges': [{'size': 52285, 'node': {'color': '#b07219', 'name': 'Java'}}]}}, {'stargazerCount': 7, 'forkCount': 4, 'isArchived': False, 'isFork': False, 'isPrivate': False, 'isTemplate': False, 'name': 'repo10', 'watchers': {'totalCount': 1}, 'languages': {'totalCount': 1, 'totalSize': 2100055, 'edges': [{'size': 2100055, 'node': {'color': '#b07219', 'name': 'Java'}}]}}, {'stargazerCount': 3, 'forkCount': 0, 'isArchived': False, 'isFork': False, 'isPrivate': False, 'isTemplate': False, 'name': 'repo11', 'watchers': {'totalCount': 1}, 'languages': {'totalCount': 3, 'totalSize': 266774, 'edges': [{'size': 198236, 'node': {'color': '#b07219', 'name': 'Java'}}, {'size': 34345, 
'node': {'color': '#3D6117', 'name': 'TeX'}}, {'size': 34193, 'node': {'color': '#e34c26', 'name': 'HTML'}}]}}, {'stargazerCount': 3, 'forkCount': 2, 'isArchived': False, 'isFork': False, 'isPrivate': False, 'isTemplate': False, 'name': 'repo12', 'watchers': {'totalCount': 1}, 'languages': {'totalCount': 2, 'totalSize': 39091, 'edges': [{'size': 38882, 'node': {'color': '#3572A5', 'name': 'Python'}}, {'size': 209, 'node': {'color': '#384d54', 'name': 'Dockerfile'}}]}}, {'stargazerCount': 0, 'forkCount': 0, 'isArchived': False, 'isFork': False, 'isPrivate': False, 'isTemplate': False, 'name': 'repo13', 'watchers': {'totalCount': 1}, 'languages': {'totalCount': 0, 'totalSize': 0, 'edges': []}}, {'stargazerCount': 0, 'forkCount': 1, 'isArchived': False, 'isFork': False, 'isPrivate': False, 'isTemplate': False, 'name': 'repo14', 'watchers': {'totalCount': 1}, 'languages': {'totalCount': 1, 'totalSize': 852, 'edges': [{'size': 852, 'node': {'color': '#384d54', 'name': 'Dockerfile'}}]}}, {'stargazerCount': 0, 'forkCount': 0, 'isArchived': False, 'isFork': False, 'isPrivate': False, 'isTemplate': False, 'name': 'repo15', 'watchers': {'totalCount': 1}, 'languages': {'totalCount': 1, 'totalSize': 787, 'edges': [{'size': 787, 'node': {'color': '#384d54', 'name': 'Dockerfile'}}]}}, {'stargazerCount': 1, 'forkCount': 2, 'isArchived': False, 'isFork': False, 'isPrivate': False, 'isTemplate': False, 'name': 'repo16', 'watchers': {'totalCount': 1}, 'languages': {'totalCount': 1, 'totalSize': 1412, 'edges': [{'size': 1412, 'node': {'color': '#384d54', 'name': 'Dockerfile'}}]}}, {'stargazerCount': 0, 'forkCount': 0, 'isArchived': False, 'isFork': False, 'isPrivate': False, 'isTemplate': False, 'name': 'repo17', 'watchers': {'totalCount': 1}, 'languages': {'totalCount': 0, 'totalSize': 0, 'edges': []}}, {'stargazerCount': 2, 'forkCount': 0, 'isArchived': False, 'isFork': False, 'isPrivate': False, 'isTemplate': False, 'name': 'repo18', 'watchers': {'totalCount': 1}, 'languages': {'totalCount': 2, 'totalSize': 31866, 'edges': [{'size': 31656, 'node': {'color': '#3572A5', 'name': 'Python'}}, {'size': 210, 'node': {'color': '#384d54', 'name': 'Dockerfile'}}]}}, {'stargazerCount': 0, 'forkCount': 0, 'isArchived': False, 'isFork': False, 'isPrivate': False, 'isTemplate': False, 'name': 'repo19', 'watchers': {'totalCount': 1}, 'languages': {'totalCount': 1, 'totalSize': 692, 'edges': [{'size': 692, 'node': {'color': '#384d54', 'name': 'Dockerfile'}}]}}, {'stargazerCount': 2, 'forkCount': 0, 'isArchived': False, 'isFork': False, 'isPrivate': False, 'isTemplate': False, 'name': 'repo20', 'watchers': {'totalCount': 1}, 'languages': {'totalCount': 4, 'totalSize': 36101, 'edges': [{'size': 26436, 'node': {'color': '#b07219', 'name': 'Java'}}, {'size': 7807, 'node': {'color': '#3572A5', 'name': 'Python'}}, {'size': 1758, 'node': {'color': '#427819', 'name': 'Makefile'}}, {'size': 100, 'node': {'color': '#C1F12E', 'name': 'Batchfile'}}]}}, {'stargazerCount': 0, 'forkCount': 0, 'isArchived': False, 'isFork': False, 'isPrivate': False, 'isTemplate': False, 'name': 'repo21', 'watchers': {'totalCount': 1}, 'languages': {'totalCount': 2, 'totalSize': 107241, 'edges': [{'size': 106048, 'node': {'color': '#b07219', 'name': 'Java'}}, {'size': 1193, 'node': {'color': '#427819', 'name': 'Makefile'}}]}}, {'stargazerCount': 1, 'forkCount': 0, 'isArchived': False, 'isFork': False, 'isPrivate': False, 'isTemplate': True, 'name': 'repo22', 'watchers': {'totalCount': 1}, 'languages': {'totalCount': 2, 'totalSize': 1943, 'edges': 
[{'size': 1469, 'node': {'color': '#3572A5', 'name': 'Python'}}, {'size': 474, 'node': {'color': '#384d54', 'name': 'Dockerfile'}}]}}, {'stargazerCount': 9, 'forkCount': 14, 'isArchived': False, 'isFork': False, 'isPrivate': False, 'isTemplate': False, 'name': 'repo23', 'watchers': {'totalCount': 2}, 'languages': {'totalCount': 2, 'totalSize': 46228, 'edges': [{'size': 45994, 'node': {'color': '#3572A5', 'name': 'Python'}}, {'size': 234, 'node': {'color': '#384d54', 'name': 'Dockerfile'}}]}}, {'stargazerCount': 0, 'forkCount': 0, 'isArchived': False, 'isFork': False, 'isPrivate': False, 'isTemplate': False, 'name': 'repo24', 'watchers': {'totalCount': 1}, 'languages': {'totalCount': 2, 'totalSize': 91844, 'edges': [{'size': 90353, 'node': {'color': '#b07219', 'name': 'Java'}}, {'size': 1491, 'node': {'color': '#427819', 'name': 'Makefile'}}]}}, {'stargazerCount': 0, 'forkCount': 0, 'isArchived': False, 'isFork': False, 'isPrivate': False, 'isTemplate': False, 'name': 'repo25', 'watchers': {'totalCount': 1}, 'languages': {'totalCount': 0, 'totalSize': 0, 'edges': []}}, {'stargazerCount': 0, 'forkCount': 0, 'isArchived': False, 'isFork': False, 'isPrivate': False, 'isTemplate': False, 'name': 'repo26', 'watchers': {'totalCount': 1}, 'languages': {'totalCount': 2, 'totalSize': 1984, 'edges': [{'size': 1763, 'node': {'color': '#3572A5', 'name': 'Python'}}, {'size': 221, 'node': {'color': '#384d54', 'name': 'Dockerfile'}}]}}, {'stargazerCount': 0, 'forkCount': 0, 'isArchived': False, 'isFork': False, 'isPrivate': False, 'isTemplate': False, 'name': 'repo27', 'watchers': {'totalCount': 1}, 'languages': {'totalCount': 0, 'totalSize': 0, 'edges': []}}, {'stargazerCount': 0, 'forkCount': 0, 'isArchived': False, 'isFork': False, 'isPrivate': False, 'isTemplate': False, 'name': 'repo28', 'watchers': {'totalCount': 1}, 'languages': {'totalCount': 0, 'totalSize': 0, 'edges': []}}, {'stargazerCount': 0, 'forkCount': 0, 'isArchived': False, 'isFork': False, 'isPrivate': False, 'isTemplate': False, 'name': 'repo29', 'watchers': {'totalCount': 1}, 'languages': {'totalCount': 3, 'totalSize': 75220, 'edges': [{'size': 72961, 'node': {'color': '#3572A5', 'name': 'Python'}}, {'size': 1902, 'node': {'color': '#e10098', 'name': 'GraphQL'}}, {'size': 357, 'node': {'color': '#384d54', 'name': 'Dockerfile'}}]}}, {'stargazerCount': 0, 'forkCount': 0, 'isArchived': False, 'isFork': True, 'isPrivate': False, 'isTemplate': False, 'name': 'repo30', 'watchers': {'totalCount': 0}, 'languages': {'totalCount': 0, 'totalSize': 0, 'edges': []}}, {'stargazerCount': 0, 'forkCount': 0, 'isArchived': False, 'isFork': True, 'isPrivate': False, 'isTemplate': False, 'name': 'repo31', 'watchers': {'totalCount': 0}, 'languages': {'totalCount': 7, 'totalSize': 1415534, 'edges': [{'size': 998990, 'node': {'color': '#f1e05a', 'name': 'JavaScript'}}, {'size': 247728, 'node': {'color': '#2b7489', 'name': 'TypeScript'}}, {'size': 127643, 'node': {'color': '#e34c26', 'name': 'HTML'}}, {'size': 26509, 'node': {'color': '#c6538c', 'name': 'SCSS'}}, {'size': 5854, 'node': {'color': '#3572A5', 'name': 'Python'}}, {'size': 5303, 'node': {'color': '#89e051', 'name': 'Shell'}}, {'size': 3507, 'node': {'color': '#384d54', 'name': 'Dockerfile'}}]}}], 'pageInfo': {'hasNextPage': False, 'endCursor': 'Y3Vyc29yOnYyOpHOFwfoDg=='}}}}}],
[{'data': {'user': {'watching': {'totalCount': 28, 'nodes': [{'isFork': False}, {'isFork': False}, {'isFork': False}, {'isFork': False}, {'isFork': False}, {'isFork': False}, {'isFork': False}, {'isFork': False}, {'isFork': False}, {'isFork': False}, {'isFork': False}, {'isFork': False}, {'isFork': False}, {'isFork': False}, {'isFork': False}, {'isFork': False}, {'isFork': False}, {'isFork': False}, {'isFork': False}, {'isFork': False}, {'isFork': False}, {'isFork': False}, {'isFork': False}, {'isFork': False}, {'isFork': False}, {'isFork': False}, {'isFork': False}, {'isFork': False}], 'pageInfo': {'hasNextPage': False, 'endCursor': 'Mjg'}}}}}],
{'data': {'user': {'year2021': {'totalCommitContributions': 1850, 'totalPullRequestReviewContributions': 223, 'restrictedContributionsCount': 105}, 'year2020': {'totalCommitContributions': 1845, 'totalPullRequestReviewContributions': 92, 'restrictedContributionsCount': 0}, 'year2019': {'totalCommitContributions': 194, 'totalPullRequestReviewContributions': 0, 'restrictedContributionsCount': 0}, 'year2018': {'totalCommitContributions': 198, 'totalPullRequestReviewContributions': 0, 'restrictedContributionsCount': 0}, 'year2017': {'totalCommitContributions': 177, 'totalPullRequestReviewContributions': 0, 'restrictedContributionsCount': 0}, 'year2016': {'totalCommitContributions': 138, 'totalPullRequestReviewContributions': 0, 'restrictedContributionsCount': 0}, 'year2015': {'totalCommitContributions': 0, 'totalPullRequestReviewContributions': 0, 'restrictedContributionsCount': 0}, 'year2014': {'totalCommitContributions': 0, 'totalPullRequestReviewContributions': 0, 'restrictedContributionsCount': 0}, 'year2013': {'totalCommitContributions': 0, 'totalPullRequestReviewContributions': 0, 'restrictedContributionsCount': 0}, 'year2012': {'totalCommitContributions': 0, 'totalPullRequestReviewContributions': 0, 'restrictedContributionsCount': 0}, 'year2011': {'totalCommitContributions': 0, 'totalPullRequestReviewContributions': 0, 'restrictedContributionsCount': 0}}}},
[{'data': {'user': {'topRepositories': {'totalCount': 34, 'nodes': [{'owner': {'login': 'someuser'}}, {'owner': {'login': 'someuser'}}, {'owner': {'login': 'someuser'}}, {'owner': {'login': 'someuser'}}, {'owner': {'login': 'someuser'}}, {'owner': {'login': 'someuser'}}, {'owner': {'login': 'someuser'}}, {'owner': {'login': 'someuser'}}, {'owner': {'login': 'someuser'}}, {'owner': {'login': 'someuser'}}, {'owner': {'login': 'someuser'}}, {'owner': {'login': 'someuser'}}, {'owner': {'login': 'someuser'}}, {'owner': {'login': 'someuser'}}, {'owner': {'login': 'someuser'}}, {'owner': {'login': 'someuser'}}, {'owner': {'login': 'someuser'}}, {'owner': {'login': 'someuser'}}, {'owner': {'login': 'someuser'}}, {'owner': {'login': 'someUserA'}}, {'owner': {'login': 'someuser'}}, {'owner': {'login': 'someuser'}}, {'owner': {'login': 'someuser'}}, {'owner': {'login': 'someUserA'}}, {'owner': {'login': 'someuser'}}, {'owner': {'login': 'someUserB'}}, {'owner': {'login': 'someuser'}}, {'owner': {'login': 'someUserC'}}, {'owner': {'login': 'someUserD'}}, {'owner': {'login': 'someUserE'}}, {'owner': {'login': 'someuser'}}, {'owner': {'login': 'someuser'}}, {'owner': {'login': 'someUserA'}}, {'owner': {'login': 'someUserF'}}], 'pageInfo': {'hasNextPage': False, 'endCursor': 'MzQ'}}}}}]
]
executedQueryResultsMultiPage = [
{'data': {'user': {'contributionsCollection': {'totalCommitContributions': 3602, 'totalIssueContributions': 79, 'totalPullRequestContributions': 289, 'totalPullRequestReviewContributions': 315, 'totalRepositoryContributions': 18, 'restrictedContributionsCount': 105, 'contributionYears': [2021, 2020, 2019, 2018, 2017, 2016, 2015, 2014, 2013, 2012, 2011]}, 'followers': {'totalCount': 9}, 'following': {'totalCount': 7}, 'issues': {'totalCount': 81}, 'login': 'someuser', 'name': 'Firstname M. Lastname', 'pullRequests': {'totalCount': 289}, 'repositoriesContributedTo': {'totalCount': 3}, 'sponsorshipsAsMaintainer': {'totalCount': 7}, 'sponsorshipsAsSponsor': {'totalCount': 5}}}},
[{'data': {'user': {'repositories': {'totalCount': 31, 'nodes': [{'stargazerCount': 0, 'forkCount': 0, 'isArchived': True, 'isFork': False, 'isPrivate': False, 'isTemplate': False, 'name': 'repo1', 'watchers': {'totalCount': 1}, 'languages': {'totalCount': 1, 'totalSize': 7139, 'edges': [{'size': 7139, 'node': {'color': '#b07219', 'name': 'Java'}}]}}, {'stargazerCount': 0, 'forkCount': 0, 'isArchived': False, 'isFork': False, 'isPrivate': False, 'isTemplate': False, 'name': 'repo2', 'watchers': {'totalCount': 1}, 'languages': {'totalCount': 4, 'totalSize': 1479512, 'edges': [{'size': 1309108, 'node': {'color': '#e34c26', 'name': 'HTML'}}, {'size': 168479, 'node': {'color': '#3D6117', 'name': 'TeX'}}, {'size': 1721, 'node': {'color': '#563d7c', 'name': 'CSS'}}, {'size': 204, 'node': {'color': '#f1e05a', 'name': 'JavaScript'}}]}}, {'stargazerCount': 0, 'forkCount': 0, 'isArchived': False, 'isFork': False, 'isPrivate': False, 'isTemplate': False, 'name': 'repo3', 'watchers': {'totalCount': 1}, 'languages': {'totalCount': 1, 'totalSize': 5842, 'edges': [{'size': 5842, 'node': {'color': '#b07219', 'name': 'Java'}}]}}, {'stargazerCount': 3, 'forkCount': 0, 'isArchived': False, 'isFork': False, 'isPrivate': False, 'isTemplate': False, 'name': 'repo4', 'watchers': {'totalCount': 1}, 'languages': {'totalCount': 2, 'totalSize': 45961, 'edges': [{'size': 44035, 'node': {'color': '#b07219', 'name': 'Java'}}, {'size': 1926, 'node': {'color': '#89e051', 'name': 'Shell'}}]}}, {'stargazerCount': 0, 'forkCount': 0, 'isArchived': True, 'isFork': False, 'isPrivate': False, 'isTemplate': False, 'name': 'repo5', 'watchers': {'totalCount': 1}, 'languages': {'totalCount': 1, 'totalSize': 7717, 'edges': [{'size': 7717, 'node': {'color': '#b07219', 'name': 'Java'}}]}}, {'stargazerCount': 0, 'forkCount': 0, 'isArchived': False, 'isFork': False, 'isPrivate': False, 'isTemplate': False, 'name': 'repo6', 'watchers': {'totalCount': 1}, 'languages': {'totalCount': 1, 'totalSize': 8491, 'edges': [{'size': 8491, 'node': {'color': '#b07219', 'name': 'Java'}}]}}, {'stargazerCount': 0, 'forkCount': 3, 'isArchived': False, 'isFork': False, 'isPrivate': False, 'isTemplate': False, 'name': 'repo7', 'watchers': {'totalCount': 2}, 'languages': {'totalCount': 1, 'totalSize': 74003, 'edges': [{'size': 74003, 'node': {'color': '#3572A5', 'name': 'Python'}}]}}, {'stargazerCount': 3, 'forkCount': 2, 'isArchived': False, 'isFork': False, 'isPrivate': False, 'isTemplate': False, 'name': 'repo8', 'watchers': {'totalCount': 1}, 'languages': {'totalCount': 1, 'totalSize': 739339, 'edges': [{'size': 739339, 'node': {'color': '#b07219', 'name': 'Java'}}]}}, {'stargazerCount': 2, 'forkCount': 0, 'isArchived': False, 'isFork': False, 'isPrivate': False, 'isTemplate': False, 'name': 'repo9', 'watchers': {'totalCount': 1}, 'languages': {'totalCount': 1, 'totalSize': 52285, 'edges': [{'size': 52285, 'node': {'color': '#b07219', 'name': 'Java'}}]}}, {'stargazerCount': 7, 'forkCount': 4, 'isArchived': False, 'isFork': False, 'isPrivate': False, 'isTemplate': False, 'name': 'repo10', 'watchers': {'totalCount': 1}, 'languages': {'totalCount': 1, 'totalSize': 2100055, 'edges': [{'size': 2100055, 'node': {'color': '#b07219', 'name': 'Java'}}]}}], 'pageInfo': {'hasNextPage': True, 'endCursor': 'Y3Vyc29yOnYyOpHOEEbJCQ=='}}}}}, {'data': {'user': {'repositories': {'totalCount': 31, 'nodes': [{'stargazerCount': 3, 'forkCount': 0, 'isArchived': False, 'isFork': False, 'isPrivate': False, 'isTemplate': False, 'name': 'repo11', 'watchers': {'totalCount': 1}, 
'languages': {'totalCount': 3, 'totalSize': 266774, 'edges': [{'size': 198236, 'node': {'color': '#b07219', 'name': 'Java'}}, {'size': 34345, 'node': {'color': '#3D6117', 'name': 'TeX'}}, {'size': 34193, 'node': {'color': '#e34c26', 'name': 'HTML'}}]}}, {'stargazerCount': 3, 'forkCount': 2, 'isArchived': False, 'isFork': False, 'isPrivate': False, 'isTemplate': False, 'name': 'repo12', 'watchers': {'totalCount': 1}, 'languages': {'totalCount': 2, 'totalSize': 39091, 'edges': [{'size': 38882, 'node': {'color': '#3572A5', 'name': 'Python'}}, {'size': 209, 'node': {'color': '#384d54', 'name': 'Dockerfile'}}]}}, {'stargazerCount': 0, 'forkCount': 0, 'isArchived': False, 'isFork': False, 'isPrivate': False, 'isTemplate': False, 'name': 'repo13', 'watchers': {'totalCount': 1}, 'languages': {'totalCount': 0, 'totalSize': 0, 'edges': []}}, {'stargazerCount': 0, 'forkCount': 1, 'isArchived': False, 'isFork': False, 'isPrivate': False, 'isTemplate': False, 'name': 'repo14', 'watchers': {'totalCount': 1}, 'languages': {'totalCount': 1, 'totalSize': 852, 'edges': [{'size': 852, 'node': {'color': '#384d54', 'name': 'Dockerfile'}}]}}, {'stargazerCount': 0, 'forkCount': 0, 'isArchived': False, 'isFork': False, 'isPrivate': False, 'isTemplate': False, 'name': 'repo15', 'watchers': {'totalCount': 1}, 'languages': {'totalCount': 1, 'totalSize': 787, 'edges': [{'size': 787, 'node': {'color': '#384d54', 'name': 'Dockerfile'}}]}}, {'stargazerCount': 1, 'forkCount': 2, 'isArchived': False, 'isFork': False, 'isPrivate': False, 'isTemplate': False, 'name': 'repo16', 'watchers': {'totalCount': 1}, 'languages': {'totalCount': 1, 'totalSize': 1412, 'edges': [{'size': 1412, 'node': {'color': '#384d54', 'name': 'Dockerfile'}}]}}, {'stargazerCount': 0, 'forkCount': 0, 'isArchived': False, 'isFork': False, 'isPrivate': False, 'isTemplate': False, 'name': 'repo17', 'watchers': {'totalCount': 1}, 'languages': {'totalCount': 0, 'totalSize': 0, 'edges': []}}, {'stargazerCount': 2, 'forkCount': 0, 'isArchived': False, 'isFork': False, 'isPrivate': False, 'isTemplate': False, 'name': 'repo18', 'watchers': {'totalCount': 1}, 'languages': {'totalCount': 2, 'totalSize': 31866, 'edges': [{'size': 31656, 'node': {'color': '#3572A5', 'name': 'Python'}}, {'size': 210, 'node': {'color': '#384d54', 'name': 'Dockerfile'}}]}}, {'stargazerCount': 0, 'forkCount': 0, 'isArchived': False, 'isFork': False, 'isPrivate': False, 'isTemplate': False, 'name': 'repo19', 'watchers': {'totalCount': 1}, 'languages': {'totalCount': 1, 'totalSize': 692, 'edges': [{'size': 692, 'node': {'color': '#384d54', 'name': 'Dockerfile'}}]}}, {'stargazerCount': 2, 'forkCount': 0, 'isArchived': False, 'isFork': False, 'isPrivate': False, 'isTemplate': False, 'name': 'repo20', 'watchers': {'totalCount': 1}, 'languages': {'totalCount': 4, 'totalSize': 36101, 'edges': [{'size': 26436, 'node': {'color': '#b07219', 'name': 'Java'}}, {'size': 7807, 'node': {'color': '#3572A5', 'name': 'Python'}}, {'size': 1758, 'node': {'color': '#427819', 'name': 'Makefile'}}, {'size': 100, 'node': {'color': '#C1F12E', 'name': 'Batchfile'}}]}}], 'pageInfo': {'hasNextPage': True, 'endCursor': 'Y3Vyc29yOnYyOpHOEcjkCw=='}}}}}, {'data': {'user': {'repositories': {'totalCount': 31, 'nodes': [{'stargazerCount': 0, 'forkCount': 0, 'isArchived': False, 'isFork': False, 'isPrivate': False, 'isTemplate': False, 'name': 'repo21', 'watchers': {'totalCount': 1}, 'languages': {'totalCount': 2, 'totalSize': 107241, 'edges': [{'size': 106048, 'node': {'color': '#b07219', 'name': 'Java'}}, {'size': 
1193, 'node': {'color': '#427819', 'name': 'Makefile'}}]}}, {'stargazerCount': 1, 'forkCount': 0, 'isArchived': False, 'isFork': False, 'isPrivate': False, 'isTemplate': True, 'name': 'repo22', 'watchers': {'totalCount': 1}, 'languages': {'totalCount': 2, 'totalSize': 1943, 'edges': [{'size': 1469, 'node': {'color': '#3572A5', 'name': 'Python'}}, {'size': 474, 'node': {'color': '#384d54', 'name': 'Dockerfile'}}]}}, {'stargazerCount': 9, 'forkCount': 14, 'isArchived': False, 'isFork': False, 'isPrivate': False, 'isTemplate': False, 'name': 'repo23', 'watchers': {'totalCount': 2}, 'languages': {'totalCount': 2, 'totalSize': 46228, 'edges': [{'size': 45994, 'node': {'color': '#3572A5', 'name': 'Python'}}, {'size': 234, 'node': {'color': '#384d54', 'name': 'Dockerfile'}}]}}, {'stargazerCount': 0, 'forkCount': 0, 'isArchived': False, 'isFork': False, 'isPrivate': False, 'isTemplate': False, 'name': 'repo24', 'watchers': {'totalCount': 1}, 'languages': {'totalCount': 2, 'totalSize': 91844, 'edges': [{'size': 90353, 'node': {'color': '#b07219', 'name': 'Java'}}, {'size': 1491, 'node': {'color': '#427819', 'name': 'Makefile'}}]}}, {'stargazerCount': 0, 'forkCount': 0, 'isArchived': False, 'isFork': False, 'isPrivate': False, 'isTemplate': False, 'name': 'repo25', 'watchers': {'totalCount': 1}, 'languages': {'totalCount': 0, 'totalSize': 0, 'edges': []}}, {'stargazerCount': 0, 'forkCount': 0, 'isArchived': False, 'isFork': False, 'isPrivate': False, 'isTemplate': False, 'name': 'repo26', 'watchers': {'totalCount': 1}, 'languages': {'totalCount': 2, 'totalSize': 1984, 'edges': [{'size': 1763, 'node': {'color': '#3572A5', 'name': 'Python'}}, {'size': 221, 'node': {'color': '#384d54', 'name': 'Dockerfile'}}]}}, {'stargazerCount': 0, 'forkCount': 0, 'isArchived': False, 'isFork': False, 'isPrivate': False, 'isTemplate': False, 'name': 'repo27', 'watchers': {'totalCount': 1}, 'languages': {'totalCount': 0, 'totalSize': 0, 'edges': []}}, {'stargazerCount': 0, 'forkCount': 0, 'isArchived': False, 'isFork': False, 'isPrivate': False, 'isTemplate': False, 'name': 'repo28', 'watchers': {'totalCount': 1}, 'languages': {'totalCount': 0, 'totalSize': 0, 'edges': []}}, {'stargazerCount': 0, 'forkCount': 0, 'isArchived': False, 'isFork': False, 'isPrivate': False, 'isTemplate': False, 'name': 'repo29', 'watchers': {'totalCount': 1}, 'languages': {'totalCount': 3, 'totalSize': 75220, 'edges': [{'size': 72961, 'node': {'color': '#3572A5', 'name': 'Python'}}, {'size': 1902, 'node': {'color': '#e10098', 'name': 'GraphQL'}}, {'size': 357, 'node': {'color': '#384d54', 'name': 'Dockerfile'}}]}}, {'stargazerCount': 0, 'forkCount': 0, 'isArchived': False, 'isFork': True, 'isPrivate': False, 'isTemplate': False, 'name': 'repo30', 'watchers': {'totalCount': 0}, 'languages': {'totalCount': 0, 'totalSize': 0, 'edges': []}}], 'pageInfo': {'hasNextPage': True, 'endCursor': 'Y3Vyc29yOnYyOpHOFvwXeA=='}}}}}, {'data': {'user': {'repositories': {'totalCount': 31, 'nodes': [{'stargazerCount': 0, 'forkCount': 0, 'isArchived': False, 'isFork': True, 'isPrivate': False, 'isTemplate': False, 'name': 'repo31', 'watchers': {'totalCount': 0}, 'languages': {'totalCount': 7, 'totalSize': 1415534, 'edges': [{'size': 998990, 'node': {'color': '#f1e05a', 'name': 'JavaScript'}}, {'size': 247728, 'node': {'color': '#2b7489', 'name': 'TypeScript'}}, {'size': 127643, 'node': {'color': '#e34c26', 'name': 'HTML'}}, {'size': 26509, 'node': {'color': '#c6538c', 'name': 'SCSS'}}, {'size': 5854, 'node': {'color': '#3572A5', 'name': 'Python'}}, {'size': 
5303, 'node': {'color': '#89e051', 'name': 'Shell'}}, {'size': 3507, 'node': {'color': '#384d54', 'name': 'Dockerfile'}}]}}], 'pageInfo': {'hasNextPage': False, 'endCursor': 'Y3Vyc29yOnYyOpHOFwfoDg=='}}}}}],
[{'data': {'user': {'watching': {'totalCount': 28, 'nodes': [{'isFork': False}, {'isFork': False}, {'isFork': False}, {'isFork': False}, {'isFork': False}, {'isFork': False}, {'isFork': False}, {'isFork': False}, {'isFork': False}, {'isFork': False}], 'pageInfo': {'hasNextPage': True, 'endCursor': 'MTA'}}}}}, {'data': {'user': {'watching': {'totalCount': 28, 'nodes': [{'isFork': False}, {'isFork': False}, {'isFork': False}, {'isFork': False}, {'isFork': False}, {'isFork': False}, {'isFork': False}, {'isFork': False}, {'isFork': False}, {'isFork': False}], 'pageInfo': {'hasNextPage': True, 'endCursor': 'MjA'}}}}}, {'data': {'user': {'watching': {'totalCount': 28, 'nodes': [{'isFork': False}, {'isFork': False}, {'isFork': False}, {'isFork': False}, {'isFork': False}, {'isFork': False}, {'isFork': False}, {'isFork': False}], 'pageInfo': {'hasNextPage': False, 'endCursor': 'Mjg'}}}}}],
{'data': {'user': {'year2021': {'totalCommitContributions': 1850, 'totalPullRequestReviewContributions': 223, 'restrictedContributionsCount': 105}, 'year2020': {'totalCommitContributions': 1845, 'totalPullRequestReviewContributions': 92, 'restrictedContributionsCount': 0}, 'year2019': {'totalCommitContributions': 194, 'totalPullRequestReviewContributions': 0, 'restrictedContributionsCount': 0}, 'year2018': {'totalCommitContributions': 198, 'totalPullRequestReviewContributions': 0, 'restrictedContributionsCount': 0}, 'year2017': {'totalCommitContributions': 177, 'totalPullRequestReviewContributions': 0, 'restrictedContributionsCount': 0}, 'year2016': {'totalCommitContributions': 138, 'totalPullRequestReviewContributions': 0, 'restrictedContributionsCount': 0}, 'year2015': {'totalCommitContributions': 0, 'totalPullRequestReviewContributions': 0, 'restrictedContributionsCount': 0}, 'year2014': {'totalCommitContributions': 0, 'totalPullRequestReviewContributions': 0, 'restrictedContributionsCount': 0}, 'year2013': {'totalCommitContributions': 0, 'totalPullRequestReviewContributions': 0, 'restrictedContributionsCount': 0}, 'year2012': {'totalCommitContributions': 0, 'totalPullRequestReviewContributions': 0, 'restrictedContributionsCount': 0}, 'year2011': {'totalCommitContributions': 0, 'totalPullRequestReviewContributions': 0, 'restrictedContributionsCount': 0}}}},
[{'data': {'user': {'topRepositories': {'totalCount': 34, 'nodes': [{'owner': {'login': 'someuser'}}, {'owner': {'login': 'someuser'}}, {'owner': {'login': 'someuser'}}, {'owner': {'login': 'someuser'}}, {'owner': {'login': 'someuser'}}, {'owner': {'login': 'someuser'}}, {'owner': {'login': 'someuser'}}, {'owner': {'login': 'someuser'}}, {'owner': {'login': 'someuser'}}, {'owner': {'login': 'someuser'}}], 'pageInfo': {'hasNextPage': True, 'endCursor': 'MTA'}}}}}, {'data': {'user': {'topRepositories': {'totalCount': 34, 'nodes': [{'owner': {'login': 'someuser'}}, {'owner': {'login': 'someuser'}}, {'owner': {'login': 'someuser'}}, {'owner': {'login': 'someuser'}}, {'owner': {'login': 'someuser'}}, {'owner': {'login': 'someuser'}}, {'owner': {'login': 'someuser'}}, {'owner': {'login': 'someuser'}}, {'owner': {'login': 'someuser'}}, {'owner': {'login': 'someUserA'}}], 'pageInfo': {'hasNextPage': True, 'endCursor': 'MjA'}}}}}, {'data': {'user': {'topRepositories': {'totalCount': 34, 'nodes': [{'owner': {'login': 'someuser'}}, {'owner': {'login': 'someuser'}}, {'owner': {'login': 'someuser'}}, {'owner': {'login': 'someUserA'}}, {'owner': {'login': 'someuser'}}, {'owner': {'login': 'someUserB'}}, {'owner': {'login': 'someuser'}}, {'owner': {'login': 'someUserC'}}, {'owner': {'login': 'someUserD'}}, {'owner': {'login': 'someUserE'}}], 'pageInfo': {'hasNextPage': True, 'endCursor': 'MzA'}}}}}, {'data': {'user': {'topRepositories': {'totalCount': 34, 'nodes': [{'owner': {'login': 'someuser'}}, {'owner': {'login': 'someuser'}}, {'owner': {'login': 'someUserA'}}, {'owner': {'login': 'someUserF'}}], 'pageInfo': {'hasNextPage': False, 'endCursor': 'MzQ'}}}}}]
]
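# The multi-page fixtures above mimic GitHub's GraphQL pagination: each page exposes
# pageInfo.hasNextPage and pageInfo.endCursor, and a consumer concatenates the nodes
# across pages. A hypothetical standalone helper illustrating that merge (this is
# not the Statistician implementation itself):
def merge_pages(pages, *keys):
    """Concatenate the 'nodes' lists found under data.user.<keys...> across pages."""
    merged = []
    for page in pages:
        node = page["data"]["user"]
        for key in keys:
            node = node[key]
        merged.extend(node["nodes"])
    return merged
# For example, merge_pages(executedQueryResultsMultiPage[2], "watching") collects all
# 28 watched-repository entries that are split across three pages above.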
class TestSomething(unittest.TestCase) :
def test_parseQueryResults(self) :
executedQueryResults = copy.deepcopy(executedQueryResultsOriginal)
class NoQueries(Statistician) :
def __init__(self, fail, autoLanguages, maxLanguages, languageRepoExclusions, featuredRepo) :
self._autoLanguages = autoLanguages
self._maxLanguages = maxLanguages if maxLanguages >= 1 else 1
self._languageRepoExclusions = languageRepoExclusions
self._featuredRepo = featuredRepo
self.parseStats(
executedQueryResults[0],
executedQueryResults[1],
executedQueryResults[2],
executedQueryResults[4]
)
self.parsePriorYearStats(executedQueryResults[3])
stats = NoQueries(True, False, 1000, set(), None)
self._validate(stats)
def test_parseQueryResultsMultiPage(self) :
executedQueryResults = copy.deepcopy(executedQueryResultsMultiPage)
class NoQueries(Statistician) :
def __init__(self, fail, autoLanguages, maxLanguages, languageRepoExclusions, featuredRepo) :
self._autoLanguages = autoLanguages
self._maxLanguages = maxLanguages if maxLanguages >= 1 else 1
self._languageRepoExclusions = languageRepoExclusions
self._featuredRepo = featuredRepo
self.parseStats(
executedQueryResults[0],
executedQueryResults[1],
executedQueryResults[2],
executedQueryResults[4]
)
self.parsePriorYearStats(executedQueryResults[3])
stats = NoQueries(True, False, 1000, set(), None)
self._validate(stats)
def test_parseQueryResultsSkipRepo(self) :
executedQueryResults = copy.deepcopy(executedQueryResultsOriginal)
class NoQueriesMultipage(Statistician) :
def __init__(self, fail, autoLanguages, maxLanguages, languageRepoExclusions, featuredRepo) :
self._autoLanguages = autoLanguages
self._maxLanguages = maxLanguages if maxLanguages >= 1 else 1
self._languageRepoExclusions = languageRepoExclusions
self._featuredRepo = featuredRepo
self.parseStats(
executedQueryResults[0],
executedQueryResults[1],
executedQueryResults[2],
executedQueryResults[4]
)
self.parsePriorYearStats(executedQueryResults[3])
stats = NoQueriesMultipage(True, False, 1000, {"repo29", "repoDoesntExist"}, None)
self._validate(stats, True)
def test_parseQueryResultsMultipageSkipRepo(self) :
executedQueryResults = copy.deepcopy(executedQueryResultsMultiPage)
class NoQueriesMultipage(Statistician) :
def __init__(self, fail, autoLanguages, maxLanguages, languageRepoExclusions, featuredRepo) :
self._autoLanguages = autoLanguages
self._maxLanguages = maxLanguages if maxLanguages >= 1 else 1
self._languageRepoExclusions = languageRepoExclusions
self._featuredRepo = featuredRepo
self.parseStats(
executedQueryResults[0],
executedQueryResults[1],
executedQueryResults[2],
executedQueryResults[4]
)
self.parsePriorYearStats(executedQueryResults[3])
stats = NoQueriesMultipage(True, False, 1000, {"repo29", "repoDoesntExist"}, None)
self._validate(stats, True)
def test_parseQueryResultsAllForks(self) :
executedQueryResults = copy.deepcopy(executedQueryResultsOriginal)
        # Change all repos to forks for this test case.
self._changeToAllForks(executedQueryResults)
class NoQueries(Statistician) :
def __init__(self, fail, autoLanguages, maxLanguages, languageRepoExclusions, featuredRepo) :
self._autoLanguages = autoLanguages
self._maxLanguages = maxLanguages if maxLanguages >= 1 else 1
self._languageRepoExclusions = languageRepoExclusions
self._featuredRepo = featuredRepo
self.parseStats(
executedQueryResults[0],
executedQueryResults[1],
executedQueryResults[2],
executedQueryResults[4]
)
self.parsePriorYearStats(executedQueryResults[3])
stats = NoQueries(True, False, 1000, set(), None)
self._validateAllForks(stats)
def test_color_themes(self) :
originalThemes = {"light", "dark", "dark-dimmed"}
for theme in originalThemes :
self._colorValidation(colorMapping[theme])
for theme in colorMapping :
if theme not in originalThemes :
self._colorValidation(colorMapping[theme])
def test_color_contrast_text_vs_bg(self) :
for theme, colors in colorMapping.items() :
crText = contrastRatio(colors["bg"], colors["text"])
crTitle = contrastRatio(colors["bg"], colors["title"])
self.assertTrue(crText >= 4.5, msg=theme+" "+str(crText))
self.assertTrue(crTitle >= 4.5, msg=theme+" "+str(crTitle))
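    # The 4.5 threshold above is the WCAG 2.x AA minimum contrast for normal-size
    # text; the ratio is defined as (L1 + 0.05) / (L2 + 0.05), where L1 and L2 are
    # the relative luminances of the lighter and darker color respectively.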
def test_title_templates(self) :
unlikelyInTemplate = "qwertyuiop"
try :
for locale in supportedLocales :
title = titleTemplates[locale].format(unlikelyInTemplate)
self.assertTrue(title.find(unlikelyInTemplate)>=0)
except IndexError :
self.fail()
def test_categories(self) :
categories = {"general", "repositories", "contributions", "languages"}
self.assertEqual(set(categoryOrder), categories)
statistics = {
"joined", "featured", "mostStarred", "mostForked", "followers", "following", "sponsors", "sponsoring",
"public", "starredBy",
"forkedBy", "watchedBy", "templates", "archived", "commits",
"issues", "prs", "reviews", "contribTo", "private"
}
# Make sure all are accounted for in a category
statKeys = { stat for cat in categoryOrder for stat in statsByCategory[cat]}
self.assertEqual(statistics, statKeys)
        # Make sure no statistic appears in more than one category
numStats = sum(len(statsByCategory[cat]) for cat in categoryOrder)
self.assertEqual(numStats, len(statistics))
def test_category_labels(self) :
categories = categoryOrder
types = {"heading", "column-one", "column-two"}
for locale in supportedLocales :
self.assertTrue(locale in categoryLabels)
labelMap = categoryLabels[locale]
for cat in categories :
self.assertTrue(cat in labelMap)
for t in types :
self.assertTrue(t in labelMap[cat])
def test_stat_labels(self) :
keys = {
"joined", "featured", "mostStarred", "mostForked",
"followers", "following", "sponsors", "sponsoring",
"public", "starredBy",
"forkedBy", "watchedBy", "templates", "archived", "commits",
"issues", "prs", "reviews", "contribTo", "private"
}
self.assertTrue(all(k in statLabels for k in keys))
for k in keys :
self.assertTrue("icon" in statLabels[k])
self.assertTrue(statLabels[k]["icon"].startswith("<path "))
self.assertTrue(statLabels[k]["icon"].endswith("/>"))
labelsByLocale = statLabels[k]["label"]
for locale in supportedLocales :
self.assertTrue(locale in labelsByLocale)
def test_isValidColor(self) :
for colorName, colorHex in _namedColors.items() :
self.assertTrue(isValidColor(colorHex))
self.assertTrue(isValidColor(colorName))
def test_highContrastingColor(self) :
        # There is no good way to verify this automatically. This test method builds
        # an SVG (written to standard out when the print statement below is
        # uncommented) containing one rectangle per named color, with the color's
        # name rendered in the computed high-contrast color. It is a one-time visual
        # check: rerun it only if the highContrastingColor code changes.
rows = ["""<svg width="130" height="{0}" viewBox="0 0 130 {0}" xmlns="http://www.w3.org/2000/svg">""".format(str(len(_namedColors)*20))]
templateRect = """<rect width="130" height="20" fill="{0}" x="0" y="{1}" />"""
templateText = """<text font-size="14" x="15" y="{2}" fill="{1}">{0}</text>"""
y = 0
for c in _namedColors :
rows.append(templateRect.format(c, str(y)))
rows.append(templateText.format(c, highContrastingColor(c), str(y+12.5)))
y += 20
rows.append("</svg>")
# Uncomment me and pipe to colorTest.svg
#print("\n".join(rows))
def test_TextLength(self) :
        # The text lengths of "coverage" and "branches" are known from another
        # project, so we use them as test cases here.
self.assertEqual(510, calculateTextLength110("coverage"))
self.assertEqual(507, calculateTextLength110("branches"))
self.assertAlmostEqual(51.0, calculateTextLength("coverage", 11, False, 400))
self.assertAlmostEqual(50.7, calculateTextLength("branches", 11, False, 400))
self.assertAlmostEqual(510, calculateTextLength("coverage", 146 + 2/3, True, 400))
self.assertAlmostEqual(507, calculateTextLength("branches", 146 + 2/3, True, 400))
self.assertAlmostEqual(51.0, calculateTextLength("coverage", 14 + 2/3, True, 400))
self.assertAlmostEqual(50.7, calculateTextLength("branches", 14 + 2/3, True, 400))
self.assertAlmostEqual(76.5, calculateTextLength("coverage", 11, False, 600))
self.assertAlmostEqual(76.05, calculateTextLength("branches", 11, False, 600))
self.assertAlmostEqual(765, calculateTextLength("coverage", 146 + 2/3, True, 600))
self.assertAlmostEqual(760.5, calculateTextLength("branches", 146 + 2/3, True, 600))
self.assertAlmostEqual(76.5, calculateTextLength("coverage", 14 + 2/3, True, 600))
self.assertAlmostEqual(76.05, calculateTextLength("branches", 14 + 2/3, True, 600))
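    # The expected values above are consistent with a simple scaling model: the
    # length at an arbitrary size looks like calculateTextLength110(text) * size/110,
    # with the boolean flag treating the size as pixels (converted to points by the
    # usual 0.75 factor) and weights of 600 and up widened by a factor of 1.5. This
    # reading is inferred from the assertions, not from the TextLength source.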
def test_generateSVG(self) :
executedQueryResults = copy.deepcopy(executedQueryResultsOriginal)
        # UNCOMMENT to generate the SVG for a user who only owns forks, which should
        # have no repo stats, no languages chart, no most-starred, and no most-forked.
        # self._changeToAllForks(executedQueryResults)
class NoQueries(Statistician) :
def __init__(self, fail, autoLanguages, maxLanguages, languageRepoExclusions, featuredRepo) :
self._autoLanguages = autoLanguages
self._maxLanguages = maxLanguages if maxLanguages >= 1 else 1
self._languageRepoExclusions = languageRepoExclusions
self._featuredRepo = featuredRepo
self.parseStats(
executedQueryResults[0],
executedQueryResults[1],
executedQueryResults[2],
executedQueryResults[4]
)
self.parsePriorYearStats(executedQueryResults[3])
stats = NoQueries(True, False, 10, set(), "FavoriteRepo")
#categories = ["general", "repositories", "languages", "contributions"]
categories = categoryOrder[:]
svgGen = StatsImageGenerator(
stats,
copy.deepcopy(colorMapping["dark"]),
"en",
6,
18,
categories,
True,
10,
            0, # Doesn't matter, since the image will autosize
None,
True,
{}
)
image = svgGen.generateImage()
        #UNCOMMENT to write the generated SVG to a file during a run of the tests
#writeImageToFile("testing.svg", image, False)
def _colorValidation(self, theme) :
props = {"bg", "border", "icons", "text", "title"}
validHexDigits = set("0123456789abcdefABCDEF")
for p in props :
color = theme[p]
self.assertTrue(isValidColor(color))
def _validate(self, stats, skip=False) :
self.assertEqual("repo23", stats._user["mostStarred"][0])
self.assertEqual("repo23", stats._user["mostForked"][0])
self.assertEqual(2011, stats._user["joined"][0])
self.assertEqual(9, stats._user["followers"][0])
self.assertEqual(7, stats._user["following"][0])
self.assertEqual(7, stats._user["sponsors"][0])
self.assertEqual(5, stats._user["sponsoring"][0])
self.assertEqual(29, stats._repo["public"][0])
self.assertEqual(31, stats._repo["public"][1])
self.assertEqual(36, stats._repo["starredBy"][0])
self.assertEqual(36, stats._repo["starredBy"][1])
self.assertEqual(28, stats._repo["forkedBy"][0])
self.assertEqual(28, stats._repo["forkedBy"][1])
self.assertEqual(3, stats._repo["watchedBy"][0])
self.assertEqual(3, stats._repo["watchedBy"][1])
self.assertEqual(2, stats._repo["archived"][0])
self.assertEqual(2, stats._repo["archived"][1])
self.assertEqual(1, stats._repo["templates"][0])
self.assertEqual(1, stats._repo["templates"][1])
self.assertEqual(3602, stats._contrib["commits"][0])
self.assertEqual(4402, stats._contrib["commits"][1])
self.assertEqual(79, stats._contrib["issues"][0])
self.assertEqual(81, stats._contrib["issues"][1])
self.assertEqual(289, stats._contrib["prs"][0])
self.assertEqual(289, stats._contrib["prs"][1])
self.assertEqual(315, stats._contrib["reviews"][0])
self.assertEqual(315, stats._contrib["reviews"][1])
self.assertEqual(3, stats._contrib["contribTo"][0])
self.assertEqual(8, stats._contrib["contribTo"][1])
self.assertEqual(105, stats._contrib["private"][0])
self.assertEqual(105, stats._contrib["private"][1])
if skip :
self._validateLanguagesSkip(stats)
else :
self._validateLanguages(stats)
def _validateAllForks(self, stats) :
self.assertTrue("mostStarred" not in stats._user)
self.assertTrue("mostForked" not in stats._user)
self.assertEqual(2011, stats._user["joined"][0])
self.assertEqual(9, stats._user["followers"][0])
self.assertEqual(7, stats._user["following"][0])
self.assertEqual(7, stats._user["sponsors"][0])
self.assertEqual(5, stats._user["sponsoring"][0])
self.assertEqual(0, stats._repo["public"][0])
self.assertEqual(31, stats._repo["public"][1])
self.assertEqual(0, stats._repo["starredBy"][0])
self.assertEqual(36, stats._repo["starredBy"][1])
self.assertEqual(0, stats._repo["forkedBy"][0])
self.assertEqual(28, stats._repo["forkedBy"][1])
self.assertEqual(0, stats._repo["watchedBy"][0])
self.assertEqual(3, stats._repo["watchedBy"][1])
self.assertEqual(0, stats._repo["archived"][0])
self.assertEqual(2, stats._repo["archived"][1])
self.assertEqual(0, stats._repo["templates"][0])
self.assertEqual(1, stats._repo["templates"][1])
self.assertEqual(3602, stats._contrib["commits"][0])
self.assertEqual(4402, stats._contrib["commits"][1])
self.assertEqual(79, stats._contrib["issues"][0])
self.assertEqual(81, stats._contrib["issues"][1])
self.assertEqual(289, stats._contrib["prs"][0])
self.assertEqual(289, stats._contrib["prs"][1])
self.assertEqual(315, stats._contrib["reviews"][0])
self.assertEqual(315, stats._contrib["reviews"][1])
self.assertEqual(3, stats._contrib["contribTo"][0])
self.assertEqual(8, stats._contrib["contribTo"][1])
self.assertEqual(105, stats._contrib["private"][0])
self.assertEqual(105, stats._contrib["private"][1])
self.assertEqual(0, stats._languages["totalSize"])
self.assertEqual(0, len(stats._languages["languages"]))
def _validateLanguages(self, stats) :
total = 5222379
self.assertEqual(total, stats._languages["totalSize"])
self.assertEqual(11, len(stats._languages["languages"]))
expectedLanguages = [
"Java",
"HTML",
"Python",
"TeX",
"Dockerfile",
"Makefile",
"Shell",
"GraphQL",
"CSS",
"JavaScript",
"Batchfile"
]
expectedColors = [
'#b07219',
'#e34c26',
'#3572A5',
'#3D6117',
'#384d54',
'#427819',
'#89e051',
'#e10098',
'#563d7c',
'#f1e05a',
'#C1F12E'
]
expectedSize = [3385976, 1343301, 274535, 202824, 5448, 4442, 1926, 1902, 1721, 204, 100]
for i, L in enumerate(stats._languages["languages"]) :
self.assertEqual(expectedLanguages[i], L[0])
self.assertEqual(expectedColors[i], L[1]["color"])
self.assertEqual(expectedSize[i], L[1]["size"])
self.assertAlmostEqual(expectedSize[i]/total, L[1]["percentage"], places=5)
def _validateLanguagesSkip(self, stats) :
total = 5147159
self.assertEqual(total, stats._languages["totalSize"])
self.assertEqual(10, len(stats._languages["languages"]))
expectedLanguages = [
"Java",
"HTML",
"TeX",
"Python",
"Dockerfile",
"Makefile",
"Shell",
"CSS",
"JavaScript",
"Batchfile"
]
expectedColors = [
'#b07219',
'#e34c26',
'#3D6117',
'#3572A5',
'#384d54',
'#427819',
'#89e051',
'#563d7c',
'#f1e05a',
'#C1F12E'
]
expectedSize = [3385976, 1343301, 202824, 201574, 5091, 4442, 1926, 1721, 204, 100]
for i, L in enumerate(stats._languages["languages"]) :
self.assertEqual(expectedLanguages[i], L[0])
self.assertEqual(expectedColors[i], L[1]["color"])
self.assertEqual(expectedSize[i], L[1]["size"])
self.assertAlmostEqual(expectedSize[i]/total, L[1]["percentage"], places=5)
def _changeToAllForks(self, queryResults) :
for repo in queryResults[1][0]["data"]["user"]["repositories"]["nodes"] :
repo["isFork"] = True
for repo in queryResults[4][0]["data"]["user"]["topRepositories"]["nodes"] :
repo["isFork"] = True
for repo in queryResults[2][0]["data"]["user"]["watching"]["nodes"] :
repo["isFork"] = True
| 107.993644 | 10,867 | 0.616071 | 4,864 | 50,973 | 6.422903 | 0.125617 | 0.04014 | 0.054288 | 0.055696 | 0.834448 | 0.820428 | 0.794341 | 0.774719 | 0.765308 | 0.765308 | 0 | 0.060587 | 0.169129 | 50,973 | 471 | 10,868 | 108.22293 | 0.677064 | 0.043435 | 0 | 0.541451 | 0 | 0.007772 | 0.364715 | 0.053313 | 0 | 0 | 0 | 0 | 0.277202 | 1 | 0.069948 | false | 0 | 0.025907 | 0 | 0.11399 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
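
# The contrast assertions in test_color_contrast_text_vs_bg above depend on a
# contrastRatio helper from ColorUtil. A self-contained sketch of the standard
# WCAG 2.x formulation it presumably follows (the well-known definition, not the
# project's own code):
def _linearize(c):
    # sRGB linearization of a single channel value in [0, 1].
    return c / 12.92 if c <= 0.03928 else ((c + 0.055) / 1.055) ** 2.4

def relative_luminance(rgb):
    r, g, b = (_linearize(v / 255) for v in rgb)
    return 0.2126 * r + 0.7152 * g + 0.0722 * b

def contrast_ratio(rgb1, rgb2):
    lighter, darker = sorted(
        (relative_luminance(rgb1), relative_luminance(rgb2)), reverse=True
    )
    return (lighter + 0.05) / (darker + 0.05)

print(round(contrast_ratio((0, 0, 0), (255, 255, 255)), 2))  # black on white -> 21.0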
02e5451fe9e7516154c2706c381cfb2f4f002ffd | 1,391 | py | Python | testfile/test_utils.py | lhliew/flood-warning | 234bb3f7ec7174fc91963d8b7e64df1893694e1b | [
"MIT"
] | null | null | null | testfile/test_utils.py | lhliew/flood-warning | 234bb3f7ec7174fc91963d8b7e64df1893694e1b | [
"MIT"
] | null | null | null | testfile/test_utils.py | lhliew/flood-warning | 234bb3f7ec7174fc91963d8b7e64df1893694e1b | [
"MIT"
] | null | null | null | """Unit test for the utils module"""
import pytest
import floodsystem.utils
def test_sort():
"""Test sort container by specific index"""
a = (10, 3, 3)
b = (5, 1, -1)
c = (1, -3, 4)
list0 = (a, b, c)
# Test sort on 1st entry
list1 = floodsystem.utils.sorted_by_key(list0, 0)
assert list1[0] == c
assert list1[1] == b
assert list1[2] == a
# Test sort on 2nd entry
list1 = floodsystem.utils.sorted_by_key(list0, 1)
assert list1[0] == c
assert list1[1] == b
assert list1[2] == a
# Test sort on 3rd entry
list1 = floodsystem.utils.sorted_by_key(list0, 2)
assert list1[0] == b
assert list1[1] == a
assert list1[2] == c
def test_reverse_sort():
"""Test sort container by specific index (reverse)"""
a = (10, 3, 3)
b = (5, 1, -1)
c = (1, -3, 4)
list0 = (a, b, c)
# Test sort on 1st entry
list1 = floodsystem.utils.sorted_by_key(list0, 0, reverse=True)
assert list1[0] == a
assert list1[1] == b
assert list1[2] == c
# Test sort on 2nd entry
list1 = floodsystem.utils.sorted_by_key(list0, 1, reverse=True)
assert list1[0] == a
assert list1[1] == b
assert list1[2] == c
# Test sort on 3rd entry
list1 = floodsystem.utils.sorted_by_key(list0, 2, reverse=True)
assert list1[0] == c
assert list1[1] == a
assert list1[2] == b
| 23.576271 | 67 | 0.588785 | 219 | 1,391 | 3.671233 | 0.16895 | 0.246269 | 0.074627 | 0.19403 | 0.873134 | 0.859453 | 0.859453 | 0.691542 | 0.691542 | 0.691542 | 0 | 0.081188 | 0.273904 | 1,391 | 58 | 68 | 23.982759 | 0.714851 | 0.183321 | 0 | 0.666667 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.5 | 1 | 0.055556 | false | 0 | 0.055556 | 0 | 0.111111 | 0 | 0 | 0 | 0 | null | 1 | 0 | 1 | 1 | 1 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
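
# The assertions above fully pin down the behavior of floodsystem.utils.sorted_by_key:
# sort a sequence of tuples by the entry at a given index, optionally in reverse. A
# minimal implementation consistent with those tests (a sketch, not necessarily the
# project's exact code):
def sorted_by_key(x, i, reverse=False):
    """Return the entries of x sorted by the value at index i of each entry."""
    return sorted(x, key=lambda entry: entry[i], reverse=reverse)

# e.g. sorted_by_key([(10, 3), (5, 1), (1, -3)], 0) -> [(1, -3), (5, 1), (10, 3)]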
b82ced24d65ac18cfa708bf5acd64ec376630f28 | 7,479 | py | Python | template_creator/tests/test_lambda_writer.py | VanOvermeire/sam-template-creator | 0b39440c9051ccd30fc80bfa2e4d7da40c7e50b7 | [
"MIT"
] | 3 | 2019-06-10T19:46:23.000Z | 2021-05-06T12:15:45.000Z | template_creator/tests/test_lambda_writer.py | VanOvermeire/sam-template-creator | 0b39440c9051ccd30fc80bfa2e4d7da40c7e50b7 | [
"MIT"
] | 2 | 2019-10-20T14:57:50.000Z | 2020-01-01T00:52:32.000Z | template_creator/tests/test_lambda_writer.py | VanOvermeire/sam-template-creator | 0b39440c9051ccd30fc80bfa2e4d7da40c7e50b7 | [
"MIT"
] | 2 | 2019-10-19T07:40:53.000Z | 2019-10-19T08:29:40.000Z | import unittest
from template_creator.writer import lambda_writer
class TestLambdaWriter(unittest.TestCase):
def test_find_existing_env_variables(self):
first_resource = {'Type': 'AWS::Serverless::Function', 'Properties': {'Environment': {'Variables': {'BUCKET_NAME': 'my-own-bucket-name'}}}}
second_resource = {'Type': 'AWS::Serverless::Function', 'Properties': {}}
third_resource = {'Type': 'AWS::Role'}
fourth_resource = {'Type': 'AWS::Serverless::Function', 'Properties': {'Environment': {'Variables': {'BUCKET_NAME': 'my-own-bucket-name'}}}}
fifth_resource = {'Type': 'AWS::Serverless::Function', 'Properties': {'Environment': {'Variables': {'SNS_TOPIC': 'sns-topic-name'}}}}
template_dict = {'Resources': {'First': first_resource, 'Second': second_resource, 'Third': third_resource, 'Fourth': fourth_resource, 'Fifth': fifth_resource}}
result = lambda_writer.find_existing_env_var_values(template_dict)
self.assertDictEqual(result, {'BUCKET_NAME': 'my-own-bucket-name', 'SNS_TOPIC': 'sns-topic-name'})
def test_create_role_adds_correct_permissions(self):
name, role = lambda_writer.create_role('HelloWorld', ['dynamodb:*', 's3:*'])
self.assertEqual(name, 'HelloWorldRole')
self.assertEqual(role['Type'], 'AWS::IAM::Role')
self.assertEqual(role['Properties']['Policies'][0]['PolicyDocument']['Statement'][0]['Action'], ['logs:CreateLogStream', 'logs:CreateLogGroup', 'logs:PutLogEvents', 'dynamodb:*', 's3:*'])
def test_create_role_name(self):
result = lambda_writer.create_role_name('HelloWorldLambda')
self.assertEqual(result, 'HelloWorldLambdaRole')
def test_create_event_name(self):
result = lambda_writer.create_event_name('HelloLambda', 'DynamoDB')
self.assertEqual(result, 'HelloLambdaDynamoDBEvent')
def test_create_lambda_function(self):
result = lambda_writer.create_lambda_function('HelloLambda', 'file.handler', 'uridir', ['BUCKET'], ['S3'], [], {})
self.assertEqual(result['Type'], 'AWS::Serverless::Function')
properties = result['Properties']
self.assertEqual(properties['CodeUri'], 'uridir')
self.assertEqual(properties['Handler'], 'file.handler')
self.assertEqual(properties['Role']['Fn::GetAtt'], ['HelloLambdaRole', 'Arn'])
self.assertEqual(properties['Environment']['Variables']['BUCKET'], 'Fill in value or delete if not needed')
self.assertEqual(properties['Events']['HelloLambdaS3Event']['Type'], 'S3')
self.assertEqual(properties['Events']['HelloLambdaS3Event']['Properties']['Bucket']['Ref'], 'S3EventBucket')
self.assertEqual(properties['Events']['HelloLambdaS3Event']['Properties']['Events'], 's3:ObjectCreated:*')
def test_create_lambda_function_existing_value(self):
first_resource = {'Type': 'AWS::Serverless::Function', 'Properties': {'Environment': {'Variables': {'BUCKET': 'my-own-bucket-name'}}}}
existing_template_dict = {'Resources': {'First': first_resource}}
result = lambda_writer.create_lambda_function('HelloLambda', 'file.handler', 'uridir', ['BUCKET'], ['S3'], [], existing_template_dict)
self.assertEqual(result['Type'], 'AWS::Serverless::Function')
properties = result['Properties']
self.assertEqual(properties['CodeUri'], 'uridir')
self.assertEqual(properties['Handler'], 'file.handler')
self.assertEqual(properties['Role']['Fn::GetAtt'], ['HelloLambdaRole', 'Arn'])
self.assertEqual(properties['Environment']['Variables']['BUCKET'], 'my-own-bucket-name')
self.assertEqual(properties['Events']['HelloLambdaS3Event']['Type'], 'S3')
self.assertEqual(properties['Events']['HelloLambdaS3Event']['Properties']['Bucket']['Ref'], 'S3EventBucket')
self.assertEqual(properties['Events']['HelloLambdaS3Event']['Properties']['Events'], 's3:ObjectCreated:*')
def test_create_lambda_function_with_rate_schedule_event(self):
result = lambda_writer.create_lambda_function('HelloLambda', 'file.handler', 'uridir', [], ['Schedule:2 hours'], [], {})
self.assertEqual(result['Type'], 'AWS::Serverless::Function')
properties = result['Properties']
self.assertEqual(properties['CodeUri'], 'uridir')
self.assertEqual(properties['Handler'], 'file.handler')
self.assertEqual(properties['Role']['Fn::GetAtt'], ['HelloLambdaRole', 'Arn'])
self.assertEqual(properties['Events']['HelloLambdaScheduleEvent']['Type'], 'Schedule')
self.assertEqual(properties['Events']['HelloLambdaScheduleEvent']['Properties']['Schedule'], 'rate(2 hours)')
def test_create_lambda_function_no_events(self):
result = lambda_writer.create_lambda_function('HelloLambda', 'file.handler', 'uridir', ['BUCKET'], [], [], {})
self.assertEqual(result['Type'], 'AWS::Serverless::Function')
properties = result['Properties']
self.assertEqual(properties['CodeUri'], 'uridir')
self.assertEqual(properties['Handler'], 'file.handler')
self.assertEqual(properties['Role']['Fn::GetAtt'], ['HelloLambdaRole', 'Arn'])
self.assertEqual(properties['Environment']['Variables']['BUCKET'], 'Fill in value or delete if not needed')
self.assertFalse('Events' in properties)
def test_create_lambda_function_no_variables(self):
result = lambda_writer.create_lambda_function('HelloLambda', 'file.handler', 'uridir', [], ['S3'], [], {})
self.assertEqual(result['Type'], 'AWS::Serverless::Function')
properties = result['Properties']
self.assertEqual(properties['CodeUri'], 'uridir')
self.assertEqual(properties['Handler'], 'file.handler')
self.assertEqual(properties['Role']['Fn::GetAtt'], ['HelloLambdaRole', 'Arn'])
self.assertEqual(properties['Events']['HelloLambdaS3Event']['Type'], 'S3')
self.assertEqual(properties['Events']['HelloLambdaS3Event']['Properties']['Bucket']['Ref'], 'S3EventBucket')
self.assertEqual(properties['Events']['HelloLambdaS3Event']['Properties']['Events'], 's3:ObjectCreated:*')
self.assertFalse('Environment' in properties)
def test_create_lambda_function_no_variables_or_events(self):
result = lambda_writer.create_lambda_function('HelloLambda', 'file.handler', 'uridir', [], [], [], {})
self.assertEqual(result['Type'], 'AWS::Serverless::Function')
properties = result['Properties']
self.assertEqual(properties['CodeUri'], 'uridir')
self.assertEqual(properties['Handler'], 'file.handler')
self.assertEqual(properties['Role']['Fn::GetAtt'], ['HelloLambdaRole', 'Arn'])
self.assertFalse('Events' in properties)
self.assertFalse('Environment' in properties)
def test_create_lambda_function_api(self):
result = lambda_writer.create_lambda_function('HelloLambda', 'file.handler', 'uridir', [], [], ['get', '/hello/world'], {})
self.assertEqual(result['Type'], 'AWS::Serverless::Function')
properties = result['Properties']
self.assertEqual(properties['CodeUri'], 'uridir')
self.assertEqual(properties['Handler'], 'file.handler')
self.assertEqual(properties['Events']['GET']['Type'], 'Api')
self.assertEqual(properties['Events']['GET']['Properties']['Path'], '/hello/world')
self.assertEqual(properties['Events']['GET']['Properties']['Method'], 'get')
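# For context, the name helpers exercised above are assumed to be simple
# string builders along these lines (a hypothetical sketch inferred from the
# assertions, not the real template_creator.writer.lambda_writer module):
#
#     def create_role_name(lambda_name):
#         return lambda_name + 'Role'
#
#     def create_event_name(lambda_name, event_type):
#         return lambda_name + event_type + 'Event'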
| 54.195652 | 195 | 0.673218 | 731 | 7,479 | 6.737346 | 0.138167 | 0.149239 | 0.187817 | 0.088122 | 0.807513 | 0.766701 | 0.700305 | 0.700305 | 0.681421 | 0.669036 | 0 | 0.004208 | 0.142131 | 7,479 | 137 | 196 | 54.591241 | 0.763404 | 0 | 0 | 0.521277 | 0 | 0 | 0.335919 | 0.049746 | 0 | 0 | 0 | 0 | 0.574468 | 1 | 0.117021 | false | 0 | 0.021277 | 0 | 0.148936 | 0 | 0 | 0 | 0 | null | 0 | 1 | 0 | 1 | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
b838b38c313a54feab30d196c1413356309649cf | 143 | py | Python | melp/__init__.py | estein05/melp-1 | a35041e32874f50f6b6131dfcf0803ac014d24fa | [
"MIT"
] | null | null | null | melp/__init__.py | estein05/melp-1 | a35041e32874f50f6b6131dfcf0803ac014d24fa | [
"MIT"
] | null | null | null | melp/__init__.py | estein05/melp-1 | a35041e32874f50f6b6131dfcf0803ac014d24fa | [
"MIT"
] | null | null | null | from melp.tilehitangle import *
from melp.tilehitrate import *
from melp.multithreading import *
# libs
from melp.libs.helices import *
| 20.428571 | 34 | 0.755245 | 18 | 143 | 6 | 0.444444 | 0.296296 | 0.259259 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.174825 | 143 | 6 | 35 | 23.833333 | 0.915254 | 0.027972 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
b88b9133b4679d1c6293d5d8ae5ff73df8d4686d | 130 | py | Python | endktheme/style.py | SimonLarsen/python-endktheme | 2c942a682eb258f1d8b5fee80b93b9565a7ebde7 | [
"MIT"
] | null | null | null | endktheme/style.py | SimonLarsen/python-endktheme | 2c942a682eb258f1d8b5fee80b93b9565a7ebde7 | [
"MIT"
] | null | null | null | endktheme/style.py | SimonLarsen/python-endktheme | 2c942a682eb258f1d8b5fee80b93b9565a7ebde7 | [
"MIT"
] | null | null | null | """General styling definitions."""
from typing import List
def font_family() -> List[str]:
return ["Calibri", "sans-serif"]
| 18.571429 | 36 | 0.676923 | 16 | 130 | 5.4375 | 0.9375 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.153846 | 130 | 6 | 37 | 21.666667 | 0.790909 | 0.215385 | 0 | 0 | 0 | 0 | 0.177083 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.333333 | true | 0 | 0.333333 | 0.333333 | 1 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 1 | 1 | 1 | 0 | 0 | 7 |
b89f971d466b683d7f115b026fb88ea493cb6201 | 137 | py | Python | Codes/optimization/src/simulation/__init__.py | dychen24/magx | 3d72cfa447bcab050e97ee517b1688ef99dd480d | [
"MIT"
] | 7 | 2021-11-16T06:01:41.000Z | 2022-03-30T21:09:14.000Z | Codes/optimization/src/simulation/__init__.py | dychen24/magx | 3d72cfa447bcab050e97ee517b1688ef99dd480d | [
"MIT"
] | null | null | null | Codes/optimization/src/simulation/__init__.py | dychen24/magx | 3d72cfa447bcab050e97ee517b1688ef99dd480d | [
"MIT"
] | 2 | 2021-08-18T09:21:35.000Z | 2021-08-19T03:43:33.000Z | from .generate_fake_data import Simu_Test, Simu_Data, expression, simulate_2mag_3type, simulate_2mag_3type_test, simulate_2mag_3type_box
| 68.5 | 136 | 0.890511 | 21 | 137 | 5.238095 | 0.571429 | 0.327273 | 0.463636 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.046875 | 0.065693 | 137 | 1 | 137 | 137 | 0.8125 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 9 |
b284f8cabd89367d44b9eea14d15efce8aed25c8 | 5,231 | py | Python | DFL.py | Ien001/non-random-initialization-for-DFL-CNN | f5982a2f3679970de122ec9d5a2389e11385d0eb | [
"MIT"
] | 17 | 2019-01-15T08:04:43.000Z | 2021-09-03T22:55:32.000Z | DFL.py | Ien001/non-random-initialization-for-DFL-CNN | f5982a2f3679970de122ec9d5a2389e11385d0eb | [
"MIT"
] | 8 | 2019-01-17T15:12:02.000Z | 2020-10-28T07:22:36.000Z | DFL.py | Ien001/non-random-initialization-for-DFL-CNN | f5982a2f3679970de122ec9d5a2389e11385d0eb | [
"MIT"
] | 4 | 2019-01-21T06:58:40.000Z | 2019-07-31T07:42:22.000Z | import torch
import torch.nn as nn
import torch.nn.functional as F
import torchvision
class DFL_VGG16(nn.Module):
def __init__(self, k = 10, nclass = 200):
super(DFL_VGG16, self).__init__()
self.k = k
self.nclass = nclass
# k channels for one class, nclass is total classes, therefore k * nclass for conv6
vgg16featuremap = torchvision.models.vgg16_bn(pretrained=True).features
conv1_conv4 = torch.nn.Sequential(*list(vgg16featuremap.children())[:-11])
conv5 = torch.nn.Sequential(*list(vgg16featuremap.children())[-11:])
conv6 = torch.nn.Conv2d(512, k * nclass, kernel_size = 1, stride = 1, padding = 0)
pool6 = torch.nn.MaxPool2d((56, 56), stride = (56, 56), return_indices = True)
# Feature extraction root
self.conv1_conv4 = conv1_conv4
# G-Stream
self.conv5 = conv5
self.cls5 = nn.Sequential(
			nn.Conv2d(512, nclass, kernel_size=1, stride = 1, padding = 0),
			nn.BatchNorm2d(nclass),
nn.ReLU(True),
nn.AdaptiveAvgPool2d((1,1)),
)
# P-Stream
self.conv6 = conv6
self.pool6 = pool6
self.cls6 = nn.Sequential(
nn.Conv2d(k * nclass, nclass, kernel_size = 1, stride = 1, padding = 0),
nn.AdaptiveAvgPool2d((1,1)),
)
# Side-branch
self.cross_channel_pool = nn.AvgPool1d(kernel_size = k, stride = k, padding = 0)
def forward(self, x):
batchsize = x.size(0)
		# Stem: Feature extraction
inter4 = self.conv1_conv4(x)
# G-stream
x_g = self.conv5(inter4)
out1 = self.cls5(x_g)
out1 = out1.view(batchsize, -1)
		# P-stream, indices is for visualization
x_p = self.conv6(inter4)
x_p, indices = self.pool6(x_p)
inter6 = x_p
out2 = self.cls6(x_p)
out2 = out2.view(batchsize, -1)
# Side-branch
inter6 = inter6.view(batchsize, -1, self.k * self.nclass)
out3 = self.cross_channel_pool(inter6)
out3 = out3.view(batchsize, -1)
return out1, out2, out3, indices
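# Shape walk-through for DFL_VGG16 with the default k=10, nclass=200 and a
# 448x448 RGB input (a sketch of the intended sizes, not asserted anywhere
# in the code):
#   conv1_conv4: (B, 3, 448, 448) -> (B, 512, 56, 56)
#   G-stream:    conv5 + cls5 -> out1 of shape (B, nclass)
#   P-stream:    conv6 -> (B, k*nclass, 56, 56); pool6 -> (B, k*nclass, 1, 1);
#                cls6 -> out2 of shape (B, nclass)
#   Side-branch: averages every k consecutive channels of the pooled
#                P-stream map -> out3 of shape (B, nclass)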
class DFL_ResNet_for_sample(nn.Module):
def __init__(self, k = 10, nclass = 200):
super(DFL_ResNet_for_sample, self).__init__()
self.k = k
self.nclass = nclass
# k channels for one class, nclass is total classes, therefore k * nclass for conv6
resnet50 = torchvision.models.resnet50(pretrained=True)
# conv1_conv4
layers_conv1_conv4 = [
resnet50.conv1,
resnet50.bn1,
resnet50.relu,
resnet50.maxpool,
]
for i in range(3):
name = 'layer%d' % (i + 1)
layers_conv1_conv4.append(getattr(resnet50, name))
conv1_conv4 = torch.nn.Sequential(*layers_conv1_conv4)
self.conv1_conv4 = conv1_conv4
def forward(self, x):
batchsize = x.size(0)
# Stem: Feature extraction
inter4 = self.conv1_conv4(x)
center = torch.norm(inter4.norm(2,0),2,0).mean()
return center
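# Note on DFL_ResNet_for_sample: `center` collapses the conv4 feature map with
# an L2 norm over the batch dimension, another L2 norm over the channel
# dimension, and a mean over the spatial grid, yielding one feature-energy
# scalar. Given the repository's focus on non-random initialization, this
# statistic presumably ranks candidate patches; that reading is inferred from
# the code, not documented.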
class DFL_ResNet(nn.Module):
def __init__(self, k = 10, nclass = 200):
super(DFL_ResNet, self).__init__()
self.k = k
self.nclass = nclass
# k channels for one class, nclass is total classes, therefore k * nclass for conv6
resnet50 = torchvision.models.resnet50(pretrained=True)
# conv1_conv4
layers_conv1_conv4 = [
resnet50.conv1,
resnet50.bn1,
resnet50.relu,
resnet50.maxpool,
]
for i in range(3):
name = 'layer%d' % (i + 1)
layers_conv1_conv4.append(getattr(resnet50, name))
conv1_conv4 = torch.nn.Sequential(*layers_conv1_conv4)
# conv5
layers_conv5 = []
layers_conv5.append(getattr(resnet50, 'layer4'))
conv5 = torch.nn.Sequential(*layers_conv5)
conv6 = torch.nn.Conv2d(1024, k * nclass, kernel_size = 1, stride = 1, padding = 0)
pool6 = torch.nn.MaxPool2d((28, 28), stride = (28, 28), return_indices = True)
# Feature extraction root
self.conv1_conv4 = conv1_conv4
# G-Stream
self.conv5 = conv5
self.cls5 = nn.Sequential(
			nn.Conv2d(2048, nclass, kernel_size=1, stride = 1, padding = 0),
			nn.BatchNorm2d(nclass),
nn.ReLU(True),
nn.AdaptiveAvgPool2d((1,1)),
)
# P-Stream
self.conv6 = conv6
self.pool6 = pool6
self.cls6 = nn.Sequential(
nn.Conv2d(k * nclass, nclass, kernel_size = 1, stride = 1, padding = 0),
nn.AdaptiveAvgPool2d((1,1)),
)
# Side-branch
self.cross_channel_pool = nn.AvgPool1d(kernel_size = k, stride = k, padding = 0)
	def forward(self, x):
		batchsize = x.size(0)
		# Stem: Feature extraction
		inter4 = self.conv1_conv4(x)
		# G-stream
		x_g = self.conv5(inter4)
		out1 = self.cls5(x_g)
		out1 = out1.view(batchsize, -1)
		# P-stream, indices is for visualization
		x_p = self.conv6(inter4)
		x_p, indices = self.pool6(x_p)
		inter6 = x_p
		out2 = self.cls6(x_p)
		out2 = out2.view(batchsize, -1)
		# Side-branch
		inter6 = inter6.view(batchsize, -1, self.k * self.nclass)
		out3 = self.cross_channel_pool(inter6)
		out3 = out3.view(batchsize, -1)
		return out1, out2, out3, indices
if __name__ == '__main__':
	input_test = torch.ones(10, 3, 448, 448)  # a batch of 10 RGB 448x448 images
net = DFL_ResNet()
output_test = net(input_test)
print(output_test)
| 27.103627 | 86 | 0.656471 | 742 | 5,231 | 4.478437 | 0.154987 | 0.060187 | 0.033704 | 0.030695 | 0.806199 | 0.780921 | 0.780921 | 0.728258 | 0.728258 | 0.728258 | 0 | 0.07571 | 0.212197 | 5,231 | 192 | 87 | 27.244792 | 0.730648 | 0.146435 | 0 | 0.713115 | 0 | 0 | 0.006607 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.04918 | false | 0 | 0.032787 | 0 | 0.131148 | 0.008197 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
b2999e0b2557c2305321af6dbcf3383637f04209 | 9,264 | py | Python | clients/kratos/python/test/test_settings_via_api_response.py | UkonnRa/sdk | 23ab5408a89cdf6ba7a6d8944f8d1b1cdc68aa4c | [
"Apache-2.0"
] | null | null | null | clients/kratos/python/test/test_settings_via_api_response.py | UkonnRa/sdk | 23ab5408a89cdf6ba7a6d8944f8d1b1cdc68aa4c | [
"Apache-2.0"
] | null | null | null | clients/kratos/python/test/test_settings_via_api_response.py | UkonnRa/sdk | 23ab5408a89cdf6ba7a6d8944f8d1b1cdc68aa4c | [
"Apache-2.0"
] | null | null | null | # coding: utf-8
"""
Ory Kratos
Welcome to the ORY Kratos HTTP API documentation! # noqa: E501
The version of the OpenAPI document: v0.5.0-alpha.1
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import unittest
import datetime
import ory_kratos_client
from ory_kratos_client.models.settings_via_api_response import SettingsViaApiResponse # noqa: E501
from ory_kratos_client.rest import ApiException
class TestSettingsViaApiResponse(unittest.TestCase):
"""SettingsViaApiResponse unit test stubs"""
def setUp(self):
pass
def tearDown(self):
pass
def make_instance(self, include_optional):
"""Test SettingsViaApiResponse
include_option is a boolean, when False only required
params are included, when True both required and
optional params are included """
# model = ory_kratos_client.models.settings_via_api_response.SettingsViaApiResponse() # noqa: E501
        if include_optional:
return SettingsViaApiResponse(
                flow = ory_kratos_client.models.flow_represents_a_settings_flow.FlowRepresentsASettingsFlow(  # class name assumed from the module path; the generated stub emitted the schema title ("Flow represents a Settings Flow") verbatim, which is not valid Python
active = '0',
expires_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'),
id = '0',
identity = ory_kratos_client.models.identity.Identity(
id = '0',
recovery_addresses = [
ory_kratos_client.models.recovery_address.RecoveryAddress(
id = '0',
value = '0',
via = '0', )
],
schema_id = '0',
schema_url = '0',
traits = ory_kratos_client.models.traits.traits(),
verifiable_addresses = [
ory_kratos_client.models.verifiable_address.VerifiableAddress(
id = '0',
status = '0',
value = '0',
verified = True,
verified_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'),
via = '0', )
], ),
issued_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'),
messages = [
ory_kratos_client.models.message.Message(
context = ory_kratos_client.models.context.context(),
id = 56,
text = '0',
type = '0', )
],
methods = {
'key' : ory_kratos_client.models.settings_flow_method.settingsFlowMethod(
config = ory_kratos_client.models.settings_flow_method_config.settingsFlowMethodConfig(
action = '0',
fields = [
ory_kratos_client.models.form_field.formField(
disabled = True,
name = '0',
pattern = '0',
required = True,
type = '0',
value = ory_kratos_client.models.value.value(), )
],
method = '0', ),
method = '0', )
},
request_url = '0',
state = '0',
type = '0', ),
identity = ory_kratos_client.models.identity.Identity(
id = '0',
recovery_addresses = [
ory_kratos_client.models.recovery_address.RecoveryAddress(
id = '0',
value = '0',
via = '0', )
],
schema_id = '0',
schema_url = '0',
traits = ory_kratos_client.models.traits.traits(),
verifiable_addresses = [
ory_kratos_client.models.verifiable_address.VerifiableAddress(
id = '0',
status = '0',
value = '0',
verified = True,
verified_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'),
via = '0', )
], )
)
        else:
return SettingsViaApiResponse(
                flow = ory_kratos_client.models.flow_represents_a_settings_flow.FlowRepresentsASettingsFlow(  # class name assumed from the module path; see the note on the matching line above
active = '0',
expires_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'),
id = '0',
identity = ory_kratos_client.models.identity.Identity(
id = '0',
recovery_addresses = [
ory_kratos_client.models.recovery_address.RecoveryAddress(
id = '0',
value = '0',
via = '0', )
],
schema_id = '0',
schema_url = '0',
traits = ory_kratos_client.models.traits.traits(),
verifiable_addresses = [
ory_kratos_client.models.verifiable_address.VerifiableAddress(
id = '0',
status = '0',
value = '0',
verified = True,
verified_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'),
via = '0', )
], ),
issued_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'),
messages = [
ory_kratos_client.models.message.Message(
context = ory_kratos_client.models.context.context(),
id = 56,
text = '0',
type = '0', )
],
methods = {
'key' : ory_kratos_client.models.settings_flow_method.settingsFlowMethod(
config = ory_kratos_client.models.settings_flow_method_config.settingsFlowMethodConfig(
action = '0',
fields = [
ory_kratos_client.models.form_field.formField(
disabled = True,
name = '0',
pattern = '0',
required = True,
type = '0',
value = ory_kratos_client.models.value.value(), )
],
method = '0', ),
method = '0', )
},
request_url = '0',
state = '0',
type = '0', ),
identity = ory_kratos_client.models.identity.Identity(
id = '0',
recovery_addresses = [
ory_kratos_client.models.recovery_address.RecoveryAddress(
id = '0',
value = '0',
via = '0', )
],
schema_id = '0',
schema_url = '0',
traits = ory_kratos_client.models.traits.traits(),
verifiable_addresses = [
ory_kratos_client.models.verifiable_address.VerifiableAddress(
id = '0',
status = '0',
value = '0',
verified = True,
verified_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'),
via = '0', )
], ),
)
def testSettingsViaApiResponse(self):
"""Test SettingsViaApiResponse"""
inst_req_only = self.make_instance(include_optional=False)
inst_req_and_optional = self.make_instance(include_optional=True)
if __name__ == '__main__':
unittest.main()
| 47.752577 | 124 | 0.403389 | 729 | 9,264 | 4.914952 | 0.18107 | 0.090427 | 0.142339 | 0.187552 | 0.786492 | 0.769188 | 0.769188 | 0.769188 | 0.745186 | 0.745186 | 0 | 0.046228 | 0.504965 | 9,264 | 193 | 125 | 48 | 0.735063 | 0.013061 | 0 | 0.866667 | 1 | 0 | 0.048026 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0.012121 | 0.036364 | null | null | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 10 |
a245684988d7dcd92763dcd1acedc4ae76b7f1c7 | 180 | py | Python | test/test_randomforest.py | USGS-EROS/lcmap-gen | 1be50eb316f7d737d6bbd000bd6a8b5006730928 | [
"Unlicense"
] | 6 | 2018-07-09T00:33:52.000Z | 2019-11-14T16:36:39.000Z | test/test_randomforest.py | USGS-EROS/lcmap-gen | 1be50eb316f7d737d6bbd000bd6a8b5006730928 | [
"Unlicense"
] | 1 | 2018-06-11T19:35:07.000Z | 2018-06-11T19:35:07.000Z | test/test_randomforest.py | USGS-EROS/lcmap-gen | 1be50eb316f7d737d6bbd000bd6a8b5006730928 | [
"Unlicense"
] | 2 | 2018-06-11T17:59:03.000Z | 2018-07-09T00:33:54.000Z | from ccdc import randomforest
def test_pipeline():
assert True
def test_train():
assert True
def test_classify():
assert True
def test_dedensify():
assert True
| 12.857143 | 29 | 0.711111 | 24 | 180 | 5.166667 | 0.5 | 0.225806 | 0.314516 | 0.41129 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.222222 | 180 | 13 | 30 | 13.846154 | 0.885714 | 0 | 0 | 0.444444 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.444444 | 1 | 0.444444 | true | 0 | 0.111111 | 0 | 0.555556 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 0 | 1 | 1 | 0 | 0 | 0 | 1 | 0 | 0 | 8 |
a2ef02b934e5b4f1a1308f07185ce669fbfdf625 | 19,746 | py | Python | utils/data.py | J-L-O/UNO | 6fc01d6601ab54e1c82367f2b3d4db2f5fbd911f | [
"MIT"
] | null | null | null | utils/data.py | J-L-O/UNO | 6fc01d6601ab54e1c82367f2b3d4db2f5fbd911f | [
"MIT"
] | null | null | null | utils/data.py | J-L-O/UNO | 6fc01d6601ab54e1c82367f2b3d4db2f5fbd911f | [
"MIT"
] | null | null | null | import torch
import torchvision
import pytorch_lightning as pl
from utils.datasets import ImageNetSubset
from utils.transforms import get_transforms
import numpy as np
import os
def get_datamodule(args, mode):
if mode == "pretrain":
if args.dataset == "ImageNet":
return PretrainImageNetDataModule(args)
else:
return PretrainCIFARDataModule(args)
elif mode == "discover":
if args.dataset == "ImageNet":
return DiscoverImageNetDataModule(args)
else:
return DiscoverCIFARDataModule(args)
elif mode == "evaluate":
if args.dataset == "ImageNet":
return EvaluateImageNetDataModule(args)
else:
return EvaluateCIFARDataModule(args)
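# Minimal usage sketch (assuming an argparse-style `args` namespace carrying
# the attributes these modules read, e.g. data_dir, dataset, batch_size,
# num_workers, num_labeled_classes, num_unlabeled_classes):
#
#   dm = get_datamodule(args, mode="discover")
#   dm.prepare_data()
#   dm.setup()
#   train_loader = dm.train_dataloader()
#   val_loaders = dm.val_dataloader()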
class PretrainCIFARDataModule(pl.LightningDataModule):
def __init__(self, args):
super().__init__()
self.data_dir = args.data_dir
self.download = args.download
self.batch_size = args.batch_size
self.num_workers = args.num_workers
self.num_labeled_classes = args.num_labeled_classes
self.num_unlabeled_classes = args.num_unlabeled_classes
self.dataset_class = getattr(torchvision.datasets, args.dataset)
self.transform_train = get_transforms("unsupervised", args.dataset)
self.transform_val = get_transforms("eval", args.dataset)
def prepare_data(self):
self.dataset_class(self.data_dir, train=True, download=self.download)
self.dataset_class(self.data_dir, train=False, download=self.download)
def setup(self, stage=None):
labeled_classes = range(self.num_labeled_classes)
# train dataset
self.train_dataset = self.dataset_class(
self.data_dir, train=True, transform=self.transform_train
)
train_indices_lab = np.where(
np.isin(np.array(self.train_dataset.targets), labeled_classes)
)[0]
self.train_dataset = torch.utils.data.Subset(self.train_dataset, train_indices_lab)
# val datasets
self.val_dataset = self.dataset_class(
self.data_dir, train=False, transform=self.transform_val
)
val_indices_lab = np.where(np.isin(np.array(self.val_dataset.targets), labeled_classes))[0]
self.val_dataset = torch.utils.data.Subset(self.val_dataset, val_indices_lab)
def train_dataloader(self):
use_persistent_workers = self.num_workers > 0
return torch.utils.data.DataLoader(
self.train_dataset,
batch_size=self.batch_size,
shuffle=True,
num_workers=self.num_workers,
pin_memory=not use_persistent_workers,
drop_last=True,
persistent_workers=use_persistent_workers
)
def val_dataloader(self):
use_persistent_workers = self.num_workers > 0
return torch.utils.data.DataLoader(
self.val_dataset,
batch_size=self.batch_size,
shuffle=False,
num_workers=self.num_workers,
pin_memory=not use_persistent_workers,
drop_last=False,
persistent_workers=use_persistent_workers
)
class DiscoverCIFARDataModule(pl.LightningDataModule):
def __init__(self, args):
super().__init__()
self.data_dir = args.data_dir
self.download = args.download
self.batch_size = args.batch_size
self.num_workers = args.num_workers
self.num_labeled_classes = args.num_labeled_classes
self.num_unlabeled_classes = args.num_unlabeled_classes
self.dataset_class = getattr(torchvision.datasets, args.dataset)
self.transform_train = get_transforms(
"unsupervised",
args.dataset,
multicrop=args.multicrop,
num_large_crops=args.num_large_crops,
num_small_crops=args.num_small_crops,
)
self.transform_val = get_transforms("eval", args.dataset)
def prepare_data(self):
self.dataset_class(self.data_dir, train=True, download=self.download)
self.dataset_class(self.data_dir, train=False, download=self.download)
def setup(self, stage=None):
labeled_classes = range(self.num_labeled_classes)
unlabeled_classes = range(
self.num_labeled_classes, self.num_labeled_classes + self.num_unlabeled_classes
)
# train dataset
self.train_dataset = self.dataset_class(
self.data_dir, train=True, transform=self.transform_train
)
# val datasets
val_dataset_train = self.dataset_class(
self.data_dir, train=True, transform=self.transform_val
)
val_dataset_test = self.dataset_class(
self.data_dir, train=False, transform=self.transform_val
)
# unlabeled classes, train set
val_indices_unlab_train = np.where(
np.isin(np.array(val_dataset_train.targets), unlabeled_classes)
)[0]
val_subset_unlab_train = torch.utils.data.Subset(val_dataset_train, val_indices_unlab_train)
# unlabeled classes, test set
val_indices_unlab_test = np.where(
np.isin(np.array(val_dataset_test.targets), unlabeled_classes)
)[0]
val_subset_unlab_test = torch.utils.data.Subset(val_dataset_test, val_indices_unlab_test)
# labeled classes, test set
val_indices_lab_test = np.where(
np.isin(np.array(val_dataset_test.targets), labeled_classes)
)[0]
val_subset_lab_test = torch.utils.data.Subset(val_dataset_test, val_indices_lab_test)
self.val_datasets = [val_subset_unlab_train, val_subset_unlab_test, val_subset_lab_test]
@property
def dataloader_mapping(self):
return {0: "unlab/train", 1: "unlab/test", 2: "lab/test"}
def train_dataloader(self):
use_persistent_workers = self.num_workers > 0
return torch.utils.data.DataLoader(
self.train_dataset,
batch_size=self.batch_size,
shuffle=True,
num_workers=self.num_workers,
pin_memory=not use_persistent_workers,
drop_last=True,
persistent_workers=use_persistent_workers
)
def val_dataloader(self):
use_persistent_workers = self.num_workers > 0
return [
torch.utils.data.DataLoader(
dataset,
batch_size=self.batch_size,
shuffle=False,
num_workers=self.num_workers,
pin_memory=not use_persistent_workers,
drop_last=False,
persistent_workers=use_persistent_workers
)
for dataset in self.val_datasets
]
class EvaluateCIFARDataModule(pl.LightningDataModule):
def __init__(self, args):
super().__init__()
self.data_dir = args.data_dir
self.download = args.download
self.batch_size = args.batch_size
self.num_workers = args.num_workers
self.num_labeled_classes = args.num_labeled_classes
self.num_unlabeled_classes = args.num_unlabeled_classes
self.dataset_class = getattr(torchvision.datasets, args.dataset)
self.transform = get_transforms("eval", args.dataset)
def prepare_data(self):
self.dataset_class(self.data_dir, train=True, download=self.download)
self.dataset_class(self.data_dir, train=False, download=self.download)
def setup(self, stage=None):
labeled_classes = range(self.num_labeled_classes)
unlabeled_classes = range(
self.num_labeled_classes, self.num_labeled_classes + self.num_unlabeled_classes
)
# val datasets
dataset_train = self.dataset_class(
self.data_dir, train=True, transform=self.transform
)
dataset_test = self.dataset_class(
self.data_dir, train=False, transform=self.transform
)
# labeled classes, train set
indices_lab_train = np.where(
np.isin(np.array(dataset_train.targets), labeled_classes)
)[0]
subset_lab_train = torch.utils.data.Subset(dataset_train, indices_lab_train)
# unlabeled classes, train set
indices_unlab_train = np.where(
np.isin(np.array(dataset_train.targets), unlabeled_classes)
)[0]
subset_unlab_train = torch.utils.data.Subset(dataset_train, indices_unlab_train)
self.train_datasets = [subset_lab_train, subset_unlab_train]
# unlabeled classes, test set
indices_unlab_test = np.where(
np.isin(np.array(dataset_test.targets), unlabeled_classes)
)[0]
subset_unlab_test = torch.utils.data.Subset(dataset_test, indices_unlab_test)
# labeled classes, test set
indices_lab_test = np.where(
np.isin(np.array(dataset_test.targets), labeled_classes)
)[0]
subset_lab_test = torch.utils.data.Subset(dataset_test, indices_lab_test)
self.val_datasets = [subset_lab_test, subset_unlab_test]
def train_dataloader(self):
use_persistent_workers = self.num_workers > 0
return [
torch.utils.data.DataLoader(
dataset,
batch_size=self.batch_size,
shuffle=False,
num_workers=self.num_workers,
pin_memory=not use_persistent_workers,
drop_last=False,
persistent_workers=use_persistent_workers
)
for dataset in self.train_datasets
]
def val_dataloader(self):
use_persistent_workers = self.num_workers > 0
return [
torch.utils.data.DataLoader(
dataset,
batch_size=self.batch_size,
shuffle=False,
num_workers=self.num_workers,
pin_memory=not use_persistent_workers,
drop_last=False,
persistent_workers=use_persistent_workers
)
for dataset in self.val_datasets
]
class DiscoverDataset:
def __init__(self, labeled_dataset, unlabeled_dataset):
self.labeled_dataset = labeled_dataset
self.unlabeled_dataset = unlabeled_dataset
def __len__(self):
return max([len(self.labeled_dataset), len(self.unlabeled_dataset)])
def __getitem__(self, index):
labeled_index = index % len(self.labeled_dataset)
labeled_data = self.labeled_dataset[labeled_index]
unlabeled_index = index % len(self.unlabeled_dataset)
unlabeled_data = self.unlabeled_dataset[unlabeled_index]
return (*labeled_data, *unlabeled_data)
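# Note: DiscoverDataset pairs a labeled and an unlabeled dataset of possibly
# different lengths; __len__ is the longer of the two and the shorter dataset
# is cycled via modulo indexing, so every item of both datasets is visited at
# least once per epoch.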
class EvaluateImageNetDataModule(pl.LightningDataModule):
def __init__(self, args):
super().__init__()
self.data_dir = args.data_dir
self.batch_size = args.batch_size
self.num_workers = args.num_workers
self.subset = args.imagenet_subset
self.imagenet_split = args.imagenet_split
self.breeds_info = args.breeds_info
self.dataset_class = ImageNetSubset
self.transform = get_transforms("eval", args.dataset)
def prepare_data(self):
pass
def setup(self, stage=None):
train_data_dir = os.path.join(self.data_dir, "train")
val_data_dir = os.path.join(self.data_dir, "val")
# train dataset
labeled_subset_train = self.dataset_class(train_data_dir, transform=self.transform,
subset=self.subset, subset_split=self.imagenet_split,
is_labeled=True, breeds_info=self.breeds_info)
unlabeled_subset_train = self.dataset_class(train_data_dir, transform=self.transform,
subset=self.subset, subset_split=self.imagenet_split,
is_labeled=False, breeds_info=self.breeds_info)
self.train_datasets = [labeled_subset_train, unlabeled_subset_train]
# val datasets
unlabeled_subset_test = self.dataset_class(val_data_dir, transform=self.transform,
subset=self.subset, subset_split=self.imagenet_split,
is_labeled=False, breeds_info=self.breeds_info)
labeled_subset_test = self.dataset_class(val_data_dir, transform=self.transform,
subset=self.subset, subset_split=self.imagenet_split,
is_labeled=True, breeds_info=self.breeds_info)
self.val_datasets = [labeled_subset_test, unlabeled_subset_test]
def train_dataloader(self):
use_persistent_workers = self.num_workers > 0
return [
torch.utils.data.DataLoader(
dataset,
batch_size=self.batch_size,
shuffle=False,
num_workers=self.num_workers,
pin_memory=not use_persistent_workers,
drop_last=False,
persistent_workers=use_persistent_workers
)
for dataset in self.train_datasets
]
def val_dataloader(self):
use_persistent_workers = self.num_workers > 0
return [
torch.utils.data.DataLoader(
dataset,
batch_size=self.batch_size,
shuffle=False,
num_workers=self.num_workers,
pin_memory=not use_persistent_workers,
drop_last=False,
persistent_workers=use_persistent_workers
)
for dataset in self.val_datasets
]
class DiscoverImageNetDataModule(pl.LightningDataModule):
def __init__(self, args):
super().__init__()
self.data_dir = args.data_dir
self.batch_size = args.batch_size
self.num_workers = args.num_workers
self.subset = args.imagenet_subset
self.imagenet_split = args.imagenet_split
self.breeds_info = args.breeds_info
self.dataset_class = ImageNetSubset
self.transform_train = get_transforms(
"unsupervised",
args.dataset,
multicrop=args.multicrop,
num_large_crops=args.num_large_crops,
num_small_crops=args.num_small_crops,
)
self.transform_val = get_transforms("eval", args.dataset)
def prepare_data(self):
pass
def setup(self, stage=None):
train_data_dir = os.path.join(self.data_dir, "train")
val_data_dir = os.path.join(self.data_dir, "val")
# train dataset
labeled_subset = self.dataset_class(train_data_dir, transform=self.transform_train,
subset=self.subset, subset_split=self.imagenet_split,
is_labeled=True, breeds_info=self.breeds_info)
unlabeled_subset = self.dataset_class(train_data_dir, transform=self.transform_train,
subset=self.subset, subset_split=self.imagenet_split,
is_labeled=False, breeds_info=self.breeds_info)
self.train_dataset = DiscoverDataset(labeled_subset, unlabeled_subset)
# val datasets
unlabeled_subset_train = self.dataset_class(train_data_dir, transform=self.transform_val,
subset=self.subset, subset_split=self.imagenet_split,
is_labeled=False, breeds_info=self.breeds_info)
unlabeled_subset_test = self.dataset_class(val_data_dir, transform=self.transform_val,
subset=self.subset, subset_split=self.imagenet_split,
is_labeled=False, breeds_info=self.breeds_info)
labeled_subset_test = self.dataset_class(val_data_dir, transform=self.transform_val,
subset=self.subset, subset_split=self.imagenet_split,
is_labeled=True, breeds_info=self.breeds_info)
self.val_datasets = [unlabeled_subset_train, unlabeled_subset_test, labeled_subset_test]
@property
def dataloader_mapping(self):
return {0: "unlab/train", 1: "unlab/test", 2: "lab/test"}
def train_dataloader(self):
use_persistent_workers = self.num_workers > 0
return torch.utils.data.DataLoader(
self.train_dataset,
batch_size=self.batch_size // 2,
shuffle=True,
num_workers=self.num_workers,
pin_memory=not use_persistent_workers,
drop_last=True,
persistent_workers=use_persistent_workers
)
def val_dataloader(self):
use_persistent_workers = self.num_workers > 0
return [
torch.utils.data.DataLoader(
dataset,
batch_size=self.batch_size,
shuffle=False,
num_workers=self.num_workers,
pin_memory=not use_persistent_workers,
drop_last=False,
persistent_workers=use_persistent_workers
)
for dataset in self.val_datasets
]
class PretrainImageNetDataModule(pl.LightningDataModule):
def __init__(self, args):
super().__init__()
self.data_dir = args.data_dir
self.batch_size = args.batch_size
self.num_workers = args.num_workers
self.subset = args.imagenet_subset
self.imagenet_split = args.imagenet_split
self.breeds_info = args.breeds_info
self.dataset_class = ImageNetSubset
self.transform_train = get_transforms("unsupervised", args.dataset)
self.transform_val = get_transforms("eval", args.dataset)
def prepare_data(self):
pass
def setup(self, stage=None):
train_data_dir = os.path.join(self.data_dir, "train")
val_data_dir = os.path.join(self.data_dir, "val")
# train dataset
labeled_subset = self.dataset_class(train_data_dir, transform=self.transform_train,
subset=self.subset, subset_split=self.imagenet_split,
is_labeled=True, breeds_info=self.breeds_info)
self.train_dataset = labeled_subset
# val datasets
labeled_subset_test = self.dataset_class(val_data_dir, transform=self.transform_val,
subset=self.subset, subset_split=self.imagenet_split,
is_labeled=True, breeds_info=self.breeds_info)
self.val_dataset = labeled_subset_test
def train_dataloader(self):
use_persistent_workers = self.num_workers > 0
return torch.utils.data.DataLoader(
self.train_dataset,
batch_size=self.batch_size,
shuffle=True,
num_workers=self.num_workers,
pin_memory=not use_persistent_workers,
drop_last=True,
persistent_workers=use_persistent_workers
)
def val_dataloader(self):
use_persistent_workers = self.num_workers > 0
return torch.utils.data.DataLoader(
self.val_dataset,
batch_size=self.batch_size,
shuffle=False,
num_workers=self.num_workers,
pin_memory=not use_persistent_workers,
drop_last=False,
persistent_workers=use_persistent_workers
)
| 38.946746 | 105 | 0.629241 | 2,205 | 19,746 | 5.302948 | 0.047619 | 0.028735 | 0.061575 | 0.043103 | 0.871633 | 0.858377 | 0.829214 | 0.812623 | 0.794835 | 0.781066 | 0 | 0.002011 | 0.294794 | 19,746 | 506 | 106 | 39.023715 | 0.837702 | 0.017219 | 0 | 0.716749 | 0 | 0 | 0.010419 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.08867 | false | 0.007389 | 0.017241 | 0.007389 | 0.17734 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
a2fb2cd7f095d3384cd27930afe1a9ea33ec0bc7 | 24,077 | py | Python | reefbot_msgs/src/reefbot_msgs/srv/_FindSpecies.py | MRSD2018/reefbot-1 | a595ca718d0cda277726894a3105815cef000475 | [
"MIT"
] | null | null | null | reefbot_msgs/src/reefbot_msgs/srv/_FindSpecies.py | MRSD2018/reefbot-1 | a595ca718d0cda277726894a3105815cef000475 | [
"MIT"
] | null | null | null | reefbot_msgs/src/reefbot_msgs/srv/_FindSpecies.py | MRSD2018/reefbot-1 | a595ca718d0cda277726894a3105815cef000475 | [
"MIT"
] | null | null | null | """autogenerated by genpy from reefbot_msgs/FindSpeciesRequest.msg. Do not edit."""
import sys
python3 = True if sys.hexversion > 0x03000000 else False
import genpy
import struct
import sensor_msgs.msg
import reefbot_msgs.msg
import std_msgs.msg
class FindSpeciesRequest(genpy.Message):
_md5sum = "d75780091c908e247bf645a43c77f4ce"
_type = "reefbot_msgs/FindSpeciesRequest"
_has_header = False #flag to mark the presence of a Header object
_full_text = """ImageCaptured image
================================================================================
MSG: reefbot_msgs/ImageCaptured
# This message specifies that a still image was captured by the system.
#
# If you're using OpenCV to handle images, look up the cv_bridge in
# ROS to easily write/read this message.
#
# Author: Mark Desnoyer (markd@cmu.edu)
# Date: Sept 2010
Header header
# Optional Id to identify the image
uint64 image_id
# Full color image that was captured
sensor_msgs/Image image
================================================================================
MSG: std_msgs/Header
# Standard metadata for higher-level stamped data types.
# This is generally used to communicate timestamped data
# in a particular coordinate frame.
#
# sequence ID: consecutively increasing ID
uint32 seq
#Two-integer timestamp that is expressed as:
# * stamp.secs: seconds (stamp_secs) since epoch
# * stamp.nsecs: nanoseconds since stamp_secs
# time-handling sugar is provided by the client library
time stamp
#Frame this data is associated with
# 0: no frame
# 1: global frame
string frame_id
================================================================================
MSG: sensor_msgs/Image
# This message contains an uncompressed image
# (0, 0) is at top-left corner of image
#
Header header # Header timestamp should be acquisition time of image
# Header frame_id should be optical frame of camera
# origin of frame should be optical center of cameara
# +x should point to the right in the image
# +y should point down in the image
# +z should point into to plane of the image
# If the frame_id here and the frame_id of the CameraInfo
# message associated with the image conflict
# the behavior is undefined
uint32 height # image height, that is, number of rows
uint32 width # image width, that is, number of columns
# The legal values for encoding are in file src/image_encodings.cpp
# If you want to standardize a new string format, join
# ros-users@lists.sourceforge.net and send an email proposing a new encoding.
string encoding # Encoding of pixels -- channel meaning, ordering, size
# taken from the list of strings in src/image_encodings.cpp
uint8 is_bigendian # is this data bigendian?
uint32 step # Full row length in bytes
uint8[] data # actual matrix data, size is (step * rows)
"""
__slots__ = ['image']
_slot_types = ['reefbot_msgs/ImageCaptured']
def __init__(self, *args, **kwds):
"""
Constructor. Any message fields that are implicitly/explicitly
set to None will be assigned a default value. The recommend
use is keyword arguments as this is more robust to future message
changes. You cannot mix in-order arguments and keyword arguments.
The available fields are:
image
:param args: complete set of field values, in .msg order
:param kwds: use keyword arguments corresponding to message field names
to set specific fields.
"""
if args or kwds:
super(FindSpeciesRequest, self).__init__(*args, **kwds)
#message fields cannot be None, assign default values for those that are
if self.image is None:
self.image = reefbot_msgs.msg.ImageCaptured()
else:
self.image = reefbot_msgs.msg.ImageCaptured()
def _get_types(self):
"""
internal API method
"""
return self._slot_types
def serialize(self, buff):
"""
serialize message into buffer
:param buff: buffer, ``StringIO``
"""
try:
_x = self
buff.write(_struct_3I.pack(_x.image.header.seq, _x.image.header.stamp.secs, _x.image.header.stamp.nsecs))
_x = self.image.header.frame_id
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
_x = self
buff.write(_struct_Q3I.pack(_x.image.image_id, _x.image.image.header.seq, _x.image.image.header.stamp.secs, _x.image.image.header.stamp.nsecs))
_x = self.image.image.header.frame_id
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
_x = self
buff.write(_struct_2I.pack(_x.image.image.height, _x.image.image.width))
_x = self.image.image.encoding
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
_x = self
buff.write(_struct_BI.pack(_x.image.image.is_bigendian, _x.image.image.step))
_x = self.image.image.data
length = len(_x)
# - if encoded as a list instead, serialize as bytes instead of string
if type(_x) in [list, tuple]:
buff.write(struct.pack('<I%sB'%length, length, *_x))
else:
buff.write(struct.pack('<I%ss'%length, length, _x))
except struct.error as se: self._check_types(se)
except TypeError as te: self._check_types(te)
def deserialize(self, str):
"""
unpack serialized message in str into this message instance
:param str: byte array of serialized message, ``str``
"""
try:
if self.image is None:
self.image = reefbot_msgs.msg.ImageCaptured()
end = 0
_x = self
start = end
end += 12
(_x.image.header.seq, _x.image.header.stamp.secs, _x.image.header.stamp.nsecs,) = _struct_3I.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
self.image.header.frame_id = str[start:end].decode('utf-8')
else:
self.image.header.frame_id = str[start:end]
_x = self
start = end
end += 20
(_x.image.image_id, _x.image.image.header.seq, _x.image.image.header.stamp.secs, _x.image.image.header.stamp.nsecs,) = _struct_Q3I.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
self.image.image.header.frame_id = str[start:end].decode('utf-8')
else:
self.image.image.header.frame_id = str[start:end]
_x = self
start = end
end += 8
(_x.image.image.height, _x.image.image.width,) = _struct_2I.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
self.image.image.encoding = str[start:end].decode('utf-8')
else:
self.image.image.encoding = str[start:end]
_x = self
start = end
end += 5
(_x.image.image.is_bigendian, _x.image.image.step,) = _struct_BI.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
self.image.image.data = str[start:end].decode('utf-8')
else:
self.image.image.data = str[start:end]
return self
except struct.error as e:
raise genpy.DeserializationError(e) #most likely buffer underfill
def serialize_numpy(self, buff, numpy):
"""
serialize message with numpy array types into buffer
:param buff: buffer, ``StringIO``
:param numpy: numpy python module
"""
try:
_x = self
buff.write(_struct_3I.pack(_x.image.header.seq, _x.image.header.stamp.secs, _x.image.header.stamp.nsecs))
_x = self.image.header.frame_id
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
_x = self
buff.write(_struct_Q3I.pack(_x.image.image_id, _x.image.image.header.seq, _x.image.image.header.stamp.secs, _x.image.image.header.stamp.nsecs))
_x = self.image.image.header.frame_id
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
_x = self
buff.write(_struct_2I.pack(_x.image.image.height, _x.image.image.width))
_x = self.image.image.encoding
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
_x = self
buff.write(_struct_BI.pack(_x.image.image.is_bigendian, _x.image.image.step))
_x = self.image.image.data
length = len(_x)
# - if encoded as a list instead, serialize as bytes instead of string
if type(_x) in [list, tuple]:
buff.write(struct.pack('<I%sB'%length, length, *_x))
else:
buff.write(struct.pack('<I%ss'%length, length, _x))
except struct.error as se: self._check_types(se)
except TypeError as te: self._check_types(te)
def deserialize_numpy(self, str, numpy):
"""
unpack serialized message in str into this message instance using numpy for array types
:param str: byte array of serialized message, ``str``
:param numpy: numpy python module
"""
try:
if self.image is None:
self.image = reefbot_msgs.msg.ImageCaptured()
end = 0
_x = self
start = end
end += 12
(_x.image.header.seq, _x.image.header.stamp.secs, _x.image.header.stamp.nsecs,) = _struct_3I.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
self.image.header.frame_id = str[start:end].decode('utf-8')
else:
self.image.header.frame_id = str[start:end]
_x = self
start = end
end += 20
(_x.image.image_id, _x.image.image.header.seq, _x.image.image.header.stamp.secs, _x.image.image.header.stamp.nsecs,) = _struct_Q3I.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
self.image.image.header.frame_id = str[start:end].decode('utf-8')
else:
self.image.image.header.frame_id = str[start:end]
_x = self
start = end
end += 8
(_x.image.image.height, _x.image.image.width,) = _struct_2I.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
self.image.image.encoding = str[start:end].decode('utf-8')
else:
self.image.image.encoding = str[start:end]
_x = self
start = end
end += 5
(_x.image.image.is_bigendian, _x.image.image.step,) = _struct_BI.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
self.image.image.data = str[start:end].decode('utf-8')
else:
self.image.image.data = str[start:end]
return self
except struct.error as e:
raise genpy.DeserializationError(e) #most likely buffer underfill
_struct_I = genpy.struct_I
_struct_3I = struct.Struct("<3I")
_struct_BI = struct.Struct("<BI")
_struct_Q3I = struct.Struct("<Q3I")
_struct_2I = struct.Struct("<2I")
"""autogenerated by genpy from reefbot_msgs/FindSpeciesResponse.msg. Do not edit."""
import sys
python3 = True if sys.hexversion > 0x03000000 else False
import genpy
import struct
import sensor_msgs.msg
import reefbot_msgs.msg
import std_msgs.msg
class FindSpeciesResponse(genpy.Message):
_md5sum = "2660d795deb5b215e3a6337b079df1fa"
_type = "reefbot_msgs/FindSpeciesResponse"
_has_header = False #flag to mark the presence of a Header object
_full_text = """SpeciesIDResponse response
================================================================================
MSG: reefbot_msgs/SpeciesIDResponse
# Response to a SpeciesIDRequest that tells the user the best guess of
# what species were found.
#
# Author: Mark Desnoyer (markd@cmu.edu)
# Date: June 2010
Header header
# Optional Id to identify the image
uint64 image_id
# Answers, one for each region in the Request and in the same order.
SingleSpeciesId[] answers
================================================================================
MSG: std_msgs/Header
# Standard metadata for higher-level stamped data types.
# This is generally used to communicate timestamped data
# in a particular coordinate frame.
#
# sequence ID: consecutively increasing ID
uint32 seq
#Two-integer timestamp that is expressed as:
# * stamp.secs: seconds (stamp_secs) since epoch
# * stamp.nsecs: nanoseconds since stamp_secs
# time-handling sugar is provided by the client library
time stamp
#Frame this data is associated with
# 0: no frame
# 1: global frame
string frame_id
================================================================================
MSG: reefbot_msgs/SingleSpeciesId
# The best few matches for the species in a single region
#
# Author: Mark Desnoyer (markd@cmu.edu)
# Date: June 2010
# The bounding box where we found the individual
sensor_msgs/RegionOfInterest bounding_box
# The most likely species in descending order. This array could be
# empty if there was no good match.
SpeciesScore[] best_species
================================================================================
MSG: sensor_msgs/RegionOfInterest
# This message is used to specify a region of interest within an image.
#
# When used to specify the ROI setting of the camera when the image was
# taken, the height and width fields should either match the height and
# width fields for the associated image; or height = width = 0
# indicates that the full resolution image was captured.
uint32 x_offset # Leftmost pixel of the ROI
# (0 if the ROI includes the left edge of the image)
uint32 y_offset # Topmost pixel of the ROI
# (0 if the ROI includes the top edge of the image)
uint32 height # Height of ROI
uint32 width # Width of ROI
# True if a distinct rectified ROI should be calculated from the "raw"
# ROI in this message. Typically this should be False if the full image
# is captured (ROI not used), and True if a subwindow is captured (ROI
# used).
bool do_rectify
================================================================================
MSG: reefbot_msgs/SpeciesScore
# The score of a species identification. The score is dependent on the
# algorithm being used so it doesn't necessarily have semantic meaning
# except that a higher score is better.
#
# Author: Mark Desnoyer (markd@cmu.edu)
# Date: June 2010
# Unique identifier for the species
uint32 species_id
# Score for the species. Higher is better
float32 score
# Optional extra information about the score. This will be algorithm
# dependent and might be useful for debugging
string meta_data
"""
__slots__ = ['response']
_slot_types = ['reefbot_msgs/SpeciesIDResponse']
def __init__(self, *args, **kwds):
"""
Constructor. Any message fields that are implicitly/explicitly
set to None will be assigned a default value. The recommend
use is keyword arguments as this is more robust to future message
changes. You cannot mix in-order arguments and keyword arguments.
The available fields are:
response
:param args: complete set of field values, in .msg order
:param kwds: use keyword arguments corresponding to message field names
to set specific fields.
"""
if args or kwds:
super(FindSpeciesResponse, self).__init__(*args, **kwds)
#message fields cannot be None, assign default values for those that are
if self.response is None:
self.response = reefbot_msgs.msg.SpeciesIDResponse()
else:
self.response = reefbot_msgs.msg.SpeciesIDResponse()
def _get_types(self):
"""
internal API method
"""
return self._slot_types
def serialize(self, buff):
"""
serialize message into buffer
:param buff: buffer, ``StringIO``
"""
try:
_x = self
buff.write(_struct_3I.pack(_x.response.header.seq, _x.response.header.stamp.secs, _x.response.header.stamp.nsecs))
_x = self.response.header.frame_id
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
buff.write(_struct_Q.pack(self.response.image_id))
length = len(self.response.answers)
buff.write(_struct_I.pack(length))
for val1 in self.response.answers:
_v1 = val1.bounding_box
_x = _v1
buff.write(_struct_4IB.pack(_x.x_offset, _x.y_offset, _x.height, _x.width, _x.do_rectify))
length = len(val1.best_species)
buff.write(_struct_I.pack(length))
for val2 in val1.best_species:
_x = val2
buff.write(_struct_If.pack(_x.species_id, _x.score))
_x = val2.meta_data
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
except struct.error as se: self._check_types(se)
except TypeError as te: self._check_types(te)
def deserialize(self, str):
"""
unpack serialized message in str into this message instance
:param str: byte array of serialized message, ``str``
"""
try:
if self.response is None:
self.response = reefbot_msgs.msg.SpeciesIDResponse()
end = 0
_x = self
start = end
end += 12
(_x.response.header.seq, _x.response.header.stamp.secs, _x.response.header.stamp.nsecs,) = _struct_3I.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
self.response.header.frame_id = str[start:end].decode('utf-8')
else:
self.response.header.frame_id = str[start:end]
start = end
end += 8
(self.response.image_id,) = _struct_Q.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.response.answers = []
for i in range(0, length):
val1 = reefbot_msgs.msg.SingleSpeciesId()
_v2 = val1.bounding_box
_x = _v2
start = end
end += 17
(_x.x_offset, _x.y_offset, _x.height, _x.width, _x.do_rectify,) = _struct_4IB.unpack(str[start:end])
_v2.do_rectify = bool(_v2.do_rectify)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
val1.best_species = []
for i in range(0, length):
val2 = reefbot_msgs.msg.SpeciesScore()
_x = val2
start = end
end += 8
(_x.species_id, _x.score,) = _struct_If.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val2.meta_data = str[start:end].decode('utf-8')
else:
val2.meta_data = str[start:end]
val1.best_species.append(val2)
self.response.answers.append(val1)
return self
except struct.error as e:
raise genpy.DeserializationError(e) #most likely buffer underfill
def serialize_numpy(self, buff, numpy):
"""
serialize message with numpy array types into buffer
:param buff: buffer, ``StringIO``
:param numpy: numpy python module
"""
try:
_x = self
buff.write(_struct_3I.pack(_x.response.header.seq, _x.response.header.stamp.secs, _x.response.header.stamp.nsecs))
_x = self.response.header.frame_id
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
buff.write(_struct_Q.pack(self.response.image_id))
length = len(self.response.answers)
buff.write(_struct_I.pack(length))
for val1 in self.response.answers:
_v3 = val1.bounding_box
_x = _v3
buff.write(_struct_4IB.pack(_x.x_offset, _x.y_offset, _x.height, _x.width, _x.do_rectify))
length = len(val1.best_species)
buff.write(_struct_I.pack(length))
for val2 in val1.best_species:
_x = val2
buff.write(_struct_If.pack(_x.species_id, _x.score))
_x = val2.meta_data
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
except struct.error as se: self._check_types(se)
except TypeError as te: self._check_types(te)
def deserialize_numpy(self, str, numpy):
"""
unpack serialized message in str into this message instance using numpy for array types
:param str: byte array of serialized message, ``str``
:param numpy: numpy python module
"""
try:
if self.response is None:
self.response = reefbot_msgs.msg.SpeciesIDResponse()
end = 0
_x = self
start = end
end += 12
(_x.response.header.seq, _x.response.header.stamp.secs, _x.response.header.stamp.nsecs,) = _struct_3I.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
self.response.header.frame_id = str[start:end].decode('utf-8')
else:
self.response.header.frame_id = str[start:end]
start = end
end += 8
(self.response.image_id,) = _struct_Q.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.response.answers = []
for i in range(0, length):
val1 = reefbot_msgs.msg.SingleSpeciesId()
_v4 = val1.bounding_box
_x = _v4
start = end
end += 17
(_x.x_offset, _x.y_offset, _x.height, _x.width, _x.do_rectify,) = _struct_4IB.unpack(str[start:end])
_v4.do_rectify = bool(_v4.do_rectify)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
val1.best_species = []
for i in range(0, length):
val2 = reefbot_msgs.msg.SpeciesScore()
_x = val2
start = end
end += 8
(_x.species_id, _x.score,) = _struct_If.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val2.meta_data = str[start:end].decode('utf-8')
else:
val2.meta_data = str[start:end]
val1.best_species.append(val2)
self.response.answers.append(val1)
return self
except struct.error as e:
raise genpy.DeserializationError(e) #most likely buffer underfill
_struct_I = genpy.struct_I
_struct_Q = struct.Struct("<Q")
_struct_3I = struct.Struct("<3I")
_struct_4IB = struct.Struct("<4IB")
_struct_If = struct.Struct("<If")
class FindSpecies(object):
_type = 'reefbot_msgs/FindSpecies'
_md5sum = '7da9366c50b21c9cef266c42d369b77c'
_request_class = FindSpeciesRequest
_response_class = FindSpeciesResponse
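# Example (added; hypothetical service name, assumes a running ROS master and
# a node advertising this service):
#   import rospy
#   rospy.wait_for_service('find_species')
#   find_species = rospy.ServiceProxy('find_species', FindSpecies)
#   resp = find_species(FindSpeciesRequest())  # -> FindSpeciesResponse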
| 35.35536 | 159 | 0.631225 | 3,284 | 24,077 | 4.466809 | 0.117235 | 0.054537 | 0.041993 | 0.037085 | 0.782262 | 0.776399 | 0.762833 | 0.760584 | 0.760584 | 0.752471 | 0 | 0.016689 | 0.238485 | 24,077 | 680 | 160 | 35.407353 | 0.783365 | 0.101466 | 0 | 0.797491 | 1 | 0 | 0.289036 | 0.05277 | 0 | 0 | 0.000941 | 0 | 0 | 1 | 0.021505 | false | 0 | 0.021505 | 0 | 0.087814 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
0c2b440a2f8fe3544c136abbdde09fd483d78953 | 2,366 | py | Python | SCLPsolver/pytests/MCQN_basic_test.py | haroldship/SCLPsolver | 70b79acb074f51d4a269993f6a1fcf04a8196a89 | [
"Apache-2.0"
] | 1 | 2020-08-11T16:01:55.000Z | 2020-08-11T16:01:55.000Z | SCLPsolver/pytests/MCQN_basic_test.py | haroldship/SCLPsolver | 70b79acb074f51d4a269993f6a1fcf04a8196a89 | [
"Apache-2.0"
] | null | null | null | SCLPsolver/pytests/MCQN_basic_test.py | haroldship/SCLPsolver | 70b79acb074f51d4a269993f6a1fcf04a8196a89 | [
"Apache-2.0"
] | 2 | 2020-06-29T15:23:59.000Z | 2022-02-07T08:58:40.000Z | import pytest
import os, sys
proj = os.path.realpath(os.path.join(os.path.dirname(os.path.realpath(__file__)), '..'))
sys.path.append(proj)
from SCLPsolver.SCLP import SCLP, SCLP_settings
from SCLPsolver.doe.data_generators.MCQN import generate_MCQN_data
seeds = range(1001, 1009)
@pytest.mark.parametrize("seed", seeds)
def test_basic_mcqn(seed):
K = 400
I = 40
import time
solver_settings = SCLP_settings(find_alt_line=False, check_intermediate_solution=False, memory_management=False,
suppress_printing=False)
settings = {'alpha_rate': 1, 'cost_scale': 2, 'a_rate': 0.05, 'sum_rate': 0.95, 'nz': 0.5,
'gamma_rate': 0, 'c_scale': 0, 'h_rate': 0.2}
G, H, F, gamma, c, d, alpha, a, b, TT, total_buffer_cost, buffer_cost = generate_MCQN_data(seed, K, I, **settings)
TT = 100
result = {'servers': I, 'buffers': K, 'seed': seed}
start_time = time.time()
solution, STEPCOUNT, param_line, res = SCLP(G, H, F, a, b, c, d, alpha, gamma, 3 / 12 * TT, solver_settings)
t, x, q, u, p, pivots, obj, err, NN, tau, maxT = solution.get_final_solution(True)
assert obj is not None, "solution obj is None!"
assert 0 < maxT < TT, f"0 < {maxT} < {TT} failed"
assert len(t) > 0, f"len(t) is {len(t)}"
@pytest.mark.parametrize("seed", seeds)
def test_degenerate_mcqn(seed):
K = 400
I = 40
import time
solver_settings = SCLP_settings(find_alt_line=False, check_intermediate_solution=False, memory_management=False,
suppress_printing=False)
settings = {'alpha_rate': 1, 'cost_scale': 2, 'a_rate': 0.05, 'sum_rate': 0.95, 'nz': 0.5,
'gamma_rate': 0, 'c_scale': 0, 'h_rate': 0.2}
G, H, F, gamma, c, d, alpha, a, b, TT, total_buffer_cost, buffer_cost = generate_MCQN_data(seed, K, I, **settings)
a[0:4] = [0, 0, 0, 0]
c[6:8] = [0, 0]
TT = 100
result = {'servers': I, 'buffers': K, 'seed': seed}
start_time = time.time()
solution, STEPCOUNT, param_line, res = SCLP(G, H, F, a, b, c, d, alpha, gamma, 3 / 12 * TT, solver_settings)
t, x, q, u, p, pivots, obj, err, NN, tau, maxT = solution.get_final_solution(True)
assert obj is not None, "solution obj is None!"
assert 0 < maxT < TT, f"0 < {maxT} < {TT} failed"
assert len(t) > 0, f"len(t) is {len(t)}"
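# Added note: with @pytest.mark.parametrize, each seed gets its own test id,
# so a single case can be selected from the shell (hypothetical invocation):
#   pytest MCQN_basic_test.py -k "test_basic_mcqn[1001]"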
| 38.16129 | 118 | 0.620879 | 378 | 2,366 | 3.722222 | 0.275132 | 0.028429 | 0.008529 | 0.035537 | 0.828714 | 0.828714 | 0.828714 | 0.776119 | 0.776119 | 0.776119 | 0 | 0.040283 | 0.223584 | 2,366 | 61 | 119 | 38.786885 | 0.72564 | 0 | 0 | 0.755556 | 0 | 0 | 0.122673 | 0 | 0 | 0 | 0 | 0 | 0.133333 | 1 | 0.044444 | false | 0 | 0.133333 | 0 | 0.177778 | 0.044444 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
0c2ced6c68f3f7028b8c7a1906fe1260bc42646b | 112 | py | Python | CodeWars/8 Kyu/Expressions Matter.py | anubhab-code/Competitive-Programming | de28cb7d44044b9e7d8bdb475da61e37c018ac35 | [
"MIT"
] | null | null | null | CodeWars/8 Kyu/Expressions Matter.py | anubhab-code/Competitive-Programming | de28cb7d44044b9e7d8bdb475da61e37c018ac35 | [
"MIT"
] | null | null | null | CodeWars/8 Kyu/Expressions Matter.py | anubhab-code/Competitive-Programming | de28cb7d44044b9e7d8bdb475da61e37c018ac35 | [
"MIT"
] | null | null | null | def expression_matter(a, b, c):
    possible = [a + b + c, a * b * c, a * b + c, a * (b + c), a + b * c, (a + b) * c]
return max(possible) | 37.333333 | 55 | 0.589286 | 28 | 112 | 2.321429 | 0.321429 | 0.215385 | 0.323077 | 0.307692 | 0.276923 | 0.276923 | 0.276923 | 0.276923 | 0.276923 | 0.276923 | 0 | 0 | 0.133929 | 112 | 3 | 56 | 37.333333 | 0.670103 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.333333 | false | 0 | 0 | 0 | 0.666667 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 7 |
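# Added sanity check: for (2, 1, 2) both 2*(1+2) and (2+1)*2 evaluate to 6,
# which is the maximum of the six candidate expressions.
print(expression_matter(2, 1, 2))  # 6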
a76cb47e2c00391cfb9c6f5b930c1430c571d5cc | 187 | py | Python | session1/5_operadores_de_comparacion.py | mario21ic/python_curso | 3a183c0161a158ced09c3561d8f422d7bf62180e | [
"Apache-2.0"
] | 1 | 2016-04-06T15:32:13.000Z | 2016-04-06T15:32:13.000Z | session1/5_operadores_de_comparacion.py | mario21ic/python_curso | 3a183c0161a158ced09c3561d8f422d7bf62180e | [
"Apache-2.0"
] | null | null | null | session1/5_operadores_de_comparacion.py | mario21ic/python_curso | 3a183c0161a158ced09c3561d8f422d7bf62180e | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
print "#"*3 +" Operadores de comparación "+"#"*3
print ">",(0>-1)
print "<",(1<2)
print ">=",(2>=2)
print "<=",(1<=2)
print "==",('xD'=='xD')
print "!=",(3!='3')
| 18.7 | 48 | 0.449198 | 27 | 187 | 3.111111 | 0.444444 | 0.214286 | 0.166667 | 0.285714 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.080247 | 0.13369 | 187 | 9 | 49 | 20.777778 | 0.438272 | 0.112299 | 0 | 0 | 0 | 0 | 0.268293 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0 | null | null | 1 | 1 | 0 | 0 | null | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 7 |
a7b28540b2ba99397f89f1c414705c083797d940 | 16,898 | py | Python | sportybet.py | kennedyC2/Arbitrage | 2562bd7a976186e2d0472ceb5311251ad9859dd2 | [
"MIT"
] | null | null | null | sportybet.py | kennedyC2/Arbitrage | 2562bd7a976186e2d0472ceb5311251ad9859dd2 | [
"MIT"
] | null | null | null | sportybet.py | kennedyC2/Arbitrage | 2562bd7a976186e2d0472ceb5311251ad9859dd2 | [
"MIT"
] | null | null | null | # Dependencies
# =============================================================================================================
import undetected_chromedriver.v2 as uc
from selenium.webdriver.common.action_chains import ActionChains
from selenium.webdriver.common.by import By
from bs4 import BeautifulSoup
import time
import json
# import os
# from pathlib import Path
# Browser Configurations
# =============================================================================================================
BrowserMode = uc.ChromeOptions()
BrowserMode.headless = False
BrowserMode.add_argument('--user-data-dir=./chrome_profile/')
BrowserMode.add_argument("--start-maximized")
Browser = uc.Chrome(options=BrowserMode)
actions = ActionChains(Browser)
# Websites
# =============================================================================================================
sportybet = "https://www.sportybet.com/ng/sport/football"
# Functions
# =============================================================================================================
# Get Menu
def getMenu():
# Object
data = {}
# Initiate Browser
Browser.get(sportybet)
# Wait
time.sleep(5)
# Parse HtmlDoc
soup = BeautifulSoup(Browser.page_source, "html5lib")
# Fetch Menu
collection = soup.select(
".sport-list > .category-list-item > .category-item")
for div in collection:
span = div.find_all('span')[0]
title = span.get_text().strip()
id = collection.index(div)
data[title] = {}
data[title]['location'] = id
# fetch Submenu and Menu link
for each in data:
l = data[each]['location']
# Parse HtmlDoc
soup = BeautifulSoup(Browser.page_source, "html5lib")
d = soup.select(
".sport-list > .category-list-item > .tournament-list > ul")[l]
e = []
for t in d.find_all('span', 'tournament-name'):
e.append(t.get_text().strip())
data[each]['submenu'] = e
# Save as JSON
with open('./Sportybet/sportybet_menu.txt', 'w') as outfile:
json.dump(data, outfile, indent=4)
Browser.quit()
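# The saved menu file maps each sidebar category to its position and its
# tournament names (added illustration; keys come from the code above,
# values are made up):
# {"England": {"location": 0, "submenu": ["Premier League", "Championship"]}}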
# Sportybet Odds
# ======================================================================================
# Single
def Single():
# Odds
odds = []
# fetch links
with open('./Sportybet/sportybet_menu.txt', 'r') as json_file:
global data
data = json.load(json_file)
# Initiate Browser
Browser.get(sportybet)
for e in data:
# Index
l = data[e]['location']
# Menu
item = Browser.find_element(By.XPATH,
'/html/body/div[2]/div/div[2]/div/div[1]/div/div/div[4]/ul[2]/li[' + str(l + 1) + ']')
# Extend submenu
soup = BeautifulSoup(Browser.page_source, 'html5lib')
ln = len(soup.select('#sportList > div.game-list > ul.sport-list > li:nth-child(' +
str(l + 1) + ') > div.tournament-list > ul > li'))
# loop and click
actions.move_to_element(item).perform()
for z in range(1, ln + 1):
# Click
d1 = Browser.find_element(By.XPATH,
'/html/body/div[2]/div/div[2]/div/div[1]/div/div/div[4]/ul[2]/li[' + str(l + 1) + ']/div[2]/ul/li[' + str(z) + ']')
Browser.execute_script("arguments[0].click();", d1)
# Wait 10 seconds
time.sleep(10)
# Activate Single
for j in range(ln):
Browser.find_element(By.XPATH,
'/html/body/div[2]/div/div[2]/div/div[2]/div/div[2]/div[2]/div[' + str(2 + j) + ']/div/div[3]/div[1]').click()
# Wait 5 seconds
time.sleep(5)
# Parse HtmlDoc
soup = BeautifulSoup(Browser.page_source, "html5lib")
elem = soup.find_all('div', 'match-league')
for each in elem:
# Compile
category = each.find('span', 'text').get_text().strip()
div = each.find_all('div', 'm-content-row')
for cc in div:
info = {}
info['category'] = category
home_team = cc.find(
'div', 'home-team').get_text().strip()
away_team = cc.find(
'div', 'away-team').get_text().strip()
info['match'] = home_team + ' vs ' + away_team
info['time'] = cc.find(
'div', 'clock-time').get_text().replace(' ', '').strip()
info['home'] = cc.find_all(
'span', 'm-outcome-odds')[0].get_text().strip() or 0
info['away'] = cc.find_all(
'span', 'm-outcome-odds')[2].get_text().strip() or 0
# Upload
odds.append(info)
actions.move_to_element(item).perform()
for z in range(1, ln + 1):
# UnClick
d1 = Browser.find_element(By.XPATH,
'/html/body/div[2]/div/div[2]/div/div[1]/div/div/div[4]/ul[2]/li[' + str(l + 1) + ']/div[2]/ul/li[' + str(z) + ']')
Browser.execute_script("arguments[0].click();", d1)
# Wait 10 seconds
time.sleep(10)
with open('./Sportybet/sportybet_Single.txt', 'w') as outfile:
json.dump(odds, outfile, indent=4)
Browser.quit()
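# Each record appended to sportybet_Single.txt has this shape (added
# illustration; keys come from the code above, values are made up):
# {"category": "...", "match": "Home vs Away", "time": "...",
#  "home": "2.10", "away": "3.40"}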
# Double
def Double():
# Odds
odds = []
# fetch links
with open('./Sportybet/sportybet_menu.txt', 'r') as json_file:
global data
data = json.load(json_file)
# Initiate Browser
Browser.get(sportybet)
for e in data:
# Index
l = data[e]['location']
# Menu
item = Browser.find_element(By.XPATH,
'/html/body/div[2]/div/div[2]/div/div[1]/div/div/div[4]/ul[2]/li[' + str(l + 1) + ']')
# Extend submenu
soup = BeautifulSoup(Browser.page_source, 'html5lib')
ln = len(soup.select('#sportList > div.game-list > ul.sport-list > li:nth-child(' +
str(l + 1) + ') > div.tournament-list > ul > li'))
# loop and click
actions.move_to_element(item).perform()
for z in range(1, ln + 1):
# Click
d1 = Browser.find_element(By.XPATH,
'/html/body/div[2]/div/div[2]/div/div[1]/div/div/div[4]/ul[2]/li[' + str(l + 1) + ']/div[2]/ul/li[' + str(z) + ']')
Browser.execute_script("arguments[0].click();", d1)
        # Wait 15 seconds
time.sleep(15)
# Activate Double
for j in range(ln):
Browser.find_element(By.XPATH,
'/html/body/div[2]/div/div[2]/div/div[2]/div/div[2]/div[2]/div[' + str(2 + j) + ']/div/div[3]/div[2]').click()
# Wait 5 seconds
time.sleep(5)
# Parse HtmlDoc
soup = BeautifulSoup(Browser.page_source, "html5lib")
elem = soup.find_all('div', 'match-league')
for each in elem:
# Compile
category = each.find('span', 'text').get_text().strip()
div = each.find_all('div', 'm-content-row')
for cc in div:
info = {}
info['category'] = category
home_team = cc.find(
'div', 'home-team').get_text().strip()
away_team = cc.find(
'div', 'away-team').get_text().strip()
info['match'] = home_team + ' vs ' + away_team
info['time'] = cc.find(
'div', 'clock-time').get_text().replace(' ', '').strip()
if not(cc.find_all('div', 'm-outcome--disabled')):
info['1X'] = cc.find_all(
'span', 'm-outcome-odds')[0].get_text().strip() or 0
info['2X'] = cc.find_all(
'span', 'm-outcome-odds')[2].get_text().strip() or 0
else:
if cc.select('.m-market > div')[0]['class'].index('m-outcome--disabled') > -1:
info['1X'] = 0
else:
info['1X'] = cc.find_all(
'span', 'm-outcome-odds')[0].get_text().strip() or 0
if cc.select('.m-market > div')[2]['class'].index('m-outcome--disabled') > -1:
info['2X'] = 0
else:
info['2X'] = cc.find_all(
'span', 'm-outcome-odds')[2].get_text().strip() or 0
# Upload
odds.append(info)
actions.move_to_element(item).perform()
for z in range(1, ln + 1):
# UnClick
d1 = Browser.find_element(By.XPATH,
'/html/body/div[2]/div/div[2]/div/div[1]/div/div/div[4]/ul[2]/li[' + str(l + 1) + ']/div[2]/ul/li[' + str(z) + ']')
Browser.execute_script("arguments[0].click();", d1)
# Wait 10 seconds
time.sleep(10)
with open('./Sportybet/sportybet_Double.txt', 'w') as outfile:
json.dump(odds, outfile, indent=4)
Browser.quit()
# GGNG
def GGNG():
# Odds
odds = []
# fetch links
with open('./Sportybet/sportybet_menu.txt', 'r') as json_file:
global data
data = json.load(json_file)
# Initiate Browser
Browser.get(sportybet)
for e in data:
# Index
l = data[e]['location']
# Menu
item = Browser.find_element(By.XPATH,
'/html/body/div[2]/div/div[2]/div/div[1]/div/div/div[4]/ul[2]/li[' + str(l + 1) + ']')
# Extend submenu
soup = BeautifulSoup(Browser.page_source, 'html5lib')
ln = len(soup.select('#sportList > div.game-list > ul.sport-list > li:nth-child(' +
str(l + 1) + ') > div.tournament-list > ul > li'))
# loop and click
actions.move_to_element(item).perform()
for z in range(1, ln + 1):
# Click
d1 = Browser.find_element(By.XPATH,
'/html/body/div[2]/div/div[2]/div/div[1]/div/div/div[4]/ul[2]/li[' + str(l + 1) + ']/div[2]/ul/li[' + str(z) + ']')
Browser.execute_script("arguments[0].click();", d1)
        # Wait 15 seconds
time.sleep(15)
# Activate GGNG
for j in range(ln):
Browser.find_element(By.XPATH,
'/html/body/div[2]/div/div[2]/div/div[2]/div/div[2]/div[2]/div[' + str(2 + j) + ']/div/div[3]/div[3]').click()
# Wait 5 seconds
time.sleep(5)
# Parse HtmlDoc
soup = BeautifulSoup(Browser.page_source, "html5lib")
elem = soup.find_all('div', 'match-league')
for each in elem:
# Compile
category = each.find('span', 'text').get_text().strip()
div = each.find_all('div', 'm-content-row')
for cc in div:
info = {}
info['category'] = category
home_team = cc.find(
'div', 'home-team').get_text().strip()
away_team = cc.find(
'div', 'away-team').get_text().strip()
info['match'] = home_team + ' vs ' + away_team
info['time'] = cc.find(
'div', 'clock-time').get_text().replace(' ', '').strip()
if not(cc.find_all('div', 'm-outcome--disabled')):
info['GG'] = cc.find_all(
'span', 'm-outcome-odds')[0].get_text().strip() or 0
info['NG'] = cc.find_all(
'span', 'm-outcome-odds')[1].get_text().strip() or 0
else:
if cc.select('.m-market > div')[0]['class'].index('m-outcome--disabled') > -1:
info['GG'] = 0
else:
info['GG'] = cc.find_all(
'span', 'm-outcome-odds')[0].get_text().strip() or 0
if cc.select('.m-market > div')[1]['class'].index('m-outcome--disabled') > -1:
info['NG'] = 0
else:
info['NG'] = cc.find_all(
'span', 'm-outcome-odds')[1].get_text().strip() or 0
# Upload
odds.append(info)
actions.move_to_element(item).perform()
for z in range(1, ln + 1):
# UnClick
d1 = Browser.find_element(By.XPATH,
'/html/body/div[2]/div/div[2]/div/div[1]/div/div/div[4]/ul[2]/li[' + str(l + 1) + ']/div[2]/ul/li[' + str(z) + ']')
Browser.execute_script("arguments[0].click();", d1)
# Wait 10 seconds
time.sleep(10)
with open('./Sportybet/sportybet_GGNG.txt', 'w') as outfile:
json.dump(odds, outfile, indent=4)
Browser.quit()
# DNB
def DNB():
# Odds
odds = []
# fetch links
with open('./Sportybet/sportybet_menu.txt', 'r') as json_file:
global data
data = json.load(json_file)
# Initiate Browser
Browser.get(sportybet)
for e in data:
# Index
l = data[e]['location']
# Menu
item = Browser.find_element(By.XPATH,
'/html/body/div[2]/div/div[2]/div/div[1]/div/div/div[4]/ul[2]/li[' + str(l + 1) + ']')
# Extend submenu
soup = BeautifulSoup(Browser.page_source, 'html5lib')
ln = len(soup.select('#sportList > div.game-list > ul.sport-list > li:nth-child(' +
str(l + 1) + ') > div.tournament-list > ul > li'))
# loop and click
actions.move_to_element(item).perform()
for z in range(1, ln + 1):
# Click
d1 = Browser.find_element(By.XPATH,
'/html/body/div[2]/div/div[2]/div/div[1]/div/div/div[4]/ul[2]/li[' + str(l + 1) + ']/div[2]/ul/li[' + str(z) + ']')
Browser.execute_script("arguments[0].click();", d1)
        # Wait 15 seconds
time.sleep(15)
# Activate Draw No Bet
for j in range(ln):
Browser.find_element(By.XPATH,
'/html/body/div[2]/div/div[2]/div/div[2]/div/div[2]/div[2]/div[' + str(2 + j) + ']/div/div[3]/div[4]').click()
# Wait 5 seconds
time.sleep(5)
# Parse HtmlDoc
soup = BeautifulSoup(Browser.page_source, "html5lib")
elem = soup.find_all('div', 'match-league')
for each in elem:
# Compile
category = each.find('span', 'text').get_text().strip()
div = each.find_all('div', 'm-content-row')
for cc in div:
info = {}
info['category'] = category
home_team = cc.find(
'div', 'home-team').get_text().strip()
away_team = cc.find(
'div', 'away-team').get_text().strip()
info['match'] = home_team + ' vs ' + away_team
info['time'] = cc.find(
'div', 'clock-time').get_text().replace(' ', '').strip()
if not(cc.find_all('div', 'm-outcome--disabled')):
info['home'] = cc.find_all(
'span', 'm-outcome-odds')[0].get_text().strip() or 0
info['away'] = cc.find_all(
'span', 'm-outcome-odds')[1].get_text().strip() or 0
else:
if cc.select('.m-market > div')[0]['class'].index('m-outcome--disabled') > -1:
info['home'] = 0
else:
info['home'] = cc.find_all(
'span', 'm-outcome-odds')[0].get_text().strip() or 0
if cc.select('.m-market > div')[1]['class'].index('m-outcome--disabled') > -1:
info['away'] = 0
else:
info['away'] = cc.find_all(
'span', 'm-outcome-odds')[1].get_text().strip() or 0
# Upload
odds.append(info)
actions.move_to_element(item).perform()
for z in range(1, ln + 1):
# UnClick
d1 = Browser.find_element(By.XPATH,
'/html/body/div[2]/div/div[2]/div/div[1]/div/div/div[4]/ul[2]/li[' + str(l + 1) + ']/div[2]/ul/li[' + str(z) + ']')
Browser.execute_script("arguments[0].click();", d1)
# Wait 10 seconds
time.sleep(10)
with open('./Sportybet/sportybet_DNB.txt', 'w') as outfile:
json.dump(odds, outfile, indent=4)
Browser.quit()
Single()
# getMenu()
# Double()
# GGNG()
# DNB()
| 35.574737 | 153 | 0.470292 | 2,011 | 16,898 | 3.878667 | 0.088513 | 0.049231 | 0.039487 | 0.046154 | 0.871538 | 0.863974 | 0.857436 | 0.844487 | 0.837564 | 0.837564 | 0 | 0.023841 | 0.337259 | 16,898 | 474 | 154 | 35.649789 | 0.672649 | 0.090602 | 0 | 0.812721 | 0 | 0.070671 | 0.223663 | 0.097558 | 0 | 0 | 0 | 0 | 0 | 1 | 0.017668 | false | 0 | 0.021201 | 0 | 0.038869 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
ac6718c915faf78809b9cb47b2aacb57bcdcdeba | 6,579 | py | Python | linear_recurrent_net/tensorflow_binding/__init__.py | danielwen/parallel-recurrence | d62350d6f5516e9747b79256d6796a16e32fee70 | [
"MIT"
] | null | null | null | linear_recurrent_net/tensorflow_binding/__init__.py | danielwen/parallel-recurrence | d62350d6f5516e9747b79256d6796a16e32fee70 | [
"MIT"
] | null | null | null | linear_recurrent_net/tensorflow_binding/__init__.py | danielwen/parallel-recurrence | d62350d6f5516e9747b79256d6796a16e32fee70 | [
"MIT"
] | null | null | null | from functools import partial
import os
import tensorflow as tf
from tensorflow.python.framework import ops
_dir = os.path.dirname(os.path.abspath(__file__))
_lr_module = tf.load_op_library('%s/../lib/tf_linear_recurrence.so' % _dir)
def fast_linear_recurrence(decays, impulses, initial_state=None):
'''
Compute r[i] = decays[i] * r[i - 1] + impulses[i] with r[0] = initial_state.
decays and impulses must have the same shape and are [n_steps, ...].
initial_state must be None (to zero initialize) or [...]
'''
if initial_state is None:
initial_state = tf.zeros_like(impulses[0, :])
shape = tf.shape(decays)
rank = shape.get_shape()[0].value
if rank > 2:
tail = tf.reduce_prod(shape[1:])
decays = tf.reshape(decays, [shape[0], tail])
impulses = tf.reshape(impulses, [shape[0], tail])
initial_state = tf.reshape(initial_state, [tail])
# serial = False
# if not serial:
resp = _lr_module.fast_linear_recurrence(decays, impulses, initial_state)
# else:
# resp = _lr_module.serial_linear_recurrence(decays, impulses, initial_state)
if rank > 2:
resp = tf.reshape(resp, shape)
return resp
def linear_recurrence_baseline(decays, impulses, initial_state=None):
'''
Compute r[i] = decays[i] * r[i - 1] + impulses[i] with r[0] = initial_state.
decays and impulses must have the same shape and are [n_steps, ...].
initial_state must be None (to zero initialize) or [...]
'''
if initial_state is None:
initial_state = tf.zeros_like(impulses[0, :])
shape = tf.shape(decays)
rank = shape.get_shape()[0].value
if rank > 2:
tail = tf.reduce_prod(shape[1:])
decays = tf.reshape(decays, [shape[0], tail])
impulses = tf.reshape(impulses, [shape[0], tail])
initial_state = tf.reshape(initial_state, [tail])
# serial = False
# if not serial:
resp = _lr_module.linear_recurrence_baseline(decays, impulses, initial_state)
# else:
# resp = _lr_module.serial_linear_recurrence(decays, impulses, initial_state)
if rank > 2:
resp = tf.reshape(resp, shape)
return resp
def serial_linear_recurrence_baseline(decays, impulses, initial_state=None):
'''
Compute r[i] = decays[i] * r[i - 1] + impulses[i] with r[0] = initial_state.
decays and impulses must have the same shape and are [n_steps, ...].
initial_state must be None (to zero initialize) or [...]
'''
if initial_state is None:
initial_state = tf.zeros_like(impulses[0, :])
shape = tf.shape(decays)
rank = shape.get_shape()[0].value
if rank > 2:
tail = tf.reduce_prod(shape[1:])
decays = tf.reshape(decays, [shape[0], tail])
impulses = tf.reshape(impulses, [shape[0], tail])
initial_state = tf.reshape(initial_state, [tail])
resp = _lr_module.serial_linear_recurrence_baseline(decays, impulses, initial_state)
if rank > 2:
resp = tf.reshape(resp, shape)
return resp
@ops.RegisterGradient("FastLinearRecurrence")
def _fast_linear_recurrence_grad(op, dl_dresp):
    serial = False
decays = op.inputs[0]
impulses = op.inputs[1]
initial_state = op.inputs[2]
n_steps = tf.shape(impulses)[0]
# forwards goes from h_0 to h_{T-1}
# forwards_tail = linear_recurrence(decays, impulses, initial_state, serial=serial)[:-1, :]
forwards_tail = fast_linear_recurrence(decays, impulses, initial_state)[:-1, :]
forwards = tf.concat([tf.expand_dims(initial_state, 0), forwards_tail],
axis=0)
reverse = lambda x: tf.reverse(x, axis=[0])
# recur on
# decays from T, T-1, ..., 2
# output gradients from T-1, T-2, ..., 1
dl_dh_head = reverse(
fast_linear_recurrence(
reverse(decays)[:-1, :],
reverse(dl_dresp)[1:, :],
dl_dresp[-1, :],
# serial=serial
)
)
dl_dh = tf.concat([dl_dh_head, dl_dresp[-1:, :]], axis=0)
dl_dinit = decays[0, :] * dl_dh[0, :]
dl_dimpulses = dl_dh
dl_ddecays = dl_dh * forwards
return [dl_ddecays, dl_dimpulses, dl_dinit]
@ops.RegisterGradient("LinearRecurrenceBaseline")
def _linear_recurrence_baseline_grad(op, dl_dresp):
    serial = False
decays = op.inputs[0]
impulses = op.inputs[1]
initial_state = op.inputs[2]
n_steps = tf.shape(impulses)[0]
# forwards goes from h_0 to h_{T-1}
# forwards_tail = linear_recurrence(decays, impulses, initial_state, serial=serial)[:-1, :]
forwards_tail = linear_recurrence_baseline(decays, impulses, initial_state)[:-1, :]
forwards = tf.concat([tf.expand_dims(initial_state, 0), forwards_tail],
axis=0)
reverse = lambda x: tf.reverse(x, axis=[0])
# recur on
# decays from T, T-1, ..., 2
# output gradients from T-1, T-2, ..., 1
dl_dh_head = reverse(
linear_recurrence_baseline(
reverse(decays)[:-1, :],
reverse(dl_dresp)[1:, :],
dl_dresp[-1, :],
# serial=serial
)
)
dl_dh = tf.concat([dl_dh_head, dl_dresp[-1:, :]], axis=0)
dl_dinit = decays[0, :] * dl_dh[0, :]
dl_dimpulses = dl_dh
dl_ddecays = dl_dh * forwards
return [dl_ddecays, dl_dimpulses, dl_dinit]
@ops.RegisterGradient("SerialLinearRecurrenceBaseline")
def _serial_linear_recurrence_baseline_grad(op, dl_dresp):
    serial = True
decays = op.inputs[0]
impulses = op.inputs[1]
initial_state = op.inputs[2]
n_steps = tf.shape(impulses)[0]
# forwards goes from h_0 to h_{T-1}
forwards_tail = serial_linear_recurrence_baseline(decays, impulses, initial_state)[:-1, :]
forwards = tf.concat([tf.expand_dims(initial_state, 0), forwards_tail],
axis=0)
reverse = lambda x: tf.reverse(x, axis=[0])
# recur on
# decays from T, T-1, ..., 2
# output gradients from T-1, T-2, ..., 1
dl_dh_head = reverse(
serial_linear_recurrence_baseline(
reverse(decays)[:-1, :],
reverse(dl_dresp)[1:, :],
dl_dresp[-1, :],
# serial=serial
)
)
dl_dh = tf.concat([dl_dh_head, dl_dresp[-1:, :]], axis=0)
dl_dinit = decays[0, :] * dl_dh[0, :]
dl_dimpulses = dl_dh
dl_ddecays = dl_dh * forwards
return [dl_ddecays, dl_dimpulses, dl_dinit]
# ops.RegisterGradient("LinearRecurrence")(partial(_linear_recurrence_grad, serial=False))
# ops.RegisterGradient("SerialLinearRecurrence")(partial(_linear_recurrence_grad, serial=True))
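# Added framework-free reference sketch of the recurrence these ops compute,
# r[i] = decays[i] * r[i - 1] + impulses[i], useful for checking outputs on
# small inputs (plain Python lists, no TensorFlow required):
def _reference_linear_recurrence(decays, impulses, initial_state=0.0):
    r, out = initial_state, []
    for d, x in zip(decays, impulses):
        r = d * r + x
        out.append(r)
    return out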
| 32.25 | 95 | 0.635507 | 891 | 6,579 | 4.472503 | 0.1156 | 0.111418 | 0.068507 | 0.084818 | 0.90138 | 0.88256 | 0.879548 | 0.858469 | 0.831117 | 0.831117 | 0 | 0.018793 | 0.231646 | 6,579 | 203 | 96 | 32.408867 | 0.769535 | 0.23955 | 0 | 0.735043 | 0 | 0 | 0.021806 | 0.01773 | 0 | 0 | 0 | 0 | 0 | 1 | 0.051282 | false | 0 | 0.034188 | 0 | 0.136752 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
ac673369d46aca2d2b6d0bc15982abf68fcd0d7c | 17,810 | py | Python | src/config/api-server/vnc_cfg_api_server/tests/resources/test_bgpaas.py | pltf/contrail-controller | eae904fcfd8f7a3157f8acd25837c5da18c19ba9 | [
"Apache-2.0"
] | null | null | null | src/config/api-server/vnc_cfg_api_server/tests/resources/test_bgpaas.py | pltf/contrail-controller | eae904fcfd8f7a3157f8acd25837c5da18c19ba9 | [
"Apache-2.0"
] | null | null | null | src/config/api-server/vnc_cfg_api_server/tests/resources/test_bgpaas.py | pltf/contrail-controller | eae904fcfd8f7a3157f8acd25837c5da18c19ba9 | [
"Apache-2.0"
] | 1 | 2020-11-20T06:49:58.000Z | 2020-11-20T06:49:58.000Z | #
# Copyright (c) 2019 Juniper Networks, Inc. All rights reserved.
#
from builtins import range
from builtins import str
import logging
from cfgm_common.exceptions import BadRequest
from vnc_api.vnc_api import BGPaaSControlNodeZoneAttributes
from vnc_api.vnc_api import BgpAsAService
from vnc_api.vnc_api import BgpSessionAttributes
from vnc_api.vnc_api import ControlNodeZone
from vnc_api.vnc_api import GlobalSystemConfig
from vnc_cfg_api_server.tests import test_case
logger = logging.getLogger(__name__)
class TestBgpaas(test_case.ApiServerTestCase):
@classmethod
def setUpClass(cls, *args, **kwargs):
cls.console_handler = logging.StreamHandler()
cls.console_handler.setLevel(logging.DEBUG)
logger.addHandler(cls.console_handler)
super(TestBgpaas, cls).setUpClass(*args, **kwargs)
@classmethod
def tearDownClass(cls, *args, **kwargs):
logger.removeHandler(cls.console_handler)
super(TestBgpaas, cls).tearDownClass(*args, **kwargs)
@property
def api(self):
return self._vnc_lib
def create_bgpaas(self):
proj = self.api.project_read(
fq_name=["default-domain", "default-project"])
bgpaas_obj = BgpAsAService(name='bgpaas_update',
parent_obj=proj)
# Set a valid ASN and create bgpaas object
bgpaas_obj.autonomous_system = 64512
bgpaas_uuid = self.api.bgp_as_a_service_create(bgpaas_obj)
return bgpaas_uuid, bgpaas_obj
def create_bgpaas_with_local_asn(self):
proj = self.api.project_read(
fq_name=["default-domain", "default-project"])
bgp_session_attr = BgpSessionAttributes(local_autonomous_system=64500)
bgpaas_obj = BgpAsAService(name='bgpaas',
parent_obj=proj,
bgpaas_session_attributes=bgp_session_attr)
bgpaas_obj.autonomous_system = 64512
bgpaas_uuid = self.api.bgp_as_a_service_create(bgpaas_obj)
return bgpaas_uuid, bgpaas_obj
def test_bgpaas_create_with_valid_2_byte_asn(self):
proj = self.api.project_read(
fq_name=["default-domain", "default-project"])
bgp_session_attr = BgpSessionAttributes(local_autonomous_system=64500)
bgpaas_obj = BgpAsAService(name='bgpaas',
parent_obj=proj,
bgpaas_session_attributes=bgp_session_attr)
# Set a valid ASN and create bgpaas object
bgpaas_obj.autonomous_system = 64512
bgpaas_uuid = self.api.bgp_as_a_service_create(bgpaas_obj)
# Now delete the bgpaas object
self.api.bgp_as_a_service_delete(id=bgpaas_uuid)
def test_bgpaas_create_with_valid_4_byte_asn(self):
gsc = self.api.global_system_config_read(GlobalSystemConfig().fq_name)
# Enable 4 byte ASN flag in GSC
gsc.enable_4byte_as = True
self.api.global_system_config_update(gsc)
proj = self.api.project_read(
fq_name=["default-domain", "default-project"])
bgp_session_attr = BgpSessionAttributes(
local_autonomous_system=700001)
bgpaas_obj = BgpAsAService(name='bgpaas',
parent_obj=proj,
bgpaas_session_attributes=bgp_session_attr)
# Set a valid ASN and create bgpaas object
bgpaas_obj.autonomous_system = 700000
bgpaas_uuid = self.api.bgp_as_a_service_create(bgpaas_obj)
# Now delete the bgpaas object and disable 4 byte ASN flag
self.api.bgp_as_a_service_delete(id=bgpaas_uuid)
gsc.enable_4byte_as = False
self.api.global_system_config_update(gsc)
def test_bgpaas_create_with_invalid_2_byte_asn(self):
proj = self.api.project_read(
fq_name=["default-domain", "default-project"])
bgp_session_attr = BgpSessionAttributes(local_autonomous_system=64500)
bgpaas_obj = BgpAsAService(name='bgpaas',
parent_obj=proj,
bgpaas_session_attributes=bgp_session_attr)
        # Set an invalid ASN and create bgpaas object
bgpaas_obj.autonomous_system = 700000
self.assertRaises(BadRequest, self.api.bgp_as_a_service_create,
bgpaas_obj)
def test_bgpaas_create_with_invalid_2_byte_local_asn(self):
proj = self.api.project_read(
fq_name=["default-domain", "default-project"])
        # Set an invalid Local ASN and create bgpaas object
bgp_session_attr = BgpSessionAttributes(
local_autonomous_system=700001)
bgpaas_obj = BgpAsAService(name='bgpaas',
parent_obj=proj,
bgpaas_session_attributes=bgp_session_attr)
bgpaas_obj.autonomous_system = 64512
self.assertRaises(BadRequest, self.api.bgp_as_a_service_create,
bgpaas_obj)
def test_bgpaas_create_with_invalid_4_byte_asn(self):
gsc = self.api.global_system_config_read(GlobalSystemConfig().fq_name)
# Enable 4 byte ASN flag in GSC
gsc.enable_4byte_as = True
self.api.global_system_config_update(gsc)
proj = self.api.project_read(
fq_name=["default-domain", "default-project"])
bgpaas_obj = BgpAsAService(name='bgpaas',
parent_obj=proj)
        # Set an invalid ASN and create bgpaas object
bgpaas_obj.autonomous_system = 0x1FFFFFFFF
self.assertRaises(BadRequest, self.api.bgp_as_a_service_create,
bgpaas_obj)
# Finally, disable 4 byte ASN flag
gsc.enable_4byte_as = False
self.api.global_system_config_update(gsc)
def test_bgpaas_create_with_invalid_4_byte_local_asn(self):
gsc = self.api.global_system_config_read(GlobalSystemConfig().fq_name)
# Enable 4 byte ASN flag in GSC
gsc.enable_4byte_as = True
self.api.global_system_config_update(gsc)
        # Set an invalid Local ASN and create bgpaas object
proj = self.api.project_read(
fq_name=["default-domain", "default-project"])
bgp_session_attr = BgpSessionAttributes(
local_autonomous_system=0x1FFFFFFFF)
bgpaas_obj = BgpAsAService(name='bgpaas',
parent_obj=proj,
bgpaas_session_attributes=bgp_session_attr)
bgpaas_obj.autonomous_system = 0x7000001
self.assertRaises(BadRequest, self.api.bgp_as_a_service_create,
bgpaas_obj)
# Finally, disable 4 byte ASN flag
gsc.enable_4byte_as = False
self.api.global_system_config_update(gsc)
def test_bgpaas_update_with_valid_2_byte_asn(self):
# Create bgpaas object with ASN 64512
bgpaas_uuid, bgpaas_obj = self.create_bgpaas()
# Update ASN with a valid value
bgpaas_obj.autonomous_system = 64500
self.api.bgp_as_a_service_update(bgpaas_obj)
# Finally, delete the bgpaas object
self.api.bgp_as_a_service_delete(id=bgpaas_uuid)
def test_bgpaas_update_with_valid_2_byte_local_asn(self):
# Create bgpaas object with ASN 64512 and Local ASN 64500
bgpaas_uuid, bgpaas_obj = self.create_bgpaas_with_local_asn()
# Update Local ASN with a valid value
bgp_session_attr = BgpSessionAttributes(local_autonomous_system=64000)
bgpaas_obj.set_bgpaas_session_attributes(bgp_session_attr)
self.api.bgp_as_a_service_update(bgpaas_obj)
# Finally, delete the bgpaas object
self.api.bgp_as_a_service_delete(id=bgpaas_uuid)
def test_bgpaas_update_with_valid_4_byte_asn(self):
gsc = self.api.global_system_config_read(GlobalSystemConfig().fq_name)
# Enable 4 byte ASN flag in GSC
gsc.enable_4byte_as = True
self.api.global_system_config_update(gsc)
# Create bgpaas object with ASN 64512
bgpaas_uuid, bgpaas_obj = self.create_bgpaas()
# Update ASN with a valid 4 byte value
bgpaas_obj.autonomous_system = 700000
self.api.bgp_as_a_service_update(bgpaas_obj)
# Disable 4 byte ASN flag
gsc.enable_4byte_as = False
self.api.global_system_config_update(gsc)
# Finally, delete the bgpaas object
self.api.bgp_as_a_service_delete(id=bgpaas_uuid)
def test_bgpaas_update_with_valid_4_byte_local_asn(self):
gsc = self.api.global_system_config_read(GlobalSystemConfig().fq_name)
# Enable 4 byte ASN flag in GSC
gsc.enable_4byte_as = True
self.api.global_system_config_update(gsc)
# Create bgpaas object with ASN 64512 and Local ASN 64500
bgpaas_uuid, bgpaas_obj = self.create_bgpaas_with_local_asn()
# Update Local ASN with a valid value
bgp_session_attr = BgpSessionAttributes(local_autonomous_system=700002)
bgpaas_obj.set_bgpaas_session_attributes(bgp_session_attr)
self.api.bgp_as_a_service_update(bgpaas_obj)
# Disable 4 byte ASN flag
gsc.enable_4byte_as = False
self.api.global_system_config_update(gsc)
# Finally, delete the bgpaas object
self.api.bgp_as_a_service_delete(id=bgpaas_uuid)
def test_bgpaas_update_with_invalid_2_byte_asn(self):
# Create bgpaas object with ASN 64512
bgpaas_uuid, bgpaas_obj = self.create_bgpaas()
# Update ASN with an invalid 2 byte value
bgpaas_obj.autonomous_system = 700000
self.assertRaises(BadRequest, self.api.bgp_as_a_service_update,
bgpaas_obj)
# Finally, delete the bgpaas object
self.api.bgp_as_a_service_delete(id=bgpaas_uuid)
def test_bgpaas_update_with_invalid_2_byte_local_asn(self):
# Create bgpaas object with ASN 64512 and Local ASN 64500
bgpaas_uuid, bgpaas_obj = self.create_bgpaas_with_local_asn()
# Update Local ASN with an invalid value
bgp_session_attr = BgpSessionAttributes(local_autonomous_system=700001)
bgpaas_obj.set_bgpaas_session_attributes(bgp_session_attr)
self.assertRaises(BadRequest, self.api.bgp_as_a_service_update,
bgpaas_obj)
# Finally, delete the bgpaas object
self.api.bgp_as_a_service_delete(id=bgpaas_uuid)
def test_bgpaas_update_with_invalid_4_byte_asn(self):
gsc = self.api.global_system_config_read(GlobalSystemConfig().fq_name)
# Enable 4 byte ASN flag in GSC
gsc.enable_4byte_as = True
self.api.global_system_config_update(gsc)
# Create bgpaas object with ASN 64512
bgpaas_uuid, bgpaas_obj = self.create_bgpaas()
# Update ASN with an invalid 4 byte value
bgpaas_obj.autonomous_system = 0x1FFFFFFFF
self.assertRaises(BadRequest, self.api.bgp_as_a_service_update,
bgpaas_obj)
# Disable 4 byte ASN flag
gsc.enable_4byte_as = False
self.api.global_system_config_update(gsc)
# Finally, delete the bgpaas object
self.api.bgp_as_a_service_delete(id=bgpaas_uuid)
def test_bgpaas_update_with_invalid_4_byte_local_asn(self):
gsc = self.api.global_system_config_read(GlobalSystemConfig().fq_name)
# Enable 4 byte ASN flag in GSC
gsc.enable_4byte_as = True
self.api.global_system_config_update(gsc)
# Create bgpaas object with ASN 64512 and Local ASN 64500
bgpaas_uuid, bgpaas_obj = self.create_bgpaas_with_local_asn()
# Update Local ASN with an invalid 4 byte value
bgp_session_attr = BgpSessionAttributes(
local_autonomous_system=0x1FFFFFFFF)
bgpaas_obj.set_bgpaas_session_attributes(bgp_session_attr)
self.assertRaises(BadRequest, self.api.bgp_as_a_service_update,
bgpaas_obj)
# Disable 4 byte ASN flag
gsc.enable_4byte_as = False
self.api.global_system_config_update(gsc)
# Finally, delete the bgpaas object
self.api.bgp_as_a_service_delete(id=bgpaas_uuid)
def test_control_node_zone(self):
gsc = self.api.global_system_config_read(GlobalSystemConfig().fq_name)
cnz = []
for i in range(2):
cnz_name = "cnz-" + str(i)
cnz.append(ControlNodeZone(name=cnz_name, parent_obj=gsc))
self.api.control_node_zone_create(cnz[i])
proj = self.api.project_read(
fq_name=["default-domain", "default-project"])
bgpaas_obj = BgpAsAService(name='bgpaas', parent_obj=proj)
primary_attr = BGPaaSControlNodeZoneAttributes("primary")
secondary_attr = BGPaaSControlNodeZoneAttributes("secondary")
# create bgpaas with two control-node-zones as "primary"
bgpaas_obj.add_control_node_zone(cnz[0], primary_attr)
bgpaas_obj.add_control_node_zone(cnz[1], primary_attr)
try:
self.api.bgp_as_a_service_create(bgpaas_obj)
except BadRequest:
pass
# create bgpaas with two control-node-zones as "secondary"
bgpaas_obj.add_control_node_zone(cnz[0], secondary_attr)
bgpaas_obj.add_control_node_zone(cnz[1], secondary_attr)
try:
self.api.bgp_as_a_service_create(bgpaas_obj)
except BadRequest:
pass
# update bgpaas with two control-node-zones as "primary"
bgpaas_obj = BgpAsAService(name='bgpaas', parent_obj=proj)
bgpaas_uuid = self.api.bgp_as_a_service_create(bgpaas_obj)
bgpaas_obj = self.api.bgp_as_a_service_read(id=bgpaas_uuid)
bgpaas_obj.add_control_node_zone(cnz[0], primary_attr)
bgpaas_obj.add_control_node_zone(cnz[1], primary_attr)
try:
self.api.bgp_as_a_service_update(bgpaas_obj)
except BadRequest:
self.api.bgp_as_a_service_delete(id=bgpaas_uuid)
# update bgpaas with two control-node-zones as "secondary"
bgpaas_obj = BgpAsAService(name='bgpaas', parent_obj=proj)
bgpaas_uuid = self.api.bgp_as_a_service_create(bgpaas_obj)
bgpaas_obj = self.api.bgp_as_a_service_read(id=bgpaas_uuid)
bgpaas_obj.add_control_node_zone(cnz[0], secondary_attr)
bgpaas_obj.add_control_node_zone(cnz[1], secondary_attr)
try:
self.api.bgp_as_a_service_update(bgpaas_obj)
except BadRequest:
self.api.bgp_as_a_service_delete(id=bgpaas_uuid)
# update bgpaas with same control-node-zone as "primary"
bgpaas_obj = BgpAsAService(name='bgpaas', parent_obj=proj)
bgpaas_obj.add_control_node_zone(cnz[0], primary_attr)
bgpaas_uuid = self.api.bgp_as_a_service_create(bgpaas_obj)
bgpaas_obj = self.api.bgp_as_a_service_read(id=bgpaas_uuid)
bgpaas_obj.add_control_node_zone(cnz[0], primary_attr)
self.api.bgp_as_a_service_update(bgpaas_obj)
self.api.bgp_as_a_service_delete(id=bgpaas_uuid)
# update bgpaas with same control-node-zone as "secondary"
bgpaas_obj = BgpAsAService(name='bgpaas', parent_obj=proj)
bgpaas_obj.add_control_node_zone(cnz[0], primary_attr)
bgpaas_uuid = self.api.bgp_as_a_service_create(bgpaas_obj)
bgpaas_obj = self.api.bgp_as_a_service_read(id=bgpaas_uuid)
bgpaas_obj.add_control_node_zone(cnz[0], secondary_attr)
self.api.bgp_as_a_service_update(bgpaas_obj)
self.api.bgp_as_a_service_delete(id=bgpaas_uuid)
# update bgpaas with new control-node-zone as "primary"
bgpaas_obj = BgpAsAService(name='bgpaas', parent_obj=proj)
bgpaas_obj.add_control_node_zone(cnz[0], primary_attr)
bgpaas_uuid = self.api.bgp_as_a_service_create(bgpaas_obj)
bgpaas_obj = self.api.bgp_as_a_service_read(id=bgpaas_uuid)
bgpaas_obj.add_control_node_zone(cnz[1], primary_attr)
try:
self.api.bgp_as_a_service_update(bgpaas_obj)
except BadRequest:
self.api.bgp_as_a_service_delete(id=bgpaas_uuid)
# update bgpaas with same control-node-zone as "secondary"
bgpaas_obj = BgpAsAService(name='bgpaas', parent_obj=proj)
bgpaas_obj.add_control_node_zone(cnz[0], secondary_attr)
bgpaas_uuid = self.api.bgp_as_a_service_create(bgpaas_obj)
bgpaas_obj = self.api.bgp_as_a_service_read(id=bgpaas_uuid)
bgpaas_obj.add_control_node_zone(cnz[0], secondary_attr)
self.api.bgp_as_a_service_update(bgpaas_obj)
self.api.bgp_as_a_service_delete(id=bgpaas_uuid)
# update bgpaas with same control-node-zone as "primary"
bgpaas_obj = BgpAsAService(name='bgpaas', parent_obj=proj)
bgpaas_obj.add_control_node_zone(cnz[0], secondary_attr)
bgpaas_uuid = self.api.bgp_as_a_service_create(bgpaas_obj)
bgpaas_obj = self.api.bgp_as_a_service_read(id=bgpaas_uuid)
bgpaas_obj.add_control_node_zone(cnz[0], primary_attr)
self.api.bgp_as_a_service_update(bgpaas_obj)
self.api.bgp_as_a_service_delete(id=bgpaas_uuid)
# update bgpaas with new control-node-zone as "secondary"
bgpaas_obj = BgpAsAService(name='bgpaas', parent_obj=proj)
bgpaas_obj.add_control_node_zone(cnz[0], secondary_attr)
bgpaas_uuid = self.api.bgp_as_a_service_create(bgpaas_obj)
bgpaas_obj = self.api.bgp_as_a_service_read(id=bgpaas_uuid)
bgpaas_obj.add_control_node_zone(cnz[1], secondary_attr)
try:
self.api.bgp_as_a_service_update(bgpaas_obj)
except BadRequest:
self.api.bgp_as_a_service_delete(id=bgpaas_uuid)
for i in range(2):
self.api.control_node_zone_delete(fq_name=cnz[i].fq_name)
| 42.915663 | 79 | 0.688209 | 2,380 | 17,810 | 4.781092 | 0.055882 | 0.083048 | 0.052729 | 0.063274 | 0.92328 | 0.910449 | 0.893664 | 0.8905 | 0.881976 | 0.874945 | 0 | 0.017803 | 0.239921 | 17,810 | 414 | 80 | 43.019324 | 0.822782 | 0.130769 | 0 | 0.809353 | 0 | 0 | 0.02529 | 0 | 0 | 0 | 0.003437 | 0 | 0.028777 | 1 | 0.071942 | false | 0.007194 | 0.035971 | 0.003597 | 0.122302 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
ac893c3ff7d29d7ba4ea2920a59e5df119973373 | 8,702 | py | Python | simplify/core/externals.py | WithPrecedent/ml_funnel | 5302da8bf4944ac518d22cc37c181e5a09baaabe | [
"Apache-2.0"
] | null | null | null | simplify/core/externals.py | WithPrecedent/ml_funnel | 5302da8bf4944ac518d22cc37c181e5a09baaabe | [
"Apache-2.0"
] | null | null | null | simplify/core/externals.py | WithPrecedent/ml_funnel | 5302da8bf4944ac518d22cc37c181e5a09baaabe | [
"Apache-2.0"
] | 2 | 2019-10-07T14:36:26.000Z | 2020-02-23T00:50:20.000Z | """
externals: wrappers for external (scikit-learn) components in a data science workflow
Corey Rayburn Yung <coreyrayburnyung@gmail.com>
Copyright 2020, Corey Rayburn Yung
License: Apache-2.0 (https://www.apache.org/licenses/LICENSE-2.0)
Contents:
"""
from __future__ import annotations
import abc
import dataclasses
from typing import (Any, Callable, ClassVar, Dict, Iterable, List, Mapping,
Optional, Sequence, Tuple, Type, Union)
import more_itertools
import sourdough
from . import base
from . import components
from . import stages
@dataclasses.dataclass
class SklearnModel(components.Technique):
"""Wrapper for a scikit-learn model (an algorithm that doesn't transform).
Args:
name (str): designates the name of a class instance that is used for
internal referencing throughout siMpLify. For example, if a siMpLify
instance needs options from a Settings instance, 'name' should match
the appropriate section name in a Settings instance. Defaults to
None.
contents (Union[Callable, Type, object, str]): stored item(s) for use by
a Component subclass instance. If it is Type or str, an instance
will be created. If it is a str, that instance will be found in
'module'. Defaults to None.
parameters (Union[Mapping[str, Any], base.Parameters]): parameters, in
the form of an ordinary dict or a Parameters instance, to be
attached to 'contents' when the 'implement' method is called.
Defaults to an empty Parameters instance.
iterations (Union[int, str]): number of times the 'implement' method
should be called. If 'iterations' is 'infinite', the 'implement'
method will continue indefinitely unless the method stops further
iteration. Defaults to 1.
module (str): name of module where 'contents' is located if 'contents'
is a string. It can either be a siMpLify or external module, as
long as it is available to the python environment. Defaults to None.
parallel (ClassVar[bool]): indicates whether this Component design is
meant to be part of a parallel workflow structure. Defaults to
False.
"""
name: str = None
contents: Union[Callable, Type, object, str] = None
parameters: Union[Mapping[str, Any], base.Parameters] = base.Parameters()
iterations: Union[int, str] = 1
module: str = None
parallel: ClassVar[bool] = False
""" Public Methods """
def implement(self, project: sourdough.Project) -> sourdough.Project:
"""[summary]
Args:
project (sourdough.Project): [description]
Returns:
sourdough.Project: [description]
"""
try:
self.parameters = self.parameters.finalize(project = project)
except AttributeError:
pass
self.contents = self.contents(**self.parameters)
        self.contents.fit(project.data.x_train)
return project
@dataclasses.dataclass
class SklearnSplitter(components.Technique):
"""Wrapper for a scikit-learn data splitter.
Args:
name (str): designates the name of a class instance that is used for
internal referencing throughout siMpLify. For example, if a siMpLify
instance needs options from a Settings instance, 'name' should match
the appropriate section name in a Settings instance. Defaults to
None.
contents (Union[Callable, Type, object, str]): stored item(s) for use by
a Component subclass instance. If it is Type or str, an instance
will be created. If it is a str, that instance will be found in
'module'. Defaults to None.
parameters (Union[Mapping[str, Any], base.Parameters]): parameters, in
the form of an ordinary dict or a Parameters instance, to be
attached to 'contents' when the 'implement' method is called.
Defaults to an empty Parameters instance.
iterations (Union[int, str]): number of times the 'implement' method
should be called. If 'iterations' is 'infinite', the 'implement'
method will continue indefinitely unless the method stops further
iteration. Defaults to 1.
module (str): name of module where 'contents' is located if 'contents'
is a string. It can either be a siMpLify or external module, as
long as it is available to the python environment. Defaults to None.
parallel (ClassVar[bool]): indicates whether this Component design is
meant to be part of a parallel workflow structure. Defaults to
False.
"""
name: str = None
contents: Union[Callable, Type, object, str] = None
parameters: Union[Mapping[str, Any], base.Parameters] = base.Parameters()
iterations: Union[int, str] = 1
module: str = None
parallel: ClassVar[bool] = False
""" Public Methods """
def implement(self, project: sourdough.Project) -> sourdough.Project:
"""[summary]
Args:
project (sourdough.Project): [description]
Returns:
sourdough.Project: [description]
"""
try:
self.parameters = self.parameters.finalize(project = project)
except AttributeError:
pass
self.contents = self.contents(**self.parameters)
project.data.splits = tuple(self.contents.split(project.data.x))
project.data.split()
return project
@dataclasses.dataclass
class SklearnTransformer(components.Technique):
"""Wrapper for a scikit-learn transformer.
Args:
name (str): designates the name of a class instance that is used for
internal referencing throughout siMpLify. For example, if a siMpLify
instance needs options from a Settings instance, 'name' should match
the appropriate section name in a Settings instance. Defaults to
None.
contents (Union[Callable, Type, object, str]): stored item(s) for use by
a Component subclass instance. If it is Type or str, an instance
will be created. If it is a str, that instance will be found in
'module'. Defaults to None.
parameters (Union[Mapping[str, Any], base.Parameters]): parameters, in
the form of an ordinary dict or a Parameters instance, to be
attached to 'contents' when the 'implement' method is called.
Defaults to an empty Parameters instance.
iterations (Union[int, str]): number of times the 'implement' method
should be called. If 'iterations' is 'infinite', the 'implement'
method will continue indefinitely unless the method stops further
iteration. Defaults to 1.
module (str): name of module where 'contents' is located if 'contents'
is a string. It can either be a siMpLify or external module, as
long as it is available to the python environment. Defaults to None.
parallel (ClassVar[bool]): indicates whether this Component design is
meant to be part of a parallel workflow structure. Defaults to
False.
"""
name: str = None
contents: Union[Callable, Type, object, str] = None
parameters: Union[Mapping[str, Any], base.Parameters] = base.Parameters()
iterations: Union[int, str] = 1
module: str = None
parallel: ClassVar[bool] = False
""" Public Methods """
def implement(self, project: sourdough.Project) -> sourdough.Project:
"""[summary]
Args:
project (sourdough.Project): [description]
Returns:
sourdough.Project: [description]
"""
try:
self.parameters = self.parameters.finalize(project = project)
except AttributeError:
pass
self.contents = self.contents(**self.parameters)
data = project.data
        self.contents.fit(data.x_train)
data.x_train = self.contents.transform(data.x_train)
if data.x_test is not None:
data.x_test = self.contents.transform(data.x_test)
if data.x_validate is not None:
data.x_validate = self.contents.transform(data.x_validate)
project.data = data
return project
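# Example (added; hypothetical wiring, assumes scikit-learn is installed and
# that a string 'contents' is resolved inside 'module', as the docstrings say):
# scaler = SklearnTransformer(
#     name='scaler',
#     contents='StandardScaler',
#     module='sklearn.preprocessing')
# project = scaler.implement(project)  # fits on x_train, transforms all splits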
| 42.656863 | 81 | 0.628361 | 1,031 | 8,702 | 5.28807 | 0.165858 | 0.033015 | 0.023111 | 0.027513 | 0.865187 | 0.825935 | 0.825935 | 0.803375 | 0.803375 | 0.803375 | 0 | 0.00229 | 0.297403 | 8,702 | 204 | 82 | 42.656863 | 0.889434 | 0.613882 | 0 | 0.636364 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.045455 | false | 0.045455 | 0.136364 | 0 | 0.545455 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 7 |
ce381a31c3c7a5004ca07bc71ce595f4fddffd16 | 141 | py | Python | 29/09/8.py | pylangstudy/201705 | c69de524faa67fa2d96267d5a51ed9794208f0e4 | [
"CC0-1.0"
] | null | null | null | 29/09/8.py | pylangstudy/201705 | c69de524faa67fa2d96267d5a51ed9794208f0e4 | [
"CC0-1.0"
] | 38 | 2017-05-25T07:08:48.000Z | 2017-05-31T01:42:41.000Z | 29/09/8.py | pylangstudy/201705 | c69de524faa67fa2d96267d5a51ed9794208f0e4 | [
"CC0-1.0"
] | null | null | null | def Sum(value1, value2=200, value3=300):
return value1 + value2 + value3
print(Sum(100))
print(Sum(value3=333, value2=222, value1=111))
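# Added example: positional and keyword arguments can be mixed, as long as the
# positional ones come first.
print(Sum(100, value3=333))  # 100 + 200 + 333 = 633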
| 23.5 | 46 | 0.70922 | 22 | 141 | 4.545455 | 0.590909 | 0.24 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.221311 | 0.134752 | 141 | 5 | 47 | 28.2 | 0.598361 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.25 | false | 0 | 0 | 0.25 | 0.5 | 0.5 | 1 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 7 |
023338c95a7114f3973d99a042aa481fc718a8a6 | 18,991 | py | Python | models/Utils/NormalizingFlowFactories.py | jonathandumas/generative-models-power-systems | c1eff78cd17bb943bfd79d13dbee7bdc03c66955 | [
"BSD-2-Clause"
] | 11 | 2021-09-28T11:56:44.000Z | 2022-03-21T03:10:18.000Z | models/Utils/NormalizingFlowFactories.py | jonathandumas/generative-models-power-systems | c1eff78cd17bb943bfd79d13dbee7bdc03c66955 | [
"BSD-2-Clause"
] | null | null | null | models/Utils/NormalizingFlowFactories.py | jonathandumas/generative-models-power-systems | c1eff78cd17bb943bfd79d13dbee7bdc03c66955 | [
"BSD-2-Clause"
] | 4 | 2021-10-20T12:37:56.000Z | 2021-12-18T12:31:19.000Z | import torch
import torch.nn as nn
from ..Normalizers import *
from ..Conditionners import *
from models.Step.NormalizingFlow import NormalizingFlowStep, FCNormalizingFlow, CNNormalizingFlow
from models.Utils.MLP import MNISTCNN, CIFAR10CNN, MLP
from models.Examples.UFlow import UFlow, ImprovedUFlow
from .Distributions import *
def buildFCNormalizingFlow(nb_steps, conditioner_type, conditioner_args, normalizer_type, normalizer_args):
flow_steps = []
for step in range(nb_steps):
conditioner = conditioner_type(**conditioner_args)
if normalizer_type is MonotonicNormalizer and normalizer_args["hot_encoding"] and issubclass(conditioner_type, DAGConditioner):
normalizer_args["cond_size"] = normalizer_args["cond_size"] + conditioner_args["in_size"]
normalizer = normalizer_type(**normalizer_args)
flow_step = NormalizingFlowStep(conditioner, normalizer)
flow_steps.append(flow_step)
return FCNormalizingFlow(flow_steps, NormalLogDensity())
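# Example (added; argument values are hypothetical, and the exact keyword names
# expected in conditioner_args/normalizer_args depend on the constructors):
# flow = buildFCNormalizingFlow(
#     nb_steps=3,
#     conditioner_type=DAGConditioner,
#     conditioner_args={...},
#     normalizer_type=AffineNormalizer,
#     normalizer_args={})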
def MNIST_A_prior(in_size, kernel):
A = torch.zeros(in_size**2, in_size**2)
row_pix = torch.arange(in_size).view(1, -1).expand(in_size, -1).contiguous().view(-1, 1)
col_pix = torch.arange(in_size).view(-1, 1).expand(-1, in_size).contiguous().view(-1, 1)
for i in range(-kernel, kernel + 1):
for j in range(-kernel, kernel + 1):
mask = ((col_pix + i) < in_size) * ((col_pix + i) >= 0) * ((row_pix + j) < in_size) * ((row_pix + j) >= 0)
idx = ((row_pix * in_size + col_pix) * in_size**2 + col_pix + i + in_size * (row_pix + j)) * mask
A.view(-1)[idx] = 1.
A.view(-1)[torch.arange(0, in_size**4, in_size**2+1)] = 0
return A
def buildMNISTNormalizingFlow(nb_inner_steps, normalizer_type, normalizer_args, l1=0., nb_epoch_update=10,
hot_encoding=False, prior_kernel=None):
if len(nb_inner_steps) == 3:
img_sizes = [[1, 28, 28], [1, 14, 14], [1, 7, 7]]
dropping_factors = [[1, 2, 2], [1, 2, 2], [1, 1, 1]]
fc_l = [[2304, 128], [400, 64], [16, 16]]
nb_epoch_update = [10, 25, 50]
outter_steps = []
for i, fc in zip(range(len(fc_l)), fc_l):
in_size = img_sizes[i][0] * img_sizes[i][1] * img_sizes[i][2]
inner_steps = []
for step in range(nb_inner_steps[i]):
emb_s = 2 if normalizer_type is AffineNormalizer else 30
hidden = MNISTCNN(fc_l=fc, size_img=img_sizes[i], out_d=emb_s)
A_prior = MNIST_A_prior(img_sizes[i][1], prior_kernel) if prior_kernel is not None else None
cond = DAGConditioner(in_size, hidden, emb_s, l1=l1, nb_epoch_update=nb_epoch_update[i],
hot_encoding=hot_encoding, A_prior=A_prior)
if normalizer_type is MonotonicNormalizer:
#emb_s = 30 + in_size if hot_encoding else 30
emb_s = 30 + 2 if hot_encoding else 30
norm = normalizer_type(**normalizer_args, cond_size=emb_s)
else:
norm = normalizer_type(**normalizer_args)
flow_step = NormalizingFlowStep(cond, norm)
inner_steps.append(flow_step)
flow = FCNormalizingFlow(inner_steps, None)
flow.img_sizes = img_sizes[i]
outter_steps.append(flow)
return CNNormalizingFlow(outter_steps, NormalLogDensity(), dropping_factors)
elif len(nb_inner_steps) == 1:
inner_steps = []
for step in range(nb_inner_steps[0]):
emb_s = 2 if normalizer_type is AffineNormalizer else 30
hidden = MNISTCNN(fc_l=[2304, 128], size_img=[1, 28, 28], out_d=emb_s)
A_prior = MNIST_A_prior(28, prior_kernel) if prior_kernel is not None else None
cond = DAGConditioner(1*28*28, hidden, emb_s, l1=l1, nb_epoch_update=nb_epoch_update,
hot_encoding=hot_encoding, A_prior=A_prior)
if normalizer_type is MonotonicNormalizer:
#emb_s = 30 + 28*28 if hot_encoding else 30
emb_s = 30 + 2 if hot_encoding else 30
norm = normalizer_type(**normalizer_args, cond_size=emb_s)
else:
norm = normalizer_type(**normalizer_args)
flow_step = NormalizingFlowStep(cond, norm)
inner_steps.append(flow_step)
flow = FCNormalizingFlow(inner_steps, NormalLogDensity())
return flow
else:
return None
def buildCIFAR10NormalizingFlow(nb_inner_steps, normalizer_type, normalizer_args, l1=0., nb_epoch_update=5):
if len(nb_inner_steps) == 4:
img_sizes = [[3, 32, 32], [1, 32, 32], [1, 16, 16], [1, 8, 8]]
dropping_factors = [[3, 1, 1], [1, 2, 2], [1, 2, 2]]
fc_l = [[400, 128, 84], [576, 128, 32], [64, 32, 32], [16, 32, 32]]
k_sizes = [5, 3, 3, 2]
outter_steps = []
for i, fc in zip(range(len(fc_l)), fc_l):
in_size = img_sizes[i][0] * img_sizes[i][1] * img_sizes[i][2]
inner_steps = []
for step in range(nb_inner_steps[i]):
emb_s = 2 if normalizer_type is AffineNormalizer else 30
hidden = CIFAR10CNN(out_d=emb_s, fc_l=fc, size_img=img_sizes[i], k_size=k_sizes[i])
cond = DAGConditioner(in_size, hidden, emb_s, l1=l1, nb_epoch_update=nb_epoch_update)
norm = normalizer_type(**normalizer_args)
flow_step = NormalizingFlowStep(cond, norm)
inner_steps.append(flow_step)
flow = FCNormalizingFlow(inner_steps, None)
flow.img_sizes = img_sizes[i]
outter_steps.append(flow)
return CNNormalizingFlow(outter_steps, NormalLogDensity(), dropping_factors)
elif len(nb_inner_steps) == 1:
inner_steps = []
for step in range(nb_inner_steps[0]):
emb_s = 2 if normalizer_type is AffineNormalizer else 30
hidden = CIFAR10CNN(fc_l=[400, 128, 84], size_img=[3, 32, 32], out_d=emb_s, k_size=5)
cond = DAGConditioner(3*32*32, hidden, emb_s, l1=l1, nb_epoch_update=nb_epoch_update)
norm = normalizer_type(**normalizer_args)
flow_step = NormalizingFlowStep(cond, norm)
inner_steps.append(flow_step)
flow = FCNormalizingFlow(inner_steps, NormalLogDensity())
return flow
else:
return None
def buildSubMNISTNormalizingFlow(nb_inner_steps, normalizer_type, normalizer_args, l1=0., nb_epoch_update=10,
hot_encoding=False, prior_kernel=2):
if len(nb_inner_steps) == 3:
img_sizes = [[1, 28, 28], [1, 14, 14], [1, 7, 7]]
dropping_factors = [[1, 2, 2], [1, 2, 2], [1, 1, 1]]
fc_l = [[2304, 128], [400, 64], [16, 16]]
nb_epoch_update = [10, 25, 50]
outter_steps = []
for i, fc in zip(range(len(fc_l)), fc_l):
in_size = img_sizes[i][0] * img_sizes[i][1] * img_sizes[i][2]
inner_steps = []
for step in range(nb_inner_steps[i]):
emb_s = 2 if normalizer_type is AffineNormalizer else 30
#TODO ADD pixel position
in_s = (prior_kernel * 2 + 1) ** 2 - 1
hidden = MLP(in_d=in_s, hidden=[in_s * 4, in_s * 4], out_d=emb_s, act_f=nn.ELU())
A_prior = MNIST_A_prior(img_sizes[i][1], prior_kernel)
# Compute the mask indices, zero-padding when a pixel has fewer neighbours than the window size.
sub_mask = torch.zeros(A_prior.shape[0], (prior_kernel*2+1)**2 - 1).int()
for pix in range(A_prior.shape[0]):
idx = torch.nonzero(A_prior[pix, :]).int().view(-1)
if idx.shape[0] < sub_mask.shape[1]:
idx = torch.cat((idx.int(), torch.zeros(sub_mask.shape[1] - idx.shape[0]).int()))
sub_mask[pix, :] = idx
cond = SubDAGConditioner(in_size, hidden, emb_s, l1=l1, nb_epoch_update=nb_epoch_update[i],
hot_encoding=hot_encoding, A_prior=A_prior, sub_mask=sub_mask.long())
if normalizer_type is MonotonicNormalizer:
#emb_s = 30 + in_size if hot_encoding else 30
emb_s = 30 + 2 if hot_encoding else 30
norm = normalizer_type(**normalizer_args, cond_size=emb_s)
else:
norm = normalizer_type(**normalizer_args)
flow_step = NormalizingFlowStep(cond, norm)
inner_steps.append(flow_step)
flow = FCNormalizingFlow(inner_steps, None)
flow.img_sizes = img_sizes[i]
outter_steps.append(flow)
return CNNormalizingFlow(outter_steps, NormalLogDensity(), dropping_factors)
elif len(nb_inner_steps) == 1:
inner_steps = []
for step in range(nb_inner_steps[0]):
emb_s = 2 if normalizer_type is AffineNormalizer else 30
# TODO ADD pixel position
in_s = (prior_kernel * 2 + 1) ** 2 - 1 + 2
hidden = MLP(in_d=in_s, hidden=[in_s * 4, in_s * 4], out_d=emb_s)
#hidden = SubMNISTCNN(fc_l=[16, 50], size_img=[1, 5, 5], out_d=emb_s)
A_prior = MNIST_A_prior(28, prior_kernel)
# Compute the mask indices, zero-padding when a pixel has fewer neighbours than the window size.
sub_mask = torch.zeros(A_prior.shape[0], (prior_kernel * 2 + 1) ** 2 - 1).int()
for pix in range(A_prior.shape[0]):
idx = torch.nonzero(A_prior[pix, :]).int().view(-1)
if idx.shape[0] < sub_mask.shape[1]:
idx = torch.cat((idx.int(), torch.zeros(sub_mask.shape[1] - idx.shape[0]).int()))
sub_mask[pix, :] = idx
cond = SubDAGConditioner(1*28*28, hidden, emb_s, l1=l1, nb_epoch_update=nb_epoch_update,
hot_encoding=hot_encoding, A_prior=A_prior, sub_mask=sub_mask.long())
if normalizer_type is MonotonicNormalizer:
#emb_s = 30 + 28*28 if hot_encoding else 30
emb_s = 30 + 2 if hot_encoding else 30
norm = normalizer_type(**normalizer_args, cond_size=emb_s)
else:
norm = normalizer_type(**normalizer_args)
flow_step = NormalizingFlowStep(cond, norm)
inner_steps.append(flow_step)
flow = FCNormalizingFlow(inner_steps, NormalLogDensity())
return flow
else:
return None
def buildMNISTUFlow(normalizer_type, normalizer_args, prior_kernel=2, hot_encoding=False,
nb_epoch_update=10, nb_inner_steps=[1, 1, 1], emb_net=None):
img_sizes = [[1, 28, 28], [1, 14, 14], [1, 7, 7]]
dropping_factors = [[1, 2, 2], [1, 2, 2], [1, 1, 1]]
nb_epoch_update = [10, 25, 50]
enc_outter_steps = []
for i in range(3):
in_size = img_sizes[i][0] * img_sizes[i][1] * img_sizes[i][2]
inner_steps = []
for step in range(nb_inner_steps[i]):
emb_s = 2 if normalizer_type is AffineNormalizer else 30
in_s = (prior_kernel * 2 + 1) ** 2 - 1
hidden_l = emb_net if emb_net is not None else [in_s * 4, in_s * 4, in_s * 4, in_s * 4]  # fall back to default widths when no embedding net is given
hidden = MLP(in_d=in_s+2, hidden=hidden_l, out_d=emb_s)
A_prior = MNIST_A_prior(img_sizes[i][1], prior_kernel)
# Compute the mask indices, zero-padding when a pixel has fewer neighbours than the window size.
sub_mask = torch.zeros(A_prior.shape[0], (prior_kernel*2+1)**2 - 1).int()
for pix in range(A_prior.shape[0]):
idx = torch.nonzero(A_prior[pix, :]).int().view(-1)
if idx.shape[0] < sub_mask.shape[1]:
idx = torch.cat((idx.int(), torch.zeros(sub_mask.shape[1] - idx.shape[0]).int()))
sub_mask[pix, :] = idx
cond = SubDAGConditioner(in_size, hidden, emb_s, l1=0., nb_epoch_update=nb_epoch_update[i], hot_encoding=hot_encoding,
A_prior=A_prior, sub_mask=sub_mask.long())
if normalizer_type is MonotonicNormalizer:
emb_s = 30 + 2 if hot_encoding else 30
norm = normalizer_type(**normalizer_args, cond_size=emb_s)
else:
norm = normalizer_type(**normalizer_args)
flow_step = NormalizingFlowStep(cond, norm)
inner_steps.append(flow_step)
flow = FCNormalizingFlow(inner_steps, None)
flow.img_sizes = img_sizes[i]
enc_outter_steps.append(flow)
dec_outter_steps = []
for i in range(2, -1, -1):
in_size = img_sizes[i][0] * img_sizes[i][1] * img_sizes[i][2]
inner_steps = []
for step in range(nb_inner_steps[i]):
emb_s = 2 if normalizer_type is AffineNormalizer else 30
in_s = (prior_kernel * 2 + 1) ** 2 - 1
hidden = MLP(in_d=in_s + 2, hidden=[in_s * 4, in_s * 4, in_s * 4, in_s * 4], out_d=emb_s)
A_prior = MNIST_A_prior(img_sizes[i][1], prior_kernel)
# Compute the mask indices, zero-padding when a pixel has fewer neighbours than the window size.
sub_mask = torch.zeros(A_prior.shape[0], (prior_kernel * 2 + 1) ** 2 - 1).int()
for pix in range(A_prior.shape[0]):
idx = torch.nonzero(A_prior[pix, :]).int().view(-1)
if idx.shape[0] < sub_mask.shape[1]:
idx = torch.cat((idx.int(), torch.zeros(sub_mask.shape[1] - idx.shape[0]).int()))
sub_mask[pix, :] = idx
cond = SubDAGConditioner(in_size, hidden, emb_s, l1=0., nb_epoch_update=nb_epoch_update[i], hot_encoding=hot_encoding,
A_prior=A_prior, sub_mask=sub_mask.long())
if normalizer_type is MonotonicNormalizer:
emb_s = 30 + 2 if hot_encoding else 30
norm = normalizer_type(**normalizer_args, cond_size=emb_s)
else:
norm = normalizer_type(**normalizer_args)
flow_step = NormalizingFlowStep(cond, norm)
inner_steps.append(flow_step)
flow = FCNormalizingFlow(inner_steps, None)
flow.img_sizes = img_sizes[i]
dec_outter_steps.append(flow)
return UFlow(enc_outter_steps, dec_outter_steps, NormalLogDensity(), dropping_factors)
def buildMNISTIUFlow(normalizer_type, normalizer_args, prior_kernel=2, hot_encoding=False,
nb_epoch_update=10, nb_inner_steps=[1, 1, 1]):
img_sizes = [[1, 28, 28], [1, 14, 14], [1, 7, 7]]
fc_l = [[2304, 128], [400, 64]]
dropping_factors = [[1, 2, 2], [1, 2, 2], [1, 1, 1]]
nb_epoch_update = [10, 25, 50]
context_nets = nn.ModuleList()
context_size = 20
for i in range(2):
net = MNISTCNN(out_d=context_size, fc_l=fc_l[i], size_img=img_sizes[i])
context_nets.append(net)
enc_outter_steps = []
for i in range(3):
in_size = img_sizes[i][0] * img_sizes[i][1] * img_sizes[i][2]
inner_steps = []
for step in range(nb_inner_steps[i]):
emb_s = 2 if normalizer_type is AffineNormalizer else 30
# TODO REMOVE
if i == 2:
emb_s = 30
in_s = (prior_kernel * 2 + 1) ** 2 - 1
in_s_mlp = in_s + 2 if i == 0 else in_s + context_size + 2
hidden = MLP(in_d=in_s_mlp, hidden=[in_s * 4, in_s * 4, in_s * 4], out_d=emb_s)
A_prior = MNIST_A_prior(img_sizes[i][1], prior_kernel)
# Compute the mask indices, zero-padding when a pixel has fewer neighbours than the window size.
sub_mask = torch.zeros(A_prior.shape[0], (prior_kernel*2+1)**2 - 1).int()
for pix in range(A_prior.shape[0]):
idx = torch.nonzero(A_prior[pix, :]).int().view(-1)
if idx.shape[0] < sub_mask.shape[1]:
idx = torch.cat((idx.int(), torch.zeros(sub_mask.shape[1] - idx.shape[0]).int()))
sub_mask[pix, :] = idx
cond = SubDAGConditioner(in_size, hidden, emb_s, l1=0., nb_epoch_update=nb_epoch_update[i], hot_encoding=hot_encoding,
A_prior=A_prior, sub_mask=sub_mask.long())
if normalizer_type is MonotonicNormalizer:
emb_s = 30 + 2 if hot_encoding else 30
norm = normalizer_type(**normalizer_args, cond_size=emb_s)
else:
norm = normalizer_type(**normalizer_args)
# TODO REMOVE
if i == 2:
emb_s = 30 + 2 if hot_encoding else 30
norm_args = {"integrand_net": [50, 50, 50], "nb_steps": 15, "solver": "CC"}
norm = MonotonicNormalizer(**norm_args, cond_size=emb_s)
flow_step = NormalizingFlowStep(cond, norm)
inner_steps.append(flow_step)
flow = FCNormalizingFlow(inner_steps, None)
flow.img_sizes = img_sizes[i]
enc_outter_steps.append(flow)
dec_outter_steps = []
for i in range(2, -1, -1):
in_size = img_sizes[i][0] * img_sizes[i][1] * img_sizes[i][2]
inner_steps = []
for step in range(nb_inner_steps[i]):
emb_s = 2 if normalizer_type is AffineNormalizer else 30
in_s = (prior_kernel * 2 + 1) ** 2 - 1
hidden = MLP(in_d=in_s + 2, hidden=[in_s * 4, in_s * 4, in_s * 4], out_d=emb_s)
A_prior = MNIST_A_prior(img_sizes[i][1], prior_kernel)
# Compute the mask indices, zero-padding when a pixel has fewer neighbours than the window size.
sub_mask = torch.zeros(A_prior.shape[0], (prior_kernel * 2 + 1) ** 2 - 1).int()
for pix in range(A_prior.shape[0]):
idx = torch.nonzero(A_prior[pix, :]).int().view(-1)
if idx.shape[0] < sub_mask.shape[1]:
idx = torch.cat((idx.int(), torch.zeros(sub_mask.shape[1] - idx.shape[0]).int()))
sub_mask[pix, :] = idx
cond = SubDAGConditioner(in_size, hidden, emb_s, l1=0., nb_epoch_update=nb_epoch_update[i], hot_encoding=hot_encoding,
A_prior=A_prior, sub_mask=sub_mask.long())
if normalizer_type is MonotonicNormalizer:
emb_s = 30 + 2 if hot_encoding else 30
norm = normalizer_type(**normalizer_args, cond_size=emb_s)
else:
norm = normalizer_type(**normalizer_args)
flow_step = NormalizingFlowStep(cond, norm)
inner_steps.append(flow_step)
flow = FCNormalizingFlow(inner_steps, None)
flow.img_sizes = img_sizes[i]
dec_outter_steps.append(flow)
return ImprovedUFlow(enc_outter_steps, dec_outter_steps, NormalLogDensity(), dropping_factors, context_nets)
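# Quick self-check for MNIST_A_prior (a sketch; run as a module from the
# repository root, e.g. `python -m models.Utils.NormalizingFlowFactories`,
# since the relative imports above prevent direct script execution):
if __name__ == "__main__":
    A = MNIST_A_prior(28, 2)
    # one row per pixel; A[p, q] = 1 when q lies in the (2*kernel+1)^2 window
    # around p, with self-connections zeroed out
    assert A.shape == (28 ** 2, 28 ** 2)
    print("prior edges:", int(A.sum().item()))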
| 51.887978 | 135 | 0.589542 | 2,669 | 18,991 | 3.938179 | 0.057325 | 0.02055 | 0.031681 | 0.066597 | 0.864142 | 0.841499 | 0.837504 | 0.830654 | 0.822186 | 0.802968 | 0 | 0.046168 | 0.29514 | 18,991 | 365 | 136 | 52.030137 | 0.739056 | 0.047338 | 0 | 0.75082 | 0 | 0 | 0.003651 | 0 | 0 | 0 | 0 | 0.00274 | 0 | 1 | 0.022951 | false | 0 | 0.02623 | 0 | 0.091803 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
024e1b8da9c0cd2bcf0856caa1921b91e6db1998 | 26,200 | py | Python | neural_network/Stef_nn.py | StefanHD13/Machine_Learning_Projects | 2c6f935ed689a07d6bdb63a91566d40bf94809b8 | [
"MIT"
] | null | null | null | neural_network/Stef_nn.py | StefanHD13/Machine_Learning_Projects | 2c6f935ed689a07d6bdb63a91566d40bf94809b8 | [
"MIT"
] | null | null | null | neural_network/Stef_nn.py | StefanHD13/Machine_Learning_Projects | 2c6f935ed689a07d6bdb63a91566d40bf94809b8 | [
"MIT"
] | null | null | null | #need 2 functions one for predicting and one for training
import numpy as np
from math import exp
import random
#prediction function
def nn_predict(features,weights,neurons):
# features : an array of the features to predict labels for
# weights : a list of the weights; the first weight for each neuron is its bias (input fixed at 1)
# neurons : a list containing the number of neurons in each hidden layer and output layer
# first find the number of input features
rows, features_num = np.shape(features)
# get the number of hidden layers
hidden_lay_num = (len(neurons)-1)
# make an error check to see that the number of neurons and number of weights are matching
### NEED TO DO THIS ######
#create an array of zeros to store the predicted labels
labels = np.zeros((rows,neurons[hidden_lay_num]))
# need to loop through the number of samples and predict labels for each of them
for s in range(0,rows):
#initialise the weight location
weight_loc = 0
#loop through each hidden layer and the output layer
for i in range(0,(hidden_lay_num+1)):
#get number of neurons for current layer
current_neurons_num = neurons[i]
#get number of neurons for previous layer and set the values for the previous layer neurons
if i == 0:
previous_neurons_num = features_num
x = features[s,:]
previous_neurons = x.tolist()
else:
previous_neurons_num = neurons[i-1]
previous_neurons = current_neurons
#calculate the values for each of the neurons in the current layer
current_neurons = [0]*current_neurons_num
for j in range(0,current_neurons_num):
#add the bias to the neuron
current_neurons[j] = 1 * weights[weight_loc]
weight_loc = weight_loc +1
#loop through each of the neurons in the previous layer
for k in range(0,previous_neurons_num):
current_neurons[j] = current_neurons[j] + (previous_neurons[k]*weights[weight_loc])
weight_loc = weight_loc + 1
#now need to normalise the value for this neuron
# normalise using sigmoid function
x = current_neurons[j]
current_neurons[j] = exp(x)/(exp(x)+1)
#should now have the neuron values for output layer under current_neurons
#store them in the numpy array
for i in range(0,neurons[hidden_lay_num]):
labels[s,i] = current_neurons[i]
return labels
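# Quick self-check for nn_predict (a sketch, guarded so it only runs as a
# script): one hidden layer of 4 neurons and 2 outputs for 3 input features.
# The weight count follows the convention used by the trainers below:
# each neuron has one bias weight plus one weight per input.
if __name__ == "__main__":
    demo_features = np.random.rand(5, 3)
    demo_neurons = [4, 2]  # hidden layer, then output layer
    demo_weights_num = (3 + 1) * demo_neurons[0] + (demo_neurons[0] + 1) * demo_neurons[1]
    demo_weights = list(np.random.uniform(-2, 2, demo_weights_num))
    print(nn_predict(demo_features, demo_weights, demo_neurons).shape)  # (5, 2)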
# train a neural network using genetic algorithm
def nn_train_elite(features,labels,hidden_lay_num,neurons_num,population,iterations,mutations,batch_size):
#features: an array of features to train from
#labels: an array of labels to train to predict
#hidden_lay_num: the number of hidden layers for the neural network
#hidden_lay_neurons: a list containing the number of neurons for each hidden layer
#population: the population of the genetic algorithm IMPORTANT: Half of the population must be an even number
#iterations: the total number of iterations for the genetic algorithm
#mutations: affects the chance of a gene mutating when children are created. generates a random integer between 0 and this value.
# if the generated integer is 0, that gene will mutate
#batch_size: the size of the random batch in each generation. if None, will use all of the samples in each generation
# INVALID INPUTS CHECKS
# check that the population divided by two is an even number. if not, give an error
#first calculate the number of weights needed
samples_num,features_num = np.shape(features)
samples_num,labels_num = np.shape(labels)
neurons_num.append(labels_num)
weights_num = (features_num+1)*neurons_num[0] #the +1 is for the bias neuron
x = len(neurons_num)
for i in range(1,x):
weights_num = weights_num + (neurons_num[i]*(neurons_num[i-1]+1)) #the +1 is for the bias neuron
#now weights_num has the number of weights for the neural network
#create a numpy array of random numbers with columns equal to the number of weights and rows equal to the population
networks = np.random.uniform(-2,2,(population,weights_num))
x = np.zeros((population,1))
networks = np.concatenate((networks,x),1) #the last column of networks is for the fitness
#create a small random batch of features and labels
if batch_size == None:
labels_batch = labels
features_batch = features
batch_samples = samples_num
else:
#generating a list of random numbers determined by batch_size
random_nums = random.sample(range(0,samples_num),batch_size)
labels_batch = labels[random_nums,:]
features_batch = features[random_nums,:]
batch_samples = batch_size
#now for these initial networks, calculate the fitness for each one
for i in range(0,population):
#print('Calculating fitness for initial networks: '+str(i)+'/'+str(population))
#get the weights for the network in a list
x = networks[i,0:weights_num]
weights = x.tolist()
#predict the labels using this network
labels_pred = nn_predict(features_batch,weights,neurons_num)
#calculate a fitness score based on the labels
#the fitness will be the mean of the squared residuals between each label
fitness=0
count=0
for j in range(0,batch_samples):
for k in range(0,labels_num):
#add the squared residual between the current location label and predicted label
curr_label = labels_batch[j,k]
curr_label_pred = labels_pred[j,k]
fitness = fitness + ((curr_label-curr_label_pred)**2)
count = count +1
#now store the fitness score in the last column
fitness = fitness/count
networks[i,weights_num] = fitness
##### ITERATIONS START HERE ######
new_pop = round(population/2)
#create a list to store the best fitnesses from each generation
best_fitnesses = [0] * iterations
for s in range(0,iterations):
print('Beginning generation '+str(s))
#now find 50% of the networks with the smallest fitness
networks_best = np.zeros((new_pop,(weights_num+1)))
for i in range(0,new_pop):
index = np.argmin(networks[:, weights_num])
#add that row to the new network array
networks_best[i,:] = networks[index,:]
#add the fitness value to the best_fitnesses list
best_fitnesses[s] = networks[index,weights_num]
#delete that row from the network array
networks = np.delete(networks,index,0)
#now need to create an array for the children
networks_children = np.zeros((new_pop,(weights_num+1)))
i=0
while i < new_pop:
#print('Creating children: '+str(i)+'/'+str(new_pop))
#each pair will create 2 children which are the inverse of each other
for j in range(0,weights_num):
#generate a random integer (either 0 or 1)
x = random.randint(0,1)
if x==0:
networks_children[i,j] = networks_best[i,j]
networks_children[i+1,j] = networks_best[i+1,j]
else:
networks_children[i,j] = networks_best[i+1,j]
networks_children[i+1,j] = networks_best[i,j]
#generate two random integers based on the mutation variable. if it is 0, mutate the weight
x = random.randint(0,mutations)
y = random.randint(0,mutations)
#mutate the weight by drawing a random float between minus and plus twice the current weight
if x==0:
#mutate the child at [i,j]
gene = networks_children[i,j]
gene = random.uniform(-(2*gene),(2*gene))
networks_children[i,j] = gene
if y ==0:
#mutate the child at [i+1,j]
gene = networks_children[i+1,j]
gene = random.uniform(-(2*gene),(2*gene))
networks_children[i+1,j] = gene
i = i +2
#networks_children contains the child networks from networks_best
#create a small random batch of features and labels
if batch_size == None:
labels_batch = labels
features_batch = features
batch_samples = samples_num
else:
#generating a list of random numbers determined by batch_size
random_nums = random.sample(range(0,samples_num),batch_size)
labels_batch = labels[random_nums,:]
features_batch = features[random_nums,:]
batch_samples = batch_size
#now just need to calculate fitnesses for networks_children
#now calculate the fitness for each one
for i in range(0,new_pop):
#print('Calculating children fitnesses: '+str(i)+'/'+str(new_pop))
#get the weights for the network in a list
x = networks_children[i,0:weights_num]
weights = x.tolist()
#predict the labels using this network
labels_pred = nn_predict(features_batch,weights,neurons_num)
#calculate a fitness score based on the labels
#the fitness will be the mean of the squared residuals between each label
fitness=0
count =0
for j in range(0,batch_samples):
for k in range(0,labels_num):
#add the squared residual between the current location label and predicted label
curr_label = labels_batch[j,k]
curr_label_pred = labels_pred[j,k]
fitness = fitness + ((curr_label-curr_label_pred)**2)
count = count+1
#now store the fitness score in the last column
fitness = fitness/count
networks_children[i,weights_num] = fitness
#concatenate networks_children with networks_best and repeat the loop
networks = np.concatenate((networks_best,networks_children),0)
#put stuff here to show how far the training is
print('Generation '+str(s)+' complete')
####### ITERATIONS END HERE #######
#best_fitnesses contains the best fitness score from each iteration
#return the current list of networks
return networks, best_fitnesses
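# Smoke-test sketch for nn_train_elite (left commented out so the module stays
# side-effect free on import; remember that population/2 must be even):
# feats = np.random.rand(10, 3)
# labs = np.random.rand(10, 1)
# nets, history = nn_train_elite(feats, labs, hidden_lay_num=1, neurons_num=[4],
#                                population=8, iterations=2, mutations=4,
#                                batch_size=None)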
def nn_train_stoch(features,labels,hidden_lay_num,neurons_num,population,iterations,mutations,batch_size):
#uses a stochastic sampling method for parent selection rather than the previous elitist selection
#uses stochastic acceptance
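#(each sampled row is kept with probability 1 - fitness/max_fitness, so
# lower-error networks are accepted more often)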
#features: an array of features to train from
#labels: an array of labels to train to predict
#hidden_lay_num: the number of hidden layers for the neural network
#hidden_lay_neurons: a list containing the number of neurons for each hidden layer
#population: the population of the genetic algorithm IMPORTANT: Half of the population must be an even number
#iterations: the total number of iterations for the genetic algorithm
#mutations: affects the chance of a gene mutating when children are created. generates a random integer between 0 and this value.
# if the generated integer is 0, that gene will mutate
#batch_size: the size of the random batch in each generation. if None, will use all of the samples in each generation
# INVALID INPUTS CHECKS
# check that the population divided by two is an even number. if not, give an error
#first calculate the number of weights needed
samples_num,features_num = np.shape(features)
samples_num,labels_num = np.shape(labels)
neurons_num.append(labels_num)
weights_num = (features_num+1)*neurons_num[0] #the +1 is for the bias neuron
x = len(neurons_num)
for i in range(1,x):
weights_num = weights_num + (neurons_num[i]*(neurons_num[i-1]+1)) #the +1 is for the bias neuron
#now weights_num has the number of weights for the neural network
#create a numpy array of random numbers with columns equal to the number of weights and rows equal to the population
networks = np.random.uniform(-2,2,(population,weights_num))
x = np.zeros((population,1))
networks = np.concatenate((networks,x),1) #the last column of networks is for the fitness
#create a small random batch of features and labels
if batch_size == None:
labels_batch = labels
features_batch = features
batch_samples = samples_num
else:
#generating a list of random numbers determined by batch_size
random_nums = random.sample(range(0,samples_num),batch_size)
labels_batch = labels[random_nums,:]
features_batch = features[random_nums,:]
batch_samples = batch_size
#now for these initial networks, calculate the fitness for each one
for i in range(0,population):
#print('Calculating fitness for initial networks: '+str(i)+'/'+str(population))
#get the weights for the network in a list
x = networks[i,0:weights_num]
weights = x.tolist()
#predict the labels using this network
labels_pred = nn_predict(features_batch,weights,neurons_num)
#calculate a fitness score based on the labels
#the fitness will be the mean of the squared residuals between each label
fitness=0
count=0
for j in range(0,batch_samples):
for k in range(0,labels_num):
#add the squared residual between the current location label and predicted label
curr_label = labels_batch[j,k]
curr_label_pred = labels_pred[j,k]
fitness = fitness + ((curr_label-curr_label_pred)**2)
count = count +1
#now store the fitness score in the last column
fitness = fitness/count
networks[i,weights_num] = fitness
##### ITERATIONS START HERE ######
new_pop = round(population/2)
#create a numpy array to store the best,average,and worst fitnesses from each generation (four columns with first being gen number)
fitness_store = np.zeros((iterations,4))
for s in range(0,iterations):
print('Beginning generation '+str(s))
#now use stochastic acceptance to select the parents
networks_best = np.zeros((new_pop,(weights_num+1)))
i=0
rows,cols = np.shape(networks)
# find the value for the maximum fitness
index = np.argmax(networks[:, weights_num])
max_fitness = networks[index,weights_num]
while (i < new_pop):
# select a random row
row_select = random.randint(0,(rows-1))
#calculate the probability of the row being accepted (fitness/max)
fitness = networks[row_select,weights_num]
probability = fitness/max_fitness
#generate a random float between 0 and 1
num = random.random()
#accept that row if the float is more than or equal to probability (since a lower fitness is better)
if (num>=probability):
#add that row to the new network array
networks_best[i,:] = networks[row_select,:]
#delete that row from the network array
networks = np.delete(networks,row_select,0)
rows = rows -1
#add one to i
i = i+1
#now need to create an array for the children
networks_children = np.zeros((new_pop,(weights_num+1)))
i=0
while i < new_pop:
#print('Creating children: '+str(i)+'/'+str(new_pop))
#each pair will create 2 children which are the inverse of each other
for j in range(0,weights_num):
#generate a random integer (either 0 or 1)
x = random.randint(0,1)
if x==0:
networks_children[i,j] = networks_best[i,j]
networks_children[i+1,j] = networks_best[i+1,j]
else:
networks_children[i,j] = networks_best[i+1,j]
networks_children[i+1,j] = networks_best[i,j]
#generate two random integers based on the mutation variable. if it is 0, mutate the weight
x = random.randint(0,mutations)
y = random.randint(0,mutations)
#mutate the weight by drawing a random float between minus and plus twice the current weight
if x==0:
#mutate the child at [i,j]
gene = networks_children[i,j]
gene = random.uniform(-(2*gene),(2*gene))
networks_children[i,j] = gene
if y ==0:
#mutate the child at [i+1,j]
gene = networks_children[i+1,j]
gene = random.uniform(-(2*gene),(2*gene))
networks_children[i+1,j] = gene
i = i +2
#networks_children contains the child networks from networks_best
#create a small random batch of features and labels
if batch_size == None:
labels_batch = labels
features_batch = features
batch_samples = samples_num
else:
#generating a list of random numbers determined by batch_size
random_nums = random.sample(range(0,samples_num),batch_size)
labels_batch = labels[random_nums,:]
features_batch = features[random_nums,:]
batch_samples = batch_size
#now just need to calculate fitnesses for networks_children
#now calculate the fitness for each one
for i in range(0,new_pop):
#print('Calculating children fitnesses: '+str(i)+'/'+str(new_pop))
#get the weights for the network in a list
x = networks_children[i,0:weights_num]
weights = x.tolist()
#predict the labels using this network
labels_pred = nn_predict(features_batch,weights,neurons_num)
#calculate a fitness score based on the labels
#the fitness will be the mean of the squared residuals between each label
fitness=0
count =0
for j in range(0,batch_samples):
for k in range(0,labels_num):
#add the squared residual between the current location label and predicted label
curr_label = labels_batch[j,k]
curr_label_pred = labels_pred[j,k]
fitness = fitness + ((curr_label-curr_label_pred)**2)
count = count+1
#now store the fitness score in the last column
fitness = fitness/count
networks_children[i,weights_num] = fitness
#concatenate networks_children with networks_best and repeat the loop
networks = np.concatenate((networks_best,networks_children),0)
# put the best, average and worst fitnesses from the generation in fitness_store
fitness_best = np.min(networks[:,weights_num]) #find the best fitness (lower is better)
fitness_average = np.mean(networks[:,weights_num]) #find the average fitness
fitness_worst = np.max(networks[:,weights_num]) #find the worst fitness (highest error)
fitness_store[s,0] = s
fitness_store[s,1] = fitness_best
fitness_store[s,2] = fitness_average
fitness_store[s,3] = fitness_worst
#put stuff here to show how far the training is
print('Generation '+str(s)+' complete')
####### ITERATIONS END HERE #######
#fitness_store contains the best, average, and worst fitness from each generation
#return the current list of networks
return networks, fitness_store
def nn_train_stoch_bank_fraud(features,labels,hidden_lay_num,neurons_num,population,iterations,mutations,batch_size):
#THIS IS SPECIFICALLY FOR THE BANK FRAUD DATA
#selects an equal number of fraudulent and non-fraudulent samples in each batch
#(strictly: the first half is drawn from the full data set, the second half from fraud rows only)
#BATCH SIZE MUST BE EVEN
#uses a stochastic sampling method for parent selection rather than the previous elitist selection
#uses stochastic acceptance
#
#features: an array of features to train from
#labels: an array of labels to train to predict
#hidden_lay_num: the number of hidden layers for the neural network
#hidden_lay_neurons: a list containing the number of neurons for each hidden layer
#population: the population of the genetic algorithm IMPORTANT: Half of the population must be an even number
#iterations: the total number of iterations for the genetic algorithm
#mutations: affects the chance of a gene mutating when children are created. generates a random integer between 0 and this value.
# if the generated integer is 0, that gene will mutate
#batch_size: the size of the random batch in each generation. if None, will use all of the samples in each generation
# INVALID INPUTS CHECKS
# check that the population divided by two is an even number. if not, give an error
#first calculate the number of weights needed
samples_num,features_num = np.shape(features)
samples_num,labels_num = np.shape(labels)
neurons_num.append(labels_num)
weights_num = (features_num+1)*neurons_num[0] #the +1 is for the bias neuron
x = len(neurons_num)
for i in range(1,x):
weights_num = weights_num + (neurons_num[i]*(neurons_num[i-1]+1)) #the +1 is for the bias neuron
#now weights_num has the number of weights for the neural network
#create a numpy array of random numbers with columns equal to the number of weights and rows equal to the population
networks = np.random.uniform(-2,2,(population,weights_num))
x = np.zeros((population,1))
networks = np.concatenate((networks,x),1) #the last column of networks is for the fitness
#create a small random batch of features and labels
if batch_size == None:
labels_batch = labels
features_batch = features
batch_samples = samples_num
else:
#create an array of fraud features and an array for those labels
x = []
for i in range(0,samples_num):
y = labels[i,0]
if y==1:
x.append(i)
features_fraud = features[x,:]
labels_fraud = labels[x,:]
fraud_samples_num,x = np.shape(labels_fraud)
#generating a list of random numbers determined by batch_size
random_nums = random.sample(range(0,samples_num),round(batch_size/2))
random_nums_fraud = random.sample(range(0,fraud_samples_num),round(batch_size/2))
labels_batch = np.concatenate((labels[random_nums,:],labels_fraud[random_nums_fraud,:]),0)
features_batch = np.concatenate((features[random_nums,:],features_fraud[random_nums_fraud,:]),0)
batch_samples = batch_size
#now for these initial networks, calculate the fitness for each one
for i in range(0,population):
#print('Calculating fitness for initial networks: '+str(i)+'/'+str(population))
#get the weights for the network in a list
x = networks[i,0:weights_num]
weights = x.tolist()
#predict the labels using this network
labels_pred = nn_predict(features_batch,weights,neurons_num)
#calculate a fitness score based on the labels
#the fitness will be the mean of the squared residuals between each label
fitness=0
count=0
for j in range(0,batch_samples):
for k in range(0,labels_num):
#add the squared residual between the current location label and predicted label
curr_label = labels_batch[j,k]
curr_label_pred = labels_pred[j,k]
fitness = fitness + ((curr_label-curr_label_pred)**2)
count = count +1
#now store the fitness score in the last column
fitness = fitness/count
networks[i,weights_num] = fitness
##### ITERATIONS START HERE ######
new_pop = round(population/2)
#create a numpy array to store the best,average,and worst fitnesses from each generation (four columns with first being gen number)
fitness_store = np.zeros((iterations,4))
for s in range(0,iterations):
print('Beginning generation '+str(s))
#now use stochastic acceptance to select the parents
networks_best = np.zeros((new_pop,(weights_num+1)))
i=0
rows,cols = np.shape(networks)
# find the value for the maximum fitness
index = np.argmax(networks[:, weights_num])
max_fitness = networks[index,weights_num]
while (i < new_pop):
# select a random row
row_select = random.randint(0,(rows-1))
#calculate the probability of the row being accepted (fitness/max)
fitness = networks[row_select,weights_num]
probability = fitness/max_fitness
#generate a random float between 0 and 1
num = random.random()
#accept that row if the float is more than or equal to probability (since a lower fitness is better)
if (num>=probability):
#add that row to the new network array
networks_best[i,:] = networks[row_select,:]
#delete that row from the network array
networks = np.delete(networks,row_select,0)
rows = rows -1
#add one to i
i = i+1
#now need to create an array for the children
networks_children = np.zeros((new_pop,(weights_num+1)))
i=0
while i < new_pop:
#print('Creating children: '+str(i)+'/'+str(new_pop))
#each pair will create 2 children which are the inverse of each other
for j in range(0,weights_num):
#generate a random integer (either 0 or 1)
x = random.randint(0,1)
if x==0:
networks_children[i,j] = networks_best[i,j]
networks_children[i+1,j] = networks_best[i+1,j]
else:
networks_children[i,j] = networks_best[i+1,j]
networks_children[i+1,j] = networks_best[i,j]
#generate two random integers based on the mutation variable. if it is 0, mutate the weight
x = random.randint(0,mutations)
y = random.randint(0,mutations)
#mutate the weight by drawing a random float between minus and plus twice the current weight
if x==0:
#mutate the child at [i,j]
gene = networks_children[i,j]
gene = random.uniform(-(2*gene),(2*gene))
networks_children[i,j] = gene
if y ==0:
#mutate the child at [i+1,j]
gene = networks_children[i+1,j]
gene = random.uniform(-(2*gene),(2*gene))
networks_children[i+1,j] = gene
i = i +2
#networks_children contains the child networks from networks_best
#create a small random batch of features and labels
if batch_size == None:
labels_batch = labels
features_batch = features
batch_samples = samples_num
else:
#generating a list of random numbers determined by batch_size
random_nums = random.sample(range(0,samples_num),round(batch_size/2))
random_nums_fraud = random.sample(range(0,fraud_samples_num),round(batch_size/2))
labels_batch = np.concatenate((labels[random_nums,:],labels_fraud[random_nums_fraud,:]),0)
features_batch = np.concatenate((features[random_nums,:],features_fraud[random_nums_fraud,:]),0)
batch_samples = batch_size
#now just need to calculate fitnesses for networks_children
#now calculate the fitness for each one
for i in range(0,new_pop):
#print('Calculating children fitnesses: '+str(i)+'/'+str(new_pop))
#get the weights for the network in a list
x = networks_children[i,0:weights_num]
weights = x.tolist()
#predict the labels using this network
labels_pred = nn_predict(features_batch,weights,neurons_num)
#calculate a fitness score based on the labels
#the fitness will be the mean of the squared residuals between each label
fitness=0
count =0
for j in range(0,batch_samples):
for k in range(0,labels_num):
#add the squared residual between the current location label and predicted label
curr_label = labels_batch[j,k]
curr_label_pred = labels_pred[j,k]
fitness = fitness + ((curr_label-curr_label_pred)**2)
count = count+1
#now store the fitness score in the last column
fitness = fitness/count
networks_children[i,weights_num] = fitness
#concatenate networks_children with networks_best and repeat the loop
networks = np.concatenate((networks_best,networks_children),0)
# put the best, average and worst fitnesses from the generation in fitness_store
fitness_best = np.min(networks[:,weights_num]) #find the best fitness (lower is better)
fitness_average = np.mean(networks[:,weights_num]) #find the average fitness
fitness_worst = np.max(networks[:,weights_num]) #find the worst fitness (highest error)
fitness_store[s,0] = s
fitness_store[s,1] = fitness_best
fitness_store[s,2] = fitness_average
fitness_store[s,3] = fitness_worst
#put stuff here to show how far the training is
print('Generation '+str(s)+' complete')
####### ITERATIONS END HERE #######
#fitness_store contains the best, average, and worst fitness from each generation
#return the current list of networks
return networks, fitness_store | 39.16293 | 132 | 0.735534 | 4,153 | 26,200 | 4.519143 | 0.063809 | 0.026641 | 0.013214 | 0.007619 | 0.896473 | 0.886829 | 0.881394 | 0.880435 | 0.872229 | 0.870311 | 0 | 0.011489 | 0.179427 | 26,200 | 669 | 133 | 39.16293 | 0.861482 | 0.475763 | 0 | 0.873563 | 0 | 0 | 0.009155 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.011494 | false | 0 | 0.008621 | 0 | 0.031609 | 0.017241 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
0274bb3e0bc51d8598ca11d9f7a2476ca0da74c1 | 133 | py | Python | smartmirror/__init__.py | fredrikiselius/solid-umbrella | 8f65647db59647467396d92ec03263fc00b11f44 | [
"MIT"
] | null | null | null | smartmirror/__init__.py | fredrikiselius/solid-umbrella | 8f65647db59647467396d92ec03263fc00b11f44 | [
"MIT"
] | 2 | 2018-08-30T08:49:05.000Z | 2018-08-30T08:53:08.000Z | smartmirror/__init__.py | fredrikiselius/solid-umbrella | 8f65647db59647467396d92ec03263fc00b11f44 | [
"MIT"
] | null | null | null | from smartmirror import entity
from smartmirror import pointshape
from smartmirror import transfer
from smartmirror.widgets import *
| 26.6 | 34 | 0.864662 | 16 | 133 | 7.1875 | 0.4375 | 0.521739 | 0.547826 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.120301 | 133 | 4 | 35 | 33.25 | 0.982906 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
5a44a8d2b050b66dd040697bfe38399151b0ecba | 4,269 | py | Python | menu.py | cccaaannn/project_creator | 11614716019971a85c08933ea0ad9d64410bceea | [
"MIT"
] | null | null | null | menu.py | cccaaannn/project_creator | 11614716019971a85c08933ea0ad9d64410bceea | [
"MIT"
] | null | null | null | menu.py | cccaaannn/project_creator | 11614716019971a85c08933ea0ad9d64410bceea | [
"MIT"
] | null | null | null | from project_creator import python_project_creator, cpp_project_creator, c_project_creator
import sys
def menu(cfg_path):
# Project Language selection menu
print("<<< Project Creator >>>\n"
"\n"
"Python (1)\n"
"cpp (2)\n"
"c (3)\n"
"exit (0)\n"
"\n"
"Select Project Language\n"
)
while True:
try:
project_language = int(input(": "))
if not(project_language < 4 and project_language >= 0):
raise ValueError
else:
if project_language == 0:
sys.exit(0)
break
except ValueError:
print("Enter a valid value")
# Project type selection menu
if(project_language == 0):
sys.exit(0)
# python project creation
elif(project_language == 1):
creator = python_project_creator(cfg_path = cfg_path)
print("\n<<< Create Python Project >>>\n"
"\n"
"standart project (1)\n"
"git project (2)\n"
"pypi project (3)\n"
"exit (0)\n"
"\n"
"Select Project Type\n"
)
while True:
try:
project_type = int(input(": "))
if not(project_type < 4 and project_type >= 0):
raise ValueError
else:
if project_type == 0:
sys.exit(0)
break
except ValueError:
print("Enter a valid value")
project_name = input("\nProject name: ")
if project_type == 1:
creator.standart_project(project_name)
elif project_type == 2:
creator.git_project(project_name)
elif project_type == 3:
creator.pypi_project(project_name)
elif project_type == 0:
sys.exit(0)
# cpp project creation
elif(project_language == 2):
creator = cpp_project_creator(cfg_path = cfg_path)
print("\n<<< Create cpp Project >>>\n"
"\n"
"standart project (1)\n"
"git project (2)\n"
"exit (0)\n"
"\n"
"Select Project Type\n"
)
while True:
try:
project_type = int(input(": "))
if not(project_type < 3 and project_type >= 0):
raise ValueError
else:
if project_type == 0:
sys.exit(0)
break
except ValueError:
print("Enter a valid value")
project_name = input("\nProject name: ")
if project_type == 1:
creator.standart_project(project_name)
elif project_type == 2:
creator.git_project(project_name)
elif project_type == 0:
sys.exit(0)
# c project creation
elif(project_language == 3):
creator = c_project_creator(cfg_path = cfg_path)
print("\n<<< Create cProject >>>\n"
"\n"
"standart project (1)\n"
"git project (2)\n"
"exit (0)\n"
"\n"
"Select Project Type\n"
)
while True:
try:
project_type = int(input(": "))
if not(project_type < 3 and project_type >= 0):
raise ValueError
else:
if project_type == 0:
sys.exit(0)
break
except ValueError:
print("Enter a valid value")
project_name = input("\nProject name: ")
if project_type == 1:
creator.standart_project(project_name)
elif project_type == 2:
creator.git_project(project_name)
elif project_type == 0:
sys.exit(0)
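# Minimal entry point sketch ("config.cfg" is a hypothetical path; pass
# whatever configuration file the project_creator classes expect):
if __name__ == "__main__":
    menu("config.cfg")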
| 27.541935 | 90 | 0.436168 | 408 | 4,269 | 4.39951 | 0.122549 | 0.159331 | 0.060167 | 0.040111 | 0.828969 | 0.749861 | 0.738719 | 0.714763 | 0.70195 | 0.635097 | 0 | 0.021305 | 0.472242 | 4,269 | 154 | 91 | 27.720779 | 0.775411 | 0.028812 | 0 | 0.741071 | 0 | 0 | 0.156356 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.008929 | false | 0 | 0.017857 | 0 | 0.026786 | 0.071429 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
5a72a024664689227cab45882fc40322fa227f7d | 150 | py | Python | plugins/uniq/komand_uniq/actions/__init__.py | lukaszlaszuk/insightconnect-plugins | 8c6ce323bfbb12c55f8b5a9c08975d25eb9f8892 | [
"MIT"
] | 46 | 2019-06-05T20:47:58.000Z | 2022-03-29T10:18:01.000Z | plugins/uniq/komand_uniq/actions/__init__.py | lukaszlaszuk/insightconnect-plugins | 8c6ce323bfbb12c55f8b5a9c08975d25eb9f8892 | [
"MIT"
] | 386 | 2019-06-07T20:20:39.000Z | 2022-03-30T17:35:01.000Z | plugins/uniq/komand_uniq/actions/__init__.py | lukaszlaszuk/insightconnect-plugins | 8c6ce323bfbb12c55f8b5a9c08975d25eb9f8892 | [
"MIT"
] | 43 | 2019-07-09T14:13:58.000Z | 2022-03-28T12:04:46.000Z | # GENERATED BY KOMAND SDK - DO NOT EDIT
from .uniq_integer_array.action import UniqIntegerArray
from .uniq_string_array.action import UniqStringArray
| 37.5 | 55 | 0.846667 | 21 | 150 | 5.857143 | 0.761905 | 0.130081 | 0.276423 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.113333 | 150 | 3 | 56 | 50 | 0.924812 | 0.246667 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
5a87bacf87b50ca42044913bb5366c03015a7f4e | 2,912 | py | Python | argent/generator/sync.py | robertfasano/argent | 49a779e54063ad4f6432b78d1f8070d2f0a932a7 | [
"MIT"
] | null | null | null | argent/generator/sync.py | robertfasano/argent | 49a779e54063ad4f6432b78d1f8070d2f0a932a7 | [
"MIT"
] | 16 | 2020-11-01T20:39:22.000Z | 2022-02-17T18:23:49.000Z | argent/generator/sync.py | robertfasano/argent | 49a779e54063ad4f6432b78d1f8070d2f0a932a7 | [
"MIT"
] | null | null | null | import datetime
import requests  # used by every push below but missing from the original file
from artiq.experiment import rpc, TFloat  # @rpc decorator and TFloat return annotation
''' Convenience methods handling communication between the ARTIQ kernel and the Argent server; note the self parameter: these are meant to live on an experiment class '''
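# JSON payload POSTed to http://<addr>/results by the methods below (as built
# in this file):
# {"parameters": {...}, "variables": {...}, "pid": ..., "stage": int,
#  "cycle": int, "timestamp": ISO-8601 string, "sequence": stage name}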
@rpc(flags={"async"})
def __push__(self, stage, stage_name, cycle, parameter_names, parameter_values, variable_names, variable_values, addr):
variables = dict(zip(variable_names, variable_values))
parameters = dict(zip(parameter_names, parameter_values))
timestamp = datetime.datetime.now().isoformat()
print('\n' + timestamp + ' - Cycle {}, stage {}'.format(cycle, stage))
for key, val in parameters.items():
print(key, ':', val)
try:
results = {"parameters": parameters, "variables": variables, "pid": self.__pid__, "stage": int(stage), 'cycle': int(cycle), 'timestamp': timestamp, 'sequence': stage_name}
requests.post("http://{}/results".format(addr), json=results)
except Exception as e:
print(e)
@rpc(flags={"async"})
def __push_variables__(self, stage, stage_name, cycle, variable_names, variable_values, addr):
variables = dict(zip(variable_names, variable_values))
timestamp = datetime.datetime.now().isoformat()
print('\n' + timestamp + ' - Cycle {}, stage {}'.format(cycle, stage))
try:
results = {"parameters": {}, "variables": variables, "pid": self.__pid__, "stage": int(stage), 'cycle': int(cycle), 'timestamp': timestamp, 'sequence': stage_name}
requests.post("http://{}/results".format(addr), json=results)
except Exception as e:
print(e)
@rpc(flags={"async"})
def __push_parameters__(self, stage, stage_name, cycle, parameter_names, parameter_values, addr):
parameters = dict(zip(parameter_names, parameter_values))
timestamp = datetime.datetime.now().isoformat()
print('\n' + timestamp + ' - Cycle {}, stage {}'.format(cycle, stage))
for key, val in parameters.items():
print(key, ':', val)
try:
results = {"parameters": parameters, "variables": {}, "pid": self.__pid__, "stage": int(stage), 'cycle': int(cycle), 'timestamp': timestamp, 'sequence': stage_name}
requests.post("http://{}/results".format(addr), json=results)
except Exception as e:
print(e)
@rpc(flags={"async"})
def __heartbeat__(self, stage, stage_name, cycle, addr):
timestamp = datetime.datetime.now().isoformat()
print('\n' + timestamp + ' - Cycle {}, stage {}'.format(cycle, stage))
try:
results = {"parameters": {}, "variables": {}, "pid": self.__pid__, "stage": int(stage), 'cycle': int(cycle), 'timestamp': timestamp, 'sequence': stage_name}
requests.post("http://{}/results".format(addr), json=results)
except Exception as e:
print(e)
@rpc(flags={"async"})
def __pull__(self, addr):
try:
self.variables = requests.get("http://{}/variables".format(addr)).json()
except Exception as e:
print(e)
def __update__(self, name) -> TFloat:
return float(self.variables[name])
| 46.967742 | 179 | 0.657967 | 339 | 2,912 | 5.457227 | 0.182891 | 0.038919 | 0.035135 | 0.043243 | 0.865946 | 0.83027 | 0.817297 | 0.817297 | 0.817297 | 0.761081 | 0 | 0 | 0.168956 | 2,912 | 61 | 180 | 47.737705 | 0.764463 | 0 | 0 | 0.754717 | 0 | 0 | 0.14301 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.113208 | false | 0 | 0.018868 | 0.018868 | 0.150943 | 0.207547 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
ce8cbf662d5e5c5ae0c16ed9490ee170001c47a0 | 42 | py | Python | model_logger/__init__.py | mbernico/model_logging | 58b6ce70f279d54f32dad69d93a06494f04564e1 | [
"Apache-2.0"
] | 1 | 2018-05-26T06:41:05.000Z | 2018-05-26T06:41:05.000Z | model_logger/__init__.py | mbernico/model_logging | 58b6ce70f279d54f32dad69d93a06494f04564e1 | [
"Apache-2.0"
] | null | null | null | model_logger/__init__.py | mbernico/model_logging | 58b6ce70f279d54f32dad69d93a06494f04564e1 | [
"Apache-2.0"
] | null | null | null | from model_logger.model_logger import *
| 10.5 | 39 | 0.809524 | 6 | 42 | 5.333333 | 0.666667 | 0.6875 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.142857 | 42 | 3 | 40 | 14 | 0.888889 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 1 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
cec201372f8076560ef9aad91fd979f3f2431133 | 247,046 | py | Python | src/utils/env.py | TranQuangDuc/SceneMover | 6d9b683baf909198eb206fdd64bfe8cc3ef0428e | [
"MIT"
] | 80 | 2020-08-06T14:21:19.000Z | 2022-03-01T02:11:05.000Z | src/utils/env.py | TranQuangDuc/SceneMover | 6d9b683baf909198eb206fdd64bfe8cc3ef0428e | [
"MIT"
] | 5 | 2020-10-19T06:55:06.000Z | 2021-06-17T09:47:32.000Z | src/utils/env.py | TranQuangDuc/SceneMover | 6d9b683baf909198eb206fdd64bfe8cc3ef0428e | [
"MIT"
] | 16 | 2020-08-09T14:59:12.000Z | 2021-07-28T08:40:29.000Z | import numpy as np
import queue
from math import *
from copy import deepcopy
import time
from queue import PriorityQueue as PQ
import threading
import pickle
_x = [-1,1,0,0]
_y = [0,0,-1,1]  # together with _x: offsets of the four 4-connected neighbours
import ctypes
from numpy.ctypeslib import ndpointer
so = ctypes.CDLL('./search.so')
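# ctypes bindings for the native routines compiled into search.so; each
# argtypes/restype declaration below must match the corresponding C signature
# exactly, with results written back through the trailing int buffers.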
search = so.search
search.argtypes = [ndpointer(ctypes.c_int), ctypes.c_int, ctypes.c_int, ctypes.c_int, ctypes.c_int, ctypes.c_int, ctypes.c_int, ctypes.c_int, ctypes.c_int, ctypes.c_int,ndpointer(ctypes.c_int),ndpointer(ctypes.c_int),ndpointer(ctypes.c_int),ndpointer(ctypes.c_int)]
search.restype = None
search_transpose = so.search_transpose
search_transpose.argtypes = [ndpointer(ctypes.c_int), ctypes.c_int, ctypes.c_int,
ctypes.c_int,
ctypes.c_float, ctypes.c_float,
ctypes.c_float, ctypes.c_float,
ctypes.c_int, ctypes.c_int, ctypes.c_int,
ndpointer(ctypes.c_int),ndpointer(ctypes.c_int), ctypes.c_int,
ndpointer(ctypes.c_float),ndpointer(ctypes.c_float),ndpointer(ctypes.c_int),ndpointer(ctypes.c_int),ndpointer(ctypes.c_int),
ndpointer(ctypes.c_int)]
search_transpose.restype = None
search_transpose_poly = so.search_transpose_poly
search_transpose_poly.argtypes = [ndpointer(ctypes.c_int), ndpointer(ctypes.c_float), ndpointer(ctypes.c_float), ndpointer(ctypes.c_int), ctypes.c_int, ctypes.c_int,
ctypes.c_int, ctypes.c_int,
ndpointer(ctypes.c_float),ndpointer(ctypes.c_float),
ndpointer(ctypes.c_float),ndpointer(ctypes.c_float),
ndpointer(ctypes.c_int),ndpointer(ctypes.c_int), ctypes.c_int,
ndpointer(ctypes.c_float),ndpointer(ctypes.c_float),ndpointer(ctypes.c_int),ndpointer(ctypes.c_int),ndpointer(ctypes.c_int),
ndpointer(ctypes.c_int)]
search_transpose_poly.restype = None
search_transpose_poly_rot = so.search_transpose_poly_rot
search_transpose_poly_rot.argtypes = [ndpointer(ctypes.c_int), ndpointer(ctypes.c_float), ndpointer(ctypes.c_float), ndpointer(ctypes.c_int), ctypes.c_int, ctypes.c_int,
ctypes.c_int, ctypes.c_int,
ndpointer(ctypes.c_float),ndpointer(ctypes.c_float),
ndpointer(ctypes.c_float),ndpointer(ctypes.c_float),
ndpointer(ctypes.c_int),ndpointer(ctypes.c_int), ctypes.c_int,
ndpointer(ctypes.c_float),ndpointer(ctypes.c_float),ndpointer(ctypes.c_int),ndpointer(ctypes.c_int),ndpointer(ctypes.c_int),
ndpointer(ctypes.c_int)]
search_transpose_poly_rot.restype = None
translate = so.translate
translate.argtypes = [ndpointer(ctypes.c_int), ndpointer(ctypes.c_float), ndpointer(ctypes.c_float), ndpointer(ctypes.c_int), ctypes.c_int, ctypes.c_int,
ctypes.c_int, ctypes.c_int, ctypes.c_int,
ndpointer(ctypes.c_float), ndpointer(ctypes.c_float),
ndpointer(ctypes.c_int),ndpointer(ctypes.c_int), ctypes.c_int,
ndpointer(ctypes.c_int)]
translate.restype = None
EPS = 1e-4
class ENV: # if current state happened before, give a penalty.
def __init__(self, size=(5,5),max_num=5):
self.map_size = size
self.map = np.zeros(self.map_size)
self.target_map = np.zeros(self.map_size)
self.route = []
self.bin = 24  # number of discrete orientation bins (angle = 2*pi*state/self.bin)
self.max_num = max_num
"""
params
pos:
a list of the left bottom point of the furniture
size:
a list of the size of the furniture
"""
def setmap(self, pos, target, shape, cstate, tstate, wall=[]):
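# setmap proceeds in stages: (1) rasterise walls, then current and target
# poses, dropping shape points that collide with existing occupancy;
# (2) re-rasterise the pruned shapes onto clean maps; (3) trace each shape's
# edge cells with a DFS for later collision queries; (4) record which pieces
# already sit at their target pose and hash the initial state.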
self.map = np.zeros(self.map_size)
self.target_map = np.zeros(self.map_size)
self.pos = deepcopy(np.array(pos))
self.shape = deepcopy(shape)
self.target = np.array(target)
self.cstate = np.array(cstate).astype(np.int)
self.tstate = np.array(tstate).astype(np.int)
self.wall = deepcopy(wall)
self.dis = np.zeros(len(pos))
self.route = []
self.state_dict = {}
self.finished = np.zeros(len(pos))
self.shapex = []
self.shapey = []
# self.boundx = []
# self.boundy = []
self.pn = []
# self.bn = []
self.edge = []
self.bound = []
# print(shape)
# for bd in self.bound:
# for sh in self.shape:
# print(len(sh))
# for p in self.pos:
# print(p)
# print()
for p in wall:
x, y = p
x = int(x)
y = int(y)
self.map[x,y] = 1
self.target_map[x,y] = 1
# cut_list = []
for i, _ in enumerate(pos):
if len(shape[i]) == 0:
continue
x, y = _
s = self.cstate[i]
radian = 2 * pi * s / self.bin
points, bbox = self.rotate(self.shape[i], radian)
id_list = []
for i_,p in enumerate(points):
xx,yy = p
tx = max(0,min(x+xx,self.map_size[0])) + EPS
ty = max(0,min(y+yy,self.map_size[1])) + EPS
# cut_list.append((int(tx),int(ty)))
if self.map[int(tx),int(ty)] >= 1 and self.map[int(tx),int(ty)] != i+2:
# if i == 5:
# print('map',self.map[int(tx),int(ty)])
id_list.append(i_)
else:
self.map[int(tx),int(ty)] = i + 2
if self.map[int(tx),int(ty)] == 1:
self.map[int(tx),int(ty)] = 0
if self.target_map[int(tx),int(ty)] == 1:
self.target_map[int(tx),int(ty)] = 0
x,y = target[i]
s = self.tstate[i]
radian = 2 * pi * s / self.bin
points, bbox = self.rotate(self.shape[i], radian)
# for p in points:
# xx, yy = p
# xmax = max(xmax,xx)
# xmin = min(xmin,xx)
# ymax = max(ymax,yy)
# ymin = min(ymin,yy)
# x = max(0,x - 0.5*(xmax-xmin+1))
# y = max(0,y - 0.5*(ymax-ymin+1))
for i_,p in enumerate(points):
xx,yy = p
tx = max(0,min(x+xx,self.map_size[0])) + EPS
ty = max(0,min(y+yy,self.map_size[1])) + EPS
# cut_list.append((int(tx),int(ty)))
if self.target_map[int(tx),int(ty)] >= 1 and self.target_map[int(tx),int(ty)] != i + 2:
# if i == 5:
# print('tmap',self.map[int(tx),int(ty)])
if not i_ in id_list:
id_list.append(i_)
else:
self.target_map[int(tx),int(ty)] = i + 2
if self.map[int(tx),int(ty)] == 1:
self.map[int(tx),int(ty)] = 0
if self.target_map[int(tx),int(ty)] == 1:
self.target_map[int(tx),int(ty)] = 0
tmp = deepcopy(self.shape[i])
# print(self.shape[i])
for i_ in id_list:
# print(i_,tmp[i_])
self.shape[i].remove(tmp[i_])
# dx, dy = size[i]
# x, y = _
# x_, y_ = target[i]
# self.map[x:x+dx, y:y+dy] = i + 2
# self.target_map[x_:x_+dx, y_:y_+dy] = i + 2
# print('==========')
# for sh in self.shape:
# print(len(sh))
self.map = (self.map == 1).astype(np.int32)
self.target_map = (self.target_map == 1).astype(np.int32)
for i, _ in enumerate(pos):
if len(self.shape[i]) == 0:
continue
x, y = _
s = self.cstate[i]
radian = 2 * pi * s / self.bin
points, bbox = self.rotate(self.shape[i], radian)
id_list = []
for i_,p in enumerate(points):
xx,yy = p
tx = max(0,min(x+xx,self.map_size[0])) + EPS
ty = max(0,min(y+yy,self.map_size[1])) + EPS
self.map[int(tx),int(ty)] = i + 2
x,y = target[i]
s = self.tstate[i]
radian = 2 * pi * s / self.bin
points, bbox = self.rotate(self.shape[i], radian)
for i_,p in enumerate(points):
xx,yy = p
tx = max(0,min(x+xx,self.map_size[0])) + EPS
ty = max(0,min(y+yy,self.map_size[1])) + EPS
self.target_map[int(tx),int(ty)] = i + 2
for i, sh in enumerate(self.shape):
ed = []
# bd = []
map_ = np.zeros(self.map_size)
mark_ = np.zeros(self.map_size)
for p in sh:
map_[p[0],p[1]] = 1
# print(i,sh)
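# dfs() below traces the boundary of footprint i: it walks the shape with an
# explicit stack and records into `ed` every cell that has an 8-neighbour
# lying outside the map or outside the footprint. Only these edge cells are
# later flattened into shapex/shapey for the C routines, which keeps the
# per-object point count small.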
def dfs(p):
gx = [1,0,-1,0]
gy = [0,1,0,-1]
rx = [1,1,1,0,0,-1,-1,-1]
ry = [-1,0,1,-1,1,-1,0,1]
stack = []
stack.append((p,-1))
last = -2
while stack:
flag = True
p,direction = stack.pop()
if mark_[p[0],p[1]] == 1:
continue
mark_[p[0],p[1]] = 1
# print(p)
for i in range(8):
x = p[0] + rx[i]
y = p[1] + ry[i]
if x < 0 or x >= self.map_size[0] or y >= self.map_size[1] or y < 0 or map_[x,y] == 0:
# if last == direction:
# ed.pop()
ed.append(p)
# bd.append(p)
last = direction
flag = False
break
vs = []
for i in range(8):
vs.append([])
for i in range(4):
x = p[0] + gx[i]
y = p[1] + gy[i]
if x >= 0 and x < self.map_size[0] and y >= 0 and y < self.map_size[1] and map_[x,y] == 1 and mark_[x,y] == 0:
# print(x,y)
# dfs((x,y))
if flag:
# mark_[x,y] = 1
stack.append(((x,y),i))
break
else:
cnt = 0
for j in range(8):
xx = x + rx[j]
yy = y + ry[j]
if xx < 0 or xx >= self.map_size[0] or yy >= self.map_size[1] or yy < 0 or map_[xx,yy] == 0:
# mark_[x,y] = 1
cnt += 1
vs[cnt].append(((x,y),i))
for v in vs:
for p in v:
stack.append(p)
if len(sh) != 0:
dfs(sh[0])
self.edge.append(ed)
# self.bound.append(bd)
# for i in range(len(self.pos)):
# print(len(self.shape[i]),len(self.edge[i]))
for i, _ in enumerate(pos):
if self.equal(self.pos[i],self.target[i]) and self.cstate[i] == self.tstate[i]:
self.finished[i] = 1
for ed in self.edge:
self.pn.append(len(ed))
for p in ed:
self.shapex.append(p[0])
self.shapey.append(p[1])
hash_ = self.hash()
self.state_dict[hash_] = 1
# self.check()
def hash(self):
mod = 1000000007
num = len(self.pos) + 1
total = 0
for row in self.map:
for _ in row:
total *= num
total += int(_)
total %= mod
return total
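# hash() folds the occupancy grid into a single integer by reading the map as
# one long base-`num` number modulo 1e9+7; state_dict keys on this value to
# detect (up to rare hash collisions) layouts that were visited before.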
def rotate(self, points, radian):
eps = 1e-6
res_list = []
points = np.array(points).astype(np.float32)
# print(points.shape)
points[:,0] -= points[:,0].min()
points[:,1] -= points[:,1].min()
mx = points[:,0].max()
my = points[:,1].max()
points[:,0] -= 0.5 * mx + 0.5
points[:,1] -= 0.5 * my + 0.5
points = points.transpose()
rot = np.array([[cos(radian),-sin(radian)],[sin(radian), cos(radian)]])
points = np.matmul(rot,points)
points[0,:] += eps
points[1,:] += eps
# points[0,:] += 0.5 * mx - 0.5 + eps
# points[1,:] += 0.5 * my - 0.5 + eps
# points = np.round(points).astype(np.int)
xmax = points[0].max()
xmin = points[0].min()
ymax = points[1].max()
ymin = points[1].min()
# if xmin < 0:
# points[0] += 1
# if ymin < 0:
# points[1] += 1
return points.transpose(), np.array([[xmin, ymin], [xmax, ymax]])
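# rotate() centres the footprint on the origin (shifting by half the bounding
# box), multiplies by the standard 2x2 rotation matrix
# [[cos,-sin],[sin,cos]], and returns the rotated points plus their new
# axis-aligned bounding box; callers re-anchor the points at an object's
# position before rasterising them into the map.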
'''
check whether object `index` can reach its target pose
flag 0 represents not accessible
flag 1 represents accessible
'''
def check(self, index):
flag = True
# tx,ty = self.target[index]
# sx,sy = self.pos[index]
# shape = self.shape[index]
# cstate = self.cstate[index]
# print(cstate)
# tstate = self.tstate[index]
route = []
# ps = np.array(shape)
# px = np.array(ps[:,0],dtype=np.int32)
# py = np.array(ps[:,1],dtype=np.int32)
# num = len(ps)
lx = np.zeros([60000],dtype=np.float32) # fixed-capacity output buffers filled by the C search routine
ly = np.zeros([60000],dtype=np.float32)
lr = np.zeros([60000],dtype=np.int32)
ld = np.zeros([60000],dtype=np.int32)
length = np.array([0],dtype=np.int32)
flag = np.array([1],dtype=np.int32)
n,m = self.map_size
tx_ar = np.zeros(len(self.target),dtype=np.float32)
ty_ar = np.zeros_like(tx_ar,dtype=np.float32)
sx_ar = np.zeros_like(tx_ar,dtype=np.float32)
sy_ar = np.zeros_like(tx_ar,dtype=np.float32)
for i,p in enumerate(self.target):
tx_ar[i] = p[0]
ty_ar[i] = p[1]
for i,p in enumerate(self.pos):
sx_ar[i] = p[0]
sy_ar[i] = p[1]
tmp_map = np.array(self.map,dtype=np.int32)
shape,bbox = self.getitem(index,self.cstate[index])
sx,sy = self.pos[index]
for p in shape:
x,y = p
x += sx + EPS
y += sy + EPS
tmp_map[min(int(x),self.map_size[0]-1),min(int(y),self.map_size[1]-1)] = 0
# print('in')
# print(index)
# tx = int(tx)
# ty = int(ty)
# sx = int(sx)
# sy = int(sy)
# search_transpose(np.array(self.map,dtype=np.int32),n,m,index,tx,ty,sx,sy, cstate,tstate,self.bin,px,py,num,lx,ly,lr,ld,length,flag)
search_transpose_poly(
tmp_map, np.array(self.shapex,dtype=np.float32),np.array(self.shapey,dtype=np.float32),np.array(self.pn,dtype=np.int32),n,m,
len(self.pos),index,
tx_ar,ty_ar,
sx_ar,sy_ar,
np.array(self.cstate, dtype=np.int32), np.array(self.tstate, dtype=np.int32), self.bin,
lx, ly, lr, ld, length,
flag
)
# print('out')
# search(np.array(self.map,dtype=np.int32),n,m,index,tx,ty,sx,sy,h,w,lx,ly,length,flag)
for i in range(length[0]):
route.append((lx[i],ly[i],lr[i],ld[i]))
route.reverse()
return flag[0], route
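# check() returns (flag, route): flag is 1 when the C search finds a path
# that brings object `index` to its target pose, and route lists
# (x, y, rotation, direction) waypoints; the C routine emits them backwards,
# hence the reverse() above. check_s() below is identical except that it
# calls the rotation-aware search.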
def check_s(self, index):
flag = True
# tx,ty = self.target[index]
# sx,sy = self.pos[index]
# shape = self.shape[index]
# cstate = self.cstate[index]
# print(cstate)
# tstate = self.tstate[index]
route = []
# ps = np.array(shape)
# px = np.array(ps[:,0],dtype=np.int32)
# py = np.array(ps[:,1],dtype=np.int32)
# num = len(ps)
lx = np.zeros([60000],dtype=np.float32)
ly = np.zeros([60000],dtype=np.float32)
lr = np.zeros([60000],dtype=np.int32)
ld = np.zeros([60000],dtype=np.int32)
length = np.array([0],dtype=np.int32)
flag = np.array([1],dtype=np.int32)
n,m = self.map_size
tx_ar = np.zeros(len(self.target),dtype=np.float32)
ty_ar = np.zeros_like(tx_ar,dtype=np.float32)
sx_ar = np.zeros_like(tx_ar,dtype=np.float32)
sy_ar = np.zeros_like(tx_ar,dtype=np.float32)
for i,p in enumerate(self.target):
tx_ar[i] = p[0]
ty_ar[i] = p[1]
for i,p in enumerate(self.pos):
sx_ar[i] = p[0]
sy_ar[i] = p[1]
tmp_map = np.array(self.map,dtype=np.int32)
shape,bbox = self.getitem(index,self.cstate[index])
sx,sy = self.pos[index]
for p in shape:
x,y = p
x += sx + EPS
y += sy + EPS
tmp_map[min(int(x),self.map_size[0]-1),min(int(y),self.map_size[1]-1)] = 0
# print('in')
# print(index)
# tx = int(tx)
# ty = int(ty)
# sx = int(sx)
# sy = int(sy)
# search_transpose(np.array(self.map,dtype=np.int32),n,m,index,tx,ty,sx,sy, cstate,tstate,self.bin,px,py,num,lx,ly,lr,ld,length,flag)
search_transpose_poly_rot(
tmp_map, np.array(self.shapex,dtype=np.float32),np.array(self.shapey,dtype=np.float32),np.array(self.pn,dtype=np.int32),n,m,
len(self.pos),index,
tx_ar,ty_ar,
sx_ar,sy_ar,
np.array(self.cstate, dtype=np.int32), np.array(self.tstate, dtype=np.int32), self.bin,
lx, ly, lr, ld, length,
flag
)
# print('out')
# search(np.array(self.map,dtype=np.int32),n,m,index,tx,ty,sx,sy,h,w,lx,ly,length,flag)
for i in range(length[0]):
route.append((lx[i],ly[i],lr[i],ld[i]))
route.reverse()
return flag[0], route
def equal(self, a, b):
return fabs(a[0]-b[0])+fabs(a[1]-b[1]) < 1e-3
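# Positions are float centres (e.g. x + 0.5*width), so equality is tested
# with a Manhattan-distance tolerance rather than exact comparison.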
'''
return (reward, finish_flag); finish_flag == 1 means the whole task is finished
'''
def move(self, index, direction):
base = -1
if index >= len(self.pos):
return -500, -1
if len(self.shape[index]) == 0:
return -500, -1
map_size = self.map_size
# size = self.size[index]
target = self.target[index]
pos = self.pos[index]
cstate = self.cstate[index]
tstate = self.tstate[index]
shape, bbox = self.getitem(index, cstate)
n,m = self.map_size
sx_ar = np.zeros(len(self.target),dtype=np.float32)
sy_ar = np.zeros_like(sx_ar,dtype=np.float32)
steps_ar = np.zeros(1,dtype=np.int32)
for i,p in enumerate(self.pos):
sx_ar[i] = p[0]
sy_ar[i] = p[1]
if direction < 4:
xx, yy = pos
# xx = max(0,xx - 0.5*(bbox[1][0]-bbox[0][0]+1))
# yy = max(0,yy - 0.5*(bbox[1][1]-bbox[0][1]+1))
if self.equal(pos, target) and cstate == tstate: # discourage being trapped in a local minimum by shuffling the object around its destination
base += -4
translate(
np.array(self.map,dtype=np.int32),np.array(self.shapex,dtype=np.float32),np.array(self.shapey,dtype=np.float32),np.array(self.pn,dtype=np.int32),n,m,
len(self.pos),index,direction,
sx_ar,sy_ar,
np.array(self.cstate, dtype=np.int32), np.array(self.tstate, dtype=np.int32), self.bin,
steps_ar
)
steps = steps_ar[0]
x = xx + steps * _x[direction]
y = yy + steps * _y[direction]
self.last_steps = steps
if steps == 0:
return -500, -1
else:
for p in shape:
self.map[min(int(xx+p[0]+EPS),self.map_size[0]-1),min(int(yy+p[1]+EPS),self.map_size[1]-1)] = 0
for p in shape:
self.map[min(int(x+p[0]+EPS),self.map_size[0]-1),min(int(y+p[1]+EPS),self.map_size[1]-1)] = index + 2
# self.map[xx:xx+size[0],yy:yy+size[1]] = 0
# self.map[x:x+size[0],y:y+size[1]] = index + 2
# x = x + 0.5*(bbox[1][0]-bbox[0][0]+1)
# y = y + 0.5*(bbox[1][1]-bbox[0][1]+1)
self.pos[index] = (x, y)
hash_ = self.hash() # get the hash value of the current state
if hash_ in self.state_dict: # if the current state happened before
self.state_dict[hash_] += 1
penalty = 1 # fixed revisit penalty; the stored visit count is tracked but not used here
return base - 2 * penalty, 0
self.state_dict[hash_] = 1
if fabs(x-target[0]) + fabs(y-target[1]) < 1e-6 and cstate == tstate:
if self.finished[index] == 1:
return base + 2, 0
else:
self.finished[index] = 1
return base + 4, 0
# return base + 500, 0
# return base + 200, 0
return base, 0
# for i in range(size[0]):
# for j in range(size[1]):
# self.map[target[0]+i, target[1]+j] = 1
# if it has already reached the target place.
if self.equal(pos, target) and cstate == tstate:
return -500, -1
# check if legal, calc the distance
flag, route = self.check(index)
if flag == 1:
xx, yy = pos
x,y = target
self.route = route
# xx = max(0,xx - 0.5*(bbox[1][0]-bbox[0][0]+1))
# yy = max(0,yy - 0.5*(bbox[1][1]-bbox[0][1]+1))
tshape, bbox = self.getitem(index, tstate)
# x = max(0,x - 0.5*(bbox[1][0]-bbox[0][0]+1))
# y = max(0,y - 0.5*(bbox[1][1]-bbox[0][1]+1))
for p in shape:
tx = max(0,min(xx+p[0],self.map_size[0]-1)) + EPS
ty = max(0,min(yy+p[1],self.map_size[1]-1)) + EPS
self.map[min(int(tx),self.map_size[0]-1),min(int(ty),self.map_size[1]-1)] = 0
# self.map[xx:xx+size[0], yy:yy+size[1]] = 0
for p in tshape:
tx = max(0,min(x+p[0],self.map_size[0]-1)) + EPS
ty = max(0,min(y+p[1],self.map_size[1]-1)) + EPS
self.map[min(int(tx),self.map_size[0]-1), min(int(ty),self.map_size[1]-1)] = index + 2
self.pos[index] = target
self.cstate[index] = tstate
hash_ = self.hash() # get the hash value of the current state
if hash_ in self.state_dict: # if the current state happened before
self.state_dict[hash_] += 1
penalty = 1 # fixed revisit penalty; the stored visit count is tracked but not used here
return base - 2 * penalty, 0
ff = True
for i,v in enumerate(self.pos): # check if the task is done
w = self.target[i]
if fabs(v[0]-w[0])+fabs(v[1]-w[1]) > 1e-6 or self.cstate[i] != self.tstate[i]:
ff = False
break
if ff:
self.finished[index] = 1
# return base + 10000, 1
return base + 50, 1
else:
if self.finished[index] == 1: # if the object was placed before
return base + 2, 0
else:
self.finished[index] = 1
return base + 4, 0
# return base + 500, 0
# return base + 200, 0
# elif flag == -1:
# return -1000, 1
else:
# return -500, -1
return base, -1
def move_s(self, index, direction):
base = -10
if index >= len(self.pos):
return -500, -1
if len(self.shape[index]) == 0:
return -500, -1
map_size = self.map_size
# size = self.size[index]
target = self.target[index]
pos = self.pos[index]
cstate = self.cstate[index]
tstate = self.tstate[index]
shape, bbox = self.getitem(index, cstate)
n,m = self.map_size
sx_ar = np.zeros(len(self.target),dtype=np.float32)
sy_ar = np.zeros_like(sx_ar,dtype=np.float32)
steps_ar = np.zeros(1,dtype=np.int32)
for i,p in enumerate(self.pos):
sx_ar[i] = p[0]
sy_ar[i] = p[1]
if self.equal(pos, target) and cstate == tstate: # discourage being trapped in a local minimum by shuffling the object around its destination
# base += -60
base += -600
if direction < 4:
xx, yy = pos
# xx = max(0,xx - 0.5*(bbox[1][0]-bbox[0][0]+1))
# yy = max(0,yy - 0.5*(bbox[1][1]-bbox[0][1]+1))
translate(
np.array(self.map,dtype=np.int32),np.array(self.shapex,dtype=np.float32),np.array(self.shapey,dtype=np.float32),np.array(self.pn,dtype=np.int32),n,m,
len(self.pos),index,direction,
sx_ar,sy_ar,
np.array(self.cstate, dtype=np.int32), np.array(self.tstate, dtype=np.int32), self.bin,
steps_ar
)
steps = steps_ar[0]
x = xx + steps * _x[direction]
y = yy + steps * _y[direction]
if steps == 0:
return -500, -1
else:
for p in shape:
self.map[min(int(xx+p[0]+EPS),self.map_size[0]-1),min(int(yy+p[1]+EPS),self.map_size[1]-1)] = 0
for p in shape:
self.map[min(int(x+p[0]+EPS),self.map_size[0]-1),min(int(y+p[1]+EPS),self.map_size[1]-1)] = index + 2
# self.map[xx:xx+size[0],yy:yy+size[1]] = 0
# self.map[x:x+size[0],y:y+size[1]] = index + 2
# x = x + 0.5*(bbox[1][0]-bbox[0][0]+1)
# y = y + 0.5*(bbox[1][1]-bbox[0][1]+1)
self.pos[index] = (x, y)
hash_ = self.hash() # get the hash value of the current state
if hash_ in self.state_dict: # if the current state happened before
return -600, 0
self.state_dict[hash_] = 1
if fabs(x-target[0]) + fabs(y-target[1]) < 1e-6 and cstate == tstate:
if self.finished[index] == 1:
# return base + 50, 0
return base + 500, 0
else:
self.finished[index] = 1
return base + 2000, 0
# return base + 500, 0
# return base + 200, 0
return base, 0
# for i in range(size[0]):
# for j in range(size[1]):
# self.map[target[0]+i, target[1]+j] = 1
# if it has already reached the target place.
if self.equal(pos, target) and cstate == tstate:
return -500, -1
# check if legal, calc the distance
flag, route = self.check_s(index)
if flag == 1:
xx, yy = pos
x,y = target
self.route = route
# xx = max(0,xx - 0.5*(bbox[1][0]-bbox[0][0]+1))
# yy = max(0,yy - 0.5*(bbox[1][1]-bbox[0][1]+1))
tshape, bbox = self.getitem(index, tstate)
# x = max(0,x - 0.5*(bbox[1][0]-bbox[0][0]+1))
# y = max(0,y - 0.5*(bbox[1][1]-bbox[0][1]+1))
for p in shape:
tx = max(0,min(xx+p[0],self.map_size[0]-1)) + EPS
ty = max(0,min(yy+p[1],self.map_size[1]-1)) + EPS
self.map[min(int(tx),self.map_size[0]-1),min(int(ty),self.map_size[1]-1)] = 0
# self.map[xx:xx+size[0], yy:yy+size[1]] = 0
for p in tshape:
tx = max(0,min(x+p[0],self.map_size[0]-1)) + EPS
ty = max(0,min(y+p[1],self.map_size[1]-1)) + EPS
self.map[min(int(tx),self.map_size[0]-1), min(int(ty),self.map_size[1]-1)] = index + 2
self.pos[index] = target
self.cstate[index] = tstate
hash_ = self.hash() # get the hash value of the current state
if hash_ in self.state_dict: # if the current state happened before
# return -40, 0
return -600, 0
ff = True
for i,v in enumerate(self.pos): # check if the task is done
w = self.target[i]
if fabs(v[0]-w[0])+fabs(v[1]-w[1]) > 1e-6 or self.cstate[i] != self.tstate[i]:
ff = False
break
if ff:
self.finished[index] = 1
# return base + 10000, 1
return base + 100000, 1
else:
if self.finished[index] == 1: # if the object was placed before
# return base + 50, 0
return base + 500, 0
else:
self.finished[index] = 1
return base + 2000, 0
# return base + 500, 0
# return base + 200, 0
# elif flag == -1:
# return -1000, 1
else:
# return -500, -1
return base, -1
def move_p(self, index, direction):
base = -1
if index >= len(self.pos):
return -500, -1
if len(self.shape[index]) == 0:
return -500, -1
map_size = self.map_size
# size = self.size[index]
target = self.target[index]
pos = self.pos[index]
cstate = self.cstate[index]
tstate = self.tstate[index]
shape, bbox = self.getitem(index, cstate)
n,m = self.map_size
sx_ar = np.zeros(len(self.target),dtype=np.float32)
sy_ar = np.zeros_like(sx_ar,dtype=np.float32)
steps_ar = np.zeros(1,dtype=np.int32)
for i,p in enumerate(self.pos):
sx_ar[i] = p[0]
sy_ar[i] = p[1]
if direction < 4:
xx, yy = pos
# xx = max(0,xx - 0.5*(bbox[1][0]-bbox[0][0]+1))
# yy = max(0,yy - 0.5*(bbox[1][1]-bbox[0][1]+1))
if self.equal(pos, target) and cstate == tstate: # discourage being trapped in a local minimum by shuffling the object around its destination
base += -4
translate(
np.array(self.map,dtype=np.int32),np.array(self.shapex,dtype=np.float32),np.array(self.shapey,dtype=np.float32),np.array(self.pn,dtype=np.int32),n,m,
len(self.pos),index,direction,
sx_ar,sy_ar,
np.array(self.cstate, dtype=np.int32), np.array(self.tstate, dtype=np.int32), self.bin,
steps_ar
)
steps = steps_ar[0]
x = xx + steps * _x[direction]
y = yy + steps * _y[direction]
if steps == 0:
return -500, -1
else:
for p in shape:
self.map[min(int(xx+p[0]+EPS),self.map_size[0]-1),min(int(yy+p[1]+EPS),self.map_size[1]-1)] = 0
for p in shape:
self.map[min(int(x+p[0]+EPS),self.map_size[0]-1),min(int(y+p[1]+EPS),self.map_size[1]-1)] = index + 2
# self.map[xx:xx+size[0],yy:yy+size[1]] = 0
# self.map[x:x+size[0],y:y+size[1]] = index + 2
# x = x + 0.5*(bbox[1][0]-bbox[0][0]+1)
# y = y + 0.5*(bbox[1][1]-bbox[0][1]+1)
self.pos[index] = (x, y)
hash_ = self.hash() # get the hash value of the current state
if hash_ in self.state_dict: # if the current state happened before
self.state_dict[hash_] += 1
penalty = 1 # fixed revisit penalty; the stored visit count is tracked but not used here
return base - 2 * penalty, 0
self.state_dict[hash_] = 1
if fabs(x-target[0]) + fabs(y-target[1]) < 1e-6 and cstate == tstate:
if self.finished[index] == 1:
return base + 2, 0
else:
self.finished[index] = 1
return base + 4, 0
# return base + 500, 0
# return base + 200, 0
return base, 0
# for i in range(size[0]):
# for j in range(size[1]):
# self.map[target[0]+i, target[1]+j] = 1
# if it has already reached the target place.
if self.equal(pos, target) and cstate == tstate:
return -500, -1
# check if legal, calc the distance
flag, route = self.check_s(index)
if flag == 1:
xx, yy = pos
x,y = target
self.route = route
# xx = max(0,xx - 0.5*(bbox[1][0]-bbox[0][0]+1))
# yy = max(0,yy - 0.5*(bbox[1][1]-bbox[0][1]+1))
tshape, bbox = self.getitem(index, tstate)
# x = max(0,x - 0.5*(bbox[1][0]-bbox[0][0]+1))
# y = max(0,y - 0.5*(bbox[1][1]-bbox[0][1]+1))
for p in shape:
tx = max(0,min(xx+p[0],self.map_size[0]-1)) + EPS
ty = max(0,min(yy+p[1],self.map_size[1]-1)) + EPS
self.map[min(int(tx),self.map_size[0]-1),min(int(ty),self.map_size[1]-1)] = 0
# self.map[xx:xx+size[0], yy:yy+size[1]] = 0
for p in tshape:
tx = max(0,min(x+p[0],self.map_size[0]-1)) + EPS
ty = max(0,min(y+p[1],self.map_size[1]-1)) + EPS
self.map[min(int(tx),self.map_size[0]-1), min(int(ty),self.map_size[1]-1)] = index + 2
self.pos[index] = target
self.cstate[index] = tstate
hash_ = self.hash() # get the hash value of the current state
if hash_ in self.state_dict: # if the current state happened before
self.state_dict[hash_] += 1
penalty = 1 # fixed revisit penalty; the stored visit count is tracked but not used here
return base - 2 * penalty, 0
ff = True
for i,v in enumerate(self.pos): # check if the task is done
w = self.target[i]
if fabs(v[0]-w[0])+fabs(v[1]-w[1]) > 1e-6 or self.cstate[i] != self.tstate[i]:
ff = False
break
if ff:
self.finished[index] = 1
# return base + 10000, 1
return base + 50, 1
else:
if self.finished[index] == 1: # if the object was placed before
return base + 2, 0
else:
self.finished[index] = 1
return base + 4, 0
# return base + 500, 0
# return base + 200, 0
# elif flag == -1:
# return -1000, 1
else:
# return -500, -1
return base, -1
def move_debug(self, index, direction):
base = -10
if index >= len(self.pos):
print('index error')
return -500, -1
map_size = self.map_size
# size = self.size[index]
target = self.target[index]
pos = self.pos[index]
cstate = self.cstate[index]
tstate = self.tstate[index]
shape,bbox = self.getitem(index, cstate)
if self.equal(pos, target) and cstate == tstate: # discourage being trapped in a local minimum (pos/target are arrays, so elementwise == cannot be used in a condition)
base += -60
if direction < 4:
xx, yy = pos
print('xx',xx,'yy',yy)
steps = 1
while True:
x = xx + steps*_x[direction]
y = yy + steps*_y[direction]
if x < 0 or x >= map_size[0] or y < 0 or y >= map_size[1] :
steps -= 1
print('early done')
break
# return -500, -1
flag = True
for p in shape:
if x+p[0] < 0 or x+p[0] >= map_size[0] or y+p[1] < 0 or y+p[1] >= map_size[1]:
flag = False
print('out of bounds', x+p[0], y+p[1])
break
if self.map[x+p[0],y+p[1]] != 0:
print('collide!!',self.map[x+p[0],y+p[1]],index+2)
flag = False
break
if not flag:
steps -= 1
break
steps += 1
x = xx + steps*_x[direction]
y = yy + steps*_y[direction]
if steps == 0:
print('cannot move error')
return -500, -1
else:
for p in shape:
self.map[xx+p[0],yy+p[1]] = 0
for p in shape:
self.map[x+p[0],y+p[1]] = index + 2
# self.map[xx:xx+size[0],yy:yy+size[1]] = 0
# self.map[x:x+size[0],y:y+size[1]] = index + 2
self.pos[index] = (x, y)
hash_ = self.hash() # get the hash value of the current state
if hash_ in self.state_dict: # if the current state happened before
return -600, 0
self.state_dict[hash_] = 1
if int(x) == int(target[0]) and int(y) == int(target[1]) and cstate == tstate:
if self.finished[index] == 1:
return base + 50, 0
else:
self.finished[index] = 1
return base + 500, 0
# return base + 200, 0
return base, 0
# for i in range(size[0]):
# for j in range(size[1]):
# self.map[target[0]+i, target[1]+j] = 1
# if it has already reached the target place.
if self.equal(pos, target) and cstate == tstate:
print('position error')
return -500, -1
# check if legal, calc the distance
flag, route = self.check(index)
if flag == 1:
xx, yy = pos
x,y = target
self.route = route
tshape,bbox = self.getitem(index,tstate)
for p in shape:
self.map[xx+p[0],yy+p[1]] = 0
# self.map[xx:xx+size[0], yy:yy+size[1]] = 0
for p in tshape:
self.map[x+p[0], y+p[1]] = index + 2
self.pos[index] = target
self.cstate[index] = tstate
hash_ = self.hash() # get the hash value of the current state
if hash_ in self.state_dict: # if the current state happened before
return -40, 0
ff = True
for i,v in enumerate(self.pos): # check if the task is done
w = self.target[i]
if fabs(v[0]-w[0])+fabs(v[1]-w[1]) > 1e-6 or self.cstate[i] != self.tstate[i]:
ff = False
break
if ff:
self.finished[index] = 1
return base + 10000, 1
else:
if self.finished[index] == 1: # if the object was placed before
return base + 50, 0
else:
self.finished[index] = 1
return base + 500, 0
# return base + 200, 0
# elif flag == -1:
# return -1000, 1
else:
print('path find error')
return -500, -1
def getlastroute(self):
return self.route
def printmap(self):
with open('op.txt','a') as fp:
for _ in self.map:
for __ in _:
fp.write('%d '%__)
fp.write('\n')
fp.write('\n')
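# build_wall() grows a random wall: starting from a point sampled far from
# the map centre, it walks the grid, turning with probability q (usually by
# 90 degrees), collecting visited cells, and stopping with probability p per
# step once the wall is at least 100 cells long.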
def build_wall(self):
size = self.map_size
p = 0.005
q = 0.05
dix = [1,0,-1,0]
diy = [0,1,0,-1]
while True:
px = np.random.randint(size[0])
py = np.random.randint(size[1])
if fabs(px-size[0]/2) + fabs(py-size[1]/2) > (size[0]+size[1])/4:
break
d = int(px < size[0]/2) + int(py < size[1]/2)*2
if d == 3:
d = 2
elif d == 2:
d = 3
l = 0
wall_list = []
while True:
cnt = 0
while True:
tx = px + dix[d]
ty = py + diy[d]
cnt += 1
if cnt > 10:
break
if tx == size[0] or tx == -1 or ty == size[1] or ty == -1:
d = np.random.randint(4)
continue
break
if tx >= 0 and tx < size[0] and ty >= 0 and ty < size[1]:
px = tx
py = ty
if not (px,py) in wall_list :
wall_list.append((px,py))
r = np.random.rand()
if r < q:
t = np.random.rand()
if t < 0.9:
d = (d+1)%4
else:
d = np.random.randint(4)
r = np.random.rand()
if r < p and l > 100:
break
l += 1
return wall_list
def randominit(self,num=5):
import random
size = self.map_size
# print(size)
pos_list = []
size_list = []
target_list = []
for i in range(num):
while True:
px = np.random.randint(size[0])
py = np.random.randint(size[1])
flag = True
for _ in pos_list:
dx,dy = _
if dx == px or dy == py:
flag = False
break
if flag:
pos_list.append((px,py))
break
while True:
px = np.random.randint(size[0])
py = np.random.randint(size[1])
flag = True
for _ in target_list:
dx,dy = _
if dx == px or dy == py:
flag = False
break
if flag:
target_list.append((px,py))
break
random.shuffle(pos_list)
random.shuffle(target_list)
for i in range(num):
px, py = pos_list[i]
tx, ty = target_list[i]
while True:
dx = np.random.randint(1,11)
dy = np.random.randint(1,11)
if px + dx > size[0] or py + dy > size[1] or tx + dx > size[0] or ty + dy > size[1]:
# print(px,py,dx,dy)
continue
flag = True
for j in range(num):
if j == i:
continue
px_, py_ = pos_list[j]
tx_, ty_ = target_list[j]
if j > len(size_list) - 1:
dx_, dy_ = 1, 1
else:
dx_, dy_ = size_list[j]
lx = max(px, px_)
ly = max(py, py_)
rx = min(px + dx, px_ + dx_) - 1
ry = min(py + dy, py_ + dy_) - 1
if lx <= rx and ly <= ry:
flag = False
break
lx = max(tx, tx_)
ly = max(ty, ty_)
rx = min(tx + dx, tx_ + dx_) - 1
ry = min(ty + dy, ty_ + dy_) - 1
if lx <= rx and ly <= ry:
flag = False
break
if flag:
size_list.append((dx,dy))
break
# list_ =[(pair[0],(dx,dy)),(pair[1],(dx_,dy_))
# random.shuffle(list_)
# for i in list_:
# pos_list.append(i[0])
# size_list.append(i[1])
self.setmap(pos_list,target_list,size_list) # NOTE: legacy call; setmap now expects (pos, target, shape, cstate, tstate[, wall]) as used in randominit_crowded below
# for i in range(size[0]):
# for j in range(size[1]):
# for i in range(size[0]):
# for j in range(size[1]):
def randominit_crowded(self,num=5):
import random
size = self.map_size
# print(size)
while True:
reboot = False
pos_list = []
size_list = []
target_list = []
# self.finished = np.zeros(num)
tmp = self.build_wall()
# tmp += self.build_wall()
# tmp += self.build_wall()
tmp.sort() # sort in place (sorted(tmp) alone would discard its result) so duplicate wall cells become adjacent for the dedup below
wall_list = []
l = None
map_ = np.zeros(self.map_size)
for t in tmp:
if t != l:
l = t
map_[l[0],l[1]] = 1
wall_list.append(t)
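# dfs(p, tp) is a stack-based flood fill over non-wall cells; it returns True
# iff tp is reachable from p, so a sampled target is kept only when some path
# to the matching start position exists around the walls.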
def dfs(p,tp):
gx = [1,0,-1,0]
gy = [0,1,0,-1]
stack = []
stack.append(p)
mark_ = np.zeros(self.map_size)
while stack:
flag = True
p = stack.pop()
if mark_[p[0],p[1]] == 1:
continue
mark_[p[0],p[1]] = 1
if p[0] == tp[0] and p[1] == tp[1]:
break
for i in range(4):
x = p[0] + gx[i]
y = p[1] + gy[i]
if x >= 0 and x < self.map_size[0] and y >= 0 and y < self.map_size[1] and map_[x,y] != 1 and mark_[x,y] == 0:
stack.append((x,y))
return mark_[tp[0],tp[1]] == 1
for i in range(num):
while True:
px = np.random.randint(1,size[0]-3)
py = np.random.randint(1,size[1]-3)
flag = True
hh = [-1,0,1]
for _ in pos_list:
dx, dy = _
if dx == px or dy == py or abs(dx - px) + abs(dy - py) < 5:
flag = False
for k in hh:
for l in hh:
xx = px + k
yy = py + l
if dx == xx and dy == yy:
flag = False
break
if flag == False:
break
if flag == False:
break
for _ in wall_list:
if flag == False:
break
dx,dy = _
if dx == px and dy == py:
flag = False
break
for k in hh:
for l in hh:
xx = px + k
yy = py + l
if dx == xx and dy == yy:
flag = False
break
if flag == False:
break
if flag == False:
break
if flag:
pos_list.append((px,py))
break
reboot_cnt = 0
while reboot_cnt < 10000:
reboot_cnt += 1
px = np.random.randint(1,size[0]-3)
py = np.random.randint(1,size[1]-3)
# flag = True
flag = dfs((px,py),pos_list[-1])
# print(flag)
for _ in target_list:
dx, dy = _
if dx == px or dy == py or abs(dx-px)+abs(dy-py) < 5:
flag = False
break
for k in hh:
for l in hh:
xx = px + k
yy = py + l
if dx == xx and dy == yy:
flag = False
break
if flag == False:
break
if flag == False:
break
for _ in wall_list:
if flag == False:
break
dx,dy = _
if dx == px and dy == py:
flag = False
break
for k in hh:
for l in hh:
xx = px + k
yy = py + l
if dx == xx and dy == yy:
flag = False
break
if flag == False:
break
if flag == False:
break
if flag:
target_list.append((px,py))
break
if reboot_cnt >= 10000:
reboot = True
break
if reboot:
continue
# random.shuffle(pos_list)
# random.shuffle(target_list)
sub_num = self.max_num - num
# print(self.max_num, num)
pos_s = [ (_, i) for i, _ in enumerate(pos_list)]
for _ in range(sub_num):
pos_s.append(((0,0),-1))
# print(len(pos_list))
random.shuffle(pos_s) # shuffle the (position, original-index) pairs; pos_list itself is rebuilt from pos_s just below, so shuffling it here would be lost
pos_list = np.array([_[0] for _ in pos_s])
tmp_target_list = np.zeros_like(pos_list)
# random.shuffle(target_list)
_ = 0
# for target in target_list:
# while pos_list[_][0] == pos_list[_][1] and pos_list[_][0] == 0:
# _ += 1
# tmp_target_list[_] = target
# _ += 1
for i,_ in enumerate(pos_s):
p, id_ = _
if p[0] == p[1] and p[0] == 0:
continue
tmp_target_list[i] = target_list[id_]
target_list = tmp_target_list
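# This dfs variant checks reachability for a whole dx-by-dy footprint: a cell
# may be entered only if the full rectangle anchored there stays inside the
# map and clear of walls. (It is currently only referenced by the
# commented-out size check further down.)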
def dfs(p,tp,size):
gx = [1,0,-1,0]
gy = [0,1,0,-1]
dx, dy = size
stack = []
stack.append(p)
mark_ = np.zeros(self.map_size)
while stack:
flag = True
p = stack.pop()
if mark_[p[0],p[1]] == 1:
continue
mark_[p[0],p[1]] = 1
if p[0] == tp[0] and p[1] == tp[1]:
break
for i in range(4):
x = p[0] + gx[i]
y = p[1] + gy[i]
if x >= 0 and x + dx - 1 < self.map_size[0] and y >= 0 and y + dy - 1 < self.map_size[1] and map_[x,y] != 1 and mark_[x,y] == 0:
flag = True
for j in range(dx):
for k in range(dy):
if map_[x+j,y+k] == 1:
flag = False
break
if flag == False:
break
if flag:
stack.append((x,y))
return mark_[tp[0],tp[1]] == 1
for i in range(self.max_num):
px, py = pos_list[i]
tx, ty = target_list[i]
if px == py and py == 0:
size_list.append((0,0))
continue
reboot_cnt = 0
while reboot_cnt < 10000:
reboot_cnt += 1
dx = np.random.randint(2,31)
dy = np.random.randint(2,31)
if px + dx > size[0] or py + dy > size[1] or tx + dx > size[0] or ty + dy > size[1]:
# print(px,py,dx,dy)
continue
flag = True
for j in range(self.max_num):
if j == i:
continue
px_, py_ = pos_list[j]
tx_, ty_ = target_list[j]
if px_ == py_ and py_ == 0:
# size_list.append((0,0))
continue
if j > len(size_list) - 1:
dx_, dy_ = 2, 2
else:
dx_, dy_ = size_list[j]
lx = max(px, px_)
ly = max(py, py_)
rx = min(px + dx, px_ + dx_) - 1
ry = min(py + dy, py_ + dy_) - 1
if lx <= rx and ly <= ry:
flag = False
break
lx = max(tx, tx_)
ly = max(ty, ty_)
rx = min(tx + dx, tx_ + dx_) - 1
ry = min(ty + dy, ty_ + dy_) - 1
if lx <= rx and ly <= ry:
flag = False
break
# if flag:
# flag = dfs(pos_list[i],target_list[i],(dx,dy))
for wall in wall_list:
if not flag:
break
px_, py_ = wall
tx_, ty_ = wall
dx_, dy_ = 1, 1
lx = max(px, px_)
ly = max(py, py_)
rx = min(px + dx, px_ + dx_) - 1
ry = min(py + dy, py_ + dy_) - 1
if lx <= rx and ly <= ry:
flag = False
break
lx = max(tx, tx_)
ly = max(ty, ty_)
rx = min(tx + dx, tx_ + dx_) - 1
ry = min(ty + dy, ty_ + dy_) - 1
if lx <= rx and ly <= ry:
flag = False
break
if flag:
size_list.append((dx,dy))
break
if reboot_cnt >= 10000:
reboot = True
break
if reboot:
continue
shapes = []
for i in range(self.max_num):
x,y = pos_list[i]
w,d = size_list[i]
pos_list[i] = (x+0.5*w,y+0.5*d)
x,y = target_list[i]
target_list[i] = (x+0.5*w,y+0.5*d)
shape = []
for a in range(w):
for b in range(d):
shape.append((a,b))
shapes.append(shape)
self.setmap(pos_list,target_list,shapes,np.zeros(len(pos_list)),np.zeros(len(pos_list)),wall_list)
break
def getstate_1(self,shape=[64,64]):
state = []
# temp = np.zeros(shape).astype(np.float32)
# tmap = self.map == 1
# oshape = self.map.shape
# for i in range(shape[0]):
# x = int(1.0 * i * oshape[0] / shape[0])
# for j in range(shape[1]):
# y = int(1.0 * j * oshape[1] / shape[1])
# temp[i,j] = tmap[x,y]
# for i in range(oshape[0]):
# x = 1.0*i/oshape[0]*shape[0]
# for j in range(oshape[1]):
# y = 1.0*j/oshape[1]*shape[1]
# temp[x,y] = tmap[i,j]
state.append((self.map == 1).astype(np.float32))
# state.append(temp)
for i in range(len(self.pos)):
# temp = np.zeros(shape).astype(np.float32)
# tmap = self.map == (i+2)
# for i in range(shape[0]):
# x = int(1.0 * i * oshape[0] / shape[0])
# for j in range(shape[1]):
# y = int(1.0 * j * oshape[1] / shape[1])
# temp[i,j] = tmap[x,y]
# state.append(temp)
# temp = np.zeros(shape).astype(np.float32)
# tmap = self.target_map == (i+2)
# for i in range(shape[0]):
# x = int(1.0 * i * oshape[0] / shape[0])
# for j in range(shape[1]):
# y = int(1.0 * j * oshape[1] / shape[1])
# temp[i,j] = tmap[x,y]
# state.append(temp)
state.append((self.map == (i+2)).astype(np.float32))
state.append((self.target_map == (i+2)).astype(np.float32))
return np.transpose(np.array(state),[1,2,0])
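# getstate_1 builds an H x W x (2N+1) observation: channel 0 marks walls,
# then each object contributes one current-occupancy channel and one
# target-occupancy channel.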
def getstate_2(self, index):
state = []
obs = (self.map == 1).astype(np.float32)
cho = (self.map == (index+2)).astype(np.float32)
cho_t = (self.target_map == (index+2)).astype(np.float32)
oth = np.zeros_like(obs).astype(bool) # np.bool was removed from NumPy; plain bool is equivalent
oth_t = np.zeros_like(obs).astype(bool)
for i in range(len(self.pos)):
if i != index: # skip the chosen object itself (i indexes objects; map labels are i+2)
oth |= self.map == (i+2)
oth_t |= self.target_map == (i+2)
# state.append((self.map == (i+1)).astype(np.float32))
state = [obs,cho,cho_t,oth,oth_t]
return np.transpose(np.array(state),[1,2,0])
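# getstate_2 is the per-object view: five channels stacking walls, the chosen
# object, its target, all other objects merged, and all other targets merged.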
def getstate_3(self,shape=[64,64]):
state = []
state.append(np.array(self.map).astype(np.float32))
state.append(np.array(self.target_map).astype(np.float32))
return np.transpose(np.array(state),[1,2,0])
def getmap(self):
return np.array(self.map)
def gettargetmap(self):
return self.target_map
def getconfig(self):
return (self.pos,self.target,self.shape,self.cstate,self.tstate,self.wall)
def getfinished(self):
return deepcopy(self.finished)
def getconflict(self):
num = len(self.finished)
res = np.zeros([num,num,2])
# size = np.zeros([num,2])
# for i,shape in enumerate(self.shape):
# size[i]
return res # NOTE: early return; the legacy overlap computation below is unreachable (it relies on self.size, which the shape-based setmap no longer sets)
for axis in range(2):
for i in range(num):
l = np.min([self.pos[i][axis],self.target[i][axis]])
r = np.max([self.pos[i][axis],self.target[i][axis]]) + self.size[i][axis] - 1
for j in range(num):
if (self.pos[j][axis] >= l and self.pos[j][axis] <= r) or (self.pos[j][axis] + self.size[j][axis] >= l and self.pos[j][axis] + self.size[j][axis] <= r):
res[i,j,axis] = 1
return res
def getitem(self, index, state):
shape = self.shape[index]
opoints = np.array(shape)
# for i in range(size[0]):
# for j in range(size[1]):
# opoints.append((i,j))
radian = 2 * pi * state / self.bin
points, bbox = self.rotate(opoints, radian)
return points, bbox
def getcleanmap(self, index):
start = self.pos[index]
# size = self.size[index]
cstate = self.cstate[index]
res = np.array(self.map)
# shape = self.shape[index]
# opoints = np.array(shape)
# opoints = []
# for i in range(size[0]):
# for j in range(size[1]):
# opoints.append((i,j))
points, bbox = self.getitem(index, cstate)
# radian = 2 * pi * cstate / self.bin
# points, bbox = self.rotate(opoints, radian)
xx, yy = start
# xx = max(0, xx - 0.5*(bbox[1,0]-bbox[0,0]+1))
# yy = max(0, yy - 0.5*(bbox[1,1]-bbox[0,1]+1))
for p in points:
x, y = p
x += xx
y += yy
if x >= self.map_size[0] or y >= self.map_size[1]:
print('wrong coords',x,y)
tx = max(0,min(x,self.map_size[0])) + EPS
ty = max(0,min(y,self.map_size[1])) + EPS
res[min(int(tx),self.map_size[0]-1),min(int(ty),self.map_size[1]-1)] = 0
# x = min(x, self.map_size[0]-1)
# y = min(y, self.map_size[1]-1)
# res[int(x), int(y)] = 0
return res
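# __deepcopy__ below copies only the state that mutates during a rollout
# (map, pos, cstate, finished, state_dict) and shares the static geometry
# (shape, edge, shapex/shapey, pn) between copies, which keeps copying cheap.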
def __deepcopy__(self,memodict={}):
res = ENV(self.map_size,self.max_num)
res.map = np.array(self.map)
res.target_map = np.array(self.target_map)
res.pos = np.array(self.pos)
res.target = self.target
res.shape = self.shape
res.cstate = np.array(self.cstate)
res.tstate = self.tstate
res.wall = self.wall
res.state_dict = deepcopy(self.state_dict)
res.finished = np.array(self.finished)
res.shapex = self.shapex
res.shapey = self.shapey
res.pn = self.pn
res.edge = self.edge
res.bound = self.bound
return res
class ENV_ablation_wo_base: # ablation variant of ENV without the constant per-step base cost; move() uses log10 route-length shaping instead
def __init__(self, size=(5,5),max_num=5):
self.map_size = size
self.map = np.zeros(self.map_size)
self.target_map = np.zeros(self.map_size)
self.route = []
self.bin = 24
self.max_num = max_num
"""
params
pos:
a list of the left bottom point of the furniture
size:
a list of the size of the furniture
"""
def setmap(self, pos, target, shape, cstate, tstate, wall=[]):
self.map = np.zeros(self.map_size)
self.target_map = np.zeros(self.map_size)
self.pos = deepcopy(np.array(pos))
self.shape = deepcopy(shape)
self.target = np.array(target)
self.cstate = np.array(cstate).astype(int)
self.tstate = np.array(tstate).astype(int)
self.wall = deepcopy(wall)
self.dis = np.zeros(len(pos))
self.route = []
self.state_dict = {}
self.finished = np.zeros(len(pos))
self.shapex = []
self.shapey = []
# self.boundx = []
# self.boundy = []
self.pn = []
# self.bn = []
self.edge = []
self.bound = []
# print(shape)
# for bd in self.bound:
# for sh in self.shape:
# print(len(sh))
# for p in self.pos:
# print(p)
# print()
for p in wall:
x, y = p
x = int(x)
y = int(y)
self.map[x,y] = 1
self.target_map[x,y] = 1
# cut_list = []
for i, _ in enumerate(pos):
if len(shape[i]) == 0:
continue
x, y = _
s = self.cstate[i]
radian = 2 * pi * s / self.bin
points, bbox = self.rotate(self.shape[i], radian)
id_list = []
for i_,p in enumerate(points):
xx,yy = p
tx = max(0,min(x+xx,self.map_size[0])) + EPS
ty = max(0,min(y+yy,self.map_size[1])) + EPS
# cut_list.append((int(tx),int(ty)))
if self.map[int(tx),int(ty)] >= 1 and self.map[int(tx),int(ty)] != i+2:
# if i == 5:
# print('map',self.map[int(tx),int(ty)])
id_list.append(i_)
else:
self.map[int(tx),int(ty)] = i + 2
if self.map[int(tx),int(ty)] == 1:
self.map[int(tx),int(ty)] = 0
if self.target_map[int(tx),int(ty)] == 1:
self.target_map[int(tx),int(ty)] = 0
x,y = target[i]
s = self.tstate[i]
radian = 2 * pi * s / self.bin
points, bbox = self.rotate(self.shape[i], radian)
# for p in points:
# xx, yy = p
# xmax = max(xmax,xx)
# xmin = min(xmin,xx)
# ymax = max(ymax,yy)
# ymin = min(ymin,yy)
# x = max(0,x - 0.5*(xmax-xmin+1))
# y = max(0,y - 0.5*(ymax-ymin+1))
for i_,p in enumerate(points):
xx,yy = p
tx = max(0,min(x+xx,self.map_size[0])) + EPS
ty = max(0,min(y+yy,self.map_size[1])) + EPS
# cut_list.append((int(tx),int(ty)))
if self.target_map[int(tx),int(ty)] >= 1 and self.target_map[int(tx),int(ty)] != i + 2:
# if i == 5:
# print('tmap',self.map[int(tx),int(ty)])
if not i_ in id_list:
id_list.append(i_)
else:
self.target_map[int(tx),int(ty)] = i + 2
if self.map[int(tx),int(ty)] == 1:
self.map[int(tx),int(ty)] = 0
if self.target_map[int(tx),int(ty)] == 1:
self.target_map[int(tx),int(ty)] = 0
tmp = deepcopy(self.shape[i])
# print(self.shape[i])
for i_ in id_list:
# print(i_,tmp[i_])
self.shape[i].remove(tmp[i_])
# dx, dy = size[i]
# x, y = _
# x_, y_ = target[i]
# self.map[x:x+dx, y:y+dy] = i + 2
# self.target_map[x_:x_+dx, y_:y_+dy] = i + 2
# print('==========')
# for sh in self.shape:
# print(len(sh))
self.map = (self.map == 1).astype(np.int32)
self.target_map = (self.target_map == 1).astype(np.int32)
for i, _ in enumerate(pos):
if len(self.shape[i]) == 0:
continue
x, y = _
s = self.cstate[i]
radian = 2 * pi * s / self.bin
points, bbox = self.rotate(self.shape[i], radian)
id_list = []
for i_,p in enumerate(points):
xx,yy = p
tx = max(0,min(x+xx,self.map_size[0])) + EPS
ty = max(0,min(y+yy,self.map_size[1])) + EPS
self.map[int(tx),int(ty)] = i + 2
x,y = target[i]
s = self.tstate[i]
radian = 2 * pi * s / self.bin
points, bbox = self.rotate(self.shape[i], radian)
for i_,p in enumerate(points):
xx,yy = p
tx = max(0,min(x+xx,self.map_size[0])) + EPS
ty = max(0,min(y+yy,self.map_size[1])) + EPS
self.target_map[int(tx),int(ty)] = i + 2
for i, sh in enumerate(self.shape):
ed = []
# bd = []
map_ = np.zeros(self.map_size)
mark_ = np.zeros(self.map_size)
for p in sh:
map_[p[0],p[1]] = 1
# print(i,sh)
def dfs(p):
gx = [1,0,-1,0]
gy = [0,1,0,-1]
rx = [1,1,1,0,0,-1,-1,-1]
ry = [-1,0,1,-1,1,-1,0,1]
stack = []
stack.append((p,-1))
last = -2
while stack:
flag = True
p,direction = stack.pop()
if mark_[p[0],p[1]] == 1:
continue
mark_[p[0],p[1]] = 1
# print(p)
for i in range(8):
x = p[0] + rx[i]
y = p[1] + ry[i]
if x < 0 or x >= self.map_size[0] or y >= self.map_size[1] or y < 0 or map_[x,y] == 0:
# if last == direction:
# ed.pop()
ed.append(p)
# bd.append(p)
last = direction
flag = False
break
vs = []
for i in range(8):
vs.append([])
for i in range(4):
x = p[0] + gx[i]
y = p[1] + gy[i]
if x >= 0 and x < self.map_size[0] and y >= 0 and y < self.map_size[1] and map_[x,y] == 1 and mark_[x,y] == 0:
# print(x,y)
# dfs((x,y))
if flag:
# mark_[x,y] = 1
stack.append(((x,y),i))
break
else:
cnt = 0
for j in range(8):
xx = x + rx[j]
yy = y + ry[j]
if xx < 0 or xx >= self.map_size[0] or yy >= self.map_size[1] or yy < 0 or map_[xx,yy] == 0:
# mark_[x,y] = 1
cnt += 1
vs[cnt].append(((x,y),i))
for v in vs:
for p in v:
stack.append(p)
if len(sh) != 0:
dfs(sh[0])
self.edge.append(ed)
# self.bound.append(bd)
# for i in range(len(self.pos)):
# print(len(self.shape[i]),len(self.edge[i]))
for i, _ in enumerate(pos):
if self.equal(self.pos[i],self.target[i]) and self.cstate[i] == self.tstate[i]:
self.finished[i] = 1
for ed in self.edge:
self.pn.append(len(ed))
for p in ed:
self.shapex.append(p[0])
self.shapey.append(p[1])
hash_ = self.hash()
self.state_dict[hash_] = 1
# self.check()
def hash(self):
mod = 1000000007
num = len(self.pos) + 1
total = 0
for row in self.map:
for _ in row:
total *= num
total += int(_)
total %= mod
return total
def rotate(self, points, radian):
eps = 1e-6
res_list = []
points = np.array(points).astype(np.float32)
# print(points.shape)
points[:,0] -= points[:,0].min()
points[:,1] -= points[:,1].min()
mx = points[:,0].max()
my = points[:,1].max()
points[:,0] -= 0.5 * mx + 0.5
points[:,1] -= 0.5 * my + 0.5
points = points.transpose()
rot = np.array([[cos(radian),-sin(radian)],[sin(radian), cos(radian)]])
points = np.matmul(rot,points)
points[0,:] += eps
points[1,:] += eps
# points[0,:] += 0.5 * mx - 0.5 + eps
# points[1,:] += 0.5 * my - 0.5 + eps
# points = np.round(points).astype(np.int)
xmax = points[0].max()
xmin = points[0].min()
ymax = points[1].max()
ymin = points[1].min()
# if xmin < 0:
# points[0] += 1
# if ymin < 0:
# points[1] += 1
return points.transpose(), np.array([[xmin, ymin], [xmax, ymax]])
'''
check whether object `index` can reach its target pose
flag 0 represents not accessible
flag 1 represents accessible
'''
def check(self, index):
flag = True
# tx,ty = self.target[index]
# sx,sy = self.pos[index]
# shape = self.shape[index]
# cstate = self.cstate[index]
# print(cstate)
# tstate = self.tstate[index]
route = []
# ps = np.array(shape)
# px = np.array(ps[:,0],dtype=np.int32)
# py = np.array(ps[:,1],dtype=np.int32)
# num = len(ps)
lx = np.zeros([60000],dtype=np.float32)
ly = np.zeros([60000],dtype=np.float32)
lr = np.zeros([60000],dtype=np.int32)
ld = np.zeros([60000],dtype=np.int32)
length = np.array([0],dtype=np.int32)
flag = np.array([1],dtype=np.int32)
n,m = self.map_size
tx_ar = np.zeros(len(self.target),dtype=np.float32)
ty_ar = np.zeros_like(tx_ar,dtype=np.float32)
sx_ar = np.zeros_like(tx_ar,dtype=np.float32)
sy_ar = np.zeros_like(tx_ar,dtype=np.float32)
for i,p in enumerate(self.target):
tx_ar[i] = p[0]
ty_ar[i] = p[1]
for i,p in enumerate(self.pos):
sx_ar[i] = p[0]
sy_ar[i] = p[1]
tmp_map = np.array(self.map,dtype=np.int32)
shape,bbox = self.getitem(index,self.cstate[index])
sx,sy = self.pos[index]
for p in shape:
x,y = p
x += sx + EPS
y += sy + EPS
tmp_map[min(int(x),self.map_size[0]-1),min(int(y),self.map_size[1]-1)] = 0
# print('in')
# print(index)
# tx = int(tx)
# ty = int(ty)
# sx = int(sx)
# sy = int(sy)
# search_transpose(np.array(self.map,dtype=np.int32),n,m,index,tx,ty,sx,sy, cstate,tstate,self.bin,px,py,num,lx,ly,lr,ld,length,flag)
search_transpose_poly(
tmp_map, np.array(self.shapex,dtype=np.float32),np.array(self.shapey,dtype=np.float32),np.array(self.pn,dtype=np.int32),n,m,
len(self.pos),index,
tx_ar,ty_ar,
sx_ar,sy_ar,
np.array(self.cstate, dtype=np.int32), np.array(self.tstate, dtype=np.int32), self.bin,
lx, ly, lr, ld, length,
flag
)
# print('out')
# search(np.array(self.map,dtype=np.int32),n,m,index,tx,ty,sx,sy,h,w,lx,ly,length,flag)
for i in range(length[0]):
route.append((lx[i],ly[i],lr[i],ld[i]))
route.reverse()
return flag[0], route
def check_s(self, index):
flag = True
# tx,ty = self.target[index]
# sx,sy = self.pos[index]
# shape = self.shape[index]
# cstate = self.cstate[index]
# print(cstate)
# tstate = self.tstate[index]
route = []
# ps = np.array(shape)
# px = np.array(ps[:,0],dtype=np.int32)
# py = np.array(ps[:,1],dtype=np.int32)
# num = len(ps)
lx = np.zeros([60000],dtype=np.float32)
ly = np.zeros([60000],dtype=np.float32)
lr = np.zeros([60000],dtype=np.int32)
ld = np.zeros([60000],dtype=np.int32)
length = np.array([0],dtype=np.int32)
flag = np.array([1],dtype=np.int32)
n,m = self.map_size
tx_ar = np.zeros(len(self.target),dtype=np.float32)
ty_ar = np.zeros_like(tx_ar,dtype=np.float32)
sx_ar = np.zeros_like(tx_ar,dtype=np.float32)
sy_ar = np.zeros_like(tx_ar,dtype=np.float32)
for i,p in enumerate(self.target):
tx_ar[i] = p[0]
ty_ar[i] = p[1]
for i,p in enumerate(self.pos):
sx_ar[i] = p[0]
sy_ar[i] = p[1]
tmp_map = np.array(self.map,dtype=np.int32)
shape,bbox = self.getitem(index,self.cstate[index])
sx,sy = self.pos[index]
for p in shape:
x,y = p
x += sx + EPS
y += sy + EPS
tmp_map[min(int(x),self.map_size[0]-1),min(int(y),self.map_size[1]-1)] = 0
# print('in')
# print(index)
# tx = int(tx)
# ty = int(ty)
# sx = int(sx)
# sy = int(sy)
# search_transpose(np.array(self.map,dtype=np.int32),n,m,index,tx,ty,sx,sy, cstate,tstate,self.bin,px,py,num,lx,ly,lr,ld,length,flag)
search_transpose_poly_rot(
tmp_map, np.array(self.shapex,dtype=np.float32),np.array(self.shapey,dtype=np.float32),np.array(self.pn,dtype=np.int32),n,m,
len(self.pos),index,
tx_ar,ty_ar,
sx_ar,sy_ar,
np.array(self.cstate, dtype=np.int32), np.array(self.tstate, dtype=np.int32), self.bin,
lx, ly, lr, ld, length,
flag
)
# print('out')
# search(np.array(self.map,dtype=np.int32),n,m,index,tx,ty,sx,sy,h,w,lx,ly,length,flag)
for i in range(length[0]):
route.append((lx[i],ly[i],lr[i],ld[i]))
route.reverse()
return flag[0], route
def equal(self, a, b):
return fabs(a[0]-b[0])+fabs(a[1]-b[1]) < 1e-3
def route_length(self, route):
res = 0
lx = None
ly = None
for step in route:
x, y, _, _ = step
if lx is not None:
res += abs(lx-x) + abs(ly-y)
lx = x
ly = y
return res
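# route_length() sums Manhattan distances between consecutive waypoints;
# move() below uses 0.5 * log10 of this length (and of the slide distance)
# as reward shaping in place of ENV's constant step cost.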
'''
return (reward, finish_flag); finish_flag == 1 means the whole task is finished
'''
def move(self, index, direction):
base = 0
if index >= len(self.pos):
return -500, -1
if len(self.shape[index]) == 0:
return -500, -1
map_size = self.map_size
# size = self.size[index]
target = self.target[index]
pos = self.pos[index]
cstate = self.cstate[index]
tstate = self.tstate[index]
shape, bbox = self.getitem(index, cstate)
n,m = self.map_size
sx_ar = np.zeros(len(self.target),dtype=np.float32)
sy_ar = np.zeros_like(sx_ar,dtype=np.float32)
steps_ar = np.zeros(1,dtype=np.int32)
for i,p in enumerate(self.pos):
sx_ar[i] = p[0]
sy_ar[i] = p[1]
if direction < 4:
xx, yy = pos
# xx = max(0,xx - 0.5*(bbox[1][0]-bbox[0][0]+1))
# yy = max(0,yy - 0.5*(bbox[1][1]-bbox[0][1]+1))
if self.equal(pos, target) and cstate == tstate: # discourage being trapped in a local minimum by shuffling the object around its destination
base += -4
translate(
np.array(self.map,dtype=np.int32),np.array(self.shapex,dtype=np.float32),np.array(self.shapey,dtype=np.float32),np.array(self.pn,dtype=np.int32),n,m,
len(self.pos),index,direction,
sx_ar,sy_ar,
np.array(self.cstate, dtype=np.int32), np.array(self.tstate, dtype=np.int32), self.bin,
steps_ar
)
steps = steps_ar[0]
x = xx + steps * _x[direction]
y = yy + steps * _y[direction]
self.last_steps = steps
if steps == 0:
return -500, -1
else:
base += -0.5 * log(steps) / log(10) # shaping: subtract 0.5 * log10(slide distance)
for p in shape:
self.map[min(int(xx+p[0]+EPS),self.map_size[0]-1),min(int(yy+p[1]+EPS),self.map_size[1]-1)] = 0
for p in shape:
self.map[min(int(x+p[0]+EPS),self.map_size[0]-1),min(int(y+p[1]+EPS),self.map_size[1]-1)] = index + 2
# self.map[xx:xx+size[0],yy:yy+size[1]] = 0
# self.map[x:x+size[0],y:y+size[1]] = index + 2
# x = x + 0.5*(bbox[1][0]-bbox[0][0]+1)
# y = y + 0.5*(bbox[1][1]-bbox[0][1]+1)
self.pos[index] = (x, y)
hash_ = self.hash() # get the hash value of the current state
if hash_ in self.state_dict: # if the current state happened before
self.state_dict[hash_] += 1
penalty = 1 # fixed revisit penalty; the stored visit count is tracked but not used here
return base - 2 * penalty, 0
self.state_dict[hash_] = 1
if fabs(x-target[0]) + fabs(y-target[1]) < 1e-6 and cstate == tstate:
if self.finished[index] == 1:
return base + 2, 0
else:
self.finished[index] = 1
return base + 4, 0
# return base + 500, 0
# return base + 200, 0
return base, 0
# for i in range(size[0]):
# for j in range(size[1]):
# self.map[target[0]+i, target[1]+j] = 1
# if it has already reached the target place.
if self.equal(pos, target) and cstate == tstate:
return -500, -1
# check if legal, calc the distance
flag, route = self.check(index)
if flag == 1:
xx, yy = pos
x,y = target
self.route = route
length = self.route_length(self.route)
if length != 0:
base += -0.5 * log(length) / log(10) # shaping: subtract 0.5 * log10(route length)
# xx = max(0,xx - 0.5*(bbox[1][0]-bbox[0][0]+1))
# yy = max(0,yy - 0.5*(bbox[1][1]-bbox[0][1]+1))
tshape, bbox = self.getitem(index, tstate)
# x = max(0,x - 0.5*(bbox[1][0]-bbox[0][0]+1))
# y = max(0,y - 0.5*(bbox[1][1]-bbox[0][1]+1))
for p in shape:
tx = max(0,min(xx+p[0],self.map_size[0]-1)) + EPS
ty = max(0,min(yy+p[1],self.map_size[1]-1)) + EPS
self.map[min(int(tx),self.map_size[0]-1),min(int(ty),self.map_size[1]-1)] = 0
# self.map[xx:xx+size[0], yy:yy+size[1]] = 0
for p in tshape:
tx = max(0,min(x+p[0],self.map_size[0]-1)) + EPS
ty = max(0,min(y+p[1],self.map_size[1]-1)) + EPS
self.map[min(int(tx),self.map_size[0]-1), min(int(ty),self.map_size[1]-1)] = index + 2
self.pos[index] = target
self.cstate[index] = tstate
hash_ = self.hash() # get the hash value of the current state
if hash_ in self.state_dict: # if the current state happened before
self.state_dict[hash_] += 1
penalty = 1 # fixed revisit penalty; the stored visit count is tracked but not used here
return base - 2 * penalty, 0
ff = True
for i,v in enumerate(self.pos): # check if the task is done
w = self.target[i]
if fabs(v[0]-w[0])+fabs(v[1]-w[1]) > 1e-6 or self.cstate[i] != self.tstate[i]:
ff = False
break
if ff:
self.finished[index] = 1
# return base + 10000, 1
return base + 50, 1
else:
if self.finished[index] == 1: # if the object was placed before
return base + 2, 0
else:
self.finished[index] = 1
return base + 4, 0
# return base + 500, 0
# return base + 200, 0
# elif flag == -1:
# return -1000, 1
else:
# return -500, -1
return base, -1
def move_s(self, index, direction):
base = -10
if index >= len(self.pos):
return -500, -1
if len(self.shape[index]) == 0:
return -500, -1
map_size = self.map_size
# size = self.size[index]
target = self.target[index]
pos = self.pos[index]
cstate = self.cstate[index]
tstate = self.tstate[index]
shape, bbox = self.getitem(index, cstate)
n,m = self.map_size
sx_ar = np.zeros(len(self.target),dtype=np.float32)
sy_ar = np.zeros_like(sx_ar,dtype=np.float32)
steps_ar = np.zeros(1,dtype=np.int32)
for i,p in enumerate(self.pos):
sx_ar[i] = p[0]
sy_ar[i] = p[1]
if self.equal(pos, target) and cstate == tstate: # discourage being trapped in a local minimum by shuffling the object around its destination
# base += -60
base += -600
if direction < 4:
xx, yy = pos
# xx = max(0,xx - 0.5*(bbox[1][0]-bbox[0][0]+1))
# yy = max(0,yy - 0.5*(bbox[1][1]-bbox[0][1]+1))
translate(
np.array(self.map,dtype=np.int32),np.array(self.shapex,dtype=np.float32),np.array(self.shapey,dtype=np.float32),np.array(self.pn,dtype=np.int32),n,m,
len(self.pos),index,direction,
sx_ar,sy_ar,
np.array(self.cstate, dtype=np.int32), np.array(self.tstate, dtype=np.int32), self.bin,
steps_ar
)
steps = steps_ar[0]
x = xx + steps * _x[direction]
y = yy + steps * _y[direction]
if steps == 0:
return -500, -1
else:
for p in shape:
self.map[min(int(xx+p[0]+EPS),self.map_size[0]-1),min(int(yy+p[1]+EPS),self.map_size[1]-1)] = 0
for p in shape:
self.map[min(int(x+p[0]+EPS),self.map_size[0]-1),min(int(y+p[1]+EPS),self.map_size[1]-1)] = index + 2
# self.map[xx:xx+size[0],yy:yy+size[1]] = 0
# self.map[x:x+size[0],y:y+size[1]] = index + 2
# x = x + 0.5*(bbox[1][0]-bbox[0][0]+1)
# y = y + 0.5*(bbox[1][1]-bbox[0][1]+1)
self.pos[index] = (x, y)
hash_ = self.hash() # get the hash value of the current state
if hash_ in self.state_dict: # if the current state happened before
return -600, 0
self.state_dict[hash_] = 1
if fabs(x-target[0]) + fabs(y-target[1]) < 1e-6 and cstate == tstate:
if self.finished[index] == 1:
# return base + 50, 0
return base + 500, 0
else:
self.finished[index] = 1
return base + 2000, 0
# return base + 500, 0
# return base + 200, 0
return base, 0
# for i in range(size[0]):
# for j in range(size[1]):
# self.map[target[0]+i, target[1]+j] = 1
# if it has already reached the target place.
if self.equal(pos, target) and cstate == tstate:
return -500, -1
# check if legal, calc the distance
flag, route = self.check_s(index)
if flag == 1:
xx, yy = pos
x,y = target
self.route = route
# xx = max(0,xx - 0.5*(bbox[1][0]-bbox[0][0]+1))
# yy = max(0,yy - 0.5*(bbox[1][1]-bbox[0][1]+1))
tshape, bbox = self.getitem(index, tstate)
# x = max(0,x - 0.5*(bbox[1][0]-bbox[0][0]+1))
# y = max(0,y - 0.5*(bbox[1][1]-bbox[0][1]+1))
for p in shape:
tx = max(0,min(xx+p[0],self.map_size[0]-1)) + EPS
ty = max(0,min(yy+p[1],self.map_size[1]-1)) + EPS
self.map[min(int(tx),self.map_size[0]-1),min(int(ty),self.map_size[1]-1)] = 0
# self.map[xx:xx+size[0], yy:yy+size[1]] = 0
for p in tshape:
tx = max(0,min(x+p[0],self.map_size[0]-1)) + EPS
ty = max(0,min(y+p[1],self.map_size[1]-1)) + EPS
self.map[min(int(tx),self.map_size[0]-1), min(int(ty),self.map_size[1]-1)] = index + 2
self.pos[index] = target
self.cstate[index] = tstate
hash_ = self.hash() # get the hash value of the current state
if hash_ in self.state_dict: # if the current state happened before
# return -40, 0
return -600, 0
ff = True
for i,v in enumerate(self.pos): # check if the task is done
w = self.target[i]
if fabs(v[0]-w[0])+fabs(v[1]-w[1]) > 1e-6 or self.cstate[i] != self.tstate[i]:
ff = False
break
if ff:
self.finished[index] = 1
# return base + 10000, 1
return base + 100000, 1
else:
if self.finished[index] == 1: # if the object was placed before
# return base + 50, 0
return base + 500, 0
else:
self.finished[index] = 1
return base + 2000, 0
# return base + 500, 0
# return base + 200, 0
# elif flag == -1:
# return -1000, 1
else:
# return -500, -1
return base, -1
def move_p(self, index, direction):
base = -1
if index >= len(self.pos):
return -500, -1
if len(self.shape[index]) == 0:
return -500, -1
map_size = self.map_size
# size = self.size[index]
target = self.target[index]
pos = self.pos[index]
cstate = self.cstate[index]
tstate = self.tstate[index]
shape, bbox = self.getitem(index, cstate)
n,m = self.map_size
sx_ar = np.zeros(len(self.target),dtype=np.float32)
sy_ar = np.zeros_like(sx_ar,dtype=np.float32)
steps_ar = np.zeros(1,dtype=np.int32)
for i,p in enumerate(self.pos):
sx_ar[i] = p[0]
sy_ar[i] = p[1]
if direction < 4:
xx, yy = pos
# xx = max(0,xx - 0.5*(bbox[1][0]-bbox[0][0]+1))
# yy = max(0,yy - 0.5*(bbox[1][1]-bbox[0][1]+1))
if self.equal(pos, target) and cstate == tstate: # discourage being trapped in a local minimum by shuffling the object around its destination
base += -4
translate(
np.array(self.map,dtype=np.int32),np.array(self.shapex,dtype=np.float32),np.array(self.shapey,dtype=np.float32),np.array(self.pn,dtype=np.int32),n,m,
len(self.pos),index,direction,
sx_ar,sy_ar,
np.array(self.cstate, dtype=np.int32), np.array(self.tstate, dtype=np.int32), self.bin,
steps_ar
)
steps = steps_ar[0]
x = xx + steps * _x[direction]
y = yy + steps * _y[direction]
if steps == 0:
return -500, -1
else:
for p in shape:
self.map[min(int(xx+p[0]+EPS),self.map_size[0]-1),min(int(yy+p[1]+EPS),self.map_size[1]-1)] = 0
for p in shape:
self.map[min(int(x+p[0]+EPS),self.map_size[0]-1),min(int(y+p[1]+EPS),self.map_size[1]-1)] = index + 2
# self.map[xx:xx+size[0],yy:yy+size[1]] = 0
# self.map[x:x+size[0],y:y+size[1]] = index + 2
# x = x + 0.5*(bbox[1][0]-bbox[0][0]+1)
# y = y + 0.5*(bbox[1][1]-bbox[0][1]+1)
self.pos[index] = (x, y)
hash_ = self.hash() # get the hash value of the current state
if hash_ in self.state_dict: # if the current state happened before
self.state_dict[hash_] += 1
penalty = 1 # fixed revisit penalty; the stored visit count is tracked but not used here
return base - 2 * penalty, 0
self.state_dict[hash_] = 1
if fabs(x-target[0]) + fabs(y-target[1]) < 1e-6 and cstate == tstate:
if self.finished[index] == 1:
return base + 2, 0
else:
self.finished[index] = 1
return base + 4, 0
# return base + 500, 0
# return base + 200, 0
return base, 0
# for i in range(size[0]):
# for j in range(size[1]):
# self.map[target[0]+i, target[1]+j] = 1
# if it has already reached the target place.
if self.equal(pos, target) and cstate == tstate:
return -500, -1
# check if legal, calc the distance
flag, route = self.check_s(index)
if flag == 1:
xx, yy = pos
x,y = target
self.route = route
# xx = max(0,xx - 0.5*(bbox[1][0]-bbox[0][0]+1))
# yy = max(0,yy - 0.5*(bbox[1][1]-bbox[0][1]+1))
tshape, bbox = self.getitem(index, tstate)
# x = max(0,x - 0.5*(bbox[1][0]-bbox[0][0]+1))
# y = max(0,y - 0.5*(bbox[1][1]-bbox[0][1]+1))
for p in shape:
tx = max(0,min(xx+p[0],self.map_size[0]-1)) + EPS
ty = max(0,min(yy+p[1],self.map_size[1]-1)) + EPS
self.map[min(int(tx),self.map_size[0]-1),min(int(ty),self.map_size[1]-1)] = 0
# self.map[xx:xx+size[0], yy:yy+size[1]] = 0
for p in tshape:
tx = max(0,min(x+p[0],self.map_size[0]-1)) + EPS
ty = max(0,min(y+p[1],self.map_size[1]-1)) + EPS
self.map[min(int(tx),self.map_size[0]-1), min(int(ty),self.map_size[1]-1)] = index + 2
self.pos[index] = target
self.cstate[index] = tstate
hash_ = self.hash() # get the hash value of the current state
if hash_ in self.state_dict: # if the current state happened before
                penalty = self.state_dict[hash_]
                self.state_dict[hash_] += 1
                penalty = 1  # note: overrides the visit count with a constant penalty of 1
                return base - 2 * penalty, 0
ff = True
for i,v in enumerate(self.pos): # check if the task is done
w = self.target[i]
if fabs(v[0]-w[0])+fabs(v[1]-w[1]) > 1e-6 or self.cstate[i] != self.tstate[i]:
ff = False
break
if ff:
self.finished[index] = 1
# return base + 10000, 1
return base + 50, 1
else:
if self.finished[index] == 1: # if the object was placed before
return base + 2, 0
else:
self.finished[index] = 1
return base + 4, 0
# return base + 500, 0
# return base + 200, 0
# elif flag == -1:
# return -1000, 1
else:
# return -500, -1
return base, -1
def move_debug(self, index, direction):
base = -10
if index >= len(self.pos):
print('index error')
return -500, -1
map_size = self.map_size
# size = self.size[index]
target = self.target[index]
pos = self.pos[index]
cstate = self.cstate[index]
tstate = self.tstate[index]
shape,bbox = self.getitem(index, cstate)
        if self.equal(pos, target) and cstate == tstate: # use equal(): comparing NumPy arrays with == is ambiguous in a bool context
base += -60
if direction < 4:
xx, yy = pos
print('xx',xx,'yy',yy)
steps = 1
while True:
x = xx + steps*_x[direction]
y = yy + steps*_y[direction]
if x < 0 or x >= map_size[0] or y < 0 or y >= map_size[1] :
steps -= 1
print('early done')
break
# return -500, -1
flag = True
for p in shape:
if x+p[0] < 0 or x+p[0] >= map_size[0] or y+p[1] < 0 or y+p[1] >= map_size[1]:
flag = False
print('over bound',x+p[0],)
break
if self.map[x+p[0],y+p[1]] != 0:
print('collide!!',self.map[x+p[0],y+p[1]],index+2)
flag = False
break
if not flag:
steps -= 1
break
steps += 1
x = xx + steps*_x[direction]
y = yy + steps*_y[direction]
if steps == 0:
print('cannot move error')
return -500, -1
else:
for p in shape:
self.map[xx+p[0],yy+p[1]] = 0
for p in shape:
self.map[x+p[0],y+p[1]] = index + 2
# self.map[xx:xx+size[0],yy:yy+size[1]] = 0
# self.map[x:x+size[0],y:y+size[1]] = index + 2
self.pos[index] = (x, y)
hash_ = self.hash() # get the hash value of the current state
if hash_ in self.state_dict: # if the current state happened before
return -600, 0
self.state_dict[hash_] = 1
if int(x) == int(target[0]) and int(y) == int(target[1]) and cstate == tstate:
if self.finished[index] == 1:
return base + 50, 0
else:
self.finished[index] = 1
return base + 500, 0
# return base + 200, 0
return base, 0
# for i in range(size[0]):
# for j in range(size[1]):
# self.map[target[0]+i, target[1]+j] = 1
# if it has already reached the target place.
        if self.equal(pos, target) and cstate == tstate:
print('position error')
return -500, -1
# check if legal, calc the distance
flag, route = self.check(index)
if flag == 1:
xx, yy = pos
x,y = target
self.route = route
tshape,bbox = self.getitem(index,tstate)
for p in shape:
self.map[xx+p[0],yy+p[1]] = 0
# self.map[xx:xx+size[0], yy:yy+size[1]] = 0
for p in tshape:
self.map[x+p[0], y+p[1]] = index + 2
self.pos[index] = target
self.cstate[index] = tstate
hash_ = self.hash() # get the hash value of the current state
if hash_ in self.state_dict: # if the current state happened before
return -40, 0
ff = True
for i,v in enumerate(self.pos): # check if the task is done
w = self.target[i]
                if not self.equal(v, w) or self.cstate[i] != self.tstate[i]:
ff = False
break
if ff:
self.finished[index] = 1
return base + 10000, 1
else:
if self.finished[index] == 1: # if the object was placed before
return base + 50, 0
else:
self.finished[index] = 1
return base + 500, 0
# return base + 200, 0
# elif flag == -1:
# return -1000, 1
else:
print('path find error')
return -500, -1
def getlastroute(self):
return self.route
def printmap(self):
with open('op.txt','a') as fp:
for _ in self.map:
for __ in _:
fp.write('%d '%__)
fp.write('\n')
fp.write('\n')
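    # build_wall draws a random wall as a biased random walk: it starts from a
    # cell far from the map centre, turns with probability q = 0.05 (a 90-degree
    # turn 90% of the time), and stops with probability p = 0.005 once the walk
    # is longer than 100 steps.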
def build_wall(self):
size = self.map_size
p = 0.005
q = 0.05
dix = [1,0,-1,0]
diy = [0,1,0,-1]
while True:
px = np.random.randint(size[0])
py = np.random.randint(size[1])
if fabs(px-size[0]/2) + fabs(py-size[1]/2) > (size[0]+size[1])/4:
break
d = int(px < size[0]/2) + int(py < size[1]/2)*2
if d == 3:
d = 2
elif d == 2:
d = 3
l = 0
wall_list = []
while True:
cnt = 0
while True:
tx = px + dix[d]
ty = py + diy[d]
cnt += 1
if cnt > 10:
break
if tx == size[0] or tx == -1 or ty == size[1] or ty == -1:
d = np.random.randint(4)
continue
break
if tx >= 0 and tx < size[0] and ty >= 0 and ty < size[1]:
px = tx
py = ty
if not (px,py) in wall_list :
wall_list.append((px,py))
r = np.random.rand()
if r < q:
t = np.random.rand()
if t < 0.9:
d = (d+1)%4
else:
d = np.random.randint(4)
r = np.random.rand()
if r < p and l > 100:
break
l += 1
return wall_list
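    # randominit places num axis-aligned rectangles by rejection sampling:
    # start and target cells are drawn uniformly, then each size is resampled
    # until neither the start box nor the target box overlaps any other object.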
def randominit(self,num=5):
import random
size = self.map_size
# print(size)
pos_list = []
size_list = []
target_list = []
for i in range(num):
while True:
px = np.random.randint(size[0])
py = np.random.randint(size[1])
flag = True
for _ in pos_list:
dx,dy = _
if dx == px or dy == py:
flag = False
break
if flag:
pos_list.append((px,py))
break
while True:
px = np.random.randint(size[0])
py = np.random.randint(size[1])
flag = True
for _ in target_list:
dx,dy = _
if dx == px or dy == py:
flag = False
break
if flag:
target_list.append((px,py))
break
random.shuffle(pos_list)
random.shuffle(target_list)
for i in range(num):
px, py = pos_list[i]
tx, ty = target_list[i]
while True:
dx = np.random.randint(1,11)
dy = np.random.randint(1,11)
if px + dx > size[0] or py + dy > size[1] or tx + dx > size[0] or ty + dy > size[1]:
# print(px,py,dx,dy)
continue
flag = True
for j in range(num):
if j == i:
continue
px_, py_ = pos_list[j]
tx_, ty_ = target_list[j]
if j > len(size_list) - 1:
dx_, dy_ = 1, 1
else:
dx_, dy_ = size_list[j]
lx = max(px, px_)
ly = max(py, py_)
rx = min(px + dx, px_ + dx_) - 1
ry = min(py + dy, py_ + dy_) - 1
if lx <= rx and ly <= ry:
flag = False
break
lx = max(tx, tx_)
ly = max(ty, ty_)
rx = min(tx + dx, tx_ + dx_) - 1
ry = min(ty + dy, ty_ + dy_) - 1
if lx <= rx and ly <= ry:
flag = False
break
if flag:
size_list.append((dx,dy))
break
# list_ =[(pair[0],(dx,dy)),(pair[1],(dx_,dy_))
# random.shuffle(list_)
# for i in list_:
# pos_list.append(i[0])
# size_list.append(i[1])
self.setmap(pos_list,target_list,size_list)
# for i in range(size[0]):
# for j in range(size[1]):
# for i in range(size[0]):
# for j in range(size[1]):
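    # randominit_crowded is the harder generator: it first builds a random wall,
    # then samples start/target cells that keep a minimum Manhattan separation,
    # checking with an iterative DFS that each target is reachable; the object
    # list is padded with empty placeholders up to self.max_num, and rectangular
    # sizes are rejection-sampled against all boxes and wall cells. If any stage
    # stalls for 10000 attempts, the whole layout is rebooted.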
def randominit_crowded(self,num=5):
import random
size = self.map_size
# print(size)
while True:
reboot = False
pos_list = []
size_list = []
target_list = []
# self.finished = np.zeros(num)
tmp = self.build_wall()
# tmp += self.build_wall()
# tmp += self.build_wall()
            tmp.sort()  # sort in place so duplicate wall cells become adjacent; bare sorted() discards its result
wall_list = []
l = None
map_ = np.zeros(self.map_size)
for t in tmp:
if t != l:
l = t
map_[l[0],l[1]] = 1
wall_list.append(t)
def dfs(p,tp):
gx = [1,0,-1,0]
gy = [0,1,0,-1]
stack = []
stack.append(p)
mark_ = np.zeros(self.map_size)
                while stack:
flag = True
p = stack.pop()
if mark_[p[0],p[1]] == 1:
continue
mark_[p[0],p[1]] = 1
if p[0] == tp[0] and p[1] == tp[1]:
break
for i in range(4):
x = p[0] + gx[i]
y = p[1] + gy[i]
if x >= 0 and x < self.map_size[0] and y >= 0 and y < self.map_size[1] and map_[x,y] != 1 and mark_[x,y] == 0:
stack.append((x,y))
return mark_[tp[0],tp[1]] == 1
for i in range(num):
while True:
px = np.random.randint(1,size[0]-3)
py = np.random.randint(1,size[1]-3)
flag = True
hh = [-1,0,1]
for _ in pos_list:
dx, dy = _
if dx == px or dy == py or abs(dx - px) + abs(dy - py) < 5:
flag = False
for k in hh:
for l in hh:
xx = px + k
yy = py + l
if dx == xx and dy == yy:
flag = False
break
if flag == False:
break
if flag == False:
break
for _ in wall_list:
if flag == False:
break
dx,dy = _
if dx == px and dy == py:
flag = False
break
for k in hh:
for l in hh:
xx = px + k
yy = py + l
if dx == xx and dy == yy:
flag = False
break
if flag == False:
break
if flag == False:
break
if flag:
pos_list.append((px,py))
break
reboot_cnt = 0
while reboot_cnt < 10000:
reboot_cnt += 1
px = np.random.randint(1,size[0]-3)
py = np.random.randint(1,size[1]-3)
# flag = True
flag = dfs((px,py),pos_list[-1])
# print(flag)
for _ in target_list:
dx, dy = _
if dx == px or dy == py or abs(dx-px)+abs(dy-py) < 5:
flag = False
break
for k in hh:
for l in hh:
xx = px + k
yy = py + l
if dx == xx and dy == yy:
flag = False
break
if flag == False:
break
if flag == False:
break
for _ in wall_list:
if flag == False:
break
dx,dy = _
if dx == px and dy == py:
flag = False
break
for k in hh:
for l in hh:
xx = px + k
yy = py + l
if dx == xx and dy == yy:
flag = False
break
if flag == False:
break
if flag == False:
break
if flag:
target_list.append((px,py))
break
if reboot_cnt >= 10000:
reboot = True
break
if reboot:
continue
# random.shuffle(pos_list)
# random.shuffle(target_list)
sub_num = self.max_num - num
# print(self.max_num, num)
pos_s = [ (_, i) for i, _ in enumerate(pos_list)]
for _ in range(sub_num):
pos_s.append(((0,0),-1))
# print(len(pos_list))
            random.shuffle(pos_s)  # shuffle the padded (position, id) pairs; pos_list is rebuilt from pos_s below
pos_list = np.array([_[0] for _ in pos_s])
tmp_target_list = np.zeros_like(pos_list)
# random.shuffle(target_list)
_ = 0
# for target in target_list:
# while pos_list[_][0] == pos_list[_][1] and pos_list[_][0] == 0:
# _ += 1
# tmp_target_list[_] = target
# _ += 1
for i,_ in enumerate(pos_s):
p, id_ = _
if p[0] == p[1] and p[0] == 0:
continue
tmp_target_list[i] = target_list[id_]
target_list = tmp_target_list
def dfs(p,tp,size):
gx = [1,0,-1,0]
gy = [0,1,0,-1]
dx, dy = size
stack = []
stack.append(p)
mark_ = np.zeros(self.map_size)
                while stack:
flag = True
p = stack.pop()
if mark_[p[0],p[1]] == 1:
continue
mark_[p[0],p[1]] = 1
if p[0] == tp[0] and p[1] == tp[1]:
break
for i in range(4):
x = p[0] + gx[i]
y = p[1] + gy[i]
if x >= 0 and x + dx - 1 < self.map_size[0] and y >= 0 and y + dy - 1 < self.map_size[1] and map_[x,y] != 1 and mark_[x,y] == 0:
flag = True
for j in range(dx):
for k in range(dy):
if map_[x+j,y+k] == 1:
flag = False
break
if flag == False:
break
if flag:
stack.append((x,y))
return mark_[tp[0],tp[1]] == 1
for i in range(self.max_num):
px, py = pos_list[i]
tx, ty = target_list[i]
if px == py and py == 0:
size_list.append((0,0))
continue
reboot_cnt = 0
while reboot_cnt < 10000:
reboot_cnt += 1
dx = np.random.randint(2,31)
dy = np.random.randint(2,31)
if px + dx > size[0] or py + dy > size[1] or tx + dx > size[0] or ty + dy > size[1]:
# print(px,py,dx,dy)
continue
flag = True
for j in range(self.max_num):
if j == i:
continue
px_, py_ = pos_list[j]
tx_, ty_ = target_list[j]
if px_ == py_ and py_ == 0:
# size_list.append((0,0))
continue
if j > len(size_list) - 1:
dx_, dy_ = 2, 2
else:
dx_, dy_ = size_list[j]
lx = max(px, px_)
ly = max(py, py_)
rx = min(px + dx, px_ + dx_) - 1
ry = min(py + dy, py_ + dy_) - 1
if lx <= rx and ly <= ry:
flag = False
break
lx = max(tx, tx_)
ly = max(ty, ty_)
rx = min(tx + dx, tx_ + dx_) - 1
ry = min(ty + dy, ty_ + dy_) - 1
if lx <= rx and ly <= ry:
flag = False
break
# if flag:
# flag = dfs(pos_list[i],target_list[i],(dx,dy))
for wall in wall_list:
if not flag:
break
px_, py_ = wall
tx_, ty_ = wall
dx_, dy_ = 1, 1
lx = max(px, px_)
ly = max(py, py_)
rx = min(px + dx, px_ + dx_) - 1
ry = min(py + dy, py_ + dy_) - 1
if lx <= rx and ly <= ry:
flag = False
break
lx = max(tx, tx_)
ly = max(ty, ty_)
rx = min(tx + dx, tx_ + dx_) - 1
ry = min(ty + dy, ty_ + dy_) - 1
if lx <= rx and ly <= ry:
flag = False
break
if flag:
size_list.append((dx,dy))
break
if reboot_cnt >= 10000:
reboot = True
break
if reboot:
continue
shapes = []
for i in range(self.max_num):
x,y = pos_list[i]
w,d = size_list[i]
pos_list[i] = (x+0.5*w,y+0.5*d)
x,y = target_list[i]
target_list[i] = (x+0.5*w,y+0.5*d)
shape = []
for a in range(w):
for b in range(d):
shape.append((a,b))
shapes.append(shape)
self.setmap(pos_list,target_list,shapes,np.zeros(len(pos_list)),np.zeros(len(pos_list)),wall_list)
break
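    # getstate_1 encodes the scene as a stack of binary masks: one wall/obstacle
    # channel followed by a (current map, target map) pair per object, returned
    # in HWC order.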
def getstate_1(self,shape=[64,64]):
state = []
# temp = np.zeros(shape).astype(np.float32)
# tmap = self.map == 1
# oshape = self.map.shape
# for i in range(shape[0]):
# x = int(1.0 * i * oshape[0] / shape[0])
# for j in range(shape[1]):
# y = int(1.0 * j * oshape[1] / shape[1])
# temp[i,j] = tmap[x,y]
# for i in range(oshape[0]):
# x = 1.0*i/oshape[0]*shape[0]
# for j in range(oshape[1]):
# y = 1.0*j/oshape[1]*shape[1]
# temp[x,y] = tmap[i,j]
state.append((self.map == 1).astype(np.float32))
# state.append(temp)
for i in range(len(self.pos)):
# temp = np.zeros(shape).astype(np.float32)
# tmap = self.map == (i+2)
# for i in range(shape[0]):
# x = int(1.0 * i * oshape[0] / shape[0])
# for j in range(shape[1]):
# y = int(1.0 * j * oshape[1] / shape[1])
# temp[i,j] = tmap[x,y]
# state.append(temp)
# temp = np.zeros(shape).astype(np.float32)
# tmap = self.target_map == (i+2)
# for i in range(shape[0]):
# x = int(1.0 * i * oshape[0] / shape[0])
# for j in range(shape[1]):
# y = int(1.0 * j * oshape[1] / shape[1])
# temp[i,j] = tmap[x,y]
# state.append(temp)
state.append((self.map == (i+2)).astype(np.float32))
state.append((self.target_map == (i+2)).astype(np.float32))
return np.transpose(np.array(state),[1,2,0])
def getstate_2(self, index):
state = []
obs = (self.map == 1).astype(np.float32)
cho = (self.map == (index+2)).astype(np.float32)
cho_t = (self.target_map == (index+2)).astype(np.float32)
        oth = np.zeros_like(obs).astype(bool)
        oth_t = np.zeros_like(obs).astype(bool)
for i in range(len(self.pos)):
            if i != index:  # skip the chosen object itself; i indexes objects, map cells store i + 2
oth |= self.map == (i+2)
oth_t |= self.target_map == (i+2)
# state.append((self.map == (i+1)).astype(np.float32))
state = [obs,cho,cho_t,oth,oth_t]
return np.transpose(np.array(state),[1,2,0])
def getstate_3(self,shape=[64,64]):
state = []
state.append(np.array(self.map).astype(np.float32))
state.append(np.array(self.target_map).astype(np.float32))
return np.transpose(np.array(state),[1,2,0])
def getmap(self):
return np.array(self.map)
def gettargetmap(self):
return self.target_map
def getconfig(self):
return (self.pos,self.target,self.shape,self.cstate,self.tstate,self.wall)
def getfinished(self):
return deepcopy(self.finished)
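    # getconflict is effectively stubbed out: it returns an all-zero
    # [num, num, 2] tensor, and the interval-overlap computation after the first
    # return is unreachable.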
def getconflict(self):
num = len(self.finished)
res = np.zeros([num,num,2])
# size = np.zeros([num,2])
# for i,shape in enumerate(self.shape):
# size[i]
return res
for axis in range(2):
for i in range(num):
l = np.min([self.pos[i][axis],self.target[i][axis]])
r = np.max([self.pos[i][axis],self.target[i][axis]]) + self.size[i][axis] - 1
for j in range(num):
if (self.pos[j][axis] >= l and self.pos[j][axis] <= r) or (self.pos[j][axis] + self.size[j][axis] >= l and self.pos[j][axis] + self.size[j][axis] <= r):
res[i,j,axis] = 1
return res
def getitem(self, index, state):
shape = self.shape[index]
opoints = np.array(shape)
# for i in range(size[0]):
# for j in range(size[1]):
# opoints.append((i,j))
radian = 2 * pi * state / self.bin
points, bbox = self.rotate(opoints, radian)
return points, bbox
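    # getcleanmap returns a copy of the occupancy map with the chosen object
    # erased, i.e. the free-space map a path search for that object should use.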
def getcleanmap(self, index):
start = self.pos[index]
# size = self.size[index]
cstate = self.cstate[index]
res = np.array(self.map)
# shape = self.shape[index]
# opoints = np.array(shape)
# opoints = []
# for i in range(size[0]):
# for j in range(size[1]):
# opoints.append((i,j))
points, bbox = self.getitem(index, cstate)
# radian = 2 * pi * cstate / self.bin
# points, bbox = self.rotate(opoints, radian)
xx, yy = start
# xx = max(0, xx - 0.5*(bbox[1,0]-bbox[0,0]+1))
# yy = max(0, yy - 0.5*(bbox[1,1]-bbox[0,1]+1))
for p in points:
x, y = p
x += xx
y += yy
            if x >= self.map_size[0] or y >= self.map_size[1]:
                print('wrong coords', x, y)
tx = max(0,min(x,self.map_size[0])) + EPS
ty = max(0,min(y,self.map_size[1])) + EPS
res[min(int(tx),self.map_size[0]-1),min(int(ty),self.map_size[1]-1)] = 0
# x = min(x, self.map_size[0]-1)
# y = min(y, self.map_size[1]-1)
# res[int(x), int(y)] = 0
return res
def __deepcopy__(self,memodict={}):
res = ENV_ablation_wo_base(self.map_size,self.max_num)
res.map = np.array(self.map)
res.target_map = np.array(self.target_map)
res.pos = np.array(self.pos)
res.target = self.target
res.shape = self.shape
res.cstate = np.array(self.cstate)
res.tstate = self.tstate
res.wall = self.wall
res.state_dict = deepcopy(self.state_dict)
res.finished = np.array(self.finished)
res.shapex = self.shapex
res.shapey = self.shapey
res.pn = self.pn
res.edge = self.edge
res.bound = self.bound
return res
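# The classes below are near-duplicate ablation variants of the environment
# above: the map handling and generators are identical, and only the reward
# constants in move/move_s/move_p differ.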
class ENV_ablation_wo_repetition: # ablation: move() overrides the repeated-state visit count, so no repetition penalty is applied
def __init__(self, size=(5,5),max_num=5):
self.map_size = size
self.map = np.zeros(self.map_size)
self.target_map = np.zeros(self.map_size)
self.route = []
self.bin = 24
self.max_num = max_num
"""
params
pos:
a list of the left bottom point of the furniture
size:
a list of the size of the furniture
"""
def setmap(self, pos, target, shape, cstate, tstate, wall=[]):
self.map = np.zeros(self.map_size)
self.target_map = np.zeros(self.map_size)
self.pos = deepcopy(np.array(pos))
self.shape = deepcopy(shape)
self.target = np.array(target)
        self.cstate = np.array(cstate).astype(np.int32)
        self.tstate = np.array(tstate).astype(np.int32)
self.wall = deepcopy(wall)
self.dis = np.zeros(len(pos))
self.route = []
self.state_dict = {}
self.finished = np.zeros(len(pos))
self.shapex = []
self.shapey = []
# self.boundx = []
# self.boundy = []
self.pn = []
# self.bn = []
self.edge = []
self.bound = []
# print(shape)
# for bd in self.bound:
# for sh in self.shape:
# print(len(sh))
# for p in self.pos:
# print(p)
# print()
for p in wall:
x, y = p
x = int(x)
y = int(y)
self.map[x,y] = 1
self.target_map[x,y] = 1
# cut_list = []
for i, _ in enumerate(pos):
if len(shape[i]) == 0:
continue
x, y = _
s = self.cstate[i]
radian = 2 * pi * s / self.bin
points, bbox = self.rotate(self.shape[i], radian)
id_list = []
for i_,p in enumerate(points):
xx,yy = p
tx = max(0,min(x+xx,self.map_size[0])) + EPS
ty = max(0,min(y+yy,self.map_size[1])) + EPS
# cut_list.append((int(tx),int(ty)))
if self.map[int(tx),int(ty)] >= 1 and self.map[int(tx),int(ty)] != i+2:
# if i == 5:
# print('map',self.map[int(tx),int(ty)])
id_list.append(i_)
else:
self.map[int(tx),int(ty)] = i + 2
if self.map[int(tx),int(ty)] == 1:
self.map[int(tx),int(ty)] = 0
if self.target_map[int(tx),int(ty)] == 1:
self.target_map[int(tx),int(ty)] = 0
x,y = target[i]
s = self.tstate[i]
radian = 2 * pi * s / self.bin
points, bbox = self.rotate(self.shape[i], radian)
# for p in points:
# xx, yy = p
# xmax = max(xmax,xx)
# xmin = min(xmin,xx)
# ymax = max(ymax,yy)
# ymin = min(ymin,yy)
# x = max(0,x - 0.5*(xmax-xmin+1))
# y = max(0,y - 0.5*(ymax-ymin+1))
for i_,p in enumerate(points):
xx,yy = p
tx = max(0,min(x+xx,self.map_size[0])) + EPS
ty = max(0,min(y+yy,self.map_size[1])) + EPS
# cut_list.append((int(tx),int(ty)))
if self.target_map[int(tx),int(ty)] >= 1 and self.target_map[int(tx),int(ty)] != i + 2:
# if i == 5:
# print('tmap',self.map[int(tx),int(ty)])
if not i_ in id_list:
id_list.append(i_)
else:
self.target_map[int(tx),int(ty)] = i + 2
if self.map[int(tx),int(ty)] == 1:
self.map[int(tx),int(ty)] = 0
if self.target_map[int(tx),int(ty)] == 1:
self.target_map[int(tx),int(ty)] = 0
tmp = deepcopy(self.shape[i])
# print(self.shape[i])
for i_ in id_list:
# print(i_,tmp[i_])
self.shape[i].remove(tmp[i_])
# dx, dy = size[i]
# x, y = _
# x_, y_ = target[i]
# self.map[x:x+dx, y:y+dy] = i + 2
# self.target_map[x_:x_+dx, y_:y_+dy] = i + 2
# print('==========')
# for sh in self.shape:
# print(len(sh))
self.map = (self.map == 1).astype(np.int32)
self.target_map = (self.target_map == 1).astype(np.int32)
for i, _ in enumerate(pos):
if len(self.shape[i]) == 0:
continue
x, y = _
s = self.cstate[i]
radian = 2 * pi * s / self.bin
points, bbox = self.rotate(self.shape[i], radian)
id_list = []
for i_,p in enumerate(points):
xx,yy = p
tx = max(0,min(x+xx,self.map_size[0])) + EPS
ty = max(0,min(y+yy,self.map_size[1])) + EPS
self.map[int(tx),int(ty)] = i + 2
x,y = target[i]
s = self.tstate[i]
radian = 2 * pi * s / self.bin
points, bbox = self.rotate(self.shape[i], radian)
for i_,p in enumerate(points):
xx,yy = p
tx = max(0,min(x+xx,self.map_size[0])) + EPS
ty = max(0,min(y+yy,self.map_size[1])) + EPS
self.target_map[int(tx),int(ty)] = i + 2
for i, sh in enumerate(self.shape):
ed = []
# bd = []
map_ = np.zeros(self.map_size)
mark_ = np.zeros(self.map_size)
for p in sh:
map_[p[0],p[1]] = 1
# print(i,sh)
def dfs(p):
gx = [1,0,-1,0]
gy = [0,1,0,-1]
rx = [1,1,1,0,0,-1,-1,-1]
ry = [-1,0,1,-1,1,-1,0,1]
stack = []
stack.append((p,-1))
last = -2
                while stack:
flag = True
p,direction = stack.pop()
if mark_[p[0],p[1]] == 1:
continue
mark_[p[0],p[1]] = 1
# print(p)
for i in range(8):
x = p[0] + rx[i]
y = p[1] + ry[i]
if x < 0 or x >= self.map_size[0] or y >= self.map_size[1] or y < 0 or map_[x,y] == 0:
# if last == direction:
# ed.pop()
ed.append(p)
# bd.append(p)
last = direction
flag = False
break
vs = []
for i in range(8):
vs.append([])
for i in range(4):
x = p[0] + gx[i]
y = p[1] + gy[i]
if x >= 0 and x < self.map_size[0] and y >= 0 and y < self.map_size[1] and map_[x,y] == 1 and mark_[x,y] == 0:
# print(x,y)
# dfs((x,y))
if flag:
# mark_[x,y] = 1
stack.append(((x,y),i))
break
else:
cnt = 0
for j in range(8):
xx = x + rx[j]
yy = y + ry[j]
if xx < 0 or xx >= self.map_size[0] or yy >= self.map_size[1] or yy < 0 or map_[xx,yy] == 0:
# mark_[x,y] = 1
cnt += 1
vs[cnt].append(((x,y),i))
for v in vs:
for p in v:
stack.append(p)
if len(sh) != 0:
dfs(sh[0])
self.edge.append(ed)
# self.bound.append(bd)
# for i in range(len(self.pos)):
# print(len(self.shape[i]),len(self.edge[i]))
for i, _ in enumerate(pos):
if self.equal(self.pos[i],self.target[i]) and self.cstate[i] == self.tstate[i]:
self.finished[i] = 1
for ed in self.edge:
self.pn.append(len(ed))
for p in ed:
self.shapex.append(p[0])
self.shapey.append(p[1])
hash_ = self.hash()
self.state_dict[hash_] = 1
# self.check()
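    # hash folds the whole occupancy map into one integer: a rolling polynomial
    # over the cell values in base len(self.pos) + 1, modulo 1e9 + 7. It is used
    # as the key for the repeated-state dictionary.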
def hash(self):
mod = 1000000007
num = len(self.pos) + 1
total = 0
for row in self.map:
for _ in row:
total *= num
total += int(_)
total %= mod
return total
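    # rotate recentres the shape cells on their bounding box, applies a standard
    # 2-D rotation matrix for the given radian, and returns the rotated points
    # together with their new bounding box.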
def rotate(self, points, radian):
eps = 1e-6
res_list = []
points = np.array(points).astype(np.float32)
# print(points.shape)
points[:,0] -= points[:,0].min()
points[:,1] -= points[:,1].min()
mx = points[:,0].max()
my = points[:,1].max()
points[:,0] -= 0.5 * mx + 0.5
points[:,1] -= 0.5 * my + 0.5
points = points.transpose()
rot = np.array([[cos(radian),-sin(radian)],[sin(radian), cos(radian)]])
points = np.matmul(rot,points)
points[0,:] += eps
points[1,:] += eps
# points[0,:] += 0.5 * mx - 0.5 + eps
# points[1,:] += 0.5 * my - 0.5 + eps
# points = np.round(points).astype(np.int)
xmax = points[0].max()
xmin = points[0].min()
ymax = points[1].max()
ymin = points[1].min()
# if xmin < 0:
# points[0] += 1
# if ymin < 0:
# points[1] += 1
return points.transpose(), np.array([[xmin, ymin], [xmax, ymax]])
    '''
    check whether the target state is reachable:
    0 means not accessible
    1 means accessible
    '''
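    # check defers to the compiled search routine search_transpose_poly after
    # clearing the moving object from a scratch copy of the map; the 60000-entry
    # lx/ly/lr/ld buffers receive the route, which is reversed here into
    # start-to-goal order.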
def check(self, index):
flag = True
# tx,ty = self.target[index]
# sx,sy = self.pos[index]
# shape = self.shape[index]
# cstate = self.cstate[index]
# print(cstate)
# tstate = self.tstate[index]
route = []
# ps = np.array(shape)
# px = np.array(ps[:,0],dtype=np.int32)
# py = np.array(ps[:,1],dtype=np.int32)
# num = len(ps)
lx = np.zeros([60000],dtype=np.float32)
ly = np.zeros([60000],dtype=np.float32)
lr = np.zeros([60000],dtype=np.int32)
ld = np.zeros([60000],dtype=np.int32)
length = np.array([0],dtype=np.int32)
flag = np.array([1],dtype=np.int32)
n,m = self.map_size
tx_ar = np.zeros(len(self.target),dtype=np.float32)
ty_ar = np.zeros_like(tx_ar,dtype=np.float32)
sx_ar = np.zeros_like(tx_ar,dtype=np.float32)
sy_ar = np.zeros_like(tx_ar,dtype=np.float32)
for i,p in enumerate(self.target):
tx_ar[i] = p[0]
ty_ar[i] = p[1]
for i,p in enumerate(self.pos):
sx_ar[i] = p[0]
sy_ar[i] = p[1]
tmp_map = np.array(self.map,dtype=np.int32)
shape,bbox = self.getitem(index,self.cstate[index])
sx,sy = self.pos[index]
for p in shape:
x,y = p
x += sx + EPS
y += sy + EPS
tmp_map[min(int(x),self.map_size[0]-1),min(int(y),self.map_size[1]-1)] = 0
# print('in')
# print(index)
# tx = int(tx)
# ty = int(ty)
# sx = int(sx)
# sy = int(sy)
# search_transpose(np.array(self.map,dtype=np.int32),n,m,index,tx,ty,sx,sy, cstate,tstate,self.bin,px,py,num,lx,ly,lr,ld,length,flag)
search_transpose_poly(
tmp_map, np.array(self.shapex,dtype=np.float32),np.array(self.shapey,dtype=np.float32),np.array(self.pn,dtype=np.int32),n,m,
len(self.pos),index,
tx_ar,ty_ar,
sx_ar,sy_ar,
np.array(self.cstate, dtype=np.int32), np.array(self.tstate, dtype=np.int32), self.bin,
lx, ly, lr, ld, length,
flag
)
# print('out')
# search(np.array(self.map,dtype=np.int32),n,m,index,tx,ty,sx,sy,h,w,lx,ly,length,flag)
for i in range(length[0]):
route.append((lx[i],ly[i],lr[i],ld[i]))
route.reverse()
return flag[0], route
def check_s(self, index):
flag = True
# tx,ty = self.target[index]
# sx,sy = self.pos[index]
# shape = self.shape[index]
# cstate = self.cstate[index]
# print(cstate)
# tstate = self.tstate[index]
route = []
# ps = np.array(shape)
# px = np.array(ps[:,0],dtype=np.int32)
# py = np.array(ps[:,1],dtype=np.int32)
# num = len(ps)
lx = np.zeros([60000],dtype=np.float32)
ly = np.zeros([60000],dtype=np.float32)
lr = np.zeros([60000],dtype=np.int32)
ld = np.zeros([60000],dtype=np.int32)
length = np.array([0],dtype=np.int32)
flag = np.array([1],dtype=np.int32)
n,m = self.map_size
tx_ar = np.zeros(len(self.target),dtype=np.float32)
ty_ar = np.zeros_like(tx_ar,dtype=np.float32)
sx_ar = np.zeros_like(tx_ar,dtype=np.float32)
sy_ar = np.zeros_like(tx_ar,dtype=np.float32)
for i,p in enumerate(self.target):
tx_ar[i] = p[0]
ty_ar[i] = p[1]
for i,p in enumerate(self.pos):
sx_ar[i] = p[0]
sy_ar[i] = p[1]
tmp_map = np.array(self.map,dtype=np.int32)
shape,bbox = self.getitem(index,self.cstate[index])
sx,sy = self.pos[index]
for p in shape:
x,y = p
x += sx + EPS
y += sy + EPS
tmp_map[min(int(x),self.map_size[0]-1),min(int(y),self.map_size[1]-1)] = 0
# print('in')
# print(index)
# tx = int(tx)
# ty = int(ty)
# sx = int(sx)
# sy = int(sy)
# search_transpose(np.array(self.map,dtype=np.int32),n,m,index,tx,ty,sx,sy, cstate,tstate,self.bin,px,py,num,lx,ly,lr,ld,length,flag)
search_transpose_poly_rot(
tmp_map, np.array(self.shapex,dtype=np.float32),np.array(self.shapey,dtype=np.float32),np.array(self.pn,dtype=np.int32),n,m,
len(self.pos),index,
tx_ar,ty_ar,
sx_ar,sy_ar,
np.array(self.cstate, dtype=np.int32), np.array(self.tstate, dtype=np.int32), self.bin,
lx, ly, lr, ld, length,
flag
)
# print('out')
# search(np.array(self.map,dtype=np.int32),n,m,index,tx,ty,sx,sy,h,w,lx,ly,length,flag)
for i in range(length[0]):
route.append((lx[i],ly[i],lr[i],ld[i]))
route.reverse()
return flag[0], route
def equal(self, a, b):
return fabs(a[0]-b[0])+fabs(a[1]-b[1]) < 1e-3
def route_length(self, route):
res = 0
lx = None
ly = None
for _ in route:
x,y,_,_ = _
if lx is not None:
res += abs(lx-x) + abs(ly-y)
lx = x
ly = y
return res
    '''
    returns (reward, finish_flag); finish_flag == 1 marks the finished state
    '''
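    # Reward sketch for this ablation's move: base starts at -1 and illegal
    # actions return -500; sliding distance and planned-route length each add a
    # -0.5 * log10(length) shaping term; revisited states cost nothing (the
    # penalty is forced to 0 below); placing an object gives +4 (+2 if it was
    # already placed once) and completing the whole task gives +50.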
def move(self, index, direction):
base = -1
if index >= len(self.pos):
return -500, -1
if len(self.shape[index]) == 0:
return -500, -1
map_size = self.map_size
# size = self.size[index]
target = self.target[index]
pos = self.pos[index]
cstate = self.cstate[index]
tstate = self.tstate[index]
shape, bbox = self.getitem(index, cstate)
n,m = self.map_size
sx_ar = np.zeros(len(self.target),dtype=np.float32)
sy_ar = np.zeros_like(sx_ar,dtype=np.float32)
steps_ar = np.zeros(1,dtype=np.int32)
for i,p in enumerate(self.pos):
sx_ar[i] = p[0]
sy_ar[i] = p[1]
if direction < 4:
xx, yy = pos
# xx = max(0,xx - 0.5*(bbox[1][0]-bbox[0][0]+1))
# yy = max(0,yy - 0.5*(bbox[1][1]-bbox[0][1]+1))
            if self.equal(pos, target) and cstate == tstate: # discourage moving an already-placed object; otherwise the agent can oscillate around the destination
base += -4
translate(
np.array(self.map,dtype=np.int32),np.array(self.shapex,dtype=np.float32),np.array(self.shapey,dtype=np.float32),np.array(self.pn,dtype=np.int32),n,m,
len(self.pos),index,direction,
sx_ar,sy_ar,
np.array(self.cstate, dtype=np.int32), np.array(self.tstate, dtype=np.int32), self.bin,
steps_ar
)
steps = steps_ar[0]
x = xx + steps * _x[direction]
y = yy + steps * _y[direction]
self.last_steps = steps
if steps == 0:
return -500, -1
else:
base += -0.5 * log(steps) / log(10)
for p in shape:
self.map[min(int(xx+p[0]+EPS),self.map_size[0]-1),min(int(yy+p[1]+EPS),self.map_size[1]-1)] = 0
for p in shape:
self.map[min(int(x+p[0]+EPS),self.map_size[0]-1),min(int(y+p[1]+EPS),self.map_size[1]-1)] = index + 2
# self.map[xx:xx+size[0],yy:yy+size[1]] = 0
# self.map[x:x+size[0],y:y+size[1]] = index + 2
# x = x + 0.5*(bbox[1][0]-bbox[0][0]+1)
# y = y + 0.5*(bbox[1][1]-bbox[0][1]+1)
self.pos[index] = (x, y)
hash_ = self.hash() # get the hash value of the current state
if hash_ in self.state_dict: # if the current state happened before
                    penalty = self.state_dict[hash_]
                    self.state_dict[hash_] += 1
                    penalty = 0  # ablation: the repeated-state penalty is disabled here
                    return base - 2 * penalty, 0
self.state_dict[hash_] = 1
if fabs(x-target[0]) + fabs(y-target[1]) < 1e-6 and cstate == tstate:
if self.finished[index] == 1:
return base + 2, 0
else:
self.finished[index] = 1
return base + 4, 0
# return base + 500, 0
# return base + 200, 0
return base, 0
# for i in range(size[0]):
# for j in range(size[1]):
# self.map[target[0]+i, target[1]+j] = 1
# if it has already reached the target place.
if self.equal(pos, target) and cstate == tstate:
return -500, -1
# check if legal, calc the distance
flag, route = self.check(index)
if flag == 1:
xx, yy = pos
x,y = target
self.route = route
length = self.route_length(self.route)
if length != 0:
base += -0.5 * log(length) / log(10)
# xx = max(0,xx - 0.5*(bbox[1][0]-bbox[0][0]+1))
# yy = max(0,yy - 0.5*(bbox[1][1]-bbox[0][1]+1))
tshape, bbox = self.getitem(index, tstate)
# x = max(0,x - 0.5*(bbox[1][0]-bbox[0][0]+1))
# y = max(0,y - 0.5*(bbox[1][1]-bbox[0][1]+1))
for p in shape:
tx = max(0,min(xx+p[0],self.map_size[0]-1)) + EPS
ty = max(0,min(yy+p[1],self.map_size[1]-1)) + EPS
self.map[min(int(tx),self.map_size[0]-1),min(int(ty),self.map_size[1]-1)] = 0
# self.map[xx:xx+size[0], yy:yy+size[1]] = 0
for p in tshape:
tx = max(0,min(x+p[0],self.map_size[0]-1)) + EPS
ty = max(0,min(y+p[1],self.map_size[1]-1)) + EPS
self.map[min(int(tx),self.map_size[0]-1), min(int(ty),self.map_size[1]-1)] = index + 2
self.pos[index] = target
self.cstate[index] = tstate
hash_ = self.hash() # get the hash value of the current state
if hash_ in self.state_dict: # if the current state happened before
                penalty = self.state_dict[hash_]
                self.state_dict[hash_] += 1
                penalty = 0  # ablation: the repeated-state penalty is disabled here
                return base - 2 * penalty, 0
ff = True
for i,v in enumerate(self.pos): # check if the task is done
w = self.target[i]
if fabs(v[0]-w[0])+fabs(v[1]-w[1]) > 1e-6 or self.cstate[i] != self.tstate[i]:
ff = False
break
if ff:
self.finished[index] = 1
# return base + 10000, 1
return base + 50, 1
else:
if self.finished[index] == 1: # if the object was placed before
return base + 2, 0
else:
self.finished[index] = 1
return base + 4, 0
# return base + 500, 0
# return base + 200, 0
# elif flag == -1:
# return -1000, 1
else:
# return -500, -1
return base, -1
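    # move_s is the sparse large-constant counterpart of move: -600 for a
    # repeated state, +2000 for placing an object, +100000 for completing the
    # task, and no log-distance shaping.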
def move_s(self, index, direction):
base = -10
if index >= len(self.pos):
return -500, -1
if len(self.shape[index]) == 0:
return -500, -1
map_size = self.map_size
# size = self.size[index]
target = self.target[index]
pos = self.pos[index]
cstate = self.cstate[index]
tstate = self.tstate[index]
shape, bbox = self.getitem(index, cstate)
n,m = self.map_size
sx_ar = np.zeros(len(self.target),dtype=np.float32)
sy_ar = np.zeros_like(sx_ar,dtype=np.float32)
steps_ar = np.zeros(1,dtype=np.int32)
for i,p in enumerate(self.pos):
sx_ar[i] = p[0]
sy_ar[i] = p[1]
        if self.equal(pos, target) and cstate == tstate: # heavy penalty for moving an already-placed object
# base += -60
base += -600
if direction < 4:
xx, yy = pos
# xx = max(0,xx - 0.5*(bbox[1][0]-bbox[0][0]+1))
# yy = max(0,yy - 0.5*(bbox[1][1]-bbox[0][1]+1))
translate(
np.array(self.map,dtype=np.int32),np.array(self.shapex,dtype=np.float32),np.array(self.shapey,dtype=np.float32),np.array(self.pn,dtype=np.int32),n,m,
len(self.pos),index,direction,
sx_ar,sy_ar,
np.array(self.cstate, dtype=np.int32), np.array(self.tstate, dtype=np.int32), self.bin,
steps_ar
)
steps = steps_ar[0]
x = xx + steps * _x[direction]
y = yy + steps * _y[direction]
if steps == 0:
return -500, -1
else:
for p in shape:
self.map[min(int(xx+p[0]+EPS),self.map_size[0]-1),min(int(yy+p[1]+EPS),self.map_size[1]-1)] = 0
for p in shape:
self.map[min(int(x+p[0]+EPS),self.map_size[0]-1),min(int(y+p[1]+EPS),self.map_size[1]-1)] = index + 2
# self.map[xx:xx+size[0],yy:yy+size[1]] = 0
# self.map[x:x+size[0],y:y+size[1]] = index + 2
# x = x + 0.5*(bbox[1][0]-bbox[0][0]+1)
# y = y + 0.5*(bbox[1][1]-bbox[0][1]+1)
self.pos[index] = (x, y)
hash_ = self.hash() # get the hash value of the current state
if hash_ in self.state_dict: # if the current state happened before
return -600, 0
self.state_dict[hash_] = 1
if fabs(x-target[0]) + fabs(y-target[1]) < 1e-6 and cstate == tstate:
if self.finished[index] == 1:
# return base + 50, 0
return base + 500, 0
else:
self.finished[index] = 1
return base + 2000, 0
# return base + 500, 0
# return base + 200, 0
return base, 0
# for i in range(size[0]):
# for j in range(size[1]):
# self.map[target[0]+i, target[1]+j] = 1
# if it has already reached the target place.
if self.equal(pos, target) and cstate == tstate:
return -500, -1
# check if legal, calc the distance
flag, route = self.check_s(index)
if flag == 1:
xx, yy = pos
x,y = target
self.route = route
# xx = max(0,xx - 0.5*(bbox[1][0]-bbox[0][0]+1))
# yy = max(0,yy - 0.5*(bbox[1][1]-bbox[0][1]+1))
tshape, bbox = self.getitem(index, tstate)
# x = max(0,x - 0.5*(bbox[1][0]-bbox[0][0]+1))
# y = max(0,y - 0.5*(bbox[1][1]-bbox[0][1]+1))
for p in shape:
tx = max(0,min(xx+p[0],self.map_size[0]-1)) + EPS
ty = max(0,min(yy+p[1],self.map_size[1]-1)) + EPS
self.map[min(int(tx),self.map_size[0]-1),min(int(ty),self.map_size[1]-1)] = 0
# self.map[xx:xx+size[0], yy:yy+size[1]] = 0
for p in tshape:
tx = max(0,min(x+p[0],self.map_size[0]-1)) + EPS
ty = max(0,min(y+p[1],self.map_size[1]-1)) + EPS
self.map[min(int(tx),self.map_size[0]-1), min(int(ty),self.map_size[1]-1)] = index + 2
self.pos[index] = target
self.cstate[index] = tstate
hash_ = self.hash() # get the hash value of the current state
if hash_ in self.state_dict: # if the current state happened before
# return -40, 0
return -600, 0
ff = True
for i,v in enumerate(self.pos): # check if the task is done
w = self.target[i]
if fabs(v[0]-w[0])+fabs(v[1]-w[1]) > 1e-6 or self.cstate[i] != self.tstate[i]:
ff = False
break
if ff:
self.finished[index] = 1
# return base + 10000, 1
return base + 100000, 1
else:
if self.finished[index] == 1: # if the object was placed before
# return base + 50, 0
return base + 500, 0
else:
self.finished[index] = 1
return base + 2000, 0
# return base + 500, 0
# return base + 200, 0
# elif flag == -1:
# return -1000, 1
else:
# return -500, -1
return base, -1
def move_p(self, index, direction):
base = -1
if index >= len(self.pos):
return -500, -1
if len(self.shape[index]) == 0:
return -500, -1
map_size = self.map_size
# size = self.size[index]
target = self.target[index]
pos = self.pos[index]
cstate = self.cstate[index]
tstate = self.tstate[index]
shape, bbox = self.getitem(index, cstate)
n,m = self.map_size
sx_ar = np.zeros(len(self.target),dtype=np.float32)
sy_ar = np.zeros_like(sx_ar,dtype=np.float32)
steps_ar = np.zeros(1,dtype=np.int32)
for i,p in enumerate(self.pos):
sx_ar[i] = p[0]
sy_ar[i] = p[1]
if direction < 4:
xx, yy = pos
# xx = max(0,xx - 0.5*(bbox[1][0]-bbox[0][0]+1))
# yy = max(0,yy - 0.5*(bbox[1][1]-bbox[0][1]+1))
            if self.equal(pos, target) and cstate == tstate: # discourage moving an already-placed object; otherwise the agent can oscillate around the destination
base += -4
translate(
np.array(self.map,dtype=np.int32),np.array(self.shapex,dtype=np.float32),np.array(self.shapey,dtype=np.float32),np.array(self.pn,dtype=np.int32),n,m,
len(self.pos),index,direction,
sx_ar,sy_ar,
np.array(self.cstate, dtype=np.int32), np.array(self.tstate, dtype=np.int32), self.bin,
steps_ar
)
steps = steps_ar[0]
x = xx + steps * _x[direction]
y = yy + steps * _y[direction]
if steps == 0:
return -500, -1
else:
for p in shape:
self.map[min(int(xx+p[0]+EPS),self.map_size[0]-1),min(int(yy+p[1]+EPS),self.map_size[1]-1)] = 0
for p in shape:
self.map[min(int(x+p[0]+EPS),self.map_size[0]-1),min(int(y+p[1]+EPS),self.map_size[1]-1)] = index + 2
# self.map[xx:xx+size[0],yy:yy+size[1]] = 0
# self.map[x:x+size[0],y:y+size[1]] = index + 2
# x = x + 0.5*(bbox[1][0]-bbox[0][0]+1)
# y = y + 0.5*(bbox[1][1]-bbox[0][1]+1)
self.pos[index] = (x, y)
hash_ = self.hash() # get the hash value of the current state
if hash_ in self.state_dict: # if the current state happened before
                    penalty = self.state_dict[hash_]
                    self.state_dict[hash_] += 1
                    penalty = 1  # note: overrides the visit count with a constant penalty of 1
                    return base - 2 * penalty, 0
self.state_dict[hash_] = 1
if fabs(x-target[0]) + fabs(y-target[1]) < 1e-6 and cstate == tstate:
if self.finished[index] == 1:
return base + 2, 0
else:
self.finished[index] = 1
return base + 4, 0
# return base + 500, 0
# return base + 200, 0
return base, 0
# for i in range(size[0]):
# for j in range(size[1]):
# self.map[target[0]+i, target[1]+j] = 1
# if it has already reached the target place.
if self.equal(pos, target) and cstate == tstate:
return -500, -1
# check if legal, calc the distance
flag, route = self.check_s(index)
if flag == 1:
xx, yy = pos
x,y = target
self.route = route
# xx = max(0,xx - 0.5*(bbox[1][0]-bbox[0][0]+1))
# yy = max(0,yy - 0.5*(bbox[1][1]-bbox[0][1]+1))
tshape, bbox = self.getitem(index, tstate)
# x = max(0,x - 0.5*(bbox[1][0]-bbox[0][0]+1))
# y = max(0,y - 0.5*(bbox[1][1]-bbox[0][1]+1))
for p in shape:
tx = max(0,min(xx+p[0],self.map_size[0]-1)) + EPS
ty = max(0,min(yy+p[1],self.map_size[1]-1)) + EPS
self.map[min(int(tx),self.map_size[0]-1),min(int(ty),self.map_size[1]-1)] = 0
# self.map[xx:xx+size[0], yy:yy+size[1]] = 0
for p in tshape:
tx = max(0,min(x+p[0],self.map_size[0]-1)) + EPS
ty = max(0,min(y+p[1],self.map_size[1]-1)) + EPS
self.map[min(int(tx),self.map_size[0]-1), min(int(ty),self.map_size[1]-1)] = index + 2
self.pos[index] = target
self.cstate[index] = tstate
hash_ = self.hash() # get the hash value of the current state
if hash_ in self.state_dict: # if the current state happened before
                penalty = self.state_dict[hash_]
                self.state_dict[hash_] += 1
                penalty = 1  # note: overrides the visit count with a constant penalty of 1
                return base - 2 * penalty, 0
ff = True
for i,v in enumerate(self.pos): # check if the task is done
w = self.target[i]
if fabs(v[0]-w[0])+fabs(v[1]-w[1]) > 1e-6 or self.cstate[i] != self.tstate[i]:
ff = False
break
if ff:
self.finished[index] = 1
# return base + 10000, 1
return base + 50, 1
else:
if self.finished[index] == 1: # if the object was placed before
return base + 2, 0
else:
self.finished[index] = 1
return base + 4, 0
# return base + 500, 0
# return base + 200, 0
# elif flag == -1:
# return -1000, 1
else:
# return -500, -1
return base, -1
def move_debug(self, index, direction):
base = -10
if index >= len(self.pos):
print('index error')
return -500, -1
map_size = self.map_size
# size = self.size[index]
target = self.target[index]
pos = self.pos[index]
cstate = self.cstate[index]
tstate = self.tstate[index]
shape,bbox = self.getitem(index, cstate)
        if self.equal(pos, target) and cstate == tstate: # use equal(): comparing NumPy arrays with == is ambiguous in a bool context
base += -60
if direction < 4:
xx, yy = pos
print('xx',xx,'yy',yy)
steps = 1
while True:
x = xx + steps*_x[direction]
y = yy + steps*_y[direction]
if x < 0 or x >= map_size[0] or y < 0 or y >= map_size[1] :
steps -= 1
print('early done')
break
# return -500, -1
flag = True
for p in shape:
if x+p[0] < 0 or x+p[0] >= map_size[0] or y+p[1] < 0 or y+p[1] >= map_size[1]:
flag = False
print('over bound',x+p[0],)
break
if self.map[x+p[0],y+p[1]] != 0:
print('collide!!',self.map[x+p[0],y+p[1]],index+2)
flag = False
break
if not flag:
steps -= 1
break
steps += 1
x = xx + steps*_x[direction]
y = yy + steps*_y[direction]
if steps == 0:
print('cannot move error')
return -500, -1
else:
for p in shape:
self.map[xx+p[0],yy+p[1]] = 0
for p in shape:
self.map[x+p[0],y+p[1]] = index + 2
# self.map[xx:xx+size[0],yy:yy+size[1]] = 0
# self.map[x:x+size[0],y:y+size[1]] = index + 2
self.pos[index] = (x, y)
hash_ = self.hash() # get the hash value of the current state
if hash_ in self.state_dict: # if the current state happened before
return -600, 0
self.state_dict[hash_] = 1
if int(x) == int(target[0]) and int(y) == int(target[1]) and cstate == tstate:
if self.finished[index] == 1:
return base + 50, 0
else:
self.finished[index] = 1
return base + 500, 0
# return base + 200, 0
return base, 0
# for i in range(size[0]):
# for j in range(size[1]):
# self.map[target[0]+i, target[1]+j] = 1
# if it has already reached the target place.
        if self.equal(pos, target) and cstate == tstate:
print('position error')
return -500, -1
# check if legal, calc the distance
flag, route = self.check(index)
if flag == 1:
xx, yy = pos
x,y = target
self.route = route
tshape,bbox = self.getitem(index,tstate)
for p in shape:
self.map[xx+p[0],yy+p[1]] = 0
# self.map[xx:xx+size[0], yy:yy+size[1]] = 0
for p in tshape:
self.map[x+p[0], y+p[1]] = index + 2
self.pos[index] = target
self.cstate[index] = tstate
hash_ = self.hash() # get the hash value of the current state
if hash_ in self.state_dict: # if the current state happened before
return -40, 0
ff = True
for i,v in enumerate(self.pos): # check if the task is done
w = self.target[i]
                if not self.equal(v, w) or self.cstate[i] != self.tstate[i]:
ff = False
break
if ff:
self.finished[index] = 1
return base + 10000, 1
else:
if self.finished[index] == 1: # if the object was placed before
return base + 50, 0
else:
self.finished[index] = 1
return base + 500, 0
# return base + 200, 0
# elif flag == -1:
# return -1000, 1
else:
print('path find error')
return -500, -1
def getlastroute(self):
return self.route
def printmap(self):
with open('op.txt','a') as fp:
for _ in self.map:
for __ in _:
fp.write('%d '%__)
fp.write('\n')
fp.write('\n')
def build_wall(self):
size = self.map_size
p = 0.005
q = 0.05
dix = [1,0,-1,0]
diy = [0,1,0,-1]
while True:
px = np.random.randint(size[0])
py = np.random.randint(size[1])
if fabs(px-size[0]/2) + fabs(py-size[1]/2) > (size[0]+size[1])/4:
break
d = int(px < size[0]/2) + int(py < size[1]/2)*2
if d == 3:
d = 2
elif d == 2:
d = 3
l = 0
wall_list = []
while True:
cnt = 0
while True:
tx = px + dix[d]
ty = py + diy[d]
cnt += 1
if cnt > 10:
break
if tx == size[0] or tx == -1 or ty == size[1] or ty == -1:
d = np.random.randint(4)
continue
break
if tx >= 0 and tx < size[0] and ty >= 0 and ty < size[1]:
px = tx
py = ty
if not (px,py) in wall_list :
wall_list.append((px,py))
r = np.random.rand()
if r < q:
t = np.random.rand()
if t < 0.9:
d = (d+1)%4
else:
d = np.random.randint(4)
r = np.random.rand()
if r < p and l > 100:
break
l += 1
return wall_list
def randominit(self,num=5):
import random
size = self.map_size
# print(size)
pos_list = []
size_list = []
target_list = []
for i in range(num):
while True:
px = np.random.randint(size[0])
py = np.random.randint(size[1])
flag = True
for _ in pos_list:
dx,dy = _
if dx == px or dy == py:
flag = False
break
if flag:
pos_list.append((px,py))
break
while True:
px = np.random.randint(size[0])
py = np.random.randint(size[1])
flag = True
for _ in target_list:
dx,dy = _
if dx == px or dy == py:
flag = False
break
if flag:
target_list.append((px,py))
break
random.shuffle(pos_list)
random.shuffle(target_list)
for i in range(num):
px, py = pos_list[i]
tx, ty = target_list[i]
while True:
dx = np.random.randint(1,11)
dy = np.random.randint(1,11)
if px + dx > size[0] or py + dy > size[1] or tx + dx > size[0] or ty + dy > size[1]:
# print(px,py,dx,dy)
continue
flag = True
for j in range(num):
if j == i:
continue
px_, py_ = pos_list[j]
tx_, ty_ = target_list[j]
if j > len(size_list) - 1:
dx_, dy_ = 1, 1
else:
dx_, dy_ = size_list[j]
lx = max(px, px_)
ly = max(py, py_)
rx = min(px + dx, px_ + dx_) - 1
ry = min(py + dy, py_ + dy_) - 1
if lx <= rx and ly <= ry:
flag = False
break
lx = max(tx, tx_)
ly = max(ty, ty_)
rx = min(tx + dx, tx_ + dx_) - 1
ry = min(ty + dy, ty_ + dy_) - 1
if lx <= rx and ly <= ry:
flag = False
break
if flag:
size_list.append((dx,dy))
break
# list_ =[(pair[0],(dx,dy)),(pair[1],(dx_,dy_))
# random.shuffle(list_)
# for i in list_:
# pos_list.append(i[0])
# size_list.append(i[1])
self.setmap(pos_list,target_list,size_list)
# for i in range(size[0]):
# for j in range(size[1]):
# for i in range(size[0]):
# for j in range(size[1]):
def randominit_crowded(self,num=5):
import random
size = self.map_size
# print(size)
while True:
reboot = False
pos_list = []
size_list = []
target_list = []
# self.finished = np.zeros(num)
tmp = self.build_wall()
# tmp += self.build_wall()
# tmp += self.build_wall()
            tmp.sort()  # sort in place so duplicate wall cells become adjacent; bare sorted() discards its result
wall_list = []
l = None
map_ = np.zeros(self.map_size)
for t in tmp:
if t != l:
l = t
map_[l[0],l[1]] = 1
wall_list.append(t)
def dfs(p,tp):
gx = [1,0,-1,0]
gy = [0,1,0,-1]
stack = []
stack.append(p)
mark_ = np.zeros(self.map_size)
                while stack:
flag = True
p = stack.pop()
if mark_[p[0],p[1]] == 1:
continue
mark_[p[0],p[1]] = 1
if p[0] == tp[0] and p[1] == tp[1]:
break
for i in range(4):
x = p[0] + gx[i]
y = p[1] + gy[i]
if x >= 0 and x < self.map_size[0] and y >= 0 and y < self.map_size[1] and map_[x,y] != 1 and mark_[x,y] == 0:
stack.append((x,y))
return mark_[tp[0],tp[1]] == 1
for i in range(num):
while True:
px = np.random.randint(1,size[0]-3)
py = np.random.randint(1,size[1]-3)
flag = True
hh = [-1,0,1]
for _ in pos_list:
dx, dy = _
if dx == px or dy == py or abs(dx - px) + abs(dy - py) < 5:
flag = False
for k in hh:
for l in hh:
xx = px + k
yy = py + l
if dx == xx and dy == yy:
flag = False
break
if flag == False:
break
if flag == False:
break
for _ in wall_list:
if flag == False:
break
dx,dy = _
if dx == px and dy == py:
flag = False
break
for k in hh:
for l in hh:
xx = px + k
yy = py + l
if dx == xx and dy == yy:
flag = False
break
if flag == False:
break
if flag == False:
break
if flag:
pos_list.append((px,py))
break
reboot_cnt = 0
while reboot_cnt < 10000:
reboot_cnt += 1
px = np.random.randint(1,size[0]-3)
py = np.random.randint(1,size[1]-3)
# flag = True
flag = dfs((px,py),pos_list[-1])
# print(flag)
for _ in target_list:
dx, dy = _
if dx == px or dy == py or abs(dx-px)+abs(dy-py) < 5:
flag = False
break
for k in hh:
for l in hh:
xx = px + k
yy = py + l
if dx == xx and dy == yy:
flag = False
break
if flag == False:
break
if flag == False:
break
for _ in wall_list:
if flag == False:
break
dx,dy = _
if dx == px and dy == py:
flag = False
break
for k in hh:
for l in hh:
xx = px + k
yy = py + l
if dx == xx and dy == yy:
flag = False
break
if flag == False:
break
if flag == False:
break
if flag:
target_list.append((px,py))
break
if reboot_cnt >= 10000:
reboot = True
break
if reboot:
continue
# random.shuffle(pos_list)
# random.shuffle(target_list)
sub_num = self.max_num - num
# print(self.max_num, num)
pos_s = [ (_, i) for i, _ in enumerate(pos_list)]
for _ in range(sub_num):
pos_s.append(((0,0),-1))
# print(len(pos_list))
            random.shuffle(pos_s)  # shuffle the padded (position, id) pairs; pos_list is rebuilt from pos_s below
pos_list = np.array([_[0] for _ in pos_s])
tmp_target_list = np.zeros_like(pos_list)
# random.shuffle(target_list)
_ = 0
# for target in target_list:
# while pos_list[_][0] == pos_list[_][1] and pos_list[_][0] == 0:
# _ += 1
# tmp_target_list[_] = target
# _ += 1
for i,_ in enumerate(pos_s):
p, id_ = _
if p[0] == p[1] and p[0] == 0:
continue
tmp_target_list[i] = target_list[id_]
target_list = tmp_target_list
def dfs(p,tp,size):
gx = [1,0,-1,0]
gy = [0,1,0,-1]
dx, dy = size
stack = []
stack.append(p)
mark_ = np.zeros(self.map_size)
                while stack:
flag = True
p = stack.pop()
if mark_[p[0],p[1]] == 1:
continue
mark_[p[0],p[1]] = 1
if p[0] == tp[0] and p[1] == tp[1]:
break
for i in range(4):
x = p[0] + gx[i]
y = p[1] + gy[i]
if x >= 0 and x + dx - 1 < self.map_size[0] and y >= 0 and y + dy - 1 < self.map_size[1] and map_[x,y] != 1 and mark_[x,y] == 0:
flag = True
for j in range(dx):
for k in range(dy):
if map_[x+j,y+k] == 1:
flag = False
break
if flag == False:
break
if flag:
stack.append((x,y))
return mark_[tp[0],tp[1]] == 1
for i in range(self.max_num):
px, py = pos_list[i]
tx, ty = target_list[i]
if px == py and py == 0:
size_list.append((0,0))
continue
reboot_cnt = 0
while reboot_cnt < 10000:
reboot_cnt += 1
dx = np.random.randint(2,31)
dy = np.random.randint(2,31)
if px + dx > size[0] or py + dy > size[1] or tx + dx > size[0] or ty + dy > size[1]:
# print(px,py,dx,dy)
continue
flag = True
for j in range(self.max_num):
if j == i:
continue
px_, py_ = pos_list[j]
tx_, ty_ = target_list[j]
if px_ == py_ and py_ == 0:
# size_list.append((0,0))
continue
if j > len(size_list) - 1:
dx_, dy_ = 2, 2
else:
dx_, dy_ = size_list[j]
lx = max(px, px_)
ly = max(py, py_)
rx = min(px + dx, px_ + dx_) - 1
ry = min(py + dy, py_ + dy_) - 1
if lx <= rx and ly <= ry:
flag = False
break
lx = max(tx, tx_)
ly = max(ty, ty_)
rx = min(tx + dx, tx_ + dx_) - 1
ry = min(ty + dy, ty_ + dy_) - 1
if lx <= rx and ly <= ry:
flag = False
break
# if flag:
# flag = dfs(pos_list[i],target_list[i],(dx,dy))
for wall in wall_list:
if not flag:
break
px_, py_ = wall
tx_, ty_ = wall
dx_, dy_ = 1, 1
lx = max(px, px_)
ly = max(py, py_)
rx = min(px + dx, px_ + dx_) - 1
ry = min(py + dy, py_ + dy_) - 1
if lx <= rx and ly <= ry:
flag = False
break
lx = max(tx, tx_)
ly = max(ty, ty_)
rx = min(tx + dx, tx_ + dx_) - 1
ry = min(ty + dy, ty_ + dy_) - 1
if lx <= rx and ly <= ry:
flag = False
break
if flag:
size_list.append((dx,dy))
break
if reboot_cnt >= 10000:
reboot = True
break
if reboot:
continue
shapes = []
for i in range(self.max_num):
x,y = pos_list[i]
w,d = size_list[i]
pos_list[i] = (x+0.5*w,y+0.5*d)
x,y = target_list[i]
target_list[i] = (x+0.5*w,y+0.5*d)
shape = []
for a in range(w):
for b in range(d):
shape.append((a,b))
shapes.append(shape)
self.setmap(pos_list,target_list,shapes,np.zeros(len(pos_list)),np.zeros(len(pos_list)),wall_list)
break
def getstate_1(self,shape=[64,64]):
state = []
# temp = np.zeros(shape).astype(np.float32)
# tmap = self.map == 1
# oshape = self.map.shape
# for i in range(shape[0]):
# x = int(1.0 * i * oshape[0] / shape[0])
# for j in range(shape[1]):
# y = int(1.0 * j * oshape[1] / shape[1])
# temp[i,j] = tmap[x,y]
# for i in range(oshape[0]):
# x = 1.0*i/oshape[0]*shape[0]
# for j in range(oshape[1]):
# y = 1.0*j/oshape[1]*shape[1]
# temp[x,y] = tmap[i,j]
state.append((self.map == 1).astype(np.float32))
# state.append(temp)
for i in range(len(self.pos)):
# temp = np.zeros(shape).astype(np.float32)
# tmap = self.map == (i+2)
# for i in range(shape[0]):
# x = int(1.0 * i * oshape[0] / shape[0])
# for j in range(shape[1]):
# y = int(1.0 * j * oshape[1] / shape[1])
# temp[i,j] = tmap[x,y]
# state.append(temp)
# temp = np.zeros(shape).astype(np.float32)
# tmap = self.target_map == (i+2)
# for i in range(shape[0]):
# x = int(1.0 * i * oshape[0] / shape[0])
# for j in range(shape[1]):
# y = int(1.0 * j * oshape[1] / shape[1])
# temp[i,j] = tmap[x,y]
# state.append(temp)
state.append((self.map == (i+2)).astype(np.float32))
state.append((self.target_map == (i+2)).astype(np.float32))
return np.transpose(np.array(state),[1,2,0])
def getstate_2(self, index):
state = []
obs = (self.map == 1).astype(np.float32)
cho = (self.map == (index+2)).astype(np.float32)
cho_t = (self.target_map == (index+2)).astype(np.float32)
        oth = np.zeros_like(obs).astype(bool)
        oth_t = np.zeros_like(obs).astype(bool)
for i in range(len(self.pos)):
            if i != index:  # skip the chosen object itself; i indexes objects, map cells store i + 2
oth |= self.map == (i+2)
oth_t |= self.target_map == (i+2)
# state.append((self.map == (i+1)).astype(np.float32))
state = [obs,cho,cho_t,oth,oth_t]
return np.transpose(np.array(state),[1,2,0])
def getstate_3(self,shape=[64,64]):
state = []
state.append(np.array(self.map).astype(np.float32))
state.append(np.array(self.target_map).astype(np.float32))
return np.transpose(np.array(state),[1,2,0])
def getmap(self):
return np.array(self.map)
def gettargetmap(self):
return self.target_map
def getconfig(self):
return (self.pos,self.target,self.shape,self.cstate,self.tstate,self.wall)
def getfinished(self):
return deepcopy(self.finished)
def getconflict(self):
num = len(self.finished)
res = np.zeros([num,num,2])
# size = np.zeros([num,2])
# for i,shape in enumerate(self.shape):
# size[i]
        return res  # NOTE: returns all zeros; the interval-overlap code below is unreachable
for axis in range(2):
for i in range(num):
l = np.min([self.pos[i][axis],self.target[i][axis]])
r = np.max([self.pos[i][axis],self.target[i][axis]]) + self.size[i][axis] - 1
for j in range(num):
if (self.pos[j][axis] >= l and self.pos[j][axis] <= r) or (self.pos[j][axis] + self.size[j][axis] >= l and self.pos[j][axis] + self.size[j][axis] <= r):
res[i,j,axis] = 1
return res
def getitem(self, index, state):
shape = self.shape[index]
opoints = np.array(shape)
# for i in range(size[0]):
# for j in range(size[1]):
# opoints.append((i,j))
radian = 2 * pi * state / self.bin
points, bbox = self.rotate(opoints, radian)
return points, bbox
def getcleanmap(self, index):
start = self.pos[index]
# size = self.size[index]
cstate = self.cstate[index]
res = np.array(self.map)
# shape = self.shape[index]
# opoints = np.array(shape)
# opoints = []
# for i in range(size[0]):
# for j in range(size[1]):
# opoints.append((i,j))
points, bbox = self.getitem(index, cstate)
# radian = 2 * pi * cstate / self.bin
# points, bbox = self.rotate(opoints, radian)
xx, yy = start
# xx = max(0, xx - 0.5*(bbox[1,0]-bbox[0,0]+1))
# yy = max(0, yy - 0.5*(bbox[1,1]-bbox[0,1]+1))
for p in points:
x, y = p
x += xx
y += yy
            if x >= self.map_size[0] or y >= self.map_size[1]:
                print('wrong coords', x, y)
tx = max(0,min(x,self.map_size[0])) + EPS
ty = max(0,min(y,self.map_size[1])) + EPS
res[min(int(tx),self.map_size[0]-1),min(int(ty),self.map_size[1]-1)] = 0
# x = min(x, self.map_size[0]-1)
# y = min(y, self.map_size[1]-1)
# res[int(x), int(y)] = 0
return res
def __deepcopy__(self,memodict={}):
res = ENV_ablation_wo_repetition(self.map_size,self.max_num)
res.map = np.array(self.map)
res.target_map = np.array(self.target_map)
res.pos = np.array(self.pos)
res.target = self.target
res.shape = self.shape
res.cstate = np.array(self.cstate)
res.tstate = self.tstate
res.wall = self.wall
res.state_dict = deepcopy(self.state_dict)
res.finished = np.array(self.finished)
res.shapex = self.shapex
res.shapey = self.shapey
res.pn = self.pn
res.edge = self.edge
res.bound = self.bound
return res
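# Third ablation variant ("wo_multi"); the setup, hashing, and rotation code
# matches the classes above, with the reward constants presumably adjusted in
# its move methods.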
class ENV_ablation_wo_multi: # if current state happened before, give a penalty.
def __init__(self, size=(5,5),max_num=5):
self.map_size = size
self.map = np.zeros(self.map_size)
self.target_map = np.zeros(self.map_size)
self.route = []
self.bin = 24
self.max_num = max_num
"""
params
pos:
a list of the left bottom point of the furniture
size:
a list of the size of the furniture
"""
def setmap(self, pos, target, shape, cstate, tstate, wall=[]):
self.map = np.zeros(self.map_size)
self.target_map = np.zeros(self.map_size)
self.pos = deepcopy(np.array(pos))
self.shape = deepcopy(shape)
self.target = np.array(target)
        self.cstate = np.array(cstate).astype(np.int32)
        self.tstate = np.array(tstate).astype(np.int32)
self.wall = deepcopy(wall)
self.dis = np.zeros(len(pos))
self.route = []
self.state_dict = {}
self.finished = np.zeros(len(pos))
self.shapex = []
self.shapey = []
# self.boundx = []
# self.boundy = []
self.pn = []
# self.bn = []
self.edge = []
self.bound = []
# print(shape)
# for bd in self.bound:
# for sh in self.shape:
# print(len(sh))
# for p in self.pos:
# print(p)
# print()
for p in wall:
x, y = p
x = int(x)
y = int(y)
self.map[x,y] = 1
self.target_map[x,y] = 1
# cut_list = []
for i, _ in enumerate(pos):
if len(shape[i]) == 0:
continue
x, y = _
s = self.cstate[i]
radian = 2 * pi * s / self.bin
points, bbox = self.rotate(self.shape[i], radian)
id_list = []
for i_,p in enumerate(points):
xx,yy = p
tx = max(0,min(x+xx,self.map_size[0])) + EPS
ty = max(0,min(y+yy,self.map_size[1])) + EPS
# cut_list.append((int(tx),int(ty)))
if self.map[int(tx),int(ty)] >= 1 and self.map[int(tx),int(ty)] != i+2:
# if i == 5:
# print('map',self.map[int(tx),int(ty)])
id_list.append(i_)
else:
self.map[int(tx),int(ty)] = i + 2
if self.map[int(tx),int(ty)] == 1:
self.map[int(tx),int(ty)] = 0
if self.target_map[int(tx),int(ty)] == 1:
self.target_map[int(tx),int(ty)] = 0
x,y = target[i]
s = self.tstate[i]
radian = 2 * pi * s / self.bin
points, bbox = self.rotate(self.shape[i], radian)
# for p in points:
# xx, yy = p
# xmax = max(xmax,xx)
# xmin = min(xmin,xx)
# ymax = max(ymax,yy)
# ymin = min(ymin,yy)
# x = max(0,x - 0.5*(xmax-xmin+1))
# y = max(0,y - 0.5*(ymax-ymin+1))
for i_,p in enumerate(points):
xx,yy = p
tx = max(0,min(x+xx,self.map_size[0])) + EPS
ty = max(0,min(y+yy,self.map_size[1])) + EPS
# cut_list.append((int(tx),int(ty)))
if self.target_map[int(tx),int(ty)] >= 1 and self.target_map[int(tx),int(ty)] != i + 2:
# if i == 5:
# print('tmap',self.map[int(tx),int(ty)])
if not i_ in id_list:
id_list.append(i_)
else:
self.target_map[int(tx),int(ty)] = i + 2
if self.map[int(tx),int(ty)] == 1:
self.map[int(tx),int(ty)] = 0
if self.target_map[int(tx),int(ty)] == 1:
self.target_map[int(tx),int(ty)] = 0
tmp = deepcopy(self.shape[i])
# print(self.shape[i])
for i_ in id_list:
# print(i_,tmp[i_])
self.shape[i].remove(tmp[i_])
# dx, dy = size[i]
# x, y = _
# x_, y_ = target[i]
# self.map[x:x+dx, y:y+dy] = i + 2
# self.target_map[x_:x_+dx, y_:y_+dy] = i + 2
# print('==========')
# for sh in self.shape:
# print(len(sh))
self.map = (self.map == 1).astype(np.int32)
self.target_map = (self.target_map == 1).astype(np.int32)
for i, _ in enumerate(pos):
if len(self.shape[i]) == 0:
continue
x, y = _
s = self.cstate[i]
radian = 2 * pi * s / self.bin
points, bbox = self.rotate(self.shape[i], radian)
id_list = []
for i_,p in enumerate(points):
xx,yy = p
tx = max(0,min(x+xx,self.map_size[0])) + EPS
ty = max(0,min(y+yy,self.map_size[1])) + EPS
self.map[int(tx),int(ty)] = i + 2
x,y = target[i]
s = self.tstate[i]
radian = 2 * pi * s / self.bin
points, bbox = self.rotate(self.shape[i], radian)
for i_,p in enumerate(points):
xx,yy = p
tx = max(0,min(x+xx,self.map_size[0])) + EPS
ty = max(0,min(y+yy,self.map_size[1])) + EPS
self.target_map[int(tx),int(ty)] = i + 2
for i, sh in enumerate(self.shape):
ed = []
# bd = []
map_ = np.zeros(self.map_size)
mark_ = np.zeros(self.map_size)
for p in sh:
map_[p[0],p[1]] = 1
# print(i,sh)
def dfs(p):
gx = [1,0,-1,0]
gy = [0,1,0,-1]
rx = [1,1,1,0,0,-1,-1,-1]
ry = [-1,0,1,-1,1,-1,0,1]
stack = []
stack.append((p,-1))
last = -2
while len(stack) != 0:
flag = True
p,direction = stack.pop()
if mark_[p[0],p[1]] == 1:
continue
mark_[p[0],p[1]] = 1
# print(p)
for i in range(8):
x = p[0] + rx[i]
y = p[1] + ry[i]
if x < 0 or x >= self.map_size[0] or y >= self.map_size[1] or y < 0 or map_[x,y] == 0:
# if last == direction:
# ed.pop()
ed.append(p)
# bd.append(p)
last = direction
flag = False
break
vs = []
for i in range(8):
vs.append([])
for i in range(4):
x = p[0] + gx[i]
y = p[1] + gy[i]
if x >= 0 and x < self.map_size[0] and y >= 0 and y < self.map_size[1] and map_[x,y] == 1 and mark_[x,y] == 0:
# print(x,y)
# dfs((x,y))
if flag:
# mark_[x,y] = 1
stack.append(((x,y),i))
break
else:
cnt = 0
for j in range(8):
xx = x + rx[j]
yy = y + ry[j]
if xx < 0 or xx >= self.map_size[0] or yy >= self.map_size[1] or yy < 0 or map_[xx,yy] == 0:
# mark_[x,y] = 1
cnt += 1
vs[cnt].append(((x,y),i))
for v in vs:
for p in v:
stack.append(p)
if len(sh) != 0:
dfs(sh[0])
self.edge.append(ed)
# self.bound.append(bd)
# for i in range(len(self.pos)):
# print(len(self.shape[i]),len(self.edge[i]))
for i, _ in enumerate(pos):
if self.equal(self.pos[i],self.target[i]) and self.cstate[i] == self.tstate[i]:
self.finished[i] = 1
for ed in self.edge:
self.pn.append(len(ed))
for p in ed:
self.shapex.append(p[0])
self.shapey.append(p[1])
hash_ = self.hash()
self.state_dict[hash_] = 1
# self.check()
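# A minimal usage sketch for setmap() above (hedged: toy values, assuming the
# module-level numpy/EPS conventions used elsewhere in this file):
# env = ENV_ablation_wo_multi(size=(10, 10), max_num=1)
# square = [(a, b) for a in range(2) for b in range(2)]  # 2x2 footprint
# env.setmap(pos=[(1.0, 1.0)], target=[(7.0, 7.0)], shape=[square],
#            cstate=[0], tstate=[0], wall=[(0, 0), (0, 1)])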
def hash(self):
mod = 1000000007
num = len(self.pos) + 1
total = 0
for row in self.map:
for _ in row:
total *= num
total += int(_)
total %= mod
return total
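# Sketch of the rolling hash above: the grid is read row by row as a
# base-(len(pos)+1) number reduced mod 1e9+7, so two layouts hash equal
# only when every cell matches (up to rare modular collisions).
# total = 0
# for cell in [0, 2, 0, 3]:  # flattened toy map with num = 4
#     total = (total * 4 + cell) % 1000000007
# assert total == 0 * 64 + 2 * 16 + 0 * 4 + 3  # 35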
def rotate(self, points, radian):
eps = 1e-6
res_list = []
points = np.array(points).astype(np.float32)
# print(points.shape)
points[:,0] -= points[:,0].min()
points[:,1] -= points[:,1].min()
mx = points[:,0].max()
my = points[:,1].max()
points[:,0] -= 0.5 * mx + 0.5
points[:,1] -= 0.5 * my + 0.5
points = points.transpose()
rot = np.array([[cos(radian),-sin(radian)],[sin(radian), cos(radian)]])
points = np.matmul(rot,points)
points[0,:] += eps
points[1,:] += eps
# points[0,:] += 0.5 * mx - 0.5 + eps
# points[1,:] += 0.5 * my - 0.5 + eps
# points = np.round(points).astype(np.int)
xmax = points[0].max()
xmin = points[0].min()
ymax = points[1].max()
ymin = points[1].min()
# if xmin < 0:
# points[0] += 1
# if ymin < 0:
# points[1] += 1
return points.transpose(), np.array([[xmin, ymin], [xmax, ymax]])
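# Minimal sketch of the transform in rotate(): points are centered, then
# multiplied by a standard 2x2 rotation matrix (hedged toy input):
# pts = np.array([[1.0], [0.0]])  # one point in column form
# r = pi / 2
# rot = np.array([[cos(r), -sin(r)], [sin(r), cos(r)]])
# out = np.matmul(rot, pts)  # (1, 0) rotates to approximately (0, 1)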
'''
check the state
0 represents not accessible
1 represents accessible
'''
def check(self, index):
flag = True
# tx,ty = self.target[index]
# sx,sy = self.pos[index]
# shape = self.shape[index]
# cstate = self.cstate[index]
# print(cstate)
# tstate = self.tstate[index]
route = []
# ps = np.array(shape)
# px = np.array(ps[:,0],dtype=np.int32)
# py = np.array(ps[:,1],dtype=np.int32)
# num = len(ps)
lx = np.zeros([60000],dtype=np.float32)
ly = np.zeros([60000],dtype=np.float32)
lr = np.zeros([60000],dtype=np.int32)
ld = np.zeros([60000],dtype=np.int32)
length = np.array([0],dtype=np.int32)
flag = np.array([1],dtype=np.int32)
n,m = self.map_size
tx_ar = np.zeros(len(self.target),dtype=np.float32)
ty_ar = np.zeros_like(tx_ar,dtype=np.float32)
sx_ar = np.zeros_like(tx_ar,dtype=np.float32)
sy_ar = np.zeros_like(tx_ar,dtype=np.float32)
for i,p in enumerate(self.target):
tx_ar[i] = p[0]
ty_ar[i] = p[1]
for i,p in enumerate(self.pos):
sx_ar[i] = p[0]
sy_ar[i] = p[1]
tmp_map = np.array(self.map,dtype=np.int32)
shape,bbox = self.getitem(index,self.cstate[index])
sx,sy = self.pos[index]
for p in shape:
x,y = p
x += sx + EPS
y += sy + EPS
tmp_map[min(int(x),self.map_size[0]-1),min(int(y),self.map_size[1]-1)] = 0
# print('in')
# print(index)
# tx = int(tx)
# ty = int(ty)
# sx = int(sx)
# sy = int(sy)
# search_transpose(np.array(self.map,dtype=np.int32),n,m,index,tx,ty,sx,sy, cstate,tstate,self.bin,px,py,num,lx,ly,lr,ld,length,flag)
search_transpose_poly(
tmp_map, np.array(self.shapex,dtype=np.float32),np.array(self.shapey,dtype=np.float32),np.array(self.pn,dtype=np.int32),n,m,
len(self.pos),index,
tx_ar,ty_ar,
sx_ar,sy_ar,
np.array(self.cstate, dtype=np.int32), np.array(self.tstate, dtype=np.int32), self.bin,
lx, ly, lr, ld, length,
flag
)
# print('out')
# search(np.array(self.map,dtype=np.int32),n,m,index,tx,ty,sx,sy,h,w,lx,ly,length,flag)
for i in range(length[0]):
route.append((lx[i],ly[i],lr[i],ld[i]))
route.reverse()
return flag[0], route
def check_s(self, index):
flag = True
# tx,ty = self.target[index]
# sx,sy = self.pos[index]
# shape = self.shape[index]
# cstate = self.cstate[index]
# print(cstate)
# tstate = self.tstate[index]
route = []
# ps = np.array(shape)
# px = np.array(ps[:,0],dtype=np.int32)
# py = np.array(ps[:,1],dtype=np.int32)
# num = len(ps)
lx = np.zeros([60000],dtype=np.float32)
ly = np.zeros([60000],dtype=np.float32)
lr = np.zeros([60000],dtype=np.int32)
ld = np.zeros([60000],dtype=np.int32)
length = np.array([0],dtype=np.int32)
flag = np.array([1],dtype=np.int32)
n,m = self.map_size
tx_ar = np.zeros(len(self.target),dtype=np.float32)
ty_ar = np.zeros_like(tx_ar,dtype=np.float32)
sx_ar = np.zeros_like(tx_ar,dtype=np.float32)
sy_ar = np.zeros_like(tx_ar,dtype=np.float32)
for i,p in enumerate(self.target):
tx_ar[i] = p[0]
ty_ar[i] = p[1]
for i,p in enumerate(self.pos):
sx_ar[i] = p[0]
sy_ar[i] = p[1]
tmp_map = np.array(self.map,dtype=np.int32)
shape,bbox = self.getitem(index,self.cstate[index])
sx,sy = self.pos[index]
for p in shape:
x,y = p
x += sx + EPS
y += sy + EPS
tmp_map[min(int(x),self.map_size[0]-1),min(int(y),self.map_size[1]-1)] = 0
# print('in')
# print(index)
# tx = int(tx)
# ty = int(ty)
# sx = int(sx)
# sy = int(sy)
# search_transpose(np.array(self.map,dtype=np.int32),n,m,index,tx,ty,sx,sy, cstate,tstate,self.bin,px,py,num,lx,ly,lr,ld,length,flag)
search_transpose_poly_rot(
tmp_map, np.array(self.shapex,dtype=np.float32),np.array(self.shapey,dtype=np.float32),np.array(self.pn,dtype=np.int32),n,m,
len(self.pos),index,
tx_ar,ty_ar,
sx_ar,sy_ar,
np.array(self.cstate, dtype=np.int32), np.array(self.tstate, dtype=np.int32), self.bin,
lx, ly, lr, ld, length,
flag
)
# print('out')
# search(np.array(self.map,dtype=np.int32),n,m,index,tx,ty,sx,sy,h,w,lx,ly,length,flag)
for i in range(length[0]):
route.append((lx[i],ly[i],lr[i],ld[i]))
route.reverse()
return flag[0], route
def equal(self, a, b):
return fabs(a[0]-b[0])+fabs(a[1]-b[1]) < 1e-3
def route_length(self, route):
res = 0
lx = None
ly = None
for _ in route:
x,y,_,_ = _
if lx is not None:
res += abs(lx-x) + abs(ly-y)
lx = x
ly = y
return res
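# Sketch: route_length() sums Manhattan steps between consecutive waypoints,
# ignoring the rotation/direction fields of each (x, y, r, d) tuple:
# route = [(0, 0, 0, 0), (2, 0, 0, 0), (2, 3, 0, 0)]
# # |0-2| + |0-0| = 2, then |2-2| + |0-3| = 3, so the length is 5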
'''
return (reward, finish_flag); finish_flag == 1 represents the finished state
'''
def move(self, index, direction):
base = -1
if index >= len(self.pos):
return -500, -1
if len(self.shape[index]) == 0:
return -500, -1
map_size = self.map_size
# size = self.size[index]
target = self.target[index]
pos = self.pos[index]
cstate = self.cstate[index]
tstate = self.tstate[index]
shape, bbox = self.getitem(index, cstate)
n,m = self.map_size
sx_ar = np.zeros(len(self.target),dtype=np.float32)
sy_ar = np.zeros_like(sx_ar,dtype=np.float32)
steps_ar = np.zeros(1,dtype=np.int32)
for i,p in enumerate(self.pos):
sx_ar[i] = p[0]
sy_ar[i] = p[1]
if direction < 4:
xx, yy = pos
# xx = max(0,xx - 0.5*(bbox[1][0]-bbox[0][0]+1))
# yy = max(0,yy - 0.5*(bbox[1][1]-bbox[0][1]+1))
if self.equal(pos, target) and cstate == tstate: # prevent it from being trapped in a local minimum by shuffling the object around the destination
base += -4
translate(
np.array(self.map,dtype=np.int32),np.array(self.shapex,dtype=np.float32),np.array(self.shapey,dtype=np.float32),np.array(self.pn,dtype=np.int32),n,m,
len(self.pos),index,direction,
sx_ar,sy_ar,
np.array(self.cstate, dtype=np.int32), np.array(self.tstate, dtype=np.int32), self.bin,
steps_ar
)
steps = steps_ar[0]
x = xx + steps * _x[direction]
y = yy + steps * _y[direction]
self.last_steps = steps
if steps == 0:
return -500, -1
else:
base += -0.5 * log(steps) / log(10)
for p in shape:
self.map[min(int(xx+p[0]+EPS),self.map_size[0]-1),min(int(yy+p[1]+EPS),self.map_size[1]-1)] = 0
for p in shape:
self.map[min(int(x+p[0]+EPS),self.map_size[0]-1),min(int(y+p[1]+EPS),self.map_size[1]-1)] = index + 2
# self.map[xx:xx+size[0],yy:yy+size[1]] = 0
# self.map[x:x+size[0],y:y+size[1]] = index + 2
# x = x + 0.5*(bbox[1][0]-bbox[0][0]+1)
# y = y + 0.5*(bbox[1][1]-bbox[0][1]+1)
self.pos[index] = (x, y)
hash_ = self.hash() # get the hash value of the current state
if hash_ in self.state_dict: # if the current state happened before
self.state_dict[hash_] += 1
penalty = 1 # constant penalty; the stored visit count is tracked but not used here
return base - 2 * penalty, 0
self.state_dict[hash_] = 1
if fabs(x-target[0]) + fabs(y-target[1]) < 1e-6 and cstate == tstate:
# if self.finished[index] == 1:
# return base + 2, 0
# else:
self.finished[index] = 1
return base + 4, 0
# return base + 500, 0
# return base + 200, 0
return base, 0
# for i in range(size[0]):
# for j in range(size[1]):
# self.map[target[0]+i, target[1]+j] = 1
# if it has already reached the target place.
if self.equal(pos, target) and cstate == tstate:
return -500, -1
# check if legal, calc the distance
flag, route = self.check(index)
if flag == 1:
xx, yy = pos
x,y = target
self.route = route
length = self.route_length(self.route)
if length != 0:
base += -0.5 * log(length) / log(10)
# xx = max(0,xx - 0.5*(bbox[1][0]-bbox[0][0]+1))
# yy = max(0,yy - 0.5*(bbox[1][1]-bbox[0][1]+1))
tshape, bbox = self.getitem(index, tstate)
# x = max(0,x - 0.5*(bbox[1][0]-bbox[0][0]+1))
# y = max(0,y - 0.5*(bbox[1][1]-bbox[0][1]+1))
for p in shape:
tx = max(0,min(xx+p[0],self.map_size[0]-1)) + EPS
ty = max(0,min(yy+p[1],self.map_size[1]-1)) + EPS
self.map[min(int(tx),self.map_size[0]-1),min(int(ty),self.map_size[1]-1)] = 0
# self.map[xx:xx+size[0], yy:yy+size[1]] = 0
for p in tshape:
tx = max(0,min(x+p[0],self.map_size[0]-1)) + EPS
ty = max(0,min(y+p[1],self.map_size[1]-1)) + EPS
self.map[min(int(tx),self.map_size[0]-1), min(int(ty),self.map_size[1]-1)] = index + 2
self.pos[index] = target
self.cstate[index] = tstate
hash_ = self.hash() # get the hash value of the current state
if hash_ in self.state_dict: # if the current state happened before
self.state_dict[hash_] += 1
penalty = 1 # constant penalty; the stored visit count is tracked but not used here
return base - 2 * penalty, 0
ff = True
for i,v in enumerate(self.pos): # check if the task is done
w = self.target[i]
if fabs(v[0]-w[0])+fabs(v[1]-w[1]) > 1e-6 or self.cstate[i] != self.tstate[i]:
ff = False
break
if ff:
self.finished[index] = 1
# return base + 10000, 1
return base + 50, 1
else:
# if self.finished[index] == 1: # if the object was placed before
# return base + 2, 0
# else:
self.finished[index] = 1
return base + 4, 0
# return base + 500, 0
# return base + 200, 0
# elif flag == -1:
# return -1000, 1
else:
# return -500, -1
return base, -1
def move_s(self, index, direction):
base = -10
if index >= len(self.pos):
return -500, -1
if len(self.shape[index]) == 0:
return -500, -1
map_size = self.map_size
# size = self.size[index]
target = self.target[index]
pos = self.pos[index]
cstate = self.cstate[index]
tstate = self.tstate[index]
shape, bbox = self.getitem(index, cstate)
n,m = self.map_size
sx_ar = np.zeros(len(self.target),dtype=np.float32)
sy_ar = np.zeros_like(sx_ar,dtype=np.float32)
steps_ar = np.zeros(1,dtype=np.int32)
for i,p in enumerate(self.pos):
sx_ar[i] = p[0]
sy_ar[i] = p[1]
if self.equal(pos, target) and cstate == tstate: # prevent it from being trapped in a local minimum by shuffling the object around the destination
# base += -60
base += -600
if direction < 4:
xx, yy = pos
# xx = max(0,xx - 0.5*(bbox[1][0]-bbox[0][0]+1))
# yy = max(0,yy - 0.5*(bbox[1][1]-bbox[0][1]+1))
translate(
np.array(self.map,dtype=np.int32),np.array(self.shapex,dtype=np.float32),np.array(self.shapey,dtype=np.float32),np.array(self.pn,dtype=np.int32),n,m,
len(self.pos),index,direction,
sx_ar,sy_ar,
np.array(self.cstate, dtype=np.int32), np.array(self.tstate, dtype=np.int32), self.bin,
steps_ar
)
steps = steps_ar[0]
x = xx + steps * _x[direction]
y = yy + steps * _y[direction]
if steps == 0:
return -500, -1
else:
for p in shape:
self.map[min(int(xx+p[0]+EPS),self.map_size[0]-1),min(int(yy+p[1]+EPS),self.map_size[1]-1)] = 0
for p in shape:
self.map[min(int(x+p[0]+EPS),self.map_size[0]-1),min(int(y+p[1]+EPS),self.map_size[1]-1)] = index + 2
# self.map[xx:xx+size[0],yy:yy+size[1]] = 0
# self.map[x:x+size[0],y:y+size[1]] = index + 2
# x = x + 0.5*(bbox[1][0]-bbox[0][0]+1)
# y = y + 0.5*(bbox[1][1]-bbox[0][1]+1)
self.pos[index] = (x, y)
hash_ = self.hash() # get the hash value of the current state
if hash_ in self.state_dict: # if the current state happened before
return -600, 0
self.state_dict[hash_] = 1
if fabs(x-target[0]) + fabs(y-target[1]) < 1e-6 and cstate == tstate:
if self.finished[index] == 1:
# return base + 50, 0
return base + 500, 0
else:
self.finished[index] = 1
return base + 2000, 0
# return base + 500, 0
# return base + 200, 0
return base, 0
# for i in range(size[0]):
# for j in range(size[1]):
# self.map[target[0]+i, target[1]+j] = 1
# if it has already reached the target place.
if self.equal(pos, target) and cstate == tstate:
return -500, -1
# check if legal, calc the distance
flag, route = self.check_s(index)
if flag == 1:
xx, yy = pos
x,y = target
self.route = route
# xx = max(0,xx - 0.5*(bbox[1][0]-bbox[0][0]+1))
# yy = max(0,yy - 0.5*(bbox[1][1]-bbox[0][1]+1))
tshape, bbox = self.getitem(index, tstate)
# x = max(0,x - 0.5*(bbox[1][0]-bbox[0][0]+1))
# y = max(0,y - 0.5*(bbox[1][1]-bbox[0][1]+1))
for p in shape:
tx = max(0,min(xx+p[0],self.map_size[0]-1)) + EPS
ty = max(0,min(yy+p[1],self.map_size[1]-1)) + EPS
self.map[min(int(tx),self.map_size[0]-1),min(int(ty),self.map_size[1]-1)] = 0
# self.map[xx:xx+size[0], yy:yy+size[1]] = 0
for p in tshape:
tx = max(0,min(x+p[0],self.map_size[0]-1)) + EPS
ty = max(0,min(y+p[1],self.map_size[1]-1)) + EPS
self.map[min(int(tx),self.map_size[0]-1), min(int(ty),self.map_size[1]-1)] = index + 2
self.pos[index] = target
self.cstate[index] = tstate
hash_ = self.hash() # get the hash value of the current state
if hash_ in self.state_dict: # if the current state happened before
# return -40, 0
return -600, 0
ff = True
for i,v in enumerate(self.pos): # check if the task is done
w = self.target[i]
if fabs(v[0]-w[0])+fabs(v[1]-w[1]) > 1e-6 or self.cstate[i] != self.tstate[i]:
ff = False
break
if ff:
self.finished[index] = 1
# return base + 10000, 1
return base + 100000, 1
else:
if self.finished[index] == 1: # if the object was placed before
# return base + 50, 0
return base + 500, 0
else:
self.finished[index] = 1
return base + 2000, 0
# return base + 500, 0
# return base + 200, 0
# elif flag == -1:
# return -1000, 1
else:
# return -500, -1
return base, -1
def move_p(self, index, direction):
base = -1
if index >= len(self.pos):
return -500, -1
if len(self.shape[index]) == 0:
return -500, -1
map_size = self.map_size
# size = self.size[index]
target = self.target[index]
pos = self.pos[index]
cstate = self.cstate[index]
tstate = self.tstate[index]
shape, bbox = self.getitem(index, cstate)
n,m = self.map_size
sx_ar = np.zeros(len(self.target),dtype=np.float32)
sy_ar = np.zeros_like(sx_ar,dtype=np.float32)
steps_ar = np.zeros(1,dtype=np.int32)
for i,p in enumerate(self.pos):
sx_ar[i] = p[0]
sy_ar[i] = p[1]
if direction < 4:
xx, yy = pos
# xx = max(0,xx - 0.5*(bbox[1][0]-bbox[0][0]+1))
# yy = max(0,yy - 0.5*(bbox[1][1]-bbox[0][1]+1))
if self.equal(pos, target) and cstate == tstate: # prevent it from being trapped in a local minimum by shuffling the object around the destination
base += -4
translate(
np.array(self.map,dtype=np.int32),np.array(self.shapex,dtype=np.float32),np.array(self.shapey,dtype=np.float32),np.array(self.pn,dtype=np.int32),n,m,
len(self.pos),index,direction,
sx_ar,sy_ar,
np.array(self.cstate, dtype=np.int32), np.array(self.tstate, dtype=np.int32), self.bin,
steps_ar
)
steps = steps_ar[0]
x = xx + steps * _x[direction]
y = yy + steps * _y[direction]
if steps == 0:
return -500, -1
else:
for p in shape:
self.map[min(int(xx+p[0]+EPS),self.map_size[0]-1),min(int(yy+p[1]+EPS),self.map_size[1]-1)] = 0
for p in shape:
self.map[min(int(x+p[0]+EPS),self.map_size[0]-1),min(int(y+p[1]+EPS),self.map_size[1]-1)] = index + 2
# self.map[xx:xx+size[0],yy:yy+size[1]] = 0
# self.map[x:x+size[0],y:y+size[1]] = index + 2
# x = x + 0.5*(bbox[1][0]-bbox[0][0]+1)
# y = y + 0.5*(bbox[1][1]-bbox[0][1]+1)
self.pos[index] = (x, y)
hash_ = self.hash() # get the hash value of the current state
if hash_ in self.state_dict: # if the current state happened before
self.state_dict[hash_] += 1
penalty = 1 # constant penalty; the stored visit count is tracked but not used here
return base - 2 * penalty, 0
self.state_dict[hash_] = 1
if fabs(x-target[0]) + fabs(y-target[1]) < 1e-6 and cstate == tstate:
if self.finished[index] == 1:
return base + 2, 0
else:
self.finished[index] = 1
return base + 4, 0
# return base + 500, 0
# return base + 200, 0
return base, 0
# for i in range(size[0]):
# for j in range(size[1]):
# self.map[target[0]+i, target[1]+j] = 1
# if it has already reached the target place.
if self.equal(pos, target) and cstate == tstate:
return -500, -1
# check if legal, calc the distance
flag, route = self.check_s(index)
if flag == 1:
xx, yy = pos
x,y = target
self.route = route
# xx = max(0,xx - 0.5*(bbox[1][0]-bbox[0][0]+1))
# yy = max(0,yy - 0.5*(bbox[1][1]-bbox[0][1]+1))
tshape, bbox = self.getitem(index, tstate)
# x = max(0,x - 0.5*(bbox[1][0]-bbox[0][0]+1))
# y = max(0,y - 0.5*(bbox[1][1]-bbox[0][1]+1))
for p in shape:
tx = max(0,min(xx+p[0],self.map_size[0]-1)) + EPS
ty = max(0,min(yy+p[1],self.map_size[1]-1)) + EPS
self.map[min(int(tx),self.map_size[0]-1),min(int(ty),self.map_size[1]-1)] = 0
# self.map[xx:xx+size[0], yy:yy+size[1]] = 0
for p in tshape:
tx = max(0,min(x+p[0],self.map_size[0]-1)) + EPS
ty = max(0,min(y+p[1],self.map_size[1]-1)) + EPS
self.map[min(int(tx),self.map_size[0]-1), min(int(ty),self.map_size[1]-1)] = index + 2
self.pos[index] = target
self.cstate[index] = tstate
hash_ = self.hash() # get the hash value of the current state
if hash_ in self.state_dict: # if the current state happened before
self.state_dict[hash_] += 1
penalty = 1 # constant penalty; the stored visit count is tracked but not used here
return base - 2 * penalty, 0
ff = True
for i,v in enumerate(self.pos): # check if the task is done
w = self.target[i]
if fabs(v[0]-w[0])+fabs(v[1]-w[1]) > 1e-6 or self.cstate[i] != self.tstate[i]:
ff = False
break
if ff:
self.finished[index] = 1
# return base + 10000, 1
return base + 50, 1
else:
if self.finished[index] == 1: # if the object was placed before
return base + 2, 0
else:
self.finished[index] = 1
return base + 4, 0
# return base + 500, 0
# return base + 200, 0
# elif flag == -1:
# return -1000, 1
else:
# return -500, -1
return base, -1
def move_debug(self, index, direction):
base = -10
if index >= len(self.pos):
print('index error')
return -500, -1
map_size = self.map_size
# size = self.size[index]
target = self.target[index]
pos = self.pos[index]
cstate = self.cstate[index]
tstate = self.tstate[index]
shape,bbox = self.getitem(index, cstate)
if self.equal(pos, target) and cstate == tstate: # prevent a local minimum near the destination; elementwise == on array rows would be ambiguous here
base += -60
if direction < 4:
xx, yy = pos
print('xx',xx,'yy',yy)
steps = 1
while True:
x = xx + steps*_x[direction]
y = yy + steps*_y[direction]
if x < 0 or x >= map_size[0] or y < 0 or y >= map_size[1] :
steps -= 1
print('early done')
break
# return -500, -1
flag = True
for p in shape:
if x+p[0] < 0 or x+p[0] >= map_size[0] or y+p[1] < 0 or y+p[1] >= map_size[1]:
flag = False
print('over bound',x+p[0],)
break
if self.map[x+p[0],y+p[1]] != 0:
print('collide!!',self.map[x+p[0],y+p[1]],index+2)
flag = False
break
if not flag:
steps -= 1
break
steps += 1
x = xx + steps*_x[direction]
y = yy + steps*_y[direction]
if steps == 0:
print('cannot move error')
return -500, -1
else:
for p in shape:
self.map[xx+p[0],yy+p[1]] = 0
for p in shape:
self.map[x+p[0],y+p[1]] = index + 2
# self.map[xx:xx+size[0],yy:yy+size[1]] = 0
# self.map[x:x+size[0],y:y+size[1]] = index + 2
self.pos[index] = (x, y)
hash_ = self.hash() # get the hash value of the current state
if hash_ in self.state_dict: # if the current state happened before
return -600, 0
self.state_dict[hash_] = 1
if int(x) == int(target[0]) and int(y) == int(target[1]) and cstate == tstate:
if self.finished[index] == 1:
return base + 50, 0
else:
self.finished[index] = 1
return base + 500, 0
# return base + 200, 0
return base, 0
# for i in range(size[0]):
# for j in range(size[1]):
# self.map[target[0]+i, target[1]+j] = 1
# if it has already reached the target place.
if self.equal(pos, target) and cstate == tstate:
print('position error')
return -500, -1
# check if legal, calc the distance
flag, route = self.check(index)
if flag == 1:
xx, yy = pos
x,y = target
self.route = route
tshape,bbox = self.getitem(index,tstate)
for p in shape:
self.map[xx+p[0],yy+p[1]] = 0
# self.map[xx:xx+size[0], yy:yy+size[1]] = 0
for p in tshape:
self.map[x+p[0], y+p[1]] = index + 2
self.pos[index] = target
self.cstate[index] = tstate
hash_ = self.hash() # get the hash value of the current state
if hash_ in self.state_dict: # if the current state happened before
return -40, 0
ff = True
for i,v in enumerate(self.pos): # check if the task is done
w = self.target[i]
if fabs(v[0]-w[0])+fabs(v[1]-w[1]) > 1e-6 or self.cstate[i] != self.tstate[i]: # elementwise != on array rows is ambiguous; compare coordinates explicitly
ff = False
break
if ff:
self.finished[index] = 1
return base + 10000, 1
else:
if self.finished[index] == 1: # if the object was placed before
return base + 50, 0
else:
self.finished[index] = 1
return base + 500, 0
# return base + 200, 0
# elif flag == -1:
# return -1000, 1
else:
print('path find error')
return -500, -1
def getlastroute(self):
return self.route
def printmap(self):
with open('op.txt','a') as fp:
for _ in self.map:
for __ in _:
fp.write('%d '%__)
fp.write('\n')
fp.write('\n')
def build_wall(self):
size = self.map_size
p = 0.005
q = 0.05
dix = [1,0,-1,0]
diy = [0,1,0,-1]
while True:
px = np.random.randint(size[0])
py = np.random.randint(size[1])
if fabs(px-size[0]/2) + fabs(py-size[1]/2) > (size[0]+size[1])/4:
break
d = int(px < size[0]/2) + int(py < size[1]/2)*2
if d == 3:
d = 2
elif d == 2:
d = 3
l = 0
wall_list = []
while True:
cnt = 0
while True:
tx = px + dix[d]
ty = py + diy[d]
cnt += 1
if cnt > 10:
break
if tx == size[0] or tx == -1 or ty == size[1] or ty == -1:
d = np.random.randint(4)
continue
break
if tx >= 0 and tx < size[0] and ty >= 0 and ty < size[1]:
px = tx
py = ty
if not (px,py) in wall_list :
wall_list.append((px,py))
r = np.random.rand()
if r < q:
t = np.random.rand()
if t < 0.9:
d = (d+1)%4
else:
d = np.random.randint(4)
r = np.random.rand()
if r < p and l > 100:
break
l += 1
return wall_list
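# Sketch of the wall walk above: dix/diy encode the four axis directions
# (index 0..3 -> +x, +y, -x, -y); each step appends an unvisited cell, and
# with probability q the walk turns (usually by 90 degrees) before moving on.
# dix, diy = [1, 0, -1, 0], [0, 1, 0, -1]
# px, py, d = 5, 5, 0
# px, py = px + dix[d], py + diy[d]  # one step in direction 0 -> (6, 5)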
def randominit(self,num=5):
import random
size = self.map_size
# print(size)
pos_list = []
size_list = []
target_list = []
for i in range(num):
while True:
px = np.random.randint(size[0])
py = np.random.randint(size[1])
flag = True
for _ in pos_list:
dx,dy = _
if dx == px or dy == py:
flag = False
break
if flag:
pos_list.append((px,py))
break
while True:
px = np.random.randint(size[0])
py = np.random.randint(size[1])
flag = True
for _ in target_list:
dx,dy = _
if dx == px or dy == py:
flag = False
break
if flag:
target_list.append((px,py))
break
random.shuffle(pos_list)
random.shuffle(target_list)
for i in range(num):
px, py = pos_list[i]
tx, ty = target_list[i]
while True:
dx = np.random.randint(1,11)
dy = np.random.randint(1,11)
if px + dx > size[0] or py + dy > size[1] or tx + dx > size[0] or ty + dy > size[1]:
# print(px,py,dx,dy)
continue
flag = True
for j in range(num):
if j == i:
continue
px_, py_ = pos_list[j]
tx_, ty_ = target_list[j]
if j > len(size_list) - 1:
dx_, dy_ = 1, 1
else:
dx_, dy_ = size_list[j]
lx = max(px, px_)
ly = max(py, py_)
rx = min(px + dx, px_ + dx_) - 1
ry = min(py + dy, py_ + dy_) - 1
if lx <= rx and ly <= ry:
flag = False
break
lx = max(tx, tx_)
ly = max(ty, ty_)
rx = min(tx + dx, tx_ + dx_) - 1
ry = min(ty + dy, ty_ + dy_) - 1
if lx <= rx and ly <= ry:
flag = False
break
if flag:
size_list.append((dx,dy))
break
# list_ =[(pair[0],(dx,dy)),(pair[1],(dx_,dy_))
# random.shuffle(list_)
# for i in list_:
# pos_list.append(i[0])
# size_list.append(i[1])
self.setmap(pos_list,target_list,size_list)
# for i in range(size[0]):
# for j in range(size[1]):
# for i in range(size[0]):
# for j in range(size[1]):
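# Sketch of the overlap test used in randominit() above: two axis-aligned
# boxes intersect exactly when the max of the left edges is <= the min of
# the right edges on both axes (toy 3x3 boxes at (0,0) and (1,1)):
# lx, ly = max(0, 1), max(0, 1)
# rx, ry = min(0 + 3, 1 + 3) - 1, min(0 + 3, 1 + 3) - 1
# assert lx <= rx and ly <= ry  # the boxes overlap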
def randominit_crowded(self,num=5):
import random
size = self.map_size
# print(size)
while True:
reboot = False
pos_list = []
size_list = []
target_list = []
# self.finished = np.zeros(num)
tmp = self.build_wall()
# tmp += self.build_wall()
# tmp += self.build_wall()
tmp.sort() # sort in place; a bare sorted(tmp) discards its result, breaking the dedupe below
wall_list = []
l = None
map_ = np.zeros(self.map_size)
for t in tmp:
if t != l:
l = t
map_[l[0],l[1]] = 1
wall_list.append(t)
def dfs(p,tp):
gx = [1,0,-1,0]
gy = [0,1,0,-1]
stack = []
stack.append(p)
mark_ = np.zeros(self.map_size)
while len(stack) != 0:
flag = True
p = stack.pop()
if mark_[p[0],p[1]] == 1:
continue
mark_[p[0],p[1]] = 1
if p[0] == tp[0] and p[1] == tp[1]:
break
for i in range(4):
x = p[0] + gx[i]
y = p[1] + gy[i]
if x >= 0 and x < self.map_size[0] and y >= 0 and y < self.map_size[1] and map_[x,y] != 1 and mark_[x,y] == 0:
stack.append((x,y))
return mark_[tp[0],tp[1]] == 1
for i in range(num):
while True:
px = np.random.randint(1,size[0]-3)
py = np.random.randint(1,size[1]-3)
flag = True
hh = [-1,0,1]
for _ in pos_list:
dx, dy = _
if dx == px or dy == py or abs(dx - px) + abs(dy - py) < 5:
flag = False
for k in hh:
for l in hh:
xx = px + k
yy = py + l
if dx == xx and dy == yy:
flag = False
break
if flag == False:
break
if flag == False:
break
for _ in wall_list:
if flag == False:
break
dx,dy = _
if dx == px and dy == py:
flag = False
break
for k in hh:
for l in hh:
xx = px + k
yy = py + l
if dx == xx and dy == yy:
flag = False
break
if flag == False:
break
if flag == False:
break
if flag:
pos_list.append((px,py))
break
reboot_cnt = 0
while reboot_cnt < 10000:
reboot_cnt += 1
px = np.random.randint(1,size[0]-3)
py = np.random.randint(1,size[1]-3)
# flag = True
flag = dfs((px,py),pos_list[-1])
# print(flag)
for _ in target_list:
dx, dy = _
if dx == px or dy == py or abs(dx-px)+abs(dy-py) < 5:
flag = False
break
for k in hh:
for l in hh:
xx = px + k
yy = py + l
if dx == xx and dy == yy:
flag = False
break
if flag == False:
break
if flag == False:
break
for _ in wall_list:
if flag == False:
break
dx,dy = _
if dx == px and dy == py:
flag = False
break
for k in hh:
for l in hh:
xx = px + k
yy = py + l
if dx == xx and dy == yy:
flag = False
break
if flag == False:
break
if flag == False:
break
if flag:
target_list.append((px,py))
break
if reboot_cnt >= 10000:
reboot = True
break
if reboot:
continue
# random.shuffle(pos_list)
# random.shuffle(target_list)
sub_num = self.max_num - num
# print(self.max_num, num)
pos_s = [ (_, i) for i, _ in enumerate(pos_list)]
for _ in range(sub_num):
pos_s.append(((0,0),-1))
# print(len(pos_list))
random.shuffle(pos_s) # shuffle the padded (position, id) pairs; the stored ids keep targets aligned below
pos_list = np.array([_[0] for _ in pos_s])
tmp_target_list = np.zeros_like(pos_list)
# random.shuffle(target_list)
_ = 0
# for target in target_list:
# while pos_list[_][0] == pos_list[_][1] and pos_list[_][0] == 0:
# _ += 1
# tmp_target_list[_] = target
# _ += 1
for i,_ in enumerate(pos_s):
p, id_ = _
if p[0] == p[1] and p[0] == 0:
continue
tmp_target_list[i] = target_list[id_]
target_list = tmp_target_list
def dfs(p,tp,size):
gx = [1,0,-1,0]
gy = [0,1,0,-1]
dx, dy = size
stack = []
stack.append(p)
mark_ = np.zeros(self.map_size)
while len(stack) != 0:
flag = True
p = stack.pop()
if mark_[p[0],p[1]] == 1:
continue
mark_[p[0],p[1]] = 1
if p[0] == tp[0] and p[1] == tp[1]:
break
for i in range(4):
x = p[0] + gx[i]
y = p[1] + gy[i]
if x >= 0 and x + dx - 1 < self.map_size[0] and y >= 0 and y + dy - 1 < self.map_size[1] and map_[x,y] != 1 and mark_[x,y] == 0:
flag = True
for j in range(dx):
for k in range(dy):
if map_[x+j,y+k] == 1:
flag = False
break
if flag == False:
break
if flag:
stack.append((x,y))
return mark_[tp[0],tp[1]] == 1
for i in range(self.max_num):
px, py = pos_list[i]
tx, ty = target_list[i]
if px == py and py == 0:
size_list.append((0,0))
continue
reboot_cnt = 0
while reboot_cnt < 10000:
reboot_cnt += 1
dx = np.random.randint(2,31)
dy = np.random.randint(2,31)
if px + dx > size[0] or py + dy > size[1] or tx + dx > size[0] or ty + dy > size[1]:
# print(px,py,dx,dy)
continue
flag = True
for j in range(self.max_num):
if j == i:
continue
px_, py_ = pos_list[j]
tx_, ty_ = target_list[j]
if px_ == py_ and py_ == 0:
# size_list.append((0,0))
continue
if j > len(size_list) - 1:
dx_, dy_ = 2, 2
else:
dx_, dy_ = size_list[j]
lx = max(px, px_)
ly = max(py, py_)
rx = min(px + dx, px_ + dx_) - 1
ry = min(py + dy, py_ + dy_) - 1
if lx <= rx and ly <= ry:
flag = False
break
lx = max(tx, tx_)
ly = max(ty, ty_)
rx = min(tx + dx, tx_ + dx_) - 1
ry = min(ty + dy, ty_ + dy_) - 1
if lx <= rx and ly <= ry:
flag = False
break
# if flag:
# flag = dfs(pos_list[i],target_list[i],(dx,dy))
for wall in wall_list:
if not flag:
break
px_, py_ = wall
tx_, ty_ = wall
dx_, dy_ = 1, 1
lx = max(px, px_)
ly = max(py, py_)
rx = min(px + dx, px_ + dx_) - 1
ry = min(py + dy, py_ + dy_) - 1
if lx <= rx and ly <= ry:
flag = False
break
lx = max(tx, tx_)
ly = max(ty, ty_)
rx = min(tx + dx, tx_ + dx_) - 1
ry = min(ty + dy, ty_ + dy_) - 1
if lx <= rx and ly <= ry:
flag = False
break
if flag:
size_list.append((dx,dy))
break
if reboot_cnt >= 10000:
reboot = True
break
if reboot:
continue
shapes = []
for i in range(self.max_num):
x,y = pos_list[i]
w,d = size_list[i]
pos_list[i] = (x+0.5*w,y+0.5*d)
x,y = target_list[i]
target_list[i] = (x+0.5*w,y+0.5*d)
shape = []
for a in range(w):
for b in range(d):
shape.append((a,b))
shapes.append(shape)
self.setmap(pos_list,target_list,shapes,np.zeros(len(pos_list)),np.zeros(len(pos_list)),wall_list)
break
def getstate_1(self,shape=[64,64]):
state = []
# temp = np.zeros(shape).astype(np.float32)
# tmap = self.map == 1
# oshape = self.map.shape
# for i in range(shape[0]):
# x = int(1.0 * i * oshape[0] / shape[0])
# for j in range(shape[1]):
# y = int(1.0 * j * oshape[1] / shape[1])
# temp[i,j] = tmap[x,y]
# for i in range(oshape[0]):
# x = 1.0*i/oshape[0]*shape[0]
# for j in range(oshape[1]):
# y = 1.0*j/oshape[1]*shape[1]
# temp[x,y] = tmap[i,j]
state.append((self.map == 1).astype(np.float32))
# state.append(temp)
for i in range(len(self.pos)):
# temp = np.zeros(shape).astype(np.float32)
# tmap = self.map == (i+2)
# for i in range(shape[0]):
# x = int(1.0 * i * oshape[0] / shape[0])
# for j in range(shape[1]):
# y = int(1.0 * j * oshape[1] / shape[1])
# temp[i,j] = tmap[x,y]
# state.append(temp)
# temp = np.zeros(shape).astype(np.float32)
# tmap = self.target_map == (i+2)
# for i in range(shape[0]):
# x = int(1.0 * i * oshape[0] / shape[0])
# for j in range(shape[1]):
# y = int(1.0 * j * oshape[1] / shape[1])
# temp[i,j] = tmap[x,y]
# state.append(temp)
state.append((self.map == (i+2)).astype(np.float32))
state.append((self.target_map == (i+2)).astype(np.float32))
return np.transpose(np.array(state),[1,2,0])
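# Sketch of the layout produced by getstate_1() for N movable objects:
# channel 0 is the obstacle mask, then channels (2k+1, 2k+2) are object k's
# current and target masks; the transpose yields an H x W x (2N+1) tensor.
# state = np.transpose(np.array(state), [1, 2, 0])  # (2N+1, H, W) -> (H, W, 2N+1)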
def getstate_2(self, index):
state = []
obs = (self.map == 1).astype(np.float32)
cho = (self.map == (index+2)).astype(np.float32)
cho_t = (self.target_map == (index+2)).astype(np.float32)
oth = np.zeros_like(obs).astype(bool) # np.bool was removed in NumPy 1.24; use the builtin
oth_t = np.zeros_like(obs).astype(bool)
for i in range(len(self.pos)):
if i != index: # compare object indices directly; the +2 offset only applies to map cell values
oth |= self.map == (i+2)
oth_t |= self.target_map == (i+2)
# state.append((self.map == (i+1)).astype(np.float32))
state = [obs,cho,cho_t,oth,oth_t]
return np.transpose(np.array(state),[1,2,0])
def getstate_3(self,shape=[64,64]):
state = []
state.append(np.array(self.map).astype(np.float32))
state.append(np.array(self.target_map).astype(np.float32))
return np.transpose(np.array(state),[1,2,0])
def getmap(self):
return np.array(self.map)
def gettargetmap(self):
return self.target_map
def getconfig(self):
return (self.pos,self.target,self.shape,self.cstate,self.tstate,self.wall)
def getfinished(self):
return deepcopy(self.finished)
def getconflict(self):
num = len(self.finished)
res = np.zeros([num,num,2])
# size = np.zeros([num,2])
# for i,shape in enumerate(self.shape):
# size[i]
return res # NOTE: early return; the axis-overlap code below is unreachable and still references the removed self.size
for axis in range(2):
for i in range(num):
l = np.min([self.pos[i][axis],self.target[i][axis]])
r = np.max([self.pos[i][axis],self.target[i][axis]]) + self.size[i][axis] - 1
for j in range(num):
if (self.pos[j][axis] >= l and self.pos[j][axis] <= r) or (self.pos[j][axis] + self.size[j][axis] >= l and self.pos[j][axis] + self.size[j][axis] <= r):
res[i,j,axis] = 1
return res
def getitem(self, index, state):
shape = self.shape[index]
opoints = np.array(shape)
# for i in range(size[0]):
# for j in range(size[1]):
# opoints.append((i,j))
radian = 2 * pi * state / self.bin
points, bbox = self.rotate(opoints, radian)
return points, bbox
def getcleanmap(self, index):
start = self.pos[index]
# size = self.size[index]
cstate = self.cstate[index]
res = np.array(self.map)
# shape = self.shape[index]
# opoints = np.array(shape)
# opoints = []
# for i in range(size[0]):
# for j in range(size[1]):
# opoints.append((i,j))
points, bbox = self.getitem(index, cstate)
# radian = 2 * pi * cstate / self.bin
# points, bbox = self.rotate(opoints, radian)
xx, yy = start
# xx = max(0, xx - 0.5*(bbox[1,0]-bbox[0,0]+1))
# yy = max(0, yy - 0.5*(bbox[1,1]-bbox[0,1]+1))
for p in points:
x, y = p
x += xx
y += yy
if x >= self.map_size[0] or y >= self.map_size[1]: # the y bound must use the second map dimension
print('wrong coords', x, y)
tx = max(0,min(x,self.map_size[0])) + EPS
ty = max(0,min(y,self.map_size[1])) + EPS
res[min(int(tx),self.map_size[0]-1),min(int(ty),self.map_size[1]-1)] = 0
# x = min(x, self.map_size[0]-1)
# y = min(y, self.map_size[1]-1)
# res[int(x), int(y)] = 0
return res
def __deepcopy__(self, memodict=None):
res = ENV_ablation_wo_multi(self.map_size,self.max_num)
res.map = np.array(self.map)
res.target_map = np.array(self.target_map)
res.pos = np.array(self.pos)
res.target = self.target
res.shape = self.shape
res.cstate = np.array(self.cstate)
res.tstate = self.tstate
res.wall = self.wall
res.state_dict = deepcopy(self.state_dict)
res.finished = np.array(self.finished)
res.shapex = self.shapex
res.shapey = self.shapey
res.pn = self.pn
res.edge = self.edge
res.bound = self.bound
return res
if __name__ == '__main__':
| 34.62938 | 265 | 0.407029 | 31,186 | 247,046 | 3.145578 | 0.00994 | 0.046525 | 0.039919 | 0.016636 | 0.994913 | 0.994302 | 0.994302 | 0.994302 | 0.994302 | 0.994302 | 0 | 0.048071 | 0.465804 | 247,046 | 7,134 | 266 | 34.62938 | 0.695259 | 0.147721 | 0 | 0.986807 | 0 | 0 | 0.002303 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.003958 | null | null | 0.008795 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
0c89a4a5baa6543672681f00d615a9a9339d9a13 | 70 | py | Python | src/shell/builder/__init__.py | hsravat-4590/PathFinder | ef57ab6d3d2565f7b8c2d3e4cff92287d568a433 | [
"MIT"
] | null | null | null | src/shell/builder/__init__.py | hsravat-4590/PathFinder | ef57ab6d3d2565f7b8c2d3e4cff92287d568a433 | [
"MIT"
] | null | null | null | src/shell/builder/__init__.py | hsravat-4590/PathFinder | ef57ab6d3d2565f7b8c2d3e4cff92287d568a433 | [
"MIT"
] | null | null | null | from src.shell.builder import base
from src.shell.builder import tree
| 23.333333 | 34 | 0.828571 | 12 | 70 | 4.833333 | 0.583333 | 0.241379 | 0.413793 | 0.655172 | 0.862069 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.114286 | 70 | 2 | 35 | 35 | 0.935484 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 9 |
0c9f001283a1f7d8a81bd87d7cad30eb19e99f30 | 1,580 | py | Python | tests/transformers/test_python2_future.py | luk3yx/py-backwards | 548c8e9f83619f0efb683e8b7f7d2aa1c594ec62 | [
"MIT"
] | null | null | null | tests/transformers/test_python2_future.py | luk3yx/py-backwards | 548c8e9f83619f0efb683e8b7f7d2aa1c594ec62 | [
"MIT"
] | null | null | null | tests/transformers/test_python2_future.py | luk3yx/py-backwards | 548c8e9f83619f0efb683e8b7f7d2aa1c594ec62 | [
"MIT"
] | null | null | null | import pytest
from py_backwards.utils.helpers import VariablesGenerator
from py_backwards.transformers.python2_future import Python2FutureTransformer
@pytest.mark.parametrize('before, after', [
('print(10)', r'''
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
try:
input, range, str, bytes, chr = raw_input, xrange, unicode, str, unichr
except NameError:
pass
else:
from itertools import ifilter as filter, imap as map, izip as zip
import itertools as _py_backwards_i_0
_py_backwards_i_0.filterfalse, \
_py_backwards_i_0.zip_longest = \
_py_backwards_i_0.ifilterfalse, \
_py_backwards_i_0.izip_longest
del _py_backwards_i_0
print(10)
'''),
('a = 1', r'''
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
try:
input, range, str, bytes, chr = raw_input, xrange, unicode, str, unichr
except NameError:
pass
else:
from itertools import ifilter as filter, imap as map, izip as zip
import itertools as _py_backwards_i_0
_py_backwards_i_0.filterfalse, \
_py_backwards_i_0.zip_longest = \
_py_backwards_i_0.ifilterfalse, \
_py_backwards_i_0.izip_longest
del _py_backwards_i_0
a = 1
''')])
def test_transform(transform, ast, before, after):
VariablesGenerator._counter = 0
code = transform(Python2FutureTransformer, before)
assert ast(code) == ast(after)
| 28.727273 | 77 | 0.747468 | 213 | 1,580 | 5.089202 | 0.28169 | 0.142066 | 0.132841 | 0.143911 | 0.712177 | 0.712177 | 0.712177 | 0.712177 | 0.712177 | 0.712177 | 0 | 0.017041 | 0.182911 | 1,580 | 54 | 78 | 29.259259 | 0.822618 | 0 | 0 | 0.695652 | 0 | 0 | 0.747468 | 0.151899 | 0 | 0 | 0 | 0 | 0.021739 | 1 | 0.021739 | false | 0.043478 | 0.326087 | 0 | 0.347826 | 0.086957 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 7 |
0cd674983c1bf821aa647d7e836ccaf9022938d0 | 24,224 | py | Python | parser/team19/BDTytus/AST/Expresiones.py | webdev188/tytus | 847071edb17b218f51bb969d335a8ec093d13f94 | [
"MIT"
] | 35 | 2020-12-07T03:11:43.000Z | 2021-04-15T17:38:16.000Z | parser/team19/BDTytus/AST/Expresiones.py | webdev188/tytus | 847071edb17b218f51bb969d335a8ec093d13f94 | [
"MIT"
] | 47 | 2020-12-09T01:29:09.000Z | 2021-01-13T05:37:50.000Z | parser/team19/BDTytus/AST/Expresiones.py | webdev188/tytus | 847071edb17b218f51bb969d335a8ec093d13f94 | [
"MIT"
] | 556 | 2020-12-07T03:13:31.000Z | 2021-06-17T17:41:10.000Z | import AST.Nodo as Node
import math as m
from TablaSimbolos.Tipos import *
from TablaSimbolos.TS import *
from Errores.Nodo_Error import *
class Expression(Node.Nodo):
def __init__(self, *args):
if len(args) == 6:
if args[5] == 'math2':
self.val1 = args[1]
self.val2 = args[2]
self.line = args[3]
self.column = args[4]
self.function = args[0]
self.op_type = args[5]
else:
self.exp1 = args[0]
self.exp2 = args[1]
self.op = args[2]
self.line = args[3]
self.column = args[4]
self.op_type = args[5]
self.val = None
self.type = None
elif len(args) == 5:
if args[4] == 'unario':
self.op_type = args[4]
self.type = args[0]
self.val = args[1]
self.line = args[2]
self.column = args[3]
elif args[4] == 'as':
self.type = None
self.val = args[0]
self.asid = args[1]
self.line = args[2]
self.column = args[3]
self.op_type = 'as'
elif args[4] == 'aggregate':
self.val = args[0]
self.asid = args[1]
self.line = args[2]
self.column = args[3]
self.op_type = 'agg'
elif args[4] == 'indice':
self.val = args[0]
self.asid = args[1]
self.line = args[2]
self.column = args[3]
self.op_type = 'in'
elif args[4] == 'math':
self.val = args[1]
self.function = args[0]
self.line = args[2]
self.column = args[3]
self.op_type = 'math'
self.type = None
elif args[4] == 'trigo':
self.val = args[1]
self.function = args[0]
self.line = args[2]
self.column = args[3]
self.op_type = 'trigo'
self.type = None
elif len(args) == 4:
self.line = args[1]
self.column = args[2]
self.val = args[0]
self.op_type = 'valor'
if args[3] == "decimal":
self.type = 'FLOAT'
elif args[3] == "entero":
self.type = 'INT'
elif args[3] == "char":
self.type = 'CHAR'
elif args[3] == "string":
self.type = 'STR'
elif args[3] == "t_true":
self.type = 'BOOLEAN'
self.val = True
elif args[3] == "t_false":
self.type = 'BOOLEAN'
self.val = False
elif len(args) == 3:
self.val = None
self.type = None
self.op_type = 'iden'
self.id = args[0]
self.line = args[1]
self.column = args[2]
def ejecutar(self, TS, Errores):
if self.op_type == 'valor':
return self
elif self.op_type == 'unario':
self.val.ejecutar(TS, Errores)
if self.type == '-':
self.val = -self.val.val
return self
elif self.op_type == 'as' or self.op_type == 'in' or self.op_type == 'agg':
self.val.ejecutar(TS, Errores)
self.asid.ejecutar(TS, Errores)
self.asid = self.asid.id
return self
elif self.op_type == 'math':
if self.function == 'ceil' or self.function == 'ceiling':
self.val.ejecutar(TS, Errores)
self.val = m.ceil(self.val.val) # math.ceil handles both int and float; the math module has no __ceil__
elif self.function == 'abs':
self.val = m.fabs(self.val.val)
elif self.function == 'cbrt':
self.val = m.ceil(self.val.val**(1/3)) # note: the cube root is ceiled here; a plain cbrt would drop the ceil
elif self.function == 'degrees':
self.val = m.degrees(self.val.val)
elif self.function == 'div':
self.val = m.exp(self.val.val) # NOTE: mirrors 'exp'; an integer division needs two operands, which this node does not carry
elif self.function == 'exp':
self.val = m.exp(self.val.val)
elif self.function == 'factorial':
self.val = m.factorial(self.val.val)
elif self.function == 'floor':
self.val = m.floor(self.val.val)
elif self.function == 'gcd':
self.val = m.gcd(self.val.val)
elif self.function == 'ln':
self.val = m.log(self.val.val)
elif self.function == 'log':
self.val = m.log10(self.val.val)
elif self.function == 'pi':
self.val = m.pi
return self
elif self.op_type == 'trigo':
if self.function == 'acos':
self.val = m.acos(self.val.val)
elif self.function == 'acosd':
self.val = m.degrees(m.acos(self.val.val))
elif self.function == 'asin':
self.val = m.asin(self.val.val)
elif self.function == 'asind':
self.val = m.degrees(m.asin(self.val.val))
elif self.function == 'atan':
self.val = m.atan(self.val.val)
elif self.function == 'atand':
self.val = m.degrees(m.atan(self.val.val))
elif self.function == 'cos':
self.val = m.cos(self.val.val)
elif self.function == 'cosd':
self.val = m.cos(m.radians(self.val.val))
elif self.function == 'sin':
self.val = m.sin(self.val.val)
elif self.function == 'sind':
self.val = m.sin(m.radians(self.val.val))
elif self.function == 'tan':
self.val = m.tan(self.val.val)
elif self.function == 'tand':
self.val = m.tan(m.radians(self.val.val))
return self
elif self.op_type == 'iden':
return self
elif self.op_type == 'Aritmetica':
val1 = self.exp1.ejecutar(TS, Errores)
val2 = self.exp2.ejecutar(TS, Errores)
#----------------------------------------------------------------------> validate operations with int
if isinstance(val1.val, int):
if isinstance(val2.val, int):
if self.op == '+':
self.val = val1.val + val2.val
elif self.op == '-':
self.val = val1.val - val2.val
elif self.op == '*':
self.val = val1.val * val2.val
elif self.op == '/':
if val2.val != 0:
self.val = val1.val / val2.val
else:
Errores.insertar(
Nodo_Error("Semantico", "No es posible division entre 0", self.line,
self.column))
return TIPO_DATOS.ERROR
elif self.op == '%':
self.val = val1.val % val2.val
elif self.op == '^':
self.val = pow(val1.val, val2.val)
return self
elif isinstance(val2.val, float):
if self.op == '+':
self.val = val1.val + val2.val
elif self.op == '-':
self.val = val1.val - val2.val
elif self.op == '*':
self.val = val1.val * val2.val
elif self.op == '/':
if val2.val != 0.0:
self.val = val1.val / val2.val
else:
Errores.insertar(
Nodo_Error("Semantico", "No es posible division entre 0", self.line,
self.column))
return TIPO_DATOS.ERROR
elif self.op == '%':
self.val = val1.val % val2.val
elif self.op == '^':
self.val = pow(val1.val, val2.val)
return self
elif isinstance(val2.val, bool):
if self.op == '+':
self.val = val1.val + val2.val
elif self.op == '-':
self.val = val1.val - val2.val
elif self.op == '*':
self.val = val1.val * val2.val
elif self.op == '/':
if val2 != 0 or val2 is not False:
self.val = val1.val / val2.val
else:
Errores.insertar(
Nodo_Error("Semantico", "No es posible division entre 0", self.line,
self.column))
return TIPO_DATOS.ERROR
elif self.op == '%':
self.val = val1.val % val2.val
elif self.op == '^':
self.val = pow(val1.val, val2.val)
return self
else:
Errores.insertar(Nodo_Error("Semantico", "No es posible ejecutar la operacion \'"+ str(self.op) +"\' con los tipos de datos \'"+str(val1.type)+"\' y " + "\'" + str(val2.type) + "\' en", self.line, self.column))
return TIPO_DATOS.ERROR
#----------------------------------------------------------------------> validate operations with FLOAT
elif isinstance(val1.val, float):
if isinstance(val2.val, int):
if self.op == '+':
self.val = val1.val + val2.val
elif self.op == '-':
self.val = val1.val - val2.val
elif self.op == '*':
self.val = val1.val * val2.val
elif self.op == '/':
if val2.val != 0:
self.val = val1.val / val2.val
else:
Errores.insertar(
Nodo_Error("Semantico", "No es posible division entre 0", self.line,
self.column))
return TIPO_DATOS.ERROR
elif self.op == '%':
self.val = val1.val % val2.val
elif self.op == '^':
self.val = pow(val1.val, val2.val)
return self
elif isinstance(val2.val, float):
if self.op == '+':
self.val = val1.val + val2.val
elif self.op == '-':
self.val = val1.val - val2.val
elif self.op == '*':
self.val = val1.val * val2.val
elif self.op == '/':
if val2.val != 0.0:
self.val = val1.val / val2.val
else:
Errores.insertar(
Nodo_Error("Semantico", "No es posible division entre 0", self.line,
self.column))
return TIPO_DATOS.ERROR
elif self.op == '%':
self.val = val1.val % val2.val
elif self.op == '^':
self.val = pow(val1.val, val2.val)
return self
elif isinstance(val2.val, bool):
if self.op == '+':
self.val = val1.val + val2.val
elif self.op == '-':
self.val = val1.val - val2.val
elif self.op == '*':
self.val = val1.val * val2.val
elif self.op == '/':
if val2 != 0 or val2 is not False:
self.val = val1.val / val2.val
else:
Errores.insertar(
Nodo_Error("Semantico", "No es posible division entre 0", self.line,
self.column))
return TIPO_DATOS.ERROR
elif self.op == '%':
self.val = val1.val % val2.val
elif self.op == '^':
self.val = pow(val1.val, val2.val)
return self
else:
Errores.insertar(Nodo_Error("Semantico", "No es posible ejecutar la operacion \'"+ str(self.op) +"\' con los tipos de datos \'"+str(val1.type)+"\' y " + "\'" + str(val2.type) + "\' en", self.line, self.column))
return TIPO_DATOS.ERROR
# ----------------------------------------------------------------------> validate operations with BOOLEAN
elif isinstance(val1.val, bool):
if isinstance(val2.val, int):
if self.op == '+':
self.val = val1.val + val2.val
elif self.op == '-':
self.val = val1.val - val2.val
elif self.op == '*':
self.val = val1.val * val2.val
elif self.op == '/':
if val2.val != 0:
self.val = val1.val / val2.val
else:
Errores.insertar(
Nodo_Error("Semantico", "No es posible division entre 0", self.line,
self.column))
return TIPO_DATOS.ERROR
elif self.op == '%':
self.val = val1.val % val2.val
elif self.op == '^':
self.val = pow(val1.val, val2.val)
return self
elif isinstance(val2.val, float):
if self.op == '+':
self.val = val1.val + val2.val
elif self.op == '-':
self.val = val1.val - val2.val
elif self.op == '*':
self.val = val1.val * val2.val
elif self.op == '/':
if val2.val != 0.0:
self.val = val1.val / val2.val
else:
Errores.insertar(
Nodo_Error("Semantico", "No es posible division entre 0", self.line,
self.column))
return TIPO_DATOS.ERROR
elif self.op == '%':
self.val = val1.val % val2.val
elif self.op == '^':
self.val = pow(val1.val, val2.val)
return self
elif isinstance(val2.val, bool):
if self.op == '+':
self.val = val1.val + val2.val
elif self.op == '-':
self.val = val1.val - val2.val
elif self.op == '*':
self.val = val1.val * val2.val
elif self.op == '/':
if val2 != 0 or val2 is not False:
self.val = val1.val / val2.val
else:
Errores.insertar(
Nodo_Error("Semantico", "No es posible division entre 0", self.line,
self.column))
return TIPO_DATOS.ERROR
elif self.op == '%':
self.val = val1.val % val2.val
elif self.op == '^':
self.val = pow(val1.val, val2.val)
return self
else:
Errores.insertar(Nodo_Error("Semantico", "No es posible ejecutar la operacion \'"+ str(self.op) +"\' con los tipos de datos \'"+str(val1.type)+"\' y " + "\'" + str(val2.type) + "\' en", self.line, self.column))
return TIPO_DATOS.ERROR
elif self.op_type == 'Relacional':
val1 = self.exp1.ejecutar(TS, Errores)
val2 = self.exp2.ejecutar(TS, Errores)
# ----------------------------------------------------------------------> validate operations with int
while val1 != TIPO_DATOS.ERROR:
if isinstance(val1.val, int):
if isinstance(val2.val, int):
if self.op == '<':
self.val = val1.val < val2.val
elif self.op == '>':
self.val = val1.val > val2.val
elif self.op == '<>':
self.val = val1.val != val2.val
elif self.op == '!=':
self.val = val1.val != val2.val
elif self.op == '>=':
self.val = val1.val >= val2.val
elif self.op == '<=':
self.val = val1.val <= val2.val
elif self.op == '=':
self.val = val1.val == val2.val
return self
elif isinstance(val2.val, float):
if self.op == '<':
self.val = val1.val < val2.val
elif self.op == '>':
self.val = val1.val > val2.val
elif self.op == '<>':
self.val = val1.val != val2.val
elif self.op == '!=':
self.val = val1.val != val2.val
elif self.op == '>=':
self.val = val1.val >= val2.val
elif self.op == '<=':
self.val = val1.val <= val2.val
elif self.op == '=':
self.val = val1.val == val2.val
return self
elif isinstance(val2.val, str):
if self.op == '<':
self.val = val1.val < int(val2.val)
elif self.op == '>':
self.val = val1.val > int(val2.val)
elif self.op == '<>':
self.val = val1.val != int(val2.val)
elif self.op == '!=':
self.val = val1.val != int(val2.val)
elif self.op == '>=':
self.val = val1.val >= int(val2.val)
elif self.op == '<=':
self.val = val1.val <= int(val2.val)
elif self.op == '=':
self.val = val1.val == int(val2.val)
return self
else:
Errores.insertar(Nodo_Error("Semantico", "No es posible ejecutar la operacion '" + str(
self.op) + "' con los tipos de datos \'" + str(val2.type) + "\' y " + "'" + str(
val2.type) + "' en", self.line, self.column))
return TIPO_DATOS.ERROR
# ----------------------------------------------------------------------> validate operations with FLOAT
elif isinstance(val1.val, float):
if isinstance(val2.val, int):
if self.op == '<':
self.val = val1.val < val2.val
elif self.op == '>':
self.val = val1.val > val2.val
elif self.op == '<>':
self.val = val1.val != val2.val
elif self.op == '!=':
self.val = val1.val != val2.val
elif self.op == '>=':
self.val = val1.val >= val2.val
elif self.op == '<=':
self.val = val1.val <= val2.val
elif self.op == '=':
self.val = val1.val == val2.val
return self
elif isinstance(val2.val, float):
if self.op == '<':
self.val = val1.val < val2.val
elif self.op == '>':
self.val = val1.val > val2.val
elif self.op == '<>':
self.val = val1.val != val2.val
elif self.op == '!=':
self.val = val1.val != val2.val
elif self.op == '>=':
self.val = val1.val >= val2.val
elif self.op == '<=':
self.val = val1.val <= val2.val
elif self.op == '=':
self.val = val1.val == val2.val
return self
elif isinstance(val2.val, str):
if self.op == '<':
self.val = val1.val < float(val2.val)
elif self.op == '>':
self.val = val1.val > float(val2.val)
elif self.op == '<>':
self.val = val1.val != float(val2.val)
elif self.op == '!=':
self.val = val1.val != float(val2.val)
elif self.op == '>=':
self.val = val1.val >= float(val2.val)
elif self.op == '<=':
self.val = val1.val <= float(val2.val)
elif self.op == '=':
self.val = val1.val == float(val2.val)
return self
else:
Errores.insertar(Nodo_Error("Semantico", "No es posible ejecutar la operacion \'" + str(
self.op) + "\' con los tipos de datos \'" + str(val1.type) + "\' y " + "\'" + str(
val2.type) + "\' en", self.line, self.column))
return TIPO_DATOS.ERROR
elif val1.op_type == 'iden':
val1.val = val2.val
return val1
else:
return TIPO_DATOS.ERROR
return TIPO_DATOS.ERROR
elif self.op_type == 'Logica':
val1 = self.exp1.ejecutar(TS, Errores)
val2 = self.exp2.ejecutar(TS, Errores)
if isinstance(val1.val, bool):
if isinstance(val2.val, bool):
if self.op == 'and':
self.val = val1.val and val2.val
elif self.op == 'or':
self.val = val1.val or val2.val
return self
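# Hedged sketch of the dispatch pattern ejecutar() follows above: resolve
# both operands, then branch on the Python type of each value and apply the
# operator, reporting a semantic error for division by zero:
# def apply(op, a, b):
#     if op == '+': return a + b
#     if op == '/': return a / b if b != 0 else None  # None -> report error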
def getC3D(self, TS):
# NOTE: this emitter references self.Exp and self.cast, which this class never
# sets; it appears to be carried over from a cast-expression node.
codigo = ""
codigo += self.Exp.getC3D(TS)
temp = TS.getTemp()
codigo += temp + '= (' + self.cast + ')' + self.Exp.temporal + ';\n'
self.temporal = temp
return codigo
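# Hedged sketch of the three-address code getC3D() aims to emit, assuming
# TS.getTemp() hands out fresh names t0, t1, ...:
#   t0 = <code for the operand>
#   t1 = (FLOAT) t0;
# with self.temporal = 't1' left for the parent node to consume.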
def graficarasc(self, padre, grafica):
pass
| 47.3125 | 234 | 0.37273 | 2,327 | 24,224 | 3.8526 | 0.057585 | 0.128054 | 0.115337 | 0.138985 | 0.847964 | 0.816843 | 0.760625 | 0.751924 | 0.706302 | 0.698717 | 0 | 0.028772 | 0.499257 | 24,224 | 511 | 235 | 47.405088 | 0.710305 | 0.021425 | 0 | 0.709939 | 0 | 0 | 0.05089 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.008114 | false | 0.002028 | 0.010142 | 0 | 0.10142 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
0b21a6977f770476fda953153243d2f75d2c9941 | 258,048 | py | Python | tests/unit/gapic/compute_v1/test_region_instance_group_managers.py | LaudateCorpus1/python-compute | a36c637f153c7b4ef49bb6a78c8b09f3746e7af1 | [
"Apache-2.0"
] | null | null | null | tests/unit/gapic/compute_v1/test_region_instance_group_managers.py | LaudateCorpus1/python-compute | a36c637f153c7b4ef49bb6a78c8b09f3746e7af1 | [
"Apache-2.0"
] | null | null | null | tests/unit/gapic/compute_v1/test_region_instance_group_managers.py | LaudateCorpus1/python-compute | a36c637f153c7b4ef49bb6a78c8b09f3746e7af1 | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import os
import mock
import grpc
from grpc.experimental import aio
import json
import math
import pytest
from proto.marshal.rules.dates import DurationRule, TimestampRule
from requests import Response
from requests import Request, PreparedRequest
from requests.sessions import Session
from google.api_core import client_options
from google.api_core import exceptions as core_exceptions
from google.api_core import gapic_v1
from google.api_core import grpc_helpers
from google.api_core import grpc_helpers_async
from google.api_core import path_template
from google.auth import credentials as ga_credentials
from google.auth.exceptions import MutualTLSChannelError
from google.cloud.compute_v1.services.region_instance_group_managers import (
RegionInstanceGroupManagersClient,
)
from google.cloud.compute_v1.services.region_instance_group_managers import pagers
from google.cloud.compute_v1.services.region_instance_group_managers import transports
from google.cloud.compute_v1.types import compute
from google.oauth2 import service_account
import google.auth
def client_cert_source_callback():
return b"cert bytes", b"key bytes"
# If default endpoint is localhost, then default mtls endpoint will be the same.
# This method modifies the default endpoint so the client can produce a different
# mtls endpoint for endpoint testing purposes.
def modify_default_endpoint(client):
return (
"foo.googleapis.com"
if ("localhost" in client.DEFAULT_ENDPOINT)
else client.DEFAULT_ENDPOINT
)
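# The assertions below pin down the endpoint rewrite: "*.googleapis.com" hosts
# gain an "mtls" label after the leading component, while already-mTLS and
# non-Google endpoints pass through unchanged.  A minimal sketch of that
# transformation (an illustrative assumption, not the client's actual code):
#
#     def to_mtls(endpoint):
#         # (ignoring the None input case exercised in the first assertion)
#         if not endpoint.endswith("googleapis.com") or ".mtls." in endpoint:
#             return endpoint
#         name, _, rest = endpoint.partition(".")
#         return name + ".mtls." + rest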
def test__get_default_mtls_endpoint():
api_endpoint = "example.googleapis.com"
api_mtls_endpoint = "example.mtls.googleapis.com"
sandbox_endpoint = "example.sandbox.googleapis.com"
sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com"
non_googleapi = "api.example.com"
assert RegionInstanceGroupManagersClient._get_default_mtls_endpoint(None) is None
assert (
RegionInstanceGroupManagersClient._get_default_mtls_endpoint(api_endpoint)
== api_mtls_endpoint
)
assert (
RegionInstanceGroupManagersClient._get_default_mtls_endpoint(api_mtls_endpoint)
== api_mtls_endpoint
)
assert (
RegionInstanceGroupManagersClient._get_default_mtls_endpoint(sandbox_endpoint)
== sandbox_mtls_endpoint
)
assert (
RegionInstanceGroupManagersClient._get_default_mtls_endpoint(
sandbox_mtls_endpoint
)
== sandbox_mtls_endpoint
)
assert (
RegionInstanceGroupManagersClient._get_default_mtls_endpoint(non_googleapi)
== non_googleapi
)
@pytest.mark.parametrize("client_class", [RegionInstanceGroupManagersClient,])
def test_region_instance_group_managers_client_from_service_account_info(client_class):
creds = ga_credentials.AnonymousCredentials()
with mock.patch.object(
service_account.Credentials, "from_service_account_info"
) as factory:
factory.return_value = creds
info = {"valid": True}
client = client_class.from_service_account_info(info)
assert client.transport._credentials == creds
assert isinstance(client, client_class)
assert client.transport._host == "compute.googleapis.com:443"
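# The next test verifies the self-signed JWT toggle: with_always_use_jwt_access
# should be called on the service-account credentials only when the transport is
# constructed with always_use_jwt_access=True, and not called otherwise.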
@pytest.mark.parametrize(
"transport_class,transport_name",
[(transports.RegionInstanceGroupManagersRestTransport, "rest"),],
)
def test_region_instance_group_managers_client_service_account_always_use_jwt(
transport_class, transport_name
):
with mock.patch.object(
service_account.Credentials, "with_always_use_jwt_access", create=True
) as use_jwt:
creds = service_account.Credentials(None, None, None)
transport = transport_class(credentials=creds, always_use_jwt_access=True)
use_jwt.assert_called_once_with(True)
with mock.patch.object(
service_account.Credentials, "with_always_use_jwt_access", create=True
) as use_jwt:
creds = service_account.Credentials(None, None, None)
transport = transport_class(credentials=creds, always_use_jwt_access=False)
use_jwt.assert_not_called()
@pytest.mark.parametrize("client_class", [RegionInstanceGroupManagersClient,])
def test_region_instance_group_managers_client_from_service_account_file(client_class):
creds = ga_credentials.AnonymousCredentials()
with mock.patch.object(
service_account.Credentials, "from_service_account_file"
) as factory:
factory.return_value = creds
client = client_class.from_service_account_file("dummy/file/path.json")
assert client.transport._credentials == creds
assert isinstance(client, client_class)
client = client_class.from_service_account_json("dummy/file/path.json")
assert client.transport._credentials == creds
assert isinstance(client, client_class)
assert client.transport._host == "compute.googleapis.com:443"
def test_region_instance_group_managers_client_get_transport_class():
transport = RegionInstanceGroupManagersClient.get_transport_class()
available_transports = [
transports.RegionInstanceGroupManagersRestTransport,
]
assert transport in available_transports
transport = RegionInstanceGroupManagersClient.get_transport_class("rest")
assert transport == transports.RegionInstanceGroupManagersRestTransport
@pytest.mark.parametrize(
"client_class,transport_class,transport_name",
[
(
RegionInstanceGroupManagersClient,
transports.RegionInstanceGroupManagersRestTransport,
"rest",
),
],
)
@mock.patch.object(
RegionInstanceGroupManagersClient,
"DEFAULT_ENDPOINT",
modify_default_endpoint(RegionInstanceGroupManagersClient),
)
def test_region_instance_group_managers_client_client_options(
client_class, transport_class, transport_name
):
# Check that if channel is provided we won't create a new one.
with mock.patch.object(
RegionInstanceGroupManagersClient, "get_transport_class"
) as gtc:
transport = transport_class(credentials=ga_credentials.AnonymousCredentials())
client = client_class(transport=transport)
gtc.assert_not_called()
# Check that if channel is provided via str we will create a new one.
with mock.patch.object(
RegionInstanceGroupManagersClient, "get_transport_class"
) as gtc:
client = client_class(transport=transport_name)
gtc.assert_called()
# Check the case api_endpoint is provided.
options = client_options.ClientOptions(api_endpoint="squid.clam.whelk")
with mock.patch.object(transport_class, "__init__") as patched:
patched.return_value = None
client = client_class(transport=transport_name, client_options=options)
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
host="squid.clam.whelk",
scopes=None,
client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
always_use_jwt_access=True,
)
# Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
# "never".
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
with mock.patch.object(transport_class, "__init__") as patched:
patched.return_value = None
client = client_class(transport=transport_name)
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
host=client.DEFAULT_ENDPOINT,
scopes=None,
client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
always_use_jwt_access=True,
)
# Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
# "always".
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
with mock.patch.object(transport_class, "__init__") as patched:
patched.return_value = None
client = client_class(transport=transport_name)
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
host=client.DEFAULT_MTLS_ENDPOINT,
scopes=None,
client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
always_use_jwt_access=True,
)
# Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has
# unsupported value.
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}):
with pytest.raises(MutualTLSChannelError):
client = client_class(transport=transport_name)
# Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value.
with mock.patch.dict(
os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}
):
with pytest.raises(ValueError):
client = client_class(transport=transport_name)
# Check the case quota_project_id is provided
options = client_options.ClientOptions(quota_project_id="octopus")
with mock.patch.object(transport_class, "__init__") as patched:
patched.return_value = None
client = client_class(client_options=options, transport=transport_name)
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
host=client.DEFAULT_ENDPOINT,
scopes=None,
client_cert_source_for_mtls=None,
quota_project_id="octopus",
client_info=transports.base.DEFAULT_CLIENT_INFO,
always_use_jwt_access=True,
)
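# Endpoint/certificate selection exercised above and below, as pinned by the
# assertions in these tests:
#
#   GOOGLE_API_USE_MTLS_ENDPOINT = "never"  -> DEFAULT_ENDPOINT, no client cert
#   GOOGLE_API_USE_MTLS_ENDPOINT = "always" -> DEFAULT_MTLS_ENDPOINT
#   GOOGLE_API_USE_MTLS_ENDPOINT = "auto"   -> DEFAULT_MTLS_ENDPOINT only when
#       GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and a client cert is available
#   any other value                         -> MutualTLSChannelError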
@pytest.mark.parametrize(
"client_class,transport_class,transport_name,use_client_cert_env",
[
(
RegionInstanceGroupManagersClient,
transports.RegionInstanceGroupManagersRestTransport,
"rest",
"true",
),
(
RegionInstanceGroupManagersClient,
transports.RegionInstanceGroupManagersRestTransport,
"rest",
"false",
),
],
)
@mock.patch.object(
RegionInstanceGroupManagersClient,
"DEFAULT_ENDPOINT",
modify_default_endpoint(RegionInstanceGroupManagersClient),
)
@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"})
def test_region_instance_group_managers_client_mtls_env_auto(
client_class, transport_class, transport_name, use_client_cert_env
):
# This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default
# mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists.
# Check the case client_cert_source is provided. Whether client cert is used depends on
# GOOGLE_API_USE_CLIENT_CERTIFICATE value.
with mock.patch.dict(
os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
):
options = client_options.ClientOptions(
client_cert_source=client_cert_source_callback
)
with mock.patch.object(transport_class, "__init__") as patched:
patched.return_value = None
client = client_class(client_options=options, transport=transport_name)
if use_client_cert_env == "false":
expected_client_cert_source = None
expected_host = client.DEFAULT_ENDPOINT
else:
expected_client_cert_source = client_cert_source_callback
expected_host = client.DEFAULT_MTLS_ENDPOINT
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
host=expected_host,
scopes=None,
client_cert_source_for_mtls=expected_client_cert_source,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
always_use_jwt_access=True,
)
# Check the case ADC client cert is provided. Whether client cert is used depends on
# GOOGLE_API_USE_CLIENT_CERTIFICATE value.
with mock.patch.dict(
os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
):
with mock.patch.object(transport_class, "__init__") as patched:
with mock.patch(
"google.auth.transport.mtls.has_default_client_cert_source",
return_value=True,
):
with mock.patch(
"google.auth.transport.mtls.default_client_cert_source",
return_value=client_cert_source_callback,
):
if use_client_cert_env == "false":
expected_host = client.DEFAULT_ENDPOINT
expected_client_cert_source = None
else:
expected_host = client.DEFAULT_MTLS_ENDPOINT
expected_client_cert_source = client_cert_source_callback
patched.return_value = None
client = client_class(transport=transport_name)
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
host=expected_host,
scopes=None,
client_cert_source_for_mtls=expected_client_cert_source,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
always_use_jwt_access=True,
)
# Check the case client_cert_source and ADC client cert are not provided.
with mock.patch.dict(
os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
):
with mock.patch.object(transport_class, "__init__") as patched:
with mock.patch(
"google.auth.transport.mtls.has_default_client_cert_source",
return_value=False,
):
patched.return_value = None
client = client_class(transport=transport_name)
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
host=client.DEFAULT_ENDPOINT,
scopes=None,
client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
always_use_jwt_access=True,
)
@pytest.mark.parametrize("client_class", [RegionInstanceGroupManagersClient])
@mock.patch.object(
RegionInstanceGroupManagersClient,
"DEFAULT_ENDPOINT",
modify_default_endpoint(RegionInstanceGroupManagersClient),
)
def test_region_instance_group_managers_client_get_mtls_endpoint_and_cert_source(
client_class,
):
mock_client_cert_source = mock.Mock()
# Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true".
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}):
mock_api_endpoint = "foo"
options = client_options.ClientOptions(
client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint
)
api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(
options
)
assert api_endpoint == mock_api_endpoint
assert cert_source == mock_client_cert_source
# Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false".
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}):
mock_client_cert_source = mock.Mock()
mock_api_endpoint = "foo"
options = client_options.ClientOptions(
client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint
)
api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(
options
)
assert api_endpoint == mock_api_endpoint
assert cert_source is None
# Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never".
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source()
assert api_endpoint == client_class.DEFAULT_ENDPOINT
assert cert_source is None
# Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always".
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source()
assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT
assert cert_source is None
# Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist.
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}):
with mock.patch(
"google.auth.transport.mtls.has_default_client_cert_source",
return_value=False,
):
api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source()
assert api_endpoint == client_class.DEFAULT_ENDPOINT
assert cert_source is None
# Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists.
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}):
with mock.patch(
"google.auth.transport.mtls.has_default_client_cert_source",
return_value=True,
):
with mock.patch(
"google.auth.transport.mtls.default_client_cert_source",
return_value=mock_client_cert_source,
):
(
api_endpoint,
cert_source,
) = client_class.get_mtls_endpoint_and_cert_source()
assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT
assert cert_source == mock_client_cert_source
@pytest.mark.parametrize(
"client_class,transport_class,transport_name",
[
(
RegionInstanceGroupManagersClient,
transports.RegionInstanceGroupManagersRestTransport,
"rest",
),
],
)
def test_region_instance_group_managers_client_client_options_scopes(
client_class, transport_class, transport_name
):
# Check the case scopes are provided.
options = client_options.ClientOptions(scopes=["1", "2"],)
with mock.patch.object(transport_class, "__init__") as patched:
patched.return_value = None
client = client_class(client_options=options, transport=transport_name)
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
host=client.DEFAULT_ENDPOINT,
scopes=["1", "2"],
client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
always_use_jwt_access=True,
)
@pytest.mark.parametrize(
"client_class,transport_class,transport_name",
[
(
RegionInstanceGroupManagersClient,
transports.RegionInstanceGroupManagersRestTransport,
"rest",
),
],
)
def test_region_instance_group_managers_client_client_options_credentials_file(
client_class, transport_class, transport_name
):
# Check the case credentials file is provided.
options = client_options.ClientOptions(credentials_file="credentials.json")
with mock.patch.object(transport_class, "__init__") as patched:
patched.return_value = None
client = client_class(client_options=options, transport=transport_name)
patched.assert_called_once_with(
credentials=None,
credentials_file="credentials.json",
host=client.DEFAULT_ENDPOINT,
scopes=None,
client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
always_use_jwt_access=True,
)
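# The REST method tests that follow all use the same recipe: build a request
# dict that satisfies HTTP transcoding, mock the transport session's request()
# call, wrap a serialized compute.Operation in a requests.Response with status
# 200, and assert that the client surfaces every Operation field unchanged.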
@pytest.mark.parametrize(
"request_type", [compute.AbandonInstancesRegionInstanceGroupManagerRequest, dict,]
)
def test_abandon_instances_unary_rest(request_type):
client = RegionInstanceGroupManagersClient(
credentials=ga_credentials.AnonymousCredentials(), transport="rest",
)
# send a request that will satisfy transcoding
request_init = {
"project": "sample1",
"region": "sample2",
"instance_group_manager": "sample3",
}
request_init[
"region_instance_group_managers_abandon_instances_request_resource"
] = {"instances": ["instances_value_1", "instances_value_2"]}
request = request_type(request_init)
# Mock the http request call within the method and fake a response.
with mock.patch.object(type(client.transport._session), "request") as req:
# Designate an appropriate value for the returned response.
return_value = compute.Operation(
client_operation_id="client_operation_id_value",
creation_timestamp="creation_timestamp_value",
description="description_value",
end_time="end_time_value",
http_error_message="http_error_message_value",
http_error_status_code=2374,
id=205,
insert_time="insert_time_value",
kind="kind_value",
name="name_value",
operation_group_id="operation_group_id_value",
operation_type="operation_type_value",
progress=885,
region="region_value",
self_link="self_link_value",
start_time="start_time_value",
status=compute.Operation.Status.DONE,
status_message="status_message_value",
target_id=947,
target_link="target_link_value",
user="user_value",
zone="zone_value",
)
# Wrap the value into a proper Response obj
response_value = Response()
response_value.status_code = 200
json_return_value = compute.Operation.to_json(return_value)
response_value._content = json_return_value.encode("UTF-8")
req.return_value = response_value
response = client.abandon_instances_unary(request)
# Establish that the response is the type that we expect.
assert isinstance(response, compute.Operation)
assert response.client_operation_id == "client_operation_id_value"
assert response.creation_timestamp == "creation_timestamp_value"
assert response.description == "description_value"
assert response.end_time == "end_time_value"
assert response.http_error_message == "http_error_message_value"
assert response.http_error_status_code == 2374
assert response.id == 205
assert response.insert_time == "insert_time_value"
assert response.kind == "kind_value"
assert response.name == "name_value"
assert response.operation_group_id == "operation_group_id_value"
assert response.operation_type == "operation_type_value"
assert response.progress == 885
assert response.region == "region_value"
assert response.self_link == "self_link_value"
assert response.start_time == "start_time_value"
assert response.status == compute.Operation.Status.DONE
assert response.status_message == "status_message_value"
assert response.target_id == 947
assert response.target_link == "target_link_value"
assert response.user == "user_value"
assert response.zone == "zone_value"
def test_abandon_instances_unary_rest_required_fields(
request_type=compute.AbandonInstancesRegionInstanceGroupManagerRequest,
):
transport_class = transports.RegionInstanceGroupManagersRestTransport
request_init = {}
request_init["instance_group_manager"] = ""
request_init["project"] = ""
request_init["region"] = ""
request = request_type(request_init)
jsonified_request = json.loads(
request_type.to_json(
request, including_default_value_fields=False, use_integers_for_enums=False
)
)
# verify fields with default values are dropped
unset_fields = transport_class(
credentials=ga_credentials.AnonymousCredentials()
).abandon_instances._get_unset_required_fields(jsonified_request)
jsonified_request.update(unset_fields)
# verify required fields with default values are now present
jsonified_request["instanceGroupManager"] = "instance_group_manager_value"
jsonified_request["project"] = "project_value"
jsonified_request["region"] = "region_value"
unset_fields = transport_class(
credentials=ga_credentials.AnonymousCredentials()
).abandon_instances._get_unset_required_fields(jsonified_request)
# Check that path parameters and body parameters are not mixing in.
assert not set(unset_fields) - set(("request_id",))
jsonified_request.update(unset_fields)
# verify required fields with non-default values are left alone
assert "instanceGroupManager" in jsonified_request
assert jsonified_request["instanceGroupManager"] == "instance_group_manager_value"
assert "project" in jsonified_request
assert jsonified_request["project"] == "project_value"
assert "region" in jsonified_request
assert jsonified_request["region"] == "region_value"
client = RegionInstanceGroupManagersClient(
credentials=ga_credentials.AnonymousCredentials(), transport="rest",
)
request = request_type(request_init)
# Designate an appropriate value for the returned response.
return_value = compute.Operation()
# Mock the http request call within the method and fake a response.
with mock.patch.object(Session, "request") as req:
# We need to mock transcode() because providing default values
# for required fields will fail the real version if the http_options
# expect actual values for those fields.
with mock.patch.object(path_template, "transcode") as transcode:
# A uri without fields and an empty body will force all the
# request fields to show up in the query_params.
transcode_result = {
"uri": "v1/sample_method",
"method": "post",
"query_params": request_init,
}
transcode_result["body"] = {}
transcode.return_value = transcode_result
response_value = Response()
response_value.status_code = 200
json_return_value = compute.Operation.to_json(return_value)
response_value._content = json_return_value.encode("UTF-8")
req.return_value = response_value
response = client.abandon_instances_unary(request)
expected_params = []
actual_params = req.call_args.kwargs["params"]
assert expected_params == actual_params
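# _get_unset_required_fields reports required fields that are still unset; the
# assertion below is written as a set intersection between the fields carrying
# default values (here just requestId) and the method's full set of required
# fields.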
def test_abandon_instances_unary_rest_unset_required_fields():
transport = transports.RegionInstanceGroupManagersRestTransport(
credentials=ga_credentials.AnonymousCredentials()
)
unset_fields = transport.abandon_instances._get_unset_required_fields({})
assert set(unset_fields) == (
set(("requestId",))
& set(
(
"instanceGroupManager",
"project",
"region",
"regionInstanceGroupManagersAbandonInstancesRequestResource",
)
)
)
def test_abandon_instances_unary_rest_bad_request(
transport: str = "rest",
request_type=compute.AbandonInstancesRegionInstanceGroupManagerRequest,
):
client = RegionInstanceGroupManagersClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# send a request that will satisfy transcoding
request_init = {
"project": "sample1",
"region": "sample2",
"instance_group_manager": "sample3",
}
request_init[
"region_instance_group_managers_abandon_instances_request_resource"
] = {"instances": ["instances_value_1", "instances_value_2"]}
request = request_type(request_init)
# Mock the http request call within the method and fake a BadRequest error.
with mock.patch.object(Session, "request") as req, pytest.raises(
core_exceptions.BadRequest
):
# Wrap the value into a proper Response obj
response_value = Response()
response_value.status_code = 400
response_value.request = Request()
req.return_value = response_value
client.abandon_instances_unary(request)
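# The "flattened" variant calls the method with keyword arguments instead of a
# request object; path_template.validate then confirms that the mocked HTTP
# call targeted the abandonInstances URL with its path parameters expanded.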
def test_abandon_instances_unary_rest_flattened():
client = RegionInstanceGroupManagersClient(
credentials=ga_credentials.AnonymousCredentials(), transport="rest",
)
# Mock the http request call within the method and fake a response.
with mock.patch.object(type(client.transport._session), "request") as req:
# Designate an appropriate value for the returned response.
return_value = compute.Operation()
# Wrap the value into a proper Response obj
response_value = Response()
response_value.status_code = 200
json_return_value = compute.Operation.to_json(return_value)
response_value._content = json_return_value.encode("UTF-8")
req.return_value = response_value
# get arguments that satisfy an http rule for this method
sample_request = {
"project": "sample1",
"region": "sample2",
"instance_group_manager": "sample3",
}
# get truthy value for each flattened field
mock_args = dict(
project="project_value",
region="region_value",
instance_group_manager="instance_group_manager_value",
region_instance_group_managers_abandon_instances_request_resource=compute.RegionInstanceGroupManagersAbandonInstancesRequest(
instances=["instances_value"]
),
)
mock_args.update(sample_request)
client.abandon_instances_unary(**mock_args)
# Establish that the underlying call was made with the expected
# request object values.
assert len(req.mock_calls) == 1
_, args, _ = req.mock_calls[0]
assert path_template.validate(
"https://%s/compute/v1/projects/{project}/regions/{region}/instanceGroupManagers/{instance_group_manager}/abandonInstances"
% client.transport._host,
args[1],
)
def test_abandon_instances_unary_rest_flattened_error(transport: str = "rest"):
client = RegionInstanceGroupManagersClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
client.abandon_instances_unary(
compute.AbandonInstancesRegionInstanceGroupManagerRequest(),
project="project_value",
region="region_value",
instance_group_manager="instance_group_manager_value",
region_instance_group_managers_abandon_instances_request_resource=compute.RegionInstanceGroupManagersAbandonInstancesRequest(
instances=["instances_value"]
),
)
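# Smoke test: constructing a REST client with anonymous credentials must not raise.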
def test_abandon_instances_unary_rest_error():
client = RegionInstanceGroupManagersClient(
credentials=ga_credentials.AnonymousCredentials(), transport="rest"
)
@pytest.mark.parametrize(
"request_type",
[compute.ApplyUpdatesToInstancesRegionInstanceGroupManagerRequest, dict,],
)
def test_apply_updates_to_instances_unary_rest(request_type):
client = RegionInstanceGroupManagersClient(
credentials=ga_credentials.AnonymousCredentials(), transport="rest",
)
# send a request that will satisfy transcoding
request_init = {
"project": "sample1",
"region": "sample2",
"instance_group_manager": "sample3",
}
request_init["region_instance_group_managers_apply_updates_request_resource"] = {
"all_instances": True,
"instances": ["instances_value_1", "instances_value_2"],
"minimal_action": "minimal_action_value",
"most_disruptive_allowed_action": "most_disruptive_allowed_action_value",
}
request = request_type(request_init)
# Mock the http request call within the method and fake a response.
with mock.patch.object(type(client.transport._session), "request") as req:
# Designate an appropriate value for the returned response.
return_value = compute.Operation(
client_operation_id="client_operation_id_value",
creation_timestamp="creation_timestamp_value",
description="description_value",
end_time="end_time_value",
http_error_message="http_error_message_value",
http_error_status_code=2374,
id=205,
insert_time="insert_time_value",
kind="kind_value",
name="name_value",
operation_group_id="operation_group_id_value",
operation_type="operation_type_value",
progress=885,
region="region_value",
self_link="self_link_value",
start_time="start_time_value",
status=compute.Operation.Status.DONE,
status_message="status_message_value",
target_id=947,
target_link="target_link_value",
user="user_value",
zone="zone_value",
)
# Wrap the value into a proper Response obj
response_value = Response()
response_value.status_code = 200
json_return_value = compute.Operation.to_json(return_value)
response_value._content = json_return_value.encode("UTF-8")
req.return_value = response_value
response = client.apply_updates_to_instances_unary(request)
# Establish that the response is the type that we expect.
assert isinstance(response, compute.Operation)
assert response.client_operation_id == "client_operation_id_value"
assert response.creation_timestamp == "creation_timestamp_value"
assert response.description == "description_value"
assert response.end_time == "end_time_value"
assert response.http_error_message == "http_error_message_value"
assert response.http_error_status_code == 2374
assert response.id == 205
assert response.insert_time == "insert_time_value"
assert response.kind == "kind_value"
assert response.name == "name_value"
assert response.operation_group_id == "operation_group_id_value"
assert response.operation_type == "operation_type_value"
assert response.progress == 885
assert response.region == "region_value"
assert response.self_link == "self_link_value"
assert response.start_time == "start_time_value"
assert response.status == compute.Operation.Status.DONE
assert response.status_message == "status_message_value"
assert response.target_id == 947
assert response.target_link == "target_link_value"
assert response.user == "user_value"
assert response.zone == "zone_value"
def test_apply_updates_to_instances_unary_rest_required_fields(
request_type=compute.ApplyUpdatesToInstancesRegionInstanceGroupManagerRequest,
):
transport_class = transports.RegionInstanceGroupManagersRestTransport
request_init = {}
request_init["instance_group_manager"] = ""
request_init["project"] = ""
request_init["region"] = ""
request = request_type(request_init)
jsonified_request = json.loads(
request_type.to_json(
request, including_default_value_fields=False, use_integers_for_enums=False
)
)
# verify fields with default values are dropped
unset_fields = transport_class(
credentials=ga_credentials.AnonymousCredentials()
).apply_updates_to_instances._get_unset_required_fields(jsonified_request)
jsonified_request.update(unset_fields)
# verify required fields with default values are now present
jsonified_request["instanceGroupManager"] = "instance_group_manager_value"
jsonified_request["project"] = "project_value"
jsonified_request["region"] = "region_value"
unset_fields = transport_class(
credentials=ga_credentials.AnonymousCredentials()
).apply_updates_to_instances._get_unset_required_fields(jsonified_request)
jsonified_request.update(unset_fields)
# verify required fields with non-default values are left alone
assert "instanceGroupManager" in jsonified_request
assert jsonified_request["instanceGroupManager"] == "instance_group_manager_value"
assert "project" in jsonified_request
assert jsonified_request["project"] == "project_value"
assert "region" in jsonified_request
assert jsonified_request["region"] == "region_value"
client = RegionInstanceGroupManagersClient(
credentials=ga_credentials.AnonymousCredentials(), transport="rest",
)
request = request_type(request_init)
# Designate an appropriate value for the returned response.
return_value = compute.Operation()
# Mock the http request call within the method and fake a response.
with mock.patch.object(Session, "request") as req:
# We need to mock transcode() because providing default values
# for required fields will fail the real version if the http_options
# expect actual values for those fields.
with mock.patch.object(path_template, "transcode") as transcode:
# A uri without fields and an empty body will force all the
# request fields to show up in the query_params.
transcode_result = {
"uri": "v1/sample_method",
"method": "post",
"query_params": request_init,
}
transcode_result["body"] = {}
transcode.return_value = transcode_result
response_value = Response()
response_value.status_code = 200
json_return_value = compute.Operation.to_json(return_value)
response_value._content = json_return_value.encode("UTF-8")
req.return_value = response_value
response = client.apply_updates_to_instances_unary(request)
expected_params = []
actual_params = req.call_args.kwargs["params"]
assert expected_params == actual_params
def test_apply_updates_to_instances_unary_rest_unset_required_fields():
transport = transports.RegionInstanceGroupManagersRestTransport(
credentials=ga_credentials.AnonymousCredentials()
)
unset_fields = transport.apply_updates_to_instances._get_unset_required_fields({})
assert set(unset_fields) == (
set(())
& set(
(
"instanceGroupManager",
"project",
"region",
"regionInstanceGroupManagersApplyUpdatesRequestResource",
)
)
)
def test_apply_updates_to_instances_unary_rest_bad_request(
transport: str = "rest",
request_type=compute.ApplyUpdatesToInstancesRegionInstanceGroupManagerRequest,
):
client = RegionInstanceGroupManagersClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# send a request that will satisfy transcoding
request_init = {
"project": "sample1",
"region": "sample2",
"instance_group_manager": "sample3",
}
request_init["region_instance_group_managers_apply_updates_request_resource"] = {
"all_instances": True,
"instances": ["instances_value_1", "instances_value_2"],
"minimal_action": "minimal_action_value",
"most_disruptive_allowed_action": "most_disruptive_allowed_action_value",
}
request = request_type(request_init)
# Mock the http request call within the method and fake a BadRequest error.
with mock.patch.object(Session, "request") as req, pytest.raises(
core_exceptions.BadRequest
):
# Wrap the value into a proper Response obj
response_value = Response()
response_value.status_code = 400
response_value.request = Request()
req.return_value = response_value
client.apply_updates_to_instances_unary(request)
def test_apply_updates_to_instances_unary_rest_flattened():
client = RegionInstanceGroupManagersClient(
credentials=ga_credentials.AnonymousCredentials(), transport="rest",
)
# Mock the http request call within the method and fake a response.
with mock.patch.object(type(client.transport._session), "request") as req:
# Designate an appropriate value for the returned response.
return_value = compute.Operation()
# Wrap the value into a proper Response obj
response_value = Response()
response_value.status_code = 200
json_return_value = compute.Operation.to_json(return_value)
response_value._content = json_return_value.encode("UTF-8")
req.return_value = response_value
# get arguments that satisfy an http rule for this method
sample_request = {
"project": "sample1",
"region": "sample2",
"instance_group_manager": "sample3",
}
# get truthy value for each flattened field
mock_args = dict(
project="project_value",
region="region_value",
instance_group_manager="instance_group_manager_value",
region_instance_group_managers_apply_updates_request_resource=compute.RegionInstanceGroupManagersApplyUpdatesRequest(
all_instances=True
),
)
mock_args.update(sample_request)
client.apply_updates_to_instances_unary(**mock_args)
# Establish that the underlying call was made with the expected
# request object values.
assert len(req.mock_calls) == 1
_, args, _ = req.mock_calls[0]
assert path_template.validate(
"https://%s/compute/v1/projects/{project}/regions/{region}/instanceGroupManagers/{instance_group_manager}/applyUpdatesToInstances"
% client.transport._host,
args[1],
)
def test_apply_updates_to_instances_unary_rest_flattened_error(transport: str = "rest"):
client = RegionInstanceGroupManagersClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
client.apply_updates_to_instances_unary(
compute.ApplyUpdatesToInstancesRegionInstanceGroupManagerRequest(),
project="project_value",
region="region_value",
instance_group_manager="instance_group_manager_value",
region_instance_group_managers_apply_updates_request_resource=compute.RegionInstanceGroupManagersApplyUpdatesRequest(
all_instances=True
),
)
def test_apply_updates_to_instances_unary_rest_error():
client = RegionInstanceGroupManagersClient(
credentials=ga_credentials.AnonymousCredentials(), transport="rest"
)
@pytest.mark.parametrize(
"request_type", [compute.CreateInstancesRegionInstanceGroupManagerRequest, dict,]
)
def test_create_instances_unary_rest(request_type):
client = RegionInstanceGroupManagersClient(
credentials=ga_credentials.AnonymousCredentials(), transport="rest",
)
# send a request that will satisfy transcoding
request_init = {
"project": "sample1",
"region": "sample2",
"instance_group_manager": "sample3",
}
request_init["region_instance_group_managers_create_instances_request_resource"] = {
"instances": [
{
"fingerprint": "fingerprint_value",
"name": "name_value",
"preserved_state": {"disks": {}, "metadata": {}},
"status": "status_value",
}
]
}
request = request_type(request_init)
# Mock the http request call within the method and fake a response.
with mock.patch.object(type(client.transport._session), "request") as req:
# Designate an appropriate value for the returned response.
return_value = compute.Operation(
client_operation_id="client_operation_id_value",
creation_timestamp="creation_timestamp_value",
description="description_value",
end_time="end_time_value",
http_error_message="http_error_message_value",
http_error_status_code=2374,
id=205,
insert_time="insert_time_value",
kind="kind_value",
name="name_value",
operation_group_id="operation_group_id_value",
operation_type="operation_type_value",
progress=885,
region="region_value",
self_link="self_link_value",
start_time="start_time_value",
status=compute.Operation.Status.DONE,
status_message="status_message_value",
target_id=947,
target_link="target_link_value",
user="user_value",
zone="zone_value",
)
# Wrap the value into a proper Response obj
response_value = Response()
response_value.status_code = 200
json_return_value = compute.Operation.to_json(return_value)
response_value._content = json_return_value.encode("UTF-8")
req.return_value = response_value
response = client.create_instances_unary(request)
# Establish that the response is the type that we expect.
assert isinstance(response, compute.Operation)
assert response.client_operation_id == "client_operation_id_value"
assert response.creation_timestamp == "creation_timestamp_value"
assert response.description == "description_value"
assert response.end_time == "end_time_value"
assert response.http_error_message == "http_error_message_value"
assert response.http_error_status_code == 2374
assert response.id == 205
assert response.insert_time == "insert_time_value"
assert response.kind == "kind_value"
assert response.name == "name_value"
assert response.operation_group_id == "operation_group_id_value"
assert response.operation_type == "operation_type_value"
assert response.progress == 885
assert response.region == "region_value"
assert response.self_link == "self_link_value"
assert response.start_time == "start_time_value"
assert response.status == compute.Operation.Status.DONE
assert response.status_message == "status_message_value"
assert response.target_id == 947
assert response.target_link == "target_link_value"
assert response.user == "user_value"
assert response.zone == "zone_value"
def test_create_instances_unary_rest_required_fields(
request_type=compute.CreateInstancesRegionInstanceGroupManagerRequest,
):
transport_class = transports.RegionInstanceGroupManagersRestTransport
request_init = {}
request_init["instance_group_manager"] = ""
request_init["project"] = ""
request_init["region"] = ""
request = request_type(request_init)
jsonified_request = json.loads(
request_type.to_json(
request, including_default_value_fields=False, use_integers_for_enums=False
)
)
# verify fields with default values are dropped
unset_fields = transport_class(
credentials=ga_credentials.AnonymousCredentials()
).create_instances._get_unset_required_fields(jsonified_request)
jsonified_request.update(unset_fields)
# verify required fields with default values are now present
jsonified_request["instanceGroupManager"] = "instance_group_manager_value"
jsonified_request["project"] = "project_value"
jsonified_request["region"] = "region_value"
unset_fields = transport_class(
credentials=ga_credentials.AnonymousCredentials()
).create_instances._get_unset_required_fields(jsonified_request)
# Check that path parameters and body parameters are not mixing in.
assert not set(unset_fields) - set(("request_id",))
jsonified_request.update(unset_fields)
# verify required fields with non-default values are left alone
assert "instanceGroupManager" in jsonified_request
assert jsonified_request["instanceGroupManager"] == "instance_group_manager_value"
assert "project" in jsonified_request
assert jsonified_request["project"] == "project_value"
assert "region" in jsonified_request
assert jsonified_request["region"] == "region_value"
client = RegionInstanceGroupManagersClient(
credentials=ga_credentials.AnonymousCredentials(), transport="rest",
)
request = request_type(request_init)
# Designate an appropriate value for the returned response.
return_value = compute.Operation()
# Mock the http request call within the method and fake a response.
with mock.patch.object(Session, "request") as req:
# We need to mock transcode() because providing default values
# for required fields will fail the real version if the http_options
# expect actual values for those fields.
with mock.patch.object(path_template, "transcode") as transcode:
# A uri without fields and an empty body will force all the
# request fields to show up in the query_params.
transcode_result = {
"uri": "v1/sample_method",
"method": "post",
"query_params": request_init,
}
transcode_result["body"] = {}
transcode.return_value = transcode_result
response_value = Response()
response_value.status_code = 200
json_return_value = compute.Operation.to_json(return_value)
response_value._content = json_return_value.encode("UTF-8")
req.return_value = response_value
response = client.create_instances_unary(request)
expected_params = []
actual_params = req.call_args.kwargs["params"]
assert expected_params == actual_params
def test_create_instances_unary_rest_unset_required_fields():
transport = transports.RegionInstanceGroupManagersRestTransport(
credentials=ga_credentials.AnonymousCredentials()
)
unset_fields = transport.create_instances._get_unset_required_fields({})
assert set(unset_fields) == (
set(("requestId",))
& set(
(
"instanceGroupManager",
"project",
"region",
"regionInstanceGroupManagersCreateInstancesRequestResource",
)
)
)
def test_create_instances_unary_rest_bad_request(
transport: str = "rest",
request_type=compute.CreateInstancesRegionInstanceGroupManagerRequest,
):
client = RegionInstanceGroupManagersClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# send a request that will satisfy transcoding
request_init = {
"project": "sample1",
"region": "sample2",
"instance_group_manager": "sample3",
}
request_init["region_instance_group_managers_create_instances_request_resource"] = {
"instances": [
{
"fingerprint": "fingerprint_value",
"name": "name_value",
"preserved_state": {"disks": {}, "metadata": {}},
"status": "status_value",
}
]
}
request = request_type(request_init)
# Mock the http request call within the method and fake a BadRequest error.
with mock.patch.object(Session, "request") as req, pytest.raises(
core_exceptions.BadRequest
):
# Wrap the value into a proper Response obj
response_value = Response()
response_value.status_code = 400
response_value.request = Request()
req.return_value = response_value
client.create_instances_unary(request)
def test_create_instances_unary_rest_flattened():
client = RegionInstanceGroupManagersClient(
credentials=ga_credentials.AnonymousCredentials(), transport="rest",
)
# Mock the http request call within the method and fake a response.
with mock.patch.object(type(client.transport._session), "request") as req:
# Designate an appropriate value for the returned response.
return_value = compute.Operation()
# Wrap the value into a proper Response obj
response_value = Response()
response_value.status_code = 200
json_return_value = compute.Operation.to_json(return_value)
response_value._content = json_return_value.encode("UTF-8")
req.return_value = response_value
# get arguments that satisfy an http rule for this method
sample_request = {
"project": "sample1",
"region": "sample2",
"instance_group_manager": "sample3",
}
# get truthy value for each flattened field
mock_args = dict(
project="project_value",
region="region_value",
instance_group_manager="instance_group_manager_value",
region_instance_group_managers_create_instances_request_resource=compute.RegionInstanceGroupManagersCreateInstancesRequest(
instances=[compute.PerInstanceConfig(fingerprint="fingerprint_value")]
),
)
mock_args.update(sample_request)
client.create_instances_unary(**mock_args)
# Establish that the underlying call was made with the expected
# request object values.
assert len(req.mock_calls) == 1
_, args, _ = req.mock_calls[0]
assert path_template.validate(
"https://%s/compute/v1/projects/{project}/regions/{region}/instanceGroupManagers/{instance_group_manager}/createInstances"
% client.transport._host,
args[1],
)
def test_create_instances_unary_rest_flattened_error(transport: str = "rest"):
client = RegionInstanceGroupManagersClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
client.create_instances_unary(
compute.CreateInstancesRegionInstanceGroupManagerRequest(),
project="project_value",
region="region_value",
instance_group_manager="instance_group_manager_value",
region_instance_group_managers_create_instances_request_resource=compute.RegionInstanceGroupManagersCreateInstancesRequest(
instances=[compute.PerInstanceConfig(fingerprint="fingerprint_value")]
),
)
def test_create_instances_unary_rest_error():
client = RegionInstanceGroupManagersClient(
credentials=ga_credentials.AnonymousCredentials(), transport="rest"
)
@pytest.mark.parametrize(
"request_type", [compute.DeleteRegionInstanceGroupManagerRequest, dict,]
)
def test_delete_unary_rest(request_type):
client = RegionInstanceGroupManagersClient(
credentials=ga_credentials.AnonymousCredentials(), transport="rest",
)
# send a request that will satisfy transcoding
request_init = {
"project": "sample1",
"region": "sample2",
"instance_group_manager": "sample3",
}
request = request_type(request_init)
# Mock the http request call within the method and fake a response.
with mock.patch.object(type(client.transport._session), "request") as req:
# Designate an appropriate value for the returned response.
return_value = compute.Operation(
client_operation_id="client_operation_id_value",
creation_timestamp="creation_timestamp_value",
description="description_value",
end_time="end_time_value",
http_error_message="http_error_message_value",
http_error_status_code=2374,
id=205,
insert_time="insert_time_value",
kind="kind_value",
name="name_value",
operation_group_id="operation_group_id_value",
operation_type="operation_type_value",
progress=885,
region="region_value",
self_link="self_link_value",
start_time="start_time_value",
status=compute.Operation.Status.DONE,
status_message="status_message_value",
target_id=947,
target_link="target_link_value",
user="user_value",
zone="zone_value",
)
# Wrap the value into a proper Response obj
response_value = Response()
response_value.status_code = 200
json_return_value = compute.Operation.to_json(return_value)
response_value._content = json_return_value.encode("UTF-8")
req.return_value = response_value
response = client.delete_unary(request)
# Establish that the response is the type that we expect.
assert isinstance(response, compute.Operation)
assert response.client_operation_id == "client_operation_id_value"
assert response.creation_timestamp == "creation_timestamp_value"
assert response.description == "description_value"
assert response.end_time == "end_time_value"
assert response.http_error_message == "http_error_message_value"
assert response.http_error_status_code == 2374
assert response.id == 205
assert response.insert_time == "insert_time_value"
assert response.kind == "kind_value"
assert response.name == "name_value"
assert response.operation_group_id == "operation_group_id_value"
assert response.operation_type == "operation_type_value"
assert response.progress == 885
assert response.region == "region_value"
assert response.self_link == "self_link_value"
assert response.start_time == "start_time_value"
assert response.status == compute.Operation.Status.DONE
assert response.status_message == "status_message_value"
assert response.target_id == 947
assert response.target_link == "target_link_value"
assert response.user == "user_value"
assert response.zone == "zone_value"
def test_delete_unary_rest_required_fields(
request_type=compute.DeleteRegionInstanceGroupManagerRequest,
):
transport_class = transports.RegionInstanceGroupManagersRestTransport
request_init = {}
request_init["instance_group_manager"] = ""
request_init["project"] = ""
request_init["region"] = ""
request = request_type(request_init)
jsonified_request = json.loads(
request_type.to_json(
request, including_default_value_fields=False, use_integers_for_enums=False
)
)
# verify fields with default values are dropped
unset_fields = transport_class(
credentials=ga_credentials.AnonymousCredentials()
).delete._get_unset_required_fields(jsonified_request)
jsonified_request.update(unset_fields)
# verify required fields with default values are now present
jsonified_request["instanceGroupManager"] = "instance_group_manager_value"
jsonified_request["project"] = "project_value"
jsonified_request["region"] = "region_value"
unset_fields = transport_class(
credentials=ga_credentials.AnonymousCredentials()
).delete._get_unset_required_fields(jsonified_request)
# Check that path parameters and body parameters are not mixing in.
assert not set(unset_fields) - set(("request_id",))
jsonified_request.update(unset_fields)
# verify required fields with non-default values are left alone
assert "instanceGroupManager" in jsonified_request
assert jsonified_request["instanceGroupManager"] == "instance_group_manager_value"
assert "project" in jsonified_request
assert jsonified_request["project"] == "project_value"
assert "region" in jsonified_request
assert jsonified_request["region"] == "region_value"
client = RegionInstanceGroupManagersClient(
credentials=ga_credentials.AnonymousCredentials(), transport="rest",
)
request = request_type(request_init)
# Designate an appropriate value for the returned response.
return_value = compute.Operation()
# Mock the http request call within the method and fake a response.
with mock.patch.object(Session, "request") as req:
# We need to mock transcode() because providing default values
# for required fields will fail the real version if the http_options
# expect actual values for those fields.
with mock.patch.object(path_template, "transcode") as transcode:
# A uri without fields and an empty body will force all the
# request fields to show up in the query_params.
transcode_result = {
"uri": "v1/sample_method",
"method": "delete",
"query_params": request_init,
}
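# Unlike the POST-based methods above, DELETE carries no request body, so this
# transcode result deliberately omits the "body" key.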
transcode.return_value = transcode_result
response_value = Response()
response_value.status_code = 200
json_return_value = compute.Operation.to_json(return_value)
response_value._content = json_return_value.encode("UTF-8")
req.return_value = response_value
response = client.delete_unary(request)
expected_params = []
actual_params = req.call_args.kwargs["params"]
assert expected_params == actual_params
def test_delete_unary_rest_unset_required_fields():
transport = transports.RegionInstanceGroupManagersRestTransport(
credentials=ga_credentials.AnonymousCredentials()
)
unset_fields = transport.delete._get_unset_required_fields({})
assert set(unset_fields) == (
set(("requestId",)) & set(("instanceGroupManager", "project", "region",))
)
def test_delete_unary_rest_bad_request(
transport: str = "rest",
request_type=compute.DeleteRegionInstanceGroupManagerRequest,
):
client = RegionInstanceGroupManagersClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# send a request that will satisfy transcoding
request_init = {
"project": "sample1",
"region": "sample2",
"instance_group_manager": "sample3",
}
request = request_type(request_init)
# Mock the http request call within the method and fake a BadRequest error.
with mock.patch.object(Session, "request") as req, pytest.raises(
core_exceptions.BadRequest
):
# Wrap the value into a proper Response obj
response_value = Response()
response_value.status_code = 400
response_value.request = Request()
req.return_value = response_value
client.delete_unary(request)
def test_delete_unary_rest_flattened():
client = RegionInstanceGroupManagersClient(
credentials=ga_credentials.AnonymousCredentials(), transport="rest",
)
# Mock the http request call within the method and fake a response.
with mock.patch.object(type(client.transport._session), "request") as req:
# Designate an appropriate value for the returned response.
return_value = compute.Operation()
# Wrap the value into a proper Response obj
response_value = Response()
response_value.status_code = 200
json_return_value = compute.Operation.to_json(return_value)
response_value._content = json_return_value.encode("UTF-8")
req.return_value = response_value
# get arguments that satisfy an http rule for this method
sample_request = {
"project": "sample1",
"region": "sample2",
"instance_group_manager": "sample3",
}
# get truthy value for each flattened field
mock_args = dict(
project="project_value",
region="region_value",
instance_group_manager="instance_group_manager_value",
)
mock_args.update(sample_request)
client.delete_unary(**mock_args)
# Establish that the underlying call was made with the expected
# request object values.
assert len(req.mock_calls) == 1
_, args, _ = req.mock_calls[0]
assert path_template.validate(
"https://%s/compute/v1/projects/{project}/regions/{region}/instanceGroupManagers/{instance_group_manager}"
% client.transport._host,
args[1],
)
def test_delete_unary_rest_flattened_error(transport: str = "rest"):
client = RegionInstanceGroupManagersClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
client.delete_unary(
compute.DeleteRegionInstanceGroupManagerRequest(),
project="project_value",
region="region_value",
instance_group_manager="instance_group_manager_value",
)
def test_delete_unary_rest_error():
client = RegionInstanceGroupManagersClient(
credentials=ga_credentials.AnonymousCredentials(), transport="rest"
)
@pytest.mark.parametrize(
"request_type", [compute.DeleteInstancesRegionInstanceGroupManagerRequest, dict,]
)
def test_delete_instances_unary_rest(request_type):
client = RegionInstanceGroupManagersClient(
credentials=ga_credentials.AnonymousCredentials(), transport="rest",
)
# send a request that will satisfy transcoding
request_init = {
"project": "sample1",
"region": "sample2",
"instance_group_manager": "sample3",
}
request_init["region_instance_group_managers_delete_instances_request_resource"] = {
"instances": ["instances_value_1", "instances_value_2"],
"skip_instances_on_validation_error": True,
}
request = request_type(request_init)
# Mock the http request call within the method and fake a response.
with mock.patch.object(type(client.transport._session), "request") as req:
# Designate an appropriate value for the returned response.
return_value = compute.Operation(
client_operation_id="client_operation_id_value",
creation_timestamp="creation_timestamp_value",
description="description_value",
end_time="end_time_value",
http_error_message="http_error_message_value",
http_error_status_code=2374,
id=205,
insert_time="insert_time_value",
kind="kind_value",
name="name_value",
operation_group_id="operation_group_id_value",
operation_type="operation_type_value",
progress=885,
region="region_value",
self_link="self_link_value",
start_time="start_time_value",
status=compute.Operation.Status.DONE,
status_message="status_message_value",
target_id=947,
target_link="target_link_value",
user="user_value",
zone="zone_value",
)
# Wrap the value into a proper Response obj
response_value = Response()
response_value.status_code = 200
json_return_value = compute.Operation.to_json(return_value)
response_value._content = json_return_value.encode("UTF-8")
req.return_value = response_value
response = client.delete_instances_unary(request)
# Establish that the response is the type that we expect.
assert isinstance(response, compute.Operation)
assert response.client_operation_id == "client_operation_id_value"
assert response.creation_timestamp == "creation_timestamp_value"
assert response.description == "description_value"
assert response.end_time == "end_time_value"
assert response.http_error_message == "http_error_message_value"
assert response.http_error_status_code == 2374
assert response.id == 205
assert response.insert_time == "insert_time_value"
assert response.kind == "kind_value"
assert response.name == "name_value"
assert response.operation_group_id == "operation_group_id_value"
assert response.operation_type == "operation_type_value"
assert response.progress == 885
assert response.region == "region_value"
assert response.self_link == "self_link_value"
assert response.start_time == "start_time_value"
assert response.status == compute.Operation.Status.DONE
assert response.status_message == "status_message_value"
assert response.target_id == 947
assert response.target_link == "target_link_value"
assert response.user == "user_value"
assert response.zone == "zone_value"
def test_delete_instances_unary_rest_required_fields(
request_type=compute.DeleteInstancesRegionInstanceGroupManagerRequest,
):
transport_class = transports.RegionInstanceGroupManagersRestTransport
request_init = {}
request_init["instance_group_manager"] = ""
request_init["project"] = ""
request_init["region"] = ""
request = request_type(request_init)
jsonified_request = json.loads(
request_type.to_json(
request, including_default_value_fields=False, use_integers_for_enums=False
)
)
# verify fields with default values are dropped
unset_fields = transport_class(
credentials=ga_credentials.AnonymousCredentials()
).delete_instances._get_unset_required_fields(jsonified_request)
jsonified_request.update(unset_fields)
# verify required fields with default values are now present
jsonified_request["instanceGroupManager"] = "instance_group_manager_value"
jsonified_request["project"] = "project_value"
jsonified_request["region"] = "region_value"
unset_fields = transport_class(
credentials=ga_credentials.AnonymousCredentials()
).delete_instances._get_unset_required_fields(jsonified_request)
# Check that path parameters and body parameters are not mixed in.
assert not set(unset_fields) - set(("request_id",))
jsonified_request.update(unset_fields)
# verify required fields with non-default values are left alone
assert "instanceGroupManager" in jsonified_request
assert jsonified_request["instanceGroupManager"] == "instance_group_manager_value"
assert "project" in jsonified_request
assert jsonified_request["project"] == "project_value"
assert "region" in jsonified_request
assert jsonified_request["region"] == "region_value"
client = RegionInstanceGroupManagersClient(
credentials=ga_credentials.AnonymousCredentials(), transport="rest",
)
request = request_type(request_init)
# Designate an appropriate value for the returned response.
return_value = compute.Operation()
# Mock the http request call within the method and fake a response.
with mock.patch.object(Session, "request") as req:
# We need to mock transcode() because providing default values
# for required fields would make the real implementation fail when
# the http_options expect actual values for those fields.
with mock.patch.object(path_template, "transcode") as transcode:
# A uri without fields and an empty body will force all the
# request fields to show up in the query_params.
transcode_result = {
"uri": "v1/sample_method",
"method": "post",
"query_params": request_init,
}
transcode_result["body"] = {}
transcode.return_value = transcode_result
response_value = Response()
response_value.status_code = 200
json_return_value = compute.Operation.to_json(return_value)
response_value._content = json_return_value.encode("UTF-8")
req.return_value = response_value
response = client.delete_instances_unary(request)
expected_params = []
actual_params = req.call_args.kwargs["params"]
assert expected_params == actual_params
def test_delete_instances_unary_rest_unset_required_fields():
transport = transports.RegionInstanceGroupManagersRestTransport(
credentials=ga_credentials.AnonymousCredentials()
)
unset_fields = transport.delete_instances._get_unset_required_fields({})
assert set(unset_fields) == (
set(("requestId",))
& set(
(
"instanceGroupManager",
"project",
"region",
"regionInstanceGroupManagersDeleteInstancesRequestResource",
)
)
)
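# Illustrative sketch of the set algebra asserted above: the method's optional
# query parameters are intersected with its required fields, so any overlap
# would flag a required field that could leak into the query string. For
# delete_instances the two sets are disjoint. Hypothetical helper:
def _example_unset_fields_intersection():
    optional_query_params = {"requestId"}
    required_fields = {
        "instanceGroupManager",
        "project",
        "region",
        "regionInstanceGroupManagersDeleteInstancesRequestResource",
    }
    # "requestId" is optional-only, so the intersection is empty.
    assert optional_query_params & required_fields == set()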
def test_delete_instances_unary_rest_bad_request(
transport: str = "rest",
request_type=compute.DeleteInstancesRegionInstanceGroupManagerRequest,
):
client = RegionInstanceGroupManagersClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# send a request that will satisfy transcoding
request_init = {
"project": "sample1",
"region": "sample2",
"instance_group_manager": "sample3",
}
request_init["region_instance_group_managers_delete_instances_request_resource"] = {
"instances": ["instances_value_1", "instances_value_2"],
"skip_instances_on_validation_error": True,
}
request = request_type(request_init)
# Mock the http request call within the method and fake a BadRequest error.
with mock.patch.object(Session, "request") as req, pytest.raises(
core_exceptions.BadRequest
):
# Wrap the value into a proper Response obj
response_value = Response()
response_value.status_code = 400
response_value.request = Request()
req.return_value = response_value
client.delete_instances_unary(request)
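# Illustrative sketch (hypothetical helper): the ``pytest.raises`` block above
# relies on a 400 response being translated into ``BadRequest``, which
# ``google.api_core.exceptions.from_http_response`` does from a raw
# ``requests`` response. Assumes the module-level ``Response``/``Request``
# imports used throughout this file:
def _example_http_error_mapping():
    response = Response()
    response.status_code = 400
    response.request = Request()
    exc = core_exceptions.from_http_response(response)
    assert isinstance(exc, core_exceptions.BadRequest)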
def test_delete_instances_unary_rest_flattened():
client = RegionInstanceGroupManagersClient(
credentials=ga_credentials.AnonymousCredentials(), transport="rest",
)
# Mock the http request call within the method and fake a response.
with mock.patch.object(type(client.transport._session), "request") as req:
# Designate an appropriate value for the returned response.
return_value = compute.Operation()
# Wrap the value into a proper Response obj
response_value = Response()
response_value.status_code = 200
json_return_value = compute.Operation.to_json(return_value)
response_value._content = json_return_value.encode("UTF-8")
req.return_value = response_value
# get arguments that satisfy an http rule for this method
sample_request = {
"project": "sample1",
"region": "sample2",
"instance_group_manager": "sample3",
}
# get truthy value for each flattened field
mock_args = dict(
project="project_value",
region="region_value",
instance_group_manager="instance_group_manager_value",
region_instance_group_managers_delete_instances_request_resource=compute.RegionInstanceGroupManagersDeleteInstancesRequest(
instances=["instances_value"]
),
)
mock_args.update(sample_request)
client.delete_instances_unary(**mock_args)
# Establish that the underlying call was made with the expected
# request object values.
assert len(req.mock_calls) == 1
_, args, _ = req.mock_calls[0]
assert path_template.validate(
"https://%s/compute/v1/projects/{project}/regions/{region}/instanceGroupManagers/{instance_group_manager}/deleteInstances"
% client.transport._host,
args[1],
)
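# Illustrative sketch (hypothetical values): ``path_template.validate`` checks
# a concrete URL against a URI template like the one asserted above. The
# default public Compute host is assumed purely for demonstration.
def _example_path_template_validate():
    tmpl = (
        "https://compute.googleapis.com/compute/v1/projects/{project}"
        "/regions/{region}/instanceGroupManagers/{instance_group_manager}"
        "/deleteInstances"
    )
    url = (
        "https://compute.googleapis.com/compute/v1/projects/sample1"
        "/regions/sample2/instanceGroupManagers/sample3/deleteInstances"
    )
    assert path_template.validate(tmpl, url)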
def test_delete_instances_unary_rest_flattened_error(transport: str = "rest"):
client = RegionInstanceGroupManagersClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
client.delete_instances_unary(
compute.DeleteInstancesRegionInstanceGroupManagerRequest(),
project="project_value",
region="region_value",
instance_group_manager="instance_group_manager_value",
region_instance_group_managers_delete_instances_request_resource=compute.RegionInstanceGroupManagersDeleteInstancesRequest(
instances=["instances_value"]
),
)
def test_delete_instances_unary_rest_error():
client = RegionInstanceGroupManagersClient(
credentials=ga_credentials.AnonymousCredentials(), transport="rest"
)
@pytest.mark.parametrize(
"request_type",
[compute.DeletePerInstanceConfigsRegionInstanceGroupManagerRequest, dict,],
)
def test_delete_per_instance_configs_unary_rest(request_type):
client = RegionInstanceGroupManagersClient(
credentials=ga_credentials.AnonymousCredentials(), transport="rest",
)
# send a request that will satisfy transcoding
request_init = {
"project": "sample1",
"region": "sample2",
"instance_group_manager": "sample3",
}
request_init[
"region_instance_group_manager_delete_instance_config_req_resource"
] = {"names": ["names_value_1", "names_value_2"]}
request = request_type(request_init)
# Mock the http request call within the method and fake a response.
with mock.patch.object(type(client.transport._session), "request") as req:
# Designate an appropriate value for the returned response.
return_value = compute.Operation(
client_operation_id="client_operation_id_value",
creation_timestamp="creation_timestamp_value",
description="description_value",
end_time="end_time_value",
http_error_message="http_error_message_value",
http_error_status_code=2374,
id=205,
insert_time="insert_time_value",
kind="kind_value",
name="name_value",
operation_group_id="operation_group_id_value",
operation_type="operation_type_value",
progress=885,
region="region_value",
self_link="self_link_value",
start_time="start_time_value",
status=compute.Operation.Status.DONE,
status_message="status_message_value",
target_id=947,
target_link="target_link_value",
user="user_value",
zone="zone_value",
)
# Wrap the value into a proper Response obj
response_value = Response()
response_value.status_code = 200
json_return_value = compute.Operation.to_json(return_value)
response_value._content = json_return_value.encode("UTF-8")
req.return_value = response_value
response = client.delete_per_instance_configs_unary(request)
# Establish that the response is the type that we expect.
assert isinstance(response, compute.Operation)
assert response.client_operation_id == "client_operation_id_value"
assert response.creation_timestamp == "creation_timestamp_value"
assert response.description == "description_value"
assert response.end_time == "end_time_value"
assert response.http_error_message == "http_error_message_value"
assert response.http_error_status_code == 2374
assert response.id == 205
assert response.insert_time == "insert_time_value"
assert response.kind == "kind_value"
assert response.name == "name_value"
assert response.operation_group_id == "operation_group_id_value"
assert response.operation_type == "operation_type_value"
assert response.progress == 885
assert response.region == "region_value"
assert response.self_link == "self_link_value"
assert response.start_time == "start_time_value"
assert response.status == compute.Operation.Status.DONE
assert response.status_message == "status_message_value"
assert response.target_id == 947
assert response.target_link == "target_link_value"
assert response.user == "user_value"
assert response.zone == "zone_value"
def test_delete_per_instance_configs_unary_rest_required_fields(
request_type=compute.DeletePerInstanceConfigsRegionInstanceGroupManagerRequest,
):
transport_class = transports.RegionInstanceGroupManagersRestTransport
request_init = {}
request_init["instance_group_manager"] = ""
request_init["project"] = ""
request_init["region"] = ""
request = request_type(request_init)
jsonified_request = json.loads(
request_type.to_json(
request, including_default_value_fields=False, use_integers_for_enums=False
)
)
# verify fields with default values are dropped
unset_fields = transport_class(
credentials=ga_credentials.AnonymousCredentials()
).delete_per_instance_configs._get_unset_required_fields(jsonified_request)
jsonified_request.update(unset_fields)
# verify required fields with default values are now present
jsonified_request["instanceGroupManager"] = "instance_group_manager_value"
jsonified_request["project"] = "project_value"
jsonified_request["region"] = "region_value"
unset_fields = transport_class(
credentials=ga_credentials.AnonymousCredentials()
).delete_per_instance_configs._get_unset_required_fields(jsonified_request)
jsonified_request.update(unset_fields)
# verify required fields with non-default values are left alone
assert "instanceGroupManager" in jsonified_request
assert jsonified_request["instanceGroupManager"] == "instance_group_manager_value"
assert "project" in jsonified_request
assert jsonified_request["project"] == "project_value"
assert "region" in jsonified_request
assert jsonified_request["region"] == "region_value"
client = RegionInstanceGroupManagersClient(
credentials=ga_credentials.AnonymousCredentials(), transport="rest",
)
request = request_type(request_init)
# Designate an appropriate value for the returned response.
return_value = compute.Operation()
# Mock the http request call within the method and fake a response.
with mock.patch.object(Session, "request") as req:
# We need to mock transcode() because providing default values
# for required fields would make the real implementation fail when
# the http_options expect actual values for those fields.
with mock.patch.object(path_template, "transcode") as transcode:
# A uri without fields and an empty body will force all the
# request fields to show up in the query_params.
transcode_result = {
"uri": "v1/sample_method",
"method": "post",
"query_params": request_init,
}
transcode_result["body"] = {}
transcode.return_value = transcode_result
response_value = Response()
response_value.status_code = 200
json_return_value = compute.Operation.to_json(return_value)
response_value._content = json_return_value.encode("UTF-8")
req.return_value = response_value
response = client.delete_per_instance_configs_unary(request)
expected_params = []
actual_params = req.call_args.kwargs["params"]
assert expected_params == actual_params
def test_delete_per_instance_configs_unary_rest_unset_required_fields():
transport = transports.RegionInstanceGroupManagersRestTransport(
credentials=ga_credentials.AnonymousCredentials()
)
unset_fields = transport.delete_per_instance_configs._get_unset_required_fields({})
assert set(unset_fields) == (
set(())
& set(
(
"instanceGroupManager",
"project",
"region",
"regionInstanceGroupManagerDeleteInstanceConfigReqResource",
)
)
)
def test_delete_per_instance_configs_unary_rest_bad_request(
transport: str = "rest",
request_type=compute.DeletePerInstanceConfigsRegionInstanceGroupManagerRequest,
):
client = RegionInstanceGroupManagersClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# send a request that will satisfy transcoding
request_init = {
"project": "sample1",
"region": "sample2",
"instance_group_manager": "sample3",
}
request_init[
"region_instance_group_manager_delete_instance_config_req_resource"
] = {"names": ["names_value_1", "names_value_2"]}
request = request_type(request_init)
# Mock the http request call within the method and fake a BadRequest error.
with mock.patch.object(Session, "request") as req, pytest.raises(
core_exceptions.BadRequest
):
# Wrap the value into a proper Response obj
response_value = Response()
response_value.status_code = 400
response_value.request = Request()
req.return_value = response_value
client.delete_per_instance_configs_unary(request)
def test_delete_per_instance_configs_unary_rest_flattened():
client = RegionInstanceGroupManagersClient(
credentials=ga_credentials.AnonymousCredentials(), transport="rest",
)
# Mock the http request call within the method and fake a response.
with mock.patch.object(type(client.transport._session), "request") as req:
# Designate an appropriate value for the returned response.
return_value = compute.Operation()
# Wrap the value into a proper Response obj
response_value = Response()
response_value.status_code = 200
json_return_value = compute.Operation.to_json(return_value)
response_value._content = json_return_value.encode("UTF-8")
req.return_value = response_value
# get arguments that satisfy an http rule for this method
sample_request = {
"project": "sample1",
"region": "sample2",
"instance_group_manager": "sample3",
}
# get truthy value for each flattened field
mock_args = dict(
project="project_value",
region="region_value",
instance_group_manager="instance_group_manager_value",
region_instance_group_manager_delete_instance_config_req_resource=compute.RegionInstanceGroupManagerDeleteInstanceConfigReq(
names=["names_value"]
),
)
mock_args.update(sample_request)
client.delete_per_instance_configs_unary(**mock_args)
# Establish that the underlying call was made with the expected
# request object values.
assert len(req.mock_calls) == 1
_, args, _ = req.mock_calls[0]
assert path_template.validate(
"https://%s/compute/v1/projects/{project}/regions/{region}/instanceGroupManagers/{instance_group_manager}/deletePerInstanceConfigs"
% client.transport._host,
args[1],
)
def test_delete_per_instance_configs_unary_rest_flattened_error(
transport: str = "rest",
):
client = RegionInstanceGroupManagersClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
client.delete_per_instance_configs_unary(
compute.DeletePerInstanceConfigsRegionInstanceGroupManagerRequest(),
project="project_value",
region="region_value",
instance_group_manager="instance_group_manager_value",
region_instance_group_manager_delete_instance_config_req_resource=compute.RegionInstanceGroupManagerDeleteInstanceConfigReq(
names=["names_value"]
),
)
def test_delete_per_instance_configs_unary_rest_error():
client = RegionInstanceGroupManagersClient(
credentials=ga_credentials.AnonymousCredentials(), transport="rest"
)
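# Illustrative sketch (hypothetical helper): the REST tests serialize their
# fake responses with ``to_json`` and the client parses them back, so the
# round trip must be lossless. Minimal check with an arbitrary operation:
def _example_operation_json_round_trip():
    op = compute.Operation(name="example_op", status=compute.Operation.Status.DONE)
    payload = compute.Operation.to_json(op)
    assert compute.Operation.from_json(payload) == op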
@pytest.mark.parametrize(
"request_type", [compute.GetRegionInstanceGroupManagerRequest, dict,]
)
def test_get_rest(request_type):
client = RegionInstanceGroupManagersClient(
credentials=ga_credentials.AnonymousCredentials(), transport="rest",
)
# send a request that will satisfy transcoding
request_init = {
"project": "sample1",
"region": "sample2",
"instance_group_manager": "sample3",
}
request = request_type(request_init)
# Mock the http request call within the method and fake a response.
with mock.patch.object(type(client.transport._session), "request") as req:
# Designate an appropriate value for the returned response.
return_value = compute.InstanceGroupManager(
base_instance_name="base_instance_name_value",
creation_timestamp="creation_timestamp_value",
description="description_value",
fingerprint="fingerprint_value",
id=205,
instance_group="instance_group_value",
instance_template="instance_template_value",
kind="kind_value",
name="name_value",
region="region_value",
self_link="self_link_value",
target_pools=["target_pools_value"],
target_size=1185,
zone="zone_value",
)
# Wrap the value into a proper Response obj
response_value = Response()
response_value.status_code = 200
json_return_value = compute.InstanceGroupManager.to_json(return_value)
response_value._content = json_return_value.encode("UTF-8")
req.return_value = response_value
response = client.get(request)
# Establish that the response is the type that we expect.
assert isinstance(response, compute.InstanceGroupManager)
assert response.base_instance_name == "base_instance_name_value"
assert response.creation_timestamp == "creation_timestamp_value"
assert response.description == "description_value"
assert response.fingerprint == "fingerprint_value"
assert response.id == 205
assert response.instance_group == "instance_group_value"
assert response.instance_template == "instance_template_value"
assert response.kind == "kind_value"
assert response.name == "name_value"
assert response.region == "region_value"
assert response.self_link == "self_link_value"
assert response.target_pools == ["target_pools_value"]
assert response.target_size == 1185
assert response.zone == "zone_value"
def test_get_rest_required_fields(
request_type=compute.GetRegionInstanceGroupManagerRequest,
):
transport_class = transports.RegionInstanceGroupManagersRestTransport
request_init = {}
request_init["instance_group_manager"] = ""
request_init["project"] = ""
request_init["region"] = ""
request = request_type(request_init)
jsonified_request = json.loads(
request_type.to_json(
request, including_default_value_fields=False, use_integers_for_enums=False
)
)
# verify fields with default values are dropped
unset_fields = transport_class(
credentials=ga_credentials.AnonymousCredentials()
).get._get_unset_required_fields(jsonified_request)
jsonified_request.update(unset_fields)
# verify required fields with default values are now present
jsonified_request["instanceGroupManager"] = "instance_group_manager_value"
jsonified_request["project"] = "project_value"
jsonified_request["region"] = "region_value"
unset_fields = transport_class(
credentials=ga_credentials.AnonymousCredentials()
).get._get_unset_required_fields(jsonified_request)
jsonified_request.update(unset_fields)
# verify required fields with non-default values are left alone
assert "instanceGroupManager" in jsonified_request
assert jsonified_request["instanceGroupManager"] == "instance_group_manager_value"
assert "project" in jsonified_request
assert jsonified_request["project"] == "project_value"
assert "region" in jsonified_request
assert jsonified_request["region"] == "region_value"
client = RegionInstanceGroupManagersClient(
credentials=ga_credentials.AnonymousCredentials(), transport="rest",
)
request = request_type(request_init)
# Designate an appropriate value for the returned response.
return_value = compute.InstanceGroupManager()
# Mock the http request call within the method and fake a response.
with mock.patch.object(Session, "request") as req:
# We need to mock transcode() because providing default values
# for required fields would make the real implementation fail when
# the http_options expect actual values for those fields.
with mock.patch.object(path_template, "transcode") as transcode:
# A uri without fields and an empty body will force all the
# request fields to show up in the query_params.
transcode_result = {
"uri": "v1/sample_method",
"method": "get",
"query_params": request_init,
}
transcode.return_value = transcode_result
response_value = Response()
response_value.status_code = 200
json_return_value = compute.InstanceGroupManager.to_json(return_value)
response_value._content = json_return_value.encode("UTF-8")
req.return_value = response_value
response = client.get(request)
expected_params = []
actual_params = req.call_args.kwargs["params"]
assert expected_params == actual_params
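# Illustrative sketch (hypothetical helper): ``to_json`` emits proto fields in
# lowerCamelCase, which is why the required-fields tests key the jsonified
# request with "instanceGroupManager" rather than "instance_group_manager".
def _example_camel_case_json_keys():
    request = compute.GetRegionInstanceGroupManagerRequest(
        instance_group_manager="igm", project="p", region="r"
    )
    jsonified = json.loads(
        compute.GetRegionInstanceGroupManagerRequest.to_json(
            request, including_default_value_fields=False
        )
    )
    assert jsonified == {"instanceGroupManager": "igm", "project": "p", "region": "r"}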
def test_get_rest_unset_required_fields():
transport = transports.RegionInstanceGroupManagersRestTransport(
credentials=ga_credentials.AnonymousCredentials()
)
unset_fields = transport.get._get_unset_required_fields({})
assert set(unset_fields) == (
set(()) & set(("instanceGroupManager", "project", "region",))
)
def test_get_rest_bad_request(
transport: str = "rest", request_type=compute.GetRegionInstanceGroupManagerRequest
):
client = RegionInstanceGroupManagersClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# send a request that will satisfy transcoding
request_init = {
"project": "sample1",
"region": "sample2",
"instance_group_manager": "sample3",
}
request = request_type(request_init)
# Mock the http request call within the method and fake a BadRequest error.
with mock.patch.object(Session, "request") as req, pytest.raises(
core_exceptions.BadRequest
):
# Wrap the value into a proper Response obj
response_value = Response()
response_value.status_code = 400
response_value.request = Request()
req.return_value = response_value
client.get(request)
def test_get_rest_flattened():
client = RegionInstanceGroupManagersClient(
credentials=ga_credentials.AnonymousCredentials(), transport="rest",
)
# Mock the http request call within the method and fake a response.
with mock.patch.object(type(client.transport._session), "request") as req:
# Designate an appropriate value for the returned response.
return_value = compute.InstanceGroupManager()
# Wrap the value into a proper Response obj
response_value = Response()
response_value.status_code = 200
json_return_value = compute.InstanceGroupManager.to_json(return_value)
response_value._content = json_return_value.encode("UTF-8")
req.return_value = response_value
# get arguments that satisfy an http rule for this method
sample_request = {
"project": "sample1",
"region": "sample2",
"instance_group_manager": "sample3",
}
# get truthy value for each flattened field
mock_args = dict(
project="project_value",
region="region_value",
instance_group_manager="instance_group_manager_value",
)
mock_args.update(sample_request)
client.get(**mock_args)
# Establish that the underlying call was made with the expected
# request object values.
assert len(req.mock_calls) == 1
_, args, _ = req.mock_calls[0]
assert path_template.validate(
"https://%s/compute/v1/projects/{project}/regions/{region}/instanceGroupManagers/{instance_group_manager}"
% client.transport._host,
args[1],
)
def test_get_rest_flattened_error(transport: str = "rest"):
client = RegionInstanceGroupManagersClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
client.get(
compute.GetRegionInstanceGroupManagerRequest(),
project="project_value",
region="region_value",
instance_group_manager="instance_group_manager_value",
)
def test_get_rest_error():
client = RegionInstanceGroupManagersClient(
credentials=ga_credentials.AnonymousCredentials(), transport="rest"
)
@pytest.mark.parametrize(
"request_type", [compute.InsertRegionInstanceGroupManagerRequest, dict,]
)
def test_insert_unary_rest(request_type):
client = RegionInstanceGroupManagersClient(
credentials=ga_credentials.AnonymousCredentials(), transport="rest",
)
# send a request that will satisfy transcoding
request_init = {"project": "sample1", "region": "sample2"}
request_init["instance_group_manager_resource"] = {
"auto_healing_policies": [
{"health_check": "health_check_value", "initial_delay_sec": 1778}
],
"base_instance_name": "base_instance_name_value",
"creation_timestamp": "creation_timestamp_value",
"current_actions": {
"abandoning": 1041,
"creating": 845,
"creating_without_retries": 2589,
"deleting": 844,
"none": 432,
"recreating": 1060,
"refreshing": 1069,
"restarting": 1091,
"verifying": 979,
},
"description": "description_value",
"distribution_policy": {
"target_shape": "target_shape_value",
"zones": [{"zone": "zone_value"}],
},
"fingerprint": "fingerprint_value",
"id": 205,
"instance_group": "instance_group_value",
"instance_template": "instance_template_value",
"kind": "kind_value",
"name": "name_value",
"named_ports": [{"name": "name_value", "port": 453}],
"region": "region_value",
"self_link": "self_link_value",
"stateful_policy": {"preserved_state": {"disks": {}}},
"status": {
"autoscaler": "autoscaler_value",
"is_stable": True,
"stateful": {
"has_stateful_config": True,
"per_instance_configs": {"all_effective": True},
},
"version_target": {"is_reached": True},
},
"target_pools": ["target_pools_value_1", "target_pools_value_2"],
"target_size": 1185,
"update_policy": {
"instance_redistribution_type": "instance_redistribution_type_value",
"max_surge": {"calculated": 1042, "fixed": 528, "percent": 753},
"max_unavailable": {"calculated": 1042, "fixed": 528, "percent": 753},
"minimal_action": "minimal_action_value",
"replacement_method": "replacement_method_value",
"type_": "type__value",
},
"versions": [
{
"instance_template": "instance_template_value",
"name": "name_value",
"target_size": {"calculated": 1042, "fixed": 528, "percent": 753},
}
],
"zone": "zone_value",
}
request = request_type(request_init)
# Mock the http request call within the method and fake a response.
with mock.patch.object(type(client.transport._session), "request") as req:
# Designate an appropriate value for the returned response.
return_value = compute.Operation(
client_operation_id="client_operation_id_value",
creation_timestamp="creation_timestamp_value",
description="description_value",
end_time="end_time_value",
http_error_message="http_error_message_value",
http_error_status_code=2374,
id=205,
insert_time="insert_time_value",
kind="kind_value",
name="name_value",
operation_group_id="operation_group_id_value",
operation_type="operation_type_value",
progress=885,
region="region_value",
self_link="self_link_value",
start_time="start_time_value",
status=compute.Operation.Status.DONE,
status_message="status_message_value",
target_id=947,
target_link="target_link_value",
user="user_value",
zone="zone_value",
)
# Wrap the value into a proper Response obj
response_value = Response()
response_value.status_code = 200
json_return_value = compute.Operation.to_json(return_value)
response_value._content = json_return_value.encode("UTF-8")
req.return_value = response_value
response = client.insert_unary(request)
# Establish that the response is the type that we expect.
assert isinstance(response, compute.Operation)
assert response.client_operation_id == "client_operation_id_value"
assert response.creation_timestamp == "creation_timestamp_value"
assert response.description == "description_value"
assert response.end_time == "end_time_value"
assert response.http_error_message == "http_error_message_value"
assert response.http_error_status_code == 2374
assert response.id == 205
assert response.insert_time == "insert_time_value"
assert response.kind == "kind_value"
assert response.name == "name_value"
assert response.operation_group_id == "operation_group_id_value"
assert response.operation_type == "operation_type_value"
assert response.progress == 885
assert response.region == "region_value"
assert response.self_link == "self_link_value"
assert response.start_time == "start_time_value"
assert response.status == compute.Operation.Status.DONE
assert response.status_message == "status_message_value"
assert response.target_id == 947
assert response.target_link == "target_link_value"
assert response.user == "user_value"
assert response.zone == "zone_value"
def test_insert_unary_rest_required_fields(
request_type=compute.InsertRegionInstanceGroupManagerRequest,
):
transport_class = transports.RegionInstanceGroupManagersRestTransport
request_init = {}
request_init["project"] = ""
request_init["region"] = ""
request = request_type(request_init)
jsonified_request = json.loads(
request_type.to_json(
request, including_default_value_fields=False, use_integers_for_enums=False
)
)
# verify fields with default values are dropped
unset_fields = transport_class(
credentials=ga_credentials.AnonymousCredentials()
).insert._get_unset_required_fields(jsonified_request)
jsonified_request.update(unset_fields)
# verify required fields with default values are now present
jsonified_request["project"] = "project_value"
jsonified_request["region"] = "region_value"
unset_fields = transport_class(
credentials=ga_credentials.AnonymousCredentials()
).insert._get_unset_required_fields(jsonified_request)
# Check that path parameters and body parameters are not mixed in.
assert not set(unset_fields) - set(("request_id",))
jsonified_request.update(unset_fields)
# verify required fields with non-default values are left alone
assert "project" in jsonified_request
assert jsonified_request["project"] == "project_value"
assert "region" in jsonified_request
assert jsonified_request["region"] == "region_value"
client = RegionInstanceGroupManagersClient(
credentials=ga_credentials.AnonymousCredentials(), transport="rest",
)
request = request_type(request_init)
# Designate an appropriate value for the returned response.
return_value = compute.Operation()
# Mock the http request call within the method and fake a response.
with mock.patch.object(Session, "request") as req:
# We need to mock transcode() because providing default values
# for required fields would make the real implementation fail when
# the http_options expect actual values for those fields.
with mock.patch.object(path_template, "transcode") as transcode:
# A uri without fields and an empty body will force all the
# request fields to show up in the query_params.
transcode_result = {
"uri": "v1/sample_method",
"method": "post",
"query_params": request_init,
}
transcode_result["body"] = {}
transcode.return_value = transcode_result
response_value = Response()
response_value.status_code = 200
json_return_value = compute.Operation.to_json(return_value)
response_value._content = json_return_value.encode("UTF-8")
req.return_value = response_value
response = client.insert_unary(request)
expected_params = []
actual_params = req.call_args.kwargs["params"]
assert expected_params == actual_params
def test_insert_unary_rest_unset_required_fields():
transport = transports.RegionInstanceGroupManagersRestTransport(
credentials=ga_credentials.AnonymousCredentials()
)
unset_fields = transport.insert._get_unset_required_fields({})
assert set(unset_fields) == (
set(("requestId",))
& set(("instanceGroupManagerResource", "project", "region",))
)
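# Illustrative sketch (hypothetical helper): the large body dicts used by the
# insert tests are interchangeable with typed messages; proto-plus coerces
# nested mappings into the matching message types.
def _example_nested_resource_coercion():
    from_dict = compute.InstanceGroupManager(
        {"named_ports": [{"name": "http", "port": 80}]}
    )
    from_messages = compute.InstanceGroupManager(
        named_ports=[compute.NamedPort(name="http", port=80)]
    )
    assert from_dict == from_messages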
def test_insert_unary_rest_bad_request(
transport: str = "rest",
request_type=compute.InsertRegionInstanceGroupManagerRequest,
):
client = RegionInstanceGroupManagersClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# send a request that will satisfy transcoding
request_init = {"project": "sample1", "region": "sample2"}
request_init["instance_group_manager_resource"] = {
"auto_healing_policies": [
{"health_check": "health_check_value", "initial_delay_sec": 1778}
],
"base_instance_name": "base_instance_name_value",
"creation_timestamp": "creation_timestamp_value",
"current_actions": {
"abandoning": 1041,
"creating": 845,
"creating_without_retries": 2589,
"deleting": 844,
"none": 432,
"recreating": 1060,
"refreshing": 1069,
"restarting": 1091,
"verifying": 979,
},
"description": "description_value",
"distribution_policy": {
"target_shape": "target_shape_value",
"zones": [{"zone": "zone_value"}],
},
"fingerprint": "fingerprint_value",
"id": 205,
"instance_group": "instance_group_value",
"instance_template": "instance_template_value",
"kind": "kind_value",
"name": "name_value",
"named_ports": [{"name": "name_value", "port": 453}],
"region": "region_value",
"self_link": "self_link_value",
"stateful_policy": {"preserved_state": {"disks": {}}},
"status": {
"autoscaler": "autoscaler_value",
"is_stable": True,
"stateful": {
"has_stateful_config": True,
"per_instance_configs": {"all_effective": True},
},
"version_target": {"is_reached": True},
},
"target_pools": ["target_pools_value_1", "target_pools_value_2"],
"target_size": 1185,
"update_policy": {
"instance_redistribution_type": "instance_redistribution_type_value",
"max_surge": {"calculated": 1042, "fixed": 528, "percent": 753},
"max_unavailable": {"calculated": 1042, "fixed": 528, "percent": 753},
"minimal_action": "minimal_action_value",
"replacement_method": "replacement_method_value",
"type_": "type__value",
},
"versions": [
{
"instance_template": "instance_template_value",
"name": "name_value",
"target_size": {"calculated": 1042, "fixed": 528, "percent": 753},
}
],
"zone": "zone_value",
}
request = request_type(request_init)
# Mock the http request call within the method and fake a BadRequest error.
with mock.patch.object(Session, "request") as req, pytest.raises(
core_exceptions.BadRequest
):
# Wrap the value into a proper Response obj
response_value = Response()
response_value.status_code = 400
response_value.request = Request()
req.return_value = response_value
client.insert_unary(request)
def test_insert_unary_rest_flattened():
client = RegionInstanceGroupManagersClient(
credentials=ga_credentials.AnonymousCredentials(), transport="rest",
)
# Mock the http request call within the method and fake a response.
with mock.patch.object(type(client.transport._session), "request") as req:
# Designate an appropriate value for the returned response.
return_value = compute.Operation()
# Wrap the value into a proper Response obj
response_value = Response()
response_value.status_code = 200
json_return_value = compute.Operation.to_json(return_value)
response_value._content = json_return_value.encode("UTF-8")
req.return_value = response_value
# get arguments that satisfy an http rule for this method
sample_request = {"project": "sample1", "region": "sample2"}
# get truthy value for each flattened field
mock_args = dict(
project="project_value",
region="region_value",
instance_group_manager_resource=compute.InstanceGroupManager(
auto_healing_policies=[
compute.InstanceGroupManagerAutoHealingPolicy(
health_check="health_check_value"
)
]
),
)
mock_args.update(sample_request)
client.insert_unary(**mock_args)
# Establish that the underlying call was made with the expected
# request object values.
assert len(req.mock_calls) == 1
_, args, _ = req.mock_calls[0]
assert path_template.validate(
"https://%s/compute/v1/projects/{project}/regions/{region}/instanceGroupManagers"
% client.transport._host,
args[1],
)
def test_insert_unary_rest_flattened_error(transport: str = "rest"):
client = RegionInstanceGroupManagersClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
client.insert_unary(
compute.InsertRegionInstanceGroupManagerRequest(),
project="project_value",
region="region_value",
instance_group_manager_resource=compute.InstanceGroupManager(
auto_healing_policies=[
compute.InstanceGroupManagerAutoHealingPolicy(
health_check="health_check_value"
)
]
),
)
def test_insert_unary_rest_error():
client = RegionInstanceGroupManagersClient(
credentials=ga_credentials.AnonymousCredentials(), transport="rest"
)
@pytest.mark.parametrize(
"request_type", [compute.ListRegionInstanceGroupManagersRequest, dict,]
)
def test_list_rest(request_type):
client = RegionInstanceGroupManagersClient(
credentials=ga_credentials.AnonymousCredentials(), transport="rest",
)
# send a request that will satisfy transcoding
request_init = {"project": "sample1", "region": "sample2"}
request = request_type(request_init)
# Mock the http request call within the method and fake a response.
with mock.patch.object(type(client.transport._session), "request") as req:
# Designate an appropriate value for the returned response.
return_value = compute.RegionInstanceGroupManagerList(
id="id_value",
kind="kind_value",
next_page_token="next_page_token_value",
self_link="self_link_value",
)
# Wrap the value into a proper Response obj
response_value = Response()
response_value.status_code = 200
json_return_value = compute.RegionInstanceGroupManagerList.to_json(return_value)
response_value._content = json_return_value.encode("UTF-8")
req.return_value = response_value
response = client.list(request)
# Establish that the response is the type that we expect.
assert isinstance(response, pagers.ListPager)
assert response.id == "id_value"
assert response.kind == "kind_value"
assert response.next_page_token == "next_page_token_value"
assert response.self_link == "self_link_value"
def test_list_rest_required_fields(
request_type=compute.ListRegionInstanceGroupManagersRequest,
):
transport_class = transports.RegionInstanceGroupManagersRestTransport
request_init = {}
request_init["project"] = ""
request_init["region"] = ""
request = request_type(request_init)
jsonified_request = json.loads(
request_type.to_json(
request, including_default_value_fields=False, use_integers_for_enums=False
)
)
# verify fields with default values are dropped
unset_fields = transport_class(
credentials=ga_credentials.AnonymousCredentials()
).list._get_unset_required_fields(jsonified_request)
jsonified_request.update(unset_fields)
# verify required fields with default values are now present
jsonified_request["project"] = "project_value"
jsonified_request["region"] = "region_value"
unset_fields = transport_class(
credentials=ga_credentials.AnonymousCredentials()
).list._get_unset_required_fields(jsonified_request)
# Check that path parameters and body parameters are not mixed in.
assert not set(unset_fields) - set(
("max_results", "filter", "order_by", "page_token", "return_partial_success",)
)
jsonified_request.update(unset_fields)
# verify required fields with non-default values are left alone
assert "project" in jsonified_request
assert jsonified_request["project"] == "project_value"
assert "region" in jsonified_request
assert jsonified_request["region"] == "region_value"
client = RegionInstanceGroupManagersClient(
credentials=ga_credentials.AnonymousCredentials(), transport="rest",
)
request = request_type(request_init)
# Designate an appropriate value for the returned response.
return_value = compute.RegionInstanceGroupManagerList()
# Mock the http request call within the method and fake a response.
with mock.patch.object(Session, "request") as req:
# We need to mock transcode() because providing default values
# for required fields would make the real implementation fail when
# the http_options expect actual values for those fields.
with mock.patch.object(path_template, "transcode") as transcode:
# A uri without fields and an empty body will force all the
# request fields to show up in the query_params.
transcode_result = {
"uri": "v1/sample_method",
"method": "get",
"query_params": request_init,
}
transcode.return_value = transcode_result
response_value = Response()
response_value.status_code = 200
json_return_value = compute.RegionInstanceGroupManagerList.to_json(
return_value
)
response_value._content = json_return_value.encode("UTF-8")
req.return_value = response_value
response = client.list(request)
expected_params = []
actual_params = req.call_args.kwargs["params"]
assert expected_params == actual_params
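# Illustrative sketch (hypothetical http_options): ``path_template.transcode``
# splits a request into the uri/method/query_params dict that the mocks above
# fabricate by hand, so this is the shape the transport expects back.
def _example_transcode_shape():
    http_options = [
        {"method": "get", "uri": "/v1/projects/{project}/regions/{region}"}
    ]
    transcoded = path_template.transcode(
        http_options, project="sample1", region="sample2"
    )
    assert transcoded["uri"] == "/v1/projects/sample1/regions/sample2"
    assert transcoded["method"] == "get"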
def test_list_rest_unset_required_fields():
transport = transports.RegionInstanceGroupManagersRestTransport(
credentials=ga_credentials.AnonymousCredentials()
)
unset_fields = transport.list._get_unset_required_fields({})
assert set(unset_fields) == (
set(("maxResults", "filter", "orderBy", "pageToken", "returnPartialSuccess",))
& set(("project", "region",))
)
def test_list_rest_bad_request(
transport: str = "rest", request_type=compute.ListRegionInstanceGroupManagersRequest
):
client = RegionInstanceGroupManagersClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# send a request that will satisfy transcoding
request_init = {"project": "sample1", "region": "sample2"}
request = request_type(request_init)
# Mock the http request call within the method and fake a BadRequest error.
with mock.patch.object(Session, "request") as req, pytest.raises(
core_exceptions.BadRequest
):
# Wrap the value into a proper Response obj
response_value = Response()
response_value.status_code = 400
response_value.request = Request()
req.return_value = response_value
client.list(request)
def test_list_rest_flattened():
client = RegionInstanceGroupManagersClient(
credentials=ga_credentials.AnonymousCredentials(), transport="rest",
)
# Mock the http request call within the method and fake a response.
with mock.patch.object(type(client.transport._session), "request") as req:
# Designate an appropriate value for the returned response.
return_value = compute.RegionInstanceGroupManagerList()
# Wrap the value into a proper Response obj
response_value = Response()
response_value.status_code = 200
json_return_value = compute.RegionInstanceGroupManagerList.to_json(return_value)
response_value._content = json_return_value.encode("UTF-8")
req.return_value = response_value
# get arguments that satisfy an http rule for this method
sample_request = {"project": "sample1", "region": "sample2"}
# get truthy value for each flattened field
mock_args = dict(project="project_value", region="region_value",)
mock_args.update(sample_request)
client.list(**mock_args)
# Establish that the underlying call was made with the expected
# request object values.
assert len(req.mock_calls) == 1
_, args, _ = req.mock_calls[0]
assert path_template.validate(
"https://%s/compute/v1/projects/{project}/regions/{region}/instanceGroupManagers"
% client.transport._host,
args[1],
)
def test_list_rest_flattened_error(transport: str = "rest"):
client = RegionInstanceGroupManagersClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
client.list(
compute.ListRegionInstanceGroupManagersRequest(),
project="project_value",
region="region_value",
)
def test_list_rest_pager(transport: str = "rest"):
client = RegionInstanceGroupManagersClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Mock the http request call within the method and fake a response.
with mock.patch.object(Session, "request") as req:
# TODO(kbandes): remove this mock unless there's a good reason for it.
# with mock.patch.object(path_template, 'transcode') as transcode:
# Set the response as a series of pages
response = (
compute.RegionInstanceGroupManagerList(
items=[
compute.InstanceGroupManager(),
compute.InstanceGroupManager(),
compute.InstanceGroupManager(),
],
next_page_token="abc",
),
compute.RegionInstanceGroupManagerList(items=[], next_page_token="def",),
compute.RegionInstanceGroupManagerList(
items=[compute.InstanceGroupManager(),], next_page_token="ghi",
),
compute.RegionInstanceGroupManagerList(
items=[compute.InstanceGroupManager(), compute.InstanceGroupManager(),],
),
)
# Two responses for two calls
response = response + response
# Wrap the values into proper Response objs
response = tuple(
compute.RegionInstanceGroupManagerList.to_json(x) for x in response
)
return_values = tuple(Response() for i in response)
for return_val, response_val in zip(return_values, response):
return_val._content = response_val.encode("UTF-8")
return_val.status_code = 200
req.side_effect = return_values
sample_request = {"project": "sample1", "region": "sample2"}
pager = client.list(request=sample_request)
results = list(pager)
assert len(results) == 6
assert all(isinstance(i, compute.InstanceGroupManager) for i in results)
pages = list(client.list(request=sample_request).pages)
for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
assert page_.raw_page.next_page_token == token
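# Illustrative sketch (hypothetical helper): iterating a pager flattens the
# per-page ``items`` lists, which is why the four mocked pages above
# (3, 0, 1 and 2 items) yield six results per pass.
def _example_pager_flattening():
    pages = [
        compute.RegionInstanceGroupManagerList(
            items=[compute.InstanceGroupManager()], next_page_token="abc"
        ),
        compute.RegionInstanceGroupManagerList(items=[compute.InstanceGroupManager()]),
    ]
    flattened = [item for page in pages for item in page.items]
    assert len(flattened) == 2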
@pytest.mark.parametrize(
"request_type", [compute.ListErrorsRegionInstanceGroupManagersRequest, dict,]
)
def test_list_errors_rest(request_type):
client = RegionInstanceGroupManagersClient(
credentials=ga_credentials.AnonymousCredentials(), transport="rest",
)
# send a request that will satisfy transcoding
request_init = {
"project": "sample1",
"region": "sample2",
"instance_group_manager": "sample3",
}
request = request_type(request_init)
# Mock the http request call within the method and fake a response.
with mock.patch.object(type(client.transport._session), "request") as req:
# Designate an appropriate value for the returned response.
return_value = compute.RegionInstanceGroupManagersListErrorsResponse(
next_page_token="next_page_token_value",
)
# Wrap the value into a proper Response obj
response_value = Response()
response_value.status_code = 200
json_return_value = compute.RegionInstanceGroupManagersListErrorsResponse.to_json(
return_value
)
response_value._content = json_return_value.encode("UTF-8")
req.return_value = response_value
response = client.list_errors(request)
# Establish that the response is the type that we expect.
assert isinstance(response, pagers.ListErrorsPager)
assert response.next_page_token == "next_page_token_value"
def test_list_errors_rest_required_fields(
request_type=compute.ListErrorsRegionInstanceGroupManagersRequest,
):
transport_class = transports.RegionInstanceGroupManagersRestTransport
request_init = {}
request_init["instance_group_manager"] = ""
request_init["project"] = ""
request_init["region"] = ""
request = request_type(request_init)
jsonified_request = json.loads(
request_type.to_json(
request, including_default_value_fields=False, use_integers_for_enums=False
)
)
# verify fields with default values are dropped
unset_fields = transport_class(
credentials=ga_credentials.AnonymousCredentials()
).list_errors._get_unset_required_fields(jsonified_request)
jsonified_request.update(unset_fields)
# verify required fields with default values are now present
jsonified_request["instanceGroupManager"] = "instance_group_manager_value"
jsonified_request["project"] = "project_value"
jsonified_request["region"] = "region_value"
unset_fields = transport_class(
credentials=ga_credentials.AnonymousCredentials()
).list_errors._get_unset_required_fields(jsonified_request)
# Check that path parameters and body parameters are not mixed in.
assert not set(unset_fields) - set(
("max_results", "filter", "order_by", "page_token", "return_partial_success",)
)
jsonified_request.update(unset_fields)
# verify required fields with non-default values are left alone
assert "instanceGroupManager" in jsonified_request
assert jsonified_request["instanceGroupManager"] == "instance_group_manager_value"
assert "project" in jsonified_request
assert jsonified_request["project"] == "project_value"
assert "region" in jsonified_request
assert jsonified_request["region"] == "region_value"
client = RegionInstanceGroupManagersClient(
credentials=ga_credentials.AnonymousCredentials(), transport="rest",
)
request = request_type(request_init)
# Designate an appropriate value for the returned response.
return_value = compute.RegionInstanceGroupManagersListErrorsResponse()
# Mock the http request call within the method and fake a response.
with mock.patch.object(Session, "request") as req:
# We need to mock transcode() because providing default values
# for required fields would make the real implementation fail when
# the http_options expect actual values for those fields.
with mock.patch.object(path_template, "transcode") as transcode:
# A uri without fields and an empty body will force all the
# request fields to show up in the query_params.
transcode_result = {
"uri": "v1/sample_method",
"method": "get",
"query_params": request_init,
}
transcode.return_value = transcode_result
response_value = Response()
response_value.status_code = 200
json_return_value = compute.RegionInstanceGroupManagersListErrorsResponse.to_json(
return_value
)
response_value._content = json_return_value.encode("UTF-8")
req.return_value = response_value
response = client.list_errors(request)
expected_params = []
actual_params = req.call_args.kwargs["params"]
assert expected_params == actual_params
def test_list_errors_rest_unset_required_fields():
transport = transports.RegionInstanceGroupManagersRestTransport(
credentials=ga_credentials.AnonymousCredentials()
)
unset_fields = transport.list_errors._get_unset_required_fields({})
assert set(unset_fields) == (
set(("maxResults", "filter", "orderBy", "pageToken", "returnPartialSuccess",))
& set(("instanceGroupManager", "project", "region",))
)
def test_list_errors_rest_bad_request(
transport: str = "rest",
request_type=compute.ListErrorsRegionInstanceGroupManagersRequest,
):
client = RegionInstanceGroupManagersClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# send a request that will satisfy transcoding
request_init = {
"project": "sample1",
"region": "sample2",
"instance_group_manager": "sample3",
}
request = request_type(request_init)
# Mock the http request call within the method and fake a BadRequest error.
with mock.patch.object(Session, "request") as req, pytest.raises(
core_exceptions.BadRequest
):
# Wrap the value into a proper Response obj
response_value = Response()
response_value.status_code = 400
response_value.request = Request()
req.return_value = response_value
client.list_errors(request)
def test_list_errors_rest_flattened():
client = RegionInstanceGroupManagersClient(
credentials=ga_credentials.AnonymousCredentials(), transport="rest",
)
# Mock the http request call within the method and fake a response.
with mock.patch.object(type(client.transport._session), "request") as req:
# Designate an appropriate value for the returned response.
return_value = compute.RegionInstanceGroupManagersListErrorsResponse()
# Wrap the value into a proper Response obj
response_value = Response()
response_value.status_code = 200
json_return_value = compute.RegionInstanceGroupManagersListErrorsResponse.to_json(
return_value
)
response_value._content = json_return_value.encode("UTF-8")
req.return_value = response_value
# get arguments that satisfy an http rule for this method
sample_request = {
"project": "sample1",
"region": "sample2",
"instance_group_manager": "sample3",
}
# get truthy value for each flattened field
mock_args = dict(
project="project_value",
region="region_value",
instance_group_manager="instance_group_manager_value",
)
mock_args.update(sample_request)
client.list_errors(**mock_args)
# Establish that the underlying call was made with the expected
# request object values.
assert len(req.mock_calls) == 1
_, args, _ = req.mock_calls[0]
assert path_template.validate(
"https://%s/compute/v1/projects/{project}/regions/{region}/instanceGroupManagers/{instance_group_manager}/listErrors"
% client.transport._host,
args[1],
)
def test_list_errors_rest_flattened_error(transport: str = "rest"):
client = RegionInstanceGroupManagersClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
client.list_errors(
compute.ListErrorsRegionInstanceGroupManagersRequest(),
project="project_value",
region="region_value",
instance_group_manager="instance_group_manager_value",
)
def test_list_errors_rest_pager(transport: str = "rest"):
client = RegionInstanceGroupManagersClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Mock the http request call within the method and fake a response.
with mock.patch.object(Session, "request") as req:
# TODO(kbandes): remove this mock unless there's a good reason for it.
# with mock.patch.object(path_template, 'transcode') as transcode:
# Set the response as a series of pages
response = (
compute.RegionInstanceGroupManagersListErrorsResponse(
items=[
compute.InstanceManagedByIgmError(),
compute.InstanceManagedByIgmError(),
compute.InstanceManagedByIgmError(),
],
next_page_token="abc",
),
compute.RegionInstanceGroupManagersListErrorsResponse(
items=[], next_page_token="def",
),
compute.RegionInstanceGroupManagersListErrorsResponse(
items=[compute.InstanceManagedByIgmError(),], next_page_token="ghi",
),
compute.RegionInstanceGroupManagersListErrorsResponse(
items=[
compute.InstanceManagedByIgmError(),
compute.InstanceManagedByIgmError(),
],
),
)
# Two responses for two calls
response = response + response
# Wrap the values into proper Response objs
response = tuple(
compute.RegionInstanceGroupManagersListErrorsResponse.to_json(x)
for x in response
)
return_values = tuple(Response() for i in response)
for return_val, response_val in zip(return_values, response):
return_val._content = response_val.encode("UTF-8")
return_val.status_code = 200
req.side_effect = return_values
sample_request = {
"project": "sample1",
"region": "sample2",
"instance_group_manager": "sample3",
}
pager = client.list_errors(request=sample_request)
results = list(pager)
assert len(results) == 6
assert all(isinstance(i, compute.InstanceManagedByIgmError) for i in results)
pages = list(client.list_errors(request=sample_request).pages)
for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
assert page_.raw_page.next_page_token == token
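# Illustrative sketch: ``mock`` returns successive ``side_effect`` elements on
# successive calls, which is how the pager tests feed one prepared page per
# HTTP request. Hypothetical two-call demonstration:
def _example_side_effect_sequence():
    m = mock.Mock(side_effect=["page1", "page2"])
    assert m() == "page1"
    assert m() == "page2"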
@pytest.mark.parametrize(
"request_type",
[compute.ListManagedInstancesRegionInstanceGroupManagersRequest, dict,],
)
def test_list_managed_instances_rest(request_type):
client = RegionInstanceGroupManagersClient(
credentials=ga_credentials.AnonymousCredentials(), transport="rest",
)
# send a request that will satisfy transcoding
request_init = {
"project": "sample1",
"region": "sample2",
"instance_group_manager": "sample3",
}
request = request_type(request_init)
# Mock the http request call within the method and fake a response.
with mock.patch.object(type(client.transport._session), "request") as req:
# Designate an appropriate value for the returned response.
return_value = compute.RegionInstanceGroupManagersListInstancesResponse(
next_page_token="next_page_token_value",
)
# Wrap the value into a proper Response obj
response_value = Response()
response_value.status_code = 200
json_return_value = compute.RegionInstanceGroupManagersListInstancesResponse.to_json(
return_value
)
response_value._content = json_return_value.encode("UTF-8")
req.return_value = response_value
response = client.list_managed_instances(request)
# Establish that the response is the type that we expect.
assert isinstance(response, pagers.ListManagedInstancesPager)
assert response.next_page_token == "next_page_token_value"
def test_list_managed_instances_rest_required_fields(
request_type=compute.ListManagedInstancesRegionInstanceGroupManagersRequest,
):
transport_class = transports.RegionInstanceGroupManagersRestTransport
request_init = {}
request_init["instance_group_manager"] = ""
request_init["project"] = ""
request_init["region"] = ""
request = request_type(request_init)
jsonified_request = json.loads(
request_type.to_json(
request, including_default_value_fields=False, use_integers_for_enums=False
)
)
# verify fields with default values are dropped
unset_fields = transport_class(
credentials=ga_credentials.AnonymousCredentials()
).list_managed_instances._get_unset_required_fields(jsonified_request)
jsonified_request.update(unset_fields)
# verify required fields with default values are now present
jsonified_request["instanceGroupManager"] = "instance_group_manager_value"
jsonified_request["project"] = "project_value"
jsonified_request["region"] = "region_value"
unset_fields = transport_class(
credentials=ga_credentials.AnonymousCredentials()
).list_managed_instances._get_unset_required_fields(jsonified_request)
# Check that path parameters and body parameters are not mixed in.
assert not set(unset_fields) - set(
("max_results", "filter", "order_by", "page_token", "return_partial_success",)
)
jsonified_request.update(unset_fields)
# verify required fields with non-default values are left alone
assert "instanceGroupManager" in jsonified_request
assert jsonified_request["instanceGroupManager"] == "instance_group_manager_value"
assert "project" in jsonified_request
assert jsonified_request["project"] == "project_value"
assert "region" in jsonified_request
assert jsonified_request["region"] == "region_value"
client = RegionInstanceGroupManagersClient(
credentials=ga_credentials.AnonymousCredentials(), transport="rest",
)
request = request_type(request_init)
# Designate an appropriate value for the returned response.
return_value = compute.RegionInstanceGroupManagersListInstancesResponse()
# Mock the http request call within the method and fake a response.
with mock.patch.object(Session, "request") as req:
# We need to mock transcode() because the real implementation would reject
# the default values supplied for required fields when the http_options
# expect concrete values for those fields.
with mock.patch.object(path_template, "transcode") as transcode:
# A URI without fields and an empty body forces every request field to
# show up in the query_params.
transcode_result = {
"uri": "v1/sample_method",
"method": "post",
"query_params": request_init,
}
transcode.return_value = transcode_result
response_value = Response()
response_value.status_code = 200
json_return_value = compute.RegionInstanceGroupManagersListInstancesResponse.to_json(
return_value
)
response_value._content = json_return_value.encode("UTF-8")
req.return_value = response_value
response = client.list_managed_instances(request)
expected_params = []
actual_params = req.call_args.kwargs["params"]
assert expected_params == actual_params
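# _get_unset_required_fields() returns a dict-like mapping of the still-unset
# required fields to their default values (the update() calls above rely on
# this); the required-fields tests drive it twice, first on an empty request
# where defaults were dropped, and again after the path parameters are set.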
def test_list_managed_instances_rest_unset_required_fields():
transport = transports.RegionInstanceGroupManagersRestTransport(
credentials=ga_credentials.AnonymousCredentials()
)
unset_fields = transport.list_managed_instances._get_unset_required_fields({})
assert set(unset_fields) == (
set(("maxResults", "filter", "orderBy", "pageToken", "returnPartialSuccess",))
& set(("instanceGroupManager", "project", "region",))
)
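# The assertion above intersects the method's query-string parameters with
# its required fields; only required fields that travel as query parameters
# should ever be reported as unset. For list_managed_instances the two sets
# are disjoint (every required field is a path parameter), so the expected
# result is the empty set:
#
#     >>> {"maxResults", "pageToken"} & {"instanceGroupManager", "project"}
#     set()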
def test_list_managed_instances_rest_bad_request(
transport: str = "rest",
request_type=compute.ListManagedInstancesRegionInstanceGroupManagersRequest,
):
client = RegionInstanceGroupManagersClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# send a request that will satisfy transcoding
request_init = {
"project": "sample1",
"region": "sample2",
"instance_group_manager": "sample3",
}
request = request_type(request_init)
# Mock the http request call within the method and fake a BadRequest error.
with mock.patch.object(Session, "request") as req, pytest.raises(
core_exceptions.BadRequest
):
# Wrap the value into a proper Response obj
response_value = Response()
response_value.status_code = 400
response_value.request = Request()
req.return_value = response_value
client.list_managed_instances(request)
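# The REST transport translates HTTP error statuses into google.api_core
# exceptions (a 400 surfaces as core_exceptions.BadRequest), so stubbing the
# status code plus a Request object (which the error path expects on the
# response) is enough to trigger the raise asserted above.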
def test_list_managed_instances_rest_flattened():
client = RegionInstanceGroupManagersClient(
credentials=ga_credentials.AnonymousCredentials(), transport="rest",
)
# Mock the http request call within the method and fake a response.
with mock.patch.object(type(client.transport._session), "request") as req:
# Designate an appropriate value for the returned response.
return_value = compute.RegionInstanceGroupManagersListInstancesResponse()
# Wrap the value into a proper Response obj
response_value = Response()
response_value.status_code = 200
json_return_value = compute.RegionInstanceGroupManagersListInstancesResponse.to_json(
return_value
)
response_value._content = json_return_value.encode("UTF-8")
req.return_value = response_value
# get arguments that satisfy an http rule for this method
sample_request = {
"project": "sample1",
"region": "sample2",
"instance_group_manager": "sample3",
}
# get truthy value for each flattened field
mock_args = dict(
project="project_value",
region="region_value",
instance_group_manager="instance_group_manager_value",
)
mock_args.update(sample_request)
client.list_managed_instances(**mock_args)
# Establish that the underlying call was made with the expected
# request object values.
assert len(req.mock_calls) == 1
_, args, _ = req.mock_calls[0]
assert path_template.validate(
"https://%s/compute/v1/projects/{project}/regions/{region}/instanceGroupManagers/{instance_group_manager}/listManagedInstances"
% client.transport._host,
args[1],
)
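# path_template.validate() succeeds when the concrete URL matches the
# template after substituting every {field} with a non-empty segment, e.g.:
#
#     >>> from google.api_core import path_template
#     >>> path_template.validate("projects/{project}", "projects/sample1")
#     True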
def test_list_managed_instances_rest_flattened_error(transport: str = "rest"):
client = RegionInstanceGroupManagersClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
client.list_managed_instances(
compute.ListManagedInstancesRegionInstanceGroupManagersRequest(),
project="project_value",
region="region_value",
instance_group_manager="instance_group_manager_value",
)
def test_list_managed_instances_rest_pager(transport: str = "rest"):
client = RegionInstanceGroupManagersClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Mock the http request call within the method and fake a response.
with mock.patch.object(Session, "request") as req:
# Set the response as a series of pages
response = (
compute.RegionInstanceGroupManagersListInstancesResponse(
managed_instances=[
compute.ManagedInstance(),
compute.ManagedInstance(),
compute.ManagedInstance(),
],
next_page_token="abc",
),
compute.RegionInstanceGroupManagersListInstancesResponse(
managed_instances=[], next_page_token="def",
),
compute.RegionInstanceGroupManagersListInstancesResponse(
managed_instances=[compute.ManagedInstance(),], next_page_token="ghi",
),
compute.RegionInstanceGroupManagersListInstancesResponse(
managed_instances=[
compute.ManagedInstance(),
compute.ManagedInstance(),
],
),
)
# Duplicate the page series: the client is invoked twice below (once for the pager, once for .pages)
response = response + response
# Wrap the values into proper Response objs
response = tuple(
compute.RegionInstanceGroupManagersListInstancesResponse.to_json(x)
for x in response
)
return_values = tuple(Response() for _ in response)
for return_val, response_val in zip(return_values, response):
return_val._content = response_val.encode("UTF-8")
return_val.status_code = 200
req.side_effect = return_values
sample_request = {
"project": "sample1",
"region": "sample2",
"instance_group_manager": "sample3",
}
pager = client.list_managed_instances(request=sample_request)
results = list(pager)
assert len(results) == 6
assert all(isinstance(i, compute.ManagedInstance) for i in results)
pages = list(client.list_managed_instances(request=sample_request).pages)
for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
assert page_.raw_page.next_page_token == token
@pytest.mark.parametrize(
"request_type",
[compute.ListPerInstanceConfigsRegionInstanceGroupManagersRequest, dict,],
)
def test_list_per_instance_configs_rest(request_type):
client = RegionInstanceGroupManagersClient(
credentials=ga_credentials.AnonymousCredentials(), transport="rest",
)
# send a request that will satisfy transcoding
request_init = {
"project": "sample1",
"region": "sample2",
"instance_group_manager": "sample3",
}
request = request_type(request_init)
# Mock the http request call within the method and fake a response.
with mock.patch.object(type(client.transport._session), "request") as req:
# Designate an appropriate value for the returned response.
return_value = compute.RegionInstanceGroupManagersListInstanceConfigsResp(
next_page_token="next_page_token_value",
)
# Wrap the value into a proper Response obj
response_value = Response()
response_value.status_code = 200
json_return_value = compute.RegionInstanceGroupManagersListInstanceConfigsResp.to_json(
return_value
)
response_value._content = json_return_value.encode("UTF-8")
req.return_value = response_value
response = client.list_per_instance_configs(request)
# Establish that the response is the type that we expect.
assert isinstance(response, pagers.ListPerInstanceConfigsPager)
assert response.next_page_token == "next_page_token_value"
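# Note: RegionInstanceGroupManagersListInstanceConfigsResp (with the
# truncated "Resp") is the actual message name in the compute API surface,
# not a typo in these tests.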
def test_list_per_instance_configs_rest_required_fields(
request_type=compute.ListPerInstanceConfigsRegionInstanceGroupManagersRequest,
):
transport_class = transports.RegionInstanceGroupManagersRestTransport
request_init = {}
request_init["instance_group_manager"] = ""
request_init["project"] = ""
request_init["region"] = ""
request = request_type(request_init)
jsonified_request = json.loads(
request_type.to_json(
request, including_default_value_fields=False, use_integers_for_enums=False
)
)
# verify fields with default values are dropped
unset_fields = transport_class(
credentials=ga_credentials.AnonymousCredentials()
).list_per_instance_configs._get_unset_required_fields(jsonified_request)
jsonified_request.update(unset_fields)
# verify required fields with default values are now present
jsonified_request["instanceGroupManager"] = "instance_group_manager_value"
jsonified_request["project"] = "project_value"
jsonified_request["region"] = "region_value"
unset_fields = transport_class(
credentials=ga_credentials.AnonymousCredentials()
).list_per_instance_configs._get_unset_required_fields(jsonified_request)
# Check that path parameters and body parameters are not mixed in.
assert not set(unset_fields) - set(
("max_results", "filter", "order_by", "page_token", "return_partial_success",)
)
jsonified_request.update(unset_fields)
# verify required fields with non-default values are left alone
assert "instanceGroupManager" in jsonified_request
assert jsonified_request["instanceGroupManager"] == "instance_group_manager_value"
assert "project" in jsonified_request
assert jsonified_request["project"] == "project_value"
assert "region" in jsonified_request
assert jsonified_request["region"] == "region_value"
client = RegionInstanceGroupManagersClient(
credentials=ga_credentials.AnonymousCredentials(), transport="rest",
)
request = request_type(request_init)
# Designate an appropriate value for the returned response.
return_value = compute.RegionInstanceGroupManagersListInstanceConfigsResp()
# Mock the http request call within the method and fake a response.
with mock.patch.object(Session, "request") as req:
# We need to mock transcode() because the real implementation would reject
# the default values supplied for required fields when the http_options
# expect concrete values for those fields.
with mock.patch.object(path_template, "transcode") as transcode:
# A URI without fields and an empty body forces every request field to
# show up in the query_params.
transcode_result = {
"uri": "v1/sample_method",
"method": "post",
"query_params": request_init,
}
transcode.return_value = transcode_result
response_value = Response()
response_value.status_code = 200
json_return_value = compute.RegionInstanceGroupManagersListInstanceConfigsResp.to_json(
return_value
)
response_value._content = json_return_value.encode("UTF-8")
req.return_value = response_value
response = client.list_per_instance_configs(request)
expected_params = []
actual_params = req.call_args.kwargs["params"]
assert expected_params == actual_params
def test_list_per_instance_configs_rest_unset_required_fields():
transport = transports.RegionInstanceGroupManagersRestTransport(
credentials=ga_credentials.AnonymousCredentials()
)
unset_fields = transport.list_per_instance_configs._get_unset_required_fields({})
assert set(unset_fields) == (
set(("maxResults", "filter", "orderBy", "pageToken", "returnPartialSuccess",))
& set(("instanceGroupManager", "project", "region",))
)
def test_list_per_instance_configs_rest_bad_request(
transport: str = "rest",
request_type=compute.ListPerInstanceConfigsRegionInstanceGroupManagersRequest,
):
client = RegionInstanceGroupManagersClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# send a request that will satisfy transcoding
request_init = {
"project": "sample1",
"region": "sample2",
"instance_group_manager": "sample3",
}
request = request_type(request_init)
# Mock the http request call within the method and fake a BadRequest error.
with mock.patch.object(Session, "request") as req, pytest.raises(
core_exceptions.BadRequest
):
# Wrap the value into a proper Response obj
response_value = Response()
response_value.status_code = 400
response_value.request = Request()
req.return_value = response_value
client.list_per_instance_configs(request)
def test_list_per_instance_configs_rest_flattened():
client = RegionInstanceGroupManagersClient(
credentials=ga_credentials.AnonymousCredentials(), transport="rest",
)
# Mock the http request call within the method and fake a response.
with mock.patch.object(type(client.transport._session), "request") as req:
# Designate an appropriate value for the returned response.
return_value = compute.RegionInstanceGroupManagersListInstanceConfigsResp()
# Wrap the value into a proper Response obj
response_value = Response()
response_value.status_code = 200
json_return_value = compute.RegionInstanceGroupManagersListInstanceConfigsResp.to_json(
return_value
)
response_value._content = json_return_value.encode("UTF-8")
req.return_value = response_value
# get arguments that satisfy an http rule for this method
sample_request = {
"project": "sample1",
"region": "sample2",
"instance_group_manager": "sample3",
}
# get truthy value for each flattened field
mock_args = dict(
project="project_value",
region="region_value",
instance_group_manager="instance_group_manager_value",
)
mock_args.update(sample_request)
client.list_per_instance_configs(**mock_args)
# Establish that the underlying call was made with the expected
# request object values.
assert len(req.mock_calls) == 1
_, args, _ = req.mock_calls[0]
assert path_template.validate(
"https://%s/compute/v1/projects/{project}/regions/{region}/instanceGroupManagers/{instance_group_manager}/listPerInstanceConfigs"
% client.transport._host,
args[1],
)
def test_list_per_instance_configs_rest_flattened_error(transport: str = "rest"):
client = RegionInstanceGroupManagersClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
client.list_per_instance_configs(
compute.ListPerInstanceConfigsRegionInstanceGroupManagersRequest(),
project="project_value",
region="region_value",
instance_group_manager="instance_group_manager_value",
)
def test_list_per_instance_configs_rest_pager(transport: str = "rest"):
client = RegionInstanceGroupManagersClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Mock the http request call within the method and fake a response.
with mock.patch.object(Session, "request") as req:
# Set the response as a series of pages
response = (
compute.RegionInstanceGroupManagersListInstanceConfigsResp(
items=[
compute.PerInstanceConfig(),
compute.PerInstanceConfig(),
compute.PerInstanceConfig(),
],
next_page_token="abc",
),
compute.RegionInstanceGroupManagersListInstanceConfigsResp(
items=[], next_page_token="def",
),
compute.RegionInstanceGroupManagersListInstanceConfigsResp(
items=[compute.PerInstanceConfig(),], next_page_token="ghi",
),
compute.RegionInstanceGroupManagersListInstanceConfigsResp(
items=[compute.PerInstanceConfig(), compute.PerInstanceConfig(),],
),
)
# Duplicate the page series: the client is invoked twice below (once for the pager, once for .pages)
response = response + response
# Wrap the values into proper Response objs
response = tuple(
compute.RegionInstanceGroupManagersListInstanceConfigsResp.to_json(x)
for x in response
)
return_values = tuple(Response() for _ in response)
for return_val, response_val in zip(return_values, response):
return_val._content = response_val.encode("UTF-8")
return_val.status_code = 200
req.side_effect = return_values
sample_request = {
"project": "sample1",
"region": "sample2",
"instance_group_manager": "sample3",
}
pager = client.list_per_instance_configs(request=sample_request)
results = list(pager)
assert len(results) == 6
assert all(isinstance(i, compute.PerInstanceConfig) for i in results)
pages = list(client.list_per_instance_configs(request=sample_request).pages)
for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
assert page_.raw_page.next_page_token == token
@pytest.mark.parametrize(
"request_type", [compute.PatchRegionInstanceGroupManagerRequest, dict,]
)
def test_patch_unary_rest(request_type):
client = RegionInstanceGroupManagersClient(
credentials=ga_credentials.AnonymousCredentials(), transport="rest",
)
# send a request that will satisfy transcoding
request_init = {
"project": "sample1",
"region": "sample2",
"instance_group_manager": "sample3",
}
request_init["instance_group_manager_resource"] = {
"auto_healing_policies": [
{"health_check": "health_check_value", "initial_delay_sec": 1778}
],
"base_instance_name": "base_instance_name_value",
"creation_timestamp": "creation_timestamp_value",
"current_actions": {
"abandoning": 1041,
"creating": 845,
"creating_without_retries": 2589,
"deleting": 844,
"none": 432,
"recreating": 1060,
"refreshing": 1069,
"restarting": 1091,
"verifying": 979,
},
"description": "description_value",
"distribution_policy": {
"target_shape": "target_shape_value",
"zones": [{"zone": "zone_value"}],
},
"fingerprint": "fingerprint_value",
"id": 205,
"instance_group": "instance_group_value",
"instance_template": "instance_template_value",
"kind": "kind_value",
"name": "name_value",
"named_ports": [{"name": "name_value", "port": 453}],
"region": "region_value",
"self_link": "self_link_value",
"stateful_policy": {"preserved_state": {"disks": {}}},
"status": {
"autoscaler": "autoscaler_value",
"is_stable": True,
"stateful": {
"has_stateful_config": True,
"per_instance_configs": {"all_effective": True},
},
"version_target": {"is_reached": True},
},
"target_pools": ["target_pools_value_1", "target_pools_value_2"],
"target_size": 1185,
"update_policy": {
"instance_redistribution_type": "instance_redistribution_type_value",
"max_surge": {"calculated": 1042, "fixed": 528, "percent": 753},
"max_unavailable": {"calculated": 1042, "fixed": 528, "percent": 753},
"minimal_action": "minimal_action_value",
"replacement_method": "replacement_method_value",
"type_": "type__value",
},
"versions": [
{
"instance_template": "instance_template_value",
"name": "name_value",
"target_size": {"calculated": 1042, "fixed": 528, "percent": 753},
}
],
"zone": "zone_value",
}
request = request_type(request_init)
# Mock the http request call within the method and fake a response.
with mock.patch.object(type(client.transport._session), "request") as req:
# Designate an appropriate value for the returned response.
return_value = compute.Operation(
client_operation_id="client_operation_id_value",
creation_timestamp="creation_timestamp_value",
description="description_value",
end_time="end_time_value",
http_error_message="http_error_message_value",
http_error_status_code=2374,
id=205,
insert_time="insert_time_value",
kind="kind_value",
name="name_value",
operation_group_id="operation_group_id_value",
operation_type="operation_type_value",
progress=885,
region="region_value",
self_link="self_link_value",
start_time="start_time_value",
status=compute.Operation.Status.DONE,
status_message="status_message_value",
target_id=947,
target_link="target_link_value",
user="user_value",
zone="zone_value",
)
# Wrap the value into a proper Response obj
response_value = Response()
response_value.status_code = 200
json_return_value = compute.Operation.to_json(return_value)
response_value._content = json_return_value.encode("UTF-8")
req.return_value = response_value
response = client.patch_unary(request)
# Establish that the response is the type that we expect.
assert isinstance(response, compute.Operation)
assert response.client_operation_id == "client_operation_id_value"
assert response.creation_timestamp == "creation_timestamp_value"
assert response.description == "description_value"
assert response.end_time == "end_time_value"
assert response.http_error_message == "http_error_message_value"
assert response.http_error_status_code == 2374
assert response.id == 205
assert response.insert_time == "insert_time_value"
assert response.kind == "kind_value"
assert response.name == "name_value"
assert response.operation_group_id == "operation_group_id_value"
assert response.operation_type == "operation_type_value"
assert response.progress == 885
assert response.region == "region_value"
assert response.self_link == "self_link_value"
assert response.start_time == "start_time_value"
assert response.status == compute.Operation.Status.DONE
assert response.status_message == "status_message_value"
assert response.target_id == 947
assert response.target_link == "target_link_value"
assert response.user == "user_value"
assert response.zone == "zone_value"
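# patch_unary (like the other *_unary methods in this suite) returns the raw
# compute.Operation rather than polling it to completion, so the assertions
# above simply echo back every field that was set on the fake Operation.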
def test_patch_unary_rest_required_fields(
request_type=compute.PatchRegionInstanceGroupManagerRequest,
):
transport_class = transports.RegionInstanceGroupManagersRestTransport
request_init = {}
request_init["instance_group_manager"] = ""
request_init["project"] = ""
request_init["region"] = ""
request = request_type(request_init)
jsonified_request = json.loads(
request_type.to_json(
request, including_default_value_fields=False, use_integers_for_enums=False
)
)
# verify fields with default values are dropped
unset_fields = transport_class(
credentials=ga_credentials.AnonymousCredentials()
).patch._get_unset_required_fields(jsonified_request)
jsonified_request.update(unset_fields)
# verify required fields with default values are now present
jsonified_request["instanceGroupManager"] = "instance_group_manager_value"
jsonified_request["project"] = "project_value"
jsonified_request["region"] = "region_value"
unset_fields = transport_class(
credentials=ga_credentials.AnonymousCredentials()
).patch._get_unset_required_fields(jsonified_request)
# Check that path parameters and body parameters are not mixed in.
assert not set(unset_fields) - set(("request_id",))
jsonified_request.update(unset_fields)
# verify required fields with non-default values are left alone
assert "instanceGroupManager" in jsonified_request
assert jsonified_request["instanceGroupManager"] == "instance_group_manager_value"
assert "project" in jsonified_request
assert jsonified_request["project"] == "project_value"
assert "region" in jsonified_request
assert jsonified_request["region"] == "region_value"
client = RegionInstanceGroupManagersClient(
credentials=ga_credentials.AnonymousCredentials(), transport="rest",
)
request = request_type(request_init)
# Designate an appropriate value for the returned response.
return_value = compute.Operation()
# Mock the http request call within the method and fake a response.
with mock.patch.object(Session, "request") as req:
# We need to mock transcode() because the real implementation would reject
# the default values supplied for required fields when the http_options
# expect concrete values for those fields.
with mock.patch.object(path_template, "transcode") as transcode:
# A URI without fields and an empty body forces every request field to
# show up in the query_params.
transcode_result = {
"uri": "v1/sample_method",
"method": "patch",
"query_params": request_init,
}
transcode_result["body"] = {}
transcode.return_value = transcode_result
response_value = Response()
response_value.status_code = 200
json_return_value = compute.Operation.to_json(return_value)
response_value._content = json_return_value.encode("UTF-8")
req.return_value = response_value
response = client.patch_unary(request)
expected_params = []
actual_params = req.call_args.kwargs["params"]
assert expected_params == actual_params
def test_patch_unary_rest_unset_required_fields():
transport = transports.RegionInstanceGroupManagersRestTransport(
credentials=ga_credentials.AnonymousCredentials()
)
unset_fields = transport.patch._get_unset_required_fields({})
assert set(unset_fields) == (
set(("requestId",))
& set(
(
"instanceGroupManager",
"instanceGroupManagerResource",
"project",
"region",
)
)
)
def test_patch_unary_rest_bad_request(
transport: str = "rest", request_type=compute.PatchRegionInstanceGroupManagerRequest
):
client = RegionInstanceGroupManagersClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# send a request that will satisfy transcoding
request_init = {
"project": "sample1",
"region": "sample2",
"instance_group_manager": "sample3",
}
request_init["instance_group_manager_resource"] = {
"auto_healing_policies": [
{"health_check": "health_check_value", "initial_delay_sec": 1778}
],
"base_instance_name": "base_instance_name_value",
"creation_timestamp": "creation_timestamp_value",
"current_actions": {
"abandoning": 1041,
"creating": 845,
"creating_without_retries": 2589,
"deleting": 844,
"none": 432,
"recreating": 1060,
"refreshing": 1069,
"restarting": 1091,
"verifying": 979,
},
"description": "description_value",
"distribution_policy": {
"target_shape": "target_shape_value",
"zones": [{"zone": "zone_value"}],
},
"fingerprint": "fingerprint_value",
"id": 205,
"instance_group": "instance_group_value",
"instance_template": "instance_template_value",
"kind": "kind_value",
"name": "name_value",
"named_ports": [{"name": "name_value", "port": 453}],
"region": "region_value",
"self_link": "self_link_value",
"stateful_policy": {"preserved_state": {"disks": {}}},
"status": {
"autoscaler": "autoscaler_value",
"is_stable": True,
"stateful": {
"has_stateful_config": True,
"per_instance_configs": {"all_effective": True},
},
"version_target": {"is_reached": True},
},
"target_pools": ["target_pools_value_1", "target_pools_value_2"],
"target_size": 1185,
"update_policy": {
"instance_redistribution_type": "instance_redistribution_type_value",
"max_surge": {"calculated": 1042, "fixed": 528, "percent": 753},
"max_unavailable": {"calculated": 1042, "fixed": 528, "percent": 753},
"minimal_action": "minimal_action_value",
"replacement_method": "replacement_method_value",
"type_": "type__value",
},
"versions": [
{
"instance_template": "instance_template_value",
"name": "name_value",
"target_size": {"calculated": 1042, "fixed": 528, "percent": 753},
}
],
"zone": "zone_value",
}
request = request_type(request_init)
# Mock the http request call within the method and fake a BadRequest error.
with mock.patch.object(Session, "request") as req, pytest.raises(
core_exceptions.BadRequest
):
# Wrap the value into a proper Response obj
response_value = Response()
response_value.status_code = 400
response_value.request = Request()
req.return_value = response_value
client.patch_unary(request)
def test_patch_unary_rest_flattened():
client = RegionInstanceGroupManagersClient(
credentials=ga_credentials.AnonymousCredentials(), transport="rest",
)
# Mock the http request call within the method and fake a response.
with mock.patch.object(type(client.transport._session), "request") as req:
# Designate an appropriate value for the returned response.
return_value = compute.Operation()
# Wrap the value into a proper Response obj
response_value = Response()
response_value.status_code = 200
json_return_value = compute.Operation.to_json(return_value)
response_value._content = json_return_value.encode("UTF-8")
req.return_value = response_value
# get arguments that satisfy an http rule for this method
sample_request = {
"project": "sample1",
"region": "sample2",
"instance_group_manager": "sample3",
}
# get truthy value for each flattened field
mock_args = dict(
project="project_value",
region="region_value",
instance_group_manager="instance_group_manager_value",
instance_group_manager_resource=compute.InstanceGroupManager(
auto_healing_policies=[
compute.InstanceGroupManagerAutoHealingPolicy(
health_check="health_check_value"
)
]
),
)
mock_args.update(sample_request)
client.patch_unary(**mock_args)
# Establish that the underlying call was made with the expected
# request object values.
assert len(req.mock_calls) == 1
_, args, _ = req.mock_calls[0]
assert path_template.validate(
"https://%s/compute/v1/projects/{project}/regions/{region}/instanceGroupManagers/{instance_group_manager}"
% client.transport._host,
args[1],
)
def test_patch_unary_rest_flattened_error(transport: str = "rest"):
client = RegionInstanceGroupManagersClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
client.patch_unary(
compute.PatchRegionInstanceGroupManagerRequest(),
project="project_value",
region="region_value",
instance_group_manager="instance_group_manager_value",
instance_group_manager_resource=compute.InstanceGroupManager(
auto_healing_policies=[
compute.InstanceGroupManagerAutoHealingPolicy(
health_check="health_check_value"
)
]
),
)
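# The *_rest_error tests below only construct a client; they appear to be
# generated placeholders where dedicated error-path coverage would go.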
def test_patch_unary_rest_error():
client = RegionInstanceGroupManagersClient(
credentials=ga_credentials.AnonymousCredentials(), transport="rest"
)
@pytest.mark.parametrize(
"request_type",
[compute.PatchPerInstanceConfigsRegionInstanceGroupManagerRequest, dict,],
)
def test_patch_per_instance_configs_unary_rest(request_type):
client = RegionInstanceGroupManagersClient(
credentials=ga_credentials.AnonymousCredentials(), transport="rest",
)
# send a request that will satisfy transcoding
request_init = {
"project": "sample1",
"region": "sample2",
"instance_group_manager": "sample3",
}
request_init["region_instance_group_manager_patch_instance_config_req_resource"] = {
"per_instance_configs": [
{
"fingerprint": "fingerprint_value",
"name": "name_value",
"preserved_state": {"disks": {}, "metadata": {}},
"status": "status_value",
}
]
}
request = request_type(request_init)
# Mock the http request call within the method and fake a response.
with mock.patch.object(type(client.transport._session), "request") as req:
# Designate an appropriate value for the returned response.
return_value = compute.Operation(
client_operation_id="client_operation_id_value",
creation_timestamp="creation_timestamp_value",
description="description_value",
end_time="end_time_value",
http_error_message="http_error_message_value",
http_error_status_code=2374,
id=205,
insert_time="insert_time_value",
kind="kind_value",
name="name_value",
operation_group_id="operation_group_id_value",
operation_type="operation_type_value",
progress=885,
region="region_value",
self_link="self_link_value",
start_time="start_time_value",
status=compute.Operation.Status.DONE,
status_message="status_message_value",
target_id=947,
target_link="target_link_value",
user="user_value",
zone="zone_value",
)
# Wrap the value into a proper Response obj
response_value = Response()
response_value.status_code = 200
json_return_value = compute.Operation.to_json(return_value)
response_value._content = json_return_value.encode("UTF-8")
req.return_value = response_value
response = client.patch_per_instance_configs_unary(request)
# Establish that the response is the type that we expect.
assert isinstance(response, compute.Operation)
assert response.client_operation_id == "client_operation_id_value"
assert response.creation_timestamp == "creation_timestamp_value"
assert response.description == "description_value"
assert response.end_time == "end_time_value"
assert response.http_error_message == "http_error_message_value"
assert response.http_error_status_code == 2374
assert response.id == 205
assert response.insert_time == "insert_time_value"
assert response.kind == "kind_value"
assert response.name == "name_value"
assert response.operation_group_id == "operation_group_id_value"
assert response.operation_type == "operation_type_value"
assert response.progress == 885
assert response.region == "region_value"
assert response.self_link == "self_link_value"
assert response.start_time == "start_time_value"
assert response.status == compute.Operation.Status.DONE
assert response.status_message == "status_message_value"
assert response.target_id == 947
assert response.target_link == "target_link_value"
assert response.user == "user_value"
assert response.zone == "zone_value"
def test_patch_per_instance_configs_unary_rest_required_fields(
request_type=compute.PatchPerInstanceConfigsRegionInstanceGroupManagerRequest,
):
transport_class = transports.RegionInstanceGroupManagersRestTransport
request_init = {}
request_init["instance_group_manager"] = ""
request_init["project"] = ""
request_init["region"] = ""
request = request_type(request_init)
jsonified_request = json.loads(
request_type.to_json(
request, including_default_value_fields=False, use_integers_for_enums=False
)
)
# verify fields with default values are dropped
unset_fields = transport_class(
credentials=ga_credentials.AnonymousCredentials()
).patch_per_instance_configs._get_unset_required_fields(jsonified_request)
jsonified_request.update(unset_fields)
# verify required fields with default values are now present
jsonified_request["instanceGroupManager"] = "instance_group_manager_value"
jsonified_request["project"] = "project_value"
jsonified_request["region"] = "region_value"
unset_fields = transport_class(
credentials=ga_credentials.AnonymousCredentials()
).patch_per_instance_configs._get_unset_required_fields(jsonified_request)
# Check that path parameters and body parameters are not mixed in.
assert not set(unset_fields) - set(("request_id",))
jsonified_request.update(unset_fields)
# verify required fields with non-default values are left alone
assert "instanceGroupManager" in jsonified_request
assert jsonified_request["instanceGroupManager"] == "instance_group_manager_value"
assert "project" in jsonified_request
assert jsonified_request["project"] == "project_value"
assert "region" in jsonified_request
assert jsonified_request["region"] == "region_value"
client = RegionInstanceGroupManagersClient(
credentials=ga_credentials.AnonymousCredentials(), transport="rest",
)
request = request_type(request_init)
# Designate an appropriate value for the returned response.
return_value = compute.Operation()
# Mock the http request call within the method and fake a response.
with mock.patch.object(Session, "request") as req:
# We need to mock transcode() because the real implementation would reject
# the default values supplied for required fields when the http_options
# expect concrete values for those fields.
with mock.patch.object(path_template, "transcode") as transcode:
# A URI without fields and an empty body forces every request field to
# show up in the query_params.
transcode_result = {
"uri": "v1/sample_method",
"method": "post",
"query_params": request_init,
}
transcode_result["body"] = {}
transcode.return_value = transcode_result
response_value = Response()
response_value.status_code = 200
json_return_value = compute.Operation.to_json(return_value)
response_value._content = json_return_value.encode("UTF-8")
req.return_value = response_value
response = client.patch_per_instance_configs_unary(request)
expected_params = []
actual_params = req.call_args.kwargs["params"]
assert expected_params == actual_params
def test_patch_per_instance_configs_unary_rest_unset_required_fields():
transport = transports.RegionInstanceGroupManagersRestTransport(
credentials=ga_credentials.AnonymousCredentials()
)
unset_fields = transport.patch_per_instance_configs._get_unset_required_fields({})
assert set(unset_fields) == (
set(("requestId",))
& set(
(
"instanceGroupManager",
"project",
"region",
"regionInstanceGroupManagerPatchInstanceConfigReqResource",
)
)
)
def test_patch_per_instance_configs_unary_rest_bad_request(
transport: str = "rest",
request_type=compute.PatchPerInstanceConfigsRegionInstanceGroupManagerRequest,
):
client = RegionInstanceGroupManagersClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# send a request that will satisfy transcoding
request_init = {
"project": "sample1",
"region": "sample2",
"instance_group_manager": "sample3",
}
request_init["region_instance_group_manager_patch_instance_config_req_resource"] = {
"per_instance_configs": [
{
"fingerprint": "fingerprint_value",
"name": "name_value",
"preserved_state": {"disks": {}, "metadata": {}},
"status": "status_value",
}
]
}
request = request_type(request_init)
# Mock the http request call within the method and fake a BadRequest error.
with mock.patch.object(Session, "request") as req, pytest.raises(
core_exceptions.BadRequest
):
# Wrap the value into a proper Response obj
response_value = Response()
response_value.status_code = 400
response_value.request = Request()
req.return_value = response_value
client.patch_per_instance_configs_unary(request)
def test_patch_per_instance_configs_unary_rest_flattened():
client = RegionInstanceGroupManagersClient(
credentials=ga_credentials.AnonymousCredentials(), transport="rest",
)
# Mock the http request call within the method and fake a response.
with mock.patch.object(type(client.transport._session), "request") as req:
# Designate an appropriate value for the returned response.
return_value = compute.Operation()
# Wrap the value into a proper Response obj
response_value = Response()
response_value.status_code = 200
json_return_value = compute.Operation.to_json(return_value)
response_value._content = json_return_value.encode("UTF-8")
req.return_value = response_value
# get arguments that satisfy an http rule for this method
sample_request = {
"project": "sample1",
"region": "sample2",
"instance_group_manager": "sample3",
}
# get truthy value for each flattened field
mock_args = dict(
project="project_value",
region="region_value",
instance_group_manager="instance_group_manager_value",
region_instance_group_manager_patch_instance_config_req_resource=compute.RegionInstanceGroupManagerPatchInstanceConfigReq(
per_instance_configs=[
compute.PerInstanceConfig(fingerprint="fingerprint_value")
]
),
)
mock_args.update(sample_request)
client.patch_per_instance_configs_unary(**mock_args)
# Establish that the underlying call was made with the expected
# request object values.
assert len(req.mock_calls) == 1
_, args, _ = req.mock_calls[0]
assert path_template.validate(
"https://%s/compute/v1/projects/{project}/regions/{region}/instanceGroupManagers/{instance_group_manager}/patchPerInstanceConfigs"
% client.transport._host,
args[1],
)
def test_patch_per_instance_configs_unary_rest_flattened_error(transport: str = "rest"):
client = RegionInstanceGroupManagersClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
client.patch_per_instance_configs_unary(
compute.PatchPerInstanceConfigsRegionInstanceGroupManagerRequest(),
project="project_value",
region="region_value",
instance_group_manager="instance_group_manager_value",
region_instance_group_manager_patch_instance_config_req_resource=compute.RegionInstanceGroupManagerPatchInstanceConfigReq(
per_instance_configs=[
compute.PerInstanceConfig(fingerprint="fingerprint_value")
]
),
)
def test_patch_per_instance_configs_unary_rest_error():
client = RegionInstanceGroupManagersClient(
credentials=ga_credentials.AnonymousCredentials(), transport="rest"
)
@pytest.mark.parametrize(
"request_type", [compute.RecreateInstancesRegionInstanceGroupManagerRequest, dict,]
)
def test_recreate_instances_unary_rest(request_type):
client = RegionInstanceGroupManagersClient(
credentials=ga_credentials.AnonymousCredentials(), transport="rest",
)
# send a request that will satisfy transcoding
request_init = {
"project": "sample1",
"region": "sample2",
"instance_group_manager": "sample3",
}
request_init["region_instance_group_managers_recreate_request_resource"] = {
"instances": ["instances_value_1", "instances_value_2"]
}
request = request_type(request_init)
# Mock the http request call within the method and fake a response.
with mock.patch.object(type(client.transport._session), "request") as req:
# Designate an appropriate value for the returned response.
return_value = compute.Operation(
client_operation_id="client_operation_id_value",
creation_timestamp="creation_timestamp_value",
description="description_value",
end_time="end_time_value",
http_error_message="http_error_message_value",
http_error_status_code=2374,
id=205,
insert_time="insert_time_value",
kind="kind_value",
name="name_value",
operation_group_id="operation_group_id_value",
operation_type="operation_type_value",
progress=885,
region="region_value",
self_link="self_link_value",
start_time="start_time_value",
status=compute.Operation.Status.DONE,
status_message="status_message_value",
target_id=947,
target_link="target_link_value",
user="user_value",
zone="zone_value",
)
# Wrap the value into a proper Response obj
response_value = Response()
response_value.status_code = 200
json_return_value = compute.Operation.to_json(return_value)
response_value._content = json_return_value.encode("UTF-8")
req.return_value = response_value
response = client.recreate_instances_unary(request)
# Establish that the response is the type that we expect.
assert isinstance(response, compute.Operation)
assert response.client_operation_id == "client_operation_id_value"
assert response.creation_timestamp == "creation_timestamp_value"
assert response.description == "description_value"
assert response.end_time == "end_time_value"
assert response.http_error_message == "http_error_message_value"
assert response.http_error_status_code == 2374
assert response.id == 205
assert response.insert_time == "insert_time_value"
assert response.kind == "kind_value"
assert response.name == "name_value"
assert response.operation_group_id == "operation_group_id_value"
assert response.operation_type == "operation_type_value"
assert response.progress == 885
assert response.region == "region_value"
assert response.self_link == "self_link_value"
assert response.start_time == "start_time_value"
assert response.status == compute.Operation.Status.DONE
assert response.status_message == "status_message_value"
assert response.target_id == 947
assert response.target_link == "target_link_value"
assert response.user == "user_value"
assert response.zone == "zone_value"
def test_recreate_instances_unary_rest_required_fields(
request_type=compute.RecreateInstancesRegionInstanceGroupManagerRequest,
):
transport_class = transports.RegionInstanceGroupManagersRestTransport
request_init = {}
request_init["instance_group_manager"] = ""
request_init["project"] = ""
request_init["region"] = ""
request = request_type(request_init)
jsonified_request = json.loads(
request_type.to_json(
request, including_default_value_fields=False, use_integers_for_enums=False
)
)
# verify fields with default values are dropped
unset_fields = transport_class(
credentials=ga_credentials.AnonymousCredentials()
).recreate_instances._get_unset_required_fields(jsonified_request)
jsonified_request.update(unset_fields)
# verify required fields with default values are now present
jsonified_request["instanceGroupManager"] = "instance_group_manager_value"
jsonified_request["project"] = "project_value"
jsonified_request["region"] = "region_value"
unset_fields = transport_class(
credentials=ga_credentials.AnonymousCredentials()
).recreate_instances._get_unset_required_fields(jsonified_request)
# Check that path parameters and body parameters are not mixed in.
assert not set(unset_fields) - set(("request_id",))
jsonified_request.update(unset_fields)
# verify required fields with non-default values are left alone
assert "instanceGroupManager" in jsonified_request
assert jsonified_request["instanceGroupManager"] == "instance_group_manager_value"
assert "project" in jsonified_request
assert jsonified_request["project"] == "project_value"
assert "region" in jsonified_request
assert jsonified_request["region"] == "region_value"
client = RegionInstanceGroupManagersClient(
credentials=ga_credentials.AnonymousCredentials(), transport="rest",
)
request = request_type(request_init)
# Designate an appropriate value for the returned response.
return_value = compute.Operation()
# Mock the http request call within the method and fake a response.
with mock.patch.object(Session, "request") as req:
# We need to mock transcode() because the real implementation would reject
# the default values supplied for required fields when the http_options
# expect concrete values for those fields.
with mock.patch.object(path_template, "transcode") as transcode:
# A URI without fields and an empty body forces every request field to
# show up in the query_params.
transcode_result = {
"uri": "v1/sample_method",
"method": "post",
"query_params": request_init,
}
transcode_result["body"] = {}
transcode.return_value = transcode_result
response_value = Response()
response_value.status_code = 200
json_return_value = compute.Operation.to_json(return_value)
response_value._content = json_return_value.encode("UTF-8")
req.return_value = response_value
response = client.recreate_instances_unary(request)
expected_params = []
actual_params = req.call_args.kwargs["params"]
assert expected_params == actual_params
def test_recreate_instances_unary_rest_unset_required_fields():
transport = transports.RegionInstanceGroupManagersRestTransport(
credentials=ga_credentials.AnonymousCredentials()
)
unset_fields = transport.recreate_instances._get_unset_required_fields({})
assert set(unset_fields) == (
set(("requestId",))
& set(
(
"instanceGroupManager",
"project",
"region",
"regionInstanceGroupManagersRecreateRequestResource",
)
)
)
def test_recreate_instances_unary_rest_bad_request(
transport: str = "rest",
request_type=compute.RecreateInstancesRegionInstanceGroupManagerRequest,
):
client = RegionInstanceGroupManagersClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# send a request that will satisfy transcoding
request_init = {
"project": "sample1",
"region": "sample2",
"instance_group_manager": "sample3",
}
request_init["region_instance_group_managers_recreate_request_resource"] = {
"instances": ["instances_value_1", "instances_value_2"]
}
request = request_type(request_init)
# Mock the http request call within the method and fake a BadRequest error.
with mock.patch.object(Session, "request") as req, pytest.raises(
core_exceptions.BadRequest
):
# Wrap the value into a proper Response obj
response_value = Response()
response_value.status_code = 400
response_value.request = Request()
req.return_value = response_value
client.recreate_instances_unary(request)
def test_recreate_instances_unary_rest_flattened():
client = RegionInstanceGroupManagersClient(
credentials=ga_credentials.AnonymousCredentials(), transport="rest",
)
# Mock the http request call within the method and fake a response.
with mock.patch.object(type(client.transport._session), "request") as req:
# Designate an appropriate value for the returned response.
return_value = compute.Operation()
# Wrap the value into a proper Response obj
response_value = Response()
response_value.status_code = 200
json_return_value = compute.Operation.to_json(return_value)
response_value._content = json_return_value.encode("UTF-8")
req.return_value = response_value
# get arguments that satisfy an http rule for this method
sample_request = {
"project": "sample1",
"region": "sample2",
"instance_group_manager": "sample3",
}
# get truthy value for each flattened field
mock_args = dict(
project="project_value",
region="region_value",
instance_group_manager="instance_group_manager_value",
region_instance_group_managers_recreate_request_resource=compute.RegionInstanceGroupManagersRecreateRequest(
instances=["instances_value"]
),
)
mock_args.update(sample_request)
client.recreate_instances_unary(**mock_args)
# Establish that the underlying call was made with the expected
# request object values.
assert len(req.mock_calls) == 1
_, args, _ = req.mock_calls[0]
assert path_template.validate(
"https://%s/compute/v1/projects/{project}/regions/{region}/instanceGroupManagers/{instance_group_manager}/recreateInstances"
% client.transport._host,
args[1],
)
def test_recreate_instances_unary_rest_flattened_error(transport: str = "rest"):
client = RegionInstanceGroupManagersClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
client.recreate_instances_unary(
compute.RecreateInstancesRegionInstanceGroupManagerRequest(),
project="project_value",
region="region_value",
instance_group_manager="instance_group_manager_value",
region_instance_group_managers_recreate_request_resource=compute.RegionInstanceGroupManagersRecreateRequest(
instances=["instances_value"]
),
)
def test_recreate_instances_unary_rest_error():
client = RegionInstanceGroupManagersClient(
credentials=ga_credentials.AnonymousCredentials(), transport="rest"
)
@pytest.mark.parametrize(
"request_type", [compute.ResizeRegionInstanceGroupManagerRequest, dict,]
)
def test_resize_unary_rest(request_type):
client = RegionInstanceGroupManagersClient(
credentials=ga_credentials.AnonymousCredentials(), transport="rest",
)
# send a request that will satisfy transcoding
request_init = {
"project": "sample1",
"region": "sample2",
"instance_group_manager": "sample3",
}
request = request_type(request_init)
# Mock the http request call within the method and fake a response.
with mock.patch.object(type(client.transport._session), "request") as req:
# Designate an appropriate value for the returned response.
return_value = compute.Operation(
client_operation_id="client_operation_id_value",
creation_timestamp="creation_timestamp_value",
description="description_value",
end_time="end_time_value",
http_error_message="http_error_message_value",
http_error_status_code=2374,
id=205,
insert_time="insert_time_value",
kind="kind_value",
name="name_value",
operation_group_id="operation_group_id_value",
operation_type="operation_type_value",
progress=885,
region="region_value",
self_link="self_link_value",
start_time="start_time_value",
status=compute.Operation.Status.DONE,
status_message="status_message_value",
target_id=947,
target_link="target_link_value",
user="user_value",
zone="zone_value",
)
# Wrap the value into a proper Response obj
response_value = Response()
response_value.status_code = 200
json_return_value = compute.Operation.to_json(return_value)
response_value._content = json_return_value.encode("UTF-8")
req.return_value = response_value
response = client.resize_unary(request)
# Establish that the response is the type that we expect.
assert isinstance(response, compute.Operation)
assert response.client_operation_id == "client_operation_id_value"
assert response.creation_timestamp == "creation_timestamp_value"
assert response.description == "description_value"
assert response.end_time == "end_time_value"
assert response.http_error_message == "http_error_message_value"
assert response.http_error_status_code == 2374
assert response.id == 205
assert response.insert_time == "insert_time_value"
assert response.kind == "kind_value"
assert response.name == "name_value"
assert response.operation_group_id == "operation_group_id_value"
assert response.operation_type == "operation_type_value"
assert response.progress == 885
assert response.region == "region_value"
assert response.self_link == "self_link_value"
assert response.start_time == "start_time_value"
assert response.status == compute.Operation.Status.DONE
assert response.status_message == "status_message_value"
assert response.target_id == 947
assert response.target_link == "target_link_value"
assert response.user == "user_value"
assert response.zone == "zone_value"
def test_resize_unary_rest_required_fields(
request_type=compute.ResizeRegionInstanceGroupManagerRequest,
):
transport_class = transports.RegionInstanceGroupManagersRestTransport
request_init = {}
request_init["instance_group_manager"] = ""
request_init["project"] = ""
request_init["region"] = ""
request_init["size"] = 0
request = request_type(request_init)
jsonified_request = json.loads(
request_type.to_json(
request, including_default_value_fields=False, use_integers_for_enums=False
)
)
# verify fields with default values are dropped
assert "size" not in jsonified_request
unset_fields = transport_class(
credentials=ga_credentials.AnonymousCredentials()
).resize._get_unset_required_fields(jsonified_request)
jsonified_request.update(unset_fields)
# verify required fields with default values are now present
assert "size" in jsonified_request
assert jsonified_request["size"] == request_init["size"]
jsonified_request["instanceGroupManager"] = "instance_group_manager_value"
jsonified_request["project"] = "project_value"
jsonified_request["region"] = "region_value"
jsonified_request["size"] = 443
unset_fields = transport_class(
credentials=ga_credentials.AnonymousCredentials()
).resize._get_unset_required_fields(jsonified_request)
# Check that path parameters and body parameters are not mixed in.
assert not set(unset_fields) - set(("request_id", "size",))
jsonified_request.update(unset_fields)
# verify required fields with non-default values are left alone
assert "instanceGroupManager" in jsonified_request
assert jsonified_request["instanceGroupManager"] == "instance_group_manager_value"
assert "project" in jsonified_request
assert jsonified_request["project"] == "project_value"
assert "region" in jsonified_request
assert jsonified_request["region"] == "region_value"
assert "size" in jsonified_request
assert jsonified_request["size"] == 443
client = RegionInstanceGroupManagersClient(
credentials=ga_credentials.AnonymousCredentials(), transport="rest",
)
request = request_type(request_init)
# Designate an appropriate value for the returned response.
return_value = compute.Operation()
# Mock the http request call within the method and fake a response.
with mock.patch.object(Session, "request") as req:
# We need to mock transcode() because the real implementation would reject
# the default values supplied for required fields when the http_options
# expect concrete values for those fields.
with mock.patch.object(path_template, "transcode") as transcode:
# A URI without fields and an empty body forces every request field to
# show up in the query_params.
transcode_result = {
"uri": "v1/sample_method",
"method": "post",
"query_params": request_init,
}
transcode.return_value = transcode_result
response_value = Response()
response_value.status_code = 200
json_return_value = compute.Operation.to_json(return_value)
response_value._content = json_return_value.encode("UTF-8")
req.return_value = response_value
response = client.resize_unary(request)
expected_params = [
("size", 0,),
]
actual_params = req.call_args.kwargs["params"]
assert expected_params == actual_params
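# Unlike the earlier required-fields tests, expected_params is non-empty
# here: "size" is a required field that is sent as a query parameter, so its
# proto default (0) must still appear on the wire even though defaults are
# normally dropped.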
def test_resize_unary_rest_unset_required_fields():
transport = transports.RegionInstanceGroupManagersRestTransport(
credentials=ga_credentials.AnonymousCredentials()
)
unset_fields = transport.resize._get_unset_required_fields({})
assert set(unset_fields) == (
set(("requestId", "size",))
& set(("instanceGroupManager", "project", "region", "size",))
)
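# Here the intersection is non-empty because "size" is both a query
# parameter and a required field:
#
#     >>> {"requestId", "size"} & {"instanceGroupManager", "project", "region", "size"}
#     {'size'}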
def test_resize_unary_rest_bad_request(
transport: str = "rest",
request_type=compute.ResizeRegionInstanceGroupManagerRequest,
):
client = RegionInstanceGroupManagersClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# send a request that will satisfy transcoding
request_init = {
"project": "sample1",
"region": "sample2",
"instance_group_manager": "sample3",
}
request = request_type(request_init)
# Mock the http request call within the method and fake a BadRequest error.
with mock.patch.object(Session, "request") as req, pytest.raises(
core_exceptions.BadRequest
):
# Wrap the value into a proper Response obj
response_value = Response()
response_value.status_code = 400
response_value.request = Request()
req.return_value = response_value
client.resize_unary(request)
def test_resize_unary_rest_flattened():
client = RegionInstanceGroupManagersClient(
credentials=ga_credentials.AnonymousCredentials(), transport="rest",
)
# Mock the http request call within the method and fake a response.
with mock.patch.object(type(client.transport._session), "request") as req:
# Designate an appropriate value for the returned response.
return_value = compute.Operation()
# Wrap the value into a proper Response obj
response_value = Response()
response_value.status_code = 200
json_return_value = compute.Operation.to_json(return_value)
response_value._content = json_return_value.encode("UTF-8")
req.return_value = response_value
# get arguments that satisfy an http rule for this method
sample_request = {
"project": "sample1",
"region": "sample2",
"instance_group_manager": "sample3",
}
# get truthy value for each flattened field
mock_args = dict(
project="project_value",
region="region_value",
instance_group_manager="instance_group_manager_value",
size=443,
)
mock_args.update(sample_request)
client.resize_unary(**mock_args)
# Establish that the underlying call was made with the expected
# request object values.
assert len(req.mock_calls) == 1
_, args, _ = req.mock_calls[0]
assert path_template.validate(
"https://%s/compute/v1/projects/{project}/regions/{region}/instanceGroupManagers/{instance_group_manager}/resize"
% client.transport._host,
args[1],
)
def test_resize_unary_rest_flattened_error(transport: str = "rest"):
client = RegionInstanceGroupManagersClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
client.resize_unary(
compute.ResizeRegionInstanceGroupManagerRequest(),
project="project_value",
region="region_value",
instance_group_manager="instance_group_manager_value",
size=443,
)
def test_resize_unary_rest_error():
client = RegionInstanceGroupManagersClient(
credentials=ga_credentials.AnonymousCredentials(), transport="rest"
)
@pytest.mark.parametrize(
"request_type",
[compute.SetInstanceTemplateRegionInstanceGroupManagerRequest, dict,],
)
def test_set_instance_template_unary_rest(request_type):
client = RegionInstanceGroupManagersClient(
credentials=ga_credentials.AnonymousCredentials(), transport="rest",
)
# send a request that will satisfy transcoding
request_init = {
"project": "sample1",
"region": "sample2",
"instance_group_manager": "sample3",
}
request_init["region_instance_group_managers_set_template_request_resource"] = {
"instance_template": "instance_template_value"
}
request = request_type(request_init)
# Mock the http request call within the method and fake a response.
with mock.patch.object(type(client.transport._session), "request") as req:
# Designate an appropriate value for the returned response.
return_value = compute.Operation(
client_operation_id="client_operation_id_value",
creation_timestamp="creation_timestamp_value",
description="description_value",
end_time="end_time_value",
http_error_message="http_error_message_value",
http_error_status_code=2374,
id=205,
insert_time="insert_time_value",
kind="kind_value",
name="name_value",
operation_group_id="operation_group_id_value",
operation_type="operation_type_value",
progress=885,
region="region_value",
self_link="self_link_value",
start_time="start_time_value",
status=compute.Operation.Status.DONE,
status_message="status_message_value",
target_id=947,
target_link="target_link_value",
user="user_value",
zone="zone_value",
)
# Wrap the value into a proper Response obj
response_value = Response()
response_value.status_code = 200
json_return_value = compute.Operation.to_json(return_value)
response_value._content = json_return_value.encode("UTF-8")
req.return_value = response_value
response = client.set_instance_template_unary(request)
# Establish that the response is the type that we expect.
assert isinstance(response, compute.Operation)
assert response.client_operation_id == "client_operation_id_value"
assert response.creation_timestamp == "creation_timestamp_value"
assert response.description == "description_value"
assert response.end_time == "end_time_value"
assert response.http_error_message == "http_error_message_value"
assert response.http_error_status_code == 2374
assert response.id == 205
assert response.insert_time == "insert_time_value"
assert response.kind == "kind_value"
assert response.name == "name_value"
assert response.operation_group_id == "operation_group_id_value"
assert response.operation_type == "operation_type_value"
assert response.progress == 885
assert response.region == "region_value"
assert response.self_link == "self_link_value"
assert response.start_time == "start_time_value"
assert response.status == compute.Operation.Status.DONE
assert response.status_message == "status_message_value"
assert response.target_id == 947
assert response.target_link == "target_link_value"
assert response.user == "user_value"
assert response.zone == "zone_value"
def test_set_instance_template_unary_rest_required_fields(
request_type=compute.SetInstanceTemplateRegionInstanceGroupManagerRequest,
):
transport_class = transports.RegionInstanceGroupManagersRestTransport
request_init = {}
request_init["instance_group_manager"] = ""
request_init["project"] = ""
request_init["region"] = ""
request = request_type(request_init)
jsonified_request = json.loads(
request_type.to_json(
request, including_default_value_fields=False, use_integers_for_enums=False
)
)
# verify fields with default values are dropped
unset_fields = transport_class(
credentials=ga_credentials.AnonymousCredentials()
).set_instance_template._get_unset_required_fields(jsonified_request)
jsonified_request.update(unset_fields)
# verify required fields with default values are now present
jsonified_request["instanceGroupManager"] = "instance_group_manager_value"
jsonified_request["project"] = "project_value"
jsonified_request["region"] = "region_value"
unset_fields = transport_class(
credentials=ga_credentials.AnonymousCredentials()
).set_instance_template._get_unset_required_fields(jsonified_request)
# Check that path parameters and body parameters are not mixing in.
assert not set(unset_fields) - set(("request_id",))
jsonified_request.update(unset_fields)
# verify required fields with non-default values are left alone
assert "instanceGroupManager" in jsonified_request
assert jsonified_request["instanceGroupManager"] == "instance_group_manager_value"
assert "project" in jsonified_request
assert jsonified_request["project"] == "project_value"
assert "region" in jsonified_request
assert jsonified_request["region"] == "region_value"
client = RegionInstanceGroupManagersClient(
credentials=ga_credentials.AnonymousCredentials(), transport="rest",
)
request = request_type(request_init)
# Designate an appropriate value for the returned response.
return_value = compute.Operation()
# Mock the http request call within the method and fake a response.
with mock.patch.object(Session, "request") as req:
# We need to mock transcode() because providing default values
# for required fields will fail the real version if the http_options
# expect actual values for those fields.
with mock.patch.object(path_template, "transcode") as transcode:
# A uri without fields and an empty body will force all the
# request fields to show up in the query_params.
transcode_result = {
"uri": "v1/sample_method",
"method": "post",
"query_params": request_init,
}
transcode_result["body"] = {}
transcode.return_value = transcode_result
response_value = Response()
response_value.status_code = 200
json_return_value = compute.Operation.to_json(return_value)
response_value._content = json_return_value.encode("UTF-8")
req.return_value = response_value
response = client.set_instance_template_unary(request)
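            # No required field on this method carries a default, so no
            # synthetic query parameters should be sent.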
expected_params = []
actual_params = req.call_args.kwargs["params"]
assert expected_params == actual_params
def test_set_instance_template_unary_rest_unset_required_fields():
    transport = transports.RegionInstanceGroupManagersRestTransport(
        credentials=ga_credentials.AnonymousCredentials()
    )
unset_fields = transport.set_instance_template._get_unset_required_fields({})
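    # Here the intersection is empty: "requestId" is optional, and none of the
    # required fields carries a default value that would need populating.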
assert set(unset_fields) == (
set(("requestId",))
& set(
(
"instanceGroupManager",
"project",
"region",
"regionInstanceGroupManagersSetTemplateRequestResource",
)
)
)
def test_set_instance_template_unary_rest_bad_request(
transport: str = "rest",
request_type=compute.SetInstanceTemplateRegionInstanceGroupManagerRequest,
):
client = RegionInstanceGroupManagersClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# send a request that will satisfy transcoding
request_init = {
"project": "sample1",
"region": "sample2",
"instance_group_manager": "sample3",
}
request_init["region_instance_group_managers_set_template_request_resource"] = {
"instance_template": "instance_template_value"
}
request = request_type(request_init)
# Mock the http request call within the method and fake a BadRequest error.
with mock.patch.object(Session, "request") as req, pytest.raises(
core_exceptions.BadRequest
):
# Wrap the value into a proper Response obj
response_value = Response()
response_value.status_code = 400
response_value.request = Request()
req.return_value = response_value
client.set_instance_template_unary(request)
def test_set_instance_template_unary_rest_flattened():
client = RegionInstanceGroupManagersClient(
credentials=ga_credentials.AnonymousCredentials(), transport="rest",
)
# Mock the http request call within the method and fake a response.
with mock.patch.object(type(client.transport._session), "request") as req:
# Designate an appropriate value for the returned response.
return_value = compute.Operation()
# Wrap the value into a proper Response obj
response_value = Response()
response_value.status_code = 200
json_return_value = compute.Operation.to_json(return_value)
response_value._content = json_return_value.encode("UTF-8")
req.return_value = response_value
# get arguments that satisfy an http rule for this method
sample_request = {
"project": "sample1",
"region": "sample2",
"instance_group_manager": "sample3",
}
# get truthy value for each flattened field
mock_args = dict(
project="project_value",
region="region_value",
instance_group_manager="instance_group_manager_value",
region_instance_group_managers_set_template_request_resource=compute.RegionInstanceGroupManagersSetTemplateRequest(
instance_template="instance_template_value"
),
)
mock_args.update(sample_request)
client.set_instance_template_unary(**mock_args)
# Establish that the underlying call was made with the expected
# request object values.
assert len(req.mock_calls) == 1
_, args, _ = req.mock_calls[0]
assert path_template.validate(
"https://%s/compute/v1/projects/{project}/regions/{region}/instanceGroupManagers/{instance_group_manager}/setInstanceTemplate"
% client.transport._host,
args[1],
)
def test_set_instance_template_unary_rest_flattened_error(transport: str = "rest"):
client = RegionInstanceGroupManagersClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
client.set_instance_template_unary(
compute.SetInstanceTemplateRegionInstanceGroupManagerRequest(),
project="project_value",
region="region_value",
instance_group_manager="instance_group_manager_value",
region_instance_group_managers_set_template_request_resource=compute.RegionInstanceGroupManagersSetTemplateRequest(
instance_template="instance_template_value"
),
)
def test_set_instance_template_unary_rest_error():
client = RegionInstanceGroupManagersClient(
credentials=ga_credentials.AnonymousCredentials(), transport="rest"
)
@pytest.mark.parametrize(
"request_type", [compute.SetTargetPoolsRegionInstanceGroupManagerRequest, dict,]
)
def test_set_target_pools_unary_rest(request_type):
client = RegionInstanceGroupManagersClient(
credentials=ga_credentials.AnonymousCredentials(), transport="rest",
)
# send a request that will satisfy transcoding
request_init = {
"project": "sample1",
"region": "sample2",
"instance_group_manager": "sample3",
}
request_init["region_instance_group_managers_set_target_pools_request_resource"] = {
"fingerprint": "fingerprint_value",
"target_pools": ["target_pools_value_1", "target_pools_value_2"],
}
request = request_type(request_init)
# Mock the http request call within the method and fake a response.
with mock.patch.object(type(client.transport._session), "request") as req:
# Designate an appropriate value for the returned response.
return_value = compute.Operation(
client_operation_id="client_operation_id_value",
creation_timestamp="creation_timestamp_value",
description="description_value",
end_time="end_time_value",
http_error_message="http_error_message_value",
http_error_status_code=2374,
id=205,
insert_time="insert_time_value",
kind="kind_value",
name="name_value",
operation_group_id="operation_group_id_value",
operation_type="operation_type_value",
progress=885,
region="region_value",
self_link="self_link_value",
start_time="start_time_value",
status=compute.Operation.Status.DONE,
status_message="status_message_value",
target_id=947,
target_link="target_link_value",
user="user_value",
zone="zone_value",
)
# Wrap the value into a proper Response obj
response_value = Response()
response_value.status_code = 200
json_return_value = compute.Operation.to_json(return_value)
response_value._content = json_return_value.encode("UTF-8")
req.return_value = response_value
response = client.set_target_pools_unary(request)
# Establish that the response is the type that we expect.
assert isinstance(response, compute.Operation)
assert response.client_operation_id == "client_operation_id_value"
assert response.creation_timestamp == "creation_timestamp_value"
assert response.description == "description_value"
assert response.end_time == "end_time_value"
assert response.http_error_message == "http_error_message_value"
assert response.http_error_status_code == 2374
assert response.id == 205
assert response.insert_time == "insert_time_value"
assert response.kind == "kind_value"
assert response.name == "name_value"
assert response.operation_group_id == "operation_group_id_value"
assert response.operation_type == "operation_type_value"
assert response.progress == 885
assert response.region == "region_value"
assert response.self_link == "self_link_value"
assert response.start_time == "start_time_value"
assert response.status == compute.Operation.Status.DONE
assert response.status_message == "status_message_value"
assert response.target_id == 947
assert response.target_link == "target_link_value"
assert response.user == "user_value"
assert response.zone == "zone_value"
def test_set_target_pools_unary_rest_required_fields(
request_type=compute.SetTargetPoolsRegionInstanceGroupManagerRequest,
):
transport_class = transports.RegionInstanceGroupManagersRestTransport
request_init = {}
request_init["instance_group_manager"] = ""
request_init["project"] = ""
request_init["region"] = ""
request = request_type(request_init)
jsonified_request = json.loads(
request_type.to_json(
request, including_default_value_fields=False, use_integers_for_enums=False
)
)
# verify fields with default values are dropped
unset_fields = transport_class(
credentials=ga_credentials.AnonymousCredentials()
).set_target_pools._get_unset_required_fields(jsonified_request)
jsonified_request.update(unset_fields)
# verify required fields with default values are now present
jsonified_request["instanceGroupManager"] = "instance_group_manager_value"
jsonified_request["project"] = "project_value"
jsonified_request["region"] = "region_value"
unset_fields = transport_class(
credentials=ga_credentials.AnonymousCredentials()
).set_target_pools._get_unset_required_fields(jsonified_request)
# Check that path parameters and body parameters are not mixing in.
assert not set(unset_fields) - set(("request_id",))
jsonified_request.update(unset_fields)
# verify required fields with non-default values are left alone
assert "instanceGroupManager" in jsonified_request
assert jsonified_request["instanceGroupManager"] == "instance_group_manager_value"
assert "project" in jsonified_request
assert jsonified_request["project"] == "project_value"
assert "region" in jsonified_request
assert jsonified_request["region"] == "region_value"
client = RegionInstanceGroupManagersClient(
credentials=ga_credentials.AnonymousCredentials(), transport="rest",
)
request = request_type(request_init)
# Designate an appropriate value for the returned response.
return_value = compute.Operation()
# Mock the http request call within the method and fake a response.
with mock.patch.object(Session, "request") as req:
# We need to mock transcode() because providing default values
# for required fields will fail the real version if the http_options
# expect actual values for those fields.
with mock.patch.object(path_template, "transcode") as transcode:
# A uri without fields and an empty body will force all the
# request fields to show up in the query_params.
transcode_result = {
"uri": "v1/sample_method",
"method": "post",
"query_params": request_init,
}
transcode_result["body"] = {}
transcode.return_value = transcode_result
response_value = Response()
response_value.status_code = 200
json_return_value = compute.Operation.to_json(return_value)
response_value._content = json_return_value.encode("UTF-8")
req.return_value = response_value
response = client.set_target_pools_unary(request)
expected_params = []
actual_params = req.call_args.kwargs["params"]
assert expected_params == actual_params
def test_set_target_pools_unary_rest_unset_required_fields():
    transport = transports.RegionInstanceGroupManagersRestTransport(
        credentials=ga_credentials.AnonymousCredentials()
    )
unset_fields = transport.set_target_pools._get_unset_required_fields({})
assert set(unset_fields) == (
set(("requestId",))
& set(
(
"instanceGroupManager",
"project",
"region",
"regionInstanceGroupManagersSetTargetPoolsRequestResource",
)
)
)
def test_set_target_pools_unary_rest_bad_request(
transport: str = "rest",
request_type=compute.SetTargetPoolsRegionInstanceGroupManagerRequest,
):
client = RegionInstanceGroupManagersClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# send a request that will satisfy transcoding
request_init = {
"project": "sample1",
"region": "sample2",
"instance_group_manager": "sample3",
}
request_init["region_instance_group_managers_set_target_pools_request_resource"] = {
"fingerprint": "fingerprint_value",
"target_pools": ["target_pools_value_1", "target_pools_value_2"],
}
request = request_type(request_init)
# Mock the http request call within the method and fake a BadRequest error.
with mock.patch.object(Session, "request") as req, pytest.raises(
core_exceptions.BadRequest
):
# Wrap the value into a proper Response obj
response_value = Response()
response_value.status_code = 400
response_value.request = Request()
req.return_value = response_value
client.set_target_pools_unary(request)
def test_set_target_pools_unary_rest_flattened():
client = RegionInstanceGroupManagersClient(
credentials=ga_credentials.AnonymousCredentials(), transport="rest",
)
# Mock the http request call within the method and fake a response.
with mock.patch.object(type(client.transport._session), "request") as req:
# Designate an appropriate value for the returned response.
return_value = compute.Operation()
# Wrap the value into a proper Response obj
response_value = Response()
response_value.status_code = 200
json_return_value = compute.Operation.to_json(return_value)
response_value._content = json_return_value.encode("UTF-8")
req.return_value = response_value
# get arguments that satisfy an http rule for this method
sample_request = {
"project": "sample1",
"region": "sample2",
"instance_group_manager": "sample3",
}
# get truthy value for each flattened field
mock_args = dict(
project="project_value",
region="region_value",
instance_group_manager="instance_group_manager_value",
region_instance_group_managers_set_target_pools_request_resource=compute.RegionInstanceGroupManagersSetTargetPoolsRequest(
fingerprint="fingerprint_value"
),
)
mock_args.update(sample_request)
client.set_target_pools_unary(**mock_args)
# Establish that the underlying call was made with the expected
# request object values.
assert len(req.mock_calls) == 1
_, args, _ = req.mock_calls[0]
assert path_template.validate(
"https://%s/compute/v1/projects/{project}/regions/{region}/instanceGroupManagers/{instance_group_manager}/setTargetPools"
% client.transport._host,
args[1],
)
def test_set_target_pools_unary_rest_flattened_error(transport: str = "rest"):
client = RegionInstanceGroupManagersClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
client.set_target_pools_unary(
compute.SetTargetPoolsRegionInstanceGroupManagerRequest(),
project="project_value",
region="region_value",
instance_group_manager="instance_group_manager_value",
region_instance_group_managers_set_target_pools_request_resource=compute.RegionInstanceGroupManagersSetTargetPoolsRequest(
fingerprint="fingerprint_value"
),
)
def test_set_target_pools_unary_rest_error():
client = RegionInstanceGroupManagersClient(
credentials=ga_credentials.AnonymousCredentials(), transport="rest"
)
@pytest.mark.parametrize(
"request_type",
[compute.UpdatePerInstanceConfigsRegionInstanceGroupManagerRequest, dict,],
)
def test_update_per_instance_configs_unary_rest(request_type):
client = RegionInstanceGroupManagersClient(
credentials=ga_credentials.AnonymousCredentials(), transport="rest",
)
# send a request that will satisfy transcoding
request_init = {
"project": "sample1",
"region": "sample2",
"instance_group_manager": "sample3",
}
request_init[
"region_instance_group_manager_update_instance_config_req_resource"
] = {
"per_instance_configs": [
{
"fingerprint": "fingerprint_value",
"name": "name_value",
"preserved_state": {"disks": {}, "metadata": {}},
"status": "status_value",
}
]
}
request = request_type(request_init)
# Mock the http request call within the method and fake a response.
with mock.patch.object(type(client.transport._session), "request") as req:
# Designate an appropriate value for the returned response.
return_value = compute.Operation(
client_operation_id="client_operation_id_value",
creation_timestamp="creation_timestamp_value",
description="description_value",
end_time="end_time_value",
http_error_message="http_error_message_value",
http_error_status_code=2374,
id=205,
insert_time="insert_time_value",
kind="kind_value",
name="name_value",
operation_group_id="operation_group_id_value",
operation_type="operation_type_value",
progress=885,
region="region_value",
self_link="self_link_value",
start_time="start_time_value",
status=compute.Operation.Status.DONE,
status_message="status_message_value",
target_id=947,
target_link="target_link_value",
user="user_value",
zone="zone_value",
)
# Wrap the value into a proper Response obj
response_value = Response()
response_value.status_code = 200
json_return_value = compute.Operation.to_json(return_value)
response_value._content = json_return_value.encode("UTF-8")
req.return_value = response_value
response = client.update_per_instance_configs_unary(request)
# Establish that the response is the type that we expect.
assert isinstance(response, compute.Operation)
assert response.client_operation_id == "client_operation_id_value"
assert response.creation_timestamp == "creation_timestamp_value"
assert response.description == "description_value"
assert response.end_time == "end_time_value"
assert response.http_error_message == "http_error_message_value"
assert response.http_error_status_code == 2374
assert response.id == 205
assert response.insert_time == "insert_time_value"
assert response.kind == "kind_value"
assert response.name == "name_value"
assert response.operation_group_id == "operation_group_id_value"
assert response.operation_type == "operation_type_value"
assert response.progress == 885
assert response.region == "region_value"
assert response.self_link == "self_link_value"
assert response.start_time == "start_time_value"
assert response.status == compute.Operation.Status.DONE
assert response.status_message == "status_message_value"
assert response.target_id == 947
assert response.target_link == "target_link_value"
assert response.user == "user_value"
assert response.zone == "zone_value"
def test_update_per_instance_configs_unary_rest_required_fields(
request_type=compute.UpdatePerInstanceConfigsRegionInstanceGroupManagerRequest,
):
transport_class = transports.RegionInstanceGroupManagersRestTransport
request_init = {}
request_init["instance_group_manager"] = ""
request_init["project"] = ""
request_init["region"] = ""
request = request_type(request_init)
jsonified_request = json.loads(
request_type.to_json(
request, including_default_value_fields=False, use_integers_for_enums=False
)
)
# verify fields with default values are dropped
unset_fields = transport_class(
credentials=ga_credentials.AnonymousCredentials()
).update_per_instance_configs._get_unset_required_fields(jsonified_request)
jsonified_request.update(unset_fields)
# verify required fields with default values are now present
jsonified_request["instanceGroupManager"] = "instance_group_manager_value"
jsonified_request["project"] = "project_value"
jsonified_request["region"] = "region_value"
unset_fields = transport_class(
credentials=ga_credentials.AnonymousCredentials()
).update_per_instance_configs._get_unset_required_fields(jsonified_request)
# Check that path parameters and body parameters are not mixing in.
assert not set(unset_fields) - set(("request_id",))
jsonified_request.update(unset_fields)
# verify required fields with non-default values are left alone
assert "instanceGroupManager" in jsonified_request
assert jsonified_request["instanceGroupManager"] == "instance_group_manager_value"
assert "project" in jsonified_request
assert jsonified_request["project"] == "project_value"
assert "region" in jsonified_request
assert jsonified_request["region"] == "region_value"
client = RegionInstanceGroupManagersClient(
credentials=ga_credentials.AnonymousCredentials(), transport="rest",
)
request = request_type(request_init)
# Designate an appropriate value for the returned response.
return_value = compute.Operation()
# Mock the http request call within the method and fake a response.
with mock.patch.object(Session, "request") as req:
# We need to mock transcode() because providing default values
# for required fields will fail the real version if the http_options
# expect actual values for those fields.
with mock.patch.object(path_template, "transcode") as transcode:
# A uri without fields and an empty body will force all the
# request fields to show up in the query_params.
transcode_result = {
"uri": "v1/sample_method",
"method": "post",
"query_params": request_init,
}
transcode_result["body"] = {}
transcode.return_value = transcode_result
response_value = Response()
response_value.status_code = 200
json_return_value = compute.Operation.to_json(return_value)
response_value._content = json_return_value.encode("UTF-8")
req.return_value = response_value
response = client.update_per_instance_configs_unary(request)
expected_params = []
actual_params = req.call_args.kwargs["params"]
assert expected_params == actual_params
def test_update_per_instance_configs_unary_rest_unset_required_fields():
    transport = transports.RegionInstanceGroupManagersRestTransport(
        credentials=ga_credentials.AnonymousCredentials()
    )
unset_fields = transport.update_per_instance_configs._get_unset_required_fields({})
assert set(unset_fields) == (
set(("requestId",))
& set(
(
"instanceGroupManager",
"project",
"region",
"regionInstanceGroupManagerUpdateInstanceConfigReqResource",
)
)
)
def test_update_per_instance_configs_unary_rest_bad_request(
transport: str = "rest",
request_type=compute.UpdatePerInstanceConfigsRegionInstanceGroupManagerRequest,
):
client = RegionInstanceGroupManagersClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# send a request that will satisfy transcoding
request_init = {
"project": "sample1",
"region": "sample2",
"instance_group_manager": "sample3",
}
request_init[
"region_instance_group_manager_update_instance_config_req_resource"
] = {
"per_instance_configs": [
{
"fingerprint": "fingerprint_value",
"name": "name_value",
"preserved_state": {"disks": {}, "metadata": {}},
"status": "status_value",
}
]
}
request = request_type(request_init)
# Mock the http request call within the method and fake a BadRequest error.
with mock.patch.object(Session, "request") as req, pytest.raises(
core_exceptions.BadRequest
):
# Wrap the value into a proper Response obj
response_value = Response()
response_value.status_code = 400
response_value.request = Request()
req.return_value = response_value
client.update_per_instance_configs_unary(request)
def test_update_per_instance_configs_unary_rest_flattened():
client = RegionInstanceGroupManagersClient(
credentials=ga_credentials.AnonymousCredentials(), transport="rest",
)
# Mock the http request call within the method and fake a response.
with mock.patch.object(type(client.transport._session), "request") as req:
# Designate an appropriate value for the returned response.
return_value = compute.Operation()
# Wrap the value into a proper Response obj
response_value = Response()
response_value.status_code = 200
json_return_value = compute.Operation.to_json(return_value)
response_value._content = json_return_value.encode("UTF-8")
req.return_value = response_value
# get arguments that satisfy an http rule for this method
sample_request = {
"project": "sample1",
"region": "sample2",
"instance_group_manager": "sample3",
}
# get truthy value for each flattened field
mock_args = dict(
project="project_value",
region="region_value",
instance_group_manager="instance_group_manager_value",
region_instance_group_manager_update_instance_config_req_resource=compute.RegionInstanceGroupManagerUpdateInstanceConfigReq(
per_instance_configs=[
compute.PerInstanceConfig(fingerprint="fingerprint_value")
]
),
)
mock_args.update(sample_request)
client.update_per_instance_configs_unary(**mock_args)
# Establish that the underlying call was made with the expected
# request object values.
assert len(req.mock_calls) == 1
_, args, _ = req.mock_calls[0]
assert path_template.validate(
"https://%s/compute/v1/projects/{project}/regions/{region}/instanceGroupManagers/{instance_group_manager}/updatePerInstanceConfigs"
% client.transport._host,
args[1],
)
def test_update_per_instance_configs_unary_rest_flattened_error(
transport: str = "rest",
):
client = RegionInstanceGroupManagersClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
client.update_per_instance_configs_unary(
compute.UpdatePerInstanceConfigsRegionInstanceGroupManagerRequest(),
project="project_value",
region="region_value",
instance_group_manager="instance_group_manager_value",
region_instance_group_manager_update_instance_config_req_resource=compute.RegionInstanceGroupManagerUpdateInstanceConfigReq(
per_instance_configs=[
compute.PerInstanceConfig(fingerprint="fingerprint_value")
]
),
)
def test_update_per_instance_configs_unary_rest_error():
client = RegionInstanceGroupManagersClient(
credentials=ga_credentials.AnonymousCredentials(), transport="rest"
)
def test_credentials_transport_error():
# It is an error to provide credentials and a transport instance.
transport = transports.RegionInstanceGroupManagersRestTransport(
credentials=ga_credentials.AnonymousCredentials(),
)
with pytest.raises(ValueError):
client = RegionInstanceGroupManagersClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# It is an error to provide a credentials file and a transport instance.
transport = transports.RegionInstanceGroupManagersRestTransport(
credentials=ga_credentials.AnonymousCredentials(),
)
with pytest.raises(ValueError):
client = RegionInstanceGroupManagersClient(
client_options={"credentials_file": "credentials.json"},
transport=transport,
)
# It is an error to provide an api_key and a transport instance.
transport = transports.RegionInstanceGroupManagersRestTransport(
credentials=ga_credentials.AnonymousCredentials(),
)
options = client_options.ClientOptions()
options.api_key = "api_key"
with pytest.raises(ValueError):
client = RegionInstanceGroupManagersClient(
client_options=options, transport=transport,
)
# It is an error to provide an api_key and a credential.
options = mock.Mock()
options.api_key = "api_key"
with pytest.raises(ValueError):
client = RegionInstanceGroupManagersClient(
client_options=options, credentials=ga_credentials.AnonymousCredentials()
)
# It is an error to provide scopes and a transport instance.
transport = transports.RegionInstanceGroupManagersRestTransport(
credentials=ga_credentials.AnonymousCredentials(),
)
with pytest.raises(ValueError):
client = RegionInstanceGroupManagersClient(
client_options={"scopes": ["1", "2"]}, transport=transport,
)
def test_transport_instance():
# A client may be instantiated with a custom transport instance.
transport = transports.RegionInstanceGroupManagersRestTransport(
credentials=ga_credentials.AnonymousCredentials(),
)
client = RegionInstanceGroupManagersClient(transport=transport)
assert client.transport is transport
@pytest.mark.parametrize(
"transport_class", [transports.RegionInstanceGroupManagersRestTransport,]
)
def test_transport_adc(transport_class):
# Test default credentials are used if not provided.
with mock.patch.object(google.auth, "default") as adc:
adc.return_value = (ga_credentials.AnonymousCredentials(), None)
transport_class()
adc.assert_called_once()
def test_region_instance_group_managers_base_transport_error():
# Passing both a credentials object and credentials_file should raise an error
with pytest.raises(core_exceptions.DuplicateCredentialArgs):
transport = transports.RegionInstanceGroupManagersTransport(
credentials=ga_credentials.AnonymousCredentials(),
credentials_file="credentials.json",
)
def test_region_instance_group_managers_base_transport():
# Instantiate the base transport.
with mock.patch(
"google.cloud.compute_v1.services.region_instance_group_managers.transports.RegionInstanceGroupManagersTransport.__init__"
) as Transport:
Transport.return_value = None
transport = transports.RegionInstanceGroupManagersTransport(
credentials=ga_credentials.AnonymousCredentials(),
)
# Every method on the transport should just blindly
# raise NotImplementedError.
methods = (
"abandon_instances",
"apply_updates_to_instances",
"create_instances",
"delete",
"delete_instances",
"delete_per_instance_configs",
"get",
"insert",
"list",
"list_errors",
"list_managed_instances",
"list_per_instance_configs",
"patch",
"patch_per_instance_configs",
"recreate_instances",
"resize",
"set_instance_template",
"set_target_pools",
"update_per_instance_configs",
)
for method in methods:
with pytest.raises(NotImplementedError):
getattr(transport, method)(request=object())
with pytest.raises(NotImplementedError):
transport.close()
def test_region_instance_group_managers_base_transport_with_credentials_file():
# Instantiate the base transport with a credentials file
with mock.patch.object(
google.auth, "load_credentials_from_file", autospec=True
) as load_creds, mock.patch(
"google.cloud.compute_v1.services.region_instance_group_managers.transports.RegionInstanceGroupManagersTransport._prep_wrapped_messages"
) as Transport:
Transport.return_value = None
load_creds.return_value = (ga_credentials.AnonymousCredentials(), None)
transport = transports.RegionInstanceGroupManagersTransport(
credentials_file="credentials.json", quota_project_id="octopus",
)
load_creds.assert_called_once_with(
"credentials.json",
scopes=None,
default_scopes=(
"https://www.googleapis.com/auth/compute",
"https://www.googleapis.com/auth/cloud-platform",
),
quota_project_id="octopus",
)
def test_region_instance_group_managers_base_transport_with_adc():
# Test the default credentials are used if credentials and credentials_file are None.
with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch(
"google.cloud.compute_v1.services.region_instance_group_managers.transports.RegionInstanceGroupManagersTransport._prep_wrapped_messages"
) as Transport:
Transport.return_value = None
adc.return_value = (ga_credentials.AnonymousCredentials(), None)
transport = transports.RegionInstanceGroupManagersTransport()
adc.assert_called_once()
def test_region_instance_group_managers_auth_adc():
# If no credentials are provided, we should use ADC credentials.
with mock.patch.object(google.auth, "default", autospec=True) as adc:
adc.return_value = (ga_credentials.AnonymousCredentials(), None)
RegionInstanceGroupManagersClient()
adc.assert_called_once_with(
scopes=None,
default_scopes=(
"https://www.googleapis.com/auth/compute",
"https://www.googleapis.com/auth/cloud-platform",
),
quota_project_id=None,
)
def test_region_instance_group_managers_http_transport_client_cert_source_for_mtls():
cred = ga_credentials.AnonymousCredentials()
with mock.patch(
"google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"
) as mock_configure_mtls_channel:
transports.RegionInstanceGroupManagersRestTransport(
credentials=cred, client_cert_source_for_mtls=client_cert_source_callback
)
mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback)
def test_region_instance_group_managers_host_no_port():
client = RegionInstanceGroupManagersClient(
credentials=ga_credentials.AnonymousCredentials(),
client_options=client_options.ClientOptions(
api_endpoint="compute.googleapis.com"
),
)
assert client.transport._host == "compute.googleapis.com:443"
def test_region_instance_group_managers_host_with_port():
client = RegionInstanceGroupManagersClient(
credentials=ga_credentials.AnonymousCredentials(),
client_options=client_options.ClientOptions(
api_endpoint="compute.googleapis.com:8000"
),
)
assert client.transport._host == "compute.googleapis.com:8000"
def test_common_billing_account_path():
billing_account = "squid"
expected = "billingAccounts/{billing_account}".format(
billing_account=billing_account,
)
actual = RegionInstanceGroupManagersClient.common_billing_account_path(
billing_account
)
assert expected == actual
def test_parse_common_billing_account_path():
expected = {
"billing_account": "clam",
}
path = RegionInstanceGroupManagersClient.common_billing_account_path(**expected)
# Check that the path construction is reversible.
actual = RegionInstanceGroupManagersClient.parse_common_billing_account_path(path)
assert expected == actual
def test_common_folder_path():
folder = "whelk"
expected = "folders/{folder}".format(folder=folder,)
actual = RegionInstanceGroupManagersClient.common_folder_path(folder)
assert expected == actual
def test_parse_common_folder_path():
expected = {
"folder": "octopus",
}
path = RegionInstanceGroupManagersClient.common_folder_path(**expected)
# Check that the path construction is reversible.
actual = RegionInstanceGroupManagersClient.parse_common_folder_path(path)
assert expected == actual
def test_common_organization_path():
organization = "oyster"
expected = "organizations/{organization}".format(organization=organization,)
actual = RegionInstanceGroupManagersClient.common_organization_path(organization)
assert expected == actual
def test_parse_common_organization_path():
expected = {
"organization": "nudibranch",
}
path = RegionInstanceGroupManagersClient.common_organization_path(**expected)
# Check that the path construction is reversible.
actual = RegionInstanceGroupManagersClient.parse_common_organization_path(path)
assert expected == actual
def test_common_project_path():
project = "cuttlefish"
expected = "projects/{project}".format(project=project,)
actual = RegionInstanceGroupManagersClient.common_project_path(project)
assert expected == actual
def test_parse_common_project_path():
expected = {
"project": "mussel",
}
path = RegionInstanceGroupManagersClient.common_project_path(**expected)
# Check that the path construction is reversible.
actual = RegionInstanceGroupManagersClient.parse_common_project_path(path)
assert expected == actual
def test_common_location_path():
project = "winkle"
location = "nautilus"
expected = "projects/{project}/locations/{location}".format(
project=project, location=location,
)
actual = RegionInstanceGroupManagersClient.common_location_path(project, location)
assert expected == actual
def test_parse_common_location_path():
expected = {
"project": "scallop",
"location": "abalone",
}
path = RegionInstanceGroupManagersClient.common_location_path(**expected)
# Check that the path construction is reversible.
actual = RegionInstanceGroupManagersClient.parse_common_location_path(path)
assert expected == actual
def test_client_with_default_client_info():
client_info = gapic_v1.client_info.ClientInfo()
with mock.patch.object(
transports.RegionInstanceGroupManagersTransport, "_prep_wrapped_messages"
) as prep:
client = RegionInstanceGroupManagersClient(
credentials=ga_credentials.AnonymousCredentials(), client_info=client_info,
)
prep.assert_called_once_with(client_info)
with mock.patch.object(
transports.RegionInstanceGroupManagersTransport, "_prep_wrapped_messages"
) as prep:
transport_class = RegionInstanceGroupManagersClient.get_transport_class()
transport = transport_class(
credentials=ga_credentials.AnonymousCredentials(), client_info=client_info,
)
prep.assert_called_once_with(client_info)
def test_transport_close():
transports = {
"rest": "_session",
}
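    # Leaving the client's context manager must close the underlying session.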
for transport, close_name in transports.items():
client = RegionInstanceGroupManagersClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport
)
with mock.patch.object(
type(getattr(client.transport, close_name)), "close"
) as close:
with client:
close.assert_not_called()
close.assert_called_once()
def test_client_ctx():
transports = [
"rest",
]
for transport in transports:
client = RegionInstanceGroupManagersClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport
)
# Test client calls underlying transport.
with mock.patch.object(type(client.transport), "close") as close:
close.assert_not_called()
with client:
pass
close.assert_called()
@pytest.mark.parametrize(
"client_class,transport_class",
[
(
RegionInstanceGroupManagersClient,
transports.RegionInstanceGroupManagersRestTransport,
),
],
)
def test_api_key_credentials(client_class, transport_class):
with mock.patch.object(
google.auth._default, "get_api_key_credentials", create=True
) as get_api_key_credentials:
mock_cred = mock.Mock()
get_api_key_credentials.return_value = mock_cred
options = client_options.ClientOptions()
options.api_key = "api_key"
with mock.patch.object(transport_class, "__init__") as patched:
patched.return_value = None
client = client_class(client_options=options)
patched.assert_called_once_with(
credentials=mock_cred,
credentials_file=None,
host=client.DEFAULT_ENDPOINT,
scopes=None,
client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
always_use_jwt_access=True,
)
| 39.693586 | 144 | 0.683838 | 26,794 | 258,048 | 6.281817 | 0.021609 | 0.022808 | 0.026866 | 0.048885 | 0.930975 | 0.918023 | 0.904958 | 0.887907 | 0.873873 | 0.864361 | 0 | 0.006806 | 0.233608 | 258,048 | 6,500 | 145 | 39.699692 | 0.844276 | 0.127259 | 0 | 0.768613 | 0 | 0.00281 | 0.160421 | 0.053319 | 0 | 0 | 0 | 0.000154 | 0.127032 | 1 | 0.034116 | false | 0.000201 | 0.005017 | 0.000401 | 0.039534 | 0.004415 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
0b6d890036d326c2159227a2aac3f157b084687e | 128 | py | Python | python_module/SuperGLU/Services/TextProcessing/Utilities/__init__.py | GeneralizedLearningUtilities/SuperGLU | 1c373d1358431fb96dd70b324b26a14fc8ed1fcb | [
"MIT"
] | 8 | 2015-07-13T23:07:20.000Z | 2020-11-13T21:09:55.000Z | python_module/SuperGLU/Services/TextProcessing/Utilities/__init__.py | GeneralizedLearningUtilities/SuperGLU | 1c373d1358431fb96dd70b324b26a14fc8ed1fcb | [
"MIT"
] | 7 | 2016-01-13T12:13:56.000Z | 2021-12-14T21:12:28.000Z | python_module/SuperGLU/Services/TextProcessing/Utilities/__init__.py | GeneralizedLearningUtilities/SuperGLU | 1c373d1358431fb96dd70b324b26a14fc8ed1fcb | [
"MIT"
] | 6 | 2015-09-23T17:53:32.000Z | 2020-04-30T07:27:01.000Z | # -*- coding: utf-8 -*-
import SuperGLU.Util.ModuleRegistration
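# Eagerly import every module that lives in this package's directory.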
SuperGLU.Util.ModuleRegistration.importAllInDirectory(__file__)
| 32 | 63 | 0.8125 | 12 | 128 | 8.333333 | 0.75 | 0.24 | 0.6 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.008333 | 0.0625 | 128 | 3 | 64 | 42.666667 | 0.825 | 0.164063 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
0bbe9c9dd616fc5512ca99895adc603c0a98ad3a | 4,021 | py | Python | Portfolio_Strategies/risk_management.py | vhn0912/Finance | 39cf49d4d778d322537531cee4ce3981cc9951f9 | [
"MIT"
] | 441 | 2020-04-22T02:21:19.000Z | 2022-03-29T15:00:24.000Z | Portfolio_Strategies/risk_management.py | happydasch/Finance | 4f6c5ea8f60fb0dc3b965ffb9628df83c2ecef35 | [
"MIT"
] | 5 | 2020-07-06T15:19:58.000Z | 2021-07-23T18:32:29.000Z | Portfolio_Strategies/risk_management.py | happydasch/Finance | 4f6c5ea8f60fb0dc3b965ffb9628df83c2ecef35 | [
"MIT"
] | 111 | 2020-04-21T11:40:39.000Z | 2022-03-20T07:26:17.000Z | import pandas as pd
import numpy as np
import yfinance as yf
import datetime as dt
from pandas_datareader import data as pdr
import statistics
import time
yf.pdr_override()
now = dt.datetime.now()
start = dt.datetime(2019, 1, 1)
smaUsed = [50, 200]
emaUsed = [21]
stock = input('Enter a ticker: ')
position = input('Buy or Short? ')
AvgGain = int(input('Enter Your Average Gain: '))
AvgLoss = int(input('Enter Your Average Loss: '))
if position.lower() == 'buy':
df = pdr.get_data_yahoo(stock, start, now)
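    # Risk framework for a long: the stop sits AvgLoss% below the close, and
    # the 1R/2R/3R targets sit 1x/2x/3x AvgGain% above it.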
close=df["Adj Close"][-1]
maxStop=close*((100-AvgLoss)/100)
Target1R=round(close*((100+AvgGain)/100),2)
Target2R=round(close*(((100+(2*AvgGain))/100)),2)
Target3R=round(close*(((100+(3*AvgGain))/100)),2)
for x in smaUsed:
sma=x
df["SMA_"+str(sma)]=round(df.iloc[:,4].rolling(window=sma).mean(),2)
for x in emaUsed:
ema=x
df['EMA_'+str(ema)] = round(df.iloc[:,4].ewm(span=ema,adjust=False).mean(),2)
sma50=round(df["SMA_50"][-1],2)
sma200=round(df["SMA_200"][-1],2)
ema21=round(df["EMA_21"][-1],2)
low5=round(min(df["Low"].tail(5)),2)
pf50=round(((close/sma50)-1)*100,2)
check50=df["SMA_50"][-1]>maxStop
pf200=round(((close/sma200)-1)*100,2)
check200=((close/df["SMA_200"][-1])-1)*100>100
pf21=round(((close/ema21)-1)*100,2)
check21=df["EMA_21"][-1]>maxStop
pfl=round(((close/low5)-1)*100,2)
    checkl = low5 > maxStop  # compare the 5-day-low price, not a percentage, to the stop
print()
print("Current Stock: "+stock+" Price: "+str(round(close,2)))
print("21 EMA: "+str(ema21)+ " | 50 SMA: "+str(sma50)+ " | 200 SMA: "+str(sma200)+ " | 5 day Low: "+str(low5))
print("-------------------------------------------------")
print("Max Stop: "+str(round(maxStop,2)))
print("Price Targets:")
print("1R: "+str(Target1R))
print("2R: "+str(Target2R))
print("3R: "+str(Target3R))
print("From 5 Day Low "+ str(pfl)+ "% -Within Max Stop: "+str(checkl))
print("From 21 day EMA "+ str(pf21)+ "% -Within Max Stop: "+str(check21))
print("From 50 day SMA "+ str(pf50)+ "% -Within Max Stop: "+str(check50))
print("From 200 Day SMA "+ str(pf200)+ "% -In Danger Zone (Over 100% from 200 SMA): "+str(check200))
print()
elif position.lower() == 'short':
df = pdr.get_data_yahoo(stock, start, now)
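    # Mirror image for a short: the stop sits AvgLoss% above the close, and
    # the 1R/2R/3R targets step down 1x/2x/3x AvgGain% below it.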
close=df["Adj Close"][-1]
maxStop=close*((100+AvgLoss)/100)
Target3R=round(close*(((100-(3*AvgGain))/100)),2)
Target2R=round(close*(((100-(2*AvgGain))/100)),2)
Target1R=round(close*((100-AvgGain)/100),2)
for x in smaUsed:
sma=x
df["SMA_"+str(sma)]=round(df.iloc[:,4].rolling(window=sma).mean(),2)
for x in emaUsed:
ema=x
df['EMA_'+str(ema)] = round(df.iloc[:,4].ewm(span=ema,adjust=False).mean(),2)
sma50=round(df["SMA_50"][-1],2)
sma200=round(df["SMA_200"][-1],2)
ema21=round(df["EMA_21"][-1],2)
low5=round(min(df["Low"].tail(5)),2)
pf50=round(((close/sma50)-1)*100,2)
check50=df["SMA_50"][-1]>maxStop
pf200=round(((close/sma200)-1)*100,2)
check200=((close/df["SMA_200"][-1])-1)*100>100
pf21=round(((close/ema21)-1)*100,2)
check21=df["EMA_21"][-1]>maxStop
pfl=round(((close/low5)-1)*100,2)
checkl=pfl>maxStop
print()
print("Current Stock: "+stock+" Price: "+str(round(close,2)))
print("21 EMA: "+str(ema21)+ " | 50 SMA: "+str(sma50)+ " | 200 SMA: "+str(sma200)+ " | 5 day Low: "+str(low5))
print("-------------------------------------------------")
print("Max Stop: "+str(round(maxStop,2)))
print("Price Targets:")
print("1R: "+str(Target1R))
print("2R: "+str(Target2R))
print("3R: "+str(Target3R))
print("From 5 Day Low "+ str(pfl)+ "% -Within Max Stop: "+str(checkl))
print("From 21 day EMA "+ str(pf21)+ "% -Within Max Stop: "+str(check21))
print("From 50 day SMA "+ str(pf50)+ "% -Within Max Stop: "+str(check50))
print("From 200 Day SMA "+ str(pf200)+ "% -In Danger Zone (Over 100% from 200 SMA): "+str(check200))
print() | 37.579439 | 114 | 0.581945 | 611 | 4,021 | 3.793781 | 0.168576 | 0.069025 | 0.017256 | 0.041415 | 0.86799 | 0.847282 | 0.847282 | 0.829163 | 0.810181 | 0.810181 | 0 | 0.107089 | 0.175578 | 4,021 | 107 | 115 | 37.579439 | 0.592157 | 0 | 0 | 0.723404 | 0 | 0 | 0.210343 | 0.024366 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.074468 | 0 | 0.074468 | 0.297872 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
f034521bf30006dfbf2cb6cf86f41cc79cafc9d4 | 38 | py | Python | nmap.py | juniordevsec2021/nmap | 32f7efa1e358d6a682bc4ab4e6816cb85677cf81 | [
"MIT"
] | null | null | null | nmap.py | juniordevsec2021/nmap | 32f7efa1e358d6a682bc4ab4e6816cb85677cf81 | [
"MIT"
] | null | null | null | nmap.py | juniordevsec2021/nmap | 32f7efa1e358d6a682bc4ab4e6816cb85677cf81 | [
"MIT"
] | null | null | null | import python_nmap
print(python_nmap)
| 12.666667 | 18 | 0.868421 | 6 | 38 | 5.166667 | 0.666667 | 0.645161 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.078947 | 38 | 2 | 19 | 19 | 0.885714 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.5 | 0 | 0.5 | 0.5 | 1 | 1 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 1 | 0 | 7 |
f048431036d6fdcae8282f0c671649b456e912dd | 640 | py | Python | TCIT-Hf/benchmark/count_heavy.py | zhaoqy1996/TCIT_thermo | 913b4eef31997eba9bbda94593c868841a1c95d1 | [
"MIT"
] | 3 | 2021-05-28T16:36:45.000Z | 2021-10-31T02:41:41.000Z | TCIT-Hf/benchmark/count_heavy.py | zhaoqy1996/TCIT_thermo | 913b4eef31997eba9bbda94593c868841a1c95d1 | [
"MIT"
] | null | null | null | TCIT-Hf/benchmark/count_heavy.py | zhaoqy1996/TCIT_thermo | 913b4eef31997eba9bbda94593c868841a1c95d1 | [
"MIT"
] | null | null | null | smiles = []
with open("Solid_compare.txt","r") as f:
for lc,lines in enumerate(f):
fields = lines.split()
smiles += [fields[0]]
from rdkit import Chem
import numpy as np
Nheavy = [Chem.MolFromSmiles(i).GetNumAtoms() for i in smiles]
print(len([i for i in Nheavy if i > 12]))
print(np.mean(Nheavy))
smiles = []
with open("Liquid_compare.txt","r") as f:
for lc,lines in enumerate(f):
fields = lines.split()
smiles += [fields[0]]
from rdkit import Chem
import numpy as np
Nheavy = [Chem.MolFromSmiles(i).GetNumAtoms() for i in smiles]
print(len([i for i in Nheavy if i > 12]))
print(np.mean(Nheavy))
| 26.666667 | 62 | 0.654688 | 104 | 640 | 4.009615 | 0.317308 | 0.038369 | 0.057554 | 0.06235 | 0.906475 | 0.906475 | 0.906475 | 0.906475 | 0.906475 | 0.906475 | 0 | 0.011696 | 0.198438 | 640 | 23 | 63 | 27.826087 | 0.80117 | 0 | 0 | 0.9 | 0 | 0 | 0.057813 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.2 | 0 | 0.2 | 0.2 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
f06b9d5a49e7ddf4c2f8454994766d353f4882c9 | 173 | py | Python | 01_Language/01_Functions/python/acosh.py | cliff363825/TwentyFour | 09df59bd5d275e66463e343647f46027397d1233 | [
"MIT"
] | 3 | 2020-06-28T07:42:51.000Z | 2021-01-15T10:32:11.000Z | 01_Language/01_Functions/python/acosh.py | cliff363825/TwentyFour | 09df59bd5d275e66463e343647f46027397d1233 | [
"MIT"
] | 9 | 2021-03-10T22:45:40.000Z | 2022-02-27T06:53:20.000Z | 01_Language/01_Functions/python/acosh.py | cliff363825/TwentyFour | 09df59bd5d275e66463e343647f46027397d1233 | [
"MIT"
] | 1 | 2021-01-15T10:51:24.000Z | 2021-01-15T10:51:24.000Z | # coding: utf-8
import math
if __name__ == '__main__':
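    # math.acosh is only defined for x >= 1; acosh(1) is exactly 0.0.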
print(math.acosh(1))
print(math.acosh(1.000000000000001))
print(math.acosh(2))
print(math.acosh(3))
| 17.3 | 40 | 0.653179 | 25 | 173 | 4.2 | 0.56 | 0.342857 | 0.533333 | 0.285714 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.140845 | 0.179191 | 173 | 9 | 41 | 19.222222 | 0.598592 | 0.075145 | 0 | 0 | 0 | 0 | 0.050633 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.166667 | 0 | 0.166667 | 0.666667 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 7 |
b2cb0ee4fd580b25f55beb7c3000fa156eed3c8f | 2,591 | py | Python | tests/test.py | redocnib/BitBooleanFlags | 189afe5cb188f26302df7c8c292f804703d9ae78 | [
"MIT"
] | 2 | 2021-03-25T08:44:33.000Z | 2021-04-17T12:49:35.000Z | tests/test.py | redocnib/BitBooleanFlags | 189afe5cb188f26302df7c8c292f804703d9ae78 | [
"MIT"
] | null | null | null | tests/test.py | redocnib/BitBooleanFlags | 189afe5cb188f26302df7c8c292f804703d9ae78 | [
"MIT"
] | null | null | null |
from BitBooleanFlags import BitBooleanFlags
import unittest
class TestBitBooleanFlagsMethods(unittest.TestCase):
def test_initialize(self):
bitBoolenFlags = BitBooleanFlags("create","read","update","delete")
self.assertEqual(type(bitBoolenFlags), BitBooleanFlags)
def test_add_flags(self):
bitBoolenFlags = BitBooleanFlags("create","read","update","delete")
flags = 0
flags = bitBoolenFlags(flags).add("update","create")
self.assertTrue(bitBoolenFlags(flags).has("create"))
self.assertTrue(bitBoolenFlags(flags).has("update"))
self.assertFalse(bitBoolenFlags(flags).has("read"))
self.assertFalse(bitBoolenFlags(flags).has("delete"))
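        # Repeat the checks against a freshly constructed flag set.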
bitBoolenFlags = BitBooleanFlags("create","read","update","delete")
flags = 0
flags = bitBoolenFlags(flags).add("update","create")
self.assertTrue(bitBoolenFlags(flags).has("create"))
self.assertTrue(bitBoolenFlags(flags).has("update"))
self.assertFalse(bitBoolenFlags(flags).has("read"))
self.assertFalse(bitBoolenFlags(flags).has("delete"))
def test_remove_flags(self):
bitBoolenFlags = BitBooleanFlags("create","read","update","delete")
flags = 0
flags = bitBoolenFlags(flags).add("update","create")
self.assertTrue(bitBoolenFlags(flags).has("create"))
self.assertTrue(bitBoolenFlags(flags).has("update"))
self.assertFalse(bitBoolenFlags(flags).has("read"))
self.assertFalse(bitBoolenFlags(flags).has("delete"))
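        # Rebuild the flag set and re-verify the starting state before
        # exercising remove().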
bitBoolenFlags = BitBooleanFlags("create","read","update","delete")
flags = 0
flags = bitBoolenFlags(flags).add("update","create")
self.assertTrue(bitBoolenFlags(flags).has("create"))
self.assertTrue(bitBoolenFlags(flags).has("update"))
self.assertFalse(bitBoolenFlags(flags).has("read"))
self.assertFalse(bitBoolenFlags(flags).has("delete"))
flags = bitBoolenFlags(flags).remove("update")
self.assertTrue(bitBoolenFlags(flags).has("create"))
self.assertFalse(bitBoolenFlags(flags).has("update"))
self.assertFalse(bitBoolenFlags(flags).has("read"))
self.assertFalse(bitBoolenFlags(flags).has("delete"))
def test_key_exceptions(self):
bitBoolenFlags = BitBooleanFlags("create","read","update","delete")
flags = 0
flags = bitBoolenFlags(flags).add("update","create")
        # Assert each call separately: inside a single assertRaises block only
        # the first raising statement would ever execute.
        with self.assertRaises(KeyError):
            bitBoolenFlags(flags).has("kill")
        with self.assertRaises(KeyError):
            bitBoolenFlags(flags).add("kill")
        with self.assertRaises(KeyError):
            bitBoolenFlags(flags).remove("kill")
if __name__ == '__main__':
unittest.main() | 39.257576 | 73 | 0.690467 | 256 | 2,591 | 6.929688 | 0.132813 | 0.310598 | 0.260428 | 0.210823 | 0.795941 | 0.789741 | 0.789741 | 0.732807 | 0.732807 | 0.732807 | 0 | 0.002277 | 0.152451 | 2,591 | 66 | 74 | 39.257576 | 0.805556 | 0 | 0 | 0.686275 | 0 | 0 | 0.126592 | 0 | 0 | 0 | 0 | 0 | 0.431373 | 1 | 0.078431 | false | 0 | 0.039216 | 0 | 0.137255 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 9 |
b2dbd6c001790cf19b87f0f8a9f7f0eefb31c571 | 536 | py | Python | dotbot/dotbot/context.py | wonkalous/dotfiles | 67fa339887e8f1c7cd588eec1e93e175ff8e8ed0 | [
"MIT"
] | 14 | 2019-06-09T18:02:05.000Z | 2022-03-06T21:27:22.000Z | dotbot/dotbot/context.py | wonkalous/dotfiles | 67fa339887e8f1c7cd588eec1e93e175ff8e8ed0 | [
"MIT"
] | 12 | 2020-07-26T20:12:12.000Z | 2022-01-30T13:21:04.000Z | dotbot/dotbot/context.py | wonkalous/dotfiles | 67fa339887e8f1c7cd588eec1e93e175ff8e8ed0 | [
"MIT"
] | 5 | 2019-08-20T14:32:07.000Z | 2021-08-19T07:45:49.000Z | import copy
class Context(object):
'''
Contextual data and information for plugins.
'''
def __init__(self, base_directory):
self._base_directory = base_directory
self._defaults = {}
def set_base_directory(self, base_directory):
self._base_directory = base_directory
def base_directory(self):
return self._base_directory
def set_defaults(self, defaults):
self._defaults = defaults
def defaults(self):
return copy.deepcopy(self._defaults)
| 22.333333 | 49 | 0.666045 | 60 | 536 | 5.6 | 0.35 | 0.348214 | 0.252976 | 0.1875 | 0.318452 | 0.279762 | 0.279762 | 0.279762 | 0 | 0 | 0 | 0 | 0.253731 | 536 | 23 | 50 | 23.304348 | 0.84 | 0.08209 | 0 | 0.142857 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.357143 | false | 0.071429 | 0.071429 | 0.142857 | 0.642857 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 1 | 0 | 0 | 7 |
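Editor's note: a minimal usage sketch for the Context class above, assuming the class is importable as defined; the call site is illustrative, not dotbot's actual dispatcher wiring.

# Sketch only: exercising the Context API defined above.
ctx = Context("/home/user/dotfiles")           # hypothetical base directory
ctx.set_defaults({"link": {"relink": True}})

defaults = ctx.defaults()                      # deep copy, so mutation is safe
defaults["link"]["relink"] = False             # does not touch ctx's stored copy
assert ctx.defaults() == {"link": {"relink": True}}
assert ctx.base_directory() == "/home/user/dotfiles"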
b2e7242c7affb3593aabed48d814891935c7dafa | 16,983 | py | Python | aws-frauddetector-detector/src/aws_frauddetector_detector/tests/helpers/test_create_worker_helpers.py | srrokib/aws-cloudformation-resource-providers-frauddetector | 08c049a57e4f9d54666ef7bc250d878853be3cd4 | [
"Apache-2.0"
] | 4 | 2021-05-24T05:35:05.000Z | 2021-11-08T09:43:48.000Z | aws-frauddetector-detector/src/aws_frauddetector_detector/tests/helpers/test_create_worker_helpers.py | srrokib/aws-cloudformation-resource-providers-frauddetector | 08c049a57e4f9d54666ef7bc250d878853be3cd4 | [
"Apache-2.0"
] | 3 | 2021-04-29T20:30:17.000Z | 2021-05-14T16:28:19.000Z | aws-frauddetector-detector/src/aws_frauddetector_detector/tests/helpers/test_create_worker_helpers.py | srrokib/aws-cloudformation-resource-providers-frauddetector | 08c049a57e4f9d54666ef7bc250d878853be3cd4 | [
"Apache-2.0"
] | 3 | 2021-04-07T16:03:03.000Z | 2021-10-30T03:25:33.000Z | from aws_frauddetector_detector.helpers import (
create_worker_helpers,
validation_helpers,
)
from aws_frauddetector_detector import models
from cloudformation_cli_python_lib import (
exceptions,
)
from .. import unit_test_utils
from unittest.mock import MagicMock
def test_validate_dependencies_for_detector_create_happy_case(monkeypatch):
# Arrange
mock_afd_client = unit_test_utils.create_mock_afd_client()
fake_model = unit_test_utils.create_fake_model()
mock_check_get_outcomes = MagicMock()
mock_check_get_event_types = MagicMock()
mock_check_get_variables = MagicMock(return_value=(False, None))
mock_check_get_labels = MagicMock(return_value=(False, None))
mock_check_get_entity_types = MagicMock(return_value=(False, None))
mock_get_external_models = MagicMock(return_value={"externalModels": [unit_test_utils.FAKE_EXTERNAL_MODEL]})
monkeypatch.setattr(validation_helpers, "check_if_get_outcomes_succeeds", mock_check_get_outcomes)
monkeypatch.setattr(
validation_helpers,
"check_if_get_event_types_succeeds",
mock_check_get_event_types,
)
monkeypatch.setattr(validation_helpers, "check_if_get_variables_succeeds", mock_check_get_variables)
monkeypatch.setattr(validation_helpers, "check_if_get_labels_succeeds", mock_check_get_labels)
monkeypatch.setattr(
validation_helpers,
"check_if_get_entity_types_succeeds",
mock_check_get_entity_types,
)
mock_afd_client.get_external_models = mock_get_external_models
# Act
create_worker_helpers.validate_dependencies_for_detector_create(mock_afd_client, fake_model)
# Assert
assert mock_check_get_outcomes.call_count == 0
assert mock_check_get_event_types.call_count == 0
assert mock_check_get_variables.call_count == 2
assert mock_check_get_labels.call_count == 2
assert mock_check_get_entity_types.call_count == 1
assert mock_get_external_models.call_count == 1
def test_validate_dependencies_for_detector_create_external_model_dne_throws_exception(monkeypatch):
# Arrange
mock_afd_client = unit_test_utils.create_mock_afd_client()
fake_model = unit_test_utils.create_fake_model()
fake_model.AssociatedModels = [models.Model(Arn=unit_test_utils.FAKE_EXTERNAL_MODEL.get("arn", "not/found"))]
mock_check_get_outcomes = MagicMock()
mock_check_get_event_types = MagicMock()
mock_check_get_variables = MagicMock(return_value=(False, None))
mock_check_get_labels = MagicMock(return_value=(False, None))
mock_check_get_entity_types = MagicMock(return_value=(False, None))
mock_get_external_models = MagicMock(return_value={"externalModels": []})
monkeypatch.setattr(validation_helpers, "check_if_get_outcomes_succeeds", mock_check_get_outcomes)
monkeypatch.setattr(
validation_helpers,
"check_if_get_event_types_succeeds",
mock_check_get_event_types,
)
monkeypatch.setattr(validation_helpers, "check_if_get_variables_succeeds", mock_check_get_variables)
monkeypatch.setattr(validation_helpers, "check_if_get_labels_succeeds", mock_check_get_labels)
monkeypatch.setattr(
validation_helpers,
"check_if_get_entity_types_succeeds",
mock_check_get_entity_types,
)
mock_afd_client.get_external_models = mock_get_external_models
# Act
exception_thrown = None
try:
create_worker_helpers.validate_dependencies_for_detector_create(mock_afd_client, fake_model)
except exceptions.NotFound as e:
exception_thrown = e
# Assert
assert mock_check_get_outcomes.call_count == 0
assert mock_check_get_event_types.call_count == 0
assert mock_check_get_variables.call_count == 2
assert mock_check_get_labels.call_count == 2
assert mock_check_get_entity_types.call_count == 1
assert mock_get_external_models.call_count == 1
assert exception_thrown is not None
def test_validate_dependencies_for_detector_create_with_referenced_dependencies(
monkeypatch,
):
# Arrange
mock_afd_client = unit_test_utils.create_mock_afd_client()
fake_model = unit_test_utils.create_fake_model_with_references()
get_outcomes_response = {"outcomes": [unit_test_utils.FAKE_OUTCOME]}
mock_check_get_outcomes = MagicMock(return_value=(True, get_outcomes_response))
get_event_types_response = {"eventTypes": [unit_test_utils.FAKE_EVENT_TYPE]}
mock_check_get_event_types = MagicMock(return_value=(True, get_event_types_response))
mock_check_get_variables = MagicMock()
mock_check_get_labels = MagicMock()
mock_check_get_entity_types = MagicMock()
monkeypatch.setattr(validation_helpers, "check_if_get_outcomes_succeeds", mock_check_get_outcomes)
monkeypatch.setattr(
validation_helpers,
"check_if_get_event_types_succeeds",
mock_check_get_event_types,
)
monkeypatch.setattr(validation_helpers, "check_if_get_variables_succeeds", mock_check_get_variables)
monkeypatch.setattr(validation_helpers, "check_if_get_labels_succeeds", mock_check_get_labels)
monkeypatch.setattr(
validation_helpers,
"check_if_get_entity_types_succeeds",
mock_check_get_entity_types,
)
# Act
create_worker_helpers.validate_dependencies_for_detector_create(mock_afd_client, fake_model)
# Assert
assert mock_check_get_outcomes.call_count == 1
assert mock_check_get_event_types.call_count == 1
assert mock_check_get_variables.call_count == 0
assert mock_check_get_labels.call_count == 0
assert mock_check_get_entity_types.call_count == 0
def test_validate_dependencies_for_detector_create_with_inline_event_type_with_referenced_dependencies(
monkeypatch,
):
# Arrange
mock_afd_client = unit_test_utils.create_mock_afd_client()
fake_model = unit_test_utils.create_fake_model()
fake_model.EventType = unit_test_utils.create_fake_inline_event_type_with_referenced_dependencies()
get_outcomes_response = {"outcomes": [unit_test_utils.FAKE_OUTCOME]}
mock_check_get_outcomes = MagicMock(return_value=(True, get_outcomes_response))
get_event_types_response = {"eventTypes": [unit_test_utils.FAKE_EVENT_TYPE]}
mock_check_get_event_types = MagicMock(return_value=(True, get_event_types_response))
get_variables_response = {
"variables": [
unit_test_utils.FAKE_IP_VARIABLE,
unit_test_utils.FAKE_EMAIL_VARIABLE,
]
}
mock_check_get_variables = MagicMock(return_value=(True, get_variables_response))
get_labels_response = {"labels": [unit_test_utils.FAKE_FRAUD_LABEL, unit_test_utils.FAKE_LEGIT_LABEL]}
mock_check_get_labels = MagicMock(return_value=(True, get_labels_response))
get_entity_types_response = {"entityTypes": [unit_test_utils.FAKE_ENTITY_TYPE]}
mock_check_get_entity_types = MagicMock(return_value=(True, get_entity_types_response))
monkeypatch.setattr(validation_helpers, "check_if_get_outcomes_succeeds", mock_check_get_outcomes)
monkeypatch.setattr(
validation_helpers,
"check_if_get_event_types_succeeds",
mock_check_get_event_types,
)
monkeypatch.setattr(validation_helpers, "check_if_get_variables_succeeds", mock_check_get_variables)
monkeypatch.setattr(validation_helpers, "check_if_get_labels_succeeds", mock_check_get_labels)
monkeypatch.setattr(
validation_helpers,
"check_if_get_entity_types_succeeds",
mock_check_get_entity_types,
)
# Act
create_worker_helpers.validate_dependencies_for_detector_create(mock_afd_client, fake_model)
# Assert
assert mock_check_get_outcomes.call_count == 0
assert mock_check_get_event_types.call_count == 0
assert mock_check_get_variables.call_count == 2
assert mock_check_get_labels.call_count == 2
assert mock_check_get_entity_types.call_count == 1
def test_validate_dependencies_for_detector_create_happy_case_with_model_version(monkeypatch):
# Arrange
mock_afd_client = unit_test_utils.create_mock_afd_client()
fake_model = unit_test_utils.create_fake_model()
fake_model.AssociatedModels = [models.Model(Arn=unit_test_utils.FAKE_MODEL_VERSION_ARN)]
mock_check_get_outcomes = MagicMock()
mock_check_get_event_types = MagicMock()
mock_check_get_model_version = MagicMock(return_value=(True, {"status": "ACTIVE"}))
mock_check_get_variables = MagicMock(return_value=(False, None))
mock_check_get_labels = MagicMock(return_value=(False, None))
mock_check_get_entity_types = MagicMock(return_value=(False, None))
mock_get_external_models = MagicMock(return_value={"externalModels": [unit_test_utils.FAKE_EXTERNAL_MODEL]})
monkeypatch.setattr(validation_helpers, "check_if_get_outcomes_succeeds", mock_check_get_outcomes)
monkeypatch.setattr(
validation_helpers,
"check_if_get_event_types_succeeds",
mock_check_get_event_types,
)
monkeypatch.setattr(validation_helpers, "check_if_get_variables_succeeds", mock_check_get_variables)
monkeypatch.setattr(validation_helpers, "check_if_get_labels_succeeds", mock_check_get_labels)
monkeypatch.setattr(
validation_helpers,
"check_if_get_entity_types_succeeds",
mock_check_get_entity_types,
)
mock_afd_client.get_external_models = mock_get_external_models
monkeypatch.setattr(validation_helpers, "check_if_get_model_version_succeeds", mock_check_get_model_version)
# Act
create_worker_helpers.validate_dependencies_for_detector_create(mock_afd_client, fake_model)
# Assert
assert mock_check_get_outcomes.call_count == 0
assert mock_check_get_event_types.call_count == 0
assert mock_check_get_variables.call_count == 2
assert mock_check_get_labels.call_count == 2
assert mock_check_get_entity_types.call_count == 1
assert mock_get_external_models.call_count == 1
assert mock_check_get_model_version.call_count == 1
def test_validate_dependencies_for_detector_create_with_invalid_model_version_arn(monkeypatch):
# Arrange
mock_afd_client = unit_test_utils.create_mock_afd_client()
fake_model = unit_test_utils.create_fake_model()
fake_model.AssociatedModels = [models.Model(Arn="invalid_arn")]
mock_check_get_outcomes = MagicMock()
mock_check_get_event_types = MagicMock()
mock_check_get_model_version = MagicMock(return_value=(False, {"status": "ACTIVE"}))
mock_check_get_variables = MagicMock(return_value=(False, None))
mock_check_get_labels = MagicMock(return_value=(False, None))
mock_check_get_entity_types = MagicMock(return_value=(False, None))
mock_get_external_models = MagicMock(return_value={"externalModels": [unit_test_utils.FAKE_EXTERNAL_MODEL]})
monkeypatch.setattr(validation_helpers, "check_if_get_outcomes_succeeds", mock_check_get_outcomes)
monkeypatch.setattr(
validation_helpers,
"check_if_get_event_types_succeeds",
mock_check_get_event_types,
)
monkeypatch.setattr(validation_helpers, "check_if_get_variables_succeeds", mock_check_get_variables)
monkeypatch.setattr(validation_helpers, "check_if_get_labels_succeeds", mock_check_get_labels)
monkeypatch.setattr(
validation_helpers,
"check_if_get_entity_types_succeeds",
mock_check_get_entity_types,
)
mock_afd_client.get_external_models = mock_get_external_models
monkeypatch.setattr(validation_helpers, "check_if_get_model_version_succeeds", mock_check_get_model_version)
# Act
exception_thrown = None
try:
create_worker_helpers.validate_dependencies_for_detector_create(mock_afd_client, fake_model)
except exceptions.InvalidRequest as e:
exception_thrown = e
# Assert
assert mock_check_get_outcomes.call_count == 0
assert mock_check_get_event_types.call_count == 0
assert mock_check_get_variables.call_count == 2
assert mock_check_get_labels.call_count == 2
assert mock_check_get_entity_types.call_count == 1
assert mock_get_external_models.call_count == 1
assert mock_check_get_model_version.call_count == 0 # get_model_version should not be called
assert exception_thrown is not None
assert str(exception_thrown) == "Unexpected ARN provided in AssociatedModels: {}".format(
fake_model.AssociatedModels[0].Arn
)
def test_validate_dependencies_for_detector_create_model_version_not_active(monkeypatch):
# Arrange
mock_afd_client = unit_test_utils.create_mock_afd_client()
fake_model = unit_test_utils.create_fake_model()
fake_model.AssociatedModels = [models.Model(Arn=unit_test_utils.FAKE_MODEL_VERSION_ARN)]
mock_check_get_outcomes = MagicMock()
mock_check_get_event_types = MagicMock()
mock_check_get_model_version = MagicMock(return_value=(True, {"status": "TRAINING_IN_PROGRESS"}))
mock_check_get_variables = MagicMock(return_value=(False, None))
mock_check_get_labels = MagicMock(return_value=(False, None))
mock_check_get_entity_types = MagicMock(return_value=(False, None))
mock_get_external_models = MagicMock(return_value={"externalModels": [unit_test_utils.FAKE_EXTERNAL_MODEL]})
monkeypatch.setattr(validation_helpers, "check_if_get_outcomes_succeeds", mock_check_get_outcomes)
monkeypatch.setattr(
validation_helpers,
"check_if_get_event_types_succeeds",
mock_check_get_event_types,
)
monkeypatch.setattr(validation_helpers, "check_if_get_variables_succeeds", mock_check_get_variables)
monkeypatch.setattr(validation_helpers, "check_if_get_labels_succeeds", mock_check_get_labels)
monkeypatch.setattr(
validation_helpers,
"check_if_get_entity_types_succeeds",
mock_check_get_entity_types,
)
mock_afd_client.get_external_models = mock_get_external_models
monkeypatch.setattr(validation_helpers, "check_if_get_model_version_succeeds", mock_check_get_model_version)
# Act
exception_thrown = None
try:
create_worker_helpers.validate_dependencies_for_detector_create(mock_afd_client, fake_model)
except exceptions.InvalidRequest as e:
exception_thrown = e
# Assert
assert mock_check_get_outcomes.call_count == 0
assert mock_check_get_event_types.call_count == 0
assert mock_check_get_variables.call_count == 2
assert mock_check_get_labels.call_count == 2
assert mock_check_get_entity_types.call_count == 1
assert mock_get_external_models.call_count == 1
assert mock_check_get_model_version.call_count == 1
assert exception_thrown is not None
assert str(exception_thrown) == "Specified model must be in status:ACTIVE, ModelVersion arn='{}'".format(
unit_test_utils.FAKE_MODEL_VERSION_ARN
)
def test_validate_dependencies_for_detector_create_get_model_version_fails(monkeypatch):
# Arrange
mock_afd_client = unit_test_utils.create_mock_afd_client()
fake_model = unit_test_utils.create_fake_model()
fake_model.AssociatedModels = [models.Model(Arn=unit_test_utils.FAKE_MODEL_VERSION_ARN)]
mock_check_get_outcomes = MagicMock()
mock_check_get_event_types = MagicMock()
mock_check_get_model_version = MagicMock(return_value=(False, {"status": "ACTIVE"}))
mock_check_get_variables = MagicMock(return_value=(False, None))
mock_check_get_labels = MagicMock(return_value=(False, None))
mock_check_get_entity_types = MagicMock(return_value=(False, None))
mock_get_external_models = MagicMock(return_value={"externalModels": [unit_test_utils.FAKE_EXTERNAL_MODEL]})
monkeypatch.setattr(validation_helpers, "check_if_get_outcomes_succeeds", mock_check_get_outcomes)
monkeypatch.setattr(
validation_helpers,
"check_if_get_event_types_succeeds",
mock_check_get_event_types,
)
monkeypatch.setattr(validation_helpers, "check_if_get_variables_succeeds", mock_check_get_variables)
monkeypatch.setattr(validation_helpers, "check_if_get_labels_succeeds", mock_check_get_labels)
monkeypatch.setattr(
validation_helpers,
"check_if_get_entity_types_succeeds",
mock_check_get_entity_types,
)
mock_afd_client.get_external_models = mock_get_external_models
monkeypatch.setattr(validation_helpers, "check_if_get_model_version_succeeds", mock_check_get_model_version)
# Act
exception_thrown = None
try:
create_worker_helpers.validate_dependencies_for_detector_create(mock_afd_client, fake_model)
except exceptions.NotFound as e:
exception_thrown = e
# Assert
assert mock_check_get_outcomes.call_count == 0
assert mock_check_get_event_types.call_count == 0
assert mock_check_get_variables.call_count == 2
assert mock_check_get_labels.call_count == 2
assert mock_check_get_entity_types.call_count == 1
assert mock_get_external_models.call_count == 1
assert mock_check_get_model_version.call_count == 1
assert exception_thrown is not None
| 45.9 | 113 | 0.788671 | 2,231 | 16,983 | 5.437472 | 0.047961 | 0.097931 | 0.130575 | 0.126947 | 0.93224 | 0.924491 | 0.915588 | 0.900668 | 0.875278 | 0.868189 | 0 | 0.003501 | 0.142319 | 16,983 | 369 | 114 | 46.02439 | 0.829329 | 0.011188 | 0 | 0.782895 | 0 | 0 | 0.103119 | 0.082782 | 0 | 0 | 0 | 0 | 0.184211 | 1 | 0.026316 | false | 0 | 0.016447 | 0 | 0.042763 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
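Editor's note: every test above repeats the same ~15-line monkeypatch arrangement block. The sketch below shows how that setup could be hoisted into a pytest fixture; the refactor is an editorial suggestion, not part of the original file, and it reuses only imports and helper names the tests already use.

import pytest
from unittest.mock import MagicMock
from aws_frauddetector_detector.helpers import validation_helpers


@pytest.fixture
def validation_mocks(monkeypatch):
    """Patch every check_if_get_* helper once; return the mocks by name."""
    specs = {
        "check_if_get_outcomes_succeeds": None,            # plain MagicMock
        "check_if_get_event_types_succeeds": None,
        "check_if_get_variables_succeeds": (False, None),  # (found, response)
        "check_if_get_labels_succeeds": (False, None),
        "check_if_get_entity_types_succeeds": (False, None),
    }
    mocks = {}
    for name, retval in specs.items():
        mock = MagicMock() if retval is None else MagicMock(return_value=retval)
        monkeypatch.setattr(validation_helpers, name, mock)
        mocks[name] = mock
    return mocks


def test_example(validation_mocks):
    # a test would then assert on e.g.
    # validation_mocks["check_if_get_variables_succeeds"].call_count
    ...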
b2ed232dad8a4f9e3796f1b1b4b84b318535d769 | 96 | py | Python | hydroserver/hydroserver_wof/__init__.py | kjlippold/his_hydroserver | aaf3939965d12dd5bc74f69d22b653ce548bec0a | [
"MIT"
] | 1 | 2021-01-27T19:19:05.000Z | 2021-01-27T19:19:05.000Z | hydroserver/hydroserver_wof/__init__.py | CUAHSI-APPS/his_hydroserver | aaf3939965d12dd5bc74f69d22b653ce548bec0a | [
"MIT"
] | 1 | 2019-09-27T16:20:49.000Z | 2019-09-27T16:20:49.000Z | hydroserver/hydroserver_wof/__init__.py | kjlippold/his_hydroserver | aaf3939965d12dd5bc74f69d22b653ce548bec0a | [
"MIT"
] | 1 | 2020-06-08T21:43:38.000Z | 2020-06-08T21:43:38.000Z | from hydroserver_wof import wof_database_models
from hydroserver_wof import wof_response_models
| 32 | 47 | 0.916667 | 14 | 96 | 5.857143 | 0.5 | 0.365854 | 0.439024 | 0.585366 | 0.658537 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.083333 | 96 | 2 | 48 | 48 | 0.931818 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 8 |
654845e08b227e1ab40b85b09f29aa6bcfffa0e9 | 17,893 | py | Python | app/api/transform_api.py | 1c3be4r/Apfell | 3d577283cb0cb1ebb1ad75cd827949844e148fe4 | [
"BSD-3-Clause"
] | 1 | 2019-06-14T06:43:45.000Z | 2019-06-14T06:43:45.000Z | app/api/transform_api.py | 1c3be4r/Apfell | 3d577283cb0cb1ebb1ad75cd827949844e148fe4 | [
"BSD-3-Clause"
] | null | null | null | app/api/transform_api.py | 1c3be4r/Apfell | 3d577283cb0cb1ebb1ad75cd827949844e148fe4 | [
"BSD-3-Clause"
] | null | null | null | from app import apfell, db_objects
from sanic.response import json, file
from app.database_models.model import Transform, CommandTransform
from sanic_jwt.decorators import protected, inject_user
from urllib.parse import unquote_plus
from app.api.transforms.utils import TransformOperation, CommandTransformOperation
import datetime
import importlib
import sys
import base64
import app.database_models.model as db_model
@apfell.route(apfell.config['API_BASE'] + "/transforms/bytype/<ptype:string>", methods=['GET'])
@inject_user()
@protected()
async def get_transforms_by_type(request, ptype, user):
payload_type = unquote_plus(ptype)
try:
query = await db_model.payloadtype_query()
payloadtype = await db_objects.get(query, ptype=payload_type)
except Exception as e:
print(e)
return json({'status': 'error', 'error': 'failed to find payload type'})
try:
query = await db_model.transform_query()
transforms = await db_objects.execute(query.where(Transform.payload_type == payloadtype).order_by(
Transform.t_type, Transform.order
))
except Exception as e:
print(e)
return json({'status': 'error', 'error': 'failed to get the transforms'})
return json({'status': 'success', 'transforms': [t.to_json() for t in transforms]})
@apfell.route(apfell.config['API_BASE'] + "/transforms/options", methods=['GET'])
@inject_user()
@protected()
async def get_transforms_options(request, user):
return json(await get_transforms_options_func())
async def get_transforms_options_func():
# reload the transform data so we can provide updated information
try:
import app.api.transforms.utils
importlib.reload(sys.modules['app.api.transforms.utils'])
except Exception as e:
print(e)
from app.api.transforms.utils import TransformOperation
t = TransformOperation()
method_list = {func: await get_type_hints(getattr(t, func).__annotations__) for func in dir(t) if
callable(getattr(t, func)) and not func.startswith("__")}
return method_list
async def get_type_hints(func):
# we don't want information about the payload or parameter inputs, because that's the same for all of them
# we really care about the input and output so we can make sure they match up
hints = {"return": "unknown", "prior_output": "unknown"}
for hint in func.items():
name = hint[0]
typehint = str(hint[1])
        if name != 'payload':
# fix up the typehint a bit
if "class" in typehint:
typehint = typehint.split(" ")[1][1:-2]
elif "typing" in typehint:
typehint = typehint[7:] # cut out "typing."
elif "NewType" in typehint:
typehint = hint[1].__name__ # function NewType.<locals>.new_type
# if the parameter is typehinted to None then don't provide the option to give a parameter
if typehint != 'None':
# hide the unique names besides "parameter" that people can give
                if name != "return" and name != "prior_output":
hints["parameter"] = name + ":" + typehint
else:
hints[name] = typehint
return hints
@apfell.route(apfell.config['API_BASE'] + "/transforms/bytype/<ptype:string>", methods=['POST'])
@inject_user()
@protected()
async def register_transform_for_ptype(request, user, ptype):
payload_type = unquote_plus(ptype)
try:
query = await db_model.payloadtype_query()
payloadtype = await db_objects.get(query, ptype=payload_type)
query = await db_model.operator_query()
operator = await db_objects.get(query, username=user['username'])
except Exception as e:
print(e)
return json({'status': 'error', 'error': 'failed to find payload type'})
possible_transforms = await get_transforms_options_func()
data = request.json
# check for right parameters
if "name" not in data or data['name'] is None or data['name'] not in possible_transforms:
return json({'status': 'error', 'error': 'problem with \"name\" parameter'})
if "parameter" not in data or data['parameter'] is None:
data['parameter'] = ""
if "t_type" not in data or data['t_type'] is None:
return json({'status': 'error', 'error': 'Must specify a type for this transform (\"load\" or \"create\"'})
if "order" not in data or data['order'] is None:
return json({'status': 'error', 'error': 'Must provide an order to this transform'})
if data['order'] <= 0:
return json({'status': 'error', 'error': 'Order must be positive'})
try:
transform = await db_objects.create(Transform, name=data['name'], parameter=data['parameter'],
t_type=data['t_type'], order=data['order'], operator=operator,
payload_type=payloadtype)
except Exception as e:
print(e)
return json({'status': 'error', 'error': 'failed to create transform'})
return json({'status': 'success', **transform.to_json()})
@apfell.route(apfell.config['API_BASE'] + "/transforms/<id:int>", methods=['DELETE'])
@inject_user()
@protected()
async def delete_transform(request, user, id):
try:
query = await db_model.transform_query()
transform = await db_objects.get(query, id=id)
except Exception as e:
print(e)
return json({'status': 'error', 'error': 'failed to find that transform'})
transform_json = transform.to_json()
await db_objects.delete(transform)
return json({'status': "success", **transform_json})
@apfell.route(apfell.config['API_BASE'] + "/transforms/code/download", methods=['GET'])
@inject_user()
@protected()
async def download_transform_code(request, user):
return await file("./app/api/transforms/utils.py", filename="utils.py")
@apfell.route(apfell.config['API_BASE'] + "/transforms/code/view", methods=['GET'])
@inject_user()
@protected()
async def view_transform_code(request, user):
try:
code = base64.b64encode(open('./app/api/transforms/utils.py', 'rb').read()).decode('utf-8')
except Exception as e:
print(e)
return json({'status': 'error', 'error': 'failed to read transforms file'})
return json({'status': 'success', 'code': code})
@apfell.route(apfell.config['API_BASE'] + "/transforms/code/upload", methods=['POST'])
@inject_user()
@protected()
async def upload_c2_profile_payload_type_code(request, user):
# upload a new transforms file to our server and reload the transforms code
try:
data = request.json
except Exception as e:
data = {}
if request.files:
try:
code = request.files['upload_file'][0].body
except Exception as e:
print(e)
return json({'status': 'error', 'error': 'failed to get uploaded code: ' + str(e)})
elif 'code' in data:
try:
code = base64.b64decode(data['code'])
except Exception as e:
print(e)
return json({'status': 'error', 'error': 'failed to get code from data '})
else:
return json({'status': 'error', 'error': 'must actually upload files'})
try:
new_utils = open("./app/api/transforms/utils.py", 'wb')
new_utils.write(code)
new_utils.close()
except Exception as e:
print(e)
return json({'status': 'error', 'error': 'failed to write to disk: ' + str(e)})
try:
try:
import app.api.transforms.utils
importlib.reload(sys.modules['app.api.transforms.utils'])
except Exception as e:
print(e)
from app.api.transforms.utils import CommandTransformOperation, TransformOperation
return json({'status': 'success'})
except Exception as e:
print(e)
return json({'status': 'error', 'error': 'failed to reload the transform modules'})
@apfell.route(apfell.config['API_BASE'] + "/transforms/<id:int>", methods=['PUT'])
@inject_user()
@protected()
async def update_transform_for_ptype(request, user, id):
data = request.json
try:
query = await db_model.transform_query()
transform = await db_objects.get(query, id=id)
query = await db_model.operator_query()
operator = await db_objects.get(query, username=user['username'])
except Exception as e:
print(e)
return json({'status': 'error', 'error': 'failed to find transform'})
possible_transforms = await get_transforms_options_func()
if "name" in data and data['name'] in possible_transforms:
transform.name = data['name']
if "t_type" in data:
transform.t_type = data['t_type']
if "order" in data and int(data['order']) <= 0:
return json({'status': 'error', 'error': "can't have order <= 0"})
if "parameter" in data:
transform.parameter = data['parameter']
if "order" in data:
transform.order = int(data['order'])
transform.operator = operator
transform.timestamp = datetime.datetime.utcnow()
await db_objects.update(transform)
return json({'status': 'success', **transform.to_json()})
async def get_transforms_func(ptype, t_type):
try:
query = await db_model.payloadtype_query()
payload_type = await db_objects.get(query, ptype=ptype)
except Exception as e:
print(e)
return {'status': 'error', 'error': 'failed to get payload type specified'}
try:
query = await db_model.transform_query()
transforms = await db_objects.execute(query.where(
(Transform.t_type == t_type) & (Transform.payload_type == payload_type)).order_by(Transform.order))
except Exception as e:
print(e)
return {'status': 'error', 'error': 'failed to get ' + ptype + ' transforms for ' + t_type}
return {'status': 'success', 'transforms': [t.to_json() for t in transforms]}
###################### COMMAND TRANSFORMS SPECIFICALLY BELOW HERE #########################
@apfell.route(apfell.config['API_BASE'] + "/transforms/bycommand/<id:int>", methods=['GET'])
@inject_user()
@protected()
async def get_transforms_by_command(request, id, user):
try:
query = await db_model.operation_query()
operation = await db_objects.get(query, name=user['current_operation'])
query = await db_model.command_query()
command = await db_objects.get(query, id=id)
    except Exception:
return json({'status': 'error', 'error': "failed to find that command or current operation"})
try:
query = await db_model.commandtransform_query()
transforms = await db_objects.execute(query.where(
(CommandTransform.command == command) & (CommandTransform.operation == operation)
).order_by(
CommandTransform.order
))
except Exception as e:
print(e)
return json({'status': 'error', 'error': 'failed to get the transforms'})
return json({'status': 'success', 'transforms': [t.to_json() for t in transforms]})
@apfell.route(apfell.config['API_BASE'] + "/transforms/bycommand/options", methods=['GET'])
@inject_user()
@protected()
async def get_commandtransforms_options(request, user):
return json(await get_commandtransforms_options_func())
async def get_commandtransforms_options_func():
try:
import app.api.transforms.utils
importlib.reload(sys.modules['app.api.transforms.utils'])
except Exception as e:
print(e)
from app.api.transforms.utils import CommandTransformOperation
t = CommandTransformOperation()
method_list = {func: await get_command_type_hints(getattr(t, func).__annotations__) for func in dir(t) if
callable(getattr(t, func)) and not func.startswith("__")}
return method_list
async def get_command_type_hints(func):
# we don't want information about the payload or parameter inputs, because that's the same for all of them
# we really care about the input and output so we can make sure they match up
hints = {"return": "unknown", "parameter": "unknown"}
for hint in func.items():
name = hint[0]
typehint = str(hint[1])
        if name != 'task_params':
# fix up the typehint a bit
if "class" in typehint:
typehint = typehint.split(" ")[1][1:-2]
elif "typing" in typehint:
typehint = typehint[7:] # cut out "typing."
elif "NewType" in typehint:
typehint = hint[1].__name__ # function NewType.<locals>.new_type
# if the parameter is typehinted to None then don't provide the option to give a parameter
if typehint != 'None':
# hide the unique names besides "parameter" that people can give
hints[name] = typehint
return hints
@apfell.route(apfell.config['API_BASE'] + "/transforms/bycommand/<id:int>", methods=['POST'])
@inject_user()
@protected()
async def register_transform_for_command(request, user, id):
try:
query = await db_model.command_query()
command = await db_objects.get(query, id=id)
query = await db_model.operator_query()
operator = await db_objects.get(query, username=user['username'])
query = await db_model.operation_query()
operation = await db_objects.get(query, name=user['current_operation'])
except Exception as e:
print(e)
return json({'status': 'error', 'error': 'failed to find command, operation, or current operator'})
possible_transforms = await get_commandtransforms_options_func()
data = request.json
# check for right parameters
if "name" not in data or data['name'] is None or data['name'] not in possible_transforms:
return json({'status': 'error', 'error': 'problem with \"name\" parameter'})
if "parameter" not in data or data['parameter'] is None:
data['parameter'] = ""
if "order" not in data or data['order'] is None:
return json({'status': 'error', 'error': 'Must provide an order to this transform'})
if data['order'] <= 0:
return json({'status': 'error', 'error': 'Order must be positive'})
if 'active' not in data:
data['active'] = True
try:
transform = await db_objects.create(CommandTransform, name=data['name'], parameter=data['parameter'],
order=data['order'], operator=operator, command=command, operation=operation,
active=data['active'])
except Exception as e:
print(e)
return json({'status': 'error', 'error': 'failed to create transform'})
return json({'status': 'success', **transform.to_json()})
@apfell.route(apfell.config['API_BASE'] + "/transforms/bycommand/<id:int>", methods=['DELETE'])
@inject_user()
@protected()
async def delete_commandtransform(request, user, id):
try:
query = await db_model.operation_query()
operation = await db_objects.get(query, name=user['current_operation'])
query = await db_model.commandtransform_query()
transform = await db_objects.get(query, id=id, operation=operation)
except Exception as e:
print(e)
return json({'status': 'error', 'error': 'failed to find that transform'})
transform_json = transform.to_json()
await db_objects.delete(transform)
return json({'status': "success", **transform_json})
@apfell.route(apfell.config['API_BASE'] + "/transforms/bycommand/<id:int>", methods=['PUT'])
@inject_user()
@protected()
async def update_transform_for_command(request, user, id):
data = request.json
try:
query = await db_model.operation_query()
operation = await db_objects.get(query, name=user['current_operation'])
query = await db_model.commandtransform_query()
transform = await db_objects.get(query, id=id, operation=operation)
query = await db_model.operator_query()
operator = await db_objects.get(query, username=user['username'])
except Exception as e:
print(e)
return json({'status': 'error', 'error': 'failed to find transform'})
possible_transforms = await get_commandtransforms_options_func()
if "name" in data and data['name'] in possible_transforms:
transform.name = data['name']
if "order" in data and int(data['order']) <= 0:
return json({'status': 'error', 'error': "can't have order <= 0"})
if "parameter" in data:
transform.parameter = data['parameter']
if "order" in data:
transform.order = int(data['order'])
if "active" in data:
transform.active = data['active']
transform.operator = operator
transform.timestamp = datetime.datetime.utcnow()
await db_objects.update(transform)
return json({'status': 'success', **transform.to_json()})
async def get_commandtransforms_func(command_id, operation_name):
try:
query = await db_model.command_query()
command = await db_objects.get(query, id=command_id)
query = await db_model.operation_query()
operation = await db_objects.get(query, name=operation_name)
except Exception as e:
print(e)
return {'status': 'error', 'error': 'failed to get payload type specified'}
try:
query = await db_model.commandtransform_query()
transforms = await db_objects.execute(query.where(
(CommandTransform.command == command) & (CommandTransform.operation == operation)).order_by(CommandTransform.order))
except Exception as e:
print(e)
return {'status': 'error', 'error': 'failed to get transforms for ' + command_id}
return {'status': 'success', 'transforms': [t.to_json() for t in transforms]} | 43.429612 | 128 | 0.647069 | 2,234 | 17,893 | 5.061773 | 0.09624 | 0.03219 | 0.052352 | 0.050141 | 0.843297 | 0.814821 | 0.79174 | 0.756986 | 0.740891 | 0.726388 | 0 | 0.002374 | 0.223104 | 17,893 | 412 | 129 | 43.429612 | 0.811093 | 0.059185 | 0 | 0.708683 | 0 | 0 | 0.173713 | 0.026418 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.053221 | 0 | 0.193277 | 0.064426 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
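Editor's note: a standalone illustration of the typehint-string cleanup performed by get_type_hints and get_command_type_hints above. The transform function is a made-up stand-in, and the sketch assumes Python < 3.10, where typing.NewType produces a local function (matching the "NewType.<locals>.new_type" comment in the code).

from typing import List, NewType

Payload = NewType("Payload", bytes)    # stand-in for Apfell's real annotations

def describe(hint):
    s = str(hint)
    if "class" in s:
        return s.split(" ")[1][1:-2]   # "<class 'str'>"        -> "str"
    elif "typing" in s:
        return s[7:]                   # "typing.List[str]"     -> "List[str]"
    elif "NewType" in s:
        return hint.__name__           # NewType local function -> "Payload"
    return s

assert describe(str) == "str"
assert describe(List[str]) == "List[str]"
assert describe(Payload) == "Payload"  # holds on Python < 3.10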
654d7f1135a8544d6cea55e22ba9e94b971d486b | 45 | py | Python | yukicoder/yuki128.py | knuu/competitive-programming | 16bc68fdaedd6f96ae24310d697585ca8836ab6e | [
"MIT"
] | 1 | 2018-11-12T15:18:55.000Z | 2018-11-12T15:18:55.000Z | yukicoder/yuki128.py | knuu/competitive-programming | 16bc68fdaedd6f96ae24310d697585ca8836ab6e | [
"MIT"
] | null | null | null | yukicoder/yuki128.py | knuu/competitive-programming | 16bc68fdaedd6f96ae24310d697585ca8836ab6e | [
"MIT"
] | null | null | null | # reads N, then K; prints ((N // 1000) // K) * 1000
print(int(input())//1000//int(input())*1000)
| 22.5 | 44 | 0.644444 | 7 | 45 | 4.142857 | 0.571429 | 0.551724 | 0.827586 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.181818 | 0.022222 | 45 | 1 | 45 | 45 | 0.477273 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 8 |
331897681c871fffeb0d7f86ea8c2a9d94e350e2 | 7,077 | py | Python | src/py/test_order.py | progressive-identity/ref-python | f65cc21c707bcf0629b8b96de7d92074477b1231 | [
"Apache-2.0"
] | null | null | null | src/py/test_order.py | progressive-identity/ref-python | f65cc21c707bcf0629b8b96de7d92074477b1231 | [
"Apache-2.0"
] | null | null | null | src/py/test_order.py | progressive-identity/ref-python | f65cc21c707bcf0629b8b96de7d92074477b1231 | [
"Apache-2.0"
] | null | null | null | from nose.tools import assert_raises
import test_datetime; test_datetime.patch() # noqa E702
import datetime
from copy import deepcopy
from key import secretkey, key
import order
def dummy_order(**kwargs):
return order.new("test", foo={"bar": [42, b"\xde\xad\xbe\xef"]}, **kwargs)
def test():
for name in secretkey.algos():
sk_cls = secretkey.from_algo(name)
yield do_new, sk_cls
yield do_subkey, sk_cls
def do_new(sk_cls):
o = dummy_order()
assert not order.signed(o)
assert order.root_signer(o) is None
assert order.expiration(o) is None
assert not list(order.parents(o))
o_unsigned = deepcopy(o)
with test_datetime.past():
raw = order.to_raw(o)
code = order.to_token(o)
assert o == order.from_token(code)
assert o == order.from_raw(raw)
raw = order.to_raw(o)
code = order.to_token(o)
assert o == order.from_token(code)
assert o == order.from_raw(raw)
with test_datetime.future():
raw = order.to_raw(o)
code = order.to_token(o)
assert o == order.from_token(code)
assert o == order.from_raw(raw)
sk = sk_cls.generate()
order.sign(o, sk)
assert order.signed(o)
assert order.root_signer(o) == sk.public()
assert o != o_unsigned
assert order.expiration(o) is None
assert not list(order.parents(o))
assert list(order.iter_signed(o)) == [o]
with test_datetime.past():
raw = order.to_raw(o)
code = order.to_token(o)
with assert_raises(order.FutureSignatureException):
order.from_token(code)
assert o == order.from_raw(raw)
raw = order.to_raw(o)
code = order.to_token(o)
assert order.from_raw(raw) == o
assert order.from_token(code) == o
with test_datetime.future():
raw = order.to_raw(o)
code = order.to_token(o)
assert o == order.from_token(code)
assert o == order.from_raw(raw)
def do_subkey(sk_cls):
sk = sk_cls.generate()
k = sk.public()
sub_sk = sk_cls.generate()
assert sub_sk != sk
sub_k = sub_sk.public()
assert sub_k != k
# generate subkey
sub_o = order.new(order.ALIAS_SUBKEY, exp=3600, **sub_k.to_dict())
order.sign(sub_o, sk=sk)
# subkey is invalid in the past
with test_datetime.past():
raw = order.to_raw(sub_o)
code = order.to_token(sub_o)
with assert_raises(order.FutureSignatureException):
order.from_token(code)
assert sub_o == order.from_raw(raw)
# subkey is valid just before its expiration
with test_datetime.debug(datetime.datetime.utcfromtimestamp(sub_o['_sig']['dat'] + sub_o['exp'] - .1)):
raw = order.to_raw(sub_o)
code = order.to_token(sub_o)
assert order.from_raw(raw) == sub_o
assert order.from_token(code) == sub_o
# subkey is invalid after its expiration
with test_datetime.debug(datetime.datetime.utcfromtimestamp(sub_o['_sig']['dat'] + sub_o['exp'])):
raw = order.to_raw(sub_o)
code = order.to_token(sub_o)
with assert_raises(order.ExpiredOrderException):
order.from_token(code)
assert sub_o == order.from_raw(raw)
# signed order with subkey before it was signed is invalid
with test_datetime.debug(datetime.datetime.utcfromtimestamp(sub_o['_sig']['dat'] - .1)):
o = dummy_order()
with assert_raises(order.FutureSignatureException):
order.sign(o, sk=sub_sk, k=sub_o)
# assert order.signed(o)
# assert order.root_signer(o) == k
# assert key.from_dict(o['_sig']['k']) == sub_k
# raw = order.to_raw(o)
# code = order.to_token(o)
# with assert_raises(order.FutureSignatureException):
# order.from_token(code)
# assert o == order.from_raw(raw)
# signed order with expired subkey is invalid
with test_datetime.debug(datetime.datetime.utcfromtimestamp(sub_o['_sig']['dat'] + sub_o['exp'])):
o = dummy_order()
with assert_raises(order.ExpiredOrderException):
order.sign(o, sk=sub_sk, k=sub_o)
# assert order.signed(o)
# assert order.root_signer(o) == k
# assert key.from_dict(o['_sig']['k']) == sub_k
# raw = order.to_raw(o)
# code = order.to_token(o)
# with assert_raises(order.ExpiredOrderException):
# order.from_token(code)
# assert o == order.from_raw(raw)
# signed order with valid subkey is valid
with test_datetime.debug(datetime.datetime.utcfromtimestamp(sub_o['_sig']['dat'] + sub_o['exp'] - .1)):
o = dummy_order()
order.sign(o, sk=sub_sk, k=sub_o)
assert order.signed(o)
assert order.root_signer(o) == k
assert key.from_dict(o['_sig']['k']) == sub_k
raw = order.to_raw(o)
code = order.to_token(o)
assert o == order.from_token(code)
assert o == order.from_raw(raw)
# signed order with valid subkey in the future is valid
with test_datetime.future():
raw = order.to_raw(o)
code = order.to_token(o)
assert o == order.from_token(code)
assert o == order.from_raw(raw)
# signed expirating order with valid subkey is valid
with test_datetime.debug(datetime.datetime.utcfromtimestamp(sub_o['_sig']['dat'] + sub_o['exp'] - .1)):
o = dummy_order(exp=3600)
order.sign(o, sk=sub_sk, k=sub_o)
assert 'exp' in o
assert order.signed(o)
assert order.root_signer(o) == k
assert key.from_dict(o['_sig']['k']) == sub_k
raw = order.to_raw(o)
code = order.to_token(o)
assert o == order.from_token(code)
assert o == order.from_raw(raw)
# signed expirating order in the future is invalid
with test_datetime.debug(datetime.datetime.utcfromtimestamp(o['_sig']['dat'] + o['exp'])):
raw = order.to_raw(o)
code = order.to_token(o)
with assert_raises(order.ExpiredOrderException):
order.from_token(code)
assert o == order.from_raw(raw)
# signed with expirating signature order with valid subkey is valid
with test_datetime.debug(datetime.datetime.utcfromtimestamp(sub_o['_sig']['dat'] + sub_o['exp'] - .1)):
o = dummy_order()
order.sign(o, sk=sub_sk, k=sub_o, exp=3600)
assert 'exp' not in o
assert order.signed(o)
assert 'exp' in o['_sig']
assert order.root_signer(o) == k
assert key.from_dict(o['_sig']['k']) == sub_k
raw = order.to_raw(o)
code = order.to_token(o)
assert o == order.from_token(code)
assert o == order.from_raw(raw)
# signed with expirating signature order with valid subkey is valid
with test_datetime.debug(datetime.datetime.utcfromtimestamp(o['_sig']['dat'] + o['_sig']['exp'])):
raw = order.to_raw(o)
code = order.to_token(o)
with assert_raises(order.ExpiredSignatureException):
order.from_token(code)
assert o == order.from_raw(raw)
| 33.225352 | 107 | 0.626678 | 1,022 | 7,077 | 4.149706 | 0.085127 | 0.056119 | 0.054232 | 0.079227 | 0.813016 | 0.808536 | 0.777647 | 0.755718 | 0.739448 | 0.727894 | 0 | 0.004126 | 0.246573 | 7,077 | 212 | 108 | 33.382075 | 0.79126 | 0.152748 | 0 | 0.702128 | 0 | 0 | 0.023802 | 0 | 0 | 0 | 0 | 0 | 0.404255 | 1 | 0.028369 | false | 0 | 0.042553 | 0.007092 | 0.078014 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
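Editor's note: a minimal sign/serialize/verify roundtrip of the order API exercised above. Module and method names come from the tests themselves; the order name and payload below are made up, and the algorithm is picked generically because the tests iterate over secretkey.algos() without naming one.

from key import secretkey
import order

algo = next(iter(secretkey.algos()))       # any supported algorithm name
sk = secretkey.from_algo(algo).generate()

o = order.new("grant", scope=["email"])    # hypothetical order payload
order.sign(o, sk)

token = order.to_token(o)                  # from_token re-checks signature dates
assert order.from_token(token) == o
assert order.root_signer(o) == sk.public()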
332899d6168a3452bda1a12fb462203e24171717 | 145 | py | Python | rq_transformer/__init__.py | lucidrains/RQ-Transformer | 9deaa0456f5fc5d677e27d396d18dd1a35fc304c | [
"MIT"
] | 39 | 2022-03-11T17:33:57.000Z | 2022-03-28T02:39:31.000Z | rq_transformer/__init__.py | lucidrains/RQ-Transformer | 9deaa0456f5fc5d677e27d396d18dd1a35fc304c | [
"MIT"
] | null | null | null | rq_transformer/__init__.py | lucidrains/RQ-Transformer | 9deaa0456f5fc5d677e27d396d18dd1a35fc304c | [
"MIT"
] | 1 | 2022-03-28T06:38:40.000Z | 2022-03-28T06:38:40.000Z | from rq_transformer.rq_transformer import RQTransformer
from rq_transformer.hierarchical_causal_transformer import HierarchicalCausalTransformer
| 48.333333 | 88 | 0.931034 | 15 | 145 | 8.666667 | 0.533333 | 0.3 | 0.261538 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.055172 | 145 | 2 | 89 | 72.5 | 0.948905 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
33454c21e2a00ccfaa00e9f354e69f13b09e7637 | 15,556 | py | Python | pddm/envs/cheetah/cheetah_cgr.py | AtsushiHAM/EEI_Analysis_model_based_rl | 800fa57a0ef5609e487c7844a2b21b31a937ece4 | [
"Apache-2.0"
] | 1 | 2020-11-19T11:03:14.000Z | 2020-11-19T11:03:14.000Z | pddm/envs/cheetah/cheetah_cgr.py | AtsushiHAM/EEI_Analysis_model_based_rl | 800fa57a0ef5609e487c7844a2b21b31a937ece4 | [
"Apache-2.0"
] | null | null | null | pddm/envs/cheetah/cheetah_cgr.py | AtsushiHAM/EEI_Analysis_model_based_rl | 800fa57a0ef5609e487c7844a2b21b31a937ece4 | [
"Apache-2.0"
] | 1 | 2020-11-16T07:55:31.000Z | 2020-11-19T11:03:14.000Z | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import numpy as np
import mujoco_py
from gym import utils
from gym.envs.mujoco import mujoco_env
# hamada added: resolve the assets directory relative to this file
import os

GYM_ASSET_PATH = os.path.join(os.path.dirname(__file__), 'assets')
class HalfCheetahEnv(mujoco_env.MujocoEnv, utils.EzPickle):
def __init__(self,file_path=os.path.join(GYM_ASSET_PATH,'half_cheetah_cgr.xml'),max_step=1000):
self.time = 0
self.slicepoint=0
mujoco_env.MujocoEnv.__init__(self, file_path, 3)
utils.EzPickle.__init__(self)
self.skip = self.frame_skip
def get_reward(self, observations, actions):
"""get rewards of a given (observations, actions) pair
Args:
observations: (batchsize, obs_dim) or (obs_dim,)
actions: (batchsize, ac_dim) or (ac_dim,)
Return:
r_total: (batchsize,1) or (1,), reward for that pair
done: (batchsize,1) or (1,), True if reaches terminal state
"""
#initialize and reshape as needed, for batch mode
self.reward_dict = {}
if len(observations.shape)==1:
observations = np.expand_dims(observations, axis = 0)
actions = np.expand_dims(actions, axis = 0)
batch_mode = False
else:
batch_mode = True
# get vars
xvel = observations[:, 9]
body_angle = observations[:, 2]
#bthigh =observations[:, 3]
#fthigh = observations[:, 6]
#goal vel
goal_vel = 4.0
#calc rew
self.reward_dict['actions'] = -0.1 * np.sum(np.square(actions), axis=1)
self.reward_dict['run'] = goal_vel - np.abs(xvel - goal_vel)
self.reward_dict['body_angle_pena']=-20.0 * np.abs(body_angle)
self.reward_dict['xvel'] = xvel
#self.reward_dict['r_total'] = self.reward_dict['run'] + self.reward_dict['actions'] #+self.reward_dict['body_angle_pena']
self.reward_dict['r_total'] = self.reward_dict['xvel'] + self.reward_dict['actions']
#check if done
dones = np.zeros((observations.shape[0],))
dones[body_angle>1.0] = 1
#return
if not batch_mode:
return self.reward_dict['r_total'][0], dones[0]
return self.reward_dict['r_total'], dones
def get_score(self, obs):
xposafter = obs[9]
return xposafter
def step(self, action):
#step
self.do_simulation(action, self.frame_skip)
#obs/reward/done/score
ob = self._get_obs()
rew, done = self.get_reward(ob, action)
score = self.get_score(ob)
#return
env_info = {'time': self.time,
'obs_dict': self.obs_dict,
'rewards': self.reward_dict,
'score': score}
return ob, rew, done, env_info
def _get_obs(self):
self.obs_dict = {}
self.obs_dict['joints_pos'] = self.sim.data.qpos.flat.copy()
self.obs_dict['joints_vel'] = self.sim.data.qvel.flat.copy()
return np.concatenate([
self.obs_dict['joints_pos'], #9
self.obs_dict['joints_vel']#9
])
def reset_model(self):
# set reset pose/vel
self.reset_pose = self.init_qpos + self.np_random.uniform(
low=-.1, high=.1, size=self.model.nq)
self.reset_vel = self.init_qvel + self.np_random.randn(self.model.nv) * .1
#reset the env to that pose/vel
return self.do_reset(self.reset_pose.copy(), self.reset_vel.copy())
def do_reset(self, reset_pose, reset_vel, reset_goal=None):
#reset
self.set_state(reset_pose, reset_vel)
#return
return self._get_obs()
def viewer_setup(self):
self.viewer.cam.distance = self.model.stat.extent * 0.5
class HalfCheetahEnv2(mujoco_env.MujocoEnv, utils.EzPickle):
def __init__(self, file_path=os.path.join(GYM_ASSET_PATH, 'half_cheetah_cgr.xml'), max_step=1000):
self.time = 0
self.slicepoint=0
mujoco_env.MujocoEnv.__init__(self, file_path, 3)
utils.EzPickle.__init__(self)
self.skip = self.frame_skip
def get_reward(self, observations, actions):
"""get rewards of a given (observations, actions) pair
Args:
observations: (batchsize, obs_dim) or (obs_dim,)
actions: (batchsize, ac_dim) or (ac_dim,)
Return:
r_total: (batchsize,1) or (1,), reward for that pair
done: (batchsize,1) or (1,), True if reaches terminal state
"""
# initialize and reshape as needed, for batch mode
self.reward_dict = {}
if len(observations.shape) == 1:
observations = np.expand_dims(observations, axis=0)
actions = np.expand_dims(actions, axis=0)
batch_mode = False
else:
batch_mode = True
# get vars
xvel = observations[:, 9]
body_angle = observations[:, 2]
#goal vel
goal_vel = 4.0
#calc rew
self.reward_dict['actions'] = -0.1 * np.sum(np.square(actions), axis=1)
self.reward_dict['run'] = goal_vel - np.abs(xvel - goal_vel)
self.reward_dict['body_angle_pena']=-20.0 * np.abs(body_angle)
self.reward_dict['xvel'] = xvel
#self.reward_dict['r_total'] = self.reward_dict['run'] + self.reward_dict['actions']#+self.reward_dict['body_angle_pena']
self.reward_dict['r_total'] = self.reward_dict['xvel'] + self.reward_dict['actions']
# check if done
dones = np.zeros((observations.shape[0],))
dones[body_angle > 1.0] = 1
# return
if not batch_mode:
return self.reward_dict['r_total'][0], dones[0]
return self.reward_dict['r_total'], dones
def get_score(self, obs):
xposafter = obs[9]
return xposafter
def step(self, action):
# step
self.do_simulation(action, self.frame_skip)
# obs/reward/done/score
ob = self._get_obs()
rew, done = self.get_reward(ob, action)
score = self.get_score(ob)
# return
env_info = {'time': self.time,
'obs_dict': self.obs_dict,
'rewards': self.reward_dict,
'score': score}
return ob, rew, done, env_info
def _get_obs(self):
self.obs_dict = {}
self.obs_dict['joints_pos'] = self.sim.data.qpos.flat.copy()
self.obs_dict['joints_vel'] = self.sim.data.qvel.flat.copy()
self.obs_dict['joints_force'] = np.asarray([ self.data.sensordata[7],self.data.sensordata[16]]).flat.copy()
return np.concatenate([
self.obs_dict['joints_pos'], # 9
self.obs_dict['joints_vel'], # 9
self.obs_dict['joints_force']
])
def reset_model(self):
# set reset pose/vel
self.reset_pose = self.init_qpos + self.np_random.uniform(
low=-.1, high=.1, size=self.model.nq)
self.reset_vel = self.init_qvel + self.np_random.randn(self.model.nv) * .1
# reset the env to that pose/vel
return self.do_reset(self.reset_pose.copy(), self.reset_vel.copy())
def do_reset(self, reset_pose, reset_vel, reset_goal=None):
# reset
self.set_state(reset_pose, reset_vel)
# return
return self._get_obs()
def viewer_setup(self):
self.viewer.cam.distance = self.model.stat.extent * 0.5
class HalfCheetahEnv6(mujoco_env.MujocoEnv, utils.EzPickle):
def __init__(self,file_path=os.path.join(GYM_ASSET_PATH,'half_cheetah_cgr.xml'),max_step=1000):
self.time = 0
self.slicepoint=0
mujoco_env.MujocoEnv.__init__(self, file_path, 3)
utils.EzPickle.__init__(self)
self.skip = self.frame_skip
def get_reward(self, observations, actions):
"""get rewards of a given (observations, actions) pair
Args:
observations: (batchsize, obs_dim) or (obs_dim,)
actions: (batchsize, ac_dim) or (ac_dim,)
Return:
r_total: (batchsize,1) or (1,), reward for that pair
done: (batchsize,1) or (1,), True if reaches terminal state
"""
#initialize and reshape as needed, for batch mode
self.reward_dict = {}
if len(observations.shape)==1:
observations = np.expand_dims(observations, axis = 0)
actions = np.expand_dims(actions, axis = 0)
batch_mode = False
else:
batch_mode = True
# get vars
xvel = observations[:, 9]
body_angle = observations[:, 2]
# goal vel
goal_vel = 4.0
# calc rew
self.reward_dict['actions'] = -0.1 * np.sum(np.square(actions), axis=1)
self.reward_dict['run'] = goal_vel - np.abs(xvel - goal_vel)
self.reward_dict['body_angle_pena'] = -20.0 * np.abs(body_angle)
self.reward_dict['xvel'] = xvel
#self.reward_dict['r_total'] = self.reward_dict['run'] + self.reward_dict['actions']#+self.reward_dict['body_angle_pena']
self.reward_dict['r_total'] = self.reward_dict['xvel'] + self.reward_dict['actions']
#check if done
dones = np.zeros((observations.shape[0],))
dones[body_angle>1.0] = 1
#return
if not batch_mode:
return self.reward_dict['r_total'][0], dones[0]
return self.reward_dict['r_total'], dones
def get_score(self, obs):
xposafter = obs[9]
return xposafter
def step(self, action):
#step
self.do_simulation(action, self.frame_skip)
#obs/reward/done/score
ob = self._get_obs()
rew, done = self.get_reward(ob, action)
score = self.get_score(ob)
#return
env_info = {'time': self.time,
'obs_dict': self.obs_dict,
'rewards': self.reward_dict,
'score': score}
return ob, rew, done, env_info
def _get_obs(self):
self.obs_dict = {}
self.obs_dict['joints_pos'] = self.sim.data.qpos.flat.copy()
self.obs_dict['joints_vel'] = self.sim.data.qvel.flat.copy()
self.obs_dict['joints_force'] = np.asarray([self.data.sensordata[1],self.data.sensordata[4],self.data.sensordata[7]
,self.data.sensordata[10],self.data.sensordata[13],self.data.sensordata[16]]).flat.copy()
return np.concatenate([
self.obs_dict['joints_pos'], #9
self.obs_dict['joints_vel'], #9
self.obs_dict['joints_force']
])
def reset_model(self):
# set reset pose/vel
self.reset_pose = self.init_qpos + self.np_random.uniform(
low=-.1, high=.1, size=self.model.nq)
self.reset_vel = self.init_qvel + self.np_random.randn(self.model.nv) * .1
#reset the env to that pose/vel
return self.do_reset(self.reset_pose.copy(), self.reset_vel.copy())
def do_reset(self, reset_pose, reset_vel, reset_goal=None):
#reset
self.set_state(reset_pose, reset_vel)
#return
return self._get_obs()
def viewer_setup(self):
self.viewer.cam.distance = self.model.stat.extent * 0.5
class HalfCheetahEnv_Pre2(mujoco_env.MujocoEnv, utils.EzPickle):
def __init__(self,file_path=os.path.join(GYM_ASSET_PATH,'half_cheetah_cgr.xml'),max_step=1000):
self.time = 0
self.slicepoint=0
mujoco_env.MujocoEnv.__init__(self, file_path, 3)
utils.EzPickle.__init__(self)
self.skip = self.frame_skip
def get_reward(self, observations, actions):
"""get rewards of a given (observations, actions) pair
Args:
observations: (batchsize, obs_dim) or (obs_dim,)
actions: (batchsize, ac_dim) or (ac_dim,)
Return:
r_total: (batchsize,1) or (1,), reward for that pair
done: (batchsize,1) or (1,), True if reaches terminal state
"""
#initialize and reshape as needed, for batch mode
self.reward_dict = {}
if len(observations.shape)==1:
observations = np.expand_dims(observations, axis = 0)
actions = np.expand_dims(actions, axis = 0)
batch_mode = False
else:
batch_mode = True
# get vars
xvel = observations[:, 9]
body_angle = observations[:, 2]
# goal vel
goal_vel = 4.0
# calc rew
self.reward_dict['actions'] = -0.1 * np.sum(np.square(actions), axis=1)
self.reward_dict['run'] = goal_vel - np.abs(xvel - goal_vel)
self.reward_dict['body_angle_pena'] = -20.0 * np.abs(body_angle)
self.reward_dict['xvel'] = xvel
#self.reward_dict['r_total'] = self.reward_dict['run'] + self.reward_dict['actions']#+self.reward_dict['body_angle_pena']
self.reward_dict['r_total'] = self.reward_dict['xvel'] + self.reward_dict['actions']
#check if done
dones = np.zeros((observations.shape[0],))
dones[body_angle>1.0] = 1
#return
if not batch_mode:
return self.reward_dict['r_total'][0], dones[0]
return self.reward_dict['r_total'], dones
def get_score(self, obs):
xposafter = obs[9]
return xposafter
def step(self, action):
#step
self.do_simulation(action, self.frame_skip)
#obs/reward/done/score
ob = self._get_obs()
rew, done = self.get_reward(ob, action)
score = self.get_score(ob)
#return
env_info = {'time': self.time,
'obs_dict': self.obs_dict,
'rewards': self.reward_dict,
'score': score}
return ob, rew, done, env_info
def _get_obs(self):
self.obs_dict = {}
self.obs_dict['joints_pos'] = self.sim.data.qpos.flat.copy()
self.obs_dict['joints_vel'] = self.sim.data.qvel.flat.copy()
self.obs_dict['joints_force'] = np.asarray([self.data.sensordata[18],self.data.sensordata[19]]).flat.copy()
return np.concatenate([
self.obs_dict['joints_pos'], #9
self.obs_dict['joints_vel'], #9
self.obs_dict['joints_force']
])
def reset_model(self):
# set reset pose/vel
self.reset_pose = self.init_qpos + self.np_random.uniform(
low=-.1, high=.1, size=self.model.nq)
self.reset_vel = self.init_qvel + self.np_random.randn(self.model.nv) * .1
#reset the env to that pose/vel
return self.do_reset(self.reset_pose.copy(), self.reset_vel.copy())
def do_reset(self, reset_pose, reset_vel, reset_goal=None):
#reset
self.set_state(reset_pose, reset_vel)
#return
return self._get_obs()
def viewer_setup(self):
self.viewer.cam.distance = self.model.stat.extent * 0.5 | 31.0499 | 141 | 0.599833 | 2,083 | 15,556 | 4.269323 | 0.098896 | 0.067469 | 0.094456 | 0.042056 | 0.914652 | 0.914652 | 0.914652 | 0.910379 | 0.910379 | 0.910379 | 0 | 0.016089 | 0.276806 | 15,556 | 501 | 142 | 31.0499 | 0.7744 | 0.19298 | 0 | 0.936 | 0 | 0 | 0.053833 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.128 | false | 0 | 0.02 | 0 | 0.276 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
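Editor's note: a short random-action rollout against HalfCheetahEnv2 defined above, instantiating the class directly since no gym registration appears in this file. It assumes a working mujoco_py install and the half_cheetah_cgr.xml asset on disk.

import numpy as np

env = HalfCheetahEnv2()                     # loads assets/half_cheetah_cgr.xml
ob = env.reset()
ep_return = 0.0
for _ in range(100):
    action = np.random.uniform(-1.0, 1.0, size=env.action_space.shape)
    ob, rew, done, info = env.step(action)  # info carries obs/reward dicts
    ep_return += rew
    if done:
        ob = env.reset()
print("return over 100 random steps:", ep_return)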
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.