hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
0870e20d9aa48ef2161bf98ae507ee33f01ff825
| 3,620
|
py
|
Python
|
tests/test_store_retrieve.py
|
akesterson/pass3
|
b2b125ca5b548665de60ace549ed34f692d541b2
|
[
"MIT"
] | 1
|
2016-01-07T00:17:47.000Z
|
2016-01-07T00:17:47.000Z
|
tests/test_store_retrieve.py
|
akesterson/pass3
|
b2b125ca5b548665de60ace549ed34f692d541b2
|
[
"MIT"
] | null | null | null |
tests/test_store_retrieve.py
|
akesterson/pass3
|
b2b125ca5b548665de60ace549ed34f692d541b2
|
[
"MIT"
] | null | null | null |
import nose
import pass3
def test_store_retrieve():
engine = pass3.Engine(
password='testing')
entry = pass3.Record(
scheme='http',
host='localhost.localdomain',
path='somesite',
user='test',
password='test',
title='Some Web Page : Welcome'
)
engine.store(entry)
records = [x for x in engine.search(scheme='http', host='localhost.localdomain', path='somesite')]
assert( len(records) == 1 )
assert( records[0].scheme == 'http' )
assert( records[0].host == 'localhost.localdomain' )
assert( records[0].path == 'somesite' )
assert( records[0].user == 'test' )
assert( records[0].password == 'test' )
assert( records[0].title == 'Some Web Page : Welcome' )
def test_store_retrieve_multiple():
engine = pass3.Engine(
password='testing'
)
engine.store(
pass3.Record(
scheme='http',
host='localhost.localdomain',
path='somesite',
user='test',
password='test',
title='test'
)
)
engine.store(
pass3.Record(
scheme='http',
host='localhost.localdomain',
path='some_other_site',
user='test',
password='test',
title='test 2'
)
)
records = [x for x in engine.search(scheme='http', host='localhost.localdomain')]
assert( len(records) == 2 )
assert( records[0].scheme == 'http' )
assert( records[0].host == 'localhost.localdomain' )
assert( records[0].path == 'somesite' )
assert( records[0].user == 'test' )
assert( records[0].password == 'test' )
assert( records[1].scheme == 'http' )
assert( records[1].host == 'localhost.localdomain' )
assert( records[1].path == 'some_other_site' )
assert( records[1].user == 'test' )
assert( records[1].password == 'test' )
def test_store_retrieve_alternate():
engine = pass3.Engine(
password='testing'
)
record = pass3.Record(
scheme='http',
host='localhost.localdomain',
path='somesite',
user='test',
password='test',
title='test'
)
record.add_alternate(
pass3.Record(
scheme='http',
host='localhost.localdomain',
path='some_other_site'
)
)
engine.store(record)
records = [x for x in engine.search(scheme='http', host='localhost.localdomain', path='somesite')]
assert( len(records) == 1 )
assert( records[0].scheme == 'http' )
assert( records[0].host == 'localhost.localdomain' )
assert( records[0].path == 'somesite' )
assert( records[0].user == 'test' )
assert( records[0].password == 'test' )
assert( len(records[0].alternates) == 1 )
assert( records[0].alternates[0].scheme == 'http' )
assert( records[0].alternates[0].host == 'localhost.localdomain' )
assert( records[0].alternates[0].path == 'some_other_site' )
records = [x for x in engine.search(scheme='http', host='localhost.localdomain', path='some_other_site')]
assert( len(records) == 1 )
assert( records[0].scheme == 'http' )
assert( records[0].host == 'localhost.localdomain' )
assert( records[0].path == 'somesite' )
assert( records[0].user == 'test' )
assert( records[0].password == 'test' )
assert( len(records[0].alternates) == 1 )
assert( records[0].alternates[0].scheme == 'http' )
assert( records[0].alternates[0].host == 'localhost.localdomain' )
assert( records[0].alternates[0].path == 'some_other_site' )
| 33.831776
| 109
| 0.581215
| 397
| 3,620
| 5.246851
| 0.103275
| 0.199712
| 0.181469
| 0.099376
| 0.881421
| 0.786846
| 0.772924
| 0.772924
| 0.772924
| 0.766683
| 0
| 0.020748
| 0.25442
| 3,620
| 106
| 110
| 34.150943
| 0.751019
| 0
| 0
| 0.66
| 0
| 0
| 0.197514
| 0.092818
| 0
| 0
| 0
| 0
| 0.38
| 1
| 0.03
| false
| 0.21
| 0.02
| 0
| 0.05
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
08e0aed52085cc34fabbfd619ac78869d7f5c025
| 2,299
|
py
|
Python
|
tests/test_verify.py
|
haohuifeng0/nexmo-python
|
aacaefcf8bac20eedecaff53f122317e6c09e3ca
|
[
"MIT"
] | null | null | null |
tests/test_verify.py
|
haohuifeng0/nexmo-python
|
aacaefcf8bac20eedecaff53f122317e6c09e3ca
|
[
"MIT"
] | null | null | null |
tests/test_verify.py
|
haohuifeng0/nexmo-python
|
aacaefcf8bac20eedecaff53f122317e6c09e3ca
|
[
"MIT"
] | null | null | null |
from util import *
@responses.activate
def test_start_verification(verify, dummy_data):
stub(responses.POST, "https://api.nexmo.com/verify/json")
params = {"number": "447525856424", "brand": "MyApp"}
assert isinstance(verify.start_verification(params), dict)
assert request_user_agent() == dummy_data.user_agent
assert "number=447525856424" in request_body()
assert "brand=MyApp" in request_body()
@responses.activate
def test_check_verification(verify, dummy_data):
stub(responses.POST, "https://api.nexmo.com/verify/check/json")
assert isinstance(
verify.check("8g88g88eg8g8gg9g90", code="123445"), dict
)
assert request_user_agent() == dummy_data.user_agent
assert "code=123445" in request_body()
assert "request_id=8g88g88eg8g8gg9g90" in request_body()
@responses.activate
def test_get_verification(verify, dummy_data):
stub(responses.GET, "https://api.nexmo.com/verify/search/json")
assert isinstance(verify.search("xxx"), dict)
assert request_user_agent() == dummy_data.user_agent
assert "request_id=xxx" in request_query()
@responses.activate
def test_cancel_verification(verify, dummy_data):
stub(responses.POST, "https://api.nexmo.com/verify/control/json")
assert isinstance(verify.cancel("8g88g88eg8g8gg9g90"), dict)
assert request_user_agent() == dummy_data.user_agent
assert "cmd=cancel" in request_body()
assert "request_id=8g88g88eg8g8gg9g90" in request_body()
@responses.activate
def test_trigger_next_verification_event(verify, dummy_data):
stub(responses.POST, "https://api.nexmo.com/verify/control/json")
assert isinstance(
verify.trigger_next_event("8g88g88eg8g8gg9g90"), dict
)
assert request_user_agent() == dummy_data.user_agent
assert "cmd=trigger_next_event" in request_body()
assert "request_id=8g88g88eg8g8gg9g90" in request_body()
@responses.activate
def test_start_psd2_verification(verify, dummy_data):
stub(responses.POST, "https://api.nexmo.com/verify/psd2/json")
params = {"number": "447525856424", "brand": "MyApp"}
assert isinstance(verify.psd2(params), dict)
assert request_user_agent() == dummy_data.user_agent
assert "number=447525856424" in request_body()
assert "brand=MyApp" in request_body()
| 34.313433
| 69
| 0.743367
| 290
| 2,299
| 5.668966
| 0.165517
| 0.065693
| 0.079075
| 0.087591
| 0.819343
| 0.78528
| 0.760949
| 0.74635
| 0.74635
| 0.673358
| 0
| 0.061809
| 0.134406
| 2,299
| 67
| 70
| 34.313433
| 0.764322
| 0
| 0
| 0.520833
| 0
| 0
| 0.241304
| 0.047391
| 0
| 0
| 0
| 0
| 0.479167
| 1
| 0.125
| false
| 0
| 0.020833
| 0
| 0.145833
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
f5b2a5bbb38fe39b0576273f528015c0e2c4d7c8
| 174
|
py
|
Python
|
purescript_show_python/ffi/Data/Semiring.py
|
thautwarm/purescript-show-python
|
e40134362653baa2f5feffced579504d7fe7f7ba
|
[
"MIT"
] | null | null | null |
purescript_show_python/ffi/Data/Semiring.py
|
thautwarm/purescript-show-python
|
e40134362653baa2f5feffced579504d7fe7f7ba
|
[
"MIT"
] | 1
|
2020-02-24T16:38:30.000Z
|
2020-02-24T16:38:30.000Z
|
purescript_show_python/ffi/Data/Semiring.py
|
purescript-python/purescript-show-python
|
e40134362653baa2f5feffced579504d7fe7f7ba
|
[
"MIT"
] | null | null | null |
def intAdd(x):
return lambda y: x + y
def intMul(x):
return lambda y: x * y
def numAdd(x):
return lambda y: x + y
def numMul(x):
return lambda y: x * y
| 11.6
| 26
| 0.574713
| 32
| 174
| 3.125
| 0.28125
| 0.28
| 0.52
| 0.56
| 0.73
| 0.73
| 0.57
| 0
| 0
| 0
| 0
| 0
| 0.310345
| 174
| 14
| 27
| 12.428571
| 0.833333
| 0
| 0
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0.5
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
de22aa167477d15038d2b848b127b6e7a3505a4d
| 2,559
|
py
|
Python
|
tests/test_problem_solving_data_structures_trees.py
|
mxdzi/hackerrank
|
4455f73e4479a4204b2e1167253f6a02351aa5b7
|
[
"MIT"
] | null | null | null |
tests/test_problem_solving_data_structures_trees.py
|
mxdzi/hackerrank
|
4455f73e4479a4204b2e1167253f6a02351aa5b7
|
[
"MIT"
] | null | null | null |
tests/test_problem_solving_data_structures_trees.py
|
mxdzi/hackerrank
|
4455f73e4479a4204b2e1167253f6a02351aa5b7
|
[
"MIT"
] | null | null | null |
from problem_solving.data_structures.trees import *
def test_q1_tree_preorder_traversal(capsys, monkeypatch):
inputs = ["6", "1 2 5 3 6 4"]
monkeypatch.setattr('builtins.input', lambda: inputs.pop(0))
q1_tree_preorder_traversal.main()
captured = capsys.readouterr()
output = "1 2 5 3 4 6 "
assert captured.out == output
def test_q2_tree_postorder_traversal(capsys, monkeypatch):
inputs = ["6", "1 2 5 3 6 4"]
monkeypatch.setattr('builtins.input', lambda: inputs.pop(0))
q2_tree_postorder_traversal.main()
captured = capsys.readouterr()
output = "4 3 6 5 2 1 "
assert captured.out == output
inputs = ["15", "1 14 3 7 4 5 15 6 13 10 11 2 12 8 9"]
monkeypatch.setattr('builtins.input', lambda: inputs.pop(0))
q2_tree_postorder_traversal.main()
captured = capsys.readouterr()
output = "2 6 5 4 9 8 12 11 10 13 7 3 15 14 1 "
assert captured.out == output
def test_q3_tree_inorder_traversal(capsys, monkeypatch):
inputs = ["6", "1 2 5 3 6 4"]
monkeypatch.setattr('builtins.input', lambda: inputs.pop(0))
q3_tree_inorder_traversal.main()
captured = capsys.readouterr()
output = "1 2 3 4 5 6 "
assert captured.out == output
inputs = ["15", "1 14 3 7 4 5 15 6 13 10 11 2 12 8 9"]
monkeypatch.setattr('builtins.input', lambda: inputs.pop(0))
q3_tree_inorder_traversal.main()
captured = capsys.readouterr()
output = "1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 "
assert captured.out == output
def test_q4_tree_height_of_a_binary_tree(capsys, monkeypatch):
inputs = ["7", "3 5 2 1 4 6 7"]
monkeypatch.setattr('builtins.input', lambda: inputs.pop(0))
q4_tree_height_of_a_binary_tree.main()
captured = capsys.readouterr()
output = "3\n"
assert captured.out == output
inputs = ["1", "15"]
monkeypatch.setattr('builtins.input', lambda: inputs.pop(0))
q4_tree_height_of_a_binary_tree.main()
captured = capsys.readouterr()
output = "0\n"
assert captured.out == output
inputs = ["5", "3 1 7 5 4"]
monkeypatch.setattr('builtins.input', lambda: inputs.pop(0))
q4_tree_height_of_a_binary_tree.main()
captured = capsys.readouterr()
output = "3\n"
assert captured.out == output
def test_q6_tree_level_order_traversal(capsys, monkeypatch):
inputs = ["6", "1 2 5 3 6 4"]
monkeypatch.setattr('builtins.input', lambda: inputs.pop(0))
q6_tree_level_order_traversal.main()
captured = capsys.readouterr()
output = "1 2 5 3 6 4 "
assert captured.out == output
| 30.464286
| 64
| 0.669402
| 395
| 2,559
| 4.177215
| 0.141772
| 0.098182
| 0.141818
| 0.169091
| 0.872727
| 0.851515
| 0.769697
| 0.754545
| 0.754545
| 0.753939
| 0
| 0.090281
| 0.207894
| 2,559
| 83
| 65
| 30.831325
| 0.72373
| 0
| 0
| 0.7
| 0
| 0
| 0.157874
| 0
| 0
| 0
| 0
| 0
| 0.15
| 1
| 0.083333
| false
| 0
| 0.016667
| 0
| 0.1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
9d1a62748459695db18e81a89d3470b80d9c9c1d
| 44
|
py
|
Python
|
.ipynb_checkpoints/config-checkpoint.py
|
jchrystal/weatherpy_homework
|
475cf8aff72e1f137ba3026089d6b2090e18060a
|
[
"MIT"
] | null | null | null |
.ipynb_checkpoints/config-checkpoint.py
|
jchrystal/weatherpy_homework
|
475cf8aff72e1f137ba3026089d6b2090e18060a
|
[
"MIT"
] | null | null | null |
.ipynb_checkpoints/config-checkpoint.py
|
jchrystal/weatherpy_homework
|
475cf8aff72e1f137ba3026089d6b2090e18060a
|
[
"MIT"
] | null | null | null |
api_key = "ccafc75a70c2015a73008b45469e478c"
| 44
| 44
| 0.886364
| 3
| 44
| 12.666667
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.5
| 0.045455
| 44
| 1
| 44
| 44
| 0.404762
| 0
| 0
| 0
| 0
| 0
| 0.711111
| 0.711111
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
9d5eef291ccdcaa0d74fdf81a430fa00036e9e97
| 4,948
|
py
|
Python
|
src/genie/libs/parser/nxos/tests/ShowIpInterfaceBrief/cli/equal/golden_output_1_expected.py
|
balmasea/genieparser
|
d1e71a96dfb081e0a8591707b9d4872decd5d9d3
|
[
"Apache-2.0"
] | 204
|
2018-06-27T00:55:27.000Z
|
2022-03-06T21:12:18.000Z
|
src/genie/libs/parser/nxos/tests/ShowIpInterfaceBrief/cli/equal/golden_output_1_expected.py
|
balmasea/genieparser
|
d1e71a96dfb081e0a8591707b9d4872decd5d9d3
|
[
"Apache-2.0"
] | 468
|
2018-06-19T00:33:18.000Z
|
2022-03-31T23:23:35.000Z
|
src/genie/libs/parser/nxos/tests/ShowIpInterfaceBrief/cli/equal/golden_output_1_expected.py
|
balmasea/genieparser
|
d1e71a96dfb081e0a8591707b9d4872decd5d9d3
|
[
"Apache-2.0"
] | 309
|
2019-01-16T20:21:07.000Z
|
2022-03-30T12:56:41.000Z
|
expected_output = {'interface':
{'Eth5/48.106':
{'interface_status': 'protocol-down/link-down/admin-up',
'ip_address': '10.81.6.1'},
'Lo3':
{'interface_status': 'protocol-up/link-up/admin-up',
'ip_address': '192.168.205.1'},
'Po1.102':
{'interface_status': 'protocol-up/link-up/admin-up', 'ip_address': '192.168.70.2'},
'Lo11':
{'interface_status': 'protocol-up/link-up/admin-up',
'ip_address': '192.168.151.1'},
'Vlan23':
{'vlan_id':
{'23':
{'interface_status': 'protocol-up/link-up/admin-up',
'ip_address': '192.168.186.1'}}},
'Eth5/48.101':
{'interface_status': 'protocol-down/link-down/admin-up',
'ip_address': '10.81.1.1'},
'Eth5/48.102':
{'interface_status': 'protocol-down/link-down/admin-up',
'ip_address': '10.81.2.1'},
'Eth5/48.105':
{'interface_status': 'protocol-down/link-down/admin-up',
'ip_address': '10.81.5.1'},
'Lo2':
{'interface_status': 'protocol-up/link-up/admin-up', 'ip_address': '192.168.51.1'},
'Lo1':
{'interface_status': 'protocol-up/link-up/admin-up',
'ip_address': '192.168.154.1'},
'Eth6/22':
{'interface_status': 'protocol-up/link-up/admin-up',
'ip_address': '192.168.145.1'},
'Po1.101':
{'interface_status': 'protocol-up/link-up/admin-up',
'ip_address': '192.168.151.2'},
'Lo10':
{'interface_status': 'protocol-up/link-up/admin-up', 'ip_address': '192.168.64.1'},
'Po1.103':
{'interface_status': 'protocol-up/link-up/admin-up',
'ip_address': '192.168.246.2'},
'Eth5/48.100':
{'interface_status': 'protocol-down/link-down/admin-up',
'ip_address': '10.81.0.1'},
'Po2.107':
{'interface_status': 'protocol-up/link-up/admin-up', 'ip_address': '192.168.66.1'},
'Eth5/48.103':
{'interface_status': 'protocol-down/link-down/admin-up',
'ip_address': '10.81.3.1'},
'tunnel-te12':
{'interface_status': 'protocol-up/link-up/admin-up',
'ip_address': 'unnumbered(loopback0)'},
'Eth5/48.110':
{'interface_status': 'protocol-down/link-down/admin-up',
'ip_address': '10.81.10.1'},
'Po2.103':
{'interface_status': 'protocol-up/link-up/admin-up', 'ip_address': '192.168.19.1'},
'Lo0':
{'interface_status': 'protocol-up/link-up/admin-up', 'ip_address': '192.168.4.1'},
'Po2.101':
{'interface_status': 'protocol-up/link-up/admin-up',
'ip_address': '192.168.135.1'},
'Po2.100':
{'interface_status': 'protocol-up/link-up/admin-up',
'ip_address': '192.168.196.1'},
'tunnel-te11':
{'interface_status': 'protocol-up/link-up/admin-up',
'ip_address': 'unnumbered(loopback0)'},
'Po2.102':
{'interface_status': 'protocol-up/link-up/admin-up', 'ip_address': '192.168.76.1'},
'Eth5/48.104':
{'interface_status': 'protocol-down/link-down/admin-up', 'ip_address': '10.81.4.1'}
}
}
| 63.435897
| 116
| 0.353678
| 419
| 4,948
| 4.047733
| 0.157518
| 0.229953
| 0.352594
| 0.245283
| 0.851415
| 0.851415
| 0.851415
| 0.851415
| 0.851415
| 0.851415
| 0
| 0.123669
| 0.506467
| 4,948
| 77
| 117
| 64.25974
| 0.570844
| 0
| 0
| 0.266667
| 0
| 0
| 0.397897
| 0.162151
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
9d601b648c8f42019e2264c6e459b3708fd51c2b
| 14,175
|
py
|
Python
|
tests/rule_based_profiler/domain_builder/test_domain.py
|
andyjessen/great_expectations
|
74f7f2aa7b51144f34156ed49490dae4edaa5cb7
|
[
"Apache-2.0"
] | null | null | null |
tests/rule_based_profiler/domain_builder/test_domain.py
|
andyjessen/great_expectations
|
74f7f2aa7b51144f34156ed49490dae4edaa5cb7
|
[
"Apache-2.0"
] | null | null | null |
tests/rule_based_profiler/domain_builder/test_domain.py
|
andyjessen/great_expectations
|
74f7f2aa7b51144f34156ed49490dae4edaa5cb7
|
[
"Apache-2.0"
] | null | null | null |
from typing import Optional
import pytest
from great_expectations.rule_based_profiler.helpers.util import (
integer_semantic_domain_type,
)
from great_expectations.rule_based_profiler.types import (
INFERRED_SEMANTIC_TYPE_KEY,
Domain,
SemanticDomainTypes,
)
def test_semantic_domain_consistency():
domain: Domain
with pytest.raises(ValueError) as excinfo:
# noinspection PyUnusedLocal
domain = Domain(
domain_type="column",
domain_kwargs={"column": "passenger_count"},
details={
"estimator": "categorical",
"cardinality": "low",
INFERRED_SEMANTIC_TYPE_KEY: {
"num_passengers": SemanticDomainTypes.NUMERIC,
},
},
rule_name="my_rule",
)
assert (
"""Cannot instantiate Domain (domain_type "MetricDomainTypes.COLUMN" of type "<enum 'MetricDomainTypes'>" -- key "num_passengers", detected in "inferred_semantic_domain_type" dictionary, does not exist as value of appropriate key in "domain_kwargs" dictionary."""
in str(excinfo.value)
)
def test_semantic_domain_serialization():
domain: Domain
domain = Domain(
domain_type="column",
domain_kwargs={"column": "passenger_count"},
details={
"estimator": "categorical",
"cardinality": "low",
},
rule_name="my_rule",
)
assert domain.to_json_dict() == {
"domain_type": "column",
"domain_kwargs": {"column": "passenger_count"},
"details": {
"estimator": "categorical",
"cardinality": "low",
},
"rule_name": "my_rule",
}
domain = Domain(
domain_type="column",
domain_kwargs={"column": "passenger_count"},
details={
"estimator": "categorical",
"cardinality": "low",
INFERRED_SEMANTIC_TYPE_KEY: {
"passenger_count": SemanticDomainTypes.NUMERIC,
},
},
rule_name="my_rule",
)
assert domain.to_json_dict() == {
"domain_type": "column",
"domain_kwargs": {
"column": "passenger_count",
},
"details": {
"estimator": "categorical",
"cardinality": "low",
INFERRED_SEMANTIC_TYPE_KEY: {
"passenger_count": SemanticDomainTypes.NUMERIC.value,
},
},
"rule_name": "my_rule",
}
domain = Domain(
domain_type="column",
domain_kwargs={"column": "passenger_count"},
details={
"estimator": "categorical",
"cardinality": "low",
INFERRED_SEMANTIC_TYPE_KEY: {
"passenger_count": SemanticDomainTypes.NUMERIC,
},
},
rule_name="my_rule",
)
assert domain.to_json_dict() == {
"domain_type": "column",
"domain_kwargs": {
"column": "passenger_count",
},
"details": {
"estimator": "categorical",
"cardinality": "low",
INFERRED_SEMANTIC_TYPE_KEY: {
"passenger_count": SemanticDomainTypes.NUMERIC.value,
},
},
"rule_name": "my_rule",
}
def test_semantic_domain_equivalence():
domain_a: Domain
domain_b: Domain
domain_c: Domain
domain_a = Domain(
domain_type="column",
domain_kwargs={"column": "VendorID"},
details={
INFERRED_SEMANTIC_TYPE_KEY: {
"VendorID": SemanticDomainTypes.NUMERIC,
},
},
rule_name="my_rule",
)
domain_b = Domain(
domain_type="column",
domain_kwargs={"column": "passenger_count"},
details={
INFERRED_SEMANTIC_TYPE_KEY: {
"passenger_count": SemanticDomainTypes.NUMERIC,
},
},
rule_name="my_rule",
)
domain_c = Domain(
domain_type="column",
domain_kwargs={"column": "passenger_count"},
details={
INFERRED_SEMANTIC_TYPE_KEY: {
"passenger_count": SemanticDomainTypes.NUMERIC,
},
},
rule_name="my_rule",
)
assert not (domain_a == domain_b)
assert domain_b == domain_c
domain_a = Domain(
domain_type="column",
domain_kwargs={"column": "VendorID"},
details={
INFERRED_SEMANTIC_TYPE_KEY: {
"VendorID": SemanticDomainTypes.NUMERIC,
},
},
rule_name="my_rule",
)
domain_b = Domain(
domain_type="column",
domain_kwargs={"column": "passenger_count"},
details={
INFERRED_SEMANTIC_TYPE_KEY: {
"passenger_count": SemanticDomainTypes.NUMERIC,
},
},
rule_name="my_rule",
)
domain_c = Domain(
domain_type="column",
domain_kwargs={"column": "passenger_count"},
details={
INFERRED_SEMANTIC_TYPE_KEY: {
"passenger_count": SemanticDomainTypes.NUMERIC,
},
},
rule_name="my_rule",
)
assert not (domain_a == domain_b)
assert domain_b == domain_c
domain_d: Domain = Domain(
domain_type="column",
domain_kwargs={"column": "passenger_count"},
details={
INFERRED_SEMANTIC_TYPE_KEY: {
"passenger_count": "unknown_semantic_type_as_string",
},
},
rule_name="my_rule",
)
with pytest.raises(ValueError) as excinfo:
# noinspection PyUnusedLocal
domain_as_dict: dict = domain_d.to_json_dict()
assert (
"'unknown_semantic_type_as_string' is not a valid SemanticDomainTypes"
in str(excinfo.value)
)
domain_e: Domain = Domain(
domain_type="column",
domain_kwargs={"column": "passenger_count"},
details={
"estimator": "categorical",
"cardinality": "low",
INFERRED_SEMANTIC_TYPE_KEY: {
"passenger_count": "unknown_semantic_type_as_string",
},
},
rule_name="my_rule",
)
with pytest.raises(ValueError) as excinfo:
# noinspection PyUnusedLocal
domain_as_dict: dict = domain_e.to_json_dict()
assert (
"'unknown_semantic_type_as_string' is not a valid SemanticDomainTypes"
in str(excinfo.value)
)
def test_semantic_domain_comparisons_inclusion():
domain_a: Optional[Domain]
domain_b: Optional[Domain]
domain_a = Domain(
domain_type="column",
domain_kwargs={"column": "passenger_count"},
details={
"estimator": "categorical",
"cardinality": "low",
INFERRED_SEMANTIC_TYPE_KEY: {
"passenger_count": SemanticDomainTypes.NUMERIC,
},
},
rule_name="my_rule",
)
domain_b = None
assert domain_a.is_superset(other=domain_b)
domain_b = Domain(
domain_type="column",
domain_kwargs={"column": "passenger_count"},
details={
"estimator": "categorical",
"cardinality": "low",
INFERRED_SEMANTIC_TYPE_KEY: {
"passenger_count": SemanticDomainTypes.NUMERIC,
},
},
rule_name="my_rule",
)
assert domain_a.is_superset(other=domain_b)
domain_b = Domain(
domain_type="column",
domain_kwargs={"column": "passenger_count"},
details={
"estimator": "categorical",
"cardinality": "low",
INFERRED_SEMANTIC_TYPE_KEY: {
"passenger_count": SemanticDomainTypes.NUMERIC,
},
},
rule_name="",
)
assert domain_a.is_superset(other=domain_b)
domain_b = Domain(
domain_type="column",
domain_kwargs=None,
details={
"estimator": "categorical",
"cardinality": "low",
INFERRED_SEMANTIC_TYPE_KEY: {},
},
rule_name=None,
)
assert domain_a.is_superset(other=domain_b)
domain_b = Domain(
domain_type="column",
domain_kwargs=None,
details={
"estimator": "categorical",
"cardinality": "low",
},
)
assert domain_a.is_superset(other=domain_b)
domain_b = Domain(
domain_type="column",
domain_kwargs=None,
details={},
rule_name=None,
)
assert domain_a.is_superset(other=domain_b)
domain_b = Domain(
domain_type="column",
domain_kwargs=None,
details=None,
rule_name="",
)
assert domain_a.is_superset(other=domain_b)
domain_b = Domain(
domain_type="column",
rule_name="",
)
assert domain_a.is_superset(other=domain_b)
domain_b = Domain(
domain_type="column",
rule_name="my_other_rule",
)
assert not domain_a.is_superset(other=domain_b)
domain_b = Domain(
domain_type="table",
rule_name="my_other_rule",
)
assert not domain_a.is_superset(other=domain_b)
domain_b = Domain(
domain_type="table",
rule_name="",
)
assert not domain_a.is_superset(other=domain_b)
domain_b = Domain(
domain_type="table",
rule_name="my_rule",
)
assert not domain_a.is_superset(other=domain_b)
domain_b = Domain(
domain_type="column",
domain_kwargs={"column": "passenger_count"},
details={
"estimator": "categorical",
"cardinality": "medium",
INFERRED_SEMANTIC_TYPE_KEY: {
"passenger_count": SemanticDomainTypes.NUMERIC,
},
},
rule_name="my_rule",
)
assert not domain_a.is_superset(other=domain_b)
domain_b = Domain(
domain_type="column",
domain_kwargs={"column": "fair_amount"},
rule_name="my_rule",
)
assert not domain_a.is_superset(other=domain_b)
domain_b = Domain(
domain_type="column",
domain_kwargs={"column": "fair_amount"},
details={
INFERRED_SEMANTIC_TYPE_KEY: {
"fair_amount": SemanticDomainTypes.CURRENCY,
},
},
rule_name="my_rule",
)
assert not domain_a.is_superset(other=domain_b)
def test_integer_semantic_domain_type():
domain: Domain
domain = Domain(
domain_type="column",
domain_kwargs={
"column": "passenger_count",
},
details={
INFERRED_SEMANTIC_TYPE_KEY: {
"passenger_count": SemanticDomainTypes.NUMERIC,
},
},
rule_name="my_rule",
)
assert not integer_semantic_domain_type(domain=domain)
domain = Domain(
domain_type="column",
domain_kwargs={
"column": "VendorID",
},
details={
INFERRED_SEMANTIC_TYPE_KEY: {
"VendorID": SemanticDomainTypes.IDENTIFIER,
},
},
rule_name="my_rule",
)
assert integer_semantic_domain_type(domain=domain)
domain = Domain(
domain_type="column",
domain_kwargs={
"column": "is_night_time",
},
details={
INFERRED_SEMANTIC_TYPE_KEY: {
"is_night_time": SemanticDomainTypes.LOGIC,
},
},
rule_name="my_rule",
)
assert integer_semantic_domain_type(domain=domain)
domain = Domain(
domain_type="column",
domain_kwargs={
"column_A": "passenger_count",
"column_B": "fare_amount",
},
rule_name="my_rule",
details={
INFERRED_SEMANTIC_TYPE_KEY: {
"passenger_count": SemanticDomainTypes.NUMERIC,
"fare_amount": SemanticDomainTypes.NUMERIC,
},
},
)
assert not integer_semantic_domain_type(domain=domain)
domain = Domain(
domain_type="column",
domain_kwargs={
"column_A": "passenger_count",
"column_B": "VendorID",
},
details={
INFERRED_SEMANTIC_TYPE_KEY: {
"passenger_count": SemanticDomainTypes.NUMERIC,
"VendorID": SemanticDomainTypes.IDENTIFIER,
},
},
rule_name="my_rule",
)
assert not integer_semantic_domain_type(domain=domain)
domain = Domain(
domain_type="column",
domain_kwargs={
"column_A": "is_night_time",
"column_B": "VendorID",
},
details={
INFERRED_SEMANTIC_TYPE_KEY: {
"is_night_time": SemanticDomainTypes.LOGIC,
"VendorID": SemanticDomainTypes.IDENTIFIER,
},
},
rule_name="my_rule",
)
assert integer_semantic_domain_type(domain=domain)
domain = Domain(
domain_type="column",
domain_kwargs={
"column_list": [
"passenger_count",
"fare_amount",
"is_night_time",
],
},
details={
INFERRED_SEMANTIC_TYPE_KEY: {
"passenger_count": SemanticDomainTypes.NUMERIC,
"fare_amount": SemanticDomainTypes.NUMERIC,
"is_night_time": SemanticDomainTypes.LOGIC,
},
},
rule_name="my_rule",
)
assert not integer_semantic_domain_type(domain=domain)
domain = Domain(
domain_type="column",
domain_kwargs={
"column_list": [
"RatecodeID",
"VendorID",
"fare_amount",
"is_night_time",
],
},
details={
INFERRED_SEMANTIC_TYPE_KEY: {
"VendorID": SemanticDomainTypes.IDENTIFIER,
"RatecodeID": SemanticDomainTypes.IDENTIFIER,
"is_night_time": SemanticDomainTypes.LOGIC,
},
},
rule_name="my_rule",
)
assert integer_semantic_domain_type(domain=domain)
| 27.417795
| 271
| 0.556825
| 1,284
| 14,175
| 5.783489
| 0.070872
| 0.121196
| 0.077565
| 0.097765
| 0.908161
| 0.901427
| 0.891193
| 0.884999
| 0.862375
| 0.841772
| 0
| 0
| 0.333898
| 14,175
| 516
| 272
| 27.47093
| 0.786486
| 0.005644
| 0
| 0.679739
| 0
| 0
| 0.171536
| 0.009257
| 0
| 0
| 0
| 0
| 0.071895
| 1
| 0.010893
| false
| 0.087146
| 0.008715
| 0
| 0.019608
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
c2292b7cf1c207cab0c1d872af5b992028712cfd
| 127
|
py
|
Python
|
OpenMatch/data/__init__.py
|
vishalbelsare/OpenMatch
|
84b25502bf52c58b9e71bd0754b2fc192d9b448f
|
[
"MIT"
] | 403
|
2020-01-17T06:54:46.000Z
|
2022-03-30T05:47:42.000Z
|
OpenMatch/data/__init__.py
|
vishalbelsare/OpenMatch
|
84b25502bf52c58b9e71bd0754b2fc192d9b448f
|
[
"MIT"
] | 30
|
2020-06-07T12:28:07.000Z
|
2022-03-20T05:26:03.000Z
|
OpenMatch/data/__init__.py
|
vishalbelsare/OpenMatch
|
84b25502bf52c58b9e71bd0754b2fc192d9b448f
|
[
"MIT"
] | 48
|
2020-07-15T09:45:46.000Z
|
2022-03-01T07:27:59.000Z
|
from OpenMatch.data.dataloader import DataLoader
from OpenMatch.data.datasets import *
from OpenMatch.data.tokenizers import *
| 31.75
| 48
| 0.84252
| 16
| 127
| 6.6875
| 0.4375
| 0.364486
| 0.476636
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.094488
| 127
| 3
| 49
| 42.333333
| 0.930435
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
c233d2a46dc3eadbe682ee06973916550ef9c65b
| 3,855
|
py
|
Python
|
tests/test_waf.py
|
roecla/econ-project-templates
|
7625f6938094dd02bb863a67d4d5912c584df14d
|
[
"BSD-3-Clause"
] | null | null | null |
tests/test_waf.py
|
roecla/econ-project-templates
|
7625f6938094dd02bb863a67d4d5912c584df14d
|
[
"BSD-3-Clause"
] | null | null | null |
tests/test_waf.py
|
roecla/econ-project-templates
|
7625f6938094dd02bb863a67d4d5912c584df14d
|
[
"BSD-3-Clause"
] | null | null | null |
import subprocess
import os
import re
import shutil
import pytest
from test_cookie import basic_project_dict
PROJECT_DIRECTORY = os.path.realpath(os.path.curdir)
def remove_dir(dirpath):
shutil.rmtree(os.path.join(PROJECT_DIRECTORY, dirpath))
def _check_configure(result):
try:
log_configure = subprocess.check_output(
["python", "{}".format(result.project.join("waf.py")), "configure"]
)
except subprocess.CalledProcessError as e:
log_configure = e.output
print(log_configure)
assert re.search(r"configure\\?[\'\"] finished successfully", str(log_configure))
def _check_build(result):
try:
log_build = subprocess.check_output(
["python", "{}".format(result.project.join("waf.py")), "configure", "build"]
)
except subprocess.CalledProcessError as e:
log_build = e.output
print(str(log_build))
assert re.search(r"build\\?[\'\"] finished successfully", str(log_build))
def test_waf_configure_python(cookies, basic_project_dict):
result = cookies.bake(extra_context=basic_project_dict)
_check_configure(result)
def test_waf_build_python(cookies, basic_project_dict):
result = cookies.bake(extra_context=basic_project_dict)
_check_build(result)
def test_waf_configure_python_bibtex(cookies, basic_project_dict):
basic_project_dict["use_biber_biblatex_for_tex_bibliographies"] = "n"
result = cookies.bake(extra_context=basic_project_dict)
_check_configure(result)
def test_waf_build_python_bibtex(cookies, basic_project_dict):
basic_project_dict["use_biber_biblatex_for_tex_bibliographies"] = "n"
result = cookies.bake(extra_context=basic_project_dict)
_check_build(result)
def test_waf_build_python_normalise_title(cookies, basic_project_dict):
basic_project_dict["project_name"] = "x_y"
result = cookies.bake(extra_context=basic_project_dict)
_check_build(result)
@pytest.mark.xfail
def test_waf_configure_r(cookies, basic_project_dict):
basic_project_dict["example_to_install"] = "R"
result = cookies.bake(extra_context=basic_project_dict)
_check_configure(result)
@pytest.mark.xfail
def test_waf_build_r(cookies, basic_project_dict):
basic_project_dict["example_to_install"] = "R"
result = cookies.bake(extra_context=basic_project_dict)
_check_build(result)
@pytest.mark.xfail
def test_waf_configure_stata(cookies, basic_project_dict):
basic_project_dict["example_to_install"] = "Stata"
result = cookies.bake(extra_context=basic_project_dict)
_check_configure(result)
@pytest.mark.xfail
def test_waf_build_stata(cookies, basic_project_dict):
basic_project_dict["example_to_install"] = "Stata"
result = cookies.bake(extra_context=basic_project_dict)
_check_build(result)
@pytest.mark.xfail
def test_waf_configure_matlab(cookies, basic_project_dict):
basic_project_dict["example_to_install"] = "Matlab"
result = cookies.bake(extra_context=basic_project_dict)
_check_configure(result)
@pytest.mark.xfail
def test_waf_build_matlab(cookies, basic_project_dict):
basic_project_dict["example_to_install"] = "Matlab"
result = cookies.bake(extra_context=basic_project_dict)
_check_build(result)
@pytest.mark.xfail
def test_waf_configure_julia(cookies, basic_project_dict):
basic_project_dict[
"example_to_install"
] = "Julia (Warning: You will need to fix a lot yourself! Patches welcome!)"
result = cookies.bake(extra_context=basic_project_dict)
_check_configure(result)
@pytest.mark.xfail
def test_waf_build_julia(cookies, basic_project_dict):
basic_project_dict[
"example_to_install"
] = "Julia (Warning: You will need to fix a lot yourself! Patches welcome!)"
result = cookies.bake(extra_context=basic_project_dict)
_check_build(result)
| 30.595238
| 88
| 0.757977
| 512
| 3,855
| 5.308594
| 0.154297
| 0.16777
| 0.223694
| 0.110007
| 0.827447
| 0.817145
| 0.783664
| 0.769316
| 0.769316
| 0.769316
| 0
| 0
| 0.142672
| 3,855
| 125
| 89
| 30.84
| 0.82239
| 0
| 0
| 0.58427
| 0
| 0
| 0.12607
| 0.021271
| 0
| 0
| 0
| 0
| 0.022472
| 1
| 0.179775
| false
| 0
| 0.067416
| 0
| 0.247191
| 0.022472
| 0
| 0
| 0
| null | 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
c242ea5672f80f1bd42c7f8591c2e1bcf6f9910b
| 96
|
py
|
Python
|
baldrick/blueprints/__init__.py
|
ConorMacBride/baldrick
|
cf93ea5abc4a207afd5c722b6b42a33b0ba91880
|
[
"MIT"
] | 14
|
2018-11-24T20:29:26.000Z
|
2022-03-25T14:53:06.000Z
|
baldrick/blueprints/__init__.py
|
ConorMacBride/baldrick
|
cf93ea5abc4a207afd5c722b6b42a33b0ba91880
|
[
"MIT"
] | 84
|
2018-09-11T13:53:55.000Z
|
2022-02-04T14:51:04.000Z
|
baldrick/blueprints/__init__.py
|
ConorMacBride/baldrick
|
cf93ea5abc4a207afd5c722b6b42a33b0ba91880
|
[
"MIT"
] | 7
|
2018-08-01T16:06:31.000Z
|
2021-12-15T16:23:52.000Z
|
from .circleci import circleci_blueprint # noqa
from .github import github_blueprint # noqa
| 32
| 49
| 0.791667
| 12
| 96
| 6.166667
| 0.5
| 0.351351
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.166667
| 96
| 2
| 50
| 48
| 0.925
| 0.09375
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 1
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 1
|
0
| 7
|
dfb012adb657ac9399e78addfd2318367633b781
| 275
|
py
|
Python
|
angr/procedures/win32/critical_section.py
|
Kyle-Kyle/angr
|
345b2131a7a67e3a6ffc7d9fd475146a3e12f837
|
[
"BSD-2-Clause"
] | 6,132
|
2015-08-06T23:24:47.000Z
|
2022-03-31T21:49:34.000Z
|
angr/procedures/win32/critical_section.py
|
Kyle-Kyle/angr
|
345b2131a7a67e3a6ffc7d9fd475146a3e12f837
|
[
"BSD-2-Clause"
] | 2,272
|
2015-08-10T08:40:07.000Z
|
2022-03-31T23:46:44.000Z
|
angr/procedures/win32/critical_section.py
|
Kyle-Kyle/angr
|
345b2131a7a67e3a6ffc7d9fd475146a3e12f837
|
[
"BSD-2-Clause"
] | 1,155
|
2015-08-06T23:37:39.000Z
|
2022-03-31T05:54:11.000Z
|
import angr
class InitializeCriticalSectionAndSpinCount(angr.SimProcedure):
def run(self, lpCriticalSection, dwSpinCount):
return 1
class InitializeCriticalSectionEx(angr.SimProcedure):
def run(self, lpCriticalSection, dwSpinCount, Flags):
return 1
| 27.5
| 63
| 0.767273
| 25
| 275
| 8.44
| 0.56
| 0.151659
| 0.180095
| 0.208531
| 0.511848
| 0.511848
| 0.511848
| 0
| 0
| 0
| 0
| 0.008696
| 0.163636
| 275
| 9
| 64
| 30.555556
| 0.908696
| 0
| 0
| 0.285714
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.285714
| false
| 0
| 0.142857
| 0.285714
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
dffb91ab1a69063b61425fa986a7327073a1fb2d
| 62
|
py
|
Python
|
multiply.py
|
SudalaiRajkumar/Test-
|
61b7db6c09d7b9507a17944ed83ed09b6cc434b1
|
[
"Apache-2.0"
] | 1
|
2018-09-08T10:55:13.000Z
|
2018-09-08T10:55:13.000Z
|
multiply.py
|
SudalaiRajkumar/Test-
|
61b7db6c09d7b9507a17944ed83ed09b6cc434b1
|
[
"Apache-2.0"
] | null | null | null |
multiply.py
|
SudalaiRajkumar/Test-
|
61b7db6c09d7b9507a17944ed83ed09b6cc434b1
|
[
"Apache-2.0"
] | null | null | null |
def multiply(a,b):
return a*b
def divide(a,b):
return a/b
| 10.333333
| 18
| 0.645161
| 14
| 62
| 2.857143
| 0.428571
| 0.2
| 0.4
| 0.45
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.193548
| 62
| 5
| 19
| 12.4
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0.5
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
5f1f32bfb38e70dde7f50823c70f27cc5df4a260
| 9,098
|
py
|
Python
|
medidas/migrations/0023_auto_20210525_1133.py
|
SolanoJason/kiwioptics
|
d5d1fb8383710ca1e431c01407c2c9e1e227b053
|
[
"MIT"
] | 1
|
2021-04-16T22:47:41.000Z
|
2021-04-16T22:47:41.000Z
|
medidas/migrations/0023_auto_20210525_1133.py
|
SolanoJason/kiwioptics
|
d5d1fb8383710ca1e431c01407c2c9e1e227b053
|
[
"MIT"
] | null | null | null |
medidas/migrations/0023_auto_20210525_1133.py
|
SolanoJason/kiwioptics
|
d5d1fb8383710ca1e431c01407c2c9e1e227b053
|
[
"MIT"
] | null | null | null |
# Generated by Django 3.1.6 on 2021-05-25 11:33
from decimal import Decimal
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('medidas', '0022_auto_20210504_1710'),
]
operations = [
migrations.AlterField(
model_name='prescription',
name='far_cylinder_left',
field=models.DecimalField(blank=True, choices=[('', '--'), (Decimal('0.00'), '0.00'), (Decimal('-0.25'), '-0.25'), (Decimal('-0.50'), '-0.50'), (Decimal('-0.75'), '-0.75'), (Decimal('-1.00'), '-1.00'), (Decimal('-1.25'), '-1.25'), (Decimal('-1.50'), '-1.50'), (Decimal('-1.75'), '-1.75'), (Decimal('-2.00'), '-2.00'), (Decimal('-2.25'), '-2.25'), (Decimal('-2.50'), '-2.50'), (Decimal('-2.75'), '-2.75'), (Decimal('-3.00'), '-3.00'), (Decimal('-3.25'), '-3.25'), (Decimal('-3.50'), '-3.50'), (Decimal('-3.75'), '-3.75'), (Decimal('-4.00'), '-4.00'), (Decimal('-4.25'), '-4.25'), (Decimal('-4.50'), '-4.50'), (Decimal('-4.75'), '-4.75'), (Decimal('-5.00'), '-5.00'), (Decimal('-5.25'), '-5.25'), (Decimal('-5.50'), '-5.50'), (Decimal('-5.75'), '-5.75'), (Decimal('-6.00'), '-6.00'), (Decimal('-6.25'), '-6.25'), (Decimal('-6.50'), '-6.50'), (Decimal('-6.75'), '-6.75'), (Decimal('-7.00'), '-7.00'), (Decimal('-7.25'), '-7.25'), (Decimal('-7.50'), '-7.50'), (Decimal('-7.75'), '-7.75'), (Decimal('-8.00'), '-8.00'), (Decimal('-8.25'), '-8.25'), (Decimal('-8.50'), '-8.50'), (Decimal('-8.75'), '-8.75'), (Decimal('-9.00'), '-9.00'), (Decimal('-9.25'), '-9.25'), (Decimal('-9.50'), '-9.50'), (Decimal('-9.75'), '-9.75'), (Decimal('-10.00'), '-10.00')], decimal_places=2, max_digits=4, null=True, verbose_name='Cil. izquierdo Lejos'),
),
migrations.AlterField(
model_name='prescription',
name='far_cylinder_right',
field=models.DecimalField(blank=True, choices=[('', '--'), (Decimal('0.00'), '0.00'), (Decimal('-0.25'), '-0.25'), (Decimal('-0.50'), '-0.50'), (Decimal('-0.75'), '-0.75'), (Decimal('-1.00'), '-1.00'), (Decimal('-1.25'), '-1.25'), (Decimal('-1.50'), '-1.50'), (Decimal('-1.75'), '-1.75'), (Decimal('-2.00'), '-2.00'), (Decimal('-2.25'), '-2.25'), (Decimal('-2.50'), '-2.50'), (Decimal('-2.75'), '-2.75'), (Decimal('-3.00'), '-3.00'), (Decimal('-3.25'), '-3.25'), (Decimal('-3.50'), '-3.50'), (Decimal('-3.75'), '-3.75'), (Decimal('-4.00'), '-4.00'), (Decimal('-4.25'), '-4.25'), (Decimal('-4.50'), '-4.50'), (Decimal('-4.75'), '-4.75'), (Decimal('-5.00'), '-5.00'), (Decimal('-5.25'), '-5.25'), (Decimal('-5.50'), '-5.50'), (Decimal('-5.75'), '-5.75'), (Decimal('-6.00'), '-6.00'), (Decimal('-6.25'), '-6.25'), (Decimal('-6.50'), '-6.50'), (Decimal('-6.75'), '-6.75'), (Decimal('-7.00'), '-7.00'), (Decimal('-7.25'), '-7.25'), (Decimal('-7.50'), '-7.50'), (Decimal('-7.75'), '-7.75'), (Decimal('-8.00'), '-8.00'), (Decimal('-8.25'), '-8.25'), (Decimal('-8.50'), '-8.50'), (Decimal('-8.75'), '-8.75'), (Decimal('-9.00'), '-9.00'), (Decimal('-9.25'), '-9.25'), (Decimal('-9.50'), '-9.50'), (Decimal('-9.75'), '-9.75'), (Decimal('-10.00'), '-10.00')], decimal_places=2, max_digits=4, null=True, verbose_name='Cil. derecho Lejos'),
),
migrations.AlterField(
model_name='prescription',
name='intermediate_cylinder_left',
field=models.DecimalField(blank=True, choices=[('', '--'), (Decimal('0.00'), '0.00'), (Decimal('-0.25'), '-0.25'), (Decimal('-0.50'), '-0.50'), (Decimal('-0.75'), '-0.75'), (Decimal('-1.00'), '-1.00'), (Decimal('-1.25'), '-1.25'), (Decimal('-1.50'), '-1.50'), (Decimal('-1.75'), '-1.75'), (Decimal('-2.00'), '-2.00'), (Decimal('-2.25'), '-2.25'), (Decimal('-2.50'), '-2.50'), (Decimal('-2.75'), '-2.75'), (Decimal('-3.00'), '-3.00'), (Decimal('-3.25'), '-3.25'), (Decimal('-3.50'), '-3.50'), (Decimal('-3.75'), '-3.75'), (Decimal('-4.00'), '-4.00'), (Decimal('-4.25'), '-4.25'), (Decimal('-4.50'), '-4.50'), (Decimal('-4.75'), '-4.75'), (Decimal('-5.00'), '-5.00'), (Decimal('-5.25'), '-5.25'), (Decimal('-5.50'), '-5.50'), (Decimal('-5.75'), '-5.75'), (Decimal('-6.00'), '-6.00'), (Decimal('-6.25'), '-6.25'), (Decimal('-6.50'), '-6.50'), (Decimal('-6.75'), '-6.75'), (Decimal('-7.00'), '-7.00'), (Decimal('-7.25'), '-7.25'), (Decimal('-7.50'), '-7.50'), (Decimal('-7.75'), '-7.75'), (Decimal('-8.00'), '-8.00'), (Decimal('-8.25'), '-8.25'), (Decimal('-8.50'), '-8.50'), (Decimal('-8.75'), '-8.75'), (Decimal('-9.00'), '-9.00'), (Decimal('-9.25'), '-9.25'), (Decimal('-9.50'), '-9.50'), (Decimal('-9.75'), '-9.75'), (Decimal('-10.00'), '-10.00')], decimal_places=2, max_digits=4, null=True, verbose_name='Cil. izquierdo intermedio'),
),
migrations.AlterField(
model_name='prescription',
name='intermediate_cylinder_right',
field=models.DecimalField(blank=True, choices=[('', '--'), (Decimal('0.00'), '0.00'), (Decimal('-0.25'), '-0.25'), (Decimal('-0.50'), '-0.50'), (Decimal('-0.75'), '-0.75'), (Decimal('-1.00'), '-1.00'), (Decimal('-1.25'), '-1.25'), (Decimal('-1.50'), '-1.50'), (Decimal('-1.75'), '-1.75'), (Decimal('-2.00'), '-2.00'), (Decimal('-2.25'), '-2.25'), (Decimal('-2.50'), '-2.50'), (Decimal('-2.75'), '-2.75'), (Decimal('-3.00'), '-3.00'), (Decimal('-3.25'), '-3.25'), (Decimal('-3.50'), '-3.50'), (Decimal('-3.75'), '-3.75'), (Decimal('-4.00'), '-4.00'), (Decimal('-4.25'), '-4.25'), (Decimal('-4.50'), '-4.50'), (Decimal('-4.75'), '-4.75'), (Decimal('-5.00'), '-5.00'), (Decimal('-5.25'), '-5.25'), (Decimal('-5.50'), '-5.50'), (Decimal('-5.75'), '-5.75'), (Decimal('-6.00'), '-6.00'), (Decimal('-6.25'), '-6.25'), (Decimal('-6.50'), '-6.50'), (Decimal('-6.75'), '-6.75'), (Decimal('-7.00'), '-7.00'), (Decimal('-7.25'), '-7.25'), (Decimal('-7.50'), '-7.50'), (Decimal('-7.75'), '-7.75'), (Decimal('-8.00'), '-8.00'), (Decimal('-8.25'), '-8.25'), (Decimal('-8.50'), '-8.50'), (Decimal('-8.75'), '-8.75'), (Decimal('-9.00'), '-9.00'), (Decimal('-9.25'), '-9.25'), (Decimal('-9.50'), '-9.50'), (Decimal('-9.75'), '-9.75'), (Decimal('-10.00'), '-10.00')], decimal_places=2, max_digits=4, null=True, verbose_name='Cil. derecho intermedio'),
),
migrations.AlterField(
model_name='prescription',
name='near_cylinder_left',
field=models.DecimalField(blank=True, choices=[('', '--'), (Decimal('0.00'), '0.00'), (Decimal('-0.25'), '-0.25'), (Decimal('-0.50'), '-0.50'), (Decimal('-0.75'), '-0.75'), (Decimal('-1.00'), '-1.00'), (Decimal('-1.25'), '-1.25'), (Decimal('-1.50'), '-1.50'), (Decimal('-1.75'), '-1.75'), (Decimal('-2.00'), '-2.00'), (Decimal('-2.25'), '-2.25'), (Decimal('-2.50'), '-2.50'), (Decimal('-2.75'), '-2.75'), (Decimal('-3.00'), '-3.00'), (Decimal('-3.25'), '-3.25'), (Decimal('-3.50'), '-3.50'), (Decimal('-3.75'), '-3.75'), (Decimal('-4.00'), '-4.00'), (Decimal('-4.25'), '-4.25'), (Decimal('-4.50'), '-4.50'), (Decimal('-4.75'), '-4.75'), (Decimal('-5.00'), '-5.00'), (Decimal('-5.25'), '-5.25'), (Decimal('-5.50'), '-5.50'), (Decimal('-5.75'), '-5.75'), (Decimal('-6.00'), '-6.00'), (Decimal('-6.25'), '-6.25'), (Decimal('-6.50'), '-6.50'), (Decimal('-6.75'), '-6.75'), (Decimal('-7.00'), '-7.00'), (Decimal('-7.25'), '-7.25'), (Decimal('-7.50'), '-7.50'), (Decimal('-7.75'), '-7.75'), (Decimal('-8.00'), '-8.00'), (Decimal('-8.25'), '-8.25'), (Decimal('-8.50'), '-8.50'), (Decimal('-8.75'), '-8.75'), (Decimal('-9.00'), '-9.00'), (Decimal('-9.25'), '-9.25'), (Decimal('-9.50'), '-9.50'), (Decimal('-9.75'), '-9.75'), (Decimal('-10.00'), '-10.00')], decimal_places=2, max_digits=4, null=True, verbose_name='Cil. izquierdo Cerca'),
),
migrations.AlterField(
model_name='prescription',
name='near_cylinder_right',
field=models.DecimalField(blank=True, choices=[('', '--'), (Decimal('0.00'), '0.00'), (Decimal('-0.25'), '-0.25'), (Decimal('-0.50'), '-0.50'), (Decimal('-0.75'), '-0.75'), (Decimal('-1.00'), '-1.00'), (Decimal('-1.25'), '-1.25'), (Decimal('-1.50'), '-1.50'), (Decimal('-1.75'), '-1.75'), (Decimal('-2.00'), '-2.00'), (Decimal('-2.25'), '-2.25'), (Decimal('-2.50'), '-2.50'), (Decimal('-2.75'), '-2.75'), (Decimal('-3.00'), '-3.00'), (Decimal('-3.25'), '-3.25'), (Decimal('-3.50'), '-3.50'), (Decimal('-3.75'), '-3.75'), (Decimal('-4.00'), '-4.00'), (Decimal('-4.25'), '-4.25'), (Decimal('-4.50'), '-4.50'), (Decimal('-4.75'), '-4.75'), (Decimal('-5.00'), '-5.00'), (Decimal('-5.25'), '-5.25'), (Decimal('-5.50'), '-5.50'), (Decimal('-5.75'), '-5.75'), (Decimal('-6.00'), '-6.00'), (Decimal('-6.25'), '-6.25'), (Decimal('-6.50'), '-6.50'), (Decimal('-6.75'), '-6.75'), (Decimal('-7.00'), '-7.00'), (Decimal('-7.25'), '-7.25'), (Decimal('-7.50'), '-7.50'), (Decimal('-7.75'), '-7.75'), (Decimal('-8.00'), '-8.00'), (Decimal('-8.25'), '-8.25'), (Decimal('-8.50'), '-8.50'), (Decimal('-8.75'), '-8.75'), (Decimal('-9.00'), '-9.00'), (Decimal('-9.25'), '-9.25'), (Decimal('-9.50'), '-9.50'), (Decimal('-9.75'), '-9.75'), (Decimal('-10.00'), '-10.00')], decimal_places=2, max_digits=4, null=True, verbose_name='Cil. derecho Cerca'),
),
]
| 202.177778
| 1,347
| 0.484062
| 1,431
| 9,098
| 3.050314
| 0.044724
| 0.136082
| 0.034364
| 0.039863
| 0.957847
| 0.957847
| 0.957847
| 0.950974
| 0.880412
| 0.880412
| 0
| 0.188686
| 0.108156
| 9,098
| 44
| 1,348
| 206.772727
| 0.349273
| 0.004946
| 0
| 0.473684
| 1
| 0
| 0.313423
| 0.008438
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.052632
| 0
| 0.131579
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
a043a2a5659f6b7dd5790c18d917303c4e8feb67
| 3,250
|
py
|
Python
|
tests/test_streaming.py
|
Hanaasagi/requests-async
|
aed4c6cd8a4e5183449426ce99f4f4026fa6f2e1
|
[
"Apache-2.0"
] | 1,057
|
2019-03-21T12:43:58.000Z
|
2022-02-04T15:07:33.000Z
|
tests/test_streaming.py
|
Hanaasagi/requests-async
|
aed4c6cd8a4e5183449426ce99f4f4026fa6f2e1
|
[
"Apache-2.0"
] | 42
|
2019-03-21T13:58:49.000Z
|
2019-07-22T21:40:21.000Z
|
tests/test_streaming.py
|
Hanaasagi/requests-async
|
aed4c6cd8a4e5183449426ce99f4f4026fa6f2e1
|
[
"Apache-2.0"
] | 48
|
2019-03-21T12:45:10.000Z
|
2022-01-20T03:24:07.000Z
|
import asyncio
import pytest
import requests_async
@pytest.mark.asyncio
async def test_content_not_available_on_stream(server):
url = "http://127.0.0.1:8000/hello_world"
response = await requests_async.get(url, stream=True)
assert response.status_code == 200
with pytest.raises(requests_async.exceptions.ContentNotAvailable):
response.content
@pytest.mark.asyncio
async def test_iter_content_on_stream(server):
url = "http://127.0.0.1:8000/hello_world"
response = await requests_async.get(url, stream=True)
assert response.status_code == 200
content = b""
async for chunk in response.iter_content():
assert len(chunk) == 1
content += chunk
assert content == b"Hello, world!"
@pytest.mark.asyncio
async def test_iter_text_on_stream(server):
url = "http://127.0.0.1:8000/hello_world"
response = await requests_async.get(url, stream=True)
assert response.status_code == 200
content = ""
async for chunk in response.iter_content(decode_unicode=True):
assert len(chunk) == 1
content += chunk
assert content == "Hello, world!"
@pytest.mark.asyncio
async def test_iter_content_on_content(server):
url = "http://127.0.0.1:8000/hello_world"
response = await requests_async.get(url)
assert response.status_code == 200
content = b""
async for chunk in response.iter_content():
assert len(chunk) == 1
content += chunk
assert content == b"Hello, world!"
@pytest.mark.asyncio
async def test_iter_text_on_content(server):
url = "http://127.0.0.1:8000/hello_world"
response = await requests_async.get(url)
assert response.status_code == 200
content = ""
async for chunk in response.iter_content(decode_unicode=True):
assert len(chunk) == 1
content += chunk
assert content == "Hello, world!"
@pytest.mark.asyncio
async def test_iter(server):
url = "http://127.0.0.1:8000/hello_world"
response = await requests_async.get(url, stream=True)
assert response.status_code == 200
content = b""
async for chunk in response:
content += chunk
assert content == b"Hello, world!"
@pytest.mark.asyncio
async def test_iter_lines(server):
url = "http://127.0.0.1:8000/hello_world"
response = await requests_async.get(url, stream=True)
assert response.status_code == 200
lines = []
async for line in response.iter_lines(decode_unicode=True):
lines.append(line)
assert lines == ["Hello, world!"]
@pytest.mark.asyncio
async def test_iter_lines_with_delimiter(server):
url = "http://127.0.0.1:8000/hello_world"
response = await requests_async.get(url, stream=True)
assert response.status_code == 200
lines = []
async for line in response.iter_lines(decode_unicode=True, delimiter=" "):
lines.append(line)
assert lines == ["Hello,", "world!"]
@pytest.mark.asyncio
async def test_stream_request(server):
url = "http://127.0.0.1:8000/"
async def stream():
yield b"e"
yield b"xample"
response = await requests_async.post(url, data=stream())
assert response.status_code == 200
assert response.json() == {"method": "POST", "url": url, "body": "example"}
| 29.816514
| 79
| 0.681846
| 452
| 3,250
| 4.752212
| 0.126106
| 0.069832
| 0.071229
| 0.092179
| 0.86406
| 0.85149
| 0.837989
| 0.837989
| 0.827281
| 0.8054
| 0
| 0.046201
| 0.194154
| 3,250
| 108
| 80
| 30.092593
| 0.77396
| 0
| 0
| 0.709302
| 0
| 0
| 0.125538
| 0
| 0
| 0
| 0
| 0
| 0.244186
| 1
| 0
| false
| 0
| 0.034884
| 0
| 0.034884
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
a0580f5659908c3858ead245c25c37a08cff738e
| 19,401
|
py
|
Python
|
bigtable/snippets/filters/snapshots/snap_filters_test.py
|
summersab/python-docs-samples
|
7c1e9685fe190f7789d8e1dbcfe8c01a20e3dc66
|
[
"Apache-2.0"
] | 34
|
2020-07-27T19:14:01.000Z
|
2022-03-31T14:46:53.000Z
|
bigtable/snippets/filters/snapshots/snap_filters_test.py
|
summersab/python-docs-samples
|
7c1e9685fe190f7789d8e1dbcfe8c01a20e3dc66
|
[
"Apache-2.0"
] | 254
|
2020-01-31T23:44:06.000Z
|
2022-03-23T22:52:49.000Z
|
bigtable/snippets/filters/snapshots/snap_filters_test.py
|
summersab/python-docs-samples
|
7c1e9685fe190f7789d8e1dbcfe8c01a20e3dc66
|
[
"Apache-2.0"
] | 30
|
2020-01-31T20:45:34.000Z
|
2022-03-23T19:56:42.000Z
|
# -*- coding: utf-8 -*-
# snapshottest: v1 - https://goo.gl/zC4yUc
# flake8: noqa
from __future__ import unicode_literals
from snapshottest import Snapshot
# Snapshot registry for the Bigtable filter-snippet tests. This file is
# auto-generated by snapshottest; regenerate with the snapshot-update flow
# rather than editing the expected-output literals by hand.
snapshots = Snapshot()
# Expected stdout for test_filter_limit_row_regex (two matching rows).
snapshots['test_filter_limit_row_regex 1'] = '''Reading data for phone#4c410523#20190501:
Column Family cell_plan
\tdata_plan_01gb: false @2019-05-01 00:00:00+00:00
\tdata_plan_01gb: true @2019-04-30 23:00:00+00:00
\tdata_plan_05gb: true @2019-05-01 00:00:00+00:00
Column Family stats_summary
\tconnected_cell: \x00\x00\x00\x00\x00\x00\x00\x01 @2019-05-01 00:00:00+00:00
\tconnected_wifi: \x00\x00\x00\x00\x00\x00\x00\x01 @2019-05-01 00:00:00+00:00
\tos_build: PQ2A.190405.003 @2019-05-01 00:00:00+00:00
Reading data for phone#5c10102#20190501:
Column Family cell_plan
\tdata_plan_10gb: true @2019-05-01 00:00:00+00:00
Column Family stats_summary
\tconnected_cell: \x00\x00\x00\x00\x00\x00\x00\x01 @2019-05-01 00:00:00+00:00
\tconnected_wifi: \x00\x00\x00\x00\x00\x00\x00\x01 @2019-05-01 00:00:00+00:00
\tos_build: PQ2A.190401.002 @2019-05-01 00:00:00+00:00
'''
snapshots['test_filter_limit_cells_per_col 1'] = '''Reading data for phone#4c410523#20190501:
Column Family cell_plan
\tdata_plan_01gb: false @2019-05-01 00:00:00+00:00
\tdata_plan_01gb: true @2019-04-30 23:00:00+00:00
\tdata_plan_05gb: true @2019-05-01 00:00:00+00:00
Column Family stats_summary
\tconnected_cell: \x00\x00\x00\x00\x00\x00\x00\x01 @2019-05-01 00:00:00+00:00
\tconnected_wifi: \x00\x00\x00\x00\x00\x00\x00\x01 @2019-05-01 00:00:00+00:00
\tos_build: PQ2A.190405.003 @2019-05-01 00:00:00+00:00
Reading data for phone#4c410523#20190502:
Column Family cell_plan
\tdata_plan_05gb: true @2019-05-01 00:00:00+00:00
Column Family stats_summary
\tconnected_cell: \x00\x00\x00\x00\x00\x00\x00\x01 @2019-05-01 00:00:00+00:00
\tconnected_wifi: \x00\x00\x00\x00\x00\x00\x00\x01 @2019-05-01 00:00:00+00:00
\tos_build: PQ2A.190405.004 @2019-05-01 00:00:00+00:00
Reading data for phone#4c410523#20190505:
Column Family cell_plan
\tdata_plan_05gb: true @2019-05-01 00:00:00+00:00
Column Family stats_summary
\tconnected_cell: \x00\x00\x00\x00\x00\x00\x00\x00 @2019-05-01 00:00:00+00:00
\tconnected_wifi: \x00\x00\x00\x00\x00\x00\x00\x01 @2019-05-01 00:00:00+00:00
\tos_build: PQ2A.190406.000 @2019-05-01 00:00:00+00:00
Reading data for phone#5c10102#20190501:
Column Family cell_plan
\tdata_plan_10gb: true @2019-05-01 00:00:00+00:00
Column Family stats_summary
\tconnected_cell: \x00\x00\x00\x00\x00\x00\x00\x01 @2019-05-01 00:00:00+00:00
\tconnected_wifi: \x00\x00\x00\x00\x00\x00\x00\x01 @2019-05-01 00:00:00+00:00
\tos_build: PQ2A.190401.002 @2019-05-01 00:00:00+00:00
Reading data for phone#5c10102#20190502:
Column Family cell_plan
\tdata_plan_10gb: true @2019-05-01 00:00:00+00:00
Column Family stats_summary
\tconnected_cell: \x00\x00\x00\x00\x00\x00\x00\x01 @2019-05-01 00:00:00+00:00
\tconnected_wifi: \x00\x00\x00\x00\x00\x00\x00\x00 @2019-05-01 00:00:00+00:00
\tos_build: PQ2A.190406.000 @2019-05-01 00:00:00+00:00
'''
# Expected stdout for test_filter_limit_cells_per_row (first cells only).
snapshots['test_filter_limit_cells_per_row 1'] = '''Reading data for phone#4c410523#20190501:
Column Family cell_plan
\tdata_plan_01gb: false @2019-05-01 00:00:00+00:00
\tdata_plan_01gb: true @2019-04-30 23:00:00+00:00
Reading data for phone#4c410523#20190502:
Column Family cell_plan
\tdata_plan_05gb: true @2019-05-01 00:00:00+00:00
Column Family stats_summary
\tconnected_cell: \x00\x00\x00\x00\x00\x00\x00\x01 @2019-05-01 00:00:00+00:00
Reading data for phone#4c410523#20190505:
Column Family cell_plan
\tdata_plan_05gb: true @2019-05-01 00:00:00+00:00
Column Family stats_summary
\tconnected_cell: \x00\x00\x00\x00\x00\x00\x00\x00 @2019-05-01 00:00:00+00:00
Reading data for phone#5c10102#20190501:
Column Family cell_plan
\tdata_plan_10gb: true @2019-05-01 00:00:00+00:00
Column Family stats_summary
\tconnected_cell: \x00\x00\x00\x00\x00\x00\x00\x01 @2019-05-01 00:00:00+00:00
Reading data for phone#5c10102#20190502:
Column Family cell_plan
\tdata_plan_10gb: true @2019-05-01 00:00:00+00:00
Column Family stats_summary
\tconnected_cell: \x00\x00\x00\x00\x00\x00\x00\x01 @2019-05-01 00:00:00+00:00
'''
# Expected stdout for test_filter_limit_cells_per_row_offset (cells after
# the offset are kept).
snapshots['test_filter_limit_cells_per_row_offset 1'] = '''Reading data for phone#4c410523#20190501:
Column Family cell_plan
\tdata_plan_05gb: true @2019-05-01 00:00:00+00:00
Column Family stats_summary
\tconnected_cell: \x00\x00\x00\x00\x00\x00\x00\x01 @2019-05-01 00:00:00+00:00
\tconnected_wifi: \x00\x00\x00\x00\x00\x00\x00\x01 @2019-05-01 00:00:00+00:00
\tos_build: PQ2A.190405.003 @2019-05-01 00:00:00+00:00
Reading data for phone#4c410523#20190502:
Column Family stats_summary
\tconnected_wifi: \x00\x00\x00\x00\x00\x00\x00\x01 @2019-05-01 00:00:00+00:00
\tos_build: PQ2A.190405.004 @2019-05-01 00:00:00+00:00
Reading data for phone#4c410523#20190505:
Column Family stats_summary
\tconnected_wifi: \x00\x00\x00\x00\x00\x00\x00\x01 @2019-05-01 00:00:00+00:00
\tos_build: PQ2A.190406.000 @2019-05-01 00:00:00+00:00
Reading data for phone#5c10102#20190501:
Column Family stats_summary
\tconnected_wifi: \x00\x00\x00\x00\x00\x00\x00\x01 @2019-05-01 00:00:00+00:00
\tos_build: PQ2A.190401.002 @2019-05-01 00:00:00+00:00
Reading data for phone#5c10102#20190502:
Column Family stats_summary
\tconnected_wifi: \x00\x00\x00\x00\x00\x00\x00\x00 @2019-05-01 00:00:00+00:00
\tos_build: PQ2A.190406.000 @2019-05-01 00:00:00+00:00
'''
# Expected stdout for test_filter_limit_col_family_regex (stats_summary only).
snapshots['test_filter_limit_col_family_regex 1'] = '''Reading data for phone#4c410523#20190501:
Column Family stats_summary
\tconnected_cell: \x00\x00\x00\x00\x00\x00\x00\x01 @2019-05-01 00:00:00+00:00
\tconnected_wifi: \x00\x00\x00\x00\x00\x00\x00\x01 @2019-05-01 00:00:00+00:00
\tos_build: PQ2A.190405.003 @2019-05-01 00:00:00+00:00
Reading data for phone#4c410523#20190502:
Column Family stats_summary
\tconnected_cell: \x00\x00\x00\x00\x00\x00\x00\x01 @2019-05-01 00:00:00+00:00
\tconnected_wifi: \x00\x00\x00\x00\x00\x00\x00\x01 @2019-05-01 00:00:00+00:00
\tos_build: PQ2A.190405.004 @2019-05-01 00:00:00+00:00
Reading data for phone#4c410523#20190505:
Column Family stats_summary
\tconnected_cell: \x00\x00\x00\x00\x00\x00\x00\x00 @2019-05-01 00:00:00+00:00
\tconnected_wifi: \x00\x00\x00\x00\x00\x00\x00\x01 @2019-05-01 00:00:00+00:00
\tos_build: PQ2A.190406.000 @2019-05-01 00:00:00+00:00
Reading data for phone#5c10102#20190501:
Column Family stats_summary
\tconnected_cell: \x00\x00\x00\x00\x00\x00\x00\x01 @2019-05-01 00:00:00+00:00
\tconnected_wifi: \x00\x00\x00\x00\x00\x00\x00\x01 @2019-05-01 00:00:00+00:00
\tos_build: PQ2A.190401.002 @2019-05-01 00:00:00+00:00
Reading data for phone#5c10102#20190502:
Column Family stats_summary
\tconnected_cell: \x00\x00\x00\x00\x00\x00\x00\x01 @2019-05-01 00:00:00+00:00
\tconnected_wifi: \x00\x00\x00\x00\x00\x00\x00\x00 @2019-05-01 00:00:00+00:00
\tos_build: PQ2A.190406.000 @2019-05-01 00:00:00+00:00
'''
# Expected stdout for test_filter_limit_col_qualifier_regex (connected_*
# qualifiers only).
snapshots['test_filter_limit_col_qualifier_regex 1'] = '''Reading data for phone#4c410523#20190501:
Column Family stats_summary
\tconnected_cell: \x00\x00\x00\x00\x00\x00\x00\x01 @2019-05-01 00:00:00+00:00
\tconnected_wifi: \x00\x00\x00\x00\x00\x00\x00\x01 @2019-05-01 00:00:00+00:00
Reading data for phone#4c410523#20190502:
Column Family stats_summary
\tconnected_cell: \x00\x00\x00\x00\x00\x00\x00\x01 @2019-05-01 00:00:00+00:00
\tconnected_wifi: \x00\x00\x00\x00\x00\x00\x00\x01 @2019-05-01 00:00:00+00:00
Reading data for phone#4c410523#20190505:
Column Family stats_summary
\tconnected_cell: \x00\x00\x00\x00\x00\x00\x00\x00 @2019-05-01 00:00:00+00:00
\tconnected_wifi: \x00\x00\x00\x00\x00\x00\x00\x01 @2019-05-01 00:00:00+00:00
Reading data for phone#5c10102#20190501:
Column Family stats_summary
\tconnected_cell: \x00\x00\x00\x00\x00\x00\x00\x01 @2019-05-01 00:00:00+00:00
\tconnected_wifi: \x00\x00\x00\x00\x00\x00\x00\x01 @2019-05-01 00:00:00+00:00
Reading data for phone#5c10102#20190502:
Column Family stats_summary
\tconnected_cell: \x00\x00\x00\x00\x00\x00\x00\x01 @2019-05-01 00:00:00+00:00
\tconnected_wifi: \x00\x00\x00\x00\x00\x00\x00\x00 @2019-05-01 00:00:00+00:00
'''
# Expected stdout for test_filter_limit_col_range (cell_plan column range).
snapshots['test_filter_limit_col_range 1'] = '''Reading data for phone#4c410523#20190501:
Column Family cell_plan
\tdata_plan_01gb: false @2019-05-01 00:00:00+00:00
\tdata_plan_01gb: true @2019-04-30 23:00:00+00:00
\tdata_plan_05gb: true @2019-05-01 00:00:00+00:00
Reading data for phone#4c410523#20190502:
Column Family cell_plan
\tdata_plan_05gb: true @2019-05-01 00:00:00+00:00
Reading data for phone#4c410523#20190505:
Column Family cell_plan
\tdata_plan_05gb: true @2019-05-01 00:00:00+00:00
'''
# Expected stdout for test_filter_limit_value_range (two matching os_build
# values).
snapshots['test_filter_limit_value_range 1'] = '''Reading data for phone#4c410523#20190501:
Column Family stats_summary
\tos_build: PQ2A.190405.003 @2019-05-01 00:00:00+00:00
Reading data for phone#4c410523#20190502:
Column Family stats_summary
\tos_build: PQ2A.190405.004 @2019-05-01 00:00:00+00:00
'''
# Expected stdout for test_filter_limit_value_regex (os_build values in all
# five rows).
snapshots['test_filter_limit_value_regex 1'] = '''Reading data for phone#4c410523#20190501:
Column Family stats_summary
\tos_build: PQ2A.190405.003 @2019-05-01 00:00:00+00:00
Reading data for phone#4c410523#20190502:
Column Family stats_summary
\tos_build: PQ2A.190405.004 @2019-05-01 00:00:00+00:00
Reading data for phone#4c410523#20190505:
Column Family stats_summary
\tos_build: PQ2A.190406.000 @2019-05-01 00:00:00+00:00
Reading data for phone#5c10102#20190501:
Column Family stats_summary
\tos_build: PQ2A.190401.002 @2019-05-01 00:00:00+00:00
Reading data for phone#5c10102#20190502:
Column Family stats_summary
\tos_build: PQ2A.190406.000 @2019-05-01 00:00:00+00:00
'''
# Expected stdout for test_filter_limit_timestamp_range (single older cell).
snapshots['test_filter_limit_timestamp_range 1'] = '''Reading data for phone#4c410523#20190501:
Column Family cell_plan
\tdata_plan_01gb: true @2019-04-30 23:00:00+00:00
'''
# test_filter_limit_block_all: the block-all filter yields no output.
snapshots['test_filter_limit_block_all 1'] = ''
# Expected stdout for test_filter_limit_pass_all (every cell of every row).
snapshots['test_filter_limit_pass_all 1'] = '''Reading data for phone#4c410523#20190501:
Column Family cell_plan
\tdata_plan_01gb: false @2019-05-01 00:00:00+00:00
\tdata_plan_01gb: true @2019-04-30 23:00:00+00:00
\tdata_plan_05gb: true @2019-05-01 00:00:00+00:00
Column Family stats_summary
\tconnected_cell: \x00\x00\x00\x00\x00\x00\x00\x01 @2019-05-01 00:00:00+00:00
\tconnected_wifi: \x00\x00\x00\x00\x00\x00\x00\x01 @2019-05-01 00:00:00+00:00
\tos_build: PQ2A.190405.003 @2019-05-01 00:00:00+00:00
Reading data for phone#4c410523#20190502:
Column Family cell_plan
\tdata_plan_05gb: true @2019-05-01 00:00:00+00:00
Column Family stats_summary
\tconnected_cell: \x00\x00\x00\x00\x00\x00\x00\x01 @2019-05-01 00:00:00+00:00
\tconnected_wifi: \x00\x00\x00\x00\x00\x00\x00\x01 @2019-05-01 00:00:00+00:00
\tos_build: PQ2A.190405.004 @2019-05-01 00:00:00+00:00
Reading data for phone#4c410523#20190505:
Column Family cell_plan
\tdata_plan_05gb: true @2019-05-01 00:00:00+00:00
Column Family stats_summary
\tconnected_cell: \x00\x00\x00\x00\x00\x00\x00\x00 @2019-05-01 00:00:00+00:00
\tconnected_wifi: \x00\x00\x00\x00\x00\x00\x00\x01 @2019-05-01 00:00:00+00:00
\tos_build: PQ2A.190406.000 @2019-05-01 00:00:00+00:00
Reading data for phone#5c10102#20190501:
Column Family cell_plan
\tdata_plan_10gb: true @2019-05-01 00:00:00+00:00
Column Family stats_summary
\tconnected_cell: \x00\x00\x00\x00\x00\x00\x00\x01 @2019-05-01 00:00:00+00:00
\tconnected_wifi: \x00\x00\x00\x00\x00\x00\x00\x01 @2019-05-01 00:00:00+00:00
\tos_build: PQ2A.190401.002 @2019-05-01 00:00:00+00:00
Reading data for phone#5c10102#20190502:
Column Family cell_plan
\tdata_plan_10gb: true @2019-05-01 00:00:00+00:00
Column Family stats_summary
\tconnected_cell: \x00\x00\x00\x00\x00\x00\x00\x01 @2019-05-01 00:00:00+00:00
\tconnected_wifi: \x00\x00\x00\x00\x00\x00\x00\x00 @2019-05-01 00:00:00+00:00
\tos_build: PQ2A.190406.000 @2019-05-01 00:00:00+00:00
'''
# Expected stdout for test_filter_modify_strip_value (values stripped,
# timestamps kept).
snapshots['test_filter_modify_strip_value 1'] = '''Reading data for phone#4c410523#20190501:
Column Family cell_plan
\tdata_plan_01gb: @2019-05-01 00:00:00+00:00
\tdata_plan_01gb: @2019-04-30 23:00:00+00:00
\tdata_plan_05gb: @2019-05-01 00:00:00+00:00
Column Family stats_summary
\tconnected_cell: @2019-05-01 00:00:00+00:00
\tconnected_wifi: @2019-05-01 00:00:00+00:00
\tos_build: @2019-05-01 00:00:00+00:00
Reading data for phone#4c410523#20190502:
Column Family cell_plan
\tdata_plan_05gb: @2019-05-01 00:00:00+00:00
Column Family stats_summary
\tconnected_cell: @2019-05-01 00:00:00+00:00
\tconnected_wifi: @2019-05-01 00:00:00+00:00
\tos_build: @2019-05-01 00:00:00+00:00
Reading data for phone#4c410523#20190505:
Column Family cell_plan
\tdata_plan_05gb: @2019-05-01 00:00:00+00:00
Column Family stats_summary
\tconnected_cell: @2019-05-01 00:00:00+00:00
\tconnected_wifi: @2019-05-01 00:00:00+00:00
\tos_build: @2019-05-01 00:00:00+00:00
Reading data for phone#5c10102#20190501:
Column Family cell_plan
\tdata_plan_10gb: @2019-05-01 00:00:00+00:00
Column Family stats_summary
\tconnected_cell: @2019-05-01 00:00:00+00:00
\tconnected_wifi: @2019-05-01 00:00:00+00:00
\tos_build: @2019-05-01 00:00:00+00:00
Reading data for phone#5c10102#20190502:
Column Family cell_plan
\tdata_plan_10gb: @2019-05-01 00:00:00+00:00
Column Family stats_summary
\tconnected_cell: @2019-05-01 00:00:00+00:00
\tconnected_wifi: @2019-05-01 00:00:00+00:00
\tos_build: @2019-05-01 00:00:00+00:00
'''
# Expected stdout for test_filter_modify_apply_label (every cell tagged
# with [labelled]).
snapshots['test_filter_modify_apply_label 1'] = '''Reading data for phone#4c410523#20190501:
Column Family cell_plan
\tdata_plan_01gb: false @2019-05-01 00:00:00+00:00 [labelled]
\tdata_plan_01gb: true @2019-04-30 23:00:00+00:00 [labelled]
\tdata_plan_05gb: true @2019-05-01 00:00:00+00:00 [labelled]
Column Family stats_summary
\tconnected_cell: \x00\x00\x00\x00\x00\x00\x00\x01 @2019-05-01 00:00:00+00:00 [labelled]
\tconnected_wifi: \x00\x00\x00\x00\x00\x00\x00\x01 @2019-05-01 00:00:00+00:00 [labelled]
\tos_build: PQ2A.190405.003 @2019-05-01 00:00:00+00:00 [labelled]
Reading data for phone#4c410523#20190502:
Column Family cell_plan
\tdata_plan_05gb: true @2019-05-01 00:00:00+00:00 [labelled]
Column Family stats_summary
\tconnected_cell: \x00\x00\x00\x00\x00\x00\x00\x01 @2019-05-01 00:00:00+00:00 [labelled]
\tconnected_wifi: \x00\x00\x00\x00\x00\x00\x00\x01 @2019-05-01 00:00:00+00:00 [labelled]
\tos_build: PQ2A.190405.004 @2019-05-01 00:00:00+00:00 [labelled]
Reading data for phone#4c410523#20190505:
Column Family cell_plan
\tdata_plan_05gb: true @2019-05-01 00:00:00+00:00 [labelled]
Column Family stats_summary
\tconnected_cell: \x00\x00\x00\x00\x00\x00\x00\x00 @2019-05-01 00:00:00+00:00 [labelled]
\tconnected_wifi: \x00\x00\x00\x00\x00\x00\x00\x01 @2019-05-01 00:00:00+00:00 [labelled]
\tos_build: PQ2A.190406.000 @2019-05-01 00:00:00+00:00 [labelled]
Reading data for phone#5c10102#20190501:
Column Family cell_plan
\tdata_plan_10gb: true @2019-05-01 00:00:00+00:00 [labelled]
Column Family stats_summary
\tconnected_cell: \x00\x00\x00\x00\x00\x00\x00\x01 @2019-05-01 00:00:00+00:00 [labelled]
\tconnected_wifi: \x00\x00\x00\x00\x00\x00\x00\x01 @2019-05-01 00:00:00+00:00 [labelled]
\tos_build: PQ2A.190401.002 @2019-05-01 00:00:00+00:00 [labelled]
Reading data for phone#5c10102#20190502:
Column Family cell_plan
\tdata_plan_10gb: true @2019-05-01 00:00:00+00:00 [labelled]
Column Family stats_summary
\tconnected_cell: \x00\x00\x00\x00\x00\x00\x00\x01 @2019-05-01 00:00:00+00:00 [labelled]
\tconnected_wifi: \x00\x00\x00\x00\x00\x00\x00\x00 @2019-05-01 00:00:00+00:00 [labelled]
\tos_build: PQ2A.190406.000 @2019-05-01 00:00:00+00:00 [labelled]
'''
# Expected stdout for test_filter_composing_chain (chained filters).
snapshots['test_filter_composing_chain 1'] = '''Reading data for phone#4c410523#20190501:
Column Family cell_plan
\tdata_plan_01gb: false @2019-05-01 00:00:00+00:00
\tdata_plan_05gb: true @2019-05-01 00:00:00+00:00
Reading data for phone#4c410523#20190502:
Column Family cell_plan
\tdata_plan_05gb: true @2019-05-01 00:00:00+00:00
Reading data for phone#4c410523#20190505:
Column Family cell_plan
\tdata_plan_05gb: true @2019-05-01 00:00:00+00:00
Reading data for phone#5c10102#20190501:
Column Family cell_plan
\tdata_plan_10gb: true @2019-05-01 00:00:00+00:00
Reading data for phone#5c10102#20190502:
Column Family cell_plan
\tdata_plan_10gb: true @2019-05-01 00:00:00+00:00
'''
# Expected stdout for test_filter_composing_interleave (union of filters).
snapshots['test_filter_composing_interleave 1'] = '''Reading data for phone#4c410523#20190501:
Column Family cell_plan
\tdata_plan_01gb: true @2019-04-30 23:00:00+00:00
\tdata_plan_05gb: true @2019-05-01 00:00:00+00:00
Column Family stats_summary
\tos_build: PQ2A.190405.003 @2019-05-01 00:00:00+00:00
Reading data for phone#4c410523#20190502:
Column Family cell_plan
\tdata_plan_05gb: true @2019-05-01 00:00:00+00:00
Column Family stats_summary
\tos_build: PQ2A.190405.004 @2019-05-01 00:00:00+00:00
Reading data for phone#4c410523#20190505:
Column Family cell_plan
\tdata_plan_05gb: true @2019-05-01 00:00:00+00:00
Column Family stats_summary
\tos_build: PQ2A.190406.000 @2019-05-01 00:00:00+00:00
Reading data for phone#5c10102#20190501:
Column Family cell_plan
\tdata_plan_10gb: true @2019-05-01 00:00:00+00:00
Column Family stats_summary
\tos_build: PQ2A.190401.002 @2019-05-01 00:00:00+00:00
Reading data for phone#5c10102#20190502:
Column Family cell_plan
\tdata_plan_10gb: true @2019-05-01 00:00:00+00:00
Column Family stats_summary
\tos_build: PQ2A.190406.000 @2019-05-01 00:00:00+00:00
'''
# Expected stdout for test_filter_composing_condition (cells labelled by
# which branch of the condition they took).
snapshots['test_filter_composing_condition 1'] = '''Reading data for phone#4c410523#20190501:
Column Family cell_plan
\tdata_plan_01gb: false @2019-05-01 00:00:00+00:00 [filtered-out]
\tdata_plan_01gb: true @2019-04-30 23:00:00+00:00 [filtered-out]
\tdata_plan_05gb: true @2019-05-01 00:00:00+00:00 [filtered-out]
Column Family stats_summary
\tconnected_cell: \x00\x00\x00\x00\x00\x00\x00\x01 @2019-05-01 00:00:00+00:00 [filtered-out]
\tconnected_wifi: \x00\x00\x00\x00\x00\x00\x00\x01 @2019-05-01 00:00:00+00:00 [filtered-out]
\tos_build: PQ2A.190405.003 @2019-05-01 00:00:00+00:00 [filtered-out]
Reading data for phone#4c410523#20190502:
Column Family cell_plan
\tdata_plan_05gb: true @2019-05-01 00:00:00+00:00 [filtered-out]
Column Family stats_summary
\tconnected_cell: \x00\x00\x00\x00\x00\x00\x00\x01 @2019-05-01 00:00:00+00:00 [filtered-out]
\tconnected_wifi: \x00\x00\x00\x00\x00\x00\x00\x01 @2019-05-01 00:00:00+00:00 [filtered-out]
\tos_build: PQ2A.190405.004 @2019-05-01 00:00:00+00:00 [filtered-out]
Reading data for phone#4c410523#20190505:
Column Family cell_plan
\tdata_plan_05gb: true @2019-05-01 00:00:00+00:00 [filtered-out]
Column Family stats_summary
\tconnected_cell: \x00\x00\x00\x00\x00\x00\x00\x00 @2019-05-01 00:00:00+00:00 [filtered-out]
\tconnected_wifi: \x00\x00\x00\x00\x00\x00\x00\x01 @2019-05-01 00:00:00+00:00 [filtered-out]
\tos_build: PQ2A.190406.000 @2019-05-01 00:00:00+00:00 [filtered-out]
Reading data for phone#5c10102#20190501:
Column Family cell_plan
\tdata_plan_10gb: true @2019-05-01 00:00:00+00:00 [passed-filter]
Column Family stats_summary
\tconnected_cell: \x00\x00\x00\x00\x00\x00\x00\x01 @2019-05-01 00:00:00+00:00 [passed-filter]
\tconnected_wifi: \x00\x00\x00\x00\x00\x00\x00\x01 @2019-05-01 00:00:00+00:00 [passed-filter]
\tos_build: PQ2A.190401.002 @2019-05-01 00:00:00+00:00 [passed-filter]
Reading data for phone#5c10102#20190502:
Column Family cell_plan
\tdata_plan_10gb: true @2019-05-01 00:00:00+00:00 [passed-filter]
Column Family stats_summary
\tconnected_cell: \x00\x00\x00\x00\x00\x00\x00\x01 @2019-05-01 00:00:00+00:00 [passed-filter]
\tconnected_wifi: \x00\x00\x00\x00\x00\x00\x00\x00 @2019-05-01 00:00:00+00:00 [passed-filter]
\tos_build: PQ2A.190406.000 @2019-05-01 00:00:00+00:00 [passed-filter]
'''
| 40.334719
| 100
| 0.764445
| 3,853
| 19,401
| 3.734493
| 0.023099
| 0.216276
| 0.242268
| 0.213496
| 0.979151
| 0.979151
| 0.979151
| 0.978595
| 0.976162
| 0.975606
| 0
| 0.354659
| 0.082367
| 19,401
| 480
| 101
| 40.41875
| 0.453575
| 0.003866
| 0
| 0.9
| 0
| 0.338462
| 0.973812
| 0.232585
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.023077
| 0.005128
| 0
| 0.005128
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 14
|
a059013902f04a60eb741a9660f2a03ea8add282
| 5,058
|
py
|
Python
|
py/levels/bronze.py
|
triffid/kiki
|
b64b8524063c149a5cc9118f48d80afec1d8a942
|
[
"Unlicense"
] | 2
|
2020-01-04T23:44:10.000Z
|
2020-07-12T17:10:09.000Z
|
py/levels/bronze.py
|
triffid/kiki
|
b64b8524063c149a5cc9118f48d80afec1d8a942
|
[
"Unlicense"
] | null | null | null |
py/levels/bronze.py
|
triffid/kiki
|
b64b8524063c149a5cc9118f48d80afec1d8a942
|
[
"Unlicense"
] | 1
|
2022-03-16T05:43:33.000Z
|
2022-03-16T05:43:33.000Z
|
# .................................................................................................................
# Definition of the "bronze" level: static metadata plus a "create" script
# string that is executed by the game engine to populate the world.
# NOTE(review): `level_dict` and the Kiki* names used inside "create" are
# defined elsewhere in the project — presumably the engine exposes them when
# the string is executed; confirm against the level loader.
level_dict["bronze"] = {
"scheme": "bronze_scheme",
# World dimensions (x, y, z).
"size": (9,6,9),
"intro": "bronze",
# NOTE(review): the trailing comma makes "help" a 1-tuple containing the
# mission text — confirm the engine expects a tuple rather than a string.
"help": (
"$scale(1.5)mission:\nactivate the exit!\n\n" + \
"to activate the exit\nfeed it with electricity:\n\n" + \
"connect the generator\nwith the motor\n"+ \
"and close the circuit\nwith the wire stones",
),
# Player start position.
"player": { "position": (0,1,0),
},
# The exit starts inactive; the wiring built in "create" must power it.
"exits": [
{
"name": "exit",
"active": 0,
"position": (0,0,0),
},
],
# Engine-executed setup script: places motor, generator, gears, a bomb and
# wire stones, then lays wire loops on the floor, ceiling and walls.
# (Runtime code data — must not be modified here.)
"create":
"""
s = world.getSize()
d = 2
world.addObjectAtPos (KikiMotorCylinder (KikiFace.PY), KikiPos (s.x/2, 1, s.z/2))
world.addObjectAtPos (KikiMotorGear (KikiFace.PY), KikiPos (s.x/2, 0, s.z/2))
world.addObjectAtPos (KikiGear (KikiFace.PY), KikiPos (s.x/2-1, s.y-1, s.z/2-1))
world.addObjectAtPos (KikiGenerator (KikiFace.PY), KikiPos (s.x/2+1, s.y-1, s.z/2-1))
#world.addObjectAtPos (KikiHealthAtom (), KikiPos (s.x/2+1, s.y-1, s.z/2+1))
world.addObjectAtPos (KikiBomb (), KikiPos (s.x/2-1, s.y-1, s.z/2+1))
world.addObjectAtPos (KikiWireStone (), KikiPos (s.x/2, s.y-1, s.z/2))
world.addObjectAtPos (KikiWireStone (), KikiPos (s.x/2+1, s.y-2, s.z/2))
world.addObjectAtPos (KikiWireStone (), KikiPos (s.x/2-1, s.y-2, s.z/2))
# floor wire square
world.addObjectLine ("KikiWire (KikiFace.PY, 10)", KikiPos (s.x/2-d+1, 0, s.z/2-d), KikiPos (s.x/2+d, 0, s.z/2-d))
world.addObjectLine ("KikiWire (KikiFace.PY, 10)", KikiPos (s.x/2-d+1, 0, s.z/2+d), KikiPos (s.x/2+d, 0, s.z/2+d))
world.addObjectAtPos (KikiWire (KikiFace.PY, 5), KikiPos (s.x/2-d, 0, s.z/2+1))
world.addObjectAtPos (KikiWire (KikiFace.PY, 5), KikiPos (s.x/2-d, 0, s.z/2-1))
world.addObjectAtPos (KikiWire (KikiFace.PY, 13), KikiPos (s.x/2-d, 0, s.z/2))
world.addObjectAtPos (KikiWire (KikiFace.PY, 5), KikiPos (s.x/2+d, 0, s.z/2+1))
world.addObjectAtPos (KikiWire (KikiFace.PY, 5), KikiPos (s.x/2+d, 0, s.z/2-1))
world.addObjectAtPos (KikiWire (KikiFace.PY, 7), KikiPos (s.x/2+d, 0, s.z/2))
# corners of wire square
world.addObjectAtPos (KikiWire (KikiFace.PY, 6), KikiPos (s.x/2-d, 0, s.z/2-d))
world.addObjectAtPos (KikiWire (KikiFace.PY, 3), KikiPos (s.x/2-d, 0, s.z/2+d))
world.addObjectAtPos (KikiWire (KikiFace.PY, 9), KikiPos (s.x/2+d, 0, s.z/2+d))
world.addObjectAtPos (KikiWire (KikiFace.PY, 12), KikiPos (s.x/2+d, 0, s.z/2-d))
world.addObjectLine ("KikiWire (KikiFace.PY, 10)", KikiPos (0, 0, s.z/2), KikiPos (s.x/2-d, 0, s.z/2))
world.addObjectLine ("KikiWire (KikiFace.PY, 10)", KikiPos (s.x/2+d+1, 0, s.z/2), KikiPos (s.x, 0, s.z/2))
# ceiling wire square
world.addObjectLine ("KikiWire (KikiFace.NY, 10)", KikiPos (s.x/2-d+1, s.y-1, s.z/2-d), KikiPos (s.x/2+d, s.y-1, s.z/2-d))
world.addObjectLine ("KikiWire (KikiFace.NY, 10)", KikiPos (s.x/2-d+1, s.y-1, s.z/2+d), KikiPos (s.x/2+d, s.y-1, s.z/2+d))
world.addObjectAtPos (KikiWire (KikiFace.NY, 5), KikiPos (s.x/2-d, s.y-1, s.z/2+1))
world.addObjectAtPos (KikiWire (KikiFace.NY, 5), KikiPos (s.x/2-d, s.y-1, s.z/2-1))
world.addObjectAtPos (KikiWire (KikiFace.NY, 13), KikiPos (s.x/2-d, s.y-1, s.z/2))
world.addObjectAtPos (KikiWire (KikiFace.NY, 5), KikiPos (s.x/2+d, s.y-1, s.z/2+1))
world.addObjectAtPos (KikiWire (KikiFace.NY, 5), KikiPos (s.x/2+d, s.y-1, s.z/2-1))
world.addObjectAtPos (KikiWire (KikiFace.NY, 7), KikiPos (s.x/2+d, s.y-1, s.z/2))
# corners of wire square
world.addObjectAtPos (KikiWire (KikiFace.NY, 3), KikiPos (s.x/2-d, s.y-1, s.z/2-d))
world.addObjectAtPos (KikiWire (KikiFace.NY, 6), KikiPos (s.x/2-d, s.y-1, s.z/2+d))
world.addObjectAtPos (KikiWire (KikiFace.NY, 12), KikiPos (s.x/2+d, s.y-1, s.z/2+d))
world.addObjectAtPos (KikiWire (KikiFace.NY, 9), KikiPos (s.x/2+d, s.y-1, s.z/2-d))
world.addObjectLine ("KikiWire (KikiFace.NY, 10)", KikiPos (0, s.y-1, s.z/2), KikiPos (s.x/2-d, s.y-1, s.z/2))
world.addObjectLine ("KikiWire (KikiFace.NY, 10)", KikiPos (s.x/2+d+1, s.y-1, s.z/2), KikiPos (s.x, s.y-1, s.z/2))
# wall wire lines
world.addObjectLine ("KikiWire (KikiFace.PX, 5)", KikiPos ( 0, 0, s.z/2), KikiPos ( 0, s.y, s.z/2))
world.addObjectLine ("KikiWire (KikiFace.NX, 5)", KikiPos (s.x-1, 0, s.z/2), KikiPos (s.x-1, s.y, s.z/2))
""",
}
| 58.137931
| 122
| 0.5172
| 805
| 5,058
| 3.247205
| 0.093168
| 0.03749
| 0.056236
| 0.156848
| 0.832058
| 0.82785
| 0.787299
| 0.756312
| 0.736802
| 0.726473
| 0
| 0.060163
| 0.270463
| 5,058
| 87
| 123
| 58.137931
| 0.648238
| 0.022341
| 0
| 0
| 0
| 0
| 0.230969
| 0.025087
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
a05d70cd2f5abbcf6b69eda2f29cc0850aba18ff
| 191,993
|
py
|
Python
|
python/latex2wolfram/parsetab.py
|
rafaellc28/Latex2Wolfram
|
ac133872a7fb3a884e52df07c45db9bd9188adf7
|
[
"MIT"
] | 2
|
2019-09-24T21:00:57.000Z
|
2021-07-19T21:24:09.000Z
|
python/latex2wolfram/parsetab.py
|
rafaellc28/Latex2Wolfram
|
ac133872a7fb3a884e52df07c45db9bd9188adf7
|
[
"MIT"
] | 1
|
2019-09-24T20:54:08.000Z
|
2019-09-24T20:54:08.000Z
|
python/latex2wolfram/parsetab.py
|
rafaellc28/Latex2Wolfram
|
ac133872a7fb3a884e52df07c45db9bd9188adf7
|
[
"MIT"
] | null | null | null |
# parsetab.py
# This file is automatically generated. Do not edit.
_tabversion = '3.10'
_lr_method = 'LALR'
_lr_signature = 'leftIDleftNUMBERINFINITYleftBEGIN_CASEEND_CASEBEGIN_BMATRIXEND_BMATRIXBEGIN_PMATRIXEND_PMATRIXBACKSLASHESleftINTEGRALDIFFERENTIALDIEPARTIALLIMITTOrightCOMMArightPIPErightLPARENRPARENrightLBRACERBRACELBRACKETRBRACKETFRACleftPIPRIMErightLEGELTGTEQNEQleftINrightDOTSleftSUMPRODleftFACTORIALleftPLUSMINUSleftTIMESDIVIDEMODCHOOSEDOTleftUPLUSUMINUSrightCARETleftLFLOORRFLOORLCEILRCEILSINHASINHSINASINCOSHACOSHCOSACOSTANHATANHTANATANSECASECCSCACSCCOTHACOTHCOTACOTSQRTLNLOGEXPGCDDEGGRADIENTDETERMINANTCROSSFRAC NUMBER PLUS MINUS TIMES DIVIDE LPAREN RPAREN LBRACKET RBRACKET LBRACE RBRACE LFLOOR RFLOOR LCEIL RCEIL ASINH SINH ASIN SIN ACOSH COSH ACOS COS ATANH TANH ATAN TAN ASEC SEC ACSC CSC ACOTH COTH ACOT COT SQRT LOG LN EXP MOD CARET COMMA ID PIPE INFINITY UNDERLINE INTEGRAL DIFFERENTIAL D I E PARTIAL SUM PROD IN DOTS EQ NEQ LT LE GT GE FACTORIAL PERCENT ETA_LOWER ZETA_LOWER PHI_LOWER PSI_LOWER SIGMA_LOWER DELTA_LOWER THETA_LOWER LAMBDA_LOWER EPSILON_LOWER TAU_LOWER KAPPA_LOWER OMEGA_LOWER ALPHA_LOWER XI_LOWER CHI_LOWER NU_LOWER RHO_LOWER OMICRON_LOWER UPSILON_LOWER IOTA_LOWER BETA_LOWER GAMMA_LOWER MU_LOWER PI_UPPER PI BETA GAMMA MU KAPPA OMICRON OMEGA LAMBDA IOTA PSI PHI SIGMA ETA ZETA THETA EPSILON TAU ALPHA XI CHI NU RHO UPSILON LIMIT TO PRIME GCD DEG CHOOSE GRADIENT LAPLACIAN BEGIN_CASE END_CASE BACKSLASHES BEGIN_BMATRIX END_BMATRIX BEGIN_PMATRIX END_PMATRIX BEGIN_VMATRIX END_VMATRIX BEGIN_NMATRIX END_NMATRIX AMPERSAND DETERMINANT CROSS DOTMAIN : Expression\n | Constraint\n | ConstraintSystemFactor : NUMBER\n | ImaginaryNumber\n | NapierNumber\n | ID\n | INFINITY\n | Symbol\n | IteratedExpression\n | DivisorFunction\n | Derivative\n | Integral\n | Limit\n | DifferentialVariable\n | ChooseExpression\n | Matrix\n | Determinant\n | Norm\n | FractionalExpression\n | ID CARET LBRACE Expression RBRACE\n | LPAREN Expression RPARENTerm : Term TIMES Factor\n | Term DOT Factor\n | Term CROSS Factor\n | Term DIVIDE Factor\n | Term MOD Factor\n | Term CARET LBRACE 
Expression RBRACE\n | FactorExpression : Expression PLUS Term\n | Expression MINUS Term\n | TermFactor : PLUS Expression %prec UPLUS\n | MINUS Expression %prec UMINUSFactor : NUMBER FACTORIAL\n | ID FACTORIAL\n | LPAREN Expression RPAREN FACTORIAL\n | NUMBER PERCENT\n | ID PERCENT\n | LPAREN Expression RPAREN PERCENTFractionalExpression : FRAC LBRACE Expression RBRACE LBRACE Expression RBRACEFactor : SQRT LBRACE Expression RBRACE\n\n | SQRT LBRACKET NUMBER RBRACKET LBRACE Expression RBRACE\n \n | LFLOOR Expression RFLOOR\n \n | LCEIL Expression RCEIL\n \n | PIPE Expression PIPE\n\n | ASINH LPAREN Expression RPAREN\n \n | ASINH ID\n\n | ASINH NUMBER\n\n | SINH LPAREN Expression RPAREN\n \n | SINH ID\n\n | SINH NUMBER\n \n | ASIN LPAREN Expression RPAREN\n\n | ASIN ID\n\n | ASIN NUMBER\n\n | SIN LPAREN Expression RPAREN\n \n | SIN ID\n\n | SIN NUMBER\n\n | ACOSH LPAREN Expression RPAREN\n\n | ACOSH ID\n\n | ACOSH NUMBER\n\n | COSH LPAREN Expression RPAREN\n\n | COSH ID\n\n | COSH NUMBER\n\n | ACOS LPAREN Expression RPAREN\n\n | ACOS ID\n\n | ACOS NUMBER\n\n | COS LPAREN Expression RPAREN\n\n | COS ID\n\n | COS NUMBER\n\n | ATANH LPAREN Expression RPAREN\n\n | ATANH ID\n\n | ATANH NUMBER\n\n | TANH LPAREN Expression RPAREN\n\n | TANH ID\n\n | TANH NUMBER\n\n | ATAN LPAREN Expression COMMA Expression RPAREN\n | ATAN LPAREN Expression RPAREN\n \n | ATAN ID\n\n | ATAN NUMBER\n\n | TAN LPAREN Expression RPAREN\n\n | TAN ID\n\n | TAN NUMBER\n\n | ASEC LPAREN Expression RPAREN\n\n | ASEC ID\n\n | ASEC NUMBER\n\n | SEC LPAREN Expression RPAREN\n \n | SEC ID\n\n | SEC NUMBER\n\n | ACSC LPAREN Expression RPAREN\n\n | ACSC ID\n\n | ACSC NUMBER\n\n | CSC LPAREN Expression RPAREN\n\n | CSC ID\n\n | CSC NUMBER\n\n | ACOTH LPAREN Expression RPAREN\n\n | ACOTH ID\n\n | ACOTH NUMBER\n\n | COTH LPAREN Expression RPAREN\n\n | COTH ID\n\n | COTH NUMBER\n\n | ACOT LPAREN Expression RPAREN\n\n | ACOT ID\n\n | ACOT NUMBER\n\n | COT LPAREN Expression RPAREN\n\n | COT ID\n\n | COT NUMBER\n 
\n | LOG LPAREN Expression RPAREN\n\n | LOG ID\n\n | LOG NUMBER\n\n | LOG UNDERLINE LBRACE NUMBER RBRACE LPAREN Expression RPAREN\n \n | LN LPAREN Expression RPAREN\n\n | LN ID\n\n | LN NUMBER\n \n | EXP LPAREN Expression RPAREN\n\n | EXP ID\n\n | EXP NUMBER\n\n | GCD LPAREN ExpressionList RPAREN\n\n | GCD ID\n\n | GCD NUMBER\n\n | DEG LPAREN ExpressionList RPAREN\n\n | DEG ID\n\n | DEG NUMBER\n\n | GRADIENT LPAREN ExpressionList RPAREN\n\n | GRADIENT ID\n\n | GRADIENT NUMBER\n\n | GRADIENT DOT LPAREN ExpressionList RPAREN\n\n | GRADIENT DOT ID\n\n | GRADIENT DOT NUMBER\n\n | GRADIENT CROSS LPAREN ExpressionList RPAREN\n \n | GRADIENT CROSS ID\n \n | GRADIENT CROSS NUMBER\n \n | LAPLACIAN LPAREN Expression RPAREN\n \n | LAPLACIAN NUMBER\n \n | LAPLACIAN ID\n \n | DETERMINANT LPAREN Matrix RPAREN\n \n | DETERMINANT Matrix\n\n | Symbol LPAREN ExpressionList RPAREN\n \n | ID LPAREN ExpressionList RPAREN\n \n | ID LPAREN RPARENRange : Expression DOTS ExpressionIndexingExpression : ID IN RangeIteratedExpression : SUM UNDERLINE LBRACE IndexingExpression RBRACE Expression\n | SUM UNDERLINE LBRACE ID EQ Expression RBRACE CARET LBRACE Expression RBRACE Expression\n | PROD UNDERLINE LBRACE IndexingExpression RBRACE Expression\n | PROD UNDERLINE LBRACE ID EQ Expression RBRACE CARET LBRACE Expression RBRACE ExpressionIntegral : INTEGRAL UNDERLINE LBRACE Expression RBRACE CARET LBRACE Expression RBRACE Expression DIFFERENTIAL\n | INTEGRAL UNDERLINE LBRACE Expression RBRACE Expression DIFFERENTIAL\n | INTEGRAL CARET LBRACE Expression RBRACE Expression DIFFERENTIAL\n | INTEGRAL Expression DIFFERENTIALDerivative : FRAC LBRACE D RBRACE LBRACE DIFFERENTIAL RBRACE Expression\n | FRAC LBRACE D CARET LBRACE NUMBER RBRACE RBRACE LBRACE DIFFERENTIAL CARET LBRACE NUMBER RBRACE RBRACE ExpressionDerivative : FRAC LBRACE D Expression RBRACE LBRACE DIFFERENTIAL RBRACE\n | FRAC LBRACE D CARET LBRACE NUMBER RBRACE Expression RBRACE LBRACE DIFFERENTIAL CARET LBRACE NUMBER RBRACE RBRACEDerivative 
: FRAC LBRACE PARTIAL RBRACE LBRACE PARTIAL ID RBRACE Expression\n | FRAC LBRACE PARTIAL CARET LBRACE NUMBER RBRACE RBRACE LBRACE PARTIAL ID CARET LBRACE NUMBER RBRACE RBRACE ExpressionDerivative : FRAC LBRACE PARTIAL Expression RBRACE LBRACE PARTIAL ID RBRACE\n | FRAC LBRACE PARTIAL CARET LBRACE NUMBER RBRACE Expression RBRACE LBRACE PARTIAL ID CARET LBRACE NUMBER RBRACE RBRACEDivisorFunction : SIGMA_LOWER UNDERLINE LBRACE NUMBER RBRACE LPAREN ExpressionList RPARENImaginaryNumber : I\n | NUMBER INapierNumber : E\n | NUMBER EDifferentialVariable : ID PrimeList LPAREN ExpressionList RPAREN\n | ID PrimeListDifferentialVariable : ID CARET LBRACE LPAREN NUMBER RPAREN RBRACE LPAREN ExpressionList RPAREN\n | ID CARET LBRACE LPAREN NUMBER RPAREN RBRACEChooseExpression : LBRACE Expression CHOOSE Expression RBRACELimit : LIMIT UNDERLINE LBRACE ID TO Expression RBRACE Expression\n | LIMIT UNDERLINE LBRACE ID TO Expression PLUS RBRACE Expression\n | LIMIT UNDERLINE LBRACE ID TO Expression MINUS RBRACE Expression\n | LIMIT UNDERLINE LBRACE ID TO Expression CARET LBRACE PLUS RBRACE RBRACE Expression\n | LIMIT UNDERLINE LBRACE ID TO Expression CARET LBRACE MINUS RBRACE RBRACE Expression\n | LIMIT UNDERLINE LBRACE ID TO Term CARET LBRACE PLUS RBRACE RBRACE Expression\n | LIMIT UNDERLINE LBRACE ID TO Term CARET LBRACE MINUS RBRACE RBRACE ExpressionConstraintSystem : BEGIN_CASE Constraints END_CASE\n | BEGIN_CASE Constraints BACKSLASHES END_CASEConstraints : Constraints BACKSLASHES Constraint\n | ConstraintDeterminant : BEGIN_VMATRIX ExpressionsRows END_VMATRIX\n | BEGIN_VMATRIX ExpressionsRows BACKSLASHES END_VMATRIXNorm : BEGIN_NMATRIX ExpressionsRows END_NMATRIX\n | BEGIN_NMATRIX ExpressionsRows BACKSLASHES END_NMATRIXMatrix : BEGIN_BMATRIX ExpressionsRows END_BMATRIX\n | BEGIN_BMATRIX ExpressionsRows BACKSLASHES END_BMATRIX\n\n | BEGIN_PMATRIX ExpressionsRows END_PMATRIX\n | BEGIN_PMATRIX ExpressionsRows BACKSLASHES END_PMATRIXExpressionsRow : ExpressionsRow AMPERSAND 
Expression\n | ExpressionExpressionsRows : ExpressionsRows BACKSLASHES ExpressionsRow\n | ExpressionsRowExpressionList : ExpressionList COMMA Expression\n | ExpressionPrimeList : PrimeList PRIME\n | PRIMEConstraint : Expression EQ Expression\n | Expression NEQ Expression\n | Expression LT Expression\n | Expression LE Expression\n | Expression GT Expression\n | Expression GE ExpressionSymbol : PI\n | XI_LOWER\n | CHI_LOWER\n | PHI_LOWER\n | PSI_LOWER\n | SIGMA_LOWER\n | ZETA_LOWER\n | ETA_LOWER\n | DELTA_LOWER\n | THETA_LOWER\n | LAMBDA_LOWER\n | EPSILON_LOWER\n | TAU_LOWER\n | KAPPA_LOWER\n | OMEGA_LOWER\n | ALPHA_LOWER\n | NU_LOWER\n | RHO_LOWER\n | OMICRON_LOWER\n | UPSILON_LOWER\n | IOTA_LOWER\n | BETA_LOWER\n | GAMMA_LOWER\n | MU_LOWER\n | PI_UPPER\n | BETA\n | GAMMA\n | KAPPA\n | OMICRON\n | OMEGA\n | LAMBDA\n | IOTA\n | PSI\n | MU\n | PHI\n | SIGMA\n | ETA\n | ZETA\n | THETA\n | EPSILON\n | TAU\n | ALPHA\n | XI\n | CHI\n | NU\n | RHO\n | UPSILON'
_lr_action_items = {'LFLOOR':([0,1,6,15,18,38,47,52,70,78,87,94,100,108,121,125,129,132,136,139,141,144,148,154,157,161,163,167,172,173,174,175,176,178,181,186,189,193,196,200,203,206,207,208,209,210,211,212,213,217,227,230,233,237,244,248,251,254,265,266,272,280,281,282,290,301,311,315,316,322,325,329,331,335,345,355,398,407,415,420,421,422,423,424,426,427,433,444,451,459,460,461,462,464,466,471,475,479,482,483,488,498,499,500,511,512,513,527,528,529,530,535,536,557,562,],[1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,]),'LAPLACIAN':([0,1,6,15,18,38,47,52,70,78,87,94,100,108,121,125,129,132,136,139,141,144,148,154,157,161,163,167,172,173,174,175,176,178,181,186,189,193,196,200,203,206,207,208,209,210,211,212,213,217,227,230,233,237,244,248,251,254,265,266,272,280,281,282,290,301,311,315,316,322,325,329,331,335,345,355,398,407,415,420,421,422,423,424,426,427,433,444,451,459,460,461,462,464,466,471,475,479,482,483,488,498,499,500,511,512,513,527,528,529,530,535,536,557,562,],[3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,]),'LBRACKET':([114,],[252,]),'LIMIT':([0,1,6,15,18,38,47,52,70,78,87,94,100,108,121,125,129,132,136,139,141,144,148,154,157,161,163,167,172,173,174,175,176,178,181,186,189,193,196,200,203,206,207,208,209,210,211,212,213,217,227,230,233,237,244,248,251,254,265,266,272,280,281,282,290,301,311,315,316,322,325,329,331,335,345,355,398,407,415,420,421,422,423,424,426,427,433,444,451,459,460,461,462,464,466,471,475,479,482,483,488,498,499,500,511,512,513,527,528,529,530,535,536,557,562,],[4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,
4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,]),'EXP':([0,1,6,15,18,38,47,52,70,78,87,94,100,108,121,125,129,132,136,139,141,144,148,154,157,161,163,167,172,173,174,175,176,178,181,186,189,193,196,200,203,206,207,208,209,210,211,212,213,217,227,230,233,237,244,248,251,254,265,266,272,280,281,282,290,301,311,315,316,322,325,329,331,335,345,355,398,407,415,420,421,422,423,424,426,427,433,444,451,459,460,461,462,464,466,471,475,479,482,483,488,498,499,500,511,512,513,527,528,529,530,535,536,557,562,],[5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,]),'SIN':([0,1,6,15,18,38,47,52,70,78,87,94,100,108,121,125,129,132,136,139,141,144,148,154,157,161,163,167,172,173,174,175,176,178,181,186,189,193,196,200,203,206,207,208,209,210,211,212,213,217,227,230,233,237,244,248,251,254,265,266,272,280,281,282,290,301,311,315,316,322,325,329,331,335,345,355,398,407,415,420,421,422,423,424,426,427,433,444,451,459,460,461,462,464,466,471,475,479,482,483,488,498,499,500,511,512,513,527,528,529,530,535,536,557,562,],[42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,]),'TAU':([0,1,6,15,18,38,47,52,70,78,87,94,100,108,121,125,129,132,136,139,141,144,148,154,157,161,163,167,172,173,174,175,176,178,181,186,189,193,196,200,203,206,207,208,209,210,211,212,213,217,227,230,233,237,244,248,251,254,265,266,272,280,281,282,290,301,311,315,316,322,325,329,331,335,345,355,398,407,415,420,421,422,423,424,426,427,433,444,451,459,460,461,462,464,466,471,475,4
79,482,483,488,498,499,500,511,512,513,527,528,529,530,535,536,557,562,],[8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,]),'RPAREN':([2,7,8,9,11,12,13,17,21,22,23,25,26,27,28,30,31,32,33,34,35,37,39,43,45,48,49,53,54,55,63,64,65,66,68,69,72,73,74,75,76,79,80,81,82,83,85,86,88,89,90,91,92,95,97,101,103,104,105,106,107,109,110,112,113,116,117,118,120,122,124,126,127,128,130,131,133,134,135,137,140,142,143,145,147,149,153,155,156,158,159,160,162,164,165,166,177,179,180,182,185,187,188,190,192,194,195,197,198,202,204,215,218,222,223,224,225,226,228,229,231,232,234,236,238,239,241,242,243,244,245,246,249,253,255,256,257,259,260,261,262,263,264,268,269,271,274,275,276,277,278,279,283,284,285,286,287,288,289,292,293,295,296,297,298,299,300,302,308,309,312,314,317,318,319,320,321,323,324,326,327,328,330,332,333,334,336,339,340,342,343,344,346,354,356,358,361,362,363,364,365,366,370,371,375,376,379,380,381,382,383,384,385,387,390,391,392,393,394,395,396,399,400,401,402,403,404,406,408,416,418,419,428,429,430,431,432,443,448,450,468,469,473,474,476,477,478,481,486,487,492,496,503,504,507,510,514,526,537,538,539,540,541,546,547,560,561,564,565,],[-218,-17,-242,-18,-216,-205,-6,-233,-245,-236,-14,-10,-13,-238,-247,-237,-210,-20,-207,-209,-29,-12,-228,-5,-202,-32,-234,-212,-235,-213,-215,-203,-204,-211,-222,-9,-162,-206,-244,-229,-208,-219,-214,-227,-226,-240,-231,-246,-16,-224,-225,-11,-15,-4,-19,-248,-217,-220,-8,-221,-160,-239,-7,-241,-230,-243,-232,-223,-134,-135,-117,-116,-34,-98,-97,-120,-119,-33,-83,-82,-80,-79,-49,-48,-86,-85,-123,-122,-76,-75,-58,-57,-67,-66,-104,-103,-95,-94,-101,-100,-70,-69,-107,-106,-73,-72,-64,-63,-137,-89,-88,-110,-109,-35,-161,-38,-163,-52,-51,-55,-54,-114,-113,-92,-91,324,-195,-36,-39,326,-165,-126,-125,-61,-60,-44,340,342,343,344,-193,346,-45,354,356,358,361,362,3
63,364,365,-182,-26,-25,-23,-24,-27,370,371,375,376,379,380,381,382,-46,383,-186,-30,-31,-184,387,-150,390,391,392,-180,394,-22,-140,399,-194,-132,-131,402,-129,-128,406,-133,-115,-96,-118,-81,-78,-47,-84,-121,-74,-56,-65,-102,-183,-93,-99,-68,-105,-71,-62,-136,-138,-87,-187,-185,-108,-50,-53,-112,-181,-90,-37,-40,-139,430,431,-124,432,-42,-59,-192,443,-28,-168,-21,455,-164,-130,-127,-77,-145,-143,-41,492,496,-149,-148,-167,-43,-169,-153,-151,-159,-111,-171,-170,-155,-157,526,-166,-147,-175,-174,-173,-172,-146,-144,-154,-152,-156,-158,]),'ACOTH':([0,1,6,15,18,38,47,52,70,78,87,94,100,108,121,125,129,132,136,139,141,144,148,154,157,161,163,167,172,173,174,175,176,178,181,186,189,193,196,200,203,206,207,208,209,210,211,212,213,217,227,230,233,237,244,248,251,254,265,266,272,280,281,282,290,301,311,315,316,322,325,329,331,335,345,355,398,407,415,420,421,422,423,424,426,427,433,444,451,459,460,461,462,464,466,471,475,479,482,483,488,498,499,500,511,512,513,527,528,529,530,535,536,557,562,],[10,10,10,10,10,10,10,10,10,10,10,10,10,10,10,10,10,10,10,10,10,10,10,10,10,10,10,10,10,10,10,10,10,10,10,10,10,10,10,10,10,10,10,10,10,10,10,10,10,10,10,10,10,10,10,10,10,10,10,10,10,10,10,10,10,10,10,10,10,10,10,10,10,10,10,10,10,10,10,10,10,10,10,10,10,10,10,10,10,10,10,10,10,10,10,10,10,10,10,10,10,10,10,10,10,10,10,10,10,10,10,10,10,10,10,]),'OMEGA_LOWER':([0,1,6,15,18,38,47,52,70,78,87,94,100,108,121,125,129,132,136,139,141,144,148,154,157,161,163,167,172,173,174,175,176,178,181,186,189,193,196,200,203,206,207,208,209,210,211,212,213,217,227,230,233,237,244,248,251,254,265,266,272,280,281,282,290,301,311,315,316,322,325,329,331,335,345,355,398,407,415,420,421,422,423,424,426,427,433,444,451,459,460,461,462,464,466,471,475,479,482,483,488,498,499,500,511,512,513,527,528,529,530,535,536,557,562,],[11,11,11,11,11,11,11,11,11,11,11,11,11,11,11,11,11,11,11,11,11,11,11,11,11,11,11,11,11,11,11,11,11,11,11,11,11,11,11,11,11,11,11,11,11,11,11,11,11,11,11,11,11,11,11,11,11,11,11,11,11,11,
11,11,11,11,11,11,11,11,11,11,11,11,11,11,11,11,11,11,11,11,11,11,11,11,11,11,11,11,11,11,11,11,11,11,11,11,11,11,11,11,11,11,11,11,11,11,11,11,11,11,11,11,11,]),'LT':([2,7,8,9,11,12,13,17,21,22,23,25,26,27,28,30,31,32,33,34,35,37,39,43,45,48,49,53,54,55,63,64,65,66,68,69,72,73,74,75,76,79,80,81,82,83,84,85,86,88,89,90,91,92,95,97,101,103,104,105,106,107,109,110,112,113,116,117,118,120,122,124,126,127,128,130,131,133,134,135,137,140,142,143,145,147,149,151,153,155,156,158,159,160,162,164,165,166,177,179,180,182,185,187,188,190,192,194,195,197,198,202,204,215,218,222,223,224,225,226,228,229,231,232,234,236,238,241,242,243,245,246,249,253,255,256,264,279,283,284,285,286,287,299,302,308,309,312,317,321,324,326,328,330,332,334,336,340,342,343,344,346,354,356,358,361,362,363,364,365,366,370,371,375,376,379,380,381,382,383,384,385,387,390,391,392,393,394,395,396,399,402,404,406,418,419,428,430,431,432,443,448,450,468,474,476,477,478,481,486,487,492,496,503,504,507,510,526,537,538,539,540,541,546,547,560,561,564,565,],[-218,-17,-242,-18,-216,-205,-6,-233,-245,-236,-14,-10,-13,-238,-247,-237,-210,-20,-207,-209,-29,-12,-228,-5,-202,-32,-234,-212,-235,-213,-215,-203,-204,-211,-222,-9,-162,-206,-244,-229,-208,-219,-214,-227,-226,-240,209,-231,-246,-16,-224,-225,-11,-15,-4,-19,-248,-217,-220,-8,-221,-160,-239,-7,-241,-230,-243,-232,-223,-134,-135,-117,-116,-34,-98,-97,-120,-119,-33,-83,-82,-80,-79,-49,-48,-86,-85,209,-123,-122,-76,-75,-58,-57,-67,-66,-104,-103,-95,-94,-101,-100,-70,-69,-107,-106,-73,-72,-64,-63,-137,-89,-88,-110,-109,-35,-161,-38,-163,-52,-51,-55,-54,-114,-113,-92,-91,-195,-36,-39,-165,-126,-125,-61,-60,-44,-45,-182,-26,-25,-23,-24,-27,-46,-186,-30,-31,-184,-150,-180,-22,-140,-194,-132,-131,-129,-128,-133,-115,-96,-118,-81,-78,-47,-84,-121,-74,-56,-65,-102,-183,-93,-99,-68,-105,-71,-62,-136,-138,-87,-187,-185,-108,-50,-53,-112,-181,-90,-37,-40,-139,-124,-42,-59,-28,-168,-21,-164,-130,-127,-77,-145,-143,-41,-149,-148,-167,-43,-169,-153,-151,-159,-111,-171,-170,-
155,-157,-166,-147,-175,-174,-173,-172,-146,-144,-154,-152,-156,-158,]),'PLUS':([0,1,2,6,7,8,9,11,12,13,15,17,18,21,22,23,25,26,27,28,30,31,32,33,34,35,37,38,39,43,45,47,48,49,52,53,54,55,63,64,65,66,68,69,70,72,73,74,75,76,78,79,80,81,82,83,84,85,86,87,88,89,90,91,92,94,95,97,100,101,103,104,105,106,107,108,109,110,112,113,116,117,118,119,120,121,122,124,125,126,127,128,129,130,131,132,133,134,135,136,137,138,139,140,141,142,143,144,145,147,148,149,151,153,154,155,156,157,158,159,160,161,162,163,164,165,166,167,170,172,173,174,175,176,177,178,179,180,181,182,183,185,186,187,188,189,190,192,193,194,195,196,197,198,200,201,202,203,204,206,207,208,209,210,211,212,213,215,217,218,221,222,223,224,225,226,227,228,229,230,231,232,233,234,236,237,238,239,241,242,243,244,245,246,248,249,251,253,254,255,256,257,259,260,262,263,264,265,266,267,268,269,271,272,275,276,277,278,279,280,281,282,283,284,285,286,287,288,289,290,292,293,295,296,299,300,301,302,303,304,305,306,307,308,309,310,311,312,314,315,316,317,318,319,320,321,322,323,324,325,326,328,329,330,331,332,334,335,336,337,339,340,342,343,344,345,346,348,352,354,355,356,358,361,362,363,364,365,366,368,369,370,371,372,375,376,379,380,381,382,383,384,385,387,388,389,390,391,392,393,394,395,396,397,398,399,402,404,406,407,408,415,416,418,419,420,421,422,423,424,426,427,428,429,430,431,432,433,434,435,442,443,444,445,447,448,449,450,451,452,454,456,459,460,461,462,464,466,468,471,473,474,475,476,477,478,479,480,481,482,483,484,486,487,488,490,492,494,496,497,498,499,500,503,504,507,510,511,512,513,523,524,525,526,527,528,529,530,535,536,537,538,539,540,541,546,547,557,560,561,562,564,565,],[15,15,-218,15,-17,-242,-18,-216,-205,-6,15,-233,15,-245,-236,-14,-10,-13,-238,-247,-237,-210,-20,-207,-209,-29,-12,15,-228,-5,-202,15,-32,-234,15,-212,-235,-213,-215,-203,-204,-211,-222,-9,15,-162,-206,-244,-229,-208,15,-219,-214,-227,-226,-240,211,-231,-246,15,-16,-224,-225,-11,-15,15,-4,-19,15,-248,-217,-220,-8,-221,-160,15,-239,-7,-24
1,-230,-243,-232,-223,211,-134,15,-135,-117,15,-116,-34,-98,15,-97,-120,15,-119,-33,-83,15,-82,211,15,-80,15,-79,-49,15,-48,-86,15,-85,211,-123,15,-122,-76,15,-75,-58,-57,15,-67,15,-66,-104,-103,15,211,15,15,15,15,15,-95,15,-94,-101,15,-100,211,-70,15,-69,-107,15,-106,-73,15,-72,-64,15,-63,-137,15,211,-89,15,-88,15,15,15,15,15,15,15,15,-110,15,-109,211,-35,-161,-38,-163,-52,15,-51,-55,15,-54,-114,15,-113,-92,15,-91,211,-195,-36,-39,15,-165,-126,15,-125,15,-61,15,-60,-44,211,211,211,211,211,-45,15,15,211,211,211,211,15,211,211,211,211,-182,15,15,15,-26,-25,-23,-24,-27,211,211,15,211,211,211,211,-46,211,15,-186,211,211,211,211,211,-30,-31,211,15,-184,211,15,15,-150,211,211,211,-180,15,211,-22,15,-140,-194,15,-132,15,-131,-129,15,-128,211,211,-133,-115,-96,-118,15,-81,211,211,-78,15,-47,-84,-121,-74,-56,-65,-102,-183,211,211,-93,-99,211,-68,-105,-71,-62,-136,-138,-87,-187,-185,-108,211,211,-50,-53,-112,-181,-90,-37,-40,211,15,-139,-124,-42,-59,15,211,15,211,-28,-168,15,15,15,15,15,15,15,-21,-4,-164,-130,-127,15,-32,461,211,-77,15,211,211,211,211,211,15,211,211,211,15,15,15,15,15,15,-41,15,211,-149,15,-148,-167,-43,500,502,211,15,15,211,-153,211,15,211,-159,211,-111,211,15,15,15,211,211,211,-157,15,15,15,211,211,211,-166,15,15,15,15,15,15,-147,211,211,211,211,211,211,15,-154,211,15,211,-158,]),'TAN':([0,1,6,15,18,38,47,52,70,78,87,94,100,108,121,125,129,132,136,139,141,144,148,154,157,161,163,167,172,173,174,175,176,178,181,186,189,193,196,200,203,206,207,208,209,210,211,212,213,217,227,230,233,237,244,248,251,254,265,266,272,280,281,282,290,301,311,315,316,322,325,329,331,335,345,355,398,407,415,420,421,422,423,424,426,427,433,444,451,459,460,461,462,464,466,471,475,479,482,483,488,498,499,500,511,512,513,527,528,529,530,535,536,557,562,],[16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16
,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,]),'IOTA':([0,1,6,15,18,38,47,52,70,78,87,94,100,108,121,125,129,132,136,139,141,144,148,154,157,161,163,167,172,173,174,175,176,178,181,186,189,193,196,200,203,206,207,208,209,210,211,212,213,217,227,230,233,237,244,248,251,254,265,266,272,280,281,282,290,301,311,315,316,322,325,329,331,335,345,355,398,407,415,420,421,422,423,424,426,427,433,444,451,459,460,461,462,464,466,471,475,479,482,483,488,498,499,500,511,512,513,527,528,529,530,535,536,557,562,],[17,17,17,17,17,17,17,17,17,17,17,17,17,17,17,17,17,17,17,17,17,17,17,17,17,17,17,17,17,17,17,17,17,17,17,17,17,17,17,17,17,17,17,17,17,17,17,17,17,17,17,17,17,17,17,17,17,17,17,17,17,17,17,17,17,17,17,17,17,17,17,17,17,17,17,17,17,17,17,17,17,17,17,17,17,17,17,17,17,17,17,17,17,17,17,17,17,17,17,17,17,17,17,17,17,17,17,17,17,17,17,17,17,17,17,]),'PRIME':([110,241,245,328,],[241,-195,328,-194,]),'LCEIL':([0,1,6,15,18,38,47,52,70,78,87,94,100,108,121,125,129,132,136,139,141,144,148,154,157,161,163,167,172,173,174,175,176,178,181,186,189,193,196,200,203,206,207,208,209,210,211,212,213,217,227,230,233,237,244,248,251,254,265,266,272,280,281,282,290,301,311,315,316,322,325,329,331,335,345,355,398,407,415,420,421,422,423,424,426,427,433,444,451,459,460,461,462,464,466,471,475,479,482,483,488,498,499,500,511,512,513,527,528,529,530,535,536,557,562,],[18,18,18,18,18,18,18,18,18,18,18,18,18,18,18,18,18,18,18,18,18,18,18,18,18,18,18,18,18,18,18,18,18,18,18,18,18,18,18,18,18,18,18,18,18,18,18,18,18,18,18,18,18,18,18,18,18,18,18,18,18,18,18,18,18,18,18,18,18,18,18,18,18,18,18,18,18,18,18,18,18,18,18,18,18,18,18,18,18,18,18,18,18,18,18,18,18,18,18,18,18,18,18,18,18,18,18,18,18,18,18,18,18,18,18,]),'GT':([2,7,8,9,11,12,13,17,21,22,23,25,26,27,28,30,31,32,33,34,35,37,39,43,45,48,49,53,54,55,63,64,65,66,68,69,72,73,74,75,76,79,80,81,82,83,84,85,86,88,89,90,91,92,95,97,101,103,104,105,106,107,109,110,112,113,116,117,118,1
20,122,124,126,127,128,130,131,133,134,135,137,140,142,143,145,147,149,151,153,155,156,158,159,160,162,164,165,166,177,179,180,182,185,187,188,190,192,194,195,197,198,202,204,215,218,222,223,224,225,226,228,229,231,232,234,236,238,241,242,243,245,246,249,253,255,256,264,279,283,284,285,286,287,299,302,308,309,312,317,321,324,326,328,330,332,334,336,340,342,343,344,346,354,356,358,361,362,363,364,365,366,370,371,375,376,379,380,381,382,383,384,385,387,390,391,392,393,394,395,396,399,402,404,406,418,419,428,430,431,432,443,448,450,468,474,476,477,478,481,486,487,492,496,503,504,507,510,526,537,538,539,540,541,546,547,560,561,564,565,],[-218,-17,-242,-18,-216,-205,-6,-233,-245,-236,-14,-10,-13,-238,-247,-237,-210,-20,-207,-209,-29,-12,-228,-5,-202,-32,-234,-212,-235,-213,-215,-203,-204,-211,-222,-9,-162,-206,-244,-229,-208,-219,-214,-227,-226,-240,210,-231,-246,-16,-224,-225,-11,-15,-4,-19,-248,-217,-220,-8,-221,-160,-239,-7,-241,-230,-243,-232,-223,-134,-135,-117,-116,-34,-98,-97,-120,-119,-33,-83,-82,-80,-79,-49,-48,-86,-85,210,-123,-122,-76,-75,-58,-57,-67,-66,-104,-103,-95,-94,-101,-100,-70,-69,-107,-106,-73,-72,-64,-63,-137,-89,-88,-110,-109,-35,-161,-38,-163,-52,-51,-55,-54,-114,-113,-92,-91,-195,-36,-39,-165,-126,-125,-61,-60,-44,-45,-182,-26,-25,-23,-24,-27,-46,-186,-30,-31,-184,-150,-180,-22,-140,-194,-132,-131,-129,-128,-133,-115,-96,-118,-81,-78,-47,-84,-121,-74,-56,-65,-102,-183,-93,-99,-68,-105,-71,-62,-136,-138,-87,-187,-185,-108,-50,-53,-112,-181,-90,-37,-40,-139,-124,-42,-59,-28,-168,-21,-164,-130,-127,-77,-145,-143,-41,-149,-148,-167,-43,-169,-153,-151,-159,-111,-171,-170,-155,-157,-166,-147,-175,-174,-173,-172,-146,-144,-154,-152,-156,-158,]),'FRAC':([0,1,6,15,18,38,47,52,70,78,87,94,100,108,121,125,129,132,136,139,141,144,148,154,157,161,163,167,172,173,174,175,176,178,181,186,189,193,196,200,203,206,207,208,209,210,211,212,213,217,227,230,233,237,244,248,251,254,265,266,272,280,281,282,290,301,311,315,316,322,325,329,331,335,345,355,398,407,415,420,
421,422,423,424,426,427,433,444,451,459,460,461,462,464,466,471,475,479,482,483,488,498,499,500,511,512,513,527,528,529,530,535,536,557,562,],[19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,]),'RBRACE':([2,7,8,9,11,12,13,17,21,22,23,25,26,27,28,30,31,32,33,34,35,37,39,43,45,48,49,53,54,55,63,64,65,66,68,69,72,73,74,75,76,79,80,81,82,83,85,86,88,89,90,91,92,95,97,101,103,104,105,106,107,109,110,112,113,116,117,118,120,122,124,126,127,128,130,131,133,134,135,137,140,142,143,145,147,149,153,155,156,158,159,160,162,164,165,166,177,179,180,182,185,187,188,190,192,194,195,197,198,202,204,215,218,222,223,224,225,226,228,229,231,232,234,236,238,241,242,243,245,246,249,253,255,256,264,265,266,267,279,283,284,285,286,287,299,302,308,309,312,317,321,324,326,328,330,332,334,336,337,340,342,343,344,346,348,352,354,356,357,358,361,362,363,364,365,366,369,370,371,372,374,375,376,378,379,380,381,382,383,384,385,386,387,388,389,390,391,392,393,394,395,396,397,399,402,404,406,418,419,428,430,431,432,434,435,436,438,440,442,443,445,446,448,449,450,455,456,460,461,462,463,465,466,468,474,476,477,478,481,484,486,487,490,491,492,494,496,497,499,500,501,502,503,504,507,510,515,516,517,518,523,524,526,537,538,539,540,541,546,547,549,552,553,554,556,558,559,560,561,563,564,565,],[-218,-17,-242,-18,-216,-205,-6,-233,-245,-236,-14,-10,-13,-238,-247,-237,-210,-20,-207,-209,-29,-12,-228,-5,-202,-32,-234,-212,-235,-213,-215,-203,-204,-211,-222,-9,-162,-206,-244,-229,-208,-219,-214,-227,-226,-240,-231,-246,-16,-224,-225,-11,-15,-4,-19,-248,-217,-220,-8,-221,-160,-239,-7,-241,-230,-243,-232,-223,-134,-135,-117,-116,-34,-98,-97,-120,-119,-33,-83,-82,-80,-79,-49,-48,-86,-85,-123,-122,-76,-75,-58,-57,-67,-66
,-104,-103,-95,-94,-101,-100,-70,-69,-107,-106,-73,-72,-64,-63,-137,-89,-88,-110,-109,-35,-161,-38,-163,-52,-51,-55,-54,-114,-113,-92,-91,-195,-36,-39,-165,-126,-125,-61,-60,-44,-45,349,350,353,-182,-26,-25,-23,-24,-27,-46,-186,-30,-31,-184,-150,-180,-22,-140,-194,-132,-131,-129,-128,404,-133,-115,-96,-118,-81,410,414,-78,-47,417,-84,-121,-74,-56,-65,-102,-183,418,-93,-99,419,422,-68,-105,424,-71,-62,-136,-138,-87,-187,-185,425,-108,426,427,-50,-53,-112,-181,-90,-37,-40,428,-139,-124,-42,-59,-28,-168,-21,-164,-130,-127,-32,459,462,464,466,468,-77,470,-142,-145,472,-143,477,478,482,483,485,486,488,489,-41,-149,-148,-167,-43,-169,505,-153,-151,509,510,-159,-141,-111,513,515,516,517,518,-171,-170,-155,-157,527,528,529,530,535,536,-166,-147,-175,-174,-173,-172,-146,-144,553,556,557,558,560,562,563,-154,-152,565,-156,-158,]),'ATAN':([0,1,6,15,18,38,47,52,70,78,87,94,100,108,121,125,129,132,136,139,141,144,148,154,157,161,163,167,172,173,174,175,176,178,181,186,189,193,196,200,203,206,207,208,209,210,211,212,213,217,227,230,233,237,244,248,251,254,265,266,272,280,281,282,290,301,311,315,316,322,325,329,331,335,345,355,398,407,415,420,421,422,423,424,426,427,433,444,451,459,460,461,462,464,466,471,475,479,482,483,488,498,499,500,511,512,513,527,528,529,530,535,536,557,562,],[20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,]),'CHI':([0,1,6,15,18,38,47,52,70,78,87,94,100,108,121,125,129,132,136,139,141,144,148,154,157,161,163,167,172,173,174,175,176,178,181,186,189,193,196,200,203,206,207,208,209,210,211,212,213,217,227,230,233,237,244,248,251,254,265,266,272,280,281,282,290,301,311,315,316,322,325,329,331,335,345,355,398,407,415,420,421,422,423,424,426,427,433,444,451,459,460,461,46
2,464,466,471,475,479,482,483,488,498,499,500,511,512,513,527,528,529,530,535,536,557,562,],[21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,]),'ASINH':([0,1,6,15,18,38,47,52,70,78,87,94,100,108,121,125,129,132,136,139,141,144,148,154,157,161,163,167,172,173,174,175,176,178,181,186,189,193,196,200,203,206,207,208,209,210,211,212,213,217,227,230,233,237,244,248,251,254,265,266,272,280,281,282,290,301,311,315,316,322,325,329,331,335,345,355,398,407,415,420,421,422,423,424,426,427,433,444,451,459,460,461,462,464,466,471,475,479,482,483,488,498,499,500,511,512,513,527,528,529,530,535,536,557,562,],[24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,]),'RCEIL':([2,7,8,9,11,12,13,17,21,22,23,25,26,27,28,30,31,32,33,34,35,37,39,43,45,48,49,53,54,55,63,64,65,66,68,69,72,73,74,75,76,79,80,81,82,83,85,86,88,89,90,91,92,95,97,101,103,104,105,106,107,109,110,112,113,116,117,118,120,122,124,126,127,128,130,131,133,134,135,137,138,140,142,143,145,147,149,153,155,156,158,159,160,162,164,165,166,177,179,180,182,185,187,188,190,192,194,195,197,198,202,204,215,218,222,223,224,225,226,228,229,231,232,234,236,238,241,242,243,245,246,249,253,255,256,264,279,283,284,285,286,287,299,302,308,309,312,317,321,324,326,328,330,332,334,336,340,342,343,344,346,354,356,358,361,362,363,364,365,366,370,371,375,376,379,380,381,382,383,384,385,387,390,391,392,393,394,395,396,399,402,404,406,418,419,428,430,431,432,443,44
8,450,468,474,476,477,478,481,486,487,492,496,503,504,507,510,526,537,538,539,540,541,546,547,560,561,564,565,],[-218,-17,-242,-18,-216,-205,-6,-233,-245,-236,-14,-10,-13,-238,-247,-237,-210,-20,-207,-209,-29,-12,-228,-5,-202,-32,-234,-212,-235,-213,-215,-203,-204,-211,-222,-9,-162,-206,-244,-229,-208,-219,-214,-227,-226,-240,-231,-246,-16,-224,-225,-11,-15,-4,-19,-248,-217,-220,-8,-221,-160,-239,-7,-241,-230,-243,-232,-223,-134,-135,-117,-116,-34,-98,-97,-120,-119,-33,-83,-82,264,-80,-79,-49,-48,-86,-85,-123,-122,-76,-75,-58,-57,-67,-66,-104,-103,-95,-94,-101,-100,-70,-69,-107,-106,-73,-72,-64,-63,-137,-89,-88,-110,-109,-35,-161,-38,-163,-52,-51,-55,-54,-114,-113,-92,-91,-195,-36,-39,-165,-126,-125,-61,-60,-44,-45,-182,-26,-25,-23,-24,-27,-46,-186,-30,-31,-184,-150,-180,-22,-140,-194,-132,-131,-129,-128,-133,-115,-96,-118,-81,-78,-47,-84,-121,-74,-56,-65,-102,-183,-93,-99,-68,-105,-71,-62,-136,-138,-87,-187,-185,-108,-50,-53,-112,-181,-90,-37,-40,-139,-124,-42,-59,-28,-168,-21,-164,-130,-127,-77,-145,-143,-41,-149,-148,-167,-43,-169,-153,-151,-159,-111,-171,-170,-155,-157,-166,-147,-175,-174,-173,-172,-146,-144,-154,-152,-156,-158,]),'PHI':([0,1,6,15,18,38,47,52,70,78,87,94,100,108,121,125,129,132,136,139,141,144,148,154,157,161,163,167,172,173,174,175,176,178,181,186,189,193,196,200,203,206,207,208,209,210,211,212,213,217,227,230,233,237,244,248,251,254,265,266,272,280,281,282,290,301,311,315,316,322,325,329,331,335,345,355,398,407,415,420,421,422,423,424,426,427,433,444,451,459,460,461,462,464,466,471,475,479,482,483,488,498,499,500,511,512,513,527,528,529,530,535,536,557,562,],[22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,]),'PARTIAL':([139,412,441,508,522,],[266,439,
467,521,534,]),'FACTORIAL':([95,110,324,429,],[222,242,395,222,]),'MU':([0,1,6,15,18,38,47,52,70,78,87,94,100,108,121,125,129,132,136,139,141,144,148,154,157,161,163,167,172,173,174,175,176,178,181,186,189,193,196,200,203,206,207,208,209,210,211,212,213,217,227,230,233,237,244,248,251,254,265,266,272,280,281,282,290,301,311,315,316,322,325,329,331,335,345,355,398,407,415,420,421,422,423,424,426,427,433,444,451,459,460,461,462,464,466,471,475,479,482,483,488,498,499,500,511,512,513,527,528,529,530,535,536,557,562,],[54,54,54,54,54,54,54,54,54,54,54,54,54,54,54,54,54,54,54,54,54,54,54,54,54,54,54,54,54,54,54,54,54,54,54,54,54,54,54,54,54,54,54,54,54,54,54,54,54,54,54,54,54,54,54,54,54,54,54,54,54,54,54,54,54,54,54,54,54,54,54,54,54,54,54,54,54,54,54,54,54,54,54,54,54,54,54,54,54,54,54,54,54,54,54,54,54,54,54,54,54,54,54,54,54,54,54,54,54,54,54,54,54,54,54,]),'XI_LOWER':([0,1,6,15,18,38,47,52,70,78,87,94,100,108,121,125,129,132,136,139,141,144,148,154,157,161,163,167,172,173,174,175,176,178,181,186,189,193,196,200,203,206,207,208,209,210,211,212,213,217,227,230,233,237,244,248,251,254,265,266,272,280,281,282,290,301,311,315,316,322,325,329,331,335,345,355,398,407,415,420,421,422,423,424,426,427,433,444,451,459,460,461,462,464,466,471,475,479,482,483,488,498,499,500,511,512,513,527,528,529,530,535,536,557,562,],[64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,]),'RHO':([0,1,6,15,18,38,47,52,70,78,87,94,100,108,121,125,129,132,136,139,141,144,148,154,157,161,163,167,172,173,174,175,176,178,181,186,189,193,196,200,203,206,207,208,209,210,211,212,213,217,227,230,233,237,244,248,251,254,265,266,272,280,281,282,290,301,311,315,316,322,325,329,331,335,345,355,398,407,415,420,421,422,4
23,424,426,427,433,444,451,459,460,461,462,464,466,471,475,479,482,483,488,498,499,500,511,512,513,527,528,529,530,535,536,557,562,],[28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,]),'END_BMATRIX':([2,7,8,9,11,12,13,17,21,22,23,25,26,27,28,30,31,32,33,34,35,37,39,43,45,48,49,53,54,55,63,64,65,66,68,69,72,73,74,75,76,79,80,81,82,83,85,86,88,89,90,91,92,95,97,101,103,104,105,106,107,109,110,112,113,116,117,118,120,122,124,126,127,128,130,131,133,134,135,137,140,142,143,145,147,149,153,155,156,158,159,160,162,164,165,166,169,170,177,179,180,182,185,187,188,190,192,194,195,197,198,202,204,214,215,218,222,223,224,225,226,228,229,231,232,234,236,238,241,242,243,245,246,249,253,255,256,264,279,283,284,285,286,287,299,302,308,309,311,312,317,321,324,326,328,330,332,334,336,340,342,343,344,346,354,356,358,361,362,363,364,365,366,367,368,370,371,375,376,379,380,381,382,383,384,385,387,390,391,392,393,394,395,396,399,402,404,406,418,419,428,430,431,432,443,448,450,468,474,476,477,478,481,486,487,492,496,503,504,507,510,526,537,538,539,540,541,546,547,560,561,564,565,],[-218,-17,-242,-18,-216,-205,-6,-233,-245,-236,-14,-10,-13,-238,-247,-237,-210,-20,-207,-209,-29,-12,-228,-5,-202,-32,-234,-212,-235,-213,-215,-203,-204,-211,-222,-9,-162,-206,-244,-229,-208,-219,-214,-227,-226,-240,-231,-246,-16,-224,-225,-11,-15,-4,-19,-248,-217,-220,-8,-221,-160,-239,-7,-241,-230,-243,-232,-223,-134,-135,-117,-116,-34,-98,-97,-120,-119,-33,-83,-82,-80,-79,-49,-48,-86,-85,-123,-122,-76,-75,-58,-57,-67,-66,-104,-103,-191,-189,-95,-94,-101,-100,-70,-69,-107,-106,-73,-72,-64,-63,-137,-89,-88,312,-110,-109,-35,-161,-38,-163,-52,-51,-55,-54,-114,-113,-92,-91,-195,-36,-39,-165,-126,-125,-61,-60,-44,-45,-182
,-26,-25,-23,-24,-27,-46,-186,-30,-31,385,-184,-150,-180,-22,-140,-194,-132,-131,-129,-128,-133,-115,-96,-118,-81,-78,-47,-84,-121,-74,-56,-65,-102,-183,-190,-188,-93,-99,-68,-105,-71,-62,-136,-138,-87,-187,-185,-108,-50,-53,-112,-181,-90,-37,-40,-139,-124,-42,-59,-28,-168,-21,-164,-130,-127,-77,-145,-143,-41,-149,-148,-167,-43,-169,-153,-151,-159,-111,-171,-170,-155,-157,-166,-147,-175,-174,-173,-172,-146,-144,-154,-152,-156,-158,]),'CARET':([2,7,8,9,11,12,13,17,21,22,23,25,26,27,28,30,31,32,33,34,35,37,39,43,45,48,49,53,54,55,63,64,65,66,68,69,72,73,74,75,76,79,80,81,82,83,85,86,88,89,90,91,92,94,95,97,101,103,104,105,106,107,109,110,112,113,116,117,118,120,122,124,126,127,128,130,131,133,134,135,137,140,142,143,145,147,149,153,155,156,158,159,160,162,164,165,166,177,179,180,182,185,187,188,190,192,194,195,197,198,202,204,215,218,222,223,224,225,226,228,229,231,232,234,236,238,241,242,243,245,246,249,253,255,256,264,265,266,279,283,284,285,286,287,299,302,308,309,312,317,321,324,326,328,330,332,334,336,340,342,343,344,346,354,356,358,361,362,363,364,365,366,370,371,375,376,379,380,381,382,383,384,385,387,390,391,392,393,394,395,396,399,402,404,406,418,419,427,428,429,430,431,432,434,435,443,448,450,468,470,472,474,476,477,478,481,486,487,492,496,503,504,507,510,520,526,531,533,537,538,539,540,541,545,546,547,560,561,564,565,],[-218,-17,-242,-18,-216,-205,-6,-233,-245,-236,-14,-10,-13,-238,-247,-237,-210,-20,-207,-209,-29,-12,-228,-5,-202,171,-234,-212,-235,-213,-215,-203,-204,-211,-222,-9,-162,-206,-244,-229,-208,-219,-214,-227,-226,-240,-231,-246,-16,-224,-225,-11,-15,219,-4,-19,-248,-217,-220,-8,-221,-160,-239,240,-241,-230,-243,-232,-223,-134,-135,-117,-116,-34,-98,-97,-120,-119,-33,-83,-82,-80,-79,-49,-48,-86,-85,-123,-122,-76,-75,-58,-57,-67,-66,-104,-103,-95,-94,-101,-100,-70,-69,-107,-106,-73,-72,-64,-63,-137,-89,-88,-110,-109,-35,-161,-38,-163,-52,-51,-55,-54,-114,-113,-92,-91,-195,-36,-39,-165,-126,-125,-61,-60,-44,-45,347,351,-182,-26,-25,-23,-24,-27,-46
,-186,171,171,-184,-150,-180,-22,-140,-194,-132,-131,-129,-128,-133,-115,-96,-118,-81,-78,-47,-84,-121,-74,-56,-65,-102,-183,-93,-99,-68,-105,-71,-62,-136,-138,-87,-187,-185,-108,-50,-53,-112,-181,-90,-37,-40,-139,-124,-42,-59,-28,-168,453,-21,-4,-164,-130,-127,457,458,-77,-145,-143,-41,493,495,-149,-148,-167,-43,-169,-153,-151,-159,-111,-171,-170,-155,-157,532,-166,542,544,-147,-175,-174,-173,-172,551,-146,-144,-154,-152,-156,-158,]),'DELTA_LOWER':([0,1,6,15,18,38,47,52,70,78,87,94,100,108,121,125,129,132,136,139,141,144,148,154,157,161,163,167,172,173,174,175,176,178,181,186,189,193,196,200,203,206,207,208,209,210,211,212,213,217,227,230,233,237,244,248,251,254,265,266,272,280,281,282,290,301,311,315,316,322,325,329,331,335,345,355,398,407,415,420,421,422,423,424,426,427,433,444,451,459,460,461,462,464,466,471,475,479,482,483,488,498,499,500,511,512,513,527,528,529,530,535,536,557,562,],[31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,]),'ALPHA_LOWER':([0,1,6,15,18,38,47,52,70,78,87,94,100,108,121,125,129,132,136,139,141,144,148,154,157,161,163,167,172,173,174,175,176,178,181,186,189,193,196,200,203,206,207,208,209,210,211,212,213,217,227,230,233,237,244,248,251,254,265,266,272,280,281,282,290,301,311,315,316,322,325,329,331,335,345,355,398,407,415,420,421,422,423,424,426,427,433,444,451,459,460,461,462,464,466,471,475,479,482,483,488,498,499,500,511,512,513,527,528,529,530,535,536,557,562,],[103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,1
03,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,]),'SIGMA_LOWER':([0,1,6,15,18,38,47,52,70,78,87,94,100,108,121,125,129,132,136,139,141,144,148,154,157,161,163,167,172,173,174,175,176,178,181,186,189,193,196,200,203,206,207,208,209,210,211,212,213,217,227,230,233,237,244,248,251,254,265,266,272,280,281,282,290,301,311,315,316,322,325,329,331,335,345,355,398,407,415,420,421,422,423,424,426,427,433,444,451,459,460,461,462,464,466,471,475,479,482,483,488,498,499,500,511,512,513,527,528,529,530,535,536,557,562,],[33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,]),'ETA_LOWER':([0,1,6,15,18,38,47,52,70,78,87,94,100,108,121,125,129,132,136,139,141,144,148,154,157,161,163,167,172,173,174,175,176,178,181,186,189,193,196,200,203,206,207,208,209,210,211,212,213,217,227,230,233,237,244,248,251,254,265,266,272,280,281,282,290,301,311,315,316,322,325,329,331,335,345,355,398,407,415,420,421,422,423,424,426,427,433,444,451,459,460,461,462,464,466,471,475,479,482,483,488,498,499,500,511,512,513,527,528,529,530,535,536,557,562,],[34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,]),'ASEC':([0,1,6,15,18,38,47,52,70,78,87,94,100,108,121,125,129,132,136,139,141,144,148,154,157,161,163,167,172,173,174,175,176,178,181,186,189,193,196,200,203,206,207,208,209,210,211,212,213,217,227,23
0,233,237,244,248,251,254,265,266,272,280,281,282,290,301,311,315,316,322,325,329,331,335,345,355,398,407,415,420,421,422,423,424,426,427,433,444,451,459,460,461,462,464,466,471,475,479,482,483,488,498,499,500,511,512,513,527,528,529,530,535,536,557,562,],[36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,]),'E':([0,1,6,15,18,38,47,52,70,78,87,94,95,100,108,121,125,129,132,136,139,141,144,148,154,157,161,163,167,172,173,174,175,176,178,181,186,189,193,196,200,203,206,207,208,209,210,211,212,213,217,227,230,233,237,244,248,251,254,265,266,272,280,281,282,290,301,311,315,316,322,325,329,331,335,345,355,398,407,415,420,421,422,423,424,426,427,429,433,444,451,459,460,461,462,464,466,471,475,479,482,483,488,498,499,500,511,512,513,527,528,529,530,535,536,557,562,],[72,72,72,72,72,72,72,72,72,72,72,72,225,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,225,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,]),'BEGIN_CASE':([0,],[38,]),'GAMMA':([0,1,6,15,18,38,47,52,70,78,87,94,100,108,121,125,129,132,136,139,141,144,148,154,157,161,163,167,172,173,174,175,176,178,181,186,189,193,196,200,203,206,207,208,209,210,211,212,213,217,227,230,233,237,244,248,251,254,265,266,272,280,281,282,290,301,311,315,316,322,325,329,331,335,345,355,398,407,415,420,421,422,423,424,426,427,433,444,451,459,460,461,462,464,466,471,475,479,482,483,488,498,499,500,511,512,513,527,528,529,530,535,536,557,562,],[39,39,39,39,39,39,39,39,39,39,39,39,39,39,39,39,39,39,39,39,39,39,39,39,39,39,39,39,39,39,39,3
9,39,39,39,39,39,39,39,39,39,39,39,39,39,39,39,39,39,39,39,39,39,39,39,39,39,39,39,39,39,39,39,39,39,39,39,39,39,39,39,39,39,39,39,39,39,39,39,39,39,39,39,39,39,39,39,39,39,39,39,39,39,39,39,39,39,39,39,39,39,39,39,39,39,39,39,39,39,39,39,39,39,39,39,]),'DEG':([0,1,6,15,18,38,47,52,70,78,87,94,100,108,121,125,129,132,136,139,141,144,148,154,157,161,163,167,172,173,174,175,176,178,181,186,189,193,196,200,203,206,207,208,209,210,211,212,213,217,227,230,233,237,244,248,251,254,265,266,272,280,281,282,290,301,311,315,316,322,325,329,331,335,345,355,398,407,415,420,421,422,423,424,426,427,433,444,451,459,460,461,462,464,466,471,475,479,482,483,488,498,499,500,511,512,513,527,528,529,530,535,536,557,562,],[40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,]),'TANH':([0,1,6,15,18,38,47,52,70,78,87,94,100,108,121,125,129,132,136,139,141,144,148,154,157,161,163,167,172,173,174,175,176,178,181,186,189,193,196,200,203,206,207,208,209,210,211,212,213,217,227,230,233,237,244,248,251,254,265,266,272,280,281,282,290,301,311,315,316,322,325,329,331,335,345,355,398,407,415,420,421,422,423,424,426,427,433,444,451,459,460,461,462,464,466,471,475,479,482,483,488,498,499,500,511,512,513,527,528,529,530,535,536,557,562,],[41,41,41,41,41,41,41,41,41,41,41,41,41,41,41,41,41,41,41,41,41,41,41,41,41,41,41,41,41,41,41,41,41,41,41,41,41,41,41,41,41,41,41,41,41,41,41,41,41,41,41,41,41,41,41,41,41,41,41,41,41,41,41,41,41,41,41,41,41,41,41,41,41,41,41,41,41,41,41,41,41,41,41,41,41,41,41,41,41,41,41,41,41,41,41,41,41,41,41,41,41,41,41,41,41,41,41,41,41,41,41,41,41,41,41,]),'MINUS':([0,1,2,6,7,8,9,11,12,13,15,17,18,21,22,23,25,26,27,28,30,31,32,33,34,35,37,38,39,43,45,47,48,49,52,53,54,55,63,64,65,66,68,69,70
,72,73,74,75,76,78,79,80,81,82,83,84,85,86,87,88,89,90,91,92,94,95,97,100,101,103,104,105,106,107,108,109,110,112,113,116,117,118,119,120,121,122,124,125,126,127,128,129,130,131,132,133,134,135,136,137,138,139,140,141,142,143,144,145,147,148,149,151,153,154,155,156,157,158,159,160,161,162,163,164,165,166,167,170,172,173,174,175,176,177,178,179,180,181,182,183,185,186,187,188,189,190,192,193,194,195,196,197,198,200,201,202,203,204,206,207,208,209,210,211,212,213,215,217,218,221,222,223,224,225,226,227,228,229,230,231,232,233,234,236,237,238,239,241,242,243,244,245,246,248,249,251,253,254,255,256,257,259,260,262,263,264,265,266,267,268,269,271,272,275,276,277,278,279,280,281,282,283,284,285,286,287,288,289,290,292,293,295,296,299,300,301,302,303,304,305,306,307,308,309,310,311,312,314,315,316,317,318,319,320,321,322,323,324,325,326,328,329,330,331,332,334,335,336,337,339,340,342,343,344,345,346,348,352,354,355,356,358,361,362,363,364,365,366,368,369,370,371,372,375,376,379,380,381,382,383,384,385,387,388,389,390,391,392,393,394,395,396,397,398,399,402,404,406,407,408,415,416,418,419,420,421,422,423,424,426,427,428,429,430,431,432,433,434,435,442,443,444,445,447,448,449,450,451,452,454,456,459,460,461,462,464,466,468,471,473,474,475,476,477,478,479,480,481,482,483,484,486,487,488,490,492,494,496,497,498,499,500,503,504,507,510,511,512,513,523,524,525,526,527,528,529,530,535,536,537,538,539,540,541,546,547,557,560,561,562,564,565,],[6,6,-218,6,-17,-242,-18,-216,-205,-6,6,-233,6,-245,-236,-14,-10,-13,-238,-247,-237,-210,-20,-207,-209,-29,-12,6,-228,-5,-202,6,-32,-234,6,-212,-235,-213,-215,-203,-204,-211,-222,-9,6,-162,-206,-244,-229,-208,6,-219,-214,-227,-226,-240,212,-231,-246,6,-16,-224,-225,-11,-15,6,-4,-19,6,-248,-217,-220,-8,-221,-160,6,-239,-7,-241,-230,-243,-232,-223,212,-134,6,-135,-117,6,-116,-34,-98,6,-97,-120,6,-119,-33,-83,6,-82,212,6,-80,6,-79,-49,6,-48,-86,6,-85,212,-123,6,-122,-76,6,-75,-58,-57,6,-67,6,-66,-104,-103,6,212,6,6,6,6,6,-95,6,-94,-101,6,-100,21
2,-70,6,-69,-107,6,-106,-73,6,-72,-64,6,-63,-137,6,212,-89,6,-88,6,6,6,6,6,6,6,6,-110,6,-109,212,-35,-161,-38,-163,-52,6,-51,-55,6,-54,-114,6,-113,-92,6,-91,212,-195,-36,-39,6,-165,-126,6,-125,6,-61,6,-60,-44,212,212,212,212,212,-45,6,6,212,212,212,212,6,212,212,212,212,-182,6,6,6,-26,-25,-23,-24,-27,212,212,6,212,212,212,212,-46,212,6,-186,212,212,212,212,212,-30,-31,212,6,-184,212,6,6,-150,212,212,212,-180,6,212,-22,6,-140,-194,6,-132,6,-131,-129,6,-128,212,212,-133,-115,-96,-118,6,-81,212,212,-78,6,-47,-84,-121,-74,-56,-65,-102,-183,212,212,-93,-99,212,-68,-105,-71,-62,-136,-138,-87,-187,-185,-108,212,212,-50,-53,-112,-181,-90,-37,-40,212,6,-139,-124,-42,-59,6,212,6,212,-28,-168,6,6,6,6,6,6,6,-21,-4,-164,-130,-127,6,-32,460,212,-77,6,212,212,212,212,212,6,212,212,212,6,6,6,6,6,6,-41,6,212,-149,6,-148,-167,-43,499,501,212,6,6,212,-153,212,6,212,-159,212,-111,212,6,6,6,212,212,212,-157,6,6,6,212,212,212,-166,6,6,6,6,6,6,-147,212,212,212,212,212,212,6,-154,212,6,212,-158,]),'BACKSLASHES':([2,7,8,9,11,12,13,17,21,22,23,25,26,27,28,30,31,32,33,34,35,37,39,43,45,48,49,53,54,55,63,64,65,66,68,69,72,73,74,75,76,79,80,81,82,83,85,86,88,89,90,91,92,95,97,101,103,104,105,106,107,109,110,112,113,116,117,118,120,122,124,126,127,128,130,131,133,134,135,137,140,142,143,145,147,149,150,152,153,155,156,158,159,160,162,164,165,166,168,169,170,177,179,180,182,185,187,188,190,192,194,195,197,198,202,204,205,214,215,218,222,223,224,225,226,228,229,231,232,234,235,236,238,241,242,243,245,246,249,253,255,256,264,279,283,284,285,286,287,299,302,303,304,305,306,307,308,309,310,312,317,321,324,326,328,330,332,334,336,340,342,343,344,346,354,356,358,359,361,362,363,364,365,366,367,368,370,371,375,376,379,380,381,382,383,384,385,387,390,391,392,393,394,395,396,399,402,404,406,418,419,428,430,431,432,443,448,450,468,474,476,477,478,481,486,487,492,496,503,504,507,510,526,537,538,539,540,541,546,547,560,561,564,565,],[-218,-17,-242,-18,-216,-205,-6,-233,-245,-236,-14,-10,-13,-238,-247,-237,-2
10,-20,-207,-209,-29,-12,-228,-5,-202,-32,-234,-212,-235,-213,-215,-203,-204,-211,-222,-9,-162,-206,-244,-229,-208,-219,-214,-227,-226,-240,-231,-246,-16,-224,-225,-11,-15,-4,-19,-248,-217,-220,-8,-221,-160,-239,-7,-241,-230,-243,-232,-223,-134,-135,-117,-116,-34,-98,-97,-120,-119,-33,-83,-82,-80,-79,-49,-48,-86,-85,-179,272,-123,-122,-76,-75,-58,-57,-67,-66,-104,-103,280,-191,-189,-95,-94,-101,-100,-70,-69,-107,-106,-73,-72,-64,-63,-137,-89,-88,301,311,-110,-109,-35,-161,-38,-163,-52,-51,-55,-54,-114,-113,322,-92,-91,-195,-36,-39,-165,-126,-125,-61,-60,-44,-45,-182,-26,-25,-23,-24,-27,-46,-186,-199,-196,-201,-198,-200,-30,-31,-197,-184,-150,-180,-22,-140,-194,-132,-131,-129,-128,-133,-115,-96,-118,-81,-78,-47,-84,-178,-121,-74,-56,-65,-102,-183,-190,-188,-93,-99,-68,-105,-71,-62,-136,-138,-87,-187,-185,-108,-50,-53,-112,-181,-90,-37,-40,-139,-124,-42,-59,-28,-168,-21,-164,-130,-127,-77,-145,-143,-41,-149,-148,-167,-43,-169,-153,-151,-159,-111,-171,-170,-155,-157,-166,-147,-175,-174,-173,-172,-146,-144,-154,-152,-156,-158,]),'ACOS':([0,1,6,15,18,38,47,52,70,78,87,94,100,108,121,125,129,132,136,139,141,144,148,154,157,161,163,167,172,173,174,175,176,178,181,186,189,193,196,200,203,206,207,208,209,210,211,212,213,217,227,230,233,237,244,248,251,254,265,266,272,280,281,282,290,301,311,315,316,322,325,329,331,335,345,355,398,407,415,420,421,422,423,424,426,427,433,444,451,459,460,461,462,464,466,471,475,479,482,483,488,498,499,500,511,512,513,527,528,529,530,535,536,557,562,],[44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,]),'PI':([0,1,6,15,18,38,47,52,70,78,87,94,100,108,121,125,129,132,136,139,141,144,148,154,157,161,163,167,172,173,174,175,176,178,181,186,189,193,196,200,20
3,206,207,208,209,210,211,212,213,217,227,230,233,237,244,248,251,254,265,266,272,280,281,282,290,301,311,315,316,322,325,329,331,335,345,355,398,407,415,420,421,422,423,424,426,427,433,444,451,459,460,461,462,464,466,471,475,479,482,483,488,498,499,500,511,512,513,527,528,529,530,535,536,557,562,],[45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,]),'ACOT':([0,1,6,15,18,38,47,52,70,78,87,94,100,108,121,125,129,132,136,139,141,144,148,154,157,161,163,167,172,173,174,175,176,178,181,186,189,193,196,200,203,206,207,208,209,210,211,212,213,217,227,230,233,237,244,248,251,254,265,266,272,280,281,282,290,301,311,315,316,322,325,329,331,335,345,355,398,407,415,420,421,422,423,424,426,427,433,444,451,459,460,461,462,464,466,471,475,479,482,483,488,498,499,500,511,512,513,527,528,529,530,535,536,557,562,],[46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,]),'BEGIN_NMATRIX':([0,1,6,15,18,38,47,52,70,78,87,94,100,108,121,125,129,132,136,139,141,144,148,154,157,161,163,167,172,173,174,175,176,178,181,186,189,193,196,200,203,206,207,208,209,210,211,212,213,217,227,230,233,237,244,248,251,254,265,266,272,280,281,282,290,301,311,315,316,322,325,329,331,335,345,355,398,407,415,420,421,422,423,424,426,427,433,444,451,459,460,461,462,464,466,471,475,479,482,483,488,498,499,500,511,512,513,527,528,529,530,535,536,557,562,],[47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47
,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,]),'$end':([2,7,8,9,11,12,13,17,21,22,23,25,26,27,28,29,30,31,32,33,34,35,37,39,43,45,48,49,53,54,55,56,63,64,65,66,68,69,72,73,74,75,76,77,79,80,81,82,83,84,85,86,88,89,90,91,92,95,97,101,103,104,105,106,107,109,110,112,113,116,117,118,120,122,124,126,127,128,130,131,133,134,135,137,140,142,143,145,147,149,153,155,156,158,159,160,162,164,165,166,177,179,180,182,185,187,188,190,192,194,195,197,198,202,204,215,218,222,223,224,225,226,228,229,231,232,234,236,238,241,242,243,245,246,249,253,255,256,264,273,279,283,284,285,286,287,299,302,303,304,305,306,307,308,309,310,312,317,321,324,326,328,330,332,334,336,340,342,343,344,346,354,356,358,360,361,362,363,364,365,366,370,371,375,376,379,380,381,382,383,384,385,387,390,391,392,393,394,395,396,399,402,404,406,418,419,428,430,431,432,443,448,450,468,474,476,477,478,481,486,487,492,496,503,504,507,510,526,537,538,539,540,541,546,547,560,561,564,565,],[-218,-17,-242,-18,-216,-205,-6,-233,-245,-236,-14,-10,-13,-238,-247,0,-237,-210,-20,-207,-209,-29,-12,-228,-5,-202,-32,-234,-212,-235,-213,-3,-215,-203,-204,-211,-222,-9,-162,-206,-244,-229,-208,-2,-219,-214,-227,-226,-240,-1,-231,-246,-16,-224,-225,-11,-15,-4,-19,-248,-217,-220,-8,-221,-160,-239,-7,-241,-230,-243,-232,-223,-134,-135,-117,-116,-34,-98,-97,-120,-119,-33,-83,-82,-80,-79,-49,-48,-86,-85,-123,-122,-76,-75,-58,-57,-67,-66,-104,-103,-95,-94,-101,-100,-70,-69,-107,-106,-73,-72,-64,-63,-137,-89,-88,-110,-109,-35,-161,-38,-163,-52,-51,-55,-54,-114,-113,-92,-91,-195,-36,-39,-165,-126,-125,-61,-60,-44,-45,-176,-182,-26,-25,-23,-24,-27,-46,-186,-199,-196,-201,-198,-200,-30,-31,-197,-184,-150,-180,-22,-140,-194,-132,-131,-129,-128,-133,-115,-96,-118,-81,-78,-47,-84,-177,-121,-74,-56,-65,-102,-183,-93,-99,-68,-10
5,-71,-62,-136,-138,-87,-187,-185,-108,-50,-53,-112,-181,-90,-37,-40,-139,-124,-42,-59,-28,-168,-21,-164,-130,-127,-77,-145,-143,-41,-149,-148,-167,-43,-169,-153,-151,-159,-111,-171,-170,-155,-157,-166,-147,-175,-174,-173,-172,-146,-144,-154,-152,-156,-158,]),'DOTS':([2,7,8,9,11,12,13,17,21,22,23,25,26,27,28,30,31,32,33,34,35,37,39,43,45,48,49,53,54,55,63,64,65,66,68,69,72,73,74,75,76,79,80,81,82,83,85,86,88,89,90,91,92,95,97,101,103,104,105,106,107,109,110,112,113,116,117,118,120,122,124,126,127,128,130,131,133,134,135,137,140,142,143,145,147,149,153,155,156,158,159,160,162,164,165,166,177,179,180,182,185,187,188,190,192,194,195,197,198,202,204,215,218,222,223,224,225,226,228,229,231,232,234,236,238,241,242,243,245,246,249,253,255,256,264,279,283,284,285,286,287,299,302,308,309,312,317,321,324,326,328,330,332,334,336,340,342,343,344,346,354,356,358,361,362,363,364,365,366,370,371,375,376,379,380,381,382,383,384,385,387,390,391,392,393,394,395,396,399,402,404,406,418,419,428,430,431,432,443,447,448,450,468,474,476,477,478,481,486,487,492,496,503,504,507,510,526,537,538,539,540,541,546,547,560,561,564,565,],[-218,-17,-242,-18,-216,-205,-6,-233,-245,-236,-14,-10,-13,-238,-247,-237,-210,-20,-207,-209,-29,-12,-228,-5,-202,-32,-234,-212,-235,-213,-215,-203,-204,-211,-222,-9,-162,-206,-244,-229,-208,-219,-214,-227,-226,-240,-231,-246,-16,-224,-225,-11,-15,-4,-19,-248,-217,-220,-8,-221,-160,-239,-7,-241,-230,-243,-232,-223,-134,-135,-117,-116,-34,-98,-97,-120,-119,-33,-83,-82,-80,-79,-49,-48,-86,-85,-123,-122,-76,-75,-58,-57,-67,-66,-104,-103,-95,-94,-101,-100,-70,-69,-107,-106,-73,-72,-64,-63,-137,-89,-88,-110,-109,-35,-161,-38,-163,-52,-51,-55,-54,-114,-113,-92,-91,-195,-36,-39,-165,-126,-125,-61,-60,-44,-45,-182,-26,-25,-23,-24,-27,-46,-186,-30,-31,-184,-150,-180,-22,-140,-194,-132,-131,-129,-128,-133,-115,-96,-118,-81,-78,-47,-84,-121,-74,-56,-65,-102,-183,-93,-99,-68,-105,-71,-62,-136,-138,-87,-187,-185,-108,-50,-53,-112,-181,-90,-37,-40,-139,-124,-42,-59,-28,-168,-21
,-164,-130,-127,-77,471,-145,-143,-41,-149,-148,-167,-43,-169,-153,-151,-159,-111,-171,-170,-155,-157,-166,-147,-175,-174,-173,-172,-146,-144,-154,-152,-156,-158,]),'GCD':([0,1,6,15,18,38,47,52,70,78,87,94,100,108,121,125,129,132,136,139,141,144,148,154,157,161,163,167,172,173,174,175,176,178,181,186,189,193,196,200,203,206,207,208,209,210,211,212,213,217,227,230,233,237,244,248,251,254,265,266,272,280,281,282,290,301,311,315,316,322,325,329,331,335,345,355,398,407,415,420,421,422,423,424,426,427,433,444,451,459,460,461,462,464,466,471,475,479,482,483,488,498,499,500,511,512,513,527,528,529,530,535,536,557,562,],[14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,]),'CSC':([0,1,6,15,18,38,47,52,70,78,87,94,100,108,121,125,129,132,136,139,141,144,148,154,157,161,163,167,172,173,174,175,176,178,181,186,189,193,196,200,203,206,207,208,209,210,211,212,213,217,227,230,233,237,244,248,251,254,265,266,272,280,281,282,290,301,311,315,316,322,325,329,331,335,345,355,398,407,415,420,421,422,423,424,426,427,433,444,451,459,460,461,462,464,466,471,475,479,482,483,488,498,499,500,511,512,513,527,528,529,530,535,536,557,562,],[50,50,50,50,50,50,50,50,50,50,50,50,50,50,50,50,50,50,50,50,50,50,50,50,50,50,50,50,50,50,50,50,50,50,50,50,50,50,50,50,50,50,50,50,50,50,50,50,50,50,50,50,50,50,50,50,50,50,50,50,50,50,50,50,50,50,50,50,50,50,50,50,50,50,50,50,50,50,50,50,50,50,50,50,50,50,50,50,50,50,50,50,50,50,50,50,50,50,50,50,50,50,50,50,50,50,50,50,50,50,50,50,50,50,50,]),'END_VMATRIX':([2,7,8,9,11,12,13,17,21,22,23,25,26,27,28,30,31,32,33,34,35,37,39,43,45,48,49,53,54,55,63,64,65,66,68,69,72,73,74,75,76,79,80,81,82,83,85,86,88,89,90,91,92,95,97,101,103,104,105,106,107,109,110,112,113,116,117,11
8,120,122,124,126,127,128,130,131,133,134,135,137,140,142,143,145,147,149,153,155,156,158,159,160,162,164,165,166,169,170,177,179,180,182,185,187,188,190,192,194,195,197,198,202,204,215,218,222,223,224,225,226,228,229,231,232,234,235,236,238,241,242,243,245,246,249,253,255,256,264,279,283,284,285,286,287,299,302,308,309,312,317,321,322,324,326,328,330,332,334,336,340,342,343,344,346,354,356,358,361,362,363,364,365,366,367,368,370,371,375,376,379,380,381,382,383,384,385,387,390,391,392,393,394,395,396,399,402,404,406,418,419,428,430,431,432,443,448,450,468,474,476,477,478,481,486,487,492,496,503,504,507,510,526,537,538,539,540,541,546,547,560,561,564,565,],[-218,-17,-242,-18,-216,-205,-6,-233,-245,-236,-14,-10,-13,-238,-247,-237,-210,-20,-207,-209,-29,-12,-228,-5,-202,-32,-234,-212,-235,-213,-215,-203,-204,-211,-222,-9,-162,-206,-244,-229,-208,-219,-214,-227,-226,-240,-231,-246,-16,-224,-225,-11,-15,-4,-19,-248,-217,-220,-8,-221,-160,-239,-7,-241,-230,-243,-232,-223,-134,-135,-117,-116,-34,-98,-97,-120,-119,-33,-83,-82,-80,-79,-49,-48,-86,-85,-123,-122,-76,-75,-58,-57,-67,-66,-104,-103,-191,-189,-95,-94,-101,-100,-70,-69,-107,-106,-73,-72,-64,-63,-137,-89,-88,-110,-109,-35,-161,-38,-163,-52,-51,-55,-54,-114,-113,321,-92,-91,-195,-36,-39,-165,-126,-125,-61,-60,-44,-45,-182,-26,-25,-23,-24,-27,-46,-186,-30,-31,-184,-150,-180,393,-22,-140,-194,-132,-131,-129,-128,-133,-115,-96,-118,-81,-78,-47,-84,-121,-74,-56,-65,-102,-183,-190,-188,-93,-99,-68,-105,-71,-62,-136,-138,-87,-187,-185,-108,-50,-53,-112,-181,-90,-37,-40,-139,-124,-42,-59,-28,-168,-21,-164,-130,-127,-77,-145,-143,-41,-149,-148,-167,-43,-169,-153,-151,-159,-111,-171,-170,-155,-157,-166,-147,-175,-174,-173,-172,-146,-144,-154,-152,-156,-158,]),'AMPERSAND':([2,7,8,9,11,12,13,17,21,22,23,25,26,27,28,30,31,32,33,34,35,37,39,43,45,48,49,53,54,55,63,64,65,66,68,69,72,73,74,75,76,79,80,81,82,83,85,86,88,89,90,91,92,95,97,101,103,104,105,106,107,109,110,112,113,116,117,118,120,122,124,126,127,128,130,131,133,134,135,
137,140,142,143,145,147,149,153,155,156,158,159,160,162,164,165,166,169,170,177,179,180,182,185,187,188,190,192,194,195,197,198,202,204,215,218,222,223,224,225,226,228,229,231,232,234,236,238,241,242,243,245,246,249,253,255,256,264,279,283,284,285,286,287,299,302,308,309,312,317,321,324,326,328,330,332,334,336,340,342,343,344,346,354,356,358,361,362,363,364,365,366,367,368,370,371,375,376,379,380,381,382,383,384,385,387,390,391,392,393,394,395,396,399,402,404,406,418,419,428,430,431,432,443,448,450,468,474,476,477,478,481,486,487,492,496,503,504,507,510,526,537,538,539,540,541,546,547,560,561,564,565,],[-218,-17,-242,-18,-216,-205,-6,-233,-245,-236,-14,-10,-13,-238,-247,-237,-210,-20,-207,-209,-29,-12,-228,-5,-202,-32,-234,-212,-235,-213,-215,-203,-204,-211,-222,-9,-162,-206,-244,-229,-208,-219,-214,-227,-226,-240,-231,-246,-16,-224,-225,-11,-15,-4,-19,-248,-217,-220,-8,-221,-160,-239,-7,-241,-230,-243,-232,-223,-134,-135,-117,-116,-34,-98,-97,-120,-119,-33,-83,-82,-80,-79,-49,-48,-86,-85,-123,-122,-76,-75,-58,-57,-67,-66,-104,-103,281,-189,-95,-94,-101,-100,-70,-69,-107,-106,-73,-72,-64,-63,-137,-89,-88,-110,-109,-35,-161,-38,-163,-52,-51,-55,-54,-114,-113,-92,-91,-195,-36,-39,-165,-126,-125,-61,-60,-44,-45,-182,-26,-25,-23,-24,-27,-46,-186,-30,-31,-184,-150,-180,-22,-140,-194,-132,-131,-129,-128,-133,-115,-96,-118,-81,-78,-47,-84,-121,-74,-56,-65,-102,-183,281,-188,-93,-99,-68,-105,-71,-62,-136,-138,-87,-187,-185,-108,-50,-53,-112,-181,-90,-37,-40,-139,-124,-42,-59,-28,-168,-21,-164,-130,-127,-77,-145,-143,-41,-149,-148,-167,-43,-169,-153,-151,-159,-111,-171,-170,-155,-157,-166,-147,-175,-174,-173,-172,-146,-144,-154,-152,-156,-158,]),'END_NMATRIX':([2,7,8,9,11,12,13,17,21,22,23,25,26,27,28,30,31,32,33,34,35,37,39,43,45,48,49,53,54,55,63,64,65,66,68,69,72,73,74,75,76,79,80,81,82,83,85,86,88,89,90,91,92,95,97,101,103,104,105,106,107,109,110,112,113,116,117,118,120,122,124,126,127,128,130,131,133,134,135,137,140,142,143,145,147,149,153,155,156,158,159,160,162,164,16
5,166,168,169,170,177,179,180,182,185,187,188,190,192,194,195,197,198,202,204,215,218,222,223,224,225,226,228,229,231,232,234,236,238,241,242,243,245,246,249,253,255,256,264,279,280,283,284,285,286,287,299,302,308,309,312,317,321,324,326,328,330,332,334,336,340,342,343,344,346,354,356,358,361,362,363,364,365,366,367,368,370,371,375,376,379,380,381,382,383,384,385,387,390,391,392,393,394,395,396,399,402,404,406,418,419,428,430,431,432,443,448,450,468,474,476,477,478,481,486,487,492,496,503,504,507,510,526,537,538,539,540,541,546,547,560,561,564,565,],[-218,-17,-242,-18,-216,-205,-6,-233,-245,-236,-14,-10,-13,-238,-247,-237,-210,-20,-207,-209,-29,-12,-228,-5,-202,-32,-234,-212,-235,-213,-215,-203,-204,-211,-222,-9,-162,-206,-244,-229,-208,-219,-214,-227,-226,-240,-231,-246,-16,-224,-225,-11,-15,-4,-19,-248,-217,-220,-8,-221,-160,-239,-7,-241,-230,-243,-232,-223,-134,-135,-117,-116,-34,-98,-97,-120,-119,-33,-83,-82,-80,-79,-49,-48,-86,-85,-123,-122,-76,-75,-58,-57,-67,-66,-104,-103,279,-191,-189,-95,-94,-101,-100,-70,-69,-107,-106,-73,-72,-64,-63,-137,-89,-88,-110,-109,-35,-161,-38,-163,-52,-51,-55,-54,-114,-113,-92,-91,-195,-36,-39,-165,-126,-125,-61,-60,-44,-45,-182,366,-26,-25,-23,-24,-27,-46,-186,-30,-31,-184,-150,-180,-22,-140,-194,-132,-131,-129,-128,-133,-115,-96,-118,-81,-78,-47,-84,-121,-74,-56,-65,-102,-183,-190,-188,-93,-99,-68,-105,-71,-62,-136,-138,-87,-187,-185,-108,-50,-53,-112,-181,-90,-37,-40,-139,-124,-42,-59,-28,-168,-21,-164,-130,-127,-77,-145,-143,-41,-149,-148,-167,-43,-169,-153,-151,-159,-111,-171,-170,-155,-157,-166,-147,-175,-174,-173,-172,-146,-144,-154,-152,-156,-158,]),'EQ':([2,7,8,9,11,12,13,17,21,22,23,25,26,27,28,30,31,32,33,34,35,37,39,43,45,48,49,53,54,55,63,64,65,66,68,69,72,73,74,75,76,79,80,81,82,83,84,85,86,88,89,90,91,92,95,97,101,103,104,105,106,107,109,110,112,113,116,117,118,120,122,124,126,127,128,130,131,133,134,135,137,140,142,143,145,147,149,151,153,155,156,158,159,160,162,164,165,166,177,179,180,182,185,187,188,190,192,194,
195,197,198,202,204,215,218,222,223,224,225,226,228,229,231,232,234,236,238,241,242,243,245,246,249,253,255,256,264,279,283,284,285,286,287,299,302,308,309,312,317,321,324,326,328,330,332,334,336,340,342,343,344,346,354,356,358,361,362,363,364,365,366,370,371,373,375,376,377,379,380,381,382,383,384,385,387,390,391,392,393,394,395,396,399,402,404,406,418,419,428,430,431,432,443,448,450,468,474,476,477,478,481,486,487,492,496,503,504,507,510,526,537,538,539,540,541,546,547,560,561,564,565,],[-218,-17,-242,-18,-216,-205,-6,-233,-245,-236,-14,-10,-13,-238,-247,-237,-210,-20,-207,-209,-29,-12,-228,-5,-202,-32,-234,-212,-235,-213,-215,-203,-204,-211,-222,-9,-162,-206,-244,-229,-208,-219,-214,-227,-226,-240,207,-231,-246,-16,-224,-225,-11,-15,-4,-19,-248,-217,-220,-8,-221,-160,-239,-7,-241,-230,-243,-232,-223,-134,-135,-117,-116,-34,-98,-97,-120,-119,-33,-83,-82,-80,-79,-49,-48,-86,-85,207,-123,-122,-76,-75,-58,-57,-67,-66,-104,-103,-95,-94,-101,-100,-70,-69,-107,-106,-73,-72,-64,-63,-137,-89,-88,-110,-109,-35,-161,-38,-163,-52,-51,-55,-54,-114,-113,-92,-91,-195,-36,-39,-165,-126,-125,-61,-60,-44,-45,-182,-26,-25,-23,-24,-27,-46,-186,-30,-31,-184,-150,-180,-22,-140,-194,-132,-131,-129,-128,-133,-115,-96,-118,-81,-78,-47,-84,-121,-74,-56,-65,-102,-183,-93,-99,420,-68,-105,423,-71,-62,-136,-138,-87,-187,-185,-108,-50,-53,-112,-181,-90,-37,-40,-139,-124,-42,-59,-28,-168,-21,-164,-130,-127,-77,-145,-143,-41,-149,-148,-167,-43,-169,-153,-151,-159,-111,-171,-170,-155,-157,-166,-147,-175,-174,-173,-172,-146,-144,-154,-152,-156,-158,]),'COTH':([0,1,6,15,18,38,47,52,70,78,87,94,100,108,121,125,129,132,136,139,141,144,148,154,157,161,163,167,172,173,174,175,176,178,181,186,189,193,196,200,203,206,207,208,209,210,211,212,213,217,227,230,233,237,244,248,251,254,265,266,272,280,281,282,290,301,311,315,316,322,325,329,331,335,345,355,398,407,415,420,421,422,423,424,426,427,433,444,451,459,460,461,462,464,466,471,475,479,482,483,488,498,499,500,511,512,513,527,528,529,530,535,536,557,562
,],[51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,]),'LBRACE':([0,1,6,15,18,19,38,47,52,70,78,87,94,100,108,114,121,123,125,129,132,136,139,141,144,146,148,154,157,161,163,167,171,172,173,174,175,176,178,181,184,186,189,191,193,196,200,203,206,207,208,209,210,211,212,213,216,217,219,220,227,230,233,237,240,244,248,251,254,265,266,272,280,281,282,290,301,311,315,316,322,325,329,331,335,345,347,349,350,351,353,355,398,405,407,410,414,415,420,421,422,423,424,426,427,433,444,451,453,457,458,459,460,461,462,464,466,471,475,479,482,483,485,488,489,493,495,498,499,500,505,509,511,512,513,527,528,529,530,532,535,536,542,544,551,557,562,],[52,52,52,52,52,139,52,52,52,52,52,52,52,52,52,251,52,258,52,52,52,52,52,52,52,270,52,52,52,52,52,52,282,52,52,52,52,52,52,52,291,52,52,294,52,52,52,52,52,52,52,52,52,52,52,52,313,52,315,316,52,52,52,52,325,52,52,52,52,52,52,52,52,52,52,52,52,52,52,52,52,52,52,52,52,52,409,411,412,413,415,52,52,433,52,437,441,52,52,52,52,52,52,52,52,52,52,52,475,479,480,52,52,52,52,52,52,52,52,52,52,52,506,52,508,511,512,52,52,52,519,522,52,52,52,52,52,52,52,543,52,52,548,550,555,52,52,]),'LAMBDA_LOWER':([0,1,6,15,18,38,47,52,70,78,87,94,100,108,121,125,129,132,136,139,141,144,148,154,157,161,163,167,172,173,174,175,176,178,181,186,189,193,196,200,203,206,207,208,209,210,211,212,213,217,227,230,233,237,244,248,251,254,265,266,272,280,281,282,290,301,311,315,316,322,325,329,331,335,345,355,398,407,415,420,421,422,423,424,426,427,433,444,451,459,460,461,462,464,466,471,475,479,482,483,488,498,499,500,511,512,513,527,528,529,530,535,536,557,562,],[53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,5
3,53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,]),'BEGIN_PMATRIX':([0,1,6,15,18,38,47,52,67,70,78,87,94,100,108,121,125,129,132,136,139,141,144,148,154,157,161,163,167,172,173,174,175,176,178,181,186,189,193,196,199,200,203,206,207,208,209,210,211,212,213,217,227,230,233,237,244,248,251,254,265,266,272,280,281,282,290,301,311,315,316,322,325,329,331,335,345,355,398,407,415,420,421,422,423,424,426,427,433,444,451,459,460,461,462,464,466,471,475,479,482,483,488,498,499,500,511,512,513,527,528,529,530,535,536,557,562,],[78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,]),'EPSILON_LOWER':([0,1,6,15,18,38,47,52,70,78,87,94,100,108,121,125,129,132,136,139,141,144,148,154,157,161,163,167,172,173,174,175,176,178,181,186,189,193,196,200,203,206,207,208,209,210,211,212,213,217,227,230,233,237,244,248,251,254,265,266,272,280,281,282,290,301,311,315,316,322,325,329,331,335,345,355,398,407,415,420,421,422,423,424,426,427,433,444,451,459,460,461,462,464,466,471,475,479,482,483,488,498,499,500,511,512,513,527,528,529,530,535,536,557,562,],[55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,]),'PROD':([0,1,6,15,18,38,47,52,70,78,87,94,100,108,121,125,129,132,136,139,141,144,148,154,157,161,163,167,172,173,174,175,176,178,18
1,186,189,193,196,200,203,206,207,208,209,210,211,212,213,217,227,230,233,237,244,248,251,254,265,266,272,280,281,282,290,301,311,315,316,322,325,329,331,335,345,355,398,407,415,420,421,422,423,424,426,427,433,444,451,459,460,461,462,464,466,471,475,479,482,483,488,498,499,500,511,512,513,527,528,529,530,535,536,557,562,],[57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,]),'LAMBDA':([0,1,6,15,18,38,47,52,70,78,87,94,100,108,121,125,129,132,136,139,141,144,148,154,157,161,163,167,172,173,174,175,176,178,181,186,189,193,196,200,203,206,207,208,209,210,211,212,213,217,227,230,233,237,244,248,251,254,265,266,272,280,281,282,290,301,311,315,316,322,325,329,331,335,345,355,398,407,415,420,421,422,423,424,426,427,433,444,451,459,460,461,462,464,466,471,475,479,482,483,488,498,499,500,511,512,513,527,528,529,530,535,536,557,562,],[117,117,117,117,117,117,117,117,117,117,117,117,117,117,117,117,117,117,117,117,117,117,117,117,117,117,117,117,117,117,117,117,117,117,117,117,117,117,117,117,117,117,117,117,117,117,117,117,117,117,117,117,117,117,117,117,117,117,117,117,117,117,117,117,117,117,117,117,117,117,117,117,117,117,117,117,117,117,117,117,117,117,117,117,117,117,117,117,117,117,117,117,117,117,117,117,117,117,117,117,117,117,117,117,117,117,117,117,117,117,117,117,117,117,117,]),'COS':([0,1,6,15,18,38,47,52,70,78,87,94,100,108,121,125,129,132,136,139,141,144,148,154,157,161,163,167,172,173,174,175,176,178,181,186,189,193,196,200,203,206,207,208,209,210,211,212,213,217,227,230,233,237,244,248,251,254,265,266,272,280,281,282,290,301,311,315,316,322,325,329,331,335,345,355,398,407,415,420,421,422,423,424,426,427,433,444,451,459,460,461,462,464,466,471,475,479,482,483,488,498,499,
500,511,512,513,527,528,529,530,535,536,557,562,],[58,58,58,58,58,58,58,58,58,58,58,58,58,58,58,58,58,58,58,58,58,58,58,58,58,58,58,58,58,58,58,58,58,58,58,58,58,58,58,58,58,58,58,58,58,58,58,58,58,58,58,58,58,58,58,58,58,58,58,58,58,58,58,58,58,58,58,58,58,58,58,58,58,58,58,58,58,58,58,58,58,58,58,58,58,58,58,58,58,58,58,58,58,58,58,58,58,58,58,58,58,58,58,58,58,58,58,58,58,58,58,58,58,58,58,]),'COT':([0,1,6,15,18,38,47,52,70,78,87,94,100,108,121,125,129,132,136,139,141,144,148,154,157,161,163,167,172,173,174,175,176,178,181,186,189,193,196,200,203,206,207,208,209,210,211,212,213,217,227,230,233,237,244,248,251,254,265,266,272,280,281,282,290,301,311,315,316,322,325,329,331,335,345,355,398,407,415,420,421,422,423,424,426,427,433,444,451,459,460,461,462,464,466,471,475,479,482,483,488,498,499,500,511,512,513,527,528,529,530,535,536,557,562,],[59,59,59,59,59,59,59,59,59,59,59,59,59,59,59,59,59,59,59,59,59,59,59,59,59,59,59,59,59,59,59,59,59,59,59,59,59,59,59,59,59,59,59,59,59,59,59,59,59,59,59,59,59,59,59,59,59,59,59,59,59,59,59,59,59,59,59,59,59,59,59,59,59,59,59,59,59,59,59,59,59,59,59,59,59,59,59,59,59,59,59,59,59,59,59,59,59,59,59,59,59,59,59,59,59,59,59,59,59,59,59,59,59,59,59,]),'SUM':([0,1,6,15,18,38,47,52,70,78,87,94,100,108,121,125,129,132,136,139,141,144,148,154,157,161,163,167,172,173,174,175,176,178,181,186,189,193,196,200,203,206,207,208,209,210,211,212,213,217,227,230,233,237,244,248,251,254,265,266,272,280,281,282,290,301,311,315,316,322,325,329,331,335,345,355,398,407,415,420,421,422,423,424,426,427,433,444,451,459,460,461,462,464,466,471,475,479,482,483,488,498,499,500,511,512,513,527,528,529,530,535,536,557,562,],[60,60,60,60,60,60,60,60,60,60,60,60,60,60,60,60,60,60,60,60,60,60,60,60,60,60,60,60,60,60,60,60,60,60,60,60,60,60,60,60,60,60,60,60,60,60,60,60,60,60,60,60,60,60,60,60,60,60,60,60,60,60,60,60,60,60,60,60,60,60,60,60,60,60,60,60,60,60,60,60,60,60,60,60,60,60,60,60,60,60,60,60,60,60,60,60,60,60,60,60,60,60,60,60,60,60,60,60,60,60,60,60,60,60
,60,]),'ATANH':([0,1,6,15,18,38,47,52,70,78,87,94,100,108,121,125,129,132,136,139,141,144,148,154,157,161,163,167,172,173,174,175,176,178,181,186,189,193,196,200,203,206,207,208,209,210,211,212,213,217,227,230,233,237,244,248,251,254,265,266,272,280,281,282,290,301,311,315,316,322,325,329,331,335,345,355,398,407,415,420,421,422,423,424,426,427,433,444,451,459,460,461,462,464,466,471,475,479,482,483,488,498,499,500,511,512,513,527,528,529,530,535,536,557,562,],[61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,]),'CROSS':([2,7,8,9,11,12,13,17,21,22,23,25,26,27,28,30,31,32,33,34,35,37,39,43,45,48,49,53,54,55,63,64,65,66,68,69,72,73,74,75,76,79,80,81,82,83,85,86,88,89,90,91,92,95,97,101,103,104,105,106,107,109,110,111,112,113,116,117,118,120,122,124,126,127,128,130,131,133,134,135,137,140,142,143,145,147,149,153,155,156,158,159,160,162,164,165,166,177,179,180,182,185,187,188,190,192,194,195,197,198,202,204,215,218,222,223,224,225,226,228,229,231,232,234,236,238,241,242,243,245,246,249,253,255,256,264,279,283,284,285,286,287,299,302,308,309,312,317,321,324,326,328,330,332,334,336,340,342,343,344,346,354,356,358,361,362,363,364,365,366,370,371,375,376,379,380,381,382,383,384,385,387,390,391,392,393,394,395,396,399,402,404,406,418,419,428,429,430,431,432,434,443,448,450,468,474,476,477,478,481,486,487,492,496,503,504,507,510,526,537,538,539,540,541,546,547,560,561,564,565,],[-218,-17,-242,-18,-216,-205,-6,-233,-245,-236,-14,-10,-13,-238,-247,-237,-210,-20,-207,-209,-29,-12,-228,-5,-202,173,-234,-212,-235,-213,-215,-203,-204,-211,-222,-9,-162,-206,-244,-229,-208,-219,-214,-227,-226,-240,-231,-246,-16,-224,-225,-11,-15,-4,-19,-248,-217,-220,-8,-221,-160,-239,-7,247,-241,-230,-243,-232
,-223,-134,-135,-117,-116,-34,-98,-97,-120,-119,-33,-83,-82,-80,-79,-49,-48,-86,-85,-123,-122,-76,-75,-58,-57,-67,-66,-104,-103,-95,-94,-101,-100,-70,-69,-107,-106,-73,-72,-64,-63,-137,-89,-88,-110,-109,-35,-161,-38,-163,-52,-51,-55,-54,-114,-113,-92,-91,-195,-36,-39,-165,-126,-125,-61,-60,-44,-45,-182,-26,-25,-23,-24,-27,-46,-186,173,173,-184,-150,-180,-22,-140,-194,-132,-131,-129,-128,-133,-115,-96,-118,-81,-78,-47,-84,-121,-74,-56,-65,-102,-183,-93,-99,-68,-105,-71,-62,-136,-138,-87,-187,-185,-108,-50,-53,-112,-181,-90,-37,-40,-139,-124,-42,-59,-28,-168,-21,-4,-164,-130,-127,173,-77,-145,-143,-41,-149,-148,-167,-43,-169,-153,-151,-159,-111,-171,-170,-155,-157,-166,-147,-175,-174,-173,-172,-146,-144,-154,-152,-156,-158,]),'COSH':([0,1,6,15,18,38,47,52,70,78,87,94,100,108,121,125,129,132,136,139,141,144,148,154,157,161,163,167,172,173,174,175,176,178,181,186,189,193,196,200,203,206,207,208,209,210,211,212,213,217,227,230,233,237,244,248,251,254,265,266,272,280,281,282,290,301,311,315,316,322,325,329,331,335,345,355,398,407,415,420,421,422,423,424,426,427,433,444,451,459,460,461,462,464,466,471,475,479,482,483,488,498,499,500,511,512,513,527,528,529,530,535,536,557,562,],[62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,]),'KAPPA_LOWER':([0,1,6,15,18,38,47,52,70,78,87,94,100,108,121,125,129,132,136,139,141,144,148,154,157,161,163,167,172,173,174,175,176,178,181,186,189,193,196,200,203,206,207,208,209,210,211,212,213,217,227,230,233,237,244,248,251,254,265,266,272,280,281,282,290,301,311,315,316,322,325,329,331,335,345,355,398,407,415,420,421,422,423,424,426,427,433,444,451,459,460,461,462,464,466,471,475,479,482,483,488,498,499,500,511,512,513,527,528,529,530,535,536,557,562,
],[63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,]),'ETA':([0,1,6,15,18,38,47,52,70,78,87,94,100,108,121,125,129,132,136,139,141,144,148,154,157,161,163,167,172,173,174,175,176,178,181,186,189,193,196,200,203,206,207,208,209,210,211,212,213,217,227,230,233,237,244,248,251,254,265,266,272,280,281,282,290,301,311,315,316,322,325,329,331,335,345,355,398,407,415,420,421,422,423,424,426,427,433,444,451,459,460,461,462,464,466,471,475,479,482,483,488,498,499,500,511,512,513,527,528,529,530,535,536,557,562,],[27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,]),'END_PMATRIX':([2,7,8,9,11,12,13,17,21,22,23,25,26,27,28,30,31,32,33,34,35,37,39,43,45,48,49,53,54,55,63,64,65,66,68,69,72,73,74,75,76,79,80,81,82,83,85,86,88,89,90,91,92,95,97,101,103,104,105,106,107,109,110,112,113,116,117,118,120,122,124,126,127,128,130,131,133,134,135,137,140,142,143,145,147,149,153,155,156,158,159,160,162,164,165,166,169,170,177,179,180,182,185,187,188,190,192,194,195,197,198,202,204,205,215,218,222,223,224,225,226,228,229,231,232,234,236,238,241,242,243,245,246,249,253,255,256,264,279,283,284,285,286,287,299,301,302,308,309,312,317,321,324,326,328,330,332,334,336,340,342,343,344,346,354,356,358,361,362,363,364,365,366,367,368,370,371,375,376,379,380,381,382,383,384,385,387,390,391,392,393,394,395,396,399,402,404,406,418,419,428,430,431,432,443,448,450,468,474,476,477,478,481,486,487,492,496,503,504,507,510,526,
537,538,539,540,541,546,547,560,561,564,565,],[-218,-17,-242,-18,-216,-205,-6,-233,-245,-236,-14,-10,-13,-238,-247,-237,-210,-20,-207,-209,-29,-12,-228,-5,-202,-32,-234,-212,-235,-213,-215,-203,-204,-211,-222,-9,-162,-206,-244,-229,-208,-219,-214,-227,-226,-240,-231,-246,-16,-224,-225,-11,-15,-4,-19,-248,-217,-220,-8,-221,-160,-239,-7,-241,-230,-243,-232,-223,-134,-135,-117,-116,-34,-98,-97,-120,-119,-33,-83,-82,-80,-79,-49,-48,-86,-85,-123,-122,-76,-75,-58,-57,-67,-66,-104,-103,-191,-189,-95,-94,-101,-100,-70,-69,-107,-106,-73,-72,-64,-63,-137,-89,-88,302,-110,-109,-35,-161,-38,-163,-52,-51,-55,-54,-114,-113,-92,-91,-195,-36,-39,-165,-126,-125,-61,-60,-44,-45,-182,-26,-25,-23,-24,-27,-46,384,-186,-30,-31,-184,-150,-180,-22,-140,-194,-132,-131,-129,-128,-133,-115,-96,-118,-81,-78,-47,-84,-121,-74,-56,-65,-102,-183,-190,-188,-93,-99,-68,-105,-71,-62,-136,-138,-87,-187,-185,-108,-50,-53,-112,-181,-90,-37,-40,-139,-124,-42,-59,-28,-168,-21,-164,-130,-127,-77,-145,-143,-41,-149,-148,-167,-43,-169,-153,-151,-159,-111,-171,-170,-155,-157,-166,-147,-175,-174,-173,-172,-146,-144,-154,-152,-156,-158,]),'CHI_LOWER':([0,1,6,15,18,38,47,52,70,78,87,94,100,108,121,125,129,132,136,139,141,144,148,154,157,161,163,167,172,173,174,175,176,178,181,186,189,193,196,200,203,206,207,208,209,210,211,212,213,217,227,230,233,237,244,248,251,254,265,266,272,280,281,282,290,301,311,315,316,322,325,329,331,335,345,355,398,407,415,420,421,422,423,424,426,427,433,444,451,459,460,461,462,464,466,471,475,479,482,483,488,498,499,500,511,512,513,527,528,529,530,535,536,557,562,],[65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,]),'DOT':([2,7,8,9,11,12,13,17,21,22,23,25,26,27,28,30,31,32,33,34,35,37,39,43,45,
48,49,53,54,55,63,64,65,66,68,69,72,73,74,75,76,79,80,81,82,83,85,86,88,89,90,91,92,95,97,101,103,104,105,106,107,109,110,111,112,113,116,117,118,120,122,124,126,127,128,130,131,133,134,135,137,140,142,143,145,147,149,153,155,156,158,159,160,162,164,165,166,177,179,180,182,185,187,188,190,192,194,195,197,198,202,204,215,218,222,223,224,225,226,228,229,231,232,234,236,238,241,242,243,245,246,249,253,255,256,264,279,283,284,285,286,287,299,302,308,309,312,317,321,324,326,328,330,332,334,336,340,342,343,344,346,354,356,358,361,362,363,364,365,366,370,371,375,376,379,380,381,382,383,384,385,387,390,391,392,393,394,395,396,399,402,404,406,418,419,428,429,430,431,432,434,443,448,450,468,474,476,477,478,481,486,487,492,496,503,504,507,510,526,537,538,539,540,541,546,547,560,561,564,565,],[-218,-17,-242,-18,-216,-205,-6,-233,-245,-236,-14,-10,-13,-238,-247,-237,-210,-20,-207,-209,-29,-12,-228,-5,-202,175,-234,-212,-235,-213,-215,-203,-204,-211,-222,-9,-162,-206,-244,-229,-208,-219,-214,-227,-226,-240,-231,-246,-16,-224,-225,-11,-15,-4,-19,-248,-217,-220,-8,-221,-160,-239,-7,250,-241,-230,-243,-232,-223,-134,-135,-117,-116,-34,-98,-97,-120,-119,-33,-83,-82,-80,-79,-49,-48,-86,-85,-123,-122,-76,-75,-58,-57,-67,-66,-104,-103,-95,-94,-101,-100,-70,-69,-107,-106,-73,-72,-64,-63,-137,-89,-88,-110,-109,-35,-161,-38,-163,-52,-51,-55,-54,-114,-113,-92,-91,-195,-36,-39,-165,-126,-125,-61,-60,-44,-45,-182,-26,-25,-23,-24,-27,-46,-186,175,175,-184,-150,-180,-22,-140,-194,-132,-131,-129,-128,-133,-115,-96,-118,-81,-78,-47,-84,-121,-74,-56,-65,-102,-183,-93,-99,-68,-105,-71,-62,-136,-138,-87,-187,-185,-108,-50,-53,-112,-181,-90,-37,-40,-139,-124,-42,-59,-28,-168,-21,-4,-164,-130,-127,175,-77,-145,-143,-41,-149,-148,-167,-43,-169,-153,-151,-159,-111,-171,-170,-155,-157,-166,-147,-175,-174,-173,-172,-146,-144,-154,-152,-156,-158,]),'NEQ':([2,7,8,9,11,12,13,17,21,22,23,25,26,27,28,30,31,32,33,34,35,37,39,43,45,48,49,53,54,55,63,64,65,66,68,69,72,73,74,75,76,79,80,81,82,83,84,85,86,88,89,90,
91,92,95,97,101,103,104,105,106,107,109,110,112,113,116,117,118,120,122,124,126,127,128,130,131,133,134,135,137,140,142,143,145,147,149,151,153,155,156,158,159,160,162,164,165,166,177,179,180,182,185,187,188,190,192,194,195,197,198,202,204,215,218,222,223,224,225,226,228,229,231,232,234,236,238,241,242,243,245,246,249,253,255,256,264,279,283,284,285,286,287,299,302,308,309,312,317,321,324,326,328,330,332,334,336,340,342,343,344,346,354,356,358,361,362,363,364,365,366,370,371,375,376,379,380,381,382,383,384,385,387,390,391,392,393,394,395,396,399,402,404,406,418,419,428,430,431,432,443,448,450,468,474,476,477,478,481,486,487,492,496,503,504,507,510,526,537,538,539,540,541,546,547,560,561,564,565,],[-218,-17,-242,-18,-216,-205,-6,-233,-245,-236,-14,-10,-13,-238,-247,-237,-210,-20,-207,-209,-29,-12,-228,-5,-202,-32,-234,-212,-235,-213,-215,-203,-204,-211,-222,-9,-162,-206,-244,-229,-208,-219,-214,-227,-226,-240,213,-231,-246,-16,-224,-225,-11,-15,-4,-19,-248,-217,-220,-8,-221,-160,-239,-7,-241,-230,-243,-232,-223,-134,-135,-117,-116,-34,-98,-97,-120,-119,-33,-83,-82,-80,-79,-49,-48,-86,-85,213,-123,-122,-76,-75,-58,-57,-67,-66,-104,-103,-95,-94,-101,-100,-70,-69,-107,-106,-73,-72,-64,-63,-137,-89,-88,-110,-109,-35,-161,-38,-163,-52,-51,-55,-54,-114,-113,-92,-91,-195,-36,-39,-165,-126,-125,-61,-60,-44,-45,-182,-26,-25,-23,-24,-27,-46,-186,-30,-31,-184,-150,-180,-22,-140,-194,-132,-131,-129,-128,-133,-115,-96,-118,-81,-78,-47,-84,-121,-74,-56,-65,-102,-183,-93,-99,-68,-105,-71,-62,-136,-138,-87,-187,-185,-108,-50,-53,-112,-181,-90,-37,-40,-139,-124,-42,-59,-28,-168,-21,-164,-130,-127,-77,-145,-143,-41,-149,-148,-167,-43,-169,-153,-151,-159,-111,-171,-170,-155,-157,-166,-147,-175,-174,-173,-172,-146,-144,-154,-152,-156,-158,]),'THETA_LOWER':([0,1,6,15,18,38,47,52,70,78,87,94,100,108,121,125,129,132,136,139,141,144,148,154,157,161,163,167,172,173,174,175,176,178,181,186,189,193,196,200,203,206,207,208,209,210,211,212,213,217,227,230,233,237,244,248,251,254,265,266,272,280,
281,282,290,301,311,315,316,322,325,329,331,335,345,355,398,407,415,420,421,422,423,424,426,427,433,444,451,459,460,461,462,464,466,471,475,479,482,483,488,498,499,500,511,512,513,527,528,529,530,535,536,557,562,],[66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,]),'DETERMINANT':([0,1,6,15,18,38,47,52,70,78,87,94,100,108,121,125,129,132,136,139,141,144,148,154,157,161,163,167,172,173,174,175,176,178,181,186,189,193,196,200,203,206,207,208,209,210,211,212,213,217,227,230,233,237,244,248,251,254,265,266,272,280,281,282,290,301,311,315,316,322,325,329,331,335,345,355,398,407,415,420,421,422,423,424,426,427,433,444,451,459,460,461,462,464,466,471,475,479,482,483,488,498,499,500,511,512,513,527,528,529,530,535,536,557,562,],[67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,]),'IOTA_LOWER':([0,1,6,15,18,38,47,52,70,78,87,94,100,108,121,125,129,132,136,139,141,144,148,154,157,161,163,167,172,173,174,175,176,178,181,186,189,193,196,200,203,206,207,208,209,210,211,212,213,217,227,230,233,237,244,248,251,254,265,266,272,280,281,282,290,301,311,315,316,322,325,329,331,335,345,355,398,407,415,420,421,422,423,424,426,427,433,444,451,459,460,461,462,464,466,471,475,479,482,483,488,498,499,500,511,512,513,527,528,529,530,535,536,557,562,],[68,68,68,68,68,68,68,68,68,68,68,68,68,68,68,68,68,68,68,68,68,68,68,68,68,68,68,68,68,68,68,68,68,68,68,68,68,68,68,68,68,68,68,68,68,68,68,68,68,68,68,68,68,68,
68,68,68,68,68,68,68,68,68,68,68,68,68,68,68,68,68,68,68,68,68,68,68,68,68,68,68,68,68,68,68,68,68,68,68,68,68,68,68,68,68,68,68,68,68,68,68,68,68,68,68,68,68,68,68,68,68,68,68,68,68,]),'PIPE':([0,1,2,6,7,8,9,11,12,13,15,17,18,21,22,23,25,26,27,28,30,31,32,33,34,35,37,38,39,43,45,47,48,49,52,53,54,55,63,64,65,66,68,69,70,72,73,74,75,76,78,79,80,81,82,83,85,86,87,88,89,90,91,92,94,95,97,100,101,103,104,105,106,107,108,109,110,112,113,116,117,118,120,121,122,124,125,126,127,128,129,130,131,132,133,134,135,136,137,139,140,141,142,143,144,145,147,148,149,153,154,155,156,157,158,159,160,161,162,163,164,165,166,167,172,173,174,175,176,177,178,179,180,181,182,185,186,187,188,189,190,192,193,194,195,196,197,198,200,201,202,203,204,206,207,208,209,210,211,212,213,215,217,218,222,223,224,225,226,227,228,229,230,231,232,233,234,236,237,238,241,242,243,244,245,246,248,249,251,253,254,255,256,264,265,266,272,279,280,281,282,283,284,285,286,287,290,299,301,302,308,309,311,312,315,316,317,321,322,324,325,326,328,329,330,331,332,334,335,336,340,342,343,344,345,346,354,355,356,358,361,362,363,364,365,366,370,371,375,376,379,380,381,382,383,384,385,387,390,391,392,393,394,395,396,398,399,402,404,406,407,415,418,419,420,421,422,423,424,426,427,428,430,431,432,433,443,444,448,450,451,459,460,461,462,464,466,468,471,474,475,476,477,478,479,481,482,483,486,487,488,492,496,498,499,500,503,504,507,510,511,512,513,526,527,528,529,530,535,536,537,538,539,540,541,546,547,557,560,561,562,564,565,],[70,70,-218,70,-17,-242,-18,-216,-205,-6,70,-233,70,-245,-236,-14,-10,-13,-238,-247,-237,-210,-20,-207,-209,-29,-12,70,-228,-5,-202,70,-32,-234,70,-212,-235,-213,-215,-203,-204,-211,-222,-9,70,-162,-206,-244,-229,-208,70,-219,-214,-227,-226,-240,-231,-246,70,-16,-224,-225,-11,-15,70,-4,-19,70,-248,-217,-220,-8,-221,-160,70,-239,-7,-241,-230,-243,-232,-223,-134,70,-135,-117,70,-116,-34,-98,70,-97,-120,70,-119,-33,-83,70,-82,70,-80,70,-79,-49,70,-48,-86,70,-85,-123,70,-122,-76,70,-75,-58,-57,70,-67,70,
-66,-104,-103,70,70,70,70,70,70,-95,70,-94,-101,70,-100,-70,70,-69,-107,70,-106,-73,70,-72,-64,70,-63,-137,70,299,-89,70,-88,70,70,70,70,70,70,70,70,-110,70,-109,-35,-161,-38,-163,-52,70,-51,-55,70,-54,-114,70,-113,-92,70,-91,-195,-36,-39,70,-165,-126,70,-125,70,-61,70,-60,-44,-45,70,70,70,-182,70,70,70,-26,-25,-23,-24,-27,70,-46,70,-186,-30,-31,70,-184,70,70,-150,-180,70,-22,70,-140,-194,70,-132,70,-131,-129,70,-128,-133,-115,-96,-118,70,-81,-78,70,-47,-84,-121,-74,-56,-65,-102,-183,-93,-99,-68,-105,-71,-62,-136,-138,-87,-187,-185,-108,-50,-53,-112,-181,-90,-37,-40,70,-139,-124,-42,-59,70,70,-28,-168,70,70,70,70,70,70,70,-21,-164,-130,-127,70,-77,70,-145,-143,70,70,70,70,70,70,70,-41,70,-149,70,-148,-167,-43,70,-169,70,70,-153,-151,70,-159,-111,70,70,70,-171,-170,-155,-157,70,70,70,-166,70,70,70,70,70,70,-147,-175,-174,-173,-172,-146,-144,70,-154,-152,70,-156,-158,]),'SEC':([0,1,6,15,18,38,47,52,70,78,87,94,100,108,121,125,129,132,136,139,141,144,148,154,157,161,163,167,172,173,174,175,176,178,181,186,189,193,196,200,203,206,207,208,209,210,211,212,213,217,227,230,233,237,244,248,251,254,265,266,272,280,281,282,290,301,311,315,316,322,325,329,331,335,345,355,398,407,415,420,421,422,423,424,426,427,433,444,451,459,460,461,462,464,466,471,475,479,482,483,488,498,499,500,511,512,513,527,528,529,530,535,536,557,562,],[71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,]),'OMEGA':([0,1,6,15,18,38,47,52,70,78,87,94,100,108,121,125,129,132,136,139,141,144,148,154,157,161,163,167,172,173,174,175,176,178,181,186,189,193,196,200,203,206,207,208,209,210,211,212,213,217,227,230,233,237,244,248,251,254,265,266,272,280,281,282,290,301,311,315,316,322,325,329,331,335,345,355,398,407,415,420,
421,422,423,424,426,427,433,444,451,459,460,461,462,464,466,471,475,479,482,483,488,498,499,500,511,512,513,527,528,529,530,535,536,557,562,],[85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,]),'NU':([0,1,6,15,18,38,47,52,70,78,87,94,100,108,121,125,129,132,136,139,141,144,148,154,157,161,163,167,172,173,174,175,176,178,181,186,189,193,196,200,203,206,207,208,209,210,211,212,213,217,227,230,233,237,244,248,251,254,265,266,272,280,281,282,290,301,311,315,316,322,325,329,331,335,345,355,398,407,415,420,421,422,423,424,426,427,433,444,451,459,460,461,462,464,466,471,475,479,482,483,488,498,499,500,511,512,513,527,528,529,530,535,536,557,562,],[86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,]),'PSI_LOWER':([0,1,6,15,18,38,47,52,70,78,87,94,100,108,121,125,129,132,136,139,141,144,148,154,157,161,163,167,172,173,174,175,176,178,181,186,189,193,196,200,203,206,207,208,209,210,211,212,213,217,227,230,233,237,244,248,251,254,265,266,272,280,281,282,290,301,311,315,316,322,325,329,331,335,345,355,398,407,415,420,421,422,423,424,426,427,433,444,451,459,460,461,462,464,466,471,475,479,482,483,488,498,499,500,511,512,513,527,528,529,530,535,536,557,562,],[73,73,73,73,73,73,73,73,73,73,73,73,73,73,73,73,73,73,73,73,73,73,73,73,73,73,73,73,73,73,73,73,73,73,73,73,73,73,73,73,73,73,73,73,73,73,73,73,73,73,73,73,73,73,73,73,73,73,73,73,73,73,73,73,73,73,73,73,73,73,73,73,73,73,73,73,73,73,73,73,73,7
3,73,73,73,73,73,73,73,73,73,73,73,73,73,73,73,73,73,73,73,73,73,73,73,73,73,73,73,73,73,73,73,73,73,]),'XI':([0,1,6,15,18,38,47,52,70,78,87,94,100,108,121,125,129,132,136,139,141,144,148,154,157,161,163,167,172,173,174,175,176,178,181,186,189,193,196,200,203,206,207,208,209,210,211,212,213,217,227,230,233,237,244,248,251,254,265,266,272,280,281,282,290,301,311,315,316,322,325,329,331,335,345,355,398,407,415,420,421,422,423,424,426,427,433,444,451,459,460,461,462,464,466,471,475,479,482,483,488,498,499,500,511,512,513,527,528,529,530,535,536,557,562,],[74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,]),'KAPPA':([0,1,6,15,18,38,47,52,70,78,87,94,100,108,121,125,129,132,136,139,141,144,148,154,157,161,163,167,172,173,174,175,176,178,181,186,189,193,196,200,203,206,207,208,209,210,211,212,213,217,227,230,233,237,244,248,251,254,265,266,272,280,281,282,290,301,311,315,316,322,325,329,331,335,345,355,398,407,415,420,421,422,423,424,426,427,433,444,451,459,460,461,462,464,466,471,475,479,482,483,488,498,499,500,511,512,513,527,528,529,530,535,536,557,562,],[75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,]),'ZETA_LOWER':([0,1,6,15,18,38,47,52,70,78,87,94,100,108,121,125,129,132,136,139,141,144,148,154,157,161,163,167,172,173,174,175,176,178,181,186,189,193,196,200,203,206,207,208,209,210,211,212,213,217,227,230,233,237,244,248,251,254,265,266,272,280,281,282,290,301,311,315,316,322,325,329
,331,335,345,355,398,407,415,420,421,422,423,424,426,427,433,444,451,459,460,461,462,464,466,471,475,479,482,483,488,498,499,500,511,512,513,527,528,529,530,535,536,557,562,],[76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,]),'PERCENT':([95,110,324,429,],[224,243,396,224,]),'RHO_LOWER':([0,1,6,15,18,38,47,52,70,78,87,94,100,108,121,125,129,132,136,139,141,144,148,154,157,161,163,167,172,173,174,175,176,178,181,186,189,193,196,200,203,206,207,208,209,210,211,212,213,217,227,230,233,237,244,248,251,254,265,266,272,280,281,282,290,301,311,315,316,322,325,329,331,335,345,355,398,407,415,420,421,422,423,424,426,427,433,444,451,459,460,461,462,464,466,471,475,479,482,483,488,498,499,500,511,512,513,527,528,529,530,535,536,557,562,],[79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,]),'TAU_LOWER':([0,1,6,15,18,38,47,52,70,78,87,94,100,108,121,125,129,132,136,139,141,144,148,154,157,161,163,167,172,173,174,175,176,178,181,186,189,193,196,200,203,206,207,208,209,210,211,212,213,217,227,230,233,237,244,248,251,254,265,266,272,280,281,282,290,301,311,315,316,322,325,329,331,335,345,355,398,407,415,420,421,422,423,424,426,427,433,444,451,459,460,461,462,464,466,471,475,479,482,483,488,498,499,500,511,512,513,527,528,529,530,535,536,557,562,],[80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80
,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,]),'BETA':([0,1,6,15,18,38,47,52,70,78,87,94,100,108,121,125,129,132,136,139,141,144,148,154,157,161,163,167,172,173,174,175,176,178,181,186,189,193,196,200,203,206,207,208,209,210,211,212,213,217,227,230,233,237,244,248,251,254,265,266,272,280,281,282,290,301,311,315,316,322,325,329,331,335,345,355,398,407,415,420,421,422,423,424,426,427,433,444,451,459,460,461,462,464,466,471,475,479,482,483,488,498,499,500,511,512,513,527,528,529,530,535,536,557,562,],[81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,]),'PSI':([0,1,6,15,18,38,47,52,70,78,87,94,100,108,121,125,129,132,136,139,141,144,148,154,157,161,163,167,172,173,174,175,176,178,181,186,189,193,196,200,203,206,207,208,209,210,211,212,213,217,227,230,233,237,244,248,251,254,265,266,272,280,281,282,290,301,311,315,316,322,325,329,331,335,345,355,398,407,415,420,421,422,423,424,426,427,433,444,451,459,460,461,462,464,466,471,475,479,482,483,488,498,499,500,511,512,513,527,528,529,530,535,536,557,562,],[49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,]),'PI_UPPER':([0,1,6,15,18,38,47,52,70,78,87,94,100,108,121,125,129,132,136,139,141,144,148,154,157,161,163,167,172,173,174,175,176,178,181,186,189,193,196,200,203,206,207,208,209,210,211,212,213,217,
227,230,233,237,244,248,251,254,265,266,272,280,281,282,290,301,311,315,316,322,325,329,331,335,345,355,398,407,415,420,421,422,423,424,426,427,433,444,451,459,460,461,462,464,466,471,475,479,482,483,488,498,499,500,511,512,513,527,528,529,530,535,536,557,562,],[82,82,82,82,82,82,82,82,82,82,82,82,82,82,82,82,82,82,82,82,82,82,82,82,82,82,82,82,82,82,82,82,82,82,82,82,82,82,82,82,82,82,82,82,82,82,82,82,82,82,82,82,82,82,82,82,82,82,82,82,82,82,82,82,82,82,82,82,82,82,82,82,82,82,82,82,82,82,82,82,82,82,82,82,82,82,82,82,82,82,82,82,82,82,82,82,82,82,82,82,82,82,82,82,82,82,82,82,82,82,82,82,82,82,82,]),'THETA':([0,1,6,15,18,38,47,52,70,78,87,94,100,108,121,125,129,132,136,139,141,144,148,154,157,161,163,167,172,173,174,175,176,178,181,186,189,193,196,200,203,206,207,208,209,210,211,212,213,217,227,230,233,237,244,248,251,254,265,266,272,280,281,282,290,301,311,315,316,322,325,329,331,335,345,355,398,407,415,420,421,422,423,424,426,427,433,444,451,459,460,461,462,464,466,471,475,479,482,483,488,498,499,500,511,512,513,527,528,529,530,535,536,557,562,],[83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,]),'DIFFERENTIAL':([2,7,8,9,11,12,13,17,21,22,23,25,26,27,28,30,31,32,33,34,35,37,39,43,45,48,49,53,54,55,63,64,65,66,68,69,72,73,74,75,76,79,80,81,82,83,85,86,88,89,90,91,92,95,97,101,103,104,105,106,107,109,110,112,113,116,117,118,120,122,124,126,127,128,130,131,133,134,135,137,140,142,143,145,147,149,153,155,156,158,159,160,162,164,165,166,177,179,180,182,185,187,188,190,192,194,195,197,198,202,204,215,218,221,222,223,224,225,226,228,229,231,232,234,236,238,241,242,243,245,246,249,253,255,256,264,279,283,284,285,286,287,299,302,308,309,312,317,321,324,326,328,330,332,334,336,3
40,342,343,344,346,354,356,358,361,362,363,364,365,366,370,371,375,376,379,380,381,382,383,384,385,387,390,391,392,393,394,395,396,399,402,404,406,411,418,419,428,430,431,432,437,443,448,450,452,454,468,474,476,477,478,481,486,487,492,496,503,504,506,507,510,519,525,526,537,538,539,540,541,546,547,560,561,564,565,],[-218,-17,-242,-18,-216,-205,-6,-233,-245,-236,-14,-10,-13,-238,-247,-237,-210,-20,-207,-209,-29,-12,-228,-5,-202,-32,-234,-212,-235,-213,-215,-203,-204,-211,-222,-9,-162,-206,-244,-229,-208,-219,-214,-227,-226,-240,-231,-246,-16,-224,-225,-11,-15,-4,-19,-248,-217,-220,-8,-221,-160,-239,-7,-241,-230,-243,-232,-223,-134,-135,-117,-116,-34,-98,-97,-120,-119,-33,-83,-82,-80,-79,-49,-48,-86,-85,-123,-122,-76,-75,-58,-57,-67,-66,-104,-103,-95,-94,-101,-100,-70,-69,-107,-106,-73,-72,-64,-63,-137,-89,-88,-110,-109,317,-35,-161,-38,-163,-52,-51,-55,-54,-114,-113,-92,-91,-195,-36,-39,-165,-126,-125,-61,-60,-44,-45,-182,-26,-25,-23,-24,-27,-46,-186,-30,-31,-184,-150,-180,-22,-140,-194,-132,-131,-129,-128,-133,-115,-96,-118,-81,-78,-47,-84,-121,-74,-56,-65,-102,-183,-93,-99,-68,-105,-71,-62,-136,-138,-87,-187,-185,-108,-50,-53,-112,-181,-90,-37,-40,-139,-124,-42,-59,438,-28,-168,-21,-164,-130,-127,463,-77,-145,-143,474,476,-41,-149,-148,-167,-43,-169,-153,-151,-159,-111,-171,-170,520,-155,-157,531,537,-166,-147,-175,-174,-173,-172,-146,-144,-154,-152,-156,-158,]),'UNDERLINE':([4,33,57,60,93,94,],[123,146,184,191,216,220,]),'BEGIN_BMATRIX':([0,1,6,15,18,38,47,52,67,70,78,87,94,100,108,121,125,129,132,136,139,141,144,148,154,157,161,163,167,172,173,174,175,176,178,181,186,189,193,196,199,200,203,206,207,208,209,210,211,212,213,217,227,230,233,237,244,248,251,254,265,266,272,280,281,282,290,301,311,315,316,322,325,329,331,335,345,355,398,407,415,420,421,422,423,424,426,427,433,444,451,459,460,461,462,464,466,471,475,479,482,483,488,498,499,500,511,512,513,527,528,529,530,535,536,557,562,],[87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87
,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,]),'CHOOSE':([2,7,8,9,11,12,13,17,21,22,23,25,26,27,28,30,31,32,33,34,35,37,39,43,45,48,49,53,54,55,63,64,65,66,68,69,72,73,74,75,76,79,80,81,82,83,85,86,88,89,90,91,92,95,97,101,103,104,105,106,107,109,110,112,113,116,117,118,120,122,124,126,127,128,130,131,133,134,135,137,140,142,143,145,147,149,153,155,156,158,159,160,162,164,165,166,177,179,180,182,183,185,187,188,190,192,194,195,197,198,202,204,215,218,222,223,224,225,226,228,229,231,232,234,236,238,241,242,243,245,246,249,253,255,256,264,279,283,284,285,286,287,299,302,308,309,312,317,321,324,326,328,330,332,334,336,340,342,343,344,346,354,356,358,361,362,363,364,365,366,370,371,375,376,379,380,381,382,383,384,385,387,390,391,392,393,394,395,396,399,402,404,406,418,419,428,430,431,432,443,448,450,468,474,476,477,478,481,486,487,492,496,503,504,507,510,526,537,538,539,540,541,546,547,560,561,564,565,],[-218,-17,-242,-18,-216,-205,-6,-233,-245,-236,-14,-10,-13,-238,-247,-237,-210,-20,-207,-209,-29,-12,-228,-5,-202,-32,-234,-212,-235,-213,-215,-203,-204,-211,-222,-9,-162,-206,-244,-229,-208,-219,-214,-227,-226,-240,-231,-246,-16,-224,-225,-11,-15,-4,-19,-248,-217,-220,-8,-221,-160,-239,-7,-241,-230,-243,-232,-223,-134,-135,-117,-116,-34,-98,-97,-120,-119,-33,-83,-82,-80,-79,-49,-48,-86,-85,-123,-122,-76,-75,-58,-57,-67,-66,-104,-103,-95,-94,-101,-100,290,-70,-69,-107,-106,-73,-72,-64,-63,-137,-89,-88,-110,-109,-35,-161,-38,-163,-52,-51,-55,-54,-114,-113,-92,-91,-195,-36,-39,-165,-126,-125,-61,-60,-44,-45,-182,-26,-25,-23,-24,-27,-46,-186,-30,-31,-184,-150,-180,-22,-140,-194,-132,-131,-129,-128,-133,-115,-96,-118,-81,-78,-47,-84,-121,-74,-56,-65,-102,-183,-93,-99,-68,-105,-71,-62,-136,-138,-87,-187,-185,-108,-50,-53,-112,-181,-90,-37,-40,-139,-124,
-42,-59,-28,-168,-21,-164,-130,-127,-77,-145,-143,-41,-149,-148,-167,-43,-169,-153,-151,-159,-111,-171,-170,-155,-157,-166,-147,-175,-174,-173,-172,-146,-144,-154,-152,-156,-158,]),'GAMMA_LOWER':([0,1,6,15,18,38,47,52,70,78,87,94,100,108,121,125,129,132,136,139,141,144,148,154,157,161,163,167,172,173,174,175,176,178,181,186,189,193,196,200,203,206,207,208,209,210,211,212,213,217,227,230,233,237,244,248,251,254,265,266,272,280,281,282,290,301,311,315,316,322,325,329,331,335,345,355,398,407,415,420,421,422,423,424,426,427,433,444,451,459,460,461,462,464,466,471,475,479,482,483,488,498,499,500,511,512,513,527,528,529,530,535,536,557,562,],[89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,]),'END_CASE':([2,7,8,9,11,12,13,17,21,22,23,25,26,27,28,30,31,32,33,34,35,37,39,43,45,48,49,53,54,55,63,64,65,66,68,69,72,73,74,75,76,79,80,81,82,83,85,86,88,89,90,91,92,95,97,101,103,104,105,106,107,109,110,112,113,116,117,118,120,122,124,126,127,128,130,131,133,134,135,137,140,142,143,145,147,149,150,152,153,155,156,158,159,160,162,164,165,166,177,179,180,182,185,187,188,190,192,194,195,197,198,202,204,215,218,222,223,224,225,226,228,229,231,232,234,236,238,241,242,243,245,246,249,253,255,256,264,272,279,283,284,285,286,287,299,302,303,304,305,306,307,308,309,310,312,317,321,324,326,328,330,332,334,336,340,342,343,344,346,354,356,358,359,361,362,363,364,365,366,370,371,375,376,379,380,381,382,383,384,385,387,390,391,392,393,394,395,396,399,402,404,406,418,419,428,430,431,432,443,448,450,468,474,476,477,478,481,486,487,492,496,503,504,507,510,526,537,538,539,540,541,546,547,560,561,564,565,],[-218,-17,-242,-18,-216,-205,-6,-233,-245,-236,-14,-10,-13,-238,-247,-237,-210,-20,-207,-209,-29,-12,-2
28,-5,-202,-32,-234,-212,-235,-213,-215,-203,-204,-211,-222,-9,-162,-206,-244,-229,-208,-219,-214,-227,-226,-240,-231,-246,-16,-224,-225,-11,-15,-4,-19,-248,-217,-220,-8,-221,-160,-239,-7,-241,-230,-243,-232,-223,-134,-135,-117,-116,-34,-98,-97,-120,-119,-33,-83,-82,-80,-79,-49,-48,-86,-85,-179,273,-123,-122,-76,-75,-58,-57,-67,-66,-104,-103,-95,-94,-101,-100,-70,-69,-107,-106,-73,-72,-64,-63,-137,-89,-88,-110,-109,-35,-161,-38,-163,-52,-51,-55,-54,-114,-113,-92,-91,-195,-36,-39,-165,-126,-125,-61,-60,-44,-45,360,-182,-26,-25,-23,-24,-27,-46,-186,-199,-196,-201,-198,-200,-30,-31,-197,-184,-150,-180,-22,-140,-194,-132,-131,-129,-128,-133,-115,-96,-118,-81,-78,-47,-84,-178,-121,-74,-56,-65,-102,-183,-93,-99,-68,-105,-71,-62,-136,-138,-87,-187,-185,-108,-50,-53,-112,-181,-90,-37,-40,-139,-124,-42,-59,-28,-168,-21,-164,-130,-127,-77,-145,-143,-41,-149,-148,-167,-43,-169,-153,-151,-159,-111,-171,-170,-155,-157,-166,-147,-175,-174,-173,-172,-146,-144,-154,-152,-156,-158,]),'MU_LOWER':([0,1,6,15,18,38,47,52,70,78,87,94,100,108,121,125,129,132,136,139,141,144,148,154,157,161,163,167,172,173,174,175,176,178,181,186,189,193,196,200,203,206,207,208,209,210,211,212,213,217,227,230,233,237,244,248,251,254,265,266,272,280,281,282,290,301,311,315,316,322,325,329,331,335,345,355,398,407,415,420,421,422,423,424,426,427,433,444,451,459,460,461,462,464,466,471,475,479,482,483,488,498,499,500,511,512,513,527,528,529,530,535,536,557,562,],[90,90,90,90,90,90,90,90,90,90,90,90,90,90,90,90,90,90,90,90,90,90,90,90,90,90,90,90,90,90,90,90,90,90,90,90,90,90,90,90,90,90,90,90,90,90,90,90,90,90,90,90,90,90,90,90,90,90,90,90,90,90,90,90,90,90,90,90,90,90,90,90,90,90,90,90,90,90,90,90,90,90,90,90,90,90,90,90,90,90,90,90,90,90,90,90,90,90,90,90,90,90,90,90,90,90,90,90,90,90,90,90,90,90,90,]),'DIVIDE':([2,7,8,9,11,12,13,17,21,22,23,25,26,27,28,30,31,32,33,34,35,37,39,43,45,48,49,53,54,55,63,64,65,66,68,69,72,73,74,75,76,79,80,81,82,83,85,86,88,89,90,91,92,95,97,101,103,104,105,106,107,109,110,112,1
13,116,117,118,120,122,124,126,127,128,130,131,133,134,135,137,140,142,143,145,147,149,153,155,156,158,159,160,162,164,165,166,177,179,180,182,185,187,188,190,192,194,195,197,198,202,204,215,218,222,223,224,225,226,228,229,231,232,234,236,238,241,242,243,245,246,249,253,255,256,264,279,283,284,285,286,287,299,302,308,309,312,317,321,324,326,328,330,332,334,336,340,342,343,344,346,354,356,358,361,362,363,364,365,366,370,371,375,376,379,380,381,382,383,384,385,387,390,391,392,393,394,395,396,399,402,404,406,418,419,428,429,430,431,432,434,443,448,450,468,474,476,477,478,481,486,487,492,496,503,504,507,510,526,537,538,539,540,541,546,547,560,561,564,565,],[-218,-17,-242,-18,-216,-205,-6,-233,-245,-236,-14,-10,-13,-238,-247,-237,-210,-20,-207,-209,-29,-12,-228,-5,-202,172,-234,-212,-235,-213,-215,-203,-204,-211,-222,-9,-162,-206,-244,-229,-208,-219,-214,-227,-226,-240,-231,-246,-16,-224,-225,-11,-15,-4,-19,-248,-217,-220,-8,-221,-160,-239,-7,-241,-230,-243,-232,-223,-134,-135,-117,-116,-34,-98,-97,-120,-119,-33,-83,-82,-80,-79,-49,-48,-86,-85,-123,-122,-76,-75,-58,-57,-67,-66,-104,-103,-95,-94,-101,-100,-70,-69,-107,-106,-73,-72,-64,-63,-137,-89,-88,-110,-109,-35,-161,-38,-163,-52,-51,-55,-54,-114,-113,-92,-91,-195,-36,-39,-165,-126,-125,-61,-60,-44,-45,-182,-26,-25,-23,-24,-27,-46,-186,172,172,-184,-150,-180,-22,-140,-194,-132,-131,-129,-128,-133,-115,-96,-118,-81,-78,-47,-84,-121,-74,-56,-65,-102,-183,-93,-99,-68,-105,-71,-62,-136,-138,-87,-187,-185,-108,-50,-53,-112,-181,-90,-37,-40,-139,-124,-42,-59,-28,-168,-21,-4,-164,-130,-127,172,-77,-145,-143,-41,-149,-148,-167,-43,-169,-153,-151,-159,-111,-171,-170,-155,-157,-166,-147,-175,-174,-173,-172,-146,-144,-154,-152,-156,-158,]),'LOG':([0,1,6,15,18,38,47,52,70,78,87,94,100,108,121,125,129,132,136,139,141,144,148,154,157,161,163,167,172,173,174,175,176,178,181,186,189,193,196,200,203,206,207,208,209,210,211,212,213,217,227,230,233,237,244,248,251,254,265,266,272,280,281,282,290,301,311,315,316,322,325,329,331,335,345,35
5,398,407,415,420,421,422,423,424,426,427,433,444,451,459,460,461,462,464,466,471,475,479,482,483,488,498,499,500,511,512,513,527,528,529,530,535,536,557,562,],[93,93,93,93,93,93,93,93,93,93,93,93,93,93,93,93,93,93,93,93,93,93,93,93,93,93,93,93,93,93,93,93,93,93,93,93,93,93,93,93,93,93,93,93,93,93,93,93,93,93,93,93,93,93,93,93,93,93,93,93,93,93,93,93,93,93,93,93,93,93,93,93,93,93,93,93,93,93,93,93,93,93,93,93,93,93,93,93,93,93,93,93,93,93,93,93,93,93,93,93,93,93,93,93,93,93,93,93,93,93,93,93,93,93,93,]),'INTEGRAL':([0,1,6,15,18,38,47,52,70,78,87,94,100,108,121,125,129,132,136,139,141,144,148,154,157,161,163,167,172,173,174,175,176,178,181,186,189,193,196,200,203,206,207,208,209,210,211,212,213,217,227,230,233,237,244,248,251,254,265,266,272,280,281,282,290,301,311,315,316,322,325,329,331,335,345,355,398,407,415,420,421,422,423,424,426,427,433,444,451,459,460,461,462,464,466,471,475,479,482,483,488,498,499,500,511,512,513,527,528,529,530,535,536,557,562,],[94,94,94,94,94,94,94,94,94,94,94,94,94,94,94,94,94,94,94,94,94,94,94,94,94,94,94,94,94,94,94,94,94,94,94,94,94,94,94,94,94,94,94,94,94,94,94,94,94,94,94,94,94,94,94,94,94,94,94,94,94,94,94,94,94,94,94,94,94,94,94,94,94,94,94,94,94,94,94,94,94,94,94,94,94,94,94,94,94,94,94,94,94,94,94,94,94,94,94,94,94,94,94,94,94,94,94,94,94,94,94,94,94,94,94,]),'NUMBER':([0,1,3,5,6,10,14,15,16,18,20,24,36,38,40,41,42,44,46,47,50,51,52,58,59,61,62,70,71,78,87,93,94,96,98,99,100,102,108,111,115,121,125,129,132,136,139,141,144,148,154,157,161,163,167,172,173,174,175,176,178,181,186,189,193,196,200,203,206,207,208,209,210,211,212,213,217,227,230,233,237,244,247,248,250,251,252,254,265,266,270,272,280,281,282,290,301,311,313,315,316,322,325,329,331,335,345,355,398,407,409,413,415,420,421,422,423,424,426,427,433,444,451,459,460,461,462,464,466,471,475,479,482,483,488,498,499,500,511,512,513,527,528,529,530,535,536,543,548,550,555,557,562,],[95,95,120,124,95,128,131,95,135,95,140,143,147,95,153,156,159,162,165,95,177,180,95,185,188,192,1
95,95,202,95,95,215,95,226,229,232,95,236,95,246,253,95,95,95,95,95,95,95,95,95,95,95,95,95,95,95,95,95,95,95,95,95,95,95,95,95,95,95,95,95,95,95,95,95,95,95,95,95,95,95,95,95,330,95,334,95,338,95,95,95,357,95,95,95,95,95,95,95,386,95,95,95,95,95,95,95,95,95,429,95,436,440,95,95,95,95,95,95,95,95,95,95,95,95,95,95,95,95,95,95,95,95,95,95,95,95,95,95,95,95,95,95,95,95,95,95,95,549,552,554,559,95,95,]),'RFLOOR':([2,7,8,9,11,12,13,17,21,22,23,25,26,27,28,30,31,32,33,34,35,37,39,43,45,48,49,53,54,55,63,64,65,66,68,69,72,73,74,75,76,79,80,81,82,83,85,86,88,89,90,91,92,95,97,101,103,104,105,106,107,109,110,112,113,116,117,118,119,120,122,124,126,127,128,130,131,133,134,135,137,140,142,143,145,147,149,153,155,156,158,159,160,162,164,165,166,177,179,180,182,185,187,188,190,192,194,195,197,198,202,204,215,218,222,223,224,225,226,228,229,231,232,234,236,238,241,242,243,245,246,249,253,255,256,264,279,283,284,285,286,287,299,302,308,309,312,317,321,324,326,328,330,332,334,336,340,342,343,344,346,354,356,358,361,362,363,364,365,366,370,371,375,376,379,380,381,382,383,384,385,387,390,391,392,393,394,395,396,399,402,404,406,418,419,428,430,431,432,443,448,450,468,474,476,477,478,481,486,487,492,496,503,504,507,510,526,537,538,539,540,541,546,547,560,561,564,565,],[-218,-17,-242,-18,-216,-205,-6,-233,-245,-236,-14,-10,-13,-238,-247,-237,-210,-20,-207,-209,-29,-12,-228,-5,-202,-32,-234,-212,-235,-213,-215,-203,-204,-211,-222,-9,-162,-206,-244,-229,-208,-219,-214,-227,-226,-240,-231,-246,-16,-224,-225,-11,-15,-4,-19,-248,-217,-220,-8,-221,-160,-239,-7,-241,-230,-243,-232,-223,256,-134,-135,-117,-116,-34,-98,-97,-120,-119,-33,-83,-82,-80,-79,-49,-48,-86,-85,-123,-122,-76,-75,-58,-57,-67,-66,-104,-103,-95,-94,-101,-100,-70,-69,-107,-106,-73,-72,-64,-63,-137,-89,-88,-110,-109,-35,-161,-38,-163,-52,-51,-55,-54,-114,-113,-92,-91,-195,-36,-39,-165,-126,-125,-61,-60,-44,-45,-182,-26,-25,-23,-24,-27,-46,-186,-30,-31,-184,-150,-180,-22,-140,-194,-132,-131,-129,-128,-133,-115,-96,-118,-81,-78
,-47,-84,-121,-74,-56,-65,-102,-183,-93,-99,-68,-105,-71,-62,-136,-138,-87,-187,-185,-108,-50,-53,-112,-181,-90,-37,-40,-139,-124,-42,-59,-28,-168,-21,-164,-130,-127,-77,-145,-143,-41,-149,-148,-167,-43,-169,-153,-151,-159,-111,-171,-170,-155,-157,-166,-147,-175,-174,-173,-172,-146,-144,-154,-152,-156,-158,]),'SINH':([0,1,6,15,18,38,47,52,70,78,87,94,100,108,121,125,129,132,136,139,141,144,148,154,157,161,163,167,172,173,174,175,176,178,181,186,189,193,196,200,203,206,207,208,209,210,211,212,213,217,227,230,233,237,244,248,251,254,265,266,272,280,281,282,290,301,311,315,316,322,325,329,331,335,345,355,398,407,415,420,421,422,423,424,426,427,433,444,451,459,460,461,462,464,466,471,475,479,482,483,488,498,499,500,511,512,513,527,528,529,530,535,536,557,562,],[96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,]),'D':([139,],[265,]),'UPSILON_LOWER':([0,1,6,15,18,38,47,52,70,78,87,94,100,108,121,125,129,132,136,139,141,144,148,154,157,161,163,167,172,173,174,175,176,178,181,186,189,193,196,200,203,206,207,208,209,210,211,212,213,217,227,230,233,237,244,248,251,254,265,266,272,280,281,282,290,301,311,315,316,322,325,329,331,335,345,355,398,407,415,420,421,422,423,424,426,427,433,444,451,459,460,461,462,464,466,471,475,479,482,483,488,498,499,500,511,512,513,527,528,529,530,535,536,557,562,],[106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,10
6,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,]),'LE':([2,7,8,9,11,12,13,17,21,22,23,25,26,27,28,30,31,32,33,34,35,37,39,43,45,48,49,53,54,55,63,64,65,66,68,69,72,73,74,75,76,79,80,81,82,83,84,85,86,88,89,90,91,92,95,97,101,103,104,105,106,107,109,110,112,113,116,117,118,120,122,124,126,127,128,130,131,133,134,135,137,140,142,143,145,147,149,151,153,155,156,158,159,160,162,164,165,166,177,179,180,182,185,187,188,190,192,194,195,197,198,202,204,215,218,222,223,224,225,226,228,229,231,232,234,236,238,241,242,243,245,246,249,253,255,256,264,279,283,284,285,286,287,299,302,308,309,312,317,321,324,326,328,330,332,334,336,340,342,343,344,346,354,356,358,361,362,363,364,365,366,370,371,375,376,379,380,381,382,383,384,385,387,390,391,392,393,394,395,396,399,402,404,406,418,419,428,430,431,432,443,448,450,468,474,476,477,478,481,486,487,492,496,503,504,507,510,526,537,538,539,540,541,546,547,560,561,564,565,],[-218,-17,-242,-18,-216,-205,-6,-233,-245,-236,-14,-10,-13,-238,-247,-237,-210,-20,-207,-209,-29,-12,-228,-5,-202,-32,-234,-212,-235,-213,-215,-203,-204,-211,-222,-9,-162,-206,-244,-229,-208,-219,-214,-227,-226,-240,206,-231,-246,-16,-224,-225,-11,-15,-4,-19,-248,-217,-220,-8,-221,-160,-239,-7,-241,-230,-243,-232,-223,-134,-135,-117,-116,-34,-98,-97,-120,-119,-33,-83,-82,-80,-79,-49,-48,-86,-85,206,-123,-122,-76,-75,-58,-57,-67,-66,-104,-103,-95,-94,-101,-100,-70,-69,-107,-106,-73,-72,-64,-63,-137,-89,-88,-110,-109,-35,-161,-38,-163,-52,-51,-55,-54,-114,-113,-92,-91,-195,-36,-39,-165,-126,-125,-61,-60,-44,-45,-182,-26,-25,-23,-24,-27,-46,-186,-30,-31,-184,-150,-180,-22,-140,-194,-132,-131,-129,-128,-133,-115,-96,-118,-81,-78,-47,-84,-121,-74,-56,-65,-102,-183,-93,-99,-68,-105,-71,-62,-136,-138,-87,-187,-185,-108,-50,-53,-112,-181,-90,-37,-40,-139,-124,-42,-59,-28,-168,-21,-164,-130,-127,-77,-145,-143,-41,-149,-148,-167,-43,-169,-153,-151,-159,-111,-171,-170,-155,-157,-166,-147,-175,-174,-173,-172,-146,-144,-154,-152,-156,-158,]),'LN':([0,1,6,15,18,38,47,
52,70,78,87,94,100,108,121,125,129,132,136,139,141,144,148,154,157,161,163,167,172,173,174,175,176,178,181,186,189,193,196,200,203,206,207,208,209,210,211,212,213,217,227,230,233,237,244,248,251,254,265,266,272,280,281,282,290,301,311,315,316,322,325,329,331,335,345,355,398,407,415,420,421,422,423,424,426,427,433,444,451,459,460,461,462,464,466,471,475,479,482,483,488,498,499,500,511,512,513,527,528,529,530,535,536,557,562,],[99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,]),'SIGMA':([0,1,6,15,18,38,47,52,70,78,87,94,100,108,121,125,129,132,136,139,141,144,148,154,157,161,163,167,172,173,174,175,176,178,181,186,189,193,196,200,203,206,207,208,209,210,211,212,213,217,227,230,233,237,244,248,251,254,265,266,272,280,281,282,290,301,311,315,316,322,325,329,331,335,345,355,398,407,415,420,421,422,423,424,426,427,433,444,451,459,460,461,462,464,466,471,475,479,482,483,488,498,499,500,511,512,513,527,528,529,530,535,536,557,562,],[30,30,30,30,30,30,30,30,30,30,30,30,30,30,30,30,30,30,30,30,30,30,30,30,30,30,30,30,30,30,30,30,30,30,30,30,30,30,30,30,30,30,30,30,30,30,30,30,30,30,30,30,30,30,30,30,30,30,30,30,30,30,30,30,30,30,30,30,30,30,30,30,30,30,30,30,30,30,30,30,30,30,30,30,30,30,30,30,30,30,30,30,30,30,30,30,30,30,30,30,30,30,30,30,30,30,30,30,30,30,30,30,30,30,30,]),'TO':([341,],[407,]),'PHI_LOWER':([0,1,6,15,18,38,47,52,70,78,87,94,100,108,121,125,129,132,136,139,141,144,148,154,157,161,163,167,172,173,174,175,176,178,181,186,189,193,196,200,203,206,207,208,209,210,211,212,213,217,227,230,233,237,244,248,251,254,265,266,272,280,281,282,290,301,311,315,316,322,325,329,331,335,345,355,398,407,415,420,421,422,423,424,426,427,433,444,451,459,460,461,462,464,466,471,475,479,482,
483,488,498,499,500,511,512,513,527,528,529,530,535,536,557,562,],[12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,]),'COMMA':([2,7,8,9,11,12,13,17,21,22,23,25,26,27,28,30,31,32,33,34,35,37,39,43,45,48,49,53,54,55,63,64,65,66,68,69,72,73,74,75,76,79,80,81,82,83,85,86,88,89,90,91,92,95,97,101,103,104,105,106,107,109,110,112,113,116,117,118,120,122,124,126,127,128,130,131,133,134,135,137,140,142,143,145,147,149,153,155,156,158,159,160,162,164,165,166,177,179,180,182,185,187,188,190,192,194,195,197,198,202,204,215,218,222,223,224,225,226,228,229,231,232,234,236,238,241,242,243,245,246,249,253,255,256,261,262,264,268,274,279,283,284,285,286,287,298,299,302,308,309,312,317,321,324,326,327,328,330,332,333,334,336,340,342,343,344,346,354,356,358,361,362,363,364,365,366,370,371,375,376,379,380,381,382,383,384,385,387,390,391,392,393,394,395,396,399,400,401,402,403,404,406,408,418,419,428,430,431,432,443,448,450,468,469,474,476,477,478,481,486,487,492,496,503,504,507,510,514,526,537,538,539,540,541,546,547,560,561,564,565,],[-218,-17,-242,-18,-216,-205,-6,-233,-245,-236,-14,-10,-13,-238,-247,-237,-210,-20,-207,-209,-29,-12,-228,-5,-202,-32,-234,-212,-235,-213,-215,-203,-204,-211,-222,-9,-162,-206,-244,-229,-208,-219,-214,-227,-226,-240,-231,-246,-16,-224,-225,-11,-15,-4,-19,-248,-217,-220,-8,-221,-160,-239,-7,-241,-230,-243,-232,-223,-134,-135,-117,-116,-34,-98,-97,-120,-119,-33,-83,-82,-80,-79,-49,-48,-86,-85,-123,-122,-76,-75,-58,-57,-67,-66,-104,-103,-95,-94,-101,-100,-70,-69,-107,-106,-73,-72,-64,-63,-137,-89,-88,-110,-109,-35,-161,-38,-163,-52,-51,-55,-54,-114,-113,-92,-91,-195,-36,-39,-165,-126,-125,-61,-60,-44,345,-193,-45,355,345,-182,-26,-25,-23,-24,-27,345,-46,-186,-30,-31,
-184,-150,-180,-22,-140,345,-194,-132,-131,345,-129,-128,-133,-115,-96,-118,-81,-78,-47,-84,-121,-74,-56,-65,-102,-183,-93,-99,-68,-105,-71,-62,-136,-138,-87,-187,-185,-108,-50,-53,-112,-181,-90,-37,-40,-139,345,345,-124,345,-42,-59,-192,-28,-168,-21,-164,-130,-127,-77,-145,-143,-41,345,-149,-148,-167,-43,-169,-153,-151,-159,-111,-171,-170,-155,-157,345,-166,-147,-175,-174,-173,-172,-146,-144,-154,-152,-156,-158,]),'BEGIN_VMATRIX':([0,1,6,15,18,38,47,52,70,78,87,94,100,108,121,125,129,132,136,139,141,144,148,154,157,161,163,167,172,173,174,175,176,178,181,186,189,193,196,200,203,206,207,208,209,210,211,212,213,217,227,230,233,237,244,248,251,254,265,266,272,280,281,282,290,301,311,315,316,322,325,329,331,335,345,355,398,407,415,420,421,422,423,424,426,427,433,444,451,459,460,461,462,464,466,471,475,479,482,483,488,498,499,500,511,512,513,527,528,529,530,535,536,557,562,],[100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,]),'UPSILON':([0,1,6,15,18,38,47,52,70,78,87,94,100,108,121,125,129,132,136,139,141,144,148,154,157,161,163,167,172,173,174,175,176,178,181,186,189,193,196,200,203,206,207,208,209,210,211,212,213,217,227,230,233,237,244,248,251,254,265,266,272,280,281,282,290,301,311,315,316,322,325,329,331,335,345,355,398,407,415,420,421,422,423,424,426,427,433,444,451,459,460,461,462,464,466,471,475,479,482,483,488,498,499,500,511,512,513,527,528,529,530,535,536,557,562,],[101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,
101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,]),'ACSC':([0,1,6,15,18,38,47,52,70,78,87,94,100,108,121,125,129,132,136,139,141,144,148,154,157,161,163,167,172,173,174,175,176,178,181,186,189,193,196,200,203,206,207,208,209,210,211,212,213,217,227,230,233,237,244,248,251,254,265,266,272,280,281,282,290,301,311,315,316,322,325,329,331,335,345,355,398,407,415,420,421,422,423,424,426,427,433,444,451,459,460,461,462,464,466,471,475,479,482,483,488,498,499,500,511,512,513,527,528,529,530,535,536,557,562,],[102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,]),'NU_LOWER':([0,1,6,15,18,38,47,52,70,78,87,94,100,108,121,125,129,132,136,139,141,144,148,154,157,161,163,167,172,173,174,175,176,178,181,186,189,193,196,200,203,206,207,208,209,210,211,212,213,217,227,230,233,237,244,248,251,254,265,266,272,280,281,282,290,301,311,315,316,322,325,329,331,335,345,355,398,407,415,420,421,422,423,424,426,427,433,444,451,459,460,461,462,464,466,471,475,479,482,483,488,498,499,500,511,512,513,527,528,529,530,535,536,557,562,],[2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,]),'OMICRON_LOWER':([0,1,6,15,18,38,47,52,70,78,87,94,100,108,121,125,129,132,136,139,141,144,148,154,157,161,163,167,
172,173,174,175,176,178,181,186,189,193,196,200,203,206,207,208,209,210,211,212,213,217,227,230,233,237,244,248,251,254,265,266,272,280,281,282,290,301,311,315,316,322,325,329,331,335,345,355,398,407,415,420,421,422,423,424,426,427,433,444,451,459,460,461,462,464,466,471,475,479,482,483,488,498,499,500,511,512,513,527,528,529,530,535,536,557,562,],[104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,]),'INFINITY':([0,1,6,15,18,38,47,52,70,78,87,94,100,108,121,125,129,132,136,139,141,144,148,154,157,161,163,167,172,173,174,175,176,178,181,186,189,193,196,200,203,206,207,208,209,210,211,212,213,217,227,230,233,237,244,248,251,254,265,266,272,280,281,282,290,301,311,315,316,322,325,329,331,335,345,355,398,407,415,420,421,422,423,424,426,427,433,444,451,459,460,461,462,464,466,471,475,479,482,483,488,498,499,500,511,512,513,527,528,529,530,535,536,557,562,],[105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,]),'ASIN':([0,1,6,15,18,38,47,52,70,78,87,94,100,108,121,125,129,132,136,139,141,144,148,154,157,161,163,167,172,173,174,175,176,178,181,186,189,193,196,200,203,206,207,208,209,210,211,212,213,217,227,230,233,237,244,248,251,254,265,266,272,280,281,282,290,301,311,
315,316,322,325,329,331,335,345,355,398,407,415,420,421,422,423,424,426,427,433,444,451,459,460,461,462,464,466,471,475,479,482,483,488,498,499,500,511,512,513,527,528,529,530,535,536,557,562,],[98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,]),'I':([0,1,6,15,18,38,47,52,70,78,87,94,95,100,108,121,125,129,132,136,139,141,144,148,154,157,161,163,167,172,173,174,175,176,178,181,186,189,193,196,200,203,206,207,208,209,210,211,212,213,217,227,230,233,237,244,248,251,254,265,266,272,280,281,282,290,301,311,315,316,322,325,329,331,335,345,355,398,407,415,420,421,422,423,424,426,427,429,433,444,451,459,460,461,462,464,466,471,475,479,482,483,488,498,499,500,511,512,513,527,528,529,530,535,536,557,562,],[107,107,107,107,107,107,107,107,107,107,107,107,223,107,107,107,107,107,107,107,107,107,107,107,107,107,107,107,107,107,107,107,107,107,107,107,107,107,107,107,107,107,107,107,107,107,107,107,107,107,107,107,107,107,107,107,107,107,107,107,107,107,107,107,107,107,107,107,107,107,107,107,107,107,107,107,107,107,107,107,107,107,107,107,107,107,107,223,107,107,107,107,107,107,107,107,107,107,107,107,107,107,107,107,107,107,107,107,107,107,107,107,107,107,107,107,107,]),'TIMES':([2,7,8,9,11,12,13,17,21,22,23,25,26,27,28,30,31,32,33,34,35,37,39,43,45,48,49,53,54,55,63,64,65,66,68,69,72,73,74,75,76,79,80,81,82,83,85,86,88,89,90,91,92,95,97,101,103,104,105,106,107,109,110,112,113,116,117,118,120,122,124,126,127,128,130,131,133,134,135,137,140,142,143,145,147,149,153,155,156,158,159,160,162,164,165,166,177,179,180,182,185,187,188,190,192,194,195,197,198,202,204,215,218,222,223,224,225,226,228,229,231,232,234,236,238,241,242,243,245,246,249,253,255,256,264,279,283,284,285,286,287,299,302,30
8,309,312,317,321,324,326,328,330,332,334,336,340,342,343,344,346,354,356,358,361,362,363,364,365,366,370,371,375,376,379,380,381,382,383,384,385,387,390,391,392,393,394,395,396,399,402,404,406,418,419,428,429,430,431,432,434,443,448,450,468,474,476,477,478,481,486,487,492,496,503,504,507,510,526,537,538,539,540,541,546,547,560,561,564,565,],[-218,-17,-242,-18,-216,-205,-6,-233,-245,-236,-14,-10,-13,-238,-247,-237,-210,-20,-207,-209,-29,-12,-228,-5,-202,174,-234,-212,-235,-213,-215,-203,-204,-211,-222,-9,-162,-206,-244,-229,-208,-219,-214,-227,-226,-240,-231,-246,-16,-224,-225,-11,-15,-4,-19,-248,-217,-220,-8,-221,-160,-239,-7,-241,-230,-243,-232,-223,-134,-135,-117,-116,-34,-98,-97,-120,-119,-33,-83,-82,-80,-79,-49,-48,-86,-85,-123,-122,-76,-75,-58,-57,-67,-66,-104,-103,-95,-94,-101,-100,-70,-69,-107,-106,-73,-72,-64,-63,-137,-89,-88,-110,-109,-35,-161,-38,-163,-52,-51,-55,-54,-114,-113,-92,-91,-195,-36,-39,-165,-126,-125,-61,-60,-44,-45,-182,-26,-25,-23,-24,-27,-46,-186,174,174,-184,-150,-180,-22,-140,-194,-132,-131,-129,-128,-133,-115,-96,-118,-81,-78,-47,-84,-121,-74,-56,-65,-102,-183,-93,-99,-68,-105,-71,-62,-136,-138,-87,-187,-185,-108,-50,-53,-112,-181,-90,-37,-40,-139,-124,-42,-59,-28,-168,-21,-4,-164,-130,-127,174,-77,-145,-143,-41,-149,-148,-167,-43,-169,-153,-151,-159,-111,-171,-170,-155,-157,-166,-147,-175,-174,-173,-172,-146,-144,-154,-152,-156,-158,]),'LPAREN':([0,1,2,3,5,6,8,10,11,12,14,15,16,17,18,20,21,22,24,27,28,30,31,33,34,36,38,39,40,41,42,44,45,46,47,49,50,51,52,53,54,55,58,59,61,62,63,64,65,66,67,68,69,70,71,73,74,75,76,78,79,80,81,82,83,85,86,87,89,90,93,94,96,98,99,100,101,102,103,104,106,108,109,110,111,112,113,115,116,117,118,121,125,129,132,136,139,141,144,148,154,157,161,163,167,172,173,174,175,176,178,181,186,189,193,196,200,203,206,207,208,209,210,211,212,213,217,227,230,233,237,241,244,245,247,248,250,251,254,265,266,272,280,281,282,290,301,311,315,316,322,325,328,329,331,335,345,355,398,407,415,417,420,421,422,423,424,425,426,427,433
,444,451,459,460,461,462,464,466,471,475,477,479,482,483,488,498,499,500,511,512,513,527,528,529,530,535,536,557,562,],[108,108,-218,121,125,108,-242,129,-216,-205,132,108,136,-233,108,141,-245,-236,144,-238,-247,-237,-210,-207,-209,148,108,-228,154,157,161,163,-202,167,108,-234,178,181,108,-212,-235,-213,186,189,193,196,-215,-203,-204,-211,199,-222,200,108,203,-206,-244,-229,-208,108,-219,-214,-227,-226,-240,-231,-246,108,-224,-225,217,108,227,230,233,108,-248,237,-217,-220,-221,108,-239,244,248,-241,-230,254,-243,-232,-223,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,-195,108,329,331,108,335,108,108,108,108,108,108,108,108,108,108,108,108,108,108,398,-194,108,108,108,108,108,108,108,108,444,108,108,108,108,108,451,108,108,108,108,108,108,108,108,108,108,108,108,108,498,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,]),'IN':([373,377,],[421,421,]),'ZETA':([0,1,6,15,18,38,47,52,70,78,87,94,100,108,121,125,129,132,136,139,141,144,148,154,157,161,163,167,172,173,174,175,176,178,181,186,189,193,196,200,203,206,207,208,209,210,211,212,213,217,227,230,233,237,244,248,251,254,265,266,272,280,281,282,290,301,311,315,316,322,325,329,331,335,345,355,398,407,415,420,421,422,423,424,426,427,433,444,451,459,460,461,462,464,466,471,475,479,482,483,488,498,499,500,511,512,513,527,528,529,530,535,536,557,562,],[109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,]),'ID':([0,1,3,5,6,10,14,15,16,18,20,24,36,38,40,41,42,44,46,47,50,51,52,58,59,61
,62,70,71,78,87,93,94,96,98,99,100,102,108,111,115,121,125,129,132,136,139,141,144,148,154,157,161,163,167,172,173,174,175,176,178,181,186,189,193,196,200,203,206,207,208,209,210,211,212,213,217,227,230,233,237,244,247,248,250,251,254,258,265,266,272,280,281,282,290,291,294,301,311,315,316,322,325,329,331,335,345,355,398,407,415,420,421,422,423,424,426,427,433,439,444,451,459,460,461,462,464,466,467,471,475,479,482,483,488,498,499,500,511,512,513,521,527,528,529,530,534,535,536,557,562,],[110,110,122,126,110,130,133,110,137,110,142,145,149,110,155,158,160,164,166,110,179,182,110,187,190,194,197,110,204,110,110,218,110,228,231,234,110,238,110,249,255,110,110,110,110,110,110,110,110,110,110,110,110,110,110,110,110,110,110,110,110,110,110,110,110,110,110,110,110,110,110,110,110,110,110,110,110,110,110,110,110,110,332,110,336,110,110,341,110,110,110,110,110,110,110,373,377,110,110,110,110,110,110,110,110,110,110,110,110,110,110,110,110,110,110,110,110,110,110,465,110,110,110,110,110,110,110,110,491,110,110,110,110,110,110,110,110,110,110,110,110,533,110,110,110,110,545,110,110,110,110,]),'GRADIENT':([0,1,6,15,18,38,47,52,70,78,87,94,100,108,121,125,129,132,136,139,141,144,148,154,157,161,163,167,172,173,174,175,176,178,181,186,189,193,196,200,203,206,207,208,209,210,211,212,213,217,227,230,233,237,244,248,251,254,265,266,272,280,281,282,290,301,311,315,316,322,325,329,331,335,345,355,398,407,415,420,421,422,423,424,426,427,433,444,451,459,460,461,462,464,466,471,475,479,482,483,488,498,499,500,511,512,513,527,528,529,530,535,536,557,562,],[111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,11
1,111,111,111,111,111,]),'EPSILON':([0,1,6,15,18,38,47,52,70,78,87,94,100,108,121,125,129,132,136,139,141,144,148,154,157,161,163,167,172,173,174,175,176,178,181,186,189,193,196,200,203,206,207,208,209,210,211,212,213,217,227,230,233,237,244,248,251,254,265,266,272,280,281,282,290,301,311,315,316,322,325,329,331,335,345,355,398,407,415,420,421,422,423,424,426,427,433,444,451,459,460,461,462,464,466,471,475,479,482,483,488,498,499,500,511,512,513,527,528,529,530,535,536,557,562,],[112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,]),'OMICRON':([0,1,6,15,18,38,47,52,70,78,87,94,100,108,121,125,129,132,136,139,141,144,148,154,157,161,163,167,172,173,174,175,176,178,181,186,189,193,196,200,203,206,207,208,209,210,211,212,213,217,227,230,233,237,244,248,251,254,265,266,272,280,281,282,290,301,311,315,316,322,325,329,331,335,345,355,398,407,415,420,421,422,423,424,426,427,433,444,451,459,460,461,462,464,466,471,475,479,482,483,488,498,499,500,511,512,513,527,528,529,530,535,536,557,562,],[113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,]),'GE':([2,7,8,9,11,12,13,17,21,22,23,25,26,27,28,30,31,32,33,34,35,37,39,43,45,48,49,53,54,55,63,64,65,66,68,69,72,73,74,75,76,79,
80,81,82,83,84,85,86,88,89,90,91,92,95,97,101,103,104,105,106,107,109,110,112,113,116,117,118,120,122,124,126,127,128,130,131,133,134,135,137,140,142,143,145,147,149,151,153,155,156,158,159,160,162,164,165,166,177,179,180,182,185,187,188,190,192,194,195,197,198,202,204,215,218,222,223,224,225,226,228,229,231,232,234,236,238,241,242,243,245,246,249,253,255,256,264,279,283,284,285,286,287,299,302,308,309,312,317,321,324,326,328,330,332,334,336,340,342,343,344,346,354,356,358,361,362,363,364,365,366,370,371,375,376,379,380,381,382,383,384,385,387,390,391,392,393,394,395,396,399,402,404,406,418,419,428,430,431,432,443,448,450,468,474,476,477,478,481,486,487,492,496,503,504,507,510,526,537,538,539,540,541,546,547,560,561,564,565,],[-218,-17,-242,-18,-216,-205,-6,-233,-245,-236,-14,-10,-13,-238,-247,-237,-210,-20,-207,-209,-29,-12,-228,-5,-202,-32,-234,-212,-235,-213,-215,-203,-204,-211,-222,-9,-162,-206,-244,-229,-208,-219,-214,-227,-226,-240,208,-231,-246,-16,-224,-225,-11,-15,-4,-19,-248,-217,-220,-8,-221,-160,-239,-7,-241,-230,-243,-232,-223,-134,-135,-117,-116,-34,-98,-97,-120,-119,-33,-83,-82,-80,-79,-49,-48,-86,-85,208,-123,-122,-76,-75,-58,-57,-67,-66,-104,-103,-95,-94,-101,-100,-70,-69,-107,-106,-73,-72,-64,-63,-137,-89,-88,-110,-109,-35,-161,-38,-163,-52,-51,-55,-54,-114,-113,-92,-91,-195,-36,-39,-165,-126,-125,-61,-60,-44,-45,-182,-26,-25,-23,-24,-27,-46,-186,-30,-31,-184,-150,-180,-22,-140,-194,-132,-131,-129,-128,-133,-115,-96,-118,-81,-78,-47,-84,-121,-74,-56,-65,-102,-183,-93,-99,-68,-105,-71,-62,-136,-138,-87,-187,-185,-108,-50,-53,-112,-181,-90,-37,-40,-139,-124,-42,-59,-28,-168,-21,-164,-130,-127,-77,-145,-143,-41,-149,-148,-167,-43,-169,-153,-151,-159,-111,-171,-170,-155,-157,-166,-147,-175,-174,-173,-172,-146,-144,-154,-152,-156,-158,]),'SQRT':([0,1,6,15,18,38,47,52,70,78,87,94,100,108,121,125,129,132,136,139,141,144,148,154,157,161,163,167,172,173,174,175,176,178,181,186,189,193,196,200,203,206,207,208,209,210,211,212,213,217,227,230,233,237,244,248,2
51,254,265,266,272,280,281,282,290,301,311,315,316,322,325,329,331,335,345,355,398,407,415,420,421,422,423,424,426,427,433,444,451,459,460,461,462,464,466,471,475,479,482,483,488,498,499,500,511,512,513,527,528,529,530,535,536,557,562,],[114,114,114,114,114,114,114,114,114,114,114,114,114,114,114,114,114,114,114,114,114,114,114,114,114,114,114,114,114,114,114,114,114,114,114,114,114,114,114,114,114,114,114,114,114,114,114,114,114,114,114,114,114,114,114,114,114,114,114,114,114,114,114,114,114,114,114,114,114,114,114,114,114,114,114,114,114,114,114,114,114,114,114,114,114,114,114,114,114,114,114,114,114,114,114,114,114,114,114,114,114,114,114,114,114,114,114,114,114,114,114,114,114,114,114,]),'ACOSH':([0,1,6,15,18,38,47,52,70,78,87,94,100,108,121,125,129,132,136,139,141,144,148,154,157,161,163,167,172,173,174,175,176,178,181,186,189,193,196,200,203,206,207,208,209,210,211,212,213,217,227,230,233,237,244,248,251,254,265,266,272,280,281,282,290,301,311,315,316,322,325,329,331,335,345,355,398,407,415,420,421,422,423,424,426,427,433,444,451,459,460,461,462,464,466,471,475,479,482,483,488,498,499,500,511,512,513,527,528,529,530,535,536,557,562,],[115,115,115,115,115,115,115,115,115,115,115,115,115,115,115,115,115,115,115,115,115,115,115,115,115,115,115,115,115,115,115,115,115,115,115,115,115,115,115,115,115,115,115,115,115,115,115,115,115,115,115,115,115,115,115,115,115,115,115,115,115,115,115,115,115,115,115,115,115,115,115,115,115,115,115,115,115,115,115,115,115,115,115,115,115,115,115,115,115,115,115,115,115,115,115,115,115,115,115,115,115,115,115,115,115,115,115,115,115,115,115,115,115,115,115,]),'ALPHA':([0,1,6,15,18,38,47,52,70,78,87,94,100,108,121,125,129,132,136,139,141,144,148,154,157,161,163,167,172,173,174,175,176,178,181,186,189,193,196,200,203,206,207,208,209,210,211,212,213,217,227,230,233,237,244,248,251,254,265,266,272,280,281,282,290,301,311,315,316,322,325,329,331,335,345,355,398,407,415,420,421,422,423,424,426,427,433,444,451,459,460,461,462,464,466,471
,475,479,482,483,488,498,499,500,511,512,513,527,528,529,530,535,536,557,562,],[116,116,116,116,116,116,116,116,116,116,116,116,116,116,116,116,116,116,116,116,116,116,116,116,116,116,116,116,116,116,116,116,116,116,116,116,116,116,116,116,116,116,116,116,116,116,116,116,116,116,116,116,116,116,116,116,116,116,116,116,116,116,116,116,116,116,116,116,116,116,116,116,116,116,116,116,116,116,116,116,116,116,116,116,116,116,116,116,116,116,116,116,116,116,116,116,116,116,116,116,116,116,116,116,116,116,116,116,116,116,116,116,116,116,116,]),'RBRACKET':([338,],[405,]),'MOD':([2,7,8,9,11,12,13,17,21,22,23,25,26,27,28,30,31,32,33,34,35,37,39,43,45,48,49,53,54,55,63,64,65,66,68,69,72,73,74,75,76,79,80,81,82,83,85,86,88,89,90,91,92,95,97,101,103,104,105,106,107,109,110,112,113,116,117,118,120,122,124,126,127,128,130,131,133,134,135,137,140,142,143,145,147,149,153,155,156,158,159,160,162,164,165,166,177,179,180,182,185,187,188,190,192,194,195,197,198,202,204,215,218,222,223,224,225,226,228,229,231,232,234,236,238,241,242,243,245,246,249,253,255,256,264,279,283,284,285,286,287,299,302,308,309,312,317,321,324,326,328,330,332,334,336,340,342,343,344,346,354,356,358,361,362,363,364,365,366,370,371,375,376,379,380,381,382,383,384,385,387,390,391,392,393,394,395,396,399,402,404,406,418,419,428,429,430,431,432,434,443,448,450,468,474,476,477,478,481,486,487,492,496,503,504,507,510,526,537,538,539,540,541,546,547,560,561,564,565,],[-218,-17,-242,-18,-216,-205,-6,-233,-245,-236,-14,-10,-13,-238,-247,-237,-210,-20,-207,-209,-29,-12,-228,-5,-202,176,-234,-212,-235,-213,-215,-203,-204,-211,-222,-9,-162,-206,-244,-229,-208,-219,-214,-227,-226,-240,-231,-246,-16,-224,-225,-11,-15,-4,-19,-248,-217,-220,-8,-221,-160,-239,-7,-241,-230,-243,-232,-223,-134,-135,-117,-116,-34,-98,-97,-120,-119,-33,-83,-82,-80,-79,-49,-48,-86,-85,-123,-122,-76,-75,-58,-57,-67,-66,-104,-103,-95,-94,-101,-100,-70,-69,-107,-106,-73,-72,-64,-63,-137,-89,-88,-110,-109,-35,-161,-38,-163,-52,-51,-55,-54,-114,-113,-92,-9
1,-195,-36,-39,-165,-126,-125,-61,-60,-44,-45,-182,-26,-25,-23,-24,-27,-46,-186,176,176,-184,-150,-180,-22,-140,-194,-132,-131,-129,-128,-133,-115,-96,-118,-81,-78,-47,-84,-121,-74,-56,-65,-102,-183,-93,-99,-68,-105,-71,-62,-136,-138,-87,-187,-185,-108,-50,-53,-112,-181,-90,-37,-40,-139,-124,-42,-59,-28,-168,-21,-4,-164,-130,-127,176,-77,-145,-143,-41,-149,-148,-167,-43,-169,-153,-151,-159,-111,-171,-170,-155,-157,-166,-147,-175,-174,-173,-172,-146,-144,-154,-152,-156,-158,]),'BETA_LOWER':([0,1,6,15,18,38,47,52,70,78,87,94,100,108,121,125,129,132,136,139,141,144,148,154,157,161,163,167,172,173,174,175,176,178,181,186,189,193,196,200,203,206,207,208,209,210,211,212,213,217,227,230,233,237,244,248,251,254,265,266,272,280,281,282,290,301,311,315,316,322,325,329,331,335,345,355,398,407,415,420,421,422,423,424,426,427,433,444,451,459,460,461,462,464,466,471,475,479,482,483,488,498,499,500,511,512,513,527,528,529,530,535,536,557,562,],[118,118,118,118,118,118,118,118,118,118,118,118,118,118,118,118,118,118,118,118,118,118,118,118,118,118,118,118,118,118,118,118,118,118,118,118,118,118,118,118,118,118,118,118,118,118,118,118,118,118,118,118,118,118,118,118,118,118,118,118,118,118,118,118,118,118,118,118,118,118,118,118,118,118,118,118,118,118,118,118,118,118,118,118,118,118,118,118,118,118,118,118,118,118,118,118,118,118,118,118,118,118,118,118,118,118,118,118,118,118,118,118,118,118,118,]),}
# Inflate the compressed LALR action table.  _lr_action_items maps each
# terminal symbol to a pair of parallel lists (states, actions); pivot
# that into _lr_action[state][terminal] -> action.  Per the PLY table
# encoding, a positive action is a shift to that state and a negative
# action reduces by production |n|.
_lr_action = {}
for _terminal, (_states, _actions) in _lr_action_items.items():
    for _state, _action in zip(_states, _actions):
        _lr_action.setdefault(_state, {})[_terminal] = _action
del _lr_action_items  # compressed form no longer needed once expanded
_lr_goto_items = {'DivisorFunction':([0,1,6,15,18,38,47,52,70,78,87,94,100,108,121,125,129,132,136,139,141,144,148,154,157,161,163,167,172,173,174,175,176,178,181,186,189,193,196,200,203,206,207,208,209,210,211,212,213,217,227,230,233,237,244,248,251,254,265,266,272,280,281,282,290,301,311,315,316,322,325,329,331,335,345,355,398,407,415,420,421,422,423,424,426,427,433,444,451,459,460,461,462,464,466,471,475,479,482,483,488,498,499,500,511,512,513,527,528,529,530,535,536,557,562,],[91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,]),'DifferentialVariable':([0,1,6,15,18,38,47,52,70,78,87,94,100,108,121,125,129,132,136,139,141,144,148,154,157,161,163,167,172,173,174,175,176,178,181,186,189,193,196,200,203,206,207,208,209,210,211,212,213,217,227,230,233,237,244,248,251,254,265,266,272,280,281,282,290,301,311,315,316,322,325,329,331,335,345,355,398,407,415,420,421,422,423,424,426,427,433,444,451,459,460,461,462,464,466,471,475,479,482,483,488,498,499,500,511,512,513,527,528,529,530,535,536,557,562,],[92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,]),'Matrix':([0,1,6,15,18,38,47,52,67,70,78,87,94,100,108,121,125,129,132,136,139,141,144,148,154,157,161,163,167,172,173,174,175,176,178,181,186,189,193,196,199,200,203,206,207,208,209,210,211,212,213,217,227,230,233,237,244,248,251,254,265,266,272,280,281,282,290,301,311,315,316,322,325,329,331,335,345,355,398,407,415,420,421,422,423,424,426,42
7,433,444,451,459,460,461,462,464,466,471,475,479,482,483,488,498,499,500,511,512,513,527,528,529,530,535,536,557,562,],[7,7,7,7,7,7,7,7,198,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,297,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,]),'FractionalExpression':([0,1,6,15,18,38,47,52,70,78,87,94,100,108,121,125,129,132,136,139,141,144,148,154,157,161,163,167,172,173,174,175,176,178,181,186,189,193,196,200,203,206,207,208,209,210,211,212,213,217,227,230,233,237,244,248,251,254,265,266,272,280,281,282,290,301,311,315,316,322,325,329,331,335,345,355,398,407,415,420,421,422,423,424,426,427,433,444,451,459,460,461,462,464,466,471,475,479,482,483,488,498,499,500,511,512,513,527,528,529,530,535,536,557,562,],[32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,]),'Factor':([0,1,6,15,18,38,47,52,70,78,87,94,100,108,121,125,129,132,136,139,141,144,148,154,157,161,163,167,172,173,174,175,176,178,181,186,189,193,196,200,203,206,207,208,209,210,211,212,213,217,227,230,233,237,244,248,251,254,265,266,272,280,281,282,290,301,311,315,316,322,325,329,331,335,345,355,398,407,415,420,421,422,423,424,426,427,433,444,451,459,460,461,462,464,466,471,475,479,482,483,488,498,499,500,511,512,513,527,528,529,530,535,536,557,562,],[35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,283,284,285,286,287,35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,]),'Deri
vative':([0,1,6,15,18,38,47,52,70,78,87,94,100,108,121,125,129,132,136,139,141,144,148,154,157,161,163,167,172,173,174,175,176,178,181,186,189,193,196,200,203,206,207,208,209,210,211,212,213,217,227,230,233,237,244,248,251,254,265,266,272,280,281,282,290,301,311,315,316,322,325,329,331,335,345,355,398,407,415,420,421,422,423,424,426,427,433,444,451,459,460,461,462,464,466,471,475,479,482,483,488,498,499,500,511,512,513,527,528,529,530,535,536,557,562,],[37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,]),'Norm':([0,1,6,15,18,38,47,52,70,78,87,94,100,108,121,125,129,132,136,139,141,144,148,154,157,161,163,167,172,173,174,175,176,178,181,186,189,193,196,200,203,206,207,208,209,210,211,212,213,217,227,230,233,237,244,248,251,254,265,266,272,280,281,282,290,301,311,315,316,322,325,329,331,335,345,355,398,407,415,420,421,422,423,424,426,427,433,444,451,459,460,461,462,464,466,471,475,479,482,483,488,498,499,500,511,512,513,527,528,529,530,535,536,557,562,],[97,97,97,97,97,97,97,97,97,97,97,97,97,97,97,97,97,97,97,97,97,97,97,97,97,97,97,97,97,97,97,97,97,97,97,97,97,97,97,97,97,97,97,97,97,97,97,97,97,97,97,97,97,97,97,97,97,97,97,97,97,97,97,97,97,97,97,97,97,97,97,97,97,97,97,97,97,97,97,97,97,97,97,97,97,97,97,97,97,97,97,97,97,97,97,97,97,97,97,97,97,97,97,97,97,97,97,97,97,97,97,97,97,97,97,]),'Determinant':([0,1,6,15,18,38,47,52,70,78,87,94,100,108,121,125,129,132,136,139,141,144,148,154,157,161,163,167,172,173,174,175,176,178,181,186,189,193,196,200,203,206,207,208,209,210,211,212,213,217,227,230,233,237,244,248,251,254,265,266,272,280,281,282,290,301,311,315,316,322,325,329,331,335,345,355,398,407,415,420,421,422,423,424,426,427,433,444,451,459,460,461,462,464,466,471,475,
479,482,483,488,498,499,500,511,512,513,527,528,529,530,535,536,557,562,],[9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,]),'ExpressionsRows':([47,78,87,100,],[168,205,214,235,]),'Symbol':([0,1,6,15,18,38,47,52,70,78,87,94,100,108,121,125,129,132,136,139,141,144,148,154,157,161,163,167,172,173,174,175,176,178,181,186,189,193,196,200,203,206,207,208,209,210,211,212,213,217,227,230,233,237,244,248,251,254,265,266,272,280,281,282,290,301,311,315,316,322,325,329,331,335,345,355,398,407,415,420,421,422,423,424,426,427,433,444,451,459,460,461,462,464,466,471,475,479,482,483,488,498,499,500,511,512,513,527,528,529,530,535,536,557,562,],[69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,]),'NapierNumber':([0,1,6,15,18,38,47,52,70,78,87,94,100,108,121,125,129,132,136,139,141,144,148,154,157,161,163,167,172,173,174,175,176,178,181,186,189,193,196,200,203,206,207,208,209,210,211,212,213,217,227,230,233,237,244,248,251,254,265,266,272,280,281,282,290,301,311,315,316,322,325,329,331,335,345,355,398,407,415,420,421,422,423,424,426,427,433,444,451,459,460,461,462,464,466,471,475,479,482,483,488,498,499,500,511,512,513,527,528,529,530,535,536,557,562,],[13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,]),'ImaginaryNumber'
:([0,1,6,15,18,38,47,52,70,78,87,94,100,108,121,125,129,132,136,139,141,144,148,154,157,161,163,167,172,173,174,175,176,178,181,186,189,193,196,200,203,206,207,208,209,210,211,212,213,217,227,230,233,237,244,248,251,254,265,266,272,280,281,282,290,301,311,315,316,322,325,329,331,335,345,355,398,407,415,420,421,422,423,424,426,427,433,444,451,459,460,461,462,464,466,471,475,479,482,483,488,498,499,500,511,512,513,527,528,529,530,535,536,557,562,],[43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,]),'ExpressionsRow':([47,78,87,100,280,301,311,322,],[169,169,169,169,367,367,367,367,]),'Term':([0,1,6,15,18,38,47,52,70,78,87,94,100,108,121,125,129,132,136,139,141,144,148,154,157,161,163,167,178,181,186,189,193,196,200,203,206,207,208,209,210,211,212,213,217,227,230,233,237,244,248,251,254,265,266,272,280,281,282,290,301,311,315,316,322,325,329,331,335,345,355,398,407,415,420,421,422,423,424,426,427,433,444,451,459,460,461,462,464,466,471,475,479,482,483,488,498,499,500,511,512,513,527,528,529,530,535,536,557,562,],[48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,308,309,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,434,48,48,48,48,48,48,48,48,48,48,48,48,309,308,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,]),'PrimeList':([110,],[245,]),'Constraint':([0,38,272,],[77,150,359,]),'Range':([421,],[446,]),'Limit':([0,1,6,15,18,38,47,52,70,78,87,94,100,108,121,125,129,132,136,139,141,144,148,154,157,161,163,167,172,173,174,175,176,178,181,186,189,193,196,200,203,206,207,208,209,210,211,212,213,217,227,230,233,237,244,248,251,254,265,266,272,280,281,
282,290,301,311,315,316,322,325,329,331,335,345,355,398,407,415,420,421,422,423,424,426,427,433,444,451,459,460,461,462,464,466,471,475,479,482,483,488,498,499,500,511,512,513,527,528,529,530,535,536,557,562,],[23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,]),'IndexingExpression':([291,294,],[374,378,]),'Expression':([0,1,6,15,18,38,47,52,70,78,87,94,100,108,121,125,129,132,136,139,141,144,148,154,157,161,163,167,178,181,186,189,193,196,200,203,206,207,208,209,210,213,217,227,230,233,237,244,248,251,254,265,266,272,280,281,282,290,301,311,315,316,322,325,329,331,335,345,355,398,407,415,420,421,422,423,424,426,427,433,444,451,459,462,464,466,471,475,479,482,483,488,498,499,500,511,512,513,527,528,529,530,535,536,557,562,],[84,119,127,134,138,151,170,183,201,170,170,221,170,239,257,259,260,262,263,267,268,269,271,262,275,276,277,278,288,289,292,293,295,296,262,300,303,304,305,306,307,310,314,318,319,320,323,262,262,337,339,348,352,151,170,368,369,372,170,170,388,389,170,397,262,262,262,408,416,239,435,442,445,447,448,449,450,452,454,456,262,473,481,484,487,490,494,497,369,503,504,507,262,127,134,523,524,525,538,539,540,541,546,547,561,564,]),'Constraints':([38,],[152,]),'IteratedExpression':([0,1,6,15,18,38,47,52,70,78,87,94,100,108,121,125,129,132,136,139,141,144,148,154,157,161,163,167,172,173,174,175,176,178,181,186,189,193,196,200,203,206,207,208,209,210,211,212,213,217,227,230,233,237,244,248,251,254,265,266,272,280,281,282,290,301,311,315,316,322,325,329,331,335,345,355,398,407,415,420,421,422,423,424,426,427,433,444,451,459,460,461,462,464,466,471,475,479,482,483,488,498,499,500,511,512,513,527,528,529,530,535,536,557,562,],[25,25,25,25,25,25,25,25,25,25,25,25,25,25,2
5,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,]),'Integral':([0,1,6,15,18,38,47,52,70,78,87,94,100,108,121,125,129,132,136,139,141,144,148,154,157,161,163,167,172,173,174,175,176,178,181,186,189,193,196,200,203,206,207,208,209,210,211,212,213,217,227,230,233,237,244,248,251,254,265,266,272,280,281,282,290,301,311,315,316,322,325,329,331,335,345,355,398,407,415,420,421,422,423,424,426,427,433,444,451,459,460,461,462,464,466,471,475,479,482,483,488,498,499,500,511,512,513,527,528,529,530,535,536,557,562,],[26,26,26,26,26,26,26,26,26,26,26,26,26,26,26,26,26,26,26,26,26,26,26,26,26,26,26,26,26,26,26,26,26,26,26,26,26,26,26,26,26,26,26,26,26,26,26,26,26,26,26,26,26,26,26,26,26,26,26,26,26,26,26,26,26,26,26,26,26,26,26,26,26,26,26,26,26,26,26,26,26,26,26,26,26,26,26,26,26,26,26,26,26,26,26,26,26,26,26,26,26,26,26,26,26,26,26,26,26,26,26,26,26,26,26,]),'ConstraintSystem':([0,],[56,]),'ChooseExpression':([0,1,6,15,18,38,47,52,70,78,87,94,100,108,121,125,129,132,136,139,141,144,148,154,157,161,163,167,172,173,174,175,176,178,181,186,189,193,196,200,203,206,207,208,209,210,211,212,213,217,227,230,233,237,244,248,251,254,265,266,272,280,281,282,290,301,311,315,316,322,325,329,331,335,345,355,398,407,415,420,421,422,423,424,426,427,433,444,451,459,460,461,462,464,466,471,475,479,482,483,488,498,499,500,511,512,513,527,528,529,530,535,536,557,562,],[88,88,88,88,88,88,88,88,88,88,88,88,88,88,88,88,88,88,88,88,88,88,88,88,88,88,88,88,88,88,88,88,88,88,88,88,88,88,88,88,88,88,88,88,88,88,88,88,88,88,88,88,88,88,88,88,88,88,88,88,88,88,88,88,88,88,88,88,88,88,88,88,88,88,88,88,88,88,88,88,88,88,88,88,88,88,88,88,88,88,88,88,88,88,88,88,88,88,88,88,88,88,88,88,88,88,88,88,88,88,88,88,88,88,88,]),'ExpressionList':([132,154,200,244,24
8,329,331,335,444,498,],[261,274,298,327,333,400,401,403,469,514,]),'MAIN':([0,],[29,]),}
# Inflate the compressed LALR goto table.  _lr_goto_items maps each
# nonterminal symbol to parallel lists (states, targets); pivot that
# into _lr_goto[state][nonterminal] -> target state.
_lr_goto = {}
for _nonterminal, (_states, _targets) in _lr_goto_items.items():
    for _state, _target in zip(_states, _targets):
        _lr_goto.setdefault(_state, {})[_nonterminal] = _target
del _lr_goto_items  # compressed form no longer needed once expanded
_lr_productions = [
("S' -> MAIN","S'",1,None,None,None),
('MAIN -> Expression','MAIN',1,'p_Main','parser.py',49),
('MAIN -> Constraint','MAIN',1,'p_Main','parser.py',50),
('MAIN -> ConstraintSystem','MAIN',1,'p_Main','parser.py',51),
('Factor -> NUMBER','Factor',1,'p_Factor','parser.py',55),
('Factor -> ImaginaryNumber','Factor',1,'p_Factor','parser.py',56),
('Factor -> NapierNumber','Factor',1,'p_Factor','parser.py',57),
('Factor -> ID','Factor',1,'p_Factor','parser.py',58),
('Factor -> INFINITY','Factor',1,'p_Factor','parser.py',59),
('Factor -> Symbol','Factor',1,'p_Factor','parser.py',60),
('Factor -> IteratedExpression','Factor',1,'p_Factor','parser.py',61),
('Factor -> DivisorFunction','Factor',1,'p_Factor','parser.py',62),
('Factor -> Derivative','Factor',1,'p_Factor','parser.py',63),
('Factor -> Integral','Factor',1,'p_Factor','parser.py',64),
('Factor -> Limit','Factor',1,'p_Factor','parser.py',65),
('Factor -> DifferentialVariable','Factor',1,'p_Factor','parser.py',66),
('Factor -> ChooseExpression','Factor',1,'p_Factor','parser.py',67),
('Factor -> Matrix','Factor',1,'p_Factor','parser.py',68),
('Factor -> Determinant','Factor',1,'p_Factor','parser.py',69),
('Factor -> Norm','Factor',1,'p_Factor','parser.py',70),
('Factor -> FractionalExpression','Factor',1,'p_Factor','parser.py',71),
('Factor -> ID CARET LBRACE Expression RBRACE','Factor',5,'p_Factor','parser.py',72),
('Factor -> LPAREN Expression RPAREN','Factor',3,'p_Factor','parser.py',73),
('Term -> Term TIMES Factor','Term',3,'p_Term','parser.py',89),
('Term -> Term DOT Factor','Term',3,'p_Term','parser.py',90),
('Term -> Term CROSS Factor','Term',3,'p_Term','parser.py',91),
('Term -> Term DIVIDE Factor','Term',3,'p_Term','parser.py',92),
('Term -> Term MOD Factor','Term',3,'p_Term','parser.py',93),
('Term -> Term CARET LBRACE Expression RBRACE','Term',5,'p_Term','parser.py',94),
('Term -> Factor','Term',1,'p_Term','parser.py',95),
('Expression -> Expression PLUS Term','Expression',3,'p_Expression_binop','parser.py',127),
('Expression -> Expression MINUS Term','Expression',3,'p_Expression_binop','parser.py',128),
('Expression -> Term','Expression',1,'p_Expression_binop','parser.py',129),
('Factor -> PLUS Expression','Factor',2,'p_UnaryExpressionOperatorBefore','parser.py',145),
('Factor -> MINUS Expression','Factor',2,'p_UnaryExpressionOperatorBefore','parser.py',146),
('Factor -> NUMBER FACTORIAL','Factor',2,'p_UnaryExpressionOperatorAfter','parser.py',156),
('Factor -> ID FACTORIAL','Factor',2,'p_UnaryExpressionOperatorAfter','parser.py',157),
('Factor -> LPAREN Expression RPAREN FACTORIAL','Factor',4,'p_UnaryExpressionOperatorAfter','parser.py',158),
('Factor -> NUMBER PERCENT','Factor',2,'p_UnaryExpressionOperatorAfter','parser.py',159),
('Factor -> ID PERCENT','Factor',2,'p_UnaryExpressionOperatorAfter','parser.py',160),
('Factor -> LPAREN Expression RPAREN PERCENT','Factor',4,'p_UnaryExpressionOperatorAfter','parser.py',161),
('FractionalExpression -> FRAC LBRACE Expression RBRACE LBRACE Expression RBRACE','FractionalExpression',7,'p_FractionalExpression','parser.py',182),
('Factor -> SQRT LBRACE Expression RBRACE','Factor',4,'p_FunctionExpression','parser.py',187),
('Factor -> SQRT LBRACKET NUMBER RBRACKET LBRACE Expression RBRACE','Factor',7,'p_FunctionExpression','parser.py',189),
('Factor -> LFLOOR Expression RFLOOR','Factor',3,'p_FunctionExpression','parser.py',191),
('Factor -> LCEIL Expression RCEIL','Factor',3,'p_FunctionExpression','parser.py',193),
('Factor -> PIPE Expression PIPE','Factor',3,'p_FunctionExpression','parser.py',195),
('Factor -> ASINH LPAREN Expression RPAREN','Factor',4,'p_FunctionExpression','parser.py',197),
('Factor -> ASINH ID','Factor',2,'p_FunctionExpression','parser.py',199),
('Factor -> ASINH NUMBER','Factor',2,'p_FunctionExpression','parser.py',201),
('Factor -> SINH LPAREN Expression RPAREN','Factor',4,'p_FunctionExpression','parser.py',203),
('Factor -> SINH ID','Factor',2,'p_FunctionExpression','parser.py',205),
('Factor -> SINH NUMBER','Factor',2,'p_FunctionExpression','parser.py',207),
('Factor -> ASIN LPAREN Expression RPAREN','Factor',4,'p_FunctionExpression','parser.py',209),
('Factor -> ASIN ID','Factor',2,'p_FunctionExpression','parser.py',211),
('Factor -> ASIN NUMBER','Factor',2,'p_FunctionExpression','parser.py',213),
('Factor -> SIN LPAREN Expression RPAREN','Factor',4,'p_FunctionExpression','parser.py',215),
('Factor -> SIN ID','Factor',2,'p_FunctionExpression','parser.py',217),
('Factor -> SIN NUMBER','Factor',2,'p_FunctionExpression','parser.py',219),
('Factor -> ACOSH LPAREN Expression RPAREN','Factor',4,'p_FunctionExpression','parser.py',221),
('Factor -> ACOSH ID','Factor',2,'p_FunctionExpression','parser.py',223),
('Factor -> ACOSH NUMBER','Factor',2,'p_FunctionExpression','parser.py',225),
('Factor -> COSH LPAREN Expression RPAREN','Factor',4,'p_FunctionExpression','parser.py',227),
('Factor -> COSH ID','Factor',2,'p_FunctionExpression','parser.py',229),
('Factor -> COSH NUMBER','Factor',2,'p_FunctionExpression','parser.py',231),
('Factor -> ACOS LPAREN Expression RPAREN','Factor',4,'p_FunctionExpression','parser.py',233),
('Factor -> ACOS ID','Factor',2,'p_FunctionExpression','parser.py',235),
('Factor -> ACOS NUMBER','Factor',2,'p_FunctionExpression','parser.py',237),
('Factor -> COS LPAREN Expression RPAREN','Factor',4,'p_FunctionExpression','parser.py',239),
('Factor -> COS ID','Factor',2,'p_FunctionExpression','parser.py',241),
('Factor -> COS NUMBER','Factor',2,'p_FunctionExpression','parser.py',243),
('Factor -> ATANH LPAREN Expression RPAREN','Factor',4,'p_FunctionExpression','parser.py',245),
('Factor -> ATANH ID','Factor',2,'p_FunctionExpression','parser.py',247),
('Factor -> ATANH NUMBER','Factor',2,'p_FunctionExpression','parser.py',249),
('Factor -> TANH LPAREN Expression RPAREN','Factor',4,'p_FunctionExpression','parser.py',251),
('Factor -> TANH ID','Factor',2,'p_FunctionExpression','parser.py',253),
('Factor -> TANH NUMBER','Factor',2,'p_FunctionExpression','parser.py',255),
('Factor -> ATAN LPAREN Expression COMMA Expression RPAREN','Factor',6,'p_FunctionExpression','parser.py',257),
('Factor -> ATAN LPAREN Expression RPAREN','Factor',4,'p_FunctionExpression','parser.py',258),
('Factor -> ATAN ID','Factor',2,'p_FunctionExpression','parser.py',260),
('Factor -> ATAN NUMBER','Factor',2,'p_FunctionExpression','parser.py',262),
('Factor -> TAN LPAREN Expression RPAREN','Factor',4,'p_FunctionExpression','parser.py',264),
('Factor -> TAN ID','Factor',2,'p_FunctionExpression','parser.py',266),
('Factor -> TAN NUMBER','Factor',2,'p_FunctionExpression','parser.py',268),
('Factor -> ASEC LPAREN Expression RPAREN','Factor',4,'p_FunctionExpression','parser.py',270),
('Factor -> ASEC ID','Factor',2,'p_FunctionExpression','parser.py',272),
('Factor -> ASEC NUMBER','Factor',2,'p_FunctionExpression','parser.py',274),
('Factor -> SEC LPAREN Expression RPAREN','Factor',4,'p_FunctionExpression','parser.py',276),
('Factor -> SEC ID','Factor',2,'p_FunctionExpression','parser.py',278),
('Factor -> SEC NUMBER','Factor',2,'p_FunctionExpression','parser.py',280),
('Factor -> ACSC LPAREN Expression RPAREN','Factor',4,'p_FunctionExpression','parser.py',282),
('Factor -> ACSC ID','Factor',2,'p_FunctionExpression','parser.py',284),
('Factor -> ACSC NUMBER','Factor',2,'p_FunctionExpression','parser.py',286),
('Factor -> CSC LPAREN Expression RPAREN','Factor',4,'p_FunctionExpression','parser.py',288),
('Factor -> CSC ID','Factor',2,'p_FunctionExpression','parser.py',290),
('Factor -> CSC NUMBER','Factor',2,'p_FunctionExpression','parser.py',292),
('Factor -> ACOTH LPAREN Expression RPAREN','Factor',4,'p_FunctionExpression','parser.py',294),
('Factor -> ACOTH ID','Factor',2,'p_FunctionExpression','parser.py',296),
('Factor -> ACOTH NUMBER','Factor',2,'p_FunctionExpression','parser.py',298),
('Factor -> COTH LPAREN Expression RPAREN','Factor',4,'p_FunctionExpression','parser.py',300),
('Factor -> COTH ID','Factor',2,'p_FunctionExpression','parser.py',302),
('Factor -> COTH NUMBER','Factor',2,'p_FunctionExpression','parser.py',304),
('Factor -> ACOT LPAREN Expression RPAREN','Factor',4,'p_FunctionExpression','parser.py',306),
('Factor -> ACOT ID','Factor',2,'p_FunctionExpression','parser.py',308),
('Factor -> ACOT NUMBER','Factor',2,'p_FunctionExpression','parser.py',310),
('Factor -> COT LPAREN Expression RPAREN','Factor',4,'p_FunctionExpression','parser.py',312),
('Factor -> COT ID','Factor',2,'p_FunctionExpression','parser.py',314),
('Factor -> COT NUMBER','Factor',2,'p_FunctionExpression','parser.py',316),
('Factor -> LOG LPAREN Expression RPAREN','Factor',4,'p_FunctionExpression','parser.py',318),
('Factor -> LOG ID','Factor',2,'p_FunctionExpression','parser.py',320),
('Factor -> LOG NUMBER','Factor',2,'p_FunctionExpression','parser.py',322),
('Factor -> LOG UNDERLINE LBRACE NUMBER RBRACE LPAREN Expression RPAREN','Factor',8,'p_FunctionExpression','parser.py',324),
('Factor -> LN LPAREN Expression RPAREN','Factor',4,'p_FunctionExpression','parser.py',326),
('Factor -> LN ID','Factor',2,'p_FunctionExpression','parser.py',328),
('Factor -> LN NUMBER','Factor',2,'p_FunctionExpression','parser.py',330),
('Factor -> EXP LPAREN Expression RPAREN','Factor',4,'p_FunctionExpression','parser.py',332),
('Factor -> EXP ID','Factor',2,'p_FunctionExpression','parser.py',334),
('Factor -> EXP NUMBER','Factor',2,'p_FunctionExpression','parser.py',336),
('Factor -> GCD LPAREN ExpressionList RPAREN','Factor',4,'p_FunctionExpression','parser.py',338),
('Factor -> GCD ID','Factor',2,'p_FunctionExpression','parser.py',340),
('Factor -> GCD NUMBER','Factor',2,'p_FunctionExpression','parser.py',342),
('Factor -> DEG LPAREN ExpressionList RPAREN','Factor',4,'p_FunctionExpression','parser.py',344),
('Factor -> DEG ID','Factor',2,'p_FunctionExpression','parser.py',346),
('Factor -> DEG NUMBER','Factor',2,'p_FunctionExpression','parser.py',348),
('Factor -> GRADIENT LPAREN ExpressionList RPAREN','Factor',4,'p_FunctionExpression','parser.py',350),
('Factor -> GRADIENT ID','Factor',2,'p_FunctionExpression','parser.py',352),
('Factor -> GRADIENT NUMBER','Factor',2,'p_FunctionExpression','parser.py',354),
('Factor -> GRADIENT DOT LPAREN ExpressionList RPAREN','Factor',5,'p_FunctionExpression','parser.py',356),
('Factor -> GRADIENT DOT ID','Factor',3,'p_FunctionExpression','parser.py',358),
('Factor -> GRADIENT DOT NUMBER','Factor',3,'p_FunctionExpression','parser.py',360),
('Factor -> GRADIENT CROSS LPAREN ExpressionList RPAREN','Factor',5,'p_FunctionExpression','parser.py',362),
('Factor -> GRADIENT CROSS ID','Factor',3,'p_FunctionExpression','parser.py',364),
('Factor -> GRADIENT CROSS NUMBER','Factor',3,'p_FunctionExpression','parser.py',366),
('Factor -> LAPLACIAN LPAREN Expression RPAREN','Factor',4,'p_FunctionExpression','parser.py',368),
('Factor -> LAPLACIAN NUMBER','Factor',2,'p_FunctionExpression','parser.py',370),
('Factor -> LAPLACIAN ID','Factor',2,'p_FunctionExpression','parser.py',372),
('Factor -> DETERMINANT LPAREN Matrix RPAREN','Factor',4,'p_FunctionExpression','parser.py',374),
('Factor -> DETERMINANT Matrix','Factor',2,'p_FunctionExpression','parser.py',376),
('Factor -> Symbol LPAREN ExpressionList RPAREN','Factor',4,'p_FunctionExpression','parser.py',378),
('Factor -> ID LPAREN ExpressionList RPAREN','Factor',4,'p_FunctionExpression','parser.py',380),
('Factor -> ID LPAREN RPAREN','Factor',3,'p_FunctionExpression','parser.py',382),
('Range -> Expression DOTS Expression','Range',3,'p_Range','parser.py',527),
('IndexingExpression -> ID IN Range','IndexingExpression',3,'p_IndexingExpression','parser.py',531),
('IteratedExpression -> SUM UNDERLINE LBRACE IndexingExpression RBRACE Expression','IteratedExpression',6,'p_IteratedExpression','parser.py',535),
('IteratedExpression -> SUM UNDERLINE LBRACE ID EQ Expression RBRACE CARET LBRACE Expression RBRACE Expression','IteratedExpression',12,'p_IteratedExpression','parser.py',536),
('IteratedExpression -> PROD UNDERLINE LBRACE IndexingExpression RBRACE Expression','IteratedExpression',6,'p_IteratedExpression','parser.py',537),
('IteratedExpression -> PROD UNDERLINE LBRACE ID EQ Expression RBRACE CARET LBRACE Expression RBRACE Expression','IteratedExpression',12,'p_IteratedExpression','parser.py',538),
('Integral -> INTEGRAL UNDERLINE LBRACE Expression RBRACE CARET LBRACE Expression RBRACE Expression DIFFERENTIAL','Integral',11,'p_Integral','parser.py',555),
('Integral -> INTEGRAL UNDERLINE LBRACE Expression RBRACE Expression DIFFERENTIAL','Integral',7,'p_Integral','parser.py',556),
('Integral -> INTEGRAL CARET LBRACE Expression RBRACE Expression DIFFERENTIAL','Integral',7,'p_Integral','parser.py',557),
('Integral -> INTEGRAL Expression DIFFERENTIAL','Integral',3,'p_Integral','parser.py',558),
('Derivative -> FRAC LBRACE D RBRACE LBRACE DIFFERENTIAL RBRACE Expression','Derivative',8,'p_Derivative1','parser.py',575),
('Derivative -> FRAC LBRACE D CARET LBRACE NUMBER RBRACE RBRACE LBRACE DIFFERENTIAL CARET LBRACE NUMBER RBRACE RBRACE Expression','Derivative',16,'p_Derivative1','parser.py',576),
('Derivative -> FRAC LBRACE D Expression RBRACE LBRACE DIFFERENTIAL RBRACE','Derivative',8,'p_Derivative2','parser.py',585),
('Derivative -> FRAC LBRACE D CARET LBRACE NUMBER RBRACE Expression RBRACE LBRACE DIFFERENTIAL CARET LBRACE NUMBER RBRACE RBRACE','Derivative',16,'p_Derivative2','parser.py',586),
('Derivative -> FRAC LBRACE PARTIAL RBRACE LBRACE PARTIAL ID RBRACE Expression','Derivative',9,'p_Derivative3','parser.py',595),
('Derivative -> FRAC LBRACE PARTIAL CARET LBRACE NUMBER RBRACE RBRACE LBRACE PARTIAL ID CARET LBRACE NUMBER RBRACE RBRACE Expression','Derivative',17,'p_Derivative3','parser.py',596),
('Derivative -> FRAC LBRACE PARTIAL Expression RBRACE LBRACE PARTIAL ID RBRACE','Derivative',9,'p_Derivative4','parser.py',605),
('Derivative -> FRAC LBRACE PARTIAL CARET LBRACE NUMBER RBRACE Expression RBRACE LBRACE PARTIAL ID CARET LBRACE NUMBER RBRACE RBRACE','Derivative',17,'p_Derivative4','parser.py',606),
('DivisorFunction -> SIGMA_LOWER UNDERLINE LBRACE NUMBER RBRACE LPAREN ExpressionList RPAREN','DivisorFunction',8,'p_DivisorFunction','parser.py',615),
('ImaginaryNumber -> I','ImaginaryNumber',1,'p_ImaginaryNumber','parser.py',620),
('ImaginaryNumber -> NUMBER I','ImaginaryNumber',2,'p_ImaginaryNumber','parser.py',621),
('NapierNumber -> E','NapierNumber',1,'p_NapierNumber','parser.py',628),
('NapierNumber -> NUMBER E','NapierNumber',2,'p_NapierNumber','parser.py',629),
('DifferentialVariable -> ID PrimeList LPAREN ExpressionList RPAREN','DifferentialVariable',5,'p_DifferentialVariable1','parser.py',636),
('DifferentialVariable -> ID PrimeList','DifferentialVariable',2,'p_DifferentialVariable1','parser.py',637),
('DifferentialVariable -> ID CARET LBRACE LPAREN NUMBER RPAREN RBRACE LPAREN ExpressionList RPAREN','DifferentialVariable',10,'p_DifferentialVariable2','parser.py',646),
('DifferentialVariable -> ID CARET LBRACE LPAREN NUMBER RPAREN RBRACE','DifferentialVariable',7,'p_DifferentialVariable2','parser.py',647),
('ChooseExpression -> LBRACE Expression CHOOSE Expression RBRACE','ChooseExpression',5,'p_Choose','parser.py',678),
('Limit -> LIMIT UNDERLINE LBRACE ID TO Expression RBRACE Expression','Limit',8,'p_LIMIT','parser.py',682),
('Limit -> LIMIT UNDERLINE LBRACE ID TO Expression PLUS RBRACE Expression','Limit',9,'p_LIMIT','parser.py',683),
('Limit -> LIMIT UNDERLINE LBRACE ID TO Expression MINUS RBRACE Expression','Limit',9,'p_LIMIT','parser.py',684),
('Limit -> LIMIT UNDERLINE LBRACE ID TO Expression CARET LBRACE PLUS RBRACE RBRACE Expression','Limit',12,'p_LIMIT','parser.py',685),
('Limit -> LIMIT UNDERLINE LBRACE ID TO Expression CARET LBRACE MINUS RBRACE RBRACE Expression','Limit',12,'p_LIMIT','parser.py',686),
('Limit -> LIMIT UNDERLINE LBRACE ID TO Term CARET LBRACE PLUS RBRACE RBRACE Expression','Limit',12,'p_LIMIT','parser.py',687),
('Limit -> LIMIT UNDERLINE LBRACE ID TO Term CARET LBRACE MINUS RBRACE RBRACE Expression','Limit',12,'p_LIMIT','parser.py',688),
('ConstraintSystem -> BEGIN_CASE Constraints END_CASE','ConstraintSystem',3,'p_ConstraintSystem','parser.py',714),
('ConstraintSystem -> BEGIN_CASE Constraints BACKSLASHES END_CASE','ConstraintSystem',4,'p_ConstraintSystem','parser.py',715),
('Constraints -> Constraints BACKSLASHES Constraint','Constraints',3,'p_Constraints','parser.py',719),
('Constraints -> Constraint','Constraints',1,'p_Constraints','parser.py',720),
('Determinant -> BEGIN_VMATRIX ExpressionsRows END_VMATRIX','Determinant',3,'p_Determinant','parser.py',730),
('Determinant -> BEGIN_VMATRIX ExpressionsRows BACKSLASHES END_VMATRIX','Determinant',4,'p_Determinant','parser.py',731),
('Norm -> BEGIN_NMATRIX ExpressionsRows END_NMATRIX','Norm',3,'p_Norm','parser.py',736),
('Norm -> BEGIN_NMATRIX ExpressionsRows BACKSLASHES END_NMATRIX','Norm',4,'p_Norm','parser.py',737),
('Matrix -> BEGIN_BMATRIX ExpressionsRows END_BMATRIX','Matrix',3,'p_Matrix','parser.py',742),
('Matrix -> BEGIN_BMATRIX ExpressionsRows BACKSLASHES END_BMATRIX','Matrix',4,'p_Matrix','parser.py',743),
('Matrix -> BEGIN_PMATRIX ExpressionsRows END_PMATRIX','Matrix',3,'p_Matrix','parser.py',745),
('Matrix -> BEGIN_PMATRIX ExpressionsRows BACKSLASHES END_PMATRIX','Matrix',4,'p_Matrix','parser.py',746),
('ExpressionsRow -> ExpressionsRow AMPERSAND Expression','ExpressionsRow',3,'p_ExpressionsRow','parser.py',751),
('ExpressionsRow -> Expression','ExpressionsRow',1,'p_ExpressionsRow','parser.py',752),
('ExpressionsRows -> ExpressionsRows BACKSLASHES ExpressionsRow','ExpressionsRows',3,'p_ExpressionsRows','parser.py',762),
('ExpressionsRows -> ExpressionsRow','ExpressionsRows',1,'p_ExpressionsRows','parser.py',763),
('ExpressionList -> ExpressionList COMMA Expression','ExpressionList',3,'p_ExpessionList','parser.py',773),
('ExpressionList -> Expression','ExpressionList',1,'p_ExpessionList','parser.py',774),
('PrimeList -> PrimeList PRIME','PrimeList',2,'p_PrimeList','parser.py',784),
('PrimeList -> PRIME','PrimeList',1,'p_PrimeList','parser.py',785),
('Constraint -> Expression EQ Expression','Constraint',3,'p_Constraint','parser.py',794),
('Constraint -> Expression NEQ Expression','Constraint',3,'p_Constraint','parser.py',795),
('Constraint -> Expression LT Expression','Constraint',3,'p_Constraint','parser.py',796),
('Constraint -> Expression LE Expression','Constraint',3,'p_Constraint','parser.py',797),
('Constraint -> Expression GT Expression','Constraint',3,'p_Constraint','parser.py',798),
('Constraint -> Expression GE Expression','Constraint',3,'p_Constraint','parser.py',799),
('Symbol -> PI','Symbol',1,'p_Symbol','parser.py',824),
('Symbol -> XI_LOWER','Symbol',1,'p_Symbol','parser.py',825),
('Symbol -> CHI_LOWER','Symbol',1,'p_Symbol','parser.py',826),
('Symbol -> PHI_LOWER','Symbol',1,'p_Symbol','parser.py',827),
('Symbol -> PSI_LOWER','Symbol',1,'p_Symbol','parser.py',828),
('Symbol -> SIGMA_LOWER','Symbol',1,'p_Symbol','parser.py',829),
('Symbol -> ZETA_LOWER','Symbol',1,'p_Symbol','parser.py',830),
('Symbol -> ETA_LOWER','Symbol',1,'p_Symbol','parser.py',831),
('Symbol -> DELTA_LOWER','Symbol',1,'p_Symbol','parser.py',832),
('Symbol -> THETA_LOWER','Symbol',1,'p_Symbol','parser.py',833),
('Symbol -> LAMBDA_LOWER','Symbol',1,'p_Symbol','parser.py',834),
('Symbol -> EPSILON_LOWER','Symbol',1,'p_Symbol','parser.py',835),
('Symbol -> TAU_LOWER','Symbol',1,'p_Symbol','parser.py',836),
('Symbol -> KAPPA_LOWER','Symbol',1,'p_Symbol','parser.py',837),
('Symbol -> OMEGA_LOWER','Symbol',1,'p_Symbol','parser.py',838),
('Symbol -> ALPHA_LOWER','Symbol',1,'p_Symbol','parser.py',839),
('Symbol -> NU_LOWER','Symbol',1,'p_Symbol','parser.py',840),
('Symbol -> RHO_LOWER','Symbol',1,'p_Symbol','parser.py',841),
('Symbol -> OMICRON_LOWER','Symbol',1,'p_Symbol','parser.py',842),
('Symbol -> UPSILON_LOWER','Symbol',1,'p_Symbol','parser.py',843),
('Symbol -> IOTA_LOWER','Symbol',1,'p_Symbol','parser.py',844),
('Symbol -> BETA_LOWER','Symbol',1,'p_Symbol','parser.py',845),
('Symbol -> GAMMA_LOWER','Symbol',1,'p_Symbol','parser.py',846),
('Symbol -> MU_LOWER','Symbol',1,'p_Symbol','parser.py',847),
('Symbol -> PI_UPPER','Symbol',1,'p_Symbol','parser.py',848),
('Symbol -> BETA','Symbol',1,'p_Symbol','parser.py',849),
('Symbol -> GAMMA','Symbol',1,'p_Symbol','parser.py',850),
('Symbol -> KAPPA','Symbol',1,'p_Symbol','parser.py',851),
('Symbol -> OMICRON','Symbol',1,'p_Symbol','parser.py',852),
('Symbol -> OMEGA','Symbol',1,'p_Symbol','parser.py',853),
('Symbol -> LAMBDA','Symbol',1,'p_Symbol','parser.py',854),
('Symbol -> IOTA','Symbol',1,'p_Symbol','parser.py',855),
('Symbol -> PSI','Symbol',1,'p_Symbol','parser.py',856),
('Symbol -> MU','Symbol',1,'p_Symbol','parser.py',857),
('Symbol -> PHI','Symbol',1,'p_Symbol','parser.py',858),
('Symbol -> SIGMA','Symbol',1,'p_Symbol','parser.py',859),
('Symbol -> ETA','Symbol',1,'p_Symbol','parser.py',860),
('Symbol -> ZETA','Symbol',1,'p_Symbol','parser.py',861),
('Symbol -> THETA','Symbol',1,'p_Symbol','parser.py',862),
('Symbol -> EPSILON','Symbol',1,'p_Symbol','parser.py',863),
('Symbol -> TAU','Symbol',1,'p_Symbol','parser.py',864),
('Symbol -> ALPHA','Symbol',1,'p_Symbol','parser.py',865),
('Symbol -> XI','Symbol',1,'p_Symbol','parser.py',866),
('Symbol -> CHI','Symbol',1,'p_Symbol','parser.py',867),
('Symbol -> NU','Symbol',1,'p_Symbol','parser.py',868),
('Symbol -> RHO','Symbol',1,'p_Symbol','parser.py',869),
('Symbol -> UPSILON','Symbol',1,'p_Symbol','parser.py',870),
]
| 690.622302
| 143,408
| 0.677223
| 45,962
| 191,993
| 2.818502
| 0.019233
| 0.015315
| 0.00799
| 0.010653
| 0.88451
| 0.867643
| 0.848028
| 0.8136
| 0.803565
| 0.784259
| 0
| 0.575224
| 0.032126
| 191,993
| 277
| 143,409
| 693.115523
| 0.121905
| 0.000323
| 0
| 0.007463
| 1
| 0.003731
| 0.153074
| 0.006581
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 11
|
a06c8ba9424dec11f2388e37ba3ce7f2f386b32e
| 104
|
py
|
Python
|
example_app/decorators_demo/__init__.py
|
vishwaefor/handy-python-features
|
cbc6f772655a1a329971cc2972d691501b2c66a1
|
[
"Apache-2.0"
] | 2
|
2019-07-05T18:07:36.000Z
|
2019-07-11T15:49:55.000Z
|
example_app/decorators_demo/__init__.py
|
vishwaefor/handy-python-features
|
cbc6f772655a1a329971cc2972d691501b2c66a1
|
[
"Apache-2.0"
] | 1
|
2019-07-03T08:18:21.000Z
|
2019-07-04T07:32:09.000Z
|
example_app/decorators_demo/__init__.py
|
vishwaefor/handy-python-features
|
cbc6f772655a1a329971cc2972d691501b2c66a1
|
[
"Apache-2.0"
] | null | null | null |
from .private_function_check import private_function_check
from .singleton_check import singleton_check
| 34.666667
| 58
| 0.903846
| 14
| 104
| 6.285714
| 0.428571
| 0.340909
| 0.454545
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.076923
| 104
| 2
| 59
| 52
| 0.916667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
a06cf7af3e126e26e1ed19707e6cd090cc704834
| 3,445
|
py
|
Python
|
ClassificationAndRegression/SemiSupervised/LabelSpreading.py
|
kopok2/machine-learning-algorithms
|
9d5eb9c17a1354e726b79e9cfae9e5638976b919
|
[
"MIT"
] | null | null | null |
ClassificationAndRegression/SemiSupervised/LabelSpreading.py
|
kopok2/machine-learning-algorithms
|
9d5eb9c17a1354e726b79e9cfae9e5638976b919
|
[
"MIT"
] | null | null | null |
ClassificationAndRegression/SemiSupervised/LabelSpreading.py
|
kopok2/machine-learning-algorithms
|
9d5eb9c17a1354e726b79e9cfae9e5638976b919
|
[
"MIT"
] | null | null | null |
# coding=utf-8
"""
Label Spreading
Semi-supervised Machine Learning algorithm.

Demonstrates scikit-learn's LabelSpreading on two synthetic datasets
(concentric circles, then Gaussian blobs).  Only the first and last
samples are labeled; the remaining labels are inferred by the algorithm
and the result is plotted next to the raw data.
"""
import numpy as np
import matplotlib.pyplot as plt
from sklearn import semi_supervised, datasets


def _spread_and_plot(X, samples):
    """Label the two endpoints of X, spread the labels, and plot the result.

    X: array of shape (samples, 2) with the 2-D points to classify.
    samples: number of points in X.
    """
    o = 1  # first ("outer") class label
    i = 0  # second ("inner") class label
    labels = np.full(samples, -1.0)  # -1 marks unlabeled points
    labels[0] = o
    labels[-1] = i
    print("Spreading labels...")
    ls = semi_supervised.LabelSpreading(kernel="knn", alpha=0.8)
    # Fix: fit on the partially-labeled vector `labels`, not the full
    # ground truth y -- fitting on y made the semi-supervised step a no-op.
    ls.fit(X, labels)
    print("Plotting propagation...")
    out_l_ = ls.transduction_
    plt.figure(figsize=(8.5, 4))
    plt.subplot(1, 2, 1)
    plt.scatter(X[labels == o, 0], X[labels == o, 1], color='navy',
                marker='s', lw=0, label="outer labeled", s=10)
    plt.scatter(X[labels == i, 0], X[labels == i, 1], color='c',
                marker='s', lw=0, label='inner labeled', s=10)
    plt.scatter(X[labels == -1, 0], X[labels == -1, 1], color='darkorange',
                marker='.', label='unlabeled')
    plt.legend(scatterpoints=1, shadow=False, loc='upper right')
    plt.title("Raw data (2 classes=outer and inner)")
    plt.subplot(1, 2, 2)
    output_label_array = np.asarray(out_l_)
    outer_numbers = np.where(output_label_array == o)[0]
    inner_numbers = np.where(output_label_array == i)[0]
    plt.scatter(X[outer_numbers, 0], X[outer_numbers, 1], color='navy',
                marker='s', lw=0, s=10, label="outer learned")
    plt.scatter(X[inner_numbers, 0], X[inner_numbers, 1], color='c',
                marker='s', lw=0, s=10, label="inner learned")
    plt.legend(scatterpoints=1, shadow=False, loc='upper right')
    plt.title("Labels learned with Label Spreading (KNN)")
    plt.subplots_adjust(left=0.07, bottom=0.07, right=0.93, top=0.92)
    plt.show()


if __name__ == '__main__':
    print("Generating data...")
    samples = 300
    X, _ = datasets.make_circles(n_samples=samples)
    _spread_and_plot(X, samples)

    print("Generating data...")
    samples = 3000
    X, _ = datasets.make_blobs(n_samples=samples)
    _spread_and_plot(X, samples)
| 37.043011
| 82
| 0.614514
| 520
| 3,445
| 3.969231
| 0.188462
| 0.040698
| 0.053295
| 0.03876
| 0.877907
| 0.877907
| 0.877907
| 0.877907
| 0.877907
| 0.877907
| 0
| 0.045605
| 0.21074
| 3,445
| 92
| 83
| 37.445652
| 0.713498
| 0.02148
| 0
| 0.891892
| 0
| 0
| 0.149866
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.040541
| 0
| 0.040541
| 0.081081
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
a072532fc33103350a0c1c5c0b21149b8b84a7d9
| 2,326
|
py
|
Python
|
12/tests.py
|
remihuguet/aoc2020
|
c313c5b425dda92d949fd9ca4f18ff66f452794f
|
[
"MIT"
] | null | null | null |
12/tests.py
|
remihuguet/aoc2020
|
c313c5b425dda92d949fd9ca4f18ff66f452794f
|
[
"MIT"
] | null | null | null |
12/tests.py
|
remihuguet/aoc2020
|
c313c5b425dda92d949fd9ca4f18ff66f452794f
|
[
"MIT"
] | null | null | null |
import rainrisk
def test_compute_new_direction():
    """Rotating a heading left/right by multiples of 90 degrees."""
    cases = {
        (1, 0): {
            'R90': (0, -1), 'L90': (0, 1),
            'R180': (-1, 0), 'L180': (-1, 0),
            'R270': (0, 1), 'L270': (0, -1),
            'R360': (1, 0), 'L360': (1, 0),
        },
        (0, 1): {
            'R90': (1, 0), 'L90': (-1, 0),
            'R180': (0, -1), 'L180': (0, -1),
            'R270': (-1, 0), 'L270': (1, 0),
            'R360': (0, 1), 'L360': (0, 1),
        },
    }
    for direction, rotations in cases.items():
        for move, expected in rotations.items():
            assert expected == rainrisk.compute_new_direction(direction, move)
def test_compute_final_position():
    """The ship ends at (17, -8) after following the sample instructions."""
    with open('12/test_input.txt', 'r') as f:
        mvts = list(f)
    assert (17, -8) == rainrisk.compute_final_position(mvts)
def test_compute_manhattan():
    """Manhattan distance of the final position for the sample input."""
    distance = rainrisk.compute_manhattan('12/test_input.txt')
    assert distance == 25
def test_compute_position_waypoint():
    """Waypoint navigation over the sample input ends at (214, -72)."""
    initial = (10, 1)
    with open('12/test_input.txt', 'r') as f:
        mvts = f.readlines()
    assert (214, -72) == rainrisk.compute_position_waypoint(mvts, initial)
def test_compute_final_manhattan():
    """Part-two Manhattan distance for the sample input."""
    result = rainrisk.compute_final_manhattan('12/test_input.txt')
    assert result == 286
def test_compute_speed_rotation():
    """Rotating the (10, 1) waypoint around the ship."""
    speed = (10, 1)
    expectations = [
        ('R90', (1, -10)),
        ('L90', (-1, 10)),
        ('R180', (-10, -1)),
        ('L180', (-10, -1)),
        ('R270', (-1, 10)),
        ('L270', (1, -10)),
    ]
    for move, expected in expectations:
        assert expected == rainrisk.compute_speed_rotation(speed, move)
| 38.131148
| 78
| 0.686586
| 301
| 2,326
| 5.066445
| 0.139535
| 0.255738
| 0.211803
| 0.312131
| 0.767213
| 0.767213
| 0.761967
| 0.727869
| 0.052459
| 0.052459
| 0
| 0.07216
| 0.159931
| 2,326
| 60
| 79
| 38.766667
| 0.708291
| 0
| 0
| 0.097561
| 0
| 0
| 0.065348
| 0
| 0
| 0
| 0
| 0
| 0.634146
| 1
| 0.146341
| false
| 0
| 0.02439
| 0
| 0.170732
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
264af77119668b0c471580b3b42362742be405a1
| 1,364
|
py
|
Python
|
tests/utils/test_str_checks.py
|
dmalison/hedger
|
8db634a484769fb4f3feb945c1847ef50803fafe
|
[
"MIT"
] | null | null | null |
tests/utils/test_str_checks.py
|
dmalison/hedger
|
8db634a484769fb4f3feb945c1847ef50803fafe
|
[
"MIT"
] | null | null | null |
tests/utils/test_str_checks.py
|
dmalison/hedger
|
8db634a484769fb4f3feb945c1847ef50803fafe
|
[
"MIT"
] | null | null | null |
import unittest
from hedger import utils
class StrChecksTest(unittest.TestCase):
    """Checks for the utils.is_str_int / utils.is_str_float string parsers."""

    def test_isint_with_zero_returns_true(self):
        outcome = utils.is_str_int("0")
        self.assertTrue(outcome)

    def test_isint_with_leading_zero_returns_true(self):
        outcome = utils.is_str_int("01")
        self.assertTrue(outcome)

    def test_isint_with_negative_number_returns_true(self):
        outcome = utils.is_str_int("-1")
        self.assertTrue(outcome)

    def test_isint_with_float_returns_false(self):
        outcome = utils.is_str_int("1.0")
        self.assertFalse(outcome)

    def test_isint_with_str_returns_false(self):
        outcome = utils.is_str_int("foo")
        self.assertFalse(outcome)

    def test_isfloat_with_zero_returns_true(self):
        outcome = utils.is_str_float("0")
        self.assertTrue(outcome)

    def test_isfloat_with_leading_zero_returns_true(self):
        outcome = utils.is_str_float("01")
        self.assertTrue(outcome)

    def test_isfloat_with_negative_number_returns_true(self):
        outcome = utils.is_str_float("-1")
        self.assertTrue(outcome)

    def test_isfloat_with_float_returns_true(self):
        outcome = utils.is_str_float("1.0")
        self.assertTrue(outcome)

    def test_isfloat_with_negative_float_returns_true(self):
        outcome = utils.is_str_float("-1.0")
        self.assertTrue(outcome)

    def test_isfloat_with_str_returns_false(self):
        outcome = utils.is_str_float("foo")
        self.assertFalse(outcome)

    def test_isfloat_with_scientific_notation_returns_true(self):
        outcome = utils.is_str_float("1.0e-7")
        self.assertTrue(outcome)
| 32.47619
| 65
| 0.746334
| 201
| 1,364
| 4.616915
| 0.174129
| 0.090517
| 0.12931
| 0.184267
| 0.825431
| 0.71444
| 0.71444
| 0.71444
| 0.71444
| 0.66056
| 0
| 0.01463
| 0.148094
| 1,364
| 41
| 66
| 33.268293
| 0.783993
| 0
| 0
| 0
| 0
| 0
| 0.02346
| 0
| 0
| 0
| 0
| 0
| 0.444444
| 1
| 0.444444
| false
| 0
| 0.074074
| 0
| 0.555556
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 10
|
265ea2b584609b4162585e10d8730c8ba727c5ab
| 1,072
|
py
|
Python
|
tests/test_spanning_cidr.py
|
akud82/spanning_cidr
|
1b9777c21c77063c58cb6a766c642053a1b8df21
|
[
"MIT"
] | null | null | null |
tests/test_spanning_cidr.py
|
akud82/spanning_cidr
|
1b9777c21c77063c58cb6a766c642053a1b8df21
|
[
"MIT"
] | null | null | null |
tests/test_spanning_cidr.py
|
akud82/spanning_cidr
|
1b9777c21c77063c58cb6a766c642053a1b8df21
|
[
"MIT"
] | null | null | null |
from spanning_cidr import spanning_cidr
def test_address_empty():
assert spanning_cidr([]) == None
def test_address_empty():
assert spanning_cidr(['']) == None
def test_address_single():
assert spanning_cidr(['127.0.0.1']) == '127.0.0.1/32'
def test_address_one_and_empty():
assert spanning_cidr(['127.0.0.1', '']) == '127.0.0.1/32'
def test_address_has_empty_and_long_range():
assert spanning_cidr(['127.0.0.1', '', '86.59.118.159']) == '64.0.0.0/2'
def test_address_not_net_incl():
assert spanning_cidr(['86.59.118.159', '86.59.118.157']) == '86.59.118.156/30'
def test_address_many_not_net_incl():
assert spanning_cidr(['86.59.118.147', '86.59.118.159', '86.59.118.157']) == '86.59.118.144/28'
def test_address_equals_included():
assert spanning_cidr(['86.59.118.144', '86.59.118.147', '86.59.118.159', '86.59.118.157', '86.59.118.149']) == '86.59.118.144/28'
def test_address_first_biggest():
    """Input order must not matter: largest address first gives the same span."""
    hosts = ['86.59.118.159', '86.59.118.147', '86.59.118.157',
             '86.59.118.149', '86.59.118.144']
    assert spanning_cidr(hosts) == '86.59.118.144/28'
| 36.965517
| 133
| 0.668843
| 195
| 1,072
| 3.461538
| 0.210256
| 0.118519
| 0.207407
| 0.044444
| 0.777778
| 0.77037
| 0.743704
| 0.678519
| 0.626667
| 0.57037
| 0
| 0.259686
| 0.109142
| 1,072
| 28
| 134
| 38.285714
| 0.44712
| 0
| 0
| 0.105263
| 0
| 0
| 0.310634
| 0
| 0
| 0
| 0
| 0
| 0.473684
| 1
| 0.473684
| true
| 0
| 0.052632
| 0
| 0.526316
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| 1
| 0
|
0
| 9
|
2669769b0bf69f5be480f32c4d2e056c1db395a2
| 140
|
py
|
Python
|
smhsan/training/metrics/__init__.py
|
hoho-wenda0228/Nested_NER
|
22e7973df733d6dcd5b7bf67c941bed4ac64989a
|
[
"MIT"
] | 10
|
2021-06-27T06:58:03.000Z
|
2022-03-30T08:34:03.000Z
|
smhsan/training/metrics/__init__.py
|
hoho-wenda0228/Nested_NER
|
22e7973df733d6dcd5b7bf67c941bed4ac64989a
|
[
"MIT"
] | null | null | null |
smhsan/training/metrics/__init__.py
|
hoho-wenda0228/Nested_NER
|
22e7973df733d6dcd5b7bf67c941bed4ac64989a
|
[
"MIT"
] | 2
|
2021-08-09T07:29:16.000Z
|
2022-03-24T09:05:27.000Z
|
# coding: utf-8
from smhsan.training.metrics.type_f1_measure import TypeF1Measure
from smhsan.training.metrics.bd_metrics import BDMetrics
| 28
| 65
| 0.85
| 20
| 140
| 5.8
| 0.7
| 0.172414
| 0.310345
| 0.431034
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.023438
| 0.085714
| 140
| 4
| 66
| 35
| 0.882813
| 0.092857
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
cd8d7c36a596e658e5c27fe9e25a38c34c8f3507
| 13,936
|
py
|
Python
|
tests/test_read_write_ply.py
|
nmaxwell/OpenMesh-Python
|
daa461069decb459f990bfcc1131c55a2db7b5e5
|
[
"BSD-3-Clause"
] | 9
|
2019-09-16T10:03:37.000Z
|
2022-02-03T17:56:24.000Z
|
tests/test_read_write_ply.py
|
Jiawei1996/OpenMesh-Python
|
daa461069decb459f990bfcc1131c55a2db7b5e5
|
[
"BSD-3-Clause"
] | 2
|
2017-01-04T10:55:17.000Z
|
2017-01-16T16:59:34.000Z
|
tests/test_read_write_ply.py
|
jjennings955/openmesh-python-subdivide
|
7f39bd1693f009ddd8739c98671b122c4d4aba81
|
[
"BSD-3-Clause"
] | 1
|
2020-04-13T15:23:59.000Z
|
2020-04-13T15:23:59.000Z
|
import unittest
import openmesh
import os
import numpy as np
class ReadWritePLY(unittest.TestCase):
    """Read/write round-trip and feature tests for PLY support in openmesh.

    The original test bodies repeated the same count, color and
    property-flag assertions verbatim in every test; those stanzas are
    factored into private helpers so each test states only what differs.
    """

    # Colors of the four probed vertices in the meshlab-derived fixtures:
    # all pure blue.
    _ALL_BLUE = {0: (0.0, 0.0, 1.0), 3: (0.0, 0.0, 1.0),
                 4: (0.0, 0.0, 1.0), 7: (0.0, 0.0, 1.0)}

    def setUp(self):
        # Meshes written during the tests go into OutFiles.
        if not os.path.exists('OutFiles'):
            os.makedirs('OutFiles')

    # ----- shared assertion helpers -------------------------------------

    def _assert_cube_counts(self):
        """Assert the element counts of the minimal cube mesh."""
        self.assertEqual(self.mesh.n_vertices(), 8)
        self.assertEqual(self.mesh.n_edges(), 18)
        self.assertEqual(self.mesh.n_faces(), 12)

    def _assert_vertex_colors(self, expected):
        """Assert per-channel vertex colors.

        :param expected: mapping of vertex index -> (r, g, b) floats.
        """
        for idx, rgb in expected.items():
            handle = self.mesh.vertex_handle(idx)
            for channel, value in enumerate(rgb):
                self.assertEqual(self.mesh.color(handle)[channel], value)

    def _assert_has_only_colors(self):
        """Assert the mesh carries vertex colors and no other vertex props."""
        self.assertFalse(self.mesh.has_vertex_normals())
        self.assertFalse(self.mesh.has_vertex_texcoords1D())
        self.assertFalse(self.mesh.has_vertex_texcoords2D())
        self.assertFalse(self.mesh.has_vertex_texcoords3D())
        self.assertTrue(self.mesh.has_vertex_colors())

    def _assert_point_cloud(self, filename):
        """Load a point-cloud PLY and assert 10 isolated vertices."""
        self.mesh = openmesh.read_trimesh(filename)
        self.assertEqual(self.mesh.n_vertices(), 10)
        self.assertEqual(self.mesh.n_edges(), 0)
        self.assertEqual(self.mesh.n_faces(), 0)

    def _check_meshlab_round_trip(self, out_name, **io_kwargs):
        """Write meshlab.ply to *out_name* with *io_kwargs*, re-read, verify."""
        self.mesh = openmesh.read_trimesh("TestFiles/meshlab.ply", vertex_color=True)
        openmesh.write_mesh(out_name, self.mesh, vertex_color=True, **io_kwargs)
        self.mesh = openmesh.read_trimesh(out_name, vertex_color=True, **io_kwargs)
        self._assert_cube_counts()
        self._assert_vertex_colors(self._ALL_BLUE)
        self._assert_has_only_colors()
        self.mesh.release_vertex_colors()

    # ----- tests --------------------------------------------------------

    def test_read_write_read(self):
        """Geometry must survive an OBJ -> PLY write -> PLY read round trip."""
        mesh1 = openmesh.read_trimesh("TestFiles/cube-minimal.obj")
        openmesh.write_mesh('OutFiles/test_read_write_read.ply', mesh1)
        mesh2 = openmesh.read_trimesh('OutFiles/test_read_write_read.ply')
        self.assertTrue(np.allclose(mesh1.points(), mesh2.points()))
        self.assertTrue(np.array_equal(mesh1.face_vertex_indices(),
                                       mesh2.face_vertex_indices()))

    def test_load_simple_point_ply_file_with_bad_encoding(self):
        self._assert_point_cloud("TestFiles/pointCloudBadEncoding.ply")

    def test_load_simple_point_ply_file_with_good_encoding(self):
        self._assert_point_cloud("TestFiles/pointCloudGoodEncoding.ply")

    def test_load_simple_ply(self):
        self.mesh = openmesh.read_trimesh("TestFiles/cube-minimal.ply")
        self._assert_cube_counts()

    def test_load_simple_ply_force_vertex_colors_although_not_available(self):
        # Requesting colors that the file does not contain must fail.
        with self.assertRaises(RuntimeError):
            openmesh.read_trimesh("TestFiles/cube-minimal.ply", vertex_color=True)

    def test_load_simple_ply_with_vertex_colors(self):
        self.mesh = openmesh.read_trimesh("TestFiles/cube-minimal-vertexColors.ply",
                                          vertex_color=True)
        self._assert_cube_counts()
        # Vertices 0/3 are red, 4/7 are blue in this fixture.
        self._assert_vertex_colors({0: (1.0, 0.0, 0.0), 3: (1.0, 0.0, 0.0),
                                    4: (0.0, 0.0, 1.0), 7: (0.0, 0.0, 1.0)})
        self._assert_has_only_colors()
        self.mesh.release_vertex_colors()

    def test_load_ply_from_mesh_lab_with_vertex_colors(self):
        self.mesh = openmesh.read_trimesh("TestFiles/meshlab.ply", vertex_color=True)
        self._assert_cube_counts()
        self._assert_vertex_colors(self._ALL_BLUE)
        self._assert_has_only_colors()
        self.mesh.release_vertex_colors()

    def test_write_and_read_binary_ply_with_vertex_colors(self):
        self._check_meshlab_round_trip("OutFiles/meshlab_binary.ply", binary=True)

    def test_write_and_read_ply_with_float_vertex_colors(self):
        self._check_meshlab_round_trip("OutFiles/meshlab_float.ply",
                                       color_float=True)

    def test_write_and_read_binary_ply_with_float_vertex_colors(self):
        self._check_meshlab_round_trip("OutFiles/meshlab_binary_float.ply",
                                       color_float=True, binary=True)

    def test_load_simple_ply_with_texcoords(self):
        self.mesh = openmesh.read_trimesh("TestFiles/cube-minimal-texCoords.ply",
                                          vertex_tex_coord=True)
        self._assert_cube_counts()
        # Both u and v carry the same value at each probed vertex.
        for idx, value in {0: 10.0, 2: 6.0, 4: 9.0, 7: 12.0}.items():
            handle = self.mesh.vertex_handle(idx)
            self.assertEqual(self.mesh.texcoord2D(handle)[0], value)
            self.assertEqual(self.mesh.texcoord2D(handle)[1], value)
        self.assertFalse(self.mesh.has_vertex_normals())
        self.assertTrue(self.mesh.has_vertex_texcoords1D())
        self.assertTrue(self.mesh.has_vertex_texcoords2D())
        self.assertTrue(self.mesh.has_vertex_texcoords3D())
        self.assertFalse(self.mesh.has_vertex_colors())
        self.mesh.release_vertex_texcoords2D()

    def test_load_simple_ply_with_normals(self):
        self.mesh = openmesh.read_trimesh("TestFiles/cube-minimal-normals.ply",
                                          vertex_normal=True)
        self._assert_cube_counts()
        self.assertTrue(self.mesh.has_vertex_normals())
        self.assertFalse(self.mesh.has_vertex_texcoords1D())
        self.assertFalse(self.mesh.has_vertex_texcoords2D())
        self.assertFalse(self.mesh.has_vertex_texcoords3D())
        self.assertFalse(self.mesh.has_vertex_colors())
        expected_normals = {0: (0.0, 0.0, 1.0), 3: (1.0, 0.0, 0.0),
                            4: (1.0, 0.0, 1.0), 7: (1.0, 1.0, 2.0)}
        for idx, normal in expected_normals.items():
            handle = self.mesh.vertex_handle(idx)
            for channel, value in enumerate(normal):
                self.assertEqual(self.mesh.normal(handle)[channel], value)
        self.mesh.release_vertex_normals()

    def test_read_nonexistent_ply(self):
        with self.assertRaises(RuntimeError):
            self.mesh = openmesh.read_trimesh("TestFiles/nonexistent.ply")
        with self.assertRaises(RuntimeError):
            self.mesh = openmesh.read_polymesh("TestFiles/nonexistent.ply")
if __name__ == '__main__':
    # Run the PLY read/write suite with per-test verbose output.
    loader = unittest.TestLoader()
    runner = unittest.TextTestRunner(verbosity=2)
    runner.run(loader.loadTestsFromTestCase(ReadWritePLY))
| 51.424354
| 128
| 0.687213
| 1,982
| 13,936
| 4.657921
| 0.052472
| 0.216638
| 0.226386
| 0.274047
| 0.918653
| 0.911828
| 0.87825
| 0.873375
| 0.837413
| 0.810334
| 0
| 0.03519
| 0.163964
| 13,936
| 270
| 129
| 51.614815
| 0.757188
| 0
| 0
| 0.635468
| 0
| 0
| 0.046929
| 0.045207
| 0
| 0
| 0
| 0
| 0.738916
| 1
| 0.068966
| false
| 0
| 0.019704
| 0
| 0.093596
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
26b3673be1e9de64f10d21e2dd73edb32e0f087f
| 8,481
|
py
|
Python
|
redux/rev_mdf.py
|
joungh93/PyRAF_GMOS_IFU
|
1750caaf846c426cf1fc761ad539f740c8ae64d9
|
[
"MIT"
] | null | null | null |
redux/rev_mdf.py
|
joungh93/PyRAF_GMOS_IFU
|
1750caaf846c426cf1fc761ad539f740c8ae64d9
|
[
"MIT"
] | null | null | null |
redux/rev_mdf.py
|
joungh93/PyRAF_GMOS_IFU
|
1750caaf846c426cf1fc761ad539f740c8ae64d9
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Revise the GMOS IFU MDF (Mask Definition File) aperture databases for
every science directory of each central wavelength, then re-run
gfextract interactively on the first directory to verify the result.

Created on Mon Jan 20 15:25:20 2020

@author: jlee
"""


import numpy as np
import glob, os
import copy
import g0_init_cfg as ic
from astropy.io import fits


# ----- Importing IRAF from the root directory ----- #
# IRAF must be initialized from ic.dir_iraf; chdir there, import, then
# return, keeping both the OS and IRAF working directories in sync.
current_dir = os.getcwd()
os.chdir(ic.dir_iraf)
from pyraf import iraf
from pyraf.iraf import gemini, gmos
os.chdir(current_dir)
iraf.chdir(current_dir)

iraf.unlearn('gfextract')


# ----- Revising the MDF (copy) ----- #

###########################
########## w6900 ##########
###########################
d = ic.dir_wav[0]    # Run every time per central wavelength
dir_sci = sorted(glob.glob(d+"/*"))

for j in np.arange(len(dir_sci)):

    # Moving into each science directory (OS and IRAF cwd together)
    name_sci = dir_sci[j].split("/")[-1]
    print("Moving path for "+name_sci+"...")
    os.chdir(current_dir+"/"+dir_sci[j])
    iraf.chdir(current_dir+"/"+dir_sci[j])

    # Reading database file: first flat frame name from the flat list
    flat = np.loadtxt(ic.lst_flat, dtype=str)
    flat0 = flat.item(0)

    # Name of aperture file in the database (one per slit)
    if (ic.nslit == 1):
        apfile = ['aperg'+flat0+'_1']
    if (ic.nslit == 2):
        apfile = ['aperg'+flat0+'_1', 'aperg'+flat0+'_2']

    # Reading MDF binary table (ext=1) and its header
    mdfdata, hdr = fits.getdata(ic.nmdf, ext=1, header=True)
    idx_apr_eff = []    # 0-based MDF row indices of apertures actually found

    # ----- slit-1 ----- #
    # Read the saved '_old' aperture database, then rewrite the live copy
    # keeping only apertures that appear exactly once.
    f = open(ic.dir_db+apfile[0]+'_old','r')
    dbfile = f.readlines()
    dbfile = np.array(dbfile)
    f.close()

    os.system('rm -rfv '+ic.dir_db+apfile[0])
    g = open(ic.dir_db+apfile[0],'w')

    N_apr = 750    # apertures per slit in the database
    idx_lines = np.arange(len(dbfile))
    for i in np.arange(N_apr):
        apr_num = i+1
        # Boolean mask of lines matching this aperture's record header
        apr_lines = (dbfile == '\taperture\t{0:d}\n'.format(apr_num))
        if (np.sum(apr_lines) == 1):
            apr_idx = idx_lines[apr_lines][0]
            # Number of curve coefficients, read from a fixed offset
            # (+17 lines) within the aperture record
            n_curve = int(dbfile[apr_idx+17].split('\t')[-1].split('\n')[0])
            # Full record: 4 lines before the 'aperture' line plus
            # 23+n_curve lines of body
            apr_info = copy.deepcopy(dbfile[apr_idx-4:apr_idx-4+23+n_curve])
            # ----- START ----- #
            g.writelines(apr_info)
            # Field 3 of the 'begin' line is the MDF row number (1-based)
            idx_apr_eff.append(int(apr_info[1].split()[3])-1)
            # ----- END ----- #
    g.close()

    # ----- slit-2 ----- #
    if (ic.nslit == 2):
        f = open(ic.dir_db+apfile[1]+'_old','r')
        dbfile = f.readlines()
        dbfile = np.array(dbfile)
        f.close()

        os.system('rm -rfv '+ic.dir_db+apfile[1])
        g = open(ic.dir_db+apfile[1],'w')

        N_apr = 750
        idx_lines = np.arange(len(dbfile))
        for i in np.arange(N_apr):
            apr_num = 750+i+1    # slit-2 apertures are numbered 751..1500
            apr_lines = (dbfile == '\taperture\t{0:d}\n'.format(apr_num))
            if (np.sum(apr_lines) == 1):
                apr_idx = idx_lines[apr_lines][0]
                n_curve = int(dbfile[apr_idx+17].split('\t')[-1].split('\n')[0])
                apr_info = copy.deepcopy(dbfile[apr_idx-4:apr_idx-4+23+n_curve])
                # ----- START ----- #
                # Renumber specific aperture ranges by +1 (or +2), and
                # rewrite the title from the MDF instrument coordinates.
                # NOTE(review): the range constants (795-1150, 1156-1499,
                # 1151/1153/1154) look hand-tuned to this dataset's fiber
                # shifts — confirm against the observation before reuse.
                if (((apr_num >= 795) & (apr_num <= 1150)) | \
                    ((apr_num >= 1156) & (apr_num <= 1499)) | \
                    (apr_num == 1153)):
                    apr_info[1] = apr_info[1].replace(apr_info[1].split()[3], '{0:d}'.format(apr_num+1))
                    apr_info[2] = '\ttitle\t{0:.3f} {1:.3f} '.format(mdfdata['XINST'][apr_num+1-1], mdfdata['YINST'][apr_num+1-1])+mdfdata['BLOCK'][apr_num+1-1]+'\n'
                    apr_info[4] = '\taperture\t{0:d}\n'.format(apr_num+1)
                    g.writelines(apr_info)
                elif ((apr_num == 1151) | \
                      (apr_num == 1154)):
                    apr_info[1] = apr_info[1].replace(apr_info[1].split()[3], '{0:d}'.format(apr_num+2))
                    apr_info[2] = '\ttitle\t{0:.3f} {1:.3f} '.format(mdfdata['XINST'][apr_num+2-1], mdfdata['YINST'][apr_num+2-1])+mdfdata['BLOCK'][apr_num+2-1]+'\n'
                    apr_info[4] = '\taperture\t{0:d}\n'.format(apr_num+2)
                    g.writelines(apr_info)
                else:
                    g.writelines(apr_info)
                idx_apr_eff.append(int(apr_info[1].split()[3])-1)
                # ----- END ----- #
        g.close()

    # Overwriting new MDF file: BEAM=1 for apertures present in the
    # database, BEAM=-1 (disabled) for the rest.
    newmdfdata = copy.deepcopy(mdfdata)
    bool_apr_eff = np.zeros(len(newmdfdata), dtype=bool)
    bool_apr_eff[idx_apr_eff] = True
    newmdfdata['BEAM'][bool_apr_eff] = 1
    newmdfdata['BEAM'][~bool_apr_eff] = -1

    hdu0 = fits.PrimaryHDU()
    hdu1 = fits.BinTableHDU()
    hdu1.data = newmdfdata
    hdu1.header = hdr
    hdul = fits.HDUList([hdu0, hdu1])
    hdul.writeto(ic.nmdf, overwrite=True)

    # Interactive tasks for the first science data for each central wavelength;
    # later directories reuse the first directory's aperture files.
    if (j == 0):
        dir_db0 = current_dir+"/"+dir_sci[j]+"/"+ic.dir_db
        apfile0 = apfile
        # Verify the MDF again
        iraf.imdelete('erg@'+ic.lst_flat)
        iraf.gfextract('rg'+flat0, fl_inter='yes', line=ic.pk_line, exslits=ic.eslit)
    else:
        for k in np.arange(len(apfile)):
            os.system('cp -rpv '+dir_db0+apfile0[k]+' '+ic.dir_db)

    # Coming back to current path
    os.chdir(current_dir)
    iraf.chdir(current_dir)


###########################
########## w7000 ##########
###########################
d = ic.dir_wav[1]    # Run every time per central wavelength
dir_sci = sorted(glob.glob(d+"/*"))

for j in np.arange(len(dir_sci)):

    # Moving into each science directory
    name_sci = dir_sci[j].split("/")[-1]
    print("Moving path for "+name_sci+"...")
    os.chdir(current_dir+"/"+dir_sci[j])
    iraf.chdir(current_dir+"/"+dir_sci[j])

    # Reading database file
    flat = np.loadtxt(ic.lst_flat, dtype=str)
    flat0 = flat.item(0)

    # Name of aperture file in the database
    if (ic.nslit == 1):
        apfile = ['aperg'+flat0+'_1']
    if (ic.nslit == 2):
        apfile = ['aperg'+flat0+'_1', 'aperg'+flat0+'_2']

    # Reading MDF file
    mdfdata, hdr = fits.getdata(ic.nmdf, ext=1, header=True)
    idx_apr_eff = []

    # ----- slit-1 ----- #
    f = open(ic.dir_db+apfile[0]+'_old','r')
    dbfile = f.readlines()
    dbfile = np.array(dbfile)
    f.close()

    os.system('rm -rfv '+ic.dir_db+apfile[0])
    g = open(ic.dir_db+apfile[0],'w')

    N_apr = 750
    idx_lines = np.arange(len(dbfile))
    for i in np.arange(N_apr):
        apr_num = i+1
        apr_lines = (dbfile == '\taperture\t{0:d}\n'.format(apr_num))
        if (np.sum(apr_lines) == 1):
            apr_idx = idx_lines[apr_lines][0]
            n_curve = int(dbfile[apr_idx+17].split('\t')[-1].split('\n')[0])
            apr_info = copy.deepcopy(dbfile[apr_idx-4:apr_idx-4+23+n_curve])
            # ----- START ----- #
            g.writelines(apr_info)
            idx_apr_eff.append(int(apr_info[1].split()[3])-1)
            # ----- END ----- #
    g.close()

    # ----- slit-2 ----- #
    # NOTE(review): unlike the w6900 loop above, no aperture renumbering
    # is applied here — presumably the w7000 frames did not need the
    # fiber shift; confirm this asymmetry is intentional.
    if (ic.nslit == 2):
        f = open(ic.dir_db+apfile[1]+'_old','r')
        dbfile = f.readlines()
        dbfile = np.array(dbfile)
        f.close()

        os.system('rm -rfv '+ic.dir_db+apfile[1])
        g = open(ic.dir_db+apfile[1],'w')

        N_apr = 750
        idx_lines = np.arange(len(dbfile))
        for i in np.arange(N_apr):
            apr_num = 750+i+1
            apr_lines = (dbfile == '\taperture\t{0:d}\n'.format(apr_num))
            if (np.sum(apr_lines) == 1):
                apr_idx = idx_lines[apr_lines][0]
                n_curve = int(dbfile[apr_idx+17].split('\t')[-1].split('\n')[0])
                apr_info = copy.deepcopy(dbfile[apr_idx-4:apr_idx-4+23+n_curve])
                # ----- START ----- #
                g.writelines(apr_info)
                idx_apr_eff.append(int(apr_info[1].split()[3])-1)
                # ----- END ----- #
        g.close()

    # Overwriting new MDF file
    newmdfdata = copy.deepcopy(mdfdata)
    bool_apr_eff = np.zeros(len(newmdfdata), dtype=bool)
    bool_apr_eff[idx_apr_eff] = True
    newmdfdata['BEAM'][bool_apr_eff] = 1
    newmdfdata['BEAM'][~bool_apr_eff] = -1

    hdu0 = fits.PrimaryHDU()
    hdu1 = fits.BinTableHDU()
    hdu1.data = newmdfdata
    hdu1.header = hdr
    hdul = fits.HDUList([hdu0, hdu1])
    hdul.writeto(ic.nmdf, overwrite=True)

    # Interactive tasks for the first science data for each central wavelength.
    # NOTE(review): unlike the w6900 loop, later science directories here do
    # NOT copy the first directory's aperture files — confirm intended.
    if (j == 0):
        # Verify the MDF again
        iraf.imdelete('erg@'+ic.lst_flat)
        iraf.gfextract('rg'+flat0, fl_inter='yes', line=ic.pk_line, exslits=ic.eslit)

    # Coming back to current path
    os.chdir(current_dir)
    iraf.chdir(current_dir)
| 33.521739
| 163
| 0.542978
| 1,225
| 8,481
| 3.596735
| 0.155102
| 0.034044
| 0.022242
| 0.035406
| 0.879029
| 0.857013
| 0.857013
| 0.857013
| 0.848842
| 0.848842
| 0
| 0.037676
| 0.261408
| 8,481
| 252
| 164
| 33.654762
| 0.665709
| 0.119443
| 0
| 0.838323
| 0
| 0
| 0.058913
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.041916
| 0
| 0.041916
| 0.011976
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
26b976ec4ce24fd3fe329e7bc135e8a4046c8692
| 131
|
py
|
Python
|
quax/methods/__init__.py
|
ferchault/Quax
|
53950d03b6b50a3e092f18aed7a607a6318fc6d7
|
[
"BSD-3-Clause"
] | null | null | null |
quax/methods/__init__.py
|
ferchault/Quax
|
53950d03b6b50a3e092f18aed7a607a6318fc6d7
|
[
"BSD-3-Clause"
] | null | null | null |
quax/methods/__init__.py
|
ferchault/Quax
|
53950d03b6b50a3e092f18aed7a607a6318fc6d7
|
[
"BSD-3-Clause"
] | null | null | null |
from . import energy_utils
from . import hartree_fock
from . import mp2
from . import ccsd
from . import ccsd_t
from . import ints
| 18.714286
| 26
| 0.770992
| 21
| 131
| 4.666667
| 0.47619
| 0.612245
| 0.285714
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.009346
| 0.183206
| 131
| 6
| 27
| 21.833333
| 0.906542
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
26ea518b60c5687af717ad6c9c53e5aeff3800b2
| 2,245
|
py
|
Python
|
src/HartreeParticleDSL/backends/C_AOS/C_AOS_IO_Mixin.py
|
stfc/HartreeParticleDSL
|
17990f1a85c9cbec3c4dfa0923e2c44cad6f381c
|
[
"MIT"
] | null | null | null |
src/HartreeParticleDSL/backends/C_AOS/C_AOS_IO_Mixin.py
|
stfc/HartreeParticleDSL
|
17990f1a85c9cbec3c4dfa0923e2c44cad6f381c
|
[
"MIT"
] | 47
|
2021-09-16T10:28:05.000Z
|
2022-03-15T14:24:33.000Z
|
src/HartreeParticleDSL/backends/C_AOS/C_AOS_IO_Mixin.py
|
stfc/HartreeParticleDSL
|
17990f1a85c9cbec3c4dfa0923e2c44cad6f381c
|
[
"MIT"
] | 1
|
2021-09-27T15:20:01.000Z
|
2021-09-27T15:20:01.000Z
|
from abc import ABCMeta, abstractmethod
class C_AOS_IO_Mixin(metaclass=ABCMeta):
    """Interface that IO modules must satisfy for the C_AOS backend.

    Every method raises NotImplementedError; concrete IO modules are
    expected to override all of them.
    """

    def gen_code_c(self, part_type):
        """Generate and return the C_AOS code required for this IO module.

        :raises NotImplementedError: abstract method that must be
                                     overriden by children
        :returns: the C_AOS code required for this IO module
        :rtype: str
        """
        raise NotImplementedError(
            f"{self.__class__.__name__} does not implement "
            f"required function gen_code_c")

    def call_input_c(self, part_count, filename):
        """Return the C call required to use this IO module for input.

        :raises NotImplementedError: abstract method that must be
                                     overriden by children
        :returns: the code required to use this IO module for input
        :rtype: str
        """
        raise NotImplementedError(
            f"{self.__class__.__name__} does not implement "
            f"required function call_input_c")

    def call_output_c(self, part_count, filename):
        """Return the C call required to use this IO module for output.

        :raises NotImplementedError: abstract method that must be
                                     overriden by children
        :returns: the code required to use this IO module for output
        :rtype: str
        """
        raise NotImplementedError(
            f"{self.__class__.__name__} does not implement "
            f"required function call_output_c")

    def get_includes_c(self):
        """Return the C includes required to use this IO module for C_AOS.

        :raises NotImplementedError: abstract method that must be
                                     overriden by children
        :returns: the includes for this IO module
        :rtype: list of str
        """
        raise NotImplementedError(
            f"{self.__class__.__name__} does not implement "
            f"required function get_includes_c")
| 36.803279
| 72
| 0.550111
| 234
| 2,245
| 5.038462
| 0.235043
| 0.067854
| 0.081425
| 0.072095
| 0.804919
| 0.787956
| 0.742154
| 0.718405
| 0.699746
| 0.699746
| 0
| 0
| 0.393764
| 2,245
| 60
| 73
| 37.416667
| 0.866275
| 0.434744
| 0
| 0.444444
| 1
| 0
| 0.287214
| 0.09542
| 0
| 0
| 0
| 0
| 0
| 1
| 0.222222
| false
| 0
| 0.055556
| 0
| 0.333333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
f83e91e804f7b6aa7bd6315428585f8c911083c9
| 14,228
|
py
|
Python
|
Plot/PlotHelper.py
|
AndresOtero/TensorDecompositionMachineLearning
|
455f16b405ec9d031999b0ebf9c5a68d3c20b233
|
[
"MIT"
] | 3
|
2021-06-11T02:46:06.000Z
|
2021-08-17T02:59:30.000Z
|
Plot/PlotHelper.py
|
AndresOtero/TensorDecompositionMachineLearning
|
455f16b405ec9d031999b0ebf9c5a68d3c20b233
|
[
"MIT"
] | null | null | null |
Plot/PlotHelper.py
|
AndresOtero/TensorDecompositionMachineLearning
|
455f16b405ec9d031999b0ebf9c5a68d3c20b233
|
[
"MIT"
] | null | null | null |
import matplotlib.pyplot as plt
from matplotlib import cm
from scipy.interpolate import interp1d, CubicSpline, interpolate, PchipInterpolator, interp2d
from torch import linspace
from mpl_toolkits.mplot3d import Axes3D # <--- This is important for 3d plotting
import numpy as np
class PlotHelper(object):
@staticmethod
def scatter2d_different_x_axis(list_of_lines, t, legends=[], xLimMin=0, xLimMax=0, yLimMin=0, yLimMax=0, xLabel="x",
yLabel="y",
title="", invert_x=False, show=False, texts=[],set_y_log=False,set_x_log=False):
fig = plt.figure()
ax = fig.gca()
colors = cm.rainbow(np.linspace(0, 1, len(list_of_lines)))
for line in range(len(list_of_lines)):
color=colors[line]
for r in range(len(t[line])):
ax.scatter(t[line][r], list_of_lines[line][r],color=color)
if texts:
ax.text(t[line][r]+1, list_of_lines[line][r]+0.05,texts[line][r], fontsize=7)
ax.legend(legends)
colors = iter(colors)
for leg in ax.get_legend().legendHandles:
leg.set_color(next(colors))
if (xLimMax != 0):
ax.set_xlim(xLimMin, xLimMax)
if (yLimMax != 0):
ax.set_ylim(yLimMin, yLimMax)
ax.set_xlabel(xLabel)
ax.set_ylabel(yLabel)
ax.set_title(title)
if invert_x:
ax.invert_xaxis()
if set_x_log:
ax.set_xscale('log')
if set_y_log:
ax.set_yscale('log')
plt.savefig("./img/" + title + ".png")
plt.grid(True)
if (show):
plt.show()
plt.close()
@staticmethod
def plot2d(list_of_lines, t, legends=[], xLimMin=0, xLimMax=0, yLimMin=0, yLimMax=0, xLabel="x", yLabel="y",
title="", invert_x=False, show=False, texts=[]):
fig = plt.figure()
ax = fig.gca()
for line in range(len(list_of_lines)):
ax.plot(t[line], list_of_lines[line])
ax.legend(legends, loc=1)
if (xLimMax != 0):
ax.set_xlim(xLimMin, xLimMax)
if yLimMax != 0:
ax.set_ylim(yLimMin, yLimMax)
ax.set_xlabel(xLabel)
ax.set_ylabel(yLabel)
ax.set_title(title)
if invert_x:
ax.invert_xaxis()
plt.savefig("./img/" + title + ".png")
plt.grid(True)
if (show):
plt.show()
plt.close()
@staticmethod
def plot2d_different_x_axis(list_of_lines, t, legends=[], xLimMin=0, xLimMax=0, yLimMin=0, yLimMax=0, xLabel="x",
yLabel="y",
title="", invert_x=False, show=False):
fig = plt.figure()
ax = fig.gca()
for line in range(len(list_of_lines)):
ax.plot(t[line], list_of_lines[line])
ax.legend(legends, loc=0)
if (xLimMax != 0):
ax.set_xlim(xLimMin, xLimMax)
if (yLimMax != 0):
ax.set_ylim(yLimMin, yLimMax)
ax.set_xlabel(xLabel)
ax.set_ylabel(yLabel)
ax.set_title(title)
if invert_x:
ax.invert_xaxis()
plt.savefig("./img/" + title + ".png")
plt.grid(True)
if (show):
plt.show()
plt.close()
@staticmethod
def interpolate2d_different_x_axis(list_of_lines, t, legends=[], xLimMin=0, xLimMax=0, yLimMin=0, yLimMax=0, xLabel="x",
yLabel="y",
title="", invert_x=False, show=False,set_x_log=False,set_y_log=False):
fig = plt.figure()
ax = fig.gca()
for line in range(len(list_of_lines)):
x=t[line].astype(int)
y=list_of_lines[line]
new_x,new_y= PlotHelper.GetAverageOfUniqueValues(x, y)
try:
ax.plot(new_x, interp1d(new_x, new_y, kind='cubic')(new_x))
except:
if(len(new_x)==1):
ax.plot(new_x, new_y)
else:
ax.plot(new_x, interp1d(new_x, new_y, kind='quadratic')(new_x))
ax.legend(legends, loc=0)
if (xLimMax != 0):
ax.set_xlim(xLimMin, xLimMax)
if (yLimMax != 0):
ax.set_ylim(yLimMin, yLimMax)
ax.set_xlabel(xLabel)
ax.set_ylabel(yLabel)
#ax.set_title(title)
if invert_x:
ax.invert_xaxis()
if set_x_log:
ax.set_xscale('log')
if set_y_log:
ax.set_yscale('log')
plt.savefig("./img/" + title + ".png")
plt.grid(True)
if (show):
plt.show()
plt.close()
@staticmethod
def GetAverageOfUniqueValues(x,y):
dicc={}
for i in range(len(x)):
x_value=x[i]
y_value=y[i]
if x_value not in dicc:
dicc[x_value]= [y_value]
else:
dicc[x_value].append(y_value)
new_x,new_y=[],[]
for x in sorted(dicc.keys()):
new_x.append(x)
new_y.append(sum(dicc[x]) / len(dicc[x]))
return new_x,new_y
@staticmethod
def GetAverageOfUniqueValues3D(x,y,z):
dicc={}
for i in range(len(x)):
x_value=x[i]
y_value=y[i]
z_value=z[i]
if (x_value,y_value) not in dicc:
dicc[(x_value,y_value)]= [z_value]
else:
dicc[(x_value,y_value)].append(z_value)
new_x,new_y,new_z=[],[],[]
for x,y in sorted(dicc.keys()):
new_x.append(x)
new_y.append(y)
new_z.append(sum(dicc[(x,y)]) / len(dicc[(x,y)]))
return new_x,new_y,new_z
@staticmethod
def interpolate_and_scatter_different_x_axis(list_of_lines, t, legends=None, xLimMin=0, xLimMax=0, yLimMin=0,
                                             yLimMax=0, xLabel="x", yLabel="y", title="", invert_x=False,
                                             show=False, texts=None, set_y_log=False, set_x_log=False,
                                             grid=False):
    """Draw one interpolated curve plus raw scatter markers per series.

    Each series has its own x axis (``t[i]``). Duplicate x values are averaged
    before interpolation; cubic interpolation falls back to quadratic (or a
    raw plot for a single point). Optional ``texts`` annotates every scatter
    point. The figure is saved to ./img/<title>.png.

    Fixes: mutable defaults ``legends=[]``/``texts=[]`` replaced with None;
    bare ``except:`` narrowed to Exception; the unused ``my_color`` and the
    confusing rebinding of ``colors`` to an iterator over itself removed.
    """
    if legends is None:
        legends = []
    if texts is None:
        texts = []
    fig = plt.figure()
    ax = fig.gca()
    colors = cm.rainbow(np.linspace(0, 1, len(list_of_lines)))
    for line in range(len(list_of_lines)):
        x = t[line].astype(int)
        y = list_of_lines[line]
        new_x, new_y = PlotHelper.GetAverageOfUniqueValues(x, y)
        series_color = colors[line]
        try:
            ax.plot(new_x, interp1d(new_x, new_y, kind='cubic')(new_x), color=series_color)
        except Exception:
            # Cubic interpolation needs at least 4 unique points.
            if len(new_x) == 1:
                ax.plot(new_x, new_y, color=series_color)
            else:
                ax.plot(new_x, interp1d(new_x, new_y, kind='quadratic')(new_x), color=series_color)
    ax.legend(legends, loc=1)
    for line in range(len(list_of_lines)):
        color = colors[line]
        for r in range(len(t[line])):
            ax.scatter(t[line][r], list_of_lines[line][r], color=color)
            if texts:
                ax.text(t[line][r] + 1, list_of_lines[line][r] + 0.05, texts[line][r], fontsize=7)
    ax.legend(legends)
    # Recolour the legend handles so they match the series colours.
    color_iter = iter(colors)
    for handle in ax.get_legend().legendHandles:
        handle.set_color(next(color_iter))
    if (xLimMax != 0):
        ax.set_xlim(xLimMin, xLimMax)
    if (yLimMax != 0):
        ax.set_ylim(yLimMin, yLimMax)
    ax.set_xlabel(xLabel)
    ax.set_ylabel(yLabel)
    # ax.set_title(title)
    if invert_x:
        ax.invert_xaxis()
    if set_x_log:
        ax.set_xscale('log')
    if set_y_log:
        ax.set_yscale('log')
    if (grid):
        plt.grid(True)
    plt.savefig("./img/" + title + ".png")
    if (show):
        plt.show()
    plt.close()
@staticmethod
def plot3d(x, y, z, xLimMin=0, xLimMax=0, yLimMin=0, yLimMax=0, label_x="x", label_y="y", label_z="z", title="",
           show=False):
    """Draw a single 3-D line plot and save it to ./img/<title>.png.

    Axis limits are applied only when the corresponding max is non-zero,
    matching the convention used by all other helpers in this class.
    """
    fig = plt.figure()
    ax = fig.gca(projection='3d')
    ax.plot(x, y, z)
    ax.set_xlabel(label_x)
    ax.set_ylabel(label_y)
    ax.set_zlabel(label_z)
    ax.set_title(title)
    if (xLimMax != 0):
        ax.set_xlim(xLimMin, xLimMax)
    if (yLimMax != 0):
        ax.set_ylim(yLimMin, yLimMax)
    # Bug fix: every sibling helper saves under "./img/"; this one wrote to
    # "../img/", scattering output outside the project's image folder.
    plt.savefig("./img/" + title + ".png")
    plt.grid(True)
    if (show):
        plt.show()
    plt.close()
@staticmethod
def scatter3d(x, y, z, xLimMin=0, xLimMax=0, yLimMin=0, yLimMax=0, label_x="x", label_y="y", label_z="z", label="",
              color="", title="", show=False, legend=""):
    """Scatter several 3-D series (one entry of x/y/z/color per series).

    Series with a falsy colour entry fall back to matplotlib's default
    colour cycle. The figure is saved to ./img/<title>.png.

    Fix: the ``if label: ax.set_label(label)`` block was duplicated and
    executed twice; the redundant copy is removed.
    """
    fig = plt.figure()
    ax = fig.gca(projection='3d')
    ax.set_xlabel(label_x)
    ax.set_ylabel(label_y)
    ax.set_zlabel(label_z)
    ax.set_title(title)
    if label:
        ax.set_label(label)
    for line in range(len(x)):
        x_line = x[line]
        y_line = y[line]
        z_line = z[line]
        color_line = color[line]
        if color_line:
            ax.scatter(x_line, y_line, z_line, c=color_line)
        else:
            ax.scatter(x_line, y_line, z_line)
    if (xLimMax != 0):
        ax.set_xlim(xLimMin, xLimMax)
    if (yLimMax != 0):
        ax.set_ylim(yLimMin, yLimMax)
    # NOTE(review): legend and grid are applied after savefig, so they only
    # appear in the interactive window, not in the saved file — confirm intent.
    plt.savefig("./img/" + title + ".png")
    ax.legend(legend)
    plt.grid(True)
    if (show):
        plt.show()
    plt.close()
@staticmethod
def interpolate_scatter3d(x, y, z, xLimMin=0, xLimMax=0, yLimMin=0, yLimMax=0, label_x="x", label_y="y", label_z="z", label="",
                          color="", title="", show=False, legend=""):
    """For each 3-D series, draw its duplicate-averaged line and overlay the raw points.

    Lines use the per-series colour/format string; scatter points reuse that
    colour when it is truthy. The figure is saved to ./img/<title>.png.
    """
    fig = plt.figure()
    ax = fig.gca(projection='3d')
    ax.set_xlabel(label_x)
    ax.set_ylabel(label_y)
    ax.set_zlabel(label_z)
    ax.set_title(title)
    if label:
        ax.set_label(label)
    for idx in range(len(x)):
        xs = x[idx]
        ys = y[idx]
        zs = z[idx]
        series_color = color[idx]
        avg_x, avg_y, avg_z = PlotHelper.GetAverageOfUniqueValues3D(xs, ys, zs)
        ax.plot(avg_x, avg_y, avg_z, series_color)
        if series_color:
            ax.scatter(xs, ys, zs, c=series_color)
        else:
            ax.scatter(xs, ys, zs)
    if xLimMax != 0:
        ax.set_xlim(xLimMin, xLimMax)
    if yLimMax != 0:
        ax.set_ylim(yLimMin, yLimMax)
    plt.savefig("./img/" + title + ".png")
    ax.legend(legend)
    plt.grid(True)
    if show:
        plt.show()
    plt.close()
@staticmethod
def interpolate3d(x, y, z, xLimMin=0, xLimMax=0, yLimMin=0, yLimMax=0, label_x="x", label_y="y",
                  label_z="z", label="",
                  color="", title="", show=False, legend=""):
    """Draw one 3-D line per series after averaging duplicate (x, y) pairs.

    Each series is plotted with its colour/format string from ``color``.
    The figure is saved to ./img/<title>.png and optionally shown.
    """
    fig = plt.figure()
    ax = fig.gca(projection='3d')
    ax.set_xlabel(label_x)
    ax.set_ylabel(label_y)
    ax.set_zlabel(label_z)
    ax.set_title(title)
    if label:
        ax.set_label(label)
    for idx in range(len(x)):
        xs, ys, zs = x[idx], y[idx], z[idx]
        series_color = color[idx]
        avg_x, avg_y, avg_z = PlotHelper.GetAverageOfUniqueValues3D(xs, ys, zs)
        ax.plot(avg_x, avg_y, avg_z, series_color)
    if xLimMax != 0:
        ax.set_xlim(xLimMin, xLimMax)
    if yLimMax != 0:
        ax.set_ylim(yLimMin, yLimMax)
    plt.savefig("./img/" + title + ".png")
    ax.legend(legend)
    plt.grid(True)
    if show:
        plt.show()
    plt.close()
@staticmethod
def scatter3dWithLines(lines, xLimMin=0, xLimMax=0, yLimMin=0, yLimMax=0, label_x="x", label_y="y", label_z="z",
                       label="", title="", show=False, legends=""):
    """Scatter a list of 3-D series given as (x, y, z) or (x, y, z, color) tuples.

    Four-element entries use their explicit colour; three-element entries use
    matplotlib's default cycle. Saves the figure to ./img/<title>.png.
    """
    fig = plt.figure()
    ax = fig.gca(projection='3d')
    ax.set_xlabel(label_x)
    ax.set_ylabel(label_y)
    ax.set_zlabel(label_z)
    ax.set_title(title)
    if label:
        ax.set_label(label)
    for entry in lines:
        xs, ys, zs = entry[0], entry[1], entry[2]
        if len(entry) == 4:
            ax.scatter(xs, ys, zs, c=entry[3])
        else:
            ax.scatter(xs, ys, zs)
    if legends:
        ax.legend(legends, loc=0)
    if xLimMax != 0:
        ax.set_xlim(xLimMin, xLimMax)
    if yLimMax != 0:
        ax.set_ylim(yLimMin, yLimMax)
    plt.savefig("./img/" + title + ".png")
    plt.grid(True)
    if show:
        plt.show()
    plt.close()
@staticmethod
def plotHeatMapWithLines(x, y, z, line1=[], line2=[], legends=[], xLimMin=0, xLimMax=0, yLimMin=0, yLimMax=0,
                         label_x="x", label_y="y", title="", show=False):
    """Render a filled-contour heat map of z over (x, y) and overlay two dashed lines.

    line1/line2 are plotted against their own index (no explicit x values).
    NOTE(review): unlike the sibling helpers this method does not appear to
    call plt.close(); the source may be truncated here — confirm.
    """
    # PlotHelper.scatter3d(r_matrix[:,0],r_matrix[:,1],r_matrix[:,2])
    fig = plt.figure()
    ax = fig.gca()
    # Legend is installed before anything is plotted, so it starts empty.
    ax.legend(legends, loc=0)
    ax.set_xlabel(label_x)
    ax.set_ylabel(label_y)
    # ax.imshow(r_matrix,interpolation='nearest',extent=[t[0],t[-1],c_array[0],c_array[-1]])
    cntr = ax.contourf(x, y, z, cmap=cm.RdYlBu_r)
    fig.colorbar(cntr, ax=ax)
    if (xLimMax != 0):
        ax.set_xlim(xLimMin, xLimMax)
    if (yLimMax != 0):
        ax.set_ylim(yLimMin, yLimMax)
    # Dashed overlays: black for line1, magenta for line2.
    ax.plot(line1, color="k", linewidth=1, linestyle='dashed')
    ax.plot(line2, color="m", linewidth=1, linestyle='dashed')
    ax.set_title(title)
    plt.savefig("./img/" + title + ".png")
    # Grid is toggled after savefig, so it only affects the interactive window.
    plt.grid(True)
    if (show):
        plt.show()
| 35.305211
| 131
| 0.510683
| 1,859
| 14,228
| 3.733728
| 0.080151
| 0.051145
| 0.019882
| 0.018441
| 0.863564
| 0.83086
| 0.819046
| 0.819046
| 0.812131
| 0.79758
| 0
| 0.01443
| 0.352193
| 14,228
| 403
| 132
| 35.305211
| 0.738635
| 0.015954
| 0
| 0.804878
| 0
| 0
| 0.014861
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.03523
| false
| 0
| 0.01626
| 0
| 0.059621
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
f84ce2780967a05334ecbab9e55444004863158c
| 943
|
py
|
Python
|
cotk/metric/__init__.py
|
Cospui/cotk
|
9038420787f7251049534baf3b35eac538a82148
|
[
"Apache-2.0"
] | null | null | null |
cotk/metric/__init__.py
|
Cospui/cotk
|
9038420787f7251049534baf3b35eac538a82148
|
[
"Apache-2.0"
] | null | null | null |
cotk/metric/__init__.py
|
Cospui/cotk
|
9038420787f7251049534baf3b35eac538a82148
|
[
"Apache-2.0"
] | null | null | null |
r"""
`cotk.metrics` provides functions evaluating results of models. It provides
a fair metric for every model.
"""
from .metric import MetricBase, PerplexityMetric, BleuCorpusMetric, SelfBleuCorpusMetric,\
FwBwBleuCorpusMetric, SingleTurnDialogRecorder, LanguageGenerationRecorder, \
MetricChain, MultiTurnDialogRecorder, MultiTurnPerplexityMetric, \
MultiTurnBleuCorpusMetric, BleuPrecisionRecallMetric, \
EmbSimilarityPrecisionRecallMetric, HashValueRecorder
__all__ = ["MetricBase", "PerplexityMetric", "BleuCorpusMetric", "SelfBleuCorpusMetric", \
"FwBwBleuCorpusMetric", "SingleTurnDialogRecorder", "LanguageGenerationRecorder", \
"MetricChain", "MultiTurnDialogRecorder", "MultiTurnPerplexityMetric", \
"MultiTurnBleuCorpusMetric", "BleuPrecisionRecallMetric", \
"EmbSimilarityPrecisionRecallMetric", "HashValueRecorder"]
| 55.470588
| 97
| 0.73701
| 49
| 943
| 14.102041
| 0.673469
| 0.075253
| 0.121563
| 0.17945
| 0.845152
| 0.845152
| 0.845152
| 0.845152
| 0.845152
| 0.845152
| 0
| 0
| 0.181336
| 943
| 16
| 98
| 58.9375
| 0.895078
| 0.112407
| 0
| 0
| 0
| 0
| 0.352232
| 0.219542
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.090909
| 0
| 0.090909
| 0
| 0
| 0
| 1
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
f851c0fdf44807a4d4ca61cf8b251efe78adb034
| 207
|
py
|
Python
|
src/sooli/__init__.py
|
ragavvenkatesan/HandCricket
|
3622091057c1f0d950452e97264455ba6993f31a
|
[
"MIT"
] | null | null | null |
src/sooli/__init__.py
|
ragavvenkatesan/HandCricket
|
3622091057c1f0d950452e97264455ba6993f31a
|
[
"MIT"
] | null | null | null |
src/sooli/__init__.py
|
ragavvenkatesan/HandCricket
|
3622091057c1f0d950452e97264455ba6993f31a
|
[
"MIT"
] | null | null | null |
from sooli.environment import Inning
from sooli.player import Player
from sooli.player import PlayerModes
from sooli.playground import Playground
__all__ = ["Inning", "Player", "Playground", "PlayerModes"]
| 29.571429
| 59
| 0.801932
| 25
| 207
| 6.48
| 0.36
| 0.222222
| 0.185185
| 0.259259
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.111111
| 207
| 6
| 60
| 34.5
| 0.880435
| 0
| 0
| 0
| 0
| 0
| 0.15942
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.8
| 0
| 0.8
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
f85e0b08598a0ce8472072c7543dc5ef5a963e22
| 2,091
|
py
|
Python
|
honcho/test/integration/test_ports.py
|
hipchat/honcho
|
000a7e6470907eacc9a5f7dfa8abb56c3870d24a
|
[
"MIT"
] | null | null | null |
honcho/test/integration/test_ports.py
|
hipchat/honcho
|
000a7e6470907eacc9a5f7dfa8abb56c3870d24a
|
[
"MIT"
] | null | null | null |
honcho/test/integration/test_ports.py
|
hipchat/honcho
|
000a7e6470907eacc9a5f7dfa8abb56c3870d24a
|
[
"MIT"
] | 1
|
2020-11-21T19:05:42.000Z
|
2020-11-21T19:05:42.000Z
|
import os
from ..helpers import TestCase
from ..helpers import get_honcho_output
from honcho import compat
class TestPorts(TestCase):
    """Integration tests: honcho assigns a distinct PORT to every process.

    Process types get 100-port blocks starting at 5000; concurrent instances
    of the same type increment within the block.

    Fix: ``assertRegexpMatches`` was deprecated in Python 3.2 and removed in
    3.12; replaced throughout with ``assertRegex`` (same semantics).
    """

    def test_proctype_increment(self):
        """Each process type gets its own 100-port block (web=5000, worker=5100, ...)."""
        procfile = 'Procfile.portswin' if compat.ON_WINDOWS else 'Procfile.ports'
        ret, out, err = get_honcho_output(['-f', procfile, 'start'])
        self.assertEqual(ret, 0)
        self.assertRegex(out, r'web\.1 \| (....)?PORT=5000')
        self.assertRegex(out, r'worker\.1 \| (....)?PORT=5100')
        self.assertRegex(out, r'redis\.1 \| (....)?PORT=5200')
        self.assertRegex(out, r'es\.1 \| (....)?PORT=5300')

    def test_concurrency_increment(self):
        """Extra instances of a type increment PORT within the type's block."""
        procfile = 'Procfile.portswin' if compat.ON_WINDOWS else 'Procfile.ports'
        ret, out, err = get_honcho_output(['-f', procfile, 'start', '-c', 'web=2,worker=3'])
        self.assertEqual(ret, 0)
        self.assertRegex(out, r'web\.1 \| (....)?PORT=5000')
        self.assertRegex(out, r'web\.2 \| (....)?PORT=5001')
        self.assertRegex(out, r'worker\.1 \| (....)?PORT=5100')
        self.assertRegex(out, r'worker\.2 \| (....)?PORT=5101')
        self.assertRegex(out, r'worker\.3 \| (....)?PORT=5102')
        self.assertRegex(out, r'redis\.1 \| (....)?PORT=5200')
        self.assertRegex(out, r'es\.1 \| (....)?PORT=5300')

    def test_get_port_from_dot_env(self):
        """A PORT set in a .env file overrides the computed base port."""
        procfile = 'Procfile.portswin' if compat.ON_WINDOWS else 'Procfile.ports'
        ret, out, err = get_honcho_output(['-f', procfile, '-e', '.env_port', 'start'])
        self.assertEqual(ret, 0)
        self.assertRegex(out, r'web\.1 \| (....)?PORT=8000')

    def test_get_port_from_env(self):
        """A PORT in the process environment overrides the computed base port."""
        os.environ['PORT'] = '3000'
        procfile = 'Procfile.portswin' if compat.ON_WINDOWS else 'Procfile.ports'
        ret, out, err = get_honcho_output(['-f', procfile, 'start'])
        del os.environ['PORT']
        self.assertEqual(ret, 0)
        self.assertRegex(out, r'web\.1 \| (....)?PORT=3000')
| 41
| 92
| 0.613582
| 256
| 2,091
| 4.902344
| 0.21875
| 0.238247
| 0.269323
| 0.279681
| 0.811952
| 0.755378
| 0.755378
| 0.755378
| 0.755378
| 0.755378
| 0
| 0.044883
| 0.200861
| 2,091
| 50
| 93
| 41.82
| 0.706164
| 0
| 0
| 0.5
| 0
| 0
| 0.27164
| 0
| 0
| 0
| 0
| 0
| 0.472222
| 1
| 0.111111
| false
| 0
| 0.111111
| 0
| 0.25
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
f88f7748695f69514d0d92aa622505b29ad64abd
| 73
|
py
|
Python
|
src/common/utils.py
|
Datkros/budget-it
|
e96c0357bf04537128b57b1164db06d8fd508af6
|
[
"MIT"
] | null | null | null |
src/common/utils.py
|
Datkros/budget-it
|
e96c0357bf04537128b57b1164db06d8fd508af6
|
[
"MIT"
] | null | null | null |
src/common/utils.py
|
Datkros/budget-it
|
e96c0357bf04537128b57b1164db06d8fd508af6
|
[
"MIT"
] | null | null | null |
from uuid import uuid4
def unique_id_creator():
    """Return a fresh random UUID (version 4).

    Bug fix: ``uuid4`` is already the function imported via
    ``from uuid import uuid4``; the previous ``uuid4.uuid4()`` raised
    AttributeError on every call.
    """
    return uuid4()
| 18.25
| 24
| 0.753425
| 11
| 73
| 4.818182
| 0.818182
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.04918
| 0.164384
| 73
| 4
| 25
| 18.25
| 0.819672
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
f8920e7aad63cb9c4d6fcd58d9b126e2c3c8bc85
| 432
|
py
|
Python
|
tests/api/test_accounts.py
|
willjohnson/drip-python
|
13e2836b5acb7a822b0e1f9884e3249d37734cef
|
[
"MIT"
] | 5
|
2019-04-11T19:32:14.000Z
|
2020-08-03T21:58:55.000Z
|
tests/api/test_accounts.py
|
willjohnson/drip-python
|
13e2836b5acb7a822b0e1f9884e3249d37734cef
|
[
"MIT"
] | 7
|
2019-03-19T03:54:49.000Z
|
2021-12-09T21:53:28.000Z
|
tests/api/test_accounts.py
|
willjohnson/drip-python
|
13e2836b5acb7a822b0e1f9884e3249d37734cef
|
[
"MIT"
] | 1
|
2021-01-11T21:51:51.000Z
|
2021-01-11T21:51:51.000Z
|
def test_accounts(client):
    """Listing accounts must hit the accounts collection endpoint exactly once."""
    client.session.get.return_value.json.return_value.update({'accounts': [1, ]})
    client.accounts()
    # Bug fix: `called_once_with` is not a Mock assertion method — accessing it
    # silently created a child mock and verified nothing. The real assertion is
    # `assert_called_once_with`.
    client.session.get.assert_called_once_with('https://api.getdrip.com/v2/accounts/')
def test_account(client):
    """Fetching one account must hit that account's endpoint exactly once."""
    client.session.get.return_value.json.return_value.update({'accounts': [1, ]})
    client.account(1234)
    # Bug fix: `called_once_with` is not a Mock assertion method — accessing it
    # silently created a child mock and verified nothing. The real assertion is
    # `assert_called_once_with`.
    client.session.get.assert_called_once_with('https://api.getdrip.com/v2/accounts/1234')
| 33.230769
| 83
| 0.731481
| 60
| 432
| 5.1
| 0.35
| 0.169935
| 0.20915
| 0.143791
| 0.830065
| 0.830065
| 0.830065
| 0.830065
| 0.830065
| 0.830065
| 0
| 0.030769
| 0.097222
| 432
| 12
| 84
| 36
| 0.753846
| 0
| 0
| 0.25
| 0
| 0
| 0.213953
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0
| 0
| 0.25
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
3e34293e751b378b7d1d1cecee365f20668d5a9e
| 47
|
py
|
Python
|
EGGS_labrad/clients/script_scanner_gui/tree_view/__init__.py
|
EGGS-Experiment/EGGS_Control
|
c29b3ab0e30dcb6e01d1ca3212ac64ad1506143b
|
[
"MIT"
] | 2
|
2021-12-26T05:00:54.000Z
|
2021-12-30T17:15:49.000Z
|
EGGS_labrad/clients/script_scanner_gui/tree_view/__init__.py
|
EGGS-Experiment/EGGS_Control
|
c29b3ab0e30dcb6e01d1ca3212ac64ad1506143b
|
[
"MIT"
] | null | null | null |
EGGS_labrad/clients/script_scanner_gui/tree_view/__init__.py
|
EGGS-Experiment/EGGS_Control
|
c29b3ab0e30dcb6e01d1ca3212ac64ad1506143b
|
[
"MIT"
] | null | null | null |
from .ParametersEditor import ParametersEditor
| 23.5
| 46
| 0.893617
| 4
| 47
| 10.5
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.085106
| 47
| 1
| 47
| 47
| 0.976744
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
3e47c5b240e93cf40aa3ed305e7fffe68f153866
| 13,227
|
py
|
Python
|
resource_rc.py
|
gil9red/fake-painter
|
8008f4b9a156e8363fce464310c20d229114af47
|
[
"MIT"
] | 3
|
2019-06-11T19:13:40.000Z
|
2020-10-27T06:06:43.000Z
|
resource_rc.py
|
gil9red/fake-painter
|
8008f4b9a156e8363fce464310c20d229114af47
|
[
"MIT"
] | null | null | null |
resource_rc.py
|
gil9red/fake-painter
|
8008f4b9a156e8363fce464310c20d229114af47
|
[
"MIT"
] | 1
|
2021-05-22T05:38:15.000Z
|
2021-05-22T05:38:15.000Z
|
# -*- coding: utf-8 -*-
# Resource object code
#
# Created: Вс 12. июл 02:43:14 2015
# by: The Resource Compiler for PySide (Qt v4.8.6)
#
# WARNING! All changes made in this file will be lost!
from PySide import QtCore
qt_resource_data = b"\x00\x00\x02j\x89PNG\x0d\x0a\x1a\x0a\x00\x00\x00\x0dIHDR\x00\x00\x00\x10\x00\x00\x00\x10\x08\x06\x00\x00\x00\x1f\xf3\xffa\x00\x00\x00\x19tEXtSoftware\x00Adobe ImageReadyq\xc9e<\x00\x00\x02\x0cIDATx\xda\x84S\xcdK\x14a\x18\xff\xcd\xec;\xfb5DS\xa8\xab\x08\x15\x12DA\x87\x98\x8b\xb7\x0e\xabx\xa9 (\xbdHB\x0bi\xb7=t\xe8\x0f\xe8\x1a}B(^\x14\x0f\x9e\xd2\x83E\x14{\x8bn\xd2\xc1\xa0E\xbc(\x91[(\xc4\xec8;\xbb3\xefN\xef\xf3:\xa33\xeb\x82/<<\xef\xf3\xf5{~\xcf3\xf3*ooB\x1eE\xc1\xacP&N?\xebA\x80\xe9\xc8`Ax\x11Nsl\xa2d\xa6R)d\xb3Yd2\x19\xa8\xaa\x0a\xdf\xf7\xd1j\xb5\xe0\xba\xae\xc8\x09\xf0iy>\x81\xc6\xda\xedc\x83\x12+\x95\x0ar\xb9\x1c\x1a\x8d\xc6\x09],\x16\x11\xcf\xa7\xa3r\xe1\x88\xa4\x1dF\x0d\xc3\xe8\xaa)\x1e\xcf'a<\x86\xc89\x87\xae\xeb\xb2\x80\xe8\xc6\xb5eY2\xce;\x180\xe1K\x00\xd0\xdc\xd4\xa9S\xc8/\x01x\x07\x80\x1fC\x8c\x12\xa9\xeb\xe1b\x03\xb9\xbcZ\xad&\x97Iq\xff\xc4\x0e8p,\x5c|N%\xd1y\xf7\x9f\x8d\xc1\xa9\xd7\xf8k9G\x0c\xe2\xc2\xfc\x18%\xeaB\x00\x94\xd8\x16\xddI\xa7\x84^{z\x07\x03}\xe7d\x9c\xf2w\x86\x9f\xc8\xbb\xd7l&\x01x8 u\x0eB\x06\x03=\x06\xae\x0c]8Zbu\xe4\xd5\xc5?\xd7\xc6'[\xae\xbb\xe4\xd4\xebI\x00*\x88F\x88\xecN\xff\xbeq5/\x8a\xcb\xe1\x9e\x96\xba2\x88/\xf1s\x7f\x09{\xfaeQ\x1c@\xd3\x18\x5c\xcb\xc6T\xe9\x92\xf9nq\xa3L,\xd9\xf6\x010\x98\xed2\x82(&\xbd\xcd\x0b\xb8\xff\xf06\xaa\x16P\xf7\x81\xf3@\xfe\xdb\x1e0z\xf7\xba\xb9\xba\xf0\xb5\xcc\xde\xff\x06n\xf5\x02\x85\xf4a!\xcdJ\xc5\xb6mK\xda\x86\xff\x13\x0b\xcf\xe6d,\x9dNc\xdfj:=3\x8f\xf3_f?\xae+\xaa\xfaR\x11\x0d\xf3B\xce\xdc;\x8b\x0f\x8f\xa6\x8b\xa6\xa6i\xf21\x91\x8e\xc6\xf0<O\xfe\x0f\xc4\xf0Au\xcc\xb1\xf5\xfe-^\xfb\xf1F\xad\xff\x9ag\x22''\xa4o\xc7\xc1\xe6\xf3\x17\x95S\xdf\xb2zc\xa8\xe0x\x1b\xcb\xfc\xfb\xe2\x0a\xd9\xff\x05\x18\x00\xca\xd1ps_\xc7\xc6\x1f\x00\x00\x00\x00IEND\xaeB`\x82\x00\x00\x02\x9f\x89PNG\x0d\x0a\x1a\x0a\x00\x00\x00\x0dIHDR\x00\x00\x00\x10\x00\x00\x00\x10\x08\x06\x00\x00\x00\x1f\xf3\xffa\x00\x00\x00\x19tEXtSoftware\x00Adobe 
ImageReadyq\xc9e<\x00\x00\x02AIDATx\xda\x8cSMk\x13Q\x14=\xf3\xdd\xd4\x866\xc6\xd4D\xec\xc2\x85P\x12t\x1d0\xab\x08\xe2\xc6\x9d;\xff\x80\x8b\xfe\x05[\x08.\x5c\x8b`\xd7\x85\x80\x0b\xd7\x82\x94\x16\x0a]tU\xb2\xd2VS]\xa8\x19c5\x1f&if&3o\xbc\xf7%\x93L\xb1\x88\x17\xee\xbc\xaf{\xce\xdcs\xef{\xca\xc3\xf5u\xb0)\x8a\xf2\x80\x86\x1c\xfem\xbbB\x88\xba\x08\x02\x84a\x88\xa3\xfd}\xe8<a\xa3\xf1\xda\xd6\xc6\xc6\xa6G\x87#v!\x10\xd0Yt^\xab\xd5\xb0\xb5\xbd\xfd\x98\xffE\xfe\x91\xf7VK%\xa8\xcc6qE0h4\x92\xee9\x0e\x86\x83\x01\xfa\xfd>z\xbd\x9e\x1c_W*\x9b\x14w\x97\xb07\xa3\x94\xf4\x80\xc0\x13S\x98Z\xa5\xaf\xa1i\xb3\xa4}\x1f>\x11\xb7Z-\xbc\xa8VQ\xce\xe7_>\xafV\x9f\xd0\xc9SI f\x04*\x83UI\x83\xbfH\xca\xe5\xb2\x94\x93L&\xf1\xb6VkO3\xf0\xe3\x19\x10\xf8\xd3\x9b\x95\x0bj\xc7\xa4\xe3Z\xe0\xdeg\x10F\x99I \xf6\xb8\x04\xd34\xcfCyW\xa5\x89\x90\x85\x96k\xc2\x5cH %\xfc\xfc\xb5\x88\x5c\xd6\xa5\xc58X35h\x96\x81\xc0\xa5\xe2z\x01\xb41\x81:\x05\xf9\x5c\xa4\xb1\xabL[}\x95\xc1\xbb\xf7)X\x96%}!\x9d\xc6\xf2j\x1e\xc9L\x06\xd6\xdc\x9c\xac\x0d\xc7F\xb88\x81\xacA\xb3\xd1\xc1\xce\xae\x01\xcf3\xa1\xe9\x16\x12\xe9\x1c\xac\xa5\xeb\x98Oga\x90<CUel\x84\xd3\xb9\xe7Q6\x9c\x81\xddh\xc0u.\xa3RYD\xe1\x96\x81\xfb\x8f2\xb8\x9d\xbb\x81v7\x80;\xb41\xcf\x12F#u\xd6\x85X\x0d8\x83\xb5\xb5\x01i?\x83\xae\x7f\x87\xa6)\xe8\xd9'h\x7f\xdd\xc1\xb7\x0f.\xba\x8d\x012w %L\x09\xba\xb6\x8dK\xa4/\xeaB6k\x9e\xeb\x9e\x7f\xe6\xc1\xae\xf7\xe0t}\xea\x82\x90\xf7\xc4\x9ftapz\x0a\xbd\xbe\xb7\x87\x95b\x11\x89TJ\xe7\xcd\xc3C\x07\xf1\xbe.]\xa1\x94\x13.N\x8e\xe96\xfe\x08Q\xa2}\xba\xe2\xfa\xb0\xdd\xc6\x97\x83\x03\x19\x97 _\xbeZ,V\xc2\x98\x09\xf2@\x88\x90\xaeqH\x8f+\xa4G\x16:\xbe\x1f\xba\xb4\xe6X\xc60V\x8f\x08~7\x9b\x9d\x85B\xe1\x19\xfe\xc3\xc4p\xd8\x99\x10\xf8\x7f\x04\x18\x00\xb6QGFM\xa6v\x14\x00\x00\x00\x00IEND\xaeB`\x82\x00\x00\x01\xb2\x89PNG\x0d\x0a\x1a\x0a\x00\x00\x00\x0dIHDR\x00\x00\x00\x10\x00\x00\x00\x10\x08\x06\x00\x00\x00\x1f\xf3\xffa\x00\x00\x00\x19tEXtSoftware\x00Adobe 
ImageReadyq\xc9e<\x00\x00\x01TIDATx\xda\xa4\x92\xbfN\x83P\x14\xc6O\xb9\x90\xc0`t$1!\xdd}\x08\x9c|\x02W\xe7.M\xec\xea\x82\xb1a\xeb\x02+qs5>\x80/\x81\x8b\x9bi\x5c\x98Y\x9a\x86\x5c(\xd7\xf3!4\xa5\xb4P\xe3I>.9\x9c\xf3\xe3\xfc\xb9#\xa5\x14\xfd\xc7Fl8\x9fXW\xb7\x9e\xf7~J\xd2\xeb|~\xc3\xc7'\xeb\x11\x04\xe8\xcd\xf7}\x05+\x07\x04C,r\x90\xab\xd7PMJI\x05\xb7\xb3\xd9lz\xff.\x84 \xc4\x22\x07\x8f-\xa0,K\x92\x9c\x9c\xe7y/\xc0`!v\x1f \xf0g\x00\xb2\x01\x80\xd2\xb4\xa6J\xd1\x02\x14EA\x92\xc9C\x00\xcd0\x08\xb1\x1d\x00\xa8\xdf\xcb%]\x8e\xc7\xbd\x00\xc4\x1c\xad\xe09\x0cO\xda\xbdeY\x87+\x88\xe3\x98\x8e]\xac\xfa\xbeT\xe6\xban\x07\xa0\xa3\x02\xac(\x08\xc2\xce&\x0c\xee{6\xbb'\xc7q(I\x92f\x06z\x0b\x80\x0at]\xa7\xd5*\xa3\xe9tR9\x17\x8b`[2\xbe\xc1L\xd3lf\xd0\x02\x18\x1a\xaf\x07A\xeb\xb5$\xdb\xb6+'\xde\xab\xd5)\xd1\x02\xd4\xf7\xc0\xa8\xb6\x82\x9eY^\x14E_i\x9afR\xaa*\x18R\xeaW\x8do\x17\xc09g\xc8\xc5d\x1c\xd6\x05\xeb\x9a\xf5\xc0\x81/\x87\x86\xc8}\xdf\xed\xb9>X\x13\x00\xce\x01n\xa6\xfa\x07\xc3 \xb2\x1f\x01\x06\x00\x02o\xd0\x03KB\x1dU\x00\x00\x00\x00IEND\xaeB`\x82\x00\x00\x01\xb2\x89PNG\x0d\x0a\x1a\x0a\x00\x00\x00\x0dIHDR\x00\x00\x00\x10\x00\x00\x00\x10\x08\x06\x00\x00\x00\x1f\xf3\xffa\x00\x00\x00\x19tEXtSoftware\x00Adobe ImageReadyq\xc9e<\x00\x00\x01TIDATx\xda\xa4\x92\xbfN\x83P\x14\xc6O\xb9\x90\xc0`t$1!\xdd}\x08\x9c|\x02W\xe7.M\xec\xea\x82\xb1a\xeb\x02+qs5>\x80/\x81\x8b\x9bi\x5c\x98Y\x9a\x86\x5c(\xd7\xf3!4\xa5\xb4P\xe3I>.9\x9c\xf3\xe3\xfc\xb9#\xa5\x14\xfd\xc7Fl8\x9fXW\xb7\x9e\xf7~J\xd2\xeb|~\xc3\xc7'\xeb\x11\x04\xe8\xcd\xf7}\x05+\x07\x04C,r\x90\xab\xd7PMJI\x05\xb7\xb3\xd9lz\xff.\x84 \xc4\x22\x07\x8f-\xa0,K\x92\x9c\x9c\xe7y/\xc0`!v\x1f 
\xf0g\x00\xb2\x01\x80\xd2\xb4\xa6J\xd1\x02\x14EA\x92\xc9C\x00\xcd0\x08\xb1\x1d\x00\xa8\xdf\xcb%]\x8e\xc7\xbd\x00\xc4\x1c\xad\xe09\x0cO\xda\xbdeY\x87+\x88\xe3\x98\x8e]\xac\xfa\xbeT\xe6\xban\x07\xa0\xa3\x02\xac(\x08\xc2\xce&\x0c\xee{6\xbb'\xc7q(I\x92f\x06z\x0b\x80\x0at]\xa7\xd5*\xa3\xe9tR9\x17\x8b`[2\xbe\xc1L\xd3lf\xd0\x02\x18\x1a\xaf\x07A\xeb\xb5$\xdb\xb6+'\xde\xab\xd5)\xd1\x02\xd4\xf7\xc0\xa8\xb6\x82\x9eY^\x14E_i\x9afR\xaa*\x18R\xeaW\x8do\x17\xc09g\xc8\xc5d\x1c\xd6\x05\xeb\x9a\xf5\xc0\x81/\x87\x86\xc8}\xdf\xed\xb9>X\x13\x00\xce\x01n\xa6\xfa\x07\xc3 \xb2\x1f\x01\x06\x00\x02o\xd0\x03KB\x1dU\x00\x00\x00\x00IEND\xaeB`\x82\x00\x00\x02p\x89PNG\x0d\x0a\x1a\x0a\x00\x00\x00\x0dIHDR\x00\x00\x00\x10\x00\x00\x00\x10\x08\x06\x00\x00\x00\x1f\xf3\xffa\x00\x00\x00\x06bKGD\x00\xff\x00\xff\x00\xff\xa0\xbd\xa7\x93\x00\x00\x00\x09pHYs\x00\x00\x0b\x13\x00\x00\x0b\x13\x01\x00\x9a\x9c\x18\x00\x00\x00\x07tIME\x07\xdf\x07\x0b\x14\x044\x14jeV\x00\x00\x01\xfdIDAT8\xcb\xa5\xd3\xcdKTQ\x18\xc7\xf1\xef}\x1b\xaf\xe64\x95F\x1a2\x14\x04\xf6b\xdb(\x17\x0e$\x14T\xb4j\xd3\xaae\xcb(pk\xa4\xb3i\x97\xfb\xfe\x84\x90\x08\xac\x18\x12kc\x8b\xd0\x11r\xa8E\x5c&\xe4\x12\x95\x11d3\xde9\xf7\x9c\xf3\xb4hF\x1c\x07\xac\xe8\xc0\xb38\x8b\xf3y\x1e~<\x07\xfe\xf38\x00\x8e\xe3\xdc\x03N]\x9b\x9c,\xfd\xcd\xa3GSS\x17\x80\x8a\x88\xdc\xa5\x09\xcc\x16\x8bE\x11\x11\xb1\x7f(\x11\x91b\xb1(\x8e\xe3\xcc\x02\xf8M\xd4UJ\xa1E0\xc6\xec\xda\xdd\xf3<\x94R\x00n\x1b`\xadE\x19C\x9a\xa6\xbb\x02\x01`\xad\xed\x00<c\x0c\xca\x18\x92\x1d\x80\xb5\x16U\xa9\xd0U.\x93\x0c\x0f\xb3\x7fl\xac5\xa5\xb7\xa5\x00\x9e\xd6\x1ae-I\x9anU=\xd9$^{\xc7\x86R\x84\xd6R_\x5cDY\x8b\xd6\xba\x130\xc6P\x8d\x22\x820$\x08C\xdc\xc0\xa1\xb1\xf1\x86^\xbd\x80\xdd\xfb\x95\xa5\x81\x01\x92\xd1Q\xaaQ\xd46\x81\xbf}\x82\x8733\xcd\xab\xd0\x9f\x8d\xb9<\xde\xc7\x89\xe1>J\x0b\xafy\xf6\xca#\xe8\xea\xc7u]\xba\xbb\xbb;\x01c\x0c\xe5r\x19\xc71\x1c\x19\xfc\xc9\xc5\xb3C\x1c\xcd\xf72\xffr\x99\xc7s\x9f\x89\xe2,\x221\x00\x85B\xa1\x03\xf0\xb5\xd6\x04\xbe\xcb\xfd\xe9\x1b\x1c\xcaEd\xf7$|\x88\xbe\xd3{\xf
0\x0a\xb7&\xceq\xfb\xce\x04\xf9|\x9e8\x8e[\x19\xf8\xdb3\xf0\x8dN\xc9\x0f\xd6\xc8u\xad\x92\xcb6(\xcd\xaf\x10\x7f;N\xca\x08\xb5\xba\xc5\xf7\x7f\xf7\x0a\xc3\xb0\x95A\x1b\x10\xf4d\x12.\x9d?M\xc6Kx2\xb7\xc4\xd3\xf9\x1adF\xd8\xdc\xd4\xd4\xeb\xaa\x0dh\xeeA\xb0\x05\x88\xc8\xe4\xa7\x8f\xcb\xebC\x87O\xca\xea\xfb\x1e\xe6^\xd4Y\xffq\x00+\x19\x8cqi4l\x07 \x22\xd9\xd6g\xca\x03\xfb\xae\x8e\x1f{\x80\xe3\x9ey[\xa9>_\xfbb\xd6\x054`[e\x8c\xb9\xbec)W\x80\x9b\x0e\x90\x03\xc2V\xaa\xffp\x0c\x90\xfc\x02\x04\x81\x1c\x0e\x08\xe9l\xee\x00\x00\x00\x00IEND\xaeB`\x82\x00\x00\x02p\x89PNG\x0d\x0a\x1a\x0a\x00\x00\x00\x0dIHDR\x00\x00\x00\x10\x00\x00\x00\x10\x08\x06\x00\x00\x00\x1f\xf3\xffa\x00\x00\x00\x06bKGD\x00\xff\x00\xff\x00\xff\xa0\xbd\xa7\x93\x00\x00\x00\x09pHYs\x00\x00\x0b\x13\x00\x00\x0b\x13\x01\x00\x9a\x9c\x18\x00\x00\x00\x07tIME\x07\xdf\x07\x0b\x14\x044\x14jeV\x00\x00\x01\xfdIDAT8\xcb\xa5\xd3\xcdKTQ\x18\xc7\xf1\xef}\x1b\xaf\xe64\x95F\x1a2\x14\x04\xf6b\xdb(\x17\x0e$\x14T\xb4j\xd3\xaae\xcb(pk\xa4\xb3i\x97\xfb\xfe\x84\x90\x08\xac\x18\x12kc\x8b\xd0\x11r\xa8E\x5c&\xe4\x12\x95\x11d3\xde9\xf7\x9c\xf3\xb4hF\x1c\x07\xac\xe8\xc0\xb38\x8b\xf3y\x1e~<\x07\xfe\xf38\x00\x8e\xe3\xdc\x03N]\x9b\x9c,\xfd\xcd\xa3GSS\x17\x80\x8a\x88\xdc\xa5\x09\xcc\x16\x8bE\x11\x11\xb1\x7f(\x11\x91b\xb1(\x8e\xe3\xcc\x02\xf8M\xd4UJ\xa1E0\xc6\xec\xda\xdd\xf3<\x94R\x00n\x1b`\xadE\x19C\x9a\xa6\xbb\x02\x01`\xad\xed\x00<c\x0c\xca\x18\x92\x1d\x80\xb5\x16U\xa9\xd0U.\x93\x0c\x0f\xb3\x7fl\xac5\xa5\xb7\xa5\x00\x9e\xd6\x1ae-I\x9anU=\xd9$^{\xc7\x86R\x84\xd6R_\x5cDY\x8b\xd6\xba\x130\xc6P\x8d\x22\x820$\x08C\xdc\xc0\xa1\xb1\xf1\x86^\xbd\x80\xdd\xfb\x95\xa5\x81\x01\x92\xd1Q\xaaQ\xd46\x81\xbf}\x82\x8733\xcd\xab\xd0\x9f\x8d\xb9<\xde\xc7\x89\xe1>J\x0b\xafy\xf6\xca#\xe8\xea\xc7u]\xba\xbb\xbb;\x01c\x0c\xe5r\x19\xc71\x1c\x19\xfc\xc9\xc5\xb3C\x1c\xcd\xf72\xffr\x99\xc7s\x9f\x89\xe2,\x221\x00\x85B\xa1\x03\xf0\xb5\xd6\x04\xbe\xcb\xfd\xe9\x1b\x1c\xcaEd\xf7$|\x88\xbe\xd3{\xf0\x0a\xb7&\xceq\xfb\xce\x04\xf9|\x9e8\x8e[\x19\xf8\xdb3\xf0\x8dN\xc9\x0f\xd6\xc8u\
xad\x92\xcb6(\xcd\xaf\x10\x7f;N\xca\x08\xb5\xba\xc5\xf7\x7f\xf7\x0a\xc3\xb0\x95A\x1b\x10\xf4d\x12.\x9d?M\xc6Kx2\xb7\xc4\xd3\xf9\x1adF\xd8\xdc\xd4\xd4\xeb\xaa\x0dh\xeeA\xb0\x05\x88\xc8\xe4\xa7\x8f\xcb\xebC\x87O\xca\xea\xfb\x1e\xe6^\xd4Y\xffq\x00+\x19\x8cqi4l\x07 \x22\xd9\xd6g\xca\x03\xfb\xae\x8e\x1f{\x80\xe3\x9ey[\xa9>_\xfbb\xd6\x054`[e\x8c\xb9\xbec)W\x80\x9b\x0e\x90\x03\xc2V\xaa\xffp\x0c\x90\xfc\x02\x04\x81\x1c\x0e\x08\xe9l\xee\x00\x00\x00\x00IEND\xaeB`\x82\x00\x00\x02\x87\x89PNG\x0d\x0a\x1a\x0a\x00\x00\x00\x0dIHDR\x00\x00\x00\x10\x00\x00\x00\x10\x08\x06\x00\x00\x00\x1f\xf3\xffa\x00\x00\x00\x19tEXtSoftware\x00Adobe ImageReadyq\xc9e<\x00\x00\x02)IDATx\xda\x8cS\xcfk\x13A\x18};\xb3\xd6n\xfd\xd1\x92hA\xe8!j\xab\xa5^\x84\x88\xa7\x9e\xbc\x09\x0a^<\xe8\xad\xe0A\xf0\xe0? BE\xa8\xffD\xf1\x5cP\x08x\xf1 \xc5\xa3\x10\x04\xb5\xd6\x1fUj[\xd3V\x0b\xa1\xc6\x90l\xb2;3\xeb\xf7Mf7YPp\xd8\xc7\xcc\xce~\xef\xed{\xdf\xeex\xd5\x05\xd8\xe1y\x98\xa3\xa9\x84\xff\x1b\x1bI\x82\xc7\xbc\xf0\x13\xb7C\x1b\xa7f\xae?\xba\xd7\xaa}\x82G7#\xc7N\x22\x18\x9b\x80\x90Cy\xaa\xef\xa3\xbax\xf3avkL\xf6H\x1c:>\x85\x91\xe2iD\xcd=\xb4v?c\xfb\xc3+L\x95\xaf\xe5\xf8^\x10\x808\x22#i\xb2\xe0 \x8cV0Q\x17\xfe\xc1Q\x8c\x9d\x99E\xa7\x13\xa1\xd9\xea@\x13#!W\x16\xb4\xe6\xda\x94'49p\x90\x89\xd6\xc8\xa0HLEh4C\xec\xee5\xfa\x02\x04\xaeMy>\xd5\xa6\xc3\x0a\xb0\x0bkM\x08\xb4\xc3.\x0a.\xa3q\xb3\xa0\x1a\xbad\xd2\xeb\x1b|\xd5\xef\x814:\x86V\xb1\x0b\xe7!V:#\xa637\x808\x92Ve\x12x=\x18\xc17L\xb0\xd6\x19\x1a\x8a6\x998\x08\xee\xc1\xa5ys\x97\x1e\xcd\x19\xeb 
\x17A\xf5#hi\x1dp\xe6\xdegv3\x094\xc2\x90\xc4\xad\x8b\x5c\x04r\x10\xc3\xb8\x08\x86z\xa0\xe8\xa1q\xc44\x82G\xf3~\xbb\xcd1|\xfb\xa2w\x9b`5\xab\xc8\xd6\xb5\x03\xaf\x95\xd6\x7f\x8dpk\xbdh\xeb\x99'\xee/\x01,\x12+\x0c\xd9\xff\x80\x1a\xd9\x83\xb2\x0el\xab\x07>!\x0b\xd4\xebm[\xcf<\xfe\xa3\x82\x07OP\xf8\x1d\xe2h\xc2M\xb3\x9f\xb2\xf7/p\x93\xa4\x949xth\xeeT\x0e?{\xb1\x82%\xe2\x05\x9cc\x98Px\xbb\x81\x9d\xc9\x97O\x9b\xa5\xe9\x0b\xc3\xa3\xc5\x13\x07\xf8M[\xdf\xd6~\xbdY\x99_\xe5\xac\xc5#\x88\xd2f\xad\xff\xc4\xce\xe22>2\x97\x05\xf6\x19_~\xe0\xf6\xd7\xd5\xea\xf3\xb5\xf7\xd5\xcb\x85\xf1\x89\xb3\x93\xe7.\x96\xc8\xc1\xe6B\x057f\xa7Q\xbfZF7\x15\xa0\xda~\xe7\x07\x0f\x0a\x11*\x14\xbb\xb2]\xab\x9d\xff\xbeU\xbbB&\xc6i\xbbN\xe8\xfe\xeb\x5c\xff\x11`\x00rQ\x96\x83B\xa2m\x81\x00\x00\x00\x00IEND\xaeB`\x82"
qt_resource_name = b"\x00\x05\x00o\xa6S\x00i\x00c\x00o\x00n\x00s\x00\x0f\x05\xcb\x8b\xe7\x00d\x00o\x00o\x00r\x00-\x00-\x00a\x00r\x00r\x00o\x00w\x00.\x00p\x00n\x00g\x00\x12\x05\xb0\x87\xc7\x00d\x00o\x00c\x00u\x00m\x00e\x00n\x00t\x00-\x00i\x00m\x00a\x00g\x00e\x00.\x00p\x00n\x00g\x00\x08\x08\xc8Xg\x00s\x00a\x00v\x00e\x00.\x00p\x00n\x00g\x00\x0a\x0b\x8df\xc7\x00s\x00a\x00v\x00e\x00a\x00s\x00.\x00p\x00n\x00g\x00\x08\x06\xc1Y\x87\x00o\x00p\x00e\x00n\x00.\x00p\x00n\x00g"
qt_resource_struct = b"\x00\x00\x00\x00\x00\x02\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x02\x00\x00\x00\x07\x00\x00\x00\x02\x00\x00\x004\x00\x00\x00\x00\x00\x01\x00\x00\x02n\x00\x00\x00\x10\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x8e\x00\x00\x00\x00\x00\x01\x00\x00\x0de\x00\x00\x00^\x00\x00\x00\x00\x00\x01\x00\x00\x05\x11\x00\x00\x00^\x00\x00\x00\x00\x00\x01\x00\x00\x06\xc7\x00\x00\x00t\x00\x00\x00\x00\x00\x01\x00\x00\x0a\xf1\x00\x00\x00t\x00\x00\x00\x00\x00\x01\x00\x00\x08}"
def qInitResources():
    """Register the embedded icon payload with Qt's resource system (":/icons/..." paths)."""
    QtCore.qRegisterResourceData(0x01, qt_resource_struct, qt_resource_name, qt_resource_data)
def qCleanupResources():
    """Unregister the embedded resource payload registered by qInitResources()."""
    QtCore.qUnregisterResourceData(0x01, qt_resource_struct, qt_resource_name, qt_resource_data)
qInitResources()
| 601.227273
| 11,767
| 0.748393
| 2,941
| 13,227
| 3.355321
| 0.224753
| 0.108229
| 0.081171
| 0.034049
| 0.574281
| 0.572456
| 0.561411
| 0.559283
| 0.55381
| 0.540839
| 0
| 0.267281
| 0.008014
| 13,227
| 21
| 11,768
| 629.857143
| 0.484795
| 0.013835
| 0
| 0
| 0
| 0.333333
| 0.972001
| 0.97016
| 0
| 0
| 0.000614
| 0
| 0
| 1
| 0.222222
| false
| 0
| 0.111111
| 0
| 0.333333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
e4c45808c0909f708262d912bbaf14ea9e7f750a
| 102
|
py
|
Python
|
social_auth/backends/pipeline/misc.py
|
merutak/django-social-auth
|
3a6e4414da0e969fcaf625a891852a3b2d7627c0
|
[
"BSD-2-Clause",
"BSD-3-Clause"
] | 863
|
2015-01-01T00:42:07.000Z
|
2022-03-30T02:47:18.000Z
|
social_auth/backends/pipeline/misc.py
|
merutak/django-social-auth
|
3a6e4414da0e969fcaf625a891852a3b2d7627c0
|
[
"BSD-2-Clause",
"BSD-3-Clause"
] | 101
|
2015-01-08T00:28:16.000Z
|
2022-03-07T03:11:19.000Z
|
social_auth/backends/pipeline/misc.py
|
merutak/django-social-auth
|
3a6e4414da0e969fcaf625a891852a3b2d7627c0
|
[
"BSD-2-Clause",
"BSD-3-Clause"
] | 256
|
2015-01-02T16:55:36.000Z
|
2022-03-04T11:10:47.000Z
|
from social.pipeline.partial import save_status_to_session
save_status_to_session # placate pyflakes
| 34
| 58
| 0.882353
| 15
| 102
| 5.6
| 0.733333
| 0.238095
| 0.285714
| 0.452381
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.088235
| 102
| 2
| 59
| 51
| 0.903226
| 0.156863
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
9038dc7214df96da4be3e24273ae6bbc4a171d64
| 11,299
|
py
|
Python
|
silo/benchmarks/results/istc3-8-16-13_multipart_skew.py
|
anshsarkar/TailBench
|
25845756aee9a892229c25b681051591c94daafd
|
[
"MIT"
] | 274
|
2015-01-23T16:24:09.000Z
|
2022-02-22T03:16:14.000Z
|
silo/benchmarks/results/istc3-8-16-13_multipart_skew.py
|
anshsarkar/TailBench
|
25845756aee9a892229c25b681051591c94daafd
|
[
"MIT"
] | 3
|
2015-03-17T11:52:36.000Z
|
2019-07-22T23:04:25.000Z
|
silo/benchmarks/results/istc3-8-16-13_multipart_skew.py
|
anshsarkar/TailBench
|
25845756aee9a892229c25b681051591c94daafd
|
[
"MIT"
] | 94
|
2015-01-07T06:55:36.000Z
|
2022-01-22T08:14:15.000Z
|
RESULTS = [({'disable_gc': False, 'scale_factor': 4, 'db': 'kvdb-st', 'par_load': False, 'threads': 1, 'log_compress': False, 'bench_opts': '--workload-mix 100,0,0,0,0 --enable-separate-tree-per-partition --enable-partition-locks ', 'log_fake_writes': False, 'retry': False, 'log_nofsync': False, 'name': 'multipart:skew', 'bench': 'tpcc', 'numa_memory': '16G', 'persist': False, 'disable_snapshots': False}, [(43468.0, 43468.0, 0.0229553, 0.0, 0.0), (42505.9, 42505.9, 0.0234737, 0.0, 0.0), (43154.0, 43154.0, 0.0231245, 0.0, 0.0)]), ({'disable_gc': False, 'scale_factor': 4, 'db': 'ndb-proto2', 'par_load': False, 'threads': 1, 'log_compress': False, 'bench_opts': '--workload-mix 100,0,0,0,0', 'log_fake_writes': False, 'retry': False, 'log_nofsync': False, 'name': 'multipart:skew', 'bench': 'tpcc', 'numa_memory': '4G', 'persist': False, 'disable_snapshots': False}, [(30754.7, 30754.7, 0.0324678, 0.0, 0.0), (30117.4, 30117.4, 0.0331507, 0.0, 0.0), (29800.0, 29800.0, 0.0335089, 0.0, 0.0)]), ({'disable_gc': False, 'scale_factor': 4, 'db': 'ndb-proto2', 'par_load': False, 'threads': 2, 'log_compress': False, 'bench_opts': '--workload-mix 100,0,0,0,0', 'log_fake_writes': False, 'retry': False, 'log_nofsync': False, 'name': 'multipart:skew', 'bench': 'tpcc', 'numa_memory': '8G', 'persist': False, 'disable_snapshots': False}, [(61867.5, 61867.5, 0.0322773, 0.0, 3.19998), (62408.2, 62408.2, 0.0319925, 0.0, 3.06664), (61668.9, 61668.9, 0.0323773, 0.0, 2.96665)]), ({'disable_gc': False, 'scale_factor': 4, 'db': 'ndb-proto2', 'par_load': False, 'threads': 4, 'log_compress': False, 'bench_opts': '--workload-mix 100,0,0,0,0', 'log_fake_writes': False, 'retry': False, 'log_nofsync': False, 'name': 'multipart:skew', 'bench': 'tpcc', 'numa_memory': '16G', 'persist': False, 'disable_snapshots': False}, [(116912.0, 116912.0, 0.0341545, 0.0, 17.1332), (113059.0, 113059.0, 0.0353051, 0.0, 17.3165), (115758.0, 115758.0, 0.0344966, 0.0, 17.2999)]), ({'disable_gc': False, 'scale_factor': 4, 
'db': 'ndb-proto2', 'par_load': False, 'threads': 6, 'log_compress': False, 'bench_opts': '--workload-mix 100,0,0,0,0', 'log_fake_writes': False, 'retry': False, 'log_nofsync': False, 'name': 'multipart:skew', 'bench': 'tpcc', 'numa_memory': '24G', 'persist': False, 'disable_snapshots': False}, [(159447.0, 159447.0, 0.0350288, 0.0, 9514.06), (159379.0, 159379.0, 0.0350259, 0.0, 9480.49), (159677.0, 159677.0, 0.0349627, 0.0, 9492.04)]), ({'disable_gc': False, 'scale_factor': 4, 'db': 'ndb-proto2', 'par_load': False, 'threads': 8, 'log_compress': False, 'bench_opts': '--workload-mix 100,0,0,0,0', 'log_fake_writes': False, 'retry': False, 'log_nofsync': False, 'name': 'multipart:skew', 'bench': 'tpcc', 'numa_memory': '32G', 'persist': False, 'disable_snapshots': False}, [(199853.0, 199853.0, 0.0359222, 0.0, 18743.0), (198260.0, 198260.0, 0.0362176, 0.0, 18613.7), (197791.0, 197791.0, 0.0362914, 0.0, 18554.3)]), ({'disable_gc': False, 'scale_factor': 4, 'db': 'ndb-proto2', 'par_load': False, 'threads': 10, 'log_compress': False, 'bench_opts': '--workload-mix 100,0,0,0,0', 'log_fake_writes': False, 'retry': False, 'log_nofsync': False, 'name': 'multipart:skew', 'bench': 'tpcc', 'numa_memory': '40G', 'persist': False, 'disable_snapshots': False}, [(179239.0, 179239.0, 0.0465518, 0.0, 27440.2), (175376.0, 175376.0, 0.0474159, 0.0, 27047.5), (179595.0, 179595.0, 0.0464574, 0.0, 27482.7)]), ({'disable_gc': False, 'scale_factor': 4, 'db': 'ndb-proto2', 'par_load': False, 'threads': 12, 'log_compress': False, 'bench_opts': '--workload-mix 100,0,0,0,0', 'log_fake_writes': False, 'retry': False, 'log_nofsync': False, 'name': 'multipart:skew', 'bench': 'tpcc', 'numa_memory': '48G', 'persist': False, 'disable_snapshots': False}, [(137867.0, 137867.0, 0.0666319, 0.0, 28550.1), (137801.0, 137801.0, 0.0662968, 0.0, 28607.6), (140053.0, 140053.0, 0.0655534, 0.0, 29200.2)]), ({'disable_gc': False, 'scale_factor': 4, 'db': 'ndb-proto2', 'par_load': False, 'threads': 16, 'log_compress': 
False, 'bench_opts': '--workload-mix 100,0,0,0,0', 'log_fake_writes': False, 'retry': False, 'log_nofsync': False, 'name': 'multipart:skew', 'bench': 'tpcc', 'numa_memory': '64G', 'persist': False, 'disable_snapshots': False}, [(110439.0, 110439.0, 0.0982468, 0.0, 35981.9), (110248.0, 110248.0, 0.0982313, 0.0, 35980.5), (111519.0, 111519.0, 0.0973957, 0.0, 36321.1)]), ({'disable_gc': False, 'scale_factor': 4, 'db': 'ndb-proto2', 'par_load': False, 'threads': 20, 'log_compress': False, 'bench_opts': '--workload-mix 100,0,0,0,0', 'log_fake_writes': False, 'retry': False, 'log_nofsync': False, 'name': 'multipart:skew', 'bench': 'tpcc', 'numa_memory': '80G', 'persist': False, 'disable_snapshots': False}, [(58960.5, 58960.5, 0.189055, 0.0, 29293.0), (58409.4, 58409.4, 0.18975, 0.0, 28755.0), (56021.4, 56021.4, 0.199431, 0.0, 27870.6)]), ({'disable_gc': False, 'scale_factor': 4, 'db': 'ndb-proto2', 'par_load': False, 'threads': 24, 'log_compress': False, 'bench_opts': '--workload-mix 100,0,0,0,0', 'log_fake_writes': False, 'retry': False, 'log_nofsync': False, 'name': 'multipart:skew', 'bench': 'tpcc', 'numa_memory': '96G', 'persist': False, 'disable_snapshots': False}, [(48373.0, 48373.0, 0.249847, 0.0, 30793.9), (48672.1, 48672.1, 0.2476, 0.0, 31244.2), (48770.2, 48770.2, 0.246519, 0.0, 31328.5)]), ({'disable_gc': False, 'scale_factor': 4, 'db': 'ndb-proto2', 'par_load': False, 'threads': 28, 'log_compress': False, 'bench_opts': '--workload-mix 100,0,0,0,0', 'log_fake_writes': False, 'retry': False, 'log_nofsync': False, 'name': 'multipart:skew', 'bench': 'tpcc', 'numa_memory': '112G', 'persist': False, 'disable_snapshots': False}, [(27698.3, 27698.3, 0.429631, 0.0, 22725.1), (28482.3, 28482.3, 0.417004, 0.0, 23111.7), (27963.0, 27963.0, 0.427409, 0.0, 22980.8)])] + [({'disable_gc': False, 'scale_factor': 4, 'db': 'ndb-proto2', 'par_load': False, 'threads': 1, 'log_compress': False, 'bench_opts': '--workload-mix 100,0,0,0,0 --new-order-fast-id-gen', 'log_fake_writes': 
False, 'retry': False, 'log_nofsync': False, 'name': 'multipart:skew', 'bench': 'tpcc', 'numa_memory': '4G', 'persist': False, 'disable_snapshots': False}, [(30943.9, 30943.9, 0.0322558, 0.0, 0.0), (31465.1, 31465.1, 0.0317251, 0.0, 0.0), (31357.4, 31357.4, 0.0318378, 0.0, 0.0)]), ({'disable_gc': False, 'scale_factor': 4, 'db': 'ndb-proto2', 'par_load': False, 'threads': 2, 'log_compress': False, 'bench_opts': '--workload-mix 100,0,0,0,0 --new-order-fast-id-gen', 'log_fake_writes': False, 'retry': False, 'log_nofsync': False, 'name': 'multipart:skew', 'bench': 'tpcc', 'numa_memory': '8G', 'persist': False, 'disable_snapshots': False}, [(62177.6, 62177.6, 0.0321066, 0.0, 2.89999), (62432.7, 62432.7, 0.0319747, 0.0, 2.94999), (62648.0, 62648.0, 0.0318621, 0.0, 3.38331)]), ({'disable_gc': False, 'scale_factor': 4, 'db': 'ndb-proto2', 'par_load': False, 'threads': 4, 'log_compress': False, 'bench_opts': '--workload-mix 100,0,0,0,0 --new-order-fast-id-gen', 'log_fake_writes': False, 'retry': False, 'log_nofsync': False, 'name': 'multipart:skew', 'bench': 'tpcc', 'numa_memory': '16G', 'persist': False, 'disable_snapshots': False}, [(118895.0, 118895.0, 0.0335782, 0.0, 17.9332), (117912.0, 117912.0, 0.0338585, 0.0, 17.7666), (118688.0, 118688.0, 0.0336305, 0.0, 17.6999)]), ({'disable_gc': False, 'scale_factor': 4, 'db': 'ndb-proto2', 'par_load': False, 'threads': 6, 'log_compress': False, 'bench_opts': '--workload-mix 100,0,0,0,0 --new-order-fast-id-gen', 'log_fake_writes': False, 'retry': False, 'log_nofsync': False, 'name': 'multipart:skew', 'bench': 'tpcc', 'numa_memory': '24G', 'persist': False, 'disable_snapshots': False}, [(171283.0, 171283.0, 0.0346632, 0.0, 890.859), (170433.0, 170433.0, 0.0348388, 0.0, 873.825), (171277.0, 171277.0, 0.0346634, 0.0, 892.642)]), ({'disable_gc': False, 'scale_factor': 4, 'db': 'ndb-proto2', 'par_load': False, 'threads': 8, 'log_compress': False, 'bench_opts': '--workload-mix 100,0,0,0,0 --new-order-fast-id-gen', 'log_fake_writes': 
False, 'retry': False, 'log_nofsync': False, 'name': 'multipart:skew', 'bench': 'tpcc', 'numa_memory': '32G', 'persist': False, 'disable_snapshots': False}, [(219415.0, 219415.0, 0.0359304, 0.0, 1712.53), (219942.0, 219942.0, 0.0358451, 0.0, 1716.32), (218084.0, 218084.0, 0.0361459, 0.0, 1706.6)]), ({'disable_gc': False, 'scale_factor': 4, 'db': 'ndb-proto2', 'par_load': False, 'threads': 10, 'log_compress': False, 'bench_opts': '--workload-mix 100,0,0,0,0 --new-order-fast-id-gen', 'log_fake_writes': False, 'retry': False, 'log_nofsync': False, 'name': 'multipart:skew', 'bench': 'tpcc', 'numa_memory': '40G', 'persist': False, 'disable_snapshots': False}, [(206212.0, 206212.0, 0.0472844, 0.0, 2656.74), (205616.0, 205616.0, 0.047406, 0.0, 2657.5), (206888.0, 206888.0, 0.0471279, 0.0, 2660.49)]), ({'disable_gc': False, 'scale_factor': 4, 'db': 'ndb-proto2', 'par_load': False, 'threads': 12, 'log_compress': False, 'bench_opts': '--workload-mix 100,0,0,0,0 --new-order-fast-id-gen', 'log_fake_writes': False, 'retry': False, 'log_nofsync': False, 'name': 'multipart:skew', 'bench': 'tpcc', 'numa_memory': '48G', 'persist': False, 'disable_snapshots': False}, [(169345.0, 169345.0, 0.0685426, 0.0, 2789.03), (170003.0, 170003.0, 0.0683121, 0.0, 2803.62), (169983.0, 169983.0, 0.0683306, 0.0, 2804.06)]), ({'disable_gc': False, 'scale_factor': 4, 'db': 'ndb-proto2', 'par_load': False, 'threads': 16, 'log_compress': False, 'bench_opts': '--workload-mix 100,0,0,0,0 --new-order-fast-id-gen', 'log_fake_writes': False, 'retry': False, 'log_nofsync': False, 'name': 'multipart:skew', 'bench': 'tpcc', 'numa_memory': '64G', 'persist': False, 'disable_snapshots': False}, [(149677.0, 149677.0, 0.10188, 0.0, 3785.16), (145768.0, 145768.0, 0.104591, 0.0, 3691.9), (149610.0, 149610.0, 0.101906, 0.0, 3791.05)]), ({'disable_gc': False, 'scale_factor': 4, 'db': 'ndb-proto2', 'par_load': False, 'threads': 20, 'log_compress': False, 'bench_opts': '--workload-mix 100,0,0,0,0 
--new-order-fast-id-gen', 'log_fake_writes': False, 'retry': False, 'log_nofsync': False, 'name': 'multipart:skew', 'bench': 'tpcc', 'numa_memory': '80G', 'persist': False, 'disable_snapshots': False}, [(83235.8, 83235.8, 0.22186, 0.0, 2809.16), (85806.4, 85806.4, 0.216144, 0.0, 2920.95), (88497.0, 88497.0, 0.209852, 0.0, 3036.64)]), ({'disable_gc': False, 'scale_factor': 4, 'db': 'ndb-proto2', 'par_load': False, 'threads': 24, 'log_compress': False, 'bench_opts': '--workload-mix 100,0,0,0,0 --new-order-fast-id-gen', 'log_fake_writes': False, 'retry': False, 'log_nofsync': False, 'name': 'multipart:skew', 'bench': 'tpcc', 'numa_memory': '96G', 'persist': False, 'disable_snapshots': False}, [(75410.7, 75410.7, 0.29186, 0.0, 3327.34), (81033.5, 81033.5, 0.271947, 0.0, 3516.03), (80770.0, 80770.0, 0.27314, 0.0, 3443.05)]), ({'disable_gc': False, 'scale_factor': 4, 'db': 'ndb-proto2', 'par_load': False, 'threads': 28, 'log_compress': False, 'bench_opts': '--workload-mix 100,0,0,0,0 --new-order-fast-id-gen', 'log_fake_writes': False, 'retry': False, 'log_nofsync': False, 'name': 'multipart:skew', 'bench': 'tpcc', 'numa_memory': '112G', 'persist': False, 'disable_snapshots': False}, [(52767.3, 52767.3, 0.47619, 0.0, 2730.02), (51759.0, 51759.0, 0.483218, 0.0, 2647.96), (51844.8, 51844.8, 0.483051, 0.0, 2664.81)])]
| 5,649.5
| 11,298
| 0.651474
| 1,858
| 11,299
| 3.837998
| 0.155005
| 0.056374
| 0.026925
| 0.01795
| 0.722339
| 0.722339
| 0.722339
| 0.718413
| 0.718413
| 0.718413
| 0
| 0.217136
| 0.095141
| 11,299
| 1
| 11,299
| 11,299
| 0.48034
| 0
| 0
| 0
| 0
| 12
| 0.421365
| 0.027702
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 11
|
5fc5b4a63bff7ee718a951bf97717d114ab969e5
| 71
|
py
|
Python
|
data/pytimecode/__init__.py
|
baku89/ShowreelViewer
|
67833f6eca628236368132b6da9f1b1626c7a104
|
[
"MIT"
] | null | null | null |
data/pytimecode/__init__.py
|
baku89/ShowreelViewer
|
67833f6eca628236368132b6da9f1b1626c7a104
|
[
"MIT"
] | null | null | null |
data/pytimecode/__init__.py
|
baku89/ShowreelViewer
|
67833f6eca628236368132b6da9f1b1626c7a104
|
[
"MIT"
] | null | null | null |
import pytimecode
class PyTimeCode(pytimecode.PyTimeCode):
pass
| 17.75
| 41
| 0.774648
| 7
| 71
| 7.857143
| 0.571429
| 0.727273
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.169014
| 71
| 4
| 42
| 17.75
| 0.932203
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.333333
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 7
|
39681dbcfadb522a0460fc0e8271f108fc8b7802
| 435
|
py
|
Python
|
appdir/app/views.py
|
oriyao/FTP-Flask
|
78442a2e2910c1330c6b26130dd0d41296f10136
|
[
"MIT"
] | null | null | null |
appdir/app/views.py
|
oriyao/FTP-Flask
|
78442a2e2910c1330c6b26130dd0d41296f10136
|
[
"MIT"
] | null | null | null |
appdir/app/views.py
|
oriyao/FTP-Flask
|
78442a2e2910c1330c6b26130dd0d41296f10136
|
[
"MIT"
] | null | null | null |
from flask import render_template, request, url_for, redirect
from app import app
@app.route('/', methods=['GET', 'POST'])
def index():
return render_template('index.html')
@app.route('/demolove/')
def demolove():
return render_template('./demo/love.html')
@app.route('/oriyao/')
def oriyao():
return render_template('./demo/oriyao.html')
@app.route('/item/')
def item():
return render_template('./item/item.html')
| 22.894737
| 61
| 0.685057
| 58
| 435
| 5.034483
| 0.396552
| 0.239726
| 0.273973
| 0.164384
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.121839
| 435
| 19
| 62
| 22.894737
| 0.764398
| 0
| 0
| 0
| 0
| 0
| 0.211009
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.285714
| true
| 0
| 0.142857
| 0.285714
| 0.714286
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
3987927d5c6cbb98b89e98149c6546c8ef438767
| 146
|
py
|
Python
|
sepal_ui/reclassify/__init__.py
|
12rambau/sepal_ui
|
40bccaa72b477bc755f9d5e703ee55ec4592a62b
|
[
"MIT"
] | 4
|
2020-10-20T14:11:18.000Z
|
2022-03-21T08:12:56.000Z
|
sepal_ui/reclassify/__init__.py
|
12rambau/sepal_ui
|
40bccaa72b477bc755f9d5e703ee55ec4592a62b
|
[
"MIT"
] | 361
|
2020-08-07T11:37:03.000Z
|
2022-03-30T09:41:25.000Z
|
sepal_ui/reclassify/__init__.py
|
12rambau/sepal_ui
|
40bccaa72b477bc755f9d5e703ee55ec4592a62b
|
[
"MIT"
] | 1
|
2021-11-02T13:58:45.000Z
|
2021-11-02T13:58:45.000Z
|
from .reclassify_view import *
from .reclassify_model import *
from .reclassify_tile import *
from .table_view import *
from .parameters import *
| 24.333333
| 31
| 0.794521
| 19
| 146
| 5.894737
| 0.421053
| 0.357143
| 0.25
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.136986
| 146
| 5
| 32
| 29.2
| 0.888889
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
39a488d6c23b984148c03f1f3a7a86a30404129b
| 13,470
|
py
|
Python
|
dataTransformation/py/constructFamilygraph/constructFamilyGraph_step2.py
|
ivozandhuis/dwarsliggers
|
d2049cc1a1d0d9764402f0bd66be77eeaddd1afb
|
[
"CC0-1.0"
] | null | null | null |
dataTransformation/py/constructFamilygraph/constructFamilyGraph_step2.py
|
ivozandhuis/dwarsliggers
|
d2049cc1a1d0d9764402f0bd66be77eeaddd1afb
|
[
"CC0-1.0"
] | null | null | null |
dataTransformation/py/constructFamilygraph/constructFamilyGraph_step2.py
|
ivozandhuis/dwarsliggers
|
d2049cc1a1d0d9764402f0bd66be77eeaddd1afb
|
[
"CC0-1.0"
] | null | null | null |
#! /usr/bin/env python3
# Python3 script to construct familyrelations
import csv
# read personlist
lijst = []
with open('huwelijksakten_personen.csv', newline='') as infile:
reader = csv.DictReader(infile)
for inn in reader:
lijst.append(inn)
# calculate network
edgelist = []
for A in lijst:
for B in lijst:
if (A['sleutel'] == B['sleutel'] and \
A['my_medewerkersnummer'] != B['my_medewerkersnummer']):
# vader-zoon
if A['relatie'] == 'ouders1' and B['relatie'] == 'zelf':
edgelist.append([A['my_medewerkersnummer'], B['my_medewerkersnummer'], 'zoon_van'])
if A['relatie'] == 'ouders2' and B['relatie'] == 'zelf':
edgelist.append([A['my_medewerkersnummer'], B['my_medewerkersnummer'], 'zoon_van'])
if A['relatie'] == 'zelf' and B['relatie'] == 'ouders1':
edgelist.append([A['my_medewerkersnummer'], B['my_medewerkersnummer'], 'vader_van'])
if A['relatie'] == 'zelf' and B['relatie'] == 'ouders2':
edgelist.append([A['my_medewerkersnummer'], B['my_medewerkersnummer'], 'vader_van'])
# schoonvader-schoonzoon
if A['relatie'] == 'schoonouders1' and B['relatie'] == 'zelf':
edgelist.append([A['my_medewerkersnummer'], B['my_medewerkersnummer'], 'schoonzoon_van'])
if A['relatie'] == 'schoonouders2' and B['relatie'] == 'zelf':
edgelist.append([A['my_medewerkersnummer'], B['my_medewerkersnummer'], 'schoonzoon_van'])
if A['relatie'] == 'zelf' and B['relatie'] == 'schoonouders1':
edgelist.append([A['my_medewerkersnummer'], B['my_medewerkersnummer'], 'schoonvader_van'])
if A['relatie'] == 'zelf' and B['relatie'] == 'schoonouders2':
edgelist.append([A['my_medewerkersnummer'], B['my_medewerkersnummer'], 'schoonvader_van'])
# broers
if A['relatie'] == 'ouders1' and B['relatie'] == 'ouders1':
edgelist.append([A['my_medewerkersnummer'], B['my_medewerkersnummer'], 'broer_van'])
if A['relatie'] == 'ouders2' and B['relatie'] == 'ouders2':
edgelist.append([A['my_medewerkersnummer'], B['my_medewerkersnummer'], 'broer_van'])
if A['relatie'] == 'ouders2' and B['relatie'] == 'ouders1':
edgelist.append([A['my_medewerkersnummer'], B['my_medewerkersnummer'], 'broer_van'])
if A['relatie'] == 'ouders1' and B['relatie'] == 'ouders2':
edgelist.append([A['my_medewerkersnummer'], B['my_medewerkersnummer'], 'broer_van'])
# zwagers
if A['relatie'] == 'schoonouders1' and B['relatie'] == 'ouders1':
edgelist.append([A['my_medewerkersnummer'], B['my_medewerkersnummer'], 'zwager_van'])
if A['relatie'] == 'schoonouders2' and B['relatie'] == 'ouders2':
edgelist.append([A['my_medewerkersnummer'], B['my_medewerkersnummer'], 'zwager_van'])
if A['relatie'] == 'schoonouders2' and B['relatie'] == 'ouders1':
edgelist.append([A['my_medewerkersnummer'], B['my_medewerkersnummer'], 'zwager_van'])
if A['relatie'] == 'schoonouders1' and B['relatie'] == 'ouders2':
edgelist.append([A['my_medewerkersnummer'], B['my_medewerkersnummer'], 'zwager_van'])
if A['relatie'] == 'ouders1' and B['relatie'] == 'schoonouders1':
edgelist.append([A['my_medewerkersnummer'], B['my_medewerkersnummer'], 'zwager_van'])
if A['relatie'] == 'ouders2' and B['relatie'] == 'schoonouders2':
edgelist.append([A['my_medewerkersnummer'], B['my_medewerkersnummer'], 'zwager_van'])
if A['relatie'] == 'ouders2' and B['relatie'] == 'schoonouders1':
edgelist.append([A['my_medewerkersnummer'], B['my_medewerkersnummer'], 'zwager_van'])
if A['relatie'] == 'ouders1' and B['relatie'] == 'schoonouders2':
edgelist.append([A['my_medewerkersnummer'], B['my_medewerkersnummer'], 'zwager_van'])
if A['relatie'] == 'schoonouders1' and B['relatie'] == 'schoonouders1':
edgelist.append([A['my_medewerkersnummer'], B['my_medewerkersnummer'], 'zwager_van'])
if A['relatie'] == 'schoonouders2' and B['relatie'] == 'schoonouders2':
edgelist.append([A['my_medewerkersnummer'], B['my_medewerkersnummer'], 'zwager_van'])
if A['relatie'] == 'schoonouders2' and B['relatie'] == 'schoonouders1':
edgelist.append([A['my_medewerkersnummer'], B['my_medewerkersnummer'], 'zwager_van'])
if A['relatie'] == 'schoonouders1' and B['relatie'] == 'schoonouders2':
edgelist.append([A['my_medewerkersnummer'], B['my_medewerkersnummer'], 'zwager_van'])
# ooms
# let op! ook vaders worden als ooms gedetecteerd
if A['relatie'] == 'ouders1' and B['relatie'] == 'oudersVader':
edgelist.append([A['my_medewerkersnummer'], B['my_medewerkersnummer'], 'oom_van'])
if A['relatie'] == 'ouders1' and B['relatie'] == 'oudersMoeder':
edgelist.append([A['my_medewerkersnummer'], B['my_medewerkersnummer'], 'oom_van'])
if A['relatie'] == 'ouders1' and B['relatie'] == 'oudersSchoonvader':
edgelist.append([A['my_medewerkersnummer'], B['my_medewerkersnummer'], 'oom_van'])
if A['relatie'] == 'ouders1' and B['relatie'] == 'oudersSchoonmoeder':
edgelist.append([A['my_medewerkersnummer'], B['my_medewerkersnummer'], 'oom_van'])
if A['relatie'] == 'ouders2' and B['relatie'] == 'oudersVader':
edgelist.append([A['my_medewerkersnummer'], B['my_medewerkersnummer'], 'oom_van'])
if A['relatie'] == 'ouders2' and B['relatie'] == 'oudersMoeder':
edgelist.append([A['my_medewerkersnummer'], B['my_medewerkersnummer'], 'oom_van'])
if A['relatie'] == 'ouders2' and B['relatie'] == 'oudersSchoonvader':
edgelist.append([A['my_medewerkersnummer'], B['my_medewerkersnummer'], 'oom_van'])
if A['relatie'] == 'ouders2' and B['relatie'] == 'oudersSchoonmoeder':
edgelist.append([A['my_medewerkersnummer'], B['my_medewerkersnummer'], 'oom_van'])
if A['relatie'] == 'schoonouders1' and B['relatie'] == 'oudersVader':
edgelist.append([A['my_medewerkersnummer'], B['my_medewerkersnummer'], 'oom_van'])
if A['relatie'] == 'schoonouders1' and B['relatie'] == 'oudersMoeder':
edgelist.append([A['my_medewerkersnummer'], B['my_medewerkersnummer'], 'oom_van'])
if A['relatie'] == 'schoonouders1' and B['relatie'] == 'oudersSchoonvader':
edgelist.append([A['my_medewerkersnummer'], B['my_medewerkersnummer'], 'oom_van'])
if A['relatie'] == 'schoonouders1' and B['relatie'] == 'oudersSchoonmoeder':
edgelist.append([A['my_medewerkersnummer'], B['my_medewerkersnummer'], 'oom_van'])
if A['relatie'] == 'schoonouders2' and B['relatie'] == 'oudersVader':
edgelist.append([A['my_medewerkersnummer'], B['my_medewerkersnummer'], 'oom_van'])
if A['relatie'] == 'schoonouders2' and B['relatie'] == 'oudersMoeder':
edgelist.append([A['my_medewerkersnummer'], B['my_medewerkersnummer'], 'oom_van'])
if A['relatie'] == 'schoonouders2' and B['relatie'] == 'oudersSchoonvader':
edgelist.append([A['my_medewerkersnummer'], B['my_medewerkersnummer'], 'oom_van'])
if A['relatie'] == 'schoonouders2' and B['relatie'] == 'oudersSchoonmoeder':
edgelist.append([A['my_medewerkersnummer'], B['my_medewerkersnummer'], 'oom_van'])
# oomzeggers
# let op! ook zonen worden als oomzeggers gedetecteerd
if A['relatie'] == 'oudersVader' and B['relatie'] == 'ouders1':
edgelist.append([A['my_medewerkersnummer'], B['my_medewerkersnummer'], 'oomzegger_van'])
if A['relatie'] == 'oudersMoeder' and B['relatie'] == 'ouders1':
edgelist.append([A['my_medewerkersnummer'], B['my_medewerkersnummer'], 'oomzegger_van'])
if A['relatie'] == 'oudersSchoonvader' and B['relatie'] == 'ouders1':
edgelist.append([A['my_medewerkersnummer'], B['my_medewerkersnummer'], 'oomzegger_van'])
if A['relatie'] == 'oudersSchoonmoeder' and B['relatie'] == 'ouders1':
edgelist.append([A['my_medewerkersnummer'], B['my_medewerkersnummer'], 'oomzegger_van'])
if A['relatie'] == 'oudersVader' and B['relatie'] == 'ouders2':
edgelist.append([A['my_medewerkersnummer'], B['my_medewerkersnummer'], 'oomzegger_van'])
if A['relatie'] == 'oudersMoeder' and B['relatie'] == 'ouders2':
edgelist.append([A['my_medewerkersnummer'], B['my_medewerkersnummer'], 'oomzegger_van'])
if A['relatie'] == 'oudersSchoonvader' and B['relatie'] == 'ouders2':
edgelist.append([A['my_medewerkersnummer'], B['my_medewerkersnummer'], 'oomzegger_van'])
if A['relatie'] == 'oudersSchoonmoeder' and B['relatie'] == 'ouders2':
edgelist.append([A['my_medewerkersnummer'], B['my_medewerkersnummer'], 'oomzegger_van'])
if A['relatie'] == 'oudersVader' and B['relatie'] == 'schoonouders1':
edgelist.append([A['my_medewerkersnummer'], B['my_medewerkersnummer'], 'oomzegger_van'])
if A['relatie'] == 'oudersMoeder' and B['relatie'] == 'schoonouders1':
edgelist.append([A['my_medewerkersnummer'], B['my_medewerkersnummer'], 'oomzegger_van'])
if A['relatie'] == 'oudersSchoonvader' and B['relatie'] == 'schoonouders1':
edgelist.append([A['my_medewerkersnummer'], B['my_medewerkersnummer'], 'oomzegger_van'])
if A['relatie'] == 'oudersSchoonmoeder' and B['relatie'] == 'schoonouders1':
edgelist.append([A['my_medewerkersnummer'], B['my_medewerkersnummer'], 'oomzegger_van'])
if A['relatie'] == 'oudersVader' and B['relatie'] == 'schoonouders2':
edgelist.append([A['my_medewerkersnummer'], B['my_medewerkersnummer'], 'oomzegger_van'])
if A['relatie'] == 'oudersMoeder' and B['relatie'] == 'schoonouders2':
edgelist.append([A['my_medewerkersnummer'], B['my_medewerkersnummer'], 'oomzegger_van'])
if A['relatie'] == 'oudersSchoonvader' and B['relatie'] == 'schoonouders2':
edgelist.append([A['my_medewerkersnummer'], B['my_medewerkersnummer'], 'oomzegger_van'])
if A['relatie'] == 'oudersSchoonmoeder' and B['relatie'] == 'schoonouders2':
edgelist.append([A['my_medewerkersnummer'], B['my_medewerkersnummer'], 'oomzegger_van'])
# neven
# let op! ook broers worden als neven gedetecteerd!
if A['relatie'] == 'oudersVader' and B['relatie'] == 'oudersVader':
edgelist.append([A['my_medewerkersnummer'], B['my_medewerkersnummer'], 'neef_van'])
if A['relatie'] == 'oudersVader' and B['relatie'] == 'oudersMoeder':
edgelist.append([A['my_medewerkersnummer'], B['my_medewerkersnummer'], 'neef_van'])
if A['relatie'] == 'oudersMoeder' and B['relatie'] == 'oudersVader':
edgelist.append([A['my_medewerkersnummer'], B['my_medewerkersnummer'], 'neef_van'])
if A['relatie'] == 'oudersMoeder' and B['relatie'] == 'oudersMoeder':
edgelist.append([A['my_medewerkersnummer'], B['my_medewerkersnummer'], 'neef_van'])
# ontdubbelen
new_edgelist = []
for edge in edgelist:
if edge not in new_edgelist:
new_edgelist.append(edge)
edgelist = new_edgelist
# verwijderen neven die al broer zijn; ooms die al (schoon)vader zijn; oomzeggers die al (schoon)zonen zijn
new_edgelist = []
for edgeA in edgelist:
keep = 1
if edgeA[2] == 'neef_van':
for edgeB in edgelist:
if edgeB[0] == edgeA[0] and \
edgeB[1] == edgeA[1] and \
edgeB[2] == 'broer_van':
keep = 0
if edgeA[2] == 'oomzegger_van':
for edgeB in edgelist:
if edgeB[0] == edgeA[0] and \
edgeB[1] == edgeA[1] and \
edgeB[2] == 'zoon_van':
keep = 0
if edgeA[2] == 'oomzegger_van':
for edgeB in edgelist:
if edgeB[0] == edgeA[0] and \
edgeB[1] == edgeA[1] and \
edgeB[2] == 'schoonzoon_van':
keep = 0
if edgeA[2] == 'oom_van':
for edgeB in edgelist:
if edgeB[0] == edgeA[0] and \
edgeB[1] == edgeA[1] and \
edgeB[2] == 'vader_van':
keep = 0
if edgeA[2] == 'oom_van':
for edgeB in edgelist:
if edgeB[0] == edgeA[0] and \
edgeB[1] == edgeA[1] and \
edgeB[2] == 'schoonvader_van':
keep = 0
if keep:
new_edgelist.append(edgeA)
edgelist = new_edgelist
# print/write etc.
resultfile = open('familynetwork.csv', 'w')
resultwriter = csv.writer(resultfile, delimiter=',', quotechar='"')
resultwriter.writerow(['source', 'target', 'famrel']) # header
for edge in edgelist:
resultwriter.writerow(edge)
resultfile.close()
| 61.227273
| 107
| 0.588048
| 1,383
| 13,470
| 5.582791
| 0.072307
| 0.30022
| 0.158011
| 0.165911
| 0.880715
| 0.880715
| 0.880715
| 0.862324
| 0.844191
| 0.844191
| 0
| 0.010722
| 0.238382
| 13,470
| 219
| 108
| 61.506849
| 0.741885
| 0.034447
| 0
| 0.505618
| 0
| 0
| 0.413459
| 0.002079
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.005618
| 0
| 0.005618
| 0
| 0
| 0
| 0
| null | 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
ffc08d33ad682ce77faf82ebb19bcf3dda6d49db
| 3,612
|
py
|
Python
|
Lib/test/test_compiler/test_static/slots_with_default.py
|
isabella232/cinder-1
|
428669a9a925287f192ab361226e5a8ca3fb74d9
|
[
"CNRI-Python-GPL-Compatible"
] | null | null | null |
Lib/test/test_compiler/test_static/slots_with_default.py
|
isabella232/cinder-1
|
428669a9a925287f192ab361226e5a8ca3fb74d9
|
[
"CNRI-Python-GPL-Compatible"
] | null | null | null |
Lib/test/test_compiler/test_static/slots_with_default.py
|
isabella232/cinder-1
|
428669a9a925287f192ab361226e5a8ca3fb74d9
|
[
"CNRI-Python-GPL-Compatible"
] | null | null | null |
from .common import StaticTestBase
class SlotsWithDefaultTests(StaticTestBase):
def test_access_from_instance_and_class(self) -> None:
codestr = """
class C:
x: int = 42
def f():
c = C()
return (C.x, c.x)
"""
with self.in_module(codestr) as mod:
self.assertNotInBytecode(mod.f, "LOAD_FIELD")
self.assertEqual(mod.f(), (42, 42))
def test_nonstatic_access_from_instance_and_class(self) -> None:
codestr = """
class C:
x: int = 42
"""
with self.in_module(codestr) as mod:
C = mod.C
self.assertEqual(C.x, 42)
self.assertEqual(C().x, 42)
def test_write_from_instance(self) -> None:
codestr = """
class C:
x: int = 42
def f():
c = C()
c.x = 21
return (C.x, c.x)
"""
with self.in_module(codestr) as mod:
self.assertNotInBytecode(mod.f, "LOAD_FIELD")
self.assertEqual(mod.f(), (42, 21))
def test_nonstatic_write_from_instance(self) -> None:
codestr = """
class C:
x: int = 42
"""
with self.in_module(codestr) as mod:
C = mod.C
c = C()
c.x = 21
self.assertEqual(C.x, 42)
self.assertEqual(c.x, 21)
def test_write_from_class(self) -> None:
codestr = """
class C:
x: int = 42
def f():
c = C()
C.x = 21
return (C.x, c.x)
"""
with self.in_module(codestr) as mod:
self.assertNotInBytecode(mod.f, "LOAD_FIELD")
self.assertEqual(mod.f(), (21, 21))
def test_nonstatic_write_from_class(self) -> None:
codestr = """
class C:
x: int = 42
"""
with self.in_module(codestr) as mod:
C = mod.C
c = C()
C.x = 21
self.assertEqual(C.x, 21)
self.assertEqual(c.x, 21)
def test_write_to_class_after_instance(self) -> None:
codestr = """
class C:
x: int = 42
def f():
c = C()
c.x = 36 # This write will get clobbered when the class gets patched below.
C.x = 21
return (C.x, c.x)
"""
with self.in_module(codestr) as mod:
self.assertNotInBytecode(mod.f, "LOAD_FIELD")
self.assertEqual(mod.f(), (21, 21))
def test_inheritance(self) -> None:
codestr = """
class C:
x: int = 42
class D(C):
pass
def f():
d = D()
return (D.x, d.x)
"""
with self.in_module(codestr) as mod:
self.assertEqual(mod.f(), (42, 42))
def test_inheritance_with_override(self) -> None:
codestr = """
class C:
x: int = 1
class D(C):
x: int = 3
def f():
c = C()
c.x = 2
d = D()
return (C.x, c.x, D.x, d.x)
"""
with self.in_module(codestr) as mod:
self.assertEqual(mod.f(), (1, 2, 3, 3))
def test_call(self) -> None:
codestr = """
class C:
x: int = 1
class D(C):
pass
def f(c: C):
return c.x
"""
with self.in_module(codestr) as mod:
d = mod.D()
self.assertEqual(mod.f(d), 1)
d.x = 2
self.assertEqual(mod.f(d), 2)
| 25.258741
| 87
| 0.452935
| 448
| 3,612
| 3.542411
| 0.116071
| 0.044108
| 0.034657
| 0.126024
| 0.851922
| 0.819156
| 0.785129
| 0.777568
| 0.721487
| 0.667927
| 0
| 0.033988
| 0.42165
| 3,612
| 142
| 88
| 25.43662
| 0.725706
| 0
| 0
| 0.77686
| 0
| 0
| 0.363234
| 0
| 0
| 0
| 0
| 0
| 0.14876
| 1
| 0.082645
| false
| 0.016529
| 0.008264
| 0
| 0.157025
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0854125e9c92ed986ea63287c6fb360cb74230f6
| 48
|
py
|
Python
|
Tests/fuzzing/corpus/test_1.py
|
psydox/Pyjion
|
ddf69db27b356710ff7db3c4ba9c3e0b55de1039
|
[
"MIT"
] | 1
|
2022-03-15T12:48:11.000Z
|
2022-03-15T12:48:11.000Z
|
Tests/fuzzing/corpus/test_1.py
|
psydox/Pyjion
|
ddf69db27b356710ff7db3c4ba9c3e0b55de1039
|
[
"MIT"
] | null | null | null |
Tests/fuzzing/corpus/test_1.py
|
psydox/Pyjion
|
ddf69db27b356710ff7db3c4ba9c3e0b55de1039
|
[
"MIT"
] | null | null | null |
def f():
x = 3.14
y = 3.14
z = x + y
| 12
| 13
| 0.3125
| 11
| 48
| 1.363636
| 0.636364
| 0.4
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.25
| 0.5
| 48
| 4
| 13
| 12
| 0.375
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0
| 0
| 0.25
| 0
| 1
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
f239a6ad47075f498819cb78c8a4b3accdd954b7
| 1,068
|
py
|
Python
|
Software/run.py
|
gkrish19/SIAM
|
1e530d4c070054045fc2e8e7fe4ce82a54755132
|
[
"MIT"
] | 4
|
2021-02-02T06:50:43.000Z
|
2022-01-29T12:25:32.000Z
|
Software/run.py
|
gkrish19/SIAM
|
1e530d4c070054045fc2e8e7fe4ce82a54755132
|
[
"MIT"
] | null | null | null |
Software/run.py
|
gkrish19/SIAM
|
1e530d4c070054045fc2e8e7fe4ce82a54755132
|
[
"MIT"
] | 2
|
2021-07-07T19:58:40.000Z
|
2022-01-27T22:51:20.000Z
|
import os
os.system('python top_file.py --model_type=DenseNet-BC --dataset=C100+ --saves --logs --renew-logs --train --test')
os.system('python top_file.py --model_type=DenseNet-BC --dataset=C100+ --saves --logs --renew-logs --train --test --quant --act_width=8 --wgt_width=8')
# os.system('python top_file.py --model_type=VGG19 --dataset=SVHN --saves --logs --renew-logs --vat --train --test --stddevVar=0.1 --quant --act_width=8 --wgt_width=8')
dev = [0.1, 0.2, 0.3, 0.4, 0.5]
adc = [4, 5, 6, 7, 8]
xbar = [64, 128, 256, 512]
for i in dev:
os.system('python top_file_small.py --model_type=DenseNet-BC --dataset=C100+ --saves --logs --renew-logs --vat --train --test --stddevVar=%.1f --quant --act_width=8 --wgt_width=8' %(i))
for j in xbar:
j = int(j)
for k in adc:
k= int(k)
os.system('python top_file_small.py --model_type=DenseNet-BC --dataset=C100+ --saves --logs --renew-logs --vat --train --test --stddevVar=%.1f --quant --act_width=8 --wgt_width=8 --rram --xbar_size=%d --adc_bits=%d' %(i,j,k))
| 66.75
| 238
| 0.625468
| 182
| 1,068
| 3.549451
| 0.296703
| 0.074303
| 0.108359
| 0.131579
| 0.806502
| 0.806502
| 0.806502
| 0.735294
| 0.625387
| 0.625387
| 0
| 0.058036
| 0.161049
| 1,068
| 15
| 239
| 71.2
| 0.662946
| 0.155431
| 0
| 0
| 0
| 0.307692
| 0.689266
| 0.108475
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.076923
| 0
| 0.076923
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
f23e2a22e9f8a24e64869407d27eb297af2f2247
| 11,066
|
py
|
Python
|
plot/retrieval.py
|
wyxzou/Federated-Learning-PyTorch
|
753b5b04ff6628b7bc2f54aadeaa07d67b719014
|
[
"MIT"
] | null | null | null |
plot/retrieval.py
|
wyxzou/Federated-Learning-PyTorch
|
753b5b04ff6628b7bc2f54aadeaa07d67b719014
|
[
"MIT"
] | null | null | null |
plot/retrieval.py
|
wyxzou/Federated-Learning-PyTorch
|
753b5b04ff6628b7bc2f54aadeaa07d67b719014
|
[
"MIT"
] | null | null | null |
import pdb
import pickle
import numpy as np
import matplotlib.pyplot as plt
from matplotlib.pyplot import figure
def batch_to_epoch(arr, num, ignore_first=False):
    """Collapse a per-batch series into a per-epoch series of maxima.

    Each consecutive window of `num` batches yields one epoch value (its
    maximum); a trailing partial window is discarded. When `ignore_first`
    is set, the very first batch of the first epoch is excluded from that
    epoch's maximum (warm-up value).
    """
    if not arr:
        return []
    maxima = []
    start = 0
    while start + num <= len(arr):
        window = arr[start:start + num]
        if ignore_first and start == 0:
            window = arr[1:num]  # skip the warm-up batch
        maxima.append(max(window))
        start += num
    return maxima
def batch_to_epoch_avg(arr, num, ignore_first=False):
    """Collapse a per-batch series into a per-epoch series of means.

    Mirrors `batch_to_epoch` but averages each window of `num` batches
    instead of taking the maximum; a trailing partial window is discarded.
    When `ignore_first` is set, the first batch of the first epoch is
    excluded and that epoch averages the remaining num - 1 values.

    Fixes an off-by-one in the original: the non-ignore branches summed
    arr[i + 1: i + num] (num - 1 elements) while dividing by num, which is
    inconsistent with `batch_to_epoch`'s arr[i: i + num] windowing.
    """
    if len(arr) == 0:
        return []
    epoch_vals = []
    i = 0
    while i + num <= len(arr):
        if ignore_first and i == 0:
            # Drop the warm-up batch; average over the remaining num - 1.
            epoch_vals.append(sum(arr[i + 1: i + num]) / (num - 1))
        else:
            epoch_vals.append(sum(arr[i: i + num]) / num)
        i = i + num
    return epoch_vals
def batch_to_epoch_min(arr, n):
    """Collapse a per-batch series into per-epoch minima.

    Every full window of `n` consecutive batches contributes its minimum;
    a trailing partial window is discarded.
    """
    minima = []
    start = 0
    while start + n <= len(arr):
        minima.append(min(arr[start:start + n]))
        start += n
    return minima
def get_values(lrs, num_users=20, epochs=100, model = "mlp", dataset = "mnist", local_bs=10, numbers = [1], index=4):
    """Load and average a recorded metric for the DIR=0 and DIR=1 runs.

    For each of (lrs[0], DIR 0) and (lrs[1], DIR 1), loads every pickled
    repeat run listed in `numbers`, extracts column `index`, and averages
    across repeats. The two duplicated load loops of the original were
    collapsed into one loop over (lr, direction) pairs.

    Returns (epochs, [dir0_curve, dir1_curve]) where epochs is the longer
    of the two curve lengths. Note: `numbers` is never mutated, so the
    mutable default is harmless.
    """
    frac = "1.0"
    iid = 1
    topk = 0.001
    topk_d = 0.001
    template = ('../save/{}-{}/{}_{}_EPOCH[{}]_USERS[{}]_C[{}]_iid[{}]_B[{}]'
                '_OPT[{}]_LR[{}]_DIR[{}]_TOPK[{}]_TOPKD[{}]_NUM[{}].pkl')
    all_experiments = []
    for lr, direction in ((lrs[0], 0), (lrs[1], 1)):
        experiments = []
        for number in numbers:
            file_name = template.format(dataset, model, dataset, model, epochs, num_users, frac, iid,
                                        local_bs, "sparsetopk", lr, direction, topk, topk_d, number)
            with open(file_name, 'rb') as pickle_file:
                experiments.append(pickle.load(pickle_file))
        # Average the chosen metric column across repeat runs.
        all_experiments.append(np.mean(np.array(experiments)[:, index], axis=0))
    epochs = max(len(all_experiments[0]), len(all_experiments[1]))
    return epochs, all_experiments
def get_downlink(lrs, num_users=20, epochs=100, model = "mlp", dataset = "mnist", local_bs=10, numbers = [1], index=4, ignore_first=False, topk_d=0.001, directory=".."):
    """Load the DIR=1 runs, average column `index` across repeats, and
    reduce per-batch values to per-epoch maxima via `batch_to_epoch`.

    Only lrs[1] is used (the DIR=1 learning rate). Returns
    (num_epochs, per_epoch_values).
    """
    frac = "1.0"
    iid = 1
    topk = 0.001
    loaded = []
    for number in numbers:
        path = ('{}/save/{}-{}/{}_{}_EPOCH[{}]_USERS[{}]_C[{}]_iid[{}]_B[{}]'
                '_OPT[{}]_LR[{}]_DIR[{}]_TOPK[{}]_TOPKD[{}]_NUM[{}].pkl'
                .format(directory, dataset, model, dataset, model, epochs, num_users, frac, iid,
                        local_bs, "sparsetopk", lrs[1], 1, topk, topk_d, number))
        with open(path, 'rb') as handle:
            loaded.append(pickle.load(handle))
    per_batch = np.mean(np.array(loaded)[:, index], axis=0)
    # Batches-per-epoch inferred from a fixed 100-epoch recording layout.
    xi_values = batch_to_epoch(per_batch, len(per_batch) // 100, ignore_first)
    return len(xi_values), xi_values
def get_downlink_batches(lrs, num_users=20, epochs=100, model = "mlp", dataset = "mnist", local_bs=10, numbers = [1], index=4, ignore_first=False, topk_d=0.001):
    """Load the DIR=1 runs and return the per-batch metric column averaged
    across repeats, without collapsing to epochs.

    Only lrs[1] is used. Returns (num_batches, per_batch_values).
    The `ignore_first` parameter is accepted for interface symmetry with
    `get_downlink` but is not used here.
    """
    frac = "1.0"
    iid = 1
    topk = 0.001
    runs = []
    for number in numbers:
        path = ('../save/{}-{}/{}_{}_EPOCH[{}]_USERS[{}]_C[{}]_iid[{}]_B[{}]'
                '_OPT[{}]_LR[{}]_DIR[{}]_TOPK[{}]_TOPKD[{}]_NUM[{}].pkl'
                .format(dataset, model, dataset, model, epochs, num_users, frac, iid,
                        local_bs, "sparsetopk", lrs[1], 1, topk, topk_d, number))
        with open(path, 'rb') as handle:
            runs.append(pickle.load(handle))
    xi_values = np.mean(np.array(runs)[:, index], axis=0)
    return len(xi_values), xi_values
def get_xi_values(lrs, num_users=20, epochs=100, model = "mlp", dataset = "mnist", local_bs=10, numbers = [1], index=4, ignore_first=False, topk_d=0.001, iid = 1):
    """Load the DIR=0 and DIR=1 runs and return per-epoch metric curves.

    For each of (lrs[0], DIR 0) and (lrs[1], DIR 1): load every pickled
    repeat in `numbers`, average column `index` across repeats, then
    collapse per-batch values to per-epoch maxima with `batch_to_epoch`
    (batches-per-epoch inferred from a fixed 100-epoch layout). The two
    duplicated load loops of the original were merged into one.

    Returns (epochs, [dir0_curve, dir1_curve]) where epochs is the length
    of the DIR=0 curve, matching the original's return.
    """
    frac = "1.0"
    topk = 0.001
    template = ('../save/{}-{}/{}_{}_EPOCH[{}]_USERS[{}]_C[{}]_iid[{}]_B[{}]'
                '_OPT[{}]_LR[{}]_DIR[{}]_TOPK[{}]_TOPKD[{}]_NUM[{}].pkl')
    all_experiments = []
    for lr, direction in ((lrs[0], 0), (lrs[1], 1)):
        experiments = []
        for number in numbers:
            file_name = template.format(dataset, model, dataset, model, epochs, num_users, frac, iid,
                                        local_bs, "sparsetopk", lr, direction, topk, topk_d, number)
            with open(file_name, 'rb') as pickle_file:
                experiments.append(pickle.load(pickle_file))
        xi_values = np.mean(np.array(experiments)[:, index], axis=0)
        d = len(xi_values) // 100
        all_experiments.append(batch_to_epoch(xi_values, d, ignore_first))
    epochs = len(all_experiments[0])
    return epochs, all_experiments
def get_side_values(lr, num_users=20, epochs=100, model = "mlp", dataset = "mnist", local_bs=10, numbers = [1], direction=0, iid = 1):
    """Load runs for a single (lr, direction) config and return the two
    "side" curves of the recorded inequality, collapsed to per-epoch values.

    Column 9 (LHS) is reduced with per-epoch max, column 10 (RHS) with
    per-epoch min — presumably so the comparison is conservative on both
    sides (TODO confirm against the recording code).

    Returns (epochs, [lhs_per_epoch, rhs_per_epoch]).
    """
    frac = "1.0"
    topk = 0.001
    topk_d = 0.001
    all_experiments = []
    experiments = []
    for number in numbers:
        file_name = '../save/{}-{}/{}_{}_EPOCH[{}]_USERS[{}]_C[{}]_iid[{}]_B[{}]_OPT[{}]_LR[{}]_DIR[{}]_TOPK[{}]_TOPKD[{}]_NUM[{}].pkl' \
            .format(dataset, model, dataset, model, epochs, num_users, frac, iid,
                    local_bs, "sparsetopk", lr, direction, topk, topk_d, number)
        with open(file_name, 'rb') as pickle_file:
            experiments.append(pickle.load(pickle_file))
    # Average each column across repeat runs before the epoch reduction.
    xi_values_lhs = np.mean(np.array(experiments)[:, 9], axis=0)
    d = len(xi_values_lhs) // 100  # batches per epoch (fixed 100-epoch layout)
    xi_values_lhs = batch_to_epoch(xi_values_lhs, d)
    all_experiments.append(xi_values_lhs)
    xi_values_rhs = np.mean(np.array(experiments)[:, 10], axis=0)
    d = len(xi_values_rhs) // 100
    xi_values_rhs = batch_to_epoch_min(xi_values_rhs, d)
    all_experiments.append(xi_values_rhs)
    epochs = len(all_experiments[0])
    return epochs, all_experiments
def get_distance(lrs, num_users=20, epochs=100, model = "mlp", dataset = "mnist", local_bs=10, numbers=[4], exception=False):
    """Return the per-epoch ratio of column 12 to column 10 for DIR=1 runs.

    Loads every repeat in `numbers`, averages columns 12 (distance-like
    quantity) and 10 (gradient-like quantity) across repeats, and collapses
    their element-wise ratio to per-epoch maxima.

    NOTE: only lrs[1] is read; the `exception` parameter is accepted for
    interface compatibility but is not used by this implementation. The
    original's dead local `batches` was removed.

    Returns a one-element list [per_epoch_ratio] (unchanged interface).
    """
    frac = "1.0"
    iid = 1
    topk = 0.001
    topk_d = 0.001
    experiments = []
    for number in numbers:
        file_name = '../save/{}-{}/{}_{}_EPOCH[{}]_USERS[{}]_C[{}]_iid[{}]_B[{}]_OPT[{}]_LR[{}]_DIR[{}]_TOPK[{}]_TOPKD[{}]_NUM[{}].pkl' \
            .format(dataset, model, dataset, model, epochs, num_users, frac, iid,
                    local_bs, "sparsetopk", lrs[1], 1, topk, topk_d, number)
        with open(file_name, 'rb') as pickle_file:
            experiments.append(pickle.load(pickle_file))
    lhs_values = np.mean(np.array(experiments)[:, 12], axis=0)
    gradient = np.mean(np.array(experiments)[:, 10], axis=0)
    ratio = lhs_values / gradient
    xi_values = batch_to_epoch(ratio, len(ratio) // 100)
    return [xi_values]
def get_side_values_batches(lr, num_users=20, epochs=100, model = "mlp", dataset = "mnist", local_bs=10, numbers = [1], direction=0):
    """Per-batch variant of `get_side_values`: return the LHS (column 9)
    and RHS (column 10) curves averaged across repeat runs, without any
    epoch reduction.

    Returns (num_batches_of_lhs, [lhs_per_batch, rhs_per_batch]).
    """
    frac = "1.0"
    iid = 1
    topk = 0.001
    topk_d = 0.001
    runs = []
    for number in numbers:
        path = ('../save/{}-{}/{}_{}_EPOCH[{}]_USERS[{}]_C[{}]_iid[{}]_B[{}]'
                '_OPT[{}]_LR[{}]_DIR[{}]_TOPK[{}]_TOPKD[{}]_NUM[{}].pkl'
                .format(dataset, model, dataset, model, epochs, num_users, frac, iid,
                        local_bs, "sparsetopk", lr, direction, topk, topk_d, number))
        with open(path, 'rb') as handle:
            runs.append(pickle.load(handle))
    stacked = np.array(runs)
    lhs = np.mean(stacked[:, 9], axis=0)
    rhs = np.mean(stacked[:, 10], axis=0)
    return len(lhs), [lhs, rhs]
def get_model_results(lrs, num_users=20, epochs=100, model = "mlp", dataset = "mnist", local_bs=10, numbers = [1], index_comparison=3, sgddir=0):
    """Load tuning results for three optimizer configs and average repeats.

    The three configs are (lrs[0], sparsetopk, DIR 0), (lrs[1], sparsetopk,
    DIR 1) and (lrs[2], sgd, DIR sgddir); the original's three duplicated
    load loops were collapsed into one loop over these tuples. Results are
    read from the '-tuning' save directory and entry `index_comparison` of
    each pickle is averaged across repeats.

    NOTE: the `numbers` parameter is accepted for interface compatibility
    but ignored — as in the original, only run number 1 is loaded.

    Returns (epochs, [sparsetopk_dir0, sparsetopk_dir1, sgd]) where epochs
    is the length of the first curve.
    """
    frac = "1.0"
    iid = 1
    topk = 0.001
    topk_d = 0.001
    template = ('../save/{}-{}-tuning/{}_{}_EPOCH[{}]_USERS[{}]_C[{}]_iid[{}]_B[{}]'
                '_OPT[{}]_LR[{}]_DIR[{}]_TOPK[{}]_TOPKD[{}]_NUM[{}].pkl')
    all_experiments = []
    for lr, opt, direction in ((lrs[0], "sparsetopk", 0),
                               (lrs[1], "sparsetopk", 1),
                               (lrs[2], "sgd", sgddir)):
        experiments = []
        for num in [1]:
            file_name = template.format(dataset, model, dataset, model, epochs, num_users, frac, iid,
                                        local_bs, opt, lr, direction, topk, topk_d, num)
            with open(file_name, 'rb') as pickle_file:
                experiments.append(pickle.load(pickle_file)[index_comparison])
        all_experiments.append(np.average(np.array(experiments), axis=0))
    epochs = len(all_experiments[0])
    return epochs, all_experiments
| 35.242038
| 169
| 0.592355
| 1,496
| 11,066
| 4.112968
| 0.06885
| 0.053307
| 0.043881
| 0.027304
| 0.928165
| 0.920852
| 0.887697
| 0.887697
| 0.878758
| 0.870632
| 0
| 0.028565
| 0.228086
| 11,066
| 313
| 170
| 35.354633
| 0.691758
| 0.002982
| 0
| 0.827273
| 0
| 0.054545
| 0.145408
| 0.12483
| 0
| 0
| 0
| 0
| 0
| 1
| 0.05
| false
| 0
| 0.022727
| 0
| 0.131818
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
f285677f423fe03a12db0c90815ca6c061c4a169
| 59,237
|
py
|
Python
|
sdk/python/pulumi_scaleway/loadbalancer_backend.py
|
stack72/pulumi-scaleway
|
0242d1f058046f86fe4ea6f106872ecd08d10c3b
|
[
"ECL-2.0",
"Apache-2.0"
] | 6
|
2020-10-16T09:09:05.000Z
|
2022-03-24T21:32:17.000Z
|
sdk/python/pulumi_scaleway/loadbalancer_backend.py
|
stack72/pulumi-scaleway
|
0242d1f058046f86fe4ea6f106872ecd08d10c3b
|
[
"ECL-2.0",
"Apache-2.0"
] | 34
|
2020-10-29T17:38:13.000Z
|
2022-03-31T13:33:47.000Z
|
sdk/python/pulumi_scaleway/loadbalancer_backend.py
|
stack72/pulumi-scaleway
|
0242d1f058046f86fe4ea6f106872ecd08d10c3b
|
[
"ECL-2.0",
"Apache-2.0"
] | 2
|
2022-01-13T18:46:32.000Z
|
2022-02-28T03:58:36.000Z
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from . import _utilities
from . import outputs
from ._inputs import *
__all__ = ['LoadbalancerBackendArgs', 'LoadbalancerBackend']
@pulumi.input_type
class LoadbalancerBackendArgs:
    """Constructor argument bundle for the `LoadbalancerBackend` resource.

    Generated by the Pulumi Terraform Bridge (tfgen) — keep hand edits to
    comments/docstrings only; regeneration will overwrite this file.
    """
    def __init__(__self__, *,
                 forward_port: pulumi.Input[int],
                 forward_protocol: pulumi.Input[str],
                 lb_id: pulumi.Input[str],
                 forward_port_algorithm: Optional[pulumi.Input[str]] = None,
                 health_check_delay: Optional[pulumi.Input[str]] = None,
                 health_check_http: Optional[pulumi.Input['LoadbalancerBackendHealthCheckHttpArgs']] = None,
                 health_check_https: Optional[pulumi.Input['LoadbalancerBackendHealthCheckHttpsArgs']] = None,
                 health_check_max_retries: Optional[pulumi.Input[int]] = None,
                 health_check_port: Optional[pulumi.Input[int]] = None,
                 health_check_tcp: Optional[pulumi.Input['LoadbalancerBackendHealthCheckTcpArgs']] = None,
                 health_check_timeout: Optional[pulumi.Input[str]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 on_marked_down_action: Optional[pulumi.Input[str]] = None,
                 proxy_protocol: Optional[pulumi.Input[str]] = None,
                 send_proxy_v2: Optional[pulumi.Input[bool]] = None,
                 server_ips: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 sticky_sessions: Optional[pulumi.Input[str]] = None,
                 sticky_sessions_cookie_name: Optional[pulumi.Input[str]] = None,
                 timeout_connect: Optional[pulumi.Input[str]] = None,
                 timeout_server: Optional[pulumi.Input[str]] = None,
                 timeout_tunnel: Optional[pulumi.Input[str]] = None):
        """
        The set of arguments for constructing a LoadbalancerBackend resource.
        :param pulumi.Input[int] forward_port: User sessions will be forwarded to this port of backend servers.
        :param pulumi.Input[str] forward_protocol: Backend protocol. Possible values are: `tcp` or `http`.
        :param pulumi.Input[str] lb_id: The load-balancer ID this backend is attached to.
               > **Important:** Updates to `lb_id` will recreate the backend.
        :param pulumi.Input[str] forward_port_algorithm: Load balancing algorithm. Possible values are: `roundrobin`, `leastconn` and `first`.
        :param pulumi.Input[str] health_check_delay: Interval between two HC requests.
        :param pulumi.Input['LoadbalancerBackendHealthCheckHttpArgs'] health_check_http: This block enable HTTP health check. Only one of `health_check_tcp`, `health_check_http` and `health_check_https` should be specified.
        :param pulumi.Input['LoadbalancerBackendHealthCheckHttpsArgs'] health_check_https: This block enable HTTPS health check. Only one of `health_check_tcp`, `health_check_http` and `health_check_https` should be specified.
        :param pulumi.Input[int] health_check_max_retries: Number of allowed failed HC requests before the backend server is marked down.
        :param pulumi.Input[int] health_check_port: Port the HC requests will be sent to.
        :param pulumi.Input['LoadbalancerBackendHealthCheckTcpArgs'] health_check_tcp: This block enable TCP health check. Only one of `health_check_tcp`, `health_check_http` and `health_check_https` should be specified.
        :param pulumi.Input[str] health_check_timeout: Timeout before we consider a HC request failed.
        :param pulumi.Input[str] name: The name of the load-balancer backend.
        :param pulumi.Input[str] on_marked_down_action: Modify what occurs when a backend server is marked down. Possible values are: `none` and `shutdown_sessions`.
        :param pulumi.Input[str] proxy_protocol: Choose the type of PROXY protocol to enable (`none`, `v1`, `v2`, `v2_ssl`, `v2_ssl_cn`)
        :param pulumi.Input[bool] send_proxy_v2: DEPRECATED please use `proxy_protocol` instead - (Default: `false`) Enables PROXY protocol version 2.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] server_ips: List of backend server IP addresses. Addresses can be either IPv4 or IPv6.
        :param pulumi.Input[str] sticky_sessions: Load balancing algorithm. Possible values are: `none`, `cookie` and `table`.
        :param pulumi.Input[str] sticky_sessions_cookie_name: Cookie name for sticky sessions. Only applicable when sticky_sessions is set to `cookie`.
        :param pulumi.Input[str] timeout_connect: Maximum initial server connection establishment time. (e.g.: `1s`)
        :param pulumi.Input[str] timeout_server: Maximum server connection inactivity time. (e.g.: `1s`)
        :param pulumi.Input[str] timeout_tunnel: Maximum tunnel inactivity time. (e.g.: `1s`)
        """
        pulumi.set(__self__, "forward_port", forward_port)
        pulumi.set(__self__, "forward_protocol", forward_protocol)
        pulumi.set(__self__, "lb_id", lb_id)
        if forward_port_algorithm is not None:
            pulumi.set(__self__, "forward_port_algorithm", forward_port_algorithm)
        if health_check_delay is not None:
            pulumi.set(__self__, "health_check_delay", health_check_delay)
        if health_check_http is not None:
            pulumi.set(__self__, "health_check_http", health_check_http)
        if health_check_https is not None:
            pulumi.set(__self__, "health_check_https", health_check_https)
        if health_check_max_retries is not None:
            pulumi.set(__self__, "health_check_max_retries", health_check_max_retries)
        if health_check_port is not None:
            pulumi.set(__self__, "health_check_port", health_check_port)
        if health_check_tcp is not None:
            pulumi.set(__self__, "health_check_tcp", health_check_tcp)
        if health_check_timeout is not None:
            pulumi.set(__self__, "health_check_timeout", health_check_timeout)
        if name is not None:
            pulumi.set(__self__, "name", name)
        if on_marked_down_action is not None:
            pulumi.set(__self__, "on_marked_down_action", on_marked_down_action)
        if proxy_protocol is not None:
            pulumi.set(__self__, "proxy_protocol", proxy_protocol)
        # NOTE: tfgen emits the deprecation warning and the assignment under
        # two separate (identical) guards; both are kept as generated.
        if send_proxy_v2 is not None:
            warnings.warn("""Please use proxy_protocol instead""", DeprecationWarning)
            pulumi.log.warn("""send_proxy_v2 is deprecated: Please use proxy_protocol instead""")
        if send_proxy_v2 is not None:
            pulumi.set(__self__, "send_proxy_v2", send_proxy_v2)
        if server_ips is not None:
            pulumi.set(__self__, "server_ips", server_ips)
        if sticky_sessions is not None:
            pulumi.set(__self__, "sticky_sessions", sticky_sessions)
        if sticky_sessions_cookie_name is not None:
            pulumi.set(__self__, "sticky_sessions_cookie_name", sticky_sessions_cookie_name)
        if timeout_connect is not None:
            pulumi.set(__self__, "timeout_connect", timeout_connect)
        if timeout_server is not None:
            pulumi.set(__self__, "timeout_server", timeout_server)
        if timeout_tunnel is not None:
            pulumi.set(__self__, "timeout_tunnel", timeout_tunnel)
    @property
    @pulumi.getter(name="forwardPort")
    def forward_port(self) -> pulumi.Input[int]:
        """
        User sessions will be forwarded to this port of backend servers.
        """
        return pulumi.get(self, "forward_port")
    @forward_port.setter
    def forward_port(self, value: pulumi.Input[int]):
        pulumi.set(self, "forward_port", value)
    @property
    @pulumi.getter(name="forwardProtocol")
    def forward_protocol(self) -> pulumi.Input[str]:
        """
        Backend protocol. Possible values are: `tcp` or `http`.
        """
        return pulumi.get(self, "forward_protocol")
    @forward_protocol.setter
    def forward_protocol(self, value: pulumi.Input[str]):
        pulumi.set(self, "forward_protocol", value)
    @property
    @pulumi.getter(name="lbId")
    def lb_id(self) -> pulumi.Input[str]:
        """
        The load-balancer ID this backend is attached to.
        > **Important:** Updates to `lb_id` will recreate the backend.
        """
        return pulumi.get(self, "lb_id")
    @lb_id.setter
    def lb_id(self, value: pulumi.Input[str]):
        pulumi.set(self, "lb_id", value)
    @property
    @pulumi.getter(name="forwardPortAlgorithm")
    def forward_port_algorithm(self) -> Optional[pulumi.Input[str]]:
        """
        Load balancing algorithm. Possible values are: `roundrobin`, `leastconn` and `first`.
        """
        return pulumi.get(self, "forward_port_algorithm")
    @forward_port_algorithm.setter
    def forward_port_algorithm(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "forward_port_algorithm", value)
    @property
    @pulumi.getter(name="healthCheckDelay")
    def health_check_delay(self) -> Optional[pulumi.Input[str]]:
        """
        Interval between two HC requests.
        """
        return pulumi.get(self, "health_check_delay")
    @health_check_delay.setter
    def health_check_delay(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "health_check_delay", value)
    @property
    @pulumi.getter(name="healthCheckHttp")
    def health_check_http(self) -> Optional[pulumi.Input['LoadbalancerBackendHealthCheckHttpArgs']]:
        """
        This block enable HTTP health check. Only one of `health_check_tcp`, `health_check_http` and `health_check_https` should be specified.
        """
        return pulumi.get(self, "health_check_http")
    @health_check_http.setter
    def health_check_http(self, value: Optional[pulumi.Input['LoadbalancerBackendHealthCheckHttpArgs']]):
        pulumi.set(self, "health_check_http", value)
    @property
    @pulumi.getter(name="healthCheckHttps")
    def health_check_https(self) -> Optional[pulumi.Input['LoadbalancerBackendHealthCheckHttpsArgs']]:
        """
        This block enable HTTPS health check. Only one of `health_check_tcp`, `health_check_http` and `health_check_https` should be specified.
        """
        return pulumi.get(self, "health_check_https")
    @health_check_https.setter
    def health_check_https(self, value: Optional[pulumi.Input['LoadbalancerBackendHealthCheckHttpsArgs']]):
        pulumi.set(self, "health_check_https", value)
    @property
    @pulumi.getter(name="healthCheckMaxRetries")
    def health_check_max_retries(self) -> Optional[pulumi.Input[int]]:
        """
        Number of allowed failed HC requests before the backend server is marked down.
        """
        return pulumi.get(self, "health_check_max_retries")
    @health_check_max_retries.setter
    def health_check_max_retries(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "health_check_max_retries", value)
    @property
    @pulumi.getter(name="healthCheckPort")
    def health_check_port(self) -> Optional[pulumi.Input[int]]:
        """
        Port the HC requests will be sent to.
        """
        return pulumi.get(self, "health_check_port")
    @health_check_port.setter
    def health_check_port(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "health_check_port", value)
    @property
    @pulumi.getter(name="healthCheckTcp")
    def health_check_tcp(self) -> Optional[pulumi.Input['LoadbalancerBackendHealthCheckTcpArgs']]:
        """
        This block enable TCP health check. Only one of `health_check_tcp`, `health_check_http` and `health_check_https` should be specified.
        """
        return pulumi.get(self, "health_check_tcp")
    @health_check_tcp.setter
    def health_check_tcp(self, value: Optional[pulumi.Input['LoadbalancerBackendHealthCheckTcpArgs']]):
        pulumi.set(self, "health_check_tcp", value)
    @property
    @pulumi.getter(name="healthCheckTimeout")
    def health_check_timeout(self) -> Optional[pulumi.Input[str]]:
        """
        Timeout before we consider a HC request failed.
        """
        return pulumi.get(self, "health_check_timeout")
    @health_check_timeout.setter
    def health_check_timeout(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "health_check_timeout", value)
    @property
    @pulumi.getter
    def name(self) -> Optional[pulumi.Input[str]]:
        """
        The name of the load-balancer backend.
        """
        return pulumi.get(self, "name")
    @name.setter
    def name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "name", value)
    @property
    @pulumi.getter(name="onMarkedDownAction")
    def on_marked_down_action(self) -> Optional[pulumi.Input[str]]:
        """
        Modify what occurs when a backend server is marked down. Possible values are: `none` and `shutdown_sessions`.
        """
        return pulumi.get(self, "on_marked_down_action")
    @on_marked_down_action.setter
    def on_marked_down_action(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "on_marked_down_action", value)
    @property
    @pulumi.getter(name="proxyProtocol")
    def proxy_protocol(self) -> Optional[pulumi.Input[str]]:
        """
        Choose the type of PROXY protocol to enable (`none`, `v1`, `v2`, `v2_ssl`, `v2_ssl_cn`)
        """
        return pulumi.get(self, "proxy_protocol")
    @proxy_protocol.setter
    def proxy_protocol(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "proxy_protocol", value)
    @property
    @pulumi.getter(name="sendProxyV2")
    def send_proxy_v2(self) -> Optional[pulumi.Input[bool]]:
        """
        DEPRECATED please use `proxy_protocol` instead - (Default: `false`) Enables PROXY protocol version 2.
        """
        return pulumi.get(self, "send_proxy_v2")
    @send_proxy_v2.setter
    def send_proxy_v2(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "send_proxy_v2", value)
    @property
    @pulumi.getter(name="serverIps")
    def server_ips(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        """
        List of backend server IP addresses. Addresses can be either IPv4 or IPv6.
        """
        return pulumi.get(self, "server_ips")
    @server_ips.setter
    def server_ips(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "server_ips", value)
    @property
    @pulumi.getter(name="stickySessions")
    def sticky_sessions(self) -> Optional[pulumi.Input[str]]:
        """
        Load balancing algorithm. Possible values are: `none`, `cookie` and `table`.
        """
        return pulumi.get(self, "sticky_sessions")
    @sticky_sessions.setter
    def sticky_sessions(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "sticky_sessions", value)
    @property
    @pulumi.getter(name="stickySessionsCookieName")
    def sticky_sessions_cookie_name(self) -> Optional[pulumi.Input[str]]:
        """
        Cookie name for sticky sessions. Only applicable when sticky_sessions is set to `cookie`.
        """
        return pulumi.get(self, "sticky_sessions_cookie_name")
    @sticky_sessions_cookie_name.setter
    def sticky_sessions_cookie_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "sticky_sessions_cookie_name", value)
    @property
    @pulumi.getter(name="timeoutConnect")
    def timeout_connect(self) -> Optional[pulumi.Input[str]]:
        """
        Maximum initial server connection establishment time. (e.g.: `1s`)
        """
        return pulumi.get(self, "timeout_connect")
    @timeout_connect.setter
    def timeout_connect(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "timeout_connect", value)
    @property
    @pulumi.getter(name="timeoutServer")
    def timeout_server(self) -> Optional[pulumi.Input[str]]:
        """
        Maximum server connection inactivity time. (e.g.: `1s`)
        """
        return pulumi.get(self, "timeout_server")
    @timeout_server.setter
    def timeout_server(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "timeout_server", value)
    @property
    @pulumi.getter(name="timeoutTunnel")
    def timeout_tunnel(self) -> Optional[pulumi.Input[str]]:
        """
        Maximum tunnel inactivity time. (e.g.: `1s`)
        """
        return pulumi.get(self, "timeout_tunnel")
    @timeout_tunnel.setter
    def timeout_tunnel(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "timeout_tunnel", value)
@pulumi.input_type
class _LoadbalancerBackendState:
def __init__(__self__, *,
forward_port: Optional[pulumi.Input[int]] = None,
forward_port_algorithm: Optional[pulumi.Input[str]] = None,
forward_protocol: Optional[pulumi.Input[str]] = None,
health_check_delay: Optional[pulumi.Input[str]] = None,
health_check_http: Optional[pulumi.Input['LoadbalancerBackendHealthCheckHttpArgs']] = None,
health_check_https: Optional[pulumi.Input['LoadbalancerBackendHealthCheckHttpsArgs']] = None,
health_check_max_retries: Optional[pulumi.Input[int]] = None,
health_check_port: Optional[pulumi.Input[int]] = None,
health_check_tcp: Optional[pulumi.Input['LoadbalancerBackendHealthCheckTcpArgs']] = None,
health_check_timeout: Optional[pulumi.Input[str]] = None,
lb_id: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
on_marked_down_action: Optional[pulumi.Input[str]] = None,
proxy_protocol: Optional[pulumi.Input[str]] = None,
send_proxy_v2: Optional[pulumi.Input[bool]] = None,
server_ips: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
sticky_sessions: Optional[pulumi.Input[str]] = None,
sticky_sessions_cookie_name: Optional[pulumi.Input[str]] = None,
timeout_connect: Optional[pulumi.Input[str]] = None,
timeout_server: Optional[pulumi.Input[str]] = None,
timeout_tunnel: Optional[pulumi.Input[str]] = None):
"""
Input properties used for looking up and filtering LoadbalancerBackend resources.
:param pulumi.Input[int] forward_port: User sessions will be forwarded to this port of backend servers.
:param pulumi.Input[str] forward_port_algorithm: Load balancing algorithm. Possible values are: `roundrobin`, `leastconn` and `first`.
:param pulumi.Input[str] forward_protocol: Backend protocol. Possible values are: `tcp` or `http`.
:param pulumi.Input[str] health_check_delay: Interval between two HC requests.
:param pulumi.Input['LoadbalancerBackendHealthCheckHttpArgs'] health_check_http: This block enable HTTP health check. Only one of `health_check_tcp`, `health_check_http` and `health_check_https` should be specified.
:param pulumi.Input['LoadbalancerBackendHealthCheckHttpsArgs'] health_check_https: This block enable HTTPS health check. Only one of `health_check_tcp`, `health_check_http` and `health_check_https` should be specified.
:param pulumi.Input[int] health_check_max_retries: Number of allowed failed HC requests before the backend server is marked down.
:param pulumi.Input[int] health_check_port: Port the HC requests will be send to.
:param pulumi.Input['LoadbalancerBackendHealthCheckTcpArgs'] health_check_tcp: This block enable TCP health check. Only one of `health_check_tcp`, `health_check_http` and `health_check_https` should be specified.
:param pulumi.Input[str] health_check_timeout: Timeout before we consider a HC request failed.
:param pulumi.Input[str] lb_id: The load-balancer ID this backend is attached to.
> **Important:** Updates to `lb_id` will recreate the backend.
:param pulumi.Input[str] name: The name of the load-balancer backend.
:param pulumi.Input[str] on_marked_down_action: Modify what occurs when a backend server is marked down. Possible values are: `none` and `shutdown_sessions`.
:param pulumi.Input[str] proxy_protocol: Choose the type of PROXY protocol to enable (`none`, `v1`, `v2`, `v2_ssl`, `v2_ssl_cn`)
:param pulumi.Input[bool] send_proxy_v2: DEPRECATED please use `proxy_protocol` instead - (Default: `false`) Enables PROXY protocol version 2.
:param pulumi.Input[Sequence[pulumi.Input[str]]] server_ips: List of backend server IP addresses. Addresses can be either IPv4 or IPv6.
:param pulumi.Input[str] sticky_sessions: Load balancing algorithm. Possible values are: `none`, `cookie` and `table`.
:param pulumi.Input[str] sticky_sessions_cookie_name: Cookie name for for sticky sessions. Only applicable when sticky_sessions is set to `cookie`.
:param pulumi.Input[str] timeout_connect: Maximum initial server connection establishment time. (e.g.: `1s`)
:param pulumi.Input[str] timeout_server: Maximum server connection inactivity time. (e.g.: `1s`)
:param pulumi.Input[str] timeout_tunnel: Maximum tunnel inactivity time. (e.g.: `1s`)
"""
if forward_port is not None:
pulumi.set(__self__, "forward_port", forward_port)
if forward_port_algorithm is not None:
pulumi.set(__self__, "forward_port_algorithm", forward_port_algorithm)
if forward_protocol is not None:
pulumi.set(__self__, "forward_protocol", forward_protocol)
if health_check_delay is not None:
pulumi.set(__self__, "health_check_delay", health_check_delay)
if health_check_http is not None:
pulumi.set(__self__, "health_check_http", health_check_http)
if health_check_https is not None:
pulumi.set(__self__, "health_check_https", health_check_https)
if health_check_max_retries is not None:
pulumi.set(__self__, "health_check_max_retries", health_check_max_retries)
if health_check_port is not None:
pulumi.set(__self__, "health_check_port", health_check_port)
if health_check_tcp is not None:
pulumi.set(__self__, "health_check_tcp", health_check_tcp)
if health_check_timeout is not None:
pulumi.set(__self__, "health_check_timeout", health_check_timeout)
if lb_id is not None:
pulumi.set(__self__, "lb_id", lb_id)
if name is not None:
pulumi.set(__self__, "name", name)
if on_marked_down_action is not None:
pulumi.set(__self__, "on_marked_down_action", on_marked_down_action)
if proxy_protocol is not None:
pulumi.set(__self__, "proxy_protocol", proxy_protocol)
if send_proxy_v2 is not None:
warnings.warn("""Please use proxy_protocol instead""", DeprecationWarning)
pulumi.log.warn("""send_proxy_v2 is deprecated: Please use proxy_protocol instead""")
if send_proxy_v2 is not None:
pulumi.set(__self__, "send_proxy_v2", send_proxy_v2)
if server_ips is not None:
pulumi.set(__self__, "server_ips", server_ips)
if sticky_sessions is not None:
pulumi.set(__self__, "sticky_sessions", sticky_sessions)
if sticky_sessions_cookie_name is not None:
pulumi.set(__self__, "sticky_sessions_cookie_name", sticky_sessions_cookie_name)
if timeout_connect is not None:
pulumi.set(__self__, "timeout_connect", timeout_connect)
if timeout_server is not None:
pulumi.set(__self__, "timeout_server", timeout_server)
if timeout_tunnel is not None:
pulumi.set(__self__, "timeout_tunnel", timeout_tunnel)
@property
@pulumi.getter(name="forwardPort")
def forward_port(self) -> Optional[pulumi.Input[int]]:
"""
User sessions will be forwarded to this port of backend servers.
"""
return pulumi.get(self, "forward_port")
@forward_port.setter
def forward_port(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "forward_port", value)
@property
@pulumi.getter(name="forwardPortAlgorithm")
def forward_port_algorithm(self) -> Optional[pulumi.Input[str]]:
"""
Load balancing algorithm. Possible values are: `roundrobin`, `leastconn` and `first`.
"""
return pulumi.get(self, "forward_port_algorithm")
@forward_port_algorithm.setter
def forward_port_algorithm(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "forward_port_algorithm", value)
@property
@pulumi.getter(name="forwardProtocol")
def forward_protocol(self) -> Optional[pulumi.Input[str]]:
"""
Backend protocol. Possible values are: `tcp` or `http`.
"""
return pulumi.get(self, "forward_protocol")
@forward_protocol.setter
def forward_protocol(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "forward_protocol", value)
@property
@pulumi.getter(name="healthCheckDelay")
def health_check_delay(self) -> Optional[pulumi.Input[str]]:
"""
Interval between two HC requests.
"""
return pulumi.get(self, "health_check_delay")
@health_check_delay.setter
def health_check_delay(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "health_check_delay", value)
@property
@pulumi.getter(name="healthCheckHttp")
def health_check_http(self) -> Optional[pulumi.Input['LoadbalancerBackendHealthCheckHttpArgs']]:
"""
This block enable HTTP health check. Only one of `health_check_tcp`, `health_check_http` and `health_check_https` should be specified.
"""
return pulumi.get(self, "health_check_http")
@health_check_http.setter
def health_check_http(self, value: Optional[pulumi.Input['LoadbalancerBackendHealthCheckHttpArgs']]):
pulumi.set(self, "health_check_http", value)
@property
@pulumi.getter(name="healthCheckHttps")
def health_check_https(self) -> Optional[pulumi.Input['LoadbalancerBackendHealthCheckHttpsArgs']]:
"""
This block enable HTTPS health check. Only one of `health_check_tcp`, `health_check_http` and `health_check_https` should be specified.
"""
return pulumi.get(self, "health_check_https")
@health_check_https.setter
def health_check_https(self, value: Optional[pulumi.Input['LoadbalancerBackendHealthCheckHttpsArgs']]):
pulumi.set(self, "health_check_https", value)
@property
@pulumi.getter(name="healthCheckMaxRetries")
def health_check_max_retries(self) -> Optional[pulumi.Input[int]]:
"""
Number of allowed failed HC requests before the backend server is marked down.
"""
return pulumi.get(self, "health_check_max_retries")
@health_check_max_retries.setter
def health_check_max_retries(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "health_check_max_retries", value)
@property
@pulumi.getter(name="healthCheckPort")
def health_check_port(self) -> Optional[pulumi.Input[int]]:
"""
Port the HC requests will be send to.
"""
return pulumi.get(self, "health_check_port")
@health_check_port.setter
def health_check_port(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "health_check_port", value)
@property
@pulumi.getter(name="healthCheckTcp")
def health_check_tcp(self) -> Optional[pulumi.Input['LoadbalancerBackendHealthCheckTcpArgs']]:
"""
This block enable TCP health check. Only one of `health_check_tcp`, `health_check_http` and `health_check_https` should be specified.
"""
return pulumi.get(self, "health_check_tcp")
@health_check_tcp.setter
def health_check_tcp(self, value: Optional[pulumi.Input['LoadbalancerBackendHealthCheckTcpArgs']]):
pulumi.set(self, "health_check_tcp", value)
@property
@pulumi.getter(name="healthCheckTimeout")
def health_check_timeout(self) -> Optional[pulumi.Input[str]]:
"""
Timeout before we consider a HC request failed.
"""
return pulumi.get(self, "health_check_timeout")
@health_check_timeout.setter
def health_check_timeout(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "health_check_timeout", value)
@property
@pulumi.getter(name="lbId")
def lb_id(self) -> Optional[pulumi.Input[str]]:
"""
The load-balancer ID this backend is attached to.
> **Important:** Updates to `lb_id` will recreate the backend.
"""
return pulumi.get(self, "lb_id")
@lb_id.setter
def lb_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "lb_id", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
The name of the load-balancer backend.
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter(name="onMarkedDownAction")
def on_marked_down_action(self) -> Optional[pulumi.Input[str]]:
"""
Modify what occurs when a backend server is marked down. Possible values are: `none` and `shutdown_sessions`.
"""
return pulumi.get(self, "on_marked_down_action")
@on_marked_down_action.setter
def on_marked_down_action(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "on_marked_down_action", value)
@property
@pulumi.getter(name="proxyProtocol")
def proxy_protocol(self) -> Optional[pulumi.Input[str]]:
"""
Choose the type of PROXY protocol to enable (`none`, `v1`, `v2`, `v2_ssl`, `v2_ssl_cn`)
"""
return pulumi.get(self, "proxy_protocol")
@proxy_protocol.setter
def proxy_protocol(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "proxy_protocol", value)
@property
@pulumi.getter(name="sendProxyV2")
def send_proxy_v2(self) -> Optional[pulumi.Input[bool]]:
"""
DEPRECATED please use `proxy_protocol` instead - (Default: `false`) Enables PROXY protocol version 2.
"""
return pulumi.get(self, "send_proxy_v2")
@send_proxy_v2.setter
def send_proxy_v2(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "send_proxy_v2", value)
@property
@pulumi.getter(name="serverIps")
def server_ips(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
List of backend server IP addresses. Addresses can be either IPv4 or IPv6.
"""
return pulumi.get(self, "server_ips")
@server_ips.setter
def server_ips(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "server_ips", value)
@property
@pulumi.getter(name="stickySessions")
def sticky_sessions(self) -> Optional[pulumi.Input[str]]:
"""
Load balancing algorithm. Possible values are: `none`, `cookie` and `table`.
"""
return pulumi.get(self, "sticky_sessions")
@sticky_sessions.setter
def sticky_sessions(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "sticky_sessions", value)
@property
@pulumi.getter(name="stickySessionsCookieName")
def sticky_sessions_cookie_name(self) -> Optional[pulumi.Input[str]]:
"""
Cookie name for for sticky sessions. Only applicable when sticky_sessions is set to `cookie`.
"""
return pulumi.get(self, "sticky_sessions_cookie_name")
@sticky_sessions_cookie_name.setter
def sticky_sessions_cookie_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "sticky_sessions_cookie_name", value)
@property
@pulumi.getter(name="timeoutConnect")
def timeout_connect(self) -> Optional[pulumi.Input[str]]:
"""
Maximum initial server connection establishment time. (e.g.: `1s`)
"""
return pulumi.get(self, "timeout_connect")
@timeout_connect.setter
def timeout_connect(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "timeout_connect", value)
@property
@pulumi.getter(name="timeoutServer")
def timeout_server(self) -> Optional[pulumi.Input[str]]:
"""
Maximum server connection inactivity time. (e.g.: `1s`)
"""
return pulumi.get(self, "timeout_server")
@timeout_server.setter
def timeout_server(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "timeout_server", value)
@property
@pulumi.getter(name="timeoutTunnel")
def timeout_tunnel(self) -> Optional[pulumi.Input[str]]:
"""
Maximum tunnel inactivity time. (e.g.: `1s`)
"""
return pulumi.get(self, "timeout_tunnel")
@timeout_tunnel.setter
def timeout_tunnel(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "timeout_tunnel", value)
class LoadbalancerBackend(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
forward_port: Optional[pulumi.Input[int]] = None,
forward_port_algorithm: Optional[pulumi.Input[str]] = None,
forward_protocol: Optional[pulumi.Input[str]] = None,
health_check_delay: Optional[pulumi.Input[str]] = None,
health_check_http: Optional[pulumi.Input[pulumi.InputType['LoadbalancerBackendHealthCheckHttpArgs']]] = None,
health_check_https: Optional[pulumi.Input[pulumi.InputType['LoadbalancerBackendHealthCheckHttpsArgs']]] = None,
health_check_max_retries: Optional[pulumi.Input[int]] = None,
health_check_port: Optional[pulumi.Input[int]] = None,
health_check_tcp: Optional[pulumi.Input[pulumi.InputType['LoadbalancerBackendHealthCheckTcpArgs']]] = None,
health_check_timeout: Optional[pulumi.Input[str]] = None,
lb_id: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
on_marked_down_action: Optional[pulumi.Input[str]] = None,
proxy_protocol: Optional[pulumi.Input[str]] = None,
send_proxy_v2: Optional[pulumi.Input[bool]] = None,
server_ips: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
sticky_sessions: Optional[pulumi.Input[str]] = None,
sticky_sessions_cookie_name: Optional[pulumi.Input[str]] = None,
timeout_connect: Optional[pulumi.Input[str]] = None,
timeout_server: Optional[pulumi.Input[str]] = None,
timeout_tunnel: Optional[pulumi.Input[str]] = None,
__props__=None):
"""
Creates and manages Scaleway Load-Balancer Backends.
For more information, see [the documentation](https://developers.scaleway.com/en/products/lb/zoned_api).
## Examples
### Basic
```python
import pulumi
import pulumi_scaleway as scaleway
backend01 = scaleway.LoadbalancerBackend("backend01",
lb_id=scaleway_lb["lb01"]["id"],
forward_protocol="http",
forward_port=80)
```
### With HTTP Health Check
```python
import pulumi
import pulumi_scaleway as scaleway
backend01 = scaleway.LoadbalancerBackend("backend01",
lb_id=scaleway_lb["lb01"]["id"],
forward_protocol="http",
forward_port=80,
health_check_http=scaleway.LoadbalancerBackendHealthCheckHttpArgs(
uri="www.test.com/health",
))
```
## Import
Load-Balancer backend can be imported using the `{zone}/{id}`, e.g. bash
```sh
$ pulumi import scaleway:index/loadbalancerBackend:LoadbalancerBackend backend01 fr-par-1/11111111-1111-1111-1111-111111111111
```
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[int] forward_port: User sessions will be forwarded to this port of backend servers.
:param pulumi.Input[str] forward_port_algorithm: Load balancing algorithm. Possible values are: `roundrobin`, `leastconn` and `first`.
:param pulumi.Input[str] forward_protocol: Backend protocol. Possible values are: `tcp` or `http`.
:param pulumi.Input[str] health_check_delay: Interval between two HC requests.
:param pulumi.Input[pulumi.InputType['LoadbalancerBackendHealthCheckHttpArgs']] health_check_http: This block enable HTTP health check. Only one of `health_check_tcp`, `health_check_http` and `health_check_https` should be specified.
:param pulumi.Input[pulumi.InputType['LoadbalancerBackendHealthCheckHttpsArgs']] health_check_https: This block enable HTTPS health check. Only one of `health_check_tcp`, `health_check_http` and `health_check_https` should be specified.
:param pulumi.Input[int] health_check_max_retries: Number of allowed failed HC requests before the backend server is marked down.
:param pulumi.Input[int] health_check_port: Port the HC requests will be send to.
:param pulumi.Input[pulumi.InputType['LoadbalancerBackendHealthCheckTcpArgs']] health_check_tcp: This block enable TCP health check. Only one of `health_check_tcp`, `health_check_http` and `health_check_https` should be specified.
:param pulumi.Input[str] health_check_timeout: Timeout before we consider a HC request failed.
:param pulumi.Input[str] lb_id: The load-balancer ID this backend is attached to.
> **Important:** Updates to `lb_id` will recreate the backend.
:param pulumi.Input[str] name: The name of the load-balancer backend.
:param pulumi.Input[str] on_marked_down_action: Modify what occurs when a backend server is marked down. Possible values are: `none` and `shutdown_sessions`.
:param pulumi.Input[str] proxy_protocol: Choose the type of PROXY protocol to enable (`none`, `v1`, `v2`, `v2_ssl`, `v2_ssl_cn`)
:param pulumi.Input[bool] send_proxy_v2: DEPRECATED please use `proxy_protocol` instead - (Default: `false`) Enables PROXY protocol version 2.
:param pulumi.Input[Sequence[pulumi.Input[str]]] server_ips: List of backend server IP addresses. Addresses can be either IPv4 or IPv6.
:param pulumi.Input[str] sticky_sessions: Load balancing algorithm. Possible values are: `none`, `cookie` and `table`.
:param pulumi.Input[str] sticky_sessions_cookie_name: Cookie name for for sticky sessions. Only applicable when sticky_sessions is set to `cookie`.
:param pulumi.Input[str] timeout_connect: Maximum initial server connection establishment time. (e.g.: `1s`)
:param pulumi.Input[str] timeout_server: Maximum server connection inactivity time. (e.g.: `1s`)
:param pulumi.Input[str] timeout_tunnel: Maximum tunnel inactivity time. (e.g.: `1s`)
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: LoadbalancerBackendArgs,
opts: Optional[pulumi.ResourceOptions] = None):
"""
Creates and manages Scaleway Load-Balancer Backends.
For more information, see [the documentation](https://developers.scaleway.com/en/products/lb/zoned_api).
## Examples
### Basic
```python
import pulumi
import pulumi_scaleway as scaleway
backend01 = scaleway.LoadbalancerBackend("backend01",
lb_id=scaleway_lb["lb01"]["id"],
forward_protocol="http",
forward_port=80)
```
### With HTTP Health Check
```python
import pulumi
import pulumi_scaleway as scaleway
backend01 = scaleway.LoadbalancerBackend("backend01",
lb_id=scaleway_lb["lb01"]["id"],
forward_protocol="http",
forward_port=80,
health_check_http=scaleway.LoadbalancerBackendHealthCheckHttpArgs(
uri="www.test.com/health",
))
```
## Import
Load-Balancer backend can be imported using the `{zone}/{id}`, e.g. bash
```sh
$ pulumi import scaleway:index/loadbalancerBackend:LoadbalancerBackend backend01 fr-par-1/11111111-1111-1111-1111-111111111111
```
:param str resource_name: The name of the resource.
:param LoadbalancerBackendArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(LoadbalancerBackendArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
forward_port: Optional[pulumi.Input[int]] = None,
forward_port_algorithm: Optional[pulumi.Input[str]] = None,
forward_protocol: Optional[pulumi.Input[str]] = None,
health_check_delay: Optional[pulumi.Input[str]] = None,
health_check_http: Optional[pulumi.Input[pulumi.InputType['LoadbalancerBackendHealthCheckHttpArgs']]] = None,
health_check_https: Optional[pulumi.Input[pulumi.InputType['LoadbalancerBackendHealthCheckHttpsArgs']]] = None,
health_check_max_retries: Optional[pulumi.Input[int]] = None,
health_check_port: Optional[pulumi.Input[int]] = None,
health_check_tcp: Optional[pulumi.Input[pulumi.InputType['LoadbalancerBackendHealthCheckTcpArgs']]] = None,
health_check_timeout: Optional[pulumi.Input[str]] = None,
lb_id: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
on_marked_down_action: Optional[pulumi.Input[str]] = None,
proxy_protocol: Optional[pulumi.Input[str]] = None,
send_proxy_v2: Optional[pulumi.Input[bool]] = None,
server_ips: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
sticky_sessions: Optional[pulumi.Input[str]] = None,
sticky_sessions_cookie_name: Optional[pulumi.Input[str]] = None,
timeout_connect: Optional[pulumi.Input[str]] = None,
timeout_server: Optional[pulumi.Input[str]] = None,
timeout_tunnel: Optional[pulumi.Input[str]] = None,
__props__=None):
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = LoadbalancerBackendArgs.__new__(LoadbalancerBackendArgs)
if forward_port is None and not opts.urn:
raise TypeError("Missing required property 'forward_port'")
__props__.__dict__["forward_port"] = forward_port
__props__.__dict__["forward_port_algorithm"] = forward_port_algorithm
if forward_protocol is None and not opts.urn:
raise TypeError("Missing required property 'forward_protocol'")
__props__.__dict__["forward_protocol"] = forward_protocol
__props__.__dict__["health_check_delay"] = health_check_delay
__props__.__dict__["health_check_http"] = health_check_http
__props__.__dict__["health_check_https"] = health_check_https
__props__.__dict__["health_check_max_retries"] = health_check_max_retries
__props__.__dict__["health_check_port"] = health_check_port
__props__.__dict__["health_check_tcp"] = health_check_tcp
__props__.__dict__["health_check_timeout"] = health_check_timeout
if lb_id is None and not opts.urn:
raise TypeError("Missing required property 'lb_id'")
__props__.__dict__["lb_id"] = lb_id
__props__.__dict__["name"] = name
__props__.__dict__["on_marked_down_action"] = on_marked_down_action
__props__.__dict__["proxy_protocol"] = proxy_protocol
if send_proxy_v2 is not None and not opts.urn:
warnings.warn("""Please use proxy_protocol instead""", DeprecationWarning)
pulumi.log.warn("""send_proxy_v2 is deprecated: Please use proxy_protocol instead""")
__props__.__dict__["send_proxy_v2"] = send_proxy_v2
__props__.__dict__["server_ips"] = server_ips
__props__.__dict__["sticky_sessions"] = sticky_sessions
__props__.__dict__["sticky_sessions_cookie_name"] = sticky_sessions_cookie_name
__props__.__dict__["timeout_connect"] = timeout_connect
__props__.__dict__["timeout_server"] = timeout_server
__props__.__dict__["timeout_tunnel"] = timeout_tunnel
super(LoadbalancerBackend, __self__).__init__(
'scaleway:index/loadbalancerBackend:LoadbalancerBackend',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None,
forward_port: Optional[pulumi.Input[int]] = None,
forward_port_algorithm: Optional[pulumi.Input[str]] = None,
forward_protocol: Optional[pulumi.Input[str]] = None,
health_check_delay: Optional[pulumi.Input[str]] = None,
health_check_http: Optional[pulumi.Input[pulumi.InputType['LoadbalancerBackendHealthCheckHttpArgs']]] = None,
health_check_https: Optional[pulumi.Input[pulumi.InputType['LoadbalancerBackendHealthCheckHttpsArgs']]] = None,
health_check_max_retries: Optional[pulumi.Input[int]] = None,
health_check_port: Optional[pulumi.Input[int]] = None,
health_check_tcp: Optional[pulumi.Input[pulumi.InputType['LoadbalancerBackendHealthCheckTcpArgs']]] = None,
health_check_timeout: Optional[pulumi.Input[str]] = None,
lb_id: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
on_marked_down_action: Optional[pulumi.Input[str]] = None,
proxy_protocol: Optional[pulumi.Input[str]] = None,
send_proxy_v2: Optional[pulumi.Input[bool]] = None,
server_ips: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
sticky_sessions: Optional[pulumi.Input[str]] = None,
sticky_sessions_cookie_name: Optional[pulumi.Input[str]] = None,
timeout_connect: Optional[pulumi.Input[str]] = None,
timeout_server: Optional[pulumi.Input[str]] = None,
timeout_tunnel: Optional[pulumi.Input[str]] = None) -> 'LoadbalancerBackend':
"""
Get an existing LoadbalancerBackend resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[int] forward_port: User sessions will be forwarded to this port of backend servers.
:param pulumi.Input[str] forward_port_algorithm: Load balancing algorithm. Possible values are: `roundrobin`, `leastconn` and `first`.
:param pulumi.Input[str] forward_protocol: Backend protocol. Possible values are: `tcp` or `http`.
:param pulumi.Input[str] health_check_delay: Interval between two HC requests.
:param pulumi.Input[pulumi.InputType['LoadbalancerBackendHealthCheckHttpArgs']] health_check_http: This block enable HTTP health check. Only one of `health_check_tcp`, `health_check_http` and `health_check_https` should be specified.
:param pulumi.Input[pulumi.InputType['LoadbalancerBackendHealthCheckHttpsArgs']] health_check_https: This block enable HTTPS health check. Only one of `health_check_tcp`, `health_check_http` and `health_check_https` should be specified.
:param pulumi.Input[int] health_check_max_retries: Number of allowed failed HC requests before the backend server is marked down.
:param pulumi.Input[int] health_check_port: Port the HC requests will be send to.
:param pulumi.Input[pulumi.InputType['LoadbalancerBackendHealthCheckTcpArgs']] health_check_tcp: This block enable TCP health check. Only one of `health_check_tcp`, `health_check_http` and `health_check_https` should be specified.
:param pulumi.Input[str] health_check_timeout: Timeout before we consider a HC request failed.
:param pulumi.Input[str] lb_id: The load-balancer ID this backend is attached to.
> **Important:** Updates to `lb_id` will recreate the backend.
:param pulumi.Input[str] name: The name of the load-balancer backend.
:param pulumi.Input[str] on_marked_down_action: Modify what occurs when a backend server is marked down. Possible values are: `none` and `shutdown_sessions`.
:param pulumi.Input[str] proxy_protocol: Choose the type of PROXY protocol to enable (`none`, `v1`, `v2`, `v2_ssl`, `v2_ssl_cn`)
:param pulumi.Input[bool] send_proxy_v2: DEPRECATED please use `proxy_protocol` instead - (Default: `false`) Enables PROXY protocol version 2.
:param pulumi.Input[Sequence[pulumi.Input[str]]] server_ips: List of backend server IP addresses. Addresses can be either IPv4 or IPv6.
:param pulumi.Input[str] sticky_sessions: Load balancing algorithm. Possible values are: `none`, `cookie` and `table`.
:param pulumi.Input[str] sticky_sessions_cookie_name: Cookie name for for sticky sessions. Only applicable when sticky_sessions is set to `cookie`.
:param pulumi.Input[str] timeout_connect: Maximum initial server connection establishment time. (e.g.: `1s`)
:param pulumi.Input[str] timeout_server: Maximum server connection inactivity time. (e.g.: `1s`)
:param pulumi.Input[str] timeout_tunnel: Maximum tunnel inactivity time. (e.g.: `1s`)
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = _LoadbalancerBackendState.__new__(_LoadbalancerBackendState)
__props__.__dict__["forward_port"] = forward_port
__props__.__dict__["forward_port_algorithm"] = forward_port_algorithm
__props__.__dict__["forward_protocol"] = forward_protocol
__props__.__dict__["health_check_delay"] = health_check_delay
__props__.__dict__["health_check_http"] = health_check_http
__props__.__dict__["health_check_https"] = health_check_https
__props__.__dict__["health_check_max_retries"] = health_check_max_retries
__props__.__dict__["health_check_port"] = health_check_port
__props__.__dict__["health_check_tcp"] = health_check_tcp
__props__.__dict__["health_check_timeout"] = health_check_timeout
__props__.__dict__["lb_id"] = lb_id
__props__.__dict__["name"] = name
__props__.__dict__["on_marked_down_action"] = on_marked_down_action
__props__.__dict__["proxy_protocol"] = proxy_protocol
__props__.__dict__["send_proxy_v2"] = send_proxy_v2
__props__.__dict__["server_ips"] = server_ips
__props__.__dict__["sticky_sessions"] = sticky_sessions
__props__.__dict__["sticky_sessions_cookie_name"] = sticky_sessions_cookie_name
__props__.__dict__["timeout_connect"] = timeout_connect
__props__.__dict__["timeout_server"] = timeout_server
__props__.__dict__["timeout_tunnel"] = timeout_tunnel
return LoadbalancerBackend(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter(name="forwardPort")
def forward_port(self) -> pulumi.Output[int]:
"""
User sessions will be forwarded to this port of backend servers.
"""
return pulumi.get(self, "forward_port")
@property
@pulumi.getter(name="forwardPortAlgorithm")
def forward_port_algorithm(self) -> pulumi.Output[Optional[str]]:
"""
Load balancing algorithm. Possible values are: `roundrobin`, `leastconn` and `first`.
"""
return pulumi.get(self, "forward_port_algorithm")
@property
@pulumi.getter(name="forwardProtocol")
def forward_protocol(self) -> pulumi.Output[str]:
"""
Backend protocol. Possible values are: `tcp` or `http`.
"""
return pulumi.get(self, "forward_protocol")
@property
@pulumi.getter(name="healthCheckDelay")
def health_check_delay(self) -> pulumi.Output[Optional[str]]:
"""
Interval between two HC requests.
"""
return pulumi.get(self, "health_check_delay")
@property
@pulumi.getter(name="healthCheckHttp")
def health_check_http(self) -> pulumi.Output[Optional['outputs.LoadbalancerBackendHealthCheckHttp']]:
"""
This block enable HTTP health check. Only one of `health_check_tcp`, `health_check_http` and `health_check_https` should be specified.
"""
return pulumi.get(self, "health_check_http")
@property
@pulumi.getter(name="healthCheckHttps")
def health_check_https(self) -> pulumi.Output[Optional['outputs.LoadbalancerBackendHealthCheckHttps']]:
"""
This block enable HTTPS health check. Only one of `health_check_tcp`, `health_check_http` and `health_check_https` should be specified.
"""
return pulumi.get(self, "health_check_https")
@property
@pulumi.getter(name="healthCheckMaxRetries")
def health_check_max_retries(self) -> pulumi.Output[Optional[int]]:
"""
Number of allowed failed HC requests before the backend server is marked down.
"""
return pulumi.get(self, "health_check_max_retries")
@property
@pulumi.getter(name="healthCheckPort")
def health_check_port(self) -> pulumi.Output[int]:
"""
Port the HC requests will be send to.
"""
return pulumi.get(self, "health_check_port")
@property
@pulumi.getter(name="healthCheckTcp")
def health_check_tcp(self) -> pulumi.Output['outputs.LoadbalancerBackendHealthCheckTcp']:
"""
This block enable TCP health check. Only one of `health_check_tcp`, `health_check_http` and `health_check_https` should be specified.
"""
return pulumi.get(self, "health_check_tcp")
@property
@pulumi.getter(name="healthCheckTimeout")
def health_check_timeout(self) -> pulumi.Output[Optional[str]]:
"""
Timeout before we consider a HC request failed.
"""
return pulumi.get(self, "health_check_timeout")
@property
@pulumi.getter(name="lbId")
def lb_id(self) -> pulumi.Output[str]:
"""
The load-balancer ID this backend is attached to.
> **Important:** Updates to `lb_id` will recreate the backend.
"""
return pulumi.get(self, "lb_id")
@property
@pulumi.getter
def name(self) -> pulumi.Output[str]:
"""
The name of the load-balancer backend.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="onMarkedDownAction")
def on_marked_down_action(self) -> pulumi.Output[Optional[str]]:
"""
Modify what occurs when a backend server is marked down. Possible values are: `none` and `shutdown_sessions`.
"""
return pulumi.get(self, "on_marked_down_action")
@property
@pulumi.getter(name="proxyProtocol")
def proxy_protocol(self) -> pulumi.Output[Optional[str]]:
"""
Choose the type of PROXY protocol to enable (`none`, `v1`, `v2`, `v2_ssl`, `v2_ssl_cn`)
"""
return pulumi.get(self, "proxy_protocol")
@property
@pulumi.getter(name="sendProxyV2")
def send_proxy_v2(self) -> pulumi.Output[Optional[bool]]:
"""
DEPRECATED please use `proxy_protocol` instead - (Default: `false`) Enables PROXY protocol version 2.
"""
return pulumi.get(self, "send_proxy_v2")
@property
@pulumi.getter(name="serverIps")
def server_ips(self) -> pulumi.Output[Optional[Sequence[str]]]:
"""
List of backend server IP addresses. Addresses can be either IPv4 or IPv6.
"""
return pulumi.get(self, "server_ips")
@property
@pulumi.getter(name="stickySessions")
def sticky_sessions(self) -> pulumi.Output[Optional[str]]:
"""
Load balancing algorithm. Possible values are: `none`, `cookie` and `table`.
"""
return pulumi.get(self, "sticky_sessions")
@property
@pulumi.getter(name="stickySessionsCookieName")
def sticky_sessions_cookie_name(self) -> pulumi.Output[Optional[str]]:
"""
Cookie name for for sticky sessions. Only applicable when sticky_sessions is set to `cookie`.
"""
return pulumi.get(self, "sticky_sessions_cookie_name")
@property
@pulumi.getter(name="timeoutConnect")
def timeout_connect(self) -> pulumi.Output[Optional[str]]:
"""
Maximum initial server connection establishment time. (e.g.: `1s`)
"""
return pulumi.get(self, "timeout_connect")
@property
@pulumi.getter(name="timeoutServer")
def timeout_server(self) -> pulumi.Output[Optional[str]]:
"""
Maximum server connection inactivity time. (e.g.: `1s`)
"""
return pulumi.get(self, "timeout_server")
@property
@pulumi.getter(name="timeoutTunnel")
def timeout_tunnel(self) -> pulumi.Output[Optional[str]]:
"""
Maximum tunnel inactivity time. (e.g.: `1s`)
"""
return pulumi.get(self, "timeout_tunnel")
| 49.98903
| 244
| 0.675119
| 7,026
| 59,237
| 5.417876
| 0.040848
| 0.088136
| 0.067672
| 0.064152
| 0.946961
| 0.937346
| 0.933668
| 0.927836
| 0.924
| 0.915016
| 0
| 0.004605
| 0.219187
| 59,237
| 1,184
| 245
| 50.03125
| 0.81839
| 0.319699
| 0
| 0.870588
| 1
| 0
| 0.151962
| 0.05896
| 0
| 0
| 0
| 0
| 0
| 1
| 0.164706
| false
| 0.001471
| 0.010294
| 0
| 0.273529
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
f29afe979e4ea3251ac98e02fc38754851d36142
| 76
|
py
|
Python
|
rajksimple/tests/test_nothing.py
|
sgergely/rajksimple
|
49f05bfd8129db341423facfd281b080cb95f66a
|
[
"MIT"
] | null | null | null |
rajksimple/tests/test_nothing.py
|
sgergely/rajksimple
|
49f05bfd8129db341423facfd281b080cb95f66a
|
[
"MIT"
] | null | null | null |
rajksimple/tests/test_nothing.py
|
sgergely/rajksimple
|
49f05bfd8129db341423facfd281b080cb95f66a
|
[
"MIT"
] | null | null | null |
from django.test import TestCase
def test_import():
import rajksimple
| 12.666667
| 32
| 0.763158
| 10
| 76
| 5.7
| 0.7
| 0.350877
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.184211
| 76
| 5
| 33
| 15.2
| 0.919355
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 1
| 0
| 1.333333
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
29c3574952ee42acb36c11263725ba1c2f489bfc
| 51
|
py
|
Python
|
cfn_get_export_value/__init__.py
|
PokaInc/cfn-get-export-value
|
b9558e6ed8e6a302a1c9f72e60894463f893df4a
|
[
"MIT"
] | 5
|
2018-10-10T14:03:54.000Z
|
2020-07-14T08:52:55.000Z
|
cfn_get_export_value/__init__.py
|
PokaInc/cfn-get-export-value
|
b9558e6ed8e6a302a1c9f72e60894463f893df4a
|
[
"MIT"
] | 1
|
2019-11-19T15:35:57.000Z
|
2019-11-19T15:35:57.000Z
|
cfn_get_export_value/__init__.py
|
PokaInc/cfn-get-export-value
|
b9558e6ed8e6a302a1c9f72e60894463f893df4a
|
[
"MIT"
] | 1
|
2017-04-11T14:21:36.000Z
|
2017-04-11T14:21:36.000Z
|
from .cfn_get_export_value import get_export_value
| 25.5
| 50
| 0.901961
| 9
| 51
| 4.555556
| 0.666667
| 0.439024
| 0.682927
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.078431
| 51
| 1
| 51
| 51
| 0.87234
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
29e142d6a16cd3c1387b751d3040abed9a17fbdf
| 45
|
py
|
Python
|
RevitPythonShell_Scripts/GoogleTools.extension/GoogleTools.tab/Info.Panel/Info.pushbutton/script.py
|
arupiot/create_revit_families
|
9beab3c7e242426b2dca99ca5477fdb433e39db2
|
[
"MIT"
] | 1
|
2021-02-04T18:20:58.000Z
|
2021-02-04T18:20:58.000Z
|
RevitPythonShell_Scripts/GoogleTools.extension/GoogleTools.tab/Info.Panel/Info.pushbutton/script.py
|
arupiot/DBOTools
|
9beab3c7e242426b2dca99ca5477fdb433e39db2
|
[
"MIT"
] | null | null | null |
RevitPythonShell_Scripts/GoogleTools.extension/GoogleTools.tab/Info.Panel/Info.pushbutton/script.py
|
arupiot/DBOTools
|
9beab3c7e242426b2dca99ca5477fdb433e39db2
|
[
"MIT"
] | null | null | null |
import sys
print(sys.version)
print(sys.path)
| 15
| 18
| 0.8
| 8
| 45
| 4.5
| 0.625
| 0.444444
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.066667
| 45
| 3
| 19
| 15
| 0.857143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.333333
| 0
| 0.333333
| 0.666667
| 1
| 1
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
|
0
| 7
|
d9b1af081b27537dfbd100e7b392535d29bc2e11
| 46,917
|
py
|
Python
|
riptable/benchmarks/bench_primops.py
|
972d5defe3218bd62b741e6a2f11f5b3/riptable
|
bb928c11752e831ec701f91964979b31db53826a
|
[
"BSD-2-Clause-Patent"
] | 307
|
2020-08-27T20:25:11.000Z
|
2022-03-08T15:51:19.000Z
|
riptable/benchmarks/bench_primops.py
|
972d5defe3218bd62b741e6a2f11f5b3/riptable
|
bb928c11752e831ec701f91964979b31db53826a
|
[
"BSD-2-Clause-Patent"
] | 206
|
2020-08-17T19:07:15.000Z
|
2022-03-18T11:53:55.000Z
|
riptable/benchmarks/bench_primops.py
|
972d5defe3218bd62b741e6a2f11f5b3/riptable
|
bb928c11752e831ec701f91964979b31db53826a
|
[
"BSD-2-Clause-Patent"
] | 10
|
2020-08-28T00:22:05.000Z
|
2021-04-30T20:22:28.000Z
|
# -*- coding: utf-8 -*-
"""
Benchmarks for primitive/low-level array operations.
"""
# Names exported by ``from ... import *``; the compare_* entries are defined
# later in this module.
__all__ = [
    "bench_astype",
    "bench_astype_numba",
    "bench_astype_numpy",
    "bench_bool_index",
    "bench_bool_index_numpy",
    "bench_compare_ops",
    "bench_compare_ops_numpy",
    "bench_mbget",
    "bench_mbget_numba",
    # comparisons
    "compare_astype",
    "compare_bool_index",
    "compare_compare_ops",
    "compare_mbget",
]
import itertools
import logging
import operator
from typing import List
import numpy as np
from numpy.random import default_rng
import numba as nb
from .benchmark import _timestamp_funcs
from .rand_data import rand_array, rand_fancyindex
from .runner import create_comparison_dataset, create_trial_dataset
from ..rt_enum import TypeRegister, NumpyCharTypes
from ..rt_dataset import Dataset
from ..rt_numpy import empty
from ..rt_utils import mbget, _mbget_2dims #, mbget_numba
logger = logging.getLogger(__name__)
"""The logger for this module."""

timestamper = _timestamp_funcs["get_nano_time"]
"""The timestamping function to use in benchmarks."""

# TODO: Additional benchmarks which would be useful for riptable development and comparison to other frameworks:
#  * mbget vs. numpy fancy indexing (only on valid array indices -- -len(arr) <= x < len(arr))
#  * mbget vs. numba-based equivalent to look for compiled code optimizations + thread scaling
#  * indexing with a boolean mask (riptable vs. numba)
#  * array conversion (i.e. elementwise type conversion) (arr1.astype(np.float32))
#    * make sure to include the self-conversion case so that we look for optimizations there (like just calling memcpy)
#  * equality and comparisons
#    * elementwise array equality (arr1 == arr2)
#    * array vs. scalar equality (arr == 123, arr == "foo", arr != '', etc.)
#    * elementwise array comparison (arr1 < arr2)
#    * array vs. scalar comparison (arr1 < 1.23, arr > 123, etc.)
#    * it would also be useful (for demonstration purposes) to demo here how much faster these operations
#      are on a string categorical compared to a normal array of strings (like the Categorical's .expand_array).
#  * conversion-assignment, e.g. result[:] = arr[:]
def mbget_numba(aValues, aIndex) -> np.ndarray:
    """
    Re-implementation of the 'mbget' fancy-indexing function with numba, for
    comparison with the riptide_cpp implementation.

    Parameters
    ----------
    aValues : list, tuple or np.ndarray
        Source array to gather from; lists/tuples are converted to FastArray.
    aIndex : list, tuple or np.ndarray
        Integer fancy index. Out-of-bounds entries produce the invalid value
        (see the numeric/string kernels below).

    Returns
    -------
    np.ndarray
        Gathered values, re-wrapped in the same array subclass as ``aValues``.

    Raises
    ------
    TypeError
        If the inputs are not (convertible to) numpy arrays, ``aValues`` is an
        object array, or ``aIndex`` is not an integer dtype.
    """
    # Make sure aValues and aIndex are both numpy arrays.
    if isinstance(aValues, (list, tuple)):
        aValues = TypeRegister.FastArray(aValues)
    if isinstance(aIndex, (list, tuple)):
        aIndex = TypeRegister.FastArray(aIndex)
    if not isinstance(aValues, np.ndarray) or not isinstance(aIndex, np.ndarray):
        raise TypeError(f"Values and index must be numpy arrays. Got {type(aValues)} {type(aIndex)}")
    elif aValues.dtype.char == 'O':
        raise TypeError(f"mbget does not support object types")
    elif aIndex.dtype.char not in NumpyCharTypes.AllInteger:
        raise TypeError(f"indices provided to mbget must be an integer type not {aIndex.dtype}")

    # 2-D arrays are handled by a separate (non-numba) helper.
    if aValues.ndim == 2:
        return _mbget_2dims(aValues, aIndex)

    # TODO: probably need special code or parameter to set custom default value for NAN_TIME
    if aValues.dtype.char in NumpyCharTypes.AllInteger + NumpyCharTypes.AllFloat:
        result = _mbget_numeric(aValues, aIndex)
    elif aValues.dtype.char in "SU":
        result = _mbget_string(aValues, aIndex)
    else:
        raise Exception(f"mbget can't operate on an array of this type: {aValues.dtype}")

    # Preserve the input's array subclass (e.g. FastArray) on the result.
    result = TypeRegister.newclassfrominstance(result, aValues)
    return result
def _mbget_numeric(aValues, aIndex) -> np.ndarray:
    """Gather numeric ``aValues`` at ``aIndex``; out-of-bounds entries get the array's invalid value."""
    out = empty(len(aIndex), dtype=aValues.dtype)
    # Dispatch on the signedness of the index dtype; see the comment in
    # _mbget_string for why signed/unsigned need separate numba kernels.
    if aIndex.dtype.kind == 'u':
        kernel = _mbget_numeric_unsigned_impl
    else:
        kernel = _mbget_numeric_signed_impl
    kernel(aValues, aIndex, out, out.inv)
    return out
@nb.njit(cache=True, parallel=True, nogil=True)
def _mbget_numeric_signed_impl(aValues, aIndex, result, default_val):
    # Parallel gather kernel for *signed* integer indices: indices in
    # [-len, len) copy from aValues (negative ones wrap, python-style),
    # anything else writes default_val (the array's invalid value).
    num_elmnts = len(aValues)
    for i in nb.prange(aIndex.shape[0]):
        # This has one less branch (in the code) than the riptide_cpp implementation of mbget,
        # because numba handles the negative/wraparound indexing for us. So the conditional logic
        # to handle the negative indexing is still there; it may or may not be in the generated
        # machine code depending on how numba chooses to generate it.
        index = aIndex[i]
        result[i] = aValues[index] if -num_elmnts <= index < num_elmnts else default_val
@nb.njit(cache=True, parallel=True, nogil=True)
def _mbget_numeric_unsigned_impl(aValues, aIndex, result, default_val):
    # Parallel gather kernel for *unsigned* integer indices: no negative
    # values are possible, so only the upper bound is checked; out-of-bounds
    # entries write default_val (the array's invalid value).
    num_elmnts = len(aValues)
    for i in nb.prange(aIndex.shape[0]):
        index = aIndex[i]
        result[i] = aValues[index] if index < num_elmnts else default_val
# Not using a default value here since we're handling byte strings only (default val. is 0).
def _mbget_string(aValues, aIndex) -> np.ndarray:
    """Gather string ``aValues`` at ``aIndex``; out-of-bounds entries are zero-filled."""
    result = empty(len(aIndex), dtype=aValues.dtype)
    # Width of one string element in bytes ('S' stores 1 byte/char, 'U' 4
    # bytes/char; dtype.itemsize already accounts for that). The kernels below
    # operate on flat uint8 views in strides of `itemsize`.
    # (The original code divided by 1 here, which was a no-op.)
    itemsize = aValues.dtype.itemsize
    # Choose different implementation for signed vs. unsigned dtype.
    # This is both for performance reasons and also because if we try to use the signed implementation
    # with unsigned integer types, numba ends up doing extra/unnecessary conversions so the performance
    # is poor; for that same reason, numba fails with an error on the uint64 type since it tries to cast
    # the index value to a float before we use it as an array index (which isn't allowed).
    # TODO: This decision could probably be pushed into the numba JIT-specialized generic so we don't need to choose here?
    _mbget_string_impl = _mbget_string_unsigned_impl if aIndex.dtype.kind == 'u' else _mbget_string_signed_impl
    _mbget_string_impl(aValues.view(np.uint8), aIndex, result.view(np.uint8), itemsize)
    return result
@nb.njit(cache=True, parallel=True, nogil=True)
def _mbget_string_signed_impl(aValues, aIndex, result, itemsize):  # byte array
    # Parallel gather kernel for string data with *signed* indices, operating
    # on flat uint8 views where each logical element is `itemsize` bytes wide.
    # Negative indices wrap (python-style); out-of-bounds entries zero-fill.
    numstrings = aValues.shape[0] // itemsize
    for i in nb.prange(aIndex.shape[0]):
        index = aIndex[i]
        if -numstrings <= index < numstrings:
            # Reuse the already-loaded `index` rather than re-reading aIndex[i].
            str_idx = index if index >= 0 else numstrings + index
            for j in range(itemsize):
                result[itemsize * i + j] = aValues[itemsize * str_idx + j]
        else:
            for j in range(itemsize):
                result[itemsize * i + j] = 0
@nb.njit(cache=True, parallel=True, nogil=True)
def _mbget_string_unsigned_impl(aValues, aIndex, result, itemsize):  # byte array
    # Parallel gather kernel for string data with *unsigned* indices, operating
    # on flat uint8 views where each logical element is `itemsize` bytes wide.
    # Only the upper bound needs checking; out-of-bounds entries zero-fill.
    numstrings = aValues.shape[0] // itemsize
    for i in nb.prange(aIndex.shape[0]):
        index = aIndex[i]
        if index < numstrings:
            for j in range(itemsize):
                result[itemsize * i + j] = aValues[itemsize * index + j]
        else:
            for j in range(itemsize):
                result[itemsize * i + j] = 0
def astype_numba(arr, dst_dtype):
    """
    Numba-based equivalent of ``ndarray.astype``, for benchmarking comparison.

    Parameters
    ----------
    arr : np.ndarray
        1-D numeric source array.
    dst_dtype : np.dtype or dtype-like
        Destination dtype; must be numeric.

    Returns
    -------
    The converted array.

    Raises
    ------
    TypeError
        If either dtype is a string type ('S'/'U'); only numeric-to-numeric
        conversions are supported.
    """
    # Accept dtype-likes (e.g. np.int32) as well as np.dtype instances.
    dst_dtype = np.dtype(dst_dtype)
    # Only numeric-to-numeric type conversions are supported.
    # (TypeError is a subclass of Exception, so callers catching the old
    # Exception still work.)
    if arr.dtype.char in "SU" or dst_dtype.char in "SU":
        raise TypeError("Only numeric-to-numeric type conversions are supported.")
    result = empty(arr.shape[0], dtype=dst_dtype)
    _astype_numba(arr, result)
    return result
# numba seems to emit poor quality code for this simple loop, and the performance is
# massively worse when parallel=True is specified. (Tested with numba 0.48, 0.50.1)
# Manually splitting the loop so the input data is chunked does not improve the performance either.
@nb.njit(cache=True, parallel=False, nogil=True)
def _astype_numba(arr, result):
    # Elementwise copy kernel; `result` must be preallocated with the
    # destination dtype and the same length as `arr`.
    for i in nb.prange(len(arr)):
        # conversion occurs implicitly, and numba only supports conversion
        # between arrays of numeric types.
        result[i] = arr[i]
def bench_bool_index(**kwargs) -> Dataset:
    """
    Benchmark riptable boolean-mask indexing (``data_array[mask]``) over a
    sweep of element dtypes, array lengths and mask true-ratios.

    ``**kwargs`` is accepted but unused here -- presumably kept for a uniform
    benchmark-runner signature (TODO confirm).

    Returns
    -------
    Dataset
        hstacked timing rows; one trial Dataset per parameter combination.
    """
    warmup_iters = 0
    iters = 21
    rng_seeds = [12345]
    data_dtypes = [
        np.int16,
        np.int32,
        np.float64,
        # TODO: Enable these additional data types; they're somewhat slow though, so we'd only want to
        #       run them under a 'detailed' / 'long-running' scenario
        # np.dtype('S4'),
        # np.dtype('S10'),
        # np.dtype('<U8')
    ]
    data_lengths = [
        100,
        1_000,
        10_000,
        100_000,
        1_000_000,
        10_000_000,
        # TODO: Add 100M, 1G and 2G -- these need to be optional since smaller machines will run out of memory
        #       and also take longer than typical trials
    ]
    true_ratios = [
        0.0,
        0.2,
        0.4,
        0.6,
        0.8,
        1.0
    ]
    setup_params = itertools.product(
        rng_seeds,
        data_dtypes,
        data_lengths,
        true_ratios
    )

    # Datasets containing timing data and parameters from the trials in this benchmark.
    benchmark_data: List[Dataset] = []

    for (
        rng_seed,
        data_dtype,
        data_length,
        true_ratio,
    ) in setup_params:
        # HACK: Until we have a better approach for supporting non-rectangular parameter spaces,
        #       or otherwise being able to skip certain combinations of parameters (e.g. because
        #       they're invalid, non-realistic, or otherwise don't make sense).
        # if np.iinfo(index_dtype).max < data_length:
        #     continue

        #
        # Setup phase. The data here is used for both the warmup and the real, timed function invocations.
        #

        # Make sure to re-initialize the RNG each time so we get a repeatable result.
        rng = default_rng(rng_seed)
        data_array = rand_array(rng, data_length, dtype=np.dtype(data_dtype))
        # A boolean mask where roughly `true_ratio` of the entries are True.
        mask = rng.random(data_length) < true_ratio

        # Sweep over other parameters that aren't required by the setup phase.
        other_params = [None]
        for _ in other_params:
            # Allocate an array to hold the raw timing data.
            # TODO: Change to use TimeSpan?
            timing_data = empty(iters, dtype=np.int64)

            for is_warmup in (True, False):
                loop_count = warmup_iters if is_warmup else iters

                for i in range(loop_count):
                    start_time_ns = timestamper()

                    ### The actual function invocation ###
                    _ = data_array[mask]

                    ### Store the timing results (if this was a real invocation).
                    call_nanos = timestamper() - start_time_ns
                    if not is_warmup:
                        timing_data[i] = call_nanos

            # Create a mini Dataset with the timing results for this run.
            # Capture the timing results along with the other options used for the function invocations.
            trial_data = create_trial_dataset(
                timing_data,
                {
                    # Setup parameters
                    "rng_seed": rng_seed,
                    "data_dtype": np.dtype(data_dtype),
                    "data_length": data_length,
                    "true_ratio": true_ratio,
                    # Other parameters
                    # (None)
                },
            )
            benchmark_data.append(trial_data)

    # hstack all of the individual Datasets together into one large Dataset and return it.
    return Dataset.hstack(benchmark_data, destroy=True)
def bench_bool_index_numpy(**kwargs) -> Dataset:
    """
    Benchmark boolean-mask indexing on plain numpy arrays, as a baseline for
    :func:`bench_bool_index`. The riptable array is unwrapped to its
    underlying numpy array (``._np``) when present before timing.

    ``**kwargs`` is accepted but unused here -- presumably kept for a uniform
    benchmark-runner signature (TODO confirm).

    Returns
    -------
    Dataset
        hstacked timing rows; one trial Dataset per parameter combination.
    """
    warmup_iters = 0
    iters = 21
    rng_seeds = [12345]
    data_dtypes = [
        np.int16,
        np.int32,
        np.float64,
        # TODO: Enable these additional data types; they're somewhat slow though, so we'd only want to
        #       run them under a 'detailed' / 'long-running' scenario
        # np.dtype('S4'),
        # np.dtype('S10'),
        # np.dtype('<U8')
    ]
    data_lengths = [
        100,
        1_000,
        10_000,
        100_000,
        1_000_000,
        10_000_000,
        # TODO: Add 100M, 1G and 2G -- these need to be optional since smaller machines will run out of memory
        #       and also take longer than typical trials
    ]
    true_ratios = [
        0.0,
        0.2,
        0.4,
        0.6,
        0.8,
        1.0
    ]
    setup_params = itertools.product(
        rng_seeds,
        data_dtypes,
        data_lengths,
        true_ratios
    )

    # Datasets containing timing data and parameters from the trials in this benchmark.
    benchmark_data: List[Dataset] = []

    for (
        rng_seed,
        data_dtype,
        data_length,
        true_ratio,
    ) in setup_params:
        # HACK: Until we have a better approach for supporting non-rectangular parameter spaces,
        #       or otherwise being able to skip certain combinations of parameters (e.g. because
        #       they're invalid, non-realistic, or otherwise don't make sense).
        # if np.iinfo(index_dtype).max < data_length:
        #     continue

        #
        # Setup phase. The data here is used for both the warmup and the real, timed function invocations.
        #

        # Make sure to re-initialize the RNG each time so we get a repeatable result.
        rng = default_rng(rng_seed)
        data_array = rand_array(rng, data_length, dtype=np.dtype(data_dtype))
        # Unwrap to the underlying numpy array so the timed call is pure numpy.
        if hasattr(data_array, "_np"):
            data_array = data_array._np
        mask = rng.random(data_length) < true_ratio

        # Sweep over other parameters that aren't required by the setup phase.
        other_params = [None]
        for _ in other_params:
            # Allocate an array to hold the raw timing data.
            # TODO: Change to use TimeSpan?
            timing_data = empty(iters, dtype=np.int64)

            for is_warmup in (True, False):
                loop_count = warmup_iters if is_warmup else iters

                for i in range(loop_count):
                    start_time_ns = timestamper()

                    ### The actual function invocation ###
                    _ = data_array[mask]

                    ### Store the timing results (if this was a real invocation).
                    call_nanos = timestamper() - start_time_ns
                    if not is_warmup:
                        timing_data[i] = call_nanos

            # Create a mini Dataset with the timing results for this run.
            # Capture the timing results along with the other options used for the function invocations.
            trial_data = create_trial_dataset(
                timing_data,
                {
                    # Setup parameters
                    "rng_seed": rng_seed,
                    "data_dtype": np.dtype(data_dtype),
                    "data_length": data_length,
                    "true_ratio": true_ratio,
                    # Other parameters
                    # (None)
                },
            )
            benchmark_data.append(trial_data)

    # hstack all of the individual Datasets together into one large Dataset and return it.
    return Dataset.hstack(benchmark_data, destroy=True)
def bench_mbget(**kwargs) -> Dataset:
    """
    Benchmark riptable's ``mbget`` fancy-indexing over sweeps of data/index
    dtypes, data/index lengths and invalid-entry ratios.

    ``**kwargs`` is accepted but unused here -- presumably kept for a uniform
    benchmark-runner signature (TODO confirm).

    Returns
    -------
    Dataset
        hstacked timing rows; one trial Dataset per parameter combination.
    """
    # TODO: Add additional dimensions:
    #   * number of threads
    #   * recycler on/off
    #   * different key multiplicity distributions (in the rand_fancyindex function)
    #   * different amounts of 'sortedness' of the fancy index (from the rand_fancyindex function)

    # Fixed parameters which apply to all of the trials in this benchmark.
    warmup_iters = 0
    iters = 21  # The duration of this function is (usually) fairly short, so the performance is prone to random noise -- using more iterations helps

    # Parameters we'll sweep over for the benchmark.
    rng_seeds = [12345]
    data_dtypes = [
        np.int16,
        np.int32,
        np.float64,
        # TODO: Enable these additional data types; they're somewhat slow though, so we'd only want to
        #       run them under a 'detailed' / 'long-running' scenario
        np.dtype('S11'),
    ]
    index_dtypes = [
        np.int8,
        np.uint8,
        np.int16,
        np.uint16,
        np.int32,
        np.uint32,
        np.int64,
        np.uint64,
        # TODO: Add float32 / float64 once rand_fancyindex() supports them
    ]
    data_lengths = [
        100,
        1_000,
        10_000,
        100_000,
        1_000_000,
        10_000_000,
        # TODO: Add 100M, 1G and 2G -- these need to be optional since smaller machines will run out of memory
        #       and also take longer than typical trials
    ]
    index_lengths = [10, 100, 1_000, 10_000, 100_000, 1_000_000, 10_000_000]
    invalid_ratios = [
        0.0,
        0.01,
        0.1,
        # TODO: Enable these additional values for the 'detailed' scenario
        # 0.5,
        # 0.9,
    ]
    setup_params = itertools.product(
        rng_seeds,
        data_dtypes,
        index_dtypes,
        data_lengths,
        index_lengths,
        invalid_ratios,
    )

    # Datasets containing timing data and parameters from the trials in this benchmark.
    benchmark_data: List[Dataset] = []

    for (
        rng_seed,
        data_dtype,
        index_dtype,
        data_length,
        index_length,
        invalid_ratio,
    ) in setup_params:
        # HACK: Until we have a better approach for supporting non-rectangular parameter spaces,
        #       or otherwise being able to skip certain combinations of parameters (e.g. because
        #       they're invalid, non-realistic, or otherwise don't make sense).
        # Skip combinations where the index dtype can't address the data array.
        if np.iinfo(index_dtype).max < data_length:
            continue

        #
        # Setup phase. The data here is used for both the warmup and the real, timed function invocations.
        #

        # Make sure to re-initialize the RNG each time so we get a repeatable result.
        rng = default_rng(rng_seed)
        data_array = rand_array(rng, data_length, dtype=np.dtype(data_dtype), invalid_ratio=invalid_ratio)
        fancyindex = rand_fancyindex(
            rng,
            index_length,
            dtype=np.dtype(index_dtype),
            source_arr_len=data_length,
            invalid_ratio=invalid_ratio,
        )

        # Sweep over other parameters that aren't required by the setup phase.
        other_params = [None]
        for _ in other_params:
            # Allocate an array to hold the raw timing data.
            # TODO: Change to use TimeSpan?
            timing_data = empty(iters, dtype=np.int64)

            for is_warmup in (True, False):
                loop_count = warmup_iters if is_warmup else iters

                for i in range(loop_count):
                    start_time_ns = timestamper()

                    ### The actual function invocation ###
                    mbget(data_array, fancyindex)

                    ### Store the timing results (if this was a real invocation).
                    call_nanos = timestamper() - start_time_ns
                    if not is_warmup:
                        timing_data[i] = call_nanos

            # Create a mini Dataset with the timing results for this run.
            # Capture the timing results along with the other options used for the function invocations.
            trial_data = create_trial_dataset(
                timing_data,
                {
                    # Setup parameters
                    "rng_seed": rng_seed,
                    "data_dtype": np.dtype(data_dtype),
                    "index_dtype": np.dtype(index_dtype),
                    "data_length": data_length,
                    "index_length": index_length,
                    "invalid_ratio": invalid_ratio,
                    # Other parameters
                    # (None)
                },
            )
            benchmark_data.append(trial_data)

    # hstack all of the individual Datasets together into one large Dataset and return it.
    return Dataset.hstack(benchmark_data, destroy=True)
def bench_mbget_numba(**kwargs) -> Dataset:
    """
    Benchmark the numba-based ``mbget_numba`` re-implementation over the same
    parameter sweep as :func:`bench_mbget`. Uses one warmup iteration so JIT
    compilation is excluded from the timed runs.

    ``**kwargs`` is accepted but unused here -- presumably kept for a uniform
    benchmark-runner signature (TODO confirm).

    Returns
    -------
    Dataset
        hstacked timing rows; one trial Dataset per parameter combination.
    """
    # TODO: Add additional dimensions:
    #   * number of threads
    #   * recycler on/off
    #   * different key multiplicity distributions (in the rand_fancyindex function)
    #   * different amounts of 'sortedness' of the fancy index (from the rand_fancyindex function)

    # Fixed parameters which apply to all of the trials in this benchmark.
    warmup_iters = 1
    iters = 21  # The duration of this function is (usually) fairly short, so the performance is prone to random noise -- using more iterations helps

    # Parameters we'll sweep over for the benchmark.
    rng_seeds = [12345]
    data_dtypes = [
        np.int16,
        np.int32,
        np.float64,
        # TODO: Enable these additional data types; they're somewhat slow though, so we'd only want to
        #       run them under a 'detailed' / 'long-running' scenario
        np.dtype('S11'),
    ]
    index_dtypes = [
        np.int8,
        np.uint8,
        np.int16,
        np.uint16,
        np.int32,
        np.uint32,
        np.int64,
        np.uint64,
        # TODO: Add float32 / float64 once rand_fancyindex() supports them
    ]
    data_lengths = [
        100,
        1_000,
        10_000,
        100_000,
        1_000_000,
        10_000_000,
        # TODO: Add 100M, 1G and 2G -- these need to be optional since smaller machines will run out of memory
        #       and also take longer than typical trials
    ]
    index_lengths = [10, 100, 1_000, 10_000, 100_000, 1_000_000, 10_000_000]
    invalid_ratios = [
        0.0,
        0.01,
        0.1,
        # TODO: Enable these additional values for the 'detailed' scenario
        # 0.5,
        # 0.9,
    ]
    setup_params = itertools.product(
        rng_seeds,
        data_dtypes,
        index_dtypes,
        data_lengths,
        index_lengths,
        invalid_ratios,
    )

    # Datasets containing timing data and parameters from the trials in this benchmark.
    benchmark_data: List[Dataset] = []

    for (
        rng_seed,
        data_dtype,
        index_dtype,
        data_length,
        index_length,
        invalid_ratio,
    ) in setup_params:
        # HACK: Until we have a better approach for supporting non-rectangular parameter spaces,
        #       or otherwise being able to skip certain combinations of parameters (e.g. because
        #       they're invalid, non-realistic, or otherwise don't make sense).
        # Skip combinations where the index dtype can't address the data array.
        if np.iinfo(index_dtype).max < data_length:
            continue

        #
        # Setup phase. The data here is used for both the warmup and the real, timed function invocations.
        #

        # Make sure to re-initialize the RNG each time so we get a repeatable result.
        rng = default_rng(rng_seed)
        data_array = rand_array(rng, data_length, dtype=np.dtype(data_dtype), invalid_ratio=invalid_ratio)
        fancyindex = rand_fancyindex(
            rng,
            index_length,
            dtype=np.dtype(index_dtype),
            source_arr_len=data_length,
            invalid_ratio=invalid_ratio,
        )

        # Sweep over other parameters that aren't required by the setup phase.
        other_params = [None]
        for _ in other_params:
            # Allocate an array to hold the raw timing data.
            # TODO: Change to use TimeSpan?
            timing_data = empty(iters, dtype=np.int64)

            for is_warmup in (True, False):
                loop_count = warmup_iters if is_warmup else iters

                for i in range(loop_count):
                    start_time_ns = timestamper()

                    ### The actual function invocation ###
                    mbget_numba(data_array, fancyindex)

                    ### Store the timing results (if this was a real invocation).
                    call_nanos = timestamper() - start_time_ns
                    if not is_warmup:
                        timing_data[i] = call_nanos

            # Create a mini Dataset with the timing results for this run.
            # Capture the timing results along with the other options used for the function invocations.
            trial_data = create_trial_dataset(
                timing_data,
                {
                    # Setup parameters
                    "rng_seed": rng_seed,
                    "data_dtype": np.dtype(data_dtype),
                    "index_dtype": np.dtype(index_dtype),
                    "data_length": data_length,
                    "index_length": index_length,
                    "invalid_ratio": invalid_ratio,
                    # Other parameters
                    # (None)
                },
            )
            benchmark_data.append(trial_data)

    # hstack all of the individual Datasets together into one large Dataset and return it.
    return Dataset.hstack(benchmark_data, destroy=True)
def bench_astype(**kwargs) -> Dataset:
    """
    Benchmark riptable array ``.astype`` conversions over sweeps of source and
    destination dtypes, array lengths and invalid-entry ratios (including the
    self-conversion case, since src and dst sweeps overlap).

    ``**kwargs`` is accepted but unused here -- presumably kept for a uniform
    benchmark-runner signature (TODO confirm).

    Returns
    -------
    Dataset
        hstacked timing rows; one trial Dataset per parameter combination.
    """
    # TODO: Add additional dimensions:
    #   * number of threads
    #   * recycler on/off
    #   * different key multiplicity distributions (in the rand_fancyindex function)
    #   * different amounts of 'sortedness' of the fancy index (from the rand_fancyindex function)

    # Fixed parameters which apply to all of the trials in this benchmark.
    warmup_iters = 0
    iters = 21  # The duration of this function is (usually) fairly short, so the performance is prone to random noise -- using more iterations helps

    # Parameters we'll sweep over for the benchmark.
    rng_seeds = [12345]
    src_dtypes = [
        np.int16,
        np.int32,
        np.float64,
        # np.dtype('S11'),
    ]
    dst_dtypes = [
        np.int16,
        np.int32,
        np.float64,
        # np.dtype('S11'),
    ]
    data_lengths = [
        100,
        1_000,
        10_000,
        100_000,
        1_000_000,
        10_000_000,
        100_000_000
        # TODO: Add 100M, 1G and 2G -- these need to be optional since smaller machines will run out of memory
        #       and also take longer than typical trials
    ]
    invalid_ratios = [
        0.0,
        0.01,
        0.1,
        # TODO: Enable these additional values for the 'detailed' scenario
        # 0.5,
        # 0.9,
    ]
    setup_params = itertools.product(
        rng_seeds,
        src_dtypes,
        dst_dtypes,
        data_lengths,
        invalid_ratios,
    )

    # Datasets containing timing data and parameters from the trials in this benchmark.
    benchmark_data: List[Dataset] = []

    for (
        rng_seed,
        src_dtype,
        dst_dtype,
        data_length,
        invalid_ratio,
    ) in setup_params:
        # Setup phase. The data here is used for both the warmup and the real, timed function invocations.
        #
        # Make sure to re-initialize the RNG each time so we get a repeatable result.
        rng = default_rng(rng_seed)
        data_array = rand_array(rng, data_length, dtype=np.dtype(src_dtype), invalid_ratio=invalid_ratio)

        # Sweep over other parameters that aren't required by the setup phase.
        other_params = [None]
        for _ in other_params:
            # Allocate an array to hold the raw timing data.
            # TODO: Change to use TimeSpan?
            timing_data = empty(iters, dtype=np.int64)

            for is_warmup in (True, False):
                loop_count = warmup_iters if is_warmup else iters

                for i in range(loop_count):
                    start_time_ns = timestamper()

                    ### The actual function invocation ###
                    data_array.astype(dtype=dst_dtype)

                    ### Store the timing results (if this was a real invocation).
                    call_nanos = timestamper() - start_time_ns
                    if not is_warmup:
                        timing_data[i] = call_nanos

            # Create a mini Dataset with the timing results for this run.
            # Capture the timing results along with the other options used for the function invocations.
            trial_data = create_trial_dataset(
                timing_data,
                {
                    # Setup parameters
                    "rng_seed": rng_seed,
                    "src_dtype": np.dtype(src_dtype),
                    "dst_dtype": np.dtype(dst_dtype),
                    "data_length": data_length,
                    "invalid_ratio": invalid_ratio,
                    # Other parameters
                    # (None)
                },
            )
            benchmark_data.append(trial_data)

    # hstack all of the individual Datasets together into one large Dataset and return it.
    return Dataset.hstack(benchmark_data, destroy=True)
def bench_astype_numpy(**kwargs) -> Dataset:
    """
    Benchmark ``.astype`` conversions on plain numpy arrays, as a baseline for
    :func:`bench_astype`. The riptable array is unwrapped to its underlying
    numpy array (``._np``) when present before timing.

    ``**kwargs`` is accepted but unused here -- presumably kept for a uniform
    benchmark-runner signature (TODO confirm).

    Returns
    -------
    Dataset
        hstacked timing rows; one trial Dataset per parameter combination.
    """
    # TODO: Add additional dimensions:
    #   * number of threads
    #   * recycler on/off
    #   * different key multiplicity distributions (in the rand_fancyindex function)
    #   * different amounts of 'sortedness' of the fancy index (from the rand_fancyindex function)

    # Fixed parameters which apply to all of the trials in this benchmark.
    warmup_iters = 0
    iters = 21  # The duration of this function is (usually) fairly short, so the performance is prone to random noise -- using more iterations helps

    # Parameters we'll sweep over for the benchmark.
    rng_seeds = [12345]
    src_dtypes = [
        np.int16,
        np.int32,
        np.float64,
        # np.dtype('S11'),
    ]
    dst_dtypes = [
        np.int16,
        np.int32,
        np.float64,
        # np.dtype('S11'),
    ]
    data_lengths = [
        100,
        1_000,
        10_000,
        100_000,
        1_000_000,
        10_000_000,
        100_000_000
        # TODO: Add 100M, 1G and 2G -- these need to be optional since smaller machines will run out of memory
        #       and also take longer than typical trials
    ]
    invalid_ratios = [
        0.0,
        0.01,
        0.1,
        # TODO: Enable these additional values for the 'detailed' scenario
        # 0.5,
        # 0.9,
    ]
    setup_params = itertools.product(
        rng_seeds,
        src_dtypes,
        dst_dtypes,
        data_lengths,
        invalid_ratios,
    )

    # Datasets containing timing data and parameters from the trials in this benchmark.
    benchmark_data: List[Dataset] = []

    for (
        rng_seed,
        src_dtype,
        dst_dtype,
        data_length,
        invalid_ratio,
    ) in setup_params:
        # Setup phase. The data here is used for both the warmup and the real, timed function invocations.
        #
        # Make sure to re-initialize the RNG each time so we get a repeatable result.
        rng = default_rng(rng_seed)
        data_array = rand_array(rng, data_length, dtype=np.dtype(src_dtype), invalid_ratio=invalid_ratio)
        # Unwrap to the underlying numpy array so the timed call is pure numpy.
        if hasattr(data_array, '_np'):
            data_array = data_array._np

        # Sweep over other parameters that aren't required by the setup phase.
        other_params = [None]
        for _ in other_params:
            # Allocate an array to hold the raw timing data.
            # TODO: Change to use TimeSpan?
            timing_data = empty(iters, dtype=np.int64)

            for is_warmup in (True, False):
                loop_count = warmup_iters if is_warmup else iters

                for i in range(loop_count):
                    start_time_ns = timestamper()

                    ### The actual function invocation ###
                    data_array.astype(dtype=dst_dtype)

                    ### Store the timing results (if this was a real invocation).
                    call_nanos = timestamper() - start_time_ns
                    if not is_warmup:
                        timing_data[i] = call_nanos

            # Create a mini Dataset with the timing results for this run.
            # Capture the timing results along with the other options used for the function invocations.
            trial_data = create_trial_dataset(
                timing_data,
                {
                    # Setup parameters
                    "rng_seed": rng_seed,
                    "src_dtype": np.dtype(src_dtype),
                    "dst_dtype": np.dtype(dst_dtype),
                    "data_length": data_length,
                    "invalid_ratio": invalid_ratio,
                    # Other parameters
                    # (None)
                },
            )
            benchmark_data.append(trial_data)

    # hstack all of the individual Datasets together into one large Dataset and return it.
    return Dataset.hstack(benchmark_data, destroy=True)
def bench_astype_numba(**kwargs) -> Dataset:
    """
    Benchmark the numba-based ``astype_numba`` conversion, as a comparison for
    :func:`bench_astype`. Uses one warmup iteration so JIT compilation is
    excluded from the timed runs; only numeric dtypes are swept since the
    numba kernel doesn't support string conversions.

    ``**kwargs`` is accepted but unused here -- presumably kept for a uniform
    benchmark-runner signature (TODO confirm).

    Returns
    -------
    Dataset
        hstacked timing rows; one trial Dataset per parameter combination.
    """
    # TODO: Add additional dimensions:
    #   * number of threads
    #   * recycler on/off
    #   * different key multiplicity distributions (in the rand_fancyindex function)
    #   * different amounts of 'sortedness' of the fancy index (from the rand_fancyindex function)

    # Fixed parameters which apply to all of the trials in this benchmark.
    warmup_iters = 1
    iters = 21  # The duration of this function is (usually) fairly short, so the performance is prone to random noise -- using more iterations helps

    # Parameters we'll sweep over for the benchmark.
    rng_seeds = [12345]
    src_dtypes = [
        np.int16,
        np.int32,
        np.float64,
    ]
    dst_dtypes = [
        np.int16,
        np.int32,
        np.float64,
    ]
    data_lengths = [
        100,
        1_000,
        10_000,
        100_000,
        1_000_000,
        10_000_000,
        100_000_000
        # TODO: Add 100M, 1G and 2G -- these need to be optional since smaller machines will run out of memory
        #       and also take longer than typical trials
    ]
    invalid_ratios = [
        0.0,
        0.01,
        0.1,
        # TODO: Enable these additional values for the 'detailed' scenario
        # 0.5,
        # 0.9,
    ]
    setup_params = itertools.product(
        rng_seeds,
        src_dtypes,
        dst_dtypes,
        data_lengths,
        invalid_ratios,
    )

    # Datasets containing timing data and parameters from the trials in this benchmark.
    benchmark_data: List[Dataset] = []

    for (
        rng_seed,
        src_dtype,
        dst_dtype,
        data_length,
        invalid_ratio,
    ) in setup_params:
        # Setup phase. The data here is used for both the warmup and the real, timed function invocations.
        #
        # Make sure to re-initialize the RNG each time so we get a repeatable result.
        rng = default_rng(rng_seed)
        data_array = rand_array(rng, data_length, dtype=np.dtype(src_dtype), invalid_ratio=invalid_ratio)

        # Sweep over other parameters that aren't required by the setup phase.
        other_params = [None]
        for _ in other_params:
            # Allocate an array to hold the raw timing data.
            # TODO: Change to use TimeSpan?
            timing_data = empty(iters, dtype=np.int64)

            for is_warmup in (True, False):
                loop_count = warmup_iters if is_warmup else iters

                for i in range(loop_count):
                    start_time_ns = timestamper()

                    ### The actual function invocation ###
                    astype_numba(data_array, np.dtype(dst_dtype))

                    ### Store the timing results (if this was a real invocation).
                    call_nanos = timestamper() - start_time_ns
                    if not is_warmup:
                        timing_data[i] = call_nanos

            # Create a mini Dataset with the timing results for this run.
            # Capture the timing results along with the other options used for the function invocations.
            trial_data = create_trial_dataset(
                timing_data,
                {
                    # Setup parameters
                    "rng_seed": rng_seed,
                    "src_dtype": np.dtype(src_dtype),
                    "dst_dtype": np.dtype(dst_dtype),
                    "data_length": data_length,
                    "invalid_ratio": invalid_ratio
                    # Other parameters
                    # (None)
                },
            )
            benchmark_data.append(trial_data)

    # hstack all of the individual Datasets together into one large Dataset and return it.
    return Dataset.hstack(benchmark_data, destroy=True)
def bench_compare_ops(**kwargs) -> Dataset:
    """
    Benchmark elementwise comparison operators (eq/ne/lt/le/ge/gt) between two
    riptable arrays, over sweeps of operand dtypes, array lengths and
    invalid-entry ratios.

    ``**kwargs`` is accepted but unused here -- presumably kept for a uniform
    benchmark-runner signature (TODO confirm).

    Returns
    -------
    Dataset
        hstacked timing rows; one trial Dataset per parameter combination.
    """
    # TODO: Add additional dimensions:
    #   * number of threads
    #   * recycler on/off
    #   * different key multiplicity distributions (in the rand_fancyindex function)
    #   * different amounts of 'sortedness' of the fancy index (from the rand_fancyindex function)

    # Fixed parameters which apply to all of the trials in this benchmark.
    warmup_iters = 0
    iters = 21  # The duration of this function is (usually) fairly short, so the performance is prone to random noise -- using more iterations helps

    # Parameters we'll sweep over for the benchmark.
    rng_seeds = [12345]
    arr1_dtypes = [
        np.int16,
        np.int32,
        np.float64,
    ]
    arr2_dtypes = [
        np.int16,
        np.int32,
        np.float64,
    ]
    data_lengths = [
        100,
        1_000,
        10_000,
        100_000,
        1_000_000,
        10_000_000,
        # TODO: Add 100M, 1G and 2G -- these need to be optional since smaller machines will run out of memory
        #       and also take longer than typical trials
    ]
    invalid_ratios = [
        0.0,
        0.01,
        0.1,
        # TODO: Enable these additional values for the 'detailed' scenario
        # 0.5,
        # 0.9,
    ]
    ops = [
        operator.eq,
        operator.ne,
        operator.lt,
        operator.le,
        operator.ge,
        operator.gt
    ]
    setup_params = itertools.product(
        rng_seeds,
        arr1_dtypes,
        arr2_dtypes,
        data_lengths,
        invalid_ratios,
        ops,
    )

    # Datasets containing timing data and parameters from the trials in this benchmark.
    benchmark_data: List[Dataset] = []

    for (
        rng_seed,
        arr1_dtype,
        arr2_dtype,
        data_length,
        invalid_ratio,
        op,
    ) in setup_params:
        # Setup phase. The data here is used for both the warmup and the real, timed function invocations.
        #
        # Make sure to re-initialize the RNG each time so we get a repeatable result.
        rng = default_rng(rng_seed)
        arr1 = rand_array(rng, data_length, dtype=np.dtype(arr1_dtype), invalid_ratio=invalid_ratio)
        arr2 = rand_array(rng, data_length, dtype=np.dtype(arr2_dtype), invalid_ratio=invalid_ratio)

        # Sweep over other parameters that aren't required by the setup phase.
        other_params = [None]
        for _ in other_params:
            # Allocate an array to hold the raw timing data.
            # TODO: Change to use TimeSpan?
            timing_data = empty(iters, dtype=np.int64)

            for is_warmup in (True, False):
                loop_count = warmup_iters if is_warmup else iters

                for i in range(loop_count):
                    start_time_ns = timestamper()

                    ### The actual function invocation ###
                    op(arr1, arr2)

                    ### Store the timing results (if this was a real invocation).
                    call_nanos = timestamper() - start_time_ns
                    if not is_warmup:
                        timing_data[i] = call_nanos

            # Create a mini Dataset with the timing results for this run.
            # Capture the timing results along with the other options used for the function invocations.
            trial_data = create_trial_dataset(
                timing_data,
                {
                    # Setup parameters
                    "rng_seed": rng_seed,
                    "arr1_dtype": np.dtype(arr1_dtype),
                    "arr2_dtype": np.dtype(arr2_dtype),
                    "operation": op.__name__,
                    "data_length": data_length,
                    "invalid_ratio": invalid_ratio,
                    # Other parameters
                    # (None)
                },
            )
            benchmark_data.append(trial_data)

    # hstack all of the individual Datasets together into one large Dataset and return it.
    return Dataset.hstack(benchmark_data, destroy=True)
def bench_compare_ops_numpy(**kwargs):
    """Benchmark element-wise comparison operators (eq/ne/lt/le/ge/gt) on raw
    numpy arrays, sweeping over operand dtypes and array lengths.

    Returns a Dataset with one column per recorded parameter plus the raw
    per-invocation timings (nanoseconds) collected for every trial.
    """
    # TODO: Add additional dimensions:
    # * number of threads
    # * recycler on/off
    # * different key multiplicity distributions (in the rand_fancyindex function)
    # * different amounts of 'sortedness' of the fancy index (from the rand_fancyindex function)

    # Fixed parameters which apply to all of the trials in this benchmark.
    warmup_iters = 0
    iters = 21 # This duration of this function is (usually) fairly short, so the performance is prone to random noise -- using more iterations helps

    # Parameters we'll sweep over for the benchmark.
    rng_seeds = [12345]
    arr1_dtypes = [
        np.int16,
        np.int32,
        np.float64,
    ]
    arr2_dtypes = [
        np.int16,
        np.int32,
        np.float64,
    ]
    data_lengths = [
        100,
        1_000,
        10_000,
        100_000,
        1_000_000,
        10_000_000,
        # TODO: Add 100M, 1G and 2G -- these need to be optional since smaller machines will run out of memory
        # and also take longer than typical trials
    ]
    ops = [
        operator.eq,
        operator.ne,
        operator.lt,
        operator.le,
        operator.ge,
        operator.gt
    ]
    # Cartesian product of all sweep parameters; one trial per combination.
    setup_params = itertools.product(
        rng_seeds,
        arr1_dtypes,
        arr2_dtypes,
        data_lengths,
        ops,
    )

    # Datasets containing timing data and parameters from the trials in this benchmark.
    benchmark_data: List[Dataset] = []
    for (
        rng_seed,
        arr1_dtype,
        arr2_dtype,
        data_length,
        op,
    ) in setup_params:
        # Setup phase. The data here is used for both the warmup and the real, timed function invocations.
        #
        # Make sure to re-initialize the RNG each time so we get a repeatable result.
        rng = default_rng(rng_seed)
        # NOTE(review): `._np` appears to extract the underlying numpy array so
        # the comparison runs through plain numpy -- confirm against
        # rand_array's return type.
        arr1 = rand_array(rng, data_length, dtype=np.dtype(arr1_dtype))._np
        arr2 = rand_array(rng, data_length, dtype=np.dtype(arr2_dtype))._np

        # Sweep over other parameters that aren't required by the setup phase.
        # (Currently there are none; the single-element list keeps the loop
        # structure parallel with the other benchmarks in this file.)
        other_params = [None]
        for _ in other_params:
            # Allocate an array to hold the raw timing data.
            # TODO: Change to use TimeSpan?
            timing_data = empty(iters, dtype=np.int64)

            for is_warmup in (True, False):
                loop_count = warmup_iters if is_warmup else iters

                for i in range(loop_count):
                    start_time_ns = timestamper()
                    # Invoke the actual function being benchmarked; the result
                    # is intentionally discarded.
                    op(arr1, arr2)
                    # Store the timing results (if this was a real invocation).
                    call_nanos = timestamper() - start_time_ns
                    if not is_warmup:
                        timing_data[i] = call_nanos

            # Create a mini Dataset with the timing results for this run.
            # Capture the timing results along with the other options used for the function invocations.
            trial_data = create_trial_dataset(
                timing_data,
                {
                    # Setup parameters
                    "rng_seed": rng_seed,
                    "arr1_dtype": np.dtype(arr1_dtype),
                    "arr2_dtype": np.dtype(arr2_dtype),
                    "operation": op.__name__,
                    "data_length": data_length,
                    # Other parameters
                    # (None)
                },
            )
            benchmark_data.append(trial_data)

    # hstack all of the individual Datasets together into one large Dataset and return it.
    return Dataset.hstack(benchmark_data, destroy=True)
def compare_mbget():
    """Run both mbget benchmark variants and combine their results for comparison."""
    benchmarks = {
        "mbget": bench_mbget(),
        "mbget_numba": bench_mbget_numba(),
    }
    return create_comparison_dataset(benchmarks)
def compare_astype():
    """Run all astype benchmark variants and combine their results for comparison."""
    benchmarks = {
        "astype": bench_astype(),
        "astype_numpy": bench_astype_numpy(),
        "astype_numba": bench_astype_numba(),
    }
    return create_comparison_dataset(benchmarks)
def compare_bool_index():
    """Run both boolean-indexing benchmark variants and combine their results."""
    benchmarks = {
        "bool_index": bench_bool_index(),
        "bool_index_numpy": bench_bool_index_numpy(),
    }
    return create_comparison_dataset(benchmarks)
def compare_compare_ops():
    """Run both comparison-operator benchmark variants and combine their results."""
    benchmarks = {
        "compare_ops": bench_compare_ops(),
        "compare_ops_numpy": bench_compare_ops_numpy(),
    }
    return create_comparison_dataset(benchmarks)
| 36.482893
| 151
| 0.580621
| 5,594
| 46,917
| 4.709331
| 0.084912
| 0.017461
| 0.01321
| 0.00911
| 0.838217
| 0.824324
| 0.822199
| 0.822199
| 0.818061
| 0.8151
| 0
| 0.027362
| 0.349553
| 46,917
| 1,286
| 152
| 36.482893
| 0.835895
| 0.372807
| 0
| 0.755359
| 0
| 0
| 0.04019
| 0.001628
| 0
| 0
| 0
| 0.001555
| 0
| 1
| 0.027743
| false
| 0
| 0.017654
| 0.005044
| 0.068096
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
d9d4b1002f9b1ccc71b8608b343f5db0fdc777a2
| 375
|
py
|
Python
|
src/dbxdeploy/git/CurrentBranchResolver.py
|
DataSentics/dbx-deploy
|
c019d80a2b947b4f180071e97c3981daa3a2214a
|
[
"MIT"
] | null | null | null |
src/dbxdeploy/git/CurrentBranchResolver.py
|
DataSentics/dbx-deploy
|
c019d80a2b947b4f180071e97c3981daa3a2214a
|
[
"MIT"
] | null | null | null |
src/dbxdeploy/git/CurrentBranchResolver.py
|
DataSentics/dbx-deploy
|
c019d80a2b947b4f180071e97c3981daa3a2214a
|
[
"MIT"
] | null | null | null |
from dbxdeploy.git.CurrentRepositoryFactory import CurrentRepositoryFactory
class CurrentBranchResolver:
    """Resolves the short name of the currently checked-out git branch."""

    def __init__(
        self,
        currentRepositoryFactory: CurrentRepositoryFactory,
    ):
        self.__currentRepositoryFactory = currentRepositoryFactory

    def resolve(self) -> str:
        """Return the current branch's short name (HEAD shorthand)."""
        repository = self.__currentRepositoryFactory.create()
        return repository.head.shorthand
| 28.846154
| 75
| 0.76
| 26
| 375
| 10.653846
| 0.615385
| 0.303249
| 0.375451
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.181333
| 375
| 12
| 76
| 31.25
| 0.90228
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.222222
| false
| 0
| 0.111111
| 0.111111
| 0.555556
| 0
| 1
| 0
| 1
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
d9e7d0114d8541eab0b2fb262f57ca08eba71f1b
| 227
|
py
|
Python
|
config.py
|
ArtrixTech/ChiaPlotMaster
|
7169fa82e4ab93734d97bd1c2bb81023c067c248
|
[
"MIT"
] | null | null | null |
config.py
|
ArtrixTech/ChiaPlotMaster
|
7169fa82e4ab93734d97bd1c2bb81023c067c248
|
[
"MIT"
] | null | null | null |
config.py
|
ArtrixTech/ChiaPlotMaster
|
7169fa82e4ab93734d97bd1c2bb81023c067c248
|
[
"MIT"
] | null | null | null |
# Key material passed to the Chia plotter.
# NOTE(review): PPK/FPK presumably stand for the pool public key and farmer
# public key (hex-encoded BLS public keys with an 0x prefix) -- confirm against
# the plotter invocation that consumes these values. These look like public
# keys rather than secrets, but verify before committing real credentials here.
PLOTTER_PPK='0x986659cb40f005486c477fb7ad826b49542b266202b22ec8c38777d0fc94e5c129ca61013a305d16f347fc391f072013'
PLOTTER_FPK='0x86b09bd44d57c90e46e9fdacb7f7b250e9cec688b4476389e4eb70ba0f84fca1af91114972e62c23fbc1c41b6c1f8320'
| 56.75
| 112
| 0.960352
| 6
| 227
| 36
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.571429
| 0.013216
| 227
| 3
| 113
| 75.666667
| 0.392857
| 0
| 0
| 0
| 0
| 0
| 0.867257
| 0.867257
| 0
| 1
| 0.867257
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
d9f2e08c6fe23aa37a5b7108e0b47f005a393c9a
| 266,085
|
py
|
Python
|
tests/hwsim/test_ap_hs20.py
|
redstone1980/hostap
|
ecea69a36f4badd5dcdeee00210b07709b6ecbc7
|
[
"Unlicense"
] | null | null | null |
tests/hwsim/test_ap_hs20.py
|
redstone1980/hostap
|
ecea69a36f4badd5dcdeee00210b07709b6ecbc7
|
[
"Unlicense"
] | null | null | null |
tests/hwsim/test_ap_hs20.py
|
redstone1980/hostap
|
ecea69a36f4badd5dcdeee00210b07709b6ecbc7
|
[
"Unlicense"
] | null | null | null |
# Hotspot 2.0 tests
# Copyright (c) 2013-2019, Jouni Malinen <j@w1.fi>
#
# This software may be distributed under the terms of the BSD license.
# See README for more details.
from remotehost import remote_compatible
import base64
import binascii
import struct
import time
import logging
logger = logging.getLogger()
import os
import os.path
import socket
import subprocess
import hostapd
from utils import *
import hwsim_utils
from tshark import run_tshark
from wlantest import Wlantest
from wpasupplicant import WpaSupplicant
from wlantest import WlantestCapture
from test_ap_eap import check_eap_capa, check_domain_match_full
from test_gas import gas_rx, parse_gas, action_response, anqp_initial_resp, send_gas_resp, ACTION_CATEG_PUBLIC, GAS_INITIAL_RESPONSE
def hs20_ap_params(ssid="test-hs20"):
    """Build hostapd parameters for a Hotspot 2.0 capable AP.

    Starts from a WPA2-EAP configuration and layers the Interworking and
    Hotspot 2.0 settings on top. Callers typically tweak or delete individual
    entries before adding the AP.
    """
    params = hostapd.wpa2_params(ssid=ssid)
    params.update({
        # 802.1X / RADIUS authentication
        'wpa_key_mgmt': "WPA-EAP",
        'ieee80211w': "1",
        'ieee8021x': "1",
        'auth_server_addr': "127.0.0.1",
        'auth_server_port': "1812",
        'auth_server_shared_secret': "radius",
        # Interworking (IEEE 802.11u) elements
        'interworking': "1",
        'access_network_type': "14",
        'internet': "1",
        'asra': "0",
        'esr': "0",
        'uesa': "0",
        'venue_group': "7",
        'venue_type': "1",
        'venue_name': ["eng:Example venue", "fin:Esimerkkipaikka"],
        'roaming_consortium': ["112233", "1020304050", "010203040506",
                               "fedcba"],
        'domain_name': "example.com,another.example.com",
        'nai_realm': ["0,example.com,13[5:6],21[2:4][5:7]",
                      "0,another.example.com"],
        # Hotspot 2.0 elements
        'hs20': "1",
        'hs20_wan_metrics': "01:8000:1000:80:240:3000",
        'hs20_conn_capab': ["1:0:2", "6:22:1", "17:5060:0"],
        'hs20_operating_class': "5173",
        'anqp_3gpp_cell_net': "244,91",
    })
    return params
def check_auto_select(dev, bssid):
    """Run automatic Interworking network selection and verify *bssid* is chosen.

    Scans for the AP, issues an automatic INTERWORKING_SELECT, waits for the
    connection to complete, and then removes the network again so the device
    is left disconnected.
    """
    dev.scan_for_bss(bssid, freq="2412")
    dev.request("INTERWORKING_SELECT auto freq=2412")
    ev = dev.wait_connected(timeout=15)
    if bssid not in ev:
        raise Exception("Connected to incorrect network")
    dev.request("REMOVE_NETWORK all")
    dev.wait_disconnected()
    dev.dump_monitor()
def interworking_select(dev, bssid, type=None, no_match=False, freq=None):
    """Run INTERWORKING_SELECT and validate the resulting match event.

    If *no_match* is True, expects INTERWORKING-NO-MATCH; otherwise expects an
    INTERWORKING-AP event (retrying the selection once if the first attempt
    reports no match), optionally checking that *bssid* and the network
    *type* (e.g. "home"/"roaming") appear in the event.
    """
    dev.dump_monitor()
    if bssid and freq and not no_match:
        dev.scan_for_bss(bssid, freq=freq)
    freq_extra = " freq=" + str(freq) if freq else ""
    dev.request("INTERWORKING_SELECT" + freq_extra)
    ev = dev.wait_event(["INTERWORKING-AP", "INTERWORKING-NO-MATCH"],
                        timeout=15)
    if ev is None:
        raise Exception("Network selection timed out")
    if no_match:
        if "INTERWORKING-NO-MATCH" not in ev:
            raise Exception("Unexpected network match")
        return
    if "INTERWORKING-NO-MATCH" in ev:
        # A transient no-match can happen (e.g. scan results not yet in);
        # retry the selection once before failing.
        logger.info("Matching network not found - try again")
        dev.dump_monitor()
        dev.request("INTERWORKING_SELECT" + freq_extra)
        ev = dev.wait_event(["INTERWORKING-AP", "INTERWORKING-NO-MATCH"],
                            timeout=15)
        if ev is None:
            raise Exception("Network selection timed out")
        if "INTERWORKING-NO-MATCH" in ev:
            raise Exception("Matching network not found")
    if bssid and bssid not in ev:
        raise Exception("Unexpected BSSID in match")
    if type and "type=" + type not in ev:
        raise Exception("Network type not recognized correctly")
def check_sp_type(dev, sp_type):
    """Verify that the connection's sp_type status field equals *sp_type*."""
    actual = dev.get_status_field("sp_type")
    if actual is None:
        raise Exception("sp_type not available")
    if actual != sp_type:
        raise Exception("sp_type did not indicate %s network" % sp_type)
def hlr_auc_gw_available():
    """Skip the test if the hlr_auc_gw helper or its control socket is missing."""
    requirements = (("/tmp/hlr_auc_gw.sock", "No hlr_auc_gw socket available"),
                    ("../../hostapd/hlr_auc_gw", "No hlr_auc_gw available"))
    for path, message in requirements:
        if not os.path.exists(path):
            raise HwsimSkip(message)
def interworking_ext_sim_connect(dev, bssid, method):
    # Start the Interworking connection, then complete external SIM auth.
    dev.request("INTERWORKING_CONNECT " + bssid)
    interworking_ext_sim_auth(dev, method)

def interworking_ext_sim_auth(dev, method):
    """Complete EAP authentication using external (simulated) SIM processing.

    Waits for wpa_supplicant's CTRL-REQ-SIM request, runs the GSM-AUTH
    challenge through the external hlr_auc_gw helper binary, and feeds the
    response back via CTRL-RSP-SIM before waiting for the connection.
    """
    ev = dev.wait_event(["CTRL-EVENT-EAP-METHOD"], timeout=15)
    if ev is None:
        raise Exception("Network connected timed out")
    if "(" + method + ")" not in ev:
        raise Exception("Unexpected EAP method selection")

    ev = dev.wait_event(["CTRL-REQ-SIM"], timeout=15)
    if ev is None:
        raise Exception("Wait for external SIM processing request timed out")
    # Event looks like "CTRL-REQ-SIM-<id>:GSM-AUTH:<rand ...>"; split into at
    # most three pieces so the RAND payload keeps any embedded colons.
    p = ev.split(':', 2)
    if p[1] != "GSM-AUTH":
        raise Exception("Unexpected CTRL-REQ-SIM type")
    id = p[0].split('-')[3]
    rand = p[2].split(' ')[0]

    res = subprocess.check_output(["../../hostapd/hlr_auc_gw",
                                   "-m",
                                   "auth_serv/hlr_auc_gw.milenage_db",
                                   "GSM-AUTH-REQ 232010000000000 " + rand]).decode()
    if "GSM-AUTH-RESP" not in res:
        raise Exception("Unexpected hlr_auc_gw response")
    resp = res.split(' ')[2].rstrip()

    dev.request("CTRL-RSP-SIM-" + id + ":GSM-AUTH:" + resp)
    dev.wait_connected(timeout=15)

def interworking_connect(dev, bssid, method):
    # Start the Interworking connection, then complete normal EAP auth.
    dev.request("INTERWORKING_CONNECT " + bssid)
    interworking_auth(dev, method)

def interworking_auth(dev, method):
    """Wait for the expected EAP *method* to be selected and for connection."""
    ev = dev.wait_event(["CTRL-EVENT-EAP-METHOD"], timeout=15)
    if ev is None:
        raise Exception("Network connected timed out")
    if "(" + method + ")" not in ev:
        raise Exception("Unexpected EAP method selection")
    dev.wait_connected(timeout=15)
def check_probe_resp(wt, bssid_unexpected, bssid_expected):
    """Verify Probe Response counters captured by wlantest.

    The AP *bssid_unexpected* (if given) must not have sent any Probe
    Response frames; the AP *bssid_expected* (if given) must have sent at
    least one.
    """
    if bssid_unexpected:
        if wt.get_bss_counter("probe_response", bssid_unexpected) > 0:
            raise Exception("Unexpected Probe Response frame from AP")

    if bssid_expected:
        if wt.get_bss_counter("probe_response", bssid_expected) == 0:
            raise Exception("No Probe Response frame from AP")
def test_ap_anqp_sharing(dev, apdev):
    """ANQP sharing within ESS and explicit unshare"""
    check_eap_capa(dev[0], "MSCHAPV2")
    dev[0].flush_scan_cache()

    # Two APs in the same ESS (same HESSID); AP2 advertises a reduced
    # nai_realm list so shared vs. per-BSS ANQP data can be told apart.
    bssid = apdev[0]['bssid']
    params = hs20_ap_params()
    params['hessid'] = bssid
    hostapd.add_ap(apdev[0], params)

    bssid2 = apdev[1]['bssid']
    params = hs20_ap_params()
    params['hessid'] = bssid
    params['nai_realm'] = ["0,example.com,13[5:6],21[2:4][5:7]"]
    hostapd.add_ap(apdev[1], params)

    dev[0].hs20_enable()
    id = dev[0].add_cred_values({'realm': "example.com", 'username': "test",
                                 'password': "secret",
                                 'domain': "example.com"})
    logger.info("Normal network selection with shared ANQP results")
    dev[0].scan_for_bss(bssid, freq="2412")
    dev[0].scan_for_bss(bssid2, freq="2412")
    interworking_select(dev[0], None, "home", freq="2412")
    dev[0].dump_monitor()
    state = dev[0].get_status_field('wpa_state')
    if state != "DISCONNECTED":
        raise Exception("Unexpected wpa_state after INTERWORKING_SELECT: " + state)

    # With sharing, both BSS entries should hold identical NAI Realm data.
    logger.debug("BSS entries:\n" + dev[0].request("BSS RANGE=ALL"))
    res1 = dev[0].get_bss(bssid)
    res2 = dev[0].get_bss(bssid2)
    if 'anqp_nai_realm' not in res1:
        raise Exception("anqp_nai_realm not found for AP1")
    if 'anqp_nai_realm' not in res2:
        raise Exception("anqp_nai_realm not found for AP2")
    if res1['anqp_nai_realm'] != res2['anqp_nai_realm']:
        raise Exception("ANQP results were not shared between BSSes")

    logger.info("Explicit ANQP request to unshare ANQP results")
    # Direct per-BSS queries (ANQP info id 263 = NAI Realm list) should
    # replace the shared entries with each AP's own data.
    dev[0].request("ANQP_GET " + bssid + " 263")
    ev = dev[0].wait_event(["RX-ANQP"], timeout=5)
    if ev is None:
        raise Exception("ANQP operation timed out")
    dev[0].request("ANQP_GET " + bssid2 + " 263")
    ev = dev[0].wait_event(["RX-ANQP"], timeout=5)
    if ev is None:
        raise Exception("ANQP operation timed out")
    res1 = dev[0].get_bss(bssid)
    res2 = dev[0].get_bss(bssid2)
    if res1['anqp_nai_realm'] == res2['anqp_nai_realm']:
        raise Exception("ANQP results were not unshared")
def test_ap_anqp_domain_id(dev, apdev):
    """ANQP Domain ID"""
    check_eap_capa(dev[0], "MSCHAPV2")
    dev[0].flush_scan_cache()

    # Two APs with the same HESSID and the same ANQP Domain ID.
    bssid = apdev[0]['bssid']
    params = hs20_ap_params()
    params['hessid'] = bssid
    params['anqp_domain_id'] = '1234'
    hostapd.add_ap(apdev[0], params)

    bssid2 = apdev[1]['bssid']
    params = hs20_ap_params()
    params['hessid'] = bssid
    params['anqp_domain_id'] = '1234'
    hostapd.add_ap(apdev[1], params)

    dev[0].hs20_enable()
    id = dev[0].add_cred_values({'realm': "example.com", 'username': "test",
                                 'password': "secret",
                                 'domain': "example.com"})
    dev[0].scan_for_bss(bssid, freq="2412")
    dev[0].scan_for_bss(bssid2, freq="2412")
    interworking_select(dev[0], None, "home", freq="2412")

def test_ap_anqp_no_sharing_diff_ess(dev, apdev):
    """ANQP no sharing between ESSs"""
    check_eap_capa(dev[0], "MSCHAPV2")
    dev[0].flush_scan_cache()

    bssid = apdev[0]['bssid']
    params = hs20_ap_params()
    params['hessid'] = bssid
    hostapd.add_ap(apdev[0], params)

    # Different SSID -> different ESS, so ANQP data must not be shared even
    # though the HESSID matches.
    bssid2 = apdev[1]['bssid']
    params = hs20_ap_params(ssid="test-hs20-another")
    params['hessid'] = bssid
    params['nai_realm'] = ["0,example.com,13[5:6],21[2:4][5:7]"]
    hostapd.add_ap(apdev[1], params)

    dev[0].hs20_enable()
    id = dev[0].add_cred_values({'realm': "example.com", 'username': "test",
                                 'password': "secret",
                                 'domain': "example.com"})
    logger.info("Normal network selection with shared ANQP results")
    dev[0].scan_for_bss(bssid, freq="2412")
    dev[0].scan_for_bss(bssid2, freq="2412")
    interworking_select(dev[0], None, "home", freq="2412")

def test_ap_anqp_no_sharing_missing_info(dev, apdev):
    """ANQP no sharing due to missing information"""
    check_eap_capa(dev[0], "MSCHAPV2")
    dev[0].flush_scan_cache()

    # AP1 is stripped of most ANQP elements, so its results cannot be used
    # as a source for sharing.
    bssid = apdev[0]['bssid']
    params = hs20_ap_params()
    params['hessid'] = bssid
    del params['roaming_consortium']
    del params['domain_name']
    del params['anqp_3gpp_cell_net']
    del params['nai_realm']
    hostapd.add_ap(apdev[0], params)

    bssid2 = apdev[1]['bssid']
    params = hs20_ap_params()
    params['hessid'] = bssid
    params['nai_realm'] = ["0,example.com,13[5:6],21[2:4][5:7]"]
    hostapd.add_ap(apdev[1], params)

    dev[0].hs20_enable()
    id = dev[0].add_cred_values({'realm': "example.com", 'username': "test",
                                 'password': "secret",
                                 'domain': "example.com"})
    logger.info("Normal network selection with shared ANQP results")
    dev[0].scan_for_bss(bssid, freq="2412")
    dev[0].scan_for_bss(bssid2, freq="2412")
    interworking_select(dev[0], None, "home", freq="2412")

def test_ap_anqp_sharing_oom(dev, apdev):
    """ANQP sharing within ESS and explicit unshare OOM"""
    check_eap_capa(dev[0], "MSCHAPV2")
    dev[0].flush_scan_cache()

    bssid = apdev[0]['bssid']
    params = hs20_ap_params()
    params['hessid'] = bssid
    hostapd.add_ap(apdev[0], params)

    bssid2 = apdev[1]['bssid']
    params = hs20_ap_params()
    params['hessid'] = bssid
    params['nai_realm'] = ["0,example.com,13[5:6],21[2:4][5:7]"]
    hostapd.add_ap(apdev[1], params)

    dev[0].hs20_enable()
    id = dev[0].add_cred_values({'realm': "example.com", 'username': "test",
                                 'password': "secret",
                                 'domain': "example.com"})
    dev[0].scan_for_bss(bssid, freq="2412")
    dev[0].scan_for_bss(bssid2, freq="2412")
    interworking_select(dev[0], None, "home", freq="2412")
    dev[0].dump_monitor()

    # Exercise the allocation-failure path when cloning shared ANQP data.
    with alloc_fail(dev[0], 1, "wpa_bss_anqp_clone"):
        dev[0].request("ANQP_GET " + bssid + " 263")
        ev = dev[0].wait_event(["RX-ANQP"], timeout=5)
        if ev is None:
            raise Exception("ANQP operation timed out")
def test_ap_nai_home_realm_query(dev, apdev):
    """NAI Home Realm Query"""
    check_eap_capa(dev[0], "MSCHAPV2")
    bssid = apdev[0]['bssid']
    params = hs20_ap_params()
    params['nai_realm'] = ["0,example.com,13[5:6],21[2:4][5:7]",
                           "0,another.example.org"]
    hostapd.add_ap(apdev[0], params)

    dev[0].scan(freq="2412")
    # Targeted home-realm query: the response should only cover example.com.
    dev[0].request("HS20_GET_NAI_HOME_REALM_LIST " + bssid + " realm=example.com")
    ev = dev[0].wait_event(["RX-ANQP"], timeout=5)
    if ev is None:
        raise Exception("ANQP operation timed out")
    nai1 = dev[0].get_bss(bssid)['anqp_nai_realm']
    dev[0].dump_monitor()

    # Full NAI Realm list (ANQP info id 263) should include all realms.
    dev[0].request("ANQP_GET " + bssid + " 263")
    ev = dev[0].wait_event(["RX-ANQP"], timeout=5)
    if ev is None:
        raise Exception("ANQP operation timed out")
    nai2 = dev[0].get_bss(bssid)['anqp_nai_realm']

    # The anqp_nai_realm field is hex-encoded, hence the hexlify comparisons.
    if len(nai1) >= len(nai2):
        raise Exception("Unexpected NAI Realm list response lengths")
    if binascii.hexlify(b"example.com").decode() not in nai1:
        raise Exception("Home realm not reported")
    if binascii.hexlify(b"example.org").decode() in nai1:
        raise Exception("Non-home realm reported")
    if binascii.hexlify(b"example.com").decode() not in nai2:
        raise Exception("Home realm not reported in wildcard query")
    if binascii.hexlify(b"example.org").decode() not in nai2:
        raise Exception("Non-home realm not reported in wildcard query ")

    # Malformed command arguments must be rejected.
    cmds = ["foo",
            "00:11:22:33:44:55 123",
            "00:11:22:33:44:55 qq"]
    for cmd in cmds:
        if "FAIL" not in dev[0].request("HS20_GET_NAI_HOME_REALM_LIST " + cmd):
            raise Exception("Invalid HS20_GET_NAI_HOME_REALM_LIST accepted: " + cmd)

    dev[0].dump_monitor()
    # Without a realm argument and no credentials configured, the GAS query
    # completes but no ANQP response is expected.
    if "OK" not in dev[0].request("HS20_GET_NAI_HOME_REALM_LIST " + bssid):
        raise Exception("HS20_GET_NAI_HOME_REALM_LIST failed")
    ev = dev[0].wait_event(["GAS-QUERY-DONE"], timeout=10)
    if ev is None:
        raise Exception("ANQP operation timed out")
    ev = dev[0].wait_event(["RX-ANQP"], timeout=0.1)
    if ev is not None:
        raise Exception("Unexpected ANQP response: " + ev)

    dev[0].dump_monitor()
    # Explicit hex-encoded home realm payload ("example.com").
    if "OK" not in dev[0].request("HS20_GET_NAI_HOME_REALM_LIST " + bssid + " 01000b6578616d706c652e636f6d"):
        raise Exception("HS20_GET_NAI_HOME_REALM_LIST failed")
    ev = dev[0].wait_event(["RX-ANQP"], timeout=10)
    if ev is None:
        raise Exception("No ANQP response")
    if "NAI Realm list" not in ev:
        raise Exception("Missing NAI Realm list: " + ev)

    # With a credential configured, the query can derive the realm from it.
    dev[0].add_cred_values({'realm': "example.com", 'username': "test",
                            'password': "secret",
                            'domain': "example.com"})
    dev[0].dump_monitor()
    if "OK" not in dev[0].request("HS20_GET_NAI_HOME_REALM_LIST " + bssid):
        raise Exception("HS20_GET_NAI_HOME_REALM_LIST failed")
    ev = dev[0].wait_event(["RX-ANQP"], timeout=10)
    if ev is None:
        raise Exception("No ANQP response")
    if "NAI Realm list" not in ev:
        raise Exception("Missing NAI Realm list: " + ev)
@remote_compatible
def test_ap_interworking_scan_filtering(dev, apdev):
    """Interworking scan filtering with HESSID and access network type"""
    try:
        _test_ap_interworking_scan_filtering(dev, apdev)
    finally:
        # Restore defaults so later tests are not affected.
        dev[0].request("SET hessid 00:00:00:00:00:00")
        dev[0].request("SET access_network_type 15")

def _test_ap_interworking_scan_filtering(dev, apdev):
    # AP1: default access network type (14), HESSID = its own BSSID.
    bssid = apdev[0]['bssid']
    params = hs20_ap_params()
    ssid = "test-hs20-ap1"
    params['ssid'] = ssid
    params['hessid'] = bssid
    hapd0 = hostapd.add_ap(apdev[0], params)

    # AP2: access network type 1, HESSID = its own BSSID, no venue info.
    bssid2 = apdev[1]['bssid']
    params = hs20_ap_params()
    ssid2 = "test-hs20-ap2"
    params['ssid'] = ssid2
    params['hessid'] = bssid2
    params['access_network_type'] = "1"
    del params['venue_group']
    del params['venue_type']
    hostapd.add_ap(apdev[1], params)

    dev[0].hs20_enable()

    Wlantest.setup(hapd0)
    wt = Wlantest()
    wt.flush()

    # Make sure wlantest has seen both BSSs to avoid issues in trying to clear
    # counters for non-existing BSS.
    dev[0].scan_for_bss(bssid, freq="2412")
    dev[0].scan_for_bss(bssid2, freq="2412")
    wt.clear_bss_counters(bssid)
    wt.clear_bss_counters(bssid2)

    logger.info("Check probe request filtering based on HESSID")
    dev[0].request("SET hessid " + bssid2)
    dev[0].scan(freq="2412")
    time.sleep(0.03)
    check_probe_resp(wt, bssid, bssid2)

    logger.info("Check probe request filtering based on access network type")
    wt.clear_bss_counters(bssid)
    wt.clear_bss_counters(bssid2)
    dev[0].request("SET hessid 00:00:00:00:00:00")
    dev[0].request("SET access_network_type 14")
    dev[0].scan(freq="2412")
    time.sleep(0.03)
    check_probe_resp(wt, bssid2, bssid)

    wt.clear_bss_counters(bssid)
    wt.clear_bss_counters(bssid2)
    dev[0].request("SET hessid 00:00:00:00:00:00")
    dev[0].request("SET access_network_type 1")
    dev[0].scan(freq="2412")
    time.sleep(0.03)
    check_probe_resp(wt, bssid, bssid2)

    logger.info("Check probe request filtering based on HESSID and ANT")
    wt.clear_bss_counters(bssid)
    wt.clear_bss_counters(bssid2)
    dev[0].request("SET hessid " + bssid)
    dev[0].request("SET access_network_type 14")
    dev[0].scan(freq="2412")
    time.sleep(0.03)
    check_probe_resp(wt, bssid2, bssid)

    # Mismatching HESSID+ANT combinations: neither AP should respond.
    wt.clear_bss_counters(bssid)
    wt.clear_bss_counters(bssid2)
    dev[0].request("SET hessid " + bssid2)
    dev[0].request("SET access_network_type 14")
    dev[0].scan(freq="2412")
    time.sleep(0.03)
    check_probe_resp(wt, bssid, None)
    check_probe_resp(wt, bssid2, None)

    wt.clear_bss_counters(bssid)
    wt.clear_bss_counters(bssid2)
    dev[0].request("SET hessid " + bssid)
    dev[0].request("SET access_network_type 1")
    dev[0].scan(freq="2412")
    time.sleep(0.03)
    check_probe_resp(wt, bssid, None)
    check_probe_resp(wt, bssid2, None)
def test_ap_hs20_select(dev, apdev):
    """Hotspot 2.0 network selection"""
    bssid = apdev[0]['bssid']
    params = hs20_ap_params()
    params['hessid'] = bssid
    hostapd.add_ap(apdev[0], params)

    dev[0].hs20_enable()
    # Credential with matching domain -> classified as "home" network.
    id = dev[0].add_cred_values({'realm': "example.com", 'username': "test",
                                 'password': "secret",
                                 'domain': "example.com"})
    interworking_select(dev[0], bssid, "home")
    dev[0].remove_cred(id)

    # Matching realm but non-matching domain -> "roaming" network.
    id = dev[0].add_cred_values({'realm': "example.com", 'username': "test",
                                 'password': "secret",
                                 'domain': "no.match.example.com"})
    interworking_select(dev[0], bssid, "roaming", freq="2412")

    # Non-matching realm as well -> no match at all.
    dev[0].set_cred_quoted(id, "realm", "no.match.example.com")
    interworking_select(dev[0], bssid, no_match=True, freq="2412")

    res = dev[0].request("SCAN_RESULTS")
    if "[HS20]" not in res:
        raise Exception("HS20 flag missing from scan results: " + res)

    # Second AP advertising example.org; a matching credential selects it.
    bssid2 = apdev[1]['bssid']
    params = hs20_ap_params()
    params['nai_realm'] = ["0,example.org,21"]
    params['hessid'] = bssid2
    params['domain_name'] = "example.org"
    hostapd.add_ap(apdev[1], params)
    dev[0].remove_cred(id)
    id = dev[0].add_cred_values({'realm': "example.org", 'username': "test",
                                 'password': "secret",
                                 'domain': "example.org"})
    interworking_select(dev[0], bssid2, "home", freq="2412")
def hs20_simulated_sim(dev, ap, method):
    """Set up an AP with 3GPP info plus a Milenage SIM credential and connect
    using the given EAP *method* ("SIM"/"AKA"/"AKA'")."""
    bssid = ap['bssid']
    params = hs20_ap_params()
    params['hessid'] = bssid
    params['anqp_3gpp_cell_net'] = "555,444"
    params['domain_name'] = "wlan.mnc444.mcc555.3gppnetwork.org"
    hostapd.add_ap(ap, params)

    dev.hs20_enable()
    dev.add_cred_values({'imsi': "555444-333222111", 'eap': method,
                         'milenage': "5122250214c33e723a5dd523fc145fc0:981d464c7c52eb6e5036234984ad0bcf:000000000123"})
    interworking_select(dev, bssid, "home", freq="2412")
    interworking_connect(dev, bssid, method)
    check_sp_type(dev, "home")

def test_ap_hs20_sim(dev, apdev):
    """Hotspot 2.0 with simulated SIM and EAP-SIM"""
    hlr_auc_gw_available()
    hs20_simulated_sim(dev[0], apdev[0], "SIM")
    # Re-running auto selection while connected should report the
    # already-connected condition rather than reconnecting.
    dev[0].request("INTERWORKING_SELECT auto freq=2412")
    ev = dev[0].wait_event(["INTERWORKING-ALREADY-CONNECTED"], timeout=15)
    if ev is None:
        raise Exception("Timeout on already-connected event")

def test_ap_hs20_sim_invalid(dev, apdev):
    """Hotspot 2.0 with simulated SIM and EAP-SIM - invalid IMSI"""
    hlr_auc_gw_available()
    bssid = apdev[0]['bssid']
    params = hs20_ap_params()
    params['hessid'] = bssid
    params['anqp_3gpp_cell_net'] = "555,444"
    params['domain_name'] = "wlan.mnc444.mcc555.3gppnetwork.org"
    hostapd.add_ap(apdev[0], params)

    dev[0].hs20_enable()
    # IMSI has one digit too many to be valid.
    dev[0].add_cred_values({'imsi': "555444-3332221110", 'eap': "SIM",
                            'milenage': "5122250214c33e723a5dd523fc145fc0:981d464c7c52eb6e5036234984ad0bcf:000000000123"})
    # This hits "No valid IMSI available" in build_root_nai()
    interworking_select(dev[0], bssid, freq="2412")

def test_ap_hs20_sim_oom(dev, apdev):
    """Hotspot 2.0 with simulated SIM and EAP-SIM - OOM"""
    hlr_auc_gw_available()
    bssid = apdev[0]['bssid']
    params = hs20_ap_params()
    params['hessid'] = bssid
    params['anqp_3gpp_cell_net'] = "555,444"
    params['domain_name'] = "wlan.mnc444.mcc555.3gppnetwork.org"
    hostapd.add_ap(apdev[0], params)

    dev[0].hs20_enable()
    dev[0].add_cred_values({'imsi': "555444-333222111", 'eap': "SIM",
                            'milenage': "5122250214c33e723a5dd523fc145fc0:981d464c7c52eb6e5036234984ad0bcf:000000000123"})
    dev[0].scan_for_bss(bssid, freq=2412)
    interworking_select(dev[0], bssid, freq="2412")

    # Exercise allocation-failure paths during 3GPP connection setup.
    with alloc_fail(dev[0], 1, "wpa_config_add_network;interworking_connect_3gpp"):
        dev[0].request("INTERWORKING_CONNECT " + bssid)
        wait_fail_trigger(dev[0], "GET_ALLOC_FAIL")

    with alloc_fail(dev[0], 1, "=interworking_connect_3gpp"):
        dev[0].request("INTERWORKING_CONNECT " + bssid)
        wait_fail_trigger(dev[0], "GET_ALLOC_FAIL")

def test_ap_hs20_aka(dev, apdev):
    """Hotspot 2.0 with simulated USIM and EAP-AKA"""
    hlr_auc_gw_available()
    hs20_simulated_sim(dev[0], apdev[0], "AKA")

def test_ap_hs20_aka_prime(dev, apdev):
    """Hotspot 2.0 with simulated USIM and EAP-AKA'"""
    hlr_auc_gw_available()
    hs20_simulated_sim(dev[0], apdev[0], "AKA'")
def test_ap_hs20_ext_sim(dev, apdev):
    """Hotspot 2.0 with external SIM processing"""
    hlr_auc_gw_available()
    bssid = apdev[0]['bssid']
    params = hs20_ap_params()
    params['hessid'] = bssid
    params['anqp_3gpp_cell_net'] = "232,01"
    params['domain_name'] = "wlan.mnc001.mcc232.3gppnetwork.org"
    hostapd.add_ap(apdev[0], params)

    dev[0].hs20_enable()
    try:
        dev[0].request("SET external_sim 1")
        dev[0].add_cred_values({'imsi': "23201-0000000000", 'eap': "SIM"})
        interworking_select(dev[0], bssid, "home", freq="2412")
        interworking_ext_sim_connect(dev[0], bssid, "SIM")
        check_sp_type(dev[0], "home")
    finally:
        # Always restore internal SIM processing for subsequent tests.
        dev[0].request("SET external_sim 0")

def test_ap_hs20_ext_sim_roaming(dev, apdev):
    """Hotspot 2.0 with external SIM processing in roaming network"""
    hlr_auc_gw_available()
    bssid = apdev[0]['bssid']
    params = hs20_ap_params()
    # Multiple PLMNs advertised; the credential's 232/01 matches a roaming
    # entry rather than the AP's home domain.
    params['anqp_3gpp_cell_net'] = "244,91;310,026;232,01;234,56"
    params['domain_name'] = "wlan.mnc091.mcc244.3gppnetwork.org"
    params['hessid'] = bssid
    hostapd.add_ap(apdev[0], params)

    dev[0].hs20_enable()
    try:
        dev[0].request("SET external_sim 1")
        dev[0].add_cred_values({'imsi': "23201-0000000000", 'eap': "SIM"})
        interworking_select(dev[0], bssid, "roaming", freq="2412")
        interworking_ext_sim_connect(dev[0], bssid, "SIM")
        check_sp_type(dev[0], "roaming")
    finally:
        dev[0].request("SET external_sim 0")
def test_ap_hs20_username(dev, apdev):
    """Hotspot 2.0 connection in username/password credential"""
    check_eap_capa(dev[0], "MSCHAPV2")
    bssid = apdev[0]['bssid']
    params = hs20_ap_params()
    params['hessid'] = bssid
    params['disable_dgaf'] = '1'
    hostapd.add_ap(apdev[0], params)

    dev[0].hs20_enable()
    id = dev[0].add_cred_values({'realm': "example.com",
                                 'username': "hs20-test",
                                 'password': "password",
                                 'ca_cert': "auth_serv/ca.pem",
                                 'domain': "example.com",
                                 'update_identifier': "1234"})
    interworking_select(dev[0], bssid, "home", freq="2412")
    interworking_connect(dev[0], bssid, "TTLS")
    check_sp_type(dev[0], "home")
    status = dev[0].get_status()
    if status['pairwise_cipher'] != "CCMP":
        raise Exception("Unexpected pairwise cipher")
    if status['hs20'] != "3":
        raise Exception("Unexpected HS 2.0 support indication")

    # A second station can join the same AP with plain EAP-TTLS config.
    dev[1].connect("test-hs20", key_mgmt="WPA-EAP", eap="TTLS",
                   identity="hs20-test", password="password",
                   ca_cert="auth_serv/ca.pem", phase2="auth=MSCHAPV2",
                   scan_freq="2412")

def test_ap_hs20_connect_api(dev, apdev):
    """Hotspot 2.0 connection with connect API"""
    check_eap_capa(dev[0], "MSCHAPV2")
    bssid = apdev[0]['bssid']
    params = hs20_ap_params()
    params['hessid'] = bssid
    params['disable_dgaf'] = '1'
    hostapd.add_ap(apdev[0], params)

    # Use a separate wpa_supplicant instance with the driver-based connect
    # command forced on.
    wpas = WpaSupplicant(global_iface='/tmp/wpas-wlan5')
    wpas.interface_add("wlan5", drv_params="force_connect_cmd=1")
    wpas.hs20_enable()
    wpas.flush_scan_cache()
    id = wpas.add_cred_values({'realm': "example.com",
                               'username': "hs20-test",
                               'password': "password",
                               'ca_cert': "auth_serv/ca.pem",
                               'domain': "example.com",
                               'update_identifier': "1234"})
    interworking_select(wpas, bssid, "home", freq="2412")
    interworking_connect(wpas, bssid, "TTLS")
    check_sp_type(wpas, "home")
    status = wpas.get_status()
    if status['pairwise_cipher'] != "CCMP":
        raise Exception("Unexpected pairwise cipher")
    if status['hs20'] != "3":
        raise Exception("Unexpected HS 2.0 support indication")

def test_ap_hs20_auto_interworking(dev, apdev):
    """Hotspot 2.0 connection with auto_interworking=1"""
    check_eap_capa(dev[0], "MSCHAPV2")
    bssid = apdev[0]['bssid']
    params = hs20_ap_params()
    params['hessid'] = bssid
    params['disable_dgaf'] = '1'
    hostapd.add_ap(apdev[0], params)

    dev[0].hs20_enable(auto_interworking=True)
    id = dev[0].add_cred_values({'realm': "example.com",
                                 'username': "hs20-test",
                                 'password': "password",
                                 'ca_cert': "auth_serv/ca.pem",
                                 'domain': "example.com",
                                 'update_identifier': "1234"})
    # REASSOCIATE triggers the automatic Interworking network selection.
    dev[0].request("REASSOCIATE")
    dev[0].wait_connected(timeout=15)
    check_sp_type(dev[0], "home")
    status = dev[0].get_status()
    if status['pairwise_cipher'] != "CCMP":
        raise Exception("Unexpected pairwise cipher")
    if status['hs20'] != "3":
        raise Exception("Unexpected HS 2.0 support indication")
@remote_compatible
def test_ap_hs20_auto_interworking_no_match(dev, apdev):
    """Hotspot 2.0 connection with auto_interworking=1 and no matching network"""
    hapd = hostapd.add_ap(apdev[0], {"ssid": "mismatch"})
    dev[0].hs20_enable(auto_interworking=True)
    # Add a network block that does not match the AP and enable it without
    # connecting, so the automatic Interworking logic keeps iterating.
    id = dev[0].connect("mismatch", psk="12345678", scan_freq="2412",
                        only_add_network=True)
    dev[0].request("ENABLE_NETWORK " + str(id) + " no-connect")
    id = dev[0].add_cred_values({'realm': "example.com",
                                 'username': "hs20-test",
                                 'password': "password",
                                 'ca_cert': "auth_serv/ca.pem",
                                 'domain': "example.com",
                                 'update_identifier': "1234"})
    dev[0].request("INTERWORKING_SELECT auto freq=2412")
    time.sleep(0.1)
    dev[0].dump_monitor()
    # Verify that the control interface stays responsive and that the
    # ANQP fetch / scan loop does not run away (fetches bounded relative
    # to the number of scans observed).
    for i in range(5):
        logger.info("start ping")
        if "PONG" not in dev[0].ctrl.request("PING", timeout=2):
            raise Exception("PING failed")
        logger.info("ping done")
        fetch = 0
        scan = 0
        for j in range(15):
            ev = dev[0].wait_event(["ANQP fetch completed",
                                    "CTRL-EVENT-SCAN-RESULTS"], timeout=0.05)
            if ev is None:
                break
            if "ANQP fetch completed" in ev:
                fetch += 1
            else:
                scan += 1
        if fetch > 2 * scan + 3:
            raise Exception("Too many ANQP fetch iterations")
        dev[0].dump_monitor()
    dev[0].request("DISCONNECT")
@remote_compatible
def test_ap_hs20_auto_interworking_no_cred_match(dev, apdev):
    """Hotspot 2.0 connection with auto_interworking=1 but no cred match"""
    bssid = apdev[0]['bssid']
    params = {"ssid": "test"}
    hostapd.add_ap(apdev[0], params)
    dev[0].hs20_enable(auto_interworking=True)
    dev[0].add_cred_values({'realm': "example.com",
                            'username': "hs20-test",
                            'password': "password",
                            'ca_cert': "auth_serv/ca.pem",
                            'domain': "example.com"})
    # Partial match: the network block SSID matches the AP, but the key
    # management does not (AP is open in this setup, block uses PSK).
    id = dev[0].connect("test", psk="12345678", only_add_network=True)
    dev[0].request("ENABLE_NETWORK %s" % id)
    logger.info("Verify that scanning continues when there is partial network block match")
    # Expect at least two scan rounds to confirm scanning did not stop.
    for i in range(0, 2):
        ev = dev[0].wait_event(["CTRL-EVENT-SCAN-RESULTS"], 10)
        if ev is None:
            raise Exception("Scan timed out")
        logger.info("Scan completed")
def eap_test(dev, ap, eap_params, method, user, release=0):
    # Helper: bring up an HS 2.0 AP advertising the given NAI Realm EAP
    # descriptor (eap_params, e.g. "25[3:26]"), add a password credential for
    # the given user, and connect with the given EAP method name.
    # release > 0 sets the AP's advertised hs20_release. Returns the hostapd
    # instance so callers can continue interacting with the AP.
    bssid = ap['bssid']
    params = hs20_ap_params()
    params['nai_realm'] = ["0,example.com," + eap_params]
    if release > 0:
        params['hs20_release'] = str(release)
    hapd = hostapd.add_ap(ap, params)
    dev.flush_scan_cache()
    dev.hs20_enable()
    dev.add_cred_values({'realm': "example.com",
                         'ca_cert': "auth_serv/ca.pem",
                         'username': user,
                         'password': "password"})
    interworking_select(dev, bssid, freq="2412")
    interworking_connect(dev, bssid, method)
    return hapd
@remote_compatible
def test_ap_hs20_eap_unknown(dev, apdev):
    """Hotspot 2.0 connection with unknown EAP method"""
    bssid = apdev[0]['bssid']
    params = hs20_ap_params()
    # NAI Realm advertises EAP method 99 which is not a known method, so
    # network selection must not find a match.
    params['nai_realm'] = "0,example.com,99"
    hostapd.add_ap(apdev[0], params)
    dev[0].hs20_enable()
    dev[0].add_cred_values(default_cred())
    interworking_select(dev[0], None, no_match=True, freq="2412")
def test_ap_hs20_eap_peap_mschapv2(dev, apdev):
    """Hotspot 2.0 connection with PEAP/MSCHAPV2"""
    check_eap_capa(dev[0], "MSCHAPV2")
    # NAI Realm descriptor "25[3:26]": PEAP with inner-method auth param
    eap_test(dev[0], apdev[0], "25[3:26]", "PEAP", "user")
def test_ap_hs20_eap_peap_default(dev, apdev):
    """Hotspot 2.0 connection with PEAP/MSCHAPV2 (as default)"""
    check_eap_capa(dev[0], "MSCHAPV2")
    # No inner-method auth parameter: rely on the default PEAP phase 2
    eap_test(dev[0], apdev[0], "25", "PEAP", "user")
def test_ap_hs20_eap_peap_gtc(dev, apdev):
    """Hotspot 2.0 connection with PEAP/GTC"""
    # NAI Realm descriptor "25[3:6]": PEAP with GTC as the inner method
    eap_test(dev[0], apdev[0], "25[3:6]", "PEAP", "user")
@remote_compatible
def test_ap_hs20_eap_peap_unknown(dev, apdev):
    """Hotspot 2.0 connection with PEAP/unknown"""
    bssid = apdev[0]['bssid']
    params = hs20_ap_params()
    # Inner method 99 is unknown, so no credential match is expected.
    params['nai_realm'] = "0,example.com,25[3:99]"
    hostapd.add_ap(apdev[0], params)
    dev[0].hs20_enable()
    dev[0].add_cred_values(default_cred())
    interworking_select(dev[0], None, no_match=True, freq="2412")
def test_ap_hs20_eap_ttls_chap(dev, apdev):
    """Hotspot 2.0 connection with TTLS/CHAP"""
    skip_with_fips(dev[0])
    # NAI Realm descriptor "21[2:2]": TTLS with non-EAP inner auth CHAP
    eap_test(dev[0], apdev[0], "21[2:2]", "TTLS", "chap user")
def test_ap_hs20_eap_ttls_mschap(dev, apdev):
    """Hotspot 2.0 connection with TTLS/MSCHAP"""
    skip_with_fips(dev[0])
    # NAI Realm descriptor "21[2:3]": TTLS with non-EAP inner auth MSCHAP
    eap_test(dev[0], apdev[0], "21[2:3]", "TTLS", "mschap user")
def test_ap_hs20_eap_ttls_default(dev, apdev):
    """Hotspot 2.0 connection with TTLS/default"""
    skip_with_fips(dev[0])
    # Bare "21": TTLS without any inner-auth parameters advertised
    eap_test(dev[0], apdev[0], "21", "TTLS", "hs20-test")
def test_ap_hs20_eap_ttls_eap_mschapv2(dev, apdev):
    """Hotspot 2.0 connection with TTLS/EAP-MSCHAPv2"""
    check_eap_capa(dev[0], "MSCHAPV2")
    # Mix of valid ([3:26]) and bogus ([6:7], [99:99]) auth parameters;
    # the valid one should still be matched.
    eap_test(dev[0], apdev[0], "21[3:26][6:7][99:99]", "TTLS", "user")
@remote_compatible
def test_ap_hs20_eap_ttls_eap_unknown(dev, apdev):
    """Hotspot 2.0 connection with TTLS/EAP-unknown"""
    bssid = apdev[0]['bssid']
    params = hs20_ap_params()
    # Inner EAP method 99 is unknown, so no credential match is expected.
    params['nai_realm'] = "0,example.com,21[3:99]"
    hostapd.add_ap(apdev[0], params)
    dev[0].hs20_enable()
    dev[0].add_cred_values(default_cred())
    interworking_select(dev[0], None, no_match=True, freq="2412")
@remote_compatible
def test_ap_hs20_eap_ttls_eap_unsupported(dev, apdev):
    """Hotspot 2.0 connection with TTLS/EAP-OTP(unsupported)"""
    bssid = apdev[0]['bssid']
    params = hs20_ap_params()
    # Inner EAP method OTP is known but not supported for this use, so
    # network selection must not find a match.
    params['nai_realm'] = "0,example.com,21[3:5]"
    hostapd.add_ap(apdev[0], params)
    dev[0].hs20_enable()
    dev[0].add_cred_values(default_cred())
    interworking_select(dev[0], None, no_match=True, freq="2412")
@remote_compatible
def test_ap_hs20_eap_ttls_unknown(dev, apdev):
    """Hotspot 2.0 connection with TTLS/unknown"""
    bssid = apdev[0]['bssid']
    params = hs20_ap_params()
    # Non-EAP inner auth value 5 is not a recognized option here, so no
    # credential match is expected.
    params['nai_realm'] = "0,example.com,21[2:5]"
    hostapd.add_ap(apdev[0], params)
    dev[0].hs20_enable()
    dev[0].add_cred_values(default_cred())
    interworking_select(dev[0], None, no_match=True, freq="2412")
def test_ap_hs20_eap_fast_mschapv2(dev, apdev):
    """Hotspot 2.0 connection with FAST/EAP-MSCHAPV2"""
    check_eap_capa(dev[0], "FAST")
    # NAI Realm descriptor "43[3:26]": EAP-FAST with inner EAP-MSCHAPv2
    eap_test(dev[0], apdev[0], "43[3:26]", "FAST", "user")
def test_ap_hs20_eap_fast_gtc(dev, apdev):
    """Hotspot 2.0 connection with FAST/EAP-GTC"""
    check_eap_capa(dev[0], "FAST")
    # NAI Realm descriptor "43[3:6]": EAP-FAST with inner EAP-GTC
    eap_test(dev[0], apdev[0], "43[3:6]", "FAST", "user")
def test_ap_hs20_eap_tls(dev, apdev):
    """Hotspot 2.0 connection with EAP-TLS"""
    bssid = apdev[0]['bssid']
    params = hs20_ap_params()
    # NAI Realm descriptor "13[5:6]": EAP-TLS with certificate credential
    params['nai_realm'] = ["0,example.com,13[5:6]"]
    hostapd.add_ap(apdev[0], params)
    dev[0].flush_scan_cache()
    dev[0].hs20_enable()
    # Certificate-based credential (no password)
    dev[0].add_cred_values({'realm': "example.com",
                            'username': "certificate-user",
                            'ca_cert': "auth_serv/ca.pem",
                            'client_cert': "auth_serv/user.pem",
                            'private_key': "auth_serv/user.key"})
    interworking_select(dev[0], bssid, freq="2412")
    interworking_connect(dev[0], bssid, "TLS")
@remote_compatible
def test_ap_hs20_eap_cert_unknown(dev, apdev):
    """Hotspot 2.0 connection with certificate, but unknown EAP method"""
    bssid = apdev[0]['bssid']
    params = hs20_ap_params()
    # Unknown EAP method 99 with certificate credential parameters; the
    # certificate credential must not match it.
    params['nai_realm'] = ["0,example.com,99[5:6]"]
    hostapd.add_ap(apdev[0], params)
    dev[0].hs20_enable()
    dev[0].add_cred_values({'realm': "example.com",
                            'username': "certificate-user",
                            'ca_cert': "auth_serv/ca.pem",
                            'client_cert': "auth_serv/user.pem",
                            'private_key': "auth_serv/user.key"})
    interworking_select(dev[0], None, no_match=True, freq="2412")
@remote_compatible
def test_ap_hs20_eap_cert_unsupported(dev, apdev):
    """Hotspot 2.0 connection with certificate, but unsupported TTLS"""
    bssid = apdev[0]['bssid']
    params = hs20_ap_params()
    # TTLS advertised with certificate credential parameters; the
    # certificate-based credential must not match TTLS.
    params['nai_realm'] = ["0,example.com,21[5:6]"]
    hostapd.add_ap(apdev[0], params)
    dev[0].hs20_enable()
    dev[0].add_cred_values({'realm': "example.com",
                            'username': "certificate-user",
                            'ca_cert': "auth_serv/ca.pem",
                            'client_cert': "auth_serv/user.pem",
                            'private_key': "auth_serv/user.key"})
    interworking_select(dev[0], None, no_match=True, freq="2412")
@remote_compatible
def test_ap_hs20_eap_invalid_cred(dev, apdev):
    """Hotspot 2.0 connection with invalid cred configuration"""
    bssid = apdev[0]['bssid']
    params = hs20_ap_params()
    hostapd.add_ap(apdev[0], params)
    dev[0].hs20_enable()
    # Incomplete credential: client_cert without private_key must not
    # produce a match.
    dev[0].add_cred_values({'realm': "example.com",
                            'username': "certificate-user",
                            'client_cert': "auth_serv/user.pem"})
    interworking_select(dev[0], None, no_match=True, freq="2412")
def test_ap_hs20_nai_realms(dev, apdev):
    """Hotspot 2.0 connection and multiple NAI realms and TTLS/PAP"""
    bssid = apdev[0]['bssid']
    params = hs20_ap_params()
    params['hessid'] = bssid
    # Multiple ';'-separated realms in one NAI Realm entry; only
    # example.com matches the credential. TTLS with PAP inner auth.
    params['nai_realm'] = ["0,no.match.here;example.com;no.match.here.either,21[2:1][5:7]"]
    hostapd.add_ap(apdev[0], params)
    dev[0].flush_scan_cache()
    dev[0].hs20_enable()
    id = dev[0].add_cred_values({'realm': "example.com",
                                 'ca_cert': "auth_serv/ca.pem",
                                 'username': "pap user",
                                 'password': "password",
                                 'domain': "example.com"})
    interworking_select(dev[0], bssid, "home", freq="2412")
    interworking_connect(dev[0], bssid, "TTLS")
    check_sp_type(dev[0], "home")
def test_ap_hs20_roaming_consortium(dev, apdev):
    """Hotspot 2.0 connection based on roaming consortium match"""
    bssid = apdev[0]['bssid']
    params = hs20_ap_params()
    params['hessid'] = bssid
    hostapd.add_ap(apdev[0], params)
    dev[0].flush_scan_cache()
    dev[0].hs20_enable()
    # Try several consortium OIs that the default AP configuration is
    # expected to advertise; each must produce a home match and connect.
    for consortium in ["112233", "1020304050", "010203040506", "fedcba"]:
        id = dev[0].add_cred_values({'username': "user",
                                     'password': "password",
                                     'domain': "example.com",
                                     'ca_cert': "auth_serv/ca.pem",
                                     'roaming_consortium': consortium,
                                     'eap': "PEAP"})
        interworking_select(dev[0], bssid, "home", freq="2412")
        interworking_connect(dev[0], bssid, "PEAP")
        check_sp_type(dev[0], "home")
        # Re-running selection while connected must report already-connected.
        dev[0].request("INTERWORKING_SELECT auto freq=2412")
        ev = dev[0].wait_event(["INTERWORKING-ALREADY-CONNECTED"], timeout=15)
        if ev is None:
            raise Exception("Timeout on already-connected event")
        dev[0].remove_cred(id)
def test_ap_hs20_roaming_consortiums_match(dev, apdev):
    """Hotspot 2.0 connection based on roaming_consortiums match"""
    bssid = apdev[0]['bssid']
    params = hs20_ap_params()
    params['hessid'] = bssid
    hostapd.add_ap(apdev[0], params)
    dev[0].flush_scan_cache()
    dev[0].hs20_enable()
    # (cred roaming_consortiums value, OI expected to be selected)
    tests = [("112233", "112233"),
             ("ffffff,1020304050,eeeeee", "1020304050")]
    for consortium, selected in tests:
        id = dev[0].add_cred_values({'username': "user",
                                     'password': "password",
                                     'domain': "my.home.example.com",
                                     'ca_cert': "auth_serv/ca.pem",
                                     'roaming_consortiums': consortium,
                                     'eap': "PEAP"})
        interworking_select(dev[0], bssid, "roaming", freq="2412")
        interworking_connect(dev[0], bssid, "PEAP")
        check_sp_type(dev[0], "roaming")
        # The network block records which OI from the list actually matched.
        network_id = dev[0].get_status_field("id")
        sel = dev[0].get_network(network_id, "roaming_consortium_selection")
        if sel != selected:
            raise Exception("Unexpected roaming_consortium_selection value: " +
                            sel)
        dev[0].request("INTERWORKING_SELECT auto freq=2412")
        ev = dev[0].wait_event(["INTERWORKING-ALREADY-CONNECTED"], timeout=15)
        if ev is None:
            raise Exception("Timeout on already-connected event")
        dev[0].remove_cred(id)
def test_ap_hs20_max_roaming_consortiums(dev, apdev):
    """Maximum number of cred roaming_consortiums"""
    id = dev[0].add_cred()
    # 36 comma-separated OIs is the maximum that must be accepted.
    consortium = (36*",ffffff")[1:]
    if "OK" not in dev[0].request('SET_CRED %d roaming_consortiums "%s"' % (id, consortium)):
        raise Exception("Maximum number of consortium OIs rejected")
    # 37 OIs exceeds the limit and must be rejected.
    consortium = (37*",ffffff")[1:]
    if "FAIL" not in dev[0].request('SET_CRED %d roaming_consortiums "%s"' % (id, consortium)):
        raise Exception("Over maximum number of consortium OIs accepted")
    dev[0].remove_cred(id)
def test_ap_hs20_roaming_consortium_invalid(dev, apdev):
    """Hotspot 2.0 connection and invalid roaming consortium ANQP-element"""
    bssid = apdev[0]['bssid']
    params = hs20_ap_params()
    params['hessid'] = bssid
    # Override Roaming Consortium ANQP-element with an incorrectly encoded
    # value.
    params['anqp_elem'] = "261:04fedcba"
    hostapd.add_ap(apdev[0], params)
    dev[0].hs20_enable()
    id = dev[0].add_cred_values({'username': "user",
                                 'password': "password",
                                 'domain': "example.com",
                                 'ca_cert': "auth_serv/ca.pem",
                                 'roaming_consortium': "fedcba",
                                 'eap': "PEAP"})
    # The malformed element must not produce a credential match.
    interworking_select(dev[0], bssid, "home", freq="2412", no_match=True)
def test_ap_hs20_roaming_consortium_element(dev, apdev):
    """Hotspot 2.0 connection and invalid roaming consortium element"""
    bssid = apdev[0]['bssid']
    params = hs20_ap_params()
    params['hessid'] = bssid
    del params['roaming_consortium']
    # Truncated/empty Roaming Consortium information element in the Beacon.
    params['vendor_elements'] = '6f00'
    hapd = hostapd.add_ap(apdev[0], params)
    dev[0].hs20_enable()
    dev[0].scan_for_bss(bssid, freq="2412")
    id = dev[0].add_cred_values({'username': "user",
                                 'password': "password",
                                 'domain': "example.com",
                                 'ca_cert': "auth_serv/ca.pem",
                                 'roaming_consortium': "112233",
                                 'eap': "PEAP"})
    interworking_select(dev[0], bssid, freq="2412", no_match=True)
    # Replace with another invalid encoding and confirm there is still
    # no match after a fresh scan.
    hapd.set('vendor_elements', '6f020001')
    if "OK" not in hapd.request("UPDATE_BEACON"):
        raise Exception("UPDATE_BEACON failed")
    dev[0].request("BSS_FLUSH 0")
    dev[0].scan_for_bss(bssid, freq="2412", force_scan=True)
    interworking_select(dev[0], bssid, freq="2412", no_match=True)
def test_ap_hs20_roaming_consortium_constraints(dev, apdev):
    """Hotspot 2.0 connection and roaming consortium constraints"""
    bssid = apdev[0]['bssid']
    params = hs20_ap_params()
    params['hessid'] = bssid
    # Advertise BSS load so max_bss_load constraints can be evaluated.
    params['bss_load_test'] = "12:200:20000"
    hostapd.add_ap(apdev[0], params)
    dev[0].hs20_enable()
    # Base credential that matches the AP's roaming consortium; each case
    # below copies it and adds one constraint to check its effect.
    vals = {'username': "user",
            'password': "password",
            'domain': "example.com",
            'ca_cert': "auth_serv/ca.pem",
            'roaming_consortium': "fedcba",
            'eap': "TTLS"}
    # required_roaming_consortium mismatch -> no match at all.
    vals2 = vals.copy()
    vals2['required_roaming_consortium'] = "223344"
    id = dev[0].add_cred_values(vals2)
    interworking_select(dev[0], bssid, "home", freq="2412", no_match=True)
    dev[0].remove_cred(id)
    # Backhaul bandwidth below the credential minimum -> flagged in event.
    vals2 = vals.copy()
    vals2['min_dl_bandwidth_home'] = "65500"
    id = dev[0].add_cred_values(vals2)
    dev[0].request("INTERWORKING_SELECT freq=2412")
    ev = dev[0].wait_event(["INTERWORKING-AP"], timeout=15)
    if ev is None:
        raise Exception("No AP found")
    if "below_min_backhaul=1" not in ev:
        raise Exception("below_min_backhaul not reported")
    dev[0].remove_cred(id)
    # Advertised BSS load above the credential maximum -> flagged in event.
    vals2 = vals.copy()
    vals2['max_bss_load'] = "100"
    id = dev[0].add_cred_values(vals2)
    dev[0].request("INTERWORKING_SELECT freq=2412")
    ev = dev[0].wait_event(["INTERWORKING-AP"], timeout=15)
    if ev is None:
        raise Exception("No AP found")
    if "over_max_bss_load=1" not in ev:
        raise Exception("over_max_bss_load not reported")
    dev[0].remove_cred(id)
    # Required connection capability not advertised -> flagged in event.
    vals2 = vals.copy()
    vals2['req_conn_capab'] = "6:1234"
    vals2['domain'] = 'example.org'
    id = dev[0].add_cred_values(vals2)
    dev[0].request("INTERWORKING_SELECT freq=2412")
    ev = dev[0].wait_event(["INTERWORKING-AP"], timeout=15)
    if ev is None:
        raise Exception("No AP found")
    if "conn_capab_missing=1" not in ev:
        raise Exception("conn_capab_missing not reported")
    dev[0].remove_cred(id)
    # Multiple matching credentials with differing priority values.
    values = default_cred()
    values['roaming_consortium'] = "fedcba"
    id3 = dev[0].add_cred_values(values)
    vals2 = vals.copy()
    vals2['roaming_consortium'] = "fedcba"
    vals2['priority'] = "2"
    id = dev[0].add_cred_values(vals2)
    values = default_cred()
    values['roaming_consortium'] = "fedcba"
    id2 = dev[0].add_cred_values(values)
    dev[0].request("INTERWORKING_SELECT freq=2412")
    ev = dev[0].wait_event(["INTERWORKING-AP"], timeout=15)
    if ev is None:
        raise Exception("No AP found")
    dev[0].remove_cred(id)
    dev[0].remove_cred(id2)
    dev[0].remove_cred(id3)
def test_ap_hs20_3gpp_constraints(dev, apdev):
    """Hotspot 2.0 connection and 3GPP credential constraints"""
    bssid = apdev[0]['bssid']
    params = hs20_ap_params()
    params['hessid'] = bssid
    # Advertise a 3GPP cellular network (MCC 555 / MNC 444) and matching
    # domain name; BSS load enables max_bss_load checks.
    params['anqp_3gpp_cell_net'] = "555,444"
    params['domain_name'] = "wlan.mnc444.mcc555.3gppnetwork.org"
    params['bss_load_test'] = "12:200:20000"
    hapd = hostapd.add_ap(apdev[0], params)
    dev[0].hs20_enable()
    # Base SIM credential matching the advertised PLMN; each case below
    # copies it and adds one constraint.
    vals = {'imsi': "555444-333222111",
            'eap': "SIM",
            'milenage': "5122250214c33e723a5dd523fc145fc0:981d464c7c52eb6e5036234984ad0bcf:000000000123"}
    # required_roaming_consortium mismatch -> no match.
    vals2 = vals.copy()
    vals2['required_roaming_consortium'] = "223344"
    id = dev[0].add_cred_values(vals2)
    interworking_select(dev[0], bssid, "home", freq="2412", no_match=True)
    dev[0].remove_cred(id)
    # Backhaul bandwidth below the credential minimum -> flagged in event.
    vals2 = vals.copy()
    vals2['min_dl_bandwidth_home'] = "65500"
    id = dev[0].add_cred_values(vals2)
    dev[0].request("INTERWORKING_SELECT freq=2412")
    ev = dev[0].wait_event(["INTERWORKING-AP"], timeout=15)
    if ev is None:
        raise Exception("No AP found")
    if "below_min_backhaul=1" not in ev:
        raise Exception("below_min_backhaul not reported")
    dev[0].remove_cred(id)
    # Advertised BSS load above the credential maximum -> flagged in event.
    vals2 = vals.copy()
    vals2['max_bss_load'] = "100"
    id = dev[0].add_cred_values(vals2)
    dev[0].request("INTERWORKING_SELECT freq=2412")
    ev = dev[0].wait_event(["INTERWORKING-AP"], timeout=15)
    if ev is None:
        raise Exception("No AP found")
    if "over_max_bss_load=1" not in ev:
        raise Exception("over_max_bss_load not reported")
    dev[0].remove_cred(id)
    # Multiple matching credentials with differing priority values.
    values = default_cred()
    values['roaming_consortium'] = "fedcba"
    id3 = dev[0].add_cred_values(values)
    vals2 = vals.copy()
    vals2['roaming_consortium'] = "fedcba"
    vals2['priority'] = "2"
    id = dev[0].add_cred_values(vals2)
    values = default_cred()
    values['roaming_consortium'] = "fedcba"
    id2 = dev[0].add_cred_values(values)
    dev[0].request("INTERWORKING_SELECT freq=2412")
    ev = dev[0].wait_event(["INTERWORKING-AP"], timeout=15)
    if ev is None:
        raise Exception("No AP found")
    dev[0].remove_cred(id)
    dev[0].remove_cred(id2)
    dev[0].remove_cred(id3)
    # Restart the AP without domain_name to exercise req_conn_capab with
    # a 3GPP credential.
    hapd.disable()
    params = hs20_ap_params()
    params['hessid'] = bssid
    params['anqp_3gpp_cell_net'] = "555,444"
    params['bss_load_test'] = "12:200:20000"
    hapd = hostapd.add_ap(apdev[0], params)
    vals2 = vals.copy()
    vals2['req_conn_capab'] = "6:1234"
    id = dev[0].add_cred_values(vals2)
    dev[0].request("INTERWORKING_SELECT freq=2412")
    ev = dev[0].wait_event(["INTERWORKING-AP"], timeout=15)
    if ev is None:
        raise Exception("No AP found")
    if "conn_capab_missing=1" not in ev:
        raise Exception("conn_capab_missing not reported")
    dev[0].remove_cred(id)
def test_ap_hs20_connect_no_full_match(dev, apdev):
    """Hotspot 2.0 connection and no full match"""
    bssid = apdev[0]['bssid']
    params = hs20_ap_params()
    params['hessid'] = bssid
    params['anqp_3gpp_cell_net'] = "555,444"
    hostapd.add_ap(apdev[0], params)
    dev[0].flush_scan_cache()
    dev[0].hs20_enable()
    # Password credential whose min_dl_bandwidth_home constraint is not
    # met; connection is still allowed despite the partial match.
    vals = {'username': "user",
            'password': "password",
            'domain': "example.com",
            'ca_cert': "auth_serv/ca.pem",
            'roaming_consortium': "fedcba",
            'eap': "TTLS",
            'min_dl_bandwidth_home': "65500"}
    id = dev[0].add_cred_values(vals)
    dev[0].request("INTERWORKING_SELECT freq=2412")
    ev = dev[0].wait_event(["INTERWORKING-AP"], timeout=15)
    if ev is None:
        raise Exception("No AP found")
    if "below_min_backhaul=1" not in ev:
        raise Exception("below_min_backhaul not reported")
    interworking_connect(dev[0], bssid, "TTLS")
    dev[0].remove_cred(id)
    dev[0].wait_disconnected()
    # Same behavior with a SIM credential and the roaming bandwidth limit.
    vals = {'imsi': "555444-333222111", 'eap': "SIM",
            'milenage': "5122250214c33e723a5dd523fc145fc0:981d464c7c52eb6e5036234984ad0bcf:000000000123",
            'min_dl_bandwidth_roaming': "65500"}
    id = dev[0].add_cred_values(vals)
    dev[0].request("INTERWORKING_SELECT freq=2412")
    ev = dev[0].wait_event(["INTERWORKING-AP"], timeout=15)
    if ev is None:
        raise Exception("No AP found")
    if "below_min_backhaul=1" not in ev:
        raise Exception("below_min_backhaul not reported")
    interworking_connect(dev[0], bssid, "SIM")
    dev[0].remove_cred(id)
    dev[0].wait_disconnected()
def test_ap_hs20_username_roaming(dev, apdev):
    """Hotspot 2.0 connection in username/password credential (roaming)"""
    check_eap_capa(dev[0], "MSCHAPV2")
    bssid = apdev[0]['bssid']
    params = hs20_ap_params()
    # The credential realm matches the second advertised realm while the
    # AP's domain differs from the credential domain -> roaming SP type.
    params['nai_realm'] = ["0,example.com,13[5:6],21[2:4][5:7]",
                           "0,roaming.example.com,21[2:4][5:7]",
                           "0,another.example.com"]
    params['domain_name'] = "another.example.com"
    params['hessid'] = bssid
    hostapd.add_ap(apdev[0], params)
    dev[0].hs20_enable()
    id = dev[0].add_cred_values({'realm': "roaming.example.com",
                                 'username': "hs20-test",
                                 'password': "password",
                                 'ca_cert': "auth_serv/ca.pem",
                                 'domain': "example.com"})
    interworking_select(dev[0], bssid, "roaming", freq="2412")
    interworking_connect(dev[0], bssid, "TTLS")
    check_sp_type(dev[0], "roaming")
def test_ap_hs20_username_unknown(dev, apdev):
    """Hotspot 2.0 connection in username/password credential (no domain in cred)"""
    check_eap_capa(dev[0], "MSCHAPV2")
    bssid = apdev[0]['bssid']
    params = hs20_ap_params()
    params['hessid'] = bssid
    hostapd.add_ap(apdev[0], params)
    dev[0].hs20_enable()
    # Credential without 'domain' -> SP type cannot be determined and is
    # reported as "unknown".
    id = dev[0].add_cred_values({'realm': "example.com",
                                 'ca_cert': "auth_serv/ca.pem",
                                 'username': "hs20-test",
                                 'password': "password"})
    interworking_select(dev[0], bssid, "unknown", freq="2412")
    interworking_connect(dev[0], bssid, "TTLS")
    check_sp_type(dev[0], "unknown")
def test_ap_hs20_username_unknown2(dev, apdev):
    """Hotspot 2.0 connection in username/password credential (no domain advertized)"""
    check_eap_capa(dev[0], "MSCHAPV2")
    bssid = apdev[0]['bssid']
    params = hs20_ap_params()
    params['hessid'] = bssid
    # AP does not advertise a Domain Name ANQP element -> SP type "unknown"
    # even though the credential includes a domain.
    del params['domain_name']
    hostapd.add_ap(apdev[0], params)
    dev[0].hs20_enable()
    id = dev[0].add_cred_values({'realm': "example.com",
                                 'ca_cert': "auth_serv/ca.pem",
                                 'username': "hs20-test",
                                 'password': "password",
                                 'domain': "example.com"})
    interworking_select(dev[0], bssid, "unknown", freq="2412")
    interworking_connect(dev[0], bssid, "TTLS")
    check_sp_type(dev[0], "unknown")
def test_ap_hs20_gas_while_associated(dev, apdev):
    """Hotspot 2.0 connection with GAS query while associated"""
    check_eap_capa(dev[0], "MSCHAPV2")
    bssid = apdev[0]['bssid']
    params = hs20_ap_params()
    params['hessid'] = bssid
    hostapd.add_ap(apdev[0], params)
    dev[0].hs20_enable()
    id = dev[0].add_cred_values({'realm': "example.com",
                                 'ca_cert': "auth_serv/ca.pem",
                                 'username': "hs20-test",
                                 'password': "password",
                                 'domain': "example.com"})
    interworking_select(dev[0], bssid, "home", freq="2412")
    interworking_connect(dev[0], bssid, "TTLS")
    logger.info("Verifying GAS query while associated")
    # FETCH_ANQP while connected; expect a sequence of RX-ANQP events.
    dev[0].request("FETCH_ANQP")
    for i in range(0, 6):
        ev = dev[0].wait_event(["RX-ANQP"], timeout=5)
        if ev is None:
            raise Exception("Operation timed out")
def test_ap_hs20_gas_with_another_ap_while_associated(dev, apdev):
    """GAS query with another AP while associated"""
    check_eap_capa(dev[0], "MSCHAPV2")
    bssid = apdev[0]['bssid']
    params = hs20_ap_params()
    params['hessid'] = bssid
    hostapd.add_ap(apdev[0], params)
    # Second AP with a non-matching realm; only used as a GAS peer.
    bssid2 = apdev[1]['bssid']
    params = hs20_ap_params()
    params['hessid'] = bssid2
    params['nai_realm'] = ["0,no-match.example.org,13[5:6],21[2:4][5:7]"]
    hostapd.add_ap(apdev[1], params)
    dev[0].hs20_enable()
    id = dev[0].add_cred_values({'realm': "example.com",
                                 'ca_cert': "auth_serv/ca.pem",
                                 'username': "hs20-test",
                                 'password': "password",
                                 'domain': "example.com"})
    interworking_select(dev[0], bssid, "home", freq="2412")
    interworking_connect(dev[0], bssid, "TTLS")
    dev[0].dump_monitor()
    logger.info("Verifying GAS query with same AP while associated")
    # ANQP info id 263 is queried from the associated AP...
    dev[0].request("ANQP_GET " + bssid + " 263")
    ev = dev[0].wait_event(["RX-ANQP"], timeout=5)
    if ev is None:
        raise Exception("ANQP operation timed out")
    dev[0].dump_monitor()
    logger.info("Verifying GAS query with another AP while associated")
    # ...and then from the other AP while still associated to the first.
    dev[0].scan_for_bss(bssid2, 2412)
    dev[0].request("ANQP_GET " + bssid2 + " 263")
    ev = dev[0].wait_event(["RX-ANQP"], timeout=5)
    if ev is None:
        raise Exception("ANQP operation timed out")
def test_ap_hs20_gas_while_associated_with_pmf(dev, apdev):
    """Hotspot 2.0 connection with GAS query while associated and using PMF"""
    check_eap_capa(dev[0], "MSCHAPV2")
    # Wrapper ensures the global pmf setting is restored even on failure.
    try:
        _test_ap_hs20_gas_while_associated_with_pmf(dev, apdev)
    finally:
        dev[0].request("SET pmf 0")
def _test_ap_hs20_gas_while_associated_with_pmf(dev, apdev):
    # Implementation for test_ap_hs20_gas_while_associated_with_pmf();
    # caller restores the pmf setting afterwards.
    bssid = apdev[0]['bssid']
    params = hs20_ap_params()
    params['hessid'] = bssid
    hostapd.add_ap(apdev[0], params)
    bssid2 = apdev[1]['bssid']
    params = hs20_ap_params()
    params['hessid'] = bssid2
    params['nai_realm'] = ["0,no-match.example.org,13[5:6],21[2:4][5:7]"]
    hostapd.add_ap(apdev[1], params)
    dev[0].flush_scan_cache()
    dev[0].hs20_enable()
    # pmf 2 = PMF required
    dev[0].request("SET pmf 2")
    id = dev[0].add_cred_values({'realm': "example.com",
                                 'ca_cert': "auth_serv/ca.pem",
                                 'username': "hs20-test",
                                 'password': "password",
                                 'domain': "example.com"})
    interworking_select(dev[0], bssid, "home", freq="2412")
    interworking_connect(dev[0], bssid, "TTLS")
    logger.info("Verifying GAS query while associated")
    dev[0].request("FETCH_ANQP")
    # Fetch covers both APs, hence twice the number of expected events.
    for i in range(0, 2 * 6):
        ev = dev[0].wait_event(["RX-ANQP"], timeout=5)
        if ev is None:
            raise Exception("Operation timed out")
def test_ap_hs20_gas_with_another_ap_while_using_pmf(dev, apdev):
    """GAS query with another AP while associated and using PMF"""
    check_eap_capa(dev[0], "MSCHAPV2")
    # Wrapper ensures the global pmf setting is restored even on failure.
    try:
        _test_ap_hs20_gas_with_another_ap_while_using_pmf(dev, apdev)
    finally:
        dev[0].request("SET pmf 0")
def _test_ap_hs20_gas_with_another_ap_while_using_pmf(dev, apdev):
    # Implementation for test_ap_hs20_gas_with_another_ap_while_using_pmf();
    # caller restores the pmf setting afterwards.
    bssid = apdev[0]['bssid']
    params = hs20_ap_params()
    params['hessid'] = bssid
    hapd = hostapd.add_ap(apdev[0], params)
    # Second AP with a non-matching realm; only used as a GAS peer.
    bssid2 = apdev[1]['bssid']
    params = hs20_ap_params()
    params['hessid'] = bssid2
    params['nai_realm'] = ["0,no-match.example.org,13[5:6],21[2:4][5:7]"]
    hostapd.add_ap(apdev[1], params)
    dev[0].hs20_enable()
    # pmf 2 = PMF required
    dev[0].request("SET pmf 2")
    id = dev[0].add_cred_values({'realm': "example.com",
                                 'ca_cert': "auth_serv/ca.pem",
                                 'username': "hs20-test",
                                 'password': "password",
                                 'domain': "example.com"})
    interworking_select(dev[0], bssid, "home", freq="2412")
    interworking_connect(dev[0], bssid, "TTLS")
    dev[0].dump_monitor()
    hapd.wait_sta()
    logger.info("Verifying GAS query with same AP while associated")
    dev[0].request("ANQP_GET " + bssid + " 263")
    ev = dev[0].wait_event(["RX-ANQP"], timeout=5)
    if ev is None:
        raise Exception("ANQP operation timed out")
    dev[0].dump_monitor()
    logger.info("Verifying GAS query with another AP while associated")
    dev[0].scan_for_bss(bssid2, 2412)
    dev[0].request("ANQP_GET " + bssid2 + " 263")
    ev = dev[0].wait_event(["RX-ANQP"], timeout=5)
    if ev is None:
        raise Exception("ANQP operation timed out")
def test_ap_hs20_gas_frag_while_associated(dev, apdev):
    """Hotspot 2.0 connection with fragmented GAS query while associated"""
    check_eap_capa(dev[0], "MSCHAPV2")
    bssid = apdev[0]['bssid']
    params = hs20_ap_params()
    params['hessid'] = bssid
    hapd = hostapd.add_ap(apdev[0], params)
    # Small fragment limit forces the GAS responses to be fragmented.
    hapd.set("gas_frag_limit", "50")
    dev[0].hs20_enable()
    id = dev[0].add_cred_values({'realm': "example.com",
                                 'ca_cert': "auth_serv/ca.pem",
                                 'username': "hs20-test",
                                 'password': "password",
                                 'domain': "example.com"})
    interworking_select(dev[0], bssid, "home", freq="2412")
    interworking_connect(dev[0], bssid, "TTLS")
    hapd.wait_sta()
    logger.info("Verifying GAS query while associated")
    dev[0].request("FETCH_ANQP")
    for i in range(0, 6):
        ev = dev[0].wait_event(["RX-ANQP"], timeout=5)
        if ev is None:
            raise Exception("Operation timed out")
def test_ap_hs20_multiple_connects(dev, apdev):
    """Hotspot 2.0 connection through multiple network selections"""
    check_eap_capa(dev[0], "MSCHAPV2")
    bssid = apdev[0]['bssid']
    params = hs20_ap_params()
    params['hessid'] = bssid
    hostapd.add_ap(apdev[0], params)
    dev[0].hs20_enable()
    values = {'realm': "example.com",
              'ca_cert': "auth_serv/ca.pem",
              'username': "hs20-test",
              'password': "password",
              'domain': "example.com"}
    id = dev[0].add_cred_values(values)
    dev[0].scan_for_bss(bssid, freq="2412")
    # Run selection three times: first round ends with an explicit
    # DISCONNECT, the last may report already-connected.
    for i in range(0, 3):
        logger.info("Starting Interworking network selection")
        dev[0].request("INTERWORKING_SELECT auto freq=2412")
        while True:
            ev = dev[0].wait_event(["INTERWORKING-NO-MATCH",
                                    "INTERWORKING-ALREADY-CONNECTED",
                                    "CTRL-EVENT-CONNECTED"], timeout=15)
            if ev is None:
                raise Exception("Connection timed out")
            if "INTERWORKING-NO-MATCH" in ev:
                raise Exception("Matching AP not found")
            if "CTRL-EVENT-CONNECTED" in ev:
                break
            if i == 2 and "INTERWORKING-ALREADY-CONNECTED" in ev:
                break
        if i == 0:
            dev[0].request("DISCONNECT")
        dev[0].dump_monitor()
    # Repeated selections must reuse the same network block, not add more.
    networks = dev[0].list_networks()
    if len(networks) > 1:
        raise Exception("Duplicated network block detected")
def test_ap_hs20_disallow_aps(dev, apdev):
    """Hotspot 2.0 connection and disallow_aps"""
    bssid = apdev[0]['bssid']
    params = hs20_ap_params()
    params['hessid'] = bssid
    hostapd.add_ap(apdev[0], params)
    dev[0].hs20_enable()
    values = {'realm': "example.com",
              'ca_cert': "auth_serv/ca.pem",
              'username': "hs20-test",
              'password': "password",
              'domain': "example.com"}
    id = dev[0].add_cred_values(values)
    dev[0].scan_for_bss(bssid, freq="2412")
    logger.info("Verify disallow_aps bssid")
    # Disallow by BSSID (colon-less hex form) -> selection finds no match.
    dev[0].request("SET disallow_aps bssid " + bssid.replace(':', ''))
    dev[0].request("INTERWORKING_SELECT auto")
    ev = dev[0].wait_event(["INTERWORKING-NO-MATCH"], timeout=15)
    if ev is None:
        raise Exception("Network selection timed out")
    dev[0].dump_monitor()
    logger.info("Verify disallow_aps ssid")
    # Disallow by SSID; hex string is the encoding of "test-hs20".
    dev[0].request("SET disallow_aps ssid 746573742d68733230")
    dev[0].request("INTERWORKING_SELECT auto freq=2412")
    ev = dev[0].wait_event(["INTERWORKING-NO-MATCH"], timeout=15)
    if ev is None:
        raise Exception("Network selection timed out")
    dev[0].dump_monitor()
    logger.info("Verify disallow_aps clear")
    # Clearing the list restores the match.
    dev[0].request("SET disallow_aps ")
    interworking_select(dev[0], bssid, "home", freq="2412")
    # INTERWORKING_CONNECT must reject a disallowed BSS and invalid args.
    dev[0].request("SET disallow_aps bssid " + bssid.replace(':', ''))
    ret = dev[0].request("INTERWORKING_CONNECT " + bssid)
    if "FAIL" not in ret:
        raise Exception("INTERWORKING_CONNECT to disallowed BSS not rejected")
    if "FAIL" not in dev[0].request("INTERWORKING_CONNECT foo"):
        raise Exception("Invalid INTERWORKING_CONNECT not rejected")
    if "FAIL" not in dev[0].request("INTERWORKING_CONNECT 00:11:22:33:44:55"):
        raise Exception("Invalid INTERWORKING_CONNECT not rejected")
def policy_test(dev, ap, values, only_one=True):
    # Helper: add the credential in 'values', run automatic Interworking
    # selection, and verify the device ends up connected to 'ap' (if given).
    # When only_one is True, no other AP may be reported as acceptable.
    # Returns the list of selection events for further inspection; the
    # credential is removed before returning.
    dev.dump_monitor()
    if ap:
        logger.info("Verify network selection to AP " + ap['ifname'])
        bssid = ap['bssid']
        dev.scan_for_bss(bssid, freq="2412")
    else:
        logger.info("Verify network selection")
        bssid = None
    dev.hs20_enable()
    id = dev.add_cred_values(values)
    dev.request("INTERWORKING_SELECT auto freq=2412")
    events = []
    # Collect events until the selection decision is made.
    while True:
        ev = dev.wait_event(["INTERWORKING-AP", "INTERWORKING-NO-MATCH",
                             "INTERWORKING-BLACKLISTED",
                             "INTERWORKING-SELECTED"], timeout=15)
        if ev is None:
            raise Exception("Network selection timed out")
        events.append(ev)
        if "INTERWORKING-NO-MATCH" in ev:
            raise Exception("Matching AP not found")
        if bssid and only_one and "INTERWORKING-AP" in ev and bssid not in ev:
            raise Exception("Unexpected AP claimed acceptable")
        if "INTERWORKING-SELECTED" in ev:
            if bssid and bssid not in ev:
                raise Exception("Selected incorrect BSS")
            break
    # Cross-check the connected BSS against both the event and STATUS.
    ev = dev.wait_connected(timeout=15)
    if bssid and bssid not in ev:
        raise Exception("Connected to incorrect BSS")
    conn_bssid = dev.get_status_field("bssid")
    if bssid and conn_bssid != bssid:
        raise Exception("bssid information points to incorrect BSS")
    dev.remove_cred(id)
    dev.dump_monitor()
    return events
def default_cred(domain=None, user="hs20-test"):
    """Build the baseline Hotspot 2.0 credential set used by these tests.

    The returned dict targets the example.com realm with a password login
    for *user*. When *domain* is truthy, it is added under the 'domain'
    key; otherwise that key is omitted entirely.
    """
    values = dict(realm="example.com",
                  ca_cert="auth_serv/ca.pem",
                  username=user,
                  password="password")
    if domain:
        values['domain'] = domain
    return values
def test_ap_hs20_prefer_home(dev, apdev):
    """Hotspot 2.0 required roaming consortium"""
    check_eap_capa(dev[0], "MSCHAPV2")
    # Two APs with different domain names; the one whose domain matches
    # the credential (home SP) must be preferred.
    params = hs20_ap_params()
    params['domain_name'] = "example.org"
    hostapd.add_ap(apdev[0], params)
    params = hs20_ap_params()
    params['ssid'] = "test-hs20-other"
    params['domain_name'] = "example.com"
    hostapd.add_ap(apdev[1], params)
    values = default_cred()
    values['domain'] = "example.com"
    policy_test(dev[0], apdev[1], values, only_one=False)
    values['domain'] = "example.org"
    policy_test(dev[0], apdev[0], values, only_one=False)
def test_ap_hs20_req_roaming_consortium(dev, apdev):
    """Hotspot 2.0 required roaming consortium"""
    check_eap_capa(dev[0], "MSCHAPV2")
    params = hs20_ap_params()
    hostapd.add_ap(apdev[0], params)
    params = hs20_ap_params()
    params['ssid'] = "test-hs20-other"
    params['roaming_consortium'] = ["223344"]
    hostapd.add_ap(apdev[1], params)
    # required_roaming_consortium steers selection to the AP advertising
    # the matching OI.
    values = default_cred()
    values['required_roaming_consortium'] = "223344"
    policy_test(dev[0], apdev[1], values)
    values['required_roaming_consortium'] = "112233"
    policy_test(dev[0], apdev[0], values)
    # Valid OI lengths (3 and 15 octets) must be accepted; anything else
    # (empty, odd-length hex, too short, too long) must be rejected.
    id = dev[0].add_cred()
    dev[0].set_cred(id, "required_roaming_consortium", "112233")
    dev[0].set_cred(id, "required_roaming_consortium", "112233445566778899aabbccddeeff")
    for val in ["", "1", "11", "1122", "1122334",
                "112233445566778899aabbccddeeff00"]:
        if "FAIL" not in dev[0].request('SET_CRED {} required_roaming_consortium {}'.format(id, val)):
            raise Exception("Invalid roaming consortium value accepted: " + val)
def test_ap_hs20_req_roaming_consortium_no_match(dev, apdev):
    """Hotspot 2.0 required roaming consortium and no match"""
    check_eap_capa(dev[0], "MSCHAPV2")
    # First AP advertises no roaming consortium at all; second one only a
    # non-matching OI.
    params = hs20_ap_params()
    del params['roaming_consortium']
    hostapd.add_ap(apdev[0], params)
    params = hs20_ap_params()
    params['ssid'] = "test-hs20-other"
    params['roaming_consortium'] = ["223345"]
    hostapd.add_ap(apdev[1], params)
    cred = default_cred()
    cred['required_roaming_consortium'] = "223344"
    dev[0].hs20_enable()
    dev[0].add_cred_values(cred)
    dev[0].request("INTERWORKING_SELECT auto freq=2412")
    # Neither AP can satisfy the required OI, so no match is expected.
    if dev[0].wait_event(["INTERWORKING-NO-MATCH"], timeout=10) is None:
        raise Exception("INTERWORKING-NO-MATCH not reported")
def test_ap_hs20_excluded_ssid(dev, apdev):
    """Hotspot 2.0 exclusion based on SSID"""
    check_eap_capa(dev[0], "MSCHAPV2")

    def check_excluded(events, bssid):
        # The excluded BSS must be reported exactly once as blacklisted in
        # the events from the selection round.
        ev = [e for e in events if "INTERWORKING-BLACKLISTED " + bssid in e]
        if len(ev) != 1:
            raise Exception("Excluded network not reported")

    params = hs20_ap_params()
    params['roaming_consortium'] = ["223344"]
    params['anqp_3gpp_cell_net'] = "555,444"
    hostapd.add_ap(apdev[0], params)
    params = hs20_ap_params()
    params['ssid'] = "test-hs20-other"
    params['roaming_consortium'] = ["223344"]
    params['anqp_3gpp_cell_net'] = "555,444"
    hostapd.add_ap(apdev[1], params)

    # Credential matching via NAI realm: excluded_ssid must blacklist the
    # named SSID in either direction.
    values = default_cred()
    values['excluded_ssid'] = "test-hs20"
    events = policy_test(dev[0], apdev[1], values)
    check_excluded(events, apdev[0]['bssid'])
    values['excluded_ssid'] = "test-hs20-other"
    events = policy_test(dev[0], apdev[0], values)
    check_excluded(events, apdev[1]['bssid'])

    # Credential matching via roaming consortium
    values = default_cred()
    values['roaming_consortium'] = "223344"
    values['eap'] = "TTLS"
    values['phase2'] = "auth=MSCHAPV2"
    values['excluded_ssid'] = "test-hs20"
    events = policy_test(dev[0], apdev[1], values)
    check_excluded(events, apdev[0]['bssid'])

    # Credential matching via 3GPP cellular network info
    values = {'imsi': "555444-333222111", 'eap': "SIM",
              'milenage': "5122250214c33e723a5dd523fc145fc0:981d464c7c52eb6e5036234984ad0bcf:000000000123",
              'excluded_ssid': "test-hs20"}
    events = policy_test(dev[0], apdev[1], values)
    check_excluded(events, apdev[0]['bssid'])
def test_ap_hs20_roam_to_higher_prio(dev, apdev):
    """Hotspot 2.0 and roaming from current to higher priority network"""
    check_eap_capa(dev[0], "MSCHAPV2")
    bssid_visited = apdev[0]['bssid']
    params = hs20_ap_params(ssid="test-hs20-visited")
    params['domain_name'] = "visited.example.org"
    hostapd.add_ap(apdev[0], params)

    dev[0].hs20_enable()
    dev[0].add_cred_values({'realm': "example.com",
                            'ca_cert': "auth_serv/ca.pem",
                            'username': "hs20-test",
                            'password': "password",
                            'domain': "example.com"})
    logger.info("Connect to the only network option")
    interworking_select(dev[0], bssid_visited, "roaming", freq="2412")
    dev[0].dump_monitor()
    interworking_connect(dev[0], bssid_visited, "TTLS")

    logger.info("Start another AP (home operator) and reconnect")
    bssid_home = apdev[1]['bssid']
    params = hs20_ap_params(ssid="test-hs20-home")
    params['domain_name'] = "example.com"
    hostapd.add_ap(apdev[1], params)

    dev[0].scan_for_bss(bssid_home, freq="2412", force_scan=True)
    dev[0].request("INTERWORKING_SELECT auto freq=2412")
    ev = dev[0].wait_event(["INTERWORKING-NO-MATCH",
                            "INTERWORKING-ALREADY-CONNECTED",
                            "CTRL-EVENT-CONNECTED"], timeout=15)
    if ev is None:
        raise Exception("Connection timed out")
    if "INTERWORKING-NO-MATCH" in ev:
        raise Exception("Matching AP not found")
    if "INTERWORKING-ALREADY-CONNECTED" in ev:
        raise Exception("Unexpected AP selected")
    # The station must have roamed to the home AP.
    if bssid_home not in ev:
        raise Exception("Unexpected BSSID after reconnection")
def test_ap_hs20_domain_suffix_match_full(dev, apdev):
    """Hotspot 2.0 and domain_suffix_match"""
    check_domain_match_full(dev[0])
    check_eap_capa(dev[0], "MSCHAPV2")
    bssid = apdev[0]['bssid']
    hostapd.add_ap(apdev[0], hs20_ap_params())
    dev[0].hs20_enable()
    cred_id = dev[0].add_cred_values({'realm': "example.com",
                                      'username': "hs20-test",
                                      'password': "password",
                                      'ca_cert': "auth_serv/ca.pem",
                                      'domain': "example.com",
                                      'domain_suffix_match': "server.w1.fi"})
    # Matching full-name constraint: connection must succeed.
    interworking_select(dev[0], bssid, "home", freq="2412")
    dev[0].dump_monitor()
    interworking_connect(dev[0], bssid, "TTLS")
    dev[0].request("REMOVE_NETWORK all")
    dev[0].dump_monitor()
    # Mismatching constraint: expect a TLS certificate error.
    dev[0].set_cred_quoted(cred_id, "domain_suffix_match",
                           "no-match.example.com")
    interworking_select(dev[0], bssid, "home", freq="2412")
    dev[0].dump_monitor()
    dev[0].request("INTERWORKING_CONNECT " + bssid)
    ev = dev[0].wait_event(["CTRL-EVENT-EAP-TLS-CERT-ERROR"])
    if ev is None:
        raise Exception("TLS certificate error not reported")
    if "Domain suffix mismatch" not in ev:
        raise Exception("Domain suffix mismatch not reported")
def test_ap_hs20_domain_suffix_match(dev, apdev):
    """Hotspot 2.0 and domain_suffix_match"""
    check_eap_capa(dev[0], "MSCHAPV2")
    check_domain_match_full(dev[0])
    bssid = apdev[0]['bssid']
    hostapd.add_ap(apdev[0], hs20_ap_params())
    dev[0].hs20_enable()
    # Suffix-only constraint; connection is expected to complete.
    dev[0].add_cred_values({'realm': "example.com",
                            'username': "hs20-test",
                            'password': "password",
                            'ca_cert': "auth_serv/ca.pem",
                            'domain': "example.com",
                            'domain_suffix_match': "w1.fi"})
    interworking_select(dev[0], bssid, "home", freq="2412")
    dev[0].dump_monitor()
    interworking_connect(dev[0], bssid, "TTLS")
def test_ap_hs20_roaming_partner_preference(dev, apdev):
    """Hotspot 2.0 and roaming partner preference"""
    check_eap_capa(dev[0], "MSCHAPV2")
    params = hs20_ap_params()
    params['domain_name'] = "roaming.example.org"
    hostapd.add_ap(apdev[0], params)
    params = hs20_ap_params()
    params['ssid'] = "test-hs20-other"
    params['domain_name'] = "roaming.example.net"
    hostapd.add_ap(apdev[1], params)

    logger.info("Verify default vs. specified preference")
    cred = default_cred()
    # roaming_partner format: <FQDN>,<exact-match>,<priority>,<country>
    cred['roaming_partner'] = "roaming.example.net,1,127,*"
    policy_test(dev[0], apdev[1], cred, only_one=False)
    cred['roaming_partner'] = "roaming.example.net,1,129,*"
    policy_test(dev[0], apdev[0], cred, only_one=False)

    logger.info("Verify partial FQDN match")
    cred['roaming_partner'] = "example.net,0,0,*"
    policy_test(dev[0], apdev[1], cred, only_one=False)
    cred['roaming_partner'] = "example.net,0,255,*"
    policy_test(dev[0], apdev[0], cred, only_one=False)
def test_ap_hs20_max_bss_load(dev, apdev):
    """Hotspot 2.0 and maximum BSS load"""
    check_eap_capa(dev[0], "MSCHAPV2")

    def check_bss_load(events, bssid, expect_over):
        # Each BSS must appear exactly once in the INTERWORKING-AP events
        # with the expected over_max_bss_load flag state.
        ev = [e for e in events if "INTERWORKING-AP " + bssid in e]
        if len(ev) != 1 or (expect_over and
                            "over_max_bss_load=1" not in ev[0]):
            raise Exception("Maximum BSS Load case not noticed")
        if not expect_over and "over_max_bss_load=1" in ev[0]:
            raise Exception("Maximum BSS Load case reported incorrectly")

    params = hs20_ap_params()
    params['bss_load_test'] = "12:200:20000"
    hostapd.add_ap(apdev[0], params)
    params = hs20_ap_params()
    params['ssid'] = "test-hs20-other"
    params['bss_load_test'] = "5:20:10000"
    hostapd.add_ap(apdev[1], params)

    logger.info("Verify maximum BSS load constraint")
    values = default_cred()
    values['domain'] = "example.com"
    values['max_bss_load'] = "100"
    # Only the first AP exceeds max_bss_load=100; the second must win.
    events = policy_test(dev[0], apdev[1], values, only_one=False)
    check_bss_load(events, apdev[0]['bssid'], True)
    check_bss_load(events, apdev[1]['bssid'], False)

    logger.info("Verify maximum BSS load does not prevent connection")
    # With max_bss_load=1 both APs are over the limit, but a connection
    # must still be made to one of them.
    values['max_bss_load'] = "1"
    events = policy_test(dev[0], None, values)
    check_bss_load(events, apdev[0]['bssid'], True)
    check_bss_load(events, apdev[1]['bssid'], True)
def test_ap_hs20_max_bss_load2(dev, apdev):
    """Hotspot 2.0 and maximum BSS load with one AP not advertising"""
    check_eap_capa(dev[0], "MSCHAPV2")
    params = hs20_ap_params()
    params['bss_load_test'] = "12:200:20000"
    hostapd.add_ap(apdev[0], params)
    # Second AP does not advertise BSS Load at all.
    params = hs20_ap_params()
    params['ssid'] = "test-hs20-other"
    hostapd.add_ap(apdev[1], params)
    logger.info("Verify maximum BSS load constraint with AP advertisement")
    cred = default_cred()
    cred['domain'] = "example.com"
    cred['max_bss_load'] = "100"
    events = policy_test(dev[0], apdev[1], cred, only_one=False)
    overloaded = [e for e in events
                  if "INTERWORKING-AP " + apdev[0]['bssid'] in e]
    if len(overloaded) != 1 or "over_max_bss_load=1" not in overloaded[0]:
        raise Exception("Maximum BSS Load case not noticed")
    silent = [e for e in events
              if "INTERWORKING-AP " + apdev[1]['bssid'] in e]
    if len(silent) != 1 or "over_max_bss_load=1" in silent[0]:
        raise Exception("Maximum BSS Load case reported incorrectly")
def test_ap_hs20_max_bss_load_roaming(dev, apdev):
    """Hotspot 2.0 and maximum BSS load (roaming)"""
    check_eap_capa(dev[0], "MSCHAPV2")
    params = hs20_ap_params()
    params['bss_load_test'] = "12:200:20000"
    hostapd.add_ap(apdev[0], params)
    # Credential domain does not match the AP, so this is a roaming case;
    # over_max_bss_load must not be reported for it.
    cred = default_cred()
    cred['domain'] = "roaming.example.com"
    cred['max_bss_load'] = "100"
    events = policy_test(dev[0], apdev[0], cred, only_one=True)
    matches = [e for e in events
               if "INTERWORKING-AP " + apdev[0]['bssid'] in e]
    if len(matches) != 1:
        raise Exception("No INTERWORKING-AP event")
    if "over_max_bss_load=1" in matches[0]:
        raise Exception("Maximum BSS Load reported for roaming")
def test_ap_hs20_multi_cred_sp_prio(dev, apdev):
    """Hotspot 2.0 multi-cred sp_priority"""
    check_eap_capa(dev[0], "MSCHAPV2")
    try:
        _test_ap_hs20_multi_cred_sp_prio(dev, apdev)
    finally:
        # The helper enables external SIM processing; always restore the
        # default even when the test body fails.
        dev[0].request("SET external_sim 0")
def _test_ap_hs20_multi_cred_sp_prio(dev, apdev):
    # Helper for test_ap_hs20_multi_cred_sp_prio(); the caller restores the
    # external_sim setting afterwards.
    hlr_auc_gw_available()
    bssid = apdev[0]['bssid']
    params = hs20_ap_params()
    params['hessid'] = bssid
    del params['domain_name']
    params['anqp_3gpp_cell_net'] = "232,01"
    hostapd.add_ap(apdev[0], params)
    dev[0].hs20_enable()
    dev[0].scan_for_bss(bssid, freq="2412")
    dev[0].request("SET external_sim 1")
    # Two credentials that both match the AP; sp_priority 1 is expected to
    # win over 2 (SIM first, then TTLS after the swap below).
    sim_id = dev[0].add_cred_values({'imsi': "23201-0000000000",
                                     'eap': "SIM",
                                     'provisioning_sp': "example.com",
                                     'sp_priority': "1"})
    ttls_id = dev[0].add_cred_values({'realm': "example.com",
                                      'ca_cert': "auth_serv/ca.pem",
                                      'username': "hs20-test",
                                      'password': "password",
                                      'domain': "example.com",
                                      'provisioning_sp': "example.com",
                                      'sp_priority': "2"})
    dev[0].dump_monitor()
    dev[0].scan_for_bss(bssid, freq="2412")
    dev[0].request("INTERWORKING_SELECT auto freq=2412")
    interworking_ext_sim_auth(dev[0], "SIM")
    check_sp_type(dev[0], "unknown")
    dev[0].request("REMOVE_NETWORK all")
    # Swap the priorities and verify the TTLS credential is now selected.
    dev[0].set_cred(sim_id, "sp_priority", "2")
    dev[0].set_cred(ttls_id, "sp_priority", "1")
    dev[0].dump_monitor()
    dev[0].request("INTERWORKING_SELECT auto freq=2412")
    interworking_auth(dev[0], "TTLS")
    check_sp_type(dev[0], "unknown")
def test_ap_hs20_multi_cred_sp_prio2(dev, apdev):
    """Hotspot 2.0 multi-cred sp_priority with two BSSes"""
    check_eap_capa(dev[0], "MSCHAPV2")
    try:
        _test_ap_hs20_multi_cred_sp_prio2(dev, apdev)
    finally:
        # The helper enables external SIM processing; always restore the
        # default even when the test body fails.
        dev[0].request("SET external_sim 0")
def _test_ap_hs20_multi_cred_sp_prio2(dev, apdev):
    # Helper for test_ap_hs20_multi_cred_sp_prio2(); the caller restores the
    # external_sim setting afterwards.
    hlr_auc_gw_available()
    bssid = apdev[0]['bssid']
    # First AP: no NAI Realm / Domain Name, only 3GPP cellular info, so it
    # can only match the SIM credential below.
    params = hs20_ap_params()
    params['hessid'] = bssid
    del params['nai_realm']
    del params['domain_name']
    params['anqp_3gpp_cell_net'] = "232,01"
    hostapd.add_ap(apdev[0], params)
    # Second AP: NAI Realm but no 3GPP info, so it can only match the TTLS
    # credential below.
    bssid2 = apdev[1]['bssid']
    params = hs20_ap_params()
    params['ssid'] = "test-hs20-other"
    params['hessid'] = bssid2
    del params['domain_name']
    del params['anqp_3gpp_cell_net']
    hostapd.add_ap(apdev[1], params)
    dev[0].hs20_enable()
    dev[0].request("SET external_sim 1")
    # Two credentials with different sp_priority values (smaller value is
    # selected first, per the checks below).
    id1 = dev[0].add_cred_values({'imsi': "23201-0000000000", 'eap': "SIM",
                                  'provisioning_sp': "example.com",
                                  'sp_priority': "1"})
    id2 = dev[0].add_cred_values({'realm': "example.com",
                                  'ca_cert': "auth_serv/ca.pem",
                                  'username': "hs20-test",
                                  'password': "password",
                                  'domain': "example.com",
                                  'provisioning_sp': "example.com",
                                  'sp_priority': "2"})
    dev[0].dump_monitor()
    dev[0].scan_for_bss(bssid, freq="2412")
    dev[0].scan_for_bss(bssid2, freq="2412")
    dev[0].request("INTERWORKING_SELECT auto freq=2412")
    # SIM credential has priority 1 -> expect connection to the first AP.
    interworking_ext_sim_auth(dev[0], "SIM")
    check_sp_type(dev[0], "unknown")
    conn_bssid = dev[0].get_status_field("bssid")
    if conn_bssid != bssid:
        raise Exception("Connected to incorrect BSS")
    dev[0].request("REMOVE_NETWORK all")
    # Swap priorities -> the TTLS credential (and thus the second AP) must
    # now be selected.
    dev[0].set_cred(id1, "sp_priority", "2")
    dev[0].set_cred(id2, "sp_priority", "1")
    dev[0].dump_monitor()
    dev[0].request("INTERWORKING_SELECT auto freq=2412")
    interworking_auth(dev[0], "TTLS")
    check_sp_type(dev[0], "unknown")
    conn_bssid = dev[0].get_status_field("bssid")
    if conn_bssid != bssid2:
        raise Exception("Connected to incorrect BSS")
def test_ap_hs20_multi_cred_sp_prio_same(dev, apdev):
    """Hotspot 2.0 multi-cred and same sp_priority"""
    check_eap_capa(dev[0], "MSCHAPV2")
    hlr_auc_gw_available()
    bssid = apdev[0]['bssid']
    params = hs20_ap_params()
    params['hessid'] = bssid
    del params['domain_name']
    params['anqp_3gpp_cell_net'] = "232,01"
    hostapd.add_ap(apdev[0], params)
    dev[0].hs20_enable()
    dev[0].scan_for_bss(bssid, freq="2412")
    # Two TTLS credentials that differ only in home domain while sharing
    # the same sp_priority value.
    for home_domain in ["domain1.example.com", "domain2.example.com"]:
        dev[0].add_cred_values({'realm': "example.com",
                                'ca_cert': "auth_serv/ca.pem",
                                'username': "hs20-test",
                                'password': "password",
                                'domain': home_domain,
                                'provisioning_sp': "example.com",
                                'sp_priority': "1"})
    dev[0].dump_monitor()
    dev[0].scan_for_bss(bssid, freq="2412")
    # Selection must still complete and pick the only available BSS.
    check_auto_select(dev[0], bssid)
def check_conn_capab_selection(dev, type, missing):
    """Run INTERWORKING_SELECT and verify the conn_capab_missing flag.

    type: expected network type in the INTERWORKING-AP event
    missing: whether conn_capab_missing=1 is expected in the event
    """
    dev.request("INTERWORKING_SELECT freq=2412")
    ev = dev.wait_event(["INTERWORKING-AP"])
    if ev is None:
        raise Exception("Network selection timed out")
    if "type=" + type not in ev:
        raise Exception("Unexpected network type")
    reported = "conn_capab_missing=1" in ev
    if missing and not reported:
        raise Exception("conn_capab_missing not reported")
    if reported and not missing:
        raise Exception("conn_capab_missing reported unexpectedly")
def conn_capab_cred(domain=None, req_conn_capab=None):
    """Return a default credential, optionally with req_conn_capab added."""
    extra = {'req_conn_capab': req_conn_capab} if req_conn_capab else {}
    cred = default_cred(domain=domain)
    cred.update(extra)
    return cred
def test_ap_hs20_req_conn_capab(dev, apdev):
    """Hotspot 2.0 network selection with req_conn_capab"""
    check_eap_capa(dev[0], "MSCHAPV2")
    bssid = apdev[0]['bssid']
    hostapd.add_ap(apdev[0], hs20_ap_params())
    dev[0].hs20_enable()
    dev[0].scan_for_bss(bssid, freq="2412")

    logger.info("Not used in home network")
    cred_id = dev[0].add_cred_values(conn_capab_cred(domain="example.com",
                                                     req_conn_capab="6:1234"))
    check_conn_capab_selection(dev[0], "home", False)

    logger.info("Used in roaming network")
    dev[0].remove_cred(cred_id)
    cred_id = dev[0].add_cred_values(conn_capab_cred(domain="example.org",
                                                     req_conn_capab="6:1234"))
    check_conn_capab_selection(dev[0], "roaming", True)

    logger.info("Verify that req_conn_capab does not prevent connection if no other network is available")
    check_auto_select(dev[0], bssid)

    logger.info("Additional req_conn_capab checks")
    for capab in ["1:0", "17:5060"]:
        dev[0].remove_cred(cred_id)
        cred_id = dev[0].add_cred_values(
            conn_capab_cred(domain="example.org", req_conn_capab=capab))
        check_conn_capab_selection(dev[0], "roaming", True)

    # Second AP advertising the needed connection capabilities
    bssid2 = apdev[1]['bssid']
    params = hs20_ap_params(ssid="test-hs20b")
    params['hs20_conn_capab'] = ["1:0:2", "6:22:1", "17:5060:0", "50:0:1"]
    hostapd.add_ap(apdev[1], params)

    dev[0].remove_cred(cred_id)
    cred_id = dev[0].add_cred_values(conn_capab_cred(domain="example.org",
                                                     req_conn_capab="50"))
    dev[0].set_cred(cred_id, "req_conn_capab", "6:22")
    dev[0].scan_for_bss(bssid2, freq="2412")
    dev[0].request("INTERWORKING_SELECT freq=2412")
    # Expect one event per BSS: the first AP lacks the capability, the
    # second provides it.
    for _ in range(0, 2):
        ev = dev[0].wait_event(["INTERWORKING-AP"])
        if ev is None:
            raise Exception("Network selection timed out")
        if bssid in ev and "conn_capab_missing=1" not in ev:
            raise Exception("Missing protocol connection capability not reported")
        if bssid2 in ev and "conn_capab_missing=1" in ev:
            raise Exception("Protocol connection capability not reported correctly")
def test_ap_hs20_req_conn_capab2(dev, apdev):
    """Hotspot 2.0 network selection with req_conn_capab (not present)"""
    check_eap_capa(dev[0], "MSCHAPV2")
    bssid = apdev[0]['bssid']
    # AP without the Connection Capability ANQP element
    params = hs20_ap_params()
    del params['hs20_conn_capab']
    hostapd.add_ap(apdev[0], params)
    dev[0].hs20_enable()
    dev[0].scan_for_bss(bssid, freq="2412")
    dev[0].add_cred_values(conn_capab_cred(domain="example.org",
                                           req_conn_capab="6:1234"))
    # With no advertisement at all, conn_capab_missing must not be set.
    check_conn_capab_selection(dev[0], "roaming", False)
def test_ap_hs20_req_conn_capab_and_roaming_partner_preference(dev, apdev):
    """Hotspot 2.0 and req_conn_capab with roaming partner preference"""
    check_eap_capa(dev[0], "MSCHAPV2")
    bssid = apdev[0]['bssid']
    params = hs20_ap_params()
    params['domain_name'] = "roaming.example.org"
    params['hs20_conn_capab'] = ["1:0:2", "6:22:1", "17:5060:0", "50:0:1"]
    hostapd.add_ap(apdev[0], params)
    bssid2 = apdev[1]['bssid']
    params = hs20_ap_params(ssid="test-hs20-b")
    params['domain_name'] = "roaming.example.net"
    hostapd.add_ap(apdev[1], params)

    # Roaming partner preference alone selects the second AP.
    cred = default_cred()
    cred['roaming_partner'] = "roaming.example.net,1,127,*"
    cred_id = dev[0].add_cred_values(cred)
    check_auto_select(dev[0], bssid2)

    # Requiring capability 50 flips the choice to the first AP, which
    # advertises it.
    dev[0].set_cred(cred_id, "req_conn_capab", "50")
    check_auto_select(dev[0], bssid)

    # A capability neither AP advertises leaves preference to decide.
    dev[0].remove_cred(cred_id)
    cred_id = dev[0].add_cred_values(cred)
    dev[0].set_cred(cred_id, "req_conn_capab", "51")
    check_auto_select(dev[0], bssid2)
def check_bandwidth_selection(dev, type, below):
    """Run INTERWORKING_SELECT and verify the below_min_backhaul flag.

    type: expected network type in the INTERWORKING-AP event
    below: whether below_min_backhaul=1 is expected in the event
    """
    dev.request("INTERWORKING_SELECT freq=2412")
    ev = dev.wait_event(["INTERWORKING-AP"])
    if ev is None:
        raise Exception("Network selection timed out")
    logger.debug("BSS entries:\n" + dev.request("BSS RANGE=ALL"))
    if "type=" + type not in ev:
        raise Exception("Unexpected network type")
    flagged = "below_min_backhaul=1" in ev
    if below and not flagged:
        raise Exception("below_min_backhaul not reported")
    if flagged and not below:
        raise Exception("below_min_backhaul reported unexpectedly")
def bw_cred(domain=None, dl_home=None, ul_home=None, dl_roaming=None, ul_roaming=None):
    """Build a default credential with optional minimum-bandwidth fields."""
    cred = default_cred(domain=domain)
    for key, val in [('min_dl_bandwidth_home', dl_home),
                     ('min_ul_bandwidth_home', ul_home),
                     ('min_dl_bandwidth_roaming', dl_roaming),
                     ('min_ul_bandwidth_roaming', ul_roaming)]:
        if val:
            cred[key] = str(val)
    return cred
def test_ap_hs20_min_bandwidth_home(dev, apdev):
    """Hotspot 2.0 network selection with min bandwidth (home)"""
    check_eap_capa(dev[0], "MSCHAPV2")
    bssid = apdev[0]['bssid']
    hostapd.add_ap(apdev[0], hs20_ap_params())
    dev[0].hs20_enable()
    dev[0].scan_for_bss(bssid, freq="2412")
    # (dl, ul, expected below_min_backhaul) against the default WAN metrics
    for dl, ul, below in [(5490, 58, False), (5491, 58, True),
                          (5490, 59, True)]:
        cred_id = dev[0].add_cred_values(bw_cred(domain="example.com",
                                                 dl_home=dl, ul_home=ul))
        check_bandwidth_selection(dev[0], "home", below)
        dev[0].remove_cred(cred_id)
    # The last credential remains in place for the auto-select checks.
    dev[0].add_cred_values(bw_cred(domain="example.com", dl_home=5491,
                                   ul_home=59))
    check_bandwidth_selection(dev[0], "home", True)
    check_auto_select(dev[0], bssid)
    # A second AP with better WAN metrics becomes the preferred choice.
    bssid2 = apdev[1]['bssid']
    params = hs20_ap_params(ssid="test-hs20-b")
    params['hs20_wan_metrics'] = "01:8000:1000:1:1:3000"
    hostapd.add_ap(apdev[1], params)
    check_auto_select(dev[0], bssid2)
def test_ap_hs20_min_bandwidth_home2(dev, apdev):
    """Hotspot 2.0 network selection with min bandwidth - special cases"""
    check_eap_capa(dev[0], "MSCHAPV2")
    bssid = apdev[0]['bssid']
    hapd = hostapd.add_ap(apdev[0], hs20_ap_params())
    dev[0].hs20_enable()
    dev[0].scan_for_bss(bssid, freq="2412")
    cred_id = dev[0].add_cred_values(bw_cred(domain="example.com",
                                             dl_home=5490, ul_home=58))
    check_bandwidth_selection(dev[0], "home", False)

    logger.info("WAN link at capacity")
    hapd.set('hs20_wan_metrics', "09:8000:1000:80:240:3000")
    check_bandwidth_selection(dev[0], "home", True)

    logger.info("Downlink/Uplink Load was not measured")
    hapd.set('hs20_wan_metrics', "01:8000:1000:80:240:0")
    check_bandwidth_selection(dev[0], "home", False)

    logger.info("Uplink and Downlink max values")
    hapd.set('hs20_wan_metrics', "01:4294967295:4294967295:80:240:3000")
    check_bandwidth_selection(dev[0], "home", False)

    dev[0].remove_cred(cred_id)
def test_ap_hs20_min_bandwidth_home_hidden_ssid_in_scan_res(dev, apdev):
    """Hotspot 2.0 network selection with min bandwidth (home) while hidden SSID is included in scan results"""
    check_eap_capa(dev[0], "MSCHAPV2")
    bssid = apdev[0]['bssid']
    # First populate the scan results with a hidden-SSID BSS on the same
    # BSSID, then tear that AP down completely before starting the HS 2.0
    # AP, so a stale hidden-SSID entry exists in the station's BSS table.
    hapd = hostapd.add_ap(apdev[0], {"ssid": 'secret',
                                     "ignore_broadcast_ssid": "1"})
    dev[0].scan_for_bss(bssid, freq=2412)
    hapd.disable()
    hapd_global = hostapd.HostapdGlobal(apdev[0])
    hapd_global.flush()
    hapd_global.remove(apdev[0]['ifname'])
    params = hs20_ap_params()
    hostapd.add_ap(apdev[0], params)
    dev[0].hs20_enable()
    dev[0].scan_for_bss(bssid, freq="2412")
    # Same (dl, ul) boundary combinations as in
    # test_ap_hs20_min_bandwidth_home: only the first stays within limits.
    values = bw_cred(domain="example.com", dl_home=5490, ul_home=58)
    id = dev[0].add_cred_values(values)
    check_bandwidth_selection(dev[0], "home", False)
    dev[0].remove_cred(id)
    values = bw_cred(domain="example.com", dl_home=5491, ul_home=58)
    id = dev[0].add_cred_values(values)
    check_bandwidth_selection(dev[0], "home", True)
    dev[0].remove_cred(id)
    values = bw_cred(domain="example.com", dl_home=5490, ul_home=59)
    id = dev[0].add_cred_values(values)
    check_bandwidth_selection(dev[0], "home", True)
    dev[0].remove_cred(id)
    values = bw_cred(domain="example.com", dl_home=5491, ul_home=59)
    id = dev[0].add_cred_values(values)
    check_bandwidth_selection(dev[0], "home", True)
    check_auto_select(dev[0], bssid)
    # A second AP with better WAN metrics becomes the preferred choice.
    bssid2 = apdev[1]['bssid']
    params = hs20_ap_params(ssid="test-hs20-b")
    params['hs20_wan_metrics'] = "01:8000:1000:1:1:3000"
    hostapd.add_ap(apdev[1], params)
    check_auto_select(dev[0], bssid2)
    # Clear the hidden-SSID entry so it does not affect later tests.
    dev[0].flush_scan_cache()
def test_ap_hs20_min_bandwidth_roaming(dev, apdev):
    """Hotspot 2.0 network selection with min bandwidth (roaming)"""
    check_eap_capa(dev[0], "MSCHAPV2")
    bssid = apdev[0]['bssid']
    hostapd.add_ap(apdev[0], hs20_ap_params())
    dev[0].hs20_enable()
    dev[0].scan_for_bss(bssid, freq="2412")
    # (dl, ul, expected below_min_backhaul) against the default WAN metrics
    for dl, ul, below in [(5490, 58, False), (5491, 58, True),
                          (5490, 59, True)]:
        cred_id = dev[0].add_cred_values(bw_cred(domain="example.org",
                                                 dl_roaming=dl,
                                                 ul_roaming=ul))
        check_bandwidth_selection(dev[0], "roaming", below)
        dev[0].remove_cred(cred_id)
    # The last credential remains in place for the auto-select checks.
    dev[0].add_cred_values(bw_cred(domain="example.org", dl_roaming=5491,
                                   ul_roaming=59))
    check_bandwidth_selection(dev[0], "roaming", True)
    check_auto_select(dev[0], bssid)
    # A second AP with better WAN metrics becomes the preferred choice.
    bssid2 = apdev[1]['bssid']
    params = hs20_ap_params(ssid="test-hs20-b")
    params['hs20_wan_metrics'] = "01:8000:1000:1:1:3000"
    hostapd.add_ap(apdev[1], params)
    check_auto_select(dev[0], bssid2)
def test_ap_hs20_min_bandwidth_and_roaming_partner_preference(dev, apdev):
    """Hotspot 2.0 and minimum bandwidth with roaming partner preference"""
    check_eap_capa(dev[0], "MSCHAPV2")
    bssid = apdev[0]['bssid']
    params = hs20_ap_params()
    params['domain_name'] = "roaming.example.org"
    params['hs20_wan_metrics'] = "01:8000:1000:1:1:3000"
    hostapd.add_ap(apdev[0], params)
    bssid2 = apdev[1]['bssid']
    params = hs20_ap_params(ssid="test-hs20-b")
    params['domain_name'] = "roaming.example.net"
    hostapd.add_ap(apdev[1], params)
    # Roaming partner preference selects the second AP by default.
    cred = default_cred()
    cred['roaming_partner'] = "roaming.example.net,1,127,*"
    cred_id = dev[0].add_cred_values(cred)
    check_auto_select(dev[0], bssid2)
    # A 6000 downlink requirement is satisfied only by the first AP.
    dev[0].set_cred(cred_id, "min_dl_bandwidth_roaming", "6000")
    check_auto_select(dev[0], bssid)
    # 10000 is satisfied by neither AP, so preference decides again.
    dev[0].set_cred(cred_id, "min_dl_bandwidth_roaming", "10000")
    check_auto_select(dev[0], bssid2)
def test_ap_hs20_min_bandwidth_no_wan_metrics(dev, apdev):
    """Hotspot 2.0 network selection with min bandwidth but no WAN Metrics"""
    bssid = apdev[0]['bssid']
    # AP without the WAN Metrics ANQP element
    params = hs20_ap_params()
    del params['hs20_wan_metrics']
    hostapd.add_ap(apdev[0], params)
    dev[0].hs20_enable()
    dev[0].scan_for_bss(bssid, freq="2412")
    cred = bw_cred(domain="example.com", dl_home=10000, ul_home=10000,
                   dl_roaming=10000, ul_roaming=10000)
    dev[0].add_cred_values(cred)
    # Without WAN Metrics information, below_min_backhaul must not be set.
    check_bandwidth_selection(dev[0], "home", False)
def test_ap_hs20_deauth_req_ess(dev, apdev):
    """Hotspot 2.0 connection and deauthentication request for ESS"""
    check_eap_capa(dev[0], "MSCHAPV2")
    try:
        _test_ap_hs20_deauth_req_ess(dev, apdev)
    finally:
        # The helper enables PMF; always restore the default setting even
        # when the test body fails.
        dev[0].request("SET pmf 0")
def _test_ap_hs20_deauth_req_ess(dev, apdev):
    # Helper for test_ap_hs20_deauth_req_ess(); the caller restores the pmf
    # setting afterwards.
    dev[0].request("SET pmf 2")
    hapd = eap_test(dev[0], apdev[0], "21[3:26]", "TTLS", "user")
    dev[0].dump_monitor()
    sta_addr = dev[0].p2p_interface_addr()
    hapd.wait_sta()
    # Deauthentication imminent notice covering the whole ESS (code 1)
    hapd.request("HS20_DEAUTH_REQ " + sta_addr + " 1 120 http://example.com/")
    ev = dev[0].wait_event(["HS20-DEAUTH-IMMINENT-NOTICE"])
    if ev is None:
        raise Exception("Timeout on deauth imminent notice")
    if "1 120 http://example.com/" not in ev:
        raise Exception("Unexpected deauth imminent notice: " + ev)
    hapd.request("DEAUTHENTICATE " + sta_addr)
    dev[0].wait_disconnected(timeout=10)
    # The network must be temporarily disabled and no reconnection attempt
    # made within the delay.
    if "[TEMP-DISABLED]" not in dev[0].list_networks()[0]['flags']:
        raise Exception("Network not marked temporarily disabled")
    ev = dev[0].wait_event(["SME: Trying to authenticate",
                            "Trying to associate",
                            "CTRL-EVENT-CONNECTED"], timeout=5)
    if ev is not None:
        raise Exception("Unexpected connection attempt")
def test_ap_hs20_deauth_req_bss(dev, apdev):
    """Hotspot 2.0 connection and deauthentication request for BSS"""
    check_eap_capa(dev[0], "MSCHAPV2")
    try:
        _test_ap_hs20_deauth_req_bss(dev, apdev)
    finally:
        # The helper enables PMF; always restore the default setting even
        # when the test body fails.
        dev[0].request("SET pmf 0")
def _test_ap_hs20_deauth_req_bss(dev, apdev):
    # Helper for test_ap_hs20_deauth_req_bss(); the caller restores the pmf
    # setting afterwards.
    dev[0].request("SET pmf 2")
    hapd = eap_test(dev[0], apdev[0], "21[3:26]", "TTLS", "user")
    dev[0].dump_monitor()
    sta_addr = dev[0].p2p_interface_addr()
    hapd.wait_sta()
    # Deauthentication imminent notice limited to this BSS (code 0)
    hapd.request("HS20_DEAUTH_REQ " + sta_addr + " 0 120 http://example.com/")
    ev = dev[0].wait_event(["HS20-DEAUTH-IMMINENT-NOTICE"])
    if ev is None:
        raise Exception("Timeout on deauth imminent notice")
    if "0 120 http://example.com/" not in ev:
        raise Exception("Unexpected deauth imminent notice: " + ev)
    hapd.request("DEAUTHENTICATE " + sta_addr + " reason=4")
    ev = dev[0].wait_disconnected(timeout=10)
    if "reason=4" not in ev:
        raise Exception("Unexpected disconnection reason")
    # The network must be temporarily disabled and no reconnection attempt
    # made within the delay.
    if "[TEMP-DISABLED]" not in dev[0].list_networks()[0]['flags']:
        raise Exception("Network not marked temporarily disabled")
    ev = dev[0].wait_event(["SME: Trying to authenticate",
                            "Trying to associate",
                            "CTRL-EVENT-CONNECTED"], timeout=5)
    if ev is not None:
        raise Exception("Unexpected connection attempt")
def test_ap_hs20_deauth_req_from_radius(dev, apdev):
    """Hotspot 2.0 connection and deauthentication request from RADIUS"""
    check_eap_capa(dev[0], "MSCHAPV2")
    try:
        _test_ap_hs20_deauth_req_from_radius(dev, apdev)
    finally:
        # The helper enables PMF; always restore the default setting even
        # when the test body fails.
        dev[0].request("SET pmf 0")
def _test_ap_hs20_deauth_req_from_radius(dev, apdev):
    # Helper for test_ap_hs20_deauth_req_from_radius(); the caller restores
    # the pmf setting afterwards.
    bssid = apdev[0]['bssid']
    params = hs20_ap_params()
    params['nai_realm'] = ["0,example.com,21[2:4]"]
    params['hs20_deauth_req_timeout'] = "2"
    hostapd.add_ap(apdev[0], params)
    dev[0].request("SET pmf 2")
    dev[0].hs20_enable()
    # The hs20-deauth-test user is expected to trigger a deauthentication
    # request from the RADIUS server side.
    dev[0].add_cred_values({'realm': "example.com",
                            'username': "hs20-deauth-test",
                            'password': "password"})
    interworking_select(dev[0], bssid, freq="2412")
    interworking_connect(dev[0], bssid, "TTLS")
    ev = dev[0].wait_event(["HS20-DEAUTH-IMMINENT-NOTICE"], timeout=5)
    if ev is None:
        raise Exception("Timeout on deauth imminent notice")
    if " 1 100" not in ev:
        raise Exception("Unexpected deauth imminent contents")
    dev[0].wait_disconnected(timeout=3)
def test_ap_hs20_deauth_req_without_pmf(dev, apdev):
    """Hotspot 2.0 connection and deauthentication request without PMF"""
    check_eap_capa(dev[0], "MSCHAPV2")
    dev[0].request("SET pmf 0")
    hapd = eap_test(dev[0], apdev[0], "21[3:26]", "TTLS", "user", release=1)
    dev[0].dump_monitor()
    # Reconnect with PMF explicitly disabled on the network profile.
    netw_id = int(dev[0].get_status_field("id"))
    dev[0].set_network(netw_id, "ieee80211w", "0")
    dev[0].request("DISCONNECT")
    dev[0].wait_disconnected()
    dev[0].select_network(netw_id, freq=2412)
    dev[0].wait_connected()
    sta_addr = dev[0].own_addr()
    hapd.wait_sta()
    # Without PMF the deauth imminent notice must not be delivered.
    hapd.request("HS20_DEAUTH_REQ " + sta_addr + " 1 120 http://example.com/")
    ev = dev[0].wait_event(["HS20-DEAUTH-IMMINENT-NOTICE"], timeout=0.2)
    if ev is not None:
        raise Exception("Deauth imminent notice without PMF accepted")
    # OOM while building the notification must make the command fail.
    with alloc_fail(hapd, 1, "wpabuf_alloc;hostapd_ctrl_iface_hs20_deauth_req"):
        if "FAIL" not in hapd.request("HS20_DEAUTH_REQ " + sta_addr + " 1 120 http://example.com/"):
            raise Exception("HS20_DEAUTH_REQ accepted during OOM")
def test_ap_hs20_deauth_req_pmf_htc(dev, apdev):
    """Hotspot 2.0 connection and deauthentication request PMF misbehavior (+HTC)"""
    try:
        run_ap_hs20_deauth_req_pmf_htc(dev, apdev)
    finally:
        # The helper starts a monitor interface on apdev[1]; always stop it
        # even when the test body fails.
        stop_monitor(apdev[1]["ifname"])
def run_ap_hs20_deauth_req_pmf_htc(dev, apdev):
    # Helper for test_ap_hs20_deauth_req_pmf_htc(); the caller stops the
    # monitor interface afterwards. Injects forged, unencrypted deauth
    # imminent Action frames that abuse the +HTC/Order bit and verifies the
    # station does not accept them.
    check_eap_capa(dev[0], "MSCHAPV2")
    dev[0].request("SET pmf 0")
    hapd = eap_test(dev[0], apdev[0], "21[3:26]", "TTLS", "user", release=1)
    dev[0].dump_monitor()
    hapd.wait_sta()
    sock = start_monitor(apdev[1]["ifname"])
    radiotap = radiotap_build()
    # Colon-free addresses for building the raw frame hex strings.
    # (The previous redundant "addr = dev[0].own_addr()" assignment was
    # dead - it was overwritten below before any use - and is removed.)
    bssid = hapd.own_addr().replace(':', '')
    addr = dev[0].own_addr().replace(':', '')
    # WNM Notification frame body; the trailing hex decodes to
    # "http://example.com/".
    payload = "0a1a0101dd1b506f9a0101780013687474703a2f2f6578616d706c652e636f6d2f"
    # Claim there is a HT Control field, but then start the frame body from
    # there and do not encrypt the Robust Action frame.
    frame = binascii.unhexlify("d0803a01" + addr + 2 * bssid + "0000" + payload)
    # Claim there is a HT Control field and start the frame body in the correct
    # location, but do not encrypt the Robust Action frame. Make the first octet
    # of HT Control field use a non-robust Action Category value.
    frame2 = binascii.unhexlify("d0803a01" + addr + 2 * bssid + "0000" + "04000000" + payload)
    sock.send(radiotap + frame)
    sock.send(radiotap + frame2)
    ev = dev[0].wait_event(["HS20-DEAUTH-IMMINENT-NOTICE"], timeout=1)
    if ev is not None:
        raise Exception("Deauth imminent notice without PMF accepted")
def test_ap_hs20_remediation_required(dev, apdev):
    """Hotspot 2.0 connection and remediation required from RADIUS"""
    check_eap_capa(dev[0], "MSCHAPV2")
    try:
        _test_ap_hs20_remediation_required(dev, apdev)
    finally:
        # The helper enables PMF; always restore the default setting even
        # when the test body fails.
        dev[0].request("SET pmf 0")
def _test_ap_hs20_remediation_required(dev, apdev):
    # Helper for test_ap_hs20_remediation_required(); the caller restores
    # the pmf setting afterwards.
    bssid = apdev[0]['bssid']
    params = hs20_ap_params()
    params['nai_realm'] = ["0,example.com,21[2:4]"]
    hostapd.add_ap(apdev[0], params)
    dev[0].request("SET pmf 1")
    dev[0].hs20_enable()
    # The hs20-subrem-test user is expected to trigger subscription
    # remediation from the RADIUS server side.
    dev[0].add_cred_values({'realm': "example.com",
                            'username': "hs20-subrem-test",
                            'password': "password"})
    interworking_select(dev[0], bssid, freq="2412")
    interworking_connect(dev[0], bssid, "TTLS")
    ev = dev[0].wait_event(["HS20-SUBSCRIPTION-REMEDIATION"], timeout=5)
    if ev is None:
        raise Exception("Timeout on subscription remediation notice")
    if " 1 https://example.com/" not in ev:
        raise Exception("Unexpected subscription remediation event contents")
def test_ap_hs20_remediation_required_ctrl(dev, apdev):
    """Hotspot 2.0 connection and subrem from ctrl_iface"""
    check_eap_capa(dev[0], "MSCHAPV2")
    try:
        _test_ap_hs20_remediation_required_ctrl(dev, apdev)
    finally:
        # The helper enables PMF; always restore the default setting even
        # when the test body fails.
        dev[0].request("SET pmf 0")
def _test_ap_hs20_remediation_required_ctrl(dev, apdev):
    """Helper: trigger subscription remediation notices via hostapd ctrl_iface."""
    bssid = apdev[0]['bssid']
    addr = dev[0].own_addr()
    params = hs20_ap_params()
    params['nai_realm'] = ["0,example.com,21[2:4]"]
    hapd = hostapd.add_ap(apdev[0], params)
    dev[0].request("SET pmf 1")
    dev[0].hs20_enable()
    dev[0].add_cred_values(default_cred())
    interworking_select(dev[0], bssid, freq="2412")
    interworking_connect(dev[0], bssid, "TTLS")
    # Notice with a URL
    hapd.request("HS20_WNM_NOTIF " + addr + " https://example.com/")
    ev = dev[0].wait_event(["HS20-SUBSCRIPTION-REMEDIATION"], timeout=5)
    if ev is None:
        raise Exception("Timeout on subscription remediation notice")
    if " 1 https://example.com/" not in ev:
        raise Exception("Unexpected subscription remediation event contents")
    # Notice without a URL: event ends with a trailing space and nothing after
    hapd.request("HS20_WNM_NOTIF " + addr)
    ev = dev[0].wait_event(["HS20-SUBSCRIPTION-REMEDIATION"], timeout=5)
    if ev is None:
        raise Exception("Timeout on subscription remediation notice")
    if not ev.endswith("HS20-SUBSCRIPTION-REMEDIATION "):
        raise Exception("Unexpected subscription remediation event contents: " + ev)
    # Invalid uses of the command must be rejected: missing address,
    # malformed address, over-long URL
    if "FAIL" not in hapd.request("HS20_WNM_NOTIF "):
        raise Exception("Unexpected HS20_WNM_NOTIF success")
    if "FAIL" not in hapd.request("HS20_WNM_NOTIF foo"):
        raise Exception("Unexpected HS20_WNM_NOTIF success")
    if "FAIL" not in hapd.request("HS20_WNM_NOTIF " + addr + " https://12345678923456789842345678456783456712345678923456789842345678456783456712345678923456789842345678456783456712345678923456789842345678456783456712345678923456789842345678456783456712345678923456789842345678456783456712345678923456789842345678456783456712345678927.very.long.example.com/"):
        raise Exception("Unexpected HS20_WNM_NOTIF success")
    # An empty URL (address followed by a trailing space) is accepted
    if "OK" not in hapd.request("HS20_WNM_NOTIF " + addr + " "):
        raise Exception("HS20_WNM_NOTIF failed with empty URL")
def test_ap_hs20_session_info(dev, apdev):
    """Hotspot 2.0 connection and session information from RADIUS"""
    check_eap_capa(dev[0], "MSCHAPV2")
    try:
        _test_ap_hs20_session_info(dev, apdev)
    finally:
        # The helper enables PMF on dev[0]; restore the default here
        dev[0].request("SET pmf 0")
def _test_ap_hs20_session_info(dev, apdev):
    """Helper: connect and expect an ESS disassociation imminent notice."""
    bssid = apdev[0]['bssid']
    params = hs20_ap_params()
    params['nai_realm'] = ["0,example.com,21[2:4]"]
    hostapd.add_ap(apdev[0], params)
    dev[0].request("SET pmf 1")
    dev[0].hs20_enable()
    # hs20-session-info-test user presumably makes the RADIUS server include
    # session information attributes — verify against auth_serv configuration
    dev[0].add_cred_values({'realm': "example.com",
                            'username': "hs20-session-info-test",
                            'password': "password"})
    interworking_select(dev[0], bssid, freq="2412")
    interworking_connect(dev[0], bssid, "TTLS")
    ev = dev[0].wait_event(["ESS-DISASSOC-IMMINENT"], timeout=10)
    if ev is None:
        raise Exception("Timeout on ESS disassociation imminent notice")
    # Expected fields: PMF flag, disassoc timer value, and session info URL
    if " 1 59904 https://example.com/" not in ev:
        raise Exception("Unexpected ESS disassociation imminent event contents")
    # wpa_supplicant is expected to start looking for another BSS on its own
    ev = dev[0].wait_event(["CTRL-EVENT-SCAN-STARTED"])
    if ev is None:
        raise Exception("Scan not started")
    ev = dev[0].wait_event(["CTRL-EVENT-SCAN-RESULTS"], timeout=30)
    if ev is None:
        raise Exception("Scan not completed")
def test_ap_hs20_osen(dev, apdev):
    """Hotspot 2.0 OSEN connection"""
    params = {'ssid': "osen",
              'osen': "1",
              'auth_server_addr': "127.0.0.1",
              'auth_server_port': "1812",
              'auth_server_shared_secret': "radius"}
    hostapd.add_ap(apdev[0], params)
    # dev[1]/dev[2] attempt mismatching configurations (open and, if
    # supported, WEP) against the OSEN AP without waiting for a connection
    dev[1].connect("osen", key_mgmt="NONE", scan_freq="2412",
                   wait_connect=False)
    if "WEP40" in dev[2].get_capability("group"):
        dev[2].connect("osen", key_mgmt="NONE", wep_key0='"hello"',
                       scan_freq="2412", wait_connect=False)
    dev[0].flush_scan_cache()
    # Proper OSEN association with unauthenticated server-only TLS
    dev[0].connect("osen", proto="OSEN", key_mgmt="OSEN", pairwise="CCMP",
                   group="GTK_NOT_USED CCMP",
                   eap="WFA-UNAUTH-TLS", identity="osen@example.com",
                   ca_cert="auth_serv/ca.pem",
                   scan_freq="2412")
    res = dev[0].get_bss(apdev[0]['bssid'])['flags']
    if "[OSEN-OSEN-CCMP]" not in res:
        raise Exception("OSEN not reported in BSS")
    if "[WEP]" in res:
        raise Exception("WEP reported in BSS")
    res = dev[0].request("SCAN_RESULTS")
    if "[OSEN-OSEN-CCMP]" not in res:
        raise Exception("OSEN not reported in SCAN_RESULTS")
    # Repeat the OSEN connection on an interface that uses the driver
    # connect command instead of auth/assoc
    wpas = WpaSupplicant(global_iface='/tmp/wpas-wlan5')
    wpas.interface_add("wlan5", drv_params="force_connect_cmd=1")
    wpas.connect("osen", proto="OSEN", key_mgmt="OSEN", pairwise="CCMP",
                 group="GTK_NOT_USED CCMP",
                 eap="WFA-UNAUTH-TLS", identity="osen@example.com",
                 ca_cert="auth_serv/ca.pem",
                 scan_freq="2412")
    wpas.request("DISCONNECT")
def test_ap_hs20_osen_single_ssid(dev, apdev):
    """Hotspot 2.0 OSEN-single-SSID connection"""
    bssid = apdev[0]['bssid']
    params = hs20_ap_params()
    # Same BSS advertises both WPA-EAP (data) and OSEN (OSU) key management
    params['wpa_key_mgmt'] = "WPA-EAP OSEN"
    params['hessid'] = bssid
    hapd = hostapd.add_ap(apdev[0], params)
    # RSN-OSEN (for OSU)
    dev[0].connect("test-hs20", proto="OSEN", key_mgmt="OSEN", pairwise="CCMP",
                   group="CCMP GTK_NOT_USED",
                   eap="WFA-UNAUTH-TLS", identity="osen@example.com",
                   ca_cert="auth_serv/ca.pem", ieee80211w='2',
                   scan_freq="2412")
    # RSN-EAP (for data connection)
    dev[1].connect("test-hs20", key_mgmt="WPA-EAP", eap="TTLS",
                   identity="hs20-test", password="password",
                   ca_cert="auth_serv/ca.pem", phase2="auth=MSCHAPV2",
                   pairwise="CCMP", group="CCMP",
                   ieee80211w='2', scan_freq="2412")
    res = dev[0].get_bss(apdev[0]['bssid'])['flags']
    if "[WPA2-EAP+OSEN-CCMP]" not in res:
        raise Exception("OSEN not reported in BSS")
    if "[WEP]" in res:
        raise Exception("WEP reported in BSS")
    res = dev[0].request("SCAN_RESULTS")
    if "[WPA2-EAP+OSEN-CCMP]" not in res:
        raise Exception("OSEN not reported in SCAN_RESULTS")
    # Data association has full connectivity; the OSEN association (which
    # uses GTK_NOT_USED) gets unicast only — the default (broadcast) check
    # is expected to fail for it
    hwsim_utils.test_connectivity(dev[1], hapd)
    hwsim_utils.test_connectivity(dev[0], hapd, broadcast=False)
    hwsim_utils.test_connectivity(dev[0], hapd, timeout=1,
                                  success_expected=False)
def test_ap_hs20_network_preference(dev, apdev):
    """Hotspot 2.0 network selection with preferred home network"""
    check_eap_capa(dev[0], "MSCHAPV2")
    bssid = apdev[0]['bssid']
    params = hs20_ap_params()
    hostapd.add_ap(apdev[0], params)
    dev[0].hs20_enable()
    # Hotspot 2.0 credential with a home domain
    values = {'realm': "example.com",
              'username': "hs20-test",
              'password': "password",
              'domain': "example.com"}
    dev[0].add_cred_values(values)
    # Higher-priority plain PSK network that is not on the air yet
    id = dev[0].add_network()
    dev[0].set_network_quoted(id, "ssid", "home")
    dev[0].set_network_quoted(id, "psk", "12345678")
    dev[0].set_network(id, "priority", "1")
    dev[0].request("ENABLE_NETWORK %s no-connect" % id)
    dev[0].scan_for_bss(bssid, freq="2412")
    dev[0].request("INTERWORKING_SELECT auto freq=2412")
    ev = dev[0].wait_connected(timeout=15)
    if bssid not in ev:
        raise Exception("Unexpected network selected")
    # Bring up the preferred network and verify automatic selection roams
    bssid2 = apdev[1]['bssid']
    params = hostapd.wpa2_params(ssid="home", passphrase="12345678")
    hostapd.add_ap(apdev[1], params)
    dev[0].scan_for_bss(bssid2, freq="2412")
    dev[0].request("INTERWORKING_SELECT auto freq=2412")
    ev = dev[0].wait_event(["CTRL-EVENT-CONNECTED",
                            "INTERWORKING-ALREADY-CONNECTED"], timeout=15)
    if ev is None:
        raise Exception("Connection timed out")
    if "INTERWORKING-ALREADY-CONNECTED" in ev:
        raise Exception("No roam to higher priority network")
    if bssid2 not in ev:
        raise Exception("Unexpected network selected")
def test_ap_hs20_network_preference2(dev, apdev):
    """Hotspot 2.0 network selection with preferred credential"""
    check_eap_capa(dev[0], "MSCHAPV2")
    bssid2 = apdev[1]['bssid']
    params = hostapd.wpa2_params(ssid="home", passphrase="12345678")
    hostapd.add_ap(apdev[1], params)
    dev[0].hs20_enable()
    # Hotspot 2.0 credential with higher priority than the plain PSK network
    values = {'realm': "example.com",
              'username': "hs20-test",
              'password': "password",
              'domain': "example.com",
              'priority': "1"}
    dev[0].add_cred_values(values)
    id = dev[0].add_network()
    dev[0].set_network_quoted(id, "ssid", "home")
    dev[0].set_network_quoted(id, "psk", "12345678")
    dev[0].request("ENABLE_NETWORK %s no-connect" % id)
    dev[0].scan_for_bss(bssid2, freq="2412")
    dev[0].request("INTERWORKING_SELECT auto freq=2412")
    # Only the PSK network is available at this point
    ev = dev[0].wait_connected(timeout=15)
    if bssid2 not in ev:
        raise Exception("Unexpected network selected")
    # Bring up a Hotspot 2.0 AP that matches the preferred credential and
    # verify automatic selection roams to it
    bssid = apdev[0]['bssid']
    params = hs20_ap_params()
    hostapd.add_ap(apdev[0], params)
    dev[0].scan_for_bss(bssid, freq="2412")
    dev[0].request("INTERWORKING_SELECT auto freq=2412")
    ev = dev[0].wait_event(["CTRL-EVENT-CONNECTED",
                            "INTERWORKING-ALREADY-CONNECTED"], timeout=15)
    if ev is None:
        raise Exception("Connection timed out")
    if "INTERWORKING-ALREADY-CONNECTED" in ev:
        raise Exception("No roam to higher priority network")
    if bssid not in ev:
        raise Exception("Unexpected network selected")
def test_ap_hs20_network_preference3(dev, apdev):
    """Hotspot 2.0 network selection with two credential (one preferred)"""
    check_eap_capa(dev[0], "MSCHAPV2")
    bssid = apdev[0]['bssid']
    params = hs20_ap_params()
    hostapd.add_ap(apdev[0], params)
    bssid2 = apdev[1]['bssid']
    params = hs20_ap_params(ssid="test-hs20b")
    params['nai_realm'] = "0,example.org,13[5:6],21[2:4][5:7]"
    hostapd.add_ap(apdev[1], params)
    dev[0].hs20_enable()
    # example.com credential starts out with the higher priority
    values = {'realm': "example.com",
              'username': "hs20-test",
              'password': "password",
              'priority': "1"}
    dev[0].add_cred_values(values)
    values = {'realm': "example.org",
              'username': "hs20-test",
              'password': "password"}
    id = dev[0].add_cred_values(values)
    dev[0].scan_for_bss(bssid, freq="2412")
    dev[0].scan_for_bss(bssid2, freq="2412")
    dev[0].request("INTERWORKING_SELECT auto freq=2412")
    ev = dev[0].wait_connected(timeout=15)
    if bssid not in ev:
        raise Exception("Unexpected network selected")
    # Make the example.org credential preferred and verify roaming to the
    # AP advertising that realm
    dev[0].set_cred(id, "priority", "2")
    dev[0].request("INTERWORKING_SELECT auto freq=2412")
    ev = dev[0].wait_event(["CTRL-EVENT-CONNECTED",
                            "INTERWORKING-ALREADY-CONNECTED"], timeout=15)
    if ev is None:
        raise Exception("Connection timed out")
    if "INTERWORKING-ALREADY-CONNECTED" in ev:
        raise Exception("No roam to higher priority network")
    if bssid2 not in ev:
        raise Exception("Unexpected network selected")
def test_ap_hs20_network_preference4(dev, apdev):
    """Hotspot 2.0 network selection with username vs. SIM credential"""
    check_eap_capa(dev[0], "MSCHAPV2")
    bssid = apdev[0]['bssid']
    params = hs20_ap_params()
    hostapd.add_ap(apdev[0], params)
    bssid2 = apdev[1]['bssid']
    params = hs20_ap_params(ssid="test-hs20b")
    params['hessid'] = bssid2
    # Second AP advertises a 3GPP cellular network (MCC 555, MNC 444) that
    # matches the SIM credential added below
    params['anqp_3gpp_cell_net'] = "555,444"
    params['domain_name'] = "wlan.mnc444.mcc555.3gppnetwork.org"
    hostapd.add_ap(apdev[1], params)
    dev[0].hs20_enable()
    # Username/password credential starts out with the higher priority
    values = {'realm': "example.com",
              'username': "hs20-test",
              'password': "password",
              'priority': "1"}
    dev[0].add_cred_values(values)
    values = {'imsi': "555444-333222111",
              'eap': "SIM",
              'milenage': "5122250214c33e723a5dd523fc145fc0:981d464c7c52eb6e5036234984ad0bcf:000000000123"}
    id = dev[0].add_cred_values(values)
    dev[0].scan_for_bss(bssid, freq="2412")
    dev[0].scan_for_bss(bssid2, freq="2412")
    dev[0].request("INTERWORKING_SELECT auto freq=2412")
    ev = dev[0].wait_connected(timeout=15)
    if bssid not in ev:
        raise Exception("Unexpected network selected")
    # Prefer the SIM credential and verify roaming to the matching AP
    dev[0].set_cred(id, "priority", "2")
    dev[0].request("INTERWORKING_SELECT auto freq=2412")
    ev = dev[0].wait_event(["CTRL-EVENT-CONNECTED",
                            "INTERWORKING-ALREADY-CONNECTED"], timeout=15)
    if ev is None:
        raise Exception("Connection timed out")
    if "INTERWORKING-ALREADY-CONNECTED" in ev:
        raise Exception("No roam to higher priority network")
    if bssid2 not in ev:
        raise Exception("Unexpected network selected")
def test_ap_hs20_interworking_select_blocking_scan(dev, apdev):
    """Ongoing INTERWORKING_SELECT blocking SCAN"""
    check_eap_capa(dev[0], "MSCHAPV2")
    wpas = dev[0]
    bssid = apdev[0]['bssid']
    hostapd.add_ap(apdev[0], hs20_ap_params())
    wpas.hs20_enable()
    wpas.add_cred_values({'realm': "example.com",
                          'username': "hs20-test",
                          'password': "password",
                          'domain': "example.com"})
    wpas.scan_for_bss(bssid, freq="2412")
    wpas.request("INTERWORKING_SELECT auto freq=2412")
    # A SCAN command issued while INTERWORKING_SELECT is in progress must be
    # rejected as busy
    res = wpas.request("SCAN")
    if "FAIL-BUSY" not in res:
        raise Exception("Unexpected SCAN command result")
    wpas.wait_connected(timeout=15)
def test_ap_hs20_fetch_osu(dev, apdev):
    """Hotspot 2.0 OSU provider and icon fetch"""
    # Two APs advertise OSU providers; dev[0] runs FETCH_OSU while dev[1]
    # and dev[2] fetch the icon through the two icon-request commands.
    bssid = apdev[0]['bssid']
    params = hs20_ap_params()
    params['hs20_icon'] = "128:80:zxx:image/png:w1fi_logo:w1fi_logo.png"
    params['osu_ssid'] = '"HS 2.0 OSU open"'
    params['osu_method_list'] = "1"
    params['osu_friendly_name'] = ["eng:Test OSU", "fin:Testi-OSU"]
    params['osu_icon'] = "w1fi_logo"
    params['osu_service_desc'] = ["eng:Example services", "fin:Esimerkkipalveluja"]
    params['osu_server_uri'] = "https://example.com/osu/"
    hostapd.add_ap(apdev[0], params)
    bssid2 = apdev[1]['bssid']
    params = hs20_ap_params(ssid="test-hs20b")
    params['hessid'] = bssid2
    params['hs20_icon'] = "128:80:zxx:image/png:w1fi_logo:w1fi_logo.png"
    params['osu_ssid'] = '"HS 2.0 OSU OSEN"'
    params['osu_method_list'] = "0"
    params['osu_nai'] = "osen@example.com"
    params['osu_friendly_name'] = ["eng:Test2 OSU", "fin:Testi2-OSU"]
    params['osu_icon'] = "w1fi_logo"
    params['osu_service_desc'] = ["eng:Example services2", "fin:Esimerkkipalveluja2"]
    params['osu_server_uri'] = "https://example.org/osu/"
    hostapd.add_ap(apdev[1], params)
    with open("w1fi_logo.png", "rb") as f:
        orig_logo = f.read()
    dev[0].hs20_enable()
    # Renamed from "dir" to avoid shadowing the dir() builtin
    osu_dir = "/tmp/osu-fetch"
    if os.path.isdir(osu_dir):
        files = [f for f in os.listdir(osu_dir) if f.startswith("osu-")]
        for f in files:
            os.remove(osu_dir + "/" + f)
    else:
        try:
            os.makedirs(osu_dir)
        except OSError:
            pass
    try:
        dev[1].scan_for_bss(bssid, freq="2412")
        dev[2].scan_for_bss(bssid, freq="2412")
        dev[0].request("SET osu_dir " + osu_dir)
        dev[0].request("FETCH_OSU")
        if "FAIL" not in dev[1].request("HS20_ICON_REQUEST foo w1fi_logo"):
            raise Exception("Invalid HS20_ICON_REQUEST accepted")
        if "OK" not in dev[1].request("HS20_ICON_REQUEST " + bssid + " w1fi_logo"):
            raise Exception("HS20_ICON_REQUEST failed")
        if "OK" not in dev[2].request("REQ_HS20_ICON " + bssid + " w1fi_logo"):
            raise Exception("REQ_HS20_ICON failed")
        # Count icon files that match the original logo while waiting for
        # the provider fetch to complete
        icons = 0
        while True:
            ev = dev[0].wait_event(["OSU provider fetch completed",
                                    "RX-HS20-ANQP-ICON"], timeout=15)
            if ev is None:
                raise Exception("Timeout on OSU fetch")
            if "OSU provider fetch completed" in ev:
                break
            if "RX-HS20-ANQP-ICON" in ev:
                with open(ev.split(' ')[1], "rb") as f:
                    logo = f.read()
                if logo == orig_logo:
                    icons += 1
        with open(osu_dir + "/osu-providers.txt", "r") as f:
            prov = f.read()
        logger.debug("osu-providers.txt: " + prov)
        if "OSU-PROVIDER " + bssid not in prov:
            raise Exception("Missing OSU_PROVIDER(1)")
        if "OSU-PROVIDER " + bssid2 not in prov:
            raise Exception("Missing OSU_PROVIDER(2)")
    finally:
        files = [f for f in os.listdir(osu_dir) if f.startswith("osu-")]
        for f in files:
            os.remove(osu_dir + "/" + f)
        os.rmdir(osu_dir)
    if icons != 2:
        raise Exception("Unexpected number of icons fetched")
    # dev[1] issued HS20_ICON_REQUEST; verify the resulting GAS exchange
    ev = dev[1].wait_event(["GAS-QUERY-START"], timeout=5)
    if ev is None:
        # Fixed: this message previously claimed GAS-QUERY-DONE
        raise Exception("Timeout on GAS-QUERY-START")
    ev = dev[1].wait_event(["GAS-QUERY-DONE"], timeout=5)
    if ev is None:
        raise Exception("Timeout on GAS-QUERY-DONE")
    if "freq=2412 status_code=0 result=SUCCESS" not in ev:
        raise Exception("Unexpected GAS-QUERY-DONE: " + ev)
    ev = dev[1].wait_event(["RX-HS20-ANQP"], timeout=15)
    if ev is None:
        raise Exception("Timeout on icon fetch")
    if "Icon Binary File" not in ev:
        raise Exception("Unexpected ANQP element")
    # dev[2] used REQ_HS20_ICON; the icon is stored in wpa_supplicant
    ev = dev[2].wait_event(["RX-HS20-ICON"], timeout=5)
    if ev is None:
        raise Exception("Timeout on RX-HS20-ICON")
    event_icon_len = ev.split(' ')[3]
    if " w1fi_logo " not in ev:
        raise Exception("RX-HS20-ICON did not have the expected file name")
    if bssid not in ev:
        raise Exception("RX-HS20-ICON did not have the expected BSSID")
    # GET_HS20_ICON boundary checks: valid ranges succeed, invalid ones fail
    if "FAIL" in dev[2].request("GET_HS20_ICON " + bssid + " w1fi_logo 0 10"):
        raise Exception("GET_HS20_ICON 0..10 failed")
    if "FAIL" in dev[2].request("GET_HS20_ICON " + bssid + " w1fi_logo 5 10"):
        raise Exception("GET_HS20_ICON 5..15 failed")
    if "FAIL" not in dev[2].request("GET_HS20_ICON " + bssid + " w1fi_logo 100000 10"):
        raise Exception("Unexpected success of GET_HS20_ICON with too large offset")
    if "FAIL" not in dev[2].request("GET_HS20_ICON " + bssid + " no_such_logo 0 10"):
        raise Exception("GET_HS20_ICON for not existing icon succeeded")
    if "FAIL" not in dev[2].request("GET_HS20_ICON " + bssid + " w1fi_logo 0 3070"):
        raise Exception("GET_HS20_ICON with too many output bytes to fit the buffer succeeded")
    if "FAIL" not in dev[2].request("GET_HS20_ICON " + bssid + " w1fi_logo 0 0"):
        raise Exception("GET_HS20_ICON 0..0 succeeded")
    # Fetch the full icon in 1000-byte chunks and validate its contents
    icon = b''
    pos = 0
    while True:
        if pos > 100000:
            raise Exception("Unexpectedly long icon")
        res = dev[2].request("GET_HS20_ICON " + bssid + " w1fi_logo %d 1000" % pos)
        if res.startswith("FAIL"):
            break
        icon += base64.b64decode(res)
        pos += 1000
    # Renamed from "hex" to avoid shadowing the hex() builtin
    icon_hex = binascii.hexlify(icon).decode()
    if not icon_hex.startswith("0009696d6167652f706e677d1d"):
        # Fixed: this message previously said "beacon binary header"
        raise Exception("Unexpected icon binary header: " + icon_hex)
    with open('w1fi_logo.png', 'rb') as f:
        data = f.read()
    # First 13 octets are icon metadata; the rest is the image itself
    if icon[13:] != data:
        raise Exception("Unexpected icon data")
    if len(icon) != int(event_icon_len):
        raise Exception("Unexpected RX-HS20-ICON event length: " + event_icon_len)
    # All three stations fetch and then delete the icon
    for i in range(3):
        if "OK" not in dev[i].request("REQ_HS20_ICON " + bssid + " w1fi_logo"):
            raise Exception("REQ_HS20_ICON failed [2]")
    for i in range(3):
        ev = dev[i].wait_event(["RX-HS20-ICON"], timeout=5)
        if ev is None:
            raise Exception("Timeout on RX-HS20-ICON [2]")
    if "FAIL" not in dev[2].request("DEL_HS20_ICON foo w1fi_logo"):
        raise Exception("Invalid DEL_HS20_ICON accepted")
    if "OK" not in dev[2].request("DEL_HS20_ICON " + bssid + " w1fi_logo"):
        raise Exception("DEL_HS20_ICON failed")
    if "OK" not in dev[1].request("DEL_HS20_ICON " + bssid):
        raise Exception("DEL_HS20_ICON failed")
    if "OK" not in dev[0].request("DEL_HS20_ICON "):
        raise Exception("DEL_HS20_ICON failed")
    for i in range(3):
        if "FAIL" not in dev[i].request("DEL_HS20_ICON "):
            raise Exception("DEL_HS20_ICON accepted when no icons left")
def test_ap_hs20_fetch_osu_no_info(dev, apdev):
    """Hotspot 2.0 OSU provider and no AP with info"""
    bssid = apdev[0]['bssid']
    params = hs20_ap_params()
    hostapd.add_ap(apdev[0], params)
    dev[0].hs20_enable()
    # Renamed from "dir" to avoid shadowing the dir() builtin
    osu_dir = "/tmp/osu-fetch"
    if os.path.isdir(osu_dir):
        # Clean out stale results from a previous run
        for f in [f for f in os.listdir(osu_dir) if f.startswith("osu-")]:
            os.remove(os.path.join(osu_dir, f))
    else:
        try:
            os.makedirs(osu_dir)
        except OSError:
            # Directory may already exist due to a race; ignore
            pass
    dev[0].scan_for_bss(bssid, freq="2412")
    try:
        dev[0].request("SET osu_dir " + osu_dir)
        dev[0].request("FETCH_OSU")
        # The AP does not advertise OSU providers, but the fetch operation
        # itself is still expected to complete
        ev = dev[0].wait_event(["OSU provider fetch completed"], timeout=30)
        if ev is None:
            raise Exception("Timeout on OSU fetch")
    finally:
        for f in [f for f in os.listdir(osu_dir) if f.startswith("osu-")]:
            os.remove(os.path.join(osu_dir, f))
        os.rmdir(osu_dir)
def test_ap_hs20_fetch_osu_no_icon(dev, apdev):
    """Hotspot 2.0 OSU provider and no icon found"""
    bssid = apdev[0]['bssid']
    params = hs20_ap_params()
    # The advertised icon points at a file that does not exist on the AP
    params['hs20_icon'] = "128:80:zxx:image/png:w1fi_logo:w1fi_logo-no-file.png"
    params['osu_ssid'] = '"HS 2.0 OSU open"'
    params['osu_method_list'] = "1"
    params['osu_friendly_name'] = ["eng:Test OSU", "fin:Testi-OSU"]
    params['osu_icon'] = "w1fi_logo"
    params['osu_service_desc'] = ["eng:Example services",
                                  "fin:Esimerkkipalveluja"]
    params['osu_server_uri'] = "https://example.com/osu/"
    hostapd.add_ap(apdev[0], params)
    dev[0].hs20_enable()
    # NOTE(review): this local name shadows the dir() builtin
    dir = "/tmp/osu-fetch"
    if os.path.isdir(dir):
        files = [f for f in os.listdir(dir) if f.startswith("osu-")]
        for f in files:
            os.remove(dir + "/" + f)
    else:
        try:
            os.makedirs(dir)
        except:
            pass
    dev[0].scan_for_bss(bssid, freq="2412")
    try:
        dev[0].request("SET osu_dir " + dir)
        dev[0].request("FETCH_OSU")
        # The fetch should complete even though no icon file can be served
        ev = dev[0].wait_event(["OSU provider fetch completed"], timeout=30)
        if ev is None:
            raise Exception("Timeout on OSU fetch")
    finally:
        files = [f for f in os.listdir(dir) if f.startswith("osu-")]
        for f in files:
            os.remove(dir + "/" + f)
        os.rmdir(dir)
def test_ap_hs20_fetch_osu_single_ssid(dev, apdev):
    """Hotspot 2.0 OSU provider and single SSID"""
    bssid = apdev[0]['bssid']
    params = hs20_ap_params()
    params['hs20_icon'] = "128:80:zxx:image/png:w1fi_logo:w1fi_logo-no-file.png"
    params['osu_ssid'] = '"HS 2.0 OSU open"'
    params['osu_method_list'] = "1"
    params['osu_friendly_name'] = ["eng:Test OSU", "fin:Testi-OSU"]
    # osu_nai2 carries the OSU NAI for the single-SSID (shared BSS) case
    params['osu_nai2'] = "osen@example.com"
    params['osu_icon'] = "w1fi_logo"
    params['osu_service_desc'] = ["eng:Example services",
                                  "fin:Esimerkkipalveluja"]
    params['osu_server_uri'] = "https://example.com/osu/"
    params['wpa_key_mgmt'] = "WPA-EAP OSEN"
    hostapd.add_ap(apdev[0], params)
    dev[0].hs20_enable()
    dir = "/tmp/osu-fetch"
    if os.path.isdir(dir):
        files = [f for f in os.listdir(dir) if f.startswith("osu-")]
        for f in files:
            os.remove(dir + "/" + f)
    else:
        try:
            os.makedirs(dir)
        except:
            pass
    dev[0].scan_for_bss(bssid, freq="2412")
    try:
        dev[0].request("SET osu_dir " + dir)
        dev[0].request("FETCH_OSU")
        ev = dev[0].wait_event(["OSU provider fetch completed"], timeout=30)
        if ev is None:
            raise Exception("Timeout on OSU fetch")
        # Parse the fetched provider list: osu_ssid/osu_ssid2/osu_nai2 are
        # expected; osu_nai (separate-OSEN-BSS case) must not be reported
        osu_ssid = False
        osu_ssid2 = False
        osu_nai = False
        osu_nai2 = False
        with open(os.path.join(dir, "osu-providers.txt"), "r") as f:
            for l in f.readlines():
                logger.info(l.strip())
                if l.strip() == "osu_ssid=HS 2.0 OSU open":
                    osu_ssid = True
                if l.strip() == "osu_ssid2=test-hs20":
                    osu_ssid2 = True
                if l.strip().startswith("osu_nai="):
                    osu_nai = True
                if l.strip() == "osu_nai2=osen@example.com":
                    osu_nai2 = True
        if not osu_ssid:
            raise Exception("osu_ssid not reported")
        if not osu_ssid2:
            raise Exception("osu_ssid2 not reported")
        if osu_nai:
            raise Exception("osu_nai reported unexpectedly")
        if not osu_nai2:
            raise Exception("osu_nai2 not reported")
    finally:
        files = [f for f in os.listdir(dir) if f.startswith("osu-")]
        for f in files:
            os.remove(dir + "/" + f)
        os.rmdir(dir)
def test_ap_hs20_fetch_osu_single_ssid2(dev, apdev):
    """Hotspot 2.0 OSU provider and single SSID (two OSU providers)"""
    bssid = apdev[0]['bssid']
    params = hs20_ap_params()
    params['hs20_icon'] = "128:80:zxx:image/png:w1fi_logo:w1fi_logo-no-file.png"
    params['osu_ssid'] = '"HS 2.0 OSU open"'
    params['osu_method_list'] = "1"
    params['osu_friendly_name'] = ["eng:Test OSU", "fin:Testi-OSU"]
    params['osu_nai2'] = "osen@example.com"
    params['osu_icon'] = "w1fi_logo"
    params['osu_service_desc'] = ["eng:Example services",
                                  "fin:Esimerkkipalveluja"]
    params['osu_server_uri'] = "https://example.com/osu/"
    params['wpa_key_mgmt'] = "WPA-EAP OSEN"
    # Add a second OSU provider on the same BSS before enabling the AP
    hapd = hostapd.add_ap(apdev[0], params, no_enable=True)
    hapd.set('osu_server_uri', 'https://another.example.com/osu/')
    hapd.set('osu_method_list', "1")
    hapd.set('osu_nai2', "osen@another.example.com")
    hapd.enable()
    dev[0].hs20_enable()
    dir = "/tmp/osu-fetch"
    if os.path.isdir(dir):
        files = [f for f in os.listdir(dir) if f.startswith("osu-")]
        for f in files:
            os.remove(dir + "/" + f)
    else:
        try:
            os.makedirs(dir)
        except:
            pass
    dev[0].scan_for_bss(bssid, freq="2412")
    try:
        dev[0].request("SET osu_dir " + dir)
        dev[0].request("FETCH_OSU")
        ev = dev[0].wait_event(["OSU provider fetch completed"], timeout=30)
        if ev is None:
            raise Exception("Timeout on OSU fetch")
        # Both providers' osu_nai2 values must show up; osu_nai must not
        osu_ssid = False
        osu_ssid2 = False
        osu_nai = False
        osu_nai2 = False
        osu_nai2b = False
        with open(os.path.join(dir, "osu-providers.txt"), "r") as f:
            for l in f.readlines():
                logger.info(l.strip())
                if l.strip() == "osu_ssid=HS 2.0 OSU open":
                    osu_ssid = True
                if l.strip() == "osu_ssid2=test-hs20":
                    osu_ssid2 = True
                if l.strip().startswith("osu_nai="):
                    osu_nai = True
                if l.strip() == "osu_nai2=osen@example.com":
                    osu_nai2 = True
                if l.strip() == "osu_nai2=osen@another.example.com":
                    osu_nai2b = True
        if not osu_ssid:
            raise Exception("osu_ssid not reported")
        if not osu_ssid2:
            raise Exception("osu_ssid2 not reported")
        if osu_nai:
            raise Exception("osu_nai reported unexpectedly")
        if not osu_nai2:
            raise Exception("osu_nai2 not reported")
        if not osu_nai2b:
            raise Exception("osu_nai2b not reported")
    finally:
        files = [f for f in os.listdir(dir) if f.startswith("osu-")]
        for f in files:
            os.remove(dir + "/" + f)
        os.rmdir(dir)
def get_icon(dev, bssid, iconname):
    """Fetch a stored icon from wpa_supplicant in 3000-byte chunks.

    Returns a (header, payload) tuple: the fixed 13-octet prefix and the
    remaining binary data. Raises if the icon is unreasonably long or the
    full response is shorter than the 13-octet prefix.
    """
    chunk_size = 3000
    chunks = []
    offset = 0
    while offset <= 100000:
        res = dev.request("GET_HS20_ICON %s %s %d %d" %
                          (bssid, iconname, offset, chunk_size))
        if res.startswith("FAIL"):
            break
        chunks.append(base64.b64decode(res))
        offset += chunk_size
    else:
        raise Exception("Unexpectedly long icon")
    icon = b''.join(chunks)
    if len(icon) < 13:
        raise Exception("Too short GET_HS20_ICON response")
    return icon[0:13], icon[13:]
def test_ap_hs20_req_hs20_icon(dev, apdev):
    """Hotspot 2.0 OSU provider and multi-icon fetch with REQ_HS20_ICON"""
    bssid = apdev[0]['bssid']
    # Advertise two icons so run_req_hs20_icon() can fetch and compare both
    params = hs20_ap_params()
    params.update({
        'hs20_icon': ["128:80:zxx:image/png:w1fi_logo:w1fi_logo.png",
                      "128:80:zxx:image/png:test_logo:auth_serv/sha512-server.pem"],
        'osu_ssid': '"HS 2.0 OSU open"',
        'osu_method_list': "1",
        'osu_friendly_name': ["eng:Test OSU", "fin:Testi-OSU"],
        'osu_icon': ["w1fi_logo", "w1fi_logo2"],
        'osu_service_desc': ["eng:Example services",
                             "fin:Esimerkkipalveluja"],
        'osu_server_uri': "https://example.com/osu/"})
    hostapd.add_ap(apdev[0], params)
    dev[0].scan_for_bss(bssid, freq="2412")
    run_req_hs20_icon(dev, bssid)
def run_req_hs20_icon(dev, bssid):
    """Fetch w1fi_logo and test_logo via REQ_HS20_ICON on dev[0], validate
    their contents against the local source files, and delete them."""
    # First, fetch two icons from the AP to wpa_supplicant
    if "OK" not in dev[0].request("REQ_HS20_ICON " + bssid + " w1fi_logo"):
        raise Exception("REQ_HS20_ICON failed")
    ev = dev[0].wait_event(["RX-HS20-ICON"], timeout=5)
    if ev is None:
        raise Exception("Timeout on RX-HS20-ICON (1)")
    if "OK" not in dev[0].request("REQ_HS20_ICON " + bssid + " test_logo"):
        raise Exception("REQ_HS20_ICON failed")
    ev = dev[0].wait_event(["RX-HS20-ICON"], timeout=5)
    if ev is None:
        raise Exception("Timeout on RX-HS20-ICON (2)")
    # Then, fetch the icons from wpa_supplicant for validation
    hdr, data1 = get_icon(dev[0], bssid, "w1fi_logo")
    hdr, data2 = get_icon(dev[0], bssid, "test_logo")
    with open('w1fi_logo.png', 'rb') as f:
        data = f.read()
    if data1 != data:
        raise Exception("Unexpected icon data (1)")
    with open('auth_serv/sha512-server.pem', 'rb') as f:
        data = f.read()
    if data2 != data:
        raise Exception("Unexpected icon data (2)")
    # Finally, delete the icons from wpa_supplicant
    if "OK" not in dev[0].request("DEL_HS20_ICON " + bssid + " w1fi_logo"):
        raise Exception("DEL_HS20_ICON failed")
    if "OK" not in dev[0].request("DEL_HS20_ICON " + bssid + " test_logo"):
        raise Exception("DEL_HS20_ICON failed")
def test_ap_hs20_req_operator_icon(dev, apdev):
    """Hotspot 2.0 operator icons"""
    bssid = apdev[0]['bssid']
    params = hs20_ap_params()
    params['hs20_icon'] = ["128:80:zxx:image/png:w1fi_logo:w1fi_logo.png",
                           "500:300:fi:image/png:test_logo:auth_serv/sha512-server.pem"]
    params['operator_icon'] = ["w1fi_logo", "unknown_logo", "test_logo"]
    hostapd.add_ap(apdev[0], params)
    # Expected Operator Icon Metadata payload: width/height (LE u16),
    # language code, length-prefixed type and filename for each advertised
    # icon that has an hs20_icon entry (unknown_logo has none, so it is
    # not expected to appear in the response)
    value = struct.pack('<HH', 128, 80) + b"zxx"
    value += struct.pack('B', 9) + b"image/png"
    value += struct.pack('B', 9) + b"w1fi_logo"
    value += struct.pack('<HH', 500, 300) + b"fi\0"
    value += struct.pack('B', 9) + b"image/png"
    value += struct.pack('B', 9) + b"test_logo"
    dev[0].scan_for_bss(bssid, freq="2412")
    # hs20:12 = Operator Icon Metadata ANQP element
    if "OK" not in dev[0].request("ANQP_GET " + bssid + " hs20:12"):
        raise Exception("ANQP_GET command failed")
    ev = dev[0].wait_event(["GAS-QUERY-START"], timeout=5)
    if ev is None:
        raise Exception("GAS query start timed out")
    ev = dev[0].wait_event(["GAS-QUERY-DONE"], timeout=10)
    if ev is None:
        raise Exception("GAS query timed out")
    ev = dev[0].wait_event(["RX-HS20-ANQP"], timeout=1)
    if ev is None or "Operator Icon Metadata" not in ev:
        raise Exception("Did not receive Operator Icon Metadata")
    ev = dev[0].wait_event(["ANQP-QUERY-DONE"], timeout=10)
    if ev is None:
        raise Exception("ANQP-QUERY-DONE event not seen")
    if "result=SUCCESS" not in ev:
        raise Exception("Unexpected result: " + ev)
    # The metadata must also be stored in the BSS table entry
    bss = dev[0].get_bss(bssid)
    if "hs20_operator_icon_metadata" not in bss:
        raise Exception("hs20_operator_icon_metadata missing from BSS entry")
    if bss["hs20_operator_icon_metadata"] != binascii.hexlify(value).decode():
        raise Exception("Unexpected hs20_operator_icon_metadata value: " +
                        bss["hs20_operator_icon_metadata"])
    run_req_hs20_icon(dev, bssid)
def test_ap_hs20_req_hs20_icon_oom(dev, apdev):
    """Hotspot 2.0 icon fetch OOM with REQ_HS20_ICON"""
    bssid = apdev[0]['bssid']
    params = hs20_ap_params()
    params['hs20_icon'] = ["128:80:zxx:image/png:w1fi_logo:w1fi_logo.png",
                           "128:80:zxx:image/png:test_logo:auth_serv/sha512-server.pem"]
    params['osu_ssid'] = '"HS 2.0 OSU open"'
    params['osu_method_list'] = "1"
    params['osu_friendly_name'] = ["eng:Test OSU", "fin:Testi-OSU"]
    params['osu_icon'] = ["w1fi_logo", "w1fi_logo2"]
    params['osu_service_desc'] = ["eng:Example services",
                                  "fin:Esimerkkipalveluja"]
    params['osu_server_uri'] = "https://example.com/osu/"
    hostapd.add_ap(apdev[0], params)
    dev[0].scan_for_bss(bssid, freq="2412")
    if "FAIL" not in dev[0].request("REQ_HS20_ICON 11:22:33:44:55:66 w1fi_logo"):
        raise Exception("REQ_HS20_ICON succeeded with unknown BSSID")
    # Exercise allocation failures along the ANQP icon request path
    with alloc_fail(dev[0], 1, "hs20_build_anqp_req;hs20_anqp_send_req"):
        if "FAIL" not in dev[0].request("REQ_HS20_ICON " + bssid + " w1fi_logo"):
            raise Exception("REQ_HS20_ICON succeeded during OOM")
    with alloc_fail(dev[0], 1, "gas_query_req;hs20_anqp_send_req"):
        if "FAIL" not in dev[0].request("REQ_HS20_ICON " + bssid + " w1fi_logo"):
            raise Exception("REQ_HS20_ICON succeeded during OOM")
    with alloc_fail(dev[0], 1, "=hs20_anqp_send_req"):
        if "FAIL" not in dev[0].request("REQ_HS20_ICON " + bssid + " w1fi_logo"):
            raise Exception("REQ_HS20_ICON succeeded during OOM")
    with alloc_fail(dev[0], 2, "=hs20_anqp_send_req"):
        if "FAIL" not in dev[0].request("REQ_HS20_ICON " + bssid + " w1fi_logo"):
            raise Exception("REQ_HS20_ICON succeeded during OOM")
    if "OK" not in dev[0].request("REQ_HS20_ICON " + bssid + " w1fi_logo"):
        raise Exception("REQ_HS20_ICON failed")
    ev = dev[0].wait_event(["RX-HS20-ICON"], timeout=5)
    if ev is None:
        raise Exception("Timeout on RX-HS20-ICON (1)")
    with alloc_fail(dev[0], 1, "hs20_get_icon"):
        # Fixed: the space separator before the icon name was missing
        # ("...<bssid>w1fi_logo"), so the FAIL came from command parsing
        # instead of from the intended hs20_get_icon OOM path
        if "FAIL" not in dev[0].request("GET_HS20_ICON " + bssid + " w1fi_logo 0 100"):
            raise Exception("GET_HS20_ICON succeeded during OOM")
    if "OK" not in dev[0].request("DEL_HS20_ICON " + bssid + " w1fi_logo"):
        raise Exception("DEL_HS20_ICON failed")
    with alloc_fail(dev[0], 1, "=hs20_process_icon_binary_file"):
        if "OK" not in dev[0].request("REQ_HS20_ICON " + bssid + " w1fi_logo"):
            raise Exception("REQ_HS20_ICON failed")
        wait_fail_trigger(dev[0], "GET_ALLOC_FAIL")
def test_ap_hs20_req_hs20_icon_parallel(dev, apdev):
    """Hotspot 2.0 OSU provider and multi-icon parallel fetch with REQ_HS20_ICON"""
    bssid = apdev[0]['bssid']
    params = hs20_ap_params()
    params['hs20_icon'] = ["128:80:zxx:image/png:w1fi_logo:w1fi_logo.png",
                           "128:80:zxx:image/png:test_logo:auth_serv/sha512-server.pem"]
    params['osu_ssid'] = '"HS 2.0 OSU open"'
    params['osu_method_list'] = "1"
    params['osu_friendly_name'] = ["eng:Test OSU", "fin:Testi-OSU"]
    params['osu_icon'] = ["w1fi_logo", "w1fi_logo2"]
    params['osu_service_desc'] = ["eng:Example services",
                                  "fin:Esimerkkipalveluja"]
    params['osu_server_uri'] = "https://example.com/osu/"
    hostapd.add_ap(apdev[0], params)
    dev[0].scan_for_bss(bssid, freq="2412")
    # First, fetch two icons from the AP to wpa_supplicant; unlike
    # run_req_hs20_icon(), both requests are issued before waiting for the
    # results so the two fetches proceed in parallel
    if "OK" not in dev[0].request("REQ_HS20_ICON " + bssid + " w1fi_logo"):
        raise Exception("REQ_HS20_ICON failed")
    if "OK" not in dev[0].request("REQ_HS20_ICON " + bssid + " test_logo"):
        raise Exception("REQ_HS20_ICON failed")
    ev = dev[0].wait_event(["RX-HS20-ICON"], timeout=5)
    if ev is None:
        raise Exception("Timeout on RX-HS20-ICON (1)")
    ev = dev[0].wait_event(["RX-HS20-ICON"], timeout=5)
    if ev is None:
        raise Exception("Timeout on RX-HS20-ICON (2)")
    # Then, fetch the icons from wpa_supplicant for validation
    hdr, data1 = get_icon(dev[0], bssid, "w1fi_logo")
    hdr, data2 = get_icon(dev[0], bssid, "test_logo")
    with open('w1fi_logo.png', 'rb') as f:
        data = f.read()
    if data1 != data:
        raise Exception("Unexpected icon data (1)")
    with open('auth_serv/sha512-server.pem', 'rb') as f:
        data = f.read()
    if data2 != data:
        raise Exception("Unexpected icon data (2)")
    # Finally, delete the icons from wpa_supplicant
    if "OK" not in dev[0].request("DEL_HS20_ICON " + bssid + " w1fi_logo"):
        raise Exception("DEL_HS20_ICON failed")
    if "OK" not in dev[0].request("DEL_HS20_ICON " + bssid + " test_logo"):
        raise Exception("DEL_HS20_ICON failed")
def test_ap_hs20_fetch_osu_stop(dev, apdev):
    """Hotspot 2.0 OSU provider fetch stopped"""
    bssid = apdev[0]['bssid']
    params = hs20_ap_params()
    params['hs20_icon'] = "128:80:zxx:image/png:w1fi_logo:w1fi_logo.png"
    params['osu_ssid'] = '"HS 2.0 OSU open"'
    params['osu_method_list'] = "1"
    params['osu_friendly_name'] = ["eng:Test OSU", "fin:Testi-OSU"]
    params['osu_icon'] = "w1fi_logo"
    params['osu_service_desc'] = ["eng:Example services",
                                  "fin:Esimerkkipalveluja"]
    params['osu_server_uri'] = "https://example.com/osu/"
    hapd = hostapd.add_ap(apdev[0], params)
    dev[0].hs20_enable()
    # Prepare an empty OSU fetch result directory
    dir = "/tmp/osu-fetch"
    if os.path.isdir(dir):
        files = [f for f in os.listdir(dir) if f.startswith("osu-")]
        for f in files:
            os.remove(dir + "/" + f)
    else:
        try:
            os.makedirs(dir)
        except:
            pass
    try:
        dev[0].request("SET osu_dir " + dir)
        dev[0].request("SCAN freq=2412-2462")
        ev = dev[0].wait_event(["CTRL-EVENT-SCAN-STARTED"], timeout=10)
        if ev is None:
            raise Exception("Scan did not start")
        # FETCH_OSU must be rejected while a scan is in progress
        if "FAIL" not in dev[0].request("FETCH_OSU"):
            raise Exception("FETCH_OSU accepted while scanning")
        ev = dev[0].wait_event(["CTRL-EVENT-SCAN-RESULTS"], 10)
        if ev is None:
            raise Exception("Scan timed out")
        hapd.set("ext_mgmt_frame_handling", "1")
        dev[0].request("FETCH_ANQP")
        # FETCH_OSU must be rejected while an ANQP fetch is in progress
        if "FAIL" not in dev[0].request("FETCH_OSU"):
            raise Exception("FETCH_OSU accepted while in FETCH_ANQP")
        dev[0].request("STOP_FETCH_ANQP")
        dev[0].wait_event(["GAS-QUERY-DONE"], timeout=5)
        dev[0].dump_monitor()
        hapd.dump_monitor()
        dev[0].request("INTERWORKING_SELECT freq=2412")
        for i in range(5):
            msg = hapd.mgmt_rx()
            # subtype 13 = management Action frame (the GAS query)
            if msg['subtype'] == 13:
                break
        # FETCH_OSU must be rejected while network selection is running
        if "FAIL" not in dev[0].request("FETCH_OSU"):
            raise Exception("FETCH_OSU accepted while in INTERWORKING_SELECT")
        ev = dev[0].wait_event(["INTERWORKING-AP", "INTERWORKING-NO-MATCH"],
                               timeout=15)
        if ev is None:
            raise Exception("Network selection timed out")
        dev[0].dump_monitor()
        if "OK" not in dev[0].request("FETCH_OSU"):
            raise Exception("FETCH_OSU failed")
        # Cancel immediately, during the initial scan phase of the fetch
        dev[0].request("CANCEL_FETCH_OSU")
        for i in range(15):
            time.sleep(0.5)
            if dev[0].get_driver_status_field("scan_state") == "SCAN_COMPLETED":
                break
        dev[0].dump_monitor()
        if "OK" not in dev[0].request("FETCH_OSU"):
            raise Exception("FETCH_OSU failed")
        # Only a single FETCH_OSU operation is allowed at a time
        if "FAIL" not in dev[0].request("FETCH_OSU"):
            raise Exception("FETCH_OSU accepted while in FETCH_OSU")
        ev = dev[0].wait_event(["GAS-QUERY-START"], 10)
        if ev is None:
            raise Exception("GAS timed out")
        if "FAIL" not in dev[0].request("FETCH_OSU"):
            raise Exception("FETCH_OSU accepted while in FETCH_OSU")
        # Cancel while the GAS query is in progress
        dev[0].request("CANCEL_FETCH_OSU")
        ev = dev[0].wait_event(["GAS-QUERY-DONE"], 10)
        if ev is None:
            raise Exception("GAS event timed out after CANCEL_FETCH_OSU")
    finally:
        # Remove any fetched results and the temporary directory
        files = [f for f in os.listdir(dir) if f.startswith("osu-")]
        for f in files:
            os.remove(dir + "/" + f)
        os.rmdir(dir)
def test_ap_hs20_fetch_osu_proto(dev, apdev):
    """Hotspot 2.0 OSU provider and protocol testing"""
    bssid = apdev[0]['bssid']
    params = hs20_ap_params()
    hapd = hostapd.add_ap(apdev[0], params)
    dev[0].hs20_enable()
    # Prepare an empty OSU fetch result directory
    dir = "/tmp/osu-fetch"
    if os.path.isdir(dir):
        files = [f for f in os.listdir(dir) if f.startswith("osu-")]
        for f in files:
            os.remove(dir + "/" + f)
    else:
        try:
            os.makedirs(dir)
        except:
            pass
    # (note, payload) pairs of malformed OSU Providers list payloads; each
    # note describes the parser behavior the payload is expected to hit
    tests = [("Empty provider list (no OSU SSID field)", b''),
             ("HS 2.0: Not enough room for OSU SSID",
              binascii.unhexlify('01')),
             ("HS 2.0: Invalid OSU SSID Length 33",
              binascii.unhexlify('21') + 33*b'A'),
             ("HS 2.0: Not enough room for Number of OSU Providers",
              binascii.unhexlify('0130')),
             ("Truncated OSU Provider",
              binascii.unhexlify('013001020000')),
             ("HS 2.0: Ignored 5 bytes of extra data after OSU Providers",
              binascii.unhexlify('0130001122334455')),
             ("HS 2.0: Not enough room for OSU Friendly Name Length",
              binascii.unhexlify('013001000000')),
             ("HS 2.0: Not enough room for OSU Friendly Name Duples",
              build_prov('0100')),
             ("Invalid OSU Friendly Name", build_prov('040000000000')),
             ("Invalid OSU Friendly Name(2)", build_prov('040004000000')),
             ("HS 2.0: Not enough room for OSU Server URI length",
              build_prov('0000')),
             ("HS 2.0: Not enough room for OSU Server URI",
              build_prov('000001')),
             ("HS 2.0: Not enough room for OSU Method list length",
              build_prov('000000')),
             ("HS 2.0: Not enough room for OSU Method list",
              build_prov('00000001')),
             ("HS 2.0: Not enough room for Icons Available Length",
              build_prov('00000000')),
             ("HS 2.0: Not enough room for Icons Available Length(2)",
              build_prov('00000001ff00')),
             ("HS 2.0: Not enough room for Icons Available",
              build_prov('000000000100')),
             ("HS 2.0: Invalid Icon Metadata",
              build_prov('00000000010000')),
             ("HS 2.0: Not room for Icon Type",
              build_prov('000000000900111122223333330200')),
             ("HS 2.0: Not room for Icon Filename length",
              build_prov('000000000900111122223333330100')),
             ("HS 2.0: Not room for Icon Filename",
              build_prov('000000000900111122223333330001')),
             ("HS 2.0: Not enough room for OSU_NAI",
              build_prov('000000000000')),
             ("HS 2.0: Not enough room for OSU_NAI(2)",
              build_prov('00000000000001')),
             ("HS 2.0: Not enough room for OSU Service Description Length",
              build_prov('00000000000000')),
             ("HS 2.0: Not enough room for OSU Service Description Length(2)",
              build_prov('0000000000000000')),
             ("HS 2.0: Not enough room for OSU Service Description Duples",
              build_prov('000000000000000100')),
             ("Invalid OSU Service Description",
              build_prov('00000000000000040000000000')),
             ("Invalid OSU Service Description(2)",
              build_prov('00000000000000040004000000'))]
    try:
        dev[0].request("SET osu_dir " + dir)
        # First, exercise the icon fetch failure path with a valid
        # provider entry
        run_fetch_osu_icon_failure(hapd, dev, bssid)
        # Then, run each malformed payload through a FETCH_OSU round
        for note, prov in tests:
            run_fetch_osu(hapd, dev, bssid, note, prov)
    finally:
        files = [f for f in os.listdir(dir) if f.startswith("osu-")]
        for f in files:
            os.remove(dir + "/" + f)
        os.rmdir(dir)
def test_ap_hs20_fetch_osu_invalid_dir(dev, apdev):
    """Hotspot 2.0 OSU provider and invalid directory"""
    bssid = apdev[0]['bssid']
    params = hs20_ap_params()
    params['hs20_icon'] = "128:80:zxx:image/png:w1fi_logo:w1fi_logo.png"
    params['osu_ssid'] = '"HS 2.0 OSU open"'
    params['osu_method_list'] = "1"
    params['osu_friendly_name'] = ["eng:Test OSU", "fin:Testi-OSU"]
    params['osu_icon'] = "w1fi_logo"
    params['osu_service_desc'] = ["eng:Example services",
                                  "fin:Esimerkkipalveluja"]
    params['osu_server_uri'] = "https://example.com/osu/"
    hostapd.add_ap(apdev[0], params)
    dev[0].hs20_enable()
    # Point osu_dir at a nonexistent directory so that writing the fetched
    # provider information fails
    dir = "/tmp/osu-fetch-no-such-dir"
    dev[0].scan_for_bss(bssid, freq="2412")
    dev[0].request("SET osu_dir " + dir)
    dev[0].request("FETCH_OSU no-scan")
    ev = dev[0].wait_event(["Could not write OSU provider information"],
                           timeout=15)
    if ev is None:
        raise Exception("Timeout on OSU fetch")
def test_ap_hs20_fetch_osu_oom(dev, apdev):
    """Hotspot 2.0 OSU provider and OOM"""
    bssid = apdev[0]['bssid']
    params = hs20_ap_params()
    params['hs20_icon'] = "128:80:zxx:image/png:w1fi_logo:w1fi_logo.png"
    params['osu_ssid'] = '"HS 2.0 OSU open"'
    params['osu_method_list'] = "1"
    params['osu_friendly_name'] = ["eng:Test OSU", "fin:Testi-OSU"]
    params['osu_icon'] = "w1fi_logo"
    params['osu_service_desc'] = ["eng:Example services",
                                  "fin:Esimerkkipalveluja"]
    params['osu_server_uri'] = "https://example.com/osu/"
    hostapd.add_ap(apdev[0], params)
    dev[0].hs20_enable()
    # Prepare an empty OSU fetch result directory
    dir = "/tmp/osu-fetch"
    if os.path.isdir(dir):
        files = [f for f in os.listdir(dir) if f.startswith("osu-")]
        for f in files:
            os.remove(dir + "/" + f)
    else:
        try:
            os.makedirs(dir)
        except:
            pass
    dev[0].scan_for_bss(bssid, freq="2412")
    try:
        dev[0].request("SET osu_dir " + dir)
        # Injected allocation failures must not prevent the fetch
        # operation from completing
        with alloc_fail(dev[0], 1, "=hs20_osu_add_prov"):
            dev[0].request("FETCH_OSU no-scan")
            ev = dev[0].wait_event(["OSU provider fetch completed"], timeout=30)
            if ev is None:
                raise Exception("Timeout on OSU fetch")
        with alloc_fail(dev[0], 1, "hs20_anqp_send_req;hs20_next_osu_icon"):
            dev[0].request("FETCH_OSU no-scan")
            ev = dev[0].wait_event(["OSU provider fetch completed"], timeout=30)
            if ev is None:
                raise Exception("Timeout on OSU fetch")
    finally:
        files = [f for f in os.listdir(dir) if f.startswith("osu-")]
        for f in files:
            os.remove(dir + "/" + f)
        os.rmdir(dir)
def build_prov(prov):
    """Wrap a hex-encoded OSU provider payload in an OSU Providers list
    header: one-octet OSU SSID (value 0x30), provider count of one, and a
    little-endian 16-bit provider length field before the payload."""
    payload = binascii.unhexlify(prov)
    header = binascii.unhexlify('013001')
    length = struct.pack('<H', len(payload))
    return b''.join([header, length, payload])
def handle_osu_prov_fetch(hapd, dev, prov):
    """Respond to the station's ANQP query with an OSU Providers list
    element carrying the given (possibly malformed) payload."""
    # GAS/ANQP query for OSU Providers List
    query = gas_rx(hapd)
    gas = parse_gas(query['payload'])
    dialog_token = gas['dialog_token']
    resp = action_response(query)
    # Vendor-specific ANQP element (Info ID 0xdddd) with a six-octet
    # WFA OUI (50 6f 9a) header before the OSU Providers list payload
    osu_prov = struct.pack('<HH', 0xdddd, len(prov) + 6) + binascii.unhexlify('506f9a110800') + prov
    data = struct.pack('<H', len(osu_prov)) + osu_prov
    resp['payload'] = anqp_initial_resp(dialog_token, 0) + data
    send_gas_resp(hapd, resp)
    ev = dev[0].wait_event(["RX-HS20-ANQP"], timeout=5)
    if ev is None:
        raise Exception("ANQP query response for OSU Providers not received")
    if "OSU Providers list" not in ev:
        raise Exception("ANQP query response for OSU Providers not received(2)")
    ev = dev[0].wait_event(["ANQP-QUERY-DONE"], timeout=5)
    if ev is None:
        raise Exception("ANQP query for OSU Providers list not completed")
def start_osu_fetch(hapd, dev, bssid, note):
    """Refresh the BSS entry for the AP and start FETCH_OSU with hostapd
    configured to hand management frames to the test script."""
    # Allow normal management frame processing during the scan
    hapd.set("ext_mgmt_frame_handling", "0")
    dev[0].request("BSS_FLUSH 0")
    dev[0].scan_for_bss(bssid, freq="2412")
    hapd.set("ext_mgmt_frame_handling", "1")
    dev[0].dump_monitor()
    # Record the test step in the wpa_supplicant debug log
    dev[0].request("NOTE " + note)
    dev[0].request("FETCH_OSU no-scan")
def wait_osu_fetch_completed(dev):
    """Wait for wpa_supplicant to report completion of the OSU provider
    fetch; raise an Exception on timeout."""
    if dev[0].wait_event(["OSU provider fetch completed"], timeout=5) is None:
        raise Exception("Timeout on OSU fetch")
def run_fetch_osu_icon_failure(hapd, dev, bssid):
    """Run FETCH_OSU with a valid provider entry, but answer the icon
    fetch with an unexpected advertisement protocol to exercise the icon
    failure path."""
    start_osu_fetch(hapd, dev, bssid, "Icon fetch failure")
    # Valid OSU Providers list payload with one provider advertising an
    # icon, so that the station proceeds to an icon fetch
    prov = binascii.unhexlify('01ff' + '01' + '800019000b656e6754657374204f53550c66696e54657374692d4f53551868747470733a2f2f6578616d706c652e636f6d2f6f73752f01011b00800050007a787809696d6167652f706e6709773166695f6c6f676f002a0013656e674578616d706c652073657276696365731566696e4573696d65726b6b6970616c76656c756a61')
    handle_osu_prov_fetch(hapd, dev, prov)
    # GAS/ANQP query for icon
    query = gas_rx(hapd)
    gas = parse_gas(query['payload'])
    dialog_token = gas['dialog_token']
    resp = action_response(query)
    # Unexpected Advertisement Protocol in response
    adv_proto = struct.pack('8B', 108, 6, 127, 0xdd, 0x00, 0x11, 0x22, 0x33)
    data = struct.pack('<H', 0)
    resp['payload'] = struct.pack('<BBBHH', ACTION_CATEG_PUBLIC,
                                  GAS_INITIAL_RESPONSE,
                                  gas['dialog_token'], 0, 0) + adv_proto + data
    send_gas_resp(hapd, resp)
    ev = dev[0].wait_event(["ANQP-QUERY-DONE"], timeout=5)
    if ev is None:
        raise Exception("ANQP query for icon not completed")
    wait_osu_fetch_completed(dev)
def run_fetch_osu(hapd, dev, bssid, note, prov):
    """Run one FETCH_OSU round against an externally generated OSU
    Providers list payload (prov) and wait for it to complete."""
    start_osu_fetch(hapd, dev, bssid, note)
    handle_osu_prov_fetch(hapd, dev, prov)
    wait_osu_fetch_completed(dev)
def test_ap_hs20_ft(dev, apdev):
    """Hotspot 2.0 connection with FT"""
    check_eap_capa(dev[0], "MSCHAPV2")
    bssid = apdev[0]['bssid']
    params = hs20_ap_params()
    # Enable FT (IEEE 802.11r) on the first AP
    params['wpa_key_mgmt'] = "FT-EAP"
    params['nas_identifier'] = "nas1.w1.fi"
    params['r1_key_holder'] = "000102030405"
    params["mobility_domain"] = "a1b2"
    params["reassociation_deadline"] = "1000"
    hapd = hostapd.add_ap(apdev[0], params)
    dev[0].hs20_enable()
    id = dev[0].add_cred_values({'realm': "example.com",
                                 'username': "hs20-test",
                                 'password': "password",
                                 'ca_cert': "auth_serv/ca.pem",
                                 'domain': "example.com",
                                 'update_identifier': "1234"})
    interworking_select(dev[0], bssid, "home", freq="2412")
    interworking_connect(dev[0], bssid, "TTLS")
    dev[0].dump_monitor()
    # The FT-enabled AP must have been connected to using FT-EAP
    key_mgmt = dev[0].get_status_field("key_mgmt")
    if key_mgmt != "FT-EAP":
        raise Exception("Unexpected key_mgmt: " + key_mgmt)
    # speed up testing by avoiding unnecessary scanning of other channels
    nid = dev[0].get_status_field("id")
    dev[0].set_network(nid, "scan_freq", "2412")
    # Bring up a second AP without FT and disable the first one to force
    # a move to the non-FT AP
    params = hs20_ap_params()
    hapd2 = hostapd.add_ap(apdev[1], params)
    hapd.disable()
    ev = dev[0].wait_event(["CTRL-EVENT-DISCONNECTED"], timeout=10)
    if ev is None:
        raise Exception("Disconnection not reported")
    ev = dev[0].wait_event(["CTRL-EVENT-CONNECTED"], timeout=5)
    if ev is None:
        raise Exception("Connection to AP2 not reported")
    # The new association must not use FT key management
    key_mgmt = dev[0].get_status_field("key_mgmt")
    if key_mgmt != "WPA2/IEEE 802.1X/EAP":
        raise Exception("Unexpected key_mgmt: " + key_mgmt)
def test_ap_hs20_remediation_sql(dev, apdev, params):
    """Hotspot 2.0 connection and remediation required using SQLite for user DB"""
    check_eap_capa(dev[0], "MSCHAPV2")
    try:
        import sqlite3
    except ImportError:
        raise HwsimSkip("No sqlite3 module available")
    dbfile = params['prefix'] + ".eap-user.db"
    try:
        os.remove(dbfile)
    except:
        pass
    # Build a minimal EAP user database with one user marked as requiring
    # subscription remediation
    con = sqlite3.connect(dbfile)
    with con:
        cur = con.cursor()
        cur.execute("CREATE TABLE users(identity TEXT PRIMARY KEY, methods TEXT, password TEXT, remediation TEXT, phase2 INTEGER)")
        cur.execute("CREATE TABLE wildcards(identity TEXT PRIMARY KEY, methods TEXT)")
        cur.execute("INSERT INTO users(identity,methods,password,phase2,remediation) VALUES ('user-mschapv2','TTLS-MSCHAPV2','password',1,'user')")
        cur.execute("INSERT INTO wildcards(identity,methods) VALUES ('','TTLS,TLS')")
        cur.execute("CREATE TABLE authlog(timestamp TEXT, session TEXT, nas_ip TEXT, username TEXT, note TEXT)")
    try:
        # Separate hostapd instance acting as the RADIUS/EAP server with
        # the SQLite user database
        params = {"ssid": "as", "beacon_int": "2000",
                  "radius_server_clients": "auth_serv/radius_clients.conf",
                  "radius_server_auth_port": '18128',
                  "eap_server": "1",
                  "eap_user_file": "sqlite:" + dbfile,
                  "ca_cert": "auth_serv/ca.pem",
                  "server_cert": "auth_serv/server.pem",
                  "private_key": "auth_serv/server.key",
                  "subscr_remediation_url": "https://example.org/",
                  "subscr_remediation_method": "1"}
        hostapd.add_ap(apdev[1], params)
        bssid = apdev[0]['bssid']
        params = hs20_ap_params()
        params['auth_server_port'] = "18128"
        hostapd.add_ap(apdev[0], params)
        dev[0].request("SET pmf 1")
        dev[0].hs20_enable()
        id = dev[0].add_cred_values({'realm': "example.com",
                                     'username': "user-mschapv2",
                                     'password': "password",
                                     'ca_cert': "auth_serv/ca.pem"})
        interworking_select(dev[0], bssid, freq="2412")
        interworking_connect(dev[0], bssid, "TTLS")
        ev = dev[0].wait_event(["HS20-SUBSCRIPTION-REMEDIATION"], timeout=5)
        if ev is None:
            raise Exception("Timeout on subscription remediation notice")
        if " 1 https://example.org/" not in ev:
            raise Exception("Unexpected subscription remediation event contents")
        # The authentication server should have logged the connection
        with con:
            cur = con.cursor()
            cur.execute("SELECT * from authlog")
            rows = cur.fetchall()
            if len(rows) < 1:
                raise Exception("No authlog entries")
    finally:
        os.remove(dbfile)
        dev[0].request("SET pmf 0")
def test_ap_hs20_sim_provisioning(dev, apdev, params):
    """Hotspot 2.0 AAA server behavior for SIM provisioning"""
    check_eap_capa(dev[0], "SIM")
    try:
        import sqlite3
    except ImportError:
        raise HwsimSkip("No sqlite3 module available")
    dbfile = params['prefix'] + ".eap-user.db"
    try:
        os.remove(dbfile)
    except:
        pass
    # Build a minimal EAP user database with a wildcard entry that allows
    # EAP-SIM for identities with the '1' prefix
    con = sqlite3.connect(dbfile)
    with con:
        cur = con.cursor()
        cur.execute("CREATE TABLE users(identity TEXT PRIMARY KEY, methods TEXT, password TEXT, remediation TEXT, phase2 INTEGER, last_msk TEXT)")
        cur.execute("CREATE TABLE wildcards(identity TEXT PRIMARY KEY, methods TEXT)")
        cur.execute("INSERT INTO wildcards(identity,methods) VALUES ('1','SIM')")
        cur.execute("CREATE TABLE authlog(timestamp TEXT, session TEXT, nas_ip TEXT, username TEXT, note TEXT)")
        cur.execute("CREATE TABLE current_sessions(mac_addr TEXT PRIMARY KEY, identity TEXT, start_time TEXT, nas TEXT, hs20_t_c_filtering BOOLEAN, waiting_coa_ack BOOLEAN, coa_ack_received BOOLEAN)")
    try:
        # Separate hostapd instance acting as the RADIUS/EAP server with
        # SIM provisioning enabled
        params = {"ssid": "as", "beacon_int": "2000",
                  "radius_server_clients": "auth_serv/radius_clients.conf",
                  "radius_server_auth_port": '18128',
                  "eap_server": "1",
                  "eap_user_file": "sqlite:" + dbfile,
                  "eap_sim_db": "unix:/tmp/hlr_auc_gw.sock",
                  "ca_cert": "auth_serv/ca.pem",
                  "server_cert": "auth_serv/server.pem",
                  "private_key": "auth_serv/server.key",
                  "hs20_sim_provisioning_url":
                  "https://example.org/?hotspot2dot0-mobile-identifier-hash=",
                  "subscr_remediation_method": "1"}
        hostapd.add_ap(apdev[1], params)
        bssid = apdev[0]['bssid']
        params = hs20_ap_params()
        params['auth_server_port'] = "18128"
        hostapd.add_ap(apdev[0], params)
        dev[0].request("SET pmf 1")
        dev[0].hs20_enable()
        # Nonzero update_identifier: no provisioning notice expected
        dev[0].connect("test-hs20", proto="RSN", key_mgmt="WPA-EAP", eap="SIM",
                       ieee80211w="1",
                       identity="1232010000000000",
                       password="90dca4eda45b53cf0f12d7c9c3bc6a89:cb9cccc4b9258e6dca4760379fb82581",
                       scan_freq="2412", update_identifier="54321")
        ev = dev[0].wait_event(["HS20-SUBSCRIPTION-REMEDIATION"], timeout=0.5)
        if ev is not None:
            raise Exception("Unexpected subscription remediation notice")
        dev[0].request("REMOVE_NETWORK all")
        dev[0].wait_disconnected()
        dev[0].dump_monitor()
        # update_identifier=0: the AAA server is expected to indicate
        # subscription remediation with the SIM provisioning URL
        dev[0].connect("test-hs20", proto="RSN", key_mgmt="WPA-EAP", eap="SIM",
                       ieee80211w="1",
                       identity="1232010000000000",
                       password="90dca4eda45b53cf0f12d7c9c3bc6a89:cb9cccc4b9258e6dca4760379fb82581",
                       scan_freq="2412", update_identifier="0")
        ev = dev[0].wait_event(["HS20-SUBSCRIPTION-REMEDIATION"], timeout=5)
        if ev is None:
            raise Exception("Timeout on subscription remediation notice")
        if " 1 https://example.org/?hotspot2dot0-mobile-identifier-hash=" not in ev:
            raise Exception("Unexpected subscription remediation event contents: " + ev)
        id_hash = ev.split(' ')[2].split('=')[1]
        with con:
            cur = con.cursor()
            cur.execute("SELECT * from authlog")
            rows = cur.fetchall()
            if len(rows) < 1:
                raise Exception("No authlog entries")
        # Verify the sim_provisioning table entry added by the server for
        # this provisioning round
        with con:
            cur = con.cursor()
            cur.execute("SELECT * from sim_provisioning")
            rows = cur.fetchall()
            if len(rows) != 1:
                # Fix: this previously raised NameError ("Exeception")
                # instead of the intended Exception
                raise Exception("Unexpected number of rows in sim_provisioning (%d; expected %d)" % (len(rows), 1))
            logger.info("sim_provisioning: " + str(rows))
            if len(rows[0][0]) != 32:
                raise Exception("Unexpected mobile_identifier_hash length in DB")
            if rows[0][1] != "232010000000000":
                raise Exception("Unexpected IMSI in DB")
            if rows[0][2] != dev[0].own_addr():
                raise Exception("Unexpected MAC address in DB")
            if rows[0][0] != id_hash:
                raise Exception("hotspot2dot0-mobile-identifier-hash mismatch")
    finally:
        dev[0].request("SET pmf 0")
def test_ap_hs20_external_selection(dev, apdev):
    """Hotspot 2.0 connection using external network selection and creation"""
    check_eap_capa(dev[0], "MSCHAPV2")
    bssid = apdev[0]['bssid']
    params = hs20_ap_params()
    params['hessid'] = bssid
    params['disable_dgaf'] = '1'
    hostapd.add_ap(apdev[0], params)
    dev[0].hs20_enable()
    # Connect through an externally created network block (without
    # Interworking network selection) that carries Hotspot 2.0 parameters
    dev[0].connect("test-hs20", proto="RSN", key_mgmt="WPA-EAP", eap="TTLS",
                   ieee80211w="1",
                   identity="hs20-test", password="password",
                   ca_cert="auth_serv/ca.pem", phase2="auth=MSCHAPV2",
                   scan_freq="2412", update_identifier="54321",
                   roaming_consortium_selection="1020304050")
    if dev[0].get_status_field("hs20") != "3":
        raise Exception("Unexpected hs20 indication")
    # The configured roaming_consortium_selection value must be retained
    # in the network block
    network_id = dev[0].get_status_field("id")
    sel = dev[0].get_network(network_id, "roaming_consortium_selection")
    if sel != "1020304050":
        raise Exception("Unexpected roaming_consortium_selection value: " + sel)
def test_ap_hs20_random_mac_addr(dev, apdev):
    """Hotspot 2.0 connection with random MAC address"""
    check_eap_capa(dev[0], "MSCHAPV2")
    bssid = apdev[0]['bssid']
    params = hs20_ap_params()
    params['hessid'] = bssid
    params['disable_dgaf'] = '1'
    hapd = hostapd.add_ap(apdev[0], params)
    # Use a separate wpa_supplicant instance so the MAC randomization
    # settings do not leak into other test cases
    wpas = WpaSupplicant(global_iface='/tmp/wpas-wlan5')
    wpas.interface_add("wlan5")
    addr = wpas.p2p_interface_addr()
    wpas.request("SET mac_addr 1")
    wpas.request("SET preassoc_mac_addr 1")
    wpas.request("SET rand_addr_lifetime 60")
    wpas.hs20_enable()
    wpas.flush_scan_cache()
    id = wpas.add_cred_values({'realm': "example.com",
                               'username': "hs20-test",
                               'password': "password",
                               'ca_cert': "auth_serv/ca.pem",
                               'domain': "example.com",
                               'update_identifier': "1234"})
    interworking_select(wpas, bssid, "home", freq="2412")
    interworking_connect(wpas, bssid, "TTLS")
    addr1 = wpas.get_driver_status_field("addr")
    if addr == addr1:
        raise Exception("Did not use random MAC address")
    # The AP must see only the randomized address, not the permanent one
    sta = hapd.get_sta(addr)
    if sta['addr'] != "FAIL":
        raise Exception("Unexpected STA association with permanent address")
    sta = hapd.get_sta(addr1)
    if sta['addr'] != addr1:
        raise Exception("STA association with random address not found")
def test_ap_hs20_multi_network_and_cred_removal(dev, apdev):
    """Multiple networks and cred removal"""
    check_eap_capa(dev[0], "MSCHAPV2")
    bssid = apdev[0]['bssid']
    params = hs20_ap_params()
    params['nai_realm'] = ["0,example.com,25[3:26]"]
    hapd = hostapd.add_ap(apdev[0], params)
    # Mix cred-created networks with unrelated manually added ones so
    # that cred removal has to pick out only its own networks
    dev[0].add_network()
    dev[0].hs20_enable()
    id = dev[0].add_cred_values({'realm': "example.com",
                                 'username': "user",
                                 'password': "password"})
    interworking_select(dev[0], bssid, freq="2412")
    interworking_connect(dev[0], bssid, "PEAP")
    dev[0].add_network()
    dev[0].request("DISCONNECT")
    dev[0].wait_disconnected(timeout=10)
    # Change the SSID and reconnect so the cred ends up with a second
    # associated network block
    hapd.disable()
    hapd.set("ssid", "another ssid")
    hapd.enable()
    interworking_select(dev[0], bssid, freq="2412")
    interworking_connect(dev[0], bssid, "PEAP")
    dev[0].add_network()
    if len(dev[0].list_networks()) != 5:
        # Fix: error messages previously misspelled "remove_cred"
        raise Exception("Unexpected number of networks prior to remove_cred")
    dev[0].dump_monitor()
    # Removing the cred must remove exactly the two networks it created
    # and result in disconnection from the current one
    dev[0].remove_cred(id)
    if len(dev[0].list_networks()) != 3:
        raise Exception("Unexpected number of networks after remove_cred")
    dev[0].wait_disconnected(timeout=10)
def test_ap_hs20_interworking_add_network(dev, apdev):
    """Hotspot 2.0 connection using INTERWORKING_ADD_NETWORK"""
    check_eap_capa(dev[0], "MSCHAPV2")
    bssid = apdev[0]['bssid']
    params = hs20_ap_params()
    params['nai_realm'] = ["0,example.com,21[3:26][6:7][99:99]"]
    hostapd.add_ap(apdev[0], params)
    dev[0].hs20_enable()
    dev[0].add_cred_values(default_cred(user="user"))
    interworking_select(dev[0], bssid, freq=2412)
    # INTERWORKING_ADD_NETWORK creates the network block without starting
    # the connection; select it explicitly to connect
    id = dev[0].interworking_add_network(bssid)
    dev[0].select_network(id, freq=2412)
    dev[0].wait_connected()
def _test_ap_hs20_proxyarp(dev, apdev):
    """Helper for the proxyarp test; cleanup handled by the caller."""
    bssid = apdev[0]['bssid']
    params = hs20_ap_params()
    params['hessid'] = bssid
    params['disable_dgaf'] = '0'
    params['proxy_arp'] = '1'
    hapd = hostapd.add_ap(apdev[0], params, no_enable=True)
    # proxy_arp without ap_isolate and a bridge is an incomplete
    # configuration that hostapd must reject
    if "OK" in hapd.request("ENABLE"):
        raise Exception("Incomplete hostapd configuration was accepted")
    hapd.set("ap_isolate", "1")
    if "OK" in hapd.request("ENABLE"):
        raise Exception("Incomplete hostapd configuration was accepted")
    hapd.set('bridge', 'ap-br0')
    hapd.dump_monitor()
    try:
        hapd.enable()
    except:
        # For now, do not report failures due to missing kernel support
        raise HwsimSkip("Could not start hostapd - assume proxyarp not supported in kernel version")
    ev = hapd.wait_event(["AP-ENABLED", "AP-DISABLED"], timeout=10)
    if ev is None:
        raise Exception("AP startup timed out")
    if "AP-ENABLED" not in ev:
        raise Exception("AP startup failed")
    dev[0].hs20_enable()
    # Remove the bridge forwarding delay and bring the bridge up
    subprocess.call(['brctl', 'setfd', 'ap-br0', '0'])
    subprocess.call(['ip', 'link', 'set', 'dev', 'ap-br0', 'up'])
    id = dev[0].add_cred_values({'realm': "example.com",
                                 'username': "hs20-test",
                                 'password': "password",
                                 'ca_cert': "auth_serv/ca.pem",
                                 'domain': "example.com",
                                 'update_identifier': "1234"})
    interworking_select(dev[0], bssid, "home", freq="2412")
    interworking_connect(dev[0], bssid, "TTLS")
    dev[1].connect("test-hs20", key_mgmt="WPA-EAP", eap="TTLS",
                   identity="hs20-test", password="password",
                   ca_cert="auth_serv/ca.pem", phase2="auth=MSCHAPV2",
                   scan_freq="2412")
    time.sleep(0.1)
    addr0 = dev[0].p2p_interface_addr()
    addr1 = dev[1].p2p_interface_addr()
    src_ll_opt0 = b"\x01\x01" + binascii.unhexlify(addr0.replace(':', ''))
    src_ll_opt1 = b"\x01\x01" + binascii.unhexlify(addr1.replace(':', ''))
    # Send Neighbor Solicitation frames from the stations so that the AP
    # learns their IPv6 addresses
    pkt = build_ns(src_ll=addr0, ip_src="aaaa:bbbb:cccc::2",
                   ip_dst="ff02::1:ff00:2", target="aaaa:bbbb:cccc::2",
                   opt=src_ll_opt0)
    if "OK" not in dev[0].request("DATA_TEST_FRAME " + binascii.hexlify(pkt).decode()):
        raise Exception("DATA_TEST_FRAME failed")
    pkt = build_ns(src_ll=addr1, ip_src="aaaa:bbbb:dddd::2",
                   ip_dst="ff02::1:ff00:2", target="aaaa:bbbb:dddd::2",
                   opt=src_ll_opt1)
    if "OK" not in dev[1].request("DATA_TEST_FRAME " + binascii.hexlify(pkt).decode()):
        raise Exception("DATA_TEST_FRAME failed")
    pkt = build_ns(src_ll=addr1, ip_src="aaaa:bbbb:eeee::2",
                   ip_dst="ff02::1:ff00:2", target="aaaa:bbbb:eeee::2",
                   opt=src_ll_opt1)
    if "OK" not in dev[1].request("DATA_TEST_FRAME " + binascii.hexlify(pkt).decode()):
        raise Exception("DATA_TEST_FRAME failed")
    # Each learned address must show up as a permanent neighbor entry on
    # the ap-br0 bridge interface
    matches = get_permanent_neighbors("ap-br0")
    logger.info("After connect: " + str(matches))
    if len(matches) != 3:
        raise Exception("Unexpected number of neighbor entries after connect")
    if 'aaaa:bbbb:cccc::2 dev ap-br0 lladdr 02:00:00:00:00:00 PERMANENT' not in matches:
        raise Exception("dev0 addr missing")
    if 'aaaa:bbbb:dddd::2 dev ap-br0 lladdr 02:00:00:00:01:00 PERMANENT' not in matches:
        raise Exception("dev1 addr(1) missing")
    if 'aaaa:bbbb:eeee::2 dev ap-br0 lladdr 02:00:00:00:01:00 PERMANENT' not in matches:
        raise Exception("dev1 addr(2) missing")
    dev[0].request("DISCONNECT")
    dev[1].request("DISCONNECT")
    time.sleep(0.5)
    # The neighbor entries must be removed on disconnection
    matches = get_permanent_neighbors("ap-br0")
    logger.info("After disconnect: " + str(matches))
    if len(matches) > 0:
        raise Exception("Unexpected neighbor entries after disconnect")
def test_ap_hs20_hidden_ssid_in_scan_res(dev, apdev):
    """Hotspot 2.0 connection with hidden SSID in scan results"""
    check_eap_capa(dev[0], "MSCHAPV2")
    bssid = apdev[0]['bssid']
    # First, get a BSS table entry for the AP with a hidden SSID in scan
    # results
    hapd = hostapd.add_ap(apdev[0], {"ssid": 'secret',
                                     "ignore_broadcast_ssid": "1"})
    dev[0].scan_for_bss(bssid, freq=2412)
    hapd.disable()
    hapd_global = hostapd.HostapdGlobal(apdev[0])
    hapd_global.flush()
    hapd_global.remove(apdev[0]['ifname'])
    # Then, reconfigure the same BSSID as a Hotspot 2.0 AP and verify
    # Interworking connection works despite the stale hidden-SSID entry
    params = hs20_ap_params()
    params['hessid'] = bssid
    hapd = hostapd.add_ap(apdev[0], params)
    dev[0].hs20_enable()
    id = dev[0].add_cred_values({'realm': "example.com",
                                 'username': "hs20-test",
                                 'password': "password",
                                 'ca_cert': "auth_serv/ca.pem",
                                 'domain': "example.com"})
    interworking_select(dev[0], bssid, "home", freq="2412")
    interworking_connect(dev[0], bssid, "TTLS")
    # clear BSS table to avoid issues in following test cases
    dev[0].request("DISCONNECT")
    dev[0].wait_disconnected()
    hapd.disable()
    dev[0].flush_scan_cache()
    dev[0].flush_scan_cache()
def test_ap_hs20_proxyarp(dev, apdev):
    """Hotspot 2.0 and ProxyARP"""
    check_eap_capa(dev[0], "MSCHAPV2")
    try:
        _test_ap_hs20_proxyarp(dev, apdev)
    finally:
        # Best-effort cleanup of the bridge interface; subprocess.DEVNULL
        # avoids leaking the never-closed file object that
        # open('/dev/null', 'w') used to create here
        subprocess.call(['ip', 'link', 'set', 'dev', 'ap-br0', 'down'],
                        stderr=subprocess.DEVNULL)
        subprocess.call(['brctl', 'delbr', 'ap-br0'],
                        stderr=subprocess.DEVNULL)
def _test_ap_hs20_proxyarp_dgaf(dev, apdev, disabled):
    """Helper for the proxyarp DGAF tests; disabled selects whether
    Downstream Group-Addressed Forwarding is disabled on the AP. Cleanup
    is handled by the caller."""
    bssid = apdev[0]['bssid']
    params = hs20_ap_params()
    params['hessid'] = bssid
    params['disable_dgaf'] = '1' if disabled else '0'
    params['proxy_arp'] = '1'
    params['na_mcast_to_ucast'] = '1'
    params['ap_isolate'] = '1'
    params['bridge'] = 'ap-br0'
    hapd = hostapd.add_ap(apdev[0], params, no_enable=True)
    try:
        hapd.enable()
    except:
        # For now, do not report failures due to missing kernel support
        raise HwsimSkip("Could not start hostapd - assume proxyarp not supported in kernel version")
    ev = hapd.wait_event(["AP-ENABLED"], timeout=10)
    if ev is None:
        raise Exception("AP startup timed out")
    dev[0].hs20_enable()
    # Remove the bridge forwarding delay and bring the bridge up
    subprocess.call(['brctl', 'setfd', 'ap-br0', '0'])
    subprocess.call(['ip', 'link', 'set', 'dev', 'ap-br0', 'up'])
    id = dev[0].add_cred_values({'realm': "example.com",
                                 'username': "hs20-test",
                                 'password': "password",
                                 'ca_cert': "auth_serv/ca.pem",
                                 'domain': "example.com",
                                 'update_identifier': "1234"})
    interworking_select(dev[0], bssid, "home", freq="2412")
    interworking_connect(dev[0], bssid, "TTLS")
    dev[1].connect("test-hs20", key_mgmt="WPA-EAP", eap="TTLS",
                   identity="hs20-test", password="password",
                   ca_cert="auth_serv/ca.pem", phase2="auth=MSCHAPV2",
                   scan_freq="2412")
    time.sleep(0.1)
    addr0 = dev[0].p2p_interface_addr()
    src_ll_opt0 = b"\x01\x01" + binascii.unhexlify(addr0.replace(':', ''))
    # Neighbor Solicitation from the station teaches the AP its IPv6
    # address
    pkt = build_ns(src_ll=addr0, ip_src="aaaa:bbbb:cccc::2",
                   ip_dst="ff02::1:ff00:2", target="aaaa:bbbb:cccc::2",
                   opt=src_ll_opt0)
    if "OK" not in dev[0].request("DATA_TEST_FRAME " + binascii.hexlify(pkt).decode()):
        raise Exception("DATA_TEST_FRAME failed")
    # Multicast RA and NA injected from the bridge side for additional
    # code coverage
    pkt = build_ra(src_ll=apdev[0]['bssid'], ip_src="aaaa:bbbb:cccc::33",
                   ip_dst="ff01::1")
    if "OK" not in hapd.request("DATA_TEST_FRAME ifname=ap-br0 " + binascii.hexlify(pkt).decode()):
        raise Exception("DATA_TEST_FRAME failed")
    pkt = build_na(src_ll=apdev[0]['bssid'], ip_src="aaaa:bbbb:cccc::44",
                   ip_dst="ff01::1", target="aaaa:bbbb:cccc::55")
    if "OK" not in hapd.request("DATA_TEST_FRAME ifname=ap-br0 " + binascii.hexlify(pkt).decode()):
        raise Exception("DATA_TEST_FRAME failed")
    # DHCP ACK teaches the AP the station's IPv4 address
    pkt = build_dhcp_ack(dst_ll="ff:ff:ff:ff:ff:ff", src_ll=bssid,
                         ip_src="192.168.1.1", ip_dst="255.255.255.255",
                         yiaddr="192.168.1.123", chaddr=addr0)
    if "OK" not in hapd.request("DATA_TEST_FRAME ifname=ap-br0 " + binascii.hexlify(pkt).decode()):
        raise Exception("DATA_TEST_FRAME failed")
    # another copy for additional code coverage
    pkt = build_dhcp_ack(dst_ll=addr0, src_ll=bssid,
                         ip_src="192.168.1.1", ip_dst="255.255.255.255",
                         yiaddr="192.168.1.123", chaddr=addr0)
    if "OK" not in hapd.request("DATA_TEST_FRAME ifname=ap-br0 " + binascii.hexlify(pkt).decode()):
        raise Exception("DATA_TEST_FRAME failed")
    # Both learned addresses must show up as permanent neighbor entries
    matches = get_permanent_neighbors("ap-br0")
    logger.info("After connect: " + str(matches))
    if len(matches) != 2:
        raise Exception("Unexpected number of neighbor entries after connect")
    if 'aaaa:bbbb:cccc::2 dev ap-br0 lladdr 02:00:00:00:00:00 PERMANENT' not in matches:
        raise Exception("dev0 addr missing")
    if '192.168.1.123 dev ap-br0 lladdr 02:00:00:00:00:00 PERMANENT' not in matches:
        raise Exception("dev0 IPv4 addr missing")
    dev[0].request("DISCONNECT")
    dev[1].request("DISCONNECT")
    time.sleep(0.5)
    # The neighbor entries must be removed on disconnection
    matches = get_permanent_neighbors("ap-br0")
    logger.info("After disconnect: " + str(matches))
    if len(matches) > 0:
        raise Exception("Unexpected neighbor entries after disconnect")
def test_ap_hs20_proxyarp_disable_dgaf(dev, apdev):
    """Hotspot 2.0 and ProxyARP with DGAF disabled"""
    check_eap_capa(dev[0], "MSCHAPV2")
    try:
        _test_ap_hs20_proxyarp_dgaf(dev, apdev, True)
    finally:
        # Best-effort cleanup of the bridge interface; subprocess.DEVNULL
        # avoids leaking the never-closed file object that
        # open('/dev/null', 'w') used to create here
        subprocess.call(['ip', 'link', 'set', 'dev', 'ap-br0', 'down'],
                        stderr=subprocess.DEVNULL)
        subprocess.call(['brctl', 'delbr', 'ap-br0'],
                        stderr=subprocess.DEVNULL)
def test_ap_hs20_proxyarp_enable_dgaf(dev, apdev):
    """Hotspot 2.0 and ProxyARP with DGAF enabled"""
    check_eap_capa(dev[0], "MSCHAPV2")
    try:
        _test_ap_hs20_proxyarp_dgaf(dev, apdev, False)
    finally:
        # Best-effort cleanup of the bridge interface; subprocess.DEVNULL
        # avoids leaking the never-closed file object that
        # open('/dev/null', 'w') used to create here
        subprocess.call(['ip', 'link', 'set', 'dev', 'ap-br0', 'down'],
                        stderr=subprocess.DEVNULL)
        subprocess.call(['brctl', 'delbr', 'ap-br0'],
                        stderr=subprocess.DEVNULL)
def ip_checksum(buf):
    """Compute the Internet checksum (RFC 1071) of buf.

    Returns the 16-bit one's-complement checksum packed as two bytes with
    the same native byte order that is used when reading the 16-bit words
    from buf, so appending the result to buf yields a zero checksum.
    Odd-length input is zero-padded.
    """
    # Renamed the accumulator from "sum" to avoid shadowing the builtin
    total = 0
    if len(buf) & 0x01:
        buf += b'\x00'
    for i in range(0, len(buf), 2):
        val, = struct.unpack('H', buf[i:i+2])
        total += val
    # Fold the carries back into the low 16 bits
    while total >> 16:
        total = (total & 0xffff) + (total >> 16)
    return struct.pack('H', ~total & 0xffff)
def ipv6_solicited_node_mcaddr(target):
    """Return the IPv6 solicited-node multicast address for target.

    Combines the ff02::1:ff00:0 prefix with the low 24 bits of the target
    address (RFC 4291, section 2.7.1).
    """
    prefix = socket.inet_pton(socket.AF_INET6, "ff02::1:ff00:0")
    mask = socket.inet_pton(socket.AF_INET6, "::ff:ffff")
    _target = socket.inet_pton(socket.AF_INET6, target)
    # Combine byte-by-byte: prefix OR (target AND mask)
    addr = bytes(p | (t & m) for p, t, m in zip(prefix, _target, mask))
    return socket.inet_ntop(socket.AF_INET6, addr)
def build_icmpv6(ipv6_addrs, type, code, payload):
    """Build an ICMPv6 message: type/code header, checksum, payload.

    ipv6_addrs is the concatenated source and destination address used
    for the pseudo-header (upper-layer length, next header 58) over which
    the checksum is computed.
    """
    hdr = struct.pack("BB", type, code)
    # Checksum covers the pseudo-header plus the message with a zeroed
    # checksum field
    zeroed = hdr + b'\x00\x00' + payload
    pseudo_hdr = ipv6_addrs + struct.pack(">LBBBB", len(zeroed), 0, 0, 0, 58)
    return hdr + ip_checksum(pseudo_hdr + zeroed) + payload
def build_ra(src_ll, ip_src, ip_dst, cur_hop_limit=0, router_lifetime=0,
             reachable_time=0, retrans_timer=0, opt=None):
    """Build an Ethernet-framed ICMPv6 Router Advertisement (type 134)."""
    # Ethernet header: fixed multicast destination, source MAC, IPv6
    # ethertype (0x86dd)
    ehdr = binascii.unhexlify("3333ff000002")
    ehdr += binascii.unhexlify(src_ll.replace(':', ''))
    ehdr += b'\x86\xdd'
    saddr = socket.inet_pton(socket.AF_INET6, ip_src)
    daddr = socket.inet_pton(socket.AF_INET6, ip_dst)
    # RA body: hop limit, flags (0), lifetime, reachable/retrans timers
    body = struct.pack('>BBHLL', cur_hop_limit, 0, router_lifetime,
                       reachable_time, retrans_timer)
    if opt:
        body += opt
    icmp = build_icmpv6(saddr + daddr, 134, 0, body)
    # IPv6 header: version 6, payload length, next header 58, hop limit 255
    ipv6 = struct.pack('>BBBBHBB', 0x60, 0, 0, 0, len(icmp), 58, 255)
    ipv6 += saddr + daddr
    return ehdr + ipv6 + icmp
def build_ns(src_ll, ip_src, ip_dst, target, opt=None):
    """Construct an Ethernet frame carrying an ICMPv6 Neighbor Solicitation."""
    dst_mac = binascii.unhexlify("3333ff000002")
    src_mac = binascii.unhexlify(src_ll.replace(':', ''))
    eth = dst_mac + src_mac + b'\x86\xdd'
    src_addr = socket.inet_pton(socket.AF_INET6, ip_src)
    # Default destination: solicited-node multicast address of the target
    if ip_dst is None:
        ip_dst = ipv6_solicited_node_mcaddr(target)
    dst_addr = socket.inet_pton(socket.AF_INET6, ip_dst)
    # NS body: 4 reserved bytes, target address, optional ND option(s)
    body = b'\x00\x00\x00\x00' + socket.inet_pton(socket.AF_INET6, target)
    if opt:
        body += opt
    icmp = build_icmpv6(src_addr + dst_addr, 135, 0, body)
    ip6_hdr = struct.pack('>BBBBHBB', 0x60, 0, 0, 0, len(icmp), 58, 255)
    return eth + ip6_hdr + src_addr + dst_addr + icmp
def send_ns(dev, src_ll=None, target=None, ip_src=None, ip_dst=None, opt=None,
            hapd_bssid=None):
    """Inject a Neighbor Solicitation via the DATA_TEST_FRAME command."""
    if hapd_bssid:
        cmd = "DATA_TEST_FRAME ifname=ap-br0 "
        if src_ll is None:
            src_ll = hapd_bssid
    else:
        cmd = "DATA_TEST_FRAME "
        if src_ll is None:
            src_ll = dev.p2p_interface_addr()
    # Default to a source link-layer address option matching src_ll
    if opt is None:
        opt = b"\x01\x01" + binascii.unhexlify(src_ll.replace(':', ''))
    frame = build_ns(src_ll=src_ll, ip_src=ip_src, ip_dst=ip_dst,
                     target=target, opt=opt)
    res = dev.request(cmd + binascii.hexlify(frame).decode())
    if "OK" not in res:
        raise Exception("DATA_TEST_FRAME failed")
def build_na(src_ll, ip_src, ip_dst, target, opt=None, flags=0):
    """Construct an Ethernet frame carrying an ICMPv6 Neighbor Advertisement."""
    dst_mac = binascii.unhexlify("3333ff000002")
    src_mac = binascii.unhexlify(src_ll.replace(':', ''))
    eth = dst_mac + src_mac + b'\x86\xdd'
    src_addr = socket.inet_pton(socket.AF_INET6, ip_src)
    dst_addr = socket.inet_pton(socket.AF_INET6, ip_dst)
    # NA body: flags byte + three reserved bytes, target, optional option(s)
    body = struct.pack('>Bxxx', flags) + socket.inet_pton(socket.AF_INET6,
                                                          target)
    if opt:
        body += opt
    icmp = build_icmpv6(src_addr + dst_addr, 136, 0, body)
    ip6_hdr = struct.pack('>BBBBHBB', 0x60, 0, 0, 0, len(icmp), 58, 255)
    return eth + ip6_hdr + src_addr + dst_addr + icmp
def send_na(dev, src_ll=None, target=None, ip_src=None, ip_dst=None, opt=None,
            hapd_bssid=None):
    """Inject a Neighbor Advertisement via the DATA_TEST_FRAME command."""
    if hapd_bssid:
        cmd = "DATA_TEST_FRAME ifname=ap-br0 "
        if src_ll is None:
            src_ll = hapd_bssid
    else:
        cmd = "DATA_TEST_FRAME "
        if src_ll is None:
            src_ll = dev.p2p_interface_addr()
    frame = build_na(src_ll=src_ll, ip_src=ip_src, ip_dst=ip_dst,
                     target=target, opt=opt)
    res = dev.request(cmd + binascii.hexlify(frame).decode())
    if "OK" not in res:
        raise Exception("DATA_TEST_FRAME failed")
def build_dhcp_ack(dst_ll, src_ll, ip_src, ip_dst, yiaddr, chaddr,
                   subnet_mask="255.255.255.0", truncated_opt=False,
                   wrong_magic=False, force_tot_len=None, no_dhcp=False,
                   udp_checksum=True):
    """Construct an Ethernet/IPv4/UDP frame carrying a DHCP ACK.

    Parameters:
      dst_ll, src_ll -- Ethernet destination/source MAC ("aa:bb:..." form)
      ip_src, ip_dst -- IPv4 source/destination addresses (dotted form)
      yiaddr -- the "your IP address" assigned to the client
      chaddr -- client hardware address (MAC string; zero-padded to 16 bytes)
      subnet_mask -- value of the Subnet Mask option
      truncated_opt -- insert a deliberately truncated option (for error tests)
      wrong_magic -- corrupt the last byte of the DHCP magic cookie
      force_tot_len -- override the IPv4 total length field (invalid frames)
      no_dhcp -- replace the payload with a plain BOOTP body (no options)
      udp_checksum -- include a computed UDP checksum (0 = checksum disabled)
    """
    _dst_ll = binascii.unhexlify(dst_ll.replace(':', ''))
    _src_ll = binascii.unhexlify(src_ll.replace(':', ''))
    proto = b'\x08\x00'
    ehdr = _dst_ll + _src_ll + proto
    _ip_src = socket.inet_pton(socket.AF_INET, ip_src)
    _ip_dst = socket.inet_pton(socket.AF_INET, ip_dst)
    _subnet_mask = socket.inet_pton(socket.AF_INET, subnet_mask)
    _ciaddr = b'\x00\x00\x00\x00'
    _yiaddr = socket.inet_pton(socket.AF_INET, yiaddr)
    _siaddr = b'\x00\x00\x00\x00'
    _giaddr = b'\x00\x00\x00\x00'
    # chaddr field is 16 bytes: 6-byte MAC + 10 bytes of zero padding
    _chaddr = binascii.unhexlify(chaddr.replace(':', '') + "00000000000000000000")
    # BOOTP header: op=2 (reply), htype=1, hlen=6, hops=0, xid, secs/flags
    payload = struct.pack('>BBBBL3BB', 2, 1, 6, 0, 12345, 0, 0, 0, 0)
    # Addresses, client hw address, and the 192-byte sname/file area (zeros)
    payload += _ciaddr + _yiaddr + _siaddr + _giaddr + _chaddr + 192*b'\x00'
    # magic
    if wrong_magic:
        payload += b'\x63\x82\x53\x00'
    else:
        payload += b'\x63\x82\x53\x63'
    if truncated_opt:
        payload += b'\x22\xff\x00'
    # Option: DHCP Message Type = ACK
    payload += b'\x35\x01\x05'
    # Pad Option
    payload += b'\x00'
    # Option: Subnet Mask
    payload += b'\x01\x04' + _subnet_mask
    # Option: Time Offset
    payload += struct.pack('>BBL', 2, 4, 0)
    # End Option
    payload += b'\xff'
    # Pad Option
    payload += b'\x00\x00\x00\x00'
    if no_dhcp:
        # BOOTP-only case: rebuild the payload without magic/options
        payload = struct.pack('>BBBBL3BB', 2, 1, 6, 0, 12345, 0, 0, 0, 0)
        payload += _ciaddr + _yiaddr + _siaddr + _giaddr + _chaddr + 192*b'\x00'
    if udp_checksum:
        # UDP checksum over the IPv4 pseudo-header + UDP header + payload
        pseudohdr = _ip_src + _ip_dst + struct.pack('>BBH', 0, 17,
                                                    8 + len(payload))
        udphdr = struct.pack('>HHHH', 67, 68, 8 + len(payload), 0)
        checksum, = struct.unpack('>H', ip_checksum(pseudohdr + udphdr + payload))
    else:
        checksum = 0
    # UDP header: source port 67 (server), destination port 68 (client)
    udp = struct.pack('>HHHH', 67, 68, 8 + len(payload), checksum) + payload
    if force_tot_len:
        tot_len = force_tot_len
    else:
        tot_len = 20 + len(udp)
    # IPv4 header (TTL 128, protocol 17/UDP) with computed header checksum
    start = struct.pack('>BBHHBBBB', 0x45, 0, tot_len, 0, 0, 0, 128, 17)
    ipv4 = start + b'\x00\x00' + _ip_src + _ip_dst
    csum = ip_checksum(ipv4)
    ipv4 = start + csum + _ip_src + _ip_dst
    return ehdr + ipv4 + udp
def build_arp(dst_ll, src_ll, opcode, sender_mac, sender_ip,
              target_mac, target_ip):
    """Construct an Ethernet frame carrying an IPv4-over-Ethernet ARP."""
    def mac(addr):
        # "aa:bb:cc:dd:ee:ff" -> 6 raw bytes
        return binascii.unhexlify(addr.replace(':', ''))

    def ip4(addr):
        return socket.inet_pton(socket.AF_INET, addr)

    # Ethernet header with ARP ethertype (0x0806)
    frame = mac(dst_ll) + mac(src_ll) + b'\x08\x06'
    # ARP: hw type Ethernet (1), proto IPv4 (0x0800), hlen 6, plen 4, opcode
    frame += struct.pack('>HHBBH', 1, 0x0800, 6, 4, opcode)
    frame += mac(sender_mac) + ip4(sender_ip)
    frame += mac(target_mac) + ip4(target_ip)
    return frame
def send_arp(dev, dst_ll="ff:ff:ff:ff:ff:ff", src_ll=None, opcode=1,
             sender_mac=None, sender_ip="0.0.0.0",
             target_mac="00:00:00:00:00:00", target_ip="0.0.0.0",
             hapd_bssid=None):
    """Inject an ARP frame via the DATA_TEST_FRAME command."""
    if hapd_bssid:
        # Inject from the AP side through the bridge interface
        cmd = "DATA_TEST_FRAME ifname=ap-br0 "
        if src_ll is None:
            src_ll = hapd_bssid
        if sender_mac is None:
            sender_mac = hapd_bssid
    else:
        cmd = "DATA_TEST_FRAME "
        if src_ll is None:
            src_ll = dev.p2p_interface_addr()
        if sender_mac is None:
            sender_mac = dev.p2p_interface_addr()
    frame = build_arp(dst_ll=dst_ll, src_ll=src_ll, opcode=opcode,
                      sender_mac=sender_mac, sender_ip=sender_ip,
                      target_mac=target_mac, target_ip=target_ip)
    res = dev.request(cmd + binascii.hexlify(frame).decode())
    if "OK" not in res:
        raise Exception("DATA_TEST_FRAME failed")
def get_permanent_neighbors(ifname):
    """Return 'ip nei' output lines for PERMANENT entries mentioning ifname."""
    # subprocess.run waits for the child to exit; the previous
    # Popen-without-wait() approach left a zombie process behind.
    # The exit status is deliberately ignored (best-effort, as before).
    res = subprocess.run(['ip', 'nei'],
                         stdout=subprocess.PIPE).stdout.decode()
    return [line for line in res.splitlines()
            if "PERMANENT" in line and ifname in line]
def get_bridge_macs(ifname):
    """Return the 'brctl showmacs' output for the given bridge interface."""
    # subprocess.run waits for the child to exit; the previous
    # Popen-without-wait() approach left a zombie process behind.
    res = subprocess.run(['brctl', 'showmacs', ifname],
                         stdout=subprocess.PIPE).stdout
    return res.decode()
def tshark_get_arp(cap, filter):
    """Return ARP frames from cap matching filter as per-frame field lists.

    Each entry holds [eth.dst, eth.src, sender MAC, sender IP,
    target MAC, target IP].
    """
    out = run_tshark(cap, filter,
                     ["eth.dst", "eth.src",
                      "arp.src.hw_mac", "arp.src.proto_ipv4",
                      "arp.dst.hw_mac", "arp.dst.proto_ipv4"],
                     wait=False)
    return [line.split('\t') for line in out.splitlines()]
def tshark_get_ns(cap):
    """Return Neighbor Solicitation frames from cap as per-frame field lists."""
    out = run_tshark(cap, "icmpv6.type == 135",
                     ["eth.dst", "eth.src",
                      "ipv6.src", "ipv6.dst",
                      "icmpv6.nd.ns.target_address",
                      "icmpv6.opt.linkaddr"],
                     wait=False)
    return [line.split('\t') for line in out.splitlines()]
def tshark_get_na(cap):
    """Return Neighbor Advertisement frames from cap as per-frame field lists."""
    out = run_tshark(cap, "icmpv6.type == 136",
                     ["eth.dst", "eth.src",
                      "ipv6.src", "ipv6.dst",
                      "icmpv6.nd.na.target_address",
                      "icmpv6.opt.linkaddr"],
                     wait=False)
    return [line.split('\t') for line in out.splitlines()]
def _test_proxyarp_open(dev, apdev, params, ebtables=False):
    """Core ProxyARP/IPv4 test sequence (DHCP snooping + ARP proxying).

    Brings up two APs sharing the ap-br0 bridge (apdev[0] with proxy_arp
    and ap_isolate enabled), injects DHCP ACK and ARP frames, and then
    verifies from per-interface packet captures which ARP messages each
    station and the bridge were allowed to see. Raises HwsimSkip when the
    kernel appears to lack proxyarp support.
    """
    cap_br = params['prefix'] + ".ap-br0.pcap"
    cap_dev0 = params['prefix'] + ".%s.pcap" % dev[0].ifname
    cap_dev1 = params['prefix'] + ".%s.pcap" % dev[1].ifname
    cap_dev2 = params['prefix'] + ".%s.pcap" % dev[2].ifname
    bssid = apdev[0]['bssid']
    params = {'ssid': 'open'}
    params['proxy_arp'] = '1'
    hapd = hostapd.add_ap(apdev[0], params, no_enable=True)
    hapd.set("ap_isolate", "1")
    hapd.set('bridge', 'ap-br0')
    hapd.dump_monitor()
    try:
        hapd.enable()
    except:
        # For now, do not report failures due to missing kernel support
        raise HwsimSkip("Could not start hostapd - assume proxyarp not supported in kernel version")
    ev = hapd.wait_event(["AP-ENABLED", "AP-DISABLED"], timeout=10)
    if ev is None:
        raise Exception("AP startup timed out")
    if "AP-ENABLED" not in ev:
        raise Exception("AP startup failed")
    # Second AP (no proxy_arp) on the same bridge for dev2
    params2 = {'ssid': 'another'}
    hapd2 = hostapd.add_ap(apdev[1], params2, no_enable=True)
    hapd2.set('bridge', 'ap-br0')
    hapd2.enable()
    subprocess.call(['brctl', 'setfd', 'ap-br0', '0'])
    subprocess.call(['ip', 'link', 'set', 'dev', 'ap-br0', 'up'])
    if ebtables:
        # Drop broadcast ARP towards the proxy-arp AP so only proxied
        # replies can reach its stations
        for chain in ['FORWARD', 'OUTPUT']:
            try:
                err = subprocess.call(['ebtables', '-A', chain, '-p', 'ARP',
                                       '-d', 'Broadcast',
                                       '-o', apdev[0]['ifname'],
                                       '-j', 'DROP'])
                if err != 0:
                    raise
            except:
                raise HwsimSkip("No ebtables available")
    time.sleep(0.5)
    cmd = {}
    cmd[0] = WlantestCapture('ap-br0', cap_br)
    cmd[1] = WlantestCapture(dev[0].ifname, cap_dev0)
    cmd[2] = WlantestCapture(dev[1].ifname, cap_dev1)
    cmd[3] = WlantestCapture(dev[2].ifname, cap_dev2)
    dev[0].connect("open", key_mgmt="NONE", scan_freq="2412")
    dev[1].connect("open", key_mgmt="NONE", scan_freq="2412")
    dev[2].connect("another", key_mgmt="NONE", scan_freq="2412")
    time.sleep(1.1)
    brcmd = subprocess.Popen(['brctl', 'show'], stdout=subprocess.PIPE)
    res = brcmd.stdout.read().decode()
    brcmd.stdout.close()
    logger.info("Bridge setup: " + res)
    brcmd = subprocess.Popen(['brctl', 'showstp', 'ap-br0'],
                             stdout=subprocess.PIPE)
    res = brcmd.stdout.read().decode()
    brcmd.stdout.close()
    logger.info("Bridge showstp: " + res)
    addr0 = dev[0].p2p_interface_addr()
    addr1 = dev[1].p2p_interface_addr()
    addr2 = dev[2].p2p_interface_addr()
    # Broadcast DHCP ACK assigning 192.168.1.124 to dev0 (DHCP snooping)
    pkt = build_dhcp_ack(dst_ll="ff:ff:ff:ff:ff:ff", src_ll=bssid,
                         ip_src="192.168.1.1", ip_dst="255.255.255.255",
                         yiaddr="192.168.1.124", chaddr=addr0)
    if "OK" not in hapd.request("DATA_TEST_FRAME ifname=ap-br0 " + binascii.hexlify(pkt).decode()):
        raise Exception("DATA_TEST_FRAME failed")
    # Change address and verify unicast
    pkt = build_dhcp_ack(dst_ll=addr0, src_ll=bssid,
                         ip_src="192.168.1.1", ip_dst="255.255.255.255",
                         yiaddr="192.168.1.123", chaddr=addr0,
                         udp_checksum=False)
    if "OK" not in hapd.request("DATA_TEST_FRAME ifname=ap-br0 " + binascii.hexlify(pkt).decode()):
        raise Exception("DATA_TEST_FRAME failed")
    # Not-associated client MAC address
    pkt = build_dhcp_ack(dst_ll="ff:ff:ff:ff:ff:ff", src_ll=bssid,
                         ip_src="192.168.1.1", ip_dst="255.255.255.255",
                         yiaddr="192.168.1.125", chaddr="22:33:44:55:66:77")
    if "OK" not in hapd.request("DATA_TEST_FRAME ifname=ap-br0 " + binascii.hexlify(pkt).decode()):
        raise Exception("DATA_TEST_FRAME failed")
    # No IP address
    pkt = build_dhcp_ack(dst_ll=addr1, src_ll=bssid,
                         ip_src="192.168.1.1", ip_dst="255.255.255.255",
                         yiaddr="0.0.0.0", chaddr=addr1)
    if "OK" not in hapd.request("DATA_TEST_FRAME ifname=ap-br0 " + binascii.hexlify(pkt).decode()):
        raise Exception("DATA_TEST_FRAME failed")
    # Zero subnet mask
    pkt = build_dhcp_ack(dst_ll=addr1, src_ll=bssid,
                         ip_src="192.168.1.1", ip_dst="255.255.255.255",
                         yiaddr="192.168.1.126", chaddr=addr1,
                         subnet_mask="0.0.0.0")
    if "OK" not in hapd.request("DATA_TEST_FRAME ifname=ap-br0 " + binascii.hexlify(pkt).decode()):
        raise Exception("DATA_TEST_FRAME failed")
    # Truncated option
    pkt = build_dhcp_ack(dst_ll=addr1, src_ll=bssid,
                         ip_src="192.168.1.1", ip_dst="255.255.255.255",
                         yiaddr="192.168.1.127", chaddr=addr1,
                         truncated_opt=True)
    if "OK" not in hapd.request("DATA_TEST_FRAME ifname=ap-br0 " + binascii.hexlify(pkt).decode()):
        raise Exception("DATA_TEST_FRAME failed")
    # Wrong magic
    pkt = build_dhcp_ack(dst_ll=addr1, src_ll=bssid,
                         ip_src="192.168.1.1", ip_dst="255.255.255.255",
                         yiaddr="192.168.1.128", chaddr=addr1,
                         wrong_magic=True)
    if "OK" not in hapd.request("DATA_TEST_FRAME ifname=ap-br0 " + binascii.hexlify(pkt).decode()):
        raise Exception("DATA_TEST_FRAME failed")
    # Wrong IPv4 total length
    pkt = build_dhcp_ack(dst_ll=addr1, src_ll=bssid,
                         ip_src="192.168.1.1", ip_dst="255.255.255.255",
                         yiaddr="192.168.1.129", chaddr=addr1,
                         force_tot_len=1000)
    if "OK" not in hapd.request("DATA_TEST_FRAME ifname=ap-br0 " + binascii.hexlify(pkt).decode()):
        raise Exception("DATA_TEST_FRAME failed")
    # BOOTP
    pkt = build_dhcp_ack(dst_ll=addr1, src_ll=bssid,
                         ip_src="192.168.1.1", ip_dst="255.255.255.255",
                         yiaddr="192.168.1.129", chaddr=addr1,
                         no_dhcp=True)
    if "OK" not in hapd.request("DATA_TEST_FRAME ifname=ap-br0 " + binascii.hexlify(pkt).decode()):
        raise Exception("DATA_TEST_FRAME failed")
    macs = get_bridge_macs("ap-br0")
    logger.info("After connect (showmacs): " + str(macs))
    matches = get_permanent_neighbors("ap-br0")
    logger.info("After connect: " + str(matches))
    # Only the valid unicast ACK (192.168.1.123) should have been learned
    if len(matches) != 1:
        raise Exception("Unexpected number of neighbor entries after connect")
    if '192.168.1.123 dev ap-br0 lladdr 02:00:00:00:00:00 PERMANENT' not in matches:
        raise Exception("dev0 IPv4 addr missing")
    targets = ["192.168.1.123", "192.168.1.124", "192.168.1.125",
               "192.168.1.126"]
    for target in targets:
        send_arp(dev[1], sender_ip="192.168.1.100", target_ip=target)
    for target in targets:
        send_arp(hapd, hapd_bssid=bssid, sender_ip="192.168.1.101",
                 target_ip=target)
    for target in targets:
        send_arp(dev[2], sender_ip="192.168.1.103", target_ip=target)
    # ARP Probe from wireless STA
    send_arp(dev[1], target_ip="192.168.1.127")
    # ARP Announcement from wireless STA
    send_arp(dev[1], sender_ip="192.168.1.127", target_ip="192.168.1.127")
    send_arp(dev[1], sender_ip="192.168.1.127", target_ip="192.168.1.127",
             opcode=2)
    macs = get_bridge_macs("ap-br0")
    logger.info("After ARP Probe + Announcement (showmacs): " + str(macs))
    matches = get_permanent_neighbors("ap-br0")
    logger.info("After ARP Probe + Announcement: " + str(matches))
    # ARP Request for the newly introduced IP address from wireless STA
    send_arp(dev[0], sender_ip="192.168.1.123", target_ip="192.168.1.127")
    # ARP Request for the newly introduced IP address from bridge
    send_arp(hapd, hapd_bssid=bssid, sender_ip="192.168.1.102",
             target_ip="192.168.1.127")
    send_arp(dev[2], sender_ip="192.168.1.103", target_ip="192.168.1.127")
    # ARP Probe from bridge
    send_arp(hapd, hapd_bssid=bssid, target_ip="192.168.1.130")
    send_arp(dev[2], target_ip="192.168.1.131")
    # ARP Announcement from bridge (not to be learned by AP for proxyarp)
    send_arp(hapd, hapd_bssid=bssid, sender_ip="192.168.1.130",
             target_ip="192.168.1.130")
    send_arp(hapd, hapd_bssid=bssid, sender_ip="192.168.1.130",
             target_ip="192.168.1.130", opcode=2)
    send_arp(dev[2], sender_ip="192.168.1.131", target_ip="192.168.1.131")
    send_arp(dev[2], sender_ip="192.168.1.131", target_ip="192.168.1.131",
             opcode=2)
    macs = get_bridge_macs("ap-br0")
    logger.info("After ARP Probe + Announcement (showmacs): " + str(macs))
    matches = get_permanent_neighbors("ap-br0")
    logger.info("After ARP Probe + Announcement: " + str(matches))
    # ARP Request for the newly introduced IP address from wireless STA
    send_arp(dev[0], sender_ip="192.168.1.123", target_ip="192.168.1.130")
    # ARP Response from bridge (AP does not proxy for non-wireless devices)
    send_arp(hapd, hapd_bssid=bssid, dst_ll=addr0, sender_ip="192.168.1.130",
             target_ip="192.168.1.123", opcode=2)
    # ARP Request for the newly introduced IP address from wireless STA
    send_arp(dev[0], sender_ip="192.168.1.123", target_ip="192.168.1.131")
    # ARP Response from bridge (AP does not proxy for non-wireless devices)
    send_arp(dev[2], dst_ll=addr0, sender_ip="192.168.1.131",
             target_ip="192.168.1.123", opcode=2)
    # ARP Request for the newly introduced IP address from bridge
    send_arp(hapd, hapd_bssid=bssid, sender_ip="192.168.1.102",
             target_ip="192.168.1.130")
    send_arp(dev[2], sender_ip="192.168.1.104", target_ip="192.168.1.131")
    # ARP Probe from wireless STA (duplicate address; learned through DHCP)
    send_arp(dev[1], target_ip="192.168.1.123")
    # ARP Probe from wireless STA (duplicate address; learned through ARP)
    send_arp(dev[0], target_ip="192.168.1.127")
    # Gratuitous ARP Reply for another STA's IP address
    send_arp(dev[0], opcode=2, sender_mac=addr0, sender_ip="192.168.1.127",
             target_mac=addr1, target_ip="192.168.1.127")
    send_arp(dev[1], opcode=2, sender_mac=addr1, sender_ip="192.168.1.123",
             target_mac=addr0, target_ip="192.168.1.123")
    # ARP Request to verify previous mapping
    send_arp(dev[1], sender_ip="192.168.1.127", target_ip="192.168.1.123")
    send_arp(dev[0], sender_ip="192.168.1.123", target_ip="192.168.1.127")
    try:
        hwsim_utils.test_connectivity_iface(dev[0], hapd, "ap-br0")
    except Exception as e:
        logger.info("test_connectibity_iface failed: " + str(e))
        raise HwsimSkip("Assume kernel did not have the required patches for proxyarp")
    hwsim_utils.test_connectivity_iface(dev[1], hapd, "ap-br0")
    hwsim_utils.test_connectivity(dev[0], dev[1])
    dev[0].request("DISCONNECT")
    dev[1].request("DISCONNECT")
    time.sleep(1.5)
    for i in range(len(cmd)):
        cmd[i].close()
    time.sleep(0.1)
    macs = get_bridge_macs("ap-br0")
    logger.info("After disconnect (showmacs): " + str(macs))
    matches = get_permanent_neighbors("ap-br0")
    logger.info("After disconnect: " + str(matches))
    # Permanent entries must be removed when the STAs disconnect
    if len(matches) > 0:
        raise Exception("Unexpected neighbor entries after disconnect")
    if ebtables:
        cmd = subprocess.Popen(['ebtables', '-L', '--Lc'],
                               stdout=subprocess.PIPE)
        res = cmd.stdout.read().decode()
        cmd.stdout.close()
        logger.info("ebtables results:\n" + res)
    # Verify that expected ARP messages were seen and no unexpected
    # ARP messages were seen.
    arp_req = tshark_get_arp(cap_dev0, "arp.opcode == 1")
    arp_reply = tshark_get_arp(cap_dev0, "arp.opcode == 2")
    logger.info("dev0 seen ARP requests:\n" + str(arp_req))
    logger.info("dev0 seen ARP replies:\n" + str(arp_reply))
    if ['ff:ff:ff:ff:ff:ff', addr1,
        addr1, '192.168.1.100',
        '00:00:00:00:00:00', '192.168.1.123'] in arp_req:
        raise Exception("dev0 saw ARP request from dev1")
    if ['ff:ff:ff:ff:ff:ff', addr2,
        addr2, '192.168.1.103',
        '00:00:00:00:00:00', '192.168.1.123'] in arp_req:
        raise Exception("dev0 saw ARP request from dev2")
    # TODO: Uncomment once fixed in kernel
    #if ['ff:ff:ff:ff:ff:ff', bssid,
    #    bssid, '192.168.1.101',
    #    '00:00:00:00:00:00', '192.168.1.123'] in arp_req:
    #    raise Exception("dev0 saw ARP request from br")
    if ebtables:
        for req in arp_req:
            if req[1] != addr0:
                raise Exception("Unexpected foreign ARP request on dev0")
    arp_req = tshark_get_arp(cap_dev1, "arp.opcode == 1")
    arp_reply = tshark_get_arp(cap_dev1, "arp.opcode == 2")
    logger.info("dev1 seen ARP requests:\n" + str(arp_req))
    logger.info("dev1 seen ARP replies:\n" + str(arp_reply))
    if ['ff:ff:ff:ff:ff:ff', addr2,
        addr2, '192.168.1.103',
        '00:00:00:00:00:00', '192.168.1.123'] in arp_req:
        raise Exception("dev1 saw ARP request from dev2")
    if [addr1, addr0, addr0, '192.168.1.123', addr1, '192.168.1.100'] not in arp_reply:
        raise Exception("dev1 did not get ARP response for 192.168.1.123")
    if ebtables:
        for req in arp_req:
            if req[1] != addr1:
                raise Exception("Unexpected foreign ARP request on dev1")
    arp_req = tshark_get_arp(cap_dev2, "arp.opcode == 1")
    arp_reply = tshark_get_arp(cap_dev2, "arp.opcode == 2")
    logger.info("dev2 seen ARP requests:\n" + str(arp_req))
    logger.info("dev2 seen ARP replies:\n" + str(arp_reply))
    if [addr2, addr0,
        addr0, '192.168.1.123',
        addr2, '192.168.1.103'] not in arp_reply:
        raise Exception("dev2 did not get ARP response for 192.168.1.123")
    arp_req = tshark_get_arp(cap_br, "arp.opcode == 1")
    arp_reply = tshark_get_arp(cap_br, "arp.opcode == 2")
    logger.info("br seen ARP requests:\n" + str(arp_req))
    logger.info("br seen ARP replies:\n" + str(arp_reply))
    # TODO: Uncomment once fixed in kernel
    #if [bssid, addr0,
    #    addr0, '192.168.1.123',
    #    bssid, '192.168.1.101'] not in arp_reply:
    #    raise Exception("br did not get ARP response for 192.168.1.123")
def _test_proxyarp_open_ipv6(dev, apdev, params, ebtables=False):
    """Core ProxyARP/IPv6 test sequence (ND snooping + NS/NA proxying).

    Brings up two APs sharing the ap-br0 bridge (apdev[0] with proxy_arp
    and ap_isolate enabled), injects Neighbor Solicitation/Advertisement
    frames, and then verifies from per-interface packet captures which ND
    messages each station was allowed to see. Raises HwsimSkip when the
    kernel appears to lack IPv6 proxyarp support.
    """
    cap_br = params['prefix'] + ".ap-br0.pcap"
    cap_dev0 = params['prefix'] + ".%s.pcap" % dev[0].ifname
    cap_dev1 = params['prefix'] + ".%s.pcap" % dev[1].ifname
    cap_dev2 = params['prefix'] + ".%s.pcap" % dev[2].ifname
    bssid = apdev[0]['bssid']
    params = {'ssid': 'open'}
    params['proxy_arp'] = '1'
    hapd = hostapd.add_ap(apdev[0], params, no_enable=True)
    hapd.set("ap_isolate", "1")
    hapd.set('bridge', 'ap-br0')
    hapd.dump_monitor()
    try:
        hapd.enable()
    except:
        # For now, do not report failures due to missing kernel support
        raise HwsimSkip("Could not start hostapd - assume proxyarp not supported in kernel version")
    ev = hapd.wait_event(["AP-ENABLED", "AP-DISABLED"], timeout=10)
    if ev is None:
        raise Exception("AP startup timed out")
    if "AP-ENABLED" not in ev:
        raise Exception("AP startup failed")
    # Second AP (no proxy_arp) on the same bridge for dev2
    params2 = {'ssid': 'another'}
    hapd2 = hostapd.add_ap(apdev[1], params2, no_enable=True)
    hapd2.set('bridge', 'ap-br0')
    hapd2.enable()
    subprocess.call(['brctl', 'setfd', 'ap-br0', '0'])
    subprocess.call(['ip', 'link', 'set', 'dev', 'ap-br0', 'up'])
    if ebtables:
        # Drop multicast ND and router solicitation towards the proxy-arp
        # AP so only proxied responses can reach its stations
        for chain in ['FORWARD', 'OUTPUT']:
            try:
                err = subprocess.call(['ebtables', '-A', chain,
                                       '-d', 'Multicast',
                                       '-p', 'IPv6',
                                       '--ip6-protocol', 'ipv6-icmp',
                                       '--ip6-icmp-type',
                                       'neighbor-solicitation',
                                       '-o', apdev[0]['ifname'], '-j', 'DROP'])
                if err != 0:
                    raise
                subprocess.call(['ebtables', '-A', chain, '-d', 'Multicast',
                                 '-p', 'IPv6', '--ip6-protocol', 'ipv6-icmp',
                                 '--ip6-icmp-type', 'neighbor-advertisement',
                                 '-o', apdev[0]['ifname'], '-j', 'DROP'])
                subprocess.call(['ebtables', '-A', chain,
                                 '-p', 'IPv6', '--ip6-protocol', 'ipv6-icmp',
                                 '--ip6-icmp-type', 'router-solicitation',
                                 '-o', apdev[0]['ifname'], '-j', 'DROP'])
                # Multicast Listener Report Message
                subprocess.call(['ebtables', '-A', chain, '-d', 'Multicast',
                                 '-p', 'IPv6', '--ip6-protocol', 'ipv6-icmp',
                                 '--ip6-icmp-type', '143',
                                 '-o', apdev[0]['ifname'], '-j', 'DROP'])
            except:
                raise HwsimSkip("No ebtables available")
    time.sleep(0.5)
    cmd = {}
    cmd[0] = WlantestCapture('ap-br0', cap_br)
    cmd[1] = WlantestCapture(dev[0].ifname, cap_dev0)
    cmd[2] = WlantestCapture(dev[1].ifname, cap_dev1)
    cmd[3] = WlantestCapture(dev[2].ifname, cap_dev2)
    dev[0].connect("open", key_mgmt="NONE", scan_freq="2412")
    dev[1].connect("open", key_mgmt="NONE", scan_freq="2412")
    dev[2].connect("another", key_mgmt="NONE", scan_freq="2412")
    time.sleep(0.1)
    brcmd = subprocess.Popen(['brctl', 'show'], stdout=subprocess.PIPE)
    res = brcmd.stdout.read().decode()
    brcmd.stdout.close()
    logger.info("Bridge setup: " + res)
    brcmd = subprocess.Popen(['brctl', 'showstp', 'ap-br0'],
                             stdout=subprocess.PIPE)
    res = brcmd.stdout.read().decode()
    brcmd.stdout.close()
    logger.info("Bridge showstp: " + res)
    addr0 = dev[0].p2p_interface_addr()
    addr1 = dev[1].p2p_interface_addr()
    addr2 = dev[2].p2p_interface_addr()
    # NOTE(review): src_ll_opt0/src_ll_opt1 appear unused in this function;
    # send_ns() builds the source link-layer option itself by default
    src_ll_opt0 = b"\x01\x01" + binascii.unhexlify(addr0.replace(':', ''))
    src_ll_opt1 = b"\x01\x01" + binascii.unhexlify(addr1.replace(':', ''))
    # DAD NS
    send_ns(dev[0], ip_src="::", target="aaaa:bbbb:cccc::2")
    send_ns(dev[0], ip_src="aaaa:bbbb:cccc::2", target="aaaa:bbbb:cccc::2")
    # test frame without source link-layer address option
    send_ns(dev[0], ip_src="aaaa:bbbb:cccc::2", target="aaaa:bbbb:cccc::2",
            opt='')
    # test frame with bogus option
    send_ns(dev[0], ip_src="aaaa:bbbb:cccc::2", target="aaaa:bbbb:cccc::2",
            opt=b"\x70\x01\x01\x02\x03\x04\x05\x05")
    # test frame with truncated source link-layer address option
    send_ns(dev[0], ip_src="aaaa:bbbb:cccc::2", target="aaaa:bbbb:cccc::2",
            opt=b"\x01\x01\x01\x02\x03\x04")
    # test frame with foreign source link-layer address option
    send_ns(dev[0], ip_src="aaaa:bbbb:cccc::2", target="aaaa:bbbb:cccc::2",
            opt=b"\x01\x01\x01\x02\x03\x04\x05\x06")
    send_ns(dev[1], ip_src="aaaa:bbbb:dddd::2", target="aaaa:bbbb:dddd::2")
    send_ns(dev[1], ip_src="aaaa:bbbb:eeee::2", target="aaaa:bbbb:eeee::2")
    # another copy for additional code coverage
    send_ns(dev[1], ip_src="aaaa:bbbb:eeee::2", target="aaaa:bbbb:eeee::2")
    macs = get_bridge_macs("ap-br0")
    logger.info("After connect (showmacs): " + str(macs))
    matches = get_permanent_neighbors("ap-br0")
    logger.info("After connect: " + str(matches))
    # The snooped NS frames should have produced exactly three entries
    if len(matches) != 3:
        raise Exception("Unexpected number of neighbor entries after connect")
    if 'aaaa:bbbb:cccc::2 dev ap-br0 lladdr 02:00:00:00:00:00 PERMANENT' not in matches:
        raise Exception("dev0 addr missing")
    if 'aaaa:bbbb:dddd::2 dev ap-br0 lladdr 02:00:00:00:01:00 PERMANENT' not in matches:
        raise Exception("dev1 addr(1) missing")
    if 'aaaa:bbbb:eeee::2 dev ap-br0 lladdr 02:00:00:00:01:00 PERMANENT' not in matches:
        raise Exception("dev1 addr(2) missing")
    send_ns(dev[0], target="aaaa:bbbb:dddd::2", ip_src="aaaa:bbbb:cccc::2")
    time.sleep(0.1)
    send_ns(dev[1], target="aaaa:bbbb:cccc::2", ip_src="aaaa:bbbb:dddd::2")
    time.sleep(0.1)
    send_ns(hapd, hapd_bssid=bssid, target="aaaa:bbbb:dddd::2",
            ip_src="aaaa:bbbb:ffff::2")
    time.sleep(0.1)
    send_ns(dev[2], target="aaaa:bbbb:cccc::2", ip_src="aaaa:bbbb:ff00::2")
    time.sleep(0.1)
    send_ns(dev[2], target="aaaa:bbbb:dddd::2", ip_src="aaaa:bbbb:ff00::2")
    time.sleep(0.1)
    send_ns(dev[2], target="aaaa:bbbb:eeee::2", ip_src="aaaa:bbbb:ff00::2")
    time.sleep(0.1)
    # Try to probe for an already assigned address
    send_ns(dev[1], target="aaaa:bbbb:cccc::2", ip_src="::")
    time.sleep(0.1)
    send_ns(hapd, hapd_bssid=bssid, target="aaaa:bbbb:cccc::2", ip_src="::")
    time.sleep(0.1)
    send_ns(dev[2], target="aaaa:bbbb:cccc::2", ip_src="::")
    time.sleep(0.1)
    # Unsolicited NA
    send_na(dev[1], target="aaaa:bbbb:cccc:aeae::3",
            ip_src="aaaa:bbbb:cccc:aeae::3", ip_dst="ff02::1")
    send_na(hapd, hapd_bssid=bssid, target="aaaa:bbbb:cccc:aeae::4",
            ip_src="aaaa:bbbb:cccc:aeae::4", ip_dst="ff02::1")
    send_na(dev[2], target="aaaa:bbbb:cccc:aeae::5",
            ip_src="aaaa:bbbb:cccc:aeae::5", ip_dst="ff02::1")
    try:
        hwsim_utils.test_connectivity_iface(dev[0], hapd, "ap-br0")
    except Exception as e:
        logger.info("test_connectibity_iface failed: " + str(e))
        raise HwsimSkip("Assume kernel did not have the required patches for proxyarp")
    hwsim_utils.test_connectivity_iface(dev[1], hapd, "ap-br0")
    hwsim_utils.test_connectivity(dev[0], dev[1])
    dev[0].request("DISCONNECT")
    dev[1].request("DISCONNECT")
    time.sleep(0.5)
    for i in range(len(cmd)):
        cmd[i].close()
    macs = get_bridge_macs("ap-br0")
    logger.info("After disconnect (showmacs): " + str(macs))
    matches = get_permanent_neighbors("ap-br0")
    logger.info("After disconnect: " + str(matches))
    # Permanent entries must be removed when the STAs disconnect
    if len(matches) > 0:
        raise Exception("Unexpected neighbor entries after disconnect")
    if ebtables:
        cmd = subprocess.Popen(['ebtables', '-L', '--Lc'],
                               stdout=subprocess.PIPE)
        res = cmd.stdout.read().decode()
        cmd.stdout.close()
        logger.info("ebtables results:\n" + res)
    ns = tshark_get_ns(cap_dev0)
    logger.info("dev0 seen NS: " + str(ns))
    na = tshark_get_na(cap_dev0)
    logger.info("dev0 seen NA: " + str(na))
    if [addr0, addr1, 'aaaa:bbbb:dddd::2', 'aaaa:bbbb:cccc::2',
        'aaaa:bbbb:dddd::2', addr1] not in na:
        # For now, skip the test instead of reporting the error since the IPv6
        # proxyarp support is not yet in the upstream kernel tree.
        #raise Exception("dev0 did not get NA for aaaa:bbbb:dddd::2")
        raise HwsimSkip("Assume kernel did not have the required patches for proxyarp (IPv6)")
    if ebtables:
        for req in ns:
            if req[1] == bssid and req[0] == "33:33:ff:" + bssid[9:] and \
               req[3] == 'ff02::1:ff00:300' and req[4] == 'fe80::ff:fe00:300':
                # At least for now, ignore this special case until the kernel
                # can be prevented from sending it out.
                logger.info("dev0: Ignore NS from AP to own local addr: " + str(req))
            elif req[1] != addr0:
                raise Exception("Unexpected foreign NS on dev0: " + str(req))
    ns = tshark_get_ns(cap_dev1)
    logger.info("dev1 seen NS: " + str(ns))
    na = tshark_get_na(cap_dev1)
    logger.info("dev1 seen NA: " + str(na))
    if [addr1, addr0, 'aaaa:bbbb:cccc::2', 'aaaa:bbbb:dddd::2',
        'aaaa:bbbb:cccc::2', addr0] not in na:
        raise Exception("dev1 did not get NA for aaaa:bbbb:cccc::2")
    if ebtables:
        for req in ns:
            if req[1] == bssid and req[0] == "33:33:ff:" + bssid[9:] and \
               req[3] == 'ff02::1:ff00:300' and req[4] == 'fe80::ff:fe00:300':
                # At least for now, ignore this special case until the kernel
                # can be prevented from sending it out.
                logger.info("dev1: Ignore NS from AP to own local addr: " + str(req))
            elif req[1] != addr1:
                raise Exception("Unexpected foreign NS on dev1: " + str(req))
    ns = tshark_get_ns(cap_dev2)
    logger.info("dev2 seen NS: " + str(ns))
    na = tshark_get_na(cap_dev2)
    logger.info("dev2 seen NA: " + str(na))
    # FIX: enable once kernel implementation for proxyarp IPv6 is fixed
    #if [addr2, addr0, 'aaaa:bbbb:cccc::2', 'aaaa:bbbb:ff00::2',
    #    'aaaa:bbbb:cccc::2', addr0] not in na:
    #    raise Exception("dev2 did not get NA for aaaa:bbbb:cccc::2")
    #if [addr2, addr1, 'aaaa:bbbb:dddd::2', 'aaaa:bbbb:ff00::2',
    #    'aaaa:bbbb:dddd::2', addr1] not in na:
    #    raise Exception("dev2 did not get NA for aaaa:bbbb:dddd::2")
    #if [addr2, addr1, 'aaaa:bbbb:eeee::2', 'aaaa:bbbb:ff00::2',
    #    'aaaa:bbbb:eeee::2', addr1] not in na:
    #    raise Exception("dev2 did not get NA for aaaa:bbbb:eeee::2")
def test_proxyarp_open(dev, apdev, params):
    """ProxyARP with open network"""
    try:
        _test_proxyarp_open(dev, apdev, params)
    finally:
        # Clean up the bridge interface regardless of test outcome.
        # subprocess.DEVNULL avoids leaking the file object that
        # open('/dev/null', 'w') used to create on every call.
        subprocess.call(['ip', 'link', 'set', 'dev', 'ap-br0', 'down'],
                        stderr=subprocess.DEVNULL)
        subprocess.call(['brctl', 'delbr', 'ap-br0'],
                        stderr=subprocess.DEVNULL)
def test_proxyarp_open_ipv6(dev, apdev, params):
    """ProxyARP with open network (IPv6)"""
    try:
        _test_proxyarp_open_ipv6(dev, apdev, params)
    finally:
        # Clean up the bridge interface regardless of test outcome.
        # subprocess.DEVNULL avoids leaking the file object that
        # open('/dev/null', 'w') used to create on every call.
        subprocess.call(['ip', 'link', 'set', 'dev', 'ap-br0', 'down'],
                        stderr=subprocess.DEVNULL)
        subprocess.call(['brctl', 'delbr', 'ap-br0'],
                        stderr=subprocess.DEVNULL)
def test_proxyarp_open_ebtables(dev, apdev, params):
    """ProxyARP with open network"""
    try:
        _test_proxyarp_open(dev, apdev, params, ebtables=True)
    finally:
        # Best-effort flush of the ebtables rules added by the test
        try:
            subprocess.call(['ebtables', '-F', 'FORWARD'])
            subprocess.call(['ebtables', '-F', 'OUTPUT'])
        except:
            pass
        # subprocess.DEVNULL avoids leaking the file object that
        # open('/dev/null', 'w') used to create on every call.
        subprocess.call(['ip', 'link', 'set', 'dev', 'ap-br0', 'down'],
                        stderr=subprocess.DEVNULL)
        subprocess.call(['brctl', 'delbr', 'ap-br0'],
                        stderr=subprocess.DEVNULL)
def test_proxyarp_open_ebtables_ipv6(dev, apdev, params):
    """ProxyARP with open network (IPv6)"""
    try:
        _test_proxyarp_open_ipv6(dev, apdev, params, ebtables=True)
    finally:
        # Best-effort flush of the ebtables rules added by the test
        try:
            subprocess.call(['ebtables', '-F', 'FORWARD'])
            subprocess.call(['ebtables', '-F', 'OUTPUT'])
        except:
            pass
        # subprocess.DEVNULL avoids leaking the file object that
        # open('/dev/null', 'w') used to create on every call.
        subprocess.call(['ip', 'link', 'set', 'dev', 'ap-br0', 'down'],
                        stderr=subprocess.DEVNULL)
        subprocess.call(['brctl', 'delbr', 'ap-br0'],
                        stderr=subprocess.DEVNULL)
def test_proxyarp_errors(dev, apdev, params):
    """ProxyARP error cases"""
    try:
        run_proxyarp_errors(dev, apdev, params)
    finally:
        # Clean up the bridge interface regardless of test outcome.
        # subprocess.DEVNULL avoids leaking the file object that
        # open('/dev/null', 'w') used to create on every call.
        subprocess.call(['ip', 'link', 'set', 'dev', 'ap-br0', 'down'],
                        stderr=subprocess.DEVNULL)
        subprocess.call(['brctl', 'delbr', 'ap-br0'],
                        stderr=subprocess.DEVNULL)
def run_proxyarp_errors(dev, apdev, params):
    """Exercise proxyarp error paths with alloc_fail/fail_test instrumentation.

    Verifies that hostapd ENABLE fails cleanly when DHCP/NDISC snooping
    initialization hits simulated allocation or packet-filter failures,
    and that multicast-to-unicast conversion and IPv6 address bookkeeping
    handle injected failures.
    """
    # Note: the incoming 'params' argument is intentionally replaced by the
    # hostapd configuration dictionary here
    params = {'ssid': 'open',
              'proxy_arp': '1',
              'ap_isolate': '1',
              'bridge': 'ap-br0',
              'disable_dgaf': '1'}
    hapd = hostapd.add_ap(apdev[0], params, no_enable=True)
    try:
        hapd.enable()
    except:
        # For now, do not report failures due to missing kernel support
        raise HwsimSkip("Could not start hostapd - assume proxyarp not supported in kernel version")
    ev = hapd.wait_event(["AP-ENABLED", "AP-DISABLED"], timeout=10)
    if ev is None:
        raise Exception("AP startup timed out")
    if "AP-ENABLED" not in ev:
        raise Exception("AP startup failed")
    hapd.disable()
    # Each simulated failure during snooping init must make ENABLE fail
    with alloc_fail(hapd, 1, "l2_packet_init;x_snoop_get_l2_packet;dhcp_snoop_init"):
        if "FAIL" not in hapd.request("ENABLE"):
            raise Exception("ENABLE accepted unexpectedly")
    with alloc_fail(hapd, 1, "l2_packet_init;x_snoop_get_l2_packet;ndisc_snoop_init"):
        if "FAIL" not in hapd.request("ENABLE"):
            raise Exception("ENABLE accepted unexpectedly")
    with fail_test(hapd, 1, "l2_packet_set_packet_filter;x_snoop_get_l2_packet;ndisc_snoop_init"):
        if "FAIL" not in hapd.request("ENABLE"):
            raise Exception("ENABLE accepted unexpectedly")
    with fail_test(hapd, 1, "l2_packet_set_packet_filter;x_snoop_get_l2_packet;dhcp_snoop_init"):
        if "FAIL" not in hapd.request("ENABLE"):
            raise Exception("ENABLE accepted unexpectedly")
    hapd.enable()
    subprocess.call(['brctl', 'setfd', 'ap-br0', '0'])
    subprocess.call(['ip', 'link', 'set', 'dev', 'ap-br0', 'up'])
    dev[0].connect("open", key_mgmt="NONE", scan_freq="2412")
    addr0 = dev[0].own_addr()
    # Router Advertisement injected while mcast-to-ucast conversion fails
    pkt = build_ra(src_ll=apdev[0]['bssid'], ip_src="aaaa:bbbb:cccc::33",
                   ip_dst="ff01::1")
    with fail_test(hapd, 1, "x_snoop_mcast_to_ucast_convert_send"):
        if "OK" not in hapd.request("DATA_TEST_FRAME ifname=ap-br0 " + binascii.hexlify(pkt).decode()):
            raise Exception("DATA_TEST_FRAME failed")
        wait_fail_trigger(dev[0], "GET_FAIL")
    # Neighbor Solicitation injected while IPv6 address add fails
    with alloc_fail(hapd, 1, "sta_ip6addr_add"):
        src_ll_opt0 = b"\x01\x01" + binascii.unhexlify(addr0.replace(':', ''))
        pkt = build_ns(src_ll=addr0, ip_src="aaaa:bbbb:cccc::2",
                       ip_dst="ff02::1:ff00:2", target="aaaa:bbbb:cccc::2",
                       opt=src_ll_opt0)
        if "OK" not in dev[0].request("DATA_TEST_FRAME " + binascii.hexlify(pkt).decode()):
            raise Exception("DATA_TEST_FRAME failed")
        wait_fail_trigger(dev[0], "GET_ALLOC_FAIL")
def test_ap_hs20_connect_deinit(dev, apdev):
    """Hotspot 2.0 connection interrupted with deinit"""
    check_eap_capa(dev[0], "MSCHAPV2")
    bssid = apdev[0]['bssid']
    ap_conf = hs20_ap_params()
    ap_conf['hessid'] = bssid
    hapd = hostapd.add_ap(apdev[0], ap_conf)

    wpas = WpaSupplicant(global_iface='/tmp/wpas-wlan5')
    wpas.interface_add("wlan5", drv_params="")
    wpas.hs20_enable()
    wpas.flush_scan_cache()
    cred = {'realm': "example.com",
            'username': "hs20-test",
            'password': "password",
            'ca_cert': "auth_serv/ca.pem",
            'domain': "example.com"}
    wpas.add_cred_values(cred)
    wpas.scan_for_bss(bssid, freq=2412)

    # Disable the AP and block the radio so that the gas-query radio work
    # item gets queued but cannot complete.
    hapd.disable()
    wpas.request("INTERWORKING_SELECT freq=2412")
    wpas.request("RADIO_WORK add block-work")
    for suffix in ["", " (2)"]:
        ev = wpas.wait_event(["GAS-QUERY-START", "EXT-RADIO-WORK-START"],
                             timeout=5)
        if ev is None:
            raise Exception("Timeout while waiting radio work to start" + suffix)
    # Remove the interface while the gas-query radio work is still pending and
    # GAS query has not yet been started.
    wpas.interface_remove("wlan5")
def test_ap_hs20_anqp_format_errors(dev, apdev):
    """Interworking network selection and ANQP format errors"""
    bssid = apdev[0]['bssid']
    params = hs20_ap_params()
    params['hessid'] = bssid
    hapd = hostapd.add_ap(apdev[0], params)
    dev[0].hs20_enable()

    def expect_no_match(elem, payloads):
        # Override the indicated ANQP element on the AP with each malformed
        # payload and verify that network selection reports no match instead
        # of misbehaving on the broken data.
        for payload in payloads:
            hapd.set("anqp_elem", "%d:%s" % (elem, payload))
            dev[0].request("INTERWORKING_SELECT freq=2412")
            if dev[0].wait_event(["INTERWORKING-NO-MATCH"], timeout=5) is None:
                raise Exception("Network selection timed out")
            dev[0].dump_monitor()

    # Username/password credential against malformed NAI Realm list (263)
    cred_id = dev[0].add_cred_values({'realm': "example.com",
                                      'ca_cert': "auth_serv/ca.pem",
                                      'username': "hs20-test",
                                      'password': "password",
                                      'domain': "example.com"})
    dev[0].scan_for_bss(bssid, freq="2412")
    expect_no_match(263, ["00", "ffff", "010011223344", "020008000005112233445500",
                          "01000400000000", "01000000000000",
                          "01000300000200", "0100040000ff0000", "01000300000100",
                          "01000300000001",
                          "01000600000056112233",
                          "01000900000002050001000111",
                          "01000600000001000000", "01000600000001ff0000",
                          "01000600000001020001",
                          "010008000000010400010001", "0100080000000104000100ff",
                          "010011000000010d00050200020100030005000600",
                          "0000"])
    dev[0].remove_cred(cred_id)

    # SIM credential against malformed 3GPP Cellular Network info (264)
    dev[0].add_cred_values({'imsi': "555444-333222111", 'eap': "AKA",
                            'milenage': "5122250214c33e723a5dd523fc145fc0:981d464c7c52eb6e5036234984ad0bcf:000000000123"})
    expect_no_match(264, ["00", "0100", "0001", "00ff", "000200ff",
                          "0003000101", "00020100"])
def test_ap_hs20_cred_with_nai_realm(dev, apdev):
    """Hotspot 2.0 network selection and cred_with_nai_realm cred->realm"""
    bssid = apdev[0]['bssid']
    params = hs20_ap_params()
    params['hessid'] = bssid
    hostapd.add_ap(apdev[0], params)
    dev[0].hs20_enable()

    # Credential whose realm matches the AP: home network match expected.
    matching = {'realm': "example.com",
                'username': "test",
                'password': "secret",
                'domain': "example.com",
                'eap': 'TTLS'}
    cred_id = dev[0].add_cred_values(matching)
    interworking_select(dev[0], bssid, "home", freq=2412)
    dev[0].remove_cred(cred_id)

    # Credential with a non-matching realm: expected to produce no match.
    mismatching = {'realm': "foo.com",
                   'username': "test",
                   'password': "secret",
                   'domain': "example.com",
                   'roaming_consortium': "112234",
                   'eap': 'TTLS'}
    cred_id = dev[0].add_cred_values(mismatching)
    interworking_select(dev[0], bssid, "home", freq=2412, no_match=True)
    dev[0].remove_cred(cred_id)
def test_ap_hs20_cred_and_no_roaming_consortium(dev, apdev):
    """Hotspot 2.0 network selection and no roaming consortium"""
    bssid = apdev[0]['bssid']
    params = hs20_ap_params()
    params['hessid'] = bssid
    # Remove the roaming consortium advertisement from the AP configuration.
    del params['roaming_consortium']
    hostapd.add_ap(apdev[0], params)
    dev[0].hs20_enable()

    # Credential includes a roaming_consortium value, but the realm match is
    # expected to still yield a home network selection.
    cred = {'realm': "example.com",
            'username': "test",
            'password': "secret",
            'domain': "example.com",
            'roaming_consortium': "112234",
            'eap': 'TTLS'}
    dev[0].add_cred_values(cred)
    interworking_select(dev[0], bssid, "home", freq=2412)
def test_ap_hs20_interworking_oom(dev, apdev):
    """Hotspot 2.0 network selection and OOM"""
    bssid = apdev[0]['bssid']
    params = hs20_ap_params()
    params['hessid'] = bssid
    params['nai_realm'] = ["0,no.match.here;example.com;no.match.here.either,21[2:1][5:7]",
                           "0,example.com,13[5:6],21[2:4][5:7]",
                           "0,another.example.com"]
    hostapd.add_ap(apdev[0], params)
    dev[0].hs20_enable()
    dev[0].add_cred_values({'realm': "example.com",
                            'username': "test",
                            'password': "secret",
                            'domain': "example.com",
                            'eap': 'TTLS'})
    dev[0].scan_for_bss(bssid, freq="2412")

    # Force an allocation failure at each of these code paths during
    # automatic network selection and verify that the process does not crash.
    failure_points = ("wpabuf_alloc;interworking_anqp_send_req",
                      "anqp_build_req;interworking_anqp_send_req",
                      "gas_query_req;interworking_anqp_send_req",
                      "dup_binstr;nai_realm_parse_realm",
                      "=nai_realm_parse_realm",
                      "=nai_realm_parse",
                      "=nai_realm_match")
    for point in failure_points:
        with alloc_fail(dev[0], 1, point):
            dev[0].request("INTERWORKING_SELECT auto freq=2412")
            if dev[0].wait_event(["Starting ANQP"], timeout=5) is None:
                raise Exception("ANQP did not start")
            wait_fail_trigger(dev[0], "GET_ALLOC_FAIL")
        dev[0].dump_monitor()
def test_ap_hs20_no_cred_connect(dev, apdev):
    """Hotspot 2.0 and connect attempt without credential"""
    bssid = apdev[0]['bssid']
    params = hs20_ap_params()
    params['hessid'] = bssid
    hostapd.add_ap(apdev[0], params)
    dev[0].hs20_enable()
    dev[0].scan_for_bss(bssid, freq="2412")
    # No credentials have been configured, so the connection attempt has to
    # be rejected.
    res = dev[0].request("INTERWORKING_CONNECT " + bssid)
    if "FAIL" not in res:
        raise Exception("Unexpected INTERWORKING_CONNECT success")
def test_ap_hs20_no_rsn_connect(dev, apdev):
    """Hotspot 2.0 and connect attempt without RSN"""
    bssid = apdev[0]['bssid']
    # WPA(v1)-only AP with Interworking enabled, i.e. no RSN element.
    params = hostapd.wpa_params(ssid="test-hs20")
    params.update({'wpa_key_mgmt': "WPA-EAP",
                   'ieee80211w': "1",
                   'ieee8021x': "1",
                   'auth_server_addr': "127.0.0.1",
                   'auth_server_port': "1812",
                   'auth_server_shared_secret': "radius",
                   'interworking': "1",
                   'roaming_consortium': ["112233", "1020304050",
                                          "010203040506", "fedcba"],
                   'nai_realm': ["0,example.com,13[5:6],21[2:4][5:7]",
                                 "0,another.example.com"]})
    hostapd.add_ap(apdev[0], params)
    dev[0].hs20_enable()
    dev[0].scan_for_bss(bssid, freq="2412")
    dev[0].add_cred_values({'realm': "example.com",
                            'username': "test",
                            'password': "secret",
                            'domain': "example.com",
                            'roaming_consortium': "112233",
                            'eap': 'TTLS'})
    # Without RSN the BSS must neither match nor be accepted for connection.
    interworking_select(dev[0], bssid, freq=2412, no_match=True)
    if "FAIL" not in dev[0].request("INTERWORKING_CONNECT " + bssid):
        raise Exception("Unexpected INTERWORKING_CONNECT success")
def test_ap_hs20_no_match_connect(dev, apdev):
    """Hotspot 2.0 and connect attempt without matching cred"""
    bssid = apdev[0]['bssid']
    hostapd.add_ap(apdev[0], hs20_ap_params())
    dev[0].hs20_enable()
    dev[0].scan_for_bss(bssid, freq="2412")
    # Credential for a different realm/domain than what the AP advertises.
    cred = {'realm': "example.org",
            'username': "test",
            'password': "secret",
            'domain': "example.org",
            'roaming_consortium': "112234",
            'eap': 'TTLS'}
    dev[0].add_cred_values(cred)
    interworking_select(dev[0], bssid, freq=2412, no_match=True)
    # With no matching credential, the connection command has to fail.
    if "FAIL" not in dev[0].request("INTERWORKING_CONNECT " + bssid):
        raise Exception("Unexpected INTERWORKING_CONNECT success")
def test_ap_hs20_multiple_home_cred(dev, apdev):
    """Hotspot 2.0 and select with multiple matching home credentials"""
    bssid = apdev[0]['bssid']
    conf = hs20_ap_params()
    conf['hessid'] = bssid
    conf['nai_realm'] = ["0,example.com,13[5:6],21[2:4][5:7]"]
    conf['domain_name'] = "example.com"
    hostapd.add_ap(apdev[0], conf)

    bssid2 = apdev[1]['bssid']
    conf = hs20_ap_params(ssid="test-hs20-other")
    conf['hessid'] = bssid2
    conf['nai_realm'] = ["0,example.org,13[5:6],21[2:4][5:7]"]
    conf['domain_name'] = "example.org"
    hostapd.add_ap(apdev[1], conf)

    dev[0].hs20_enable()
    dev[0].scan_for_bss(bssid2, freq="2412")
    dev[0].scan_for_bss(bssid, freq="2412")
    # Two home credentials; the example.org one carries the higher priority
    # value and is expected to win automatic selection.
    dev[0].add_cred_values({'realm': "example.com",
                            'priority': '2',
                            'username': "hs20-test",
                            'password': "password",
                            'domain': "example.com"})
    dev[0].add_cred_values({'realm': "example.org",
                            'priority': '3',
                            'username': "hs20-test",
                            'password': "password",
                            'domain': "example.org"})
    dev[0].request("INTERWORKING_SELECT auto freq=2412")
    ev = dev[0].wait_connected(timeout=15)
    if bssid2 not in ev:
        raise Exception("Connected to incorrect network")
def test_ap_hs20_anqp_invalid_gas_response(dev, apdev):
    """Hotspot 2.0 network selection and invalid GAS response"""
    bssid = apdev[0]['bssid']
    params = hs20_ap_params()
    params['hessid'] = bssid
    hapd = hostapd.add_ap(apdev[0], params)
    dev[0].scan_for_bss(bssid, freq="2412")
    # Take over management frame processing so that the GAS responses below
    # can be crafted by hand instead of being generated by hostapd.
    hapd.set("ext_mgmt_frame_handling", "1")
    dev[0].hs20_enable()
    id = dev[0].add_cred_values({'realm': "example.com",
                                 'username': "test",
                                 'password': "secret",
                                 'domain': "example.com",
                                 'roaming_consortium': "112234",
                                 'eap': 'TTLS'})
    dev[0].request("INTERWORKING_SELECT freq=2412")
    query = gas_rx(hapd)
    gas = parse_gas(query['payload'])
    logger.info("ANQP: Unexpected Advertisement Protocol in response")
    resp = action_response(query)
    # Advertisement Protocol element (EID 108) carrying an unexpected
    # vendor specific identifier (0xdd 00 11 22 33) instead of ANQP.
    adv_proto = struct.pack('8B', 108, 6, 127, 0xdd, 0x00, 0x11, 0x22, 0x33)
    # Empty query response: only the 16-bit length field, set to zero.
    data = struct.pack('<H', 0)
    resp['payload'] = struct.pack('<BBBHH', ACTION_CATEG_PUBLIC,
                                  GAS_INITIAL_RESPONSE,
                                  gas['dialog_token'], 0, 0) + adv_proto + data
    send_gas_resp(hapd, resp)
    # The mismatching Advertisement Protocol must invalidate the frame.
    ev = dev[0].wait_event(["ANQP-QUERY-DONE"], timeout=5)
    if ev is None:
        raise Exception("No ANQP-QUERY-DONE seen")
    if "result=INVALID_FRAME" not in ev:
        raise Exception("Unexpected result: " + ev)
    dev[0].request("INTERWORKING_SELECT freq=2412")
    query = gas_rx(hapd)
    gas = parse_gas(query['payload'])
    logger.info("ANQP: Invalid element length for Info ID 1234")
    resp = action_response(query)
    # Valid-looking Advertisement Protocol element this time; it is also
    # reused (without reassignment) by all of the later exchanges below.
    adv_proto = struct.pack('BBBB', 108, 2, 127, 0)
    # ANQP element header claiming one octet of payload without including it.
    elements = struct.pack('<HH', 1234, 1)
    data = struct.pack('<H', len(elements)) + elements
    resp['payload'] = struct.pack('<BBBHH', ACTION_CATEG_PUBLIC,
                                  GAS_INITIAL_RESPONSE,
                                  gas['dialog_token'], 0, 0) + adv_proto + data
    send_gas_resp(hapd, resp)
    ev = dev[0].wait_event(["ANQP-QUERY-DONE"], timeout=5)
    if ev is None:
        raise Exception("No ANQP-QUERY-DONE seen")
    if "result=INVALID_FRAME" not in ev:
        raise Exception("Unexpected result: " + ev)
    # OOM while storing extra ANQP elements; the query itself is still
    # expected to complete with result=SUCCESS.
    with alloc_fail(dev[0], 1, "=anqp_add_extra"):
        dev[0].request("INTERWORKING_SELECT freq=2412")
        query = gas_rx(hapd)
        gas = parse_gas(query['payload'])
        resp = action_response(query)
        # Two zero-length elements with Info ID 1.
        elements = struct.pack('<HHHH', 1, 0, 1, 0)
        data = struct.pack('<H', len(elements)) + elements
        resp['payload'] = struct.pack('<BBBHH', ACTION_CATEG_PUBLIC,
                                      GAS_INITIAL_RESPONSE,
                                      gas['dialog_token'], 0, 0) + adv_proto + data
        send_gas_resp(hapd, resp)
        ev = dev[0].wait_event(["ANQP-QUERY-DONE"], timeout=5)
        if ev is None:
            raise Exception("No ANQP-QUERY-DONE seen")
        if "result=SUCCESS" not in ev:
            raise Exception("Unexpected result: " + ev)
    # Same exchange with the allocation failure in the buffer copy path.
    with alloc_fail(dev[0], 1, "wpabuf_alloc_copy;anqp_add_extra"):
        dev[0].request("INTERWORKING_SELECT freq=2412")
        query = gas_rx(hapd)
        gas = parse_gas(query['payload'])
        resp = action_response(query)
        elements = struct.pack('<HHHH', 1, 0, 1, 0)
        data = struct.pack('<H', len(elements)) + elements
        resp['payload'] = struct.pack('<BBBHH', ACTION_CATEG_PUBLIC,
                                      GAS_INITIAL_RESPONSE,
                                      gas['dialog_token'], 0, 0) + adv_proto + data
        send_gas_resp(hapd, resp)
        ev = dev[0].wait_event(["ANQP-QUERY-DONE"], timeout=5)
        if ev is None:
            raise Exception("No ANQP-QUERY-DONE seen")
        if "result=SUCCESS" not in ev:
            raise Exception("Unexpected result: " + ev)
    # Truncated/odd vendor specific elements (Info ID 0xdddd) and a repeated
    # zero-length element; all of these must be tolerated with SUCCESS.
    # NOTE(review): 0x50 0x6f 0x9a looks like the WFA OUI — confirm.
    tests = [struct.pack('<HH', 0xdddd, 0),
             struct.pack('<HH3B', 0xdddd, 3, 0x50, 0x6f, 0x9a),
             struct.pack('<HH4B', 0xdddd, 4, 0x50, 0x6f, 0x9a, 0),
             struct.pack('<HH4B', 0xdddd, 4, 0x11, 0x22, 0x33, 0),
             struct.pack('<HHHH', 1, 0, 1, 0)]
    for elements in tests:
        dev[0].request("INTERWORKING_SELECT freq=2412")
        query = gas_rx(hapd)
        gas = parse_gas(query['payload'])
        resp = action_response(query)
        data = struct.pack('<H', len(elements)) + elements
        resp['payload'] = struct.pack('<BBBHH', ACTION_CATEG_PUBLIC,
                                      GAS_INITIAL_RESPONSE,
                                      gas['dialog_token'], 0, 0) + adv_proto + data
        send_gas_resp(hapd, resp)
        ev = dev[0].wait_event(["ANQP-QUERY-DONE"], timeout=5)
        if ev is None:
            raise Exception("No ANQP-QUERY-DONE seen")
        if "result=SUCCESS" not in ev:
            raise Exception("Unexpected result: " + ev)
def test_ap_hs20_set_profile_failures(dev, apdev):
    """Hotspot 2.0 and failures during profile configuration

    Walks through credential types (username/password, roaming consortium,
    certificate, SIM/AKA) and forces allocation failures at specific points
    while INTERWORKING_CONNECT builds the network profile. The NOTE requests
    mark in the debug log which allocation each alloc_fail targets; the
    numeric alloc_fail counts (1..6) depend on the exact allocation order
    inside the targeted functions, so statement order here matters.
    """
    bssid = apdev[0]['bssid']
    params = hs20_ap_params()
    params['hessid'] = bssid
    params['anqp_3gpp_cell_net'] = "555,444"
    hapd = hostapd.add_ap(apdev[0], params)
    dev[0].hs20_enable()
    dev[0].scan_for_bss(bssid, freq="2412")
    # Username/password credential with an explicit EAP method (TTLS)
    id = dev[0].add_cred_values({'realm': "example.com",
                                 'domain': "example.com",
                                 'username': "test",
                                 'password': "secret",
                                 'eap': 'TTLS'})
    interworking_select(dev[0], bssid, "home", freq=2412)
    dev[0].dump_monitor()
    dev[0].request("NOTE ssid->eap.eap_methods = os_malloc()")
    with alloc_fail(dev[0], 1, "interworking_set_eap_params"):
        dev[0].request("INTERWORKING_CONNECT " + bssid)
        wait_fail_trigger(dev[0], "GET_ALLOC_FAIL")
    dev[0].remove_cred(id)
    # Username includes the realm part
    id = dev[0].add_cred_values({'realm': "example.com",
                                 'domain': "example.com",
                                 'username': "hs20-test-with-domain@example.com",
                                 'password': "password"})
    interworking_select(dev[0], bssid, "home", freq=2412)
    dev[0].dump_monitor()
    dev[0].request("NOTE anon = os_malloc()")
    with alloc_fail(dev[0], 1, "interworking_set_eap_params"):
        dev[0].request("INTERWORKING_CONNECT " + bssid)
        wait_fail_trigger(dev[0], "GET_ALLOC_FAIL")
    dev[0].request("NOTE Successful connection with cred->username including realm")
    dev[0].request("INTERWORKING_CONNECT " + bssid)
    dev[0].wait_connected()
    dev[0].remove_cred(id)
    dev[0].wait_disconnected()
    # Plain username without realm
    id = dev[0].add_cred_values({'realm': "example.com",
                                 'domain': "example.com",
                                 'username': "hs20-test",
                                 'password': "password"})
    interworking_select(dev[0], bssid, "home", freq=2412)
    dev[0].dump_monitor()
    dev[0].request("NOTE anon = os_malloc() (second)")
    with alloc_fail(dev[0], 1, "interworking_set_eap_params"):
        dev[0].request("INTERWORKING_CONNECT " + bssid)
        wait_fail_trigger(dev[0], "GET_ALLOC_FAIL")
    with alloc_fail(dev[0], 1, "wpa_config_add_network;interworking_connect"):
        dev[0].request("INTERWORKING_CONNECT " + bssid)
        wait_fail_trigger(dev[0], "GET_ALLOC_FAIL")
    with alloc_fail(dev[0], 1, "=interworking_connect"):
        dev[0].request("INTERWORKING_CONNECT " + bssid)
        wait_fail_trigger(dev[0], "GET_ALLOC_FAIL")
    dev[0].request("NOTE wpa_config_set(eap)")
    with alloc_fail(dev[0], 1, "wpa_config_parse_eap;wpa_config_set;interworking_connect"):
        dev[0].request("INTERWORKING_CONNECT " + bssid)
        wait_fail_trigger(dev[0], "GET_ALLOC_FAIL")
    dev[0].request("NOTE wpa_config_set(TTLS-NON_EAP_MSCHAPV2-phase2)")
    with alloc_fail(dev[0], 1, "wpa_config_parse_str;wpa_config_set;interworking_connect"):
        dev[0].request("INTERWORKING_CONNECT " + bssid)
        wait_fail_trigger(dev[0], "GET_ALLOC_FAIL")
    dev[0].remove_cred(id)
    # Roaming consortium based credential (no realm)
    id = dev[0].add_cred_values({'roaming_consortium': "112233",
                                 'domain': "example.com",
                                 'username': "hs20-test",
                                 'password': "password",
                                 'eap': 'TTLS',
                                 'phase2': "auth=MSCHAPV2"})
    interworking_select(dev[0], bssid, "home", freq=2412)
    dev[0].dump_monitor()
    dev[0].request("NOTE anon = os_strdup()")
    with alloc_fail(dev[0], 2, "interworking_set_eap_params"):
        dev[0].request("INTERWORKING_CONNECT " + bssid)
        wait_fail_trigger(dev[0], "GET_ALLOC_FAIL")
    dev[0].request("NOTE wpa_config_set_quoted(anonymous_identity)")
    with alloc_fail(dev[0], 1, "=wpa_config_set_quoted;interworking_set_eap_params"):
        dev[0].request("INTERWORKING_CONNECT " + bssid)
        wait_fail_trigger(dev[0], "GET_ALLOC_FAIL")
    dev[0].request("NOTE Successful connection with cred->realm not included")
    dev[0].request("INTERWORKING_CONNECT " + bssid)
    dev[0].wait_connected()
    dev[0].remove_cred(id)
    dev[0].wait_disconnected()
    # Roaming consortium credential with realm and PEAP
    id = dev[0].add_cred_values({'roaming_consortium': "112233",
                                 'domain': "example.com",
                                 'realm': "example.com",
                                 'username': "user",
                                 'password': "password",
                                 'eap': 'PEAP'})
    interworking_select(dev[0], bssid, "home", freq=2412)
    dev[0].dump_monitor()
    dev[0].request("NOTE id = os_strdup()")
    with alloc_fail(dev[0], 2, "interworking_set_eap_params"):
        dev[0].request("INTERWORKING_CONNECT " + bssid)
        wait_fail_trigger(dev[0], "GET_ALLOC_FAIL")
    dev[0].request("NOTE wpa_config_set_quoted(identity)")
    with alloc_fail(dev[0], 1, "=wpa_config_set_quoted;interworking_set_eap_params"):
        dev[0].request("INTERWORKING_CONNECT " + bssid)
        wait_fail_trigger(dev[0], "GET_ALLOC_FAIL")
    dev[0].remove_cred(id)
    # Same credential shape with TTLS; later wpa_config_set_quoted() calls
    # are targeted by increasing the alloc_fail count.
    id = dev[0].add_cred_values({'roaming_consortium': "112233",
                                 'domain': "example.com",
                                 'realm': "example.com",
                                 'username': "user",
                                 'password': "password",
                                 'eap': "TTLS"})
    interworking_select(dev[0], bssid, "home", freq=2412)
    dev[0].dump_monitor()
    dev[0].request("NOTE wpa_config_set_quoted(identity) (second)")
    with alloc_fail(dev[0], 2, "=wpa_config_set_quoted;interworking_set_eap_params"):
        dev[0].request("INTERWORKING_CONNECT " + bssid)
        wait_fail_trigger(dev[0], "GET_ALLOC_FAIL")
    dev[0].request("NOTE wpa_config_set_quoted(password)")
    with alloc_fail(dev[0], 3, "=wpa_config_set_quoted;interworking_set_eap_params"):
        dev[0].request("INTERWORKING_CONNECT " + bssid)
        wait_fail_trigger(dev[0], "GET_ALLOC_FAIL")
    with alloc_fail(dev[0], 1, "wpa_config_add_network;interworking_connect_roaming_consortium"):
        dev[0].request("INTERWORKING_CONNECT " + bssid)
        wait_fail_trigger(dev[0], "GET_ALLOC_FAIL")
    with alloc_fail(dev[0], 1, "=interworking_connect_roaming_consortium"):
        dev[0].request("INTERWORKING_CONNECT " + bssid)
        wait_fail_trigger(dev[0], "GET_ALLOC_FAIL")
    dev[0].remove_cred(id)
    # Externally stored password ("ext:password")
    id = dev[0].add_cred_values({'roaming_consortium': "112233",
                                 'domain': "example.com",
                                 'realm': "example.com",
                                 'username': "user",
                                 'eap': "PEAP"})
    dev[0].set_cred(id, "password", "ext:password")
    interworking_select(dev[0], bssid, "home", freq=2412)
    dev[0].dump_monitor()
    dev[0].request("NOTE wpa_config_set(password)")
    with alloc_fail(dev[0], 3, "wpa_config_set;interworking_set_eap_params"):
        dev[0].request("INTERWORKING_CONNECT " + bssid)
        wait_fail_trigger(dev[0], "GET_ALLOC_FAIL")
    with alloc_fail(dev[0], 1, "interworking_set_hs20_params"):
        dev[0].request("INTERWORKING_CONNECT " + bssid)
        wait_fail_trigger(dev[0], "GET_ALLOC_FAIL")
    dev[0].remove_cred(id)
    # Certificate based credential; each quoted parameter write is targeted
    # in turn (counts 2..6).
    id = dev[0].add_cred_values({'realm': "example.com",
                                 'domain': "example.com",
                                 'username': "certificate-user",
                                 'phase1': "include_tls_length=0",
                                 'domain_suffix_match': "example.com",
                                 'ca_cert': "auth_serv/ca.pem",
                                 'client_cert': "auth_serv/user.pem",
                                 'private_key': "auth_serv/user.key",
                                 'private_key_passwd': "secret"})
    interworking_select(dev[0], bssid, "home", freq=2412)
    dev[0].dump_monitor()
    dev[0].request("NOTE wpa_config_set_quoted(client_cert)")
    with alloc_fail(dev[0], 2, "=wpa_config_set_quoted;interworking_set_eap_params"):
        dev[0].request("INTERWORKING_CONNECT " + bssid)
        wait_fail_trigger(dev[0], "GET_ALLOC_FAIL")
    dev[0].request("NOTE wpa_config_set_quoted(private_key)")
    with alloc_fail(dev[0], 3, "=wpa_config_set_quoted;interworking_set_eap_params"):
        dev[0].request("INTERWORKING_CONNECT " + bssid)
        wait_fail_trigger(dev[0], "GET_ALLOC_FAIL")
    dev[0].request("NOTE wpa_config_set_quoted(private_key_passwd)")
    with alloc_fail(dev[0], 4, "=wpa_config_set_quoted;interworking_set_eap_params"):
        dev[0].request("INTERWORKING_CONNECT " + bssid)
        wait_fail_trigger(dev[0], "GET_ALLOC_FAIL")
    dev[0].request("NOTE wpa_config_set_quoted(ca_cert)")
    with alloc_fail(dev[0], 5, "=wpa_config_set_quoted;interworking_set_eap_params"):
        dev[0].request("INTERWORKING_CONNECT " + bssid)
        wait_fail_trigger(dev[0], "GET_ALLOC_FAIL")
    dev[0].request("NOTE wpa_config_set_quoted(domain_suffix_match)")
    with alloc_fail(dev[0], 6, "=wpa_config_set_quoted;interworking_set_eap_params"):
        dev[0].request("INTERWORKING_CONNECT " + bssid)
        wait_fail_trigger(dev[0], "GET_ALLOC_FAIL")
    with alloc_fail(dev[0], 1, "interworking_set_hs20_params"):
        dev[0].request("INTERWORKING_CONNECT " + bssid)
        wait_fail_trigger(dev[0], "GET_ALLOC_FAIL")
    dev[0].remove_cred(id)
    # SIM credential (3GPP cellular network match)
    id = dev[0].add_cred_values({'imsi': "555444-333222111", 'eap': "SIM",
                                 'milenage': "5122250214c33e723a5dd523fc145fc0:981d464c7c52eb6e5036234984ad0bcf:000000000123"})
    interworking_select(dev[0], bssid, freq=2412)
    dev[0].dump_monitor()
    with alloc_fail(dev[0], 1, "interworking_set_hs20_params"):
        dev[0].request("INTERWORKING_CONNECT " + bssid)
        wait_fail_trigger(dev[0], "GET_ALLOC_FAIL")
    dev[0].request("NOTE wpa_config_set_quoted(password;milenage)")
    with alloc_fail(dev[0], 2, "=wpa_config_set_quoted;interworking_connect_3gpp"):
        dev[0].request("INTERWORKING_CONNECT " + bssid)
        wait_fail_trigger(dev[0], "GET_ALLOC_FAIL")
    dev[0].request("NOTE wpa_config_set(eap)")
    with alloc_fail(dev[0], 1, "wpa_config_parse_eap;wpa_config_set;interworking_connect_3gpp"):
        dev[0].request("INTERWORKING_CONNECT " + bssid)
        wait_fail_trigger(dev[0], "GET_ALLOC_FAIL")
    dev[0].request("NOTE set_root_nai:wpa_config_set(identity)")
    with alloc_fail(dev[0], 1, "wpa_config_parse_str;interworking_connect_3gpp"):
        dev[0].request("INTERWORKING_CONNECT " + bssid)
        wait_fail_trigger(dev[0], "GET_ALLOC_FAIL")
    dev[0].remove_cred(id)
    # Roaming consortium credential without an EAP method: the connect
    # attempt is issued just to exercise the error path.
    id = dev[0].add_cred_values({'roaming_consortium': "112233",
                                 'eap': 'TTLS',
                                 'username': "user@example.com",
                                 'password': "password"})
    interworking_select(dev[0], bssid, freq=2412)
    dev[0].dump_monitor()
    dev[0].request("NOTE Interworking: No EAP method set for credential using roaming consortium")
    dev[0].request("INTERWORKING_CONNECT " + bssid)
    dev[0].remove_cred(id)
    # Reconfigure the AP with various NAI Realm method entries and exercise
    # the per-method phase1/phase2 parameter setting failure paths.
    hapd.disable()
    params = hs20_ap_params()
    params['nai_realm'] = "0,example.com,25[3:26]"
    hapd = hostapd.add_ap(apdev[0], params)
    id = dev[0].add_cred_values({'realm': "example.com",
                                 'domain': "example.com",
                                 'username': "hs20-test",
                                 'password': "password"})
    interworking_select(dev[0], bssid, freq=2412)
    dev[0].dump_monitor()
    dev[0].request("NOTE wpa_config_set(PEAP/FAST-phase1)")
    with alloc_fail(dev[0], 1, "wpa_config_parse_str;wpa_config_set;interworking_connect"):
        dev[0].request("INTERWORKING_CONNECT " + bssid)
        wait_fail_trigger(dev[0], "GET_ALLOC_FAIL")
    dev[0].request("NOTE wpa_config_set(PEAP/FAST-pac_interworking)")
    with alloc_fail(dev[0], 2, "wpa_config_parse_str;wpa_config_set;interworking_connect"):
        dev[0].request("INTERWORKING_CONNECT " + bssid)
        wait_fail_trigger(dev[0], "GET_ALLOC_FAIL")
    dev[0].request("NOTE wpa_config_set(PEAP/FAST-phase2)")
    with alloc_fail(dev[0], 3, "wpa_config_parse_str;wpa_config_set;interworking_connect"):
        dev[0].request("INTERWORKING_CONNECT " + bssid)
        wait_fail_trigger(dev[0], "GET_ALLOC_FAIL")
    hapd.disable()
    params = hs20_ap_params()
    params['nai_realm'] = "0,example.com,21"
    hapd = hostapd.add_ap(apdev[0], params)
    interworking_select(dev[0], bssid, freq=2412)
    dev[0].request("NOTE wpa_config_set(TTLS-defaults-phase2)")
    with alloc_fail(dev[0], 1, "wpa_config_parse_str;wpa_config_set;interworking_connect"):
        dev[0].request("INTERWORKING_CONNECT " + bssid)
        wait_fail_trigger(dev[0], "GET_ALLOC_FAIL")
    hapd.disable()
    params = hs20_ap_params()
    params['nai_realm'] = "0,example.com,21[2:3]"
    hapd = hostapd.add_ap(apdev[0], params)
    interworking_select(dev[0], bssid, freq=2412)
    dev[0].request("NOTE wpa_config_set(TTLS-NON_EAP_MSCHAP-phase2)")
    with alloc_fail(dev[0], 1, "wpa_config_parse_str;wpa_config_set;interworking_connect"):
        dev[0].request("INTERWORKING_CONNECT " + bssid)
        wait_fail_trigger(dev[0], "GET_ALLOC_FAIL")
    hapd.disable()
    params = hs20_ap_params()
    params['nai_realm'] = "0,example.com,21[2:2]"
    hapd = hostapd.add_ap(apdev[0], params)
    interworking_select(dev[0], bssid, freq=2412)
    dev[0].request("NOTE wpa_config_set(TTLS-NON_EAP_CHAP-phase2)")
    with alloc_fail(dev[0], 1, "wpa_config_parse_str;wpa_config_set;interworking_connect"):
        dev[0].request("INTERWORKING_CONNECT " + bssid)
        wait_fail_trigger(dev[0], "GET_ALLOC_FAIL")
    hapd.disable()
    params = hs20_ap_params()
    params['nai_realm'] = "0,example.com,21[2:1]"
    hapd = hostapd.add_ap(apdev[0], params)
    interworking_select(dev[0], bssid, freq=2412)
    dev[0].request("NOTE wpa_config_set(TTLS-NON_EAP_PAP-phase2)")
    with alloc_fail(dev[0], 1, "wpa_config_parse_str;wpa_config_set;interworking_connect"):
        dev[0].request("INTERWORKING_CONNECT " + bssid)
        wait_fail_trigger(dev[0], "GET_ALLOC_FAIL")
    hapd.disable()
    params = hs20_ap_params()
    params['nai_realm'] = "0,example.com,21[3:26]"
    hapd = hostapd.add_ap(apdev[0], params)
    interworking_select(dev[0], bssid, freq=2412)
    dev[0].request("NOTE wpa_config_set(TTLS-EAP-MSCHAPV2-phase2)")
    with alloc_fail(dev[0], 1, "wpa_config_parse_str;wpa_config_set;interworking_connect"):
        dev[0].request("INTERWORKING_CONNECT " + bssid)
        wait_fail_trigger(dev[0], "GET_ALLOC_FAIL")
    dev[0].remove_cred(id)
def test_ap_hs20_unexpected(dev, apdev):
    """Unexpected Hotspot 2.0 AP configuration"""
    skip_without_tkip(dev[0])
    skip_without_tkip(dev[1])
    skip_without_tkip(dev[2])
    check_eap_capa(dev[0], "MSCHAPV2")
    bssid = apdev[0]['bssid']
    params = hostapd.wpa_eap_params(ssid="test-hs20-fake")
    params['wpa'] = "3"
    params['wpa_pairwise'] = "TKIP CCMP"
    params['rsn_pairwise'] = "CCMP"
    params['ieee80211w'] = "1"
    # Truncated HS 2.0 Indication element (a full one would be
    # 'dd07506f9a10140000').
    params['vendor_elements'] = 'dd04506f9a10'
    hostapd.add_ap(apdev[0], params)

    # Connect three stations with different security parameter combinations.
    common = dict(key_mgmt="WPA-EAP", eap="TTLS",
                  identity="hs20-test", password="password",
                  ca_cert="auth_serv/ca.pem", phase2="auth=MSCHAPV2",
                  scan_freq="2412")
    per_sta = [dict(pairwise="TKIP"),
               dict(proto="WPA"),
               dict(ieee80211w="1", proto="RSN", pairwise="CCMP")]
    for sta, extra in zip(dev, per_sta):
        sta.hs20_enable()
        sta.scan_for_bss(bssid, freq="2412")
        sta.connect("test-hs20-fake", **common, **extra)
def test_ap_interworking_element_update(dev, apdev):
    """Dynamic Interworking element update"""
    bssid = apdev[0]['bssid']
    conf = hs20_ap_params()
    conf['hessid'] = bssid
    hapd = hostapd.add_ap(apdev[0], conf)
    dev[0].hs20_enable()
    dev[0].scan_for_bss(bssid, freq="2412")
    bss = dev[0].get_bss(bssid)
    logger.info("Before update: " + str(bss))
    if '6b091e0701020000000300' not in bss['ie']:
        raise Exception("Expected Interworking element not seen before update")

    # Update configuration parameters related to Interworking element
    for field, value in [('access_network_type', '2'),
                         ('asra', '1'),
                         ('esr', '1'),
                         ('uesa', '1'),
                         ('venue_group', '2'),
                         ('venue_type', '8')]:
        hapd.set(field, value)
    if "OK" not in hapd.request("UPDATE_BEACON"):
        raise Exception("UPDATE_BEACON failed")

    # Force a fresh scan so the updated element is picked up.
    dev[0].request("BSS_FLUSH 0")
    dev[0].scan_for_bss(bssid, freq="2412", force_scan=True)
    bss = dev[0].get_bss(bssid)
    logger.info("After update: " + str(bss))
    if '6b09f20208020000000300' not in bss['ie']:
        raise Exception("Expected Interworking element not seen after update")
def test_ap_hs20_terms_and_conditions(dev, apdev):
    """Hotspot 2.0 Terms and Conditions signaling"""
    check_eap_capa(dev[0], "MSCHAPV2")
    bssid = apdev[0]['bssid']
    conf = hs20_ap_params()
    conf['hessid'] = bssid
    conf['hs20_t_c_filename'] = 'terms-and-conditions'
    conf['hs20_t_c_timestamp'] = '123456789'
    hostapd.add_ap(apdev[0], conf)

    dev[0].hs20_enable()
    dev[0].connect("test-hs20", proto="RSN", key_mgmt="WPA-EAP", eap="TTLS",
                   identity="hs20-t-c-test", password="password",
                   ca_cert="auth_serv/ca.pem", phase2="auth=MSCHAPV2",
                   ieee80211w='2', scan_freq="2412")
    ev = dev[0].wait_event(["HS20-T-C-ACCEPTANCE"], timeout=5)
    if ev is None:
        raise Exception("Terms and Conditions Acceptance notification not received")
    # The notification has to carry the per-STA T&C server URL.
    url = "https://example.com/t_and_c?addr=%s&ap=123" % dev[0].own_addr()
    if url not in ev:
        raise Exception("Unexpected URL: " + ev)
def test_ap_hs20_terms_and_conditions_coa(dev, apdev):
    """Hotspot 2.0 Terms and Conditions signaling - CoA

    Connects a STA to a T&C-enabled AP, verifies that T&C filtering gets
    enabled for it, and then sends a RADIUS CoA-Request (with the HS 2.0
    vendor specific attribute) to clear the filtering.
    """
    try:
        import pyrad.client
        import pyrad.packet
        import pyrad.dictionary
        import radius_das
    except ImportError:
        raise HwsimSkip("No pyrad modules available")
    check_eap_capa(dev[0], "MSCHAPV2")
    bssid = apdev[0]['bssid']
    params = hs20_ap_params()
    params['hessid'] = bssid
    params['hs20_t_c_filename'] = 'terms-and-conditions'
    params['hs20_t_c_timestamp'] = '123456789'
    # Enable the RADIUS DAS server in hostapd so it accepts the CoA-Request.
    params['own_ip_addr'] = "127.0.0.1"
    params['radius_das_port'] = "3799"
    params['radius_das_client'] = "127.0.0.1 secret"
    params['radius_das_require_event_timestamp'] = "1"
    hapd = hostapd.add_ap(apdev[0], params)
    dev[0].hs20_enable()
    dev[0].connect("test-hs20", proto="RSN", key_mgmt="WPA-EAP", eap="TTLS",
                   identity="hs20-t-c-test", password="password",
                   ca_cert="auth_serv/ca.pem", phase2="auth=MSCHAPV2",
                   ieee80211w='2', scan_freq="2412")
    # Filtering must be enabled for exactly this STA after association.
    ev = hapd.wait_event(["HS20-T-C-FILTERING-ADD"], timeout=5)
    if ev is None:
        raise Exception("Terms and Conditions filtering not enabled")
    if ev.split(' ')[1] != dev[0].own_addr():
        raise Exception("Unexpected STA address for filtering: " + ev)
    ev = dev[0].wait_event(["HS20-T-C-ACCEPTANCE"], timeout=5)
    if ev is None:
        raise Exception("Terms and Conditions Acceptance notification not received")
    url = "https://example.com/t_and_c?addr=%s&ap=123" % dev[0].own_addr()
    if url not in ev:
        raise Exception("Unexpected URL: " + ev)
    # Renamed from 'dict' to avoid shadowing the Python builtin.
    attr_dict = pyrad.dictionary.Dictionary("dictionary.radius")
    srv = pyrad.client.Client(server="127.0.0.1", acctport=3799,
                              secret=b"secret", dict=attr_dict)
    srv.retries = 1
    srv.timeout = 1
    sta = hapd.get_sta(dev[0].own_addr())
    multi_sess_id = sta['authMultiSessionId']
    logger.info("CoA-Request with matching Acct-Session-Id")
    # Vendor specific attribute payload; NOTE(review): presumably the HS 2.0
    # (WFA) T&C Filtering attribute with the filtering bit cleared — confirm.
    vsa = binascii.unhexlify('00009f68090600000000')
    req = radius_das.CoAPacket(dict=attr_dict, secret=b"secret",
                               NAS_IP_Address="127.0.0.1",
                               Acct_Multi_Session_Id=multi_sess_id,
                               Chargeable_User_Identity="hs20-cui",
                               Event_Timestamp=int(time.time()),
                               Vendor_Specific=vsa)
    reply = srv.SendPacket(req)
    logger.debug("RADIUS response from hostapd")
    for i in list(reply.keys()):
        logger.debug("%s: %s" % (i, reply[i]))
    if reply.code != pyrad.packet.CoAACK:
        raise Exception("CoA-Request failed")
    # The acknowledged CoA must result in the filtering being removed.
    ev = hapd.wait_event(["HS20-T-C-FILTERING-REMOVE"], timeout=5)
    if ev is None:
        raise Exception("Terms and Conditions filtering not disabled")
    if ev.split(' ')[1] != dev[0].own_addr():
        raise Exception("Unexpected STA address for filtering: " + ev)
def test_ap_hs20_terms_and_conditions_sql(dev, apdev, params):
    """Hotspot 2.0 Terms and Conditions using SQLite for user DB"""
    addr = dev[0].own_addr()
    # URL template with both the @1@ (MAC address) macro and a trailing
    # query parameter.
    template = "https://example.com/t_and_c?addr=@1@&ap=123"
    expected = "https://example.com/t_and_c?addr=" + addr + "&ap=123"
    run_ap_hs20_terms_and_conditions_sql(dev, apdev, params, template,
                                         expected)
def test_ap_hs20_terms_and_conditions_sql2(dev, apdev, params):
    """Hotspot 2.0 Terms and Conditions using SQLite for user DB"""
    addr = dev[0].own_addr()
    # URL template where the @1@ (MAC address) macro is the final component.
    template = "https://example.com/t_and_c?addr=@1@"
    expected = "https://example.com/t_and_c?addr=" + addr
    run_ap_hs20_terms_and_conditions_sql(dev, apdev, params, template,
                                         expected)
def run_ap_hs20_terms_and_conditions_sql(dev, apdev, params, url_template,
                                         url_expected):
    """Hotspot 2.0 Terms and Conditions flow backed by a SQLite EAP user DB.

    Starts a RADIUS authentication server (apdev[1]) that reads users from a
    freshly created SQLite database and an HS 2.0 AP (apdev[0]) that uses it,
    then verifies:
    - T&C filtering is enabled on first connection and the acceptance URL
      delivered to the station matches url_expected (url_template may contain
      the @1@ placeholder for the STA MAC address)
    - filtering is removed after a DAC_REQUEST CoA exchange and the session
      bookkeeping tables in the database are updated accordingly
    - once the user's stored t_c_timestamp matches the AP's, reconnection does
      not trigger a new acceptance request, while a changed AP timestamp does
    """
    check_eap_capa(dev[0], "MSCHAPV2")
    try:
        import sqlite3
    except ImportError:
        raise HwsimSkip("No sqlite3 module available")
    dbfile = params['prefix'] + ".eap-user.db"
    try:
        # Remove any stale database left over from a previous run
        os.remove(dbfile)
    except OSError:
        pass
    con = sqlite3.connect(dbfile)
    with con:
        cur = con.cursor()
        cur.execute("CREATE TABLE users(identity TEXT PRIMARY KEY, methods TEXT, password TEXT, remediation TEXT, phase2 INTEGER, t_c_timestamp INTEGER)")
        cur.execute("CREATE TABLE wildcards(identity TEXT PRIMARY KEY, methods TEXT)")
        cur.execute("INSERT INTO users(identity,methods,password,phase2) VALUES ('user-mschapv2','TTLS-MSCHAPV2','password',1)")
        cur.execute("INSERT INTO wildcards(identity,methods) VALUES ('','TTLS,TLS')")
        cur.execute("CREATE TABLE authlog(timestamp TEXT, session TEXT, nas_ip TEXT, username TEXT, note TEXT)")
        cur.execute("CREATE TABLE pending_tc(mac_addr TEXT PRIMARY KEY, identity TEXT)")
        cur.execute("CREATE TABLE current_sessions(mac_addr TEXT PRIMARY KEY, identity TEXT, start_time TEXT, nas TEXT, hs20_t_c_filtering BOOLEAN, waiting_coa_ack BOOLEAN, coa_ack_received BOOLEAN)")
    try:
        # Dedicated RADIUS authentication server using the SQLite user DB
        params = {"ssid": "as", "beacon_int": "2000",
                  "radius_server_clients": "auth_serv/radius_clients.conf",
                  "radius_server_auth_port": '18128',
                  "eap_server": "1",
                  "eap_user_file": "sqlite:" + dbfile,
                  "ca_cert": "auth_serv/ca.pem",
                  "server_cert": "auth_serv/server.pem",
                  "private_key": "auth_serv/server.key"}
        params['hs20_t_c_server_url'] = url_template
        authsrv = hostapd.add_ap(apdev[1], params)

        # HS 2.0 AP pointing at the authentication server above
        bssid = apdev[0]['bssid']
        params = hs20_ap_params()
        params['auth_server_port'] = "18128"
        params['hs20_t_c_filename'] = 'terms-and-conditions'
        params['hs20_t_c_timestamp'] = '123456789'
        params['own_ip_addr'] = "127.0.0.1"
        params['radius_das_port'] = "3799"
        params['radius_das_client'] = "127.0.0.1 radius"
        params['radius_das_require_event_timestamp'] = "1"
        params['disable_pmksa_caching'] = '1'
        hapd = hostapd.add_ap(apdev[0], params)

        dev[0].request("SET pmf 1")
        dev[0].hs20_enable()
        dev[0].add_cred_values({'realm': "example.com",
                                'username': "user-mschapv2",
                                'password': "password",
                                'ca_cert': "auth_serv/ca.pem"})
        interworking_select(dev[0], bssid, freq="2412")
        interworking_connect(dev[0], bssid, "TTLS")

        # First connection: filtering must be enabled and the acceptance URL
        # delivered to the station.
        ev = hapd.wait_event(["HS20-T-C-FILTERING-ADD"], timeout=5)
        if ev is None:
            raise Exception("Terms and Conditions filtering not enabled")
        hapd.dump_monitor()

        ev = dev[0].wait_event(["HS20-T-C-ACCEPTANCE"], timeout=5)
        if ev is None:
            raise Exception("Terms and Conditions Acceptance notification not received")
        url = ev.split(' ')[1]
        if url != url_expected:
            raise Exception("Unexpected URL delivered to the client: %s (expected %s)" % (url, url_expected))
        dev[0].dump_monitor()

        with con:
            cur = con.cursor()
            cur.execute("SELECT * from current_sessions")
            rows = cur.fetchall()
            if len(rows) != 1:
                raise Exception("Unexpected number of rows in current_sessions (%d; expected %d)" % (len(rows), 1))
            logger.info("current_sessions: " + str(rows))

        # Clear the filtering via a CoA request from the authentication server
        if "OK" not in authsrv.request("DAC_REQUEST coa %s t_c_clear" % dev[0].own_addr()):
            raise Exception("DAC_REQUEST failed")
        ev = hapd.wait_event(["HS20-T-C-FILTERING-REMOVE"], timeout=5)
        if ev is None:
            raise Exception("Terms and Conditions filtering not disabled")
        if ev.split(' ')[1] != dev[0].own_addr():
            raise Exception("Unexpected STA address for filtering: " + ev)
        # Give the CoA exchange a moment to update the database
        time.sleep(0.2)

        with con:
            cur = con.cursor()
            cur.execute("SELECT * from current_sessions")
            rows = cur.fetchall()
            if len(rows) != 1:
                raise Exception("Unexpected number of rows in current_sessions (%d; expected %d)" % (len(rows), 1))
            logger.info("current_sessions: " + str(rows))
            # hs20_t_c_filtering and waiting_coa_ack cleared,
            # coa_ack_received set
            if rows[0][4] != 0 or rows[0][5] != 0 or rows[0][6] != 1:
                raise Exception("Unexpected current_sessions information after CoA-ACK")

        dev[0].request("DISCONNECT")
        dev[0].wait_disconnected()
        dev[0].dump_monitor()

        # Simulate T&C server operation on user reading the updated version
        with con:
            cur = con.cursor()
            # Parameterized query instead of building SQL by concatenating the
            # MAC address into the statement
            cur.execute("SELECT identity FROM pending_tc WHERE mac_addr=?",
                        (dev[0].own_addr(),))
            rows = cur.fetchall()
            if len(rows) != 1:
                raise Exception("No pending_tc entry found")
            if rows[0][0] != 'user-mschapv2':
                raise Exception("Unexpected pending_tc identity value")
            cur.execute("UPDATE users SET t_c_timestamp=123456789 WHERE identity='user-mschapv2'")

        # Timestamp now matches: no new acceptance flow expected
        dev[0].request("RECONNECT")
        dev[0].wait_connected()
        ev = hapd.wait_event(["HS20-T-C-FILTERING-ADD"], timeout=0.1)
        if ev is not None:
            raise Exception("Terms and Conditions filtering enabled unexpectedly")
        hapd.dump_monitor()
        ev = dev[0].wait_event(["HS20-T-C-ACCEPTANCE"], timeout=0.1)
        if ev is not None:
            raise Exception("Unexpected Terms and Conditions Acceptance notification")
        dev[0].dump_monitor()

        dev[0].request("DISCONNECT")
        dev[0].wait_disconnected()
        dev[0].dump_monitor()

        # New T&C available
        hapd.set('hs20_t_c_timestamp', '123456790')

        dev[0].request("RECONNECT")
        dev[0].wait_connected()
        ev = hapd.wait_event(["HS20-T-C-FILTERING-ADD"], timeout=5)
        if ev is None:
            raise Exception("Terms and Conditions filtering not enabled")
        hapd.dump_monitor()
        ev = dev[0].wait_event(["HS20-T-C-ACCEPTANCE"], timeout=5)
        if ev is None:
            raise Exception("Terms and Conditions Acceptance notification not received (2)")
        dev[0].dump_monitor()

        dev[0].request("DISCONNECT")
        dev[0].wait_disconnected()
        dev[0].dump_monitor()

        # Simulate T&C server operation on user reading the updated version
        with con:
            cur = con.cursor()
            cur.execute("UPDATE users SET t_c_timestamp=123456790 WHERE identity='user-mschapv2'")

        dev[0].request("RECONNECT")
        dev[0].wait_connected()
        ev = hapd.wait_event(["HS20-T-C-FILTERING-ADD"], timeout=0.1)
        if ev is not None:
            raise Exception("Terms and Conditions filtering enabled unexpectedly")
        hapd.dump_monitor()
        ev = dev[0].wait_event(["HS20-T-C-ACCEPTANCE"], timeout=0.1)
        if ev is not None:
            raise Exception("Unexpected Terms and Conditions Acceptance notification (2)")
        dev[0].dump_monitor()
    finally:
        con.close()
        os.remove(dbfile)
        dev[0].request("SET pmf 0")
def test_ap_hs20_release_number_1(dev, apdev):
    """Hotspot 2.0 with AP claiming support for Release 1"""
    run_ap_hs20_release_number(dev, apdev, release=1)
def test_ap_hs20_release_number_2(dev, apdev):
    """Hotspot 2.0 with AP claiming support for Release 2"""
    run_ap_hs20_release_number(dev, apdev, release=2)
def test_ap_hs20_release_number_3(dev, apdev):
    """Hotspot 2.0 with AP claiming support for Release 3"""
    run_ap_hs20_release_number(dev, apdev, release=3)
def run_ap_hs20_release_number(dev, apdev, release):
    """Connect to an AP advertising the given HS 2.0 release and verify that
    the station reports the same release number in its status."""
    check_eap_capa(dev[0], "MSCHAPV2")
    eap_test(dev[0], apdev[0], "21[3:26][6:7][99:99]", "TTLS", "user",
             release=release)
    reported = dev[0].get_status_field('hs20')
    if str(release) != reported:
        raise Exception("Unexpected release number indicated: " + reported)
def test_ap_hs20_missing_pmf(dev, apdev):
    """Hotspot 2.0 connection attempt without PMF"""
    check_eap_capa(dev[0], "MSCHAPV2")
    bssid = apdev[0]['bssid']
    params = hs20_ap_params()
    params['hessid'] = bssid
    params['disable_dgaf'] = '1'
    hostapd.add_ap(apdev[0], params)
    dev[0].hs20_enable()
    # ieee80211w="0" disables PMF on the station side; the HS 2.0 AP is
    # expected to reject the association attempt because of that.
    dev[0].connect("test-hs20", proto="RSN", key_mgmt="WPA-EAP", eap="TTLS",
                   ieee80211w="0",
                   identity="hs20-test", password="password",
                   ca_cert="auth_serv/ca.pem", phase2="auth=MSCHAPV2",
                   scan_freq="2412", update_identifier="54321",
                   roaming_consortium_selection="1020304050",
                   wait_connect=False)
    ev = dev[0].wait_event(["CTRL-EVENT-ASSOC-REJECT"], timeout=10)
    # Stop further connection attempts regardless of the outcome
    dev[0].request("DISCONNECT")
    if ev is None:
        raise Exception("Association rejection not reported")
    # Expect IEEE 802.11 status code 31 (robust management frame policy
    # violation) as the rejection reason
    if "status_code=31" not in ev:
        raise Exception("Unexpected rejection reason: " + ev)
def test_ap_hs20_open_osu_association(dev, apdev):
    """Hotspot 2.0 open OSU association"""
    try:
        run_ap_hs20_open_osu_association(dev, apdev)
    finally:
        # Make sure the extra vendor element added by the test body does not
        # leak into subsequent test cases.
        dev[0].request("VENDOR_ELEM_REMOVE 13 *")
def run_ap_hs20_open_osu_association(dev, apdev):
    """Associate with an open OSU SSID, then retry with an HS 2.0 vendor IE."""
    ssid = "HS 2.0 OSU open"
    hostapd.add_ap(apdev[0], {"ssid": ssid})
    wpas = dev[0]
    wpas.connect(ssid, key_mgmt="NONE", scan_freq="2412")
    wpas.request("REMOVE_NETWORK all")
    wpas.wait_disconnected()
    wpas.dump_monitor()
    # Test with unexpected Hotspot 2.0 Indication element in Assoc Req
    wpas.request("VENDOR_ELEM_ADD 13 dd07506f9a10220000")
    wpas.connect(ssid, key_mgmt="NONE", scan_freq="2412")
| 41.356077
| 360
| 0.610023
| 36,039
| 266,085
| 4.320209
| 0.036655
| 0.0344
| 0.019641
| 0.018343
| 0.865918
| 0.84367
| 0.813695
| 0.779333
| 0.746358
| 0.715542
| 0
| 0.062192
| 0.243851
| 266,085
| 6,433
| 361
| 41.362506
| 0.711645
| 0.046091
| 0
| 0.724861
| 0
| 0.006122
| 0.278412
| 0.047806
| 0
| 0
| 0.000569
| 0.000155
| 0
| 1
| 0.03859
| false
| 0.021336
| 0.005566
| 0
| 0.048237
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
8a0a9bb6735c4b2e6e4f8d77d8ae409a70294e45
| 13,940
|
py
|
Python
|
test.py
|
jackqu7/thomas
|
9fce3801beb96b9bd2e483a5f82953cb8ce473a1
|
[
"MIT"
] | null | null | null |
test.py
|
jackqu7/thomas
|
9fce3801beb96b9bd2e483a5f82953cb8ce473a1
|
[
"MIT"
] | null | null | null |
test.py
|
jackqu7/thomas
|
9fce3801beb96b9bd2e483a5f82953cb8ce473a1
|
[
"MIT"
] | null | null | null |
import unittest
from thomas.berth import (
BerthController, PriorityBerthController, FringeBerthController)
def _berth(**params):
params.setdefault('id', 'UF-Stevenage')
params.setdefault('number', 'abc')
params.setdefault('desc', 'desc')
params.setdefault('distance', 0)
return params
class BerthControllerTests(unittest.TestCase):
    """Tests for the basic single-berth controller."""

    def test_is_different(self):
        controller = BerthController(_berth())
        moorgate_a = {'headcode': '2F29'}
        moorgate_b = {'headcode': '2F29'}
        kings_cross_a = {'headcode': '1P29'}
        kings_cross_b = {'headcode': '1P29'}

        # Different headcodes differ, as does a train vs no train.
        assert controller._is_different(moorgate_a, kings_cross_a)
        assert controller._is_different(kings_cross_a, moorgate_a)
        assert controller._is_different(moorgate_a, None)
        assert controller._is_different(None, moorgate_a)
        # Equal headcodes compare as the same train.
        assert not controller._is_different(moorgate_a, moorgate_b)
        assert not controller._is_different(kings_cross_b, kings_cross_a)

    def test_set(self):
        controller = BerthController(_berth(number='abc'))
        moorgate = {'headcode': '2F29'}
        kings_cross = {'headcode': '1P29'}

        # Updates for other berths are ignored.
        assert controller.set('xyz', moorgate) is None
        assert controller.get_current_train() is None
        # A train arriving in our berth is stored and reported as a change.
        assert controller.set('abc', kings_cross) is True
        assert controller.get_current_train() == kings_cross
        # Re-setting the identical train is a no-op.
        assert controller.set('abc', kings_cross) is None
        assert controller.get_current_train() == kings_cross
        # A different train replaces the current one.
        assert controller.set('abc', moorgate) is True
        assert controller.get_current_train() == moorgate
        # Clearing the berth is also reported as a change.
        assert controller.set('abc', None) is True
        assert controller.get_current_train() is None
class PriorityBerthControllerTests(unittest.TestCase):
    """Tests for the controller that prefers a priority berth over an alt one."""

    def test_set(self):
        controller = PriorityBerthController(_berth(number='abc'),
                                             _berth(number='efg'))
        moorgate = {'headcode': '2F29'}
        kings_cross = {'headcode': '1P29'}

        # Unknown berth ids are ignored.
        assert controller.set('xyz', moorgate) is None
        assert controller.get_current_train() is None
        # A train in the alternative berth shows while priority is empty.
        assert controller.set('efg', kings_cross) is True
        assert controller.get_current_train() == kings_cross
        # The identical train again is a no-op.
        assert controller.set('efg', kings_cross) is None
        assert controller.get_current_train() == kings_cross
        # A train in the priority berth takes over the display.
        assert controller.set('abc', moorgate) is True
        assert controller.get_current_train() == moorgate
        # Clearing the priority berth falls back to the alt-berth train.
        assert controller.set('abc', None) is True
        assert controller.get_current_train() == kings_cross
        # Clearing both berths leaves nothing to show.
        assert controller.set('efg', None) is True
        assert controller.get_current_train() is None
        # The priority berth alone is displayed.
        assert controller.set('abc', moorgate) is True
        assert controller.get_current_train() == moorgate
        # Alt-berth updates do not disturb a populated priority berth.
        assert controller.set('efg', kings_cross) is None
        assert controller.get_current_train() == moorgate
class FringeBerthControllerTests(unittest.TestCase):
    """Tests for the controller that cycles through fringe berths while the
    main berth is empty.

    The inline diagrams read: berth occupancy before the call (NEW marks the
    berth being set, NONE marks it being cleared) followed by "= X", the
    train expected to be displayed afterwards.
    """

    def test_set(self):
        """Main-berth train wins; otherwise the set fringe train may show."""
        train_a = {
            'headcode': 'AAAA'
        }
        train_b = {
            'headcode': 'BBBB'
        }
        train_c = {
            'headcode': 'CCCC'
        }
        b_main = _berth(number='MAIN')
        b_f1 = _berth(number='F1', distance=1)
        b_f2 = _berth(number='F2', distance=2)
        b_f3 = _berth(number='F3', distance=3)
        b_f4 = _berth(number='F4', distance=4)

        # MAIN F1 F2 F3
        # NEW - - - = NEW
        berth = FringeBerthController(b_main, b_f1, b_f2, b_f3, b_f4)
        assert berth.set('MAIN', train_a) is True
        current_train_exp = dict(train_a, is_fringe=False)
        assert berth.get_current_train() == current_train_exp

        # MAIN F1 F2 F3
        # - - - NEW = NEW
        berth = FringeBerthController(b_main, b_f1, b_f2, b_f3, b_f4)
        assert berth.set('F3', train_a) is True
        # distance_percent is the fringe berth distance over the highest
        # configured distance (here 3 of 4 -> 0.75)
        current_train_exp = dict(
            train_a,
            is_fringe=True,
            berth=b_f3,
            distance_percent=0.75)
        assert berth.get_current_train() == current_train_exp
        assert berth.fringe_trains == {'F3': current_train_exp}

        # MAIN F1 F2 F3
        # A - NEW - = A
        berth = FringeBerthController(b_main, b_f1, b_f2, b_f3, b_f4)
        berth.set('MAIN', train_a)
        assert berth.set('F2', train_b) is None
        current_train_exp = dict(train_a, is_fringe=False)
        f2_exp = dict(
            train_b,
            is_fringe=True,
            berth=b_f2,
            distance_percent=0.5)
        assert berth.get_current_train() == current_train_exp
        assert berth.fringe_trains == {'F2': f2_exp}

        # MAIN F1 F2 F3
        # NEW - A - = NEW
        berth = FringeBerthController(b_main, b_f1, b_f2, b_f3, b_f4)
        berth.set('F2', train_a)
        assert berth.set('MAIN', train_b) is True
        current_train_exp = dict(train_b, is_fringe=False)
        assert berth.get_current_train() == current_train_exp

        # MAIN F1 F2 F3
        # - NEW - A = A
        berth = FringeBerthController(b_main, b_f1, b_f2, b_f3, b_f4)
        berth.set('F3', train_a)
        assert berth.set('F1', train_b) is None
        f1_exp = dict(
            train_b,
            is_fringe=True,
            berth=b_f1,
            distance_percent=0.25)
        current_train_exp = dict(
            train_a,
            is_fringe=True,
            berth=b_f3,
            distance_percent=0.75)
        assert berth.get_current_train() == current_train_exp
        assert berth.fringe_trains == {
            'F1': f1_exp,
            'F3': current_train_exp}

        # MAIN F1 F2 F3
        # B NEW - A = B
        berth = FringeBerthController(b_main, b_f1, b_f2, b_f3, b_f4)
        berth.set('F3', train_a)
        berth.set('MAIN', train_b)
        assert berth.set('F1', train_c) is None
        current_train_exp = dict(train_b, is_fringe=False)
        f1_exp = dict(
            train_c,
            is_fringe=True,
            berth=b_f1,
            distance_percent=0.25)
        f3_exp = dict(
            train_a,
            is_fringe=True,
            berth=b_f3,
            distance_percent=0.75)
        assert berth.get_current_train() == current_train_exp
        assert berth.fringe_trains == {
            'F1': f1_exp,
            'F3': f3_exp}

        # MAIN F1 F2 F3
        # - NONE - B = B
        berth = FringeBerthController(b_main, b_f1, b_f2, b_f3, b_f4)
        berth.set('F3', train_b)
        berth.set('F1', train_a)
        assert berth.set('F1', None) is None
        f3_exp = dict(
            train_b,
            is_fringe=True,
            berth=b_f3,
            distance_percent=0.75)
        assert berth.get_current_train() == f3_exp
        assert berth.fringe_trains == {
            'F1': None,
            'F3': f3_exp}

        # MAIN F1 F2 F3
        # NONE A - B = B
        berth = FringeBerthController(b_main, b_f1, b_f2, b_f3, b_f4)
        berth.set('F3', train_b)
        berth.set('F1', train_a)
        berth.set('MAIN', train_c)
        assert berth.set('MAIN', None) is True
        current_train_exp = dict(
            train_b,
            is_fringe=True,
            berth=b_f3,
            distance_percent=0.75)
        assert berth.get_current_train() == current_train_exp

    def test_tick(self):
        """tick() cycles through occupied fringe berths when MAIN is empty."""
        train_a = {
            'headcode': 'AAAA'
        }
        train_b = {
            'headcode': 'BBBB'
        }
        train_c = {
            'headcode': 'CCCC'
        }
        b_main = _berth(number='MAIN')
        b_f1 = _berth(number='F1', distance=1)
        b_f2 = _berth(number='F2', distance=2)
        b_f3 = _berth(number='F3', distance=3)
        b_f4 = _berth(number='F4', distance=4)

        # Train in the main berth -> do nothing on tick
        berth = FringeBerthController(b_main, b_f1, b_f2, b_f3, b_f4)
        berth.set('MAIN', train_a)
        berth.set('F1', train_b)
        berth.tick()
        current_train_exp = dict(train_a, is_fringe=False)
        assert berth.get_current_train() == current_train_exp

        # No train in the main berth -> tick through fringe berths
        berth = FringeBerthController(b_main, b_f1, b_f2, b_f3, b_f4)
        berth.set('F1', train_a)
        berth.set('F3', train_b)
        current_train_exp = dict(
            train_a,
            is_fringe=True,
            berth=b_f1,
            distance_percent=0.25)
        assert berth.get_current_train() == current_train_exp
        berth.tick()
        current_train_exp = dict(
            train_b,
            is_fringe=True,
            berth=b_f3,
            distance_percent=0.75)
        assert berth.get_current_train() == current_train_exp
        berth.tick()
        current_train_exp = dict(
            train_a,
            is_fringe=True,
            berth=b_f1,
            distance_percent=0.25)
        assert berth.get_current_train() == current_train_exp

        # Add a new train to F2 -> show it on next tick and show F3 train later
        berth.set('F2', train_c)
        assert berth.get_current_train() == current_train_exp
        berth.tick()
        current_train_exp = dict(
            train_c,
            is_fringe=True,
            berth=b_f2,
            distance_percent=0.5)
        assert berth.get_current_train() == current_train_exp
        berth.tick()
        current_train_exp = dict(
            train_b,
            is_fringe=True,
            berth=b_f3,
            distance_percent=0.75)
        assert berth.get_current_train() == current_train_exp
        berth.tick()
        current_train_exp = dict(
            train_a,
            is_fringe=True,
            berth=b_f1,
            distance_percent=0.25)
        assert berth.get_current_train() == current_train_exp

    def test_is_different(self):
        """Fringe comparison also considers is_fringe and the berth itself."""
        b_main = _berth(number='MAIN')
        b_f1 = _berth(number='F1', distance=1)
        b_f2 = _berth(number='F2', distance=2)
        berth = FringeBerthController(b_main, b_f1, b_f2)
        moorgate1 = {
            'headcode': '2F29'
        }
        moorgate2 = {
            'headcode': '2F29'
        }
        kings_cross1 = {
            'headcode': '1P29',
            'is_fringe': False
        }
        kings_cross2 = {
            'headcode': '1P29',
            'is_fringe': True,
        }
        f1_train = {
            'headcode': '1P29',
            'is_fringe': True,
            'berth': b_f1,
        }
        f1_train2 = {
            'headcode': '1P29',
            'is_fringe': True,
            'berth': b_f1,
        }
        f2_train = {
            'headcode': '1P29',
            'is_fringe': True,
            'berth': b_f2,
        }
        assert berth._is_different(moorgate1, kings_cross1)
        assert berth._is_different(kings_cross1, moorgate1)
        assert berth._is_different(moorgate1, None)
        assert berth._is_different(None, moorgate1)
        assert not berth._is_different(moorgate1, moorgate2)
        # Same headcode but different is_fringe flag counts as different
        assert berth._is_different(kings_cross2, kings_cross1)
        assert not berth._is_different(f1_train, f1_train2)
        # Same headcode but different fringe berth counts as different
        assert berth._is_different(f1_train, f2_train)

    def test_set_and_tick(self):
        """Interleaved set() and tick() keep the rotation order consistent."""
        train_a = {
            'headcode': 'AAAA'
        }
        train_b = {
            'headcode': 'BBBB'
        }
        train_c = {
            'headcode': 'CCCC'
        }
        train_d = {
            'headcode': 'DDDD'
        }
        b_main = _berth(number='MAIN')
        b_f1 = _berth(number='F1', distance=1)
        b_f2 = _berth(number='F2', distance=2)
        b_f3 = _berth(number='F3', distance=3)
        b_f4 = _berth(number='F4', distance=4)
        berth = FringeBerthController(b_main, b_f1, b_f2, b_f3, b_f4)
        berth.set('F2', train_a)
        berth.set('F3', train_b)
        berth.set('F4', train_c)
        assert berth.get_current_train()['headcode'] == 'AAAA'
        berth.tick()
        assert berth.get_current_train()['headcode'] == 'BBBB'
        berth.set('F1', train_d)
        berth.tick()
        assert berth.get_current_train()['headcode'] == 'CCCC'
        berth.set('F3', None)
        assert berth.get_current_train()['headcode'] == 'CCCC'
        berth.tick()
        assert berth.get_current_train()['headcode'] == 'DDDD'
        berth.set('F1', None)
        assert berth.get_current_train()['headcode'] == 'AAAA'
        berth.tick()
        assert berth.get_current_train()['headcode'] == 'CCCC'
        berth.set('F1', train_d)
        assert berth.get_current_train()['headcode'] == 'CCCC'
        berth.tick()
        assert berth.get_current_train()['headcode'] == 'DDDD'
        berth.tick()
        assert berth.get_current_train()['headcode'] == 'AAAA'
        berth.set('F1', None)
        berth.set('F2', None)
        berth.set('F3', None)
        berth.set('F4', None)
        berth.tick()
        # Everything cleared: nothing to display and no rotation position
        assert berth.get_current_train() is None
        assert berth.current_fringe_berth_id is None
        berth.tick()
        berth.set('F3', train_a)
        assert berth.get_current_train()['headcode'] == 'AAAA'
        berth.tick()
        assert berth.get_current_train()['headcode'] == 'AAAA'
| 32.343387
| 83
| 0.564849
| 1,694
| 13,940
| 4.385478
| 0.066706
| 0.116974
| 0.079149
| 0.118724
| 0.856374
| 0.813165
| 0.79405
| 0.755418
| 0.722574
| 0.700767
| 0
| 0.034571
| 0.32769
| 13,940
| 430
| 84
| 32.418605
| 0.758109
| 0.074964
| 0
| 0.762463
| 0
| 0
| 0.057145
| 0
| 0
| 0
| 0
| 0
| 0.243402
| 1
| 0.02346
| false
| 0
| 0.005865
| 0
| 0.041056
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
8a2d37bfd1970c33cd012d5c708d49a9c45fe1d3
| 14,189
|
py
|
Python
|
boto3_type_annotations_with_docs/boto3_type_annotations/sns/paginator.py
|
cowboygneox/boto3_type_annotations
|
450dce1de4e066b939de7eac2ec560ed1a7ddaa2
|
[
"MIT"
] | 119
|
2018-12-01T18:20:57.000Z
|
2022-02-02T10:31:29.000Z
|
boto3_type_annotations_with_docs/boto3_type_annotations/sns/paginator.py
|
cowboygneox/boto3_type_annotations
|
450dce1de4e066b939de7eac2ec560ed1a7ddaa2
|
[
"MIT"
] | 15
|
2018-11-16T00:16:44.000Z
|
2021-11-13T03:44:18.000Z
|
boto3_type_annotations_with_docs/boto3_type_annotations/sns/paginator.py
|
cowboygneox/boto3_type_annotations
|
450dce1de4e066b939de7eac2ec560ed1a7ddaa2
|
[
"MIT"
] | 11
|
2019-05-06T05:26:51.000Z
|
2021-09-28T15:27:59.000Z
|
from typing import Dict
from botocore.paginate import Paginator
class ListEndpointsByPlatformApplication(Paginator):
    """Type-annotation stub for the SNS ListEndpointsByPlatformApplication paginator."""

    def paginate(self, PlatformApplicationArn: str, PaginationConfig: Dict = None) -> Dict:
        """Create an iterator that paginates through responses from
        :py:meth:`SNS.Client.list_endpoints_by_platform_application`.

        See also: `AWS API Documentation
        <https://docs.aws.amazon.com/goto/WebAPI/sns-2010-03-31/ListEndpointsByPlatformApplication>`_

        Each response page is a dict with an ``Endpoints`` list; every entry
        carries an ``EndpointArn`` string and a string-to-string
        ``Attributes`` map for the mobile app/device endpoint.

        :type PlatformApplicationArn: string
        :param PlatformApplicationArn: **[REQUIRED]** ARN of the platform
            application whose endpoints are listed.
        :type PaginationConfig: dict
        :param PaginationConfig: Pagination controls: ``MaxItems`` (total
            number of items to return; a ``NextToken`` is provided when more
            are available), ``PageSize`` (size of each page) and
            ``StartingToken`` (the ``NextToken`` from a previous response).
        :rtype: dict
        :returns:
        """
        # Annotation stub only; the real implementation lives in botocore.
        pass
class ListPhoneNumbersOptedOut(Paginator):
    """Type-annotation stub for the SNS ListPhoneNumbersOptedOut paginator."""

    def paginate(self, PaginationConfig: Dict = None) -> Dict:
        """Create an iterator that paginates through responses from
        :py:meth:`SNS.Client.list_phone_numbers_opted_out`.

        See also: `AWS API Documentation
        <https://docs.aws.amazon.com/goto/WebAPI/sns-2010-03-31/ListPhoneNumbersOptedOut>`_

        Each response page is a dict with a ``phoneNumbers`` list of strings
        (numbers opted out of receiving SMS messages, up to 100 per page) and
        a ``NextToken`` string used to resume pagination.

        :type PaginationConfig: dict
        :param PaginationConfig: Pagination controls: ``MaxItems`` (total
            number of items to return; a ``NextToken`` is provided when more
            are available), ``PageSize`` (size of each page) and
            ``StartingToken`` (the ``NextToken`` from a previous response).
        :rtype: dict
        :returns:
        """
        # Annotation stub only; the real implementation lives in botocore.
        pass
class ListPlatformApplications(Paginator):
    """Type-annotation stub for the SNS ListPlatformApplications paginator."""

    def paginate(self, PaginationConfig: Dict = None) -> Dict:
        """Create an iterator that paginates through responses from
        :py:meth:`SNS.Client.list_platform_applications`.

        See also: `AWS API Documentation
        <https://docs.aws.amazon.com/goto/WebAPI/sns-2010-03-31/ListPlatformApplications>`_

        Each response page is a dict with a ``PlatformApplications`` list;
        every entry carries a ``PlatformApplicationArn`` string and a
        string-to-string ``Attributes`` map for the platform application.

        :type PaginationConfig: dict
        :param PaginationConfig: Pagination controls: ``MaxItems`` (total
            number of items to return; a ``NextToken`` is provided when more
            are available), ``PageSize`` (size of each page) and
            ``StartingToken`` (the ``NextToken`` from a previous response).
        :rtype: dict
        :returns:
        """
        # Annotation stub only; the real implementation lives in botocore.
        pass
class ListSubscriptions(Paginator):
    """Type-annotation stub for the SNS ListSubscriptions paginator."""

    def paginate(self, PaginationConfig: Dict = None) -> Dict:
        """Create an iterator that paginates through responses from
        :py:meth:`SNS.Client.list_subscriptions`.

        See also: `AWS API Documentation
        <https://docs.aws.amazon.com/goto/WebAPI/sns-2010-03-31/ListSubscriptions>`_

        Each response page is a dict with a ``Subscriptions`` list; every
        entry describes one Amazon SNS subscription via the string fields
        ``SubscriptionArn``, ``Owner``, ``Protocol``, ``Endpoint`` (format
        depends on the protocol) and ``TopicArn``.

        :type PaginationConfig: dict
        :param PaginationConfig: Pagination controls: ``MaxItems`` (total
            number of items to return; a ``NextToken`` is provided when more
            are available), ``PageSize`` (size of each page) and
            ``StartingToken`` (the ``NextToken`` from a previous response).
        :rtype: dict
        :returns:
        """
        # Annotation stub only; the real implementation lives in botocore.
        pass
class ListSubscriptionsByTopic(Paginator):
    """Type-annotation stub for the SNS ListSubscriptionsByTopic paginator."""

    def paginate(self, TopicArn: str, PaginationConfig: Dict = None) -> Dict:
        """Create an iterator that paginates through responses from
        :py:meth:`SNS.Client.list_subscriptions_by_topic`.

        See also: `AWS API Documentation
        <https://docs.aws.amazon.com/goto/WebAPI/sns-2010-03-31/ListSubscriptionsByTopic>`_

        Each response page is a dict with a ``Subscriptions`` list; every
        entry describes one Amazon SNS subscription via the string fields
        ``SubscriptionArn``, ``Owner``, ``Protocol``, ``Endpoint`` (format
        depends on the protocol) and ``TopicArn``.

        :type TopicArn: string
        :param TopicArn: **[REQUIRED]** ARN of the topic for which you wish
            to find subscriptions.
        :type PaginationConfig: dict
        :param PaginationConfig: Pagination controls: ``MaxItems`` (total
            number of items to return; a ``NextToken`` is provided when more
            are available), ``PageSize`` (size of each page) and
            ``StartingToken`` (the ``NextToken`` from a previous response).
        :rtype: dict
        :returns:
        """
        # Annotation stub only; the real implementation lives in botocore.
        pass
class ListTopics(Paginator):
    """Type-annotation stub for the SNS ListTopics paginator."""

    def paginate(self, PaginationConfig: Dict = None) -> Dict:
        """Create an iterator that paginates through responses from
        :py:meth:`SNS.Client.list_topics`.

        See also: `AWS API Documentation
        <https://docs.aws.amazon.com/goto/WebAPI/sns-2010-03-31/ListTopics>`_

        Each response page is a dict with a ``Topics`` list; every entry is a
        dict wrapping a single ``TopicArn`` string (use ``GetTopicAttributes``
        to retrieve a topic's attributes).

        :type PaginationConfig: dict
        :param PaginationConfig: Pagination controls: ``MaxItems`` (total
            number of items to return; a ``NextToken`` is provided when more
            are available), ``PageSize`` (size of each page) and
            ``StartingToken`` (the ``NextToken`` from a previous response).
        :rtype: dict
        :returns:
        """
        # Annotation stub only; the real implementation lives in botocore.
        pass
| 40.89049
| 224
| 0.516738
| 1,219
| 14,189
| 5.992617
| 0.129614
| 0.032854
| 0.022998
| 0.026283
| 0.758795
| 0.753046
| 0.73744
| 0.731143
| 0.731143
| 0.714716
| 0
| 0.009932
| 0.38262
| 14,189
| 346
| 225
| 41.008671
| 0.823973
| 0.761505
| 0
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.3
| false
| 0.3
| 0.1
| 0
| 0.7
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 8
|
8a2ddfa4a13f86208179c4578191067b70ce71c5
| 43,478
|
py
|
Python
|
tests/middleware_exceptions/tests.py
|
benjaoming/django
|
6dbe979b4d9396e1b307c7d27388c97c13beb21c
|
[
"BSD-3-Clause"
] | 1
|
2015-11-08T16:39:07.000Z
|
2015-11-08T16:39:07.000Z
|
tests/middleware_exceptions/tests.py
|
benjaoming/django
|
6dbe979b4d9396e1b307c7d27388c97c13beb21c
|
[
"BSD-3-Clause"
] | null | null | null |
tests/middleware_exceptions/tests.py
|
benjaoming/django
|
6dbe979b4d9396e1b307c7d27388c97c13beb21c
|
[
"BSD-3-Clause"
] | null | null | null |
import sys
from django.conf import settings
from django.core.exceptions import MiddlewareNotUsed
from django.core.signals import got_request_exception
from django.http import HttpResponse
from django.template.response import TemplateResponse
from django.template import Template
from django.test import RequestFactory, TestCase, override_settings
from django.test.utils import patch_logger
class TestException(Exception):
    """Raised on purpose by the ``Bad*`` middleware classes below so tests
    can distinguish deliberate failures from accidental ones."""
    pass
# A middleware base class that tracks which methods have been called
class TestMiddleware(object):
    """Base middleware that records which of its hooks were invoked.

    Every ``process_*`` hook flips the matching ``process_*_called`` flag,
    letting tests assert exactly which phases of request handling ran.
    """

    # The hook names whose invocation is tracked.
    _hooks = ('request', 'view', 'template_response', 'response', 'exception')

    def __init__(self):
        # Start with every "called" flag cleared.
        for hook in self._hooks:
            setattr(self, 'process_%s_called' % hook, False)

    def process_request(self, request):
        self.process_request_called = True

    def process_view(self, request, view_func, view_args, view_kwargs):
        self.process_view_called = True

    def process_template_response(self, request, response):
        self.process_template_response_called = True
        return response

    def process_response(self, request, response):
        self.process_response_called = True
        return response

    def process_exception(self, request, exception):
        self.process_exception_called = True
# Middleware examples that do the right thing
class RequestMiddleware(TestMiddleware):
    """Well-behaved middleware that short-circuits request handling by
    returning a response from ``process_request``."""

    def process_request(self, request):
        super(RequestMiddleware, self).process_request(request)
        response = HttpResponse('Request Middleware')
        return response
class ViewMiddleware(TestMiddleware):
    """Well-behaved middleware that short-circuits by returning a response
    from ``process_view``."""

    def process_view(self, request, view_func, view_args, view_kwargs):
        super(ViewMiddleware, self).process_view(request, view_func, view_args, view_kwargs)
        response = HttpResponse('View Middleware')
        return response
class ResponseMiddleware(TestMiddleware):
    """Well-behaved middleware that replaces the outgoing response with a
    fresh one in ``process_response``."""

    def process_response(self, request, response):
        super(ResponseMiddleware, self).process_response(request, response)
        replacement = HttpResponse('Response Middleware')
        return replacement
class TemplateResponseMiddleware(TestMiddleware):
    """Well-behaved middleware that swaps in a new ``TemplateResponse`` from
    ``process_template_response``."""

    def process_template_response(self, request, response):
        super(TemplateResponseMiddleware, self).process_template_response(request, response)
        replacement = TemplateResponse(request, Template('Template Response Middleware'))
        return replacement
class ExceptionMiddleware(TestMiddleware):
    """Well-behaved middleware that converts an exception into a response in
    ``process_exception``."""

    def process_exception(self, request, exception):
        super(ExceptionMiddleware, self).process_exception(request, exception)
        response = HttpResponse('Exception Middleware')
        return response
# Sample middlewares that raise exceptions
class BadRequestMiddleware(TestMiddleware):
    """Misbehaving middleware: ``process_request`` raises instead of
    returning."""

    def process_request(self, request):
        super(BadRequestMiddleware, self).process_request(request)
        error = TestException('Test Request Exception')
        raise error
class BadViewMiddleware(TestMiddleware):
    """Misbehaving middleware: ``process_view`` raises instead of
    returning."""

    def process_view(self, request, view_func, view_args, view_kwargs):
        super(BadViewMiddleware, self).process_view(request, view_func, view_args, view_kwargs)
        error = TestException('Test View Exception')
        raise error
class BadTemplateResponseMiddleware(TestMiddleware):
    """Misbehaving middleware: ``process_template_response`` raises instead
    of returning."""

    def process_template_response(self, request, response):
        super(BadTemplateResponseMiddleware, self).process_template_response(request, response)
        error = TestException('Test Template Response Exception')
        raise error
class BadResponseMiddleware(TestMiddleware):
    """Misbehaving middleware: ``process_response`` raises instead of
    returning."""

    def process_response(self, request, response):
        super(BadResponseMiddleware, self).process_response(request, response)
        error = TestException('Test Response Exception')
        raise error
class BadExceptionMiddleware(TestMiddleware):
    """Misbehaving middleware: ``process_exception`` itself raises."""

    def process_exception(self, request, exception):
        super(BadExceptionMiddleware, self).process_exception(request, exception)
        error = TestException('Test Exception Exception')
        raise error
# Sample middlewares that fail to return an HttpResponse
class NoTemplateResponseMiddleware(TestMiddleware):
    """Misbehaving middleware: ``process_template_response`` forgets to
    return the response (returns None)."""

    def process_template_response(self, request, response):
        super(NoTemplateResponseMiddleware, self).process_template_response(request, response)
        return None
class NoResponseMiddleware(TestMiddleware):
    """Misbehaving middleware: ``process_response`` forgets to return the
    response (returns None)."""

    def process_response(self, request, response):
        super(NoResponseMiddleware, self).process_response(request, response)
        return None
@override_settings(ROOT_URLCONF='middleware_exceptions.urls')
class BaseMiddlewareExceptionTest(TestCase):
    """Shared plumbing for the middleware-exception tests.

    Listens on the ``got_request_exception`` signal to capture every
    exception raised during request handling, and injects tracking
    middleware directly into the test client's handler lists.
    """

    def setUp(self):
        # Collected (type, value, traceback) triples from the signal.
        self.exceptions = []
        got_request_exception.connect(self._on_request_exception)
        # Populate the handler's per-phase middleware lists so
        # _add_middleware() can splice into them.
        self.client.handler.load_middleware()

    def tearDown(self):
        got_request_exception.disconnect(self._on_request_exception)
        self.exceptions = []

    def _on_request_exception(self, sender, request, **kwargs):
        """Signal receiver: record the currently-handled exception."""
        self.exceptions.append(sys.exc_info())

    def _add_middleware(self, middleware):
        """Register one middleware instance's hooks on the test client.

        Request/view hooks are prepended (run first); template-response,
        response and exception hooks are appended — mirroring how Django
        orders the response-phase hooks in reverse of the request phase.
        """
        self.client.handler._request_middleware.insert(0, middleware.process_request)
        self.client.handler._view_middleware.insert(0, middleware.process_view)
        self.client.handler._template_response_middleware.append(middleware.process_template_response)
        self.client.handler._response_middleware.append(middleware.process_response)
        self.client.handler._exception_middleware.append(middleware.process_exception)

    def assert_exceptions_handled(self, url, errors, extra_error=None):
        """GET *url* and assert the signalled exceptions' messages equal
        *errors* (in order). *extra_error* is an instance of the one extra
        exception type the request is allowed to propagate.
        """
        try:
            self.client.get(url)
        except TestException:
            # Test client intentionally re-raises any exceptions being raised
            # during request handling. Hence actual testing that exception was
            # properly handled is done by relying on got_request_exception
            # signal being sent.
            pass
        except Exception as e:
            # Exact type match is intentional: a subclass would indicate a
            # different failure than the test anticipated.
            if type(extra_error) != type(e):
                self.fail("Unexpected exception: %s" % e)
        self.assertEqual(len(self.exceptions), len(errors))
        for i, error in enumerate(errors):
            exception, value, tb = self.exceptions[i]
            self.assertEqual(value.args, (error, ))

    def assert_middleware_usage(self, middleware, request, view, template_response, response, exception):
        """Assert each ``process_*_called`` flag on *middleware* matches the
        expected boolean for that hook."""
        self.assertEqual(middleware.process_request_called, request)
        self.assertEqual(middleware.process_view_called, view)
        self.assertEqual(middleware.process_template_response_called, template_response)
        self.assertEqual(middleware.process_response_called, response)
        self.assertEqual(middleware.process_exception_called, exception)
class MiddlewareTests(BaseMiddlewareExceptionTest):
    """Exercise the well-behaved middleware against views with varied
    outcomes (normal, 404, error, None-returning, PermissionDenied)."""

    def _check(self, factory, url, errors, usage, extra_error=None):
        """Run one scenario: sandwich a *factory* middleware between two
        plain TestMiddleware instances, fetch *url*, then verify the
        recorded exception messages (*errors*) and hook usage.

        *usage* holds three (request, view, template_response, response,
        exception) flag tuples — for the pre, target and post middleware.
        """
        pre_middleware = TestMiddleware()
        middleware = factory()
        post_middleware = TestMiddleware()
        self._add_middleware(post_middleware)
        self._add_middleware(middleware)
        self._add_middleware(pre_middleware)
        self.assert_exceptions_handled(url, errors, extra_error)
        # Check that the right middleware methods have been invoked
        for instance, flags in zip((pre_middleware, middleware, post_middleware), usage):
            self.assert_middleware_usage(instance, *flags)

    def test_process_request_middleware(self):
        self._check(RequestMiddleware, '/middleware_exceptions/view/', [], [
            (True, False, False, True, False),
            (True, False, False, True, False),
            (False, False, False, True, False),
        ])

    def test_process_view_middleware(self):
        self._check(ViewMiddleware, '/middleware_exceptions/view/', [], [
            (True, True, False, True, False),
            (True, True, False, True, False),
            (True, False, False, True, False),
        ])

    def test_process_response_middleware(self):
        self._check(ResponseMiddleware, '/middleware_exceptions/view/', [], [
            (True, True, False, True, False),
            (True, True, False, True, False),
            (True, True, False, True, False),
        ])

    def test_process_template_response_middleware(self):
        self._check(TemplateResponseMiddleware, '/middleware_exceptions/template_response/', [], [
            (True, True, True, True, False),
            (True, True, True, True, False),
            (True, True, True, True, False),
        ])

    def test_process_exception_middleware(self):
        self._check(ExceptionMiddleware, '/middleware_exceptions/view/', [], [
            (True, True, False, True, False),
            (True, True, False, True, False),
            (True, True, False, True, False),
        ])

    def test_process_request_middleware_not_found(self):
        self._check(RequestMiddleware, '/middleware_exceptions/not_found/', [], [
            (True, False, False, True, False),
            (True, False, False, True, False),
            (False, False, False, True, False),
        ])

    def test_process_view_middleware_not_found(self):
        self._check(ViewMiddleware, '/middleware_exceptions/not_found/', [], [
            (True, True, False, True, False),
            (True, True, False, True, False),
            (True, False, False, True, False),
        ])

    def test_process_template_response_middleware_not_found(self):
        self._check(TemplateResponseMiddleware, '/middleware_exceptions/not_found/', [], [
            (True, True, False, True, True),
            (True, True, False, True, True),
            (True, True, False, True, True),
        ])

    def test_process_response_middleware_not_found(self):
        self._check(ResponseMiddleware, '/middleware_exceptions/not_found/', [], [
            (True, True, False, True, True),
            (True, True, False, True, True),
            (True, True, False, True, True),
        ])

    def test_process_exception_middleware_not_found(self):
        self._check(ExceptionMiddleware, '/middleware_exceptions/not_found/', [], [
            (True, True, False, True, False),
            (True, True, False, True, True),
            (True, True, False, True, True),
        ])

    def test_process_request_middleware_exception(self):
        self._check(RequestMiddleware, '/middleware_exceptions/error/', [], [
            (True, False, False, True, False),
            (True, False, False, True, False),
            (False, False, False, True, False),
        ])

    def test_process_view_middleware_exception(self):
        self._check(ViewMiddleware, '/middleware_exceptions/error/', [], [
            (True, True, False, True, False),
            (True, True, False, True, False),
            (True, False, False, True, False),
        ])

    def test_process_response_middleware_exception(self):
        self._check(ResponseMiddleware, '/middleware_exceptions/error/', ['Error in view'], [
            (True, True, False, True, True),
            (True, True, False, True, True),
            (True, True, False, True, True),
        ], Exception())

    def test_process_exception_middleware_exception(self):
        self._check(ExceptionMiddleware, '/middleware_exceptions/error/', [], [
            (True, True, False, True, False),
            (True, True, False, True, True),
            (True, True, False, True, True),
        ])

    def test_process_request_middleware_null_view(self):
        self._check(RequestMiddleware, '/middleware_exceptions/null_view/', [], [
            (True, False, False, True, False),
            (True, False, False, True, False),
            (False, False, False, True, False),
        ])

    def test_process_view_middleware_null_view(self):
        self._check(ViewMiddleware, '/middleware_exceptions/null_view/', [], [
            (True, True, False, True, False),
            (True, True, False, True, False),
            (True, False, False, True, False),
        ])

    def test_process_response_middleware_null_view(self):
        self._check(ResponseMiddleware, '/middleware_exceptions/null_view/', [
            "The view middleware_exceptions.views.null_view didn't return an HttpResponse object. It returned None instead.",
        ], [
            (True, True, False, True, False),
            (True, True, False, True, False),
            (True, True, False, True, False),
        ], ValueError())

    def test_process_exception_middleware_null_view(self):
        self._check(ExceptionMiddleware, '/middleware_exceptions/null_view/', [
            "The view middleware_exceptions.views.null_view didn't return an HttpResponse object. It returned None instead."
        ], [
            (True, True, False, True, False),
            (True, True, False, True, False),
            (True, True, False, True, False),
        ], ValueError())

    def test_process_request_middleware_permission_denied(self):
        self._check(RequestMiddleware, '/middleware_exceptions/permission_denied/', [], [
            (True, False, False, True, False),
            (True, False, False, True, False),
            (False, False, False, True, False),
        ])

    def test_process_view_middleware_permission_denied(self):
        self._check(ViewMiddleware, '/middleware_exceptions/permission_denied/', [], [
            (True, True, False, True, False),
            (True, True, False, True, False),
            (True, False, False, True, False),
        ])

    def test_process_response_middleware_permission_denied(self):
        self._check(ResponseMiddleware, '/middleware_exceptions/permission_denied/', [], [
            (True, True, False, True, True),
            (True, True, False, True, True),
            (True, True, False, True, True),
        ])

    def test_process_exception_middleware_permission_denied(self):
        self._check(ExceptionMiddleware, '/middleware_exceptions/permission_denied/', [], [
            (True, True, False, True, False),
            (True, True, False, True, True),
            (True, True, False, True, True),
        ])

    def test_process_template_response_error(self):
        # Single tracking middleware; the view's template render errors out.
        middleware = TestMiddleware()
        self._add_middleware(middleware)
        self.assert_exceptions_handled('/middleware_exceptions/template_response_error/', [])
        # Check that the right middleware methods have been invoked
        self.assert_middleware_usage(middleware, True, True, True, True, False)
class BadMiddlewareTests(BaseMiddlewareExceptionTest):
def test_process_request_bad_middleware(self):
pre_middleware = TestMiddleware()
bad_middleware = BadRequestMiddleware()
post_middleware = TestMiddleware()
self._add_middleware(post_middleware)
self._add_middleware(bad_middleware)
self._add_middleware(pre_middleware)
self.assert_exceptions_handled('/middleware_exceptions/view/', ['Test Request Exception'])
# Check that the right middleware methods have been invoked
self.assert_middleware_usage(pre_middleware, True, False, False, True, False)
self.assert_middleware_usage(bad_middleware, True, False, False, True, False)
self.assert_middleware_usage(post_middleware, False, False, False, True, False)
def test_process_view_bad_middleware(self):
pre_middleware = TestMiddleware()
bad_middleware = BadViewMiddleware()
post_middleware = TestMiddleware()
self._add_middleware(post_middleware)
self._add_middleware(bad_middleware)
self._add_middleware(pre_middleware)
self.assert_exceptions_handled('/middleware_exceptions/view/', ['Test View Exception'])
# Check that the right middleware methods have been invoked
self.assert_middleware_usage(pre_middleware, True, True, False, True, False)
self.assert_middleware_usage(bad_middleware, True, True, False, True, False)
self.assert_middleware_usage(post_middleware, True, False, False, True, False)
def test_process_template_response_bad_middleware(self):
pre_middleware = TestMiddleware()
bad_middleware = BadTemplateResponseMiddleware()
post_middleware = TestMiddleware()
self._add_middleware(post_middleware)
self._add_middleware(bad_middleware)
self._add_middleware(pre_middleware)
self.assert_exceptions_handled('/middleware_exceptions/template_response/', ['Test Template Response Exception'])
# Check that the right middleware methods have been invoked
self.assert_middleware_usage(pre_middleware, True, True, False, True, False)
self.assert_middleware_usage(bad_middleware, True, True, True, True, False)
self.assert_middleware_usage(post_middleware, True, True, True, True, False)
def test_process_response_bad_middleware(self):
pre_middleware = TestMiddleware()
bad_middleware = BadResponseMiddleware()
post_middleware = TestMiddleware()
self._add_middleware(post_middleware)
self._add_middleware(bad_middleware)
self._add_middleware(pre_middleware)
self.assert_exceptions_handled('/middleware_exceptions/view/', ['Test Response Exception'])
# Check that the right middleware methods have been invoked
self.assert_middleware_usage(pre_middleware, True, True, False, False, False)
self.assert_middleware_usage(bad_middleware, True, True, False, True, False)
self.assert_middleware_usage(post_middleware, True, True, False, True, False)
def test_process_exception_bad_middleware(self):
pre_middleware = TestMiddleware()
bad_middleware = BadExceptionMiddleware()
post_middleware = TestMiddleware()
self._add_middleware(post_middleware)
self._add_middleware(bad_middleware)
self._add_middleware(pre_middleware)
self.assert_exceptions_handled('/middleware_exceptions/view/', [])
# Check that the right middleware methods have been invoked
self.assert_middleware_usage(pre_middleware, True, True, False, True, False)
self.assert_middleware_usage(bad_middleware, True, True, False, True, False)
self.assert_middleware_usage(post_middleware, True, True, False, True, False)
def test_process_request_bad_middleware_not_found(self):
pre_middleware = TestMiddleware()
bad_middleware = BadRequestMiddleware()
post_middleware = TestMiddleware()
self._add_middleware(post_middleware)
self._add_middleware(bad_middleware)
self._add_middleware(pre_middleware)
self.assert_exceptions_handled('/middleware_exceptions/not_found/', ['Test Request Exception'])
# Check that the right middleware methods have been invoked
self.assert_middleware_usage(pre_middleware, True, False, False, True, False)
self.assert_middleware_usage(bad_middleware, True, False, False, True, False)
self.assert_middleware_usage(post_middleware, False, False, False, True, False)
def test_process_view_bad_middleware_not_found(self):
pre_middleware = TestMiddleware()
bad_middleware = BadViewMiddleware()
post_middleware = TestMiddleware()
self._add_middleware(post_middleware)
self._add_middleware(bad_middleware)
self._add_middleware(pre_middleware)
self.assert_exceptions_handled('/middleware_exceptions/not_found/', ['Test View Exception'])
# Check that the right middleware methods have been invoked
self.assert_middleware_usage(pre_middleware, True, True, False, True, False)
self.assert_middleware_usage(bad_middleware, True, True, False, True, False)
self.assert_middleware_usage(post_middleware, True, False, False, True, False)
def test_process_response_bad_middleware_not_found(self):
pre_middleware = TestMiddleware()
bad_middleware = BadResponseMiddleware()
post_middleware = TestMiddleware()
self._add_middleware(post_middleware)
self._add_middleware(bad_middleware)
self._add_middleware(pre_middleware)
self.assert_exceptions_handled('/middleware_exceptions/not_found/', ['Test Response Exception'])
# Check that the right middleware methods have been invoked
self.assert_middleware_usage(pre_middleware, True, True, False, False, True)
self.assert_middleware_usage(bad_middleware, True, True, False, True, True)
self.assert_middleware_usage(post_middleware, True, True, False, True, True)
def test_process_exception_bad_middleware_not_found(self):
pre_middleware = TestMiddleware()
bad_middleware = BadExceptionMiddleware()
post_middleware = TestMiddleware()
self._add_middleware(post_middleware)
self._add_middleware(bad_middleware)
self._add_middleware(pre_middleware)
self.assert_exceptions_handled('/middleware_exceptions/not_found/', ['Test Exception Exception'])
# Check that the right middleware methods have been invoked
self.assert_middleware_usage(pre_middleware, True, True, False, True, False)
self.assert_middleware_usage(bad_middleware, True, True, False, True, True)
self.assert_middleware_usage(post_middleware, True, True, False, True, True)
def test_process_request_bad_middleware_exception(self):
pre_middleware = TestMiddleware()
bad_middleware = BadRequestMiddleware()
post_middleware = TestMiddleware()
self._add_middleware(post_middleware)
self._add_middleware(bad_middleware)
self._add_middleware(pre_middleware)
self.assert_exceptions_handled('/middleware_exceptions/error/', ['Test Request Exception'])
# Check that the right middleware methods have been invoked
self.assert_middleware_usage(pre_middleware, True, False, False, True, False)
self.assert_middleware_usage(bad_middleware, True, False, False, True, False)
self.assert_middleware_usage(post_middleware, False, False, False, True, False)
def test_process_view_bad_middleware_exception(self):
pre_middleware = TestMiddleware()
bad_middleware = BadViewMiddleware()
post_middleware = TestMiddleware()
self._add_middleware(post_middleware)
self._add_middleware(bad_middleware)
self._add_middleware(pre_middleware)
self.assert_exceptions_handled('/middleware_exceptions/error/', ['Test View Exception'])
# Check that the right middleware methods have been invoked
self.assert_middleware_usage(pre_middleware, True, True, False, True, False)
self.assert_middleware_usage(bad_middleware, True, True, False, True, False)
self.assert_middleware_usage(post_middleware, True, False, False, True, False)
def test_process_response_bad_middleware_exception(self):
pre_middleware = TestMiddleware()
bad_middleware = BadResponseMiddleware()
post_middleware = TestMiddleware()
self._add_middleware(post_middleware)
self._add_middleware(bad_middleware)
self._add_middleware(pre_middleware)
self.assert_exceptions_handled('/middleware_exceptions/error/', ['Error in view', 'Test Response Exception'])
# Check that the right middleware methods have been invoked
self.assert_middleware_usage(pre_middleware, True, True, False, False, True)
self.assert_middleware_usage(bad_middleware, True, True, False, True, True)
self.assert_middleware_usage(post_middleware, True, True, False, True, True)
def test_process_exception_bad_middleware_exception(self):
pre_middleware = TestMiddleware()
bad_middleware = BadExceptionMiddleware()
post_middleware = TestMiddleware()
self._add_middleware(post_middleware)
self._add_middleware(bad_middleware)
self._add_middleware(pre_middleware)
self.assert_exceptions_handled('/middleware_exceptions/error/', ['Test Exception Exception'])
# Check that the right middleware methods have been invoked
self.assert_middleware_usage(pre_middleware, True, True, False, True, False)
self.assert_middleware_usage(bad_middleware, True, True, False, True, True)
self.assert_middleware_usage(post_middleware, True, True, False, True, True)
def test_process_request_bad_middleware_null_view(self):
pre_middleware = TestMiddleware()
bad_middleware = BadRequestMiddleware()
post_middleware = TestMiddleware()
self._add_middleware(post_middleware)
self._add_middleware(bad_middleware)
self._add_middleware(pre_middleware)
self.assert_exceptions_handled('/middleware_exceptions/null_view/', ['Test Request Exception'])
# Check that the right middleware methods have been invoked
self.assert_middleware_usage(pre_middleware, True, False, False, True, False)
self.assert_middleware_usage(bad_middleware, True, False, False, True, False)
self.assert_middleware_usage(post_middleware, False, False, False, True, False)
def test_process_view_bad_middleware_null_view(self):
pre_middleware = TestMiddleware()
bad_middleware = BadViewMiddleware()
post_middleware = TestMiddleware()
self._add_middleware(post_middleware)
self._add_middleware(bad_middleware)
self._add_middleware(pre_middleware)
self.assert_exceptions_handled('/middleware_exceptions/null_view/', ['Test View Exception'])
# Check that the right middleware methods have been invoked
self.assert_middleware_usage(pre_middleware, True, True, False, True, False)
self.assert_middleware_usage(bad_middleware, True, True, False, True, False)
self.assert_middleware_usage(post_middleware, True, False, False, True, False)
def test_process_response_bad_middleware_null_view(self):
pre_middleware = TestMiddleware()
bad_middleware = BadResponseMiddleware()
post_middleware = TestMiddleware()
self._add_middleware(post_middleware)
self._add_middleware(bad_middleware)
self._add_middleware(pre_middleware)
self.assert_exceptions_handled('/middleware_exceptions/null_view/', [
"The view middleware_exceptions.views.null_view didn't return an HttpResponse object. It returned None instead.",
'Test Response Exception'
])
# Check that the right middleware methods have been invoked
self.assert_middleware_usage(pre_middleware, True, True, False, False, False)
self.assert_middleware_usage(bad_middleware, True, True, False, True, False)
self.assert_middleware_usage(post_middleware, True, True, False, True, False)
def test_process_exception_bad_middleware_null_view(self):
pre_middleware = TestMiddleware()
bad_middleware = BadExceptionMiddleware()
post_middleware = TestMiddleware()
self._add_middleware(post_middleware)
self._add_middleware(bad_middleware)
self._add_middleware(pre_middleware)
self.assert_exceptions_handled('/middleware_exceptions/null_view/', [
"The view middleware_exceptions.views.null_view didn't return an HttpResponse object. It returned None instead."
],
ValueError())
# Check that the right middleware methods have been invoked
self.assert_middleware_usage(pre_middleware, True, True, False, True, False)
self.assert_middleware_usage(bad_middleware, True, True, False, True, False)
self.assert_middleware_usage(post_middleware, True, True, False, True, False)
def test_process_request_bad_middleware_permission_denied(self):
pre_middleware = TestMiddleware()
bad_middleware = BadRequestMiddleware()
post_middleware = TestMiddleware()
self._add_middleware(post_middleware)
self._add_middleware(bad_middleware)
self._add_middleware(pre_middleware)
self.assert_exceptions_handled('/middleware_exceptions/permission_denied/', ['Test Request Exception'])
# Check that the right middleware methods have been invoked
self.assert_middleware_usage(pre_middleware, True, False, False, True, False)
self.assert_middleware_usage(bad_middleware, True, False, False, True, False)
self.assert_middleware_usage(post_middleware, False, False, False, True, False)
def test_process_view_bad_middleware_permission_denied(self):
    """A process_view that raises prevents the view from running; later
    middlewares never see process_view, but all still see process_response."""
    first = TestMiddleware()
    failing = BadViewMiddleware()
    last = TestMiddleware()
    # Register in reverse so request-phase order is first, failing, last.
    for mw in (last, failing, first):
        self._add_middleware(mw)
    self.assert_exceptions_handled('/middleware_exceptions/permission_denied/', ['Test View Exception'])
    # Flags: process_request, process_view, process_template_response,
    # process_response, process_exception.
    self.assert_middleware_usage(first, True, True, False, True, False)
    self.assert_middleware_usage(failing, True, True, False, True, False)
    self.assert_middleware_usage(last, True, False, False, True, False)
def test_process_response_bad_middleware_permission_denied(self):
    """A process_response that raises during the PermissionDenied response
    phase replaces the outcome with the response-phase error."""
    first = TestMiddleware()
    failing = BadResponseMiddleware()
    last = TestMiddleware()
    # Register in reverse so request-phase order is first, failing, last.
    for mw in (last, failing, first):
        self._add_middleware(mw)
    self.assert_exceptions_handled('/middleware_exceptions/permission_denied/', ['Test Response Exception'])
    # Flags: process_request, process_view, process_template_response,
    # process_response, process_exception.
    self.assert_middleware_usage(first, True, True, False, False, True)
    self.assert_middleware_usage(failing, True, True, False, True, True)
    self.assert_middleware_usage(last, True, True, False, True, True)
def test_process_exception_bad_middleware_permission_denied(self):
    """A process_exception that itself raises while handling PermissionDenied:
    the exception-phase error becomes the reported failure."""
    first = TestMiddleware()
    failing = BadExceptionMiddleware()
    last = TestMiddleware()
    # Register in reverse so request-phase order is first, failing, last.
    for mw in (last, failing, first):
        self._add_middleware(mw)
    self.assert_exceptions_handled('/middleware_exceptions/permission_denied/', ['Test Exception Exception'])
    # Flags: process_request, process_view, process_template_response,
    # process_response, process_exception.
    self.assert_middleware_usage(first, True, True, False, True, False)
    self.assert_middleware_usage(failing, True, True, False, True, True)
    self.assert_middleware_usage(last, True, True, False, True, True)
def test_process_response_no_response_middleware(self):
    """process_response returning None (instead of an HttpResponse) triggers
    a ValueError naming the offending middleware."""
    first = TestMiddleware()
    broken = NoResponseMiddleware()
    last = TestMiddleware()
    # Register in reverse so request-phase order is first, broken, last.
    for mw in (last, broken, first):
        self._add_middleware(mw)
    self.assert_exceptions_handled(
        '/middleware_exceptions/view/',
        ["NoResponseMiddleware.process_response didn't return an HttpResponse object. It returned None instead."],
        ValueError(),
    )
    # Flags: process_request, process_view, process_template_response,
    # process_response, process_exception.
    self.assert_middleware_usage(first, True, True, False, False, False)
    self.assert_middleware_usage(broken, True, True, False, True, False)
    self.assert_middleware_usage(last, True, True, False, True, False)
def test_process_template_response_no_response_middleware(self):
    """process_template_response returning None triggers a ValueError naming
    the offending middleware; earlier middlewares still run their hooks."""
    first = TestMiddleware()
    broken = NoTemplateResponseMiddleware()
    last = TestMiddleware()
    # Register in reverse so request-phase order is first, broken, last.
    for mw in (last, broken, first):
        self._add_middleware(mw)
    self.assert_exceptions_handled(
        '/middleware_exceptions/template_response/',
        ["NoTemplateResponseMiddleware.process_template_response didn't return an HttpResponse object. It returned None instead."],
        ValueError(),
    )
    # Flags: process_request, process_view, process_template_response,
    # process_response, process_exception.
    self.assert_middleware_usage(first, True, True, False, True, False)
    self.assert_middleware_usage(broken, True, True, True, True, False)
    self.assert_middleware_usage(last, True, True, True, True, False)
# Module-level sentinel: a unique object used to detect "no value supplied"
# where None is a legitimate value.
_missing = object()
@override_settings(ROOT_URLCONF='middleware_exceptions.urls')
class RootUrlconfTests(TestCase):
    """Behavior of the test client when ROOT_URLCONF is absent."""
    @override_settings(ROOT_URLCONF=None)
    def test_missing_root_urlconf(self):
        """A request with no ROOT_URLCONF setting raises AttributeError."""
        # Deleting is safe: override_settings restores the previous value
        # when the test finishes.
        del settings.ROOT_URLCONF
        with self.assertRaises(AttributeError):
            self.client.get("/middleware_exceptions/view/")
class MyMiddleware(object):
    """Middleware that deactivates itself by raising MiddlewareNotUsed on init."""
    def __init__(self):
        raise MiddlewareNotUsed
    def process_request(self, request):
        # Never reached: __init__ always raises, so the middleware is skipped.
        pass
class MyMiddlewareWithExceptionMessage(object):
    """Like MyMiddleware, but supplies a custom message to MiddlewareNotUsed
    so the debug log line can be checked for it."""
    def __init__(self):
        raise MiddlewareNotUsed('spam eggs')
    def process_request(self, request):
        # Never reached: __init__ always raises, so the middleware is skipped.
        pass
@override_settings(
    DEBUG=True,
    ROOT_URLCONF='middleware_exceptions.urls',
)
class MiddlewareNotUsedTests(TestCase):
    """Logging behavior when a middleware raises MiddlewareNotUsed."""
    # Shared factory for building requests without going through the client.
    rf = RequestFactory()
    def test_raise_exception(self):
        """Calling the middleware directly still raises MiddlewareNotUsed."""
        request = self.rf.get('middleware_exceptions/view/')
        with self.assertRaises(MiddlewareNotUsed):
            MyMiddleware().process_request(request)
    @override_settings(MIDDLEWARE_CLASSES=(
        'middleware_exceptions.tests.MyMiddleware',
    ))
    def test_log(self):
        """With DEBUG on, a skipped middleware emits one debug log line."""
        with patch_logger('django.request', 'debug') as calls:
            self.client.get('/middleware_exceptions/view/')
        self.assertEqual(len(calls), 1)
        self.assertEqual(
            calls[0],
            "MiddlewareNotUsed: 'middleware_exceptions.tests.MyMiddleware'"
        )
    @override_settings(MIDDLEWARE_CLASSES=(
        'middleware_exceptions.tests.MyMiddlewareWithExceptionMessage',
    ))
    def test_log_custom_message(self):
        """The custom MiddlewareNotUsed message is appended to the log line."""
        with patch_logger('django.request', 'debug') as calls:
            self.client.get('/middleware_exceptions/view/')
        self.assertEqual(len(calls), 1)
        self.assertEqual(
            calls[0],
            "MiddlewareNotUsed('middleware_exceptions.tests.MyMiddlewareWithExceptionMessage'): spam eggs"
        )
    @override_settings(DEBUG=False)
    def test_do_not_log_when_debug_is_false(self):
        """With DEBUG off, the skip is silent — no debug log lines at all."""
        with patch_logger('django.request', 'debug') as calls:
            self.client.get('/middleware_exceptions/view/')
        self.assertEqual(len(calls), 0)
| 48.362625
| 132
| 0.732807
| 4,735
| 43,478
| 6.416895
| 0.039493
| 0.064573
| 0.094688
| 0.111901
| 0.87421
| 0.852192
| 0.835045
| 0.822077
| 0.804338
| 0.786335
| 0
| 0.000198
| 0.188394
| 43,478
| 898
| 133
| 48.416481
| 0.860855
| 0.07314
| 0
| 0.710949
| 0
| 0
| 0.085286
| 0.055242
| 0
| 0
| 0
| 0
| 0.289051
| 1
| 0.115328
| false
| 0.005839
| 0.013139
| 0
| 0.170803
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
8aa398cd6aaf822c7a1781f34d8f971d4242029b
| 56
|
py
|
Python
|
dexy/parsers/__init__.py
|
dsoto/dexy
|
0f2090250040c3c54c8481a16de8e476b559e87c
|
[
"MIT"
] | 136
|
2015-01-06T15:04:47.000Z
|
2021-12-21T22:52:41.000Z
|
dexy/parsers/__init__.py
|
dsoto/dexy
|
0f2090250040c3c54c8481a16de8e476b559e87c
|
[
"MIT"
] | 13
|
2015-01-26T14:06:58.000Z
|
2020-03-27T21:16:10.000Z
|
dexy/parsers/__init__.py
|
dsoto/dexy
|
0f2090250040c3c54c8481a16de8e476b559e87c
|
[
"MIT"
] | 34
|
2015-01-02T16:24:53.000Z
|
2021-11-27T05:38:30.000Z
|
import dexy.parsers.doc
import dexy.parsers.environment
| 18.666667
| 31
| 0.857143
| 8
| 56
| 6
| 0.625
| 0.416667
| 0.708333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.071429
| 56
| 2
| 32
| 28
| 0.923077
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
8abaca9d107178bd9e5bbdb355ad4f8d2621bc91
| 43
|
py
|
Python
|
telegrambot/keyboards/__init__.py
|
BTMAN1489-1/telegrambot
|
fae2f6da98b50dccb9d800405509a16d4639953f
|
[
"MIT"
] | 12
|
2021-10-20T16:05:02.000Z
|
2022-03-22T14:20:30.000Z
|
telegrambot/keyboards/__init__.py
|
BTMAN1489-1/telegrambot
|
fae2f6da98b50dccb9d800405509a16d4639953f
|
[
"MIT"
] | 17
|
2021-11-02T20:33:14.000Z
|
2022-03-17T11:45:24.000Z
|
telegrambot/keyboards/__init__.py
|
BTMAN1489-1/telegrambot
|
fae2f6da98b50dccb9d800405509a16d4639953f
|
[
"MIT"
] | 7
|
2021-09-25T20:19:17.000Z
|
2022-02-20T09:31:41.000Z
|
from . import default
from . import inline
| 14.333333
| 21
| 0.767442
| 6
| 43
| 5.5
| 0.666667
| 0.606061
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.186047
| 43
| 2
| 22
| 21.5
| 0.942857
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
8acb87cd7cc3df4fd8b60996b3da1bc6690a0eb0
| 93
|
py
|
Python
|
c02/p061_run_script.py
|
HiAwesome/dive-into-python3-practice
|
e57504cb0683ebca9c80b20ff0cb3878bdcc3f87
|
[
"Apache-2.0"
] | null | null | null |
c02/p061_run_script.py
|
HiAwesome/dive-into-python3-practice
|
e57504cb0683ebca9c80b20ff0cb3878bdcc3f87
|
[
"Apache-2.0"
] | null | null | null |
c02/p061_run_script.py
|
HiAwesome/dive-into-python3-practice
|
e57504cb0683ebca9c80b20ff0cb3878bdcc3f87
|
[
"Apache-2.0"
] | null | null | null |
# Demonstrates that an imported module's __name__ is its dotted module path
# (unlike a script run directly, whose __name__ is '__main__').
import c02.p044_humansize as humansize
print(humansize.__name__)
# Bare string literal documenting the expected output of the print above:
"""
c02.p044_humansize
"""
| 13.285714
| 38
| 0.784946
| 12
| 93
| 5.583333
| 0.583333
| 0.208955
| 0.477612
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.119048
| 0.096774
| 93
| 6
| 39
| 15.5
| 0.678571
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0.5
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
|
0
| 7
|
8ad34c8aeda10cb7d4085e5ae2e115e55ca1d3e1
| 816
|
py
|
Python
|
scripts/study_case/ID_4/test/test_debug.py
|
kzbnb/numerical_bugs
|
bc22e72bcc06df6ce7889a25e0aeed027bde910b
|
[
"Apache-2.0"
] | 8
|
2021-06-30T06:55:14.000Z
|
2022-03-18T01:57:14.000Z
|
scripts/study_case/ID_4/test/test_debug.py
|
kzbnb/numerical_bugs
|
bc22e72bcc06df6ce7889a25e0aeed027bde910b
|
[
"Apache-2.0"
] | 1
|
2021-06-30T03:08:15.000Z
|
2021-06-30T03:08:15.000Z
|
scripts/study_case/ID_4/test/test_debug.py
|
kzbnb/numerical_bugs
|
bc22e72bcc06df6ce7889a25e0aeed027bde910b
|
[
"Apache-2.0"
] | 2
|
2021-11-17T11:19:48.000Z
|
2021-11-18T03:05:58.000Z
|
from scripts.study_case.ID_4.torch_geometric import is_debug_enabled, debug, set_debug
def test_debug():
    """Exercise the global debug flag: plain set_debug() toggling, and
    set_debug()/debug() used as context managers that restore the prior
    state on exit.

    NOTE: the assertion order is significant — each assert checks the flag
    state produced by the immediately preceding call or context entry/exit.
    """
    # Flag starts off; set_debug works as a plain on/off switch.
    assert is_debug_enabled() is False
    set_debug(True)
    assert is_debug_enabled() is True
    set_debug(False)
    assert is_debug_enabled() is False
    assert is_debug_enabled() is False
    # set_debug also acts as a context manager: enabled inside, restored after.
    with set_debug(True):
        assert is_debug_enabled() is True
    assert is_debug_enabled() is False
    assert is_debug_enabled() is False
    # Context manager restores the *previous* state, here True.
    set_debug(True)
    assert is_debug_enabled() is True
    with set_debug(False):
        assert is_debug_enabled() is False
    assert is_debug_enabled() is True
    set_debug(False)
    assert is_debug_enabled() is False
    # debug() is a shorthand context manager equivalent to set_debug(True).
    assert is_debug_enabled() is False
    with debug():
        assert is_debug_enabled() is True
    assert is_debug_enabled() is False
| 28.137931
| 86
| 0.720588
| 124
| 816
| 4.41129
| 0.153226
| 0.191956
| 0.383912
| 0.511883
| 0.85192
| 0.85192
| 0.833638
| 0.833638
| 0.833638
| 0.8117
| 0
| 0.00156
| 0.214461
| 816
| 28
| 87
| 29.142857
| 0.851794
| 0
| 0
| 0.782609
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.608696
| 1
| 0.043478
| true
| 0
| 0.043478
| 0
| 0.086957
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 11
|
76e95a9a23b847a2886ed4840261fcadc4bc1343
| 113
|
py
|
Python
|
petstagram/petstagram/common/views.py
|
PetkoAndreev/Python-Web-Petstagram
|
d363ca900e64b7bba228f7f7603ddcaf7186f61e
|
[
"MIT"
] | 3
|
2021-01-19T18:54:38.000Z
|
2022-01-05T17:28:41.000Z
|
Python-Web-Framework-Softuni/petstagram/common/views.py
|
borisboychev/SoftUni
|
22062312f08e29a1d85377a6d41ef74966d37e99
|
[
"MIT"
] | 7
|
2021-06-05T00:01:05.000Z
|
2022-03-12T00:52:05.000Z
|
Python-Web-Framework-Softuni/petstagram/common/views.py
|
borisboychev/SoftUni
|
22062312f08e29a1d85377a6d41ef74966d37e99
|
[
"MIT"
] | 1
|
2020-12-27T09:36:31.000Z
|
2020-12-27T09:36:31.000Z
|
from django.shortcuts import render
def landing_page(request):
    """Render the public landing page template."""
    template_name = 'landing_page.html'
    return render(request, template_name)
| 18.833333
| 47
| 0.778761
| 15
| 113
| 5.733333
| 0.733333
| 0.255814
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.132743
| 113
| 5
| 48
| 22.6
| 0.877551
| 0
| 0
| 0
| 0
| 0
| 0.150442
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
76fe6765fe7991f37a7a3df57e3a0956a3b6cd3b
| 182
|
py
|
Python
|
py/pysparkling/ml/models.py
|
evergage/sparkling-water
|
dda405176cb3a84bb0af1864393201a6084e7eba
|
[
"Apache-2.0"
] | null | null | null |
py/pysparkling/ml/models.py
|
evergage/sparkling-water
|
dda405176cb3a84bb0af1864393201a6084e7eba
|
[
"Apache-2.0"
] | null | null | null |
py/pysparkling/ml/models.py
|
evergage/sparkling-water
|
dda405176cb3a84bb0af1864393201a6084e7eba
|
[
"Apache-2.0"
] | null | null | null |
import py_sparkling.ml.models
class H2OMOJOModel(py_sparkling.ml.models.H2OMOJOModel):
    """Thin alias re-exporting py_sparkling's H2OMOJOModel under this package."""
    pass
class H2OMOJOPipelineModel(py_sparkling.ml.models.H2OMOJOPipelineModel):
    """Thin alias re-exporting py_sparkling's H2OMOJOPipelineModel under this package."""
    pass
| 18.2
| 72
| 0.813187
| 21
| 182
| 6.904762
| 0.428571
| 0.227586
| 0.268966
| 0.393103
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.024691
| 0.10989
| 182
| 9
| 73
| 20.222222
| 0.87037
| 0
| 0
| 0.4
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.4
| 0.2
| 0
| 0.6
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 0
|
0
| 8
|
0a0d978966dc601a47acb61c5be7cd80fbd1642a
| 3,832
|
py
|
Python
|
yearn/prices/constants.py
|
pmdaly/yearn-exporter
|
d1e7697f8bf12cdb1126ea86fa350a26aea23cf8
|
[
"MIT"
] | null | null | null |
yearn/prices/constants.py
|
pmdaly/yearn-exporter
|
d1e7697f8bf12cdb1126ea86fa350a26aea23cf8
|
[
"MIT"
] | null | null | null |
yearn/prices/constants.py
|
pmdaly/yearn-exporter
|
d1e7697f8bf12cdb1126ea86fa350a26aea23cf8
|
[
"MIT"
] | null | null | null |
from brownie import chain
from yearn.networks import Network
# Per-network contract addresses for the three reference tokens used below.
# NOTE(review): 'weth'/'dai' on non-Mainnet chains hold the chain's wrapped
# native / bridged equivalents (e.g. Gnosis 'dai' is wxdai — see comment).
tokens_by_network = {
    Network.Mainnet: {
        'weth': '0xC02aaA39b223FE8D0A0e5C4F27eAD9083C756Cc2',
        'usdc': '0xA0b86991c6218b36c1d19D4a2e9Eb0cE3606eB48',
        'dai': '0x6B175474E89094C44Da98b954EedeAC495271d0F',
    },
    Network.Gnosis: {
        'weth': '0x6A023CCd1ff6F2045C3309768eAd9E68F978f6e1',
        'usdc': '0xDDAfbb505ad214D7b80b1f830fcCc89B60fb7A83',
        'dai': '0xe91D153E0b41518A2Ce8Dd3D7944Fa863463a97d', # wxdai address
    },
    Network.Fantom: {
        'weth': '0x21be370D5312f44cB42ce377BC9b8a0cEF1A4C83',
        'usdc': '0x04068DA6C83AFCFA0e13ba15A6696662335D5B75',
        'dai': '0x8D11eC38a3EB5E956B052f67Da8Bdc9bef8Abf3E',
    },
    Network.Arbitrum: {
        'weth': '0x82aF49447D8a07e3bd95BD0d56f35241523fBab1',
        'usdc': '0xFF970A61A04b1cA14834A43f5dE4533eBDDB5CC8',
        'dai': '0xDA10009cBd5D07dd0CeCc66161FC93D7c9000da1',
    },
}
# Per-network map: stablecoin contract address -> symbol.
# Fix: the Mainnet map listed the tusd address
# ("0x0000000000085d4780B73119b644AE5ecd22b376") twice; duplicate dict keys
# are silently collapsed by Python (last entry wins), so the repeat was dead
# code — removed.
# NOTE(review): the Gnosis wxdai key is lowercase while every other address
# is checksum-cased — confirm that lookups normalize case, or mixed-case
# queries will miss this entry.
stablecoins_by_network = {
    Network.Mainnet: {
        "0xA0b86991c6218b36c1d19D4a2e9Eb0cE3606eB48": "usdc",
        "0x0000000000085d4780B73119b644AE5ecd22b376": "tusd",
        "0x6B175474E89094C44Da98b954EedeAC495271d0F": "dai",
        "0xdAC17F958D2ee523a2206206994597C13D831ec7": "usdt",
        "0x4Fabb145d64652a948d72533023f6E7A623C7C53": "busd",
        "0x57Ab1ec28D129707052df4dF418D58a2D46d5f51": "susd",
        "0x1456688345527bE1f37E9e627DA0837D6f08C925": "usdp",
        "0x674C6Ad92Fd080e4004b2312b45f796a192D27a0": "usdn",
        "0x853d955aCEf822Db058eb8505911ED77F175b99e": "frax",
        "0x5f98805A4E8be255a32880FDeC7F6728C6568bA0": "lusd",
        "0xBC6DA0FE9aD5f3b0d58160288917AA56653660E9": "alusd",
        "0x1c48f86ae57291F7686349F12601910BD8D470bb": "usdk",
        "0x056Fd409E1d7A124BD7017459dFEa2F387b6d5Cd": "gusd",
        "0x0E2EC54fC0B509F445631Bf4b91AB8168230C752": "linkusd",
        "0x99D8a9C45b2ecA8864373A26D1459e3Dff1e17F3": "mim",
        "0xa47c8bf37f92aBed4A126BDA807A7b7498661acD": "ust",
        "0x196f4727526eA7FB1e17b2071B3d8eAA38486988": "rsv",
        "0xdF574c24545E5FfEcb9a659c229253D4111d87e1": "husd",
        "0x5BC25f649fc4e26069dDF4cF4010F9f706c23831": "dusd",
        "0xe2f2a5C287993345a840Db3B0845fbC70f5935a5": "musd",
        "0x739ca6D71365a08f584c8FC4e1029045Fa8ABC4B": "anydai",
        "0xbbc4A8d076F4B1888fec42581B6fc58d242CF2D5": "anymim",
    },
    Network.Gnosis: {
        "0xDDAfbb505ad214D7b80b1f830fcCc89B60fb7A83": "usdc",
        "0x4ECaBa5870353805a9F068101A40E0f32ed605C6": "usdt",
        "0xe91d153e0b41518a2ce8dd3d7944fa863463a97d": "wxdai"
    },
    Network.Fantom: {
        "0x04068DA6C83AFCFA0e13ba15A6696662335D5B75": "usdc",
        "0x8D11eC38a3EB5E956B052f67Da8Bdc9bef8Abf3E": "dai",
        "0x1B27A9dE6a775F98aaA5B90B62a4e2A0B84DbDd9": "usdt",
        "0xC931f61B1534EB21D8c11B24f3f5Ab2471d4aB50": "busd",
        "0xe2D27f06F63d98b8e11b38b5b08A75D0c8dD62B9": "ust",
        "0x82f0B8B456c1A451378467398982d4834b6829c1": "mim",
        "0x049d68029688eAbF473097a2fC38ef61633A3C7A": "fusdt",
        "0xdc301622e621166BD8E82f2cA0A26c13Ad0BE355": "frax",
        "0x95bf7E307BC1ab0BA38ae10fc27084bC36FcD605": "anyusdc"
    },
    Network.Arbitrum: {
        '0xFF970A61A04b1cA14834A43f5dE4533eBDDB5CC8': 'usdc',
        '0xDA10009cBd5D07dd0CeCc66161FC93D7c9000da1': 'dai',
    },
}
# Block heights at which the Iron Bank (IB) snapshot is taken, per network.
ib_snapshot_block_by_network = {
    Network.Mainnet: 14051986,
    Network.Fantom: 28680044,
    Network.Gnosis: 1, # TODO revisit as IB is not deployed in gnosis
    Network.Arbitrum: 1
}
# Resolve the constants for the chain this process is connected to.
# NOTE(review): chain.id must be a key of every *_by_network dict above,
# otherwise import of this module raises KeyError — presumably intentional
# fail-fast on unsupported networks.
weth = tokens_by_network[chain.id]['weth']
usdc = tokens_by_network[chain.id]['usdc']
dai = tokens_by_network[chain.id]['dai']
stablecoins = stablecoins_by_network[chain.id]
ib_snapshot_block = ib_snapshot_block_by_network[chain.id]
| 44.045977
| 76
| 0.72834
| 194
| 3,832
| 14.273196
| 0.489691
| 0.026002
| 0.02528
| 0.028891
| 0.04117
| 0
| 0
| 0
| 0
| 0
| 0
| 0.408748
| 0.170668
| 3,832
| 86
| 77
| 44.55814
| 0.462555
| 0.015136
| 0
| 0.121951
| 0
| 0
| 0.600902
| 0.545744
| 0
| 0
| 0.545744
| 0.011628
| 0
| 1
| 0
| false
| 0
| 0.02439
| 0
| 0.02439
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0a1c400930ef0e6007bb279b74e3a9a6066cadbe
| 1,719
|
py
|
Python
|
care/facility/migrations/0194_auto_20201009_1936.py
|
gigincg/care
|
07be6a7982b5c46a854e3435a52662f32800c8ae
|
[
"MIT"
] | 189
|
2020-03-17T17:18:58.000Z
|
2022-02-22T09:49:45.000Z
|
care/facility/migrations/0194_auto_20201009_1936.py
|
gigincg/care
|
07be6a7982b5c46a854e3435a52662f32800c8ae
|
[
"MIT"
] | 598
|
2020-03-19T21:22:09.000Z
|
2022-03-30T05:08:37.000Z
|
care/facility/migrations/0194_auto_20201009_1936.py
|
gigincg/care
|
07be6a7982b5c46a854e3435a52662f32800c8ae
|
[
"MIT"
] | 159
|
2020-03-19T18:45:56.000Z
|
2022-03-17T13:23:12.000Z
|
# Generated by Django 2.2.11 on 2020-10-09 14:06
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated migration: adds cluster_name, date_declared_positive and
    is_declared_positive to patientregistration.

    Each field is added twice — once on the model and once on
    historicalpatientregistration, which presumably is its history-tracking
    shadow table (NOTE(review): confirm this is django-simple-history).
    Generated code: do not edit by hand.
    """
    dependencies = [
        ('facility', '0193_auto_20201004_1458'),
    ]
    operations = [
        migrations.AddField(
            model_name='historicalpatientregistration',
            name='cluster_name',
            field=models.CharField(blank=True, default=None, max_length=255, null=True, verbose_name='Name/ Cluster of Contact'),
        ),
        migrations.AddField(
            model_name='historicalpatientregistration',
            name='date_declared_positive',
            field=models.DateTimeField(blank=True, null=True, verbose_name='Date Patient is Declared Positive'),
        ),
        migrations.AddField(
            model_name='historicalpatientregistration',
            name='is_declared_positive',
            field=models.BooleanField(default=None, null=True, verbose_name='Is Patient Declared Positive'),
        ),
        migrations.AddField(
            model_name='patientregistration',
            name='cluster_name',
            field=models.CharField(blank=True, default=None, max_length=255, null=True, verbose_name='Name/ Cluster of Contact'),
        ),
        migrations.AddField(
            model_name='patientregistration',
            name='date_declared_positive',
            field=models.DateTimeField(blank=True, null=True, verbose_name='Date Patient is Declared Positive'),
        ),
        migrations.AddField(
            model_name='patientregistration',
            name='is_declared_positive',
            field=models.BooleanField(default=None, null=True, verbose_name='Is Patient Declared Positive'),
        ),
    ]
| 39.068182
| 129
| 0.643979
| 171
| 1,719
| 6.315789
| 0.298246
| 0.118519
| 0.127778
| 0.15
| 0.859259
| 0.859259
| 0.736111
| 0.736111
| 0.675926
| 0.675926
| 0
| 0.029526
| 0.251309
| 1,719
| 43
| 130
| 39.976744
| 0.809635
| 0.02676
| 0
| 0.810811
| 1
| 0
| 0.271095
| 0.09216
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.027027
| 0
| 0.108108
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0a681fcc4b2a49d8eb9b9e61d0def718feb7bfe7
| 6,269
|
py
|
Python
|
loldib/getratings/models/NA/na_zyra/na_zyra_bot.py
|
koliupy/loldib
|
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
|
[
"Apache-2.0"
] | null | null | null |
loldib/getratings/models/NA/na_zyra/na_zyra_bot.py
|
koliupy/loldib
|
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
|
[
"Apache-2.0"
] | null | null | null |
loldib/getratings/models/NA/na_zyra/na_zyra_bot.py
|
koliupy/loldib
|
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
|
[
"Apache-2.0"
] | null | null | null |
from getratings.models.ratings import Ratings
class NA_Zyra_Bot_Aatrox(Ratings):
pass
class NA_Zyra_Bot_Ahri(Ratings):
pass
class NA_Zyra_Bot_Akali(Ratings):
pass
class NA_Zyra_Bot_Alistar(Ratings):
pass
class NA_Zyra_Bot_Amumu(Ratings):
pass
class NA_Zyra_Bot_Anivia(Ratings):
pass
class NA_Zyra_Bot_Annie(Ratings):
pass
class NA_Zyra_Bot_Ashe(Ratings):
pass
class NA_Zyra_Bot_AurelionSol(Ratings):
pass
class NA_Zyra_Bot_Azir(Ratings):
pass
class NA_Zyra_Bot_Bard(Ratings):
pass
class NA_Zyra_Bot_Blitzcrank(Ratings):
pass
class NA_Zyra_Bot_Brand(Ratings):
pass
class NA_Zyra_Bot_Braum(Ratings):
pass
class NA_Zyra_Bot_Caitlyn(Ratings):
pass
class NA_Zyra_Bot_Camille(Ratings):
pass
class NA_Zyra_Bot_Cassiopeia(Ratings):
pass
class NA_Zyra_Bot_Chogath(Ratings):
pass
class NA_Zyra_Bot_Corki(Ratings):
pass
class NA_Zyra_Bot_Darius(Ratings):
pass
class NA_Zyra_Bot_Diana(Ratings):
pass
class NA_Zyra_Bot_Draven(Ratings):
pass
class NA_Zyra_Bot_DrMundo(Ratings):
pass
class NA_Zyra_Bot_Ekko(Ratings):
pass
class NA_Zyra_Bot_Elise(Ratings):
pass
class NA_Zyra_Bot_Evelynn(Ratings):
pass
class NA_Zyra_Bot_Ezreal(Ratings):
pass
class NA_Zyra_Bot_Fiddlesticks(Ratings):
pass
class NA_Zyra_Bot_Fiora(Ratings):
pass
class NA_Zyra_Bot_Fizz(Ratings):
pass
class NA_Zyra_Bot_Galio(Ratings):
pass
class NA_Zyra_Bot_Gangplank(Ratings):
pass
class NA_Zyra_Bot_Garen(Ratings):
pass
class NA_Zyra_Bot_Gnar(Ratings):
pass
class NA_Zyra_Bot_Gragas(Ratings):
pass
class NA_Zyra_Bot_Graves(Ratings):
pass
class NA_Zyra_Bot_Hecarim(Ratings):
pass
class NA_Zyra_Bot_Heimerdinger(Ratings):
pass
class NA_Zyra_Bot_Illaoi(Ratings):
pass
class NA_Zyra_Bot_Irelia(Ratings):
pass
class NA_Zyra_Bot_Ivern(Ratings):
pass
class NA_Zyra_Bot_Janna(Ratings):
pass
class NA_Zyra_Bot_JarvanIV(Ratings):
pass
class NA_Zyra_Bot_Jax(Ratings):
pass
class NA_Zyra_Bot_Jayce(Ratings):
pass
class NA_Zyra_Bot_Jhin(Ratings):
pass
class NA_Zyra_Bot_Jinx(Ratings):
pass
class NA_Zyra_Bot_Kalista(Ratings):
pass
class NA_Zyra_Bot_Karma(Ratings):
pass
class NA_Zyra_Bot_Karthus(Ratings):
pass
class NA_Zyra_Bot_Kassadin(Ratings):
pass
class NA_Zyra_Bot_Katarina(Ratings):
pass
class NA_Zyra_Bot_Kayle(Ratings):
pass
class NA_Zyra_Bot_Kayn(Ratings):
pass
class NA_Zyra_Bot_Kennen(Ratings):
pass
class NA_Zyra_Bot_Khazix(Ratings):
pass
class NA_Zyra_Bot_Kindred(Ratings):
pass
class NA_Zyra_Bot_Kled(Ratings):
pass
class NA_Zyra_Bot_KogMaw(Ratings):
pass
class NA_Zyra_Bot_Leblanc(Ratings):
pass
class NA_Zyra_Bot_LeeSin(Ratings):
pass
class NA_Zyra_Bot_Leona(Ratings):
pass
class NA_Zyra_Bot_Lissandra(Ratings):
pass
class NA_Zyra_Bot_Lucian(Ratings):
pass
class NA_Zyra_Bot_Lulu(Ratings):
pass
class NA_Zyra_Bot_Lux(Ratings):
pass
class NA_Zyra_Bot_Malphite(Ratings):
pass
class NA_Zyra_Bot_Malzahar(Ratings):
pass
class NA_Zyra_Bot_Maokai(Ratings):
pass
class NA_Zyra_Bot_MasterYi(Ratings):
pass
class NA_Zyra_Bot_MissFortune(Ratings):
pass
class NA_Zyra_Bot_MonkeyKing(Ratings):
pass
class NA_Zyra_Bot_Mordekaiser(Ratings):
pass
class NA_Zyra_Bot_Morgana(Ratings):
pass
class NA_Zyra_Bot_Nami(Ratings):
pass
class NA_Zyra_Bot_Nasus(Ratings):
pass
class NA_Zyra_Bot_Nautilus(Ratings):
pass
class NA_Zyra_Bot_Nidalee(Ratings):
pass
class NA_Zyra_Bot_Nocturne(Ratings):
pass
class NA_Zyra_Bot_Nunu(Ratings):
pass
class NA_Zyra_Bot_Olaf(Ratings):
pass
class NA_Zyra_Bot_Orianna(Ratings):
pass
class NA_Zyra_Bot_Ornn(Ratings):
pass
class NA_Zyra_Bot_Pantheon(Ratings):
pass
class NA_Zyra_Bot_Poppy(Ratings):
pass
class NA_Zyra_Bot_Quinn(Ratings):
pass
class NA_Zyra_Bot_Rakan(Ratings):
pass
class NA_Zyra_Bot_Rammus(Ratings):
pass
class NA_Zyra_Bot_RekSai(Ratings):
pass
class NA_Zyra_Bot_Renekton(Ratings):
pass
class NA_Zyra_Bot_Rengar(Ratings):
pass
class NA_Zyra_Bot_Riven(Ratings):
pass
class NA_Zyra_Bot_Rumble(Ratings):
pass
class NA_Zyra_Bot_Ryze(Ratings):
pass
class NA_Zyra_Bot_Sejuani(Ratings):
pass
class NA_Zyra_Bot_Shaco(Ratings):
pass
class NA_Zyra_Bot_Shen(Ratings):
pass
class NA_Zyra_Bot_Shyvana(Ratings):
pass
class NA_Zyra_Bot_Singed(Ratings):
pass
class NA_Zyra_Bot_Sion(Ratings):
pass
class NA_Zyra_Bot_Sivir(Ratings):
pass
class NA_Zyra_Bot_Skarner(Ratings):
pass
class NA_Zyra_Bot_Sona(Ratings):
pass
class NA_Zyra_Bot_Soraka(Ratings):
pass
class NA_Zyra_Bot_Swain(Ratings):
pass
class NA_Zyra_Bot_Syndra(Ratings):
pass
class NA_Zyra_Bot_TahmKench(Ratings):
pass
class NA_Zyra_Bot_Taliyah(Ratings):
pass
class NA_Zyra_Bot_Talon(Ratings):
pass
class NA_Zyra_Bot_Taric(Ratings):
pass
class NA_Zyra_Bot_Teemo(Ratings):
pass
class NA_Zyra_Bot_Thresh(Ratings):
pass
class NA_Zyra_Bot_Tristana(Ratings):
pass
class NA_Zyra_Bot_Trundle(Ratings):
pass
class NA_Zyra_Bot_Tryndamere(Ratings):
pass
class NA_Zyra_Bot_TwistedFate(Ratings):
pass
class NA_Zyra_Bot_Twitch(Ratings):
pass
class NA_Zyra_Bot_Udyr(Ratings):
pass
class NA_Zyra_Bot_Urgot(Ratings):
pass
class NA_Zyra_Bot_Varus(Ratings):
pass
class NA_Zyra_Bot_Vayne(Ratings):
pass
class NA_Zyra_Bot_Veigar(Ratings):
pass
class NA_Zyra_Bot_Velkoz(Ratings):
pass
class NA_Zyra_Bot_Vi(Ratings):
pass
class NA_Zyra_Bot_Viktor(Ratings):
pass
class NA_Zyra_Bot_Vladimir(Ratings):
pass
class NA_Zyra_Bot_Volibear(Ratings):
pass
class NA_Zyra_Bot_Warwick(Ratings):
pass
class NA_Zyra_Bot_Xayah(Ratings):
pass
class NA_Zyra_Bot_Xerath(Ratings):
pass
class NA_Zyra_Bot_XinZhao(Ratings):
pass
class NA_Zyra_Bot_Yasuo(Ratings):
pass
class NA_Zyra_Bot_Yorick(Ratings):
pass
class NA_Zyra_Bot_Zac(Ratings):
pass
class NA_Zyra_Bot_Zed(Ratings):
pass
class NA_Zyra_Bot_Ziggs(Ratings):
pass
class NA_Zyra_Bot_Zilean(Ratings):
pass
class NA_Zyra_Bot_Zyra(Ratings):
pass
| 15.033573
| 46
| 0.75642
| 972
| 6,269
| 4.452675
| 0.151235
| 0.223198
| 0.350739
| 0.446396
| 0.791359
| 0.791359
| 0
| 0
| 0
| 0
| 0
| 0
| 0.177221
| 6,269
| 416
| 47
| 15.069712
| 0.839085
| 0
| 0
| 0.498195
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.498195
| 0.00361
| 0
| 0.501805
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 0
|
0
| 7
|
6a7327a68e6941de23300d0980fef3324289aa5e
| 84
|
py
|
Python
|
my_test.py
|
Athenian-ComputerScience-Fall2020/reading-files-Milan938
|
76e76a300a1d86f05c6e1aaf4bfb0d6652ca6bec
|
[
"Apache-2.0"
] | null | null | null |
my_test.py
|
Athenian-ComputerScience-Fall2020/reading-files-Milan938
|
76e76a300a1d86f05c6e1aaf4bfb0d6652ca6bec
|
[
"Apache-2.0"
] | null | null | null |
my_test.py
|
Athenian-ComputerScience-Fall2020/reading-files-Milan938
|
76e76a300a1d86f05c6e1aaf4bfb0d6652ca6bec
|
[
"Apache-2.0"
] | null | null | null |
from my_code import avg_temp
def test_avg_temp():
    """avg_temp() must return exactly 82.58 for the course's fixture data.

    NOTE(review): exact float equality is brittle — if avg_temp's arithmetic
    changes (e.g. different rounding), consider pytest.approx instead.
    """
    assert 82.58 == avg_temp()
| 12
| 30
| 0.702381
| 15
| 84
| 3.6
| 0.733333
| 0.388889
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.059701
| 0.202381
| 84
| 6
| 31
| 14
| 0.746269
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.333333
| 1
| 0.333333
| true
| 0
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
6a81777f0fdc43a3b6ce9bee04c152b42d8a1ce5
| 3,064
|
py
|
Python
|
app/tests/slack/test_handlers.py
|
isayakhov/duty-schedule-bot
|
2515fe8e1ba71b183c31363d99d1c8f1d552826a
|
[
"MIT"
] | 6
|
2021-01-30T11:31:46.000Z
|
2022-02-15T10:09:07.000Z
|
app/tests/slack/test_handlers.py
|
isayakhov/duty-schedule-bot
|
2515fe8e1ba71b183c31363d99d1c8f1d552826a
|
[
"MIT"
] | 2
|
2021-02-04T19:45:07.000Z
|
2021-02-05T12:23:56.000Z
|
app/tests/slack/test_handlers.py
|
isayakhov/duty-schedule-bot
|
2515fe8e1ba71b183c31363d99d1c8f1d552826a
|
[
"MIT"
] | 2
|
2022-02-06T11:22:30.000Z
|
2022-02-06T18:46:14.000Z
|
from app.common import constants, exceptions
from app.common import handlers as common_handlers
from app.slack.constants import PLATFORM
def test_start(slack_web_client):
    """POST /start returns 200 with the canned help text and link_names flag."""
    response = slack_web_client.post("/start")
    assert response.status_code == 200
    assert response.json() == {"text": constants.HELP_START_TEXT, "link_names": 1}
def test_create_schedule(slack_web_client, chat_id, username):
    """POST /create succeeds once the channel has at least one person on duty.

    Only the presence of a "text" key is asserted — the schedule text itself
    presumably varies (names/dates), so its content is not pinned.
    """
    common_handlers.add_person(PLATFORM, chat_id, username)
    response = slack_web_client.post("/create", data={"channel_id": chat_id})
    assert response.status_code == 200
    assert "text" in response.json()
def test_create_schedule_no_schedule(slack_web_client, chat_id):
response = slack_web_client.post("/create", data={"channel_id": chat_id})
assert response.status_code == 200
assert response.json() == {"text": constants.HELP_NO_SCHEDULE_TEXT, "link_names": 1}
def test_show_schedule(slack_web_client, chat_id, username):
common_handlers.add_person(PLATFORM, chat_id, username)
common_handlers.set_schedule(PLATFORM, chat_id)
response = slack_web_client.post("/week", data={"channel_id": chat_id})
assert response.status_code == 200
assert "text" in response.json()
def test_show_schedule_not_found(slack_web_client, chat_id, username):
common_handlers.add_person(PLATFORM, chat_id, username)
response = slack_web_client.post("/week", data={"channel_id": chat_id})
assert response.status_code == 200
assert response.json() == {"text": exceptions.ScheduleNotFound.default_message, "link_names": 1}
def test_add_person(slack_web_client, chat_id, username):
response = slack_web_client.post("/add", data={"channel_id": chat_id, "text": username})
assert response.status_code == 200
assert "text" in response.json()
def test_add_person_incorrect_username(slack_web_client, chat_id):
response = slack_web_client.post("/add", data={"channel_id": chat_id, "text": "BAD_NAME"})
assert response.status_code == 200
assert response.json() == {"text": exceptions.IncorrectUsername.default_message, "link_names": 1}
def test_remove_person(slack_web_client, chat_id, username):
slack_web_client.post("/add", data={"channel_id": chat_id, "text": username})
response = slack_web_client.post("/remove", data={"channel_id": chat_id, "text": username})
assert response.status_code == 200
assert "text" in response.json()
def test_remove_person_user_does_not_exist(slack_web_client, chat_id, username):
response = slack_web_client.post("/remove", data={"channel_id": chat_id, "text": username})
assert response.status_code == 200
assert response.json() == {"text": exceptions.UserDoesNotExist.default_message, "link_names": 1}
def test_remove_person_incorrect_username(slack_web_client, chat_id):
response = slack_web_client.post("/remove", data={"channel_id": chat_id, "text": "BAD_NAME"})
assert response.status_code == 200
assert response.json() == {"text": exceptions.IncorrectUsername.default_message, "link_names": 1}
| 37.365854
| 101
| 0.744778
| 422
| 3,064
| 5.07109
| 0.130332
| 0.064486
| 0.137383
| 0.092523
| 0.863084
| 0.844393
| 0.821028
| 0.78785
| 0.78785
| 0.761682
| 0
| 0.013488
| 0.128916
| 3,064
| 81
| 102
| 37.82716
| 0.78831
| 0
| 0
| 0.520833
| 0
| 0
| 0.09889
| 0
| 0
| 0
| 0
| 0
| 0.416667
| 1
| 0.208333
| false
| 0
| 0.0625
| 0
| 0.270833
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
6a87347d73b024c3dcc8052e3b6bfdfc7a72c9ee
| 108
|
py
|
Python
|
deepSI/system_data/__init__.py
|
GerbenBeintema/deepSI
|
580711210398064bb7f01e41d08b7a248a88b35b
|
[
"BSD-3-Clause"
] | 12
|
2021-03-23T20:30:29.000Z
|
2022-03-01T12:22:41.000Z
|
deepSI/system_data/__init__.py
|
seaveredge/deepSI
|
429dfcc3dba5cebd4ece01ad734c7f543f17f206
|
[
"BSD-3-Clause"
] | 2
|
2022-01-12T14:05:13.000Z
|
2022-03-01T10:18:34.000Z
|
deepSI/system_data/__init__.py
|
seaveredge/deepSI
|
429dfcc3dba5cebd4ece01ad734c7f543f17f206
|
[
"BSD-3-Clause"
] | 7
|
2021-05-26T15:26:31.000Z
|
2022-02-03T00:43:31.000Z
|
from deepSI.system_data.system_data import System_data, System_data_list, System_data_norm, load_system_data
| 108
| 108
| 0.898148
| 18
| 108
| 4.888889
| 0.444444
| 0.681818
| 0.363636
| 0.454545
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.055556
| 108
| 1
| 108
| 108
| 0.862745
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 9
|
6a9d01e4649848ef3001d0cc6a516e2461fdc07d
| 57,154
|
py
|
Python
|
samples/openapi3/client/petstore/python/petstore_api/api/pet_api.py
|
therockstorm/openapi-generator
|
01d0b5d4780ebe2d6025e2b443ec136c6ce16c45
|
[
"Apache-2.0"
] | 4
|
2021-02-20T21:39:04.000Z
|
2021-08-24T13:54:15.000Z
|
samples/openapi3/client/petstore/python/petstore_api/api/pet_api.py
|
therockstorm/openapi-generator
|
01d0b5d4780ebe2d6025e2b443ec136c6ce16c45
|
[
"Apache-2.0"
] | 29
|
2021-04-07T07:38:57.000Z
|
2022-03-30T12:10:22.000Z
|
samples/openapi3/client/petstore/python/petstore_api/api/pet_api.py
|
therockstorm/openapi-generator
|
01d0b5d4780ebe2d6025e2b443ec136c6ce16c45
|
[
"Apache-2.0"
] | 4
|
2020-12-07T02:43:58.000Z
|
2020-12-07T10:23:39.000Z
|
# coding: utf-8
"""
OpenAPI Petstore
This spec is mainly for testing Petstore server and contains fake endpoints, models. Please do not use this for any other purpose. Special characters: \" \\ # noqa: E501
The version of the OpenAPI document: 1.0.0
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from petstore_api.api_client import ApiClient
from petstore_api.exceptions import ( # noqa: F401
ApiTypeError,
ApiValueError
)
class PetApi(object):
"""NOTE: This class is auto generated by OpenAPI Generator
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def add_pet(self, pet, **kwargs): # noqa: E501
"""Add a new pet to the store # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.add_pet(pet, async_req=True)
>>> result = thread.get()
:param pet: Pet object that needs to be added to the store (required)
:type pet: Pet
:param async_req: Whether to execute the request asynchronously.
:type async_req: bool, optional
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:type _preload_content: bool, optional
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: Returns the result object.
If the method is called asynchronously,
returns the request thread.
:rtype: None
"""
kwargs['_return_http_data_only'] = True
return self.add_pet_with_http_info(pet, **kwargs) # noqa: E501
def add_pet_with_http_info(self, pet, **kwargs):  # noqa: E501
    """Add a new pet to the store  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.add_pet_with_http_info(pet, async_req=True)
    >>> result = thread.get()

    :param pet: Pet object that needs to be added to the store (required)
    :type pet: Pet
    :param async_req: Whether to execute the request asynchronously.
    :type async_req: bool, optional
    :param _return_http_data_only: response data without head status code
                                   and headers
    :type _return_http_data_only: bool, optional
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :type _preload_content: bool, optional
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :param _request_auth: set to override the auth_settings for a single
                          request; this effectively ignores the
                          authentication in the spec for a single request.
    :type _request_auth: dict, optional
    :return: Returns the result object.
             If the method is called asynchronously,
             returns the request thread.
    :rtype: None
    """
    # This operation declares its own `servers` list, so the host is
    # chosen locally (via `_host_index`) instead of from the client
    # configuration.
    local_var_hosts = [
        'http://petstore.swagger.io/v2',
        'http://path-server-test.petstore.local/v2'
    ]
    local_var_host = local_var_hosts[0]
    if kwargs.get('_host_index'):
        _host_index = int(kwargs.get('_host_index'))
        if _host_index < 0 or _host_index >= len(local_var_hosts):
            # BUG FIX: the upper bound in this message previously used
            # len(local_var_host) — the length of the URL *string* — and
            # reported e.g. 30 instead of the number of selectable hosts.
            raise ApiValueError(
                "Invalid host index. Must be 0 <= index < %s"
                % len(local_var_hosts)
            )
        local_var_host = local_var_hosts[_host_index]
    # locals() must be captured here, before any further locals are bound,
    # so it holds exactly the declared parameters plus kwargs.
    local_var_params = locals()

    all_params = [
        'pet'
    ]
    all_params.extend(
        [
            'async_req',
            '_return_http_data_only',
            '_preload_content',
            '_request_timeout',
            '_request_auth'
        ]
    )

    for key, val in six.iteritems(local_var_params['kwargs']):
        # '_host_index' was consumed above, so tolerate it here even
        # though it is not a declared parameter.
        if key not in all_params and key != "_host_index":
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method add_pet" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']

    # verify the required parameter 'pet' is set
    if self.api_client.client_side_validation and ('pet' not in local_var_params or  # noqa: E501
                                                   local_var_params['pet'] is None):  # noqa: E501
        raise ApiValueError("Missing the required parameter `pet` when calling `add_pet`")  # noqa: E501

    collection_formats = {}

    path_params = {}
    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}

    # The pet object travels in the request body.
    body_params = None
    if 'pet' in local_var_params:
        body_params = local_var_params['pet']
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json', 'application/xml'])  # noqa: E501

    # Authentication setting
    auth_settings = ['petstore_auth']  # noqa: E501

    response_types_map = {}

    return self.api_client.call_api(
        '/pet', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_types_map=response_types_map,
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        _host=local_var_host,
        collection_formats=collection_formats,
        _request_auth=local_var_params.get('_request_auth'))
def delete_pet(self, pet_id, **kwargs): # noqa: E501
"""Deletes a pet # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_pet(pet_id, async_req=True)
>>> result = thread.get()
:param pet_id: Pet id to delete (required)
:type pet_id: int
:param api_key:
:type api_key: str
:param async_req: Whether to execute the request asynchronously.
:type async_req: bool, optional
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:type _preload_content: bool, optional
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: Returns the result object.
If the method is called asynchronously,
returns the request thread.
:rtype: None
"""
kwargs['_return_http_data_only'] = True
return self.delete_pet_with_http_info(pet_id, **kwargs) # noqa: E501
def delete_pet_with_http_info(self, pet_id, **kwargs):  # noqa: E501
    """Deletes a pet  # noqa: E501

    Issues ``DELETE /pet/{petId}``. Synchronous by default; pass
    ``async_req=True`` to get back a thread whose ``get()`` yields the
    result.

    >>> thread = api.delete_pet_with_http_info(pet_id, async_req=True)
    >>> result = thread.get()

    :param pet_id: Pet id to delete (required)
    :type pet_id: int
    :param api_key:
    :type api_key: str
    :param async_req: Whether to execute the request asynchronously.
    :type async_req: bool, optional
    :param _return_http_data_only: response data without head status code
        and headers
    :type _return_http_data_only: bool, optional
    :param _preload_content: if False, return the urllib3.HTTPResponse
        object without reading/decoding response data. Default is True.
    :type _preload_content: bool, optional
    :param _request_timeout: total timeout as one number, or a
        (connection, read) tuple.
    :param _request_auth: overrides the spec's auth_settings for this
        single request.
    :type _request_auth: dict, optional
    :return: Returns the result object. If the method is called
        asynchronously, returns the request thread.
    :rtype: None
    """
    # Capture locals() first so it holds exactly the declared parameters
    # plus kwargs; unexpected keywords are rejected below.
    params = locals()

    accepted = [
        'pet_id',
        'api_key',
        'async_req',
        '_return_http_data_only',
        '_preload_content',
        '_request_timeout',
        '_request_auth',
    ]
    for name, value in six.iteritems(params['kwargs']):
        if name not in accepted:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method delete_pet" % name
            )
        params[name] = value
    del params['kwargs']

    # verify the required parameter 'pet_id' is set
    if self.api_client.client_side_validation and (
            'pet_id' not in params or params['pet_id'] is None):
        raise ApiValueError("Missing the required parameter `pet_id` when calling `delete_pet`")  # noqa: E501

    path_params = {}
    if 'pet_id' in params:
        path_params['petId'] = params['pet_id']

    header_params = {}
    if 'api_key' in params:
        header_params['api_key'] = params['api_key']

    return self.api_client.call_api(
        '/pet/{petId}', 'DELETE',
        path_params,
        [],
        header_params,
        body=None,
        post_params=[],
        files={},
        response_types_map={},
        auth_settings=['petstore_auth'],
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={},
        _request_auth=params.get('_request_auth'))
def find_pets_by_status(self, status, **kwargs): # noqa: E501
"""Finds Pets by status # noqa: E501
Multiple status values can be provided with comma separated strings # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.find_pets_by_status(status, async_req=True)
>>> result = thread.get()
:param status: Status values that need to be considered for filter (required)
:type status: list[str]
:param async_req: Whether to execute the request asynchronously.
:type async_req: bool, optional
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:type _preload_content: bool, optional
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: Returns the result object.
If the method is called asynchronously,
returns the request thread.
:rtype: list[Pet]
"""
kwargs['_return_http_data_only'] = True
return self.find_pets_by_status_with_http_info(status, **kwargs) # noqa: E501
def find_pets_by_status_with_http_info(self, status, **kwargs):  # noqa: E501
    """Finds Pets by status  # noqa: E501

    Multiple status values can be provided with comma separated strings.
    Issues ``GET /pet/findByStatus``. Synchronous by default; pass
    ``async_req=True`` to get back a thread whose ``get()`` yields the
    result.

    >>> thread = api.find_pets_by_status_with_http_info(status, async_req=True)
    >>> result = thread.get()

    :param status: Status values that need to be considered for filter (required)
    :type status: list[str]
    :param async_req: Whether to execute the request asynchronously.
    :type async_req: bool, optional
    :param _return_http_data_only: response data without head status code
        and headers
    :type _return_http_data_only: bool, optional
    :param _preload_content: if False, return the urllib3.HTTPResponse
        object without reading/decoding response data. Default is True.
    :type _preload_content: bool, optional
    :param _request_timeout: total timeout as one number, or a
        (connection, read) tuple.
    :param _request_auth: overrides the spec's auth_settings for this
        single request.
    :type _request_auth: dict, optional
    :return: Returns the result object. If the method is called
        asynchronously, returns the request thread.
    :rtype: tuple(list[Pet], status_code(int), headers(HTTPHeaderDict))
    """
    # Capture locals() first so it holds exactly the declared parameters
    # plus kwargs; unexpected keywords are rejected below.
    params = locals()

    accepted = [
        'status',
        'async_req',
        '_return_http_data_only',
        '_preload_content',
        '_request_timeout',
        '_request_auth',
    ]
    for name, value in six.iteritems(params['kwargs']):
        if name not in accepted:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method find_pets_by_status" % name
            )
        params[name] = value
    del params['kwargs']

    # verify the required parameter 'status' is set
    if self.api_client.client_side_validation and (
            'status' not in params or params['status'] is None):
        raise ApiValueError("Missing the required parameter `status` when calling `find_pets_by_status`")  # noqa: E501

    collection_formats = {}
    query_params = []
    if params.get('status') is not None:
        query_params.append(('status', params['status']))
        collection_formats['status'] = 'csv'

    header_params = {
        # Representations the client can decode.
        'Accept': self.api_client.select_header_accept(
            ['application/xml', 'application/json'])
    }

    return self.api_client.call_api(
        '/pet/findByStatus', 'GET',
        {},
        query_params,
        header_params,
        body=None,
        post_params=[],
        files={},
        response_types_map={
            200: "list[Pet]",
            400: None,
        },
        auth_settings=['petstore_auth'],
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats,
        _request_auth=params.get('_request_auth'))
def find_pets_by_tags(self, tags, **kwargs): # noqa: E501
"""Finds Pets by tags # noqa: E501
Multiple tags can be provided with comma separated strings. Use tag1, tag2, tag3 for testing. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.find_pets_by_tags(tags, async_req=True)
>>> result = thread.get()
:param tags: Tags to filter by (required)
:type tags: list[str]
:param async_req: Whether to execute the request asynchronously.
:type async_req: bool, optional
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:type _preload_content: bool, optional
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: Returns the result object.
If the method is called asynchronously,
returns the request thread.
:rtype: list[Pet]
"""
kwargs['_return_http_data_only'] = True
return self.find_pets_by_tags_with_http_info(tags, **kwargs) # noqa: E501
def find_pets_by_tags_with_http_info(self, tags, **kwargs):  # noqa: E501
    """Finds Pets by tags  # noqa: E501

    Multiple tags can be provided with comma separated strings. Use tag1,
    tag2, tag3 for testing. Issues ``GET /pet/findByTags``. Synchronous by
    default; pass ``async_req=True`` to get back a thread whose ``get()``
    yields the result.

    >>> thread = api.find_pets_by_tags_with_http_info(tags, async_req=True)
    >>> result = thread.get()

    :param tags: Tags to filter by (required)
    :type tags: list[str]
    :param async_req: Whether to execute the request asynchronously.
    :type async_req: bool, optional
    :param _return_http_data_only: response data without head status code
        and headers
    :type _return_http_data_only: bool, optional
    :param _preload_content: if False, return the urllib3.HTTPResponse
        object without reading/decoding response data. Default is True.
    :type _preload_content: bool, optional
    :param _request_timeout: total timeout as one number, or a
        (connection, read) tuple.
    :param _request_auth: overrides the spec's auth_settings for this
        single request.
    :type _request_auth: dict, optional
    :return: Returns the result object. If the method is called
        asynchronously, returns the request thread.
    :rtype: tuple(list[Pet], status_code(int), headers(HTTPHeaderDict))
    """
    # Capture locals() first so it holds exactly the declared parameters
    # plus kwargs; unexpected keywords are rejected below.
    params = locals()

    accepted = [
        'tags',
        'async_req',
        '_return_http_data_only',
        '_preload_content',
        '_request_timeout',
        '_request_auth',
    ]
    for name, value in six.iteritems(params['kwargs']):
        if name not in accepted:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method find_pets_by_tags" % name
            )
        params[name] = value
    del params['kwargs']

    # verify the required parameter 'tags' is set
    if self.api_client.client_side_validation and (
            'tags' not in params or params['tags'] is None):
        raise ApiValueError("Missing the required parameter `tags` when calling `find_pets_by_tags`")  # noqa: E501

    collection_formats = {}
    query_params = []
    if params.get('tags') is not None:
        query_params.append(('tags', params['tags']))
        collection_formats['tags'] = 'csv'

    header_params = {
        # Representations the client can decode.
        'Accept': self.api_client.select_header_accept(
            ['application/xml', 'application/json'])
    }

    return self.api_client.call_api(
        '/pet/findByTags', 'GET',
        {},
        query_params,
        header_params,
        body=None,
        post_params=[],
        files={},
        response_types_map={
            200: "list[Pet]",
            400: None,
        },
        auth_settings=['petstore_auth'],
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats,
        _request_auth=params.get('_request_auth'))
def get_pet_by_id(self, pet_id, **kwargs): # noqa: E501
"""Find pet by ID # noqa: E501
Returns a single pet # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_pet_by_id(pet_id, async_req=True)
>>> result = thread.get()
:param pet_id: ID of pet to return (required)
:type pet_id: int
:param async_req: Whether to execute the request asynchronously.
:type async_req: bool, optional
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:type _preload_content: bool, optional
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: Returns the result object.
If the method is called asynchronously,
returns the request thread.
:rtype: Pet
"""
kwargs['_return_http_data_only'] = True
return self.get_pet_by_id_with_http_info(pet_id, **kwargs) # noqa: E501
def get_pet_by_id_with_http_info(self, pet_id, **kwargs):  # noqa: E501
    """Find pet by ID  # noqa: E501

    Returns a single pet. Issues ``GET /pet/{petId}``. Synchronous by
    default; pass ``async_req=True`` to get back a thread whose ``get()``
    yields the result.

    >>> thread = api.get_pet_by_id_with_http_info(pet_id, async_req=True)
    >>> result = thread.get()

    :param pet_id: ID of pet to return (required)
    :type pet_id: int
    :param async_req: Whether to execute the request asynchronously.
    :type async_req: bool, optional
    :param _return_http_data_only: response data without head status code
        and headers
    :type _return_http_data_only: bool, optional
    :param _preload_content: if False, return the urllib3.HTTPResponse
        object without reading/decoding response data. Default is True.
    :type _preload_content: bool, optional
    :param _request_timeout: total timeout as one number, or a
        (connection, read) tuple.
    :param _request_auth: overrides the spec's auth_settings for this
        single request.
    :type _request_auth: dict, optional
    :return: Returns the result object. If the method is called
        asynchronously, returns the request thread.
    :rtype: tuple(Pet, status_code(int), headers(HTTPHeaderDict))
    """
    # Capture locals() first so it holds exactly the declared parameters
    # plus kwargs; unexpected keywords are rejected below.
    params = locals()

    accepted = [
        'pet_id',
        'async_req',
        '_return_http_data_only',
        '_preload_content',
        '_request_timeout',
        '_request_auth',
    ]
    for name, value in six.iteritems(params['kwargs']):
        if name not in accepted:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_pet_by_id" % name
            )
        params[name] = value
    del params['kwargs']

    # verify the required parameter 'pet_id' is set
    if self.api_client.client_side_validation and (
            'pet_id' not in params or params['pet_id'] is None):
        raise ApiValueError("Missing the required parameter `pet_id` when calling `get_pet_by_id`")  # noqa: E501

    path_params = {}
    if 'pet_id' in params:
        path_params['petId'] = params['pet_id']

    header_params = {
        # Representations the client can decode.
        'Accept': self.api_client.select_header_accept(
            ['application/xml', 'application/json'])
    }

    return self.api_client.call_api(
        '/pet/{petId}', 'GET',
        path_params,
        [],
        header_params,
        body=None,
        post_params=[],
        files={},
        response_types_map={
            200: "Pet",
            400: None,
            404: None,
        },
        auth_settings=['api_key'],
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={},
        _request_auth=params.get('_request_auth'))
def update_pet(self, pet, **kwargs): # noqa: E501
"""Update an existing pet # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_pet(pet, async_req=True)
>>> result = thread.get()
:param pet: Pet object that needs to be added to the store (required)
:type pet: Pet
:param async_req: Whether to execute the request asynchronously.
:type async_req: bool, optional
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:type _preload_content: bool, optional
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: Returns the result object.
If the method is called asynchronously,
returns the request thread.
:rtype: None
"""
kwargs['_return_http_data_only'] = True
return self.update_pet_with_http_info(pet, **kwargs) # noqa: E501
def update_pet_with_http_info(self, pet, **kwargs):  # noqa: E501
    """Update an existing pet  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.update_pet_with_http_info(pet, async_req=True)
    >>> result = thread.get()

    :param pet: Pet object that needs to be added to the store (required)
    :type pet: Pet
    :param async_req: Whether to execute the request asynchronously.
    :type async_req: bool, optional
    :param _return_http_data_only: response data without head status code
                                   and headers
    :type _return_http_data_only: bool, optional
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :type _preload_content: bool, optional
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :param _request_auth: set to override the auth_settings for a single
                          request; this effectively ignores the
                          authentication in the spec for a single request.
    :type _request_auth: dict, optional
    :return: Returns the result object.
             If the method is called asynchronously,
             returns the request thread.
    :rtype: None
    """
    # This operation declares its own `servers` list, so the host is
    # chosen locally (via `_host_index`) instead of from the client
    # configuration.
    local_var_hosts = [
        'http://petstore.swagger.io/v2',
        'http://path-server-test.petstore.local/v2'
    ]
    local_var_host = local_var_hosts[0]
    if kwargs.get('_host_index'):
        _host_index = int(kwargs.get('_host_index'))
        if _host_index < 0 or _host_index >= len(local_var_hosts):
            # BUG FIX: the upper bound in this message previously used
            # len(local_var_host) — the length of the URL *string* — and
            # reported e.g. 30 instead of the number of selectable hosts.
            raise ApiValueError(
                "Invalid host index. Must be 0 <= index < %s"
                % len(local_var_hosts)
            )
        local_var_host = local_var_hosts[_host_index]
    # locals() must be captured here, before any further locals are bound,
    # so it holds exactly the declared parameters plus kwargs.
    local_var_params = locals()

    all_params = [
        'pet'
    ]
    all_params.extend(
        [
            'async_req',
            '_return_http_data_only',
            '_preload_content',
            '_request_timeout',
            '_request_auth'
        ]
    )

    for key, val in six.iteritems(local_var_params['kwargs']):
        # '_host_index' was consumed above, so tolerate it here even
        # though it is not a declared parameter.
        if key not in all_params and key != "_host_index":
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method update_pet" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']

    # verify the required parameter 'pet' is set
    if self.api_client.client_side_validation and ('pet' not in local_var_params or  # noqa: E501
                                                   local_var_params['pet'] is None):  # noqa: E501
        raise ApiValueError("Missing the required parameter `pet` when calling `update_pet`")  # noqa: E501

    collection_formats = {}

    path_params = {}
    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}

    # The pet object travels in the request body.
    body_params = None
    if 'pet' in local_var_params:
        body_params = local_var_params['pet']
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json', 'application/xml'])  # noqa: E501

    # Authentication setting
    auth_settings = ['petstore_auth']  # noqa: E501

    response_types_map = {}

    return self.api_client.call_api(
        '/pet', 'PUT',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_types_map=response_types_map,
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        _host=local_var_host,
        collection_formats=collection_formats,
        _request_auth=local_var_params.get('_request_auth'))
def update_pet_with_form(self, pet_id, **kwargs): # noqa: E501
"""Updates a pet in the store with form data # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_pet_with_form(pet_id, async_req=True)
>>> result = thread.get()
:param pet_id: ID of pet that needs to be updated (required)
:type pet_id: int
:param name: Updated name of the pet
:type name: str
:param status: Updated status of the pet
:type status: str
:param async_req: Whether to execute the request asynchronously.
:type async_req: bool, optional
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:type _preload_content: bool, optional
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: Returns the result object.
If the method is called asynchronously,
returns the request thread.
:rtype: None
"""
kwargs['_return_http_data_only'] = True
return self.update_pet_with_form_with_http_info(pet_id, **kwargs) # noqa: E501
def update_pet_with_form_with_http_info(self, pet_id, **kwargs):  # noqa: E501
    """Updates a pet in the store with form data  # noqa: E501

    Issues ``POST /pet/{petId}`` with url-encoded form fields. Synchronous
    by default; pass ``async_req=True`` to get back a thread whose
    ``get()`` yields the result.

    >>> thread = api.update_pet_with_form_with_http_info(pet_id, async_req=True)
    >>> result = thread.get()

    :param pet_id: ID of pet that needs to be updated (required)
    :type pet_id: int
    :param name: Updated name of the pet
    :type name: str
    :param status: Updated status of the pet
    :type status: str
    :param async_req: Whether to execute the request asynchronously.
    :type async_req: bool, optional
    :param _return_http_data_only: response data without head status code
        and headers
    :type _return_http_data_only: bool, optional
    :param _preload_content: if False, return the urllib3.HTTPResponse
        object without reading/decoding response data. Default is True.
    :type _preload_content: bool, optional
    :param _request_timeout: total timeout as one number, or a
        (connection, read) tuple.
    :param _request_auth: overrides the spec's auth_settings for this
        single request.
    :type _request_auth: dict, optional
    :return: Returns the result object. If the method is called
        asynchronously, returns the request thread.
    :rtype: None
    """
    # Capture locals() first so it holds exactly the declared parameters
    # plus kwargs; unexpected keywords are rejected below.
    params = locals()

    accepted = [
        'pet_id',
        'name',
        'status',
        'async_req',
        '_return_http_data_only',
        '_preload_content',
        '_request_timeout',
        '_request_auth',
    ]
    for name_, value in six.iteritems(params['kwargs']):
        if name_ not in accepted:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method update_pet_with_form" % name_
            )
        params[name_] = value
    del params['kwargs']

    # verify the required parameter 'pet_id' is set
    if self.api_client.client_side_validation and (
            'pet_id' not in params or params['pet_id'] is None):
        raise ApiValueError("Missing the required parameter `pet_id` when calling `update_pet_with_form`")  # noqa: E501

    path_params = {}
    if 'pet_id' in params:
        path_params['petId'] = params['pet_id']

    # Optional form fields are only sent when supplied.
    form_params = []
    if 'name' in params:
        form_params.append(('name', params['name']))
    if 'status' in params:
        form_params.append(('status', params['status']))

    header_params = {
        'Content-Type': self.api_client.select_header_content_type(
            ['application/x-www-form-urlencoded'])
    }

    return self.api_client.call_api(
        '/pet/{petId}', 'POST',
        path_params,
        [],
        header_params,
        body=None,
        post_params=form_params,
        files={},
        response_types_map={},
        auth_settings=['petstore_auth'],
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={},
        _request_auth=params.get('_request_auth'))
def upload_file(self, pet_id, **kwargs):  # noqa: E501
    """uploads an image  # noqa: E501

    Synchronous by default; pass async_req=True for an asynchronous call:

    >>> thread = api.upload_file(pet_id, async_req=True)
    >>> result = thread.get()

    :param pet_id: ID of pet to update (required)
    :type pet_id: int
    :param additional_metadata: Additional data to pass to server
    :type additional_metadata: str
    :param file: file to upload
    :type file: file
    :param async_req: Whether to execute the request asynchronously.
    :type async_req: bool, optional
    :param _preload_content: if False, the raw urllib3.HTTPResponse object is
                             returned without reading/decoding response data.
                             Default is True.
    :type _preload_content: bool, optional
    :param _request_timeout: total request timeout (single number) or a
                             (connection, read) timeout pair.
    :return: the result object, or the request thread when async.
    :rtype: ApiResponse
    """
    # This convenience wrapper only returns the deserialized body, so ask
    # the lower-level call to strip the HTTP status code and headers.
    kwargs['_return_http_data_only'] = True
    return self.upload_file_with_http_info(pet_id, **kwargs)  # noqa: E501
def upload_file_with_http_info(self, pet_id, **kwargs):  # noqa: E501
    """uploads an image  # noqa: E501

    Synchronous by default; pass async_req=True for an asynchronous call:

    >>> thread = api.upload_file_with_http_info(pet_id, async_req=True)
    >>> result = thread.get()

    :param pet_id: ID of pet to update (required)
    :type pet_id: int
    :param additional_metadata: Additional data to pass to server
    :type additional_metadata: str
    :param file: file to upload
    :type file: file
    :param async_req: Whether to execute the request asynchronously.
    :type async_req: bool, optional
    :param _return_http_data_only: response data without head status code
                                   and headers
    :type _return_http_data_only: bool, optional
    :param _preload_content: if False, the raw urllib3.HTTPResponse object is
                             returned without reading/decoding response data.
                             Default is True.
    :type _preload_content: bool, optional
    :param _request_timeout: total request timeout (single number) or a
                             (connection, read) timeout pair.
    :param _request_auth: overrides the spec's auth_settings for this
                          single request.
    :type _request_auth: dict, optional
    :return: the result object, or the request thread when async.
    :rtype: tuple(ApiResponse, status_code(int), headers(HTTPHeaderDict))
    """
    # locals() must be captured before any other local is bound, so that it
    # holds exactly 'self', 'pet_id' and 'kwargs'.
    params = locals()

    accepted = [
        'pet_id',
        'additional_metadata',
        'file',
        # framework-level options accepted by every endpoint
        'async_req',
        '_return_http_data_only',
        '_preload_content',
        '_request_timeout',
        '_request_auth',
    ]
    for key, val in six.iteritems(params['kwargs']):
        if key not in accepted:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method upload_file" % key
            )
        params[key] = val
    del params['kwargs']

    # verify the required parameter 'pet_id' is set
    if self.api_client.client_side_validation and params.get('pet_id') is None:  # noqa: E501
        raise ApiValueError("Missing the required parameter `pet_id` when calling `upload_file`")  # noqa: E501

    collection_formats = {}

    path_params = {}
    if 'pet_id' in params:
        path_params['petId'] = params['pet_id']  # noqa: E501

    query_params = []
    header_params = {}

    form_params = []
    local_var_files = {}
    if 'additional_metadata' in params:
        form_params.append(('additionalMetadata', params['additional_metadata']))  # noqa: E501
    if 'file' in params:
        local_var_files['file'] = params['file']  # noqa: E501

    body_params = None
    # negotiated response / request content types
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['multipart/form-data'])  # noqa: E501

    # Authentication setting
    auth_settings = ['petstore_auth']  # noqa: E501

    response_types_map = {200: "ApiResponse"}

    return self.api_client.call_api(
        '/pet/{petId}/uploadImage', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_types_map=response_types_map,
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats,
        _request_auth=params.get('_request_auth'))
def upload_file_with_required_file(self, pet_id, required_file, **kwargs):  # noqa: E501
    """uploads an image (required)  # noqa: E501

    Synchronous by default; pass async_req=True for an asynchronous call:

    >>> thread = api.upload_file_with_required_file(pet_id, required_file, async_req=True)
    >>> result = thread.get()

    :param pet_id: ID of pet to update (required)
    :type pet_id: int
    :param required_file: file to upload (required)
    :type required_file: file
    :param additional_metadata: Additional data to pass to server
    :type additional_metadata: str
    :param async_req: Whether to execute the request asynchronously.
    :type async_req: bool, optional
    :param _preload_content: if False, the raw urllib3.HTTPResponse object is
                             returned without reading/decoding response data.
                             Default is True.
    :type _preload_content: bool, optional
    :param _request_timeout: total request timeout (single number) or a
                             (connection, read) timeout pair.
    :return: the result object, or the request thread when async.
    :rtype: ApiResponse
    """
    # This convenience wrapper only returns the deserialized body, so ask
    # the lower-level call to strip the HTTP status code and headers.
    kwargs['_return_http_data_only'] = True
    return self.upload_file_with_required_file_with_http_info(pet_id, required_file, **kwargs)  # noqa: E501
def upload_file_with_required_file_with_http_info(self, pet_id, required_file, **kwargs):  # noqa: E501
    """uploads an image (required)  # noqa: E501

    Synchronous by default; pass async_req=True for an asynchronous call:

    >>> thread = api.upload_file_with_required_file_with_http_info(pet_id, required_file, async_req=True)
    >>> result = thread.get()

    :param pet_id: ID of pet to update (required)
    :type pet_id: int
    :param required_file: file to upload (required)
    :type required_file: file
    :param additional_metadata: Additional data to pass to server
    :type additional_metadata: str
    :param async_req: Whether to execute the request asynchronously.
    :type async_req: bool, optional
    :param _return_http_data_only: response data without head status code
                                   and headers
    :type _return_http_data_only: bool, optional
    :param _preload_content: if False, the raw urllib3.HTTPResponse object is
                             returned without reading/decoding response data.
                             Default is True.
    :type _preload_content: bool, optional
    :param _request_timeout: total request timeout (single number) or a
                             (connection, read) timeout pair.
    :param _request_auth: overrides the spec's auth_settings for this
                          single request.
    :type _request_auth: dict, optional
    :return: the result object, or the request thread when async.
    :rtype: tuple(ApiResponse, status_code(int), headers(HTTPHeaderDict))
    """
    # locals() must be captured before any other local is bound, so that it
    # holds exactly 'self', 'pet_id', 'required_file' and 'kwargs'.
    params = locals()

    accepted = [
        'pet_id',
        'required_file',
        'additional_metadata',
        # framework-level options accepted by every endpoint
        'async_req',
        '_return_http_data_only',
        '_preload_content',
        '_request_timeout',
        '_request_auth',
    ]
    for key, val in six.iteritems(params['kwargs']):
        if key not in accepted:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method upload_file_with_required_file" % key
            )
        params[key] = val
    del params['kwargs']

    # verify the required parameter 'pet_id' is set
    if self.api_client.client_side_validation and params.get('pet_id') is None:  # noqa: E501
        raise ApiValueError("Missing the required parameter `pet_id` when calling `upload_file_with_required_file`")  # noqa: E501
    # verify the required parameter 'required_file' is set
    if self.api_client.client_side_validation and params.get('required_file') is None:  # noqa: E501
        raise ApiValueError("Missing the required parameter `required_file` when calling `upload_file_with_required_file`")  # noqa: E501

    collection_formats = {}

    path_params = {}
    if 'pet_id' in params:
        path_params['petId'] = params['pet_id']  # noqa: E501

    query_params = []
    header_params = {}

    form_params = []
    local_var_files = {}
    if 'additional_metadata' in params:
        form_params.append(('additionalMetadata', params['additional_metadata']))  # noqa: E501
    if 'required_file' in params:
        local_var_files['requiredFile'] = params['required_file']  # noqa: E501

    body_params = None
    # negotiated response / request content types
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['multipart/form-data'])  # noqa: E501

    # Authentication setting
    auth_settings = ['petstore_auth']  # noqa: E501

    response_types_map = {200: "ApiResponse"}

    return self.api_client.call_api(
        '/fake/{petId}/uploadImageWithRequiredFile', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_types_map=response_types_map,
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats,
        _request_auth=params.get('_request_auth'))
| 43.167674
| 174
| 0.584229
| 6,483
| 57,154
| 4.885393
| 0.039179
| 0.043193
| 0.059674
| 0.03069
| 0.965048
| 0.957091
| 0.955008
| 0.947872
| 0.935969
| 0.917877
| 0
| 0.012588
| 0.34393
| 57,154
| 1,323
| 175
| 43.200302
| 0.832067
| 0.465112
| 0
| 0.738255
| 0
| 0
| 0.171759
| 0.031308
| 0
| 0
| 0
| 0
| 0
| 1
| 0.031879
| false
| 0
| 0.008389
| 0
| 0.072148
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0a85d1a820d17049bd1cbce10baced90485ab958
| 3,423
|
py
|
Python
|
Implementations/software/python/genkat_aead.py
|
Tetsu-Iwata/romulus
|
52be582102ee332fc51bc337166793312e68551e
|
[
"MIT"
] | 10
|
2019-09-16T08:47:13.000Z
|
2022-01-27T16:24:17.000Z
|
Implementations/software/python/genkat_aead.py
|
Tetsu-Iwata/romulus
|
52be582102ee332fc51bc337166793312e68551e
|
[
"MIT"
] | 1
|
2020-06-26T14:14:38.000Z
|
2021-05-26T14:22:58.000Z
|
Implementations/software/python/genkat_aead.py
|
Tetsu-Iwata/romulus
|
52be582102ee332fc51bc337166793312e68551e
|
[
"MIT"
] | 2
|
2021-04-07T09:04:22.000Z
|
2021-06-02T02:53:11.000Z
|
# Generates AEAD known-answer-test (KAT) vectors for a Romulus variant and
# verifies the encrypt/decrypt round trip for every vector.
import sys

# for generation of Romulus-N test vectors
#from ROMULUS_N import *
#filename = "Python_AEAD_KAT_ROMULUS-N.txt"

# for generation of Romulus-M test vectors
#from ROMULUS_M import *
#filename = "Python_AEAD_KAT_ROMULUS-M.txt"

# for generation of Romulus-T test vectors
from ROMULUS_T import *
filename = "Python_AEAD_KAT_ROMULUS-T.txt"

MAX_MESSAGE_LENGTH = 32
MAX_ASSOCIATED_DATA_LENGTH = 32
CRYPTO_KEYBYTES = 16
CRYPTO_NPUBBYTES = 16
CRYPTO_ABYTES = 16


def _hex(byte_list):
    """Return *byte_list* (ints 0..255) as an upper-case hex string."""
    return "".join("{:02X}".format(b) for b in byte_list)


def _run_kat(fic, count, mlen, adlen):
    """Generate, write and verify one KAT entry.

    Builds deterministic key/nonce/message/AD byte lists, writes the entry
    to the open file *fic*, encrypts, then decrypts and checks that the
    plaintext round-trips. Exits the process with status 1 on any failure
    (same behaviour as the original duplicated code).
    """
    key = [i % 256 for i in range(CRYPTO_KEYBYTES)]
    nonce = [i % 256 for i in range(CRYPTO_NPUBBYTES)]
    msg = [i % 256 for i in range(mlen)]
    ad = [i % 256 for i in range(adlen)]

    print("\n\n***************************************** \n Count = %i" % (count))
    fic.write("Count = {}".format(count) + "\n")
    fic.write("Key = " + _hex(key) + "\n")
    fic.write("Nonce = " + _hex(nonce) + "\n")
    fic.write("PT = " + _hex(msg) + "\n")
    fic.write("AD = " + _hex(ad) + "\n")

    print("\n-------- ENCRYPT --------\n")
    ct = crypto_aead_encrypt(msg, ad, nonce, key)
    fic.write("CT = " + _hex(ct) + "\n")
    fic.write("\n")

    print("\n-------- DECRYPT --------\n")
    ret, msg2 = crypto_aead_decrypt(ct, ad, nonce, key)
    print("ret = %i" % (ret))
    print("msg =")
    print(msg)
    print("ct =")
    print(ct)
    print("msg2 =")
    print(msg2)

    if ret:
        fic.write("Error: crypto_aead_decrypt returned non-zero (ret={}).".format(ret) + "\n")
        sys.exit(1)
    if msg != msg2:
        fic.write("Error: crypto_aead_decrypt did not recover the plaintext" + "\n")
        sys.exit(1)


# The `with` block guarantees the KAT file is flushed and closed even when
# a verification failure exits early.
with open(filename, "w") as fic:
    count = 0
    # Exhaustive short inputs: every (mlen, adlen) pair up to the maxima.
    for mlen in range(MAX_MESSAGE_LENGTH + 1):
        for adlen in range(MAX_ASSOCIATED_DATA_LENGTH + 1):
            count += 1
            _run_kat(fic, count, mlen, adlen)
    # One extra long-input vector beyond the exhaustive range.
    count += 1
    _run_kat(fic, count, 243, 147)

#t = input("press key")
| 31.694444
| 99
| 0.523517
| 459
| 3,423
| 3.762527
| 0.152505
| 0.083382
| 0.052113
| 0.092646
| 0.815287
| 0.786335
| 0.727273
| 0.727273
| 0.727273
| 0.727273
| 0
| 0.030232
| 0.255916
| 3,423
| 107
| 100
| 31.990654
| 0.647821
| 0.080047
| 0
| 0.810811
| 0
| 0
| 0.232113
| 0.039235
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.013514
| 0
| 0.013514
| 0.27027
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0a955021f1cc81dea4879a602d27d81b84e85840
| 18,090
|
py
|
Python
|
theano/typed_list/tests/test_basic.py
|
shaibagon/Theano
|
b4244cfaa1c99007015bb01e859699eec3518053
|
[
"BSD-3-Clause"
] | 1
|
2018-01-31T12:29:10.000Z
|
2018-01-31T12:29:10.000Z
|
theano/typed_list/tests/test_basic.py
|
AtousaTorabi/Theano_old
|
ba2d2f74406243112e813df31429721c791a889a
|
[
"BSD-3-Clause"
] | null | null | null |
theano/typed_list/tests/test_basic.py
|
AtousaTorabi/Theano_old
|
ba2d2f74406243112e813df31429721c791a889a
|
[
"BSD-3-Clause"
] | null | null | null |
import unittest
import numpy
import theano
import theano.typed_list
from theano import tensor as T
from theano.tensor.type_other import SliceType
from theano.typed_list.type import TypedListType
from theano.typed_list.basic import (GetItem, Insert,
Append, Extend, Remove, Reverse,
Index, Count, Length)
from theano import sparse
from theano.tests import unittest_tools as utt
import scipy.sparse as sp
# Borrowed from tensor/tests/test_basic.py.
def rand_ranged_matrix(minimum, maximum, shape):
    """Return a random array of *shape* with entries in [minimum, maximum)."""
    span = maximum - minimum
    values = numpy.random.rand(*shape) * span + minimum
    return numpy.asarray(values, dtype=theano.config.floatX)
# Borrowed from sparse/tests/test_basic.py.
def random_lil(shape, dtype, nnz):
    """Return a random ``scipy.sparse.lil_matrix`` of *shape* and *dtype*.

    Performs *nnz* random assignments; index collisions (or an int value
    that rounds to 0) may leave fewer than *nnz* stored entries.
    """
    rval = sp.lil_matrix(shape, dtype=dtype)
    huge = 2 ** 30
    for k in range(nnz):
        # set non-zeros in random locations (row x, col y)
        # randint(1, huge + 1) draws from [1, huge], the same range as the
        # deprecated numpy.random.random_integers(huge) used previously
        # (removed in modern NumPy).
        idx = numpy.random.randint(1, huge + 1, size=2) % shape
        value = numpy.random.rand()
        # if dtype *int*, value will always be zeros!
        if "int" in dtype:
            value = int(value * 100)
        # The call to tuple is needed as scipy 0.13.1 does not support
        # ndarray with length 2 as idx tuple.
        rval.__setitem__(
            tuple(idx),
            value)
    return rval
class test_get_item(unittest.TestCase):
    """Tests for the typed-list ``GetItem`` op and the ``[]`` interface."""

    def setUp(self):
        # Seed the shared RNG so random matrices are reproducible per run.
        utt.seed_rng()

    def test_sanity_check_slice(self):
        """Indexing with a symbolic slice yields a list, not a tensor."""
        mySymbolicMatricesList = TypedListType(T.TensorType(
            theano.config.floatX, (False, False)))()
        mySymbolicSlice = SliceType()()
        z = GetItem()(mySymbolicMatricesList, mySymbolicSlice)
        # A slice must produce a (typed-list) variable, not a TensorVariable.
        self.assertFalse(isinstance(z, T.TensorVariable))
        f = theano.function([mySymbolicMatricesList, mySymbolicSlice],
                            z)
        x = rand_ranged_matrix(-1000, 1000, [100, 101])
        self.assertTrue(numpy.array_equal(f([x], slice(0, 1, 1)), [x]))

    def test_sanity_check_single(self):
        """Indexing with a symbolic int64 scalar returns the element itself."""
        mySymbolicMatricesList = TypedListType(T.TensorType(
            theano.config.floatX, (False, False)))()
        mySymbolicScalar = T.scalar(dtype='int64')
        z = GetItem()(mySymbolicMatricesList, mySymbolicScalar)
        f = theano.function([mySymbolicMatricesList, mySymbolicScalar],
                            z)
        x = rand_ranged_matrix(-1000, 1000, [100, 101])
        # NOTE(review): `y` is unused in this test.
        y = rand_ranged_matrix(-1000, 1000, [100, 101])
        self.assertTrue(numpy.array_equal(f([x], numpy.asarray(0,
                                                               dtype='int64')), x))

    def test_interface(self):
        """``lst[scalar]`` and ``lst[0]`` build the same graph as GetItem."""
        mySymbolicMatricesList = TypedListType(T.TensorType(
            theano.config.floatX, (False, False)))()
        mySymbolicScalar = T.scalar(dtype='int64')
        z = mySymbolicMatricesList[mySymbolicScalar]
        f = theano.function([mySymbolicMatricesList, mySymbolicScalar],
                            z)
        x = rand_ranged_matrix(-1000, 1000, [100, 101])
        self.assertTrue(numpy.array_equal(f([x], numpy.asarray(0,
                                                               dtype='int64')), x))
        # Same thing with a Python constant index.
        z = mySymbolicMatricesList[0]
        f = theano.function([mySymbolicMatricesList],
                            z)
        self.assertTrue(numpy.array_equal(f([x]), x))

    def test_wrong_input(self):
        """Indexing a list with a matrix (invalid index type) must raise."""
        mySymbolicMatricesList = TypedListType(T.TensorType(
            theano.config.floatX, (False, False)))()
        mySymbolicMatrix = T.matrix()
        self.assertRaises(TypeError, GetItem(), mySymbolicMatricesList,
                          mySymbolicMatrix)

    def test_constant_input(self):
        """GetItem accepts constant int and constant slice indices."""
        mySymbolicMatricesList = TypedListType(T.TensorType(
            theano.config.floatX, (False, False)))()
        z = GetItem()(mySymbolicMatricesList, 0)
        f = theano.function([mySymbolicMatricesList],
                            z)
        x = rand_ranged_matrix(-1000, 1000, [100, 101])
        self.assertTrue(numpy.array_equal(f([x]), x))
        z = GetItem()(mySymbolicMatricesList, slice(0, 1, 1))
        f = theano.function([mySymbolicMatricesList],
                            z)
        self.assertTrue(numpy.array_equal(f([x]), [x]))
class test_append(unittest.TestCase):
    """Tests for the typed-list ``Append`` op and the ``.append`` method."""

    def test_inplace(self):
        """The destructive variant (Append(True)) appends an element."""
        matrices = TypedListType(T.TensorType(
            theano.config.floatX, (False, False)))()
        extra = T.matrix()
        out = Append(True)(matrices, extra)
        fn = theano.function([matrices, extra], out,
                             accept_inplace=True)
        first = rand_ranged_matrix(-1000, 1000, [100, 101])
        second = rand_ranged_matrix(-1000, 1000, [100, 101])
        self.assertTrue(numpy.array_equal(fn([first], second),
                                          [first, second]))

    def test_sanity_check(self):
        """The non-destructive op returns the list with the element added."""
        matrices = TypedListType(T.TensorType(
            theano.config.floatX, (False, False)))()
        extra = T.matrix()
        out = Append()(matrices, extra)
        fn = theano.function([matrices, extra], out)
        first = rand_ranged_matrix(-1000, 1000, [100, 101])
        second = rand_ranged_matrix(-1000, 1000, [100, 101])
        self.assertTrue(numpy.array_equal(fn([first], second),
                                          [first, second]))

    def test_interfaces(self):
        """``lst.append(m)`` builds the same graph as ``Append()``."""
        matrices = TypedListType(T.TensorType(
            theano.config.floatX, (False, False)))()
        extra = T.matrix()
        out = matrices.append(extra)
        fn = theano.function([matrices, extra], out)
        first = rand_ranged_matrix(-1000, 1000, [100, 101])
        second = rand_ranged_matrix(-1000, 1000, [100, 101])
        self.assertTrue(numpy.array_equal(fn([first], second),
                                          [first, second]))
class test_extend(unittest.TestCase):
    """Tests for the typed-list ``Extend`` op and the ``.extend`` method."""

    def test_inplace(self):
        """The destructive variant (Extend(True)) concatenates two lists."""
        mySymbolicMatricesList1 = TypedListType(T.TensorType(
            theano.config.floatX, (False, False)))()
        mySymbolicMatricesList2 = TypedListType(T.TensorType(
            theano.config.floatX, (False, False)))()
        z = Extend(True)(mySymbolicMatricesList1, mySymbolicMatricesList2)
        f = theano.function([mySymbolicMatricesList1, mySymbolicMatricesList2],
                            z, accept_inplace=True)
        x = rand_ranged_matrix(-1000, 1000, [100, 101])
        y = rand_ranged_matrix(-1000, 1000, [100, 101])
        self.assertTrue(numpy.array_equal(f([x], [y]), [x, y]))

    def test_sanity_check(self):
        """The non-destructive op returns the concatenation of both lists."""
        mySymbolicMatricesList1 = TypedListType(T.TensorType(
            theano.config.floatX, (False, False)))()
        mySymbolicMatricesList2 = TypedListType(T.TensorType(
            theano.config.floatX, (False, False)))()
        z = Extend()(mySymbolicMatricesList1, mySymbolicMatricesList2)
        f = theano.function([mySymbolicMatricesList1, mySymbolicMatricesList2],
                            z)
        x = rand_ranged_matrix(-1000, 1000, [100, 101])
        y = rand_ranged_matrix(-1000, 1000, [100, 101])
        self.assertTrue(numpy.array_equal(f([x], [y]), [x, y]))

    def test_interface(self):
        """``lst1.extend(lst2)`` builds the same graph as ``Extend()``."""
        mySymbolicMatricesList1 = TypedListType(T.TensorType(
            theano.config.floatX, (False, False)))()
        mySymbolicMatricesList2 = TypedListType(T.TensorType(
            theano.config.floatX, (False, False)))()
        z = mySymbolicMatricesList1.extend(mySymbolicMatricesList2)
        f = theano.function([mySymbolicMatricesList1, mySymbolicMatricesList2],
                            z)
        x = rand_ranged_matrix(-1000, 1000, [100, 101])
        y = rand_ranged_matrix(-1000, 1000, [100, 101])
        self.assertTrue(numpy.array_equal(f([x], [y]), [x, y]))
class test_insert(unittest.TestCase):
    """Tests for the typed-list ``Insert`` op and the ``.insert`` method."""

    def test_inplace(self):
        """The destructive variant (Insert(True)) inserts at a symbolic index."""
        mySymbolicMatricesList = TypedListType(T.TensorType(
            theano.config.floatX, (False, False)))()
        myMatrix = T.matrix()
        myScalar = T.scalar(dtype='int64')
        z = Insert(True)(mySymbolicMatricesList, myScalar, myMatrix)
        f = theano.function([mySymbolicMatricesList, myScalar, myMatrix], z,
                            accept_inplace=True)
        x = rand_ranged_matrix(-1000, 1000, [100, 101])
        y = rand_ranged_matrix(-1000, 1000, [100, 101])
        # Inserting y at position 1 of [x] yields [x, y].
        self.assertTrue(numpy.array_equal(f([x], numpy.asarray(1,
                                                               dtype='int64'), y), [x, y]))

    def test_sanity_check(self):
        """The non-destructive op inserts the element at the given index."""
        mySymbolicMatricesList = TypedListType(T.TensorType(
            theano.config.floatX, (False, False)))()
        myMatrix = T.matrix()
        myScalar = T.scalar(dtype='int64')
        z = Insert()(mySymbolicMatricesList, myScalar, myMatrix)
        f = theano.function([mySymbolicMatricesList, myScalar, myMatrix], z)
        x = rand_ranged_matrix(-1000, 1000, [100, 101])
        y = rand_ranged_matrix(-1000, 1000, [100, 101])
        self.assertTrue(numpy.array_equal(f([x], numpy.asarray(1,
                                                               dtype='int64'), y), [x, y]))

    def test_interface(self):
        """``lst.insert(i, m)`` builds the same graph as ``Insert()``."""
        mySymbolicMatricesList = TypedListType(T.TensorType(
            theano.config.floatX, (False, False)))()
        myMatrix = T.matrix()
        myScalar = T.scalar(dtype='int64')
        z = mySymbolicMatricesList.insert(myScalar, myMatrix)
        f = theano.function([mySymbolicMatricesList, myScalar, myMatrix], z)
        x = rand_ranged_matrix(-1000, 1000, [100, 101])
        y = rand_ranged_matrix(-1000, 1000, [100, 101])
        self.assertTrue(numpy.array_equal(f([x], numpy.asarray(1,
                                                               dtype='int64'), y), [x, y]))
class test_remove(unittest.TestCase):
    """Tests for the typed-list ``Remove`` op and the ``.remove`` method."""

    def test_inplace(self):
        """The destructive variant (Remove(True)) drops a matching element."""
        matrices = TypedListType(T.TensorType(
            theano.config.floatX, (False, False)))()
        target = T.matrix()
        out = Remove(True)(matrices, target)
        fn = theano.function([matrices, target], out,
                             accept_inplace=True)
        kept = rand_ranged_matrix(-1000, 1000, [100, 101])
        removed = rand_ranged_matrix(-1000, 1000, [100, 101])
        self.assertTrue(numpy.array_equal(fn([kept, removed], removed),
                                          [kept]))

    def test_sanity_check(self):
        """The non-destructive op returns the list without the element."""
        matrices = TypedListType(T.TensorType(
            theano.config.floatX, (False, False)))()
        target = T.matrix()
        out = Remove()(matrices, target)
        fn = theano.function([matrices, target], out)
        kept = rand_ranged_matrix(-1000, 1000, [100, 101])
        removed = rand_ranged_matrix(-1000, 1000, [100, 101])
        self.assertTrue(numpy.array_equal(fn([kept, removed], removed),
                                          [kept]))

    def test_interface(self):
        """``lst.remove(m)`` builds the same graph as ``Remove()``."""
        matrices = TypedListType(T.TensorType(
            theano.config.floatX, (False, False)))()
        target = T.matrix()
        out = matrices.remove(target)
        fn = theano.function([matrices, target], out)
        kept = rand_ranged_matrix(-1000, 1000, [100, 101])
        removed = rand_ranged_matrix(-1000, 1000, [100, 101])
        self.assertTrue(numpy.array_equal(fn([kept, removed], removed),
                                          [kept]))
class test_reverse(unittest.TestCase):
    """Tests for the typed-list ``Reverse`` op and the ``.reverse`` method."""

    def test_inplace(self):
        """The destructive variant (Reverse(True)) reverses element order."""
        matrices = TypedListType(T.TensorType(
            theano.config.floatX, (False, False)))()
        out = Reverse(True)(matrices)
        fn = theano.function([matrices], out,
                             accept_inplace=True)
        first = rand_ranged_matrix(-1000, 1000, [100, 101])
        second = rand_ranged_matrix(-1000, 1000, [100, 101])
        self.assertTrue(numpy.array_equal(fn([first, second]),
                                          [second, first]))

    def test_sanity_check(self):
        """The non-destructive op returns the list in reverse order."""
        matrices = TypedListType(T.TensorType(
            theano.config.floatX, (False, False)))()
        out = Reverse()(matrices)
        fn = theano.function([matrices], out)
        first = rand_ranged_matrix(-1000, 1000, [100, 101])
        second = rand_ranged_matrix(-1000, 1000, [100, 101])
        self.assertTrue(numpy.array_equal(fn([first, second]),
                                          [second, first]))

    def test_interface(self):
        """``lst.reverse()`` builds the same graph as ``Reverse()``."""
        matrices = TypedListType(T.TensorType(
            theano.config.floatX, (False, False)))()
        out = matrices.reverse()
        fn = theano.function([matrices], out)
        first = rand_ranged_matrix(-1000, 1000, [100, 101])
        second = rand_ranged_matrix(-1000, 1000, [100, 101])
        self.assertTrue(numpy.array_equal(fn([first, second]),
                                          [second, first]))
class test_index(unittest.TestCase):
    """Tests for the typed-list ``Index`` op (position of first match)."""

    def test_sanity_check(self):
        """Index() returns the position of the matching element."""
        mySymbolicMatricesList = TypedListType(T.TensorType(
            theano.config.floatX, (False, False)))()
        myMatrix = T.matrix()
        z = Index()(mySymbolicMatricesList, myMatrix)
        f = theano.function([mySymbolicMatricesList, myMatrix], z)
        x = rand_ranged_matrix(-1000, 1000, [100, 101])
        y = rand_ranged_matrix(-1000, 1000, [100, 101])
        self.assertTrue(f([x, y], y) == 1)

    def test_interface(self):
        """The method form is named ``ind`` (not ``index``) on typed lists."""
        mySymbolicMatricesList = TypedListType(T.TensorType(
            theano.config.floatX, (False, False)))()
        myMatrix = T.matrix()
        z = mySymbolicMatricesList.ind(myMatrix)
        f = theano.function([mySymbolicMatricesList, myMatrix], z)
        x = rand_ranged_matrix(-1000, 1000, [100, 101])
        y = rand_ranged_matrix(-1000, 1000, [100, 101])
        self.assertTrue(f([x, y], y) == 1)

    def test_non_tensor_type(self):
        """Index works when the elements are themselves typed lists."""
        mySymbolicNestedMatricesList = TypedListType(T.TensorType(
            theano.config.floatX, (False, False)), 1)()
        mySymbolicMatricesList = TypedListType(T.TensorType(
            theano.config.floatX, (False, False)))()
        z = Index()(mySymbolicNestedMatricesList, mySymbolicMatricesList)
        f = theano.function([mySymbolicNestedMatricesList,
                             mySymbolicMatricesList], z)
        x = rand_ranged_matrix(-1000, 1000, [100, 101])
        y = rand_ranged_matrix(-1000, 1000, [100, 101])
        self.assertTrue(f([[x, y], [x, y, y]], [x, y]) == 0)

    def test_sparse(self):
        """Index works when the elements are sparse (CSR) matrices."""
        mySymbolicSparseList = TypedListType(sparse.SparseType('csr',
                                                               theano.config.floatX))()
        mySymbolicSparse = sparse.csr_matrix()
        z = Index()(mySymbolicSparseList, mySymbolicSparse)
        f = theano.function([mySymbolicSparseList, mySymbolicSparse], z)
        x = sp.csr_matrix(random_lil((10, 40), theano.config.floatX, 3))
        y = sp.csr_matrix(random_lil((10, 40), theano.config.floatX, 3))
        self.assertTrue(f([x, y], y) == 1)
class test_count(unittest.TestCase):
    """Tests for the typed-list ``Count`` op (number of matching elements)."""

    def test_sanity_check(self):
        """Count() counts every occurrence of the element in the list."""
        mySymbolicMatricesList = TypedListType(T.TensorType(
            theano.config.floatX, (False, False)))()
        myMatrix = T.matrix()
        z = Count()(mySymbolicMatricesList, myMatrix)
        f = theano.function([mySymbolicMatricesList, myMatrix], z)
        x = rand_ranged_matrix(-1000, 1000, [100, 101])
        y = rand_ranged_matrix(-1000, 1000, [100, 101])
        self.assertTrue(f([y, y, x, y], y) == 3)

    def test_interface(self):
        """``lst.count(m)`` builds the same graph as ``Count()``."""
        mySymbolicMatricesList = TypedListType(T.TensorType(
            theano.config.floatX, (False, False)))()
        myMatrix = T.matrix()
        z = mySymbolicMatricesList.count(myMatrix)
        f = theano.function([mySymbolicMatricesList, myMatrix], z)
        x = rand_ranged_matrix(-1000, 1000, [100, 101])
        y = rand_ranged_matrix(-1000, 1000, [100, 101])
        self.assertTrue(f([x, y], y) == 1)

    def test_non_tensor_type(self):
        """Count works when the elements are themselves typed lists."""
        mySymbolicNestedMatricesList = TypedListType(T.TensorType(
            theano.config.floatX, (False, False)), 1)()
        mySymbolicMatricesList = TypedListType(T.TensorType(
            theano.config.floatX, (False, False)))()
        z = Count()(mySymbolicNestedMatricesList, mySymbolicMatricesList)
        f = theano.function([mySymbolicNestedMatricesList,
                             mySymbolicMatricesList], z)
        x = rand_ranged_matrix(-1000, 1000, [100, 101])
        y = rand_ranged_matrix(-1000, 1000, [100, 101])
        self.assertTrue(f([[x, y], [x, y, y]], [x, y]) == 1)

    def test_sparse(self):
        """Count works when the elements are sparse (CSR) matrices."""
        mySymbolicSparseList = TypedListType(sparse.SparseType('csr',
                                                               theano.config.floatX))()
        mySymbolicSparse = sparse.csr_matrix()
        z = Count()(mySymbolicSparseList, mySymbolicSparse)
        f = theano.function([mySymbolicSparseList, mySymbolicSparse], z)
        x = sp.csr_matrix(random_lil((10, 40), theano.config.floatX, 3))
        y = sp.csr_matrix(random_lil((10, 40), theano.config.floatX, 3))
        self.assertTrue(f([x, y, y], y) == 2)
class test_length(unittest.TestCase):
    """Tests for the typed-list ``Length`` op and the ``__len__`` interface."""

    def test_sanity_check(self):
        """Length() evaluates to the number of elements in the list."""
        matrices = TypedListType(T.TensorType(
            theano.config.floatX, (False, False)))()
        out = Length()(matrices)
        fn = theano.function([matrices], out)
        elem = rand_ranged_matrix(-1000, 1000, [100, 101])
        self.assertTrue(fn([elem, elem, elem, elem]) == 4)

    def test_interface(self):
        """``lst.__len__()`` builds the same graph as ``Length()``."""
        matrices = TypedListType(T.TensorType(
            theano.config.floatX, (False, False)))()
        out = matrices.__len__()
        fn = theano.function([matrices], out)
        elem = rand_ranged_matrix(-1000, 1000, [100, 101])
        self.assertTrue(fn([elem, elem]) == 2)
| 33.5
| 79
| 0.592095
| 1,851
| 18,090
| 5.671529
| 0.079417
| 0.047628
| 0.076205
| 0.093351
| 0.861688
| 0.855401
| 0.844828
| 0.839493
| 0.827015
| 0.827015
| 0
| 0.0603
| 0.284024
| 18,090
| 539
| 80
| 33.562152
| 0.750232
| 0.014373
| 0
| 0.721212
| 0
| 0
| 0.00331
| 0
| 0
| 0
| 0
| 0
| 0.1
| 1
| 0.1
| false
| 0
| 0.033333
| 0.00303
| 0.166667
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0aadc62efc7cc2fb70338df653c11b80a1baf88c
| 2,832
|
py
|
Python
|
HLTriggerOffline/Top/python/topSingleLeptonHLTEventValidation_cfi.py
|
NTrevisani/cmssw
|
a212a27526f34eb9507cf8b875c93896e6544781
|
[
"Apache-2.0"
] | 3
|
2018-08-24T19:10:26.000Z
|
2019-02-19T11:45:32.000Z
|
HLTriggerOffline/Top/python/topSingleLeptonHLTEventValidation_cfi.py
|
NTrevisani/cmssw
|
a212a27526f34eb9507cf8b875c93896e6544781
|
[
"Apache-2.0"
] | 7
|
2016-07-17T02:34:54.000Z
|
2019-08-13T07:58:37.000Z
|
HLTriggerOffline/Top/python/topSingleLeptonHLTEventValidation_cfi.py
|
NTrevisani/cmssw
|
a212a27526f34eb9507cf8b875c93896e6544781
|
[
"Apache-2.0"
] | 5
|
2018-08-21T16:37:52.000Z
|
2020-01-09T13:33:17.000Z
|
import FWCore.ParameterSet.Config as cms
# ttbar semi-muonic channel
from DQMServices.Core.DQMEDAnalyzer import DQMEDAnalyzer

# DQM analyzer for the single-muon HLT paths used by the TOP PAG.
# All thresholds below are untracked parameters handed to
# TopSingleLeptonHLTValidation; requires >=1 muon (minMuons) and >=4 jets.
topSingleMuonHLTValidation = DQMEDAnalyzer('TopSingleLeptonHLTValidation',
        # Directory
        sDir = cms.untracked.string('HLT/TopHLTValidation/Top/SemiMuonic/'),
        # Electrons
        sElectrons = cms.untracked.string('gedGsfElectrons'),
        ptElectrons = cms.untracked.double(30.),
        etaElectrons = cms.untracked.double(2.5),
        isoElectrons = cms.untracked.double(0.1),
        minElectrons = cms.untracked.uint32(0),
        # Muons
        sMuons = cms.untracked.string('muons'),
        ptMuons = cms.untracked.double(26.),
        etaMuons = cms.untracked.double(2.1),
        isoMuons = cms.untracked.double(0.12),
        minMuons = cms.untracked.uint32(1),
        # Jets
        sJets = cms.untracked.string('ak4PFJetsCHS'),
        ptJets = cms.untracked.double(20.),
        etaJets = cms.untracked.double(2.5),
        minJets = cms.untracked.uint32(4),
        # Trigger
        iTrigger = cms.untracked.InputTag("TriggerResults","","HLT"),
        ### Updating to HLT paths to be monitored by TOP PAG in 2017
        vsPaths = cms.untracked.vstring(['HLT_IsoMu27_v',
                                         'HLT_Mu50_v']),
)
# ttbar semi-electronic channel
# Same analyzer as the semi-muonic configuration, with the lepton requirement
# flipped: >=1 electron (minElectrons) and no muon requirement (minMuons = 0).
topSingleElectronHLTValidation = DQMEDAnalyzer('TopSingleLeptonHLTValidation',
        # Directory
        sDir = cms.untracked.string('HLT/TopHLTValidation/Top/SemiElectronic/'),
        # Electrons
        sElectrons = cms.untracked.string('gedGsfElectrons'),
        ptElectrons = cms.untracked.double(30.),
        etaElectrons = cms.untracked.double(2.5),
        isoElectrons = cms.untracked.double(0.1),
        minElectrons = cms.untracked.uint32(1),
        # Muons
        sMuons = cms.untracked.string('muons'),
        ptMuons = cms.untracked.double(26.),
        etaMuons = cms.untracked.double(2.1),
        isoMuons = cms.untracked.double(0.12),
        minMuons = cms.untracked.uint32(0),
        # Jets
        sJets = cms.untracked.string('ak4PFJetsCHS'),
        ptJets = cms.untracked.double(20.),
        etaJets = cms.untracked.double(2.5),
        minJets = cms.untracked.uint32(4),
        # Trigger
        iTrigger = cms.untracked.InputTag("TriggerResults","","HLT"),
        ### Updating to HLT paths to be monitored by TOP PAG in 2017
        vsPaths = cms.untracked.vstring(['HLT_Ele35_WPTight_Gsf_v',
                                         'HLT_Ele38_WPTight_Gsf_v',
                                         'HLT_Ele40_WPTight_Gsf_v']),
)
| 45.677419
| 173
| 0.569209
| 265
| 2,832
| 6.022642
| 0.298113
| 0.255639
| 0.180451
| 0.071429
| 0.825815
| 0.825815
| 0.825815
| 0.825815
| 0.825815
| 0.825815
| 0
| 0.037442
| 0.320975
| 2,832
| 61
| 174
| 46.42623
| 0.792512
| 0.122881
| 0
| 0.604651
| 0
| 0
| 0.130629
| 0.081542
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.046512
| 0
| 0.046512
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
0ab1807a7cdf8520d37c4dbbb781b127c4b07d3d
| 174
|
py
|
Python
|
index.py
|
Developer-07/Project
|
b75d199a36d0456bb5c474660542f4e213aba9c3
|
[
"CC0-1.0"
] | null | null | null |
index.py
|
Developer-07/Project
|
b75d199a36d0456bb5c474660542f4e213aba9c3
|
[
"CC0-1.0"
] | null | null | null |
index.py
|
Developer-07/Project
|
b75d199a36d0456bb5c474660542f4e213aba9c3
|
[
"CC0-1.0"
] | null | null | null |
from communication import databaseCommunication
from communication import execute

# NOTE(review): database credentials (user "root" + password) are hard-coded
# in source — move them to environment variables / a config file before this
# is shared or committed anywhere public.
# Prints field index 6 of the first row returned for the user with id 23.
# (The SQL string is a constant here, so no injection risk in this call,
# but execute.main should use parameterized queries if inputs ever vary.)
print(execute.main("root", "MLemgen1709!", "php", "SELECT * FROM users WHERE id=23")[0][6])
| 43.5
| 91
| 0.770115
| 22
| 174
| 6.090909
| 0.772727
| 0.253731
| 0.343284
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.050955
| 0.097701
| 174
| 4
| 91
| 43.5
| 0.802548
| 0
| 0
| 0
| 0
| 0
| 0.285714
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0.333333
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
0ac377bb0a309bf52088362e0bd0a15c7b4182f3
| 11,495
|
py
|
Python
|
lsdviztools/lsdplottingtools/lsdmap_basicmaps.py
|
LSDtopotools/lsdviztools
|
b6012c0013bea7a0af5e7fa283a8a2268be18e25
|
[
"MIT"
] | 2
|
2021-04-01T13:24:49.000Z
|
2021-09-15T17:24:19.000Z
|
lsdviztools/lsdplottingtools/lsdmap_basicmaps.py
|
LSDtopotools/lsdviztools
|
b6012c0013bea7a0af5e7fa283a8a2268be18e25
|
[
"MIT"
] | 20
|
2020-07-28T11:10:36.000Z
|
2021-08-19T13:10:44.000Z
|
lsdviztools/lsdplottingtools/lsdmap_basicmaps.py
|
LSDtopotools/lsdviztools
|
b6012c0013bea7a0af5e7fa283a8a2268be18e25
|
[
"MIT"
] | 2
|
2021-04-21T17:40:14.000Z
|
2021-09-15T17:24:20.000Z
|
#=============================================================================
#=============================================================================
# These functions create figures for Basic visualization
#
#
# It creates separate plots for each basin in the DEM.
#
# Authors:
# Simon M. Mudd
# Fiona J. Clubb
#=============================================================================
#=============================================================================
# IMPORT MODULES
#=============================================================================
# set backend to run on server
import matplotlib
matplotlib.use('Agg')
import numpy as np
import matplotlib.pyplot as plt
from matplotlib import rcParams
import matplotlib.ticker as ticker
import pandas as pd
from matplotlib import colors
import math
import os
import subprocess
#from shapely.geometry import Polygon
from lsdmapfigure import plottinghelpers as Helper
from lsdmapfigure.plottingraster import MapFigure
from lsdmapfigure.plottingraster import BaseRaster
def PlotTopoRaster(DataDirectory, fname_prefix, size_format='ESURF', FigFormat='png', colors = "terrain"):
    """
    Creates a basic topographic raster plot draped with the hillshade.

    Args:
        DataDirectory (str): the data directory with the m/n csv files
        fname_prefix (str): The prefix for the m/n csv files
        size_format (str): Can be "big" (16 inches wide), "geomorphology" (6.25 inches wide), or "ESURF" (4.92 inches wide) (default esurf).
        FigFormat (str): The format of the figure. Usually 'png' or 'pdf'. If "show" then it calls the matplotlib show() command.
        colors (str): matplotlib colourmap name used for the topography drape.

    Returns:
        Saves a shaded-relief topography image to <DataDirectory>/raster_plots/.

    Author: BG, FJC
    """
    # check if a directory exists for the raster plots. If not then make it.
    raster_directory = DataDirectory+'raster_plots/'
    if not os.path.isdir(raster_directory):
        os.makedirs(raster_directory)

    # Set up fonts for plots
    label_size = 10
    rcParams['font.family'] = 'sans-serif'
    rcParams['font.sans-serif'] = ['Liberation Sans']
    rcParams['font.size'] = label_size

    # set figure sizes based on format
    if size_format == "geomorphology":
        fig_width_inches = 6.25
    elif size_format == "big":
        fig_width_inches = 16
    else:
        fig_width_inches = 4.92126

    # fixed typo in user message ("cabic" -> "basic")
    print("I'm going to make a basic topographic plot")

    # get the rasters - need to have bil extensions.
    raster_ext = '.bil'
    # Prefer the preprocessed ("_PP") DEM if one exists
    if os.path.isfile(DataDirectory + fname_prefix +"_PP.bil"):
        BackgroundRasterName = fname_prefix+"_PP"+raster_ext
    else:
        BackgroundRasterName = fname_prefix+raster_ext
    HillshadeName = fname_prefix+'_hs'+raster_ext

    # create the map figure
    MF = MapFigure(BackgroundRasterName, DataDirectory,coord_type="UTM_km", colourbar_location='None')
    # First plot the terrain topo raster...
    MF.add_drape_image(BackgroundRasterName,DataDirectory,
                       colourmap = colors, # see http://matplotlib.org/users/colormaps.html; append _r for the reversed map
                       alpha=1, # fully opaque base layer
                       show_colourbar = True,
                       colorbarlabel = "None",
                       NFF_opti = True)
    # ...then drape the semi-transparent hillshade on top of it
    MF.add_drape_image(HillshadeName,DataDirectory,
                       colourmap = "gray",
                       alpha=0.4, # semi-transparent so the topography colours show through
                       show_colourbar = True,
                       colorbarlabel = "None",
                       NFF_opti = True)

    # Save the figure
    ImageName = raster_directory+fname_prefix+'_Topo.'+FigFormat
    MF.save_fig(fig_width_inches = fig_width_inches, FigFileName = ImageName, FigFormat=FigFormat, Fig_dpi = 300)
def PlotSlopeRaster(DataDirectory, fname_prefix, size_format='ESURF', FigFormat='png'):
    """
    Creates a basic slope map with a [0,2] colour scale, over a grey base layer.

    Args:
        DataDirectory (str): the data directory with the m/n csv files
        fname_prefix (str): The prefix for the m/n csv files
        size_format (str): Can be "big" (16 inches wide), "geomorphology" (6.25 inches wide), or "ESURF" (4.92 inches wide) (default esurf).
        FigFormat (str): The format of the figure. Usually 'png' or 'pdf'. If "show" then it calls the matplotlib show() command.

    Returns:
        Saves a slope map image to <DataDirectory>/raster_plots/.

    Author: BG, FJC
    """
    # check if a directory exists for the raster plots. If not then make it.
    raster_directory = DataDirectory+'raster_plots/'
    if not os.path.isdir(raster_directory):
        os.makedirs(raster_directory)

    # Set up fonts for plots
    label_size = 10
    rcParams['font.family'] = 'sans-serif'
    rcParams['font.sans-serif'] = ['Liberation Sans']
    rcParams['font.size'] = label_size

    # set figure sizes based on format
    if size_format == "geomorphology":
        fig_width_inches = 6.25
    elif size_format == "big":
        fig_width_inches = 16
    else:
        fig_width_inches = 4.92126

    # fixed user message: this function plots slope, not topography
    print("I'm going to make a basic slope plot")

    # get the rasters - need to have bil extensions.
    raster_ext = '.bil'
    BackgroundRasterName = fname_prefix+"_slope"+raster_ext

    # create the map figure
    MF = MapFigure(BackgroundRasterName, DataDirectory,coord_type="UTM_km", colourbar_location='None')
    # First plot a grey background (min/max chosen so the layer renders flat)
    MF.add_drape_image(BackgroundRasterName,DataDirectory,
                       colourmap = "gray",
                       alpha=1,
                       show_colourbar = True,
                       colorbarlabel = "None",
                       colour_min_max = [0,100000],
                       custom_min_max = [0,0.1],
                       NFF_opti = True)
    # Then drape the slope raster, clipped to a [0,2] colour scale
    MF.add_drape_image(BackgroundRasterName,DataDirectory,
                       colourmap = "viridis", # see http://matplotlib.org/users/colormaps.html
                       alpha=1,
                       show_colourbar = True,
                       colour_min_max = [0,2],
                       colorbarlabel = "None",
                       NFF_opti = True)

    # Save the figure
    # NOTE(review): '_Slopo' looks like a typo for '_Slope' — kept so existing
    # scripts that glob for these output names keep working.
    ImageName = raster_directory+fname_prefix+'_Slopo.'+FigFormat
    MF.save_fig(fig_width_inches = fig_width_inches, FigFileName = ImageName, FigFormat=FigFormat, Fig_dpi = 300)
def PlotCurveRaster(DataDirectory, fname_prefix, size_format='ESURF', FigFormat='png'):
    """
    Creates a basic curvature map with a [0,2] colour scale, over a grey base layer.

    (Docstring corrected: this plots the "_curvature" raster, not the slope map.)

    Args:
        DataDirectory (str): the data directory with the m/n csv files
        fname_prefix (str): The prefix for the m/n csv files
        size_format (str): Can be "big" (16 inches wide), "geomorphology" (6.25 inches wide), or "ESURF" (4.92 inches wide) (default esurf).
        FigFormat (str): The format of the figure. Usually 'png' or 'pdf'. If "show" then it calls the matplotlib show() command.

    Returns:
        Saves a curvature map image to <DataDirectory>/raster_plots/.

    Author: BG, FJC
    """
    # check if a directory exists for the raster plots. If not then make it.
    raster_directory = DataDirectory+'raster_plots/'
    if not os.path.isdir(raster_directory):
        os.makedirs(raster_directory)

    # Set up fonts for plots
    label_size = 10
    rcParams['font.family'] = 'sans-serif'
    rcParams['font.sans-serif'] = ['Liberation Sans']
    rcParams['font.size'] = label_size

    # set figure sizes based on format
    if size_format == "geomorphology":
        fig_width_inches = 6.25
    elif size_format == "big":
        fig_width_inches = 16
    else:
        fig_width_inches = 4.92126

    # fixed user message: this function plots curvature, not topography
    print("I'm going to make a basic curvature plot")

    # get the rasters - need to have bil extensions.
    raster_ext = '.bil'
    BackgroundRasterName = fname_prefix+"_curvature"+raster_ext

    # create the map figure
    MF = MapFigure(BackgroundRasterName, DataDirectory,coord_type="UTM_km", colourbar_location='None')
    # First plot a grey background (min/max chosen so the layer renders flat)
    MF.add_drape_image(BackgroundRasterName,DataDirectory,
                       colourmap = "gray",
                       alpha=1,
                       show_colourbar = True,
                       colorbarlabel = "None",
                       colour_min_max = [0,100000],
                       custom_min_max = [0,0.1],
                       NFF_opti = True)
    # Then drape the curvature raster, clipped to a [0,2] colour scale
    MF.add_drape_image(BackgroundRasterName,DataDirectory,
                       colourmap = "viridis", # see http://matplotlib.org/users/colormaps.html
                       alpha=1,
                       show_colourbar = True,
                       colour_min_max = [0,2],
                       colorbarlabel = "None",
                       NFF_opti = True)

    # Save the figure
    ImageName = raster_directory+fname_prefix+'_Curve.'+FigFormat
    MF.save_fig(fig_width_inches = fig_width_inches, FigFileName = ImageName, FigFormat=FigFormat, Fig_dpi = 300)
| 46.727642
| 200
| 0.638452
| 1,489
| 11,495
| 4.827401
| 0.159167
| 0.022955
| 0.029215
| 0.006678
| 0.878408
| 0.874374
| 0.874374
| 0.874374
| 0.867279
| 0.867279
| 0
| 0.013881
| 0.254197
| 11,495
| 245
| 201
| 46.918367
| 0.824566
| 0.480035
| 0
| 0.758333
| 0
| 0
| 0.098748
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.025
| false
| 0
| 0.108333
| 0
| 0.133333
| 0.025
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0aecedb47aeb5f93b21ed8afdadbc4f9177102a2
| 26,143
|
py
|
Python
|
test/lib/testMap.py
|
animator/titus2
|
1d35fab2950bd9f0438b931a02996475271a695e
|
[
"Apache-2.0"
] | 18
|
2019-11-29T08:53:58.000Z
|
2021-11-19T05:33:33.000Z
|
test/lib/testMap.py
|
animator/titus2
|
1d35fab2950bd9f0438b931a02996475271a695e
|
[
"Apache-2.0"
] | 2
|
2020-04-29T12:58:32.000Z
|
2021-03-23T05:55:43.000Z
|
test/lib/testMap.py
|
animator/titus2
|
1d35fab2950bd9f0438b931a02996475271a695e
|
[
"Apache-2.0"
] | 1
|
2020-05-05T15:10:27.000Z
|
2020-05-05T15:10:27.000Z
|
#!/usr/bin/env python
# Copyright (C) 2014 Open Data ("Open Data" refers to
# one or more of the following companies: Open Data Partners LLC,
# Open Data Research LLC, or Open Data Capital LLC.)
#
# This file is part of Hadrian.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
from titus.genpy import PFAEngine
from titus.errors import *
class TestLib1Map(unittest.TestCase):
def testGetLength(self):
    """map.len reports the number of entries, including zero for an empty map."""
    engine, = PFAEngine.fromYaml('''
input: {type: map, values: int}
output: int
action:
  - {map.len: [input]}
''')
    self.assertEqual(engine.action({"a": 1, "b": 2, "c": 3, "d": 4, "e": 5}), 5)
    engine, = PFAEngine.fromYaml('''
input: {type: map, values: int}
output: int
action:
  - {map.len: [input]}
''')
    self.assertEqual(engine.action({}), 0)
def testGetKeys(self):
    """map.keys returns all keys (order unspecified, hence the set comparison)."""
    engine, = PFAEngine.fromYaml('''
input: {type: map, values: int}
output: {type: array, items: string}
action:
  - {map.keys: [input]}
''')
    self.assertEqual(set(engine.action({"a": 1, "b": 2, "c": 3, "d": 4, "e": 5})), set(["a", "b", "c", "d", "e"]))
    engine, = PFAEngine.fromYaml('''
input: {type: map, values: int}
output: {type: array, items: string}
action:
  - {map.keys: [input]}
''')
    self.assertEqual(engine.action({}), [])
def testGetValues(self):
    """map.values returns all values (order unspecified, hence the set comparison)."""
    engine, = PFAEngine.fromYaml('''
input: {type: map, values: int}
output: {type: array, items: int}
action:
  - {map.values: [input]}
''')
    self.assertEqual(set(engine.action({"a": 1, "b": 2, "c": 3, "d": 4, "e": 5})), set([1, 2, 3, 4, 5]))
    engine, = PFAEngine.fromYaml('''
input: {type: map, values: int}
output: {type: array, items: int}
action:
  - {map.values: [input]}
''')
    self.assertEqual(engine.action({}), [])
def testCheckContainsKey(self):
    """map.containsKey accepts either a key argument or a key predicate function."""
    engine, = PFAEngine.fromYaml('''
input: string
output: boolean
action:
  map.containsKey:
    - {value: {"a": 1, "b": 2, "c": 3, "d": 4, "e": 5}, type: {type: map, values: int}}
    - input
''')
    self.assertTrue(engine.action("a"))
    self.assertFalse(engine.action("z"))
    engine, = PFAEngine.fromYaml('''
input: string
output: boolean
action:
  map.containsKey:
    - {value: {"a": 1, "b": 2, "c": 3, "d": 4, "e": 5}, type: {type: map, values: int}}
    - params: [{x: string}]
      ret: boolean
      do: {"==": [x, input]}
''')
    self.assertTrue(engine.action("a"))
    self.assertFalse(engine.action("z"))
def testCheckContainsValue(self):
    """map.containsValue accepts either a value argument or a value predicate.

    Renamed from a duplicated ``testCheckContainsKey``: the second definition
    with the same name shadowed the first, so the containsKey test never ran
    under unittest discovery. The body tests map.containsValue, hence the name.
    """
    engine, = PFAEngine.fromYaml('''
input: int
output: boolean
action:
  map.containsValue:
    - {value: {"a": 1, "b": 2, "c": 3, "d": 4, "e": 5}, type: {type: map, values: int}}
    - input
''')
    self.assertTrue(engine.action(1))
    self.assertFalse(engine.action(9))
    engine, = PFAEngine.fromYaml('''
input: int
output: boolean
action:
  map.containsValue:
    - {value: {"a": 1, "b": 2, "c": 3, "d": 4, "e": 5}, type: {type: map, values: int}}
    - params: [{x: int}]
      ret: boolean
      do: {"==": [x, input]}
''')
    self.assertTrue(engine.action(1))
    self.assertFalse(engine.action(9))
def testAddKeyValuePairs(self):
    """map.add overwrites an existing key, appends a new one, and extends a set."""
    engine, = PFAEngine.fromYaml('''
input: string
output: {type: map, values: int}
action:
  map.add:
    - {value: {"a": 1, "b": 2, "c": 3, "d": 4, "e": 5}, type: {type: map, values: int}}
    - input
    - 999
''')
    self.assertEqual(engine.action("a"), {"a": 999, "b": 2, "c": 3, "d": 4, "e": 5})
    self.assertEqual(engine.action("z"), {"a": 1, "b": 2, "c": 3, "d": 4, "e": 5, "z": 999})
    # Set form: keys like "BA==" are presumably serialized encodings of the
    # members produced by map.toset — TODO confirm against the map.* library.
    engine, = PFAEngine.fromYaml('''
input: int
output: {type: map, values: int}
action:
  map.add:
    - {map.toset: {value: [1, 2, 3, 4, 5], type: {type: array, items: int}}}
    - input
''')
    self.assertEqual(engine.action(1), {"BA==": 2, "Ag==": 1, "Bg==": 3, "Cg==": 5, "CA==": 4})
    self.assertEqual(engine.action(999), {"BA==": 2, "Ag==": 1, "Bg==": 3, "Cg==": 5, "CA==": 4, "zg8=": 999})
def testRemoveKeys(self):
    """map.remove drops a present key; removing an absent key is a no-op."""
    engine, = PFAEngine.fromYaml('''
input: string
output: {type: map, values: int}
action:
  map.remove:
    - {value: {"a": 1, "b": 2, "c": 3, "d": 4, "e": 5}, type: {type: map, values: int}}
    - input
''')
    self.assertEqual(engine.action("a"), {"b": 2, "c": 3, "d": 4, "e": 5})
    self.assertEqual(engine.action("z"), {"a": 1, "b": 2, "c": 3, "d": 4, "e": 5})
def testKeepOnlyCertainKeys(self):
    """map.only keeps listed keys; unknown keys and empty inputs yield fewer/no entries."""
    engine, = PFAEngine.fromYaml('''
input: {type: array, items: string}
output: {type: map, values: int}
action:
  map.only:
    - {value: {"a": 1, "b": 2, "c": 3, "d": 4, "e": 5}, type: {type: map, values: int}}
    - input
''')
    self.assertEqual(engine.action(["b", "c", "e"]), {"b": 2, "c": 3, "e": 5})
    self.assertEqual(engine.action(["b", "c", "e", "z"]), {"b": 2, "c": 3, "e": 5})
    self.assertEqual(engine.action([]), {})
    engine, = PFAEngine.fromYaml('''
input: {type: array, items: string}
output: {type: map, values: int}
action:
  map.only:
    - {value: {}, type: {type: map, values: int}}
    - input
''')
    self.assertEqual(engine.action(["b", "c", "e"]), {})
    self.assertEqual(engine.action([]), {})
def testEliminateOnlyCertainKeys(self):
    """map.except removes listed keys; unknown keys are ignored, empty list keeps all."""
    engine, = PFAEngine.fromYaml('''
input: {type: array, items: string}
output: {type: map, values: int}
action:
  map.except:
    - {value: {"a": 1, "b": 2, "c": 3, "d": 4, "e": 5}, type: {type: map, values: int}}
    - input
''')
    self.assertEqual(engine.action(["b", "c", "e"]), {"a": 1, "d": 4})
    self.assertEqual(engine.action(["b", "c", "e", "z"]), {"a": 1, "d": 4})
    self.assertEqual(engine.action([]), {"a": 1, "b": 2, "c": 3, "d": 4, "e": 5})
    engine, = PFAEngine.fromYaml('''
input: {type: array, items: string}
output: {type: map, values: int}
action:
  map.except:
    - {value: {}, type: {type: map, values: int}}
    - input
''')
    self.assertEqual(engine.action(["b", "c", "e"]), {})
    self.assertEqual(engine.action([]), {})
def testUpdateWithAnOverlay(self):
    """map.update overlays the input map's entries onto the base map."""
    engine, = PFAEngine.fromYaml('''
input: {type: map, values: int}
output: {type: map, values: int}
action:
  map.update:
    - {value: {"a": 1, "b": 2, "c": 3, "d": 4, "e": 5}, type: {type: map, values: int}}
    - input
''')
    self.assertEqual(engine.action({"b": 102, "c": 103, "z": 999}), {"a": 1, "b": 102, "c": 103, "d": 4, "e": 5, "z": 999})
def testSplit(self):
    """map.split breaks a map into single-pair maps (order unspecified, so both sides are sorted)."""
    engine, = PFAEngine.fromYaml('''
input: {type: map, values: int}
output: {type: array, items: {type: map, values: int}}
action:
  map.split: input
''')
    self.assertEqual(sorted(engine.action({"a": 1, "b": 2, "c": 3}), key=lambda x: list(x.keys())[0]),
                     sorted([{"a": 1}, {"b": 2}, {"c": 3}], key=lambda x: list(x.keys())[0]))
def testJoin(self):
    """map.join merges an array of maps back into one map."""
    engine, = PFAEngine.fromYaml('''
input: {type: array, items: {type: map, values: int}}
output: {type: map, values: int}
action:
  map.join: input
''')
    # sorted() over a dict yields its keys, so this compares key sets only.
    self.assertEqual(sorted(engine.action([{"a": 1}, {"b": 2}, {"c": 3}])), sorted({"a": 1, "b": 2, "c": 3}))
def testNumericalArgmaxArgmin(self):
    """map.argmax/argmin return the key of the largest/smallest numeric value."""
    self.assertEqual(PFAEngine.fromYaml('''
input: "null"
output: string
action:
  - {map.argmax: [{value: {"0": 5.5, "1": 2.2, "2": 7.7, "3": 4.4, "4": 6.6, "5": 2.2, "6": 7.6}, type: {type: map, values: double}}]}
''')[0].action(None), "2")
    self.assertEqual(PFAEngine.fromYaml('''
input: "null"
output: string
action:
  - {map.argmin: [{value: {"0": 5.5, "1": 2.2, "2": 7.7, "3": 4.4, "4": 6.6, "5": 2.2, "6": 7.6}, type: {type: map, values: double}}]}
''')[0].action(None), "1")
def testObjectArgmaxArgmin(self):
    """map.argmax/argmin on strings use the values' natural (lexicographic) order."""
    self.assertEqual(PFAEngine.fromYaml('''
input: "null"
output: string
action:
  - {map.argmax: [{value: {"0": "one", "1": "two", "2": "three", "3": "four", "4": "five", "5": "six", "6": "seven"}, type: {type: map, values: string}}]}
''')[0].action(None), "1")
    self.assertEqual(PFAEngine.fromYaml('''
input: "null"
output: string
action:
  - {map.argmin: [{value: {"0": "one", "1": "two", "2": "three", "3": "four", "4": "five", "5": "six", "6": "seven"}, type: {type: map, values: string}}]}
''')[0].action(None), "4")
def testUserDefinedArgmaxArgmin(self):
    """map.argmaxLT/argminLT order by a user less-than (here: distance from 6.2),
    supplied either as a named fcn reference or as an inline function."""
    self.assertEqual(PFAEngine.fromYaml('''
input: "null"
output: string
action:
  - map.argmaxLT:
      - {value: {"0": 5.5, "1": 2.2, "2": 7.7, "3": 4.4, "4": 6.6, "5": 2.2, "6": 7.6}, type: {type: map, values: double}}
      - {fcn: u.mylt}
fcns:
  mylt:
    params: [{a: double}, {b: double}]
    ret: boolean
    do: {"<": [{m.abs: {"-": [a, 6.2]}}, {m.abs: {"-": [b, 6.2]}}]}
''')[0].action(None), "1")
    self.assertEqual(PFAEngine.fromYaml('''
input: "null"
output: string
action:
  - map.argmaxLT:
      - {value: {"0": 5.5, "1": 2.2, "2": 7.7, "3": 4.4, "4": 6.6, "5": 2.2, "6": 7.6}, type: {type: map, values: double}}
      - params: [{a: double}, {b: double}]
        ret: boolean
        do: {"<": [{m.abs: {"-": [a, 6.2]}}, {m.abs: {"-": [b, 6.2]}}]}
''')[0].action(None), "1")
    self.assertEqual(PFAEngine.fromYaml('''
input: "null"
output: string
action:
  - map.argminLT:
      - {value: {"0": 5.5, "1": 2.2, "2": 7.7, "3": 4.4, "4": 6.6, "5": 2.2, "6": 7.6}, type: {type: map, values: double}}
      - {fcn: u.mylt}
fcns:
  mylt:
    params: [{a: double}, {b: double}]
    ret: boolean
    do: {"<": [{m.abs: {"-": [a, 6.2]}}, {m.abs: {"-": [b, 6.2]}}]}
''')[0].action(None), "4")
    self.assertEqual(PFAEngine.fromYaml('''
input: "null"
output: string
action:
  - map.argminLT:
      - {value: {"0": 5.5, "1": 2.2, "2": 7.7, "3": 4.4, "4": 6.6, "5": 2.2, "6": 7.6}, type: {type: map, values: double}}
      - params: [{a: double}, {b: double}]
        ret: boolean
        do: {"<": [{m.abs: {"-": [a, 6.2]}}, {m.abs: {"-": [b, 6.2]}}]}
''')[0].action(None), "4")
def testFindTop3NumericalArgmaxArgmin(self):
    """map.argmaxN/argminN return the keys of the N extreme values, in rank order."""
    self.assertEqual(PFAEngine.fromYaml('''
input: "null"
output: {type: array, items: string}
action:
  - {map.argmaxN: [{value: {"0": 5.5, "1": 2.2, "2": 7.7, "3": 4.4, "4": 6.6, "5": 2.2, "6": 7.6}, type: {type: map, values: double}}, 3]}
''')[0].action(None), ["2", "6", "4"])
    self.assertEqual(PFAEngine.fromYaml('''
input: "null"
output: {type: array, items: string}
action:
  - {map.argminN: [{value: {"0": 5.5, "1": 2.2, "2": 7.7, "3": 4.4, "4": 6.6, "5": 2.2, "6": 7.6}, type: {type: map, values: double}}, 3]}
''')[0].action(None), ["1", "5", "3"])
def testFindTop3ObjectArgmaxArgmin(self):
    """map.argmaxN/argminN on strings rank by the values' natural order."""
    self.assertEqual(PFAEngine.fromYaml('''
input: "null"
output: {type: array, items: string}
action:
  - {map.argmaxN: [{value: {"0": "one", "1": "two", "2": "three", "3": "four", "4": "five", "5": "six", "6": "seven"}, type: {type: map, values: string}}, 3]}
''')[0].action(None), ["1", "2", "5"])
    self.assertEqual(PFAEngine.fromYaml('''
input: "null"
output: {type: array, items: string}
action:
  - {map.argminN: [{value: {"0": "one", "1": "two", "2": "three", "3": "four", "4": "five", "5": "six", "6": "seven"}, type: {type: map, values: string}}, 3]}
''')[0].action(None), ["4", "3", "0"])
def testFindTop3UserDefinedArgmaxArgmin(self):
    """map.argmaxNLT/argminNLT rank the top N by a user less-than (distance from 6.2),
    supplied either as a named fcn reference or inline."""
    self.assertEqual(PFAEngine.fromYaml('''
input: "null"
output: {type: array, items: string}
action:
  - map.argmaxNLT:
      - {value: {"0": 5.5, "1": 2.2, "2": 7.7, "3": 4.4, "4": 6.6, "5": 2.2, "6": 7.6}, type: {type: map, values: double}}
      - 3
      - {fcn: u.mylt}
fcns:
  mylt:
    params: [{a: double}, {b: double}]
    ret: boolean
    do: {"<": [{m.abs: {"-": [a, 6.2]}}, {m.abs: {"-": [b, 6.2]}}]}
''')[0].action(None), ["1", "5", "3"])
    self.assertEqual(PFAEngine.fromYaml('''
input: "null"
output: {type: array, items: string}
action:
  - map.argmaxNLT:
      - {value: {"0": 5.5, "1": 2.2, "2": 7.7, "3": 4.4, "4": 6.6, "5": 2.2, "6": 7.6}, type: {type: map, values: double}}
      - 3
      - params: [{a: double}, {b: double}]
        ret: boolean
        do: {"<": [{m.abs: {"-": [a, 6.2]}}, {m.abs: {"-": [b, 6.2]}}]}
''')[0].action(None), ["1", "5", "3"])
    self.assertEqual(PFAEngine.fromYaml('''
input: "null"
output: {type: array, items: string}
action:
  - map.argminNLT:
      - {value: {"0": 5.5, "1": 2.2, "2": 7.7, "3": 4.4, "4": 6.6, "5": 2.2, "6": 7.6}, type: {type: map, values: double}}
      - 3
      - {fcn: u.mylt}
fcns:
  mylt:
    params: [{a: double}, {b: double}]
    ret: boolean
    do: {"<": [{m.abs: {"-": [a, 6.2]}}, {m.abs: {"-": [b, 6.2]}}]}
''')[0].action(None), ["4", "0", "6"])
    self.assertEqual(PFAEngine.fromYaml('''
input: "null"
output: {type: array, items: string}
action:
  - map.argminNLT:
      - {value: {"0": 5.5, "1": 2.2, "2": 7.7, "3": 4.4, "4": 6.6, "5": 2.2, "6": 7.6}, type: {type: map, values: double}}
      - 3
      - params: [{a: double}, {b: double}]
        ret: boolean
        do: {"<": [{m.abs: {"-": [a, 6.2]}}, {m.abs: {"-": [b, 6.2]}}]}
''')[0].action(None), ["4", "0", "6"])
def testToSet(self):
    """map.toset turns an array into a set represented as a map (encoded keys)."""
    engine, = PFAEngine.fromYaml('''
input: {type: array, items: int}
output: {type: map, values: int}
action:
  - {map.toset: [input]}
''')
    self.assertEqual(engine.action([1, 2, 3, 4, 5]), {"BA==": 2, "Ag==": 1, "Bg==": 3, "Cg==": 5, "CA==": 4})
def testFromSet(self):
    """map.fromset recovers the original members from a set-as-map, for ints and strings."""
    engine, = PFAEngine.fromYaml('''
input: {type: map, values: int}
output: {type: array, items: int}
action:
  - {map.fromset: [input]}
''')
    self.assertEqual(set(engine.action({"BA==": 2, "Ag==": 1, "Bg==": 3, "Cg==": 5, "CA==": 4})), set([1, 2, 3, 4, 5]))
    engine, = PFAEngine.fromYaml('''
input: {type: map, values: string}
output: {type: array, items: string}
action:
  - {map.fromset: [input]}
''')
    self.assertEqual(set(engine.action({"BA==": "two", "Ag==": "one", "Bg==": "three", "Cg==": "five", "CA==": "four"})), set(["one", "two", "three", "four", "five"]))
def testIn(self):
    """map.in tests membership of a value in a set built by map.toset."""
    engine, = PFAEngine.fromYaml('''
input: int
output: boolean
action:
  map.in:
    - {map.toset: {value: [1, 2, 3, 4, 5], type: {type: array, items: int}}}
    - input
''')
    self.assertTrue(engine.action(2))
    self.assertFalse(engine.action(0))
def testUnion(self):
    """map.union of two sets contains every member of both."""
    engine, = PFAEngine.fromYaml('''
input: "null"
output: {type: array, items: int}
action:
  map.fromset:
    map.union:
      - {map.toset: {value: [1, 2, 3, 4, 5], type: {type: array, items: int}}}
      - {map.toset: {value: [4, 5, 6, 7, 8], type: {type: array, items: int}}}
''')
    self.assertEqual(set(engine.action(None)), set([1, 2, 3, 4, 5, 6, 7, 8]))
def testIntersection(self):
    """map.intersection of two sets keeps only the common members."""
    engine, = PFAEngine.fromYaml('''
input: "null"
output: {type: array, items: int}
action:
  map.fromset:
    map.intersection:
      - {map.toset: {value: [1, 2, 3, 4, 5], type: {type: array, items: int}}}
      - {map.toset: {value: [4, 5, 6, 7, 8], type: {type: array, items: int}}}
''')
    self.assertEqual(set(engine.action(None)), set([4, 5]))
def testDiff(self):
    """map.diff keeps members of the first set that are absent from the second."""
    engine, = PFAEngine.fromYaml('''
input: "null"
output: {type: array, items: int}
action:
  map.fromset:
    map.diff:
      - {map.toset: {value: [1, 2, 3, 4, 5], type: {type: array, items: int}}}
      - {map.toset: {value: [4, 5, 6, 7, 8], type: {type: array, items: int}}}
''')
    self.assertEqual(set(engine.action(None)), set([1, 2, 3]))
def testSymDiff(self):
    """map.symdiff keeps members in exactly one of the two sets."""
    engine, = PFAEngine.fromYaml('''
input: "null"
output: {type: array, items: int}
action:
  map.fromset:
    map.symdiff:
      - {map.toset: {value: [1, 2, 3, 4, 5], type: {type: array, items: int}}}
      - {map.toset: {value: [4, 5, 6, 7, 8], type: {type: array, items: int}}}
''')
    self.assertEqual(set(engine.action(None)), set([1, 2, 3, 6, 7, 8]))
def testSubset(self):
    """map.subset: true iff every input member lies in {1..5}."""
    engine, = PFAEngine.fromYaml('''
input: {type: array, items: int}
output: boolean
action:
  map.subset:
    - {map.toset: input}
    - {map.toset: {value: [1, 2, 3, 4, 5], type: {type: array, items: int}}}
''')
    self.assertTrue(engine.action([1, 2, 3]))
    self.assertFalse(engine.action([1, 2, 3, 999]))
    self.assertFalse(engine.action([888, 999]))
def testDisjoint(self):
    """map.disjoint: true iff the input shares no member with {1..5}."""
    engine, = PFAEngine.fromYaml('''
input: {type: array, items: int}
output: boolean
action:
  map.disjoint:
    - {map.toset: input}
    - {map.toset: {value: [1, 2, 3, 4, 5], type: {type: array, items: int}}}
''')
    self.assertFalse(engine.action([1, 2, 3]))
    self.assertFalse(engine.action([1, 2, 3, 999]))
    self.assertTrue(engine.action([888, 999]))
def testMap(self):
    """map.map applies a function to every value, keeping keys unchanged."""
    engine, = PFAEngine.fromYaml('''
input: {type: map, values: string}
output: {type: map, values: int}
action:
  map.map:
    - input
    - params: [{x: string}]
      ret: int
      do: {parse.int: [x, 10]}
''')
    self.assertEqual(engine.action({"a": "1", "b": "2", "c": "3", "d": "4", "e": "5"}), {"a": 1, "b": 2, "c": 3, "d": 4, "e": 5})
def testMapWithKey(self):
    """map.mapWithKey passes the key as well: entries with key > "c" get +1000."""
    engine, = PFAEngine.fromYaml('''
input: {type: map, values: string}
output: {type: map, values: int}
action:
  map.mapWithKey:
    - input
    - params: [{key: string}, {value: string}]
      ret: int
      do:
        if: {">": [key, {string: "c"}]}
        then: {+: [{parse.int: [value, 10]}, 1000]}
        else: {parse.int: [value, 10]}
''')
    self.assertEqual(engine.action({"a": "1", "b": "2", "c": "3", "d": "4", "e": "5"}), {"a": 1, "b": 2, "c": 3, "d": 1004, "e": 1005})
def testFilter(self):
    """map.filter keeps only entries whose value satisfies the predicate (< 3)."""
    engine, = PFAEngine.fromYaml('''
input: {type: map, values: int}
output: {type: map, values: int}
action:
  map.filter:
    - input
    - params: [{x: int}]
      ret: boolean
      do: {"<": [x, 3]}
''')
    self.assertEqual(engine.action({"a": 1, "b": 2, "c": 3, "d": 4, "e": 5}), {"a": 1, "b": 2})
    def testFilterWithKey(self):
        """map.filterWithKey: predicate sees key and value; keeps value < 3 AND key == "a"."""
        engine, = PFAEngine.fromYaml('''
input: {type: map, values: int}
output: {type: map, values: int}
action:
  map.filterWithKey:
    - input
    - params: [{key: string}, {value: int}]
      ret: boolean
      do: {"&&": [{"<": [value, 3]}, {"==": [key, {string: "a"}]}]}
''')
        self.assertEqual(engine.action({"a": 1, "b": 2, "c": 3, "d": 4, "e": 5}), {"a": 1})
    def testFilterMap(self):
        """map.filterMap: map values through a function returning [int, null]; null results drop the entry."""
        engine, = PFAEngine.fromYaml('''
input: {type: map, values: int}
output: {type: map, values: int}
action:
  map.filterMap:
    - input
    - params: [{value: int}]
      ret: [int, "null"]
      do:
        if: {"==": [{"%": [value, 2]}, 0]}
        then: {"+": [value, 1000]}
        else: null
''')
        self.assertEqual(engine.action({"a": 1, "b": 2, "c": 3, "d": 4, "e": 5}), {"b": 1002, "d": 1004})
    def testFilterMapWithKey(self):
        """map.filterMapWithKey: like filterMap but the function also sees the key (keeps only even value at key "b")."""
        engine, = PFAEngine.fromYaml('''
input: {type: map, values: int}
output: {type: map, values: int}
action:
  map.filterMapWithKey:
    - input
    - params: [{key: string}, {value: int}]
      ret: [int, "null"]
      do:
        if: {"&&": [{"==": [{"%": [value, 2]}, 0]}, {"==": [key, {string: "b"}]}]}
        then: {"+": [value, 1000]}
        else: null
''')
        self.assertEqual(engine.action({"a": 1, "b": 2, "c": 3, "d": 4, "e": 5}), {"b": 1002})
    def testFlatMap(self):
        """map.flatMap: each value maps to a whole map, then results are merged.

        Values > 2 contribute two entries (key = str(value) and str(value) doubled);
        values <= 2 contribute an empty map, i.e. nothing.
        """
        engine, = PFAEngine.fromYaml('''
input: {type: map, values: int}
output: {type: map, values: int}
action:
  map.flatMap:
    - input
    - params: [{value: int}]
      ret: {type: map, values: int}
      do:
        if: {">": [value, 2]}
        then:
          - let: {out: {value: {}, type: {type: map, values: int}}}
          - set:
              out:
                map.add:
                  - out
                  - {s.int: value}
                  - value
          - set:
              out:
                map.add:
                  - out
                  - {s.concat: [{s.int: value}, {s.int: value}]}
                  - value
          - out
        else:
          {value: {}, type: {type: map, values: int}}
''')
        self.assertEqual(engine.action({"a": 1, "b": 2, "c": 3, "d": 4, "e": 5}), {"3": 3, "4": 4, "5": 5, "33": 3, "44": 4, "55": 5})
    def testFlatMapWithKey(self):
        """map.flatMapWithKey: each (key, value) yields two entries: the original pair and (key+key, value+100)."""
        engine, = PFAEngine.fromYaml('''
input: {type: map, values: int}
output: {type: map, values: int}
action:
  map.flatMapWithKey:
    - input
    - params: [{key: string}, {value: int}]
      ret: {type: map, values: int}
      do:
        map.add:
          - map.add:
              - {value: {}, type: {type: map, values: int}}
              - key
              - value
          - {s.concat: [key, key]}
          - {+: [100, value]}
''')
        self.assertEqual(engine.action({"a": 1, "b": 2, "c": 3, "d": 4, "e": 5}), {"a": 1, "b": 2, "c": 3, "d": 4, "e": 5, "aa": 101, "bb": 102, "cc": 103, "dd": 104, "ee": 105})
    def testZipMap(self):
        """map.zipmap: combine 2, 3, or 4 maps key-by-key through a function of the values."""
        # Two maps zipped.
        self.assertEqual(PFAEngine.fromYaml('''
input: "null"
output: {type: map, values: string}
action:
  map.zipmap:
    - {value: {"0": "x", "1": "y", "2": "z"}, type: {type: map, values: string}}
    - {value: {"0": 101, "1": 102, "2": 103}, type: {type: map, values: int}}
    - params: [{a: string}, {b: int}]
      ret: string
      do: {s.concat: [a, {s.int: b}]}
''')[0].action(None), {"0": "x101", "1": "y102", "2": "z103"})
        # Three maps zipped.
        self.assertEqual(PFAEngine.fromYaml('''
input: "null"
output: {type: map, values: string}
action:
  map.zipmap:
    - {value: {"0": "x", "1": "y", "2": "z"}, type: {type: map, values: string}}
    - {value: {"0": 101, "1": 102, "2": 103}, type: {type: map, values: int}}
    - {value: {"0": "a", "1": "b", "2": "c"}, type: {type: map, values: string}}
    - params: [{a: string}, {b: int}, {c: string}]
      ret: string
      do: {s.concat: [{s.concat: [a, {s.int: b}]}, c]}
''')[0].action(None), {"0": "x101a", "1": "y102b", "2": "z103c"})
        # Four maps zipped.
        self.assertEqual(PFAEngine.fromYaml('''
input: "null"
output: {type: map, values: string}
action:
  map.zipmap:
    - {value: {"0": "x", "1": "y", "2": "z"}, type: {type: map, values: string}}
    - {value: {"0": 101, "1": 102, "2": 103}, type: {type: map, values: int}}
    - {value: {"0": "a", "1": "b", "2": "c"}, type: {type: map, values: string}}
    - {value: {"0": true, "1": false, "2": true}, type: {type: map, values: boolean}}
    - params: [{a: string}, {b: int}, {c: string}, {d: boolean}]
      ret: string
      do: {s.concat: [{s.concat: [{s.concat: [a, {s.int: b}]}, c]}, {if: d, then: {string: "-up"}, else: {string: "-down"}}]}
''')[0].action(None), {"0": "x101a-up", "1": "y102b-down", "2": "z103c-up"})
    def testZipMapWithKey(self):
        """map.zipmapWithKey: like zipmap, but the shared key is passed as the first argument."""
        # Two maps plus the key.
        self.assertEqual(PFAEngine.fromYaml('''
input: "null"
output: {type: map, values: string}
action:
  map.zipmapWithKey:
    - {value: {"0": "x", "1": "y", "2": "z"}, type: {type: map, values: string}}
    - {value: {"0": 101, "1": 102, "2": 103}, type: {type: map, values: int}}
    - params: [{k: string}, {a: string}, {b: int}]
      ret: string
      do: {s.concat: [{s.concat: [k, a]}, {s.int: b}]}
''')[0].action(None), {"0": "0x101", "1": "1y102", "2": "2z103"})
        # Three maps plus the key.
        self.assertEqual(PFAEngine.fromYaml('''
input: "null"
output: {type: map, values: string}
action:
  map.zipmapWithKey:
    - {value: {"0": "x", "1": "y", "2": "z"}, type: {type: map, values: string}}
    - {value: {"0": 101, "1": 102, "2": 103}, type: {type: map, values: int}}
    - {value: {"0": "a", "1": "b", "2": "c"}, type: {type: map, values: string}}
    - params: [{k: string}, {a: string}, {b: int}, {c: string}]
      ret: string
      do: {s.concat: [{s.concat: [{s.concat: [k, a]}, {s.int: b}]}, c]}
''')[0].action(None), {"0": "0x101a", "1": "1y102b", "2": "2z103c"})
        # Four maps plus the key.
        self.assertEqual(PFAEngine.fromYaml('''
input: "null"
output: {type: map, values: string}
action:
  map.zipmapWithKey:
    - {value: {"0": "x", "1": "y", "2": "z"}, type: {type: map, values: string}}
    - {value: {"0": 101, "1": 102, "2": 103}, type: {type: map, values: int}}
    - {value: {"0": "a", "1": "b", "2": "c"}, type: {type: map, values: string}}
    - {value: {"0": true, "1": false, "2": true}, type: {type: map, values: boolean}}
    - params: [{k: string}, {a: string}, {b: int}, {c: string}, {d: boolean}]
      ret: string
      do: {s.concat: [{s.concat: [{s.concat: [{s.concat: [k, a]}, {s.int: b}]}, c]}, {if: d, then: {string: "-up"}, else: {string: "-down"}}]}
''')[0].action(None), {"0": "0x101a-up", "1": "1y102b-down", "2": "2z103c-up"})
    def testCorresponds(self):
        """map.corresponds: true iff the predicate holds for the value pair at every shared key."""
        engine, = PFAEngine.fromYaml('''
input: {type: map, values: int}
output: boolean
action:
  map.corresponds:
    - input
    - {value: {"a": "1", "b": "2", "c": "3", "d": "4", "e": "5"}, type: {type: map, values: string}}
    - params: [{x: int}, {y: string}]
      ret: boolean
      do: {"==": [x, {parse.int: [y, 10]}]}
''')
        self.assertTrue(engine.action({"a": 1, "b": 2, "c": 3, "d": 4, "e": 5}))
        self.assertFalse(engine.action({"a": 111, "b": 2, "c": 3, "d": 4, "e": 5}))
    def testCorrespondsWithKey(self):
        """map.correspondsWithKey: predicate also sees the key; key "a" is unconditionally accepted."""
        engine, = PFAEngine.fromYaml('''
input: {type: map, values: int}
output: boolean
action:
  map.correspondsWithKey:
    - input
    - {value: {"a": "1", "b": "2", "c": "3", "d": "4", "e": "5"}, type: {type: map, values: string}}
    - params: [{k: string}, {x: int}, {y: string}]
      ret: boolean
      do:
        if: {"==": [k, {string: "a"}]}
        then: true
        else: {"==": [x, {parse.int: [y, 10]}]}
''')
        self.assertTrue(engine.action({"a": 1, "b": 2, "c": 3, "d": 4, "e": 5}))
        self.assertTrue(engine.action({"a": 111, "b": 2, "c": 3, "d": 4, "e": 5}))
        self.assertFalse(engine.action({"a": 1, "b": 222, "c": 3, "d": 4, "e": 5}))
| 32.925693
| 178
| 0.517003
| 3,577
| 26,143
| 3.778585
| 0.076601
| 0.066588
| 0.094259
| 0.069843
| 0.843149
| 0.827686
| 0.807413
| 0.788103
| 0.769606
| 0.758804
| 0
| 0.051679
| 0.222086
| 26,143
| 793
| 179
| 32.967213
| 0.612922
| 0.028306
| 0
| 0.759825
| 0
| 0.10917
| 0.616442
| 0.001733
| 0
| 0
| 0.00067
| 0
| 0.120815
| 1
| 0.056769
| false
| 0
| 0.004367
| 0
| 0.062591
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
7c24e7387956ea49193ebbb12c292c1dcce8c564
| 49,334
|
py
|
Python
|
sdk/metricsadvisor/azure-ai-metricsadvisor/tests/async_tests/test_data_feeds_async.py
|
ankitarorabit/azure-sdk-for-python
|
dd90281cbad9400f8080754a5ef2f56791a5a88f
|
[
"MIT"
] | null | null | null |
sdk/metricsadvisor/azure-ai-metricsadvisor/tests/async_tests/test_data_feeds_async.py
|
ankitarorabit/azure-sdk-for-python
|
dd90281cbad9400f8080754a5ef2f56791a5a88f
|
[
"MIT"
] | null | null | null |
sdk/metricsadvisor/azure-ai-metricsadvisor/tests/async_tests/test_data_feeds_async.py
|
ankitarorabit/azure-sdk-for-python
|
dd90281cbad9400f8080754a5ef2f56791a5a88f
|
[
"MIT"
] | null | null | null |
# -------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
# --------------------------------------------------------------------------
import datetime
from dateutil.tz import tzutc
import unittest
import pytest
from devtools_testutils import AzureTestCase
from azure.core.exceptions import ResourceNotFoundError
from azure.ai.metricsadvisor.models import (
SqlServerDataFeedSource,
AzureTableDataFeedSource,
AzureBlobDataFeedSource,
AzureCosmosDbDataFeedSource,
DataFeedMetric,
DataFeedDimension,
DataFeedSchema,
DataFeedIngestionSettings,
DataFeedGranularity,
DataFeedMissingDataPointFillSettings,
DataFeedRollupSettings,
AzureApplicationInsightsDataFeedSource,
AzureDataExplorerDataFeedSource,
InfluxDbDataFeedSource,
AzureDataLakeStorageGen2DataFeedSource,
MongoDbDataFeedSource,
MySqlDataFeedSource,
PostgreSqlDataFeedSource,
)
from base_testcase_async import TestMetricsAdvisorAdministrationClientBaseAsync
class TestMetricsAdvisorAdministrationClientAsync(TestMetricsAdvisorAdministrationClientBaseAsync):
@AzureTestCase.await_prepared_test
async def test_create_simple_data_feed(self):
data_feed_name = self.create_random_name("testfeed")
async with self.admin_client:
try:
data_feed = await self.admin_client.create_data_feed(
name=data_feed_name,
source=SqlServerDataFeedSource(
connection_string=self.sql_server_connection_string,
query="select * from adsample2 where Timestamp = @StartTime"
),
granularity="Daily",
schema=["cost", "revenue"],
ingestion_settings=datetime.datetime(2019, 10, 1)
)
self.assertIsNotNone(data_feed.id)
self.assertIsNotNone(data_feed.created_time)
self.assertIsNotNone(data_feed.name)
self.assertEqual(data_feed.source.data_source_type, "SqlServer")
self.assertIsNotNone(data_feed.source.query)
self.assertEqual(data_feed.granularity.granularity_type, "Daily")
self.assertEqual(data_feed.schema.metrics[0].name, "cost")
self.assertEqual(data_feed.schema.metrics[1].name, "revenue")
self.assertEqual(data_feed.ingestion_settings.ingestion_begin_time,
datetime.datetime(2019, 10, 1, tzinfo=tzutc()))
finally:
await self.admin_client.delete_data_feed(data_feed.id)
@AzureTestCase.await_prepared_test
async def test_create_data_feed_from_sql_server(self):
data_feed_name = self.create_random_name("testfeedasync")
async with self.admin_client:
try:
data_feed = await self.admin_client.create_data_feed(
name=data_feed_name,
source=SqlServerDataFeedSource(
connection_string=self.sql_server_connection_string,
query=u"select * from adsample2 where Timestamp = @StartTime"
),
granularity=DataFeedGranularity(
granularity_type="Daily",
),
schema=DataFeedSchema(
metrics=[
DataFeedMetric(name="cost", display_name="display cost", description="the cost"),
DataFeedMetric(name="revenue", display_name="display revenue", description="the revenue")
],
dimensions=[
DataFeedDimension(name="category", display_name="display category"),
DataFeedDimension(name="city", display_name="display city")
],
timestamp_column="Timestamp"
),
ingestion_settings=DataFeedIngestionSettings(
ingestion_begin_time=datetime.datetime(2019, 10, 1),
data_source_request_concurrency=0,
ingestion_retry_delay=-1,
ingestion_start_offset=-1,
stop_retry_after=-1,
),
admin_emails=["yournamehere@microsoft.com"],
data_feed_description="my first data feed",
missing_data_point_fill_settings=DataFeedMissingDataPointFillSettings(
fill_type="SmartFilling"
),
rollup_settings=DataFeedRollupSettings(
rollup_type="NoRollup",
rollup_method="None",
),
viewer_emails=["viewers"],
access_mode="Private",
action_link_template="action link template"
)
self.assertIsNotNone(data_feed.id)
self.assertIsNotNone(data_feed.created_time)
self.assertIsNotNone(data_feed.name)
self.assertEqual(data_feed.source.data_source_type, "SqlServer")
self.assertIsNotNone(data_feed.source.query)
self.assertEqual(data_feed.granularity.granularity_type, "Daily")
self.assertEqual(data_feed.granularity.custom_granularity_value, None)
self.assertEqual(data_feed.schema.metrics[0].name, "cost")
self.assertEqual(data_feed.schema.metrics[1].name, "revenue")
self.assertEqual(data_feed.schema.metrics[0].display_name, "display cost")
self.assertEqual(data_feed.schema.metrics[1].display_name, "display revenue")
self.assertEqual(data_feed.schema.metrics[0].description, "the cost")
self.assertEqual(data_feed.schema.metrics[1].description, "the revenue")
self.assertEqual(data_feed.schema.dimensions[0].name, "category")
self.assertEqual(data_feed.schema.dimensions[1].name, "city")
self.assertEqual(data_feed.schema.dimensions[0].display_name, "display category")
self.assertEqual(data_feed.schema.dimensions[1].display_name, "display city")
self.assertEqual(data_feed.ingestion_settings.ingestion_begin_time,
datetime.datetime(2019, 10, 1, tzinfo=tzutc()))
self.assertEqual(data_feed.ingestion_settings.data_source_request_concurrency, 0)
self.assertEqual(data_feed.ingestion_settings.ingestion_retry_delay, -1)
self.assertEqual(data_feed.ingestion_settings.ingestion_start_offset, -1)
self.assertEqual(data_feed.ingestion_settings.stop_retry_after, -1)
self.assertIn("yournamehere@microsoft.com", data_feed.admin_emails)
self.assertEqual(data_feed.data_feed_description, "my first data feed")
self.assertEqual(data_feed.missing_data_point_fill_settings.fill_type, "SmartFilling")
self.assertEqual(data_feed.rollup_settings.rollup_type, "NoRollup")
self.assertEqual(data_feed.rollup_settings.rollup_method, "None")
self.assertEqual(data_feed.viewer_emails, ["viewers"])
self.assertEqual(data_feed.access_mode, "Private")
self.assertEqual(data_feed.action_link_template, "action link template")
self.assertEqual(data_feed.status, "Active")
self.assertTrue(data_feed.is_admin)
self.assertIsNotNone(data_feed.metric_ids)
finally:
await self.admin_client.delete_data_feed(data_feed.id)
with self.assertRaises(ResourceNotFoundError):
await self.admin_client.get_data_feed(data_feed.id)
@AzureTestCase.await_prepared_test
async def test_create_data_feed_from_sql_server_with_custom_values(self):
data_feed_name = self.create_random_name("testfeedasync")
async with self.admin_client:
try:
data_feed = await self.admin_client.create_data_feed(
name=data_feed_name,
source=SqlServerDataFeedSource(
connection_string=self.sql_server_connection_string,
query=u"select * from adsample2 where Timestamp = @StartTime"
),
granularity=DataFeedGranularity(
granularity_type="Custom",
custom_granularity_value=20
),
schema=DataFeedSchema(
metrics=[
DataFeedMetric(name="cost", display_name="display cost", description="the cost"),
DataFeedMetric(name="revenue", display_name="display revenue", description="the revenue")
],
dimensions=[
DataFeedDimension(name="category", display_name="display category"),
DataFeedDimension(name="city", display_name="display city")
],
timestamp_column="Timestamp"
),
ingestion_settings=DataFeedIngestionSettings(
ingestion_begin_time=datetime.datetime(2019, 10, 1),
data_source_request_concurrency=0,
ingestion_retry_delay=-1,
ingestion_start_offset=-1,
stop_retry_after=-1,
),
admin_emails=["yournamehere@microsoft.com"],
data_feed_description="my first data feed",
missing_data_point_fill_settings=DataFeedMissingDataPointFillSettings(
fill_type="CustomValue",
custom_fill_value=10
),
rollup_settings=DataFeedRollupSettings(
rollup_type="AlreadyRollup",
rollup_method="Sum",
rollup_identification_value="sumrollup"
),
viewer_emails=["viewers"],
access_mode="Private",
action_link_template="action link template"
)
self.assertIsNotNone(data_feed.id)
self.assertIsNotNone(data_feed.created_time)
self.assertIsNotNone(data_feed.name)
self.assertEqual(data_feed.source.data_source_type, "SqlServer")
self.assertIsNotNone(data_feed.source.query)
self.assertEqual(data_feed.granularity.granularity_type, "Custom")
self.assertEqual(data_feed.granularity.custom_granularity_value, 20)
self.assertEqual(data_feed.schema.metrics[0].name, "cost")
self.assertEqual(data_feed.schema.metrics[1].name, "revenue")
self.assertEqual(data_feed.schema.metrics[0].display_name, "display cost")
self.assertEqual(data_feed.schema.metrics[1].display_name, "display revenue")
self.assertEqual(data_feed.schema.metrics[0].description, "the cost")
self.assertEqual(data_feed.schema.metrics[1].description, "the revenue")
self.assertEqual(data_feed.schema.dimensions[0].name, "category")
self.assertEqual(data_feed.schema.dimensions[1].name, "city")
self.assertEqual(data_feed.schema.dimensions[0].display_name, "display category")
self.assertEqual(data_feed.schema.dimensions[1].display_name, "display city")
self.assertEqual(data_feed.ingestion_settings.ingestion_begin_time,
datetime.datetime(2019, 10, 1, tzinfo=tzutc()))
self.assertEqual(data_feed.ingestion_settings.data_source_request_concurrency, 0)
self.assertEqual(data_feed.ingestion_settings.ingestion_retry_delay, -1)
self.assertEqual(data_feed.ingestion_settings.ingestion_start_offset, -1)
self.assertEqual(data_feed.ingestion_settings.stop_retry_after, -1)
self.assertIn("yournamehere@microsoft.com", data_feed.admin_emails)
self.assertEqual(data_feed.data_feed_description, "my first data feed")
self.assertEqual(data_feed.missing_data_point_fill_settings.fill_type, "CustomValue")
self.assertEqual(data_feed.missing_data_point_fill_settings.custom_fill_value, 10)
self.assertEqual(data_feed.rollup_settings.rollup_type, "AlreadyRollup")
self.assertEqual(data_feed.rollup_settings.rollup_method, "Sum")
self.assertEqual(data_feed.rollup_settings.rollup_identification_value, "sumrollup")
self.assertEqual(data_feed.viewer_emails, ["viewers"])
self.assertEqual(data_feed.access_mode, "Private")
self.assertEqual(data_feed.action_link_template, "action link template")
self.assertEqual(data_feed.status, "Active")
self.assertTrue(data_feed.is_admin)
self.assertIsNotNone(data_feed.metric_ids)
finally:
await self.admin_client.delete_data_feed(data_feed.id)
with self.assertRaises(ResourceNotFoundError):
await self.admin_client.get_data_feed(data_feed.id)
@AzureTestCase.await_prepared_test
async def test_create_data_feed_with_azure_table(self):
name = self.create_random_name("tablefeedasync")
async with self.admin_client:
try:
data_feed = await self.admin_client.create_data_feed(
name=name,
source=AzureTableDataFeedSource(
connection_string=self.azure_table_connection_string,
query="PartitionKey ge '@StartTime' and PartitionKey lt '@EndTime'",
table="adsample"
),
granularity=DataFeedGranularity(
granularity_type="Daily",
),
schema=DataFeedSchema(
metrics=[
DataFeedMetric(name="cost"),
DataFeedMetric(name="revenue")
],
dimensions=[
DataFeedDimension(name="category"),
DataFeedDimension(name="city")
],
),
ingestion_settings=DataFeedIngestionSettings(
ingestion_begin_time=datetime.datetime(2019, 10, 1),
),
)
self.assertIsNotNone(data_feed.id)
self.assertIsNotNone(data_feed.created_time)
self.assertIsNotNone(data_feed.name)
self.assertEqual(data_feed.source.data_source_type, "AzureTable")
self.assertEqual(data_feed.source.table, "adsample")
self.assertEqual(data_feed.source.query, "PartitionKey ge '@StartTime' and PartitionKey lt '@EndTime'")
finally:
await self.admin_client.delete_data_feed(data_feed.id)
@AzureTestCase.await_prepared_test
async def test_create_data_feed_with_azure_blob(self):
name = self.create_random_name("blobfeedasync")
async with self.admin_client:
try:
data_feed = await self.admin_client.create_data_feed(
name=name,
source=AzureBlobDataFeedSource(
connection_string=self.azure_blob_connection_string,
container="adsample",
blob_template="%Y/%m/%d/%h/JsonFormatV2.json"
),
granularity=DataFeedGranularity(
granularity_type="Daily",
),
schema=DataFeedSchema(
metrics=[
DataFeedMetric(name="cost"),
DataFeedMetric(name="revenue")
],
dimensions=[
DataFeedDimension(name="category"),
DataFeedDimension(name="city")
],
),
ingestion_settings=DataFeedIngestionSettings(
ingestion_begin_time=datetime.datetime(2019, 10, 1),
),
)
self.assertIsNotNone(data_feed.id)
self.assertIsNotNone(data_feed.created_time)
self.assertIsNotNone(data_feed.name)
self.assertEqual(data_feed.source.data_source_type, "AzureBlob")
self.assertEqual(data_feed.source.container, "adsample")
self.assertEqual(data_feed.source.blob_template, "%Y/%m/%d/%h/JsonFormatV2.json")
finally:
await self.admin_client.delete_data_feed(data_feed.id)
@AzureTestCase.await_prepared_test
async def test_create_data_feed_with_azure_cosmos_db(self):
name = self.create_random_name("cosmosfeedasync")
async with self.admin_client:
try:
data_feed = await self.admin_client.create_data_feed(
name=name,
source=AzureCosmosDbDataFeedSource(
connection_string=self.azure_cosmosdb_connection_string,
sql_query="'SELECT * FROM Items I where I.Timestamp >= @StartTime and I.Timestamp < @EndTime'",
database="adsample",
collection_id="adsample"
),
granularity=DataFeedGranularity(
granularity_type="Daily",
),
schema=DataFeedSchema(
metrics=[
DataFeedMetric(name="cost"),
DataFeedMetric(name="revenue")
],
dimensions=[
DataFeedDimension(name="category"),
DataFeedDimension(name="city")
],
),
ingestion_settings=DataFeedIngestionSettings(
ingestion_begin_time=datetime.datetime(2019, 10, 1),
),
)
self.assertIsNotNone(data_feed.id)
self.assertIsNotNone(data_feed.created_time)
self.assertIsNotNone(data_feed.name)
self.assertEqual(data_feed.source.data_source_type, "AzureCosmosDB")
self.assertEqual(data_feed.source.database, "adsample")
self.assertEqual(data_feed.source.collection_id, "adsample")
self.assertEqual(data_feed.source.sql_query, "'SELECT * FROM Items I where I.Timestamp >= @StartTime and I.Timestamp < @EndTime'")
finally:
await self.admin_client.delete_data_feed(data_feed.id)
@AzureTestCase.await_prepared_test
async def test_create_data_feed_with_application_insights(self):
name = self.create_random_name("applicationinsightsasync")
async with self.admin_client:
try:
query = "let gran=60m; let starttime=datetime(@StartTime); let endtime=starttime + gran; requests | " \
"where timestamp >= starttime and timestamp < endtime | summarize request_count = count(), " \
"duration_avg_ms = avg(duration), duration_95th_ms = percentile(duration, 95), " \
"duration_max_ms = max(duration) by resultCode"
data_feed = await self.admin_client.create_data_feed(
name=name,
source=AzureApplicationInsightsDataFeedSource(
azure_cloud="Azure",
application_id="3706fe8b-98f1-47c7-bf69-b73b6e53274d",
api_key=self.application_insights_api_key,
query=query
),
granularity=DataFeedGranularity(
granularity_type="Daily",
),
schema=DataFeedSchema(
metrics=[
DataFeedMetric(name="cost"),
DataFeedMetric(name="revenue")
],
dimensions=[
DataFeedDimension(name="category"),
DataFeedDimension(name="city")
],
),
ingestion_settings=DataFeedIngestionSettings(
ingestion_begin_time=datetime.datetime(2020, 7, 1),
),
)
self.assertIsNotNone(data_feed.id)
self.assertIsNotNone(data_feed.created_time)
self.assertIsNotNone(data_feed.name)
self.assertEqual(data_feed.source.data_source_type, "AzureApplicationInsights")
self.assertEqual(data_feed.source.application_id, "3706fe8b-98f1-47c7-bf69-b73b6e53274d")
self.assertIsNotNone(data_feed.source.query)
finally:
await self.admin_client.delete_data_feed(data_feed.id)
@AzureTestCase.await_prepared_test
async def test_create_data_feed_with_data_explorer(self):
name = self.create_random_name("azuredataexplorerasync")
async with self.admin_client:
try:
query = "let StartDateTime = datetime(@StartTime); let EndDateTime = StartDateTime + 1d; " \
"adsample | where Timestamp >= StartDateTime and Timestamp < EndDateTime"
data_feed = await self.admin_client.create_data_feed(
name=name,
source=AzureDataExplorerDataFeedSource(
connection_string=self.azure_data_explorer_connection_string,
query=query
),
granularity=DataFeedGranularity(
granularity_type="Daily",
),
schema=DataFeedSchema(
metrics=[
DataFeedMetric(name="cost"),
DataFeedMetric(name="revenue")
],
dimensions=[
DataFeedDimension(name="category"),
DataFeedDimension(name="city")
],
),
ingestion_settings=DataFeedIngestionSettings(
ingestion_begin_time=datetime.datetime(2019, 1, 1),
),
)
self.assertIsNotNone(data_feed.id)
self.assertIsNotNone(data_feed.created_time)
self.assertIsNotNone(data_feed.name)
self.assertEqual(data_feed.source.data_source_type, "AzureDataExplorer")
self.assertEqual(data_feed.source.query, query)
finally:
await self.admin_client.delete_data_feed(data_feed.id)
@AzureTestCase.await_prepared_test
async def test_create_data_feed_with_influxdb(self):
name = self.create_random_name("influxdbasync")
async with self.admin_client:
try:
data_feed = await self.admin_client.create_data_feed(
name=name,
source=InfluxDbDataFeedSource(
connection_string=self.influxdb_connection_string,
database="adsample",
user_name="adreadonly",
password=self.influxdb_password,
query="'select * from adsample2 where Timestamp = @StartTime'"
),
granularity=DataFeedGranularity(
granularity_type="Daily",
),
schema=DataFeedSchema(
metrics=[
DataFeedMetric(name="cost"),
DataFeedMetric(name="revenue")
],
dimensions=[
DataFeedDimension(name="category"),
DataFeedDimension(name="city")
],
),
ingestion_settings=DataFeedIngestionSettings(
ingestion_begin_time=datetime.datetime(2019, 1, 1),
),
)
self.assertIsNotNone(data_feed.id)
self.assertIsNotNone(data_feed.created_time)
self.assertIsNotNone(data_feed.name)
self.assertEqual(data_feed.source.data_source_type, "InfluxDB")
self.assertIsNotNone(data_feed.source.query)
self.assertEqual(data_feed.source.database, "adsample")
self.assertEqual(data_feed.source.user_name, "adreadonly")
finally:
await self.admin_client.delete_data_feed(data_feed.id)
@AzureTestCase.await_prepared_test
async def test_create_data_feed_with_datalake(self):
name = self.create_random_name("datalakeasync")
async with self.admin_client:
try:
data_feed = await self.admin_client.create_data_feed(
name=name,
source=AzureDataLakeStorageGen2DataFeedSource(
account_name="adsampledatalakegen2",
account_key=self.azure_datalake_account_key,
file_system_name="adsample",
directory_template="%Y/%m/%d",
file_template="adsample.json"
),
granularity=DataFeedGranularity(
granularity_type="Daily",
),
schema=DataFeedSchema(
metrics=[
DataFeedMetric(name="cost", display_name="Cost"),
DataFeedMetric(name="revenue", display_name="Revenue")
],
dimensions=[
DataFeedDimension(name="category", display_name="Category"),
DataFeedDimension(name="city", display_name="City")
],
),
ingestion_settings=DataFeedIngestionSettings(
ingestion_begin_time=datetime.datetime(2019, 1, 1),
),
)
self.assertIsNotNone(data_feed.id)
self.assertIsNotNone(data_feed.created_time)
self.assertIsNotNone(data_feed.name)
self.assertEqual(data_feed.source.data_source_type, "AzureDataLakeStorageGen2")
self.assertEqual(data_feed.source.account_name, "adsampledatalakegen2")
self.assertEqual(data_feed.source.file_system_name, "adsample")
self.assertEqual(data_feed.source.directory_template, "%Y/%m/%d")
self.assertEqual(data_feed.source.file_template, "adsample.json")
finally:
await self.admin_client.delete_data_feed(data_feed.id)
@AzureTestCase.await_prepared_test
async def test_create_data_feed_with_mongodb(self):
name = self.create_random_name("mongodbasync")
async with self.admin_client:
try:
data_feed = await self.admin_client.create_data_feed(
name=name,
source=MongoDbDataFeedSource(
connection_string=self.mongodb_connection_string,
database="adsample",
command='{"find": "adsample", "filter": { Timestamp: { $eq: @StartTime }} "batchSize": 2000,}'
),
granularity=DataFeedGranularity(
granularity_type="Daily",
),
schema=DataFeedSchema(
metrics=[
DataFeedMetric(name="cost"),
DataFeedMetric(name="revenue")
],
dimensions=[
DataFeedDimension(name="category"),
DataFeedDimension(name="city")
],
),
ingestion_settings=DataFeedIngestionSettings(
ingestion_begin_time=datetime.datetime(2019, 1, 1),
),
)
self.assertIsNotNone(data_feed.id)
self.assertIsNotNone(data_feed.created_time)
self.assertIsNotNone(data_feed.name)
self.assertEqual(data_feed.source.data_source_type, "MongoDB")
self.assertEqual(data_feed.source.database, "adsample")
self.assertEqual(data_feed.source.command, '{"find": "adsample", "filter": { Timestamp: { $eq: @StartTime }} "batchSize": 2000,}')
finally:
await self.admin_client.delete_data_feed(data_feed.id)
@AzureTestCase.await_prepared_test
async def test_create_data_feed_with_mysql(self):
name = self.create_random_name("mysqlasync")
async with self.admin_client:
try:
data_feed = await self.admin_client.create_data_feed(
name=name,
source=MySqlDataFeedSource(
connection_string=self.mysql_connection_string,
query="'select * from adsample2 where Timestamp = @StartTime'"
),
granularity=DataFeedGranularity(
granularity_type="Daily",
),
schema=DataFeedSchema(
metrics=[
DataFeedMetric(name="cost"),
DataFeedMetric(name="revenue")
],
dimensions=[
DataFeedDimension(name="category"),
DataFeedDimension(name="city")
],
),
ingestion_settings=DataFeedIngestionSettings(
ingestion_begin_time=datetime.datetime(2019, 1, 1),
),
)
self.assertIsNotNone(data_feed.id)
self.assertIsNotNone(data_feed.created_time)
self.assertIsNotNone(data_feed.name)
self.assertEqual(data_feed.source.data_source_type, "MySql")
self.assertEqual(data_feed.source.query, "'select * from adsample2 where Timestamp = @StartTime'")
finally:
await self.admin_client.delete_data_feed(data_feed.id)
@AzureTestCase.await_prepared_test
async def test_create_data_feed_with_postgresql(self):
name = self.create_random_name("postgresqlasync")
async with self.admin_client:
try:
data_feed = await self.admin_client.create_data_feed(
name=name,
source=PostgreSqlDataFeedSource(
connection_string=self.postgresql_connection_string,
query="'select * from adsample2 where Timestamp = @StartTime'"
),
granularity=DataFeedGranularity(
granularity_type="Daily",
),
schema=DataFeedSchema(
metrics=[
DataFeedMetric(name="cost"),
DataFeedMetric(name="revenue")
],
dimensions=[
DataFeedDimension(name="category"),
DataFeedDimension(name="city")
],
),
ingestion_settings=DataFeedIngestionSettings(
ingestion_begin_time=datetime.datetime(2019, 1, 1),
),
)
self.assertIsNotNone(data_feed.id)
self.assertIsNotNone(data_feed.created_time)
self.assertIsNotNone(data_feed.name)
self.assertEqual(data_feed.source.data_source_type, "PostgreSql")
self.assertEqual(data_feed.source.query, "'select * from adsample2 where Timestamp = @StartTime'")
finally:
await self.admin_client.delete_data_feed(data_feed.id)
@AzureTestCase.await_prepared_test
async def test_list_data_feeds(self):
async with self.admin_client:
feeds = self.admin_client.list_data_feeds()
feeds_list = []
async for item in feeds:
feeds_list.append(item)
assert len(feeds_list) > 0
@AzureTestCase.await_prepared_test
async def test_list_data_feeds_with_data_feed_name(self):
async with self.admin_client:
feeds = self.admin_client.list_data_feeds(data_feed_name="azsqlDatafeed")
feeds_list = []
async for item in feeds:
feeds_list.append(item)
assert len(feeds_list) == 1
@AzureTestCase.await_prepared_test
async def test_list_data_feeds_with_status(self):
async with self.admin_client:
feeds = self.admin_client.list_data_feeds(status="Paused")
feeds_list = []
async for item in feeds:
feeds_list.append(item)
assert len(feeds_list) == 0
@AzureTestCase.await_prepared_test
async def test_list_data_feeds_with_source_type(self):
async with self.admin_client:
feeds = self.admin_client.list_data_feeds(data_source_type="SqlServer")
feeds_list = []
async for item in feeds:
feeds_list.append(item)
assert len(feeds_list) > 0
@AzureTestCase.await_prepared_test
async def test_list_data_feeds_with_granularity_type(self):
async with self.admin_client:
feeds = self.admin_client.list_data_feeds(granularity_type="Daily")
feeds_list = []
async for item in feeds:
feeds_list.append(item)
assert len(feeds_list) > 0
@unittest.skip("skip test")
@AzureTestCase.await_prepared_test
async def test_list_data_feeds_with_skip(self):
    """skip=1 should return exactly one fewer feed than the full listing."""
    async with self.admin_client:
        everything = [feed async for feed in self.admin_client.list_data_feeds()]
        after_skip = [feed async for feed in self.admin_client.list_data_feeds(skip=1)]
        assert len(everything) == len(after_skip) + 1
@AzureTestCase.await_prepared_test
async def test_update_data_feed_with_model(self):
    """Update every mutable property through the model object and verify the
    service round-trips each new value.
    """
    async with self.admin_client:
        data_feed = await self._create_data_feed_for_update("update")
        try:
            # Mutate the model in place; update_data_feed(model) sends the
            # whole model as the update payload.
            data_feed.name = "update"
            data_feed.data_feed_description = "updated"
            data_feed.schema.timestamp_column = "time"
            data_feed.ingestion_settings.ingestion_begin_time = datetime.datetime(2020, 12, 10)
            data_feed.ingestion_settings.ingestion_start_offset = 1
            data_feed.ingestion_settings.data_source_request_concurrency = 1
            data_feed.ingestion_settings.ingestion_retry_delay = 1
            data_feed.ingestion_settings.stop_retry_after = 1
            data_feed.rollup_settings.rollup_type = "AlreadyRollup"
            data_feed.rollup_settings.rollup_method = "Sum"
            data_feed.rollup_settings.rollup_identification_value = "sumrollup"
            data_feed.rollup_settings.auto_rollup_group_by_column_names = []
            data_feed.missing_data_point_fill_settings.fill_type = "CustomValue"
            data_feed.missing_data_point_fill_settings.custom_fill_value = 2
            data_feed.access_mode = "Public"
            data_feed.viewer_emails = ["updated"]
            data_feed.status = "Paused"
            data_feed.action_link_template = "updated"
            data_feed.source.connection_string = "updated"
            data_feed.source.query = "get data"
            await self.admin_client.update_data_feed(data_feed)
            # Re-fetch and confirm each property was persisted.
            updated = await self.admin_client.get_data_feed(data_feed.id)
            self.assertEqual(updated.name, "update")
            self.assertEqual(updated.data_feed_description, "updated")
            self.assertEqual(updated.schema.timestamp_column, "time")
            # The service returns the begin time as a tz-aware UTC datetime.
            self.assertEqual(updated.ingestion_settings.ingestion_begin_time,
                             datetime.datetime(2020, 12, 10, tzinfo=tzutc()))
            self.assertEqual(updated.ingestion_settings.ingestion_start_offset, 1)
            self.assertEqual(updated.ingestion_settings.data_source_request_concurrency, 1)
            self.assertEqual(updated.ingestion_settings.ingestion_retry_delay, 1)
            self.assertEqual(updated.ingestion_settings.stop_retry_after, 1)
            self.assertEqual(updated.rollup_settings.rollup_type, "AlreadyRollup")
            self.assertEqual(updated.rollup_settings.rollup_method, "Sum")
            self.assertEqual(updated.rollup_settings.rollup_identification_value, "sumrollup")
            self.assertEqual(updated.missing_data_point_fill_settings.fill_type, "CustomValue")
            self.assertEqual(updated.missing_data_point_fill_settings.custom_fill_value, 2)
            self.assertEqual(updated.access_mode, "Public")
            self.assertEqual(updated.viewer_emails, ["updated"])
            self.assertEqual(updated.status, "Paused")
            self.assertEqual(updated.action_link_template, "updated")
            # connection_string is not asserted — presumably the service does
            # not echo credentials back; TODO confirm.
            self.assertEqual(updated.source.query, "get data")
        finally:
            # Always clean up the feed created for this test.
            await self.admin_client.delete_data_feed(data_feed.id)
@AzureTestCase.await_prepared_test
async def test_update_data_feed_with_kwargs(self):
    """Update every mutable property via keyword arguments (passing only the
    feed id) and verify the service round-trips each new value.
    """
    async with self.admin_client:
        data_feed = await self._create_data_feed_for_update("update")
        try:
            # Same property set as the model-based test, but supplied as
            # individual keyword arguments against the feed id.
            await self.admin_client.update_data_feed(
                data_feed.id,
                name="update",
                data_feed_description="updated",
                timestamp_column="time",
                ingestion_begin_time=datetime.datetime(2020, 12, 10),
                ingestion_start_offset=1,
                data_source_request_concurrency=1,
                ingestion_retry_delay=1,
                stop_retry_after=1,
                rollup_type="AlreadyRollup",
                rollup_method="Sum",
                rollup_identification_value="sumrollup",
                auto_rollup_group_by_column_names=[],
                fill_type="CustomValue",
                custom_fill_value=2,
                access_mode="Public",
                viewer_emails=["updated"],
                status="Paused",
                action_link_template="updated",
                source=SqlServerDataFeedSource(
                    connection_string="updated",
                    query="get data"
                )
            )
            # Re-fetch and confirm each property was persisted.
            updated = await self.admin_client.get_data_feed(data_feed.id)
            self.assertEqual(updated.name, "update")
            self.assertEqual(updated.data_feed_description, "updated")
            self.assertEqual(updated.schema.timestamp_column, "time")
            # The service returns the begin time as a tz-aware UTC datetime.
            self.assertEqual(updated.ingestion_settings.ingestion_begin_time,
                             datetime.datetime(2020, 12, 10, tzinfo=tzutc()))
            self.assertEqual(updated.ingestion_settings.ingestion_start_offset, 1)
            self.assertEqual(updated.ingestion_settings.data_source_request_concurrency, 1)
            self.assertEqual(updated.ingestion_settings.ingestion_retry_delay, 1)
            self.assertEqual(updated.ingestion_settings.stop_retry_after, 1)
            self.assertEqual(updated.rollup_settings.rollup_type, "AlreadyRollup")
            self.assertEqual(updated.rollup_settings.rollup_method, "Sum")
            self.assertEqual(updated.rollup_settings.rollup_identification_value, "sumrollup")
            self.assertEqual(updated.missing_data_point_fill_settings.fill_type, "CustomValue")
            self.assertEqual(updated.missing_data_point_fill_settings.custom_fill_value, 2)
            self.assertEqual(updated.access_mode, "Public")
            self.assertEqual(updated.viewer_emails, ["updated"])
            self.assertEqual(updated.status, "Paused")
            self.assertEqual(updated.action_link_template, "updated")
            self.assertEqual(updated.source.query, "get data")
        finally:
            # Always clean up the feed created for this test.
            await self.admin_client.delete_data_feed(data_feed.id)
@AzureTestCase.await_prepared_test
async def test_update_data_feed_with_model_and_kwargs(self):
    """When both a model and keyword arguments are passed, the keyword
    arguments take precedence over the model's values.

    Model-only properties (name, description here) keep the model's value;
    every property also supplied as a kwarg gets the kwarg's value.
    """
    async with self.admin_client:
        data_feed = await self._create_data_feed_for_update("update")
        try:
            # Set conflicting values on the model; those also passed as
            # kwargs below ("don't update me") must be overridden.
            data_feed.name = "updateMe"
            data_feed.data_feed_description = "updateMe"
            data_feed.schema.timestamp_column = "don't update me"
            data_feed.ingestion_settings.ingestion_begin_time = datetime.datetime(2020, 12, 22)
            data_feed.ingestion_settings.ingestion_start_offset = 2
            data_feed.ingestion_settings.data_source_request_concurrency = 2
            data_feed.ingestion_settings.ingestion_retry_delay = 2
            data_feed.ingestion_settings.stop_retry_after = 2
            data_feed.rollup_settings.rollup_type = "don't update me"
            data_feed.rollup_settings.rollup_method = "don't update me"
            data_feed.rollup_settings.rollup_identification_value = "don't update me"
            data_feed.rollup_settings.auto_rollup_group_by_column_names = []
            data_feed.missing_data_point_fill_settings.fill_type = "don't update me"
            data_feed.missing_data_point_fill_settings.custom_fill_value = 4
            data_feed.access_mode = "don't update me"
            data_feed.viewer_emails = ["don't update me"]
            data_feed.status = "don't update me"
            data_feed.action_link_template = "don't update me"
            data_feed.source.connection_string = "don't update me"
            data_feed.source.query = "don't update me"
            await self.admin_client.update_data_feed(
                data_feed,
                timestamp_column="time",
                ingestion_begin_time=datetime.datetime(2020, 12, 10),
                ingestion_start_offset=1,
                data_source_request_concurrency=1,
                ingestion_retry_delay=1,
                stop_retry_after=1,
                rollup_type="AlreadyRollup",
                rollup_method="Sum",
                rollup_identification_value="sumrollup",
                auto_rollup_group_by_column_names=[],
                fill_type="CustomValue",
                custom_fill_value=2,
                access_mode="Public",
                viewer_emails=["updated"],
                status="Paused",
                action_link_template="updated",
                source=SqlServerDataFeedSource(
                    connection_string="updated",
                    query="get data"
                )
            )
            updated = await self.admin_client.get_data_feed(data_feed.id)
            # name/description had no kwarg override -> model values win.
            self.assertEqual(updated.name, "updateMe")
            self.assertEqual(updated.data_feed_description, "updateMe")
            # Everything below was overridden by kwargs.
            self.assertEqual(updated.schema.timestamp_column, "time")
            self.assertEqual(updated.ingestion_settings.ingestion_begin_time,
                             datetime.datetime(2020, 12, 10, tzinfo=tzutc()))
            self.assertEqual(updated.ingestion_settings.ingestion_start_offset, 1)
            self.assertEqual(updated.ingestion_settings.data_source_request_concurrency, 1)
            self.assertEqual(updated.ingestion_settings.ingestion_retry_delay, 1)
            self.assertEqual(updated.ingestion_settings.stop_retry_after, 1)
            self.assertEqual(updated.rollup_settings.rollup_type, "AlreadyRollup")
            self.assertEqual(updated.rollup_settings.rollup_method, "Sum")
            self.assertEqual(updated.rollup_settings.rollup_identification_value, "sumrollup")
            self.assertEqual(updated.missing_data_point_fill_settings.fill_type, "CustomValue")
            self.assertEqual(updated.missing_data_point_fill_settings.custom_fill_value, 2)
            self.assertEqual(updated.access_mode, "Public")
            self.assertEqual(updated.viewer_emails, ["updated"])
            self.assertEqual(updated.status, "Paused")
            self.assertEqual(updated.action_link_template, "updated")
            self.assertEqual(updated.source.query, "get data")
        finally:
            # Always clean up the feed created for this test.
            await self.admin_client.delete_data_feed(data_feed.id)
@unittest.skip("skip test")
@AzureTestCase.await_prepared_test
async def test_update_data_feed_by_reseting_properties(self):
    """Passing None for updatable properties should reset them to service
    defaults; several properties currently do not clear (see commented-out
    assertions below).
    """
    async with self.admin_client:
        data_feed = await self._create_data_feed_for_update("update")
        try:
            # None is a request to reset each property to its default.
            await self.admin_client.update_data_feed(
                data_feed.id,
                name="update",
                data_feed_description=None,
                timestamp_column=None,
                ingestion_start_offset=None,
                data_source_request_concurrency=None,
                ingestion_retry_delay=None,
                stop_retry_after=None,
                rollup_type=None,
                rollup_method=None,
                rollup_identification_value=None,
                auto_rollup_group_by_column_names=None,
                fill_type=None,
                custom_fill_value=None,
                access_mode=None,
                viewer_emails=None,
                status=None,
                action_link_template=None,
            )
            updated = await self.admin_client.get_data_feed(data_feed.id)
            self.assertEqual(updated.name, "update")
            # self.assertEqual(updated.data_feed_description, "") # doesn't currently clear
            # self.assertEqual(updated.schema.timestamp_column, "") # doesn't currently clear
            # The values below are the service-side defaults after reset.
            self.assertEqual(updated.ingestion_settings.ingestion_begin_time,
                             datetime.datetime(2019, 10, 1, tzinfo=tzutc()))
            self.assertEqual(updated.ingestion_settings.ingestion_start_offset, -1)
            self.assertEqual(updated.ingestion_settings.data_source_request_concurrency, 0)
            self.assertEqual(updated.ingestion_settings.ingestion_retry_delay, -1)
            self.assertEqual(updated.ingestion_settings.stop_retry_after, -1)
            self.assertEqual(updated.rollup_settings.rollup_type, "NoRollup")
            self.assertEqual(updated.rollup_settings.rollup_method, "None")
            self.assertEqual(updated.rollup_settings.rollup_identification_value, None)
            self.assertEqual(updated.missing_data_point_fill_settings.fill_type, "SmartFilling")
            self.assertEqual(updated.missing_data_point_fill_settings.custom_fill_value, 0)
            self.assertEqual(updated.access_mode, "Private")
            # self.assertEqual(updated.viewer_emails, ["viewers"]) # doesn't currently clear
            self.assertEqual(updated.status, "Active")
            # self.assertEqual(updated.action_link_template, "updated") # doesn't currently clear
        finally:
            # Always clean up the feed created for this test.
            await self.admin_client.delete_data_feed(data_feed.id)
| 51.985248
| 146
| 0.576134
| 4,459
| 49,334
| 6.07468
| 0.063916
| 0.090671
| 0.061727
| 0.074722
| 0.866357
| 0.846938
| 0.813896
| 0.785654
| 0.74892
| 0.738509
| 0
| 0.010273
| 0.342948
| 49,334
| 948
| 147
| 52.040084
| 0.825359
| 0.012547
| 0
| 0.704128
| 0
| 0.005734
| 0.083448
| 0.007803
| 0
| 0
| 0
| 0
| 0.244266
| 1
| 0
| false
| 0.001147
| 0.009174
| 0
| 0.010321
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
7c3e9f5e52dbba77494ec3d085a2b69bc091d33a
| 20,776
|
py
|
Python
|
tests/resource.py
|
jannon/slumber
|
319e45218f0a60683d462b0eb3990ca29e8f9518
|
[
"BSD-2-Clause"
] | null | null | null |
tests/resource.py
|
jannon/slumber
|
319e45218f0a60683d462b0eb3990ca29e8f9518
|
[
"BSD-2-Clause"
] | null | null | null |
tests/resource.py
|
jannon/slumber
|
319e45218f0a60683d462b0eb3990ca29e8f9518
|
[
"BSD-2-Clause"
] | null | null | null |
# -*- coding: utf-8 -*-
import sys
import mock
import requests
import slumber
import slumber.serialize
import unittest2 as unittest
from slumber import exceptions
class ResourceTestCase(unittest.TestCase):
    """Unit tests for slumber's Resource/API objects.

    Every test injects a ``mock.Mock(spec=requests.Session)`` (plus a real
    JSON serializer) into the resource's internal ``_store``, so no real HTTP
    traffic occurs; the canned ``requests.Response`` mocks drive each path.
    """

    def setUp(self):
        # Resource rooted at a fixed URL; append_slash=False so the URL is
        # used verbatim in the mocked request assertions below.
        self.base_resource = slumber.Resource(base_url="http://example/api/v1/test",
                                              format="json",
                                              append_slash=False,
                                              api=slumber.API("http://example/api/v1/test"))

    def test_get_200_json(self):
        # GET + 200/JSON: _request returns the raw response unchanged,
        # .get() returns the deserialized JSON body.
        r = mock.Mock(spec=requests.Response)
        r.status_code = 200
        r.headers = {"content-type": "application/json"}
        r.content = '{"result": ["a", "b", "c"]}'
        self.base_resource._store.update({
            "session": mock.Mock(spec=requests.Session),
            "serializer": slumber.serialize.Serializer(),
        })
        self.base_resource._store["session"].request.return_value = r
        resp = self.base_resource._request("GET")
        self.assertTrue(resp is r)
        self.assertEqual(resp.content, r.content)
        self.base_resource._store["session"].request.assert_called_once_with(
            "GET",
            "http://example/api/v1/test",
            data=None,
            files=None,
            params=None,
            headers={"content-type": self.base_resource._store["serializer"].get_content_type(),
                     "accept": self.base_resource._store["serializer"].get_content_type()}
        )
        resp = self.base_resource.get()
        self.assertEqual(resp['result'], ['a', 'b', 'c'])

    def test_get_200_text(self):
        # Non-JSON content type: .get() hands back the raw content.
        r = mock.Mock(spec=requests.Response)
        r.status_code = 200
        r.headers = {"content-type": "text/plain"}
        r.content = "Mocked Content"
        self.base_resource._store.update({
            "session": mock.Mock(spec=requests.Session),
            "serializer": slumber.serialize.Serializer(),
        })
        self.base_resource._store["session"].request.return_value = r
        resp = self.base_resource._request("GET")
        self.assertTrue(resp is r)
        self.assertEqual(resp.content, "Mocked Content")
        self.base_resource._store["session"].request.assert_called_once_with(
            "GET",
            "http://example/api/v1/test",
            data=None,
            files=None,
            params=None,
            headers={"content-type": self.base_resource._store["serializer"].get_content_type(),
                     "accept": self.base_resource._store["serializer"].get_content_type()}
        )
        resp = self.base_resource.get()
        self.assertEqual(resp, r.content)

    def test_options_200_json(self):
        # OPTIONS: .options() returns the deserialized actions metadata.
        r = mock.Mock(spec=requests.Response)
        r.status_code = 200
        r.headers = {"content-type": "application/json"}
        r.content = '{"actions": {"POST": {"foo": {"required": false, "type": "string"}}}}'
        self.base_resource._store.update({
            "session": mock.Mock(spec=requests.Session),
            "serializer": slumber.serialize.Serializer(),
        })
        self.base_resource._store["session"].request.return_value = r
        resp = self.base_resource._request("OPTIONS")
        self.assertTrue(resp is r)
        self.assertEqual(resp.content, r.content)
        self.base_resource._store["session"].request.assert_called_once_with(
            "OPTIONS",
            "http://example/api/v1/test",
            data=None,
            files=None,
            params=None,
            headers={"content-type": self.base_resource._store["serializer"].get_content_type(),
                     "accept": self.base_resource._store["serializer"].get_content_type()}
        )
        resp = self.base_resource.options()
        self.assertTrue('POST' in resp['actions'])
        self.assertTrue('foo' in resp['actions']['POST'])
        self.assertTrue('type' in resp['actions']['POST']['foo'])
        self.assertEqual(resp['actions']['POST']['foo']['type'], 'string')

    def test_head_200_json(self):
        # HEAD: empty body; .head() returns the (empty) content.
        r = mock.Mock(spec=requests.Response)
        r.status_code = 200
        r.headers = {"content-type": "application/json"}
        r.content = ''
        self.base_resource._store.update({
            "session": mock.Mock(spec=requests.Session),
            "serializer": slumber.serialize.Serializer(),
        })
        self.base_resource._store["session"].request.return_value = r
        resp = self.base_resource._request("HEAD")
        self.assertTrue(resp is r)
        self.assertEqual(resp.content, r.content)
        self.base_resource._store["session"].request.assert_called_once_with(
            "HEAD",
            "http://example/api/v1/test",
            data=None,
            files=None,
            params=None,
            headers={"content-type": self.base_resource._store["serializer"].get_content_type(),
                     "accept": self.base_resource._store["serializer"].get_content_type()}
        )
        resp = self.base_resource.head()
        self.assertEqual(resp, r.content)

    def test_post_201_redirect(self):
        # POST returning 201 + Location: the side_effect supplies a follow-up
        # 200 response; .post() ends up with the follow-up's decoded body.
        r1 = mock.Mock(spec=requests.Response)
        r1.status_code = 201
        r1.headers = {"location": "http://example/api/v1/test/1"}
        r1.content = ''
        r2 = mock.Mock(spec=requests.Response)
        r2.status_code = 200
        r2.headers = {"content-type": "application/json"}
        r2.content = '{"result": ["a", "b", "c"]}'
        self.base_resource._store.update({
            "session": mock.Mock(spec=requests.Session),
            "serializer": slumber.serialize.Serializer(),
        })
        self.base_resource._store["session"].request.side_effect = (r1, r2)
        resp = self.base_resource._request("POST")
        self.assertTrue(resp is r1)
        self.assertEqual(resp.content, r1.content)
        self.base_resource._store["session"].request.assert_called_once_with(
            "POST",
            "http://example/api/v1/test",
            data=None,
            files=None,
            params=None,
            headers={"content-type": self.base_resource._store["serializer"].get_content_type(),
                     "accept": self.base_resource._store["serializer"].get_content_type()}
        )
        resp = self.base_resource.post(data={'foo': 'bar'})
        self.assertEqual(resp['result'], ['a', 'b', 'c'])

    def test_post_decodable_response(self):
        # POST returning a decodable JSON body directly (no redirect).
        r = mock.Mock(spec=requests.Response)
        r.status_code = 200
        r.content = '{"result": ["a", "b", "c"]}'
        r.headers = {"content-type": "application/json"}
        self.base_resource._store.update({
            "session": mock.Mock(spec=requests.Session),
            "serializer": slumber.serialize.Serializer(),
        })
        self.base_resource._store["session"].request.return_value = r
        resp = self.base_resource._request("POST")
        self.assertTrue(resp is r)
        self.assertEqual(resp.content, r.content)
        self.base_resource._store["session"].request.assert_called_once_with(
            "POST",
            "http://example/api/v1/test",
            data=None,
            files=None,
            params=None,
            headers={"content-type": self.base_resource._store["serializer"].get_content_type(),
                     "accept": self.base_resource._store["serializer"].get_content_type()}
        )
        resp = self.base_resource.post(data={'foo': 'bar'})
        self.assertEqual(resp['result'], ['a', 'b', 'c'])

    def test_patch_201_redirect(self):
        # Same 201 + Location pattern as POST, for PATCH.
        r1 = mock.Mock(spec=requests.Response)
        r1.status_code = 201
        r1.headers = {"location": "http://example/api/v1/test/1"}
        r1.content = ''
        r2 = mock.Mock(spec=requests.Response)
        r2.status_code = 200
        r2.headers = {"content-type": "application/json"}
        r2.content = '{"result": ["a", "b", "c"]}'
        self.base_resource._store.update({
            "session": mock.Mock(spec=requests.Session),
            "serializer": slumber.serialize.Serializer(),
        })
        self.base_resource._store["session"].request.side_effect = (r1, r2)
        resp = self.base_resource._request("PATCH")
        self.assertTrue(resp is r1)
        self.assertEqual(resp.content, r1.content)
        self.base_resource._store["session"].request.assert_called_once_with(
            "PATCH",
            "http://example/api/v1/test",
            data=None,
            files=None,
            params=None,
            headers={"content-type": self.base_resource._store["serializer"].get_content_type(),
                     "accept": self.base_resource._store["serializer"].get_content_type()}
        )
        resp = self.base_resource.patch(data={'foo': 'bar'})
        self.assertEqual(resp['result'], ['a', 'b', 'c'])

    def test_patch_decodable_response(self):
        # PATCH returning a decodable JSON body directly.
        r = mock.Mock(spec=requests.Response)
        r.status_code = 200
        r.content = '{"result": ["a", "b", "c"]}'
        r.headers = {"content-type": "application/json"}
        self.base_resource._store.update({
            "session": mock.Mock(spec=requests.Session),
            "serializer": slumber.serialize.Serializer(),
        })
        self.base_resource._store["session"].request.return_value = r
        resp = self.base_resource._request("PATCH")
        self.assertTrue(resp is r)
        self.assertEqual(resp.content, r.content)
        self.base_resource._store["session"].request.assert_called_once_with(
            "PATCH",
            "http://example/api/v1/test",
            data=None,
            files=None,
            params=None,
            headers={"content-type": self.base_resource._store["serializer"].get_content_type(),
                     "accept": self.base_resource._store["serializer"].get_content_type()}
        )
        resp = self.base_resource.patch(data={'foo': 'bar'})
        self.assertEqual(resp['result'], ['a', 'b', 'c'])

    def test_put_201_redirect(self):
        # Same 201 + Location pattern as POST, for PUT.
        r1 = mock.Mock(spec=requests.Response)
        r1.status_code = 201
        r1.headers = {"location": "http://example/api/v1/test/1"}
        r1.content = ''
        r2 = mock.Mock(spec=requests.Response)
        r2.status_code = 200
        r2.headers = {"content-type": "application/json"}
        r2.content = '{"result": ["a", "b", "c"]}'
        self.base_resource._store.update({
            "session": mock.Mock(spec=requests.Session),
            "serializer": slumber.serialize.Serializer(),
        })
        self.base_resource._store["session"].request.side_effect = (r1, r2)
        resp = self.base_resource._request("PUT")
        self.assertTrue(resp is r1)
        self.assertEqual(resp.content, r1.content)
        self.base_resource._store["session"].request.assert_called_once_with(
            "PUT",
            "http://example/api/v1/test",
            data=None,
            files=None,
            params=None,
            headers={"content-type": self.base_resource._store["serializer"].get_content_type(),
                     "accept": self.base_resource._store["serializer"].get_content_type()}
        )
        resp = self.base_resource.put(data={'foo': 'bar'})
        self.assertEqual(resp['result'], ['a', 'b', 'c'])

    def test_put_decodable_response(self):
        # PUT returning a decodable JSON body directly.
        r = mock.Mock(spec=requests.Response)
        r.status_code = 200
        r.content = '{"result": ["a", "b", "c"]}'
        r.headers = {"content-type": "application/json"}
        self.base_resource._store.update({
            "session": mock.Mock(spec=requests.Session),
            "serializer": slumber.serialize.Serializer(),
        })
        self.base_resource._store["session"].request.return_value = r
        resp = self.base_resource._request("PUT")
        self.assertTrue(resp is r)
        self.assertEqual(resp.content, r.content)
        self.base_resource._store["session"].request.assert_called_once_with(
            "PUT",
            "http://example/api/v1/test",
            data=None,
            files=None,
            params=None,
            headers={"content-type": self.base_resource._store["serializer"].get_content_type(),
                     "accept": self.base_resource._store["serializer"].get_content_type()}
        )
        resp = self.base_resource.put(data={'foo': 'bar'})
        self.assertEqual(resp['result'], ['a', 'b', 'c'])

    def test_handle_serialization(self):
        # _try_to_serialize_response must decode JSON even when the
        # content-type carries a charset parameter.
        self.base_resource._store.update({
            "serializer": slumber.serialize.Serializer(),
        })
        resp = mock.Mock(spec=requests.Response)
        resp.status_code = 200
        resp.headers = {"content-type": "application/json; charset=utf-8"}
        resp.content = '{"foo": "bar"}'
        r = self.base_resource._try_to_serialize_response(resp)
        if not isinstance(r, dict):
            self.fail("Serialization did not take place")

    def test_post_204_json(self):
        # 204 No Content: .post() returns None.
        resp = mock.Mock(spec=requests.Response)
        resp.status_code = 204
        resp.headers = {"content-type": "application/json"}
        resp.content = None
        self.base_resource._store.update({
            "session": mock.Mock(spec=requests.Session),
            "serializer": slumber.serialize.Serializer(),
        })
        self.base_resource._store["session"].request.return_value = resp
        self.assertEqual(self.base_resource.post(), None)

    def test_get_200_subresource_json(self):
        # Attribute access creates a sub-resource whose URL appends the
        # attribute name to the parent URL.
        r = mock.Mock(spec=requests.Response)
        r.status_code = 200
        r.headers = {"content-type": "application/json"}
        r.content = '{"result": ["a", "b", "c"]}'
        self.base_resource._store.update({
            "session": mock.Mock(spec=requests.Session),
            "serializer": slumber.serialize.Serializer(),
        })
        self.base_resource._store["session"].request.return_value = r
        resp = self.base_resource.subresource._request("GET")
        self.assertTrue(resp is r)
        self.assertEqual(resp.content, r.content)
        self.base_resource._store["session"].request.assert_called_once_with(
            "GET",
            "http://example/api/v1/test/subresource",
            data=None,
            files=None,
            params=None,
            headers={"content-type": self.base_resource._store["serializer"].get_content_type(),
                     "accept": self.base_resource._store["serializer"].get_content_type()}
        )
        # NOTE(review): this re-issues GET on the *base* resource, not the
        # subresource — possibly intended to be
        # self.base_resource.subresource.get(); confirm. The mocked session
        # returns the same response either way, so the test still passes.
        resp = self.base_resource.get()
        self.assertEqual(resp['result'], ['a', 'b', 'c'])

    def test_bad_resource_name(self):
        # Names starting with an underscore are not valid sub-resources.
        with self.assertRaises(AttributeError):
            self.base_resource._subresource

    def test_get_400_response(self):
        # 4xx maps to HttpClientError.
        r = mock.Mock(spec=requests.Response)
        r.status_code = 400
        r.headers = {"content-type": "application/json"}
        r.content = ''
        self.base_resource._store.update({
            "session": mock.Mock(spec=requests.Session),
            "serializer": slumber.serialize.Serializer(),
        })
        self.base_resource._store["session"].request.return_value = r
        with self.assertRaises(exceptions.HttpClientError):
            self.base_resource.req._request("GET")

    def test_get_404_response(self):
        # 404 maps to the more specific HttpNotFoundError.
        r = mock.Mock(spec=requests.Response)
        r.status_code = 404
        r.headers = {"content-type": "application/json"}
        r.content = ''
        self.base_resource._store.update({
            "session": mock.Mock(spec=requests.Session),
            "serializer": slumber.serialize.Serializer(),
        })
        self.base_resource._store["session"].request.return_value = r
        with self.assertRaises(exceptions.HttpNotFoundError):
            self.base_resource.req._request("GET")

    def test_get_500_response(self):
        # 5xx maps to HttpServerError.
        r = mock.Mock(spec=requests.Response)
        r.status_code = 500
        r.headers = {"content-type": "application/json"}
        r.content = ''
        self.base_resource._store.update({
            "session": mock.Mock(spec=requests.Session),
            "serializer": slumber.serialize.Serializer(),
        })
        self.base_resource._store["session"].request.return_value = r
        with self.assertRaises(exceptions.HttpServerError):
            self.base_resource.req._request("GET")

    def test_improperly_conf(self):
        # API() with no base_url is a configuration error.
        with self.assertRaises(exceptions.ImproperlyConfigured):
            slumber.API()

    def test_api(self):
        # The API object exposes the last response's status_code/headers.
        r = mock.Mock(spec=requests.Response)
        r.status_code = 200
        r.headers = {"content-type": "application/json"}
        r.content = '{"result": ["a", "b", "c"]}'
        client = slumber.API(base_url="http://example/api/v1",
                             session=mock.Mock(spec=requests.Session))
        client.test._store["session"].request.return_value = r
        resp = client.test.get()
        self.assertEqual(resp['result'], ['a', 'b', 'c'])
        self.assertEqual(client.status_code, r.status_code)
        self.assertEqual(client.headers.get('content-type'), r.headers['content-type'])

    def test_api_subclass(self):
        # A subclassed API builds resources from its resource_class, at every
        # level of attribute/call chaining.
        class SubclassedResource(slumber.Resource):
            pass

        class SubclassedAPI(slumber.API):
            resource_class = SubclassedResource

        client = SubclassedAPI(base_url="http://example/api/v1")
        self.assertIsInstance(client.test, SubclassedResource)
        self.assertIsInstance(client.test(1).other(2).more, SubclassedResource)

    def test_url(self):
        # url() reproduces the configured base URL (append_slash=False).
        self.assertEqual(self.base_resource.url(), "http://example/api/v1/test")

    def test_get_200_json_py3(self):
        # Same as test_get_200_json but with a bytes body (Python 3 style).
        r = mock.Mock(spec=requests.Response)
        r.status_code = 200
        r.headers = {"content-type": "application/json"}
        r.content = b'{"result": ["a", "b", "c"]}'
        self.base_resource._store.update({
            "session": mock.Mock(spec=requests.Session),
            "serializer": slumber.serialize.Serializer(),
        })
        self.base_resource._store["session"].request.return_value = r
        resp = self.base_resource._request("GET")
        self.assertTrue(resp is r)
        self.assertEqual(resp.content, r.content)
        self.base_resource._store["session"].request.assert_called_once_with(
            "GET",
            "http://example/api/v1/test",
            data=None,
            files=None,
            params=None,
            headers={"content-type": self.base_resource._store["serializer"].get_content_type(),
                     "accept": self.base_resource._store["serializer"].get_content_type()}
        )
        resp = self.base_resource.get()
        self.assertEqual(resp['result'], ['a', 'b', 'c'])

    @unittest.expectedFailure
    def test_post_201_does_get(self):
        # Expected-failure spec: after a 201, the follow-up GET to the
        # Location URL should carry the original query params.
        getparams = dict(username="luser", api_key="1234")
        postparams = dict(key1=1, key2="two")
        listuri = "http://example/api/v1/"
        newuri = "http://example/api/v1/myres/newthing/"
        ses = mock.Mock(spec=requests.session())
        ses.request.return_value.status_code = 201
        ses.request.return_value.headers = {"location": newuri}
        api = slumber.API(listuri, session=ses)
        api.myres.post(postparams, **getparams)
        self.assertEqual(ses.request.call_count, 2)
        ses.request.assert_called_with('GET', newuri,
                                       headers={
                                           'content-type': 'application/json',
                                           'accept': 'application/json'
                                       },
                                       params=getparams,
                                       data=None)

    def test_unicode_decodable_response(self):
        # Non-ASCII JSON content must decode to the expected unicode string.
        r = mock.Mock(spec=requests.Response)
        r.status_code = 200
        r.content = '{"result": "Préparatoire"}'
        r.headers = {"content-type": "application/json"}
        self.base_resource._store.update({
            "session": mock.Mock(spec=requests.Session),
            "serializer": slumber.serialize.Serializer(),
        })
        self.base_resource._store["session"].request.return_value = r
        resp = self.base_resource._request("POST")
        self.assertTrue(resp is r)
        self.assertEqual(resp.content, r.content)
        self.base_resource._store["session"].request.assert_called_once_with(
            "POST",
            "http://example/api/v1/test",
            data=None,
            files=None,
            params=None,
            headers={"content-type": self.base_resource._store["serializer"].get_content_type(),
                     "accept": self.base_resource._store["serializer"].get_content_type()}
        )
        resp = self.base_resource.post(data={'foo': 'bar'})
        expected = b'Pr\xc3\xa9paratoire'.decode('utf8')
        self.assertEqual(resp['result'], expected)
| 36.967972
| 166
| 0.594821
| 2,304
| 20,776
| 5.182726
| 0.067708
| 0.072356
| 0.144712
| 0.13014
| 0.83586
| 0.823047
| 0.807638
| 0.799012
| 0.793401
| 0.779164
| 0
| 0.012331
| 0.258375
| 20,776
| 561
| 167
| 37.033868
| 0.762656
| 0.001011
| 0
| 0.721348
| 0
| 0
| 0.147786
| 0
| 0
| 0
| 0
| 0
| 0.155056
| 1
| 0.05618
| false
| 0.002247
| 0.01573
| 0
| 0.078652
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
7c6d5cbdc5884c703fbecf3545295170890583ea
| 9,828
|
py
|
Python
|
sdk/python/bouncerapi/controllers/blacklist_geo_locations_controller.py
|
nmfta-repo/nmfta-bouncer
|
a178244dbf0b8a165aabc02a5d1ba05006f9ec22
|
[
"Apache-2.0"
] | 1
|
2021-04-09T20:29:39.000Z
|
2021-04-09T20:29:39.000Z
|
sdk/python/bouncerapi/controllers/blacklist_geo_locations_controller.py
|
nmfta-repo/nmfta-bouncer
|
a178244dbf0b8a165aabc02a5d1ba05006f9ec22
|
[
"Apache-2.0"
] | 21
|
2019-02-28T14:23:11.000Z
|
2020-07-07T20:46:37.000Z
|
sdk/python/bouncerapi/controllers/blacklist_geo_locations_controller.py
|
nmfta-repo/nmfta-bouncer
|
a178244dbf0b8a165aabc02a5d1ba05006f9ec22
|
[
"Apache-2.0"
] | 2
|
2019-05-07T13:16:49.000Z
|
2020-06-23T13:49:01.000Z
|
# -*- coding: utf-8 -*-
"""
bouncerapi
This file was automatically generated by APIMATIC v2.0 ( https://apimatic.io ).
"""
from bouncerapi.api_helper import APIHelper
from bouncerapi.configuration import Configuration
from bouncerapi.controllers.base_controller import BaseController
from bouncerapi.http.auth.custom_auth import CustomAuth
from bouncerapi.models.remove_response_1 import RemoveResponse1
from bouncerapi.models.create_response_1 import CreateResponse1
from bouncerapi.models.update_response_1 import UpdateResponse1
from bouncerapi.models.list_response_1 import ListResponse1
from bouncerapi.models.get_details_response_1 import GetDetailsResponse1
from bouncerapi.exceptions.return_exception import ReturnException
class BlacklistGeoLocationsController(BaseController):
"""A Controller to access Endpoints in the bouncerapi API."""
def remove(self,
           entry_id,
           body):
    """Does a DELETE request to /v1.1/blacklists/geolocations/{entry_id}/delete.

    Removes a Geo Location entry from the blacklist.

    Args:
        entry_id (string): unique identifier for the Geo Location;
            opaque but likely a GUID.
        body (GeoLocation): the Geo Location payload.

    Returns:
        RemoveResponse1: Response from the API.

    Raises:
        APIException: When an error occurs while fetching the data from
            the remote API. Includes the HTTP response code, an error
            message, and the HTTP body received in the request.
    """
    # Expand the template parameter, then resolve against the base URI.
    path = APIHelper.append_url_with_template_parameters(
        '/v1.1/blacklists/geolocations/{entry_id}/delete',
        {'entry_id': entry_id})
    url = APIHelper.clean_url(Configuration.get_base_uri() + path)

    request_headers = {
        'accept': 'application/json',
        'content-type': 'application/json; charset=utf-8'
    }

    # Build, authenticate and execute the DELETE request.
    request = self.http_client.delete(url, headers=request_headers,
                                      parameters=APIHelper.json_serialize(body))
    CustomAuth.apply(request)
    context = self.execute_request(request)

    # The service signals request-level failures with HTTP 400.
    if context.response.status_code == 400:
        raise ReturnException('Unexpected error in API call. See HTTP response body for details.', context)
    self.validate_response(context)

    return APIHelper.json_deserialize(context.response.raw_body, RemoveResponse1.from_dictionary)
def create(self,
           body):
    """Does a POST request to /v1.1/blacklists/geolocations/create.

    Creates a Geo Location in the blacklist. When POSTed-to, Bouncer scans
    `geolist.txt` for any IPs matching the Country Code (CC) in the POSTed
    object and creates a new ipaddress entry in this list for each.

    Args:
        body (GeoLocation): the Geo Location payload.

    Returns:
        CreateResponse1: Response from the API.

    Raises:
        APIException: When an error occurs while fetching the data from
            the remote API. Includes the HTTP response code, an error
            message, and the HTTP body received in the request.
    """
    # Fixed endpoint — no template parameters to expand.
    url = APIHelper.clean_url(
        Configuration.get_base_uri() + '/v1.1/blacklists/geolocations/create')

    request_headers = {
        'accept': 'application/json',
        'content-type': 'application/json; charset=utf-8'
    }

    # Build, authenticate and execute the POST request.
    request = self.http_client.post(url, headers=request_headers,
                                    parameters=APIHelper.json_serialize(body))
    CustomAuth.apply(request)
    context = self.execute_request(request)

    # The service signals request-level failures with HTTP 400.
    if context.response.status_code == 400:
        raise ReturnException('Unexpected error in API call. See HTTP response body for details.', context)
    self.validate_response(context)

    return APIHelper.json_deserialize(context.response.raw_body, CreateResponse1.from_dictionary)
def update(self,
           entry_id,
           body):
    """PUT /v1.1/blacklists/geolocations/{entry_id}/update.

    Update a Geo Location in the Blacklist.

    Args:
        entry_id (string): a unique identifier for the Geo Location;
            opaque but likely a GUID.
        body (GeoLocation): the updated Geo Location.

    Returns:
        UpdateResponse1: Response from the API.

    Raises:
        APIException: When an error occurs while fetching the data from
            the remote API. This exception includes the HTTP response
            code, an error message, and the HTTP body that was received
            in the request.
    """
    # Substitute the entry id into the URL template, then resolve the
    # full endpoint URL against the configured base URI.
    resolved_path = APIHelper.append_url_with_template_parameters(
        '/v1.1/blacklists/geolocations/{entry_id}/update', {
            'entry_id': entry_id
        })
    query_url = APIHelper.clean_url(Configuration.get_base_uri() + resolved_path)

    request_headers = {
        'accept': 'application/json',
        'content-type': 'application/json; charset=utf-8'
    }

    # Serialize the body, sign the request, and send it.
    request = self.http_client.put(
        query_url, headers=request_headers,
        parameters=APIHelper.json_serialize(body))
    CustomAuth.apply(request)
    context = self.execute_request(request)

    # Endpoint-specific error handling for bad requests.
    if context.response.status_code == 400:
        raise ReturnException('Unexpected error in API call. See HTTP response body for details.', context)
    self.validate_response(context)

    # Deserialize into the typed response model.
    return APIHelper.json_deserialize(context.response.raw_body, UpdateResponse1.from_dictionary)
def list(self):
    """GET /v1.1/blacklists/geolocations.

    List all Geo Locations in the Blacklist.

    Returns:
        ListResponse1: Response from the API.

    Raises:
        APIException: When an error occurs while fetching the data from
            the remote API. This exception includes the HTTP response
            code, an error message, and the HTTP body that was received
            in the request.
    """
    # Resolve the full endpoint URL against the configured base URI.
    query_url = APIHelper.clean_url(
        Configuration.get_base_uri() + '/v1.1/blacklists/geolocations')

    # Sign and send a plain GET; no body for this endpoint.
    request = self.http_client.get(
        query_url, headers={'accept': 'application/json'})
    CustomAuth.apply(request)
    context = self.execute_request(request)

    # Endpoint-specific error handling for bad requests.
    if context.response.status_code == 400:
        raise ReturnException('Unexpected error in API call. See HTTP response body for details.', context)
    self.validate_response(context)

    # Deserialize into the typed response model.
    return APIHelper.json_deserialize(context.response.raw_body, ListResponse1.from_dictionary)
def get_details(self,
                entry_id):
    """GET /v1.1/blacklists/geolocations/{entry_id}.

    Get details of a Geo Location entry in the Blacklist.

    Args:
        entry_id (string): a unique identifier for the Geo Location;
            opaque but likely a GUID.

    Returns:
        GetDetailsResponse1: Response from the API.

    Raises:
        APIException: When an error occurs while fetching the data from
            the remote API. This exception includes the HTTP response
            code, an error message, and the HTTP body that was received
            in the request.
    """
    # Substitute the entry id into the URL template, then resolve the
    # full endpoint URL against the configured base URI.
    resolved_path = APIHelper.append_url_with_template_parameters(
        '/v1.1/blacklists/geolocations/{entry_id}', {
            'entry_id': entry_id
        })
    query_url = APIHelper.clean_url(Configuration.get_base_uri() + resolved_path)

    # Sign and send a plain GET; no body for this endpoint.
    request = self.http_client.get(
        query_url, headers={'accept': 'application/json'})
    CustomAuth.apply(request)
    context = self.execute_request(request)

    # Endpoint-specific error handling for bad requests.
    if context.response.status_code == 400:
        raise ReturnException('Unexpected error in API call. See HTTP response body for details.', context)
    self.validate_response(context)

    # Deserialize into the typed response model.
    return APIHelper.json_deserialize(context.response.raw_body, GetDetailsResponse1.from_dictionary)
| 37.51145
| 115
| 0.655983
| 1,113
| 9,828
| 5.584906
| 0.146451
| 0.02027
| 0.020914
| 0.040219
| 0.791345
| 0.789414
| 0.773327
| 0.769466
| 0.746783
| 0.733269
| 0
| 0.008578
| 0.276455
| 9,828
| 261
| 116
| 37.655172
| 0.86556
| 0.367521
| 0
| 0.663462
| 1
| 0
| 0.139366
| 0.03524
| 0
| 0
| 0
| 0.011494
| 0
| 1
| 0.048077
| false
| 0
| 0.096154
| 0
| 0.201923
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
7cc0b667ed8ceb72180a59deb8c1f3ba79c2669d
| 3,237
|
py
|
Python
|
datamegh/api/http.py
|
leapfrogtechnology/data-engine
|
35e65e390dafb427e9972f6e8beca6fcbcdd5112
|
[
"MIT"
] | 3
|
2020-02-25T09:28:40.000Z
|
2020-02-28T04:18:23.000Z
|
datamegh/api/http.py
|
leapfrogtechnology/data-engine
|
35e65e390dafb427e9972f6e8beca6fcbcdd5112
|
[
"MIT"
] | 21
|
2020-02-12T15:32:19.000Z
|
2020-03-20T06:48:02.000Z
|
datamegh/api/http.py
|
leapfrogtechnology/data-engine
|
35e65e390dafb427e9972f6e8beca6fcbcdd5112
|
[
"MIT"
] | 1
|
2020-02-20T08:18:48.000Z
|
2020-02-20T08:18:48.000Z
|
""" Basic HTTP verbs for your API connection. """
import requests
def get(url, params=None, **kwargs):
    # Raw docstring: ``\*`` in a non-raw string is an invalid escape
    # sequence (SyntaxWarning on Python >= 3.12).
    r"""Sends a GET request.

    :param url: URL for the new :class:`Request` object.
    :param params: (optional) Dictionary, list of tuples or bytes to send
        in the query string for the :class:`Request`.
    :param \*\*kwargs: Optional arguments that ``request`` takes.
    :return: :class:`Response <Response>` object
    :rtype: requests.Response
    """
    # Keyword form makes the delegation explicit; behavior is unchanged.
    return requests.get(url, params=params, **kwargs)
def options(url, **kwargs):
    # Raw docstring avoids the invalid ``\*`` escape sequence warning.
    r"""Sends an OPTIONS request.

    :param url: URL for the new :class:`Request` object.
    :param \*\*kwargs: Optional arguments that ``request`` takes.
    :return: :class:`Response <Response>` object
    :rtype: requests.Response
    """
    return requests.options(url, **kwargs)
def head(url, **kwargs):
    # Raw docstring avoids the invalid ``\*`` escape sequence warning.
    r"""Sends a HEAD request.

    :param url: URL for the new :class:`Request` object.
    :param \*\*kwargs: Optional arguments that ``request`` takes. If
        `allow_redirects` is not provided, it will be set to `False` (as
        opposed to the default :meth:`request` behavior).
    :return: :class:`Response <Response>` object
    :rtype: requests.Response
    """
    return requests.head(url, **kwargs)
def post(url, data=None, json=None, **kwargs):
    # Raw docstring avoids the invalid ``\*`` escape sequence warning.
    r"""Sends a POST request.

    :param url: URL for the new :class:`Request` object.
    :param data: (optional) Dictionary, list of tuples, bytes, or file-like
        object to send in the body of the :class:`Request`.
    :param json: (optional) json data to send in the body of the :class:`Request`.
    :param \*\*kwargs: Optional arguments that ``request`` takes.
    :return: :class:`Response <Response>` object
    :rtype: requests.Response
    """
    # Keyword form makes the delegation explicit; behavior is unchanged.
    return requests.post(url, data=data, json=json, **kwargs)
def put(url, data=None, **kwargs):
    # Fixed copy-pasted docs: the signature has no ``json`` parameter
    # (it may still be forwarded via **kwargs). Raw docstring avoids the
    # invalid ``\*`` escape sequence warning on Python >= 3.12.
    r"""Sends a PUT request.

    :param url: URL for the new :class:`Request` object.
    :param data: (optional) Dictionary, list of tuples, bytes, or file-like
        object to send in the body of the :class:`Request`.
    :param \*\*kwargs: Optional arguments that ``request`` takes
        (e.g. ``json=`` for a JSON body).
    :return: :class:`Response <Response>` object
    :rtype: requests.Response
    """
    return requests.put(url, data=data, **kwargs)
def patch(url, data=None, **kwargs):
    # Fixed copy-pasted docs: the signature has no ``json`` parameter
    # (it may still be forwarded via **kwargs). Raw docstring avoids the
    # invalid ``\*`` escape sequence warning on Python >= 3.12.
    r"""Sends a PATCH request.

    :param url: URL for the new :class:`Request` object.
    :param data: (optional) Dictionary, list of tuples, bytes, or file-like
        object to send in the body of the :class:`Request`.
    :param \*\*kwargs: Optional arguments that ``request`` takes
        (e.g. ``json=`` for a JSON body).
    :return: :class:`Response <Response>` object
    :rtype: requests.Response
    """
    return requests.patch(url, data=data, **kwargs)
def delete(url, **kwargs):
    # Raw docstring avoids the invalid ``\*`` escape sequence warning.
    r"""Sends a DELETE request.

    :param url: URL for the new :class:`Request` object.
    :param \*\*kwargs: Optional arguments that ``request`` takes.
    :return: :class:`Response <Response>` object
    :rtype: requests.Response
    """
    return requests.delete(url, **kwargs)
| 34.43617
| 82
| 0.654927
| 427
| 3,237
| 4.962529
| 0.161593
| 0.079283
| 0.049552
| 0.059462
| 0.774894
| 0.760736
| 0.739028
| 0.739028
| 0.739028
| 0.739028
| 0
| 0
| 0.207291
| 3,237
| 93
| 83
| 34.806452
| 0.825799
| 0.734013
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.466667
| false
| 0
| 0.066667
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 7
|
7cc1527dadebad9cd8890a674784fa1119122d88
| 47
|
py
|
Python
|
db/__init__.py
|
CitizenScienceCenter/cccs-connexion
|
7f1e55df1058bfe628087116f535c505c38af926
|
[
"Apache-2.0"
] | null | null | null |
db/__init__.py
|
CitizenScienceCenter/cccs-connexion
|
7f1e55df1058bfe628087116f535c505c38af926
|
[
"Apache-2.0"
] | 35
|
2018-09-05T08:10:40.000Z
|
2019-12-09T14:40:48.000Z
|
db/__init__.py
|
CitizenScienceCenter/backend
|
7f1e55df1058bfe628087116f535c505c38af926
|
[
"Apache-2.0"
] | null | null | null |
from db.models import *
from db.roles import *
| 15.666667
| 23
| 0.744681
| 8
| 47
| 4.375
| 0.625
| 0.342857
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.170213
| 47
| 2
| 24
| 23.5
| 0.897436
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
7cd56629feb3f420e3c0479e0f33f6af8cfee20c
| 30,510
|
py
|
Python
|
hubspot/crm/properties/api/groups_api.py
|
cclauss/hubspot-api-python
|
7c60c0f572b98c73e1f1816bf5981396a42735f6
|
[
"Apache-2.0"
] | null | null | null |
hubspot/crm/properties/api/groups_api.py
|
cclauss/hubspot-api-python
|
7c60c0f572b98c73e1f1816bf5981396a42735f6
|
[
"Apache-2.0"
] | null | null | null |
hubspot/crm/properties/api/groups_api.py
|
cclauss/hubspot-api-python
|
7c60c0f572b98c73e1f1816bf5981396a42735f6
|
[
"Apache-2.0"
] | null | null | null |
# coding: utf-8
"""
Properties
All HubSpot objects store data in default and custom properties. These endpoints provide access to read and modify object properties in HubSpot. # noqa: E501
The version of the OpenAPI document: v3
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from hubspot.crm.properties.api_client import ApiClient
from hubspot.crm.properties.exceptions import ( # noqa: F401
ApiTypeError,
ApiValueError
)
class GroupsApi(object):
"""NOTE: This class is auto generated by OpenAPI Generator
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
def __init__(self, api_client=None):
    """Bind the API client used for all requests.

    A default ``ApiClient`` is constructed when none is supplied; the
    ``is None`` test is deliberate so a caller-provided client is never
    replaced.
    """
    self.api_client = ApiClient() if api_client is None else api_client
def archive(self, object_type, group_name, **kwargs):  # noqa: E501
    """Archive a property group  # noqa: E501

    Move a property group identified by {groupName} to the recycling bin.  # noqa: E501
    Synchronous by default; pass async_req=True for an asynchronous
    request, in which case the returned thread's get() yields the result.

    >>> thread = api.archive(object_type, group_name, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param str object_type: (required)
    :param str group_name: (required)
    :param _preload_content: if False, the urllib3.HTTPResponse object is
        returned without reading/decoding the response data. Default True.
    :param _request_timeout: total request timeout, or a
        (connection, read) tuple of timeouts.
    :return: None, or the request thread when called asynchronously.
    """
    # Delegate to the full-response variant, stripping status/headers.
    kwargs['_return_http_data_only'] = True
    return self.archive_with_http_info(object_type, group_name, **kwargs)  # noqa: E501
def archive_with_http_info(self, object_type, group_name, **kwargs):  # noqa: E501
    """Archive a property group (full-response variant)  # noqa: E501

    Move a property group identified by {groupName} to the recycling bin.  # noqa: E501
    Synchronous by default; pass async_req=True for an asynchronous
    request.

    >>> thread = api.archive_with_http_info(object_type, group_name, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param str object_type: (required)
    :param str group_name: (required)
    :param _return_http_data_only: response data without head status code
        and headers
    :param _preload_content: if False, the urllib3.HTTPResponse object is
        returned without reading/decoding the response data. Default True.
    :param _request_timeout: total request timeout, or a
        (connection, read) tuple of timeouts.
    :return: None, or the request thread when called asynchronously.
    """
    params = locals()

    # Endpoint parameters plus the generic request-control options.
    accepted = [
        'object_type',
        'group_name',
        'async_req',
        '_return_http_data_only',
        '_preload_content',
        '_request_timeout'
    ]
    # Fold the caller's keyword arguments in, rejecting unknown names.
    for name, value in six.iteritems(params['kwargs']):
        if name not in accepted:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method archive" % name
            )
        params[name] = value
    del params['kwargs']

    # Client-side validation of the required path parameters.
    if self.api_client.client_side_validation:
        for required in ('object_type', 'group_name'):
            if params.get(required) is None:
                raise ApiValueError(
                    "Missing the required parameter `%s` "
                    "when calling `archive`" % required)  # noqa: E501

    path_params = {}
    if 'object_type' in params:
        path_params['objectType'] = params['object_type']  # noqa: E501
    if 'group_name' in params:
        path_params['groupName'] = params['group_name']  # noqa: E501

    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(['*/*'])  # noqa: E501
    }

    return self.api_client.call_api(
        '/crm/v3/properties/{objectType}/groups/{groupName}', 'DELETE',
        path_params,
        [],  # no query parameters for this endpoint
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type=None,  # noqa: E501
        auth_settings=['hapikey', 'oauth2'],  # noqa: E501
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def create(self, object_type, property_group_create, **kwargs):  # noqa: E501
    """Create a property group  # noqa: E501

    Create and return a copy of a new property group.  # noqa: E501
    Synchronous by default; pass async_req=True for an asynchronous
    request, in which case the returned thread's get() yields the result.

    >>> thread = api.create(object_type, property_group_create, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param str object_type: (required)
    :param PropertyGroupCreate property_group_create: (required)
    :param _preload_content: if False, the urllib3.HTTPResponse object is
        returned without reading/decoding the response data. Default True.
    :param _request_timeout: total request timeout, or a
        (connection, read) tuple of timeouts.
    :return: PropertyGroup, or the request thread when called
        asynchronously.
    """
    # Delegate to the full-response variant, stripping status/headers.
    kwargs['_return_http_data_only'] = True
    return self.create_with_http_info(object_type, property_group_create, **kwargs)  # noqa: E501
def create_with_http_info(self, object_type, property_group_create, **kwargs):  # noqa: E501
    """Create a property group (full-response variant)  # noqa: E501

    Create and return a copy of a new property group.  # noqa: E501
    Synchronous by default; pass async_req=True for an asynchronous
    request.

    >>> thread = api.create_with_http_info(object_type, property_group_create, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param str object_type: (required)
    :param PropertyGroupCreate property_group_create: (required)
    :param _return_http_data_only: response data without head status code
        and headers
    :param _preload_content: if False, the urllib3.HTTPResponse object is
        returned without reading/decoding the response data. Default True.
    :param _request_timeout: total request timeout, or a
        (connection, read) tuple of timeouts.
    :return: tuple(PropertyGroup, status_code(int), headers(HTTPHeaderDict)),
        or the request thread when called asynchronously.
    """
    params = locals()

    # Endpoint parameters plus the generic request-control options.
    accepted = [
        'object_type',
        'property_group_create',
        'async_req',
        '_return_http_data_only',
        '_preload_content',
        '_request_timeout'
    ]
    # Fold the caller's keyword arguments in, rejecting unknown names.
    for name, value in six.iteritems(params['kwargs']):
        if name not in accepted:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method create" % name
            )
        params[name] = value
    del params['kwargs']

    # Client-side validation of the required parameters.
    if self.api_client.client_side_validation:
        for required in ('object_type', 'property_group_create'):
            if params.get(required) is None:
                raise ApiValueError(
                    "Missing the required parameter `%s` "
                    "when calling `create`" % required)  # noqa: E501

    path_params = {}
    if 'object_type' in params:
        path_params['objectType'] = params['object_type']  # noqa: E501

    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(
            ['application/json', '*/*']),  # noqa: E501
        # HTTP header `Content-Type`
        'Content-Type': self.api_client.select_header_content_type(  # noqa: E501
            ['application/json'])  # noqa: E501
    }

    return self.api_client.call_api(
        '/crm/v3/properties/{objectType}/groups', 'POST',
        path_params,
        [],  # no query parameters for this endpoint
        header_params,
        body=params.get('property_group_create'),
        post_params=[],
        files={},
        response_type='PropertyGroup',  # noqa: E501
        auth_settings=['hapikey', 'oauth2'],  # noqa: E501
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def get_all(self, object_type, **kwargs):  # noqa: E501
    """Read all property groups  # noqa: E501

    Read all existing property groups for the specified object type and HubSpot account.  # noqa: E501
    Synchronous by default; pass async_req=True for an asynchronous
    request, in which case the returned thread's get() yields the result.

    >>> thread = api.get_all(object_type, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param str object_type: (required)
    :param _preload_content: if False, the urllib3.HTTPResponse object is
        returned without reading/decoding the response data. Default True.
    :param _request_timeout: total request timeout, or a
        (connection, read) tuple of timeouts.
    :return: CollectionResponsePropertyGroup, or the request thread when
        called asynchronously.
    """
    # Delegate to the full-response variant, stripping status/headers.
    kwargs['_return_http_data_only'] = True
    return self.get_all_with_http_info(object_type, **kwargs)  # noqa: E501
def get_all_with_http_info(self, object_type, **kwargs):  # noqa: E501
    """Read all property groups (full-response variant)  # noqa: E501

    Read all existing property groups for the specified object type and HubSpot account.  # noqa: E501
    Synchronous by default; pass async_req=True for an asynchronous
    request.

    >>> thread = api.get_all_with_http_info(object_type, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param str object_type: (required)
    :param _return_http_data_only: response data without head status code
        and headers
    :param _preload_content: if False, the urllib3.HTTPResponse object is
        returned without reading/decoding the response data. Default True.
    :param _request_timeout: total request timeout, or a
        (connection, read) tuple of timeouts.
    :return: tuple(CollectionResponsePropertyGroup, status_code(int),
        headers(HTTPHeaderDict)), or the request thread when called
        asynchronously.
    """
    params = locals()

    # Endpoint parameters plus the generic request-control options.
    accepted = [
        'object_type',
        'async_req',
        '_return_http_data_only',
        '_preload_content',
        '_request_timeout'
    ]
    # Fold the caller's keyword arguments in, rejecting unknown names.
    for name, value in six.iteritems(params['kwargs']):
        if name not in accepted:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_all" % name
            )
        params[name] = value
    del params['kwargs']

    # Client-side validation of the required path parameter.
    if self.api_client.client_side_validation:
        if params.get('object_type') is None:
            raise ApiValueError(
                "Missing the required parameter `object_type` "
                "when calling `get_all`")  # noqa: E501

    path_params = {}
    if 'object_type' in params:
        path_params['objectType'] = params['object_type']  # noqa: E501

    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(
            ['application/json', '*/*'])  # noqa: E501
    }

    return self.api_client.call_api(
        '/crm/v3/properties/{objectType}/groups', 'GET',
        path_params,
        [],  # no query parameters for this endpoint
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='CollectionResponsePropertyGroup',  # noqa: E501
        auth_settings=['hapikey', 'oauth2'],  # noqa: E501
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def get_by_name(self, object_type, group_name, **kwargs):  # noqa: E501
    """Read a property group  # noqa: E501

    Read a property group identified by {groupName}.  # noqa: E501
    Synchronous by default; pass async_req=True for an asynchronous
    request, in which case the returned thread's get() yields the result.

    >>> thread = api.get_by_name(object_type, group_name, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param str object_type: (required)
    :param str group_name: (required)
    :param _preload_content: if False, the urllib3.HTTPResponse object is
        returned without reading/decoding the response data. Default True.
    :param _request_timeout: total request timeout, or a
        (connection, read) tuple of timeouts.
    :return: PropertyGroup, or the request thread when called
        asynchronously.
    """
    # Delegate to the full-response variant, stripping status/headers.
    kwargs['_return_http_data_only'] = True
    return self.get_by_name_with_http_info(object_type, group_name, **kwargs)  # noqa: E501
def get_by_name_with_http_info(self, object_type, group_name, **kwargs):  # noqa: E501
    """Read a property group (full-response variant)  # noqa: E501

    Read a property group identified by {groupName}.  # noqa: E501
    Synchronous by default; pass async_req=True for an asynchronous
    request.

    >>> thread = api.get_by_name_with_http_info(object_type, group_name, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param str object_type: (required)
    :param str group_name: (required)
    :param _return_http_data_only: response data without head status code
        and headers
    :param _preload_content: if False, the urllib3.HTTPResponse object is
        returned without reading/decoding the response data. Default True.
    :param _request_timeout: total request timeout, or a
        (connection, read) tuple of timeouts.
    :return: tuple(PropertyGroup, status_code(int), headers(HTTPHeaderDict)),
        or the request thread when called asynchronously.
    """
    params = locals()

    # Endpoint parameters plus the generic request-control options.
    accepted = [
        'object_type',
        'group_name',
        'async_req',
        '_return_http_data_only',
        '_preload_content',
        '_request_timeout'
    ]
    # Fold the caller's keyword arguments in, rejecting unknown names.
    for name, value in six.iteritems(params['kwargs']):
        if name not in accepted:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_by_name" % name
            )
        params[name] = value
    del params['kwargs']

    # Client-side validation of the required path parameters.
    if self.api_client.client_side_validation:
        for required in ('object_type', 'group_name'):
            if params.get(required) is None:
                raise ApiValueError(
                    "Missing the required parameter `%s` "
                    "when calling `get_by_name`" % required)  # noqa: E501

    path_params = {}
    if 'object_type' in params:
        path_params['objectType'] = params['object_type']  # noqa: E501
    if 'group_name' in params:
        path_params['groupName'] = params['group_name']  # noqa: E501

    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(
            ['application/json', '*/*'])  # noqa: E501
    }

    return self.api_client.call_api(
        '/crm/v3/properties/{objectType}/groups/{groupName}', 'GET',
        path_params,
        [],  # no query parameters for this endpoint
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='PropertyGroup',  # noqa: E501
        auth_settings=['hapikey', 'oauth2'],  # noqa: E501
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def update(self, object_type, group_name, property_group_update, **kwargs):  # noqa: E501
    """Update a property group  # noqa: E501

    Perform a partial update of a property group identified by {groupName}. Provided fields will be overwritten.  # noqa: E501
    Synchronous by default; pass async_req=True for an asynchronous
    request, in which case the returned thread's get() yields the result.

    >>> thread = api.update(object_type, group_name, property_group_update, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param str object_type: (required)
    :param str group_name: (required)
    :param PropertyGroupUpdate property_group_update: (required)
    :param _preload_content: if False, the urllib3.HTTPResponse object is
        returned without reading/decoding the response data. Default True.
    :param _request_timeout: total request timeout, or a
        (connection, read) tuple of timeouts.
    :return: PropertyGroup, or the request thread when called
        asynchronously.
    """
    # Delegate to the full-response variant, stripping status/headers.
    kwargs['_return_http_data_only'] = True
    return self.update_with_http_info(object_type, group_name, property_group_update, **kwargs)  # noqa: E501
def update_with_http_info(self, object_type, group_name, property_group_update, **kwargs):  # noqa: E501
    """Update a property group.

    Perform a partial update of a property group identified by {groupName}.
    Provided fields will be overwritten.

    Synchronous by default; pass ``async_req=True`` to receive the request
    thread instead of the result.

    >>> thread = api.update_with_http_info(object_type, group_name, property_group_update, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param str object_type: (required)
    :param str group_name: (required)
    :param PropertyGroupUpdate property_group_update: (required)
    :param _return_http_data_only: response data without head status code
                                   and headers
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :return: tuple(PropertyGroup, status_code(int), headers(HTTPHeaderDict))
             If the method is called asynchronously,
             returns the request thread.
    """
    required = ('object_type', 'group_name', 'property_group_update')
    optional = ('async_req', '_return_http_data_only', '_preload_content', '_request_timeout')  # noqa: E501
    # Collect positional parameters plus any recognized keyword options;
    # anything unrecognized is a caller error.
    params = {
        'object_type': object_type,
        'group_name': group_name,
        'property_group_update': property_group_update,
    }
    for name, value in six.iteritems(kwargs):
        if name not in required and name not in optional:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method update" % name
            )
        params[name] = value
    # Client-side validation: every required parameter must be non-None.
    if self.api_client.client_side_validation:
        for name in required:
            if params.get(name) is None:
                raise ApiValueError("Missing the required parameter `%s` when calling `update`" % name)  # noqa: E501
    collection_formats = {}
    path_params = {
        'objectType': params['object_type'],
        'groupName': params['group_name'],
    }
    query_params = []
    # HTTP headers: `Accept` first, then `Content-Type` (insertion order kept).
    header_params = {
        'Accept': self.api_client.select_header_accept(['application/json', '*/*']),  # noqa: E501
        'Content-Type': self.api_client.select_header_content_type(['application/json']),  # noqa: E501
    }
    form_params = []
    local_var_files = {}
    # The PropertyGroupUpdate payload is sent as the request body.
    body_params = params['property_group_update']
    # Authentication setting
    auth_settings = ['hapikey', 'oauth2']  # noqa: E501
    return self.api_client.call_api(
        '/crm/v3/properties/{objectType}/groups/{groupName}', 'PATCH',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='PropertyGroup',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
| 46.157337
| 162
| 0.597771
| 3,385
| 30,510
| 5.132349
| 0.05997
| 0.045128
| 0.064468
| 0.025902
| 0.950901
| 0.945375
| 0.940713
| 0.936626
| 0.931273
| 0.925229
| 0
| 0.015814
| 0.328482
| 30,510
| 660
| 163
| 46.227273
| 0.832146
| 0.435923
| 0
| 0.722397
| 0
| 0
| 0.197066
| 0.054719
| 0
| 0
| 0
| 0
| 0
| 1
| 0.0347
| false
| 0
| 0.015773
| 0
| 0.085174
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
7cdafd9336c3bee7085a8da17a5e49dd16adcdc2
| 27,776
|
py
|
Python
|
tests/test_shootdowns.py
|
IL2HorusTeam/il2fb-ds-events-parser
|
ea13014506166f24909630ea75bcecf810cfbbd8
|
[
"MIT"
] | null | null | null |
tests/test_shootdowns.py
|
IL2HorusTeam/il2fb-ds-events-parser
|
ea13014506166f24909630ea75bcecf810cfbbd8
|
[
"MIT"
] | null | null | null |
tests/test_shootdowns.py
|
IL2HorusTeam/il2fb-ds-events-parser
|
ea13014506166f24909630ea75bcecf810cfbbd8
|
[
"MIT"
] | null | null | null |
import datetime
import unittest
from il2fb.ds.events.definitions.shootdowns import AIAircraftShotdownEvent
from il2fb.ds.events.definitions.shootdowns import AIAircraftShotdownSelfEvent
from il2fb.ds.events.definitions.shootdowns import HumanAircraftShotdownEvent
from il2fb.ds.events.definitions.shootdowns import HumanAircraftShotdownSelfEvent
from il2fb.ds.events.definitions.shootdowns import AIAircraftShotdownByAIAircraftEvent
from il2fb.ds.events.definitions.shootdowns import AIAircraftShotdownByBridgeEvent
from il2fb.ds.events.definitions.shootdowns import AIAircraftShotdownByBuildingEvent
from il2fb.ds.events.definitions.shootdowns import AIAircraftShotdownByHumanAircraftEvent
from il2fb.ds.events.definitions.shootdowns import AIAircraftShotdownByMovingUnitEvent
from il2fb.ds.events.definitions.shootdowns import AIAircraftShotdownByMovingUnitMemberEvent
from il2fb.ds.events.definitions.shootdowns import AIAircraftShotdownByObjectEvent
from il2fb.ds.events.definitions.shootdowns import AIAircraftShotdownByStationaryUnitEvent
from il2fb.ds.events.definitions.shootdowns import AIAircraftShotdownByTreeEvent
from il2fb.ds.events.definitions.shootdowns import AIAircraftShotdownByParatrooperEvent
from il2fb.ds.events.definitions.shootdowns import HumanAircraftShotdownByAIAircraftEvent
from il2fb.ds.events.definitions.shootdowns import HumanAircraftShotdownByBridgeEvent
from il2fb.ds.events.definitions.shootdowns import HumanAircraftShotdownByBuildingEvent
from il2fb.ds.events.definitions.shootdowns import HumanAircraftShotdownByHumanAircraftEvent
from il2fb.ds.events.definitions.shootdowns import HumanAircraftShotdownByMovingUnitEvent
from il2fb.ds.events.definitions.shootdowns import HumanAircraftShotdownByMovingUnitMemberEvent
from il2fb.ds.events.definitions.shootdowns import HumanAircraftShotdownByObjectEvent
from il2fb.ds.events.definitions.shootdowns import HumanAircraftShotdownByStationaryUnitEvent
from il2fb.ds.events.definitions.shootdowns import HumanAircraftShotdownByTreeEvent
from il2fb.ds.events.definitions.shootdowns import HumanAircraftShotdownByParatrooperEvent
from il2fb.ds.events.definitions.shootdowns import AIAircraftShotdownByAIAircraftAndAIAircraftEvent
from il2fb.ds.events.definitions.shootdowns import AIAircraftShotdownByAIAircraftAndHumanAircraftEvent
from il2fb.ds.events.definitions.shootdowns import AIAircraftShotdownByHumanAircraftAndAIAircraftEvent
from il2fb.ds.events.definitions.shootdowns import AIAircraftShotdownByHumanAircraftAndHumanAircraftEvent
from il2fb.ds.events.definitions.shootdowns import HumanAircraftShotdownByAIAircraftAndAIAircraftEvent
from il2fb.ds.events.definitions.shootdowns import HumanAircraftShotdownByAIAircraftAndHumanAircraftEvent
from il2fb.ds.events.definitions.shootdowns import HumanAircraftShotdownByHumanAircraftAndAIAircraftEvent
from il2fb.ds.events.definitions.shootdowns import HumanAircraftShotdownByHumanAircraftAndHumanAircraftEvent
from il2fb.ds.events.parsing.shootdowns import ActorShotdownLineParser
class ActorShotdownLineParserTestCase(unittest.TestCase):
    """Exercise ActorShotdownLineParser.parse_line over every shootdown shape.

    The canonical actors are the AI aircraft "r01001" (parsed as id "r0100"
    plus flight index 1) and the human pilot "TheUser" flying a "TB-7_M40F".
    """

    # Fixed instant supplied to the parser by every test.
    _TS = datetime.datetime(2020, 12, 31, 15, 46, 8)

    def setUp(self):
        self.parser = ActorShotdownLineParser()

    def _parse(self, line, timestamp=_TS):
        # Thin wrapper so each test reads as "line in, event out".
        return self.parser.parse_line(timestamp, line)

    def _check_ai_target(self, evt):
        # Common assertions for events whose target is the AI aircraft.
        self.assertEqual(evt.data.timestamp, self._TS)
        self.assertEqual(evt.data.target.id, "r0100")
        self.assertEqual(evt.data.target.flight_index, 1)

    def _check_human_target(self, evt):
        # Common assertions for events whose target is the human aircraft.
        self.assertEqual(evt.data.timestamp, self._TS)
        self.assertEqual(evt.data.target.callsign, "TheUser")
        self.assertEqual(evt.data.target.aircraft, "TB-7_M40F")

    def test_parse_line_no_match(self):
        self.assertIsNone(self._parse("foo", timestamp=None))

    def test_ai_aircraft_shotdown(self):
        evt = self._parse("r01001 shot down by at 145663.6 62799.64 83.96088")
        self.assertIsInstance(evt, AIAircraftShotdownEvent)
        self._check_ai_target(evt)
        self.assertEqual(evt.data.pos.x, float("145663.6"))
        self.assertEqual(evt.data.pos.y, float("62799.64"))
        self.assertEqual(evt.data.pos.z, float("83.96088"))

    def test_ai_aircraft_shotdown_self(self):
        # Both "landscape" and "NONAME" mean the target downed itself.
        for line in (
            "r01001 shot down by landscape at 145663.6 62799.64 83.96088",
            "r01001 shot down by NONAME at 145663.6 62799.64 83.96088",
        ):
            evt = self._parse(line)
            self.assertIsInstance(evt, AIAircraftShotdownSelfEvent)
            self._check_ai_target(evt)

    def test_ai_aircraft_shotdown_by_ai_aircraft(self):
        evt = self._parse("r01001 shot down by g01002 at 145663.6 62799.64 83.96088")
        self.assertIsInstance(evt, AIAircraftShotdownByAIAircraftEvent)
        self._check_ai_target(evt)
        self.assertEqual(evt.data.attacker.id, "g0100")
        self.assertEqual(evt.data.attacker.flight_index, 2)

    def test_ai_aircraft_shotdown_by_bridge(self):
        evt = self._parse("r01001 shot down by Bridge159 at 145663.6 62799.64 83.96088")
        self.assertIsInstance(evt, AIAircraftShotdownByBridgeEvent)
        self._check_ai_target(evt)
        self.assertEqual(evt.data.attacker.id, "Bridge159")

    def test_ai_aircraft_shotdown_by_building(self):
        evt = self._parse("r01001 shot down by 194_bld at 145663.6 62799.64 83.96088")
        self.assertIsInstance(evt, AIAircraftShotdownByBuildingEvent)
        self._check_ai_target(evt)
        self.assertEqual(evt.data.attacker.id, "194_bld")

    def test_ai_aircraft_shotdown_by_human_aircraft(self):
        evt = self._parse("r01001 shot down by TheUser:TB-7_M40F at 145663.6 62799.64 83.96088")
        self.assertIsInstance(evt, AIAircraftShotdownByHumanAircraftEvent)
        self._check_ai_target(evt)
        self.assertEqual(evt.data.attacker.callsign, "TheUser")
        self.assertEqual(evt.data.attacker.aircraft, "TB-7_M40F")

    def test_ai_aircraft_shotdown_by_human_aircraft_stripped_callsign_spaces(self):
        evt = self._parse("r01001 shot down by The User :TB-7_M40F at 145663.6 62799.64 83.96088")
        self.assertIsInstance(evt, AIAircraftShotdownByHumanAircraftEvent)
        self.assertEqual(evt.data.attacker.callsign, "TheUser")

    def test_ai_aircraft_shotdown_by_human_aircraft_empty_callsign(self):
        for line in (
            "r01001 shot down by :TB-7_M40F at 145663.6 62799.64 83.96088",
            "r01001 shot down by :TB-7_M40F at 145663.6 62799.64 83.96088",
        ):
            evt = self._parse(line)
            self.assertIsInstance(evt, AIAircraftShotdownByHumanAircraftEvent)
            self.assertEqual(evt.data.attacker.callsign, "")

    def test_ai_aircraft_shotdown_by_moving_unit(self):
        evt = self._parse("r01001 shot down by 0_Chief at 145663.6 62799.64 83.96088")
        self.assertIsInstance(evt, AIAircraftShotdownByMovingUnitEvent)
        self._check_ai_target(evt)
        self.assertEqual(evt.data.attacker.id, "0_Chief")

    def test_ai_aircraft_shotdown_by_moving_unit_member(self):
        evt = self._parse("r01001 shot down by 0_Chief0 at 145663.6 62799.64 83.96088")
        self.assertIsInstance(evt, AIAircraftShotdownByMovingUnitMemberEvent)
        self._check_ai_target(evt)
        self.assertEqual(evt.data.attacker.id, "0_Chief")
        self.assertEqual(evt.data.attacker.member_index, 0)

    def test_ai_aircraft_shotdown_by_object(self):
        evt = self._parse("r01001 shot down by 3do/Buildings/Airdrome/BarrelBlock1/mono.sim at 145663.6 62799.64 83.96088")
        self.assertIsInstance(evt, AIAircraftShotdownByObjectEvent)
        self._check_ai_target(evt)
        self.assertEqual(evt.data.attacker.id, "3do/Buildings/Airdrome/BarrelBlock1/mono.sim")

    def test_ai_aircraft_shotdown_by_stationary_unit(self):
        evt = self._parse("r01001 shot down by 0_Static at 145663.6 62799.64 83.96088")
        self.assertIsInstance(evt, AIAircraftShotdownByStationaryUnitEvent)
        self._check_ai_target(evt)
        self.assertEqual(evt.data.attacker.id, "0_Static")

    def test_ai_aircraft_shotdown_by_tree(self):
        evt = self._parse("r01001 shot down by 3do/Tree/Line/live.sim at 145663.6 62799.64 83.96088")
        self.assertIsInstance(evt, AIAircraftShotdownByTreeEvent)
        self._check_ai_target(evt)

    def test_ai_aircraft_shotdown_by_paratrooper(self):
        evt = self._parse("r01001 shot down by _para_1 at 145663.6 62799.64 83.96088")
        self.assertIsInstance(evt, AIAircraftShotdownByParatrooperEvent)
        self._check_ai_target(evt)

    def test_human_aircraft_shotdown(self):
        evt = self._parse("TheUser:TB-7_M40F shot down by at 145663.6 62799.64 83.96088")
        self.assertIsInstance(evt, HumanAircraftShotdownEvent)
        self._check_human_target(evt)

    def test_human_aircraft_shotdown_stripped_callsign_spaces(self):
        evt = self._parse(" The User :TB-7_M40F shot down by at 145663.6 62799.64 83.96088")
        self.assertIsInstance(evt, HumanAircraftShotdownEvent)
        self.assertEqual(evt.data.target.callsign, "TheUser")

    def test_human_aircraft_shotdown_empty_callsign(self):
        for line in (
            " :TB-7_M40F shot down by at 145663.6 62799.64 83.96088",
            ":TB-7_M40F shot down by at 145663.6 62799.64 83.96088",
        ):
            evt = self._parse(line)
            self.assertIsInstance(evt, HumanAircraftShotdownEvent)
            self.assertEqual(evt.data.target.callsign, "")

    def test_human_aircraft_shotdown_self(self):
        for line in (
            "TheUser:TB-7_M40F shot down by landscape at 145663.6 62799.64 83.96088",
            "TheUser:TB-7_M40F shot down by NONAME at 145663.6 62799.64 83.96088",
        ):
            evt = self._parse(line)
            self.assertIsInstance(evt, HumanAircraftShotdownSelfEvent)
            self._check_human_target(evt)

    def test_human_aircraft_shotdown_by_ai_aircraft(self):
        evt = self._parse("TheUser:TB-7_M40F shot down by g01002 at 145663.6 62799.64 83.96088")
        self.assertIsInstance(evt, HumanAircraftShotdownByAIAircraftEvent)
        self._check_human_target(evt)
        self.assertEqual(evt.data.attacker.id, "g0100")
        self.assertEqual(evt.data.attacker.flight_index, 2)

    def test_human_aircraft_shotdown_by_bridge(self):
        evt = self._parse("TheUser:TB-7_M40F shot down by Bridge159 at 145663.6 62799.64 83.96088")
        self.assertIsInstance(evt, HumanAircraftShotdownByBridgeEvent)
        self._check_human_target(evt)
        self.assertEqual(evt.data.attacker.id, "Bridge159")

    def test_human_aircraft_shotdown_by_building(self):
        evt = self._parse("TheUser:TB-7_M40F shot down by 194_bld at 145663.6 62799.64 83.96088")
        self.assertIsInstance(evt, HumanAircraftShotdownByBuildingEvent)
        self._check_human_target(evt)
        self.assertEqual(evt.data.attacker.id, "194_bld")

    def test_human_aircraft_shotdown_by_human_aircraft(self):
        evt = self._parse("TheUser:TB-7_M40F shot down by TheUser2:TB-7_M40F at 145663.6 62799.64 83.96088")
        self.assertIsInstance(evt, HumanAircraftShotdownByHumanAircraftEvent)
        self._check_human_target(evt)
        self.assertEqual(evt.data.attacker.callsign, "TheUser2")
        self.assertEqual(evt.data.attacker.aircraft, "TB-7_M40F")

    def test_human_aircraft_shotdown_by_human_aircraft_stripped_callsign_spaces(self):
        evt = self._parse("TheUser:TB-7_M40F shot down by The User2 :TB-7_M40F at 145663.6 62799.64 83.96088")
        self.assertIsInstance(evt, HumanAircraftShotdownByHumanAircraftEvent)
        self.assertEqual(evt.data.attacker.callsign, "TheUser2")

    def test_human_aircraft_shotdown_by_human_aircraft_empty_callsign(self):
        for line in (
            "TheUser:TB-7_M40F shot down by :TB-7_M40F at 145663.6 62799.64 83.96088",
            "TheUser:TB-7_M40F shot down by :TB-7_M40F at 145663.6 62799.64 83.96088",
        ):
            evt = self._parse(line)
            self.assertIsInstance(evt, HumanAircraftShotdownByHumanAircraftEvent)
            self.assertEqual(evt.data.attacker.callsign, "")

    def test_human_aircraft_shotdown_by_moving_unit(self):
        evt = self._parse("TheUser:TB-7_M40F shot down by 0_Chief at 145663.6 62799.64 83.96088")
        self.assertIsInstance(evt, HumanAircraftShotdownByMovingUnitEvent)
        self._check_human_target(evt)
        self.assertEqual(evt.data.attacker.id, "0_Chief")

    def test_human_aircraft_shotdown_by_moving_unit_member(self):
        evt = self._parse("TheUser:TB-7_M40F shot down by 0_Chief0 at 145663.6 62799.64 83.96088")
        self.assertIsInstance(evt, HumanAircraftShotdownByMovingUnitMemberEvent)
        self._check_human_target(evt)
        self.assertEqual(evt.data.attacker.id, "0_Chief")
        self.assertEqual(evt.data.attacker.member_index, 0)

    def test_human_aircraft_shotdown_by_object(self):
        evt = self._parse("TheUser:TB-7_M40F shot down by 3do/Buildings/Airdrome/BarrelBlock1/mono.sim at 145663.6 62799.64 83.96088")
        self.assertIsInstance(evt, HumanAircraftShotdownByObjectEvent)
        self._check_human_target(evt)
        self.assertEqual(evt.data.attacker.id, "3do/Buildings/Airdrome/BarrelBlock1/mono.sim")

    def test_human_aircraft_shotdown_by_stationary_unit(self):
        evt = self._parse("TheUser:TB-7_M40F shot down by 0_Static at 145663.6 62799.64 83.96088")
        self.assertIsInstance(evt, HumanAircraftShotdownByStationaryUnitEvent)
        self._check_human_target(evt)
        self.assertEqual(evt.data.attacker.id, "0_Static")

    def test_human_aircraft_shotdown_by_tree(self):
        evt = self._parse("TheUser:TB-7_M40F shot down by 3do/Tree/Line/live.sim at 145663.6 62799.64 83.96088")
        self.assertIsInstance(evt, HumanAircraftShotdownByTreeEvent)
        self._check_human_target(evt)

    def test_human_aircraft_shotdown_by_paratrooper(self):
        evt = self._parse("TheUser:TB-7_M40F shot down by _para_1 at 145663.6 62799.64 83.96088")
        self.assertIsInstance(evt, HumanAircraftShotdownByParatrooperEvent)
        self._check_human_target(evt)

    def test_ai_aircraft_shotdown_by_ai_aircraft_and_ai_aircraft(self):
        evt = self._parse("r01001 shot down by g01002 and g01003 at 145663.6 62799.64 83.96088")
        self.assertIsInstance(evt, AIAircraftShotdownByAIAircraftAndAIAircraftEvent)
        self._check_ai_target(evt)
        self.assertEqual(evt.data.attacker.id, "g0100")
        self.assertEqual(evt.data.attacker.flight_index, 2)
        self.assertEqual(evt.data.assistant.id, "g0100")
        self.assertEqual(evt.data.assistant.flight_index, 3)

    def test_ai_aircraft_shotdown_by_ai_aircraft_and_human_aircraft(self):
        evt = self._parse("r01001 shot down by g01002 and TheUser:TB-7_M40F at 145663.6 62799.64 83.96088")
        self.assertIsInstance(evt, AIAircraftShotdownByAIAircraftAndHumanAircraftEvent)
        self._check_ai_target(evt)
        self.assertEqual(evt.data.attacker.id, "g0100")
        self.assertEqual(evt.data.attacker.flight_index, 2)
        self.assertEqual(evt.data.assistant.callsign, "TheUser")
        self.assertEqual(evt.data.assistant.aircraft, "TB-7_M40F")

    def test_ai_aircraft_shotdown_by_human_aircraft_and_ai_aircraft(self):
        evt = self._parse("r01001 shot down by TheUser:TB-7_M40F and g01002 at 145663.6 62799.64 83.96088")
        self.assertIsInstance(evt, AIAircraftShotdownByHumanAircraftAndAIAircraftEvent)
        self._check_ai_target(evt)
        self.assertEqual(evt.data.attacker.callsign, "TheUser")
        self.assertEqual(evt.data.attacker.aircraft, "TB-7_M40F")
        self.assertEqual(evt.data.assistant.id, "g0100")
        self.assertEqual(evt.data.assistant.flight_index, 2)

    def test_ai_aircraft_shotdown_by_human_aircraft_and_human_aircraft(self):
        evt = self._parse("r01001 shot down by TheUser:TB-7_M40F and TheUser2:TB-7_M40F at 145663.6 62799.64 83.96088")
        self.assertIsInstance(evt, AIAircraftShotdownByHumanAircraftAndHumanAircraftEvent)
        self._check_ai_target(evt)
        self.assertEqual(evt.data.attacker.callsign, "TheUser")
        self.assertEqual(evt.data.attacker.aircraft, "TB-7_M40F")
        self.assertEqual(evt.data.assistant.callsign, "TheUser2")
        self.assertEqual(evt.data.assistant.aircraft, "TB-7_M40F")

    def test_human_aircraft_shotdown_by_ai_aircraft_and_ai_aircraft(self):
        evt = self._parse("TheUser:TB-7_M40F shot down by g01002 and g01003 at 145663.6 62799.64 83.96088")
        self.assertIsInstance(evt, HumanAircraftShotdownByAIAircraftAndAIAircraftEvent)
        self._check_human_target(evt)
        self.assertEqual(evt.data.attacker.id, "g0100")
        self.assertEqual(evt.data.attacker.flight_index, 2)
        self.assertEqual(evt.data.assistant.id, "g0100")
        self.assertEqual(evt.data.assistant.flight_index, 3)

    def test_human_aircraft_shotdown_by_ai_aircraft_and_human_aircraft(self):
        evt = self._parse("TheUser:TB-7_M40F shot down by g01002 and TheUser2:TB-7_M40F at 145663.6 62799.64 83.96088")
        self.assertIsInstance(evt, HumanAircraftShotdownByAIAircraftAndHumanAircraftEvent)
        self._check_human_target(evt)
        self.assertEqual(evt.data.attacker.id, "g0100")
        self.assertEqual(evt.data.attacker.flight_index, 2)
        self.assertEqual(evt.data.assistant.callsign, "TheUser2")
        self.assertEqual(evt.data.assistant.aircraft, "TB-7_M40F")

    def test_human_aircraft_shotdown_by_human_aircraft_and_ai_aircraft(self):
        evt = self._parse("TheUser:TB-7_M40F shot down by TheUser2:TB-7_M40F and g01002 at 145663.6 62799.64 83.96088")
        self.assertIsInstance(evt, HumanAircraftShotdownByHumanAircraftAndAIAircraftEvent)
        self._check_human_target(evt)
        self.assertEqual(evt.data.attacker.callsign, "TheUser2")
        self.assertEqual(evt.data.attacker.aircraft, "TB-7_M40F")
        self.assertEqual(evt.data.assistant.id, "g0100")
        self.assertEqual(evt.data.assistant.flight_index, 2)

    def test_human_aircraft_shotdown_by_human_aircraft_and_human_aircraft(self):
        evt = self._parse("TheUser:TB-7_M40F shot down by TheUser2:TB-7_M40F and TheUser3:TB-7_M40F at 145663.6 62799.64 83.96088")
        self.assertIsInstance(evt, HumanAircraftShotdownByHumanAircraftAndHumanAircraftEvent)
        self._check_human_target(evt)
        self.assertEqual(evt.data.attacker.callsign, "TheUser2")
        self.assertEqual(evt.data.attacker.aircraft, "TB-7_M40F")
        self.assertEqual(evt.data.assistant.callsign, "TheUser3")
        self.assertEqual(evt.data.assistant.aircraft, "TB-7_M40F")

    def test_parse_line_no_z_coord(self):
        # A missing z coordinate is reported as 0.
        evt = self._parse("r01001 shot down by at 145663.6 62799.64")
        self.assertIsInstance(evt, AIAircraftShotdownEvent)
        self.assertEqual(evt.data.pos.z, 0)

    def test_parse_line_unknown_target(self):
        self.assertIsNone(self._parse("xxx shot down by at 145663.6 62799.64", timestamp=None))

    def test_parse_line_unknown_attaker(self):
        # An unrecognized attacker degrades to a plain shootdown event.
        evt = self._parse("r01001 shot down by xxx at 145663.6 62799.64")
        self.assertIsInstance(evt, AIAircraftShotdownEvent)

    def test_parse_line_unknown_assistant(self):
        # An unrecognized assistant degrades to an attacker-only event.
        evt = self._parse("r01001 shot down by g01001 and xxx at 145663.6 62799.64")
        self.assertIsInstance(evt, AIAircraftShotdownByAIAircraftEvent)

    def test_parse_line_unknown_attaker_and_known_assistant(self):
        # The recognized participant is treated as the attacker.
        evt = self._parse("r01001 shot down by xxx and g01001 at 145663.6 62799.64")
        self.assertIsInstance(evt, AIAircraftShotdownByAIAircraftEvent)

    def test_parse_line_unknown_attaker_and_unknown_assistant(self):
        evt = self._parse("r01001 shot down by xxx and zzz at 145663.6 62799.64")
        self.assertIsInstance(evt, AIAircraftShotdownEvent)
| 50.227848
| 118
| 0.769405
| 3,664
| 27,776
| 5.71179
| 0.040666
| 0.12113
| 0.145356
| 0.177657
| 0.874666
| 0.867594
| 0.862051
| 0.793721
| 0.790185
| 0.787653
| 0
| 0.089814
| 0.124136
| 27,776
| 552
| 119
| 50.318841
| 0.770429
| 0
| 0
| 0.659955
| 0
| 0.033557
| 0.146853
| 0.007921
| 0
| 0
| 0
| 0
| 0.489933
| 1
| 0.102908
| false
| 0
| 0.0783
| 0
| 0.183445
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
6b060cca1bdd68020c6eaff07f72b78777e616b4
| 2,535
|
py
|
Python
|
test/test.py
|
sorriso/UBI8-postgres-src
|
ba5ddbbf84f5329971d458f4f144595add77e6e9
|
[
"MIT"
] | null | null | null |
test/test.py
|
sorriso/UBI8-postgres-src
|
ba5ddbbf84f5329971d458f4f144595add77e6e9
|
[
"MIT"
] | null | null | null |
test/test.py
|
sorriso/UBI8-postgres-src
|
ba5ddbbf84f5329971d458f4f144595add77e6e9
|
[
"MIT"
] | null | null | null |
import psycopg2  # pip3 install psycopg2-binary
import os


def _exercise_database(conn):
    """Run the CRUD smoke test on an open connection, then close it.

    Creates the ``books`` table if needed, inserts one row, reads it back,
    asserts exactly one row was returned, drops the table, and closes both
    the cursor and the connection. Output matches the original inline
    sequence byte-for-byte.
    """
    cur = conn.cursor()
    print(conn.isolation_level)
    print("Creating table...")
    cur.execute("CREATE TABLE IF NOT EXISTS books(id bigint, title varchar(128));")
    conn.commit()
    print("Inserting data...")
    cur.execute("INSERT INTO books(title) VALUES('StarWars');")
    conn.commit()
    print("Reading data...")
    cur.execute("SELECT * FROM books")
    query_results = cur.fetchall()
    print(query_results)
    print(len(query_results))
    print("Asserting result...")
    assert len(query_results) == 1, "Should be 1"
    print("Dropping table...")
    cur.execute("DROP TABLE books;")
    conn.commit()
    cur.close()
    conn.close()


file_path = os.getcwd()
print("")
print(file_path)
print("")

# 1) Plain TCP connection, no SSL.
print("Connecting without SSL mode...")
conn = psycopg2.connect(host="localhost", port=5432, database="test", user="postgres", password="postgres1")
_exercise_database(conn)

# 2) SSL required, but server certificate not verified.
print("")
print("Connecting with SSL mode...")
conn = psycopg2.connect(host="localhost", port=5432, database="test", user="postgres", password="postgres1", sslmode='require')
_exercise_database(conn)

# 3) SSL with CA verification and a client certificate/key pair.
print("Connecting with SSL mode + client certificat...")
conn = psycopg2.connect(host='localhost', port=5432, database='test', user='postgres', password='postgres1', sslmode='verify-ca', sslcert=file_path + '/ssl/client.pem', sslkey=file_path + '/ssl/client.key', sslrootcert=file_path + '/ssl/rootCA.pem')
_exercise_database(conn)
| 24.375
| 251
| 0.715976
| 343
| 2,535
| 5.233236
| 0.230321
| 0.066852
| 0.050139
| 0.03844
| 0.875766
| 0.853482
| 0.853482
| 0.853482
| 0.853482
| 0.853482
| 0
| 0.015789
| 0.100592
| 2,535
| 103
| 252
| 24.61165
| 0.771491
| 0.011045
| 0
| 0.863014
| 0
| 0
| 0.389222
| 0
| 0
| 0
| 0
| 0
| 0.082192
| 1
| 0
| false
| 0.041096
| 0.027397
| 0
| 0.027397
| 0.424658
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 8
|
6b2356e9e24c49989b2db7aa3fcf56030833a452
| 10,554
|
py
|
Python
|
Encarnia/world/npcs.py
|
whitehorse-io/encarnia
|
798e79c72af18b13641cda218388ebb1bdb67d7c
|
[
"MIT"
] | 9
|
2017-07-10T04:27:31.000Z
|
2020-07-31T08:54:08.000Z
|
Encarnia/world/npcs.py
|
whitehorse-io/encarnia
|
798e79c72af18b13641cda218388ebb1bdb67d7c
|
[
"MIT"
] | null | null | null |
Encarnia/world/npcs.py
|
whitehorse-io/encarnia
|
798e79c72af18b13641cda218388ebb1bdb67d7c
|
[
"MIT"
] | 4
|
2017-09-11T02:26:21.000Z
|
2021-12-31T05:20:34.000Z
|
from evennia import DefaultObject
from evennia import default_cmds, CmdSet, utils
from commands.default_cmdsets import ChargenCmdset, ShopCmdset, BankCmdset, MerchantCmdset
from world import english_utils, npc_rules # , npc_rules
from random import randint
import time
from evennia import TICKER_HANDLER as tickerhandler
class Combat_Mob(DefaultObject):  # This mob will not attack people but it will defend itself from attack.
    """
    A combat-capable NPC that never attacks unprovoked but defends itself
    and pursues its attacker (via room trails) once it has been attacked.
    """

    def at_object_creation(self):
        """Set the default combat, loot, skinning, and respawn attributes."""
        # Inherit the object properties.
        super(Combat_Mob, self).at_object_creation()
        self.aliases.add([])
        # self.name = "a ruddy bronze broadsword" # not sure if I need this
        self.db.live_name = "a giant rat"
        self.db.defeated_name = "the mangled remains of some large vermin"  # must not have 'rat' in it or it can't be targetted!
        self.db.alive = True
        self.db.desc = ""
        self.db.health = 30
        self.db.max_health = 30  # NPC damage, (player level * strength) * weapon damage ratio.
        # So a level 1 would do 10 damage a hit, then 20, then 30, up to 1,000 per hit at level 100.
        self.db.damage_amount = 10
        self.db.ticker_speed = 3  # how often it attempts to attack or move/attack if a target is not found. This will only fire so many times before they 'forget'.
        self.db.counter_attack_chance = False  # integer chance this npc will trigger a counter-attack. Defaults as false.
        self.db.respawn_speed = 600  # SHOULD BE A MULTIPLE OF 100
        self.db.tries = 3  # how long it will spend trying to find its attacker before shutting down.
        self.db.level = 10  # this is the relative level of the creature
        self.db.exp_multiplier = 4  # If you're under the level, subtract player level from NPC level and multiply by the multiplier.
        self.db.exp_max_level = 20  # At this level you won't gain any experience from killing this NPC.
        self.db.home_location = "#2"  # This should be set!
        # So normally any kill is worth 1% exp.
        # But if your level is under the npc's level, you get a bonus
        # The bonus is level difference * multiplier.
        # This multiplier equation could similarly be used when attacking people below your current level, so you might
        # level up multiple times from killing a high-level person.
        self.db.offended_by = []
        self.db.lootable = False  # can be LOOTed for silver.
        self.db.looted_yet = False
        self.db.silver_amount = 0
        self.db.skinnable = True  # can be SKINNED for a pelt or skin item.
        self.db.skinned_yet = False
        self.db.pelt_name = "a giant rat pelt"
        self.db.attack_message_1 = "A giant rat hurls itself bodily into "
        self.db.attack_message_2 = "A giant rat claws and bites at "
        self.db.attack_message_3 = "With a resounding crunching sound, a giant rat bites into "

    def npc_active_ticks(self, *args, **kwargs):
        "Ticks after the NPC has been attacked."
        # NOTE(review): self.db.tries_left is not set in at_object_creation;
        # it is assumed to be initialized before this ticker fires (presumably
        # by npc_rules when combat starts) -- confirm against npc_rules.
        targets = False  # Any targets in the room?
        # This should probably go below.
        if self.db.tries_left > 0:
            for i in self.location.contents:
                if i in self.db.offended_by:
                    targets = True
                    npc_rules.attack(self, i)
                    self.db.tries_left = 3
                    return
            if not targets:
                # No offender here: follow the first offender's trail out of
                # the room, then attack them if found at the destination.
                # NOTE: .items() replaces Python-2-only .iteritems() -- same
                # behavior on Python 2, required on Python 3.
                for k, v in self.location.db.trails.items():
                    target_name = str(self.db.offended_by[0])
                    if k == target_name:
                        destination = self.search(v)
                        self.move_to(destination)
                        for i in self.location.contents:
                            if i in self.db.offended_by:
                                targets = True
                                npc_rules.attack(self, i)
                                self.db.tries_left = 3
                                break
                        break
        self.db.tries_left = self.db.tries_left - 1
        if self.db.tries_left < 0:
            # Out of tries: forget all offenders and stop this ticker.
            self.db.offended_by = []
            self.db.tries_left = self.db.tries
            tickerhandler.remove(self.db.ticker_speed, self.npc_active_ticks)
        return

    def npc_revive_ticks(self, *args, **kwargs):
        "Ticked when it is time to respawn this NPC back at its home location."
        self.db.alive = True
        self.name = self.db.live_name
        self.db.health = self.db.max_health
        self.db.looted_yet = False
        self.db.skinned_yet = False
        destination = self.search(self.db.home_location, global_search=True)
        self.move_to(destination)
        tickerhandler.remove(self.db.respawn_speed, self.npc_revive_ticks)
        return

    def at_look(self, target):
        """
        Called when this object performs a look. It allows to
        customize just what this means. It will not itself
        send any data.
        Args:
            target (Object): The target being looked at. This is
                commonly an object or the current location. It will
                be checked for the "view" type access.
        Returns:
            lookstring (str): A ready-processed look string
                potentially ready to return to the looker.
        """
        if not target.access(self, "view"):
            try:
                return "Could not view '%s'." % target.get_display_name(self)
            except AttributeError:
                return "Could not view '%s'." % target.key
        description = target.return_appearance(self)
        description = str.capitalize(description)
        # the target's at_desc() method.
        # this must be the last reference to target so it may delete itself when acted on.
        target.at_desc(looker=self)
        return description
class Combat_Merchant_Mob(DefaultObject):  # This mob will not attack people but it will defend itself from attack.
    """
    A merchant NPC (carries the MerchantCmdset) that never attacks
    unprovoked but defends itself and pursues its attacker once attacked.
    """

    def at_object_creation(self):
        """Set the default merchant, combat, loot, and respawn attributes."""
        # Inherit the object properties.
        super(Combat_Merchant_Mob, self).at_object_creation()
        self.cmdset.add(MerchantCmdset, permanent=True)
        self.aliases.add([])
        # self.name = "a ruddy bronze broadsword" # not sure if I need this
        self.db.live_name = "a giant rat"
        self.db.defeated_name = "the mangled remains of some large vermin"  # must not have 'rat' in it or it can't be targetted!
        self.db.alive = True
        self.db.desc = ""
        self.db.trade_item = "pelts"
        self.db.health = 135
        self.db.max_health = 135  # NPC damage, (player level * strength) * weapon damage ratio.
        # So a level 1 would do 10 damage a hit, then 20, then 30, up to 1,000 per hit at level 100.
        self.db.damage_amount = 30
        self.db.ticker_speed = 3  # how often it attempts to attack or move/attack if a target is not found. This will only fire so many times before they 'forget'.
        self.db.counter_attack_chance = False  # integer chance this npc will trigger a counter-attack. Defaults as false.
        self.db.respawn_speed = 600  # SHOULD BE A MULTIPLE OF 100
        self.db.tries = 3  # how long it will spend trying to find its attacker before shutting down.
        self.db.exp_level = 10  # this is the relative level of the creature
        # NOTE(review): Combat_Mob stores this as db.level while this class
        # uses db.exp_level -- confirm which attribute the exp code reads.
        self.db.exp_multiplier = 4  # If you're under the level, subtract player level from NPC level and multiply by the multiplier.
        self.db.exp_max_level = 20  # At this level you won't gain any experience from killing this NPC.
        self.db.home_location = "#2"  # This should be set!
        # So normally any kill is worth 1% exp.
        # But if your level is under the npc's level, you get a bonus
        # The bonus is level difference * multiplier.
        # This multiplier equation could similarly be used when attacking people below your current level, so you might
        # level up multiple times from killing a high-level person.
        self.db.offended_by = []
        self.db.lootable = True  # can be LOOTed for silver.
        self.db.looted_yet = False
        self.db.silver_amount = 10
        self.db.skinnable = False  # can be SKINNED for a pelt or skin item.
        self.db.skinned_yet = False
        self.db.pelt_name = "a giant rat pelt"
        self.db.attack_message_1 = "A giant rat hurls itself bodily into "
        self.db.attack_message_2 = "A giant rat claws and bites at "
        self.db.attack_message_3 = "With a resounding crunching sound, a giant rat bites into "

    def npc_active_ticks(self, *args, **kwargs):
        "Ticks after the NPC has been attacked."
        # NOTE(review): self.db.tries_left is assumed to be initialized before
        # this ticker fires (presumably by npc_rules) -- confirm.
        targets = False  # Any targets in the room?
        # This should probably go below.
        if self.db.tries_left > 0:
            for i in self.location.contents:
                if i in self.db.offended_by:
                    targets = True
                    npc_rules.attack(self, i)
                    self.db.tries_left = 3
                    return
            if not targets:
                # No offender here: follow the first offender's trail out of
                # the room, then attack them if found at the destination.
                # NOTE: .items() replaces Python-2-only .iteritems() -- same
                # behavior on Python 2, required on Python 3.
                for k, v in self.location.db.trails.items():
                    target_name = str(self.db.offended_by[0])
                    if k == target_name:
                        destination = self.search(v)
                        self.move_to(destination)
                        for i in self.location.contents:
                            if i in self.db.offended_by:
                                targets = True
                                npc_rules.attack(self, i)
                                self.db.tries_left = 3
                                break
                        break
        self.db.tries_left = self.db.tries_left - 1
        if self.db.tries_left < 0:
            # Out of tries: forget all offenders and stop this ticker.
            self.db.offended_by = []
            self.db.tries_left = self.db.tries
            tickerhandler.remove(self.db.ticker_speed, self.npc_active_ticks)
        return

    def npc_revive_ticks(self, *args, **kwargs):
        "Ticked when it is time to respawn this NPC back at its home location."
        self.db.alive = True
        self.name = self.db.live_name
        self.db.health = self.db.max_health
        self.db.looted_yet = False
        self.db.skinned_yet = False
        destination = self.search(self.db.home_location, global_search=True)
        self.move_to(destination)
        tickerhandler.remove(self.db.respawn_speed, self.npc_revive_ticks)
        return
| 40.436782
| 165
| 0.599204
| 1,440
| 10,554
| 4.294444
| 0.188194
| 0.090233
| 0.032018
| 0.033959
| 0.83053
| 0.83053
| 0.81436
| 0.81436
| 0.81436
| 0.81436
| 0
| 0.013121
| 0.328406
| 10,554
| 261
| 166
| 40.436782
| 0.85934
| 0.312299
| 0
| 0.764331
| 0
| 0
| 0.077778
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.044586
| false
| 0
| 0.044586
| 0
| 0.159236
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
6b241b5a00f51114ba5d39e64087d09ac34da03a
| 221,744
|
py
|
Python
|
hikari/api/rest.py
|
Reliku/hikari
|
c6e62b750ce35885a5e4124ffe8df6445ab34acd
|
[
"MIT"
] | 3
|
2021-08-28T17:48:52.000Z
|
2021-08-29T06:56:12.000Z
|
hikari/api/rest.py
|
Nephael/hikari
|
549dd12f4abbc5c42c32680927cdd14049ab3597
|
[
"MIT"
] | null | null | null |
hikari/api/rest.py
|
Nephael/hikari
|
549dd12f4abbc5c42c32680927cdd14049ab3597
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
# cython: language_level=3
# Copyright (c) 2020 Nekokatt
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
"""Provides an interface for REST API implementations to follow."""
from __future__ import annotations
__all__: typing.List[str] = ["ConnectorFactory", "RESTClient"]
import abc
import typing
from hikari import traits
from hikari import undefined
if typing.TYPE_CHECKING:
import aiohttp
from hikari import applications
from hikari import audit_logs
from hikari import channels
from hikari import colors
from hikari import embeds as embeds_
from hikari import emojis
from hikari import files
from hikari import guilds
from hikari import invites
from hikari import iterators
from hikari import messages as messages_
from hikari import permissions as permissions_
from hikari import sessions
from hikari import snowflakes
from hikari import users
from hikari import voices
from hikari import webhooks
from hikari.api import special_endpoints
from hikari.internal import time
class ConnectorFactory(abc.ABC):
    """Abstract provider of an `aiohttp` connector and its lifetime."""

    __slots__: typing.Sequence[str] = ()

    @abc.abstractmethod
    def acquire(self) -> aiohttp.BaseConnector:
        """Acquire the connector."""

    @abc.abstractmethod
    async def close(self) -> None:
        """Close any resources if they exist."""
class RESTClient(traits.NetworkSettingsAware, abc.ABC):
"""Interface for functionality that a REST API implementation provides."""
__slots__: typing.Sequence[str] = ()
@abc.abstractmethod
async def close(self) -> None:
"""Close the client session."""
@abc.abstractmethod
async def fetch_channel(
self, channel: snowflakes.SnowflakeishOr[channels.PartialChannel]
) -> channels.PartialChannel:
"""Fetch a channel.
Parameters
----------
channel : hikari.snowflakes.SnowflakeishOr[hikari.channels.PartialChannel]
The channel to fetch. This may be the object or the ID of an
existing channel.
Returns
-------
hikari.channels.PartialChannel
The channel. This will be a _derivative_ of
`hikari.channels.PartialChannel`, depending on the type of
channel you request for.
This means that you may get one of
`hikari.channels.DMChannel`,
`hikari.channels.GroupDMChannel`,
`hikari.channels.GuildTextChannel`,
`hikari.channels.GuildVoiceChannel`,
`hikari.channels.GuildStoreChannel`,
`hikari.channels.GuildNewsChannel`.
Likewise, the `hikari.channels.GuildChannel` can be used to
determine if a channel is guild-bound, and
`hikari.channels.TextChannel` can be used to determine
if the channel provides textual functionality to the application.
You can check for these using the `builtins.isinstance`
builtin function.
Raises
------
hikari.errors.UnauthorizedError
If you are unauthorized to make the request (invalid/missing token).
hikari.errors.ForbiddenError
If you are missing the `READ_MESSAGES` permission in the channel.
hikari.errors.NotFoundError
If the channel is not found.
hikari.errors.RateLimitTooLongError
Raised in the event that a rate limit occurs that is
longer than `max_rate_limit` when making a request.
hikari.errors.RateLimitTooLongError
Raised in the event that a rate limit occurs that is
longer than `max_rate_limit` when making a request.
hikari.errors.RateLimitedError
Usually, Hikari will handle and retry on hitting
rate-limits automatically. This includes most bucket-specific
rate-limits and global rate-limits. In some rare edge cases,
however, Discord implements other undocumented rules for
rate-limiting, such as limits per attribute. These cannot be
detected or handled normally by Hikari due to their undocumented
nature, and will trigger this exception if they occur.
hikari.errors.InternalServerError
If an internal error occurs on Discord while handling the request.
"""
@abc.abstractmethod
async def edit_channel(
self,
channel: snowflakes.SnowflakeishOr[channels.GuildChannel],
/,
*,
name: undefined.UndefinedOr[str] = undefined.UNDEFINED,
position: undefined.UndefinedOr[int] = undefined.UNDEFINED,
topic: undefined.UndefinedOr[str] = undefined.UNDEFINED,
nsfw: undefined.UndefinedOr[bool] = undefined.UNDEFINED,
bitrate: undefined.UndefinedOr[int] = undefined.UNDEFINED,
user_limit: undefined.UndefinedOr[int] = undefined.UNDEFINED,
rate_limit_per_user: undefined.UndefinedOr[time.Intervalish] = undefined.UNDEFINED,
permission_overwrites: undefined.UndefinedOr[
typing.Sequence[channels.PermissionOverwrite]
] = undefined.UNDEFINED,
parent_category: undefined.UndefinedOr[snowflakes.SnowflakeishOr[channels.GuildCategory]] = undefined.UNDEFINED,
reason: undefined.UndefinedOr[str] = undefined.UNDEFINED,
) -> channels.PartialChannel:
"""Edit a channel.
Parameters
----------
channel : hikari.snowflakes.SnowflakeishOr[hikari.channels.GuildChannel]
The channel to edit. This may be the object or the ID of an
existing channel.
Other Parameters
----------------
name : hikari.undefined.UndefinedOr[[builtins.str]
If provided, the new name for the channel.
position : hikari.undefined.UndefinedOr[[builtins.int]
If provided, the new position for the channel.
topic : hikari.undefined.UndefinedOr[builtins.str]
If provided, the new topic for the channel.
nsfw : hikari.undefined.UndefinedOr[builtins.bool]
If provided, whether the channel should be marked as NSFW or not.
bitrate : hikari.undefined.UndefinedOr[builtins.int]
If provided, the new bitrate for the channel.
user_limit : hikari.undefined.UndefinedOr[builtins.int]
If provided, the new user limit in the channel.
rate_limit_per_user : hikari.undefined.UndefinedOr[hikari.internal.time.Intervalish]
If provided, the new rate limit per user in the channel.
permission_overwrites : hikari.undefined.UndefinedOr[typing.Sequence[hikari.channels.PermissionOverwrite]]
If provided, the new permission overwrites for the channel.
parent_category : hikari.undefined.UndefinedOr[hikari.snowflakes.SnowflakeishOr[hikari.channels.GuildCategory]]
If provided, the new guild category for the channel.
reason : hikari.undefined.UndefinedOr[builtins.str]
If provided, the reason that will be recorded in the audit logs.
Maximum of 512 characters.
Returns
-------
hikari.channels.PartialChannel
The edited channel.
Raises
------
hikari.errors.BadRequestError
If any of the fields that are passed have an invalid value.
hikari.errors.UnauthorizedError
If you are unauthorized to make the request (invalid/missing token).
hikari.errors.ForbiddenError
If you are missing permissions to edit the channel.
hikari.errors.NotFoundError
If the channel is not found.
hikari.errors.RateLimitTooLongError
Raised in the event that a rate limit occurs that is
longer than `max_rate_limit` when making a request.
hikari.errors.RateLimitedError
Usually, Hikari will handle and retry on hitting
rate-limits automatically. This includes most bucket-specific
rate-limits and global rate-limits. In some rare edge cases,
however, Discord implements other undocumented rules for
rate-limiting, such as limits per attribute. These cannot be
detected or handled normally by Hikari due to their undocumented
nature, and will trigger this exception if they occur.
hikari.errors.InternalServerError
If an internal error occurs on Discord while handling the request.
""" # noqa: E501 - Line too long
@abc.abstractmethod
async def follow_channel(
self,
news_channel: snowflakes.SnowflakeishOr[channels.GuildNewsChannel],
target_channel: snowflakes.SnowflakeishOr[channels.GuildChannel],
) -> channels.ChannelFollow:
"""Follow a news channel to send messages to a target channel.
Parameters
----------
news_channel : hikari.snowflakes.SnowflakeishOr[hikari.channels.GuildNewsChannel]
The object or ID of the news channel to follow.
target_channel : hikari.snowflakes.SnowflakeishOr[hikari.channels.GuildChannel]
The object or ID of the channel to target.
Returns
-------
hikari.channels.ChannelFollow
Information about the new relationship that was made.
Raises
------
hikari.errors.BadRequestError
If you try to follow a channel that's not a news channel or if the
target channel has reached it's webhook limit, which is 10 at the
time of writing.
hikari.errors.UnauthorizedError
If you are unauthorized to make the request (invalid/missing token).
hikari.errors.ForbiddenError
If you are missing the `MANAGE_WEBHOOKS` permission in the target
channel or are missing the `VIEW_CHANNEL` permission in the origin
channel.
hikari.errors.NotFoundError
If the origin or target channel is not found.
hikari.errors.RateLimitTooLongError
Raised in the event that a rate limit occurs that is
longer than `max_rate_limit` when making a request.
hikari.errors.RateLimitedError
Usually, Hikari will handle and retry on hitting
rate-limits automatically. This includes most bucket-specific
rate-limits and global rate-limits. In some rare edge cases,
however, Discord implements other undocumented rules for
rate-limiting, such as limits per attribute. These cannot be
detected or handled normally by Hikari due to their undocumented
nature, and will trigger this exception if they occur.
hikari.errors.InternalServerError
If an internal error occurs on Discord while handling the request.
"""
@abc.abstractmethod
async def delete_channel(self, channel: snowflakes.SnowflakeishOr[channels.PartialChannel]) -> None:
"""Delete a channel in a guild, or close a DM.
Parameters
----------
channel : hikari.snowflakes.SnowflakeishOr[hikari.channels.PartialChannel]
The channel to delete. This may be the object or the ID of an
existing channel.
Raises
------
hikari.errors.UnauthorizedError
If you are unauthorized to make the request (invalid/missing token).
hikari.errors.ForbiddenError
If you are missing the `MANAGE_CHANNEL` permission in the channel.
hikari.errors.NotFoundError
If the channel is not found.
hikari.errors.RateLimitTooLongError
Raised in the event that a rate limit occurs that is
longer than `max_rate_limit` when making a request.
hikari.errors.RateLimitedError
Usually, Hikari will handle and retry on hitting
rate-limits automatically. This includes most bucket-specific
rate-limits and global rate-limits. In some rare edge cases,
however, Discord implements other undocumented rules for
rate-limiting, such as limits per attribute. These cannot be
detected or handled normally by Hikari due to their undocumented
nature, and will trigger this exception if they occur.
hikari.errors.InternalServerError
If an internal error occurs on Discord while handling the request.
!!! note
For Public servers, the set 'Rules' or 'Guidelines' channels and the
'Public Server Updates' channel cannot be deleted.
"""
@typing.overload
@abc.abstractmethod
async def edit_permission_overwrites(
self,
channel: snowflakes.SnowflakeishOr[channels.GuildChannel],
target: typing.Union[channels.PermissionOverwrite, users.PartialUser, guilds.PartialRole],
*,
allow: undefined.UndefinedOr[permissions_.Permissions] = undefined.UNDEFINED,
deny: undefined.UndefinedOr[permissions_.Permissions] = undefined.UNDEFINED,
reason: undefined.UndefinedOr[str] = undefined.UNDEFINED,
) -> None:
"""Edit permissions for a target entity."""
@typing.overload
@abc.abstractmethod
async def edit_permission_overwrites(
self,
channel: snowflakes.SnowflakeishOr[channels.GuildChannel],
target: snowflakes.Snowflakeish,
*,
target_type: channels.PermissionOverwriteType,
allow: undefined.UndefinedOr[permissions_.Permissions] = undefined.UNDEFINED,
deny: undefined.UndefinedOr[permissions_.Permissions] = undefined.UNDEFINED,
reason: undefined.UndefinedOr[str] = undefined.UNDEFINED,
) -> None:
"""Edit permissions for a given entity ID and type."""
@abc.abstractmethod
async def edit_permission_overwrites(
self,
channel: snowflakes.SnowflakeishOr[channels.GuildChannel],
target: typing.Union[
snowflakes.Snowflakeish, users.PartialUser, guilds.PartialRole, channels.PermissionOverwrite
],
*,
target_type: undefined.UndefinedOr[channels.PermissionOverwriteType] = undefined.UNDEFINED,
allow: undefined.UndefinedOr[permissions_.Permissions] = undefined.UNDEFINED,
deny: undefined.UndefinedOr[permissions_.Permissions] = undefined.UNDEFINED,
reason: undefined.UndefinedOr[str] = undefined.UNDEFINED,
) -> None:
"""Edit permissions for a specific entity in the given guild channel.
Parameters
----------
channel : hikari.snowflakes.SnowflakeishOr[hikari.channels.GuildChannel]
The channel to edit a permission overwrite in. This may be the
object, or the ID of an existing channel.
target : typing.Union[hikari.users.PartialUser, hikari.guilds.PartialRole, hikari.channels.PermissionOverwrite, hikari.snowflakes.Snowflakeish]
The channel overwrite to edit. This may be the object or the ID of an
existing overwrite.
Other Parameters
----------------
target_type : hikari.undefined.UndefinedOr[hikari.channels.PermissionOverwriteType]
If provided, the type of the target to update. If unset, will attempt to get
the type from `target`.
allow : hikari.undefined.UndefinedOr[hikari.permissions.Permissions]
If provided, the new vale of all allowed permissions.
deny : hikari.undefined.UndefinedOr[hikari.permissions.Permissions]
If provided, the new vale of all disallowed permissions.
reason : hikari.undefined.UndefinedOr[builtins.str]
If provided, the reason that will be recorded in the audit logs.
Maximum of 512 characters.
Raises
------
builtins.TypeError
If `target_type` is unset and we were unable to determine the type
from `target`.
hikari.errors.BadRequestError
If any of the fields that are passed have an invalid value.
hikari.errors.UnauthorizedError
If you are unauthorized to make the request (invalid/missing token).
hikari.errors.ForbiddenError
If you are missing the `MANAGE_PERMISSIONS` permission in the channel.
hikari.errors.NotFoundError
If the channel is not found or the target is not found if it is
a role.
hikari.errors.RateLimitTooLongError
Raised in the event that a rate limit occurs that is
longer than `max_rate_limit` when making a request.
hikari.errors.RateLimitedError
Usually, Hikari will handle and retry on hitting
rate-limits automatically. This includes most bucket-specific
rate-limits and global rate-limits. In some rare edge cases,
however, Discord implements other undocumented rules for
rate-limiting, such as limits per attribute. These cannot be
detected or handled normally by Hikari due to their undocumented
nature, and will trigger this exception if they occur.
hikari.errors.InternalServerError
If an internal error occurs on Discord while handling the request.
""" # noqa: E501 - Line too long
@abc.abstractmethod
async def delete_permission_overwrite(
self,
channel: snowflakes.SnowflakeishOr[channels.GuildChannel],
target: snowflakes.SnowflakeishOr[
typing.Union[channels.PermissionOverwrite, guilds.PartialRole, users.PartialUser, snowflakes.Snowflakeish]
],
) -> None:
"""Delete a custom permission for an entity in a given guild channel.
Parameters
----------
channel : hikari.snowflakes.SnowflakeishOr[hikari.channels.GuildChannel]
The channel to delete a permission overwrite in. This may be the
object, or the ID of an existing channel.
target : typing.Union[hikari.users.PartialUser, hikari.guilds.PartialRole, hikari.channels.PermissionOverwrite, hikari.snowflakes.Snowflakeish]
The channel overwrite to delete.
Raises
------
hikari.errors.UnauthorizedError
If you are unauthorized to make the request (invalid/missing token).
hikari.errors.ForbiddenError
If you are missing the `MANAGE_PERMISSIONS` permission in the channel.
hikari.errors.NotFoundError
If the channel is not found or the target is not found.
hikari.errors.RateLimitTooLongError
Raised in the event that a rate limit occurs that is
longer than `max_rate_limit` when making a request.
hikari.errors.RateLimitedError
Usually, Hikari will handle and retry on hitting
rate-limits automatically. This includes most bucket-specific
rate-limits and global rate-limits. In some rare edge cases,
however, Discord implements other undocumented rules for
rate-limiting, such as limits per attribute. These cannot be
detected or handled normally by Hikari due to their undocumented
nature, and will trigger this exception if they occur.
hikari.errors.InternalServerError
If an internal error occurs on Discord while handling the request.
""" # noqa: E501 - Line too long
@abc.abstractmethod
async def fetch_channel_invites(
self, channel: snowflakes.SnowflakeishOr[channels.GuildChannel]
) -> typing.Sequence[invites.InviteWithMetadata]:
"""Fetch all invites pointing to the given guild channel.
Parameters
----------
channel : hikari.snowflakes.SnowflakeishOr[hikari.channels.GuildChannel]
The channel to fetch the invites from. This may be a channel
object, or the ID of an existing channel.
Returns
-------
typing.Sequence[hikari.invites.InviteWithMetadata]
The invites pointing to the given guild channel.
Raises
------
hikari.errors.UnauthorizedError
If you are unauthorized to make the request (invalid/missing token).
hikari.errors.ForbiddenError
If you are missing the `MANAGE_CHANNEL` permission in the channel.
hikari.errors.NotFoundError
If the channel is not found in any guilds you are a member of.
hikari.errors.RateLimitTooLongError
Raised in the event that a rate limit occurs that is
longer than `max_rate_limit` when making a request.
hikari.errors.RateLimitedError
Usually, Hikari will handle and retry on hitting
rate-limits automatically. This includes most bucket-specific
rate-limits and global rate-limits. In some rare edge cases,
however, Discord implements other undocumented rules for
rate-limiting, such as limits per attribute. These cannot be
detected or handled normally by Hikari due to their undocumented
nature, and will trigger this exception if they occur.
hikari.errors.InternalServerError
If an internal error occurs on Discord while handling the request.
"""
    @abc.abstractmethod
    async def create_invite(
        self,
        channel: snowflakes.SnowflakeishOr[channels.GuildChannel],
        *,
        max_age: undefined.UndefinedOr[time.Intervalish] = undefined.UNDEFINED,
        max_uses: undefined.UndefinedOr[int] = undefined.UNDEFINED,
        temporary: undefined.UndefinedOr[bool] = undefined.UNDEFINED,
        unique: undefined.UndefinedOr[bool] = undefined.UNDEFINED,
        target_user: undefined.UndefinedOr[snowflakes.SnowflakeishOr[users.PartialUser]] = undefined.UNDEFINED,
        target_user_type: undefined.UndefinedOr[invites.TargetUserType] = undefined.UNDEFINED,
        reason: undefined.UndefinedOr[str] = undefined.UNDEFINED,
    ) -> invites.InviteWithMetadata:
        """Create an invite to the given guild channel.

        Parameters
        ----------
        channel : hikari.snowflakes.SnowflakeishOr[hikari.channels.GuildChannel]
            The channel to create an invite for. This may be the object
            or the ID of an existing channel.

        Other Parameters
        ----------------
        max_age : hikari.undefined.UndefinedOr[typing.Union[datetime.timedelta, builtins.float, builtins.int]]
            If provided, the duration of the invite before expiry.
        max_uses : hikari.undefined.UndefinedOr[builtins.int]
            If provided, the max uses the invite can have.
        temporary : hikari.undefined.UndefinedOr[builtins.bool]
            If provided, whether the invite only grants temporary membership.
        unique : hikari.undefined.UndefinedOr[builtins.bool]
            If provided, whether the invite should be unique.
        target_user : hikari.undefined.UndefinedOr[hikari.snowflakes.SnowflakeishOr[hikari.users.PartialUser]]
            If provided, the target user id for this invite. This may be the
            object or the ID of an existing user.
        target_user_type : hikari.undefined.UndefinedOr[hikari.invites.TargetUserType]
            If provided, the type of target user for this invite.
        reason : hikari.undefined.UndefinedOr[builtins.str]
            If provided, the reason that will be recorded in the audit logs.
            Maximum of 512 characters.

        Returns
        -------
        hikari.invites.InviteWithMetadata
            The invite to the given guild channel.

        Raises
        ------
        hikari.errors.BadRequestError
            If any of the fields that are passed have an invalid value.
        hikari.errors.UnauthorizedError
            If you are unauthorized to make the request (invalid/missing token).
        hikari.errors.ForbiddenError
            If you are missing the `MANAGE_CHANNELS` permission.
        hikari.errors.NotFoundError
            If the channel is not found, or if the target user does not exist,
            if provided.
        hikari.errors.RateLimitTooLongError
            Raised in the event that a rate limit occurs that is
            longer than `max_rate_limit` when making a request.
        hikari.errors.RateLimitedError
            Usually, Hikari will handle and retry on hitting
            rate-limits automatically. This includes most bucket-specific
            rate-limits and global rate-limits. In some rare edge cases,
            however, Discord implements other undocumented rules for
            rate-limiting, such as limits per attribute. These cannot be
            detected or handled normally by Hikari due to their undocumented
            nature, and will trigger this exception if they occur.
        hikari.errors.InternalServerError
            If an internal error occurs on Discord while handling the request.
        """  # noqa: E501 - Line too long
    @abc.abstractmethod
    def trigger_typing(
        self, channel: snowflakes.SnowflakeishOr[channels.TextChannel]
    ) -> special_endpoints.TypingIndicator:
        """Trigger typing in a text channel.

        The result of this call can be awaited to trigger typing once, or
        can be used as an async context manager to continually type until the
        context manager is left.

        Examples
        --------
        ```py
        # Trigger typing just once.
        await rest.trigger_typing(channel)

        # Trigger typing repeatedly for 1 minute.
        async with rest.trigger_typing(channel):
            await asyncio.sleep(60)
        ```

        !!! warning
            Sending a message to the channel will cause the typing indicator
            to disappear until it is re-triggered.

        Parameters
        ----------
        channel : hikari.snowflakes.SnowflakeishOr[hikari.channels.TextChannel]
            The channel to trigger typing in. This may be the object or
            the ID of an existing channel.

        Returns
        -------
        hikari.api.special_endpoints.TypingIndicator
            A typing indicator to use.

        Raises
        ------
        hikari.errors.UnauthorizedError
            If you are unauthorized to make the request (invalid/missing token).
        hikari.errors.ForbiddenError
            If you are missing the `SEND_MESSAGES` permission in the channel.
        hikari.errors.NotFoundError
            If the channel is not found.
        hikari.errors.RateLimitTooLongError
            Raised in the event that a rate limit occurs that is
            longer than `max_rate_limit` when making a request.
        hikari.errors.RateLimitedError
            Usually, Hikari will handle and retry on hitting
            rate-limits automatically. This includes most bucket-specific
            rate-limits and global rate-limits. In some rare edge cases,
            however, Discord implements other undocumented rules for
            rate-limiting, such as limits per attribute. These cannot be
            detected or handled normally by Hikari due to their undocumented
            nature, and will trigger this exception if they occur.
        hikari.errors.InternalServerError
            If an internal error occurs on Discord while handling the request.

        !!! note
            The exceptions on this endpoint will only be raised once the result
            is awaited or iterated over. Invoking this function itself will
            not raise any of the above types.
        """
    @abc.abstractmethod
    async def fetch_pins(
        self, channel: snowflakes.SnowflakeishOr[channels.TextChannel]
    ) -> typing.Sequence[messages_.Message]:
        """Fetch the pinned messages in this text channel.

        Parameters
        ----------
        channel : hikari.snowflakes.SnowflakeishOr[hikari.channels.TextChannel]
            The channel to fetch pins from. This may be the object or
            the ID of an existing channel.

        Returns
        -------
        typing.Sequence[hikari.messages.Message]
            The pinned messages in this text channel.

        Raises
        ------
        hikari.errors.UnauthorizedError
            If you are unauthorized to make the request (invalid/missing token).
        hikari.errors.ForbiddenError
            If you are missing the `READ_MESSAGES` permission in the channel.
        hikari.errors.NotFoundError
            If the channel is not found.
        hikari.errors.RateLimitTooLongError
            Raised in the event that a rate limit occurs that is
            longer than `max_rate_limit` when making a request.
        hikari.errors.RateLimitedError
            Usually, Hikari will handle and retry on hitting
            rate-limits automatically. This includes most bucket-specific
            rate-limits and global rate-limits. In some rare edge cases,
            however, Discord implements other undocumented rules for
            rate-limiting, such as limits per attribute. These cannot be
            detected or handled normally by Hikari due to their undocumented
            nature, and will trigger this exception if they occur.
        hikari.errors.InternalServerError
            If an internal error occurs on Discord while handling the request.
        """
    @abc.abstractmethod
    async def pin_message(
        self,
        channel: snowflakes.SnowflakeishOr[channels.TextChannel],
        message: snowflakes.SnowflakeishOr[messages_.PartialMessage],
    ) -> None:
        """Pin an existing message in the given text channel.

        Parameters
        ----------
        channel : hikari.snowflakes.SnowflakeishOr[hikari.channels.TextChannel]
            The channel to pin a message in. This may be the object or
            the ID of an existing channel.
        message : hikari.snowflakes.SnowflakeishOr[hikari.messages.PartialMessage]
            The message to pin. This may be the object or the ID
            of an existing message.

        Raises
        ------
        hikari.errors.UnauthorizedError
            If you are unauthorized to make the request (invalid/missing token).
        hikari.errors.ForbiddenError
            If you are missing the `MANAGE_MESSAGES` permission in the channel.
        hikari.errors.NotFoundError
            If the channel is not found, or if the message does not exist in
            the given channel.
        hikari.errors.RateLimitTooLongError
            Raised in the event that a rate limit occurs that is
            longer than `max_rate_limit` when making a request.
        hikari.errors.RateLimitedError
            Usually, Hikari will handle and retry on hitting
            rate-limits automatically. This includes most bucket-specific
            rate-limits and global rate-limits. In some rare edge cases,
            however, Discord implements other undocumented rules for
            rate-limiting, such as limits per attribute. These cannot be
            detected or handled normally by Hikari due to their undocumented
            nature, and will trigger this exception if they occur.
        hikari.errors.InternalServerError
            If an internal error occurs on Discord while handling the request.
        """
    @abc.abstractmethod
    async def unpin_message(
        self,
        channel: snowflakes.SnowflakeishOr[channels.TextChannel],
        message: snowflakes.SnowflakeishOr[messages_.PartialMessage],
    ) -> None:
        """Unpin a given message from a given text channel.

        Parameters
        ----------
        channel : hikari.snowflakes.SnowflakeishOr[hikari.channels.TextChannel]
            The channel to unpin a message in. This may be the object or
            the ID of an existing channel.
        message : hikari.snowflakes.SnowflakeishOr[hikari.messages.PartialMessage]
            The message to unpin. This may be the object or the ID of an
            existing message.

        Raises
        ------
        hikari.errors.UnauthorizedError
            If you are unauthorized to make the request (invalid/missing token).
        hikari.errors.ForbiddenError
            If you are missing the `MANAGE_MESSAGES` permission.
        hikari.errors.NotFoundError
            If the channel is not found or the message is not a pinned message
            in the given channel.
        hikari.errors.RateLimitTooLongError
            Raised in the event that a rate limit occurs that is
            longer than `max_rate_limit` when making a request.
        hikari.errors.RateLimitedError
            Usually, Hikari will handle and retry on hitting
            rate-limits automatically. This includes most bucket-specific
            rate-limits and global rate-limits. In some rare edge cases,
            however, Discord implements other undocumented rules for
            rate-limiting, such as limits per attribute. These cannot be
            detected or handled normally by Hikari due to their undocumented
            nature, and will trigger this exception if they occur.
        hikari.errors.InternalServerError
            If an internal error occurs on Discord while handling the request.
        """
    @abc.abstractmethod
    def fetch_messages(
        self,
        channel: snowflakes.SnowflakeishOr[channels.TextChannel],
        *,
        before: undefined.UndefinedOr[snowflakes.SearchableSnowflakeishOr[snowflakes.Unique]] = undefined.UNDEFINED,
        after: undefined.UndefinedOr[snowflakes.SearchableSnowflakeishOr[snowflakes.Unique]] = undefined.UNDEFINED,
        around: undefined.UndefinedOr[snowflakes.SearchableSnowflakeishOr[snowflakes.Unique]] = undefined.UNDEFINED,
    ) -> iterators.LazyIterator[messages_.Message]:
        """Browse the message history for a given text channel.

        Parameters
        ----------
        channel : hikari.snowflakes.SnowflakeishOr[hikari.channels.TextChannel]
            The channel to fetch messages in. This may be the object or
            the ID of an existing channel.

        Other Parameters
        ----------------
        before : hikari.undefined.UndefinedOr[snowflakes.SearchableSnowflakeishOr[hikari.snowflakes.Unique]]
            If provided, fetch messages before this snowflake. If you provide
            a datetime object, it will be transformed into a snowflake. This
            may be any other Discord entity that has an ID. In this case, the
            date the object was first created will be used.
        after : hikari.undefined.UndefinedOr[snowflakes.SearchableSnowflakeishOr[hikari.snowflakes.Unique]]
            If provided, fetch messages after this snowflake. If you provide
            a datetime object, it will be transformed into a snowflake. This
            may be any other Discord entity that has an ID. In this case, the
            date the object was first created will be used.
        around : hikari.undefined.UndefinedOr[snowflakes.SearchableSnowflakeishOr[hikari.snowflakes.Unique]]
            If provided, fetch messages around this snowflake. If you provide
            a datetime object, it will be transformed into a snowflake. This
            may be any other Discord entity that has an ID. In this case, the
            date the object was first created will be used.

        Returns
        -------
        hikari.iterators.LazyIterator[hikari.messages.Message]
            An iterator to fetch the messages.

        !!! note
            This call is not a coroutine function, it returns a special type of
            lazy iterator that will perform API calls as you iterate across it.
            See `hikari.iterators` for the full API for this iterator type.

        Raises
        ------
        builtins.TypeError
            If you specify more than one of `before`, `after`, `around`.
        hikari.errors.UnauthorizedError
            If you are unauthorized to make the request (invalid/missing token).
        hikari.errors.ForbiddenError
            If you are missing the `READ_MESSAGE_HISTORY` permission in the channel.
        hikari.errors.NotFoundError
            If the channel is not found.
        hikari.errors.RateLimitTooLongError
            Raised in the event that a rate limit occurs that is
            longer than `max_rate_limit` when making a request.
        hikari.errors.RateLimitedError
            Usually, Hikari will handle and retry on hitting
            rate-limits automatically. This includes most bucket-specific
            rate-limits and global rate-limits. In some rare edge cases,
            however, Discord implements other undocumented rules for
            rate-limiting, such as limits per attribute. These cannot be
            detected or handled normally by Hikari due to their undocumented
            nature, and will trigger this exception if they occur.
        hikari.errors.InternalServerError
            If an internal error occurs on Discord while handling the request.

        !!! note
            The exceptions on this endpoint (other than `builtins.TypeError`) will only
            be raised once the result is awaited or iterated over. Invoking
            this function itself will not raise anything (other than
            `builtins.TypeError`).
        """  # noqa: E501 - Line too long
    @abc.abstractmethod
    async def fetch_message(
        self,
        channel: snowflakes.SnowflakeishOr[channels.TextChannel],
        message: snowflakes.SnowflakeishOr[messages_.PartialMessage],
    ) -> messages_.Message:
        """Fetch a specific message in the given text channel.

        Parameters
        ----------
        channel : hikari.snowflakes.SnowflakeishOr[hikari.channels.TextChannel]
            The channel to fetch messages in. This may be the object or
            the ID of an existing channel.
        message : hikari.snowflakes.SnowflakeishOr[hikari.messages.PartialMessage]
            The message to fetch. This may be the object or the ID of an
            existing message.

        Returns
        -------
        hikari.messages.Message
            The requested message.

        Raises
        ------
        hikari.errors.UnauthorizedError
            If you are unauthorized to make the request (invalid/missing token).
        hikari.errors.ForbiddenError
            If you are missing the `READ_MESSAGE_HISTORY` permission in the channel.
        hikari.errors.NotFoundError
            If the channel is not found or the message is not found in the
            given text channel.
        hikari.errors.RateLimitTooLongError
            Raised in the event that a rate limit occurs that is
            longer than `max_rate_limit` when making a request.
        hikari.errors.RateLimitedError
            Usually, Hikari will handle and retry on hitting
            rate-limits automatically. This includes most bucket-specific
            rate-limits and global rate-limits. In some rare edge cases,
            however, Discord implements other undocumented rules for
            rate-limiting, such as limits per attribute. These cannot be
            detected or handled normally by Hikari due to their undocumented
            nature, and will trigger this exception if they occur.
        hikari.errors.InternalServerError
            If an internal error occurs on Discord while handling the request.
        """
    @abc.abstractmethod
    async def create_message(
        self,
        channel: snowflakes.SnowflakeishOr[channels.TextChannel],
        content: undefined.UndefinedOr[typing.Any] = undefined.UNDEFINED,
        *,
        embed: undefined.UndefinedOr[embeds_.Embed] = undefined.UNDEFINED,
        attachment: undefined.UndefinedOr[files.Resourceish] = undefined.UNDEFINED,
        attachments: undefined.UndefinedOr[typing.Sequence[files.Resourceish]] = undefined.UNDEFINED,
        tts: undefined.UndefinedOr[bool] = undefined.UNDEFINED,
        nonce: undefined.UndefinedOr[str] = undefined.UNDEFINED,
        mentions_everyone: undefined.UndefinedOr[bool] = undefined.UNDEFINED,
        user_mentions: undefined.UndefinedOr[
            typing.Union[typing.Collection[snowflakes.SnowflakeishOr[users.PartialUser]], bool]
        ] = undefined.UNDEFINED,
        role_mentions: undefined.UndefinedOr[
            typing.Union[typing.Collection[snowflakes.SnowflakeishOr[guilds.PartialRole]], bool]
        ] = undefined.UNDEFINED,
    ) -> messages_.Message:
        """Create a message in the given channel.

        Parameters
        ----------
        channel : hikari.snowflakes.SnowflakeishOr[hikari.channels.TextChannel]
            The channel to create the message in.
        content : hikari.undefined.UndefinedOr[typing.Any]
            If provided, the message contents. If
            `hikari.undefined.UNDEFINED`, then nothing will be sent
            in the content. Any other value here will be cast to a
            `builtins.str`.

            If this is a `hikari.embeds.Embed` and no `embed` kwarg is
            provided, then this will instead update the embed. This allows for
            simpler syntax when sending an embed alone.

            Likewise, if this is a `hikari.files.Resource`, then the
            content is instead treated as an attachment if no `attachment` and
            no `attachments` kwargs are provided.

        Other Parameters
        ----------------
        embed : hikari.undefined.UndefinedOr[hikari.embeds.Embed]
            If provided, the message embed.
        attachment : hikari.undefined.UndefinedOr[hikari.files.Resourceish],
            If provided, the message attachment. This can be a resource,
            or string of a path on your computer or a URL.
        attachments : hikari.undefined.UndefinedOr[typing.Sequence[hikari.files.Resourceish]],
            If provided, the message attachments. These can be resources, or
            strings consisting of paths on your computer or URLs.
        tts : hikari.undefined.UndefinedOr[builtins.bool]
            If provided, whether the message will be read out by a screen
            reader using Discord's TTS (text-to-speech) system.
        nonce : hikari.undefined.UndefinedOr[builtins.str]
            An arbitrary identifier to associate with the message. This
            can be used to identify it later in received events. If provided,
            this must be less than 32 bytes. If not provided, then
            a null value is placed on the message instead. All users can
            see this value.
        mentions_everyone : hikari.undefined.UndefinedOr[builtins.bool]
            If provided, whether the message should parse @everyone/@here
            mentions.
        user_mentions : hikari.undefined.UndefinedOr[typing.Union[typing.Collection[hikari.snowflakes.SnowflakeishOr[hikari.users.PartialUser]], builtins.bool]]
            If provided, and `builtins.True`, all user mentions will be detected.
            If provided, and `builtins.False`, all user mentions will be ignored
            if appearing in the message body.

            Alternatively this may be a collection of
            `hikari.snowflakes.Snowflake`, or
            `hikari.users.PartialUser` derivatives to enforce mentioning
            specific users.
        role_mentions : hikari.undefined.UndefinedOr[typing.Union[typing.Collection[hikari.snowflakes.SnowflakeishOr[hikari.guilds.PartialRole]], builtins.bool]]
            If provided, and `builtins.True`, all role mentions will be detected.
            If provided, and `builtins.False`, all role mentions will be ignored
            if appearing in the message body.

            Alternatively this may be a collection of
            `hikari.snowflakes.Snowflake`, or
            `hikari.guilds.PartialRole` derivatives to enforce mentioning
            specific roles.

        !!! note
            Attachments can be passed as many different things, to aid in
            convenience.

            - If a `pathlib.PurePath` or `builtins.str` to a valid URL, the
                resource at the given URL will be streamed to Discord when
                sending the message. Subclasses of
                `hikari.files.WebResource` such as
                `hikari.files.URL`,
                `hikari.messages.Attachment`,
                `hikari.emojis.Emoji`,
                `EmbedResource`, etc will also be uploaded this way.
                This will use bit-inception, so only a small percentage of the
                resource will remain in memory at any one time, thus aiding in
                scalability.
            - If a `hikari.files.Bytes` is passed, or a `builtins.str`
                that contains a valid data URI is passed, then this is uploaded
                with a randomized file name if not provided.
            - If a `hikari.files.File`, `pathlib.PurePath` or
                `builtins.str` that is an absolute or relative path to a file
                on your file system is passed, then this resource is uploaded
                as an attachment using non-blocking code internally and streamed
                using bit-inception where possible. This depends on the
                type of `concurrent.futures.Executor` that is being used for
                the application (default is a thread pool which supports this
                behaviour).

        Returns
        -------
        hikari.messages.Message
            The created message.

        Raises
        ------
        builtins.ValueError
            If more than 100 unique objects/entities are passed for
            `role_mentions` or `user_mentions`.
        builtins.TypeError
            If both `attachment` and `attachments` are specified.
        hikari.errors.BadRequestError
            This may be raised in several discrete situations, such as messages
            being empty with no attachments or embeds; messages with more than
            2000 characters in them, embeds that exceed one of the many embed
            limits; too many attachments; attachments that are too large;
            invalid image URLs in embeds; users in `user_mentions` not being
            mentioned in the message content; roles in `role_mentions` not
            being mentioned in the message content.
        hikari.errors.UnauthorizedError
            If you are unauthorized to make the request (invalid/missing token).
        hikari.errors.ForbiddenError
            If you are missing the `SEND_MESSAGES` in the channel or the
            person you are trying to message has the DM's disabled.
        hikari.errors.NotFoundError
            If the channel is not found.
        hikari.errors.RateLimitTooLongError
            Raised in the event that a rate limit occurs that is
            longer than `max_rate_limit` when making a request.
        hikari.errors.RateLimitedError
            Usually, Hikari will handle and retry on hitting
            rate-limits automatically. This includes most bucket-specific
            rate-limits and global rate-limits. In some rare edge cases,
            however, Discord implements other undocumented rules for
            rate-limiting, such as limits per attribute. These cannot be
            detected or handled normally by Hikari due to their undocumented
            nature, and will trigger this exception if they occur.
        hikari.errors.InternalServerError
            If an internal error occurs on Discord while handling the request.

        !!! warning
            You are expected to make a connection to the gateway and identify
            once before being able to use this endpoint for a bot.
        """  # noqa: E501 - Line too long
    @abc.abstractmethod
    async def create_crossposts(
        self,
        channel: snowflakes.SnowflakeishOr[channels.GuildNewsChannel],
        message: snowflakes.SnowflakeishOr[messages_.PartialMessage],
    ) -> messages_.Message:
        """Broadcast an announcement message.

        Parameters
        ----------
        channel : hikari.snowflakes.SnowflakeishOr[hikari.channels.GuildNewsChannel]
            The object or ID of the news channel to crosspost a message in.
        message : hikari.snowflakes.SnowflakeishOr[hikari.messages.PartialMessage]
            The object or ID of the message to crosspost.

        Returns
        -------
        hikari.messages.Message
            The message object that was crossposted.

        Raises
        ------
        hikari.errors.BadRequestError
            If you tried to crosspost a message that has already been broadcast.
        hikari.errors.UnauthorizedError
            If you are unauthorized to make the request (invalid/missing token).
        hikari.errors.ForbiddenError
            If you try to crosspost a message by the current user without the
            `SEND_MESSAGES` permission for the target news channel or try to
            crosspost a message by another user without both the `SEND_MESSAGES`
            and `MANAGE_MESSAGES` permissions for the target channel.
        hikari.errors.NotFoundError
            If the channel or message is not found.
        hikari.errors.RateLimitTooLongError
            Raised in the event that a rate limit occurs that is
            longer than `max_rate_limit` when making a request.
        hikari.errors.RateLimitedError
            Usually, Hikari will handle and retry on hitting
            rate-limits automatically. This includes most bucket-specific
            rate-limits and global rate-limits. In some rare edge cases,
            however, Discord implements other undocumented rules for
            rate-limiting, such as limits per attribute. These cannot be
            detected or handled normally by Hikari due to their undocumented
            nature, and will trigger this exception if they occur.
        hikari.errors.InternalServerError
            If an internal error occurs on Discord while handling the request.
        """
    @abc.abstractmethod
    async def edit_message(
        self,
        channel: snowflakes.SnowflakeishOr[channels.TextChannel],
        message: snowflakes.SnowflakeishOr[messages_.PartialMessage],
        content: undefined.UndefinedOr[typing.Any] = undefined.UNDEFINED,
        *,
        embed: undefined.UndefinedNoneOr[embeds_.Embed] = undefined.UNDEFINED,
        mentions_everyone: undefined.UndefinedOr[bool] = undefined.UNDEFINED,
        user_mentions: undefined.UndefinedOr[
            typing.Union[typing.Collection[snowflakes.SnowflakeishOr[users.PartialUser]], bool]
        ] = undefined.UNDEFINED,
        role_mentions: undefined.UndefinedOr[
            typing.Union[typing.Collection[snowflakes.SnowflakeishOr[guilds.PartialRole]], bool]
        ] = undefined.UNDEFINED,
        flags: undefined.UndefinedOr[messages_.MessageFlag] = undefined.UNDEFINED,
    ) -> messages_.Message:
        """Edit an existing message in a given channel.

        Parameters
        ----------
        channel : hikari.snowflakes.SnowflakeishOr[hikari.channels.TextChannel]
            The channel to create the message in. This may be
            the object or the ID of an existing channel.
        message : hikari.snowflakes.SnowflakeishOr[hikari.messages.PartialMessage]
            The message to edit. This may be the object or the ID
            of an existing message.
        content : hikari.undefined.UndefinedOr[typing.Any]
            If provided, the message content to update with. If
            `hikari.undefined.UNDEFINED`, then the content will not
            be changed. If `builtins.None`, then the content will be removed.
            Any other value will be cast to a `builtins.str` before sending.

            If this is a `hikari.embeds.Embed` and no `embed` kwarg is
            provided, then this will instead update the embed. This allows for
            simpler syntax when sending an embed alone.

        Other Parameters
        ----------------
        embed : hikari.undefined.UndefinedNoneOr[hikari.embeds.Embed]
            If provided, the embed to set on the message. If
            `hikari.undefined.UNDEFINED`, the previous embed if
            present is not changed. If this is `builtins.None`, then the embed
            is removed if present. Otherwise, the new embed value that was
            provided will be used as the replacement.
        mentions_everyone : hikari.undefined.UndefinedOr[builtins.bool]
            If provided, sanitation for `@everyone` mentions. If
            `hikari.undefined.UNDEFINED`, then the previous setting is
            not changed. If `builtins.True`, then `@everyone`/`@here` mentions
            in the message content will show up as mentioning everyone that can
            view the chat.
        user_mentions : hikari.undefined.UndefinedOr[typing.Union[typing.Collection[hikari.snowflakes.SnowflakeishOr[hikari.users.PartialUser]], builtins.bool]]
            If provided, sanitation for user mentions. If
            `hikari.undefined.UNDEFINED`, then the previous setting is
            not changed. If `builtins.True`, all valid user mentions will behave
            as mentions. If `builtins.False`, all valid user mentions will not
            behave as mentions.

            You may alternatively pass a collection of
            `hikari.snowflakes.Snowflake` user IDs, or
            `hikari.users.PartialUser`-derived objects.
        role_mentions : hikari.undefined.UndefinedOr[typing.Union[typing.Collection[hikari.snowflakes.SnowflakeishOr[hikari.guilds.PartialRole]], builtins.bool]]
            If provided, sanitation for role mentions. If
            `hikari.undefined.UNDEFINED`, then the previous setting is
            not changed. If `builtins.True`, all valid role mentions will behave
            as mentions. If `builtins.False`, all valid role mentions will not
            behave as mentions.

            You may alternatively pass a collection of
            `hikari.snowflakes.Snowflake` role IDs, or
            `hikari.guilds.PartialRole`-derived objects.
        flags : hikari.undefined.UndefinedOr[hikari.messages.MessageFlag]
            If provided, optional flags to set on the message. If
            `hikari.undefined.UNDEFINED`, then nothing is changed.

            Note that some flags may not be able to be set. Currently the only
            flags that can be set are `NONE` and `SUPPRESS_EMBEDS`. If you
            have `MANAGE_MESSAGES` permissions, you can use this call to
            suppress embeds on another user's message.

        !!! note
            Mentioning everyone, roles, or users in message edits currently
            will not send a push notification showing a new mention to people
            on Discord. It will still highlight in their chat as if they
            were mentioned, however.

        !!! note
            There is currently no documented way to clear attachments or edit
            attachments from a previously sent message on Discord's API. To
            do this, `delete` the message and re-send it. This also applies
            to embed attachments.

        !!! warning
            If you specify one of `mentions_everyone`, `user_mentions`, or
            `role_mentions`, then all others will default to `builtins.False`,
            even if they were enabled previously.

            This is a limitation of Discord's design. If in doubt, specify all three of
            them each time.

        !!! warning
            If the message was not sent by your user, the only parameter
            you may provide to this call is the `flags` parameter. Anything
            else will result in a `hikari.errors.ForbiddenError` being raised.

        Returns
        -------
        hikari.messages.Message
            The edited message.

        Raises
        ------
        hikari.errors.BadRequestError
            This may be raised in several discrete situations, such as messages
            being empty with no embeds; messages with more than 2000 characters
            in them, embeds that exceed one of the many embed
            limits; invalid image URLs in embeds; users in `user_mentions` not
            being mentioned in the message content; roles in `role_mentions` not
            being mentioned in the message content.
        hikari.errors.UnauthorizedError
            If you are unauthorized to make the request (invalid/missing token).
        hikari.errors.ForbiddenError
            If you are missing the `SEND_MESSAGES` in the channel; if you try to
            change the contents of another user's message; or if you try to edit
            the flags on another user's message without the `MANAGE_MESSAGES`
            permission.
        hikari.errors.NotFoundError
            If the channel or message is not found.
        hikari.errors.RateLimitTooLongError
            Raised in the event that a rate limit occurs that is
            longer than `max_rate_limit` when making a request.
        hikari.errors.RateLimitedError
            Usually, Hikari will handle and retry on hitting
            rate-limits automatically. This includes most bucket-specific
            rate-limits and global rate-limits. In some rare edge cases,
            however, Discord implements other undocumented rules for
            rate-limiting, such as limits per attribute. These cannot be
            detected or handled normally by Hikari due to their undocumented
            nature, and will trigger this exception if they occur.
        hikari.errors.InternalServerError
            If an internal error occurs on Discord while handling the request.
        """  # noqa: E501 - Line too long
    @abc.abstractmethod
    async def delete_message(
        self,
        channel: snowflakes.SnowflakeishOr[channels.TextChannel],
        message: snowflakes.SnowflakeishOr[messages_.PartialMessage],
    ) -> None:
        """Delete a given message in a given channel.

        Parameters
        ----------
        channel : hikari.snowflakes.SnowflakeishOr[hikari.channels.TextChannel]
            The channel to delete the message in. This may be
            the object or the ID of an existing channel.
        message : hikari.snowflakes.SnowflakeishOr[hikari.messages.PartialMessage]
            The message to delete. This may be the object or the ID of
            an existing message.

        Raises
        ------
        hikari.errors.UnauthorizedError
            If you are unauthorized to make the request (invalid/missing token).
        hikari.errors.ForbiddenError
            If you are missing the `MANAGE_MESSAGES` permission, and the message is
            not sent by you.
        hikari.errors.NotFoundError
            If the channel or message is not found.
        hikari.errors.RateLimitTooLongError
            Raised in the event that a rate limit occurs that is
            longer than `max_rate_limit` when making a request.
        hikari.errors.RateLimitedError
            Usually, Hikari will handle and retry on hitting
            rate-limits automatically. This includes most bucket-specific
            rate-limits and global rate-limits. In some rare edge cases,
            however, Discord implements other undocumented rules for
            rate-limiting, such as limits per attribute. These cannot be
            detected or handled normally by Hikari due to their undocumented
            nature, and will trigger this exception if they occur.
        hikari.errors.InternalServerError
            If an internal error occurs on Discord while handling the request.
        """
    @abc.abstractmethod
    async def delete_messages(
        self,
        channel: snowflakes.SnowflakeishOr[channels.TextChannel],
        /,
        *messages: snowflakes.SnowflakeishOr[messages_.PartialMessage],
    ) -> None:
        """Bulk-delete messages from the channel.

        Parameters
        ----------
        channel : hikari.snowflakes.SnowflakeishOr[hikari.channels.TextChannel]
            The channel to bulk delete the messages in. This may be
            the object or the ID of an existing channel.
        *messages : hikari.snowflakes.SnowflakeishOr[hikari.messages.PartialMessage]
            The messages to delete. This may be one or more
            objects or IDs of existing messages.

        !!! note
            This API endpoint will only be able to delete 100 messages
            at a time. For anything more than this, multiple requests will
            be executed one-after-the-other, since the rate limits for this
            endpoint do not favour more than one request per bucket.

            If one message is left over from chunking per 100 messages, or
            only one message is passed to this coroutine function, then the
            logic is expected to defer to `delete_message`. The implication
            of this is that the `delete_message` endpoint is ratelimited
            by a different bucket with different usage rates.

        !!! warning
            This endpoint is not atomic. If an error occurs midway through
            a bulk delete, you will **not** be able to revert any changes made
            up to this point.

        !!! warning
            Specifying any messages more than 14 days old will cause the call
            to fail, potentially with partial completion.

        Raises
        ------
        hikari.errors.BulkDeleteError
            An error containing the messages successfully deleted, and the
            messages that were not removed. The
            `builtins.BaseException.__cause__` of the exception will be the
            original error that terminated this process.
        """
    @abc.abstractmethod
    async def add_reaction(
        self,
        channel: snowflakes.SnowflakeishOr[channels.TextChannel],
        message: snowflakes.SnowflakeishOr[messages_.PartialMessage],
        emoji: emojis.Emojiish,
    ) -> None:
        """Add a reaction emoji to a message in a given channel.

        Parameters
        ----------
        channel : hikari.snowflakes.SnowflakeishOr[hikari.channels.TextChannel]
            The channel where the message to add the reaction to is. This
            may be a `hikari.channels.TextChannel` or the ID of an existing
            channel.
        message : hikari.snowflakes.SnowflakeishOr[hikari.messages.PartialMessage]
            The message to add a reaction to. This may be the
            object or the ID of an existing message.
        emoji : hikari.emojis.Emojiish
            The emoji to react to the message with.

        Raises
        ------
        hikari.errors.BadRequestError
            If an invalid unicode emoji is given, or if the given custom emoji
            does not exist.
        hikari.errors.UnauthorizedError
            If you are unauthorized to make the request (invalid/missing token).
        hikari.errors.ForbiddenError
            If you are missing the `ADD_REACTIONS` permission (this is only
            necessary if you are the first person to add the reaction).
        hikari.errors.NotFoundError
            If the channel or message is not found.
        hikari.errors.RateLimitTooLongError
            Raised in the event that a rate limit occurs that is
            longer than `max_rate_limit` when making a request.
        hikari.errors.RateLimitedError
            Usually, Hikari will handle and retry on hitting
            rate-limits automatically. This includes most bucket-specific
            rate-limits and global rate-limits. In some rare edge cases,
            however, Discord implements other undocumented rules for
            rate-limiting, such as limits per attribute. These cannot be
            detected or handled normally by Hikari due to their undocumented
            nature, and will trigger this exception if they occur.
        hikari.errors.InternalServerError
            If an internal error occurs on Discord while handling the request.
        """
    @abc.abstractmethod
    async def delete_my_reaction(
        self,
        channel: snowflakes.SnowflakeishOr[channels.TextChannel],
        message: snowflakes.SnowflakeishOr[messages_.PartialMessage],
        emoji: emojis.Emojiish,
    ) -> None:
        """Delete a reaction that your application user created.

        Parameters
        ----------
        channel : hikari.snowflakes.SnowflakeishOr[hikari.channels.TextChannel]
            The channel where the message to delete the reaction from is.
            This may be the object or the ID of an existing channel.
        message : hikari.snowflakes.SnowflakeishOr[hikari.messages.PartialMessage]
            The message to delete a reaction from. This may be the
            object or the ID of an existing message.
        emoji : hikari.emojis.Emojiish
            The emoji to remove your reaction from.

        Raises
        ------
        hikari.errors.BadRequestError
            If an invalid unicode emoji is given, or if the given custom emoji
            does not exist.
        hikari.errors.UnauthorizedError
            If you are unauthorized to make the request (invalid/missing token).
        hikari.errors.NotFoundError
            If the channel or message is not found.
        hikari.errors.RateLimitTooLongError
            Raised in the event that a rate limit occurs that is
            longer than `max_rate_limit` when making a request.
        hikari.errors.RateLimitedError
            Usually, Hikari will handle and retry on hitting
            rate-limits automatically. This includes most bucket-specific
            rate-limits and global rate-limits. In some rare edge cases,
            however, Discord implements other undocumented rules for
            rate-limiting, such as limits per attribute. These cannot be
            detected or handled normally by Hikari due to their undocumented
            nature, and will trigger this exception if they occur.
        hikari.errors.InternalServerError
            If an internal error occurs on Discord while handling the request.
        """
    @abc.abstractmethod
    async def delete_all_reactions_for_emoji(
        self,
        channel: snowflakes.SnowflakeishOr[channels.TextChannel],
        message: snowflakes.SnowflakeishOr[messages_.PartialMessage],
        emoji: emojis.Emojiish,
    ) -> None:
        """Delete all reactions for a single emoji on a given message.

        Parameters
        ----------
        channel : hikari.snowflakes.SnowflakeishOr[hikari.channels.TextChannel]
            The channel where the message to delete the reactions from is.
            This may be the object or the ID of an existing channel.
        message : hikari.snowflakes.SnowflakeishOr[hikari.messages.PartialMessage]
            The message to delete the reactions from. This may be the
            object or the ID of an existing message.
        emoji : hikari.emojis.Emojiish
            The emoji to delete all reactions from.

        Raises
        ------
        hikari.errors.BadRequestError
            If an invalid unicode emoji is given, or if the given custom emoji
            does not exist.
        hikari.errors.ForbiddenError
            If you are missing the `MANAGE_MESSAGES` permission.
        hikari.errors.UnauthorizedError
            If you are unauthorized to make the request (invalid/missing token).
        hikari.errors.NotFoundError
            If the channel or message is not found.
        hikari.errors.RateLimitTooLongError
            Raised in the event that a rate limit occurs that is
            longer than `max_rate_limit` when making a request.
        hikari.errors.RateLimitedError
            Usually, Hikari will handle and retry on hitting
            rate-limits automatically. This includes most bucket-specific
            rate-limits and global rate-limits. In some rare edge cases,
            however, Discord implements other undocumented rules for
            rate-limiting, such as limits per attribute. These cannot be
            detected or handled normally by Hikari due to their undocumented
            nature, and will trigger this exception if they occur.
        hikari.errors.InternalServerError
            If an internal error occurs on Discord while handling the request.
        """
    @abc.abstractmethod
    async def delete_reaction(
        self,
        channel: snowflakes.SnowflakeishOr[channels.TextChannel],
        message: snowflakes.SnowflakeishOr[messages_.PartialMessage],
        emoji: emojis.Emojiish,
        user: snowflakes.SnowflakeishOr[users.PartialUser],
    ) -> None:
        """Delete a reaction from a message.

        If you are looking to delete your own applications reaction, use
        `delete_my_reaction`.

        Parameters
        ----------
        channel : hikari.snowflakes.SnowflakeishOr[hikari.channels.TextChannel]
            The channel where the message to delete the reaction from is.
            This may be the object or the ID of an existing channel.
        message : hikari.snowflakes.SnowflakeishOr[hikari.messages.PartialMessage]
            The message to delete a reaction from. This may be the
            object or the ID of an existing message.
        emoji : hikari.emojis.Emojiish
            The emoji to delete the reaction of.
        user : hikari.snowflakes.SnowflakeishOr[hikari.users.PartialUser]
            The user whose reaction will be deleted. This may be the
            object or the ID of an existing user.

        Raises
        ------
        hikari.errors.BadRequestError
            If an invalid unicode emoji is given, or if the given custom emoji
            does not exist.
        hikari.errors.ForbiddenError
            If you are missing the `MANAGE_MESSAGES` permission.
        hikari.errors.UnauthorizedError
            If you are unauthorized to make the request (invalid/missing token).
        hikari.errors.NotFoundError
            If the channel or message is not found.
        hikari.errors.RateLimitTooLongError
            Raised in the event that a rate limit occurs that is
            longer than `max_rate_limit` when making a request.
        hikari.errors.RateLimitedError
            Usually, Hikari will handle and retry on hitting
            rate-limits automatically. This includes most bucket-specific
            rate-limits and global rate-limits. In some rare edge cases,
            however, Discord implements other undocumented rules for
            rate-limiting, such as limits per attribute. These cannot be
            detected or handled normally by Hikari due to their undocumented
            nature, and will trigger this exception if they occur.
        hikari.errors.InternalServerError
            If an internal error occurs on Discord while handling the request.
        """
    @abc.abstractmethod
    async def delete_all_reactions(
        self,
        channel: snowflakes.SnowflakeishOr[channels.TextChannel],
        message: snowflakes.SnowflakeishOr[messages_.PartialMessage],
    ) -> None:
        """Delete all reactions from a message.

        Parameters
        ----------
        channel : hikari.snowflakes.SnowflakeishOr[hikari.channels.TextChannel]
            The channel where the message to delete all reactions from is.
            This may be the object or the ID of an existing channel.
        message : hikari.snowflakes.SnowflakeishOr[hikari.messages.PartialMessage]
            The message to delete all reactions from. This may be the
            object or the ID of an existing message.

        Raises
        ------
        hikari.errors.ForbiddenError
            If you are missing the `MANAGE_MESSAGES` permission.
        hikari.errors.UnauthorizedError
            If you are unauthorized to make the request (invalid/missing token).
        hikari.errors.NotFoundError
            If the channel or message is not found.
        hikari.errors.RateLimitTooLongError
            Raised in the event that a rate limit occurs that is
            longer than `max_rate_limit` when making a request.
        hikari.errors.RateLimitedError
            Usually, Hikari will handle and retry on hitting
            rate-limits automatically. This includes most bucket-specific
            rate-limits and global rate-limits. In some rare edge cases,
            however, Discord implements other undocumented rules for
            rate-limiting, such as limits per attribute. These cannot be
            detected or handled normally by Hikari due to their undocumented
            nature, and will trigger this exception if they occur.
        hikari.errors.InternalServerError
            If an internal error occurs on Discord while handling the request.
        """
    @abc.abstractmethod
    def fetch_reactions_for_emoji(
        self,
        channel: snowflakes.SnowflakeishOr[channels.TextChannel],
        message: snowflakes.SnowflakeishOr[messages_.PartialMessage],
        emoji: emojis.Emojiish,
    ) -> iterators.LazyIterator[users.User]:
        """Fetch reactions for an emoji from a message.

        Parameters
        ----------
        channel : hikari.snowflakes.SnowflakeishOr[hikari.channels.TextChannel]
            The channel where the message to fetch the reactions from is.
            This may be the object or the ID of an existing channel.
        message : hikari.snowflakes.SnowflakeishOr[hikari.messages.PartialMessage]
            The message to fetch the reactions from. This may be the
            object or the ID of an existing message.
        emoji : hikari.emojis.Emojiish
            The emoji to filter reactions by.

        Returns
        -------
        hikari.iterators.LazyIterator[hikari.users.User]
            An iterator to fetch the users.

        !!! note
            This call is not a coroutine function, it returns a special type of
            lazy iterator that will perform API calls as you iterate across it.
            See `hikari.iterators` for the full API for this iterator type.

        Raises
        ------
        hikari.errors.BadRequestError
            If an invalid unicode emoji is given, or if the given custom emoji
            does not exist.
        hikari.errors.UnauthorizedError
            If you are unauthorized to make the request (invalid/missing token).
        hikari.errors.NotFoundError
            If the channel or message is not found.
        hikari.errors.RateLimitTooLongError
            Raised in the event that a rate limit occurs that is
            longer than `max_rate_limit` when making a request.
        hikari.errors.RateLimitedError
            Usually, Hikari will handle and retry on hitting
            rate-limits automatically. This includes most bucket-specific
            rate-limits and global rate-limits. In some rare edge cases,
            however, Discord implements other undocumented rules for
            rate-limiting, such as limits per attribute. These cannot be
            detected or handled normally by Hikari due to their undocumented
            nature, and will trigger this exception if they occur.
        hikari.errors.InternalServerError
            If an internal error occurs on Discord while handling the request.

        !!! note
            The exceptions on this endpoint will only be raised once the
            result is awaited or iterated over. Invoking this function
            itself will not raise anything.
        """
    @abc.abstractmethod
    async def create_webhook(
        self,
        channel: snowflakes.SnowflakeishOr[channels.TextChannel],
        name: str,
        *,
        avatar: typing.Optional[files.Resourceish] = None,
        reason: undefined.UndefinedOr[str] = undefined.UNDEFINED,
    ) -> webhooks.Webhook:
        """Create webhook in a channel.

        Parameters
        ----------
        channel : hikari.snowflakes.SnowflakeishOr[hikari.channels.TextChannel]
            The channel where the webhook will be created. This may be
            the object or the ID of an existing channel.
        name : str
            The name for the webhook. This cannot be `clyde`.

        Other Parameters
        ----------------
        avatar : typing.Optional[hikari.files.Resourceish]
            If provided, the avatar for the webhook.
        reason : hikari.undefined.UndefinedOr[builtins.str]
            If provided, the reason that will be recorded in the audit logs.
            Maximum of 512 characters.

        Returns
        -------
        hikari.webhooks.Webhook
            The created webhook.

        Raises
        ------
        hikari.errors.BadRequestError
            If `name` doesn't follow the restrictions enforced by Discord.
        hikari.errors.ForbiddenError
            If you are missing the `MANAGE_WEBHOOKS` permission.
        hikari.errors.UnauthorizedError
            If you are unauthorized to make the request (invalid/missing token).
        hikari.errors.NotFoundError
            If the channel is not found.
        hikari.errors.RateLimitTooLongError
            Raised in the event that a rate limit occurs that is
            longer than `max_rate_limit` when making a request.
        hikari.errors.RateLimitedError
            Usually, Hikari will handle and retry on hitting
            rate-limits automatically. This includes most bucket-specific
            rate-limits and global rate-limits. In some rare edge cases,
            however, Discord implements other undocumented rules for
            rate-limiting, such as limits per attribute. These cannot be
            detected or handled normally by Hikari due to their undocumented
            nature, and will trigger this exception if they occur.
        hikari.errors.InternalServerError
            If an internal error occurs on Discord while handling the request.
        """
    @abc.abstractmethod
    async def fetch_webhook(
        self,
        webhook: snowflakes.SnowflakeishOr[webhooks.Webhook],
        *,
        token: undefined.UndefinedOr[str] = undefined.UNDEFINED,
    ) -> webhooks.Webhook:
        """Fetch an existing webhook.

        Parameters
        ----------
        webhook : hikari.snowflakes.SnowflakeishOr[hikari.webhooks.Webhook]
            The webhook to fetch. This may be the object or the ID
            of an existing webhook.

        Other Parameters
        ----------------
        token : hikari.undefined.UndefinedOr[builtins.str]
            If provided, the webhook token that will be used to fetch
            the webhook instead of the token the client was initialized with.

        Returns
        -------
        hikari.webhooks.Webhook
            The requested webhook.

        Raises
        ------
        hikari.errors.ForbiddenError
            If you are missing the `MANAGE_WEBHOOKS` permission when not
            using a token.
        hikari.errors.UnauthorizedError
            If you are unauthorized to make the request (invalid/missing token).
        hikari.errors.NotFoundError
            If the webhook is not found.
        hikari.errors.RateLimitTooLongError
            Raised in the event that a rate limit occurs that is
            longer than `max_rate_limit` when making a request.
        hikari.errors.RateLimitedError
            Usually, Hikari will handle and retry on hitting
            rate-limits automatically. This includes most bucket-specific
            rate-limits and global rate-limits. In some rare edge cases,
            however, Discord implements other undocumented rules for
            rate-limiting, such as limits per attribute. These cannot be
            detected or handled normally by Hikari due to their undocumented
            nature, and will trigger this exception if they occur.
        hikari.errors.InternalServerError
            If an internal error occurs on Discord while handling the request.
        """
    @abc.abstractmethod
    async def fetch_channel_webhooks(
        self,
        channel: snowflakes.SnowflakeishOr[channels.TextChannel],
    ) -> typing.Sequence[webhooks.Webhook]:
        """Fetch all channel webhooks.

        Parameters
        ----------
        channel : hikari.snowflakes.SnowflakeishOr[hikari.channels.TextChannel]
            The channel to fetch the webhooks for. This
            may be a `hikari.channels.TextChannel` or the ID of an
            existing channel.

        Returns
        -------
        typing.Sequence[hikari.webhooks.Webhook]
            The fetched webhooks.

        Raises
        ------
        hikari.errors.ForbiddenError
            If you are missing the `MANAGE_WEBHOOKS` permission.
        hikari.errors.UnauthorizedError
            If you are unauthorized to make the request (invalid/missing token).
        hikari.errors.NotFoundError
            If the channel is not found.
        hikari.errors.RateLimitTooLongError
            Raised in the event that a rate limit occurs that is
            longer than `max_rate_limit` when making a request.
        hikari.errors.RateLimitedError
            Usually, Hikari will handle and retry on hitting
            rate-limits automatically. This includes most bucket-specific
            rate-limits and global rate-limits. In some rare edge cases,
            however, Discord implements other undocumented rules for
            rate-limiting, such as limits per attribute. These cannot be
            detected or handled normally by Hikari due to their undocumented
            nature, and will trigger this exception if they occur.
        hikari.errors.InternalServerError
            If an internal error occurs on Discord while handling the request.
        """
    @abc.abstractmethod
    async def fetch_guild_webhooks(
        self,
        guild: snowflakes.SnowflakeishOr[guilds.PartialGuild],
    ) -> typing.Sequence[webhooks.Webhook]:
        """Fetch all guild webhooks.

        Parameters
        ----------
        guild : hikari.snowflakes.SnowflakeishOr[hikari.guilds.PartialGuild]
            The guild to fetch the webhooks for. This may be the object
            or the ID of an existing guild.

        Returns
        -------
        typing.Sequence[hikari.webhooks.Webhook]
            The fetched webhooks.

        Raises
        ------
        hikari.errors.ForbiddenError
            If you are missing the `MANAGE_WEBHOOKS` permission.
        hikari.errors.UnauthorizedError
            If you are unauthorized to make the request (invalid/missing token).
        hikari.errors.NotFoundError
            If the guild is not found.
        hikari.errors.RateLimitTooLongError
            Raised in the event that a rate limit occurs that is
            longer than `max_rate_limit` when making a request.
        hikari.errors.RateLimitedError
            Usually, Hikari will handle and retry on hitting
            rate-limits automatically. This includes most bucket-specific
            rate-limits and global rate-limits. In some rare edge cases,
            however, Discord implements other undocumented rules for
            rate-limiting, such as limits per attribute. These cannot be
            detected or handled normally by Hikari due to their undocumented
            nature, and will trigger this exception if they occur.
        hikari.errors.InternalServerError
            If an internal error occurs on Discord while handling the request.
        """
    @abc.abstractmethod
    async def edit_webhook(
        self,
        webhook: snowflakes.SnowflakeishOr[webhooks.Webhook],
        *,
        token: undefined.UndefinedOr[str] = undefined.UNDEFINED,
        name: undefined.UndefinedOr[str] = undefined.UNDEFINED,
        avatar: undefined.UndefinedNoneOr[files.Resourceish] = undefined.UNDEFINED,
        channel: undefined.UndefinedOr[snowflakes.SnowflakeishOr[channels.TextChannel]] = undefined.UNDEFINED,
        reason: undefined.UndefinedOr[str] = undefined.UNDEFINED,
    ) -> webhooks.Webhook:
        """Edit a webhook.

        Parameters
        ----------
        webhook : hikari.snowflakes.SnowflakeishOr[hikari.webhooks.Webhook]
            The webhook to edit. This may be the object or the
            ID of an existing webhook.

        Other Parameters
        ----------------
        token : hikari.undefined.UndefinedOr[builtins.str]
            If provided, the webhook token that will be used to edit
            the webhook instead of the token the client was initialized with.
        name : hikari.undefined.UndefinedOr[builtins.str]
            If provided, the new webhook name.
        avatar : hikari.undefined.UndefinedNoneOr[hikari.files.Resourceish]
            If provided, the new webhook avatar. If `builtins.None`, will
            remove the webhook avatar.
        channel : hikari.undefined.UndefinedOr[hikari.snowflakes.SnowflakeishOr[hikari.channels.TextChannel]]
            If provided, the text channel to move the webhook to.
        reason : hikari.undefined.UndefinedOr[builtins.str]
            If provided, the reason that will be recorded in the audit logs.
            Maximum of 512 characters.

        Returns
        -------
        hikari.webhooks.Webhook
            The edited webhook.

        Raises
        ------
        hikari.errors.ForbiddenError
            If you are missing the `MANAGE_WEBHOOKS` permission when not
            using a token.
        hikari.errors.UnauthorizedError
            If you are unauthorized to make the request (invalid/missing token).
        hikari.errors.NotFoundError
            If the webhook is not found.
        hikari.errors.RateLimitTooLongError
            Raised in the event that a rate limit occurs that is
            longer than `max_rate_limit` when making a request.
        hikari.errors.RateLimitedError
            Usually, Hikari will handle and retry on hitting
            rate-limits automatically. This includes most bucket-specific
            rate-limits and global rate-limits. In some rare edge cases,
            however, Discord implements other undocumented rules for
            rate-limiting, such as limits per attribute. These cannot be
            detected or handled normally by Hikari due to their undocumented
            nature, and will trigger this exception if they occur.
        hikari.errors.InternalServerError
            If an internal error occurs on Discord while handling the request.
        """
    @abc.abstractmethod
    async def delete_webhook(
        self,
        webhook: snowflakes.SnowflakeishOr[webhooks.Webhook],
        *,
        token: undefined.UndefinedOr[str] = undefined.UNDEFINED,
    ) -> None:
        """Delete a webhook.

        Parameters
        ----------
        webhook : hikari.snowflakes.SnowflakeishOr[hikari.webhooks.Webhook]
            The webhook to delete. This may be the object or the
            ID of an existing webhook.

        Other Parameters
        ----------------
        token : hikari.undefined.UndefinedOr[builtins.str]
            If provided, the webhook token that will be used to delete
            the webhook instead of the token the client was initialized with.

        Raises
        ------
        hikari.errors.ForbiddenError
            If you are missing the `MANAGE_WEBHOOKS` permission when not
            using a token.
        hikari.errors.UnauthorizedError
            If you are unauthorized to make the request (invalid/missing token).
        hikari.errors.NotFoundError
            If the webhook is not found.
        hikari.errors.RateLimitTooLongError
            Raised in the event that a rate limit occurs that is
            longer than `max_rate_limit` when making a request.
        hikari.errors.RateLimitedError
            Usually, Hikari will handle and retry on hitting
            rate-limits automatically. This includes most bucket-specific
            rate-limits and global rate-limits. In some rare edge cases,
            however, Discord implements other undocumented rules for
            rate-limiting, such as limits per attribute. These cannot be
            detected or handled normally by Hikari due to their undocumented
            nature, and will trigger this exception if they occur.
        hikari.errors.InternalServerError
            If an internal error occurs on Discord while handling the request.
        """
    @abc.abstractmethod
    async def execute_webhook(
        self,
        webhook: snowflakes.SnowflakeishOr[webhooks.Webhook],
        token: str,
        content: undefined.UndefinedOr[typing.Any] = undefined.UNDEFINED,
        *,
        username: undefined.UndefinedOr[str] = undefined.UNDEFINED,
        avatar_url: undefined.UndefinedOr[str] = undefined.UNDEFINED,
        embed: undefined.UndefinedOr[embeds_.Embed] = undefined.UNDEFINED,
        embeds: undefined.UndefinedOr[typing.Sequence[embeds_.Embed]] = undefined.UNDEFINED,
        attachment: undefined.UndefinedOr[files.Resourceish] = undefined.UNDEFINED,
        attachments: undefined.UndefinedOr[typing.Sequence[files.Resourceish]] = undefined.UNDEFINED,
        tts: undefined.UndefinedOr[bool] = undefined.UNDEFINED,
        mentions_everyone: undefined.UndefinedOr[bool] = undefined.UNDEFINED,
        user_mentions: undefined.UndefinedOr[
            typing.Union[typing.Collection[snowflakes.SnowflakeishOr[users.PartialUser]], bool]
        ] = undefined.UNDEFINED,
        role_mentions: undefined.UndefinedOr[
            typing.Union[typing.Collection[snowflakes.SnowflakeishOr[guilds.PartialRole]], bool]
        ] = undefined.UNDEFINED,
    ) -> messages_.Message:
        """Execute a webhook.

        Parameters
        ----------
        webhook : hikari.snowflakes.SnowflakeishOr[hikari.webhooks.Webhook]
            The webhook to execute. This may be the object
            or the ID of an existing webhook.
        token: builtins.str
            The webhook token.
        content : hikari.undefined.UndefinedOr[typing.Any]
            If provided, the message contents. If
            `hikari.undefined.UNDEFINED`, then nothing will be sent
            in the content. Any other value here will be cast to a
            `builtins.str`.

            If this is a `hikari.embeds.Embed` and no `embed` nor
            no `embeds` kwarg is provided, then this will instead
            update the embed. This allows for simpler syntax when
            sending an embed alone.

            Likewise, if this is a `hikari.files.Resource`, then the
            content is instead treated as an attachment if no `attachment` and
            no `attachments` kwargs are provided.

        Other Parameters
        ----------------
        username : hikari.undefined.UndefinedOr[builtins.str]
            If provided, the username to override the webhook's username
            for this request.
        avatar_url : hikari.undefined.UndefinedOr[builtins.str]
            If provided, the url of an image to override the webhook's
            avatar with for this request.
        embed : hikari.undefined.UndefinedOr[hikari.embeds.Embed]
            If provided, the message embed.
        embeds : hikari.undefined.UndefinedOr[typing.Sequence[hikari.embeds.Embed]]
            If provided, the message embeds.
        attachment : hikari.undefined.UndefinedOr[hikari.files.Resourceish],
            If provided, the message attachment. This can be a resource,
            or string of a path on your computer or a URL.
        attachments : hikari.undefined.UndefinedOr[typing.Sequence[hikari.files.Resourceish]],
            If provided, the message attachments. These can be resources, or
            strings consisting of paths on your computer or URLs.
        tts : hikari.undefined.UndefinedOr[builtins.bool]
            If provided, whether the message will be read out by a screen
            reader using Discord's TTS (text-to-speech) system.
        mentions_everyone : hikari.undefined.UndefinedOr[builtins.bool]
            If provided, whether the message should parse @everyone/@here
            mentions.
        user_mentions : hikari.undefined.UndefinedOr[typing.Union[typing.Collection[hikari.snowflakes.SnowflakeishOr[hikari.users.PartialUser]], builtins.bool]]
            If provided, and `builtins.True`, all user mentions will be detected.
            If provided, and `builtins.False`, all user mentions will be ignored
            if appearing in the message body.
            Alternatively this may be a collection of
            `hikari.snowflakes.Snowflake`, or
            `hikari.users.PartialUser` derivatives to enforce mentioning
            specific users.
        role_mentions : hikari.undefined.UndefinedOr[typing.Union[typing.Collection[hikari.snowflakes.SnowflakeishOr[hikari.guilds.PartialRole]], builtins.bool]]
            If provided, and `builtins.True`, all role mentions will be detected.
            If provided, and `builtins.False`, all role mentions will be ignored
            if appearing in the message body.
            Alternatively this may be a collection of
            `hikari.snowflakes.Snowflake`, or
            `hikari.guilds.PartialRole` derivatives to enforce mentioning
            specific roles.

        !!! note
            Attachments can be passed as many different things, to aid in
            convenience.

            - If a `pathlib.PurePath` or `builtins.str` to a valid URL, the
                resource at the given URL will be streamed to Discord when
                sending the message. Subclasses of
                `hikari.files.WebResource` such as
                `hikari.files.URL`,
                `hikari.messages.Attachment`,
                `hikari.emojis.Emoji`,
                `EmbedResource`, etc will also be uploaded this way.
                This will use bit-inception, so only a small percentage of the
                resource will remain in memory at any one time, thus aiding in
                scalability.
            - If a `hikari.files.Bytes` is passed, or a `builtins.str`
                that contains a valid data URI is passed, then this is uploaded
                with a randomized file name if not provided.
            - If a `hikari.files.File`, `pathlib.PurePath` or
                `builtins.str` that is an absolute or relative path to a file
                on your file system is passed, then this resource is uploaded
                as an attachment using non-blocking code internally and streamed
                using bit-inception where possible. This depends on the
                type of `concurrent.futures.Executor` that is being used for
                the application (default is a thread pool which supports this
                behaviour).

        Returns
        -------
        hikari.messages.Message
            The created message.

        Raises
        ------
        builtins.ValueError
            If more than 100 unique objects/entities are passed for
            `role_mentions` or `user_mentions`.
        builtins.TypeError
            If both `attachment` and `attachments` are specified or if both
            `embed` and `embeds` are specified.
        hikari.errors.BadRequestError
            This may be raised in several discrete situations, such as messages
            being empty with no attachments or embeds; messages with more than
            2000 characters in them, embeds that exceed one of the many embed
            limits; too many attachments; attachments that are too large;
            invalid image URLs in embeds; users in `user_mentions` not being
            mentioned in the message content; roles in `role_mentions` not
            being mentioned in the message content.
        hikari.errors.UnauthorizedError
            If you are unauthorized to make the request (invalid/missing token).
        hikari.errors.NotFoundError
            If the channel is not found.
        hikari.errors.RateLimitTooLongError
            Raised in the event that a rate limit occurs that is
            longer than `max_rate_limit` when making a request.
        hikari.errors.RateLimitedError
            Usually, Hikari will handle and retry on hitting
            rate-limits automatically. This includes most bucket-specific
            rate-limits and global rate-limits. In some rare edge cases,
            however, Discord implements other undocumented rules for
            rate-limiting, such as limits per attribute. These cannot be
            detected or handled normally by Hikari due to their undocumented
            nature, and will trigger this exception if they occur.
        hikari.errors.InternalServerError
            If an internal error occurs on Discord while handling the request.
        """  # noqa: E501 - Line too long
    @abc.abstractmethod
    async def fetch_gateway_url(self) -> str:
        """Fetch the gateway url.

        !!! note
            This endpoint does not require any valid authorization.

        Returns
        -------
        builtins.str
            The gateway URL to use to connect to the gateway.

        Raises
        ------
        hikari.errors.RateLimitTooLongError
            Raised in the event that a rate limit occurs that is
            longer than `max_rate_limit` when making a request.
        hikari.errors.RateLimitedError
            Usually, Hikari will handle and retry on hitting
            rate-limits automatically. This includes most bucket-specific
            rate-limits and global rate-limits. In some rare edge cases,
            however, Discord implements other undocumented rules for
            rate-limiting, such as limits per attribute. These cannot be
            detected or handled normally by Hikari due to their undocumented
            nature, and will trigger this exception if they occur.
        hikari.errors.InternalServerError
            If an internal error occurs on Discord while handling the request.
        """
    @abc.abstractmethod
    async def fetch_gateway_bot(self) -> sessions.GatewayBot:
        """Fetch the gateway info for the bot.

        Returns
        -------
        hikari.sessions.GatewayBot
            The gateway bot.

        Raises
        ------
        hikari.errors.UnauthorizedError
            If you are unauthorized to make the request (invalid/missing token).
        hikari.errors.RateLimitTooLongError
            Raised in the event that a rate limit occurs that is
            longer than `max_rate_limit` when making a request.
        hikari.errors.RateLimitedError
            Usually, Hikari will handle and retry on hitting
            rate-limits automatically. This includes most bucket-specific
            rate-limits and global rate-limits. In some rare edge cases,
            however, Discord implements other undocumented rules for
            rate-limiting, such as limits per attribute. These cannot be
            detected or handled normally by Hikari due to their undocumented
            nature, and will trigger this exception if they occur.
        hikari.errors.InternalServerError
            If an internal error occurs on Discord while handling the request.
        """
    @abc.abstractmethod
    async def fetch_invite(self, invite: invites.Inviteish) -> invites.Invite:
        """Fetch an existing invite.

        Parameters
        ----------
        invite : hikari.invites.Inviteish
            The invite to fetch. This may be an invite object or
            the code of an existing invite.

        Returns
        -------
        hikari.invites.Invite
            The requested invite.

        Raises
        ------
        hikari.errors.UnauthorizedError
            If you are unauthorized to make the request (invalid/missing token).
        hikari.errors.NotFoundError
            If the invite is not found.
        hikari.errors.RateLimitTooLongError
            Raised in the event that a rate limit occurs that is
            longer than `max_rate_limit` when making a request.
        hikari.errors.RateLimitedError
            Usually, Hikari will handle and retry on hitting
            rate-limits automatically. This includes most bucket-specific
            rate-limits and global rate-limits. In some rare edge cases,
            however, Discord implements other undocumented rules for
            rate-limiting, such as limits per attribute. These cannot be
            detected or handled normally by Hikari due to their undocumented
            nature, and will trigger this exception if they occur.
        hikari.errors.InternalServerError
            If an internal error occurs on Discord while handling the request.
        """
@abc.abstractmethod
async def delete_invite(self, invite: invites.Inviteish) -> None:
    """Delete an existing invite.

    Parameters
    ----------
    invite : hikari.invites.Inviteish
        The invite to delete. This may be an invite object or
        the code of an existing invite.

    Raises
    ------
    hikari.errors.ForbiddenError
        If you are missing the `MANAGE_GUILD` permission in the guild
        the invite is from or if you are missing the `MANAGE_CHANNELS`
        permission in the channel the invite is from.
    hikari.errors.UnauthorizedError
        If you are unauthorized to make the request (invalid/missing token).
    hikari.errors.NotFoundError
        If the invite is not found.
    hikari.errors.RateLimitTooLongError
        Raised in the event that a rate limit occurs that is
        longer than `max_rate_limit` when making a request.
    hikari.errors.RateLimitedError
        Usually, Hikari will handle and retry on hitting
        rate-limits automatically. This includes most bucket-specific
        rate-limits and global rate-limits. In some rare edge cases,
        however, Discord implements other undocumented rules for
        rate-limiting, such as limits per attribute. These cannot be
        detected or handled normally by Hikari due to their undocumented
        nature, and will trigger this exception if they occur.
    hikari.errors.InternalServerError
        If an internal error occurs on Discord while handling the request.
    """
@abc.abstractmethod
async def fetch_my_user(self) -> users.OwnUser:
    """Fetch the token's associated user.

    Returns
    -------
    hikari.users.OwnUser
        The token's associated user.

    Raises
    ------
    hikari.errors.UnauthorizedError
        If you are unauthorized to make the request (invalid/missing token).
    hikari.errors.RateLimitTooLongError
        Raised in the event that a rate limit occurs that is
        longer than `max_rate_limit` when making a request.
    hikari.errors.RateLimitedError
        Usually, Hikari will handle and retry on hitting
        rate-limits automatically. This includes most bucket-specific
        rate-limits and global rate-limits. In some rare edge cases,
        however, Discord implements other undocumented rules for
        rate-limiting, such as limits per attribute. These cannot be
        detected or handled normally by Hikari due to their undocumented
        nature, and will trigger this exception if they occur.
    hikari.errors.InternalServerError
        If an internal error occurs on Discord while handling the request.
    """
@abc.abstractmethod
async def edit_my_user(
    self,
    *,
    username: undefined.UndefinedOr[str] = undefined.UNDEFINED,
    avatar: undefined.UndefinedNoneOr[files.Resourceish] = undefined.UNDEFINED,
) -> users.OwnUser:
    """Edit the token's associated user.

    Other Parameters
    ----------------
    username : hikari.undefined.UndefinedOr[builtins.str]
        If provided, the new username.
    avatar : hikari.undefined.UndefinedNoneOr[hikari.files.Resourceish]
        If provided, the new avatar. If `builtins.None`,
        the avatar will be removed.

    Returns
    -------
    hikari.users.OwnUser
        The edited token's associated user.

    Raises
    ------
    hikari.errors.BadRequestError
        If any of the fields that are passed have an invalid value.
        Discord also returns this on a ratelimit:
        https://github.com/discord/discord-api-docs/issues/1462
    hikari.errors.UnauthorizedError
        If you are unauthorized to make the request (invalid/missing token).
    hikari.errors.InternalServerError
        If an internal error occurs on Discord while handling the request.
    """
@abc.abstractmethod
async def fetch_my_connections(self) -> typing.Sequence[applications.OwnConnection]:
    """Fetch the token's associated connections.

    Returns
    -------
    typing.Sequence[hikari.applications.OwnConnection]
        The token's associated connections.

    Raises
    ------
    hikari.errors.UnauthorizedError
        If you are unauthorized to make the request (invalid/missing token).
    hikari.errors.RateLimitTooLongError
        Raised in the event that a rate limit occurs that is
        longer than `max_rate_limit` when making a request.
    hikari.errors.RateLimitedError
        Usually, Hikari will handle and retry on hitting
        rate-limits automatically. This includes most bucket-specific
        rate-limits and global rate-limits. In some rare edge cases,
        however, Discord implements other undocumented rules for
        rate-limiting, such as limits per attribute. These cannot be
        detected or handled normally by Hikari due to their undocumented
        nature, and will trigger this exception if they occur.
    hikari.errors.InternalServerError
        If an internal error occurs on Discord while handling the request.
    """
@abc.abstractmethod
def fetch_my_guilds(
    self,
    *,
    newest_first: bool = False,
    start_at: undefined.UndefinedOr[snowflakes.SearchableSnowflakeishOr[guilds.PartialGuild]] = undefined.UNDEFINED,
) -> iterators.LazyIterator[applications.OwnGuild]:
    """Fetch the token's associated guilds.

    Other Parameters
    ----------------
    newest_first : builtins.bool
        Whether to fetch the newest first or the oldest first.
        Defaults to `builtins.False`.
    start_at : hikari.undefined.UndefinedOr[hikari.snowflakes.SearchableSnowflakeishOr[hikari.guilds.PartialGuild]]
        If provided, will start at this snowflake. If you provide
        a datetime object, it will be transformed into a snowflake. This
        may also be a guild object. In this case, the
        date the object was first created will be used.

    Returns
    -------
    hikari.iterators.LazyIterator[hikari.applications.OwnGuild]
        The token's associated guilds.

    !!! note
        This call is not a coroutine function, it returns a special type of
        lazy iterator that will perform API calls as you iterate across it.
        See `hikari.iterators` for the full API for this iterator type.

    Raises
    ------
    hikari.errors.BadRequestError
        If any of the fields that are passed have an invalid value.
    hikari.errors.UnauthorizedError
        If you are unauthorized to make the request (invalid/missing token).
    hikari.errors.RateLimitTooLongError
        Raised in the event that a rate limit occurs that is
        longer than `max_rate_limit` when making a request.
    hikari.errors.RateLimitedError
        Usually, Hikari will handle and retry on hitting
        rate-limits automatically. This includes most bucket-specific
        rate-limits and global rate-limits. In some rare edge cases,
        however, Discord implements other undocumented rules for
        rate-limiting, such as limits per attribute. These cannot be
        detected or handled normally by Hikari due to their undocumented
        nature, and will trigger this exception if they occur.
    hikari.errors.InternalServerError
        If an internal error occurs on Discord while handling the request.

    !!! note
        The exceptions on this endpoint will only be raised once the
        result is awaited or iterated over. Invoking this function
        itself will not raise anything.
    """
@abc.abstractmethod
async def leave_guild(self, guild: snowflakes.SnowflakeishOr[guilds.PartialGuild], /) -> None:
    """Leave a guild.

    Parameters
    ----------
    guild : hikari.snowflakes.SnowflakeishOr[hikari.guilds.PartialGuild]
        The guild to leave. This may be the object or
        the ID of an existing guild.

    Raises
    ------
    hikari.errors.UnauthorizedError
        If you are unauthorized to make the request (invalid/missing token).
    hikari.errors.NotFoundError
        If the guild is not found or you own the guild.
    hikari.errors.RateLimitTooLongError
        Raised in the event that a rate limit occurs that is
        longer than `max_rate_limit` when making a request.
    hikari.errors.RateLimitedError
        Usually, Hikari will handle and retry on hitting
        rate-limits automatically. This includes most bucket-specific
        rate-limits and global rate-limits. In some rare edge cases,
        however, Discord implements other undocumented rules for
        rate-limiting, such as limits per attribute. These cannot be
        detected or handled normally by Hikari due to their undocumented
        nature, and will trigger this exception if they occur.
    hikari.errors.InternalServerError
        If an internal error occurs on Discord while handling the request.
    """
# THIS IS AN OAUTH2 FLOW ONLY
@abc.abstractmethod
async def create_dm_channel(self, user: snowflakes.SnowflakeishOr[users.PartialUser], /) -> channels.DMChannel:
    """Create a DM channel with a user.

    Parameters
    ----------
    user : hikari.snowflakes.SnowflakeishOr[hikari.users.PartialUser]
        The user to create the DM channel with. This may be the
        object or the ID of an existing user.

    Returns
    -------
    hikari.channels.DMChannel
        The created DM channel.

    Raises
    ------
    hikari.errors.BadRequestError
        If the user is not found.
    hikari.errors.UnauthorizedError
        If you are unauthorized to make the request (invalid/missing token).
    hikari.errors.RateLimitTooLongError
        Raised in the event that a rate limit occurs that is
        longer than `max_rate_limit` when making a request.
    hikari.errors.RateLimitedError
        Usually, Hikari will handle and retry on hitting
        rate-limits automatically. This includes most bucket-specific
        rate-limits and global rate-limits. In some rare edge cases,
        however, Discord implements other undocumented rules for
        rate-limiting, such as limits per attribute. These cannot be
        detected or handled normally by Hikari due to their undocumented
        nature, and will trigger this exception if they occur.
    hikari.errors.InternalServerError
        If an internal error occurs on Discord while handling the request.
    """
# THIS IS AN OAUTH2 FLOW BUT CAN BE USED BY BOTS ALSO
@abc.abstractmethod
async def fetch_application(self) -> applications.Application:
    """Fetch the token's associated application.

    Returns
    -------
    hikari.applications.Application
        The token's associated application.

    Raises
    ------
    hikari.errors.UnauthorizedError
        If you are unauthorized to make the request (invalid/missing token).
    hikari.errors.RateLimitTooLongError
        Raised in the event that a rate limit occurs that is
        longer than `max_rate_limit` when making a request.
    hikari.errors.RateLimitedError
        Usually, Hikari will handle and retry on hitting
        rate-limits automatically. This includes most bucket-specific
        rate-limits and global rate-limits. In some rare edge cases,
        however, Discord implements other undocumented rules for
        rate-limiting, such as limits per attribute. These cannot be
        detected or handled normally by Hikari due to their undocumented
        nature, and will trigger this exception if they occur.
    hikari.errors.InternalServerError
        If an internal error occurs on Discord while handling the request.
    """
# THIS IS AN OAUTH2 FLOW ONLY
@abc.abstractmethod
async def add_user_to_guild(
    self,
    access_token: str,
    guild: snowflakes.SnowflakeishOr[guilds.PartialGuild],
    user: snowflakes.SnowflakeishOr[users.PartialUser],
    *,
    nick: undefined.UndefinedOr[str] = undefined.UNDEFINED,
    roles: undefined.UndefinedOr[
        typing.Collection[snowflakes.SnowflakeishOr[guilds.PartialRole]]
    ] = undefined.UNDEFINED,
    mute: undefined.UndefinedOr[bool] = undefined.UNDEFINED,
    deaf: undefined.UndefinedOr[bool] = undefined.UNDEFINED,
) -> typing.Optional[guilds.Member]:
    """Add a user to a guild.

    !!! note
        This requires the `access_token` to have the
        `hikari.applications.OAuth2Scope.GUILDS_JOIN` scope enabled.

    Parameters
    ----------
    access_token : builtins.str
        The access token of the user to add to the guild. This must
        have the `hikari.applications.OAuth2Scope.GUILDS_JOIN` scope
        enabled.
    guild : hikari.snowflakes.SnowflakeishOr[hikari.guilds.PartialGuild]
        The guild to add the user to. This may be the object
        or the ID of an existing guild.
    user : hikari.snowflakes.SnowflakeishOr[hikari.users.PartialUser]
        The user to add to the guild. This may be the object
        or the ID of an existing user.

    Other Parameters
    ----------------
    nick : hikari.undefined.UndefinedOr[builtins.str]
        If provided, the nick to add to the user when he joins the guild.
        Requires the `MANAGE_NICKNAMES` permission on the guild.
    roles : hikari.undefined.UndefinedOr[typing.Collection[hikari.snowflakes.SnowflakeishOr[hikari.guilds.PartialRole]]]
        If provided, the roles to add to the user when he joins the guild.
        This may be a collection of objects or IDs of existing roles.
        Requires the `MANAGE_ROLES` permission on the guild.
    mute : hikari.undefined.UndefinedOr[builtins.bool]
        If provided, the mute state to add the user when he joins the guild.
        Requires the `MUTE_MEMBERS` permission on the guild.
    deaf : hikari.undefined.UndefinedOr[builtins.bool]
        If provided, the deaf state to add the user when he joins the guild.
        Requires the `DEAFEN_MEMBERS` permission on the guild.

    Returns
    -------
    typing.Optional[hikari.guilds.Member]
        `builtins.None` if the user was already part of the guild, else
        `hikari.guilds.Member`.

    Raises
    ------
    hikari.errors.BadRequestError
        If any of the fields that are passed have an invalid value.
    hikari.errors.ForbiddenError
        If you are not part of the guild you want to add the user to,
        if you are missing permissions to do one of the things you specified,
        if you are using an access token for another user, if the token is
        bound to another bot or if the access token doesn't have the
        `hikari.applications.OAuth2Scope.GUILDS_JOIN` scope enabled.
    hikari.errors.UnauthorizedError
        If you are unauthorized to make the request (invalid/missing token).
    hikari.errors.NotFoundError
        If you own the guild or the user is not found.
    hikari.errors.RateLimitTooLongError
        Raised in the event that a rate limit occurs that is
        longer than `max_rate_limit` when making a request.
    hikari.errors.RateLimitedError
        Usually, Hikari will handle and retry on hitting
        rate-limits automatically. This includes most bucket-specific
        rate-limits and global rate-limits. In some rare edge cases,
        however, Discord implements other undocumented rules for
        rate-limiting, such as limits per attribute. These cannot be
        detected or handled normally by Hikari due to their undocumented
        nature, and will trigger this exception if they occur.
    hikari.errors.InternalServerError
        If an internal error occurs on Discord while handling the request.
    """
@abc.abstractmethod
async def fetch_voice_regions(self) -> typing.Sequence[voices.VoiceRegion]:
    """Fetch available voice regions.

    !!! note
        This endpoint doesn't return VIP voice regions.

    Returns
    -------
    typing.Sequence[hikari.voices.VoiceRegion]
        The available voice regions.

    Raises
    ------
    hikari.errors.UnauthorizedError
        If you are unauthorized to make the request (invalid/missing token).
    hikari.errors.RateLimitTooLongError
        Raised in the event that a rate limit occurs that is
        longer than `max_rate_limit` when making a request.
    hikari.errors.RateLimitedError
        Usually, Hikari will handle and retry on hitting
        rate-limits automatically. This includes most bucket-specific
        rate-limits and global rate-limits. In some rare edge cases,
        however, Discord implements other undocumented rules for
        rate-limiting, such as limits per attribute. These cannot be
        detected or handled normally by Hikari due to their undocumented
        nature, and will trigger this exception if they occur.
    hikari.errors.InternalServerError
        If an internal error occurs on Discord while handling the request.
    """
@abc.abstractmethod
async def fetch_user(self, user: snowflakes.SnowflakeishOr[users.PartialUser]) -> users.User:
    """Fetch a user.

    Parameters
    ----------
    user : hikari.snowflakes.SnowflakeishOr[hikari.users.PartialUser]
        The user to fetch. This can be the object
        or the ID of an existing user.

    Returns
    -------
    hikari.users.User
        The requested user.

    Raises
    ------
    hikari.errors.UnauthorizedError
        If you are unauthorized to make the request (invalid/missing token).
    hikari.errors.NotFoundError
        If the user is not found.
    hikari.errors.RateLimitTooLongError
        Raised in the event that a rate limit occurs that is
        longer than `max_rate_limit` when making a request.
    hikari.errors.RateLimitedError
        Usually, Hikari will handle and retry on hitting
        rate-limits automatically. This includes most bucket-specific
        rate-limits and global rate-limits. In some rare edge cases,
        however, Discord implements other undocumented rules for
        rate-limiting, such as limits per attribute. These cannot be
        detected or handled normally by Hikari due to their undocumented
        nature, and will trigger this exception if they occur.
    hikari.errors.InternalServerError
        If an internal error occurs on Discord while handling the request.
    """
# Fix: this method was missing the `@abc.abstractmethod` decorator that every
# sibling method in this interface carries, so subclasses could silently skip
# implementing it. Also corrects the `before` parameter description, which
# wrongly said "after this snowflake" despite the parameter filtering to
# entries *before* the given snowflake.
@abc.abstractmethod
def fetch_audit_log(
    self,
    guild: snowflakes.SnowflakeishOr[guilds.PartialGuild],
    *,
    before: undefined.UndefinedOr[snowflakes.SearchableSnowflakeishOr[snowflakes.Unique]] = undefined.UNDEFINED,
    user: undefined.UndefinedOr[snowflakes.SnowflakeishOr[users.PartialUser]] = undefined.UNDEFINED,
    event_type: undefined.UndefinedOr[audit_logs.AuditLogEventType] = undefined.UNDEFINED,
) -> iterators.LazyIterator[audit_logs.AuditLog]:
    """Fetch the guild's audit log.

    Parameters
    ----------
    guild : hikari.snowflakes.SnowflakeishOr[hikari.guilds.PartialGuild]
        The guild to fetch the audit logs from. This can be a
        guild object or the ID of an existing guild.

    Other Parameters
    ----------------
    before : hikari.undefined.UndefinedOr[hikari.snowflakes.SearchableSnowflakeishOr[hikari.snowflakes.Unique]]
        If provided, filter to only actions before this snowflake. If you provide
        a datetime object, it will be transformed into a snowflake. This
        may be any other Discord entity that has an ID. In this case, the
        date the object was first created will be used.
    user : hikari.undefined.UndefinedOr[hikari.snowflakes.SnowflakeishOr[hikari.users.PartialUser]]
        If provided, the user to filter for.
    event_type : hikari.undefined.UndefinedOr[hikari.audit_logs.AuditLogEventType]
        If provided, the event type to filter for.

    Returns
    -------
    hikari.iterators.LazyIterator[hikari.audit_logs.AuditLog]
        The guild's audit log.

    !!! note
        This call is not a coroutine function, it returns a special type of
        lazy iterator that will perform API calls as you iterate across it.
        See `hikari.iterators` for the full API for this iterator type.

    Raises
    ------
    hikari.errors.BadRequestError
        If any of the fields that are passed have an invalid value.
    hikari.errors.ForbiddenError
        If you are missing the `VIEW_AUDIT_LOG` permission.
    hikari.errors.UnauthorizedError
        If you are unauthorized to make the request (invalid/missing token).
    hikari.errors.RateLimitTooLongError
        Raised in the event that a rate limit occurs that is
        longer than `max_rate_limit` when making a request.
    hikari.errors.RateLimitedError
        Usually, Hikari will handle and retry on hitting
        rate-limits automatically. This includes most bucket-specific
        rate-limits and global rate-limits. In some rare edge cases,
        however, Discord implements other undocumented rules for
        rate-limiting, such as limits per attribute. These cannot be
        detected or handled normally by Hikari due to their undocumented
        nature, and will trigger this exception if they occur.
    hikari.errors.InternalServerError
        If an internal error occurs on Discord while handling the request.

    !!! note
        The exceptions on this endpoint will only be raised once the
        result is awaited or iterated over. Invoking this function
        itself will not raise anything.
    """
@abc.abstractmethod
async def fetch_emoji(
    self,
    guild: snowflakes.SnowflakeishOr[guilds.PartialGuild],
    emoji: snowflakes.SnowflakeishOr[emojis.CustomEmoji],
) -> emojis.KnownCustomEmoji:
    """Fetch a guild emoji.

    Parameters
    ----------
    guild : hikari.snowflakes.SnowflakeishOr[hikari.guilds.PartialGuild]
        The guild to fetch the emoji from. This can be a
        guild object or the ID of an existing guild.
    emoji : hikari.snowflakes.SnowflakeishOr[hikari.emojis.CustomEmoji]
        The emoji to fetch. This can be a `hikari.emojis.CustomEmoji`
        or the ID of an existing emoji.

    Returns
    -------
    hikari.emojis.KnownCustomEmoji
        The requested emoji.

    Raises
    ------
    hikari.errors.NotFoundError
        If the guild or the emoji are not found.
    hikari.errors.UnauthorizedError
        If you are unauthorized to make the request (invalid/missing token).
    hikari.errors.RateLimitTooLongError
        Raised in the event that a rate limit occurs that is
        longer than `max_rate_limit` when making a request.
    hikari.errors.RateLimitedError
        Usually, Hikari will handle and retry on hitting
        rate-limits automatically. This includes most bucket-specific
        rate-limits and global rate-limits. In some rare edge cases,
        however, Discord implements other undocumented rules for
        rate-limiting, such as limits per attribute. These cannot be
        detected or handled normally by Hikari due to their undocumented
        nature, and will trigger this exception if they occur.
    hikari.errors.InternalServerError
        If an internal error occurs on Discord while handling the request.
    """
@abc.abstractmethod
async def fetch_guild_emojis(
    self, guild: snowflakes.SnowflakeishOr[guilds.PartialGuild]
) -> typing.Sequence[emojis.KnownCustomEmoji]:
    """Fetch the emojis of a guild.

    Parameters
    ----------
    guild : hikari.snowflakes.SnowflakeishOr[hikari.guilds.PartialGuild]
        The guild to fetch the emojis from. This can be a
        guild object or the ID of an existing guild.

    Returns
    -------
    typing.Sequence[hikari.emojis.KnownCustomEmoji]
        The requested emojis.

    Raises
    ------
    hikari.errors.NotFoundError
        If the guild is not found.
    hikari.errors.UnauthorizedError
        If you are unauthorized to make the request (invalid/missing token).
    hikari.errors.RateLimitTooLongError
        Raised in the event that a rate limit occurs that is
        longer than `max_rate_limit` when making a request.
    hikari.errors.RateLimitedError
        Usually, Hikari will handle and retry on hitting
        rate-limits automatically. This includes most bucket-specific
        rate-limits and global rate-limits. In some rare edge cases,
        however, Discord implements other undocumented rules for
        rate-limiting, such as limits per attribute. These cannot be
        detected or handled normally by Hikari due to their undocumented
        nature, and will trigger this exception if they occur.
    hikari.errors.InternalServerError
        If an internal error occurs on Discord while handling the request.
    """
@abc.abstractmethod
async def create_emoji(
    self,
    guild: snowflakes.SnowflakeishOr[guilds.PartialGuild],
    name: str,
    image: files.Resourceish,
    *,
    roles: undefined.UndefinedOr[
        typing.Collection[snowflakes.SnowflakeishOr[guilds.PartialRole]]
    ] = undefined.UNDEFINED,
    reason: undefined.UndefinedOr[str] = undefined.UNDEFINED,
) -> emojis.KnownCustomEmoji:
    """Create an emoji in a guild.

    Parameters
    ----------
    guild : hikari.snowflakes.SnowflakeishOr[hikari.guilds.PartialGuild]
        The guild to create the emoji on. This can be a
        guild object or the ID of an existing guild.
    name : builtins.str
        The name for the emoji.
    image : hikari.files.Resourceish
        The 128x128 image for the emoji. Maximum upload size is 256kb.
        This can be a still or an animated image.

    Other Parameters
    ----------------
    roles : hikari.undefined.UndefinedOr[typing.Collection[hikari.snowflakes.SnowflakeishOr[hikari.guilds.PartialRole]]]
        If provided, a collection of the roles that will be able to
        use this emoji. This can be a `hikari.guilds.PartialRole` or
        the ID of an existing role.
    reason : hikari.undefined.UndefinedOr[builtins.str]
        If provided, the reason that will be recorded in the audit logs.
        Maximum of 512 characters.

    Returns
    -------
    hikari.emojis.KnownCustomEmoji
        The created emoji.

    Raises
    ------
    hikari.errors.BadRequestError
        If any of the fields that are passed have an invalid value or
        if there are no more spaces for the type of emoji in the guild.
    hikari.errors.ForbiddenError
        If you are missing `MANAGE_EMOJIS` in the server.
    hikari.errors.NotFoundError
        If the guild is not found.
    hikari.errors.UnauthorizedError
        If you are unauthorized to make the request (invalid/missing token).
    hikari.errors.RateLimitTooLongError
        Raised in the event that a rate limit occurs that is
        longer than `max_rate_limit` when making a request.
    hikari.errors.RateLimitedError
        Usually, Hikari will handle and retry on hitting
        rate-limits automatically. This includes most bucket-specific
        rate-limits and global rate-limits. In some rare edge cases,
        however, Discord implements other undocumented rules for
        rate-limiting, such as limits per attribute. These cannot be
        detected or handled normally by Hikari due to their undocumented
        nature, and will trigger this exception if they occur.
    hikari.errors.InternalServerError
        If an internal error occurs on Discord while handling the request.
    """
@abc.abstractmethod
async def edit_emoji(
    self,
    guild: snowflakes.SnowflakeishOr[guilds.PartialGuild],
    emoji: snowflakes.SnowflakeishOr[emojis.CustomEmoji],
    *,
    name: undefined.UndefinedOr[str] = undefined.UNDEFINED,
    roles: undefined.UndefinedOr[
        typing.Collection[snowflakes.SnowflakeishOr[guilds.PartialRole]]
    ] = undefined.UNDEFINED,
    reason: undefined.UndefinedOr[str] = undefined.UNDEFINED,
) -> emojis.KnownCustomEmoji:
    """Edit an emoji in a guild.

    Parameters
    ----------
    guild : hikari.snowflakes.SnowflakeishOr[hikari.guilds.PartialGuild]
        The guild to edit the emoji on. This can be a
        guild object or the ID of an existing guild.
    emoji : hikari.snowflakes.SnowflakeishOr[hikari.emojis.CustomEmoji]
        The emoji to edit. This can be a `hikari.emojis.CustomEmoji`
        or the ID of an existing emoji.

    Other Parameters
    ----------------
    name : hikari.undefined.UndefinedOr[builtins.str]
        If provided, the new name for the emoji.
    roles : hikari.undefined.UndefinedOr[typing.Collection[hikari.snowflakes.SnowflakeishOr[hikari.guilds.PartialRole]]]
        If provided, the new collection of roles that will be able to
        use this emoji. This can be a `hikari.guilds.PartialRole` or
        the ID of an existing role.
    reason : hikari.undefined.UndefinedOr[builtins.str]
        If provided, the reason that will be recorded in the audit logs.
        Maximum of 512 characters.

    Returns
    -------
    hikari.emojis.KnownCustomEmoji
        The edited emoji.

    Raises
    ------
    hikari.errors.BadRequestError
        If any of the fields that are passed have an invalid value.
    hikari.errors.ForbiddenError
        If you are missing `MANAGE_EMOJIS` in the server.
    hikari.errors.NotFoundError
        If the guild or the emoji are not found.
    hikari.errors.UnauthorizedError
        If you are unauthorized to make the request (invalid/missing token).
    hikari.errors.RateLimitTooLongError
        Raised in the event that a rate limit occurs that is
        longer than `max_rate_limit` when making a request.
    hikari.errors.RateLimitedError
        Usually, Hikari will handle and retry on hitting
        rate-limits automatically. This includes most bucket-specific
        rate-limits and global rate-limits. In some rare edge cases,
        however, Discord implements other undocumented rules for
        rate-limiting, such as limits per attribute. These cannot be
        detected or handled normally by Hikari due to their undocumented
        nature, and will trigger this exception if they occur.
    hikari.errors.InternalServerError
        If an internal error occurs on Discord while handling the request.
    """
@abc.abstractmethod
async def delete_emoji(
    self,
    guild: snowflakes.SnowflakeishOr[guilds.PartialGuild],
    emoji: snowflakes.SnowflakeishOr[emojis.CustomEmoji],
    # Reason is not currently supported for some reason.
) -> None:
    """Delete an emoji in a guild.

    Parameters
    ----------
    guild : hikari.snowflakes.SnowflakeishOr[hikari.guilds.PartialGuild]
        The guild to delete the emoji on. This can be a
        guild object or the ID of an existing guild.
    emoji : hikari.snowflakes.SnowflakeishOr[hikari.emojis.CustomEmoji]
        The emoji to delete. This can be a `hikari.emojis.CustomEmoji`
        or the ID of an existing emoji.

    Raises
    ------
    hikari.errors.ForbiddenError
        If you are missing `MANAGE_EMOJIS` in the server.
    hikari.errors.NotFoundError
        If the guild or the emoji are not found.
    hikari.errors.UnauthorizedError
        If you are unauthorized to make the request (invalid/missing token).
    hikari.errors.RateLimitTooLongError
        Raised in the event that a rate limit occurs that is
        longer than `max_rate_limit` when making a request.
    hikari.errors.RateLimitedError
        Usually, Hikari will handle and retry on hitting
        rate-limits automatically. This includes most bucket-specific
        rate-limits and global rate-limits. In some rare edge cases,
        however, Discord implements other undocumented rules for
        rate-limiting, such as limits per attribute. These cannot be
        detected or handled normally by Hikari due to their undocumented
        nature, and will trigger this exception if they occur.
    hikari.errors.InternalServerError
        If an internal error occurs on Discord while handling the request.
    """
@abc.abstractmethod
def guild_builder(self, name: str, /) -> special_endpoints.GuildBuilder:
    """Make a guild builder to create a guild with.

    Parameters
    ----------
    name : builtins.str
        The new guild's name.

    Returns
    -------
    hikari.api.special_endpoints.GuildBuilder
        The guild builder to use. This will allow to create a guild
        later with `hikari.api.special_endpoints.GuildBuilder.create`.

    Raises
    ------
    hikari.errors.BadRequestError
        If any of the fields that are passed have an invalid value.
    hikari.errors.UnauthorizedError
        If you are unauthorized to make the request (invalid/missing token).
    hikari.errors.RateLimitTooLongError
        Raised in the event that a rate limit occurs that is
        longer than `max_rate_limit` when making a request.
    hikari.errors.RateLimitedError
        Usually, Hikari will handle and retry on hitting
        rate-limits automatically. This includes most bucket-specific
        rate-limits and global rate-limits. In some rare edge cases,
        however, Discord implements other undocumented rules for
        rate-limiting, such as limits per attribute. These cannot be
        detected or handled normally by Hikari due to their undocumented
        nature, and will trigger this exception if they occur.
    hikari.errors.InternalServerError
        If an internal error occurs on Discord while handling the request.

    !!! note
        The exceptions on this endpoint will only be raised once
        `hikari.api.special_endpoints.GuildBuilder.create` is called.
        Invoking this function itself will not raise any of
        the above types.

    See Also
    --------
    Guild builder: `hikari.api.special_endpoints.GuildBuilder`
    """
@abc.abstractmethod
async def fetch_guild(self, guild: snowflakes.SnowflakeishOr[guilds.PartialGuild]) -> guilds.RESTGuild:
    """Fetch a guild.

    Parameters
    ----------
    guild : hikari.snowflakes.SnowflakeishOr[hikari.guilds.PartialGuild]
        The guild to fetch. This can be the object
        or the ID of an existing guild.

    Returns
    -------
    hikari.guilds.RESTGuild
        The requested guild.

    Raises
    ------
    hikari.errors.ForbiddenError
        If you are not part of the guild.
    hikari.errors.NotFoundError
        If the guild is not found.
    hikari.errors.UnauthorizedError
        If you are unauthorized to make the request (invalid/missing token).
    hikari.errors.RateLimitTooLongError
        Raised in the event that a rate limit occurs that is
        longer than `max_rate_limit` when making a request.
    hikari.errors.RateLimitedError
        Usually, Hikari will handle and retry on hitting
        rate-limits automatically. This includes most bucket-specific
        rate-limits and global rate-limits. In some rare edge cases,
        however, Discord implements other undocumented rules for
        rate-limiting, such as limits per attribute. These cannot be
        detected or handled normally by Hikari due to their undocumented
        nature, and will trigger this exception if they occur.
    hikari.errors.InternalServerError
        If an internal error occurs on Discord while handling the request.
    """
@abc.abstractmethod
async def fetch_guild_preview(self, guild: snowflakes.SnowflakeishOr[guilds.PartialGuild]) -> guilds.GuildPreview:
    """Fetch a guild preview.

    Parameters
    ----------
    guild : hikari.snowflakes.SnowflakeishOr[hikari.guilds.PartialGuild]
        The guild to fetch the preview of. This can be a
        guild object or the ID of an existing guild.

    Returns
    -------
    hikari.guilds.GuildPreview
        The requested guild preview.

    !!! note
        This will only work for guilds you are a part of or are public.

    Raises
    ------
    hikari.errors.NotFoundError
        If the guild is not found or you are not part of the guild.
    hikari.errors.UnauthorizedError
        If you are unauthorized to make the request (invalid/missing token).
    hikari.errors.RateLimitTooLongError
        Raised in the event that a rate limit occurs that is
        longer than `max_rate_limit` when making a request.
    hikari.errors.RateLimitedError
        Usually, Hikari will handle and retry on hitting
        rate-limits automatically. This includes most bucket-specific
        rate-limits and global rate-limits. In some rare edge cases,
        however, Discord implements other undocumented rules for
        rate-limiting, such as limits per attribute. These cannot be
        detected or handled normally by Hikari due to their undocumented
        nature, and will trigger this exception if they occur.
    hikari.errors.InternalServerError
        If an internal error occurs on Discord while handling the request.
    """
@abc.abstractmethod
async def edit_guild(
    self,
    guild: snowflakes.SnowflakeishOr[guilds.PartialGuild],
    *,
    name: undefined.UndefinedOr[str] = undefined.UNDEFINED,
    region: undefined.UndefinedOr[voices.VoiceRegionish] = undefined.UNDEFINED,
    verification_level: undefined.UndefinedOr[guilds.GuildVerificationLevel] = undefined.UNDEFINED,
    default_message_notifications: undefined.UndefinedOr[
        guilds.GuildMessageNotificationsLevel
    ] = undefined.UNDEFINED,
    explicit_content_filter_level: undefined.UndefinedOr[
        guilds.GuildExplicitContentFilterLevel
    ] = undefined.UNDEFINED,
    afk_channel: undefined.UndefinedOr[snowflakes.SnowflakeishOr[channels.GuildVoiceChannel]] = undefined.UNDEFINED,
    afk_timeout: undefined.UndefinedOr[time.Intervalish] = undefined.UNDEFINED,
    icon: undefined.UndefinedNoneOr[files.Resourceish] = undefined.UNDEFINED,
    owner: undefined.UndefinedOr[snowflakes.SnowflakeishOr[users.PartialUser]] = undefined.UNDEFINED,
    splash: undefined.UndefinedNoneOr[files.Resourceish] = undefined.UNDEFINED,
    banner: undefined.UndefinedNoneOr[files.Resourceish] = undefined.UNDEFINED,
    system_channel: undefined.UndefinedNoneOr[
        snowflakes.SnowflakeishOr[channels.GuildTextChannel]
    ] = undefined.UNDEFINED,
    rules_channel: undefined.UndefinedNoneOr[
        snowflakes.SnowflakeishOr[channels.GuildTextChannel]
    ] = undefined.UNDEFINED,
    public_updates_channel: undefined.UndefinedNoneOr[
        snowflakes.SnowflakeishOr[channels.GuildTextChannel]
    ] = undefined.UNDEFINED,
    preferred_locale: undefined.UndefinedOr[str] = undefined.UNDEFINED,
    reason: undefined.UndefinedOr[str] = undefined.UNDEFINED,
) -> guilds.RESTGuild:
    """Edit a guild.

    Parameters
    ----------
    guild : hikari.snowflakes.SnowflakeishOr[hikari.guilds.PartialGuild]
        The guild to edit. This may be the object
        or the ID of an existing guild.

    Other Parameters
    ----------------
    name : hikari.undefined.UndefinedOr[builtins.str]
        If provided, the new name for the guild.
    region : hikari.undefined.UndefinedOr[hikari.voices.VoiceRegionish]
        If provided, the new voice region for the guild.
    verification_level : hikari.undefined.UndefinedOr[hikari.guilds.GuildVerificationLevel]
        If provided, the new verification level.
    default_message_notifications : hikari.undefined.UndefinedOr[hikari.guilds.GuildMessageNotificationsLevel]
        If provided, the new default message notifications level.
    explicit_content_filter_level : hikari.undefined.UndefinedOr[hikari.guilds.GuildExplicitContentFilterLevel]
        If provided, the new explicit content filter level.
    afk_channel : hikari.undefined.UndefinedOr[hikari.snowflakes.SnowflakeishOr[hikari.channels.GuildVoiceChannel]]
        If provided, the new afk channel. Requires `afk_timeout` to
        be set to work.
    afk_timeout : hikari.undefined.UndefinedOr[hikari.internal.time.Intervalish]
        If provided, the new afk timeout.
    icon : hikari.undefined.UndefinedNoneOr[hikari.files.Resourceish]
        If provided, the new guild icon. Must be a 1024x1024 image or can be
        an animated gif when the guild has the `ANIMATED_ICON` feature.
    owner : hikari.undefined.UndefinedOr[hikari.snowflakes.SnowflakeishOr[hikari.users.PartialUser]]
        If provided, the new guild owner.

        !!! warn
            You need to be the owner of the server to use this.
    splash : hikari.undefined.UndefinedNoneOr[hikari.files.Resourceish]
        If provided, the new guild splash. Must be a 16:9 image and the
        guild must have the `INVITE_SPLASH` feature.
    banner : hikari.undefined.UndefinedNoneOr[hikari.files.Resourceish]
        If provided, the new guild banner. Must be a 16:9 image and the
        guild must have the `BANNER` feature.
    system_channel : hikari.undefined.UndefinedNoneOr[hikari.snowflakes.SnowflakeishOr[hikari.channels.GuildTextChannel]]
        If provided, the new system channel.
    rules_channel : hikari.undefined.UndefinedNoneOr[hikari.snowflakes.SnowflakeishOr[hikari.channels.GuildTextChannel]]
        If provided, the new rules channel.
    public_updates_channel : hikari.undefined.UndefinedNoneOr[hikari.snowflakes.SnowflakeishOr[hikari.channels.GuildTextChannel]]
        If provided, the new public updates channel.
    preferred_locale : hikari.undefined.UndefinedOr[builtins.str]
        If provided, the new preferred locale.
    reason : hikari.undefined.UndefinedOr[builtins.str]
        If provided, the reason that will be recorded in the audit logs.
        Maximum of 512 characters.

    Returns
    -------
    hikari.guilds.RESTGuild
        The edited guild.

    Raises
    ------
    hikari.errors.BadRequestError
        If any of the fields that are passed have an invalid value.
    hikari.errors.ForbiddenError
        If you are missing the `MANAGE_GUILD` permission or if you tried to
        pass ownership without being the server owner.
    hikari.errors.UnauthorizedError
        If you are unauthorized to make the request (invalid/missing token).
    hikari.errors.NotFoundError
        If the guild is not found.
    hikari.errors.RateLimitTooLongError
        Raised in the event that a rate limit occurs that is
        longer than `max_rate_limit` when making a request.
    hikari.errors.RateLimitedError
        Usually, Hikari will handle and retry on hitting
        rate-limits automatically. This includes most bucket-specific
        rate-limits and global rate-limits. In some rare edge cases,
        however, Discord implements other undocumented rules for
        rate-limiting, such as limits per attribute. These cannot be
        detected or handled normally by Hikari due to their undocumented
        nature, and will trigger this exception if they occur.
    hikari.errors.InternalServerError
        If an internal error occurs on Discord while handling the request.
    """  # noqa: E501 - Line too long
@abc.abstractmethod
async def delete_guild(self, guild: snowflakes.SnowflakeishOr[guilds.PartialGuild]) -> None:
    """Delete a guild.

    Parameters
    ----------
    guild : hikari.snowflakes.SnowflakeishOr[hikari.guilds.PartialGuild]
        The guild to delete. This may be the object or
        the ID of an existing guild.

    Raises
    ------
    hikari.errors.ForbiddenError
        If you are not the owner of the guild.
    hikari.errors.UnauthorizedError
        If you are unauthorized to make the request (invalid/missing token).
    hikari.errors.NotFoundError
        If the guild is not found or you are not in it.
    hikari.errors.RateLimitTooLongError
        Raised in the event that a rate limit occurs that is
        longer than `max_rate_limit` when making a request.
    hikari.errors.RateLimitedError
        Usually, Hikari will handle and retry on hitting
        rate-limits automatically. This includes most bucket-specific
        rate-limits and global rate-limits. In some rare edge cases,
        however, Discord implements other undocumented rules for
        rate-limiting, such as limits per attribute. These cannot be
        detected or handled normally by Hikari due to their undocumented
        nature, and will trigger this exception if they occur.
    hikari.errors.InternalServerError
        If an internal error occurs on Discord while handling the request.
    """
@abc.abstractmethod
async def fetch_guild_channels(
    self, guild: snowflakes.SnowflakeishOr[guilds.PartialGuild]
) -> typing.Sequence[channels.GuildChannel]:
    """Fetch the channels in a guild.

    Parameters
    ----------
    guild : hikari.snowflakes.SnowflakeishOr[hikari.guilds.PartialGuild]
        The guild to fetch the channels from. This may be the
        object or the ID of an existing guild.

    Returns
    -------
    typing.Sequence[hikari.channels.GuildChannel]
        The requested channels.

    Raises
    ------
    hikari.errors.UnauthorizedError
        If you are unauthorized to make the request (invalid/missing token).
    hikari.errors.NotFoundError
        If the guild is not found.
    hikari.errors.RateLimitTooLongError
        Raised in the event that a rate limit occurs that is
        longer than `max_rate_limit` when making a request.
    hikari.errors.RateLimitedError
        Usually, Hikari will handle and retry on hitting
        rate-limits automatically. This includes most bucket-specific
        rate-limits and global rate-limits. In some rare edge cases,
        however, Discord implements other undocumented rules for
        rate-limiting, such as limits per attribute. These cannot be
        detected or handled normally by Hikari due to their undocumented
        nature, and will trigger this exception if they occur.
    hikari.errors.InternalServerError
        If an internal error occurs on Discord while handling the request.
    """
@abc.abstractmethod
async def create_guild_text_channel(
    self,
    guild: snowflakes.SnowflakeishOr[guilds.PartialGuild],
    name: str,
    *,
    position: undefined.UndefinedOr[int] = undefined.UNDEFINED,
    topic: undefined.UndefinedOr[str] = undefined.UNDEFINED,
    nsfw: undefined.UndefinedOr[bool] = undefined.UNDEFINED,
    rate_limit_per_user: undefined.UndefinedOr[time.Intervalish] = undefined.UNDEFINED,
    permission_overwrites: undefined.UndefinedOr[
        typing.Sequence[channels.PermissionOverwrite]
    ] = undefined.UNDEFINED,
    category: undefined.UndefinedOr[snowflakes.SnowflakeishOr[channels.GuildCategory]] = undefined.UNDEFINED,
    reason: undefined.UndefinedOr[str] = undefined.UNDEFINED,
) -> channels.GuildTextChannel:
    """Create a text channel in a guild.

    Parameters
    ----------
    guild : hikari.snowflakes.SnowflakeishOr[hikari.guilds.PartialGuild]
        The guild to create the channel in. This may be the
        object or the ID of an existing guild.
    name : builtins.str
        The channel's name. Must be between 2 and 1000 characters.

    Other Parameters
    ----------------
    position : hikari.undefined.UndefinedOr[builtins.int]
        If provided, the position of the channel (relative to the
        category, if any).
    topic : hikari.undefined.UndefinedOr[builtins.str]
        If provided, the channel's topic. Maximum 1024 characters.
    nsfw : hikari.undefined.UndefinedOr[builtins.bool]
        If provided, whether to mark the channel as NSFW.
    rate_limit_per_user : hikari.undefined.UndefinedOr[hikari.internal.time.Intervalish]
        If provided, the number of seconds a user has to wait
        before being able to send another message in the channel.
        Maximum 21600 seconds.
    permission_overwrites : hikari.undefined.UndefinedOr[typing.Sequence[hikari.channels.PermissionOverwrite]]
        If provided, the permission overwrites for the channel.
    category : hikari.undefined.UndefinedOr[hikari.snowflakes.SnowflakeishOr[hikari.channels.GuildCategory]]
        The category to create the channel under. This may be the
        object or the ID of an existing category.
    reason : hikari.undefined.UndefinedOr[builtins.str]
        If provided, the reason that will be recorded in the audit logs.
        Maximum of 512 characters.

    Returns
    -------
    hikari.channels.GuildTextChannel
        The created channel.

    Raises
    ------
    hikari.errors.BadRequestError
        If any of the fields that are passed have an invalid value.
    hikari.errors.ForbiddenError
        If you are missing the `MANAGE_CHANNEL` permission.
    hikari.errors.UnauthorizedError
        If you are unauthorized to make the request (invalid/missing token).
    hikari.errors.NotFoundError
        If the guild is not found.
    hikari.errors.RateLimitTooLongError
        Raised in the event that a rate limit occurs that is
        longer than `max_rate_limit` when making a request.
    hikari.errors.RateLimitedError
        Usually, Hikari will handle and retry on hitting
        rate-limits automatically. This includes most bucket-specific
        rate-limits and global rate-limits. In some rare edge cases,
        however, Discord implements other undocumented rules for
        rate-limiting, such as limits per attribute. These cannot be
        detected or handled normally by Hikari due to their undocumented
        nature, and will trigger this exception if they occur.
    hikari.errors.InternalServerError
        If an internal error occurs on Discord while handling the request.
    """
@abc.abstractmethod
async def create_guild_news_channel(
    self,
    guild: snowflakes.SnowflakeishOr[guilds.PartialGuild],
    name: str,
    *,
    position: undefined.UndefinedOr[int] = undefined.UNDEFINED,
    topic: undefined.UndefinedOr[str] = undefined.UNDEFINED,
    nsfw: undefined.UndefinedOr[bool] = undefined.UNDEFINED,
    rate_limit_per_user: undefined.UndefinedOr[time.Intervalish] = undefined.UNDEFINED,
    permission_overwrites: undefined.UndefinedOr[
        typing.Sequence[channels.PermissionOverwrite]
    ] = undefined.UNDEFINED,
    category: undefined.UndefinedOr[snowflakes.SnowflakeishOr[channels.GuildCategory]] = undefined.UNDEFINED,
    reason: undefined.UndefinedOr[str] = undefined.UNDEFINED,
) -> channels.GuildNewsChannel:
    """Create a news channel in a guild.

    Parameters
    ----------
    guild : hikari.snowflakes.SnowflakeishOr[hikari.guilds.PartialGuild]
        The guild to create the channel in. This may be the
        object or the ID of an existing guild.
    name : builtins.str
        The channel's name. Must be between 2 and 1000 characters.

    Other Parameters
    ----------------
    position : hikari.undefined.UndefinedOr[builtins.int]
        If provided, the position of the channel (relative to the
        category, if any).
    topic : hikari.undefined.UndefinedOr[builtins.str]
        If provided, the channel's topic. Maximum 1024 characters.
    nsfw : hikari.undefined.UndefinedOr[builtins.bool]
        If provided, whether to mark the channel as NSFW.
    rate_limit_per_user : hikari.undefined.UndefinedOr[hikari.internal.time.Intervalish]
        If provided, the number of seconds a user has to wait
        before being able to send another message in the channel.
        Maximum 21600 seconds.
    permission_overwrites : hikari.undefined.UndefinedOr[typing.Sequence[hikari.channels.PermissionOverwrite]]
        If provided, the permission overwrites for the channel.
    category : hikari.undefined.UndefinedOr[hikari.snowflakes.SnowflakeishOr[hikari.channels.GuildCategory]]
        The category to create the channel under. This may be the
        object or the ID of an existing category.
    reason : hikari.undefined.UndefinedOr[builtins.str]
        If provided, the reason that will be recorded in the audit logs.
        Maximum of 512 characters.

    Returns
    -------
    hikari.channels.GuildNewsChannel
        The created channel.

    Raises
    ------
    hikari.errors.BadRequestError
        If any of the fields that are passed have an invalid value.
    hikari.errors.ForbiddenError
        If you are missing the `MANAGE_CHANNEL` permission.
    hikari.errors.UnauthorizedError
        If you are unauthorized to make the request (invalid/missing token).
    hikari.errors.NotFoundError
        If the guild is not found.
    hikari.errors.RateLimitTooLongError
        Raised in the event that a rate limit occurs that is
        longer than `max_rate_limit` when making a request.
    hikari.errors.RateLimitedError
        Usually, Hikari will handle and retry on hitting
        rate-limits automatically. This includes most bucket-specific
        rate-limits and global rate-limits. In some rare edge cases,
        however, Discord implements other undocumented rules for
        rate-limiting, such as limits per attribute. These cannot be
        detected or handled normally by Hikari due to their undocumented
        nature, and will trigger this exception if they occur.
    hikari.errors.InternalServerError
        If an internal error occurs on Discord while handling the request.
    """
@abc.abstractmethod
async def create_guild_voice_channel(
    self,
    guild: snowflakes.SnowflakeishOr[guilds.PartialGuild],
    name: str,
    *,
    position: undefined.UndefinedOr[int] = undefined.UNDEFINED,
    user_limit: undefined.UndefinedOr[int] = undefined.UNDEFINED,
    bitrate: undefined.UndefinedOr[int] = undefined.UNDEFINED,
    permission_overwrites: undefined.UndefinedOr[
        typing.Sequence[channels.PermissionOverwrite]
    ] = undefined.UNDEFINED,
    category: undefined.UndefinedOr[snowflakes.SnowflakeishOr[channels.GuildCategory]] = undefined.UNDEFINED,
    reason: undefined.UndefinedOr[str] = undefined.UNDEFINED,
) -> channels.GuildVoiceChannel:
    """Create a voice channel in a guild.

    Parameters
    ----------
    guild : hikari.snowflakes.SnowflakeishOr[hikari.guilds.PartialGuild]
        The guild to create the channel in. This may be the
        object or the ID of an existing guild.
    name : builtins.str
        The channel's name. Must be between 2 and 1000 characters.

    Other Parameters
    ----------------
    position : hikari.undefined.UndefinedOr[builtins.int]
        If provided, the position of the channel (relative to the
        category, if any).
    user_limit : hikari.undefined.UndefinedOr[builtins.int]
        If provided, the maximum users in the channel at once.
        Must be between 0 and 99 with 0 meaning no limit.
    bitrate : hikari.undefined.UndefinedOr[builtins.int]
        If provided, the bitrate for the channel. Must be
        between 8000 and 96000 or 8000 and 128000 for VIP
        servers.
    permission_overwrites : hikari.undefined.UndefinedOr[typing.Sequence[hikari.channels.PermissionOverwrite]]
        If provided, the permission overwrites for the channel.
    category : hikari.undefined.UndefinedOr[hikari.snowflakes.SnowflakeishOr[hikari.channels.GuildCategory]]
        The category to create the channel under. This may be the
        object or the ID of an existing category.
    reason : hikari.undefined.UndefinedOr[builtins.str]
        If provided, the reason that will be recorded in the audit logs.
        Maximum of 512 characters.

    Returns
    -------
    hikari.channels.GuildVoiceChannel
        The created channel.

    Raises
    ------
    hikari.errors.BadRequestError
        If any of the fields that are passed have an invalid value.
    hikari.errors.ForbiddenError
        If you are missing the `MANAGE_CHANNEL` permission.
    hikari.errors.UnauthorizedError
        If you are unauthorized to make the request (invalid/missing token).
    hikari.errors.NotFoundError
        If the guild is not found.
    hikari.errors.RateLimitTooLongError
        Raised in the event that a rate limit occurs that is
        longer than `max_rate_limit` when making a request.
    hikari.errors.RateLimitedError
        Usually, Hikari will handle and retry on hitting
        rate-limits automatically. This includes most bucket-specific
        rate-limits and global rate-limits. In some rare edge cases,
        however, Discord implements other undocumented rules for
        rate-limiting, such as limits per attribute. These cannot be
        detected or handled normally by Hikari due to their undocumented
        nature, and will trigger this exception if they occur.
    hikari.errors.InternalServerError
        If an internal error occurs on Discord while handling the request.
    """
@abc.abstractmethod
async def create_guild_category(
    self,
    guild: snowflakes.SnowflakeishOr[guilds.PartialGuild],
    name: str,
    *,
    position: undefined.UndefinedOr[int] = undefined.UNDEFINED,
    permission_overwrites: undefined.UndefinedOr[
        typing.Sequence[channels.PermissionOverwrite]
    ] = undefined.UNDEFINED,
    reason: undefined.UndefinedOr[str] = undefined.UNDEFINED,
) -> channels.GuildCategory:
    """Create a category in a guild.

    Parameters
    ----------
    guild : hikari.snowflakes.SnowflakeishOr[hikari.guilds.PartialGuild]
        The guild to create the category in. This may be the
        object or the ID of an existing guild.
    name : builtins.str
        The category's name. Must be between 2 and 1000 characters.

    Other Parameters
    ----------------
    position : hikari.undefined.UndefinedOr[builtins.int]
        If provided, the position of the category.
    permission_overwrites : hikari.undefined.UndefinedOr[typing.Sequence[hikari.channels.PermissionOverwrite]]
        If provided, the permission overwrites for the category.
    reason : hikari.undefined.UndefinedOr[builtins.str]
        If provided, the reason that will be recorded in the audit logs.
        Maximum of 512 characters.

    Returns
    -------
    hikari.channels.GuildCategory
        The created category.

    Raises
    ------
    hikari.errors.BadRequestError
        If any of the fields that are passed have an invalid value.
    hikari.errors.ForbiddenError
        If you are missing the `MANAGE_CHANNEL` permission.
    hikari.errors.UnauthorizedError
        If you are unauthorized to make the request (invalid/missing token).
    hikari.errors.NotFoundError
        If the guild is not found.
    hikari.errors.RateLimitTooLongError
        Raised in the event that a rate limit occurs that is
        longer than `max_rate_limit` when making a request.
    hikari.errors.RateLimitedError
        Usually, Hikari will handle and retry on hitting
        rate-limits automatically. This includes most bucket-specific
        rate-limits and global rate-limits. In some rare edge cases,
        however, Discord implements other undocumented rules for
        rate-limiting, such as limits per attribute. These cannot be
        detected or handled normally by Hikari due to their undocumented
        nature, and will trigger this exception if they occur.
    hikari.errors.InternalServerError
        If an internal error occurs on Discord while handling the request.
    """
@abc.abstractmethod
async def reposition_channels(
    self,
    guild: snowflakes.SnowflakeishOr[guilds.PartialGuild],
    positions: typing.Mapping[int, snowflakes.SnowflakeishOr[channels.GuildChannel]],
) -> None:
    """Reposition the channels in a guild.

    Parameters
    ----------
    guild : hikari.snowflakes.SnowflakeishOr[hikari.guilds.PartialGuild]
        The guild to reposition the channels in. This may be the
        object or the ID of an existing guild.
    positions : typing.Mapping[builtins.int, hikari.snowflakes.SnowflakeishOr[hikari.channels.GuildChannel]]
        A mapping of the object or the ID of an existing channel to
        the new position, relative to their parent category, if any.

    Raises
    ------
    hikari.errors.ForbiddenError
        If you are missing the `MANAGE_CHANNEL` permission.
    hikari.errors.UnauthorizedError
        If you are unauthorized to make the request (invalid/missing token).
    hikari.errors.NotFoundError
        If the guild is not found.
    hikari.errors.RateLimitTooLongError
        Raised in the event that a rate limit occurs that is
        longer than `max_rate_limit` when making a request.
    hikari.errors.RateLimitedError
        Usually, Hikari will handle and retry on hitting
        rate-limits automatically. This includes most bucket-specific
        rate-limits and global rate-limits. In some rare edge cases,
        however, Discord implements other undocumented rules for
        rate-limiting, such as limits per attribute. These cannot be
        detected or handled normally by Hikari due to their undocumented
        nature, and will trigger this exception if they occur.
    hikari.errors.InternalServerError
        If an internal error occurs on Discord while handling the request.
    """
@abc.abstractmethod
async def fetch_member(
    self,
    guild: snowflakes.SnowflakeishOr[guilds.PartialGuild],
    user: snowflakes.SnowflakeishOr[users.PartialUser],
) -> guilds.Member:
    """Fetch a guild member.

    Parameters
    ----------
    guild : hikari.snowflakes.SnowflakeishOr[hikari.guilds.PartialGuild]
        The guild to get the member from. This may be the
        object or the ID of an existing guild.
    user : hikari.snowflakes.SnowflakeishOr[hikari.users.PartialUser]
        The user to get the member for. This may be the
        object or the ID of an existing user.

    Returns
    -------
    hikari.guilds.Member
        The requested member.

    Raises
    ------
    hikari.errors.UnauthorizedError
        If you are unauthorized to make the request (invalid/missing token).
    hikari.errors.NotFoundError
        If the guild or the user are not found.
    hikari.errors.RateLimitTooLongError
        Raised in the event that a rate limit occurs that is
        longer than `max_rate_limit` when making a request.
    hikari.errors.RateLimitedError
        Usually, Hikari will handle and retry on hitting
        rate-limits automatically. This includes most bucket-specific
        rate-limits and global rate-limits. In some rare edge cases,
        however, Discord implements other undocumented rules for
        rate-limiting, such as limits per attribute. These cannot be
        detected or handled normally by Hikari due to their undocumented
        nature, and will trigger this exception if they occur.
    hikari.errors.InternalServerError
        If an internal error occurs on Discord while handling the request.
    """
@abc.abstractmethod
def fetch_members(
    self, guild: snowflakes.SnowflakeishOr[guilds.PartialGuild]
) -> iterators.LazyIterator[guilds.Member]:
    """Fetch the members from a guild.

    Parameters
    ----------
    guild : hikari.snowflakes.SnowflakeishOr[hikari.guilds.PartialGuild]
        The guild to fetch the members of. This may be the
        object or the ID of an existing guild.

    Returns
    -------
    hikari.iterators.LazyIterator[hikari.guilds.Member]
        An iterator to fetch the members.

    !!! note
        This call is not a coroutine function, it returns a special type of
        lazy iterator that will perform API calls as you iterate across it.
        See `hikari.iterators` for the full API for this iterator type.

    Raises
    ------
    hikari.errors.UnauthorizedError
        If you are unauthorized to make the request (invalid/missing token).
    hikari.errors.NotFoundError
        If the guild is not found.
    hikari.errors.RateLimitTooLongError
        Raised in the event that a rate limit occurs that is
        longer than `max_rate_limit` when making a request.
    hikari.errors.RateLimitedError
        Usually, Hikari will handle and retry on hitting
        rate-limits automatically. This includes most bucket-specific
        rate-limits and global rate-limits. In some rare edge cases,
        however, Discord implements other undocumented rules for
        rate-limiting, such as limits per attribute. These cannot be
        detected or handled normally by Hikari due to their undocumented
        nature, and will trigger this exception if they occur.
    hikari.errors.InternalServerError
        If an internal error occurs on Discord while handling the request.

    !!! note
        The exceptions on this endpoint will only be raised once the
        result is awaited or iterated over. Invoking this function
        itself will not raise anything.
    """
@abc.abstractmethod
async def edit_member(
    self,
    guild: snowflakes.SnowflakeishOr[guilds.PartialGuild],
    user: snowflakes.SnowflakeishOr[users.PartialUser],
    *,
    nick: undefined.UndefinedNoneOr[str] = undefined.UNDEFINED,
    roles: undefined.UndefinedOr[
        typing.Collection[snowflakes.SnowflakeishOr[guilds.PartialRole]]
    ] = undefined.UNDEFINED,
    mute: undefined.UndefinedOr[bool] = undefined.UNDEFINED,
    deaf: undefined.UndefinedOr[bool] = undefined.UNDEFINED,
    voice_channel: undefined.UndefinedNoneOr[
        snowflakes.SnowflakeishOr[channels.GuildVoiceChannel]
    ] = undefined.UNDEFINED,
    reason: undefined.UndefinedOr[str] = undefined.UNDEFINED,
) -> None:
    """Edit a guild member.

    Parameters
    ----------
    guild : hikari.snowflakes.SnowflakeishOr[hikari.guilds.PartialGuild]
        The guild to edit the member in. This may be the object
        or the ID of an existing guild.
    user : hikari.snowflakes.SnowflakeishOr[hikari.users.PartialUser]
        The user to edit. This may be the object
        or the ID of an existing user.

    Other Parameters
    ----------------
    nick : hikari.undefined.UndefinedNoneOr[builtins.str]
        If provided, the new nick for the member. If `builtins.None`,
        will remove the members nick.

        Requires the `MANAGE_NICKNAMES` permission.
    roles : hikari.undefined.UndefinedOr[typing.Collection[hikari.snowflakes.SnowflakeishOr[hikari.guilds.PartialRole]]]
        If provided, the new roles for the member.

        Requires the `MANAGE_ROLES` permission.
    mute : hikari.undefined.UndefinedOr[builtins.bool]
        If provided, the new server mute state for the member.

        Requires the `MUTE_MEMBERS` permission.
    deaf : hikari.undefined.UndefinedOr[builtins.bool]
        If provided, the new server deaf state for the member.

        Requires the `DEAFEN_MEMBERS` permission.
    voice_channel : hikari.undefined.UndefinedNoneOr[hikari.snowflakes.SnowflakeishOr[hikari.channels.GuildVoiceChannel]]
        If provided, `builtins.None` or the object or the ID of
        an existing voice channel to move the member to.
        If `builtins.None`, will disconnect the member from voice.

        Requires the `MOVE_MEMBERS` permission and the `CONNECT`
        permission in the original voice channel and the target
        voice channel.

        !!! note
            If the member is not in a voice channel, this will
            take no effect.
    reason : hikari.undefined.UndefinedOr[builtins.str]
        If provided, the reason that will be recorded in the audit logs.
        Maximum of 512 characters.

    Raises
    ------
    hikari.errors.BadRequestError
        If any of the fields that are passed have an invalid value.
    hikari.errors.ForbiddenError
        If you are missing a permission to do an action.
    hikari.errors.UnauthorizedError
        If you are unauthorized to make the request (invalid/missing token).
    hikari.errors.NotFoundError
        If the guild or the user are not found.
    hikari.errors.RateLimitTooLongError
        Raised in the event that a rate limit occurs that is
        longer than `max_rate_limit` when making a request.
    hikari.errors.RateLimitedError
        Usually, Hikari will handle and retry on hitting
        rate-limits automatically. This includes most bucket-specific
        rate-limits and global rate-limits. In some rare edge cases,
        however, Discord implements other undocumented rules for
        rate-limiting, such as limits per attribute. These cannot be
        detected or handled normally by Hikari due to their undocumented
        nature, and will trigger this exception if they occur.
    hikari.errors.InternalServerError
        If an internal error occurs on Discord while handling the request.
    """
@abc.abstractmethod
async def edit_my_nick(
    self,
    # Widened from `guilds.Guild` to `guilds.PartialGuild` to match the
    # docstring and every sibling method; backward-compatible for callers.
    guild: snowflakes.SnowflakeishOr[guilds.PartialGuild],
    nick: typing.Optional[str],
    *,
    reason: undefined.UndefinedOr[str] = undefined.UNDEFINED,
) -> None:
    """Edit the associated token's member nick.

    Parameters
    ----------
    guild : hikari.snowflakes.SnowflakeishOr[hikari.guilds.PartialGuild]
        The guild to edit the nick in. This may be the object
        or the ID of an existing guild.
    nick : typing.Optional[builtins.str]
        The new nick. If `builtins.None`,
        will remove the nick.

    Other Parameters
    ----------------
    reason : hikari.undefined.UndefinedOr[builtins.str]
        If provided, the reason that will be recorded in the audit logs.
        Maximum of 512 characters.

    Raises
    ------
    hikari.errors.ForbiddenError
        If you are missing the `CHANGE_NICKNAME` permission.
    hikari.errors.UnauthorizedError
        If you are unauthorized to make the request (invalid/missing token).
    hikari.errors.NotFoundError
        If the guild is not found.
    hikari.errors.RateLimitTooLongError
        Raised in the event that a rate limit occurs that is
        longer than `max_rate_limit` when making a request.
    hikari.errors.RateLimitedError
        Usually, Hikari will handle and retry on hitting
        rate-limits automatically. This includes most bucket-specific
        rate-limits and global rate-limits. In some rare edge cases,
        however, Discord implements other undocumented rules for
        rate-limiting, such as limits per attribute. These cannot be
        detected or handled normally by Hikari due to their undocumented
        nature, and will trigger this exception if they occur.
    hikari.errors.InternalServerError
        If an internal error occurs on Discord while handling the request.
    """
@abc.abstractmethod
async def add_role_to_member(
    self,
    guild: snowflakes.SnowflakeishOr[guilds.PartialGuild],
    user: snowflakes.SnowflakeishOr[users.PartialUser],
    role: snowflakes.SnowflakeishOr[guilds.PartialRole],
    *,
    reason: undefined.UndefinedOr[str] = undefined.UNDEFINED,
) -> None:
    """Add a role to a member.

    Parameters
    ----------
    guild : hikari.snowflakes.SnowflakeishOr[hikari.guilds.PartialGuild]
        The guild where the member is in. This may be the
        object or the ID of an existing guild.
    user : hikari.snowflakes.SnowflakeishOr[hikari.users.PartialUser]
        The user to add the role to. This may be the
        object or the ID of an existing user.
    role : hikari.snowflakes.SnowflakeishOr[hikari.guilds.PartialRole]
        The role to add. This may be the object or the
        ID of an existing role.

    Other Parameters
    ----------------
    reason : hikari.undefined.UndefinedOr[builtins.str]
        If provided, the reason that will be recorded in the audit logs.
        Maximum of 512 characters.

    Raises
    ------
    hikari.errors.ForbiddenError
        If you are missing the `MANAGE_ROLES` permission.
    hikari.errors.UnauthorizedError
        If you are unauthorized to make the request (invalid/missing token).
    hikari.errors.NotFoundError
        If the guild, user or role are not found.
    hikari.errors.RateLimitTooLongError
        Raised in the event that a rate limit occurs that is
        longer than `max_rate_limit` when making a request.
    hikari.errors.RateLimitedError
        Usually, Hikari will handle and retry on hitting
        rate-limits automatically. This includes most bucket-specific
        rate-limits and global rate-limits. In some rare edge cases,
        however, Discord implements other undocumented rules for
        rate-limiting, such as limits per attribute. These cannot be
        detected or handled normally by Hikari due to their undocumented
        nature, and will trigger this exception if they occur.
    hikari.errors.InternalServerError
        If an internal error occurs on Discord while handling the request.
    """
    @abc.abstractmethod
    async def remove_role_from_member(
        self,
        guild: snowflakes.SnowflakeishOr[guilds.PartialGuild],
        user: snowflakes.SnowflakeishOr[users.PartialUser],
        role: snowflakes.SnowflakeishOr[guilds.PartialRole],
        *,
        reason: undefined.UndefinedOr[str] = undefined.UNDEFINED,
    ) -> None:
        """Remove a role from a member.

        Parameters
        ----------
        guild : hikari.snowflakes.SnowflakeishOr[hikari.guilds.PartialGuild]
            The guild where the member is in. This may be the
            object or the ID of an existing guild.
        user : hikari.snowflakes.SnowflakeishOr[hikari.users.PartialUser]
            The user to remove the role from. This may be the
            object or the ID of an existing user.
        role : hikari.snowflakes.SnowflakeishOr[hikari.guilds.PartialRole]
            The role to remove. This may be the object or the
            ID of an existing role.

        Other Parameters
        ----------------
        reason : hikari.undefined.UndefinedOr[builtins.str]
            If provided, the reason that will be recorded in the audit logs.
            Maximum of 512 characters.

        Raises
        ------
        hikari.errors.ForbiddenError
            If you are missing the `MANAGE_ROLES` permission.
        hikari.errors.UnauthorizedError
            If you are unauthorized to make the request (invalid/missing token).
        hikari.errors.NotFoundError
            If the guild, user or role are not found.
        hikari.errors.RateLimitTooLongError
            Raised in the event that a rate limit occurs that is
            longer than `max_rate_limit` when making a request.
        hikari.errors.RateLimitedError
            Usually, Hikari will handle and retry on hitting
            rate-limits automatically. This includes most bucket-specific
            rate-limits and global rate-limits. In some rare edge cases,
            however, Discord implements other undocumented rules for
            rate-limiting, such as limits per attribute. These cannot be
            detected or handled normally by Hikari due to their undocumented
            nature, and will trigger this exception if they occur.
        hikari.errors.InternalServerError
            If an internal error occurs on Discord while handling the request.
        """
    @abc.abstractmethod
    async def kick_user(
        self,
        guild: snowflakes.SnowflakeishOr[guilds.PartialGuild],
        user: snowflakes.SnowflakeishOr[users.PartialUser],
        *,
        reason: undefined.UndefinedOr[str] = undefined.UNDEFINED,
    ) -> None:
        """Kick a member from a guild.

        Parameters
        ----------
        guild : hikari.snowflakes.SnowflakeishOr[hikari.guilds.PartialGuild]
            The guild to kick the member from. This may be the
            object or the ID of an existing guild.
        user : hikari.snowflakes.SnowflakeishOr[hikari.users.PartialUser]
            The user to kick. This may be the object
            or the ID of an existing user.

        Other Parameters
        ----------------
        reason : hikari.undefined.UndefinedOr[builtins.str]
            If provided, the reason that will be recorded in the audit logs.
            Maximum of 512 characters.

        Raises
        ------
        hikari.errors.ForbiddenError
            If you are missing the `KICK_MEMBERS` permission.
        hikari.errors.UnauthorizedError
            If you are unauthorized to make the request (invalid/missing token).
        hikari.errors.NotFoundError
            If the guild or user are not found.
        hikari.errors.RateLimitTooLongError
            Raised in the event that a rate limit occurs that is
            longer than `max_rate_limit` when making a request.
        hikari.errors.RateLimitedError
            Usually, Hikari will handle and retry on hitting
            rate-limits automatically. This includes most bucket-specific
            rate-limits and global rate-limits. In some rare edge cases,
            however, Discord implements other undocumented rules for
            rate-limiting, such as limits per attribute. These cannot be
            detected or handled normally by Hikari due to their undocumented
            nature, and will trigger this exception if they occur.
        hikari.errors.InternalServerError
            If an internal error occurs on Discord while handling the request.
        """

    kick_member = kick_user
    """This is simply an alias for readability."""
    @abc.abstractmethod
    async def ban_user(
        self,
        guild: snowflakes.SnowflakeishOr[guilds.PartialGuild],
        user: snowflakes.SnowflakeishOr[users.PartialUser],
        *,
        delete_message_days: undefined.UndefinedOr[int] = undefined.UNDEFINED,
        reason: undefined.UndefinedOr[str] = undefined.UNDEFINED,
    ) -> None:
        """Ban a member from a guild.

        Parameters
        ----------
        guild : hikari.snowflakes.SnowflakeishOr[hikari.guilds.PartialGuild]
            The guild to ban the member from. This may be the
            object or the ID of an existing guild.
        user : hikari.snowflakes.SnowflakeishOr[hikari.users.PartialUser]
            The user to ban. This may be the object
            or the ID of an existing user.

        Other Parameters
        ----------------
        delete_message_days : hikari.undefined.UndefinedOr[builtins.int]
            If provided, the number of days to delete messages for.
            This must be between 0 and 7.
        reason : hikari.undefined.UndefinedOr[builtins.str]
            If provided, the reason that will be recorded in the audit logs.
            Maximum of 512 characters.

        Raises
        ------
        hikari.errors.BadRequestError
            If any of the fields that are passed have an invalid value.
        hikari.errors.ForbiddenError
            If you are missing the `BAN_MEMBERS` permission.
        hikari.errors.UnauthorizedError
            If you are unauthorized to make the request (invalid/missing token).
        hikari.errors.NotFoundError
            If the guild or user are not found.
        hikari.errors.RateLimitTooLongError
            Raised in the event that a rate limit occurs that is
            longer than `max_rate_limit` when making a request.
        hikari.errors.RateLimitedError
            Usually, Hikari will handle and retry on hitting
            rate-limits automatically. This includes most bucket-specific
            rate-limits and global rate-limits. In some rare edge cases,
            however, Discord implements other undocumented rules for
            rate-limiting, such as limits per attribute. These cannot be
            detected or handled normally by Hikari due to their undocumented
            nature, and will trigger this exception if they occur.
        hikari.errors.InternalServerError
            If an internal error occurs on Discord while handling the request.
        """

    ban_member = ban_user
    """This is simply an alias for readability."""
    @abc.abstractmethod
    async def unban_user(
        self,
        guild: snowflakes.SnowflakeishOr[guilds.PartialGuild],
        user: snowflakes.SnowflakeishOr[users.PartialUser],
        *,
        reason: undefined.UndefinedOr[str] = undefined.UNDEFINED,
    ) -> None:
        """Unban a member from a guild.

        Parameters
        ----------
        guild : hikari.snowflakes.SnowflakeishOr[hikari.guilds.PartialGuild]
            The guild to unban the member from. This may be the
            object or the ID of an existing guild.
        user : hikari.snowflakes.SnowflakeishOr[hikari.users.PartialUser]
            The user to unban. This may be the object
            or the ID of an existing user.

        Other Parameters
        ----------------
        reason : hikari.undefined.UndefinedOr[builtins.str]
            If provided, the reason that will be recorded in the audit logs.
            Maximum of 512 characters.

        Raises
        ------
        hikari.errors.ForbiddenError
            If you are missing the `BAN_MEMBERS` permission.
        hikari.errors.UnauthorizedError
            If you are unauthorized to make the request (invalid/missing token).
        hikari.errors.NotFoundError
            If the guild or user are not found.
        hikari.errors.RateLimitTooLongError
            Raised in the event that a rate limit occurs that is
            longer than `max_rate_limit` when making a request.
        hikari.errors.RateLimitedError
            Usually, Hikari will handle and retry on hitting
            rate-limits automatically. This includes most bucket-specific
            rate-limits and global rate-limits. In some rare edge cases,
            however, Discord implements other undocumented rules for
            rate-limiting, such as limits per attribute. These cannot be
            detected or handled normally by Hikari due to their undocumented
            nature, and will trigger this exception if they occur.
        hikari.errors.InternalServerError
            If an internal error occurs on Discord while handling the request.
        """

    unban_member = unban_user
    """This is simply an alias for readability."""
    @abc.abstractmethod
    async def fetch_ban(
        self,
        guild: snowflakes.SnowflakeishOr[guilds.PartialGuild],
        user: snowflakes.SnowflakeishOr[users.PartialUser],
    ) -> guilds.GuildMemberBan:
        """Fetch the guild's ban info for a user.

        Parameters
        ----------
        guild : hikari.snowflakes.SnowflakeishOr[hikari.guilds.PartialGuild]
            The guild to fetch the ban from. This may be the
            object or the ID of an existing guild.
        user : hikari.snowflakes.SnowflakeishOr[hikari.users.PartialUser]
            The user to fetch the ban of. This may be the
            object or the ID of an existing user.

        Returns
        -------
        hikari.guilds.GuildMemberBan
            The requested ban info.

        Raises
        ------
        hikari.errors.ForbiddenError
            If you are missing the `BAN_MEMBERS` permission.
        hikari.errors.UnauthorizedError
            If you are unauthorized to make the request (invalid/missing token).
        hikari.errors.NotFoundError
            If the guild or user are not found or if the user
            is not banned.
        hikari.errors.RateLimitTooLongError
            Raised in the event that a rate limit occurs that is
            longer than `max_rate_limit` when making a request.
        hikari.errors.RateLimitedError
            Usually, Hikari will handle and retry on hitting
            rate-limits automatically. This includes most bucket-specific
            rate-limits and global rate-limits. In some rare edge cases,
            however, Discord implements other undocumented rules for
            rate-limiting, such as limits per attribute. These cannot be
            detected or handled normally by Hikari due to their undocumented
            nature, and will trigger this exception if they occur.
        hikari.errors.InternalServerError
            If an internal error occurs on Discord while handling the request.
        """
    @abc.abstractmethod
    async def fetch_bans(
        self,
        guild: snowflakes.SnowflakeishOr[guilds.PartialGuild],
    ) -> typing.Sequence[guilds.GuildMemberBan]:
        """Fetch the bans of a guild.

        Parameters
        ----------
        guild : hikari.snowflakes.SnowflakeishOr[hikari.guilds.PartialGuild]
            The guild to fetch the bans from. This may be the
            object or the ID of an existing guild.

        Returns
        -------
        typing.Sequence[hikari.guilds.GuildMemberBan]
            The requested bans.

        Raises
        ------
        hikari.errors.ForbiddenError
            If you are missing the `BAN_MEMBERS` permission.
        hikari.errors.UnauthorizedError
            If you are unauthorized to make the request (invalid/missing token).
        hikari.errors.NotFoundError
            If the guild is not found.
        hikari.errors.RateLimitTooLongError
            Raised in the event that a rate limit occurs that is
            longer than `max_rate_limit` when making a request.
        hikari.errors.RateLimitedError
            Usually, Hikari will handle and retry on hitting
            rate-limits automatically. This includes most bucket-specific
            rate-limits and global rate-limits. In some rare edge cases,
            however, Discord implements other undocumented rules for
            rate-limiting, such as limits per attribute. These cannot be
            detected or handled normally by Hikari due to their undocumented
            nature, and will trigger this exception if they occur.
        hikari.errors.InternalServerError
            If an internal error occurs on Discord while handling the request.
        """
    @abc.abstractmethod
    async def fetch_roles(
        self,
        guild: snowflakes.SnowflakeishOr[guilds.PartialGuild],
    ) -> typing.Sequence[guilds.Role]:
        """Fetch the roles of a guild.

        Parameters
        ----------
        guild : hikari.snowflakes.SnowflakeishOr[hikari.guilds.PartialGuild]
            The guild to fetch the roles from. This may be the
            object or the ID of an existing guild.

        Returns
        -------
        typing.Sequence[hikari.guilds.Role]
            The requested roles.

        Raises
        ------
        hikari.errors.UnauthorizedError
            If you are unauthorized to make the request (invalid/missing token).
        hikari.errors.NotFoundError
            If the guild is not found.
        hikari.errors.RateLimitTooLongError
            Raised in the event that a rate limit occurs that is
            longer than `max_rate_limit` when making a request.
        hikari.errors.RateLimitedError
            Usually, Hikari will handle and retry on hitting
            rate-limits automatically. This includes most bucket-specific
            rate-limits and global rate-limits. In some rare edge cases,
            however, Discord implements other undocumented rules for
            rate-limiting, such as limits per attribute. These cannot be
            detected or handled normally by Hikari due to their undocumented
            nature, and will trigger this exception if they occur.
        hikari.errors.InternalServerError
            If an internal error occurs on Discord while handling the request.
        """
    @abc.abstractmethod
    async def create_role(
        self,
        guild: snowflakes.SnowflakeishOr[guilds.PartialGuild],
        *,
        name: undefined.UndefinedOr[str] = undefined.UNDEFINED,
        permissions: undefined.UndefinedOr[permissions_.Permissions] = undefined.UNDEFINED,
        color: undefined.UndefinedOr[colors.Colorish] = undefined.UNDEFINED,
        colour: undefined.UndefinedOr[colors.Colorish] = undefined.UNDEFINED,
        hoist: undefined.UndefinedOr[bool] = undefined.UNDEFINED,
        mentionable: undefined.UndefinedOr[bool] = undefined.UNDEFINED,
        reason: undefined.UndefinedOr[str] = undefined.UNDEFINED,
    ) -> guilds.Role:
        """Create a role.

        Parameters
        ----------
        guild : hikari.snowflakes.SnowflakeishOr[hikari.guilds.PartialGuild]
            The guild to create the role in. This may be the
            object or the ID of an existing guild.

        Other Parameters
        ----------------
        name : hikari.undefined.UndefinedOr[builtins.str]
            If provided, the name for the role.
        permissions : hikari.undefined.UndefinedOr[hikari.permissions.Permissions]
            The permissions to give the role. This will default to setting
            NO roles if left to the default value. This is in contrast to
            default behaviour on Discord where some random permissions will
            be set by default.
        color : hikari.undefined.UndefinedOr[hikari.colors.Colorish]
            If provided, the role's color.
        colour : hikari.undefined.UndefinedOr[hikari.colors.Colorish]
            An alias for `color`.
        hoist : hikari.undefined.UndefinedOr[builtins.bool]
            If provided, whether to hoist the role.
        mentionable : hikari.undefined.UndefinedOr[builtins.bool]
            If provided, whether to make the role mentionable.
        reason : hikari.undefined.UndefinedOr[builtins.str]
            If provided, the reason that will be recorded in the audit logs.
            Maximum of 512 characters.

        Returns
        -------
        hikari.guilds.Role
            The created role.

        Raises
        ------
        builtins.TypeError
            If both `color` and `colour` are specified.
        hikari.errors.BadRequestError
            If any of the fields that are passed have an invalid value.
        hikari.errors.ForbiddenError
            If you are missing the `MANAGE_ROLES` permission.
        hikari.errors.UnauthorizedError
            If you are unauthorized to make the request (invalid/missing token).
        hikari.errors.NotFoundError
            If the guild is not found.
        hikari.errors.RateLimitTooLongError
            Raised in the event that a rate limit occurs that is
            longer than `max_rate_limit` when making a request.
        hikari.errors.RateLimitedError
            Usually, Hikari will handle and retry on hitting
            rate-limits automatically. This includes most bucket-specific
            rate-limits and global rate-limits. In some rare edge cases,
            however, Discord implements other undocumented rules for
            rate-limiting, such as limits per attribute. These cannot be
            detected or handled normally by Hikari due to their undocumented
            nature, and will trigger this exception if they occur.
        hikari.errors.InternalServerError
            If an internal error occurs on Discord while handling the request.
        """
    @abc.abstractmethod
    async def reposition_roles(
        self,
        guild: snowflakes.SnowflakeishOr[guilds.PartialGuild],
        positions: typing.Mapping[int, snowflakes.SnowflakeishOr[guilds.PartialRole]],
    ) -> None:
        """Reposition the roles in a guild.

        Parameters
        ----------
        guild : hikari.snowflakes.SnowflakeishOr[hikari.guilds.PartialGuild]
            The guild to reposition the roles in. This may be
            the object or the ID of an existing guild.
        positions : typing.Mapping[builtins.int, hikari.snowflakes.SnowflakeishOr[hikari.guilds.PartialRole]]
            A mapping of the position to the role.

        Raises
        ------
        hikari.errors.ForbiddenError
            If you are missing the `MANAGE_ROLES` permission.
        hikari.errors.UnauthorizedError
            If you are unauthorized to make the request (invalid/missing token).
        hikari.errors.NotFoundError
            If the guild is not found.
        hikari.errors.RateLimitTooLongError
            Raised in the event that a rate limit occurs that is
            longer than `max_rate_limit` when making a request.
        hikari.errors.RateLimitedError
            Usually, Hikari will handle and retry on hitting
            rate-limits automatically. This includes most bucket-specific
            rate-limits and global rate-limits. In some rare edge cases,
            however, Discord implements other undocumented rules for
            rate-limiting, such as limits per attribute. These cannot be
            detected or handled normally by Hikari due to their undocumented
            nature, and will trigger this exception if they occur.
        hikari.errors.InternalServerError
            If an internal error occurs on Discord while handling the request.
        """
    @abc.abstractmethod
    async def edit_role(
        self,
        guild: snowflakes.SnowflakeishOr[guilds.PartialGuild],
        role: snowflakes.SnowflakeishOr[guilds.PartialRole],
        *,
        name: undefined.UndefinedOr[str] = undefined.UNDEFINED,
        permissions: undefined.UndefinedOr[permissions_.Permissions] = undefined.UNDEFINED,
        color: undefined.UndefinedOr[colors.Colorish] = undefined.UNDEFINED,
        colour: undefined.UndefinedOr[colors.Colorish] = undefined.UNDEFINED,
        hoist: undefined.UndefinedOr[bool] = undefined.UNDEFINED,
        mentionable: undefined.UndefinedOr[bool] = undefined.UNDEFINED,
        reason: undefined.UndefinedOr[str] = undefined.UNDEFINED,
    ) -> guilds.Role:
        """Edit a role.

        Parameters
        ----------
        guild : hikari.snowflakes.SnowflakeishOr[hikari.guilds.PartialGuild]
            The guild to edit the role in. This may be the
            object or the ID of an existing guild.
        role : hikari.snowflakes.SnowflakeishOr[hikari.guilds.PartialRole]
            The role to edit. This may be the object or the
            ID of an existing role.

        Other Parameters
        ----------------
        name : hikari.undefined.UndefinedOr[builtins.str]
            If provided, the new name for the role.
        permissions : hikari.undefined.UndefinedOr[hikari.permissions.Permissions]
            If provided, the new permissions for the role.
        color : hikari.undefined.UndefinedOr[hikari.colors.Colorish]
            If provided, the new color for the role.
        colour : hikari.undefined.UndefinedOr[hikari.colors.Colorish]
            An alias for `color`.
        hoist : hikari.undefined.UndefinedOr[builtins.bool]
            If provided, whether to hoist the role.
        mentionable : hikari.undefined.UndefinedOr[builtins.bool]
            If provided, whether to make the role mentionable.
        reason : hikari.undefined.UndefinedOr[builtins.str]
            If provided, the reason that will be recorded in the audit logs.
            Maximum of 512 characters.

        Returns
        -------
        hikari.guilds.Role
            The edited role.

        Raises
        ------
        builtins.TypeError
            If both `color` and `colour` are specified.
        hikari.errors.BadRequestError
            If any of the fields that are passed have an invalid value.
        hikari.errors.ForbiddenError
            If you are missing the `MANAGE_ROLES` permission.
        hikari.errors.UnauthorizedError
            If you are unauthorized to make the request (invalid/missing token).
        hikari.errors.NotFoundError
            If the guild or role are not found.
        hikari.errors.RateLimitTooLongError
            Raised in the event that a rate limit occurs that is
            longer than `max_rate_limit` when making a request.
        hikari.errors.RateLimitedError
            Usually, Hikari will handle and retry on hitting
            rate-limits automatically. This includes most bucket-specific
            rate-limits and global rate-limits. In some rare edge cases,
            however, Discord implements other undocumented rules for
            rate-limiting, such as limits per attribute. These cannot be
            detected or handled normally by Hikari due to their undocumented
            nature, and will trigger this exception if they occur.
        hikari.errors.InternalServerError
            If an internal error occurs on Discord while handling the request.
        """
    @abc.abstractmethod
    async def delete_role(
        self,
        guild: snowflakes.SnowflakeishOr[guilds.PartialGuild],
        role: snowflakes.SnowflakeishOr[guilds.PartialRole],
    ) -> None:
        """Delete a role.

        Parameters
        ----------
        guild : hikari.snowflakes.SnowflakeishOr[hikari.guilds.PartialGuild]
            The guild to delete the role in. This may be the
            object or the ID of an existing guild.
        role : hikari.snowflakes.SnowflakeishOr[hikari.guilds.PartialRole]
            The role to delete. This may be the object or the
            ID of an existing role.

        Raises
        ------
        hikari.errors.ForbiddenError
            If you are missing the `MANAGE_ROLES` permission.
        hikari.errors.UnauthorizedError
            If you are unauthorized to make the request (invalid/missing token).
        hikari.errors.NotFoundError
            If the guild or role are not found.
        hikari.errors.RateLimitTooLongError
            Raised in the event that a rate limit occurs that is
            longer than `max_rate_limit` when making a request.
        hikari.errors.RateLimitedError
            Usually, Hikari will handle and retry on hitting
            rate-limits automatically. This includes most bucket-specific
            rate-limits and global rate-limits. In some rare edge cases,
            however, Discord implements other undocumented rules for
            rate-limiting, such as limits per attribute. These cannot be
            detected or handled normally by Hikari due to their undocumented
            nature, and will trigger this exception if they occur.
        hikari.errors.InternalServerError
            If an internal error occurs on Discord while handling the request.
        """
    @abc.abstractmethod
    async def estimate_guild_prune_count(
        self,
        guild: snowflakes.SnowflakeishOr[guilds.PartialGuild],
        *,
        days: undefined.UndefinedOr[int] = undefined.UNDEFINED,
        include_roles: undefined.UndefinedOr[
            typing.Collection[snowflakes.SnowflakeishOr[guilds.PartialRole]]
        ] = undefined.UNDEFINED,
    ) -> int:
        """Estimate the guild prune count.

        Parameters
        ----------
        guild : hikari.snowflakes.SnowflakeishOr[hikari.guilds.PartialGuild]
            The guild to estimate the guild prune count for. This may be the object
            or the ID of an existing guild.

        Other Parameters
        ----------------
        days : hikari.undefined.UndefinedOr[builtins.int]
            If provided, number of days to count prune for.
        include_roles : hikari.undefined.UndefinedOr[typing.Collection[hikari.snowflakes.SnowflakeishOr[hikari.guilds.PartialRole]]]
            If provided, the role(s) to include. By default, this endpoint will
            not count users with roles. Providing roles using this attribute
            will make members with the specified roles also get included into
            the count.

        Returns
        -------
        builtins.int
            The estimated guild prune count.

        Raises
        ------
        hikari.errors.BadRequestError
            If any of the fields that are passed have an invalid value.
        hikari.errors.UnauthorizedError
            If you are unauthorized to make the request (invalid/missing token).
        hikari.errors.ForbiddenError
            If you are missing the `KICK_MEMBERS` permission.
        hikari.errors.NotFoundError
            If the guild is not found.
        hikari.errors.RateLimitTooLongError
            Raised in the event that a rate limit occurs that is
            longer than `max_rate_limit` when making a request.
        hikari.errors.RateLimitedError
            Usually, Hikari will handle and retry on hitting
            rate-limits automatically. This includes most bucket-specific
            rate-limits and global rate-limits. In some rare edge cases,
            however, Discord implements other undocumented rules for
            rate-limiting, such as limits per attribute. These cannot be
            detected or handled normally by Hikari due to their undocumented
            nature, and will trigger this exception if they occur.
        hikari.errors.InternalServerError
            If an internal error occurs on Discord while handling the request.
        """  # noqa: E501 - Line too long
    @abc.abstractmethod
    async def begin_guild_prune(
        self,
        guild: snowflakes.SnowflakeishOr[guilds.PartialGuild],
        *,
        days: undefined.UndefinedOr[int] = undefined.UNDEFINED,
        compute_prune_count: undefined.UndefinedOr[bool] = undefined.UNDEFINED,
        include_roles: undefined.UndefinedOr[
            typing.Collection[snowflakes.SnowflakeishOr[guilds.PartialRole]]
        ] = undefined.UNDEFINED,
        reason: undefined.UndefinedOr[str] = undefined.UNDEFINED,
    ) -> typing.Optional[int]:
        """Begin the guild prune.

        Parameters
        ----------
        guild : hikari.snowflakes.SnowflakeishOr[hikari.guilds.PartialGuild]
            The guild to begin the guild prune in. This may be the object
            or the ID of an existing guild.

        Other Parameters
        ----------------
        days : hikari.undefined.UndefinedOr[builtins.int]
            If provided, number of days to count prune for.
        compute_prune_count : hikari.undefined.UndefinedOr[builtins.bool]
            If provided, whether to return the prune count. This is discouraged
            for large guilds.
        include_roles : hikari.undefined.UndefinedOr[typing.Collection[hikari.snowflakes.SnowflakeishOr[hikari.guilds.PartialRole]]]
            If provided, the role(s) to include. By default, this endpoint will
            not count users with roles. Providing roles using this attribute
            will make members with the specified roles also get included into
            the count.
        reason : hikari.undefined.UndefinedOr[builtins.str]
            If provided, the reason that will be recorded in the audit logs.
            Maximum of 512 characters.

        Returns
        -------
        typing.Optional[builtins.int]
            If `compute_prune_count` is not provided or `builtins.True`, the
            number of members pruned. Else `builtins.None`.

        Raises
        ------
        hikari.errors.BadRequestError
            If any of the fields that are passed have an invalid value.
        hikari.errors.UnauthorizedError
            If you are unauthorized to make the request (invalid/missing token).
        hikari.errors.ForbiddenError
            If you are missing the `KICK_MEMBERS` permission.
        hikari.errors.NotFoundError
            If the guild is not found.
        hikari.errors.RateLimitTooLongError
            Raised in the event that a rate limit occurs that is
            longer than `max_rate_limit` when making a request.
        hikari.errors.RateLimitedError
            Usually, Hikari will handle and retry on hitting
            rate-limits automatically. This includes most bucket-specific
            rate-limits and global rate-limits. In some rare edge cases,
            however, Discord implements other undocumented rules for
            rate-limiting, such as limits per attribute. These cannot be
            detected or handled normally by Hikari due to their undocumented
            nature, and will trigger this exception if they occur.
        hikari.errors.InternalServerError
            If an internal error occurs on Discord while handling the request.
        """  # noqa: E501 - Line too long
    @abc.abstractmethod
    async def fetch_guild_voice_regions(
        self,
        guild: snowflakes.SnowflakeishOr[guilds.PartialGuild],
    ) -> typing.Sequence[voices.VoiceRegion]:
        """Fetch the available voice regions for a guild.

        !!! note
            Unlike `RESTClient.fetch_voice_regions`, this will
            return the VIP regions if the guild has access to them.

        Parameters
        ----------
        guild : hikari.snowflakes.SnowflakeishOr[hikari.guilds.PartialGuild]
            The guild to fetch the voice regions for. This may be the object
            or the ID of an existing guild.

        Returns
        -------
        typing.Sequence[hikari.voices.VoiceRegion]
            The available voice regions for the guild.

        Raises
        ------
        hikari.errors.UnauthorizedError
            If you are unauthorized to make the request (invalid/missing token).
        hikari.errors.NotFoundError
            If the guild is not found.
        hikari.errors.RateLimitTooLongError
            Raised in the event that a rate limit occurs that is
            longer than `max_rate_limit` when making a request.
        hikari.errors.RateLimitedError
            Usually, Hikari will handle and retry on hitting
            rate-limits automatically. This includes most bucket-specific
            rate-limits and global rate-limits. In some rare edge cases,
            however, Discord implements other undocumented rules for
            rate-limiting, such as limits per attribute. These cannot be
            detected or handled normally by Hikari due to their undocumented
            nature, and will trigger this exception if they occur.
        hikari.errors.InternalServerError
            If an internal error occurs on Discord while handling the request.
        """
    @abc.abstractmethod
    async def fetch_guild_invites(
        self,
        guild: snowflakes.SnowflakeishOr[guilds.PartialGuild],
    ) -> typing.Sequence[invites.InviteWithMetadata]:
        """Fetch the guild's invites.

        Parameters
        ----------
        guild : hikari.snowflakes.SnowflakeishOr[hikari.guilds.PartialGuild]
            The guild to fetch the invites for. This may be the object
            or the ID of an existing guild.

        Returns
        -------
        typing.Sequence[hikari.invites.InviteWithMetadata]
            The invites for the guild.

        Raises
        ------
        hikari.errors.ForbiddenError
            If you are missing the `MANAGE_GUILD` permission.
        hikari.errors.UnauthorizedError
            If you are unauthorized to make the request (invalid/missing token).
        hikari.errors.NotFoundError
            If the guild is not found.
        hikari.errors.RateLimitTooLongError
            Raised in the event that a rate limit occurs that is
            longer than `max_rate_limit` when making a request.
        hikari.errors.RateLimitedError
            Usually, Hikari will handle and retry on hitting
            rate-limits automatically. This includes most bucket-specific
            rate-limits and global rate-limits. In some rare edge cases,
            however, Discord implements other undocumented rules for
            rate-limiting, such as limits per attribute. These cannot be
            detected or handled normally by Hikari due to their undocumented
            nature, and will trigger this exception if they occur.
        hikari.errors.InternalServerError
            If an internal error occurs on Discord while handling the request.
        """
    @abc.abstractmethod
    async def fetch_integrations(
        self,
        guild: snowflakes.SnowflakeishOr[guilds.PartialGuild],
    ) -> typing.Sequence[guilds.Integration]:
        """Fetch the guild's integrations.

        Parameters
        ----------
        guild : hikari.snowflakes.SnowflakeishOr[hikari.guilds.PartialGuild]
            The guild to fetch the integrations for. This may be the object
            or the ID of an existing guild.

        Returns
        -------
        typing.Sequence[hikari.guilds.Integration]
            The integrations for the guild.

        Raises
        ------
        hikari.errors.ForbiddenError
            If you are missing the `MANAGE_GUILD` permission.
        hikari.errors.UnauthorizedError
            If you are unauthorized to make the request (invalid/missing token).
        hikari.errors.NotFoundError
            If the guild is not found.
        hikari.errors.RateLimitTooLongError
            Raised in the event that a rate limit occurs that is
            longer than `max_rate_limit` when making a request.
        hikari.errors.RateLimitedError
            Usually, Hikari will handle and retry on hitting
            rate-limits automatically. This includes most bucket-specific
            rate-limits and global rate-limits. In some rare edge cases,
            however, Discord implements other undocumented rules for
            rate-limiting, such as limits per attribute. These cannot be
            detected or handled normally by Hikari due to their undocumented
            nature, and will trigger this exception if they occur.
        hikari.errors.InternalServerError
            If an internal error occurs on Discord while handling the request.
        """
    @abc.abstractmethod
    async def fetch_widget(self, guild: snowflakes.SnowflakeishOr[guilds.PartialGuild]) -> guilds.GuildWidget:
        """Fetch a guild's widget.

        Parameters
        ----------
        guild : hikari.snowflakes.SnowflakeishOr[hikari.guilds.PartialGuild]
            The guild to fetch the widget from. This can be the object
            or the ID of an existing guild.

        Returns
        -------
        hikari.guilds.GuildWidget
            The requested guild widget.

        Raises
        ------
        hikari.errors.ForbiddenError
            If you are missing the `MANAGE_GUILD` permission.
        hikari.errors.NotFoundError
            If the guild is not found.
        hikari.errors.UnauthorizedError
            If you are unauthorized to make the request (invalid/missing token).
        hikari.errors.RateLimitTooLongError
            Raised in the event that a rate limit occurs that is
            longer than `max_rate_limit` when making a request.
        hikari.errors.RateLimitedError
            Usually, Hikari will handle and retry on hitting
            rate-limits automatically. This includes most bucket-specific
            rate-limits and global rate-limits. In some rare edge cases,
            however, Discord implements other undocumented rules for
            rate-limiting, such as limits per attribute. These cannot be
            detected or handled normally by Hikari due to their undocumented
            nature, and will trigger this exception if they occur.
        hikari.errors.InternalServerError
            If an internal error occurs on Discord while handling the request.
        """
    @abc.abstractmethod
    async def edit_widget(
        self,
        guild: snowflakes.SnowflakeishOr[guilds.PartialGuild],
        *,
        channel: undefined.UndefinedNoneOr[snowflakes.SnowflakeishOr[channels.GuildChannel]] = undefined.UNDEFINED,
        enabled: undefined.UndefinedOr[bool] = undefined.UNDEFINED,
        reason: undefined.UndefinedOr[str] = undefined.UNDEFINED,
    ) -> guilds.GuildWidget:
        """Edit a guild's widget.

        Parameters
        ----------
        guild : hikari.snowflakes.SnowflakeishOr[hikari.guilds.PartialGuild]
            The guild to edit the widget in. This can be the object
            or the ID of an existing guild.

        Other Parameters
        ----------------
        channel : hikari.undefined.UndefinedNoneOr[hikari.snowflakes.SnowflakeishOr[hikari.channels.GuildChannel]]
            If provided, the channel to set the widget to. If `builtins.None`,
            will not set to any.
        enabled : hikari.undefined.UndefinedOr[builtins.bool]
            If provided, whether to enable the widget.
        reason : hikari.undefined.UndefinedOr[builtins.str]
            If provided, the reason that will be recorded in the audit logs.
            Maximum of 512 characters.

        Returns
        -------
        hikari.guilds.GuildWidget
            The edited guild widget.

        Raises
        ------
        hikari.errors.ForbiddenError
            If you are missing the `MANAGE_GUILD` permission.
        hikari.errors.NotFoundError
            If the guild is not found.
        hikari.errors.UnauthorizedError
            If you are unauthorized to make the request (invalid/missing token).
        hikari.errors.RateLimitTooLongError
            Raised in the event that a rate limit occurs that is
            longer than `max_rate_limit` when making a request.
        hikari.errors.RateLimitedError
            Usually, Hikari will handle and retry on hitting
            rate-limits automatically. This includes most bucket-specific
            rate-limits and global rate-limits. In some rare edge cases,
            however, Discord implements other undocumented rules for
            rate-limiting, such as limits per attribute. These cannot be
            detected or handled normally by Hikari due to their undocumented
            nature, and will trigger this exception if they occur.
        hikari.errors.InternalServerError
            If an internal error occurs on Discord while handling the request.
        """
@abc.abstractmethod
async def fetch_vanity_url(self, guild: snowflakes.SnowflakeishOr[guilds.PartialGuild]) -> invites.VanityURL:
    """Fetch a guild's vanity url.

    Parameters
    ----------
    guild : hikari.snowflakes.SnowflakeishOr[hikari.guilds.PartialGuild]
        The guild to fetch the vanity url from. This can
        be the object or the ID of an existing guild.

    Returns
    -------
    hikari.invites.VanityURL
        The requested vanity URL invite.

    Raises
    ------
    hikari.errors.ForbiddenError
        If you are not part of the guild.
    hikari.errors.NotFoundError
        If the guild is not found.
    hikari.errors.UnauthorizedError
        If you are unauthorized to make the request (invalid/missing token).
    hikari.errors.RateLimitTooLongError
        Raised in the event that a rate limit occurs that is
        longer than `max_rate_limit` when making a request.
    hikari.errors.RateLimitedError
        Usually, Hikari will handle and retry on hitting
        rate-limits automatically. This includes most bucket-specific
        rate-limits and global rate-limits. In some rare edge cases,
        however, Discord implements other undocumented rules for
        rate-limiting, such as limits per attribute. These cannot be
        detected or handled normally by Hikari due to their undocumented
        nature, and will trigger this exception if they occur.
    hikari.errors.InternalServerError
        If an internal error occurs on Discord while handling the request.
    """
| 46.751845
| 161
| 0.655373
| 25,512
| 221,744
| 5.672468
| 0.033357
| 0.042704
| 0.008403
| 0.033334
| 0.88977
| 0.869745
| 0.854183
| 0.846043
| 0.833336
| 0.815736
| 0
| 0.0015
| 0.284662
| 221,744
| 4,742
| 162
| 46.761704
| 0.910832
| 0.073743
| 0
| 0.706944
| 0
| 0
| 0.000783
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.011111
| false
| 0
| 0.034722
| 0
| 0.055556
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
86225427c2de01554019a5cd95c63ae5082b66f4
| 68,001
|
py
|
Python
|
sdk/iothub/azure-mgmt-iothubprovisioningservices/azure/mgmt/iothubprovisioningservices/aio/operations/_iot_dps_resource_operations.py
|
vincenttran-msft/azure-sdk-for-python
|
348b56f9f03eeb3f7b502eed51daf494ffff874d
|
[
"MIT"
] | 1
|
2022-03-09T08:59:13.000Z
|
2022-03-09T08:59:13.000Z
|
sdk/iothub/azure-mgmt-iothubprovisioningservices/azure/mgmt/iothubprovisioningservices/aio/operations/_iot_dps_resource_operations.py
|
vincenttran-msft/azure-sdk-for-python
|
348b56f9f03eeb3f7b502eed51daf494ffff874d
|
[
"MIT"
] | null | null | null |
sdk/iothub/azure-mgmt-iothubprovisioningservices/azure/mgmt/iothubprovisioningservices/aio/operations/_iot_dps_resource_operations.py
|
vincenttran-msft/azure-sdk-for-python
|
348b56f9f03eeb3f7b502eed51daf494ffff874d
|
[
"MIT"
] | 1
|
2022-03-04T06:21:56.000Z
|
2022-03-04T06:21:56.000Z
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
import functools
from typing import Any, AsyncIterable, Callable, Dict, Generic, List, Optional, TypeVar, Union
import warnings
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse
from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
from azure.core.rest import HttpRequest
from azure.core.tracing.decorator import distributed_trace
from azure.core.tracing.decorator_async import distributed_trace_async
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
from ... import models as _models
from ..._vendor import _convert_request
from ...operations._iot_dps_resource_operations import build_check_provisioning_service_name_availability_request, build_create_or_update_private_endpoint_connection_request_initial, build_create_or_update_request_initial, build_delete_private_endpoint_connection_request_initial, build_delete_request_initial, build_get_operation_result_request, build_get_private_endpoint_connection_request, build_get_private_link_resources_request, build_get_request, build_list_by_resource_group_request, build_list_by_subscription_request, build_list_keys_for_key_name_request, build_list_keys_request, build_list_private_endpoint_connections_request, build_list_private_link_resources_request, build_list_valid_skus_request, build_update_request_initial
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class IotDpsResourceOperations:
"""IotDpsResourceOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.iothubprovisioningservices.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer) -> None:
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
@distributed_trace_async
async def get(
self,
provisioning_service_name: str,
resource_group_name: str,
**kwargs: Any
) -> "_models.ProvisioningServiceDescription":
"""Get the non-security related metadata of the provisioning service.
Get the metadata of the provisioning service without SAS keys.
:param provisioning_service_name: Name of the provisioning service to retrieve.
:type provisioning_service_name: str
:param resource_group_name: Resource group name.
:type resource_group_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: ProvisioningServiceDescription, or the result of cls(response)
:rtype: ~azure.mgmt.iothubprovisioningservices.models.ProvisioningServiceDescription
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ProvisioningServiceDescription"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_get_request(
provisioning_service_name=provisioning_service_name,
subscription_id=self._config.subscription_id,
resource_group_name=resource_group_name,
template_url=self.get.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorDetails, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize('ProvisioningServiceDescription', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Devices/provisioningServices/{provisioningServiceName}'} # type: ignore
async def _create_or_update_initial(
self,
resource_group_name: str,
provisioning_service_name: str,
iot_dps_description: "_models.ProvisioningServiceDescription",
**kwargs: Any
) -> "_models.ProvisioningServiceDescription":
cls = kwargs.pop('cls', None) # type: ClsType["_models.ProvisioningServiceDescription"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
content_type = kwargs.pop('content_type', "application/json") # type: Optional[str]
_json = self._serialize.body(iot_dps_description, 'ProvisioningServiceDescription')
request = build_create_or_update_request_initial(
subscription_id=self._config.subscription_id,
resource_group_name=resource_group_name,
provisioning_service_name=provisioning_service_name,
content_type=content_type,
json=_json,
template_url=self._create_or_update_initial.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('ProvisioningServiceDescription', pipeline_response)
if response.status_code == 201:
deserialized = self._deserialize('ProvisioningServiceDescription', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Devices/provisioningServices/{provisioningServiceName}'} # type: ignore
    @distributed_trace_async
    async def begin_create_or_update(
        self,
        resource_group_name: str,
        provisioning_service_name: str,
        iot_dps_description: "_models.ProvisioningServiceDescription",
        **kwargs: Any
    ) -> AsyncLROPoller["_models.ProvisioningServiceDescription"]:
        """Create or update the metadata of the provisioning service.

        Create or update the metadata of the provisioning service. The usual pattern to modify a
        property is to retrieve the provisioning service metadata and security metadata, and then
        combine them with the modified values in a new body to update the provisioning service.

        :param resource_group_name: Resource group identifier.
        :type resource_group_name: str
        :param provisioning_service_name: Name of provisioning service to create or update.
        :type provisioning_service_name: str
        :param iot_dps_description: Description of the provisioning service to create or update.
        :type iot_dps_description:
         ~azure.mgmt.iothubprovisioningservices.models.ProvisioningServiceDescription
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
         this operation to not poll, or pass in your own initialized polling object for a personal
         polling strategy.
        :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
         Retry-After header is present.
        :return: An instance of AsyncLROPoller that returns either ProvisioningServiceDescription or
         the result of cls(response)
        :rtype:
         ~azure.core.polling.AsyncLROPoller[~azure.mgmt.iothubprovisioningservices.models.ProvisioningServiceDescription]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        content_type = kwargs.pop('content_type', "application/json") # type: Optional[str]
        polling = kwargs.pop('polling', True) # type: Union[bool, azure.core.polling.AsyncPollingMethod]
        cls = kwargs.pop('cls', None) # type: ClsType["_models.ProvisioningServiceDescription"]
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
        if cont_token is None:
            # No saved state: issue the initial PUT. `cls=lambda x,y,z: x` keeps the raw
            # PipelineResponse so the poller below (not the initial call) deserializes.
            raw_result = await self._create_or_update_initial(
                resource_group_name=resource_group_name,
                provisioning_service_name=provisioning_service_name,
                iot_dps_description=iot_dps_description,
                content_type=content_type,
                cls=lambda x,y,z: x,
                **kwargs
            )
        # Drop any caller-supplied error_map so it is not forwarded to the poller.
        kwargs.pop('error_map', None)

        def get_long_running_output(pipeline_response):
            # Deserialize the final LRO response into the model type, honoring the
            # optional custom `cls` callback captured above.
            response = pipeline_response.http_response
            deserialized = self._deserialize('ProvisioningServiceDescription', pipeline_response)
            if cls:
                return cls(pipeline_response, deserialized, {})
            return deserialized

        # polling=True -> ARM polling; polling=False -> no polling;
        # anything else -> caller-provided AsyncPollingMethod instance.
        if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs)
        elif polling is False: polling_method = AsyncNoPolling()
        else: polling_method = polling
        if cont_token:
            return AsyncLROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)

    begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Devices/provisioningServices/{provisioningServiceName}'} # type: ignore
async def _update_initial(
self,
resource_group_name: str,
provisioning_service_name: str,
provisioning_service_tags: "_models.TagsResource",
**kwargs: Any
) -> "_models.ProvisioningServiceDescription":
cls = kwargs.pop('cls', None) # type: ClsType["_models.ProvisioningServiceDescription"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
content_type = kwargs.pop('content_type', "application/json") # type: Optional[str]
_json = self._serialize.body(provisioning_service_tags, 'TagsResource')
request = build_update_request_initial(
subscription_id=self._config.subscription_id,
resource_group_name=resource_group_name,
provisioning_service_name=provisioning_service_name,
content_type=content_type,
json=_json,
template_url=self._update_initial.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('ProvisioningServiceDescription', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Devices/provisioningServices/{provisioningServiceName}'} # type: ignore
    @distributed_trace_async
    async def begin_update(
        self,
        resource_group_name: str,
        provisioning_service_name: str,
        provisioning_service_tags: "_models.TagsResource",
        **kwargs: Any
    ) -> AsyncLROPoller["_models.ProvisioningServiceDescription"]:
        """Update an existing provisioning service's tags.

        Update an existing provisioning service's tags. To update other fields, use the
        CreateOrUpdate method.

        :param resource_group_name: Resource group identifier.
        :type resource_group_name: str
        :param provisioning_service_name: Name of provisioning service to create or update.
        :type provisioning_service_name: str
        :param provisioning_service_tags: Updated tag information to set into the provisioning service
         instance.
        :type provisioning_service_tags: ~azure.mgmt.iothubprovisioningservices.models.TagsResource
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
         this operation to not poll, or pass in your own initialized polling object for a personal
         polling strategy.
        :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
         Retry-After header is present.
        :return: An instance of AsyncLROPoller that returns either ProvisioningServiceDescription or
         the result of cls(response)
        :rtype:
         ~azure.core.polling.AsyncLROPoller[~azure.mgmt.iothubprovisioningservices.models.ProvisioningServiceDescription]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        content_type = kwargs.pop('content_type', "application/json") # type: Optional[str]
        polling = kwargs.pop('polling', True) # type: Union[bool, azure.core.polling.AsyncPollingMethod]
        cls = kwargs.pop('cls', None) # type: ClsType["_models.ProvisioningServiceDescription"]
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
        if cont_token is None:
            # No saved state: issue the initial PATCH. The lambda keeps the raw
            # PipelineResponse so deserialization happens in the poller below.
            raw_result = await self._update_initial(
                resource_group_name=resource_group_name,
                provisioning_service_name=provisioning_service_name,
                provisioning_service_tags=provisioning_service_tags,
                content_type=content_type,
                cls=lambda x,y,z: x,
                **kwargs
            )
        # Drop any caller-supplied error_map so it is not forwarded to the poller.
        kwargs.pop('error_map', None)

        def get_long_running_output(pipeline_response):
            # Deserialize the final LRO response, honoring the optional `cls` callback.
            response = pipeline_response.http_response
            deserialized = self._deserialize('ProvisioningServiceDescription', pipeline_response)
            if cls:
                return cls(pipeline_response, deserialized, {})
            return deserialized

        # polling=True -> ARM polling; polling=False -> no polling;
        # anything else -> caller-provided AsyncPollingMethod instance.
        if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs)
        elif polling is False: polling_method = AsyncNoPolling()
        else: polling_method = polling
        if cont_token:
            return AsyncLROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)

    begin_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Devices/provisioningServices/{provisioningServiceName}'} # type: ignore
    async def _delete_initial(
        self,
        provisioning_service_name: str,
        resource_group_name: str,
        **kwargs: Any
    ) -> None:
        # Initial DELETE call of the delete long-running operation. Returns nothing,
        # or the result of the optional `cls` callback on the raw response.
        cls = kwargs.pop('cls', None) # type: ClsType[None]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        request = build_delete_request_initial(
            provisioning_service_name=provisioning_service_name,
            subscription_id=self._config.subscription_id,
            resource_group_name=resource_group_name,
            template_url=self._delete_initial.metadata['url'],
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        # NOTE(review): 404 is accepted alongside 200/202/204 — presumably so that
        # deleting an already-deleted service counts as success (idempotent delete);
        # confirm against the service API contract.
        if response.status_code not in [200, 202, 204, 404]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
        if cls:
            return cls(pipeline_response, None, {})

    _delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Devices/provisioningServices/{provisioningServiceName}'} # type: ignore
    @distributed_trace_async
    async def begin_delete(
        self,
        provisioning_service_name: str,
        resource_group_name: str,
        **kwargs: Any
    ) -> AsyncLROPoller[None]:
        """Delete the Provisioning Service.

        Deletes the Provisioning Service.

        :param provisioning_service_name: Name of provisioning service to delete.
        :type provisioning_service_name: str
        :param resource_group_name: Resource group identifier.
        :type resource_group_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
         this operation to not poll, or pass in your own initialized polling object for a personal
         polling strategy.
        :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
         Retry-After header is present.
        :return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
        :rtype: ~azure.core.polling.AsyncLROPoller[None]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        polling = kwargs.pop('polling', True) # type: Union[bool, azure.core.polling.AsyncPollingMethod]
        cls = kwargs.pop('cls', None) # type: ClsType[None]
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
        if cont_token is None:
            # No saved state: issue the initial DELETE. The lambda keeps the raw
            # PipelineResponse so the poller drives the rest of the operation.
            raw_result = await self._delete_initial(
                provisioning_service_name=provisioning_service_name,
                resource_group_name=resource_group_name,
                cls=lambda x,y,z: x,
                **kwargs
            )
        # Drop any caller-supplied error_map so it is not forwarded to the poller.
        kwargs.pop('error_map', None)

        def get_long_running_output(pipeline_response):
            # The operation produces no body; only invoke the custom callback if
            # one was supplied (implicitly returns None otherwise).
            if cls:
                return cls(pipeline_response, None, {})

        # polling=True -> ARM polling; polling=False -> no polling;
        # anything else -> caller-provided AsyncPollingMethod instance.
        if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs)
        elif polling is False: polling_method = AsyncNoPolling()
        else: polling_method = polling
        if cont_token:
            return AsyncLROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)

    begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Devices/provisioningServices/{provisioningServiceName}'} # type: ignore
    @distributed_trace
    def list_by_subscription(
        self,
        **kwargs: Any
    ) -> AsyncIterable["_models.ProvisioningServiceDescriptionListResult"]:
        """Get all the provisioning services in a subscription.

        List all the provisioning services for a given subscription id.

        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either ProvisioningServiceDescriptionListResult or the
         result of cls(response)
        :rtype:
         ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.iothubprovisioningservices.models.ProvisioningServiceDescriptionListResult]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None) # type: ClsType["_models.ProvisioningServiceDescriptionListResult"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))

        def prepare_request(next_link=None):
            # First page uses the operation's URL template; subsequent pages follow
            # the service-supplied nextLink and are forced to GET.
            if not next_link:
                request = build_list_by_subscription_request(
                    subscription_id=self._config.subscription_id,
                    template_url=self.list_by_subscription.metadata['url'],
                )
                request = _convert_request(request)
                request.url = self._client.format_url(request.url)
            else:
                request = build_list_by_subscription_request(
                    subscription_id=self._config.subscription_id,
                    template_url=next_link,
                )
                request = _convert_request(request)
                request.url = self._client.format_url(request.url)
                request.method = "GET"
            return request

        async def extract_data(pipeline_response):
            # Turn one page into (next_link, async-iterable of elements),
            # applying the optional `cls` callback to the element list.
            deserialized = self._deserialize("ProvisioningServiceDescriptionListResult", pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link or None, AsyncList(list_of_elem)

        async def get_next(next_link=None):
            # Fetch one page, mapping non-200 responses to exceptions.
            request = prepare_request(next_link)
            pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response
            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                error = self._deserialize.failsafe_deserialize(_models.ErrorDetails, pipeline_response)
                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
            return pipeline_response

        return AsyncItemPaged(
            get_next, extract_data
        )
    list_by_subscription.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Devices/provisioningServices'} # type: ignore
@distributed_trace
def list_by_resource_group(
self,
resource_group_name: str,
**kwargs: Any
) -> AsyncIterable["_models.ProvisioningServiceDescriptionListResult"]:
"""Get a list of all provisioning services in the given resource group.
:param resource_group_name: Resource group identifier.
:type resource_group_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either ProvisioningServiceDescriptionListResult or the
result of cls(response)
:rtype:
~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.iothubprovisioningservices.models.ProvisioningServiceDescriptionListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ProvisioningServiceDescriptionListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
def prepare_request(next_link=None):
if not next_link:
request = build_list_by_resource_group_request(
subscription_id=self._config.subscription_id,
resource_group_name=resource_group_name,
template_url=self.list_by_resource_group.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
else:
request = build_list_by_resource_group_request(
subscription_id=self._config.subscription_id,
resource_group_name=resource_group_name,
template_url=next_link,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
request.method = "GET"
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize("ProvisioningServiceDescriptionListResult", pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorDetails, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list_by_resource_group.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Devices/provisioningServices'} # type: ignore
@distributed_trace_async
async def get_operation_result(
self,
operation_id: str,
resource_group_name: str,
provisioning_service_name: str,
asyncinfo: str = "true",
**kwargs: Any
) -> "_models.AsyncOperationResult":
"""Gets the status of a long running operation, such as create, update or delete a provisioning
service.
:param operation_id: Operation id corresponding to long running operation. Use this to poll for
the status.
:type operation_id: str
:param resource_group_name: Resource group identifier.
:type resource_group_name: str
:param provisioning_service_name: Name of provisioning service that the operation is running
on.
:type provisioning_service_name: str
:param asyncinfo: Async header used to poll on the status of the operation, obtained while
creating the long running operation.
:type asyncinfo: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: AsyncOperationResult, or the result of cls(response)
:rtype: ~azure.mgmt.iothubprovisioningservices.models.AsyncOperationResult
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.AsyncOperationResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_get_operation_result_request(
operation_id=operation_id,
subscription_id=self._config.subscription_id,
resource_group_name=resource_group_name,
provisioning_service_name=provisioning_service_name,
asyncinfo=asyncinfo,
template_url=self.get_operation_result.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorDetails, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize('AsyncOperationResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get_operation_result.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Devices/provisioningServices/{provisioningServiceName}/operationresults/{operationId}'} # type: ignore
    @distributed_trace
    def list_valid_skus(
        self,
        provisioning_service_name: str,
        resource_group_name: str,
        **kwargs: Any
    ) -> AsyncIterable["_models.IotDpsSkuDefinitionListResult"]:
        """Get the list of valid SKUs for a provisioning service.

        Gets the list of valid SKUs and tiers for a provisioning service.

        :param provisioning_service_name: Name of provisioning service.
        :type provisioning_service_name: str
        :param resource_group_name: Name of resource group.
        :type resource_group_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either IotDpsSkuDefinitionListResult or the result of
         cls(response)
        :rtype:
         ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.iothubprovisioningservices.models.IotDpsSkuDefinitionListResult]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None) # type: ClsType["_models.IotDpsSkuDefinitionListResult"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))

        def prepare_request(next_link=None):
            # First page uses the operation's URL template; subsequent pages follow
            # the service-supplied nextLink and are forced to GET.
            if not next_link:
                request = build_list_valid_skus_request(
                    provisioning_service_name=provisioning_service_name,
                    subscription_id=self._config.subscription_id,
                    resource_group_name=resource_group_name,
                    template_url=self.list_valid_skus.metadata['url'],
                )
                request = _convert_request(request)
                request.url = self._client.format_url(request.url)
            else:
                request = build_list_valid_skus_request(
                    provisioning_service_name=provisioning_service_name,
                    subscription_id=self._config.subscription_id,
                    resource_group_name=resource_group_name,
                    template_url=next_link,
                )
                request = _convert_request(request)
                request.url = self._client.format_url(request.url)
                request.method = "GET"
            return request

        async def extract_data(pipeline_response):
            # Turn one page into (next_link, async-iterable of elements),
            # applying the optional `cls` callback to the element list.
            deserialized = self._deserialize("IotDpsSkuDefinitionListResult", pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link or None, AsyncList(list_of_elem)

        async def get_next(next_link=None):
            # Fetch one page, mapping non-200 responses to exceptions.
            request = prepare_request(next_link)
            pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response
            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                error = self._deserialize.failsafe_deserialize(_models.ErrorDetails, pipeline_response)
                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
            return pipeline_response

        return AsyncItemPaged(
            get_next, extract_data
        )
    list_valid_skus.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Devices/provisioningServices/{provisioningServiceName}/skus'} # type: ignore
@distributed_trace_async
async def check_provisioning_service_name_availability(
    self,
    arguments: "_models.OperationInputs",
    **kwargs: Any
) -> "_models.NameAvailabilityInfo":
    """Check if a provisioning service name is available.

    Check if a provisioning service name is available. This will validate if the name is
    syntactically valid and if the name is usable.

    :param arguments: Set the name parameter in the OperationInputs structure to the name of the
     provisioning service to check.
    :type arguments: ~azure.mgmt.iothubprovisioningservices.models.OperationInputs
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: NameAvailabilityInfo, or the result of cls(response)
    :rtype: ~azure.mgmt.iothubprovisioningservices.models.NameAvailabilityInfo
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    # Optional response-transform hook and caller-supplied HTTP error overrides.
    custom_cls = kwargs.pop('cls', None)  # type: ClsType["_models.NameAvailabilityInfo"]
    errors = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
    errors.update(kwargs.pop('error_map', {}))

    body_content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]
    serialized_body = self._serialize.body(arguments, 'OperationInputs')

    http_request = _convert_request(
        build_check_provisioning_service_name_availability_request(
            subscription_id=self._config.subscription_id,
            content_type=body_content_type,
            json=serialized_body,
            template_url=self.check_provisioning_service_name_availability.metadata['url'],
        )
    )
    http_request.url = self._client.format_url(http_request.url)

    pipeline_response = await self._client._pipeline.run(http_request, stream=False, **kwargs)
    http_response = pipeline_response.http_response

    if http_response.status_code != 200:
        map_error(status_code=http_response.status_code, response=http_response, error_map=errors)
        error = self._deserialize.failsafe_deserialize(_models.ErrorDetails, pipeline_response)
        raise HttpResponseError(response=http_response, model=error, error_format=ARMErrorFormat)

    result = self._deserialize('NameAvailabilityInfo', pipeline_response)
    return custom_cls(pipeline_response, result, {}) if custom_cls else result

check_provisioning_service_name_availability.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Devices/checkProvisioningServiceNameAvailability'}  # type: ignore
@distributed_trace
def list_keys(
    self,
    provisioning_service_name: str,
    resource_group_name: str,
    **kwargs: Any
) -> AsyncIterable["_models.SharedAccessSignatureAuthorizationRuleListResult"]:
    """Get the security metadata for a provisioning service.

    List the primary and secondary keys for a provisioning service.

    :param provisioning_service_name: The provisioning service name to get the shared access keys
     for.
    :type provisioning_service_name: str
    :param resource_group_name: resource group name.
    :type resource_group_name: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: An iterator like instance of either SharedAccessSignatureAuthorizationRuleListResult
     or the result of cls(response)
    :rtype:
     ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.iothubprovisioningservices.models.SharedAccessSignatureAuthorizationRuleListResult]
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType["_models.SharedAccessSignatureAuthorizationRuleListResult"]
    # Default ARM status-code -> exception mapping; callers may extend/override
    # it by passing 'error_map' in kwargs.
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))

    def prepare_request(next_link=None):
        # First page: hit the operation's templated URL. Later pages: follow
        # the service-supplied next_link verbatim, forcing the method to GET.
        if not next_link:
            request = build_list_keys_request(
                provisioning_service_name=provisioning_service_name,
                subscription_id=self._config.subscription_id,
                resource_group_name=resource_group_name,
                template_url=self.list_keys.metadata['url'],
            )
            request = _convert_request(request)
            request.url = self._client.format_url(request.url)
        else:
            request = build_list_keys_request(
                provisioning_service_name=provisioning_service_name,
                subscription_id=self._config.subscription_id,
                resource_group_name=resource_group_name,
                template_url=next_link,
            )
            request = _convert_request(request)
            request.url = self._client.format_url(request.url)
            request.method = "GET"
        return request

    async def extract_data(pipeline_response):
        # Pull one page's items plus the link to the next page (None when done).
        deserialized = self._deserialize("SharedAccessSignatureAuthorizationRuleListResult", pipeline_response)
        list_of_elem = deserialized.value
        if cls:
            list_of_elem = cls(list_of_elem)
        return deserialized.next_link or None, AsyncList(list_of_elem)

    async def get_next(next_link=None):
        # Fetch a single page, translating non-200 responses into ARM errors.
        request = prepare_request(next_link)

        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize.failsafe_deserialize(_models.ErrorDetails, pipeline_response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        return pipeline_response

    return AsyncItemPaged(
        get_next, extract_data
    )

list_keys.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Devices/provisioningServices/{provisioningServiceName}/listkeys'}  # type: ignore
@distributed_trace_async
async def list_keys_for_key_name(
    self,
    provisioning_service_name: str,
    key_name: str,
    resource_group_name: str,
    **kwargs: Any
) -> "_models.SharedAccessSignatureAuthorizationRuleAccessRightsDescription":
    """Get a shared access policy by name from a provisioning service.

    List primary and secondary keys for a specific key name.

    :param provisioning_service_name: Name of the provisioning service.
    :type provisioning_service_name: str
    :param key_name: Logical key name to get key-values for.
    :type key_name: str
    :param resource_group_name: The name of the resource group that contains the provisioning
     service.
    :type resource_group_name: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: SharedAccessSignatureAuthorizationRuleAccessRightsDescription, or the result of
     cls(response)
    :rtype:
     ~azure.mgmt.iothubprovisioningservices.models.SharedAccessSignatureAuthorizationRuleAccessRightsDescription
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    # Optional response-transform hook and caller-supplied HTTP error overrides.
    custom_cls = kwargs.pop('cls', None)  # type: ClsType["_models.SharedAccessSignatureAuthorizationRuleAccessRightsDescription"]
    errors = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
    errors.update(kwargs.pop('error_map', {}))

    http_request = _convert_request(
        build_list_keys_for_key_name_request(
            provisioning_service_name=provisioning_service_name,
            key_name=key_name,
            subscription_id=self._config.subscription_id,
            resource_group_name=resource_group_name,
            template_url=self.list_keys_for_key_name.metadata['url'],
        )
    )
    http_request.url = self._client.format_url(http_request.url)

    pipeline_response = await self._client._pipeline.run(http_request, stream=False, **kwargs)
    http_response = pipeline_response.http_response

    if http_response.status_code != 200:
        map_error(status_code=http_response.status_code, response=http_response, error_map=errors)
        error = self._deserialize.failsafe_deserialize(_models.ErrorDetails, pipeline_response)
        raise HttpResponseError(response=http_response, model=error, error_format=ARMErrorFormat)

    result = self._deserialize('SharedAccessSignatureAuthorizationRuleAccessRightsDescription', pipeline_response)
    return custom_cls(pipeline_response, result, {}) if custom_cls else result

list_keys_for_key_name.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Devices/provisioningServices/{provisioningServiceName}/keys/{keyName}/listkeys'}  # type: ignore
@distributed_trace_async
async def list_private_link_resources(
    self,
    resource_group_name: str,
    resource_name: str,
    **kwargs: Any
) -> "_models.PrivateLinkResources":
    """List private link resources.

    List private link resources for the given provisioning service.

    :param resource_group_name: The name of the resource group that contains the provisioning
     service.
    :type resource_group_name: str
    :param resource_name: The name of the provisioning service.
    :type resource_name: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: PrivateLinkResources, or the result of cls(response)
    :rtype: ~azure.mgmt.iothubprovisioningservices.models.PrivateLinkResources
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    # Optional response-transform hook and caller-supplied HTTP error overrides.
    custom_cls = kwargs.pop('cls', None)  # type: ClsType["_models.PrivateLinkResources"]
    errors = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
    errors.update(kwargs.pop('error_map', {}))

    http_request = _convert_request(
        build_list_private_link_resources_request(
            subscription_id=self._config.subscription_id,
            resource_group_name=resource_group_name,
            resource_name=resource_name,
            template_url=self.list_private_link_resources.metadata['url'],
        )
    )
    http_request.url = self._client.format_url(http_request.url)

    pipeline_response = await self._client._pipeline.run(http_request, stream=False, **kwargs)
    http_response = pipeline_response.http_response

    if http_response.status_code != 200:
        map_error(status_code=http_response.status_code, response=http_response, error_map=errors)
        error = self._deserialize.failsafe_deserialize(_models.ErrorDetails, pipeline_response)
        raise HttpResponseError(response=http_response, model=error, error_format=ARMErrorFormat)

    result = self._deserialize('PrivateLinkResources', pipeline_response)
    return custom_cls(pipeline_response, result, {}) if custom_cls else result

list_private_link_resources.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Devices/provisioningServices/{resourceName}/privateLinkResources'}  # type: ignore
@distributed_trace_async
async def get_private_link_resources(
    self,
    resource_group_name: str,
    resource_name: str,
    group_id: str,
    **kwargs: Any
) -> "_models.GroupIdInformation":
    """Get the specified private link resource.

    Get the specified private link resource for the given provisioning service.

    :param resource_group_name: The name of the resource group that contains the provisioning
     service.
    :type resource_group_name: str
    :param resource_name: The name of the provisioning service.
    :type resource_name: str
    :param group_id: The name of the private link resource.
    :type group_id: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: GroupIdInformation, or the result of cls(response)
    :rtype: ~azure.mgmt.iothubprovisioningservices.models.GroupIdInformation
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType["_models.GroupIdInformation"]
    # Default ARM status-code -> exception mapping; callers may extend/override
    # it by passing 'error_map' in kwargs.
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))

    # Build the request from the operation's URL template, then normalize it
    # for the shared pipeline.
    request = build_get_private_link_resources_request(
        subscription_id=self._config.subscription_id,
        resource_group_name=resource_group_name,
        resource_name=resource_name,
        group_id=group_id,
        template_url=self.get_private_link_resources.metadata['url'],
    )
    request = _convert_request(request)
    request.url = self._client.format_url(request.url)

    pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    if response.status_code not in [200]:
        # Raise the mapped exception for known codes, else a generic ARM error
        # carrying the service-provided ErrorDetails payload when present.
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        error = self._deserialize.failsafe_deserialize(_models.ErrorDetails, pipeline_response)
        raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

    deserialized = self._deserialize('GroupIdInformation', pipeline_response)

    if cls:
        # Hand raw response, model, and (empty) header dict to the transform.
        return cls(pipeline_response, deserialized, {})

    return deserialized

get_private_link_resources.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Devices/provisioningServices/{resourceName}/privateLinkResources/{groupId}'}  # type: ignore
@distributed_trace_async
async def list_private_endpoint_connections(
    self,
    resource_group_name: str,
    resource_name: str,
    **kwargs: Any
) -> List["_models.PrivateEndpointConnection"]:
    """List private endpoint connections.

    List private endpoint connection properties.

    :param resource_group_name: The name of the resource group that contains the provisioning
     service.
    :type resource_group_name: str
    :param resource_name: The name of the provisioning service.
    :type resource_name: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: list of PrivateEndpointConnection, or the result of cls(response)
    :rtype: list[~azure.mgmt.iothubprovisioningservices.models.PrivateEndpointConnection]
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType[List["_models.PrivateEndpointConnection"]]
    # Default ARM status-code -> exception mapping; callers may extend/override
    # it by passing 'error_map' in kwargs.
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))

    request = build_list_private_endpoint_connections_request(
        subscription_id=self._config.subscription_id,
        resource_group_name=resource_group_name,
        resource_name=resource_name,
        template_url=self.list_private_endpoint_connections.metadata['url'],
    )
    request = _convert_request(request)
    request.url = self._client.format_url(request.url)

    pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    if response.status_code not in [200]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        error = self._deserialize.failsafe_deserialize(_models.ErrorDetails, pipeline_response)
        raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

    # Unlike the pageable operations above, this API returns the whole
    # collection in one response; '[...]' asks msrest for a list of models.
    deserialized = self._deserialize('[PrivateEndpointConnection]', pipeline_response)

    if cls:
        return cls(pipeline_response, deserialized, {})

    return deserialized

list_private_endpoint_connections.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Devices/provisioningServices/{resourceName}/privateEndpointConnections'}  # type: ignore
@distributed_trace_async
async def get_private_endpoint_connection(
    self,
    resource_group_name: str,
    resource_name: str,
    private_endpoint_connection_name: str,
    **kwargs: Any
) -> "_models.PrivateEndpointConnection":
    """Get private endpoint connection.

    Get private endpoint connection properties.

    :param resource_group_name: The name of the resource group that contains the provisioning
     service.
    :type resource_group_name: str
    :param resource_name: The name of the provisioning service.
    :type resource_name: str
    :param private_endpoint_connection_name: The name of the private endpoint connection.
    :type private_endpoint_connection_name: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: PrivateEndpointConnection, or the result of cls(response)
    :rtype: ~azure.mgmt.iothubprovisioningservices.models.PrivateEndpointConnection
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    # Optional response-transform hook and caller-supplied HTTP error overrides.
    custom_cls = kwargs.pop('cls', None)  # type: ClsType["_models.PrivateEndpointConnection"]
    errors = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
    errors.update(kwargs.pop('error_map', {}))

    http_request = _convert_request(
        build_get_private_endpoint_connection_request(
            subscription_id=self._config.subscription_id,
            resource_group_name=resource_group_name,
            resource_name=resource_name,
            private_endpoint_connection_name=private_endpoint_connection_name,
            template_url=self.get_private_endpoint_connection.metadata['url'],
        )
    )
    http_request.url = self._client.format_url(http_request.url)

    pipeline_response = await self._client._pipeline.run(http_request, stream=False, **kwargs)
    http_response = pipeline_response.http_response

    if http_response.status_code != 200:
        map_error(status_code=http_response.status_code, response=http_response, error_map=errors)
        error = self._deserialize.failsafe_deserialize(_models.ErrorDetails, pipeline_response)
        raise HttpResponseError(response=http_response, model=error, error_format=ARMErrorFormat)

    result = self._deserialize('PrivateEndpointConnection', pipeline_response)
    return custom_cls(pipeline_response, result, {}) if custom_cls else result

get_private_endpoint_connection.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Devices/provisioningServices/{resourceName}/privateEndpointConnections/{privateEndpointConnectionName}'}  # type: ignore
async def _create_or_update_private_endpoint_connection_initial(
    self,
    resource_group_name: str,
    resource_name: str,
    private_endpoint_connection_name: str,
    private_endpoint_connection: "_models.PrivateEndpointConnection",
    **kwargs: Any
) -> "_models.PrivateEndpointConnection":
    """Issue the initial PUT of the create-or-update private endpoint connection LRO.

    Called by ``begin_create_or_update_private_endpoint_connection``; returns the
    deserialized body of the 200/201 response (or the result of the caller-supplied
    ``cls`` transform).

    :raises: ~azure.core.exceptions.HttpResponseError for any other status code.
    """
    cls = kwargs.pop('cls', None)  # type: ClsType["_models.PrivateEndpointConnection"]
    # Default ARM status-code -> exception mapping; callers may extend/override
    # it by passing 'error_map' in kwargs.
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))

    content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]

    _json = self._serialize.body(private_endpoint_connection, 'PrivateEndpointConnection')

    request = build_create_or_update_private_endpoint_connection_request_initial(
        subscription_id=self._config.subscription_id,
        resource_group_name=resource_group_name,
        resource_name=resource_name,
        private_endpoint_connection_name=private_endpoint_connection_name,
        content_type=content_type,
        json=_json,
        template_url=self._create_or_update_private_endpoint_connection_initial.metadata['url'],
    )
    request = _convert_request(request)
    request.url = self._client.format_url(request.url)

    pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    if response.status_code not in [200, 201]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        raise HttpResponseError(response=response, error_format=ARMErrorFormat)

    # 200 (updated) and 201 (created) were deserialized in two byte-identical
    # branches in the generated code; collapsed into one.
    deserialized = self._deserialize('PrivateEndpointConnection', pipeline_response)

    if cls:
        return cls(pipeline_response, deserialized, {})

    return deserialized

_create_or_update_private_endpoint_connection_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Devices/provisioningServices/{resourceName}/privateEndpointConnections/{privateEndpointConnectionName}'}  # type: ignore
@distributed_trace_async
async def begin_create_or_update_private_endpoint_connection(
    self,
    resource_group_name: str,
    resource_name: str,
    private_endpoint_connection_name: str,
    private_endpoint_connection: "_models.PrivateEndpointConnection",
    **kwargs: Any
) -> AsyncLROPoller["_models.PrivateEndpointConnection"]:
    """Create or update private endpoint connection.

    Create or update the status of a private endpoint connection with the specified name.

    :param resource_group_name: The name of the resource group that contains the provisioning
     service.
    :type resource_group_name: str
    :param resource_name: The name of the provisioning service.
    :type resource_name: str
    :param private_endpoint_connection_name: The name of the private endpoint connection.
    :type private_endpoint_connection_name: str
    :param private_endpoint_connection: The private endpoint connection with updated properties.
    :type private_endpoint_connection:
     ~azure.mgmt.iothubprovisioningservices.models.PrivateEndpointConnection
    :keyword callable cls: A custom type or function that will be passed the direct response
    :keyword str continuation_token: A continuation token to restart a poller from a saved state.
    :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
     this operation to not poll, or pass in your own initialized polling object for a personal
     polling strategy.
    :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
    :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
     Retry-After header is present.
    :return: An instance of AsyncLROPoller that returns either PrivateEndpointConnection or the
     result of cls(response)
    :rtype:
     ~azure.core.polling.AsyncLROPoller[~azure.mgmt.iothubprovisioningservices.models.PrivateEndpointConnection]
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]
    polling = kwargs.pop('polling', True)  # type: Union[bool, azure.core.polling.AsyncPollingMethod]
    cls = kwargs.pop('cls', None)  # type: ClsType["_models.PrivateEndpointConnection"]
    lro_delay = kwargs.pop(
        'polling_interval',
        self._config.polling_interval
    )
    cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
    if cont_token is None:
        # No saved state: issue the initial PUT. cls=lambda x,y,z: x makes the
        # initial call hand back the raw PipelineResponse so the poller can
        # drive the long-running operation from it.
        raw_result = await self._create_or_update_private_endpoint_connection_initial(
            resource_group_name=resource_group_name,
            resource_name=resource_name,
            private_endpoint_connection_name=private_endpoint_connection_name,
            private_endpoint_connection=private_endpoint_connection,
            content_type=content_type,
            cls=lambda x,y,z: x,
            **kwargs
        )
        # The initial call consumed 'error_map'; drop any leftover copy before
        # the same kwargs are forwarded to the polling method.
        kwargs.pop('error_map', None)

    def get_long_running_output(pipeline_response):
        # Final-state deserialization shared by both poller construction paths.
        response = pipeline_response.http_response
        deserialized = self._deserialize('PrivateEndpointConnection', pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized

    if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs)
    elif polling is False: polling_method = AsyncNoPolling()
    else: polling_method = polling
    if cont_token:
        # Resume an in-flight operation from a previously saved token.
        return AsyncLROPoller.from_continuation_token(
            polling_method=polling_method,
            continuation_token=cont_token,
            client=self._client,
            deserialization_callback=get_long_running_output
        )
    else:
        return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)

begin_create_or_update_private_endpoint_connection.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Devices/provisioningServices/{resourceName}/privateEndpointConnections/{privateEndpointConnectionName}'}  # type: ignore
async def _delete_private_endpoint_connection_initial(
    self,
    resource_group_name: str,
    resource_name: str,
    private_endpoint_connection_name: str,
    **kwargs: Any
) -> Optional["_models.PrivateEndpointConnection"]:
    """Issue the initial DELETE of the private endpoint connection LRO.

    Called by ``begin_delete_private_endpoint_connection``. Returns the
    deserialized body for 200/202 responses, or None for 204 (no body).

    :raises: ~azure.core.exceptions.HttpResponseError for any other status code.
    """
    cls = kwargs.pop('cls', None)  # type: ClsType[Optional["_models.PrivateEndpointConnection"]]
    # Default ARM status-code -> exception mapping; callers may extend/override
    # it by passing 'error_map' in kwargs.
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))

    request = build_delete_private_endpoint_connection_request_initial(
        subscription_id=self._config.subscription_id,
        resource_group_name=resource_group_name,
        resource_name=resource_name,
        private_endpoint_connection_name=private_endpoint_connection_name,
        template_url=self._delete_private_endpoint_connection_initial.metadata['url'],
    )
    request = _convert_request(request)
    request.url = self._client.format_url(request.url)

    pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    if response.status_code not in [200, 202, 204]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        raise HttpResponseError(response=response, error_format=ARMErrorFormat)

    deserialized = None
    # 200 and 202 were deserialized in two byte-identical branches in the
    # generated code; collapsed into one. 204 carries no body, leaving None.
    if response.status_code in [200, 202]:
        deserialized = self._deserialize('PrivateEndpointConnection', pipeline_response)

    if cls:
        return cls(pipeline_response, deserialized, {})

    return deserialized

_delete_private_endpoint_connection_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Devices/provisioningServices/{resourceName}/privateEndpointConnections/{privateEndpointConnectionName}'}  # type: ignore
@distributed_trace_async
async def begin_delete_private_endpoint_connection(
    self,
    resource_group_name: str,
    resource_name: str,
    private_endpoint_connection_name: str,
    **kwargs: Any
) -> AsyncLROPoller["_models.PrivateEndpointConnection"]:
    """Delete private endpoint connection.

    Delete private endpoint connection with the specified name.

    :param resource_group_name: The name of the resource group that contains the provisioning
     service.
    :type resource_group_name: str
    :param resource_name: The name of the provisioning service.
    :type resource_name: str
    :param private_endpoint_connection_name: The name of the private endpoint connection.
    :type private_endpoint_connection_name: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :keyword str continuation_token: A continuation token to restart a poller from a saved state.
    :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
     this operation to not poll, or pass in your own initialized polling object for a personal
     polling strategy.
    :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
    :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
     Retry-After header is present.
    :return: An instance of AsyncLROPoller that returns either PrivateEndpointConnection or the
     result of cls(response)
    :rtype:
     ~azure.core.polling.AsyncLROPoller[~azure.mgmt.iothubprovisioningservices.models.PrivateEndpointConnection]
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    polling = kwargs.pop('polling', True)  # type: Union[bool, azure.core.polling.AsyncPollingMethod]
    cls = kwargs.pop('cls', None)  # type: ClsType["_models.PrivateEndpointConnection"]
    lro_delay = kwargs.pop(
        'polling_interval',
        self._config.polling_interval
    )
    cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
    if cont_token is None:
        # No saved state: issue the initial DELETE. cls=lambda x,y,z: x makes
        # the initial call hand back the raw PipelineResponse so the poller can
        # drive the long-running operation from it.
        raw_result = await self._delete_private_endpoint_connection_initial(
            resource_group_name=resource_group_name,
            resource_name=resource_name,
            private_endpoint_connection_name=private_endpoint_connection_name,
            cls=lambda x,y,z: x,
            **kwargs
        )
        # The initial call consumed 'error_map'; drop any leftover copy before
        # the same kwargs are forwarded to the polling method.
        kwargs.pop('error_map', None)

    def get_long_running_output(pipeline_response):
        # Final-state deserialization shared by both poller construction paths.
        response = pipeline_response.http_response
        deserialized = self._deserialize('PrivateEndpointConnection', pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized

    if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs)
    elif polling is False: polling_method = AsyncNoPolling()
    else: polling_method = polling
    if cont_token:
        # Resume an in-flight operation from a previously saved token.
        return AsyncLROPoller.from_continuation_token(
            polling_method=polling_method,
            continuation_token=cont_token,
            client=self._client,
            deserialization_callback=get_long_running_output
        )
    else:
        return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)

begin_delete_private_endpoint_connection.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Devices/provisioningServices/{resourceName}/privateEndpointConnections/{privateEndpointConnectionName}'}  # type: ignore
| 48.433761
| 743
| 0.694225
| 6,983
| 68,001
| 6.503222
| 0.049549
| 0.03378
| 0.036686
| 0.02092
| 0.883885
| 0.865366
| 0.8483
| 0.825971
| 0.813067
| 0.796508
| 0
| 0.004666
| 0.230997
| 68,001
| 1,403
| 744
| 48.468282
| 0.863748
| 0.080028
| 0
| 0.770905
| 0
| 0.009164
| 0.122241
| 0.102512
| 0
| 0
| 0
| 0
| 0
| 1
| 0.016037
| false
| 0
| 0.018328
| 0
| 0.105384
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
868c0dc1b18a44f057130386800d81ae737d296e
| 166
|
py
|
Python
|
dvadmin-backend/apps/vadmin/monitor/models/__init__.py
|
yuanlaimantou/vue-django-admin
|
3757caf5d5ca2682ffbb6e017ef03ff9a3715cc9
|
[
"MIT"
] | 193
|
2021-02-25T17:36:47.000Z
|
2022-03-31T09:54:48.000Z
|
dvadmin-backend/apps/vadmin/monitor/models/__init__.py
|
yuanlaimantou/vue-django-admin
|
3757caf5d5ca2682ffbb6e017ef03ff9a3715cc9
|
[
"MIT"
] | 6
|
2021-04-23T12:35:14.000Z
|
2021-09-16T03:27:28.000Z
|
dvadmin-backend/apps/vadmin/monitor/models/__init__.py
|
yuanlaimantou/vue-django-admin
|
3757caf5d5ca2682ffbb6e017ef03ff9a3715cc9
|
[
"MIT"
] | 59
|
2021-03-29T09:25:00.000Z
|
2022-03-24T06:53:27.000Z
|
from apps.vadmin.monitor.models.monitor import Monitor
from apps.vadmin.monitor.models.server import Server
from apps.vadmin.monitor.models.sys_files import SysFiles
| 41.5
| 57
| 0.855422
| 25
| 166
| 5.64
| 0.4
| 0.170213
| 0.297872
| 0.446809
| 0.574468
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.072289
| 166
| 3
| 58
| 55.333333
| 0.915584
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
86a8b15f1e10819508f308f334be7642736e7953
| 230
|
py
|
Python
|
cblib/scripts/filters/entries.py
|
HFriberg/cblib-base
|
164a00eb73ef3ac61f5b54f30492209cc69b854b
|
[
"Zlib"
] | 3
|
2019-06-13T06:57:31.000Z
|
2020-06-18T09:58:11.000Z
|
cblib/scripts/filters/entries.py
|
HFriberg/cblib-base
|
164a00eb73ef3ac61f5b54f30492209cc69b854b
|
[
"Zlib"
] | 1
|
2019-04-27T18:28:57.000Z
|
2019-04-30T17:16:53.000Z
|
cblib/scripts/filters/entries.py
|
HFriberg/cblib-base
|
164a00eb73ef3ac61f5b54f30492209cc69b854b
|
[
"Zlib"
] | 3
|
2019-04-30T11:19:34.000Z
|
2019-05-31T13:12:17.000Z
|
import var
import map
def keyquery(cnam=None, cdim=None):
    """Return the union of the key sets reported by the var and map filters.

    Delegates to var.keyquery and map.keyquery; '|' implies both return
    set-like objects — presumably sets of entry keys (TODO confirm against
    the var/map modules).
    """
    return( var.keyquery(cnam,cdim) | map.keyquery(cnam,cdim) )
def getval(prob, cnam=None, cdim=None):
    """Return the combined values extracted from *prob* by the var and map filters.

    '+' concatenation implies var.getval and map.getval return sequence-like
    results — presumably lists of entry values (TODO confirm against the
    var/map modules).
    """
    return( var.getval(prob,cnam,cdim) + map.getval(prob,cnam,cdim) )
| 25.555556
| 67
| 0.721739
| 37
| 230
| 4.486486
| 0.297297
| 0.192771
| 0.253012
| 0.192771
| 0.301205
| 0.301205
| 0
| 0
| 0
| 0
| 0
| 0
| 0.117391
| 230
| 8
| 68
| 28.75
| 0.817734
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0.333333
| 0.666667
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
86aff7575c9a6afebbf94bb347e1923be5ebb537
| 32,015
|
py
|
Python
|
tests/rest/admin/test_media.py
|
dsonck92/synapse
|
2560b1b6b2f74b5724253396c0e3665fa1f7968c
|
[
"Apache-2.0"
] | 9,945
|
2015-01-02T07:41:06.000Z
|
2022-03-31T23:22:42.000Z
|
tests/rest/admin/test_media.py
|
dsonck92/synapse
|
2560b1b6b2f74b5724253396c0e3665fa1f7968c
|
[
"Apache-2.0"
] | 9,320
|
2015-01-08T14:09:03.000Z
|
2022-03-31T21:11:24.000Z
|
tests/rest/admin/test_media.py
|
dsonck92/synapse
|
2560b1b6b2f74b5724253396c0e3665fa1f7968c
|
[
"Apache-2.0"
] | 2,299
|
2015-01-31T22:16:29.000Z
|
2022-03-31T06:08:26.000Z
|
# Copyright 2020 Dirk Klimpel
# Copyright 2021 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
from http import HTTPStatus
from parameterized import parameterized
from twisted.test.proto_helpers import MemoryReactor
import synapse.rest.admin
from synapse.api.errors import Codes
from synapse.rest.client import login, profile, room
from synapse.rest.media.v1.filepath import MediaFilePaths
from synapse.server import HomeServer
from synapse.util import Clock
from tests import unittest
from tests.server import FakeSite, make_request
from tests.test_utils import SMALL_PNG
# Timestamps used to exercise the `before_ts` query parameter.
VALID_TIMESTAMP = 1609459200000  # 2021-01-01 in milliseconds
# Deliberately in seconds (not ms) so the endpoint's year-1970 sanity check fires.
INVALID_TIMESTAMP_IN_S = 1893456000  # 2030-01-01 in seconds
class DeleteMediaByIDTestCase(unittest.HomeserverTestCase):
    """Tests for the admin endpoint that deletes a single local media item:
    DELETE /_synapse/admin/v1/media/<server_name>/<media_id>.
    """

    servlets = [
        synapse.rest.admin.register_servlets,
        synapse.rest.admin.register_servlets_for_media_repo,
        login.register_servlets,
    ]

    def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None:
        self.media_repo = hs.get_media_repository_resource()
        self.server_name = hs.hostname

        self.admin_user = self.register_user("admin", "pass", admin=True)
        self.admin_user_tok = self.login("admin", "pass")

        # Used to resolve on-disk paths so tests can check file (non-)existence.
        self.filepaths = MediaFilePaths(hs.config.media.media_store_path)

    def test_no_auth(self) -> None:
        """
        Try to delete media without authentication.
        """
        url = "/_synapse/admin/v1/media/%s/%s" % (self.server_name, "12345")

        channel = self.make_request("DELETE", url, b"{}")

        self.assertEqual(
            HTTPStatus.UNAUTHORIZED,
            channel.code,
            msg=channel.json_body,
        )
        self.assertEqual(Codes.MISSING_TOKEN, channel.json_body["errcode"])

    def test_requester_is_no_admin(self) -> None:
        """
        If the user is not a server admin, an error is returned.
        """
        self.other_user = self.register_user("user", "pass")
        self.other_user_token = self.login("user", "pass")

        url = "/_synapse/admin/v1/media/%s/%s" % (self.server_name, "12345")

        channel = self.make_request(
            "DELETE",
            url,
            access_token=self.other_user_token,
        )

        self.assertEqual(
            HTTPStatus.FORBIDDEN,
            channel.code,
            msg=channel.json_body,
        )
        self.assertEqual(Codes.FORBIDDEN, channel.json_body["errcode"])

    def test_media_does_not_exist(self) -> None:
        """
        Tests that a lookup for media that does not exist returns a HTTPStatus.NOT_FOUND.
        """
        url = "/_synapse/admin/v1/media/%s/%s" % (self.server_name, "12345")

        channel = self.make_request(
            "DELETE",
            url,
            access_token=self.admin_user_tok,
        )

        self.assertEqual(HTTPStatus.NOT_FOUND, channel.code, msg=channel.json_body)
        self.assertEqual(Codes.NOT_FOUND, channel.json_body["errcode"])

    def test_media_is_not_local(self) -> None:
        """
        Tests that a lookup for media that is not local returns a HTTPStatus.BAD_REQUEST.
        """
        url = "/_synapse/admin/v1/media/%s/%s" % ("unknown_domain", "12345")

        channel = self.make_request(
            "DELETE",
            url,
            access_token=self.admin_user_tok,
        )

        self.assertEqual(HTTPStatus.BAD_REQUEST, channel.code, msg=channel.json_body)
        self.assertEqual("Can only delete local media", channel.json_body["error"])

    def test_delete_media(self) -> None:
        """
        Tests that deleting a local media item succeeds and removes the file.
        """
        download_resource = self.media_repo.children[b"download"]
        upload_resource = self.media_repo.children[b"upload"]

        # Upload some media into the room
        response = self.helper.upload_media(
            upload_resource,
            SMALL_PNG,
            tok=self.admin_user_tok,
            expect_code=HTTPStatus.OK,
        )
        # Extract media ID from the response
        server_and_media_id = response["content_uri"][6:]  # Cut off 'mxc://'
        server_name, media_id = server_and_media_id.split("/")

        self.assertEqual(server_name, self.server_name)

        # Attempt to access media
        channel = make_request(
            self.reactor,
            FakeSite(download_resource, self.reactor),
            "GET",
            server_and_media_id,
            shorthand=False,
            access_token=self.admin_user_tok,
        )

        # Should be successful
        self.assertEqual(
            HTTPStatus.OK,
            channel.code,
            msg=(
                "Expected to receive a HTTPStatus.OK on accessing media: %s"
                % server_and_media_id
            ),
        )

        # Test if the file exists
        local_path = self.filepaths.local_media_filepath(media_id)
        self.assertTrue(os.path.exists(local_path))

        url = "/_synapse/admin/v1/media/%s/%s" % (self.server_name, media_id)

        # Delete media
        channel = self.make_request(
            "DELETE",
            url,
            access_token=self.admin_user_tok,
        )

        self.assertEqual(HTTPStatus.OK, channel.code, msg=channel.json_body)
        self.assertEqual(1, channel.json_body["total"])
        self.assertEqual(
            media_id,
            channel.json_body["deleted_media"][0],
        )

        # Attempt to access media
        channel = make_request(
            self.reactor,
            FakeSite(download_resource, self.reactor),
            "GET",
            server_and_media_id,
            shorthand=False,
            access_token=self.admin_user_tok,
        )
        self.assertEqual(
            HTTPStatus.NOT_FOUND,
            channel.code,
            msg=(
                "Expected to receive a HTTPStatus.NOT_FOUND on accessing deleted media: %s"
                % server_and_media_id
            ),
        )

        # Test if the file is deleted
        self.assertFalse(os.path.exists(local_path))
class DeleteMediaByDateSizeTestCase(unittest.HomeserverTestCase):
    """Tests for the bulk-delete admin endpoint
    POST /_synapse/admin/v1/media/<server_name>/delete, which deletes local
    media by age (`before_ts`), size (`size_gt`) and profile usage
    (`keep_profiles`).
    """

    servlets = [
        synapse.rest.admin.register_servlets,
        synapse.rest.admin.register_servlets_for_media_repo,
        login.register_servlets,
        profile.register_servlets,
        room.register_servlets,
    ]

    def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None:
        self.media_repo = hs.get_media_repository_resource()
        self.server_name = hs.hostname

        self.admin_user = self.register_user("admin", "pass", admin=True)
        self.admin_user_tok = self.login("admin", "pass")

        # Used to resolve on-disk paths so tests can check file (non-)existence.
        self.filepaths = MediaFilePaths(hs.config.media.media_store_path)
        self.url = "/_synapse/admin/v1/media/%s/delete" % self.server_name

        # Move clock up to somewhat realistic time
        self.reactor.advance(1000000000)

    def test_no_auth(self) -> None:
        """
        Try to delete media without authentication.
        """
        channel = self.make_request("POST", self.url, b"{}")

        self.assertEqual(
            HTTPStatus.UNAUTHORIZED,
            channel.code,
            msg=channel.json_body,
        )
        self.assertEqual(Codes.MISSING_TOKEN, channel.json_body["errcode"])

    def test_requester_is_no_admin(self) -> None:
        """
        If the user is not a server admin, an error is returned.
        """
        self.other_user = self.register_user("user", "pass")
        self.other_user_token = self.login("user", "pass")

        channel = self.make_request(
            "POST",
            self.url,
            access_token=self.other_user_token,
        )

        self.assertEqual(
            HTTPStatus.FORBIDDEN,
            channel.code,
            msg=channel.json_body,
        )
        self.assertEqual(Codes.FORBIDDEN, channel.json_body["errcode"])

    def test_media_is_not_local(self) -> None:
        """
        Tests that a lookup for media that is not local returns a HTTPStatus.BAD_REQUEST.
        """
        url = "/_synapse/admin/v1/media/%s/delete" % "unknown_domain"

        channel = self.make_request(
            "POST",
            url + f"?before_ts={VALID_TIMESTAMP}",
            access_token=self.admin_user_tok,
        )

        self.assertEqual(HTTPStatus.BAD_REQUEST, channel.code, msg=channel.json_body)
        self.assertEqual("Can only delete local media", channel.json_body["error"])

    def test_missing_parameter(self) -> None:
        """
        If the parameter `before_ts` is missing, an error is returned.
        """
        channel = self.make_request(
            "POST",
            self.url,
            access_token=self.admin_user_tok,
        )

        self.assertEqual(
            HTTPStatus.BAD_REQUEST,
            channel.code,
            msg=channel.json_body,
        )
        self.assertEqual(Codes.MISSING_PARAM, channel.json_body["errcode"])
        self.assertEqual(
            "Missing integer query parameter 'before_ts'", channel.json_body["error"]
        )

    def test_invalid_parameter(self) -> None:
        """
        If parameters are invalid, an error is returned.
        """
        # Negative `before_ts` is rejected.
        channel = self.make_request(
            "POST",
            self.url + "?before_ts=-1234",
            access_token=self.admin_user_tok,
        )

        self.assertEqual(
            HTTPStatus.BAD_REQUEST,
            channel.code,
            msg=channel.json_body,
        )
        self.assertEqual(Codes.INVALID_PARAM, channel.json_body["errcode"])
        self.assertEqual(
            "Query parameter before_ts must be a positive integer.",
            channel.json_body["error"],
        )

        # A seconds-resolution timestamp is rejected (must be milliseconds).
        channel = self.make_request(
            "POST",
            self.url + f"?before_ts={INVALID_TIMESTAMP_IN_S}",
            access_token=self.admin_user_tok,
        )

        self.assertEqual(
            HTTPStatus.BAD_REQUEST,
            channel.code,
            msg=channel.json_body,
        )
        self.assertEqual(Codes.INVALID_PARAM, channel.json_body["errcode"])
        self.assertEqual(
            "Query parameter before_ts you provided is from the year 1970. "
            + "Double check that you are providing a timestamp in milliseconds.",
            channel.json_body["error"],
        )

        # Negative `size_gt` is rejected.
        channel = self.make_request(
            "POST",
            self.url + f"?before_ts={VALID_TIMESTAMP}&size_gt=-1234",
            access_token=self.admin_user_tok,
        )

        self.assertEqual(
            HTTPStatus.BAD_REQUEST,
            channel.code,
            msg=channel.json_body,
        )
        self.assertEqual(Codes.INVALID_PARAM, channel.json_body["errcode"])
        self.assertEqual(
            "Query parameter size_gt must be a string representing a positive integer.",
            channel.json_body["error"],
        )

        # Non-boolean `keep_profiles` is rejected.
        channel = self.make_request(
            "POST",
            self.url + f"?before_ts={VALID_TIMESTAMP}&keep_profiles=not_bool",
            access_token=self.admin_user_tok,
        )

        self.assertEqual(
            HTTPStatus.BAD_REQUEST,
            channel.code,
            msg=channel.json_body,
        )
        self.assertEqual(Codes.INVALID_PARAM, channel.json_body["errcode"])
        self.assertEqual(
            "Boolean query parameter 'keep_profiles' must be one of ['true', 'false']",
            channel.json_body["error"],
        )

    def test_delete_media_never_accessed(self) -> None:
        """
        Tests that media is deleted if it is older than `before_ts` and was
        never accessed (`last_access_ts` is `NULL` and `created_ts` < `before_ts`).
        """
        # upload and do not access
        server_and_media_id = self._create_media()
        self.pump(1.0)

        # test that the file exists
        media_id = server_and_media_id.split("/")[1]
        local_path = self.filepaths.local_media_filepath(media_id)
        self.assertTrue(os.path.exists(local_path))

        # timestamp after upload/create
        now_ms = self.clock.time_msec()
        channel = self.make_request(
            "POST",
            self.url + "?before_ts=" + str(now_ms),
            access_token=self.admin_user_tok,
        )
        self.assertEqual(HTTPStatus.OK, channel.code, msg=channel.json_body)
        self.assertEqual(1, channel.json_body["total"])
        self.assertEqual(
            media_id,
            channel.json_body["deleted_media"][0],
        )

        self._access_media(server_and_media_id, False)

    def test_keep_media_by_date(self) -> None:
        """
        Tests that media is not deleted if it is newer than `before_ts`.
        """
        # timestamp before upload
        now_ms = self.clock.time_msec()
        server_and_media_id = self._create_media()
        self._access_media(server_and_media_id)

        channel = self.make_request(
            "POST",
            self.url + "?before_ts=" + str(now_ms),
            access_token=self.admin_user_tok,
        )
        self.assertEqual(HTTPStatus.OK, channel.code, msg=channel.json_body)
        self.assertEqual(0, channel.json_body["total"])

        self._access_media(server_and_media_id)

        # timestamp after upload
        now_ms = self.clock.time_msec()
        channel = self.make_request(
            "POST",
            self.url + "?before_ts=" + str(now_ms),
            access_token=self.admin_user_tok,
        )
        self.assertEqual(HTTPStatus.OK, channel.code, msg=channel.json_body)
        self.assertEqual(1, channel.json_body["total"])
        self.assertEqual(
            server_and_media_id.split("/")[1],
            channel.json_body["deleted_media"][0],
        )

        self._access_media(server_and_media_id, False)

    def test_keep_media_by_size(self) -> None:
        """
        Tests that media is not deleted if its size is smaller than or equal
        to `size_gt`.
        """
        server_and_media_id = self._create_media()
        self._access_media(server_and_media_id)

        # SMALL_PNG is 67 bytes, so size_gt=67 keeps it...
        now_ms = self.clock.time_msec()
        channel = self.make_request(
            "POST",
            self.url + "?before_ts=" + str(now_ms) + "&size_gt=67",
            access_token=self.admin_user_tok,
        )
        self.assertEqual(HTTPStatus.OK, channel.code, msg=channel.json_body)
        self.assertEqual(0, channel.json_body["total"])

        self._access_media(server_and_media_id)

        # ...and size_gt=66 deletes it.
        now_ms = self.clock.time_msec()
        channel = self.make_request(
            "POST",
            self.url + "?before_ts=" + str(now_ms) + "&size_gt=66",
            access_token=self.admin_user_tok,
        )
        self.assertEqual(HTTPStatus.OK, channel.code, msg=channel.json_body)
        self.assertEqual(1, channel.json_body["total"])
        self.assertEqual(
            server_and_media_id.split("/")[1],
            channel.json_body["deleted_media"][0],
        )

        self._access_media(server_and_media_id, False)

    def test_keep_media_by_user_avatar(self) -> None:
        """
        Tests that we do not delete media if it is used as a user avatar.
        Tests parameter `keep_profiles`.
        """
        server_and_media_id = self._create_media()
        self._access_media(server_and_media_id)

        # set media as avatar
        channel = self.make_request(
            "PUT",
            "/profile/%s/avatar_url" % (self.admin_user,),
            content={"avatar_url": "mxc://%s" % (server_and_media_id,)},
            access_token=self.admin_user_tok,
        )
        self.assertEqual(HTTPStatus.OK, channel.code, msg=channel.json_body)

        # keep_profiles=true protects the avatar media from deletion...
        now_ms = self.clock.time_msec()
        channel = self.make_request(
            "POST",
            self.url + "?before_ts=" + str(now_ms) + "&keep_profiles=true",
            access_token=self.admin_user_tok,
        )
        self.assertEqual(HTTPStatus.OK, channel.code, msg=channel.json_body)
        self.assertEqual(0, channel.json_body["total"])

        self._access_media(server_and_media_id)

        # ...and keep_profiles=false does not.
        now_ms = self.clock.time_msec()
        channel = self.make_request(
            "POST",
            self.url + "?before_ts=" + str(now_ms) + "&keep_profiles=false",
            access_token=self.admin_user_tok,
        )
        self.assertEqual(HTTPStatus.OK, channel.code, msg=channel.json_body)
        self.assertEqual(1, channel.json_body["total"])
        self.assertEqual(
            server_and_media_id.split("/")[1],
            channel.json_body["deleted_media"][0],
        )

        self._access_media(server_and_media_id, False)

    def test_keep_media_by_room_avatar(self) -> None:
        """
        Tests that we do not delete media if it is used as a room avatar.
        Tests parameter `keep_profiles`.
        """
        server_and_media_id = self._create_media()
        self._access_media(server_and_media_id)

        # set media as room avatar
        room_id = self.helper.create_room_as(self.admin_user, tok=self.admin_user_tok)
        channel = self.make_request(
            "PUT",
            "/rooms/%s/state/m.room.avatar" % (room_id,),
            content={"url": "mxc://%s" % (server_and_media_id,)},
            access_token=self.admin_user_tok,
        )
        self.assertEqual(HTTPStatus.OK, channel.code, msg=channel.json_body)

        # keep_profiles=true protects the room-avatar media from deletion...
        now_ms = self.clock.time_msec()
        channel = self.make_request(
            "POST",
            self.url + "?before_ts=" + str(now_ms) + "&keep_profiles=true",
            access_token=self.admin_user_tok,
        )
        self.assertEqual(HTTPStatus.OK, channel.code, msg=channel.json_body)
        self.assertEqual(0, channel.json_body["total"])

        self._access_media(server_and_media_id)

        # ...and keep_profiles=false does not.
        now_ms = self.clock.time_msec()
        channel = self.make_request(
            "POST",
            self.url + "?before_ts=" + str(now_ms) + "&keep_profiles=false",
            access_token=self.admin_user_tok,
        )
        self.assertEqual(HTTPStatus.OK, channel.code, msg=channel.json_body)
        self.assertEqual(1, channel.json_body["total"])
        self.assertEqual(
            server_and_media_id.split("/")[1],
            channel.json_body["deleted_media"][0],
        )

        self._access_media(server_and_media_id, False)

    def _create_media(self) -> str:
        """
        Upload a media item and return its `server_and_media_id`
        (`<server_name>/<media_id>`).
        """
        upload_resource = self.media_repo.children[b"upload"]

        # Upload some media into the room
        response = self.helper.upload_media(
            upload_resource,
            SMALL_PNG,
            tok=self.admin_user_tok,
            expect_code=HTTPStatus.OK,
        )
        # Extract media ID from the response
        server_and_media_id = response["content_uri"][6:]  # Cut off 'mxc://'
        server_name = server_and_media_id.split("/")[0]

        # Check that new media is a local and not remote
        self.assertEqual(server_name, self.server_name)

        return server_and_media_id

    def _access_media(
        self, server_and_media_id: str, expect_success: bool = True
    ) -> None:
        """
        Try to access a media item and check both the HTTP result and whether
        the file still exists on disk.
        """
        download_resource = self.media_repo.children[b"download"]

        media_id = server_and_media_id.split("/")[1]
        local_path = self.filepaths.local_media_filepath(media_id)

        channel = make_request(
            self.reactor,
            FakeSite(download_resource, self.reactor),
            "GET",
            server_and_media_id,
            shorthand=False,
            access_token=self.admin_user_tok,
        )

        if expect_success:
            self.assertEqual(
                HTTPStatus.OK,
                channel.code,
                msg=(
                    "Expected to receive a HTTPStatus.OK on accessing media: %s"
                    % server_and_media_id
                ),
            )
            # Test that the file exists
            self.assertTrue(os.path.exists(local_path))
        else:
            self.assertEqual(
                HTTPStatus.NOT_FOUND,
                channel.code,
                msg=(
                    "Expected to receive a HTTPStatus.NOT_FOUND on accessing deleted media: %s"
                    % (server_and_media_id)
                ),
            )
            # Test that the file is deleted
            self.assertFalse(os.path.exists(local_path))
class QuarantineMediaByIDTestCase(unittest.HomeserverTestCase):
    """Tests for the admin endpoints that quarantine / unquarantine a single
    media item:
    POST /_synapse/admin/v1/media/(un)quarantine/<server_name>/<media_id>.
    """

    servlets = [
        synapse.rest.admin.register_servlets,
        synapse.rest.admin.register_servlets_for_media_repo,
        login.register_servlets,
    ]

    def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None:
        media_repo = hs.get_media_repository_resource()
        self.store = hs.get_datastore()
        self.server_name = hs.hostname

        self.admin_user = self.register_user("admin", "pass", admin=True)
        self.admin_user_tok = self.login("admin", "pass")

        # Create media
        upload_resource = media_repo.children[b"upload"]

        # Upload some media into the room
        response = self.helper.upload_media(
            upload_resource,
            SMALL_PNG,
            tok=self.admin_user_tok,
            expect_code=HTTPStatus.OK,
        )

        # Extract media ID from the response
        server_and_media_id = response["content_uri"][6:]  # Cut off 'mxc://'
        self.media_id = server_and_media_id.split("/")[1]

        # Template filled with (action, server_name, media_id).
        self.url = "/_synapse/admin/v1/media/%s/%s/%s"

    @parameterized.expand(["quarantine", "unquarantine"])
    def test_no_auth(self, action: str) -> None:
        """
        Try to quarantine/unquarantine media without authentication.
        """
        channel = self.make_request(
            "POST",
            self.url % (action, self.server_name, self.media_id),
            b"{}",
        )

        self.assertEqual(
            HTTPStatus.UNAUTHORIZED,
            channel.code,
            msg=channel.json_body,
        )
        self.assertEqual(Codes.MISSING_TOKEN, channel.json_body["errcode"])

    @parameterized.expand(["quarantine", "unquarantine"])
    def test_requester_is_no_admin(self, action: str) -> None:
        """
        If the user is not a server admin, an error is returned.
        """
        self.other_user = self.register_user("user", "pass")
        self.other_user_token = self.login("user", "pass")

        channel = self.make_request(
            "POST",
            self.url % (action, self.server_name, self.media_id),
            access_token=self.other_user_token,
        )

        self.assertEqual(
            HTTPStatus.FORBIDDEN,
            channel.code,
            msg=channel.json_body,
        )
        self.assertEqual(Codes.FORBIDDEN, channel.json_body["errcode"])

    def test_quarantine_media(self) -> None:
        """
        Tests that quarantining media and removing it from quarantine succeeds.
        """
        media_info = self.get_success(self.store.get_local_media(self.media_id))
        self.assertFalse(media_info["quarantined_by"])

        # quarantining
        channel = self.make_request(
            "POST",
            self.url % ("quarantine", self.server_name, self.media_id),
            access_token=self.admin_user_tok,
        )

        self.assertEqual(HTTPStatus.OK, channel.code, msg=channel.json_body)
        # Endpoint returns an empty JSON body on success.
        self.assertFalse(channel.json_body)

        media_info = self.get_success(self.store.get_local_media(self.media_id))
        self.assertTrue(media_info["quarantined_by"])

        # remove from quarantine
        channel = self.make_request(
            "POST",
            self.url % ("unquarantine", self.server_name, self.media_id),
            access_token=self.admin_user_tok,
        )

        self.assertEqual(HTTPStatus.OK, channel.code, msg=channel.json_body)
        self.assertFalse(channel.json_body)

        media_info = self.get_success(self.store.get_local_media(self.media_id))
        self.assertFalse(media_info["quarantined_by"])

    def test_quarantine_protected_media(self) -> None:
        """
        Tests that quarantining protected media fails silently (the media
        stays out of quarantine).
        """
        # protect
        self.get_success(self.store.mark_local_media_as_safe(self.media_id, safe=True))

        # verify protection
        media_info = self.get_success(self.store.get_local_media(self.media_id))
        self.assertTrue(media_info["safe_from_quarantine"])

        # quarantining
        channel = self.make_request(
            "POST",
            self.url % ("quarantine", self.server_name, self.media_id),
            access_token=self.admin_user_tok,
        )

        self.assertEqual(HTTPStatus.OK, channel.code, msg=channel.json_body)
        self.assertFalse(channel.json_body)

        # verify that it is not in quarantine
        media_info = self.get_success(self.store.get_local_media(self.media_id))
        self.assertFalse(media_info["quarantined_by"])
class ProtectMediaByIDTestCase(unittest.HomeserverTestCase):
    """Tests for the admin endpoints that mark a media item as safe from
    quarantine (and undo that):
    POST /_synapse/admin/v1/media/(un)protect/<media_id>.
    """

    servlets = [
        synapse.rest.admin.register_servlets,
        synapse.rest.admin.register_servlets_for_media_repo,
        login.register_servlets,
    ]

    def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None:
        media_repo = hs.get_media_repository_resource()
        self.store = hs.get_datastore()

        self.admin_user = self.register_user("admin", "pass", admin=True)
        self.admin_user_tok = self.login("admin", "pass")

        # Create media
        upload_resource = media_repo.children[b"upload"]

        # Upload some media into the room
        response = self.helper.upload_media(
            upload_resource,
            SMALL_PNG,
            tok=self.admin_user_tok,
            expect_code=HTTPStatus.OK,
        )

        # Extract media ID from the response
        server_and_media_id = response["content_uri"][6:]  # Cut off 'mxc://'
        self.media_id = server_and_media_id.split("/")[1]

        # Template filled with (action, media_id).
        self.url = "/_synapse/admin/v1/media/%s/%s"

    @parameterized.expand(["protect", "unprotect"])
    def test_no_auth(self, action: str) -> None:
        """
        Try to protect media without authentication.
        """
        channel = self.make_request("POST", self.url % (action, self.media_id), b"{}")

        self.assertEqual(
            HTTPStatus.UNAUTHORIZED,
            channel.code,
            msg=channel.json_body,
        )
        self.assertEqual(Codes.MISSING_TOKEN, channel.json_body["errcode"])

    @parameterized.expand(["protect", "unprotect"])
    def test_requester_is_no_admin(self, action: str) -> None:
        """
        If the user is not a server admin, an error is returned.
        """
        self.other_user = self.register_user("user", "pass")
        self.other_user_token = self.login("user", "pass")

        channel = self.make_request(
            "POST",
            self.url % (action, self.media_id),
            access_token=self.other_user_token,
        )

        self.assertEqual(
            HTTPStatus.FORBIDDEN,
            channel.code,
            msg=channel.json_body,
        )
        self.assertEqual(Codes.FORBIDDEN, channel.json_body["errcode"])

    def test_protect_media(self) -> None:
        """
        Tests that protecting and unprotecting a media item succeeds.
        """
        media_info = self.get_success(self.store.get_local_media(self.media_id))
        self.assertFalse(media_info["safe_from_quarantine"])

        # protect
        channel = self.make_request(
            "POST",
            self.url % ("protect", self.media_id),
            access_token=self.admin_user_tok,
        )

        self.assertEqual(HTTPStatus.OK, channel.code, msg=channel.json_body)
        # Endpoint returns an empty JSON body on success.
        self.assertFalse(channel.json_body)

        media_info = self.get_success(self.store.get_local_media(self.media_id))
        self.assertTrue(media_info["safe_from_quarantine"])

        # unprotect
        channel = self.make_request(
            "POST",
            self.url % ("unprotect", self.media_id),
            access_token=self.admin_user_tok,
        )

        self.assertEqual(HTTPStatus.OK, channel.code, msg=channel.json_body)
        self.assertFalse(channel.json_body)

        media_info = self.get_success(self.store.get_local_media(self.media_id))
        self.assertFalse(media_info["safe_from_quarantine"])
class PurgeMediaCacheTestCase(unittest.HomeserverTestCase):
    """Tests for the admin endpoint that purges cached remote media:
    POST /_synapse/admin/v1/purge_media_cache.
    """

    servlets = [
        synapse.rest.admin.register_servlets,
        synapse.rest.admin.register_servlets_for_media_repo,
        login.register_servlets,
        profile.register_servlets,
        room.register_servlets,
    ]

    def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None:
        self.media_repo = hs.get_media_repository_resource()
        self.server_name = hs.hostname

        self.admin_user = self.register_user("admin", "pass", admin=True)
        self.admin_user_tok = self.login("admin", "pass")

        # Used to resolve on-disk paths so tests can check file (non-)existence.
        self.filepaths = MediaFilePaths(hs.config.media.media_store_path)
        self.url = "/_synapse/admin/v1/purge_media_cache"

    def test_no_auth(self) -> None:
        """
        Try to purge the media cache without authentication.
        """
        channel = self.make_request("POST", self.url, b"{}")

        self.assertEqual(
            HTTPStatus.UNAUTHORIZED,
            channel.code,
            msg=channel.json_body,
        )
        self.assertEqual(Codes.MISSING_TOKEN, channel.json_body["errcode"])

    def test_requester_is_not_admin(self) -> None:
        """
        If the user is not a server admin, an error is returned.
        """
        self.other_user = self.register_user("user", "pass")
        self.other_user_token = self.login("user", "pass")

        channel = self.make_request(
            "POST",
            self.url,
            access_token=self.other_user_token,
        )

        self.assertEqual(
            HTTPStatus.FORBIDDEN,
            channel.code,
            msg=channel.json_body,
        )
        self.assertEqual(Codes.FORBIDDEN, channel.json_body["errcode"])

    def test_invalid_parameter(self) -> None:
        """
        If parameters are invalid, an error is returned.
        """
        # Negative `before_ts` is rejected.
        channel = self.make_request(
            "POST",
            self.url + "?before_ts=-1234",
            access_token=self.admin_user_tok,
        )

        self.assertEqual(
            HTTPStatus.BAD_REQUEST,
            channel.code,
            msg=channel.json_body,
        )
        self.assertEqual(Codes.INVALID_PARAM, channel.json_body["errcode"])
        self.assertEqual(
            "Query parameter before_ts must be a positive integer.",
            channel.json_body["error"],
        )

        # A seconds-resolution timestamp is rejected (must be milliseconds).
        channel = self.make_request(
            "POST",
            self.url + f"?before_ts={INVALID_TIMESTAMP_IN_S}",
            access_token=self.admin_user_tok,
        )

        self.assertEqual(
            HTTPStatus.BAD_REQUEST,
            channel.code,
            msg=channel.json_body,
        )
        self.assertEqual(Codes.INVALID_PARAM, channel.json_body["errcode"])
        self.assertEqual(
            "Query parameter before_ts you provided is from the year 1970. "
            + "Double check that you are providing a timestamp in milliseconds.",
            channel.json_body["error"],
        )
| 33.5587
| 95
| 0.609089
| 3,733
| 32,015
| 4.983927
| 0.081704
| 0.069336
| 0.06853
| 0.037839
| 0.864929
| 0.852029
| 0.839774
| 0.821768
| 0.812309
| 0.806557
| 0
| 0.00644
| 0.287053
| 32,015
| 953
| 96
| 33.593914
| 0.808675
| 0.103108
| 0
| 0.761905
| 0
| 0
| 0.097372
| 0.01996
| 0
| 0
| 0
| 0
| 0.159754
| 1
| 0.049155
| false
| 0.030722
| 0.019969
| 0
| 0.086022
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
86d029180cd10d29e351215d6a8311d3a1ed6ece
| 2,889
|
py
|
Python
|
ibanity/api/Sandbox/FinancialInstitutionHolding.py
|
Felixver/ibanity-python
|
06637fd0aaed382c14d860d209b41d530c365a9f
|
[
"MIT"
] | 1
|
2020-08-27T16:50:23.000Z
|
2020-08-27T16:50:23.000Z
|
ibanity/api/Sandbox/FinancialInstitutionHolding.py
|
Felixver/ibanity-python
|
06637fd0aaed382c14d860d209b41d530c365a9f
|
[
"MIT"
] | 1
|
2020-08-27T13:59:31.000Z
|
2020-09-04T14:24:29.000Z
|
ibanity/api/Sandbox/FinancialInstitutionHolding.py
|
Felixver/ibanity-python
|
06637fd0aaed382c14d860d209b41d530c365a9f
|
[
"MIT"
] | null | null | null |
from collections import namedtuple
from ibanity import Ibanity
from ibanity.Flatten import flatten_json
def get_list(financial_institution_id, financial_institution_user_id, financial_institution_account_id, params=None):
    """List the holdings of a sandbox financial-institution account.

    :param financial_institution_id: substituted for {financialInstitutionId}
        in the API schema URI template.
    :param financial_institution_user_id: substituted for
        {financialInstitutionUserId}.
    :param financial_institution_account_id: substituted for
        {financialInstitutionAccountId}.
    :param params: optional dict of query parameters forwarded to the API
        call; defaults to no parameters.
    :return: list of flattened holding dicts (one per item in the response's
        "data" array).
    """
    # Fix: the previous signature used a mutable default (params={}), which is
    # created once and shared by every call that omits the argument.
    if params is None:
        params = {}
    uri = Ibanity.client.api_schema["sandbox"]["financialInstitution"]["financialInstitutionAccount"]["financialInstitutionHoldings"] \
        .replace("{financialInstitutionId}", financial_institution_id) \
        .replace("{financialInstitutionUserId}", financial_institution_user_id) \
        .replace("{financialInstitutionAccountId}", financial_institution_account_id) \
        .replace("{financialInstitutionHoldingId}", "")
    response = Ibanity.client.get(uri, params, None)
    # Comprehension instead of list(map(lambda ...)) — same result, clearer.
    return [flatten_json(holding) for holding in response["data"]]
def create(financial_institution_id, financial_institution_user_id, financial_institution_account_id, attributes):
    """Create a holding on a sandbox financial-institution account.

    Fills the holdings URI template from the API schema, POSTs a JSON:API
    payload of type "financialInstitutionHolding", and returns the created
    resource flattened to a plain dict.
    """
    template = Ibanity.client.api_schema["sandbox"]["financialInstitution"]["financialInstitutionAccount"]["financialInstitutionHoldings"]
    # Substitute every placeholder in the URI template (holding ID is empty
    # for the collection endpoint).
    substitutions = {
        "{financialInstitutionId}": financial_institution_id,
        "{financialInstitutionUserId}": financial_institution_user_id,
        "{financialInstitutionAccountId}": financial_institution_account_id,
        "{financialInstitutionHoldingId}": "",
    }
    uri = template
    for placeholder, value in substitutions.items():
        uri = uri.replace(placeholder, value)
    body = {
        "data": {
            "type": "financialInstitutionHolding",
            "attributes": attributes,
        }
    }
    response = Ibanity.client.post(uri, body, {}, None)
    return flatten_json(response["data"])
def delete(financial_institution_id, financial_institution_user_id, financial_institution_account_id, id):
    """Delete a holding from a sandbox financial-institution account.

    Fills the holdings URI template (including the holding's own ID), issues
    the DELETE, and returns the response's "data" flattened to a plain dict.
    """
    template = Ibanity.client.api_schema["sandbox"]["financialInstitution"]["financialInstitutionAccount"]["financialInstitutionHoldings"]
    substitutions = {
        "{financialInstitutionId}": financial_institution_id,
        "{financialInstitutionUserId}": financial_institution_user_id,
        "{financialInstitutionAccountId}": financial_institution_account_id,
        "{financialInstitutionHoldingId}": id,
    }
    uri = template
    for placeholder, value in substitutions.items():
        uri = uri.replace(placeholder, value)
    response = Ibanity.client.delete(uri, {}, None)
    return flatten_json(response["data"])
def find(financial_institution_id, financial_institution_user_id, financial_institution_account_id, id):
    """Fetch a single holding of a sandbox financial-institution account.

    Fills the holdings URI template (including the holding's own ID), issues
    the GET, and returns the response's "data" flattened to a plain dict.
    """
    template = Ibanity.client.api_schema["sandbox"]["financialInstitution"]["financialInstitutionAccount"]["financialInstitutionHoldings"]
    substitutions = {
        "{financialInstitutionId}": financial_institution_id,
        "{financialInstitutionUserId}": financial_institution_user_id,
        "{financialInstitutionAccountId}": financial_institution_account_id,
        "{financialInstitutionHoldingId}": id,
    }
    uri = template
    for placeholder, value in substitutions.items():
        uri = uri.replace(placeholder, value)
    response = Ibanity.client.get(uri, {}, None)
    return flatten_json(response["data"])
| 51.589286
| 135
| 0.741779
| 242
| 2,889
| 8.549587
| 0.181818
| 0.231996
| 0.085065
| 0.100532
| 0.855969
| 0.841469
| 0.841469
| 0.78782
| 0.78782
| 0.78782
| 0
| 0
| 0.14261
| 2,889
| 55
| 136
| 52.527273
| 0.835285
| 0
| 0
| 0.5
| 0
| 0
| 0.29259
| 0.243421
| 0
| 0
| 0
| 0
| 0
| 1
| 0.086957
| false
| 0
| 0.065217
| 0
| 0.23913
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
8115a4d15a2a1e9a577f07cd1c6c4b0d8088f68e
| 33,855
|
py
|
Python
|
WISDEM/wisdem/test/test_drivetrainse/test_drive_structure.py
|
ptrbortolotti/WEIS
|
1e4dbf6728050f75cee08cd483fe57c5614488fe
|
[
"Apache-2.0"
] | 81
|
2015-01-19T18:17:31.000Z
|
2022-03-17T07:14:43.000Z
|
WISDEM/wisdem/test/test_drivetrainse/test_drive_structure.py
|
ptrbortolotti/WEIS
|
1e4dbf6728050f75cee08cd483fe57c5614488fe
|
[
"Apache-2.0"
] | 159
|
2015-02-05T01:54:52.000Z
|
2022-03-30T22:44:39.000Z
|
WISDEM/wisdem/test/test_drivetrainse/test_drive_structure.py
|
ptrbortolotti/WEIS
|
1e4dbf6728050f75cee08cd483fe57c5614488fe
|
[
"Apache-2.0"
] | 70
|
2015-01-02T15:22:39.000Z
|
2022-02-11T00:33:07.000Z
|
import unittest
import numpy as np
import numpy.testing as npt
import wisdem.drivetrainse.layout as lay
import wisdem.drivetrainse.drive_structure as ds
from wisdem.commonse import gravity
npts = 12  # length of the bedplate wall-thickness input array (see setUp)
class TestDirectStructure(unittest.TestCase):
    def setUp(self):
        """Populate the input/output dicts with a representative drivetrain
        fixture (geometry, masses/inertias, bearing loads, material
        properties, and safety factors) used by the structure tests.
        """
        self.inputs = {}
        self.outputs = {}
        self.discrete_inputs = {}
        self.discrete_outputs = {}
        self.opt = {}

        self.discrete_inputs["upwind"] = True

        # Layout lengths and geometry [m unless noted].
        self.inputs["L_12"] = 2.0
        self.inputs["L_h1"] = 1.0
        self.inputs["L_generator"] = 3.25
        # self.inputs['L_2n'] = 1.5
        # self.inputs['L_grs'] = 1.1
        # self.inputs['L_gsn'] = 1.1
        self.inputs["L_hss"] = 0.75
        self.inputs["L_gearbox"] = 1.2
        self.inputs["overhang"] = 6.25
        self.inputs["drive_height"] = 4.875
        self.inputs["tilt"] = 4.0
        self.inputs["access_diameter"] = 0.9

        # Shaft/nose/bedplate sectional properties (arrays sized to the
        # discretization each component expects).
        myones = np.ones(5)
        self.inputs["lss_diameter"] = 3.3 * myones
        self.inputs["lss_wall_thickness"] = 0.45 * myones
        self.inputs["hss_diameter"] = 1.6 * np.ones(3)
        self.inputs["hss_wall_thickness"] = 0.25 * np.ones(3)
        self.inputs["nose_diameter"] = 2.2 * myones
        self.inputs["nose_wall_thickness"] = 0.1 * myones
        self.inputs["bedplate_wall_thickness"] = 0.06 * np.ones(npts)
        self.inputs["bedplate_flange_width"] = 1.5
        self.inputs["bedplate_flange_thickness"] = 0.05
        # self.inputs['bedplate_web_height'] = 1.0
        self.inputs["bedplate_web_thickness"] = 0.05
        self.inputs["D_top"] = 6.5
        self.inputs["hub_diameter"] = 4.0

        # Component masses [kg] and inertias.
        self.inputs["other_mass"] = 200e3
        self.inputs["mb1_mass"] = 10e3
        self.inputs["mb1_I"] = 10e3 * 0.5 * 2 ** 2 * np.ones(3)
        self.inputs["mb2_mass"] = 10e3
        self.inputs["mb2_I"] = 10e3 * 0.5 * 1.5 ** 2 * np.ones(3)
        self.inputs["mb1_max_defl_ang"] = 0.008
        self.inputs["mb2_max_defl_ang"] = 0.008

        self.inputs["m_stator"] = 100e3
        self.inputs["cm_stator"] = -0.3
        self.inputs["I_stator"] = np.array([1e6, 5e5, 5e5, 0.0, 0.0, 0.0])

        self.inputs["generator_rotor_mass"] = 100e3
        self.inputs["cm_rotor"] = -0.3
        self.inputs["generator_rotor_I"] = np.array([1e6, 5e5, 5e5, 0.0, 0.0, 0.0])
        self.inputs["generator_stator_mass"] = 100e3
        # NOTE(review): "cm_rotor" is assigned a second time here with the same
        # value; given the surrounding stator entries, a stator c.m. key may
        # have been intended — confirm against the component's inputs.
        self.inputs["cm_rotor"] = -0.3
        self.inputs["generator_stator_I"] = np.array([1e6, 5e5, 5e5, 0.0, 0.0, 0.0])

        self.inputs["generator_mass"] = 200e3
        self.inputs["generator_I"] = np.array([2e6, 1e6, 1e6, 0.0, 0.0, 0.0])
        self.inputs["gearbox_mass"] = 100e3
        self.inputs["gearbox_I"] = np.array([1e6, 5e5, 5e5])
        self.inputs["brake_mass"] = 10e3
        self.inputs["brake_I"] = np.array([1e4, 5e3, 5e3])
        self.inputs["carrier_mass"] = 10e3
        self.inputs["carrier_I"] = np.array([1e4, 5e3, 5e3])
        self.inputs["gear_ratio"] = 1.0

        # Bearing force/moment loads as (3, 1) column vectors.
        self.inputs["F_mb1"] = np.array([2409.750e3, -1716.429e3, 74.3529e3]).reshape((3, 1))
        self.inputs["F_mb2"] = np.array([2409.750e3, -1716.429e3, 74.3529e3]).reshape((3, 1))
        self.inputs["M_mb1"] = np.array([-1.83291e7, 6171.7324e3, 5785.82946e3]).reshape((3, 1))
        self.inputs["M_mb2"] = np.array([-1.83291e7, 6171.7324e3, 5785.82946e3]).reshape((3, 1))

        # Hub system properties and hub loads.
        self.inputs["hub_system_mass"] = 100e3
        self.inputs["hub_system_cm"] = 2.0
        self.inputs["hub_system_I"] = np.array([2409.750e3, -1716.429e3, 74.3529e3, 0.0, 0.0, 0.0])
        self.inputs["F_hub"] = np.array([2409.750e3, 0.0, 74.3529e2]).reshape((3, 1))
        self.inputs["M_hub"] = np.array([-1.83291e4, 6171.7324e2, 5785.82946e2]).reshape((3, 1))

        # Steel material properties shared by lss/hss/bedplate.
        self.inputs["lss_E"] = self.inputs["hss_E"] = self.inputs["bedplate_E"] = 210e9
        self.inputs["lss_G"] = self.inputs["hss_G"] = self.inputs["bedplate_G"] = 80.8e9
        self.inputs["lss_rho"] = self.inputs["hss_rho"] = self.inputs["bedplate_rho"] = 7850.0
        self.inputs["lss_Xy"] = self.inputs["hss_Xy"] = self.inputs["bedplate_Xy"] = 250e6

        # Partial safety factors.
        self.opt["gamma_f"] = 1.35
        self.opt["gamma_m"] = 1.3
        self.opt["gamma_n"] = 1.0
def compute_layout(self, direct=True):
    """Run the drivetrain layout component and chain its outputs back into the inputs.

    Parameters
    ----------
    direct : bool
        True runs ``lay.DirectLayout`` (direct-drive geometry); False runs
        ``lay.GearedLayout``.
    """
    if direct:
        layout = lay.DirectLayout()
    else:
        layout = lay.GearedLayout()
    layout.compute(self.inputs, self.outputs, self.discrete_inputs, self.discrete_outputs)
    # Feed every computed output back as an input so the structural
    # components under test see a consistent geometry.
    for key in self.outputs:
        self.inputs[key] = self.outputs[key]
def testBaseF_BaseM(self):
    """Base reaction forces/moments of the direct-drive frame, upwind, zero tilt.

    Starts from zero applied bearing loads, then incrementally adds extra mass,
    bearing moments, and bearing forces, checking after each step that the base
    reactions change by exactly the applied amount.
    """
    self.inputs["tilt"] = 0.0
    # Zero out all bearing loads so only gravity loads act initially.
    self.inputs["F_mb1"] = np.zeros(3).reshape((3, 1))
    self.inputs["F_mb2"] = np.zeros(3).reshape((3, 1))
    self.inputs["M_mb1"] = np.zeros(3).reshape((3, 1))
    self.inputs["M_mb2"] = np.zeros(3).reshape((3, 1))
    self.compute_layout()
    myobj = ds.Nose_Stator_Bedplate_Frame(modeling_options=self.opt, n_dlcs=1)
    myobj.compute(self.inputs, self.outputs, self.discrete_inputs, self.discrete_outputs)
    # With no applied loads and no tilt, only vertical gravity reactions remain.
    npt.assert_almost_equal(self.outputs["base_F"][:2], 0.0)
    npt.assert_almost_equal(self.outputs["base_M"][0], 0.0)
    npt.assert_almost_equal(self.outputs["base_M"][-1], 0.0)
    F0 = self.outputs["base_F"]
    M0 = self.outputs["base_M"]
    # Adding point mass should only change the vertical force by m*g.
    self.inputs["other_mass"] += 500e3
    myobj.compute(self.inputs, self.outputs, self.discrete_inputs, self.discrete_outputs)
    npt.assert_almost_equal(self.outputs["base_F"][:2], 0.0)
    npt.assert_almost_equal(self.outputs["base_F"][2], F0[2] - 500e3 * gravity)
    npt.assert_almost_equal(self.outputs["base_M"][0], 0.0)
    npt.assert_almost_equal(self.outputs["base_M"][1], M0[1])
    npt.assert_almost_equal(self.outputs["base_M"][2], 0.0)
    # An applied bearing moment should pass straight through to the base moment.
    self.inputs["M_mb1"] = 10e3 * np.arange(1, 4).reshape((3, 1))
    myobj.compute(self.inputs, self.outputs, self.discrete_inputs, self.discrete_outputs)
    npt.assert_almost_equal(self.outputs["base_F"][:2], 0.0)
    npt.assert_almost_equal(self.outputs["base_F"][2], F0[2] - 500e3 * gravity)
    npt.assert_almost_equal(self.outputs["base_M"], M0 + self.inputs["M_mb1"], decimal=0)
    # Moments from both bearings superpose (loose tolerance: decimal=-1).
    self.inputs["M_mb2"] = 20e3 * np.arange(1, 4).reshape((3, 1))
    myobj.compute(self.inputs, self.outputs, self.discrete_inputs, self.discrete_outputs)
    npt.assert_almost_equal(self.outputs["base_F"][:2], 0.0)
    npt.assert_almost_equal(self.outputs["base_F"][2], F0[2] - 500e3 * gravity)
    npt.assert_almost_equal(self.outputs["base_M"], M0 + self.inputs["M_mb1"] + self.inputs["M_mb2"], decimal=-1)
    # Bearing forces should sum directly into the base force.
    self.inputs["F_mb1"] = np.array([30e2, 40e2, 50e2]).reshape((3, 1))
    self.inputs["F_mb2"] = np.array([30e2, 40e2, 50e2]).reshape((3, 1))
    myobj.compute(self.inputs, self.outputs, self.discrete_inputs, self.discrete_outputs)
    npt.assert_almost_equal(self.outputs["base_F"][:2], 2 * self.inputs["F_mb2"][:2])
    npt.assert_almost_equal(self.outputs["base_F"][2], F0[2] - 500e3 * gravity + 2 * 50e2)
def testBaseF_BaseM_withTilt(self):
    """Same base-reaction checks as testBaseF_BaseM but with a 5 deg shaft tilt.

    With tilt, only the y-components can be checked exactly; the x/z components
    are checked to looser tolerances because the tilt rotation mixes them.
    """
    self.inputs["tilt"] = 5.0
    # Zero all bearing loads so only gravity acts initially.
    self.inputs["F_mb1"] = np.zeros(3).reshape((3, 1))
    self.inputs["F_mb2"] = np.zeros(3).reshape((3, 1))
    self.inputs["M_mb1"] = np.zeros(3).reshape((3, 1))
    self.inputs["M_mb2"] = np.zeros(3).reshape((3, 1))
    self.compute_layout()
    myobj = ds.Nose_Stator_Bedplate_Frame(modeling_options=self.opt, n_dlcs=1)
    myobj.compute(self.inputs, self.outputs, self.discrete_inputs, self.discrete_outputs)
    npt.assert_almost_equal(self.outputs["base_F"][:2], 0.0, decimal=2)
    npt.assert_almost_equal(self.outputs["base_M"][0], 0.0)
    npt.assert_almost_equal(self.outputs["base_M"][-1], 0.0)
    F0 = self.outputs["base_F"]
    M0 = self.outputs["base_M"]
    # Extra mass only changes the vertical reaction by m*g.
    self.inputs["other_mass"] += 500e3
    myobj.compute(self.inputs, self.outputs, self.discrete_inputs, self.discrete_outputs)
    npt.assert_almost_equal(self.outputs["base_F"][:2], 0.0, decimal=2)
    npt.assert_almost_equal(self.outputs["base_F"][2], F0[2] - 500e3 * gravity)
    npt.assert_almost_equal(self.outputs["base_M"][0], 0.0)
    npt.assert_almost_equal(self.outputs["base_M"][1], M0[1])
    npt.assert_almost_equal(self.outputs["base_M"][2], 0.0)
    # Applied bearing moment: only the y-component is checked exactly under tilt.
    self.inputs["M_mb1"] = 10e3 * np.arange(1, 4).reshape((3, 1))
    myobj.compute(self.inputs, self.outputs, self.discrete_inputs, self.discrete_outputs)
    npt.assert_almost_equal(self.outputs["base_F"][:2], 0.0, decimal=2)
    npt.assert_almost_equal(self.outputs["base_F"][2], F0[2] - 500e3 * gravity)
    npt.assert_almost_equal(self.outputs["base_M"][1], M0[1] + self.inputs["M_mb1"][1], decimal=0)
    # Both bearing moments superpose in the y-component (loose tolerance).
    self.inputs["M_mb2"] = 20e3 * np.arange(1, 4).reshape((3, 1))
    myobj.compute(self.inputs, self.outputs, self.discrete_inputs, self.discrete_outputs)
    npt.assert_almost_equal(self.outputs["base_F"][:2], 0.0, decimal=2)
    npt.assert_almost_equal(self.outputs["base_F"][2], F0[2] - 500e3 * gravity)
    npt.assert_almost_equal(
        self.outputs["base_M"][1], M0[1] + self.inputs["M_mb1"][1] + self.inputs["M_mb2"][1], decimal=-1
    )
    # Bearing forces: y is unaffected by tilt so it can be checked exactly.
    self.inputs["F_mb1"] = np.array([30e2, 40e2, 50e2]).reshape((3, 1))
    self.inputs["F_mb2"] = np.array([30e2, 40e2, 50e2]).reshape((3, 1))
    myobj.compute(self.inputs, self.outputs, self.discrete_inputs, self.discrete_outputs)
    npt.assert_almost_equal(self.outputs["base_F"][1], 2 * self.inputs["F_mb2"][1])
def testBaseF_BaseM_Downwind(self):
    """Base-reaction checks for a downwind rotor configuration, zero tilt.

    Mirrors testBaseF_BaseM with ``discrete_inputs['upwind'] = False``; the
    expected reaction increments are identical because the applied loads are
    symmetric.
    """
    self.inputs["tilt"] = 0.0
    self.discrete_inputs["upwind"] = False
    # Zero all bearing loads so only gravity acts initially.
    self.inputs["F_mb1"] = np.zeros(3).reshape((3, 1))
    self.inputs["F_mb2"] = np.zeros(3).reshape((3, 1))
    self.inputs["M_mb1"] = np.zeros(3).reshape((3, 1))
    self.inputs["M_mb2"] = np.zeros(3).reshape((3, 1))
    self.compute_layout()
    myobj = ds.Nose_Stator_Bedplate_Frame(modeling_options=self.opt, n_dlcs=1)
    myobj.compute(self.inputs, self.outputs, self.discrete_inputs, self.discrete_outputs)
    npt.assert_almost_equal(self.outputs["base_F"][:2], 0.0)
    npt.assert_almost_equal(self.outputs["base_M"][0], 0.0)
    npt.assert_almost_equal(self.outputs["base_M"][-1], 0.0)
    F0 = self.outputs["base_F"]
    M0 = self.outputs["base_M"]
    # Extra mass only changes the vertical reaction by m*g.
    self.inputs["other_mass"] += 500e3
    myobj.compute(self.inputs, self.outputs, self.discrete_inputs, self.discrete_outputs)
    npt.assert_almost_equal(self.outputs["base_F"][:2], 0.0)
    npt.assert_almost_equal(self.outputs["base_F"][2], F0[2] - 500e3 * gravity)
    npt.assert_almost_equal(self.outputs["base_M"][0], 0.0)
    npt.assert_almost_equal(self.outputs["base_M"][1], M0[1])
    npt.assert_almost_equal(self.outputs["base_M"][2], 0.0)
    # Applied bearing moment passes straight through to the base moment.
    self.inputs["M_mb1"] = 10e3 * np.arange(1, 4).reshape((3, 1))
    myobj.compute(self.inputs, self.outputs, self.discrete_inputs, self.discrete_outputs)
    npt.assert_almost_equal(self.outputs["base_F"][:2], 0.0)
    npt.assert_almost_equal(self.outputs["base_F"][2], F0[2] - 500e3 * gravity)
    npt.assert_almost_equal(self.outputs["base_M"], M0 + self.inputs["M_mb1"], decimal=0)
    # Both bearing moments superpose (loose tolerance: decimal=-1).
    self.inputs["M_mb2"] = 20e3 * np.arange(1, 4).reshape((3, 1))
    myobj.compute(self.inputs, self.outputs, self.discrete_inputs, self.discrete_outputs)
    npt.assert_almost_equal(self.outputs["base_F"][:2], 0.0)
    npt.assert_almost_equal(self.outputs["base_F"][2], F0[2] - 500e3 * gravity)
    npt.assert_almost_equal(self.outputs["base_M"], M0 + self.inputs["M_mb1"] + self.inputs["M_mb2"], decimal=-1)
    # Bearing forces should sum directly into the base force.
    self.inputs["F_mb1"] = np.array([30e2, 40e2, 50e2]).reshape((3, 1))
    self.inputs["F_mb2"] = np.array([30e2, 40e2, 50e2]).reshape((3, 1))
    myobj.compute(self.inputs, self.outputs, self.discrete_inputs, self.discrete_outputs)
    npt.assert_almost_equal(self.outputs["base_F"][:2], 2 * self.inputs["F_mb2"][:2])
    npt.assert_almost_equal(self.outputs["base_F"][2], F0[2] - 500e3 * gravity + 2 * 50e2)
def testBaseF_BaseM_withTilt_Downwind(self):
    """Base-reaction checks for a downwind rotor with 5 deg tilt.

    Mirrors testBaseF_BaseM_withTilt with ``upwind = False``; under tilt only
    the y-components are checked exactly, others to loose tolerances.
    """
    self.inputs["tilt"] = 5.0
    self.discrete_inputs["upwind"] = False
    # Zero all bearing loads so only gravity acts initially.
    self.inputs["F_mb1"] = np.zeros(3).reshape((3, 1))
    self.inputs["F_mb2"] = np.zeros(3).reshape((3, 1))
    self.inputs["M_mb1"] = np.zeros(3).reshape((3, 1))
    self.inputs["M_mb2"] = np.zeros(3).reshape((3, 1))
    self.compute_layout()
    myobj = ds.Nose_Stator_Bedplate_Frame(modeling_options=self.opt, n_dlcs=1)
    myobj.compute(self.inputs, self.outputs, self.discrete_inputs, self.discrete_outputs)
    npt.assert_almost_equal(self.outputs["base_F"][:2], 0.0, decimal=2)
    npt.assert_almost_equal(self.outputs["base_M"][0], 0.0)
    npt.assert_almost_equal(self.outputs["base_M"][-1], 0.0)
    F0 = self.outputs["base_F"]
    M0 = self.outputs["base_M"]
    # Extra mass only changes the vertical reaction by m*g.
    self.inputs["other_mass"] += 500e3
    myobj.compute(self.inputs, self.outputs, self.discrete_inputs, self.discrete_outputs)
    npt.assert_almost_equal(self.outputs["base_F"][:2], 0.0, decimal=2)
    npt.assert_almost_equal(self.outputs["base_F"][2], F0[2] - 500e3 * gravity)
    npt.assert_almost_equal(self.outputs["base_M"][0], 0.0)
    npt.assert_almost_equal(self.outputs["base_M"][1], M0[1])
    npt.assert_almost_equal(self.outputs["base_M"][2], 0.0)
    # Applied bearing moment: only the y-component is checked exactly under tilt.
    self.inputs["M_mb1"] = 10e3 * np.arange(1, 4).reshape((3, 1))
    myobj.compute(self.inputs, self.outputs, self.discrete_inputs, self.discrete_outputs)
    npt.assert_almost_equal(self.outputs["base_F"][:2], 0.0, decimal=2)
    npt.assert_almost_equal(self.outputs["base_F"][2], F0[2] - 500e3 * gravity)
    npt.assert_almost_equal(self.outputs["base_M"][1], M0[1] + self.inputs["M_mb1"][1], decimal=0)
    # Both bearing moments superpose in the y-component (loose tolerance).
    self.inputs["M_mb2"] = 20e3 * np.arange(1, 4).reshape((3, 1))
    myobj.compute(self.inputs, self.outputs, self.discrete_inputs, self.discrete_outputs)
    npt.assert_almost_equal(self.outputs["base_F"][:2], 0.0, decimal=2)
    npt.assert_almost_equal(self.outputs["base_F"][2], F0[2] - 500e3 * gravity)
    npt.assert_almost_equal(
        self.outputs["base_M"][1], M0[1] + self.inputs["M_mb1"][1] + self.inputs["M_mb2"][1], decimal=-1
    )
    # Bearing forces: y is unaffected by tilt so it can be checked exactly.
    self.inputs["F_mb1"] = np.array([30e2, 40e2, 50e2]).reshape((3, 1))
    self.inputs["F_mb2"] = np.array([30e2, 40e2, 50e2]).reshape((3, 1))
    myobj.compute(self.inputs, self.outputs, self.discrete_inputs, self.discrete_outputs)
    npt.assert_almost_equal(self.outputs["base_F"][1], 2 * self.inputs["F_mb2"][1])
def testBaseF_BaseM_Geared(self):
    """Base-reaction checks for the geared (I-beam bedplate) frame, zero tilt.

    The geared layout carries four load application points (two main bearings,
    gearbox torque arms, generator), so applied forces are expected to sum
    four-fold into the base reaction.
    """
    self.inputs["tilt"] = 0.0
    # Zero all applied loads so only gravity acts initially.
    self.inputs["F_mb1"] = np.zeros(3).reshape((3, 1))
    self.inputs["F_mb2"] = np.zeros(3).reshape((3, 1))
    self.inputs["F_torq"] = np.zeros(3).reshape((3, 1))
    self.inputs["F_generator"] = np.zeros(3).reshape((3, 1))
    self.inputs["M_mb1"] = np.zeros(3).reshape((3, 1))
    self.inputs["M_mb2"] = np.zeros(3).reshape((3, 1))
    self.inputs["M_torq"] = np.zeros(3).reshape((3, 1))
    self.inputs["M_generator"] = np.zeros(3).reshape((3, 1))
    self.compute_layout(False)
    myobj = ds.Bedplate_IBeam_Frame(modeling_options=self.opt, n_dlcs=1)
    myobj.compute(self.inputs, self.outputs, self.discrete_inputs, self.discrete_outputs)
    npt.assert_almost_equal(self.outputs["base_F"][:2, 0], 0.0, decimal=2)
    npt.assert_almost_equal(self.outputs["base_M"][[0, 2], 0], 0.0, decimal=2)
    F0 = self.outputs["base_F"][:, 0]
    M0 = self.outputs["base_M"][:, 0]
    # Extra mass only changes the vertical reaction by m*g.
    self.inputs["other_mass"] += 500e3
    myobj.compute(self.inputs, self.outputs, self.discrete_inputs, self.discrete_outputs)
    npt.assert_almost_equal(self.outputs["base_F"][:2, 0], 0.0, decimal=2)
    npt.assert_almost_equal(self.outputs["base_F"][2, 0], F0[2] - 500e3 * gravity)
    npt.assert_almost_equal(self.outputs["base_M"][[0, 2], 0], 0.0, decimal=2)
    npt.assert_almost_equal(self.outputs["base_M"][1], M0[1])
    # Bearing moments should not change the base force (moment check disabled below).
    self.inputs["M_mb1"] = 10e3 * np.arange(1, 4).reshape((3, 1))
    self.inputs["M_mb2"] = 20e3 * np.arange(1, 4).reshape((3, 1))
    myobj.compute(self.inputs, self.outputs, self.discrete_inputs, self.discrete_outputs)
    npt.assert_almost_equal(self.outputs["base_F"][:2, 0], 0.0, decimal=2)
    npt.assert_almost_equal(self.outputs["base_F"][2, 0], F0[2] - 500e3 * gravity, decimal=0)
    # npt.assert_almost_equal(self.outputs['base_M'], M0+self.inputs['M_mb1']+self.inputs['M_mb2'], decimal=-1)
    # Identical forces at all four attachment points sum four-fold at the base.
    self.inputs["F_mb1"] = self.inputs["F_mb2"] = self.inputs["F_generator"] = self.inputs["F_torq"] = np.array(
        [30e2, 40e2, 50e2]
    ).reshape((3, 1))
    myobj.compute(self.inputs, self.outputs, self.discrete_inputs, self.discrete_outputs)
    npt.assert_almost_equal(self.outputs["base_F"][:2, 0], 4 * self.inputs["F_mb1"][:2, 0], decimal=1)
    npt.assert_almost_equal(self.outputs["base_F"][2, 0], F0[2] - 500e3 * gravity + 4 * 50e2, decimal=0)
def testBaseF_BaseM_withTilt_Geared(self):
    """Base-reaction checks for the geared frame with 5 deg tilt.

    Mirrors testBaseF_BaseM_Geared; under tilt only the y-component of the
    final four-fold force sum is checked exactly.
    """
    self.inputs["tilt"] = 5.0
    # Zero all applied loads so only gravity acts initially.
    self.inputs["F_mb1"] = np.zeros(3).reshape((3, 1))
    self.inputs["F_mb2"] = np.zeros(3).reshape((3, 1))
    self.inputs["F_torq"] = np.zeros(3).reshape((3, 1))
    self.inputs["F_generator"] = np.zeros(3).reshape((3, 1))
    self.inputs["M_mb1"] = np.zeros(3).reshape((3, 1))
    self.inputs["M_mb2"] = np.zeros(3).reshape((3, 1))
    self.inputs["M_torq"] = np.zeros(3).reshape((3, 1))
    self.inputs["M_generator"] = np.zeros(3).reshape((3, 1))
    self.compute_layout(False)
    myobj = ds.Bedplate_IBeam_Frame(modeling_options=self.opt, n_dlcs=1)
    myobj.compute(self.inputs, self.outputs, self.discrete_inputs, self.discrete_outputs)
    npt.assert_almost_equal(self.outputs["base_F"][:2, 0], 0.0, decimal=2)
    npt.assert_almost_equal(self.outputs["base_M"][[0, 2], 0], 0.0, decimal=2)
    F0 = self.outputs["base_F"][:, 0]
    M0 = self.outputs["base_M"][:, 0]
    # Extra mass only changes the vertical reaction by m*g.
    self.inputs["other_mass"] += 500e3
    myobj.compute(self.inputs, self.outputs, self.discrete_inputs, self.discrete_outputs)
    npt.assert_almost_equal(self.outputs["base_F"][:2, 0], 0.0, decimal=1)
    npt.assert_almost_equal(self.outputs["base_F"][2, 0], F0[2] - 500e3 * gravity)
    npt.assert_almost_equal(self.outputs["base_M"][[0, 2], 0], 0.0, decimal=2)
    npt.assert_almost_equal(self.outputs["base_M"][1], M0[1])
    # Bearing moments should not change the base force (moment check disabled below).
    self.inputs["M_mb1"] = 10e3 * np.arange(1, 4).reshape((3, 1))
    self.inputs["M_mb2"] = 20e3 * np.arange(1, 4).reshape((3, 1))
    myobj.compute(self.inputs, self.outputs, self.discrete_inputs, self.discrete_outputs)
    npt.assert_almost_equal(self.outputs["base_F"][:2, 0], 0.0, decimal=1)
    npt.assert_almost_equal(self.outputs["base_F"][2, 0], F0[2] - 500e3 * gravity, decimal=0)
    # npt.assert_almost_equal(self.outputs['base_M'], M0+self.inputs['M_mb1']+self.inputs['M_mb2'], decimal=-1)
    # Identical forces at all four attachment points: only y is tilt-invariant.
    self.inputs["F_mb1"] = self.inputs["F_mb2"] = self.inputs["F_generator"] = self.inputs["F_torq"] = np.array(
        [30e2, 40e2, 50e2]
    ).reshape((3, 1))
    myobj.compute(self.inputs, self.outputs, self.discrete_inputs, self.discrete_outputs)
    npt.assert_almost_equal(self.outputs["base_F"][1, 0], 4 * self.inputs["F_mb1"][1, 0], decimal=1)
def testRunRotatingDirect_noTilt(self):
    """Rotating-shaft load path for the direct-drive config, zero tilt.

    Checks gravity-only bearing reactions, the LSS torsional spring constant,
    and then the bearing force/moment increments from an applied hub load.
    """
    self.inputs["tilt"] = 0.0
    self.inputs["F_hub"] = np.zeros(3).reshape((3, 1))
    self.inputs["M_hub"] = np.zeros(3).reshape((3, 1))
    self.compute_layout()
    myobj = ds.Hub_Rotor_LSS_Frame(n_dlcs=1, modeling_options=self.opt, direct_drive=True)
    myobj.compute(self.inputs, self.outputs, self.discrete_inputs, self.discrete_outputs)
    F0 = self.outputs["F_mb1"].flatten()
    M0 = self.outputs["M_mb2"].flatten()
    # Gravity alone: downward force on bearing 1 (negative z).
    self.assertGreater(0.0, F0[-1])
    # self.assertGreater(0.0, M0[1])
    npt.assert_almost_equal(self.outputs["F_mb1"][:2], 0.0, decimal=2)
    npt.assert_almost_equal(self.outputs["F_mb2"], 0.0, decimal=2)
    npt.assert_almost_equal(self.outputs["F_torq"], 0.0, decimal=2)
    npt.assert_almost_equal(self.outputs["M_mb1"], 0.0, decimal=2)
    npt.assert_almost_equal(self.outputs["M_mb2"][[0, 2]], 0.0, decimal=2)
    npt.assert_almost_equal(self.outputs["M_torq"], 0.0, decimal=2)
    # Torsional stiffness G*J/L for a hollow shaft (OD 3.3 m, ID 2.4 m).
    self.assertAlmostEqual(
        self.outputs["lss_spring_constant"], 80.8e9 * np.pi * (3.3 ** 4 - 2.4 ** 4) / 32 / self.inputs["L_lss"], 4
    )
    # Apply a hub force g and moment 2g; bearings must react accordingly.
    g = np.array([30e2, 40e2, 50e2])
    self.inputs["F_hub"] = g.reshape((3, 1))
    self.inputs["M_hub"] = 2 * g.reshape((3, 1))
    myobj.compute(self.inputs, self.outputs, self.discrete_inputs, self.discrete_outputs)
    npt.assert_almost_equal(self.outputs["F_mb1"].flatten(), g + F0, decimal=2)
    npt.assert_almost_equal(self.outputs["F_mb2"], 0.0, decimal=2)
    npt.assert_almost_equal(self.outputs["F_torq"], 0.0, decimal=2)
    npt.assert_almost_equal(self.outputs["M_mb1"], 0.0, decimal=2)
    npt.assert_almost_equal(self.outputs["M_mb2"].flatten()[0], 0.0, decimal=2)
    npt.assert_almost_equal(self.outputs["M_mb2"].flatten()[1], g[-1] * 1 + 2 * g[1] + M0[1], decimal=1)  # *1=*L_h1
    npt.assert_almost_equal(self.outputs["M_mb2"].flatten()[2], -g[1] * 1 + 2 * g[2], decimal=1)  # *1=*L_h1
    # Only the applied torque (x-moment) is carried through to the torque arm.
    npt.assert_almost_equal(self.outputs["M_torq"].flatten(), np.r_[2 * g[0], 0.0, 0.0], decimal=2)
def testRunRotatingDirect_withTilt(self):
    """Rotating-shaft load path for the direct-drive config with 5 deg tilt.

    Same structure as testRunRotatingDirect_noTilt; with tilt, gravity also
    produces a negative axial (x) force component at bearing 1.
    """
    self.inputs["tilt"] = 5.0
    self.inputs["F_hub"] = np.zeros(3).reshape((3, 1))
    self.inputs["M_hub"] = np.zeros(3).reshape((3, 1))
    self.compute_layout()
    myobj = ds.Hub_Rotor_LSS_Frame(n_dlcs=1, modeling_options=self.opt, direct_drive=True)
    myobj.compute(self.inputs, self.outputs, self.discrete_inputs, self.discrete_outputs)
    F0 = self.outputs["F_mb1"].flatten()
    M0 = self.outputs["M_mb2"].flatten()
    # Tilted gravity: both axial (x) and vertical (z) components are negative.
    self.assertGreater(0.0, F0[0])
    self.assertGreater(0.0, F0[-1])
    # self.assertGreater(0.0, M0[1])
    npt.assert_almost_equal(self.outputs["F_mb1"][1], 0.0, decimal=2)
    npt.assert_almost_equal(self.outputs["F_mb2"], 0.0, decimal=2)
    npt.assert_almost_equal(self.outputs["F_torq"], 0.0, decimal=2)
    npt.assert_almost_equal(self.outputs["M_mb1"], 0.0, decimal=2)
    npt.assert_almost_equal(self.outputs["M_mb2"][[0, 2]], 0.0, decimal=2)
    npt.assert_almost_equal(self.outputs["M_torq"], 0.0, decimal=2)
    # Torsional stiffness G*J/L for a hollow shaft (OD 3.3 m, ID 2.4 m).
    self.assertAlmostEqual(
        self.outputs["lss_spring_constant"], 80.8e9 * np.pi * (3.3 ** 4 - 2.4 ** 4) / 32 / self.inputs["L_lss"], 4
    )
    # Apply a hub force g and moment 2g; bearings must react accordingly.
    g = np.array([30e2, 40e2, 50e2])
    self.inputs["F_hub"] = g.reshape((3, 1))
    self.inputs["M_hub"] = 2 * g.reshape((3, 1))
    myobj.compute(self.inputs, self.outputs, self.discrete_inputs, self.discrete_outputs)
    npt.assert_almost_equal(self.outputs["F_mb1"].flatten(), g + F0, decimal=2)
    npt.assert_almost_equal(self.outputs["F_mb2"], 0.0, decimal=2)
    npt.assert_almost_equal(self.outputs["F_torq"], 0.0, decimal=2)
    npt.assert_almost_equal(self.outputs["M_mb1"], 0.0, decimal=2)
    npt.assert_almost_equal(self.outputs["M_mb2"].flatten()[0], 0.0, decimal=2)
    npt.assert_almost_equal(self.outputs["M_mb2"].flatten()[1], g[-1] * 1 + 2 * g[1] + M0[1], decimal=1)  # *1=*L_h1
    npt.assert_almost_equal(self.outputs["M_mb2"].flatten()[2], -g[1] * 1 + 2 * g[2], decimal=1)  # *1=*L_h1
    # Only the applied torque (x-moment) is carried through to the torque arm.
    npt.assert_almost_equal(self.outputs["M_torq"].flatten(), np.r_[2 * g[0], 0.0, 0.0], decimal=2)
def testRunRotatingGeared_noTilt(self):
    """Rotating-shaft load path for the geared config (ratio 50:1), zero tilt.

    Same structure as the direct-drive variant but built on the geared layout;
    bearing reaction increments from an applied hub load are checked.
    """
    self.inputs["tilt"] = 0.0
    self.inputs["gear_ratio"] = 50.0
    self.inputs["F_hub"] = np.zeros(3).reshape((3, 1))
    self.inputs["M_hub"] = np.zeros(3).reshape((3, 1))
    self.compute_layout(False)
    myobj = ds.Hub_Rotor_LSS_Frame(n_dlcs=1, modeling_options=self.opt, direct_drive=False)
    myobj.compute(self.inputs, self.outputs, self.discrete_inputs, self.discrete_outputs)
    F0 = self.outputs["F_mb1"].flatten()
    M0 = self.outputs["M_mb2"].flatten()
    # Gravity alone: downward force on bearing 1 (negative z).
    self.assertGreater(0.0, F0[-1])
    # self.assertGreater(0.0, M0[1])
    npt.assert_almost_equal(self.outputs["F_mb1"][:2], 0.0, decimal=2)
    npt.assert_almost_equal(self.outputs["F_mb2"], 0.0, decimal=2)
    npt.assert_almost_equal(self.outputs["F_torq"], 0.0, decimal=2)
    npt.assert_almost_equal(self.outputs["M_mb1"], 0.0, decimal=2)
    npt.assert_almost_equal(self.outputs["M_mb2"][[0, 2]], 0.0, decimal=2)
    npt.assert_almost_equal(self.outputs["M_torq"], 0.0, decimal=2)
    # Torsional stiffness G*J/L for a hollow shaft (OD 3.3 m, ID 2.4 m).
    self.assertAlmostEqual(
        self.outputs["lss_spring_constant"], 80.8e9 * np.pi * (3.3 ** 4 - 2.4 ** 4) / 32 / self.inputs["L_lss"], 4
    )
    # Apply a hub force g and moment 2g; bearings must react accordingly.
    g = np.array([30e2, 40e2, 50e2])
    self.inputs["F_hub"] = g.reshape((3, 1))
    self.inputs["M_hub"] = 2 * g.reshape((3, 1))
    myobj.compute(self.inputs, self.outputs, self.discrete_inputs, self.discrete_outputs)
    npt.assert_almost_equal(self.outputs["F_mb1"].flatten(), g + F0, decimal=2)
    npt.assert_almost_equal(self.outputs["F_mb2"], 0.0, decimal=2)
    npt.assert_almost_equal(self.outputs["F_torq"], 0.0, decimal=2)
    npt.assert_almost_equal(self.outputs["M_mb1"], 0.0, decimal=2)
    npt.assert_almost_equal(self.outputs["M_mb2"].flatten()[0], 0.0, decimal=2)
    npt.assert_almost_equal(self.outputs["M_mb2"].flatten()[1], g[-1] * 1 + 2 * g[1] + M0[1], decimal=2)  # *1=*L_h1
    npt.assert_almost_equal(self.outputs["M_mb2"].flatten()[2], -g[1] * 1 + 2 * g[2], decimal=2)  # *1=*L_h1
    # Only the applied torque (x-moment) is carried through to the torque arm.
    npt.assert_almost_equal(self.outputs["M_torq"].flatten(), np.r_[2 * g[0], 0.0, 0.0], decimal=2)
def testRunRotatingGeared_withTilt(self):
    """Rotating-shaft load path for the geared config (ratio 50:1) with 5 deg tilt.

    Same structure as testRunRotatingGeared_noTilt; with tilt, gravity also
    produces a negative axial (x) force at bearing 1.
    """
    self.inputs["tilt"] = 5.0
    self.inputs["gear_ratio"] = 50.0
    self.inputs["F_hub"] = np.zeros(3).reshape((3, 1))
    self.inputs["M_hub"] = np.zeros(3).reshape((3, 1))
    self.compute_layout(False)
    myobj = ds.Hub_Rotor_LSS_Frame(n_dlcs=1, modeling_options=self.opt, direct_drive=False)
    myobj.compute(self.inputs, self.outputs, self.discrete_inputs, self.discrete_outputs)
    F0 = self.outputs["F_mb1"].flatten()
    M0 = self.outputs["M_mb2"].flatten()
    # Tilted gravity: both axial (x) and vertical (z) components are negative.
    self.assertGreater(0.0, F0[0])
    self.assertGreater(0.0, F0[-1])
    # self.assertGreater(0.0, M0[1])
    npt.assert_almost_equal(self.outputs["F_mb1"][1], 0.0, decimal=2)
    npt.assert_almost_equal(self.outputs["F_mb2"], 0.0, decimal=2)
    npt.assert_almost_equal(self.outputs["F_torq"], 0.0, decimal=2)
    npt.assert_almost_equal(self.outputs["M_mb1"], 0.0, decimal=2)
    npt.assert_almost_equal(self.outputs["M_mb2"][[0, 2]], 0.0, decimal=2)
    npt.assert_almost_equal(self.outputs["M_torq"], 0.0, decimal=2)
    # Torsional stiffness G*J/L for a hollow shaft (OD 3.3 m, ID 2.4 m).
    self.assertAlmostEqual(
        self.outputs["lss_spring_constant"], 80.8e9 * np.pi * (3.3 ** 4 - 2.4 ** 4) / 32 / self.inputs["L_lss"], 4
    )
    # Apply a hub force g and moment 2g; bearings must react accordingly.
    g = np.array([30e2, 40e2, 50e2])
    self.inputs["F_hub"] = g.reshape((3, 1))
    self.inputs["M_hub"] = 2 * g.reshape((3, 1))
    myobj.compute(self.inputs, self.outputs, self.discrete_inputs, self.discrete_outputs)
    npt.assert_almost_equal(self.outputs["F_mb1"].flatten(), g + F0, decimal=2)
    npt.assert_almost_equal(self.outputs["F_mb2"], 0.0, decimal=2)
    npt.assert_almost_equal(self.outputs["F_torq"], 0.0, decimal=2)
    npt.assert_almost_equal(self.outputs["M_mb1"], 0.0, decimal=2)
    npt.assert_almost_equal(self.outputs["M_mb2"].flatten()[0], 0.0, decimal=2)
    npt.assert_almost_equal(self.outputs["M_mb2"].flatten()[1], g[-1] * 1 + 2 * g[1] + M0[1], decimal=2)  # *1=*L_h1
    npt.assert_almost_equal(self.outputs["M_mb2"].flatten()[2], -g[1] * 1 + 2 * g[2], decimal=2)  # *1=*L_h1
    # Only the applied torque (x-moment) is carried through to the torque arm.
    npt.assert_almost_equal(self.outputs["M_torq"].flatten(), np.r_[2 * g[0], 0.0, 0.0], decimal=2)
def testHSS_noTilt(self):
    """High-speed-shaft reactions at the generator attachment, zero tilt.

    Gravity-only reactions are captured first; then a hub torque is applied
    and the generator moment must gain the torque divided by the gear ratio.
    """
    self.inputs["tilt"] = 0.0
    self.inputs["gear_ratio"] = 50.0
    self.inputs["F_hub"] = np.zeros(3).reshape((3, 1))
    self.inputs["M_hub"] = np.zeros(3).reshape((3, 1))
    self.compute_layout(False)
    myobj = ds.HSS_Frame(modeling_options=self.opt, n_dlcs=1)
    myobj.compute(self.inputs, self.outputs)
    F0 = self.outputs["F_generator"].flatten()
    M0 = self.outputs["M_generator"].flatten()
    # Gravity alone: downward force and positive... actually negative values expected.
    self.assertGreater(0.0, F0[-1])
    self.assertGreater(0.0, M0[1])
    npt.assert_almost_equal(self.outputs["F_generator"].flatten()[:2], 0.0, decimal=2)
    npt.assert_almost_equal(self.outputs["M_generator"].flatten()[[0, 2]], 0.0, decimal=2)
    # Hub torque 2*g[0] is stepped down by the 50:1 gear ratio at the generator.
    g = np.array([30e2, 40e2, 50e2])
    self.inputs["F_hub"] = g.reshape((3, 1))
    self.inputs["M_hub"] = 2 * g.reshape((3, 1))
    self.compute_layout(False)
    myobj = ds.HSS_Frame(modeling_options=self.opt, n_dlcs=1)
    myobj.compute(self.inputs, self.outputs)
    npt.assert_almost_equal(self.outputs["F_generator"].flatten(), F0, decimal=2)
    npt.assert_almost_equal(self.outputs["M_generator"].flatten(), np.r_[2 * g[0] / 50.0, M0[1], 0.0], decimal=2)
def testHSS_withTilt(self):
    """High-speed-shaft reactions at the generator attachment with 5 deg tilt.

    Same as testHSS_noTilt; with tilt, gravity also produces a negative axial
    (x) force at the generator attachment.
    """
    self.inputs["tilt"] = 5.0
    self.inputs["gear_ratio"] = 50.0
    self.inputs["F_hub"] = np.zeros(3).reshape((3, 1))
    self.inputs["M_hub"] = np.zeros(3).reshape((3, 1))
    self.compute_layout(False)
    myobj = ds.HSS_Frame(modeling_options=self.opt, n_dlcs=1)
    myobj.compute(self.inputs, self.outputs)
    F0 = self.outputs["F_generator"].flatten()
    M0 = self.outputs["M_generator"].flatten()
    # Tilted gravity: axial (x) and vertical (z) force components are negative.
    self.assertGreater(0.0, F0[0])
    self.assertGreater(0.0, F0[-1])
    self.assertGreater(0.0, M0[1])
    npt.assert_almost_equal(self.outputs["F_generator"].flatten()[1], 0.0, decimal=2)
    npt.assert_almost_equal(self.outputs["M_generator"].flatten()[[0, 2]], 0.0, decimal=2)
    # Hub torque 2*g[0] is stepped down by the 50:1 gear ratio at the generator.
    g = np.array([30e2, 40e2, 50e2])
    self.inputs["F_hub"] = g.reshape((3, 1))
    self.inputs["M_hub"] = 2 * g.reshape((3, 1))
    self.compute_layout(False)
    myobj = ds.HSS_Frame(modeling_options=self.opt, n_dlcs=1)
    myobj.compute(self.inputs, self.outputs)
    npt.assert_almost_equal(self.outputs["F_generator"].flatten(), F0, decimal=2)
    npt.assert_almost_equal(self.outputs["M_generator"].flatten(), np.r_[2 * g[0] / 50.0, M0[1], 0.0], decimal=2)
def testShaftTheoryLSS(self):
    """Validate LSS shear stress against classical torsion theory.

    All masses are zeroed so only a pure hub torque acts; the resulting shear
    stress must equal T*r/J for a hollow shaft (OD 5 m, wall 0.5 m).
    """
    # https://www.engineersedge.com/calculators/torsional-stress-calculator.htm
    self.inputs["tilt"] = 0.0
    self.inputs["F_hub"] = np.zeros(3).reshape((3, 1))
    self.inputs["M_hub"] = np.array([1e5, 0.0, 0.0]).reshape((3, 1))
    # Zero every mass contribution so gravity plays no role.
    self.inputs["brake_mass"] = 0.0
    self.inputs["brake_I"] = np.zeros(3)
    self.inputs["generator_rotor_mass"] = 0.0
    self.inputs["cm_rotor"] = 0.0
    self.inputs["generator_rotor_I"] = np.zeros(6)
    self.inputs["hub_system_mass"] = 0.0
    self.inputs["hub_system_cm"] = 0.0
    self.inputs["hub_system_I"] = np.zeros(6)
    myones = np.ones(5)
    self.inputs["lss_diameter"] = 5 * myones
    self.inputs["lss_wall_thickness"] = 0.5 * myones
    self.inputs["G"] = 100e9
    # Near-zero density keeps self-weight stresses negligible.
    self.inputs["lss_rho"] = 1e-6
    self.compute_layout()
    myobj = ds.Hub_Rotor_LSS_Frame(n_dlcs=1, modeling_options=self.opt, direct_drive=True)
    myobj.compute(self.inputs, self.outputs, self.discrete_inputs, self.discrete_outputs)
    # Polar moment of a hollow circle: J = pi/2 * (ro^4 - ri^4); tau = T*ro/J.
    J = 0.5 * np.pi * (2.5 ** 4 - 2 ** 4)
    sigma = 1e5 / J * 2.5
    npt.assert_almost_equal(self.outputs["lss_axial_stress"], 0.0, decimal=4)
    npt.assert_almost_equal(self.outputs["lss_shear_stress"].flatten(), np.r_[np.zeros(3), sigma], decimal=4)
def testShaftTheoryHSS(self):
    """Validate HSS shear stress against classical torsion theory.

    As testShaftTheoryLSS but for the high-speed shaft: the hub torque is
    reduced by the 50:1 gear ratio before entering T*r/J.
    """
    # https://www.engineersedge.com/calculators/torsional-stress-calculator.htm
    self.inputs["tilt"] = 0.0
    self.inputs["gear_ratio"] = 50.0
    self.inputs["s_hss"] = np.array([0.0, 0.5, 1.0])
    self.inputs["M_hub"] = np.array([1e5, 0.0, 0.0]).reshape((3, 1))
    # Zero every mass contribution so gravity plays no role.
    self.inputs["s_generator"] = 0.0
    self.inputs["generator_mass"] = 0.0
    self.inputs["generator_I"] = np.zeros(3)
    self.inputs["brake_mass"] = 0.0
    self.inputs["brake_I"] = np.zeros(3)
    self.inputs["hub_system_mass"] = 0.0
    self.inputs["hub_system_cm"] = 0.0
    self.inputs["hub_system_I"] = np.zeros(6)
    myones = np.ones(3)
    self.inputs["hss_diameter"] = 5 * myones
    self.inputs["hss_wall_thickness"] = 0.5 * myones
    self.inputs["G"] = 100e9
    # Near-zero density keeps self-weight stresses negligible.
    self.inputs["hss_rho"] = 1e-6
    self.compute_layout()
    myobj = ds.HSS_Frame(modeling_options=self.opt, n_dlcs=1)
    myobj.compute(self.inputs, self.outputs)
    # Polar moment of a hollow circle: J = pi/2 * (ro^4 - ri^4); torque is
    # hub torque / gear ratio, so tau = (T/50) * ro / J.
    J = 0.5 * np.pi * (2.5 ** 4 - 2 ** 4)
    sigma = 1e5 / 50.0 / J * 2.5
    npt.assert_almost_equal(self.outputs["hss_axial_stress"], 0.0, decimal=4)
    npt.assert_almost_equal(self.outputs["hss_bending_stress"], 0.0, decimal=4)
    npt.assert_almost_equal(self.outputs["hss_shear_stress"].flatten(), sigma * np.ones(2), decimal=4)
def suite():
    """Build and return the unittest suite for this module."""
    tests = unittest.TestSuite()
    tests.addTest(unittest.makeSuite(TestDirectStructure))
    return tests
if __name__ == "__main__":
result = unittest.TextTestRunner().run(suite())
if result.wasSuccessful():
exit(0)
else:
exit(1)
| 55.138436
| 120
| 0.626525
| 5,221
| 33,855
| 3.879717
| 0.040031
| 0.137737
| 0.112559
| 0.150079
| 0.908422
| 0.893661
| 0.88127
| 0.87268
| 0.862214
| 0.856684
| 0
| 0.071611
| 0.191139
| 33,855
| 613
| 121
| 55.228385
| 0.668091
| 0.019968
| 0
| 0.745763
| 0
| 0
| 0.089571
| 0.003378
| 0
| 0
| 0
| 0
| 0.310734
| 1
| 0.032015
| false
| 0
| 0.011299
| 0
| 0.047081
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
8118418a3d7d08844168e565fc9b9cb768b367cf
| 68,358
|
py
|
Python
|
ia870/iatext.py
|
andreperesnl/ia870
|
e6a089e17ea9def39cb9fd6901bbdf72a6ba7dfc
|
[
"BSD-3-Clause"
] | 5
|
2015-11-16T11:37:27.000Z
|
2020-07-20T22:10:31.000Z
|
ia870/iatext.py
|
Abigale-Xin/e2dhipseg
|
520366326cd20c75b5db855c9dd05cf0a8d49089
|
[
"MIT"
] | 2
|
2020-07-28T22:29:54.000Z
|
2021-07-07T20:37:25.000Z
|
ia870/iatext.py
|
Abigale-Xin/e2dhipseg
|
520366326cd20c75b5db855c9dd05cf0a8d49089
|
[
"MIT"
] | 30
|
2015-02-20T23:33:32.000Z
|
2020-10-29T05:14:07.000Z
|
# -*- encoding: utf-8 -*-
# Module iatext
from numpy import *
def iatext(txt):
from iabinary import iabinary
from iaconcat import iaconcat
FontDft = iabinary([
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 1, 0, 0, 0, 0,
0, 0, 0, 0, 1, 0, 0, 0, 0,
0, 0, 0, 0, 1, 0, 0, 0, 0,
0, 0, 0, 0, 1, 0, 0, 0, 0,
0, 0, 0, 0, 1, 0, 0, 0, 0,
0, 0, 0, 0, 1, 0, 0, 0, 0,
0, 0, 0, 0, 1, 0, 0, 0, 0,
0, 0, 0, 0, 1, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 1, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 1, 0, 0, 1, 0, 0,
0, 0, 0, 1, 0, 0, 1, 0, 0,
0, 0, 0, 1, 0, 0, 1, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 1, 0, 0, 1, 0, 0, 0,
0, 0, 1, 0, 0, 1, 0, 0, 0,
0, 1, 1, 1, 1, 1, 1, 0, 0,
0, 0, 1, 0, 0, 1, 0, 0, 0,
0, 0, 1, 0, 0, 1, 0, 0, 0,
0, 1, 1, 1, 1, 1, 1, 0, 0,
0, 0, 1, 0, 0, 1, 0, 0, 0,
0, 0, 1, 0, 0, 1, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 1, 0, 0, 0, 0,
0, 0, 1, 1, 1, 1, 1, 0, 0,
0, 1, 0, 0, 1, 0, 0, 0, 0,
0, 1, 0, 0, 1, 0, 0, 0, 0,
0, 0, 1, 1, 1, 1, 1, 0, 0,
0, 0, 0, 0, 1, 0, 0, 1, 0,
0, 0, 0, 0, 1, 0, 0, 1, 0,
0, 0, 1, 1, 1, 1, 1, 0, 0,
0, 0, 0, 0, 1, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 1, 0, 0, 0, 0, 1, 0,
0, 1, 0, 1, 0, 0, 1, 0, 0,
0, 1, 0, 1, 0, 0, 1, 0, 0,
0, 0, 1, 0, 0, 1, 0, 0, 0,
0, 0, 0, 0, 1, 0, 0, 0, 0,
0, 0, 0, 0, 1, 0, 0, 0, 0,
0, 0, 0, 1, 0, 0, 1, 0, 0,
0, 0, 1, 0, 0, 1, 0, 1, 0,
0, 0, 1, 0, 0, 1, 0, 1, 0,
0, 1, 0, 0, 0, 0, 1, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 1, 1, 1, 0, 0, 0, 0,
0, 1, 0, 0, 0, 1, 0, 0, 0,
0, 1, 0, 0, 0, 1, 0, 0, 0,
0, 1, 0, 0, 0, 1, 0, 0, 0,
0, 0, 1, 1, 1, 0, 0, 0, 0,
0, 1, 0, 0, 0, 1, 0, 0, 0,
0, 1, 0, 0, 0, 1, 0, 1, 0,
0, 1, 0, 0, 0, 0, 1, 0, 0,
0, 1, 0, 0, 0, 1, 1, 0, 0,
0, 0, 1, 1, 1, 0, 0, 1, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 1, 1, 1, 0, 0,
0, 0, 0, 0, 1, 1, 0, 0, 0,
0, 0, 0, 1, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 1, 0, 0, 0,
0, 0, 0, 0, 1, 0, 0, 0, 0,
0, 0, 0, 0, 1, 0, 0, 0, 0,
0, 0, 0, 1, 0, 0, 0, 0, 0,
0, 0, 0, 1, 0, 0, 0, 0, 0,
0, 0, 0, 1, 0, 0, 0, 0, 0,
0, 0, 0, 1, 0, 0, 0, 0, 0,
0, 0, 0, 0, 1, 0, 0, 0, 0,
0, 0, 0, 0, 1, 0, 0, 0, 0,
0, 0, 0, 0, 0, 1, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 1, 0, 0, 0, 0, 0,
0, 0, 0, 0, 1, 0, 0, 0, 0,
0, 0, 0, 0, 1, 0, 0, 0, 0,
0, 0, 0, 0, 0, 1, 0, 0, 0,
0, 0, 0, 0, 0, 1, 0, 0, 0,
0, 0, 0, 0, 0, 1, 0, 0, 0,
0, 0, 0, 0, 0, 1, 0, 0, 0,
0, 0, 0, 0, 1, 0, 0, 0, 0,
0, 0, 0, 0, 1, 0, 0, 0, 0,
0, 0, 0, 1, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 1, 0, 0, 0, 1, 0, 0,
0, 0, 0, 1, 0, 1, 0, 0, 0,
0, 1, 1, 1, 1, 1, 1, 1, 0,
0, 0, 0, 1, 0, 1, 0, 0, 0,
0, 0, 1, 0, 0, 0, 1, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 1, 0, 0, 0, 0,
0, 0, 0, 0, 1, 0, 0, 0, 0,
0, 0, 0, 0, 1, 0, 0, 0, 0,
0, 1, 1, 1, 1, 1, 1, 1, 0,
0, 0, 0, 0, 1, 0, 0, 0, 0,
0, 0, 0, 0, 1, 0, 0, 0, 0,
0, 0, 0, 0, 1, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 1, 1, 1, 0, 0, 0,
0, 0, 0, 1, 1, 0, 0, 0, 0,
0, 0, 1, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 1, 1, 1, 1, 1, 1, 1, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 1, 0, 0, 0, 0,
0, 0, 0, 1, 1, 1, 0, 0, 0,
0, 0, 0, 0, 1, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 1, 0,
0, 0, 0, 0, 0, 0, 1, 0, 0,
0, 0, 0, 0, 0, 0, 1, 0, 0,
0, 0, 0, 0, 0, 1, 0, 0, 0,
0, 0, 0, 0, 1, 0, 0, 0, 0,
0, 0, 0, 0, 1, 0, 0, 0, 0,
0, 0, 0, 1, 0, 0, 0, 0, 0,
0, 0, 1, 0, 0, 0, 0, 0, 0,
0, 0, 1, 0, 0, 0, 0, 0, 0,
0, 1, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 1, 1, 1, 0, 0, 0,
0, 0, 1, 0, 0, 0, 1, 0, 0,
0, 1, 0, 0, 0, 0, 0, 1, 0,
0, 1, 0, 0, 0, 0, 0, 1, 0,
0, 1, 0, 0, 0, 0, 0, 1, 0,
0, 1, 0, 0, 0, 0, 0, 1, 0,
0, 1, 0, 0, 0, 0, 0, 1, 0,
0, 1, 0, 0, 0, 0, 0, 1, 0,
0, 0, 1, 0, 0, 0, 1, 0, 0,
0, 0, 0, 1, 1, 1, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 1, 0, 0, 0, 0,
0, 0, 0, 1, 1, 0, 0, 0, 0,
0, 0, 1, 0, 1, 0, 0, 0, 0,
0, 0, 0, 0, 1, 0, 0, 0, 0,
0, 0, 0, 0, 1, 0, 0, 0, 0,
0, 0, 0, 0, 1, 0, 0, 0, 0,
0, 0, 0, 0, 1, 0, 0, 0, 0,
0, 0, 0, 0, 1, 0, 0, 0, 0,
0, 0, 0, 0, 1, 0, 0, 0, 0,
0, 0, 1, 1, 1, 1, 1, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 1, 1, 1, 1, 1, 0, 0,
0, 1, 0, 0, 0, 0, 0, 1, 0,
0, 1, 0, 0, 0, 0, 0, 1, 0,
0, 0, 0, 0, 0, 0, 0, 1, 0,
0, 0, 0, 0, 0, 0, 1, 0, 0,
0, 0, 0, 0, 0, 1, 0, 0, 0,
0, 0, 0, 1, 1, 0, 0, 0, 0,
0, 0, 1, 0, 0, 0, 0, 0, 0,
0, 1, 0, 0, 0, 0, 0, 0, 0,
0, 1, 1, 1, 1, 1, 1, 1, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 1, 1, 1, 1, 1, 1, 1, 0,
0, 0, 0, 0, 0, 0, 0, 1, 0,
0, 0, 0, 0, 0, 0, 1, 0, 0,
0, 0, 0, 0, 0, 1, 0, 0, 0,
0, 0, 0, 0, 1, 1, 1, 0, 0,
0, 0, 0, 0, 0, 0, 0, 1, 0,
0, 0, 0, 0, 0, 0, 0, 1, 0,
0, 0, 0, 0, 0, 0, 0, 1, 0,
0, 1, 0, 0, 0, 0, 0, 1, 0,
0, 0, 1, 1, 1, 1, 1, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 1, 0, 0,
0, 0, 0, 0, 0, 1, 1, 0, 0,
0, 0, 0, 0, 1, 0, 1, 0, 0,
0, 0, 0, 1, 0, 0, 1, 0, 0,
0, 0, 1, 0, 0, 0, 1, 0, 0,
0, 1, 0, 0, 0, 0, 1, 0, 0,
0, 1, 1, 1, 1, 1, 1, 1, 0,
0, 0, 0, 0, 0, 0, 1, 0, 0,
0, 0, 0, 0, 0, 0, 1, 0, 0,
0, 0, 0, 0, 0, 0, 1, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 1, 1, 1, 1, 1, 1, 1, 0,
0, 1, 0, 0, 0, 0, 0, 0, 0,
0, 1, 0, 0, 0, 0, 0, 0, 0,
0, 1, 0, 1, 1, 1, 1, 0, 0,
0, 1, 1, 0, 0, 0, 0, 1, 0,
0, 0, 0, 0, 0, 0, 0, 1, 0,
0, 0, 0, 0, 0, 0, 0, 1, 0,
0, 0, 0, 0, 0, 0, 0, 1, 0,
0, 1, 0, 0, 0, 0, 0, 1, 0,
0, 0, 1, 1, 1, 1, 1, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 1, 1, 1, 1, 0, 0,
0, 0, 1, 0, 0, 0, 0, 0, 0,
0, 1, 0, 0, 0, 0, 0, 0, 0,
0, 1, 0, 0, 0, 0, 0, 0, 0,
0, 1, 0, 1, 1, 1, 1, 0, 0,
0, 1, 1, 0, 0, 0, 0, 1, 0,
0, 1, 0, 0, 0, 0, 0, 1, 0,
0, 1, 0, 0, 0, 0, 0, 1, 0,
0, 1, 0, 0, 0, 0, 0, 1, 0,
0, 0, 1, 1, 1, 1, 1, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 1, 1, 1, 1, 1, 1, 1, 0,
0, 0, 0, 0, 0, 0, 0, 1, 0,
0, 0, 0, 0, 0, 0, 0, 1, 0,
0, 0, 0, 0, 0, 0, 1, 0, 0,
0, 0, 0, 0, 0, 1, 0, 0, 0,
0, 0, 0, 0, 1, 0, 0, 0, 0,
0, 0, 0, 1, 0, 0, 0, 0, 0,
0, 0, 0, 1, 0, 0, 0, 0, 0,
0, 0, 1, 0, 0, 0, 0, 0, 0,
0, 0, 1, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 1, 1, 1, 1, 1, 0, 0,
0, 1, 0, 0, 0, 0, 0, 1, 0,
0, 1, 0, 0, 0, 0, 0, 1, 0,
0, 1, 0, 0, 0, 0, 0, 1, 0,
0, 0, 1, 1, 1, 1, 1, 0, 0,
0, 1, 0, 0, 0, 0, 0, 1, 0,
0, 1, 0, 0, 0, 0, 0, 1, 0,
0, 1, 0, 0, 0, 0, 0, 1, 0,
0, 1, 0, 0, 0, 0, 0, 1, 0,
0, 0, 1, 1, 1, 1, 1, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 1, 1, 1, 1, 1, 0, 0,
0, 1, 0, 0, 0, 0, 0, 1, 0,
0, 1, 0, 0, 0, 0, 0, 1, 0,
0, 1, 0, 0, 0, 0, 0, 1, 0,
0, 1, 0, 0, 0, 0, 1, 1, 0,
0, 0, 1, 1, 1, 1, 0, 1, 0,
0, 0, 0, 0, 0, 0, 0, 1, 0,
0, 0, 0, 0, 0, 0, 0, 1, 0,
0, 0, 0, 0, 0, 0, 1, 0, 0,
0, 0, 1, 1, 1, 1, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 1, 0, 0, 0, 0,
0, 0, 0, 1, 1, 1, 0, 0, 0,
0, 0, 0, 0, 1, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 1, 0, 0, 0, 0,
0, 0, 0, 1, 1, 1, 0, 0, 0,
0, 0, 0, 0, 1, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 1, 0, 0, 0, 0,
0, 0, 0, 1, 1, 1, 0, 0, 0,
0, 0, 0, 0, 1, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 1, 1, 1, 0, 0, 0,
0, 0, 0, 1, 1, 0, 0, 0, 0,
0, 0, 1, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 1, 0, 0,
0, 0, 0, 0, 0, 1, 0, 0, 0,
0, 0, 0, 0, 1, 0, 0, 0, 0,
0, 0, 0, 1, 0, 0, 0, 0, 0,
0, 0, 1, 0, 0, 0, 0, 0, 0,
0, 0, 1, 0, 0, 0, 0, 0, 0,
0, 0, 0, 1, 0, 0, 0, 0, 0,
0, 0, 0, 0, 1, 0, 0, 0, 0,
0, 0, 0, 0, 0, 1, 0, 0, 0,
0, 0, 0, 0, 0, 0, 1, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 1, 1, 1, 1, 1, 1, 1, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 1, 1, 1, 1, 1, 1, 1, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 1, 0, 0, 0, 0, 0, 0,
0, 0, 0, 1, 0, 0, 0, 0, 0,
0, 0, 0, 0, 1, 0, 0, 0, 0,
0, 0, 0, 0, 0, 1, 0, 0, 0,
0, 0, 0, 0, 0, 0, 1, 0, 0,
0, 0, 0, 0, 0, 0, 1, 0, 0,
0, 0, 0, 0, 0, 1, 0, 0, 0,
0, 0, 0, 0, 1, 0, 0, 0, 0,
0, 0, 0, 1, 0, 0, 0, 0, 0,
0, 0, 1, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 1, 1, 1, 1, 1, 0, 0,
0, 1, 0, 0, 0, 0, 0, 1, 0,
0, 1, 0, 0, 0, 0, 0, 1, 0,
0, 0, 0, 0, 0, 0, 0, 1, 0,
0, 0, 0, 0, 0, 0, 1, 0, 0,
0, 0, 0, 0, 0, 1, 0, 0, 0,
0, 0, 0, 0, 1, 0, 0, 0, 0,
0, 0, 0, 0, 1, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 1, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 1, 1, 1, 1, 1, 0, 0,
0, 1, 0, 0, 0, 0, 0, 1, 0,
0, 1, 0, 0, 0, 0, 0, 1, 0,
0, 1, 0, 0, 1, 1, 1, 1, 0,
0, 1, 0, 1, 0, 0, 0, 1, 0,
0, 1, 0, 1, 0, 0, 1, 1, 0,
0, 1, 0, 0, 1, 1, 0, 1, 0,
0, 1, 0, 0, 0, 0, 0, 0, 0,
0, 1, 0, 0, 0, 0, 0, 0, 0,
0, 0, 1, 1, 1, 1, 1, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 1, 0, 0, 0, 0,
0, 0, 0, 1, 0, 1, 0, 0, 0,
0, 0, 1, 0, 0, 0, 1, 0, 0,
0, 1, 0, 0, 0, 0, 0, 1, 0,
0, 1, 0, 0, 0, 0, 0, 1, 0,
0, 1, 0, 0, 0, 0, 0, 1, 0,
0, 1, 1, 1, 1, 1, 1, 1, 0,
0, 1, 0, 0, 0, 0, 0, 1, 0,
0, 1, 0, 0, 0, 0, 0, 1, 0,
0, 1, 0, 0, 0, 0, 0, 1, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 1, 1, 1, 1, 1, 1, 0, 0,
0, 0, 1, 0, 0, 0, 0, 1, 0,
0, 0, 1, 0, 0, 0, 0, 1, 0,
0, 0, 1, 0, 0, 0, 0, 1, 0,
0, 0, 1, 1, 1, 1, 1, 0, 0,
0, 0, 1, 0, 0, 0, 0, 1, 0,
0, 0, 1, 0, 0, 0, 0, 1, 0,
0, 0, 1, 0, 0, 0, 0, 1, 0,
0, 0, 1, 0, 0, 0, 0, 1, 0,
0, 1, 1, 1, 1, 1, 1, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 1, 1, 1, 1, 1, 0, 0,
0, 1, 0, 0, 0, 0, 0, 1, 0,
0, 1, 0, 0, 0, 0, 0, 0, 0,
0, 1, 0, 0, 0, 0, 0, 0, 0,
0, 1, 0, 0, 0, 0, 0, 0, 0,
0, 1, 0, 0, 0, 0, 0, 0, 0,
0, 1, 0, 0, 0, 0, 0, 0, 0,
0, 1, 0, 0, 0, 0, 0, 0, 0,
0, 1, 0, 0, 0, 0, 0, 1, 0,
0, 0, 1, 1, 1, 1, 1, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 1, 1, 1, 1, 1, 1, 0, 0,
0, 0, 1, 0, 0, 0, 0, 1, 0,
0, 0, 1, 0, 0, 0, 0, 1, 0,
0, 0, 1, 0, 0, 0, 0, 1, 0,
0, 0, 1, 0, 0, 0, 0, 1, 0,
0, 0, 1, 0, 0, 0, 0, 1, 0,
0, 0, 1, 0, 0, 0, 0, 1, 0,
0, 0, 1, 0, 0, 0, 0, 1, 0,
0, 0, 1, 0, 0, 0, 0, 1, 0,
0, 1, 1, 1, 1, 1, 1, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 1, 1, 1, 1, 1, 1, 1, 0,
0, 0, 1, 0, 0, 0, 0, 0, 0,
0, 0, 1, 0, 0, 0, 0, 0, 0,
0, 0, 1, 0, 0, 0, 0, 0, 0,
0, 0, 1, 1, 1, 1, 0, 0, 0,
0, 0, 1, 0, 0, 0, 0, 0, 0,
0, 0, 1, 0, 0, 0, 0, 0, 0,
0, 0, 1, 0, 0, 0, 0, 0, 0,
0, 0, 1, 0, 0, 0, 0, 0, 0,
0, 1, 1, 1, 1, 1, 1, 1, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 1, 1, 1, 1, 1, 1, 1, 0,
0, 0, 1, 0, 0, 0, 0, 0, 0,
0, 0, 1, 0, 0, 0, 0, 0, 0,
0, 0, 1, 0, 0, 0, 0, 0, 0,
0, 0, 1, 1, 1, 1, 0, 0, 0,
0, 0, 1, 0, 0, 0, 0, 0, 0,
0, 0, 1, 0, 0, 0, 0, 0, 0,
0, 0, 1, 0, 0, 0, 0, 0, 0,
0, 0, 1, 0, 0, 0, 0, 0, 0,
0, 0, 1, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 1, 1, 1, 1, 1, 0, 0,
0, 1, 0, 0, 0, 0, 0, 1, 0,
0, 1, 0, 0, 0, 0, 0, 0, 0,
0, 1, 0, 0, 0, 0, 0, 0, 0,
0, 1, 0, 0, 0, 0, 0, 0, 0,
0, 1, 0, 0, 0, 1, 1, 1, 0,
0, 1, 0, 0, 0, 0, 0, 1, 0,
0, 1, 0, 0, 0, 0, 0, 1, 0,
0, 1, 0, 0, 0, 0, 0, 1, 0,
0, 0, 1, 1, 1, 1, 1, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 1, 0, 0, 0, 0, 0, 1, 0,
0, 1, 0, 0, 0, 0, 0, 1, 0,
0, 1, 0, 0, 0, 0, 0, 1, 0,
0, 1, 0, 0, 0, 0, 0, 1, 0,
0, 1, 1, 1, 1, 1, 1, 1, 0,
0, 1, 0, 0, 0, 0, 0, 1, 0,
0, 1, 0, 0, 0, 0, 0, 1, 0,
0, 1, 0, 0, 0, 0, 0, 1, 0,
0, 1, 0, 0, 0, 0, 0, 1, 0,
0, 1, 0, 0, 0, 0, 0, 1, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 1, 1, 1, 1, 1, 0, 0,
0, 0, 0, 0, 1, 0, 0, 0, 0,
0, 0, 0, 0, 1, 0, 0, 0, 0,
0, 0, 0, 0, 1, 0, 0, 0, 0,
0, 0, 0, 0, 1, 0, 0, 0, 0,
0, 0, 0, 0, 1, 0, 0, 0, 0,
0, 0, 0, 0, 1, 0, 0, 0, 0,
0, 0, 0, 0, 1, 0, 0, 0, 0,
0, 0, 0, 0, 1, 0, 0, 0, 0,
0, 0, 1, 1, 1, 1, 1, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 1, 1, 1, 1, 0,
0, 0, 0, 0, 0, 0, 1, 0, 0,
0, 0, 0, 0, 0, 0, 1, 0, 0,
0, 0, 0, 0, 0, 0, 1, 0, 0,
0, 0, 0, 0, 0, 0, 1, 0, 0,
0, 0, 0, 0, 0, 0, 1, 0, 0,
0, 0, 0, 0, 0, 0, 1, 0, 0,
0, 0, 0, 0, 0, 0, 1, 0, 0,
0, 1, 0, 0, 0, 0, 1, 0, 0,
0, 0, 1, 1, 1, 1, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 1, 0, 0, 0, 0, 0, 1, 0,
0, 1, 0, 0, 0, 0, 1, 0, 0,
0, 1, 0, 0, 0, 1, 0, 0, 0,
0, 1, 0, 0, 1, 0, 0, 0, 0,
0, 1, 1, 1, 0, 0, 0, 0, 0,
0, 1, 0, 1, 0, 0, 0, 0, 0,
0, 1, 0, 0, 1, 0, 0, 0, 0,
0, 1, 0, 0, 0, 1, 0, 0, 0,
0, 1, 0, 0, 0, 0, 1, 0, 0,
0, 1, 0, 0, 0, 0, 0, 1, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 1, 0, 0, 0, 0, 0, 0, 0,
0, 1, 0, 0, 0, 0, 0, 0, 0,
0, 1, 0, 0, 0, 0, 0, 0, 0,
0, 1, 0, 0, 0, 0, 0, 0, 0,
0, 1, 0, 0, 0, 0, 0, 0, 0,
0, 1, 0, 0, 0, 0, 0, 0, 0,
0, 1, 0, 0, 0, 0, 0, 0, 0,
0, 1, 0, 0, 0, 0, 0, 0, 0,
0, 1, 0, 0, 0, 0, 0, 0, 0,
0, 1, 1, 1, 1, 1, 1, 1, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 1, 0, 0, 0, 0, 0, 1, 0,
0, 1, 0, 0, 0, 0, 0, 1, 0,
0, 1, 1, 0, 0, 0, 1, 1, 0,
0, 1, 0, 1, 0, 1, 0, 1, 0,
0, 1, 0, 1, 0, 1, 0, 1, 0,
0, 1, 0, 0, 1, 0, 0, 1, 0,
0, 1, 0, 0, 1, 0, 0, 1, 0,
0, 1, 0, 0, 0, 0, 0, 1, 0,
0, 1, 0, 0, 0, 0, 0, 1, 0,
0, 1, 0, 0, 0, 0, 0, 1, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 1, 0, 0, 0, 0, 0, 1, 0,
0, 1, 0, 0, 0, 0, 0, 1, 0,
0, 1, 1, 0, 0, 0, 0, 1, 0,
0, 1, 0, 1, 0, 0, 0, 1, 0,
0, 1, 0, 0, 1, 0, 0, 1, 0,
0, 1, 0, 0, 0, 1, 0, 1, 0,
0, 1, 0, 0, 0, 0, 1, 1, 0,
0, 1, 0, 0, 0, 0, 0, 1, 0,
0, 1, 0, 0, 0, 0, 0, 1, 0,
0, 1, 0, 0, 0, 0, 0, 1, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 1, 1, 1, 1, 1, 0, 0,
0, 1, 0, 0, 0, 0, 0, 1, 0,
0, 1, 0, 0, 0, 0, 0, 1, 0,
0, 1, 0, 0, 0, 0, 0, 1, 0,
0, 1, 0, 0, 0, 0, 0, 1, 0,
0, 1, 0, 0, 0, 0, 0, 1, 0,
0, 1, 0, 0, 0, 0, 0, 1, 0,
0, 1, 0, 0, 0, 0, 0, 1, 0,
0, 1, 0, 0, 0, 0, 0, 1, 0,
0, 0, 1, 1, 1, 1, 1, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 1, 1, 1, 1, 1, 1, 0, 0,
0, 1, 0, 0, 0, 0, 0, 1, 0,
0, 1, 0, 0, 0, 0, 0, 1, 0,
0, 1, 0, 0, 0, 0, 0, 1, 0,
0, 1, 1, 1, 1, 1, 1, 0, 0,
0, 1, 0, 0, 0, 0, 0, 0, 0,
0, 1, 0, 0, 0, 0, 0, 0, 0,
0, 1, 0, 0, 0, 0, 0, 0, 0,
0, 1, 0, 0, 0, 0, 0, 0, 0,
0, 1, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 1, 1, 1, 1, 1, 0, 0,
0, 1, 0, 0, 0, 0, 0, 1, 0,
0, 1, 0, 0, 0, 0, 0, 1, 0,
0, 1, 0, 0, 0, 0, 0, 1, 0,
0, 1, 0, 0, 0, 0, 0, 1, 0,
0, 1, 0, 0, 0, 0, 0, 1, 0,
0, 1, 0, 0, 0, 0, 0, 1, 0,
0, 1, 0, 0, 1, 0, 0, 1, 0,
0, 1, 0, 0, 0, 1, 0, 1, 0,
0, 0, 1, 1, 1, 1, 1, 0, 0,
0, 0, 0, 0, 0, 0, 0, 1, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 1, 1, 1, 1, 1, 1, 0, 0,
0, 1, 0, 0, 0, 0, 0, 1, 0,
0, 1, 0, 0, 0, 0, 0, 1, 0,
0, 1, 0, 0, 0, 0, 0, 1, 0,
0, 1, 1, 1, 1, 1, 1, 0, 0,
0, 1, 0, 0, 1, 0, 0, 0, 0,
0, 1, 0, 0, 0, 1, 0, 0, 0,
0, 1, 0, 0, 0, 0, 1, 0, 0,
0, 1, 0, 0, 0, 0, 0, 1, 0,
0, 1, 0, 0, 0, 0, 0, 1, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 1, 1, 1, 1, 1, 0, 0,
0, 1, 0, 0, 0, 0, 0, 1, 0,
0, 1, 0, 0, 0, 0, 0, 0, 0,
0, 1, 0, 0, 0, 0, 0, 0, 0,
0, 0, 1, 1, 1, 1, 1, 0, 0,
0, 0, 0, 0, 0, 0, 0, 1, 0,
0, 0, 0, 0, 0, 0, 0, 1, 0,
0, 0, 0, 0, 0, 0, 0, 1, 0,
0, 1, 0, 0, 0, 0, 0, 1, 0,
0, 0, 1, 1, 1, 1, 1, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 1, 1, 1, 1, 1, 1, 1, 0,
0, 0, 0, 0, 1, 0, 0, 0, 0,
0, 0, 0, 0, 1, 0, 0, 0, 0,
0, 0, 0, 0, 1, 0, 0, 0, 0,
0, 0, 0, 0, 1, 0, 0, 0, 0,
0, 0, 0, 0, 1, 0, 0, 0, 0,
0, 0, 0, 0, 1, 0, 0, 0, 0,
0, 0, 0, 0, 1, 0, 0, 0, 0,
0, 0, 0, 0, 1, 0, 0, 0, 0,
0, 0, 0, 0, 1, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 1, 0, 0, 0, 0, 0, 1, 0,
0, 1, 0, 0, 0, 0, 0, 1, 0,
0, 1, 0, 0, 0, 0, 0, 1, 0,
0, 1, 0, 0, 0, 0, 0, 1, 0,
0, 1, 0, 0, 0, 0, 0, 1, 0,
0, 1, 0, 0, 0, 0, 0, 1, 0,
0, 1, 0, 0, 0, 0, 0, 1, 0,
0, 1, 0, 0, 0, 0, 0, 1, 0,
0, 1, 0, 0, 0, 0, 0, 1, 0,
0, 0, 1, 1, 1, 1, 1, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 1, 0, 0, 0, 0, 0, 1, 0,
0, 1, 0, 0, 0, 0, 0, 1, 0,
0, 1, 0, 0, 0, 0, 0, 1, 0,
0, 0, 1, 0, 0, 0, 1, 0, 0,
0, 0, 1, 0, 0, 0, 1, 0, 0,
0, 0, 1, 0, 0, 0, 1, 0, 0,
0, 0, 0, 1, 0, 1, 0, 0, 0,
0, 0, 0, 1, 0, 1, 0, 0, 0,
0, 0, 0, 1, 0, 1, 0, 0, 0,
0, 0, 0, 0, 1, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 1, 0, 0, 0, 0, 0, 1, 0,
0, 1, 0, 0, 0, 0, 0, 1, 0,
0, 1, 0, 0, 0, 0, 0, 1, 0,
0, 1, 0, 0, 0, 0, 0, 1, 0,
0, 1, 0, 0, 1, 0, 0, 1, 0,
0, 1, 0, 0, 1, 0, 0, 1, 0,
0, 1, 0, 0, 1, 0, 0, 1, 0,
0, 1, 0, 0, 1, 0, 0, 1, 0,
0, 1, 0, 1, 0, 1, 0, 1, 0,
0, 0, 1, 0, 0, 0, 1, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 1, 0, 0, 0, 0, 0, 1, 0,
0, 1, 0, 0, 0, 0, 0, 1, 0,
0, 0, 1, 0, 0, 0, 1, 0, 0,
0, 0, 0, 1, 0, 1, 0, 0, 0,
0, 0, 0, 0, 1, 0, 0, 0, 0,
0, 0, 0, 0, 1, 0, 0, 0, 0,
0, 0, 0, 1, 0, 1, 0, 0, 0,
0, 0, 1, 0, 0, 0, 1, 0, 0,
0, 1, 0, 0, 0, 0, 0, 1, 0,
0, 1, 0, 0, 0, 0, 0, 1, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 1, 0, 0, 0, 0, 0, 1, 0,
0, 1, 0, 0, 0, 0, 0, 1, 0,
0, 0, 1, 0, 0, 0, 1, 0, 0,
0, 0, 0, 1, 0, 1, 0, 0, 0,
0, 0, 0, 0, 1, 0, 0, 0, 0,
0, 0, 0, 0, 1, 0, 0, 0, 0,
0, 0, 0, 0, 1, 0, 0, 0, 0,
0, 0, 0, 0, 1, 0, 0, 0, 0,
0, 0, 0, 0, 1, 0, 0, 0, 0,
0, 0, 0, 0, 1, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 1, 1, 1, 1, 1, 1, 1, 0,
0, 0, 0, 0, 0, 0, 0, 1, 0,
0, 0, 0, 0, 0, 0, 1, 0, 0,
0, 0, 0, 0, 0, 1, 0, 0, 0,
0, 0, 0, 0, 1, 0, 0, 0, 0,
0, 0, 0, 1, 0, 0, 0, 0, 0,
0, 0, 1, 0, 0, 0, 0, 0, 0,
0, 1, 0, 0, 0, 0, 0, 0, 0,
0, 1, 0, 0, 0, 0, 0, 0, 0,
0, 1, 1, 1, 1, 1, 1, 1, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 1, 1, 1, 1, 0, 0,
0, 0, 0, 1, 0, 0, 0, 0, 0,
0, 0, 0, 1, 0, 0, 0, 0, 0,
0, 0, 0, 1, 0, 0, 0, 0, 0,
0, 0, 0, 1, 0, 0, 0, 0, 0,
0, 0, 0, 1, 0, 0, 0, 0, 0,
0, 0, 0, 1, 0, 0, 0, 0, 0,
0, 0, 0, 1, 0, 0, 0, 0, 0,
0, 0, 0, 1, 0, 0, 0, 0, 0,
0, 0, 0, 1, 1, 1, 1, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 1, 0, 0, 0, 0, 0, 0, 0,
0, 0, 1, 0, 0, 0, 0, 0, 0,
0, 0, 1, 0, 0, 0, 0, 0, 0,
0, 0, 0, 1, 0, 0, 0, 0, 0,
0, 0, 0, 0, 1, 0, 0, 0, 0,
0, 0, 0, 0, 1, 0, 0, 0, 0,
0, 0, 0, 0, 0, 1, 0, 0, 0,
0, 0, 0, 0, 0, 0, 1, 0, 0,
0, 0, 0, 0, 0, 0, 1, 0, 0,
0, 0, 0, 0, 0, 0, 0, 1, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 1, 1, 1, 1, 0, 0, 0,
0, 0, 0, 0, 0, 1, 0, 0, 0,
0, 0, 0, 0, 0, 1, 0, 0, 0,
0, 0, 0, 0, 0, 1, 0, 0, 0,
0, 0, 0, 0, 0, 1, 0, 0, 0,
0, 0, 0, 0, 0, 1, 0, 0, 0,
0, 0, 0, 0, 0, 1, 0, 0, 0,
0, 0, 0, 0, 0, 1, 0, 0, 0,
0, 0, 0, 0, 0, 1, 0, 0, 0,
0, 0, 1, 1, 1, 1, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 1, 0, 0, 0, 0,
0, 0, 0, 1, 0, 1, 0, 0, 0,
0, 0, 1, 0, 0, 0, 1, 0, 0,
0, 1, 0, 0, 0, 0, 0, 1, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
55, 1, 1, 1, 1, 1, 1, 1, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 1, 1, 1, 0, 0, 0, 0,
0, 0, 0, 1, 1, 0, 0, 0, 0,
0, 0, 0, 0, 0, 1, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 1, 1, 1, 1, 1, 0, 0,
0, 0, 0, 0, 0, 0, 0, 1, 0,
0, 0, 0, 0, 0, 0, 0, 1, 0,
0, 0, 1, 1, 1, 1, 1, 1, 0,
0, 1, 0, 0, 0, 0, 0, 1, 0,
0, 1, 0, 0, 0, 0, 1, 1, 0,
0, 0, 1, 1, 1, 1, 0, 1, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 1, 0, 0, 0, 0, 0, 0, 0,
0, 1, 0, 0, 0, 0, 0, 0, 0,
0, 1, 0, 0, 0, 0, 0, 0, 0,
0, 1, 0, 1, 1, 1, 1, 0, 0,
0, 1, 1, 0, 0, 0, 0, 1, 0,
0, 1, 0, 0, 0, 0, 0, 1, 0,
0, 1, 0, 0, 0, 0, 0, 1, 0,
0, 1, 0, 0, 0, 0, 0, 1, 0,
0, 1, 1, 0, 0, 0, 0, 1, 0,
0, 1, 0, 1, 1, 1, 1, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 1, 1, 1, 1, 1, 0, 0,
0, 1, 0, 0, 0, 0, 0, 1, 0,
0, 1, 0, 0, 0, 0, 0, 0, 0,
0, 1, 0, 0, 0, 0, 0, 0, 0,
0, 1, 0, 0, 0, 0, 0, 0, 0,
0, 1, 0, 0, 0, 0, 0, 1, 0,
0, 0, 1, 1, 1, 1, 1, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 1, 0,
0, 0, 0, 0, 0, 0, 0, 1, 0,
0, 0, 0, 0, 0, 0, 0, 1, 0,
0, 0, 1, 1, 1, 1, 0, 1, 0,
0, 1, 0, 0, 0, 0, 1, 1, 0,
0, 1, 0, 0, 0, 0, 0, 1, 0,
0, 1, 0, 0, 0, 0, 0, 1, 0,
0, 1, 0, 0, 0, 0, 0, 1, 0,
0, 1, 0, 0, 0, 0, 1, 1, 0,
0, 0, 1, 1, 1, 1, 0, 1, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 1, 1, 1, 1, 1, 0, 0,
0, 1, 0, 0, 0, 0, 0, 1, 0,
0, 1, 0, 0, 0, 0, 0, 1, 0,
0, 1, 1, 1, 1, 1, 1, 1, 0,
0, 1, 0, 0, 0, 0, 0, 0, 0,
0, 1, 0, 0, 0, 0, 0, 0, 0,
0, 0, 1, 1, 1, 1, 1, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 1, 1, 1, 0, 0,
0, 0, 0, 1, 0, 0, 0, 1, 0,
0, 0, 0, 1, 0, 0, 0, 1, 0,
0, 0, 0, 1, 0, 0, 0, 0, 0,
0, 0, 0, 1, 0, 0, 0, 0, 0,
0, 1, 1, 1, 1, 1, 0, 0, 0,
0, 0, 0, 1, 0, 0, 0, 0, 0,
0, 0, 0, 1, 0, 0, 0, 0, 0,
0, 0, 0, 1, 0, 0, 0, 0, 0,
0, 0, 0, 1, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 1, 1, 1, 1, 0, 1, 0,
0, 1, 0, 0, 0, 0, 1, 0, 0,
0, 1, 0, 0, 0, 0, 1, 0, 0,
0, 1, 0, 0, 0, 0, 1, 0, 0,
0, 0, 1, 1, 1, 1, 0, 0, 0,
0, 1, 0, 0, 0, 0, 0, 0, 0,
0, 0, 1, 1, 1, 1, 1, 0, 0,
0, 1, 0, 0, 0, 0, 0, 1, 0,
0, 1, 0, 0, 0, 0, 0, 1, 0,
0, 0, 1, 1, 1, 1, 1, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 1, 0, 0, 0, 0, 0, 0, 0,
0, 1, 0, 0, 0, 0, 0, 0, 0,
0, 1, 0, 0, 0, 0, 0, 0, 0,
0, 1, 0, 1, 1, 1, 1, 0, 0,
0, 1, 1, 0, 0, 0, 0, 1, 0,
0, 1, 0, 0, 0, 0, 0, 1, 0,
0, 1, 0, 0, 0, 0, 0, 1, 0,
0, 1, 0, 0, 0, 0, 0, 1, 0,
0, 1, 0, 0, 0, 0, 0, 1, 0,
0, 1, 0, 0, 0, 0, 0, 1, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 1, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 1, 1, 0, 0, 0, 0,
0, 0, 0, 0, 1, 0, 0, 0, 0,
0, 0, 0, 0, 1, 0, 0, 0, 0,
0, 0, 0, 0, 1, 0, 0, 0, 0,
0, 0, 0, 0, 1, 0, 0, 0, 0,
0, 0, 0, 0, 1, 0, 0, 0, 0,
0, 0, 1, 1, 1, 1, 1, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 1, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 1, 1, 1, 0, 0,
0, 0, 0, 0, 0, 0, 1, 0, 0,
0, 0, 0, 0, 0, 0, 1, 0, 0,
0, 0, 0, 0, 0, 0, 1, 0, 0,
0, 0, 0, 0, 0, 0, 1, 0, 0,
0, 0, 0, 0, 0, 0, 1, 0, 0,
0, 1, 0, 0, 0, 0, 1, 0, 0,
0, 1, 0, 0, 0, 0, 1, 0, 0,
0, 1, 0, 0, 0, 0, 1, 0, 0,
0, 0, 1, 1, 1, 1, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 1, 0, 0, 0, 0, 0, 0, 0,
0, 1, 0, 0, 0, 0, 0, 0, 0,
0, 1, 0, 0, 0, 0, 0, 0, 0,
0, 1, 0, 0, 0, 0, 0, 1, 0,
0, 1, 0, 0, 0, 1, 1, 0, 0,
0, 1, 0, 1, 1, 0, 0, 0, 0,
0, 1, 1, 0, 0, 0, 0, 0, 0,
0, 1, 0, 1, 1, 0, 0, 0, 0,
0, 1, 0, 0, 0, 1, 1, 0, 0,
0, 1, 0, 0, 0, 0, 0, 1, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 1, 1, 0, 0, 0, 0,
0, 0, 0, 0, 1, 0, 0, 0, 0,
0, 0, 0, 0, 1, 0, 0, 0, 0,
0, 0, 0, 0, 1, 0, 0, 0, 0,
0, 0, 0, 0, 1, 0, 0, 0, 0,
0, 0, 0, 0, 1, 0, 0, 0, 0,
0, 0, 0, 0, 1, 0, 0, 0, 0,
0, 0, 0, 0, 1, 0, 0, 0, 0,
0, 0, 0, 0, 1, 0, 0, 0, 0,
0, 0, 1, 1, 1, 1, 1, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 1, 1, 1, 0, 1, 1, 0, 0,
0, 1, 0, 0, 1, 0, 0, 1, 0,
0, 1, 0, 0, 1, 0, 0, 1, 0,
0, 1, 0, 0, 1, 0, 0, 1, 0,
0, 1, 0, 0, 1, 0, 0, 1, 0,
0, 1, 0, 0, 1, 0, 0, 1, 0,
0, 1, 0, 0, 0, 0, 0, 1, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 1, 0, 1, 1, 1, 1, 0, 0,
0, 1, 1, 0, 0, 0, 0, 1, 0,
0, 1, 0, 0, 0, 0, 0, 1, 0,
0, 1, 0, 0, 0, 0, 0, 1, 0,
0, 1, 0, 0, 0, 0, 0, 1, 0,
0, 1, 0, 0, 0, 0, 0, 1, 0,
0, 1, 0, 0, 0, 0, 0, 1, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 1, 1, 1, 1, 1, 0, 0,
0, 1, 0, 0, 0, 0, 0, 1, 0,
0, 1, 0, 0, 0, 0, 0, 1, 0,
0, 1, 0, 0, 0, 0, 0, 1, 0,
0, 1, 0, 0, 0, 0, 0, 1, 0,
0, 1, 0, 0, 0, 0, 0, 1, 0,
0, 0, 1, 1, 1, 1, 1, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 1, 0, 1, 1, 1, 1, 0, 0,
0, 1, 1, 0, 0, 0, 0, 1, 0,
0, 1, 0, 0, 0, 0, 0, 1, 0,
0, 1, 0, 0, 0, 0, 0, 1, 0,
0, 1, 0, 0, 0, 0, 0, 1, 0,
0, 1, 1, 0, 0, 0, 0, 1, 0,
0, 1, 0, 1, 1, 1, 1, 0, 0,
0, 1, 0, 0, 0, 0, 0, 0, 0,
0, 1, 0, 0, 0, 0, 0, 0, 0,
0, 1, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 1, 1, 1, 1, 0, 1, 0,
0, 1, 0, 0, 0, 0, 1, 1, 0,
0, 1, 0, 0, 0, 0, 0, 1, 0,
0, 1, 0, 0, 0, 0, 0, 1, 0,
0, 1, 0, 0, 0, 0, 0, 1, 0,
0, 1, 0, 0, 0, 0, 1, 1, 0,
0, 0, 1, 1, 1, 1, 0, 1, 0,
0, 0, 0, 0, 0, 0, 0, 1, 0,
0, 0, 0, 0, 0, 0, 0, 1, 0,
0, 0, 0, 0, 0, 0, 0, 1, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 1, 0, 0, 1, 1, 1, 0, 0,
0, 0, 1, 1, 0, 0, 0, 1, 0,
0, 0, 1, 0, 0, 0, 0, 1, 0,
0, 0, 1, 0, 0, 0, 0, 0, 0,
0, 0, 1, 0, 0, 0, 0, 0, 0,
0, 0, 1, 0, 0, 0, 0, 0, 0,
0, 0, 1, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 1, 1, 1, 1, 1, 0, 0,
0, 1, 0, 0, 0, 0, 0, 1, 0,
0, 1, 0, 0, 0, 0, 0, 0, 0,
0, 0, 1, 1, 1, 1, 1, 0, 0,
0, 0, 0, 0, 0, 0, 0, 1, 0,
0, 1, 0, 0, 0, 0, 0, 1, 0,
0, 0, 1, 1, 1, 1, 1, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 1, 0, 0, 0, 0, 0,
0, 0, 0, 1, 0, 0, 0, 0, 0,
0, 1, 1, 1, 1, 1, 1, 0, 0,
0, 0, 0, 1, 0, 0, 0, 0, 0,
0, 0, 0, 1, 0, 0, 0, 0, 0,
0, 0, 0, 1, 0, 0, 0, 0, 0,
0, 0, 0, 1, 0, 0, 0, 0, 0,
0, 0, 0, 1, 0, 0, 0, 1, 0,
0, 0, 0, 0, 1, 1, 1, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 1, 0, 0, 0, 0, 1, 0, 0,
0, 1, 0, 0, 0, 0, 1, 0, 0,
0, 1, 0, 0, 0, 0, 1, 0, 0,
0, 1, 0, 0, 0, 0, 1, 0, 0,
0, 1, 0, 0, 0, 0, 1, 0, 0,
0, 1, 0, 0, 0, 0, 1, 0, 0,
0, 0, 1, 1, 1, 1, 0, 1, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 1, 0, 0, 0, 0, 0, 1, 0,
0, 1, 0, 0, 0, 0, 0, 1, 0,
0, 0, 1, 0, 0, 0, 1, 0, 0,
0, 0, 1, 0, 0, 0, 1, 0, 0,
0, 0, 0, 1, 0, 1, 0, 0, 0,
0, 0, 0, 1, 0, 1, 0, 0, 0,
0, 0, 0, 0, 1, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 1, 0, 0, 0, 0, 0, 1, 0,
0, 1, 0, 0, 0, 0, 0, 1, 0,
0, 1, 0, 0, 1, 0, 0, 1, 0,
0, 1, 0, 0, 1, 0, 0, 1, 0,
0, 1, 0, 0, 1, 0, 0, 1, 0,
0, 1, 0, 1, 0, 1, 0, 1, 0,
0, 0, 1, 0, 0, 0, 1, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 1, 0, 0, 0, 0, 0, 1, 0,
0, 0, 1, 0, 0, 0, 1, 0, 0,
0, 0, 0, 1, 0, 1, 0, 0, 0,
0, 0, 0, 0, 1, 0, 0, 0, 0,
0, 0, 0, 1, 0, 1, 0, 0, 0,
0, 0, 1, 0, 0, 0, 1, 0, 0,
0, 1, 0, 0, 0, 0, 0, 1, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 1, 0, 0, 0, 0, 1, 0, 0,
0, 1, 0, 0, 0, 0, 1, 0, 0,
0, 1, 0, 0, 0, 0, 1, 0, 0,
0, 1, 0, 0, 0, 0, 1, 0, 0,
0, 1, 0, 0, 0, 0, 1, 0, 0,
0, 1, 0, 0, 0, 1, 1, 0, 0,
0, 0, 1, 1, 1, 0, 1, 0, 0,
0, 0, 0, 0, 0, 0, 1, 0, 0,
0, 1, 0, 0, 0, 0, 1, 0, 0,
0, 0, 1, 1, 1, 1, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 1, 1, 1, 1, 1, 1, 1, 0,
0, 0, 0, 0, 0, 0, 1, 0, 0,
0, 0, 0, 0, 0, 1, 0, 0, 0,
0, 0, 0, 0, 1, 0, 0, 0, 0,
0, 0, 0, 1, 0, 0, 0, 0, 0,
0, 0, 1, 0, 0, 0, 0, 0, 0,
0, 1, 1, 1, 1, 1, 1, 1, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 1, 1, 1, 0,
0, 0, 0, 0, 1, 0, 0, 0, 0,
0, 0, 0, 0, 1, 0, 0, 0, 0,
0, 0, 0, 0, 0, 1, 0, 0, 0,
0, 0, 0, 1, 1, 0, 0, 0, 0,
0, 0, 0, 1, 1, 0, 0, 0, 0,
0, 0, 0, 0, 0, 1, 0, 0, 0,
0, 0, 0, 0, 1, 0, 0, 0, 0,
0, 0, 0, 0, 1, 0, 0, 0, 0,
0, 0, 0, 0, 0, 1, 1, 1, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 1, 0, 0, 0, 0,
0, 0, 0, 0, 1, 0, 0, 0, 0,
0, 0, 0, 0, 1, 0, 0, 0, 0,
0, 0, 0, 0, 1, 0, 0, 0, 0,
0, 0, 0, 0, 1, 0, 0, 0, 0,
0, 0, 0, 0, 1, 0, 0, 0, 0,
0, 0, 0, 0, 1, 0, 0, 0, 0,
0, 0, 0, 0, 1, 0, 0, 0, 0,
0, 0, 0, 0, 1, 0, 0, 0, 0,
0, 0, 0, 0, 1, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 1, 1, 1, 0, 0, 0, 0, 0,
0, 0, 0, 0, 1, 0, 0, 0, 0,
0, 0, 0, 0, 1, 0, 0, 0, 0,
0, 0, 0, 1, 0, 0, 0, 0, 0,
0, 0, 0, 0, 1, 1, 0, 0, 0,
0, 0, 0, 0, 1, 1, 0, 0, 0,
0, 0, 0, 1, 0, 0, 0, 0, 0,
0, 0, 0, 0, 1, 0, 0, 0, 0,
0, 0, 0, 0, 1, 0, 0, 0, 0,
0, 1, 1, 1, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0])
FIRST_CHAR = ord(' ')
LAST_CHAR = 126
N_CHARS = LAST_CHAR - FIRST_CHAR
WIDTH_DFT = 9
HEIGHT_DFT = 15
FontDft = reshape(FontDft,(HEIGHT_DFT * N_CHARS, WIDTH_DFT))
y = ()
for c in txt:
i = ord(c) - FIRST_CHAR
assert i < (LAST_CHAR-FIRST_CHAR),'iatext, code not allowed (%s)' % c
if len(y) == 0:
y = FontDft[i*HEIGHT_DFT:(i+1)*HEIGHT_DFT,:]
else:
y = iaconcat('w',y,FontDft[i*HEIGHT_DFT:(i+1)*HEIGHT_DFT,:])
return y
| 47.503822
| 75
| 0.191521
| 12,788
| 68,358
| 1.022443
| 0.003441
| 1.495985
| 1.988375
| 2.381033
| 0.974914
| 0.974914
| 0.974914
| 0.974914
| 0.974914
| 0.96979
| 0
| 0.490917
| 0.621522
| 68,358
| 1,438
| 76
| 47.536857
| 0.014456
| 0.000541
| 0
| 0.972028
| 0
| 0
| 0.000454
| 0
| 0
| 0
| 0
| 0
| 0.000699
| 1
| 0.000699
| false
| 0
| 0.002098
| 0
| 0.003497
| 0
| 0
| 0
| 1
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 14
|
811d0f251aae9651029065dcc70aa6fe78537515
| 163
|
py
|
Python
|
social/apps/webpy_app/__init__.py
|
SeanHayes/python-social-auth
|
4d70b23eb603c1d9753a7982bd7b3bab7cf18d48
|
[
"BSD-3-Clause"
] | 7
|
2017-11-17T01:21:09.000Z
|
2021-03-25T09:55:50.000Z
|
social/apps/webpy_app/__init__.py
|
techdragon/python-social-auth
|
e419e695b39a2948142faee3986d5cabe66e2cee
|
[
"BSD-3-Clause"
] | 19
|
2019-09-25T19:03:41.000Z
|
2021-06-25T15:26:41.000Z
|
social/apps/webpy_app/__init__.py
|
techdragon/python-social-auth
|
e419e695b39a2948142faee3986d5cabe66e2cee
|
[
"BSD-3-Clause"
] | 3
|
2016-04-21T09:50:03.000Z
|
2019-02-05T12:53:15.000Z
|
from social.strategies.utils import set_current_strategy_getter
from social.apps.webpy_app.utils import load_strategy
set_current_strategy_getter(load_strategy)
| 27.166667
| 63
| 0.889571
| 24
| 163
| 5.666667
| 0.541667
| 0.147059
| 0.264706
| 0.352941
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.067485
| 163
| 5
| 64
| 32.6
| 0.894737
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
811d42507377925e686b688fc74392afe568c5b4
| 66,932
|
py
|
Python
|
napalm_yang/models/openconfig/network_instances/network_instance/protocols/protocol/__init__.py
|
ckishimo/napalm-yang
|
8f2bd907bd3afcde3c2f8e985192de74748baf6c
|
[
"Apache-2.0"
] | 64
|
2016-10-20T15:47:18.000Z
|
2021-11-11T11:57:32.000Z
|
napalm_yang/models/openconfig/network_instances/network_instance/protocols/protocol/__init__.py
|
ckishimo/napalm-yang
|
8f2bd907bd3afcde3c2f8e985192de74748baf6c
|
[
"Apache-2.0"
] | 126
|
2016-10-05T10:36:14.000Z
|
2019-05-15T08:43:23.000Z
|
napalm_yang/models/openconfig/network_instances/network_instance/protocols/protocol/__init__.py
|
ckishimo/napalm-yang
|
8f2bd907bd3afcde3c2f8e985192de74748baf6c
|
[
"Apache-2.0"
] | 63
|
2016-11-07T15:23:08.000Z
|
2021-09-22T14:41:16.000Z
|
# -*- coding: utf-8 -*-
from operator import attrgetter
from pyangbind.lib.yangtypes import RestrictedPrecisionDecimalType
from pyangbind.lib.yangtypes import RestrictedClassType
from pyangbind.lib.yangtypes import TypedListType
from pyangbind.lib.yangtypes import YANGBool
from pyangbind.lib.yangtypes import YANGListType
from pyangbind.lib.yangtypes import YANGDynClass
from pyangbind.lib.yangtypes import ReferenceType
from pyangbind.lib.base import PybindBase
from collections import OrderedDict
from decimal import Decimal
from bitarray import bitarray
import six
# PY3 support of some PY2 keywords (needs improved)
if six.PY3:
import builtins as __builtin__
long = int
elif six.PY2:
import __builtin__
from . import config
from . import state
from . import static_routes
from . import local_aggregates
from . import bgp
from . import ospfv2
from . import isis
class protocol(PybindBase):
"""
This class was auto-generated by the PythonClass plugin for PYANG
from YANG module openconfig-network-instance - based on the path /network-instances/network-instance/protocols/protocol. Each member element of
the container is represented as a class variable - with a specific
YANG type.
YANG Description: A process (instance) of a routing protocol. Some
systems may not support more than one instance of
a particular routing protocol
"""
__slots__ = (
"_path_helper",
"_extmethods",
"__identifier",
"__name",
"__config",
"__state",
"__static_routes",
"__local_aggregates",
"__bgp",
"__ospfv2",
"__isis",
)
_yang_name = "protocol"
_pybind_generated_by = "container"
def __init__(self, *args, **kwargs):
self._path_helper = False
self._extmethods = False
self.__identifier = YANGDynClass(
base=six.text_type,
is_leaf=True,
yang_name="identifier",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
is_keyval=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="leafref",
is_config=True,
)
self.__name = YANGDynClass(
base=six.text_type,
is_leaf=True,
yang_name="name",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
is_keyval=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="leafref",
is_config=True,
)
self.__config = YANGDynClass(
base=config.config,
is_container="container",
yang_name="config",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=True,
)
self.__state = YANGDynClass(
base=state.state,
is_container="container",
yang_name="state",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=True,
)
self.__static_routes = YANGDynClass(
base=static_routes.static_routes,
is_container="container",
yang_name="static-routes",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=True,
)
self.__local_aggregates = YANGDynClass(
base=local_aggregates.local_aggregates,
is_container="container",
yang_name="local-aggregates",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=True,
)
self.__bgp = YANGDynClass(
base=bgp.bgp,
is_container="container",
yang_name="bgp",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=True,
)
self.__ospfv2 = YANGDynClass(
base=ospfv2.ospfv2,
is_container="container",
yang_name="ospfv2",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=True,
)
self.__isis = YANGDynClass(
base=isis.isis,
is_container="container",
yang_name="isis",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=True,
)
load = kwargs.pop("load", None)
if args:
if len(args) > 1:
raise TypeError("cannot create a YANG container with >1 argument")
all_attr = True
for e in self._pyangbind_elements:
if not hasattr(args[0], e):
all_attr = False
break
if not all_attr:
raise ValueError("Supplied object did not have the correct attributes")
for e in self._pyangbind_elements:
nobj = getattr(args[0], e)
if nobj._changed() is False:
continue
setmethod = getattr(self, "_set_%s" % e)
if load is None:
setmethod(getattr(args[0], e))
else:
setmethod(getattr(args[0], e), load=load)
def _path(self):
if hasattr(self, "_parent"):
return self._parent._path() + [self._yang_name]
else:
return ["network-instances", "network-instance", "protocols", "protocol"]
def _get_identifier(self):
"""
Getter method for identifier, mapped from YANG variable /network_instances/network_instance/protocols/protocol/identifier (leafref)
YANG Description: The protocol name for the routing or forwarding
protocol to be instantiated
"""
return self.__identifier
def _set_identifier(self, v, load=False):
"""
Setter method for identifier, mapped from YANG variable /network_instances/network_instance/protocols/protocol/identifier (leafref)
If this variable is read-only (config: false) in the
source YANG file, then _set_identifier is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_identifier() directly.
YANG Description: The protocol name for the routing or forwarding
protocol to be instantiated
"""
parent = getattr(self, "_parent", None)
if parent is not None and load is False:
raise AttributeError(
"Cannot set keys directly when" + " within an instantiated list"
)
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=six.text_type,
is_leaf=True,
yang_name="identifier",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
is_keyval=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="leafref",
is_config=True,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """identifier must be of a type compatible with leafref""",
"defined-type": "leafref",
"generated-type": """YANGDynClass(base=six.text_type, is_leaf=True, yang_name="identifier", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='leafref', is_config=True)""",
}
)
self.__identifier = t
if hasattr(self, "_set"):
self._set()
def _unset_identifier(self):
self.__identifier = YANGDynClass(
base=six.text_type,
is_leaf=True,
yang_name="identifier",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
is_keyval=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="leafref",
is_config=True,
)
def _get_name(self):
"""
Getter method for name, mapped from YANG variable /network_instances/network_instance/protocols/protocol/name (leafref)
YANG Description: An operator-assigned identifier for the routing
or forwarding protocol. For some processes this
leaf may be system defined.
"""
return self.__name
def _set_name(self, v, load=False):
"""
Setter method for name, mapped from YANG variable /network_instances/network_instance/protocols/protocol/name (leafref)
If this variable is read-only (config: false) in the
source YANG file, then _set_name is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_name() directly.
YANG Description: An operator-assigned identifier for the routing
or forwarding protocol. For some processes this
leaf may be system defined.
"""
parent = getattr(self, "_parent", None)
if parent is not None and load is False:
raise AttributeError(
"Cannot set keys directly when" + " within an instantiated list"
)
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=six.text_type,
is_leaf=True,
yang_name="name",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
is_keyval=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="leafref",
is_config=True,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """name must be of a type compatible with leafref""",
"defined-type": "leafref",
"generated-type": """YANGDynClass(base=six.text_type, is_leaf=True, yang_name="name", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='leafref', is_config=True)""",
}
)
self.__name = t
if hasattr(self, "_set"):
self._set()
def _unset_name(self):
self.__name = YANGDynClass(
base=six.text_type,
is_leaf=True,
yang_name="name",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
is_keyval=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="leafref",
is_config=True,
)
def _get_config(self):
"""
Getter method for config, mapped from YANG variable /network_instances/network_instance/protocols/protocol/config (container)
YANG Description: Configuration parameters relating to the routing
protocol instance
"""
return self.__config
def _set_config(self, v, load=False):
"""
Setter method for config, mapped from YANG variable /network_instances/network_instance/protocols/protocol/config (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_config is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_config() directly.
YANG Description: Configuration parameters relating to the routing
protocol instance
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=config.config,
is_container="container",
yang_name="config",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=True,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """config must be of a type compatible with container""",
"defined-type": "container",
"generated-type": """YANGDynClass(base=config.config, is_container='container', yang_name="config", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=True)""",
}
)
self.__config = t
if hasattr(self, "_set"):
self._set()
def _unset_config(self):
self.__config = YANGDynClass(
base=config.config,
is_container="container",
yang_name="config",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=True,
)
def _get_state(self):
"""
Getter method for state, mapped from YANG variable /network_instances/network_instance/protocols/protocol/state (container)
YANG Description: State parameters relating to the routing protocol
instance
"""
return self.__state
def _set_state(self, v, load=False):
"""
Setter method for state, mapped from YANG variable /network_instances/network_instance/protocols/protocol/state (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_state is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_state() directly.
YANG Description: State parameters relating to the routing protocol
instance
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=state.state,
is_container="container",
yang_name="state",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=True,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """state must be of a type compatible with container""",
"defined-type": "container",
"generated-type": """YANGDynClass(base=state.state, is_container='container', yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=True)""",
}
)
self.__state = t
if hasattr(self, "_set"):
self._set()
def _unset_state(self):
self.__state = YANGDynClass(
base=state.state,
is_container="container",
yang_name="state",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=True,
)
def _get_static_routes(self):
"""
Getter method for static_routes, mapped from YANG variable /network_instances/network_instance/protocols/protocol/static_routes (container)
YANG Description: Enclosing container for the list of static routes
"""
return self.__static_routes
def _set_static_routes(self, v, load=False):
"""
Setter method for static_routes, mapped from YANG variable /network_instances/network_instance/protocols/protocol/static_routes (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_static_routes is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_static_routes() directly.
YANG Description: Enclosing container for the list of static routes
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=static_routes.static_routes,
is_container="container",
yang_name="static-routes",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=True,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """static_routes must be of a type compatible with container""",
"defined-type": "container",
"generated-type": """YANGDynClass(base=static_routes.static_routes, is_container='container', yang_name="static-routes", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=True)""",
}
)
self.__static_routes = t
if hasattr(self, "_set"):
self._set()
def _unset_static_routes(self):
self.__static_routes = YANGDynClass(
base=static_routes.static_routes,
is_container="container",
yang_name="static-routes",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=True,
)
def _get_local_aggregates(self):
"""
Getter method for local_aggregates, mapped from YANG variable /network_instances/network_instance/protocols/protocol/local_aggregates (container)
YANG Description: Enclosing container for locally-defined aggregate
routes
"""
return self.__local_aggregates
def _set_local_aggregates(self, v, load=False):
"""
Setter method for local_aggregates, mapped from YANG variable /network_instances/network_instance/protocols/protocol/local_aggregates (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_local_aggregates is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_local_aggregates() directly.
YANG Description: Enclosing container for locally-defined aggregate
routes
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=local_aggregates.local_aggregates,
is_container="container",
yang_name="local-aggregates",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=True,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """local_aggregates must be of a type compatible with container""",
"defined-type": "container",
"generated-type": """YANGDynClass(base=local_aggregates.local_aggregates, is_container='container', yang_name="local-aggregates", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=True)""",
}
)
self.__local_aggregates = t
if hasattr(self, "_set"):
self._set()
def _unset_local_aggregates(self):
self.__local_aggregates = YANGDynClass(
base=local_aggregates.local_aggregates,
is_container="container",
yang_name="local-aggregates",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=True,
)
def _get_bgp(self):
"""
Getter method for bgp, mapped from YANG variable /network_instances/network_instance/protocols/protocol/bgp (container)
YANG Description: Top-level configuration and state for the BGP router
"""
return self.__bgp
def _set_bgp(self, v, load=False):
"""
Setter method for bgp, mapped from YANG variable /network_instances/network_instance/protocols/protocol/bgp (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_bgp is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_bgp() directly.
YANG Description: Top-level configuration and state for the BGP router
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=bgp.bgp,
is_container="container",
yang_name="bgp",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=True,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """bgp must be of a type compatible with container""",
"defined-type": "container",
"generated-type": """YANGDynClass(base=bgp.bgp, is_container='container', yang_name="bgp", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=True)""",
}
)
self.__bgp = t
if hasattr(self, "_set"):
self._set()
def _unset_bgp(self):
self.__bgp = YANGDynClass(
base=bgp.bgp,
is_container="container",
yang_name="bgp",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=True,
)
def _get_ospfv2(self):
"""
Getter method for ospfv2, mapped from YANG variable /network_instances/network_instance/protocols/protocol/ospfv2 (container)
YANG Description: Top-level configuration and operational state for
Open Shortest Path First (OSPF) v2
"""
return self.__ospfv2
def _set_ospfv2(self, v, load=False):
"""
Setter method for ospfv2, mapped from YANG variable /network_instances/network_instance/protocols/protocol/ospfv2 (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_ospfv2 is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_ospfv2() directly.
YANG Description: Top-level configuration and operational state for
Open Shortest Path First (OSPF) v2
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=ospfv2.ospfv2,
is_container="container",
yang_name="ospfv2",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=True,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """ospfv2 must be of a type compatible with container""",
"defined-type": "container",
"generated-type": """YANGDynClass(base=ospfv2.ospfv2, is_container='container', yang_name="ospfv2", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=True)""",
}
)
self.__ospfv2 = t
if hasattr(self, "_set"):
self._set()
def _unset_ospfv2(self):
self.__ospfv2 = YANGDynClass(
base=ospfv2.ospfv2,
is_container="container",
yang_name="ospfv2",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=True,
)
def _get_isis(self):
"""
Getter method for isis, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis (container)
YANG Description: This container defines top-level ISIS configuration and state
information.
"""
return self.__isis
def _set_isis(self, v, load=False):
"""
Setter method for isis, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_isis is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_isis() directly.
YANG Description: This container defines top-level ISIS configuration and state
information.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=isis.isis,
is_container="container",
yang_name="isis",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=True,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """isis must be of a type compatible with container""",
"defined-type": "container",
"generated-type": """YANGDynClass(base=isis.isis, is_container='container', yang_name="isis", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=True)""",
}
)
self.__isis = t
if hasattr(self, "_set"):
self._set()
def _unset_isis(self):
self.__isis = YANGDynClass(
base=isis.isis,
is_container="container",
yang_name="isis",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=True,
)
identifier = __builtin__.property(_get_identifier, _set_identifier)
name = __builtin__.property(_get_name, _set_name)
config = __builtin__.property(_get_config, _set_config)
state = __builtin__.property(_get_state, _set_state)
static_routes = __builtin__.property(_get_static_routes, _set_static_routes)
local_aggregates = __builtin__.property(
_get_local_aggregates, _set_local_aggregates
)
bgp = __builtin__.property(_get_bgp, _set_bgp)
ospfv2 = __builtin__.property(_get_ospfv2, _set_ospfv2)
isis = __builtin__.property(_get_isis, _set_isis)
_pyangbind_elements = OrderedDict(
[
("identifier", identifier),
("name", name),
("config", config),
("state", state),
("static_routes", static_routes),
("local_aggregates", local_aggregates),
("bgp", bgp),
("ospfv2", ospfv2),
("isis", isis),
]
)
from . import config
from . import state
from . import static_routes
from . import local_aggregates
from . import bgp
from . import ospfv2
from . import isis
# The original auto-generated body repeated an identical get/set/unset
# triplet for every YANG child element (~700 lines of copy-paste).  The
# table below captures the only per-element differences; the accessor
# factory generates behaviorally identical methods from it, including the
# exact error strings raised on invalid assignment.
#
# Each entry: (python_name, yang_name, container_base, base_expr_text).
# container_base is None for the two leafref key leaves, whose base type
# is always six.text_type.
_PROTOCOL_NS = "http://openconfig.net/yang/network-instance"
_PROTOCOL_MOD = "openconfig-network-instance"
_PROTOCOL_ELEMENTS = (
    ("identifier", "identifier", None, None),
    ("name", "name", None, None),
    ("config", "config", config.config, "config.config"),
    ("state", "state", state.state, "state.state"),
    ("static_routes", "static-routes", static_routes.static_routes,
     "static_routes.static_routes"),
    ("local_aggregates", "local-aggregates", local_aggregates.local_aggregates,
     "local_aggregates.local_aggregates"),
    ("bgp", "bgp", bgp.bgp, "bgp.bgp"),
    ("ospfv2", "ospfv2", ospfv2.ospfv2, "ospfv2.ospfv2"),
    ("isis", "isis", isis.isis, "isis.isis"),
)


def _protocol_accessors(pyname, yang_name, base, base_expr):
    """Build the (_get, _set, _unset) implementations for one child element
    of /network-instances/network-instance/protocols/protocol.

    Leafref key leaves (base is None) keep the original behavior of
    refusing direct assignment while the object lives inside an
    instantiated list (unless load=True).
    """
    # Instance attribute name after class-private mangling of "__<pyname>"
    # inside ``class protocol`` — this matches the __slots__ entries.
    attr = "_protocol__%s" % pyname
    is_keyval = base is None
    if is_keyval:
        defined_type = "leafref"
        # Exact text of the original generated-type diagnostic string.
        generated_type = (
            'YANGDynClass(base=six.text_type, is_leaf=True, yang_name="%s", '
            "parent=self, path_helper=self._path_helper, "
            "extmethods=self._extmethods, register_paths=True, is_keyval=True, "
            "namespace='http://openconfig.net/yang/network-instance', "
            "defining_module='openconfig-network-instance', "
            "yang_type='leafref', is_config=True)" % yang_name
        )
    else:
        defined_type = "container"
        generated_type = (
            "YANGDynClass(base=%s, is_container='container', yang_name=\"%s\", "
            "parent=self, path_helper=self._path_helper, "
            "extmethods=self._extmethods, register_paths=True, extensions=None, "
            "namespace='http://openconfig.net/yang/network-instance', "
            "defining_module='openconfig-network-instance', "
            "yang_type='container', is_config=True)" % (base_expr, yang_name)
        )

    def _mk(self, *cur):
        # Build a fresh YANGDynClass for this element, optionally wrapping a
        # caller-supplied value; mirrors the original per-element call sites.
        if is_keyval:
            return YANGDynClass(
                *cur,
                base=six.text_type,
                is_leaf=True,
                yang_name=yang_name,
                parent=self,
                path_helper=self._path_helper,
                extmethods=self._extmethods,
                register_paths=True,
                is_keyval=True,
                namespace=_PROTOCOL_NS,
                defining_module=_PROTOCOL_MOD,
                yang_type="leafref",
                is_config=True
            )
        return YANGDynClass(
            *cur,
            base=base,
            is_container="container",
            yang_name=yang_name,
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            extensions=None,
            namespace=_PROTOCOL_NS,
            defining_module=_PROTOCOL_MOD,
            yang_type="container",
            is_config=True
        )

    def _get(self):
        return getattr(self, attr)

    _get.__doc__ = (
        "Getter method for %s, mapped from YANG variable "
        "/network_instances/network_instance/protocols/protocol/%s (%s)"
        % (pyname, pyname, defined_type)
    )

    def _set(self, v, load=False):
        # Key leaves may not be set directly once inside an instantiated
        # list; backends loading data pass load=True to bypass the guard.
        if is_keyval:
            parent = getattr(self, "_parent", None)
            if parent is not None and load is False:
                raise AttributeError(
                    "Cannot set keys directly when" + " within an instantiated list"
                )
        if hasattr(v, "_utype"):
            v = v._utype(v)
        try:
            t = _mk(self, v)
        except (TypeError, ValueError):
            raise ValueError(
                {
                    "error-string": "%s must be of a type compatible with %s"
                    % (pyname, defined_type),
                    "defined-type": defined_type,
                    "generated-type": generated_type,
                }
            )
        setattr(self, attr, t)
        if hasattr(self, "_set"):
            self._set()

    _set.__doc__ = (
        "Setter method for %s, mapped from YANG variable "
        "/network_instances/network_instance/protocols/protocol/%s (%s). "
        "If this variable is read-only (config: false) in the source YANG "
        "file, then _set_%s is considered a private method."
        % (pyname, pyname, defined_type, pyname)
    )

    def _unset(self):
        # Reset the element to a fresh, unmodified default instance.
        setattr(self, attr, _mk(self))

    return _get, _set, _unset


class protocol(PybindBase):
    """
    This class was auto-generated by the PythonClass plugin for PYANG
    from YANG module openconfig-network-instance-l2 - based on the path /network-instances/network-instance/protocols/protocol. Each member element of
    the container is represented as a class variable - with a specific
    YANG type.

    YANG Description: A process (instance) of a routing protocol. Some
    systems may not support more than one instance of
    a particular routing protocol
    """

    # NOTE: the "__<name>" entries are mangled by Python to
    # "_protocol__<name>", which is exactly the attribute name the
    # generated accessors read and write via getattr/setattr.
    __slots__ = (
        "_path_helper",
        "_extmethods",
        "__identifier",
        "__name",
        "__config",
        "__state",
        "__static_routes",
        "__local_aggregates",
        "__bgp",
        "__ospfv2",
        "__isis",
    )

    _yang_name = "protocol"
    _pybind_generated_by = "container"

    def __init__(self, *args, **kwargs):
        self._path_helper = False
        self._extmethods = False
        # Give every child element a fresh default value, in model order.
        for elem in self._pyangbind_elements:
            getattr(self, "_unset_%s" % elem)()
        load = kwargs.pop("load", None)
        if args:
            if len(args) > 1:
                raise TypeError("cannot create a YANG container with >1 argument")
            # The supplied object must expose every element of this container.
            if not all(hasattr(args[0], e) for e in self._pyangbind_elements):
                raise ValueError("Supplied object did not have the correct attributes")
            # Copy over only the elements the source object actually changed.
            for e in self._pyangbind_elements:
                nobj = getattr(args[0], e)
                if nobj._changed() is False:
                    continue
                setmethod = getattr(self, "_set_%s" % e)
                if load is None:
                    setmethod(nobj)
                else:
                    setmethod(nobj, load=load)

    def _path(self):
        # Delegate to the parent when attached; otherwise fall back to the
        # static schema path of this container.
        if hasattr(self, "_parent"):
            return self._parent._path() + [self._yang_name]
        return ["network-instances", "network-instance", "protocols", "protocol"]


# Attach the generated accessors, properties and the ordered element
# registry to the class.  Externally equivalent to the original
# hand-expanded method and property definitions.
protocol._pyangbind_elements = OrderedDict()
for _elem, _yname, _base, _bexpr in _PROTOCOL_ELEMENTS:
    _g, _s, _u = _protocol_accessors(_elem, _yname, _base, _bexpr)
    setattr(protocol, "_get_%s" % _elem, _g)
    setattr(protocol, "_set_%s" % _elem, _s)
    setattr(protocol, "_unset_%s" % _elem, _u)
    setattr(protocol, _elem, __builtin__.property(_g, _s))
    protocol._pyangbind_elements[_elem] = getattr(protocol, _elem)
del _elem, _yname, _base, _bexpr, _g, _s, _u
| 39.464623
| 407
| 0.602163
| 6,922
| 66,932
| 5.591159
| 0.031349
| 0.072089
| 0.052814
| 0.059273
| 0.990311
| 0.984704
| 0.984704
| 0.984704
| 0.984704
| 0.984704
| 0
| 0.00189
| 0.304189
| 66,932
| 1,695
| 408
| 39.487906
| 0.829125
| 0.201951
| 0
| 0.891135
| 0
| 0.013997
| 0.262492
| 0.089861
| 0
| 0
| 0
| 0
| 0
| 1
| 0.045101
| false
| 0
| 0.022551
| 0
| 0.106532
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
d4c04421c90c5fa9b1bb7a652ac89ee33da75c24
| 5,959
|
py
|
Python
|
tests/grab_upload_file.py
|
alexey-v-paramonov/grab
|
a3bfda3ae16b615eeaa0323c2ba72a9d901cba7b
|
[
"MIT"
] | null | null | null |
tests/grab_upload_file.py
|
alexey-v-paramonov/grab
|
a3bfda3ae16b615eeaa0323c2ba72a9d901cba7b
|
[
"MIT"
] | null | null | null |
tests/grab_upload_file.py
|
alexey-v-paramonov/grab
|
a3bfda3ae16b615eeaa0323c2ba72a9d901cba7b
|
[
"MIT"
] | 1
|
2022-01-12T14:51:27.000Z
|
2022-01-12T14:51:27.000Z
|
# coding: utf-8
import os
from grab import UploadContent, UploadFile
from tests.util import build_grab, temp_file, BaseGrabTestCase
class TestUploadContent(BaseGrabTestCase):
    """Exercise form uploads built from UploadContent and UploadFile objects.

    Every test submits the upload twice: once with make_request=False to
    inspect the prepared multipart POST, and once for real to verify what
    the test server actually received.
    """

    def setUp(self):
        self.server.reset()

    def prepare_form_grab(self):
        """Return a Grab object loaded with a one-field multipart upload form."""
        url = self.server.get_url()
        html = (
            '<form action="%s" method="post" enctype="multipart/form-data">'
            '<input type="file" name="image">'
            '</form>' % url
        ).encode('ascii')
        grab = build_grab(html, charset='utf-8')
        return grab

    def _submit_upload(self, upload_obj, expected_class):
        """Submit *upload_obj* through the form and return the server-side
        file record (a dict with body/filename/content_type keys)."""
        grab = self.prepare_form_grab()
        grab.doc.set_input('image', upload_obj)
        # Dry-run first so the prepared multipart POST can be inspected.
        grab.doc.submit(make_request=False)
        post = dict(grab.config['multipart_post'])
        self.assertTrue(isinstance(post['image'], expected_class))
        grab.doc.submit()
        return self.server.request['files']['image'][0]

    # *******************
    # UploadContent Tests
    # *******************

    def test_upload_content_filename(self):
        payload = b'foo'
        item = self._submit_upload(
            UploadContent(payload, filename='avatar.jpg'), UploadContent)
        self.assertEqual(payload, item['body'])
        self.assertEqual('avatar.jpg', item['filename'])
        self.assertEqual('image/jpeg', item['content_type'])

    def test_upload_content_random_filename(self):
        payload = b'foo'
        item = self._submit_upload(UploadContent(payload), UploadContent)
        self.assertEqual(payload, item['body'])
        # Auto-generated filenames are always 10 characters long.
        self.assertEqual(10, len(item['filename']))
        self.assertEqual('application/octet-stream', item['content_type'])

    def test_upload_content_content_type(self):
        payload = b'foo'
        item = self._submit_upload(
            UploadContent(payload, content_type='application/grab'),
            UploadContent)
        self.assertEqual(payload, item['body'])
        self.assertEqual(10, len(item['filename']))
        self.assertEqual('application/grab', item['content_type'])

    # ****************
    # UploadFile Tests
    # ****************

    def test_upload_file(self):
        with temp_file() as file_path:
            payload = b'foo'
            with open(file_path, 'wb') as out:
                out.write(payload)
            item = self._submit_upload(UploadFile(file_path), UploadFile)
            self.assertEqual(payload, item['body'])
            # The default filename is the basename of the uploaded file.
            self.assertEqual(os.path.split(file_path)[1], item['filename'])
            self.assertEqual('application/octet-stream', item['content_type'])

    def test_upload_file_custom_filename(self):
        with temp_file() as file_path:
            payload = b'foo'
            with open(file_path, 'wb') as out:
                out.write(payload)
            item = self._submit_upload(
                UploadFile(file_path, filename='avatar.jpg'), UploadFile)
            self.assertEqual(payload, item['body'])
            self.assertEqual('avatar.jpg', item['filename'])
            self.assertEqual('image/jpeg', item['content_type'])

    def test_upload_file_custom_content_type(self):
        with temp_file() as file_path:
            payload = b'foo'
            with open(file_path, 'wb') as out:
                out.write(payload)
            item = self._submit_upload(
                UploadFile(file_path, filename='avatar.jpg',
                           content_type='application/grab'),
                UploadFile)
            self.assertEqual(payload, item['body'])
            self.assertEqual('avatar.jpg', item['filename'])
            self.assertEqual('application/grab', item['content_type'])
| 38.445161
| 78
| 0.549421
| 623
| 5,959
| 5.107544
| 0.128411
| 0.062854
| 0.096166
| 0.12445
| 0.844123
| 0.844123
| 0.844123
| 0.844123
| 0.844123
| 0.844123
| 0
| 0.005758
| 0.300554
| 5,959
| 154
| 79
| 38.694805
| 0.757678
| 0.020809
| 0
| 0.736434
| 0
| 0
| 0.136434
| 0.013386
| 0
| 0
| 0
| 0
| 0.186047
| 1
| 0.062016
| false
| 0
| 0.023256
| 0
| 0.100775
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
d4c90979921d1d17b5ab157785be856086b108a1
| 234,914
|
py
|
Python
|
codegen/ops_testgen.py
|
m1griffin/arrayfunc
|
df57097699c25d3e949e1ade307ed61eaa5728c2
|
[
"Apache-2.0"
] | 2
|
2017-08-28T08:41:16.000Z
|
2018-05-29T03:49:36.000Z
|
codegen/ops_testgen.py
|
m1griffin/arrayfunc
|
df57097699c25d3e949e1ade307ed61eaa5728c2
|
[
"Apache-2.0"
] | null | null | null |
codegen/ops_testgen.py
|
m1griffin/arrayfunc
|
df57097699c25d3e949e1ade307ed61eaa5728c2
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python3
##############################################################################
# Project: arrayfunc
# Purpose: Generate the unit tests for math operators.
# Language: Python 3.5
# Date: 03-Feb-2018
#
###############################################################################
#
# Copyright 2014 - 2018 Michael Griffin <m12.griffin@gmail.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
##############################################################################
# ==============================================================================
import itertools
import codegen_common
# ==============================================================================
# Data generators. These are used to create test data algorithmically. These will
# vary depending on the operation being performed.
# truediv =============================================================
# Maps each math operator name (e.g. 'add', 'truediv', 'pow') to the Python
# source text of a filtertestdata() function. The generator pastes the
# selected snippet into the emitted unit-test module; at test run time that
# function prunes (x, y) operand pairs which would overflow the array type's
# numeric range or raise errors such as division by zero.
datafilters = { 'truediv' : '''
########################################################
def inttruediv(x, y):
"""Perform the math operation. This needs to be specially handled
for truediv on large signed integer arrays. This is because of a
combination of factors. Python will produce a floating point result,
but we we want an integer result when using integer arrays. If we
simply convert the result back to integer then we lose precision on
large integers, introducing errors. If we try to emulate it using
floor division, then when using mixed positive and negative inputs
the result is rounded away from zero, producing an incorrect result.
So, we need to take the absolute value, then do floor division, then
put the correct sign back into the result.
"""
# This is intended to catch template errors and should never
# occur in normal usage.
# For true division on integer arrays.
# For when signs are opposite in signed arrays.
if ((x < 0) ^ (y < 0)):
return -(abs(x) // abs(y))
else:
return x // y
########################################################
def filtertestdata(opvalues, minint, maxint, typecode):
"""Filter the test data for combinations that might cause errors.
This version is for truediv.
"""
# Truediv needs special handling for integer because the C function does
# not do actual truediv for integer.
checkedvalues = [(x,y) for x,y in opvalues if ((y != 0) and (inttruediv(x, y) <= maxint) and (inttruediv(x, y) >= minint))]
return checkedvalues
''',
# add =============================================================
'add' : '''
########################################################
def filtertestdata(opvalues, minint, maxint, typecode):
"""Filter the test data for combinations that might cause errors.
This version is for add.
"""
checkedvalues = [(x,y) for x,y in opvalues if ((x + y) <= maxint) and ((x + y) >= minint)]
return checkedvalues
''',
# floordiv =============================================================
'floordiv' : '''
########################################################
def filtertestdata(opvalues, minint, maxint, typecode):
"""Filter the test data for combinations that might cause errors.
This version is for floordiv.
"""
# Avoid division by zero
checkedvalues = [(x,y) for x,y in opvalues if ((y != 0) and ((x // y) <= maxint) and ((x // y) >= minint))]
return checkedvalues
''',
# mod =============================================================
'mod' : '''
########################################################
def filtertestdata(opvalues, minint, maxint, typecode):
"""Filter the test data for combinations that might cause errors.
This version is for mod.
"""
# Avoid division by zero
checkedvalues = [(x,y) for x,y in opvalues if ((y != 0) and ((x % y) <= maxint) and ((x % y) >= minint))]
return checkedvalues
''',
# mul =============================================================
'mul' : '''
########################################################
def filtertestdata(opvalues, minint, maxint, typecode):
"""Filter the test data for combinations that might cause errors.
This version is for mul.
"""
checkedvalues = [(x,y) for x,y in opvalues if ((x * y) <= maxint) and ((x * y) >= minint)]
return checkedvalues
''',
# pow =============================================================
# NOTE: for float typecodes the pow filter only rejects 0 ** negative
# (undefined); for integer typecodes it additionally rejects all
# negative exponents, since integer pow cannot represent fractions.
'pow' : '''
########################################################
def filtertestdata(opvalues, minint, maxint, typecode):
"""Filter the test data for combinations that might cause errors.
This version is for pow.
"""
if typecode in ('f', 'd'):
checkedvalues = [(x, y) for x,y in opvalues if ((not((x == 0) and (y < 0))) and (minint <= (x**y) <= maxint))]
else:
checkedvalues = [(x, y) for x,y in opvalues if (y >= 0) and (minint <= (x**y) <= maxint)]
return checkedvalues
''',
# sub =============================================================
'sub' : '''
########################################################
def filtertestdata(opvalues, minint, maxint, typecode):
"""Filter the test data for combinations that might cause errors.
This version is for sub.
"""
checkedvalues = [(x,y) for x,y in opvalues if ((x - y) <= maxint) and ((x - y) >= minint)]
return checkedvalues
''',
}
# ===
# Used for everything except pow.
# Source text for the general-purpose data generators, used for every
# operator except pow. gendata_special() produces edge values clustered
# around the minimum, maximum and midpoint of the type's range;
# gendata_int() spreads test values across the whole numeric range and then
# thins the result down to a manageable sample. Both rely on the
# operator-specific filtertestdata() snippet (from 'datafilters' above)
# being pasted into the same generated module.
gendata_general = '''
########################################################
def gendata_special(minint, maxint, typecode):
""" Generate data for special cases which might cause problems.
For integers these will be minimum and maximum values, as well as around
the zero point.
"""
# Make sure that we have coverage for data around the maximum, minimum, and zero
# points, which we might otherwise not have with larger data sizes.
halfpoint = (maxint + minint) // 2
specialvals = [minint, minint + 1, minint + 2, minint + 3,
maxint - 3, maxint - 2, maxint - 1, maxint,
halfpoint - 3, halfpoint - 2, halfpoint - 1, halfpoint,
halfpoint + 1, halfpoint + 2, halfpoint + 3, halfpoint + 4]
# Create combinations of all of these values.
opvalues = list(itertools.product(specialvals, specialvals))
# Filter out values which might cause errors.
checkedvalues = filtertestdata(opvalues, minint, maxint, typecode)
checkedvalues.sort()
return checkedvalues
########################################################
def gendata_int(minint, maxint, typecode):
"""Generate data for general testing. This does not worry about edge case
data. Edge cases must be created and tested separately. This function
generates a wide selection of data over the numeric range.
"""
# This will generate a selection of data spread over most of the integer
# while giving the same amount of data for each data type.
intrange = maxint - minint
stepcount = intrange // 256
stepcount = max(stepcount, 1)
spreaddata = list(range(minint, maxint + 1, stepcount))
# Make sure we have a good selection of smaller values as well.
if (maxint > 256):
if minint < 0:
mindata = -128
maxdata = 127
else:
mindata = 0
maxdata = 255
spreaddata.extend(range(mindata, maxdata, 3))
# Remove duplicates.
spreaddata = list(set(spreaddata))
# Sort the data out in order.
spreaddata.sort()
# Trim down the size of the sample.
selectedspread = spreaddata[::3]
# Create combinations of all of these values.
opvalues = list(itertools.product(selectedspread, selectedspread))
# Filter out values which might cause errors.
checkedvalues = filtertestdata(opvalues, minint, maxint, typecode)
# Sort the data out in order.
checkedvalues.sort()
# Now pick a smaller and more reasonable size selection over the full range.
skipsize = len(checkedvalues) // 256
skipsize = max(skipsize, 1)
selectedvals = checkedvalues[::skipsize]
return selectedvals
'''
# Used only for pow.
# Source text for the pow-specific data generators. pow needs its own
# handling because the left operand must stay within roughly sqrt(maxint)
# and the exponent within log2(maxint) for the result (lval ** rval) to fit
# in the array type. NOTE(review): the generated gendata_pow() calls
# math.sqrt, so the emitted test module is expected to import math —
# that import is not visible in this chunk; confirm in the assembly code.
gendata_pow = '''
########################################################
def gendata_specialpow(minint, maxint, typecode):
""" Generate data for special cases which might cause problems.
This one handles the data for pow only.
For integers these will be minimum and maximum values, as well as around
the zero point.
"""
halfpoint = (maxint + minint) // 2
basevals = [minint, minint + 1, minint + 2, minint + 3,
maxint - 3, maxint - 2, maxint - 1, maxint,
halfpoint - 3, halfpoint - 2, halfpoint - 1, halfpoint,
halfpoint + 1, halfpoint + 2, halfpoint + 3, halfpoint + 4]
# Raise to the power of 0 or 1.
zerovals = [(x,0) for x in basevals]
onevals = [(x,1) for x in basevals]
# Raise 1 or zero to a power. Make sure we don't have negative powers.
zerovals2 = [(0,x) for x in basevals if x >= 0]
onevals2 = [(1,x) for x in basevals if x >= 0]
# Raise some simple values to some common powers.
if minint < 0:
minstart = -3
else:
minstart = 0
simplerange = list(range(minstart, 4))
simplevals = list(itertools.product(simplerange, [0, 1, 2, 3, 4]))
# These pairs were found to cause problems with some edge cases.
# They all produce maximum negative integer for certain array types.
# They represent tests for a variety of array types and have to be
# filtered for each array code.
limitpairs = [(-2, 7), (-2, 15), (-8, 5), (-32, 3), (-2, 31),
(-2, 63), (-8, 21), (-127, 9), (-512, 7), (-2097152, 3)]
# Combine them all together.
allvals = zerovals + onevals + zerovals2 + onevals2 + simplevals + limitpairs
# Now filter them.
checkedvalues = filtertestdata(allvals, minint, maxint, typecode)
return checkedvalues
########################################################
def gendata_pow(minint, maxint, typecode):
"""Generate data for general testing. This is specifically for pow as
that operation has special requirements.
"""
# We need two values for lval ** rval. The left hand one can be no bigger
# than the square root of the maximum value in order to fit within the
# data range (lval ** 2).
lval = int(math.sqrt(maxint))
stepcount = lval // 256
stepcount = max(stepcount, 1)
if minint < 0:
lvalstart = -lval
else:
lvalstart = 0
lvalspread = list(range(lvalstart, lval, stepcount))
# Make sure we have a good selection of smaller values as well.
if (maxint > 32768):
if minint < 0:
mindata = -128
maxdata = 127
else:
mindata = 0
maxdata = 255
lvalspread.extend(range(mindata, maxdata, 3))
# Remove duplicates.
lvalspread = list(set(lvalspread))
lvalspread.sort()
# Take a few values which we will add back in later.
lvalcentre = len(lvalspread) // 2
extralvals = lvalspread[2:4] + lvalspread[-4:-2] + lvalspread[lvalcentre : lvalcentre + 2]
# The right hand one (power to raise by) can be no bigger than 'x' where
# 2 ** x. and the result is the maximum integer value.
raisevals = {127 : 7, 255 : 8, 32767 : 15, 65535 : 16,
2147483647 : 31, 4294967295 : 32,
9223372036854775807 : 63, 18446744073709551615 : 64}
rval = raisevals[maxint]
# We start the range at 2 because 0 and 1 are trivial and we don't want
# too many of them in the data mix.
rvalspread = list(range(2, rval))
# Create the combinations
opvalues = list(itertools.product(lvalspread, rvalspread))
# Filter out the values which would go out of range.
checkedvalues = filtertestdata(opvalues, minint, maxint, typecode)
# Sort the data out in order.
checkedvalues.sort()
# Now pick a smaller and more reasonable size selection over the full range.
skipsize = len(checkedvalues) // 256
skipsize = max(skipsize, 1)
selectedvals = checkedvalues[::skipsize]
# Create the additional values involving the trivial cases of raise
# to the power of 0 or 1.
additionalvals = list(itertools.product(extralvals, [0, 1]))
selectedvals.extend(additionalvals)
selectedvals.sort()
return selectedvals
'''
# Used for everything including pow.
# Source text for the exhaustive generator plus a grouping helper.
# gendata_fullrange() forms the full cross product of the type's value
# range, so it is only suitable for small integer types (e.g. 8-bit).
# groupdata() regroups (x, y) pairs into (sequence-of-x, y) tuples, padding
# short sequences by repetition so they can fill a test array of the
# desired length. NOTE(review): groupdata uses itertools.groupby, which
# only groups adjacent items — callers are expected to pre-sort by the
# group key (the generated setUpClass does sort first).
gendata_fullrange = '''
########################################################
def gendata_fullrange(minint, maxint, typecode):
"""Generate data for general testing. Generate all combinations of data
that do not result in an overflow. This should only be used for small integers
as otherwise the amount of data generated is excessive.
This version does handle pow (**) as well as other operations.
"""
spreaddata = list(range(minint, maxint + 1, 1))
# Create combinations of all of these values.
opvalues = list(itertools.product(spreaddata, spreaddata))
# Filter out values which might cause errors.
checkedvalues = filtertestdata(opvalues, minint, maxint, typecode)
# Sort the data out in order.
checkedvalues.sort()
return checkedvalues
########################################################
def groupdata(datasample, desiredlen):
"""This takes the data pairs and groups them together such that there is a
sequence and a value (e.g. ([1,2,3,4}, 9) ). The sequence groups together
all the values which are compatible with the value in this operation. If
the sequence is shorter than the desired length it is repeated as many
times as necessary to pad it out to the desired length.
"""
# This helps pad out the data for pairs which have sequences shorter than desired.
padder = lambda x : x if len(x) > desiredlen else (x * (desiredlen // len(x))) + x[: desiredlen % len(x)]
# Group the samples.
return [(padder([i for i,j in x]),y) for y,x in itertools.groupby(datasample, lambda k : k[1])]
'''
# ==============================================================================
# This template is for operators (e.g. +, -, /, *, etc.).
# Template for one generated unittest.TestCase class per combination of
# (function, array type code, data generator, even/odd array size).
# '%(name)s' placeholders are substituted via %-formatting when the test
# file is assembled; literal '%' characters needed in the generated code
# are escaped here as '%%'. The generated class patches assertEqual (via
# addTypeEqualityFunc) with tolerance-aware comparisons for floats, and
# exercises all six call forms: array-num-none, array-num-array,
# num-array-none, num-array-array, array-array-none, array-array-array,
# each with/without matherrors and with/without a maxlen limit.
test_op_templ = '''
##############################################################################
class %(funclabel)s_general_%(datagenerator)s_%(arrayevenodd)s_arraysize_%(typelabel)s(unittest.TestCase):
"""Test %(funclabel)s for basic general function operation using numeric data.
test_op_templ
"""
##############################################################################
def FloatassertEqual(self, dataoutitem, expecteditem, msg=None):
"""This function is patched into assertEqual to allow testing for
the floating point special values NaN, Inf, and -Inf.
"""
# NaN cannot be compared using normal means.
if math.isnan(dataoutitem) and math.isnan(expecteditem):
pass
# Anything else can be compared normally.
else:
if not math.isclose(expecteditem, dataoutitem, rel_tol=0.01, abs_tol=0.0):
raise self.failureException('%%0.3f != %%0.3f' %% (expecteditem, dataoutitem))
##############################################################################
def FloatListassertEqual(self, dataout, expected, msg=None):
"""This function is patched into assertEqual to allow testing for
the floating point special values NaN, Inf, and -Inf.
"""
for index, (dataoutitem, expecteditem) in enumerate(zip(dataout, expected)):
# NaN cannot be compared using normal means.
if math.isnan(dataoutitem) and math.isnan(expecteditem):
pass
# Anything else can be compared normally.
else:
if not math.isclose(expecteditem, dataoutitem, rel_tol=0.01, abs_tol=0.0):
raise self.failureException('%%0.3f != %%0.3f at index %%d' %% (expecteditem, dataoutitem, index))
##############################################################################
def IntListassertEqual(self, dataout, expected, msg=None):
"""This function is patched into assertEqual to allow testing for
lists of integers.
"""
for index, (dataoutitem, expecteditem) in enumerate(zip(dataout, expected)):
if expecteditem != dataoutitem:
raise self.failureException('%%d != %%d at index %%d' %% (expecteditem, dataoutitem, index))
########################################################
@classmethod
def setUpClass(cls):
# For operations that support SIMD, this is intended to allow
# selecting data sets that fit evenly in the SIMD register width,
# and also data sets that don't, and so require the non-SIMD
# clean-up code to be exercised.
# Since SIMD registers can be 256 bits wide (although not all
# platforms, we want at least that much data for byte arrays.
cls.simdincr = 256 // 8
if '%(arrayevenodd)s' == 'even':
cls.testdatasize = cls.simdincr * 4
if '%(arrayevenodd)s' == 'odd':
cls.testdatasize = (cls.simdincr * 4) - 1
# For floating point values limit the test values to within
# the range of precision so that we don't create artificial
# test errors due to problems related to numerical resolution.
if '%(typecode)s' == 'f':
minval = arrayfunc.arraylimits.h_min
maxval = arrayfunc.arraylimits.h_max
elif '%(typecode)s' == 'd':
minval = arrayfunc.arraylimits.i_min
maxval = arrayfunc.arraylimits.i_max
else:
minval = arrayfunc.arraylimits.%(typelabel)s_min
maxval = arrayfunc.arraylimits.%(typelabel)s_max
# Generate the test data for this set of tests.
tdata = gendata_%(datagenerator)s(minval, maxval, '%(typecode)s')
# If floating point, convert the data to the correct type.
if '%(typecode)s' in ('f', 'd'):
testdata = [(float(x), float(y)) for x,y in tdata]
else:
testdata = tdata
# And separate the data pairs.
# This is used for array-array
cls.datax = [x for x,y in testdata]
cls.datay = [y for x,y in testdata]
# Group the data samples so we have sequences to fill arrays and
# individual values to use to perform operations on them.
# This version provides (sequence, value) e.g. ([1,2,3] , 9)
# This is used for array-num
datasample = testdata
datasample.sort(key = lambda x : x[1])
cls.groupeddatax = groupdata(datasample, cls.testdatasize)
# Swap the elements around so we can group them the other way.
# This version provides (value, sequence) e.g. (9, [1,2,3])
# This is used for num-array
datasampy = [(y,x) for x,y in testdata]
datasampy.sort(key = lambda x : x[1])
grptmp = groupdata(datasampy, cls.testdatasize)
# Swap them back so they are the way we expect them.
cls.groupeddatay = [(y,x) for x,y in grptmp]
########################################################
def setUp(self):
"""Initialise.
"""
# This is active for float numbers only.
self.addTypeEqualityFunc(float, self.FloatassertEqual)
# These handles lists of floats and ints respectively.
# Without using a specialised comparison function it's not
# possibly to compare floats properly. These functions allow
# for better performance on very large data sets than calling
# assertEqual repeatedly on individual items.
if '%(typecode)s' in ('f', 'd'):
self.addTypeEqualityFunc(list, self.FloatListassertEqual)
else:
self.addTypeEqualityFunc(list, self.IntListassertEqual)
# Make the data we want to use in the tests accessible with shorter labels.
# This first line gives us a reference to the class containing these tests
# as we need this to get at data created by setUpClass.
classref = self.__class__
self.groupeddatax = classref.groupeddatax
self.groupeddatay = classref.groupeddatay
self.datax = classref.datax
self.datay = classref.datay
self.simdincr = classref.simdincr
########################################################
def test_%(funclabel)s_check_test_data(self):
"""Test %(funclabel)s to ensure we have valid data present - Array code %(typelabel)s.
"""
# Make sure we don't have any empty or trivial length data sets.
# This test exists purely to ensure that the generated and filtered
# data in setUp is actually present and we don't have any empty
# data sets after we have pruned them. This condition should not
# arise unless the test has been edited carelessly.
self.assertTrue(len(self.datax) >= self.simdincr)
self.assertTrue(len(self.datay) >= self.simdincr)
########################################################
def test_%(funclabel)s_basic_array_num_none_a1(self):
"""Test %(funclabel)s as *array-num-none* for basic function - Array code %(typelabel)s.
"""
for testdatax, testvaly in self.groupeddatax:
with self.subTest(msg='Failed with parameter', testval = (testdatax, testvaly)):
data1 = array.array('%(typecode)s', testdatax)
# Calculate the expected result.
expected = [%(operatorfunc)s(x, testvaly) for x in testdatax]
arrayfunc.%(funcname)s(data1, testvaly)
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(list(data1), expected)
########################################################
def test_%(funclabel)s_basic_array_num_none_a2(self):
"""Test %(funclabel)s as *array-num-none* for basic function with matherrors=True - Array code %(typelabel)s.
"""
for testdatax, testvaly in self.groupeddatax:
with self.subTest(msg='Failed with parameter', testval = (testdatax, testvaly)):
data1 = array.array('%(typecode)s', testdatax)
# Calculate the expected result.
expected = [%(operatorfunc)s(x, testvaly) for x in testdatax]
arrayfunc.%(funcname)s(data1, testvaly, matherrors=True)
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(list(data1), expected)
########################################################
def test_%(funclabel)s_basic_array_num_none_a3(self):
"""Test %(funclabel)s as *array-num-none* for basic function with array limit - Array code %(typelabel)s.
"""
for testdatax, testvaly in self.groupeddatax:
with self.subTest(msg='Failed with parameter', testval = (testdatax, testvaly)):
data1 = array.array('%(typecode)s', testdatax)
limited = len(data1) // 2
# Calculate the expected result.
pydataout = [%(operatorfunc)s(x, testvaly) for x in testdatax]
expected = pydataout[0:limited] + list(data1)[limited:]
arrayfunc.%(funcname)s(data1, testvaly, maxlen=limited)
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(list(data1), expected)
########################################################
def test_%(funclabel)s_basic_array_num_none_a4(self):
"""Test %(funclabel)s as *array-num-none* for basic function with matherrors=True and with array limit - Array code %(typelabel)s.
"""
for testdatax, testvaly in self.groupeddatax:
with self.subTest(msg='Failed with parameter', testval = (testdatax, testvaly)):
data1 = array.array('%(typecode)s', testdatax)
limited = len(data1) // 2
# Calculate the expected result.
pydataout = [%(operatorfunc)s(x, testvaly) for x in testdatax]
expected = pydataout[0:limited] + list(data1)[limited:]
arrayfunc.%(funcname)s(data1, testvaly, matherrors=True, maxlen=limited)
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(list(data1), expected)
########################################################
def test_%(funclabel)s_basic_array_num_array_b1(self):
"""Test %(funclabel)s as *array-num-array* for basic function - Array code %(typelabel)s.
"""
for testdatax, testvaly in self.groupeddatax:
with self.subTest(msg='Failed with parameter', testval = (testdatax, testvaly)):
data1 = array.array('%(typecode)s', testdatax)
dataout = array.array('%(typecode)s', [0]*len(data1))
# Calculate the expected result.
expected = [%(operatorfunc)s(x, testvaly) for x in testdatax]
arrayfunc.%(funcname)s(data1, testvaly, dataout)
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(list(dataout), expected)
########################################################
def test_%(funclabel)s_basic_array_num_array_b2(self):
"""Test %(funclabel)s as *array-num-array* for basic function with matherrors=True - Array code %(typelabel)s.
"""
for testdatax, testvaly in self.groupeddatax:
with self.subTest(msg='Failed with parameter', testval = (testdatax, testvaly)):
data1 = array.array('%(typecode)s', testdatax)
dataout = array.array('%(typecode)s', [0]*len(data1))
# Calculate the expected result.
expected = [%(operatorfunc)s(x, testvaly) for x in testdatax]
arrayfunc.%(funcname)s(data1, testvaly, dataout, matherrors=True)
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(list(dataout), expected)
########################################################
def test_%(funclabel)s_basic_array_num_array_b3(self):
"""Test %(funclabel)s as *array-num-array* for basic function with array limit - Array code %(typelabel)s.
"""
for testdatax, testvaly in self.groupeddatax:
with self.subTest(msg='Failed with parameter', testval = (testdatax, testvaly)):
data1 = array.array('%(typecode)s', testdatax)
dataout = array.array('%(typecode)s', [0]*len(data1))
limited = len(data1) // 2
# Calculate the expected result.
pydataout = [%(operatorfunc)s(x, testvaly) for x in testdatax]
expected = pydataout[0:limited] + list(dataout)[limited:]
arrayfunc.%(funcname)s(data1, testvaly, dataout, maxlen=limited)
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(list(dataout), expected)
########################################################
def test_%(funclabel)s_basic_array_num_array_b4(self):
"""Test %(funclabel)s as *array-num-array* for basic function with matherrors=True and with array limit - Array code %(typelabel)s.
"""
for testdatax, testvaly in self.groupeddatax:
with self.subTest(msg='Failed with parameter', testval = (testdatax, testvaly)):
data1 = array.array('%(typecode)s', testdatax)
dataout = array.array('%(typecode)s', [0]*len(data1))
limited = len(data1) // 2
# Calculate the expected result.
pydataout = [%(operatorfunc)s(x, testvaly) for x in testdatax]
expected = pydataout[0:limited] + list(dataout)[limited:]
arrayfunc.%(funcname)s(data1, testvaly, dataout, matherrors=True, maxlen=limited)
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(list(dataout), expected)
########################################################
def test_%(funclabel)s_basic_num_array_none_c1(self):
"""Test %(funclabel)s as *num-array-none* for basic function - Array code %(typelabel)s.
"""
for testvalx, testdatay in self.groupeddatay:
with self.subTest(msg='Failed with parameter', testval = (testvalx, testdatay)):
data1 = array.array('%(typecode)s', testdatay)
# Calculate the expected result.
expected = [%(operatorfunc)s(testvalx, y) for y in testdatay]
arrayfunc.%(funcname)s(testvalx, data1)
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(list(data1), expected)
########################################################
def test_%(funclabel)s_basic_num_array_none_c2(self):
"""Test %(funclabel)s as *num-array-none* for basic function with matherrors=True - Array code %(typelabel)s.
"""
for testvalx, testdatay in self.groupeddatay:
with self.subTest(msg='Failed with parameter', testval = (testvalx, testdatay)):
data1 = array.array('%(typecode)s', testdatay)
# Calculate the expected result.
expected = [%(operatorfunc)s(testvalx, y) for y in testdatay]
arrayfunc.%(funcname)s(testvalx, data1, matherrors=True)
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(list(data1), expected)
########################################################
def test_%(funclabel)s_basic_num_array_none_c3(self):
"""Test %(funclabel)s as *num-array-none* for basic function with array limit - Array code %(typelabel)s.
"""
for testvalx, testdatay in self.groupeddatay:
with self.subTest(msg='Failed with parameter', testval = (testvalx, testdatay)):
data1 = array.array('%(typecode)s', testdatay)
limited = len(data1) // 2
# Calculate the expected result.
pydataout = [%(operatorfunc)s(testvalx, y) for y in testdatay]
expected = pydataout[0:limited] + list(data1)[limited:]
arrayfunc.%(funcname)s(testvalx, data1, maxlen=limited)
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(list(data1), expected)
########################################################
def test_%(funclabel)s_basic_num_array_none_c4(self):
"""Test %(funclabel)s as *num-array-none* for basic function with matherrors=True and with array limit - Array code %(typelabel)s.
"""
for testvalx, testdatay in self.groupeddatay:
with self.subTest(msg='Failed with parameter', testval = (testvalx, testdatay)):
data1 = array.array('%(typecode)s', testdatay)
limited = len(data1) // 2
# Calculate the expected result.
pydataout = [%(operatorfunc)s(testvalx, y) for y in testdatay]
expected = pydataout[0:limited] + list(data1)[limited:]
arrayfunc.%(funcname)s(testvalx, data1, matherrors=True, maxlen=limited)
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(list(data1), expected)
########################################################
def test_%(funclabel)s_basic_num_array_array_d1(self):
"""Test %(funclabel)s as *num-array-array* for basic function - Array code %(typelabel)s.
"""
for testvalx, testdatay in self.groupeddatay:
with self.subTest(msg='Failed with parameter', testval = (testvalx, testdatay)):
data1 = array.array('%(typecode)s', testdatay)
dataout = array.array('%(typecode)s', [0]*len(data1))
# Calculate the expected result.
expected = [%(operatorfunc)s(testvalx, y) for y in testdatay]
arrayfunc.%(funcname)s(testvalx, data1, dataout)
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(list(dataout), expected)
########################################################
def test_%(funclabel)s_basic_num_array_array_d2(self):
"""Test %(funclabel)s as *num-array-array* for basic function with matherrors=True - Array code %(typelabel)s.
"""
for testvalx, testdatay in self.groupeddatay:
with self.subTest(msg='Failed with parameter', testval = (testvalx, testdatay)):
data1 = array.array('%(typecode)s', testdatay)
dataout = array.array('%(typecode)s', [0]*len(data1))
# Calculate the expected result.
expected = [%(operatorfunc)s(testvalx, y) for y in testdatay]
arrayfunc.%(funcname)s(testvalx, data1, dataout, matherrors=True)
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(list(dataout), expected)
########################################################
def test_%(funclabel)s_basic_num_array_array_d3(self):
"""Test %(funclabel)s as *num-array-array* for basic function with array limit - Array code %(typelabel)s.
"""
for testvalx, testdatay in self.groupeddatay:
with self.subTest(msg='Failed with parameter', testval = (testvalx, testdatay)):
data1 = array.array('%(typecode)s', testdatay)
dataout = array.array('%(typecode)s', [0]*len(data1))
limited = len(data1) // 2
# Calculate the expected result.
pydataout = [%(operatorfunc)s(testvalx, y) for y in testdatay]
expected = pydataout[0:limited] + list(dataout)[limited:]
arrayfunc.%(funcname)s(testvalx, data1, dataout, maxlen=limited)
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(list(dataout), expected)
########################################################
def test_%(funclabel)s_basic_num_array_array_d4(self):
"""Test %(funclabel)s as *num-array-array* for basic function with matherrors=True and with array limit - Array code %(typelabel)s.
"""
for testvalx, testdatay in self.groupeddatay:
with self.subTest(msg='Failed with parameter', testval = (testvalx, testdatay)):
data1 = array.array('%(typecode)s', testdatay)
dataout = array.array('%(typecode)s', [0]*len(data1))
limited = len(data1) // 2
# Calculate the expected result.
pydataout = [%(operatorfunc)s(testvalx, y) for y in testdatay]
expected = pydataout[0:limited] + list(dataout)[limited:]
arrayfunc.%(funcname)s(testvalx, data1, dataout, matherrors=True, maxlen=limited)
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(list(dataout), expected)
########################################################
def test_%(funclabel)s_basic_array_array_none_e1(self):
"""Test %(funclabel)s as *array-array-none* for basic function - Array code %(typelabel)s.
"""
data1 = array.array('%(typecode)s', self.datax)
data2 = array.array('%(typecode)s', self.datay)
expected = [%(operatorfunc)s(x, y) for (x, y) in zip(data1, data2)]
arrayfunc.%(funcname)s(data1, data2)
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(list(data1), expected)
########################################################
def test_%(funclabel)s_basic_array_array_none_e2(self):
"""Test %(funclabel)s as *array-array-none* for basic function with matherrors=True - Array code %(typelabel)s.
"""
data1 = array.array('%(typecode)s', self.datax)
data2 = array.array('%(typecode)s', self.datay)
expected = [%(operatorfunc)s(x, y) for (x, y) in zip(data1, data2)]
arrayfunc.%(funcname)s(data1, data2, matherrors=True)
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(list(data1), expected)
########################################################
def test_%(funclabel)s_basic_array_array_none_e3(self):
"""Test %(funclabel)s as *array-array-none* for basic function with array limit - Array code %(typelabel)s.
"""
data1 = array.array('%(typecode)s', self.datax)
data2 = array.array('%(typecode)s', self.datay)
limited = len(data1) // 2
pydataout = [%(operatorfunc)s(x, y) for (x, y) in zip(data1, data2)]
expected = pydataout[0:limited] + list(data1)[limited:]
arrayfunc.%(funcname)s(data1, data2, maxlen=limited)
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(list(data1), expected)
########################################################
def test_%(funclabel)s_basic_array_array_none_e4(self):
"""Test %(funclabel)s as *array-array-none* for basic function with matherrors=True and with array limit - Array code %(typelabel)s.
"""
data1 = array.array('%(typecode)s', self.datax)
data2 = array.array('%(typecode)s', self.datay)
limited = len(data1) // 2
pydataout = [%(operatorfunc)s(x, y) for (x, y) in zip(data1, data2)]
expected = pydataout[0:limited] + list(data1)[limited:]
arrayfunc.%(funcname)s(data1, data2, matherrors=True, maxlen=limited)
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(list(data1), expected)
########################################################
def test_%(funclabel)s_basic_array_array_array_f1(self):
"""Test %(funclabel)s as *array-array-array* for basic function - Array code %(typelabel)s.
"""
data1 = array.array('%(typecode)s', self.datax)
data2 = array.array('%(typecode)s', self.datay)
dataout = array.array('%(typecode)s', [0]*len(data1))
expected = [%(operatorfunc)s(x, y) for (x, y) in zip(data1, data2)]
arrayfunc.%(funcname)s(data1, data2, dataout)
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(list(dataout), expected)
########################################################
def test_%(funclabel)s_basic_array_array_array_f2(self):
"""Test %(funclabel)s as *array-array-array* for basic function with matherrors=True - Array code %(typelabel)s.
"""
data1 = array.array('%(typecode)s', self.datax)
data2 = array.array('%(typecode)s', self.datay)
dataout = array.array('%(typecode)s', [0]*len(data1))
expected = [%(operatorfunc)s(x, y) for (x, y) in zip(data1, data2)]
arrayfunc.%(funcname)s(data1, data2, dataout, matherrors=True)
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(list(dataout), expected)
########################################################
def test_%(funclabel)s_basic_array_array_array_f3(self):
"""Test %(funclabel)s as *array-array-array* for basic function with matherrors=True and with array limit - Array code %(typelabel)s.
"""
data1 = array.array('%(typecode)s', self.datax)
data2 = array.array('%(typecode)s', self.datay)
dataout = array.array('%(typecode)s', [0]*len(data1))
limited = len(data1) // 2
pydataout = [%(operatorfunc)s(x, y) for (x, y) in zip(data1, data2)]
expected = pydataout[0:limited] + list(dataout)[limited:]
arrayfunc.%(funcname)s(data1, data2, dataout, matherrors=True, maxlen=limited)
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(list(dataout), expected)
##############################################################################
'''
# ==============================================================================
# This template is for operators (e.g. +, -, *, etc.) which have SIMD support.
# Test class template, expanded with %-style substitution. Keys: funclabel,
# funcname, typelabel, typecode, arrayevenodd, test_op_x, test_op_y,
# typeconv1, typeconv2, pyoperator. Note the '%%' escapes, which survive
# expansion as literal '%' in the generated code.
test_op_simd_templ = '''

##############################################################################
class %(funclabel)s_general_%(arrayevenodd)s_arraysize_simd_%(typelabel)s(unittest.TestCase):
    """Test %(funclabel)s for basic general function operation using numeric
    data %(test_op_y)s.
    test_op_simd_templ
    """


    ##############################################################################
    def FloatassertEqual(self, dataoutitem, expecteditem, msg=None):
        """This function is patched into assertEqual to allow testing for
        the floating point special values NaN, Inf, and -Inf.
        """
        # NaN cannot be compared using normal means.
        if math.isnan(dataoutitem) and math.isnan(expecteditem):
            pass
        # Anything else can be compared normally.
        else:
            if not math.isclose(expecteditem, dataoutitem, rel_tol=0.01, abs_tol=0.0):
                raise self.failureException('%%0.3f != %%0.3f' %% (expecteditem, dataoutitem))


    ########################################################
    def setUp(self):
        """Initialise.
        """
        # This is active for float numbers only.
        self.addTypeEqualityFunc(float, self.FloatassertEqual)

        if '%(arrayevenodd)s' == 'even':
            testdatasize = 160
        if '%(arrayevenodd)s' == 'odd':
            testdatasize = 159
        paramitersize = 10

        xdata = [x for x,y in zip(itertools.cycle([%(test_op_x)s]), range(testdatasize))]
        self.datax = array.array('%(typecode)s', xdata)
        self.datay = [x for (x,y) in zip(itertools.cycle([%(test_op_y)s]), self.datax)]

        # This is used for testing with single parameters. We use a limited
        # data set to avoid excessive numbers of sub-tests.
        self.dataxparam = self.datax[:paramitersize]
        self.datayparam = self.datay[:paramitersize]


    ########################################################
    def test_%(funclabel)s_basic_array_num_none_a1(self):
        """Test %(funclabel)s as *array-num-none* for basic function - Array code %(typelabel)s.
        """
        for testval in self.datayparam:
            with self.subTest(msg='Failed with parameter', testval = testval):

                data1 = array.array('%(typecode)s', self.datax)

                expected = %(typeconv1)s [x %(pyoperator)s testval for x in data1] %(typeconv2)s

                arrayfunc.%(funcname)s(data1, testval)

                # The behavour of assertEqual is modified by addTypeEqualityFunc.
                self.assertEqual(list(data1), expected)


    ########################################################
    def test_%(funclabel)s_basic_array_num_none_a2(self):
        """Test %(funclabel)s as *array-num-none* for basic function with matherrors=True - Array code %(typelabel)s.
        """
        for testval in self.datayparam:
            with self.subTest(msg='Failed with parameter', testval = testval):

                data1 = array.array('%(typecode)s', self.datax)

                expected = %(typeconv1)s [x %(pyoperator)s testval for x in data1] %(typeconv2)s

                arrayfunc.%(funcname)s(data1, testval, matherrors=True)

                # The behavour of assertEqual is modified by addTypeEqualityFunc.
                self.assertEqual(list(data1), expected)


    ########################################################
    def test_%(funclabel)s_basic_array_num_none_a3(self):
        """Test %(funclabel)s as *array-num-none* for basic function with matherrors=True and nosimd=True - Array code %(typelabel)s.
        """
        for testval in self.datayparam:
            with self.subTest(msg='Failed with parameter', testval = testval):

                data1 = array.array('%(typecode)s', self.datax)

                expected = %(typeconv1)s [x %(pyoperator)s testval for x in data1] %(typeconv2)s

                arrayfunc.%(funcname)s(data1, testval, matherrors=True, nosimd=True)

                # The behavour of assertEqual is modified by addTypeEqualityFunc.
                self.assertEqual(list(data1), expected)


    ########################################################
    def test_%(funclabel)s_basic_array_num_array_b1(self):
        """Test %(funclabel)s as *array-num-array* for basic function - Array code %(typelabel)s.
        """
        for testval in self.datayparam:
            with self.subTest(msg='Failed with parameter', testval = testval):

                data1 = array.array('%(typecode)s', self.datax)
                dataout = array.array('%(typecode)s', [0]*len(data1))

                expected = %(typeconv1)s [x %(pyoperator)s testval for x in data1] %(typeconv2)s

                arrayfunc.%(funcname)s(data1, testval, dataout)

                # The behavour of assertEqual is modified by addTypeEqualityFunc.
                self.assertEqual(list(dataout), expected)


    ########################################################
    def test_%(funclabel)s_basic_array_num_array_b2(self):
        """Test %(funclabel)s as *array-num-array* for basic function with matherrors=True - Array code %(typelabel)s.
        """
        for testval in self.datayparam:
            with self.subTest(msg='Failed with parameter', testval = testval):

                data1 = array.array('%(typecode)s', self.datax)
                dataout = array.array('%(typecode)s', [0]*len(data1))

                expected = %(typeconv1)s [x %(pyoperator)s testval for x in data1] %(typeconv2)s

                arrayfunc.%(funcname)s(data1, testval, dataout, matherrors=True)

                # The behavour of assertEqual is modified by addTypeEqualityFunc.
                self.assertEqual(list(dataout), expected)


    ########################################################
    def test_%(funclabel)s_basic_array_num_array_b3(self):
        """Test %(funclabel)s as *array-num-array* for basic function with matherrors=True and nosimd=True - Array code %(typelabel)s.
        """
        for testval in self.datayparam:
            with self.subTest(msg='Failed with parameter', testval = testval):

                data1 = array.array('%(typecode)s', self.datax)
                dataout = array.array('%(typecode)s', [0]*len(data1))

                expected = %(typeconv1)s [x %(pyoperator)s testval for x in data1] %(typeconv2)s

                arrayfunc.%(funcname)s(data1, testval, dataout, matherrors=True, nosimd=True)

                # The behavour of assertEqual is modified by addTypeEqualityFunc.
                self.assertEqual(list(dataout), expected)


    ########################################################
    def test_%(funclabel)s_basic_num_array_none_c1(self):
        """Test %(funclabel)s as *num-array-none* for basic function - Array code %(typelabel)s.
        """
        for testval in self.dataxparam:
            with self.subTest(msg='Failed with parameter', testval = testval):

                data1 = array.array('%(typecode)s', self.datay)

                expected = %(typeconv1)s [testval %(pyoperator)s x for x in data1] %(typeconv2)s

                arrayfunc.%(funcname)s(testval, data1)

                # The behavour of assertEqual is modified by addTypeEqualityFunc.
                self.assertEqual(list(data1), expected)


    ########################################################
    def test_%(funclabel)s_basic_num_array_none_c2(self):
        """Test %(funclabel)s as *num-array-none* for basic function with matherrors=True - Array code %(typelabel)s.
        """
        for testval in self.dataxparam:
            with self.subTest(msg='Failed with parameter', testval = testval):

                data1 = array.array('%(typecode)s', self.datay)

                expected = %(typeconv1)s [testval %(pyoperator)s x for x in data1] %(typeconv2)s

                arrayfunc.%(funcname)s(testval, data1, matherrors=True)

                # The behavour of assertEqual is modified by addTypeEqualityFunc.
                self.assertEqual(list(data1), expected)


    ########################################################
    def test_%(funclabel)s_basic_num_array_none_c3(self):
        """Test %(funclabel)s as *num-array-none* for basic function with matherrors=True and nosimd=True - Array code %(typelabel)s.
        """
        for testval in self.dataxparam:
            with self.subTest(msg='Failed with parameter', testval = testval):

                data1 = array.array('%(typecode)s', self.datay)

                expected = %(typeconv1)s [testval %(pyoperator)s x for x in data1] %(typeconv2)s

                arrayfunc.%(funcname)s(testval, data1, matherrors=True, nosimd=True)

                # The behavour of assertEqual is modified by addTypeEqualityFunc.
                self.assertEqual(list(data1), expected)


    ########################################################
    def test_%(funclabel)s_basic_num_array_array_d1(self):
        """Test %(funclabel)s as *num-array-array* for basic function - Array code %(typelabel)s.
        """
        for testval in self.dataxparam:
            with self.subTest(msg='Failed with parameter', testval = testval):

                data1 = array.array('%(typecode)s', self.datay)
                dataout = array.array('%(typecode)s', [0]*len(data1))

                expected = %(typeconv1)s [testval %(pyoperator)s x for x in data1] %(typeconv2)s

                arrayfunc.%(funcname)s(testval, data1, dataout)

                # The behavour of assertEqual is modified by addTypeEqualityFunc.
                self.assertEqual(list(dataout), expected)


    ########################################################
    def test_%(funclabel)s_basic_num_array_array_d2(self):
        """Test %(funclabel)s as *num-array-array* for basic function with matherrors=True - Array code %(typelabel)s.
        """
        for testval in self.dataxparam:
            with self.subTest(msg='Failed with parameter', testval = testval):

                data1 = array.array('%(typecode)s', self.datay)
                dataout = array.array('%(typecode)s', [0]*len(data1))

                expected = %(typeconv1)s [testval %(pyoperator)s x for x in data1] %(typeconv2)s

                arrayfunc.%(funcname)s(testval, data1, dataout, matherrors=True)

                # The behavour of assertEqual is modified by addTypeEqualityFunc.
                self.assertEqual(list(dataout), expected)


    ########################################################
    def test_%(funclabel)s_basic_num_array_array_d3(self):
        """Test %(funclabel)s as *num-array-array* for basic function with matherrors=True and nosimd=True - Array code %(typelabel)s.
        """
        for testval in self.dataxparam:
            with self.subTest(msg='Failed with parameter', testval = testval):

                data1 = array.array('%(typecode)s', self.datay)
                dataout = array.array('%(typecode)s', [0]*len(data1))

                expected = %(typeconv1)s [testval %(pyoperator)s x for x in data1] %(typeconv2)s

                arrayfunc.%(funcname)s(testval, data1, dataout, matherrors=True, nosimd=True)

                # The behavour of assertEqual is modified by addTypeEqualityFunc.
                self.assertEqual(list(dataout), expected)


    ########################################################
    def test_%(funclabel)s_basic_array_array_none_e1(self):
        """Test %(funclabel)s as *array-array-none* for basic function - Array code %(typelabel)s.
        """
        data1 = array.array('%(typecode)s', self.datax)
        data2 = array.array('%(typecode)s', self.datay)

        expected = %(typeconv1)s [x %(pyoperator)s y for (x, y) in zip(data1, data2)] %(typeconv2)s

        arrayfunc.%(funcname)s(data1, data2)

        # The behavour of assertEqual is modified by addTypeEqualityFunc.
        self.assertEqual(list(data1), expected)


    ########################################################
    def test_%(funclabel)s_basic_array_array_none_e2(self):
        """Test %(funclabel)s as *array-array-none* for basic function with matherrors=True and nosimd=True - Array code %(typelabel)s.
        """
        data1 = array.array('%(typecode)s', self.datax)
        data2 = array.array('%(typecode)s', self.datay)

        expected = %(typeconv1)s [x %(pyoperator)s y for (x, y) in zip(data1, data2)] %(typeconv2)s

        arrayfunc.%(funcname)s(data1, data2, matherrors=True, nosimd=True)

        # The behavour of assertEqual is modified by addTypeEqualityFunc.
        self.assertEqual(list(data1), expected)


    ########################################################
    def test_%(funclabel)s_basic_array_array_array_f1(self):
        """Test %(funclabel)s as *array-array-array* for basic function - Array code %(typelabel)s.
        """
        data1 = array.array('%(typecode)s', self.datax)
        data2 = array.array('%(typecode)s', self.datay)
        dataout = array.array('%(typecode)s', [0]*len(data1))

        expected = %(typeconv1)s [x %(pyoperator)s y for (x, y) in zip(data1, data2)] %(typeconv2)s

        arrayfunc.%(funcname)s(data1, data2, dataout)

        # The behavour of assertEqual is modified by addTypeEqualityFunc.
        self.assertEqual(list(dataout), expected)


    ########################################################
    def test_%(funclabel)s_basic_array_array_array_f2(self):
        """Test %(funclabel)s as *array-array-array* for basic function with matherrors=True and nosimd=True - Array code %(typelabel)s.
        """
        data1 = array.array('%(typecode)s', self.datax)
        data2 = array.array('%(typecode)s', self.datay)
        dataout = array.array('%(typecode)s', [0]*len(data1))

        expected = %(typeconv1)s [x %(pyoperator)s y for (x, y) in zip(data1, data2)] %(typeconv2)s

        arrayfunc.%(funcname)s(data1, data2, dataout, matherrors=True, nosimd=True)

        # The behavour of assertEqual is modified by addTypeEqualityFunc.
        self.assertEqual(list(dataout), expected)


##############################################################################

'''
# ==============================================================================
# ==============================================================================
# The template used to generate the tests for testing invalid parameter types
# for simd option.
# Test class template, expanded with %-style substitution. Keys: funclabel,
# funcname, typelabel, typecode, test_op_x, test_op_y, zero_const.
param_invalid_opt_simd_template = '''

##############################################################################
class %(funclabel)s_opt_param_errors_%(typelabel)s(unittest.TestCase):
    """Test %(funclabel)s for invalid errors for simd option.
    param_invalid_opt_simd_template
    """

    ########################################################
    def setUp(self):
        """Initialise.
        """
        self.inparray1a = array.array('%(typecode)s', [%(test_op_x)s])
        self.inparray1b = copy.copy(self.inparray1a)
        self.inparray2a = array.array('%(typecode)s', [x for (x,y) in zip(itertools.cycle([%(test_op_y)s]), self.inparray1a)])
        self.inparray2b = copy.copy(self.inparray2a)

        arraysize = len(self.inparray1a)

        self.dataout = array.array('%(typecode)s', itertools.repeat(%(zero_const)s, arraysize))


    ########################################################
    def test_%(funclabel)s_array_num_none_a1(self):
        """Test %(funclabel)s as *array-num-none* for nosimd='a' - Array code %(typelabel)s.
        """
        inpvalue = self.inparray2a[0]

        # This version is expected to pass.
        arrayfunc.%(funcname)s(self.inparray1a, inpvalue, nosimd=True)

        # This is the actual test.
        with self.assertRaises(TypeError):
            arrayfunc.%(funcname)s(self.inparray1b, inpvalue, nosimd='a')


    ########################################################
    def test_%(funclabel)s_array_num_array_b1(self):
        """Test %(funclabel)s as *array-num-array* for nosimd='a' - Array code %(typelabel)s.
        """
        inpvalue = self.inparray2a[0]

        # This version is expected to pass.
        arrayfunc.%(funcname)s(self.inparray1a, inpvalue, self.dataout, nosimd=True)

        # This is the actual test.
        with self.assertRaises(TypeError):
            arrayfunc.%(funcname)s(self.inparray1b, inpvalue, self.dataout, nosimd='a')


    ########################################################
    def test_%(funclabel)s_num_array_none_c1(self):
        """Test %(funclabel)s as *num-array-none* for nosimd='a' - Array code %(typelabel)s.
        """
        inpvalue = self.inparray1a[0]

        # This version is expected to pass.
        arrayfunc.%(funcname)s(inpvalue, self.inparray2a, nosimd=True)

        # This is the actual test.
        with self.assertRaises(TypeError):
            arrayfunc.%(funcname)s(inpvalue, self.inparray2b, nosimd='a')


    ########################################################
    def test_%(funclabel)s_num_array_array_d1(self):
        """Test %(funclabel)s as *num-array-array* for nosimd='a' - Array code %(typelabel)s.
        """
        inpvalue = self.inparray1a[0]

        # This version is expected to pass.
        arrayfunc.%(funcname)s(inpvalue, self.inparray2a, self.dataout, nosimd=True)

        # This is the actual test.
        with self.assertRaises(TypeError):
            arrayfunc.%(funcname)s(inpvalue, self.inparray2b, self.dataout, nosimd='a')


    ########################################################
    def test_%(funclabel)s_array_array_none_e1(self):
        """Test %(funclabel)s as *array-array-none* for nosimd='a' - Array code %(typelabel)s.
        """
        # This version is expected to pass.
        arrayfunc.%(funcname)s(self.inparray1a, self.inparray2a, nosimd=True)

        # This is the actual test.
        with self.assertRaises(TypeError):
            arrayfunc.%(funcname)s(self.inparray1b, self.inparray2b, nosimd='a')


    ########################################################
    def test_%(funclabel)s_array_array_array_f1(self):
        """Test %(funclabel)s as *array-array-array* for nosimd='a' - Array code %(typelabel)s.
        """
        # This version is expected to pass.
        arrayfunc.%(funcname)s(self.inparray1a, self.inparray2a, self.dataout, nosimd=True)

        # This is the actual test.
        with self.assertRaises(TypeError):
            arrayfunc.%(funcname)s(self.inparray1b, self.inparray2b, self.dataout, nosimd='a')


##############################################################################

'''
# ==============================================================================
# ==============================================================================
# The template used to generate the tests for testing invalid array and
# numeric parameter types.
# Test class template, expanded with %-style substitution. Keys: funclabel,
# funcname, typelabel, typecode, test_op_x, test_op_y, zero_const, badcode,
# badconv. The generated d2 test passes a numeric value of the incompatible
# type (badvalue), matching its docstring, rather than duplicating d1.
param_invalid_template = '''

##############################################################################
class %(funclabel)s_param_errors_%(typelabel)s(unittest.TestCase):
    """Test %(funclabel)s for invalid array and numeric parameters.
    param_invalid_template
    """

    ########################################################
    def setUp(self):
        """Initialise.
        """
        self.testarray1 = array.array('%(typecode)s', [%(test_op_x)s])
        self.testarray2 = array.array('%(typecode)s', [x for (x,y) in zip(itertools.cycle([%(test_op_y)s]), self.testarray1)])

        arraysize = len(self.testarray1)

        self.dataout = array.array('%(typecode)s', itertools.repeat(%(zero_const)s, arraysize))

        # Create some data array equivalents with an incompatible type.
        self.badarray1 = array.array('%(badcode)s', [%(badconv)s(x) for x in self.testarray1])
        self.badarray2 = array.array('%(badcode)s', [%(badconv)s(x) for x in self.testarray2])

        self.baddataout = array.array('%(badcode)s', [%(badconv)s(x) for x in self.dataout])


    ########################################################
    def test_%(funclabel)s_array_num_none_a1(self):
        """Test %(funclabel)s as *array-num-none* for invalid type of array - Array code %(typelabel)s.
        """
        for testvalue in self.testarray2:
            with self.subTest(msg='Failed with parameter', testvalue = testvalue):

                # Copy the array so we don't change the original data.
                testarray1 = copy.copy(self.testarray1)
                badarray1 = copy.copy(self.badarray1)

                # This version is expected to pass.
                arrayfunc.%(funcname)s(testarray1, testvalue)

                # This is the actual test.
                with self.assertRaises(TypeError):
                    arrayfunc.%(funcname)s(badarray1, testvalue)


    ########################################################
    def test_%(funclabel)s_array_num_none_a2(self):
        """Test %(funclabel)s as *array-num-none* for invalid type of number - Array code %(typelabel)s.
        """
        for testvalue, badvalue in zip(self.testarray2, self.badarray2):
            with self.subTest(msg='Failed with parameter', testvalue = testvalue):

                # Copy the array so we don't change the original data.
                testarray1 = copy.copy(self.testarray1)

                # This version is expected to pass.
                arrayfunc.%(funcname)s(testarray1, testvalue)

                testarray1 = copy.copy(self.testarray1)

                # This is the actual test.
                with self.assertRaises(TypeError):
                    arrayfunc.%(funcname)s(testarray1, badvalue)


    ########################################################
    def test_%(funclabel)s_array_num_array_b1(self):
        """Test %(funclabel)s as *array-num-array* for invalid type of array - Array code %(typelabel)s.
        """
        for testvalue in self.testarray2:
            with self.subTest(msg='Failed with parameter', testvalue = testvalue):

                # Copy the array so we don't change the original data.
                testarray1 = copy.copy(self.testarray1)
                badarray1 = copy.copy(self.badarray1)

                # This version is expected to pass.
                arrayfunc.%(funcname)s(testarray1, testvalue, self.dataout)

                # This is the actual test.
                with self.assertRaises(TypeError):
                    arrayfunc.%(funcname)s(badarray1, testvalue, self.dataout)


    ########################################################
    def test_%(funclabel)s_array_num_array_b2(self):
        """Test %(funclabel)s as *array-num-array* for invalid type of number - Array code %(typelabel)s.
        """
        for testvalue, badvalue in zip(self.testarray2, self.badarray2):
            with self.subTest(msg='Failed with parameter', testvalue = testvalue):

                # Copy the array so we don't change the original data.
                badarray1 = copy.copy(self.badarray1)

                # This version is expected to pass.
                arrayfunc.%(funcname)s(self.testarray1, testvalue, self.dataout)

                # This is the actual test.
                with self.assertRaises(TypeError):
                    arrayfunc.%(funcname)s(self.testarray1, badvalue, self.dataout)


    ########################################################
    def test_%(funclabel)s_array_num_array_b3(self):
        """Test %(funclabel)s as *array-num-array* for invalid type of output array - Array code %(typelabel)s.
        """
        for testvalue in self.testarray2:
            with self.subTest(msg='Failed with parameter', testvalue = testvalue):

                # Copy the array so we don't change the original data.
                testarray1 = copy.copy(self.testarray1)

                # This version is expected to pass.
                arrayfunc.%(funcname)s(testarray1, testvalue, self.dataout)

                # This is the actual test.
                with self.assertRaises(TypeError):
                    arrayfunc.%(funcname)s(testarray1, testvalue, self.baddataout)


    ########################################################
    def test_%(funclabel)s_num_array_none_c1(self):
        """Test %(funclabel)s as *num-array-none* for invalid type of array - Array code %(typelabel)s.
        """
        for testvalue in self.testarray1:
            with self.subTest(msg='Failed with parameter', testvalue = testvalue):

                # Copy the array so we don't change the original data.
                testarray2 = copy.copy(self.testarray2)
                badarray2 = copy.copy(self.badarray2)

                # This version is expected to pass.
                arrayfunc.%(funcname)s(testvalue, testarray2)

                # This is the actual test.
                with self.assertRaises(TypeError):
                    arrayfunc.%(funcname)s(testvalue, badarray2)


    ########################################################
    def test_%(funclabel)s_num_array_none_c2(self):
        """Test %(funclabel)s as *num-array-none* for invalid type of number - Array code %(typelabel)s.
        """
        for testvalue, badvalue in zip(self.testarray1, self.badarray1):
            with self.subTest(msg='Failed with parameter', testvalue = testvalue):

                # Copy the array so we don't change the original data.
                testarray2 = copy.copy(self.testarray2)

                # This version is expected to pass.
                arrayfunc.%(funcname)s(testvalue, testarray2)

                testarray2 = copy.copy(self.testarray2)

                # This is the actual test.
                with self.assertRaises(TypeError):
                    arrayfunc.%(funcname)s(badvalue, testarray2)


    ########################################################
    def test_%(funclabel)s_num_array_array_d1(self):
        """Test %(funclabel)s as *num-array-array* for invalid type of array - Array code %(typelabel)s.
        """
        for testvalue in self.testarray1:
            with self.subTest(msg='Failed with parameter', testvalue = testvalue):

                # This version is expected to pass.
                arrayfunc.%(funcname)s(testvalue, self.testarray2, self.dataout)

                # This is the actual test.
                with self.assertRaises(TypeError):
                    arrayfunc.%(funcname)s(testvalue, self.badarray2, self.dataout)


    ########################################################
    def test_%(funclabel)s_num_array_array_d2(self):
        """Test %(funclabel)s as *num-array-array* for invalid type of number - Array code %(typelabel)s.
        """
        for testvalue, badvalue in zip(self.testarray1, self.badarray1):
            with self.subTest(msg='Failed with parameter', testvalue = testvalue):

                # This version is expected to pass.
                arrayfunc.%(funcname)s(testvalue, self.testarray2, self.dataout)

                # This is the actual test.
                with self.assertRaises(TypeError):
                    arrayfunc.%(funcname)s(badvalue, self.testarray2, self.dataout)


    ########################################################
    def test_%(funclabel)s_num_array_array_d3(self):
        """Test %(funclabel)s as *num-array-array* for invalid type of output array - Array code %(typelabel)s.
        """
        for testvalue in self.testarray1:
            with self.subTest(msg='Failed with parameter', testvalue = testvalue):

                # This version is expected to pass.
                arrayfunc.%(funcname)s(testvalue, self.testarray2, self.dataout)

                # This is the actual test.
                with self.assertRaises(TypeError):
                    arrayfunc.%(funcname)s(testvalue, self.testarray2, self.baddataout)


    ########################################################
    def test_%(funclabel)s_array_array_none_e1(self):
        """Test %(funclabel)s as *array-array-none* for invalid type of array - Array code %(typelabel)s.
        """
        # Copy the array so we don't change the original data.
        testarray1 = copy.copy(self.testarray1)

        # This version is expected to pass.
        arrayfunc.%(funcname)s(testarray1, self.testarray2)

        # Copy the array so we don't change the original data.
        testarray1 = copy.copy(self.testarray1)

        # This is the actual test.
        with self.assertRaises(TypeError):
            arrayfunc.%(funcname)s(testarray1, self.badarray2)


    ########################################################
    def test_%(funclabel)s_array_array_none_e2(self):
        """Test %(funclabel)s as *array-array-none* for invalid type of array - Array code %(typelabel)s.
        """
        # This version is expected to pass.
        arrayfunc.%(funcname)s(self.testarray1, self.testarray2)

        # This is the actual test.
        with self.assertRaises(TypeError):
            arrayfunc.%(funcname)s(self.badarray1, self.testarray2)


    ########################################################
    def test_%(funclabel)s_array_array_array_f1(self):
        """Test %(funclabel)s as *array-array-array* for invalid type of array - Array code %(typelabel)s.
        """
        # This version is expected to pass.
        arrayfunc.%(funcname)s(self.testarray1, self.testarray2, self.dataout)

        # This is the actual test.
        with self.assertRaises(TypeError):
            arrayfunc.%(funcname)s(self.testarray1, self.badarray2, self.dataout)


    ########################################################
    def test_%(funclabel)s_array_array_array_f2(self):
        """Test %(funclabel)s as *array-array-array* for invalid type of array - Array code %(typelabel)s.
        """
        # This version is expected to pass.
        arrayfunc.%(funcname)s(self.testarray1, self.testarray2, self.dataout)

        # This is the actual test.
        with self.assertRaises(TypeError):
            arrayfunc.%(funcname)s(self.badarray1, self.testarray2, self.dataout)


    ########################################################
    def test_%(funclabel)s_array_array_array_f3(self):
        """Test %(funclabel)s as *array-array-array* for invalid type of output array - Array code %(typelabel)s.
        """
        # This version is expected to pass.
        arrayfunc.%(funcname)s(self.testarray1, self.testarray2, self.dataout)

        # This is the actual test.
        with self.assertRaises(TypeError):
            arrayfunc.%(funcname)s(self.testarray1, self.testarray2, self.baddataout)


    ########################################################
    def test_%(funclabel)s_no_params_g1(self):
        """Test %(funclabel)s with no parameters - Array code %(typelabel)s.
        """
        with self.assertRaises(TypeError):
            arrayfunc.%(funcname)s()


##############################################################################

'''
# ==============================================================================
# ==============================================================================
# The template used to generate the tests for testing invalid parameter types
# for errors flag and maxlen.
param_invalid_opt_template = '''
##############################################################################
class %(funclabel)s_opt_param_errors_%(typelabel)s(unittest.TestCase):
"""Test %(funclabel)s for invalid errors flag and maxlen parameters.
param_invalid_opt_template
"""
########################################################
def setUp(self):
"""Initialise.
"""
self.inparray1a = array.array('%(typecode)s', [%(test_op_x)s])
self.inparray1b = copy.copy(self.inparray1a)
self.inparray2a = array.array('%(typecode)s', [x for (x,y) in zip(itertools.cycle([%(test_op_y)s]), self.inparray1a)])
self.inparray2b = copy.copy(self.inparray2a)
arraysize = len(self.inparray1a)
self.dataout = array.array('%(typecode)s', itertools.repeat(%(zero_const)s, arraysize))
self.testmaxlen = len(self.inparray1a) // 2
########################################################
def test_%(funclabel)s_array_num_none_a1(self):
"""Test %(funclabel)s as *array-num-none* for matherrors='a' - Array code %(typelabel)s.
"""
inpvalue = self.inparray2a[0]
# This version is expected to pass.
arrayfunc.%(funcname)s(self.inparray1a, inpvalue, matherrors=True)
# This is the actual test.
with self.assertRaises(TypeError):
arrayfunc.%(funcname)s(self.inparray1b, inpvalue, matherrors='a')
########################################################
def test_%(funclabel)s_array_num_none_a2(self):
"""Test %(funclabel)s as *array-num-none* for maxlen='a' - Array code %(typelabel)s.
"""
inpvalue = self.inparray2a[0]
# This version is expected to pass.
arrayfunc.%(funcname)s(self.inparray1a, inpvalue, maxlen=self.testmaxlen)
# This is the actual test.
with self.assertRaises(TypeError):
arrayfunc.%(funcname)s(self.inparray1b, inpvalue, maxlen='a')
########################################################
def test_%(funclabel)s_array_num_array_b1(self):
"""Test %(funclabel)s as *array-num-array* for matherrors='a' - Array code %(typelabel)s.
"""
inpvalue = self.inparray2a[0]
# This version is expected to pass.
arrayfunc.%(funcname)s(self.inparray1a, inpvalue, self.dataout, matherrors=True)
# This is the actual test.
with self.assertRaises(TypeError):
arrayfunc.%(funcname)s(self.inparray1b, inpvalue, self.dataout, matherrors='a')
########################################################
def test_%(funclabel)s_array_num_array_b2(self):
"""Test %(funclabel)s as *array-num-array* for maxlen='a' - Array code %(typelabel)s.
"""
# Copy the array so we don't change the original data.
inpvalue = self.inparray2a[0]
# This version is expected to pass.
arrayfunc.%(funcname)s(self.inparray1a, inpvalue, self.dataout, maxlen=self.testmaxlen)
# This is the actual test.
with self.assertRaises(TypeError):
arrayfunc.%(funcname)s(self.inparray1b, inpvalue, self.dataout, maxlen='a')
########################################################
def test_%(funclabel)s_num_array_none_c1(self):
"""Test %(funclabel)s as *num-array-none* for matherrors='a' - Array code %(typelabel)s.
"""
inpvalue = self.inparray1a[0]
# This version is expected to pass.
arrayfunc.%(funcname)s(inpvalue, self.inparray2a, matherrors=True)
# This is the actual test.
with self.assertRaises(TypeError):
arrayfunc.%(funcname)s(inpvalue, self.inparray2b, matherrors='a')
########################################################
def test_%(funclabel)s_num_array_none_c2(self):
"""Test %(funclabel)s as *num-array-none* for maxlen='a' - Array code %(typelabel)s.
"""
inpvalue = self.inparray1a[0]
# This version is expected to pass.
arrayfunc.%(funcname)s(inpvalue, self.inparray2a, maxlen=self.testmaxlen)
# This is the actual test.
with self.assertRaises(TypeError):
arrayfunc.%(funcname)s(inpvalue, self.inparray2b, maxlen='a')
########################################################
def test_%(funclabel)s_num_array_array_d1(self):
"""Test %(funclabel)s as *num-array-array* for matherrors='a' - Array code %(typelabel)s.
"""
inpvalue = self.inparray1a[0]
# This version is expected to pass.
arrayfunc.%(funcname)s(inpvalue, self.inparray2a, self.dataout, matherrors=True)
# This is the actual test.
with self.assertRaises(TypeError):
arrayfunc.%(funcname)s(inpvalue, self.inparray2b, self.dataout, matherrors='a')
########################################################
def test_%(funclabel)s_num_array_array_d2(self):
"""Test %(funclabel)s as *num-array-array* for maxlen='a' - Array code %(typelabel)s.
"""
inpvalue = self.inparray1a[0]
# This version is expected to pass.
arrayfunc.%(funcname)s(inpvalue, self.inparray2a, self.dataout, maxlen=self.testmaxlen)
# This is the actual test.
with self.assertRaises(TypeError):
arrayfunc.%(funcname)s(inpvalue, self.inparray2b, self.dataout, maxlen='a')
########################################################
def test_%(funclabel)s_array_array_none_e1(self):
"""Test %(funclabel)s as *array-array-none* for matherrors='a' - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.%(funcname)s(self.inparray1a, self.inparray2a, matherrors=True)
# This is the actual test.
with self.assertRaises(TypeError):
arrayfunc.%(funcname)s(self.inparray1b, self.inparray2b, matherrors='a')
########################################################
def test_%(funclabel)s_array_array_none_e2(self):
"""Test %(funclabel)s as *array-array-none* for maxlen='a' - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.%(funcname)s(self.inparray1a, self.inparray2a, maxlen=self.testmaxlen)
# This is the actual test.
with self.assertRaises(TypeError):
arrayfunc.%(funcname)s(self.inparray1b, self.inparray2b, maxlen='a')
########################################################
def test_%(funclabel)s_array_array_array_f1(self):
"""Test %(funclabel)s as *array-array-array* for matherrors='a' - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.%(funcname)s(self.inparray1a, self.inparray2a, self.dataout, matherrors=True)
# This is the actual test.
with self.assertRaises(TypeError):
arrayfunc.%(funcname)s(self.inparray1b, self.inparray2b, self.dataout, matherrors='a')
########################################################
def test_%(funclabel)s_array_array_array_f2(self):
"""Test %(funclabel)s as *array-array-array* for maxlen='a' - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.%(funcname)s(self.inparray1a, self.inparray2a, self.dataout, maxlen=self.testmaxlen)
# This is the actual test.
with self.assertRaises(TypeError):
arrayfunc.%(funcname)s(self.inparray1b, self.inparray2b, self.dataout, maxlen='a')
##############################################################################
'''
# ==============================================================================
# The template used to generate the tests for nan, inf, -inf in data arrays
# when exceptions are expected.  The generated "check on" tests expect an
# ArithmeticError; the companion "error check off" tests repeat the operation
# with matherrors=True and compare results element by element via the patched
# FloatassertEqual.  Substitution keys used by this template: funclabel,
# funcname, errorlabel, errordata, typelabel, typecode, test_op_x, test_op_y,
# pyoperator.  NOTE: the template body below is a string literal — its text
# (including its internal comments) is emitted verbatim into the generated
# test file and must not be edited as if it were live code.
nan_data_error_template = '''
##############################################################################
class %(funclabel)s_%(errorlabel)s_errors_%(typelabel)s(unittest.TestCase):
"""Test %(funclabel)s for basic general function operation using parameter %(errordata)s.
nan_data_error_template
"""
##############################################################################
def FloatassertEqual(self, dataoutitem, expecteditem, msg=None):
"""This function is patched into assertEqual to allow testing for
the floating point special values NaN, Inf, and -Inf.
"""
# NaN cannot be compared using normal means.
if math.isnan(dataoutitem) and math.isnan(expecteditem):
pass
# Anything else can be compared normally.
else:
if not math.isclose(expecteditem, dataoutitem, rel_tol=0.01, abs_tol=0.0):
raise self.failureException('%%0.3f != %%0.3f' %% (expecteditem, dataoutitem))
########################################################
def setUp(self):
"""Initialise.
"""
# This is active for float numbers only.
self.addTypeEqualityFunc(float, self.FloatassertEqual)
self.dataok1 = array.array('%(typecode)s', [%(test_op_x)s])
self.dataok2 = array.array('%(typecode)s', [x for (x,y) in zip(itertools.cycle([%(test_op_y)s]), self.dataok1)])
arraysize = len(self.dataok1)
self.dataout = array.array('%(typecode)s', itertools.repeat(0.0, arraysize))
self.errordata = array.array('%(typecode)s', [float('%(errordata)s')] * arraysize)
########################################################
def test_%(funclabel)s_%(errorlabel)s_array_num_none_a1(self):
"""Test %(funclabel)s as *array-num-none* for %(errordata)s - Array code %(typelabel)s.
"""
for testval in [%(test_op_y)s]:
with self.subTest(msg='Failed with parameter', testval = testval):
# Copy the array so we don't change the original data.
dataok1 = copy.copy(self.dataok1)
errordata = copy.copy(self.errordata)
# This version is expected to pass.
arrayfunc.%(funcname)s(dataok1, testval)
# This is the actual test.
with self.assertRaises(ArithmeticError):
arrayfunc.%(funcname)s(errordata, testval)
########################################################
def test_%(funclabel)s_%(errorlabel)s_array_num_none_a2(self):
"""Test %(funclabel)s as *array-num-none* for %(errordata)s with error check off - Array code %(typelabel)s.
"""
for testval in [%(test_op_y)s]:
with self.subTest(msg='Failed with parameter', testval = testval):
# Copy the array so we don't change the original data.
errordata = copy.copy(self.errordata)
expected = [x %(pyoperator)s testval for x in self.errordata]
arrayfunc.%(funcname)s(errordata, testval, matherrors=True)
for dataoutitem, expecteditem in zip(errordata, expected):
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(dataoutitem, expecteditem)
########################################################
def test_%(funclabel)s_%(errorlabel)s_array_num_array_b1(self):
"""Test %(funclabel)s as *array-num-array* for %(errordata)s - Array code %(typelabel)s.
"""
for testval in [%(test_op_y)s]:
with self.subTest(msg='Failed with parameter', testval = testval):
# Copy the array so we don't change the original data.
dataok1 = copy.copy(self.dataok1)
errordata = copy.copy(self.errordata)
# This version is expected to pass.
arrayfunc.%(funcname)s(dataok1, testval, self.dataout)
# This is the actual test.
with self.assertRaises(ArithmeticError):
arrayfunc.%(funcname)s(errordata, testval, self.dataout)
########################################################
def test_%(funclabel)s_%(errorlabel)s_array_num_array_b2(self):
"""Test %(funclabel)s as *array-num-array* for %(errordata)s with error check off - Array code %(typelabel)s.
"""
for testval in [%(test_op_y)s]:
with self.subTest(msg='Failed with parameter', testval = testval):
expected = [x %(pyoperator)s testval for x in self.errordata]
arrayfunc.%(funcname)s(self.errordata, testval, self.dataout, matherrors=True)
for dataoutitem, expecteditem in zip(self.dataout, expected):
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(dataoutitem, expecteditem)
########################################################
def test_%(funclabel)s_%(errorlabel)s_num_array_none_c1(self):
"""Test %(funclabel)s as *num-array-none* for %(errordata)s - Array code %(typelabel)s.
"""
for testval in [%(test_op_x)s]:
with self.subTest(msg='Failed with parameter', testval = testval):
# Copy the array so we don't change the original data.
dataok2 = copy.copy(self.dataok2)
errordata = copy.copy(self.errordata)
# This version is expected to pass.
arrayfunc.%(funcname)s(testval, dataok2)
# This is the actual test.
with self.assertRaises(ArithmeticError):
arrayfunc.%(funcname)s(testval, errordata)
########################################################
def test_%(funclabel)s_%(errorlabel)s_num_array_none_c2(self):
"""Test %(funclabel)s as *num-array-none* for %(errordata)s with error check off - Array code %(typelabel)s.
"""
for testval in [%(test_op_x)s]:
with self.subTest(msg='Failed with parameter', testval = testval):
# Copy the array so we don't change the original data.
errordata = copy.copy(self.errordata)
expected = [testval %(pyoperator)s x for x in self.errordata]
arrayfunc.%(funcname)s(testval, errordata, matherrors=True)
for dataoutitem, expecteditem in zip(errordata, expected):
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(dataoutitem, expecteditem)
########################################################
def test_%(funclabel)s_%(errorlabel)s_num_array_array_d1(self):
"""Test %(funclabel)s as *num-array-array* for %(errordata)s - Array code %(typelabel)s.
"""
for testval in [%(test_op_x)s]:
with self.subTest(msg='Failed with parameter', testval = testval):
# This version is expected to pass.
arrayfunc.%(funcname)s(testval, self.dataok2, self.dataout)
# This is the actual test.
with self.assertRaises(ArithmeticError):
arrayfunc.%(funcname)s(testval, self.errordata, self.dataout)
########################################################
def test_%(funclabel)s_%(errorlabel)s_num_array_array_d2(self):
"""Test %(funclabel)s as *num-array-array* for %(errordata)s with error check off - Array code %(typelabel)s.
"""
for testval in [%(test_op_x)s]:
with self.subTest(msg='Failed with parameter', testval = testval):
expected = [testval %(pyoperator)s x for x in self.errordata]
arrayfunc.%(funcname)s(testval, self.errordata, self.dataout, matherrors=True)
for dataoutitem, expecteditem in zip(self.dataout, expected):
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(dataoutitem, expecteditem)
########################################################
def test_%(funclabel)s_%(errorlabel)s_array_array_none_e1(self):
"""Test %(funclabel)s as *array-array-none* for %(errordata)s - Array code %(typelabel)s.
"""
# Copy the array so we don't change the original data.
dataok1 = copy.copy(self.dataok1)
dataok2 = copy.copy(self.dataok2)
# This version is expected to pass.
arrayfunc.%(funcname)s(dataok1, dataok2)
# Copy the array so we don't change the original data.
dataok1 = copy.copy(self.dataok1)
# This is the actual test.
with self.assertRaises(ArithmeticError):
arrayfunc.%(funcname)s(dataok1, self.errordata)
########################################################
def test_%(funclabel)s_%(errorlabel)s_array_array_none_e2(self):
"""Test %(funclabel)s as *array-array-none* for %(errordata)s with error check off - Array code %(typelabel)s.
"""
expected = [y %(pyoperator)s x for x,y in zip(self.errordata, self.dataok1)]
arrayfunc.%(funcname)s(self.dataok1, self.errordata, matherrors=True)
for dataoutitem, expecteditem in zip(self.dataok1, expected):
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(dataoutitem, expecteditem)
########################################################
def test_%(funclabel)s_%(errorlabel)s_array_array_array_f1(self):
"""Test %(funclabel)s as *array-array-array* for %(errordata)s - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.%(funcname)s(self.dataok1, self.dataok2, self.dataout)
# This is the actual test.
with self.assertRaises(ArithmeticError):
arrayfunc.%(funcname)s(self.dataok1, self.errordata, self.dataout)
########################################################
def test_%(funclabel)s_%(errorlabel)s_array_array_array_f2(self):
"""Test %(funclabel)s as *array-array-array* for %(errordata)s with error check off - Array code %(typelabel)s.
"""
expected = [y %(pyoperator)s x for x,y in zip(self.errordata, self.dataok1)]
arrayfunc.%(funcname)s(self.dataok1, self.errordata, self.dataout, matherrors=True)
for dataoutitem, expecteditem in zip(self.dataout, expected):
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(dataoutitem, expecteditem)
##############################################################################
'''
# ==============================================================================
# ==============================================================================
# The template used to generate the tests for nan, inf, -inf in data arrays
# when exceptions are expected. This is a special version for division.
# Unlike nan_data_error_template, only the a1/b1 exception tests are generated
# for the *array-num* forms; the *num-array* and *array-array* forms get only
# the matherrors=True ("error check off") variants (c2, d2, e2, f2).
# Substitution keys: funclabel, funcname, errorlabel, errordata, typelabel,
# typecode, test_op_x, test_op_y, pyoperator.  NOTE: the template body below
# is a string literal emitted verbatim into the generated test file.
nan_div_data_error_template = '''
##############################################################################
class %(funclabel)s_div_%(errorlabel)s_errors_%(typelabel)s(unittest.TestCase):
"""Test %(funclabel)s for basic general function operation using parameter %(errordata)s.
This version is for division operations where division by inf and -inf
results in zero.
nan_div_data_error_template
"""
##############################################################################
def FloatassertEqual(self, dataoutitem, expecteditem, msg=None):
"""This function is patched into assertEqual to allow testing for
the floating point special values NaN, Inf, and -Inf.
"""
# NaN cannot be compared using normal means.
if math.isnan(dataoutitem) and math.isnan(expecteditem):
pass
# Anything else can be compared normally.
else:
if not math.isclose(expecteditem, dataoutitem, rel_tol=0.01, abs_tol=0.0):
raise self.failureException('%%0.3f != %%0.3f' %% (expecteditem, dataoutitem))
########################################################
def setUp(self):
"""Initialise.
"""
# This is active for float numbers only.
self.addTypeEqualityFunc(float, self.FloatassertEqual)
self.dataok1 = array.array('%(typecode)s', [%(test_op_x)s])
self.dataok2 = array.array('%(typecode)s', [x for (x,y) in zip(itertools.cycle([%(test_op_y)s]), self.dataok1)])
arraysize = len(self.dataok1)
self.dataout = array.array('%(typecode)s', itertools.repeat(0.0, arraysize))
self.errordata = array.array('%(typecode)s', [float('%(errordata)s')] * arraysize)
########################################################
def test_%(funclabel)s_%(errorlabel)s_array_num_none_a1(self):
"""Test %(funclabel)s as *array-num-none* for %(errordata)s - Array code %(typelabel)s.
"""
for testval in [%(test_op_y)s]:
with self.subTest(msg='Failed with parameter', testval = testval):
# Copy the array so we don't change the original data.
dataok1 = copy.copy(self.dataok1)
errordata = copy.copy(self.errordata)
# This version is expected to pass.
arrayfunc.%(funcname)s(dataok1, testval)
# This is the actual test.
with self.assertRaises(ArithmeticError):
arrayfunc.%(funcname)s(errordata, testval)
########################################################
def test_%(funclabel)s_%(errorlabel)s_array_num_none_a2(self):
"""Test %(funclabel)s as *array-num-none* for %(errordata)s with error check off - Array code %(typelabel)s.
"""
for testval in [%(test_op_y)s]:
with self.subTest(msg='Failed with parameter', testval = testval):
# Copy the array so we don't change the original data.
errordata = copy.copy(self.errordata)
expected = [x %(pyoperator)s testval for x in self.errordata]
arrayfunc.%(funcname)s(errordata, testval, matherrors=True)
for dataoutitem, expecteditem in zip(errordata, expected):
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(dataoutitem, expecteditem)
########################################################
def test_%(funclabel)s_%(errorlabel)s_array_num_array_b1(self):
"""Test %(funclabel)s as *array-num-array* for %(errordata)s - Array code %(typelabel)s.
"""
for testval in [%(test_op_y)s]:
with self.subTest(msg='Failed with parameter', testval = testval):
# Copy the array so we don't change the original data.
dataok1 = copy.copy(self.dataok1)
errordata = copy.copy(self.errordata)
# This version is expected to pass.
arrayfunc.%(funcname)s(dataok1, testval, self.dataout)
# This is the actual test.
with self.assertRaises(ArithmeticError):
arrayfunc.%(funcname)s(errordata, testval, self.dataout)
########################################################
def test_%(funclabel)s_%(errorlabel)s_array_num_array_b2(self):
"""Test %(funclabel)s as *array-num-array* for %(errordata)s with error check off - Array code %(typelabel)s.
"""
for testval in [%(test_op_y)s]:
with self.subTest(msg='Failed with parameter', testval = testval):
expected = [x %(pyoperator)s testval for x in self.errordata]
arrayfunc.%(funcname)s(self.errordata, testval, self.dataout, matherrors=True)
for dataoutitem, expecteditem in zip(self.dataout, expected):
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(dataoutitem, expecteditem)
########################################################
def test_%(funclabel)s_%(errorlabel)s_num_array_none_c2(self):
"""Test %(funclabel)s as *num-array-none* for %(errordata)s with error check off - Array code %(typelabel)s.
"""
for testval in [%(test_op_x)s]:
with self.subTest(msg='Failed with parameter', testval = testval):
# Copy the array so we don't change the original data.
errordata = copy.copy(self.errordata)
expected = [testval %(pyoperator)s x for x in self.errordata]
arrayfunc.%(funcname)s(testval, errordata, matherrors=True)
for dataoutitem, expecteditem in zip(errordata, expected):
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(dataoutitem, expecteditem)
########################################################
def test_%(funclabel)s_%(errorlabel)s_num_array_array_d2(self):
"""Test %(funclabel)s as *num-array-array* for %(errordata)s with error check off - Array code %(typelabel)s.
"""
for testval in [%(test_op_x)s]:
with self.subTest(msg='Failed with parameter', testval = testval):
expected = [testval %(pyoperator)s x for x in self.errordata]
arrayfunc.%(funcname)s(testval, self.errordata, self.dataout, matherrors=True)
for dataoutitem, expecteditem in zip(self.dataout, expected):
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(dataoutitem, expecteditem)
########################################################
def test_%(funclabel)s_%(errorlabel)s_array_array_none_e2(self):
"""Test %(funclabel)s as *array-array-none* for %(errordata)s with error check off - Array code %(typelabel)s.
"""
expected = [x %(pyoperator)s y for x,y in zip(self.dataok1, self.errordata)]
arrayfunc.%(funcname)s(self.dataok1, self.errordata, matherrors=True)
for dataoutitem, expecteditem in zip(self.dataok1, expected):
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(dataoutitem, expecteditem)
########################################################
def test_%(funclabel)s_%(errorlabel)s_array_array_array_f2(self):
"""Test %(funclabel)s as *array-array-array* for %(errordata)s with error check off - Array code %(typelabel)s.
"""
expected = [x %(pyoperator)s y for x,y in zip(self.dataok1, self.errordata)]
arrayfunc.%(funcname)s(self.dataok1, self.errordata, self.dataout, matherrors=True)
for dataoutitem, expecteditem in zip(self.dataout, expected):
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(dataoutitem, expecteditem)
##############################################################################
'''
# ==============================================================================
# ==============================================================================
# The template used to generate the tests for nan, inf, -inf in data arrays
# when exceptions are expected. This is a special version for floor division
# (//).  Unlike the generic templates it does not use the %(pyoperator)s key:
# expected values are computed directly with true division and math.floor so
# the sign carries over correctly when inf/-inf is involved.
# Substitution keys: funclabel, funcname, errorlabel, errordata, typelabel,
# typecode, test_op_x, test_op_y.  NOTE: the template body below is a string
# literal emitted verbatim into the generated test file.
inf_floordiv_data_error_template = '''
##############################################################################
class %(funclabel)s_div_%(errorlabel)s_errors_%(typelabel)s(unittest.TestCase):
"""Test %(funclabel)s for basic general function operation using parameter %(errordata)s.
This version is for division operations where division by inf and -inf
results in zero.
inf_floordiv_data_error_template
"""
##############################################################################
def FloatassertEqual(self, dataoutitem, expecteditem, msg=None):
"""This function is patched into assertEqual to allow testing for
the floating point special values NaN, Inf, and -Inf.
"""
# NaN cannot be compared using normal means.
if math.isnan(dataoutitem) and math.isnan(expecteditem):
pass
# Anything else can be compared normally.
else:
if not math.isclose(expecteditem, dataoutitem, rel_tol=0.01, abs_tol=0.0):
raise self.failureException('%%0.3f != %%0.3f' %% (expecteditem, dataoutitem))
########################################################
def setUp(self):
"""Initialise.
"""
# This is active for float numbers only.
self.addTypeEqualityFunc(float, self.FloatassertEqual)
self.dataok1 = array.array('%(typecode)s', [%(test_op_x)s])
self.dataok2 = array.array('%(typecode)s', [x for (x,y) in zip(itertools.cycle([%(test_op_y)s]), self.dataok1)])
arraysize = len(self.dataok1)
self.dataout = array.array('%(typecode)s', itertools.repeat(0.0, arraysize))
self.errordata = array.array('%(typecode)s', [float('%(errordata)s')] * arraysize)
########################################################
def test_%(funclabel)s_%(errorlabel)s_array_num_none_a1(self):
"""Test %(funclabel)s as *array-num-none* for %(errordata)s - Array code %(typelabel)s.
"""
for testval in [%(test_op_y)s]:
with self.subTest(msg='Failed with parameter', testval = testval):
# Copy the array so we don't change the original data.
dataok1 = copy.copy(self.dataok1)
errordata = copy.copy(self.errordata)
# This version is expected to pass.
arrayfunc.%(funcname)s(dataok1, testval)
# This is the actual test.
with self.assertRaises(ArithmeticError):
arrayfunc.%(funcname)s(errordata, testval)
########################################################
def test_%(funclabel)s_%(errorlabel)s_array_num_none_a2(self):
"""Test %(funclabel)s as *array-num-none* for %(errordata)s with error check off - Array code %(typelabel)s.
"""
for testval in [%(test_op_y)s]:
with self.subTest(msg='Failed with parameter', testval = testval):
# Copy the array so we don't change the original data.
errordata = copy.copy(self.errordata)
# Inf divided by anything other than 0, inf, or nan, is inf.
# We need to calculate this as follows in order to transfer the
# sign over correctly.
expected = [x / testval for x in self.errordata]
arrayfunc.%(funcname)s(errordata, testval, matherrors=True)
for dataoutitem, expecteditem in zip(errordata, expected):
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(dataoutitem, expecteditem)
########################################################
def test_%(funclabel)s_%(errorlabel)s_array_num_array_b1(self):
"""Test %(funclabel)s as *array-num-array* for %(errordata)s - Array code %(typelabel)s.
"""
for testval in [%(test_op_y)s]:
with self.subTest(msg='Failed with parameter', testval = testval):
# Copy the array so we don't change the original data.
dataok1 = copy.copy(self.dataok1)
errordata = copy.copy(self.errordata)
# This version is expected to pass.
arrayfunc.%(funcname)s(dataok1, testval, self.dataout)
# This is the actual test.
with self.assertRaises(ArithmeticError):
arrayfunc.%(funcname)s(errordata, testval, self.dataout)
########################################################
def test_%(funclabel)s_%(errorlabel)s_array_num_array_b2(self):
"""Test %(funclabel)s as *array-num-array* for %(errordata)s with error check off - Array code %(typelabel)s.
"""
for testval in [%(test_op_y)s]:
with self.subTest(msg='Failed with parameter', testval = testval):
# Inf divided by anything other than 0, inf, or nan, is inf.
# We need to calculate this as follows in order to transfer the
# sign over correctly.
expected = [x / testval for x in self.errordata]
arrayfunc.%(funcname)s(self.errordata, testval, self.dataout, matherrors=True)
for dataoutitem, expecteditem in zip(self.dataout, expected):
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(dataoutitem, expecteditem)
########################################################
def test_%(funclabel)s_%(errorlabel)s_num_array_none_c2(self):
"""Test %(funclabel)s as *num-array-none* for %(errordata)s with error check off - Array code %(typelabel)s.
"""
for testval in [%(test_op_x)s]:
with self.subTest(msg='Failed with parameter', testval = testval):
# Copy the array so we don't change the original data.
errordata = copy.copy(self.errordata)
# The underlying C library
expected = [math.floor(testval / x) for x in self.errordata]
arrayfunc.%(funcname)s(testval, errordata, matherrors=True)
for dataoutitem, expecteditem in zip(errordata, expected):
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(dataoutitem, expecteditem)
########################################################
def test_%(funclabel)s_%(errorlabel)s_num_array_array_d2(self):
"""Test %(funclabel)s as *num-array-array* for %(errordata)s with error check off - Array code %(typelabel)s.
"""
for testval in [%(test_op_x)s]:
with self.subTest(msg='Failed with parameter', testval = testval):
expected = [math.floor(testval / x) for x in self.errordata]
arrayfunc.%(funcname)s(testval, self.errordata, self.dataout, matherrors=True)
for dataoutitem, expecteditem in zip(self.dataout, expected):
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(dataoutitem, expecteditem)
########################################################
def test_%(funclabel)s_%(errorlabel)s_array_array_none_e2(self):
"""Test %(funclabel)s as *array-array-none* for %(errordata)s with error check off - Array code %(typelabel)s.
"""
expected = [math.floor(x / y) for x,y in zip(self.dataok1, self.errordata)]
arrayfunc.%(funcname)s(self.dataok1, self.errordata, matherrors=True)
for dataoutitem, expecteditem in zip(self.dataok1, expected):
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(dataoutitem, expecteditem)
########################################################
def test_%(funclabel)s_%(errorlabel)s_array_array_array_f2(self):
"""Test %(funclabel)s as *array-array-array* for %(errordata)s with error check off - Array code %(typelabel)s.
"""
expected = [math.floor(x / y) for x,y in zip(self.dataok1, self.errordata)]
arrayfunc.%(funcname)s(self.dataok1, self.errordata, self.dataout, matherrors=True)
for dataoutitem, expecteditem in zip(self.dataout, expected):
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(dataoutitem, expecteditem)
##############################################################################
'''
# ==============================================================================
# ==============================================================================
# The template used to generate the tests for overflows using maximum value.
# Each generated test first performs the operation with a zero operand (which
# must pass), then repeats it with %(incvalue)s against arrays filled with the
# type's maximum value and expects %(errorflagexceptioncode)s to be raised.
# Substitution keys: funclabel, funcname, typelabel, typecode, incvalue,
# zero_const, errorflagexceptioncode.  NOTE: the template body below is a
# string literal emitted verbatim into the generated test file.
param_overflow_max_template = '''
##############################################################################
class %(funclabel)s_overflow_max_errors_%(typelabel)s(unittest.TestCase):
"""Test %(funclabel)s for value overflow for max value.
param_overflow_max_template
"""
########################################################
def setUp(self):
"""Initialise.
"""
arraysize = 200
self.MaxLimit = arrayfunc.arraylimits.%(typecode)s_max
self.incvalue = %(incvalue)s
self.zero_const = %(zero_const)s
self.inparray1amax = array.array('%(typecode)s', [self.MaxLimit] * arraysize)
self.inparray1bmax = copy.copy(self.inparray1amax)
self.inparray2amax = array.array('%(typecode)s', [self.MaxLimit] * arraysize)
self.inparray2bmax = copy.copy(self.inparray2amax)
self.zeroarray = array.array('%(typecode)s', [self.zero_const] * arraysize)
self.incvaluearray = array.array('%(typecode)s', [self.incvalue] * arraysize)
self.dataout = array.array('%(typecode)s', itertools.repeat(self.zero_const, arraysize))
########################################################
def test_%(funclabel)s_array_num_none_a1(self):
"""Test %(funclabel)s as *array-num-none* for max value + 1 - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.%(funcname)s(self.inparray1amax, self.zero_const)
# This is the actual test.
with self.assertRaises(%(errorflagexceptioncode)s):
arrayfunc.%(funcname)s(self.inparray1bmax, self.incvalue)
########################################################
def test_%(funclabel)s_array_num_array_b1(self):
"""Test %(funclabel)s as *array-num-array* for max value + 1 - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.%(funcname)s(self.inparray1amax, self.zero_const, self.dataout)
# This is the actual test.
with self.assertRaises(%(errorflagexceptioncode)s):
arrayfunc.%(funcname)s(self.inparray1bmax, self.incvalue, self.dataout)
########################################################
def test_%(funclabel)s_num_array_none_c1(self):
"""Test %(funclabel)s as *num-array-none* for max value + 1 - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.%(funcname)s(self.zero_const, self.inparray2amax)
# This is the actual test.
with self.assertRaises(%(errorflagexceptioncode)s):
arrayfunc.%(funcname)s(self.incvalue, self.inparray2bmax)
########################################################
def test_%(funclabel)s_num_array_array_d1(self):
"""Test %(funclabel)s as *num-array-array* for max value + 1 - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.%(funcname)s(self.zero_const, self.inparray2amax, self.dataout)
# This is the actual test.
with self.assertRaises(%(errorflagexceptioncode)s):
arrayfunc.%(funcname)s(self.incvalue, self.inparray2bmax, self.dataout)
########################################################
def test_%(funclabel)s_array_array_none_e1(self):
"""Test %(funclabel)s as *array-array-none* for max value + 1 - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.%(funcname)s(self.inparray1amax, self.zeroarray)
# This is the actual test.
with self.assertRaises(%(errorflagexceptioncode)s):
arrayfunc.%(funcname)s(self.inparray1bmax, self.incvaluearray)
########################################################
def test_%(funclabel)s_array_array_none_e2(self):
"""Test %(funclabel)s as *array-array-none* for max value + 1 - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.%(funcname)s(self.zeroarray, self.inparray1amax)
# This is the actual test.
with self.assertRaises(%(errorflagexceptioncode)s):
arrayfunc.%(funcname)s(self.incvaluearray, self.inparray1bmax)
########################################################
def test_%(funclabel)s_array_array_array_f1(self):
"""Test %(funclabel)s as *array-array-array* for max value + 1 - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.%(funcname)s(self.inparray1amax, self.zeroarray, self.dataout)
# This is the actual test.
with self.assertRaises(%(errorflagexceptioncode)s):
arrayfunc.%(funcname)s(self.inparray1bmax, self.incvaluearray, self.dataout)
########################################################
def test_%(funclabel)s_array_array_array_f2(self):
"""Test %(funclabel)s as *array-array-array* for max value + 1 - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.%(funcname)s(self.zeroarray, self.inparray1amax, self.dataout)
# This is the actual test.
with self.assertRaises(%(errorflagexceptioncode)s):
arrayfunc.%(funcname)s(self.incvaluearray, self.inparray1bmax, self.dataout)
##############################################################################
'''
# ==============================================================================
# The template used to generate the tests for overflows using minimum value.
# Substitution keys: %(funclabel)s, %(funcname)s, %(typelabel)s, %(typecode)s,
# %(decvalue)s, %(zero_const)s, %(errorflagexceptioncode)s.
# NOTE(review): self.incvalue is filled from %(decvalue)s, and the generated
# docstrings read "min value + 1" although the failing calls drive the result
# below the minimum — presumably historical wording; the template body is
# runtime data emitted into test files, so confirm before editing its text.
param_overflow_min_template = '''
##############################################################################
class %(funclabel)s_overflow_min_errors_%(typelabel)s(unittest.TestCase):
"""Test %(funclabel)s for value overflow for min value.
param_overflow_min_template
"""
########################################################
def setUp(self):
"""Initialise.
"""
arraysize = 200
self.MinLimit = arrayfunc.arraylimits.%(typecode)s_min
self.incvalue = %(decvalue)s
self.zero_const = %(zero_const)s
self.inparray1amin = array.array('%(typecode)s', [self.MinLimit] * arraysize)
self.inparray1bmin = copy.copy(self.inparray1amin)
self.inparray2amin = array.array('%(typecode)s', [self.MinLimit] * arraysize)
self.inparray2bmin = copy.copy(self.inparray2amin)
self.zeroarray = array.array('%(typecode)s', [self.zero_const] * arraysize)
self.incvaluearray = array.array('%(typecode)s', [self.incvalue] * arraysize)
self.dataout = array.array('%(typecode)s', itertools.repeat(self.zero_const, arraysize))
########################################################
def test_%(funclabel)s_array_num_none_a1(self):
"""Test %(funclabel)s as *array-num-none* for min value + 1 - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.%(funcname)s(self.inparray1amin, self.zero_const)
# This is the actual test.
with self.assertRaises(%(errorflagexceptioncode)s):
arrayfunc.%(funcname)s(self.inparray1bmin, self.incvalue)
########################################################
def test_%(funclabel)s_array_num_array_b1(self):
"""Test %(funclabel)s as *array-num-array* for min value + 1 - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.%(funcname)s(self.inparray1amin, self.zero_const, self.dataout)
# This is the actual test.
with self.assertRaises(%(errorflagexceptioncode)s):
arrayfunc.%(funcname)s(self.inparray1bmin, self.incvalue, self.dataout)
########################################################
def test_%(funclabel)s_num_array_none_c1(self):
"""Test %(funclabel)s as *num-array-none* for min value + 1 - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.%(funcname)s(self.zero_const, self.inparray2amin)
# This is the actual test.
with self.assertRaises(%(errorflagexceptioncode)s):
arrayfunc.%(funcname)s(self.incvalue, self.inparray2bmin)
########################################################
def test_%(funclabel)s_num_array_array_d1(self):
"""Test %(funclabel)s as *num-array-array* for min value + 1 - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.%(funcname)s(self.zero_const, self.inparray2amin, self.dataout)
# This is the actual test.
with self.assertRaises(%(errorflagexceptioncode)s):
arrayfunc.%(funcname)s(self.incvalue, self.inparray2bmin, self.dataout)
########################################################
def test_%(funclabel)s_array_array_none_e1(self):
"""Test %(funclabel)s as *array-array-none* for min value + 1 - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.%(funcname)s(self.inparray1amin, self.zeroarray)
# This is the actual test.
with self.assertRaises(%(errorflagexceptioncode)s):
arrayfunc.%(funcname)s(self.inparray1bmin, self.incvaluearray)
########################################################
def test_%(funclabel)s_array_array_none_e2(self):
"""Test %(funclabel)s as *array-array-none* for min value + 1 - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.%(funcname)s(self.zeroarray, self.inparray1amin)
# This is the actual test.
with self.assertRaises(%(errorflagexceptioncode)s):
arrayfunc.%(funcname)s(self.incvaluearray, self.inparray1bmin)
########################################################
def test_%(funclabel)s_array_array_array_f1(self):
"""Test %(funclabel)s as *array-array-array* for min value + 1 - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.%(funcname)s(self.inparray1amin, self.zeroarray, self.dataout)
# This is the actual test.
with self.assertRaises(%(errorflagexceptioncode)s):
arrayfunc.%(funcname)s(self.inparray1bmin, self.incvaluearray, self.dataout)
########################################################
def test_%(funclabel)s_array_array_array_f2(self):
"""Test %(funclabel)s as *array-array-array* for min value + 1 - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.%(funcname)s(self.zeroarray, self.inparray1amin, self.dataout)
# This is the actual test.
with self.assertRaises(%(errorflagexceptioncode)s):
arrayfunc.%(funcname)s(self.incvaluearray, self.inparray1bmin, self.dataout)
##############################################################################
'''
# ==============================================================================
# The template used to generate the tests for overflows using maximum value.
# Substitution keys: %(typelabel)s, %(typecode)s, %(decimalpoint)s,
# %(floatpad)s, %(exceptioncode)s.  Generates an "add" overflow TestCase
# driving max value + 1 through all six call forms (a1-a6).
param_overflow_add_max1_template = '''
##############################################################################
class overflow_signed_max1_%(typelabel)s(unittest.TestCase):
"""Test add for value overflow for max values.
param_overflow_add_max1_template
"""
########################################################
def setUp(self):
"""Initialise.
"""
arraysize = 200
self.MaxLimit = arrayfunc.arraylimits.%(typecode)s_max
self.MinLimit = arrayfunc.arraylimits.%(typecode)s_min
self.inparray1amax = array.array('%(typecode)s', [self.MaxLimit] * arraysize)
self.inparray1bmax = copy.copy(self.inparray1amax)
self.zero1array = array.array('%(typecode)s', [0%(decimalpoint)s] * arraysize)
self.plus1array = array.array('%(typecode)s', [1%(floatpad)s] * arraysize)
self.dataout = array.array('%(typecode)s', itertools.repeat(0%(decimalpoint)s, arraysize))
########################################################
def test_add_array_num_none_a1(self):
"""Test add as *array-num-none* for max value + 1%(floatpad)s - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.add(self.inparray1amax, 0%(decimalpoint)s)
# This is the actual test.
with self.assertRaises(%(exceptioncode)s):
arrayfunc.add(self.inparray1bmax, 1%(floatpad)s)
########################################################
def test_add_array_num_array_a2(self):
"""Test add as *array-num-array* for max value + 1%(floatpad)s - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.add(self.inparray1amax, 0%(decimalpoint)s, self.dataout)
# This is the actual test.
with self.assertRaises(%(exceptioncode)s):
arrayfunc.add(self.inparray1bmax, 1%(floatpad)s, self.dataout)
########################################################
def test_add_num_array_none_a3(self):
"""Test add as *num-array-none* for max value + 1%(floatpad)s - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.add(self.MaxLimit, self.zero1array)
# This is the actual test.
with self.assertRaises(%(exceptioncode)s):
arrayfunc.add(self.MaxLimit, self.plus1array)
########################################################
def test_add_num_array_array_a4(self):
"""Test add as *num-array-array* for max value + 1%(floatpad)s - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.add(self.MaxLimit, self.zero1array, self.dataout)
# This is the actual test.
with self.assertRaises(%(exceptioncode)s):
arrayfunc.add(self.MaxLimit, self.plus1array, self.dataout)
########################################################
def test_add_array_array_none_a5(self):
"""Test add as *array-array-none* for max value + 1%(floatpad)s - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.add(self.inparray1amax, self.zero1array)
# This is the actual test.
with self.assertRaises(%(exceptioncode)s):
arrayfunc.add(self.inparray1bmax, self.plus1array)
########################################################
def test_add_array_array_array_a6(self):
"""Test add as *array-array-array* for max value + 1%(floatpad)s - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.add(self.inparray1amax, self.zero1array, self.dataout)
# This is the actual test.
with self.assertRaises(%(exceptioncode)s):
arrayfunc.add(self.inparray1bmax, self.plus1array, self.dataout)
##############################################################################
'''
# ==============================================================================
# ==============================================================================
# The template used to generate the tests for overflows using minimum value.
param_overflow_add_min1_template = '''
##############################################################################
class overflow_signed_min1_%(typelabel)s(unittest.TestCase):
"""Test add for value overflow for min values.
param_overflow_add_min1_template
"""
########################################################
def setUp(self):
"""Initialise.
"""
arraysize = 200
self.MaxLimit = arrayfunc.arraylimits.%(typecode)s_max
self.MinLimit = arrayfunc.arraylimits.%(typecode)s_min
self.inparray1amin = array.array('%(typecode)s', [self.MinLimit] * arraysize)
self.inparray1bmin = copy.copy(self.inparray1amin)
self.zero1array = array.array('%(typecode)s', [0%(decimalpoint)s] * arraysize)
self.minus1array = array.array('%(typecode)s', [-1%(floatpad)s] * arraysize)
self.dataout = array.array('%(typecode)s', itertools.repeat(0%(decimalpoint)s, arraysize))
########################################################
def test_add_array_num_none_b1(self):
"""Test add as *array-num-none* for min value + -1%(floatpad)s - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.add(self.inparray1amin, 0%(decimalpoint)s)
# This is the actual test.
with self.assertRaises(%(exceptioncode)s):
arrayfunc.add(self.inparray1bmin, -1%(floatpad)s)
########################################################
def test_add_array_num_array_b2(self):
"""Test add as *array-num-array* for min value + -1%(floatpad)s - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.add(self.inparray1amin, 0%(decimalpoint)s, self.dataout)
# This is the actual test.
with self.assertRaises(%(exceptioncode)s):
arrayfunc.add(self.inparray1bmin, -1%(floatpad)s, self.dataout)
########################################################
def test_add_num_array_none_b3(self):
"""Test add as *num-array-none* for min value + -1%(floatpad)s - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.add(self.MinLimit, self.zero1array)
# This is the actual test.
with self.assertRaises(%(exceptioncode)s):
arrayfunc.add(self.MinLimit, self.minus1array)
########################################################
def test_add_num_array_array_b4(self):
"""Test add as *num-array-array* for min value + -1%(floatpad)s - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.add(self.MinLimit, self.zero1array, self.dataout)
# This is the actual test.
with self.assertRaises(%(exceptioncode)s):
arrayfunc.add(self.MinLimit, self.minus1array, self.dataout)
########################################################
def test_add_array_num_none_b5(self):
"""Test add as *array-array-none* for min value + -1%(floatpad)s - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.add(self.inparray1amin, self.zero1array)
# This is the actual test.
with self.assertRaises(%(exceptioncode)s):
arrayfunc.add(self.inparray1bmin, self.minus1array)
########################################################
def test_add_array_num_none_b6(self):
"""Test add as *array-array-array* for min value + -1%(floatpad)s - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.add(self.inparray1amin, self.zero1array, self.dataout)
# This is the actual test.
with self.assertRaises(%(exceptioncode)s):
arrayfunc.add(self.inparray1bmin, self.minus1array, self.dataout)
##############################################################################
'''
# ==============================================================================
# ==============================================================================
# The template used to generate the tests for overflows using maximum value.
# Substitution keys: %(typelabel)s, %(typecode)s, %(decimalpoint)s,
# %(floatpad)s, %(exceptioncode)s.  Generates a "sub" overflow TestCase
# driving max value - -1 through all six call forms (a1-a6).
param_overflow_sub_max1_template = '''
##############################################################################
class overflow_signed_max1_%(typelabel)s(unittest.TestCase):
"""Test sub for value overflow for max values.
param_overflow_sub_max1_template
"""
########################################################
def setUp(self):
"""Initialise.
"""
arraysize = 200
self.MaxLimit = arrayfunc.arraylimits.%(typecode)s_max
self.MinLimit = arrayfunc.arraylimits.%(typecode)s_min
self.inparray1amax = array.array('%(typecode)s', [self.MaxLimit] * arraysize)
self.inparray1bmax = copy.copy(self.inparray1amax)
self.zero1array = array.array('%(typecode)s', [0%(decimalpoint)s] * arraysize)
self.minus1array = array.array('%(typecode)s', [-1%(floatpad)s] * arraysize)
self.dataout = array.array('%(typecode)s', itertools.repeat(0%(decimalpoint)s, arraysize))
########################################################
def test_sub_array_num_none_a1(self):
"""Test sub as *array-num-none* for max value - -1%(floatpad)s - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.sub(self.inparray1amax, 0%(decimalpoint)s)
# This is the actual test.
with self.assertRaises(%(exceptioncode)s):
arrayfunc.sub(self.inparray1bmax, -1%(floatpad)s)
########################################################
def test_sub_array_num_array_a2(self):
"""Test sub as *array-num-array* for max value - -1%(floatpad)s - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.sub(self.inparray1amax, 0%(decimalpoint)s, self.dataout)
# This is the actual test.
with self.assertRaises(%(exceptioncode)s):
arrayfunc.sub(self.inparray1bmax, -1%(floatpad)s, self.dataout)
########################################################
def test_sub_num_array_none_a3(self):
"""Test sub as *num-array-none* for max value - -1%(floatpad)s - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.sub(self.MaxLimit, self.zero1array)
# This is the actual test.
with self.assertRaises(%(exceptioncode)s):
arrayfunc.sub(self.MaxLimit, self.minus1array)
########################################################
def test_sub_num_array_array_a4(self):
"""Test sub as *num-array-array* for max value - -1%(floatpad)s - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.sub(self.MaxLimit, self.zero1array, self.dataout)
# This is the actual test.
with self.assertRaises(%(exceptioncode)s):
arrayfunc.sub(self.MaxLimit, self.minus1array, self.dataout)
########################################################
def test_sub_array_array_none_a5(self):
"""Test sub as *array-array-none* for max value - -1%(floatpad)s - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.sub(self.inparray1amax, self.zero1array)
# This is the actual test.
with self.assertRaises(%(exceptioncode)s):
arrayfunc.sub(self.inparray1bmax, self.minus1array)
########################################################
def test_sub_array_array_array_a6(self):
"""Test sub as *array-array-array* for max value - -1%(floatpad)s - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.sub(self.inparray1amax, self.zero1array, self.dataout)
# This is the actual test.
with self.assertRaises(%(exceptioncode)s):
arrayfunc.sub(self.inparray1bmax, self.minus1array, self.dataout)
##############################################################################
'''
# ==============================================================================
# ==============================================================================
# The template used to generate the tests for overflows using minimum value.
param_overflow_sub_min1_template = '''
##############################################################################
class overflow_signed_min1_%(typelabel)s(unittest.TestCase):
"""Test sub for value overflow for min values.
param_overflow_sub_min1_template
"""
########################################################
def setUp(self):
"""Initialise.
"""
arraysize = 200
self.MaxLimit = arrayfunc.arraylimits.%(typecode)s_max
self.MinLimit = arrayfunc.arraylimits.%(typecode)s_min
self.inparray1amin = array.array('%(typecode)s', [self.MinLimit] * arraysize)
self.inparray1bmin = copy.copy(self.inparray1amin)
self.zero1array = array.array('%(typecode)s', [0%(decimalpoint)s] * arraysize)
self.plus1array = array.array('%(typecode)s', [1%(floatpad)s] * arraysize)
self.dataout = array.array('%(typecode)s', itertools.repeat(0%(decimalpoint)s, arraysize))
########################################################
def test_sub_array_num_none_b1(self):
"""Test sub as *array-num-none* for min value - 1%(floatpad)s - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.sub(self.inparray1amin, 0%(decimalpoint)s)
# This is the actual test.
with self.assertRaises(%(exceptioncode)s):
arrayfunc.sub(self.inparray1bmin, 1%(floatpad)s)
########################################################
def test_sub_array_num_array_b2(self):
"""Test sub as *array-num-array* for min value - 1%(floatpad)s - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.sub(self.inparray1amin, 0%(decimalpoint)s, self.dataout)
# This is the actual test.
with self.assertRaises(%(exceptioncode)s):
arrayfunc.sub(self.inparray1bmin, 1%(floatpad)s, self.dataout)
########################################################
def test_sub_num_array_none_b3(self):
"""Test sub as *num-array-none* for min value - 1%(floatpad)s - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.sub(self.MinLimit, self.zero1array)
# This is the actual test.
with self.assertRaises(%(exceptioncode)s):
arrayfunc.sub(self.MinLimit, self.plus1array)
########################################################
def test_sub_num_array_array_b4(self):
"""Test sub as *num-array-array* for min value - 1%(floatpad)s - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.sub(self.MinLimit, self.zero1array, self.dataout)
# This is the actual test.
with self.assertRaises(%(exceptioncode)s):
arrayfunc.sub(self.MinLimit, self.plus1array, self.dataout)
########################################################
def test_sub_array_num_none_b5(self):
"""Test sub as *array-array-none* for min value - 1%(floatpad)s - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.sub(self.inparray1amin, self.zero1array)
# This is the actual test.
with self.assertRaises(%(exceptioncode)s):
arrayfunc.sub(self.inparray1bmin, self.plus1array)
########################################################
def test_sub_array_num_none_b6(self):
"""Test sub as *array-array-array* for min value - 1%(floatpad)s - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.sub(self.inparray1amin, self.zero1array, self.dataout)
# This is the actual test.
with self.assertRaises(%(exceptioncode)s):
arrayfunc.sub(self.inparray1bmin, self.plus1array, self.dataout)
##############################################################################
'''
# ==============================================================================
# ==============================================================================
# The template used to generate the tests for overflows using maximum value.
# Substitution keys: %(typelabel)s, %(typecode)s, %(decimalpoint)s,
# %(floatpad)s, %(exceptioncode)s.  Generates a "sub" overflow TestCase
# driving -2 - max value through all six call forms (c1-c6).
param_overflow_sub_1max_template = '''
##############################################################################
class overflow_signed_1max_%(typelabel)s(unittest.TestCase):
"""Test sub for value overflow for max values.
param_overflow_sub_1max_template
"""
########################################################
def setUp(self):
"""Initialise.
"""
arraysize = 200
self.MaxLimit = arrayfunc.arraylimits.%(typecode)s_max
self.MinLimit = arrayfunc.arraylimits.%(typecode)s_min
self.inparray2amax = array.array('%(typecode)s', [self.MaxLimit] * arraysize)
self.inparray2bmax = copy.copy(self.inparray2amax)
self.zero1array = array.array('%(typecode)s', [0%(decimalpoint)s] * arraysize)
self.minus1array = array.array('%(typecode)s', [-1%(floatpad)s] * arraysize)
self.minus2array = array.array('%(typecode)s', [-2%(floatpad)s] * arraysize)
self.dataout = array.array('%(typecode)s', itertools.repeat(0%(decimalpoint)s, arraysize))
########################################################
def test_sub_array_num_none_c1(self):
"""Test sub as *array-num-none* for -2%(floatpad)s - max value - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.sub(self.minus1array, 0%(decimalpoint)s)
# This is the actual test.
with self.assertRaises(%(exceptioncode)s):
arrayfunc.sub(self.minus2array, self.MaxLimit)
########################################################
def test_sub_array_num_array_c2(self):
"""Test sub as *array-num-array* for -2%(floatpad)s - max value - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.sub(self.minus1array, 0%(decimalpoint)s, self.dataout)
# This is the actual test.
with self.assertRaises(%(exceptioncode)s):
arrayfunc.sub(self.minus2array, self.MaxLimit, self.dataout)
########################################################
def test_sub_num_array_none_c3(self):
"""Test sub as *num-array-none* for -2%(floatpad)s - max value - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.sub(-2%(floatpad)s, self.zero1array)
# This is the actual test.
with self.assertRaises(%(exceptioncode)s):
arrayfunc.sub(-2%(floatpad)s, self.inparray2bmax)
########################################################
def test_sub_num_array_array_c4(self):
"""Test sub as *num-array-array* for -2%(floatpad)s - max value - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.sub(-2%(floatpad)s, self.zero1array, self.dataout)
# This is the actual test.
with self.assertRaises(%(exceptioncode)s):
arrayfunc.sub(-2%(floatpad)s, self.inparray2bmax, self.dataout)
########################################################
def test_sub_array_array_none_c5(self):
"""Test sub as *array-array-none* for -2%(floatpad)s - max value - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.sub(self.minus1array, self.zero1array)
# This is the actual test.
with self.assertRaises(%(exceptioncode)s):
arrayfunc.sub(self.minus2array, self.inparray2bmax)
########################################################
def test_sub_array_array_array_c6(self):
"""Test sub as *array-array-array* for -2%(floatpad)s - max value - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.sub(self.minus1array, self.zero1array, self.dataout)
# This is the actual test.
with self.assertRaises(%(exceptioncode)s):
arrayfunc.sub(self.minus2array, self.inparray2bmax, self.dataout)
##############################################################################
'''
# ==============================================================================
# ==============================================================================
# The template used to generate the tests for overflows using minimum value.
# Substitution keys: %(typelabel)s, %(typecode)s, %(decimalpoint)s,
# %(floatpad)s, %(exceptioncode)s.  Generates a "sub" overflow TestCase for
# subtracting the minimum value through all six call forms (d1-d6).
# NOTE(review): the generated docstrings say "1 - min value" but the failing
# calls actually compute 0 - min value (zero arrays / zero constant minus
# MinLimit) — presumably historical wording; the template body is runtime
# data, so confirm before editing its text.
param_overflow_sub_1min_template = '''
##############################################################################
class overflow_signed_1min_%(typelabel)s(unittest.TestCase):
"""Test sub for value overflow for min values.
param_overflow_sub_1min_template
"""
########################################################
def setUp(self):
"""Initialise.
"""
arraysize = 200
self.MaxLimit = arrayfunc.arraylimits.%(typecode)s_max
self.MinLimit = arrayfunc.arraylimits.%(typecode)s_min
self.inparray1amin = array.array('%(typecode)s', [self.MinLimit] * arraysize)
self.inparray1bmin = copy.copy(self.inparray1amin)
self.zero1array = array.array('%(typecode)s', [0%(decimalpoint)s] * arraysize)
self.zero2array = copy.copy(self.zero1array)
self.zero3array = copy.copy(self.zero1array)
self.dataout = array.array('%(typecode)s', itertools.repeat(0%(decimalpoint)s, arraysize))
########################################################
def test_sub_array_num_none_d1(self):
"""Test sub as *array-num-none* for 1%(floatpad)s - min value - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.sub(self.zero1array, 0%(decimalpoint)s)
# This is the actual test.
with self.assertRaises(%(exceptioncode)s):
arrayfunc.sub(self.zero2array, self.MinLimit)
########################################################
def test_sub_array_num_array_d2(self):
"""Test sub as *array-num-array* for 1%(floatpad)s - min value - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.sub(self.zero1array, 0%(decimalpoint)s, self.dataout)
# This is the actual test.
with self.assertRaises(%(exceptioncode)s):
arrayfunc.sub(self.zero2array, self.MinLimit, self.dataout)
########################################################
def test_sub_num_array_none_d3(self):
"""Test sub as *num-array-none* for 1%(floatpad)s - min value - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.sub(0%(decimalpoint)s, self.zero1array)
# This is the actual test.
with self.assertRaises(%(exceptioncode)s):
arrayfunc.sub(0%(decimalpoint)s, self.inparray1bmin)
########################################################
def test_sub_num_array_array_d4(self):
"""Test sub as *num-array-array* for 1%(floatpad)s - min value - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.sub(0%(decimalpoint)s, self.zero1array, self.dataout)
# This is the actual test.
with self.assertRaises(%(exceptioncode)s):
arrayfunc.sub(0%(decimalpoint)s, self.inparray1bmin, self.dataout)
########################################################
def test_sub_array_array_none_d5(self):
"""Test sub as *array-array-none* for 1%(floatpad)s - min value - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.sub(self.zero1array, self.zero3array)
# This is the actual test.
with self.assertRaises(%(exceptioncode)s):
arrayfunc.sub(self.zero2array, self.inparray1bmin)
########################################################
def test_sub_array_array_array_d6(self):
"""Test sub as *array-array-array* for 1%(floatpad)s - min value - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.sub(self.zero1array, self.zero3array, self.dataout)
# This is the actual test.
with self.assertRaises(%(exceptioncode)s):
arrayfunc.sub(self.zero2array, self.inparray1bmin, self.dataout)
##############################################################################
'''
# ==============================================================================
# ==============================================================================
# The template used to generate the tests for overflows using maximum value.
# Substitution keys: %(typelabel)s, %(typecode)s, %(decimalpoint)s,
# %(exceptioncode)s.  Generates a "mul" overflow TestCase driving
# max value * 2 through all six call forms (a1-a6).
param_overflow_mul_max2_template = '''
##############################################################################
class overflow_signed_max2_%(typelabel)s(unittest.TestCase):
"""Test mul for value overflow for max values.
param_overflow_mul_max2_template
"""
########################################################
def setUp(self):
"""Initialise.
"""
arraysize = 200
self.MaxLimit = arrayfunc.arraylimits.%(typecode)s_max
self.MinLimit = arrayfunc.arraylimits.%(typecode)s_min
self.inparray1amax = array.array('%(typecode)s', [self.MaxLimit] * arraysize)
self.inparray1bmax = copy.copy(self.inparray1amax)
self.zero1array = array.array('%(typecode)s', [0%(decimalpoint)s] * arraysize)
self.plus2array = array.array('%(typecode)s', [2%(decimalpoint)s] * arraysize)
self.dataout = array.array('%(typecode)s', itertools.repeat(0%(decimalpoint)s, arraysize))
########################################################
def test_mul_array_num_none_a1(self):
"""Test mul as *array-num-none* for max value * 2%(decimalpoint)s - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.mul(self.inparray1amax, 0%(decimalpoint)s)
# This is the actual test.
with self.assertRaises(%(exceptioncode)s):
arrayfunc.mul(self.inparray1bmax, 2%(decimalpoint)s)
########################################################
def test_mul_array_num_array_a2(self):
"""Test mul as *array-num-array* for max value * 2%(decimalpoint)s - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.mul(self.inparray1amax, 0%(decimalpoint)s, self.dataout)
# This is the actual test.
with self.assertRaises(%(exceptioncode)s):
arrayfunc.mul(self.inparray1bmax, 2%(decimalpoint)s, self.dataout)
########################################################
def test_mul_num_array_none_a3(self):
"""Test mul as *num-array-none* for max value * 2%(decimalpoint)s - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.mul(self.MaxLimit, self.zero1array)
# This is the actual test.
with self.assertRaises(%(exceptioncode)s):
arrayfunc.mul(self.MaxLimit, self.plus2array)
########################################################
def test_mul_num_array_array_a4(self):
"""Test mul as *num-array-array* for max value * 2%(decimalpoint)s - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.mul(self.MaxLimit, self.zero1array, self.dataout)
# This is the actual test.
with self.assertRaises(%(exceptioncode)s):
arrayfunc.mul(self.MaxLimit, self.plus2array, self.dataout)
########################################################
def test_mul_array_array_none_a5(self):
"""Test mul as *array-array-none* for max value * 2%(decimalpoint)s - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.mul(self.inparray1amax, self.zero1array)
# This is the actual test.
with self.assertRaises(%(exceptioncode)s):
arrayfunc.mul(self.inparray1bmax, self.plus2array)
########################################################
def test_mul_array_array_array_a6(self):
"""Test mul as *array-array-array* for max value * 2%(decimalpoint)s - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.mul(self.inparray1amax, self.zero1array, self.dataout)
# This is the actual test.
with self.assertRaises(%(exceptioncode)s):
arrayfunc.mul(self.inparray1bmax, self.plus2array, self.dataout)
##############################################################################
'''
# ==============================================================================
# ==============================================================================
# The template used to generate the tests for overflows using minimum value.
param_overflow_mul_min2_template = '''
##############################################################################
class overflow_signed_min2_%(typelabel)s(unittest.TestCase):
"""Test mul for value overflow for min values.
param_overflow_mul_min2_template
"""
########################################################
def setUp(self):
"""Initialise.
"""
arraysize = 200
self.MaxLimit = arrayfunc.arraylimits.%(typecode)s_max
self.MinLimit = arrayfunc.arraylimits.%(typecode)s_min
self.inparray1amin = array.array('%(typecode)s', [self.MinLimit] * arraysize)
self.inparray1bmin = copy.copy(self.inparray1amin)
self.zero1array = array.array('%(typecode)s', [0%(decimalpoint)s] * arraysize)
self.plus2array = array.array('%(typecode)s', [2%(decimalpoint)s] * arraysize)
self.dataout = array.array('%(typecode)s', itertools.repeat(0%(decimalpoint)s, arraysize))
########################################################
def test_mul_array_num_none_b1(self):
"""Test mul as *array-num-none* for min value * 2%(decimalpoint)s - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.mul(self.inparray1amin, 0%(decimalpoint)s)
# This is the actual test.
with self.assertRaises(%(exceptioncode)s):
arrayfunc.mul(self.inparray1bmin, 2%(decimalpoint)s)
########################################################
def test_mul_array_num_array_b2(self):
"""Test mul as *array-num-array* for min value * 2%(decimalpoint)s - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.mul(self.inparray1amin, 0%(decimalpoint)s, self.dataout)
# This is the actual test.
with self.assertRaises(%(exceptioncode)s):
arrayfunc.mul(self.inparray1bmin, 2%(decimalpoint)s, self.dataout)
########################################################
def test_mul_num_array_none_b3(self):
"""Test mul as *num-array-none* for min value * 2%(decimalpoint)s - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.mul(self.MinLimit, self.zero1array)
# This is the actual test.
with self.assertRaises(%(exceptioncode)s):
arrayfunc.mul(self.MinLimit, self.plus2array)
########################################################
def test_mul_num_array_array_b4(self):
"""Test mul as *num-array-array* for min value * 2%(decimalpoint)s - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.mul(self.MinLimit, self.zero1array, self.dataout)
# This is the actual test.
with self.assertRaises(%(exceptioncode)s):
arrayfunc.mul(self.MinLimit, self.plus2array, self.dataout)
########################################################
def test_mul_array_num_none_b5(self):
"""Test mul as *array-array-none* for min value * 2%(decimalpoint)s - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.mul(self.inparray1amin, self.zero1array)
# This is the actual test.
with self.assertRaises(%(exceptioncode)s):
arrayfunc.mul(self.inparray1bmin, self.plus2array)
########################################################
def test_mul_array_num_none_b6(self):
"""Test mul as *array-array-array* for min value * 2%(decimalpoint)s - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.mul(self.inparray1amin, self.zero1array, self.dataout)
# This is the actual test.
with self.assertRaises(%(exceptioncode)s):
arrayfunc.mul(self.inparray1bmin, self.plus2array, self.dataout)
##############################################################################
'''
# ==============================================================================
# ==============================================================================
# The template used to generate the tests for overflows when multiplying the
# maximum signed value by a negative number (max * -2, which underflows).
# Expanded once per array type; substitution keys: typecode, typelabel,
# decimalpoint (empty for integers, '.0' for floats), exceptioncode.
param_overflow_mul_max2neg_template = '''
##############################################################################
class overflow_signed_max2neg_%(typelabel)s(unittest.TestCase):
"""Test mul for value overflow for max values.
param_overflow_mul_max2neg_template
"""
########################################################
def setUp(self):
"""Initialise.
"""
arraysize = 200
self.MaxLimit = arrayfunc.arraylimits.%(typecode)s_max
self.MinLimit = arrayfunc.arraylimits.%(typecode)s_min
self.inparray2amax = array.array('%(typecode)s', [self.MaxLimit] * arraysize)
self.inparray2bmax = copy.copy(self.inparray2amax)
self.zero1array = array.array('%(typecode)s', [0%(decimalpoint)s] * arraysize)
self.minus1array = array.array('%(typecode)s', [-1%(decimalpoint)s] * arraysize)
self.minus2aarray = array.array('%(typecode)s', [-2%(decimalpoint)s] * arraysize)
self.minus2barray = array.array('%(typecode)s', [-2%(decimalpoint)s] * arraysize)
self.dataout = array.array('%(typecode)s', itertools.repeat(0%(decimalpoint)s, arraysize))
########################################################
def test_mul_array_num_none_c1(self):
"""Test mul as *array-num-none* for max value * -2%(decimalpoint)s - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.mul(self.inparray2amax, 0%(decimalpoint)s)
# This is the actual test.
with self.assertRaises(%(exceptioncode)s):
arrayfunc.mul(self.inparray2bmax, -2%(decimalpoint)s)
########################################################
def test_mul_array_num_array_c2(self):
"""Test mul as *array-num-array* for max value * -2%(decimalpoint)s - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.mul(self.inparray2amax, 0%(decimalpoint)s, self.dataout)
# This is the actual test.
with self.assertRaises(%(exceptioncode)s):
arrayfunc.mul(self.inparray2bmax, -2%(decimalpoint)s, self.dataout)
########################################################
def test_mul_num_array_none_c3(self):
"""Test mul as *num-array-none* for max value * -2%(decimalpoint)s - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.mul(-2%(decimalpoint)s, self.zero1array)
# This is the actual test.
with self.assertRaises(%(exceptioncode)s):
arrayfunc.mul(-2%(decimalpoint)s, self.inparray2bmax)
########################################################
def test_mul_num_array_array_c4(self):
"""Test mul as *num-array-array* for max value * -2%(decimalpoint)s - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.mul(-2%(decimalpoint)s, self.zero1array, self.dataout)
# This is the actual test.
with self.assertRaises(%(exceptioncode)s):
arrayfunc.mul(-2%(decimalpoint)s, self.inparray2bmax, self.dataout)
########################################################
def test_mul_array_array_none_c5(self):
"""Test mul as *array-array-none* for max value * -2%(decimalpoint)s - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.mul(self.minus1array, self.inparray2amax)
# This is the actual test.
with self.assertRaises(%(exceptioncode)s):
arrayfunc.mul(self.minus2barray, self.inparray2bmax)
########################################################
def test_mul_array_array_array_c6(self):
"""Test mul as *array-array-array* for max value * -2%(decimalpoint)s - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.mul(self.minus1array, self.inparray2amax, self.dataout)
# This is the actual test.
with self.assertRaises(%(exceptioncode)s):
arrayfunc.mul(self.minus2barray, self.inparray2bmax, self.dataout)
##############################################################################
'''
# ==============================================================================
# ==============================================================================
# The template used to generate the tests for overflows when multiplying the
# minimum signed value by a negative number (min * -2, which overflows).
# Expanded once per array type; substitution keys: typecode, typelabel,
# decimalpoint (empty for integers, '.0' for floats), exceptioncode.
param_overflow_mul_min2neg_template = '''
##############################################################################
class overflow_signed_min2neg_%(typelabel)s(unittest.TestCase):
"""Test mul for value overflow for min values.
param_overflow_mul_min2neg_template
"""
########################################################
def setUp(self):
"""Initialise.
"""
arraysize = 200
self.MaxLimit = arrayfunc.arraylimits.%(typecode)s_max
self.MinLimit = arrayfunc.arraylimits.%(typecode)s_min
self.inparray1amin = array.array('%(typecode)s', [self.MinLimit] * arraysize)
self.inparray1bmin = copy.copy(self.inparray1amin)
self.zero1array = array.array('%(typecode)s', [0%(decimalpoint)s] * arraysize)
self.zero2array = copy.copy(self.zero1array)
self.zero3array = copy.copy(self.zero1array)
self.minus2aarray = array.array('%(typecode)s', [-2%(decimalpoint)s] * arraysize)
self.minus2barray = array.array('%(typecode)s', [-2%(decimalpoint)s] * arraysize)
self.dataout = array.array('%(typecode)s', itertools.repeat(0%(decimalpoint)s, arraysize))
########################################################
def test_mul_array_num_none_d1(self):
"""Test mul as *array-num-none* for min value * -2%(decimalpoint)s - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.mul(self.inparray1amin, 0%(decimalpoint)s)
# This is the actual test.
with self.assertRaises(%(exceptioncode)s):
arrayfunc.mul(self.inparray1bmin, -2%(decimalpoint)s)
########################################################
def test_mul_array_num_array_d2(self):
"""Test mul as *array-num-array* for min value * -2%(decimalpoint)s - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.mul(self.inparray1amin, 0%(decimalpoint)s, self.dataout)
# This is the actual test.
with self.assertRaises(%(exceptioncode)s):
arrayfunc.mul(self.inparray1bmin, -2%(decimalpoint)s, self.dataout)
########################################################
def test_mul_num_array_none_d3(self):
"""Test mul as *num-array-none* for min value * -2%(decimalpoint)s - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.mul(0%(decimalpoint)s, self.inparray1amin)
# This is the actual test.
with self.assertRaises(%(exceptioncode)s):
arrayfunc.mul(-2%(decimalpoint)s, self.inparray1bmin)
########################################################
def test_mul_num_array_array_d4(self):
"""Test mul as *num-array-array* for min value * -2%(decimalpoint)s - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.mul(0%(decimalpoint)s, self.inparray1amin, self.dataout)
# This is the actual test.
with self.assertRaises(%(exceptioncode)s):
arrayfunc.mul(-2%(decimalpoint)s, self.inparray1bmin, self.dataout)
########################################################
def test_mul_array_array_none_d5(self):
"""Test mul as *array-array-none* for min value * -2%(decimalpoint)s - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.mul(self.minus2aarray, self.zero3array)
# This is the actual test.
with self.assertRaises(%(exceptioncode)s):
arrayfunc.mul(self.minus2barray, self.inparray1bmin)
########################################################
def test_mul_array_array_array_d6(self):
"""Test mul as *array-array-array* for min value * -2%(decimalpoint)s - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.mul(self.minus2aarray, self.zero3array, self.dataout)
# This is the actual test.
with self.assertRaises(%(exceptioncode)s):
arrayfunc.mul(self.minus2barray, self.inparray1bmin, self.dataout)
##############################################################################
'''
# ==============================================================================
# ==============================================================================
# The template used to generate the tests for overflows when multiplying the
# minimum signed value by -1 (min * -1 overflows two's-complement integers,
# since abs(min) == max + 1). Expanded once per array type; substitution
# keys: typecode, typelabel, decimalpoint, exceptioncode.
param_overflow_mul_min1neg_template = '''
##############################################################################
class overflow_signed_min1neg_%(typelabel)s(unittest.TestCase):
"""Test mul for value overflow for min values.
param_overflow_mul_min1neg_template
"""
########################################################
def setUp(self):
"""Initialise.
"""
arraysize = 200
self.MaxLimit = arrayfunc.arraylimits.%(typecode)s_max
self.MinLimit = arrayfunc.arraylimits.%(typecode)s_min
self.inparray1amin = array.array('%(typecode)s', [self.MinLimit] * arraysize)
self.inparray1bmin = copy.copy(self.inparray1amin)
self.zero1array = array.array('%(typecode)s', [0%(decimalpoint)s] * arraysize)
self.zero2array = copy.copy(self.zero1array)
self.zero3array = copy.copy(self.zero1array)
self.minus1aarray = array.array('%(typecode)s', [-1%(decimalpoint)s] * arraysize)
self.minus1barray = array.array('%(typecode)s', [-1%(decimalpoint)s] * arraysize)
self.dataout = array.array('%(typecode)s', itertools.repeat(0%(decimalpoint)s, arraysize))
########################################################
def test_mul_array_num_none_e1(self):
"""Test mul as *array-num-none* for min value * -1%(decimalpoint)s - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.mul(self.inparray1amin, 0%(decimalpoint)s)
# This is the actual test.
with self.assertRaises(%(exceptioncode)s):
arrayfunc.mul(self.inparray1bmin, -1%(decimalpoint)s)
########################################################
def test_mul_array_num_array_e2(self):
"""Test mul as *array-num-array* for min value * -1%(decimalpoint)s - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.mul(self.inparray1amin, 0%(decimalpoint)s, self.dataout)
# This is the actual test.
with self.assertRaises(%(exceptioncode)s):
arrayfunc.mul(self.inparray1bmin, -1%(decimalpoint)s, self.dataout)
########################################################
def test_mul_num_array_none_e3(self):
"""Test mul as *num-array-none* for min value * -1%(decimalpoint)s - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.mul(0%(decimalpoint)s, self.inparray1amin)
# This is the actual test.
with self.assertRaises(%(exceptioncode)s):
arrayfunc.mul(-1%(decimalpoint)s, self.inparray1bmin)
########################################################
def test_mul_num_array_array_e4(self):
"""Test mul as *num-array-array* for min value * -1%(decimalpoint)s - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.mul(0%(decimalpoint)s, self.inparray1amin, self.dataout)
# This is the actual test.
with self.assertRaises(%(exceptioncode)s):
arrayfunc.mul(-1%(decimalpoint)s, self.inparray1bmin, self.dataout)
########################################################
def test_mul_array_array_none_e5(self):
"""Test mul as *array-array-none* for min value * -1%(decimalpoint)s - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.mul(self.minus1aarray, self.zero3array)
# This is the actual test.
with self.assertRaises(%(exceptioncode)s):
arrayfunc.mul(self.minus1barray, self.inparray1bmin)
########################################################
def test_mul_array_array_array_e6(self):
"""Test mul as *array-array-array* for min value * -1%(decimalpoint)s - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.mul(self.minus1aarray, self.zero3array, self.dataout)
# This is the actual test.
with self.assertRaises(%(exceptioncode)s):
arrayfunc.mul(self.minus1barray, self.inparray1bmin, self.dataout)
##############################################################################
'''
# ==============================================================================
# ==============================================================================
# The template used to generate the tests for truediv divide-by-zero, which
# must raise ZeroDivisionError (overflow checking enabled, the default).
# Expanded once per array type; substitution keys: typecode, typelabel,
# decimalpoint.
param_overflow_truediv_divzero_template = '''
##############################################################################
class overflow_signed_divzero_%(typelabel)s(unittest.TestCase):
"""Test truediv for value divide by zero.
param_overflow_truediv_divzero_template
"""
########################################################
def setUp(self):
"""Initialise.
"""
arraysize = 200
self.MaxLimit = arrayfunc.arraylimits.%(typecode)s_max
self.MinLimit = arrayfunc.arraylimits.%(typecode)s_min
self.inparray1amax = array.array('%(typecode)s', [self.MaxLimit] * arraysize)
self.inparray1bmax = copy.copy(self.inparray1amax)
self.zero1array = array.array('%(typecode)s', [0%(decimalpoint)s] * arraysize)
self.plus1array = array.array('%(typecode)s', [1%(decimalpoint)s] * arraysize)
self.dataout = array.array('%(typecode)s', itertools.repeat(0%(decimalpoint)s, arraysize))
########################################################
def test_truediv_array_num_none_a1(self):
"""Test truediv as *array-num-none* for max value / 0%(decimalpoint)s - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.truediv(self.inparray1amax, 1%(decimalpoint)s)
# This is the actual test.
with self.assertRaises(ZeroDivisionError):
arrayfunc.truediv(self.inparray1bmax, 0%(decimalpoint)s)
########################################################
def test_truediv_array_num_array_a2(self):
"""Test truediv as *array-num-array* for max value / 0%(decimalpoint)s - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.truediv(self.inparray1amax, 1%(decimalpoint)s, self.dataout)
# This is the actual test.
with self.assertRaises(ZeroDivisionError):
arrayfunc.truediv(self.inparray1bmax, 0%(decimalpoint)s, self.dataout)
########################################################
def test_truediv_num_array_none_a3(self):
"""Test truediv as *num-array-none* for max value / 0%(decimalpoint)s - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.truediv(self.MaxLimit, self.plus1array)
# This is the actual test.
with self.assertRaises(ZeroDivisionError):
arrayfunc.truediv(self.MaxLimit, self.zero1array)
########################################################
def test_truediv_num_array_array_a4(self):
"""Test truediv as *num-array-array* for max value / 0%(decimalpoint)s - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.truediv(self.MaxLimit, self.plus1array, self.dataout)
# This is the actual test.
with self.assertRaises(ZeroDivisionError):
arrayfunc.truediv(self.MaxLimit, self.zero1array, self.dataout)
########################################################
def test_truediv_array_array_none_a5(self):
"""Test truediv as *array-array-none* for max value / 0%(decimalpoint)s - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.truediv(self.inparray1amax, self.plus1array)
# This is the actual test.
with self.assertRaises(ZeroDivisionError):
arrayfunc.truediv(self.inparray1bmax, self.zero1array)
########################################################
def test_truediv_array_array_array_a6(self):
"""Test truediv as *array-array-array* for max value / 0%(decimalpoint)s - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.truediv(self.inparray1amax, self.plus1array, self.dataout)
# This is the actual test.
with self.assertRaises(ZeroDivisionError):
arrayfunc.truediv(self.inparray1bmax, self.zero1array, self.dataout)
##############################################################################
'''
# ==============================================================================
# ==============================================================================
# The template used to generate the tests for truediv divide-by-zero with
# overflow checking disabled (matherrors=True). Division by zero must still
# raise ZeroDivisionError even when math error checking is turned off.
# Expanded once per array type; substitution keys: typecode, typelabel,
# decimalpoint.
param_overflow_truediv_divzero_errors_template = '''
##############################################################################
class overflow_signed_divzero_errors_%(typelabel)s(unittest.TestCase):
"""Test truediv for value divide by zero with overflow checking disabled.
param_overflow_truediv_divzero_errors_template
"""
########################################################
def setUp(self):
"""Initialise.
"""
arraysize = 200
self.MaxLimit = arrayfunc.arraylimits.%(typecode)s_max
self.MinLimit = arrayfunc.arraylimits.%(typecode)s_min
self.inparray1amax = array.array('%(typecode)s', [self.MaxLimit] * arraysize)
self.inparray1bmax = copy.copy(self.inparray1amax)
self.zero1array = array.array('%(typecode)s', [0%(decimalpoint)s] * arraysize)
self.plus1array = array.array('%(typecode)s', [1%(decimalpoint)s] * arraysize)
self.dataout = array.array('%(typecode)s', itertools.repeat(0%(decimalpoint)s, arraysize))
########################################################
def test_truediv_array_num_none_b1(self):
"""Test truediv as *array-num-none* for max value / 0%(decimalpoint)s - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.truediv(self.inparray1amax, 1%(decimalpoint)s, matherrors=True)
# This is the actual test.
with self.assertRaises(ZeroDivisionError):
arrayfunc.truediv(self.inparray1bmax, 0%(decimalpoint)s, matherrors=True)
########################################################
def test_truediv_array_num_array_b2(self):
"""Test truediv as *array-num-array* for max value / 0%(decimalpoint)s - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.truediv(self.inparray1amax, 1%(decimalpoint)s, self.dataout, matherrors=True)
# This is the actual test.
with self.assertRaises(ZeroDivisionError):
arrayfunc.truediv(self.inparray1bmax, 0%(decimalpoint)s, self.dataout, matherrors=True)
########################################################
def test_truediv_num_array_none_b3(self):
"""Test truediv as *num-array-none* for max value / 0%(decimalpoint)s - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.truediv(self.MaxLimit, self.plus1array, matherrors=True)
# This is the actual test.
with self.assertRaises(ZeroDivisionError):
arrayfunc.truediv(self.MaxLimit, self.zero1array, matherrors=True)
########################################################
def test_truediv_num_array_array_b4(self):
"""Test truediv as *num-array-array* for max value / 0%(decimalpoint)s - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.truediv(self.MaxLimit, self.plus1array, self.dataout, matherrors=True)
# This is the actual test.
with self.assertRaises(ZeroDivisionError):
arrayfunc.truediv(self.MaxLimit, self.zero1array, self.dataout, matherrors=True)
########################################################
def test_truediv_array_array_none_b5(self):
"""Test truediv as *array-array-none* for max value / 0%(decimalpoint)s - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.truediv(self.inparray1amax, self.plus1array, matherrors=True)
# This is the actual test.
with self.assertRaises(ZeroDivisionError):
arrayfunc.truediv(self.inparray1bmax, self.zero1array, matherrors=True)
########################################################
def test_truediv_array_array_array_b6(self):
"""Test truediv as *array-array-array* for max value / 0%(decimalpoint)s - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.truediv(self.inparray1amax, self.plus1array, self.dataout, matherrors=True)
# This is the actual test.
with self.assertRaises(ZeroDivisionError):
arrayfunc.truediv(self.inparray1bmax, self.zero1array, self.dataout, matherrors=True)
##############################################################################
'''
# ==============================================================================
# ==============================================================================
# The template used to generate the tests for overflows using minimum value.
param_overflow_truediv_mindivminus1_template = '''
##############################################################################
class overflow_signed_mindivminus1_%(typelabel)s(unittest.TestCase):
"""Test truediv for value overflow for min values divided by -1.
param_overflow_truediv_mindivminus1_template
"""
########################################################
def setUp(self):
"""Initialise.
"""
arraysize = 200
self.MaxLimit = arrayfunc.arraylimits.%(typecode)s_max
self.MinLimit = arrayfunc.arraylimits.%(typecode)s_min
self.inparray1amin = array.array('%(typecode)s', [self.MinLimit] * arraysize)
self.inparray1bmin = copy.copy(self.inparray1amin)
self.zero1array = array.array('%(typecode)s', [0%(decimalpoint)s] * arraysize)
self.plus1array = array.array('%(typecode)s', [1%(decimalpoint)s] * arraysize)
self.minus1array = array.array('%(typecode)s', [-1%(decimalpoint)s] * arraysize)
self.dataout = array.array('%(typecode)s', itertools.repeat(0%(decimalpoint)s, arraysize))
########################################################
def test_truediv_array_num_none_a1(self):
"""Test truediv as *array-num-none* for min value / -1%(decimalpoint)s - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.truediv(self.inparray1amin, 1%(decimalpoint)s)
# This is the actual test.
with self.assertRaises(%(exceptioncode)s):
arrayfunc.truediv(self.inparray1bmin, -1%(decimalpoint)s)
########################################################
def test_truediv_array_num_none_a2(self):
"""Test truediv as *array-num-none* for min value / -1%(decimalpoint)s matherrors=True - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.truediv(self.inparray1amin, 1%(decimalpoint)s, matherrors=True)
# This is the actual test.
with self.assertRaises(%(exceptioncode)s):
arrayfunc.truediv(self.inparray1bmin, -1%(decimalpoint)s, matherrors=True)
########################################################
def test_truediv_array_num_array_b1(self):
"""Test truediv as *array-num-array* for min value / -1%(decimalpoint)s - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.truediv(self.inparray1amin, 1%(decimalpoint)s, self.dataout)
# This is the actual test.
with self.assertRaises(%(exceptioncode)s):
arrayfunc.truediv(self.inparray1bmin, -1%(decimalpoint)s, self.dataout)
########################################################
def test_truediv_array_num_array_b2(self):
"""Test truediv as *array-num-array* for min value / -1%(decimalpoint)s matherrors=True - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.truediv(self.inparray1amin, 1%(decimalpoint)s, self.dataout, matherrors=True)
# This is the actual test.
with self.assertRaises(%(exceptioncode)s):
arrayfunc.truediv(self.inparray1bmin, -1%(decimalpoint)s, self.dataout, matherrors=True)
########################################################
def test_truediv_num_array_none_c1(self):
"""Test truediv as *num-array-none* for min value / -1%(decimalpoint)s - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.truediv(self.MinLimit, self.plus1array)
# This is the actual test.
with self.assertRaises(%(exceptioncode)s):
arrayfunc.truediv(self.MinLimit, self.minus1array)
########################################################
def test_truediv_num_array_none_c2(self):
"""Test truediv as *num-array-none* for min value / -1%(decimalpoint)s matherrors=True - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.truediv(self.MinLimit, self.plus1array, matherrors=True)
# This is the actual test.
with self.assertRaises(%(exceptioncode)s):
arrayfunc.truediv(self.MinLimit, self.minus1array, matherrors=True)
########################################################
def test_truediv_num_array_array_d1(self):
"""Test truediv as *num-array-array* for min value / -1%(decimalpoint)s - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.truediv(self.MinLimit, self.plus1array, self.dataout)
# This is the actual test.
with self.assertRaises(%(exceptioncode)s):
arrayfunc.truediv(self.MinLimit, self.minus1array, self.dataout)
########################################################
def test_truediv_num_array_array_d2(self):
"""Test truediv as *num-array-array* for min value / -1%(decimalpoint)s matherrors=True - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.truediv(self.MinLimit, self.plus1array, self.dataout, matherrors=True)
# This is the actual test.
with self.assertRaises(%(exceptioncode)s):
arrayfunc.truediv(self.MinLimit, self.minus1array, self.dataout, matherrors=True)
########################################################
def test_truediv_array_num_none_e1(self):
"""Test truediv as *array-array-none* for min value / -1%(decimalpoint)s - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.truediv(self.inparray1amin, self.plus1array)
# This is the actual test.
with self.assertRaises(%(exceptioncode)s):
arrayfunc.truediv(self.inparray1bmin, self.minus1array)
########################################################
def test_truediv_array_num_none_e2(self):
"""Test truediv as *array-array-none* for min value / -1%(decimalpoint)s matherrors=True - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.truediv(self.inparray1amin, self.plus1array, matherrors=True)
# This is the actual test.
with self.assertRaises(%(exceptioncode)s):
arrayfunc.truediv(self.inparray1bmin, self.minus1array, matherrors=True)
########################################################
def test_truediv_array_num_none_f1(self):
"""Test truediv as *array-array-array* for min value / -1%(decimalpoint)s - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.truediv(self.inparray1amin, self.plus1array, self.dataout)
# This is the actual test.
with self.assertRaises(%(exceptioncode)s):
arrayfunc.truediv(self.inparray1bmin, self.minus1array, self.dataout)
########################################################
def test_truediv_array_num_none_f2(self):
"""Test truediv as *array-array-array* for min value / -1%(decimalpoint)s matherrors=True - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.truediv(self.inparray1amin, self.plus1array, self.dataout, matherrors=True)
# This is the actual test.
with self.assertRaises(%(exceptioncode)s):
arrayfunc.truediv(self.inparray1bmin, self.minus1array, self.dataout, matherrors=True)
##############################################################################
'''
# ==============================================================================
# ==============================================================================
# The template used to generate the tests for floordiv divide-by-zero, which
# must raise ZeroDivisionError (overflow checking enabled, the default).
# Expanded once per array type; substitution keys: typecode, typelabel,
# decimalpoint.
param_overflow_floordiv_divzero_template = '''
##############################################################################
class overflow_signed_divzero_%(typelabel)s(unittest.TestCase):
"""Test floordiv for value divide by zero.
param_overflow_floordiv_divzero_template
"""
########################################################
def setUp(self):
"""Initialise.
"""
arraysize = 200
self.MaxLimit = arrayfunc.arraylimits.%(typecode)s_max
self.MinLimit = arrayfunc.arraylimits.%(typecode)s_min
self.inparray1amax = array.array('%(typecode)s', [self.MaxLimit] * arraysize)
self.inparray1bmax = copy.copy(self.inparray1amax)
self.zero1array = array.array('%(typecode)s', [0%(decimalpoint)s] * arraysize)
self.plus1array = array.array('%(typecode)s', [1%(decimalpoint)s] * arraysize)
self.dataout = array.array('%(typecode)s', itertools.repeat(0%(decimalpoint)s, arraysize))
########################################################
def test_floordiv_array_num_none_a1(self):
"""Test floordiv as *array-num-none* for max value // 0%(decimalpoint)s - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.floordiv(self.inparray1amax, 1%(decimalpoint)s)
# This is the actual test.
with self.assertRaises(ZeroDivisionError):
arrayfunc.floordiv(self.inparray1bmax, 0%(decimalpoint)s)
########################################################
def test_floordiv_array_num_array_a2(self):
"""Test floordiv as *array-num-array* for max value // 0%(decimalpoint)s - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.floordiv(self.inparray1amax, 1%(decimalpoint)s, self.dataout)
# This is the actual test.
with self.assertRaises(ZeroDivisionError):
arrayfunc.floordiv(self.inparray1bmax, 0%(decimalpoint)s, self.dataout)
########################################################
def test_floordiv_num_array_none_a3(self):
"""Test floordiv as *num-array-none* for max value // 0%(decimalpoint)s - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.floordiv(self.MaxLimit, self.plus1array)
# This is the actual test.
with self.assertRaises(ZeroDivisionError):
arrayfunc.floordiv(self.MaxLimit, self.zero1array)
########################################################
def test_floordiv_num_array_array_a4(self):
"""Test floordiv as *num-array-array* for max value // 0%(decimalpoint)s - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.floordiv(self.MaxLimit, self.plus1array, self.dataout)
# This is the actual test.
with self.assertRaises(ZeroDivisionError):
arrayfunc.floordiv(self.MaxLimit, self.zero1array, self.dataout)
########################################################
def test_floordiv_array_array_none_a5(self):
"""Test floordiv as *array-array-none* for max value // 0%(decimalpoint)s - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.floordiv(self.inparray1amax, self.plus1array)
# This is the actual test.
with self.assertRaises(ZeroDivisionError):
arrayfunc.floordiv(self.inparray1bmax, self.zero1array)
########################################################
def test_floordiv_array_array_array_a6(self):
"""Test floordiv as *array-array-array* for max value // 0%(decimalpoint)s - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.floordiv(self.inparray1amax, self.plus1array, self.dataout)
# This is the actual test.
with self.assertRaises(ZeroDivisionError):
arrayfunc.floordiv(self.inparray1bmax, self.zero1array, self.dataout)
##############################################################################
'''
# ==============================================================================
# ==============================================================================
# The template used to generate the tests for overflows with divide by zero with
# overflow checking disabled.
# Template substitution keys: typelabel, typecode, decimalpoint.
# Generates a unittest.TestCase checking that floordiv still raises
# ZeroDivisionError even when matherrors=True disables overflow checking.
param_overflow_floordiv_divzero_errors_template = '''
##############################################################################
class overflow_signed_divzero_errors_%(typelabel)s(unittest.TestCase):
"""Test floordiv for value divide by zero with overflow checking disabled.
param_overflow_floordiv_divzero_errors_template
"""
########################################################
def setUp(self):
"""Initialise.
"""
arraysize = 200
self.MaxLimit = arrayfunc.arraylimits.%(typecode)s_max
self.MinLimit = arrayfunc.arraylimits.%(typecode)s_min
self.inparray1amax = array.array('%(typecode)s', [self.MaxLimit] * arraysize)
self.inparray1bmax = copy.copy(self.inparray1amax)
self.zero1array = array.array('%(typecode)s', [0%(decimalpoint)s] * arraysize)
self.plus1array = array.array('%(typecode)s', [1%(decimalpoint)s] * arraysize)
self.dataout = array.array('%(typecode)s', itertools.repeat(0%(decimalpoint)s, arraysize))
########################################################
def test_floordiv_array_num_none_b1(self):
"""Test floordiv as *array-num-none* for max value // 0%(decimalpoint)s - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.floordiv(self.inparray1amax, 1%(decimalpoint)s, matherrors=True)
# This is the actual test.
with self.assertRaises(ZeroDivisionError):
arrayfunc.floordiv(self.inparray1bmax, 0%(decimalpoint)s, matherrors=True)
########################################################
def test_floordiv_array_num_array_b2(self):
"""Test floordiv as *array-num-array* for max value // 0%(decimalpoint)s - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.floordiv(self.inparray1amax, 1%(decimalpoint)s, self.dataout, matherrors=True)
# This is the actual test.
with self.assertRaises(ZeroDivisionError):
arrayfunc.floordiv(self.inparray1bmax, 0%(decimalpoint)s, self.dataout, matherrors=True)
########################################################
def test_floordiv_num_array_none_b3(self):
"""Test floordiv as *num-array-none* for max value // 0%(decimalpoint)s - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.floordiv(self.MaxLimit, self.plus1array, matherrors=True)
# This is the actual test.
with self.assertRaises(ZeroDivisionError):
arrayfunc.floordiv(self.MaxLimit, self.zero1array, matherrors=True)
########################################################
def test_floordiv_num_array_array_b4(self):
"""Test floordiv as *num-array-array* for max value // 0%(decimalpoint)s - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.floordiv(self.MaxLimit, self.plus1array, self.dataout, matherrors=True)
# This is the actual test.
with self.assertRaises(ZeroDivisionError):
arrayfunc.floordiv(self.MaxLimit, self.zero1array, self.dataout, matherrors=True)
########################################################
def test_floordiv_array_array_none_b5(self):
"""Test floordiv as *array-array-none* for max value // 0%(decimalpoint)s - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.floordiv(self.inparray1amax, self.plus1array, matherrors=True)
# This is the actual test.
with self.assertRaises(ZeroDivisionError):
arrayfunc.floordiv(self.inparray1bmax, self.zero1array, matherrors=True)
########################################################
def test_floordiv_array_array_array_b6(self):
"""Test floordiv as *array-array-array* for max value // 0%(decimalpoint)s - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.floordiv(self.inparray1amax, self.plus1array, self.dataout, matherrors=True)
# This is the actual test.
with self.assertRaises(ZeroDivisionError):
arrayfunc.floordiv(self.inparray1bmax, self.zero1array, self.dataout, matherrors=True)
##############################################################################
'''
# ==============================================================================
# ==============================================================================
# The template used to generate the tests for overflows using minimum value.
param_overflow_floordiv_mindivminus1_template = '''
##############################################################################
class overflow_signed_mindivminus1_%(typelabel)s(unittest.TestCase):
"""Test floordiv for value overflow for min values divided by -1.
param_overflow_floordiv_mindivminus1_template
"""
########################################################
def setUp(self):
"""Initialise.
"""
arraysize = 200
self.MaxLimit = arrayfunc.arraylimits.%(typecode)s_max
self.MinLimit = arrayfunc.arraylimits.%(typecode)s_min
self.inparray1amin = array.array('%(typecode)s', [self.MinLimit] * arraysize)
self.inparray1bmin = copy.copy(self.inparray1amin)
self.zero1array = array.array('%(typecode)s', [0%(decimalpoint)s] * arraysize)
self.plus1array = array.array('%(typecode)s', [1%(decimalpoint)s] * arraysize)
self.minus1array = array.array('%(typecode)s', [-1%(decimalpoint)s] * arraysize)
self.dataout = array.array('%(typecode)s', itertools.repeat(0%(decimalpoint)s, arraysize))
########################################################
def test_floordiv_array_num_none_a1(self):
"""Test floordiv as *array-num-none* for min value // -1%(decimalpoint)s - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.floordiv(self.inparray1amin, 1%(decimalpoint)s)
# This is the actual test.
with self.assertRaises(%(exceptioncode)s):
arrayfunc.floordiv(self.inparray1bmin, -1%(decimalpoint)s)
########################################################
def test_floordiv_array_num_none_a2(self):
"""Test floordiv as *array-num-none* for min value // -1%(decimalpoint)s matherrors=True - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.floordiv(self.inparray1amin, 1%(decimalpoint)s, matherrors=True)
# This is the actual test.
with self.assertRaises(%(exceptioncode)s):
arrayfunc.floordiv(self.inparray1bmin, -1%(decimalpoint)s, matherrors=True)
########################################################
def test_floordiv_array_num_array_b1(self):
"""Test floordiv as *array-num-array* for min value // -1%(decimalpoint)s - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.floordiv(self.inparray1amin, 1%(decimalpoint)s, self.dataout)
# This is the actual test.
with self.assertRaises(%(exceptioncode)s):
arrayfunc.floordiv(self.inparray1bmin, -1%(decimalpoint)s, self.dataout)
########################################################
def test_floordiv_array_num_array_b2(self):
"""Test floordiv as *array-num-array* for min value // -1%(decimalpoint)s matherrors=True - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.floordiv(self.inparray1amin, 1%(decimalpoint)s, self.dataout, matherrors=True)
# This is the actual test.
with self.assertRaises(%(exceptioncode)s):
arrayfunc.floordiv(self.inparray1bmin, -1%(decimalpoint)s, self.dataout, matherrors=True)
########################################################
def test_floordiv_num_array_none_c1(self):
"""Test floordiv as *num-array-none* for min value // -1%(decimalpoint)s - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.floordiv(self.MinLimit, self.plus1array)
# This is the actual test.
with self.assertRaises(%(exceptioncode)s):
arrayfunc.floordiv(self.MinLimit, self.minus1array)
########################################################
def test_floordiv_num_array_none_c2(self):
"""Test floordiv as *num-array-none* for min value // -1%(decimalpoint)s matherrors=True - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.floordiv(self.MinLimit, self.plus1array, matherrors=True)
# This is the actual test.
with self.assertRaises(%(exceptioncode)s):
arrayfunc.floordiv(self.MinLimit, self.minus1array, matherrors=True)
########################################################
def test_floordiv_num_array_array_d1(self):
"""Test floordiv as *num-array-array* for min value // -1%(decimalpoint)s - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.floordiv(self.MinLimit, self.plus1array, self.dataout)
# This is the actual test.
with self.assertRaises(%(exceptioncode)s):
arrayfunc.floordiv(self.MinLimit, self.minus1array, self.dataout)
########################################################
def test_floordiv_num_array_array_d2(self):
"""Test floordiv as *num-array-array* for min value // -1%(decimalpoint)s matherrors=True - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.floordiv(self.MinLimit, self.plus1array, self.dataout, matherrors=True)
# This is the actual test.
with self.assertRaises(%(exceptioncode)s):
arrayfunc.floordiv(self.MinLimit, self.minus1array, self.dataout, matherrors=True)
########################################################
def test_floordiv_array_num_none_e1(self):
"""Test floordiv as *array-array-none* for min value // -1%(decimalpoint)s - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.floordiv(self.inparray1amin, self.plus1array)
# This is the actual test.
with self.assertRaises(%(exceptioncode)s):
arrayfunc.floordiv(self.inparray1bmin, self.minus1array)
########################################################
def test_floordiv_array_num_none_e2(self):
"""Test floordiv as *array-array-none* for min value // -1%(decimalpoint)s matherrors=True - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.floordiv(self.inparray1amin, self.plus1array, matherrors=True)
# This is the actual test.
with self.assertRaises(%(exceptioncode)s):
arrayfunc.floordiv(self.inparray1bmin, self.minus1array, matherrors=True)
########################################################
def test_floordiv_array_num_none_f1(self):
"""Test floordiv as *array-array-array* for min value // -1%(decimalpoint)s - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.floordiv(self.inparray1amin, self.plus1array, self.dataout)
# This is the actual test.
with self.assertRaises(%(exceptioncode)s):
arrayfunc.floordiv(self.inparray1bmin, self.minus1array, self.dataout)
########################################################
def test_floordiv_array_num_none_f2(self):
"""Test floordiv as *array-array-array* for min value // -1%(decimalpoint)s matherrors=True - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.floordiv(self.inparray1amin, self.plus1array, self.dataout, matherrors=True)
# This is the actual test.
with self.assertRaises(%(exceptioncode)s):
arrayfunc.floordiv(self.inparray1bmin, self.minus1array, self.dataout, matherrors=True)
##############################################################################
'''
# ==============================================================================
# ==============================================================================
# The template used to generate the tests for overflows with divide by zero.
# Template substitution keys: typelabel, typecode, decimalpoint.
# Generates a unittest.TestCase checking that mod raises ZeroDivisionError
# for x %% 0 in all six call-form combinations. '%%' inside the template
# escapes a literal '%' for printf-style substitution.
param_overflow_mod_divzero_template = '''
##############################################################################
class overflow_signed_divzero_%(typelabel)s(unittest.TestCase):
"""Test mod for value divide by zero.
param_overflow_mod_divzero_template
"""
########################################################
def setUp(self):
"""Initialise.
"""
arraysize = 200
self.MaxLimit = arrayfunc.arraylimits.%(typecode)s_max
self.MinLimit = arrayfunc.arraylimits.%(typecode)s_min
self.inparray1amax = array.array('%(typecode)s', [self.MaxLimit] * arraysize)
self.inparray1bmax = copy.copy(self.inparray1amax)
self.zero1array = array.array('%(typecode)s', [0%(decimalpoint)s] * arraysize)
self.plus1array = array.array('%(typecode)s', [1%(decimalpoint)s] * arraysize)
self.dataout = array.array('%(typecode)s', itertools.repeat(0%(decimalpoint)s, arraysize))
########################################################
def test_mod_array_num_none_a1(self):
"""Test mod as *array-num-none* for max value %% 0%(decimalpoint)s - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.mod(self.inparray1amax, 1%(decimalpoint)s)
# This is the actual test.
with self.assertRaises(ZeroDivisionError):
arrayfunc.mod(self.inparray1bmax, 0%(decimalpoint)s)
########################################################
def test_mod_array_num_array_a2(self):
"""Test mod as *array-num-array* for max value %% 0%(decimalpoint)s - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.mod(self.inparray1amax, 1%(decimalpoint)s, self.dataout)
# This is the actual test.
with self.assertRaises(ZeroDivisionError):
arrayfunc.mod(self.inparray1bmax, 0%(decimalpoint)s, self.dataout)
########################################################
def test_mod_num_array_none_a3(self):
"""Test mod as *num-array-none* for max value %% 0%(decimalpoint)s - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.mod(self.MaxLimit, self.plus1array)
# This is the actual test.
with self.assertRaises(ZeroDivisionError):
arrayfunc.mod(self.MaxLimit, self.zero1array)
########################################################
def test_mod_num_array_array_a4(self):
"""Test mod as *num-array-array* for max value %% 0%(decimalpoint)s - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.mod(self.MaxLimit, self.plus1array, self.dataout)
# This is the actual test.
with self.assertRaises(ZeroDivisionError):
arrayfunc.mod(self.MaxLimit, self.zero1array, self.dataout)
########################################################
def test_mod_array_array_none_a5(self):
"""Test mod as *array-array-none* for max value %% 0%(decimalpoint)s - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.mod(self.inparray1amax, self.plus1array)
# This is the actual test.
with self.assertRaises(ZeroDivisionError):
arrayfunc.mod(self.inparray1bmax, self.zero1array)
########################################################
def test_mod_array_array_array_a6(self):
"""Test mod as *array-array-array* for max value %% 0%(decimalpoint)s - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.mod(self.inparray1amax, self.plus1array, self.dataout)
# This is the actual test.
with self.assertRaises(ZeroDivisionError):
arrayfunc.mod(self.inparray1bmax, self.zero1array, self.dataout)
##############################################################################
'''
# ==============================================================================
# ==============================================================================
# The template used to generate the tests for overflows with divide by zero with
# overflow checking disabled.
# Template substitution keys: typelabel, typecode, decimalpoint.
# Generates a unittest.TestCase checking that mod still raises
# ZeroDivisionError even when matherrors=True disables overflow checking.
# '%%' inside the template escapes a literal '%' for printf-style substitution.
param_overflow_mod_divzero_errors_template = '''
##############################################################################
class overflow_signed_divzero_errors_%(typelabel)s(unittest.TestCase):
"""Test mod for value divide by zero with overflow checking disabled.
param_overflow_mod_divzero_errors_template
"""
########################################################
def setUp(self):
"""Initialise.
"""
arraysize = 200
self.MaxLimit = arrayfunc.arraylimits.%(typecode)s_max
self.MinLimit = arrayfunc.arraylimits.%(typecode)s_min
self.inparray1amax = array.array('%(typecode)s', [self.MaxLimit] * arraysize)
self.inparray1bmax = copy.copy(self.inparray1amax)
self.zero1array = array.array('%(typecode)s', [0%(decimalpoint)s] * arraysize)
self.plus1array = array.array('%(typecode)s', [1%(decimalpoint)s] * arraysize)
self.dataout = array.array('%(typecode)s', itertools.repeat(0%(decimalpoint)s, arraysize))
########################################################
def test_mod_array_num_none_b1(self):
"""Test mod as *array-num-none* for max value %% 0%(decimalpoint)s - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.mod(self.inparray1amax, 1%(decimalpoint)s, matherrors=True)
# This is the actual test.
with self.assertRaises(ZeroDivisionError):
arrayfunc.mod(self.inparray1bmax, 0%(decimalpoint)s, matherrors=True)
########################################################
def test_mod_array_num_array_b2(self):
"""Test mod as *array-num-array* for max value %% 0%(decimalpoint)s - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.mod(self.inparray1amax, 1%(decimalpoint)s, self.dataout, matherrors=True)
# This is the actual test.
with self.assertRaises(ZeroDivisionError):
arrayfunc.mod(self.inparray1bmax, 0%(decimalpoint)s, self.dataout, matherrors=True)
########################################################
def test_mod_num_array_none_b3(self):
"""Test mod as *num-array-none* for max value %% 0%(decimalpoint)s - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.mod(self.MaxLimit, self.plus1array, matherrors=True)
# This is the actual test.
with self.assertRaises(ZeroDivisionError):
arrayfunc.mod(self.MaxLimit, self.zero1array, matherrors=True)
########################################################
def test_mod_num_array_array_b4(self):
"""Test mod as *num-array-array* for max value %% 0%(decimalpoint)s - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.mod(self.MaxLimit, self.plus1array, self.dataout, matherrors=True)
# This is the actual test.
with self.assertRaises(ZeroDivisionError):
arrayfunc.mod(self.MaxLimit, self.zero1array, self.dataout, matherrors=True)
########################################################
def test_mod_array_array_none_b5(self):
"""Test mod as *array-array-none* for max value %% 0%(decimalpoint)s - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.mod(self.inparray1amax, self.plus1array, matherrors=True)
# This is the actual test.
with self.assertRaises(ZeroDivisionError):
arrayfunc.mod(self.inparray1bmax, self.zero1array, matherrors=True)
########################################################
def test_mod_array_array_array_b6(self):
"""Test mod as *array-array-array* for max value %% 0%(decimalpoint)s - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.mod(self.inparray1amax, self.plus1array, self.dataout, matherrors=True)
# This is the actual test.
with self.assertRaises(ZeroDivisionError):
arrayfunc.mod(self.inparray1bmax, self.zero1array, self.dataout, matherrors=True)
##############################################################################
'''
# ==============================================================================
# ==============================================================================
# The template used to generate the tests for inf, -inf in data arrays
# when exceptions are expected. This is for "mod" only.
# Template substitution keys: funclabel, funcname, errorlabel, errordata,
# typelabel, typecode, test_op_x, test_op_y, pyoperator.
# Generates a unittest.TestCase for "mod" with inf/-inf values in the data:
# an ArithmeticError is expected with checking on; with matherrors=True the
# result is compared against Python's own operator (or nan where noted).
# '%%' inside the template escapes a literal '%' for printf-style substitution.
inf_mod_data_error_template = '''
##############################################################################
class %(funclabel)s_%(errorlabel)s_errors_%(typelabel)s(unittest.TestCase):
"""Test %(funclabel)s for basic general function operation using parameter %(errordata)s.
inf_mod_data_error_template
"""
##############################################################################
def FloatassertEqual(self, dataoutitem, expecteditem, msg=None):
"""This function is patched into assertEqual to allow testing for
the floating point special values NaN, Inf, and -Inf.
"""
# NaN cannot be compared using normal means.
if math.isnan(dataoutitem) and math.isnan(expecteditem):
pass
# Anything else can be compared normally.
else:
if not math.isclose(expecteditem, dataoutitem, rel_tol=0.01, abs_tol=0.0):
raise self.failureException('%%0.3f != %%0.3f' %% (expecteditem, dataoutitem))
########################################################
def setUp(self):
"""Initialise.
"""
# This is active for float numbers only.
self.addTypeEqualityFunc(float, self.FloatassertEqual)
self.dataok1 = array.array('%(typecode)s', [%(test_op_x)s])
self.dataok2 = array.array('%(typecode)s', [x for (x,y) in zip(itertools.cycle([%(test_op_y)s]), self.dataok1)])
arraysize = len(self.dataok1)
self.dataout = array.array('%(typecode)s', itertools.repeat(0.0, arraysize))
self.errordata = array.array('%(typecode)s', [float('%(errordata)s')] * arraysize)
########################################################
def test_%(funclabel)s_%(errorlabel)s_array_num_none_a1(self):
"""Test %(funclabel)s as *array-num-none* for %(errordata)s - Array code %(typelabel)s.
"""
for testval in [%(test_op_y)s]:
with self.subTest(msg='Failed with parameter', testval = testval):
# Copy the array so we don't change the original data.
dataok1 = copy.copy(self.dataok1)
errordata = copy.copy(self.errordata)
# This version is expected to pass.
arrayfunc.%(funcname)s(dataok1, testval)
# This is the actual test.
with self.assertRaises(ArithmeticError):
arrayfunc.%(funcname)s(errordata, testval)
########################################################
def test_%(funclabel)s_%(errorlabel)s_array_num_none_a2(self):
"""Test %(funclabel)s as *array-num-none* for %(errordata)s with error check off - Array code %(typelabel)s.
"""
for testval in [%(test_op_y)s]:
with self.subTest(msg='Failed with parameter', testval = testval):
# Copy the array so we don't change the original data.
errordata = copy.copy(self.errordata)
expected = [x %(pyoperator)s testval for x in self.errordata]
arrayfunc.%(funcname)s(errordata, testval, matherrors=True)
for dataoutitem, expecteditem in zip(errordata, expected):
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(dataoutitem, expecteditem)
########################################################
def test_%(funclabel)s_%(errorlabel)s_array_num_array_b1(self):
"""Test %(funclabel)s as *array-num-array* for %(errordata)s - Array code %(typelabel)s.
"""
for testval in [%(test_op_y)s]:
with self.subTest(msg='Failed with parameter', testval = testval):
# Copy the array so we don't change the original data.
dataok1 = copy.copy(self.dataok1)
errordata = copy.copy(self.errordata)
# This version is expected to pass.
arrayfunc.%(funcname)s(dataok1, testval, self.dataout)
# This is the actual test.
with self.assertRaises(ArithmeticError):
arrayfunc.%(funcname)s(errordata, testval, self.dataout)
########################################################
def test_%(funclabel)s_%(errorlabel)s_array_num_array_b2(self):
"""Test %(funclabel)s as *array-num-array* for %(errordata)s with error check off - Array code %(typelabel)s.
"""
for testval in [%(test_op_y)s]:
with self.subTest(msg='Failed with parameter', testval = testval):
expected = [x %(pyoperator)s testval for x in self.errordata]
arrayfunc.%(funcname)s(self.errordata, testval, self.dataout, matherrors=True)
for dataoutitem, expecteditem in zip(self.dataout, expected):
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(dataoutitem, expecteditem)
########################################################
def test_%(funclabel)s_%(errorlabel)s_num_array_none_c1(self):
"""Test %(funclabel)s as *num-array-none* for %(errordata)s - Array code %(typelabel)s.
"""
for testval in [%(test_op_x)s]:
with self.subTest(msg='Failed with parameter', testval = testval):
# Copy the array so we don't change the original data.
dataok2 = copy.copy(self.dataok2)
errordata = copy.copy(self.errordata)
# This version is expected to pass.
arrayfunc.%(funcname)s(testval, dataok2)
# This is the actual test.
with self.assertRaises(ArithmeticError):
arrayfunc.%(funcname)s(testval, errordata)
########################################################
def test_%(funclabel)s_%(errorlabel)s_num_array_none_c2(self):
"""Test %(funclabel)s as *num-array-none* for %(errordata)s with error check off - Array code %(typelabel)s.
"""
for testval in [%(test_op_x)s]:
with self.subTest(msg='Failed with parameter', testval = testval):
# Copy the array so we don't change the original data.
errordata = copy.copy(self.errordata)
# This test results in "nan".
expected = [math.nan] * len(self.errordata)
arrayfunc.%(funcname)s(testval, errordata, matherrors=True)
for dataoutitem, expecteditem in zip(errordata, expected):
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(dataoutitem, expecteditem)
########################################################
def test_%(funclabel)s_%(errorlabel)s_num_array_array_d1(self):
"""Test %(funclabel)s as *num-array-array* for %(errordata)s - Array code %(typelabel)s.
"""
for testval in [%(test_op_x)s]:
with self.subTest(msg='Failed with parameter', testval = testval):
# This version is expected to pass.
arrayfunc.%(funcname)s(testval, self.dataok2, self.dataout)
# This is the actual test.
with self.assertRaises(ArithmeticError):
arrayfunc.%(funcname)s(testval, self.errordata, self.dataout)
########################################################
def test_%(funclabel)s_%(errorlabel)s_num_array_array_d2(self):
"""Test %(funclabel)s as *num-array-array* for %(errordata)s with error check off - Array code %(typelabel)s.
"""
for testval in [%(test_op_x)s]:
with self.subTest(msg='Failed with parameter', testval = testval):
# This test results in "nan".
expected = [math.nan] * len(self.errordata)
arrayfunc.%(funcname)s(testval, self.errordata, self.dataout, matherrors=True)
for dataoutitem, expecteditem in zip(self.dataout, expected):
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(dataoutitem, expecteditem)
########################################################
def test_%(funclabel)s_%(errorlabel)s_array_array_none_e1(self):
"""Test %(funclabel)s as *array-array-none* for %(errordata)s - Array code %(typelabel)s.
"""
# Copy the array so we don't change the original data.
dataok1 = copy.copy(self.dataok1)
dataok2 = copy.copy(self.dataok2)
# This version is expected to pass.
arrayfunc.%(funcname)s(dataok1, dataok2)
# Copy the array so we don't change the original data.
dataok1 = copy.copy(self.dataok1)
# This is the actual test.
with self.assertRaises(ArithmeticError):
arrayfunc.%(funcname)s(dataok1, self.errordata)
########################################################
def test_%(funclabel)s_%(errorlabel)s_array_array_none_e2(self):
"""Test %(funclabel)s as *array-array-none* for %(errordata)s with error check off - Array code %(typelabel)s.
"""
# This test results in "nan".
expected = [math.nan] * len(self.errordata)
arrayfunc.%(funcname)s(self.dataok1, self.errordata, matherrors=True)
for dataoutitem, expecteditem in zip(self.dataok1, expected):
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(dataoutitem, expecteditem)
########################################################
def test_%(funclabel)s_%(errorlabel)s_array_array_array_f1(self):
"""Test %(funclabel)s as *array-array-array* for %(errordata)s - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.%(funcname)s(self.dataok1, self.dataok2, self.dataout)
# This is the actual test.
with self.assertRaises(ArithmeticError):
arrayfunc.%(funcname)s(self.dataok1, self.errordata, self.dataout)
########################################################
def test_%(funclabel)s_%(errorlabel)s_array_array_array_f2(self):
"""Test %(funclabel)s as *array-array-array* for %(errordata)s with error check off - Array code %(typelabel)s.
"""
# This test results in "nan".
expected = [math.nan] * len(self.errordata)
arrayfunc.%(funcname)s(self.dataok1, self.errordata, self.dataout, matherrors=True)
for dataoutitem, expecteditem in zip(self.dataout, expected):
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(dataoutitem, expecteditem)
##############################################################################
'''
# ==============================================================================
# ==============================================================================
# The template used to generate the tests for negative power.
# Template substitution keys: typelabel, typecode, decimalpoint, exceptioncode,
# test_op_x, test_op_y.
# Generates a unittest.TestCase checking pow with a negative exponent (-1):
# an exception is expected by default; with matherrors=True no exception
# is raised in the c2/d2/e2/f2 variants.
param_overflow_pow_negy_template = '''
##############################################################################
class overflow_signed_negy_%(typelabel)s(unittest.TestCase):
"""Test pow for value overflow for max values.
param_overflow_pow_negy_template
"""
########################################################
def setUp(self):
"""Initialise.
"""
self.datax1 = array.array('%(typecode)s', [%(test_op_x)s])
self.datax2 = copy.copy(self.datax1)
self.datay1 = array.array('%(typecode)s', [x for (x,y) in zip(itertools.cycle([%(test_op_y)s]), self.datax1)])
self.datay2 = copy.copy(self.datay1)
self.minus1 = array.array('%(typecode)s', [-1] * len(self.datax1))
self.dataout = array.array('%(typecode)s', itertools.repeat(0%(decimalpoint)s, len(self.datax1)))
########################################################
def test_pow_array_num_none_a1(self):
"""Test pow as *array-num-none* for x ** -1 - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.pow(self.datax1, 1)
# This is the actual test.
with self.assertRaises(ValueError):
arrayfunc.pow(self.datax2, -1)
########################################################
def test_pow_array_num_array_b1(self):
"""Test pow as *array-num-array* for x ** -1 - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.pow(self.datax1, 1, self.dataout)
# This is the actual test.
with self.assertRaises(ValueError):
arrayfunc.pow(self.datax2, -1, self.dataout)
########################################################
def test_pow_num_array_none_c1(self):
"""Test pow as *num-array-none* for x ** -1 - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.pow(2, self.datay2)
# This is the actual test.
with self.assertRaises(%(exceptioncode)s):
arrayfunc.pow(2, self.minus1)
########################################################
def test_pow_num_array_none_c2(self):
"""Test pow as *num-array-none* for x ** -1 with matherrors=True - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.pow(2, self.datay2, matherrors=True)
# This is the actual test. There should be no exception on math errors.
arrayfunc.pow(2, self.minus1, matherrors=True)
########################################################
def test_pow_num_array_array_d1(self):
"""Test pow as *num-array-array* for x ** -1 - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.pow(2, self.datay2, self.dataout)
# This is the actual test.
with self.assertRaises(%(exceptioncode)s):
arrayfunc.pow(2, self.minus1, self.dataout)
########################################################
def test_pow_num_array_array_d2(self):
"""Test pow as *num-array-array* for x ** -1 with matherrors=True - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.pow(2, self.datay2, self.dataout, matherrors=True)
# This is the actual test. There should be no exception on math errors.
arrayfunc.pow(2, self.minus1, self.dataout, matherrors=True)
########################################################
def test_pow_array_array_none_e1(self):
"""Test pow as *array-array-none* for x ** -1 - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.pow(self.datax1, self.datay1)
# This is the actual test.
with self.assertRaises(%(exceptioncode)s):
arrayfunc.pow(self.datax2, self.minus1)
########################################################
def test_pow_array_array_none_e2(self):
"""Test pow as *array-array-none* for x ** -1 with matherrors=True - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.pow(self.datax1, self.datay1, matherrors=True)
# This is the actual test. There should be no exception on math errors.
arrayfunc.pow(self.datax2, self.minus1, matherrors=True)
########################################################
def test_pow_array_array_array_f1(self):
"""Test pow as *array-array-array* for x ** -1 - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.pow(self.datax1, self.datay1, self.dataout)
# This is the actual test.
with self.assertRaises(%(exceptioncode)s):
arrayfunc.pow(self.datax2, self.minus1, self.dataout)
########################################################
def test_pow_array_array_array_f2(self):
"""Test pow as *array-array-array* for x ** -1 with matherrors=True - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.pow(self.datax1, self.datay1, self.dataout, matherrors=True)
# This is the actual test. There should be no exception on math errors.
arrayfunc.pow(self.datax2, self.minus1, self.dataout, matherrors=True)
##############################################################################
'''
# ==============================================================================
# The template used to generate the tests for overflow of power.
param_overflow_pow_error_template = '''
##############################################################################
class overflow_signed_pow_error_%(typelabel)s(unittest.TestCase):
"""Test pow for value overflow for max values.
param_overflow_pow_error_template
"""
########################################################
def setUp(self):
"""Initialise.
"""
# This is used as the 'y' value in x ** y. It is intended to cause a math error.
self.datayovfl = array.array('%(typecode)s', range(0, %(pow_y_err)s, 10))
# This provides a 'y' value for x ** y which will not cause math error.
self.datayok = array.array('%(typecode)s', [x for (x,y) in zip(itertools.cycle([%(test_op_y)s]), self.datayovfl)])
# These just provide simple data to work on.
self.datax2a = array.array('%(typecode)s', [2%(decimalpoint)s] * len(self.datayovfl))
self.datax2b = copy.copy(self.datax2a)
self.dataout = array.array('%(typecode)s', itertools.repeat(0%(decimalpoint)s, len(self.datayovfl)))
########################################################
def test_pow_array_num_none_a1(self):
"""Test pow as *array-num-none* for x ** %(pow_y_err)s%(decimalpoint)s - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.pow(self.datax2a, 2%(decimalpoint)s)
# This is the actual test.
with self.assertRaises(%(exceptioncode)s):
arrayfunc.pow(self.datax2b, %(pow_y_err)s%(decimalpoint)s)
########################################################
def test_pow_array_num_none_a2(self):
"""Test pow as *array-num-none* for x ** %(pow_y_err)s%(decimalpoint)s with matherrors=True - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.pow(self.datax2a, 2%(decimalpoint)s, matherrors=True)
# This is the actual test. There should be no exception on math errors.
arrayfunc.pow(self.datax2b, %(pow_y_err)s%(decimalpoint)s, matherrors=True)
########################################################
def test_pow_array_num_array_b1(self):
"""Test pow as *array-num-array* for x ** %(pow_y_err)s%(decimalpoint)s - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.pow(self.datax2a, 2%(decimalpoint)s, self.dataout)
# This is the actual test.
with self.assertRaises(%(exceptioncode)s):
arrayfunc.pow(self.datax2b, %(pow_y_err)s%(decimalpoint)s, self.dataout)
########################################################
def test_pow_array_num_array_b2(self):
"""Test pow as *array-num-array* for x ** %(pow_y_err)s%(decimalpoint)s with matherrors=True - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.pow(self.datax2a, 2%(decimalpoint)s, self.dataout, matherrors=True)
# This is the actual test. There should be no exception on math errors.
arrayfunc.pow(self.datax2b, %(pow_y_err)s%(decimalpoint)s, self.dataout, matherrors=True)
########################################################
def test_pow_num_array_none_c1(self):
"""Test pow as *num-array-none* for x ** %(pow_y_err)s%(decimalpoint)s - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.pow(2%(decimalpoint)s, self.datax2a)
# This is the actual test.
with self.assertRaises(%(exceptioncode)s):
arrayfunc.pow(2%(decimalpoint)s, self.datayovfl)
########################################################
def test_pow_num_array_none_c2(self):
"""Test pow as *num-array-none* for x ** %(pow_y_err)s%(decimalpoint)s with matherrors=True - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.pow(2%(decimalpoint)s, self.datax2a, matherrors=True)
# This is the actual test. There should be no exception on math errors.
arrayfunc.pow(2%(decimalpoint)s, self.datayovfl, matherrors=True)
########################################################
def test_pow_num_array_array_d1(self):
"""Test pow as *num-array-array* for x ** %(pow_y_err)s%(decimalpoint)s - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.pow(2%(decimalpoint)s, self.datax2a, self.dataout)
# This is the actual test.
with self.assertRaises(%(exceptioncode)s):
arrayfunc.pow(2%(decimalpoint)s, self.datayovfl, self.dataout)
########################################################
def test_pow_num_array_array_d2(self):
"""Test pow as *num-array-array* for x ** %(pow_y_err)s%(decimalpoint)s with matherrors=True - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.pow(2%(decimalpoint)s, self.datax2a, self.dataout, matherrors=True)
# This is the actual test. There should be no exception on math errors.
arrayfunc.pow(2%(decimalpoint)s, self.datayovfl, self.dataout, matherrors=True)
########################################################
def test_pow_array_array_none_e1(self):
"""Test pow as *array-array-none* for x ** %(pow_y_err)s%(decimalpoint)s - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.pow(self.datax2a, self.datayok)
# This is the actual test.
with self.assertRaises(%(exceptioncode)s):
arrayfunc.pow(self.datax2b, self.datayovfl)
########################################################
def test_pow_array_array_none_e2(self):
"""Test pow as *array-array-none* for x ** %(pow_y_err)s%(decimalpoint)s with matherrors=True - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.pow(self.datax2a, self.datayok, matherrors=True)
# This is the actual test. There should be no exception on math errors.
arrayfunc.pow(self.datax2b, self.datayovfl, matherrors=True)
########################################################
def test_pow_array_array_array_f1(self):
"""Test pow as *array-array-array* for x ** %(pow_y_err)s%(decimalpoint)s - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.pow(self.datax2a, self.datayok, self.dataout)
# This is the actual test.
with self.assertRaises(%(exceptioncode)s):
arrayfunc.pow(self.datax2b, self.datayovfl, self.dataout)
########################################################
def test_pow_array_array_array_f2(self):
"""Test pow as *array-array-array* for x ** %(pow_y_err)s%(decimalpoint)s with matherrors=True - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.pow(self.datax2a, self.datayok, self.dataout, matherrors=True)
# This is the actual test. There should be no exception on math error.
arrayfunc.pow(self.datax2b, self.datayovfl, self.dataout, matherrors=True)
##############################################################################
'''
# ==============================================================================
# ==============================================================================
# The template used to generate the tests for nan, inf, -inf in data arrays
# for pow.
nan_data_pow_template = '''
##############################################################################
class %(funclabel)s_%(errorlabel)s_pow_%(typelabel)s(unittest.TestCase):
"""Test %(funclabel)s for basic general function operation using parameter %(errordata)s.
nan_data_pow_template
"""
##############################################################################
def FloatassertEqual(self, dataoutitem, expecteditem, msg=None):
"""This function is patched into assertEqual to allow testing for
the floating point special values NaN, Inf, and -Inf.
"""
# NaN cannot be compared using normal means.
if math.isnan(dataoutitem) and math.isnan(expecteditem):
pass
# Anything else can be compared normally.
else:
if not math.isclose(expecteditem, dataoutitem, rel_tol=0.01, abs_tol=0.0):
raise self.failureException('%%0.3f != %%0.3f' %% (expecteditem, dataoutitem))
########################################################
def setUp(self):
"""Initialise.
"""
# This is active for float numbers only.
self.addTypeEqualityFunc(float, self.FloatassertEqual)
self.dataok1 = array.array('%(typecode)s', [-5.0, -4.0, -3.0, -2.0, 2.0, 3.0, 4.0, 5.0])
self.dataok2 = array.array('%(typecode)s', [-2.0, 3.0, -4.0, 5.0, 5.0, 4.0, -3.0, 2.0])
arraysize = len(self.dataok1)
self.dataout = array.array('%(typecode)s', itertools.repeat(0.0, arraysize))
self.errordata = array.array('%(typecode)s', [float('%(errordata)s')] * arraysize)
########################################################
def test_%(funclabel)s_%(errorlabel)s_array_num_none_a1(self):
"""Test %(funclabel)s as *array-num-none* for %(errordata)s - Array code %(typelabel)s.
"""
for testval in self.dataok2:
with self.subTest(msg='Failed with parameter', testval = testval):
# Copy the array so we don't change the original data.
dataok1 = copy.copy(self.dataok1)
errordata = copy.copy(self.errordata)
# This version is expected to pass.
arrayfunc.%(funcname)s(dataok1, testval)
expected = [math.%(funcname)s(x, testval) for x in errordata]
# This is the actual test.
# Some values will produce non-finite (nan, inf, -inf) results
# while some will not. We therefore provide means of checking both.
if not all([math.isfinite(x) for x in expected]):
# At least one value will produce a non-finite result.
with self.assertRaises(ArithmeticError):
arrayfunc.%(funcname)s(errordata, testval)
else:
arrayfunc.%(funcname)s(errordata, testval)
for dataoutitem, expecteditem in zip(errordata, expected):
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(dataoutitem, expecteditem)
########################################################
def test_%(funclabel)s_%(errorlabel)s_array_num_none_a2(self):
"""Test %(funclabel)s as *array-num-none* for %(errordata)s with error check off - Array code %(typelabel)s.
"""
for testval in self.dataok2:
with self.subTest(msg='Failed with parameter', testval = testval):
# Copy the array so we don't change the original data.
errordata = copy.copy(self.errordata)
expected = [math.%(funcname)s(x, testval) for x in errordata]
arrayfunc.%(funcname)s(errordata, testval, matherrors=True)
for dataoutitem, expecteditem in zip(errordata, expected):
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(dataoutitem, expecteditem)
########################################################
def test_%(funclabel)s_%(errorlabel)s_array_num_array_b1(self):
"""Test %(funclabel)s as *array-num-array* for %(errordata)s - Array code %(typelabel)s.
"""
for testval in self.dataok2:
with self.subTest(msg='Failed with parameter', testval = testval):
# Copy the array so we don't change the original data.
dataok1 = copy.copy(self.dataok1)
errordata = copy.copy(self.errordata)
# This version is expected to pass.
arrayfunc.%(funcname)s(dataok1, testval, self.dataout)
expected = [math.%(funcname)s(x, testval) for x in errordata]
# This is the actual test.
# Some values will produce non-finite (nan, inf, -inf) results
# while some will not. We therefore provide means of checking both.
if not all([math.isfinite(x) for x in expected]):
# At least one value will produce a non-finite result.
with self.assertRaises(ArithmeticError):
arrayfunc.%(funcname)s(errordata, testval, self.dataout)
else:
arrayfunc.%(funcname)s(errordata, testval, self.dataout)
for dataoutitem, expecteditem in zip(self.dataout, expected):
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(dataoutitem, expecteditem)
########################################################
def test_%(funclabel)s_%(errorlabel)s_array_num_array_b2(self):
"""Test %(funclabel)s as *array-num-array* for %(errordata)s with error check off - Array code %(typelabel)s.
"""
for testval in self.dataok2:
with self.subTest(msg='Failed with parameter', testval = testval):
expected = [math.%(funcname)s(x, testval) for x in self.errordata]
arrayfunc.%(funcname)s(self.errordata, testval, self.dataout, matherrors=True)
for dataoutitem, expecteditem in zip(self.dataout, expected):
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(dataoutitem, expecteditem)
########################################################
def test_%(funclabel)s_%(errorlabel)s_num_array_none_c1(self):
"""Test %(funclabel)s as *num-array-none* for %(errordata)s - Array code %(typelabel)s.
"""
for testval in self.dataok1:
with self.subTest(msg='Failed with parameter', testval = testval):
# Copy the array so we don't change the original data.
dataok2 = copy.copy(self.dataok2)
errordata = copy.copy(self.errordata)
# This version is expected to pass.
arrayfunc.%(funcname)s(testval, dataok2)
expected = [math.%(funcname)s(testval, x) for x in errordata]
# This is the actual test.
# Some values will produce non-finite (nan, inf, -inf) results
# while some will not. We therefore provide means of checking both.
if not all([math.isfinite(x) for x in expected]):
# At least one value will produce a non-finite result.
with self.assertRaises(ArithmeticError):
arrayfunc.%(funcname)s(testval, errordata)
else:
arrayfunc.%(funcname)s(testval, errordata)
for dataoutitem, expecteditem in zip(self.dataout, expected):
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(dataoutitem, expecteditem)
########################################################
def test_%(funclabel)s_%(errorlabel)s_num_array_none_c2(self):
"""Test %(funclabel)s as *num-array-none* for %(errordata)s with error check off - Array code %(typelabel)s.
"""
for testval in self.dataok1:
with self.subTest(msg='Failed with parameter', testval = testval):
# Copy the array so we don't change the original data.
errordata = copy.copy(self.errordata)
expected = [math.%(funcname)s(testval, x) for x in self.errordata]
arrayfunc.%(funcname)s(testval, errordata, matherrors=True)
for dataoutitem, expecteditem in zip(errordata, expected):
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(dataoutitem, expecteditem)
########################################################
def test_%(funclabel)s_%(errorlabel)s_num_array_array_d1(self):
"""Test %(funclabel)s as *num-array-array* for %(errordata)s - Array code %(typelabel)s.
"""
for testval in self.dataok1:
with self.subTest(msg='Failed with parameter', testval = testval):
# This version is expected to pass.
arrayfunc.%(funcname)s(testval, self.dataok2, self.dataout)
expected = [math.%(funcname)s(testval, x) for x in self.errordata]
# This is the actual test.
# Some values will produce non-finite (nan, inf, -inf) results
# while some will not. We therefore provide means of checking both.
if not all([math.isfinite(x) for x in expected]):
# At least one value will produce a non-finite result.
with self.assertRaises(ArithmeticError):
arrayfunc.%(funcname)s(testval, self.errordata, self.dataout)
else:
arrayfunc.%(funcname)s(testval, self.errordata, self.dataout)
for dataoutitem, expecteditem in zip(self.dataout, expected):
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(dataoutitem, expecteditem)
########################################################
def test_%(funclabel)s_%(errorlabel)s_num_array_array_d2(self):
"""Test %(funclabel)s as *num-array-array* for %(errordata)s with error check off - Array code %(typelabel)s.
"""
for testval in self.dataok1:
with self.subTest(msg='Failed with parameter', testval = testval):
expected = [math.%(funcname)s(testval, x) for x in self.errordata]
arrayfunc.%(funcname)s(testval, self.errordata, self.dataout, matherrors=True)
for dataoutitem, expecteditem in zip(self.dataout, expected):
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(dataoutitem, expecteditem)
########################################################
def test_%(funclabel)s_%(errorlabel)s_array_array_none_e1(self):
"""Test %(funclabel)s as *array-array-none* for %(errordata)s - Array code %(typelabel)s.
"""
# Copy the array so we don't change the original data.
dataok1 = copy.copy(self.dataok1)
dataok2 = copy.copy(self.dataok2)
# This version is expected to pass.
arrayfunc.%(funcname)s(dataok1, dataok2)
# Copy the array so we don't change the original data.
dataok1 = copy.copy(self.dataok1)
expected = [math.%(funcname)s(x, y) for x,y in zip(dataok1, self.errordata)]
# This is the actual test.
# Some values will produce non-finite (nan, inf, -inf) results
# while some will not. We therefore provide means of checking both.
if not all([math.isfinite(x) for x in expected]):
# At least one value will produce a non-finite result.
with self.assertRaises(ArithmeticError):
arrayfunc.%(funcname)s(dataok1, self.errordata)
else:
arrayfunc.%(funcname)s(dataok1, self.errordata)
for dataoutitem, expecteditem in zip(self.dataout, expected):
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(dataoutitem, expecteditem)
########################################################
def test_%(funclabel)s_%(errorlabel)s_array_array_none_e2(self):
"""Test %(funclabel)s as *array-array-none* for %(errordata)s with error check off - Array code %(typelabel)s.
"""
expected = [math.%(funcname)s(y, x) for x,y in zip(self.errordata, self.dataok2)]
arrayfunc.%(funcname)s(self.dataok2, self.errordata, matherrors=True)
for dataoutitem, expecteditem in zip(self.dataok2, expected):
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(dataoutitem, expecteditem)
########################################################
def test_%(funclabel)s_%(errorlabel)s_array_array_array_f1(self):
"""Test %(funclabel)s as *array-array-array* for %(errordata)s - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.%(funcname)s(self.dataok1, self.dataok2, self.dataout)
expected = [math.%(funcname)s(x, y) for x,y in zip(self.dataok1, self.errordata)]
# This is the actual test.
# Some values will produce non-finite (nan, inf, -inf) results
# while some will not. We therefore provide means of checking both.
if not all([math.isfinite(x) for x in expected]):
# At least one value will produce a non-finite result.
with self.assertRaises(ArithmeticError):
arrayfunc.%(funcname)s(self.dataok1, self.errordata, self.dataout)
else:
arrayfunc.%(funcname)s(self.dataok1, self.errordata, self.dataout)
for dataoutitem, expecteditem in zip(self.dataout, expected):
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(dataoutitem, expecteditem)
########################################################
def test_%(funclabel)s_%(errorlabel)s_array_array_array_f2(self):
"""Test %(funclabel)s as *array-array-array* for %(errordata)s with error check off - Array code %(typelabel)s.
"""
expected = [math.%(funcname)s(y, x) for x,y in zip(self.errordata, self.dataok2)]
arrayfunc.%(funcname)s(self.dataok1, self.errordata, self.dataout, matherrors=True)
for dataoutitem, expecteditem in zip(self.dataout, expected):
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(dataoutitem, expecteditem)
##############################################################################
'''
# ==============================================================================
# ==============================================================================
# Which opstemplate is valid for which operation. Each math operation requires
# different templates for signed int, unsigned int, and float.
opstemplates = {
'+' : {'int' : [param_overflow_add_max1_template,
param_overflow_add_min1_template],
'uint' : [param_overflow_add_max1_template],
'float' : [param_overflow_add_max1_template,
param_overflow_add_min1_template]
},
'-' : {'int' : [param_overflow_sub_max1_template,
param_overflow_sub_min1_template,
param_overflow_sub_1max_template,
param_overflow_sub_1min_template],
'uint' : [param_overflow_sub_min1_template],
'float' : [param_overflow_sub_max1_template,
param_overflow_sub_min1_template,
param_overflow_sub_1max_template]
},
'*' : {'int' : [param_overflow_mul_max2_template,
param_overflow_mul_min2_template,
param_overflow_mul_max2neg_template,
param_overflow_mul_min2neg_template,
param_overflow_mul_min1neg_template],
'uint' : [param_overflow_mul_max2_template],
'float' : [param_overflow_mul_max2_template,
param_overflow_mul_min2_template,
param_overflow_mul_max2neg_template,
param_overflow_mul_min2neg_template]
},
'/' : {'int' : [param_overflow_truediv_divzero_template,
param_overflow_truediv_divzero_errors_template,
param_overflow_truediv_mindivminus1_template],
'uint' : [param_overflow_truediv_divzero_template,
param_overflow_truediv_divzero_errors_template],
'float' : [param_overflow_truediv_divzero_template]
},
'//' : {'int' : [param_overflow_floordiv_divzero_template,
param_overflow_floordiv_divzero_errors_template,
param_overflow_floordiv_mindivminus1_template],
'uint' : [param_overflow_floordiv_divzero_template,
param_overflow_floordiv_divzero_errors_template],
'float' : [param_overflow_floordiv_divzero_template]
},
'%' : {'int' : [param_overflow_mod_divzero_template,
param_overflow_mod_divzero_errors_template],
'uint' : [param_overflow_mod_divzero_template,
param_overflow_mod_divzero_errors_template],
'float' : [param_overflow_mod_divzero_template]
},
'**' : {'int' : [param_overflow_pow_negy_template,
param_overflow_pow_error_template],
'uint' : [param_overflow_pow_error_template],
'float' : [param_overflow_pow_error_template]
},
}
# ==============================================================================
# These are all the test code templates.
test_templates = {'test_template_op' : test_op_templ,
'nan_data_error_template' : nan_data_error_template,
'nan_div_data_error_template' : nan_div_data_error_template,
'inf_floordiv_data_error_template' : inf_floordiv_data_error_template,
'inf_mod_data_error_template' : inf_mod_data_error_template,
'nan_data_pow_template' : nan_data_pow_template,
}
# ==============================================================================
# Used for creating test data. This covers everything except truediv on integers,
# which has to be handled by a function inside the test class as we use native
# division for this.
operatorfunc = {
'+' : 'operator.add',
'//' : 'operator.floordiv',
'%' : 'operator.mod',
'*' : 'operator.mul',
'**' : 'operator.pow',
'-' : 'operator.sub',
'/' : 'operator.truediv',
}
# ==============================================================================
# Read in the op codes.
opdata = codegen_common.ReadINI('affuncdata.ini')
# Filter out the desired math functions.
funclist = [(x,dict(y)) for x,y in opdata.items() if y.get('test_op_templ') in ['test_template_op', 'test_template_op_simd']]
# Create a list of names which support SIMD.
havesimd = [x for x,y in funclist if y.get('test_op_templ') == 'test_template_op_simd']
# ==============================================================================
# This defines the module name.
modulename = 'arrayfunc'
# Import the array module for testing.
arrayimport = 'import array'
for funcname, func in funclist:
filenamebase = 'test_' + funcname
filename = filenamebase + '.py'
headerdate = codegen_common.FormatHeaderData(filenamebase, '09-Dec-2017', funcname)
# Add additional header data.
headerdate['modulename'] = modulename
headerdate['arrayimport'] = arrayimport
# One function (one output file).
with open(filename, 'w') as f:
# The copyright header.
f.write(codegen_common.HeaderTemplate % headerdate)
# Insert the helper functions that are not array type dependent.
# This filters test data based on the operation being performed.
f.write(datafilters[funcname])
# Generate the data for the general tests. These functions are
# called by a number of different classes.
# The pow function uses a special version which is different from the rest.
if funcname == 'pow':
f.write(gendata_pow)
else:
f.write(gendata_general)
# This generates all possible data combinations for 8 bit arrays.
f.write(gendata_fullrange)
# Check each array type.
for functype in codegen_common.arraycodes:
# Convert the numeric literals to the appropriate type for the array.
if functype in codegen_common.floatarrays:
xvalues = [float(x) for x in func['test_op_x'].split(',')]
yvalues = [float(x) for x in func['test_op_y'].split(',')]
zero_const = 0.0
# Simply increment by the maximum value, instead of figuring out
# what the smallest increment is for a float or double.
incvalue = 'self.MaxLimit / 1000000.0'
decvalue = 'self.MinLimit / 1000000.0'
errorflagexceptioncode = 'ArithmeticError'
else:
xvalues = [int(x) for x in func['test_op_x'].split(',')]
yvalues = [int(x) for x in func['test_op_y'].split(',')]
# Make sure we don't have any negative test values for unsigned arrays.
if functype in codegen_common.unsignedint:
xmin = min(xvalues)
if xmin < 0:
xtmp = [x - xmin for x in xvalues]
else:
xtmp = xvalues
xvalues = xtmp
ymin = min(yvalues)
if ymin < 0:
ytmp = [y - ymin for y in yvalues]
else:
ytmp = yvalues
yvalues = ytmp
# Avoid zeros in the y parameters for division to avoid
# dividing by zero.
if funcname in ('truediv', 'floordiv', 'mod'):
yvalues = [x for x in yvalues if x != 0]
zero_const = '0'
incvalue = '1'
decvalue = '-1'
errorflagexceptioncode = 'OverflowError'
# Convert back to a string, as that is what the template expects.
test_op_x = ','.join([str(x) for x in xvalues])
test_op_y = ','.join([str(x) for x in yvalues])
pyoperator = func['pyoperator']
funcdata = {'funclabel' : funcname, 'funcname' : funcname, 'pyoperator' : pyoperator,
'typelabel' : functype, 'typecode' : functype, 'test_op_x' : test_op_x,
'test_op_y' : test_op_y, 'zero_const' : zero_const,
'incvalue' : incvalue, 'decvalue' : decvalue,
'errorflagexceptioncode' : errorflagexceptioncode}
# For integer true division, we must convert the expected values
# back to integer in order to compare them to the arrayfunc.truediv
# result.
# Python itself outputs a float value for true division, while we
# want to keep output array types the same as the input types.
if (pyoperator == '/') and (functype in codegen_common.intarrays):
funcdata['typeconv1'] = 'list(map(int,'
funcdata['typeconv2'] = '))'
funcdata['operatorfunc'] = 'inttruediv'
else:
funcdata['typeconv1'] = ''
funcdata['typeconv2'] = ''
funcdata['operatorfunc'] = operatorfunc[pyoperator]
# Basic tests. Select the test data generator.
# The data generator forms part of the class name to differentiate
# between them.
if funcname == 'pow':
funcdata['datagenerator'] = 'pow'
else:
funcdata['datagenerator'] = 'int'
# Even array size.
funcdata['arrayevenodd'] = 'even'
f.write(test_op_templ % funcdata)
# Odd array size.
funcdata['arrayevenodd'] = 'odd'
f.write(test_op_templ % funcdata)
# Special data. These values were hand selected.
if funcname == 'pow':
funcdata['datagenerator'] = 'specialpow'
else:
funcdata['datagenerator'] = 'special'
# Even array size.
funcdata['arrayevenodd'] = 'even'
f.write(test_op_templ % funcdata)
# Odd array size.
funcdata['arrayevenodd'] = 'odd'
f.write(test_op_templ % funcdata)
# We do a full range test only for the smallest array types
# as otherwise the test would be excessively long. This
# tests all possible combinations of values which would
# not overflow the result.
if functype in ('b', 'B'):
# This data generator handles pow data as well as
# all other types.
funcdata['datagenerator'] = 'fullrange'
# Even array size.
funcdata['arrayevenodd'] = 'even'
f.write(test_op_templ % funcdata)
# Odd array size.
funcdata['arrayevenodd'] = 'odd'
f.write(test_op_templ % funcdata)
#####
# Not all functions support SIMD operations.
if funcname in havesimd:
# Even array size.
funcdata['arrayevenodd'] = 'even'
f.write(test_op_simd_templ % funcdata)
# Odd array size.
funcdata['arrayevenodd'] = 'odd'
f.write(test_op_simd_templ % funcdata)
#####
# Test for invalid parameters. One template should work for all
# functions of this style.
if functype not in ['f', 'd']:
funcdata['badcode'] = 'd'
funcdata['badconv'] = 'float'
else:
funcdata['badcode'] = 'i'
funcdata['badconv'] = 'int'
f.write(param_invalid_template % funcdata)
# Test for invalid optional parameters such as errors and maxlen.
f.write(param_invalid_opt_template % funcdata)
#####
# Test for invalid nosimd optional parameter.
# Not all functions support SIMD operations.
if funcname in havesimd:
f.write(param_invalid_opt_simd_template % funcdata)
#####
# Overflow tests.
if functype in codegen_common.signedint:
errors_templt = '\n'.join(opstemplates[func['pyoperator']]['int'])
funcdata['decimalpoint'] = ''
funcdata['floatpad'] = ''
funcdata['exceptioncode'] = 'OverflowError'
funcdata['pow_y_err'] = '127'
elif functype in codegen_common.unsignedint:
errors_templt = '\n'.join(opstemplates[func['pyoperator']]['uint'])
funcdata['decimalpoint'] = ''
funcdata['floatpad'] = ''
funcdata['exceptioncode'] = 'OverflowError'
funcdata['pow_y_err'] = '127'
elif functype in codegen_common.floatarrays:
errors_templt = '\n'.join(opstemplates[func['pyoperator']]['float'])
funcdata['decimalpoint'] = '.0'
if functype == 'f':
funcdata['floatpad'] = '.0e37'
else:
funcdata['floatpad'] = '.0e300'
funcdata['exceptioncode'] = 'ArithmeticError'
funcdata['pow_y_err'] = '1100'
else:
print('Error - Unknown array type ', functype)
# Math error tests - output the templates.
f.write(errors_templt % funcdata)
#####
# NaN, Inf tests are for floating point only.
if functype in codegen_common.floatarrays:
# NaN, inf, -inf tests.
funcdata = {'funclabel' : funcname, 'funcname' : funcname,
'pyoperator' : func['pyoperator'],
'typelabel' : functype, 'typecode' : functype, 'test_op_x' : test_op_x,
'test_op_y' : test_op_y
}
# NaN
testtemplate = test_templates[func['test_nan_data_template']]
funcdata['errorlabel'] = 'NaN'
funcdata['errordata'] = 'nan'
f.write(testtemplate % funcdata)
# inf
testtemplate = test_templates[func['test_inf_data_template']]
funcdata['errorlabel'] = 'inf'
funcdata['errordata'] = 'inf'
f.write(testtemplate % funcdata)
# -inf
testtemplate = test_templates[func['test_ninf_data_template']]
funcdata['errorlabel'] = 'ninf'
funcdata['errordata'] = '-inf'
f.write(testtemplate % funcdata)
#####
f.write(codegen_common.testendtemplate % {'funcname' : funcname, 'testprefix' : 'af'})
# ==============================================================================
| 36.386927
| 135
| 0.621862
| 27,965
| 234,914
| 5.135884
| 0.030645
| 0.040174
| 0.028853
| 0.038364
| 0.909111
| 0.896654
| 0.881768
| 0.875969
| 0.859997
| 0.854322
| 0
| 0.01168
| 0.136267
| 234,914
| 6,455
| 136
| 36.392564
| 0.69617
| 0.05253
| 0
| 0.714121
| 1
| 0.110521
| 0.958973
| 0.379812
| 0
| 0
| 0
| 0
| 0.089986
| 1
| 0
| false
| 0.052377
| 0.000923
| 0
| 0.004384
| 0.000231
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 10
|
d4d91b6a8b71ce615b51cea24c60e676ff8069cd
| 89
|
py
|
Python
|
tests/docs/test_bind_hooks.py
|
adriangb/di
|
f277bb7189c8e8bde41170afb3181e6600b06be8
|
[
"MIT"
] | 57
|
2021-09-28T00:48:08.000Z
|
2022-03-16T16:50:39.000Z
|
tests/docs/test_bind_hooks.py
|
ScareTrow/di
|
a89b6b7d52da41b6e094b50ee5a500c3478676fa
|
[
"MIT"
] | 59
|
2021-09-25T00:06:22.000Z
|
2022-03-31T15:49:36.000Z
|
tests/docs/test_bind_hooks.py
|
ScareTrow/di
|
a89b6b7d52da41b6e094b50ee5a500c3478676fa
|
[
"MIT"
] | 3
|
2021-12-31T10:03:03.000Z
|
2021-12-31T16:07:54.000Z
|
from docs_src.bind_hooks import main
def test_bind_hooks_example() -> None:
main()
| 14.833333
| 38
| 0.741573
| 14
| 89
| 4.357143
| 0.785714
| 0.295082
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.168539
| 89
| 5
| 39
| 17.8
| 0.824324
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
be0099e94e5b3bfdff8f2992cb87966530be47b0
| 25,555
|
py
|
Python
|
xrdsst/api/services_api.py
|
nordic-institute/X-Road-Security-Server-toolkit
|
1538dbf3d76647f4fb3a72bbe93bf54f414ee9fb
|
[
"MIT"
] | 7
|
2020-11-01T19:50:11.000Z
|
2022-01-18T17:45:19.000Z
|
xrdsst/api/services_api.py
|
nordic-institute/X-Road-Security-Server-toolkit
|
1538dbf3d76647f4fb3a72bbe93bf54f414ee9fb
|
[
"MIT"
] | 24
|
2020-11-09T08:09:10.000Z
|
2021-06-16T07:22:14.000Z
|
xrdsst/api/services_api.py
|
nordic-institute/X-Road-Security-Server-toolkit
|
1538dbf3d76647f4fb3a72bbe93bf54f414ee9fb
|
[
"MIT"
] | 1
|
2021-04-27T14:39:48.000Z
|
2021-04-27T14:39:48.000Z
|
# coding: utf-8
"""
X-Road Security Server Admin API
X-Road Security Server Admin API. Note that the error metadata responses described in some endpoints are subjects to change and may be updated in upcoming versions. # noqa: E501
OpenAPI spec version: 1.0.31
Contact: info@niis.org
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from xrdsst.api_client.api_client import ApiClient
class ServicesApi(object):
    """Client for the ``/services`` endpoints of the X-Road Security Server
    Admin API.

    NOTE: This class is auto generated by the swagger code generator program.
    Do not edit the class manually.
    Ref: https://github.com/swagger-api/swagger-codegen

    Every operation comes in two flavors: the plain method (returns only the
    response data) and a ``*_with_http_info`` variant (returns the full HTTP
    response information as produced by ``ApiClient.call_api``).  The shared
    request-building boilerplate lives in :meth:`_call_by_id`.
    """

    def __init__(self, api_client=None):
        """
        :param api_client: a preconfigured ApiClient instance; a
            default-configured client is created when omitted.
        """
        if api_client is None:
            api_client = ApiClient()
        self.api_client = api_client

    def _call_by_id(self, method_name, resource_path, http_method,
                    response_type, accepts_body, id, **kwargs):
        """Validate arguments and perform the HTTP call for one operation.

        Shared implementation for every ``*_with_http_info`` method: checks
        the keyword arguments, verifies the required ``id`` path parameter,
        assembles headers/body and delegates to ``self.api_client.call_api``.

        :param str method_name: public method name, used in error messages
        :param str resource_path: path template containing ``{id}``
        :param str http_method: HTTP verb, e.g. 'GET', 'POST', 'PATCH'
        :param response_type: swagger response type name, or None
        :param bool accepts_body: whether the operation takes a request body
        :param str id: id of the service (required)
        :raises TypeError: on an unexpected keyword argument
        :raises ValueError: when ``id`` is None
        """
        allowed = {'async_req', '_return_http_data_only',
                   '_preload_content', '_request_timeout'}
        if accepts_body:
            allowed.add('body')
        for key in kwargs:
            if key not in allowed:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method %s" % (key, method_name)
                )
        # verify the required parameter 'id' is set
        if id is None:
            raise ValueError(
                "Missing the required parameter `id` when calling `%s`"
                % method_name)

        # HTTP header `Accept`
        header_params = {
            'Accept': self.api_client.select_header_accept(
                ['application/json']),
        }
        body_params = None
        if accepts_body:
            body_params = kwargs.get('body')
            # HTTP header `Content-Type` — only for operations with a body.
            header_params['Content-Type'] = (
                self.api_client.select_header_content_type(
                    ['application/json']))

        return self.api_client.call_api(
            resource_path, http_method,
            {'id': id},                    # path_params
            [],                            # query_params
            header_params,
            body=body_params,
            post_params=[],                # form_params
            files={},                      # local_var_files
            response_type=response_type,
            auth_settings=['ApiKeyAuth'],  # Authentication setting
            async_req=kwargs.get('async_req'),
            _return_http_data_only=kwargs.get('_return_http_data_only'),
            _preload_content=kwargs.get('_preload_content', True),
            _request_timeout=kwargs.get('_request_timeout'),
            collection_formats={})

    def add_endpoint(self, id, **kwargs):  # noqa: E501
        """create endpoint

        <h3>Administrator creates a new endpoint.</h3>
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.add_endpoint(id, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str id: id of the service (required)
        :param Endpoint body:
        :return: Endpoint
                 If the method is called asynchronously,
                 returns the request thread.
        """
        kwargs['_return_http_data_only'] = True
        if kwargs.get('async_req'):
            return self.add_endpoint_with_http_info(id, **kwargs)  # noqa: E501
        (data) = self.add_endpoint_with_http_info(id, **kwargs)  # noqa: E501
        return data

    def add_endpoint_with_http_info(self, id, **kwargs):  # noqa: E501
        """create endpoint — full HTTP info variant.

        See :meth:`add_endpoint` for parameter documentation.
        """
        return self._call_by_id(
            'add_endpoint', '/services/{id}/endpoints', 'POST',
            'Endpoint', True, id, **kwargs)

    def add_service_service_clients(self, id, **kwargs):  # noqa: E501
        """add access rights to selected service for new ServiceClients

        <h3>Adds access rights to selected service for new
        ServiceClients.</h3>
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.add_service_service_clients(id, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str id: id of the service (required)
        :param ServiceClients body:
        :return: list[ServiceClient]
                 If the method is called asynchronously,
                 returns the request thread.
        """
        kwargs['_return_http_data_only'] = True
        if kwargs.get('async_req'):
            return self.add_service_service_clients_with_http_info(id, **kwargs)  # noqa: E501
        (data) = self.add_service_service_clients_with_http_info(id, **kwargs)  # noqa: E501
        return data

    def add_service_service_clients_with_http_info(self, id, **kwargs):  # noqa: E501
        """add access rights to selected service — full HTTP info variant.

        See :meth:`add_service_service_clients` for parameter documentation.
        """
        return self._call_by_id(
            'add_service_service_clients',
            '/services/{id}/service-clients', 'POST',
            'list[ServiceClient]', True, id, **kwargs)

    def delete_service_service_clients(self, id, **kwargs):  # noqa: E501
        """remove access to selected service from given ServiceClients

        <h3>Administrator removes access to selected service from given
        ServiceClients.</h3>
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.delete_service_service_clients(id, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str id: id of the service (required)
        :param ServiceClients body:
        :return: None
                 If the method is called asynchronously,
                 returns the request thread.
        """
        kwargs['_return_http_data_only'] = True
        if kwargs.get('async_req'):
            return self.delete_service_service_clients_with_http_info(id, **kwargs)  # noqa: E501
        (data) = self.delete_service_service_clients_with_http_info(id, **kwargs)  # noqa: E501
        return data

    def delete_service_service_clients_with_http_info(self, id, **kwargs):  # noqa: E501
        """remove access to selected service — full HTTP info variant.

        See :meth:`delete_service_service_clients` for parameter
        documentation.
        """
        return self._call_by_id(
            'delete_service_service_clients',
            '/services/{id}/service-clients/delete', 'POST',
            None, True, id, **kwargs)

    def get_service(self, id, **kwargs):  # noqa: E501
        """get service

        <h3>Administrator views selected service.</h3>
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.get_service(id, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str id: id of the service (required)
        :return: Service
                 If the method is called asynchronously,
                 returns the request thread.
        """
        kwargs['_return_http_data_only'] = True
        if kwargs.get('async_req'):
            return self.get_service_with_http_info(id, **kwargs)  # noqa: E501
        (data) = self.get_service_with_http_info(id, **kwargs)  # noqa: E501
        return data

    def get_service_with_http_info(self, id, **kwargs):  # noqa: E501
        """get service — full HTTP info variant.

        See :meth:`get_service` for parameter documentation.
        """
        return self._call_by_id(
            'get_service', '/services/{id}', 'GET',
            'Service', False, id, **kwargs)

    def get_service_service_clients(self, id, **kwargs):  # noqa: E501
        """get service clients who have access rights for the selected service

        <h3>Administrator views service clients who have access to the given
        service.</h3>
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.get_service_service_clients(id, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str id: id of the service (required)
        :return: list[ServiceClient]
                 If the method is called asynchronously,
                 returns the request thread.
        """
        kwargs['_return_http_data_only'] = True
        if kwargs.get('async_req'):
            return self.get_service_service_clients_with_http_info(id, **kwargs)  # noqa: E501
        (data) = self.get_service_service_clients_with_http_info(id, **kwargs)  # noqa: E501
        return data

    def get_service_service_clients_with_http_info(self, id, **kwargs):  # noqa: E501
        """get service clients — full HTTP info variant.

        See :meth:`get_service_service_clients` for parameter documentation.
        """
        return self._call_by_id(
            'get_service_service_clients',
            '/services/{id}/service-clients', 'GET',
            'list[ServiceClient]', False, id, **kwargs)

    def update_service(self, id, **kwargs):  # noqa: E501
        """update service

        <h3>Administrator updates the service.</h3> <p> This endpoint can
        return a warnings response which can be ignored by setting
        <code>ServiceUpdate.ignore_warnings</code> = true. If
        <code>ServiceUpdate.ignore_warnings</code> = false, it is possible to
        receive a warnings response from this endpoint if any one of the
        following conditions is true <ul> <li>SSL authentication is set to
        true, but https connection to the service URL fails because SSL
        handshake fails</li> <li>SSL authentication is set to true, but https
        connection to the service URL fails because of other errors (e.g.
        host unreachable)</li> </ul> </p>
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.update_service(id, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str id: id of the service (required)
        :param ServiceUpdate body:
        :return: Service
                 If the method is called asynchronously,
                 returns the request thread.
        """
        kwargs['_return_http_data_only'] = True
        if kwargs.get('async_req'):
            return self.update_service_with_http_info(id, **kwargs)  # noqa: E501
        (data) = self.update_service_with_http_info(id, **kwargs)  # noqa: E501
        return data

    def update_service_with_http_info(self, id, **kwargs):  # noqa: E501
        """update service — full HTTP info variant.

        See :meth:`update_service` for parameter documentation.
        """
        return self._call_by_id(
            'update_service', '/services/{id}', 'PATCH',
            'Service', True, id, **kwargs)
| 40.180818
| 636
| 0.610878
| 3,018
| 25,555
| 4.957256
| 0.07389
| 0.049729
| 0.01925
| 0.025667
| 0.959963
| 0.958425
| 0.954348
| 0.948867
| 0.944322
| 0.943052
| 0
| 0.017432
| 0.297398
| 25,555
| 635
| 637
| 40.244094
| 0.815817
| 0.371708
| 0
| 0.824926
| 0
| 0
| 0.17099
| 0.046924
| 0
| 0
| 0
| 0
| 0
| 1
| 0.038576
| false
| 0
| 0.011869
| 0
| 0.106825
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
07c55ee8bbc5bf60be2fd48dc64a7d5727c0ab13
| 123
|
py
|
Python
|
backend/champaign/utils.py
|
EugMJang/Covid-20
|
e56cb32bb2470c309cb983e28e8da3de5a2503c8
|
[
"MIT"
] | null | null | null |
backend/champaign/utils.py
|
EugMJang/Covid-20
|
e56cb32bb2470c309cb983e28e8da3de5a2503c8
|
[
"MIT"
] | null | null | null |
backend/champaign/utils.py
|
EugMJang/Covid-20
|
e56cb32bb2470c309cb983e28e8da3de5a2503c8
|
[
"MIT"
] | 1
|
2021-09-22T02:02:12.000Z
|
2021-09-22T02:02:12.000Z
|
import time
def get_unix_timestamp():
    """Return the current Unix epoch time, truncated to whole milliseconds."""
    seconds = time.time()
    return int(seconds * 1000)
| 17.571429
| 47
| 0.674797
| 16
| 123
| 5.0625
| 0.75
| 0.320988
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.040816
| 0.203252
| 123
| 6
| 48
| 20.5
| 0.785714
| 0.284553
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0.333333
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
07d0a20c7fd4927f58e3da2af6bd378926c0676d
| 232
|
bzl
|
Python
|
utils.bzl
|
aij/flow
|
890f1c5be9f0d2d7bdf32674e4f23f0e3c450fb8
|
[
"MIT"
] | 1
|
2018-11-26T06:52:29.000Z
|
2018-11-26T06:52:29.000Z
|
utils.bzl
|
aij/flow
|
890f1c5be9f0d2d7bdf32674e4f23f0e3c450fb8
|
[
"MIT"
] | null | null | null |
utils.bzl
|
aij/flow
|
890f1c5be9f0d2d7bdf32674e4f23f0e3c450fb8
|
[
"MIT"
] | 1
|
2018-11-26T06:49:40.000Z
|
2018-11-26T06:49:40.000Z
|
load("@fbcode_macros//build_defs:platform_utils.bzl", "platform_utils")
def get_ppx_bin():
    """Returns the repo-relative path of the lwt_ppx preprocessor binary
    for the platform resolved from the current base path."""
    platform = platform_utils.get_platform_for_base_path(get_base_path())
    return "third-party-buck/{}/build/ocaml-lwt_ppx/lib/lwt_ppx/ppx.exe".format(platform)
| 46.4
| 139
| 0.793103
| 38
| 232
| 4.447368
| 0.631579
| 0.230769
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.047414
| 232
| 4
| 140
| 58
| 0.764706
| 0
| 0
| 0
| 0
| 0
| 0.508621
| 0.448276
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0
| 0.333333
| 0.666667
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.