hexsha
string
size
int64
ext
string
lang
string
max_stars_repo_path
string
max_stars_repo_name
string
max_stars_repo_head_hexsha
string
max_stars_repo_licenses
list
max_stars_count
int64
max_stars_repo_stars_event_min_datetime
string
max_stars_repo_stars_event_max_datetime
string
max_issues_repo_path
string
max_issues_repo_name
string
max_issues_repo_head_hexsha
string
max_issues_repo_licenses
list
max_issues_count
int64
max_issues_repo_issues_event_min_datetime
string
max_issues_repo_issues_event_max_datetime
string
max_forks_repo_path
string
max_forks_repo_name
string
max_forks_repo_head_hexsha
string
max_forks_repo_licenses
list
max_forks_count
int64
max_forks_repo_forks_event_min_datetime
string
max_forks_repo_forks_event_max_datetime
string
content
string
avg_line_length
float64
max_line_length
int64
alphanum_fraction
float64
qsc_code_num_words_quality_signal
int64
qsc_code_num_chars_quality_signal
float64
qsc_code_mean_word_length_quality_signal
float64
qsc_code_frac_words_unique_quality_signal
float64
qsc_code_frac_chars_top_2grams_quality_signal
float64
qsc_code_frac_chars_top_3grams_quality_signal
float64
qsc_code_frac_chars_top_4grams_quality_signal
float64
qsc_code_frac_chars_dupe_5grams_quality_signal
float64
qsc_code_frac_chars_dupe_6grams_quality_signal
float64
qsc_code_frac_chars_dupe_7grams_quality_signal
float64
qsc_code_frac_chars_dupe_8grams_quality_signal
float64
qsc_code_frac_chars_dupe_9grams_quality_signal
float64
qsc_code_frac_chars_dupe_10grams_quality_signal
float64
qsc_code_frac_chars_replacement_symbols_quality_signal
float64
qsc_code_frac_chars_digital_quality_signal
float64
qsc_code_frac_chars_whitespace_quality_signal
float64
qsc_code_size_file_byte_quality_signal
float64
qsc_code_num_lines_quality_signal
float64
qsc_code_num_chars_line_max_quality_signal
float64
qsc_code_num_chars_line_mean_quality_signal
float64
qsc_code_frac_chars_alphabet_quality_signal
float64
qsc_code_frac_chars_comments_quality_signal
float64
qsc_code_cate_xml_start_quality_signal
float64
qsc_code_frac_lines_dupe_lines_quality_signal
float64
qsc_code_cate_autogen_quality_signal
float64
qsc_code_frac_lines_long_string_quality_signal
float64
qsc_code_frac_chars_string_length_quality_signal
float64
qsc_code_frac_chars_long_word_length_quality_signal
float64
qsc_code_frac_lines_string_concat_quality_signal
float64
qsc_code_cate_encoded_data_quality_signal
float64
qsc_code_frac_chars_hex_words_quality_signal
float64
qsc_code_frac_lines_prompt_comments_quality_signal
float64
qsc_code_frac_lines_assert_quality_signal
float64
qsc_codepython_cate_ast_quality_signal
float64
qsc_codepython_frac_lines_func_ratio_quality_signal
float64
qsc_codepython_cate_var_zero_quality_signal
bool
qsc_codepython_frac_lines_pass_quality_signal
float64
qsc_codepython_frac_lines_import_quality_signal
float64
qsc_codepython_frac_lines_simplefunc_quality_signal
float64
qsc_codepython_score_lines_no_logic_quality_signal
float64
qsc_codepython_frac_lines_print_quality_signal
float64
qsc_code_num_words
int64
qsc_code_num_chars
int64
qsc_code_mean_word_length
int64
qsc_code_frac_words_unique
null
qsc_code_frac_chars_top_2grams
int64
qsc_code_frac_chars_top_3grams
int64
qsc_code_frac_chars_top_4grams
int64
qsc_code_frac_chars_dupe_5grams
int64
qsc_code_frac_chars_dupe_6grams
int64
qsc_code_frac_chars_dupe_7grams
int64
qsc_code_frac_chars_dupe_8grams
int64
qsc_code_frac_chars_dupe_9grams
int64
qsc_code_frac_chars_dupe_10grams
int64
qsc_code_frac_chars_replacement_symbols
int64
qsc_code_frac_chars_digital
int64
qsc_code_frac_chars_whitespace
int64
qsc_code_size_file_byte
int64
qsc_code_num_lines
int64
qsc_code_num_chars_line_max
int64
qsc_code_num_chars_line_mean
int64
qsc_code_frac_chars_alphabet
int64
qsc_code_frac_chars_comments
int64
qsc_code_cate_xml_start
int64
qsc_code_frac_lines_dupe_lines
int64
qsc_code_cate_autogen
int64
qsc_code_frac_lines_long_string
int64
qsc_code_frac_chars_string_length
int64
qsc_code_frac_chars_long_word_length
int64
qsc_code_frac_lines_string_concat
null
qsc_code_cate_encoded_data
int64
qsc_code_frac_chars_hex_words
int64
qsc_code_frac_lines_prompt_comments
int64
qsc_code_frac_lines_assert
int64
qsc_codepython_cate_ast
int64
qsc_codepython_frac_lines_func_ratio
int64
qsc_codepython_cate_var_zero
int64
qsc_codepython_frac_lines_pass
int64
qsc_codepython_frac_lines_import
int64
qsc_codepython_frac_lines_simplefunc
int64
qsc_codepython_score_lines_no_logic
int64
qsc_codepython_frac_lines_print
int64
effective
string
hits
int64
154ba1e720ac938a5159ad8b967937bd2c3d8510
3,662
py
Python
tests/fixtures/test_list.py
rcbops/tempest-zigzag
12f4bc528c9b88263f04394e5f31755519aa02e8
[ "Apache-2.0" ]
null
null
null
tests/fixtures/test_list.py
rcbops/tempest-zigzag
12f4bc528c9b88263f04394e5f31755519aa02e8
[ "Apache-2.0" ]
2
2019-01-14T22:48:35.000Z
2019-02-25T20:06:06.000Z
tests/fixtures/test_list.py
rcbops/tempest-zigzag
12f4bc528c9b88263f04394e5f31755519aa02e8
[ "Apache-2.0" ]
2
2019-01-08T20:19:16.000Z
2019-01-10T22:12:30.000Z
import pytest @pytest.fixture(scope='session') def file_test_list(tmpdir_factory, string_test_list): """An example of output from tempest run --list-tests """ filename = tmpdir_factory.mktemp('data').join('file_test_list_one').strpath with open(filename, 'w') as f: f.write(string_test_list) return filename @pytest.fixture(scope='session') def file_test_list_with_whitespace(tmpdir_factory, string_test_list_with_whitespace): """An example of output from tempest run --list-tests """ filename = tmpdir_factory.mktemp('data').join('file_test_list_one').strpath with open(filename, 'w') as f: f.write(string_test_list_with_whitespace) return filename @pytest.fixture(scope='session') def string_test_list_with_whitespace(): """An example of output from tempest run --list-tests """ test_list = \ """ tempest.api.compute.admin.test_agents.AgentsAdminTestJSON.test_one[id-1fc6bdc8-0b6d-4cc7-9f30-9b04fabe5b90,smoke] tempest.api.compute.admin.test_agents.AgentsAdminTestJSON.test_two[id-470e0b89-386f-407b-91fd-819737d0b335,negative] tempest.api.compute.admin.test_agents.AgentsAdminTestJSON.test_three[id-6a326c69-654b-438a-80a3-34bcc454e138,smoke] tempest.api.compute.admin.test_agents.AgentsAdminTestJSON.test_four[id-eabadde4-3cd7-4ec4-a4b5-5a936d2d4408,network] tempest.api.compute.admin.test_agents.AgentsAdminTestJSON.test_five[id-dc9ffd51-1c50-4f0e-a820-ae6d2a568a9e] tempest.api.compute.admin.test_aggregates.AggregatesAdminTestJSON.test_one[id-96be03c7-570d-409c-90f8-e4db3c646996] tempest.api.compute.admin.test_aggregates.AggregatesAdminTestJSON.test_two[id-eeef473c-7c52-494d-9f09-2ed7fc8fc036] tempest.api.compute.admin.test_aggregates.AggregatesAdminTestJSON.test_three[id-7f6a1cc5-2446-4cdb-9baa-b6ae0a919b72] tempest.api.compute.admin.test_aggregates.AggregatesAdminTestJSON.test_four[id-c8e85064-e79b-4906-9931-c11c24294d02] tempest.api.compute.admin.test_aggregates.AggregatesAdminTestJSON.test_five[id-0d148aa3-d54c-4317-aa8d-42040a475e20,smoke,negative,volume] """ # noqa return 
test_list @pytest.fixture(scope='session') def string_test_list(): """An example of output from tempest run --list-tests """ test_list = \ """tempest.api.compute.admin.test_agents.AgentsAdminTestJSON.test_one[id-1fc6bdc8-0b6d-4cc7-9f30-9b04fabe5b90,smoke] tempest.api.compute.admin.test_agents.AgentsAdminTestJSON.test_two[id-470e0b89-386f-407b-91fd-819737d0b335,negative] tempest.api.compute.admin.test_agents.AgentsAdminTestJSON.test_three[id-6a326c69-654b-438a-80a3-34bcc454e138,smoke] tempest.api.compute.admin.test_agents.AgentsAdminTestJSON.test_four[id-eabadde4-3cd7-4ec4-a4b5-5a936d2d4408,network] tempest.api.compute.admin.test_agents.AgentsAdminTestJSON.test_five[id-dc9ffd51-1c50-4f0e-a820-ae6d2a568a9e] tempest.api.compute.admin.test_aggregates.AggregatesAdminTestJSON.test_one[id-96be03c7-570d-409c-90f8-e4db3c646996] tempest.api.compute.admin.test_aggregates.AggregatesAdminTestJSON.test_two[id-eeef473c-7c52-494d-9f09-2ed7fc8fc036] tempest.api.compute.admin.test_aggregates.AggregatesAdminTestJSON.test_three[id-7f6a1cc5-2446-4cdb-9baa-b6ae0a919b72] tempest.api.compute.admin.test_aggregates.AggregatesAdminTestJSON.test_four[id-c8e85064-e79b-4906-9931-c11c24294d02] tempest.api.compute.admin.test_aggregates.AggregatesAdminTestJSON.test_five[id-0d148aa3-d54c-4317-aa8d-42040a475e20,smoke,negative,volume]""" # noqa return test_list
48.184211
157
0.770617
462
3,662
5.939394
0.218615
0.072886
0.123907
0.16035
0.990525
0.975948
0.975948
0.965743
0.916181
0.916181
0
0.123686
0.116876
3,662
75
158
48.826667
0.724799
0.058438
0
0.666667
0
0
0.087886
0
0
0
0
0
0
1
0.190476
false
0
0.047619
0
0.428571
0
0
0
0
null
0
0
1
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
1591c4c80a0d3fa8741f23cba1fc7d32f9168b0a
323
py
Python
pystruct/tests/test_libraries.py
LemonLison/pystruct
5606e643d1a0a3701b93b848a2a02c49e83c4f1e
[ "BSD-2-Clause" ]
501
2015-01-06T16:24:04.000Z
2022-03-22T03:53:03.000Z
pystruct/tests/test_libraries.py
LemonLison/pystruct
5606e643d1a0a3701b93b848a2a02c49e83c4f1e
[ "BSD-2-Clause" ]
104
2015-01-02T19:05:04.000Z
2022-02-13T20:18:38.000Z
pystruct/tests/test_libraries.py
LemonLison/pystruct
5606e643d1a0a3701b93b848a2a02c49e83c4f1e
[ "BSD-2-Clause" ]
145
2015-02-04T03:42:52.000Z
2022-03-04T13:16:37.000Z
from pystruct.inference import get_installed def test_pyqpbo(): import pyqpbo pyqpbo assert 'qpbo' in get_installed(['qpbo']) def test_ad3(): import ad3 ad3 assert 'ad3' in get_installed(['ad3']) def test_ad3plus(): import ad3 ad3 assert 'ad3+' in get_installed(['ad3+'])
15.380952
44
0.634675
42
323
4.714286
0.333333
0.242424
0.212121
0.181818
0.383838
0.383838
0.383838
0.383838
0.383838
0
0
0.041494
0.25387
323
20
45
16.15
0.780083
0
0
0.307692
0
0
0.068111
0
0
0
0
0
0.230769
1
0.230769
true
0
0.307692
0
0.538462
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
1
0
1
0
1
0
0
7
15b9a8f345b597a186d538575d61192ddaef2058
29,553
py
Python
resources.py
briangmaddox/QGISSOLR
e98e98f89265b7d0b6b8a760f6233c990ce368c3
[ "MIT" ]
null
null
null
resources.py
briangmaddox/QGISSOLR
e98e98f89265b7d0b6b8a760f6233c990ce368c3
[ "MIT" ]
null
null
null
resources.py
briangmaddox/QGISSOLR
e98e98f89265b7d0b6b8a760f6233c990ce368c3
[ "MIT" ]
null
null
null
# -*- coding: utf-8 -*- # Resource object code # # Created by: The Resource Compiler for PyQt5 (Qt v5.12.8) # # WARNING! All changes made in this file will be lost! from PyQt5 import QtCore qt_resource_data = b"\ \x00\x00\x0d\x09\ \x89\ \x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\ \x00\x00\x40\x00\x00\x00\x40\x08\x06\x00\x00\x00\xaa\x69\x71\xde\ \x00\x00\x01\x85\x69\x43\x43\x50\x49\x43\x43\x20\x70\x72\x6f\x66\ \x69\x6c\x65\x00\x00\x28\x91\x7d\x91\x3d\x48\xc3\x40\x1c\xc5\x5f\ \x53\xb5\x22\x15\x05\x8b\x88\x88\x64\xa8\x4e\x56\x44\x45\x1c\xb5\ \x0a\x45\xa8\x10\x6a\x85\x56\x1d\x4c\x2e\xfd\x10\x9a\x34\x24\x29\ \x2e\x8e\x82\x6b\xc1\xc1\x8f\xc5\xaa\x83\x8b\xb3\xae\x0e\xae\x82\ \x20\xf8\x01\xe2\xe4\xe8\xa4\xe8\x22\x25\xfe\x2f\x2d\xb4\x88\xf1\ \xe0\xb8\x1f\xef\xee\x3d\xee\xde\x01\x42\xa5\xc0\x34\xab\x65\x0c\ \xd0\x74\xdb\x4c\xc4\xa2\x62\x2a\xbd\x22\x06\x5e\xd1\x86\x41\x74\ \xa3\x17\xa3\x32\xb3\x8c\x59\x49\x8a\xc3\x73\x7c\xdd\xc3\xc7\xd7\ \xbb\x08\xcf\xf2\x3e\xf7\xe7\xe8\x54\x33\x16\x03\x7c\x22\xf1\x0c\ \x33\x4c\x9b\x78\x9d\x78\x6a\xd3\x36\x38\xef\x13\x87\x58\x5e\x56\ \x89\xcf\x89\x47\x4c\xba\x20\xf1\x23\xd7\x95\x1a\xbf\x71\xce\xb9\ \x2c\xf0\xcc\x90\x99\x4c\xcc\x11\x87\x88\xc5\x5c\x13\x2b\x4d\xcc\ \xf2\xa6\x46\x3c\x49\x1c\x56\x35\x9d\xf2\x85\x54\x8d\x55\xce\x5b\ \x9c\xb5\x42\x89\xd5\xef\xc9\x5f\x18\xcc\xe8\xcb\x4b\x5c\xa7\x39\ \x80\x18\x16\xb0\x08\x09\x22\x14\x94\xb0\x81\x02\x6c\x44\x68\xd5\ \x49\xb1\x90\xa0\xfd\xa8\x87\xbf\xdf\xf5\x4b\xe4\x52\xc8\xb5\x01\ \x46\x8e\x79\x14\xa1\x41\x76\xfd\xe0\x7f\xf0\xbb\x5b\x2b\x3b\x31\ \x5e\x4b\x0a\x46\x81\xd6\x17\xc7\xf9\x18\x02\x02\xbb\x40\xb5\xec\ \x38\xdf\xc7\x8e\x53\x3d\x01\xfc\xcf\xc0\x95\xde\xf0\x17\x2b\xc0\ \xf4\x27\xe9\xe5\x86\x16\x3e\x02\xba\xb6\x81\x8b\xeb\x86\xa6\xec\ \x01\x97\x3b\x40\xdf\x93\x21\x9b\xb2\x2b\xf9\x69\x0a\xd9\x2c\xf0\ \x7e\x46\xdf\x94\x06\x7a\x6e\x81\x8e\xd5\x5a\x6f\xf5\x7d\x9c\x3e\ \x00\x49\xea\x2a\x7e\x03\x1c\x1c\x02\xc3\x39\xca\x5e\xf3\x78\x77\ 
\x7b\x73\x6f\xff\x9e\xa9\xf7\xf7\x03\x96\x07\x72\xb5\x90\xcd\x14\ \x8e\x00\x00\x00\x06\x62\x4b\x47\x44\x00\xff\x00\xff\x00\xff\xa0\ \xbd\xa7\x93\x00\x00\x00\x09\x70\x48\x59\x73\x00\x00\x2e\x23\x00\ \x00\x2e\x23\x01\x78\xa5\x3f\x76\x00\x00\x00\x07\x74\x49\x4d\x45\ \x07\xe4\x06\x1e\x10\x18\x39\xbf\xbf\x95\x19\x00\x00\x0b\x05\x49\ \x44\x41\x54\x78\xda\xe5\x9b\x5b\x90\x94\xc5\x15\xc7\x7f\x33\xcc\ \xee\xe2\xc2\x2c\x86\xcb\x82\xb8\xb0\xb0\x08\xe2\xa5\xb8\x07\x12\ \x40\x03\x26\xb1\x22\x44\x4b\xaa\x52\x49\x61\x30\x2a\x46\x7c\x11\ \x83\x0f\x79\x23\x9a\x08\x09\x4f\xa9\x58\x31\xa5\x04\xac\x2c\x81\ \x4a\x48\xca\x58\x86\x0d\x56\xae\x95\xc4\x18\x90\xc4\x42\x10\x2b\ \x44\x97\x9b\xe0\xee\x0a\xc2\x2e\x02\x7b\x9f\x5b\x1e\xe6\xdf\x99\ \xa6\xf9\xbe\x99\x9e\xdd\xd9\xc5\x54\xba\x6a\x6a\xbf\xfd\xbe\xee\ \x73\xba\x4f\x9f\x73\xfa\xdc\x3a\xc2\xc0\xb6\x18\x10\x07\xc6\x01\ \x35\xc0\x44\xfd\x1d\xad\xf7\x15\xea\xd7\x03\x5c\x02\xce\x01\xef\ \x03\x4d\xfa\x7b\x06\xb8\x08\x24\x07\x6a\x82\x91\x01\x80\x59\xa6\ \x05\xcf\x00\x16\x01\x73\x81\x3a\x60\x14\x50\xa9\xef\xd1\x90\xb1\ \x69\x20\x01\x74\x89\x18\x27\x80\xfd\xc0\x1e\xe0\x10\xf0\x81\xbe\ \x7f\x2c\x09\x10\xd7\xa2\xbf\x08\xdc\x01\x4c\x03\xaa\xf2\x2c\xd6\ \xb7\xa5\xc5\x05\x47\x80\x3f\x03\xbb\x81\xb7\xc4\x31\x1f\x8b\x16\ \x07\xee\x04\xb6\x03\xa7\xc4\xae\x19\x8f\x5f\x22\xe4\x39\xdf\x2f\ \x29\xd1\xd8\x2e\x9c\xf1\xab\xb9\xf0\x32\x60\x3e\xb0\x15\x68\xd1\ \x4e\xb9\x13\x4e\x6b\xa7\x3a\x9c\xf7\x07\x81\x97\xb4\xf0\x84\x9e\ \x0f\x3a\x7d\x3a\x34\x36\x0c\x6e\x8b\x70\xcf\xd7\x5c\xfa\xd4\x86\ \xf4\x71\xdc\x58\x60\x35\xb0\x11\xf8\x9c\x58\xdd\x16\xa7\x0e\xe0\ \x30\xf0\x32\xf0\x5b\x60\x02\x30\x46\x93\xff\x07\xf0\x4d\xe0\x3c\ \x70\x97\xde\xfd\x10\xf8\x39\x30\x1d\xb8\x5e\xb0\x8e\x00\xcf\x01\ \xef\x68\x81\x71\xa0\xdc\x12\xdd\x38\x30\x0b\xb8\x5d\xeb\x38\x21\ \xbc\x03\xda\x22\xc0\x6c\x60\xa7\x76\xc7\xdd\x99\x73\x40\x03\xf0\ \x35\x29\xbe\x5a\xa0\x5e\x5a\x3e\x03\xec\x03\x16\x0b\xce\x6a\xbd\ \xef\xd1\x73\x44\xdf\xf6\xa9\x6f\x8f\xc6\xd6\x02\x93\x81\xfb\x81\ 
\x5d\xc2\xe1\xe2\xbd\xa4\x39\xcd\x1e\x20\xc5\xfe\x5f\x96\x5f\xa6\ \x09\xa6\x9c\x09\x5c\xd0\xc2\xef\x95\xb6\x07\x18\x06\x6c\xb0\xd8\ \xff\x2d\x60\xa9\x35\x41\x97\x00\x86\xc0\x4b\xd5\xd7\x88\xc1\x06\ \xc1\x42\xb0\xef\x15\x21\x2e\x38\x73\x48\x69\x6e\xcb\xfb\x23\x12\ \x61\xad\x02\x58\x05\x34\x06\x20\x3d\x00\xac\x91\x58\xd8\xa2\xb5\ \x4a\xe7\x78\x46\xec\xb9\xc2\x39\x11\x82\x08\x80\xfa\xac\xd0\x98\ \x8c\x60\xac\x72\xc4\xb5\x1a\x78\x44\xb8\xdd\xcd\x68\x54\xff\x8a\ \x52\x2e\x7e\xb5\x34\xbc\x8d\xa8\x1d\xf8\x19\x30\x27\x40\x97\xcc\ \xb3\x94\xda\x79\x60\xad\x25\xbf\x85\x08\x80\xfa\xae\xd5\x58\xa3\ \x34\xe7\x05\xe8\xaf\x39\x9a\x43\xbb\x33\xb7\x53\x82\x59\x51\x0a\ \xb6\x5f\x15\xb0\xf8\x33\xc0\x53\xce\xae\x63\xb1\xe9\x0e\xed\x4c\ \x02\xd8\x0c\x7c\x22\xa0\x5f\x3e\x02\xa0\x31\x9b\x05\x23\x25\x98\ \xa3\x42\x14\xf2\x93\x16\xb7\xd9\x44\x58\xd5\x1f\x71\x88\x48\x9e\ \x1a\x03\x00\x3f\x6a\xc9\x25\x0e\xfb\x3e\x68\xed\xdc\x1e\xe0\xa6\ \x10\xf8\x85\x08\x80\xc6\xee\xb5\x38\xe9\xc1\x10\xc3\x6a\x98\xc4\ \xf0\x54\x80\x38\x2c\xef\xab\x62\x9c\x6d\x69\x64\x7b\xf1\x0f\xe4\ \x61\xad\x69\xc0\xeb\xea\x7b\x16\xf8\x4a\x1e\xe4\x3e\x04\x88\x08\ \xc6\x59\xc1\x7c\x1d\x98\x9a\x47\x54\x1f\x08\x20\xc2\x3e\xad\xa5\ \xe8\x73\x7e\xa7\xa3\x60\x92\xc0\x4f\xa4\x80\xc2\xc4\x65\xbd\x16\ \x94\x52\xdf\x11\x79\x70\xf8\x10\x00\xc1\xa8\x17\xcc\x6e\xe1\x08\ \x63\xeb\x31\xc2\x9b\x74\x14\xf5\xce\x10\x71\x0d\x5d\xc8\x13\x21\ \xe7\xfc\x87\xb2\xbe\x16\x04\x70\xc1\xad\xc0\xdb\xea\x77\x0c\x58\ \x58\x00\x8f\x2f\x01\x10\xac\x63\x82\xfd\xb6\x70\xb9\xbb\xbf\x00\ \xd8\xa2\x39\x06\xd9\x09\x4f\xf8\xea\x83\xf9\xb2\xe2\x6c\x0a\x1e\ \x03\x3e\xb2\xfe\x6f\x04\x36\x89\xb5\x86\x4a\x23\xaf\x07\x7a\x45\ \xfd\xef\xeb\x7d\xa9\x08\x30\x54\x30\x93\xc2\xb1\x5e\x38\x2b\x64\ \x0d\x6e\xd2\x9c\x52\x96\x5d\x72\xdc\xe1\xe0\xc3\x5a\x5b\x41\xc7\ \x66\xab\x33\xf0\x00\xf0\x59\xe0\x61\xe0\x9f\x96\x55\x97\x90\x99\ \xfa\x0c\x70\x1f\xf0\xa6\xde\x1f\xf5\x41\x54\x24\x01\xcc\xc6\x18\ \x2e\x78\x53\x38\x9f\xd1\x1c\x8c\x33\xd5\xab\x39\x3e\xac\x39\x1f\ 
\x70\x36\x72\x6b\x21\x07\xea\x4e\x39\x19\xb6\x85\xb7\x46\x9a\x37\ \x2a\x5b\xfd\x3b\xa2\x66\xc2\x02\x7c\x5e\xbb\x93\x96\xfd\x3f\xde\ \xc3\x0d\x2e\x86\x00\x51\xc1\x7c\x59\x38\x92\xc2\x99\xb2\x36\xe3\ \xdf\xc0\xd3\x9a\xa3\x99\xef\x1a\xc7\x62\x6c\xd1\x1a\x43\x77\x7f\ \xbb\xe3\x7d\x35\x04\x28\x8f\x72\xb1\xdd\x46\x99\xac\x5d\x01\x36\ \xc2\x6e\xd9\x09\xf7\x48\x5e\xab\x15\x0c\x89\x79\x98\xc2\x31\xf5\ \xad\xd6\xd8\x7b\x04\x6b\x77\xc0\x59\xdf\xa5\x39\x6c\x94\x38\x96\ \x07\x28\xf3\x06\xc7\x8b\xdc\x6e\x73\x41\xcc\xea\x3c\xd3\xb1\xd5\ \x5b\xa5\x51\xcf\x38\x40\x7b\x65\x99\x1d\x96\x71\xf2\x10\xf0\x38\ \x70\x8d\x90\x54\xeb\xec\xfd\x82\x6c\xf9\x56\x45\x72\x9a\x80\x66\ \xe0\xb4\x76\xef\x93\x16\x97\x7c\x5a\x32\x3d\x52\x93\xbe\x5e\xbf\ \xf1\x32\x7e\x86\x05\x58\x9b\x5d\xf2\x22\xeb\x65\x36\xf7\x06\x6c\ \xea\x19\xad\x61\xa1\xe0\x44\x80\x25\x5a\xeb\xdf\x5d\xcd\xbf\xc9\ \x39\x3e\x76\x85\x58\x5e\x6e\x5b\xab\x5d\x4c\x00\x7f\x04\x7e\x0f\ \x9c\x04\x3a\x03\x7c\x79\xc3\xbe\x3d\x9a\x70\xc6\x92\xdd\x1e\x4b\ \x8c\xdc\x31\x9d\x82\xf9\x07\xe0\x4f\xc2\xd5\x0d\x3c\xe6\x31\xbf\ \x51\x0e\x17\x24\xb5\xd6\x32\x9b\x03\xae\x53\x18\x6b\x88\xe5\xcf\ \xbf\xa4\xdd\xcb\xd7\x86\xcb\x85\x2d\x17\xb5\xbf\xab\xd8\x5d\x2d\ \x70\xb3\x44\x65\xba\x02\xa1\xa3\xd4\xbf\x42\x78\x86\x38\x32\x9e\ \xb4\x44\xa2\x5d\xb8\x9b\xa4\xe4\x0e\x4a\xc6\xdf\xd3\xee\xdd\x2a\ \x4e\xb9\x0d\xd8\xa6\xfe\x61\xad\x15\xf8\x95\xd6\x67\x38\xe9\x0e\ \xe0\x79\xe0\x94\x21\xc0\x0c\x59\x71\xa6\x1d\x71\x59\x24\x8f\xc1\ \x74\x8b\x9e\x8f\x6a\xb2\x6d\xfa\x1d\x00\x7e\x29\x79\x1e\x21\x02\ \x8c\x92\x8d\x5f\x25\x71\x5b\xa9\xb1\x3b\x81\xbf\x28\xf6\x77\x5e\ \x93\x6e\x95\x02\xeb\x74\xa2\xc2\xef\xe8\x34\x30\xb8\xc7\x16\x20\ \x00\x32\xc9\x8f\x68\x43\x8c\xc5\x3a\xc3\x10\x20\xa6\xe8\x6d\x95\ \x3e\x66\x64\x7f\x37\x7b\x10\x60\x8a\xe4\x14\x19\x28\x6d\xce\xf7\ \xa4\x16\x75\x51\xb1\x3c\xd7\xcc\xfd\xb2\x9e\xff\x2a\x59\xf5\x69\ \xad\xc2\xb5\x50\xb8\xa7\x88\x20\xf9\x5a\x93\xcc\xe8\x99\xc2\x5b\ \xa5\xf1\xbf\x8b\xea\x9f\x39\x96\x42\xea\x00\xfe\x26\x56\x2c\xe4\ 
\x2c\x4d\x17\x5b\xf7\x68\xc7\x7b\x8b\x30\xb7\x33\x21\xcf\x85\x5a\ \xaf\xec\x80\x1e\xe1\xbe\xd1\xc3\xd9\xe9\x01\x5e\x15\x37\x19\x91\ \x9b\x0b\xc4\xa3\x62\xa1\x3a\xab\xf3\x87\xc0\xbf\x3c\x4d\xe6\x9b\ \xf4\xb7\x5d\x2c\x36\x58\xed\x88\x70\x96\x49\xd7\xf8\x98\xb8\x87\ \x9d\x13\xad\x0e\x18\x17\x55\xc0\xd2\xd6\xf6\x27\x75\x6c\x15\x6a\ \x43\x95\xe9\x41\x72\xdb\x32\x88\x04\xf8\x40\x38\xd1\xfc\x87\x7a\ \x8c\x69\x91\xa7\x68\x9f\x0e\x35\x86\x00\x95\xd6\x87\x13\x1e\x4a\ \xc5\xf8\xe0\xc6\x33\x3c\x27\x5f\x61\xb0\xda\x79\xe1\x34\xe1\xb1\ \x61\x1e\x63\xda\xb5\x36\xd3\x2a\x81\x09\x51\x1d\x51\x65\x56\x16\ \xe6\xa4\x67\xfa\x69\xb8\xe5\xee\xb6\xca\x30\x19\xac\xd6\x65\x1d\ \xd1\x23\x34\x97\x42\x2d\xa1\xb5\xa5\x2d\x11\xae\x89\x92\x4d\x54\ \x46\x2d\xad\x7d\xda\xea\x54\x88\x03\xae\xd1\xf3\x47\x45\x2a\xc0\ \xfe\xb6\x5e\x8b\xe3\x2a\x3d\x39\x20\x2d\xd1\x49\x5a\x8a\x70\x4c\ \xcc\xf1\x8e\x22\x0a\x3e\xfa\x64\x63\xeb\xac\xb1\x13\x95\x0b\x28\ \x26\x8b\xbb\xd8\x22\xfc\xe2\x22\x09\x10\xb3\xf4\x4f\x1c\xf8\x92\ \x4e\xb2\x42\x6d\x9e\x73\x62\xc4\x23\xca\xc8\xac\x74\xce\x6e\x1f\ \x0e\x88\x38\xa2\x53\x6c\x0a\x3b\x6a\x59\xa2\xbe\x38\x5d\x22\x44\ \x2d\xf6\xce\x14\x89\x13\x60\x67\x94\xff\xf3\x16\x73\x64\x37\x21\ \xdb\x7a\x9f\xa7\x15\xf8\x98\x0c\xa9\xd7\xc4\x49\xc5\x8a\xc0\xfd\ \x7a\xde\xe1\x69\x7a\xdb\xf3\xbe\x0f\xf8\x8c\xac\xcc\x1f\x79\x58\ \x83\x00\x9f\x52\x64\xb9\xcc\xd2\x25\x3c\x6b\x79\x4a\x3d\x0a\x22\ \xf8\xb4\xb9\x32\x97\x33\x22\x5a\xb1\x49\x88\x62\x23\x42\x6e\x0c\ \x70\x9b\x70\x37\x6b\x2e\x3e\xed\x51\x2b\xa2\x95\x01\x9e\x8d\xea\ \x3c\x4d\x5b\x94\x1d\x87\x5f\x51\x43\xa7\x5c\x52\x73\x14\x95\x0d\ \x22\xe7\x96\x59\x47\x70\x37\x7e\x59\xe1\xa8\xd6\x16\xb3\xf4\xd6\ \xd9\xa8\x1c\x85\x84\xd5\xa9\xd6\x73\x31\xed\xe4\xaa\x34\x46\x7a\ \x5a\x63\xa5\x6a\x43\x85\x13\x89\x40\x87\x27\xd1\x6a\x1d\xc5\xd9\ \x14\x95\x97\xd6\x69\x75\x9c\xec\x69\x58\xb4\x5b\xd6\xd8\x68\xcb\ \x9b\x1c\x8c\x56\x25\x9c\xc6\x08\xf3\xb1\x5c\xe3\x5a\x9b\xcd\xc1\ \xef\x1b\x02\xd8\x81\x8f\x5a\x05\x48\x7c\xac\xb1\x66\x8b\x03\xaa\ 
\x07\x91\x00\xd5\x16\x07\x34\x79\x5a\xa1\xd7\x59\xb6\x83\x21\x5c\ \x53\x54\x1e\xd2\xf1\x90\x20\x47\x21\x6b\xec\x5d\x45\x66\xab\x1c\ \xea\x0e\x74\xab\x13\x4e\x93\xa3\xf0\xb1\x42\x6f\x76\x02\xbc\xc7\ \x81\xd3\x51\xc9\xd0\x7e\x4b\x11\x56\x92\x2d\x3b\x29\xa4\xd5\xd3\ \x0a\x53\x75\x49\x26\x67\x3a\x46\x86\x8f\x21\x15\xf4\xec\x73\x04\ \xce\x10\xce\x2e\xcd\xa1\x90\x11\x55\xa1\x23\xb3\xd2\x9a\xfb\x7e\ \xe0\x52\x4c\x67\xf7\x5e\x11\xe2\x5a\x4d\x66\xa1\x9c\xa4\x42\x67\ \xeb\x51\xb2\x89\xcb\xc9\x22\x40\xdc\x72\x53\xcd\x64\xdd\x90\xd8\ \x48\xf5\x5b\x62\xc5\x05\x97\x90\x4b\x61\xb5\x89\x3d\xdb\x64\xef\ \xbb\x21\x31\x53\x1b\x14\x55\xec\xe2\xa8\x07\xd1\x6a\xc8\x46\x9e\ \x23\x96\xe2\xdc\x03\x24\xcd\x8e\x1d\x12\x2b\x99\x8c\xce\x54\x85\ \xc9\x0a\x11\xa0\x59\xe3\x26\x2b\x32\x53\x27\x97\x73\x92\x58\x6e\ \x66\x48\x50\x34\xe6\x04\x46\x57\x2a\x3c\x96\xb4\x82\xa2\x6d\xd2\ \x4f\xef\x92\x0b\xc3\xbf\x27\x1c\x37\x5a\x81\x91\x26\x0f\x02\x2c\ \xe2\xf2\xac\x72\xa3\xd6\x7c\xd9\x11\xe1\x86\xc5\x1b\x28\x1c\x16\ \x8f\x92\xcd\x14\x25\x75\x1e\xef\x62\xe0\xc3\xe2\x0d\x56\xdf\x6f\ \x7b\xd8\x2c\x5e\x61\xf1\x04\xf0\x0a\xf0\x55\x05\x48\x90\x18\xdc\ \x06\xfc\x3a\x04\x70\xb9\x76\xe3\x5a\x29\xa3\x0a\xe0\x6e\x8b\xcd\ \x52\xe4\xea\x7f\xed\xc4\xc8\x19\x72\x89\x91\x87\xd4\x77\x1b\xf0\ \x86\x22\xc6\x26\x31\x52\x23\xcd\x3d\x5a\xee\xee\x44\xfd\x32\xc2\ \xd1\x2b\xdc\xd3\xa4\xd0\xc2\x14\xe1\xed\x5c\x9e\xa9\x6e\x21\x9b\ \x65\x4a\x04\x9d\x93\xc5\xa6\xc6\x0e\x71\x65\x6a\xac\x05\xf8\x0d\ \xd9\x74\xd6\xdd\x3a\x51\xfa\x9a\x1a\xbb\x45\x30\x9e\x14\xcc\x16\ \xae\x4c\x8d\x1d\x22\x9b\x8f\xf0\x4d\x8d\xfd\x94\x3c\x09\x52\x9f\ \xe4\xe8\xd3\x5c\x99\x1c\xbd\x28\xe0\x29\xe0\xc7\xda\xb5\x42\x45\ \x98\xc5\x26\x47\x47\x0b\x76\x4a\xb8\x2e\xe0\x97\x1c\xbd\x88\x67\ \x72\xd4\x70\x41\xa1\xf4\x78\x2f\x97\xa7\xc7\x7f\x20\x44\x26\x75\ \xfd\x86\x3c\xc5\x52\x3b\x43\x37\x08\xb6\x29\xc0\x78\x44\xb8\x83\ \xd2\xe3\x5f\xa7\x8f\xe9\x71\x08\x2e\x90\x38\x4e\x78\x81\x44\x85\ \x42\x63\x9b\x2d\x85\xf5\x78\x89\x39\x60\x08\xf0\x0d\x4b\xb1\x6e\ 
\x16\x4e\x53\x20\xf1\x3d\x4a\x54\x20\x61\x4e\x84\x75\x84\x97\xc8\ \x6c\x21\xb8\x44\x66\xa9\x14\x9d\xa9\x07\x9e\x5a\x42\x02\x4c\x15\ \xcc\x8c\x70\x2c\x0d\x30\x74\x0a\x95\xc8\xac\x2b\xc6\x63\xed\x4b\ \x91\xd4\x70\xe0\x05\x72\xc5\x4c\xdf\x0a\x50\x4a\x7d\x21\x40\xb9\ \x94\x60\xb7\xc5\xc6\x61\xce\x5a\x49\x8a\xa4\x4c\xeb\x4b\x99\xdc\ \x22\x4b\x17\x34\xea\xff\xfe\x12\x60\x11\xb9\x5a\xc5\xa3\x79\x60\ \x96\xb4\x4c\xce\x1c\x49\xcb\x08\x2f\x94\x1c\x1e\x32\x89\x0d\x5a\ \x54\x1a\xf8\x45\x1e\x8e\xf1\x21\x40\xb5\x60\xa4\xd5\x6f\x43\x08\ \xf1\xf3\x15\x4a\x2e\xa3\x1f\x15\xe4\x7d\x29\x95\xbd\x81\x6c\x22\ \x32\xa3\x23\x68\x5d\x88\x28\x14\x22\x40\x39\xd9\xd2\x36\x73\x8c\ \xbd\x2a\xd8\x41\xe2\xfa\x14\x03\x50\x2a\x6b\xef\xea\x6a\x99\xa2\ \x41\xc5\xd2\x73\x1d\x8d\x1f\x21\x5b\xed\xdd\x6c\x1d\x59\x77\x05\ \xec\x42\x3e\x02\x44\xc8\x96\xd8\x1c\xb3\xe2\x7e\x2b\x1c\x18\x83\ \x52\x2c\x6d\x13\x21\xac\x5c\xfe\x20\x57\x96\xcb\x57\x8a\x5d\x3b\ \xc9\xd5\x0c\xcf\x2a\x82\x00\xb3\x34\x26\x23\x18\x1b\xb8\x3c\x7f\ \x39\x56\x38\x07\xa5\x5c\xde\x16\x87\xb0\x0b\x13\x17\x65\x72\xae\ \xb0\x42\x55\xe3\x81\x17\xd5\x37\x25\x9f\x62\x8a\x07\x01\xa6\xa8\ \xaf\x19\xf7\x22\xb9\x22\x0c\x73\x61\xa2\x81\x41\xbe\x30\x61\xb3\ \xe6\x6c\xe5\x00\x7c\xae\xcc\x2c\x50\xce\xc0\x58\x69\x3b\x14\x72\ \x0b\x23\xc0\x44\xf5\x31\xd6\xe6\x6b\x82\x51\xa7\x1c\x42\x03\x57\ \xf1\xca\x8c\xab\x9d\xd7\xc9\xba\x4a\x05\x4c\xa8\x5d\xec\xf9\xbc\ \xac\xb6\x36\xbd\xef\x96\x33\x32\x29\x80\x00\x93\xf4\xad\x5b\x7d\ \xdb\x34\xfe\x39\xc1\x0a\x22\x78\x4a\x73\x58\xd7\x97\x73\xbe\x14\ \x22\x61\xae\xcd\x35\x13\x7e\xbd\xad\xc3\x31\x4c\x7a\xc8\x56\x7c\ \xd6\x93\xbb\x36\x57\xaf\x77\x3d\x8e\xe1\xd5\x41\xfe\x6b\x73\x5b\ \xe8\xe7\xb5\xb9\x52\xb4\x38\xf0\x79\xed\x9e\xef\xc5\xc9\x74\x40\ \x40\x24\xcd\x55\xb8\x38\x39\x10\x57\x67\x97\x93\xbb\x3a\x3b\x82\ \xd2\x5d\x9d\x6d\x24\x7b\x75\xf6\x15\x4a\x78\x75\x76\xa0\x2f\x4f\ \x2f\x24\x77\x79\x7a\x34\xfe\x97\xa7\x3b\x15\x18\x3d\xae\xe8\xed\ \x5e\xfe\x07\x2e\x4f\x87\x85\xb0\xe3\x52\x50\x13\xf4\xab\x91\xd3\ 
\x62\xdf\x04\xed\xd5\x8e\x9e\x95\xb7\x67\xae\xcf\x9f\xd6\xfb\x01\ \xbb\x3e\xff\x1f\x47\x29\xb9\xca\x4a\x4c\x68\xf5\x00\x00\x00\x00\ \x49\x45\x4e\x44\xae\x42\x60\x82\ \x00\x00\x0d\x09\ \x89\ \x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\ \x00\x00\x40\x00\x00\x00\x40\x08\x06\x00\x00\x00\xaa\x69\x71\xde\ \x00\x00\x01\x85\x69\x43\x43\x50\x49\x43\x43\x20\x70\x72\x6f\x66\ \x69\x6c\x65\x00\x00\x28\x91\x7d\x91\x3d\x48\xc3\x40\x1c\xc5\x5f\ \x53\xb5\x22\x15\x05\x8b\x88\x88\x64\xa8\x4e\x56\x44\x45\x1c\xb5\ \x0a\x45\xa8\x10\x6a\x85\x56\x1d\x4c\x2e\xfd\x10\x9a\x34\x24\x29\ \x2e\x8e\x82\x6b\xc1\xc1\x8f\xc5\xaa\x83\x8b\xb3\xae\x0e\xae\x82\ \x20\xf8\x01\xe2\xe4\xe8\xa4\xe8\x22\x25\xfe\x2f\x2d\xb4\x88\xf1\ \xe0\xb8\x1f\xef\xee\x3d\xee\xde\x01\x42\xa5\xc0\x34\xab\x65\x0c\ \xd0\x74\xdb\x4c\xc4\xa2\x62\x2a\xbd\x22\x06\x5e\xd1\x86\x41\x74\ \xa3\x17\xa3\x32\xb3\x8c\x59\x49\x8a\xc3\x73\x7c\xdd\xc3\xc7\xd7\ \xbb\x08\xcf\xf2\x3e\xf7\xe7\xe8\x54\x33\x16\x03\x7c\x22\xf1\x0c\ \x33\x4c\x9b\x78\x9d\x78\x6a\xd3\x36\x38\xef\x13\x87\x58\x5e\x56\ \x89\xcf\x89\x47\x4c\xba\x20\xf1\x23\xd7\x95\x1a\xbf\x71\xce\xb9\ \x2c\xf0\xcc\x90\x99\x4c\xcc\x11\x87\x88\xc5\x5c\x13\x2b\x4d\xcc\ \xf2\xa6\x46\x3c\x49\x1c\x56\x35\x9d\xf2\x85\x54\x8d\x55\xce\x5b\ \x9c\xb5\x42\x89\xd5\xef\xc9\x5f\x18\xcc\xe8\xcb\x4b\x5c\xa7\x39\ \x80\x18\x16\xb0\x08\x09\x22\x14\x94\xb0\x81\x02\x6c\x44\x68\xd5\ \x49\xb1\x90\xa0\xfd\xa8\x87\xbf\xdf\xf5\x4b\xe4\x52\xc8\xb5\x01\ \x46\x8e\x79\x14\xa1\x41\x76\xfd\xe0\x7f\xf0\xbb\x5b\x2b\x3b\x31\ \x5e\x4b\x0a\x46\x81\xd6\x17\xc7\xf9\x18\x02\x02\xbb\x40\xb5\xec\ \x38\xdf\xc7\x8e\x53\x3d\x01\xfc\xcf\xc0\x95\xde\xf0\x17\x2b\xc0\ \xf4\x27\xe9\xe5\x86\x16\x3e\x02\xba\xb6\x81\x8b\xeb\x86\xa6\xec\ \x01\x97\x3b\x40\xdf\x93\x21\x9b\xb2\x2b\xf9\x69\x0a\xd9\x2c\xf0\ \x7e\x46\xdf\x94\x06\x7a\x6e\x81\x8e\xd5\x5a\x6f\xf5\x7d\x9c\x3e\ \x00\x49\xea\x2a\x7e\x03\x1c\x1c\x02\xc3\x39\xca\x5e\xf3\x78\x77\ \x7b\x73\x6f\xff\x9e\xa9\xf7\xf7\x03\x96\x07\x72\xb5\x90\xcd\x14\ 
\x8e\x00\x00\x00\x06\x62\x4b\x47\x44\x00\xff\x00\xff\x00\xff\xa0\ \xbd\xa7\x93\x00\x00\x00\x09\x70\x48\x59\x73\x00\x00\x2e\x23\x00\ \x00\x2e\x23\x01\x78\xa5\x3f\x76\x00\x00\x00\x07\x74\x49\x4d\x45\ \x07\xe4\x06\x1e\x10\x18\x39\xbf\xbf\x95\x19\x00\x00\x0b\x05\x49\ \x44\x41\x54\x78\xda\xe5\x9b\x5b\x90\x94\xc5\x15\xc7\x7f\x33\xcc\ \xee\xe2\xc2\x2c\x86\xcb\x82\xb8\xb0\xb0\x08\xe2\xa5\xb8\x07\x12\ \x40\x03\x26\xb1\x22\x44\x4b\xaa\x52\x49\x61\x30\x2a\x46\x7c\x11\ \x83\x0f\x79\x23\x9a\x08\x09\x4f\xa9\x58\x31\xa5\x04\xac\x2c\x81\ \x4a\x48\xca\x58\x86\x0d\x56\xae\x95\xc4\x18\x90\xc4\x42\x10\x2b\ \x44\x97\x9b\xe0\xee\x0a\xc2\x2e\x02\x7b\x9f\x5b\x1e\xe6\xdf\x99\ \xa6\xf9\xbe\x99\x9e\xdd\xd9\xc5\x54\xba\x6a\x6a\xbf\xfd\xbe\xee\ \x73\xba\x4f\x9f\x73\xfa\xdc\x3a\xc2\xc0\xb6\x18\x10\x07\xc6\x01\ \x35\xc0\x44\xfd\x1d\xad\xf7\x15\xea\xd7\x03\x5c\x02\xce\x01\xef\ \x03\x4d\xfa\x7b\x06\xb8\x08\x24\x07\x6a\x82\x91\x01\x80\x59\xa6\ \x05\xcf\x00\x16\x01\x73\x81\x3a\x60\x14\x50\xa9\xef\xd1\x90\xb1\ \x69\x20\x01\x74\x89\x18\x27\x80\xfd\xc0\x1e\xe0\x10\xf0\x81\xbe\ \x7f\x2c\x09\x10\xd7\xa2\xbf\x08\xdc\x01\x4c\x03\xaa\xf2\x2c\xd6\ \xb7\xa5\xc5\x05\x47\x80\x3f\x03\xbb\x81\xb7\xc4\x31\x1f\x8b\x16\ \x07\xee\x04\xb6\x03\xa7\xc4\xae\x19\x8f\x5f\x22\xe4\x39\xdf\x2f\ \x29\xd1\xd8\x2e\x9c\xf1\xab\xb9\xf0\x32\x60\x3e\xb0\x15\x68\xd1\ \x4e\xb9\x13\x4e\x6b\xa7\x3a\x9c\xf7\x07\x81\x97\xb4\xf0\x84\x9e\ \x0f\x3a\x7d\x3a\x34\x36\x0c\x6e\x8b\x70\xcf\xd7\x5c\xfa\xd4\x86\ \xf4\x71\xdc\x58\x60\x35\xb0\x11\xf8\x9c\x58\xdd\x16\xa7\x0e\xe0\ \x30\xf0\x32\xf0\x5b\x60\x02\x30\x46\x93\xff\x07\xf0\x4d\xe0\x3c\ \x70\x97\xde\xfd\x10\xf8\x39\x30\x1d\xb8\x5e\xb0\x8e\x00\xcf\x01\ \xef\x68\x81\x71\xa0\xdc\x12\xdd\x38\x30\x0b\xb8\x5d\xeb\x38\x21\ \xbc\x03\xda\x22\xc0\x6c\x60\xa7\x76\xc7\xdd\x99\x73\x40\x03\xf0\ \x35\x29\xbe\x5a\xa0\x5e\x5a\x3e\x03\xec\x03\x16\x0b\xce\x6a\xbd\ \xef\xd1\x73\x44\xdf\xf6\xa9\x6f\x8f\xc6\xd6\x02\x93\x81\xfb\x81\ \x5d\xc2\xe1\xe2\xbd\xa4\x39\xcd\x1e\x20\xc5\xfe\x5f\x96\x5f\xa6\ 
\x09\xa6\x9c\x09\x5c\xd0\xc2\xef\x95\xb6\x07\x18\x06\x6c\xb0\xd8\ \xff\x2d\x60\xa9\x35\x41\x97\x00\x86\xc0\x4b\xd5\xd7\x88\xc1\x06\ \xc1\x42\xb0\xef\x15\x21\x2e\x38\x73\x48\x69\x6e\xcb\xfb\x23\x12\ \x61\xad\x02\x58\x05\x34\x06\x20\x3d\x00\xac\x91\x58\xd8\xa2\xb5\ \x4a\xe7\x78\x46\xec\xb9\xc2\x39\x11\x82\x08\x80\xfa\xac\xd0\x98\ \x8c\x60\xac\x72\xc4\xb5\x1a\x78\x44\xb8\xdd\xcd\x68\x54\xff\x8a\ \x52\x2e\x7e\xb5\x34\xbc\x8d\xa8\x1d\xf8\x19\x30\x27\x40\x97\xcc\ \xb3\x94\xda\x79\x60\xad\x25\xbf\x85\x08\x80\xfa\xae\xd5\x58\xa3\ \x34\xe7\x05\xe8\xaf\x39\x9a\x43\xbb\x33\xb7\x53\x82\x59\x51\x0a\ \xb6\x5f\x15\xb0\xf8\x33\xc0\x53\xce\xae\x63\xb1\xe9\x0e\xed\x4c\ \x02\xd8\x0c\x7c\x22\xa0\x5f\x3e\x02\xa0\x31\x9b\x05\x23\x25\x98\ \xa3\x42\x14\xf2\x93\x16\xb7\xd9\x44\x58\xd5\x1f\x71\x88\x48\x9e\ \x1a\x03\x00\x3f\x6a\xc9\x25\x0e\xfb\x3e\x68\xed\xdc\x1e\xe0\xa6\ \x10\xf8\x85\x08\x80\xc6\xee\xb5\x38\xe9\xc1\x10\xc3\x6a\x98\xc4\ \xf0\x54\x80\x38\x2c\xef\xab\x62\x9c\x6d\x69\x64\x7b\xf1\x0f\xe4\ \x61\xad\x69\xc0\xeb\xea\x7b\x16\xf8\x4a\x1e\xe4\x3e\x04\x88\x08\ \xc6\x59\xc1\x7c\x1d\x98\x9a\x47\x54\x1f\x08\x20\xc2\x3e\xad\xa5\ \xe8\x73\x7e\xa7\xa3\x60\x92\xc0\x4f\xa4\x80\xc2\xc4\x65\xbd\x16\ \x94\x52\xdf\x11\x79\x70\xf8\x10\x00\xc1\xa8\x17\xcc\x6e\xe1\x08\ \x63\xeb\x31\xc2\x9b\x74\x14\xf5\xce\x10\x71\x0d\x5d\xc8\x13\x21\ \xe7\xfc\x87\xb2\xbe\x16\x04\x70\xc1\xad\xc0\xdb\xea\x77\x0c\x58\ \x58\x00\x8f\x2f\x01\x10\xac\x63\x82\xfd\xb6\x70\xb9\xbb\xbf\x00\ \xd8\xa2\x39\x06\xd9\x09\x4f\xf8\xea\x83\xf9\xb2\xe2\x6c\x0a\x1e\ \x03\x3e\xb2\xfe\x6f\x04\x36\x89\xb5\x86\x4a\x23\xaf\x07\x7a\x45\ \xfd\xef\xeb\x7d\xa9\x08\x30\x54\x30\x93\xc2\xb1\x5e\x38\x2b\x64\ \x0d\x6e\xd2\x9c\x52\x96\x5d\x72\xdc\xe1\xe0\xc3\x5a\x5b\x41\xc7\ \x66\xab\x33\xf0\x00\xf0\x59\xe0\x61\xe0\x9f\x96\x55\x97\x90\x99\ \xfa\x0c\x70\x1f\xf0\xa6\xde\x1f\xf5\x41\x54\x24\x01\xcc\xc6\x18\ \x2e\x78\x53\x38\x9f\xd1\x1c\x8c\x33\xd5\xab\x39\x3e\xac\x39\x1f\ \x70\x36\x72\x6b\x21\x07\xea\x4e\x39\x19\xb6\x85\xb7\x46\x9a\x37\ 
\x2a\x5b\xfd\x3b\xa2\x66\xc2\x02\x7c\x5e\xbb\x93\x96\xfd\x3f\xde\ \xc3\x0d\x2e\x86\x00\x51\xc1\x7c\x59\x38\x92\xc2\x99\xb2\x36\xe3\ \xdf\xc0\xd3\x9a\xa3\x99\xef\x1a\xc7\x62\x6c\xd1\x1a\x43\x77\x7f\ \xbb\xe3\x7d\x35\x04\x28\x8f\x72\xb1\xdd\x46\x99\xac\x5d\x01\x36\ \xc2\x6e\xd9\x09\xf7\x48\x5e\xab\x15\x0c\x89\x79\x98\xc2\x31\xf5\ \xad\xd6\xd8\x7b\x04\x6b\x77\xc0\x59\xdf\xa5\x39\x6c\x94\x38\x96\ \x07\x28\xf3\x06\xc7\x8b\xdc\x6e\x73\x41\xcc\xea\x3c\xd3\xb1\xd5\ \x5b\xa5\x51\xcf\x38\x40\x7b\x65\x99\x1d\x96\x71\xf2\x10\xf0\x38\ \x70\x8d\x90\x54\xeb\xec\xfd\x82\x6c\xf9\x56\x45\x72\x9a\x80\x66\ \xe0\xb4\x76\xef\x93\x16\x97\x7c\x5a\x32\x3d\x52\x93\xbe\x5e\xbf\ \xf1\x32\x7e\x86\x05\x58\x9b\x5d\xf2\x22\xeb\x65\x36\xf7\x06\x6c\ \xea\x19\xad\x61\xa1\xe0\x44\x80\x25\x5a\xeb\xdf\x5d\xcd\xbf\xc9\ \x39\x3e\x76\x85\x58\x5e\x6e\x5b\xab\x5d\x4c\x00\x7f\x04\x7e\x0f\ \x9c\x04\x3a\x03\x7c\x79\xc3\xbe\x3d\x9a\x70\xc6\x92\xdd\x1e\x4b\ \x8c\xdc\x31\x9d\x82\xf9\x07\xe0\x4f\xc2\xd5\x0d\x3c\xe6\x31\xbf\ \x51\x0e\x17\x24\xb5\xd6\x32\x9b\x03\xae\x53\x18\x6b\x88\xe5\xcf\ \xbf\xa4\xdd\xcb\xd7\x86\xcb\x85\x2d\x17\xb5\xbf\xab\xd8\x5d\x2d\ \x70\xb3\x44\x65\xba\x02\xa1\xa3\xd4\xbf\x42\x78\x86\x38\x32\x9e\ \xb4\x44\xa2\x5d\xb8\x9b\xa4\xe4\x0e\x4a\xc6\xdf\xd3\xee\xdd\x2a\ \x4e\xb9\x0d\xd8\xa6\xfe\x61\xad\x15\xf8\x95\xd6\x67\x38\xe9\x0e\ \xe0\x79\xe0\x94\x21\xc0\x0c\x59\x71\xa6\x1d\x71\x59\x24\x8f\xc1\ \x74\x8b\x9e\x8f\x6a\xb2\x6d\xfa\x1d\x00\x7e\x29\x79\x1e\x21\x02\ \x8c\x92\x8d\x5f\x25\x71\x5b\xa9\xb1\x3b\x81\xbf\x28\xf6\x77\x5e\ \x93\x6e\x95\x02\xeb\x74\xa2\xc2\xef\xe8\x34\x30\xb8\xc7\x16\x20\ \x00\x32\xc9\x8f\x68\x43\x8c\xc5\x3a\xc3\x10\x20\xa6\xe8\x6d\x95\ \x3e\x66\x64\x7f\x37\x7b\x10\x60\x8a\xe4\x14\x19\x28\x6d\xce\xf7\ \xa4\x16\x75\x51\xb1\x3c\xd7\xcc\xfd\xb2\x9e\xff\x2a\x59\xf5\x69\ \xad\xc2\xb5\x50\xb8\xa7\x88\x20\xf9\x5a\x93\xcc\xe8\x99\xc2\x5b\ \xa5\xf1\xbf\x8b\xea\x9f\x39\x96\x42\xea\x00\xfe\x26\x56\x2c\xe4\ \x2c\x4d\x17\x5b\xf7\x68\xc7\x7b\x8b\x30\xb7\x33\x21\xcf\x85\x5a\ 
\xaf\xec\x80\x1e\xe1\xbe\xd1\xc3\xd9\xe9\x01\x5e\x15\x37\x19\x91\ \x9b\x0b\xc4\xa3\x62\xa1\x3a\xab\xf3\x87\xc0\xbf\x3c\x4d\xe6\x9b\ \xf4\xb7\x5d\x2c\x36\x58\xed\x88\x70\x96\x49\xd7\xf8\x98\xb8\x87\ \x9d\x13\xad\x0e\x18\x17\x55\xc0\xd2\xd6\xf6\x27\x75\x6c\x15\x6a\ \x43\x95\xe9\x41\x72\xdb\x32\x88\x04\xf8\x40\x38\xd1\xfc\x87\x7a\ \x8c\x69\x91\xa7\x68\x9f\x0e\x35\x86\x00\x95\xd6\x87\x13\x1e\x4a\ \xc5\xf8\xe0\xc6\x33\x3c\x27\x5f\x61\xb0\xda\x79\xe1\x34\xe1\xb1\ \x61\x1e\x63\xda\xb5\x36\xd3\x2a\x81\x09\x51\x1d\x51\x65\x56\x16\ \xe6\xa4\x67\xfa\x69\xb8\xe5\xee\xb6\xca\x30\x19\xac\xd6\x65\x1d\ \xd1\x23\x34\x97\x42\x2d\xa1\xb5\xa5\x2d\x11\xae\x89\x92\x4d\x54\ \x46\x2d\xad\x7d\xda\xea\x54\x88\x03\xae\xd1\xf3\x47\x45\x2a\xc0\ \xfe\xb6\x5e\x8b\xe3\x2a\x3d\x39\x20\x2d\xd1\x49\x5a\x8a\x70\x4c\ \xcc\xf1\x8e\x22\x0a\x3e\xfa\x64\x63\xeb\xac\xb1\x13\x95\x0b\x28\ \x26\x8b\xbb\xd8\x22\xfc\xe2\x22\x09\x10\xb3\xf4\x4f\x1c\xf8\x92\ \x4e\xb2\x42\x6d\x9e\x73\x62\xc4\x23\xca\xc8\xac\x74\xce\x6e\x1f\ \x0e\x88\x38\xa2\x53\x6c\x0a\x3b\x6a\x59\xa2\xbe\x38\x5d\x22\x44\ \x2d\xf6\xce\x14\x89\x13\x60\x67\x94\xff\xf3\x16\x73\x64\x37\x21\ \xdb\x7a\x9f\xa7\x15\xf8\x98\x0c\xa9\xd7\xc4\x49\xc5\x8a\xc0\xfd\ \x7a\xde\xe1\x69\x7a\xdb\xf3\xbe\x0f\xf8\x8c\xac\xcc\x1f\x79\x58\ \x83\x00\x9f\x52\x64\xb9\xcc\xd2\x25\x3c\x6b\x79\x4a\x3d\x0a\x22\ \xf8\xb4\xb9\x32\x97\x33\x22\x5a\xb1\x49\x88\x62\x23\x42\x6e\x0c\ \x70\x9b\x70\x37\x6b\x2e\x3e\xed\x51\x2b\xa2\x95\x01\x9e\x8d\xea\ \x3c\x4d\x5b\x94\x1d\x87\x5f\x51\x43\xa7\x5c\x52\x73\x14\x95\x0d\ \x22\xe7\x96\x59\x47\x70\x37\x7e\x59\xe1\xa8\xd6\x16\xb3\xf4\xd6\ \xd9\xa8\x1c\x85\x84\xd5\xa9\xd6\x73\x31\xed\xe4\xaa\x34\x46\x7a\ \x5a\x63\xa5\x6a\x43\x85\x13\x89\x40\x87\x27\xd1\x6a\x1d\xc5\xd9\ \x14\x95\x97\xd6\x69\x75\x9c\xec\x69\x58\xb4\x5b\xd6\xd8\x68\xcb\ \x9b\x1c\x8c\x56\x25\x9c\xc6\x08\xf3\xb1\x5c\xe3\x5a\x9b\xcd\xc1\ \xef\x1b\x02\xd8\x81\x8f\x5a\x05\x48\x7c\xac\xb1\x66\x8b\x03\xaa\ \x07\x91\x00\xd5\x16\x07\x34\x79\x5a\xa1\xd7\x59\xb6\x83\x21\x5c\ 
\x53\x54\x1e\xd2\xf1\x90\x20\x47\x21\x6b\xec\x5d\x45\x66\xab\x1c\ \xea\x0e\x74\xab\x13\x4e\x93\xa3\xf0\xb1\x42\x6f\x76\x02\xbc\xc7\ \x81\xd3\x51\xc9\xd0\x7e\x4b\x11\x56\x92\x2d\x3b\x29\xa4\xd5\xd3\ \x0a\x53\x75\x49\x26\x67\x3a\x46\x86\x8f\x21\x15\xf4\xec\x73\x04\ \xce\x10\xce\x2e\xcd\xa1\x90\x11\x55\xa1\x23\xb3\xd2\x9a\xfb\x7e\ \xe0\x52\x4c\x67\xf7\x5e\x11\xe2\x5a\x4d\x66\xa1\x9c\xa4\x42\x67\ \xeb\x51\xb2\x89\xcb\xc9\x22\x40\xdc\x72\x53\xcd\x64\xdd\x90\xd8\ \x48\xf5\x5b\x62\xc5\x05\x97\x90\x4b\x61\xb5\x89\x3d\xdb\x64\xef\ \xbb\x21\x31\x53\x1b\x14\x55\xec\xe2\xa8\x07\xd1\x6a\xc8\x46\x9e\ \x23\x96\xe2\xdc\x03\x24\xcd\x8e\x1d\x12\x2b\x99\x8c\xce\x54\x85\ \xc9\x0a\x11\xa0\x59\xe3\x26\x2b\x32\x53\x27\x97\x73\x92\x58\x6e\ \x66\x48\x50\x34\xe6\x04\x46\x57\x2a\x3c\x96\xb4\x82\xa2\x6d\xd2\ \x4f\xef\x92\x0b\xc3\xbf\x27\x1c\x37\x5a\x81\x91\x26\x0f\x02\x2c\ \xe2\xf2\xac\x72\xa3\xd6\x7c\xd9\x11\xe1\x86\xc5\x1b\x28\x1c\x16\ \x8f\x92\xcd\x14\x25\x75\x1e\xef\x62\xe0\xc3\xe2\x0d\x56\xdf\x6f\ \x7b\xd8\x2c\x5e\x61\xf1\x04\xf0\x0a\xf0\x55\x05\x48\x90\x18\xdc\ \x06\xfc\x3a\x04\x70\xb9\x76\xe3\x5a\x29\xa3\x0a\xe0\x6e\x8b\xcd\ \x52\xe4\xea\x7f\xed\xc4\xc8\x19\x72\x89\x91\x87\xd4\x77\x1b\xf0\ \x86\x22\xc6\x26\x31\x52\x23\xcd\x3d\x5a\xee\xee\x44\xfd\x32\xc2\ \xd1\x2b\xdc\xd3\xa4\xd0\xc2\x14\xe1\xed\x5c\x9e\xa9\x6e\x21\x9b\ \x65\x4a\x04\x9d\x93\xc5\xa6\xc6\x0e\x71\x65\x6a\xac\x05\xf8\x0d\ \xd9\x74\xd6\xdd\x3a\x51\xfa\x9a\x1a\xbb\x45\x30\x9e\x14\xcc\x16\ \xae\x4c\x8d\x1d\x22\x9b\x8f\xf0\x4d\x8d\xfd\x94\x3c\x09\x52\x9f\ \xe4\xe8\xd3\x5c\x99\x1c\xbd\x28\xe0\x29\xe0\xc7\xda\xb5\x42\x45\ \x98\xc5\x26\x47\x47\x0b\x76\x4a\xb8\x2e\xe0\x97\x1c\xbd\x88\x67\ \x72\xd4\x70\x41\xa1\xf4\x78\x2f\x97\xa7\xc7\x7f\x20\x44\x26\x75\ \xfd\x86\x3c\xc5\x52\x3b\x43\x37\x08\xb6\x29\xc0\x78\x44\xb8\x83\ \xd2\xe3\x5f\xa7\x8f\xe9\x71\x08\x2e\x90\x38\x4e\x78\x81\x44\x85\ \x42\x63\x9b\x2d\x85\xf5\x78\x89\x39\x60\x08\xf0\x0d\x4b\xb1\x6e\ \x16\x4e\x53\x20\xf1\x3d\x4a\x54\x20\x61\x4e\x84\x75\x84\x97\xc8\ 
\x6c\x21\xb8\x44\x66\xa9\x14\x9d\xa9\x07\x9e\x5a\x42\x02\x4c\x15\ \xcc\x8c\x70\x2c\x0d\x30\x74\x0a\x95\xc8\xac\x2b\xc6\x63\xed\x4b\ \x91\xd4\x70\xe0\x05\x72\xc5\x4c\xdf\x0a\x50\x4a\x7d\x21\x40\xb9\ \x94\x60\xb7\xc5\xc6\x61\xce\x5a\x49\x8a\xa4\x4c\xeb\x4b\x99\xdc\ \x22\x4b\x17\x34\xea\xff\xfe\x12\x60\x11\xb9\x5a\xc5\xa3\x79\x60\ \x96\xb4\x4c\xce\x1c\x49\xcb\x08\x2f\x94\x1c\x1e\x32\x89\x0d\x5a\ \x54\x1a\xf8\x45\x1e\x8e\xf1\x21\x40\xb5\x60\xa4\xd5\x6f\x43\x08\ \xf1\xf3\x15\x4a\x2e\xa3\x1f\x15\xe4\x7d\x29\x95\xbd\x81\x6c\x22\ \x32\xa3\x23\x68\x5d\x88\x28\x14\x22\x40\x39\xd9\xd2\x36\x73\x8c\ \xbd\x2a\xd8\x41\xe2\xfa\x14\x03\x50\x2a\x6b\xef\xea\x6a\x99\xa2\ \x41\xc5\xd2\x73\x1d\x8d\x1f\x21\x5b\xed\xdd\x6c\x1d\x59\x77\x05\ \xec\x42\x3e\x02\x44\xc8\x96\xd8\x1c\xb3\xe2\x7e\x2b\x1c\x18\x83\ \x52\x2c\x6d\x13\x21\xac\x5c\xfe\x20\x57\x96\xcb\x57\x8a\x5d\x3b\ \xc9\xd5\x0c\xcf\x2a\x82\x00\xb3\x34\x26\x23\x18\x1b\xb8\x3c\x7f\ \x39\x56\x38\x07\xa5\x5c\xde\x16\x87\xb0\x0b\x13\x17\x65\x72\xae\ \xb0\x42\x55\xe3\x81\x17\xd5\x37\x25\x9f\x62\x8a\x07\x01\xa6\xa8\ \xaf\x19\xf7\x22\xb9\x22\x0c\x73\x61\xa2\x81\x41\xbe\x30\x61\xb3\ \xe6\x6c\xe5\x00\x7c\xae\xcc\x2c\x50\xce\xc0\x58\x69\x3b\x14\x72\ \x0b\x23\xc0\x44\xf5\x31\xd6\xe6\x6b\x82\x51\xa7\x1c\x42\x03\x57\ \xf1\xca\x8c\xab\x9d\xd7\xc9\xba\x4a\x05\x4c\xa8\x5d\xec\xf9\xbc\ \xac\xb6\x36\xbd\xef\x96\x33\x32\x29\x80\x00\x93\xf4\xad\x5b\x7d\ \xdb\x34\xfe\x39\xc1\x0a\x22\x78\x4a\x73\x58\xd7\x97\x73\xbe\x14\ \x22\x61\xae\xcd\x35\x13\x7e\xbd\xad\xc3\x31\x4c\x7a\xc8\x56\x7c\ \xd6\x93\xbb\x36\x57\xaf\x77\x3d\x8e\xe1\xd5\x41\xfe\x6b\x73\x5b\ \xe8\xe7\xb5\xb9\x52\xb4\x38\xf0\x79\xed\x9e\xef\xc5\xc9\x74\x40\ \x40\x24\xcd\x55\xb8\x38\x39\x10\x57\x67\x97\x93\xbb\x3a\x3b\x82\ \xd2\x5d\x9d\x6d\x24\x7b\x75\xf6\x15\x4a\x78\x75\x76\xa0\x2f\x4f\ \x2f\x24\x77\x79\x7a\x34\xfe\x97\xa7\x3b\x15\x18\x3d\xae\xe8\xed\ \x5e\xfe\x07\x2e\x4f\x87\x85\xb0\xe3\x52\x50\x13\xf4\xab\x91\xd3\ \x62\xdf\x04\xed\xd5\x8e\x9e\x95\xb7\x67\xae\xcf\x9f\xd6\xfb\x01\ 
\xbb\x3e\xff\x1f\x47\x29\xb9\xca\x4a\x4c\x68\xf5\x00\x00\x00\x00\ \x49\x45\x4e\x44\xae\x42\x60\x82\ " qt_resource_name = b"\ \x00\x07\ \x07\x3b\xe0\xb3\ \x00\x70\ \x00\x6c\x00\x75\x00\x67\x00\x69\x00\x6e\x00\x73\ \x00\x08\ \x0b\xe8\xb0\x92\ \x00\x51\ \x00\x47\x00\x49\x00\x53\x00\x53\x00\x6f\x00\x6c\x00\x72\ \x00\x0e\ \x0a\xe8\xf1\x87\ \x00\x64\ \x00\x69\x00\x61\x00\x6c\x00\x6f\x00\x67\x00\x6c\x00\x6f\x00\x67\x00\x6f\x00\x2e\x00\x70\x00\x6e\x00\x67\ \x00\x08\ \x0a\x61\x5a\xa7\ \x00\x69\ \x00\x63\x00\x6f\x00\x6e\x00\x2e\x00\x70\x00\x6e\x00\x67\ " qt_resource_struct_v1 = b"\ \x00\x00\x00\x00\x00\x02\x00\x00\x00\x01\x00\x00\x00\x01\ \x00\x00\x00\x00\x00\x02\x00\x00\x00\x01\x00\x00\x00\x02\ \x00\x00\x00\x14\x00\x02\x00\x00\x00\x02\x00\x00\x00\x03\ \x00\x00\x00\x4c\x00\x00\x00\x00\x00\x01\x00\x00\x0d\x0d\ \x00\x00\x00\x2a\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\ " qt_resource_struct_v2 = b"\ \x00\x00\x00\x00\x00\x02\x00\x00\x00\x01\x00\x00\x00\x01\ \x00\x00\x00\x00\x00\x00\x00\x00\ \x00\x00\x00\x00\x00\x02\x00\x00\x00\x01\x00\x00\x00\x02\ \x00\x00\x00\x00\x00\x00\x00\x00\ \x00\x00\x00\x14\x00\x02\x00\x00\x00\x02\x00\x00\x00\x03\ \x00\x00\x00\x00\x00\x00\x00\x00\ \x00\x00\x00\x4c\x00\x00\x00\x00\x00\x01\x00\x00\x0d\x0d\ \x00\x00\x01\x73\x06\x0b\xb6\x1e\ \x00\x00\x00\x2a\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\ \x00\x00\x01\x73\x06\x0c\x26\x96\ " qt_version = [int(v) for v in QtCore.qVersion().split('.')] if qt_version < [5, 8, 0]: rcc_version = 1 qt_resource_struct = qt_resource_struct_v1 else: rcc_version = 2 qt_resource_struct = qt_resource_struct_v2 def qInitResources(): QtCore.qRegisterResourceData(rcc_version, qt_resource_struct, qt_resource_name, qt_resource_data) def qCleanupResources(): QtCore.qUnregisterResourceData(rcc_version, qt_resource_struct, qt_resource_name, qt_resource_data) qInitResources()
60.189409
105
0.72683
7,077
29,553
3.029815
0.04338
0.041694
0.037776
0.025744
0.971411
0.970712
0.967587
0.965768
0.96353
0.96353
0
0.320345
0.019558
29,553
490
106
60.312245
0.419672
0.005143
0
0.921941
0
0.907173
0.000034
0
0
1
0
0
0
1
0.004219
false
0
0.00211
0
0.006329
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
1
0
0
0
0
0
1
0
0
1
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
11
ec6a79538af900fdfa133cac1fbc048f13653a96
417
py
Python
favorite_app/mocks.py
kenware/favorite-things
2750c3af8c996bfa6b7ee9ddf2d31984f8375c43
[ "MIT" ]
null
null
null
favorite_app/mocks.py
kenware/favorite-things
2750c3af8c996bfa6b7ee9ddf2d31984f8375c43
[ "MIT" ]
9
2019-12-04T23:49:13.000Z
2022-02-10T07:45:17.000Z
favorite_app/mocks.py
kenware/Continuous-deployement-circleCI-AWS-backend
d5d7630765eb568b1a78cf52ffa5558cf579b7c7
[ "MIT" ]
null
null
null
set_up_data = { 'title': 'Avengers', 'description': 'This is my favorite among other favorite things', "ranking": 1, "metadata": { "color": "black", "attr": "very rud" }, } data = { 'title': 'Game of throne', 'description': 'This is my favorite among other favorite things', "ranking": 1, "metadata": { "color": "black", "attr": "very rud" }, }
21.947368
69
0.529976
44
417
4.977273
0.545455
0.082192
0.155251
0.173516
0.803653
0.803653
0.803653
0.803653
0.803653
0.803653
0
0.006826
0.297362
417
18
70
23.166667
0.740614
0
0
0.555556
0
0
0.532374
0
0
0
0
0
0
1
0
false
0
0
0
0
0
0
0
0
null
0
0
1
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
ecebf31ba97a938cbf5d9480358bc603b9301d0c
1,502
py
Python
minecraft-py/static/results.py
Stainpy/minecraft_py
85d7e8f8e5f74efa130333894f686aa5e31e1c87
[ "MIT" ]
7
2021-07-11T20:37:38.000Z
2022-01-18T06:41:05.000Z
minecraft-py/static/results.py
Stainpy/minecraft_py
85d7e8f8e5f74efa130333894f686aa5e31e1c87
[ "MIT" ]
null
null
null
minecraft-py/static/results.py
Stainpy/minecraft_py
85d7e8f8e5f74efa130333894f686aa5e31e1c87
[ "MIT" ]
1
2021-03-14T14:43:30.000Z
2021-03-14T14:43:30.000Z
import time import os from os import path class results: timestamp = time.strftime("%Y-%m-%d_%H-%M-%S") def Hits(self): results_path = f"./Results/{results.timestamp}" file_name = "Hits.txt" if not os.path.exists(results_path): os.makedirs(results_path) file_path = path.join(results_path,file_name) with open(file_path,"a",errors="ingore") as f: f.write(self + "\n") def Free(self): results_path = f"./Results/{results.timestamp}" file_name = "Free.txt" if not os.path.exists(results_path): os.makedirs(results_path) file_path = path.join(results_path,file_name) with open(file_path,"a",errors="ingore") as f: f.write(self + "\n") def Secured(self): results_path = f"./Results/{results.timestamp}" file_name = "Secured.txt" if not os.path.exists(results_path): os.makedirs(results_path) file_path = path.join(results_path,file_name) with open(file_path,"a",errors="ingore") as f: f.write(self + "\n") def Unsecured(self): results_path = f"./Results/{results.timestamp}" file_name = "Unsecured.txt" if not os.path.exists(results_path): os.makedirs(results_path) file_path = path.join(results_path,file_name) with open(file_path,"a",errors="ingore") as f: f.write(self + "\n")
35.761905
56
0.581891
199
1,502
4.226131
0.18593
0.209275
0.142687
0.0761
0.852556
0.852556
0.852556
0.852556
0.852556
0.629013
0
0
0.284288
1,502
41
57
36.634146
0.782326
0
0
0.648649
0
0
0.143053
0.079398
0
0
0
0
0
1
0.108108
false
0
0.081081
0
0.243243
0
0
0
0
null
1
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
ecf4e0fd21fc5d95c74b9f28a1b437e5a5612f13
17,656
py
Python
play_various_walking.py
Einsbon/bipedal-robot-walking-simulation
9a3348be92b016b17b71edbce20b5bcb60dc7d15
[ "MIT" ]
67
2018-09-18T05:40:14.000Z
2022-02-22T04:55:15.000Z
play_various_walking.py
midhuniitm/bipedal-robot-walking-simulation
00bcc3e43b1e54baa53700cfcecfb5f20e0d61e5
[ "MIT" ]
1
2019-09-28T07:00:38.000Z
2019-09-28T07:00:38.000Z
play_various_walking.py
midhuniitm/bipedal-robot-walking-simulation
00bcc3e43b1e54baa53700cfcecfb5f20e0d61e5
[ "MIT" ]
14
2018-09-18T05:40:06.000Z
2021-12-22T06:44:47.000Z
""" bipedal robot walking simulation by Einsbon (Sunbin Kim) - GitHub: https://github.com/Einsbon - Youtube: https://www.youtube.com/channel/UCt7FZ-8uzV_jHJiKp3NlHvg - Blog: https://blog.naver.com/einsbon """ import pybullet as p import time from time import sleep import pybullet_data import numpy as np import math import os import motorController import walkGenerator # motor setting motor_kp = 0.5 motor_kd = 0.5 motor_torque = 2 motor_max_velocity = 10.0 # physics parameter setting fixedTimeStep = 1. / 1000 numSolverIterations = 200 physicsClient = p.connect(p.GUI) p.setTimeStep(timeStep=fixedTimeStep, physicsClientId=physicsClient) p.setPhysicsEngineParameter(numSolverIterations=numSolverIterations) p.setAdditionalSearchPath(pybullet_data.getDataPath()) # to load ground p.setGravity(0, 0, 0) p.resetDebugVisualizerCamera(cameraDistance=1, cameraYaw=10, cameraPitch=-5, cameraTargetPosition=[0.3, 0.5, 0.1], physicsClientId=physicsClient) planeId = p.loadSDF('stadium.sdf') # or p.loadURDF('samurai.urdf') # p.loadURDF('plane.urdf') robot = p.loadURDF(os.path.abspath(os.path.dirname(__file__)) + '/humanoid_leg_12dof.8.urdf', [0, 0, 0.31], p.getQuaternionFromEuler([0, 0, 0]), useFixedBase=False) controller = motorController.MotorController(robot, physicsClient, fixedTimeStep, motor_kp, motor_kd, motor_torque, motor_max_velocity) walk = walkGenerator.WalkGenerator() walk.setWalkParameter(bodyMovePoint=8, legMovePoint=8, height=50, stride=90, sit=40, swayBody=30, swayFoot=0, bodyPositionForwardPlus=5, swayShift=3, liftPush=0.4, landPull=0.6, timeStep=0.06, damping=0.0, incline=0.0) walk.generate() walk.inverseKinematicsAll() actionTime = walk._timeStep p.setGravity(0, 0, -9.8) p.setRealTimeSimulation(0) controller.setMotorsAngleInFixedTimestep(walk.walkAnglesStartRight[0], 1, 0) waitTime = 1 repeatTime = int(waitTime / fixedTimeStep) for _ in range(repeatTime): p.stepSimulation() p.setGravity(0, 0, -9.8) # walk 8 steps # start walking. 
right foot step for i in range(np.size(walk.walkAnglesStartRight, 0)): controller.setMotorsAngleInFixedTimestep(walk.walkAnglesStartRight[i], actionTime, 0) for i in range(2): # left foot step for i in range(np.size(walk.walkAnglesWalkingLeft, 0)): controller.setMotorsAngleInFixedTimestep(walk.walkAnglesWalkingLeft[i], actionTime, 0) # right foot step for i in range(np.size(walk.walkAnglesWalkingRight, 0)): controller.setMotorsAngleInFixedTimestep(walk.walkAnglesWalkingRight[i], actionTime, 0) # end walking. left for i in range(np.size(walk.walkAnglesEndLeft, 0)): controller.setMotorsAngleInFixedTimestep(walk.walkAnglesEndLeft[i], actionTime, 0) # rest 2 seconds waitTime = 2 repeatTime = int(waitTime / fixedTimeStep) for _ in range(repeatTime): p.stepSimulation() ######################################################## p.resetBasePositionAndOrientation(robot, [0, 0, 0.31], p.getQuaternionFromEuler([0, 0, 0])) walk.setWalkParameter(bodyMovePoint=8, legMovePoint=8, height=50, stride=90, sit=70, swayBody=30, swayFoot=0, bodyPositionForwardPlus=5, swayShift=3, liftPush=0.4, landPull=0.6, timeStep=0.06, damping=0.0, incline=0.0) walk.generate() walk.inverseKinematicsAll() actionTime = walk._timeStep controller.setMotorsAngleInFixedTimestep(walk.walkAnglesStartRight[0], 2, 0) waitTime = 1 repeatTime = int(waitTime / fixedTimeStep) for _ in range(repeatTime): p.stepSimulation() p.setGravity(0, 0, -9.8) for i in range(np.size(walk.walkAnglesStartRight, 0)): controller.setMotorsAngleInFixedTimestep(walk.walkAnglesStartRight[i], actionTime, 0) for i in range(2): for i in range(np.size(walk.walkAnglesWalkingLeft, 0)): controller.setMotorsAngleInFixedTimestep(walk.walkAnglesWalkingLeft[i], actionTime, 0) for i in range(np.size(walk.walkAnglesWalkingRight, 0)): controller.setMotorsAngleInFixedTimestep(walk.walkAnglesWalkingRight[i], actionTime, 0) for i in range(np.size(walk.walkAnglesEndLeft, 0)): controller.setMotorsAngleInFixedTimestep(walk.walkAnglesEndLeft[i], actionTime, 
0) waitTime = 2 repeatTime = int(waitTime / fixedTimeStep) for _ in range(repeatTime): p.stepSimulation() ######################################################## p.resetBasePositionAndOrientation(robot, [0, 0, 0.31], p.getQuaternionFromEuler([0, 0, 0])) walk.setWalkParameter(bodyMovePoint=8, legMovePoint=8, height=20, stride=40, sit=40, swayBody=15, swayFoot=0, bodyPositionForwardPlus=5, swayShift=3, liftPush=0.4, landPull=0.6, timeStep=0.03, damping=0.0, incline=0.0) walk.generate() walk.inverseKinematicsAll() actionTime = walk._timeStep controller.setMotorsAngleInFixedTimestep(walk.walkAnglesStartRight[0], 2, 0) waitTime = 1 repeatTime = int(waitTime / fixedTimeStep) for _ in range(repeatTime): p.stepSimulation() for i in range(np.size(walk.walkAnglesStartRight, 0)): controller.setMotorsAngleInFixedTimestep(walk.walkAnglesStartRight[i], actionTime, 0) for i in range(2): for i in range(np.size(walk.walkAnglesWalkingLeft, 0)): controller.setMotorsAngleInFixedTimestep(walk.walkAnglesWalkingLeft[i], actionTime, 0) for i in range(np.size(walk.walkAnglesWalkingRight, 0)): controller.setMotorsAngleInFixedTimestep(walk.walkAnglesWalkingRight[i], actionTime, 0) for i in range(np.size(walk.walkAnglesEndLeft, 0)): controller.setMotorsAngleInFixedTimestep(walk.walkAnglesEndLeft[i], actionTime, 0) waitTime = 2 repeatTime = int(waitTime / fixedTimeStep) for _ in range(repeatTime): p.stepSimulation() ######################################################## p.resetBasePositionAndOrientation(robot, [0, 0, 0.31], p.getQuaternionFromEuler([0, 0, 0])) walk.setWalkParameter(bodyMovePoint=8, legMovePoint=8, height=50, stride=140, sit=40, swayBody=35, swayFoot=0, bodyPositionForwardPlus=-2, swayShift=3, liftPush=0.4, landPull=0.6, timeStep=0.12, damping=0.0, incline=0.0) walk.generate() walk.inverseKinematicsAll() actionTime = walk._timeStep controller.setMotorsAngleInFixedTimestep(walk.walkAnglesStartRight[0], 2, 0) waitTime = 1 repeatTime = int(waitTime / fixedTimeStep) for _ in 
range(repeatTime): p.stepSimulation() for i in range(np.size(walk.walkAnglesStartRight, 0)): controller.setMotorsAngleInFixedTimestep(walk.walkAnglesStartRight[i], actionTime, 0) for i in range(2): for i in range(np.size(walk.walkAnglesWalkingLeft, 0)): controller.setMotorsAngleInFixedTimestep(walk.walkAnglesWalkingLeft[i], actionTime, 0) for i in range(np.size(walk.walkAnglesWalkingRight, 0)): controller.setMotorsAngleInFixedTimestep(walk.walkAnglesWalkingRight[i], actionTime, 0) for i in range(np.size(walk.walkAnglesEndLeft, 0)): controller.setMotorsAngleInFixedTimestep(walk.walkAnglesEndLeft[i], actionTime, 0) waitTime = 2 repeatTime = int(waitTime / fixedTimeStep) for _ in range(repeatTime): p.stepSimulation() ######################################################## p.resetBasePositionAndOrientation(robot, [0, 0, 0.31], p.getQuaternionFromEuler([0, 0, 0])) walk.setWalkParameter(bodyMovePoint=8, legMovePoint=8, height=40, stride=70, sit=40, swayBody=30, swayFoot=0, bodyPositionForwardPlus=-35, swayShift=3, liftPush=0.4, landPull=0.6, timeStep=0.06, damping=0.0, incline=0.0) walk.generate() walk.inverseKinematicsAll() actionTime = walk._timeStep controller.setMotorsAngleInFixedTimestep(walk.walkAnglesStartRight[0], 2, 0) waitTime = 1 repeatTime = int(waitTime / fixedTimeStep) for _ in range(repeatTime): p.stepSimulation() for i in range(np.size(walk.walkAnglesStartRight, 0)): controller.setMotorsAngleInFixedTimestep(walk.walkAnglesStartRight[i], actionTime, 0) for i in range(2): for i in range(np.size(walk.walkAnglesWalkingLeft, 0)): controller.setMotorsAngleInFixedTimestep(walk.walkAnglesWalkingLeft[i], actionTime, 0) for i in range(np.size(walk.walkAnglesWalkingRight, 0)): controller.setMotorsAngleInFixedTimestep(walk.walkAnglesWalkingRight[i], actionTime, 0) for i in range(np.size(walk.walkAnglesEndLeft, 0)): controller.setMotorsAngleInFixedTimestep(walk.walkAnglesEndLeft[i], actionTime, 0) waitTime = 2 repeatTime = int(waitTime / fixedTimeStep) for _ in 
range(repeatTime): p.stepSimulation() ######################################################## p.resetBasePositionAndOrientation(robot, [0, 0, 0.28], p.getQuaternionFromEuler([0, 0, 0])) walk.setWalkParameter(bodyMovePoint=8, legMovePoint=8, height=50, stride=-90, sit=40, swayBody=30, swayFoot=0, bodyPositionForwardPlus=0, swayShift=3, liftPush=0.4, landPull=0.6, timeStep=0.06, damping=0.0, incline=0.0) walk.generate() walk.inverseKinematicsAll() actionTime = walk._timeStep controller.setMotorsAngleInFixedTimestep(walk.walkAnglesStartRight[0], 2, 0) waitTime = 1 repeatTime = int(waitTime / fixedTimeStep) for _ in range(repeatTime): p.stepSimulation() for i in range(np.size(walk.walkAnglesStartRight, 0)): controller.setMotorsAngleInFixedTimestep(walk.walkAnglesStartRight[i], actionTime, 0) for i in range(2): # repeat twice # left foot step for i in range(np.size(walk.walkAnglesWalkingLeft, 0)): controller.setMotorsAngleInFixedTimestep(walk.walkAnglesWalkingLeft[i], actionTime, 0) for i in range(np.size(walk.walkAnglesWalkingRight, 0)): controller.setMotorsAngleInFixedTimestep(walk.walkAnglesWalkingRight[i], actionTime, 0) for i in range(np.size(walk.walkAnglesEndLeft, 0)): controller.setMotorsAngleInFixedTimestep(walk.walkAnglesEndLeft[i], actionTime, 0) waitTime = 2 repeatTime = int(waitTime / fixedTimeStep) for _ in range(repeatTime): p.stepSimulation() ######################################################## p.resetBasePositionAndOrientation(robot, [0, 0, 0.31], p.getQuaternionFromEuler([0, 0, 0])) walk.setWalkParameter(bodyMovePoint=8, legMovePoint=8, height=50, stride=0, sit=40, swayBody=30, swayFoot=0, bodyPositionForwardPlus=5, swayShift=3, liftPush=0.4, landPull=0.6, timeStep=0.06, damping=0.0, incline=0.0) walk.generate() walk.inverseKinematicsAll() actionTime = walk._timeStep controller.setMotorsAngleInFixedTimestep(walk.walkAnglesStartRight[0], 2, 0) waitTime = 1 repeatTime = int(waitTime / fixedTimeStep) for _ in range(repeatTime): p.stepSimulation() # 
Turn function is not accurate yet. for i in range(np.size(walk.walkAnglesStartRight, 0)): controller.setMotorsAngleInFixedTimestep(walk.walkAnglesStartRight[i] + walk.turnListUnfold[i] * 0.3, actionTime, 0) for i in range(3): for i in range(np.size(walk.walkAnglesWalkingLeft, 0)): controller.setMotorsAngleInFixedTimestep(walk.walkAnglesWalkingLeft[i] + walk.turnListFold[i] * 0.3, actionTime, 0) for i in range(np.size(walk.walkAnglesWalkingRight, 0)): controller.setMotorsAngleInFixedTimestep(walk.walkAnglesWalkingRight[i] + walk.turnListUnfold[i] * 0.3, actionTime, 0) for i in range(np.size(walk.walkAnglesEndLeft, 0)): controller.setMotorsAngleInFixedTimestep(walk.walkAnglesEndLeft[i] + walk.turnListFold[i] * 0.3, actionTime, 0) waitTime = 2 repeatTime = int(waitTime / fixedTimeStep) for _ in range(repeatTime): p.stepSimulation() ######################################################## p.resetBasePositionAndOrientation(robot, [0, 0, 0.31], p.getQuaternionFromEuler([0, 0, 0])) walk.setWalkParameter(bodyMovePoint=8, legMovePoint=8, height=50, stride=90, sit=40, swayBody=30, swayFoot=0, bodyPositionForwardPlus=5, swayShift=3, liftPush=0.4, landPull=0.6, timeStep=0.06, damping=0.0, incline=0.0) walk.generate() walk.inverseKinematicsAll() actionTime = walk._timeStep controller.setMotorsAngleInFixedTimestep(walk.walkAnglesStartRight[0], 2, 0) waitTime = 1 repeatTime = int(waitTime / fixedTimeStep) for _ in range(repeatTime): p.stepSimulation() for i in range(np.size(walk.walkAnglesStartRight, 0)): controller.setMotorsAngleInFixedTimestep(walk.walkAnglesStartRight[i] + walk.turnListUnfold[i] * 0.3, actionTime, 0) for i in range(3): for i in range(np.size(walk.walkAnglesWalkingLeft, 0)): controller.setMotorsAngleInFixedTimestep(walk.walkAnglesWalkingLeft[i] + walk.turnListFold[i] * 0.3, actionTime, 0) for i in range(np.size(walk.walkAnglesWalkingRight, 0)): controller.setMotorsAngleInFixedTimestep(walk.walkAnglesWalkingRight[i] + walk.turnListUnfold[i] * 0.3, actionTime, 0) 
for i in range(np.size(walk.walkAnglesEndLeft, 0)): controller.setMotorsAngleInFixedTimestep(walk.walkAnglesEndLeft[i] + walk.turnListFold[i] * 0.3, actionTime, 0) ######################################################## fixedTimeStep = 1 / 500 p.setTimeStep(fixedTimeStep) giantRobot = p.loadURDF(os.path.abspath(os.path.dirname(__file__)) + '/humanoid_leg_12dof.8.urdf', [-4.7, 0, 3.1], p.getQuaternionFromEuler([0, 0, 0]), useFixedBase=False, globalScaling=10) for i in range(p.getNumJoints(giantRobot)): p.changeDynamics( giantRobot, i, lateralFriction=1, spinningFriction=1, ) waitTime = 2 repeatTime = int(waitTime / fixedTimeStep) for _ in range(repeatTime): p.stepSimulation() controller2 = motorController.MotorController(giantRobot, physicsClient, fixedTimeStep, motor_kp, motor_kd, 50, motor_max_velocity) walk.setWalkParameter(bodyMovePoint=12, legMovePoint=12, height=30, stride=90, sit=35, swayBody=18, swayFoot=0, bodyPositionForwardPlus=5, swayShift=4, liftPush=0.7, landPull=0.8, timeStep=0.06, damping=0.0, incline=0.0) walk.generate() walk.inverseKinematicsAll() walk.showGaitPoint3D() actionTime = walk._timeStep controller2.setMotorsAngleInFixedTimestep(walk.walkAnglesStartRight[0], 4, 0) waitTime = 1 repeatTime = int(waitTime / fixedTimeStep) for _ in range(repeatTime): p.stepSimulation() for i in range(np.size(walk.walkAnglesStartRight, 0)): controller2.setMotorsAngleInFixedTimestep(walk.walkAnglesStartRight[i], actionTime, 0) for i in range(2): for i in range(np.size(walk.walkAnglesWalkingLeft, 0)): controller2.setMotorsAngleInFixedTimestep(walk.walkAnglesWalkingLeft[i], actionTime, 0) for i in range(np.size(walk.walkAnglesWalkingRight, 0)): controller2.setMotorsAngleInFixedTimestep(walk.walkAnglesWalkingRight[i], actionTime, 0) for i in range(np.size(walk.walkAnglesEndLeft, 0)): controller2.setMotorsAngleInFixedTimestep(walk.walkAnglesEndLeft[i], actionTime, 0) controller2.setMotorsAngleInFixedTimestep(walk.inverseKinematicsPoint([-10, -40, 30], [-10, -40, 
30]), 1, 0.5) controller2.setMotorsAngleInFixedTimestep(walk.inverseKinematicsPoint([-80, -35, 50], [-10, -40, 30]), 1.5, 0.5) controller2.setMotorsAngleInFixedTimestep(walk.inverseKinematicsPoint([-10, -35, 45], [0, -40, 30]), 0.4, 0) controller2.setMotorsAngleInFixedTimestep(walk.inverseKinematicsPoint([60, -35, 45], [0, -40, 30]), 0.2, 3) controller2.setMotorsAngleInFixedTimestep(walk.inverseKinematicsPoint([0, -32, 45], [0, -32, 30]), 1, 0) controller2.setMotorsAngleInFixedTimestep(walk.inverseKinematicsPoint([0, 0, 10], [0, 0, 10]), 0.5, 50)
38.889868
145
0.631513
1,768
17,656
6.275452
0.100113
0.039748
0.024876
0.045606
0.848941
0.814781
0.786571
0.776927
0.776927
0.774132
0
0.044075
0.240541
17,656
453
146
38.975717
0.783354
0.027526
0
0.791892
0
0
0.003772
0.003114
0
0
0
0
0
1
0
false
0
0.024324
0
0.024324
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
179722b26af904d904c2d83a1dba1efca23287e0
39
py
Python
day-4/passport/__init__.py
DallogFheir/aoc-2020
089bd45d5fbdf98b9729a23f3a142ca3b792567c
[ "MIT" ]
null
null
null
day-4/passport/__init__.py
DallogFheir/aoc-2020
089bd45d5fbdf98b9729a23f3a142ca3b792567c
[ "MIT" ]
null
null
null
day-4/passport/__init__.py
DallogFheir/aoc-2020
089bd45d5fbdf98b9729a23f3a142ca3b792567c
[ "MIT" ]
null
null
null
from passport.passport import Passport
19.5
38
0.871795
5
39
6.8
0.6
0
0
0
0
0
0
0
0
0
0
0
0.102564
39
1
39
39
0.971429
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
1
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
1
1
0
1
0
0
7
da01cfde58f1def1578a22320001fed5650ffc2c
4,496
py
Python
tests/test_pred_of_dist.py
akki2825/CorpusTools
ba236f55266eceb7e719fa36545985d8453953d3
[ "BSD-3-Clause" ]
97
2015-07-06T18:58:43.000Z
2022-03-10T23:00:07.000Z
tests/test_pred_of_dist.py
akki2825/CorpusTools
ba236f55266eceb7e719fa36545985d8453953d3
[ "BSD-3-Clause" ]
443
2015-03-10T21:24:39.000Z
2022-03-22T22:20:13.000Z
tests/test_pred_of_dist.py
akki2825/CorpusTools
ba236f55266eceb7e719fa36545985d8453953d3
[ "BSD-3-Clause" ]
22
2015-07-19T18:56:24.000Z
2020-09-17T17:58:12.000Z
import sys import os import pytest from corpustools.prod.pred_of_dist import (check_envs, calc_prod, EnvironmentFilter) from corpustools.contextmanagers import (CanonicalVariantContext, MostFrequentVariantContext, WeightedVariantContext) def test_prod_allenvs(specified_test_corpus): return def test_prod_token(specified_test_corpus): seg1 = 's' seg2 = 'ʃ' expected = {"-voc":0.0, "+voc,+high":0.9321115676166747, "+voc,-high":0.9660096062568557, "#":0.0} env_list = [] expected_envs = {} for k, v in expected.items(): if k != '#': segs = specified_test_corpus.features_to_segments(k) else: segs = k env = EnvironmentFilter(['s', 'ʃ'], None, [segs]) env_list.append(env) expected_envs[env] = v expected_envs["AVG"] = 0.9241743523004413 type_or_token = 'token' tier = 'transcription' with CanonicalVariantContext(specified_test_corpus, tier, type_or_token) as c: result = calc_prod(c, env_list) for k,v in result.items(): assert(expected_envs[k]-v < 0.001) def test_prod_type(specified_test_corpus): seg1 = 's' seg2 = 'ʃ' expected = {"-voc":0.0, "+voc,+high":0.863120568566631, "+voc,-high":0.9852281360342515, "#":0.0} env_list = [] expected_envs = {} for k, v in expected.items(): if k != '#': segs = specified_test_corpus.features_to_segments(k) else: segs = k env = EnvironmentFilter(['s', 'ʃ'], None, [segs]) env_list.append(env) expected_envs[env] = v expected_envs["AVG"] = 0.9241743523004413 type_or_token = 'type' tier = 'transcription' with CanonicalVariantContext(specified_test_corpus, tier, type_or_token) as c: result = calc_prod(c, env_list, all_info=False) for k,v in result.items(): assert(expected_envs[k]-v < 0.001) def test_prod_wordtokens_token(specified_discourse_corpus): seg1 = 's' seg2 = 'ʃ' expected = {"-voc":0.0, "+voc,+high":0.8631205, #0.9321115676166747, #Error!!!?!?!? 
"+voc,-high":0.9660096062568557, "#":0.0} env_list = [] expected_envs = {} for k, v in expected.items(): if k != '#': segs = specified_discourse_corpus.lexicon.features_to_segments(k) else: segs = k env = EnvironmentFilter(['s', 'ʃ'], None, [segs]) env_list.append(env) expected_envs[env] = v expected_envs["AVG"] = 0.9241743523004413 type_or_token = 'token' tier = 'transcription' with MostFrequentVariantContext(specified_discourse_corpus.lexicon, tier, type_or_token) as c: result = calc_prod(c, env_list) for k,v in result.items(): assert(expected_envs[k]-v < 0.001) type_or_token = 'token' tier = 'transcription' with WeightedVariantContext(specified_discourse_corpus.lexicon, tier, type_or_token) as c: result = calc_prod(c, env_list) for k,v in result.items(): assert(expected_envs[k]-v < 0.001) def test_prod_wordtokens_type(specified_discourse_corpus): seg1 = 's' seg2 = 'ʃ' expected = {"-voc":0.0, "+voc,+high":0.863120568566631, "+voc,-high":0.9852281360342515, "#":0.0} env_list = [] expected_envs = {} for k, v in expected.items(): if k != '#': segs = specified_discourse_corpus.lexicon.features_to_segments(k) else: segs = k env = EnvironmentFilter(['s', 'ʃ'], None, [segs]) env_list.append(env) expected_envs[env] = v expected_envs["AVG"] = 0.9241743523004413 type_or_token = 'type' tier = 'transcription' with MostFrequentVariantContext(specified_discourse_corpus.lexicon, tier, type_or_token) as c: result = calc_prod(c, env_list, all_info=False) for k,v in result.items(): assert(expected_envs[k]-v < 0.001) with WeightedVariantContext(specified_discourse_corpus.lexicon, tier, type_or_token) as c: result = calc_prod(c, env_list, all_info=False) for k,v in result.items(): assert(expected_envs[k]-v < 0.001) def test_prod_pronunciation_variants(pronunciation_variants_corpus): pass
33.804511
98
0.599422
540
4,496
4.764815
0.138889
0.083949
0.047027
0.027206
0.854644
0.854644
0.854644
0.841819
0.841819
0.841819
0
0.079531
0.27847
4,496
132
99
34.060606
0.713625
0.009342
0
0.847458
0
0
0.049427
0
0
0
0
0
0.050847
1
0.050847
false
0.008475
0.042373
0.008475
0.101695
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
da1f19f694da944bc0a2e6801a979fbf170ba1bc
9,932
py
Python
v6.0.5/system/test_fortios_system_resource_limits.py
fortinet-solutions-cse/ansible_fgt_modules
c45fba49258d7c9705e7a8fd9c2a09ea4c8a4719
[ "Apache-2.0" ]
14
2018-09-25T20:35:25.000Z
2021-07-14T04:30:54.000Z
v6.0.6/system/test_fortios_system_resource_limits.py
fortinet-solutions-cse/ansible_fgt_modules
c45fba49258d7c9705e7a8fd9c2a09ea4c8a4719
[ "Apache-2.0" ]
32
2018-10-09T04:13:42.000Z
2020-05-11T07:20:28.000Z
v6.0.5/system/test_fortios_system_resource_limits.py
fortinet-solutions-cse/ansible_fgt_modules
c45fba49258d7c9705e7a8fd9c2a09ea4c8a4719
[ "Apache-2.0" ]
11
2018-10-09T00:14:53.000Z
2021-11-03T10:54:09.000Z
# Copyright 2019 Fortinet, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <https://www.gnu.org/licenses/>.

# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type

import os
import json
import pytest
from mock import ANY
from ansible.module_utils.network.fortios.fortios import FortiOSHandler

try:
    from ansible.modules.network.fortios import fortios_system_resource_limits
except ImportError:
    pytest.skip("Could not load required modules for testing", allow_module_level=True)


@pytest.fixture(autouse=True)
def connection_mock(mocker):
    # Patch the Connection class so no real FortiGate connection is opened.
    connection_class_mock = mocker.patch('ansible.modules.network.fortios.fortios_system_resource_limits.Connection')
    return connection_class_mock


fos_instance = FortiOSHandler(connection_mock)


def _module_args(**extra_attributes):
    """Return a fresh copy of the module input (snake_case keys).

    Keyword arguments are merged into the system_resource_limits section,
    which lets the foreign-attribute test inject an invalid key.
    """
    attributes = {
        'custom_service': '3',
        'dialup_tunnel': '4',
        'firewall_address': '5',
        'firewall_addrgrp': '6',
        'firewall_policy': '7',
        'ipsec_phase1': '8',
        'ipsec_phase1_interface': '9',
        'ipsec_phase2': '10',
        'ipsec_phase2_interface': '11',
        'log_disk_quota': '12',
        'onetime_schedule': '13',
        'proxy': '14',
        'recurring_schedule': '15',
        'service_group': '16',
        'session': '17',
        'sslvpn': '18',
        'user': '19',
        'user_group': '20',
    }
    attributes.update(extra_attributes)
    return {
        'username': 'admin',
        'state': 'present',
        'system_resource_limits': attributes,
        'vdom': 'root',
    }


def _api_payload():
    """Return a fresh copy of the payload expected on the wire (hyphenated keys)."""
    return {
        'custom-service': '3',
        'dialup-tunnel': '4',
        'firewall-address': '5',
        'firewall-addrgrp': '6',
        'firewall-policy': '7',
        'ipsec-phase1': '8',
        'ipsec-phase1-interface': '9',
        'ipsec-phase2': '10',
        'ipsec-phase2-interface': '11',
        'log-disk-quota': '12',
        'onetime-schedule': '13',
        'proxy': '14',
        'recurring-schedule': '15',
        'service-group': '16',
        'session': '17',
        'sslvpn': '18',
        'user': '19',
        'user-group': '20',
    }


def test_system_resource_limits_creation(mocker):
    """A successful POST reports changed=True and no error."""
    schema_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')
    set_mock = mocker.patch(
        'ansible.module_utils.network.fortios.fortios.FortiOSHandler.set',
        return_value={'status': 'success', 'http_method': 'POST', 'http_status': 200})

    is_error, changed, response = fortios_system_resource_limits.fortios_system(
        _module_args(), fos_instance)

    set_mock.assert_called_with('system', 'resource-limits', data=_api_payload(), vdom='root')
    schema_mock.assert_not_called()
    assert not is_error
    assert changed
    assert response['status'] == 'success'
    assert response['http_status'] == 200


def test_system_resource_limits_creation_fails(mocker):
    """An HTTP 500 from the API surfaces as an error without a change."""
    schema_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')
    set_mock = mocker.patch(
        'ansible.module_utils.network.fortios.fortios.FortiOSHandler.set',
        return_value={'status': 'error', 'http_method': 'POST', 'http_status': 500})

    is_error, changed, response = fortios_system_resource_limits.fortios_system(
        _module_args(), fos_instance)

    set_mock.assert_called_with('system', 'resource-limits', data=_api_payload(), vdom='root')
    schema_mock.assert_not_called()
    assert is_error
    assert not changed
    assert response['status'] == 'error'
    assert response['http_status'] == 500


def test_system_resource_limits_idempotent(mocker):
    """An HTTP 404 on the mocked call is treated as 'nothing to do': no error, no change."""
    schema_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')
    set_mock = mocker.patch(
        'ansible.module_utils.network.fortios.fortios.FortiOSHandler.set',
        return_value={'status': 'error', 'http_method': 'DELETE', 'http_status': 404})

    is_error, changed, response = fortios_system_resource_limits.fortios_system(
        _module_args(), fos_instance)

    set_mock.assert_called_with('system', 'resource-limits', data=_api_payload(), vdom='root')
    schema_mock.assert_not_called()
    assert not is_error
    assert not changed
    assert response['status'] == 'error'
    assert response['http_status'] == 404


def test_system_resource_limits_filter_foreign_attributes(mocker):
    """Attributes not in the module schema are dropped before the API call."""
    schema_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')
    set_mock = mocker.patch(
        'ansible.module_utils.network.fortios.fortios.FortiOSHandler.set',
        return_value={'status': 'success', 'http_method': 'POST', 'http_status': 200})

    is_error, changed, response = fortios_system_resource_limits.fortios_system(
        _module_args(random_attribute_not_valid='tag'), fos_instance)

    # The invalid key must not appear in the payload sent to the device.
    set_mock.assert_called_with('system', 'resource-limits', data=_api_payload(), vdom='root')
    schema_mock.assert_not_called()
    assert not is_error
    assert changed
    assert response['status'] == 'success'
    assert response['http_status'] == 200
34.486111
133
0.590415
1,075
9,932
5.216744
0.181395
0.044936
0.064194
0.040121
0.815621
0.798859
0.769437
0.769437
0.769437
0.769437
0
0.040065
0.261176
9,932
287
134
34.606272
0.724176
0.066855
0
0.85654
0
0
0.360895
0.114029
0
0
0
0
0.101266
1
0.021097
false
0
0.033755
0
0.059072
0.004219
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
da56ad730531fbc33babd7de9d2c0fcaeb0014c7
5,132
py
Python
build/kinova_msgs/cmake/kinova_msgs-genmsg-context.py
FProgrammerLIU/caster_man_ros
a75b503fad3a470f985072a2b3953e89074f3223
[ "MIT" ]
null
null
null
build/kinova_msgs/cmake/kinova_msgs-genmsg-context.py
FProgrammerLIU/caster_man_ros
a75b503fad3a470f985072a2b3953e89074f3223
[ "MIT" ]
null
null
null
build/kinova_msgs/cmake/kinova_msgs-genmsg-context.py
FProgrammerLIU/caster_man_ros
a75b503fad3a470f985072a2b3953e89074f3223
[ "MIT" ]
null
null
null
# generated from genmsg/cmake/pkg-genmsg.context.in

# Roots of the kinova_msgs interface sources: hand-written .msg/.srv files
# live in the source tree, actionlib-expanded messages in the devel space.
_MSG_SRC_DIR = "/home/caster/ros_ws/caster/src/kinova-ros/kinova_msgs/msg"
_MSG_DEVEL_DIR = "/home/caster/ros_ws/caster/devel/.private/kinova_msgs/share/kinova_msgs/msg"
_SRV_SRC_DIR = "/home/caster/ros_ws/caster/src/kinova-ros/kinova_msgs/srv"

# Plain messages defined by the package itself.
_PLAIN_MSGS = [
    "FingerPosition",
    "JointAngles",
    "JointVelocity",
    "JointTorque",
    "KinovaPose",
    "PoseVelocity",
    "CartesianForce",
]

# actionlib .action definitions; each expands to the standard seven messages.
_ACTION_STEMS = ["ArmJointAngles", "ArmPose", "Arm_KinovaPose", "SetFingersPosition"]
_ACTION_PARTS = ["Action", "ActionGoal", "ActionResult", "ActionFeedback", "Goal", "Result", "Feedback"]

# Semicolon-separated list of every .msg file to generate bindings for.
messages_str = ";".join(
    ["%s/%s.msg" % (_MSG_SRC_DIR, name) for name in _PLAIN_MSGS]
    + ["%s/%s%s.msg" % (_MSG_DEVEL_DIR, stem, part)
       for stem in _ACTION_STEMS
       for part in _ACTION_PARTS]
)

_SERVICES = [
    "Start",
    "Stop",
    "HomeArm",
    "SetForceControlParams",
    "SetEndEffectorOffset",
    "SetNullSpaceModeState",
    "SetTorqueControlMode",
    "SetTorqueControlParameters",
    "ClearTrajectories",
    "ZeroTorques",
    "AddPoseToCartesianTrajectory",
    "RunCOMParametersEstimation",
]

# Semicolon-separated list of every .srv file to generate bindings for.
services_str = ";".join("%s/%s.srv" % (_SRV_SRC_DIR, name) for name in _SERVICES)

pkg_name = "kinova_msgs"
dependencies_str = "actionlib_msgs;geometry_msgs"
langs = "gencpp;geneus;genlisp;gennodejs;genpy"
# Alternating package-name;include-path pairs for dependency resolution.
dep_include_paths_str = "kinova_msgs;/home/caster/ros_ws/caster/src/kinova-ros/kinova_msgs/msg;kinova_msgs;/home/caster/ros_ws/caster/devel/.private/kinova_msgs/share/kinova_msgs/msg;actionlib_msgs;/opt/ros/melodic/share/actionlib_msgs/cmake/../msg;geometry_msgs;/opt/ros/melodic/share/geometry_msgs/cmake/../msg;std_msgs;/opt/ros/melodic/share/std_msgs/cmake/../msg"
PYTHON_EXECUTABLE = "/usr/bin/python2"
package_has_static_sources = '' == 'TRUE'  # template substituted nothing, so this is False
genmsg_check_deps_script = "/opt/ros/melodic/share/genmsg/cmake/../../../lib/genmsg/genmsg_check_deps.py"
427.666667
3,421
0.848987
790
5,132
5.311392
0.127848
0.193041
0.151811
0.175167
0.699952
0.684223
0.684223
0.679457
0.679457
0.679457
0
0.000196
0.006625
5,132
11
3,422
466.545455
0.822872
0.009548
0
0
1
0.333333
0.960047
0.953946
0
0
0
0
0
1
0
false
0
0
0
0
0
0
0
0
null
0
0
1
0
0
0
0
0
1
0
0
0
0
0
1
1
0
0
0
0
1
1
1
1
null
0
0
0
0
0
0
0
0
0
0
0
0
0
8
da6270ca16e06cc7dc6e592ea26ba6d4873214ff
63,432
py
Python
datadotworld/client/_swagger/apis/projects_api.py
DanialBetres/data.world-py
0e3acf2be9a07c5ab62ecac9289eb662088d54c7
[ "Apache-2.0" ]
99
2017-01-23T16:24:18.000Z
2022-03-30T22:51:58.000Z
datadotworld/client/_swagger/apis/projects_api.py
DanialBetres/data.world-py
0e3acf2be9a07c5ab62ecac9289eb662088d54c7
[ "Apache-2.0" ]
77
2017-01-26T04:33:06.000Z
2022-03-11T09:39:50.000Z
datadotworld/client/_swagger/apis/projects_api.py
DanialBetres/data.world-py
0e3acf2be9a07c5ab62ecac9289eb662088d54c7
[ "Apache-2.0" ]
29
2017-01-25T16:55:23.000Z
2022-01-31T01:44:15.000Z
# coding: utf-8 """ data.world API # data.world in a nutshell data.world is a productive, secure platform for modern data teamwork. We bring together your data practitioners, subject matter experts, and other stakeholders by removing costly barriers to data discovery, comprehension, integration, and sharing. Everything your team needs to quickly understand and use data stays with it. Social features and integrations encourage collaborators to ask and answer questions, share discoveries, and coordinate closely while still using their preferred tools. Our focus on interoperability helps you enhance your own data with data from any source, including our vast and growing library of free public datasets. Sophisticated permissions, auditing features, and more make it easy to manage who views your data and what they do with it. # Conventions ## Authentication All data.world API calls require an API token. OAuth2 is the preferred and most secure method for authenticating users of your data.world applications. Visit our [oauth documentation](https://apidocs.data.world/toolkit/oauth) for additional information. Alternatively, you can obtain a token for _personal use or testing_ by navigating to your profile settings, under the Advanced tab ([https://data.world/settings/advanced](https://data.world/settings/advanced)). Authentication must be provided in API requests via the `Authorization` header. For example, for a user whose API token is `my_api_token`, the request header should be `Authorization: Bearer my_api_token` (note the `Bearer` prefix). ## Content type By default, `application/json` is the content type used in request and response bodies. Exceptions are noted in respective endpoint documentation. ## HTTPS only Our APIs can only be accessed via HTTPS. # Interested in building data.world apps? Check out our [developer portal](https://apidocs.data.world) for tips on how to get started, tutorials, and to interact with the API endpoints right within your browser. 
OpenAPI spec version: 0.21.0 Contact: help@data.world Generated by: https://github.com/swagger-api/swagger-codegen.git """ from __future__ import absolute_import import sys import os import re # python 2 and python 3 compatibility library from six import iteritems from ..configuration import Configuration from ..api_client import ApiClient class ProjectsApi(object): """ NOTE: This class is auto generated by the swagger code generator program. Do not edit the class manually. Ref: https://github.com/swagger-api/swagger-codegen """ def __init__(self, api_client=None): config = Configuration() if api_client: self.api_client = api_client else: if not config.api_client: config.api_client = ApiClient() self.api_client = config.api_client def add_linked_dataset(self, owner, id, linked_dataset_owner, linked_dataset_id, **kwargs): """ Link dataset Add a linked dataset to a project. This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please define a `callback` function to be invoked when receiving the response. >>> def callback_function(response): >>> pprint(response) >>> >>> thread = api.add_linked_dataset(owner, id, linked_dataset_owner, linked_dataset_id, callback=callback_function) :param callback function: The callback function for asynchronous request. (optional) :param str owner: User name and unique identifier of the creator of a project. For example, in the URL: [https://data.world/government/how-to-add-depth-to-your-data-with-the-us-census-acs](https://data.world/government/how-to-add-depth-to-your-data-with-the-us-census-acs), government is the unique identifier of the owner. (required) :param str id: Project unique identifier. For example, in the URL:[https://data.world/government/how-to-add-depth-to-your-data-with-the-us-census-acs](https://data.world/government/how-to-add-depth-to-your-data-with-the-us-census-acs), how-to-add-depth-to-your-data-with-the-us-census-acs is the unique identifier of the project. 
(required) :param str linked_dataset_owner: (required) :param str linked_dataset_id: (required) :return: SuccessMessage If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('callback'): return self.add_linked_dataset_with_http_info(owner, id, linked_dataset_owner, linked_dataset_id, **kwargs) else: (data) = self.add_linked_dataset_with_http_info(owner, id, linked_dataset_owner, linked_dataset_id, **kwargs) return data def add_linked_dataset_with_http_info(self, owner, id, linked_dataset_owner, linked_dataset_id, **kwargs): """ Link dataset Add a linked dataset to a project. This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please define a `callback` function to be invoked when receiving the response. >>> def callback_function(response): >>> pprint(response) >>> >>> thread = api.add_linked_dataset_with_http_info(owner, id, linked_dataset_owner, linked_dataset_id, callback=callback_function) :param callback function: The callback function for asynchronous request. (optional) :param str owner: User name and unique identifier of the creator of a project. For example, in the URL: [https://data.world/government/how-to-add-depth-to-your-data-with-the-us-census-acs](https://data.world/government/how-to-add-depth-to-your-data-with-the-us-census-acs), government is the unique identifier of the owner. (required) :param str id: Project unique identifier. For example, in the URL:[https://data.world/government/how-to-add-depth-to-your-data-with-the-us-census-acs](https://data.world/government/how-to-add-depth-to-your-data-with-the-us-census-acs), how-to-add-depth-to-your-data-with-the-us-census-acs is the unique identifier of the project. (required) :param str linked_dataset_owner: (required) :param str linked_dataset_id: (required) :return: SuccessMessage If the method is called asynchronously, returns the request thread. 
""" all_params = ['owner', 'id', 'linked_dataset_owner', 'linked_dataset_id'] all_params.append('callback') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method add_linked_dataset" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'owner' is set if ('owner' not in params) or (params['owner'] is None): raise ValueError("Missing the required parameter `owner` when calling `add_linked_dataset`") # verify the required parameter 'id' is set if ('id' not in params) or (params['id'] is None): raise ValueError("Missing the required parameter `id` when calling `add_linked_dataset`") # verify the required parameter 'linked_dataset_owner' is set if ('linked_dataset_owner' not in params) or (params['linked_dataset_owner'] is None): raise ValueError("Missing the required parameter `linked_dataset_owner` when calling `add_linked_dataset`") # verify the required parameter 'linked_dataset_id' is set if ('linked_dataset_id' not in params) or (params['linked_dataset_id'] is None): raise ValueError("Missing the required parameter `linked_dataset_id` when calling `add_linked_dataset`") if 'owner' in params and not re.search('[a-z0-9](?:-(?!-)|[a-z0-9])+[a-z0-9]', params['owner']): raise ValueError("Invalid value for parameter `owner` when calling `add_linked_dataset`, must conform to the pattern `/[a-z0-9](?:-(?!-)|[a-z0-9])+[a-z0-9]/`") if 'id' in params and not re.search('[a-z0-9](?:-(?!-)|[a-z0-9])+[a-z0-9]', params['id']): raise ValueError("Invalid value for parameter `id` when calling `add_linked_dataset`, must conform to the pattern `/[a-z0-9](?:-(?!-)|[a-z0-9])+[a-z0-9]/`") collection_formats = {} path_params = {} if 'owner' in params: path_params['owner'] = params['owner'] if 'id' in params: path_params['id'] = params['id'] 
if 'linked_dataset_owner' in params: path_params['linkedDatasetOwner'] = params['linked_dataset_owner'] if 'linked_dataset_id' in params: path_params['linkedDatasetId'] = params['linked_dataset_id'] query_params = [] header_params = {} form_params = [] local_var_files = {} body_params = None # HTTP header `Accept` header_params['Accept'] = self.api_client.\ select_header_accept(['application/json']) # Authentication setting auth_settings = ['oauth'] return self.api_client.call_api('/projects/{owner}/{id}/linkedDatasets/{linkedDatasetOwner}/{linkedDatasetId}', 'PUT', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='SuccessMessage', auth_settings=auth_settings, callback=params.get('callback'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) def create_project(self, owner, **kwargs): """ Create a data project Create a new project. This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please define a `callback` function to be invoked when receiving the response. >>> def callback_function(response): >>> pprint(response) >>> >>> thread = api.create_project(owner, callback=callback_function) :param callback function: The callback function for asynchronous request. (optional) :param str owner: User name and unique identifier of the creator of a project. For example, in the URL: [https://data.world/government/how-to-add-depth-to-your-data-with-the-us-census-acs](https://data.world/government/how-to-add-depth-to-your-data-with-the-us-census-acs), government is the unique identifier of the owner. (required) :param ProjectCreateRequest body: :return: CreateProjectResponse If the method is called asynchronously, returns the request thread. 
""" kwargs['_return_http_data_only'] = True if kwargs.get('callback'): return self.create_project_with_http_info(owner, **kwargs) else: (data) = self.create_project_with_http_info(owner, **kwargs) return data def create_project_with_http_info(self, owner, **kwargs): """ Create a data project Create a new project. This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please define a `callback` function to be invoked when receiving the response. >>> def callback_function(response): >>> pprint(response) >>> >>> thread = api.create_project_with_http_info(owner, callback=callback_function) :param callback function: The callback function for asynchronous request. (optional) :param str owner: User name and unique identifier of the creator of a project. For example, in the URL: [https://data.world/government/how-to-add-depth-to-your-data-with-the-us-census-acs](https://data.world/government/how-to-add-depth-to-your-data-with-the-us-census-acs), government is the unique identifier of the owner. (required) :param ProjectCreateRequest body: :return: CreateProjectResponse If the method is called asynchronously, returns the request thread. 
""" all_params = ['owner', 'body'] all_params.append('callback') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method create_project" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'owner' is set if ('owner' not in params) or (params['owner'] is None): raise ValueError("Missing the required parameter `owner` when calling `create_project`") if 'owner' in params and not re.search('[a-z0-9](?:-(?!-)|[a-z0-9])+[a-z0-9]', params['owner']): raise ValueError("Invalid value for parameter `owner` when calling `create_project`, must conform to the pattern `/[a-z0-9](?:-(?!-)|[a-z0-9])+[a-z0-9]/`") collection_formats = {} path_params = {} if 'owner' in params: path_params['owner'] = params['owner'] query_params = [] header_params = {} form_params = [] local_var_files = {} body_params = None if 'body' in params: body_params = params['body'] # HTTP header `Accept` header_params['Accept'] = self.api_client.\ select_header_accept(['application/json']) # HTTP header `Content-Type` header_params['Content-Type'] = self.api_client.\ select_header_content_type(['application/json']) # Authentication setting auth_settings = ['oauth'] return self.api_client.call_api('/projects/{owner}', 'POST', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='CreateProjectResponse', auth_settings=auth_settings, callback=params.get('callback'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) def delete_project(self, owner, id, **kwargs): """ Delete a data project Delete a project and associated data. 
This operation cannot be undone, but you may recreate the project using the same id. This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please define a `callback` function to be invoked when receiving the response. >>> def callback_function(response): >>> pprint(response) >>> >>> thread = api.delete_project(owner, id, callback=callback_function) :param callback function: The callback function for asynchronous request. (optional) :param str owner: User name and unique identifier of the creator of a project. For example, in the URL: [https://data.world/government/how-to-add-depth-to-your-data-with-the-us-census-acs](https://data.world/government/how-to-add-depth-to-your-data-with-the-us-census-acs), government is the unique identifier of the owner. (required) :param str id: Project unique identifier. For example, in the URL:[https://data.world/government/how-to-add-depth-to-your-data-with-the-us-census-acs](https://data.world/government/how-to-add-depth-to-your-data-with-the-us-census-acs), how-to-add-depth-to-your-data-with-the-us-census-acs is the unique identifier of the project. (required) :return: SuccessMessage If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('callback'): return self.delete_project_with_http_info(owner, id, **kwargs) else: (data) = self.delete_project_with_http_info(owner, id, **kwargs) return data def delete_project_with_http_info(self, owner, id, **kwargs): """ Delete a data project Delete a project and associated data. This operation cannot be undone, but you may recreate the project using the same id. This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please define a `callback` function to be invoked when receiving the response. 
>>> def callback_function(response): >>> pprint(response) >>> >>> thread = api.delete_project_with_http_info(owner, id, callback=callback_function) :param callback function: The callback function for asynchronous request. (optional) :param str owner: User name and unique identifier of the creator of a project. For example, in the URL: [https://data.world/government/how-to-add-depth-to-your-data-with-the-us-census-acs](https://data.world/government/how-to-add-depth-to-your-data-with-the-us-census-acs), government is the unique identifier of the owner. (required) :param str id: Project unique identifier. For example, in the URL:[https://data.world/government/how-to-add-depth-to-your-data-with-the-us-census-acs](https://data.world/government/how-to-add-depth-to-your-data-with-the-us-census-acs), how-to-add-depth-to-your-data-with-the-us-census-acs is the unique identifier of the project. (required) :return: SuccessMessage If the method is called asynchronously, returns the request thread. """ all_params = ['owner', 'id'] all_params.append('callback') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method delete_project" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'owner' is set if ('owner' not in params) or (params['owner'] is None): raise ValueError("Missing the required parameter `owner` when calling `delete_project`") # verify the required parameter 'id' is set if ('id' not in params) or (params['id'] is None): raise ValueError("Missing the required parameter `id` when calling `delete_project`") if 'owner' in params and not re.search('[a-z0-9](?:-(?!-)|[a-z0-9])+[a-z0-9]', params['owner']): raise ValueError("Invalid value for parameter `owner` when calling `delete_project`, must conform to the pattern 
`/[a-z0-9](?:-(?!-)|[a-z0-9])+[a-z0-9]/`") if 'id' in params and not re.search('[a-z0-9](?:-(?!-)|[a-z0-9])+[a-z0-9]', params['id']): raise ValueError("Invalid value for parameter `id` when calling `delete_project`, must conform to the pattern `/[a-z0-9](?:-(?!-)|[a-z0-9])+[a-z0-9]/`") collection_formats = {} path_params = {} if 'owner' in params: path_params['owner'] = params['owner'] if 'id' in params: path_params['id'] = params['id'] query_params = [] header_params = {} form_params = [] local_var_files = {} body_params = None # HTTP header `Accept` header_params['Accept'] = self.api_client.\ select_header_accept(['application/json']) # Authentication setting auth_settings = ['oauth'] return self.api_client.call_api('/projects/{owner}/{id}', 'DELETE', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='SuccessMessage', auth_settings=auth_settings, callback=params.get('callback'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) def get_project(self, owner, id, **kwargs): """ Retrieve a data project Retrieve a project. The definition of the project will be returned, not the associated data. Use `POST:/sql/{owner}/{id}` or `POST:/sparql/{owner}/{id}` to query the data or use dataset APIs to retrieve data from linked datasets. This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please define a `callback` function to be invoked when receiving the response. >>> def callback_function(response): >>> pprint(response) >>> >>> thread = api.get_project(owner, id, callback=callback_function) :param callback function: The callback function for asynchronous request. (optional) :param str owner: User name and unique identifier of the creator of a project. 
For example, in the URL: [https://data.world/government/how-to-add-depth-to-your-data-with-the-us-census-acs](https://data.world/government/how-to-add-depth-to-your-data-with-the-us-census-acs), government is the unique identifier of the owner. (required) :param str id: Project unique identifier. For example, in the URL:[https://data.world/government/how-to-add-depth-to-your-data-with-the-us-census-acs](https://data.world/government/how-to-add-depth-to-your-data-with-the-us-census-acs), how-to-add-depth-to-your-data-with-the-us-census-acs is the unique identifier of the project. (required) :return: ProjectSummaryResponse If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('callback'): return self.get_project_with_http_info(owner, id, **kwargs) else: (data) = self.get_project_with_http_info(owner, id, **kwargs) return data def get_project_with_http_info(self, owner, id, **kwargs): """ Retrieve a data project Retrieve a project. The definition of the project will be returned, not the associated data. Use `POST:/sql/{owner}/{id}` or `POST:/sparql/{owner}/{id}` to query the data or use dataset APIs to retrieve data from linked datasets. This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please define a `callback` function to be invoked when receiving the response. >>> def callback_function(response): >>> pprint(response) >>> >>> thread = api.get_project_with_http_info(owner, id, callback=callback_function) :param callback function: The callback function for asynchronous request. (optional) :param str owner: User name and unique identifier of the creator of a project. For example, in the URL: [https://data.world/government/how-to-add-depth-to-your-data-with-the-us-census-acs](https://data.world/government/how-to-add-depth-to-your-data-with-the-us-census-acs), government is the unique identifier of the owner. 
(required) :param str id: Project unique identifier. For example, in the URL:[https://data.world/government/how-to-add-depth-to-your-data-with-the-us-census-acs](https://data.world/government/how-to-add-depth-to-your-data-with-the-us-census-acs), how-to-add-depth-to-your-data-with-the-us-census-acs is the unique identifier of the project. (required) :return: ProjectSummaryResponse If the method is called asynchronously, returns the request thread. """ all_params = ['owner', 'id'] all_params.append('callback') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method get_project" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'owner' is set if ('owner' not in params) or (params['owner'] is None): raise ValueError("Missing the required parameter `owner` when calling `get_project`") # verify the required parameter 'id' is set if ('id' not in params) or (params['id'] is None): raise ValueError("Missing the required parameter `id` when calling `get_project`") if 'owner' in params and not re.search('[a-z0-9](?:-(?!-)|[a-z0-9])+[a-z0-9]', params['owner']): raise ValueError("Invalid value for parameter `owner` when calling `get_project`, must conform to the pattern `/[a-z0-9](?:-(?!-)|[a-z0-9])+[a-z0-9]/`") if 'id' in params and not re.search('[a-z0-9](?:-(?!-)|[a-z0-9])+[a-z0-9]', params['id']): raise ValueError("Invalid value for parameter `id` when calling `get_project`, must conform to the pattern `/[a-z0-9](?:-(?!-)|[a-z0-9])+[a-z0-9]/`") collection_formats = {} path_params = {} if 'owner' in params: path_params['owner'] = params['owner'] if 'id' in params: path_params['id'] = params['id'] query_params = [] header_params = {} form_params = [] local_var_files = {} body_params = None # HTTP header `Accept` 
header_params['Accept'] = self.api_client.\ select_header_accept(['application/json']) # Authentication setting auth_settings = ['oauth'] return self.api_client.call_api('/projects/{owner}/{id}', 'GET', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='ProjectSummaryResponse', auth_settings=auth_settings, callback=params.get('callback'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) def get_project_by_version(self, owner, id, version_id, **kwargs): """ Retrieve a data project version Retrieve a project version. The definition of the project will be returned. This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please define a `callback` function to be invoked when receiving the response. >>> def callback_function(response): >>> pprint(response) >>> >>> thread = api.get_project_by_version(owner, id, version_id, callback=callback_function) :param callback function: The callback function for asynchronous request. (optional) :param str owner: User name and unique identifier of the creator of a project. For example, in the URL: [https://data.world/government/how-to-add-depth-to-your-data-with-the-us-census-acs](https://data.world/government/how-to-add-depth-to-your-data-with-the-us-census-acs), government is the unique identifier of the owner. (required) :param str id: Project unique identifier. For example, in the URL:[https://data.world/government/how-to-add-depth-to-your-data-with-the-us-census-acs](https://data.world/government/how-to-add-depth-to-your-data-with-the-us-census-acs), how-to-add-depth-to-your-data-with-the-us-census-acs is the unique identifier of the project. (required) :param str version_id: Version unique identifier. 
(required) :return: ProjectSummaryResponse If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('callback'): return self.get_project_by_version_with_http_info(owner, id, version_id, **kwargs) else: (data) = self.get_project_by_version_with_http_info(owner, id, version_id, **kwargs) return data def get_project_by_version_with_http_info(self, owner, id, version_id, **kwargs): """ Retrieve a data project version Retrieve a project version. The definition of the project will be returned. This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please define a `callback` function to be invoked when receiving the response. >>> def callback_function(response): >>> pprint(response) >>> >>> thread = api.get_project_by_version_with_http_info(owner, id, version_id, callback=callback_function) :param callback function: The callback function for asynchronous request. (optional) :param str owner: User name and unique identifier of the creator of a project. For example, in the URL: [https://data.world/government/how-to-add-depth-to-your-data-with-the-us-census-acs](https://data.world/government/how-to-add-depth-to-your-data-with-the-us-census-acs), government is the unique identifier of the owner. (required) :param str id: Project unique identifier. For example, in the URL:[https://data.world/government/how-to-add-depth-to-your-data-with-the-us-census-acs](https://data.world/government/how-to-add-depth-to-your-data-with-the-us-census-acs), how-to-add-depth-to-your-data-with-the-us-census-acs is the unique identifier of the project. (required) :param str version_id: Version unique identifier. (required) :return: ProjectSummaryResponse If the method is called asynchronously, returns the request thread. 
""" all_params = ['owner', 'id', 'version_id'] all_params.append('callback') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method get_project_by_version" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'owner' is set if ('owner' not in params) or (params['owner'] is None): raise ValueError("Missing the required parameter `owner` when calling `get_project_by_version`") # verify the required parameter 'id' is set if ('id' not in params) or (params['id'] is None): raise ValueError("Missing the required parameter `id` when calling `get_project_by_version`") # verify the required parameter 'version_id' is set if ('version_id' not in params) or (params['version_id'] is None): raise ValueError("Missing the required parameter `version_id` when calling `get_project_by_version`") if 'owner' in params and not re.search('[a-z0-9](?:-(?!-)|[a-z0-9])+[a-z0-9]', params['owner']): raise ValueError("Invalid value for parameter `owner` when calling `get_project_by_version`, must conform to the pattern `/[a-z0-9](?:-(?!-)|[a-z0-9])+[a-z0-9]/`") if 'id' in params and not re.search('[a-z0-9](?:-(?!-)|[a-z0-9])+[a-z0-9]', params['id']): raise ValueError("Invalid value for parameter `id` when calling `get_project_by_version`, must conform to the pattern `/[a-z0-9](?:-(?!-)|[a-z0-9])+[a-z0-9]/`") collection_formats = {} path_params = {} if 'owner' in params: path_params['owner'] = params['owner'] if 'id' in params: path_params['id'] = params['id'] if 'version_id' in params: path_params['versionId'] = params['version_id'] query_params = [] header_params = {} form_params = [] local_var_files = {} body_params = None # HTTP header `Accept` header_params['Accept'] = self.api_client.\ select_header_accept(['application/json']) # 
Authentication setting auth_settings = ['oauth'] return self.api_client.call_api('/projects/{owner}/{id}/v/{versionId}', 'GET', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='ProjectSummaryResponse', auth_settings=auth_settings, callback=params.get('callback'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) def get_projects_by_owner(self, owner, **kwargs): """ List projects for a specified owner List projects that the currently authenticated user has access to, for the specified owner; when the project is open, or when project is private but has view/edit/manage permissions for the authenticated user This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please define a `callback` function to be invoked when receiving the response. >>> def callback_function(response): >>> pprint(response) >>> >>> thread = api.get_projects_by_owner(owner, callback=callback_function) :param callback function: The callback function for asynchronous request. (optional) :param str owner: User name and unique identifier of the user or organization a resource belongs to. For example, in the URL: [https://data.world/jonloyens/an-intro-to-dataworld-dataset](https://data.world/jonloyens/an-intro-to-dataworld-dataset), jonloyens is the unique identifier of the owner. (required) :param str limit: Maximum number of items to include in a page of results. :param str next: Token from previous result page to be used when requesting a subsequent page. :return: PaginatedProjectResults If the method is called asynchronously, returns the request thread. 
""" kwargs['_return_http_data_only'] = True if kwargs.get('callback'): return self.get_projects_by_owner_with_http_info(owner, **kwargs) else: (data) = self.get_projects_by_owner_with_http_info(owner, **kwargs) return data def get_projects_by_owner_with_http_info(self, owner, **kwargs): """ List projects for a specified owner List projects that the currently authenticated user has access to, for the specified owner; when the project is open, or when project is private but has view/edit/manage permissions for the authenticated user This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please define a `callback` function to be invoked when receiving the response. >>> def callback_function(response): >>> pprint(response) >>> >>> thread = api.get_projects_by_owner_with_http_info(owner, callback=callback_function) :param callback function: The callback function for asynchronous request. (optional) :param str owner: User name and unique identifier of the user or organization a resource belongs to. For example, in the URL: [https://data.world/jonloyens/an-intro-to-dataworld-dataset](https://data.world/jonloyens/an-intro-to-dataworld-dataset), jonloyens is the unique identifier of the owner. (required) :param str limit: Maximum number of items to include in a page of results. :param str next: Token from previous result page to be used when requesting a subsequent page. :return: PaginatedProjectResults If the method is called asynchronously, returns the request thread. 
""" all_params = ['owner', 'limit', 'next'] all_params.append('callback') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method get_projects_by_owner" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'owner' is set if ('owner' not in params) or (params['owner'] is None): raise ValueError("Missing the required parameter `owner` when calling `get_projects_by_owner`") if 'owner' in params and not re.search('[a-z0-9](?:-(?!-)|[a-z0-9])+[a-z0-9]', params['owner']): raise ValueError("Invalid value for parameter `owner` when calling `get_projects_by_owner`, must conform to the pattern `/[a-z0-9](?:-(?!-)|[a-z0-9])+[a-z0-9]/`") collection_formats = {} path_params = {} if 'owner' in params: path_params['owner'] = params['owner'] query_params = [] if 'limit' in params: query_params.append(('limit', params['limit'])) if 'next' in params: query_params.append(('next', params['next'])) header_params = {} form_params = [] local_var_files = {} body_params = None # HTTP header `Accept` header_params['Accept'] = self.api_client.\ select_header_accept(['application/json']) # Authentication setting auth_settings = ['oauth'] return self.api_client.call_api('/projects/{owner}', 'GET', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='PaginatedProjectResults', auth_settings=auth_settings, callback=params.get('callback'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) def patch_project(self, owner, id, **kwargs): """ Update a data project Update an existing project. 
Only elements included in the request will be updated. All omitted elements will remain untouched. This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please define a `callback` function to be invoked when receiving the response. >>> def callback_function(response): >>> pprint(response) >>> >>> thread = api.patch_project(owner, id, callback=callback_function) :param callback function: The callback function for asynchronous request. (optional) :param str owner: User name and unique identifier of the creator of a project. For example, in the URL: [https://data.world/government/how-to-add-depth-to-your-data-with-the-us-census-acs](https://data.world/government/how-to-add-depth-to-your-data-with-the-us-census-acs), government is the unique identifier of the owner. (required) :param str id: Project unique identifier. For example, in the URL:[https://data.world/government/how-to-add-depth-to-your-data-with-the-us-census-acs](https://data.world/government/how-to-add-depth-to-your-data-with-the-us-census-acs), how-to-add-depth-to-your-data-with-the-us-census-acs is the unique identifier of the project. (required) :param ProjectPatchRequest body: :return: SuccessMessage If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('callback'): return self.patch_project_with_http_info(owner, id, **kwargs) else: (data) = self.patch_project_with_http_info(owner, id, **kwargs) return data def patch_project_with_http_info(self, owner, id, **kwargs): """ Update a data project Update an existing project. Only elements included in the request will be updated. All omitted elements will remain untouched. This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please define a `callback` function to be invoked when receiving the response. 
>>> def callback_function(response): >>> pprint(response) >>> >>> thread = api.patch_project_with_http_info(owner, id, callback=callback_function) :param callback function: The callback function for asynchronous request. (optional) :param str owner: User name and unique identifier of the creator of a project. For example, in the URL: [https://data.world/government/how-to-add-depth-to-your-data-with-the-us-census-acs](https://data.world/government/how-to-add-depth-to-your-data-with-the-us-census-acs), government is the unique identifier of the owner. (required) :param str id: Project unique identifier. For example, in the URL:[https://data.world/government/how-to-add-depth-to-your-data-with-the-us-census-acs](https://data.world/government/how-to-add-depth-to-your-data-with-the-us-census-acs), how-to-add-depth-to-your-data-with-the-us-census-acs is the unique identifier of the project. (required) :param ProjectPatchRequest body: :return: SuccessMessage If the method is called asynchronously, returns the request thread. 
""" all_params = ['owner', 'id', 'body'] all_params.append('callback') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method patch_project" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'owner' is set if ('owner' not in params) or (params['owner'] is None): raise ValueError("Missing the required parameter `owner` when calling `patch_project`") # verify the required parameter 'id' is set if ('id' not in params) or (params['id'] is None): raise ValueError("Missing the required parameter `id` when calling `patch_project`") if 'owner' in params and not re.search('[a-z0-9](?:-(?!-)|[a-z0-9])+[a-z0-9]', params['owner']): raise ValueError("Invalid value for parameter `owner` when calling `patch_project`, must conform to the pattern `/[a-z0-9](?:-(?!-)|[a-z0-9])+[a-z0-9]/`") if 'id' in params and not re.search('[a-z0-9](?:-(?!-)|[a-z0-9])+[a-z0-9]', params['id']): raise ValueError("Invalid value for parameter `id` when calling `patch_project`, must conform to the pattern `/[a-z0-9](?:-(?!-)|[a-z0-9])+[a-z0-9]/`") collection_formats = {} path_params = {} if 'owner' in params: path_params['owner'] = params['owner'] if 'id' in params: path_params['id'] = params['id'] query_params = [] header_params = {} form_params = [] local_var_files = {} body_params = None if 'body' in params: body_params = params['body'] # HTTP header `Accept` header_params['Accept'] = self.api_client.\ select_header_accept(['application/json']) # HTTP header `Content-Type` header_params['Content-Type'] = self.api_client.\ select_header_content_type(['application/json']) # Authentication setting auth_settings = ['oauth'] return self.api_client.call_api('/projects/{owner}/{id}', 'PATCH', path_params, query_params, header_params, body=body_params, 
post_params=form_params, files=local_var_files, response_type='SuccessMessage', auth_settings=auth_settings, callback=params.get('callback'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) def remove_linked_dataset(self, owner, id, linked_dataset_owner, linked_dataset_id, **kwargs): """ Unlink dataset Remove a linked dataset from a project. This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please define a `callback` function to be invoked when receiving the response. >>> def callback_function(response): >>> pprint(response) >>> >>> thread = api.remove_linked_dataset(owner, id, linked_dataset_owner, linked_dataset_id, callback=callback_function) :param callback function: The callback function for asynchronous request. (optional) :param str owner: User name and unique identifier of the creator of a project. For example, in the URL: [https://data.world/government/how-to-add-depth-to-your-data-with-the-us-census-acs](https://data.world/government/how-to-add-depth-to-your-data-with-the-us-census-acs), government is the unique identifier of the owner. (required) :param str id: Project unique identifier. For example, in the URL:[https://data.world/government/how-to-add-depth-to-your-data-with-the-us-census-acs](https://data.world/government/how-to-add-depth-to-your-data-with-the-us-census-acs), how-to-add-depth-to-your-data-with-the-us-census-acs is the unique identifier of the project. (required) :param str linked_dataset_owner: (required) :param str linked_dataset_id: (required) :return: SuccessMessage If the method is called asynchronously, returns the request thread. 
""" kwargs['_return_http_data_only'] = True if kwargs.get('callback'): return self.remove_linked_dataset_with_http_info(owner, id, linked_dataset_owner, linked_dataset_id, **kwargs) else: (data) = self.remove_linked_dataset_with_http_info(owner, id, linked_dataset_owner, linked_dataset_id, **kwargs) return data def remove_linked_dataset_with_http_info(self, owner, id, linked_dataset_owner, linked_dataset_id, **kwargs): """ Unlink dataset Remove a linked dataset from a project. This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please define a `callback` function to be invoked when receiving the response. >>> def callback_function(response): >>> pprint(response) >>> >>> thread = api.remove_linked_dataset_with_http_info(owner, id, linked_dataset_owner, linked_dataset_id, callback=callback_function) :param callback function: The callback function for asynchronous request. (optional) :param str owner: User name and unique identifier of the creator of a project. For example, in the URL: [https://data.world/government/how-to-add-depth-to-your-data-with-the-us-census-acs](https://data.world/government/how-to-add-depth-to-your-data-with-the-us-census-acs), government is the unique identifier of the owner. (required) :param str id: Project unique identifier. For example, in the URL:[https://data.world/government/how-to-add-depth-to-your-data-with-the-us-census-acs](https://data.world/government/how-to-add-depth-to-your-data-with-the-us-census-acs), how-to-add-depth-to-your-data-with-the-us-census-acs is the unique identifier of the project. (required) :param str linked_dataset_owner: (required) :param str linked_dataset_id: (required) :return: SuccessMessage If the method is called asynchronously, returns the request thread. 
""" all_params = ['owner', 'id', 'linked_dataset_owner', 'linked_dataset_id'] all_params.append('callback') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method remove_linked_dataset" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'owner' is set if ('owner' not in params) or (params['owner'] is None): raise ValueError("Missing the required parameter `owner` when calling `remove_linked_dataset`") # verify the required parameter 'id' is set if ('id' not in params) or (params['id'] is None): raise ValueError("Missing the required parameter `id` when calling `remove_linked_dataset`") # verify the required parameter 'linked_dataset_owner' is set if ('linked_dataset_owner' not in params) or (params['linked_dataset_owner'] is None): raise ValueError("Missing the required parameter `linked_dataset_owner` when calling `remove_linked_dataset`") # verify the required parameter 'linked_dataset_id' is set if ('linked_dataset_id' not in params) or (params['linked_dataset_id'] is None): raise ValueError("Missing the required parameter `linked_dataset_id` when calling `remove_linked_dataset`") if 'owner' in params and not re.search('[a-z0-9](?:-(?!-)|[a-z0-9])+[a-z0-9]', params['owner']): raise ValueError("Invalid value for parameter `owner` when calling `remove_linked_dataset`, must conform to the pattern `/[a-z0-9](?:-(?!-)|[a-z0-9])+[a-z0-9]/`") if 'id' in params and not re.search('[a-z0-9](?:-(?!-)|[a-z0-9])+[a-z0-9]', params['id']): raise ValueError("Invalid value for parameter `id` when calling `remove_linked_dataset`, must conform to the pattern `/[a-z0-9](?:-(?!-)|[a-z0-9])+[a-z0-9]/`") collection_formats = {} path_params = {} if 'owner' in params: path_params['owner'] = params['owner'] if 'id' in params: 
path_params['id'] = params['id'] if 'linked_dataset_owner' in params: path_params['linkedDatasetOwner'] = params['linked_dataset_owner'] if 'linked_dataset_id' in params: path_params['linkedDatasetId'] = params['linked_dataset_id'] query_params = [] header_params = {} form_params = [] local_var_files = {} body_params = None # HTTP header `Accept` header_params['Accept'] = self.api_client.\ select_header_accept(['application/json']) # Authentication setting auth_settings = ['oauth'] return self.api_client.call_api('/projects/{owner}/{id}/linkedDatasets/{linkedDatasetOwner}/{linkedDatasetId}', 'DELETE', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='SuccessMessage', auth_settings=auth_settings, callback=params.get('callback'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) def replace_project(self, owner, id, **kwargs): """ Create / Replace a data project Create or replace a project with a given id. If a project exists with the same id, this call will reset such project redefining all its attributes. This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please define a `callback` function to be invoked when receiving the response. >>> def callback_function(response): >>> pprint(response) >>> >>> thread = api.replace_project(owner, id, callback=callback_function) :param callback function: The callback function for asynchronous request. (optional) :param str owner: User name and unique identifier of the creator of a project. For example, in the URL: [https://data.world/government/how-to-add-depth-to-your-data-with-the-us-census-acs](https://data.world/government/how-to-add-depth-to-your-data-with-the-us-census-acs), government is the unique identifier of the owner. 
(required)
        :param str id: Project unique identifier. For example, in the URL:[https://data.world/government/how-to-add-depth-to-your-data-with-the-us-census-acs](https://data.world/government/how-to-add-depth-to-your-data-with-the-us-census-acs), how-to-add-depth-to-your-data-with-the-us-census-acs is the unique identifier of the project. (required)
        :param ProjectCreateRequest body:
        :return: SuccessMessage
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # This convenience wrapper always unwraps the HTTP envelope and hands
        # callers only the response data.
        kwargs['_return_http_data_only'] = True
        if kwargs.get('callback'):
            # Asynchronous mode: the delegate returns the request thread.
            return self.replace_project_with_http_info(owner, id, **kwargs)
        else:
            (data) = self.replace_project_with_http_info(owner, id, **kwargs)
            return data

    def replace_project_with_http_info(self, owner, id, **kwargs):
        """
        Create / Replace a data project.

        Create or replace a project with a given id. If a project exists with
        the same id, this call will reset such project redefining all its
        attributes.

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>>     pprint(response)
        >>>
        >>> thread = api.replace_project_with_http_info(owner, id, callback=callback_function)

        :param callback function: The callback function
            for asynchronous request. (optional)
        :param str owner: User name and unique identifier of the creator of a project. For example, in the URL: [https://data.world/government/how-to-add-depth-to-your-data-with-the-us-census-acs](https://data.world/government/how-to-add-depth-to-your-data-with-the-us-census-acs), government is the unique identifier of the owner. (required)
        :param str id: Project unique identifier.
            For example, in the URL:[https://data.world/government/how-to-add-depth-to-your-data-with-the-us-census-acs](https://data.world/government/how-to-add-depth-to-your-data-with-the-us-census-acs), how-to-add-depth-to-your-data-with-the-us-census-acs is the unique identifier of the project. (required)
        :param ProjectCreateRequest body:
        :return: SuccessMessage
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Keyword arguments accepted by this endpoint; anything else is a
        # caller error and is rejected below.
        all_params = ['owner', 'id', 'body']
        all_params.append('callback')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        params = locals()
        for key, val in iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method replace_project" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'owner' is set
        if ('owner' not in params) or (params['owner'] is None):
            raise ValueError("Missing the required parameter `owner` when calling `replace_project`")
        # verify the required parameter 'id' is set
        if ('id' not in params) or (params['id'] is None):
            raise ValueError("Missing the required parameter `id` when calling `replace_project`")

        # Both path components must be lowercase alphanumeric slugs with no
        # leading/trailing or doubled hyphens.
        if 'owner' in params and not re.search('[a-z0-9](?:-(?!-)|[a-z0-9])+[a-z0-9]', params['owner']):
            raise ValueError("Invalid value for parameter `owner` when calling `replace_project`, must conform to the pattern `/[a-z0-9](?:-(?!-)|[a-z0-9])+[a-z0-9]/`")
        if 'id' in params and not re.search('[a-z0-9](?:-(?!-)|[a-z0-9])+[a-z0-9]', params['id']):
            raise ValueError("Invalid value for parameter `id` when calling `replace_project`, must conform to the pattern `/[a-z0-9](?:-(?!-)|[a-z0-9])+[a-z0-9]/`")

        collection_formats = {}

        path_params = {}
        if 'owner' in params:
            path_params['owner'] = params['owner']
        if 'id' in params:
            path_params['id'] = params['id']

        query_params = []

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        if 'body' in params:
            body_params = params['body']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.\
            select_header_accept(['application/json'])

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.\
            select_header_content_type(['application/json'])

        # Authentication setting
        auth_settings = ['oauth']

        return self.api_client.call_api('/projects/{owner}/{id}', 'PUT',
                                        path_params,
                                        query_params,
                                        header_params,
                                        body=body_params,
                                        post_params=form_params,
                                        files=local_var_files,
                                        response_type='SuccessMessage',
                                        auth_settings=auth_settings,
                                        callback=params.get('callback'),
                                        _return_http_data_only=params.get('_return_http_data_only'),
                                        _preload_content=params.get('_preload_content', True),
                                        _request_timeout=params.get('_request_timeout'),
                                        collection_formats=collection_formats)
57.249097
1,984
0.614926
7,760
63,432
4.88866
0.049613
0.007592
0.010122
0.025359
0.9404
0.933467
0.930673
0.925585
0.919681
0.917334
0
0.004378
0.27978
63,432
1,107
1,985
57.300813
0.826004
0.435395
0
0.774021
0
0.02847
0.249924
0.079744
0
0
0
0
0
1
0.033808
false
0
0.012456
0
0.096085
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
1
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
8
da7cc0a1a8f07b1dd1ceb64b5af7da813b3e8615
22,007
py
Python
esrl/ES.py
ELO-Lab/eTD3
abe65c9989e60a4f8f649e68caf3910f41c81d57
[ "MIT" ]
7
2021-12-06T17:52:50.000Z
2021-12-10T15:05:41.000Z
esrl/ES.py
ELO-Lab/eTD3
abe65c9989e60a4f8f649e68caf3910f41c81d57
[ "MIT" ]
null
null
null
esrl/ES.py
ELO-Lab/eTD3
abe65c9989e60a4f8f649e68caf3910f41c81d57
[ "MIT" ]
null
null
null
import numpy as np
from copy import deepcopy
from .Optimizers import Adam, BasicSGD


def compute_ranks(x):
    """
    Return ranks in [0, len(x)).

    Note: this differs from scipy.stats.rankdata, which returns ranks in
    [1, len(x)].
    (https://github.com/openai/evolution-strategies-starter/blob/master/es_distributed/es.py)
    """
    assert x.ndim == 1
    ranks = np.empty(len(x), dtype=int)
    ranks[x.argsort()] = np.arange(len(x))
    return ranks


def compute_centered_ranks(x):
    """
    Map fitness values to centered ranks in [-0.5, 0.5].

    https://github.com/openai/evolution-strategies-starter/blob/master/es_distributed/es.py
    """
    y = compute_ranks(x.ravel()).reshape(x.shape).astype(np.float32)
    y /= (x.size - 1)
    y -= .5
    return y


def compute_weight_decay(weight_decay, model_param_list):
    """Return the (negative) L2 penalty for each parameter vector in the list."""
    model_param_grid = np.array(model_param_list)
    return -weight_decay * np.mean(model_param_grid * model_param_grid, axis=1)


class VES:

    """
    Basic Version of OpenAI Evolution Strategies
    """

    def __init__(self, num_params,
                 mu_init=None,
                 sigma_init=0.1,
                 lr=10**-2,
                 pop_size=256,
                 antithetic=True,
                 weight_decay=0,
                 rank_fitness=True):

        # misc
        self.num_params = num_params
        # NOTE: attribute name kept as-is ("interation") for backward
        # compatibility with any external readers.
        self.first_interation = True

        # distribution parameters
        if mu_init is None:
            self.mu = np.zeros(self.num_params)
        else:
            self.mu = np.array(mu_init)
        self.sigma = sigma_init

        # optimization stuff
        self.learning_rate = lr
        self.optimizer = Adam(self.learning_rate)

        # sampling stuff
        self.pop_size = pop_size
        self.antithetic = antithetic
        if self.antithetic:
            assert (self.pop_size % 2 == 0), "Population size must be even"
        self.weight_decay = weight_decay
        self.rank_fitness = rank_fitness

    def ask(self):
        """
        Returns a list of candidate parameters sampled from the
        current isotropic Gaussian search distribution.
        """
        if self.antithetic:
            # Mirrored sampling: each noise vector is paired with its negation.
            epsilon_half = np.random.randn(self.pop_size // 2, self.num_params)
            epsilon = np.concatenate([epsilon_half, - epsilon_half])
        else:
            epsilon = np.random.randn(self.pop_size, self.num_params)
        return self.mu + epsilon * self.sigma

    def tell(self, scores, solutions):
        """
        Updates the mean of the distribution with a vanilla ES
        gradient estimate computed from (scores, solutions).
        """
        assert(len(scores) ==
               self.pop_size), "Inconsistent reward_table size reported."

        reward = np.array(scores)
        if self.rank_fitness:
            # Rank transformation makes the update invariant to the scale of
            # the raw fitness values.
            reward = compute_centered_ranks(reward)
        if self.weight_decay > 0:
            l2_decay = compute_weight_decay(self.weight_decay, solutions)
            reward += l2_decay

        epsilon = (solutions - self.mu) / self.sigma
        grad = -1/(self.sigma * self.pop_size) * np.dot(reward, epsilon)

        # optimization step
        step = self.optimizer.step(grad)
        self.mu += step

    def get_distrib_params(self):
        """
        Returns the parameters of the distribution:
        the mean and the (scalar) variance.
        """
        return np.copy(self.mu), np.copy(self.sigma ** 2)


class GES:

    """
    Guided Evolution Strategies
    """

    def __init__(self, num_params,
                 mu_init=None,
                 sigma_init=0.1,
                 lr=10**-2,
                 alpha=0.5,
                 beta=2,
                 k=1,
                 pop_size=256,
                 antithetic=True,
                 weight_decay=0,
                 rank_fitness=False):

        # misc
        self.num_params = num_params
        # NOTE: attribute name kept as-is ("interation") for backward
        # compatibility with any external readers.
        self.first_interation = True

        # distribution parameters
        if mu_init is None:
            self.mu = np.zeros(self.num_params)
        else:
            self.mu = np.array(mu_init)
        self.sigma = sigma_init
        # Guiding subspace: the k most recent surrogate gradients.
        self.U = np.ones((self.num_params, k))

        # optimization stuff
        self.alpha = alpha
        self.beta = beta
        self.k = k
        self.learning_rate = lr
        self.optimizer = Adam(self.learning_rate)

        # sampling stuff
        self.pop_size = pop_size
        self.antithetic = antithetic
        if self.antithetic:
            assert (self.pop_size % 2 == 0), "Population size must be even"
        self.weight_decay = weight_decay
        self.rank_fitness = rank_fitness

    def ask(self):
        """
        Returns a list of candidate parameters sampled from a mixture of an
        isotropic Gaussian (weight alpha) and the guiding subspace spanned
        by U (weight 1 - alpha).
        """
        if self.antithetic:
            epsilon_half = np.sqrt(self.alpha / self.num_params) * \
                np.random.randn(self.pop_size // 2, self.num_params)
            epsilon_half += np.sqrt((1 - self.alpha) / self.k) * \
                np.random.randn(self.pop_size // 2, self.k) @ self.U.T
            epsilon = np.concatenate([epsilon_half, - epsilon_half])

        else:
            # BUGFIX: the subspace noise must be drawn in R^k and projected
            # through U.T (mirrors the antithetic branch above). The previous
            # code drew (pop_size, num_params) noise and multiplied by U.T,
            # which raises a shape error whenever k != num_params and also
            # dropped the 1/k variance scaling.
            epsilon = np.sqrt(self.alpha / self.num_params) * \
                np.random.randn(self.pop_size, self.num_params)
            epsilon += np.sqrt((1 - self.alpha) / self.k) * \
                np.random.randn(self.pop_size, self.k) @ self.U.T

        return self.mu + epsilon * self.sigma

    def tell(self, scores, solutions):
        """
        Updates the mean of the distribution with a guided ES
        gradient estimate.
        """
        assert(len(scores) ==
               self.pop_size), "Inconsistent reward_table size reported."

        reward = np.array(scores)
        if self.rank_fitness:
            reward = compute_centered_ranks(reward)
        if self.weight_decay > 0:
            l2_decay = compute_weight_decay(self.weight_decay, solutions)
            reward += l2_decay

        epsilon = (solutions - self.mu) / self.sigma
        grad = -self.beta/(self.sigma * self.pop_size) * \
            np.dot(reward, epsilon)

        # optimization step
        step = self.optimizer.step(grad)
        self.mu += step

    def add(self, params, grads, fitness):
        """
        Adds a new surrogate "gradient" to the guiding subspace U
        (stored normalized in the last column). Optionally resets the mean
        to `params`.
        """
        if params is not None:
            self.mu = params
        grads = grads / np.linalg.norm(grads)
        self.U[:, -1] = grads

    def get_distrib_params(self):
        """
        Returns the parameters of the distribution:
        the mean and the (scalar) variance.
        """
        return np.copy(self.mu), np.copy(self.sigma ** 2)


class sepCMAES:

    """
    Separable CMA-ES, adapted from
    https://en.wikipedia.org/wiki/CMA-ES#Example_code_in_MATLAB/Octave
    The covariance is restricted to a diagonal, stored as a vector.
    """

    def __init__(self, num_params,
                 mu_init=None,
                 sigma_init=1,
                 step_size_init=1,
                 pop_size=255,
                 antithetic=False,
                 weight_decay=0.01,
                 rank_fitness=True):

        # distribution parameters
        self.num_params = num_params
        if mu_init is not None:
            self.mu = np.array(mu_init)
        else:
            self.mu = np.zeros(num_params)
        self.antithetic = antithetic

        # stuff
        self.step_size = step_size_init
        self.p_c = np.zeros(self.num_params)  # covariance evolution path
        self.p_s = np.zeros(self.num_params)  # step-size evolution path
        self.cov = sigma_init * np.ones(num_params)

        # selection parameters
        self.pop_size = pop_size
        self.parents = pop_size // 2
        # Log-decreasing recombination weights over the selected parents.
        self.weights = np.array([np.log((self.parents + 1) / i)
                                 for i in range(1, self.parents + 1)])
        self.weights /= self.weights.sum()
        self.parents_eff = 1 / (self.weights ** 2).sum()
        self.rank_fitness = rank_fitness
        self.weight_decay = weight_decay

        # adaptation parameters
        self.g = 1
        self.c_s = (self.parents_eff + 2) / \
            (self.num_params + self.parents_eff + 3)
        self.c_c = 4 / (self.num_params + 4)
        self.c_cov = 1 / self.parents_eff * 2 / ((self.num_params + np.sqrt(2)) ** 2) + \
            (1 - 1 / self.parents_eff) * \
            min(1, (2 * self.parents_eff - 1) /
                (self.parents_eff + (self.num_params + 2) ** 2))
        # Separable-CMA correction: the diagonal model learns faster.
        self.c_cov *= (self.num_params + 2) / 3
        self.d_s = 1 + 2 * \
            max(0, np.sqrt((self.parents_eff - 1) /
                           (self.num_params + 1) - 1)) + self.c_s
        # Expectation of ||N(0, I)||, used by step-size control.
        self.chi = np.sqrt(self.num_params) * \
            (1 - 1 / (4 * self.num_params) + 1 / (21 * self.num_params ** 2))

    def ask(self, pop_size):
        """
        Returns a list of candidate parameters.
        """
        if self.antithetic:
            epsilon_half = np.random.randn(pop_size // 2, self.num_params)
            epsilon = np.concatenate([epsilon_half, - epsilon_half])
        else:
            epsilon = np.random.randn(pop_size, self.num_params)
        return self.mu + self.step_size * epsilon * np.sqrt(self.cov)

    def tell(self, solutions, scores):
        """
        Updates the distribution (mean, diagonal covariance, step size)
        and returns the indices of the selected parents.
        """
        scores = np.array(scores)
        scores *= -1  # internally we minimize
        idx_sorted = np.argsort(scores)

        # update mean
        old_mu = deepcopy(self.mu)
        self.mu = self.weights @ solutions[idx_sorted[:self.parents]]
        z = 1 / self.step_size * 1 / \
            np.sqrt(self.cov) * (solutions[idx_sorted[:self.parents]] - old_mu)
        z_w = self.weights @ z

        # update evolution paths
        self.p_s = (1 - self.c_s) * self.p_s + \
            np.sqrt(self.c_s * (2 - self.c_s) * self.parents_eff) * z_w
        # Heaviside-like stall indicator for the covariance path update.
        tmp_1 = np.linalg.norm(self.p_s) / np.sqrt(1 - (1 - self.c_s) ** (2 * self.g)) \
            <= self.chi * (1.4 + 2 / (self.num_params + 1))
        self.p_c = (1 - self.c_c) * self.p_c + \
            tmp_1 * np.sqrt(self.c_c * (2 - self.c_c) * self.parents_eff) * \
            np.sqrt(self.cov) * z_w

        # update covariance matrix (diagonal only)
        self.cov = (1 - self.c_cov) * self.cov + \
            self.c_cov * 1 / self.parents_eff * self.p_c * self.p_c + \
            self.c_cov * (1 - 1 / self.parents_eff) * \
            (self.weights @ (self.cov * z * z))

        # update step size
        self.step_size *= np.exp((self.c_s / self.d_s) *
                                 (np.linalg.norm(self.p_s) / self.chi - 1))
        self.g += 1
        # (leftover debug print of self.cov removed)
        return idx_sorted[:self.parents]

    def get_distrib_params(self):
        """
        Returns the parameters of the distribution:
        the mean and the (diagonal of the) covariance matrix.
        """
        return np.copy(self.mu), np.copy(self.step_size)**2 * np.copy(self.cov)


class sepCEMv2:

    """
    Cross-entropy method with a diagonal covariance and an adaptive
    damping term rescaled so that the total added variance is fixed.
    """

    def __init__(self, num_params,
                 mu_init=None,
                 sigma_init=1e-3,
                 pop_size=256,
                 damp=1e-3,
                 damp_limit=1e-5,
                 parents=None,
                 elitism=False,
                 antithetic=False):

        # misc
        self.num_params = num_params

        # distribution parameters
        if mu_init is None:
            self.mu = np.zeros(self.num_params)
        else:
            self.mu = np.array(mu_init)
        self.sigma = sigma_init
        self.damp = damp
        self.damp_limit = damp_limit
        self.tau = 0.95  # geometric decay rate of the damping term
        self.cov = self.sigma * np.ones(self.num_params)

        # elite stuff
        self.elitism = elitism
        self.elite = np.sqrt(self.sigma) * np.random.rand(self.num_params)
        self.elite_score = None

        # sampling stuff
        self.pop_size = pop_size
        self.antithetic = antithetic
        if self.antithetic:
            assert (self.pop_size % 2 == 0), "Population size must be even"
        if parents is None or parents <= 0:
            self.parents = pop_size // 2
        else:
            self.parents = parents
        self.weights = np.array([np.log((self.parents + 1) / i)
                                 for i in range(1, self.parents + 1)])
        self.weights /= self.weights.sum()

    def ask(self, pop_size):
        """
        Returns a list of candidate parameters. When elitism is enabled the
        last individual is the current elite.
        """
        if self.antithetic and not pop_size % 2:
            epsilon_half = np.random.randn(pop_size // 2, self.num_params)
            epsilon = np.concatenate([epsilon_half, - epsilon_half])
        else:
            epsilon = np.random.randn(pop_size, self.num_params)
        inds = self.mu + epsilon * np.sqrt(self.cov)
        if self.elitism:
            inds[-1] = self.elite
        return inds

    def tell(self, solutions, scores):
        """
        Updates the distribution from the scored solutions.
        """
        scores = np.array(scores)
        scores *= -1  # internally we minimize
        idx_sorted = np.argsort(scores)

        old_mu = self.mu
        self.damp = self.damp * self.tau + (1 - self.tau) * self.damp_limit
        self.mu = self.weights @ solutions[idx_sorted[:self.parents]]

        z = (solutions[idx_sorted[:self.parents]] - old_mu)
        tmp = self.weights @ (z * z)
        # Rescale so the total variance equals num_params * damp.
        beta = self.num_params * self.damp / np.sum(tmp)
        tmp *= beta
        alpha = 1
        self.cov = (alpha * tmp +
                    (1 - alpha) * self.damp * np.ones(self.num_params))
        # (leftover debug prints of damp/beta/cov removed)

        self.elite = solutions[idx_sorted[0]]
        self.elite_score = scores[idx_sorted[0]]

    def get_distrib_params(self):
        """
        Returns the parameters of the distribution:
        the mean and the diagonal covariance.
        """
        return np.copy(self.mu), np.copy(self.cov)


class sepCEM:

    """
    Cross-entropy method with a diagonal covariance and a decaying
    additive damping term.
    """

    def __init__(self, num_params,
                 mu_init=None,
                 sigma_init=1e-3,
                 pop_size=256,
                 damp=1e-3,
                 damp_limit=1e-5,
                 parents=None,
                 elitism=False,
                 antithetic=False):

        # misc
        self.num_params = num_params

        # distribution parameters
        if mu_init is None:
            self.mu = np.zeros(self.num_params)
        else:
            self.mu = np.array(mu_init)
        self.sigma = sigma_init
        self.damp = damp
        self.damp_limit = damp_limit
        self.tau = 0.95  # geometric decay rate of the damping term
        self.cov = self.sigma * np.ones(self.num_params)

        # elite stuff
        self.elitism = elitism
        self.elite = np.sqrt(self.sigma) * np.random.rand(self.num_params)
        self.elite_score = None

        # sampling stuff
        self.pop_size = pop_size
        self.antithetic = antithetic
        if self.antithetic:
            assert (self.pop_size % 2 == 0), "Population size must be even"
        if parents is None or parents <= 0:
            self.parents = pop_size // 2
        else:
            self.parents = parents
        self.weights = np.array([np.log((self.parents + 1) / i)
                                 for i in range(1, self.parents + 1)])
        self.weights /= self.weights.sum()

    def ask(self, pop_size):
        """
        Returns a list of candidate parameters. When elitism is enabled the
        last individual is the current elite.
        """
        if self.antithetic and not pop_size % 2:
            epsilon_half = np.random.randn(pop_size // 2, self.num_params)
            epsilon = np.concatenate([epsilon_half, - epsilon_half])
        else:
            epsilon = np.random.randn(pop_size, self.num_params)
        inds = self.mu + epsilon * np.sqrt(self.cov)
        if self.elitism:
            inds[-1] = self.elite
        return inds

    def tell(self, solutions, scores):
        """
        Updates the distribution from the scored solutions.
        """
        scores = np.array(scores)
        scores *= -1  # internally we minimize
        idx_sorted = np.argsort(scores)

        old_mu = self.mu
        self.damp = self.damp * self.tau + (1 - self.tau) * self.damp_limit
        self.mu = self.weights @ solutions[idx_sorted[:self.parents]]

        z = (solutions[idx_sorted[:self.parents]] - old_mu)
        self.cov = 1 / self.parents * self.weights @ (
            z * z) + self.damp * np.ones(self.num_params)

        self.elite = solutions[idx_sorted[0]]
        self.elite_score = scores[idx_sorted[0]]
        # (leftover debug print of self.cov removed)

    def get_distrib_params(self):
        """
        Returns the parameters of the distribution:
        the mean and the diagonal covariance.
        """
        return np.copy(self.mu), np.copy(self.cov)


class Control:

    """
    Control baseline: a fixed population re-used at every generation,
    with no distribution update beyond tracking the best solution.
    """

    def __init__(self, num_params, mu_init, pop_size=256, sigma_init=1e-3):

        # misc
        self.num_params = num_params
        self.pop = np.sqrt(sigma_init) * \
            np.random.randn(pop_size, num_params) + mu_init
        self.mu = np.zeros(num_params)

    def ask(self, pop_size):
        """
        Returns the stored population (pop_size is ignored; kept for
        interface compatibility with the other strategies).
        """
        return self.pop

    def tell(self, solutions, scores):
        """
        Keeps the best solution as the mean and shuffles the population.
        """
        self.mu = solutions[np.argmax(scores)]
        self.pop = solutions
        np.random.shuffle(self.pop)


class sepCEMA:

    """
    Cross-entropy method with a diagonal covariance whose overall scale
    (sigma) is adapted from the progress of the elite score.
    """

    def __init__(self, num_params,
                 mu_init=None,
                 sigma_init=1e-3,
                 pop_size=256,
                 parents=None,
                 elitism=False,
                 antithetic=False):

        # misc
        self.num_params = num_params

        # distribution parameters
        if mu_init is None:
            self.mu = np.zeros(self.num_params)
        else:
            self.mu = np.array(mu_init)
        self.sigma = sigma_init
        self.cov = self.sigma * np.ones(self.num_params)

        # elite stuff
        self.elitism = elitism
        self.elite = np.sqrt(self.sigma) * np.random.rand(self.num_params)
        self.elite_score = -np.inf

        # sampling stuff
        self.pop_size = pop_size
        self.antithetic = antithetic
        if self.antithetic:
            assert (self.pop_size % 2 == 0), "Population size must be even"
        if parents is None or parents <= 0:
            self.parents = pop_size // 2
        else:
            self.parents = parents
        self.weights = np.array([np.log((self.parents + 1) / i)
                                 for i in range(1, self.parents + 1)])
        self.weights /= self.weights.sum()

    def ask(self, pop_size):
        """
        Returns a list of candidate parameters. When elitism is enabled the
        last individual is the current elite.
        """
        if self.antithetic and not pop_size % 2:
            epsilon_half = np.random.randn(pop_size // 2, self.num_params)
            epsilon = np.concatenate([epsilon_half, - epsilon_half])
        else:
            epsilon = np.random.randn(pop_size, self.num_params)
        inds = self.mu + epsilon * np.sqrt(self.cov)
        if self.elitism:
            inds[-1] = self.elite
        return inds

    def tell(self, solutions, scores):
        """
        Updates the distribution from the scored solutions, shrinking or
        growing sigma depending on whether the elite improved.
        """
        scores = np.array(scores)
        scores *= -1  # internally we minimize
        idx_sorted = np.argsort(scores)

        # new and old mean
        old_mu = self.mu
        self.mu = self.weights @ solutions[idx_sorted[:self.parents]]

        # sigma adaptation
        # NOTE(review): the comparison uses the negated scores — confirm the
        # intended direction of this improvement test against the caller.
        if scores[idx_sorted[0]] > 0.95 * self.elite_score:
            self.sigma *= 0.95
        else:
            self.sigma *= 1.05
        self.elite = solutions[idx_sorted[0]]
        self.elite_score = scores[idx_sorted[0]]

        z = (solutions[idx_sorted[:self.parents]] - old_mu)
        self.cov = self.weights @ (z * z)
        self.cov = self.sigma * self.cov / np.linalg.norm(self.cov)
        # (leftover debug prints of cov/sigma removed)

    def get_distrib_params(self):
        """
        Returns the parameters of the distribution:
        the mean and the diagonal covariance.
        """
        return np.copy(self.mu), np.copy(self.cov)


class sepMCEM:

    """
    Cross-entropy method with multiplicative noise.
    Not really working.
    """

    def __init__(self, num_params,
                 mu_init=None,
                 sigma_init=0.1,
                 pop_size=256,
                 damp=0.01,
                 parents=None,
                 antithetic=False):

        # misc
        self.num_params = num_params

        # distribution parameters
        if mu_init is None:
            self.mu = np.zeros(self.num_params)
        else:
            self.mu = np.array(mu_init)
        self.sigma = sigma_init
        # Damping starts at sigma_init and decays towards `damp`.
        self.damp = sigma_init
        self.damp_limit = damp
        self.cov = self.sigma * np.ones(self.num_params)
        self.tau = 0.95

        # sampling stuff
        self.pop_size = pop_size
        self.antithetic = antithetic
        if self.antithetic:
            assert (self.pop_size % 2 == 0), "Population size must be even"
        if parents is None or parents <= 0:
            self.parents = pop_size // 2
        else:
            self.parents = parents
        self.weights = np.array([np.log((self.parents + 1) / i)
                                 for i in range(1, self.parents + 1)])
        self.weights /= self.weights.sum()

    def ask(self, pop_size):
        """
        Returns a list of candidate parameters, perturbing the mean
        multiplicatively.
        """
        if self.antithetic:
            epsilon_half = np.random.randn(pop_size // 2, self.num_params)
            epsilon = np.concatenate([epsilon_half, - epsilon_half])
        else:
            epsilon = np.random.randn(pop_size, self.num_params)
        return self.mu * (epsilon * np.sqrt(self.cov) + 1)

    def tell(self, solutions, scores):
        """
        Updates the distribution from the scored solutions.
        """
        scores = np.array(scores)
        scores *= -1  # internally we minimize
        idx_sorted = np.argsort(scores)

        old_mu = self.mu
        self.damp = self.damp * self.tau + (1 - self.tau) * self.damp_limit
        self.mu = self.weights @ solutions[idx_sorted[:self.parents]]

        z = (solutions[idx_sorted[:self.parents]] - old_mu)
        self.cov = 1 / self.parents * self.weights @ (
            z * z) + self.damp * np.ones(self.num_params)

    def get_distrib_params(self):
        """
        Returns the parameters of the distribution:
        the mean and the diagonal covariance.
        """
        return np.copy(self.mu), np.copy(self.cov)
30.146575
118
0.545463
2,746
22,007
4.219592
0.080117
0.058255
0.070683
0.015189
0.817209
0.77889
0.761716
0.744714
0.735652
0.725641
0
0.015929
0.343891
22,007
729
119
30.187929
0.78655
0.097242
0
0.726244
0
0
0.013011
0
0
0
0
0
0.020362
1
0.079186
false
0
0.006787
0
0.147059
0.013575
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
e501ffc3bb9a9583b2bbf458ee27a31d9b577586
97,067
py
Python
kaltura_lib/KalturaClient/Plugins/Document.py
KameliaZhelyazkova/Media-Hopper-Initial-Project
c15ad7cbd23dcddc7463d510510916ffcc4954df
[ "CC0-1.0" ]
null
null
null
kaltura_lib/KalturaClient/Plugins/Document.py
KameliaZhelyazkova/Media-Hopper-Initial-Project
c15ad7cbd23dcddc7463d510510916ffcc4954df
[ "CC0-1.0" ]
null
null
null
kaltura_lib/KalturaClient/Plugins/Document.py
KameliaZhelyazkova/Media-Hopper-Initial-Project
c15ad7cbd23dcddc7463d510510916ffcc4954df
[ "CC0-1.0" ]
null
null
null
# =================================================================================================== # _ __ _ _ # | |/ /__ _| | |_ _ _ _ _ __ _ # | ' </ _` | | _| || | '_/ _` | # |_|\_\__,_|_|\__|\_,_|_| \__,_| # # This file is part of the Kaltura Collaborative Media Suite which allows users # to do with audio, video, and animation what Wiki platforms allow them to do with # text. # # Copyright (C) 2006-2015 Kaltura Inc. # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http:#www.gnu.org/licenses/>. 
#
# @ignore
# ===================================================================================================
# Machine-generated Kaltura client plugin: document service enums and types.
# @package Kaltura
# @subpackage Client

from Core import *
from ..Base import *

########## enums ##########
# @package Kaltura
# @subpackage Client
class KalturaDocumentType(object):
    DOCUMENT = 11
    SWF = 12
    PDF = 13

    def __init__(self, value):
        self.value = value

    def getValue(self):
        return self.value

# @package Kaltura
# @subpackage Client
class KalturaDocumentEntryOrderBy(object):
    CREATED_AT_ASC = "+createdAt"
    END_DATE_ASC = "+endDate"
    MODERATION_COUNT_ASC = "+moderationCount"
    NAME_ASC = "+name"
    PARTNER_SORT_VALUE_ASC = "+partnerSortValue"
    RANK_ASC = "+rank"
    RECENT_ASC = "+recent"
    START_DATE_ASC = "+startDate"
    TOTAL_RANK_ASC = "+totalRank"
    UPDATED_AT_ASC = "+updatedAt"
    WEIGHT_ASC = "+weight"
    CREATED_AT_DESC = "-createdAt"
    END_DATE_DESC = "-endDate"
    MODERATION_COUNT_DESC = "-moderationCount"
    NAME_DESC = "-name"
    PARTNER_SORT_VALUE_DESC = "-partnerSortValue"
    RANK_DESC = "-rank"
    RECENT_DESC = "-recent"
    START_DATE_DESC = "-startDate"
    TOTAL_RANK_DESC = "-totalRank"
    UPDATED_AT_DESC = "-updatedAt"
    WEIGHT_DESC = "-weight"

    def __init__(self, value):
        self.value = value

    def getValue(self):
        return self.value

# @package Kaltura
# @subpackage Client
class KalturaDocumentFlavorParamsOrderBy(object):

    def __init__(self, value):
        self.value = value

    def getValue(self):
        return self.value

# @package Kaltura
# @subpackage Client
class KalturaDocumentFlavorParamsOutputOrderBy(object):

    def __init__(self, value):
        self.value = value

    def getValue(self):
        return self.value

# @package Kaltura
# @subpackage Client
class KalturaImageFlavorParamsOrderBy(object):

    def __init__(self, value):
        self.value = value

    def getValue(self):
        return self.value

# @package Kaltura
# @subpackage Client
class KalturaImageFlavorParamsOutputOrderBy(object):

    def __init__(self, value):
        self.value = value

    def getValue(self):
        return self.value

# @package Kaltura
# @subpackage Client
class KalturaPdfFlavorParamsOrderBy(object):

    def __init__(self, value):
        self.value = value

    def getValue(self):
        return self.value

# @package Kaltura
# @subpackage Client
class KalturaPdfFlavorParamsOutputOrderBy(object):

    def __init__(self, value):
        self.value = value

    def getValue(self):
        return self.value

# @package Kaltura
# @subpackage Client
class KalturaSwfFlavorParamsOrderBy(object):

    def __init__(self, value):
        self.value = value

    def getValue(self):
        return self.value

# @package Kaltura
# @subpackage Client
class KalturaSwfFlavorParamsOutputOrderBy(object):

    def __init__(self, value):
        self.value = value

    def getValue(self):
        return self.value

########## classes ##########
# @package Kaltura
# @subpackage Client
class KalturaDocumentEntry(KalturaBaseEntry):
    def __init__(self, id=NotImplemented, name=NotImplemented, description=NotImplemented, partnerId=NotImplemented,
            userId=NotImplemented, creatorId=NotImplemented, tags=NotImplemented, adminTags=NotImplemented,
            categories=NotImplemented, categoriesIds=NotImplemented, status=NotImplemented,
            moderationStatus=NotImplemented, moderationCount=NotImplemented, type=NotImplemented,
            createdAt=NotImplemented, updatedAt=NotImplemented, rank=NotImplemented, totalRank=NotImplemented,
            votes=NotImplemented, groupId=NotImplemented, partnerData=NotImplemented, downloadUrl=NotImplemented,
            searchText=NotImplemented, licenseType=NotImplemented, version=NotImplemented,
            thumbnailUrl=NotImplemented, accessControlId=NotImplemented, startDate=NotImplemented,
            endDate=NotImplemented, referenceId=NotImplemented, replacingEntryId=NotImplemented,
            replacedEntryId=NotImplemented, replacementStatus=NotImplemented, partnerSortValue=NotImplemented,
            conversionProfileId=NotImplemented, redirectEntryId=NotImplemented, rootEntryId=NotImplemented,
            parentEntryId=NotImplemented, operationAttributes=NotImplemented, entitledUsersEdit=NotImplemented,
            entitledUsersPublish=NotImplemented, documentType=NotImplemented, assetParamsIds=NotImplemented):
        KalturaBaseEntry.__init__(self, id, name, description, partnerId, userId, creatorId, tags, adminTags,
            categories, categoriesIds, status, moderationStatus, moderationCount, type, createdAt, updatedAt,
            rank, totalRank, votes, groupId, partnerData, downloadUrl, searchText, licenseType, version,
            thumbnailUrl, accessControlId, startDate, endDate, referenceId, replacingEntryId, replacedEntryId,
            replacementStatus, partnerSortValue, conversionProfileId, redirectEntryId, rootEntryId,
            parentEntryId, operationAttributes, entitledUsersEdit, entitledUsersPublish)

        # The type of the document
        # @var KalturaDocumentType
        # @insertonly
        self.documentType = documentType

        # Comma separated asset params ids that exists for this media entry
        # @var string
        # @readonly
        self.assetParamsIds = assetParamsIds

    PROPERTY_LOADERS = {
        'documentType': (KalturaEnumsFactory.createInt, "KalturaDocumentType"),
        'assetParamsIds': getXmlNodeText,
    }

    def fromXml(self, node):
        KalturaBaseEntry.fromXml(self, node)
        self.fromXmlImpl(node, KalturaDocumentEntry.PROPERTY_LOADERS)

    def toParams(self):
        kparams = KalturaBaseEntry.toParams(self)
        kparams.put("objectType", "KalturaDocumentEntry")
        kparams.addIntEnumIfDefined("documentType", self.documentType)
        return kparams

    def getDocumentType(self):
        return self.documentType

    def setDocumentType(self, newDocumentType):
        self.documentType = newDocumentType

    def getAssetParamsIds(self):
        return self.assetParamsIds

# @package Kaltura
# @subpackage Client
class KalturaDocumentListResponse(KalturaListResponse):
    def __init__(self, totalCount=NotImplemented, objects=NotImplemented):
        KalturaListResponse.__init__(self, totalCount)

        # @var array of KalturaDocumentEntry
        # @readonly
        self.objects = objects

    PROPERTY_LOADERS = {
        'objects': (KalturaObjectFactory.createArray, KalturaDocumentEntry),
    }

    def fromXml(self, node):
        KalturaListResponse.fromXml(self, node)
        self.fromXmlImpl(node, KalturaDocumentListResponse.PROPERTY_LOADERS)

    def toParams(self):
        kparams = KalturaListResponse.toParams(self)
        kparams.put("objectType", "KalturaDocumentListResponse")
        return kparams

    def getObjects(self):
        return self.objects

# @package Kaltura
# @subpackage Client
class KalturaDocumentFlavorParams(KalturaFlavorParams):
    def __init__(self, id=NotImplemented, partnerId=NotImplemented, name=NotImplemented, systemName=NotImplemented,
            description=NotImplemented, createdAt=NotImplemented, isSystemDefault=NotImplemented,
            tags=NotImplemented, requiredPermissions=NotImplemented, sourceRemoteStorageProfileId=NotImplemented,
            remoteStorageProfileIds=NotImplemented, mediaParserType=NotImplemented,
            sourceAssetParamsIds=NotImplemented, videoCodec=NotImplemented, videoBitrate=NotImplemented,
            audioCodec=NotImplemented, audioBitrate=NotImplemented, audioChannels=NotImplemented,
            audioSampleRate=NotImplemented, width=NotImplemented, height=NotImplemented, frameRate=NotImplemented,
            gopSize=NotImplemented, conversionEngines=NotImplemented, conversionEnginesExtraParams=NotImplemented,
            twoPass=NotImplemented, deinterlice=NotImplemented, rotate=NotImplemented, operators=NotImplemented,
            engineVersion=NotImplemented, format=NotImplemented, aspectRatioProcessingMode=NotImplemented,
            forceFrameToMultiplication16=NotImplemented, isGopInSec=NotImplemented,
            isAvoidVideoShrinkFramesizeToSource=NotImplemented, isAvoidVideoShrinkBitrateToSource=NotImplemented,
            isVideoFrameRateForLowBrAppleHls=NotImplemented, multiStream=NotImplemented,
            anamorphicPixels=NotImplemented, isAvoidForcedKeyFrames=NotImplemented, isCropIMX=NotImplemented,
            optimizationPolicy=NotImplemented, maxFrameRate=NotImplemented, videoConstantBitrate=NotImplemented,
            videoBitrateTolerance=NotImplemented, watermarkData=NotImplemented, clipOffset=NotImplemented,
            clipDuration=NotImplemented):
        KalturaFlavorParams.__init__(self, id, partnerId, name, systemName, description, createdAt,
            isSystemDefault, tags, requiredPermissions, sourceRemoteStorageProfileId, remoteStorageProfileIds,
            mediaParserType, sourceAssetParamsIds, videoCodec, videoBitrate, audioCodec, audioBitrate,
            audioChannels, audioSampleRate, width, height, frameRate, gopSize, conversionEngines,
            conversionEnginesExtraParams, twoPass, deinterlice, rotate, operators, engineVersion, format,
            aspectRatioProcessingMode, forceFrameToMultiplication16, isGopInSec,
            isAvoidVideoShrinkFramesizeToSource, isAvoidVideoShrinkBitrateToSource,
            isVideoFrameRateForLowBrAppleHls, multiStream, anamorphicPixels, isAvoidForcedKeyFrames, isCropIMX,
            optimizationPolicy, maxFrameRate, videoConstantBitrate, videoBitrateTolerance, watermarkData,
            clipOffset, clipDuration)

    PROPERTY_LOADERS = {
    }

    def fromXml(self, node):
        KalturaFlavorParams.fromXml(self, node)
        self.fromXmlImpl(node, KalturaDocumentFlavorParams.PROPERTY_LOADERS)

    def toParams(self):
        kparams = KalturaFlavorParams.toParams(self)
        kparams.put("objectType", "KalturaDocumentFlavorParams")
        return kparams

# @package Kaltura
# @subpackage Client
class KalturaImageFlavorParams(KalturaFlavorParams):
    def __init__(self, id=NotImplemented, partnerId=NotImplemented, name=NotImplemented, systemName=NotImplemented,
            description=NotImplemented, createdAt=NotImplemented, isSystemDefault=NotImplemented,
            tags=NotImplemented, requiredPermissions=NotImplemented, sourceRemoteStorageProfileId=NotImplemented,
            remoteStorageProfileIds=NotImplemented, mediaParserType=NotImplemented,
            sourceAssetParamsIds=NotImplemented, videoCodec=NotImplemented, videoBitrate=NotImplemented,
            audioCodec=NotImplemented, audioBitrate=NotImplemented, audioChannels=NotImplemented,
            audioSampleRate=NotImplemented, width=NotImplemented, height=NotImplemented, frameRate=NotImplemented,
            gopSize=NotImplemented, conversionEngines=NotImplemented, conversionEnginesExtraParams=NotImplemented,
            twoPass=NotImplemented, deinterlice=NotImplemented, rotate=NotImplemented, operators=NotImplemented,
            engineVersion=NotImplemented, format=NotImplemented, aspectRatioProcessingMode=NotImplemented,
            forceFrameToMultiplication16=NotImplemented, isGopInSec=NotImplemented,
            isAvoidVideoShrinkFramesizeToSource=NotImplemented, isAvoidVideoShrinkBitrateToSource=NotImplemented,
            isVideoFrameRateForLowBrAppleHls=NotImplemented, multiStream=NotImplemented,
            anamorphicPixels=NotImplemented, isAvoidForcedKeyFrames=NotImplemented, isCropIMX=NotImplemented,
            optimizationPolicy=NotImplemented, maxFrameRate=NotImplemented, videoConstantBitrate=NotImplemented,
            videoBitrateTolerance=NotImplemented, watermarkData=NotImplemented, clipOffset=NotImplemented,
            clipDuration=NotImplemented, densityWidth=NotImplemented, densityHeight=NotImplemented,
            sizeWidth=NotImplemented, sizeHeight=NotImplemented, depth=NotImplemented):
        KalturaFlavorParams.__init__(self, id, partnerId, name, systemName, description, createdAt,
            isSystemDefault, tags, requiredPermissions, sourceRemoteStorageProfileId, remoteStorageProfileIds,
            mediaParserType, sourceAssetParamsIds, videoCodec, videoBitrate, audioCodec, audioBitrate,
            audioChannels, audioSampleRate, width, height, frameRate, gopSize, conversionEngines,
            conversionEnginesExtraParams, twoPass, deinterlice, rotate, operators, engineVersion, format,
            aspectRatioProcessingMode, forceFrameToMultiplication16, isGopInSec,
            isAvoidVideoShrinkFramesizeToSource, isAvoidVideoShrinkBitrateToSource,
            isVideoFrameRateForLowBrAppleHls, multiStream, anamorphicPixels, isAvoidForcedKeyFrames, isCropIMX,
            optimizationPolicy, maxFrameRate, videoConstantBitrate, videoBitrateTolerance, watermarkData,
            clipOffset, clipDuration)

        # @var int
        self.densityWidth = densityWidth

        # @var int
        self.densityHeight = densityHeight

        # @var int
        self.sizeWidth = sizeWidth

        # @var int
        self.sizeHeight = sizeHeight

        # @var int
        self.depth = depth

    PROPERTY_LOADERS = {
        'densityWidth': getXmlNodeInt,
        'densityHeight': getXmlNodeInt,
        'sizeWidth': getXmlNodeInt,
        'sizeHeight': getXmlNodeInt,
        'depth': getXmlNodeInt,
    }

    def fromXml(self, node):
        KalturaFlavorParams.fromXml(self, node)
        self.fromXmlImpl(node, KalturaImageFlavorParams.PROPERTY_LOADERS)

    def toParams(self):
        kparams = KalturaFlavorParams.toParams(self)
        kparams.put("objectType", "KalturaImageFlavorParams")
        kparams.addIntIfDefined("densityWidth", self.densityWidth)
        kparams.addIntIfDefined("densityHeight", self.densityHeight)
        kparams.addIntIfDefined("sizeWidth", self.sizeWidth)
        kparams.addIntIfDefined("sizeHeight", self.sizeHeight)
        kparams.addIntIfDefined("depth", self.depth)
        return kparams

    def getDensityWidth(self):
        return self.densityWidth

    def setDensityWidth(self, newDensityWidth):
        self.densityWidth = newDensityWidth

    def getDensityHeight(self):
        return self.densityHeight

    def setDensityHeight(self, newDensityHeight):
        self.densityHeight = newDensityHeight

    def getSizeWidth(self):
        return self.sizeWidth

    def setSizeWidth(self, newSizeWidth):
        self.sizeWidth = newSizeWidth

    def getSizeHeight(self):
        return self.sizeHeight

    def setSizeHeight(self, newSizeHeight):
        self.sizeHeight = newSizeHeight

    def getDepth(self):
        return self.depth

    def setDepth(self, newDepth):
        self.depth = newDepth

# @package Kaltura
# @subpackage Client
class KalturaPdfFlavorParams(KalturaFlavorParams):
    def __init__(self, id=NotImplemented, partnerId=NotImplemented, name=NotImplemented, systemName=NotImplemented,
            description=NotImplemented, createdAt=NotImplemented, isSystemDefault=NotImplemented,
            tags=NotImplemented, requiredPermissions=NotImplemented, sourceRemoteStorageProfileId=NotImplemented,
            remoteStorageProfileIds=NotImplemented, mediaParserType=NotImplemented,
            sourceAssetParamsIds=NotImplemented, videoCodec=NotImplemented, videoBitrate=NotImplemented,
            audioCodec=NotImplemented, audioBitrate=NotImplemented, audioChannels=NotImplemented,
            audioSampleRate=NotImplemented, width=NotImplemented, height=NotImplemented, frameRate=NotImplemented,
            gopSize=NotImplemented, conversionEngines=NotImplemented, conversionEnginesExtraParams=NotImplemented,
            twoPass=NotImplemented, deinterlice=NotImplemented, rotate=NotImplemented, operators=NotImplemented,
            engineVersion=NotImplemented, format=NotImplemented, aspectRatioProcessingMode=NotImplemented,
            forceFrameToMultiplication16=NotImplemented, isGopInSec=NotImplemented,
            isAvoidVideoShrinkFramesizeToSource=NotImplemented, isAvoidVideoShrinkBitrateToSource=NotImplemented,
            isVideoFrameRateForLowBrAppleHls=NotImplemented, multiStream=NotImplemented,
            anamorphicPixels=NotImplemented, isAvoidForcedKeyFrames=NotImplemented, isCropIMX=NotImplemented,
            optimizationPolicy=NotImplemented, maxFrameRate=NotImplemented, videoConstantBitrate=NotImplemented,
            videoBitrateTolerance=NotImplemented, watermarkData=NotImplemented, clipOffset=NotImplemented,
            clipDuration=NotImplemented, readonly=NotImplemented):
        KalturaFlavorParams.__init__(self, id, partnerId, name, systemName, description, createdAt,
            isSystemDefault, tags, requiredPermissions, sourceRemoteStorageProfileId, remoteStorageProfileIds,
            mediaParserType, sourceAssetParamsIds, videoCodec, videoBitrate, audioCodec, audioBitrate,
            audioChannels, audioSampleRate, width, height, frameRate, gopSize, conversionEngines,
            conversionEnginesExtraParams, twoPass, deinterlice, rotate, operators, engineVersion, format,
            aspectRatioProcessingMode, forceFrameToMultiplication16, isGopInSec,
            isAvoidVideoShrinkFramesizeToSource, isAvoidVideoShrinkBitrateToSource,
            isVideoFrameRateForLowBrAppleHls, multiStream, anamorphicPixels, isAvoidForcedKeyFrames, isCropIMX,
            optimizationPolicy, maxFrameRate, videoConstantBitrate, videoBitrateTolerance, watermarkData,
            clipOffset, clipDuration)

        # @var bool
        self.readonly = readonly

    PROPERTY_LOADERS = {
        'readonly': getXmlNodeBool,
    }

    def fromXml(self, node):
        KalturaFlavorParams.fromXml(self, node)
        self.fromXmlImpl(node, KalturaPdfFlavorParams.PROPERTY_LOADERS)

    def toParams(self):
        kparams = KalturaFlavorParams.toParams(self)
        kparams.put("objectType", "KalturaPdfFlavorParams")
        kparams.addBoolIfDefined("readonly", self.readonly)
        return kparams

    def getReadonly(self):
        return self.readonly

    def setReadonly(self, newReadonly):
        self.readonly = newReadonly

# @package Kaltura
# @subpackage Client
class KalturaSwfFlavorParams(KalturaFlavorParams):
    def __init__(self, id=NotImplemented, partnerId=NotImplemented, name=NotImplemented, systemName=NotImplemented,
            description=NotImplemented, createdAt=NotImplemented, isSystemDefault=NotImplemented,
            tags=NotImplemented, requiredPermissions=NotImplemented, sourceRemoteStorageProfileId=NotImplemented,
            remoteStorageProfileIds=NotImplemented, mediaParserType=NotImplemented,
            sourceAssetParamsIds=NotImplemented, videoCodec=NotImplemented, videoBitrate=NotImplemented,
            audioCodec=NotImplemented, audioBitrate=NotImplemented, audioChannels=NotImplemented,
            audioSampleRate=NotImplemented, width=NotImplemented, height=NotImplemented, frameRate=NotImplemented,
            gopSize=NotImplemented, conversionEngines=NotImplemented, conversionEnginesExtraParams=NotImplemented,
            twoPass=NotImplemented, deinterlice=NotImplemented, rotate=NotImplemented, operators=NotImplemented,
            engineVersion=NotImplemented, format=NotImplemented, aspectRatioProcessingMode=NotImplemented,
            forceFrameToMultiplication16=NotImplemented, isGopInSec=NotImplemented,
            isAvoidVideoShrinkFramesizeToSource=NotImplemented, isAvoidVideoShrinkBitrateToSource=NotImplemented,
            isVideoFrameRateForLowBrAppleHls=NotImplemented, multiStream=NotImplemented,
            anamorphicPixels=NotImplemented, isAvoidForcedKeyFrames=NotImplemented, isCropIMX=NotImplemented,
            optimizationPolicy=NotImplemented, maxFrameRate=NotImplemented, videoConstantBitrate=NotImplemented,
            videoBitrateTolerance=NotImplemented, watermarkData=NotImplemented, clipOffset=NotImplemented,
            clipDuration=NotImplemented, flashVersion=NotImplemented, poly2Bitmap=NotImplemented):
        KalturaFlavorParams.__init__(self, id, partnerId, name, systemName, description, createdAt,
            isSystemDefault, tags, requiredPermissions, sourceRemoteStorageProfileId, remoteStorageProfileIds,
            mediaParserType, sourceAssetParamsIds, videoCodec, videoBitrate, audioCodec, audioBitrate,
            audioChannels, audioSampleRate, width, height, frameRate, gopSize, conversionEngines,
            conversionEnginesExtraParams, twoPass, deinterlice, rotate, operators, engineVersion, format,
            aspectRatioProcessingMode, forceFrameToMultiplication16, isGopInSec,
            isAvoidVideoShrinkFramesizeToSource, isAvoidVideoShrinkBitrateToSource,
            isVideoFrameRateForLowBrAppleHls, multiStream, anamorphicPixels, isAvoidForcedKeyFrames, isCropIMX,
            optimizationPolicy, maxFrameRate, videoConstantBitrate, videoBitrateTolerance, watermarkData,
            clipOffset, clipDuration)

        # @var int
        self.flashVersion = flashVersion

        # @var bool
        self.poly2Bitmap = poly2Bitmap

    PROPERTY_LOADERS = {
        'flashVersion': getXmlNodeInt,
        'poly2Bitmap': getXmlNodeBool,
    }

    def fromXml(self, node):
        KalturaFlavorParams.fromXml(self, node)
        self.fromXmlImpl(node, KalturaSwfFlavorParams.PROPERTY_LOADERS)

    def toParams(self):
        kparams = KalturaFlavorParams.toParams(self)
        kparams.put("objectType", "KalturaSwfFlavorParams")
        kparams.addIntIfDefined("flashVersion", self.flashVersion)
        kparams.addBoolIfDefined("poly2Bitmap", self.poly2Bitmap)
        return kparams

    def getFlashVersion(self):
        return self.flashVersion

    def setFlashVersion(self, newFlashVersion):
        self.flashVersion = newFlashVersion

    def getPoly2Bitmap(self):
        return self.poly2Bitmap

    def setPoly2Bitmap(self, newPoly2Bitmap):
        self.poly2Bitmap = newPoly2Bitmap

# @package Kaltura
# @subpackage Client
class KalturaDocumentFlavorParamsOutput(KalturaFlavorParamsOutput):
    def __init__(self, id=NotImplemented, partnerId=NotImplemented, name=NotImplemented, systemName=NotImplemented,
            description=NotImplemented, createdAt=NotImplemented, isSystemDefault=NotImplemented,
            tags=NotImplemented, requiredPermissions=NotImplemented, sourceRemoteStorageProfileId=NotImplemented,
            remoteStorageProfileIds=NotImplemented, mediaParserType=NotImplemented,
            sourceAssetParamsIds=NotImplemented, videoCodec=NotImplemented, videoBitrate=NotImplemented,
            audioCodec=NotImplemented, audioBitrate=NotImplemented, audioChannels=NotImplemented,
            audioSampleRate=NotImplemented, 
width=NotImplemented, height=NotImplemented, frameRate=NotImplemented, gopSize=NotImplemented, conversionEngines=NotImplemented, conversionEnginesExtraParams=NotImplemented, twoPass=NotImplemented, deinterlice=NotImplemented, rotate=NotImplemented, operators=NotImplemented, engineVersion=NotImplemented, format=NotImplemented, aspectRatioProcessingMode=NotImplemented, forceFrameToMultiplication16=NotImplemented, isGopInSec=NotImplemented, isAvoidVideoShrinkFramesizeToSource=NotImplemented, isAvoidVideoShrinkBitrateToSource=NotImplemented, isVideoFrameRateForLowBrAppleHls=NotImplemented, multiStream=NotImplemented, anamorphicPixels=NotImplemented, isAvoidForcedKeyFrames=NotImplemented, isCropIMX=NotImplemented, optimizationPolicy=NotImplemented, maxFrameRate=NotImplemented, videoConstantBitrate=NotImplemented, videoBitrateTolerance=NotImplemented, watermarkData=NotImplemented, clipOffset=NotImplemented, clipDuration=NotImplemented, flavorParamsId=NotImplemented, commandLinesStr=NotImplemented, flavorParamsVersion=NotImplemented, flavorAssetId=NotImplemented, flavorAssetVersion=NotImplemented, readyBehavior=NotImplemented): KalturaFlavorParamsOutput.__init__(self, id, partnerId, name, systemName, description, createdAt, isSystemDefault, tags, requiredPermissions, sourceRemoteStorageProfileId, remoteStorageProfileIds, mediaParserType, sourceAssetParamsIds, videoCodec, videoBitrate, audioCodec, audioBitrate, audioChannels, audioSampleRate, width, height, frameRate, gopSize, conversionEngines, conversionEnginesExtraParams, twoPass, deinterlice, rotate, operators, engineVersion, format, aspectRatioProcessingMode, forceFrameToMultiplication16, isGopInSec, isAvoidVideoShrinkFramesizeToSource, isAvoidVideoShrinkBitrateToSource, isVideoFrameRateForLowBrAppleHls, multiStream, anamorphicPixels, isAvoidForcedKeyFrames, isCropIMX, optimizationPolicy, maxFrameRate, videoConstantBitrate, videoBitrateTolerance, watermarkData, clipOffset, clipDuration, flavorParamsId, 
commandLinesStr, flavorParamsVersion, flavorAssetId, flavorAssetVersion, readyBehavior) PROPERTY_LOADERS = { } def fromXml(self, node): KalturaFlavorParamsOutput.fromXml(self, node) self.fromXmlImpl(node, KalturaDocumentFlavorParamsOutput.PROPERTY_LOADERS) def toParams(self): kparams = KalturaFlavorParamsOutput.toParams(self) kparams.put("objectType", "KalturaDocumentFlavorParamsOutput") return kparams # @package Kaltura # @subpackage Client class KalturaImageFlavorParamsOutput(KalturaFlavorParamsOutput): def __init__(self, id=NotImplemented, partnerId=NotImplemented, name=NotImplemented, systemName=NotImplemented, description=NotImplemented, createdAt=NotImplemented, isSystemDefault=NotImplemented, tags=NotImplemented, requiredPermissions=NotImplemented, sourceRemoteStorageProfileId=NotImplemented, remoteStorageProfileIds=NotImplemented, mediaParserType=NotImplemented, sourceAssetParamsIds=NotImplemented, videoCodec=NotImplemented, videoBitrate=NotImplemented, audioCodec=NotImplemented, audioBitrate=NotImplemented, audioChannels=NotImplemented, audioSampleRate=NotImplemented, width=NotImplemented, height=NotImplemented, frameRate=NotImplemented, gopSize=NotImplemented, conversionEngines=NotImplemented, conversionEnginesExtraParams=NotImplemented, twoPass=NotImplemented, deinterlice=NotImplemented, rotate=NotImplemented, operators=NotImplemented, engineVersion=NotImplemented, format=NotImplemented, aspectRatioProcessingMode=NotImplemented, forceFrameToMultiplication16=NotImplemented, isGopInSec=NotImplemented, isAvoidVideoShrinkFramesizeToSource=NotImplemented, isAvoidVideoShrinkBitrateToSource=NotImplemented, isVideoFrameRateForLowBrAppleHls=NotImplemented, multiStream=NotImplemented, anamorphicPixels=NotImplemented, isAvoidForcedKeyFrames=NotImplemented, isCropIMX=NotImplemented, optimizationPolicy=NotImplemented, maxFrameRate=NotImplemented, videoConstantBitrate=NotImplemented, videoBitrateTolerance=NotImplemented, watermarkData=NotImplemented, 
clipOffset=NotImplemented, clipDuration=NotImplemented, flavorParamsId=NotImplemented, commandLinesStr=NotImplemented, flavorParamsVersion=NotImplemented, flavorAssetId=NotImplemented, flavorAssetVersion=NotImplemented, readyBehavior=NotImplemented, densityWidth=NotImplemented, densityHeight=NotImplemented, sizeWidth=NotImplemented, sizeHeight=NotImplemented, depth=NotImplemented): KalturaFlavorParamsOutput.__init__(self, id, partnerId, name, systemName, description, createdAt, isSystemDefault, tags, requiredPermissions, sourceRemoteStorageProfileId, remoteStorageProfileIds, mediaParserType, sourceAssetParamsIds, videoCodec, videoBitrate, audioCodec, audioBitrate, audioChannels, audioSampleRate, width, height, frameRate, gopSize, conversionEngines, conversionEnginesExtraParams, twoPass, deinterlice, rotate, operators, engineVersion, format, aspectRatioProcessingMode, forceFrameToMultiplication16, isGopInSec, isAvoidVideoShrinkFramesizeToSource, isAvoidVideoShrinkBitrateToSource, isVideoFrameRateForLowBrAppleHls, multiStream, anamorphicPixels, isAvoidForcedKeyFrames, isCropIMX, optimizationPolicy, maxFrameRate, videoConstantBitrate, videoBitrateTolerance, watermarkData, clipOffset, clipDuration, flavorParamsId, commandLinesStr, flavorParamsVersion, flavorAssetId, flavorAssetVersion, readyBehavior) # @var int self.densityWidth = densityWidth # @var int self.densityHeight = densityHeight # @var int self.sizeWidth = sizeWidth # @var int self.sizeHeight = sizeHeight # @var int self.depth = depth PROPERTY_LOADERS = { 'densityWidth': getXmlNodeInt, 'densityHeight': getXmlNodeInt, 'sizeWidth': getXmlNodeInt, 'sizeHeight': getXmlNodeInt, 'depth': getXmlNodeInt, } def fromXml(self, node): KalturaFlavorParamsOutput.fromXml(self, node) self.fromXmlImpl(node, KalturaImageFlavorParamsOutput.PROPERTY_LOADERS) def toParams(self): kparams = KalturaFlavorParamsOutput.toParams(self) kparams.put("objectType", "KalturaImageFlavorParamsOutput") kparams.addIntIfDefined("densityWidth", 
self.densityWidth) kparams.addIntIfDefined("densityHeight", self.densityHeight) kparams.addIntIfDefined("sizeWidth", self.sizeWidth) kparams.addIntIfDefined("sizeHeight", self.sizeHeight) kparams.addIntIfDefined("depth", self.depth) return kparams def getDensityWidth(self): return self.densityWidth def setDensityWidth(self, newDensityWidth): self.densityWidth = newDensityWidth def getDensityHeight(self): return self.densityHeight def setDensityHeight(self, newDensityHeight): self.densityHeight = newDensityHeight def getSizeWidth(self): return self.sizeWidth def setSizeWidth(self, newSizeWidth): self.sizeWidth = newSizeWidth def getSizeHeight(self): return self.sizeHeight def setSizeHeight(self, newSizeHeight): self.sizeHeight = newSizeHeight def getDepth(self): return self.depth def setDepth(self, newDepth): self.depth = newDepth # @package Kaltura # @subpackage Client class KalturaPdfFlavorParamsOutput(KalturaFlavorParamsOutput): def __init__(self, id=NotImplemented, partnerId=NotImplemented, name=NotImplemented, systemName=NotImplemented, description=NotImplemented, createdAt=NotImplemented, isSystemDefault=NotImplemented, tags=NotImplemented, requiredPermissions=NotImplemented, sourceRemoteStorageProfileId=NotImplemented, remoteStorageProfileIds=NotImplemented, mediaParserType=NotImplemented, sourceAssetParamsIds=NotImplemented, videoCodec=NotImplemented, videoBitrate=NotImplemented, audioCodec=NotImplemented, audioBitrate=NotImplemented, audioChannels=NotImplemented, audioSampleRate=NotImplemented, width=NotImplemented, height=NotImplemented, frameRate=NotImplemented, gopSize=NotImplemented, conversionEngines=NotImplemented, conversionEnginesExtraParams=NotImplemented, twoPass=NotImplemented, deinterlice=NotImplemented, rotate=NotImplemented, operators=NotImplemented, engineVersion=NotImplemented, format=NotImplemented, aspectRatioProcessingMode=NotImplemented, forceFrameToMultiplication16=NotImplemented, isGopInSec=NotImplemented, 
isAvoidVideoShrinkFramesizeToSource=NotImplemented, isAvoidVideoShrinkBitrateToSource=NotImplemented, isVideoFrameRateForLowBrAppleHls=NotImplemented, multiStream=NotImplemented, anamorphicPixels=NotImplemented, isAvoidForcedKeyFrames=NotImplemented, isCropIMX=NotImplemented, optimizationPolicy=NotImplemented, maxFrameRate=NotImplemented, videoConstantBitrate=NotImplemented, videoBitrateTolerance=NotImplemented, watermarkData=NotImplemented, clipOffset=NotImplemented, clipDuration=NotImplemented, flavorParamsId=NotImplemented, commandLinesStr=NotImplemented, flavorParamsVersion=NotImplemented, flavorAssetId=NotImplemented, flavorAssetVersion=NotImplemented, readyBehavior=NotImplemented, readonly=NotImplemented): KalturaFlavorParamsOutput.__init__(self, id, partnerId, name, systemName, description, createdAt, isSystemDefault, tags, requiredPermissions, sourceRemoteStorageProfileId, remoteStorageProfileIds, mediaParserType, sourceAssetParamsIds, videoCodec, videoBitrate, audioCodec, audioBitrate, audioChannels, audioSampleRate, width, height, frameRate, gopSize, conversionEngines, conversionEnginesExtraParams, twoPass, deinterlice, rotate, operators, engineVersion, format, aspectRatioProcessingMode, forceFrameToMultiplication16, isGopInSec, isAvoidVideoShrinkFramesizeToSource, isAvoidVideoShrinkBitrateToSource, isVideoFrameRateForLowBrAppleHls, multiStream, anamorphicPixels, isAvoidForcedKeyFrames, isCropIMX, optimizationPolicy, maxFrameRate, videoConstantBitrate, videoBitrateTolerance, watermarkData, clipOffset, clipDuration, flavorParamsId, commandLinesStr, flavorParamsVersion, flavorAssetId, flavorAssetVersion, readyBehavior) # @var bool self.readonly = readonly PROPERTY_LOADERS = { 'readonly': getXmlNodeBool, } def fromXml(self, node): KalturaFlavorParamsOutput.fromXml(self, node) self.fromXmlImpl(node, KalturaPdfFlavorParamsOutput.PROPERTY_LOADERS) def toParams(self): kparams = KalturaFlavorParamsOutput.toParams(self) kparams.put("objectType", 
"KalturaPdfFlavorParamsOutput") kparams.addBoolIfDefined("readonly", self.readonly) return kparams def getReadonly(self): return self.readonly def setReadonly(self, newReadonly): self.readonly = newReadonly # @package Kaltura # @subpackage Client class KalturaSwfFlavorParamsOutput(KalturaFlavorParamsOutput): def __init__(self, id=NotImplemented, partnerId=NotImplemented, name=NotImplemented, systemName=NotImplemented, description=NotImplemented, createdAt=NotImplemented, isSystemDefault=NotImplemented, tags=NotImplemented, requiredPermissions=NotImplemented, sourceRemoteStorageProfileId=NotImplemented, remoteStorageProfileIds=NotImplemented, mediaParserType=NotImplemented, sourceAssetParamsIds=NotImplemented, videoCodec=NotImplemented, videoBitrate=NotImplemented, audioCodec=NotImplemented, audioBitrate=NotImplemented, audioChannels=NotImplemented, audioSampleRate=NotImplemented, width=NotImplemented, height=NotImplemented, frameRate=NotImplemented, gopSize=NotImplemented, conversionEngines=NotImplemented, conversionEnginesExtraParams=NotImplemented, twoPass=NotImplemented, deinterlice=NotImplemented, rotate=NotImplemented, operators=NotImplemented, engineVersion=NotImplemented, format=NotImplemented, aspectRatioProcessingMode=NotImplemented, forceFrameToMultiplication16=NotImplemented, isGopInSec=NotImplemented, isAvoidVideoShrinkFramesizeToSource=NotImplemented, isAvoidVideoShrinkBitrateToSource=NotImplemented, isVideoFrameRateForLowBrAppleHls=NotImplemented, multiStream=NotImplemented, anamorphicPixels=NotImplemented, isAvoidForcedKeyFrames=NotImplemented, isCropIMX=NotImplemented, optimizationPolicy=NotImplemented, maxFrameRate=NotImplemented, videoConstantBitrate=NotImplemented, videoBitrateTolerance=NotImplemented, watermarkData=NotImplemented, clipOffset=NotImplemented, clipDuration=NotImplemented, flavorParamsId=NotImplemented, commandLinesStr=NotImplemented, flavorParamsVersion=NotImplemented, flavorAssetId=NotImplemented, 
flavorAssetVersion=NotImplemented, readyBehavior=NotImplemented, flashVersion=NotImplemented, poly2Bitmap=NotImplemented): KalturaFlavorParamsOutput.__init__(self, id, partnerId, name, systemName, description, createdAt, isSystemDefault, tags, requiredPermissions, sourceRemoteStorageProfileId, remoteStorageProfileIds, mediaParserType, sourceAssetParamsIds, videoCodec, videoBitrate, audioCodec, audioBitrate, audioChannels, audioSampleRate, width, height, frameRate, gopSize, conversionEngines, conversionEnginesExtraParams, twoPass, deinterlice, rotate, operators, engineVersion, format, aspectRatioProcessingMode, forceFrameToMultiplication16, isGopInSec, isAvoidVideoShrinkFramesizeToSource, isAvoidVideoShrinkBitrateToSource, isVideoFrameRateForLowBrAppleHls, multiStream, anamorphicPixels, isAvoidForcedKeyFrames, isCropIMX, optimizationPolicy, maxFrameRate, videoConstantBitrate, videoBitrateTolerance, watermarkData, clipOffset, clipDuration, flavorParamsId, commandLinesStr, flavorParamsVersion, flavorAssetId, flavorAssetVersion, readyBehavior) # @var int self.flashVersion = flashVersion # @var bool self.poly2Bitmap = poly2Bitmap PROPERTY_LOADERS = { 'flashVersion': getXmlNodeInt, 'poly2Bitmap': getXmlNodeBool, } def fromXml(self, node): KalturaFlavorParamsOutput.fromXml(self, node) self.fromXmlImpl(node, KalturaSwfFlavorParamsOutput.PROPERTY_LOADERS) def toParams(self): kparams = KalturaFlavorParamsOutput.toParams(self) kparams.put("objectType", "KalturaSwfFlavorParamsOutput") kparams.addIntIfDefined("flashVersion", self.flashVersion) kparams.addBoolIfDefined("poly2Bitmap", self.poly2Bitmap) return kparams def getFlashVersion(self): return self.flashVersion def setFlashVersion(self, newFlashVersion): self.flashVersion = newFlashVersion def getPoly2Bitmap(self): return self.poly2Bitmap def setPoly2Bitmap(self, newPoly2Bitmap): self.poly2Bitmap = newPoly2Bitmap # @package Kaltura # @subpackage Client class KalturaDocumentEntryBaseFilter(KalturaBaseEntryFilter): def 
__init__(self, orderBy=NotImplemented, advancedSearch=NotImplemented, idEqual=NotImplemented, idIn=NotImplemented, idNotIn=NotImplemented, nameLike=NotImplemented, nameMultiLikeOr=NotImplemented, nameMultiLikeAnd=NotImplemented, nameEqual=NotImplemented, partnerIdEqual=NotImplemented, partnerIdIn=NotImplemented, userIdEqual=NotImplemented, userIdIn=NotImplemented, creatorIdEqual=NotImplemented, tagsLike=NotImplemented, tagsMultiLikeOr=NotImplemented, tagsMultiLikeAnd=NotImplemented, adminTagsLike=NotImplemented, adminTagsMultiLikeOr=NotImplemented, adminTagsMultiLikeAnd=NotImplemented, categoriesMatchAnd=NotImplemented, categoriesMatchOr=NotImplemented, categoriesNotContains=NotImplemented, categoriesIdsMatchAnd=NotImplemented, categoriesIdsMatchOr=NotImplemented, categoriesIdsNotContains=NotImplemented, categoriesIdsEmpty=NotImplemented, statusEqual=NotImplemented, statusNotEqual=NotImplemented, statusIn=NotImplemented, statusNotIn=NotImplemented, moderationStatusEqual=NotImplemented, moderationStatusNotEqual=NotImplemented, moderationStatusIn=NotImplemented, moderationStatusNotIn=NotImplemented, typeEqual=NotImplemented, typeIn=NotImplemented, createdAtGreaterThanOrEqual=NotImplemented, createdAtLessThanOrEqual=NotImplemented, updatedAtGreaterThanOrEqual=NotImplemented, updatedAtLessThanOrEqual=NotImplemented, totalRankLessThanOrEqual=NotImplemented, totalRankGreaterThanOrEqual=NotImplemented, groupIdEqual=NotImplemented, searchTextMatchAnd=NotImplemented, searchTextMatchOr=NotImplemented, accessControlIdEqual=NotImplemented, accessControlIdIn=NotImplemented, startDateGreaterThanOrEqual=NotImplemented, startDateLessThanOrEqual=NotImplemented, startDateGreaterThanOrEqualOrNull=NotImplemented, startDateLessThanOrEqualOrNull=NotImplemented, endDateGreaterThanOrEqual=NotImplemented, endDateLessThanOrEqual=NotImplemented, endDateGreaterThanOrEqualOrNull=NotImplemented, endDateLessThanOrEqualOrNull=NotImplemented, referenceIdEqual=NotImplemented, 
referenceIdIn=NotImplemented, replacingEntryIdEqual=NotImplemented, replacingEntryIdIn=NotImplemented, replacedEntryIdEqual=NotImplemented, replacedEntryIdIn=NotImplemented, replacementStatusEqual=NotImplemented, replacementStatusIn=NotImplemented, partnerSortValueGreaterThanOrEqual=NotImplemented, partnerSortValueLessThanOrEqual=NotImplemented, rootEntryIdEqual=NotImplemented, rootEntryIdIn=NotImplemented, parentEntryIdEqual=NotImplemented, entitledUsersEditMatchAnd=NotImplemented, entitledUsersPublishMatchAnd=NotImplemented, tagsNameMultiLikeOr=NotImplemented, tagsAdminTagsMultiLikeOr=NotImplemented, tagsAdminTagsNameMultiLikeOr=NotImplemented, tagsNameMultiLikeAnd=NotImplemented, tagsAdminTagsMultiLikeAnd=NotImplemented, tagsAdminTagsNameMultiLikeAnd=NotImplemented, freeText=NotImplemented, isRoot=NotImplemented, categoriesFullNameIn=NotImplemented, categoryAncestorIdIn=NotImplemented, redirectFromEntryId=NotImplemented, documentTypeEqual=NotImplemented, documentTypeIn=NotImplemented, assetParamsIdsMatchOr=NotImplemented, assetParamsIdsMatchAnd=NotImplemented): KalturaBaseEntryFilter.__init__(self, orderBy, advancedSearch, idEqual, idIn, idNotIn, nameLike, nameMultiLikeOr, nameMultiLikeAnd, nameEqual, partnerIdEqual, partnerIdIn, userIdEqual, userIdIn, creatorIdEqual, tagsLike, tagsMultiLikeOr, tagsMultiLikeAnd, adminTagsLike, adminTagsMultiLikeOr, adminTagsMultiLikeAnd, categoriesMatchAnd, categoriesMatchOr, categoriesNotContains, categoriesIdsMatchAnd, categoriesIdsMatchOr, categoriesIdsNotContains, categoriesIdsEmpty, statusEqual, statusNotEqual, statusIn, statusNotIn, moderationStatusEqual, moderationStatusNotEqual, moderationStatusIn, moderationStatusNotIn, typeEqual, typeIn, createdAtGreaterThanOrEqual, createdAtLessThanOrEqual, updatedAtGreaterThanOrEqual, updatedAtLessThanOrEqual, totalRankLessThanOrEqual, totalRankGreaterThanOrEqual, groupIdEqual, searchTextMatchAnd, searchTextMatchOr, accessControlIdEqual, accessControlIdIn, 
startDateGreaterThanOrEqual, startDateLessThanOrEqual, startDateGreaterThanOrEqualOrNull, startDateLessThanOrEqualOrNull, endDateGreaterThanOrEqual, endDateLessThanOrEqual, endDateGreaterThanOrEqualOrNull, endDateLessThanOrEqualOrNull, referenceIdEqual, referenceIdIn, replacingEntryIdEqual, replacingEntryIdIn, replacedEntryIdEqual, replacedEntryIdIn, replacementStatusEqual, replacementStatusIn, partnerSortValueGreaterThanOrEqual, partnerSortValueLessThanOrEqual, rootEntryIdEqual, rootEntryIdIn, parentEntryIdEqual, entitledUsersEditMatchAnd, entitledUsersPublishMatchAnd, tagsNameMultiLikeOr, tagsAdminTagsMultiLikeOr, tagsAdminTagsNameMultiLikeOr, tagsNameMultiLikeAnd, tagsAdminTagsMultiLikeAnd, tagsAdminTagsNameMultiLikeAnd, freeText, isRoot, categoriesFullNameIn, categoryAncestorIdIn, redirectFromEntryId) # @var KalturaDocumentType self.documentTypeEqual = documentTypeEqual # @var string self.documentTypeIn = documentTypeIn # @var string self.assetParamsIdsMatchOr = assetParamsIdsMatchOr # @var string self.assetParamsIdsMatchAnd = assetParamsIdsMatchAnd PROPERTY_LOADERS = { 'documentTypeEqual': (KalturaEnumsFactory.createInt, "KalturaDocumentType"), 'documentTypeIn': getXmlNodeText, 'assetParamsIdsMatchOr': getXmlNodeText, 'assetParamsIdsMatchAnd': getXmlNodeText, } def fromXml(self, node): KalturaBaseEntryFilter.fromXml(self, node) self.fromXmlImpl(node, KalturaDocumentEntryBaseFilter.PROPERTY_LOADERS) def toParams(self): kparams = KalturaBaseEntryFilter.toParams(self) kparams.put("objectType", "KalturaDocumentEntryBaseFilter") kparams.addIntEnumIfDefined("documentTypeEqual", self.documentTypeEqual) kparams.addStringIfDefined("documentTypeIn", self.documentTypeIn) kparams.addStringIfDefined("assetParamsIdsMatchOr", self.assetParamsIdsMatchOr) kparams.addStringIfDefined("assetParamsIdsMatchAnd", self.assetParamsIdsMatchAnd) return kparams def getDocumentTypeEqual(self): return self.documentTypeEqual def setDocumentTypeEqual(self, newDocumentTypeEqual): 
self.documentTypeEqual = newDocumentTypeEqual def getDocumentTypeIn(self): return self.documentTypeIn def setDocumentTypeIn(self, newDocumentTypeIn): self.documentTypeIn = newDocumentTypeIn def getAssetParamsIdsMatchOr(self): return self.assetParamsIdsMatchOr def setAssetParamsIdsMatchOr(self, newAssetParamsIdsMatchOr): self.assetParamsIdsMatchOr = newAssetParamsIdsMatchOr def getAssetParamsIdsMatchAnd(self): return self.assetParamsIdsMatchAnd def setAssetParamsIdsMatchAnd(self, newAssetParamsIdsMatchAnd): self.assetParamsIdsMatchAnd = newAssetParamsIdsMatchAnd # @package Kaltura # @subpackage Client class KalturaDocumentEntryFilter(KalturaDocumentEntryBaseFilter): def __init__(self, orderBy=NotImplemented, advancedSearch=NotImplemented, idEqual=NotImplemented, idIn=NotImplemented, idNotIn=NotImplemented, nameLike=NotImplemented, nameMultiLikeOr=NotImplemented, nameMultiLikeAnd=NotImplemented, nameEqual=NotImplemented, partnerIdEqual=NotImplemented, partnerIdIn=NotImplemented, userIdEqual=NotImplemented, userIdIn=NotImplemented, creatorIdEqual=NotImplemented, tagsLike=NotImplemented, tagsMultiLikeOr=NotImplemented, tagsMultiLikeAnd=NotImplemented, adminTagsLike=NotImplemented, adminTagsMultiLikeOr=NotImplemented, adminTagsMultiLikeAnd=NotImplemented, categoriesMatchAnd=NotImplemented, categoriesMatchOr=NotImplemented, categoriesNotContains=NotImplemented, categoriesIdsMatchAnd=NotImplemented, categoriesIdsMatchOr=NotImplemented, categoriesIdsNotContains=NotImplemented, categoriesIdsEmpty=NotImplemented, statusEqual=NotImplemented, statusNotEqual=NotImplemented, statusIn=NotImplemented, statusNotIn=NotImplemented, moderationStatusEqual=NotImplemented, moderationStatusNotEqual=NotImplemented, moderationStatusIn=NotImplemented, moderationStatusNotIn=NotImplemented, typeEqual=NotImplemented, typeIn=NotImplemented, createdAtGreaterThanOrEqual=NotImplemented, createdAtLessThanOrEqual=NotImplemented, updatedAtGreaterThanOrEqual=NotImplemented, 
updatedAtLessThanOrEqual=NotImplemented, totalRankLessThanOrEqual=NotImplemented, totalRankGreaterThanOrEqual=NotImplemented, groupIdEqual=NotImplemented, searchTextMatchAnd=NotImplemented, searchTextMatchOr=NotImplemented, accessControlIdEqual=NotImplemented, accessControlIdIn=NotImplemented, startDateGreaterThanOrEqual=NotImplemented, startDateLessThanOrEqual=NotImplemented, startDateGreaterThanOrEqualOrNull=NotImplemented, startDateLessThanOrEqualOrNull=NotImplemented, endDateGreaterThanOrEqual=NotImplemented, endDateLessThanOrEqual=NotImplemented, endDateGreaterThanOrEqualOrNull=NotImplemented, endDateLessThanOrEqualOrNull=NotImplemented, referenceIdEqual=NotImplemented, referenceIdIn=NotImplemented, replacingEntryIdEqual=NotImplemented, replacingEntryIdIn=NotImplemented, replacedEntryIdEqual=NotImplemented, replacedEntryIdIn=NotImplemented, replacementStatusEqual=NotImplemented, replacementStatusIn=NotImplemented, partnerSortValueGreaterThanOrEqual=NotImplemented, partnerSortValueLessThanOrEqual=NotImplemented, rootEntryIdEqual=NotImplemented, rootEntryIdIn=NotImplemented, parentEntryIdEqual=NotImplemented, entitledUsersEditMatchAnd=NotImplemented, entitledUsersPublishMatchAnd=NotImplemented, tagsNameMultiLikeOr=NotImplemented, tagsAdminTagsMultiLikeOr=NotImplemented, tagsAdminTagsNameMultiLikeOr=NotImplemented, tagsNameMultiLikeAnd=NotImplemented, tagsAdminTagsMultiLikeAnd=NotImplemented, tagsAdminTagsNameMultiLikeAnd=NotImplemented, freeText=NotImplemented, isRoot=NotImplemented, categoriesFullNameIn=NotImplemented, categoryAncestorIdIn=NotImplemented, redirectFromEntryId=NotImplemented, documentTypeEqual=NotImplemented, documentTypeIn=NotImplemented, assetParamsIdsMatchOr=NotImplemented, assetParamsIdsMatchAnd=NotImplemented): KalturaDocumentEntryBaseFilter.__init__(self, orderBy, advancedSearch, idEqual, idIn, idNotIn, nameLike, nameMultiLikeOr, nameMultiLikeAnd, nameEqual, partnerIdEqual, partnerIdIn, userIdEqual, userIdIn, creatorIdEqual, tagsLike, 
tagsMultiLikeOr, tagsMultiLikeAnd, adminTagsLike, adminTagsMultiLikeOr, adminTagsMultiLikeAnd, categoriesMatchAnd, categoriesMatchOr, categoriesNotContains, categoriesIdsMatchAnd, categoriesIdsMatchOr, categoriesIdsNotContains, categoriesIdsEmpty, statusEqual, statusNotEqual, statusIn, statusNotIn, moderationStatusEqual, moderationStatusNotEqual, moderationStatusIn, moderationStatusNotIn, typeEqual, typeIn, createdAtGreaterThanOrEqual, createdAtLessThanOrEqual, updatedAtGreaterThanOrEqual, updatedAtLessThanOrEqual, totalRankLessThanOrEqual, totalRankGreaterThanOrEqual, groupIdEqual, searchTextMatchAnd, searchTextMatchOr, accessControlIdEqual, accessControlIdIn, startDateGreaterThanOrEqual, startDateLessThanOrEqual, startDateGreaterThanOrEqualOrNull, startDateLessThanOrEqualOrNull, endDateGreaterThanOrEqual, endDateLessThanOrEqual, endDateGreaterThanOrEqualOrNull, endDateLessThanOrEqualOrNull, referenceIdEqual, referenceIdIn, replacingEntryIdEqual, replacingEntryIdIn, replacedEntryIdEqual, replacedEntryIdIn, replacementStatusEqual, replacementStatusIn, partnerSortValueGreaterThanOrEqual, partnerSortValueLessThanOrEqual, rootEntryIdEqual, rootEntryIdIn, parentEntryIdEqual, entitledUsersEditMatchAnd, entitledUsersPublishMatchAnd, tagsNameMultiLikeOr, tagsAdminTagsMultiLikeOr, tagsAdminTagsNameMultiLikeOr, tagsNameMultiLikeAnd, tagsAdminTagsMultiLikeAnd, tagsAdminTagsNameMultiLikeAnd, freeText, isRoot, categoriesFullNameIn, categoryAncestorIdIn, redirectFromEntryId, documentTypeEqual, documentTypeIn, assetParamsIdsMatchOr, assetParamsIdsMatchAnd) PROPERTY_LOADERS = { } def fromXml(self, node): KalturaDocumentEntryBaseFilter.fromXml(self, node) self.fromXmlImpl(node, KalturaDocumentEntryFilter.PROPERTY_LOADERS) def toParams(self): kparams = KalturaDocumentEntryBaseFilter.toParams(self) kparams.put("objectType", "KalturaDocumentEntryFilter") return kparams # @package Kaltura # @subpackage Client class KalturaDocumentFlavorParamsBaseFilter(KalturaFlavorParamsFilter): def 
__init__(self, orderBy=NotImplemented, advancedSearch=NotImplemented, systemNameEqual=NotImplemented, systemNameIn=NotImplemented, isSystemDefaultEqual=NotImplemented, tagsEqual=NotImplemented, formatEqual=NotImplemented): KalturaFlavorParamsFilter.__init__(self, orderBy, advancedSearch, systemNameEqual, systemNameIn, isSystemDefaultEqual, tagsEqual, formatEqual) PROPERTY_LOADERS = { } def fromXml(self, node): KalturaFlavorParamsFilter.fromXml(self, node) self.fromXmlImpl(node, KalturaDocumentFlavorParamsBaseFilter.PROPERTY_LOADERS) def toParams(self): kparams = KalturaFlavorParamsFilter.toParams(self) kparams.put("objectType", "KalturaDocumentFlavorParamsBaseFilter") return kparams # @package Kaltura # @subpackage Client class KalturaImageFlavorParamsBaseFilter(KalturaFlavorParamsFilter): def __init__(self, orderBy=NotImplemented, advancedSearch=NotImplemented, systemNameEqual=NotImplemented, systemNameIn=NotImplemented, isSystemDefaultEqual=NotImplemented, tagsEqual=NotImplemented, formatEqual=NotImplemented): KalturaFlavorParamsFilter.__init__(self, orderBy, advancedSearch, systemNameEqual, systemNameIn, isSystemDefaultEqual, tagsEqual, formatEqual) PROPERTY_LOADERS = { } def fromXml(self, node): KalturaFlavorParamsFilter.fromXml(self, node) self.fromXmlImpl(node, KalturaImageFlavorParamsBaseFilter.PROPERTY_LOADERS) def toParams(self): kparams = KalturaFlavorParamsFilter.toParams(self) kparams.put("objectType", "KalturaImageFlavorParamsBaseFilter") return kparams # @package Kaltura # @subpackage Client class KalturaPdfFlavorParamsBaseFilter(KalturaFlavorParamsFilter): def __init__(self, orderBy=NotImplemented, advancedSearch=NotImplemented, systemNameEqual=NotImplemented, systemNameIn=NotImplemented, isSystemDefaultEqual=NotImplemented, tagsEqual=NotImplemented, formatEqual=NotImplemented): KalturaFlavorParamsFilter.__init__(self, orderBy, advancedSearch, systemNameEqual, systemNameIn, isSystemDefaultEqual, tagsEqual, formatEqual) PROPERTY_LOADERS = { } def 
fromXml(self, node): KalturaFlavorParamsFilter.fromXml(self, node) self.fromXmlImpl(node, KalturaPdfFlavorParamsBaseFilter.PROPERTY_LOADERS) def toParams(self): kparams = KalturaFlavorParamsFilter.toParams(self) kparams.put("objectType", "KalturaPdfFlavorParamsBaseFilter") return kparams # @package Kaltura # @subpackage Client class KalturaSwfFlavorParamsBaseFilter(KalturaFlavorParamsFilter): def __init__(self, orderBy=NotImplemented, advancedSearch=NotImplemented, systemNameEqual=NotImplemented, systemNameIn=NotImplemented, isSystemDefaultEqual=NotImplemented, tagsEqual=NotImplemented, formatEqual=NotImplemented): KalturaFlavorParamsFilter.__init__(self, orderBy, advancedSearch, systemNameEqual, systemNameIn, isSystemDefaultEqual, tagsEqual, formatEqual) PROPERTY_LOADERS = { } def fromXml(self, node): KalturaFlavorParamsFilter.fromXml(self, node) self.fromXmlImpl(node, KalturaSwfFlavorParamsBaseFilter.PROPERTY_LOADERS) def toParams(self): kparams = KalturaFlavorParamsFilter.toParams(self) kparams.put("objectType", "KalturaSwfFlavorParamsBaseFilter") return kparams # @package Kaltura # @subpackage Client class KalturaDocumentFlavorParamsFilter(KalturaDocumentFlavorParamsBaseFilter): def __init__(self, orderBy=NotImplemented, advancedSearch=NotImplemented, systemNameEqual=NotImplemented, systemNameIn=NotImplemented, isSystemDefaultEqual=NotImplemented, tagsEqual=NotImplemented, formatEqual=NotImplemented): KalturaDocumentFlavorParamsBaseFilter.__init__(self, orderBy, advancedSearch, systemNameEqual, systemNameIn, isSystemDefaultEqual, tagsEqual, formatEqual) PROPERTY_LOADERS = { } def fromXml(self, node): KalturaDocumentFlavorParamsBaseFilter.fromXml(self, node) self.fromXmlImpl(node, KalturaDocumentFlavorParamsFilter.PROPERTY_LOADERS) def toParams(self): kparams = KalturaDocumentFlavorParamsBaseFilter.toParams(self) kparams.put("objectType", "KalturaDocumentFlavorParamsFilter") return kparams # @package Kaltura # @subpackage Client class 
KalturaImageFlavorParamsFilter(KalturaImageFlavorParamsBaseFilter): def __init__(self, orderBy=NotImplemented, advancedSearch=NotImplemented, systemNameEqual=NotImplemented, systemNameIn=NotImplemented, isSystemDefaultEqual=NotImplemented, tagsEqual=NotImplemented, formatEqual=NotImplemented): KalturaImageFlavorParamsBaseFilter.__init__(self, orderBy, advancedSearch, systemNameEqual, systemNameIn, isSystemDefaultEqual, tagsEqual, formatEqual) PROPERTY_LOADERS = { } def fromXml(self, node): KalturaImageFlavorParamsBaseFilter.fromXml(self, node) self.fromXmlImpl(node, KalturaImageFlavorParamsFilter.PROPERTY_LOADERS) def toParams(self): kparams = KalturaImageFlavorParamsBaseFilter.toParams(self) kparams.put("objectType", "KalturaImageFlavorParamsFilter") return kparams # @package Kaltura # @subpackage Client class KalturaPdfFlavorParamsFilter(KalturaPdfFlavorParamsBaseFilter): def __init__(self, orderBy=NotImplemented, advancedSearch=NotImplemented, systemNameEqual=NotImplemented, systemNameIn=NotImplemented, isSystemDefaultEqual=NotImplemented, tagsEqual=NotImplemented, formatEqual=NotImplemented): KalturaPdfFlavorParamsBaseFilter.__init__(self, orderBy, advancedSearch, systemNameEqual, systemNameIn, isSystemDefaultEqual, tagsEqual, formatEqual) PROPERTY_LOADERS = { } def fromXml(self, node): KalturaPdfFlavorParamsBaseFilter.fromXml(self, node) self.fromXmlImpl(node, KalturaPdfFlavorParamsFilter.PROPERTY_LOADERS) def toParams(self): kparams = KalturaPdfFlavorParamsBaseFilter.toParams(self) kparams.put("objectType", "KalturaPdfFlavorParamsFilter") return kparams # @package Kaltura # @subpackage Client class KalturaSwfFlavorParamsFilter(KalturaSwfFlavorParamsBaseFilter): def __init__(self, orderBy=NotImplemented, advancedSearch=NotImplemented, systemNameEqual=NotImplemented, systemNameIn=NotImplemented, isSystemDefaultEqual=NotImplemented, tagsEqual=NotImplemented, formatEqual=NotImplemented): KalturaSwfFlavorParamsBaseFilter.__init__(self, orderBy, advancedSearch, 
systemNameEqual, systemNameIn, isSystemDefaultEqual, tagsEqual, formatEqual) PROPERTY_LOADERS = { } def fromXml(self, node): KalturaSwfFlavorParamsBaseFilter.fromXml(self, node) self.fromXmlImpl(node, KalturaSwfFlavorParamsFilter.PROPERTY_LOADERS) def toParams(self): kparams = KalturaSwfFlavorParamsBaseFilter.toParams(self) kparams.put("objectType", "KalturaSwfFlavorParamsFilter") return kparams # @package Kaltura # @subpackage Client class KalturaDocumentFlavorParamsOutputBaseFilter(KalturaFlavorParamsOutputFilter): def __init__(self, orderBy=NotImplemented, advancedSearch=NotImplemented, systemNameEqual=NotImplemented, systemNameIn=NotImplemented, isSystemDefaultEqual=NotImplemented, tagsEqual=NotImplemented, formatEqual=NotImplemented, flavorParamsIdEqual=NotImplemented, flavorParamsVersionEqual=NotImplemented, flavorAssetIdEqual=NotImplemented, flavorAssetVersionEqual=NotImplemented): KalturaFlavorParamsOutputFilter.__init__(self, orderBy, advancedSearch, systemNameEqual, systemNameIn, isSystemDefaultEqual, tagsEqual, formatEqual, flavorParamsIdEqual, flavorParamsVersionEqual, flavorAssetIdEqual, flavorAssetVersionEqual) PROPERTY_LOADERS = { } def fromXml(self, node): KalturaFlavorParamsOutputFilter.fromXml(self, node) self.fromXmlImpl(node, KalturaDocumentFlavorParamsOutputBaseFilter.PROPERTY_LOADERS) def toParams(self): kparams = KalturaFlavorParamsOutputFilter.toParams(self) kparams.put("objectType", "KalturaDocumentFlavorParamsOutputBaseFilter") return kparams # @package Kaltura # @subpackage Client class KalturaImageFlavorParamsOutputBaseFilter(KalturaFlavorParamsOutputFilter): def __init__(self, orderBy=NotImplemented, advancedSearch=NotImplemented, systemNameEqual=NotImplemented, systemNameIn=NotImplemented, isSystemDefaultEqual=NotImplemented, tagsEqual=NotImplemented, formatEqual=NotImplemented, flavorParamsIdEqual=NotImplemented, flavorParamsVersionEqual=NotImplemented, flavorAssetIdEqual=NotImplemented, flavorAssetVersionEqual=NotImplemented): 
KalturaFlavorParamsOutputFilter.__init__(self, orderBy, advancedSearch, systemNameEqual, systemNameIn, isSystemDefaultEqual, tagsEqual, formatEqual, flavorParamsIdEqual, flavorParamsVersionEqual, flavorAssetIdEqual, flavorAssetVersionEqual) PROPERTY_LOADERS = { } def fromXml(self, node): KalturaFlavorParamsOutputFilter.fromXml(self, node) self.fromXmlImpl(node, KalturaImageFlavorParamsOutputBaseFilter.PROPERTY_LOADERS) def toParams(self): kparams = KalturaFlavorParamsOutputFilter.toParams(self) kparams.put("objectType", "KalturaImageFlavorParamsOutputBaseFilter") return kparams # @package Kaltura # @subpackage Client class KalturaPdfFlavorParamsOutputBaseFilter(KalturaFlavorParamsOutputFilter): def __init__(self, orderBy=NotImplemented, advancedSearch=NotImplemented, systemNameEqual=NotImplemented, systemNameIn=NotImplemented, isSystemDefaultEqual=NotImplemented, tagsEqual=NotImplemented, formatEqual=NotImplemented, flavorParamsIdEqual=NotImplemented, flavorParamsVersionEqual=NotImplemented, flavorAssetIdEqual=NotImplemented, flavorAssetVersionEqual=NotImplemented): KalturaFlavorParamsOutputFilter.__init__(self, orderBy, advancedSearch, systemNameEqual, systemNameIn, isSystemDefaultEqual, tagsEqual, formatEqual, flavorParamsIdEqual, flavorParamsVersionEqual, flavorAssetIdEqual, flavorAssetVersionEqual) PROPERTY_LOADERS = { } def fromXml(self, node): KalturaFlavorParamsOutputFilter.fromXml(self, node) self.fromXmlImpl(node, KalturaPdfFlavorParamsOutputBaseFilter.PROPERTY_LOADERS) def toParams(self): kparams = KalturaFlavorParamsOutputFilter.toParams(self) kparams.put("objectType", "KalturaPdfFlavorParamsOutputBaseFilter") return kparams # @package Kaltura # @subpackage Client class KalturaSwfFlavorParamsOutputBaseFilter(KalturaFlavorParamsOutputFilter): def __init__(self, orderBy=NotImplemented, advancedSearch=NotImplemented, systemNameEqual=NotImplemented, systemNameIn=NotImplemented, isSystemDefaultEqual=NotImplemented, tagsEqual=NotImplemented, 
formatEqual=NotImplemented, flavorParamsIdEqual=NotImplemented, flavorParamsVersionEqual=NotImplemented, flavorAssetIdEqual=NotImplemented, flavorAssetVersionEqual=NotImplemented): KalturaFlavorParamsOutputFilter.__init__(self, orderBy, advancedSearch, systemNameEqual, systemNameIn, isSystemDefaultEqual, tagsEqual, formatEqual, flavorParamsIdEqual, flavorParamsVersionEqual, flavorAssetIdEqual, flavorAssetVersionEqual) PROPERTY_LOADERS = { } def fromXml(self, node): KalturaFlavorParamsOutputFilter.fromXml(self, node) self.fromXmlImpl(node, KalturaSwfFlavorParamsOutputBaseFilter.PROPERTY_LOADERS) def toParams(self): kparams = KalturaFlavorParamsOutputFilter.toParams(self) kparams.put("objectType", "KalturaSwfFlavorParamsOutputBaseFilter") return kparams # @package Kaltura # @subpackage Client class KalturaDocumentFlavorParamsOutputFilter(KalturaDocumentFlavorParamsOutputBaseFilter): def __init__(self, orderBy=NotImplemented, advancedSearch=NotImplemented, systemNameEqual=NotImplemented, systemNameIn=NotImplemented, isSystemDefaultEqual=NotImplemented, tagsEqual=NotImplemented, formatEqual=NotImplemented, flavorParamsIdEqual=NotImplemented, flavorParamsVersionEqual=NotImplemented, flavorAssetIdEqual=NotImplemented, flavorAssetVersionEqual=NotImplemented): KalturaDocumentFlavorParamsOutputBaseFilter.__init__(self, orderBy, advancedSearch, systemNameEqual, systemNameIn, isSystemDefaultEqual, tagsEqual, formatEqual, flavorParamsIdEqual, flavorParamsVersionEqual, flavorAssetIdEqual, flavorAssetVersionEqual) PROPERTY_LOADERS = { } def fromXml(self, node): KalturaDocumentFlavorParamsOutputBaseFilter.fromXml(self, node) self.fromXmlImpl(node, KalturaDocumentFlavorParamsOutputFilter.PROPERTY_LOADERS) def toParams(self): kparams = KalturaDocumentFlavorParamsOutputBaseFilter.toParams(self) kparams.put("objectType", "KalturaDocumentFlavorParamsOutputFilter") return kparams # @package Kaltura # @subpackage Client class 
KalturaImageFlavorParamsOutputFilter(KalturaImageFlavorParamsOutputBaseFilter): def __init__(self, orderBy=NotImplemented, advancedSearch=NotImplemented, systemNameEqual=NotImplemented, systemNameIn=NotImplemented, isSystemDefaultEqual=NotImplemented, tagsEqual=NotImplemented, formatEqual=NotImplemented, flavorParamsIdEqual=NotImplemented, flavorParamsVersionEqual=NotImplemented, flavorAssetIdEqual=NotImplemented, flavorAssetVersionEqual=NotImplemented): KalturaImageFlavorParamsOutputBaseFilter.__init__(self, orderBy, advancedSearch, systemNameEqual, systemNameIn, isSystemDefaultEqual, tagsEqual, formatEqual, flavorParamsIdEqual, flavorParamsVersionEqual, flavorAssetIdEqual, flavorAssetVersionEqual) PROPERTY_LOADERS = { } def fromXml(self, node): KalturaImageFlavorParamsOutputBaseFilter.fromXml(self, node) self.fromXmlImpl(node, KalturaImageFlavorParamsOutputFilter.PROPERTY_LOADERS) def toParams(self): kparams = KalturaImageFlavorParamsOutputBaseFilter.toParams(self) kparams.put("objectType", "KalturaImageFlavorParamsOutputFilter") return kparams # @package Kaltura # @subpackage Client class KalturaPdfFlavorParamsOutputFilter(KalturaPdfFlavorParamsOutputBaseFilter): def __init__(self, orderBy=NotImplemented, advancedSearch=NotImplemented, systemNameEqual=NotImplemented, systemNameIn=NotImplemented, isSystemDefaultEqual=NotImplemented, tagsEqual=NotImplemented, formatEqual=NotImplemented, flavorParamsIdEqual=NotImplemented, flavorParamsVersionEqual=NotImplemented, flavorAssetIdEqual=NotImplemented, flavorAssetVersionEqual=NotImplemented): KalturaPdfFlavorParamsOutputBaseFilter.__init__(self, orderBy, advancedSearch, systemNameEqual, systemNameIn, isSystemDefaultEqual, tagsEqual, formatEqual, flavorParamsIdEqual, flavorParamsVersionEqual, flavorAssetIdEqual, flavorAssetVersionEqual) PROPERTY_LOADERS = { } def fromXml(self, node): KalturaPdfFlavorParamsOutputBaseFilter.fromXml(self, node) self.fromXmlImpl(node, KalturaPdfFlavorParamsOutputFilter.PROPERTY_LOADERS) def 
toParams(self): kparams = KalturaPdfFlavorParamsOutputBaseFilter.toParams(self) kparams.put("objectType", "KalturaPdfFlavorParamsOutputFilter") return kparams # @package Kaltura # @subpackage Client class KalturaSwfFlavorParamsOutputFilter(KalturaSwfFlavorParamsOutputBaseFilter): def __init__(self, orderBy=NotImplemented, advancedSearch=NotImplemented, systemNameEqual=NotImplemented, systemNameIn=NotImplemented, isSystemDefaultEqual=NotImplemented, tagsEqual=NotImplemented, formatEqual=NotImplemented, flavorParamsIdEqual=NotImplemented, flavorParamsVersionEqual=NotImplemented, flavorAssetIdEqual=NotImplemented, flavorAssetVersionEqual=NotImplemented): KalturaSwfFlavorParamsOutputBaseFilter.__init__(self, orderBy, advancedSearch, systemNameEqual, systemNameIn, isSystemDefaultEqual, tagsEqual, formatEqual, flavorParamsIdEqual, flavorParamsVersionEqual, flavorAssetIdEqual, flavorAssetVersionEqual) PROPERTY_LOADERS = { } def fromXml(self, node): KalturaSwfFlavorParamsOutputBaseFilter.fromXml(self, node) self.fromXmlImpl(node, KalturaSwfFlavorParamsOutputFilter.PROPERTY_LOADERS) def toParams(self): kparams = KalturaSwfFlavorParamsOutputBaseFilter.toParams(self) kparams.put("objectType", "KalturaSwfFlavorParamsOutputFilter") return kparams ########## services ########## # @package Kaltura # @subpackage Client class KalturaDocumentsService(KalturaServiceBase): """Document service lets you upload and manage document files""" def __init__(self, client = None): KalturaServiceBase.__init__(self, client) def addFromUploadedFile(self, documentEntry, uploadTokenId): """Add new document entry after the specific document file was uploaded and the upload token id exists""" kparams = KalturaParams() kparams.addObjectIfDefined("documentEntry", documentEntry) kparams.addStringIfDefined("uploadTokenId", uploadTokenId) self.client.queueServiceActionCall("document_documents", "addFromUploadedFile", KalturaDocumentEntry, kparams) if self.client.isMultiRequest(): return 
self.client.getMultiRequestResult() resultNode = self.client.doQueue() return KalturaObjectFactory.create(resultNode, KalturaDocumentEntry) def addFromEntry(self, sourceEntryId, documentEntry = NotImplemented, sourceFlavorParamsId = NotImplemented): """Copy entry into new entry""" kparams = KalturaParams() kparams.addStringIfDefined("sourceEntryId", sourceEntryId) kparams.addObjectIfDefined("documentEntry", documentEntry) kparams.addIntIfDefined("sourceFlavorParamsId", sourceFlavorParamsId); self.client.queueServiceActionCall("document_documents", "addFromEntry", KalturaDocumentEntry, kparams) if self.client.isMultiRequest(): return self.client.getMultiRequestResult() resultNode = self.client.doQueue() return KalturaObjectFactory.create(resultNode, KalturaDocumentEntry) def addFromFlavorAsset(self, sourceFlavorAssetId, documentEntry = NotImplemented): """Copy flavor asset into new entry""" kparams = KalturaParams() kparams.addStringIfDefined("sourceFlavorAssetId", sourceFlavorAssetId) kparams.addObjectIfDefined("documentEntry", documentEntry) self.client.queueServiceActionCall("document_documents", "addFromFlavorAsset", KalturaDocumentEntry, kparams) if self.client.isMultiRequest(): return self.client.getMultiRequestResult() resultNode = self.client.doQueue() return KalturaObjectFactory.create(resultNode, KalturaDocumentEntry) def convert(self, entryId, conversionProfileId = NotImplemented, dynamicConversionAttributes = NotImplemented): """Convert entry""" kparams = KalturaParams() kparams.addStringIfDefined("entryId", entryId) kparams.addIntIfDefined("conversionProfileId", conversionProfileId); kparams.addArrayIfDefined("dynamicConversionAttributes", dynamicConversionAttributes) self.client.queueServiceActionCall("document_documents", "convert", None, kparams) if self.client.isMultiRequest(): return self.client.getMultiRequestResult() resultNode = self.client.doQueue() return getXmlNodeInt(resultNode) def get(self, entryId, version = -1): """Get document entry by 
ID.""" kparams = KalturaParams() kparams.addStringIfDefined("entryId", entryId) kparams.addIntIfDefined("version", version); self.client.queueServiceActionCall("document_documents", "get", KalturaDocumentEntry, kparams) if self.client.isMultiRequest(): return self.client.getMultiRequestResult() resultNode = self.client.doQueue() return KalturaObjectFactory.create(resultNode, KalturaDocumentEntry) def update(self, entryId, documentEntry): """Update document entry. Only the properties that were set will be updated.""" kparams = KalturaParams() kparams.addStringIfDefined("entryId", entryId) kparams.addObjectIfDefined("documentEntry", documentEntry) self.client.queueServiceActionCall("document_documents", "update", KalturaDocumentEntry, kparams) if self.client.isMultiRequest(): return self.client.getMultiRequestResult() resultNode = self.client.doQueue() return KalturaObjectFactory.create(resultNode, KalturaDocumentEntry) def delete(self, entryId): """Delete a document entry.""" kparams = KalturaParams() kparams.addStringIfDefined("entryId", entryId) self.client.queueServiceActionCall("document_documents", "delete", None, kparams) if self.client.isMultiRequest(): return self.client.getMultiRequestResult() resultNode = self.client.doQueue() def list(self, filter = NotImplemented, pager = NotImplemented): """List document entries by filter with paging support.""" kparams = KalturaParams() kparams.addObjectIfDefined("filter", filter) kparams.addObjectIfDefined("pager", pager) self.client.queueServiceActionCall("document_documents", "list", KalturaDocumentListResponse, kparams) if self.client.isMultiRequest(): return self.client.getMultiRequestResult() resultNode = self.client.doQueue() return KalturaObjectFactory.create(resultNode, KalturaDocumentListResponse) def upload(self, fileData): """Upload a document file to Kaltura, then the file can be used to create a document entry.""" kparams = KalturaParams() kfiles = KalturaFiles() kfiles.put("fileData", fileData); 
self.client.queueServiceActionCall("document_documents", "upload", None, kparams, kfiles) if self.client.isMultiRequest(): return self.client.getMultiRequestResult() resultNode = self.client.doQueue() return getXmlNodeText(resultNode) def convertPptToSwf(self, entryId): """This will queue a batch job for converting the document file to swf Returns the URL where the new swf will be available""" kparams = KalturaParams() kparams.addStringIfDefined("entryId", entryId) self.client.queueServiceActionCall("document_documents", "convertPptToSwf", None, kparams) if self.client.isMultiRequest(): return self.client.getMultiRequestResult() resultNode = self.client.doQueue() return getXmlNodeText(resultNode) def serve(self, entryId, flavorAssetId = NotImplemented, forceProxy = False): """Serves the file content""" kparams = KalturaParams() kparams.addStringIfDefined("entryId", entryId) kparams.addStringIfDefined("flavorAssetId", flavorAssetId) kparams.addBoolIfDefined("forceProxy", forceProxy); self.client.queueServiceActionCall('document_documents', 'serve', None ,kparams) return self.client.getServeUrl() def serveByFlavorParamsId(self, entryId, flavorParamsId = NotImplemented, forceProxy = False): """Serves the file content""" kparams = KalturaParams() kparams.addStringIfDefined("entryId", entryId) kparams.addStringIfDefined("flavorParamsId", flavorParamsId) kparams.addBoolIfDefined("forceProxy", forceProxy); self.client.queueServiceActionCall('document_documents', 'serveByFlavorParamsId', None ,kparams) return self.client.getServeUrl() def updateContent(self, entryId, resource, conversionProfileId = NotImplemented): """Replace content associated with the given document entry.""" kparams = KalturaParams() kparams.addStringIfDefined("entryId", entryId) kparams.addObjectIfDefined("resource", resource) kparams.addIntIfDefined("conversionProfileId", conversionProfileId); self.client.queueServiceActionCall("document_documents", "updateContent", KalturaDocumentEntry, kparams) if 
self.client.isMultiRequest(): return self.client.getMultiRequestResult() resultNode = self.client.doQueue() return KalturaObjectFactory.create(resultNode, KalturaDocumentEntry) def approveReplace(self, entryId): """Approves document replacement""" kparams = KalturaParams() kparams.addStringIfDefined("entryId", entryId) self.client.queueServiceActionCall("document_documents", "approveReplace", KalturaDocumentEntry, kparams) if self.client.isMultiRequest(): return self.client.getMultiRequestResult() resultNode = self.client.doQueue() return KalturaObjectFactory.create(resultNode, KalturaDocumentEntry) def cancelReplace(self, entryId): """Cancels document replacement""" kparams = KalturaParams() kparams.addStringIfDefined("entryId", entryId) self.client.queueServiceActionCall("document_documents", "cancelReplace", KalturaDocumentEntry, kparams) if self.client.isMultiRequest(): return self.client.getMultiRequestResult() resultNode = self.client.doQueue() return KalturaObjectFactory.create(resultNode, KalturaDocumentEntry) ########## main ########## class KalturaDocumentClientPlugin(KalturaClientPlugin): # KalturaDocumentClientPlugin instance = None # @return KalturaDocumentClientPlugin @staticmethod def get(): if KalturaDocumentClientPlugin.instance == None: KalturaDocumentClientPlugin.instance = KalturaDocumentClientPlugin() return KalturaDocumentClientPlugin.instance # @return array<KalturaServiceBase> def getServices(self): return { 'documents': KalturaDocumentsService, } def getEnums(self): return { 'KalturaDocumentType': KalturaDocumentType, 'KalturaDocumentEntryOrderBy': KalturaDocumentEntryOrderBy, 'KalturaDocumentFlavorParamsOrderBy': KalturaDocumentFlavorParamsOrderBy, 'KalturaDocumentFlavorParamsOutputOrderBy': KalturaDocumentFlavorParamsOutputOrderBy, 'KalturaImageFlavorParamsOrderBy': KalturaImageFlavorParamsOrderBy, 'KalturaImageFlavorParamsOutputOrderBy': KalturaImageFlavorParamsOutputOrderBy, 'KalturaPdfFlavorParamsOrderBy': 
KalturaPdfFlavorParamsOrderBy, 'KalturaPdfFlavorParamsOutputOrderBy': KalturaPdfFlavorParamsOutputOrderBy, 'KalturaSwfFlavorParamsOrderBy': KalturaSwfFlavorParamsOrderBy, 'KalturaSwfFlavorParamsOutputOrderBy': KalturaSwfFlavorParamsOutputOrderBy, } def getTypes(self): return { 'KalturaDocumentEntry': KalturaDocumentEntry, 'KalturaDocumentListResponse': KalturaDocumentListResponse, 'KalturaDocumentFlavorParams': KalturaDocumentFlavorParams, 'KalturaImageFlavorParams': KalturaImageFlavorParams, 'KalturaPdfFlavorParams': KalturaPdfFlavorParams, 'KalturaSwfFlavorParams': KalturaSwfFlavorParams, 'KalturaDocumentFlavorParamsOutput': KalturaDocumentFlavorParamsOutput, 'KalturaImageFlavorParamsOutput': KalturaImageFlavorParamsOutput, 'KalturaPdfFlavorParamsOutput': KalturaPdfFlavorParamsOutput, 'KalturaSwfFlavorParamsOutput': KalturaSwfFlavorParamsOutput, 'KalturaDocumentEntryBaseFilter': KalturaDocumentEntryBaseFilter, 'KalturaDocumentEntryFilter': KalturaDocumentEntryFilter, 'KalturaDocumentFlavorParamsBaseFilter': KalturaDocumentFlavorParamsBaseFilter, 'KalturaImageFlavorParamsBaseFilter': KalturaImageFlavorParamsBaseFilter, 'KalturaPdfFlavorParamsBaseFilter': KalturaPdfFlavorParamsBaseFilter, 'KalturaSwfFlavorParamsBaseFilter': KalturaSwfFlavorParamsBaseFilter, 'KalturaDocumentFlavorParamsFilter': KalturaDocumentFlavorParamsFilter, 'KalturaImageFlavorParamsFilter': KalturaImageFlavorParamsFilter, 'KalturaPdfFlavorParamsFilter': KalturaPdfFlavorParamsFilter, 'KalturaSwfFlavorParamsFilter': KalturaSwfFlavorParamsFilter, 'KalturaDocumentFlavorParamsOutputBaseFilter': KalturaDocumentFlavorParamsOutputBaseFilter, 'KalturaImageFlavorParamsOutputBaseFilter': KalturaImageFlavorParamsOutputBaseFilter, 'KalturaPdfFlavorParamsOutputBaseFilter': KalturaPdfFlavorParamsOutputBaseFilter, 'KalturaSwfFlavorParamsOutputBaseFilter': KalturaSwfFlavorParamsOutputBaseFilter, 'KalturaDocumentFlavorParamsOutputFilter': KalturaDocumentFlavorParamsOutputFilter, 
'KalturaImageFlavorParamsOutputFilter': KalturaImageFlavorParamsOutputFilter, 'KalturaPdfFlavorParamsOutputFilter': KalturaPdfFlavorParamsOutputFilter, 'KalturaSwfFlavorParamsOutputFilter': KalturaSwfFlavorParamsOutputFilter, } # @return string def getName(self): return 'document'
35.042238
118
0.648264
5,377
97,067
11.625442
0.09987
0.008703
0.013438
0.019197
0.773668
0.753623
0.738474
0.720429
0.715054
0.708911
0
0.001047
0.291747
97,067
2,769
119
35.054893
0.908216
0.041446
0
0.820747
0
0
0.041894
0.023045
0
0
0
0
0
1
0.073351
false
0.006944
0.000868
0.016059
0.155382
0
0
0
1
null
0
0
0
0
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
e55af26e82b59e747ae03e61b0c89b977986ab28
1,563
py
Python
octicons16px/squirrel.py
andrewp-as-is/octicons16px.py
1272dc9f290619d83bd881e87dbd723b0c48844c
[ "Unlicense" ]
1
2021-01-28T06:47:39.000Z
2021-01-28T06:47:39.000Z
octicons16px/squirrel.py
andrewp-as-is/octicons16px.py
1272dc9f290619d83bd881e87dbd723b0c48844c
[ "Unlicense" ]
null
null
null
octicons16px/squirrel.py
andrewp-as-is/octicons16px.py
1272dc9f290619d83bd881e87dbd723b0c48844c
[ "Unlicense" ]
null
null
null
OCTICON_SQUIRREL = """ <svg class="octicon octicon-squirrel" xmlns="http://www.w3.org/2000/svg" viewBox="0 0 16 16" width="16" height="16"><path fill-rule="evenodd" d="M3.499.75a.75.75 0 011.5 0v.996C5.9 2.903 6.793 3.65 7.662 4.376l.24.202c-.036-.694.055-1.422.426-2.163C9.1.873 10.794-.045 12.622.26 14.408.558 16 1.94 16 4.25c0 1.278-.954 2.575-2.44 2.734l.146.508.065.22c.203.701.412 1.455.476 2.226.142 1.707-.4 3.03-1.487 3.898C11.714 14.671 10.27 15 8.75 15h-6a.75.75 0 010-1.5h1.376a4.489 4.489 0 01-.563-1.191 3.833 3.833 0 01-.05-2.063 4.636 4.636 0 01-2.025-.293.75.75 0 11.525-1.406c1.357.507 2.376-.006 2.698-.318l.009-.01a.748.748 0 011.06 0 .75.75 0 01-.012 1.074c-.912.92-.992 1.835-.768 2.586.221.74.745 1.337 1.196 1.621H8.75c1.343 0 2.398-.296 3.074-.836.635-.507 1.036-1.31.928-2.602-.05-.603-.216-1.224-.422-1.93l-.064-.221c-.12-.407-.246-.84-.353-1.29a2.404 2.404 0 01-.507-.441 3.063 3.063 0 01-.633-1.248.75.75 0 011.455-.364c.046.185.144.436.31.627.146.168.353.305.712.305.738 0 1.25-.615 1.25-1.25 0-1.47-.95-2.315-2.123-2.51-1.172-.196-2.227.387-2.706 1.345-.46.92-.27 1.774.019 3.062l.042.19a.753.753 0 01.01.05c.348.443.666.949.94 1.553a.75.75 0 11-1.365.62c-.553-1.217-1.32-1.94-2.3-2.768a85.08 85.08 0 00-.317-.265c-.814-.68-1.75-1.462-2.692-2.619a3.74 3.74 0 00-1.023.88c-.406.495-.663 1.036-.722 1.508.116.122.306.21.591.239.388.038.797-.06 1.032-.19a.75.75 0 01.728 1.31c-.515.287-1.23.439-1.906.373-.682-.067-1.473-.38-1.879-1.193L.75 5.677V5.5c0-.984.48-1.94 1.077-2.664.46-.559 1.05-1.055 1.673-1.353V.75z"></path></svg> """
312.6
1,534
0.667946
431
1,563
2.419954
0.563805
0.023011
0.033557
0.01534
0
0
0
0
0
0
0
0.616713
0.073576
1,563
4
1,535
390.75
0.103591
0
0
0
0
0.333333
0.983355
0.443662
0
0
0
0
0
1
0
false
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
1
1
1
1
0
0
0
0
1
1
1
null
0
0
0
0
0
0
0
0
0
0
0
0
0
8
e5b32bf0d7623cb45f039f30ab9cbd36eedfa237
28,065
py
Python
leo/modes/ada95.py
ATikhonov2/leo-editor
225aac990a9b2804aaa9dea29574d6e072e30474
[ "MIT" ]
1,550
2015-01-14T16:30:37.000Z
2022-03-31T08:55:58.000Z
leo/modes/ada95.py
ATikhonov2/leo-editor
225aac990a9b2804aaa9dea29574d6e072e30474
[ "MIT" ]
2,009
2015-01-13T16:28:52.000Z
2022-03-31T18:21:48.000Z
leo/modes/ada95.py
ATikhonov2/leo-editor
225aac990a9b2804aaa9dea29574d6e072e30474
[ "MIT" ]
200
2015-01-05T15:07:41.000Z
2022-03-07T17:05:01.000Z
# Leo colorizer control file for ada95 mode. # This file is in the public domain. # Properties for ada95 mode. properties = { "lineComment": "--", } # Attributes dict for ada95_main ruleset. ada95_main_attributes_dict = { "default": "null", "digit_re": "", "escape": "", "highlight_digits": "true", "ignore_case": "true", "no_word_sep": "", } # Dictionary of attributes dictionaries for ada95 mode. attributesDictDict = { "ada95_main": ada95_main_attributes_dict, } # Keywords dict for ada95_main ruleset. ada95_main_keywords_dict = { "abort": "keyword2", "abs": "keyword2", "abstract": "keyword2", "accept": "keyword2", "access": "keyword2", "address": "literal2", "aliased": "keyword2", "all": "keyword2", "and": "keyword2", "array": "keyword2", "at": "keyword2", "begin": "keyword2", "body": "keyword2", "boolean": "literal2", "case": "keyword2", "character": "literal2", "constant": "keyword2", "declare": "keyword2", "delay": "keyword2", "delta": "keyword2", "digits": "keyword2", "do": "keyword2", "duration": "literal2", "else": "keyword2", "elsif": "keyword2", "end": "keyword2", "entry": "keyword1", "exception": "keyword2", "exit": "keyword2", "false": "literal1", "float": "literal2", "for": "keyword2", "function": "keyword1", "goto": "keyword2", "if": "keyword2", "in": "keyword2", "integer": "literal2", "is": "keyword2", "latin_1": "literal2", "limited": "keyword2", "loop": "keyword2", "mod": "keyword2", "natural": "literal2", "new": "keyword2", "not": "keyword2", "null": "literal1", "or": "keyword2", "others": "keyword2", "out": "keyword2", "package": "keyword2", "positive": "literal2", "pragma": "keyword2", "private": "keyword2", "procedure": "keyword1", "protected": "keyword2", "raise": "keyword2", "range": "keyword2", "record": "keyword2", "rem": "keyword2", "renames": "keyword2", "requeue": "keyword2", "return": "keyword2", "select": "keyword2", "separate": "keyword2", "string": "literal2", "subtype": "keyword2", "tagged": "keyword2", "task": "keyword2", "terminate": 
"keyword2", "then": "keyword2", "time": "literal2", "true": "literal1", "type": "keyword2", "until": "keyword2", "use": "keyword2", "when": "keyword2", "while": "keyword2", "with": "keyword2", "xor": "keyword2", } # Dictionary of keywords dictionaries for ada95 mode. keywordsDictDict = { "ada95_main": ada95_main_keywords_dict, } # Rules for ada95_main ruleset. def ada95_rule0(colorer, s, i): return colorer.match_eol_span(s, i, kind="comment1", seq="--", at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="", exclude_match=False) def ada95_rule1(colorer, s, i): return colorer.match_span(s, i, kind="literal1", begin="\"", end="\"", at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="",exclude_match=False, no_escape=False, no_line_break=True, no_word_break=False) def ada95_rule2(colorer, s, i): return colorer.match_seq(s, i, kind="operator", seq=")", at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="") def ada95_rule3(colorer, s, i): return colorer.match_seq(s, i, kind="operator", seq="(", at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="") def ada95_rule4(colorer, s, i): return colorer.match_seq(s, i, kind="operator", seq="..", at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="") def ada95_rule5(colorer, s, i): return colorer.match_seq(s, i, kind="operator", seq=".all", at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="") def ada95_rule6(colorer, s, i): return colorer.match_seq(s, i, kind="operator", seq=":=", at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="") def ada95_rule7(colorer, s, i): return colorer.match_seq(s, i, kind="operator", seq="/=", at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="") def ada95_rule8(colorer, s, i): return colorer.match_seq(s, i, kind="operator", seq="=>", at_line_start=False, at_whitespace_end=False, at_word_start=False, 
delegate="") def ada95_rule9(colorer, s, i): return colorer.match_seq(s, i, kind="operator", seq="=", at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="") def ada95_rule10(colorer, s, i): return colorer.match_seq(s, i, kind="null", seq="<>", at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="") def ada95_rule11(colorer, s, i): return colorer.match_seq(s, i, kind="label", seq="<<", at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="") def ada95_rule12(colorer, s, i): return colorer.match_seq(s, i, kind="label", seq=">>", at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="") def ada95_rule13(colorer, s, i): return colorer.match_seq(s, i, kind="operator", seq=">=", at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="") def ada95_rule14(colorer, s, i): return colorer.match_seq(s, i, kind="operator", seq="<=", at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="") def ada95_rule15(colorer, s, i): return colorer.match_seq(s, i, kind="operator", seq=">", at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="") def ada95_rule16(colorer, s, i): return colorer.match_seq(s, i, kind="operator", seq="<", at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="") def ada95_rule17(colorer, s, i): return colorer.match_seq(s, i, kind="operator", seq="&", at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="") def ada95_rule18(colorer, s, i): return colorer.match_seq(s, i, kind="operator", seq="+", at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="") def ada95_rule19(colorer, s, i): return colorer.match_seq(s, i, kind="operator", seq="-", at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="") def ada95_rule20(colorer, s, i): return colorer.match_seq(s, i, kind="operator", seq="/", at_line_start=False, 
at_whitespace_end=False, at_word_start=False, delegate="") def ada95_rule21(colorer, s, i): return colorer.match_seq(s, i, kind="operator", seq="**", at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="") def ada95_rule22(colorer, s, i): return colorer.match_seq(s, i, kind="operator", seq="*", at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="") def ada95_rule23(colorer, s, i): return colorer.match_seq(s, i, kind="keyword3", seq="'access", at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="") def ada95_rule24(colorer, s, i): return colorer.match_seq(s, i, kind="keyword3", seq="'address", at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="") def ada95_rule25(colorer, s, i): return colorer.match_seq(s, i, kind="keyword3", seq="'adjacent", at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="") def ada95_rule26(colorer, s, i): return colorer.match_seq(s, i, kind="keyword3", seq="'aft", at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="") def ada95_rule27(colorer, s, i): return colorer.match_seq(s, i, kind="keyword3", seq="'alignment", at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="") def ada95_rule28(colorer, s, i): return colorer.match_seq(s, i, kind="keyword3", seq="'base", at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="") def ada95_rule29(colorer, s, i): return colorer.match_seq(s, i, kind="keyword3", seq="'bit_order", at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="") def ada95_rule30(colorer, s, i): return colorer.match_seq(s, i, kind="keyword3", seq="'body_version", at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="") def ada95_rule31(colorer, s, i): return colorer.match_seq(s, i, kind="keyword3", seq="'callable", at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="") def 
ada95_rule32(colorer, s, i): return colorer.match_seq(s, i, kind="keyword3", seq="'caller", at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="") def ada95_rule33(colorer, s, i): return colorer.match_seq(s, i, kind="keyword3", seq="'ceiling", at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="") def ada95_rule34(colorer, s, i): return colorer.match_seq(s, i, kind="keyword3", seq="'class", at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="") def ada95_rule35(colorer, s, i): return colorer.match_seq(s, i, kind="keyword3", seq="'component_size", at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="") def ada95_rule36(colorer, s, i): return colorer.match_seq(s, i, kind="keyword3", seq="'composed", at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="") def ada95_rule37(colorer, s, i): return colorer.match_seq(s, i, kind="keyword3", seq="'constrained", at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="") def ada95_rule38(colorer, s, i): return colorer.match_seq(s, i, kind="keyword3", seq="'copy_size", at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="") def ada95_rule39(colorer, s, i): return colorer.match_seq(s, i, kind="keyword3", seq="'count", at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="") def ada95_rule40(colorer, s, i): return colorer.match_seq(s, i, kind="keyword3", seq="'definite", at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="") def ada95_rule41(colorer, s, i): return colorer.match_seq(s, i, kind="keyword3", seq="'delta", at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="") def ada95_rule42(colorer, s, i): return colorer.match_seq(s, i, kind="keyword3", seq="'denorm", at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="") def ada95_rule43(colorer, s, i): return 
colorer.match_seq(s, i, kind="keyword3", seq="'digits", at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="") def ada95_rule44(colorer, s, i): return colorer.match_seq(s, i, kind="keyword3", seq="'exponent", at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="") def ada95_rule45(colorer, s, i): return colorer.match_seq(s, i, kind="keyword3", seq="'external_tag", at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="") def ada95_rule46(colorer, s, i): return colorer.match_seq(s, i, kind="keyword3", seq="'first", at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="") def ada95_rule47(colorer, s, i): return colorer.match_seq(s, i, kind="keyword3", seq="'first_bit", at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="") def ada95_rule48(colorer, s, i): return colorer.match_seq(s, i, kind="keyword3", seq="'floor", at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="") def ada95_rule49(colorer, s, i): return colorer.match_seq(s, i, kind="keyword3", seq="'fore", at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="") def ada95_rule50(colorer, s, i): return colorer.match_seq(s, i, kind="keyword3", seq="'fraction", at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="") def ada95_rule51(colorer, s, i): return colorer.match_seq(s, i, kind="keyword3", seq="'genetic", at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="") def ada95_rule52(colorer, s, i): return colorer.match_seq(s, i, kind="keyword3", seq="'identity", at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="") def ada95_rule53(colorer, s, i): return colorer.match_seq(s, i, kind="keyword3", seq="'image", at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="") def ada95_rule54(colorer, s, i): return colorer.match_seq(s, i, kind="keyword3", 
seq="'input", at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="") def ada95_rule55(colorer, s, i): return colorer.match_seq(s, i, kind="keyword3", seq="'last", at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="") def ada95_rule56(colorer, s, i): return colorer.match_seq(s, i, kind="keyword3", seq="'last_bit", at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="") def ada95_rule57(colorer, s, i): return colorer.match_seq(s, i, kind="keyword3", seq="'leading_part", at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="") def ada95_rule58(colorer, s, i): return colorer.match_seq(s, i, kind="keyword3", seq="'length", at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="") def ada95_rule59(colorer, s, i): return colorer.match_seq(s, i, kind="keyword3", seq="'machine", at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="") def ada95_rule60(colorer, s, i): return colorer.match_seq(s, i, kind="keyword3", seq="'machine_emax", at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="") def ada95_rule61(colorer, s, i): return colorer.match_seq(s, i, kind="keyword3", seq="'machine_emin", at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="") def ada95_rule62(colorer, s, i): return colorer.match_seq(s, i, kind="keyword3", seq="'machine_mantissa", at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="") def ada95_rule63(colorer, s, i): return colorer.match_seq(s, i, kind="keyword3", seq="'machine_overflows", at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="") def ada95_rule64(colorer, s, i): return colorer.match_seq(s, i, kind="keyword3", seq="'machine_radix", at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="") def ada95_rule65(colorer, s, i): return colorer.match_seq(s, i, kind="keyword3", 
seq="'machine_rounds", at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="") def ada95_rule66(colorer, s, i): return colorer.match_seq(s, i, kind="keyword3", seq="'max", at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="") def ada95_rule67(colorer, s, i): return colorer.match_seq(s, i, kind="keyword3", seq="'max_size_in_storage_elements", at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="") def ada95_rule68(colorer, s, i): return colorer.match_seq(s, i, kind="keyword3", seq="'min", at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="") def ada95_rule69(colorer, s, i): return colorer.match_seq(s, i, kind="keyword3", seq="'model", at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="") def ada95_rule70(colorer, s, i): return colorer.match_seq(s, i, kind="keyword3", seq="'model_emin", at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="") def ada95_rule71(colorer, s, i): return colorer.match_seq(s, i, kind="keyword3", seq="'model_epsilon", at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="") def ada95_rule72(colorer, s, i): return colorer.match_seq(s, i, kind="keyword3", seq="'model_mantissa", at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="") def ada95_rule73(colorer, s, i): return colorer.match_seq(s, i, kind="keyword3", seq="'model_small", at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="") def ada95_rule74(colorer, s, i): return colorer.match_seq(s, i, kind="keyword3", seq="'modulus", at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="") def ada95_rule75(colorer, s, i): return colorer.match_seq(s, i, kind="keyword3", seq="'output", at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="") def ada95_rule76(colorer, s, i): return colorer.match_seq(s, i, kind="keyword3", 
seq="'partition_id", at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="") def ada95_rule77(colorer, s, i): return colorer.match_seq(s, i, kind="keyword3", seq="'pos", at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="") def ada95_rule78(colorer, s, i): return colorer.match_seq(s, i, kind="keyword3", seq="'position", at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="") def ada95_rule79(colorer, s, i): return colorer.match_seq(s, i, kind="keyword3", seq="'pred", at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="") def ada95_rule80(colorer, s, i): return colorer.match_seq(s, i, kind="keyword3", seq="'range", at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="") def ada95_rule81(colorer, s, i): return colorer.match_seq(s, i, kind="keyword3", seq="'read", at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="") def ada95_rule82(colorer, s, i): return colorer.match_seq(s, i, kind="keyword3", seq="'remainder", at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="") def ada95_rule83(colorer, s, i): return colorer.match_seq(s, i, kind="keyword3", seq="'round", at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="") def ada95_rule84(colorer, s, i): return colorer.match_seq(s, i, kind="keyword3", seq="'rounding", at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="") def ada95_rule85(colorer, s, i): return colorer.match_seq(s, i, kind="keyword3", seq="'safe_first", at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="") def ada95_rule86(colorer, s, i): return colorer.match_seq(s, i, kind="keyword3", seq="'safe_last", at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="") def ada95_rule87(colorer, s, i): return colorer.match_seq(s, i, kind="keyword3", seq="'scale", at_line_start=False, 
at_whitespace_end=False, at_word_start=False, delegate="") def ada95_rule88(colorer, s, i): return colorer.match_seq(s, i, kind="keyword3", seq="'scaling", at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="") def ada95_rule89(colorer, s, i): return colorer.match_seq(s, i, kind="keyword3", seq="'signed_zeros", at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="") def ada95_rule90(colorer, s, i): return colorer.match_seq(s, i, kind="keyword3", seq="'size", at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="") def ada95_rule91(colorer, s, i): return colorer.match_seq(s, i, kind="keyword3", seq="'small", at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="") def ada95_rule92(colorer, s, i): return colorer.match_seq(s, i, kind="keyword3", seq="'storage_pool", at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="") def ada95_rule93(colorer, s, i): return colorer.match_seq(s, i, kind="keyword3", seq="'storage_size", at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="") def ada95_rule94(colorer, s, i): return colorer.match_seq(s, i, kind="keyword3", seq="'succ", at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="") def ada95_rule95(colorer, s, i): return colorer.match_seq(s, i, kind="keyword3", seq="'tag", at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="") def ada95_rule96(colorer, s, i): return colorer.match_seq(s, i, kind="keyword3", seq="'terminated", at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="") def ada95_rule97(colorer, s, i): return colorer.match_seq(s, i, kind="keyword3", seq="'truncation", at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="") def ada95_rule98(colorer, s, i): return colorer.match_seq(s, i, kind="keyword3", seq="'unbiased_rounding", at_line_start=False, at_whitespace_end=False, 
at_word_start=False, delegate="") def ada95_rule99(colorer, s, i): return colorer.match_seq(s, i, kind="keyword3", seq="'unchecked_access", at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="") def ada95_rule100(colorer, s, i): return colorer.match_seq(s, i, kind="keyword3", seq="'val", at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="") def ada95_rule101(colorer, s, i): return colorer.match_seq(s, i, kind="keyword3", seq="'valid", at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="") def ada95_rule102(colorer, s, i): return colorer.match_seq(s, i, kind="keyword3", seq="'value", at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="") def ada95_rule103(colorer, s, i): return colorer.match_seq(s, i, kind="keyword3", seq="'version", at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="") def ada95_rule104(colorer, s, i): return colorer.match_seq(s, i, kind="keyword3", seq="'wide_image", at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="") def ada95_rule105(colorer, s, i): return colorer.match_seq(s, i, kind="keyword3", seq="'wide_value", at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="") def ada95_rule106(colorer, s, i): return colorer.match_seq(s, i, kind="keyword3", seq="'wide_width", at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="") def ada95_rule107(colorer, s, i): return colorer.match_seq(s, i, kind="keyword3", seq="'width", at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="") def ada95_rule108(colorer, s, i): return colorer.match_seq(s, i, kind="keyword3", seq="'write", at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="") def ada95_rule109(colorer, s, i): return colorer.match_span(s, i, kind="literal1", begin="'", end="'", at_line_start=False, at_whitespace_end=False, at_word_start=False, 
delegate="",exclude_match=False, no_escape=False, no_line_break=True, no_word_break=False) def ada95_rule110(colorer, s, i): return colorer.match_keywords(s, i) # Rules dict for ada95_main ruleset. rulesDict1 = { "\"": [ada95_rule1,], "&": [ada95_rule17,], "'": [ada95_rule23,ada95_rule24,ada95_rule25,ada95_rule26,ada95_rule27,ada95_rule28,ada95_rule29,ada95_rule30,ada95_rule31,ada95_rule32,ada95_rule33,ada95_rule34,ada95_rule35,ada95_rule36,ada95_rule37,ada95_rule38,ada95_rule39,ada95_rule40,ada95_rule41,ada95_rule42,ada95_rule43,ada95_rule44,ada95_rule45,ada95_rule46,ada95_rule47,ada95_rule48,ada95_rule49,ada95_rule50,ada95_rule51,ada95_rule52,ada95_rule53,ada95_rule54,ada95_rule55,ada95_rule56,ada95_rule57,ada95_rule58,ada95_rule59,ada95_rule60,ada95_rule61,ada95_rule62,ada95_rule63,ada95_rule64,ada95_rule65,ada95_rule66,ada95_rule67,ada95_rule68,ada95_rule69,ada95_rule70,ada95_rule71,ada95_rule72,ada95_rule73,ada95_rule74,ada95_rule75,ada95_rule76,ada95_rule77,ada95_rule78,ada95_rule79,ada95_rule80,ada95_rule81,ada95_rule82,ada95_rule83,ada95_rule84,ada95_rule85,ada95_rule86,ada95_rule87,ada95_rule88,ada95_rule89,ada95_rule90,ada95_rule91,ada95_rule92,ada95_rule93,ada95_rule94,ada95_rule95,ada95_rule96,ada95_rule97,ada95_rule98,ada95_rule99,ada95_rule100,ada95_rule101,ada95_rule102,ada95_rule103,ada95_rule104,ada95_rule105,ada95_rule106,ada95_rule107,ada95_rule108,ada95_rule109,], "(": [ada95_rule3,], ")": [ada95_rule2,], "*": [ada95_rule21,ada95_rule22,], "+": [ada95_rule18,], "-": [ada95_rule0,ada95_rule19,], ".": [ada95_rule4,ada95_rule5,], "/": [ada95_rule7,ada95_rule20,], "0": [ada95_rule110,], "1": [ada95_rule110,], "2": [ada95_rule110,], "3": [ada95_rule110,], "4": [ada95_rule110,], "5": [ada95_rule110,], "6": [ada95_rule110,], "7": [ada95_rule110,], "8": [ada95_rule110,], "9": [ada95_rule110,], ":": [ada95_rule6,], "<": [ada95_rule10,ada95_rule11,ada95_rule14,ada95_rule16,], "=": [ada95_rule8,ada95_rule9,], ">": [ada95_rule12,ada95_rule13,ada95_rule15,], 
"@": [ada95_rule110,], "A": [ada95_rule110,], "B": [ada95_rule110,], "C": [ada95_rule110,], "D": [ada95_rule110,], "E": [ada95_rule110,], "F": [ada95_rule110,], "G": [ada95_rule110,], "H": [ada95_rule110,], "I": [ada95_rule110,], "J": [ada95_rule110,], "K": [ada95_rule110,], "L": [ada95_rule110,], "M": [ada95_rule110,], "N": [ada95_rule110,], "O": [ada95_rule110,], "P": [ada95_rule110,], "Q": [ada95_rule110,], "R": [ada95_rule110,], "S": [ada95_rule110,], "T": [ada95_rule110,], "U": [ada95_rule110,], "V": [ada95_rule110,], "W": [ada95_rule110,], "X": [ada95_rule110,], "Y": [ada95_rule110,], "Z": [ada95_rule110,], "_": [ada95_rule110,], "a": [ada95_rule110,], "b": [ada95_rule110,], "c": [ada95_rule110,], "d": [ada95_rule110,], "e": [ada95_rule110,], "f": [ada95_rule110,], "g": [ada95_rule110,], "h": [ada95_rule110,], "i": [ada95_rule110,], "j": [ada95_rule110,], "k": [ada95_rule110,], "l": [ada95_rule110,], "m": [ada95_rule110,], "n": [ada95_rule110,], "o": [ada95_rule110,], "p": [ada95_rule110,], "q": [ada95_rule110,], "r": [ada95_rule110,], "s": [ada95_rule110,], "t": [ada95_rule110,], "u": [ada95_rule110,], "v": [ada95_rule110,], "w": [ada95_rule110,], "x": [ada95_rule110,], "y": [ada95_rule110,], "z": [ada95_rule110,], } # x.rulesDictDict for ada95 mode. rulesDictDict = { "ada95_main": rulesDict1, } # Import dict for ada95 mode. importDict = {}
43.044479
1,154
0.677499
3,924
28,065
4.557849
0.091488
0.024825
0.055857
0.093095
0.774448
0.773162
0.770143
0.766564
0.766564
0.766564
0
0.060738
0.16697
28,065
651
1,155
43.110599
0.704265
0.014716
0
0.219653
0
0.001927
0.109123
0.001075
0
0
0
0
0
1
0.213873
false
0
0.001927
0.213873
0.429672
0
0
0
0
null
0
0
0
0
1
1
1
1
1
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
0
0
0
8
005fa77818fe9dc071b695c98d74d16966a07833
3,811
py
Python
BluePlug/Init.py
liufeng3486/BluePlug
c7c5c769ed35c71ebc542d34848d6bf309abd051
[ "MIT" ]
1
2019-01-27T04:08:05.000Z
2019-01-27T04:08:05.000Z
BluePlug/Init.py
liufeng3486/BluePlug
c7c5c769ed35c71ebc542d34848d6bf309abd051
[ "MIT" ]
5
2021-03-18T21:35:20.000Z
2022-01-13T00:58:18.000Z
BluePlug/Init.py
liufeng3486/BluePlug
c7c5c769ed35c71ebc542d34848d6bf309abd051
[ "MIT" ]
null
null
null
#!/usr/bin/env python # -*- coding: utf-8 -*- import os,time from BluePlug.Base import * #启动模拟器以及APP class Init(object): def __init__(self,index,app=""): self.index = index def start_mock(self): string = ".\dnplayer2\dnconsole.exe launch --index %s" % str(index) os.popen(string) def start_app(self, channel=False): # self.index = index try: prt("尝试启动APP", channel) string = '.\dnplayer2\dnconsole.exe adb --index %s --command "shell am start -n com.tencent.xyj/.MGameActivity"' % str( index) result = os.popen(string) res = result.read() for line in res.splitlines(): if "found" in line: return False if "Starting" in line: return True except: return False def init_start(self, channel=False): # self.index = index start_mock(index) prt("模拟器启动", channel) for i in range(10): time.sleep(5) if start_app(index): i = 10 prt("APP启动", channel) return "Init_check" else: prt("模拟器启动中,APP启动延时等待", channel) prt("模拟器启动超时", channel) return False def start_check(self, channel=False): # self.index = index try: for i in range(20): time.sleep(1) if (three_point_check([36, 466], [255, 247, 186], [47, 465], [255, 247, 186], [50, 459], [195, 157, 121], index=index)): prt("APP启动成功", channel) return "Login" else: prt("APP启动中", channel) prt("APP启动超时", channel) return "Init.start_check Error" except Exception as ex: prt("start_check false", str(ex), channel) return "Init.start_check Error" def start_mock(index): string = ".\dnplayer2\dnconsole.exe launch --index %s"%str(index) os.popen(string) def start_app(index,channel=False): try: prt("尝试启动APP",channel) string = '.\dnplayer2\dnconsole.exe adb --index %s --command "shell am start -n com.tencent.xyj/.MGameActivity"' % str( index) result = os.popen(string) res = result.read() for line in res.splitlines(): if "found" in line: return False if "Starting" in line: return True except: return False def init_start(index,channel=False): start_mock(index) prt("模拟器启动",channel) for i in range(10): time.sleep(5) if start_app(index): i=10 prt("APP启动",channel) 
return "Init_check" else: prt("模拟器启动中,APP启动延时等待",channel) prt("模拟器启动超时",channel) return False def start_check(index,channel=False): try: for i in range(20): time.sleep(1) if (three_point_check([36,466],[255, 247, 186], [47,465],[255, 247, 186], [50,459],[195, 157, 121],index=index)): prt("APP启动成功",channel) return "Login" else: prt("APP启动中",channel) prt("APP启动超时",channel) return "Init.start_check Error" except Exception as ex: prt("start_check false",str(ex),channel) return "Init.start_check Error" if __name__ == '__main__': cus_state = 0 index = 0 # global cus_state if cus_state == 0: if(init_start(index)): if(start_check(index)): cus_state = 1 if cus_state == 1: pass
30.488
132
0.505642
430
3,811
4.381395
0.234884
0.069002
0.05414
0.057325
0.836518
0.836518
0.820594
0.788747
0.788747
0.788747
0
0.046374
0.377591
3,811
124
133
30.733871
0.747892
0.033062
0
0.792453
0
0.018868
0.162045
0.044046
0
0
0
0
0
1
0.084906
false
0.009434
0.018868
0
0.264151
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
00ca7d0acbc38684cbb3a52412ec3f471e13c91b
16,387
py
Python
auxiliary/auxiliary_tools.py
aysuavci/ose-data-science-course-project-aysuavci
33ea374588ba22d6328fec84c78e831ba2eb88cf
[ "MIT" ]
null
null
null
auxiliary/auxiliary_tools.py
aysuavci/ose-data-science-course-project-aysuavci
33ea374588ba22d6328fec84c78e831ba2eb88cf
[ "MIT" ]
null
null
null
auxiliary/auxiliary_tools.py
aysuavci/ose-data-science-course-project-aysuavci
33ea374588ba22d6328fec84c78e831ba2eb88cf
[ "MIT" ]
null
null
null
"""This module contains auxiliary functions for RD predictions used in the main notebook.""" import numpy as np import pandas as pd import pandas.io.formats.style import seaborn as sns import statsmodels as sm import statsmodels.formula.api as smf import statsmodels.api as sm_api import matplotlib as pl import matplotlib.pyplot as plt from IPython.display import HTML from stargazer.stargazer import Stargazer, LineLocation from statsmodels.iolib.summary2 import summary_col from auxiliary.auxiliary_tools import * from auxiliary.auxiliary_plots import * from auxiliary.auxiliary_tables import * def get_Bayesian_predictions(df): df["post_1"] = df["prior_1"]/100 df.loc[df['pos_comparisons'] == 0, 'post_1'] = 0 df.loc[df['pos_comparisons'] == 1, 'post_1'] = 0 df.loc[df['pos_comparisons'] == 2, 'post_1'] = 0 df.loc[df['pos_comparisons'] == 3, 'post_1'] = df["prior_1"]/(df["prior_1"] + df["prior_2"]*(8/9)*(8/9)*(8/9) + df["prior_3"]*(7/9)*(7/9)*(7/9) + df["prior_4"]*(6/9)*(6/9)*(6/9) + df["prior_5"]*(5/9)*(5/9)*(5/9) + df["prior_6"]*(4/9)*(4/9)*(4/9) + df["prior_7"]*(3/9)*(3/9)*(3/9) + df["prior_8"]*(2/9)*(2/9)*(2/9) + df["prior_9"]*(1/9)*(1/9)*(1/9)) df["post_2"] = df["prior_2"]/100 df.loc[df['pos_comparisons'] == 0, 'post_2'] = (df["prior_2"]*(1/9)*(1/9)*(1/9))/(df["prior_2"]*(1/9)*(1/9)*(1/9) + df["prior_3"]*(2/9)*(2/9)*(2/9) + df["prior_4"]*(3/9)*(3/9)*(3/9) + df["prior_5"]*(4/9)*(4/9)*(4/9) + df["prior_6"]*(5/9)*(5/9)*(5/9) + df["prior_7"]*(6/9)*(6/9)*(6/9) + df["prior_8"]*(7/9)*(7/9)*(7/9) + df["prior_9"]*(8/9)*(8/9)*(8/9)+ df["prior_10"]) df.loc[df['pos_comparisons'] == 1, 'post_2'] = (df["prior_2"]*(24/9)*(1/9)*(1/9))/(df["prior_2"]*(24/9)*(1/9)*(1/9) + df["prior_3"]*(21/9)*(2/9)*(2/9) + df["prior_4"]*(18/9)*(3/9)*(3/9) + df["prior_5"]*(15/9)*(4/9)*(4/9) + df["prior_6"]*(12/9)*(5/9)*(5/9) + df["prior_7"]*(9/9)*(6/9)*(6/9) + df["prior_8"]*(6/9)*(7/9)*(7/9) + df["prior_9"]*(3/9)*(8/9)*(8/9)) df.loc[df['pos_comparisons'] == 2, 'post_2'] = 
(df["prior_2"]*(24/9)*(8/9)*(1/9))/(df["prior_2"]*(24/9)*(8/9)*(1/9) + df["prior_3"]*(21/9)*(7/9)*(2/9) + df["prior_4"]*(18/9)*(6/9)*(3/9) + df["prior_5"]*(15/9)*(5/9)*(4/9) + df["prior_6"]*(12/9)*(4/9)*(5/9) + df["prior_7"]*(9/9)*(3/9)*(6/9) + df["prior_8"]*(6/9)*(2/9)*(7/9) + df["prior_9"]*(3/9)*(1/9)*(8/9)) df.loc[df['pos_comparisons'] == 3, 'post_2'] = (df["prior_2"]*(8/9)*(8/9)*(8/9))/(df["prior_1"] + df["prior_2"]*(8/9)*(8/9)*(8/9) + df["prior_3"]*(7/9)*(7/9)*(7/9) + df["prior_4"]*(6/9)*(6/9)*(6/9) + df["prior_5"]*(5/9)*(5/9)*(5/9) + df["prior_6"]*(4/9)*(4/9)*(4/9) + df["prior_7"]*(3/9)*(3/9)*(3/9) + df["prior_8"]*(2/9)*(2/9)*(2/9) + df["prior_9"]*(1/9)*(1/9)*(1/9)) df["post_3"] = df["prior_3"]/100 df.loc[df['pos_comparisons'] == 0, 'post_3'] = (df["prior_3"]*(2/9)*(2/9)*(2/9))/(df["prior_2"]*(1/9)*(1/9)*(1/9) + df["prior_3"]*(2/9)*(2/9)*(2/9) + df["prior_4"]*(3/9)*(3/9)*(3/9) + df["prior_5"]*(4/9)*(4/9)*(4/9) + df["prior_6"]*(5/9)*(5/9)*(5/9) + df["prior_7"]*(6/9)*(6/9)*(6/9) + df["prior_8"]*(7/9)*(7/9)*(7/9) + df["prior_9"]*(8/9)*(8/9)*(8/9)+ df["prior_10"]) df.loc[df['pos_comparisons'] == 1, 'post_3'] = (df["prior_3"]*(21/9)*(2/9)*(2/9))/(df["prior_2"]*(24/9)*(1/9)*(1/9) + df["prior_3"]*(21/9)*(2/9)*(2/9) + df["prior_4"]*(18/9)*(3/9)*(3/9) + df["prior_5"]*(15/9)*(4/9)*(4/9) + df["prior_6"]*(12/9)*(5/9)*(5/9) + df["prior_7"]*(9/9)*(6/9)*(6/9) + df["prior_8"]*(6/9)*(7/9)*(7/9) + df["prior_9"]*(3/9)*(8/9)*(8/9)) df.loc[df['pos_comparisons'] == 2, 'post_3'] = (df["prior_3"]*(21/9)*(7/9)*(2/9))/(df["prior_2"]*(24/9)*(8/9)*(1/9) + df["prior_3"]*(21/9)*(7/9)*(2/9) + df["prior_4"]*(18/9)*(6/9)*(3/9) + df["prior_5"]*(15/9)*(5/9)*(4/9) + df["prior_6"]*(12/9)*(4/9)*(5/9) + df["prior_7"]*(9/9)*(3/9)*(6/9) + df["prior_8"]*(6/9)*(2/9)*(7/9) + df["prior_9"]*(3/9)*(1/9)*(8/9)) df.loc[df['pos_comparisons'] == 3, 'post_3'] = (df["prior_3"]*(7/9)*(7/9)*(7/9))/(df["prior_1"] + df["prior_2"]*(8/9)*(8/9)*(8/9) + df["prior_3"]*(7/9)*(7/9)*(7/9) + df["prior_4"]*(6/9)*(6/9)*(6/9) 
+ df["prior_5"]*(5/9)*(5/9)*(5/9) + df["prior_6"]*(4/9)*(4/9)*(4/9) + df["prior_7"]*(3/9)*(3/9)*(3/9) + df["prior_8"]*(2/9)*(2/9)*(2/9) + df["prior_9"]*(1/9)*(1/9)*(1/9)) df["post_4"] = df["prior_4"]/100 df.loc[df['pos_comparisons'] == 0, 'post_4'] = (df["prior_4"]*(3/9)*(3/9)*(3/9))/(df["prior_2"]*(1/9)*(1/9)*(1/9) + df["prior_3"]*(2/9)*(2/9)*(2/9) + df["prior_4"]*(3/9)*(3/9)*(3/9) + df["prior_5"]*(4/9)*(4/9)*(4/9) + df["prior_6"]*(5/9)*(5/9)*(5/9) + df["prior_7"]*(6/9)*(6/9)*(6/9) + df["prior_8"]*(7/9)*(7/9)*(7/9) + df["prior_9"]*(8/9)*(8/9)*(8/9)+ df["prior_10"]) df.loc[df['pos_comparisons'] == 1, 'post_4'] = (df["prior_4"]*(18/9)*(3/9)*(3/9))/(df["prior_2"]*(24/9)*(1/9)*(1/9) + df["prior_3"]*(21/9)*(2/9)*(2/9) + df["prior_4"]*(18/9)*(3/9)*(3/9) + df["prior_5"]*(15/9)*(4/9)*(4/9) + df["prior_6"]*(12/9)*(5/9)*(5/9) + df["prior_7"]*(9/9)*(6/9)*(6/9) + df["prior_8"]*(6/9)*(7/9)*(7/9) + df["prior_9"]*(3/9)*(8/9)*(8/9)) df.loc[df['pos_comparisons'] == 2, 'post_4'] = (df["prior_4"]*(18/9)*(6/9)*(3/9))/(df["prior_2"]*(24/9)*(8/9)*(1/9) + df["prior_3"]*(21/9)*(7/9)*(2/9) + df["prior_4"]*(18/9)*(6/9)*(3/9) + df["prior_5"]*(15/9)*(5/9)*(4/9) + df["prior_6"]*(12/9)*(4/9)*(5/9) + df["prior_7"]*(9/9)*(3/9)*(6/9) + df["prior_8"]*(6/9)*(2/9)*(7/9) + df["prior_9"]*(3/9)*(1/9)*(8/9)) df.loc[df['pos_comparisons'] == 3, 'post_4'] = (df["prior_4"]*(6/9)*(6/9)*(6/9))/(df["prior_1"] + df["prior_2"]*(8/9)*(8/9)*(8/9) + df["prior_3"]*(7/9)*(7/9)*(7/9) + df["prior_4"]*(6/9)*(6/9)*(6/9) + df["prior_5"]*(5/9)*(5/9)*(5/9) + df["prior_6"]*(4/9)*(4/9)*(4/9) + df["prior_7"]*(3/9)*(3/9)*(3/9) + df["prior_8"]*(2/9)*(2/9)*(2/9) + df["prior_9"]*(1/9)*(1/9)*(1/9)) df["post_5"] = df["prior_5"]/100 df.loc[df['pos_comparisons'] == 0, 'post_5'] = (df["prior_5"]*(4/9)*(4/9)*(4/9))/(df["prior_2"]*(1/9)*(1/9)*(1/9) + df["prior_3"]*(2/9)*(2/9)*(2/9) + df["prior_4"]*(3/9)*(3/9)*(3/9) + df["prior_5"]*(4/9)*(4/9)*(4/9) + df["prior_6"]*(5/9)*(5/9)*(5/9) + df["prior_7"]*(6/9)*(6/9)*(6/9) + 
df["prior_8"]*(7/9)*(7/9)*(7/9) + df["prior_9"]*(8/9)*(8/9)*(8/9)+ df["prior_10"]) df.loc[df['pos_comparisons'] == 1, 'post_5'] = (df["prior_5"]*(15/9)*(4/9)*(4/9))/(df["prior_2"]*(24/9)*(1/9)*(1/9) + df["prior_3"]*(21/9)*(2/9)*(2/9) + df["prior_4"]*(18/9)*(3/9)*(3/9) + df["prior_5"]*(15/9)*(4/9)*(4/9) + df["prior_6"]*(12/9)*(5/9)*(5/9) + df["prior_7"]*(9/9)*(6/9)*(6/9) + df["prior_8"]*(6/9)*(7/9)*(7/9) + df["prior_9"]*(3/9)*(8/9)*(8/9)) df.loc[df['pos_comparisons'] == 2, 'post_5'] = (df["prior_5"]*(15/9)*(5/9)*(4/9))/(df["prior_2"]*(24/9)*(8/9)*(1/9) + df["prior_3"]*(21/9)*(7/9)*(2/9) + df["prior_4"]*(18/9)*(6/9)*(3/9) + df["prior_5"]*(15/9)*(5/9)*(4/9) + df["prior_6"]*(12/9)*(4/9)*(5/9) + df["prior_7"]*(9/9)*(3/9)*(6/9) + df["prior_8"]*(6/9)*(2/9)*(7/9) + df["prior_9"]*(3/9)*(1/9)*(8/9)) df.loc[df['pos_comparisons'] == 3, 'post_5'] = (df["prior_5"]*(5/9)*(5/9)*(5/9))/(df["prior_1"] + df["prior_2"]*(8/9)*(8/9)*(8/9) + df["prior_3"]*(7/9)*(7/9)*(7/9) + df["prior_4"]*(6/9)*(6/9)*(6/9) + df["prior_5"]*(5/9)*(5/9)*(5/9) + df["prior_6"]*(4/9)*(4/9)*(4/9) + df["prior_7"]*(3/9)*(3/9)*(3/9) + df["prior_8"]*(2/9)*(2/9)*(2/9) + df["prior_9"]*(1/9)*(1/9)*(1/9)) df["post_6"] = df["prior_6"]/100 df.loc[df['pos_comparisons'] == 0, 'post_6'] = (df["prior_6"]*(5/9)*(5/9)*(5/9))/(df["prior_2"]*(1/9)*(1/9)*(1/9) + df["prior_3"]*(2/9)*(2/9)*(2/9) + df["prior_4"]*(3/9)*(3/9)*(3/9) + df["prior_5"]*(4/9)*(4/9)*(4/9) + df["prior_6"]*(5/9)*(5/9)*(5/9) + df["prior_7"]*(6/9)*(6/9)*(6/9) + df["prior_8"]*(7/9)*(7/9)*(7/9) + df["prior_9"]*(8/9)*(8/9)*(8/9)+ df["prior_10"]) df.loc[df['pos_comparisons'] == 1, 'post_6'] = (df["prior_6"]*(12/9)*(5/9)*(5/9))/(df["prior_2"]*(24/9)*(1/9)*(1/9) + df["prior_3"]*(21/9)*(2/9)*(2/9) + df["prior_4"]*(18/9)*(3/9)*(3/9) + df["prior_5"]*(15/9)*(4/9)*(4/9) + df["prior_6"]*(12/9)*(5/9)*(5/9) + df["prior_7"]*(9/9)*(6/9)*(6/9) + df["prior_8"]*(6/9)*(7/9)*(7/9) + df["prior_9"]*(3/9)*(8/9)*(8/9)) df.loc[df['pos_comparisons'] == 2, 'post_6'] = 
(df["prior_6"]*(12/9)*(4/9)*(5/9))/(df["prior_2"]*(24/9)*(8/9)*(1/9) + df["prior_3"]*(21/9)*(7/9)*(2/9) + df["prior_4"]*(18/9)*(6/9)*(3/9) + df["prior_5"]*(15/9)*(5/9)*(4/9) + df["prior_6"]*(12/9)*(4/9)*(5/9) + df["prior_7"]*(9/9)*(3/9)*(6/9) + df["prior_8"]*(6/9)*(2/9)*(7/9) + df["prior_9"]*(3/9)*(1/9)*(8/9)) df.loc[df['pos_comparisons'] == 3, 'post_6'] = (df["prior_6"]*(4/9)*(4/9)*(4/9))/(df["prior_1"] + df["prior_2"]*(8/9)*(8/9)*(8/9) + df["prior_3"]*(7/9)*(7/9)*(7/9) + df["prior_4"]*(6/9)*(6/9)*(6/9) + df["prior_5"]*(5/9)*(5/9)*(5/9) + df["prior_6"]*(4/9)*(4/9)*(4/9) + df["prior_7"]*(3/9)*(3/9)*(3/9) + df["prior_8"]*(2/9)*(2/9)*(2/9) + df["prior_9"]*(1/9)*(1/9)*(1/9)) df["post_7"] = df["prior_7"]/100 df.loc[df['pos_comparisons'] == 0, 'post_7'] = (df["prior_7"]*(6/9)*(6/9)*(6/9))/(df["prior_2"]*(1/9)*(1/9)*(1/9) + df["prior_3"]*(2/9)*(2/9)*(2/9) + df["prior_4"]*(3/9)*(3/9)*(3/9) + df["prior_5"]*(4/9)*(4/9)*(4/9) + df["prior_6"]*(5/9)*(5/9)*(5/9) + df["prior_7"]*(6/9)*(6/9)*(6/9) + df["prior_8"]*(7/9)*(7/9)*(7/9) + df["prior_9"]*(8/9)*(8/9)*(8/9)+ df["prior_10"]) df.loc[df['pos_comparisons'] == 1, 'post_7'] = (df["prior_7"]*(9/9)*(6/9)*(6/9))/(df["prior_2"]*(24/9)*(1/9)*(1/9) + df["prior_3"]*(21/9)*(2/9)*(2/9) + df["prior_4"]*(18/9)*(3/9)*(3/9) + df["prior_5"]*(15/9)*(4/9)*(4/9) + df["prior_6"]*(12/9)*(5/9)*(5/9) + df["prior_7"]*(9/9)*(6/9)*(6/9) + df["prior_8"]*(6/9)*(7/9)*(7/9) + df["prior_9"]*(3/9)*(8/9)*(8/9)) df.loc[df['pos_comparisons'] == 2, 'post_7'] = (df["prior_7"]*(9/9)*(3/9)*(6/9))/(df["prior_2"]*(24/9)*(8/9)*(1/9) + df["prior_3"]*(21/9)*(7/9)*(2/9) + df["prior_4"]*(18/9)*(6/9)*(3/9) + df["prior_5"]*(15/9)*(5/9)*(4/9) + df["prior_6"]*(12/9)*(4/9)*(5/9) + df["prior_7"]*(9/9)*(3/9)*(6/9) + df["prior_8"]*(6/9)*(2/9)*(7/9) + df["prior_9"]*(3/9)*(1/9)*(8/9)) df.loc[df['pos_comparisons'] == 3, 'post_7'] = (df["prior_7"]*(3/9)*(3/9)*(3/9))/(df["prior_1"] + df["prior_2"]*(8/9)*(8/9)*(8/9) + df["prior_3"]*(7/9)*(7/9)*(7/9) + df["prior_4"]*(6/9)*(6/9)*(6/9) + 
df["prior_5"]*(5/9)*(5/9)*(5/9) + df["prior_6"]*(4/9)*(4/9)*(4/9) + df["prior_7"]*(3/9)*(3/9)*(3/9) + df["prior_8"]*(2/9)*(2/9)*(2/9) + df["prior_9"]*(1/9)*(1/9)*(1/9)) df["post_8"] = df["prior_8"]/100 df.loc[df['pos_comparisons'] == 0, 'post_8'] = (df["prior_8"]*(7/9)*(7/9)*(7/9))/(df["prior_2"]*(1/9)*(1/9)*(1/9) + df["prior_3"]*(2/9)*(2/9)*(2/9) + df["prior_4"]*(3/9)*(3/9)*(3/9) + df["prior_5"]*(4/9)*(4/9)*(4/9) + df["prior_6"]*(5/9)*(5/9)*(5/9) + df["prior_7"]*(6/9)*(6/9)*(6/9) + df["prior_8"]*(7/9)*(7/9)*(7/9) + df["prior_9"]*(8/9)*(8/9)*(8/9)+ df["prior_10"]) df.loc[df['pos_comparisons'] == 1, 'post_8'] = (df["prior_8"]*(6/9)*(7/9)*(7/9))/(df["prior_2"]*(24/9)*(1/9)*(1/9) + df["prior_3"]*(21/9)*(2/9)*(2/9) + df["prior_4"]*(18/9)*(3/9)*(3/9) + df["prior_5"]*(15/9)*(4/9)*(4/9) + df["prior_6"]*(12/9)*(5/9)*(5/9) + df["prior_7"]*(9/9)*(6/9)*(6/9) + df["prior_8"]*(6/9)*(7/9)*(7/9) + df["prior_9"]*(3/9)*(8/9)*(8/9)) df.loc[df['pos_comparisons'] == 2, 'post_8'] = (df["prior_8"]*(6/9)*(2/9)*(7/9))/(df["prior_2"]*(24/9)*(8/9)*(1/9) + df["prior_3"]*(21/9)*(7/9)*(2/9) + df["prior_4"]*(18/9)*(6/9)*(3/9) + df["prior_5"]*(15/9)*(5/9)*(4/9) + df["prior_6"]*(12/9)*(4/9)*(5/9) + df["prior_7"]*(9/9)*(3/9)*(6/9) + df["prior_8"]*(6/9)*(2/9)*(7/9) + df["prior_9"]*(3/9)*(1/9)*(8/9)) df.loc[df['pos_comparisons'] == 3, 'post_8'] = (df["prior_8"]*(2/9)*(2/9)*(2/9))/(df["prior_1"] + df["prior_2"]*(8/9)*(8/9)*(8/9) + df["prior_3"]*(7/9)*(7/9)*(7/9) + df["prior_4"]*(6/9)*(6/9)*(6/9) + df["prior_5"]*(5/9)*(5/9)*(5/9) + df["prior_6"]*(4/9)*(4/9)*(4/9) + df["prior_7"]*(3/9)*(3/9)*(3/9) + df["prior_8"]*(2/9)*(2/9)*(2/9) + df["prior_9"]*(1/9)*(1/9)*(1/9)) df["post_9"] = df["prior_9"]/100 df.loc[df['pos_comparisons'] == 0, 'post_9'] = (df["prior_9"]*(8/9)*(8/9)*(8/9))/(df["prior_2"]*(1/9)*(1/9)*(1/9) + df["prior_3"]*(2/9)*(2/9)*(2/9) + df["prior_4"]*(3/9)*(3/9)*(3/9) + df["prior_5"]*(4/9)*(4/9)*(4/9) + df["prior_6"]*(5/9)*(5/9)*(5/9) + df["prior_7"]*(6/9)*(6/9)*(6/9) + 
df["prior_8"]*(7/9)*(7/9)*(7/9) + df["prior_9"]*(8/9)*(8/9)*(8/9)+ df["prior_10"]) df.loc[df['pos_comparisons'] == 1, 'post_9'] = (df["prior_9"]*(3/9)*(8/9)*(8/9))/(df["prior_2"]*(24/9)*(1/9)*(1/9) + df["prior_3"]*(21/9)*(2/9)*(2/9) + df["prior_4"]*(18/9)*(3/9)*(3/9) + df["prior_5"]*(15/9)*(4/9)*(4/9) + df["prior_6"]*(12/9)*(5/9)*(5/9) + df["prior_7"]*(9/9)*(6/9)*(6/9) + df["prior_8"]*(6/9)*(7/9)*(7/9) + df["prior_9"]*(3/9)*(8/9)*(8/9)) df.loc[df['pos_comparisons'] == 2, 'post_9'] = (df["prior_9"]*(3/9)*(1/9)*(8/9))/(df["prior_2"]*(24/9)*(8/9)*(1/9) + df["prior_3"]*(21/9)*(7/9)*(2/9) + df["prior_4"]*(18/9)*(6/9)*(3/9) + df["prior_5"]*(15/9)*(5/9)*(4/9) + df["prior_6"]*(12/9)*(4/9)*(5/9) + df["prior_7"]*(9/9)*(3/9)*(6/9) + df["prior_8"]*(6/9)*(2/9)*(7/9) + df["prior_9"]*(3/9)*(1/9)*(8/9)) df.loc[df['pos_comparisons'] == 3, 'post_9'] = (df["prior_9"]*(1/9)*(1/9)*(1/9))/(df["prior_1"] + df["prior_2"]*(8/9)*(8/9)*(8/9) + df["prior_3"]*(7/9)*(7/9)*(7/9) + df["prior_4"]*(6/9)*(6/9)*(6/9) + df["prior_5"]*(5/9)*(5/9)*(5/9) + df["prior_6"]*(4/9)*(4/9)*(4/9) + df["prior_7"]*(3/9)*(3/9)*(3/9) + df["prior_8"]*(2/9)*(2/9)*(2/9) + df["prior_9"]*(1/9)*(1/9)*(1/9)) df["post_10"] = df["prior_10"]/100 df.loc[df['pos_comparisons'] == 0, 'post_10'] = df["prior_10"]/(df["prior_2"]*(1/9)*(1/9)*(1/9) + df["prior_3"]*(2/9)*(2/9)*(2/9) + df["prior_4"]*(3/9)*(3/9)*(3/9) + df["prior_5"]*(4/9)*(4/9)*(4/9) + df["prior_6"]*(5/9)*(5/9)*(5/9) + df["prior_7"]*(6/9)*(6/9)*(6/9) + df["prior_8"]*(7/9)*(7/9)*(7/9) + df["prior_9"]*(8/9)*(8/9)*(8/9) + df["prior_10"]) df.loc[df['pos_comparisons'] == 1, 'post_10'] = 0 df.loc[df['pos_comparisons'] == 2, 'post_10'] = 0 df.loc[df['pos_comparisons'] == 3, 'post_10'] = 0 return def get_rank_dummies(df): df["rankdummy1"] = 0 df.loc[df['rank'] == 1, 'rankdummy1'] = 1 df["rankdummy2"] = 0 df.loc[df['rank'] == 2, 'rankdummy2'] = 1 df["rankdummy3"] = 0 df.loc[df['rank'] == 3, 'rankdummy3'] = 1 df["rankdummy4"] = 0 df.loc[df['rank'] == 4, 'rankdummy4'] = 1 
df["rankdummy5"] = 0 df.loc[df['rank'] == 5, 'rankdummy5'] = 1 df["rankdummy6"] = 0 df.loc[df['rank'] == 6, 'rankdummy6'] = 1 df["rankdummy7"] = 0 df.loc[df['rank'] == 7, 'rankdummy7'] = 1 df["rankdummy8"] = 0 df.loc[df['rank'] == 8, 'rankdummy8'] = 1 df["rankdummy9"] = 0 df.loc[df['rank'] == 9, 'rankdummy9'] = 1 return def get_rank_interation_term(df): df["rankdummy1_interact"] = df["rankdummy1"] * df["dummytreat_direct1month"] df["rankdummy2_interact"] = df["rankdummy2"] * df["dummytreat_direct1month"] df["rankdummy3_interact"] = df["rankdummy3"] * df["dummytreat_direct1month"] df["rankdummy4_interact"] = df["rankdummy4"] * df["dummytreat_direct1month"] df["rankdummy5_interact"] = df["rankdummy5"] * df["dummytreat_direct1month"] df["rankdummy6_interact"] = df["rankdummy6"] * df["dummytreat_direct1month"] df["rankdummy7_interact"] = df["rankdummy7"] * df["dummytreat_direct1month"] df["rankdummy8_interact"] = df["rankdummy8"] * df["dummytreat_direct1month"] df["rankdummy9_interact"] = df["rankdummy9"] * df["dummytreat_direct1month"] return def cluster_fit(formula, data, group_var): """ To run regressions with standard errors clustered at subject level """ fit = sm_api.OLS.from_formula(formula, data=data).fit() to_keep = pd.RangeIndex(len(data)).difference(pd.Index(fit.model.data.missing_row_idx)) robust = fit.get_robustcov_results(cov_type='cluster', groups=data.iloc[to_keep][group_var]) return robust def get_variables_for_extension(df_control): #Dummies for analysis df_control['secondwave'] = np.nan df_control.loc[df_control['time'] == 2, 'secondwave'] = 1 df_control.loc[df_control['time'] == 1, 'secondwave'] = 0 df_control['dummy_feedback'] = np.nan df_control.loc[df_control['treatment'] == 'confidence_1monthlater', 'dummy_feedback'] = 1 df_control.loc[df_control['treatment'] == 'nofeedback', 'dummy_feedback'] = 0 #Interaction term df_control['feedback_secondwave'] = df_control['secondwave']*df_control['dummy_feedback'] return
101.782609
372
0.509916
3,594
16,387
2.188926
0.037563
0.297191
0.288801
0.024914
0.764332
0.735223
0.725181
0.716665
0.661879
0.661879
0
0.166881
0.096784
16,387
161
373
101.782609
0.364638
0.011595
0
0.035398
0
0
0.254653
0.014161
0
0
0
0
0
1
0.044248
false
0
0.132743
0
0.221239
0
0
0
0
null
1
1
0
0
1
1
1
0
1
0
0
0
0
0
0
1
1
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
8
dab31b36f35e54f55b6edcd1b7a2e0673a829dfc
46
py
Python
steps/__init__.py
merkio/python-websocket-test-example
4d9a1ea4c05bc1195f496ab00dccd61ccfd24176
[ "Apache-2.0" ]
null
null
null
steps/__init__.py
merkio/python-websocket-test-example
4d9a1ea4c05bc1195f496ab00dccd61ccfd24176
[ "Apache-2.0" ]
null
null
null
steps/__init__.py
merkio/python-websocket-test-example
4d9a1ea4c05bc1195f496ab00dccd61ccfd24176
[ "Apache-2.0" ]
null
null
null
from . import connect from . import subscribe
15.333333
23
0.782609
6
46
6
0.666667
0.555556
0
0
0
0
0
0
0
0
0
0
0.173913
46
2
24
23
0.947368
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
7
dac1c9d68fa87c24fabbb0bf2c88b7496be23c59
71,656
py
Python
scripts/RDEX-periodic-pull.py
socialcar-project-eu/back-end
ba667c9b7d68ee10d1e07d43645ea12c0aae068b
[ "MIT" ]
null
null
null
scripts/RDEX-periodic-pull.py
socialcar-project-eu/back-end
ba667c9b7d68ee10d1e07d43645ea12c0aae068b
[ "MIT" ]
2
2018-10-03T14:14:24.000Z
2019-11-05T10:23:24.000Z
scripts/RDEX-periodic-pull.py
socialcar-project-eu/back-end
ba667c9b7d68ee10d1e07d43645ea12c0aae068b
[ "MIT" ]
3
2018-04-20T09:53:37.000Z
2021-07-02T18:56:33.000Z
import sys import os import pymongo import argparse import requests import hashlib import hmac import json import time, datetime, threading from requests.auth import HTTPBasicAuth from requests.packages.urllib3.exceptions import InsecureRequestWarning from socialcar.utils import waypoints_to_polyline, generate_custom_objectid, str_to_oid, oid_to_str, inside_bounding_box MONGO_HOST = os.environ.get('MONGO_HOST', 'localhost') MONGO_PORT = int(os.environ.get('MONGO_PORT', 27017)) MONGO_USERNAME = os.environ.get('MONGO_USERNAME', '') MONGO_PASSWORD = os.environ.get('MONGO_PASSWORD', '') MONGO_RIDES_COLLECTION = 'rides' MONGO_SITES_COLLECTION = 'sites' CITIES = [ 'brussels', 'edinburgh', 'ljubljana', 'ticino' ] brussels_driver_id = None edinburgh_driver_id = None ljubljana_driver_id = None ticino1_driver_id = None ticino2_driver_id = None brussels_car_id = None edinburgh_car_id = None ljubljana_car_id = None ticino1_car_id = None ticino2_car_id = None #=============================================================================== # date_to_sec() #=============================================================================== def date_to_sec(date_str, date_format): return time.mktime(datetime.datetime.strptime(date_str, date_format).timetuple()) #=============================================================================== # time_to_sec () #=============================================================================== def time_to_sec(time_str): h, m = time_str.split(':') return int(h) * 3600 + int(m) * 60 #=============================================================================== # periodic_pull () #=============================================================================== def periodic_pull(host, port, interval, period, radius, dbname, city, use_ssl): print('%s - Checking for external rides...' 
% (datetime.datetime.utcnow().strftime('%d-%m-%Y @ %H:%M:%S (UTC)'))) client = pymongo.MongoClient(MONGO_HOST, MONGO_PORT) db = client[dbname] rides_collection = db[MONGO_RIDES_COLLECTION] sites_collection = db[MONGO_SITES_COLLECTION] ids_to_keep = [] counter_api = 0 prefix = "https" if use_ssl else "http" # Disable SSL warnings requests.packages.urllib3.disable_warnings(InsecureRequestWarning) # Request Timestamp timestamp = int(time.time()) if city == 'brussels': # Set private key for test server # privateKey = "qUqxRCTZhf5XDEq8F7mMWnVbP4QSHfgJsAv5H65pTJG" # Set private key for main server privateKey = "yYDyFQN4v51Fi4vx513z9f1ERv74BgSuVzI4IeSW3SZ" # Set public key for test server # publicKey = "sc_dimitris_tsoukalas_live" # Set public key for main server publicKey = "tx_socialcar_server" # Set base url baseUrl = "https://api.carpool.be/rdexapi/" # Set call call = "period.json" # Create extra values areaCode = "BE-BRU" # Compose unsigned URL unSingnedUrl = "%s%s?timestamp=%s&apikey=%s&area_code=%s&period=%s&distance=%s" % (baseUrl, call, timestamp, publicKey, areaCode, period, radius) # Convert private key to bytes privateKeyToBytes = bytearray(privateKey, "ASCII") # Convert unsigned URL to bytes unSingnedUrlToBytes = bytearray(unSingnedUrl,"ASCII") # Create the signature # Hash the unsigned url with the 'sha256’-algorithm using the private key sign = hmac.new(privateKeyToBytes, unSingnedUrlToBytes, hashlib.sha256).hexdigest() # Compose signed url signedUrl = "%s&signature=%s" % (unSingnedUrl, sign) elif city == 'edinburgh': # Set private key privateKey = "WPEbb4uJDL76VVc6pmBCrE7BL27L5QQiekuSJJpm" # Set public key publicKey = "socialcar" # Set base url baseUrl = "https://api.liftshare.com/rdex/v1/" # Set call call = "period" # compose unsigned URL unSingnedUrl = "%s%s?timestamp=%s&apikey=%s&period=%s" % (baseUrl, call, timestamp, publicKey, period) # Convert private key to bytes privateKeyToBytes = bytearray(privateKey, "ASCII") # Convert unsigned URL to bytes 
unSingnedUrlToBytes = bytearray(unSingnedUrl,"ASCII") # Create the signature # Hash the unsigned url with the 'sha256’-algorithm using the private key sign = hmac.new(privateKeyToBytes, unSingnedUrlToBytes, hashlib.sha256).hexdigest() # Compose signed url signedUrl = "%s&signature=%s" % (unSingnedUrl, sign) elif city == 'ljubljana': signedUrl = "https://prevoz.org/api/search/socialcar/?format=json" elif city == 'ticino': # Set private key privateKey = "A93reRTUJHsCuQSHR+L3GxqOJyDmQpCgps102ciuabc=" # Set public key publicKey = "SocialCar" # Set base url baseUrl = "https://ws.bepooler.ch/socialcar/" # Set call call = "period" # Create extra values areaCode = "CH" # compose unsigned URL unSingnedUrl = "%s%s?timestamp=%s&apikey=%s&area_code=%s&period=%s&distance=%s" % (baseUrl, call, timestamp, publicKey, areaCode, period, radius) # Convert private key to bytes privateKeyToBytes = bytearray(privateKey, "ASCII") # Convert unsigned URL to bytes unSingnedUrlToBytes = bytearray(unSingnedUrl,"ASCII") # Create the signature # Hash the unsigned url with the 'sha256’-algorithm using the private key sign = hmac.new(privateKeyToBytes, unSingnedUrlToBytes, hashlib.sha256).hexdigest() # Compose signed url signedUrl = "%s&signature=%s" % (unSingnedUrl, sign) # GET request headers = {'content-type': 'application/json'} r = requests.get(signedUrl, headers=headers, verify=False) if r.status_code == 200: rides = [] auth = HTTPBasicAuth('admin', 'password') # TODO: Insert admin credentials here headers = {'content-type': 'application/json'} #=============================================================================== # POST fetched rides to server #=============================================================================== res_json = json.loads(r.text) counter = 0 # Add delay for Edinburgh to update rides after 00:00 if city == 'edinburgh': time.sleep(10800) # For every fetched ride for ride in res_json: # If 'last_modification' field exists and trip offers a ride if 
ride['last_modification'] != '' and ride['driver']['state'] != 0: # Compute ride date and flexibility if ride['outward']['mintime'] != None and ride['outward']['maxtime'] != None: flexibility = int((time_to_sec(ride['outward']['maxtime']) - time_to_sec(ride['outward']['mintime'])) / 2) else: flexibility = 0 ride_date = date_to_sec(ride['outward']['mindate'], "%Y-%m-%d") + time_to_sec(ride['outward']['mintime']) + flexibility # Compute return ride date and flexibility if 'return' in ride and ride['return'] != None and ride['return']['mindate'] != None and ride['return']['mintime'] != None: if ride['return']['mintime'] != None and ride['return']['maxtime'] != None: return_flexibility = int((time_to_sec(ride['return']['maxtime']) - time_to_sec(ride['return']['mintime'])) / 2) else: return_flexibility = 0 return_ride_date = date_to_sec(ride['return']['mindate'], "%Y-%m-%d") + time_to_sec(ride['return']['mintime']) + return_flexibility else: return_ride_date = None return_flexibility = 0 # Store ride date in UTC according to local time offset of each site if city == 'brussels': ride_date = ride_date - 3600 return_ride_date = return_ride_date - 3600 if return_ride_date else None last_modification = date_to_sec(ride['last_modification'], "%Y-%m-%d %H:%M:%S") - 3600 elif city == 'edinburgh': ride_date = ride_date return_ride_date = return_ride_date if return_ride_date else None last_modification = date_to_sec(ride['last_modification'], "%d/%m/%Y %H:%M:%S") elif city == 'ljubljana': ride_date = ride_date - 3600 return_ride_date = return_ride_date - 3600 if return_ride_date else None last_modification = date_to_sec(ride['last_modification'], "%Y-%m-%d %H:%M") - 3600 elif city == 'ticino': ride_date = ride_date - 3600 return_ride_date = return_ride_date - 3600 if return_ride_date else None last_modification = date_to_sec(ride['last_modification'], "%d/%m/%Y %H:%M:%S") - 3600 # If city is Brussels if city == 'brussels': # Don't add past rides into DB if timestamp < ride_date: 
site = sites_collection.find_one({'name': 'Brussels'}) bb_minlat = site['bounding_box']['min_lat'] bb_minlon = site['bounding_box']['min_lon'] bb_maxlat = site['bounding_box']['max_lat'] bb_maxlon = site['bounding_box']['max_lon'] s_lat = float(ride['from']['latitude'].replace(",", ".")) s_lon = float(ride['from']['longitude'].replace(",", ".")) t_lat = float(ride['to']['latitude'].replace(",", ".")) t_lon = float(ride['to']['longitude'].replace(",", ".")) # Don't add rides that are outside bounding box into DB if inside_bounding_box(bb_minlat, bb_minlon, bb_maxlat, bb_maxlon, s_lat, s_lon, t_lat, t_lon): uuid = generate_custom_objectid(ride['uuid'], 24) ids_to_keep.append(uuid) # Don't add rides that exceed period time into DB if timestamp + (period * 3600) > ride_date: # Fetch ride with uuid from database cursor_rides = rides_collection.find({ '_id': str_to_oid(uuid) }) # If ride not in database POST if cursor_rides.count() == 0: ride_data = { '_id': uuid, 'driver_id': brussels_driver_id, 'car_id': brussels_car_id, 'name': '%s - %s' % (ride['from']['address'], ride['to']['address']), 'start_point': { 'lat': float(ride['from']['latitude'].replace(",", ".")), 'lon': float(ride['from']['longitude'].replace(",", ".")) }, 'end_point': { 'lat': float(ride['to']['latitude'].replace(",", ".")), 'lon': float(ride['to']['longitude'].replace(",", ".")) }, 'date': ride_date, 'activated': True, 'polyline': waypoints_to_polyline(ride['waypoints']), 'extras': { 'uuid': ride['uuid'], 'url': ride['url'] }, } rides.append(ride_data) # If ride in database PATCH else: # Store fetched ride for f_ride in cursor_rides: fetched_ride = f_ride # If a ride is modified between the intervals if (timestamp - last_modification) < interval or fetched_ride['date'] != ride_date: ride_data = { 'driver_id': brussels_driver_id, 'car_id': brussels_car_id, 'name': '%s - %s' % (ride['from']['address'], ride['to']['address']), 'start_point': { 'lat': float(ride['from']['latitude'].replace(",", ".")), 
'lon': float(ride['from']['longitude'].replace(",", ".")) }, 'end_point': { 'lat': float(ride['to']['latitude'].replace(",", ".")), 'lon': float(ride['to']['longitude'].replace(",", ".")) }, 'date': ride_date, 'polyline': waypoints_to_polyline(ride['waypoints']), 'extras': { 'uuid': ride['uuid'], 'url': ride['url'] } } json_body = json.dumps(ride_data) patch_rides_url = "%s://%s:%s/rest/v2/rides/%s" % (prefix, host, port, uuid) patch_res = requests.patch(patch_rides_url, data=json_body, headers=headers, auth=auth) if patch_res.status_code == 200: counter = counter + 1 # Don't add past rides into DB if return_ride_date and timestamp < return_ride_date: site = sites_collection.find_one({'name': 'Brussels'}) bb_minlat = site['bounding_box']['min_lat'] bb_minlon = site['bounding_box']['min_lon'] bb_maxlat = site['bounding_box']['max_lat'] bb_maxlon = site['bounding_box']['max_lon'] s_lat = float(ride['from']['latitude'].replace(",", ".")) s_lon = float(ride['from']['longitude'].replace(",", ".")) t_lat = float(ride['to']['latitude'].replace(",", ".")) t_lon = float(ride['to']['longitude'].replace(",", ".")) # Don't add rides that are outside bounding box into DB if inside_bounding_box(bb_minlat, bb_minlon, bb_maxlat, bb_maxlon, s_lat, s_lon, t_lat, t_lon): uuid = generate_custom_objectid('%sret' % ride['uuid'], 24) ids_to_keep.append(uuid) # Don't add rides that exceed period time into DB if timestamp + (period * 3600) > return_ride_date: # Fetch ride with uuid from database cursor_rides = rides_collection.find({ '_id': str_to_oid(uuid) }) # If ride not in database POST if cursor_rides.count() == 0: ride_data = { '_id': uuid, 'driver_id': brussels_driver_id, 'car_id': brussels_car_id, 'name': '%s - %s' % (ride['to']['address'], ride['from']['address']), 'start_point': { 'lat': float(ride['to']['latitude'].replace(",", ".")), 'lon': float(ride['to']['longitude'].replace(",", ".")) }, 'end_point': { 'lat': float(ride['from']['latitude'].replace(",", ".")), 'lon': 
float(ride['from']['longitude'].replace(",", ".")) }, 'date': return_ride_date, 'activated': True, 'polyline': waypoints_to_polyline(ride['waypoints']), 'extras': { 'uuid': ride['uuid'], 'url': ride['url'] }, } rides.append(ride_data) # If ride in database PATCH else: # Store fetched ride for f_ride in cursor_rides: fetched_ride = f_ride # If a ride is modified between the intervals if (timestamp - last_modification) < interval or fetched_ride['date'] != return_ride_date: ride_data = { 'driver_id': brussels_driver_id, 'car_id': brussels_car_id, 'name': '%s - %s' % (ride['to']['address'], ride['from']['address']), 'start_point': { 'lat': float(ride['to']['latitude'].replace(",", ".")), 'lon': float(ride['to']['longitude'].replace(",", ".")) }, 'end_point': { 'lat': float(ride['from']['latitude'].replace(",", ".")), 'lon': float(ride['from']['longitude'].replace(",", ".")) }, 'date': return_ride_date, 'polyline': waypoints_to_polyline(ride['waypoints']), 'extras': { 'uuid': ride['uuid'], 'url': 'https://www.carpool.be/cplz/%s' % (ride['uuid']) # TODO: Probably a different URL for Brussels } } json_body = json.dumps(ride_data) patch_rides_url = "%s://%s:%s/rest/v2/rides/%s" % (prefix, host, port, uuid) patch_res = requests.patch(patch_rides_url, data=json_body, headers=headers, auth=auth) if patch_res.status_code == 200: counter = counter + 1 # If city is Edinburgh elif city == 'edinburgh': # Don't add past rides into DB if timestamp < ride_date: site = sites_collection.find_one({'name': 'Edinburgh'}) bb_minlat = site['bounding_box']['min_lat'] bb_minlon = site['bounding_box']['min_lon'] bb_maxlat = site['bounding_box']['max_lat'] bb_maxlon = site['bounding_box']['max_lon'] s_lat = ride['from']['latitude'] s_lon = ride['from']['longitude'] t_lat = ride['to']['latitude'] t_lon = ride['to']['longitude'] # Don't add rides that are outside bounding box into DB if inside_bounding_box(bb_minlat, bb_minlon, bb_maxlat, bb_maxlon, s_lat, s_lon, t_lat, t_lon): uuid = 
generate_custom_objectid(ride['uuid'], 24) # If uuid already exists in ids_to_keep then generate a new uuid based on ride date and insert it in ids_to_keep if uuid in ids_to_keep: uuid = generate_custom_objectid('%s%s' % (ride['uuid'], ride_date), 24) ids_to_keep.append(uuid) # Don't add rides that exceed period time into DB if timestamp + (period * 3600) > ride_date: # Fetch ride with uuid from database cursor_rides = rides_collection.find({ '_id': str_to_oid(uuid) }) # If ride not in database POST if cursor_rides.count() == 0: ride_data = { '_id': uuid, 'driver_id': edinburgh_driver_id, 'car_id': edinburgh_car_id, 'name': '%s - %s' % (ride['from']['city'], ride['to']['city']), 'start_point': { 'lat': ride['from']['latitude'], 'lon': ride['from']['longitude'] }, 'end_point': { 'lat': ride['to']['latitude'], 'lon': ride['to']['longitude'] }, 'date': ride_date, 'activated': True, 'polyline': waypoints_to_polyline(ride['waypoints']), 'extras': { 'uuid': ride['uuid'], 'url': 'https://liftshare.com/uk/lift/view/%s?community=iip' % (ride['uuid']) }, } rides.append(ride_data) # If ride in database PATCH else: # Store fetched ride for f_ride in cursor_rides: fetched_ride = f_ride # If a ride is modified between the intervals if (timestamp - last_modification) < interval or fetched_ride['date'] != ride_date: ride_data = { 'driver_id': edinburgh_driver_id, 'car_id': edinburgh_car_id, 'name': '%s - %s' % (ride['from']['city'], ride['to']['city']), 'start_point': { 'lat': ride['from']['latitude'], 'lon': ride['from']['longitude'] }, 'end_point': { 'lat': ride['to']['latitude'], 'lon': ride['to']['longitude'] }, 'date': ride_date, 'polyline': waypoints_to_polyline(ride['waypoints']), 'extras': { 'uuid': ride['uuid'], 'url': 'https://liftshare.com/uk/lift/view/%s?community=iip' % (ride['uuid']) }, } json_body = json.dumps(ride_data) patch_rides_url = "%s://%s:%s/rest/v2/rides/%s" % (prefix, host, port, uuid) patch_res = requests.patch(patch_rides_url, data=json_body, 
headers=headers, auth=auth) if patch_res.status_code == 200: counter = counter + 1 # Don't add past rides into DB if return_ride_date and timestamp < return_ride_date: site = sites_collection.find_one({'name': 'Edinburgh'}) bb_minlat = site['bounding_box']['min_lat'] bb_minlon = site['bounding_box']['min_lon'] bb_maxlat = site['bounding_box']['max_lat'] bb_maxlon = site['bounding_box']['max_lon'] s_lat = ride['from']['latitude'] s_lon = ride['from']['longitude'] t_lat = ride['to']['latitude'] t_lon = ride['to']['longitude'] # Don't add rides that are outside bounding box into DB if inside_bounding_box(bb_minlat, bb_minlon, bb_maxlat, bb_maxlon, s_lat, s_lon, t_lat, t_lon): uuid = generate_custom_objectid('%sret' % ride['uuid'], 24) # If uuid already exists in ids_to_keep then generate a new uuid based on ride date and insert it in ids_to_keep if uuid in ids_to_keep: uuid = generate_custom_objectid('%sret%s' % (ride['uuid'], return_ride_date), 24) ids_to_keep.append(uuid) # Don't add rides that exceed period time into DB if timestamp + (period * 3600) > return_ride_date: # Fetch ride with uuid from database cursor_rides = rides_collection.find({ '_id': str_to_oid(uuid) }) # If ride not in database POST if cursor_rides.count() == 0: ride_data = { '_id': uuid, 'driver_id': edinburgh_driver_id, 'car_id': edinburgh_car_id, 'name': '%s - %s' % (ride['to']['city'], ride['from']['city']), 'start_point': { 'lat': ride['to']['latitude'], 'lon': ride['to']['longitude'] }, 'end_point': { 'lat': ride['from']['latitude'], 'lon': ride['from']['longitude'] }, 'date': return_ride_date, 'activated': True, 'polyline': waypoints_to_polyline(ride['waypoints']), 'extras': { 'uuid': ride['uuid'], 'url': 'https://liftshare.com/uk/lift/view/%s?community=iip' % (ride['uuid']) }, } rides.append(ride_data) # If ride in database PATCH else: # Store fetched ride for f_ride in cursor_rides: fetched_ride = f_ride # If a ride is modified between the intervals if (timestamp - last_modification) < 
interval or fetched_ride['date'] != return_ride_date: ride_data = { 'driver_id': edinburgh_driver_id, 'car_id': edinburgh_car_id, 'name': '%s - %s' % (ride['to']['city'], ride['from']['city']), 'start_point': { 'lat': ride['to']['latitude'], 'lon': ride['to']['longitude'] }, 'end_point': { 'lat': ride['from']['latitude'], 'lon': ride['from']['longitude'] }, 'date': return_ride_date, 'polyline': waypoints_to_polyline(ride['waypoints']), 'extras': { 'uuid': ride['uuid'], 'url': 'https://liftshare.com/uk/lift/view/%s?community=iip' % (ride['uuid']) }, } json_body = json.dumps(ride_data) patch_rides_url = "%s://%s:%s/rest/v2/rides/%s" % (prefix, host, port, uuid) patch_res = requests.patch(patch_rides_url, data=json_body, headers=headers, auth=auth) if patch_res.status_code == 200: counter = counter + 1 # If city is Ljubljana elif city == 'ljubljana': # Don't add past rides into DB if timestamp < ride_date: site = sites_collection.find_one({'name': 'Ljubljana'}) bb_minlat = site['bounding_box']['min_lat'] bb_minlon = site['bounding_box']['min_lon'] bb_maxlat = site['bounding_box']['max_lat'] bb_maxlon = site['bounding_box']['max_lon'] s_lat = ride['from']['latitude'] s_lon = ride['from']['longitude'] t_lat = ride['to']['latitude'] t_lon = ride['to']['longitude'] # Don't add rides that are outside bounding box into DB if inside_bounding_box(bb_minlat, bb_minlon, bb_maxlat, bb_maxlon, s_lat, s_lon, t_lat, t_lon): uuid = generate_custom_objectid(str(ride['uuid']), 24) ids_to_keep.append(uuid) # Don't add rides that exceed period time into DB if timestamp + (period * 3600) > ride_date: # Fetch ride with uuid from database cursor_rides = rides_collection.find({ '_id': str_to_oid(uuid) }) # If ride not in database POST if cursor_rides.count() == 0: ride_data = { '_id': uuid, 'driver_id': ljubljana_driver_id, 'car_id': ljubljana_car_id, 'name': '%s - %s' % (ride['from']['city'], ride['to']['city']), 'start_point': { 'lat': ride['from']['latitude'], 'lon': 
ride['from']['longitude'] }, 'end_point': { 'lat': ride['to']['latitude'], 'lon': ride['to']['longitude'] }, 'date': ride_date, 'activated': True, 'polyline': waypoints_to_polyline(ride['waypoints']), 'extras': { 'uuid': ride['uuid'], 'url': ride['public_uri'] }, } rides.append(ride_data) # If ride in database PATCH else: # Store fetched ride for f_ride in cursor_rides: fetched_ride = f_ride # If a ride is modified between the intervals if (timestamp - last_modification) < interval or fetched_ride['date'] != ride_date: ride_data = { 'driver_id': ljubljana_driver_id, 'car_id': ljubljana_car_id, 'name': '%s - %s' % (ride['from']['city'], ride['to']['city']), 'start_point': { 'lat': ride['from']['latitude'], 'lon': ride['from']['longitude'] }, 'end_point': { 'lat': ride['to']['latitude'], 'lon': ride['to']['longitude'] }, 'date': ride_date, 'polyline': waypoints_to_polyline(ride['waypoints']), 'extras': { 'uuid': ride['uuid'], 'url': ride['public_uri'] }, } json_body = json.dumps(ride_data) patch_rides_url = "%s://%s:%s/rest/v2/rides/%s" % (prefix, host, port, uuid) patch_res = requests.patch(patch_rides_url, data=json_body, headers=headers, auth=auth) if patch_res.status_code == 200: counter = counter + 1 # Don't add past rides into DB if return_ride_date and timestamp < return_ride_date: site = sites_collection.find_one({'name': 'Ljubljana'}) bb_minlat = site['bounding_box']['min_lat'] bb_minlon = site['bounding_box']['min_lon'] bb_maxlat = site['bounding_box']['max_lat'] bb_maxlon = site['bounding_box']['max_lon'] s_lat = ride['from']['latitude'] s_lon = ride['from']['longitude'] t_lat = ride['to']['latitude'] t_lon = ride['to']['longitude'] # Don't add rides that are outside bounding box into DB if inside_bounding_box(bb_minlat, bb_minlon, bb_maxlat, bb_maxlon, s_lat, s_lon, t_lat, t_lon): uuid = generate_custom_objectid(str('%sret' % ride['uuid']), 24) ids_to_keep.append(uuid) # Don't add rides that exceed period time into DB if timestamp + (period * 3600) > 
return_ride_date: # Fetch ride with uuid from database cursor_rides = rides_collection.find({ '_id': str_to_oid(uuid) }) # If ride not in database POST if cursor_rides.count() == 0: ride_data = { '_id': uuid, 'driver_id': ljubljana_driver_id, 'car_id': ljubljana_car_id, 'name': '%s - %s' % (ride['to']['city'], ride['from']['city']), 'start_point': { 'lat': ride['to']['latitude'], 'lon': ride['to']['longitude'] }, 'end_point': { 'lat': ride['from']['latitude'], 'lon': ride['from']['longitude'] }, 'date': return_ride_date, 'activated': True, 'polyline': waypoints_to_polyline(ride['waypoints']), 'extras': { 'uuid': ride['uuid'], 'url': ride['public_uri'] }, } rides.append(ride_data) # If ride in database PATCH else: # Store fetched ride for f_ride in cursor_rides: fetched_ride = f_ride # If a ride is modified between the intervals if (timestamp - last_modification) < interval or fetched_ride['date'] != return_ride_date: ride_data = { 'driver_id': ljubljana_driver_id, 'car_id': ljubljana_car_id, 'name': '%s - %s' % (ride['to']['city'], ride['from']['city']), 'start_point': { 'lat': ride['to']['latitude'], 'lon': ride['to']['longitude'] }, 'end_point': { 'lat': ride['from']['latitude'], 'lon': ride['from']['longitude'] }, 'date': return_ride_date, 'polyline': waypoints_to_polyline(ride['waypoints']), 'extras': { 'uuid': ride['uuid'], 'url': ride['public_uri'] }, } json_body = json.dumps(ride_data) patch_rides_url = "%s://%s:%s/rest/v2/rides/%s" % (prefix, host, port, uuid) patch_res = requests.patch(patch_rides_url, data=json_body, headers=headers, auth=auth) if patch_res.status_code == 200: counter = counter + 1 # If city is Ticino elif city == 'ticino': # Don't add past rides into DB if timestamp < ride_date: site = sites_collection.find_one({'name': 'Canton Ticino'}) bb_minlat = site['bounding_box']['min_lat'] bb_minlon = site['bounding_box']['min_lon'] bb_maxlat = site['bounding_box']['max_lat'] bb_maxlon = site['bounding_box']['max_lon'] s_lat = 
ride['from']['latitude'] s_lon = ride['from']['longitude'] t_lat = ride['to']['latitude'] t_lon = ride['to']['longitude'] # Don't add rides that are outside bounding box into DB if inside_bounding_box(bb_minlat, bb_minlon, bb_maxlat, bb_maxlon, s_lat, s_lon, t_lat, t_lon): uuid = generate_custom_objectid(str(ride['uuid']), 24) ids_to_keep.append(uuid) # Don't add rides that exceed period time into DB if timestamp + (period * 3600) > ride_date: # Fetch ride with uuid from database cursor_rides = rides_collection.find({ '_id': str_to_oid(uuid) }) # If ride not in database POST if cursor_rides.count() == 0: ride_data = { '_id': uuid, 'driver_id': ticino1_driver_id, 'car_id': ticino1_car_id, 'name': '%s - %s' % (ride['from']['city'], ride['to']['city']), 'start_point': { 'lat': ride['from']['latitude'], 'lon': ride['from']['longitude'] }, 'end_point': { 'lat': ride['to']['latitude'], 'lon': ride['to']['longitude'] }, 'date': ride_date, 'activated': True, 'polyline': waypoints_to_polyline(ride['waypoints']), 'extras': { 'uuid': ride['uuid'], 'url': ride['url'] }, } rides.append(ride_data) # If ride in database PATCH else: # Store fetched ride for f_ride in cursor_rides: fetched_ride = f_ride # If a ride is modified between the intervals if (timestamp - last_modification) < interval or fetched_ride['date'] != ride_date: ride_data = { 'driver_id': ticino1_driver_id, 'car_id': ticino1_car_id, 'name': '%s - %s' % (ride['from']['city'], ride['to']['city']), 'start_point': { 'lat': ride['from']['latitude'], 'lon': ride['from']['longitude'] }, 'end_point': { 'lat': ride['to']['latitude'], 'lon': ride['to']['longitude'] }, 'date': ride_date, 'polyline': waypoints_to_polyline(ride['waypoints']), 'extras': { 'uuid': ride['uuid'], 'url': ride['url'] }, } json_body = json.dumps(ride_data) patch_rides_url = "%s://%s:%s/rest/v2/rides/%s" % (prefix, host, port, uuid) patch_res = requests.patch(patch_rides_url, data=json_body, headers=headers, auth=auth) if patch_res.status_code == 
200: counter = counter + 1 # Don't add past rides into DB if return_ride_date and timestamp < return_ride_date: site = sites_collection.find_one({'name': 'Canton Ticino'}) bb_minlat = site['bounding_box']['min_lat'] bb_minlon = site['bounding_box']['min_lon'] bb_maxlat = site['bounding_box']['max_lat'] bb_maxlon = site['bounding_box']['max_lon'] s_lat = ride['from']['latitude'] s_lon = ride['from']['longitude'] t_lat = ride['to']['latitude'] t_lon = ride['to']['longitude'] # Don't add rides that are outside bounding box into DB if inside_bounding_box(bb_minlat, bb_minlon, bb_maxlat, bb_maxlon, s_lat, s_lon, t_lat, t_lon): uuid = generate_custom_objectid(str('%sret' % ride['uuid']), 24) ids_to_keep.append(uuid) # Don't add rides that exceed period time into DB if timestamp + (period * 3600) > return_ride_date: # Fetch ride with uuid from database cursor_rides = rides_collection.find({ '_id': str_to_oid(uuid) }) # If ride not in database POST if cursor_rides.count() == 0: ride_data = { '_id': uuid, 'driver_id': ticino1_driver_id, 'car_id': ticino1_car_id, 'name': '%s - %s' % (ride['to']['city'], ride['from']['city']), 'start_point': { 'lat': ride['to']['latitude'], 'lon': ride['to']['longitude'] }, 'end_point': { 'lat': ride['from']['latitude'], 'lon': ride['from']['longitude'] }, 'date': return_ride_date, 'activated': True, 'polyline': waypoints_to_polyline(ride['waypoints']), 'extras': { 'uuid': ride['uuid'], 'url':ride['url'] }, } rides.append(ride_data) # If ride in database PATCH else: # Store fetched ride for f_ride in cursor_rides: fetched_ride = f_ride # If a ride is modified between the intervals if (timestamp - last_modification) < interval or fetched_ride['date'] != return_ride_date: ride_data = { 'driver_id': ticino1_driver_id, 'car_id': ticino1_car_id, 'name': '%s - %s' % (ride['to']['city'], ride['from']['city']), 'start_point': { 'lat': ride['to']['latitude'], 'lon': ride['to']['longitude'] }, 'end_point': { 'lat': ride['from']['latitude'], 'lon': 
ride['from']['longitude'] }, 'date': return_ride_date, 'polyline': waypoints_to_polyline(ride['waypoints']), 'extras': { 'uuid': ride['uuid'], 'url': ride['url'] }, } json_body = json.dumps(ride_data) patch_rides_url = "%s://%s:%s/rest/v2/rides/%s" % (prefix, host, port, uuid) patch_res = requests.patch(patch_rides_url, data=json_body, headers=headers, auth=auth) if patch_res.status_code == 200: counter = counter + 1 # If rides to POST if len(rides) > 0: json_body = json.dumps(rides) post_rides_url = "%s://%s:%s/rest/v2/rides" % (prefix, host, port) post_res = requests.post(post_rides_url, data=json_body, headers=headers, auth=auth) if post_res.status_code == 201: # print("%s | %s - %s" % (post_res.status_code, post_res.url, post_res.text)) print(' %s rides added to database' % (len(rides))) else: print(' Error when posting rides into database') if counter > 0: print(' %s rides updated in database' % (counter)) else: print("%s - %s" % (r.status_code, r.text)) #----------------------------------------------------------------------- # Delete obsolete rides #----------------------------------------------------------------------- if city == 'brussels': city_regex = 'carpool.be' site_name = 'Brussels' elif city == 'edinburgh': city_regex = 'liftshare.com' site_name = 'Edinburgh' elif city == 'ljubljana': city_regex = 'prevoz.org' site_name = 'Ljubljana' elif city == 'ticino': city_regex = 'bepooler.ch' site_name = 'Canton Ticino' rides_url = "%s://%s:%s/rest/v2/rides" % (prefix, host, port) # Fetch all rides from database cursor_rides = rides_collection.find({ '$and': [ {'_deleted': {'$eq': False}}, {'extras.url': { '$regex': city_regex}} ] }) # For every ride for ride in cursor_rides: if oid_to_str(ride['_id']) not in ids_to_keep: # DELETE rides if they are not included in API delete_rides_url = '%s/%s' % (rides_url, ride['_id']) delete_res_rides = requests.delete(delete_rides_url, headers=headers, auth=auth) if delete_res_rides.status_code == 204: counter_api = 
counter_api + 1 if counter_api > 0: print(' -------------------------------------------------------') print(' %s obsolete rides deleted from database' % counter_api) # Update nightly_version field site = sites_collection.find_one({'name': site_name}) site['carpooling_info']['nightly_version'] = site['carpooling_info']['nightly_version'] + 1 site['carpooling_info']['nightly_updated'] = int(time.time()) sites_collection.update({'_id': site['_id']}, site, upsert = False) #=============================================================================== # addDriverAndCar () #=============================================================================== def addDriverAndCar(host, port, use_ssl): prefix = "https" if use_ssl else "http" auth = HTTPBasicAuth('admin', 'password') # TODO: Insert admin credentials here headers = {'content-type': 'application/json'} #=============================================================================== # GET or POST dummy driver for Brussels #=============================================================================== user_data = { "email": "brussels@rdex.com", "password": "password", "name": "Carpool.be driver", "phone": "n/a", "dob": "1970-12-31", "gender": "MALE", "fcm_token": "1234567890" } json_body = json.dumps(user_data) get_user_url = "%s://%s:%s/rest/v2/users?email=%s" % (prefix, host, port, user_data['email']) get_res = requests.get(get_user_url, headers=headers, auth=auth) get_res_json = json.loads(get_res.text) try: global brussels_driver_id if get_res_json['users']: # user exists brussels_driver_id = get_res_json['users'][0]['_id'] else: # post user post_user_url = "%s://%s:%s/rest/v2/users" % (prefix, host, port) post_res = requests.post(post_user_url, data=json_body, headers=headers, auth=auth) post_res_json = json.loads(post_res.text) brussels_driver_id = post_res_json['_id'] except KeyError: print("%s | %s - %s" % (get_res.status_code, get_res.url, get_res.text)) 
#=============================================================================== # GET or POST dummy car for Brussels #=============================================================================== car_data = { "owner_id": brussels_driver_id, "model": "Carpool.be car", "plate": "carpool.be", "colour": "black", "seats": 4, "car_usage_preferences": { "air_conditioning": False, "child_seat": False, "food_allowed": False, "luggage_type": "SMALL", "pets_allowed": False, "smoking_allowed": False, "music_allowed": False } } json_body = json.dumps(car_data) get_car_url = "%s://%s:%s/rest/v2/cars" % (prefix, host, port) get_res = requests.get(get_car_url, headers=headers, auth=auth) get_res_json = json.loads(get_res.text) try: global brussels_car_id for car in get_res_json['cars']: if car['owner_id'] == brussels_driver_id: # car exists brussels_car_id = car['_id'] if brussels_car_id == None: # POST car post_car_url = "%s://%s:%s/rest/v2/cars" % (prefix, host, port) post_res = requests.post(post_car_url, data=json_body, headers=headers, auth=auth) post_res_json = json.loads(post_res.text) brussels_car_id = post_res_json['_id'] except KeyError: print("%s | %s - %s" % (get_res.status_code, get_res.url, get_res.text)) #=============================================================================== # GET or POST dummy driver for Edinburgh #=============================================================================== user_data = { "email": "edinburgh@rdex.com", "password": "password", "name": "Liftshare driver", "phone": "n/a", "dob": "1970-12-31", "gender": "MALE", "fcm_token": "1234567890" } json_body = json.dumps(user_data) get_user_url = "%s://%s:%s/rest/v2/users?email=%s" % (prefix, host, port, user_data['email']) get_res = requests.get(get_user_url, headers=headers, auth=auth) get_res_json = json.loads(get_res.text) try: global edinburgh_driver_id if get_res_json['users']: # user exists edinburgh_driver_id = get_res_json['users'][0]['_id'] else: # post user post_user_url 
= "%s://%s:%s/rest/v2/users" % (prefix, host, port) post_res = requests.post(post_user_url, data=json_body, headers=headers, auth=auth) post_res_json = json.loads(post_res.text) edinburgh_driver_id = post_res_json['_id'] except KeyError: print("%s | %s - %s" % (get_res.status_code, get_res.url, get_res.text)) #=============================================================================== # GET or POST dummy car for Edinburgh #=============================================================================== car_data = { "owner_id": edinburgh_driver_id, "model": "Liftshare car", "plate": "liftshare", "colour": "black", "seats": 4, "car_usage_preferences": { "air_conditioning": False, "child_seat": False, "food_allowed": False, "luggage_type": "SMALL", "pets_allowed": False, "smoking_allowed": False, "music_allowed": False } } json_body = json.dumps(car_data) get_car_url = "%s://%s:%s/rest/v2/cars" % (prefix, host, port) get_res = requests.get(get_car_url, headers=headers, auth=auth) get_res_json = json.loads(get_res.text) try: global edinburgh_car_id for car in get_res_json['cars']: if car['owner_id'] == edinburgh_driver_id: # car exists edinburgh_car_id = car['_id'] if edinburgh_car_id == None: # POST car post_car_url = "%s://%s:%s/rest/v2/cars" % (prefix, host, port) post_res = requests.post(post_car_url, data=json_body, headers=headers, auth=auth) post_res_json = json.loads(post_res.text) edinburgh_car_id = post_res_json['_id'] except KeyError: print("%s | %s - %s" % (get_res.status_code, get_res.url, get_res.text)) #=============================================================================== # GET or POST dummy driver for Ljubljana #=============================================================================== user_data = { "email": "ljubljana@rdex.com", "password": "password", "name": "Prevoz driver", "phone": "n/a", "dob": "1970-12-31", "gender": "MALE", "fcm_token": "1234567890" } json_body = json.dumps(user_data) get_user_url = 
"%s://%s:%s/rest/v2/users?email=%s" % (prefix, host, port, user_data['email']) get_res = requests.get(get_user_url, headers=headers, auth=auth) get_res_json = json.loads(get_res.text) try: global ljubljana_driver_id if get_res_json['users']: # user exists ljubljana_driver_id = get_res_json['users'][0]['_id'] else: # post user post_user_url = "%s://%s:%s/rest/v2/users" % (prefix, host, port) post_res = requests.post(post_user_url, data=json_body, headers=headers, auth=auth) post_res_json = json.loads(post_res.text) ljubljana_driver_id = post_res_json['_id'] except KeyError: print("%s | %s - %s" % (get_res.status_code, get_res.url, get_res.text)) #=============================================================================== # GET or POST dummy car for Ljubljana #=============================================================================== car_data = { "owner_id": ljubljana_driver_id, "model": "Prevoz car", "plate": "prevoz", "colour": "black", "seats": 4, "car_usage_preferences": { "air_conditioning": False, "child_seat": False, "food_allowed": False, "luggage_type": "SMALL", "pets_allowed": False, "smoking_allowed": False, "music_allowed": False } } json_body = json.dumps(car_data) get_car_url = "%s://%s:%s/rest/v2/cars" % (prefix, host, port) get_res = requests.get(get_car_url, headers=headers, auth=auth) get_res_json = json.loads(get_res.text) try: global ljubljana_car_id for car in get_res_json['cars']: if car['owner_id'] == ljubljana_driver_id: # car exists ljubljana_car_id = car['_id'] if ljubljana_car_id == None: # POST car post_car_url = "%s://%s:%s/rest/v2/cars" % (prefix, host, port) post_res = requests.post(post_car_url, data=json_body, headers=headers, auth=auth) post_res_json = json.loads(post_res.text) ljubljana_car_id = post_res_json['_id'] except KeyError: print("%s | %s - %s" % (get_res.status_code, get_res.url, get_res.text)) #=============================================================================== # GET or POST dummy driver for Ticino 1 
#=============================================================================== user_data = { "email": "ticino1@rdex.com", "password": "password", "name": "Bepooler driver", "phone": "n/a", "dob": "1970-12-31", "gender": "MALE", "fcm_token": "1234567890" } json_body = json.dumps(user_data) get_user_url = "%s://%s:%s/rest/v2/users?email=%s" % (prefix, host, port, user_data['email']) get_res = requests.get(get_user_url, headers=headers, auth=auth) get_res_json = json.loads(get_res.text) try: global ticino1_driver_id if get_res_json['users']: # user exists ticino1_driver_id = get_res_json['users'][0]['_id'] else: # post user post_user_url = "%s://%s:%s/rest/v2/users" % (prefix, host, port) post_res = requests.post(post_user_url, data=json_body, headers=headers, auth=auth) post_res_json = json.loads(post_res.text) ticino1_driver_id = post_res_json['_id'] except KeyError: print("%s | %s - %s" % (get_res.status_code, get_res.url, get_res.text)) #=============================================================================== # GET or POST dummy car for Ticino 1 #=============================================================================== car_data = { "owner_id": ticino1_driver_id, "model": "Bepooler car", "plate": "bepooler", "colour": "black", "seats": 4, "car_usage_preferences": { "air_conditioning": False, "child_seat": False, "food_allowed": False, "luggage_type": "SMALL", "pets_allowed": False, "smoking_allowed": False, "music_allowed": False } } json_body = json.dumps(car_data) get_car_url = "%s://%s:%s/rest/v2/cars" % (prefix, host, port) get_res = requests.get(get_car_url, headers=headers, auth=auth) get_res_json = json.loads(get_res.text) try: global ticino1_car_id for car in get_res_json['cars']: if car['owner_id'] == ticino1_driver_id: # car exists ticino1_car_id = car['_id'] if ticino1_car_id == None: # POST car post_car_url = "%s://%s:%s/rest/v2/cars" % (prefix, host, port) post_res = requests.post(post_car_url, data=json_body, headers=headers, auth=auth) 
post_res_json = json.loads(post_res.text) ticino1_car_id = post_res_json['_id'] except KeyError: print("%s | %s - %s" % (get_res.status_code, get_res.url, get_res.text)) #=============================================================================== # GET or POST dummy driver for Ticino 2 #=============================================================================== user_data = { "email": "ticino2@rdex.com", "password": "password", "name": "Mobalt driver", "phone": "n/a", "dob": "1970-12-31", "gender": "MALE", "fcm_token": "1234567890" } json_body = json.dumps(user_data) get_user_url = "%s://%s:%s/rest/v2/users?email=%s" % (prefix, host, port, user_data['email']) get_res = requests.get(get_user_url, headers=headers, auth=auth) get_res_json = json.loads(get_res.text) try: global ticino2_driver_id if get_res_json['users']: # user exists ticino2_driver_id = get_res_json['users'][0]['_id'] else: # post user post_user_url = "%s://%s:%s/rest/v2/users" % (prefix, host, port) post_res = requests.post(post_user_url, data=json_body, headers=headers, auth=auth) post_res_json = json.loads(post_res.text) ticino2_driver_id = post_res_json['_id'] except KeyError: print("%s | %s - %s" % (get_res.status_code, get_res.url, get_res.text)) #=============================================================================== # GET or POST dummy car for Ticino 2 #=============================================================================== car_data = { "owner_id": ticino2_driver_id, "model": "Mobalt car", "plate": "mobalt", "colour": "black", "seats": 4, "car_usage_preferences": { "air_conditioning": False, "child_seat": False, "food_allowed": False, "luggage_type": "SMALL", "pets_allowed": False, "smoking_allowed": False, "music_allowed": False } } json_body = json.dumps(car_data) get_car_url = "%s://%s:%s/rest/v2/cars" % (prefix, host, port) get_res = requests.get(get_car_url, headers=headers, auth=auth) get_res_json = json.loads(get_res.text) try: global ticino2_car_id for car in 
get_res_json['cars']: if car['owner_id'] == ticino2_driver_id: # car exists ticino2_car_id = car['_id'] if ticino2_car_id == None: # POST car post_car_url = "%s://%s:%s/rest/v2/cars" % (prefix, host, port) post_res = requests.post(post_car_url, data=json_body, headers=headers, auth=auth) post_res_json = json.loads(post_res.text) ticino2_car_id = post_res_json['_id'] except KeyError: print("%s | %s - %s" % (get_res.status_code, get_res.url, get_res.text)) #=============================================================================== # run_periodically () #=============================================================================== def run_periodically(host, port, interval, period, radius, dbname, city, use_ssl): # This implementation is subject to change threading.Timer(interval, run_periodically, args=(host, port, interval, period, radius, dbname, city, use_ssl)).start() periodic_pull(host, port, interval, period, radius, dbname, city, use_ssl) #=============================================================================== # create_arg_parser () #=============================================================================== def create_arg_parser(): parser = argparse.ArgumentParser(formatter_class=argparse.ArgumentDefaultsHelpFormatter) parser.add_argument('h', metavar='HOST', help="Server HOST (e.g. 'localhost')", type=str) parser.add_argument('p', metavar='PORT', help="Server PORT (e.g. 
'5000')", type=str) parser.add_argument('-i', '--interval', metavar='INTERVAL', help="Interval to run this script periodically (secs)", type=int, default=60) parser.add_argument('-r', '--period', metavar='PERIOD', help="Retrieve all rides within a certain period (max 72 hours)", type=int, default=12) parser.add_argument('-d', '--radius', metavar='RADIUS', help="Retrieve all rides within a certain radius", type=int, default=20) parser.add_argument('dbname', metavar='DBNAME', help="Database name", type=str) parser.add_argument('city', metavar='CITY', help='City name - currently: ' + ', '.join(CITIES), choices=CITIES, type=str) parser.add_argument('--ssl', help="Use SSL", action='store_true', default=False) return parser #=============================================================================== # main () #=============================================================================== def main(): parser = create_arg_parser() # If script run without arguments, print syntax if len(sys.argv) == 1: parser.print_help() sys.exit(1) args = parser.parse_args() dbname = args.dbname interval = args.interval period = args.period radius = args.radius city = args.city host = args.h port = args.p use_ssl = args.ssl print('city: %s' % (city)) print('interval: %s' % (interval)) print('period: %s' % (period)) print('radius: %s' % (radius)) print(' * RDEX Periodic Pull Service is active! * ') addDriverAndCar(host, port, use_ssl) run_periodically(host, port, interval, period, radius, dbname, city, use_ssl) if __name__ == '__main__': main()
53.594615
153
0.410768
6,321
71,656
4.429204
0.063123
0.007429
0.004393
0.006429
0.806694
0.778226
0.767832
0.761975
0.75951
0.751688
0
0.010104
0.440633
71,656
1,336
154
53.634731
0.688389
0.112314
0
0.705015
0
0.001967
0.141046
0.019352
0.000983
0
0
0.000749
0
1
0.006883
false
0.007866
0.011799
0.000983
0.021632
0.022616
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
dac6e633de92e1cf6b84d714442891c924f14fd3
2,668
py
Python
tests/test_suite.py
CDKGlobal/cd-performance-plugin
58176139ef744535b156b8ef5f187f38b683b2a5
[ "MIT" ]
null
null
null
tests/test_suite.py
CDKGlobal/cd-performance-plugin
58176139ef744535b156b8ef5f187f38b683b2a5
[ "MIT" ]
null
null
null
tests/test_suite.py
CDKGlobal/cd-performance-plugin
58176139ef744535b156b8ef5f187f38b683b2a5
[ "MIT" ]
null
null
null
import unittest import xmlrunner import sys import os from cd_perf_promotion.engines.configengine import ConfigEngine from cd_perf_promotion.engines.dataengine import DataEngine class TestSuite(unittest.TestCase): def test_no_appdynamics_connection(self): # Grab the configuration information configengine = ConfigEngine("./test_configs/config_test1.json", None, None, None, None, None, None, None, None) config_data = configengine.process_config() # Grab the performance data dataengine = DataEngine() # Check for a system exit call with self.assertRaises(SystemExit) as cm: dataengine.get_data(config_data) # Make sure that sys.exit(1) is called self.assertEqual(cm.exception.code, 1) def test_no_blazemeter_connection(self): # Grab the configuration information configengine = ConfigEngine("./test_configs/config_test2.json", None, None, None, None, None, None, None, None) config_data = configengine.process_config() # Grab the performance data dataengine = DataEngine() # Check for a system exit call with self.assertRaises(SystemExit) as cm: dataengine.get_data(config_data) # Make sure that sys.exit(1) is called self.assertEqual(cm.exception.code, 1) def test_no_webpagetest_connection(self): # Grab the configuration information configengine = ConfigEngine("./test_configs/config_test3.json", None, None, None, None, None, None, None, None) config_data = configengine.process_config() # Grab the performance data dataengine = DataEngine() # Check for a system exit call with self.assertRaises(SystemExit) as cm: dataengine.get_data(config_data) # Make sure that sys.exit(1) is called self.assertEqual(cm.exception.code, 1) def test_bad_webpagetest_location(self): # Grab the configuration information configengine = ConfigEngine("./test_configs/config_test3.json", None, None, None, None, None, None, None, None) config_data = configengine.process_config() # Grab the performance data dataengine = DataEngine() # Check for a system exit call with self.assertRaises(SystemExit) as cm: 
dataengine.get_data(config_data) # Make sure that sys.exit(1) is called self.assertEqual(cm.exception.code, 1) if __name__ == '__main__': sys.stdout = open(os.devnull, 'w') unittest.main(testRunner=xmlrunner.XMLTestRunner(output='test-reports'), failfast=False, buffer=False, catchbreak=False)
36.054054
124
0.686282
319
2,668
5.589342
0.23511
0.125631
0.161526
0.179473
0.810432
0.781268
0.781268
0.781268
0.781268
0.781268
0
0.005834
0.22901
2,668
73
125
36.547945
0.860963
0.19003
0
0.578947
0
0
0.069496
0.059701
0
0
0
0
0.210526
1
0.105263
false
0
0.157895
0
0.289474
0
0
0
0
null
0
0
1
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
dad262e3f6fbc20bcbef93139a5eee3929aa4511
4,572
py
Python
weather_tracker/models.py
gotkd21/wtracker
fdc77f003dd38f0a8873228f8afd1fabcbccf9ca
[ "CC0-1.0" ]
null
null
null
weather_tracker/models.py
gotkd21/wtracker
fdc77f003dd38f0a8873228f8afd1fabcbccf9ca
[ "CC0-1.0" ]
null
null
null
weather_tracker/models.py
gotkd21/wtracker
fdc77f003dd38f0a8873228f8afd1fabcbccf9ca
[ "CC0-1.0" ]
null
null
null
from django.db import models # Create your models here. class WeatherInfo(models.Model): ICON_TEXT = ( ('CD', 'clear-day'), ('CN', 'clear-night'), ('RN', 'rain'), ('SN', 'snow'), ('SL', 'sleet'), ('WI', 'wind'), ('CL', 'cloudy'), ('PD', 'partly-cloudy-day'), ('PN', 'partly-cloudy-night'), ('HL', 'hail'), ('TH', 'thunderstorm'), ('TR', 'tornado') ) id = models.AutoField(primary_key=True) timestamp = models.DateTimeField() curlattitude = models.DecimalField(max_digits=6, decimal_places=3) curlongtitude = models.DecimalField(max_digits=6, decimal_places=3) pressure = models.DecimalField(max_digits=6, decimal_places=2,null=True) summary = models.CharField(max_length=30,null=True) icon = models.CharField(max_length=2,null=True, choices=ICON_TEXT) precipIntensity = models.DecimalField(max_digits=6,decimal_places=4,null=True) precipProbability = models.DecimalField(max_digits=5, decimal_places=3,null=True) precipAccumulation = models.DecimalField(max_digits=5, decimal_places=3,null=True) precipType = models.CharField(max_length=10,null=True) temperature = models.DecimalField(max_digits=5,decimal_places=2,null=True) apparentTemperature = models.DecimalField(max_digits=5,decimal_places=2,null=True) dewPoint = models.DecimalField(max_digits=5,decimal_places=2,null=True) humidity = models.DecimalField(max_digits=5,decimal_places=2,null=True) uvIndex = models.DecimalField(max_digits=4,decimal_places=2,null=True) visibility = models.DecimalField(max_digits=5,decimal_places=2,null=True) ozone = models.DecimalField(max_digits=6,decimal_places=2,null=True) class Meta: abstract = True indexes = [ models.Index(fields=['timestamp']) ] class wtracker(WeatherInfo): class Meta: ordering = ['timestamp'] class wforecast(WeatherInfo): class Meta: ordering = ['timestamp'] #class Tracker(models.Model): # id = models.AutoField(primary_key=True) # timestamp = models.DateTimeField() # curlattitude = models.DecimalField(max_digits=6, decimal_places=3) # curlongtitude = 
models.DecimalField(max_digits=6, decimal_places=3) # actual_pressure = models.DecimalField(max_digits=6, decimal_places=2) # actual_summary = models.CharField(max_length=30) # actual_icon = models.CharField(max_length=25) # actual_precipIntensity = models.DecimalField(max_digits=6,decimal_places=4) # actual_precipProbability = models.DecimalField(max_digits=5, decimal_places=3) # actual_precipAccumulation = models.DecimalField(max_digits=5, decimal_places=3) # actual_precipType = models.CharField(max_length=10) # actual_temperature = models.DecimalField(max_digits=5,decimal_places=2) # actual_apparentTemperature = models.DecimalField(max_digits=5,decimal_places=2) # actual_dewPoint = models.DecimalField(max_digits=5,decimal_places=2) # actual_humidity = models.DecimalField(max_digits=5,decimal_places=2) # actual_uvIndex = models.DecimalField(max_digits=4,decimal_places=2) # actual_visibility = models.DecimalField(max_digits=5,decimal_places=2) # actual_ozone = models.DecimalField(max_digits=6,decimal_places=2) #class Forecast(models.Model): # id = models.AutoField(primary_key=True) # timestamp = models.DateTimeField() # curlattitude = models.DecimalField(max_digits=6, decimal_places=3) # curlongtitude = models.DecimalField(max_digits=6, decimal_places=3) # fcast_pressure = models.DecimalField(max_digits=6, decimal_places=2) # fcast_summary = models.CharField(max_length=30) # fcast_icon = models.CharField(max_length=25) # fcast_precipIntensity = models.DecimalField(max_digits=6,decimal_places=4) # fcast_precipProbability = models.DecimalField(max_digits=5, decimal_places=3) # fcast_precipAccumulation = models.DecimalField(max_digits=5, decimal_places=3) # fcast_precipType = models.CharField(max_length=10) # fcast_temperature = models.DecimalField(max_digits=5,decimal_places=2) # fcast_apparentTemperature = models.DecimalField(max_digits=5,decimal_places=2) # fcast_dewPoint = models.DecimalField(max_digits=5,decimal_places=2) # fcast_humidity = 
models.DecimalField(max_digits=5,decimal_places=2) # fcast_uvIndex = models.DecimalField(max_digits=4,decimal_places=2) # fcast_visibility = models.DecimalField(max_digits=5,decimal_places=2) # fcast_ozone = models.DecimalField(max_digits=6,decimal_places=2)
47.625
86
0.73622
567
4,572
5.724868
0.155203
0.216266
0.252311
0.324399
0.87061
0.859519
0.751386
0.751386
0.751386
0.300678
0
0.024179
0.140639
4,572
95
87
48.126316
0.801985
0.544838
0
0.111111
0
0
0.074963
0
0
0
0
0
0
1
0
false
0
0.022222
0
0.577778
0
0
0
0
null
1
1
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
8
975ec5755c4d3da54cf24c4f4d7a2ab4c767b905
14,659
py
Python
ESP32_LilyGo/WeatherInfoTV/smallfont.py
edwios/dotIoT
f678673f990cdd7b69607972d412f9424ff73e82
[ "MIT" ]
null
null
null
ESP32_LilyGo/WeatherInfoTV/smallfont.py
edwios/dotIoT
f678673f990cdd7b69607972d412f9424ff73e82
[ "MIT" ]
5
2019-03-29T12:28:40.000Z
2020-07-06T13:57:56.000Z
ESP32_LilyGo/WeatherInfoTV/smallfont.py
edwios/dotIoT
f678673f990cdd7b69607972d412f9424ff73e82
[ "MIT" ]
null
null
null
# Code generated by font_to_py.py. # Font: Jura-Medium.ttf Char set: !"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyzºåö # Cmd: font_to_py.py -x Jura-Medium.ttf 16 smallfont.py -k charset.txt version = '0.33' def height(): return 16 def baseline(): return 12 def max_width(): return 16 def hmap(): return True def reverse(): return False def monospaced(): return False def min_ch(): return 32 def max_ch(): return 246 _font =\ b'\x0a\x00\x00\x00\x3f\x00\x40\x80\x40\x80\x00\x80\x03\x00\x0c\x00'\ b'\x08\x00\x08\x00\x00\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00'\ b'\x00\x00\x05\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\ b'\x00\x00\x00\x00\x04\x00\x40\x40\x40\x40\x40\x40\x40\x40\x40\x00'\ b'\x00\x40\x00\x00\x00\x00\x05\x00\x50\x50\x50\x50\x00\x00\x00\x00'\ b'\x00\x00\x00\x00\x00\x00\x00\x00\x0b\x00\x00\x00\x02\x80\x06\x80'\ b'\x05\x00\x1f\xc0\x19\x00\x19\x00\x11\x00\x3f\x80\x22\x00\x66\x00'\ b'\x44\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0b\x00\x00\x00\x04\x00'\ b'\x04\x00\x7f\xc0\x44\x40\x44\x00\x3f\xc0\x04\x40\x44\x40\x7f\xc0'\ b'\x04\x00\x04\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0a\x00\x00\x00'\ b'\x00\x00\x70\x80\x51\x80\x73\x00\x04\x00\x08\x00\x33\x80\x62\x80'\ b'\x43\x80\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x09\x00'\ b'\x00\x00\x38\x00\x64\x00\x44\x00\x40\x00\x20\x00\x30\x00\x59\x00'\ b'\x4d\x00\x47\x00\x63\x00\x3d\x00\x00\x00\x00\x00\x00\x00\x00\x00'\ b'\x04\x00\x40\x40\x40\x40\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\ b'\x00\x00\x05\x00\x60\x40\x40\x40\x40\x40\x40\x40\x40\x40\x40\x40'\ b'\x20\x00\x00\x00\x05\x00\x60\x20\x20\x20\x20\x20\x20\x20\x20\x20'\ b'\x20\x20\x60\x00\x00\x00\x07\x00\x00\x10\x38\x10\x38\x10\x00\x00'\ b'\x00\x00\x00\x00\x00\x00\x00\x00\x09\x00\x00\x00\x00\x00\x00\x00'\ b'\x00\x00\x08\x00\x08\x00\x08\x00\x7f\x00\x08\x00\x08\x00\x08\x00'\ b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00\x00\x00'\ b'\x00\x00\x00\x00\x00\x00\x00\x40\x40\x40\x00\x00\x08\x00\x00\x00'\ 
b'\x00\x00\x00\x00\x00\x7c\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00'\ b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x40\x00\x00\x00\x00'\ b'\x06\x00\x04\x04\x0c\x08\x08\x10\x10\x10\x20\x20\x60\x40\x00\x00'\ b'\x00\x00\x0a\x00\x00\x00\x3f\x00\x61\x80\x40\x80\x40\x80\x40\x80'\ b'\x40\x80\x40\x80\x40\x80\x40\x80\x61\x80\x3f\x00\x00\x00\x00\x00'\ b'\x00\x00\x00\x00\x0a\x00\x04\x00\x0c\x00\x04\x00\x04\x00\x04\x00'\ b'\x04\x00\x04\x00\x04\x00\x04\x00\x04\x00\x04\x00\x04\x00\x00\x00'\ b'\x00\x00\x00\x00\x00\x00\x0a\x00\x00\x00\x7f\x00\x41\x80\x00\x80'\ b'\x00\x80\x01\x80\x06\x00\x18\x00\x30\x00\x60\x00\x40\x00\x7f\x80'\ b'\x00\x00\x00\x00\x00\x00\x00\x00\x0a\x00\x00\x00\x3f\x00\x41\x80'\ b'\x00\x80\x00\x80\x01\x00\x0e\x00\x01\x00\x00\x80\x00\x80\x41\x80'\ b'\x3f\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0a\x00\x00\x00\x03\x00'\ b'\x03\x00\x05\x00\x09\x00\x11\x00\x31\x00\x61\x00\x7f\x80\x01\x00'\ b'\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0a\x00\x00\x00'\ b'\x3f\x00\x40\x00\x40\x00\x7e\x00\x01\x00\x00\x80\x00\x80\x00\x80'\ b'\x00\x80\x61\x00\x3e\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0a\x00'\ b'\x00\x00\x0c\x00\x18\x00\x30\x00\x20\x00\x7e\x00\x61\x00\x40\x80'\ b'\x40\x80\x40\x80\x21\x00\x1e\x00\x00\x00\x00\x00\x00\x00\x00\x00'\ b'\x0a\x00\x00\x00\x7f\x80\x40\x80\x01\x00\x01\x00\x02\x00\x02\x00'\ b'\x02\x00\x04\x00\x04\x00\x08\x00\x08\x00\x00\x00\x00\x00\x00\x00'\ b'\x00\x00\x0a\x00\x00\x00\x3f\x00\x40\x80\x40\x80\x40\x80\x40\x80'\ b'\x3f\x00\x40\x80\x40\x80\x40\x80\x60\x80\x3f\x00\x00\x00\x00\x00'\ b'\x00\x00\x00\x00\x0a\x00\x00\x00\x1f\x00\x61\x80\x40\x80\x40\x80'\ b'\x61\x80\x1f\x80\x01\x00\x01\x00\x02\x00\x04\x00\x38\x00\x00\x00'\ b'\x00\x00\x00\x00\x00\x00\x05\x00\x00\x00\x00\x00\x40\x00\x00\x00'\ b'\x00\x00\x40\x00\x00\x00\x00\x00\x05\x00\x00\x00\x00\x00\x40\x00'\ b'\x00\x00\x00\x00\x40\x40\x40\x00\x00\x00\x09\x00\x00\x00\x00\x00'\ b'\x03\x00\x06\x00\x18\x00\x60\x00\x60\x00\x38\x00\x0c\x00\x03\x00'\ 
b'\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x08\x00\x00\x00'\ b'\x00\x00\x00\x7c\x00\x7c\x00\x00\x00\x00\x00\x00\x00\x00\x09\x00'\ b'\x00\x00\x00\x00\x60\x00\x18\x00\x0e\x00\x03\x00\x01\x80\x06\x00'\ b'\x18\x00\x30\x00\x40\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\ b'\x0a\x00\x00\x00\x3f\x00\x40\x80\x40\x80\x00\x80\x03\x00\x0c\x00'\ b'\x08\x00\x08\x00\x00\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00'\ b'\x00\x00\x0b\x00\x00\x00\x1e\x00\x21\x00\x40\x80\x4f\x80\x51\x80'\ b'\x51\x80\x51\x80\x4f\x00\x40\x00\x20\x80\x1f\x80\x00\x00\x00\x00'\ b'\x00\x00\x00\x00\x0a\x00\x00\x00\x0c\x00\x0c\x00\x1e\x00\x12\x00'\ b'\x13\x00\x21\x00\x21\x00\x7f\x80\x40\x80\x40\x80\x80\x40\x80\x40'\ b'\x00\x00\x00\x00\x00\x00\x0b\x00\x00\x00\x7f\x00\x41\x80\x40\x80'\ b'\x40\x80\x41\x80\x7f\x80\x40\x40\x40\x40\x40\x40\x40\xc0\x7f\x80'\ b'\x00\x00\x00\x00\x00\x00\x00\x00\x0b\x00\x00\x00\x3f\xc0\x60\x40'\ b'\x40\x00\x40\x00\x40\x00\x40\x00\x40\x00\x40\x00\x40\x00\x60\x40'\ b'\x3f\xc0\x00\x00\x00\x00\x00\x00\x00\x00\x0b\x00\x00\x00\x7f\x80'\ b'\x40\xc0\x40\x40\x40\x40\x40\x40\x40\x40\x40\x40\x40\x40\x40\x40'\ b'\x40\xc0\x7f\x80\x00\x00\x00\x00\x00\x00\x00\x00\x0b\x00\x00\x00'\ b'\x7f\x80\x40\x00\x40\x00\x40\x00\x7e\x00\x40\x00\x40\x00\x40\x00'\ b'\x40\x00\x40\x00\x7f\x80\x00\x00\x00\x00\x00\x00\x00\x00\x0a\x00'\ b'\x00\x00\x7f\x80\x40\x00\x40\x00\x40\x00\x7e\x00\x40\x00\x40\x00'\ b'\x40\x00\x40\x00\x40\x00\x40\x00\x00\x00\x00\x00\x00\x00\x00\x00'\ b'\x0b\x00\x00\x00\x3f\xc0\x60\x40\x40\x00\x40\x00\x40\x00\x40\x00'\ b'\x43\xc0\x40\x40\x40\x40\x60\x40\x3f\xc0\x00\x00\x00\x00\x00\x00'\ b'\x00\x00\x0b\x00\x00\x00\x40\x40\x40\x40\x40\x40\x40\x40\x40\x40'\ b'\x7f\xc0\x40\x40\x40\x40\x40\x40\x40\x40\x40\x40\x00\x00\x00\x00'\ b'\x00\x00\x00\x00\x04\x00\x00\x40\x40\x40\x40\x40\x40\x40\x40\x40'\ b'\x40\x40\x00\x00\x00\x00\x0a\x00\x00\x00\x00\x80\x00\x80\x00\x80'\ b'\x00\x80\x00\x80\x00\x80\x00\x80\x00\x80\x40\x80\x41\x80\x3f\x00'\ 
b'\x00\x00\x00\x00\x00\x00\x00\x00\x09\x00\x00\x00\x43\x00\x42\x00'\ b'\x44\x00\x48\x00\x50\x00\x70\x00\x48\x00\x44\x00\x42\x00\x41\x00'\ b'\x40\x80\x00\x00\x00\x00\x00\x00\x00\x00\x09\x00\x00\x00\x40\x00'\ b'\x40\x00\x40\x00\x40\x00\x40\x00\x40\x00\x40\x00\x40\x00\x40\x00'\ b'\x40\x00\x7f\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0c\x00\x00\x00'\ b'\x40\x40\x60\xc0\x51\xc0\x51\x40\x4a\x40\x44\x40\x40\x40\x40\x40'\ b'\x40\x40\x40\x40\x40\x40\x00\x00\x00\x00\x00\x00\x00\x00\x0b\x00'\ b'\x00\x00\x40\x40\x60\x40\x50\x40\x58\x40\x48\x40\x44\x40\x42\x40'\ b'\x43\x40\x41\xc0\x40\xc0\x40\x40\x00\x00\x00\x00\x00\x00\x00\x00'\ b'\x0b\x00\x00\x00\x1f\x00\x30\x80\x40\x40\x40\x40\x40\x40\x40\x40'\ b'\x40\x40\x40\x40\x40\x40\x30\x80\x1f\x00\x00\x00\x00\x00\x00\x00'\ b'\x00\x00\x0a\x00\x00\x00\x7f\x00\x40\x80\x40\x80\x40\x80\x7f\x00'\ b'\x40\x00\x40\x00\x40\x00\x40\x00\x40\x00\x40\x00\x00\x00\x00\x00'\ b'\x00\x00\x00\x00\x0b\x00\x00\x00\x1f\x00\x30\x80\x40\x40\x40\x40'\ b'\x40\x40\x40\x40\x40\x40\x40\x40\x43\x40\x30\x80\x1f\x40\x00\x00'\ b'\x00\x00\x00\x00\x00\x00\x0a\x00\x00\x00\x7f\x00\x40\x80\x40\x80'\ b'\x40\x80\x7f\x00\x41\x00\x41\x00\x40\x80\x40\x80\x40\x40\x40\x40'\ b'\x00\x00\x00\x00\x00\x00\x00\x00\x0b\x00\x00\x00\x3f\x80\x60\x40'\ b'\x40\x40\x40\x00\x30\x00\x0f\x00\x00\xc0\x00\x40\x40\x40\x60\xc0'\ b'\x3f\x80\x00\x00\x00\x00\x00\x00\x00\x00\x09\x00\x00\x00\x7f\x00'\ b'\x08\x00\x08\x00\x08\x00\x08\x00\x08\x00\x08\x00\x08\x00\x08\x00'\ b'\x08\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0b\x00\x00\x00'\ b'\x40\x40\x40\x40\x40\x40\x40\x40\x40\x40\x40\x40\x40\x40\x40\x40'\ b'\x40\x40\x30\x80\x1f\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0a\x00'\ b'\x00\x00\x80\x40\x40\xc0\x40\x80\x60\x80\x21\x00\x21\x00\x13\x00'\ b'\x12\x00\x1e\x00\x0c\x00\x0c\x00\x00\x00\x00\x00\x00\x00\x00\x00'\ b'\x10\x00\x00\x00\xc1\x83\x63\xc3\x63\x46\x23\x46\x26\x66\x36\x2c'\ b'\x1e\x2c\x1c\x3c\x1c\x18\x1c\x18\x08\x18\x00\x00\x00\x00\x00\x00'\ 
b'\x00\x00\x0a\x00\x00\x00\x40\xc0\x60\x80\x21\x00\x12\x00\x0e\x00'\ b'\x0c\x00\x0e\x00\x12\x00\x21\x00\x60\x80\x40\xc0\x00\x00\x00\x00'\ b'\x00\x00\x00\x00\x0a\x00\x00\x00\x80\x80\x41\x00\x23\x00\x36\x00'\ b'\x14\x00\x08\x00\x08\x00\x08\x00\x08\x00\x08\x00\x08\x00\x00\x00'\ b'\x00\x00\x00\x00\x00\x00\x0a\x00\x00\x00\x7f\x80\x00\x80\x01\x00'\ b'\x02\x00\x06\x00\x0c\x00\x08\x00\x10\x00\x20\x00\x60\x00\x7f\x80'\ b'\x00\x00\x00\x00\x00\x00\x00\x00\x05\x00\x60\x40\x40\x40\x40\x40'\ b'\x40\x40\x40\x40\x40\x40\x60\x00\x00\x00\x06\x00\x40\x40\x60\x20'\ b'\x20\x10\x10\x18\x08\x08\x0c\x04\x00\x00\x00\x00\x05\x00\x60\x20'\ b'\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x60\x00\x00\x00\x08\x00'\ b'\x00\x10\x38\x28\x64\x44\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\ b'\x09\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\ b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\x80\x00\x00\x00\x00'\ b'\x00\x00\x04\x00\x80\x40\x20\x00\x00\x00\x00\x00\x00\x00\x00\x00'\ b'\x00\x00\x00\x00\x0a\x00\x00\x00\x00\x00\x00\x00\x00\x00\x3f\x80'\ b'\x40\x80\x00\x80\x3f\x80\x60\x80\x40\x80\x41\x80\x3e\x80\x00\x00'\ b'\x00\x00\x00\x00\x00\x00\x0b\x00\x00\x00\x40\x00\x40\x00\x40\x00'\ b'\x5f\x80\x60\xc0\x40\x40\x40\x40\x40\x40\x40\x40\x60\xc0\x5f\x80'\ b'\x00\x00\x00\x00\x00\x00\x00\x00\x0b\x00\x00\x00\x00\x00\x00\x00'\ b'\x00\x00\x3f\xc0\x60\x40\x40\x00\x40\x00\x40\x00\x40\x00\x60\x40'\ b'\x3f\xc0\x00\x00\x00\x00\x00\x00\x00\x00\x0b\x00\x00\x00\x00\x40'\ b'\x00\x40\x00\x40\x3f\xc0\x60\x40\x40\x40\x40\x40\x40\x40\x40\x40'\ b'\x60\xc0\x3f\x40\x00\x00\x00\x00\x00\x00\x00\x00\x0b\x00\x00\x00'\ b'\x00\x00\x00\x00\x00\x00\x3f\x80\x60\xc0\x40\x40\x7f\xc0\x40\x00'\ b'\x40\x00\x60\x40\x3f\xc0\x00\x00\x00\x00\x00\x00\x00\x00\x06\x00'\ b'\x00\x38\x60\x40\xf8\x40\x40\x40\x40\x40\x40\x40\x00\x00\x00\x00'\ b'\x0b\x00\x00\x00\x00\x00\x00\x00\x00\x00\x3f\x40\x60\xc0\x40\x40'\ b'\x40\x40\x40\x40\x40\x40\x60\x40\x3f\xc0\x00\x40\x00\x40\x40\x40'\ 
b'\x7f\xc0\x0b\x00\x00\x00\x40\x00\x40\x00\x40\x00\x5f\x80\x60\xc0'\ b'\x40\x40\x40\x40\x40\x40\x40\x40\x40\x40\x40\x40\x00\x00\x00\x00'\ b'\x00\x00\x00\x00\x04\x00\x00\x40\x00\x00\x40\x40\x40\x40\x40\x40'\ b'\x40\x40\x00\x00\x00\x00\x06\x00\x00\x10\x00\x00\x10\x10\x10\x10'\ b'\x10\x10\x10\x10\x10\x10\xe0\x00\x08\x00\x00\x40\x40\x40\x46\x4c'\ b'\x50\x60\x58\x4c\x46\x41\x00\x00\x00\x00\x05\x00\x00\x20\x20\x20'\ b'\x20\x20\x20\x20\x20\x20\x20\x30\x00\x00\x00\x00\x0f\x00\x00\x00'\ b'\x00\x00\x00\x00\x00\x00\x5e\x78\x61\x84\x41\x04\x41\x04\x41\x04'\ b'\x41\x04\x41\x04\x41\x04\x00\x00\x00\x00\x00\x00\x00\x00\x0b\x00'\ b'\x00\x00\x00\x00\x00\x00\x00\x00\x5f\x80\x60\xc0\x40\x40\x40\x40'\ b'\x40\x40\x40\x40\x40\x40\x40\x40\x00\x00\x00\x00\x00\x00\x00\x00'\ b'\x0b\x00\x00\x00\x00\x00\x00\x00\x00\x00\x3f\x80\x60\xc0\x40\x40'\ b'\x40\x40\x40\x40\x40\x40\x60\xc0\x3f\x80\x00\x00\x00\x00\x00\x00'\ b'\x00\x00\x0b\x00\x00\x00\x00\x00\x00\x00\x00\x00\x5f\x80\x60\xc0'\ b'\x40\x40\x40\x40\x40\x40\x40\x40\x40\xc0\x7f\x80\x40\x00\x40\x00'\ b'\x40\x00\x00\x00\x0b\x00\x00\x00\x00\x00\x00\x00\x00\x00\x3f\x40'\ b'\x60\xc0\x40\x40\x40\x40\x40\x40\x40\x40\x60\x40\x3f\xc0\x00\x40'\ b'\x00\x40\x00\x40\x00\x00\x07\x00\x00\x00\x00\x00\x5c\x60\x40\x40'\ b'\x40\x40\x40\x40\x00\x00\x00\x00\x0b\x00\x00\x00\x00\x00\x00\x00'\ b'\x00\x00\x3f\xc0\x40\x40\x40\x00\x3f\x80\x00\x40\x00\x40\x40\x40'\ b'\x7f\x80\x00\x00\x00\x00\x00\x00\x00\x00\x06\x00\x00\x00\x40\x40'\ b'\xf8\x40\x40\x40\x40\x40\x60\x38\x00\x00\x00\x00\x0b\x00\x00\x00'\ b'\x00\x00\x00\x00\x00\x00\x40\x40\x40\x40\x40\x40\x40\x40\x40\x40'\ b'\x40\x40\x60\xc0\x3f\x40\x00\x00\x00\x00\x00\x00\x00\x00\x09\x00'\ b'\x00\x00\x00\x00\x00\x00\x00\x00\x40\x80\x21\x80\x21\x00\x33\x00'\ b'\x12\x00\x12\x00\x0c\x00\x0c\x00\x00\x00\x00\x00\x00\x00\x00\x00'\ b'\x0e\x00\x00\x00\x00\x00\x00\x00\x00\x00\x41\x04\x43\x04\x22\x88'\ b'\x22\x88\x24\xd8\x14\x50\x14\x50\x18\x20\x00\x00\x00\x00\x00\x00'\ 
b'\x00\x00\x09\x00\x00\x00\x00\x00\x00\x00\x00\x00\x41\x80\x61\x00'\ b'\x32\x00\x1c\x00\x0c\x00\x16\x00\x23\x00\x41\x80\x00\x00\x00\x00'\ b'\x00\x00\x00\x00\x0b\x00\x00\x00\x00\x00\x00\x00\x00\x00\x40\x40'\ b'\x40\x40\x40\x40\x40\x40\x40\x40\x40\x40\x60\xc0\x3f\x40\x00\x40'\ b'\x00\x40\x40\x40\x7f\xc0\x0a\x00\x00\x00\x00\x00\x00\x00\x00\x00'\ b'\x7f\x80\x41\x00\x42\x00\x04\x00\x08\x00\x30\x80\x60\x80\x7f\x80'\ b'\x00\x00\x00\x00\x00\x00\x00\x00\x07\x00\x00\x00\x00\x00\x38\x28'\ b'\x28\x38\x7c\x00\x00\x00\x00\x00\x00\x00\x0a\x00\x0c\x00\x14\x00'\ b'\x0c\x00\x00\x00\x3f\x80\x40\x80\x00\x80\x3f\x80\x60\x80\x40\x80'\ b'\x41\x80\x3e\x80\x00\x00\x00\x00\x00\x00\x00\x00\x0b\x00\x00\x00'\ b'\x00\x00\x12\x00\x00\x00\x3f\x80\x60\xc0\x40\x40\x40\x40\x40\x40'\ b'\x40\x40\x60\xc0\x3f\x80\x00\x00\x00\x00\x00\x00\x00\x00' _sparse =\ b'\x20\x00\x22\x00\x21\x00\x34\x00\x22\x00\x46\x00\x23\x00\x58\x00'\ b'\x24\x00\x7a\x00\x25\x00\x9c\x00\x26\x00\xbe\x00\x27\x00\xe0\x00'\ b'\x28\x00\xf2\x00\x29\x00\x04\x01\x2a\x00\x16\x01\x2b\x00\x28\x01'\ b'\x2c\x00\x4a\x01\x2d\x00\x5c\x01\x2e\x00\x6e\x01\x2f\x00\x80\x01'\ b'\x30\x00\x92\x01\x31\x00\xb4\x01\x32\x00\xd6\x01\x33\x00\xf8\x01'\ b'\x34\x00\x1a\x02\x35\x00\x3c\x02\x36\x00\x5e\x02\x37\x00\x80\x02'\ b'\x38\x00\xa2\x02\x39\x00\xc4\x02\x3a\x00\xe6\x02\x3b\x00\xf8\x02'\ b'\x3c\x00\x0a\x03\x3d\x00\x2c\x03\x3e\x00\x3e\x03\x3f\x00\x60\x03'\ b'\x40\x00\x82\x03\x41\x00\xa4\x03\x42\x00\xc6\x03\x43\x00\xe8\x03'\ b'\x44\x00\x0a\x04\x45\x00\x2c\x04\x46\x00\x4e\x04\x47\x00\x70\x04'\ b'\x48\x00\x92\x04\x49\x00\xb4\x04\x4a\x00\xc6\x04\x4b\x00\xe8\x04'\ b'\x4c\x00\x0a\x05\x4d\x00\x2c\x05\x4e\x00\x4e\x05\x4f\x00\x70\x05'\ b'\x50\x00\x92\x05\x51\x00\xb4\x05\x52\x00\xd6\x05\x53\x00\xf8\x05'\ b'\x54\x00\x1a\x06\x55\x00\x3c\x06\x56\x00\x5e\x06\x57\x00\x80\x06'\ b'\x58\x00\xa2\x06\x59\x00\xc4\x06\x5a\x00\xe6\x06\x5b\x00\x08\x07'\ b'\x5c\x00\x1a\x07\x5d\x00\x2c\x07\x5e\x00\x3e\x07\x5f\x00\x50\x07'\ 
b'\x60\x00\x72\x07\x61\x00\x84\x07\x62\x00\xa6\x07\x63\x00\xc8\x07'\ b'\x64\x00\xea\x07\x65\x00\x0c\x08\x66\x00\x2e\x08\x67\x00\x40\x08'\ b'\x68\x00\x62\x08\x69\x00\x84\x08\x6a\x00\x96\x08\x6b\x00\xa8\x08'\ b'\x6c\x00\xba\x08\x6d\x00\xcc\x08\x6e\x00\xee\x08\x6f\x00\x10\x09'\ b'\x70\x00\x32\x09\x71\x00\x54\x09\x72\x00\x76\x09\x73\x00\x88\x09'\ b'\x74\x00\xaa\x09\x75\x00\xbc\x09\x76\x00\xde\x09\x77\x00\x00\x0a'\ b'\x78\x00\x22\x0a\x79\x00\x44\x0a\x7a\x00\x66\x0a\xba\x00\x88\x0a'\ b'\xe5\x00\x9a\x0a\xf6\x00\xbc\x0a' _mvfont = memoryview(_font) _mvsp = memoryview(_sparse) ifb = lambda l : l[0] | (l[1] << 8) def bs(lst, val): while True: m = (len(lst) & ~ 7) >> 1 v = ifb(lst[m:]) if v == val: return ifb(lst[m + 2:]) if not m: return 0 lst = lst[m:] if v < val else lst[:m] def get_ch(ch): doff = bs(_mvsp, ord(ch)) width = ifb(_mvfont[doff : ]) next_offs = doff + 2 + ((width - 1)//8 + 1) * 16 return _mvfont[doff + 2:next_offs], 16, width
57.940711
128
0.700662
3,508
14,659
2.922178
0.064424
0.53146
0.599649
0.582968
0.685299
0.653302
0.622671
0.58843
0.561799
0.532729
0
0.422559
0.032335
14,659
252
129
58.170635
0.300106
0.015485
0
0.034188
1
0.84188
0.875858
0.875581
0
1
0
0
0
1
0.042735
false
0
0
0.034188
0.089744
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
1
0
0
0
0
0
1
0
0
0
1
1
1
1
null
1
0
0
0
0
0
0
0
0
0
0
0
0
10
97ac512bc06fbc4ace3b2a766e0629e98383911a
1,314
py
Python
AssetInvestment/forms/login.py
Janspiry/AssetInvestment
b305236748187873cfd849f943f9ae5529a0c85d
[ "Apache-2.0" ]
2
2021-02-28T08:34:44.000Z
2021-04-09T17:27:11.000Z
AssetInvestment/forms/login.py
Janspiry/AssetInvestment
b305236748187873cfd849f943f9ae5529a0c85d
[ "Apache-2.0" ]
null
null
null
AssetInvestment/forms/login.py
Janspiry/AssetInvestment
b305236748187873cfd849f943f9ae5529a0c85d
[ "Apache-2.0" ]
2
2021-02-28T08:34:46.000Z
2021-09-10T13:22:30.000Z
from django import forms
# from ..models import


class LoginForm(forms.Form):
    """Form backing the login page: account name plus password."""

    # Account name: required, 6-20 characters, rendered as a Bootstrap text input.
    username = forms.CharField(
        max_length=20,
        min_length=6,
        required=True,
        error_messages={"required": "用户账号不能为空", "invalid": "格式错误"},
        widget=forms.TextInput(attrs={"class": "form-control"}),
    )
    # Password: 6-20 characters, rendered as a masked password input.
    passwd = forms.CharField(
        max_length=20,
        min_length=6,
        widget=forms.PasswordInput(attrs={"class": "form-control"}),
    )


class RegisterForm(forms.Form):
    """Form backing the registration page: e-mail, account name, password."""

    # Contact e-mail: required, 6-100 characters.
    userEmail = forms.CharField(
        max_length=100,
        min_length=6,
        required=True,
        error_messages={"required": "用户邮箱不能为空", "invalid": "格式错误"},
        widget=forms.TextInput(attrs={"class": "form-control"}),
    )
    # Account name: same constraints as the login form's username field.
    username = forms.CharField(
        max_length=20,
        min_length=6,
        required=True,
        error_messages={"required": "用户账号不能为空", "invalid": "格式错误"},
        widget=forms.TextInput(attrs={"class": "form-control"}),
    )
    # Password: 6-20 characters, masked.
    passwd = forms.CharField(
        max_length=20,
        min_length=6,
        widget=forms.PasswordInput(attrs={"class": "form-control"}),
    )
54.75
89
0.518265
119
1,314
5.613445
0.268908
0.10479
0.127246
0.172156
0.806886
0.806886
0.806886
0.806886
0.742515
0.664671
0
0.018868
0.354642
1,314
24
90
54.75
0.768868
0.015221
0
0.684211
0
0
0.128384
0
0
0
0
0
0
1
0
false
0.210526
0.052632
0
0.421053
0
0
0
0
null
0
0
1
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
0
0
0
8
8aee62d024952e9b56f93c6ee85db8e4ea1132a4
12,914
py
Python
tests/python/TestConnections.py
vuiseng9/vdms
9bc14219c8942a3d686936b3f1105cc02a788a12
[ "MIT" ]
54
2018-03-07T20:20:42.000Z
2022-03-23T08:34:38.000Z
tests/python/TestConnections.py
vuiseng9/vdms
9bc14219c8942a3d686936b3f1105cc02a788a12
[ "MIT" ]
88
2018-02-22T23:21:58.000Z
2022-03-22T21:04:17.000Z
tests/python/TestConnections.py
vuiseng9/vdms
9bc14219c8942a3d686936b3f1105cc02a788a12
[ "MIT" ]
25
2018-05-09T21:44:15.000Z
2022-02-21T19:23:30.000Z
# # The MIT License # # @copyright Copyright (c) 2017 Intel Corporation # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), # to deal in the Software without restriction, # including without limitation the rights to use, copy, modify, # merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, # ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. 
#
from threading import Thread  # NOTE(review): appears unused in this file

import TestCommand

class TestConnections(TestCommand.TestCommand):
    """Integration tests for AddConnection plus FindEntity queries whose
    results are filtered through connection ("link") constraints.

    Each test seeds one "People" entity connected to two "foo" entities
    and then checks that link constraints select the expected subset.

    NOTE: the property key "probablity" [sic] is intentionally left
    misspelled throughout — it is runtime data the server stores/matches,
    not code.
    """

    def test_FindEntity_link_constraints_float(self):
        """Link constraints on a float connection property ("probablity")."""
        db = self.create_connection()

        # Seed one People entity...
        props = {}
        props["name"] = "Jon"
        props["lastname"] = "Bonachon"
        props["age"] = 29
        response, arr = self.addEntity("felcflo_People", properties=props,
                                       check_status=True)

        # ...and two foo entities to connect it to.
        props = {}
        props["type"] = "foo"
        props["name"] = "alligator"
        response, arr = self.addEntity("felcflo_foo", properties=props,
                                       check_status=True)

        props = {}
        props["type"] = "foo"
        props["name"] = "cat"
        response, arr = self.addEntity("felcflo_foo", properties=props,
                                       check_status=True)

        # Look the three entities up by property and connect People->alligator
        # (probablity 0.3) and People->cat (probablity 0.6) in one transaction.
        all_queries = []
        fE = {
            "FindEntity": {
                "class": "felcflo_People",
                "constraints": {
                    "name": ["==", "Jon"],
                    "lastname": ["==", "Bonachon"],
                },
                "_ref": 2
            }
        }
        all_queries.append(fE)
        fE = {
            "FindEntity": {
                "class": "felcflo_foo",
                "constraints": {
                    "name": ["==", "alligator"]
                },
                "_ref": 3
            }
        }
        all_queries.append(fE)
        fE = {
            "FindEntity": {
                "class": "felcflo_foo",
                "constraints": {
                    "name": ["==", "cat"]
                },
                "_ref": 4
            }
        }
        all_queries.append(fE)
        aC = {
            "AddConnection": {
                "class": "foo_connection",
                "ref1": 2,
                "ref2": 3,
                "properties": {
                    "name": "best_type_of_connection",
                    "probablity": 0.3
                }
            }
        }
        all_queries.append(aC)
        aC = {
            "AddConnection": {
                "class": "foo_connection",
                "ref1": 2,
                "ref2": 4,
                "properties": {
                    "name": "best_type_of_connection",
                    "probablity": 0.6
                }
            }
        }
        all_queries.append(aC)
        response, res_arr = db.query(all_queries)
        self.assertEqual(response[0]["FindEntity"]["status"], 0)
        self.assertEqual(response[1]["FindEntity"]["status"], 0)
        self.assertEqual(response[2]["FindEntity"]["status"], 0)
        self.assertEqual(response[3]["AddConnection"]["status"], 0)
        self.assertEqual(response[4]["AddConnection"]["status"], 0)

        # probablity >= 0.5 should match only the 0.6 connection -> "cat".
        all_queries = []
        fE = {
            "FindEntity": {
                "class": "felcflo_People",
                "_ref": 1,
                "results": {
                    "list": ["name", "lastname"]
                }
            }
        }
        all_queries.append(fE)
        fE = {
            "FindEntity": {
                "class": "felcflo_foo",
                "link": {
                    "ref": 1,
                    "constraints": {
                        "probablity": [">=", 0.5],
                        "name": ["==", "best_type_of_connection"]
                    }
                },
                "results": {
                    "list": ["name"]
                }
            }
        }
        all_queries.append(fE)
        response, res_arr = db.query(all_queries)
        self.assertEqual(len(response[1]["FindEntity"]["entities"]), 1)
        self.assertEqual(response[1]["FindEntity"]["entities"][0]["name"],
                         "cat")

        # probablity >= 0.1 matches both connections.
        all_queries = []
        fE = {
            "FindEntity": {
                "class": "felcflo_People",
                "_ref": 1,
                "results": {
                    "list": ["name", "lastname"]
                }
            }
        }
        all_queries.append(fE)
        fE = {
            "FindEntity": {
                "class": "felcflo_foo",
                "link": {
                    "ref": 1,
                    "constraints": {
                        "probablity": [">=", 0.1],
                        "name": ["==", "best_type_of_connection"]
                    }
                },
                "results": {
                    "list": ["name"]
                }
            }
        }
        all_queries.append(fE)
        response, res_arr = db.query(all_queries)
        self.assertEqual(len(response[1]["FindEntity"]["entities"]), 2)

        # probablity >= 1.0 matches nothing.
        all_queries = []
        fE = {
            "FindEntity": {
                "class": "felcflo_People",
                "_ref": 1,
                "results": {
                    "list": ["name", "lastname"]
                }
            }
        }
        all_queries.append(fE)
        fE = {
            "FindEntity": {
                "class": "felcflo_foo",
                "link": {
                    "ref": 1,
                    "constraints": {
                        "probablity": [">=", 1.0],
                        "name": ["==", "best_type_of_connection"]
                    }
                },
                "results": {
                    "list": ["name"]
                }
            }
        }
        all_queries.append(fE)
        response, res_arr = db.query(all_queries)
        self.assertEqual(len(response[1]["FindEntity"]["entities"]), 0)

    def test_FindEntity_link_constraints_string(self):
        """Link constraints on a string connection property ("name")."""
        db = self.create_connection()

        # Same seeding as the float test, under felcstr_* class names.
        props = {}
        props["name"] = "Jon"
        props["lastname"] = "Bonachon"
        props["age"] = 29
        response, arr = self.addEntity("felcstr_People", properties=props,
                                       check_status=True)

        props = {}
        props["type"] = "foo"
        props["name"] = "alligator"
        response, arr = self.addEntity("felcstr_foo", properties=props,
                                       check_status=True)

        props = {}
        props["type"] = "foo"
        props["name"] = "cat"
        response, arr = self.addEntity("felcstr_foo", properties=props,
                                       check_status=True)

        # Connect People->alligator with name "best_type_of_connection_1"
        # and People->cat with name "best_type_of_connection".
        all_queries = []
        fE = {
            "FindEntity": {
                "class": "felcstr_People",
                "constraints": {
                    "name": ["==", "Jon"],
                    "lastname": ["==", "Bonachon"],
                },
                "_ref": 2
            }
        }
        all_queries.append(fE)
        fE = {
            "FindEntity": {
                "class": "felcstr_foo",
                "constraints": {
                    "name": ["==", "alligator"]
                },
                "_ref": 3
            }
        }
        all_queries.append(fE)
        fE = {
            "FindEntity": {
                "class": "felcstr_foo",
                "constraints": {
                    "name": ["==", "cat"]
                },
                "_ref": 4
            }
        }
        all_queries.append(fE)
        aC = {
            "AddConnection": {
                "class": "foo_connection",
                "ref1": 2,
                "ref2": 3,
                "properties": {
                    "name": "best_type_of_connection_1",
                    "probablity": 0.3
                }
            }
        }
        all_queries.append(aC)
        aC = {
            "AddConnection": {
                "class": "foo_connection",
                "ref1": 2,
                "ref2": 4,
                "properties": {
                    "name": "best_type_of_connection",
                    "probablity": 0.6
                }
            }
        }
        all_queries.append(aC)
        response, res_arr = db.query(all_queries)
        self.assertEqual(response[0]["FindEntity"]["status"], 0)
        self.assertEqual(response[1]["FindEntity"]["status"], 0)
        self.assertEqual(response[2]["FindEntity"]["status"], 0)
        self.assertEqual(response[3]["AddConnection"]["status"], 0)
        self.assertEqual(response[4]["AddConnection"]["status"], 0)

        # Exact match on the "_1" name selects only the alligator connection.
        all_queries = []
        fE = {
            "FindEntity": {
                "class": "felcstr_People",
                "_ref": 1,
                "results": {
                    "list": ["name", "lastname"]
                }
            }
        }
        all_queries.append(fE)
        fE = {
            "FindEntity": {
                "class": "felcstr_foo",
                "link": {
                    "ref": 1,
                    "constraints": {
                        "name": ["==", "best_type_of_connection_1"]
                    }
                },
                "results": {
                    "list": ["name"]
                }
            }
        }
        all_queries.append(fE)
        response, res_arr = db.query(all_queries)
        self.assertEqual(len(response[1]["FindEntity"]["entities"]), 1)
        self.assertEqual(response[1]["FindEntity"]["entities"][0]["name"],
                         "alligator")

        # Lexicographic >= matches both connection names.
        all_queries = []
        fE = {
            "FindEntity": {
                "class": "felcstr_People",
                "_ref": 1,
                "results": {
                    "list": ["name", "lastname"]
                }
            }
        }
        all_queries.append(fE)
        fE = {
            "FindEntity": {
                "class": "felcstr_foo",
                "link": {
                    "ref": 1,
                    "constraints": {
                        "name": [">=", "best_type_of_connection"]
                    }
                },
                "results": {
                    "list": ["name"]
                }
            }
        }
        all_queries.append(fE)
        response, res_arr = db.query(all_queries)
        self.assertEqual(len(response[1]["FindEntity"]["entities"]), 2)

        # Lexicographic < matches neither.
        all_queries = []
        fE = {
            "FindEntity": {
                "class": "felcstr_People",
                "_ref": 1,
                "results": {
                    "list": ["name", "lastname"]
                }
            }
        }
        all_queries.append(fE)
        fE = {
            "FindEntity": {
                "class": "felcstr_foo",
                "link": {
                    "ref": 1,
                    "constraints": {
                        "name": ["<", "best_type_of_connection"]
                    }
                },
                "results": {
                    "list": ["name"]
                }
            }
        }
        all_queries.append(fE)
        response, res_arr = db.query(all_queries)
        self.assertEqual(len(response[1]["FindEntity"]["entities"]), 0)

        # Exact match on the base name selects only the cat connection.
        all_queries = []
        fE = {
            "FindEntity": {
                "class": "felcstr_People",
                "_ref": 1,
                "results": {
                    "list": ["name", "lastname"]
                }
            }
        }
        all_queries.append(fE)
        fE = {
            "FindEntity": {
                "class": "felcstr_foo",
                "link": {
                    "ref": 1,
                    "constraints": {
                        "name": ["==", "best_type_of_connection"]
                    }
                },
                "results": {
                    "list": ["name"]
                }
            }
        }
        all_queries.append(fE)
        response, res_arr = db.query(all_queries)
        self.assertEqual(len(response[1]["FindEntity"]["entities"]), 1)
        self.assertEqual(response[1]["FindEntity"]["entities"][0]["name"],
                         "cat")
27.831897
87
0.408936
991
12,914
5.182644
0.159435
0.081776
0.074766
0.070093
0.808022
0.792835
0.792445
0.78972
0.78972
0.78972
0
0.014239
0.456172
12,914
463
88
27.892009
0.717072
0.08394
0
0.723288
0
0
0.196544
0.021772
0
0
0
0
0.054795
1
0.005479
false
0
0.005479
0
0.013699
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
c100ece161608aea1bd1d15c58bc1d5d8be4918b
3,196
py
Python
kmeans.py
withlqs/IshiharaEdge
fb14b65cf57cdc5258187c355ad6655362486115
[ "MIT" ]
null
null
null
kmeans.py
withlqs/IshiharaEdge
fb14b65cf57cdc5258187c355ad6655362486115
[ "MIT" ]
null
null
null
kmeans.py
withlqs/IshiharaEdge
fb14b65cf57cdc5258187c355ad6655362486115
[ "MIT" ]
null
null
null
#!/usr/bin/env python3
"""Naive k-means clustering over sets of integer triples (e.g. RGB pixels)."""

import random

# Upper bound on the squared distance between two 8-bit RGB triples; used as
# the initial "worst" distance when searching for the nearest cluster.
_MAX_DISTANCE = 256 ** 2 * 3

# Maximum number of refinement passes before giving up on convergence.
_MAX_ITERATIONS = 10


def _mean_sq_distance(e, cluster):
    """Return the mean squared Euclidean distance from triple *e* to *cluster*.

    *cluster* must be non-empty.
    """
    total = 0
    for element in cluster:
        total += ((e[0] - element[0]) ** 2 +
                  (e[1] - element[1]) ** 2 +
                  (e[2] - element[2]) ** 2)
    return total / len(cluster)


def _nearest_set(sets, e):
    """Return the cluster in *sets* whose members are on average closest to *e*."""
    final_distance = _MAX_DISTANCE
    final_set = sets[0]
    for now_s in sets:
        if not now_s:
            # Robustness: skip empty clusters instead of dividing by zero.
            continue
        distance = _mean_sq_distance(e, now_s)
        if distance < final_distance:
            final_distance = distance
            final_set = now_s
    return final_set


def _assign_all(sets, points):
    """Move every element of *points* into its nearest cluster (empties *points*)."""
    for e in list(points):
        final_set = _nearest_set(sets, e)
        points.discard(e)
        final_set.add(e)


def _refine(sets):
    """Iteratively move points between clusters until stable (or max passes).

    Returns *sets* after refinement.
    """
    changed = True
    i = 0
    while changed and i < _MAX_ITERATIONS:
        # BUG FIX: the original never reset `changed` to False, so the loop
        # always ran the full 10 passes; reset it here so the loop stops as
        # soon as a pass moves nothing.
        changed = False
        i += 1
        print('---')
        for s in sets:
            for e in list(s):
                final_set = _nearest_set(sets, e)
                if final_set != s:
                    changed = True
                    s.discard(e)
                    final_set.add(e)
    return sets


def kmeans_sets(sets, input):
    """Cluster the triples in *input* into the pre-seeded clusters *sets*.

    *input* is consumed (emptied); *sets* is mutated in place and returned.
    """
    _assign_all(sets, input)
    return _refine(sets)


def kmeans(input, sets_number):
    """Cluster the triples in *input* into *sets_number* clusters.

    Cluster seeds are drawn from *input* at random; *input* is consumed.
    Returns the list of clusters (sets of triples).
    """
    sets = []
    for _ in range(sets_number):
        # Seed each cluster with a random element, removed from the pool so
        # the same element can never seed two clusters.
        element = random.choice(list(input))
        input.discard(element)
        sets.append({element})
    _assign_all(sets, input)
    return _refine(sets)


def main():
    """Demo: cluster six triples into two groups and print the result."""
    input = {
        (0, 1, 1),
        (0, 2, 1),
        (0, 1, 2),
        (0, 4, 5),
        (0, 4, 4),
        (0, 5, 4)
    }
    sets = kmeans(input, 2)
    for s in sets:
        for e in s:
            print(e)
        print('======')


if __name__ == '__main__':
    main()
28.035088
114
0.426471
387
3,196
3.385013
0.121447
0.048855
0.128244
0.030534
0.81374
0.81374
0.81374
0.801527
0.801527
0.801527
0
0.053148
0.458385
3,196
113
115
28.283186
0.70364
0.006571
0
0.744898
0
0
0.006301
0
0
0
0
0
0
1
0.030612
false
0.010204
0.010204
0
0.061224
0.040816
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
c18795c1a192453b6529f8a889dfbfb743f9068e
182,167
py
Python
fdint/tests/test_gfd.py
jgukelberger/fdint
0237323d6fd5d4161190ff7982811d8ae290f89e
[ "BSD-3-Clause" ]
11
2015-10-25T18:51:55.000Z
2021-02-26T13:05:07.000Z
fdint/tests/test_gfd.py
jgukelberger/fdint
0237323d6fd5d4161190ff7982811d8ae290f89e
[ "BSD-3-Clause" ]
19
2015-04-23T19:41:20.000Z
2017-08-01T02:04:04.000Z
fdint/tests/test_gfd.py
jgukelberger/fdint
0237323d6fd5d4161190ff7982811d8ae290f89e
[ "BSD-3-Clause" ]
10
2017-05-31T07:27:16.000Z
2021-08-28T15:34:09.000Z
# Copyright (c) 2015, Scott J Maddox. All rights reserved. # Use of this source code is governed by the BSD-3-Clause # license that can be found in the LICENSE file. # This file was generated by `scripts/gen_test_gfd.py`. # Do not edit this file directly, or your changes will be lost. ''' Tests the `gfd` module. ''' # Make sure we import the local package import os import sys sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '../..'))) from fdint import gfd import unittest import numpy import warnings class Test_GFD(unittest.TestCase): def assertRTOL(self, a, b, RTOL): assert RTOL >= 0 rerr = abs(a-b)/a if rerr > RTOL: self.fail('Outside of relative tolerance of {}: {}' ''.format(RTOL, rerr)) def assert_all_rtol(self, a, b, rtol): a = numpy.array(a) b = numpy.array(b) rtol = numpy.array(rtol) for rtol_ in rtol: assert rtol_ >= 0 rerr = abs(a-b)/a if (rerr > rtol).all(): self.fail('Outside of relative tolerance of {}: {}' ''.format(rtol, rerr)) def test_gfdm1h_1(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfdm1h(-50.0, 0.0), 3.41862030057428093e-22, 1e-07) def test_vgfdm1h_1(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfdm1h(numpy.array((-50.0,)), numpy.array((0.0,))), (3.41862030057428093e-22,), (1e-07,)) def test_gfdm1h_2(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfdm1h(-3.0, 0.0), 8.52597063488509527e-02, 1e-07) def test_vgfdm1h_2(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfdm1h(numpy.array((-3.0,)), numpy.array((0.0,))), (8.52597063488509527e-02,), (1e-07,)) def test_gfdm1h_3(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfdm1h(-2.0, 0.0), 2.19191620737676202e-01, 1e-07) def test_vgfdm1h_3(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") 
self.assert_all_rtol(gfd.gfdm1h(numpy.array((-2.0,)), numpy.array((0.0,))), (2.19191620737676202e-01,), (1e-07,)) def test_gfdm1h_4(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfdm1h(-1.0, 0.0), 5.21150414377014259e-01, 1e-07) def test_vgfdm1h_4(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfdm1h(numpy.array((-1.0,)), numpy.array((0.0,))), (5.21150414377014259e-01,), (1e-07,)) def test_gfdm1h_5(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfdm1h(0.0, 0.0), 1.07215499426948724e+00, 1e-07) def test_vgfdm1h_5(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfdm1h(numpy.array((0.0,)), numpy.array((0.0,))), (1.07215499426948724e+00,), (1e-07,)) def test_gfdm1h_6(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfdm1h(1.0, 0.0), 1.82041146637164375e+00, 1e-07) def test_vgfdm1h_6(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfdm1h(numpy.array((1.0,)), numpy.array((0.0,))), (1.82041146637164375e+00,), (1e-07,)) def test_gfdm1h_7(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfdm1h(4.0, 0.0), 3.87435338318501366e+00, 1e-07) def test_vgfdm1h_7(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfdm1h(numpy.array((4.0,)), numpy.array((0.0,))), (3.87435338318501366e+00,), (1e-07,)) def test_gfdm1h_8(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfdm1h(5.0, 0.0), 4.38325669770695736e+00, 1e-07) def test_vgfdm1h_8(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfdm1h(numpy.array((5.0,)), numpy.array((0.0,))), (4.38325669770695736e+00,), (1e-07,)) def test_gfdm1h_9(self): with warnings.catch_warnings(): 
warnings.simplefilter("ignore") self.assertRTOL(gfd.gfdm1h(7.0, 0.0), 5.24155545558703384e+00, 1e-07) def test_vgfdm1h_9(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfdm1h(numpy.array((7.0,)), numpy.array((0.0,))), (5.24155545558703384e+00,), (1e-07,)) def test_gfdm1h_10(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfdm1h(10.0, 0.0), 6.29713762236208385e+00, 1e-07) def test_vgfdm1h_10(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfdm1h(numpy.array((10.0,)), numpy.array((0.0,))), (6.29713762236208385e+00,), (1e-07,)) def test_gfdm1h_11(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfdm1h(15.0, 0.0), 7.73151352106451295e+00, 1e-07) def test_vgfdm1h_11(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfdm1h(numpy.array((15.0,)), numpy.array((0.0,))), (7.73151352106451295e+00,), (1e-07,)) def test_gfdm1h_12(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfdm1h(20.0, 0.0), 8.93497320226533098e+00, 1e-07) def test_vgfdm1h_12(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfdm1h(numpy.array((20.0,)), numpy.array((0.0,))), (8.93497320226533098e+00,), (1e-07,)) def test_gfdm1h_13(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfdm1h(30.0, 0.0), 1.09494219613718897e+01, 1e-07) def test_vgfdm1h_13(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfdm1h(numpy.array((30.0,)), numpy.array((0.0,))), (1.09494219613718897e+01,), (1e-07,)) def test_gfdm1h_14(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfdm1h(40.0, 0.0), 1.26458514472489920e+01, 1e-07) def test_vgfdm1h_14(self): with warnings.catch_warnings(): 
warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfdm1h(numpy.array((40.0,)), numpy.array((0.0,))), (1.26458514472489920e+01,), (1e-07,)) def test_gfdm1h_15(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfdm1h(50.0, 0.0), 1.41398061393993402e+01, 1e-07) def test_vgfdm1h_15(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfdm1h(numpy.array((50.0,)), numpy.array((0.0,))), (1.41398061393993402e+01,), (1e-07,)) def test_gfdm1h_16(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfdm1h(-50.0, 0.075), 3.45023869035781114e-22, 9e-08) def test_vgfdm1h_16(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfdm1h(numpy.array((-50.0,)), numpy.array((0.075,))), (3.45023869035781114e-22,), (9e-08,)) def test_gfdm1h_17(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfdm1h(-3.0, 0.075), 8.60617942686103948e-02, 9e-08) def test_vgfdm1h_17(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfdm1h(numpy.array((-3.0,)), numpy.array((0.075,))), (8.60617942686103948e-02,), (9e-08,)) def test_gfdm1h_18(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfdm1h(-2.0, 0.075), 2.21310594441572001e-01, 1e-07) def test_vgfdm1h_18(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfdm1h(numpy.array((-2.0,)), numpy.array((0.075,))), (2.21310594441572001e-01,), (1e-07,)) def test_gfdm1h_19(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfdm1h(-1.0, 0.075), 5.26519912692117509e-01, 1e-07) def test_vgfdm1h_19(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfdm1h(numpy.array((-1.0,)), numpy.array((0.075,))), (5.26519912692117509e-01,), (1e-07,)) def 
test_gfdm1h_20(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfdm1h(0.0, 0.075), 1.08467593486047531e+00, 1e-09) def test_vgfdm1h_20(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfdm1h(numpy.array((0.0,)), numpy.array((0.075,))), (1.08467593486047531e+00,), (1e-09,)) def test_gfdm1h_21(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfdm1h(1.0, 0.075), 1.84614866407679723e+00, 3e-09) def test_vgfdm1h_21(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfdm1h(numpy.array((1.0,)), numpy.array((0.075,))), (1.84614866407679723e+00,), (3e-09,)) def test_gfdm1h_22(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfdm1h(4.0, 0.075), 3.97966737443307395e+00, 5e-09) def test_vgfdm1h_22(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfdm1h(numpy.array((4.0,)), numpy.array((0.075,))), (3.97966737443307395e+00,), (5e-09,)) def test_gfdm1h_23(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfdm1h(5.0, 0.075), 4.52570562678987720e+00, 6e-09) def test_vgfdm1h_23(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfdm1h(numpy.array((5.0,)), numpy.array((0.075,))), (4.52570562678987720e+00,), (6e-09,)) def test_gfdm1h_24(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfdm1h(7.0, 0.075), 5.46972697064253399e+00, 9e-10) def test_vgfdm1h_24(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfdm1h(numpy.array((7.0,)), numpy.array((0.075,))), (5.46972697064253399e+00,), (9e-10,)) def test_gfdm1h_25(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfdm1h(10.0, 0.075), 6.67659724958244460e+00, 4e-09) 
def test_vgfdm1h_25(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfdm1h(numpy.array((10.0,)), numpy.array((0.075,))), (6.67659724958244460e+00,), (4e-09,)) def test_gfdm1h_26(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfdm1h(15.0, 0.075), 8.40902185520310397e+00, 5e-09) def test_vgfdm1h_26(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfdm1h(numpy.array((15.0,)), numpy.array((0.075,))), (8.40902185520310397e+00,), (5e-09,)) def test_gfdm1h_27(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfdm1h(20.0, 0.075), 9.95449196388720381e+00, 7e-09) def test_vgfdm1h_27(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfdm1h(numpy.array((20.0,)), numpy.array((0.075,))), (9.95449196388720381e+00,), (7e-09,)) def test_gfdm1h_28(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfdm1h(30.0, 0.075), 1.27504736062638031e+01, 3e-09) def test_vgfdm1h_28(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfdm1h(numpy.array((30.0,)), numpy.array((0.075,))), (1.27504736062638031e+01,), (3e-09,)) def test_gfdm1h_29(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfdm1h(40.0, 0.075), 1.53257081165839342e+01, 6e-09) def test_vgfdm1h_29(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfdm1h(numpy.array((40.0,)), numpy.array((0.075,))), (1.53257081165839342e+01,), (6e-09,)) def test_gfdm1h_30(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfdm1h(50.0, 0.075), 1.77719171979791852e+01, 6e-11) def test_vgfdm1h_30(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfdm1h(numpy.array((50.0,)), 
numpy.array((0.075,))), (1.77719171979791852e+01,), (6e-11,)) def test_gfdm1h_31(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfdm1h(-50.0, 0.15), 3.48106251908235795e-22, 2e-07) def test_vgfdm1h_31(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfdm1h(numpy.array((-50.0,)), numpy.array((0.15,))), (3.48106251908235795e-22,), (2e-07,)) def test_gfdm1h_32(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfdm1h(-3.0, 0.15), 8.68435600907420391e-02, 2e-07) def test_vgfdm1h_32(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfdm1h(numpy.array((-3.0,)), numpy.array((0.15,))), (8.68435600907420391e-02,), (2e-07,)) def test_gfdm1h_33(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfdm1h(-2.0, 0.15), 2.23375183085195328e-01, 5e-09) def test_vgfdm1h_33(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfdm1h(numpy.array((-2.0,)), numpy.array((0.15,))), (2.23375183085195328e-01,), (5e-09,)) def test_gfdm1h_34(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfdm1h(-1.0, 0.15), 5.31747154463234506e-01, 5e-09) def test_vgfdm1h_34(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfdm1h(numpy.array((-1.0,)), numpy.array((0.15,))), (5.31747154463234506e-01,), (5e-09,)) def test_gfdm1h_35(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfdm1h(0.0, 0.15), 1.09684291233822795e+00, 6e-09) def test_vgfdm1h_35(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfdm1h(numpy.array((0.0,)), numpy.array((0.15,))), (1.09684291233822795e+00,), (6e-09,)) def test_gfdm1h_36(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") 
self.assertRTOL(gfd.gfdm1h(1.0, 0.15), 1.87107615391477422e+00, 9e-09) def test_vgfdm1h_36(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfdm1h(numpy.array((1.0,)), numpy.array((0.15,))), (1.87107615391477422e+00,), (9e-09,)) def test_gfdm1h_37(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfdm1h(4.0, 0.15), 4.07990784552398189e+00, 8e-08) def test_vgfdm1h_37(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfdm1h(numpy.array((4.0,)), numpy.array((0.15,))), (4.07990784552398189e+00,), (8e-08,)) def test_gfdm1h_38(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfdm1h(5.0, 0.15), 4.66034331354291353e+00, 3e-09) def test_vgfdm1h_38(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfdm1h(numpy.array((5.0,)), numpy.array((0.15,))), (4.66034331354291353e+00,), (3e-09,)) def test_gfdm1h_39(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfdm1h(7.0, 0.15), 5.68232613899786987e+00, 1e-08) def test_vgfdm1h_39(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfdm1h(numpy.array((7.0,)), numpy.array((0.15,))), (5.68232613899786987e+00,), (1e-08,)) def test_gfdm1h_40(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfdm1h(10.0, 0.15), 7.02291019121403437e+00, 4e-09) def test_vgfdm1h_40(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfdm1h(numpy.array((10.0,)), numpy.array((0.15,))), (7.02291019121403437e+00,), (4e-09,)) def test_gfdm1h_41(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfdm1h(15.0, 0.15), 9.00841827043538323e+00, 2e-09) def test_vgfdm1h_41(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") 
self.assert_all_rtol(gfd.gfdm1h(numpy.array((15.0,)), numpy.array((0.15,))), (9.00841827043538323e+00,), (2e-09,)) def test_gfdm1h_42(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfdm1h(20.0, 0.15), 1.08324690858367116e+01, 4e-09) def test_vgfdm1h_42(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfdm1h(numpy.array((20.0,)), numpy.array((0.15,))), (1.08324690858367116e+01,), (4e-09,)) def test_gfdm1h_43(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfdm1h(30.0, 0.15), 1.42340752123558119e+01, 4e-07) def test_vgfdm1h_43(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfdm1h(numpy.array((30.0,)), numpy.array((0.15,))), (1.42340752123558119e+01,), (4e-07,)) def test_gfdm1h_44(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfdm1h(40.0, 0.15), 1.74579864824527320e+01, 2e-04) def test_vgfdm1h_44(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfdm1h(numpy.array((40.0,)), numpy.array((0.15,))), (1.74579864824527320e+01,), (2e-04,)) def test_gfdm1h_45(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfdm1h(50.0, 0.15), 2.06418432701824841e+01, 6e-03) def test_vgfdm1h_45(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfdm1h(numpy.array((50.0,)), numpy.array((0.15,))), (2.06418432701824841e+01,), (6e-03,)) def test_gfdm1h_46(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfdm1h(-50.0, 0.225), 3.51117756491165601e-22, 2e-07) def test_vgfdm1h_46(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfdm1h(numpy.array((-50.0,)), numpy.array((0.225,))), (3.51117756491165601e-22,), (2e-07,)) def test_gfdm1h_47(self): with 
warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfdm1h(-3.0, 0.225), 8.76072067647288427e-02, 2e-07) def test_vgfdm1h_47(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfdm1h(numpy.array((-3.0,)), numpy.array((0.225,))), (8.76072067647288427e-02,), (2e-07,)) def test_gfdm1h_48(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfdm1h(-2.0, 0.225), 2.25391246283487862e-01, 7e-09) def test_vgfdm1h_48(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfdm1h(numpy.array((-2.0,)), numpy.array((0.225,))), (2.25391246283487862e-01,), (7e-09,)) def test_gfdm1h_49(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfdm1h(-1.0, 0.225), 5.36847718624313019e-01, 8e-09) def test_vgfdm1h_49(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfdm1h(numpy.array((-1.0,)), numpy.array((0.225,))), (5.36847718624313019e-01,), (8e-09,)) def test_gfdm1h_50(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfdm1h(0.0, 0.225), 1.10869585877475774e+00, 1e-08) def test_vgfdm1h_50(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfdm1h(numpy.array((0.0,)), numpy.array((0.225,))), (1.10869585877475774e+00,), (1e-08,)) def test_gfdm1h_51(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfdm1h(1.0, 0.225), 1.89529108118685441e+00, 2e-08) def test_vgfdm1h_51(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfdm1h(numpy.array((1.0,)), numpy.array((0.225,))), (1.89529108118685441e+00,), (2e-08,)) def test_gfdm1h_52(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfdm1h(4.0, 0.225), 4.17588519952772597e+00, 4e-09) def 
test_vgfdm1h_52(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfdm1h(numpy.array((4.0,)), numpy.array((0.225,))), (4.17588519952772597e+00,), (4e-09,)) def test_gfdm1h_53(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfdm1h(5.0, 0.225), 4.78854351316443871e+00, 4e-10) def test_vgfdm1h_53(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfdm1h(numpy.array((5.0,)), numpy.array((0.225,))), (4.78854351316443871e+00,), (4e-10,)) def test_gfdm1h_54(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfdm1h(7.0, 0.225), 5.88258669200485862e+00, 1e-09) def test_vgfdm1h_54(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfdm1h(numpy.array((7.0,)), numpy.array((0.225,))), (5.88258669200485862e+00,), (1e-09,)) def test_gfdm1h_55(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfdm1h(10.0, 0.225), 7.34439766989452103e+00, 6e-10) def test_vgfdm1h_55(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfdm1h(numpy.array((10.0,)), numpy.array((0.225,))), (7.34439766989452103e+00,), (6e-10,)) def test_gfdm1h_56(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfdm1h(15.0, 0.225), 9.55390506670982376e+00, 3e-09) def test_vgfdm1h_56(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfdm1h(numpy.array((15.0,)), numpy.array((0.225,))), (9.55390506670982376e+00,), (3e-09,)) def test_gfdm1h_57(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfdm1h(20.0, 0.225), 1.16191847019764545e+01, 2e-06) def test_vgfdm1h_57(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfdm1h(numpy.array((20.0,)), 
numpy.array((0.225,))), (1.16191847019764545e+01,), (2e-06,)) def test_gfdm1h_58(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfdm1h(30.0, 0.225), 1.55466689055292804e+01, 2e-03) def test_vgfdm1h_58(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfdm1h(numpy.array((30.0,)), numpy.array((0.225,))), (1.55466689055292804e+01,), (2e-03,)) def test_gfdm1h_59(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfdm1h(40.0, 0.225), 2.01017376048420005e+01, 8e-02) def test_vgfdm1h_59(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfdm1h(numpy.array((40.0,)), numpy.array((0.225,))), (2.01017376048420005e+01,), (8e-02,)) def test_gfdm1h_60(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfdm1h(50.0, 0.225), 3.72959709474892236e+01, 1e+00) def test_vgfdm1h_60(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfdm1h(numpy.array((50.0,)), numpy.array((0.225,))), (3.72959709474892236e+01,), (1e+00,)) def test_gfdm1h_61(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfdm1h(-50.0, 0.3), 3.54064902642658540e-22, 2e-07) def test_vgfdm1h_61(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfdm1h(numpy.array((-50.0,)), numpy.array((0.3,))), (3.54064902642658540e-22,), (2e-07,)) def test_gfdm1h_62(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfdm1h(-3.0, 0.3), 8.83544073023509180e-02, 2e-07) def test_vgfdm1h_62(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfdm1h(numpy.array((-3.0,)), numpy.array((0.3,))), (8.83544073023509180e-02,), (2e-07,)) def test_gfdm1h_63(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") 
self.assertRTOL(gfd.gfdm1h(-2.0, 0.3), 2.27363426531536572e-01, 1e-08) def test_vgfdm1h_63(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfdm1h(numpy.array((-2.0,)), numpy.array((0.3,))), (2.27363426531536572e-01,), (1e-08,)) def test_gfdm1h_64(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfdm1h(-1.0, 0.3), 5.41833916844009611e-01, 1e-08) def test_vgfdm1h_64(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfdm1h(numpy.array((-1.0,)), numpy.array((0.3,))), (5.41833916844009611e-01,), (1e-08,)) def test_gfdm1h_65(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfdm1h(0.0, 0.3), 1.12026641774436864e+00, 2e-08) def test_vgfdm1h_65(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfdm1h(numpy.array((0.0,)), numpy.array((0.3,))), (1.12026641774436864e+00,), (2e-08,)) def test_gfdm1h_66(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfdm1h(1.0, 0.3), 1.91886965841472179e+00, 3e-08) def test_vgfdm1h_66(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfdm1h(numpy.array((1.0,)), numpy.array((0.3,))), (1.91886965841472179e+00,), (3e-08,)) def test_gfdm1h_67(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfdm1h(4.0, 0.3), 4.26819597451099764e+00, 1e-08) def test_vgfdm1h_67(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfdm1h(numpy.array((4.0,)), numpy.array((0.3,))), (4.26819597451099764e+00,), (1e-08,)) def test_gfdm1h_68(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfdm1h(5.0, 0.3), 4.91128862748422979e+00, 3e-09) def test_vgfdm1h_68(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") 
self.assert_all_rtol(gfd.gfdm1h(numpy.array((5.0,)), numpy.array((0.3,))), (4.91128862748422979e+00,), (3e-09,)) def test_gfdm1h_69(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfdm1h(7.0, 0.3), 6.07269354910286019e+00, 5e-09) def test_vgfdm1h_69(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfdm1h(numpy.array((7.0,)), numpy.array((0.3,))), (6.07269354910286019e+00,), (5e-09,)) def test_gfdm1h_70(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfdm1h(10.0, 0.3), 7.64624379698125711e+00, 7e-08) def test_vgfdm1h_70(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfdm1h(numpy.array((10.0,)), numpy.array((0.3,))), (7.64624379698125711e+00,), (7e-08,)) def test_gfdm1h_71(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfdm1h(15.0, 0.3), 1.00589425324919723e+01, 8e-06) def test_vgfdm1h_71(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfdm1h(numpy.array((15.0,)), numpy.array((0.3,))), (1.00589425324919723e+01,), (8e-06,)) def test_gfdm1h_72(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfdm1h(20.0, 0.3), 1.23427708786633286e+01, 5e-04) def test_vgfdm1h_72(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfdm1h(numpy.array((20.0,)), numpy.array((0.3,))), (1.23427708786633286e+01,), (5e-04,)) def test_gfdm1h_73(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfdm1h(30.0, 0.3), 1.75069361023896022e+01, 1e-01) def test_vgfdm1h_73(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfdm1h(numpy.array((30.0,)), numpy.array((0.3,))), (1.75069361023896022e+01,), (1e-01,)) def test_gfdm1h_74(self): with warnings.catch_warnings(): 
warnings.simplefilter("ignore") self.assertRTOL(gfd.gfdm1h(40.0, 0.3), 5.00973909081849911e+01, 3e+00) def test_vgfdm1h_74(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfdm1h(numpy.array((40.0,)), numpy.array((0.3,))), (5.00973909081849911e+01,), (3e+00,)) def test_gfdm1h_75(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfdm1h(50.0, 0.3), 4.24788289017311456e+02, 3e+01) def test_vgfdm1h_75(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfdm1h(numpy.array((50.0,)), numpy.array((0.3,))), (4.24788289017311456e+02,), (3e+01,)) def test_gfd1h_1(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd1h(-50.0, 0.0), 1.70931015028714046e-22, 1e-07) def test_vgfd1h_1(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd1h(numpy.array((-50.0,)), numpy.array((0.0,))), (1.70931015028714046e-22,), (1e-07,)) def test_gfd1h_2(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd1h(-3.0, 0.0), 4.33663701523976236e-02, 1e-07) def test_vgfd1h_2(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd1h(numpy.array((-3.0,)), numpy.array((0.0,))), (4.33663701523976236e-02,), (1e-07,)) def test_gfd1h_3(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd1h(-2.0, 0.0), 1.14587830800532503e-01, 1e-07) def test_vgfd1h_3(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd1h(numpy.array((-2.0,)), numpy.array((0.0,))), (1.14587830800532503e-01,), (1e-07,)) def test_gfd1h_4(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd1h(-1.0, 0.0), 2.90500913599971378e-01, 1e-07) def test_vgfd1h_4(self): with warnings.catch_warnings(): 
warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd1h(numpy.array((-1.0,)), numpy.array((0.0,))), (2.90500913599971378e-01,), (1e-07,)) def test_gfd1h_5(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd1h(0.0, 0.0), 6.78093935838734674e-01, 1e-07) def test_vgfd1h_5(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd1h(numpy.array((0.0,)), numpy.array((0.0,))), (6.78093935838734674e-01,), (1e-07,)) def test_gfd1h_6(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd1h(1.0, 0.0), 1.39637536444908084e+00, 1e-07) def test_vgfd1h_6(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd1h(numpy.array((1.0,)), numpy.array((0.0,))), (1.39637536444908084e+00,), (1e-07,)) def test_gfd1h_7(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd1h(4.0, 0.0), 5.77072687384848759e+00, 1e-07) def test_vgfd1h_7(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd1h(numpy.array((4.0,)), numpy.array((0.0,))), (5.77072687384848759e+00,), (1e-07,)) def test_gfd1h_8(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd1h(5.0, 0.0), 7.83797652757165952e+00, 1e-07) def test_vgfd1h_8(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd1h(numpy.array((5.0,)), numpy.array((0.0,))), (7.83797652757165952e+00,), (1e-07,)) def test_gfd1h_9(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd1h(7.0, 0.0), 1.26646383321303588e+01, 1e-07) def test_vgfd1h_9(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd1h(numpy.array((7.0,)), numpy.array((0.0,))), (1.26646383321303588e+01,), (1e-07,)) def test_gfd1h_10(self): with warnings.catch_warnings(): 
warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd1h(10.0, 0.0), 2.13444727730234760e+01, 1e-07) def test_vgfd1h_10(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd1h(numpy.array((10.0,)), numpy.array((0.0,))), (2.13444727730234760e+01,), (1e-07,)) def test_gfd1h_11(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd1h(15.0, 0.0), 3.89430489375154991e+01, 1e-07) def test_vgfd1h_11(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd1h(numpy.array((15.0,)), numpy.array((0.0,))), (3.89430489375154991e+01,), (1e-07,)) def test_gfd1h_12(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd1h(20.0, 0.0), 5.98127989591257574e+01, 1e-07) def test_vgfd1h_12(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd1h(numpy.array((20.0,)), numpy.array((0.0,))), (5.98127989591257574e+01,), (1e-07,)) def test_gfd1h_13(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd1h(30.0, 0.0), 1.09694824918955632e+02, 1e-07) def test_vgfd1h_13(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd1h(numpy.array((30.0,)), numpy.array((0.0,))), (1.09694824918955632e+02,), (1e-07,)) def test_gfd1h_14(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd1h(40.0, 0.0), 1.68784932721796395e+02, 1e-07) def test_vgfd1h_14(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd1h(numpy.array((40.0,)), numpy.array((0.0,))), (1.68784932721796395e+02,), (1e-07,)) def test_gfd1h_15(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd1h(50.0, 0.0), 2.35818629275001257e+02, 1e-07) def test_vgfd1h_15(self): with warnings.catch_warnings(): 
warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd1h(numpy.array((50.0,)), numpy.array((0.0,))), (2.35818629275001257e+02,), (1e-07,)) def test_gfd1h_16(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd1h(-50.0, 0.075), 1.75632472638411867e-22, 3e-08) def test_vgfd1h_16(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd1h(numpy.array((-50.0,)), numpy.array((0.075,))), (1.75632472638411867e-22,), (3e-08,)) def test_gfd1h_17(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd1h(-3.0, 0.075), 4.45693568806805954e-02, 3e-08) def test_vgfd1h_17(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd1h(numpy.array((-3.0,)), numpy.array((0.075,))), (4.45693568806805954e-02,), (3e-08,)) def test_gfd1h_18(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd1h(-2.0, 0.075), 1.17810625935642538e-01, 3e-08) def test_vgfd1h_18(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd1h(numpy.array((-2.0,)), numpy.array((0.075,))), (1.17810625935642538e-01,), (3e-08,)) def test_gfd1h_19(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd1h(-1.0, 0.075), 2.98945928877046907e-01, 3e-08) def test_vgfd1h_19(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd1h(numpy.array((-1.0,)), numpy.array((0.075,))), (2.98945928877046907e-01,), (3e-08,)) def test_gfd1h_20(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd1h(0.0, 0.075), 6.99200487338112686e-01, 2e-08) def test_vgfd1h_20(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd1h(numpy.array((0.0,)), numpy.array((0.075,))), (6.99200487338112686e-01,), (2e-08,)) def test_gfd1h_21(self): 
with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd1h(1.0, 0.075), 1.44502709099618265e+00, 2e-08) def test_vgfd1h_21(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd1h(numpy.array((1.0,)), numpy.array((0.075,))), (1.44502709099618265e+00,), (2e-08,)) def test_gfd1h_22(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd1h(4.0, 0.075), 6.08971762438002084e+00, 5e-08) def test_vgfd1h_22(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd1h(numpy.array((4.0,)), numpy.array((0.075,))), (6.08971762438002084e+00,), (5e-08,)) def test_gfd1h_23(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd1h(5.0, 0.075), 8.33885063738478394e+00, 1e-08) def test_vgfd1h_23(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd1h(numpy.array((5.0,)), numpy.array((0.075,))), (8.33885063738478394e+00,), (1e-08,)) def test_gfd1h_24(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd1h(7.0, 0.075), 1.37056355584969562e+01, 5e-09) def test_vgfd1h_24(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd1h(numpy.array((7.0,)), numpy.array((0.075,))), (1.37056355584969562e+01,), (5e-09,)) def test_gfd1h_25(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd1h(10.0, 0.075), 2.37044470618182537e+01, 2e-08) def test_vgfd1h_25(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd1h(numpy.array((10.0,)), numpy.array((0.075,))), (2.37044470618182537e+01,), (2e-08,)) def test_gfd1h_26(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd1h(15.0, 0.075), 4.50830486391759422e+01, 1e-08) def test_vgfd1h_26(self): with 
warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd1h(numpy.array((15.0,)), numpy.array((0.075,))), (4.50830486391759422e+01,), (1e-08,)) def test_gfd1h_27(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd1h(20.0, 0.075), 7.19794174887380933e+01, 2e-08) def test_vgfd1h_27(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd1h(numpy.array((20.0,)), numpy.array((0.075,))), (7.19794174887380933e+01,), (2e-08,)) def test_gfd1h_28(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd1h(30.0, 0.075), 1.41532187835316762e+02, 9e-09) def test_vgfd1h_28(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd1h(numpy.array((30.0,)), numpy.array((0.075,))), (1.41532187835316762e+02,), (9e-09,)) def test_gfd1h_29(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd1h(40.0, 0.075), 2.31477206375404734e+02, 1e-08) def test_vgfd1h_29(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd1h(numpy.array((40.0,)), numpy.array((0.075,))), (2.31477206375404734e+02,), (1e-08,)) def test_gfd1h_30(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd1h(50.0, 0.075), 3.41437761287440139e+02, 2e-09) def test_vgfd1h_30(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd1h(numpy.array((50.0,)), numpy.array((0.075,))), (3.41437761287440139e+02,), (2e-09,)) def test_gfd1h_31(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd1h(-50.0, 0.15), 1.80144534348771730e-22, 7e-08) def test_vgfd1h_31(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd1h(numpy.array((-50.0,)), numpy.array((0.15,))), (1.80144534348771730e-22,), 
# Auto-generated regression tests for gfd.gfd1h (generalized Fermi-Dirac
# integral, order 1/2). Each scalar test pins gfd1h(eta, theta) to a
# high-precision reference value with a per-case relative tolerance
# (self.assertRTOL); each paired "v" test exercises the vectorised numpy
# code path against the same reference (self.assert_all_rtol). Warnings are
# suppressed because some (eta, theta) corners emit accuracy warnings.
# NOTE(review): the reference literals appear to come from an external
# tabulation -- confirm against the generator script before editing any value.
(7e-08,)) def test_gfd1h_32(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd1h(-3.0, 0.15), 4.57236851642160644e-02, 7e-08) def test_vgfd1h_32(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd1h(numpy.array((-3.0,)), numpy.array((0.15,))), (4.57236851642160644e-02,), (7e-08,)) def test_gfd1h_33(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd1h(-2.0, 0.15), 1.20902196226494160e-01, 7e-08) def test_vgfd1h_33(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd1h(numpy.array((-2.0,)), numpy.array((0.15,))), (1.20902196226494160e-01,), (7e-08,)) def test_gfd1h_34(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd1h(-1.0, 0.15), 3.07041412974718042e-01, 8e-08) def test_vgfd1h_34(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd1h(numpy.array((-1.0,)), numpy.array((0.15,))), (3.07041412974718042e-01,), (8e-08,)) def test_gfd1h_35(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd1h(0.0, 0.15), 7.19401826377869313e-01, 9e-08) def test_vgfd1h_35(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd1h(numpy.array((0.0,)), numpy.array((0.15,))), (7.19401826377869313e-01,), (9e-08,)) def test_gfd1h_36(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd1h(1.0, 0.15), 1.49145549474437922e+00, 1e-07) def test_vgfd1h_36(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd1h(numpy.array((1.0,)), numpy.array((0.15,))), (1.49145549474437922e+00,), (1e-07,)) def test_gfd1h_37(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd1h(4.0, 0.15), 6.38891646674312330e+00, 1e-08) def 
test_vgfd1h_37(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd1h(numpy.array((4.0,)), numpy.array((0.15,))), (6.38891646674312330e+00,), (1e-08,)) def test_gfd1h_38(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd1h(5.0, 0.15), 8.80517851626515480e+00, 7e-09) def test_vgfd1h_38(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd1h(numpy.array((5.0,)), numpy.array((0.15,))), (8.80517851626515480e+00,), (7e-09,)) def test_gfd1h_39(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd1h(7.0, 0.15), 1.46599411190672146e+01, 4e-10) def test_vgfd1h_39(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd1h(numpy.array((7.0,)), numpy.array((0.15,))), (1.46599411190672146e+01,), (4e-10,)) def test_gfd1h_40(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd1h(10.0, 0.15), 2.58187406970339026e+01, 1e-08) def test_vgfd1h_40(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd1h(numpy.array((10.0,)), numpy.array((0.15,))), (2.58187406970339026e+01,), (1e-08,)) def test_gfd1h_41(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd1h(15.0, 0.15), 5.03946789953786265e+01, 6e-09) def test_vgfd1h_41(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd1h(numpy.array((15.0,)), numpy.array((0.15,))), (5.03946789953786265e+01,), (6e-09,)) def test_gfd1h_42(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd1h(20.0, 0.15), 8.21886508630633870e+01, 9e-09) def test_vgfd1h_42(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd1h(numpy.array((20.0,)), numpy.array((0.15,))), 
(8.21886508630633870e+01,), (9e-09,)) def test_gfd1h_43(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd1h(30.0, 0.15), 1.66943232233332822e+02, 1e-06) def test_vgfd1h_43(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd1h(numpy.array((30.0,)), numpy.array((0.15,))), (1.66943232233332822e+02,), (1e-06,)) def test_gfd1h_44(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd1h(40.0, 0.15), 2.79637310890885772e+02, 5e-04) def test_vgfd1h_44(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd1h(numpy.array((40.0,)), numpy.array((0.15,))), (2.79637310890885772e+02,), (5e-04,)) def test_gfd1h_45(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd1h(50.0, 0.15), 4.23027005180958383e+02, 1e-02) def test_vgfd1h_45(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd1h(numpy.array((50.0,)), numpy.array((0.15,))), (4.23027005180958383e+02,), (1e-02,)) def test_gfd1h_46(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd1h(-50.0, 0.225), 1.84493804131964310e-22, 2e-07) def test_vgfd1h_46(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd1h(numpy.array((-50.0,)), numpy.array((0.225,))), (1.84493804131964310e-22,), (2e-07,)) def test_gfd1h_47(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd1h(-3.0, 0.225), 4.68362035160664325e-02, 2e-07) def test_vgfd1h_47(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd1h(numpy.array((-3.0,)), numpy.array((0.225,))), (4.68362035160664325e-02,), (2e-07,)) def test_gfd1h_48(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd1h(-2.0, 0.225), 
1.23881072020770297e-01, 2e-07) def test_vgfd1h_48(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd1h(numpy.array((-2.0,)), numpy.array((0.225,))), (1.23881072020770297e-01,), (2e-07,)) def test_gfd1h_49(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd1h(-1.0, 0.225), 3.14837133747599474e-01, 2e-07) def test_vgfd1h_49(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd1h(numpy.array((-1.0,)), numpy.array((0.225,))), (3.14837133747599474e-01,), (2e-07,)) def test_gfd1h_50(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd1h(0.0, 0.225), 7.38829369987956230e-01, 1e-08) def test_vgfd1h_50(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd1h(numpy.array((0.0,)), numpy.array((0.225,))), (7.38829369987956230e-01,), (1e-08,)) def test_gfd1h_51(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd1h(1.0, 0.225), 1.53599522382729647e+00, 1e-08) def test_vgfd1h_51(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd1h(numpy.array((1.0,)), numpy.array((0.225,))), (1.53599522382729647e+00,), (1e-08,)) def test_gfd1h_52(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd1h(4.0, 0.225), 6.67198092070039817e+00, 4e-08) def test_vgfd1h_52(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd1h(numpy.array((4.0,)), numpy.array((0.225,))), (6.67198092070039817e+00,), (4e-08,)) def test_gfd1h_53(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd1h(5.0, 0.225), 9.24386249954516259e+00, 1e-09) def test_vgfd1h_53(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd1h(numpy.array((5.0,)), 
# gfd.gfd1h regression cases continued (theta = 0.225 and 0.3). Note the
# per-case relative tolerances grow sharply toward the strongly degenerate,
# relativistic corner of the table (e.g. up to ~7e+01 at eta=50, theta=0.3),
# where the underlying approximation is least accurate -- the generated
# tolerances encode that, so do not tighten them by hand.
numpy.array((0.225,))), (9.24386249954516259e+00,), (1e-09,)) def test_gfd1h_54(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd1h(7.0, 0.225), 1.55475406671984615e+01, 4e-09) def test_vgfd1h_54(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd1h(numpy.array((7.0,)), numpy.array((0.225,))), (1.55475406671984615e+01,), (4e-09,)) def test_gfd1h_55(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd1h(10.0, 0.225), 2.77546203959929692e+01, 5e-09) def test_vgfd1h_55(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd1h(numpy.array((10.0,)), numpy.array((0.225,))), (2.77546203959929692e+01,), (5e-09,)) def test_gfd1h_56(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd1h(15.0, 0.225), 5.51542415233834475e+01, 5e-09) def test_vgfd1h_56(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd1h(numpy.array((15.0,)), numpy.array((0.225,))), (5.51542415233834475e+01,), (5e-09,)) def test_gfd1h_57(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd1h(20.0, 0.225), 9.11844155758273871e+01, 6e-06) def test_vgfd1h_57(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd1h(numpy.array((20.0,)), numpy.array((0.225,))), (9.11844155758273871e+01,), (6e-06,)) def test_gfd1h_58(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd1h(30.0, 0.225), 1.89190523435314418e+02, 4e-03) def test_vgfd1h_58(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd1h(numpy.array((30.0,)), numpy.array((0.225,))), (1.89190523435314418e+02,), (4e-03,)) def test_gfd1h_59(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") 
self.assertRTOL(gfd.gfd1h(40.0, 0.225), 3.51352594869688005e+02, 2e-01) def test_vgfd1h_59(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd1h(numpy.array((40.0,)), numpy.array((0.225,))), (3.51352594869688005e+02,), (2e-01,)) def test_gfd1h_60(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd1h(50.0, 0.225), 1.16174735294552215e+03, 3e+00) def test_vgfd1h_60(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd1h(numpy.array((50.0,)), numpy.array((0.225,))), (1.16174735294552215e+03,), (3e+00,)) def test_gfd1h_61(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd1h(-50.0, 0.3), 1.88700467882265615e-22, 1e-08) def test_vgfd1h_61(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd1h(numpy.array((-50.0,)), numpy.array((0.3,))), (1.88700467882265615e-22,), (1e-08,)) def test_gfd1h_62(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd1h(-3.0, 0.3), 4.79121065683675990e-02, 1e-08) def test_vgfd1h_62(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd1h(numpy.array((-3.0,)), numpy.array((0.3,))), (4.79121065683675990e-02,), (1e-08,)) def test_gfd1h_63(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd1h(-2.0, 0.3), 1.26761300940476579e-01, 1e-08) def test_vgfd1h_63(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd1h(numpy.array((-2.0,)), numpy.array((0.3,))), (1.26761300940476579e-01,), (1e-08,)) def test_gfd1h_64(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd1h(-1.0, 0.3), 3.22370766316044888e-01, 1e-08) def test_vgfd1h_64(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") 
self.assert_all_rtol(gfd.gfd1h(numpy.array((-1.0,)), numpy.array((0.3,))), (3.22370766316044888e-01,), (1e-08,)) def test_gfd1h_65(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd1h(0.0, 0.3), 7.57581974572145267e-01, 2e-08) def test_vgfd1h_65(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd1h(numpy.array((0.0,)), numpy.array((0.3,))), (7.57581974572145267e-01,), (2e-08,)) def test_gfd1h_66(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd1h(1.0, 0.3), 1.57889622462815948e+00, 2e-08) def test_vgfd1h_66(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd1h(numpy.array((1.0,)), numpy.array((0.3,))), (1.57889622462815948e+00,), (2e-08,)) def test_gfd1h_67(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd1h(4.0, 0.3), 6.94149732106426764e+00, 1e-07) def test_vgfd1h_67(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd1h(numpy.array((4.0,)), numpy.array((0.3,))), (6.94149732106426764e+00,), (1e-07,)) def test_gfd1h_68(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd1h(5.0, 0.3), 9.65966213111688532e+00, 5e-09) def test_vgfd1h_68(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd1h(numpy.array((5.0,)), numpy.array((0.3,))), (9.65966213111688532e+00,), (5e-09,)) def test_gfd1h_69(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd1h(7.0, 0.3), 1.63814857356260859e+01, 2e-09) def test_vgfd1h_69(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd1h(numpy.array((7.0,)), numpy.array((0.3,))), (1.63814857356260859e+01,), (2e-09,)) def test_gfd1h_70(self): with warnings.catch_warnings(): 
warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd1h(10.0, 0.3), 2.95526013663800313e+01, 4e-07) def test_vgfd1h_70(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd1h(numpy.array((10.0,)), numpy.array((0.3,))), (2.95526013663800313e+01,), (4e-07,)) def test_gfd1h_71(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd1h(15.0, 0.3), 5.95102317520063906e+01, 3e-05) def test_vgfd1h_71(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd1h(numpy.array((15.0,)), numpy.array((0.3,))), (5.95102317520063906e+01,), (3e-05,)) def test_gfd1h_72(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd1h(20.0, 0.3), 9.93880241203347907e+01, 1e-03) def test_vgfd1h_72(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd1h(numpy.array((20.0,)), numpy.array((0.3,))), (9.93880241203347907e+01,), (1e-03,)) def test_gfd1h_73(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd1h(30.0, 0.3), 2.31853742532847605e+02, 2e-01) def test_vgfd1h_73(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd1h(numpy.array((30.0,)), numpy.array((0.3,))), (2.31853742532847605e+02,), (2e-01,)) def test_gfd1h_74(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd1h(40.0, 0.3), 1.46480797069886626e+03, 6e+00) def test_vgfd1h_74(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd1h(numpy.array((40.0,)), numpy.array((0.3,))), (1.46480797069886626e+03,), (6e+00,)) def test_gfd1h_75(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd1h(50.0, 0.3), 1.92572027975448000e+04, 7e+01) def test_vgfd1h_75(self): with warnings.catch_warnings(): 
# End of the gfd.gfd1h series; partway through this span the generated
# suite switches to gfd.gfd3h (generalized Fermi-Dirac integral, order 3/2),
# using the same scalar/vectorised test pairing and the same tabulated
# reference-value-plus-tolerance scheme. theta=0.0 is the non-relativistic
# limit; subsequent groups step theta upward.
warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd1h(numpy.array((50.0,)), numpy.array((0.3,))), (1.92572027975448000e+04,), (7e+01,)) def test_gfd3h_1(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd3h(-50.0, 0.0), 2.56396522543071034e-22, 1e-07) def test_vgfd3h_1(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd3h(numpy.array((-50.0,)), numpy.array((0.0,))), (2.56396522543071034e-22,), (1e-07,)) def test_gfd3h_2(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd3h(-3.0, 0.0), 6.56117427430797789e-02, 1e-07) def test_vgfd3h_2(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd3h(numpy.array((-3.0,)), numpy.array((0.0,))), (6.56117427430797789e-02,), (1e-07,)) def test_gfd3h_3(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd3h(-2.0, 0.0), 1.75800999402072250e-01, 1e-07) def test_vgfd3h_3(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd3h(numpy.array((-2.0,)), numpy.array((0.0,))), (1.75800999402072250e-01,), (1e-07,)) def test_gfd3h_4(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd3h(-1.0, 0.0), 4.60848833941030100e-01, 1e-07) def test_vgfd3h_4(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd3h(numpy.array((-1.0,)), numpy.array((0.0,))), (4.60848833941030100e-01,), (1e-07,)) def test_gfd3h_5(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd3h(0.0, 0.0), 1.15280390625659157e+00, 1e-07) def test_vgfd3h_5(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd3h(numpy.array((0.0,)), numpy.array((0.0,))), (1.15280390625659157e+00,), (1e-07,)) def test_gfd3h_6(self): with 
warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd3h(1.0, 0.0), 2.66168278443296202e+00, 1e-07) def test_vgfd3h_6(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd3h(numpy.array((1.0,)), numpy.array((0.0,))), (2.66168278443296202e+00,), (1e-07,)) def test_gfd3h_7(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd3h(4.0, 0.0), 1.76277035587071147e+01, 1e-07) def test_vgfd3h_7(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd3h(numpy.array((4.0,)), numpy.array((0.0,))), (1.76277035587071147e+01,), (1e-07,)) def test_gfd3h_8(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd3h(5.0, 0.0), 2.78024478838951552e+01, 1e-07) def test_vgfd3h_8(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd3h(numpy.array((5.0,)), numpy.array((0.0,))), (2.78024478838951552e+01,), (1e-07,)) def test_gfd3h_9(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd3h(7.0, 0.0), 5.83422205422022486e+01, 1e-07) def test_vgfd3h_9(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd3h(numpy.array((7.0,)), numpy.array((0.0,))), (5.83422205422022486e+01,), (1e-07,)) def test_gfd3h_10(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd3h(10.0, 0.0), 1.34270168019349427e+02, 1e-07) def test_vgfd3h_10(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd3h(numpy.array((10.0,)), numpy.array((0.0,))), (1.34270168019349427e+02,), (1e-07,)) def test_gfd3h_11(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd3h(15.0, 0.0), 3.58112269195261376e+02, 1e-07) def test_vgfd3h_11(self): with warnings.catch_warnings(): 
warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd3h(numpy.array((15.0,)), numpy.array((0.0,))), (3.58112269195261376e+02,), (1e-07,)) def test_gfd3h_12(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd3h(20.0, 0.0), 7.26568327559272234e+02, 1e-07) def test_vgfd3h_12(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd3h(numpy.array((20.0,)), numpy.array((0.0,))), (7.26568327559272234e+02,), (1e-07,)) def test_gfd3h_13(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd3h(30.0, 0.0), 1.98531149686472122e+03, 1e-07) def test_vgfd3h_13(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd3h(numpy.array((30.0,)), numpy.array((0.0,))), (1.98531149686472122e+03,), (1e-07,)) def test_gfd3h_14(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd3h(40.0, 0.0), 4.06331804904606452e+03, 1e-07) def test_vgfd3h_14(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd3h(numpy.array((40.0,)), numpy.array((0.0,))), (4.06331804904606452e+03,), (1e-07,)) def test_gfd3h_15(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd3h(50.0, 0.0), 7.08851338555907569e+03, 1e-07) def test_vgfd3h_15(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd3h(numpy.array((50.0,)), numpy.array((0.0,))), (7.08851338555907569e+03,), (1e-07,)) def test_gfd3h_16(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd3h(-50.0, 0.075), 2.68050319568913796e-22, 5e-08) def test_vgfd3h_16(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd3h(numpy.array((-50.0,)), numpy.array((0.075,))), (2.68050319568913796e-22,), (5e-08,)) def test_gfd3h_17(self): with 
# gfd.gfd3h regression cases continued (theta = 0.075 and 0.15), same
# scalar/vectorised pairing as above. Reference values and per-case relative
# tolerances are machine-generated; edit only via the generator.
warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd3h(-3.0, 0.075), 6.86066750207212639e-02, 5e-08) def test_vgfd3h_17(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd3h(numpy.array((-3.0,)), numpy.array((0.075,))), (6.86066750207212639e-02,), (5e-08,)) def test_gfd3h_18(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd3h(-2.0, 0.075), 1.83881988194320833e-01, 5e-08) def test_vgfd3h_18(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd3h(numpy.array((-2.0,)), numpy.array((0.075,))), (1.83881988194320833e-01,), (5e-08,)) def test_gfd3h_19(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd3h(-1.0, 0.075), 4.82401060953742189e-01, 5e-08) def test_vgfd3h_19(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd3h(numpy.array((-1.0,)), numpy.array((0.075,))), (4.82401060953742189e-01,), (5e-08,)) def test_gfd3h_20(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd3h(0.0, 0.075), 1.20878657947807766e+00, 5e-08) def test_vgfd3h_20(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd3h(numpy.array((0.0,)), numpy.array((0.075,))), (1.20878657947807766e+00,), (5e-08,)) def test_gfd3h_21(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd3h(1.0, 0.075), 2.80000794588478330e+00, 6e-08) def test_vgfd3h_21(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd3h(numpy.array((1.0,)), numpy.array((0.075,))), (2.80000794588478330e+00,), (6e-08,)) def test_gfd3h_22(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd3h(4.0, 0.075), 1.89101326780703793e+01, 3e-08) def test_vgfd3h_22(self): with 
warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd3h(numpy.array((4.0,)), numpy.array((0.075,))), (1.89101326780703793e+01,), (3e-08,)) def test_gfd3h_23(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd3h(5.0, 0.075), 3.00840492504489951e+01, 1e-08) def test_vgfd3h_23(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd3h(numpy.array((5.0,)), numpy.array((0.075,))), (3.00840492504489951e+01,), (1e-08,)) def test_gfd3h_24(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd3h(7.0, 0.075), 6.43119244745047070e+01, 1e-08) def test_vgfd3h_24(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd3h(numpy.array((7.0,)), numpy.array((0.075,))), (6.43119244745047070e+01,), (1e-08,)) def test_gfd3h_25(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd3h(10.0, 0.075), 1.52325523752477324e+02, 3e-08) def test_vgfd3h_25(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd3h(numpy.array((10.0,)), numpy.array((0.075,))), (1.52325523752477324e+02,), (3e-08,)) def test_gfd3h_26(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd3h(15.0, 0.075), 4.25678129558724834e+02, 2e-08) def test_vgfd3h_26(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd3h(numpy.array((15.0,)), numpy.array((0.075,))), (4.25678129558724834e+02,), (2e-08,)) def test_gfd3h_27(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd3h(20.0, 0.075), 9.02150729494757911e+02, 3e-08) def test_vgfd3h_27(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd3h(numpy.array((20.0,)), numpy.array((0.075,))), (9.02150729494757911e+02,), (3e-08,)) 
def test_gfd3h_28(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd3h(30.0, 0.075), 2.66501589243813351e+03, 1e-08) def test_vgfd3h_28(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd3h(numpy.array((30.0,)), numpy.array((0.075,))), (2.66501589243813351e+03,), (1e-08,)) def test_gfd3h_29(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd3h(40.0, 0.075), 5.83652161907989921e+03, 2e-08) def test_vgfd3h_29(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd3h(numpy.array((40.0,)), numpy.array((0.075,))), (5.83652161907989921e+03,), (2e-08,)) def test_gfd3h_30(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd3h(50.0, 0.075), 1.08078726745106324e+04, 4e-09) def test_vgfd3h_30(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd3h(numpy.array((50.0,)), numpy.array((0.075,))), (1.08078726745106324e+04,), (4e-09,)) def test_gfd3h_31(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd3h(-50.0, 0.15), 2.79070532998296774e-22, 5e-10) def test_vgfd3h_31(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd3h(numpy.array((-50.0,)), numpy.array((0.15,))), (2.79070532998296774e-22,), (5e-10,)) def test_gfd3h_32(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd3h(-3.0, 0.15), 7.14384547295619787e-02, 5e-10) def test_vgfd3h_32(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd3h(numpy.array((-3.0,)), numpy.array((0.15,))), (7.14384547295619787e-02,), (5e-10,)) def test_gfd3h_33(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd3h(-2.0, 0.15), 1.91521288044533333e-01, 6e-10) def 
(3.21851009539578143e+01,), (1e-08,)) def test_gfd3h_39(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd3h(7.0, 0.15), 6.97256534740510574e+01, 7e-09) def test_vgfd3h_39(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd3h(numpy.array((7.0,)), numpy.array((0.15,))), (6.97256534740510574e+01,), (7e-09,)) def test_gfd3h_40(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd3h(10.0, 0.15), 1.68320698038579991e+02, 1e-08) def test_vgfd3h_40(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd3h(numpy.array((10.0,)), numpy.array((0.15,))), (1.68320698038579991e+02,), (1e-08,)) def test_gfd3h_41(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd3h(15.0, 0.15), 4.83402429203509769e+02, 9e-09) def test_vgfd3h_41(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd3h(numpy.array((15.0,)), numpy.array((0.15,))), (4.83402429203509769e+02,), (9e-09,)) def test_gfd3h_42(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd3h(20.0, 0.15), 1.04746266604618586e+03, 1e-08) def test_vgfd3h_42(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd3h(numpy.array((20.0,)), numpy.array((0.15,))), (1.04746266604618586e+03,), (1e-08,)) def test_gfd3h_43(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd3h(30.0, 0.15), 3.19891327460370758e+03, 2e-06) def test_vgfd3h_43(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd3h(numpy.array((30.0,)), numpy.array((0.15,))), (3.19891327460370758e+03,), (2e-06,)) def test_gfd3h_44(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd3h(40.0, 0.15), 
7.17577763915706964e+03, 7e-04) def test_vgfd3h_44(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd3h(numpy.array((40.0,)), numpy.array((0.15,))), (7.17577763915706964e+03,), (7e-04,)) def test_gfd3h_45(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd3h(50.0, 0.15), 1.36710648966167355e+04, 2e-02) def test_vgfd3h_45(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd3h(numpy.array((50.0,)), numpy.array((0.15,))), (1.36710648966167355e+04,), (2e-02,)) def test_gfd3h_46(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd3h(-50.0, 0.225), 2.89565826333962415e-22, 6e-08) def test_vgfd3h_46(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd3h(numpy.array((-50.0,)), numpy.array((0.225,))), (2.89565826333962415e-22,), (6e-08,)) def test_gfd3h_47(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd3h(-3.0, 0.225), 7.41350911550967828e-02, 6e-08) def test_vgfd3h_47(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd3h(numpy.array((-3.0,)), numpy.array((0.225,))), (7.41350911550967828e-02,), (6e-08,)) def test_gfd3h_48(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd3h(-2.0, 0.225), 1.98794851403979123e-01, 6e-08) def test_vgfd3h_48(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd3h(numpy.array((-2.0,)), numpy.array((0.225,))), (1.98794851403979123e-01,), (6e-08,)) def test_gfd3h_49(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd3h(-1.0, 0.225), 5.22146878499213152e-01, 6e-08) def test_vgfd3h_49(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") 
self.assert_all_rtol(gfd.gfd3h(numpy.array((-1.0,)), numpy.array((0.225,))), (5.22146878499213152e-01,), (6e-08,)) def test_gfd3h_50(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd3h(0.0, 0.225), 1.31186401823519261e+00, 7e-08) def test_vgfd3h_50(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd3h(numpy.array((0.0,)), numpy.array((0.225,))), (1.31186401823519261e+00,), (7e-08,)) def test_gfd3h_51(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd3h(1.0, 0.225), 3.05389390748236655e+00, 7e-08) def test_vgfd3h_51(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd3h(numpy.array((1.0,)), numpy.array((0.225,))), (3.05389390748236655e+00,), (7e-08,)) def test_gfd3h_52(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd3h(4.0, 0.225), 2.12149012797099559e+01, 8e-09) def test_vgfd3h_52(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd3h(numpy.array((4.0,)), numpy.array((0.225,))), (2.12149012797099559e+01,), (8e-09,)) def test_gfd3h_53(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd3h(5.0, 0.225), 3.41451325993784280e+01, 4e-09) def test_vgfd3h_53(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd3h(numpy.array((5.0,)), numpy.array((0.225,))), (3.41451325993784280e+01,), (4e-09,)) def test_gfd3h_54(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd3h(7.0, 0.225), 7.47210028844568797e+01, 1e-08) def test_vgfd3h_54(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd3h(numpy.array((7.0,)), numpy.array((0.225,))), (7.47210028844568797e+01,), (1e-08,)) def test_gfd3h_55(self): with warnings.catch_warnings(): 
warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd3h(10.0, 0.225), 1.82851523256851323e+02, 3e-09) def test_vgfd3h_55(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd3h(numpy.array((10.0,)), numpy.array((0.225,))), (1.82851523256851323e+02,), (3e-09,)) def test_gfd3h_56(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd3h(15.0, 0.225), 5.34712394088538190e+02, 2e-07) def test_vgfd3h_56(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd3h(numpy.array((15.0,)), numpy.array((0.225,))), (5.34712394088538190e+02,), (2e-07,)) def test_gfd3h_57(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd3h(20.0, 0.225), 1.17445243366865725e+03, 1e-05) def test_vgfd3h_57(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd3h(numpy.array((20.0,)), numpy.array((0.225,))), (1.17445243366865725e+03,), (1e-05,)) def test_gfd3h_58(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd3h(30.0, 0.225), 3.66617542497532895e+03, 7e-03) def test_vgfd3h_58(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd3h(numpy.array((30.0,)), numpy.array((0.225,))), (3.66617542497532895e+03,), (7e-03,)) def test_gfd3h_59(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd3h(40.0, 0.225), 9.49800336542126570e+03, 3e-01) def test_vgfd3h_59(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd3h(numpy.array((40.0,)), numpy.array((0.225,))), (9.49800336542126570e+03,), (3e-01,)) def test_gfd3h_60(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd3h(50.0, 0.225), 4.78840107064072945e+04, 4e+00) def test_vgfd3h_60(self): with warnings.catch_warnings(): 
warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd3h(numpy.array((50.0,)), numpy.array((0.225,))), (4.78840107064072945e+04,), (4e+00,)) def test_gfd3h_61(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd3h(-50.0, 0.3), 2.99614222780075374e-22, 1e-07) def test_vgfd3h_61(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd3h(numpy.array((-50.0,)), numpy.array((0.3,))), (2.99614222780075374e-22,), (1e-07,)) def test_gfd3h_62(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd3h(-3.0, 0.3), 7.67166915520561821e-02, 1e-07) def test_vgfd3h_62(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd3h(numpy.array((-3.0,)), numpy.array((0.3,))), (7.67166915520561821e-02,), (1e-07,)) def test_gfd3h_63(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd3h(-2.0, 0.3), 2.05757183974466101e-01, 2e-07) def test_vgfd3h_63(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd3h(numpy.array((-2.0,)), numpy.array((0.3,))), (2.05757183974466101e-01,), (2e-07,)) def test_gfd3h_64(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd3h(-1.0, 0.3), 5.40692695085241315e-01, 2e-07) def test_vgfd3h_64(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd3h(numpy.array((-1.0,)), numpy.array((0.3,))), (5.40692695085241315e-01,), (2e-07,)) def test_gfd3h_65(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd3h(0.0, 0.3), 1.35990014077538435e+00, 8e-09) def test_vgfd3h_65(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd3h(numpy.array((0.0,)), numpy.array((0.3,))), (1.35990014077538435e+00,), (8e-09,)) def test_gfd3h_66(self): with 
warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd3h(1.0, 0.3), 3.17191463518228423e+00, 8e-09) def test_vgfd3h_66(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd3h(numpy.array((1.0,)), numpy.array((0.3,))), (3.17191463518228423e+00,), (8e-09,)) def test_gfd3h_67(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd3h(4.0, 0.3), 2.22694509399631286e+01, 3e-08) def test_vgfd3h_67(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd3h(numpy.array((4.0,)), numpy.array((0.3,))), (2.22694509399631286e+01,), (3e-08,)) def test_gfd3h_68(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd3h(5.0, 0.3), 3.59905469435718572e+01, 5e-08) def test_vgfd3h_68(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd3h(numpy.array((5.0,)), numpy.array((0.3,))), (3.59905469435718572e+01,), (5e-08,)) def test_gfd3h_69(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd3h(7.0, 0.3), 7.93853531162478987e+01, 1e-07) def test_vgfd3h_69(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd3h(numpy.array((7.0,)), numpy.array((0.3,))), (7.93853531162478987e+01,), (1e-07,)) def test_gfd3h_70(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd3h(10.0, 0.3), 1.96267893099285288e+02, 1e-06) def test_vgfd3h_70(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd3h(numpy.array((10.0,)), numpy.array((0.3,))), (1.96267893099285288e+02,), (1e-06,)) def test_gfd3h_71(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd3h(15.0, 0.3), 5.81410456309328310e+02, 5e-05) def test_vgfd3h_71(self): with warnings.catch_warnings(): 
warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd3h(numpy.array((15.0,)), numpy.array((0.3,))), (5.81410456309328310e+02,), (5e-05,)) def test_gfd3h_72(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd3h(20.0, 0.3), 1.29011458309667273e+03, 2e-03) def test_vgfd3h_72(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd3h(numpy.array((20.0,)), numpy.array((0.3,))), (1.29011458309667273e+03,), (2e-03,)) def test_gfd3h_73(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd3h(30.0, 0.3), 4.75831799589709590e+03, 3e-01) def test_vgfd3h_73(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd3h(numpy.array((30.0,)), numpy.array((0.3,))), (4.75831799589709590e+03,), (3e-01,)) def test_gfd3h_74(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd3h(40.0, 0.3), 5.17342847313166931e+04, 9e+00) def test_vgfd3h_74(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd3h(numpy.array((40.0,)), numpy.array((0.3,))), (5.17342847313166931e+04,), (9e+00,)) def test_gfd3h_75(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd3h(50.0, 0.3), 9.00261010849752696e+05, 1e+02) def test_vgfd3h_75(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd3h(numpy.array((50.0,)), numpy.array((0.3,))), (9.00261010849752696e+05,), (1e+02,)) def test_gfd5h_1(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd5h(-50.0, 0.0), 6.40991306357677657e-22, 1e-07) def test_vgfd5h_1(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd5h(numpy.array((-50.0,)), numpy.array((0.0,))), (6.40991306357677657e-22,), (1e-07,)) def test_gfd5h_2(self): with 
warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd5h(-3.0, 0.0), 1.64740403616628783e-01, 1e-07) def test_vgfd5h_2(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd5h(numpy.array((-3.0,)), numpy.array((0.0,))), (1.64740403616628783e-01,), (1e-07,)) def test_gfd5h_3(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd5h(-2.0, 0.0), 4.44554480132030305e-01, 1e-07) def test_vgfd5h_3(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd5h(numpy.array((-2.0,)), numpy.array((0.0,))), (4.44554480132030305e-01,), (1e-07,)) def test_gfd5h_4(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd5h(-1.0, 0.0), 1.18596824660155797e+00, 1e-07) def test_vgfd5h_4(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd5h(numpy.array((-1.0,)), numpy.array((0.0,))), (1.18596824660155797e+00,), (1e-07,)) def test_gfd5h_5(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd5h(0.0, 0.0), 3.08258626779258371e+00, 1e-07) def test_vgfd5h_5(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd5h(numpy.array((0.0,)), numpy.array((0.0,))), (3.08258626779258371e+00,), (1e-07,)) def test_gfd5h_6(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd5h(1.0, 0.0), 7.62653581259771762e+00, 1e-07) def test_vgfd5h_6(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd5h(numpy.array((1.0,)), numpy.array((0.0,))), (7.62653581259771762e+00,), (1e-07,)) def test_gfd5h_7(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd5h(4.0, 0.0), 7.13480566047064997e+01, 1e-07) def test_vgfd5h_7(self): with warnings.catch_warnings(): 
warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd5h(numpy.array((4.0,)), numpy.array((0.0,))), (7.13480566047064997e+01,), (1e-07,)) def test_gfd5h_8(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd5h(5.0, 0.0), 1.27489552562595904e+02, 1e-07) def test_vgfd5h_8(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd5h(numpy.array((5.0,)), numpy.array((0.0,))), (1.27489552562595904e+02,), (1e-07,)) def test_gfd5h_9(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd5h(7.0, 0.0), 3.36814345132562380e+02, 1e-07) def test_vgfd5h_9(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd5h(numpy.array((7.0,)), numpy.array((0.0,))), (3.36814345132562380e+02,), (1e-07,)) def test_gfd5h_10(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd5h(10.0, 0.0), 1.03468431626258916e+03, 1e-07) def test_vgfd5h_10(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd5h(numpy.array((10.0,)), numpy.array((0.0,))), (1.03468431626258916e+03,), (1e-07,)) def test_gfd5h_11(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd5h(15.0, 0.0), 3.97448812041092924e+03, 1e-07) def test_vgfd5h_11(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd5h(numpy.array((15.0,)), numpy.array((0.0,))), (3.97448812041092924e+03,), (1e-07,)) def test_gfd5h_12(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd5h(20.0, 0.0), 1.05906398120527356e+04, 1e-07) def test_vgfd5h_12(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd5h(numpy.array((20.0,)), numpy.array((0.0,))), (1.05906398120527356e+04,), (1e-07,)) def test_gfd5h_13(self): with 
warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd5h(30.0, 0.0), 4.29292601608553960e+04, 1e-07) def test_vgfd5h_13(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd5h(numpy.array((30.0,)), numpy.array((0.0,))), (4.29292601608553960e+04,), (1e-07,)) def test_gfd5h_14(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd5h(40.0, 0.0), 1.16689928020525942e+05, 1e-07) def test_vgfd5h_14(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd5h(numpy.array((40.0,)), numpy.array((0.0,))), (1.16689928020525942e+05,), (1e-07,)) def test_gfd5h_15(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd5h(50.0, 0.0), 2.53992583816560684e+05, 1e-07) def test_vgfd5h_15(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd5h(numpy.array((50.0,)), numpy.array((0.0,))), (2.53992583816560684e+05,), (1e-07,)) def test_gfd5h_16(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd5h(-50.0, 0.075), 6.81441293231909427e-22, 2e-08) def test_vgfd5h_16(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd5h(numpy.array((-50.0,)), numpy.array((0.075,))), (6.81441293231909427e-22,), (2e-08,)) def test_gfd5h_17(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd5h(-3.0, 0.075), 1.75158585864029048e-01, 2e-08) def test_vgfd5h_17(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd5h(numpy.array((-3.0,)), numpy.array((0.075,))), (1.75158585864029048e-01,), (2e-08,)) def test_gfd5h_18(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd5h(-2.0, 0.075), 4.72767843554117995e-01, 2e-08) def test_vgfd5h_18(self): with 
warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd5h(numpy.array((-2.0,)), numpy.array((0.075,))), (4.72767843554117995e-01,), (2e-08,)) def test_gfd5h_19(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd5h(-1.0, 0.075), 1.26191167665433168e+00, 3e-08) def test_vgfd5h_19(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd5h(numpy.array((-1.0,)), numpy.array((0.075,))), (1.26191167665433168e+00,), (3e-08,)) def test_gfd5h_20(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd5h(0.0, 0.075), 3.28408317536401162e+00, 3e-08) def test_vgfd5h_20(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd5h(numpy.array((0.0,)), numpy.array((0.075,))), (3.28408317536401162e+00,), (3e-08,)) def test_gfd5h_21(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd5h(1.0, 0.075), 8.14550605882054413e+00, 3e-08) def test_vgfd5h_21(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd5h(numpy.array((1.0,)), numpy.array((0.075,))), (8.14550605882054413e+00,), (3e-08,)) def test_gfd5h_22(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd5h(4.0, 0.075), 7.75426595369294631e+01, 1e-08) def test_vgfd5h_22(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd5h(numpy.array((4.0,)), numpy.array((0.075,))), (7.75426595369294631e+01,), (1e-08,)) def test_gfd5h_23(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd5h(5.0, 0.075), 1.39718804751399517e+02, 2e-09) def test_vgfd5h_23(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd5h(numpy.array((5.0,)), numpy.array((0.075,))), (1.39718804751399517e+02,), (2e-09,)) def 
test_gfd5h_24(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd5h(7.0, 0.075), 3.76009161636877081e+02, 2e-08) def test_vgfd5h_24(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd5h(numpy.array((7.0,)), numpy.array((0.075,))), (3.76009161636877081e+02,), (2e-08,)) def test_gfd5h_25(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd5h(10.0, 0.075), 1.18961530804305448e+03, 3e-08) def test_vgfd5h_25(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd5h(numpy.array((10.0,)), numpy.array((0.075,))), (1.18961530804305448e+03,), (3e-08,)) def test_gfd5h_26(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd5h(15.0, 0.075), 4.79728462444000888e+03, 3e-08) def test_vgfd5h_26(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd5h(numpy.array((15.0,)), numpy.array((0.075,))), (4.79728462444000888e+03,), (3e-08,)) def test_gfd5h_27(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd5h(20.0, 0.075), 1.33810484383240291e+04, 4e-08) def test_vgfd5h_27(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd5h(numpy.array((20.0,)), numpy.array((0.075,))), (1.33810484383240291e+04,), (4e-08,)) def test_gfd5h_28(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd5h(30.0, 0.075), 5.88614674968122199e+04, 1e-08) def test_vgfd5h_28(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd5h(numpy.array((30.0,)), numpy.array((0.075,))), (5.88614674968122199e+04,), (1e-08,)) def test_gfd5h_29(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd5h(40.0, 0.075), 1.71730807793203887e+05, 2e-05) def 
test_vgfd5h_29(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd5h(numpy.array((40.0,)), numpy.array((0.075,))), (1.71730807793203887e+05,), (2e-05,)) def test_gfd5h_30(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd5h(50.0, 0.075), 3.97844914563980128e+05, 4e-04) def test_vgfd5h_30(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd5h(numpy.array((50.0,)), numpy.array((0.075,))), (3.97844914563980128e+05,), (4e-04,)) def test_gfd5h_31(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd5h(-50.0, 0.15), 7.19161706636993687e-22, 6e-08) def test_vgfd5h_31(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd5h(numpy.array((-50.0,)), numpy.array((0.15,))), (7.19161706636993687e-22,), (6e-08,)) def test_gfd5h_32(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd5h(-3.0, 0.15), 1.84873049884995505e-01, 6e-08) def test_vgfd5h_32(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd5h(numpy.array((-3.0,)), numpy.array((0.15,))), (1.84873049884995505e-01,), (6e-08,)) def test_gfd5h_33(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd5h(-2.0, 0.15), 4.99072306661250686e-01, 6e-08) def test_vgfd5h_33(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd5h(numpy.array((-2.0,)), numpy.array((0.15,))), (4.99072306661250686e-01,), (6e-08,)) def test_gfd5h_34(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd5h(-1.0, 0.15), 1.33269479686240855e+00, 6e-08) def test_vgfd5h_34(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd5h(numpy.array((-1.0,)), numpy.array((0.15,))), 
(1.33269479686240855e+00,), (6e-08,)) def test_gfd5h_35(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd5h(0.0, 0.15), 3.47174786148591163e+00, 7e-08) def test_vgfd5h_35(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd5h(numpy.array((0.0,)), numpy.array((0.15,))), (3.47174786148591163e+00,), (7e-08,)) def test_gfd5h_36(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd5h(1.0, 0.15), 8.62808262870235154e+00, 7e-08) def test_vgfd5h_36(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd5h(numpy.array((1.0,)), numpy.array((0.15,))), (8.62808262870235154e+00,), (7e-08,)) def test_gfd5h_37(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd5h(4.0, 0.15), 8.32308639722571826e+01, 2e-07) def test_vgfd5h_37(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd5h(numpy.array((4.0,)), numpy.array((0.15,))), (8.32308639722571826e+01,), (2e-07,)) def test_gfd5h_38(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd5h(5.0, 0.15), 1.50876251223504482e+02, 1e-08) def test_vgfd5h_38(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd5h(numpy.array((5.0,)), numpy.array((0.15,))), (1.50876251223504482e+02,), (1e-08,)) def test_gfd5h_39(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd5h(7.0, 0.15), 4.11253503283934208e+02, 2e-08) def test_vgfd5h_39(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd5h(numpy.array((7.0,)), numpy.array((0.15,))), (4.11253503283934208e+02,), (2e-08,)) def test_gfd5h_40(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd5h(10.0, 0.15), 
1.32576412217039683e+03, 5e-08) def test_vgfd5h_40(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd5h(numpy.array((10.0,)), numpy.array((0.15,))), (1.32576412217039683e+03,), (5e-08,)) def test_gfd5h_41(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd5h(15.0, 0.15), 5.49445385756848918e+03, 2e-06) def test_vgfd5h_41(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd5h(numpy.array((15.0,)), numpy.array((0.15,))), (5.49445385756848918e+03,), (2e-06,)) def test_gfd5h_42(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd5h(20.0, 0.15), 1.56707076932394375e+04, 5e-05) def test_vgfd5h_42(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd5h(numpy.array((20.0,)), numpy.array((0.15,))), (1.56707076932394375e+04,), (5e-05,)) def test_gfd5h_43(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd5h(30.0, 0.15), 7.14251358747767226e+04, 5e-03) def test_vgfd5h_43(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd5h(numpy.array((30.0,)), numpy.array((0.15,))), (7.14251358747767226e+04,), (5e-03,)) def test_gfd5h_44(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd5h(40.0, 0.15), 2.20891583722858282e+05, 8e-02) def test_vgfd5h_44(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd5h(numpy.array((40.0,)), numpy.array((0.15,))), (2.20891583722858282e+05,), (8e-02,)) def test_gfd5h_45(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd5h(50.0, 0.15), 6.38673363837586949e+05, 5e-01) def test_vgfd5h_45(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd5h(numpy.array((50.0,)), 
numpy.array((0.15,))), (6.38673363837586949e+05,), (5e-01,)) def test_gfd5h_46(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd5h(-50.0, 0.225), 7.54696653284489481e-22, 1e-08) def test_vgfd5h_46(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd5h(numpy.array((-50.0,)), numpy.array((0.225,))), (7.54696653284489481e-22,), (1e-08,)) def test_gfd5h_47(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd5h(-3.0, 0.225), 1.94024141918108045e-01, 1e-08) def test_vgfd5h_47(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd5h(numpy.array((-3.0,)), numpy.array((0.225,))), (1.94024141918108045e-01,), (1e-08,)) def test_gfd5h_48(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd5h(-2.0, 0.225), 5.23848886120844015e-01, 1e-08) def test_vgfd5h_48(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd5h(numpy.array((-2.0,)), numpy.array((0.225,))), (5.23848886120844015e-01,), (1e-08,)) def test_gfd5h_49(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd5h(-1.0, 0.225), 1.39934982722688672e+00, 1e-08) def test_vgfd5h_49(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd5h(numpy.array((-1.0,)), numpy.array((0.225,))), (1.39934982722688672e+00,), (1e-08,)) def test_gfd5h_50(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd5h(0.0, 0.225), 3.64836190286121509e+00, 2e-08) def test_vgfd5h_50(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd5h(numpy.array((0.0,)), numpy.array((0.225,))), (3.64836190286121509e+00,), (2e-08,)) def test_gfd5h_51(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") 
self.assertRTOL(gfd.gfd5h(1.0, 0.225), 9.08166989206396202e+00, 2e-08) def test_vgfd5h_51(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd5h(numpy.array((1.0,)), numpy.array((0.225,))), (9.08166989206396202e+00,), (2e-08,)) def test_gfd5h_52(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd5h(4.0, 0.225), 8.85263311960657404e+01, 2e-08) def test_vgfd5h_52(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd5h(numpy.array((4.0,)), numpy.array((0.225,))), (8.85263311960657404e+01,), (2e-08,)) def test_gfd5h_53(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd5h(5.0, 0.225), 1.61214719508784299e+02, 1e-06) def test_vgfd5h_53(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd5h(numpy.array((5.0,)), numpy.array((0.225,))), (1.61214719508784299e+02,), (1e-06,)) def test_gfd5h_54(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd5h(7.0, 0.225), 4.43581322643197154e+02, 3e-06) def test_vgfd5h_54(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd5h(numpy.array((7.0,)), numpy.array((0.225,))), (4.43581322643197154e+02,), (3e-06,)) def test_gfd5h_55(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd5h(10.0, 0.225), 1.44879933670252876e+03, 2e-05) def test_vgfd5h_55(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd5h(numpy.array((10.0,)), numpy.array((0.225,))), (1.44879933670252876e+03,), (2e-05,)) def test_gfd5h_56(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd5h(15.0, 0.225), 6.11260123899200516e+03, 5e-04) def test_vgfd5h_56(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") 
self.assert_all_rtol(gfd.gfd5h(numpy.array((15.0,)), numpy.array((0.225,))), (6.11260123899200516e+03,), (5e-04,)) def test_gfd5h_57(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd5h(20.0, 0.225), 1.77237353014303735e+04, 7e-03) def test_vgfd5h_57(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd5h(numpy.array((20.0,)), numpy.array((0.225,))), (1.77237353014303735e+04,), (7e-03,)) def test_gfd5h_58(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd5h(30.0, 0.225), 9.20765192192947143e+04, 3e-01) def test_vgfd5h_58(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd5h(numpy.array((30.0,)), numpy.array((0.225,))), (9.20765192192947143e+04,), (3e-01,)) def test_gfd5h_59(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd5h(40.0, 0.225), 5.80758419784098049e+05, 3e+00) def test_vgfd5h_59(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd5h(numpy.array((40.0,)), numpy.array((0.225,))), (5.80758419784098049e+05,), (3e+00,)) def test_gfd5h_60(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd5h(50.0, 0.225), 5.19088362398220226e+06, 2e+01) def test_vgfd5h_60(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd5h(numpy.array((50.0,)), numpy.array((0.225,))), (5.19088362398220226e+06,), (2e+01,)) def test_gfd5h_61(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd5h(-50.0, 0.3), 7.88423967604413785e-22, 1e-05) def test_vgfd5h_61(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd5h(numpy.array((-50.0,)), numpy.array((0.3,))), (7.88423967604413785e-22,), (1e-05,)) def test_gfd5h_62(self): with warnings.catch_warnings(): 
warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd5h(-3.0, 0.3), 2.02709309484947309e-01, 1e-05) def test_vgfd5h_62(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd5h(numpy.array((-3.0,)), numpy.array((0.3,))), (2.02709309484947309e-01,), (1e-05,)) def test_gfd5h_63(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd5h(-2.0, 0.3), 5.47362086788045543e-01, 1e-05) def test_vgfd5h_63(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd5h(numpy.array((-2.0,)), numpy.array((0.3,))), (5.47362086788045543e-01,), (1e-05,)) def test_gfd5h_64(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd5h(-1.0, 0.3), 1.46259299424101075e+00, 1e-05) def test_vgfd5h_64(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd5h(numpy.array((-1.0,)), numpy.array((0.3,))), (1.46259299424101075e+00,), (1e-05,)) def test_gfd5h_65(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd5h(0.0, 0.3), 3.81585298984370835e+00, 1e-05) def test_vgfd5h_65(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd5h(numpy.array((0.0,)), numpy.array((0.3,))), (3.81585298984370835e+00,), (1e-05,)) def test_gfd5h_66(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd5h(1.0, 0.3), 9.51138485803959099e+00, 1e-05) def test_vgfd5h_66(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd5h(numpy.array((1.0,)), numpy.array((0.3,))), (9.51138485803959099e+00,), (1e-05,)) def test_gfd5h_67(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd5h(4.0, 0.3), 9.35055036426053192e+01, 3e-05) def test_vgfd5h_67(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") 
self.assert_all_rtol(gfd.gfd5h(numpy.array((4.0,)), numpy.array((0.3,))), (9.35055036426053192e+01,), (3e-05,)) def test_gfd5h_68(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd5h(5.0, 0.3), 1.70901471558054510e+02, 4e-05) def test_vgfd5h_68(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd5h(numpy.array((5.0,)), numpy.array((0.3,))), (1.70901471558054510e+02,), (4e-05,)) def test_gfd5h_69(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd5h(7.0, 0.3), 4.73657769681105947e+02, 1e-04) def test_vgfd5h_69(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd5h(numpy.array((7.0,)), numpy.array((0.3,))), (4.73657769681105947e+02,), (1e-04,)) def test_gfd5h_70(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd5h(10.0, 0.3), 1.56244525749910190e+03, 7e-04) def test_vgfd5h_70(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd5h(numpy.array((10.0,)), numpy.array((0.3,))), (1.56244525749910190e+03,), (7e-04,)) def test_gfd5h_71(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd5h(15.0, 0.3), 6.70965549483038922e+03, 1e-02) def test_vgfd5h_71(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd5h(numpy.array((15.0,)), numpy.array((0.3,))), (6.70965549483038922e+03,), (1e-02,)) def test_gfd5h_72(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd5h(20.0, 0.3), 2.05967064205203205e+04, 1e-01) def test_vgfd5h_72(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd5h(numpy.array((20.0,)), numpy.array((0.3,))), (2.05967064205203205e+04,), (1e-01,)) def test_gfd5h_73(self): with warnings.catch_warnings(): 
warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd5h(30.0, 0.3), 2.26561193608227069e+05, 3e+00) def test_vgfd5h_73(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd5h(numpy.array((30.0,)), numpy.array((0.3,))), (2.26561193608227069e+05,), (3e+00,)) def test_gfd5h_74(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd5h(40.0, 0.3), 4.03591059130166192e+06, 3e+01) def test_vgfd5h_74(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd5h(numpy.array((40.0,)), numpy.array((0.3,))), (4.03591059130166192e+06,), (3e+01,)) def test_gfd5h_75(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assertRTOL(gfd.gfd5h(50.0, 0.3), 4.81487891604254469e+07, 1e+02) def test_vgfd5h_75(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") self.assert_all_rtol(gfd.gfd5h(numpy.array((50.0,)), numpy.array((0.3,))), (4.81487891604254469e+07,), (1e+02,)) if __name__ == "__main__": unittest.main()
40.107221
70
0.458014
16,974
182,167
4.772947
0.029044
0.07443
0.118495
0.155525
0.982213
0.974437
0.950676
0.950072
0.950072
0.703145
0
0.18537
0.425022
182,167
4,541
71
40.116054
0.588112
0.001855
0
0.600968
1
0
0.020301
0
0
0
0
0
0.153807
1
0.153298
false
0
0.001528
0
0.15508
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
c1a059ee8f5a3cab6024a9f4220d7252e1f8bafb
3,257
gyp
Python
build/temp_gyp/googleurl.gyp
gtmetrix/pagespeed-library
4aac4ab995bc99c6a40f0c820cfefc88db6fcf33
[ "Apache-2.0" ]
11
2016-08-23T12:31:32.000Z
2021-05-12T09:47:01.000Z
build/temp_gyp/googleurl.gyp
gtmetrix/page-speed-library
4aac4ab995bc99c6a40f0c820cfefc88db6fcf33
[ "Apache-2.0" ]
null
null
null
build/temp_gyp/googleurl.gyp
gtmetrix/page-speed-library
4aac4ab995bc99c6a40f0c820cfefc88db6fcf33
[ "Apache-2.0" ]
1
2019-03-13T00:51:45.000Z
2019-03-13T00:51:45.000Z
# Copyright (c) 2009 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. { 'variables': { 'chromium_code': 1, }, 'targets': [ { 'target_name': 'googleurl', 'type': '<(library)', 'dependencies': [ '<(DEPTH)/base/base.gyp:base', '<(DEPTH)/third_party/icu/icu.gyp:icuuc', ], 'sources': [ '<(DEPTH)/googleurl/src/gurl.cc', '<(DEPTH)/googleurl/src/gurl.h', '<(DEPTH)/googleurl/src/url_canon.h', '<(DEPTH)/googleurl/src/url_canon_etc.cc', '<(DEPTH)/googleurl/src/url_canon_filesystemurl.cc', '<(DEPTH)/googleurl/src/url_canon_fileurl.cc', '<(DEPTH)/googleurl/src/url_canon_host.cc', '<(DEPTH)/googleurl/src/url_canon_icu.cc', '<(DEPTH)/googleurl/src/url_canon_icu.h', '<(DEPTH)/googleurl/src/url_canon_internal.cc', '<(DEPTH)/googleurl/src/url_canon_internal.h', '<(DEPTH)/googleurl/src/url_canon_internal_file.h', '<(DEPTH)/googleurl/src/url_canon_ip.cc', '<(DEPTH)/googleurl/src/url_canon_ip.h', '<(DEPTH)/googleurl/src/url_canon_mailtourl.cc', '<(DEPTH)/googleurl/src/url_canon_path.cc', '<(DEPTH)/googleurl/src/url_canon_pathurl.cc', '<(DEPTH)/googleurl/src/url_canon_query.cc', '<(DEPTH)/googleurl/src/url_canon_relative.cc', '<(DEPTH)/googleurl/src/url_canon_stdstring.h', '<(DEPTH)/googleurl/src/url_canon_stdurl.cc', '<(DEPTH)/googleurl/src/url_file.h', '<(DEPTH)/googleurl/src/url_parse.cc', '<(DEPTH)/googleurl/src/url_parse.h', '<(DEPTH)/googleurl/src/url_parse_file.cc', '<(DEPTH)/googleurl/src/url_parse_internal.h', '<(DEPTH)/googleurl/src/url_util.cc', '<(DEPTH)/googleurl/src/url_util.h', ], 'include_dirs': [ '<(DEPTH)', ], 'direct_dependent_settings': { 'include_dirs': [ '<(DEPTH)', ], }, # TODO(jschuh): crbug.com/167187 fix size_t to int truncations. 
'msvs_disabled_warnings': [4267, ], }, { 'target_name': 'googleurl_unittests', 'type': 'executable', 'dependencies': [ 'googleurl', '<(DEPTH)/base/base.gyp:base', '<(DEPTH)/testing/gtest.gyp:gtest', '<(DEPTH)/third_party/icu/icu.gyp:icuuc', ], 'defines': [ # Our ICU build does not provide character set converters. We # set this define to disable tests that depend on non-default # character set conversions. 'ICU_NO_CONVERTER_DATA', ], 'sources': [ '<(DEPTH)/googleurl/src/gurl_unittest.cc', '<(DEPTH)/googleurl_noconv/src/url_canon_unittest.cc', '<(DEPTH)/googleurl/src/url_parse_unittest.cc', '<(DEPTH)/googleurl/src/url_test_utils.h', '<(DEPTH)/googleurl/src/url_util_unittest.cc', '<(DEPTH)/googleurl/src/gurl_test_main.cc', ], # TODO(jschuh): crbug.com/167187 fix size_t to int truncations. 'msvs_disabled_warnings': [4267, ], }, ], } # Local Variables: # tab-width:2 # indent-tabs-mode:nil # End: # vim: set expandtab tabstop=2 shiftwidth=2:
35.402174
72
0.601167
393
3,257
4.783715
0.3257
0.253191
0.298404
0.308511
0.628723
0.544681
0.175532
0.076596
0.076596
0.076596
0
0.011085
0.22444
3,257
91
73
35.791209
0.733175
0.162112
0
0.289474
0
0
0.667035
0.593002
0
0
0
0.010989
0
1
0
true
0
0
0
0
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
1
null
0
0
1
0
0
0
1
0
0
0
0
0
0
7
a9a6f1190a74059ea20d9013091645c2aa996089
31,004
py
Python
python_msx_sdk/api/users_api.py
CiscoDevNet/python-msx-sdk
d7e0a08c656504b4f4551d263e67c671a2a04b3f
[ "MIT" ]
null
null
null
python_msx_sdk/api/users_api.py
CiscoDevNet/python-msx-sdk
d7e0a08c656504b4f4551d263e67c671a2a04b3f
[ "MIT" ]
null
null
null
python_msx_sdk/api/users_api.py
CiscoDevNet/python-msx-sdk
d7e0a08c656504b4f4551d263e67c671a2a04b3f
[ "MIT" ]
null
null
null
""" MSX SDK MSX SDK client. # noqa: E501 The version of the OpenAPI document: 1.0.9 Generated by: https://openapi-generator.tech """ import re # noqa: F401 import sys # noqa: F401 from python_msx_sdk.api_client import ApiClient, Endpoint as _Endpoint from python_msx_sdk.model_utils import ( # noqa: F401 check_allowed_values, check_validations, date, datetime, file_type, none_type, validate_and_convert_types ) from python_msx_sdk.model.error import Error from python_msx_sdk.model.update_password import UpdatePassword from python_msx_sdk.model.user import User from python_msx_sdk.model.user_create import UserCreate from python_msx_sdk.model.user_update import UserUpdate from python_msx_sdk.model.users_page import UsersPage class UsersApi(object): """NOTE: This class is auto generated by OpenAPI Generator Ref: https://openapi-generator.tech Do not edit the class manually. """ def __init__(self, api_client=None): if api_client is None: api_client = ApiClient() self.api_client = api_client def __create_user( self, user_create, **kwargs ): """Creates a new user. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.create_user(user_create, async_req=True) >>> result = thread.get() Args: user_create (UserCreate): Keyword Args: _return_http_data_only (bool): response data without head status code and headers. Default is True. _preload_content (bool): if False, the urllib3.HTTPResponse object will be returned without reading/decoding response data. Default is True. _request_timeout (float/tuple): timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. Default is None. _check_input_type (bool): specifies if type checking should be done one the data sent to the server. Default is True. 
_check_return_type (bool): specifies if type checking should be done one the data received from the server. Default is True. _host_index (int/None): specifies the index of the server that we want to use. Default is read from the configuration. async_req (bool): execute request asynchronously Returns: User If the method is called asynchronously, returns the request thread. """ kwargs['async_req'] = kwargs.get( 'async_req', False ) kwargs['_return_http_data_only'] = kwargs.get( '_return_http_data_only', True ) kwargs['_preload_content'] = kwargs.get( '_preload_content', True ) kwargs['_request_timeout'] = kwargs.get( '_request_timeout', None ) kwargs['_check_input_type'] = kwargs.get( '_check_input_type', True ) kwargs['_check_return_type'] = kwargs.get( '_check_return_type', True ) kwargs['_host_index'] = kwargs.get('_host_index') kwargs['user_create'] = \ user_create return self.call_with_http_info(**kwargs) self.create_user = _Endpoint( settings={ 'response_type': (User,), 'auth': [], 'endpoint_path': '/idm/api/v8/users', 'operation_id': 'create_user', 'http_method': 'POST', 'servers': None, }, params_map={ 'all': [ 'user_create', ], 'required': [ 'user_create', ], 'nullable': [ ], 'enum': [ ], 'validation': [ ] }, root_map={ 'validations': { }, 'allowed_values': { }, 'openapi_types': { 'user_create': (UserCreate,), }, 'attribute_map': { }, 'location_map': { 'user_create': 'body', }, 'collection_format_map': { } }, headers_map={ 'accept': [ 'application/json' ], 'content_type': [ 'application/json' ] }, api_client=api_client, callable=__create_user ) def __delete_user( self, id, **kwargs ): """Deletes a user by id. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.delete_user(id, async_req=True) >>> result = thread.get() Args: id (str): Keyword Args: _return_http_data_only (bool): response data without head status code and headers. Default is True. 
_preload_content (bool): if False, the urllib3.HTTPResponse object will be returned without reading/decoding response data. Default is True. _request_timeout (float/tuple): timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. Default is None. _check_input_type (bool): specifies if type checking should be done one the data sent to the server. Default is True. _check_return_type (bool): specifies if type checking should be done one the data received from the server. Default is True. _host_index (int/None): specifies the index of the server that we want to use. Default is read from the configuration. async_req (bool): execute request asynchronously Returns: None If the method is called asynchronously, returns the request thread. """ kwargs['async_req'] = kwargs.get( 'async_req', False ) kwargs['_return_http_data_only'] = kwargs.get( '_return_http_data_only', True ) kwargs['_preload_content'] = kwargs.get( '_preload_content', True ) kwargs['_request_timeout'] = kwargs.get( '_request_timeout', None ) kwargs['_check_input_type'] = kwargs.get( '_check_input_type', True ) kwargs['_check_return_type'] = kwargs.get( '_check_return_type', True ) kwargs['_host_index'] = kwargs.get('_host_index') kwargs['id'] = \ id return self.call_with_http_info(**kwargs) self.delete_user = _Endpoint( settings={ 'response_type': None, 'auth': [], 'endpoint_path': '/idm/api/v8/users/{id}', 'operation_id': 'delete_user', 'http_method': 'DELETE', 'servers': None, }, params_map={ 'all': [ 'id', ], 'required': [ 'id', ], 'nullable': [ ], 'enum': [ ], 'validation': [ ] }, root_map={ 'validations': { }, 'allowed_values': { }, 'openapi_types': { 'id': (str,), }, 'attribute_map': { 'id': 'id', }, 'location_map': { 'id': 'path', }, 'collection_format_map': { } }, headers_map={ 'accept': [ 'application/json' ], 'content_type': [], }, api_client=api_client, callable=__delete_user ) def __get_current_user( 
self, **kwargs ): """Returns the current user. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.get_current_user(async_req=True) >>> result = thread.get() Keyword Args: _return_http_data_only (bool): response data without head status code and headers. Default is True. _preload_content (bool): if False, the urllib3.HTTPResponse object will be returned without reading/decoding response data. Default is True. _request_timeout (float/tuple): timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. Default is None. _check_input_type (bool): specifies if type checking should be done one the data sent to the server. Default is True. _check_return_type (bool): specifies if type checking should be done one the data received from the server. Default is True. _host_index (int/None): specifies the index of the server that we want to use. Default is read from the configuration. async_req (bool): execute request asynchronously Returns: User If the method is called asynchronously, returns the request thread. 
""" kwargs['async_req'] = kwargs.get( 'async_req', False ) kwargs['_return_http_data_only'] = kwargs.get( '_return_http_data_only', True ) kwargs['_preload_content'] = kwargs.get( '_preload_content', True ) kwargs['_request_timeout'] = kwargs.get( '_request_timeout', None ) kwargs['_check_input_type'] = kwargs.get( '_check_input_type', True ) kwargs['_check_return_type'] = kwargs.get( '_check_return_type', True ) kwargs['_host_index'] = kwargs.get('_host_index') return self.call_with_http_info(**kwargs) self.get_current_user = _Endpoint( settings={ 'response_type': (User,), 'auth': [], 'endpoint_path': '/idm/api/v8/users/current', 'operation_id': 'get_current_user', 'http_method': 'GET', 'servers': None, }, params_map={ 'all': [ ], 'required': [], 'nullable': [ ], 'enum': [ ], 'validation': [ ] }, root_map={ 'validations': { }, 'allowed_values': { }, 'openapi_types': { }, 'attribute_map': { }, 'location_map': { }, 'collection_format_map': { } }, headers_map={ 'accept': [ 'application/json' ], 'content_type': [], }, api_client=api_client, callable=__get_current_user ) def __get_user( self, id, **kwargs ): """Returns an existing user. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.get_user(id, async_req=True) >>> result = thread.get() Args: id (str): Keyword Args: _return_http_data_only (bool): response data without head status code and headers. Default is True. _preload_content (bool): if False, the urllib3.HTTPResponse object will be returned without reading/decoding response data. Default is True. _request_timeout (float/tuple): timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. Default is None. _check_input_type (bool): specifies if type checking should be done one the data sent to the server. Default is True. 
_check_return_type (bool): specifies if type checking should be done one the data received from the server. Default is True. _host_index (int/None): specifies the index of the server that we want to use. Default is read from the configuration. async_req (bool): execute request asynchronously Returns: User If the method is called asynchronously, returns the request thread. """ kwargs['async_req'] = kwargs.get( 'async_req', False ) kwargs['_return_http_data_only'] = kwargs.get( '_return_http_data_only', True ) kwargs['_preload_content'] = kwargs.get( '_preload_content', True ) kwargs['_request_timeout'] = kwargs.get( '_request_timeout', None ) kwargs['_check_input_type'] = kwargs.get( '_check_input_type', True ) kwargs['_check_return_type'] = kwargs.get( '_check_return_type', True ) kwargs['_host_index'] = kwargs.get('_host_index') kwargs['id'] = \ id return self.call_with_http_info(**kwargs) self.get_user = _Endpoint( settings={ 'response_type': (User,), 'auth': [], 'endpoint_path': '/idm/api/v8/users/{id}', 'operation_id': 'get_user', 'http_method': 'GET', 'servers': None, }, params_map={ 'all': [ 'id', ], 'required': [ 'id', ], 'nullable': [ ], 'enum': [ ], 'validation': [ ] }, root_map={ 'validations': { }, 'allowed_values': { }, 'openapi_types': { 'id': (str,), }, 'attribute_map': { 'id': 'id', }, 'location_map': { 'id': 'path', }, 'collection_format_map': { } }, headers_map={ 'accept': [ 'application/json' ], 'content_type': [], }, api_client=api_client, callable=__get_user ) def __get_users_page( self, page, page_size, **kwargs ): """Returns a page of users. # noqa: E501 This method makes a synchronous HTTP request by default. 
To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.get_users_page(page, page_size, async_req=True) >>> result = thread.get() Args: page (int): page_size (int): Keyword Args: tenant_id (str): [optional] deleted (bool): [optional] _return_http_data_only (bool): response data without head status code and headers. Default is True. _preload_content (bool): if False, the urllib3.HTTPResponse object will be returned without reading/decoding response data. Default is True. _request_timeout (float/tuple): timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. Default is None. _check_input_type (bool): specifies if type checking should be done one the data sent to the server. Default is True. _check_return_type (bool): specifies if type checking should be done one the data received from the server. Default is True. _host_index (int/None): specifies the index of the server that we want to use. Default is read from the configuration. async_req (bool): execute request asynchronously Returns: UsersPage If the method is called asynchronously, returns the request thread. 
""" kwargs['async_req'] = kwargs.get( 'async_req', False ) kwargs['_return_http_data_only'] = kwargs.get( '_return_http_data_only', True ) kwargs['_preload_content'] = kwargs.get( '_preload_content', True ) kwargs['_request_timeout'] = kwargs.get( '_request_timeout', None ) kwargs['_check_input_type'] = kwargs.get( '_check_input_type', True ) kwargs['_check_return_type'] = kwargs.get( '_check_return_type', True ) kwargs['_host_index'] = kwargs.get('_host_index') kwargs['page'] = \ page kwargs['page_size'] = \ page_size return self.call_with_http_info(**kwargs) self.get_users_page = _Endpoint( settings={ 'response_type': (UsersPage,), 'auth': [], 'endpoint_path': '/idm/api/v8/users', 'operation_id': 'get_users_page', 'http_method': 'GET', 'servers': None, }, params_map={ 'all': [ 'page', 'page_size', 'tenant_id', 'deleted', ], 'required': [ 'page', 'page_size', ], 'nullable': [ ], 'enum': [ ], 'validation': [ 'page', 'page_size', ] }, root_map={ 'validations': { ('page',): { 'inclusive_minimum': 0, }, ('page_size',): { 'inclusive_maximum': 1000, 'inclusive_minimum': 1, }, }, 'allowed_values': { }, 'openapi_types': { 'page': (int,), 'page_size': (int,), 'tenant_id': (str,), 'deleted': (bool,), }, 'attribute_map': { 'page': 'page', 'page_size': 'pageSize', 'tenant_id': 'tenantId', 'deleted': 'deleted', }, 'location_map': { 'page': 'query', 'page_size': 'query', 'tenant_id': 'query', 'deleted': 'query', }, 'collection_format_map': { } }, headers_map={ 'accept': [ 'application/json' ], 'content_type': [], }, api_client=api_client, callable=__get_users_page ) def __update_user( self, id, user_update, **kwargs ): """Updates an existing user. # noqa: E501 This method makes a synchronous HTTP request by default. 
To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.update_user(id, user_update, async_req=True) >>> result = thread.get() Args: id (str): user_update (UserUpdate): Keyword Args: _return_http_data_only (bool): response data without head status code and headers. Default is True. _preload_content (bool): if False, the urllib3.HTTPResponse object will be returned without reading/decoding response data. Default is True. _request_timeout (float/tuple): timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. Default is None. _check_input_type (bool): specifies if type checking should be done one the data sent to the server. Default is True. _check_return_type (bool): specifies if type checking should be done one the data received from the server. Default is True. _host_index (int/None): specifies the index of the server that we want to use. Default is read from the configuration. async_req (bool): execute request asynchronously Returns: User If the method is called asynchronously, returns the request thread. 
""" kwargs['async_req'] = kwargs.get( 'async_req', False ) kwargs['_return_http_data_only'] = kwargs.get( '_return_http_data_only', True ) kwargs['_preload_content'] = kwargs.get( '_preload_content', True ) kwargs['_request_timeout'] = kwargs.get( '_request_timeout', None ) kwargs['_check_input_type'] = kwargs.get( '_check_input_type', True ) kwargs['_check_return_type'] = kwargs.get( '_check_return_type', True ) kwargs['_host_index'] = kwargs.get('_host_index') kwargs['id'] = \ id kwargs['user_update'] = \ user_update return self.call_with_http_info(**kwargs) self.update_user = _Endpoint( settings={ 'response_type': (User,), 'auth': [], 'endpoint_path': '/idm/api/v8/users/{id}', 'operation_id': 'update_user', 'http_method': 'PUT', 'servers': None, }, params_map={ 'all': [ 'id', 'user_update', ], 'required': [ 'id', 'user_update', ], 'nullable': [ ], 'enum': [ ], 'validation': [ ] }, root_map={ 'validations': { }, 'allowed_values': { }, 'openapi_types': { 'id': (str,), 'user_update': (UserUpdate,), }, 'attribute_map': { 'id': 'id', }, 'location_map': { 'id': 'path', 'user_update': 'body', }, 'collection_format_map': { } }, headers_map={ 'accept': [ 'application/json' ], 'content_type': [ 'application/json' ] }, api_client=api_client, callable=__update_user ) def __update_user_password( self, update_password, **kwargs ): """Update a user password. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.update_user_password(update_password, async_req=True) >>> result = thread.get() Args: update_password (UpdatePassword): Keyword Args: _return_http_data_only (bool): response data without head status code and headers. Default is True. _preload_content (bool): if False, the urllib3.HTTPResponse object will be returned without reading/decoding response data. Default is True. _request_timeout (float/tuple): timeout setting for this request. 
If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. Default is None. _check_input_type (bool): specifies if type checking should be done one the data sent to the server. Default is True. _check_return_type (bool): specifies if type checking should be done one the data received from the server. Default is True. _host_index (int/None): specifies the index of the server that we want to use. Default is read from the configuration. async_req (bool): execute request asynchronously Returns: None If the method is called asynchronously, returns the request thread. """ kwargs['async_req'] = kwargs.get( 'async_req', False ) kwargs['_return_http_data_only'] = kwargs.get( '_return_http_data_only', True ) kwargs['_preload_content'] = kwargs.get( '_preload_content', True ) kwargs['_request_timeout'] = kwargs.get( '_request_timeout', None ) kwargs['_check_input_type'] = kwargs.get( '_check_input_type', True ) kwargs['_check_return_type'] = kwargs.get( '_check_return_type', True ) kwargs['_host_index'] = kwargs.get('_host_index') kwargs['update_password'] = \ update_password return self.call_with_http_info(**kwargs) self.update_user_password = _Endpoint( settings={ 'response_type': None, 'auth': [], 'endpoint_path': '/idm/api/v8/users/updatepassword', 'operation_id': 'update_user_password', 'http_method': 'PUT', 'servers': None, }, params_map={ 'all': [ 'update_password', ], 'required': [ 'update_password', ], 'nullable': [ ], 'enum': [ ], 'validation': [ ] }, root_map={ 'validations': { }, 'allowed_values': { }, 'openapi_types': { 'update_password': (UpdatePassword,), }, 'attribute_map': { }, 'location_map': { 'update_password': 'body', }, 'collection_format_map': { } }, headers_map={ 'accept': [ 'application/json' ], 'content_type': [ 'application/json' ] }, api_client=api_client, callable=__update_user_password )
34.875141
88
0.445297
2,638
31,004
4.977256
0.074678
0.033587
0.027723
0.028789
0.840137
0.825895
0.817974
0.810891
0.797411
0.780274
0
0.003416
0.471262
31,004
888
89
34.914414
0.797536
0.321604
0
0.608108
1
0
0.209673
0.031552
0
0
0
0
0
1
0.013514
false
0.023649
0.016892
0
0.043919
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
e711c8a5b6dddd15359935bbcce66e78280d31f6
85
py
Python
src/toypkg/__init__.py
Rayman/toypkg
76f6862cdd4e923abd34d981c60cab292e7a4a29
[ "MIT" ]
null
null
null
src/toypkg/__init__.py
Rayman/toypkg
76f6862cdd4e923abd34d981c60cab292e7a4a29
[ "MIT" ]
null
null
null
src/toypkg/__init__.py
Rayman/toypkg
76f6862cdd4e923abd34d981c60cab292e7a4a29
[ "MIT" ]
null
null
null
from __future__ import print_function def main(): print('hello from', __file__)
17
37
0.741176
11
85
4.909091
0.818182
0
0
0
0
0
0
0
0
0
0
0
0.164706
85
4
38
21.25
0.760563
0
0
0
0
0
0.117647
0
0
0
0
0
0
1
0.333333
true
0
0.333333
0
0.666667
0.666667
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
1
0
1
0
1
1
0
7
e73bea4528b0d5b2c6542077bc98b93413b1ea6d
129
py
Python
tests/conftest.py
tadamic/marshmallow_configparser
cea40ff56757348ed8aedf1dce14e42d329d763b
[ "MIT" ]
null
null
null
tests/conftest.py
tadamic/marshmallow_configparser
cea40ff56757348ed8aedf1dce14e42d329d763b
[ "MIT" ]
null
null
null
tests/conftest.py
tadamic/marshmallow_configparser
cea40ff56757348ed8aedf1dce14e42d329d763b
[ "MIT" ]
null
null
null
from .fixtures import * try: import colored_traceback.auto import colored_traceback.always except ImportError: pass
16.125
35
0.75969
15
129
6.4
0.733333
0.270833
0.458333
0
0
0
0
0
0
0
0
0
0.193798
129
7
36
18.428571
0.923077
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0.166667
0.666667
0
0.666667
0
1
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
1
1
0
1
0
0
8
e78dd6f9b6df4ef96d1ec8b8c982e7c917a252a1
305
py
Python
planetarium/engine/star_catalog_calib.py
QQuick/Planetarium
31f80c3e0404e452beffae5d7303d8e24b103174
[ "Apache-2.0" ]
null
null
null
planetarium/engine/star_catalog_calib.py
QQuick/Planetarium
31f80c3e0404e452beffae5d7303d8e24b103174
[ "Apache-2.0" ]
1
2021-06-30T19:27:36.000Z
2021-07-11T01:50:04.000Z
planetarium/engine/star_catalog_calib.py
QQuick/Planetarium
31f80c3e0404e452beffae5d7303d8e24b103174
[ "Apache-2.0" ]
null
null
null
# Source: http://simbad.harvard.edu/simbad/sim-fsam starCatalog = ''' %; -; 0 0 0 0 0 0; -20 %; -; 0 0 0 5 0 0; 0 %; -; 3 0 0 5 0 0; 0 %; -; 6 0 0 5 0 0; 0 %; -; 9 0 0 5 0 0; 0 %; -; 12 0 0 5 0 0; 0 %; -; 15 0 0 5 0 0; 0 %; -; 18 0 0 5 0 0; 0 %; -; 21 0 0 5 0 0; 0 '''
23.461538
51
0.386885
72
305
1.638889
0.25
0.508475
0.330508
0.271186
0.457627
0.40678
0
0
0
0
0
0.375691
0.406557
305
13
52
23.461538
0.276243
0.160656
0
0
0
0
0.909804
0
0
0
0
0
0
1
0
false
0
0
0
0
0
0
0
1
null
1
1
1
0
0
0
0
0
0
0
1
0
0
0
0
0
1
0
0
0
0
0
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
e7cebc4fbc7c0c8ee7267c29a7167be5d4bce242
268
py
Python
MyLittleProgram/sys-argv_from-import.py
zymzs/PythonSpace
88027affb2fd8fae9b629d9c1310fd9b4789bfd0
[ "MIT" ]
null
null
null
MyLittleProgram/sys-argv_from-import.py
zymzs/PythonSpace
88027affb2fd8fae9b629d9c1310fd9b4789bfd0
[ "MIT" ]
null
null
null
MyLittleProgram/sys-argv_from-import.py
zymzs/PythonSpace
88027affb2fd8fae9b629d9c1310fd9b4789bfd0
[ "MIT" ]
null
null
null
#!/usr/bin/python3 # Filename: sys-argv_from-import.py from sys import argv print("You can use argv without sys after from..import") print(argv[1:]) print("You can't use sys.argv after from..import") print("You can't use other part of sys after from..import either.")
33.5
67
0.738806
49
268
4.020408
0.428571
0.203046
0.167513
0.182741
0.152284
0
0
0
0
0
0
0.008511
0.123134
268
7
68
38.285714
0.829787
0.190299
0
0
0
0
0.67907
0
0
0
0
0
0
1
0
true
0
0.8
0
0.8
0.8
0
0
0
null
1
0
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
1
0
null
0
0
0
0
0
0
1
0
1
0
0
1
0
7
99b61a71f627e430d4f6696d29bfafc0cbeed58c
73,687
py
Python
sdk/python/pulumi_azure/keyvault/outputs.py
suresh198526/pulumi-azure
bf27206a38d7a5c58b3c2c57ec8769fe3d0fc5d7
[ "ECL-2.0", "Apache-2.0" ]
null
null
null
sdk/python/pulumi_azure/keyvault/outputs.py
suresh198526/pulumi-azure
bf27206a38d7a5c58b3c2c57ec8769fe3d0fc5d7
[ "ECL-2.0", "Apache-2.0" ]
null
null
null
sdk/python/pulumi_azure/keyvault/outputs.py
suresh198526/pulumi-azure
bf27206a38d7a5c58b3c2c57ec8769fe3d0fc5d7
[ "ECL-2.0", "Apache-2.0" ]
null
null
null
# coding=utf-8 # *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. *** # *** Do not edit by hand unless you're certain you know what you are doing! *** import warnings import pulumi import pulumi.runtime from typing import Any, Mapping, Optional, Sequence, Union from .. import _utilities, _tables from . import outputs __all__ = [ 'CertifiateCertificate', 'CertifiateCertificateAttribute', 'CertifiateCertificatePolicy', 'CertifiateCertificatePolicyIssuerParameters', 'CertifiateCertificatePolicyKeyProperties', 'CertifiateCertificatePolicyLifetimeAction', 'CertifiateCertificatePolicyLifetimeActionAction', 'CertifiateCertificatePolicyLifetimeActionTrigger', 'CertifiateCertificatePolicySecretProperties', 'CertifiateCertificatePolicyX509CertificateProperties', 'CertifiateCertificatePolicyX509CertificatePropertiesSubjectAlternativeNames', 'CertificateCertificate', 'CertificateCertificateAttribute', 'CertificateCertificatePolicy', 'CertificateCertificatePolicyIssuerParameters', 'CertificateCertificatePolicyKeyProperties', 'CertificateCertificatePolicyLifetimeAction', 'CertificateCertificatePolicyLifetimeActionAction', 'CertificateCertificatePolicyLifetimeActionTrigger', 'CertificateCertificatePolicySecretProperties', 'CertificateCertificatePolicyX509CertificateProperties', 'CertificateCertificatePolicyX509CertificatePropertiesSubjectAlternativeNames', 'CertificateIssuerAdmin', 'KeyVaultAccessPolicy', 'KeyVaultContact', 'KeyVaultNetworkAcls', 'GetCertificateCertificatePolicyResult', 'GetCertificateCertificatePolicyIssuerParameterResult', 'GetCertificateCertificatePolicyKeyPropertyResult', 'GetCertificateCertificatePolicyLifetimeActionResult', 'GetCertificateCertificatePolicyLifetimeActionActionResult', 'GetCertificateCertificatePolicyLifetimeActionTriggerResult', 'GetCertificateCertificatePolicySecretPropertyResult', 'GetCertificateCertificatePolicyX509CertificatePropertyResult', 
'GetCertificateCertificatePolicyX509CertificatePropertySubjectAlternativeNameResult', 'GetCertificateIssuerAdminResult', 'GetKeyVaultAccessPolicyResult', 'GetKeyVaultNetworkAclResult', ] @pulumi.output_type class CertifiateCertificate(dict): def __init__(__self__, *, contents: str, password: Optional[str] = None): """ :param str contents: The base64-encoded certificate contents. Changing this forces a new resource to be created. :param str password: The password associated with the certificate. Changing this forces a new resource to be created. """ pulumi.set(__self__, "contents", contents) if password is not None: pulumi.set(__self__, "password", password) @property @pulumi.getter def contents(self) -> str: """ The base64-encoded certificate contents. Changing this forces a new resource to be created. """ return pulumi.get(self, "contents") @property @pulumi.getter def password(self) -> Optional[str]: """ The password associated with the certificate. Changing this forces a new resource to be created. """ return pulumi.get(self, "password") def _translate_property(self, prop): return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop @pulumi.output_type class CertifiateCertificateAttribute(dict): def __init__(__self__, *, created: Optional[str] = None, enabled: Optional[bool] = None, expires: Optional[str] = None, not_before: Optional[str] = None, recovery_level: Optional[str] = None, updated: Optional[str] = None): """ :param str created: The create time of the Key Vault Certificate. :param bool enabled: whether the Key Vault Certificate is enabled. :param str expires: The expires time of the Key Vault Certificate. :param str not_before: The not before valid time of the Key Vault Certificate. :param str recovery_level: The deletion recovery level of the Key Vault Certificate. :param str updated: The recent update time of the Key Vault Certificate. 
""" if created is not None: pulumi.set(__self__, "created", created) if enabled is not None: pulumi.set(__self__, "enabled", enabled) if expires is not None: pulumi.set(__self__, "expires", expires) if not_before is not None: pulumi.set(__self__, "not_before", not_before) if recovery_level is not None: pulumi.set(__self__, "recovery_level", recovery_level) if updated is not None: pulumi.set(__self__, "updated", updated) @property @pulumi.getter def created(self) -> Optional[str]: """ The create time of the Key Vault Certificate. """ return pulumi.get(self, "created") @property @pulumi.getter def enabled(self) -> Optional[bool]: """ whether the Key Vault Certificate is enabled. """ return pulumi.get(self, "enabled") @property @pulumi.getter def expires(self) -> Optional[str]: """ The expires time of the Key Vault Certificate. """ return pulumi.get(self, "expires") @property @pulumi.getter(name="notBefore") def not_before(self) -> Optional[str]: """ The not before valid time of the Key Vault Certificate. """ return pulumi.get(self, "not_before") @property @pulumi.getter(name="recoveryLevel") def recovery_level(self) -> Optional[str]: """ The deletion recovery level of the Key Vault Certificate. """ return pulumi.get(self, "recovery_level") @property @pulumi.getter def updated(self) -> Optional[str]: """ The recent update time of the Key Vault Certificate. 
""" return pulumi.get(self, "updated") def _translate_property(self, prop): return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop @pulumi.output_type class CertifiateCertificatePolicy(dict): def __init__(__self__, *, issuer_parameters: 'outputs.CertifiateCertificatePolicyIssuerParameters', key_properties: 'outputs.CertifiateCertificatePolicyKeyProperties', secret_properties: 'outputs.CertifiateCertificatePolicySecretProperties', lifetime_actions: Optional[Sequence['outputs.CertifiateCertificatePolicyLifetimeAction']] = None, x509_certificate_properties: Optional['outputs.CertifiateCertificatePolicyX509CertificateProperties'] = None): """ :param 'CertifiateCertificatePolicyIssuerParametersArgs' issuer_parameters: A `issuer_parameters` block as defined below. :param 'CertifiateCertificatePolicyKeyPropertiesArgs' key_properties: A `key_properties` block as defined below. :param 'CertifiateCertificatePolicySecretPropertiesArgs' secret_properties: A `secret_properties` block as defined below. :param Sequence['CertifiateCertificatePolicyLifetimeActionArgs'] lifetime_actions: A `lifetime_action` block as defined below. :param 'CertifiateCertificatePolicyX509CertificatePropertiesArgs' x509_certificate_properties: A `x509_certificate_properties` block as defined below. Required when `certificate` block is not specified. """ pulumi.set(__self__, "issuer_parameters", issuer_parameters) pulumi.set(__self__, "key_properties", key_properties) pulumi.set(__self__, "secret_properties", secret_properties) if lifetime_actions is not None: pulumi.set(__self__, "lifetime_actions", lifetime_actions) if x509_certificate_properties is not None: pulumi.set(__self__, "x509_certificate_properties", x509_certificate_properties) @property @pulumi.getter(name="issuerParameters") def issuer_parameters(self) -> 'outputs.CertifiateCertificatePolicyIssuerParameters': """ A `issuer_parameters` block as defined below. 
""" return pulumi.get(self, "issuer_parameters") @property @pulumi.getter(name="keyProperties") def key_properties(self) -> 'outputs.CertifiateCertificatePolicyKeyProperties': """ A `key_properties` block as defined below. """ return pulumi.get(self, "key_properties") @property @pulumi.getter(name="secretProperties") def secret_properties(self) -> 'outputs.CertifiateCertificatePolicySecretProperties': """ A `secret_properties` block as defined below. """ return pulumi.get(self, "secret_properties") @property @pulumi.getter(name="lifetimeActions") def lifetime_actions(self) -> Optional[Sequence['outputs.CertifiateCertificatePolicyLifetimeAction']]: """ A `lifetime_action` block as defined below. """ return pulumi.get(self, "lifetime_actions") @property @pulumi.getter(name="x509CertificateProperties") def x509_certificate_properties(self) -> Optional['outputs.CertifiateCertificatePolicyX509CertificateProperties']: """ A `x509_certificate_properties` block as defined below. Required when `certificate` block is not specified. """ return pulumi.get(self, "x509_certificate_properties") def _translate_property(self, prop): return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop @pulumi.output_type class CertifiateCertificatePolicyIssuerParameters(dict): def __init__(__self__, *, name: str): """ :param str name: The name of the Certificate Issuer. Possible values include `Self` (for self-signed certificate), or `Unknown` (for a certificate issuing authority like `Let's Encrypt` and Azure direct supported ones). Changing this forces a new resource to be created. """ pulumi.set(__self__, "name", name) @property @pulumi.getter def name(self) -> str: """ The name of the Certificate Issuer. Possible values include `Self` (for self-signed certificate), or `Unknown` (for a certificate issuing authority like `Let's Encrypt` and Azure direct supported ones). Changing this forces a new resource to be created. 
""" return pulumi.get(self, "name") def _translate_property(self, prop): return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop @pulumi.output_type class CertifiateCertificatePolicyKeyProperties(dict): def __init__(__self__, *, exportable: bool, key_size: int, key_type: str, reuse_key: bool): """ :param bool exportable: Is this Certificate Exportable? Changing this forces a new resource to be created. :param int key_size: The size of the Key used in the Certificate. Possible values include `2048`, `3072`, and `4096`. Changing this forces a new resource to be created. :param str key_type: Specifies the Type of Key, such as `RSA`. Changing this forces a new resource to be created. :param bool reuse_key: Is the key reusable? Changing this forces a new resource to be created. """ pulumi.set(__self__, "exportable", exportable) pulumi.set(__self__, "key_size", key_size) pulumi.set(__self__, "key_type", key_type) pulumi.set(__self__, "reuse_key", reuse_key) @property @pulumi.getter def exportable(self) -> bool: """ Is this Certificate Exportable? Changing this forces a new resource to be created. """ return pulumi.get(self, "exportable") @property @pulumi.getter(name="keySize") def key_size(self) -> int: """ The size of the Key used in the Certificate. Possible values include `2048`, `3072`, and `4096`. Changing this forces a new resource to be created. """ return pulumi.get(self, "key_size") @property @pulumi.getter(name="keyType") def key_type(self) -> str: """ Specifies the Type of Key, such as `RSA`. Changing this forces a new resource to be created. """ return pulumi.get(self, "key_type") @property @pulumi.getter(name="reuseKey") def reuse_key(self) -> bool: """ Is the key reusable? Changing this forces a new resource to be created. 
""" return pulumi.get(self, "reuse_key") def _translate_property(self, prop): return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop @pulumi.output_type class CertifiateCertificatePolicyLifetimeAction(dict): def __init__(__self__, *, action: 'outputs.CertifiateCertificatePolicyLifetimeActionAction', trigger: 'outputs.CertifiateCertificatePolicyLifetimeActionTrigger'): """ :param 'CertifiateCertificatePolicyLifetimeActionActionArgs' action: A `action` block as defined below. :param 'CertifiateCertificatePolicyLifetimeActionTriggerArgs' trigger: A `trigger` block as defined below. """ pulumi.set(__self__, "action", action) pulumi.set(__self__, "trigger", trigger) @property @pulumi.getter def action(self) -> 'outputs.CertifiateCertificatePolicyLifetimeActionAction': """ A `action` block as defined below. """ return pulumi.get(self, "action") @property @pulumi.getter def trigger(self) -> 'outputs.CertifiateCertificatePolicyLifetimeActionTrigger': """ A `trigger` block as defined below. """ return pulumi.get(self, "trigger") def _translate_property(self, prop): return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop @pulumi.output_type class CertifiateCertificatePolicyLifetimeActionAction(dict): def __init__(__self__, *, action_type: str): """ :param str action_type: The Type of action to be performed when the lifetime trigger is triggerec. Possible values include `AutoRenew` and `EmailContacts`. Changing this forces a new resource to be created. """ pulumi.set(__self__, "action_type", action_type) @property @pulumi.getter(name="actionType") def action_type(self) -> str: """ The Type of action to be performed when the lifetime trigger is triggerec. Possible values include `AutoRenew` and `EmailContacts`. Changing this forces a new resource to be created. 
""" return pulumi.get(self, "action_type") def _translate_property(self, prop): return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop @pulumi.output_type class CertifiateCertificatePolicyLifetimeActionTrigger(dict): def __init__(__self__, *, days_before_expiry: Optional[int] = None, lifetime_percentage: Optional[int] = None): """ :param int days_before_expiry: The number of days before the Certificate expires that the action associated with this Trigger should run. Changing this forces a new resource to be created. Conflicts with `lifetime_percentage`. :param int lifetime_percentage: The percentage at which during the Certificates Lifetime the action associated with this Trigger should run. Changing this forces a new resource to be created. Conflicts with `days_before_expiry`. """ if days_before_expiry is not None: pulumi.set(__self__, "days_before_expiry", days_before_expiry) if lifetime_percentage is not None: pulumi.set(__self__, "lifetime_percentage", lifetime_percentage) @property @pulumi.getter(name="daysBeforeExpiry") def days_before_expiry(self) -> Optional[int]: """ The number of days before the Certificate expires that the action associated with this Trigger should run. Changing this forces a new resource to be created. Conflicts with `lifetime_percentage`. """ return pulumi.get(self, "days_before_expiry") @property @pulumi.getter(name="lifetimePercentage") def lifetime_percentage(self) -> Optional[int]: """ The percentage at which during the Certificates Lifetime the action associated with this Trigger should run. Changing this forces a new resource to be created. Conflicts with `days_before_expiry`. 
""" return pulumi.get(self, "lifetime_percentage") def _translate_property(self, prop): return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop @pulumi.output_type class CertifiateCertificatePolicySecretProperties(dict): def __init__(__self__, *, content_type: str): """ :param str content_type: The Content-Type of the Certificate, such as `application/x-pkcs12` for a PFX or `application/x-pem-file` for a PEM. Changing this forces a new resource to be created. """ pulumi.set(__self__, "content_type", content_type) @property @pulumi.getter(name="contentType") def content_type(self) -> str: """ The Content-Type of the Certificate, such as `application/x-pkcs12` for a PFX or `application/x-pem-file` for a PEM. Changing this forces a new resource to be created. """ return pulumi.get(self, "content_type") def _translate_property(self, prop): return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop @pulumi.output_type class CertifiateCertificatePolicyX509CertificateProperties(dict): def __init__(__self__, *, key_usages: Sequence[str], subject: str, validity_in_months: int, extended_key_usages: Optional[Sequence[str]] = None, subject_alternative_names: Optional['outputs.CertifiateCertificatePolicyX509CertificatePropertiesSubjectAlternativeNames'] = None): """ :param Sequence[str] key_usages: A list of uses associated with this Key. Possible values include `cRLSign`, `dataEncipherment`, `decipherOnly`, `digitalSignature`, `encipherOnly`, `keyAgreement`, `keyCertSign`, `keyEncipherment` and `nonRepudiation` and are case-sensitive. Changing this forces a new resource to be created. :param str subject: The Certificate's Subject. Changing this forces a new resource to be created. :param int validity_in_months: The Certificates Validity Period in Months. Changing this forces a new resource to be created. :param Sequence[str] extended_key_usages: A list of Extended/Enhanced Key Usages. Changing this forces a new resource to be created. 
:param 'CertifiateCertificatePolicyX509CertificatePropertiesSubjectAlternativeNamesArgs' subject_alternative_names: A `subject_alternative_names` block as defined below. """ pulumi.set(__self__, "key_usages", key_usages) pulumi.set(__self__, "subject", subject) pulumi.set(__self__, "validity_in_months", validity_in_months) if extended_key_usages is not None: pulumi.set(__self__, "extended_key_usages", extended_key_usages) if subject_alternative_names is not None: pulumi.set(__self__, "subject_alternative_names", subject_alternative_names) @property @pulumi.getter(name="keyUsages") def key_usages(self) -> Sequence[str]: """ A list of uses associated with this Key. Possible values include `cRLSign`, `dataEncipherment`, `decipherOnly`, `digitalSignature`, `encipherOnly`, `keyAgreement`, `keyCertSign`, `keyEncipherment` and `nonRepudiation` and are case-sensitive. Changing this forces a new resource to be created. """ return pulumi.get(self, "key_usages") @property @pulumi.getter def subject(self) -> str: """ The Certificate's Subject. Changing this forces a new resource to be created. """ return pulumi.get(self, "subject") @property @pulumi.getter(name="validityInMonths") def validity_in_months(self) -> int: """ The Certificates Validity Period in Months. Changing this forces a new resource to be created. """ return pulumi.get(self, "validity_in_months") @property @pulumi.getter(name="extendedKeyUsages") def extended_key_usages(self) -> Optional[Sequence[str]]: """ A list of Extended/Enhanced Key Usages. Changing this forces a new resource to be created. """ return pulumi.get(self, "extended_key_usages") @property @pulumi.getter(name="subjectAlternativeNames") def subject_alternative_names(self) -> Optional['outputs.CertifiateCertificatePolicyX509CertificatePropertiesSubjectAlternativeNames']: """ A `subject_alternative_names` block as defined below. 
""" return pulumi.get(self, "subject_alternative_names") def _translate_property(self, prop): return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop @pulumi.output_type class CertifiateCertificatePolicyX509CertificatePropertiesSubjectAlternativeNames(dict): def __init__(__self__, *, dns_names: Optional[Sequence[str]] = None, emails: Optional[Sequence[str]] = None, upns: Optional[Sequence[str]] = None): """ :param Sequence[str] dns_names: A list of alternative DNS names (FQDNs) identified by the Certificate. Changing this forces a new resource to be created. :param Sequence[str] emails: A list of email addresses identified by this Certificate. Changing this forces a new resource to be created. :param Sequence[str] upns: A list of User Principal Names identified by the Certificate. Changing this forces a new resource to be created. """ if dns_names is not None: pulumi.set(__self__, "dns_names", dns_names) if emails is not None: pulumi.set(__self__, "emails", emails) if upns is not None: pulumi.set(__self__, "upns", upns) @property @pulumi.getter(name="dnsNames") def dns_names(self) -> Optional[Sequence[str]]: """ A list of alternative DNS names (FQDNs) identified by the Certificate. Changing this forces a new resource to be created. """ return pulumi.get(self, "dns_names") @property @pulumi.getter def emails(self) -> Optional[Sequence[str]]: """ A list of email addresses identified by this Certificate. Changing this forces a new resource to be created. """ return pulumi.get(self, "emails") @property @pulumi.getter def upns(self) -> Optional[Sequence[str]]: """ A list of User Principal Names identified by the Certificate. Changing this forces a new resource to be created. 
""" return pulumi.get(self, "upns") def _translate_property(self, prop): return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop @pulumi.output_type class CertificateCertificate(dict): def __init__(__self__, *, contents: str, password: Optional[str] = None): """ :param str contents: The base64-encoded certificate contents. Changing this forces a new resource to be created. :param str password: The password associated with the certificate. Changing this forces a new resource to be created. """ pulumi.set(__self__, "contents", contents) if password is not None: pulumi.set(__self__, "password", password) @property @pulumi.getter def contents(self) -> str: """ The base64-encoded certificate contents. Changing this forces a new resource to be created. """ return pulumi.get(self, "contents") @property @pulumi.getter def password(self) -> Optional[str]: """ The password associated with the certificate. Changing this forces a new resource to be created. """ return pulumi.get(self, "password") def _translate_property(self, prop): return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop @pulumi.output_type class CertificateCertificateAttribute(dict): def __init__(__self__, *, created: Optional[str] = None, enabled: Optional[bool] = None, expires: Optional[str] = None, not_before: Optional[str] = None, recovery_level: Optional[str] = None, updated: Optional[str] = None): """ :param str created: The create time of the Key Vault Certificate. :param bool enabled: whether the Key Vault Certificate is enabled. :param str expires: The expires time of the Key Vault Certificate. :param str not_before: The not before valid time of the Key Vault Certificate. :param str recovery_level: The deletion recovery level of the Key Vault Certificate. :param str updated: The recent update time of the Key Vault Certificate. 
""" if created is not None: pulumi.set(__self__, "created", created) if enabled is not None: pulumi.set(__self__, "enabled", enabled) if expires is not None: pulumi.set(__self__, "expires", expires) if not_before is not None: pulumi.set(__self__, "not_before", not_before) if recovery_level is not None: pulumi.set(__self__, "recovery_level", recovery_level) if updated is not None: pulumi.set(__self__, "updated", updated) @property @pulumi.getter def created(self) -> Optional[str]: """ The create time of the Key Vault Certificate. """ return pulumi.get(self, "created") @property @pulumi.getter def enabled(self) -> Optional[bool]: """ whether the Key Vault Certificate is enabled. """ return pulumi.get(self, "enabled") @property @pulumi.getter def expires(self) -> Optional[str]: """ The expires time of the Key Vault Certificate. """ return pulumi.get(self, "expires") @property @pulumi.getter(name="notBefore") def not_before(self) -> Optional[str]: """ The not before valid time of the Key Vault Certificate. """ return pulumi.get(self, "not_before") @property @pulumi.getter(name="recoveryLevel") def recovery_level(self) -> Optional[str]: """ The deletion recovery level of the Key Vault Certificate. """ return pulumi.get(self, "recovery_level") @property @pulumi.getter def updated(self) -> Optional[str]: """ The recent update time of the Key Vault Certificate. 
""" return pulumi.get(self, "updated") def _translate_property(self, prop): return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop @pulumi.output_type class CertificateCertificatePolicy(dict): def __init__(__self__, *, issuer_parameters: 'outputs.CertificateCertificatePolicyIssuerParameters', key_properties: 'outputs.CertificateCertificatePolicyKeyProperties', secret_properties: 'outputs.CertificateCertificatePolicySecretProperties', lifetime_actions: Optional[Sequence['outputs.CertificateCertificatePolicyLifetimeAction']] = None, x509_certificate_properties: Optional['outputs.CertificateCertificatePolicyX509CertificateProperties'] = None): """ :param 'CertificateCertificatePolicyIssuerParametersArgs' issuer_parameters: A `issuer_parameters` block as defined below. :param 'CertificateCertificatePolicyKeyPropertiesArgs' key_properties: A `key_properties` block as defined below. :param 'CertificateCertificatePolicySecretPropertiesArgs' secret_properties: A `secret_properties` block as defined below. :param Sequence['CertificateCertificatePolicyLifetimeActionArgs'] lifetime_actions: A `lifetime_action` block as defined below. :param 'CertificateCertificatePolicyX509CertificatePropertiesArgs' x509_certificate_properties: A `x509_certificate_properties` block as defined below. Required when `certificate` block is not specified. """ pulumi.set(__self__, "issuer_parameters", issuer_parameters) pulumi.set(__self__, "key_properties", key_properties) pulumi.set(__self__, "secret_properties", secret_properties) if lifetime_actions is not None: pulumi.set(__self__, "lifetime_actions", lifetime_actions) if x509_certificate_properties is not None: pulumi.set(__self__, "x509_certificate_properties", x509_certificate_properties) @property @pulumi.getter(name="issuerParameters") def issuer_parameters(self) -> 'outputs.CertificateCertificatePolicyIssuerParameters': """ A `issuer_parameters` block as defined below. 
""" return pulumi.get(self, "issuer_parameters") @property @pulumi.getter(name="keyProperties") def key_properties(self) -> 'outputs.CertificateCertificatePolicyKeyProperties': """ A `key_properties` block as defined below. """ return pulumi.get(self, "key_properties") @property @pulumi.getter(name="secretProperties") def secret_properties(self) -> 'outputs.CertificateCertificatePolicySecretProperties': """ A `secret_properties` block as defined below. """ return pulumi.get(self, "secret_properties") @property @pulumi.getter(name="lifetimeActions") def lifetime_actions(self) -> Optional[Sequence['outputs.CertificateCertificatePolicyLifetimeAction']]: """ A `lifetime_action` block as defined below. """ return pulumi.get(self, "lifetime_actions") @property @pulumi.getter(name="x509CertificateProperties") def x509_certificate_properties(self) -> Optional['outputs.CertificateCertificatePolicyX509CertificateProperties']: """ A `x509_certificate_properties` block as defined below. Required when `certificate` block is not specified. """ return pulumi.get(self, "x509_certificate_properties") def _translate_property(self, prop): return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop @pulumi.output_type class CertificateCertificatePolicyIssuerParameters(dict): def __init__(__self__, *, name: str): """ :param str name: The name of the Certificate Issuer. Possible values include `Self` (for self-signed certificate), or `Unknown` (for a certificate issuing authority like `Let's Encrypt` and Azure direct supported ones). Changing this forces a new resource to be created. """ pulumi.set(__self__, "name", name) @property @pulumi.getter def name(self) -> str: """ The name of the Certificate Issuer. Possible values include `Self` (for self-signed certificate), or `Unknown` (for a certificate issuing authority like `Let's Encrypt` and Azure direct supported ones). Changing this forces a new resource to be created. 
""" return pulumi.get(self, "name") def _translate_property(self, prop): return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop @pulumi.output_type class CertificateCertificatePolicyKeyProperties(dict): def __init__(__self__, *, exportable: bool, key_size: int, key_type: str, reuse_key: bool): """ :param bool exportable: Is this Certificate Exportable? Changing this forces a new resource to be created. :param int key_size: The size of the Key used in the Certificate. Possible values include `2048`, `3072`, and `4096`. Changing this forces a new resource to be created. :param str key_type: Specifies the Type of Key, such as `RSA`. Changing this forces a new resource to be created. :param bool reuse_key: Is the key reusable? Changing this forces a new resource to be created. """ pulumi.set(__self__, "exportable", exportable) pulumi.set(__self__, "key_size", key_size) pulumi.set(__self__, "key_type", key_type) pulumi.set(__self__, "reuse_key", reuse_key) @property @pulumi.getter def exportable(self) -> bool: """ Is this Certificate Exportable? Changing this forces a new resource to be created. """ return pulumi.get(self, "exportable") @property @pulumi.getter(name="keySize") def key_size(self) -> int: """ The size of the Key used in the Certificate. Possible values include `2048`, `3072`, and `4096`. Changing this forces a new resource to be created. """ return pulumi.get(self, "key_size") @property @pulumi.getter(name="keyType") def key_type(self) -> str: """ Specifies the Type of Key, such as `RSA`. Changing this forces a new resource to be created. """ return pulumi.get(self, "key_type") @property @pulumi.getter(name="reuseKey") def reuse_key(self) -> bool: """ Is the key reusable? Changing this forces a new resource to be created. 
        """
        return pulumi.get(self, "reuse_key")

    def _translate_property(self, prop):
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop


@pulumi.output_type
class CertificateCertificatePolicyLifetimeAction(dict):
    def __init__(__self__, *,
                 action: 'outputs.CertificateCertificatePolicyLifetimeActionAction',
                 trigger: 'outputs.CertificateCertificatePolicyLifetimeActionTrigger'):
        """
        :param 'CertificateCertificatePolicyLifetimeActionActionArgs' action: A `action` block as defined below.
        :param 'CertificateCertificatePolicyLifetimeActionTriggerArgs' trigger: A `trigger` block as defined below.
        """
        pulumi.set(__self__, "action", action)
        pulumi.set(__self__, "trigger", trigger)

    @property
    @pulumi.getter
    def action(self) -> 'outputs.CertificateCertificatePolicyLifetimeActionAction':
        """
        A `action` block as defined below.
        """
        return pulumi.get(self, "action")

    @property
    @pulumi.getter
    def trigger(self) -> 'outputs.CertificateCertificatePolicyLifetimeActionTrigger':
        """
        A `trigger` block as defined below.
        """
        return pulumi.get(self, "trigger")

    def _translate_property(self, prop):
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop


@pulumi.output_type
class CertificateCertificatePolicyLifetimeActionAction(dict):
    def __init__(__self__, *,
                 action_type: str):
        """
        :param str action_type: The Type of action to be performed when the lifetime trigger is triggered. Possible values include `AutoRenew` and `EmailContacts`. Changing this forces a new resource to be created.
        """
        pulumi.set(__self__, "action_type", action_type)

    @property
    @pulumi.getter(name="actionType")
    def action_type(self) -> str:
        """
        The Type of action to be performed when the lifetime trigger is triggered. Possible values include `AutoRenew` and `EmailContacts`. Changing this forces a new resource to be created.
""" return pulumi.get(self, "action_type") def _translate_property(self, prop): return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop @pulumi.output_type class CertificateCertificatePolicyLifetimeActionTrigger(dict): def __init__(__self__, *, days_before_expiry: Optional[int] = None, lifetime_percentage: Optional[int] = None): """ :param int days_before_expiry: The number of days before the Certificate expires that the action associated with this Trigger should run. Changing this forces a new resource to be created. Conflicts with `lifetime_percentage`. :param int lifetime_percentage: The percentage at which during the Certificates Lifetime the action associated with this Trigger should run. Changing this forces a new resource to be created. Conflicts with `days_before_expiry`. """ if days_before_expiry is not None: pulumi.set(__self__, "days_before_expiry", days_before_expiry) if lifetime_percentage is not None: pulumi.set(__self__, "lifetime_percentage", lifetime_percentage) @property @pulumi.getter(name="daysBeforeExpiry") def days_before_expiry(self) -> Optional[int]: """ The number of days before the Certificate expires that the action associated with this Trigger should run. Changing this forces a new resource to be created. Conflicts with `lifetime_percentage`. """ return pulumi.get(self, "days_before_expiry") @property @pulumi.getter(name="lifetimePercentage") def lifetime_percentage(self) -> Optional[int]: """ The percentage at which during the Certificates Lifetime the action associated with this Trigger should run. Changing this forces a new resource to be created. Conflicts with `days_before_expiry`. 
""" return pulumi.get(self, "lifetime_percentage") def _translate_property(self, prop): return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop @pulumi.output_type class CertificateCertificatePolicySecretProperties(dict): def __init__(__self__, *, content_type: str): """ :param str content_type: The Content-Type of the Certificate, such as `application/x-pkcs12` for a PFX or `application/x-pem-file` for a PEM. Changing this forces a new resource to be created. """ pulumi.set(__self__, "content_type", content_type) @property @pulumi.getter(name="contentType") def content_type(self) -> str: """ The Content-Type of the Certificate, such as `application/x-pkcs12` for a PFX or `application/x-pem-file` for a PEM. Changing this forces a new resource to be created. """ return pulumi.get(self, "content_type") def _translate_property(self, prop): return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop @pulumi.output_type class CertificateCertificatePolicyX509CertificateProperties(dict): def __init__(__self__, *, key_usages: Sequence[str], subject: str, validity_in_months: int, extended_key_usages: Optional[Sequence[str]] = None, subject_alternative_names: Optional['outputs.CertificateCertificatePolicyX509CertificatePropertiesSubjectAlternativeNames'] = None): """ :param Sequence[str] key_usages: A list of uses associated with this Key. Possible values include `cRLSign`, `dataEncipherment`, `decipherOnly`, `digitalSignature`, `encipherOnly`, `keyAgreement`, `keyCertSign`, `keyEncipherment` and `nonRepudiation` and are case-sensitive. Changing this forces a new resource to be created. :param str subject: The Certificate's Subject. Changing this forces a new resource to be created. :param int validity_in_months: The Certificates Validity Period in Months. Changing this forces a new resource to be created. :param Sequence[str] extended_key_usages: A list of Extended/Enhanced Key Usages. Changing this forces a new resource to be created. 
:param 'CertificateCertificatePolicyX509CertificatePropertiesSubjectAlternativeNamesArgs' subject_alternative_names: A `subject_alternative_names` block as defined below. """ pulumi.set(__self__, "key_usages", key_usages) pulumi.set(__self__, "subject", subject) pulumi.set(__self__, "validity_in_months", validity_in_months) if extended_key_usages is not None: pulumi.set(__self__, "extended_key_usages", extended_key_usages) if subject_alternative_names is not None: pulumi.set(__self__, "subject_alternative_names", subject_alternative_names) @property @pulumi.getter(name="keyUsages") def key_usages(self) -> Sequence[str]: """ A list of uses associated with this Key. Possible values include `cRLSign`, `dataEncipherment`, `decipherOnly`, `digitalSignature`, `encipherOnly`, `keyAgreement`, `keyCertSign`, `keyEncipherment` and `nonRepudiation` and are case-sensitive. Changing this forces a new resource to be created. """ return pulumi.get(self, "key_usages") @property @pulumi.getter def subject(self) -> str: """ The Certificate's Subject. Changing this forces a new resource to be created. """ return pulumi.get(self, "subject") @property @pulumi.getter(name="validityInMonths") def validity_in_months(self) -> int: """ The Certificates Validity Period in Months. Changing this forces a new resource to be created. """ return pulumi.get(self, "validity_in_months") @property @pulumi.getter(name="extendedKeyUsages") def extended_key_usages(self) -> Optional[Sequence[str]]: """ A list of Extended/Enhanced Key Usages. Changing this forces a new resource to be created. """ return pulumi.get(self, "extended_key_usages") @property @pulumi.getter(name="subjectAlternativeNames") def subject_alternative_names(self) -> Optional['outputs.CertificateCertificatePolicyX509CertificatePropertiesSubjectAlternativeNames']: """ A `subject_alternative_names` block as defined below. 
""" return pulumi.get(self, "subject_alternative_names") def _translate_property(self, prop): return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop @pulumi.output_type class CertificateCertificatePolicyX509CertificatePropertiesSubjectAlternativeNames(dict): def __init__(__self__, *, dns_names: Optional[Sequence[str]] = None, emails: Optional[Sequence[str]] = None, upns: Optional[Sequence[str]] = None): """ :param Sequence[str] dns_names: A list of alternative DNS names (FQDNs) identified by the Certificate. Changing this forces a new resource to be created. :param Sequence[str] emails: A list of email addresses identified by this Certificate. Changing this forces a new resource to be created. :param Sequence[str] upns: A list of User Principal Names identified by the Certificate. Changing this forces a new resource to be created. """ if dns_names is not None: pulumi.set(__self__, "dns_names", dns_names) if emails is not None: pulumi.set(__self__, "emails", emails) if upns is not None: pulumi.set(__self__, "upns", upns) @property @pulumi.getter(name="dnsNames") def dns_names(self) -> Optional[Sequence[str]]: """ A list of alternative DNS names (FQDNs) identified by the Certificate. Changing this forces a new resource to be created. """ return pulumi.get(self, "dns_names") @property @pulumi.getter def emails(self) -> Optional[Sequence[str]]: """ A list of email addresses identified by this Certificate. Changing this forces a new resource to be created. """ return pulumi.get(self, "emails") @property @pulumi.getter def upns(self) -> Optional[Sequence[str]]: """ A list of User Principal Names identified by the Certificate. Changing this forces a new resource to be created. 
""" return pulumi.get(self, "upns") def _translate_property(self, prop): return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop @pulumi.output_type class CertificateIssuerAdmin(dict): def __init__(__self__, *, email_address: str, first_name: Optional[str] = None, last_name: Optional[str] = None, phone: Optional[str] = None): """ :param str email_address: E-mail address of the admin. :param str first_name: First name of the admin. :param str last_name: Last name of the admin. :param str phone: Phone number of the admin. """ pulumi.set(__self__, "email_address", email_address) if first_name is not None: pulumi.set(__self__, "first_name", first_name) if last_name is not None: pulumi.set(__self__, "last_name", last_name) if phone is not None: pulumi.set(__self__, "phone", phone) @property @pulumi.getter(name="emailAddress") def email_address(self) -> str: """ E-mail address of the admin. """ return pulumi.get(self, "email_address") @property @pulumi.getter(name="firstName") def first_name(self) -> Optional[str]: """ First name of the admin. """ return pulumi.get(self, "first_name") @property @pulumi.getter(name="lastName") def last_name(self) -> Optional[str]: """ Last name of the admin. """ return pulumi.get(self, "last_name") @property @pulumi.getter def phone(self) -> Optional[str]: """ Phone number of the admin. """ return pulumi.get(self, "phone") def _translate_property(self, prop): return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop @pulumi.output_type class KeyVaultAccessPolicy(dict): def __init__(__self__, *, object_id: str, tenant_id: str, application_id: Optional[str] = None, certificate_permissions: Optional[Sequence[str]] = None, key_permissions: Optional[Sequence[str]] = None, secret_permissions: Optional[Sequence[str]] = None, storage_permissions: Optional[Sequence[str]] = None): """ :param str object_id: The object ID of a user, service principal or security group in the Azure Active Directory tenant for the vault. 
The object ID must be unique for the list of access policies. :param str tenant_id: The Azure Active Directory tenant ID that should be used for authenticating requests to the key vault. Must match the `tenant_id` used above. :param str application_id: The object ID of an Application in Azure Active Directory. :param Sequence[str] certificate_permissions: List of certificate permissions, must be one or more from the following: `backup`, `create`, `delete`, `deleteissuers`, `get`, `getissuers`, `import`, `list`, `listissuers`, `managecontacts`, `manageissuers`, `purge`, `recover`, `restore`, `setissuers` and `update`. :param Sequence[str] key_permissions: List of key permissions, must be one or more from the following: `backup`, `create`, `decrypt`, `delete`, `encrypt`, `get`, `import`, `list`, `purge`, `recover`, `restore`, `sign`, `unwrapKey`, `update`, `verify` and `wrapKey`. :param Sequence[str] secret_permissions: List of secret permissions, must be one or more from the following: `backup`, `delete`, `get`, `list`, `purge`, `recover`, `restore` and `set`. :param Sequence[str] storage_permissions: List of storage permissions, must be one or more from the following: `backup`, `delete`, `deletesas`, `get`, `getsas`, `list`, `listsas`, `purge`, `recover`, `regeneratekey`, `restore`, `set`, `setsas` and `update`. 
""" pulumi.set(__self__, "object_id", object_id) pulumi.set(__self__, "tenant_id", tenant_id) if application_id is not None: pulumi.set(__self__, "application_id", application_id) if certificate_permissions is not None: pulumi.set(__self__, "certificate_permissions", certificate_permissions) if key_permissions is not None: pulumi.set(__self__, "key_permissions", key_permissions) if secret_permissions is not None: pulumi.set(__self__, "secret_permissions", secret_permissions) if storage_permissions is not None: pulumi.set(__self__, "storage_permissions", storage_permissions) @property @pulumi.getter(name="objectId") def object_id(self) -> str: """ The object ID of a user, service principal or security group in the Azure Active Directory tenant for the vault. The object ID must be unique for the list of access policies. """ return pulumi.get(self, "object_id") @property @pulumi.getter(name="tenantId") def tenant_id(self) -> str: """ The Azure Active Directory tenant ID that should be used for authenticating requests to the key vault. Must match the `tenant_id` used above. """ return pulumi.get(self, "tenant_id") @property @pulumi.getter(name="applicationId") def application_id(self) -> Optional[str]: """ The object ID of an Application in Azure Active Directory. """ return pulumi.get(self, "application_id") @property @pulumi.getter(name="certificatePermissions") def certificate_permissions(self) -> Optional[Sequence[str]]: """ List of certificate permissions, must be one or more from the following: `backup`, `create`, `delete`, `deleteissuers`, `get`, `getissuers`, `import`, `list`, `listissuers`, `managecontacts`, `manageissuers`, `purge`, `recover`, `restore`, `setissuers` and `update`. 
""" return pulumi.get(self, "certificate_permissions") @property @pulumi.getter(name="keyPermissions") def key_permissions(self) -> Optional[Sequence[str]]: """ List of key permissions, must be one or more from the following: `backup`, `create`, `decrypt`, `delete`, `encrypt`, `get`, `import`, `list`, `purge`, `recover`, `restore`, `sign`, `unwrapKey`, `update`, `verify` and `wrapKey`. """ return pulumi.get(self, "key_permissions") @property @pulumi.getter(name="secretPermissions") def secret_permissions(self) -> Optional[Sequence[str]]: """ List of secret permissions, must be one or more from the following: `backup`, `delete`, `get`, `list`, `purge`, `recover`, `restore` and `set`. """ return pulumi.get(self, "secret_permissions") @property @pulumi.getter(name="storagePermissions") def storage_permissions(self) -> Optional[Sequence[str]]: """ List of storage permissions, must be one or more from the following: `backup`, `delete`, `deletesas`, `get`, `getsas`, `list`, `listsas`, `purge`, `recover`, `regeneratekey`, `restore`, `set`, `setsas` and `update`. """ return pulumi.get(self, "storage_permissions") def _translate_property(self, prop): return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop @pulumi.output_type class KeyVaultContact(dict): def __init__(__self__, *, email: str, name: Optional[str] = None, phone: Optional[str] = None): """ :param str email: E-mail address of the contact. :param str name: Name of the contact. :param str phone: Phone number of the contact. """ pulumi.set(__self__, "email", email) if name is not None: pulumi.set(__self__, "name", name) if phone is not None: pulumi.set(__self__, "phone", phone) @property @pulumi.getter def email(self) -> str: """ E-mail address of the contact. """ return pulumi.get(self, "email") @property @pulumi.getter def name(self) -> Optional[str]: """ Name of the contact. """ return pulumi.get(self, "name") @property @pulumi.getter def phone(self) -> Optional[str]: """ Phone number of the contact. 
""" return pulumi.get(self, "phone") def _translate_property(self, prop): return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop @pulumi.output_type class KeyVaultNetworkAcls(dict): def __init__(__self__, *, bypass: str, default_action: str, ip_rules: Optional[Sequence[str]] = None, virtual_network_subnet_ids: Optional[Sequence[str]] = None): """ :param str bypass: Specifies which traffic can bypass the network rules. Possible values are `AzureServices` and `None`. :param str default_action: The Default Action to use when no rules match from `ip_rules` / `virtual_network_subnet_ids`. Possible values are `Allow` and `Deny`. :param Sequence[str] ip_rules: One or more IP Addresses, or CIDR Blocks which should be able to access the Key Vault. :param Sequence[str] virtual_network_subnet_ids: One or more Subnet ID's which should be able to access this Key Vault. """ pulumi.set(__self__, "bypass", bypass) pulumi.set(__self__, "default_action", default_action) if ip_rules is not None: pulumi.set(__self__, "ip_rules", ip_rules) if virtual_network_subnet_ids is not None: pulumi.set(__self__, "virtual_network_subnet_ids", virtual_network_subnet_ids) @property @pulumi.getter def bypass(self) -> str: """ Specifies which traffic can bypass the network rules. Possible values are `AzureServices` and `None`. """ return pulumi.get(self, "bypass") @property @pulumi.getter(name="defaultAction") def default_action(self) -> str: """ The Default Action to use when no rules match from `ip_rules` / `virtual_network_subnet_ids`. Possible values are `Allow` and `Deny`. """ return pulumi.get(self, "default_action") @property @pulumi.getter(name="ipRules") def ip_rules(self) -> Optional[Sequence[str]]: """ One or more IP Addresses, or CIDR Blocks which should be able to access the Key Vault. 
""" return pulumi.get(self, "ip_rules") @property @pulumi.getter(name="virtualNetworkSubnetIds") def virtual_network_subnet_ids(self) -> Optional[Sequence[str]]: """ One or more Subnet ID's which should be able to access this Key Vault. """ return pulumi.get(self, "virtual_network_subnet_ids") def _translate_property(self, prop): return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop @pulumi.output_type class GetCertificateCertificatePolicyResult(dict): def __init__(__self__, *, issuer_parameters: Sequence['outputs.GetCertificateCertificatePolicyIssuerParameterResult'], key_properties: Sequence['outputs.GetCertificateCertificatePolicyKeyPropertyResult'], secret_properties: Sequence['outputs.GetCertificateCertificatePolicySecretPropertyResult'], x509_certificate_properties: Sequence['outputs.GetCertificateCertificatePolicyX509CertificatePropertyResult'], lifetime_actions: Optional[Sequence['outputs.GetCertificateCertificatePolicyLifetimeActionResult']] = None): """ :param Sequence['GetCertificateCertificatePolicyIssuerParameterArgs'] issuer_parameters: A `issuer_parameters` block as defined below. :param Sequence['GetCertificateCertificatePolicyKeyPropertyArgs'] key_properties: A `key_properties` block as defined below. :param Sequence['GetCertificateCertificatePolicySecretPropertyArgs'] secret_properties: A `secret_properties` block as defined below. :param Sequence['GetCertificateCertificatePolicyX509CertificatePropertyArgs'] x509_certificate_properties: An `x509_certificate_properties` block as defined below. :param Sequence['GetCertificateCertificatePolicyLifetimeActionArgs'] lifetime_actions: A `lifetime_action` block as defined below. 
""" pulumi.set(__self__, "issuer_parameters", issuer_parameters) pulumi.set(__self__, "key_properties", key_properties) pulumi.set(__self__, "secret_properties", secret_properties) pulumi.set(__self__, "x509_certificate_properties", x509_certificate_properties) if lifetime_actions is not None: pulumi.set(__self__, "lifetime_actions", lifetime_actions) @property @pulumi.getter(name="issuerParameters") def issuer_parameters(self) -> Sequence['outputs.GetCertificateCertificatePolicyIssuerParameterResult']: """ A `issuer_parameters` block as defined below. """ return pulumi.get(self, "issuer_parameters") @property @pulumi.getter(name="keyProperties") def key_properties(self) -> Sequence['outputs.GetCertificateCertificatePolicyKeyPropertyResult']: """ A `key_properties` block as defined below. """ return pulumi.get(self, "key_properties") @property @pulumi.getter(name="secretProperties") def secret_properties(self) -> Sequence['outputs.GetCertificateCertificatePolicySecretPropertyResult']: """ A `secret_properties` block as defined below. """ return pulumi.get(self, "secret_properties") @property @pulumi.getter(name="x509CertificateProperties") def x509_certificate_properties(self) -> Sequence['outputs.GetCertificateCertificatePolicyX509CertificatePropertyResult']: """ An `x509_certificate_properties` block as defined below. """ return pulumi.get(self, "x509_certificate_properties") @property @pulumi.getter(name="lifetimeActions") def lifetime_actions(self) -> Optional[Sequence['outputs.GetCertificateCertificatePolicyLifetimeActionResult']]: """ A `lifetime_action` block as defined below. """ return pulumi.get(self, "lifetime_actions") @pulumi.output_type class GetCertificateCertificatePolicyIssuerParameterResult(dict): def __init__(__self__, *, name: str): """ :param str name: Specifies the name of the Key Vault Secret. """ pulumi.set(__self__, "name", name) @property @pulumi.getter def name(self) -> str: """ Specifies the name of the Key Vault Secret. 
""" return pulumi.get(self, "name") @pulumi.output_type class GetCertificateCertificatePolicyKeyPropertyResult(dict): def __init__(__self__, *, exportable: bool, key_size: int, key_type: str, reuse_key: bool): """ :param bool exportable: Is this Certificate Exportable? :param int key_size: The size of the Key used in the Certificate. :param str key_type: Specifies the Type of Key, for example `RSA`. :param bool reuse_key: Is the key reusable? """ pulumi.set(__self__, "exportable", exportable) pulumi.set(__self__, "key_size", key_size) pulumi.set(__self__, "key_type", key_type) pulumi.set(__self__, "reuse_key", reuse_key) @property @pulumi.getter def exportable(self) -> bool: """ Is this Certificate Exportable? """ return pulumi.get(self, "exportable") @property @pulumi.getter(name="keySize") def key_size(self) -> int: """ The size of the Key used in the Certificate. """ return pulumi.get(self, "key_size") @property @pulumi.getter(name="keyType") def key_type(self) -> str: """ Specifies the Type of Key, for example `RSA`. """ return pulumi.get(self, "key_type") @property @pulumi.getter(name="reuseKey") def reuse_key(self) -> bool: """ Is the key reusable? """ return pulumi.get(self, "reuse_key") @pulumi.output_type class GetCertificateCertificatePolicyLifetimeActionResult(dict): def __init__(__self__, *, actions: Sequence['outputs.GetCertificateCertificatePolicyLifetimeActionActionResult'], triggers: Sequence['outputs.GetCertificateCertificatePolicyLifetimeActionTriggerResult']): """ :param Sequence['GetCertificateCertificatePolicyLifetimeActionActionArgs'] actions: A `action` block as defined below. :param Sequence['GetCertificateCertificatePolicyLifetimeActionTriggerArgs'] triggers: A `trigger` block as defined below. 
        """
        pulumi.set(__self__, "actions", actions)
        pulumi.set(__self__, "triggers", triggers)

    @property
    @pulumi.getter
    def actions(self) -> Sequence['outputs.GetCertificateCertificatePolicyLifetimeActionActionResult']:
        """
        A `action` block as defined below.
        """
        return pulumi.get(self, "actions")

    @property
    @pulumi.getter
    def triggers(self) -> Sequence['outputs.GetCertificateCertificatePolicyLifetimeActionTriggerResult']:
        """
        A `trigger` block as defined below.
        """
        return pulumi.get(self, "triggers")


@pulumi.output_type
class GetCertificateCertificatePolicyLifetimeActionActionResult(dict):
    def __init__(__self__, *,
                 action_type: str):
        """
        :param str action_type: The Type of action to be performed when the lifetime trigger is triggered.
        """
        pulumi.set(__self__, "action_type", action_type)

    @property
    @pulumi.getter(name="actionType")
    def action_type(self) -> str:
        """
        The Type of action to be performed when the lifetime trigger is triggered.
        """
        return pulumi.get(self, "action_type")


@pulumi.output_type
class GetCertificateCertificatePolicyLifetimeActionTriggerResult(dict):
    def __init__(__self__, *,
                 days_before_expiry: int,
                 lifetime_percentage: int):
        """
        :param int days_before_expiry: The number of days before the Certificate expires that the action associated with this Trigger should run.
        :param int lifetime_percentage: The percentage at which during the Certificates Lifetime the action associated with this Trigger should run.
        """
        pulumi.set(__self__, "days_before_expiry", days_before_expiry)
        pulumi.set(__self__, "lifetime_percentage", lifetime_percentage)

    @property
    @pulumi.getter(name="daysBeforeExpiry")
    def days_before_expiry(self) -> int:
        """
        The number of days before the Certificate expires that the action associated with this Trigger should run.
""" return pulumi.get(self, "days_before_expiry") @property @pulumi.getter(name="lifetimePercentage") def lifetime_percentage(self) -> int: """ The percentage at which during the Certificates Lifetime the action associated with this Trigger should run. """ return pulumi.get(self, "lifetime_percentage") @pulumi.output_type class GetCertificateCertificatePolicySecretPropertyResult(dict): def __init__(__self__, *, content_type: str): """ :param str content_type: The Content-Type of the Certificate, for example `application/x-pkcs12` for a PFX or `application/x-pem-file` for a PEM. """ pulumi.set(__self__, "content_type", content_type) @property @pulumi.getter(name="contentType") def content_type(self) -> str: """ The Content-Type of the Certificate, for example `application/x-pkcs12` for a PFX or `application/x-pem-file` for a PEM. """ return pulumi.get(self, "content_type") @pulumi.output_type class GetCertificateCertificatePolicyX509CertificatePropertyResult(dict): def __init__(__self__, *, extended_key_usages: Sequence[str], key_usages: Sequence[str], subject: str, subject_alternative_names: Sequence['outputs.GetCertificateCertificatePolicyX509CertificatePropertySubjectAlternativeNameResult'], validity_in_months: int): """ :param Sequence[str] extended_key_usages: A list of Extended/Enhanced Key Usages. :param Sequence[str] key_usages: A list of uses associated with this Key. :param str subject: The Certificate's Subject. :param Sequence['GetCertificateCertificatePolicyX509CertificatePropertySubjectAlternativeNameArgs'] subject_alternative_names: A `subject_alternative_names` block as defined below. :param int validity_in_months: The Certificates Validity Period in Months. 
""" pulumi.set(__self__, "extended_key_usages", extended_key_usages) pulumi.set(__self__, "key_usages", key_usages) pulumi.set(__self__, "subject", subject) pulumi.set(__self__, "subject_alternative_names", subject_alternative_names) pulumi.set(__self__, "validity_in_months", validity_in_months) @property @pulumi.getter(name="extendedKeyUsages") def extended_key_usages(self) -> Sequence[str]: """ A list of Extended/Enhanced Key Usages. """ return pulumi.get(self, "extended_key_usages") @property @pulumi.getter(name="keyUsages") def key_usages(self) -> Sequence[str]: """ A list of uses associated with this Key. """ return pulumi.get(self, "key_usages") @property @pulumi.getter def subject(self) -> str: """ The Certificate's Subject. """ return pulumi.get(self, "subject") @property @pulumi.getter(name="subjectAlternativeNames") def subject_alternative_names(self) -> Sequence['outputs.GetCertificateCertificatePolicyX509CertificatePropertySubjectAlternativeNameResult']: """ A `subject_alternative_names` block as defined below. """ return pulumi.get(self, "subject_alternative_names") @property @pulumi.getter(name="validityInMonths") def validity_in_months(self) -> int: """ The Certificates Validity Period in Months. """ return pulumi.get(self, "validity_in_months") @pulumi.output_type class GetCertificateCertificatePolicyX509CertificatePropertySubjectAlternativeNameResult(dict): def __init__(__self__, *, dns_names: Sequence[str], emails: Sequence[str], upns: Sequence[str]): """ :param Sequence[str] dns_names: A list of alternative DNS names (FQDNs) identified by the Certificate. :param Sequence[str] emails: A list of email addresses identified by this Certificate. :param Sequence[str] upns: A list of User Principal Names identified by the Certificate. 
""" pulumi.set(__self__, "dns_names", dns_names) pulumi.set(__self__, "emails", emails) pulumi.set(__self__, "upns", upns) @property @pulumi.getter(name="dnsNames") def dns_names(self) -> Sequence[str]: """ A list of alternative DNS names (FQDNs) identified by the Certificate. """ return pulumi.get(self, "dns_names") @property @pulumi.getter def emails(self) -> Sequence[str]: """ A list of email addresses identified by this Certificate. """ return pulumi.get(self, "emails") @property @pulumi.getter def upns(self) -> Sequence[str]: """ A list of User Principal Names identified by the Certificate. """ return pulumi.get(self, "upns") @pulumi.output_type class GetCertificateIssuerAdminResult(dict): def __init__(__self__, *, email_address: str, first_name: str, last_name: str, phone: str): """ :param str email_address: E-mail address of the admin. :param str first_name: First name of the admin. :param str last_name: Last name of the admin. :param str phone: Phone number of the admin. """ pulumi.set(__self__, "email_address", email_address) pulumi.set(__self__, "first_name", first_name) pulumi.set(__self__, "last_name", last_name) pulumi.set(__self__, "phone", phone) @property @pulumi.getter(name="emailAddress") def email_address(self) -> str: """ E-mail address of the admin. """ return pulumi.get(self, "email_address") @property @pulumi.getter(name="firstName") def first_name(self) -> str: """ First name of the admin. """ return pulumi.get(self, "first_name") @property @pulumi.getter(name="lastName") def last_name(self) -> str: """ Last name of the admin. """ return pulumi.get(self, "last_name") @property @pulumi.getter def phone(self) -> str: """ Phone number of the admin. 
""" return pulumi.get(self, "phone") @pulumi.output_type class GetKeyVaultAccessPolicyResult(dict): def __init__(__self__, *, application_id: str, certificate_permissions: Sequence[str], key_permissions: Sequence[str], object_id: str, secret_permissions: Sequence[str], storage_permissions: Sequence[str], tenant_id: str): """ :param str application_id: The Object ID of a Azure Active Directory Application. :param Sequence[str] certificate_permissions: A list of certificate permissions applicable to this Access Policy. :param Sequence[str] key_permissions: A list of key permissions applicable to this Access Policy. :param str object_id: An Object ID of a User, Service Principal or Security Group. :param Sequence[str] secret_permissions: A list of secret permissions applicable to this Access Policy. :param Sequence[str] storage_permissions: A list of storage permissions applicable to this Access Policy. :param str tenant_id: The Azure Active Directory Tenant ID used to authenticate requests for this Key Vault. """ pulumi.set(__self__, "application_id", application_id) pulumi.set(__self__, "certificate_permissions", certificate_permissions) pulumi.set(__self__, "key_permissions", key_permissions) pulumi.set(__self__, "object_id", object_id) pulumi.set(__self__, "secret_permissions", secret_permissions) pulumi.set(__self__, "storage_permissions", storage_permissions) pulumi.set(__self__, "tenant_id", tenant_id) @property @pulumi.getter(name="applicationId") def application_id(self) -> str: """ The Object ID of a Azure Active Directory Application. """ return pulumi.get(self, "application_id") @property @pulumi.getter(name="certificatePermissions") def certificate_permissions(self) -> Sequence[str]: """ A list of certificate permissions applicable to this Access Policy. 
""" return pulumi.get(self, "certificate_permissions") @property @pulumi.getter(name="keyPermissions") def key_permissions(self) -> Sequence[str]: """ A list of key permissions applicable to this Access Policy. """ return pulumi.get(self, "key_permissions") @property @pulumi.getter(name="objectId") def object_id(self) -> str: """ An Object ID of a User, Service Principal or Security Group. """ return pulumi.get(self, "object_id") @property @pulumi.getter(name="secretPermissions") def secret_permissions(self) -> Sequence[str]: """ A list of secret permissions applicable to this Access Policy. """ return pulumi.get(self, "secret_permissions") @property @pulumi.getter(name="storagePermissions") def storage_permissions(self) -> Sequence[str]: """ A list of storage permissions applicable to this Access Policy. """ return pulumi.get(self, "storage_permissions") @property @pulumi.getter(name="tenantId") def tenant_id(self) -> str: """ The Azure Active Directory Tenant ID used to authenticate requests for this Key Vault. """ return pulumi.get(self, "tenant_id") @pulumi.output_type class GetKeyVaultNetworkAclResult(dict): def __init__(__self__, *, bypass: str, default_action: str, ip_rules: Sequence[str], virtual_network_subnet_ids: Sequence[str]): pulumi.set(__self__, "bypass", bypass) pulumi.set(__self__, "default_action", default_action) pulumi.set(__self__, "ip_rules", ip_rules) pulumi.set(__self__, "virtual_network_subnet_ids", virtual_network_subnet_ids) @property @pulumi.getter def bypass(self) -> str: return pulumi.get(self, "bypass") @property @pulumi.getter(name="defaultAction") def default_action(self) -> str: return pulumi.get(self, "default_action") @property @pulumi.getter(name="ipRules") def ip_rules(self) -> Sequence[str]: return pulumi.get(self, "ip_rules") @property @pulumi.getter(name="virtualNetworkSubnetIds") def virtual_network_subnet_ids(self) -> Sequence[str]: return pulumi.get(self, "virtual_network_subnet_ids")
41.986895
333
0.670349
7,993
73,687
5.965094
0.04554
0.02284
0.032991
0.048218
0.813964
0.801003
0.787496
0.75872
0.749869
0.73223
0
0.004369
0.23589
73,687
1,754
334
42.010832
0.84243
0.337739
0
0.791455
1
0
0.189922
0.115796
0
0
0
0
0
1
0.188199
false
0.018311
0.006104
0.030519
0.382503
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
8
822337db18d8192318818b7835aca305316198e5
2,931
py
Python
peeringdb/migrations/0016_auto_20210420_2144.py
maznu/peering-manager
d249fcf530f4cc48b39429badb79bc203e0148ba
[ "Apache-2.0" ]
127
2017-10-12T00:27:45.000Z
2020-08-07T11:13:55.000Z
peeringdb/migrations/0016_auto_20210420_2144.py
maznu/peering-manager
d249fcf530f4cc48b39429badb79bc203e0148ba
[ "Apache-2.0" ]
247
2017-12-26T12:55:34.000Z
2020-08-08T11:57:35.000Z
peeringdb/migrations/0016_auto_20210420_2144.py
maznu/peering-manager
d249fcf530f4cc48b39429badb79bc203e0148ba
[ "Apache-2.0" ]
63
2017-10-13T06:46:05.000Z
2020-08-08T00:41:57.000Z
# Generated by Django 3.2 on 2021-04-20 19:44

from django.db import migrations, models


class Migration(migrations.Migration):
    # Switches the primary key of every PeeringDB model to BigAutoField.
    # The operations are identical except for the model name, so they are
    # generated from one list instead of eleven copy-pasted AlterField calls.

    dependencies = [("peeringdb", "0015_auto_20210227_1258")]

    operations = [
        migrations.AlterField(
            model_name=model,
            name="id",
            field=models.BigAutoField(
                auto_created=True,
                primary_key=True,
                serialize=False,
                verbose_name="ID",
            ),
        )
        for model in (
            "facility",
            "internetexchange",
            "internetexchangefacility",
            "ixlan",
            "ixlanprefix",
            "network",
            "networkcontact",
            "networkfacility",
            "networkixlan",
            "organization",
            "synchronization",
        )
    ]
32.932584
87
0.542818
259
2,931
5.96139
0.189189
0.085492
0.178109
0.206606
0.793394
0.793394
0.793394
0.793394
0.793394
0.793394
0
0.015781
0.351416
2,931
88
88
33.306818
0.796423
0.014671
0
0.804878
1
0
0.074498
0.016286
0
0
0
0
0
1
0
false
0
0.012195
0
0.04878
0
0
0
0
null
0
0
1
0
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
8
41ca3401c7898068772dab68005bd9b16db7772b
10,848
py
Python
src/txkube/testing/test/test_matchers.py
LeastAuthority/txkube
a7e555d00535ff787d4b1204c264780da40cf736
[ "MIT" ]
14
2017-01-16T20:00:36.000Z
2020-02-15T05:58:23.000Z
src/txkube/testing/test/test_matchers.py
LeastAuthority/txkube
a7e555d00535ff787d4b1204c264780da40cf736
[ "MIT" ]
192
2017-01-16T18:39:14.000Z
2019-01-03T14:01:50.000Z
src/txkube/testing/test/test_matchers.py
LeastAuthority/txkube
a7e555d00535ff787d4b1204c264780da40cf736
[ "MIT" ]
6
2017-01-20T23:37:59.000Z
2020-10-16T21:38:59.000Z
# Copyright Least Authority Enterprises.
# See LICENSE for details.

"""
Tests for ``txkube.testing.matchers``.
"""

import attr

from pyrsistent import PClass, field

from testtools.matchers import Is, Equals

from sys import version_info

from .. import TestCase
from ..matchers import MappingEquals, AttrsEquals, PClassEquals


class MappingEqualsTests(TestCase):
    """
    Tests for ``MappingEquals``.
    """
    def test_equals(self):
        """
        ``MappingEquals.match`` returns ``None`` when comparing two ``dict``
        which compare equal with ``==``.
        """
        # testtools convention: a matcher returns None on success.
        self.assertThat(
            MappingEquals({u"foo": u"bar"}).match({u"foo": u"bar"}),
            Is(None),
        )

    def test_mismatch(self):
        """
        ``MappingEquals.match`` returns a mismatch when comparing two ``dict``
        which do not compare equal with ``==``.
        """
        # Same keys, different value.
        mismatch = MappingEquals({u"foo": u"bar"}).match({u"foo": u"baz"})
        self.expectThat(
            mismatch.describe(),
            Equals(
                u"field mismatch:\n"
                u"field: foo\n"
                u"reference = bar\n"
                u"actual = baz\n"
            ),
        )
        # Actual value missing a key.
        mismatch = MappingEquals({u"foo": u"bar"}).match({})
        self.expectThat(
            mismatch.describe(),
            Equals(
                u"field mismatch:\n"
                u"field: foo\n"
                u"reference = bar\n"
                u"actual = <<missing>>\n"
            ),
        )
        # Expected value missing a key.
        mismatch = MappingEquals({}).match({u"foo": u"baz"})
        self.expectThat(
            mismatch.describe(),
            Equals(
                u"field mismatch:\n"
                u"field: foo\n"
                u"reference = <<missing>>\n"
                u"actual = baz\n"
            ),
        )
        # The matcher has a nice string representation.
        self.expectThat(
            str(MappingEquals({})),
            Equals("MappingEquals({})"),
        )

    def test_mismatch_py2(self):
        """
        ``MappingEquals.match`` returns a mismatch when comparing two ``dict``
        which do not compare equal with ``==``.
        """
        # Split from test_mismatch_py3 because Python 2 spells the type repr
        # "<type 'int'>" where Python 3 spells it "<class 'int'>".
        if version_info >= (3,):
            self.skipTest("skipping test on Python 3")
        # Different types altogether.
        mismatch = MappingEquals(0).match({0: 1})
        self.expectThat(
            mismatch.describe(),
            Equals(
                u"type mismatch:\n"
                u"reference = <type 'int'> (0)\n"
                u"actual = <type 'dict'> ({0: 1})\n"
            ),
        )

    def test_mismatch_py3(self):
        """
        ``MappingEquals.match`` returns a mismatch when comparing two ``dict``
        which do not compare equal with ``==``.
        """
        if version_info < (3,):
            self.skipTest("skipping test on Python 2")
        # Different types altogether.
        mismatch = MappingEquals(0).match({0: 1})
        self.expectThat(
            mismatch.describe(),
            Equals(
                u"type mismatch:\n"
                u"reference = <class 'int'> (0)\n"
                u"actual = <class 'dict'> ({0: 1})\n"
            ),
        )


class AttrsEqualsTests(TestCase):
    """
    Tests for ``AttrsEquals``.
    """
    # Minimal attrs-based fixture class with a single attribute.
    @attr.s
    class attrs(object):
        foo = attr.ib()

    def test_equals(self):
        """
        ``AttrsEquals.match`` returns ``None`` when comparing two attrs-based
        instances which compare equal with ``==``.
        """
        self.assertThat(
            AttrsEquals(self.attrs(u"bar")).match(self.attrs(u"bar")),
            Is(None),
        )

    def test_equals_py2(self):
        """
        ``AttrsEquals.match`` returns ``None`` when comparing two attrs-based
        instances which compare equal with ``==``.
        """
        # Python 2 reprs keep the u'' prefix and use the bare class name.
        if version_info >= (3,):
            self.skipTest("skipping test on Python 3")
        # The matcher has a nice string representation.
        self.expectThat(
            str(AttrsEquals(self.attrs(u"bar"))),
            Equals("AttrsEquals(attrs(foo=u'bar'))"),
        )

    def test_equals_py3(self):
        """
        ``AttrsEquals.match`` returns ``None`` when comparing two attrs-based
        instances which compare equal with ``==``.
        """
        # Python 3 reprs drop the u'' prefix and qualify the nested class name.
        if version_info < (3,):
            self.skipTest("skipping test on Python 2")
        # The matcher has a nice string representation.
        self.expectThat(
            str(AttrsEquals(self.attrs(u"bar"))),
            Equals("AttrsEquals(AttrsEqualsTests.attrs(foo='bar'))"),
        )

    def test_mismatch(self):
        """
        ``AttrsEquals.match`` returns a mismatch when comparing two attrs-based
        instances which do not compare equal with ``==``.
        """
        # Different value for the single attribute.
        mismatch = AttrsEquals(self.attrs(u"bar")).match(self.attrs(u"baz"))
        self.expectThat(
            mismatch.describe(),
            Equals(
                u"field mismatch:\n"
                u"field: foo\n"
                u"reference = bar\n"
                u"actual = baz\n"
            ),
        )

    def test_mismatch_py2(self):
        """
        ``AttrsEquals.match`` returns ``None`` when comparing two attrs-based
        instances which compare equal with ``==``.
        """
        if version_info >= (3,):
            self.skipTest("skipping test on Python 3")
        # Different types altogether.
        mismatch = AttrsEquals(self.attrs(0)).match(1)
        self.expectThat(
            mismatch.describe(),
            Equals(
                u"type mismatch:\n"
                u"reference = <class 'txkube.testing.test.test_matchers.attrs'> (attrs(foo=0))\n"
                u"actual = <type 'int'> (1)\n"
            ),
        )

    def test_mismatch_py3(self):
        """
        ``AttrsEquals.match`` returns ``None`` when comparing two attrs-based
        instances which compare equal with ``==``.
        """
        if version_info < (3,):
            self.skipTest("skipping test on Python 2")
        # Different types altogether.
        mismatch = AttrsEquals(self.attrs(0)).match(1)
        self.expectThat(
            mismatch.describe(),
            Equals(
                u"type mismatch:\n"
                u"reference = <class 'txkube.testing.test.test_matchers.AttrsEqualsTests.attrs'> (AttrsEqualsTests.attrs(foo=0))\n"
                u"actual = <class 'int'> (1)\n"
            ),
        )


class PClassEqualsTests(TestCase):
    """
    Tests for ``PClassEquals``.
    """
    # Minimal PClass fixture; both fields are optional, which lets the
    # "missing attribute" mismatch cases below construct partial instances.
    class pclass(PClass):
        foo = field()
        bar = field()

    def test_equals(self):
        """
        ``PClassEquals.match`` returns ``None`` when comparing two PClass-based
        instances which compare equal with ``==``.
        """
        self.assertThat(
            PClassEquals(self.pclass(foo=u"bar")).match(self.pclass(foo=u"bar")),
            Is(None),
        )

    def test_equals_py2(self):
        """
        On Python 2, the str representation of ``PClassEquals`` preserves the
        'u' prefix for a unicode kwarg.
        """
        if version_info >= (3,):
            self.skipTest("skipping test on Python 3")
        # The matcher has a nice string representation.
        self.expectThat(
            str(PClassEquals(self.pclass(foo=u"bar"))),
            Equals("PClassEquals(pclass(foo=u'bar'))"),
        )

    def test_equals_py3(self):
        """
        On Python 3, the str representation of ``PClassEquals`` does not
        preserve the 'u' prefix for a unicode kwarg.
        """
        if version_info < (3,):
            self.skipTest("skipping test on Python 2")
        # The matcher has a nice string representation.
        self.expectThat(
            str(PClassEquals(self.pclass(foo=u"bar"))),
            Equals("PClassEquals(pclass(foo='bar'))"),
        )

    def test_mismatch(self):
        """
        ``PClassEquals.match`` returns a mismatch when comparing two ``dict``
        which do not compare equal with ``==``.
        """
        # Same attributes, different value.
        mismatch = PClassEquals(self.pclass(foo=u"bar")).match(self.pclass(foo=u"baz"))
        self.expectThat(
            mismatch.describe(),
            Equals(
                u"field mismatch:\n"
                u"field: foo\n"
                u"reference = bar\n"
                u"actual = baz\n"
            ),
        )
        # Actual value missing an attribute.
        mismatch = PClassEquals(self.pclass(foo=u"bar")).match(self.pclass())
        self.expectThat(
            mismatch.describe(),
            Equals(
                u"field mismatch:\n"
                u"field: foo\n"
                u"reference = bar\n"
                u"actual = <<missing>>\n"
            ),
        )
        # Expected value missing an attribute.
        mismatch = PClassEquals(self.pclass()).match(self.pclass(foo=u"baz"))
        self.expectThat(
            mismatch.describe(),
            Equals(
                u"field mismatch:\n"
                u"field: foo\n"
                u"reference = <<missing>>\n"
                u"actual = baz\n"
            ),
        )

    def test_mismatch_py2(self):
        """
        ``PClassEquals.match`` returns a mismatch when comparing two ``dict``
        which do not compare equal with ``==``.

        On Python 2, the reference should contain <type 'int'> if passed an
        integer argument.
        """
        if version_info >= (3,):
            self.skipTest("skipping test on Python 3")
        # Different types altogether.
        mismatch = PClassEquals(0).match(self.pclass(foo=1))
        self.expectThat(
            mismatch.describe(),
            Equals(
                u"type mismatch:\n"
                u"reference = <type 'int'> (0)\n"
                u"actual = <class 'txkube.testing.test.test_matchers.pclass'> (pclass(foo=1))\n"
            ),
        )

    def test_mismatch_py3(self):
        """
        ``PClassEquals.match`` returns a mismatch when comparing two ``dict``
        which do not compare equal with ``==``.
        """
        if version_info < (3,):
            self.skipTest("skipping test on Python 2")
        # Different types altogether.
        mismatch = PClassEquals(0).match(self.pclass(foo=1))
        self.expectThat(
            mismatch.describe(),
            Equals(
                u"type mismatch:\n"
                u"reference = <class 'int'> (0)\n"
                u"actual = <class 'txkube.testing.test.test_matchers.PClassEqualsTests.pclass'> (pclass(foo=1))\n"
            ),
        )
29.720548
131
0.517699
1,119
10,848
4.982127
0.099196
0.011839
0.040179
0.069955
0.839462
0.815247
0.774709
0.743139
0.714619
0.682332
0
0.008659
0.350572
10,848
364
132
29.802198
0.782683
0.247511
0
0.710145
0
0.019324
0.221065
0.049697
0
0
0
0
0.014493
1
0.077295
false
0
0.028986
0
0.130435
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
68e03398bd7a22ed7c304ffc1666b93aff1bc59b
1,816
py
Python
gcode/SignalGenerators.py
dapperfu/python_GCode
b5beee57a65d63a821b7f3d539f422ad12518cae
[ "BSD-3-Clause" ]
null
null
null
gcode/SignalGenerators.py
dapperfu/python_GCode
b5beee57a65d63a821b7f3d539f422ad12518cae
[ "BSD-3-Clause" ]
1
2019-10-28T02:35:20.000Z
2019-10-28T02:35:20.000Z
gcode/SignalGenerators.py
dapperfu/python_GCode
b5beee57a65d63a821b7f3d539f422ad12518cae
[ "BSD-3-Clause" ]
null
null
null
# -*- coding: utf-8 -*-
"""Periodic signal generators (sine, cosine, square, sawtooth, triangle).

Each generator takes a time value (or numpy array of times) ``t`` and the
same three keyword parameters, and returns the waveform sampled at ``t``.
"""
import numpy as np
import scipy as sp
# BUGFIX: `import scipy as sp` alone does not guarantee that the `signal`
# submodule is loaded; it must be imported explicitly before `sp.signal.*`
# can be used.
import scipy.signal


def sine(t, A=1, f=1, D=0):
    """
    t: time
    A: the amplitude, the peak deviation of the function from zero.
    f: the ordinary frequency, the number of oscillations (cycles)
       that occur each second of time.
    D: non-zero center amplitude
    """
    sine_ = A * np.sin(2 * np.pi * f * t) + D
    return sine_


def cosine(t, A=1, f=1, D=0):
    """
    t: time
    A: the amplitude, the peak deviation of the function from zero.
    f: the ordinary frequency, the number of oscillations (cycles)
       that occur each second of time.
    D: non-zero center amplitude
    """
    # BUGFIX: this previously called np.sin, making cosine() a duplicate of
    # sine() instead of a cosine wave.
    cos_ = A * np.cos(2 * np.pi * f * t) + D
    return cos_


def square(t, A=1, f=1, D=0):
    """
    t: time
    A: the amplitude, the peak deviation of the function from zero.
    f: the ordinary frequency, the number of oscillations (cycles)
       that occur each second of time.
    D: non-zero center amplitude
    """
    square_ = A * sp.signal.square(2 * np.pi * f * t) + D
    return square_


def sawtooth(t, A=1, f=1, D=0):
    """
    t: time
    A: the amplitude, the peak deviation of the function from zero.
    f: the ordinary frequency, the number of oscillations (cycles)
       that occur each second of time.
    D: non-zero center amplitude
    """
    # width=1 -> pure rising ramp over each period.
    sawtooth_ = A * sp.signal.sawtooth(2 * np.pi * f * t, width=1) + D
    return sawtooth_


def triangle(t, A=1, f=1, D=0):
    """
    t: time
    A: the amplitude, the peak deviation of the function from zero.
    f: the ordinary frequency, the number of oscillations (cycles)
       that occur each second of time.
    D: non-zero center amplitude
    """
    # width=0.5 turns the sawtooth into a symmetric triangle wave.
    triangle_ = A * sp.signal.sawtooth(2 * np.pi * f * t, width=0.5) + D
    return triangle_


# Registry of all available generators, in definition order.
signal_generators = [sine, cosine, square, sawtooth, triangle]
29.290323
98
0.632159
296
1,816
3.841216
0.168919
0.010554
0.013193
0.01759
0.802111
0.802111
0.802111
0.789798
0.789798
0.789798
0
0.017751
0.255507
1,816
61
99
29.770492
0.823225
0.551762
0
0
0
0
0
0
0
0
0
0
0
1
0.277778
false
0
0.111111
0
0.666667
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
8
6b8a93f640bf7df7062b61298f780bd3778fcc44
70
py
Python
sf-orgs_app.py
MervmessInc/sfdx_scratch_org_builder
c592e5d1842a2567c2ea57e4671dd8bb317ef619
[ "MIT" ]
null
null
null
sf-orgs_app.py
MervmessInc/sfdx_scratch_org_builder
c592e5d1842a2567c2ea57e4671dd8bb317ef619
[ "MIT" ]
null
null
null
sf-orgs_app.py
MervmessInc/sfdx_scratch_org_builder
c592e5d1842a2567c2ea57e4671dd8bb317ef619
[ "MIT" ]
null
null
null
"""Entry-point script: launch the Salesforce scratch-org manager CLI."""

import sf_org_manager.org_manager as org_manager

# Guard the call so importing this module (e.g. from tests or tooling) does
# not trigger the CLI as an import side effect.
if __name__ == "__main__":
    org_manager.main()
14
48
0.842857
12
70
4.5
0.5
0.740741
0.481481
0.740741
0
0
0
0
0
0
0
0
0.1
70
4
49
17.5
0.857143
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.5
0
0.5
0
1
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
7
6b9c6f703af0f53b18cca30d8ad5f6b3b2b15a59
3,588
py
Python
.modules/.Hash/core/dcrypt.py
termux-one/EasY_HaCk
0a8d09ca4b126b027b6842e02fa0c29d8250e090
[ "Apache-2.0" ]
1,103
2018-04-20T14:08:11.000Z
2022-03-29T06:22:43.000Z
.modules/.Hash/core/dcrypt.py
sshourya948/EasY_HaCk
0a8d09ca4b126b027b6842e02fa0c29d8250e090
[ "Apache-2.0" ]
29
2019-04-03T14:52:38.000Z
2022-03-24T12:33:05.000Z
.modules/.Hash/core/dcrypt.py
sshourya948/EasY_HaCk
0a8d09ca4b126b027b6842e02fa0c29d8250e090
[ "Apache-2.0" ]
161
2018-04-20T15:57:12.000Z
2022-03-15T19:16:16.000Z
## dcrypt.py - Useful module of 1337Hash
# -*- coding: utf-8 -*-
##
# NOTE: this module targets Python 2 (it uses raw_input); it is not expected
# to run under Python 3.

import sys
import hashlib

# Console colors
N = '\033[0m'     # normal
R = '\033[1;31m'  # red
Y = '\033[1;33m'  # yellow
W = '\033[1;37m'  # white


def _hashcrack(make_hash, wordlist_prompt):
    """
    Prompt for a target hash digest and a wordlist path, then try every word
    in the wordlist against the target (dictionary attack).

    make_hash: a hashlib constructor, e.g. hashlib.md5.
    wordlist_prompt: label shown when asking for the wordlist path (kept as a
        parameter because the original md5 prompt differed from the others).

    Exits the process after printing a matching word; exits with an error
    message if the wordlist cannot be opened; returns silently on no match.
    """
    target = raw_input(R + '[' + Y + '*' + R + ']' + W + ' Hash ' + R + ':' + Y + ' ')
    wordlist = raw_input(R + '[' + Y + '*' + R + ']' + W + wordlist_prompt + R + ':' + Y + ' ')
    try:
        words_file = open(wordlist, 'r')
    # FIX: use 'except ... as e' (valid on Python 2.6+) instead of the
    # deprecated 'except IOError, e' comma form, which is a syntax error on
    # Python 3.
    except IOError as e:
        print("\n%s[%s!%s] ERROR: %s%s\n%s" % (R, Y, R, W, e, N))
        sys.exit()
    words = words_file.readlines()
    # FIX: close the wordlist file (the original leaked the handle).
    words_file.close()
    for word in words:
        # word[:-1] strips the trailing newline before hashing.
        # (renamed from 'hash', which shadowed the builtin)
        digest = make_hash(word[:-1]).hexdigest()
        if target == digest:
            print(R + '\n[' + Y + '+' + R + ']' + W + ' Word' + R + ':' + W + ' ' + word + '\n' + N)
            sys.exit()


def md5hashcrack():
    """Dictionary-attack an MD5 digest."""
    _hashcrack(hashlib.md5, ' Type zaki.txt')


def sha1hashcrack():
    """Dictionary-attack a SHA-1 digest."""
    _hashcrack(hashlib.sha1, ' Wordlist')


def sha224hashcrack():
    """Dictionary-attack a SHA-224 digest."""
    _hashcrack(hashlib.sha224, ' Wordlist')


def sha256hashcrack():
    """Dictionary-attack a SHA-256 digest."""
    _hashcrack(hashlib.sha256, ' Wordlist')


def sha384hashcrack():
    """Dictionary-attack a SHA-384 digest."""
    _hashcrack(hashlib.sha384, ' Wordlist')


def sha512hashcrack():
    """Dictionary-attack a SHA-512 digest."""
    _hashcrack(hashlib.sha512, ' Wordlist')
31.752212
93
0.459309
516
3,588
3.170543
0.127907
0.036675
0.04401
0.04401
0.844132
0.844132
0.844132
0.844132
0.844132
0.844132
0
0.032197
0.264214
3,588
113
94
31.752212
0.5875
0.027313
0
0.802083
0
0
0.132759
0
0
0
0
0
0
0
null
null
0
0.020833
null
null
0.125
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
8
6bca77422b9bab31e191bfcdde3ee11136f7296f
13,645
py
Python
code/plot_path.py
okuchap/DAA_Lucas
dec7e7218b36222069cd733fb9f0a419ec2a4259
[ "MIT" ]
null
null
null
code/plot_path.py
okuchap/DAA_Lucas
dec7e7218b36222069cd733fb9f0a419ec2a4259
[ "MIT" ]
null
null
null
code/plot_path.py
okuchap/DAA_Lucas
dec7e7218b36222069cd733fb9f0a419ec2a4259
[ "MIT" ]
1
2019-12-02T00:18:32.000Z
2019-12-02T00:18:32.000Z
import numpy as np import pandas as pd import matplotlib.pyplot as plt from hash import * from simulation_fixed_path import * def coloring_BTC(block_times, y, init_adjust=2016-942, ylabel='', title=''): length = block_times.shape[0] cum_blocktimes = block_times.cumsum() time_shifts = [] num_shifts = (length+init_adjust)//2016 for i in range(num_shifts): time_shifts.append(cum_blocktimes[(i+1)*2016 - init_adjust - 1]) y_low = y.min() y_high = y.max() for i in range(len(time_shifts)): if i < len(time_shifts)-1: x_low = time_shifts[i]/1440 x_high = time_shifts[i+1]/1440 else: x_low = time_shifts[i]/1440 x_high = cum_blocktimes.max()/1440 testx = [x_low, x_high, x_high, x_low] testy = [y_low, y_low, y_high, y_high] if i % 2 == 0: plt.fill(testx, testy, color="y", alpha=0.3) # else: # plt.fill(testx,testy,color="blue",alpha=0.3) plt.plot(cum_blocktimes/1440, y) plt.xticks(rotation=30) plt.xlabel('days') if len(ylabel) > 0: plt.ylabel(ylabel) if len(title) > 0: plt.title(title) plt.show() return None def plot_paths(sim_list=[], title_list=['DAA-1', 'DAA-1 with bound', 'DAA-2', 'DAA-2 with bound']): ''' sim_list is a list containing instances of simulation class. Plot the path of the variables that sim contains. 
default: sim_list = [sim_BTC, sim_BTC_bdd, sim_BCH, sim_BCH_bdd] ''' init_adjust = 2016-942 fig = plt.figure() for i in range(len(sim_list)): sim = sim_list[i] x = sim.block_times.cumsum()/1440 # minute -> day ax1 = fig.add_subplot(4, 4, 1+i) y = sim.winning_rates ax1.plot(x, y) #plt.setp(ax1.get_xticklabels(), rotation=30) ax1.set_xlabel('time (day)') #ax1.set_ylim(0.00003, 0.00007) # fill length = sim.block_times.shape[0] cum_blocktimes = sim.block_times.cumsum() time_shifts = [] num_shifts = (length+init_adjust)//2016 for j in range(num_shifts): time_shifts.append(cum_blocktimes [(j+1)*2016 - init_adjust - 1]) y_low = y.min() y_high = y.max() for j in range(len(time_shifts)): if j < len(time_shifts)-1: x_low = time_shifts[j]/1440 x_high = time_shifts[j+1]/1440 else: x_low = time_shifts[j]/1440 x_high = cum_blocktimes.max()/1440 testx = [x_low, x_high, x_high, x_low] testy = [y_low, y_low, y_high, y_high] if j % 2 == 0: ax1.fill(testx, testy, color="y", alpha=0.3) if i == 0: ax1.set_ylabel('Winning Rate $W(t)$\n(Pr(success)/Ehash)') ax1.set_title(title_list[i]) ax2 = fig.add_subplot(4, 4, 5+i) y = sim.prices*sim.winning_rates*12.5 ax2.plot(x, y) #plt.setp(ax3.get_xticklabels(), rotation=30) ax2.set_xlabel('time (day)') #ax2.set_ylim(0.5, 3.0) length = sim.block_times.shape[0] cum_blocktimes = sim.block_times.cumsum() time_shifts = [] num_shifts = (length+init_adjust)//2016 for j in range(num_shifts): time_shifts.append(cum_blocktimes [(j+1)*2016 - init_adjust - 1]) y_low = y.min() y_high = y.max() for j in range(len(time_shifts)): if j < len(time_shifts)-1: x_low = time_shifts[j]/1440 x_high = time_shifts[j+1]/1440 else: x_low = time_shifts[j]/1440 x_high = cum_blocktimes.max()/1440 testx = [x_low, x_high, x_high, x_low] testy = [y_low, y_low, y_high, y_high] if j % 2 == 0: ax2.fill(testx, testy, color="y", alpha=0.3) if i == 0: ax2.set_ylabel('Reward $W(t)M(t)S(t)$\n(USD/Ehash)') # plt.title(title_list[i]) ax3 = fig.add_subplot(4, 4, 9+i) y = sim.hash_rates 
ax3.plot(x, y) #plt.setp(ax4.get_xticklabels(), rotation=30) ax3.set_xlabel('time (day)') #ax3.set_ylim(0, 55) length = sim.block_times.shape[0] cum_blocktimes = sim.block_times.cumsum() time_shifts = [] num_shifts = (length+init_adjust)//2016 for j in range(num_shifts): time_shifts.append(cum_blocktimes [(j+1)*2016 - init_adjust - 1]) y_low = y.min() y_high = y.max() for j in range(len(time_shifts)): if j < len(time_shifts)-1: x_low = time_shifts[j]/1440 x_high = time_shifts[j+1]/1440 else: x_low = time_shifts[j]/1440 x_high = cum_blocktimes.max()/1440 testx = [x_low, x_high, x_high, x_low] testy = [y_low, y_low, y_high, y_high] if j % 2 == 0: ax3.fill(testx, testy, color="y", alpha=0.3) if i == 0: ax3.set_ylabel('Hash Rate $H(t)$\n(Ehash/s)') # plt.title(title_list[i]) ax4 = fig.add_subplot(4, 4, 13+i) y = sim.block_times ax4.plot(x, y) #plt.setp(ax2.get_xticklabels(), rotation=30) ax4.set_xlabel('time (day)') #ax4.set_ylim(0, 500) length = sim.block_times.shape[0] cum_blocktimes = sim.block_times.cumsum() time_shifts = [] num_shifts = (length+init_adjust)//2016 for j in range(num_shifts): time_shifts.append(cum_blocktimes [(j+1)*2016 - init_adjust - 1]) y_low = y.min() y_high = y.max() for j in range(len(time_shifts)): if j < len(time_shifts)-1: x_low = time_shifts[j]/1440 x_high = time_shifts[j+1]/1440 else: x_low = time_shifts[j]/1440 x_high = cum_blocktimes.max()/1440 testx = [x_low, x_high, x_high, x_low] testy = [y_low, y_low, y_high, y_high] if j % 2 == 0: ax4.fill(testx, testy, color="y", alpha=0.3) if i == 0: ax4.set_ylabel('Block Time $B(t)$\n(min.)') # plt.title(title_list[i]) plt.tight_layout() fig.align_labels() plt.show() return None def plot_paths_2(exprvs=pd.DataFrame(), sim_list=[], W_init_low=1e-6, W_init_high=1e-4, W_grid=1e-8, tol=1e-10, title_list=['DAA-1(2016)', 'DAA-2(144)']): ''' sim_list is a list containing instances of simulation class. Plot the path of the variables that sim contains. 
default: sim_list = [sim_BTC, sim_BTC_bdd, sim_BCH, sim_BCH_bdd] Parameters ---------- exprvs: numpy array containing block shocks delta(t) ~ Exp(1). sim_list: list containing instances of simulation class default: sim_list = [sim1, sim2] where sim1 contains the data about DAA-1 and sim2 contains the data abount DAA-2. title_list: list containing titles used when graphs are plotted. ''' # assuming the height of the first block to be created is 551443 init_adjust = 2016-942 fig = plt.figure() for i in range(len(sim_list)): sim = sim_list[i] x = sim.block_times.cumsum()/1440 # minute -> day # winning rate ax1 = fig.add_subplot(4, 2, 1+i) y = sim.winning_rates opt_w = compute_opt_w_array(sim.prices, W_init_low=W_init_low, W_init_high=W_init_high, W_grid=W_grid, tol=tol) ax1.plot(x, y, label='real') ax1.plot(x, opt_w, label='first-best') ax1.legend(loc='upper right') #plt.setp(ax1.get_xticklabels(), rotation=30) ax1.set_xlabel('time (day)') # y_low = y.min() # y_high = y.max() y_low = 0.000025 y_high = 0.000095 ax1.set_ylim(y_low, y_high) # fill length = sim.block_times.shape[0] cum_blocktimes = sim.block_times.cumsum() time_shifts = [] num_shifts = (length+init_adjust)//2016 for j in range(num_shifts): time_shifts.append(cum_blocktimes [(j+1)*2016 - init_adjust - 1]) for j in range(len(time_shifts)): if j < len(time_shifts)-1: x_low = time_shifts[j]/1440 x_high = time_shifts[j+1]/1440 else: x_low = time_shifts[j]/1440 x_high = cum_blocktimes.max()/1440 testx = [x_low, x_high, x_high, x_low] testy = [y_low, y_low, y_high, y_high] if j % 2 == 0: ax1.fill(testx, testy, color="y", alpha=0.3) if i == 0: ax1.set_ylabel('Winning Rate $W(t)$\n(Pr(success)/Ehash)') ax1.set_title(title_list[i]) # reward ax2 = fig.add_subplot(4, 2, 3+i) opt_reward = opt_w*12.5*sim.prices y = sim.prices*sim.winning_rates*12.5 ax2.plot(x, y, label='real') ax2.plot(x, opt_reward, label='first-best') ax2.legend(loc='upper right') #plt.setp(ax3.get_xticklabels(), rotation=30) ax2.set_xlabel('time 
(day)') # y_low = y.min() # y_high = y.max() y_low = 0.9 y_high = 3.7 ax2.set_ylim(y_low, y_high) length = sim.block_times.shape[0] cum_blocktimes = sim.block_times.cumsum() time_shifts = [] num_shifts = (length+init_adjust)//2016 for j in range(num_shifts): time_shifts.append(cum_blocktimes [(j+1)*2016 - init_adjust - 1]) for j in range(len(time_shifts)): if j < len(time_shifts)-1: x_low = time_shifts[j]/1440 x_high = time_shifts[j+1]/1440 else: x_low = time_shifts[j]/1440 x_high = cum_blocktimes.max()/1440 testx = [x_low, x_high, x_high, x_low] testy = [y_low, y_low, y_high, y_high] if j % 2 == 0: ax2.fill(testx, testy, color="y", alpha=0.3) if i == 0: ax2.set_ylabel('Reward $R(t)$\n(USD/Ehash)') # plt.title(title_list[i]) # hash rate ax3 = fig.add_subplot(4, 2, 5+i) opt_hash = hash(opt_reward) y = sim.hash_rates ax3.plot(x, y, label='real') ax3.plot(x, opt_hash, label='first-best') ax3.legend(loc='upper right') #plt.setp(ax4.get_xticklabels(), rotation=30) ax3.set_xlabel('time (day)') # y_low = y.min() # y_high = y.max() y_low = 10 y_high = 60 ax3.set_ylim(y_low, y_high) length = sim.block_times.shape[0] cum_blocktimes = sim.block_times.cumsum() time_shifts = [] num_shifts = (length+init_adjust)//2016 for j in range(num_shifts): time_shifts.append(cum_blocktimes [(j+1)*2016 - init_adjust - 1]) for j in range(len(time_shifts)): if j < len(time_shifts)-1: x_low = time_shifts[j]/1440 x_high = time_shifts[j+1]/1440 else: x_low = time_shifts[j]/1440 x_high = cum_blocktimes.max()/1440 testx = [x_low, x_high, x_high, x_low] testy = [y_low, y_low, y_high, y_high] if j % 2 == 0: ax3.fill(testx, testy, color="y", alpha=0.3) if i == 0: ax3.set_ylabel('Hash Rate $H(t)$\n(Ehash/s)') # plt.title(title_list[i]) # block time ax4 = fig.add_subplot(4, 2, 7+i) y = sim.block_times opt_blocktime = 10 * exprvs[: x.shape[0]] # The following two lines should be fixed: The graph should be bar graphs and I should have used ax4.bar # As the horizontal line is very short, there is little 
problem...? (But it seems to me that the graphs in the paper are bar graphs.) ax4.plot(x, y, label='real', linewidth=1) ax4.plot(x, opt_blocktime, label='first-best', alpha=0.5, linewidth=1) ax4.legend(loc='upper right') #plt.setp(ax2.get_xticklabels(), rotation=30) ax4.set_xlabel('time (day)') y_low = y.min() y_high = y.max() y_low = 0 y_high = 400 ax4.set_ylim(y_low, y_high) length = sim.block_times.shape[0] cum_blocktimes = sim.block_times.cumsum() time_shifts = [] num_shifts = (length+init_adjust)//2016 for j in range(num_shifts): time_shifts.append(cum_blocktimes [(j+1)*2016 - init_adjust - 1]) for j in range(len(time_shifts)): if j < len(time_shifts)-1: x_low = time_shifts[j]/1440 x_high = time_shifts[j+1]/1440 else: x_low = time_shifts[j]/1440 x_high = cum_blocktimes.max()/1440 testx = [x_low, x_high, x_high, x_low] testy = [y_low, y_low, y_high, y_high] if j % 2 == 0: ax4.fill(testx, testy, color="y", alpha=0.3) if i == 0: ax4.set_ylabel('Block Time $B(t)$\n(min.)') # plt.title(title_list[i]) plt.tight_layout() fig.align_labels() plt.show() return None
34.63198
141
0.535727
1,985
13,645
3.473048
0.101259
0.091384
0.022483
0.036554
0.806643
0.764433
0.732666
0.725413
0.707427
0.692776
0
0.061179
0.33756
13,645
393
142
34.720102
0.701516
0.134848
0
0.75
0
0
0.042786
0.006931
0
0
0
0
0
1
0.010417
false
0
0.017361
0
0.038194
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
6bf30d1858248c103119731ec119fd3d82727f42
160
py
Python
loldib/getratings/models/NA/na_malphite/__init__.py
koliupy/loldib
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
[ "Apache-2.0" ]
null
null
null
loldib/getratings/models/NA/na_malphite/__init__.py
koliupy/loldib
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
[ "Apache-2.0" ]
null
null
null
loldib/getratings/models/NA/na_malphite/__init__.py
koliupy/loldib
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
[ "Apache-2.0" ]
null
null
null
from .na_malphite_top import * from .na_malphite_jng import * from .na_malphite_mid import * from .na_malphite_bot import * from .na_malphite_sup import *
26.666667
31
0.78125
25
160
4.6
0.36
0.26087
0.608696
0.695652
0
0
0
0
0
0
0
0
0.15625
160
5
32
32
0.851852
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
8
d44630d70712fb078bdf79308923fbc47e7e2662
399,504
py
Python
experimental/conversation_go_awry/feature_extraction/utils/politeness_with_spacy/train_documents.py
CyberFlameGO/wikidetox
60ee914c8bb81bada0847a3676e0bf24a6e35221
[ "Apache-2.0" ]
66
2017-09-10T12:47:37.000Z
2022-03-18T01:33:10.000Z
experimental/conversation_go_awry/feature_extraction/utils/politeness_with_spacy/train_documents.py
CyberFlameGO/wikidetox
60ee914c8bb81bada0847a3676e0bf24a6e35221
[ "Apache-2.0" ]
82
2017-09-12T13:01:59.000Z
2021-11-10T19:40:01.000Z
experimental/conversation_go_awry/feature_extraction/utils/politeness_with_spacy/train_documents.py
CyberFlameGO/wikidetox
60ee914c8bb81bada0847a3676e0bf24a6e35221
[ "Apache-2.0" ]
20
2017-11-02T21:23:35.000Z
2022-03-09T01:30:58.000Z
TEST_DOCUMENTS=[{"text": "Where did you learn English? How come you're taking on a third language?", "score": -1.1200492637766977}, {"text": "Thanks very much for your edit to the <url> article. Would you be interested in tackling the <url> of <url>?", "score": 1.3139550758835656}, {"text": "\"|style=\"\"vertical-align: middle; padding: 3px;\"\" | I've started the Badfinger wiki amd I need help. You seem to know a lot about them, could you please help out?\"", "score": 0.6192467887019745}, {"text": "\"These are my numbers: 7 years in Wikipedia, 6 years as an admin, 570+ articles, 4 featured articles, 1 featured list, 21 Good articles, 60 DYK's - After six years as an admin. I recently made some mistakes and I can understand if I am placed in some type of probation were I am monitored and forbidden to use my tools maybe for a year, but do I really merit the removal of my adminship?\"", "score": -0.4735386507011431}, {"text": "I couldn't tell you why glam rock was there. Better?", "score": -0.9629069818525974}, {"text": "Are you calling me a vandal for visiting your user page? Or am I just <url>?", "score": -0.8166612181712788}, {"text": "\"Ok, that's no problem. Can you recommend any other users who would be able to train me?\"", "score": 0.978724981024653}, {"text": "\"Congratulations, OGoncho! Isn't there a barnstar for things like that?\"", "score": 0.8434827719167922}, {"text": "\"I'd be shocked if no one has asked you this yet, but you do an enormous amount of work keeping the bio articles free of vandalism and nonsense as well as filling them up with useful content ;) You seem like you could use a few extra buttons on your toolbar. Are you interested in being nominated for adminship?\"", "score": 1.5018221885351892}, {"text": "I'm also having to use your link. 
Perhaps we should change the main page link to use the megaupload until we know that the main website is up?", "score": 0.9260581278856386}, {"text": "\"Is there any reason you keep moving boldface marks? ITN is on my watchlist and you seem to do this quite frequently, yet, as far as I can see, it makes no difference, so I feel compelled to ask if there's any particular reason?\"", "score": -0.6661648374892484}, {"text": "\"Come on Mohrflies, I donxb4t understand why you insist in these edits: <url>? Why \"\"Yugoslav\"\" when absolutely no one uses it?\"", "score": -2.01262417185682}, {"text": "I asked you a question. Be educated and please respond: What does <url> has to do with <url>?", "score": -2.2829181539119223}, {"text": "\"Well, welcome, and thanks for helping out, as you can see it's greatly needed. Do you have <url> yet?\"", "score": 1.1483465794461214}, {"text": "\"Fine, but it took me no more than a couple of seconds to find a source. Why couldn't you do it instead of removing the content?\"", "score": -1.7617599666505968}, {"text": "\"Hi there. On a tangential note, would you happen to be <person>?\"", "score": 0.6435247082979328}, {"text": "\"Hi, thanks for moving three of the Brussels municipalities. Could you move <url> to <url> as well?\"", "score": 1.5386994327498784}, {"text": "Anytime. How do you like this closer crop?", "score": 0.7515482559465462}, {"text": "How come you removed Dobby as an omitted character? Where does he appear?", "score": -0.5980509913124431}, {"text": "\"My question is \"\"Why?\"\" I am doing all the citations correctly and this still appears often. Can anyone tell me what I'm doing wrong?\"", "score": 0.6350854407739798}, {"text": "\"I see you have been creating lots of articles on bus stations on the <url>. 
Considering how little information there is (and is likely to be) for these stations, would it not be better to have them all on one article?\"", "score": -0.5403605735762399}, {"text": "\"I actually posted my comments a couple minutes ago. Do you think that explanation works, or do you think it's a good idea I elaborate further?\"", "score": 0.6100308105883776}, {"text": "I restored the lead picture series I like. What do you think?", "score": 0.6941318127718009}, {"text": "Seriously? Seriously?", "score": -2.20552721013786}, {"text": "\"Yep, she told me. How are you doing?\"", "score": 0.5643699890548239}, {"text": "<url> and <url> seem to be two articles on the same subject. What's the point of having separate articles?", "score": -1.3926755087265863}, {"text": "\"You may not remember doing this but, you erased a page entitled, Earl Newton. Why did you do it?\"", "score": -0.6065656871280627}, {"text": "Hi Asbestos! This is kind of weird for me but I see that you are a participant in the photo project and list yourself as a photographer in Edinburgh and I was wondering if there was any possibility that you would be willing to respond to my recent request <url>?", "score": 1.2307005896153538}, {"text": "\"---- Though I thought I would note that you have now exceeded <big>'''500'''</big> edits while on \"\"wikibreak\"\" and that is over less than 10 days, May I ask what kind of break is that???\"", "score": -0.700019155828249}, {"text": "Interrobang has asked me if I would also help mentor you when he is not around. I said that I would gladly help out if it was ok with you?", "score": 0.7253512815100932}, {"text": "\"Ok. Thank you for clarifying. Could you be more specific as to what you are specifying as \"\"the claim\"\" so that I may find relevant information to refute?\"", "score": 1.4360160565168771}, {"text": "Gah! What just happned?", "score": -0.7284408398416524}, {"text": "Thanks for the ce's on Attachment therapy. 
Are you kindly undertaking the second opinion?", "score": 0.7104647018287737}, {"text": "You are removing the film poster s images from the Kansas City Film Critics Circle Awards articles and the Academy awards. Why?", "score": -0.8419999292174024}, {"text": "\"I also have problems with a lot of the other recategorisation you have done to these articles, but I won't revert any more until I hear from you. Taking <url> as an example: why have you removed it from <url> when it clearly is a linear filter; why remove it from <url> when it forms part of image parameter theory; why does an article which discusses electronic designs not belong in <url>; why remove it from <url> when, obviously, it is a term?\"", "score": -0.4592870561503199}, {"text": "\"You recently edited this page. It is my view that this page is just a corporate marketing listing, there are several hundred companies that do the same job yet are not listed in Wikipedia - Do you not think that this is advertorial?\"", "score": -0.6386882601569744}, {"text": "\"You've violated <url> on <url>, with 2 reverts in half an hour. Would you like to revert yourself to avoid being blocked again?\"", "score": -0.572729328944986}, {"text": "\"Never mind, got it. Is the stereochemistry known?\"", "score": -0.6170150696108441}, {"text": "\"The review has been up there for something like six weeks, I notice. Think you'll be able to take care of those last couple of things?\"", "score": -1.094270791952888}, {"text": "COI? What COI do you think I have?", "score": -0.9597261183164999}, {"text": "\"You may need to enable your own setting to allow E-mail too; I'm not sure. If so, maybe you can turn it on temporarily, so that you can send me one e-mail?\"", "score": 0.8958516506718291}, {"text": "He has been indef blocked once again. I wonder how long it will take him to come back this time?", "score": -1.1548734223544517}, {"text": "\"This is helpful for automatized transfer scripts. 
By the way, did I get the location tag right on <url> :-) ?\"", "score": 1.1060530217404678}, {"text": "\"...cause I feel like a petulant kid, posting it. But: \"\"We have three volunteers: Awadewit, Mike Christie and Wrad.\"\" Given that I, you know, proposed the countries idea, wasn't it obvious that I was volunteering?\"", "score": -0.8141941779331046}, {"text": "I noticed that you are the major contributor to the articles. Mind if I nominate <url> at <url>?", "score": 0.7527525650125224}, {"text": "''The sources are already there!'' Have you even looked at it?", "score": -2.071900284287463}, {"text": "I can assist you with your sockpuppet report against him. :) Will you like me to just fill it out for you?", "score": 1.5734419411252734}, {"text": "\"As a former administrator, and therefore a veteran editor who knows how wikipedia really works, I am actually surprised you would even ask such a question with such an obvious answer. I suspect you were simply pointing out a perceived injustice with a question founded in a <url> that you yourself did not take seriously?\"", "score": -2.4964135880274343}, {"text": "What is a wall these days? 10 sentences?", "score": -0.4527987873567}, {"text": "Would you mind doing the same for <url>?", "score": 0.6432416736147927}, {"text": "\"Hello, thank you for adding interesting content to <url> <url>. Are you able to provide <url> that will help the content meet our \"\"<url>\"\" standard?\"", "score": 0.8433270176637121}, {"text": "\"I was just now leaving a note on Ruhrfisch's talk page, and noticed that you'd left a note there. I wrote about the <url> the other day, the first time that I've ever created an NRHP article: would you be willing to check it, and to add to the infobox if there's something that can easily be added?\"", "score": 1.1877978430827427}, {"text": "There is a very nice moving picture at <url> which illustrates the bisection method. 
Can we get it for our version?", "score": 0.9619815192767478}, {"text": "\"Ack, hit the wrong button? How do you manage that?\"", "score": -0.5318436077556591}, {"text": "Thanks Anon. Better now?", "score": 0.6844704271748879}, {"text": "I have just put the article to peer review. Would you care to take a look?", "score": 1.2538487905430213}, {"text": "\"Dude, what's going on? Why are you leaving?\"", "score": -0.8203538801218526}, {"text": "\"uh, okay lol :) no, i dont think i am (although i dont remember drinking any at all..), and um? okay?\"", "score": -0.7257178059723094}, {"text": "\"I don't have the full list online, but I do have the 07-08 AJHL record book. Are there any specific seasons or teams you are looking for?\"", "score": 0.6011982043565901}, {"text": "\"I'm not finding the name change request at <url>, nor does there appear to be such an edit under the \"\"Rogerboas\"\" account name. Did you posti t while logged out, perhaps?\"", "score": 0.5795990561398885}, {"text": "\"Hi, I understand that the WP:COI rules would still apply, regardless of username. Could you kindly approve the change in my user name from cecn to funshine?\"", "score": 1.6544515845784786}, {"text": "That user <url> moved <url> page for no reason and I do not understand why it was moved or why a persistent vandalist like this was allowed to move it. Is there anyway at all to get it moved back and that user permanently banned?", "score": -0.8115398976052408}, {"text": "\"I'm just saying that Kingboyk hasn't really been editing lately. So if you're actually interested in resolving an issue, why not take it to a noticeboard?\"", "score": -1.2341491856296254}, {"text": "You removed posts by me as well. Do you have any explanation?", "score": -0.960858887361162}, {"text": "\"How about saying that the fastest flow is near the deepest part of the channel, and that in most meanders this is near the outer bank? 
What do you think?\"", "score": 0.6403547932767741}, {"text": "\"I've steadily become more interested in getting this article up to FA, but I'm having trouble finding sources that actually analyze ''contemporary R&B'' instead of the entire <url> genre. Do you have any advice or would you be interested in working on it together?\"", "score": 1.5175010318443267}, {"text": "\"Just looking at that photo and the one in the article makes me feel cold :). I just finished this article <url> a week or so ago, and wonder if those Forward Operating Locations are tied into intercepts from those NORAD radar sites ?\"", "score": 0.5516684356235847}, {"text": "\"Also, u201ccould be expectedu201d? By whom?\"", "score": -0.6816233772224131}, {"text": "\"I think the article needs work. For example, I think \"\"U.S. late 1980s - 1990s\"\" should after \"\"Detroit sound.\"\" \"\"UK 1990s\"\" should after \"\"U.S. late 1980s - 1990s.\"\" What do you think?\"", "score": 0.6188945757905808}, {"text": "\"Thanks for blocking this user -- he promptly erased the warnings on his talk page (I put them back). I guess a block doesn't automatically block the talk page, huh?\"", "score": 1.0897973494264264}, {"text": "\"Thank you for caring re: my loss. We go on, because it's all we can ever do, y'know?\"", "score": 1.294905726683798}, {"text": "Why did you remove the pollution references on <url>. Do you have a source which refutes this claim?", "score": -0.4728616958260107}, {"text": "\"Hey, thanks for joining! Have any ideas you wish to add to the project?\"", "score": 0.9459526555836533}, {"text": "\"Hi Debresser, what's up? Why are you doing what you are doing?\"", "score": -0.5197098946216643}, {"text": "\"Hi SKS, I saw your post on the discussion on the charts page about Billboard Korea and I clicked the link and the website seems to be up and running but I can't make sense of it. 
Can you have a look at it?\"", "score": 0.7283373016938739}, {"text": "\"Apologies, I was busy prod'ding this article when you speedily deleted it, so I inadvertently re-created it. Can you delete it again, please?\"", "score": 1.797996438661438}, {"text": "I enjoyed your comments on the 'Road to Serfdom' article. I wonder if you know if anyone has offered a reading of the book as a satire of the bourgeois-administrative-pseudo-capitalist state?", "score": 1.0125298708367503}, {"text": "original article????", "score": -1.3876263021881257}, {"text": "\"Firstly, why do you assume I don't care about the content, and secondly, what chance have you given anyone to voice a dissenting opinion? Correct me if I am wrong, but you don't seem to have asked if anyone had a problem with the merge, did you?\"", "score": -1.8839091574404716}, {"text": "I'm afraid I don't understand what you are asking. Could you perhaps try to explain it in another way?", "score": 1.2519752839631588}, {"text": "\"Well, lyrics are generally frowned upon on Wikipedia, unless the song is old enough that its copyright no longer applies. Could you perhaps link to the article?\"", "score": 1.0878930638252469}, {"text": "Such as the redundant link to ''Roe v. Wade''? Not an improvement?", "score": -1.0629538122185864}, {"text": "\"Arguably, listing the former constructors and even the template itself is not necessary. Could we please try reinstating the previous version and see if and how many other folk don't find the decade information useful/intriguing?\"", "score": 1.0379247280996797}, {"text": "\"Do you mind if you tell me why do you add duplicate links to the article? 
More specifically, why do you add internal links to the See Also sections when those links are already in the article navbox?\"", "score": -0.6160346280444527}, {"text": "\"My problem is that the Style Guide for cities causes several topics for this small village to be repeated somewhat, especially the railroad, Gas Boom, and stockyard. Any thoughts on this article's organization?\"", "score": -0.8539415722211426}, {"text": "\"Hi, it looks like the en/em distinction is being preserved. What do you see as the advantage of changing the way they're currently written?\"", "score": 0.6388065203653109}, {"text": "Are u the guy on TV Forum using the same name??", "score": -0.5788370588398216}, {"text": "\"Hey, I just noticed that you are NOT an administator, I had always assumed that you were. I wondered if you would be interested in being nominated?\"", "score": 0.9722484435187372}, {"text": "Eh. Awesome as in you are going to be more active?", "score": -0.49184295294332914}, {"text": "I like the image of the Finnish lighthouses. Do you know if it is for sale anywhere as a poster?", "score": 0.7140931078513275}, {"text": "\"No, I don't know of any way to measure the number of visitors to articles. Perhaps someone on the <url> can answer your question?\"", "score": 0.8506722262092941}, {"text": "\"Hi Johnuniq, thank you for your edit. Does the file should be deleted/removed from commons then?\"", "score": 1.026091166539683}, {"text": "\"Which little tag are you referring to? As for your second question, that does depend on a couple of things- you can send it through the official OTRS address, if you like, and someone will get to you in due course, but you're welcome to forward the email to me personally at jmilburninfaerun AT gmail.com, and I'll take a look at it for you, and upload the image/explain why it's not possible?\"", "score": 1.023573638631892}, {"text": "I have just put the article to peer review. 
Would you care to take a look?", "score": 1.3164138757161674}, {"text": "\"<url> is based at 340 Laurier Avenue West Ottawa, Ontario K1A 0P9 - It doesn't have a photo posted yet. Is this convenient for you?\"", "score": 1.092415623889549}, {"text": "\"How is it false to state he was the ruler of the PRC - terms such as \"\"Mao's government\"\" are used in the article. Can you leave your Marxist personal beliefs at the door?\"", "score": -2.4656141035782912}, {"text": "\"CIV, I'll grant (and yes, I shouldn't have). But how is it a violation of AGF to say that Stewart suggested the article be vandalized?\"", "score": -0.4971849750976265}, {"text": "If you completely disagree then why havn't you required references on other airport pages??", "score": -1.7461310034537036}, {"text": "\"I presume you haven't obliterated them completely, because that would be to be editorialise enormously. Where have you put them instead?\"", "score": -0.9528757623510238}, {"text": "\"I will find religious advocacy statements. You don't understand, or disagree, that medical issues are over emphasized in the circumcision article?\"", "score": -0.813490097065795}, {"text": "\"Notability is an issue of common sense: you're writing for other people, so what is notable for you (or me) is not that relevant. My local shop is more important and notable ''to me'' than Wal-mart, but who, other than me, actually cares?\"", "score": -1.0849853492415766}, {"text": "\"Hi. I would argue that a consensus is far from being reached, so I'm wondering why you have went ahead and starting changing links?\"", "score": -0.8528640474226021}, {"text": "On the mediation page you made a number of statements that I said I would like to discuss with you. Would you be willing to do that <url>?", "score": 0.7261019541823408}, {"text": "\"Comply with what your talk page says, get off and study!!!! 
Have you willpower?\"", "score": -1.6129086194193334}, {"text": "\"Would it be premature to remove the autobiography and list laundry tags from <url> yet? I've been trying to improve it bit by bit, and everytime there are those tags on the page... mocking me... What's the go?\"", "score": -0.7960865100452564}, {"text": "Sure! How could I have missed that?", "score": 0.5496761528959929}, {"text": "Sounds great. Do you think you could add this over at the dicsussion I've started at <url>?", "score": 0.944892210306994}, {"text": "Are you crazy down here?! What the heck is going on?", "score": -1.9615436084477351}, {"text": "Hello? You going to answer this?", "score": -2.25430631231216}, {"text": "\"In the ACR for <url>, you mentioned that you had concerns with my writing style and were wary of supporting it. Can you specifically clarify what content in the article you had grammatical concerns with so I can make the needed improvements and gain your support?\"", "score": 0.6917779500565094}, {"text": "I am for some reason disappointed that it took me a few seconds longer to pick up the Bon Jovi reference than it did the Tom Petty one. Does that mean the terrorists have already won?", "score": -1.023833229351666}, {"text": "4.5 million hits on Google turning up the non-Columbia campuses. What are you talking about?", "score": -1.262234076200372}, {"text": "\"Let me ask you this. Assuming we take it that the player might have chosen any door to start with and the host may have opened any unchosen goat-hiding door, why do you not insist that the starting sample space must include all the possible door combinations?\"", "score": -1.249050553283888}, {"text": "\"Is it ok to use words like: here are some \"\"good\"\" or \"\"recommended\"\" reliable secondary sources? Or does that have to be determined by consensus?\"", "score": 0.6324067321222809}, {"text": "\"Well, I bet you got that from stalking me. 
Are you making a template I can use?\"", "score": -1.7412032050988753}, {"text": "I made a change to your user page. Is that the change you wanted?", "score": 0.9700958244585036}, {"text": "\"I never programmed that up because there were so few portal reviews, far fewer even than portal candidates. How often does this happen?\"", "score": 0.5329781475057358}, {"text": "Can you take a look at the article <url> and the conversation going on at <url> (that should probably be moved to the article talk page). Your thoughts on the most recent commentary?", "score": 0.9365205914668092}, {"text": "Alright then!?", "score": -1.2694699975632886}, {"text": "\"I'm trying to install Lupin/Anti-vandal tool, but im stuck. Could you help please?\"", "score": 0.9439894349800928}, {"text": "What's up with all those tweaky changes? Just salting redirects?", "score": -1.5080358117064565}, {"text": "I wasn't aware my statement was supposed to cause worry. Is there something I'm missing?", "score": -0.4814745427051214}, {"text": "Are you saying that it's impossible for Wikipedia to admit that most people don't know the true meaning of slavery? That it's my point of view & original research in understanding what the true meaning of slavery is?", "score": -0.7721600651428839}, {"text": "\"Since this person made a sincere attempt to discuss the page, and the page in his opinion was relevent, you can remove this warning from his talk page? In the spirit of <url>, perhaps?\"", "score": 0.6424344858011902}, {"text": "Let me know when's convenient and I'll put the kettle on. Tea or coffee?", "score": 1.2704004954458414}, {"text": "\"When creating <url>, I also had to create <url>, <url> and <url>. I was wondering if you could populate them?\"", "score": 0.9378985883380583}, {"text": "You recently attached a <nowiki><person></nowiki> tag to an article about an Azeri poet. May I ask why?", "score": 0.8203018126892367}, {"text": "\"Your <url> is a brilliant idea! 
Would you mind if I linked to it from my collection of \"\"Useful things\"\" on my user page?\"", "score": 1.9251889443150343}, {"text": "Nice tune Modernist <url>. You owe me one back?", "score": -0.44875972471699754}, {"text": "\"How exactly is it unclear? The ships link to the same place, the prefix \"\"HMS\"\" comes before ''Eagle'' to show her name change... what's not there that should be?\"", "score": -0.8587825372670057}, {"text": "Don't you think you should remove the welcoming plate of cookies you left at User:Dococe page???", "score": -1.588432596296736}, {"text": "\"Hey poetic decay, I was planing on merging <url>, <url>, <url>, <url> and <url> together. You think you could help me with it?\"", "score": 1.1630421235603767}, {"text": "\"Your \"\"resolution\"\" was to move it back to \"\"Detective Terry Crowley\"\" again? Why do you keep doing that?\"", "score": -1.2584352866379442}, {"text": "\"Dan, can you take a look at the last exchange regarding the 1952 election? Looking at the citations, is there doubt in your mind that there was a deal, or at least that it has been widely suggested by reliable sources that there was a deal?\"", "score": 0.6802697893870441}, {"text": "\"Hi, I think that you do a fantastic job on DELSORT. However, I was just wondering if, when you add to Schools, whether you would be good enough to add them to the appropriate section, please?\"", "score": 1.258220159994184}, {"text": "The press is calling you bisexual. Did you know that?", "score": -1.457467300016651}, {"text": "<url>???", "score": -0.6564521931315369}, {"text": "Iulius! Could you provide exact reference of article u017dirmu016bnai part ''Demographics'' ethnicity composition?", "score": -0.4807208588438825}, {"text": "\"This page, which you recently edited on, is constantly getting vandalized by a rival school. 
Could you please put an edit lock on the page?\"", "score": 0.8800110307978745}, {"text": "You have given your point by point rebuttals to my reasons several times. How can you say I haven't provided my reasoning?", "score": -0.7456601974819634}, {"text": "\"Well, I suppose it is only reasonable for you to conclude that everyone behaves as you and your allies do. If \"\"good typography\"\" is whatever Dicklyon likes, why shouldn't the truth be whatever subserves Dicklyon's cause?\"", "score": -0.7914556603812143}, {"text": "\"Hi, You stated the editor is inactive, yet the editor is stalking me through MY CONTRIBUTIONS and reverting my entries and the editor left a message on the top of my TALK page, so why would you not block them ? You didn't even give them a warning ?\"", "score": -1.4127388916863253}, {"text": "While watching the contribs I noticed that the bot blanked <url> which has already been checked and is listed at <url>. Isn't that supposed to be the thing that these lists of MRGs were supposed to prevent?", "score": -0.6906275280686897}, {"text": "\"Well, for a start, why is this irreplaceable? He's still alive, so what's to stop someone taking a picture and releasing it under a free license?\"", "score": -0.6302553871021638}, {"text": "Thanks for the info but the page is still giving the same warnings. Any ideas why this might be happening?", "score": 1.3294871325964912}, {"text": "Thank you for the response on my user page. Would you be willing to add a few words to the FAC candidacy?", "score": 1.355535248233611}, {"text": "Good luck on your <url>. What time is <url> <url>?", "score": 0.8718958508429198}, {"text": "\"Sure thing, and thank you for noticing! 
Is the colour better now?\"", "score": 1.6553322996288877}, {"text": "\"I don't follow you I was not aware that my edits were vandalism, I was editing according to what had (it appeared to me) been agreed on the talk page, but additional material had been inserted (that had not been agreed upon) that esentialy said the same thing (and was not supported by the sources) that had caused the origioanl disagrement. I think I may have mis-understood you point, are you saying that I can only revert three times if its vandalism, or that my edits were vandalims?\"", "score": 0.5316213811094479}, {"text": "\"The reason for the malfunctioning redirects is that the character immediately after \"\"REDIRECT\"\" is not an ordinary space, but a \"\"non-breaking space\"\", hex code 0xA0. Out of curiosity: how did you manage to create them?\"", "score": 0.5524637294203935}, {"text": "\"Since the \"\"discussion\"\" was not advertised at the affected pages, it did not involve the \"\"community\"\". Do you not see that?\"", "score": -1.7746449595689906}, {"text": "You may want to look at the nomination. What did you intend to do?", "score": -0.9841047819399478}, {"text": "Howdy Resolute. Is <url> for real?", "score": -1.0144986226445374}, {"text": "Where does it say in the policy that it doesn't apply to good users? Where in the policy does it say only soft blocks should be performed?", "score": -0.7001890911945746}, {"text": "\"Sorry, I understand, the bot has just moved the category of the sub-cat, not merged it. Is the bot going to proceed to the next layer - or does that have to be done manually?\"", "score": 0.7999271493584368}, {"text": "Please explain how that is a personal attack? You do realise that someone critisizing your actions is not the same as a personal attack don't you?", "score": -1.456322285731245}, {"text": "Thanks! There is yet another at <url>; would you agree it should be closed the same as the others?", "score": 1.43292234464989}, {"text": "\"Hi, StuRat. 
Do you want to participate in <url>?\"", "score": 0.6394612398746176}, {"text": "\"I'm sorry, but I'm unlikely to have time to help. Maybe try <url>?\"", "score": 0.5650495471261705}, {"text": "\"I recently was approved for VandalProof, and I installed everything and stuff, but I'm confused on how it is actually run, like what do I click to start it. Could you please help me a little?\"", "score": 0.6461372353876101}, {"text": "\"Lunch sounds great, Rocket. Do you have an e-mail address I can use to get in touch with you?\"", "score": 0.8676826068285143}, {"text": "\"Hi, Why were the images <url> from the infobox in <url>? Is this part of an MOS or something?\"", "score": -0.7988871637777172}, {"text": "\"I aggree... river name then \"\"River\"\". Do you think that since it's a list of rivers in China we should use the Chinese name and the international norm name too?\"", "score": 0.7108373979930622}, {"text": "\"I certainly think more things should be deleted from enwiki, but TheCommunity(tm) is fickle. Are you complaining about something ''I'' did?\"", "score": -1.0435895631277943}, {"text": "I have left a comment on the Talk page under that section. You haven't said much there - is there some place where you present your view with a little more detail?", "score": 0.8849104115170923}, {"text": "Why is your AWB robot changing '''&amp;Oslash;''' to the letter '''xd8'''? I asked you if all computers in the World are set up to read &Oslash; ?", "score": -0.9452891546016758}, {"text": "Are these two more articles that should be moved on January 1st? Did you get my email about admin nomination for you?", "score": 0.5570324564129122}, {"text": "I'm pretty sure there already is a filter (possibly more than one) for this. What page got moved?", "score": 0.5885879694329682}, {"text": "The notion that a hunter would kill dozens of birds with a single shot seems outlandish to me. 
Do you have a reference for this?", "score": -0.636128978963384}, {"text": "\"Seems like <url> has conveniently \"\"quit\"\" the wiki all of a sudden. That wasn't one of ''your'' sockpuppets, was it?\"", "score": -1.4512942200074421}, {"text": "Sorry... haven't unblocked many people before. Is it ok now?", "score": 0.6825353439402699}, {"text": "\"eh, i've ALWAYS hated 50 meter pools (200 butterfly was always the BIGGEST pain in the ass) although i really did like butterfly for some reason.... never quite got that, so was it an indoor or outdoor pool????\"", "score": -1.0286325530210072}, {"text": "\"Nice work, that explains it quite clearly! Any idea who came up with the scheme?\"", "score": 1.3544571666866205}, {"text": "I know what the band Rush is. I'm just wondering if you were joking or didn't know the user was referring to Rush Limbaugh?", "score": -0.6266266882446212}, {"text": "\"I see you removed the review, which makes sense until at least four days from now, when it won't be mentioned on the mainpage at all. But maybe leave a note at <url> so he doesn't feel put-off?\"", "score": -0.5918786630147267}, {"text": "\"Please stop removing properly referenced information that I have added to this article. This is an article that <b>YOU</b> nominated for deletion, so why are you even editing it?\"", "score": -0.837878871039275}, {"text": "\"Hi Eugene, long ago you made a dump of missing articles at <url>. Are you able to produce a new dump like this, as it needs a refresh, otherwise can you point me to how it is done?\"", "score": 0.6591868669653985}, {"text": "You added the name Ken Taylor to the <url> page but there is no such person listed on the DOD website as having received that award. Who were you refering to?", "score": -0.5708665224840423}, {"text": "\"This statement by The Four Deuces <url> makes little sense to me, does this user often obfuscate like this? 
The source is quite unambiguous in stating \"\"communist terrorists\"\" how on earth does this not mean the author is talking of communist terrorism?\"", "score": -1.1305955530500242}, {"text": "Hi John. Do you have any more edits to make before I start the merge?", "score": 0.6655085993477726}, {"text": "\"As I've only ever tagged one page for speedy deletion before (and that was an obvious cut-and-paste copyvio), I just want to make sure I tagged it properly. Was db-1 the right tag for <url>?\"", "score": 0.8754915684844704}, {"text": "\"Thank you for your Wikignome-like edits. What do you think, substantively?\"", "score": 1.8026792642590546}, {"text": "And why exactly am I looking at your monobook? And why did you substitute your user links onto here?", "score": -1.3920741425027017}, {"text": "\"Just FYI, I have decided to be <url> and remove the screenshot at the top of the article. There is already a link to a mirror of goatse.cx in external links, so why force readers to look at that?\"", "score": -1.1154375556463343}, {"text": "\"You were right that there was a problem, but I see Xn4 has now had a go at it. Perhaps you could look at it again?\"", "score": 0.9340689215466614}, {"text": "I've had enough of this. What would you think about filing a RfC?", "score": -0.7838761865358993}, {"text": "\"I am far from worked up over this, but find it hard to AGF when someone tries to include inaccurate information into an article, claiming it had been checked by an administrator. Who was the administrator?\"", "score": -0.8477442509115118}, {"text": "\"thanks for your input, it was such a pleasure interacting with you, now that the overlapping projects assessment are done, we need never interact again. 
projects opting out of policy, how charming, some projects are more equal than others, can i take a vote on SI project opting out of copyright vios?\"", "score": 1.0689351497698225}, {"text": "David Sprenkle was at the University of Illinois in 1982 (at least part of it) while I was there. Were you there too?", "score": 0.5722747245904835}, {"text": "\"Yes, I have. Did you really need to send me a reminder?\"", "score": -1.8615733331003508}, {"text": "\"Sorry, I completely missed the subsection title. You think this is Merkey?\"", "score": 0.7865707030228558}, {"text": "\"Holy steaming <url>. I presume you know about the <url>, yes?\"", "score": -1.5270726933185856}, {"text": "Truly Kafkaesque. Did GoRight try to have it taken down?", "score": -0.5056104283729471}, {"text": "\"I consider <url> to be the single most important edit I have ever made. God, what is happening with The Beatles coverage on Wikipedia?\"", "score": -1.0919988796547373}, {"text": "\"Hi, Matt. Aren't you supposed to be banned?\"", "score": -1.4903258647595614}, {"text": "\"Hi, Schutz; it's still busted. Would it be OK with you if I asked GimmeBot to take over the mainpage bolding?\"", "score": 1.136728674897941}, {"text": "You took me by surprise on this one with your comment-what about the obedience a diocesan priest owes to his bishop? And I was under the impression that all Latin-rite priests took simple vows of chastity?", "score": -0.5120553015840179}, {"text": "You changed the result here: Weu2019ve already had <url>; do you really want to do it again? The result given is a compromise; are you wanting to upset that?", "score": -0.8432364676445495}, {"text": "\"Hi, I can run a newsletter bot for you, WesleyDodds. Whatcha need me to deliver?\"", "score": 0.5828340093765769}, {"text": "\"Hi there, it looks like the moving editor has added a bunch of references to the talk page supporting the fact Toto is no longer formally referred to as Moorland Totilas. 
Do you think if we added one or more of the refs he's added on the talk page to the article we could move it back?\"", "score": 0.9224970421241817}, {"text": "Great. Any reason why SCIRS is still an essay and not a content guideline?", "score": -0.4704138968504822}, {"text": "No worries. So back to my question: what software makes them rock like that?", "score": 0.6902401543654317}, {"text": "\"Yeah, at the moment. Unless there are any other dryandras on the cusp of GA-hood....?\"", "score": -0.5599007586677034}, {"text": "The only way I have a naval website is that I am one of the people actively working to make sure Wikipedia has good coverage of naval topics here. Which website do you think I run?", "score": -0.7611879075901715}, {"text": "\"If you're talking about the Recycling Troll, I've never blocked him. Why do you think I have?\"", "score": -0.705375120416255}, {"text": "\"I was checking out the article on the Unitarian Society in Fall River that you contributed to, and there's a lot that was wrong. I want to reconcile what you know with what I know so we can get this article right... a little help here?\"", "score": -0.5167570722365816}, {"text": "There is a special tool which shows you page sizes? Would you like to know how to use it?", "score": 0.8606608860204059}, {"text": "\"We have a <url> page on <url> which, we suspect, may be in Slovenian. Could you have a look at it (whether it is Slovenian and, if so, whether it is worth translating)?\"", "score": 0.6296508753308505}, {"text": "And the username? Do you have any thoughts on changing it to something not involving Nazism?", "score": -0.7615562292934589}, {"text": "\"I'm usually polite, but this one seemed to need a slap across the face. It's ... words escape me .. mindboggling... jaw-dropping ... 
?\"", "score": -1.2445793311531097}, {"text": "Are you saying that you have a close or imtimate relationship with N.I.?", "score": -0.773953262590184}, {"text": "\"BTW, nobody else has been keeping that page updated as much as I have worked on it for the entire hockey year, and it would be dead and empty without my work. So, why don't you be positive and praise my work instead of being so picky on such a minor, insignificant thing as putting 'vacant' on-top instead of below a player who no longer is on the team?\"", "score": -2.290025353349514}, {"text": "\"Except for the one about the Chevrolet Monza bit at the middle which came from the Prototype book, was in fact come from the Brumos website, but the last time I read that was back in February 2001. Also is with the referencing, the page is now a total mess, is there a way that could be sorted out, I would be very thankful if you could help?\"", "score": 1.261254601036553}, {"text": "\"And after seeing this talk page, I'll also note that there is no difference between scalars in math, physics and computing, those articles should all be merged together. :-/ Maybe you'd be interested in doing such a merge?\"", "score": 0.5577392930145612}, {"text": "\"I responded to your talk page comment, encouraging you to try your hand at reworking it. Good analysis of a potential problem, so why not take a whack at it?\"", "score": 0.8783154946559879}, {"text": "Is this really notable? Time for an AfD nom?", "score": -0.8687423423700027}, {"text": "\"Well it looked as though it had stopped, certainly tapered off for a few days but now some bloody wanker has started adding it again. I don't suppose you know how to get a page protected, I've never had to do it before?\"", "score": -0.4695135744670008}, {"text": "\"Thanks. 
As an aside, since this did turn out to be factual, just very hard to source, do you think the community would countenance an unblock request from Blaabla if he accepted some strict unblock conditions (such as packing in the 'systemic bias' thing, discussing his edits in a less confrontational manner etc)?\"", "score": 0.8919013402228935}, {"text": "\"Everything about <url> looks fantastic, but.. going to |2 instead of |30em seems like a major step back. Is there a reason for it?\"", "score": 0.747433736782505}, {"text": "\"I wonder if it would ever be worth doing an article on G&S scholarship? You know, cover the major discoveries, describe the evolution of the field... or is that too likely to hit problems?\"", "score": 0.766203165807502}, {"text": "\"Thanks for your help on this, it's much appreciated. Should I delete my request for checkuser?\"", "score": 1.7167895543497174}, {"text": "Yes please! Buffing up ''<url>'' to at least reflect a bit better on current state-of-play-taxonomically would be good :) Any Reliable Sources call it a <url> ?", "score": 0.672840865666147}, {"text": "\"Okay. What, exactly, can I do about that?\"", "score": -0.6267716759580528}, {"text": "Great job. Will it be done by next week?", "score": 0.9649159176387915}, {"text": "\"It was nominated for speedy deletion because he wasn't notable. However, if I undelete the page, will you improve the article and be able to provide decent sources to help his notability?\"", "score": 0.7525524139391032}, {"text": "\"Yeah, but playtime is over. Find some articles to edit, yes?\"", "score": -1.705078542495856}, {"text": "You seem to have an agenda of denying that some of the ancients were pedophiles. Why?", "score": -1.4814641279360028}, {"text": "True. What do you think about the notability of this particular article?", "score": 0.9782625563884457}, {"text": "\"Wait a minute! 
I've done plenty of good edits, where did they all go?\"", "score": -0.5909630429231947}, {"text": "\"It is disputed among admins regarding the protection of userpages, although the policy says that users can have their user/user sub-pages (but not the talks) semi-protected if the user requests it. Are there other subpages of yours that you'd like protecting?\"", "score": 0.9503161986612854}, {"text": "\"What \"\"fact\"\" that was recently prooved is referred to? And since when is pi the ratio of circumference to radius, rather than circumference to diameter?\"", "score": -0.775550086448994}, {"text": "\"Alright, I don't like it but is not worth wasting more time on, but (as you now noticed as well) if it ''really'' had been deleted for <url>, it would have been truly problematic. Can I just ask: ''if'' Mephistophelian would return, are we allowed to undelete it?\"", "score": -0.9368666436697832}, {"text": "\"Thanks! By the way, have you sent anything to Maggie since you received the email from the photographer?\"", "score": 0.9458715653830765}, {"text": "\"You are right, I apologize not doing so. A different layout for the template could be interesting (perhaps even with names, or those delivered in person/by text) but I didn't quite see the logic in the way you had broken up the dates by 20 year blocks interspaced with 4 year blocks, right?\"", "score": 0.518596885572393}, {"text": "Nice continued work on the expressway articles. Is it possible to have some maps or schematic diagrams showing the location or routes of the expressways?", "score": 0.9102718409912715}, {"text": "I'm not sure what you mean? Add my interests?", "score": -0.5066003333860577}, {"text": "\"At the top of the page it characterizes a guideline as ''a generally accepted standard that editors should follow'' along with a passing reference to <url>. 
So, I might ask, how would using 4 tildes prevent you from improving or maintaining Wikipedia?\"", "score": -0.4738582893797231}, {"text": "\"I've added a note to <url> to explain the dating; there's another couple of places it should go, but I don't want to start putting it in 'til you've seen it. What do you reckon?\"", "score": 0.870657711116732}, {"text": "\"If you don't agree with this approach, you can ether add the references yourself, or delete the added material and I will go elsewhere and write something else, and you can take over doing the anti-tank warfare article 'properly'. Ok?\"", "score": -1.6480174771327731}, {"text": "\"Hi there, you entered info in the bio of <url>, saying he is related to <url>. Do you have any more info on the specific relationship, and also how they were related to <url> ?\"", "score": 0.7294153748698544}, {"text": "Are you really denying that you brought Giano and Bishonen's names into this? Seriously?", "score": -1.644855478216621}, {"text": "\"Six months ago was episode 10, and has peter not been in every episode up to and including that? Seriously, where are you getting your figures?\"", "score": -1.906109935333958}, {"text": "\"That's a <url>-length rant. Still, be nice and AGF, okay?\"", "score": -0.8713271622167632}, {"text": "\"Here, let me parse it out for you:''I regret that because Gravitor and Carfiend's behavior consists of acting in tandem (in the manner of meat or sock puppets), refuse to discuss disputed content on the talk page, and continally provoke revert wars that some administrative intervention is likely required. '' Is it clear now?\"", "score": -1.6620716921791074}, {"text": "\"I would be willing to move this article to the incubator so that it can be worked on (I agree it wouldn't be an A7 today), though if it's to be made into a real article it needs to be done with someone with no conflict of interest regarding the subject, as creating articles for things you own is highly discouraged. 
If I undelete it to the incubator would you agree to let it go and leave it up to other users without a conflict of interest?\"", "score": 0.7951698177608487}, {"text": "\"Hi, Leandrod - my computer is slow, and your last two edits were made on top of vandalism that needed to be reverted - I can't tell if I reverted legit changes by you or not? If I did, I'm sorry, can you have a look?\"", "score": 0.7579404558707641}, {"text": "\"As a side note, I'm slightly amused that on the one hand, you caution me not to \"\"encourage\"\" the penguieater troll, explicitly striking out the \"\"mocking\"\" part, but then in the same breath you sign the finding that cites those same penguin diffs as evidence of terrible insults and intimidation. Slightly inconsistent, perhaps?\"", "score": -1.0919543293892415}, {"text": "I was thinking about creating a page about Nintendo eShop. And since you are a well trusted user I am asking you if you think that would just be deleted because there isn't enough information?", "score": 1.3382553424462569}, {"text": "\"Just wondering, is there any reason why you removed categories from a number of articles and replaced them with the non-existend <url>. Wouldn't <url> have been the better choice, which, as it appears, is a category you yourself created a few days ago?\"", "score": -0.6803463485221906}, {"text": "Changing your userpage doesn't answer my question. Why is your userpage a copy of <url>'s?", "score": -0.9103157302641879}, {"text": "Thank you. Was it anything in particular?", "score": 1.4039830151180135}, {"text": "I hope Ed's doesn't look like mine. Wait--what is this navbar we're talking about?", "score": -0.4725731649719306}, {"text": "\"To whom are you referring, Hipicrite? Who has a \"\"secret opinion\"\"?\"", "score": -2.0962722247991175}, {"text": "I saw on the status page that you have a free spot. 
Would you be able to coach me?", "score": 0.6600551207661534}, {"text": "\"Have made some comments on your list, looks pretty good overall, my points are just picky ones! Any chance you could return the favour and have a look over <url> for me?\"", "score": 0.6395139611123922}, {"text": "Helllo?????", "score": -1.6651998796292085}, {"text": "Do we have an automated-edit-using editor who's decided to just stick these in for completeness' sake? Did I miss all this?", "score": -1.0408432821493807}, {"text": "\"I have not seen quotation sections on any other Australian politician page. Also, who chose these \"\"quotations?\"\" What qualifies as a quotation?\"", "score": -0.899336416419388}, {"text": "By the way: Do you know how to remove the spaces created below the project banner when others are placed below it? And how to make capitalisation of the rating not matter?", "score": 0.7664469591771889}, {"text": "\"OK, we are talking about two remaining articles here. The categories serve absolutely no categorization purpose, so why are you being stubborn about it?\"", "score": -1.6678247056945448}, {"text": "\"The information itself obviously demonstrates notability. I'm sure other sources can be found, however&mdash;will you back off if other sources are added?\"", "score": -0.8706925799709943}, {"text": "I notice that to the <url> and <url> pages edited '''''Gorka''''' '''Josxe9 Unda Velasco''' and changed it to simply ''Gorka''. Why?", "score": -0.5738859949795974}, {"text": "I know this is a bit outside your usual territory but I wonder if you would be interested in helping us get <url> to FA status in time for the 50th anniversary of the line's closure on 10 June 2011? 
A question has also arisen in connection with the proper referencing of a journal article using the sfn template: when quoting a journal article - say from January 2010 - should the correct formulation be <nowiki><person></nowiki> or should the month be dropped?", "score": 0.6633473916075239}, {"text": "\"Hi there, I've found some peer-reviewed publications that discuss Hoser's work. Could you take another look at the article and the AfD discussion?\"", "score": 1.4516586084021046}, {"text": "\"Thank you, Cremepuff222, for the kind words. :) How have you been?\"", "score": 1.3268819206054343}, {"text": "\"Jeez, Joe, and I was going to suggest you hang around Andrea's to make sure he doesn't do himself a disservice... What is this? Getting blocked is the new cool?\"", "score": -1.307216813086342}, {"text": "\"Your reasoning is very sound, it would appear that USGS is wrong and I have no problem with changing it back. In general, what are your thoughts on using the GNIS database for park coordinates/elevations as we convert to the Geobox?\"", "score": 0.5797200238911244}, {"text": "\"Hi. How do you think, is <url> of FL quality?\"", "score": 0.5623109467342904}, {"text": "What do you mean they are not related to the scope of <url>? Like which articles?", "score": -1.4655328461429251}, {"text": "\"Excellent, thanks! I wonder if it's worth temporarily uploading a cropped image for the main page, as 100x100 is really very small?\"", "score": 1.0902940247320811}, {"text": "Hey thanks for reassessing the <url> article to B class. So what's need to be done to make it GA?", "score": 1.327308584457808}, {"text": "\"Thanks for adding the WSL windmill picture. I see you are in Belgium, any chance of adding more pictures to the various lists?\"", "score": 1.1489066361066995}, {"text": "\"At first I couldn't see it, but now that I've taken a closer look I see what you mean. 
Now, I wonder, who was the first most decorated PR soldier in WW II?\"", "score": 0.6603648745889298}, {"text": "To ''Arbcom''? Aren't we being a bit dramatic here when other forms of dispute resolution have not been tried?", "score": -1.2505130525748254}, {"text": "\"I'm really very sorry, but no, I '''didn't''' know, didn't get a message saying that was wrong. Where was it?\"", "score": 0.7846248251405117}, {"text": "\"Hey I saw that you're using Huggle and wanted to try it out, too. I downloaded it, but every time I try to open the program I get the message: CLR error: 80004005. The program will now terminate....What can I do to fix the problem?\"", "score": 0.7733958188796265}, {"text": "Good idea.... Perhaps though you could also make a list of kings on that page??", "score": 0.5279155598753841}, {"text": "Wow that's old! What do you think of <url>?", "score": -0.662765815431295}, {"text": "\"Hi, I am having problems with one of the references you added as it goes to the main page and not to one showing any of the figures on religion. Can you have a look at <url>?\"", "score": 0.8994191561060767}, {"text": "When we have a GLAM barnstar then you get it ..... just for the Hindi article! I cannot explain why you needed to create a Derby page in Hindi - surely it should be there already?", "score": -1.0140586577058954}, {"text": "\"Unless im making a major mistake here, this article was about a Not for profit organisation which seemed notable enough to give the origional creator at least some time to source it. Are you sure this had to be a speedy?\"", "score": -0.6399123965536923}, {"text": "\"Hi there, I've raised the possibility of unprotecting the tamazepam page <url>. What are your thoughts?\"", "score": 1.032675860539739}, {"text": "\"In your nom of Everyking you wrote ''...as Everyking is a former administrator, he made over 1,300 in his 2+ year time as a sysop.'' Should that be over 1,300 sysop actions?\"", "score": -0.7142621206171912}, {"text": "Done. 
Does Adam Hunter need your disambig-style touch?", "score": -0.4984321783458454}, {"text": "\"I'm always happy to give a second opinion on ID - you're welcome to put images on my talk page to discuss them. Regarding <url>, I would tend to go with ''<url>'', what do you think?\"", "score": 1.2003592169345807}, {"text": "\"I'm sorry, but I don't see the value in having different criteria for two versions of the same list, one sorted by name and one sorted by nationality. Every other list and article on space travel uses the altitude definition, why would the sub-list sorted by nationality be different?\"", "score": -0.9132526148716884}, {"text": "\"I'm leaving radiant/nescott to you. Can I ask you to look at my report of UBeR, please?\"", "score": 1.869911592409716}, {"text": "Yellowmonkey has told me we need another FAR delegate and recommended you for the job. Is that something you're willing to do?", "score": 0.6512067350291763}, {"text": "Thanks for your reply. I've tried to incorporate the references into the article; how is it now?", "score": 1.4858647045332738}, {"text": "So you're interested in both Amiga computers and BDSM! Are you an atheist too?", "score": -1.7103671860717646}, {"text": "\"Thank you for removing the \"\"Speedy Deletion\"\" from my post. I realise that I still need to do more work on the post itself, but are the sources that I am using acceptable?\"", "score": 1.1119356667589126}, {"text": "\"OK, then, define \"\"bias\"\", propose a way to quantify it, and if it's accepted by consensus, we'll apply it to ''all'' the admins on that page. Or, perhaps as a starting point, how about presenting some evidence?\"", "score": -1.151564214925647}, {"text": "\"Sorry, I dont quite remember what we were talking about. Can you tell me what you were/are confused about?\"", "score": 0.925632030699506}, {"text": "\"That's basically what our members have said, not necessarily my opinion. 
Anything I miss?\"", "score": -0.5610518172611855}, {"text": "So you don't like people pointing out the errors you make? Or do you have any actual arguments to counter my post?", "score": -2.0133829743405736}, {"text": "Thank you for deleting the attack page I tagged. Should <url> be blocked for creating it?", "score": 1.2366208120904107}, {"text": "I'm curious as to why you chose to <url> a full two minutes after my last comment? Am I to take this as a sign you are no longer willing to discuss the issue?", "score": -1.5103748916197794}, {"text": "<url> has been promoted to A-class. Shouldn't that score?", "score": -0.4968631347898091}, {"text": "\"I received a message from the bot you run telling me that one of two duplicate files I uploaded is due for speedy deletion. If possible, could it be made that Bath_badge.png is the one that is deleted, rather than Bath_rugby_badge.png please?\"", "score": 1.2611435428094027}, {"text": "\"Acad, at <url> the article suddenly starts talking about ''Pomone'' without any context - is this a typo? You're a major contributor - can you clarify?\"", "score": -0.5521554818825445}, {"text": "\"Ah well, if you are going to have such a bizarre flight of fancy as far as to calling it an attack page then perhaps I will remove it. Maybe you should also look at how you interact with others?\"", "score": -1.9708691465606865}, {"text": "You know there's no excuse for the edits you're making related to your position at <url>. This has to go to <url>; would you like to make the report?", "score": -1.570315603755062}, {"text": "Your bot has ruined the formatting on ref 2 in <url>. Would you care to either call off the bot or suggest an alternate formatting that will work better?", "score": -1.1676544599269956}, {"text": "\"Excellent, thank you! 
Can you also delete the <url> from en and move hooks from the <url> to <url>?\"", "score": 1.7625994855749565}, {"text": "\"If someone is an immigrant and not legally documented, that someone is an illegal immigrant. How can such a rally be anything but pro-illegal-immigration?\"", "score": -0.7844981293424984}, {"text": "I'm very impressed with your work getting Niagara County's NRHP list filled out with pictures and articles! Did you take most of those new pictures yourself?", "score": 2.0249267599784715}, {"text": "\"Okay, I will fix the \"\"crystals\"\", but how do I put myself up for adoption again? Will you kindergarten <url> me through the process?\"", "score": -0.9679907406991246}, {"text": "\"I would like to first thank you for coding the afd helper app. Secondly, is it possible it could be changed to work with other XfDs?\"", "score": 0.9889657274993475}, {"text": "Just move <url> to <url>. And shouldn't the clean-up parameter be added to <url> also to make it work?", "score": -0.8442826036163635}, {"text": "Could you please block me indefinitely. If you can't then what do I need to do to get blocked indefinitely?", "score": -0.6123013400333914}, {"text": "\"Was it you who deleted the section in Victor Garber's article about his sexuality and his partner? If yes, what was the reason?\"", "score": -1.0828108250335973}, {"text": "Why did you remove those German emigrant cases. Shouldn't we include those cases of people who was born in what was then Germany at the time?", "score": -0.7054495672401445}, {"text": "\"Granted, it wouldn't be terribly difficult for me to recreate what I did, but I don't like doing extra work when I don't have to. Could you at least userfy the last version edited by me?\"", "score": -0.5875436177045736}, {"text": "\"I was actually going to work on your suggestions today, regarding the GA nomination for <url>. 
Do you mind reopening the nomination?\"", "score": 0.8550927794003469}, {"text": "\"I appreciate your effort to seek consensus through a merge proposal before merging <url> to <url>, but as you are no doubt aware, no one participated in the discussion, meaning that while there was no articulated consensus against it, there was also no consensus for it. May I recommend undoing the merge for now, re-opening the discussion, and seeking wider input at the relatively large number of WikiProjects that handle those articles?\"", "score": 0.9817738098744633}, {"text": "\"Thanks. Do you feel the information is \"\"weak\"\" or is it just too complex?\"", "score": 0.7290454548318126}, {"text": "\"The reason given on the <nowiki><person></nowiki> tag (''\"\"Non-notable book promo\"\"'') seems to have nothing at all to do with the article. Is this on the correct article?\"", "score": -0.568511862510667}, {"text": "Does it not take just a bit more than someone saying they own it to remove a copyvio? As can be seen I post from Canada and what's to stop me claiming to be <url> and anything published in the <url> can be put on Wikipedia (or something a little more sensible)?", "score": -1.3565660821442296}, {"text": "\"... but when you do things like <url>, I start distancing myself. What made you come to that decision if you did differently on the <url> and <url>?\"", "score": -0.9141041311852428}, {"text": "\"Can you explain what you mean by ''\"\"Randi, paranormal, Larry King etc. are all redundant redundant\"\"''?\"", "score": -0.5333649301277572}, {"text": "\"Yes, I agree. Would you please make the appropriate change?\"", "score": 0.9643510229437815}, {"text": "You also seem somedeal similar to <url>. Not logging in is not sockpuppetry but do you acknowledge that User:64.194.44.220 is you?", "score": -0.6958307680984033}, {"text": "\"I've noticed that you're engaged in several edit wars, with Meliniki, regarding minor alterations to multiple Greek language articles. 
Perhaps you should consider discussing the changes on Meliniki's <url> instead?\"", "score": -0.9279869597637875}, {"text": "\"I don't see how you can use this to discount his trustworthiness and oppose him. If there is consensus to promote among several bureaucrats, what's it matter who actually presses the button?\"", "score": -1.2207796551651806}, {"text": "I have always maintained a control of the military section and I can check up the names in the educational section. Which section would you like to clean-up?", "score": 0.6913345298767979}, {"text": "\"Since I have not been able to get an answer to this on the project page, let me ask you directly: Did you receive Durova's \"\"secret evidence\"\" prior to the blocking of <url>?\"", "score": -0.906063106904343}, {"text": "Afterthought: where did you get your figure of 82500 tons pa??", "score": -0.5536787783154553}, {"text": "I would like to nominate you for adminship. Would you accept now?", "score": 0.5388010888489506}, {"text": "Crossover with ''30 Days of Night''?! wow....link?", "score": -0.5559918684792496}, {"text": "\"Here's my question, just to be clear, is it possible for the owner to release the picture under a lower resolution for Wikipedia and at the same time retaining the ownership of the higher resolution version? If it is, can you help me process this image?\"", "score": 0.8228402002949482}, {"text": "\"After looking through a bunch of those, I think the lead should be formatted like this: a paragraph explaining who he is, a paragraph summarizing his life and career, and a paragraph summarizing why he is so great. What do you think?\"", "score": 1.3176225443047778}, {"text": "\"''Don't'' talk ''about'' me, in my virtual presence, and tell someone else to ignore me. Do you understand why this behavior is inappropriate?\"", "score": -1.4619854606465026}, {"text": "\"Re your e-mail, there is info there which is not on my talk page. 
May I add it to my talk page, or would you prefer it stayed off?\"", "score": 0.722067516435993}, {"text": "That's a good idea. What about army and corps HQs though?", "score": 0.5676999144365465}, {"text": "Sounds good. Could you drop Fritzpoll a note?", "score": 0.6550275020577048}, {"text": "\"Um - no. And I take it you delieve all racist and fascist groups are \"\"right wing\"\"?\"", "score": -1.2366455994497998}, {"text": "\"There's been a slow revert war occurring between <url> and <url> over the Katja Kassin article and I've just left a strongly worded suggestion that they hash out their differences in the article's talk page. Could you keep an eye on the article for a day or two, just to make sure they stop reverting and start discussing?\"", "score": 0.6105474626031467}, {"text": "I got the page fixed. Sorry I had to do it several times on the page but it wouldn't let me revert it to before the IP user vandalized it how do you do that?", "score": 0.7747795977863867}, {"text": "\"Fair enough on the general murder victims. But Kriss Donald was British, was he not?\"", "score": -0.529965628441181}, {"text": "The new features of Hotcat are excellent: whom do I thank for this? :-) Are you still its maintainer?", "score": 1.1295728466965753}, {"text": "Subukan kong ipagtatanong-tanong ang pangalan ng lola mo sa mga kaibigan kong native dito. Sa Tuguegarao ba siya/sila mismo o sa ibang mga karatig-bayan?", "score": -0.6549886752609361}, {"text": "Do you understand now what was wrong with your edits? What do you intend to do if unblocked?", "score": -0.9994823101635291}, {"text": "I'm in Japan timezone - and at work now - and most IRC is totally blocked through work proxies. Can we do a weekend?", "score": 0.5960402301512344}, {"text": "Really? 
Why?", "score": -0.8502188883976782}, {"text": "\"Even worse: \"\"The authorized use of this data is limited to informational and educational purposes only, and NOT for operational or commercial purposes.\"\" This clearly disqualifies the text for Wikipedia purposes, I'm afraid we'll have to delete the article. Do you know of any similar contributions you've made?\"", "score": -0.6166532010892005}, {"text": "\"Hi, I hadn't realized that you had made a revert on redirecting this. Could you see my comments on <url>?\"", "score": 0.5610951051112012}, {"text": "\"Hi, ixb4d like to know why are you constantly removing all information i put in your scm comparison about Plastic SCM. donxb4t we have the right to show there our information as the rest of users?\"", "score": -0.6709962949133819}, {"text": "I have started to fix it. Can you help?", "score": 0.9184649343132648}, {"text": "\"Why have you included the Putnam Line in the MTA infobox, when it was never operated by the MTA (excepting the Marble Hill stub, which is hardly on a par with the Hudson Line, Harlem Line, etc.)?\"", "score": -1.1462561214814844}, {"text": "You call what I did wikistalking yet these editors are allowed to do the same thing to me? Where's the sense in that?", "score": -1.634303279612378}, {"text": "I have to protest that I find it rather meaningless to present arguments on the talk page for edits which are then blankly reverted by the likes of User:Historicar without explanation or discussion. Any suggestions for a more formal process or other remedies?", "score": -1.186541360341717}, {"text": "\"I do have one semi-newbie question: I made some edits under my IP before registering (<url>) and would like to \"\"claim\"\" them now that I have registered. Do you know if that's possible, and if so, how to go about it?\"", "score": 0.7443664314549523}, {"text": "Don't add any more trash to the article. 
Got it?", "score": -2.617203861565741}, {"text": "\"I think I lot of the things on the Methuselah page are very interesting, but perhaps should be transferred to the Biblical Generations article, if they touch on more than just Methuselah. Would you agree?\"", "score": 0.7683135641438719}, {"text": "You've contributed a lot to Puget Sound area articles. Would you be willing to take a look at the debate I'm having at <url> and do whatever you think is best?", "score": 1.4958763170186262}, {"text": "\"Hello, sorry I probably should have asked first, but I thought it might be a good idea to nominate one of the images you uploaded, <url>, for featured picture. I hope you don't mind me taking the liberty, as it were?\"", "score": 0.8998970942296609}, {"text": "\"No, I didn't know that. Where does this \"\"rule\"\" come from?\"", "score": -0.8557634839115638}, {"text": "I see you created a nonsense article yesterday because you were bored. If I unblock you will you disrupt more?", "score": -1.4381917135436364}, {"text": "\"Hello Jeffq, I noticed you revert vandalism occasionally. Would you like me to grant your account <url> to help you revert vandalism more easily?\"", "score": 1.946720190049128}, {"text": "\"Hi Dirk. Does the two books you cited for the heavy Belgian <url> losses have any info of how many Gladiators the Belgians lost, and perhaps what losses they incurred on the Germans?\"", "score": 0.5340957004593727}, {"text": "Something's still not right with me here. Think you can perform one of those cleanups?", "score": -0.6593552785957715}, {"text": "\"Hi Michael, have you ever heard of this term before? Does it have wide usage?\"", "score": 0.7607393074231197}, {"text": "\"Owing to your status as an active wikipedian, I'm not yet reverting your edit to this article. 
But what sense is it to move his birth location from the birthdate position in line one, to the top of (a newly formed) paragraph 2 which then (continues from the former paragraph 1 subject) takes off on a subject entirely unrelated to his birth?\"", "score": -0.8477677372373034}, {"text": "\"Hey, what do you think if instead of naming surahs like this <url>, we add the surah number to each like : \"\"Surah 1, Al-Fatiha\"\". Would u agree with such page moves?\"", "score": 1.1413678625033428}, {"text": "I don't understand Tasc0??", "score": -0.8447780999322465}, {"text": "\"See my comments on the article's talk page, which is on my watchlist by the way. Be bold?\"", "score": -0.48138405921592786}, {"text": "\"There may not be a lot of situations like this, but there will definitely be some. I was wondering if there is any kind of invisible comment that can be added to the article in these situations so that the report doesn't include it, or if you have any other thoughts about how it might be possible to handle this situation?\"", "score": 0.8409160484498663}, {"text": "What City in Connecticut do you live in? And have you been to the north as your name suggests?", "score": -0.5551982127100976}, {"text": "\"Thanks. Can you leave a comment at <url>, saying you are the creator?\"", "score": 0.735294284726409}, {"text": "I'm curious how you justify your removal of a link to the Washington Blade newspaper website in the <url> article<url>. The Blade is a valid media outlet - a newspaper dating back to 1969. Is there some bias at work?", "score": -0.5957468248826945}, {"text": "\"Can it be expanded beyond a definition? Can you write something about who coined it, references to its usage in popular culture, things of those sorts?\"", "score": 0.5504708850402175}, {"text": "\"Maybe you should consider a user name change? To, say, \"\"Sarah 'The Hammer' Ewart\"\"?\"", "score": -1.1533106991730742}, {"text": "I don't see any mention of Wikipedia in that article. 
Don't you have other things you could be worrying about?", "score": -1.8383195504874803}, {"text": "Just wondering why removed death material? Perhaps another article on his death and circumstances- legacy?", "score": -0.466053240503496}, {"text": "\"Regarding our WikiProject discussion about airport naming, I am unsure about Wiki etiquette - should I edit your sandbox with links indicating the common name (i.e. airport even though it's an aerodrome) or should I list them on my own talk page?\"", "score": 0.6827519426518907}, {"text": "\"Hi JoJan. I see that the template which listed the failed FA attempt on <url> was deleted from the talk page, and I wanted to ask if that is considered OK or not?\"", "score": 0.5595424022562547}, {"text": "I was not trying to sound rude earlier. Haunted Angel which of my seven reasons had you decided were evidence of his sock puppetry?", "score": -0.5591493549325406}, {"text": "Can I get some time to finish what I am doing without everything being deleted??", "score": -1.1541323602865667}, {"text": "\"I don't really get it. In your terminology, is there someone practicing Germanic paganism who is not a neopagan?\"", "score": -0.49482562962409277}, {"text": "\"Without participating in talk page discussion, you have deleted reliable sources, and rewritten a sentence so that it is supported by no sources at all except your personal opinion.<url> Am I missing something here?\"", "score": -1.3514181955912101}, {"text": "I read your publication and enjoyed seeing the relationship between activeness and chance of success to become an admin. Are you planning to do a similar model for bureaucrats?", "score": 0.6189743646184318}, {"text": "\"I was doing some minor editing on this, and noticed that the date of death in the intro and infobox don't match. Could you take a look at this and see which is correct?\"", "score": 0.9284861990570779}, {"text": "\"No problem, It was open and shut <url> issue. 
For your next project can you find something that doesnt make your fanclub at the Wikipedia Review angry?\"", "score": -1.1450798923465015}, {"text": "\"The policy is quite clear; userids set up for the purposes of harassment or vandalism are blocked indefinitely. That said, what policy is it that you think I'm not following?\"", "score": -1.5147421457737915}, {"text": "\"I see the dilemma with separate series.. in the case with (untranslated) Spirou there are no such problems, but it seems to make more sense, with no english publication, to switch the brackets for english and original titles (ex: <url>). Does this create a problem with continuity?\"", "score": 0.739099154922144}, {"text": "\"Concerning <url>, I must say I am a bit confused: isn't the article's title \"\"Austria<span style=\"\"color:red\"\">u2013</span>Hungary\"\" (where, incidentally, it was moved after an \"\"Austria<span style=\"\"color:red\"\">-</span>Hungary\"\" period)? It's really no big deal, but how can something be both correct and incorrect, and which standard are we at long last supposed to follow?\"", "score": -0.7671004651172764}, {"text": "\"You won't even let the ink on your report dry and you're already at it again. Care to explain finally what about the DAB needs \"\"cleaning\"\"?\"", "score": -2.122328030387186}, {"text": "Thank you for your answer about Illustrator. How does Adobe Illustrator compare with InDesign?", "score": 0.5923482208701023}, {"text": "\"Hi, Thank you for closing <url> as \"\"delete\"\", but much of the material immediately re-appeared at <url> (see <url>) so the questionable content hasn't been deleted at all. Can we do something about this?\"", "score": 0.715300795096762}, {"text": "\"Hi Steph - this would be a fine article, but its already pretty fleshed out - do you have a specific sub-topic regarding propaganda films that you'd like to write about? 
Have you found some research about propaganda films that you'd like to synthesize?\"", "score": 0.5946852372422493}, {"text": "Thanks for help! One more question: similar item at this address is a painting: http://sflib1.sfpl.org:82/record=b1035824~S0 Do I use the same tags?", "score": 1.166897227462534}, {"text": "\"Crap, I'm sorry, I didn't realize someone was working on it, and it's done now, sorry to get your hopes up. No dramas?\"", "score": 0.5477367424694931}, {"text": "\"As I said in the summary when moving him away from <url>, for the last twenty-six years of his life he wasn't a priest. Perhaps I've missed the part of the naming convention which requires 'priest', could you please say more?\"", "score": -0.6551329590311536}, {"text": "\"I do understand the bot was approved and is doing useful job, but. Is it possible to modify it to not save the changes if it is only one-two whitespace(s)?\"", "score": 0.6481807237557049}, {"text": "\"Hi, I tweaked it again. Could you check please?\"", "score": 0.9288454418689286}, {"text": "\"Great- so bump up to around ten selected pictures (selective choices, obviously) and then you feel it is ready for FPOC? Is there anything you feel the portal is obviously lacking?\"", "score": 0.690815493856506}, {"text": "I could have a go. When would you need it?", "score": 0.6590695528316559}, {"text": "\"Regarding comment on \"\"length of Performance reviews\"\" section - the intention here is to provide support for the statement in the biography that \"\"Latsabidze is already regarded by many as one of the more significant performers of his generation\"\". Do you recommend that I just delete this statement, or is there some other way to support it other than providing representative critical reviews that say so?\"", "score": 0.8331507248430885}, {"text": "\"I think this deserves its own article, as opposed to being in the centrifugal one. 
Do you have any good sources that describe how it works?\"", "score": 0.6444812964226465}, {"text": "\"Yes, I apologize, my friend (I guess I have some kind of a phobia that you will forget about my posts :) ) I will wait as long as you want me to. Could you read Mir Harven's posts on talk page?\"", "score": 1.2222410525596468}, {"text": "\"Back when the Nirvana article was a COTW (in like September I think) I started working out a \"\"Musical style\"\" section on a userpage, but never finished it. Would you like to help out with it?\"", "score": 0.8769541502136953}, {"text": "\"A little while back, at the beginning of November, I had said that if you ever were considering adminship, that I would love to support and/or nominate you. Do you have any interest in pursuing it?\"", "score": 1.41479800745452}, {"text": "Hello. I was wondering if any of the books you have can shed much more light on <url>?", "score": 0.8394988265717125}, {"text": "\"I have put another warning on <url> with full details of the vandalism. Can this be reported to London Grid, please?\"", "score": 0.9505395952680417}, {"text": "\"Hi DeltaQuad, Since several reviews have been done, can you review the version from the same date? 1 July 2010?\"", "score": 0.5368387880054941}, {"text": "\"What's wrong with having everything look the same, the links are for the Official website of the channel so I don't see the problem??\"", "score": -1.228434147343398}, {"text": "\"It's not that, it's just feels ... wrong. An admin especially should be accountable for all his dealings, so why delete the history?\"", "score": -0.48232252380366775}, {"text": "\"Alright, the NPOV template is even worse, since it specifically points people to a talk page discussion which does not exist. Why are you adding these templates?\"", "score": -0.7357488668419444}, {"text": "Liked that <url> I tore into that spammer? Got a box with a gun?", "score": -1.1862769358231617}, {"text": "Please answer Bishonen question to you. 
When were you invited to comment on UC related subjects?", "score": -0.8723692363900664}, {"text": "\"FYI, perhaps you failed to notice, but Anthropologist's \"\"accursed\"\" faux pas was already brought to his attention by Philbrick. I think you'll probably agree that a second, dedicated TP section is a bit of an overkill, no?\"", "score": -0.7820434935248468}, {"text": "\"Eeeeerrrrr... Could you type a little slower, I am having trouble with the above!?\"", "score": -0.5798346025712144}, {"text": "\"Multiple editors never occurred to me - I didn't think much about the boxes, and the blocking help page wasn't of much use. Of \"\"Block anonymous users only <url>, Prevent account creation <url>, Automatically block the last IP address used by this user, and any subsequent addresses they try to edit from <url>,\"\" which should I have checked?\"", "score": 0.5402513706172948}, {"text": "\"If there was another film that had the same title and wasn't disambiguated on the 1961 film's page or the short story's page, then I cannot be held responsible. How was I to know that there was another film?\"", "score": -0.6950675885508111}, {"text": "\"I have never seen anything which says that warning levels should begin anew each day. <url> says that warning templates \"\"are listed at right in order of severity, but need not be used in succession.\"\" If there is another policy somewhere, could you show it to me?\"", "score": 0.8199127365848357}, {"text": "\"Thanks, the logo looks much better now. You wouldn't have a logo for the two predessesor clubs by any chance?\"", "score": 0.929719755953248}, {"text": "\"Thanks, but I am now having trouble finding it. Any suggestions?\"", "score": 0.6633157325116168}, {"text": "\"Say what? And what does this have to do with \"\"Happy New Year\"\" and my pretty picture?\"", "score": -1.6765745720907532}, {"text": "\"Hi Skully, I've been keeping an eye on your edits over the past few weeks, and I think you should be a admin. 
Is it alright if I nominated you to be an admin?\"", "score": 1.1314774906807166}, {"text": "Thank you for closing out this debate. Can you please restore the history for GFDL purposes?", "score": 1.723452496373956}, {"text": "\"Yeah I'd be happy to look at the list, however can't promise much! Shall we discuss it on the article's talk page so that any comments are in the relevent place?\"", "score": 1.2070530200132028}, {"text": "I had no idea that I was such a rogue admin. Are you seriously suggesting that I need to be kicked off the project for closing a deletion debate?", "score": -2.402599026281254}, {"text": "Thanks for your comment. What was done wrong in listing the image for deletion?", "score": 1.3063235587152804}, {"text": "\"You told me that you couldn't access the page in question, although I had recently changed the link to its new location. Maybe you just didn't bother to check it out?\"", "score": -1.4545264461300138}, {"text": "\"With respect to your format, I think that all the things listed in the section before the sections should be separated as slang refs, phrases using breaking then the other section that appears. What do you think?\"", "score": 0.857753793324781}, {"text": "\"... then you have to at least meet me half-way. For instance, \"\"''<url>'' described the episode as displaying \"\"an exhilarating flair for rapid change of comic gear\"\" and made commented positively on the scene.\"\" Can you see the problem here?\"", "score": -1.4150598138151882}, {"text": "\"I have just noticed that today a Wikipedia administrator added a POV tag in the u201cPsychohistoryu201d article, which you have contributed to edit. Perhaps someone should do a little work to balance the article and remove the tag?\"", "score": -0.4800387191418996}, {"text": "\"Um, you know that the last couple of replies have been from someone other than Esprqii? Right?\"", "score": -1.1078512220807137}, {"text": "\"With all due respect, that is going to take ages. 
How can one create a 'Bot' to do all this for me for <url>, <url>, <url>, <url> and <url>?\"", "score": 1.1717482114425417}, {"text": "\"Thank you for your time, but, perhaps; If I give you the iformation and images and such for Lord Strachan, could you maybe make it? It shouldn't be that hard to find info, or images, but I already have them, so if I give you this stuff, perhaps could you do it?\"", "score": 0.8989174694609197}, {"text": "\"He's doing it again, removing Lauren's \"\"bottom 2\"\" placing (<url>). I just reverted him but maybe we need to file something at AN/I ?\"", "score": -0.8194343060023079}, {"text": "\"Thanks for the redirect help! Is that something I can do myself in the future, or is that an admin thing?\"", "score": 1.008833462246584}, {"text": "\"Weren't you involved with <url> (aka \"\"wikibugs\"\") at some point? Is it dead?\"", "score": -0.603477685040914}, {"text": "\"As I wrote above, at first I thought lets keep it, but after I heard some arguments, and when I made analysis of my own, I got to my conclusion. What's yours?\"", "score": 0.5801439762502341}, {"text": "Good day. Can look at this report please <url> ?", "score": 0.9541658735448312}, {"text": "\"<url> - I removed a see-also to a link placed by a COI-editor pointing to a topic about himself, which duplicated an existing link on the same topic. Exactly what is <em>your</em> reason for the template on my user page?\"", "score": -1.0878074647996725}, {"text": "\"<url> is a validly notable topic which has been ruthlessly suppressed at WP, probably because of egoism rather than religionist POV warriors but possibly both. Can you advise me on how to go about appealling deletions?\"", "score": -0.5411167433333969}, {"text": "\"Alf has a large <url> in Germany. 
How on earth do you know that he is a character \"\"almost no one knows about\"\" Where you get your f'n info?\"", "score": -2.6189000029122846}, {"text": "\"I noticed, that for users warned before, Huggle still uses level 1 warning. Is there anything I can do?\"", "score": 0.5636159992284645}, {"text": "He's gone. Was it something I said?", "score": -0.6360840527044991}, {"text": "\"Editing pattern just means, have you made more than 3R in 24h. Why should S get blocked, when S didn't break 3RR?\"", "score": -0.7464464999730345}, {"text": "\"I will understand if you decline, but would very much like you to accept. May I nominate you?\"", "score": 1.2220990975147283}, {"text": "\"You are being very insulting on <url>. Can't you just trust the strength of your arguments, instead of insulting people who are doing their best to help improve Wikipedia?\"", "score": -2.1273908207501093}, {"text": "\"Thanks from me, too &mdash; cheers! I don't suppose you could put copyright and source information on those images, though?\"", "score": 1.20853627239356}, {"text": "I finished my expansion of the category you created. Any suggestions on how to improve the category?", "score": 0.9540287502627557}, {"text": "\"Please take a look at Space Shuttle Discovery, for FPC. Will you reconsider?\"", "score": 0.9498165969294062}, {"text": "\"I am an administrator on this project and my task is to enforce these rules. Now, will you cooperate or will I have to block you?\"", "score": -1.780212350591259}, {"text": "\"Thank you for your information, I just received a flood of information from <url> as well. Ok, so in this case it is assumed that she was \"\"Prinzessin v. Sachsen\"\", whereas <url> was \"\"Herzog\"\", ja?\"", "score": 0.6329375841347727}, {"text": "\"Emico, you have to understand that sometimes people use different words to describe the same thing. 
If I find a source saying \"\"Hitler killed himself\"\" and I write \"\"Hitler committed suicide\"\" am I wrong because the word ''suicide'' isn't in the sources?\"", "score": -0.5885667931376195}, {"text": "I tried to address your concerns in the nomination page. Do you mind having another look?", "score": 0.7981178346628297}, {"text": "\"\"\"I will be retiring to the Cayman Islands shortly with the riches I'm gathering through my Wikipedia editing of political pages.\"\" - ''(From your userpage)'' - You're joking, right? Oh, and you're also joking about being a Republican too?\"", "score": -0.9549789491351277}, {"text": "And <url> targets <url>. Shouldn't it go to <url>?", "score": -0.6676494188359692}, {"text": "\"<url> is at it again, adding in the \"\"in the Premiership\"\" content you & I successfully merged several weeks ago. Could you perhaps also drop a note on his talk page asking him to stop?\"", "score": 0.5800423568051043}, {"text": "\"On reflection it's up for GA peer review, preparatory to trying for FAC. Would you care to <url>?\"", "score": 0.6093917210318793}, {"text": "Hmm...it appears to be an infobox at the moment and lacking in any content other than that. Have you done a search for the battle at all?", "score": -0.4528505117376488}, {"text": "I don't have the delete privilege assigned to my account. What are you talking about?", "score": -0.9375967192887595}, {"text": "\"Hi, you have tagged the article <url> orphan. Can you please check it ?\"", "score": 0.9546100063061003}, {"text": "\"Hi- The Magnificent Clean-keeper- I note that you recently removed a contribution I made to the <url> page, citing lack of \"\"notability\"\" as the reason. 
Iu2019m not saying you where wrong to do so (notability is an important criteria in the Wiki project), however, since the definition of \"\"notability\"\" is hard to pin down, could we discuss this particular instance, in greater detail, at some point?\"", "score": 0.7632103005438919}, {"text": "\"You mean the links that are being edit warred over? I can, but first can you explain what the edit war was all about and what the point of the links are?\"", "score": -0.5483311505036178}, {"text": "\"Good job, great article. Where did you get the idea, out of curiosity?\"", "score": 1.7512311313216842}, {"text": "\"A redirect works exactly the same way regardless of whether there's a space between \"\"#REDIRECT\"\" and the target article's name or not. Why bother removing spaces from the redirects if it doesn't change the functionality at all?\"", "score": -0.6666290929509537}, {"text": "\"The link you provided to speedy delection criteria only goes up to CSD12, and there is no CSD 13 listed there. Could you clarify?\"", "score": 0.6790626062981133}, {"text": "\"Thanks for readjusting the <url> remarks on stationsby - I must confess I'm not that much into Danish history, so my edit contained some guesswork... thanks for correcting me. Do I understand correctly that a Stationsby would not be the station that originaly is intended for a town further away, around which then a new town grows up?\"", "score": 1.350847533317392}, {"text": "Thanks for protecting this. Perhaps no longer needed?", "score": 1.072916111745266}, {"text": "\"Hello Iris. Would you add Straka's signing with a Czech team into, to his page?\"", "score": 0.572691358418034}, {"text": "Why did you remove my comment from your page? What was the reason for that?", "score": -1.332315257607657}, {"text": "Yes I can. Is it an alternate account?", "score": 0.5756883012744802}, {"text": "I found <url> whilst looking for something else. 
Any use to you?", "score": 0.8569343692746078}, {"text": "\"Regarding your time setting, I'm a little unsure what you want to do - there are only two things you can change - the timezone offset from GMT, which you set at the bottom of the screen by selecting the correct offset for your location from the list - I'm guessing Asia/Calcutta would be about right, and the way the date/time is displayed, which you choose from the list of five options at the top of the screen. What problems are you having ?\"", "score": 0.732192623030697}, {"text": "\"So, basically, you're just violating <url> and <url>. Why?\"", "score": -1.707454535725637}, {"text": "You're welcome. What else is peer review for?", "score": 0.8145926718130168}, {"text": "\"I was going to create the list of tallest buildings in Hartford, when I saw that it had been deleted three years ago by you. I don't understand the reason for deletion; can you clarify why?\"", "score": 0.6526127220906914}, {"text": "\"You also did three reversions. Where are the administrators, meanwhile?\"", "score": -0.5236735474554755}, {"text": "\"What do you mean by \"\"not a derivative of Formula 3 article\"\"? Why did you feel compelled to remove the link to the main F3 article off the top of all the F3 sub-articles?\"", "score": -1.5714587244235978}, {"text": "\"I really would rather use, in English, the one expression \"\"<url>\"\" instead of \"\"<url>,\"\" \"\"marz,\"\" \"\"<url>,\"\" \"\"khaet,\"\" \"\"<url>,\"\" \"\"eparchia,\"\" \"\"ostan,\"\" \"\"khoueng,\"\" \"\"faritany,\"\" \"\"aimag,\"\" \"\"tinh\"\"u2014or \"\"<url>\"\" (aka \"\"<url>\"\"). <url> then why should \"\"<url>\"\" not be also?\"", "score": -0.6191888046200055}, {"text": "\"Thanks, firstly, for the affirmation and work on <url>; it was yesterday's featured article! As a related article, I've been working on <url> - what do you think would be necessary to get it to FA?\"", "score": 1.387036406223364}, {"text": "\"Thanks. 
Now, if I had to make an infobox on \"\"London\"\" (just ''London'', I know there are many terms: Greater London, City of London etc), would it be correct to include the area/population/density of the territories I mentioned + the city area, or would just the city area suffice?\"", "score": 0.6949363088625515}, {"text": "\"Thanks, was getting worried there for a moment. Have you though of asking Outriggr for a hand with <url>?\"", "score": 0.7009037832559296}, {"text": "\"That's the British English spelling. American English wins out on a lot of stuff on Wikipedia, so why not give a cookie to the Brits now and then?\"", "score": -1.0730459831347523}, {"text": "\"Crimson, there is a discussion at <url> where we have implemented a new template for bot operators when their bot is blocked stating that it is in ''no way'' an indication or accusation of wrong doing on the operator's behalf. I'm sorry you have interpreted that way - would you be willing to re-consider your retirement?\"", "score": 1.3305875558226066}, {"text": "This is incorrect. Have you looked at the article any time after October 2008?", "score": -1.9856165241602166}, {"text": "This is great so far! What do you feel you need help with?", "score": 1.176758363305329}, {"text": "\"What's your issue with noting the copyright registration date for this piece? Also, could you point me to the consensus you mentioned?\"", "score": -0.7859047900940369}, {"text": "Good point - I was unaware of that. Perhaps then you should include a short reference to Aquitanian in the sentence?", "score": 0.8222037703193609}, {"text": "Girne. Sen?", "score": -0.6125146411256065}, {"text": "Nice. What SVG editor did you use?", "score": 1.0710643405378328}, {"text": "Happy belated birthday to you too! How have you been?", "score": 2.013795425538008}, {"text": "\"I was just about to create an article on the 2009 Malawian election; the purpose of adding it to the template was to give me a convenient link when I decided to do it. 
If red linked elections can't go in the template, why don't you delete all the red linked past elections?\"", "score": -0.5301260954566315}, {"text": "How can people edit them if it just redirects? Did you think about that?", "score": -0.9359888559730889}, {"text": "\"Tom, that article is ready for publication, was written by someone else, and your changes aren't improving it. Did MER-C ask you to work on it?\"", "score": -1.8304248814617352}, {"text": "\"|style=\"\"vertical-align: top; border-top: 1px solid gray;\"\" | For creating an article on a relatively obscure bridge with interesting information that remains in the current article, which has become the focus of the world for all the wrong reasons, I hereby award you this oddball barnstar. Who knew all this could happen?\"", "score": -0.9039264130284078}, {"text": "My question about the truncation operation is to understand the generic algorithm for applying a ring (or un-ringing) a Dynkin node. What method (code) do you use to get the higher dimensions truncated based on the Coxeter Dynkin ringed nodes?", "score": 0.5473875848914196}, {"text": "\"Hi I noticed that you changed the category at <url> from \"\"People from Alabama\"\" to \"\"People from Elmore County, Alabama\"\". Is this appropriate, as Opothleyahola was dead for three years before Elmore county had been organized and had been forcibly removed from the area more than thirty years before?\"", "score": 0.6303992322038281}, {"text": "\"In the <url> article, is polar mount correct? It might be improved by a bit more detail about how it works, perhaps?\"", "score": 1.2760472053073204}, {"text": "You may want to comment on the A7 to ?2 graph with 4 lines. I suspect it is nonsense - but what are your thoughts on that folding option?", "score": 1.2266329281465205}, {"text": "Hello??? 
Are you just pretending I don't exist?", "score": -1.5402543282370234}, {"text": "\"One final thing: \"\"If anything, many vegetarians are characterized by their disapproval of non-violence (towards animals).\"\" You do realise that you are saying, here, that many vegetarians approve of violence towards animals? Is that really what you meant?\"", "score": -1.039094367385009}, {"text": "It is an obvious and strongly followed point that april fools jokes ''should not disrupt the running of the wiki''. Could you explain why the HELLS you coated a dozen users' pages with pictures of jimbo's ugly bearded fizzogg?", "score": -2.1144074339820245}, {"text": "I'm sorry but looking back on that edit I won't stand for that. Don't you think that actually removing the edit summary was a bit... well... let's say 'abusing sysop powers'?", "score": -1.2180953580865579}, {"text": "I noticed your nice improvements to <url>. Are you thinking of merging/redirecting <url> to that article?", "score": 0.7766705405759065}, {"text": "\"Hope that you don't mind, I modified your comment in the Craig intro, using a nice new template that I found. Check it out: what do you think?\"", "score": 1.4255787626151188}, {"text": "\"There seems to have been something wrong with the pages you made on the V8 Supercar Championship Series - a glitch with the \"\"align\"\" tags in the beginning infobox made the text grotesquely overflow the margins. I've trimmed these down; mind checking my work?\"", "score": 0.9711828877708326}, {"text": "\"Good start. Can you add more, please?\"", "score": 1.1615345803575718}, {"text": "\"That is, I would remove the inline reference to the date of the cited source, since it's given in the actual footnote. Is that OK with you?\"", "score": 1.259479593021964}, {"text": "You had deleted the mozzarepa article I was working on since it was judged as an advert (something I didn't intend.) 
How can I edit and change the article so it is acceptable?", "score": 0.7391272532253641}, {"text": "I assume you like to play '''U.S.''' billiards bottom right corner?", "score": -0.7114266887507972}, {"text": "\"If people want to know more about her, there's the article on her. Are you really meaning to argue that people could not fully understand the concept of imagism without seeing a picture of HD?\"", "score": -1.3512674564459224}, {"text": "\"I am not familiar with the credit card / porn star identity incident you mentioned in AnonEMouse's RFA, but I would like to take a closer look at it. Could you provide some more direct diffs related to Mouse's involvment?\"", "score": 0.6816475409784287}, {"text": "Agree. Who the hell came up with that in the first place?", "score": -1.9288802839622643}, {"text": "I saw you work alot with medal of honor articles and I wanted to ask. If you think that <url> article is close to GA?", "score": 0.5832651172785134}, {"text": "wants to move from a free nation like th US to a tyrannical country like the UK. Give me an honest opinion Haunted Angel what do you yhink of him?", "score": -0.4810995775798109}, {"text": "Not sure if you are saying that I am beating a dead horse in raising the inclusion criteria problem again... or if Doncram is doing so in objecting to my doing so again. Or is it both?", "score": -1.2102433982268617}, {"text": "\"I am puzzled that you just deleted the file 'harry potter stamps'. This file seems to have existed for two years without deletion, so perhaps you could explain the grounds for speedy deletion?\"", "score": -0.7917812672200961}, {"text": "\"Yes, this is a nice illustration. I'd love to have a look of the code; is it possible for you to mail it to me?\"", "score": 1.583775399535337}, {"text": "What the hell? Bitter much?", "score": -2.6935616611010387}, {"text": "\"Up to you - I see myself picking off one or two every now and then but nowhere near the level of work NortyNort is doing now. 
By the way, I have an open CCI request that I want a second opinion on - could you please help?\"", "score": 0.7134185039282898}, {"text": "Why did you delete see also section in Puerto Rico? why did you delete the political parties?", "score": -1.3386145343185398}, {"text": "\"It was closed as \"\"no consensus, default to keep\"\". What difference does it make?\"", "score": -1.1207148077823372}, {"text": "I am concern why this user never replies to messages left on her talk page. Does she talk?", "score": -0.7098184587773113}, {"text": "\"Well, I fully agree with the three rationales Lumos has given above; and so evidently do about half the Wikipedians who have contributed to the RfC Lumos has cited. If so many think that such links can be useful and appreciated, why does it matter so much to you that they should be eradicated?\"", "score": -1.2599352690632337}, {"text": "I'm sure you're right but I can't find it in FM and haven't found it anywhere else. Could you cite sources in the article?", "score": 0.6947420213798814}, {"text": "There's no tab at the top of the page with a bold EDIT THIS PAGE on it. Is it just my computer?", "score": -0.4742423372221519}, {"text": "\"Thanks for that, hopefully I have input this correctly. This page also seems to be flagged due to several issues, surely these can now be removed as the page has proved its worth as much as pages like <url> have?\"", "score": 0.9965376300377965}, {"text": "\"You realize the bot maintains a separate page, right? Is it not possible that the \"\"problem\"\" is having the pages inconsistent?\"", "score": -1.2212298507266726}, {"text": "I'm confused: what did you do? And how did you delete it twice without restoring it in between?", "score": -0.6424478752563609}, {"text": "\"And DAB uses a very foul language, persistently, is that right? 
Did I not talk to you about that already?\"", "score": -1.3199609227344364}, {"text": "\"Is there any point to the quotation that's not served equally by the link on its own? Other than the obvious (which I'd like to think you're above), which is trolling the student pollies?\"", "score": -0.8058714282354735}, {"text": "Thanks for letting me know. Is it considered a subspecies?", "score": 0.9385327659431576}, {"text": "\"Fine, but you didn't need to be an admin to make edits to articles. Or did you?\"", "score": -1.189413326894162}, {"text": "\"Yeeesh. Is it just me, or is it very difficult to follow these \"\"stream of consciousness\"\" postings?\"", "score": -1.1847188780187612}, {"text": "Hayashi and Spreng Explicitly claim that the Inuktitut data is completely compable to Greenlandic. When you write Mahieu & Tersis you mean Trondhjem right?", "score": -0.4462816858610161}, {"text": "Thanks for the cleanup. Why did you delink the songs and Weird Al's album?", "score": 1.131727333322813}, {"text": "\"xD if you say so! By the way, what's your favorite game ever?\"", "score": 0.700524400426964}, {"text": "\"Sorry for the slow reply, something I needed to take care of. <url> is restored- if it is mentioned in such <url>, could you please <url> them in the article?\"", "score": 1.070741710694079}, {"text": "\"BTW, I'm amazed that he has the gumption to show his mug after <url>. And do you have the balls (metaphorically or otherwise) to place a hatnote above his article linking to that term?\"", "score": -1.6079153606211496}, {"text": "Now why would you do that when he specifically stated that he didn't want to discuss the matter for 48 hours and I specifically stated that people should come here? Did you not read his request?", "score": -1.475357648195182}, {"text": "\"Lemme just remember, who's going to be mentioned first? 
Snyder, or the failed projects?\"", "score": -0.5933567561621256}, {"text": "\"You again speak of \"\"the probability of interest\"\", whereas I made it clear that either there is only one probability of interest, and hence it cannot depend on the door numbers, or there are several probabilities of interest, all with the same value, and hence independent of the numbers of the doors. Don't you understand this?\"", "score": -1.7697047299405984}, {"text": "So....what's up with this article? Is it a joke?", "score": -1.0187134937389093}, {"text": "Re your remarks on the talk page of 9/11: '''I do not troll'''. Comprende?", "score": -1.812331627288863}, {"text": "\"I'd be more than happy to :) It may take a while as I've got other stuff on, but as long as you're in no immediate hurry all should be well. Do you have any preferences/requirements for the end result?\"", "score": 1.5138539962124198}, {"text": "Just musing on creating an article about the formation of the Pixies (I reckon I could write a decent-sized article on the whole thing). What are your thoughts?", "score": 0.7991385983714929}, {"text": "<url> is brand new and may need some attention. I thought you might be just the person who can help get that article at least secured against SPOD-attacks?", "score": 0.8347004457211238}, {"text": "\"I have, and they have not responded in one week. How long should I give it?\"", "score": 0.7008743220110761}, {"text": "\"So far so good? Drop me an email sometime, ok?\"", "score": 0.8586512581131132}, {"text": "\"The consequences apparently have no sliding scale? It's either \"\"you're blocked,\"\" or you're not?\"", "score": -0.6166935187930098}, {"text": "Compare the two. How is the first in anyway superior to the second?", "score": -1.3330075403151724}, {"text": "Depends; do you care deeply about image policy? Or are you an incurable masochist?", "score": -1.5104370380603602}, {"text": "\"I'm sorry, I'm having trouble understanding your question. 
Could you try again?\"", "score": 1.0999329952578483}, {"text": "\"Just an FYI, I replied at FPOC. How many lists would you prefer?\"", "score": 0.6657731216973926}, {"text": "Where did you get the tracklist for the album? Can you please drop me a messafe with the link?", "score": 1.3182479509955969}, {"text": "\"That \"\"guest\"\" could have been an undercover Colbert staff member and the whole segment could have been staged. Otherwise, why would Colbert have taken 3 minutes out of his show to talk to someone in the audience?\"", "score": -0.6293938308738605}, {"text": "\"I took a quick look and I'm not up to reading through it properly at the moment. :) I'll be back on here tomorrow morning and I'll have a look then, if that's OK?\"", "score": 0.6104296547440387}, {"text": "To me these two bits of TeX look identical after they're rendered. Can you explain the advantage of the latter format (since you've made a point of changing from the first to the second form in some articles)?", "score": -0.7840517375295456}, {"text": "Thank you. Do you have any tips on the Kathy Dunderdale article to improve it?", "score": 1.5192958700764276}, {"text": "This is irritating. What browser are you using?", "score": -1.3934538613860759}, {"text": "Please don't talk like a robot. What harm did it do?", "score": -1.2554460246035384}, {"text": "\"its - it's - its - Sheesh ! Thanks for fixing :-) Scary, huh?\"", "score": 0.5538282127225471}, {"text": "I tried to make some improvements to the article. What do you think?", "score": 1.1054440402038361}, {"text": "Why not try adding sources from time to time? If you won't who do you expect to do so?", "score": -1.6758293206662898}, {"text": "\"Damn it! Should we just revert back to the beginning of the day, since what I wrote was based on analyzing the satellite data?\"", "score": -0.7040348924237143}, {"text": "\"yes, but we are hardly debating that edit that hasnt stood in the last few days anywway and certainly isnt an issue right now. 
What of that edit do you currently object to with such vehemence?\"", "score": -1.9038204802089804}, {"text": "Wonderful! :) Do you particularly like to work on comics creators' articles?", "score": 0.8949828088758812}, {"text": "What page is this? Why was it deleted?", "score": -0.6403064365161288}, {"text": "Quite welcome. Doesn't Huggle use rollback when available and applicable (last several edits were by the same user)?", "score": 0.7790145626232252}, {"text": "\"Jason, how do you view your phone call to tlc's father&pastor in the light of the Biblical conflict resolution process? And in the light of the Wikipedian conflict resolution process?\"", "score": 0.6010552407954984}, {"text": "\"Thanks, but I've already read those essays. Did you have anything to say yourself, personally?\"", "score": -1.2453776740540607}, {"text": "\"Thanks for the various corrections you have made to this article. Do you have a source for the Anglo-Saxon Chronicle date of 921, please?\"", "score": 1.7677251289099345}, {"text": "Thank you for taking the time to dig up the references. Are you planning on integrating them into the article sometime soon?", "score": 1.589974603470401}, {"text": "\"Someone has Blocked the whole Opera mini Browser, as you know even i use my Mobile-Opera for editing. can you please help?\"", "score": 1.1077464711396525}, {"text": "\"Likewise there is some discussion at <url>, where <url> is categorising itself and its documenttation. Could you please fix that as well?\"", "score": 0.8312021633071925}, {"text": "\"Sorry, almost forgot. After calling me a \"\"child\"\" \"\"uneducated\"\" and making remarks to the effect of \"\"shouldn't be allowed to post on edit Wikipedia\"\" what grounds have you got to stand on to say I've made personal attacks against you by suggesting vandalism?\"", "score": -1.15279345904568}, {"text": "Thanks! 
:) So we're allowed to do multiple revdels at a time now?", "score": 0.7986342875338897}, {"text": "\"Hello Glacier109, I am contacting you because in golf they put Money not Winnings as the standard on most leaderboards used at the end of tournaments. I am just wondering why you selected the word winnings?\"", "score": -0.6164124176446671}, {"text": "\"As you may have seen, <url> was also blocked for the same time. In the light of his pledge to stay away from the article, can you give me an assurance that you will not be disruptive?\"", "score": -0.832878439140621}, {"text": "\"of <url> as those were just plain old copyedit (replacing begining of paragraph \"\"he\"\"'s with the name and a template fix). I'm assuming you were on the cell phone again?\"", "score": -1.0236466384131009}, {"text": "I note that you've edited since this was brought up here. Could you please respond?", "score": 0.5361753384386544}, {"text": "\"The block seems to no longer be necessary or productive. If I were to bring a motion to get them unblocked, would you support it?\"", "score": 0.9943350922299725}, {"text": "Thank you for catching Known for Know. How did you happen to look at the article?", "score": 1.355953691201225}, {"text": "Hi Finell: I have filed an RfC at <url>. Could you take a look at this matter and proffer some advice?", "score": 0.8443331175216724}, {"text": "\"Martin, sorry to disturb you, but you said that Kelson is making the new ZIM file, where is the location of that file? Does it also reside at <url>?\"", "score": 1.3475868784029934}, {"text": "\"Hey, you seem to know a bit about coding, so I figured you might have an answer here. If I want use code to grab the target of a page known to be a redirect, how would I do that?\"", "score": 0.8039601986503933}, {"text": "\"Yet you did violate <url> and could be blocked right now. If that edit recurred, would you revert it again?\"", "score": -0.8234922718460804}, {"text": "\"How about this for a novel solution? 
The next time a banned editor posts something to your page, why not delete it yourself with an edit summary of \"\"Sorry, I'm topic-banned from discussing this\"\"?\"", "score": -1.2306537702932232}, {"text": "Denied. I take it then that you deny responsiblity for an account (which I'll not name here) created just four minutes after you made that edit?", "score": -1.4167699856829272}, {"text": "\"In short, based on the sparse information provided by outsiders, I would like to work constructively with you (and others) to whip the article into shape, as its presence seems warranted but the article is unlikely to garner necessary details from random contributors. What shall I do next?\"", "score": 1.2037121219566813}, {"text": "\"Hi. Would it be OK to move the timeline to a template, so that all the Mars-related geology pages can point to the same timeline, making updates easier?\"", "score": 0.6958351027451827}, {"text": "\"Thank you so much! I hope it's allright if I move it to my front page, at least when the talk has to be archived?\"", "score": 1.4367374233751886}, {"text": "\"No, of course I never did. So why do you keep writing about \"\"scorning others's sensitivities\"\" when such a thing was never suggested?\"", "score": -1.6012465236762157}, {"text": "Eight more articles semi-protected. How many of these damned T & J articles are there?", "score": -1.6029893444819603}, {"text": "# And what evidence do you have that I am committing revenge? Revenge for what?", "score": -1.5103901839985372}, {"text": "\"The <url> page said that <url> will be shown on the <url> subchannels 26.5 and 26.6, and also on analog channel 48. It is also expected that The U Too will displace Me Too from analog channel 48. If that's the case, then, where do you suppose Me Too will end up? What new channel in Chicago will it be shown on?\"", "score": 0.5353204967989271}, {"text": "\"Your obfuscation of the real issue is disengenuous; I'm rapidly losing respect for you. 
Is it not Wikipedia's mission to scornfully snub vandalism, which is done instantly, agressively, mercilessly?\"", "score": -2.128816638573292}, {"text": "\"Yeah, thank you so much. Can anyone do that or only admins?\"", "score": 1.4723769869093717}, {"text": "\"Oh, no, sorry. Why do you want to talk to me on IRC?\"", "score": -1.0007756369889635}, {"text": "\"4. I had once felt that the Euroleague, Italian and Greek leagues were the top leagues. If I were to choose selected European leagues from this page <url>, do you have any thoughts on which ones are important?\"", "score": 0.6192770518244997}, {"text": "I agreed from my very first post on the first subject that it shouldn't be included yet you and the other user continued to misrepresent my position. Would you disagree with that?", "score": -0.6533088173265315}, {"text": "?????????????????", "score": -1.7130401358041187}, {"text": "Probably I would have to do quite a bit of alteration in wording. Is that O.K?", "score": 0.696111901307342}, {"text": "\"So, Pasquale, you're not interested? Have you ever read <url> article?\"", "score": -1.2413274081178556}, {"text": "\"Having said that, I don't want to drive anyone away just because they can't write well. The question is, where do you begin?\"", "score": -0.9196936948529755}, {"text": "\"The image is only used on ONE page - the site of the film. What is the problem here, and how does it violate wikipedia policy?\"", "score": -1.3726440111785976}, {"text": "I had to revert another one <url>. How about that protection?", "score": -0.6780685582641526}, {"text": "\"We? Which \"\"we\"\" would this be?\"", "score": -0.8043291864593283}, {"text": "\"If she's been in a lot of porn movies, doesn't that show she is famous. 
Can't you undelete it and then let it go to a AfD discussion?\"", "score": -0.5336875267121629}, {"text": "\"give me your personal opinion, is an article with citations and written in a mature manner, about shart, ever going to remain without being deleted? or will i just waste my time if i make it?\"", "score": -0.8080467472481423}, {"text": "I tried updating it to WP:DAB style but I got reverted. Can you use your magic there and explain to the user why we should follow MOS:DP and such?", "score": 0.8193177651866257}, {"text": "\"While browsing wikipedia for missing pages to write i saw it on your things to do list. Assuming he's a poet, do you mean <url>?\"", "score": -0.5162188725102322}, {"text": "There's an interesting discussion here: <url>. Care to explain?", "score": -0.7655060370754787}, {"text": "\"One question - how come the page is marked as \"\"in dispute\"\"? The data that is currently posted isn't in dispute, correct?\"", "score": -0.502825148160877}, {"text": "\"Hello, I noticed you recently did a lot of work on the ''<url>'' page. I was wondering if you know of the origin of the word \"\"Nassarius\"\" - its etymology/ what is it named after?\"", "score": 1.1147935611775535}, {"text": "\"Looks like you've been a bit inactive lately, but on the off-chance you check in soon, take a look at <url> and <url>. You're probably the most familiar with CoM's style and brand of hostility, do you think Freakshownerd here is his sock?\"", "score": -0.46267983509082206}, {"text": "And this is your first edit. Isn't that interesting?", "score": -1.0384221496277901}, {"text": "\"Hi Mike, Awadewit suggested you might be interested in a project we're running here at the British Museum over the next week - <url>. Perhaps you'd be able to help out and sign up?\"", "score": 0.9706627444646226}, {"text": "Look even your 3rr message says to use the talk page. 
What authority does the Pokemon project have over Japanese mythology?", "score": -0.9929420993243966}, {"text": "We need a visual of some kind. How's the state of your right-brain today?", "score": -0.6184194123332569}, {"text": "\"Yes, I'm delighted; result after a torturous few days! Um, can we add a Dali or Miro img?\"", "score": 0.5438991868936428}, {"text": "Hi. Is there anything I can do to help you utilize the spell check features of your browser?", "score": 0.8381565699005122}, {"text": "Oh and you seem to have deleted half of the stuff on <url> ... I presume this was a mistake?", "score": -1.1506808169469604}, {"text": "\"I see you've added <url> to the category \"\"Jewish-Canadians\"\". What is your source for this?\"", "score": -0.5733661004514243}, {"text": "\"Converting <url> to a redirect without preserving either the information or sources, & without discussion was ill-advised. A bit more care in future?\"", "score": -1.574061710589525}, {"text": "\"Done. I may have missed a few links when copying them across, so could you please look through it just in case?\"", "score": 1.406913483204035}, {"text": "\"I thought that I should tell you that I created a new SVG version of the Wikipedia logo, located <url>. It has a few problems, could you help me with it?\"", "score": 0.910388191108509}, {"text": "I must be missing something here. Would you mind clarifying for me the nature of your extensive work on <url>?", "score": -0.7890362228436999}, {"text": "So it seems we both already know the problems with the current logo and the SVG one. What can I do to help you?", "score": 0.5925573418573501}, {"text": "\"Hey Dan, I see you around but we haven't talked in a bit. What are you up to these days?\"", "score": 0.8480736694429913}, {"text": "\"On another note, I'm going to assume that you were the one who also added ridership data for various LIRR stations, am I correct? 
If so, could you kindly point me towards your source?\"", "score": 0.5926886625782085}, {"text": "\"Alistair, you have only undone the autoblock; you haven't unblocked the account yet. Could you do that, please?\"", "score": 0.6769017243829565}, {"text": "\"you need a copy, you say... by that you mean a translation, or one which has been reconstructed to meet en. wiki coding?\"", "score": -0.4834747961026092}, {"text": "\"Not that I know of, but it's probably time it was. Want to start a discussion somewhere?\"", "score": 0.7943219963400796}, {"text": "\"Serouj, you didn't reply to my message and my email. One question: if the church was built after 1700 but later ruin, for instance ruined by Beria order in 30s, can it be considered Contemporary?\"", "score": -0.8280673590569301}, {"text": "\"Actually, the page is still up, and I'm still getting a deluge of obscene phone calls and emails in an effort to punish me for constructive editing on Wikipedia consistent with Wikipedia policies and guidelines. Is <url> and <url> a policy or isn't it?\"", "score": -0.7110763913727535}, {"text": "Hi Fropuff. How do you convert your commutative diagrams from LaTeX to PNG?", "score": 0.5996699289954474}, {"text": "\"And if you use Coulson you would probably have to say \"\"According to Coulson...\"\". Have you read <url> yet?\"", "score": -0.5800880033734375}, {"text": "\"No, Wikipedia does not consider itself (or wikis in general) as a reliable source. Surely you know such a basic policy?\"", "score": -1.1995374939965728}, {"text": "\"Btw, what's \"\"WPC MOS\"\", and where/when was it decided that we don't use volume indicators? Why is this, and how do we distinguish between different volumes of series when mentioning issue numbers?\"", "score": -0.6580288836295852}, {"text": "\"We already list Latvia, Lithuania and Estonia separately. 
Along with Belarus and Ukraine, that doesn't leave much of the USSR that was occupied, does it?\"", "score": -0.47321159359326037}, {"text": "Just got around to this on <url>. Do you still need help or is this sorted?", "score": 0.732105268426927}, {"text": "Picking up your challenge. You a big fan of skittles or something?", "score": -0.7626945203488766}, {"text": "You're unblocked. Do you want to discuss what the article should be named?", "score": 0.5602898593312519}, {"text": "Noticed your comments on the <url> talk page. What bands/styles are you into?", "score": 0.5779751866271278}, {"text": "\"Thanks for the help! That info should be on <url> in easy to read form, no?\"", "score": 1.0302962388501427}, {"text": "<url> reminded me of a report I want to file about Asad. May I?", "score": 0.8405691770114236}, {"text": "There was a redirect from KLPN to KTPN that was created. KLPN is the main station so why is the article called KTPN?", "score": -0.975501184037103}, {"text": "Moved. would you like me to nominate you for adminship?", "score": 0.7427200657322908}, {"text": "\"At the top of my talk page, it says, \"\"If I left you a message: please answer on your talk page, as I am watching it.\"\" I think I already explained this to you. Is there a reason we cannot communicate?\"", "score": -0.664168662255751}, {"text": "Why isn't <url> included there? Is it possible there's a redirect in it's place?", "score": -0.4598270601924336}, {"text": "\"Umm... isn't the template page itself semi-protected? If so, then shouldn't it be in the category?\"", "score": -1.245110070117502}, {"text": "\"If the llamas of Peru spoke Spanish, and one asked the other, ''xbfCxf3mo se llama?'', would he get confused?\"", "score": -0.4763792436379327}, {"text": "\"Hey, why do you edit the standings when a game isnxb4t over??\"", "score": -0.8663317588919899}, {"text": "\"Hi -i am editing the Juan Martin article with guitar technique information,and user addhoc is deleting all my writing. 
Please,what do i do about this?\"", "score": 0.7680111883573223}, {"text": "You also say that I am participating in a high-tech lynching. Isn't that what you did when you put SamuraiClinton on RFA?", "score": -1.3018712578803222}, {"text": "\"Yes, that was really necessary. What's wrong with establishing uniformity in CASH articles?\"", "score": -0.8231760701107248}, {"text": "\"Nice work! Since you dug up some information on Fahnbulleh, I'm curious u2014 do you know what relation he is to Ambassador Henry Fahnbulleh, whom Tubman accused of an attempted coup?\"", "score": 1.5950533999754632}, {"text": "\"I kind of agree. Are you planning on actually editing Wikipedia, or is your whole purpose here to promote this ... study (or whatever it is)?\"", "score": -0.9129093603716403}, {"text": "\"Yes, I posted a link to the history. I was asking/thinking where you stepped into the discussion and if you were editing as an IP and that would put a different slant on your 3rr warning to Bbb23?, have you been recently editing the article was the simple question?\"", "score": -0.8247208298257986}, {"text": "\"What does this have to do with Zeitgeist? I posted on your talk page because this is no longer remotely about the article, why are you continuing to quarrel here?\"", "score": -1.62313252355573}, {"text": "\"Don't push it, Zachary. Can you give me proof of the existence of \"\"MusicMaximum Hot 100\"\"?\"", "score": -1.2981017367712433}, {"text": "Yes. Need help?", "score": 0.5884102791740764}, {"text": "\"Email from the Darkshado guy complaining about you adding him to the spamlist. I searched the site and couldn't find any of our articles replicated there; has he removed them, or what?\"", "score": -0.5005053498963239}, {"text": "I reverted your change to the Pet Shop Boys article - it was kind of a bizarre change (i would have considered it the work of a vandal were it not for the seemingly valid other wikipedia work you've done). 
?", "score": -1.876211585910101}, {"text": "\"Good work on the canal junctions! Where appropriate, do you think we should give the nearest pedestrian access point, from the road (as opposed to canal) network?\"", "score": 2.033421904104878}, {"text": "Which of course is all the more ironic as my initial edits were an attempt to ''improve'' the article per <url>. Who the hell would have predicted all this?", "score": -1.668277747141089}, {"text": "\"I read the diff upside down or something... Glad someone is keeping an eye on me :) I'm mainly doing stuff over here <url> these days, what are you up to currently? Still concentrating on the Ui Imair?\"", "score": 0.6376078856617218}, {"text": "\"Corrections, make that four terms: I pick a car and swap, I pick a goat and swap, I pick a car and do not swap, I pick a goat and do not swap. Who cares about doors or door numbers?\"", "score": -0.9713154177310894}, {"text": "Anyway why put up with all this carelessness? Why are sockpuppets and trolls being enabled?", "score": -0.9880183081975487}, {"text": "\"On a more practical note: One either does or doesn't agree to take part in mediation. Which is it, please?\"", "score": -1.1309604076094915}, {"text": "\"I know, I looked it up and fixed the article, but it was annoying that you just changed one small thing making the page contradict itself. What's the use changing it if the reader will have no idea what's going on?\"", "score": -0.8587531401234754}, {"text": "Who???", "score": -0.9993343310583395}, {"text": "sir.. can i still write an article? do you have any facebook account?", "score": 0.697335430870505}, {"text": "\"Yeah, I already noticed that. Why can't people can just edit in a responsible fashion, or through ''discussion with others''?\"", "score": -0.7099364731903273}, {"text": "\"No, I have not read it, as it is of no interest to me or this encyclopedia. 
Are you claiming that the references make the blog post a reliable source, or why are we still discussing this?\"", "score": -1.5987063130376078}, {"text": "\"Hi Leah, is it working again? I had some difficulties with WP in general a couple of days ago and was wondering if your issues had the same origin?\"", "score": 1.0340988250671412}, {"text": "\"Is there any consensus for these changes? If I've missed the discussion, I do apologise, but could you post me a link to where it occurred?\"", "score": 0.9890972062548087}, {"text": "\"As I have said before, it is an issue of breadth vs. depth. Are you familiar with such concepts?\"", "score": -1.539942409463674}, {"text": "If a main or major purpose of your presence here is to cause drama then I would support your being indefed. Am I misunderstanding something?", "score": -1.5142552143900772}, {"text": "Noticed you've been holding the fort alone... again :) Thought I'd drop by and say hello. Surely your sanity has reached its limits by now?", "score": 1.4963126344491708}, {"text": "\"I have moved the page back, as well as created the new article <url>. Is that better?\"", "score": 0.6643167593963019}, {"text": "\"Thanks. Oh, and if you don't mind, would you please block the sockmaster?\"", "score": 0.572379714019547}, {"text": "\"Hi Cmadler, I've written a new ALT hook at the <person> that I think addresses your concerns. Could you sign off on the nomination?\"", "score": 0.7590874853809229}, {"text": "Why did you block tanner1996 that is me I was going to choose Nascar1996 but someone else is using that name??????????", "score": -1.981101668845558}, {"text": "Wikipedia is not the place for new information. How would we know if it was true or not if someone credible hasn't verified it?", "score": -0.8193964795866548}, {"text": "\"I was just reminded of my offer to generate a list of uncategorised templates, when I happened across your bot request again. 
Are you still interested in such a thing?\"", "score": 0.6542074023700452}, {"text": "I believe that an opinion about AS belongs only to article about AS (and you are welcome to cite the MS opinion ''in proper context''). However an opinion about MS belongs only to article about MS. Is not this fair and logical?", "score": -0.4615485604935087}, {"text": "I wasn't sure what you mean by making the title a clickable link. Can you show me how to do that?", "score": 0.6924751915157052}, {"text": "\"<url>: I am not sure how a regular spy mission gone bad had contributed to the history of cryptography. Should every spy ship, listening post, tampered cable and what not get included in the category?\"", "score": -0.8219538397789993}, {"text": "\"I don't recall any consensus on that unilateral change. Can you direct me to it, please?\"", "score": 0.9295460677247249}, {"text": "\"I didn't see your internal link, I put it back. <url> isn't orphaned, what are you talking about?\"", "score": -1.3595810913928026}, {"text": "I don't understand the reason for <url>. Would you please explain it to me?", "score": 0.6903390845243831}, {"text": "It does not appear to have attracted any participants other than Gerda. Perhaps adding a <person> template would draw in more participants?", "score": 0.6648365043027011}, {"text": "I don't know why claim that you reverted vandalism (rv edit summary) when to me they appeared to be good faith edits. What exactly was the vandalism?", "score": -0.86456986830535}, {"text": "Do you mean article edits with sock puppets? Could you give me more precise examples?", "score": 0.5907408674838489}, {"text": "Well? Shall I request outside assistance?", "score": -1.0560129796116062}, {"text": "A challenge for you! What do you think about creating a template for convoys and a list of convoys?", "score": 0.5349261013514425}, {"text": "I see that you don't have an account linked to the Wikipedia email. 
Would you be able to send me a message so that we can talk off Wikipedia?", "score": 0.7891053087404754}, {"text": "\"You are correct that the science is not copyright, but the purpose of the cite is <url>. If I want to check your equation, where should I look?\"", "score": 0.6279498008813693}, {"text": "The file clearly states this is an image from a poster. Since when does this require any additional explanation?", "score": -1.240950691527988}, {"text": "Mind <url>. Since when one edit after protection expiry is disruptive editing or editwarring?", "score": -0.9982378747747704}, {"text": "Double standards? Biased administrators?", "score": -0.7979115933995179}, {"text": "Why do you contend that New Atheism and Atheism 3.0 are unrelated? They are so intermingled that people have difficulty discerning which is which?", "score": -0.9627902604547328}, {"text": "Thanks. Beer?", "score": 0.5980041291255004}, {"text": "\"Perhaps these articles should be nominated for deletion. A blue belt is far from a notable BJJ player; Google returns 594 results for him, many of which might not even be the Barry Ley in question; and as for the Blaggers article: I don't see any articles on the higher-selling books you mention, why does this one deserve it?\"", "score": -0.7120249719691379}, {"text": "\"LordAmeth-- once again, many thanks for nominating me. I have a question, though-- I noticed that the process was \"\"scheduled to end\"\" at 16:52 today, and we're past that now...what happens next?\"", "score": 0.8618115261726965}, {"text": "\"You reverted my removal of the \"\"to critical acclaim\"\" description from this article. I cannot see where in <url> there is a basis for that claim - it looks like a neutral factual review to me?\"", "score": -0.8153681882661186}, {"text": "\"Wow, thanks for pointing that out. I'm having a bad night, and all the theading... 
anyway, did I get them all?\"", "score": 0.6283126392164756}, {"text": "(S)he seems to have violated the spirit of 3RR <url>. How do we deal with this guy?", "score": -0.6424478752563609}, {"text": "And wasting our time as well. I can only repeat: why don't you do constructive work by adding contents about your beloved Makedonia?", "score": -1.55619859676183}, {"text": "\"Oh, lookee <url>: who put all the 'breakups' back with a blind revert???\"", "score": -1.4675604767391124}, {"text": "<url> suddenly has a bunch of new entries for today. Did you change the program?", "score": -0.7185888647753018}, {"text": "\"I understand ''your'' position, however I disagree with it, even regarding the spirit. <ins>Mostly because I don't see linking something as modification of the quote, therefore it's a moot point, even when coupled with bracketed additions.</ins> Perhaps we should see<ins>k</ins> comments from additional editors?\"", "score": -1.1286260891582696}, {"text": "You sure kill educating information. I wounder why?", "score": -1.8027745748054564}, {"text": "\"Just now I created this stub. Could you have a look at it, and try to improve it?\"", "score": 0.5666925135966321}, {"text": "\"Hello, I am interested in helping out with <url> and <url>, but the pages are very very long, which is unmanageable and discouraging. Could you please break them up into (many) smaller subpages?\"", "score": 1.0644010617057331}, {"text": "I'm not sure what you are reffering to. Could you give a link?", "score": 0.5751995903760603}, {"text": "\"Can I also point out that the text that you pasted into Wikipedia is entirely inappropriate for an encyclopedia article and at best will require extensive editing to conform to the<url>. Are you going to do that editing, or are you going to leave it to one of the other 'lazy' people who edit this encyclopedia?\"", "score": -1.5174719035281465}, {"text": "\"All right, I'll bite. 
Is honorary membership in the Justice League not criteria enough for being listed in that category?\"", "score": -0.570449401172332}, {"text": "\"I had an editorial dispute with a user called <url><, but I'm not responding to him anymore as it seems to escalate into heated arguments. If he keeps messaging, would it be ok if I got back in touch with you and you had a word with him?\"", "score": 1.2123769585851896}, {"text": "Done. Comments?", "score": -0.5371695961059026}, {"text": "\"Pat, for someone who has died, you don't say \"\"he is a former professional baseball player\"\", you say \"\"he was a professional baseball player\"\". Okay?\"", "score": -0.627142621301714}, {"text": "\"Problem is how do you recognise the categories, as there are thousands of them. I have already made a program that scans the database, and can locate the unstubbed articles, so finding the untagged stubs is ok, and AWB automatically adds the stub tag if the article is short, I just can't think how you could program it to automatically guess the type of stub, any ideas?\"", "score": 0.5990216811307933}, {"text": "Because Wikipedia is an encylopaedia not a free advertising and self promotion venue. Why do you keep abusing it?", "score": -1.2213591083806599}, {"text": "Thanks for putting the igneous rock template on all of those articles - it looks really useful! Do you think it should have an ultramafic section added?", "score": 1.5488186500858818}, {"text": "\"I'd like to try it out at FAC, after Friday. Would you be willing to co-nom?\"", "score": 1.005785864876599}, {"text": "Not sure why I got <url> from you. Had I tagged it?", "score": -0.8939913315463828}, {"text": "\"Good job working on the list, thanks. Could you make sure you move them from \"\"To do\"\" to \"\"Done\"\" when you finish?\"", "score": 1.8085395665268185}, {"text": "I really think that the text you have drafted at <url> is far fuller and better than the article as currently drafted. 
Do you have any objection to me deleting the current text (except for the infobox which I will retain) and importing you text in its entirety?", "score": 0.9660773973038699}, {"text": "\"What about ''Race, Ethnicity and Migration in Modern Japan'' (Weiner 2004)? Or any of the dozens of books you can find on Google by searching Ainu+Honshu?\"", "score": -1.0563082408857674}, {"text": "Thanks. Anything else?", "score": 0.965477486236388}, {"text": "Thanks for reverting that obnoxious vandalism on my talk page. How do I bring in an administrator to deal with the vandal?", "score": -0.5643186832367866}, {"text": "\"He used it to assert that '''''others''''' had expressed the notion that the process was concluded and that it was time to move on, and used your statement as evidence of this. Since you did not mean that it had been concluded, and since obviously you were only speaking for yourself and no one else, and participants have been continuing to post on the RfC page and its Talk Page to indicate that they do not feel this way at all, how does it serve to support the idea that they have?\"", "score": -0.5410313823534211}, {"text": "\"I had that fixed before you oranged me, I swears it! :) Are you not familiar with the technique of doubling your edit count using typos?\"", "score": -0.7298376527953033}, {"text": "\"I'm curious: how do you make those beautiful coats of arms? Do you have a script that generates them from the heraldic descriptions, or do you make them by hand?\"", "score": 0.855926901382041}, {"text": "Indeed... The trolls just keep coming. Are you aware of content of the <url>?", "score": -0.6449318089486196}, {"text": "Hope you had a good trip. Are you able to take a further look now?", "score": 0.7798349735271323}, {"text": "Personal attacks and refusing to answer questions. What are you hiding?", "score": -1.408265604196832}, {"text": "\"I removed that second paragraph entirely, since the information is covered in the article. 
Do you feel that the lead should be larger?\"", "score": 0.7117676918548496}, {"text": "Stole ur flag for my page. that ok?", "score": -1.265958966483709}, {"text": "\"Thanks very much indeed! :-) I don't suppose you'd fancy having a deadlink trawl over at <url>, would you?\"", "score": 0.8108465727135247}, {"text": "thank you very much for your patience and for listening. how do you think is the best way to resolve these issues?", "score": 1.8222115856989398}, {"text": "\"Just one more thing: The <url>' logo was re-made as a vector image a little while ago, but the colors (especially the skin), really seem to be off. Can you fix it?\"", "score": 0.7051859461744399}, {"text": "hi Mustafaa I have some historical pictures and some sentences written on them which seems to be arabic. Can you tell me if it is arabic or not if i send you a copy?", "score": 0.5333686911390902}, {"text": "\"Not according to the history on <url>. Can you give me a link to follow, please?\"", "score": 0.7932688424049255}, {"text": "Nice photo! Could you help answer a question (<url>) at the Reference Desk related to the picture?", "score": 1.758916891658674}, {"text": "\"Ok with you if I play white? I suck so I should go first....and, if you don't agree I will crush you like a mag.... whoa power rush..... Anyway ok with you?\"", "score": -1.8176707190866068}, {"text": "Calling all changes for vandalism just because you don't agree with them is NOT the way to explain why you reverts edits. May I suggest that you actually learn what vandalism actually means before you use the word?", "score": -1.728382454335168}, {"text": "No kidding. Perhaps you could be troubled to point out the parts you consider to be actionable as 'spam'?", "score": -1.0007218084068759}, {"text": "So nearly a year later you change it all again. Why?", "score": -1.3000740652045364}, {"text": "\"Hi, Google found your start at a Bayt Nuba article. Do you want help?\"", "score": 0.7895566501185718}, {"text": "Hi! 
What would you like help with in particular?", "score": 1.5939960482567306}, {"text": "\"OK, now I see the <url>. Wouldn't it have been better to just say \"\"see talk\"\" in the edit summary so that editors don't think that you just moved it on a childish whim?\"", "score": -1.2800264838129523}, {"text": "\"Sovereignty is something a thing, in this case Japan, possesses, hence the apostrophe. So what's the problem?\"", "score": -1.058948050064101}, {"text": "\"On a related note, I went through your recent article-work, and saw nothing wrong with your contributions. I was wondering, would you like me to grant your account <url> to make it easier to revert vandalism when you come across it?\"", "score": 1.0357765408576003}, {"text": "\"Many thanks. I can't resolve \"\"The work represents a summary of themes explored in Bacon's previous paintings\"\", Tony's right - its clunky and unclear; any ideas?\"", "score": 1.7451156804276629}, {"text": "OTRS. At this point why not try?", "score": -0.8584560172059603}, {"text": "Why did you delete the article on anal stretching????", "score": -1.356150672703235}, {"text": "\"Hey, I saw your article on the ''Mosambique'' in the Good New Article search results and looked at your other contributions, and I see that you do a lot on naval history. Would you be interested in helping me to expand <url>?\"", "score": 1.4349702924503467}, {"text": "\"Splendid work on <url>! Do you have a copy of that reference, or use the library's?\"", "score": 1.2837872691333416}, {"text": "If the deleted image only has a single character then no need to restore. Would you be against adding an image illustrating the 4 protagonist dinosaurs?", "score": 0.9273195936967683}, {"text": "I noticed that you have a little note at the top saying your status. How do you do that?", "score": 0.5988317032127366}, {"text": "\"re: <url> edit. 
Then let me ask you, what exactly was offensive about them?\"", "score": -0.6435865436132062}, {"text": "\"Hi Royalbroil, I can't seem to find a hook I entered in DYK Feb 24, it's about the novel <url>, along with suggestions from other eds. Can you find what happened to it, please?\"", "score": 1.5774995881340677}, {"text": "I state what I see when I read it. Are you and <url> the same editor?", "score": -0.5970273242494762}, {"text": "\"|} Thanks! Did you know its one week to Halloween, will you be there?\"", "score": 1.566483944006761}, {"text": "OK I've created the following test page: <url>. Tell me if you have a problem with it?", "score": 0.6187085835459609}, {"text": "\"I saw you removed the copyright CSD from Universum University as being \"\"unclear copyright status\"\". What am I missing?\"", "score": -0.4662203837583306}, {"text": "I noticed you deleted this while I was in the middle of creating it. Does the listing of it on the <url> not make it notable?", "score": -0.75941624646147}, {"text": "\"I fail to see how you call copyleft \"\"selfish\"\", but are fine with the idea of totally proprietary licenses. Wouldn't those be even more selfish, then?\"", "score": -1.494153491987793}, {"text": "\"Hi, I have cleaned up this page and, in the AfD, identified many other sources and additional content. I wonder if you would reconsider your delete !vote please (I will do the necessary expansion as and when there is a reasonable expectation of it being kept)?\"", "score": 0.5535518665296706}, {"text": "\"Sounds good then. In this case, would it be wise to simply remove the \"\"influence\"\" field since it simply overlaps with \"\"subjects\"\"?\"", "score": 0.7732174495566065}, {"text": "\"Thanks, looks like <url> needs some fixin'. 
By the way, are you honestly ok with me bothering you like this?\"", "score": 1.3932028618811854}, {"text": "\"Hello, I'm with the project of turning the list <url> hat is the same list in English, but in another language, and more link for azure. I have a problem, <url>, could you help me?\"", "score": 1.0256817128790345}, {"text": "\"You just created that article - but maybe you haven't noticed there's already <url>, which covers the same character set, as the ISO followed the Thai standard. Maybe you should merge your additions there, and make TIS-620 a redirect?\"", "score": -0.8238997629919049}, {"text": "Thanks for improving the article. Can you add categories also?", "score": 1.4360160981262124}, {"text": "\"Thank you for the links and the explanation. I'd like to help with the project, what did you have in mind?\"", "score": 0.9626633274776146}, {"text": "Based on what? Don't you think a little discussion is in order before doing this?", "score": -0.8872445996636792}, {"text": "\"Anomie, thank you very much, this is a great tool. Why isn't this a Mediawiki gadget ?\"", "score": 0.5950243384479628}, {"text": "\"Are you saying that Scotty was being \"\"hot-headed\"\"? If so, can you please tell us what Scotty did that was \"\"hot-headed\"\"?\"", "score": -0.8857619953785901}, {"text": "\"I couldn't find your \"\"email user\"\" function. What's up?\"", "score": -0.6319956316733248}, {"text": "The same goes for our lives. Arent we effected directly by decisions made in the White House?", "score": -0.5673433315116179}, {"text": "So how come he can have all types of slandereous links on his wiki page? Don't you think being an admin you should delete them?", "score": -0.8933524746466063}, {"text": "\"Can you respond to the comments I left on the S&M PR page please. 
If you are satisfied with the changes, can you tell me if it is qualifiable for FAN and if not for FAN, then for A class status?\"", "score": 0.524280035923336}, {"text": "\"They did and have done, on my talk page and on their talk page and most the time im not really doing anything and i always seem to get the finger pointed at me. So what do you expect me to do if someone is uncivil towards me?\"", "score": -1.0430966004381288}, {"text": "\"Oh that's charming, so instead of making a personal attack on a single editor you made it on a class of editors. Does that somehow make it more ok?\"", "score": -2.125316919294767}, {"text": "You lived next to her? What did you do wrong?", "score": -1.0087452056419013}, {"text": "Re <url>: I reverted once. Shouldn't I get a cut of the proceeds?", "score": -0.7769369224741404}, {"text": "\"Although I have yet to understand what some of your maps mean, they are superb! Have you ever considered professional cartography?\"", "score": 2.235788034165126}, {"text": "GorillaWarfare- I don't know if you saw my reply on my talk page. Are you interested in being my mentor for the GU De-Ba'athification article?", "score": 0.5641975166407955}, {"text": "\"ww - hey, good work on the new pages that you've added in the last couple of days (5-6 isn't it?). I add them to <url> when I notice them, but I'm not sure that I catch them all; I don't suppose you could add them to the list on creation?\"", "score": 0.9719054125005442}, {"text": "pls see my comments on your revert on the talk page. Perhaps you'll consider undoing my revert?", "score": 0.533160679530573}, {"text": "\"Wikipedia is anything '''but''' formal, many characters real or fictional don't get referred by their full names all the time. Besides, who else on wikipedia thinks \"\"OOO\"\" is awkward?\"", "score": -0.7033825051437896}, {"text": "\"Yes, but not this week as I have university exams. 
Perhaps next week (or you want this to be a lead-up story for Earth Day)?\"", "score": 0.68543560486269}, {"text": "He's taken the notice down. Doesn't this count as belaboring the point?", "score": -0.9486938267884067}, {"text": "\"You said: \"\"I am just upset that you are harassing a person who seems to be trying to better himself and his relationship with fellow wikipedians.\"\" First, I'm harrassing no-one, and it would be better if you didn't make such accusations. Secondly, what evidence do you see that SS is trying to do anything of the kind?\"", "score": -0.598577596082443}, {"text": "\"<url> posted a wonderful grammatical analysis of the \"\"has/had\"\" thing, another editor agreed, so I'm thinking we should make another request for the change, using Bluewave's post as the basis. What do you think?\"", "score": 0.7371004892858619}, {"text": "\"Note: Kurt's talk page almost certainly isn't a good place to seek a broad or unbiased consensus on this question, nor is it the appropriate venue in which to request an exemption from a ban &mdash; and in all honesty, I'm not sure that managing such an exemption for the purpose of posting gags would be a constructive use of anyone's time. Perhaps it would be best to save the jokes for a time when you're once again allowed to edit in the WP namespace, or post whatever jokes you wish (within reason) in your own user space?\"", "score": -1.1894588006836126}, {"text": "\"Why delete the page <url> with no explanation, as an act of deliberate vandalism? Why did you do it?\"", "score": -2.231103563501033}, {"text": "\"Hey, thanks for your contributions here :). Would you mind referencing them to conform with wiki-policy?\"", "score": 2.468366685897979}, {"text": "\"I've never seen something go beyond AN/I myself, and I find it hard to believe the other two hadn't been tried by now. Or is it a case-by-case thing?\"", "score": -0.852457203833527}, {"text": "Have a look at the edit to <url>. 
Is this page even a dab?", "score": -0.7984893261976621}, {"text": "\"I am good at that ;-). You know about the new \"\"Oversight\"\" priv?\"", "score": 1.209562704754771}, {"text": "\"You seemed, to me, to be addressing all of us who were opposed to your merger. Do you know a different meaning for \"\"oddwater\"\" other than \"\"urine\"\"?\"", "score": -0.8562429412036348}, {"text": "The deletion review page linked above contains all the instructions for requesting a review. Why did you not read these instructions?", "score": -1.4071134297594008}, {"text": "\"Instead of deletion, I'm considering simply redirecting Ponystars to <url>. Any objections?\"", "score": 0.5860593872612474}, {"text": "Interesting illustration on the PrIze law page of a Zeppelin taking a Norwegian vessel as a prize in 1917. I wonder whether you could point me to a source for more information on this incident?", "score": 0.800638509679627}, {"text": "\"Your comment about \"\"the additions were left intact as a courtesy\"\" sounds a bit strange to me. Maybe you should read <url>?\"", "score": -1.1876539290120967}, {"text": "\"Hi, Wereon - I'm afraid I'm rather amateur at nominating articles for deletion. Where do I start?\"", "score": 0.6674959577376891}, {"text": "\"Otherwise, very nice article. Are you going to submit it to DYK?\"", "score": 0.9403526838179804}, {"text": "\"To be honest, I'm not sure - the decision say \"\"remove individuals and organizations\"\" but it also said \"\"per BLP\"\" and Abascal is deceased. Perhaps you should ask Timrollpickering, the admin who closed the discussion, to clarify?\"", "score": 0.7410227899401178}, {"text": "You're welcome. Do you have an opinion as to whether they would be worth adding to <url>?", "score": 0.8934068215401905}, {"text": "\"Thanks. As for the previous edits, should I make a note on the User Page that it's my account?\"", "score": 0.942214753314867}, {"text": "\"Hi. 
The original link (http://www.lgso.org.uk/index.htm) gives a 404 error, but the amended URL (http://www.lgso.org.uk/) works for me, so why do you say it's still dead?\"", "score": -0.8450982949218124}, {"text": "\"Hello. I was just wondering, is it really fair to delete an unintentional duplication of a deleted category without a formal CFD submission?\"", "score": 0.5370991723630107}, {"text": "\"ED, eh? Have they tried Viagra?\"", "score": -0.9336417938738852}, {"text": "Good work maintaining the Tibet page i seems to getting an increasing amount of traffic!!! Is our Chinese friend still persisting?", "score": 1.122941009366109}, {"text": "\"I'm not sure how I feel about you adding coordinates to locations all over the place, but rather than get into that, let's start with something easy: \"\"<url>\"\" does not equal \"\"<url>\"\". Do you need me to explain why, or was that just a typo?\"", "score": -1.0119483404004694}, {"text": "\"Can I use a manga image colored by myself? There won't be any problems with the lisense, right?\"", "score": 0.6364100351333305}, {"text": "Many thanks. I'm afraid I don't know the reel - is it played in your part or the world?", "score": 1.2906300821748953}, {"text": "\"Thanks, I didn't notice the earlier discussion. Would you consider (if you haven't done so yet) adding some indication on either page that these are the same user?\"", "score": 1.313296308810772}, {"text": "\"I just put a table in for the novel villains (to put the novels and films on the same level), but it's kind of messed up (I know presscious little about tables on Wikipedia). Could you please help me fix it?\"", "score": 1.0952450966038323}, {"text": "\"I'm in Outer Mongolia from the 27th till the 10th, UB, Hovsgol, UB. Maybe we can meet?\"", "score": 0.712368679826097}, {"text": "\"The section starting with '''a philosophical defeat, etc''' is somewhat informal in style. 
Could you try to eliminate solecisms when you get a chance?\"", "score": 0.5644713600252751}, {"text": "\"Hey, I found <url> and at around 1:10, I think I can see the Backstreet Boys holding platinum discs (or double platinum, I'm not sure) for Backstreet's Back and Millennium. Can you please verify and is it possible to use this as a source for adding South African certifications?\"", "score": 1.3921520938133043}, {"text": "you recently deleted <url>. can you tell me where is the AfD debate on that article?", "score": -0.4810995775798109}, {"text": "\"I was looking for some help with editing portals, I've read up on some stuff but still don't quite understand. Do you think you could help me?\"", "score": 1.2968054548731183}, {"text": "\"I've been trying to convince this user that his only problem is his username, but he doesn't seem to fully believe it. Would you mind popping over and giving a word or two of encouragement?\"", "score": 1.207695389470554}, {"text": "How come no one will answer this question!??", "score": -2.1055559885397495}, {"text": "Nice pie chart for mobile phone contractor %'s during 3rd quarter 2008. Was just wondering though which country the chart represents as I think perhaps this should be mentioned?", "score": 1.2458178950596148}, {"text": "Good advice from Garion96 there. Would you please come to the talk page of the template and voice your opinion?", "score": 1.050609566106844}, {"text": "\"Hello, Jeff79. Would you weigh in on the above linked discussion regarding an edit you made?\"", "score": 0.7199570877244768}, {"text": "Indeed. I trust you have already watchlisted this editor?", "score": 0.5167742644029383}, {"text": "So you exchanged my suggestion with an ''expired'' nomination... 
Right?", "score": -1.0455174653296866}, {"text": "\"If the community decides that you no longer speak on its behalf--if the Wikipedia community decides that this supposed ban does not apply to constructive comments on AfDs on articles for which I am a major contributor, and that I may make such comments without asking for permission beforehand--who are you or anyone else to override that decision? I'm not saying that anyone has or will make that decision--but if that decision is made, are you claiming that you are entitled to do what you want regardless of community consensus to the contrary?\"", "score": -0.6289372593221467}, {"text": "\"No other encyclopedia deals with these charges in their entries on Reagan; encyclopedias have higher standards than supermarket tabloids and Kitty Kelley. On that note, why not post the content you want in the <url> article instead?\"", "score": -0.9203180184895674}, {"text": "An IP is complaining that I'm violating policy by removing unlinked nonnotable band members from the notable natives and residents section of <url>. Can you help me please?", "score": 1.030511411946127}, {"text": "\"Delete, who cares. If we're going to delete every fair use image that ''has'' a sufficient rationale, why not change to German Wikipedia image policy already?\"", "score": -1.771806846146161}, {"text": "\"I noticed that the A team article for Australia is at <url> instead of <url>. Is that intentional, or something we should fix with a page move?\"", "score": 0.5696356890617134}, {"text": "\"Those of us who've been here the whole time are pretty incredulous about it also. :) We miss your fine editing style and hope you come back sometime -- keep looking in on us now and then, at least?\"", "score": 1.1311702698868762}, {"text": "Could you please check over the article of <url>. I edited a couple of things and added a few images..could you reply your opinion please?", "score": 1.3971863969650808}, {"text": "\"Hello Roman! 
If you have time, could you add a <url> infobox (with census data) for <url>?\"", "score": 1.4066694989850828}, {"text": "How many times do I have to revert you before you stop? Do you want to get blocked?", "score": -2.708516642990856}, {"text": "Could you please clarify who the new users were exactly that you considered to be part of your 'sock farm'? Or did you not even bother to research before making such claims?", "score": -2.1649493897652645}, {"text": "\"In your most recent edit to <url> you stated that the track in Clermont, Florida does not fulfill IAAF regulations (since reverted by another editor). On what grounds does it not and do you have a source for that remark?\"", "score": -0.5401897101172675}, {"text": "\"Why tag for deletion? It was obviously a new user's attempt at creating a disambiguation page, why not clean it up?\"", "score": -1.4603036791310013}, {"text": "Sorry to bother you but theres a report on <url> regarding <url> which has been a target for IP vandalism. I'm only asking directly because the vandalism is continuing and I'm close to not being able to fight it anymore under policy so can I ask if you could please issue the protection and revert the IP's vandalism back to my last revision?", "score": 2.072777807505832}, {"text": "\"Unless you provide a wikipedia article, with solid references od proof of notability of the term, I am going to delist it. I ave no doubts that there was a dilike of Portugals (like any other major nation), but was this word in notable circulation?\"", "score": -1.032721956093988}, {"text": "\"However, as you can see, I provided citation for my edit. Will you revert THAT and accuse me of vandalizing?\"", "score": -1.1596144722595545}, {"text": "Lowell/Lawrence Blvd leading from Lowell to the Methuen Rotary is a device from hell as well. How many people does that short stretch of road have to kill?", "score": -1.0530242334758397}, {"text": "Thanks for your edits to the LEMS article a couple of days back. 
Any hope of you reviewing it for GAC?", "score": 0.7775748257637907}, {"text": "\"Why did you erase the <url> page? I was working on that article, and his notability is not disputed?\"", "score": -1.2236079589342215}, {"text": "Please see <url>. Can you help?", "score": 1.2195301181782738}, {"text": "Yeah I have. Yes I know that; you saying I'm a malicious admin?", "score": -0.9874684600634996}, {"text": "My job. Do you have any specific points to address?", "score": -0.5907136844600381}, {"text": "\"I do not, nor have I ever used other accounts on Wikipedia. Didn't you ask me this before?\"", "score": -1.0908152310018147}, {"text": "\"(sorry for delayed response) I think it would be difficult to intergrate them, my program would probably work more easily from a list of articles with typos generated from the database, with the spelling correction code built in to the program. While on the subject, are you planning on running Humanbot again soon?\"", "score": 0.7338234979663982}, {"text": "Thanks for your advice on my problems with categories. Perhaps someone should change the instructions?", "score": 1.2479519029902275}, {"text": "\"The answer is \"\"one citation per city\"\". Information must always be verifiable; without verifiable sources for say, Unna, how exactly are we to tell that the article is accurate?\"", "score": -0.7465840077955411}, {"text": "\"Sorry, but i do not find \"\"double torpedo tunnels\"\" to be very good. Can you come up with something better?\"", "score": -0.5325631187319072}, {"text": "now???", "score": -1.118051322411156}, {"text": "\"Just letting you know (since you have mentioned AFDing <url>) that weu2019ve gone to some effort to establish notability with <url>, and have added three reliable, notable, third-party sources to the article. Do you feel these sources establish <url> as being notable (<url> and all that)?\"", "score": 0.5470276897971977}, {"text": "On the third reading... Why not include an edit summary that tells me this? 
Why not revert my revert?", "score": -0.502948391206185}, {"text": "Nice to see you editing in article space! We have been chipping away at your compiled OBI list - can you believe it's almost done?", "score": 1.1646243874726903}, {"text": "Why you undid mi edition????", "score": -0.7139428865793007}, {"text": "\"Since your last warning to BigBang19, he's continued to vandalize the ''Punk'd'' article, undoing the grammatical corrections I made some time ago, and inserting irrelevant, non-noteworthy material about Kuthcer's observence of Jewish holidays and his marriage to Demi Moore. Can someone block this person for a period of more than just 24 hours so that he gets the message?\"", "score": -0.8485795935284737}, {"text": "\"Re: <url>: if it's eligible for Did You Know, it's by definition not a stub. Cool?\"", "score": -0.5323381158111582}, {"text": "\"Good! By the way, what do you think of merging the recurring animals in <url> to <url> and then getting rid of the rest?\"", "score": 0.7182619508771795}, {"text": "\"I see you've done a lot of work on hospital-related articles, especially in Oregon. Perhaps you'd like to join us in improving the articles for hospitals in Oregon?\"", "score": 1.3928183556381062}, {"text": "\"Thanks for the quick response. Would you have any suggestions on editors who would be interested in this field, or who might have experience with FA reviews?\"", "score": 1.121399415737327}, {"text": "Please see <url>. Could you help with the list on the talk page?", "score": 0.8983238071412536}, {"text": "\"Thank you. It's a strange hobby, isn't it?\"", "score": 0.8356626334968297}, {"text": "I have posted a question at <url> which you may be able to answer. Can you please return to that discussion to answer it?", "score": 1.2308073319813686}, {"text": "\"I hate to roll back 26 edits in a row, but I see a lot of good stuff, a lot of questionable and non-NPOV stuff, not much (if anything) in the way of citation, and a ton of misspellings. 
Can we discuss these edits before imposing them, please?\"", "score": 0.6222036016741269}, {"text": "\"Why, precisely? And what separates the etiquette of your area of society from our \"\"conventional etiquette\"\" speed-developed on the Internet?\"", "score": -0.9543160653642812}, {"text": "\"Making edits vanish without a trace would be ''extremely'' difficult to do u2013 only full <url> could do that, and I can assure you the oversighters wouldn't do such a thing just to allow somebody to cover their tracks. Maybe you are mis-remembering something, about edits that were in fact on some other page?\"", "score": -0.820422371672823}, {"text": "\"Can I ask why you deleted this AfD as \"\"A3: Article has no meaningful, substantive content\"\"? It obviously did have meaningful content; was there any reason it couldn't simply be closed like any other AfD?\"", "score": -0.7624876931980722}, {"text": "\"The YA article is a right trainwreck, and having had to do much of the exact same work on <url> and <url>, I'm not keen to do it again a third time on <url>, and then defend three separate articles from identical POV pushes. Thoughts?\"", "score": -1.0959204063870693}, {"text": "He's still got the youtube link on there even after your reversion. How does once vs. 25 times or so make any difference?", "score": -0.48684453152866547}, {"text": "Hi old friend! Would you mind unprotecting my user/talkpages please?", "score": 1.5437615894488066}, {"text": "Congratulations! You are the winner of <url> Round 10. Would you like to prepare the questions for the next Wikifun?", "score": 1.5544591615591767}, {"text": "OK done. When do I get my money?", "score": -1.2587738668095934}, {"text": "\"Katy, good buddy. Is <url> your lost article?\"", "score": 0.7962538767368645}, {"text": "\"Hmmm, you are basically asking me to think positively despite evidence and knowing that over this issue none good faith on part of some users was showed in past, but ok, letxb4s see how it goes. 
The question then is: do you consider including the discussions with outside mediation participants that happend on those talk-pages (direktor, for exemple, has been very active and enthusiastic there lately) or only the in-mediation discussions are the ones that count?\"", "score": -1.1069732479215266}, {"text": "\"OK, maybe alcohol is depressing my injoke-comprehension circuits, but I can't fathom <url> edit summary at all. Care to explain?\"", "score": -1.5168760037865652}, {"text": "No problem. Have you heard anything new about HRE?", "score": 1.025789082422299}, {"text": "Don't threaten me as that is not vandalism. What is wrong with a nude photo of Barack's mom?", "score": -2.016796831167919}, {"text": "I have reverted your relocation of the images of this fragment - as I think it better if the images and transcripts can be seen side-by-side. Do you agree?", "score": 0.650852046088845}, {"text": "Hey. I'm proposing a <url> - would you be interested in coming along?", "score": 1.0391496137701224}, {"text": "I have one last question about the SPI I filed et al. before I put this ugly chapter of my time here behind me; should the <url> sock not be tagged with <code><nowiki><person></nowiki></code>?", "score": -0.7053019915877436}, {"text": "\"You added partisan colours for the early Chancellors (such as a conservative blue colour for Bismarck); as the early Chancellors were not attached to any political party, and the \"\"party\"\" given is merely an approximate indication of their political leanings, I'm not sure if this is a good idea. Also, why did you remove the partisan colour from the last part of the table (the Republic)?\"", "score": -0.8003791466174537}, {"text": "\"Hi, I see that you added the etymology section of the <url> article. What does \"\"RatCE\"\" mean?\"", "score": 0.553373593326852}, {"text": "\"Lionel, tell me the truth. 
How long have you known BelloMello was wMo?\"", "score": -0.6125936990514352}, {"text": "\"I'm not going to get involved in the content dispute. But I'm pleased to see you finally taking this seriously: its no longer ''all in good fun'', perhaps?\"", "score": -0.6922014025059184}, {"text": "Doing too many things at once? Not listening ?", "score": -1.1266830716305927}, {"text": "Hi Kathleen - It looks OK to me; all I did was change the template wording a bit and add parent cats to the stub category. What problem were you having?", "score": 0.8015249246655959}, {"text": "\"MONGO, today you speedied this article as a non notable character. Surely you know the non notable speedy criteria is for real people, not characters?\"", "score": -0.6617533047969724}, {"text": "Hi. Would you be interested in reviewing <url> for its GA nomination?", "score": 0.6711101292529407}, {"text": "\"Respectfully, you've replied to the message below mine a day ago now. Could you provide guidance for my request please?\"", "score": 0.7036759265461345}, {"text": "\"What a joke, this player has played league football all his career and some clown has now deleted it off, what kind of rules has this been deleted under then? \"\"May be deleted\"\" seems a bit of a poor excuse to me personally, fancy giving me some information as to why this has been deleted then guys?\"", "score": -2.3442130679098248}, {"text": "Thanks. Do you happen to know if the postcard was origianlly published in Japan?", "score": 1.149879900245288}, {"text": "\"I've made some improvements to <url>, keeping in mind your previous comments about the article. Since you gave its grade before, would you like to see it if it could be reassessed?\"", "score": 1.3620733441454467}, {"text": "Not too sure now you come to mention it. Do i need to annihilate it?", "score": -0.705852093012669}, {"text": "\"I seem to have adopted this article, since country music and dance are interests of mine. 
Can you recommend an easy to use bot that would help me in keeping those unwanted links out of the article?\"", "score": 1.2540341158219568}, {"text": "Made a bunch of changes to the <url> article. What do you think?", "score": 0.7466351287676701}, {"text": "Thanks for the edit. Do you have any informaytion on <url> and artists associated with <url>?", "score": 1.0109324371684247}, {"text": "\"I note you reverted my edit on <url>, quoting \"\"consensus\"\". What consensus?\"", "score": -0.9179469966424249}, {"text": "\"Mel, is the above comment from you? If not, could you please respond?\"", "score": 1.0133029813600953}, {"text": "Due to certain Edits the page alignment has changed. Could you please help?", "score": 1.1520644908645494}, {"text": "\"OK then. What's your problem with putting this in the lead : \"\"is one of the <url> counties of the <url> and \"\" ?\"", "score": -1.501046338934335}, {"text": "\"And personally I'm not gonna apologize for <url>. Quite a bit different than a Holocaust denier like Nordling, don't you think?\"", "score": -1.348234866393456}, {"text": "At <url> there's a request that seems to cry out for a Wikipedian with a subscription to the Daily Gleaner. Might you be able to help?", "score": 1.1798609523334869}, {"text": "\"Can you give me a link to \"\"We Are Defiance\"\" ? And what do you mean by \"\"remove\"\" : 1) moving to the main space or 2) deleting the article ?\"", "score": -0.461266260154067}, {"text": "-- Bit confused by your DYK nom. Surely the world's first gasoline automobile was created by <url> four years earlier?", "score": -0.7778692207619076}, {"text": "<url> seems a bit bizarre. Why nowrap date fields in cite templates?", "score": -0.5733277162970872}, {"text": "\"Why avoid it? 
Is \"\"November 6,<br>2010\"\" somehow a more desired rendering than \"\"6 November<br>2010\"\"?\"", "score": -0.4565766372246065}, {"text": "\"Hey JC: I saw you move <url> to <url>, which is great, but I was surprised you didn't fix any of the double redirects that resulted. Also, the related essay is still entitled <url>; should that be moved (over redirect) as well?\"", "score": 1.0014410713655197}, {"text": "\"It already look great. BTW, what are your thoughts about the previous post on the names of the other post office and rates lists?\"", "score": 0.732341556635052}, {"text": "\"Reading your help desk comment, I'm totally confused now. Was it 2005 or 2006?\"", "score": -0.6445062389694827}, {"text": "\"I understand the problem with that redundant category--I've hashed that out, I think--but why did you remove ALL categories from the Christian shows? That seems....excessive... Or do I just need a nap?\"", "score": -0.5332062329257057}, {"text": "\"Thanks for offer of help on IRC - terminology completely baffling to me (not helped by freenode instructions written as if translated through several languages into final english) - just which channel are admins supposed to log into, and how does one register when it seems by invitation only. Finally what on earth is a cloak ?\"", "score": 1.0392686122647121}, {"text": "\"I see that you closed that CfD discussion as rename, but haven't actually renamed the categories. What's up with that?\"", "score": -0.9556501466208557}, {"text": "So why then does the lead stat in Christianity need a reference? Isn't it being contained in the article good enough?", "score": -0.8668985408348728}, {"text": "\"Hello Sirmyles, I noticed a puzzling section on <url> that could only have been written by you (about the parallels with Shiite law). 
I would appreciate if you could identify a source that could possibly support this assertion?\"", "score": 1.0184934187614512}, {"text": "Is there any way I could talk you into being more conservative with creation of new articles? Do you agree that some of what you're creating here would be seen as obviously deletable material by most editors?", "score": -0.4848732825918255}, {"text": "\"Well, you leaved me also curious. Why was it not appropriate?\"", "score": -0.7275526872807648}, {"text": "\"Why did you rollback <url>? It appears by a quick scan that the user was correct re <url> - but regardless seemed a good faith edit, one certainly not worthy of a vandalism rollback I would think?\"", "score": -0.5076427729228564}, {"text": "\"Hi, <url>. Do you have a suggestion as to how these article make Wikipedia better?\"", "score": 0.7479484698500204}, {"text": "\"Thanks, but why specifically? Because I know the difference between ''hale'' and ''hail''?\"", "score": -0.47666709960830744}, {"text": "\"I removed most of the boldface lettering from the article. And what do you mean by \"\"the referencing is insuficcient\"\"?\"", "score": -0.8962868562831832}, {"text": "I Mean hes been really sucessful why not???", "score": -0.761720434570607}, {"text": "Thank you for reverting vandalism on my page. How did you catch it so fast?", "score": 1.474281934555726}, {"text": "\"And what, may I ask, is ''your'' relationship to Hammersoft? How did you come upon this matter?\"", "score": -0.8256791083555631}, {"text": "Your new images of Micheal Knight and Devon look kinda squashed. Can you fix them or find better ones to replace them?", "score": -0.5658009247377035}, {"text": "\"Gee, thanks. But shouldn't it be a barnstar for '''''bad''''' humor, or maybe for the humor \"\"phlegm\"\"?\"", "score": -0.7226103354397941}, {"text": "I tagged the most recent article. 
Aren't there guidelines for public history in your state?", "score": -0.5980987303641194}, {"text": "(undent) Your Chinese is much better than mine. Is it possible in Chinese to omit a conjunction such as u548c or u8ddf and let a string of two successive nouns refer to a set of two objects?", "score": 1.4562528224624136}, {"text": "\"I want to send you a private message, but you don't have an e-mail address registered with your Wikipedia account. How could I contact you?\"", "score": 0.7375157130794199}, {"text": "\"So you reverted my edit, rstoring a badly written sentence that includes a point that is neither germane to the topic nor supported by a citation. Yeah, that really helped, didn't it?\"", "score": -3.0086093758416994}, {"text": "So far there are four editors on <url> who have called you on your edit-warring. Would you please stop?", "score": -0.5108740155056413}, {"text": "\"I'd assume it's a ban on this account, that was used by multiple people. I don't think you would be blocked because you at one point edited on this account, and as long as you don't use the same IP, I don't think you could be blocked, how whould anyone know?\"", "score": -0.6875173125613933}, {"text": "\"PS, I would seriously like an answer to my question above. What is your native language and where do you come from?\"", "score": -1.0280512229030756}, {"text": "\"Well, if it isn't \"\"ours\"\" to feature, why is it \"\"ours\"\" to use? Do you mean to say that, for you, the best possible encyclopedia would not use images from outside sources, no matter their license?\"", "score": -1.3285272710002374}, {"text": "Problem solved! Aren't administrators handy?", "score": 0.5997982740636563}, {"text": "\"It's simple Pumpie. Why don't you try to understand all parts, or write in your own language?\"", "score": -0.9248096484691807}, {"text": "\"May I ask why you removed <url> from the article on <url>? 
Since CAP is the USAF's official auxiliary, wouldn't it make sense for the CAP article to be in the USAF category?\"", "score": -0.4834747961026092}, {"text": "I see that KarBot is editing User Talk Pages. Is this deliberate?", "score": -0.5091235447429631}, {"text": "\"I'm attempting to be incisive, and the articles integrity hasn't been disrupted. Therefore i see no reason why you chose to comment unless you felt that ive wiped out important information?\"", "score": -0.9967273431815087}, {"text": "\"I notice that your userpage announces that this is an alternate account and won't get involved in policy. Did you always have that attitude, or is it since December last year?\"", "score": -0.9413586245790622}, {"text": "\"Hi, I've added a question to your RfA. Also, could you elaborate on how you will ''prevent'' vandalism?\"", "score": 0.6647957251434874}, {"text": "I disagree with your edit of the 21st of November. Would you object to me inserting a new version based around the word 'Notwithstanding'?", "score": 1.019709220094362}, {"text": "\"Hello -- I see you changed a couple of entries relating to \"\"PALATINE, the Subject Centre for Dance, Drama and Music of the UK Higher Education Academy\"\". I don't know (or even care) why that organisation chooses to refer to itself by capitalising the word, but (and I ask for information here) is there a policy that precludes us from respecting that choice?\"", "score": -1.001515467353546}, {"text": "\"Hi there, thanks a lot for commenting. Just to be absolutely clear, and avoid anybody misinterpreting your remarks, could you say if you prefer version 1 or 2 of the lead?\"", "score": 1.3013973262740386}, {"text": "\"On another subject, have you finished the books I suggested? Want more?\"", "score": 0.7479395514715055}, {"text": "\"Hi, thanks for your help. 
There is also this sentence \"\"Asukkaista 84,5% oli ruotsinkielisixe4 ja 15,4% suomenkielisixe4 vuonna 1970 ja kunta oli kaksikielinen.\"\" What does it mean, please?\"", "score": 1.54968161645914}, {"text": "\"Self-follow-up: Also, while sometimes the status quo should be maintained, by supporting it in this case, you're also supporting a \"\"revert-first, ask questions later\"\" approach to dealing with good faith edits. Is that really what you want?\"", "score": -0.980814592823761}, {"text": "You've completely lost me. What are you asking about?", "score": -1.172583191574325}, {"text": "Bureaucrats require a higher level of trust than administrators. Are you actually familiar with what these tasks entail?", "score": -0.9600405681070996}, {"text": "Kidding about what? Are there no reliable sources?", "score": -0.6839688573542698}, {"text": "\"Soman, since you visited the Swedish Social Democratic Party site in May 2010, did you put the May 2010 \"\"(This article) may need to be rewritten entirely to comply with Wikipedia's quality standards\"\" atomic bomb tag at the top of the article? If so, will you clarify which sections you feel are inappropriate to a discussion of the political party, and what sort of focus you'd prefer to see, and why?\"", "score": 0.6517013578067312}, {"text": "\"Im interested in where you got the information you added stating woodlands house was closed in 2003. As far as i can see your wording was taken from one website, the milesfaster one. Should you not cite the website you quoted rather than look for a couple of others?\"", "score": -0.7235749664493258}, {"text": "Welcome to wikipedia? Does <url> and <url> look like unreferenced uselessness to you?", "score": -0.8906032910375961}, {"text": "\"Hi, I borrowed your bit on Rauls Razor for my userpage. I hope thats okay?\"", "score": 0.7277206235155352}, {"text": "I need to read more about what exactly a pastoral opera is supposed to be. 
Perhaps it should be a supercategory?", "score": 0.8319200023088195}, {"text": "What a mess. Does anything need to be done about it at the moment?", "score": -0.7711115900622776}, {"text": "\"It may very well have been a better candidate for a stub rather than a speedy deletion. I would rather not talk about this on another users talk page, if you would like to discuss if further could we continue any further posts on <url>?\"", "score": 0.5398487742090182}, {"text": "I have made a Ukrainian version. Were would you like me to email it to?", "score": 0.5278189442944702}, {"text": "\"When we go to the American Indian museum, they would like our real name ahead of time that way they can get us through security quickly. Could you get yours to me by tomorrow, either via the page for the meetup, my talk page or an e-mail?\"", "score": 1.1135512416243458}, {"text": "\"He has requested unblock, states that he has edited from a school IP and that may be the reason for it. Who is he supposed to be a sock of, and what's the evidence?\"", "score": -0.5066398190498442}, {"text": "\"The only books by Paul I was able to find were ''Concordia the Lutheran Confessions: A Readers Edition of the Book of Concord'' (Hardcover - not yet released) and ''Communion fellowship: A resource for understanding, implementing, and retaining the practice of closed communion in the Lutheran parish'' (Unknown Binding - Jan 1, 1992). Are those his scholarly works?\"", "score": 0.5462193120681789}, {"text": "\"Furthermore, I've heard something about ''FE'' trading cards. I know that you know more about it than me, so maybe you could write a small passage about it in the \"\"media\"\" (can be renamed) and then I can reference it?\"", "score": 0.5184595820973045}, {"text": "\"Thanks, but I believe I've covered the sustance. 
Could you also get Beeblebrox to strike the \"\"Mr\"\" insult, unless he was doing it deliberately?\"", "score": -0.9706828009505226}, {"text": "Further: it seems to me that <url> was a perfectly reasonable dab page. I wonder why you needed to change it?", "score": -1.3092109028801349}, {"text": "\"I have found a better citation for reference 7, but am not sure how to insert the page number (p. 97 from the book cited). Do you know the right format for this?\"", "score": 0.6583343747698428}, {"text": "How can you possibly know that? Have you tried to find additional content and failed to find it?", "score": -1.489465311154893}, {"text": "A proposal has been made for the above WikiProject at <url>. Would you be interested in contributing to it?", "score": 0.8394504512491743}, {"text": "\"Hi, I noticed that you've pointed out an accuracy in the restoration of <url> used in the article. I've just started up a review page for paleoart here, and started a section for that image: <url> Would you mind commenting on it so it can be fixed?\"", "score": 1.170661209241381}, {"text": "\"Okay, so I did what it said and I sent a message to an admin. Now what do I do?\"", "score": -0.5362568394123861}, {"text": "See my answer in <url>. Is there some problem?", "score": -0.4511106252954044}, {"text": "<person> is apparently caught up in that rangeblock. Can you help him out?", "score": 0.6238069697728323}, {"text": "\"Number of articles on South Slavic languages is now pretty high and I'd like to add '''Category:South Slavic languages''' as subcategory to '''Category:Slavic languages''', possibly with subcategories on individual languages. The catch is, I don't know how :-) -- could you point me to relevant Wiki links or start the job yourself and let me finish it?\"", "score": 1.0198649352969575}, {"text": "In my opinion that would be more constructive way to approach the solution. 
What do you think?", "score": 0.7020653084011375}, {"text": "\"Hi, I was removed from the \"\"request for approval\"\" list at the vandalproof, and not put on either the \"\"approved\"\" or \"\"denied\"\" lists. Could you tell me why, please?\"", "score": 1.1163204158596203}, {"text": "\"Sorry, I was wondering if the are related to each other or separate organizations? Also is there a difference between North Twillingate and Twillingate?\"", "score": 0.7706595204470579}, {"text": "\"Could you do me a huge favour, but only if you want to, because I know it's a lot to ask. Could you do the Peer Review for ''Loud'' by any chance?\"", "score": 1.8386494449759117}, {"text": "Are you saying you have never edited as anything else? What brought you to that particular article?", "score": -0.8753008101471164}, {"text": "\"Hi, I see you were involved in creating the ship infobox. Would you be interested in creating an infobox for windmills so that I could use it on individual windmill articles?\"", "score": 0.7535702296451439}, {"text": "\"Actually, I counted 2 sets of <url>. Can he be blocked for 48 hours?\"", "score": -0.5991821397463207}, {"text": "\"Man, I'm discovering that Commons is lousy with copyrighted product packaging, there are ''thousands'' of them, I think. I am AWB-challenged, if I start compiling a list, perhaps you could eventually turn AWB loose on it?\"", "score": -0.522682968961338}, {"text": "Why did you just do that? I am being serious - what business was it of yours exactly?", "score": -1.3789889791295764}, {"text": "\"(after multipled edit conflicts)I checked your behaviour and your contributions myself at the time, as did numerous other admins who reviewed your edits and the block and it was subject of extensive discussion on and off-site so your suggestion that I should have checked with others is silly. 
You violated Wikipedia's policies and were blocked rightly for it; you've been given a second chance and instead of being productive you come here to moan about a comment I made more than six months ago, that I still stand by, and to pursue your now year-old vendetta against Nick?\"", "score": -1.6904965611808678}, {"text": "Was this wholesale revert really necessary? <url> Did the edit completely fail to improve the article in any way?", "score": -1.3948821557373412}, {"text": "\"Hey Nick, thanks for writing the book reviews! Would you like me to run one or both of them?\"", "score": 1.906030951230873}, {"text": "Sounds good to me. Shall we have a straw poll on it?", "score": 0.963335962905972}, {"text": "u2013 Use that dash my friend. How are you geting on?", "score": 0.7019196151901659}, {"text": "Are you saying that Racepacket attempted to contact Laura's employer? When and where did this happen?", "score": -0.5421532081296426}, {"text": "\"Hey, can you block this moron from editing the IIHF Page? Or what can i do in such cases?\"", "score": -1.9149145083432475}, {"text": "Hell yeah. How do we do that?", "score": -0.891697570790811}, {"text": "Also I noticed you made new Europe and USA maps for this and <url>. Do you think it would be good to have a world map for <url>?", "score": 0.6247567009453066}, {"text": "I wasn't sure whether you were actually reading anything because facts seem to bounce off of you like bullets off of Superman. And you accuse ''me'' of fantasies?", "score": -2.717003350247981}, {"text": "Your bot takes zh:u514bu91ccu65afu6258u5f17xb7u6885u7b56u723eu5fb7 off of the Christoph Metzelder page. What exactly is wrong with it?", "score": -0.8337000445096896}, {"text": "\"Are you the owner of hacktolive.org? If not, what's your relationship to it?\"", "score": -0.560804063601019}, {"text": "\"Mattisse hun, you haven't been on wiki for a couple of weeks. Are you ok?\"", "score": 1.431777524803213}, {"text": "I'll add that to the others. 
Will that work?", "score": 1.2349617527209005}, {"text": "The profile seems good as it is. Why don't we bring this back up after the Hibiki 2-parter?", "score": 0.5464100618087564}, {"text": "\"Now I'm confused--did I miss something? Who was talking about G1, and who was talking about IP blocks?\"", "score": -0.6649472561006537}, {"text": "\"I was going to be the first, but now I'll be the second. Do you have any idea who that Oppose !voter was or why he was so upset with you?\"", "score": 0.7014669559889494}, {"text": "Um... the Kent roast? Tentrake host?", "score": -0.6075714977721643}, {"text": "\"I hope I am now signed on and you remove the erroneous vandalism report. Don't you think it would be more politic to send a message with a question than to preemptorily declare an edit \"\"vandalism\"\"?\"", "score": -0.6424546058447749}, {"text": "We are working on adding some information regarding the significance of the podcast. Do you have any additional tips on improving out article's quality?", "score": 0.8006527084499077}, {"text": "Looks like he earned a block. He break the 3RR rule?", "score": -0.6942511214374998}, {"text": "I'm still scratching my head. How can this be?", "score": -0.9760391826984354}, {"text": "you could have started the discussion yourself. How can you say there was no consensus against the edits if there had been no discussion at the time you were doing the reverting?", "score": -0.5269527265567112}, {"text": "\"Slightly more seriously, there's a discussion over at <url> about the lack of non-European info regards torture. Any chance that in your manifold research on all things psychohistorical you've found something that could be used to expand the section?\"", "score": 0.575145069767742}, {"text": "\"I saw your notes, and will watch for any objections. 
If there are none, in two days, do you mind if I move that page and start the new categorization page/discussion?\"", "score": 1.0154438736024625}, {"text": "\"Alternatively, maybe the second column could be '''Country of origin''' and Rosen could simply be denoted USA/Poland, while Pillsbury would be USA/USA, without his US State being mentioned - it would be less confusing I think. Any thoughts?\"", "score": 0.5908596966172615}, {"text": "\"I want to purchase a macro lens and was hoping for some guidance from you. Apart from being good in macro, can it be used to take good pictures of landscape and portraits?\"", "score": 0.6395241065387619}, {"text": "\"I'm no expert on the language, but on the face of it that dab page disambiguates two words which are unambiguous anyway - Zakhmet and Khachpas. Am I missing something?\"", "score": -0.45926016061639663}, {"text": "\"Presumably the CfD will be inconclusive. Given the technical issues, perhaps an RfC might be more helpful?\"", "score": 0.978837143267738}, {"text": "There are quite a lot of <url> on Wikipedia. What's wrong with adding <url>?", "score": -0.5834917383146998}, {"text": "Hi Gokul. Just wondered if you got my previous message and had any thoughts about ledes for these articles?", "score": 1.0808134874415045}, {"text": "\"I don't understand. So then you decide by yourself, and the people who stated their support for the original are ignored?\"", "score": -1.0052411484755044}, {"text": "\"Just think about it. If, unlike Saudi Arabia, the US is the Land of the Free, then how come you are only allowed to drink <url>?\"", "score": -1.0544281491018022}, {"text": "I added a clarifying sentence to my user page <url>. Do you think it distills the essence?", "score": 0.8457268158044947}, {"text": "I replied to you <url> but you didn't answer. Did you not see it?", "score": -1.101672043547771}, {"text": "Thanks for your help with <url>. 
I've responded to your comments - could you please take a look and let me know if you are content now?", "score": 0.9574675670544961}, {"text": "\"Yes, this is the English Wikipedia so we use the English version of the infobox. And can there once be a time when you don't find your way to my talk a complain?\"", "score": -0.9347344380579836}, {"text": "\"Heh, thanks. Do you know of any scripts that might come in handy?\"", "score": 0.955962793155433}, {"text": "\"Ridding the project from copyvio images, which could result in expensive legal battles and put the whole project at risk? You think that we should risk putting the work of thousands of editors to risk because of some image?\"", "score": -1.3339932949682125}, {"text": "\"You removed the border issue coverage in <url>, and your explanation was that it's covered in another article. Where is it covered?\"", "score": -0.6821487241502988}, {"text": "\"The admin says he thinks it should be renamed, but someone else is trying to pull the \"\"but it happened in the same area nonsense\"\". Was I clear enough in explaining where the pictures are listed?\"", "score": -0.5929069361287189}, {"text": "\"At <url>, you've added from Bacon a line saying that the addition of 3 minor judges to the list of major judges brings the total to 12. But the numbers don't work out - not in the list in the article, anyway. I don't want to revert your good-faith (and referenced) edit, but could you go back to your source and see if you can fix this?\"", "score": 0.5183628098630472}, {"text": "\"I'm sure. How about we let the sockpuppet investigation let us know for certain, m'kay?\"", "score": -1.6470229353122654}, {"text": "Hi again - it looks like the same user is editing out all the information about Destiny being self-centred again. 
Is there some way we can stop this user from making these edits?", "score": 0.636123718259304}, {"text": "\"Your new map is a great, less distorting, format, but it leaves out a whole lot of countries that have recently joined the alliance - Estonia, Latvia, Lithuania, Slovenia etc among them. Can you fix this please?\"", "score": 1.1163204158596203}, {"text": "No worries. Can you take another look at the nom?", "score": 0.7124294935348108}, {"text": "\"...welcome back! Is this permanent, or are you just passing through?\"", "score": 0.988359105924412}, {"text": "\"Hi, I noticed <url> of yours, claiming a <url> violation. Mind explaining that?\"", "score": -0.602693437726983}, {"text": "I accept your point and won't classify as Military of Japan. What would be a better desciption for these historical Japanese soldiers?", "score": 0.932523610846831}, {"text": "\"That's a couple levels of automation beyond my experience. But if you just got the idea today, then how can you know the false positive level is low enough for WP:AWB/Typos?\"", "score": -0.5958325774287292}, {"text": "\"Thanks for telling me. Do you just perform the rename, and I then re-create the account to take it for myself?\"", "score": 1.0547839650714952}, {"text": "\"Thanks; it just occurred to me to cite USGS maps, but this is maybe even better. Was there once an airplane crash in the vicinity, or are there maps like this for all parts of the country?\"", "score": 0.6341228188748821}, {"text": "\"Thanks for voting, even though you didn't support. Can you tell me where you thought I was too aggressive?\"", "score": 0.9828342197609462}, {"text": "\"ER, I was wondering if <url> would be a good candidate for featured bio in the portal, since he was the first United States Marine Corps officer, in history to take command of a Naval flotilla. What do you think?\"", "score": 0.7885294082279898}, {"text": "\"<url> as having been deleted via a deletion discussion, however I can't find any such Afd. 
Do you know of one?\"", "score": 0.7151893021553442}, {"text": "\"You beat me to the punch with the archaeological discovery.<url>. I'm curious, do you know if any Scottish newspapers reported it?\"", "score": 0.9251749183062193}, {"text": "What is the source of this image? and why do you think it's PD?", "score": -0.539792135389727}, {"text": "\"Remember we had the whole edit warring on Edelsten article a while back? I find it curious that a recently created account posted this <url> as their first contribution basically supporting Edelsten's personal website (which when you look would hardly pass <url>, I looked it at this site a while back in research and found some of the positive claims could not be verified) and wanting a complete rewrite of the article... sound familiar?\"", "score": -1.0020943056317404}, {"text": "\"Hi, I just couldn't understand <url> ! Could you explain,please ?\"", "score": 1.0756043628409198}, {"text": "\"The specific claim of Boyer's that you included and I deleted was that \"\"cauchy based infinitesimals on limits\"\". Did you see anybody else saying this?\"", "score": -1.0797169198130399}, {"text": "Links to attack sites and so are not allowed. You must know that you violated BLP?", "score": -0.5870807867337364}, {"text": "\"I'm sorry, but I don't see the edit if you're referring to the <url> article. Are you referring to something else?\"", "score": 0.8969272414213411}, {"text": "\"Hi Khoikhoi - I see you deleted a previous incarnation of a pan-Arabism article which seems to be resurfacing under variant names, and is now at <url>. This doesn't read to me like an encyclopedia article - do you have any thoughts on whether/under what criterion it should be deleted?\"", "score": 0.7036540009325419}, {"text": "\"Ah, as I recall it was ''you'' who confused me on that one, by adding them to <url> during the GA review ;) What is the preferred display quote? 
Does one just use blockquote?\"", "score": -0.5553723520469822}, {"text": "\"Thank you for getting to all those WikiProject Canada assessements so quickly! Question, is it ''useful'' what I'm doing, adding the template even to what I know will be relative low importance articles?\"", "score": 1.59117297536823}, {"text": "\"I see you've created <url> which I think is great. Would you be able to add the template to the top of the category page, as per its parent <url>, so people can paste it to the top of Talk pages?\"", "score": 0.6770569438888233}, {"text": "Why on earth have you deleted the article on the <url>? Can you please undelete it?", "score": -1.0034279243975486}, {"text": "\"Hi friend, I've noticed you've made some good edits over at the article. Are you a big fan of McGinley's work?\"", "score": 1.393539431963829}, {"text": "\"Hey, what was with the fact tag? Do you dispute its name?\"", "score": -0.7008278843486628}, {"text": "\"Hi, <url> to <url> broke the templates. Can you fix that, please?\"", "score": 1.2444449289047625}, {"text": "Hi Gareth. I was wondering if you could point me in the right direction for learning about building a wikipedia profile?", "score": 1.0678621112229787}, {"text": "http://en.wikipedia.org/wiki/User:Mheart did the copyedit for the Japanese flag article and it was completed today. Is there anything else other than a copyedit that you need for this article?", "score": 0.6630095416476107}, {"text": "Per <url> we should summarize the article briefly in the lede. I didn't see anything about higher dimensions in the <url> body so should it be removed from the lede or should the body be expanded?", "score": 0.7269463867783378}, {"text": "\"I don't play club anymore (I stopped this year), I have just been too busy this year, but I will probably start again soon-ish. Where abouts do you play?\"", "score": 0.8069156174364822}, {"text": "\"I would say you went a few steps further than I did! 
Does make it easier to find yourself on the page, doesn't it?\"", "score": 0.5969157764287168}, {"text": "\"Okay, let's leave the naming thing posponed for now, why are you reverting the <url>-catagory thing? just because (as you say) Iraq was formed post-WWI that doesn't mean that they aren't the same people?\"", "score": -0.6233716054729235}, {"text": "\"I'd be glad to help, if you like. How should we divide it, by section or by criteria?\"", "score": 1.3303206496515023}, {"text": "Don't you think it's taken too far back to be of any use? Do we have any alternatives?", "score": -0.48775942584034615}, {"text": "\"no, not funny, unless you are a mentally challenged person. Why don't you find a better hobby where you don't post lies and offend the people of Tibet?\"", "score": -2.780986964273572}, {"text": "No worries found it... but yeah... We need a new Foo's pic... Does Flickr have fair use licenses?", "score": 0.5458172978878844}, {"text": "well it wasn't exactly a joke. am I the only one who notice they share the same melody?", "score": -0.5516185101609667}, {"text": "I don't want to be your enemy. What can I do to help this situation?", "score": 1.4665772154850043}, {"text": "I know there is some discussion at <url> regarding primary topics so I'm unsure if performing this edit would be an ok move. Thoughts?", "score": 0.6522002708818876}, {"text": "\"I am bewildered as to why anyone should think that they are \"\"representatives of the people\"\". They are not elected, chosen, delegated, or in any other way appointed by the people: how are they representatives of the people?\"", "score": -0.8024731627182338}, {"text": "\"Hi I noticed you speedied and article but not it's talk page? What's the use of leaving the talk page, discussion or no, if you're going to speedy the article?\"", "score": -0.892750475202039}, {"text": "\"Hi, thanks for letting me know about my signature not being linked. 
I tried to save the settings in preferences - did it work?\"", "score": 0.8850057585179464}, {"text": "???", "score": -0.7176602298798572}, {"text": "\"Hi, Michael David. Was there any consensus reached on this issue?\"", "score": 0.6616060897345915}, {"text": "\"Oops, I also just realized that I probably shouldn't delete this image because I added a comment on the talk page. Is this correct?\"", "score": 0.6366449156221512}, {"text": "\"He keeps coming back, and keeps getting blocked again.... When will they learn?\"", "score": -1.1188186112967422}, {"text": "\"Technically, Taku is within his rights to change things to conform to an existing standard, but it's not very politic of him to do so; if we decide to change, then it's still more meaningless work to change everything once again. Unfortunately, it's all too common for editors to edit first and talk second, eh?\"", "score": -0.710549225693144}, {"text": "\"Marhaba Tiamut. <url> has grown quite a bit since last week, and we're planning to take it to GA, would you be interested in joining the push?\"", "score": 1.4709751999098002}, {"text": "\"Sorry, I can't see it. Would you mind cutting and pasting the sentence that says she won an award?\"", "score": 0.635843690013179}, {"text": "Sigh. Was this renewed attack necessary now?", "score": -1.5847444744383474}, {"text": "I think that's a good solution. Should we give it a try?", "score": 1.503499840966175}, {"text": "\"OK, same situation here :-) Marskell and I agreed on a proposal that would initially just give us some backup on the day-to-day stuff, not necessarily closes, but to work in to people who could help when we needed time off. Is it OK with you if I go ahead and propose that to Raul?\"", "score": 0.606531270930509}, {"text": "Hi. I stumbled upon your user page and was wondering if you could offer any tips for creating locator maps... 
where do you get the 'base' map and what program do you use to fill in the colours?", "score": 1.0570149867061287}, {"text": "I have extended the block to 1 week. Has s/he been attempting to change your password too?", "score": 0.6162436938029732}, {"text": "\"ummm, it's a soft redirect. A placeholder for a future page ... Is there a problem?\"", "score": -0.7741072604310807}, {"text": "you have been following me around all week reverting my edits. what did i do to you?", "score": -1.4253416968201265}, {"text": "Misrepresenting a person's actions to portray them in a bad light is one of the more common (and transparent) forms of personal attack. Is that your defense for repeatedly violating <url>?", "score": -1.1701972044317397}, {"text": "I don't see you around much these days. Busy with non wiki life?", "score": 0.6879412441830273}, {"text": "\"Sorry, I've moved a number of articles and thought I did it correctly. How is moving done properly without losing the edit history?\"", "score": 0.8682397360516474}, {"text": "Can I send you a video? Or possibly some stills?", "score": 0.6214607641367866}, {"text": "\"Blocked, templated. Next?\"", "score": -1.4306486017826707}, {"text": "\"I've just seen your message on my Talk page. Why do you want the disambiguation page to be the one that doesn't say \"\"disambiguation\"\"?\"", "score": -0.5842796678647113}, {"text": "\"Sure, I'll have a look at it. It looks like it needs a bit of a copyedit though, hope you don't mind?\"", "score": 0.6376833078717385}, {"text": "\"lol thanks. so, australia, huh?\"", "score": 0.6147370328310112}, {"text": "\"OK, so while browsing my watchlist, I saw something peculiar in one of the pages that I'm watching; the user seems to be quite suspicious, based on his user page (similar to Wynchard Bloom's) which leads to the conclusion that he's Gerald. 
Can you like give him the stick?\"", "score": -0.5617205199381841}, {"text": "\"I have no need to search the interwebs, all that matters is it offends people and is a violation of NPOV and MoS. \"\"All Wikipedia articles and other encyclopedic content must be written from a neutral point of view (NPOV), representing fairly and without bias all significant views (that have been published by reliable sources)\"\" - ess-eff is a bias term for a minority group of fans, put it this way: is there an SF-channel, or how often is the clichxe9 ess-eff used outside fan-groups?\"", "score": -0.6715864581107136}, {"text": "\"Hey, Another editor and I are confused by your resent edit. \"\"Should the non-AQ qulify for the BCS Championship, and face either the Big Ten or Pac-10 Champion, the Rose Bowl may replace that champion with a team from same conference, so long as it is in the Top 14 of the final BCS Standings\"\" Could you please clarify it for us?\"", "score": 1.1587768294643435}, {"text": "Thanks! What type of documentation do you want u2014 an explanation of using the notes feature?", "score": 0.9811394523445707}, {"text": "\"My post above was a helpful hint about following DYK as a whole, not about the nomination process. Would you mind posting your post under a new heading, as it seems to have little to do with what I was saying and seems to be changing the subject?\"", "score": 0.533319543528954}, {"text": "\"I was just curious why you named the <url> for Williamsburg County, not Georgetown County. Is it because W County is listed first by the GNIS?\"", "score": 0.5588047249417629}, {"text": "\"Wanted to Email you about Wikimedia Converence Netherlands, but your email is not working. In any case, you could crash at our place?\"", "score": 0.6496453327004995}, {"text": "\"Okay, where can it go then? The list of episodes?\"", "score": -0.46468380984568647}, {"text": "\"I could go on from here, but this is essentially the underlying basis of my reasoning. 
Does this help explain my position?\"", "score": 0.576967531679264}, {"text": "I've started looking at this article as you asked and inevitably I've got a few questions. Would you prefer me to list them here or on the article's talk page?", "score": 0.6094698020617055}, {"text": "I thought that only section tags go into sections while the article tags go to the top of the page so they could serve it's purpose. Is there a Wikipedia guideline that can substantiate your revert?", "score": -0.5740625319963911}, {"text": "I can't remember if I asked/checked to see if it got to you? So... did it?", "score": -0.5254044750660352}, {"text": "\"No more saga! Help me by keeping this page the way it is right now, alright?\"", "score": -0.9260288669231652}, {"text": "Please don't mention other languages when we're discussing the English wikipedia! Why are you limiting yourself only to the Abrahamic concepts of ''God''?", "score": -0.7378287231312993}, {"text": "\"Hey, I'm inquiring about my <url> a few months ago for photos from the Flames-Rangers game that took place last week. I've gotten a few of these since then, but just wondering if you remembered and got any shots that would be worth using on here?\"", "score": 0.7059913731385193}, {"text": "The haven't first team appearances for gods sake! How can you say they deserve an article?", "score": -2.1337417294818484}, {"text": "\"<url> is one of the three fundamental Wikipedia content policies; why is it a \"\"shame\"\" you have to follow it? What did you mean by \"\"people like you\"\"?\"", "score": -0.9594759276511333}, {"text": "This was supposed to have been moved to <url> per the CFD. Why wasn't it moved?", "score": -1.0464393123363993}, {"text": "\"While editing the COIN page regarding Ciplex, I accidently broke your sig. Since I don't want any more flame, could you please fix it?\"", "score": 1.1029256981255597}, {"text": "\"Thanks for your reply. 
So just to clarify then, you are happy with the number of references, but you just need me to convert each of them into ref/footnote format, and then the article is GA-ready, is that correct?\"", "score": 0.7102681987430924}, {"text": "\"Well, in that case we should simply point the shortcuts to where they originally pointed, which in most cases is unrelated to both VIE and VINE. That sounds like a fair and neutral compromise, no?\"", "score": 0.7128559245948585}, {"text": "\"Hi Mecu, I've been asked by a user (a photographer) how they could add there pictures with limited copyright, for instance, there more than happy to upload them for use within wikipedia, but just don't want to release it completely free from copyright. Would a fair use tag be appropriate?\"", "score": 0.9457394171180973}, {"text": "\"ChrisO, am I being paranoiac or are you undertaking some kind of campaign against me? What exactly are you referring to this time?\"", "score": -1.0302952815933992}, {"text": "\"I often work out of Sweden and I have an excellent, free, built-in English or Swedish grammar ''and'' spelling program in ''Word'' on that computer using Firefox (as this user says she does) - red underlinings for spelling and green for grammar - which would preclude almost all of the huge amount of problems caused by this user regarding readability, or at least alert her to the very quanitity of the problems and inspire a constructive attitude as to her own capacity. Perhaps if one does not consider it a \"\"duty\"\" of sorts to contribute readable English here, that actually might be the problem?\"", "score": -1.1438954243302706}, {"text": "\"I'm curious what your reasoning is in saying <url>'s edits are in \"\"good faith\"\" and not blocking him when the two edits right before that were both vandalism of user pages. It seemed quite a slam dunk to me, did you happen to see something I missed?\"", "score": 0.514954699607581}, {"text": "\"Thanks, Wareh. 
Have to ask, Davey: why is the Circus Maximus article on your page called <url>?\"", "score": 0.6784511889471478}, {"text": "\"A quick question, I'd like your opinion on something. Do you think I should put <url> GAN on hold or fail it?\"", "score": 0.5933321432062358}, {"text": "\"The benefit of a version of the sentence is that it helps clarify the differences between Lavoisier and Priestley; however, we need to be careful that we don't convey the wrong impression to the reader. How best to do this?\"", "score": 0.8841266899412494}, {"text": "O RLY? And what does it do there?", "score": -1.556687284743621}, {"text": "\"Hi Bruce, I normally make only trivial changes, but I've just found it necessary to do two hook rewrites. Could you check, please?\"", "score": 0.7814537929065601}, {"text": "You recently uploaded <url> as being in the public domain as its copyright was not renewed. Does the book indicate that its copyright was not renewed or do you have any other reason to believe that the L.A. Times did not renew the copyright?", "score": -0.4816345088661594}, {"text": "\"I think the we've reached consensus in the FAC for <url>, as every opposing reason has been resolved. Do you think it's looking good?\"", "score": 0.6902307217505183}, {"text": "\"I just thought that it is nice when you are looking for RQ that you can find an algorithm that will do RQ, not QR. It's fine if we just make a section about RQ in the QR article, is that ok with you?\"", "score": 1.1088548165608345}, {"text": "\"On second thought, the mix of pseudo-politeness, vulgarity and threats is...really creepy. Could ya...stop?\"", "score": -1.5402303802356516}, {"text": "\"I used the <url> tool, and clicked rather a lot. I'm happy to switch it on for your account, if you'd like?\"", "score": 1.1800345774858576}, {"text": "\"Actually, material in the lead must be discussed in the body. 
Contemporary economics haven't been discussed in the body, so how can they be discussed in the lead?\"", "score": -0.6334401860361866}, {"text": "\"Yes, my feeling is to err on the safe side here. Thanks for your comment, is it okay if I quote you on the commons thread?\"", "score": 1.5921894969471746}, {"text": "\"Okay, I will be glad to help you here! Could you please list the username you would want now?\"", "score": 1.2167016626822933}, {"text": "\"Sure. I have to warn you, though, the file size advantage has pretty much been lost since SVG font rendering went the way of the dinosaurs :) Do you make a point of having the border, or can I leave it out?\"", "score": 0.9126522600178595}, {"text": "\"From the edit history to the template, it appears that your last edit has not been undone so hopefully this issue is resolved. You are both editors of between 1 and 2 years experience here, so perhaps a bit better communications would be a better approach in future?\"", "score": 0.7221748683339456}, {"text": "\"p.s. I'd be right that you're a supporter of corporal punishment, I presume?\"", "score": -1.3616663453744953}, {"text": "\"One more thing, you have a fellow Budgie insisting a losing appearance in an FA Cup semi is an \"\"Honour\"\" for your club. I guess it's because you haven't won much lately, but really, a losing semi-finalist isn't an honour in the real sense of the word, is it?\"", "score": -0.8096182049092278}, {"text": "The Indian collaboration of the week will be the <url> next week. Would you be interested in joining us to make it a Featured Article?", "score": 1.2738143195482505}, {"text": "\"Hey, long time no seeing! How's stuff?\"", "score": 1.1345833142097124}, {"text": "<url>! Shall we greet him as he deserves?", "score": -0.4577462308765651}, {"text": "You're joking? 
<person> and <person> have no word play in spite of the same kanji?", "score": -1.4788687068883577}, {"text": "Let's pretend that we have reached an impasse and then we can stop talking to each other. Deal?", "score": -0.7525122200004966}, {"text": "You liked my edit? Are there any other climate pages in New Jersey that need help?", "score": 0.9163219986383832}, {"text": "I've been talking to Bill Oakley on Twitter and he's going to direct message me a few minor changes to his article. Do you have any suggestions for how that could be acceptable as a source?", "score": 0.8631281939096942}, {"text": "\"Regarding <url>: <url> seems to say the exact opposite (that links on a dab page should ''not'' be through redirects). The page is at <url>, so shouldn't that be how it's linked on the dab page?\"", "score": -0.5396206230665969}, {"text": "How can a permablocked user edit? And who would set up a sockpuppet months in advance?", "score": -0.6697988577820897}, {"text": "\"Hi again, OK, say we don't go along the lines of Blair will go before the end of 2007; but we list the announced candidates for when Blair does resign (which will inevitably happen at some point). Do you think this would be any better/more suitable?\"", "score": 0.8410515777243578}, {"text": "\"I was visiting the <url> article again today, and I had the same instinctive reaction to the image, that it is too specific and not representative of all pederastic relationships but only the sexual ones. Would you mind if I changed to something more representative of the practice, like a symposium image?\"", "score": 0.8497827932919592}, {"text": "\"Well, that's probably going to be an annoying AfD. How about <url> and <url>?\"", "score": -0.6485121137407219}, {"text": "\"Makes me wanna ask, what do you say about having separate articles like Wikipedia has <url> and <url> or <url> and <url>? 
Should this be tolerated for non-Transformers stuff?\"", "score": -0.476529832575844}, {"text": "\"I've tweaked it further: It's now accurate, to within my ability to describe a complicated subject, but rather too complicated. Can we simplify it without losing precision, or, alternately, put something before the precise definition that's a bit easier to understand?\"", "score": 0.7181992618009424}, {"text": "\"How's tricks, JRM? Lying kind of low, aren't you?\"", "score": -0.5301464471646211}, {"text": "Who is this Brit you keep talking about? A past vandal or something?", "score": -0.7772331602619998}, {"text": "Why would you doubt that. And for that matter why wouldn't you rewrite it yourself?", "score": -1.8195534235442778}, {"text": "\"Hello, it was requested that some of the details be removed so as to make the article a bit more general. Is that ok?\"", "score": 1.055432150548737}, {"text": "Can I ask: was this really the implication that you intended in making this change? And do you really think the RfC expressed consensus support for such a position?", "score": -0.6781929514982556}, {"text": "\"Very well I won't contact you anymore. Geez, whatever happen to the spirit of collaboration?\"", "score": -1.7117613837357006}, {"text": "In the summer (June-July) I am going to work on the ''Messiah'' article. Would you like to be in on this?", "score": 0.6648233964475463}, {"text": "Stephen's solution certainly isn't optimal. Fancy giving me a basic rundown on the problem here and I'll see if I can devise a more elegant one?", "score": 0.656013518325157}, {"text": "Title says it all.<url> I dont have time to put up a page... but might I suggest <url>?", "score": -0.887187380783599}, {"text": "Thanks! To be absolutely fair his earlier edits to Rumble Fighter seemed serious; maybe he left the pc untended?", "score": 0.7678518435550773}, {"text": "\"(Constantine and Leo are English, not Latin names.) 
Why don't you answer my question about Constantine the Great?\"", "score": -1.4695683798816577}, {"text": "Very nice. Did you visit also Romania or not?", "score": 0.8327786008688889}, {"text": "\"How can anyone analyze the page if you keep deleting the content? <url> 01:28, 18 December 2006 (UTC) Are you afraid someone might vote for it?\"", "score": -2.2058549086213803}, {"text": "\"Hey MBK; well, I've got to be honest I wasn't expecting the events of last night - personally I thought the discussion was going reasonably well (not fabulously, but we were getting somewhere) and, whilst I agree that Mlm's actions weren't exactly advised, I must admit it does seem to be a relatively minor thing to retire over - was there something else going on here that made it the straw that broke his back? Anyhow, I agree with you that he'll be greatly missed - I sent him a message on NSF but I doubt if I'll get any reply - what are your thoughts on the matter?\"", "score": 0.6945444785369653}, {"text": "Indian cultural imperialism is rampant this am - what nxt? we have had the mongolians and indians - maybe the astragoths next?", "score": -0.8845382656037376}, {"text": "I found this talk page (and <url>) while deleting the user pages that you requested. Would you like to have them deleted as well?", "score": 0.8112146824735476}, {"text": "\"Greetings, and thank you for your cogent remarks at <url>. Would you be interested in voting in the straw poll <url> as well?\"", "score": 1.1752789506167725}, {"text": "Why do you ask me questions to which you already know the answers? Is it a rhetorical device?", "score": -1.3120104173378133}, {"text": "\"You are talking about direktor, right? He begin the edit war again, as seen by diffs, so C was blocked instead of direktor, see?\"", "score": -0.6584657076114921}, {"text": "????", "score": -0.8489152011430663}, {"text": "\"I'm going to walk you through this one very simply. 
Let's start here: Is there currently, or has there ever been a cartoon broadcast on television by the name of the Breakfast Monkey?\"", "score": -0.8213303883658811}, {"text": "\"I posted here rather than to Jon's page because I don't want to embarrass the students; I know Jon watchlists your page and I am sure other NRG editors do too. Anyway, what do you think?\"", "score": 0.5778080940391896}, {"text": "and so is the new Portishead. Did I mention the Knife?", "score": -0.5600941695960339}, {"text": "\"Also perhaps we should set up a subpage for these discussions, so we'll have the thread completely on one page? What do you think?\"", "score": 0.6758386288285483}, {"text": "\"Hi, you've done a lot of work on comparing FA country articles. Are you aware of any country that got special praise for the selection of images in the article?\"", "score": 0.776932049098873}, {"text": "Congrats! I thought you were going to be away today?", "score": 1.185104404844871}, {"text": "\"What do you mean by \"\"one edit war away\"\"? And whats a siteban?\"", "score": -0.9091231348611301}, {"text": "\"Well, look again! Does ''<url>'' look like a dictionary-definition page to you?\"", "score": -2.0238998379627464}, {"text": "I never partook in this AfD. Did you leave this on the wrong page?", "score": -0.7044434442778247}, {"text": "\"Unfortunately I think <url> may also add that material; I am not familiar with Citizendum, though, but I will look at it. If Citizendum has the same policies regarding primary, secondary, and tertiary sources, then why not criticize both Wikipedia and Citizendum?\"", "score": -0.5209417419790973}, {"text": "\"You assume correctly; same principle would apply. :-) By the way, is there a trick to getting the (Talk) link in your signature automatically that I haven't learned yet?\"", "score": 0.6285260866506198}, {"text": "There. 
Better?", "score": -1.0019311981493677}, {"text": "\"This is what you wrote: \"\"In the book 'Inventing Elliot' written by Graham Gardener, the book, 1948 is mentioned numerous times.\"\" What book, 1948??\"", "score": -0.45984225204120105}, {"text": "You have listed it as no source but the author asserts own work. Could you please clarify this?", "score": 0.7832377598043934}, {"text": "\"''SO!?!?'', what has it got to do with you?\"", "score": -2.330943113625249}, {"text": "\"Hi, what are the technical steps to withdraw a nomination to delete a category? Or more specifically, what template did you use to withdraw your nomination?\"", "score": 0.653951090485847}, {"text": "\"So I got rid of it. Have a little faith in my good judgment, okay?\"", "score": -1.5671952066095134}, {"text": "\"I was trying to be sarcastic, ok??\"", "score": -1.3856421103160268}, {"text": "\"The Hitchin-related hoax claims by <url> seem to have led to a rather precipitate response, considering that he has only been an editor for a few days. Is there something going on here that isn't obvious?\"", "score": -0.5447269045100029}, {"text": "\"Email checked and responded to. If you can't respond here, could you at least leave me a note to check my email?\"", "score": -0.9111597914359268}, {"text": "This is a great way to deal with external links. Wondering if we should look at applying it almost universally for medical pages?", "score": 0.5540362306717354}, {"text": "Great work you've been doing on this. Would you agree with me that International Political Economy should be capitalised?", "score": 0.9892823452400151}, {"text": "\"Nice work on <url> - I had been a bit hesitant to put in those other reports, but now it looks great. How did you find out that I created it?\"", "score": 1.1603292120208768}, {"text": "\"I see you deleted the Shayla LaVeaux article, citing copyright problems. I'd like to see if I can't clean it up... 
is there any way I can see the deleted article?\"", "score": 0.896110978297506}, {"text": "No problem. :) Did you see the additional notes on the project talk page about the rest of those books from the 70s?", "score": 1.0704019643294653}, {"text": "<url>.. it doesn't really explain what the issue was. Can you word it better?", "score": -0.7424143260957551}, {"text": "\"Thanks for clarifying the article - it looks much better now. By the way, is it just me, or does the Trisakti look a bit like the Flag of the Philippines?\"", "score": 1.031191330821238}, {"text": "Where should he be? As a parent of Isa-Beg?", "score": -0.4685082038749305}, {"text": "So what was the point of that? Are you looking to get blocked?", "score": -1.592015192443144}, {"text": "\"I have already read this - thank you very much for this kind gesture, dear friend. What's up with you?\"", "score": 0.9536863532209647}, {"text": "\"Do you think we're good to go with the proposal, maybe later tonight, or tomorrow? Any other problems with the proposal as it stands?\"", "score": 0.7113201922463318}, {"text": "It's not too late to help at all - being busy has kept the work on it to a crawl. ;) I'd like to represent what you are referring to; was it on the Abom talk page or somewhere else?", "score": 1.2869825317420485}, {"text": "I restored 3 non-piped blue links. How does that look?", "score": 0.6023808389109032}, {"text": "\"Hi, you recently speedily deleted this image and in your explanation stated that I, the uploader, was informed of this. I was not - please could you tell me why not?\"", "score": 0.7180959466140038}, {"text": "Actually it was a compliment. Does that count as an 'issue'?", "score": -0.7783995200264845}, {"text": "\"Hey, several of the footnotes that you use in <url> are Harvard footnotes, which means that they need the full citation of the books in works cited section. 
I imagine you grabbed some of that information from some other places, would you mind retriveing the bibliographic data?\"", "score": 1.1067083104436484}, {"text": "\"Again, what would be the point of redirection? Who on earth is going to fall on the redirect first?\"", "score": -1.0223097562162862}, {"text": "But he's still a WHU registered player with a squad number. Is that really the right thing to do?", "score": -0.6227506394427523}, {"text": "\"Re: <url> and <url>, if you think they are better off as redirects to the suburb/town rather than substubs on the lakes with a <nowiki><person></nowiki> tagline, you only need to overwrite the article with a redirect (or revert to a previous version that was a redirect), leaving the substub in the article history. How come you speedied them before redirecting?\"", "score": -1.1891384145072965}, {"text": "Im sorry to see that you withdrew. Maybe next time?", "score": 0.7211636237001365}, {"text": "\"If you're going to accuse me of operating in bad faith, then the discussion has become inappropriately personal. On what grounds do you make such an implication?\"", "score": -1.861420152972134}, {"text": "I thought that it seemed a bit removed from Imlay's own time and there was no real explanation of the significance of Imlay to Twain (if any) or of Imlay's reputation between her death and the end of the nineteenth century or in America. What do you think?", "score": 0.6979810311341307}, {"text": "What? Please speak brighted.Ps.Do you know gag?", "score": -1.5578391881587335}, {"text": "\"Why are records of the Julianna posers changes being deleted? Even if this really was her, do celebrities get special privlidges on wikipedia?\"", "score": -0.94213833040027}, {"text": "\"..to Giggy's RfA. Did you mean to remove 2,066 bytes of replies?\"", "score": -0.5720703840710326}, {"text": "\"So far, I haven't seen any sound samples in music articles, not even in <url>, which is featured according to the nomninator of <url>. 
Can you point me to a page where I can find this discussed?\"", "score": 0.6015076900863489}, {"text": "\"That's still not the original source, and after searching through a few pages on Flickr, I'm assuming the original is all rights reserved anyway. Can I ask why you believe it is public domain?\"", "score": -0.5131986730435176}, {"text": "\"Thanks. Are you Slashdot user Anothy by any chance, or someone else?\"", "score": 0.9547255499221553}, {"text": "Thanks for continuing to look over the article. Do you know or do you know anyone that knows anything about the <url> or the <url>?", "score": 1.369292212252613}, {"text": "\"\"\"Hablo ingles?\"\". ROFLMAO?\"", "score": -1.135082397605707}, {"text": "\"Thanks for responding, at any rate. <smile> Protocol question, as I'm new here: If a discussion is unproductive, should one just delete it?\"", "score": 0.9215840946945596}, {"text": "I am not equally confident about this pinning one: <person>. Hector again?", "score": -0.7212557514341633}, {"text": "\"I see you moved and renamed... um, it... to <url>. Are you sure about that, given the conventions at <url>?\"", "score": -0.6728578870716315}, {"text": "\"Third: it's August 31 2010. Please check my userpage for reasons why I really don't feel useful today. Ask another admin, okay?\"", "score": -1.1921008673714961}, {"text": "\"Speaking of FPC closures, is it common practice to close the likes of the Emu feeding video or the White Ibis, which had nothing but weak supports and supports (not enough total votes, but that was all) as \"\"Not Promoted\"\"? Isn't that what the \"\"Nominations 7 days or older - Decision Time\"\" section is for?\"", "score": -0.6106275564715162}, {"text": "I am the third party; <url> was the first to dispute its use. Why can't you describe the changes in words?", "score": -0.7036099830501047}, {"text": "\"When am I changing nationalities? 
I do remove flags when I encounter them per <url> and <url>, but changing them...?\"", "score": -0.48730045297866786}, {"text": "\"I guess I sent you something about the <url> project and Welsh data. But you were also somehow silent on lists like gnome-i18n, weren't you?\"", "score": -1.0172673629984925}, {"text": "\"Hey how are you? Now I can't find you online, where are you lol?\"", "score": 1.1189043237912952}, {"text": "\"I understand your point about MJ being very close and having competent editors, but do you not think those of us who understand the source problems, etc. that the Reagan page is having should try and explain them to those editors (that is, try to educate them)?\"", "score": -0.5962755335613721}, {"text": "Blocking people who disagree with your edits is a very zionist approach. Why don't you try instead to justify why your material should stay in the lead?", "score": -1.4144864066268419}, {"text": "\"Hmm, see you just reverted back. What's the bot's job suppost to be?\"", "score": -0.5881262906828939}, {"text": "\"Thanks. Aside from rewrites and creations (which I've no mind for), anything in particular you see that I should focus on, or is it going okay?\"", "score": 0.9704078119546455}, {"text": "\"Thanks. BTW, there aren't any others that I've missed, are there?\"", "score": 0.6895135279339508}, {"text": "\"Well, actually I wanted to know if there were any errors in my own maps. ;) Anyway, is the new map at <url> better?\"", "score": 0.5659637104798357}, {"text": "\"So you deny you did anything wrong Jason? And we're all Nazi's persecuting you, Itake and Uncle Davey?\"", "score": -2.207666111656146}, {"text": "\"How about Guant who plays Pansy? Or Richard Fish who played Bill Weasley, and the actor who played Charlie Weasley in POA?\"", "score": -0.5927278132312622}, {"text": "\"Should the \"\"accessdate\"\" for the articles you found (thanks for that!) 
be 200''7''?\"", "score": 0.7344077689033586}, {"text": "What for did you analyse the two ArbCom cases Piotrus-Ghirla and Eastern European dispute? Was that suggested to you by Sciurinxe6 during your off-wiki discussions for 20 days?", "score": -0.6364909407820587}, {"text": "\"Hey just wanted to say thanks for giving me the Autopatrolled thing! Just curious though, what did I do to get it?\"", "score": 1.727604540299263}, {"text": "\"Right, but not all of it. So can you retrieve it or not?\"", "score": -1.796080226112429}, {"text": "You didn't answer about the status of above mentioned image. Are you going to tag that image too?", "score": -0.6288184601703716}, {"text": "I just saw that after I left this message to you. By speech marks do you mean quotation marks?", "score": 0.6733802258924992}, {"text": "\"Thank you that clarifies matters. Has the image been listed for deletion on commons, or should I do so?\"", "score": 0.8071692828600924}, {"text": "Please explain this <url>. Is there a valid reason why this image was removed?", "score": -0.48845534228104703}, {"text": "Thank you! Could you protect the picture that's live too?", "score": 1.1865354608508014}, {"text": "I noticed you've been adding the template <person> to pages - I'm sure there's a good reason but I'm totally puzzled as to why. Can you explain what it does?", "score": 0.5733891584246026}, {"text": "\"Thanks for the response. May I ask, are you a video gamer?\"", "score": 1.0549695551223084}, {"text": "I am planning on changing the <url> to the ICF Flatwater World Championships. Any objections?", "score": 0.7546970307434241}, {"text": "\"BTW, small step, huh, from Berkely to Soman's talk page. What's next--Cambridge?\"", "score": -0.7679011110088052}, {"text": "\"Sorry, I didn't know. I added a tag, are you able to delete it?\"", "score": 0.6753469395370311}, {"text": "You just added it again. 
Maybe you should look over the guidelines for inclusion of the tag?", "score": -0.6165517423363776}, {"text": "\"No-one's saying you can't edit anything, but what we are saying is that introducing grammatically incorrect phrases, POV, weasel words and things discussed and discounted in the numerous PRs and FLCs this article went through is inappropriate. Buc, stop it please, I'm getting tired of this, why not work on being constructive to articles that aren't at featured status?\"", "score": -1.2046513739661393}, {"text": "Hate to be a bother... but need help with getting the bot running? Anything I can do as a non programmer?", "score": 0.9952236185184619}, {"text": "\"He probably couldn't understand the speedy notice. Then again, who knows what goes on in the minds of n00bs?\"", "score": -1.387592198616004}, {"text": "\"I noticed that you did some work on this article, and would like to expand it to include his recommendation (in ''Can Life Prevail'') that the UN develop hit squads to target large urban population centers (with neutron bombs as I recall), and also the discussion on the 9/11 terrorists being \"\"superior moral human beings\"\" for their actions. Understanding that this is a BLP, that NPOV is important, and that including secondary sources is nearly impossible as they are difficult (at best) to find, what is your feeling on this?\"", "score": 0.9891418271545815}, {"text": "I obviously made a mistake too. Do you really think I'd purposefully misinterpret things like that?", "score": -0.9549275668453902}, {"text": "How are recognizing and not recognising defined in this context? The country has explicitly and officially stated that they do or do not recognise the State of Palestine?", "score": -0.9539712858145124}, {"text": "Thanks for clean-up of the secret talkpage colourer-inner. 
Aren't people odd?", "score": 1.5223019227744163}, {"text": "Reading the guideline again: ''The name of each article (a link to the articles is recommended as well) in which fair use is claimed for the item''. Where does that say I '''need''' a link to the article?", "score": -0.5559751575010942}, {"text": "Good luck with the remaining time for your nomination. BTW: under what section on <url> do you think it should appear?", "score": 1.2925988887386521}, {"text": "\"\"\"Get Up\"\" was released in Canada??\"", "score": -0.5103022291545316}, {"text": "\"Thanks for pointing out that \"\"undiscussed moves\"\" discussion, I hope something comes out of that - but it doesn't really look like it will. I'm not sure what I'm looking for on the discussion page for <url> about this, what am I missing?\"", "score": 1.0701029211688653}, {"text": "\"Following your comments, I've improved the article a bit more. Do you think it's ready for A-class review?\"", "score": 0.9720537931769334}, {"text": "\"Just to let you know, you appear to have conducted the same blanking of links four times within a 24-hour period, in three different instances. Shall I report you for this, or will you kindly restore the links?\"", "score": -0.4690767998930637}, {"text": "I am genuinely trying to do something useful and helpful. Why are you attacking it?", "score": -1.5755073108284188}, {"text": "That was a nice pic you put up in the Civil War article. where did you find it?", "score": 1.6146845572940507}, {"text": "\"Hi, just noticed you reverted this page back to its pre-COPYVIO state. Any reason you didn't use the content at <url>?\"", "score": 0.7844246992816121}, {"text": "\"Thanks for your prompt action at AN/I and on behalf of NawlinWiki; as you gathered, I wasn't sure what to do but it's clear that you did. 
Two questions: is there any policy of which I should have been aware that governs this situation, and what (if anything) needs to happen to notify NawlinWiki?\"", "score": 0.9954298232443959}, {"text": "\"Correcting an article to remove mention of something that ''was made up here in Wikipedia'' and does not exist in any formal definition of the article's subject is a furtherance of the <url> policy, not vandalism. Are you interested in knee-jerk reverting, or in actually making your encyclopedia correct and accurate?\"", "score": -1.7695912541662886}, {"text": "\"Wikipedia is not IMDb, which can easily be used (and is more suited) to providing complete soundtrack listings for pretty much every movie ever made. Would you care to address any of the other points I've brought up?\"", "score": -0.6397663331113204}, {"text": "\"Hello. Since you have contributed to <url> in the past, could you spare a minute to discuss the fate of <url>?\"", "score": 1.096368621832008}, {"text": "\"<url>. Congrats, or should I say good luck?\"", "score": 0.5523945647555693}, {"text": "\"Nice addition to the <url> article. If you have the works from which those thoughts are taken, could you add a citation(s) for them?\"", "score": 1.5675411131116328}, {"text": "\"Greetings, sorry to bother you here... I have been using the MissingTopics tool for years, to generate <url> The last time I used it successfully was Nov 16. Something must have changed in the algorhythms since then, because it will no longer read any page starting with an Amharic character... Is it broke?\"", "score": 1.4402837096215213}, {"text": "\"Thanks for nuking that thing. While the page was still present, someone opened an <url>...could you please close it?\"", "score": 1.01208606047429}, {"text": "\"I did notice that some articles linked to the ones I worked on are now missing, which leaves a link hanging. 
If you're going to delete articles, shouldn't someone clean up these broken links?\"", "score": -1.0608084048963538}, {"text": "\"There is a lot to learn - but to date I have concentrated on content, followed by style. If I have to keep removing silly comments, though, I will get discouraged - is there an easy way to reverse somebody's goofing?\"", "score": -0.6820106011062235}, {"text": "It's not Bowdlerism to delete nonsensical trivia about which movies an actress appears nude in. Should we indicate what color her hair is in those movies?", "score": -1.272548480956333}, {"text": "\"Yes, but not for long. How are you?\"", "score": 0.6632907035950188}, {"text": "Hi! Could you consider speedy deleting <url>?", "score": 0.6918973056537131}, {"text": "See my recent edit as he was not charged for one of the events so not all are criminal. What do you mean by my identification?", "score": -0.5280774847750728}, {"text": "\"Your <url> give your first edit as on April 9, 2011. Do you have any other account you have not disclosed yet?\"", "score": -0.6582412683520472}, {"text": "\"I have indicated on <url> that, should you be desyopped, I will immediately re-nominate you for sysop. Would you accept the nomination if I did?\"", "score": 0.7590715489845055}, {"text": "\"On the WP:Football discussion you mentioned that Man City had 150,000+ views more than the seasons article. Please could you tell me where I can find page-views for wiki articles?\"", "score": 1.0844116768358207}, {"text": "\"When I wrote <url> I was not quite sure if it was a synonym of <url>, and hence did not perform a merge. Could you clarify if they are indeed the same, and if so whether we should merge the articles?\"", "score": 0.9377117721513504}, {"text": "It has been a pleasure working with you on the Burma campaign. 
What is your source for the Japanese information you have been adding as the books I have been reading on the subject are all from a British or American perspective?", "score": 0.9000695956787809}, {"text": "Your comments have been extremely helpful. I'd like to put this FAC to bed soon; is there any chance that you might be able to finish off your review of the article this weekend?", "score": 1.0346221510115627}, {"text": "\"Good one for the beginning. Is it possible to paint, say, red color an area between the null line and the curve below it, and blue (or green) an area between the null line and the curve above it?\"", "score": 0.6238411709887643}, {"text": "\"I think I'll leave it to the Math WikiProject to decide whether the dab page should be at <url> or <url> since they'll be more familiar with the importance of the various functions. It looks like all of the basic options I was considering are basically OK from the perspective of the DAB project, right?\"", "score": 0.6984418833231298}, {"text": "\"Thanks. And, what about the <url> about \"\"he drowned\"\" not being reflexive in Spanish?\"", "score": 0.9911201041331891}, {"text": "\"Your poorly written, bizarre comment makes no sense, since all of my edits in the Al Franken article conform to wikipedia's NPOV policy. What are you talking about?\"", "score": -2.2987330643300288}, {"text": "REVERT???", "score": -1.2716731922452815}, {"text": "Thanks for striking out the comment. Any chance of a support/oppose vote?", "score": 0.8453220921123517}, {"text": "\"Looking at my various edit summaries to <url>, I don't see any where I mentioned anything about another example. Can you say which edit you have in mind?\"", "score": 0.7032258489478356}, {"text": "\"Greetings, I ran across your 2008 article <url>, and the footnotes are done manually vice the usual Wiki way. 
Any chance you can go back and fix the footnotes?\"", "score": 0.7655353854230595}, {"text": "\"I'm loathe to come running to an admin, but Gregcaletta is just making the same edits that I had undone before and made objection to on the talk page without any attempt to address the previous objections on the talk page. Now what?\"", "score": -0.9430737788126098}, {"text": "It's on the talkpage - you are deleting like crazy. I can ask for a Cabral if you like?", "score": -1.1680444590305215}, {"text": "\"@Chzz, the coverage in the references provided are a dedicted chapeter and a reproduced artice??\"", "score": -0.576983756659681}, {"text": "\"Hi Darwi. Just to ask you as curiosity, how did that debate ended?\"", "score": 0.6112903542930918}, {"text": "Any idea what happened here <url>? Buffer overflow because my comment was too long?", "score": -0.6603855879145996}, {"text": "\"I thought that, when some informations are mentioned about in ''desperate housewives'', it is a reliable source? So, in your way of thinking, Susan Mayer maiden name is not Bremmer or something like that?\"", "score": -0.9697505064152414}, {"text": "I refer you to my last answer about independent sources. Do you have press coverage?", "score": -0.5212326449399919}, {"text": "\"I'm kind of afraid to undo <url>, mainly because I have doubt in whether the nihongo template is necessary at all. Thoughts?\"", "score": 0.5426474776034549}, {"text": "When you deleted <url> did you accidentally overlook the <person> template? Or do you have some reason for ignoring it?", "score": -1.230706992372875}, {"text": "\"Oh, it's not that big of a deal. Drew, which IP edit did you consider to be \"\"attacking\"\"?\"", "score": -0.6287904883316046}, {"text": "Did you upload ''Image:1111.jpg'' to make a point about censorship? What exactly is your intention with that image?", "score": -0.7300840546573324}, {"text": "\"Hi, do you mind if I move your hook to go live on 26/27th? 
The GLAM WIKI conference could do with being highlighed by a non US/UK hook?\"", "score": 0.6134136129202018}, {"text": "Please stop mentioning my name in relation to FW. <url> How many times do I have to ask?", "score": -1.4771066472155254}, {"text": "\"I don't think I've used it in Wikipedia. So, if I haven't, why did you bring it up?\"", "score": -1.0139744248870481}, {"text": "\"Fair enough, thanks for assuming good faith, I was merely making the page consistant with the past 2 champions league season articles (<url>, <url>) which use path and non-champions instead of route and league. Should they be changed too or was that what they used to be called?\"", "score": 0.6710939065252034}, {"text": "\"One of your edits concerns me - <url>. Maybe a copyedit would be good, but was it really required to be deleted completely?\"", "score": -0.8100349612933471}, {"text": "\"Steelbeard1, I'm very sorry about confusing the rabbit character in <url> with the official Bugs in <url>. He may not be the gray rabbit that we recognize, but he is supposed to be Bugs in prototype form, right?\"", "score": 0.5489360487150142}, {"text": "\"I hardly ever agree with Jimintheatl, but that block was bordering on absurd. Fast on the button much?\"", "score": -1.2505485639753962}, {"text": "\"You seem to have plenty of time to discuss, why don't you try to educate <url> ? Maybe he needs this more than me ?\"", "score": -1.2604545250149413}, {"text": "\"Let me know what you'd like me to do. Are you/anyone thinking of copying over the temporary copy wholesale or in parts; destroying the edit history of either, both or neither..?\"", "score": -0.4785811203832626}, {"text": "I saw you put images for the Kings of Bhutan and thank you. Could you find an image to use for <url> also?", "score": 1.0986346898916102}, {"text": "Thank you for your kind comment. 
Do you have a suggestion where the portals should be placed in the article?", "score": 1.7856355519132827}, {"text": "\"I think I updated the pharmacology project banner to put C-class articles into the appropriate category (previously, it was putting them into the unassessed class. If you want to double-check, that would be great; I think it may take a little while for it to update the articles?\"", "score": 0.8096869823584509}, {"text": "I do not know how to merge them. Are you familiar with the process?", "score": 0.5256025016041704}, {"text": "\"Yes, that's fine, especially with the smaller sections. Where will they be posting their topic ideas?\"", "score": 0.6004055721499562}, {"text": "\"Yes, I know what a score ''card'' is, but you wrote \"\"score '''car'''\"\" in the article. Was that a typo, or is there really such a thing as a score \"\"car\"\"?\"", "score": -0.9407276340725771}, {"text": "\"Thanks a lot for clearing that up. Do you also happen to know about the age of consent in Thailand, and why he could be charge with child sex offenses even though she was 17 years old?\"", "score": 1.1157317872250274}, {"text": "How are these inconsistent with the traditional account of the origins of the Torah involving redactions as late as Ezra? Why do we need bizarre hypotheses about documents being interwoven that are contrary to all known examples of how books are written?", "score": -0.9391035896618245}, {"text": "Helped them do what? Leave Israel?", "score": -0.7795314514098232}, {"text": "Shane I reverted the edit you made this moring <url> but on second thought I would like some advice. Is it implied in stub that they do not have references or is it appropriate to mark stubs as requiring a reference?", "score": 0.8340449446105094}, {"text": "\"Lil-Unique told me it with the reasons of what I put in my edit summary, so if you need a better explanation ask him. 
I left Billboard Hot 100 as Billboard Hot 100 because that's the exact name of the chart?\"", "score": -1.0833764870985942}, {"text": "You deleted Ricardo Marinello. What was written in the article?", "score": -1.0054407178465383}, {"text": "\"Thanks for the alert. By the way, how's Granny doing?\"", "score": 1.257772787971072}, {"text": "\"How exactly is a one-time martial arts tournament, all of whose participants are red-links, notable? Is Wikipedia to have separate articles for every martial arts tournament ever broadcast on television?\"", "score": -0.8589137941574023}, {"text": "\"Is 10 months sufficient time to relist a AFD? Not too soon, correct?\"", "score": 0.7663322898012319}, {"text": "\"Hi, I see you voted \"\"support\"\" but in the neutral section. Would you like to move that to the support section?\"", "score": 1.075622917062536}, {"text": "After ''New Jersey'' leaves the mainpage it will be nessicary for us to check all the article linked to USS ''New Jersey'' and ensure that no subtle vandalism crept into them by means of the USS ''New Jersey'' article. Are you willing to help?", "score": 0.5530392749268087}, {"text": "\"No worries, please, dear - I'll keep this stage open at least until tomorrow night if you wish. Sounds ok to you?\"", "score": 1.4124276869152397}, {"text": "It looks like you tried to file a checkuser case but you never created the subpage. Do you need some help?", "score": 0.883924925744951}, {"text": "Sorry to not reply earlier regarding Deathrockeru2014I've been away. Has the situation calmed?", "score": 1.0681832137906404}, {"text": "\"I'm glad you're pleased with the general appearance. Before I label all the streets, is the text size, font style, etc OK?\"", "score": 1.426938596583242}, {"text": "You come across as a contributor who possess great depth. Are you a history professor/ student?", "score": 1.0027801691173797}, {"text": "SuaveArt has edited his post on the Wikipedia Review and is now lying about me. 
Did you see the actual post there before he changed it so you can back up my cut-n-paste of what it said?", "score": -0.6454169602464701}, {"text": "WOW. PLEASE tell me that this will be going to FAC?", "score": -1.0757661029007508}, {"text": "You said other sources supported the info in the infobox yet you did not indicate what sources. And why can't we use a primary source for his birth date?", "score": -0.9444214064787564}, {"text": "It is an obvious and strongly followed point that april fools jokes ''should not disrupt the running of the wiki''. Could you explain why the HELLS you coated a dozen users' pages with pictures of jimbo's ugly bearded fizzogg?", "score": -1.8295366592714828}, {"text": "I have no clue. Isn't there a WikiProject on radio stations you can ask?", "score": -0.7945308703179752}, {"text": "\"I've removed my remark. But if somebody puts words in another's mouth, and that second person states that is not what he meant, and the first person responds that no, that ''is'' what the second person meant, well, wouldn't you say that's a little weird?\"", "score": -0.9264326092839387}, {"text": "\"Hey, I reverted your edits on recognition because the statements were already relayed in footnotes, which I hadn't realised when you brought it up on the talk page. Did you want to move the statements to the main text?\"", "score": 0.5240573154674805}, {"text": "\"Thanks for the infomative reply -- I wasn't really questioning your putting the image in the infobox; appearance-wise I don't think it makes much difference. It is better, though, I think, to have the thumbnail indicator -- Is it possible to have that inside the infobox?\"", "score": 1.025409933253465}, {"text": "\"I've asked participants to shift the discussion away from Karchmar--that discussion isn't producing any new arguments or sources. 
Would you be able to contribute some thoughts as to where we might go on the topic of collaboration (i.e., tomorrow)?\"", "score": 1.2175925552707352}, {"text": "\"Hi, thanks for unprotecting the <url> article. Could you also unprotect <url> so a redirect can be created?\"", "score": 0.985612296397641}, {"text": "\"Thanks for the heads-up on that; I don't really have anything to add beyond what you'd said. By the way, I noticed you said you weren't an admin: would you like to be?\"", "score": 1.10848048761693}, {"text": "\"Also, why is this glorious photo of a <url> not used in any articles? Was there some argument about its copyright status?\"", "score": -0.7227640205339495}, {"text": "\"Why did you revert my edits on that page? You say it was unsourced but it was sourced, I put two references to two fair sources (not blogs nor politicaly bound), so why did you remove them?\"", "score": -1.82477704481284}, {"text": "Doing it under the guidance of the relevant <url> would be a good idea as the folks that are active in the project will be best placed to advise on which templates and styles fit in with the rest of the project. What is the page/subject you are working on?", "score": 0.590761217190825}, {"text": "It's a low-profile page in the sense of not getting much attention from editors. Why do you think addressing Luther's views on grace is necessary to understand contemporary philosophy?", "score": -0.6610487402736915}, {"text": "You do not appear to have alerted the other previous Delete advocates yet. May we take this as a tacit admission that actually you ''were'' intending to skew the deletion review?", "score": -1.637575629400565}, {"text": "\"Please see my comments on <url> about a remark you added to <url> about the etymology of the term \"\"minim\"\" in music. Got any sources for your suggestion?\"", "score": 0.7806451609322264}, {"text": "\"Hmm. 
You ''do'' realize that two of the \"\"please sign\"\" diffs are from <url>, and that the talk page is splashed with <person> which has the sig thingy?\"", "score": -1.2573522728648387}, {"text": "\"I can certainly suggest to Isarig that 3RR is not an entitlement, and that dicussion is often the better approach. Would you like me to try to mediate the Karsh page?\"", "score": 0.7023606046102907}, {"text": "\"From my reading of the entries, the Japanese force in <url>/South <url> & the <url> appears to have been the <url>, rather than the <url>. Does that sound right?\"", "score": 0.6740188193152139}, {"text": "\"Any thoughts on what we should do with this? Delete it as a duplicate, or copy to Commons and rename?\"", "score": 0.6688639745354708}, {"text": "\"I started, but gave up. And why link the word 'mother'?\"", "score": -1.0420349600255425}, {"text": "Thank you very much for your kind comments but I don't recall if or when I made these contributions. Are you sure it was me?", "score": 1.470095310028339}, {"text": "And what about <person>? Does it need fixed now too?", "score": -0.4458497080072755}, {"text": "Thanks for your help on <url>. Any advice on edits we should make to get it up the quality scale?", "score": 0.6296951562552532}, {"text": "Hello? Whom do I speak to to become a <url>?", "score": 0.5284336241763055}, {"text": "\"Can I you take a look at <url> and <url>? Specifically, look at the formatting of the general reference; is it an acceptable way to present the references?\"", "score": 0.9578920824711951}, {"text": "\"Yeah. What's the point of reverting them again if they'll be removed, anyway?\"", "score": -1.3787830207399139}, {"text": "\"I haven't done an 'official investigation' but perhaps it would be revealing. Alright then, save us the time...enlighten us--how many links have you added to WP that are owned by Stephen Barrett?\"", "score": -0.8456555405213304}, {"text": "\"Yes, I just saw that. 
DJ, if you wished to delete the still tagged pages then that was your judgement call but to delete pages which I had already declined a speedy on was rather bad form wouldn't you say?\"", "score": -0.7091738608256176}, {"text": "\"Btw, I've issued a request for discussion on the Canadian Wikipedians' notice board about a dispute involving myself and another contributor on <url>. If you have a chance, could you look this over?\"", "score": 0.8009078307516951}, {"text": "Thanks for the heads up. What damage did they do?", "score": 0.5572873537541293}, {"text": "\"I've created <person> to deal with the new CSD A8, deletion of blatant copyvios. Mind to take a look and tell me if there's anything wrong with it?\"", "score": 0.7595273271685905}, {"text": "\"\"\"New dictator?\"\" Who's the old one? And who are \"\"others in your gang\"\"?\"", "score": -1.0836750577943755}, {"text": "Drat forgot to sign. You blanked my talk page and asked if I don't like K's?", "score": -0.8639511305310974}, {"text": "Got another one: <person>. Can't someone with CheckUser abilities block the IP address?", "score": -0.47836185640616496}, {"text": "\"Errr, okay. But let's make sure our answers are accurate by using <url> rather than something we heard someone say, eh?\"", "score": -0.9454086603955363}, {"text": "Thanks for the revert on <url>. Kinda curious how this blanking is always done by IPs that trace back to USAF installations isn't it?", "score": 0.6397723617196369}, {"text": "Could you be more specific. Are there specific points in the article that you believe are repetitive?", "score": 0.8406563216149519}, {"text": "I'm afraid I don't know what you mean Arcayne. What comment?", "score": 0.5151988632497021}, {"text": "\"Oh, and I checked your user page, as you requested. 
Perhaps you could help me understand why your \"\"spouses don't confer nobility\"\" argument doesn't apply to <url>?\"", "score": -0.5370257176521809}, {"text": "I have an email from the owner of this image that your BOT deleted. Can you please restore this and I can send in the email granting me permission to post?", "score": 1.070393657876898}, {"text": "You support Scotland leaving the United Kingdom despite the fact most Scots do not? I am curious as to why?", "score": -0.6633835202639433}, {"text": "Thanks for your input. Would you mind voicing your opinion in the GMC's talk page?", "score": 1.242616089913166}, {"text": "\"In the article on <url>, you recently replaced a non-free image with J. Malcolm Greany's ''Ansel Adams and Camera'' on the basis that the latter image is in the public domain. How did you determine that this image is a work of the U.S. government?\"", "score": -0.7013636919081077}, {"text": "\"You're right, but loosing it because of Devanampriya, of all users, would be quite a shame for Wikipedia. Don't we have to make a stand against ignorance, partisan POV-pushing and constant incivility?\"", "score": -0.5044287962165386}, {"text": "Thanks for letting me know. btw on that page I found <person> but before adding that to the list I was wondering if the templatelist on the project should go so deep into detail or if it just stay Germany wide?", "score": 0.5231105954915284}, {"text": "I don't understand what you are trying to ask me or suggest that I do. Can you clarify please?", "score": 0.6104401162056857}, {"text": "Answered at <url>. Maybe you are not familiar with the expression?", "score": -0.8903189301940542}, {"text": "\"As for restoring the image, no I will not. 
Did you consider that you are asking me to put up an image for which I know the \"\"fair use\"\" claim to be bogus, and that this is illegal?\"", "score": -1.1265068981342363}, {"text": "\"I'm sure that this is not a useful place to discuss the merits of your topic ban, which I think would need a wider audience to overturn. Perhaps you could make a request to the arbitration committee?\"", "score": 0.5631472811021496}, {"text": "I'm not sure what you mean by pointing out that votes were placed before the version you created. Could you elaborate?", "score": -0.6269725169754328}, {"text": "\"I nominated <url> for speedy renaming to <url> and you removed it saying it was moved to /Working, but I don't see it there and it hasn't been renamed. Do you know what happened?\"", "score": -0.5362089835775623}, {"text": "\"The page is getting smacked literally every half minute or so, and I think the joke's worn thin. Give us a break?\"", "score": -2.028608274522079}, {"text": "I believe supserstition exists. Why did you ask me?", "score": -0.587163992155249}, {"text": "\"Do you know how to make the 'All Star Template' appear only at the bottom, as well as the 'Confirmed future sites', so the actual table appears at the top of the page??\"", "score": 0.5639859659855537}, {"text": "That would be awesome. Do you want to ask someone for clarification on whether project sandboxes are allowed?", "score": 0.8802455640752717}, {"text": "\"I never received any notification of any deletion proposal, lest I would have contested. Is there any way to find the original proposal and/or artical, or has it just been sent to oblivion?\"", "score": -0.6408047796833094}, {"text": "Is the new draft at <url> OK with you? Do you have any comments or suggestions?", "score": 1.546544544097206}, {"text": "PS I am planning on starting an article on ''<url>'' (<url>). Maybe you'd like to help?", "score": 0.9871166292666544}, {"text": "I hope this makes my concerns clearer. 
Perhaps we can work together to resolve them?", "score": 1.074113329349387}, {"text": "\"I note also that so far you seem to be the sole editor who favors deletion, while several have favored retention of the article, possibly with editing. Could it be that your view does not command consensus?\"", "score": -0.6101070102558257}, {"text": "\"I advance (Ultra-Orthodox) Rabbi Dr Mordechai Halperin, (Ultra-Orthodox), Rabbi Gershon Winkler, (Conservative) Rabbi Elliot Dorff, to you, for example, that the Torah and Talmud only forbids anal sex. Now explain exactly what division of Judaism is left, for you to claim your view fairly represents?\"", "score": -0.5836951659709739}, {"text": "Thanks for adding that pmc= support to <url>. I made a suggestion for further improvement in <url>; can you please follow up there if you have the time?", "score": 1.2784141639927482}, {"text": "If it were me I'd want to try and find out more about how/why this happened first before I continued to use that software. Have you asked at the talk page I mentioned above?", "score": 0.5776908827017753}, {"text": "\"Thanks for the username change. Now, one last thingu2014since the account 'Dalahxe4st' already exists on Swedish Wikipedia and doesn't appear to have been taken over automatically, where should I go to take care of that?\"", "score": 1.3032501668567178}, {"text": "\"If the only page you've been visiting is Wikipedia, then it makes sense the viruses would come from here, but if you've been to other sites, they get thrown into the equation. Again, don't you have any anti-virus software?\"", "score": -0.48449919790211443}, {"text": "I will readily grant that. But where was the urgent reason not to follow it in any of the above cases?", "score": 0.6402883521029386}, {"text": "\"Yep, I'm definitely free for the whole break. Had fun in your trip?\"", "score": 0.5778578730280901}, {"text": "\"I know, I understood that. 
:) I didn't scold you for it at all, did I make you feel that way?\"", "score": 0.6418640599716874}, {"text": "\"It was for closure's sake, as to show that the debate was closed and that was the result, the user was '''blocked''' and the userpage was '''deleted'''. Doesn't that show you the items better?\"", "score": -0.7837498261673278}, {"text": "Have you visited <url>???", "score": -0.6270155009034243}, {"text": "\"\"\"Oh, that\"\", eh? So, you ''wrongly accused me of making death threats'', and that's all you have to say for yourself?\"", "score": -0.9687485837995821}, {"text": "\"And \"\"...definitions...\"\". What do you mean by \"\"...definitions...\"\"?\"", "score": -0.9052603241324224}, {"text": "\"...I have no clue what you're asking me about or seeking me to do. And what does \"\"RTL wikies\"\" mean?\"", "score": -1.538405033885509}, {"text": "Huh? What was that supposed to mean?", "score": -1.161115187029818}, {"text": "Sounds interesting. Could people keep me in the loop as well?", "score": 0.75235307132155}, {"text": "\"That sounds fine, but why would you want somebody who knows nothing about the show to write them? If you are informed about it, wouldn't you be a good person to do it?\"", "score": -1.0242079917432112}, {"text": "\"ok that was really confusing, I clicked on this link <url> and then cleaned the page but the page still exists, sso from what I understood if you put the ending \"\"/double redirect\"\" at the end of any article, you'll see how many redirects it has? is that right?\"", "score": 0.5939456377406735}, {"text": "He published the document himself for the world to see. What privacy?", "score": -0.889681574913582}, {"text": "\"The proper title of this image should be Eszopiclone.svg, since it depicts the S enantiomer. 
Could you change the title so I can upload an image of the racemic compound instead?\"", "score": 0.5644604796640516}, {"text": "\"In my calculation if I got blocked that would only be good for me as it would clear my mind from the obsession to try and change that which seems wrong and mutable, but carries no private benefits for the reformer. Is that line of thinking truly alien to you?\"", "score": -1.562176587894668}, {"text": "Why doesnt belong there??", "score": -0.9157887168826608}, {"text": "I think it is important to note that Spirit 03 was the last gunship to be brought down and that it has not been involved in a Class A mishap since that date. Should we include that?", "score": 0.9033507570305168}, {"text": "\"If it's alright with you, I'm next going to start from 1930 and work back towards where you are, since I'm particularly interested in the Lang era. Is that okay?\"", "score": 1.1538390015545807}, {"text": "\"Hi, I wanted to ask what do you follow to create redirects on this <url>??\"", "score": 0.5629407562670737}, {"text": "\"The flag \"\"visible=yes\"\" means a comment on the page is made visible, not that the page is visible in search engines. Why did you remove flag?\"", "score": -0.6709653895603306}, {"text": "\"Hi, nice article. What is meant by \"\"speculative townhouses?\"\" Ones that were built for prospective renters rather than for committed buyers?\"", "score": 0.829104634463004}, {"text": "\"One wonders, of course, who \"\"Elliott of Macedon\"\" would have been. Probably something analogous to Brian of Nazareth but in a Macedonian phalanx?\"", "score": -0.5422476421167955}, {"text": "\"I've just reinstated the perfectly valid redirect to this. 
I seem to remember there was some issue years ago with a rock group of the same name, but don't you look at these things before you delete?\"", "score": -1.5562714989590014}, {"text": "\"I'm wondering why in the <url> article, you keep changing the link <url> to <url> which just redirects to <url>? Why not just leave the link to point to the intended article directy?\"", "score": -0.9115731722771683}, {"text": "\"On your Citation templates subpage you have the statement \"\"CS1 and CS2 both use <person> as a meta-template and are variants of APA style.\"\" In the past I have had experienced editors tell me this, but none of them could point to any documentation or contemporaneous discussion to verify this statement. Can you point me to any confirmation of this statement?\"", "score": 0.7661554860995102}, {"text": "\"Also, in accordance with <url>, I think the Turkish Army Corps should be 3rd Corps, 4th Corps, 5th Corps, not III Corps, IV Corps, V Corps. What do you think?\"", "score": 0.6394818800628885}, {"text": "\"I read his note, and you should know that as I responded to it and then you responded to me. So where did the doubt come in regarding that matter?\"", "score": -0.7699897512625468}, {"text": "\"Could you take a look at the article now and see whether your concerns (except the reorganization) have been addressed? If not, what is still missing, and what technical terms do you feel need explaining?\"", "score": 1.0398176547021503}, {"text": "\"Also, is <url> about chemistry, sociology, theology, international law, or what?? '''I''' know the answer to that question, but does the person reading the article know?\"", "score": -1.3029834194416006}, {"text": "Seems to be down again. I assume this is nothing I can resurrect on my own when you have intermittent Internet access like this?", "score": -0.5238066968448576}, {"text": "\"As does ''<url>'', and ''<url>'', and even ''<url>''. Wanna be friends?\"", "score": 0.5356857110580855}, {"text": "\"Ah, I see. 
Although surely Aaron Lennon was younger when he first played for England?\"", "score": 0.6572672121332823}, {"text": "\"Hi Deb, I created the page which was deleted which I hoped could have been a stub starting point for building up a better and more detailed article about the organisation. Could you offer some advice on how I could have made it better and have avoided deletion?\"", "score": 1.0190528461090969}, {"text": "You've been asking the ''sensible'' sysops...!?", "score": -0.6777781367142468}, {"text": "\"Thanks for the heads up, I don't think they will succeed but it's good to know they're planning it. Did you let Jayjg know as well?\"", "score": 1.147434903372267}, {"text": "I think it's a great idea to get the CO3 community involved with keeping <url> correct and uptodate. I shan't be there myself this year (clashes with various things) - would you be able to bring it up?", "score": 0.7436385769518585}, {"text": "I don't understand why Hong is so reluctant to revert unilateral changes from time-to-time when he did so frequently in the past when I or someone else who supported my edits did it. What exactly is the point in me agreeing to this if someone conveniently comes along and then starts making lots of changes that I don't like but Giovanni doesn't mind?", "score": -1.117231789503816}, {"text": "Wow! How did you find this?", "score": 0.6331765654558104}, {"text": "\"I use that template, because it cannot be missed on a page, because it allows for different reasons for blocking (template:blocked is \"\"vandalism\"\", only, and takes 2 edits to change to something else), and because it gives instructions for requesting unblock. What is the objection to it?\"", "score": -0.6158177136502111}, {"text": "\"I'm disappointed that your first response here was to revert me without discussion, and to assert a consensus that was not apparent. 
Before you choose to revert me again, can you ''please'' participate in the discussion at WT:RD?\"", "score": -1.1342556794514875}, {"text": "The main article needs a facelift. Do you want to make suggestions?", "score": 0.5785770520884369}, {"text": "I say - are you a sort of picture finder? You couldn't find one of John Bowlby could you?", "score": 0.5981627503205991}, {"text": "Not that clear to me. What's wrong with this username?", "score": -0.4678629500512681}, {"text": "Thanks. Any idea how I log into this account?", "score": 0.8304484176668083}, {"text": "\"So your argument is that as long as you follow the \"\"rules,\"\" nothing else should matter? That if it's legal it's OK?\"", "score": -0.5059945476173773}, {"text": "Reply: I have just found your edits to the talk page of the above article - not impressive at all? I suggest that you take this to <url> and explain why his edits need to be reverted and not yours?", "score": -0.6599356311847252}, {"text": "\"Perfect, thank you! Can you help with the phonetics of other Nordic languages?\"", "score": 1.236243119067748}, {"text": "Shudder? Why?", "score": -1.0005234793509241}, {"text": "Thanks for adding stuff to the update. Would you like to help out with the notifications (the real pain of this ... grrr)?", "score": 0.7957539643124498}, {"text": "It's larger now than the sub-stub previously deleted. Can sources be added to verify the achievements?", "score": 0.6095735526861796}, {"text": "\"I was in doubt too, but when dealing with such incomprehensible concoctions I prefer to err on the side of BLP caution. Would you like it userfied?\"", "score": 0.7653009273904792}, {"text": "\"I wish I could help you, but I know very little about Iranian rock music. Have you asked any other Iranian members?\"", "score": 0.9325099856742847}, {"text": "\"Seeing you back on Wikipedia after a long time. 
How are you doing, and is everyone you know safe?\"", "score": 1.3147636819602495}, {"text": "Another admin has already deleted the article. Do you need me to restore it for you?", "score": 0.6132375038795398}, {"text": "\"P.S. Am I allowed to remove any of this abuse from my pages, or will this result in further accussations from <url> and attempts to get me discipled or banned?\"", "score": -1.0037770748496952}, {"text": "Ditto? whats the deal?", "score": -0.7421784439879667}, {"text": "Congratulations on your new daughter's arrival! What's her name?", "score": 1.420778499661484}, {"text": "You're a troll. Why not go somewhere else?", "score": -2.2554393149106695}, {"text": "\"It's not off topic, and I see no comment in that other discussion on the merits of WMC's Martian push -- in fact, you evaded the question. So I take it that this is a refusal to answer?\"", "score": -0.8596319074009289}, {"text": "\"Sorry, I did not know there has been a category before. Can you delete it?\"", "score": 1.035613132494894}, {"text": "I was thinking more of making the line longer so that the team's name can fit on it. Is that doable?", "score": 0.6444336386997389}, {"text": "\"ED... They have drugs to fix that now, don't they?\"", "score": -0.6968992174006714}, {"text": "\"I am reluctant to mention this, but having your paragraph in bold on <url> seems to me to be a bit self-promotional and unfair to the other members. Is there any chance that I might be able to persuade you to voluntarily unbold it?\"", "score": 0.9520809479936216}, {"text": "I seconded wholeheartedly your proposal at <url>. Could you please add a link to the discussion?", "score": 1.5211280898675197}, {"text": "\"I don't think so, Giano. Can we just once attempt to let this pass by without calling for the heads of people?\"", "score": -1.4832208111192684}, {"text": "\"Thanks. 
Does it happen to say anything about his marriage to Paula, as in when it ended?\"", "score": 0.931780010932847}, {"text": "\"Thanks, can we write an article on that ? do you think it is notable enough ?\"", "score": 0.5570720434635685}, {"text": "\"FYI, I found a unit representative, I think, see <url>. I directed him to your draft for the military reps, would you mind supporting him if you need, I am kindof wrapped up in several different things off wiki and on?\"", "score": 1.0548852969781457}, {"text": "\"When someone explicitly states that they created the image themselves, then surely GFDL is at least implied? I know i forgot to add the tag but is it really necessary to put a delete template on the image?\"", "score": -0.7117183878373347}, {"text": "\"In instances where there is one major usage of a name and several smaller ones it is conventional to leave the major one in the namespace and have a link in the top to the others (or to <url>. Surely 99.9% of people interested in <url> are going to be looking for the main one, so wouldn't it have made more sense to have left it there rather than making it a redirect?\"", "score": -0.5068332167389429}, {"text": "\"Hey, I could really use your skill on WikiProject Cities. Do you have time to assist?\"", "score": 1.000942775670539}, {"text": "I started on it. Do you more info on the last article on the list of references?", "score": 0.5871894002037881}, {"text": "Why are you putting <nowiki><person></nowiki> on several pages related with Yu-musicians? IMO this is a pretty <strike>stup</strike> meaningless stub to have; why not <nowiki><person></nowiki> instead?", "score": -1.2641870808175213}, {"text": "Why would one want to re-create GNAA??", "score": -0.891010658360589}, {"text": "\"I only used approved templates. If they are not considered polite, why have them?\"", "score": -0.744923487496558}, {"text": "There hasn't been much progress made in the last three days. 
Is anyone interested in responding?", "score": 0.8326389928674308}, {"text": "Vandalism is putting a live AFD template back? You're a smart one ain't you?", "score": -1.4264651075674615}, {"text": "\"Oh, okay. <url> should be placed as a \"\"see also\"\" on <url>, correct?\"", "score": 0.5802599601871875}, {"text": "\"That was nice work you did improving the reference I added. I still have to learn that, can you show me?\"", "score": 1.6001739103230475}, {"text": "What do you think? Is there a mistake somewhere?", "score": 0.5768269165670448}, {"text": "\"And it ''takes'' thousands and thousands of dollars to run a campaign, so exactly what's your point? Has there actually been some evidence of wrongdoing on anyone's part?\"", "score": -0.8614745536340245}, {"text": "\"Hi Editorofthewiki, both images are just fine. I don't see an open review for the article; is there somewhere in particular you'd like me to make this comment, if not just here?\"", "score": 0.8577896097221525}, {"text": "\"I notice you are still contributing to Wikipedia, but have not responded to my argument. Do you now agree with the argument I suggested?\"", "score": -0.5623572115850424}, {"text": "\"So how are u gonna concider weither the licences used by Fluckr are accurate...? ,and if this image has to bee conciderd than http://en.wikipedia.org/wiki/Image:Petercrouch_liverpool.JPG should be to..?\"", "score": -0.578284936196639}, {"text": "\"I should be able to get that Mayr paper, it's on Wiley. Do you need me to email it to you?\"", "score": 0.6341383228039141}, {"text": "\"That you would consider a section that he \"\"praised\"\" a murderer (referenced to a website dedicated to smear campaigns, no less) as \"\"NPOV\"\" is astonishing. Doesn't <url> apply here or do you expect other users to turn the attack into something NPOV?\"", "score": -1.0342469276952193}, {"text": "\"I'd love to help, but I'm not sure what you're asking me to look at, specifically. 
Is there a discussion ongoing that you can point to?\"", "score": 0.6169037259609154}, {"text": "\"My graphics editing skills aren't up to the creation of a map such as this from scratch, but I could probably do an adequate job of removing the red and orange lines and the related entries in the map legend. Would it be useful for me to take a whack at that?\"", "score": 0.9101662094348187}, {"text": "\"I'll unblock you if you give me some confirmation you've understood this and you are prepared to discuss the article through the proper channels, calmly and politely. So, cool down please, okay?\"", "score": 0.9838586317773457}, {"text": "I have proposed the merge for you at these pages: <url>/<url>. Is that what you wanted?", "score": 0.6518943730847069}, {"text": "\"Hey, I like your sharebox a lot. What do you think about trying to make it a <url>?\"", "score": 0.9204628470789638}, {"text": "I notice that you added a period to the middle of a sentence in the article about <url>. Why?", "score": -0.7060444562239134}, {"text": "You make some good points. How do you feel about nominating the article for AfD then?", "score": 0.765299769309068}, {"text": "I really like the template you used at <url> but it dosen't seem to exist on the WP:Vandalism page. Where can I grab it?", "score": 1.0793871765950525}, {"text": "Good catch. Do we have a solution for that yet?", "score": 0.5643429870349053}, {"text": "You see how time consuming and annoying it is when a person keeps on telling you to provide a source. Why do you want ''Michael'' to be just another compilation album?", "score": -1.042410462451611}, {"text": "\"Woohoo--279 hits! Hey Scientist, is that an all-time low?\"", "score": -1.0564878350751779}, {"text": "Oh..it is a spider with 'pajek'. Is it really necessary?", "score": -0.6200252463996974}, {"text": "Thanks--can you add the romanization of the Arabic at <url> too? 
Is it ''jibna baladi''?", "score": 1.2977242892806486}, {"text": "It's about freedom of speech and no Islamic terrorism is going to deter the truth from outing. And if it's forbidden to look at figures of Mohammed how come you did it; are you going to hell?", "score": -1.8945371997791358}, {"text": "\"Where do you suggest that I post the citation? In the \"\"USNA\"\" section or the \"\"Hispanic alumni section\"\"?\"", "score": 0.6885186544070383}, {"text": "\"But, before you accuse me of shying away from your second to last question, my PhD is in inorganic chemistry, homogeneous catalysis to be specific, but my latest scientific paper was submitted (still with the reviewers) to ''<url>'': it touches on the metrological consequences of the microscopic quantization of ''Q'', as it happens, although that is not its main thrust. So, do you want to thrash this out among scientists, or would you prefer to waste your time with ArbCom?\"", "score": -0.9229749718889829}, {"text": "\"Thanks for the <nowiki>{{For</nowiki> templates on Tkachyovs, they help a lot. Maybe in this case we should use the patronimics in the titles and have Alexander Tkachyov as a disambig?\"", "score": 1.2080448459819428}, {"text": "\"Ok, I will create. You will help to expand it?\"", "score": 0.5539001261192397}, {"text": "\"Some time ago you uploaded an image of the Knowledge Navigator, which was speedied some time later. Can you upload the file again?\"", "score": 0.6426180908914798}, {"text": "Well this seems odd to me. Don't you find it strange that otherwise authoritative sources like Emulex and IBM would be so full of mistakes?", "score": -1.0071743190313578}, {"text": "\"Dana, before I give the examples of what sections or parts in the article that needs fixing, I need to know if <url> is still gonna work on the problems. 
Do you get me?\"", "score": -0.6716473618427974}, {"text": "Take a look at: <url> Baltimore's were ordered as Recon bomber/ Great Britain - this is the GR designation??", "score": -0.4883665053065262}, {"text": "Do I get a reference to my comment? Show me where I refuse to provide evidence backing up my changes?", "score": -1.4787581733794661}, {"text": "Not trying to pick a fight - a user that reverts all articles to a version by you is suspect. Any New Year's resolutions Will?", "score": -0.4510694777353491}, {"text": "It looks like the current version of <url> has been reworked since when you built it in July 2005 and references have never been provided. Do you have any references you can add to the article?", "score": 0.8095756900607693}, {"text": "\"<url> is having <url> spelling \"\"<url>\"\". Perhaps you could help?\"", "score": 0.5680784642926164}, {"text": "\"Right, so the links on your userpage? And what is the \"\"article\"\" you want me to check over?\"", "score": -0.47246127826444473}, {"text": "\"Thanks; I expected a decline, despite my extensive comments at <url>. As there is nothing more to this article than listings of bylines in non-notable publications, minor book acknowledgments, uncredited movie extra roles, blog comments, paid directory listings, and unsubstantiated claims about wildly over-the-top PR accomplishments, what exactly do you consider to be the examples of \"\"credible notability\"\"?\"", "score": -0.6128956167367685}, {"text": "\"Although Singapore does not use Traditional Chinese, I still think we should at least add Traditional Chinese titles for shows not made by Mediacorp, and bought from overseas (mainly Hong Kong) TV stations. What do you think?\"", "score": 0.6422347434803652}, {"text": "\"And I guess with exception of 7 countries that recognize same-sex marriage, everybody else would end up in \"\"homophobia\"\" category. Or what would be the threshold?\"", "score": -0.7909150620172732}, {"text": "Do you live in the US? 
How excited are people about the album?", "score": 0.5937127339633316}, {"text": "Good shot of Danica's car qualifying at Japan. Were you able to see her win the race?", "score": 0.8301232749170167}, {"text": "\"Your first attempt at doing this also added a <nowiki>\"\"<url>\"\"</nowiki> line to the article, and that's what triggered the bot. Perhaps you should get into the habit of using the 'Show changes button?\"", "score": -1.1720698120331492}, {"text": "\"Hmm, page says you are unavailable right now. This means you wont be fixing up <url> any time soon?\"", "score": -0.6039238061811714}, {"text": "I apologize if I have misunderstood. Are you not an acknowledged devotee of Prem Rawat?", "score": 0.614814924643295}, {"text": "You're mentioned <url>. What do we know about these images?", "score": -0.48411419010977436}, {"text": "Sounds good to me. Should we start preparing for it now just in case the GA reviewer notices that something significant is missing?", "score": 0.6807805733518711}, {"text": "\"Hi, sorry I think I'm missing something here. Why are you adding a red link to the vandalism page?\"", "score": -0.44591441818704086}, {"text": "You are moved <url>. You know that ''major warship'' is clasification in <url>s ?", "score": -0.45435368773047}, {"text": "I'm afraid <url> is slightly under the 5x expansion threshold. Could you please see my comments at <url>?", "score": 0.9568104610689581}, {"text": "\"I think these are reasonable, and should protect you from lawyering. Should I go ahead and add this?\"", "score": 1.1381561059514425}, {"text": "Note however that <url> does not exist. Which page were you talking about in this case?", "score": 0.5216878990034448}, {"text": "See <url>. Are <url> and <url> the same team?", "score": -0.47272392116490786}, {"text": "Well the only plausible explanation was that you reverted to where I removed an extraneous cat (after Homestarmy's edit) and then swapped out a template... 
but as you're a competent editor that just seemed odd. Is that what indeed occurred?", "score": 0.9026502368939864}, {"text": "Vi is out of the hospital. Did I mention that she was in?", "score": -0.48249542799026185}, {"text": "\"That's reason to expand the treatment to other painters, not to blank this one. And you didn't think that there would be more articles on van Gogh paintings?\"", "score": -1.5922321438187435}, {"text": "\"Hello, I've written a page about the <url>, shamelessly copying the style from your CCT/FoFC lists. In the process, have I missed anything out?\"", "score": 0.6423580420469185}, {"text": "\"'''<url>'''Please also answer the questions at paragraph '''<url>''' above, as you are looking like a classic bully admin who abuses his powers and should be stripped of that role. Is there any review process in wikipedia, for proposing that you be stripped of your admin powers for bullyism?\"", "score": -1.8235420635186965}, {"text": "\"Sure thing, you've been doing great work so far. Would it help to maybe translate the whole article first as it is, and then over the course of time you can play around with the sections and add stuff that you find?\"", "score": 1.2215735527481684}, {"text": "\"I have also put speedy delete tags on <url> and <url>. Was there any particular reason to move your user/user talk pages to that name, and back again?\"", "score": -0.7276718498398813}, {"text": "That's OK &mdash; my pleasure. Have you tried the IMDb?", "score": 1.0596059821251544}, {"text": "\"Maybe I'm on crack, but in your edit summaries you're saying \"\"MT\"\" and \"\"Not MT\"\". What is MT an abbreviation for?\"", "score": -1.0084886521760463}, {"text": "\"\"\"The\"\" house arrest? Shouldn't it either be \"\"her\"\" house arrest or just simply \"\"house arrest\"\"?\"", "score": -0.5326996920282121}, {"text": "I'm going to remove it. Is that okay with you?", "score": 0.8663207019982126}, {"text": "\"Ok! 
First things first, is there anything in particular you would like me to help you out with?\"", "score": 0.9784049541708496}, {"text": "Replace the 100px with the size you're looking for. Did this help?", "score": 0.7140756320668113}, {"text": "Thanks for the detailed reply. Can a random admin who notices the image with the speedy tag do something useful to fix the situation?", "score": 1.1089798197855474}, {"text": "\"Thanks for pointing out a problem with the image licensing ... I must admit despite being a seasoned contributor I find this image licensing stuff pretty confusing and hard to get right .. are there recommended examples of standard things like book covers, album covers, paintings etc? Have I done the Jonah cover ok now, or not?\"", "score": 1.0858381457325499}, {"text": "How did I vandalize!!??", "score": -1.6739265193304527}, {"text": "\"Thanks a ton for looking that up for me. For the purposes of a citation, what are the page numbers for the whole article?\"", "score": 1.035626913208}, {"text": "\"I found <url> while working <url> from <url>, my first impulse was to give it a prod, an long unreferenced orphan without a clear reason to exist. I see you are still active on Wikipedia, is there chance you can do something for this article?\"", "score": 0.7212334471439509}, {"text": "Thanks! =D And how soon are we going to try to get The Lost Age to FA?", "score": 0.8906624988100106}, {"text": "\"Um, what? So presumably you'd like to change \"\"Irish footballer and manager\"\" to \"\"Republic of Ireland-born association footballer and coach (sports)...\"\"?\"", "score": -1.233121563580518}, {"text": "Why do you have a problem with the inclusion of Lady Bristol's persona prior to her 2nd marriage? And to the inclusion of Somerset's son-in-law?", "score": -1.0326720266712108}, {"text": "User 86.138.232.97 does have a history of malicous edits. 
Are you then confirming that these malicious edits done by user 86.138.232.97 were done by yourself?", "score": -0.9309287924857754}, {"text": "Either <url> is really misinterpreting the guidelines or I missed out on something. Care to shed light here?", "score": -0.8821552743275459}, {"text": "And now you undid my declination of that one which was copyrighted. Would you ''please'' quit undoing my declinations?", "score": -1.700766347582174}, {"text": "\"Hi, hopefully I will have the <url> ''wonderful-joy-of-a-job'' task done by tomorrow so will be able to join you with the infobox-adding task thereafter. Do you have a method, or is it just A &ndash; Z?\"", "score": 0.8313548071485289}, {"text": "Where have you put this material? On a user page somewhere?", "score": -0.6873605576497571}, {"text": "\"Hi Tim, I received the photograph from Delft but it's page looks a mess and I can't edit it. Can you, or do you know someone who can, fix it?\"", "score": 0.5900476718154912}, {"text": "\"OK, I give up? What notability is asserted?\"", "score": -0.6886777592522854}, {"text": "\"For one phonological citation I need an uppercase \"\"I\"\", as in \"\"Italy\"\", with _serifs_ to distinguish it from lowercase L. You advised \"\"The Classic skin uses a serifed font, if you can accept a personal solution.\"\" I'm sorry, I don't catch your meaning. Where do I find the Classic skin, what is a personal solution, and why wouldn't I accept it?\"", "score": -0.6232136056448753}, {"text": "Thanks for that - very nicely commented code (rare to see)! It seems to need a table to be set up - do you have the schema or sql for that handy?", "score": 1.1463509337072462}, {"text": "\"Why am I never asked to write a feature. Is it because \"\"I is black\"\"?\"", "score": -1.1989050520056206}, {"text": "\"Is this like a printing error on a postage stamp, i.e. a rarity?\"", "score": -0.45539257212894324}, {"text": "\"I'm happy to help! 
After all, what's the point of acquiring knowledge about a process if I don't use that knowledge to help others?\"", "score": 0.888771250008474}, {"text": "\"Hello, our project has begun to fall a little bit behind the collaboration/organization department, would you, as an experienced editor be willing to help us revamp how we deal with issues? The template is a little behind because of this, but it points to some good places:) Do you have anything in particular you would like to do with us?\"", "score": 1.0487465160826084}, {"text": "\"Hi, would you have any objection to renaming the sub-categories of <url> to look more like the articles which describe them - i.e. <url> -> <url> and so on?\"", "score": 0.8732124752295036}, {"text": "<person> I find this conversation remarkable given that you have twice opted to edit war over referencing format to the detriment of an article's quality in the last 24 hours on <url>. What gives?", "score": -1.7250669145864528}, {"text": "\"I started thinking of how to copy-edit it, but then lost interest... what is this thing that Americans have about writing about their bloody roads, for goodness sake? Can you imagine the <url> as a featured article, or having a \"\"List of A-roads in Gloucestershire\"\" as a featured list?\"", "score": -1.2009628733642819}, {"text": "\"The cover art image shows annual #2 (1983), and says \"\"introducing superwoman\"\". Is that what you were looking for?\"", "score": 0.5501513422560803}, {"text": "\"Could you kindly stick to the issue at hand in the FAC? Which is to discuss the article, not me?\"", "score": -1.6949397561912583}, {"text": "\"Thanks for your work in cleaning up <url> (which is now finished, but for a few hard cases which likely will require the writing of articles to conform to those links). Would you like to collaborate on another section of this list?\"", "score": 1.087828956645159}, {"text": "\"Thanks for reverting <url> - that was actually me, unlogged in. 
OK if I re-revert?\"", "score": 0.6246760941913306}, {"text": "Perhaps you can help me. How do I view the status of a vandalism report I've made?", "score": 0.5684519316891516}, {"text": "\"As another aside, they are mostly Germans. What's up with that?\"", "score": -0.8779805643032281}, {"text": "Thanks for the correction but I am still working on the table. Could you hold off for a bit so our edits don't cross?", "score": 0.7011983415628029}, {"text": "\"That's \"\"fucking consistent naming-schemers\"\", please. Unless there's some actual reason for a) restriction to highways, b) infeasibly small stub categories, and c) inconsistent capitalisation?\"", "score": -1.573601396142104}, {"text": "\"Further, one may ask why there is no proto-tumour-suppressor. Could it be because tumour-suppressors were discovered later, when mechanisms were better understood?\"", "score": 0.5146017081355057}, {"text": "These clickable images are great! Can you make one for the 10^-6 to 10^5 range too?", "score": 1.0458318235551178}, {"text": "That incident is already mentioned in the <url>. Why do you insist on redundantly adding it?", "score": -1.654279780923368}, {"text": "What do you mean? How am I supposed to vindicate myself of this ridiculous accusation?", "score": -0.8999734494086266}, {"text": "I tried a white map on <url> and a light yellow-pink on <url>. Which do you think looks better?", "score": 0.747264552872042}, {"text": "Especially considering that it took you ''a lot'' more time to vandalize the page than the two seconds it took us to fix it. What's the point?", "score": -1.7887743813676273}, {"text": "\"I made the revert as an editor, not an Administrator - I haven't used my Admin tools and I have said I would not use them as I'm involved. Where did I say I hadn't read any of the material?\"", "score": -0.5999796033450785}, {"text": "Thats good. Do you have any enemies on here as well?", "score": -0.6986947515810586}, {"text": "Thank you. 
So what did I do to earn the tea?", "score": 1.182446104138825}, {"text": "\"Hi SE7 - There's some vandalism coming from your account, see <url>. As your past history doesn't seem to fit with this, has someoen either hacked your account or a logged-in session?\"", "score": 0.5492206185133077}, {"text": "Ok thank you so much. How long have you been on Wikipedia?", "score": 1.0817963284335668}, {"text": "I see that there now. Do you want me to tie it all in or do you want to work on it?", "score": 0.8367683444319421}, {"text": "\"'''I just take care of the problem.''' Can't you work ''with'' me, and not ''against'' me?\"", "score": -1.5186058526257695}, {"text": "\"Hi, I missed your valued contributions recently. Everything ok?\"", "score": 0.5611019430685759}, {"text": "I don't understand why did you add that message at the bottom of this page??", "score": -1.545324604707997}, {"text": "\"According to ''Nature'' <url>, the picture is xa9 Zhongda Zhang/IVPP (IVPP is the Institute of Vertebrate Paleontology and Paleoanthropology in China). What evidence do you have that we have permission to use it?\"", "score": -0.584167848313589}, {"text": "Thanks again. Do you have any suggestions for how we might change the article to take the practical side of this into account with being (a) inaccurate or (b) overly technical?", "score": 1.4100766524176718}, {"text": "\"I was wondering, why did you do that, create that new cat and then add it to the polanski without any discussion at all? Have you read the polanski talkpage and seen the lengthy discussion regarding the cats there?\"", "score": -0.8314102382851241}, {"text": "\"I'd like to ask why you're so insistent on creating a page called <url>, which means the same thing as <url>. 
Are you disagreeing that they mean the same thing, do you feel that the <url> article is bad, or do you feel that <url> is a better name for the article?\"", "score": -0.8501148818547716}, {"text": "\"I have to say, I've reviewed the suggestions that \"\"year in x\"\" not be piped, and I vehemently disagree. As I note on the [[Wikipedia talk:Piped link|talk page]], \"\"[s]ee [[Lindsay Lohan]] for a good example of why piping is a good thing (a simple [&#91;2003 in film]] in line would create clunky, hard-to-follow writing&mdash;better to use no links at all).\"\" Comment?\"", "score": -0.6846024604376112}, {"text": "Just curious. Does the '''tab''' key move your cursor to the search box?", "score": 1.0470040346814855}, {"text": "See my comment on the talk page. How did this ever get to DYK?", "score": -0.4492086225806918}, {"text": "\"That sounds fine. When does the drive end: at the start of August 13 or at the end, and according to which timezone?\"", "score": 0.5543779975331896}, {"text": "I thought we had already established consensus last month. What exactly is the deal with the parenthesis and columns?", "score": -0.6407713495323686}, {"text": "Back on topic Jigglyfidders; I totally agree with Deconstructhis on this one. What will you do to address this issue?", "score": -0.5894068512746862}, {"text": "\"I'm on it. Instead, could I persuade you to offer comments on the FAC page of <url>?\"", "score": 0.9547975431261175}, {"text": "I would specifically appreciate an outside view at the thread <url>. What do you feel is the appropriate action?", "score": 0.919504860544766}, {"text": "Merci. Now do you have any idea to solve discrepancy in <url> ?", "score": 0.6797548528851222}, {"text": "Oh I can explain how for you. Do you have a <url> computer?", "score": 0.858490740204406}, {"text": "\"From my sources it seems that the Duke of York didn't have any radar when completed, but during her numerous refits different radar sets were added. 
So shall I list all the radar added from her refits?\"", "score": 0.6329841360218559}, {"text": "\"So, I'm a \"\"deletion fanatic\"\"? Why don't you refrain from uncivil remarks and focus on adding sources to your unsourced stubs - help the encyclopedia rather than pissing off people?\"", "score": -1.5130523446997521}, {"text": "\"Aberayron, certainly. But Aberaryron ?\"", "score": -0.6848890765080528}, {"text": "Would you like to elaborate? How are you empowering me exactly?", "score": -1.0880861909912185}, {"text": "I've heard you say you have quite a collection of dog books. Any chance you could list them for us?", "score": 0.7948009717719225}, {"text": "Do you really believe that your photograph at the top of the <url> is better than my photograph which you removed? Should Wikipedia have a dedicated sunrise article that does not feature even one single decent image of a red sky?", "score": -1.2083997875350132}, {"text": "\"I noticed you added the Jean Chretien link to the homepage, which is nice, but you didn't put any comment for the changelog and you also didn't bump of the last article, you bumped off the 2nd last article. Why?\"", "score": -0.5020554563867479}, {"text": "\"<url> - \"\"Even if he makes only a single administrative action then his having the tools is a net positive.\"\" - a single admin action> are you joking? he gets to see all the deleted data forever access to the extra buttons forever and you consider a single action as a net positive?\"", "score": -1.4928526373353}, {"text": "Hello. Can you help me create a map of Isabela which reflects the 3D map?", "score": 0.56181918362575}, {"text": "\"I think if we did it that way, it would add a lot of possibilities to the page, enabling us to give breakdowns of other wars and major events in separate tables as well, in addition to alternative listings, which I think would be quite useful. 
Because there are a lot of events with different aspects to them that could be covered in more detail IMO, what do you think?\"", "score": 0.9208228288655264}, {"text": "\"See <url> - not a good revert, and no reason given. Rollback?\"", "score": -1.0024995386024407}, {"text": "\"The article is not about this, at all. Could you explain your logic?\"", "score": -1.073125744681561}, {"text": "Hi RHM22. Would you or one of your coin-loving compatriots have access to <url>?", "score": -0.6894460986850944}, {"text": "\"Thanks for taking all that on....just wondering on the template if that Vancouver / North LM split vs Fraser Valley South LM split is something Elections BC cooked up; \"\"North\"\" and \"\"South\"\" just don't seem right geographically/compass-wise, it's more like \"\"West\"\" and East\"\".....I'm imagine there's reason to your rhyme; dito Vancouver Island/South Coast which is a-geographical somewhat if North Island is included (the Queen Charlotte Strait region is part of the Central Coast). If those are divisions used by Elections BC I guess that's what it is, though....I can make some additions to various pages describing the changes relating to ridings they're made out of and waht areas are in/out in each case.....and since the districts were created in 2008, shouldn't hte title say that instead of 2009?\"", "score": 0.5247972111463172}, {"text": "\"Listen, the reason I put those signs in the code 'break-clear' is because the arrangement of the blocks is ''off-key'', at least when looking through IE or Mozilla. Why are you taking them off?\"", "score": -1.8451486500683494}, {"text": "\"Thanks for your support! Would you consider commenting on the <url> talk page, saying it has your support (and adding those comments for discussion)?\"", "score": 1.6966846074675566}, {"text": "\"I wish I could say, but I don't know what it is. 
Could you tell me a little more?\"", "score": 0.8813383410361013}, {"text": "\"Hi Leonard G., I just read through your RFA including your reform proposals. Could you take a look at <url>, a proposal I have developed which would give just the rollback tool to people who request it?\"", "score": 0.801808602744466}, {"text": "\"Not that this really needs much more discussion but how did the object described as \"\"creepy\"\" change from the email in your first sentence to the administrator in the second? Especially when the original edit stated that the email was creepy, not the administrator?\"", "score": -1.2171620351243233}, {"text": "\"Wow, you have a lot of gall, given you removed the West Wing discussion from your Talk Page with the edit summary \"\"Archiving crap\"\". Perhaps if your going to make unnecessary comments on people's talk pages, you should hold yourself to the same standard?\"", "score": -1.2967716371524833}, {"text": "\"I'm a little surprised you haven't responded to my suggestions regarding the GAN yet. Is there some sort of a problem there, or have you just not managed to get around to it yet?\"", "score": -1.1127393917978972}, {"text": "\"The article already says where they come from in the ''birth location'' field. What's the point of repeating it, especially at the expense of other info?\"", "score": -0.6537231581595303}, {"text": "Thanks for identifying it as self created. What license do you release it under?", "score": 0.7195585723432619}, {"text": "I'm working on improving the references on Kulwicki's article to get more reliable ones. I've also worked on improving <url>'s article toward GA. Do you know much about drag racing?", "score": 0.6570830157716568}, {"text": "\"Good point for the text, but can I also add the photos? They seem to be out of copyright, but maybe not?\"", "score": 0.9826292266633736}, {"text": "\"OK, I'll bite. 
How is ''Matt \"\"Dirty\"\" Sanchez'' to be construed as anything other than a personal attack?\"", "score": -1.0245366835509473}, {"text": "\"Wait, I thought algorithms were named for <url>???\"", "score": -0.5466419677475163}, {"text": "\"WHADDAFUCK YA GONNA DO TO ME, HUH BITCH!?????????\"", "score": -2.7605072814212863}, {"text": "\"Saw your note on Herbythyme's pageu2014this is great! How come this isn't more widely \"\"publicized\"\"?\"", "score": 0.6952149466394626}, {"text": "\"OK, I've started the deletion debate at <url>. As a major contributor, would you mind indicating there that you agree with deleting the page?\"", "score": 0.9146116273250822}, {"text": "\"Do you disagree with this reasoning?. If so, could you explain why?\"", "score": 0.6711101292529407}, {"text": "\"IMDB calls her Ethyl, not Ethel. Which is it?\"", "score": -0.7931511054892448}, {"text": "\"I started working on missing translations of news titles inside citations, but discovered that square brackets leave an ugly residue, while round ones may not be optimal, since they clash visually with the \"\"language\"\" ones. Would it be acceptable to just use the English version as title?\"", "score": 0.5405591966333559}, {"text": "\"So do I, because, inspite of my 100s of friends and supporters, I'm feeling a little beleaguered and lonely here. If I can hang on in here, can't you at least keep me company?\"", "score": -0.5305985642588426}, {"text": "PS do you really think that attrition is a degeneration? Surely it's the norm?", "score": -0.5867430308376858}, {"text": "\"<url> Well there is no name to the byline, which an article in a newspaper usually has, op-ed or editorials not so much. Do you not think you should have gone to talk before reverting btw?\"", "score": -0.5909937410357916}, {"text": "I've also started <url>. 
Now is this acceptable?", "score": -0.5240286642955456}, {"text": "May I suggest you actually read the article before claiming it not to be nonsense and removing my CSD tag? Are you an admin?", "score": -2.322239334621106}, {"text": "\"Thank you for the tip Javier. As I've never done a FL before, can you take a look after I've completed the list?\"", "score": 1.6281208697237388}, {"text": "I haven't heard from you in sometime. Como estan las cosas?", "score": 0.7860607460258472}, {"text": "I also love your new CA Interstate shields. Could I trouble you to replace the current one for <url> with a similar one?", "score": 1.166953966827387}, {"text": "Hi - how's it going? Do you need more time?", "score": 1.6305816439784941}, {"text": "\"I suppose I should not have used rollback. I was aware of the problem, but you had not solved it, had you?\"", "score": -0.8003822173396191}, {"text": "Hmm. Over?", "score": -0.922860962339352}, {"text": "I've replied to most of the concerns. Could you please look over them?", "score": 0.6438955791122851}, {"text": "\"Evidently, I don't have enough consciousness, because I haven't the foggiest idea of what you're talking about. Could you tell me?\"", "score": -0.9998514678687356}, {"text": "to the Citations missing template. What's the deal?", "score": -1.572304866707324}, {"text": "\"Thanks for your note, Tom. As a matter of interest, how could you know who someone is based on the IP address?\"", "score": 0.5903334414376935}, {"text": "\"Hey, I just went ahead and reverted <url>. I was wondering if you wanted your userpage semi-protected to prevent that happening in the future?\"", "score": 0.964020061857698}, {"text": "last msg ???", "score": -0.9861051324405385}, {"text": "\"This talk page looked like it could use a kitten. How are you doing, is everything going OK?\"", "score": 1.3059797671589917}, {"text": "\"To be perfectly honest, I really have no idea what you're talking about. 
Would it not be simpler to just ''add'' a link to the template (if that is indeed what you mean) than to create a potentially offensive and almost certain to be rapidly deleted category?\"", "score": -0.7953784044658188}, {"text": "You're still missing the point. How do the two sites meet <url>?", "score": -1.2462998606488453}, {"text": "\"I will say this, though, in the form of a suggestion: it makes me a bit uneasy to see you so involved and sometimes argumentative in your nomination. Why not step back and let the community do its thing?\"", "score": -1.2233044446259485}, {"text": "Another sock down. How many more to go?", "score": -0.5129522462862399}, {"text": "\"Hi. I uploaded three very nice images in the <url>, can you please tell me what breed could that dog be?\"", "score": 1.3813668642831518}, {"text": "\"Elkman, there are other ways to take a Wikibreak. Can I help?\"", "score": 0.5331729941310634}, {"text": "\"Stephan, what did you mean by ''\"\"Is English your native language? You seem to fill in a lot of things not said with your assumptions.\"\"'' on my talk?\"", "score": -1.495410252498798}, {"text": "\"I've been going through <url> and replacing the FiveHorizons.com refs (per Quadzilla99's comments). However, refs 39 and 40 don't seem to appear anywhere else; could you perhaps replace them with more reliable sources if you get the chance?\"", "score": 0.6802180023044}, {"text": "Why haven't you submitted the diet to GA? And when are you going for FAC?", "score": -1.3311499777137243}, {"text": "\"Any more of this hoax crap at <url>, <url>, Eyre/Heller garbage, and you will be '''''permanently''''' banned from editing. Is that clear?\"", "score": -2.53677773265266}, {"text": "\"Isn't this section \"\"Vatican Responses\"\"? How come the wordings are not fact-based but seem to have devolved into media perceptions of the Vatican responses?\"", "score": -0.5231477406101993}, {"text": "Out of curiosity what are you testing? 
How are things going?", "score": 0.8544848866056395}, {"text": "\"I don't understand how you find it appropriate to talk about your illegal drug use on your user page. Also, how do you have the right to ban other people's usernames with a name like \"\"'''BONG'''warrior\"\"?\"", "score": -1.4012389121974782}, {"text": "comment from a member of the Article Rescue Squadron? Do you know how to hide a human body?", "score": -0.7183869574102767}, {"text": "\"Hi, I wsa just noticing that in <url>, players such as Tommy Walsh and Paul O'Connor have appeared under 'T' and 'P' respectively, rather than 'W' and 'O'. How should that be fixed?\"", "score": 0.6157472054704805}, {"text": "\"Should be OK. Let me finish up what I'm doing now, and then see what needs doing. How long are you around for?\"", "score": 0.6827453342087009}, {"text": "\"Not to worry. What's \"\"D.M.\"\"?\"", "score": 0.6490532478531967}, {"text": "\"What's the origin of the data graphed in <url>? The image is used in <url>, which has an in-text cite for solubility characteristics, but could you clarify the source for the graph itself?\"", "score": 0.5263065965430835}, {"text": "I saw your comment on Jimbo's talk page about plagiarism and copyleft. Would you have time to have a look at <url> and <url> (where most of the discussion has taken place)?", "score": 0.8891975483987133}, {"text": "I see that countybycounty listing that you have of your travels throughout the USA. I have a county map of the USA suitable for marking for travelling (I've done it for myself); would you like me to Email this user a copy of the map?", "score": 1.2094703183916171}, {"text": "Why do you have this user name??", "score": -1.6495099720009143}, {"text": "\"<url> Honestly, I'd like to know if you really are defending <url>. 
Do you really believe that they were justified?\"", "score": -1.130422857481766}, {"text": "Hello im trying to make a task force for the Empire of Brazil with some people but we dont know how to make the actually task force stuff like the banner and how to make the quality chart template to see what class each article is. Im asking if you can help make it since i saw you made WikiProject Dam?", "score": 0.592227568734573}, {"text": "Thanks! Do you know why this was held jointly and what was worked on?", "score": 1.3467722508174647}, {"text": "I was trying to create a subpage. Could you please make it a subpage for me?", "score": 0.5566624231573172}, {"text": "\"Where is the exception in <url> for \"\"tossing out citations and verifiable in order to make it easier for the reader\"\"? Who are you to disagree with Nikon's label of \"\"Compact Professional\"\"?\"", "score": -1.8205236185034654}, {"text": "I was wondering if you'd like me to make your userboxes (on your userpage) line up horizontally (in columns of 3) rather than vertically. Interested?", "score": 1.1772536056796437}, {"text": "\"Hi, <url> moved <url> to <url>, and then a bot came in to correct a double redirect so that the page can't simply be moved back again. Can you use your administrative powers to make it so?\"", "score": 0.9314973042213633}, {"text": "\"BTW, I see you've left some comments at <url>. Would you be interested in signing on to the project?\"", "score": 0.8422063020137959}, {"text": "\"Actually, if she is releasing a new single from the debut album, the title \"\"Home Run\"\" wouldn't encompass this. Someone else has changed it to \"\"Debut album\"\" which I think is more suiting, what do you think?\"", "score": 0.7130964390567228}, {"text": "\"Read beyond the obvious. What is that essay saying when you remove all referances to users under blocks, bans or other sanctions?\"", "score": -0.734030919244114}, {"text": "\"By \"\"this project\"\" I mean Wikipedia. 
Do you share the values of civility and consensus?\"", "score": -0.7173169391993511}, {"text": "\"At the Chicago house article, I think the current state of house music in its birthplace, Chicago, should be added. What do you think?\"", "score": 0.6091375230861049}, {"text": "\"Thanks for the articles on South African cricket teams - I was planning to do them myself but I really appreciate that you've done them. By the way, where did you find the information on what regions the teams represent?\"", "score": 1.2796111635055785}, {"text": "\"Yes, sorry I didn't see the discussion. How about moving it to a subpage of <url>, say <url>?\"", "score": 0.7680103108339822}, {"text": "Sounds like a great idea. Do you want to propose it at the project talk page?", "score": 1.1562172217671987}, {"text": "\"I am looking for help improving the dermatology content on wikipedia. Would you be willing to help, or do you have any friends interested in derm that would be interested in helping?\"", "score": 1.6450824115490317}, {"text": "I asked some questions in the Criteria for Inclusion section on 20th Dec but no-one has yet responded. Can you take a look?", "score": 0.9651325637395383}, {"text": "thanks for the welcome! I have a question... how come you can't edit the article about Whales?", "score": 1.485570513998582}, {"text": "\"SudoGhost, just a question. Do you think that simply calling someone's edits disruptive is very specific?\"", "score": -1.0642506502123499}, {"text": "my edit was described as ''(Reverted 1 edit by 64.53.137.232 (talk): Rv. (TW))'' and I didn't give the guy a warning or anything so how would anyone think I was calling him a vandal?", "score": -0.6703263024701767}, {"text": "\"Hi, I'm looking for a clerk that would be willing to act as my trainer. Would you be available to do so?\"", "score": 0.8811800037118926}, {"text": "\"Yesterday you undid an edit of mine (on the <url> article). 
What did you mean by \"\"a COI link\"\"?\"", "score": -0.5152092144246516}, {"text": "\"If you can remember, would you mind giving a bit more information about this photo e.g. where you took it?\"", "score": 0.7684131604118193}, {"text": "I'm see about the new Version 0.7 - which stated should FINALLY be published soon... When the exact date it published? Is there anything that I can help?", "score": 0.7981178346628297}, {"text": "\"I believe you wanted to find out which party the PP was? Well i'm sure you'll be able to find it at http://elections.uwa.edu.au/partysearch.lasso - my guess is that it could be the Progress Party, who ran in the 1977 WA HoR election?\"", "score": 0.6298093020105162}, {"text": "\"That was awfully quick, considering that it was open for less than 12 hours. Since I must now do them individually, must they be considered \"\"Second Nominations\"\" if I put them up again?\"", "score": -0.9308320525085818}, {"text": "\"I can't find a page by that name that has ever existed. Can you please specify the exact name (capitalization is important), or link to the deletion log or discussion?\"", "score": 0.6965815100690307}, {"text": "Nice work. :) Got anything for <url> or <url>?", "score": 0.9728244616394438}, {"text": "\"Bingo! Since you've seen the light, now will you stop trying to craft PR statements?\"", "score": -1.2443924574685206}, {"text": "I notice yall were trying to get some Canadian articles featured. How many Canadian articles are even at that status?", "score": -0.6151543638321079}, {"text": "\"Dear commander, this source: '''* Berg, Ole F.: <person>, Oslo 1997 ISBN 82-993545-2-8''' is a Norwegian source, as signified by the lang|no tag, hence the first letters of months is not supposed to be capital. I thought that tagging sources with lang|no would prevent this problem, isn't that correct?\"", "score": -0.6163353313998267}, {"text": "\"<sigh> some people have no sense of humor. 
If I create <url>, do you suppose people will ignore it?\"", "score": -0.456207879718184}, {"text": "\"Somebody labeled the \"\"stenography\"\" images you did for <url> as unverified. Would you midn adding a tag to those, too?\"", "score": 0.9470227620320465}, {"text": "Huh? Who the hell are you?", "score": -2.880110028454214}, {"text": "I tried to create a similar map for Canada but couldn't get a usable output from the site you used for the Australia map. Could you create a Canadian one?", "score": 0.8073108547556298}, {"text": "Thanks for change to Hugh de Grandmesnil - I am (It is) very short of refs for that article. Could you suggest one for your addition?", "score": 1.189654651867429}, {"text": "\"Hi, I'd like to help you with the image you just uploaded to the <url> article. Can you tell me where you found it?\"", "score": 1.2226388338860055}, {"text": "\"Hey, if you want to assert that a cite does not say what it clearly does say, how about actually coming to the discussion page to discuss it, rather than treating edit summaries like a discussion space? Why the hell am I the one being accused of edit warring, when I'm the only one actually willing to talk on the discussion page?\"", "score": -1.960192695654856}, {"text": "\"An excellent news, indeed! I don't have time to have a look at it right now (busy with the fr FA), but do they have something on Gndevank?\"", "score": 0.8402690746263396}, {"text": "\"Please do not remove warnings. Just follow the rules and everyone will get along, ok?\"", "score": -0.9154990986774869}, {"text": "\"I removed a few obvious links on <url>, but wasn't sure about some of the others (sparknotes, paperstarter, etc) and couldn't find anything yea or nay on their use...if you have a few spare minutes (ha!), could you take a look?\"", "score": 1.043420813044214}, {"text": "Why have you restored this article deleted pursuant to a perfectly acceptable AfD? 
Am I missing something here?", "score": -1.899733300741272}, {"text": "Seems like the troll committed suicide this morning. I don't know why; perhaps not enough fun?", "score": -1.522103740222508}, {"text": "\"\"\"Malbrain, I can't understand you, in either your comments to the talk page...\"\" is the criteria being used by user WHOSASKING to make reverts of my edits. Since when does any one editor have to understand all the material of a subject known by the other editors?\"", "score": -1.3330581165399042}, {"text": "\"What part of \"\"use common sense\"\" did not get through to you? Was it the \"\"use,\"\" the \"\"common,\"\" the \"\"sense,\"\" or some combination thereafter?\"", "score": -2.2512638913106224}, {"text": "\"I clearly provided a source that is valid, and yet a couple of hours is a big deal? Is it your way, or the highway?\"", "score": -1.5973411641865005}, {"text": "I'm not familiar with that article. Can you please specify the exact article title?", "score": 0.9875816040427106}, {"text": "I went to <url> and couldn't find it. Can you provide a link?", "score": 0.5527839807297517}, {"text": "Hi.. I'd actually like to move all images in <url> to the Commons. Is there a way that I can do it myself?", "score": 0.6280431403535706}, {"text": "\"And, no, I don't think that everyone who wrote a screenplay should have two pages, but I do think that every artist who has a substantial list of works which would clutter up their biography page should have two pages. Can you explain to me why the two-page thing is such a big deal when wikipedia is not paper?\"", "score": -1.4582256721331202}, {"text": "\"That's OK &mdash; I just wondered if you had a new translation that used \"\"on\"\". Where are you studying philosophy, by the way?\"", "score": 0.756011708994831}, {"text": "with an ndash rather than a hyphen. Can that be fixed?", "score": 0.529223421227361}, {"text": "Very nice. 
How could I doubt you?", "score": 0.5516414090783186}, {"text": "\"Would you know what the difference is between 'computing by Operating systems' and 'operating systems\"\"? Not obvious to me, why not just eliminate it and add the computing category to the operating systems category page?\"", "score": -0.7041802457027503}, {"text": "I don't really see that as a problem. If it's a fringe position why not simply state it as such?", "score": -1.0613718997704704}, {"text": "I'm confused. Why did I get a user warning relating to this article?", "score": -0.45479335980089325}, {"text": "\"Hey, I saw you edited the Dark Tower Series page. I was just wondering if you were a fan of the series?\"", "score": 1.0763281176489001}, {"text": "I notice you occasionally use that remark in your edit summaries. I understand simple stuff like r and c. What does awai stand for?", "score": 0.6432040858362182}, {"text": "\"Then, I am curious. Why did you create the redirect in the first place?\"", "score": -0.6047492422041718}, {"text": "So what exactly was the point in moving 'Birmingham New Street Station' to 'Birmingham New Street railway station' I've never heard it called that before nor any other of the Birmingham stations. Is there any need to add 'railway' to the title of a station unless it is being disambiguated from a bus station or something?", "score": -1.4680696264901054}, {"text": "\"What, my dear, will change the fact of your poor anargumentative technique??\"", "score": -2.1524456253098796}, {"text": "\"So, is the template too much? Should I add it to all articles contained within the template?\"", "score": 0.7365594688027352}, {"text": "\"How did you learn Interlingua? Was it an online course like lernu, or what?\"", "score": -0.7752414051133063}, {"text": "\"Still, I cannot understand why virtually all boxes related to the EU were removed. 
What makes this issue more divisive than for example the re-unification of Yugoslavia?\"", "score": -0.5678880310521424}, {"text": "Then maybe admins should focus on copyright infringements and avoid the leader/educator/cop/parliamentarian roles that they cherish so much. Could 1000 admins handle this?", "score": -1.31219538774468}, {"text": "The sources are ref'd in the article. Can you be more specific?", "score": -0.48574390876101353}, {"text": "I give a glimpse there now and then. What is up?", "score": -0.5144555997544848}, {"text": "\"What is unreliable about the newspaper article reports about the protests in Meliti, Lofi, etc? Are you alleging that the protests against the Greek military did not occur?\"", "score": -1.2895421343330173}, {"text": "Strikehold previously did a <url>. What do you think about adding it to the template?", "score": 0.8817467033436637}, {"text": "\"Don't worry about it. Take it easy, keep the ranting to a minimum and the productive editing to a maximum :) And you're still not convinced you should get an account?\"", "score": 0.66455193067287}, {"text": "\"I moved '''Engine Arm Aqueduct''' to <url>, because there seemed to be much more content about the canal than the actual <url>. Does that seem reasonable, to you?\"", "score": 0.9516354757437157}, {"text": "\"I realise it may seem ridiculous, but until the copyright holder has released it, we cannot use it (except under a claim of fair use, but this wouldn't fall under our rather strict <url>). I will delete the image now- why not draw your own map?\"", "score": -0.8006443343671531}, {"text": "\"Sorry if it seems like I'm bugging you about the above image - I'm trying to determine the copyright holder, which needs to be specified on the image page per <url>a. When I click on the source link, I get an \"\"access forbidden\"\" message. 
Can you please specify the copyright holder on the page?\"", "score": 1.3269446735001527}, {"text": "I am trying to figure out the exact definition of <url> however do not have access to Andrews. Could you help?", "score": 0.9114349711449441}, {"text": "\"If you're interested, I could provide you with Subversion (which is superior to CVS) for VandalProof. Interested?\"", "score": 0.736244150016873}, {"text": "Thank you ''very much'' for partaking in my effort to have a listing of stable article revisions. How do you suggest we get this project off the ground?", "score": 1.7324001248025667}, {"text": "\"I don't own any from Rousseau either, but I did know it was Grand Cru, just made a mistake! I notice above you and Stefan discussing the Langton's Classification Wines, I'm going to have a go at getting articles made for more of the exceptional level producers, I've just finished one on <url> - are you interested in working on any of them together?\"", "score": 0.9135165095008645}, {"text": "But ''you're'' the one branding his work as anti-Semitic! See what I mean about bad faith?", "score": -1.2937261660973038}, {"text": "\"I am sorry, got confused on who created this article. :-/ Apologies?\"", "score": 0.5511741432484293}, {"text": "You have been accused of sockpuppetry at <url>. Scared?", "score": -1.863347643167743}, {"text": "\"I've nominated a biology-type article, <url>, at <url>. Would you be interested in reviewing it?\"", "score": 1.0359792124860836}, {"text": "\"I believe you beat me to several vandalism reverts the other night, so I thought I'd give you this. We could all use more, right?\"", "score": 0.5562754534543546}, {"text": "\"So then \"\"good luck with your upcoming defense\"\" and \"\"congratulations\"\" (assuming it goes well:)? Are you headed somewhere else where you will be able to continue working with students on WP-Chem stuff?\"", "score": 1.3122844730142713}, {"text": "Thank you for correcting the error on my user page. 
I have a question how does one go about archiving his talk history?", "score": 1.144155877446103}, {"text": "\"Hi, congrats on your admin promotion, but in preparing the short note about it for ''The Signpost'', I'm puzzled as to \"\"the backlog\"\", which appeared in your RfA text. Is it a copyright backlog for images?\"", "score": 0.9119075653470796}, {"text": "\"Thank you much! May I call upon you if I make mistakes or need help, then, please?\"", "score": 2.2994064828544394}, {"text": "\"Hi Thunderbrand, thanks for letting me know that the image I uploaded is on IFD. Can I ask though, why is the image being deleted?\"", "score": 1.0975412401274394}, {"text": "See <url>. I assume you didn't mean <url> ?", "score": -0.7746476866845684}, {"text": "I figured that. Is there anything else I need to do?", "score": 0.6556264921390539}, {"text": "\"SPI is not for just simple yes no answers. Why did you not warn the Minneapolis IP, they are warring as much as Bb23?\"", "score": -0.6618315228065063}, {"text": "\"Many thanks for your correction on that article. I've been meaning to expand the content, but this might be quicker to address in the short term: I think it may be better to change \"\"consisting of\"\" to \"\"composed of,\"\" based on reading some stuff from your user page - thoughts?\"", "score": 1.9814105386344156}, {"text": "\"Ok. By the way, may I note that is an excellent resource you have there. I noticed the other day that we had acquired a list at <url> of unproduced scripts/stories - it seems entirely genuine and fits with material I have read elsewhere but was uncited - I wonder if you could help find a source for it?\"", "score": 0.6736371220789381}, {"text": "\"Hi ER, which Bio. do you think could use a little of my majic (smile)?\"", "score": 0.8547142780112218}, {"text": "\"Hey Love, as you wish, if you have any suggestions about my section, let me know. 
Is English a second language for you?\"", "score": -0.8107012115916788}, {"text": "\"Yes, and there has been a user from an AOL IP who has been trolling Pilotguy for weeks. Why must you blindly disagree with everything anyone says to you?\"", "score": -2.0002895240005993}, {"text": "\"Hey, Rcej! ;) Would you be up to reviewing <url>?\"", "score": 0.9719662942684331}, {"text": "But ATT is a re-statement of NOR and V. We don't need a re-statement of them and then call it RS. What would be the point?", "score": -0.9631844105801248}, {"text": "I based my sentence on the citation. Are you saying the citation is wrong?", "score": -1.0001513477208512}, {"text": "Figures as much. What's wrong with it?", "score": -0.4941344984135435}, {"text": "\"Nice work so far on your rewrite of \"\"The One After the Superbowl\"\". Are you planning on including info about the Diet Coke fiasco?\"", "score": 1.264640794144814}, {"text": "\"Is it your view that the United States Senate doesn't have \"\"any credibility\"\"? Or are you of the view that some cyber truck that zooms around the WWW must have bumped into the web page by \"\"accident\"\" and dumped stuff endorsed the United States government?\"", "score": -1.0478366058067272}, {"text": "What are you having a problem with? Are you having trouble with the ssh authentication?", "score": 0.5743484952078969}, {"text": "\"I agree with you in that this chapter has no place on <url>, moreover its inclusion there seems highly suspicious. Is there any way to see who added a particular word/sentence/paragraph without manually going through the history?\"", "score": 0.6600347055931717}, {"text": "Ahhhh... My apologies. You wouldn't be using the google toolbar perchance?", "score": 1.2166013331723717}, {"text": "Hi Emily! Do you have any ideas about a topic yet?", "score": 1.088516867190373}, {"text": "\"Hi, Jpa! 
Why not archive the old discussions on your talk page?\"", "score": -0.6234817289727763}, {"text": "\"Why are you trying to delete my user page, literally less than a minute after I began it. You are allowed one so why not me?\"", "score": -1.1004054700500865}, {"text": "Thanks for the redactions :). Is it worth getting a third opinion at the reliable sources noticeboard vis a vis the use of law textbooks for points of law?", "score": 1.2028802164602497}, {"text": "\"I was thinking that it would be appropriate to <url> <url> for the front page on April 16th, 2008, the one-year anniversary of the event. What do you think about that?\"", "score": 0.6342925094343519}, {"text": "\"I actually went to the long term abuse page right now, and tried to block them, but it seems that those accounts don't exist. Have they been listed incorrectly?\"", "score": 0.5962879223894257}, {"text": "What are they? Where did that offensive photo come from?", "score": -1.3339324183400953}, {"text": "Why are you uploading/changing badge files here? Surely you should just upload them to Commons then list the wikipedia version for deletion?", "score": -0.5229252096916919}, {"text": "\"I actually know nothing about karate, but you look to be doing a good job with your work, and I'm just here to say sorry really that some of your articles got deleted. Which artiles got deleted?\"", "score": 1.1829590027302914}, {"text": "\"All is good here, hopefully the same with yourself. How many English first-class cricketer you think we've got to get done in total?\"", "score": 1.0815103227958034}, {"text": "\"Thank you for telling me. Can you please revert my deletion, for I do not yet know how to do so?\"", "score": 1.3052368012948912}, {"text": "What's with all the crap behind him? Was this at a signature desk?", "score": -0.7428265219661967}, {"text": "\"I'm trying to keep Minority Report as a GA, and the <url> requested for copyediting - but didn't give examples to cleanup. 
Can you give me some input on what to rewrite?\"", "score": 0.6851591484534931}, {"text": "What's going on? Why did you revert my edits to those articles without any explanation?", "score": -1.767853643923304}, {"text": "\"Hm. Am I imagining things, or does <url> seem familiar?\"", "score": -0.4470720132584952}, {"text": "\"Would you be willing to do another pre-FAC review of a different article for me? Specifically, the \"\"John J. Tigert\"\" article?\"", "score": 0.5722112072221046}, {"text": "I did not think that was a refinement. How were those categories not appropriate?", "score": -0.9719453928195525}, {"text": "I would like to nominate you so that you could become an administrator. Would you accept or not?", "score": 1.1861100338918393}, {"text": "\"I like yours better, it's less demeaning. This is pretty new to me... should I remove it or leave it?\"", "score": 1.207874709605917}, {"text": "Wassat? LeaveVissToYou?", "score": -0.938584724863795}, {"text": "\"In terms of suitability for an article, as I understand it, Merrill's \"\"claim to notice\"\" is due to the company (work done by SOM, size of SOM, etc), and also because of the important changes he made to that company. Is that roughly correct?\"", "score": 0.8038279792944193}, {"text": "\"Hi Orlady, I wanted to seek your advice....Information on FIBAA is available in the German language on Wikipedia : http://de.wikipedia.org/wiki/Foundation_for_International_Business_Administration_Accreditation. Is there any chance to have it in English?\"", "score": 0.5945589889855155}, {"text": "Thanks for the referrence. Do you have any other response to the Bozo who doubts notability?", "score": -0.9896352351477613}, {"text": "\"One more thing. 
How is the policy of \"\"you're not allowed to block someone for personally attacking you\"\" unworkable?\"", "score": -0.52336428724854}, {"text": "\"Please explain why you continue to <url> <url> <url> over the folk magic/Christian revivalist wording in the lede despite the clear consensus against you. You have not gained any favorable comments for your position in the RFC (which in fact is about the treasure seeking sentence you want to add and not about rewording or removing the existing sentence), so on what basis do you claim preeminence for your preferred wording, or for removing the sentences completely?\"", "score": -1.3911392899821078}, {"text": "\"Actually, I think I should route these terms (seismic design, and seismic engineering) to <url>. Does that sound like a better idea?\"", "score": 0.9643506625238164}, {"text": "\"Why the removal of the citation dealing with coriolis? Was the proof not within the citation, or was the citation not considered primary?\"", "score": -0.5714820100331054}, {"text": "? What?", "score": -1.1798237286315012}, {"text": "Are you kidding me? Do you even bother to click on <url> to see its fleet?", "score": -2.299921959583164}, {"text": "\"Hi, I noticed you blocked '''BPRD Agent''' with no expiry time with reason: ''Vandalism-only account; see contribs and deleted contribs''. Um, isn't the Deleted Contributions only restricted to administrators?\"", "score": -1.2431644562348563}, {"text": "\"Hi Pete- The user \"\"Darmi\"\" on <url> says that footnotes are used on Wikisource, and he has two pages to show for it. What do you think?\"", "score": 0.7404350235449534}, {"text": "\"Regarding <url>, the alumni should be alphabetized and Tom DeLonge did go there. Why would you undo my edit?\"", "score": -1.6426921589629948}, {"text": "The Nyan Cat is a flying cat with a toast instead of its body. Isn't quite similar?", "score": -0.5429177038951976}, {"text": "...just vandalized my page again. 
What level warning is appropriate?", "score": -0.771934648876089}, {"text": "Would you email me with any points/incidents you especially want me to be aware of? Do you think Jimbo will be willing to pass this to the arbitration committee?", "score": 0.6148035849851108}, {"text": "\"Saw your edits on <url> - all good - but I have a question. What does \"\"collapse <url>\"\" mean in your edit summary?\"", "score": 0.6728106998786951}, {"text": "\"Hi 72Dino, I was going over the talk page for vector when I re-read that you had found a tax document for '09 regarding discussion on Vector's contributions to the charity fund. Could you please provide a link for my personal review of that Document?\"", "score": 1.203358406399079}, {"text": "\"In case you didn't realize, one of the first things they states on that article was that if the information on a main or major character becomes long, then it warrants its own article. So, if that the case, then why did you merge <url>, since his article is long and he is the main character on <url>?\"", "score": -0.9319009464302409}, {"text": "\"You have reverted four times, Sarek three times, and an IP once. Do I take it that you are declining to undo your edit?\"", "score": -1.1195716113983525}, {"text": "<url> apperantly we need a source...do you have one...??", "score": -0.6164493866190356}, {"text": "\"Huh, looks fine to me. Maybe this computer just lies to me to get me to shut up and stop complaining?\"", "score": -0.816560799431534}, {"text": "\"No, just admire them from a distance of a couple of centuries! You?\"", "score": -0.6258680840078413}, {"text": "Do you really think it is necessary to do that? Could you not simply take my advice on board?", "score": -0.9096536363245195}, {"text": "\"Actually, I thought that I was moving to the end of the list. 
I must be missing what is the issue with putting a (revised) picture at the end of the list?\"", "score": -0.46636563083450994}, {"text": "\"Actually, that's again a misromanization. Would you ''please'' stop this insanity?\"", "score": -0.8784757869690806}, {"text": "\"Since your tube station articles don't actually contain any information, what is the point of them? Why not put them together and work them into an article that says something interesting?\"", "score": -2.1127651526154954}, {"text": "\"If the Russia tricolor is not the flag of the Lokot Autonomy, than what is? Is the flag even available on Wikipedia?\"", "score": -0.844517152553092}, {"text": "\"I have looked back in my archives a year, and cannot find the issue concerned - although I certainly remember the name, your account, and the style. Can you give me the relevant link/diff, so I can refresh my memory and be best informed what action might be appropriate?\"", "score": 0.7859414523494715}, {"text": "\"What do you mean \"\"log in repeadedly? I just keep clickin' what button?\"", "score": -0.7789280336676618}, {"text": "\"Thanks for telling me. Do you just perform the rename, and I then re-create the account to take it for myself?\"", "score": 0.9594233986610898}, {"text": "Hardly. Does the <url> exercise executive control over Northern Ireland?", "score": -0.8111713965567369}, {"text": "\"Again, why are you simply reverting wholesale and without any explanation? Would you rather the article be out of line with the policies and guidelines, among other problems?\"", "score": -1.3631108553062652}, {"text": "\"After having collected some users willing to come, I created <url> subpage. Would you be so kind and add your ideas?\"", "score": 1.231327901638498}, {"text": "Thanks for your response! 
How should we proceed ?", "score": 0.9844231322817303}, {"text": "\"I assume you were directing these comments to Malcolm, who was reverted by multiple people (and who blatantly violated the 3RR, as pointed out on his talk page). Correct?\"", "score": -0.6098559583014769}, {"text": "\"Ah, thanks! Btw, IS THERE any kind of guideline for FAs in regard to lede length somewhere?\"", "score": 1.4536798399258215}, {"text": "<url>. Or is it just me?", "score": -0.6845294170790839}, {"text": "\"I hope I don't upset anyone by jumping in univited, but why are self-created images any different than those found elsewhere so long as they comply with all of the non-free content criteria? Is this caveat hiding in a talk page archive somewhere?\"", "score": 0.6736919474037111}, {"text": "\"There's nothing in your history besides editing my page, so I'm not sure to what you are referring. What page are you talking about?\"", "score": -0.9470973691953024}, {"text": "Hi again! Have you had a chance to get it lately?", "score": 0.7898880223473309}, {"text": "I asked it before but I'm asking it again: Was it a Basque state? Can we say that?", "score": -0.8210445562362445}, {"text": "\"Thanks for helping to source alternative names...helps quash everyone including his/her favorite. I don't have that ref handy--does \"\"unofficial\"\" mean \"\"lots of people call it that but WMATA doesn't\"\", or \"\"it's often called that as a nickname even in WMATA or other formal documents\"\"?\"", "score": 1.283156811105167}, {"text": "I've made a start to putting the list in chart form at <url>. Can you think of any other columns to add?", "score": 0.9716449720592533}, {"text": "Really? When/where did that happen?", "score": -0.6848260853181307}, {"text": "\"Hi MariAna, there are just a couple of outstanding issues with the article. Could you have a look at them so I can get the GA closed in the next few days?\"", "score": 0.8157498654361641}, {"text": "Sure AGK. 
Does the draft need to be updated to include the tweaks?", "score": 0.6343391601784375}, {"text": "\"\"\"Odder\"\" in what sense? Also, why are you still delaying?\"", "score": -0.6074499882675287}, {"text": "\"Also, 'topological proof'? Isn't 'proof' a bit strong?\"", "score": -0.7357488668419444}, {"text": "\"Why was <url> deleted when it was simply a crop of <url> which is public domain? I'm pretty sure I wrote that on the description page, but if not, can it be restored?\"", "score": -0.4441265699571888}, {"text": "\"@Smjg, thanks. But why did you also remove the categories I added?\"", "score": -0.48459526763760563}, {"text": "I think a bot could come along later and remove those double returns. Is there a way of coding a space into the template itself?", "score": 0.6813792081846484}, {"text": "i made a mistake on adding a colon on <url> and it can't be reverted. Could you help?", "score": 0.9641076102415157}, {"text": "Interesting. May I venture to enquire why?", "score": 0.6459657509169552}, {"text": "\"Thanks for the advice but that's why I remove the \"\"Thank you for your contributions\"\" on the welcoming template. Is that still okay to do?\"", "score": 0.9810671731339653}, {"text": "\"Note them where? On their talk page, or by adding them to the list you created at ArbCom so a clerk can notify them?\"", "score": -0.6447040323556463}, {"text": "\"I'm an admin, I'd be happy to do it. You want me to do it now?\"", "score": 2.351702298532658}, {"text": "\"Since working on the article you wrote, I have become fascinated with this band. Is any of their music availabe online?\"", "score": 0.935181439712035}, {"text": "\"It's not my job to add the \"\"Bronze Age\"\" material in the article on <url>, so please refrain from removing existing material just because you haven't made any effort to add the \"\"Bronze Age\"\" content. OK ?\"", "score": -2.046866088439049}, {"text": "I just noticed you've added me as a sock puppet for Blue Sea? 
Can I ask what led you to this conclusion?", "score": -0.6167927093594345}, {"text": "\"What, you are saying that nothing published at mises.org is a reliable source? What is your basis for asserting that?\"", "score": -1.7358872110067487}, {"text": "Why revert? Shouldn't it be included somewhere in the discography?", "score": -0.4641123326480824}, {"text": "\"Thank You. What is \"\"The Prince of Santorini\"\" anyway?\"", "score": 0.5727559566273441}, {"text": "\"What is your source for the PD Seantors? There is no news on RTE, ireland.com or the PD website?\"", "score": -0.5162122322289475}, {"text": "\"hahah, thanks! Must admit, when I saw the title of this section I was thinking \"\"oh lord, another what-is-the-wiki-coming-to comment..\"\" :p. How goes?\"", "score": 0.7552095714385545}, {"text": "I am currently writing up an RfA for Chacor. Can you check with the other Arbcom members and make absolutely sure that he's authorized to run?", "score": 0.5488926269599942}, {"text": "In this <url> there was a loss of some info. Was that intentional?", "score": -0.7185888647753018}, {"text": "\"Wow, you waited a whole ten minutes. What's the problem?\"", "score": -2.2512196473073915}, {"text": "\"\"\"Provocations\"\"? How is it provoking to suggest to an American-expat that calling people in Taiwan \"\"Taiwanese\"\" is not controversial?\"", "score": -1.0486736038863445}, {"text": "\"I'm trying to install Lupin/Anti-vandal tool, but im stuck. Could you help please?\"", "score": 1.4073451795789498}, {"text": "Looks good! Did you check out <url>?", "score": 0.5763080566162615}, {"text": "\"That said, why remove any mention of the way the incomplete? This seems especially relevant because the American production ended in the same way, with no word from the editor stating that the perfect edition ending would be released?\"", "score": -0.5405149213967972}, {"text": "\"Cool, I'd be happy to help out. 
:-) What kind of things are you thinking about removing or adding?\"", "score": 1.0064212904142646}, {"text": "\"Hi David, you're good with images. Anything you can do to clean up <url> so it's useful on ITN?\"", "score": 1.8166453202788055}, {"text": "Welcome to WikiProject Austria. I'm wondering if you're willing to coordinate the project for a while?", "score": 1.008216344733216}, {"text": "Hi Hektor. Could you please provide a rationale tag for this image?", "score": 0.8521553991168458}, {"text": "\"You make it very hard to believe that you're acting in good faith when you rewrite the <url> article to remove all references to \"\"liberal,\"\" but keep the phrase \"\"conservative Independence Institute.\"\" I see POV-pushing when that happens. If you're so concerned about adjectives, why not edit the articles about AEI or Heritage or Americans for Tax Reform?\"", "score": -2.0560262328116465}, {"text": "I think it's time we considered a perma-block on the IP in question. Comments?", "score": 0.8928471219491577}, {"text": "Hi. With only a little over one day remaiming and <url> not editing and not contributing to the case as yet are there to be any known deviations from the standard procedure?", "score": -0.4802879525966482}, {"text": "\"Erica, I understand you are a figure skating enthusiast. Are you still active in those types of articles?\"", "score": 0.6859767519616424}, {"text": "I have no axe to grind on this issue; I just want to understand: Why you consider hammerpond.org.uk a spam or conflict of interest link? It doesn't seem to be advertising or promoting anything?", "score": -0.7281583420264626}, {"text": "\"I wondered if you could recommend the best book on <url>. FOr Christmas, what would be the most detailed book (with lots of beautiful photographs) of Ladakh?\"", "score": 0.9699557817555207}, {"text": "\"Hey there, just curious if the changes made to the GA Nomination for Dan Brouthers were sufficient for pass. 
If not ready for pass, anything else I can do to expedite the process?\"", "score": 1.225359538208029}, {"text": "\"Testing, testing, 1,2, 3, are you still alive? Where are you?\"", "score": -0.4601168125011907}, {"text": "What?! Why did you resolve this without dealing with the User I mentioned?", "score": -1.9510756301244438}, {"text": "\"Why can't you just remove the offending phase and repost your comment? This resolves the situation, does it not?\"", "score": -1.9415740210515129}, {"text": "\"<url> - You made a bold edit, I reverted and now we discuss. Is there a reason why the Wikipedia editor generated filmography should be deleted and replaced by a link to an outside site?\"", "score": -0.6211051663279443}, {"text": "I'd be happy to help but I don't speak Ukrainian. What do you need help with?", "score": 1.310776725403149}, {"text": "Thanks for your assistance in the review of the Brian Keith review at the DYK! The user has cited the Internet Movie Database before?", "score": 1.2265222629431882}, {"text": "\"Hi reinyday - I've been noticing your sig on WP:CFD... this is probably something you know all about but... you do know about being able to use four tildes (<nowiki>~~~~</nowiki>) to sign, don't you? Or have you done some fancy trick with your signature to stop it linking back to your user page?\"", "score": -0.8581684340001481}, {"text": "Thank you for the rollback. Does it appear in <url> or only in the history?", "score": 1.2273437578879307}, {"text": "I see this page is indefinitely semi-protected. It's been over eighteen months: mind if I give unprotecting it a try?", "score": 0.65646262970997}, {"text": "\"The text of your warning <url> made me smile. Did you really think that if China got a new Prime Minister as of today, and a female one at that, it wouldn't be all over the news, and the link to this person would be red?\"", "score": -1.4087861906641161}, {"text": "\"Hello - some of your writing is a bit hard to understand. 
Is English your first language, out of curiousity?\"", "score": -1.0558269524963688}, {"text": "\"...and, really, BeeGees? And not Peter Frampton?\"", "score": -0.9127688010921968}, {"text": "\"Do you still take photos in Jefferson Parish? If so, would it be alright if I posted some photo requests of some parish high schools?\"", "score": 1.0424821053604734}, {"text": "\"Am I indeed \"\"dishonest\"\" by responding to you? Is <url> correct?\"", "score": -1.1599790879547105}, {"text": "\"From this end it looks like it might be working again. Could you try sending something to me via WP, please?\"", "score": 0.889856398466331}, {"text": "\"Sorry for the late reply, but I usually check new additions from the bottom of the page, not the top. What is your disappointment regarding?\"", "score": 0.9323931154530885}, {"text": "\"I've restored the histories of the userboxes; however, I would think it better to move them rather that cnp-move. Do you mind if I migrate these to your userspace for you?\"", "score": 1.3968136653482632}, {"text": "\"Hey Jondel. I was wondering, since Interlingua is basically resurrected Latin, k and w are basically nonexistant in Interlingua, right?\"", "score": 0.7008044626483436}, {"text": "\"You mean \"\"the higher solubility in the ocean\"\"? Solubility of what?\"", "score": -0.6819526274588416}, {"text": "\"#I am still convinced the long page on \"\"prostatitis\"\" (a misnomer in the case of CPPS) is confusing, with too many sections and subsections, and would benefit from restructuring and the creation of separate pages for each area, or at the very least the removal of the CPPS section to its own, in depth page. 
You were the only editor to object to this going ahead, with this comment: \"\"So, suggest keep this on hold for now, but in principle one might similarly split off other classification categories with this then just umbrella simple introduction.\"\" Do you still hold this view?\"", "score": -0.6152781081250557}, {"text": "\"Thanks; what should I do next time to make it easier, quicker? Just let you know?\"", "score": 1.2884904225157314}, {"text": "\"I do not know what you mean by \"\"important links\"\" as you mentioned on my talk page. Will you please clarify?\"", "score": 0.6774024701667725}, {"text": "i searched through the history and the question at http://en.wikipedia.org/wiki/Talk:Telephone_numbering_plan#coincidence.3F relates to infomation <url>. could you possiblly answer it?", "score": 0.54030993683771}, {"text": "\"I'd really like to keep the image, just because it's of a Medal of Honor winner. Is there a way to shrink the table, or another place in the article (Like the Footnotes section) where the image would work?\"", "score": 0.5987182435226951}, {"text": "Please see our ongoing discussion <url>. Is this how you properly send a message?", "score": -0.727947221324025}, {"text": "\"Are you both \"\"Dribblingscribe\"\" and \"\"Indiestu\"\" ? Do both those accounts belong to the same person?\"", "score": -0.6385806875968477}, {"text": "This is now ready for transfer. I will leave it up to you if that is OK?", "score": 1.0950582895778116}, {"text": "\"\"\"''Images with iconic status or historical importance: As subjects of commentary.''\"\". Listen, it's just trivial to look up, could you take the care of actually reading these policies before arguing over them?\"", "score": -1.7812261421013567}, {"text": "If you have time could you look at <url><url> on the <url> article. 
I'm wondering what your thoughts are in terms whether it should go or stay?", "score": 0.9120148925115317}, {"text": "I wish I could find a better one but I don't think I know where one is I think the composition needs more information. Can you do that?", "score": 0.6055743246576094}, {"text": "That might be very helpful. How can I get it?", "score": 0.6138389882148052}, {"text": "\"The PressTV references in Wikipedia's \"\"Turkey-PKK Conflict\"\" article are not the same, but the titles are wrong. If you have time, can you correct the titles?\"", "score": 0.6979019119957506}, {"text": "\"Currently, <url> is a subcategory of <url>. What would you suggest doing about this?\"", "score": 1.0186494019565744}, {"text": "\"Anyway, the point is that the bot is editing against <url>. Can you let me know whether you intend to continue or whether we can agree that you'll stop using it to do this?\"", "score": -1.3405820193608093}, {"text": "\"Thanks. Before I use, is there are way to deny the bot only within a portion of an article rather than the entire article?\"", "score": 1.0271675926284838}, {"text": "Maybe it would be better if we never talked directly to each other ever again. Agreed?", "score": -1.6657661378980655}, {"text": "Wait. Unless ''you'' need it repaired and updated quickly ?", "score": -1.2984179084444951}, {"text": "\"Howdy Mattisse. Haven't I seen you on hockey articles, years ago?\"", "score": 0.8097780854755585}, {"text": "\"But do you think you can take your concerns to the talk page? Crazy notion, eh?\"", "score": -2.1530613851364038}, {"text": "\"SE, I've briefly blocked your bot because I see it's changing embedded links to footnotes again, against <url>. Did you discuss with anyone that you were doing this, or get permission to use it?\"", "score": -1.1858248152358144}, {"text": "\"It's placed outside of the article body, not \"\"randomly put in the code\"\". 
Why such shuffling is a \"\"proper CHECKWIKKI task\"\" and why does it warrant saving an edit?\"", "score": -0.5325977449070574}, {"text": "These two factors make me think that Indian ice-cream is a better name for the article than sxusem. What are your thoughts on this?", "score": 0.8464994059303109}, {"text": "This is starting to almost feel like harassment. Perhaps we should take this to a mediation committee?", "score": -0.9402021386705028}, {"text": "Rather than tell me how wrong I was to close certain afd's maybe your time would be better spent dealing with the current afd backlog <url>. If my decisions were so wrong why haven't you re-opened them?", "score": -2.1179843267007183}, {"text": "\"Yep, no worries. What's the article?\"", "score": 0.606976436263378}, {"text": "I have listened to the beginning for your recording of Thomas Jefferson and nothing is mentioned that Thomas Jefferson owned slaves or Thomas Jefferson and Sally Hemings. Why?", "score": -0.8085962788590614}, {"text": "Sorry for not replying sooner. What would you like done with your page?", "score": 1.4005183698407293}, {"text": "\"Thank you for that, I was having a problem with diffs and the explanation didn't make sense to me. This is the first I have had to do, what is the procedure?\"", "score": 0.5755183818488673}, {"text": "\"I have noticed that your objection to someone who unilaterally stops a straw poll. What, then, do you think of somebody who unilaterally ''starts'' a straw poll?\"", "score": -0.7894076159677688}, {"text": "Thanks for <url>. Do you think you could do the same for <url> and <url>?", "score": 1.5141993510357605}, {"text": "How many other states follow the same pattern? And do we really need it to?", "score": -0.5122364466833484}, {"text": "\"So I have no authority and am not the chief ed, but you have the authority to give me chances. Are you the chief ed?\"", "score": -0.7021375198091594}, {"text": "\"CW, you are aware that Lebanon isn't Hezbollah, right? 
In the same way that Ireland isn't the IRA?\"", "score": -1.3283874999790306}, {"text": "This is clearly them being just as congenial and accurate as always.<url> What is the appropriate place to report them?", "score": -0.7091197235252215}, {"text": "\"LOL, I can't believe you reverted Kizzle's deletion of my summary at S & aQ. Did you actually like my summary?\"", "score": -0.6153778395917916}, {"text": "\"Why can't we do like IMDB and have multiple ratings? Is it that simple, or am I crazy?\"", "score": -1.0768488155187828}, {"text": "\"The Birr page looks good now... but, it seems a little strange to make a distinction between Abyssinia and Ethiopia, as if they were two different entities. The native name was ''ityoppya'' (Ethiopia) long before 1931; in that year, the Emperor formally requested the international community to begin using Ethiopia instead of Abyssinia, much as the Shah, around the same time, requested the use of \"\"Iran\"\" instead of \"\"Persia\"\" for his country (and it was also around this time that Siam became Thailand, I believe...) Anyway, it might make more sense for one box reading \"\"Abyssinia / Ethiopia\"\" in place of the two separate boxes...?\"", "score": 0.6529370655410618}, {"text": "Hello. Can you please pass the pilot whale article?", "score": 1.2734609905638035}, {"text": "Someone has moved this article to <url>. Can you please move it back?", "score": 0.8502348899099417}, {"text": "Why did you remove you comment on the talk page? Did you change your opinion?", "score": -0.5024323307759364}, {"text": "\"So, let me make sure I understand this. You think that, if we remove an image as it does not meet the NFCC, you would then be able to upload the same image, only this time, it would meet the NFCC?\"", "score": -0.8668662332367626}, {"text": "\"It is fair use. 
And, I have no free alternative in the context of \"\"Sugoroku Mutou.\"\" Now, why don't you find me a replacement picture in the context of \"\"Japanese names,\"\" and I will accept the removal of the picture?\"", "score": -0.7551606084786838}, {"text": "I just logged in for the first time this weekend. Do you want me to run through or have you already started?", "score": 0.9019716471891981}, {"text": "How do you know the author/uploader of <url> intended it to be licenced under the copyright licence that you attached to the file with <url> Are you clairvoyant or did the uploader tell you this was his wish?", "score": -2.0690846429028027}, {"text": "\"I'm doing the same. Hey, can you please tell me if <url> has been fixed enough by being stub-ified?\"", "score": 0.8670252855650808}, {"text": "\"Thanks for the help on the sides of leather question. I'm interested in citing the dictionary in the article as a reference for the meaning of \"\"sides\"\", but I'd like to have a little more data first u2014 what year's edition of the OED did you cite?\"", "score": 1.62479287954983}, {"text": "Hi im currently compiling a college project on unreliable information and would like to view the deleted article Dudley O'Neill. Can you help?", "score": 0.7603088526414048}, {"text": "\"You have a lot of cheek marking <url> with <person> without actually taking the trouble to say what needs fixing. If you're really someone who is able to contribute with a \"\"professional\"\" level of English, why not display this supposed proficiency rather than engaging in the illiterate practice of adding templates with tools?\"", "score": -1.943934201763208}, {"text": "\"It's not a ban, it's an indefinite block. He'll know about it when he tries to edit and he'll know that it was me that blocked him, so what's the pint of telling him again on his talk page?\"", "score": -1.1928282771247187}, {"text": "\"Jeff3000, thanks a lot for your help with the article. 
But I'm just curious - why was the link to the article for \"\"Advent of Divine Justice\"\" deleted?\"", "score": 1.3181059518062042}, {"text": "\"There you go, it's GA now. Have you done anything else that's gotten to GA?\"", "score": -0.5748291304175538}, {"text": "\"No problem, but since I don't know anything about cold fusion, I've had to go by the description. (I've left out the thermistor and heater to keep the diagram simple, I can put them in if you think them important) Is the diagram ok I do you think it needs any changes?\"", "score": 0.9619580669226281}, {"text": "I'm interested in why you reduced the columns from 5 to 2? Could you reply on my talk page please?", "score": 1.4032428651043}, {"text": "\"Which link are you referencing? Anyway, TTN, that another phony or is that you on WikiQuote?\"", "score": -0.5977439854762308}, {"text": "\", and <url> hot. What more could a young man need?\"", "score": -0.5318436077556591}, {"text": "\"Thank you, I was wondering what type of unit this was. Civil Defence, some kind of police squad, or perhaps even Freikorps?\"", "score": 0.6938690410149402}, {"text": "<url> was a bit bad form. Think twice before you do it again?", "score": -1.5909450516210983}, {"text": "\"Hi Zaps, I've noticed that you edited the abovementioned article. Could you please provide your sources?\"", "score": 1.0616678170490994}, {"text": "\"Thanks, I tried to do that but don't know how to do it? Can you help?\"", "score": 0.8406992210495762}, {"text": "\"Hi. The font-size for the links in this template may be on the small side, but it seems to be floating okay -- or do I need to check more places where it's used?\"", "score": 0.7327362615467391}, {"text": "I don't think I'll have the time to keep the flies off it all week but I'll have a go (I might get around to incorporating Caitlin Moran's detailed reviews too). Could you not semi it as well?", "score": -0.6385294170196743}, {"text": "Thanks for approving my request. 
Should I remove my request?", "score": 1.3562495429658643}, {"text": "Interesting. Would you mind asking at which Sae Gae seminar on what date it was taken?", "score": 0.9715672954910533}, {"text": "Many thanks for the review. So what was the truce term ''you'' used when little then?", "score": -0.5770760214559444}, {"text": "\"Why can't it just be fixed? There are other tables on wikipedia that aren't working properly, perhaps nobody noticed?\"", "score": -0.46822851476430066}, {"text": "You are welcome. Do i correctly understand that you plan to use the <url> account as your primary account from now on?", "score": 1.0695302588523214}, {"text": "\"I didn't remove the image; I just moved it down to the bottom of the box, to avoid having a long stack of images at the top. Is that a problem?\"", "score": -0.8756768804756117}, {"text": "Thanks for adding that ref to <url>. I don't suppose your copy of Conway's says anything about the roles and fates of ''Schleswig-Holstein'' or ''Schleisen''?", "score": 0.9962548883362009}, {"text": "\"- millions of people are using it to get help our service is free of charge. Since it's a tech support site you can't claim that it's not related to the article either (if you disagree with that just google for \"\"consumer electronics tech support\"\" see who comes first).We are here to server the community - why do you find it so offensive?\"", "score": -0.7893182767773442}, {"text": "I'm not even sure who this person is! Which page is this about?", "score": -1.4998373794642603}, {"text": "\"Hey there; I saw that you took the time to edit the Valletta article, but didn't remove user Jasalf's edit. How so?\"", "score": -0.8563715354483253}, {"text": "\"<url> is, in reality, a good guy, and quite reasonable. 
I'm confident that he'd be happy to help work with you on any image problems -- remember how you and I started out editing together?\"", "score": 0.9492861995993005}, {"text": "\"Without making a comment on the underlying coverage of the site being down, your edit comment <url> indicates a <url> violation. Under what logic do you propose that you have authority to set policy over the article?\"", "score": -2.003533726400477}, {"text": "\"So you are still sidestepping the question. Please answer it directly: please answer my question, clearly: are you saying is that you cannot use the wiki to edit cooperatively with other people on the same text, or are you saying that you can use the wiki as it was intended and edit a single text cooperatively?\"", "score": -0.4637258193812187}, {"text": "\"thanks for working on that. but... what is the difference between what you added to the intro, and what was already written below?\"", "score": 0.6985011297028612}, {"text": "\"I see you ended the AfD on <url> and removed the tag. Will you remove it on the other two articles that were proposed along with it, <url> and <url>?\"", "score": 0.8117618347682868}, {"text": "\"<person> - sorry about that, those situations are confusing. Maybe Twinkle uses, or behaves like, rollback?\"", "score": 0.6041385671234984}, {"text": "I'm currently considering nominating you for adminship at <url>. Are you interested?", "score": 0.5831932385607821}, {"text": "Its been over a week now and I see you still haven't passed or failed the article. Made a decision?", "score": -0.8804571331521297}, {"text": "\"Ha, you're too kind to me, my old friend. MK one day, promise?\"", "score": 0.90820020831845}, {"text": "\"Unfortunately for those who like railroads, it's been closed again! DOn't you people have better things to do?\"", "score": -1.761556047364174}, {"text": "Are you unable to pee with a half filled bladder???", "score": -2.2794372599950226}, {"text": "\"Very true. 
Out of interest, what do you make of the new kits(s)?\"", "score": 0.6581919026435039}, {"text": "I think we're not going to convince each other today. Thank you for the enjoyable debate :) Should we just agree to disagree for now?", "score": 1.06444513111876}, {"text": "A remarkably modern-sounding name for something being applied to articles on people who've been dead for centuries. You really think it's appropriate for <url> ?", "score": -1.0346924349910818}, {"text": "\"I pretty much agree with <url>, except I would like to know why you modified the see also section to be a hatnote. Doesn't <url> have specific requirements for this?\"", "score": -0.48654608617063}, {"text": "\"Unfortunately, I have no clue what you mean. Can you point me to a page that has such a feature?\"", "score": 0.627529188757956}, {"text": "\"Please discuss general principles on <url>. Do you think there should be a controversies page, or do you think the controversies should be integrated into the mainline texts of the various Giuliani articles?\"", "score": 0.5810688627868407}, {"text": "Good find. Do you have a cite for the second para?", "score": 0.6799067539905854}, {"text": "Excuse me? What are you accusing me of doing?", "score": -1.8298730504584815}, {"text": "\"The <url> has a request (header \"\"<url>\"\") left a few hours ago by someone who has pictures of signs in China but can't read them. Would you please be willing to help?\"", "score": 0.550145189185155}, {"text": "\"The reason given on the <nowiki><person></nowiki> tag (''\"\"Non-notable book promo\"\"'') seems to have nothing at all to do with the article. Is this on the correct article?\"", "score": -0.5336830664199333}, {"text": "\"I think that the standards should not apply to animated pictures like ''Tintin'' where start of production is a fuzzy limit. 
After all, do we have proof that the production of ''<url>'' or ''<url>'' is more advanced than the one of ''Tintin'' ?\"", "score": 0.5187826861010615}, {"text": "\"It seems you have been investigating this administrator and have brought up several incidents in your evidence based on that. If possible, could you please link to the feedback that Dreadstar has received as an administrator during/after each of those incidents (and if there was no feedback to any of those incidents, could you please let me know in reply to this)?\"", "score": 1.2282250875513707}, {"text": "\"\"\"Cactus League\"\", eh? Let's see... what does Liebman have in common with the type of pain you might get from a cactus needle?\"", "score": -0.9237957028258027}, {"text": "I will take out Haskett in deference to you. OK?", "score": -0.4680851801408936}, {"text": "thank you. any clue what happened?", "score": 0.8214926444297515}, {"text": "\"I put both in the first sentence, to show a range. Is that OK?\"", "score": 0.5247334880769028}, {"text": "\"Testing, testing, 1,2, 3, are you still alive? Where are you?\"", "score": -0.6334193470764999}, {"text": "\"just look out or you'll regret!OK, stop here, go to bed. Have U done \"\"Tourism Report\"\" yet?\"", "score": -1.145239736704372}, {"text": "\"He's back with yet another IP, leaving abusive and harassing messages on my personal talk page as well as here. Is there anything that can be done to end his disruption for good?\"", "score": -0.5497298185495435}, {"text": "\"Thanks Bro, it is better to ask is the stead of jumping out and getting a disgruntled Admin on my back. And why would you continualy :: bludgeon yourself over the head with your keyboard when you have my jokes?\"", "score": -0.521439629650154}, {"text": "\"Hi, I did post at <url> about the new Civil Ensign for Jersey but I've not had a reply yet. 
Any thoughts?\"", "score": 0.9880789879410911}, {"text": "\"Most of the editors who would care about the deletion of KiK's threads are the ones who would agree with him, so it's pointless to warn them. Hell ... come to think of it, hasn't KiK been all over their talk pages, with them high-fiving each other?\"", "score": -1.010973718947962}, {"text": "\"Just a question about the maintenance categories that FemtoBot automatically creates each month: I've noticed that the bot is frequently forced to recreate old, long-deleted monthly categories in the <person> queue, because an article got reverted to an old version for one reason or another and resulted in the old category being temporarily repopulated again. I'm just putting this forward as an idea for discussion, and am certainly not wedded to it u2014 but just to keep things simpler for everyone, I wonder what you would think about the feasibility and/or desirability of recoding the bot so that when it encounters a repopulated old maintenance category, it would retag the article to the ''current'' month instead of recreating the old one?\"", "score": 0.907550767280876}, {"text": "\"OK, so a website might not be reliable even if its sources are (despite the convenience in showing both on wikipedia for guidance, as mere reading material). But what you failed to explain is... WHY IS THIS PARTICULAR AND UNMENTIONED BY THE RULES WEBSITE UNRELIABLE?\"", "score": -1.1087762661537517}, {"text": "\"Hey Alex... I just emailed a United Kingdom professor, intending to email a University of Kentucky professor. If he expresses interest, are you the person he should talk to?\"", "score": 0.9171016114068002}, {"text": "Also I am willing to resume the discussion between you me and Direktor about the Legacy section. 
Would you be able to let me know on my talk page whether you are willing to do that in the near future?", "score": 0.8945377386717424}, {"text": "\"I figured I would ask you directly, since you objected to my original draft, and it seems to be difficult to workshop specific problems in the general discussion taking place at <url>. How is <url>?\"", "score": -0.553833775299489}, {"text": "\"Okay, so yeah, it's possible that that's his intent, but I even if it were, I don't see why the file has less value. Are interviews with BLP subjects not considered valid EL's general?\"", "score": -1.0057889867521335}, {"text": "\"Thank you for contacting me, the history has been restored given the link you provided. Would you mind assisting with sourcing of the article so that it complies with <url> policy?\"", "score": 1.4941187478634335}, {"text": "I see that you are becoming a little bit paranoid. Do i need to do a check user for you ?", "score": -1.3702331807521413}, {"text": "I noticed you're setting up s-line for DART but having trouble with the termini. Can I be of help?", "score": 1.8819654888649118}, {"text": "\"I understand, it could be quite time consuming. Maybe we could do a collab for an article related to Thalia (once I'm finished with some work)?\"", "score": 0.9380333277826347}, {"text": "\"I'm sorry, but I don't see what you're talking about. Could you be more specific?\"", "score": 0.8032941325070382}, {"text": "That is why I suggest the following: either we refer to him as u201cSouth-Netherlandishu201d (like he is described on the Dutch Wikipedia page) or we omit the nationality completely and we just mention the cities he lived in. Does this seem acceptable?", "score": 0.8539932008506206}, {"text": "\"So why didn't you just look through the source, to see if there was a number there? Or why didn't you simply ask the editor on their talk page?\"", "score": -1.3680481707408094}, {"text": "\"Look dude, <url> was ALSO just deleted. 
What makes 2005 so special?\"", "score": -2.0882062688025105}, {"text": "I have absolutely no idea what you are talking about. Please be more precise?", "score": -0.9161074648861}, {"text": "Hi. Is there any chance of getting <url> unblocked from making edits on Wikipedia?", "score": 0.8118025140837462}, {"text": "Did you see JzG <url> James Cantor that his alleged COI prohibits him from editing any BLPs related to his academic field? Didn't the AN discussion after the last round actually repudiate JzG's stance on this?", "score": -0.5528653178937674}, {"text": "\"Die Person, die Probleme macht, ist jetzt in Schwierigkeiten. Vielleicht seit langem?\"", "score": -0.7468532112721402}, {"text": "Same with the vandal over at <url>. I don't think enough people watch that page to get a consensus on protecting that page--what can be done in the meantime?", "score": 0.6443428746722994}, {"text": "\"Hey, look at this, I don't even know your name! Quel est votre nom, Mademoiselle?\"", "score": -0.47988513667078914}, {"text": "\"I could take a peek, at least. What problems are you having?\"", "score": 0.8536140388674411}, {"text": "Are you trying to help him milk the publicity over the WR incident? if it ends up on google with lots of nns why would that matter?", "score": -1.0337564147644023}, {"text": "\"I think it is a great idea to merge my Geraldine Jones article to the flip Wilson show article, but I don't know how to do that. Can you help me?\"", "score": 0.7043193879916967}, {"text": "\"Firstly who are you? Secondly there seems to be a lot more people involved than just Node ue thirdly I don't care what his view is as long as his edits are neutral, which edits do you dispute?\"", "score": -2.0588205945914964}, {"text": "\"I've finished with that category, yes. Would you like a hand clearing it?\"", "score": 1.034830415637682}, {"text": "You are very welcome. Isn't it great we can all create a better world?", "score": 1.3006310390785716}, {"text": "\"Hello, again. 
Would you care to intervene at <url>?\"", "score": 0.8348431028300338}, {"text": "He's now been asked if he is ''the'' Alex Salmond and if not he should make that known. Imagine if he was?", "score": -0.49363507235456294}, {"text": "\"You're acting very oddly today: You've just reverted two separate edits on <url>, saying they were the phrasing of the Lancet, but neither references the Lancet, and it seems unlikely that both would have the same statement applying to them. Have you been hacked?\"", "score": -0.743294399015347}, {"text": "\"You really want off here, dont you? I've already posted the incident on the incident board and you still are keeping it up?\"", "score": -2.626637921195542}, {"text": "I look forward to your genre plan. What do you have in mind?", "score": 0.9339776500697518}, {"text": "\"Opps, how come your archive links to the article's talk page archive? I'm going to fix it for you, ok?\"", "score": 0.9184784148992368}, {"text": "I'm always behind the times. PS- When did you guys decide to ''remove'' the Quebecois nation resolution from that article's introduction?", "score": -0.5998838425223901}, {"text": "\"Started, shall we post on the <url> page and ask for others to collaborate. Is that a good use of the WikiProject?\"", "score": 0.6091723679516272}, {"text": "\"Hi, I'm new to RC patrol, and I was wondering how you do the 'reverted edits by X to version by Y' edit summary. Is there a shortcut to reverting an edit?\"", "score": 0.7355271047694107}, {"text": "\"in this case the noun has to be capitalised, so \"\"Erdene Zuu monastery\"\" would be \"\"Erdene Zuu Monastery\"\". And why did you move <url>, what other monastery with this name exists?\"", "score": -0.7361464996507106}, {"text": "Where is the vandalism? Diffs?", "score": -0.6742521970490285}, {"text": "The source on the Daniel Larsson page looks good enough. 
Perhaps someone at Swedish interwiki might know of other sources?", "score": 0.677659013000689}, {"text": "You do realise this is complete rubbish - it's already been speedily deleted once if you check the logs. I can't see any advantage to having this crap floating around for 5 days - can you?", "score": -1.9205098341293134}, {"text": "\"Yes, the \"\"licking my balls\"\" thing. Why am I not surprised that that was his work too?\"", "score": -1.0712385295200204}, {"text": "Thank you very much for fixing the broken references in <url> and related articles. Are you planning to fix them in <url> as well?", "score": 1.35713522037504}, {"text": "\"Hey friend, you are missed on here. What's up?\"", "score": 2.16715310109712}, {"text": "\"Maybe others haven't noticed - or are too busy. If you are thinking what I'm thinking, why am I reverting so many of your edits?\"", "score": -0.9766429083647512}, {"text": "I am willing to try to discover the problem. What browser do you use?", "score": 0.8763037165409837}, {"text": "Okay thanks I'll try it! DId you choose the pictures yourself?", "score": 1.2604280738420657}, {"text": "<url> Am I missing something?", "score": -0.5883429901400243}, {"text": "\"Would you be okay if I seconded your nomination? Or would you prefer me to add as a \"\"support\"\"?\"", "score": 0.8216650620921095}, {"text": "\"Anupam, the etymology of ''chand raat'' Sanskritic, as stated in article is basically correct. Is there an issue?\"", "score": -0.6800888650937564}, {"text": "\"Wait, that wasn't the question, was it. What appears after these that I don't have?\"", "score": -1.0392334108622483}, {"text": "What's wrong with the standard NBA style for the userbox template? Why can't the Thunder's box look LIKE EVERY OTHER TEAM'S?", "score": -0.9104035415915822}, {"text": "\"Hello Alex! 
Since you are experienced in ''botovodstvo'', could you help us with the issue <url>?\"", "score": 0.8792621313132265}, {"text": "\"Forgive me if you've already done this, but...Could you post a list of the listing codes (such as DO, RN, etc.) for possibly non-listed properties with reference numbers?\"", "score": 1.5059939318987052}, {"text": "It appears that you deleted my comments and vote on <url>. May I ask why?", "score": 0.6074434219996598}, {"text": "\"Since you are STILL not responding to me (or the talk page discussion) and reverting anyway, I have mad you a <url> template. Post THAT on your page and leave the regular one alone, OK?\"", "score": -1.4807558502102696}, {"text": "Sigh. Did arbcomm teach you nothing?", "score": -2.083224131157202}, {"text": "\"Agian, wishful thinking is one thing, fantasyland, now back to reality, Can you provide me ONE journal, ONE book that mentioned the term \"\"palestinians\"\" before 1960's??? Why not?\"", "score": -1.751730669254649}, {"text": "Thanks for the good advice. I still think the electric-shock-button idea is a good one though :-) You been having problems with images as well?", "score": 0.9487518692023201}, {"text": "????????????", "score": -0.9496017807462687}, {"text": "\"You said ''\"\"undo. Raggz, you added the tag and THEN mentioned it on talk- you did not gain consensus first or even wait for anyone to discuss it.\"\"'' Is consensus required to add a tag?\"", "score": -0.47480887918475856}, {"text": "I seem to have stuffed up creating 4 April 2009 very badly. Can you help?", "score": 0.731871161077016}, {"text": "\"You're welcome to revert edits, but I'm puzzled as to why you reverted this one - it seems absolutely essential to me the give a summary of Genesis 1. And if you revert Gen1, why leave Gen2?\"", "score": -0.4676860983130558}, {"text": "I was about to participate in the Afd on Wooster Greeks. 
You've reverted yourself and re-closed it?", "score": -0.8149937446502902}, {"text": "\"What do you mean \"\"personal attack\"\"? Why don't site what my \"\"personal attack\"\" is?\"", "score": -0.9967193756888821}, {"text": "Thanks for your response. What happens now?", "score": 1.2663972490355457}, {"text": "\"Eh? No, those facts were correct, why do you think they were not?\"", "score": -1.7219263848148167}, {"text": "\"I understood just fine, but wasn't at my computer. Are you in a hurry?\"", "score": -0.8103608065291628}, {"text": "\"I've always been intrigued by 'dark-complected man.' What's with the radio, and fist in the air?\"", "score": -0.5183819369001386}, {"text": "Your early edit's clearly indicate that you were not a newbie. How do explain this?", "score": -0.8426595906162412}, {"text": "\"Instead of another 3O, why don't you put in a <url>. And no, it's not a threat - it's an observation - why don't you <url>?\"", "score": -1.2757935737677462}, {"text": "\"Great Article RaveenS, Do u want me to add this to the template (Sri Lankan Conflict)? I think it should be included in the ''see also'' section what do you suggest?\"", "score": 1.0044941805951866}, {"text": "\"Cool, Thanks, do you want to add it to their list? or can I?\"", "score": 1.1452264092984916}, {"text": "\"Sorry for the delay in responding. The problems seems to have calmed down at this point, is that correct?\"", "score": 1.140916721853912}, {"text": "PS - surprised you didn't make the annoucement in your blog. Trying to fool your hordes of devoted readers?", "score": -1.3140293348203982}, {"text": "I haven't. What are you referring to?", "score": -1.1503355298865952}, {"text": "\"So you see KV, just in case you missed the point, you are in the naughty chair in the naughty corner because of your own baloney, baloney that you bring yourself. 
Now, didn't you like that story?\"", "score": -2.2996176528937986}, {"text": "\"It's not too long, certainly; but the extended quote does look a bit out of place there. Perhaps it would be possible to move the bulk of the quote into the body of the article and leave a short phrase or two that could be inline with the second paragraph?\"", "score": 0.6210798245046428}, {"text": "\"BTW, I translated the page to the best of my ability (my Spanish only carried me so far). Do you speak Portuguese?\"", "score": 0.6461704832603521}, {"text": "Huh? Is that a web address or a wikilink?", "score": -1.055958632898158}, {"text": "\"Actually, I am concerned that you had completely removed verifiable facts of which I had included the references. Would you please explain why you had removed the entire section I wrote on Collection Agencies in Canada?\"", "score": -0.668733441725401}, {"text": "\"I was confused by \"\"darkhad\"\" being translated as \"\"sacred ones\"\", when the usual translation I know is smith or craftsman (kind of). Was I at least correct in being confused?\"", "score": 0.5336017389367853}, {"text": "Don't worry I understand. Using igloo I assume?", "score": 1.1736146847539064}, {"text": "\"could you email me when you get a chance? not having any luck this end, must be spam filters blocking?\"", "score": 0.80595127719608}, {"text": "\"Yeah, I'm not sure that I care one way or another, but the 3RR is clearly incorrect. And what happened to <url>?\"", "score": -0.7880446511743612}, {"text": "\"I noticed your \"\"Original Research\"\" notation for the section on weights and measures in the article in Liberia. Could you explain in more detail your concern and how it might be remedied?\"", "score": 0.5489473767412034}, {"text": "During your RFA you stated that you would be helping deal with the Scibaby situation after Raul stopped doing it. 
Do you feel that you've done what you said you were going to do?", "score": -1.1501313143924616}, {"text": "Thank you for your answer. What are the sources for region 4 (Australia) release dates?", "score": 1.1440879947599683}, {"text": "\"I'm sorry, I don't know what article you are talking about. Can you please provide the exact name?\"", "score": 1.1709734979653443}, {"text": "\"Good day, sorry to bother you. Is the 2:00 am news brief called eyewitness news everynite, and if it is do they play move closer to your world?\"", "score": 0.65980283338641}, {"text": "\"One to go. Shouldn't \"\"now welcome\"\" be \"\"not welcome\"\"?\"", "score": -0.5611448487598728}, {"text": "I don;t like commons because when i try to link things in the description it never works because it links only to commons. what is that?", "score": -0.5810795379302505}, {"text": "I have some made some improvements to the articles you references on the above user's talk page. Perhaps next time you would bring your concerns directly to '''me'''?", "score": -0.8637058661676373}, {"text": "\"I agree with Orlady here; full-protection, a fairly serious position when we're talking about an encyclopedia anyone can edit, is only to be taken out as a last (reasonable) resort. Since both participants to the dispute are now blocked, is there any reason for the protection to be necessary?\"", "score": 0.5196513084062007}, {"text": "\"Ok, many thanks for that. I'm not sure how reliable that 2nd group are - it has <url> but what about all his dinner-guests?\"", "score": 0.7313712240611943}, {"text": "\"Hello again. Could you move this to <url>, please?\"", "score": 1.6356092137222427}, {"text": "Yes I am planning to. Is that good?", "score": 0.9888167919826978}, {"text": "What do you think? Perhaps the coat of arms field can contain the picture that the article's family name infobox already has?", "score": 0.5927949857337845}, {"text": "Thanks for the tip. 
How come I sometimes come across pages where the trailing 's' is left off (or am I confusing that with the 's situation)?", "score": 0.6147430731722334}, {"text": "\"One more thing if you want, let's talk on the Seinfeld talk page. I think I am taking up your talk page so let's talk there instead, ok?\"", "score": 0.8916686132698892}, {"text": "\"Some time ago, you uploaded separate maps for each town and unincorporated community in <url>. Do you have similar maps for any other counties, or a town-level map of the entire state?\"", "score": 0.9917768507578941}, {"text": "\"Regarding <url>, <url>, <url>, <url>, <url> and <url>, may I redirect them? Should I leave the merge tag on the talk page?\"", "score": 0.7772083950378841}, {"text": "Thanks for the note about my work on the above line. Would you mind if I copied and pasted your version over to mine to work on?", "score": 1.951290012069593}, {"text": "I don't see any of that in his deletion log. Would you please find out which one it was as evidence so you're not blindly accusing an admin of deleting something they didn't delete?", "score": -1.145264433038415}, {"text": "Is <url> what you're referring to?? '''''One''''' autograph page that he deleted almost '''''five months''''' ago and you're now attacking him as if he deleted every single autograph book ever?", "score": -1.3869044533162154}, {"text": "\"If I had asked you, the same thing that I did would have been accomplished. Why force you to go through the trouble of removing them if someone else is already removing all the others?\"", "score": -1.0114000885598953}, {"text": "\"Hi, I recently got approval for your requested link replacement (sorry for the delay), but, after doing a <url> there seems to be very few links left to replace. Could you confirm the problem is solved?\"", "score": 1.0257327605738336}, {"text": "Thank you for taking the time to review my nomination of <url> as a featured article candidate. 
I have responded to the issues you raised in <url> - could you please review my replies and let me know if you're satisfied?", "score": 1.7550265050252072}, {"text": "Cheers! Don't suppose you have a link.....?", "score": 0.7356369517579872}, {"text": "\"Oh, that's because I only copyedited a tiny section of the article. Should I still count that as a yes?\"", "score": 0.5826692823765632}, {"text": "\"Oh? You know something I don't, Friday?\"", "score": -1.3995697219150807}, {"text": "\"I see you've just placed a deletion request on your user page, but this page has no history and hasn't been edited previously. Did you mean to place it on the article you created, <url>?\"", "score": 0.8491436610112343}, {"text": "What was your aim with <url>? have you abandoned it?", "score": -0.4472137657756384}, {"text": "\"No worries, thanks for doing that. Nice to hear from you again - new username for 2013?\"", "score": 1.4045483875209026}, {"text": "Fatwa? What is that?", "score": -0.5623299464551792}, {"text": "\"I didn't have those in <person> because they were already included under the tributaries categories. Is this one of those \"\"basketball player\"\" exceptions?\"", "score": -0.49714010832066047}, {"text": "\"If you're not going to reply then I will undo the edit. Again, why have \"\"Fictional martial artists\"\" over the sub-cat \"\"Anime and manga martial artists\"\" and not use alphabetical order?\"", "score": -0.7239580074194174}, {"text": "\"Certainly the work is out of copyright, and some of the neighboring volumes in the series are visible on archive.org (and I don't have trouble with other books on google generally). I wonder if you know of any way, or could suggest any way, that I could get to see the downloaded pdf of Newcomb vol.6?\"", "score": 0.7720323450692426}, {"text": "\"You still haven't answered my question: ''why did you delete the article so quickly'', without even bothering to read it? 
And why are you so willing to back Thunderbunny and not me?\"", "score": -1.6995690401788117}, {"text": "I am happy to rewrite it but please explain to me how it is any different from the Warrior Lacrosse and STX pages? They don't have any links except to their own websites?", "score": 1.2879393893633773}, {"text": "\"Sorry, I've moved a number of articles and thought I did it correctly. How is moving done properly without losing the edit history?\"", "score": 0.631174155598655}, {"text": "I had a look at the place noted on the top of the talk page about 1rr and I was unable to find the sanction you talked about. Where is it listed please?", "score": 1.3621122580929685}, {"text": "\"''You wrote:'' I'm not trying to be obtuse, but is what you're saying in essence that if we cannot tell whether an award is utterly trivial then that uncertainty becomes sufficient assertion of notability to not apply A7? So if an assumption needs to be made, or discretion employed, A7 doesn't apply because the assertion of notability has been made?\"", "score": -1.1194564195845214}, {"text": "Thank you for your input in my article on Iowa State Track and Field! I am trying to improve the article more and wondered if you could look at it and make suggestions that may improve the current content?", "score": 1.6817096040619155}, {"text": "\"I've come to state that I find it mildly amusing that you don't consider a press release for a journalism award from Columbia University to be a \"\"substantial story from a citable source\"\". Isn't Columbia among the world's most prestigious journalism schools?\"", "score": -0.8786975878668255}, {"text": "I'm not sure what you are asking here. Admins cannot check IPs but why would you even want the same IP that would be foolish wouldn't it?", "score": -0.9373550085409077}, {"text": "\"I've made some changes to the <url>, and started a discussion on the template talk page. 
I'm a bit stuck on some bits of table formatting and linking, so I wondered if you'd be able to help?\"", "score": 1.271119829316333}, {"text": "Turkish boys are circumcised they are Muslim's. Why did you delete it?", "score": -0.5528961263755143}, {"text": "Thanks for providing the IPA of <url>. can you help further in copyediting the text?", "score": 1.222893648689658}, {"text": "You should really use edit summaries to justify your edits. Explain?", "score": -1.2699794919477994}, {"text": "\"Your recent edits with Dab solver seem to be removing rather a lot of links, without mentioning this in the edit summary. Is this intended behaviour?\"", "score": -0.9419131366266642}, {"text": "\"Deleting Val Klassen's page. I redirected it to the creator's talkpage, wasn't that enough?\"", "score": -1.3688779075936466}, {"text": "\"Cla, your partisanship is indeed fairly obvious, and pointing to one article where you've deliberately been good isn't good enough. So: if I point you to my stand against AJL, will you declare me to be non-partisan?\"", "score": -1.0671630692181775}, {"text": "\"Thanks for explaining. Um, is that an official policy, or one just custom?\"", "score": 0.8688693214338373}, {"text": "The race was advertised and billed as the 100th Indy 500. Are you saying the promoters and Indycar are liars?", "score": -0.8463286386664823}, {"text": "I've started an article called <url>. Would you be able to do a good intro paragraph?", "score": 0.8533456695833262}, {"text": "\"Hi Steve, I've added refs & cites to this article. Do you think it still needs a warning about lack of refs/citations?\"", "score": 0.9892285524111456}, {"text": "\"Hi. Just out of interest, are you by any chance the same Tom Walker who maintains this <url>?\"", "score": 0.5814401249359791}, {"text": "Sorry Less? What is that last postdirected at?", "score": -0.7018012605979985}, {"text": "\"I also found your audio uploads at <url>. 
If it's not a burden, would you mind recording more Hungarian town names for WP?\"", "score": 1.4788709706695193}, {"text": "\"Thanks, I think Angel Mendez will be a good story for the portal. What do you think?\"", "score": 0.9802255968828216}, {"text": "I was wondering where you got your information for the dice statistics you recently added/changed in <url>. Can you add a source to those tables?", "score": 0.8080900483054462}, {"text": "And what did I do wrong???????????????????????", "score": -1.7624763248304745}, {"text": "\"Thank you, it's much appreciated. What do you think of the changes so far?\"", "score": 1.5141983819779266}, {"text": "\"Hi, although I agree with you, please check the following <url> in process. Why can't the darn thing work, in the first place?\"", "score": -0.65984639780439}, {"text": "\"Hello Jennica, I noticed you revert vandalsm occasionally. Would you like me to grant your account <url> to help you revert vandalism more easily?\"", "score": 0.9679042354806873}, {"text": "Hm. Are you asking to be banned?", "score": -1.754243794686127}, {"text": "\"I was at the library today looking through the back issues of ''Mojo'' and found a career-spanning Chili Peppers article from 2004. I read most of it, but I can go back next week and cite from it. Is there anything you want me to look for?\"", "score": 1.0865438257895454}, {"text": "..is indeed a sick and boring life. Where do I sign up to give you your celebratory sexin'?", "score": -1.2116447503230012}, {"text": "\"Ok, thanks for that. Could you please tell me why the Kiwa page is giving warning about notability and citations, where as for example this one <url> is not?\"", "score": 0.9630067751731834}, {"text": "???You wat?", "score": -1.7944763558885004}, {"text": "\"I see where someone put \"\"April Fool's\"\" in a revision, but otherwise, there was no other humor included, the page was utilitarian otherwise, and you can't move a category (meaning I would have to delete it anyway). 
Do we ''really'' need to retain this?\"", "score": -0.7981608260915489}, {"text": "E-mail me at <url> (just get rid of the spaces in between the letters and numbers) whenever you feel like it. By the way are you a member on any Iranian football forums online?", "score": 0.8218877807068928}, {"text": "\"Just to let you know, there's a <url> now. I don't know, is this mimicry allowed on Wikipedia?\"", "score": -0.7515940115992275}, {"text": "'''Wiki-star''': Hm? Is there a reason for your sorrow my good man?", "score": 0.7022491628751035}, {"text": "Being too efficient can be detrimental - what if I was doing it in the wrong order - does that allow my reversion if indeed I do utilise the abbreviation? A simple enquiry might have been in order?", "score": -1.1528544342563949}, {"text": "Okay thanks. Any easy way to determine this?", "score": 0.7273851431320687}, {"text": "\"Yes, it is a matter of civility. Do you think a year would be better?\"", "score": 0.6264806580665501}, {"text": "\"If you have something to say for keeping that picture, say it here and I'll copy it over to the FFD page. But, frankly, if you can't even be bothered to check out the <url> rules to find out about why the Zguri image isn't acceptable, how do you expect to be able to make any relevant argument about the other one?\"", "score": -1.327630137827498}, {"text": "Thanks. Can you use <url>?", "score": 0.910154363056311}, {"text": "This is not a suitable candidate for CSD as it asserts notability. May I suggest you consider a <url>?", "score": 0.5849294606644192}, {"text": "\"Well, now you've got me puzzled. If you think he's a sneaky vandal, why did you revert my reversion of his numerical change?\"", "score": -0.7002579856446518}, {"text": "What is the context that you think is lacking? any suggestions on what to add?", "score": 0.6072132993824758}, {"text": "No problem. 
Have you seen the <url> article?", "score": 0.6469388858440939}, {"text": "\"Due to your edits to this template, <url> is now required on all transclusions. Are you running a bot to clean this up?\"", "score": -0.8085962788590614}, {"text": "\"On <url> about the Chinese sex imbalance, you accused either the OP or the first responder (me) of <url>. Could you explain why you thought either of us was advocating, opining, or recruiting, in a way that diminished the quality of the reference desk?\"", "score": -1.0475862942377572}, {"text": "I don't think anybody wants the stale uninformative old Victorian results infoboxes. Can you gain consensus before deviating?", "score": -0.5716893911454595}]
199,752
399,503
0.722413
64,498
399,504
4.474449
0.130578
0.028587
0.001525
0.001698
0.056703
0.025254
0.011899
0.006941
0.006261
0.005943
0
0.110495
0.153097
399,504
1
399,504
399,504
0.74247
0
0
0
0
149
0.955132
0.049281
0
0
0.00001
0
1
1
0
false
1
1
0
1
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
1
1
0
0
0
0
0
1
1
0
null
0
0
0
1
0
0
0
1
1
0
1
1
0
10
2e56b876c35f652eb5ea842a05ab0a8f8e792011
39,348
py
Python
skvectors/tests/test_simple_vector_methods_3d.py
t-o-k/scikit-vectors
67ac86807160253b09cb5461f718b3f772ada263
[ "BSD-3-Clause" ]
4
2019-03-27T20:41:30.000Z
2020-07-28T19:02:03.000Z
skvectors/tests/test_simple_vector_methods_3d.py
t-o-k/scikit-vectors
67ac86807160253b09cb5461f718b3f772ada263
[ "BSD-3-Clause" ]
null
null
null
skvectors/tests/test_simple_vector_methods_3d.py
t-o-k/scikit-vectors
67ac86807160253b09cb5461f718b3f772ada263
[ "BSD-3-Clause" ]
1
2019-09-08T23:09:15.000Z
2019-09-08T23:09:15.000Z
""" Copyright (c) 2017 Tor Olav Kristensen, http://subcube.com https://github.com/t-o-k/scikit-vectors Use of this source code is governed by a BSD-license that can be found in the LICENSE file. """ from math import floor, ceil, trunc import itertools import unittest import skvectors class Test_Case_simple_vector(unittest.TestCase): create_vector_class = staticmethod(skvectors.create_class_Simple_Vector) @classmethod def setUpClass(cls): cls.V3D = \ cls.create_vector_class( name = 'V3D', component_names = 'xyz', brackets = '<>', sep = ', ' ) @classmethod def tearDownClass(cls): del cls.V3D def test_round(self): fail_msg = "Problem with method '__round__'" u = self.V3D(x=-555.555, y=-333.333, z=555.555) v = self.V3D.__round__(u) self.assertListEqual(v.component_values(), [ -556.0, -333.0, 556.0 ], msg=fail_msg) u = self.V3D(x=555.555, y=333.333, z=-555.555) v = u.__round__(ndigits=0) self.assertListEqual(v.component_values(), [ 556.0, 333.0, -556.0 ], msg=fail_msg) u = self.V3D(x=-555.555, y=-333.333, z=555.555) v = round(u) self.assertListEqual(v.component_values(), [ -556.0, -333.0, 556.0 ], msg=fail_msg) u = self.V3D(x=-555.555, y=-333.333, z=555.555) v = round(u, 0) self.assertListEqual(v.component_values(), [ -556.0, -333.0, 556.0 ], msg=fail_msg) u = self.V3D(x=-555.555, y=-333.333, z=555.555) v = round(u, ndigits=2) self.assertListEqual(v.component_values(), [ -555.55, -333.33, 555.55 ], msg=fail_msg) u = self.V3D(x=-555.555, y=-333.333, z=555.555) v = round(u, ndigits=-2) self.assertListEqual(v.component_values(), [ -600.0, -300.0, 600.0 ], msg=fail_msg) u = self.V3D(0, -1, 2) id_u_before = id(u) v = round(u) id_v_after = id(v) self.assertNotEqual(id_u_before, id_v_after, msg=fail_msg) def test_floor(self): fail_msg = "Problem with method '__floor__'" u = self.V3D(x=-555.555, y=-333.333, z=555.555) v = self.V3D.__floor__(u) self.assertListEqual(v.component_values(), [ -556.0, -334.0, 555.0 ], msg=fail_msg) u = self.V3D(x=555.555, y=333.333, z=-555.555) v = 
u.__floor__() self.assertListEqual(v.component_values(), [ 555.0, 333.0, -556.0 ], msg=fail_msg) u = self.V3D(x=-555.555, y=-333.333, z=555.555) v = floor(u) self.assertListEqual(v.component_values(), [ -556.0, -334.0, 555.0 ], msg=fail_msg) u = self.V3D(0, -1, 2) id_u_before = id(u) v = floor(u) id_v_after = id(v) self.assertNotEqual(id_u_before, id_v_after, msg=fail_msg) def test_ceil(self): fail_msg = "Problem with method '__ceil__'" u = self.V3D(x=-555.555, y=-333.333, z=555.555) v = self.V3D.__ceil__(u) self.assertListEqual(v.component_values(), [ -555.0, -333.0, 556.0 ], msg=fail_msg) u = self.V3D(x=555.555, y=333.333, z=-555.555) v = u.__ceil__() self.assertListEqual(v.component_values(), [ 556.0, 334.0, -555.0 ], msg=fail_msg) u = self.V3D(x=-555.555, y=-333.333, z=555.555) v = ceil(u) self.assertListEqual(v.component_values(), [ -555.0, -333.0, 556.0 ], msg=fail_msg) u = self.V3D(0, -1, 2) id_u_before = id(u) v = ceil(u) id_v_after = id(v) self.assertNotEqual(id_u_before, id_v_after, msg=fail_msg) def test_trunc(self): fail_msg = "Problem with method '__trunc__'" u = self.V3D(x=-555.555, y=-333.333, z=555.555) v = self.V3D.__trunc__(u) self.assertListEqual(v.component_values(), [ -555.0, -333.0, 555.0 ], msg=fail_msg) u = self.V3D(x=555.555, y=333.333, z=-555.555) v = u.__trunc__() self.assertListEqual(v.component_values(), [ 555.0, 333.0, -555.0 ], msg=fail_msg) u = self.V3D(x=-555.555, y=-333.333, z=555.555) v = trunc(u) self.assertListEqual(v.component_values(), [ -555.0, -333.0, 555.0 ], msg=fail_msg) u = self.V3D(0, -1, 2) id_u_before = id(u) v = trunc(u) id_v_after = id(v) self.assertNotEqual(id_u_before, id_v_after, msg=fail_msg) def test_abs(self): fail_msg = "Problem with method '__abs__'" u = self.V3D(0, -1, 2) v = self.V3D.__abs__(u) self.assertListEqual(v.component_values(), [ 0, 1, 2 ], msg=fail_msg) u = self.V3D(-3.5, 4.5, -5.5) v = u.__abs__() self.assertListEqual(v.component_values(), [ 3.5, 4.5, 5.5 ], msg=fail_msg) u = self.V3D(0, -1, 2) 
v = abs(u) self.assertListEqual(v.component_values(), [ 0, 1, 2 ], msg=fail_msg) u = self.V3D(0, -1, 2) id_u_before = id(u) v = abs(u) id_v_after = id(v) self.assertNotEqual(id_u_before, id_v_after, msg=fail_msg) def test_neg(self): fail_msg = "Problem with method '__neg__'" u = self.V3D(0, -1, 2) v = self.V3D.__neg__(u) self.assertListEqual(v.component_values(), [ 0, 1, -2 ], msg=fail_msg) u = self.V3D(-3.5, 4.5, -5.5) v = u.__neg__() self.assertListEqual(v.component_values(), [ 3.5, -4.5, 5.5 ], msg=fail_msg) u = self.V3D(0, -1, 2) v = -u self.assertListEqual(v.component_values(), [ 0, 1, -2 ], msg=fail_msg) u = self.V3D(0, -1, 2) id_u_before = id(u) v = -u id_v_after = id(v) self.assertNotEqual(id_u_before, id_v_after, msg=fail_msg) def test_pos(self): fail_msg = "Problem with method '__pos__'" u = self.V3D(0, -1, 2) v = self.V3D.__pos__(u) self.assertListEqual(v.component_values(), [ 0, -1, 2 ], msg=fail_msg) u = self.V3D(-3.5, 4.5, -5.5) v = u.__pos__() self.assertListEqual(v.component_values(), [ -3.5, 4.5, -5.5 ], msg=fail_msg) u = self.V3D(0, -1, 2) v = +u self.assertListEqual(v.component_values(), [ 0, -1, 2 ], msg=fail_msg) u = self.V3D(0, -1, 2) id_u_before = id(u) v = +u id_v_after = id(v) self.assertNotEqual(id_u_before, id_v_after, msg=fail_msg) def test_add(self): fail_msg = "Problem with method '__add__'" u = self.V3D(0, -1, 2) w = self.V3D(-3, 4, -5) v = self.V3D.__add__(u, w) self.assertListEqual(v.component_values(), [ -3, 3, -3 ], msg=fail_msg) u = self.V3D(-3, 4, -5) v = u.__add__(3.5) self.assertListEqual(v.component_values(), [ 0.5, 7.5, -1.5 ], msg=fail_msg) u = self.V3D(0, 1, -2) w = self.V3D(-3, 4, -5) v = u + w self.assertListEqual(v.component_values(), [ -3, 5, -7 ], msg=fail_msg) u = self.V3D(0, -1, 2) v = u + 4.5 self.assertListEqual(v.component_values(), [ 4.5, 3.5, 6.5 ], msg=fail_msg) u = self.V3D(0, -1, 2) v = u + (-3) self.assertListEqual(v.component_values(), [ -3, -4, -1 ], msg=fail_msg) u = self.V3D(0, 1, -2) w = self.V3D(-3, 
4, -5) id_u_before = id(u) v = u + w id_v_after = id(v) self.assertNotEqual(id_u_before, id_v_after, msg=fail_msg) def test_sub(self): fail_msg = "Problem with method '__sub__'" u = self.V3D(0, -1, 2) w = self.V3D(-3, 4, -5) v = self.V3D.__sub__(u, w) self.assertListEqual(v.component_values(), [ 3, -5, 7 ], msg=fail_msg) u = self.V3D(-3, 4, -5) v = u.__sub__(3.5) self.assertListEqual(v.component_values(), [ -6.5, 0.5, -8.5 ], msg=fail_msg) u = self.V3D(0, -1, 2) w = self.V3D(-3, 4, -5) v = u - w self.assertListEqual(v.component_values(), [ 3, -5, 7 ], msg=fail_msg) u = self.V3D(0, -1, 2) v = u - 4.5 self.assertListEqual(v.component_values(), [ -4.5, -5.5, -2.5 ], msg=fail_msg) u = self.V3D(0, -1, 2) v = u - (-3) self.assertListEqual(v.component_values(), [ 3, 2, 5 ], msg=fail_msg) u = self.V3D(0, -1, 2) w = self.V3D(-3, 4, -5) id_u_before = id(u) v = u - w id_v_after = id(v) self.assertNotEqual(id_u_before, id_v_after, msg=fail_msg) def test_mul(self): fail_msg = "Problem with method '__mul__'" u = self.V3D(0, -1, 2) w = self.V3D(-3, 4, -5) v = self.V3D.__mul__(u, w) self.assertListEqual(v.component_values(), [ 0, -4, -10 ], msg=fail_msg) u = self.V3D(-3, 4, -5) v = u.__mul__(1.5) self.assertListEqual(v.component_values(), [ -4.5, 6.0, -7.5 ], msg=fail_msg) u = self.V3D(0, 1, -2) w = self.V3D(-3, 4, -5) v = u * w self.assertListEqual(v.component_values(), [ 0, 4, 10 ], msg=fail_msg) u = self.V3D(0, -1, 2) v = u * 4.5 self.assertListEqual(v.component_values(), [ 0.0, -4.5, 9.0 ], msg=fail_msg) u = self.V3D(-3, 4, -5) v = u * -3 self.assertListEqual(v.component_values(), [ 9, -12, 15 ], msg=fail_msg) u = self.V3D(0, 1, -2) w = self.V3D(-3, 4, -5) id_u_before = id(u) v = u * w id_v_after = id(v) self.assertNotEqual(id_u_before, id_v_after, msg=fail_msg) def test_pow(self): fail_msg = "Problem with method '__pow__'" u = self.V3D(0, -1, 2) w = self.V3D(2, 0, 1) v = self.V3D.__pow__(u, w) self.assertListEqual(v.component_values(), [ 0, 1, 2 ], msg=fail_msg) u = 
self.V3D(-3, 4, -5) w = self.V3D(2, 1, 0) v = u.__pow__(w) self.assertListEqual(v.component_values(), [ 9, 4, 1 ], msg=fail_msg) u = self.V3D(0, -1, 2) w = self.V3D(2, 1, 0) v = u**w self.assertListEqual(v.component_values(), [ 0, -1, 1 ], msg=fail_msg) u = self.V3D(0, -1, 2) w = self.V3D(0, 2, 1) v = u**w self.assertListEqual(v.component_values(), [ 1, 1, 2 ], msg=fail_msg) u = self.V3D(0, -1, 2) v = u**3 self.assertListEqual(v.component_values(), [ 0, -1, 8 ], msg=fail_msg) u = self.V3D(-2, 4, -5) v = u**-1 self.assertListEqual(v.component_values(), [ -0.5, 0.25, -0.2 ], msg=fail_msg) u = self.V3D(0, -1, 2) w = self.V3D(2, 1, 0) id_u_before = id(u) v = u**w id_v_after = id(v) self.assertNotEqual(id_u_before, id_v_after, msg=fail_msg) u = self.V3D(-1, 0, 2) w = self.V3D(0, -1, 2) with self.assertRaises(ZeroDivisionError, msg=fail_msg): u**w u = self.V3D(-1, 0, 2) with self.assertRaises(ZeroDivisionError, msg=fail_msg): u**-1 def test_truediv(self): fail_msg = "Problem with method '__truediv__'" u = self.V3D(0, -1, 2) w = self.V3D(-3, 5, -4) v = self.V3D.__truediv__(u, w) self.assertListEqual(v.component_values(), [ 0.0, -0.2, -0.5 ], msg=fail_msg) u = self.V3D(0, -1, 2) v = u.__truediv__(5) self.assertListEqual(v.component_values(), [ 0.0, -0.2, 0.4 ], msg=fail_msg) u = self.V3D(0, -1, 2) w = self.V3D(-3, 4, -5) v = u / w self.assertListEqual(v.component_values(), [ 0.0, -0.25, -0.4 ], msg=fail_msg) u = self.V3D(0, -1, 2) v = u / 4 self.assertListEqual(v.component_values(), [ 0.0, -0.25, 0.5 ], msg=fail_msg) u = self.V3D(0, -1, 2) v = u / -2 self.assertListEqual(v.component_values(), [ 0.0, 0.5, -1 ], msg=fail_msg) u = self.V3D(-3.0, 4.0, -2.0) v = 6.0 / u self.assertListEqual(v.component_values(), [ -2.0, 1.5, -3.0 ], msg=fail_msg) u = self.V3D(0, -1, 2) id_u_before = id(u) w = self.V3D(-3, 4, -5) v = u / w id_v_after = id(v) self.assertNotEqual(id_u_before, id_v_after, msg=fail_msg) u = self.V3D(0, -1, 2) w = self.V3D(0, 1, 1) with 
self.assertRaises(ZeroDivisionError, msg=fail_msg): u / w u = self.V3D(0, -1, 2) w = self.V3D(1, 0, 1) with self.assertRaises(ZeroDivisionError, msg=fail_msg): u / w u = self.V3D(0, -1, 2) w = self.V3D(1, 1, 0) with self.assertRaises(ZeroDivisionError, msg=fail_msg): u / w v = self.V3D(0, -1, 2) with self.assertRaises(ZeroDivisionError, msg=fail_msg): v / 0 def test_floordiv(self): fail_msg = "Problem with method '__floordiv__'" u = self.V3D(3, 2, -5) w = self.V3D(1, -2, -4) v = self.V3D.__floordiv__(u, w) self.assertListEqual(v.component_values(), [ 3, -1, 1 ], msg=fail_msg) u = self.V3D(-3, 4, 0) v = u.__floordiv__(5) self.assertListEqual(v.component_values(), [ -1, 0, 0 ], msg=fail_msg) u = self.V3D(-3, 4, -5) w = self.V3D(-1, 2, 4) v = u // w self.assertListEqual(v.component_values(), [ 3, 2, -2 ], msg=fail_msg) u = self.V3D(-3, 4, -5) v = u // 4 self.assertListEqual(v.component_values(), [ -1, 1, -2 ], msg=fail_msg) u = self.V3D(-3, 4, -5) v = u // -3 self.assertListEqual(v.component_values(), [ 1, -2, 1 ], msg=fail_msg) u = self.V3D(-3, 4, -5) id_u_before = id(u) w = self.V3D(-1, 2, 4) v = u // w id_v_after = id(v) self.assertNotEqual(id_u_before, id_v_after, msg=fail_msg) u = self.V3D(0, -1, 2) w = self.V3D(0, 1, 1) with self.assertRaises(ZeroDivisionError, msg=fail_msg): u // w u = self.V3D(0, -1, 2) w = self.V3D(1, 0, 1) with self.assertRaises(ZeroDivisionError, msg=fail_msg): u // w u = self.V3D(0, -1, 2) w = self.V3D(1, 1, 0) with self.assertRaises(ZeroDivisionError, msg=fail_msg): u // w v = self.V3D(0, -1, 2) with self.assertRaises(ZeroDivisionError, msg=fail_msg): v // 0 def test_mod(self): fail_msg = "Problem with method '__mod__'" u = self.V3D(-3, 4, -5) w = self.V3D(2, -4, 3) v = self.V3D.__mod__(u, w) self.assertListEqual(v.component_values(), [ 1, 0, 1 ], msg=fail_msg) u = self.V3D(3, -4, 5) v = u.__mod__(3) self.assertListEqual(v.component_values(), [ 0, 2, 2 ], msg=fail_msg) u = self.V3D(-3, 4, -5) w = self.V3D(-1, 2, 4) v = u % w 
self.assertListEqual(v.component_values(), [ 0, 0, 3 ], msg=fail_msg) u = self.V3D(-3, 4, -5) v = u % 4 self.assertListEqual(v.component_values(), [ 1, 0, 3 ], msg=fail_msg) u = self.V3D(-3, 4, -5) v = u % -3 self.assertListEqual(v.component_values(), [ 0, -2, -2 ], msg=fail_msg) u = self.V3D(-3, 4, -5) w = self.V3D(-1, 2, 4) id_u_before = id(u) v = u % w id_v_after = id(v) self.assertNotEqual(id_u_before, id_v_after, msg=fail_msg) u = self.V3D(0, -1, 2) w = self.V3D(0, 1, 1) with self.assertRaises(ZeroDivisionError, msg=fail_msg): u % w u = self.V3D(0, -1, 2) w = self.V3D(1, 0, 1) with self.assertRaises(ZeroDivisionError, msg=fail_msg): u % w u = self.V3D(0, -1, 2) w = self.V3D(1, 1, 0) with self.assertRaises(ZeroDivisionError, msg=fail_msg): u % w v = self.V3D(0, -1, 2) with self.assertRaises(ZeroDivisionError, msg=fail_msg): v % 0 def test_radd(self): fail_msg = "Problem with method '__radd__'" u = self.V3D(0, -1, 2) v = self.V3D.__radd__(u, -2) self.assertListEqual(v.component_values(), [ -2, -3, 0 ], msg=fail_msg) u = self.V3D(-3.0, 4.0, -5.0) v = u.__radd__(3.5) self.assertListEqual(v.component_values(), [ 0.5, 7.5, -1.5 ], msg=fail_msg) u = self.V3D(0, -1, 2) v = 4 + u self.assertListEqual(v.component_values(), [ 4, 3, 6 ], msg=fail_msg) u = self.V3D(-3.5, 4.5, -5.5) v = -2.0 + u self.assertListEqual(v.component_values(), [ -5.5, 2.5, -7.5 ], msg=fail_msg) u = self.V3D(0, -1, 2) id_u_before = id(u) v = 4 + u id_v_after = id(v) self.assertNotEqual(id_u_before, id_v_after, msg=fail_msg) def test_rsub(self): fail_msg = "Problem with method '__rsub__'" u = self.V3D(0, -1, 2) v = self.V3D.__rsub__(u, -2) self.assertListEqual(v.component_values(), [ -2, -1, -4 ], msg=fail_msg) u = self.V3D(-3.0, 4.0, -5.0) v = u.__rsub__(3.5) self.assertListEqual(v.component_values(), [ 6.5, -0.5, 8.5 ], msg=fail_msg) u = self.V3D(0, -1, 2) v = 4 - u self.assertListEqual(v.component_values(), [ 4, 5, 2 ], msg=fail_msg) u = self.V3D(-3.5, 4.5, -5.5) v = -2 - u 
self.assertListEqual(v.component_values(), [ 1.5, -6.5, 3.5 ], msg=fail_msg) u = self.V3D(0, -1, 2) id_u_before = id(u) v = 4 - u id_v_after = id(v) self.assertNotEqual(id_u_before, id_v_after, msg=fail_msg) def test_rmul(self): fail_msg = "Problem with method '__rmul__'" u = self.V3D(0, -1, 2) v = self.V3D.__rmul__(u, -2) self.assertListEqual(v.component_values(), [ 0, 2, -4 ], msg=fail_msg) u = self.V3D(-3.0, 4.0, -5.0) v = u.__rmul__(1.5) self.assertListEqual(v.component_values(), [ -4.5, 6.0, -7.5 ], msg=fail_msg) u = self.V3D(0, -1, 2) v = 4 * u self.assertListEqual(v.component_values(), [ 0, -4, 8 ], msg=fail_msg) u = self.V3D(-3.5, 4.5, -5.5) v = (-3.0) * u self.assertListEqual(v.component_values(), [ 10.5, -13.5, 16.5 ], msg=fail_msg) u = self.V3D(0, -1, 2) id_u_before = id(u) v = 4 * u id_v_after = id(v) self.assertNotEqual(id_u_before, id_v_after, msg=fail_msg) def test_rpow(self): fail_msg = "Problem with method '__rpow__'" u = self.V3D(0, 1, 2) v = self.V3D.__rpow__(u, 3) self.assertListEqual(v.component_values(), [ 1, 3, 9 ], msg=fail_msg) u = self.V3D(1.0, 2.0, -1.0) v = u.__rpow__(2.5) self.assertListEqual(v.component_values(), [ 2.5, 6.25, 0.4 ], msg=fail_msg) u = self.V3D(0, 1, 2) v = (-3)**u self.assertListEqual(v.component_values(), [ 1, -3, 9 ], msg=fail_msg) u = self.V3D(0, 1, 2) id_u_before = id(u) v = (-3)**u id_v_after = id(v) self.assertNotEqual(id_u_before, id_v_after, msg=fail_msg) v = self.V3D(-1, 0, 2) with self.assertRaises(ZeroDivisionError, msg=fail_msg): 0**v def test_rtruediv(self): fail_msg = "Problem with method '__rtruediv__'" u = self.V3D(-1, 4, -5) v = self.V3D.__rtruediv__(u, 2) self.assertListEqual(v.component_values(), [ -2.0, 0.5, -0.4 ], msg=fail_msg) u = self.V3D(2.0, 5.0, -4.0) v = u.__rtruediv__(-1.0) self.assertListEqual(v.component_values(), [ -0.5, -0.2, 0.25 ], msg=fail_msg) u = self.V3D(2.0, -4.0, 5.0) v = 4.0 / u self.assertListEqual(v.component_values(), [ 2.0, -1.0, 0.8 ], msg=fail_msg) u = self.V3D(2, -4, 5) 
id_u_before = id(u) v = 4 / u id_v_after = id(v) self.assertNotEqual(id_u_before, id_v_after, msg=fail_msg) v = self.V3D(0, 0, 0) with self.assertRaises(ZeroDivisionError, msg=fail_msg): 0 / v def test_rfloordiv(self): fail_msg = "Problem with method '__rfloordiv__'" ### TODO: Change values u = self.V3D(2, -4, 5) v = self.V3D.__rfloordiv__(u, 4) self.assertListEqual(v.component_values(), [ 2, -1, 0 ], msg=fail_msg) u = self.V3D(2, -4, 5) ### TODO: Change value v = u.__rfloordiv__(4) self.assertListEqual(v.component_values(), [ 2, -1, 0 ], msg=fail_msg) u = self.V3D(2, -4, 5) v = 4 // u self.assertListEqual(v.component_values(), [ 2, -1, 0 ], msg=fail_msg) u = self.V3D(-3, 4, -2) v = -6 // u self.assertListEqual(v.component_values(), [ 2, -2, 3 ], msg=fail_msg) u = self.V3D(2, -4, 5) id_u_before = id(u) v = 4 // u id_v_after = id(v) self.assertNotEqual(id_u_before, id_v_after, msg=fail_msg) v = self.V3D(0, 0, 0) with self.assertRaises(ZeroDivisionError, msg=fail_msg): 0 // v def test_rmod(self): fail_msg = "Problem with method '__rmod__'" ### TODO: Change values u = self.V3D(2, -4, 5) v = self.V3D.__rmod__(u, 4) self.assertListEqual(v.component_values(), [ 0, 0, 4 ], msg=fail_msg) u = self.V3D(2, -4, 5) ### TODO: Change value v = u.__rmod__(4) self.assertListEqual(v.component_values(), [ 0, 0, 4 ], msg=fail_msg) u = self.V3D(2, -4, 5) v = 4 % u self.assertListEqual(v.component_values(), [ 0, 0, 4 ], msg=fail_msg) u = self.V3D(2, -4, 5) id_u_before = id(u) v = 4 % u id_v_after = id(v) self.assertNotEqual(id_u_before, id_v_after, msg=fail_msg) v = self.V3D(0, 0, 0) with self.assertRaises(ZeroDivisionError, msg=fail_msg): 0 % v def test_iadd(self): fail_msg = "Problem with method '__iadd__'" ### TODO: Change values v = self.V3D(0, -1, 2) w = self.V3D(-3, 4, -5) v = self.V3D.__iadd__(v, w) self.assertListEqual(v.component_values(), [ -3, 3, -3 ], msg=fail_msg) v = self.V3D(0, -1, 2) ### TODO: Change value v = v.__iadd__(3) self.assertListEqual(v.component_values(), [ 3, 
2, 5 ], msg=fail_msg) v = self.V3D(0, -1, 2) w = self.V3D(-3, 4, -5) v += w self.assertListEqual(v.component_values(), [ -3, 3, -3 ], msg=fail_msg) v = self.V3D(0, -1, 2) v += 4 self.assertListEqual(v.component_values(), [ 4, 3, 6 ], msg=fail_msg) v = self.V3D(0, -1, 2) w = self.V3D(-3, 4, -5) id_v_before = id(v) v += w id_v_after = id(v) self.assertEqual(id_v_before, id_v_after, msg=fail_msg) def test_isub(self): fail_msg = "Problem with method '__isub__'" ### TODO: Change values v = self.V3D(0, -1, 2) w = self.V3D(-3, 4, -5) v = self.V3D.__isub__(v, w) self.assertListEqual(v.component_values(), [ 3, -5, 7 ], msg=fail_msg) ### TODO: Change values v = self.V3D(0, -1, 2) v = v.__isub__(3) self.assertListEqual(v.component_values(), [ -3, -4, -1 ], msg=fail_msg) v = self.V3D(0, -1, 2) w = self.V3D(-3, 4, -5) v -= w self.assertListEqual(v.component_values(), [ 3, -5, 7 ], msg=fail_msg) v = self.V3D(0, -1, 2) v -= 4 self.assertListEqual(v.component_values(), [ -4, -5, -2 ], msg=fail_msg) v = self.V3D(0, -1, 2) w = self.V3D(-3, 4, -5) id_v_before = id(v) v -= w id_v_after = id(v) self.assertEqual(id_v_before, id_v_after, msg=fail_msg) def test_imul(self): fail_msg = "Problem with method '__imul__'" ### TODO: Change values v = self.V3D(0, -1, 2) w = self.V3D(-3, 4, -5) v = self.V3D.__imul__(v, w) self.assertListEqual(v.component_values(), [ 0, -4, -10 ], msg=fail_msg) v = self.V3D(0, -1, 2) ### TODO: Change value v = v.__imul__(3) self.assertListEqual(v.component_values(), [ 0, -3, 6 ], msg=fail_msg) v = self.V3D(0, -1, 2) w = self.V3D(-3, 4, -5) v *= w self.assertListEqual(v.component_values(), [ 0, -4, -10 ], msg=fail_msg) v = self.V3D(0, -1, 2) v *= 4 self.assertListEqual(v.component_values(), [ 0, -4, 8 ], msg=fail_msg) v = self.V3D(0, -1, 2) w = self.V3D(-3, 4, -5) id_v_before = id(v) v *= w id_v_after = id(v) self.assertEqual(id_v_before, id_v_after, msg=fail_msg) def test_ipow(self): fail_msg = "Problem with method '__ipow__'" ### TODO: Change values v = 
self.V3D(0, -1, 2) w = self.V3D(2, 1, 0) v = self.V3D.__ipow__(v, w) self.assertListEqual(v.component_values(), [ 0, -1, 1 ], msg=fail_msg) v = self.V3D(0, -1, 2) ### TODO: Change value v = v.__ipow__(3) self.assertListEqual(v.component_values(), [ 0, -1, 8 ], msg=fail_msg) v = self.V3D(0, -1, 2) w = self.V3D(2, 1, 0) v **= w self.assertListEqual(v.component_values(), [ 0, -1, 1 ], msg=fail_msg) v = self.V3D(0, -1, 2) w = self.V3D(0, 2, 1) v **= w self.assertListEqual(v.component_values(), [ 1, 1, 2 ], msg=fail_msg) v = self.V3D(0, -1, 2) v **= 3 self.assertListEqual(v.component_values(), [ 0, -1, 8 ], msg=fail_msg) v = self.V3D(0, -1, 2) w = self.V3D(0, 2, 1) id_v_before = id(v) v **= w id_v_after = id(v) self.assertEqual(id_v_before, id_v_after, msg=fail_msg) v = self.V3D(0, -1, 2) with self.assertRaises(ZeroDivisionError, msg=fail_msg): v **= -1 def test_itruediv(self): fail_msg = "Problem with method '__itruediv__'" ### TODO: Change values v = self.V3D(0, -1, 2) w = self.V3D(-3, 4, -5) v = self.V3D.__itruediv__(v, w) self.assertListEqual(v.component_values(), [ 0.0, -0.25, -0.4 ], msg=fail_msg) v = self.V3D(0, -1, 2) ### TODO: Change value v = v.__itruediv__(4) self.assertListEqual(v.component_values(), [ 0.0, -0.25, 0.5 ], msg=fail_msg) v = self.V3D(0.0, -1.0, 2.0) w = self.V3D(-3.0, 4.0, -5.0) v /= w self.assertListEqual(v.component_values(), [ 0.0, -0.25, -0.4 ], msg=fail_msg) v = self.V3D(0.0, -1.0, 2.0) v /= 4.0 self.assertListEqual(v.component_values(), [ 0.0, -0.25, 0.5 ], msg=fail_msg) v = self.V3D(0, -1, 2) id_v_before = id(v) v /= w id_v_after = id(v) self.assertEqual(id_v_before, id_v_after, msg=fail_msg) v = self.V3D(0, -1, 2) w = self.V3D(0, 1, 1) with self.assertRaises(ZeroDivisionError, msg=fail_msg): v /= w v = self.V3D(0, -1, 2) w = self.V3D(1, 0, 1) with self.assertRaises(ZeroDivisionError, msg=fail_msg): v /= w v = self.V3D(0, -1, 2) w = self.V3D(1, 1, 0) with self.assertRaises(ZeroDivisionError, msg=fail_msg): v /= w v = self.V3D(0, -1, 2) 
with self.assertRaises(ZeroDivisionError, msg=fail_msg): v /= 0 def test_ifloordiv(self): fail_msg = "Problem with method '__ifloordiv__'" ### TODO: Change values v = self.V3D(-3, 4, -5) w = self.V3D(-1, 2, 4) v = self.V3D.__ifloordiv__(v, w) self.assertListEqual(v.component_values(), [ 3, 2, -2 ], msg=fail_msg) v = self.V3D(-3, 4, -5) ### TODO: Change value v = v.__ifloordiv__(3) self.assertListEqual(v.component_values(), [ -1, 1, -2 ], msg=fail_msg) v = self.V3D(-3, 4, -5) w = self.V3D(-1, 2, 4) v //= w self.assertListEqual(v.component_values(), [ 3, 2, -2 ], msg=fail_msg) v = self.V3D(-3, 4, -5) v //= 4 self.assertListEqual(v.component_values(), [ -1, 1, -2 ], msg=fail_msg) v = self.V3D(-3, 4, -5) w = self.V3D(-1, 2, 4) id_v_before = id(v) v //= w id_v_after = id(v) self.assertEqual(id_v_before, id_v_after, msg=fail_msg) v = self.V3D(0, -1, 2) w = self.V3D(0, 1, 1) with self.assertRaises(ZeroDivisionError, msg=fail_msg): v //= w v = self.V3D(0, -1, 2) w = self.V3D(1, 0, 1) with self.assertRaises(ZeroDivisionError, msg=fail_msg): v //= w v = self.V3D(0, -1, 2) w = self.V3D(1, 1, 0) with self.assertRaises(ZeroDivisionError, msg=fail_msg): v //= w v = self.V3D(0, -1, 2) with self.assertRaises(ZeroDivisionError, msg=fail_msg): v //= 0 def test_imod(self): fail_msg = "Problem with method '__imod__'" ### TODO: Change values v = self.V3D(-3, 4, -5) w = self.V3D(-1, 2, 4) v = self.V3D.__imod__(v, w) self.assertListEqual(v.component_values(), [ 0, 0, 3 ], msg=fail_msg) v = self.V3D(-3, 4, -5) ### TODO: Change value v = v.__imod__(3) self.assertListEqual(v.component_values(), [ 0, 1, 1 ], msg=fail_msg) v = self.V3D(-3, 4, -5) w = self.V3D(-1, 2, 4) v %= w self.assertListEqual(v.component_values(), [ 0, 0, 3 ], msg=fail_msg) v = self.V3D(-3, 4, -5) v %= 4 self.assertListEqual(v.component_values(), [ 1, 0, 3 ], msg=fail_msg) v = self.V3D(-3, 4, -5) w = self.V3D(-1, 2, 4) id_v_before = id(v) v %= w id_v_after = id(v) self.assertEqual(id_v_before, id_v_after, msg=fail_msg) v 
= self.V3D(0, -1, 2) w = self.V3D(0, 1, 1) with self.assertRaises(ZeroDivisionError, msg=fail_msg): v %= w v = self.V3D(0, -1, 2) w = self.V3D(1, 0, 1) with self.assertRaises(ZeroDivisionError, msg=fail_msg): v %= w v = self.V3D(0, -1, 2) w = self.V3D(1, 1, 0) with self.assertRaises(ZeroDivisionError, msg=fail_msg): v %= w v = self.V3D(0, -1, 2) with self.assertRaises(ZeroDivisionError, msg=fail_msg): v %= 0 def test_getattr(self): fail_msg = "Problem with method '__getattr__'" def verify_attributes(test_vector, test_data): for attr_name, arg, result in test_data: function = getattr(test_vector, attr_name) v = function(*arg) with self.subTest(v=test_vector, operator=attr_name, expected_result=result): self.assertListEqual(v.component_values(), result, msg=fail_msg) def verify_attributes_i(test_vector, test_data): for attr_name, arg, result in test_data: v = test_vector.copy() function = getattr(v, attr_name) function(*arg) with self.subTest(v=v, operator=attr_name, expected_result=result): self.assertListEqual(v.component_values(), result, msg=fail_msg) def verify_attribute_names(test_vector, test_operators): invalid_elements = [ 'a' ] # invalid_elements = [ '', 'a' ] valid_elements = [ 'bar' ] + test_vector.cnames for test_op in test_operators: valid_attr_names = test_vector.cnames for i in range(len(valid_elements)+2): valid_attr_names += \ [ '_'.join(['c', test_op] + list(comb)) for comb in set(itertools.combinations(valid_elements, i)) ] for i in range(len(valid_elements + invalid_elements)+1): elements = [ test_op ] + valid_elements * i + invalid_elements for perm in set(itertools.permutations(elements, i)): attr_name = 'c_' + '_'.join(perm) with self.subTest(v=test_vector, attr=attr_name): if attr_name in valid_attr_names: self.assertTrue(hasattr(test_vector, attr_name), msg=fail_msg) else: self.assertFalse(hasattr(test_vector, attr_name), msg=fail_msg) elements = [ 'c', test_op, 'bar' ] elements += [ '' ] * 4 elements += [ '_' ] * 3 # NB: 4 or more will 
generate __pos__ etc. elements += test_vector.cnames[0:1] attribute_names = \ set( ''.join(perm) for i in range(8) for perm in set(itertools.permutations(elements, i)) ) for attr_name in attribute_names: with self.subTest(v=test_vector, attr=attr_name): if attr_name in valid_attr_names: self.assertTrue(hasattr(test_vector, attr_name), msg=fail_msg) else: self.assertFalse(hasattr(test_vector, attr_name), msg=fail_msg) verify_attributes( self.V3D(-3, -4, -5), [ ('c_abs' , [ ], [ -3, -4, -5 ]), ('c_abs_bar_x_y_z', [ ], [ -3, -4, -5 ]), ('c_abs_x' , [ ], [ 3, -4, -5 ]), ('c_abs_bar_y_z' , [ ], [ 3, -4, -5 ]), ('c_abs_y' , [ ], [ -3, 4, -5 ]), ('c_abs_bar_x_z' , [ ], [ -3, 4, -5 ]), ('c_abs_z' , [ ], [ -3, -4, 5 ]), ('c_abs_bar_x_y' , [ ], [ -3, -4, 5 ]), ('c_abs_x_y' , [ ], [ 3, 4, -5 ]), ('c_abs_bar_z' , [ ], [ 3, 4, -5 ]), ('c_abs_y_z' , [ ], [ -3, 4, 5 ]), ('c_abs_bar_x' , [ ], [ -3, 4, 5 ]), ('c_abs_x_z' , [ ], [ 3, -4, 5 ]), ('c_abs_bar_y' , [ ], [ 3, -4, 5 ]), ('c_abs_x_y_z' , [ ], [ 3, 4, 5 ]), ('c_abs_bar' , [ ], [ 3, 4, 5 ]) ] ) verify_attributes( self.V3D(-3.5, 4.5, -5.5), [ ('c_neg_y' , [ ], [ -3.5, -4.5, -5.5 ]), ('c_neg_x_z' , [ ], [ 3.5, 4.5, 5.5 ]), ('c_pos_y' , [ ], [ -3.5, 4.5, -5.5 ]), ('c_pos_x_z' , [ ], [ -3.5, 4.5, -5.5 ]), ('c_floor_y' , [ ], [ -3.5, 4 , -5.5 ]), ('c_floor_x_z', [ ], [ -4 , 4.5, -6 ]), ('c_ceil_y' , [ ], [ -3.5, 5 , -5.5 ]), ('c_ceil_x_z' , [ ], [ -3 , 4.5, -5 ]), ('c_trunc_y' , [ ], [ -3.5, 4 , -5.5 ]), ('c_trunc_x_z', [ ], [ -3 , 4.5, -5 ]) ] ) verify_attributes( self.V3D(-2.5, 3.5, -1.5), [ ('c_add_y' , [ 2 ], [ -2.5 , 5.5 , -1.5 ]), ('c_add_x_z' , [ 2 ], [ -0.5 , 3.5 , 0.5 ]), ('c_sub_y' , [ 2 ], [ -2.5 , 1.5 , -1.5 ]), ('c_sub_x_z' , [ 2 ], [ -4.5 , 3.5 , -3.5 ]), ('c_mul_y' , [ 2 ], [ -2.5 , 7.0 , -1.5 ]), ('c_mul_x_z' , [ 2 ], [ -5.0 , 3.5 , -3.0 ]), ('c_pow_y' , [ 2 ], [ -2.5 , 12.25, -1.5 ]), ('c_pow_x_z' , [ 2 ], [ 6.25, 3.5 , 2.25 ]), ('c_truediv_y' , [ 2 ], [ -2.5 , 1.75, -1.5 ]), ('c_truediv_x_z' , [ 2 ], [ -1.25, 3.5 
, -0.75 ]), ('c_floordiv_y' , [ 2 ], [ -2.5 , 1.0 , -1.5 ]), ('c_floordiv_x_z', [ 2 ], [ -2.0 , 3.5 , -1.0 ]), ('c_mod_y' , [ 2 ], [ -2.5 , 1.5 , -1.5 ]), ('c_mod_x_z' , [ 2 ], [ 1.5 , 3.5 , 0.5 ]) ] ) ### TODO: # Test division by zero # Verify that c_radd etc. does not exist (?) verify_attributes_i( self.V3D(-2.5, 3.5, -1.5), [ ('c_iadd_y' , [ 2 ], [ -2.5 , 5.5 , -1.5 ]), ('c_iadd_x_z' , [ 2 ], [ -0.5 , 3.5 , 0.5 ]), ('c_isub_y' , [ 2 ], [ -2.5 , 1.5 , -1.5 ]), ('c_isub_x_z' , [ 2 ], [ -4.5 , 3.5 , -3.5 ]), ('c_imul_y' , [ 2 ], [ -2.5 , 7.0 , -1.5 ]), ('c_imul_x_z' , [ 2 ], [ -5.0 , 3.5 , -3.0 ]), ('c_ipow_y' , [ 2 ], [ -2.5 , 12.25, -1.5 ]), ('c_ipow_x_z' , [ 2 ], [ 6.25, 3.5 , 2.25 ]), ('c_itruediv_y' , [ 2 ], [ -2.5 , 1.75, -1.5 ]), ('c_itruediv_x_z' , [ 2 ], [ -1.25, 3.5 , -0.75 ]), ('c_ifloordiv_y' , [ 2 ], [ -2.5 , 1.0 , -1.5 ]), ('c_ifloordiv_x_z', [ 2 ], [ -2.0 , 3.5 , -1.0 ]), ('c_imod_y' , [ 2 ], [ -2.5 , 1.5 , -1.5 ]), ('c_imod_x_z' , [ 2 ], [ 1.5 , 3.5 , 0.5 ]), ] ) ### TODO: # Test division by zero # Test with non existing operators # Test with non existing components verify_attribute_names( self.V3D(-2.5, 3.5, -1.5), [ # 'abs', # 'neg', 'pos', # 'floor', # 'ceil', # 'trunc', # 'add', # 'sub', # 'mul', # 'pow', # 'truediv', # 'floordiv', # 'mod' ] ) ### TODO: # def test_setattr(self): # # fail_msg = "Problem with method '__setattr__'" class Test_Case_vector(Test_Case_simple_vector): create_vector_class = staticmethod(skvectors.create_class_Vector) class Test_Case_cartesian_vector(Test_Case_vector): create_vector_class = staticmethod(skvectors.create_class_Cartesian_Vector) def test_abs(self): fail_msg = "Problem with method '__abs__'" u = self.V3D(0, -4.0, 3.0) s = self.V3D.__abs__(u) self.assertAlmostEqual(s, 5.0, msg=fail_msg) u = self.V3D(-12.0, -5.0, 0.0) s = u.__abs__() self.assertAlmostEqual(s, 13.0, msg=fail_msg) u = self.V3D(0.0, 0.0, 0.0) s = abs(u) self.assertEqual(s, 0.0, msg=fail_msg) u = self.V3D(0.0, 0.0, -3.0) s = abs(u) 
self.assertAlmostEqual(s, 3.0, msg=fail_msg) u = self.V3D(-3.0, 0.0, 4.0) s = abs(u) self.assertAlmostEqual(s, 5.0, msg=fail_msg) u = self.V3D(5.0, 12.0, 0.0) s = abs(u) self.assertAlmostEqual(s, 13.0, msg=fail_msg) u = self.V3D(-2.0, -1.0, -2.0) s = abs(u) self.assertAlmostEqual(s, 3.0, msg=fail_msg) class Test_Case_tolerant_cartesian_vector(Test_Case_cartesian_vector): create_vector_class = staticmethod(skvectors.create_class_Tolerant_Cartesian_Vector) class Test_Case_cartesian_3d_vector(Test_Case_cartesian_vector): create_vector_class = staticmethod(skvectors.create_class_Cartesian_3D_Vector) class Test_Case_tolerant_cartesian_3d_vector(Test_Case_cartesian_3d_vector): create_vector_class = staticmethod(skvectors.create_class_Tolerant_Cartesian_3D_Vector) if __name__ == "__main__": unittest.main()
37.050847
95
0.503024
5,929
39,348
3.127172
0.033058
0.104957
0.100858
0.183
0.883016
0.871582
0.816245
0.792514
0.757187
0.730651
0
0.089713
0.32975
39,348
1,061
96
37.085768
0.613317
0.024576
0
0.584563
0
0
0.038681
0
0.001135
0
0
0.000943
0.212259
1
0.039728
false
0
0.00454
0
0.057889
0
0
0
0
null
0
0
1
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
2e5e91f6e0c9aaa2bd3d5c1ebeda0403884c358d
1,461
py
Python
tests/parser/bug.49.test.py
veltri/DLV2
944aaef803aa75e7ec51d7e0c2b0d964687fdd0e
[ "Apache-2.0" ]
null
null
null
tests/parser/bug.49.test.py
veltri/DLV2
944aaef803aa75e7ec51d7e0c2b0d964687fdd0e
[ "Apache-2.0" ]
null
null
null
tests/parser/bug.49.test.py
veltri/DLV2
944aaef803aa75e7ec51d7e0c2b0d964687fdd0e
[ "Apache-2.0" ]
null
null
null
input = """ % This example is due to Thomas Eiter. It produces a segmentation fault in the % DLV repository version of 2000-05-31. % Generate a linear ordering compatible with % facts pr(ri,rj). pr(X,Y) | pr(Y,X) :- rule(X),rule(Y), X != Y. pr(X,Z) :- pr(X,Y), pr(Y,Z). :- pr(X,X). % Generate another ordering % pr_one(X,Y) | pr_one(Y,X) :- rule(X),rule(Y), X != Y. pr_one(X,Z) :- pr_one(X,Y), pr_one(Y,Z). :- pr_one(X,X). % Minimize the difference between pr and pr_one %:- rule(X), rule(Y), pr(X,Y), not pr_one(X,Y). [1:1] % Facts: three rules, one precedence per order. rule(r1). rule(r2). rule(r3). pr(r1,r2). pr_one(r3,r1). """ output = """ % This example is due to Thomas Eiter. It produces a segmentation fault in the % DLV repository version of 2000-05-31. % Generate a linear ordering compatible with % facts pr(ri,rj). pr(X,Y) | pr(Y,X) :- rule(X),rule(Y), X != Y. pr(X,Z) :- pr(X,Y), pr(Y,Z). :- pr(X,X). % Generate another ordering % pr_one(X,Y) | pr_one(Y,X) :- rule(X),rule(Y), X != Y. pr_one(X,Z) :- pr_one(X,Y), pr_one(Y,Z). :- pr_one(X,X). % Minimize the difference between pr and pr_one %:- rule(X), rule(Y), pr(X,Y), not pr_one(X,Y). [1:1] % Facts: three rules, one precedence per order. rule(r1). rule(r2). rule(r3). pr(r1,r2). pr_one(r3,r1). """
18.037037
79
0.548255
262
1,461
2.98855
0.198473
0.114943
0.061303
0.076628
0.985951
0.985951
0.985951
0.985951
0.985951
0.985951
0
0.031628
0.264203
1,461
80
80
18.2625
0.696744
0
0
0.909091
1
0.136364
0.977617
0
0
0
0
0
0
1
0
false
0
0
0
0
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
1
0
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
9
cf188329a0e723a854debf80efa0152407d8d265
8,484
py
Python
tests/linear/derivatives/test_ders.py
LMNS3d/sharpy
af5964a2f8a7a1e133c0948cb2fdeb5cb5c46568
[ "BSD-3-Clause" ]
null
null
null
tests/linear/derivatives/test_ders.py
LMNS3d/sharpy
af5964a2f8a7a1e133c0948cb2fdeb5cb5c46568
[ "BSD-3-Clause" ]
null
null
null
tests/linear/derivatives/test_ders.py
LMNS3d/sharpy
af5964a2f8a7a1e133c0948cb2fdeb5cb5c46568
[ "BSD-3-Clause" ]
1
2021-01-27T17:57:40.000Z
2021-01-27T17:57:40.000Z
''' Test elementary derivative methods S. Maraniello, 4 Jun 2018 ''' import numpy as np import unittest import sharpy.linear.src.lib_dbiot as dbiot import sharpy.linear.src.libuvlm as libuvlm class Test_ders(unittest.TestCase): ''' Test methods into assembly module ''' def setUp(self): self.zetaP=np.array([3.0,5.5,2.0]) self.zeta0=np.array([1.0,3.0,0.9]) self.zeta1=np.array([5.0,3.1,1.9]) self.zeta2=np.array([4.8,8.1,2.5]) self.zeta3=np.array([0.9,7.9,1.7]) def test_dbiot_segment(self): print('\n-------------------------------------- Testing dbiot.eval_seg') gamma=2.4 zetaP=self.zetaP zetaA=self.zeta1 zetaB=self.zeta2 Q0=libuvlm.biot_segment(zetaP,zetaA,zetaB,gamma) ### compare different analytical derivative DerP_an,DerA_an,DerB_an=dbiot.eval_seg_exp(zetaP,zetaA,zetaB,gamma) DerP_an2,DerA_an2,DerB_an2=dbiot.eval_seg_comp(zetaP,zetaA,zetaB,gamma) er_max=max( np.max(np.abs(DerP_an2-DerP_an)), np.max(np.abs(DerA_an2-DerA_an)), np.max(np.abs(DerB_an2-DerB_an)) ) assert er_max<1e-16, 'Analytical models not matching' ### compare vs numerical derivative Steps=np.array([1e-2,1e-4,1e-6]) Er_max=0.0*Steps for ss in range(len(Steps)): step=Steps[ss] DerP_num=0.0*DerP_an DerA_num=0.0*DerA_an DerB_num=0.0*DerB_an for cc_zeta in range(3): dzeta=np.zeros((3,)) dzeta[cc_zeta]=step DerP_num[:,cc_zeta]=( libuvlm.biot_segment(zetaP+dzeta,zetaA,zetaB,gamma)-Q0)/step DerA_num[:,cc_zeta]=( libuvlm.biot_segment(zetaP,zetaA+dzeta,zetaB,gamma)-Q0)/step DerB_num[:,cc_zeta]=( libuvlm.biot_segment(zetaP,zetaA,zetaB+dzeta,gamma)-Q0)/step er_max=max( np.max(np.abs(DerP_num-DerP_an)), np.max(np.abs(DerA_num-DerA_an)), np.max(np.abs(DerB_num-DerB_an)) ) print('FD step: %.2e ---> Max error: %.2e'%(step,er_max) ) assert er_max<5e1*step, 'Error larger than 50 times step size' Er_max[ss]=er_max def test_dbiot_segment_mid(self): print('\n------------------------- Testing dbiot.eval_seg at mid-point') gamma=2.4 zetaA=self.zeta1 zetaB=self.zeta2 zetaP=.3*zetaA+0.7*zetaB 
Q0=libuvlm.biot_segment(zetaP,zetaA,zetaB,gamma) ### compare different analytical derivative DerP_an,DerA_an,DerB_an=dbiot.eval_seg_exp(zetaP,zetaA,zetaB,gamma) DerP_an2,DerA_an2,DerB_an2=dbiot.eval_seg_comp(zetaP,zetaA,zetaB,gamma) er_max=max( np.max(np.abs(DerP_an2-DerP_an)), np.max(np.abs(DerA_an2-DerA_an)), np.max(np.abs(DerB_an2-DerB_an)) ) assert er_max<1e-16, 'Analytical models not matching' ### compare vs numerical derivative Steps=np.array([1e-2,1e-4,1e-6]) Er_max=0.0*Steps for ss in range(len(Steps)): step=Steps[ss] DerP_num=0.0*DerP_an DerA_num=0.0*DerA_an DerB_num=0.0*DerB_an for cc_zeta in range(3): dzeta=np.zeros((3,)) dzeta[cc_zeta]=step DerP_num[:,cc_zeta]=( libuvlm.biot_segment(zetaP+dzeta,zetaA,zetaB,gamma)-Q0)/step DerA_num[:,cc_zeta]=( libuvlm.biot_segment(zetaP,zetaA+dzeta,zetaB,gamma)-Q0)/step DerB_num[:,cc_zeta]=( libuvlm.biot_segment(zetaP,zetaA,zetaB+dzeta,gamma)-Q0)/step er_max=max( np.max(np.abs(DerP_num-DerP_an)), np.max(np.abs(DerA_num-DerA_an)), np.max(np.abs(DerB_num-DerB_an)) ) print('FD step: %.2e ---> Max error: %.2e'%(step,er_max) ) assert er_max<5e1*step, 'Error larger than 50 times step size' Er_max[ss]=er_max def test_dbiot_panel(self): print('\n---------------------------------- Testing dbiot.eval_panel_*') gamma=2.4 zetaP=self.zetaP zeta0=self.zeta0 zeta1=self.zeta1 zeta2=self.zeta2 zeta3=self.zeta3 ZetaPanel=np.array([zeta0,zeta1,zeta2,zeta3]) Q0=libuvlm.biot_panel(zetaP,ZetaPanel,gamma) # compare analytical derivatives models DerP_an,DerVer_an=dbiot.eval_panel_exp(zetaP,ZetaPanel,gamma) DerP_an2,DerVer_an2=dbiot.eval_panel_comp(zetaP,ZetaPanel,gamma) DerP_an3,DerVer_an3=dbiot.eval_panel_fast(zetaP,ZetaPanel,gamma) DerP_an4,DerVer_an4=dbiot.eval_panel_cpp(zetaP,ZetaPanel,gamma) er_max=max( np.max(np.abs(DerP_an2-DerP_an)), np.max(np.abs(DerVer_an2-DerVer_an))) assert er_max<1e-16, 'eval_panel_comp not matching with eval_panel_exp' er_max=max( np.max(np.abs(DerP_an3-DerP_an)), np.max(np.abs(DerVer_an3-DerVer_an))) assert 
er_max<1e-16, 'eval_panel_fast not matching with eval_panel_exp' er_max=max( np.max(np.abs(DerP_an4-DerP_an)), np.max(np.abs(DerVer_an4-DerVer_an))) assert er_max<1e-16, 'eval_panel_cpp not matching with eval_panel_exp' # compare vs. numerical derivative Steps=np.array([1e-2,1e-4,1e-6]) ErP_max=0.0*Steps ErVer_max=0.0*Steps for ss in range(len(Steps)): step=Steps[ss] DerP_num=0.0*DerP_an DerVer_num=0.0*DerVer_an ### Perturb component for cc in range(3): dzeta=np.zeros((3,)) dzeta[cc]=step # derivative w.r.t. target point DerP_num[:,cc]=\ (libuvlm.biot_panel(zetaP+dzeta,ZetaPanel,gamma)-Q0)/step # derivative w.r.t panel vertices for vv in range(4): ZetaPanel_pert=ZetaPanel.copy() ZetaPanel_pert[vv,:]+=dzeta DerVer_num[vv,:,cc]=\ (libuvlm.biot_panel(zetaP,ZetaPanel_pert,gamma)-Q0)/step erP_max=np.max(np.abs(DerP_num-DerP_an)) erVer_max=np.max(np.abs(DerVer_num-DerVer_an)) print('FD step: %.2e ---> Max error (P,Vert): (%.2e,%.2e)'\ %(step,erP_max,erVer_max)) assert erP_max<5e1*step,\ 'Error w.r.t. zetaP larger than 50 times step size' assert erVer_max<5e1*step,\ 'Error w.r.t. ZetaPanel larger than 50 times step size' ErP_max[ss]=erP_max ErVer_max[ss]=erVer_max # assert monothony for ss in range(len(Steps)-1): assert ErP_max[ss+1]<ErP_max[ss],\ 'Error of derivative w.r.t. zetaP not decreasing monothonically' assert ErVer_max[ss+1]<ErVer_max[ss],\ 'Error of derivative w.r.t. 
ZetaPanel not decreasing monothonically' def test_dbiot_panel_mid_segment(self): print('\n-------------- Testing dbiot.eval_panel with zetaP on segment') gamma=2.4 zeta0=self.zeta0 zeta1=self.zeta1 zeta2=self.zeta2 zeta3=self.zeta3 zetaP=0.3*zeta1+0.7*zeta2 ZetaPanel=np.array([zeta0,zeta1,zeta2,zeta3]) Q0=libuvlm.biot_panel(zetaP,ZetaPanel,gamma) # compare analytical derivatives models DerP_an,DerVer_an=dbiot.eval_panel_exp(zetaP,ZetaPanel,gamma) DerP_an2,DerVer_an2=dbiot.eval_panel_comp(zetaP,ZetaPanel,gamma) DerP_an3,DerVer_an3=dbiot.eval_panel_fast(zetaP,ZetaPanel,gamma) DerP_an4,DerVer_an4=dbiot.eval_panel_cpp(zetaP,ZetaPanel,gamma) er_max=max( np.max(np.abs(DerP_an2-DerP_an)), np.max(np.abs(DerVer_an2-DerVer_an))) assert er_max<1e-16, 'eval_panel_comp not matching with eval_panel_exp' er_max=max( np.max(np.abs(DerP_an3-DerP_an)), np.max(np.abs(DerVer_an3-DerVer_an))) assert er_max<1e-16, 'eval_panel_fast not matching with eval_panel_exp' er_max=max( np.max(np.abs(DerP_an4-DerP_an)), np.max(np.abs(DerVer_an4-DerVer_an))) assert er_max<1e-16, 'eval_panel_cpp not matching with eval_panel_exp' # compare vs. numerical derivative Steps=np.array([1e-2,1e-4,1e-6]) ErP_max=0.0*Steps ErVer_max=0.0*Steps for ss in range(len(Steps)): step=Steps[ss] DerP_num=0.0*DerP_an DerVer_num=0.0*DerVer_an ### Perturb component for cc in range(3): dzeta=np.zeros((3,)) dzeta[cc]=step # derivative w.r.t. target point DerP_num[:,cc]=\ (libuvlm.biot_panel(zetaP+dzeta,ZetaPanel,gamma)-Q0)/step # derivative w.r.t panel vertices for vv in range(4): ZetaPanel_pert=ZetaPanel.copy() ZetaPanel_pert[vv,:]+=dzeta DerVer_num[vv,:,cc]=\ (libuvlm.biot_panel(zetaP,ZetaPanel_pert,gamma)-Q0)/step erP_max=np.max(np.abs(DerP_num-DerP_an)) erVer_max=np.max(np.abs(DerVer_num-DerVer_an)) print('FD step: %.2e ---> Max error (P,Vert): (%.2e,%.2e)'\ %(step,erP_max,erVer_max)) assert erP_max<5e1*step,\ 'Error w.r.t. zetaP larger than 50 times step size' assert erVer_max<5e1*step,\ 'Error w.r.t. 
ZetaPanel larger than 50 times step size' ErP_max[ss]=erP_max ErVer_max[ss]=erVer_max # assert monothony for ss in range(len(Steps)-1): assert ErP_max[ss+1]<ErP_max[ss],\ 'Error of derivative w.r.t. zetaP not decreasing monothonically' assert ErVer_max[ss+1]<ErVer_max[ss],\ 'Error of derivative w.r.t. ZetaPanel not decreasing monothonically'
30.3
74
0.687883
1,458
8,484
3.817558
0.087792
0.037729
0.035214
0.050305
0.917715
0.917715
0.900108
0.876033
0.876033
0.876033
0
0.036861
0.155823
8,484
279
75
30.408602
0.740296
0.06801
0
0.890052
0
0
0.165287
0.013116
0
0
0
0
0.094241
1
0.026178
false
0
0.020942
0
0.052356
0.041885
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
d846bcb9b4bd6b86d3cd9c7e3d9d632e931e57a5
13,070
py
Python
test/test_mcmatplot.py
prmiles/mcmcplotly
270112813d6e59ac5d6329d050ed2eb95144e30c
[ "MIT" ]
5
2019-07-14T07:31:59.000Z
2022-03-26T15:48:52.000Z
test/test_mcmatplot.py
prmiles/mcmcplotly
270112813d6e59ac5d6329d050ed2eb95144e30c
[ "MIT" ]
18
2018-08-03T17:12:05.000Z
2021-02-20T22:57:12.000Z
test/test_mcmatplot.py
prmiles/mcmcplotly
270112813d6e59ac5d6329d050ed2eb95144e30c
[ "MIT" ]
1
2018-08-07T11:31:11.000Z
2018-08-07T11:31:11.000Z
#!/usr/bin/env python3 # -*- coding: utf-8 -*- """ Created on Thu Jun 21 12:21:24 2018 @author: prmiles """ from mcmcplot import mcmatplot as MP import matplotlib.pyplot as plt import numpy as np import math import unittest # -------------------------- class PlotDensityPanel(unittest.TestCase): def test_basic_plot_features(self): npar = 3 chains = np.random.random_sample(size=(100, npar)) f = MP.plot_density_panel(chains=chains) for ii in range(npar): name = str('$p_{{{}}}$'.format(ii)) self.assertEqual(f.axes[ii].get_xlabel(), name, msg=str('Should be {}'.format(name))) self.assertEqual(f.axes[ii].get_ylabel(), str('$\\pi$({}$|M^{}$)'.format(name, '{data}')), msg='Should be posterior') self.assertEqual(f.get_figwidth(), 5.0, msg='Width is 5in') self.assertEqual(f.get_figheight(), 4.0, msg='Height is 4in') plt.close() f, settings = MP.plot_density_panel(chains=chains, return_settings=True) self.assertTrue(isinstance(settings, dict), msg='Expect dict return') plt.close() def test_basic_plot_features_with_hist_on(self): npar = 3 chains = np.random.random_sample(size=(100, npar)) settings = {'hist_on': True} f = MP.plot_density_panel(chains=chains, settings=settings) for ii in range(npar): name = str('$p_{{{}}}$'.format(ii)) self.assertEqual(f.axes[ii].get_xlabel(), name, msg=str('Should be {}'.format(name))) self.assertEqual(f.axes[ii].get_ylabel(), str('$\\pi$({}$|M^{}$)'.format(name, '{data}')), msg='Should be posterior') self.assertEqual(f.get_figwidth(), 5.0, msg='Width is 5in') self.assertEqual(f.get_figheight(), 4.0, msg='Height is 4in') plt.close() f, kde = MP.plot_density_panel( chains=chains, settings=settings, return_kde=True) for ii in range(npar): name = str('$p_{{{}}}$'.format(ii)) self.assertEqual(f.axes[ii].get_xlabel(), name, msg=str('Should be {}'.format(name))) self.assertEqual(f.axes[ii].get_ylabel(), str('$\\pi$({}$|M^{}$)'.format(name, '{data}')), msg='Should be posterior') self.assertEqual(f.get_figwidth(), 5.0, msg='Width is 5in') 
self.assertEqual(f.get_figheight(), 4.0, msg='Height is 4in') plt.close() self.assertEqual(len(kde), 3, msg='Expect 3 kde functions') # -------------------------- class PlotChainPanel(unittest.TestCase): def test_basic_plot_features_nsimu_lt_maxpoints(self): chains = np.random.random_sample(size=(100, 2)) f = MP.plot_chain_panel(chains=chains) x1, y1 = f.axes[0].lines[0].get_xydata().T x2, y2 = f.axes[1].lines[0].get_xydata().T self.assertTrue(np.array_equal(y1, chains[:, 0]), msg='Expect y1 to match column 1') self.assertTrue(np.array_equal(y2, chains[:, 1]), msg='Expect y2 to match column 2') self.assertEqual(f.axes[0].get_xlabel(), '', msg='Should be blank') self.assertEqual(f.axes[1].get_xlabel(), 'Iteration', msg='Should be Iteration') plt.close() def test_basic_plot_features_nsimu_gt_maxpoints(self): nsimu = 1000 chains = np.random.random_sample(size=(nsimu, 2)) f = MP.plot_chain_panel(chains=chains) x1, y1 = f.axes[0].lines[0].get_xydata().T x2, y2 = f.axes[1].lines[0].get_xydata().T skip = int(math.floor(nsimu/500)) self.assertTrue(np.array_equal(y1, chains[range(0, nsimu, skip), 0]), msg='Expect y1 to match column 1') self.assertTrue(np.array_equal(y2, chains[range(0, nsimu, skip), 1]), msg='Expect y2 to match column 2') self.assertEqual(f.axes[0].get_xlabel(), '', msg='Should be blank') self.assertEqual(f.axes[1].get_xlabel(), 'Iteration', msg='Should be Iteration') plt.close() def test_basic_plot_features_nsimu_gt_maxpoints_with_pm2std(self): nsimu = 1000 chains = np.random.random_sample(size=(nsimu, 2)) f = MP.plot_chain_panel(chains=chains, settings=dict(add_pm2std=True)) x1, y1 = f.axes[0].lines[0].get_xydata().T x2, y2 = f.axes[1].lines[0].get_xydata().T skip = int(math.floor(nsimu/500)) self.assertTrue(np.array_equal(y1, chains[range(0, nsimu, skip), 0]), msg='Expect y1 to match column 1') self.assertTrue(np.array_equal(y2, chains[range(0, nsimu, skip), 1]), msg='Expect y2 to match column 2') self.assertEqual(f.axes[0].get_xlabel(), '', msg='Should 
be blank') self.assertEqual(f.axes[1].get_xlabel(), 'Iteration', msg='Should be Iteration') self.assertEqual(len(f.axes[0].lines), 4, msg='Expect 4 lines') self.assertEqual(len(f.axes[1].lines), 4, msg='Expect 4 lines') plt.close() f, settings = MP.plot_chain_panel( chains=chains, return_settings=True, settings=dict(add_pm2std=True)) self.assertTrue(isinstance(settings, dict), msg='Expect dict return') plt.close() # -------------------------- class PlotHistogramPanel(unittest.TestCase): def test_basic_plot_features_nsimu_lt_maxpoints(self): npar = 3 chains = np.random.random_sample(size=(100, npar)) f = MP.plot_histogram_panel(chains=chains) for ii in range(npar): self.assertEqual(f.axes[ii].get_xlabel(), str('$p_{{{}}}$'.format(ii)), msg=str('Should be $p_{{{}}}$'.format(ii))) self.assertEqual(f.axes[ii].get_ylabel(), '', msg='Should be blank') plt.close() f, settings = MP.plot_histogram_panel(chains=chains, return_settings=True) self.assertTrue(isinstance(settings, dict), msg='Expect dict return') plt.close() # -------------------------- class PlotPairwiseCorrelationPanel(unittest.TestCase): def test_basic_plot_features_nsimu_lt_maxpoints(self): chains = np.random.random_sample(size=(100, 3)) f = MP.plot_pairwise_correlation_panel(chains=chains) x1, y1 = f.axes[0].lines[0].get_xydata().T x2, y2 = f.axes[1].lines[0].get_xydata().T x3, y3 = f.axes[1].lines[0].get_xydata().T self.assertTrue(np.array_equal(x1, chains[:, 0]), msg='Expect x1 to match column 0') self.assertTrue(np.array_equal(y1, chains[:, 1]), msg='Expect y1 to match column 1') self.assertTrue(np.array_equal(x2, chains[:, 0]), msg='Expect x2 to match column 0') self.assertTrue(np.array_equal(y2, chains[:, 2]), msg='Expect y2 to match column 2') self.assertTrue(np.array_equal(x3, chains[:, 0]), msg='Expect x3 to match column 1') self.assertTrue(np.array_equal(y3, chains[:, 2]), msg='Expect y3 to match column 2') for ai in f.axes: self.assertEqual(ai.get_xlabel(), '', msg='Should be blank') 
self.assertEqual(f.axes[0].get_title(), '$p_{0}$', msg='Expect $p_{0}$') self.assertEqual(f.axes[2].get_title(), '$p_{1}$', msg='Expect $p_{1}$') self.assertEqual(f.axes[0].get_ylabel(), '$p_{1}$', msg='Expect $p_{1}$') self.assertEqual(f.axes[1].get_ylabel(), '$p_{2}$', msg='Expect $p_{2}$') plt.close() def test_basic_plot_features_nsimu_lt_maxpoints_and_2_chains(self): chains = np.random.random_sample(size=(100, 2)) f = MP.plot_pairwise_correlation_panel(chains=chains) x1, y1 = f.axes[0].lines[0].get_xydata().T self.assertTrue(np.array_equal(x1, chains[:, 0]), msg='Expect x1 to match column 0') self.assertTrue(np.array_equal(y1, chains[:, 1]), msg='Expect y1 to match column 1') for ai in f.axes: self.assertEqual(ai.get_title(), '', msg='Should be blank') self.assertEqual(f.axes[0].get_xlabel(), '$p_{0}$', msg='Expect $p_{0}$') self.assertEqual(f.axes[0].get_ylabel(), '$p_{1}$', msg='Expect $p_{1}$') plt.close() def test_basic_plot_features_2c_w_contours(self): chains = np.random.random_sample(size=(100, 2)) f = MP.plot_pairwise_correlation_panel( chains=chains, settings=dict(add_5095_contours=True)) x1, y1 = f.axes[0].lines[0].get_xydata().T self.assertTrue(np.array_equal(x1, chains[:, 0]), msg='Expect x1 to match column 0') self.assertTrue(np.array_equal(y1, chains[:, 1]), msg='Expect y1 to match column 1') self.assertEqual(len(f.axes[0].lines), 3, msg='Expect 3 lines') for ai in f.axes: self.assertEqual(ai.get_title(), '', msg='Should be blank') self.assertEqual(f.axes[0].get_xlabel(), '$p_{0}$', msg='Expect $p_{0}$') self.assertEqual(f.axes[0].get_ylabel(), '$p_{1}$', msg='Expect $p_{1}$') plt.close() def test_basic_plot_features_2c_w_contours_and_legend(self): chains = np.random.random_sample(size=(100, 2)) f = MP.plot_pairwise_correlation_panel( chains=chains, settings=dict(add_5095_contours=True, add_legend=True)) x1, y1 = f.axes[0].lines[0].get_xydata().T self.assertTrue(np.array_equal(x1, chains[:, 0]), msg='Expect x1 to match column 0') 
self.assertTrue(np.array_equal(y1, chains[:, 1]), msg='Expect y1 to match column 1') self.assertEqual(len(f.axes[0].lines), 3, msg='Expect 3 lines') self.assertEqual(len(f.legends), 1, msg='Expect legend') plt.close() # -------------------------- class PlotChainMetrics(unittest.TestCase): def test_basic_plot_features(self): chains = np.random.random_sample(size=(100, 1)) f = MP.plot_chain_metrics(chain=chains, name=['a1']) x1, y1 = f.axes[0].lines[0].get_xydata().T self.assertTrue(np.array_equal(y1, chains[:, 0]), msg='Expect y1 to match column 1') self.assertEqual(f.axes[0].get_xlabel(), 'Iterations', msg='Should be Iterations') self.assertEqual(f.axes[0].get_ylabel(), 'a1-chain', msg='Should be a1-chain') self.assertEqual(f.axes[1].get_xlabel(), 'a1', msg='Strings should match') self.assertEqual(f.axes[1].get_ylabel(), 'Histogram of a1-chain', msg='Strings should match') self.assertEqual(f.get_figwidth(), 7.0, msg='Width is 7in') self.assertEqual(f.get_figheight(), 5.0, msg='Height is 5in') plt.close() def test_figsize_plot_features(self): chains = np.random.random_sample(size=(100, 1)) f = MP.plot_chain_metrics(chain=chains, name=['a1'], settings={'fig': dict(figsize=(10, 2))}) x1, y1 = f.axes[0].lines[0].get_xydata().T self.assertTrue(np.array_equal(y1, chains[:, 0]), msg='Expect y1 to match column 1') self.assertEqual(f.axes[0].get_xlabel(), 'Iterations', msg='Should be Iterations') self.assertEqual(f.axes[0].get_ylabel(), 'a1-chain', msg='Should be a1-chain') self.assertEqual(f.axes[1].get_xlabel(), 'a1', msg='Strings should match') self.assertEqual(f.axes[1].get_ylabel(), 'Histogram of a1-chain', msg='Strings should match') self.assertEqual(f.get_figwidth(), 10.0, msg='Width is 10in') self.assertEqual(f.get_figheight(), 2.0, msg='Height is 2in') plt.close() f, settings = MP.plot_chain_metrics( chain=chains, name=['a1'], settings={'fig': dict(figsize=(10, 2))}, return_settings=True) self.assertTrue(isinstance(settings, dict), msg='Expect dict return') 
plt.close()
46.845878
82
0.543611
1,656
13,070
4.150966
0.09058
0.037096
0.093104
0.087285
0.90355
0.882165
0.860489
0.837067
0.795316
0.774513
0
0.03559
0.299158
13,070
278
83
47.014388
0.714847
0.017751
0
0.723577
0
0
0.129835
0
0
0
0
0
0.296748
1
0.04878
false
0
0.020325
0
0.089431
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
d8c216aed2dea16e9d4bd346478b1689a81dfe26
102
py
Python
api/app/author/__init__.py
tonykuo222/vue-flask-in-action
26c6dbf5282541caeeb0784003309e22ed6adf46
[ "MIT" ]
1
2021-11-13T15:54:43.000Z
2021-11-13T15:54:43.000Z
api/app/author/__init__.py
tonykuo222/vue-flask-in-action
26c6dbf5282541caeeb0784003309e22ed6adf46
[ "MIT" ]
null
null
null
api/app/author/__init__.py
tonykuo222/vue-flask-in-action
26c6dbf5282541caeeb0784003309e22ed6adf46
[ "MIT" ]
1
2021-11-13T00:57:20.000Z
2021-11-13T00:57:20.000Z
from flask import Blueprint author_bp = Blueprint('author_bp',__name__) from app.author import routes
25.5
43
0.823529
15
102
5.2
0.6
0.384615
0.435897
0
0
0
0
0
0
0
0
0
0.107843
102
3
44
34
0.857143
0
0
0
0
0
0.088235
0
0
0
0
0
0
1
0
false
0
0.666667
0
0.666667
0.666667
1
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
1
0
7
d8d0ff0c816cb85c3d3d09b13488e4d4280aa66f
4,385
py
Python
src/cs165/passport_holder/migrations/0003_auto_20191123_1710.py
kenserr/cs165
f6f818ae1c05fe492817da8dc460917a0b3020d2
[ "bzip2-1.0.6" ]
null
null
null
src/cs165/passport_holder/migrations/0003_auto_20191123_1710.py
kenserr/cs165
f6f818ae1c05fe492817da8dc460917a0b3020d2
[ "bzip2-1.0.6" ]
null
null
null
src/cs165/passport_holder/migrations/0003_auto_20191123_1710.py
kenserr/cs165
f6f818ae1c05fe492817da8dc460917a0b3020d2
[ "bzip2-1.0.6" ]
null
null
null
# Generated by Django 2.0.7 on 2019-11-23 09:10 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('passport_holder', '0002_auto_20191123_1701'), ] operations = [ migrations.AlterField( model_name='passport_holder', name='applicant_name', field=models.CharField(max_length=50), ), migrations.AlterField( model_name='passport_holder', name='citizen_of_another_country_before', field=models.CharField(max_length=50), ), migrations.AlterField( model_name='passport_holder', name='citizenship_acquiring_method', field=models.CharField(max_length=30), ), migrations.AlterField( model_name='passport_holder', name='contact_number', field=models.CharField(max_length=15), ), migrations.AlterField( model_name='passport_holder', name='email_address', field=models.CharField(max_length=50), ), migrations.AlterField( model_name='passport_holder', name='emergency_contact', field=models.CharField(max_length=50), ), migrations.AlterField( model_name='passport_holder', name='fathers_citizenship', field=models.CharField(max_length=50), ), migrations.AlterField( model_name='passport_holder', name='fathers_last_name', field=models.CharField(max_length=50), ), migrations.AlterField( model_name='passport_holder', name='fathers_name', field=models.CharField(max_length=50), ), migrations.AlterField( model_name='passport_holder', name='home_address', field=models.CharField(max_length=200), ), migrations.AlterField( model_name='passport_holder', name='issuing_authority', field=models.CharField(max_length=30), ), migrations.AlterField( model_name='passport_holder', name='lost_citizenship', field=models.CharField(max_length=3), ), migrations.AlterField( model_name='passport_holder', name='mothers_citizenship', field=models.CharField(max_length=50), ), migrations.AlterField( model_name='passport_holder', name='mothers_last_name', field=models.CharField(max_length=50), ), migrations.AlterField( model_name='passport_holder', name='mothers_name', 
field=models.CharField(max_length=50), ), migrations.AlterField( model_name='passport_holder', name='passport_no', field=models.CharField(max_length=10), ), migrations.AlterField( model_name='passport_holder', name='passport_status', field=models.CharField(max_length=30), ), migrations.AlterField( model_name='passport_holder', name='place_issued', field=models.CharField(max_length=50), ), migrations.AlterField( model_name='passport_holder', name='place_of_birth', field=models.CharField(max_length=50), ), migrations.AlterField( model_name='passport_holder', name='preferred_delivery_address', field=models.CharField(max_length=200), ), migrations.AlterField( model_name='passport_holder', name='present_address', field=models.CharField(max_length=200), ), migrations.AlterField( model_name='passport_holder', name='served_military_of_other_country', field=models.CharField(max_length=50), ), migrations.AlterField( model_name='passport_holder', name='sex', field=models.CharField(max_length=1), ), migrations.AlterField( model_name='passport_holder', name='spouse_name', field=models.CharField(max_length=50), ), ]
32.723881
55
0.571266
400
4,385
5.9875
0.185
0.130271
0.250522
0.290605
0.846764
0.810438
0.790814
0.717328
0.664301
0.664301
0
0.027
0.324287
4,385
133
56
32.969925
0.781303
0.010262
0
0.724409
1
0
0.183725
0.032734
0
0
0
0
0
1
0
false
0.212598
0.007874
0
0.031496
0
0
0
0
null
0
1
1
1
1
1
1
0
1
0
0
0
0
0
0
0
0
0
0
1
1
0
0
0
null
0
0
0
0
0
0
0
1
0
0
0
0
0
10
d8ec9446a58a1b3cdc979ece7cae66e488a4991a
1,893
py
Python
dlp/migrations/0008_auto_20200506_1349.py
bpotvin-bccrc/colossus
fa5ca7ce4cfe794c7d2167acb868aa9167988941
[ "MIT" ]
2
2018-10-03T16:05:14.000Z
2019-03-08T23:01:29.000Z
dlp/migrations/0008_auto_20200506_1349.py
bpotvin-bccrc/colossus
fa5ca7ce4cfe794c7d2167acb868aa9167988941
[ "MIT" ]
3
2019-05-09T22:48:22.000Z
2020-06-05T18:52:05.000Z
dlp/migrations/0008_auto_20200506_1349.py
bpotvin-bccrc/colossus
fa5ca7ce4cfe794c7d2167acb868aa9167988941
[ "MIT" ]
4
2018-08-16T22:25:10.000Z
2021-02-19T16:10:15.000Z
# -*- coding: utf-8 -*- # Generated by Django 1.11.17 on 2020-05-06 20:49 from __future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('dlp', '0007_auto_20200327_1736'), ] operations = [ migrations.AddField( model_name='dlplibrary', name='normal_sample_id', field=models.CharField(blank=True, default=None, max_length=50, null=True, verbose_name='Normal sample ID'), ), migrations.AddField( model_name='dlplibrary', name='normal_sample_library_id', field=models.CharField(blank=True, default=None, max_length=50, null=True, verbose_name='Normal sample library ID'), ), migrations.AddField( model_name='dlplibrary', name='normal_sample_type', field=models.CharField(blank=True, choices=[('D', 'DLP+ library'), ('W', 'GSC WGS library')], default=None, max_length=50, null=True, verbose_name='Normal sample type'), ), migrations.AddField( model_name='historicaldlplibrary', name='normal_sample_id', field=models.CharField(blank=True, default=None, max_length=50, null=True, verbose_name='Normal sample ID'), ), migrations.AddField( model_name='historicaldlplibrary', name='normal_sample_library_id', field=models.CharField(blank=True, default=None, max_length=50, null=True, verbose_name='Normal sample library ID'), ), migrations.AddField( model_name='historicaldlplibrary', name='normal_sample_type', field=models.CharField(blank=True, choices=[('D', 'DLP+ library'), ('W', 'GSC WGS library')], default=None, max_length=50, null=True, verbose_name='Normal sample type'), ), ]
41.152174
181
0.630217
213
1,893
5.413146
0.2723
0.104076
0.166522
0.140503
0.83608
0.83608
0.83608
0.83608
0.763226
0.711188
0
0.032033
0.241416
1,893
45
182
42.066667
0.770891
0.03645
0
0.789474
1
0
0.222954
0.03899
0
0
0
0
0
1
0
false
0
0.052632
0
0.131579
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
8
2b374a9c68ee3987fb84c97c630a334f7f22b2e1
217
py
Python
forge_sdk/utils/__init__.py
ArcBlock/forge-python-sdk
4e72d75d3c06b16554d660860708732c83b5f8b2
[ "Apache-2.0" ]
9
2019-05-08T01:30:22.000Z
2020-05-08T22:11:40.000Z
forge_sdk/utils/__init__.py
ArcBlock/forge-python-sdk
4e72d75d3c06b16554d660860708732c83b5f8b2
[ "Apache-2.0" ]
22
2019-05-14T18:36:17.000Z
2019-12-24T10:09:42.000Z
forge_sdk/utils/__init__.py
ArcBlock/forge-python-sdk
4e72d75d3c06b16554d660860708732c83b5f8b2
[ "Apache-2.0" ]
null
null
null
from forge_sdk.utils.conversion import * from forge_sdk.utils.crypto import * from forge_sdk.utils.other import * from forge_sdk.utils.proto import * from forge_sdk.utils.tx import * from forge_sdk.utils.itx import *
31
40
0.806452
36
217
4.694444
0.305556
0.319527
0.426036
0.60355
0.680473
0
0
0
0
0
0
0
0.110599
217
6
41
36.166667
0.875648
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
7
2b49fb97da62aa791cdf801a11e948542bc7b26c
203
py
Python
src/vocabulator/grpc_api/routes.py
sysint64/vocabulator-server-side
1cbe6253367ca0461be9a88fb9a2cab927170393
[ "MIT" ]
null
null
null
src/vocabulator/grpc_api/routes.py
sysint64/vocabulator-server-side
1cbe6253367ca0461be9a88fb9a2cab927170393
[ "MIT" ]
null
null
null
src/vocabulator/grpc_api/routes.py
sysint64/vocabulator-server-side
1cbe6253367ca0461be9a88fb9a2cab927170393
[ "MIT" ]
null
null
null
from vocabulator.grpc_api.generated.sync_pb2_grpc import add_SyncServicer_to_server from vocabulator.grpc_api.views import Sync def add_services(server): add_SyncServicer_to_server(Sync(), server)
29
83
0.847291
30
203
5.366667
0.5
0.186335
0.236025
0.273292
0
0
0
0
0
0
0
0.005405
0.08867
203
6
84
33.833333
0.864865
0
0
0
1
0
0
0
0
0
0
0
0
1
0.25
false
0
0.5
0
0.75
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
1
0
0
1
0
1
0
0
7
2b737ce95079e636126f4ab281eb04985a0d6697
67
py
Python
util.py
fedecalendino/alfred-crypto-prices
417ff9cda9c8dbb718402cd29f0aa66e572f4a53
[ "MIT" ]
1
2022-01-25T21:58:00.000Z
2022-01-25T21:58:00.000Z
util.py
fedecalendino/alfred-crypto-prices
417ff9cda9c8dbb718402cd29f0aa66e572f4a53
[ "MIT" ]
null
null
null
util.py
fedecalendino/alfred-crypto-prices
417ff9cda9c8dbb718402cd29f0aa66e572f4a53
[ "MIT" ]
null
null
null
import os def getenv(key): return os.getenv(key, "").strip()
11.166667
37
0.626866
10
67
4.2
0.7
0.428571
0
0
0
0
0
0
0
0
0
0
0.19403
67
5
38
13.4
0.777778
0
0
0
0
0
0
0
0
0
0
0
0
1
0.333333
false
0
0.333333
0.333333
1
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
1
1
1
0
0
7
99131475bdc48b338901eaa72baaca0cd62877a0
116
py
Python
src/eascheduler/errors/__init__.py
spacemanspiff2007/eascheduler
849fe8f43b7bbcb8db3e76c0dda2811eb935cf39
[ "Apache-2.0" ]
null
null
null
src/eascheduler/errors/__init__.py
spacemanspiff2007/eascheduler
849fe8f43b7bbcb8db3e76c0dda2811eb935cf39
[ "Apache-2.0" ]
3
2021-04-08T11:02:31.000Z
2022-02-14T06:07:56.000Z
src/eascheduler/errors/__init__.py
spacemanspiff2007/eascheduler
849fe8f43b7bbcb8db3e76c0dda2811eb935cf39
[ "Apache-2.0" ]
null
null
null
from .errors import BoundaryFunctionError, FirstRunInThePastError, JobAlreadyCanceledException, UnknownWeekdayError
58
115
0.905172
7
116
15
1
0
0
0
0
0
0
0
0
0
0
0
0.060345
116
1
116
116
0.963303
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
1
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
1
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
7
991fd82dc02904b324ca2157f179cbde3f7e8671
78,438
py
Python
Tests/KidProfile_2/functionality.py
yupasik/AT
8f4f6535cef8b7714cb325a32dadf36f6f8664ee
[ "Apache-2.0" ]
null
null
null
Tests/KidProfile_2/functionality.py
yupasik/AT
8f4f6535cef8b7714cb325a32dadf36f6f8664ee
[ "Apache-2.0" ]
null
null
null
Tests/KidProfile_2/functionality.py
yupasik/AT
8f4f6535cef8b7714cb325a32dadf36f6f8664ee
[ "Apache-2.0" ]
null
null
null
# Test name = KidProfile_2 # Script dir = R:\Stingray\Tests\KidProfile_2\functionality\functionality.py # Rev v.2.0 from time import sleep from device import handler, updateTestResult import RC import UART import DO import GRAB import MOD import os from DO import status import OPER def runTest(): status("active") TestName = "KidProfile_2" ScriptName = "functionality" ScriptIndex = "1" Grabber = DO.grab_define() platform = DO.load_platform() Modulation = "DVBS" FEC = "3/4" SR = "27500000" Stream2 = "\\Kid Profile\\X_0000_00000_MUX_38000_kidsprofile-test_12226_20130905a.ts" Stream = "\\Kid Profile\\X_0000_00000_MUX_38000_KidProfile_Auto_20140905a.ts" Frequency = 1476 Modulator = "1" COM = "COM7" settings = [ScriptName, ScriptIndex, Grabber, Modulation, FEC, SR, Stream, Frequency, Modulator, COM] DO.save_settings(settings) GRAB.start_capture() MOD.stop(Modulator) ############################ TestCase 1 ########################################## testcase = 1 status("active") UART.default_settings() MOD.stop(Modulator) MOD.play_stream(Modulation, FEC, SR, Stream, Frequency, Modulator) RC.push(["exit 1 1000", "exit 1 1000"]) #-----Activating KID Profile-----# UART.activate_app("kidsmode") #UART.start_app("settings") #sleep(5) #RC.push(["right 1 1500", "right 1 1500", "right 1 1500", "right 1 1500", "right 1 1500", "right 1 1500", "right 1 1500", "OK 1 1500", "down 1 1500", "4 1 500", "3 1 500", "2 1 500", "1 1 500", "1 1 500", "1 1 500", "1 1 500", "1 1 500", "1 1 500", "1 1 500", "1 1 500", "1 1 500", "1 1 500", "1 1 500", "1 1 500", "1 1 500", "1 1 500", "1 1 500", "1 1 5500", "exit 1 500", "exit 1 500", "exit 1 500", "exit 1 500"]) sleep(3) RC.push(["kid_standby 1 10500", "standby 1 15000"]) GRAB.compare(testcase) ############################ TestCase 2 ########################################## testcase = "2_1" status("active") UART.default_settings() RC.push(["exit 1 3500"]) #The following line has been added because of #26846 #RC.push(["kid_1 1 2500", "exit 1 
6000"]) RC.push(["kid_1 1 1500"]) GRAB.compare(testcase) testcase = "2_2" status("active") RC.push(["exit 1 5000"]) RC.push(["kid_2 1 1500"]) GRAB.compare(testcase) testcase = "2_3" status("active") RC.push(["exit 1 5000"]) RC.push(["kid_3 1 1500"]) GRAB.compare(testcase) testcase = "2_4" status("active") RC.push(["exit 1 5000"]) RC.push(["kid_4 1 1500"]) GRAB.compare(testcase) testcase = "2_5" status("active") RC.push(["exit 1 5000"]) RC.push(["kid_5 1 1500"]) GRAB.compare(testcase) testcase = "2_6" status("active") RC.push(["exit 1 5000"]) RC.push(["kid_6 1 1500"]) GRAB.compare(testcase) testcase = "2_7" status("active") RC.push(["exit 1 5000"]) RC.push(["kid_7 1 1500"]) GRAB.compare(testcase) testcase = "2_8" status("active") RC.push(["exit 1 5000"]) RC.push(["kid_8 1 1500"]) GRAB.compare(testcase) testcase = "2_9" status("active") RC.push(["exit 1 5000"]) RC.push(["kid_9 1 1500"]) GRAB.compare(testcase) testcase = "2_10" status("active") RC.push(["exit 1 5000"]) RC.push(["kid_0 1 1500"]) GRAB.compare(testcase) testcase = "2_11" status("active") RC.push(["exit 1 5000"]) RC.push(["kid_up 1 1500"]) GRAB.compare(testcase) testcase = "2_12" status("active") RC.push(["exit 1 5000"]) RC.push(["kid_down 1 1500"]) GRAB.compare(testcase) testcase = "2_13" status("active") RC.push(["exit 1 5000"]) RC.push(["kid_right 1 1500"]) GRAB.compare(testcase) testcase = "2_14" status("active") RC.push(["exit 1 5000"]) RC.push(["kid_left 1 1500"]) GRAB.compare(testcase) testcase = "2_15" status("active") RC.push(["exit 1 5000"]) RC.push(["kid_ok 1 1500"]) GRAB.compare(testcase) testcase = "2_16" status("active") RC.push(["exit 1 5000"]) RC.push(["kid_ChUp 1 1500"]) GRAB.compare(testcase) testcase = "2_17" status("active") RC.push(["exit 1 5000"]) RC.push(["kid_ChDown 1 1500"]) GRAB.compare(testcase) testcase = "2_18" status("active") RC.push(["exit 1 5000"]) RC.push(["kid_VolUp 1 1500"]) GRAB.compare(testcase) testcase = "2_19" status("active") RC.push(["exit 1 5000"]) 
RC.push(["kid_VolDown 1 1500"]) GRAB.compare(testcase) testcase = "2_20" status("active") RC.push(["exit 1 5000"]) RC.push(["kid_star 1 1500"]) GRAB.compare(testcase) ############################ TestCase 3 ########################################## testcase = "3_1" status("active") UART.default_settings() OPER.search() RC.push(["exit 2 7500"]) RC.push(["kid_1 1 4000"]) GRAB.compare(testcase) testcase = "3_2" status("active") RC.push(["exit 1 2000", "OK 1 1000", "exit 1 1000"]) RC.push(["kid_2 1 4000"]) GRAB.compare(testcase) testcase = "3_3" status("active") RC.push(["exit 1 2000", "OK 1 1000", "exit 1 1000"]) RC.push(["kid_3 1 4000"]) GRAB.compare(testcase) testcase = "3_4" status("active") RC.push(["exit 1 2000", "OK 1 1000", "exit 1 1000"]) RC.push(["kid_4 1 4000"]) GRAB.compare(testcase) testcase = "3_5" status("active") RC.push(["exit 1 2000", "OK 1 1000", "exit 1 1000"]) RC.push(["kid_5 1 4000"]) GRAB.compare(testcase) testcase = "3_6" status("active") RC.push(["exit 1 2000", "OK 1 1000", "exit 1 1000"]) RC.push(["kid_6 1 4000"]) GRAB.compare(testcase) testcase = "3_7" status("active") RC.push(["exit 1 2000", "OK 1 1000", "exit 1 1000"]) RC.push(["kid_7 1 4000"]) GRAB.compare(testcase) testcase = "3_8" status("active") RC.push(["exit 1 2000", "OK 1 1000", "exit 1 1000"]) RC.push(["kid_8 1 4000"]) GRAB.compare(testcase) testcase = "3_9" status("active") RC.push(["exit 1 2000", "OK 1 1000", "exit 1 1000"]) RC.push(["kid_9 1 4000"]) GRAB.compare(testcase) testcase = "3_10" status("active") RC.push(["exit 1 2000", "OK 1 1000", "exit 1 1000"]) RC.push(["kid_0 1 4000"]) GRAB.compare(testcase) testcase = "3_11" status("active") RC.push(["exit 1 2000", "OK 1 1000", "exit 1 1000"]) RC.push(["kid_up 1 4000"]) GRAB.compare(testcase) testcase = "3_12" status("active") RC.push(["exit 1 2000", "OK 1 1000", "exit 1 1000"]) RC.push(["kid_down 1 4000"]) GRAB.compare(testcase) testcase = "3_13" status("active") RC.push(["exit 1 2000", "OK 1 1000", "exit 1 1000"]) 
RC.push(["kid_right 1 4000"]) GRAB.compare(testcase) testcase = "3_14" status("active") RC.push(["exit 1 2000", "OK 1 1000", "exit 1 1000"]) RC.push(["kid_left 1 4000"]) GRAB.compare(testcase) testcase = "3_15" status("active") RC.push(["exit 1 2000", "OK 1 1000", "exit 1 1000"]) RC.push(["kid_ok 1 4000"]) GRAB.compare(testcase) testcase = "3_16" status("active") RC.push(["exit 1 2000", "OK 1 1000", "exit 1 1000"]) RC.push(["kid_ChUp 1 4000"]) GRAB.compare(testcase) testcase = "3_17" status("active") RC.push(["exit 1 2000", "OK 1 1000", "exit 1 1000"]) RC.push(["kid_ChDown 1 4000"]) GRAB.compare(testcase) testcase = "3_18" status("active") RC.push(["exit 1 2000", "OK 1 1000", "exit 1 1000"]) RC.push(["kid_VolUp 1 4000"]) GRAB.compare(testcase) testcase = "3_19" status("active") RC.push(["exit 1 2000", "OK 1 1000", "exit 1 1000"]) RC.push(["kid_VolDown 1 4000"]) GRAB.compare(testcase) testcase = "3_20" status("active") RC.push(["exit 1 2000", "OK 1 1000", "exit 1 1000"]) RC.push(["kid_star 1 4000"]) GRAB.compare(testcase) ############################ TestCase 4 ########################################## testcase = 4 status("active") RC.push(["exit 1 2000", "OK 1 1000", "exit 1 1000"]) #UART.start_app("kidsmode") RC.push(["kid_star 1 4000"]) sleep(2) GRAB.compare(testcase) ############################ TestCase 5 ########################################## testcase = 5 status("active") UART.reboot() GRAB.compare(testcase) ############################ TestCase 6 ########################################## testcase = 6 status("manual") GRAB.compare(testcase) ############################ TestCase 7 ########################################## testcase = 7 status("manual") GRAB.compare(testcase) ############################ TestCase 8 ########################################## testcase = 8 status("active") #UART.start_app("kidsmode") RC.push(["kid_star 1 4000"]) sleep(2) RC.push(["standby 1 15000", "standby 1 6000"]) GRAB.compare(testcase) ############################ 
TestCase 9 ########################################## testcase = 9 status("active") #UART.start_app("kidsmode") RC.push(["kid_star 1 4000"]) RC.push(["kid_standby 1 15000", "kid_standby 1 6000"]) GRAB.compare(testcase) ############################ TestCase 10 ########################################## testcase = "10_1" #TricolorTV Search status("active") RC.push(["exit 1 1500", "ok 1 1500"]) UART.start_app("tricolorsearch") sleep(10) RC.push(["kid_OK 1 3000"]) GRAB.compare(testcase) testcase = "10_2" #Wizard status("active") RC.push(["exit 1 1500", "ok 1 1500"]) UART.start_app("wizard") sleep(3) RC.push(["kid_OK 1 3000"]) if platform in ["E501", "E502", "A230"]: RC.push(["kid_OK 1 1500"]) GRAB.compare(testcase) testcase = "10_3" #Manual Search #----------- #BUG 21762 is FIXED #----------- status("active") RC.push(["exit 1 1500", "ok 1 1500"]) UART.start_app("dvbsmanualscanner") sleep(10) RC.push(["kid_OK 1 3000"]) GRAB.compare(testcase) testcase = "10_4" #Terrestrial Search if platform == "E212": status("active") else: status("inactive") RC.push(["exit 1 1500", "ok 1 1500"]) UART.start_app("dvbtscanner") sleep(10) RC.push(["kid_OK 1 3000"]) GRAB.compare(testcase) ############################ TestCase 278 ########################################## testcase = 278 status("active") UART.start_app("channelsearch") RC.push(["kid_1 1 5500"]) GRAB.compare(testcase) ############################ TestCase 11 ########################################## testcase = 11 status("manual") GRAB.compare(testcase) ############################ TestCase 12 ########################################## testcase = 12 status("manual") GRAB.compare(testcase) ############################ TestCase 13 ########################################## testcase = 13 status("active") RC.push(["exit 1 1000", "exit 1 1000", "exit 1 1000", "mute 1 1000"]) RC.push(["kid_4 1 3500"]) GRAB.compare(testcase) ############################ TestCase 14 ########################################## testcase = "14_1" 
status("active") RC.push(["exit 1 1500", "OK 1 1500", "mute 1 6000"]) RC.push(["VolUp 1 100", "kid_4 1 1000"]) GRAB.compare(testcase) testcase = "14_2" status("active") RC.push(["exit 1 1500", "OK 1 1500", "exit 1 5500"]) RC.push(["VolDown 1 1000", "kid_ok 1 1000"]) GRAB.compare(testcase) testcase = "14_3" status("active") RC.push(["exit 1 1500", "OK 1 1500", "exit 1 5000"]) RC.push(["right 1 1000", "kid_star 1 1000"]) GRAB.compare(testcase) testcase = "14_4" status("active") RC.push(["exit 1 1500", "OK 1 1500", "exit 1 5000"]) RC.push(["left 1 1000", "kid_9 1 1000"]) GRAB.compare(testcase) ############################ TestCase 15 ########################################## testcase = 15 #TV Mail status("active") UART.default_settings() MOD.stop(Modulator) MOD.play_stream(Modulation, FEC, SR, Stream2, Frequency, Modulator) OPER.search() MOD.stop(Modulator) UART.reboot() MOD.play_stream(Modulation, FEC, SR, Stream2, Frequency, Modulator) sleep(60) RC.push(["kid_Left 1 1000", "kid_4 1 5500"]) GRAB.compare(testcase) ############################ TestCase 16 ########################################## testcase = 16 #InfoCAS status("active") UART.default_settings() MOD.stop(Modulator) MOD.play_stream(Modulation, FEC, SR, Stream2, Frequency, Modulator) OPER.search() MOD.stop(Modulator) RC.push(["exit 1 1000", "OK 1 1000", "exit 1 1000", "OK 1 1000", "exit 1 1000"]) MOD.stop(Modulator) UART.reboot() MOD.play_stream(Modulation, FEC, SR, Stream2, Frequency, Modulator) RC.push(["exit 1 1000"]) sleep(125) GRAB.compare("16_1") RC.push(["kid_star 1 4000"]) GRAB.compare_invert("16_2") ############################ TestCase 17 ########################################## testcase = 17 status("active") RC.push(["exit 1 1000", "OK 1 1000", "exit 1 1000", "OK 1 1000", "exit 1 1000"]) RC.push(["cinemahalls 1 1000", "kid_1 1 1000"]) GRAB.compare(testcase) ############################ TestCase 18 ########################################## testcase = 18 status("active") MOD.stop(Modulator) 
UART.default_settings() MOD.play_stream(Modulation, FEC, SR, Stream, Frequency, Modulator) OPER.search() RC.push(["exit 1 1500"]) UART.start_app("scheduler") if platform == "E212": RC.push(["red 1 1500", "ok 1 1500", "down 1 1000", "ok 1 1000", "ok 1 1000", "right 1 1000", "0 1 1000", "4 1 1000", "ok 1 2000", "ok 1 2000", "exit 1 1500", "exit 1 1500", "exit 1 1500", "exit 1 1500"]) else: RC.push(["red 1 1500", "ok 1 1500", "up 1 1000", "up 1 1000", "ok 1 1000", "ok 1 1000", "right 1 1000", "1 1 1000", "8 1 1000", "ok 1 2000", "ok 1 2000", "exit 1 1500", "exit 1 1500", "exit 1 1500", "exit 1 1500"]) RC.push(["kid_1 1 1500"]) if platform == "E212": sleep(41) else: sleep(70) GRAB.compare(testcase) ############################ TestCase 19 ########################################## testcase = 19 status("active") MOD.stop(Modulator) UART.default_settings() MOD.play_stream(Modulation, FEC, SR, Stream, Frequency, Modulator) OPER.search() RC.push(["exit 1 1500"]) UART.start_app("scheduler") if platform == "E212": RC.push(["red 1 1500", "ok 1 1500", "down 1 1000", "ok 1 1000", "ok 1 1000", "right 1 1000", "0 1 1000", "4 1 1000", "ok 1 2000", "ok 1 2000", "exit 1 1500", "exit 1 1500", "exit 1 1500", "exit 1 1500"]) else: RC.push(["red 1 1500", "ok 1 1500", "up 1 1000", "up 1 1000", "up 1 1000", "ok 1 1000", "ok 1 1000", "right 1 1000", "1 1 1000", "8 1 1000", "ok 1 2000", "ok 1 2000", "exit 1 1500", "exit 1 1500", "exit 1 1500", "exit 1 1500"]) RC.push(["kid_1 1 1500"]) if platform == "E212": sleep(41) else: sleep(70) GRAB.compare(testcase) ############################ TestCase 20 ########################################## testcase = 20 status("active") UART.default_settings() OPER.search() RC.push(["exit 1 1000", "clock 1 1000", "kid_1 1 10000"]) GRAB.compare(testcase) ############################ TestCase 21 ########################################## testcase = 21 status("active") RC.push(["clock 1 1000", "exit 1 1000", "OK 1 1000", "yellow 1 2000", "kid_OK 1 3500"]) 
# --- TestCase 21 (continued) through TestCase 40 (setup) ---
GRAB.compare(testcase)
############################ TestCase 22 ##########################################
testcase = 22
status("active")
RC.push(["exit 1 1000", "OK 1 1000", "green 1 2000", "kid_OK 1 3500"])
GRAB.compare(testcase)
############################ TestCase 23 ##########################################
testcase = 23
status("active")
RC.push(["exit 1 1000", "OK 1 1000", "format 1 2000", "kid_OK 1 3500"])
GRAB.compare(testcase)
############################ TestCase 24 ##########################################
# TestCases 24-27 require human verification ("manual" status).
testcase = 24
status("manual")
GRAB.compare(testcase)
############################ TestCase 25 ##########################################
testcase = 25
status("manual")
GRAB.compare(testcase)
############################ TestCase 26 ##########################################
testcase = 26
status("manual")
GRAB.compare(testcase)
############################ TestCase 27 ##########################################
testcase = 27
status("manual")
GRAB.compare(testcase)
############################ TestCase 28 ##########################################
testcase = 28
status("active")
RC.push(["ok 1 2000"])
GRAB.compare(testcase)
############################ TestCase 29 ##########################################
testcase = 29
status("active")
RC.push(["exit 1 1000", "3 1 7000", "up 1 1000"])
GRAB.compare(testcase)
############################ TestCase 30 ##########################################
testcase = 30
status("active")
sleep(4)
RC.push(["2 1 7000", "down 1 2000"])
GRAB.compare(testcase)
############################ TestCase 31 ##########################################
testcase = 31
status("active")
sleep(4)
RC.push(["left 1 2000"])
GRAB.compare(testcase)
############################ TestCase 32 ##########################################
testcase = 32
status("active")
RC.push(["right 1 2000"])
GRAB.compare(testcase)
############################ TestCase 33 ##########################################
testcase = 33
status("active")
RC.push(["3 1 7000", "ChUp 1 3000"])
GRAB.compare(testcase)
############################ TestCase 34 ##########################################
testcase = 34
status("active")
RC.push(["2 1 7000", "ChDown 1 3000"])
GRAB.compare(testcase)
############################ TestCase 35 ##########################################
testcase = 35
status("active")
sleep(5)
RC.push(["VolUp 1 1000"])
GRAB.compare(testcase)
############################ TestCase 36 ##########################################
testcase = 36
status("active")
RC.push(["VolDown 1 1000"])
GRAB.compare(testcase)
############################ TestCase 37 ##########################################
# TestCase 37 (parts 37_1..37_9): each digit key 1-9 selects a channel.
testcase = "37_1"
status("active")
RC.push(["2 1 1000"])
sleep(5)
RC.push(["1 1 1000"])
GRAB.compare(testcase)
testcase = "37_2"
status("active")
sleep(5)
RC.push(["2 1 1000"])
GRAB.compare(testcase)
testcase = "37_3"
status("active")
sleep(5)
RC.push(["3 1 1000"])
GRAB.compare(testcase)
testcase = "37_4"
status("active")
sleep(5)
RC.push(["4 1 1000"])
GRAB.compare(testcase)
testcase = "37_5"
status("active")
sleep(5)
RC.push(["5 1 1000"])
GRAB.compare(testcase)
testcase = "37_6"
status("active")
sleep(5)
RC.push(["6 1 1000"])
GRAB.compare(testcase)
testcase = "37_7"
status("active")
sleep(5)
RC.push(["7 1 1000"])
GRAB.compare(testcase)
testcase = "37_8"
status("active")
sleep(5)
RC.push(["8 1 1000"])
GRAB.compare(testcase)
testcase = "37_9"
status("active")
sleep(5)
RC.push(["9 1 1000"])
GRAB.compare(testcase)
############################ TestCase 38 ##########################################
testcase = 38
status("active")
sleep(5)
RC.push(["9 1 1000"])
GRAB.compare(testcase)
############################ TestCase 39 ##########################################
testcase = 39
status("active")
RC.push(["standby 1 15000", "standby 1 5000"])
GRAB.compare(testcase)
############################ TestCase 40 ##########################################
testcase = 40
status("active")
# Re-provision and enable the parental PIN before the PIN-related tests.
UART.default_settings()
OPER.search()
OPER.set_pin()
# --- TestCase 40 (continued) through TestCase 56 ---
RC.push(["kid_OK 1 10000", "menu 1 1000"])
GRAB.compare(testcase)
# NOTE: the following tests deliberately run out of numerical order
# (testcase numbers 42, 44, 41, 43, 45) for convenience.
############################ TestCase 41 ##########################################
testcase = 42
status("active")
RC.push(["exit 1 1000", "exit 1 1000"])
GRAB.compare(testcase)
############################ TestCase 42 ##########################################
testcase = 44
status("active")
RC.push(["exit 1 1000", "last 1 1000"])
GRAB.compare(testcase)
# Enter the PIN (0000) to clear the PIN prompt before the next test.
RC.push(["0 1 1000", "0 1 1000", "0 1 1000", "0 1 1000"])
############################ TestCase 43 ##########################################
testcase = 41
status("active")
# Repeat the same navigation with the parental PIN disabled.
OPER.unset_pin()
RC.push(["kid_OK 1 10000", "menu 1 1000"])
GRAB.compare(testcase)
############################ TestCase 44 ##########################################
testcase = 43
status("active")
RC.push(["exit 1 1000", "exit 1 1000"])
GRAB.compare(testcase)
############################ TestCase 45 ##########################################
testcase = 45
status("active")
RC.push(["exit 1 1000", "last 1 1000"])
GRAB.compare(testcase)
# NOTE: from here on the tests run in numerical order again.
############################ TestCase 46 ##########################################
testcase = 46
status("active")
RC.push(["exit 1 1000", "format 1 1000"])
GRAB.compare(testcase)
############################ TestCase 47 ##########################################
testcase = 47
status("active")
RC.push(["exit 1 7000", "mute 1 1000"])
GRAB.compare(testcase)
############################ TestCase 48 ##########################################
testcase = 48
status("active")
RC.push(["mute 1 1000", "blue 1 1000"])
GRAB.compare(testcase)
############################ TestCase 49 ##########################################
testcase = 49
status("active")
RC.push(["exit 1 1000", "red 1 1000"])
GRAB.compare(testcase)
############################ TestCase 50 ##########################################
testcase = 50
status("active")
RC.push(["exit 1 1000", "yellow 1 1000"])
GRAB.compare(testcase)
############################ TestCase 51 ##########################################
testcase = 51
status("active")
RC.push(["exit 1 1000", "green 1 1000"])
GRAB.compare(testcase)
############################ TestCase 52 ##########################################
testcase = 52
status("active")
RC.push(["exit 1 1000", "Rec 1 1000"])
GRAB.compare(testcase)
############################ TestCase 53 ##########################################
testcase = 53
status("active")
RC.push(["play/pause 1 1000"])
GRAB.compare(testcase)
############################ TestCase 54 ##########################################
testcase = 54
status("active")
sleep(10)
RC.push(["guide 1 1000"])
GRAB.compare(testcase)
############################ TestCase 55 ##########################################
testcase = 55
status("active")
RC.push(["exit 1 1000", "forward 1 1000"])
GRAB.compare(testcase)
############################ TestCase 56 ##########################################
testcase = 56
status("active")
sleep(10)
RC.push(["backward 1 1000"])
GRAB.compare(testcase)
############################ TestCase 57 ########################################## testcase = 57 status("active") sleep(10) RC.push(["stop 1 1000"]) GRAB.compare(testcase) ############################ TestCase 58 ########################################## testcase = 58 status("active") sleep(10) RC.push(["clock 1 1000"]) GRAB.compare(testcase) ############################ TestCase 59 ########################################## testcase = "59_1" status("active") RC.push(["clock 1 1000", "status 1 3000"]) GRAB.compare(testcase) testcase = "59_2" status("active") sleep(10) RC.push(["help 1 3000"]) GRAB.compare(testcase) testcase = "59_3" status("active") sleep(10) RC.push(["reclist 1 3000"]) GRAB.compare(testcase) testcase = "59_4" status("active") sleep(10) RC.push(["cinemahalls 1 3000"]) GRAB.compare(testcase) testcase = "59_5" status("active") sleep(10) RC.push(["www 1 3000"]) GRAB.compare(testcase) testcase = "59_6" status("active") sleep(10) RC.push(["Tv/radio 1 3000"]) GRAB.compare(testcase) #STB-131 testcase = "59_7" status("active") sleep(10) RC.push(["stb 1 3000"]) GRAB.compare(testcase) testcase = "59_8" status("active") sleep(10) RC.push(["Tv/chat 1 3000"]) GRAB.compare(testcase) testcase = "59_9" status("active") sleep(10) RC.push(["mail 1 3000"]) GRAB.compare(testcase) ############################ TestCase 60 ########################################## testcase = 60 status("active") RC.push(["kid_4 1 5500", "kid_ok 1 3000"]) GRAB.compare(testcase) ############################ TestCase 61 ########################################## testcase = 61 status("active") RC.push(["exit 1 1000", "kid_3 1 6000", "kid_up 1 3500"]) GRAB.compare(testcase) ############################ TestCase 62 ########################################## testcase = 62 status("active") sleep(5) RC.push(["kid_1 1 6000", "kid_up 1 6000", "kid_down 1 2000"]) GRAB.compare(testcase) ############################ TestCase 63 ########################################## testcase = 63 
# --- TestCase 63 (continued) through TestCase 102 ---
status("active")
sleep(5)
RC.push(["kid_left 1 1500"])
GRAB.compare(testcase)
############################ TestCase 64 ##########################################
testcase = 64
status("active")
RC.push(["kid_right 1 1500"])
GRAB.compare(testcase)
############################ TestCase 65 ##########################################
testcase = 65
status("active")
RC.push(["kid_3 1 6000", "kid_3 1 6000", "kid_ChUp 1 2000"])
GRAB.compare(testcase)
############################ TestCase 66 ##########################################
testcase = 66
status("active")
RC.push(["kid_1 1 6000", "kid_up 1 6000", "kid_ChDown 1 2500"])
GRAB.compare(testcase)
############################ TestCase 67 ##########################################
testcase = 67
status("active")
RC.push(["kid_VolUp 1 1500"])
GRAB.compare(testcase)
############################ TestCase 68 ##########################################
testcase = 68
status("active")
RC.push(["kid_VolDown 1 1500"])
GRAB.compare(testcase)
############################ TestCase 69 ##########################################
# TestCase 69 (parts 69_1..69_9): each kid-remote digit key 1-9.
testcase = "69_1"
status("active")
RC.push(["2 1 6000", "kid_1 1 1500"])
GRAB.compare(testcase)
testcase = "69_2"
status("active")
sleep(10)
RC.push(["kid_2 1 1500"])
GRAB.compare(testcase)
testcase = "69_3"
status("active")
sleep(10)
RC.push(["kid_3 1 1000"])
GRAB.compare(testcase)
testcase = "69_4"
status("active")
sleep(10)
RC.push(["kid_4 1 1500"])
GRAB.compare(testcase)
testcase = "69_5"
status("active")
sleep(10)
RC.push(["kid_5 1 1500"])
GRAB.compare(testcase)
testcase = "69_6"
status("active")
sleep(10)
RC.push(["kid_6 1 1500"])
GRAB.compare(testcase)
testcase = "69_7"
status("active")
sleep(10)
RC.push(["kid_7 1 1500"])
GRAB.compare(testcase)
testcase = "69_8"
status("active")
sleep(10)
RC.push(["kid_8 1 1500"])
GRAB.compare(testcase)
testcase = "69_9"
status("active")
sleep(10)
RC.push(["kid_9 1 1500"])
GRAB.compare(testcase)
############################ TestCase 70 ##########################################
testcase = 70
status("active")
sleep(10)
RC.push(["kid_0 1 1500"])
GRAB.compare(testcase)
############################ TestCase 71 ##########################################
testcase = 71
status("active")
RC.push(["kid_star 1 5500"])
GRAB.compare(testcase)
############################ TestCase 72 ##########################################
testcase = 72
status("active")
RC.push(["kid_standby 1 15000", "kid_standby 1 5000"])
GRAB.compare(testcase)
############################ TestCase 73 ##########################################
testcase = 73
status("active")
UART.default_settings()
OPER.search()
RC.push(["kid_star 1 7000", "OK 1 2000", "down 3 2000"])
RC.push(["OK 1 2000"])
GRAB.compare(testcase)
############################ TestCase 74 ##########################################
testcase = 74
status("active")
RC.push(["ok 1 2000", "up 1 1500"])
GRAB.compare(testcase)
############################ TestCase 75 ##########################################
testcase = 75
status("active")
RC.push(["down 1 1000"])
GRAB.compare(testcase)
############################ TestCase 76 ##########################################
testcase = 76
status("active")
RC.push(["left 1 1000"])
GRAB.compare(testcase)
############################ TestCase 77 ##########################################
testcase = 77
status("active")
RC.push(["right 1 1000"])
GRAB.compare(testcase)
############################ TestCase 78 ##########################################
testcase = 78
status("active")
# NOTE(review): "chUp" is lower-case here but "ChUp" everywhere else —
# confirm whether RC key names are case-sensitive before "fixing".
RC.push(["left 1 1000", "chUp 1 2000"])
GRAB.compare(testcase)
############################ TestCase 79 ##########################################
testcase = 79
status("active")
RC.push(["ChDown 1 1000"])
GRAB.compare(testcase)
############################ TestCase 80 ##########################################
testcase = 80
status("active")
RC.push(["standby 1 15000", "standby 1 5000"])
GRAB.compare(testcase)
############################ TestCase 81 ##########################################
testcase = 81
status("active")
RC.push(["exit 1 6000", "blue 1 1000", "mute 1 1000"])
GRAB.compare(testcase)
############################ TestCase 82 ##########################################
testcase = 82
status("active")
RC.push(["mute 1 2000", "blue 1 2000"])
GRAB.compare(testcase)
############################ TestCase 83 ##########################################
testcase = 83
status("active")
RC.push(["blue 1 1000", "menu 1 2000"])
GRAB.compare(testcase)
############################ TestCase 84 ##########################################
testcase = 84
status("active")
RC.push(["blue 1 1000", "exit 1 1000"])
GRAB.compare(testcase)
############################ TestCase 85 ##########################################
testcase = 85
status("active")
RC.push(["blue 1 1000", "last 1 1000"])
GRAB.compare(testcase)
############################ TestCase 86 ##########################################
testcase = 86
status("active")
RC.push(["3 1 7000", "blue 1 1000", "down 1 1000", "kid_ok 1 2000"])
GRAB.compare(testcase)
############################ TestCase 87 ##########################################
testcase = 87
status("active")
RC.push(["blue 1 1000", "kid_up 1 1000"])
GRAB.compare(testcase)
############################ TestCase 88 ##########################################
testcase = 88
status("active")
RC.push(["kid_down 1 1000"])
GRAB.compare(testcase)
############################ TestCase 89 ##########################################
testcase = 89
status("active")
RC.push(["kid_left 1 1000"])
GRAB.compare(testcase)
############################ TestCase 90 ##########################################
testcase = 90
status("active")
RC.push(["kid_right 1 1000"])
GRAB.compare(testcase)
############################ TestCase 91 ##########################################
testcase = 91
status("active")
RC.push(["kid_left 1 1000", "kid_ChUp 1 1000"])
GRAB.compare(testcase)
############################ TestCase 92 ##########################################
testcase = 92
status("active")
RC.push(["kid_ChDown 1 1000"])
GRAB.compare(testcase)
############################ TestCase 93 ##########################################
testcase = 93
status("active")
RC.push(["kid_VolUp 1 1000"])
GRAB.compare(testcase)
############################ TestCase 94 ##########################################
testcase = 94
status("active")
RC.push(["kid_VolDown 1 1000"])
GRAB.compare(testcase)
############################ TestCase 95 ##########################################
testcase = 95
status("active")
RC.push(["kid_standby 1 5000", "kid_standby 1 5000"])
GRAB.compare(testcase)
############################ TestCase 96 ##########################################
testcase = 96
status("active")
RC.push(["blue 1 1000", "kid_star 1 1000"])
GRAB.compare(testcase)
############################ TestCase 97 ##########################################
testcase = 97
status("active")
UART.default_settings()
OPER.search()
RC.push(["kid_star 1 7000", "kid_star 1 7000", "down 3 2000"])
RC.push(["OK 1 2000"])
GRAB.compare(testcase)
############################ TestCase 98 ##########################################
testcase = 98
status("active")
RC.push(["up 1 1000"])
GRAB.compare(testcase)
############################ TestCase 99 ##########################################
testcase = 99
status("active")
RC.push(["down 1 1000"])
GRAB.compare(testcase)
############################ TestCase 100 ##########################################
testcase = 100
status("active")
RC.push(["left 1 1000"])
GRAB.compare(testcase)
############################ TestCase 101 ##########################################
testcase = 101
status("active")
RC.push(["up 1 1000", "up 1 1000", "up 1 1000", "right 1 1000"])
GRAB.compare(testcase)
############################ TestCase 102 ##########################################
testcase = 102
status("active")
RC.push(["down 1 1000", "ChUp 1 1000"])
GRAB.compare(testcase)
############################ TestCase 103 ########################################## testcase = 103 status("active") RC.push(["ChDown 1 1000"]) GRAB.compare(testcase) ############################ TestCase 104 ########################################## testcase = 104 status("active") RC.push(["standby 1 15000", "standby 1 7000"]) GRAB.compare(testcase) ############################ TestCase 105 ########################################## testcase = 105 status("active") RC.push(["exit 1 1000", "kid_star 1 2000", "mute 1 1000"]) GRAB.compare(testcase) ############################ TestCase 106 ########################################## testcase = 106 status("active") RC.push(["mute 1 1000", "guide 1 1000"]) GRAB.compare(testcase) ############################ TestCase 107 ########################################## testcase = 107 status("active") RC.push(["kid_star 1 2000", "menu 1 1000"]) GRAB.compare(testcase) ############################ TestCase 108 ########################################## testcase = 108 status("active") RC.push(["kid_star 1 2000", "exit 1 1000"]) GRAB.compare(testcase) ############################ TestCase 109 ########################################## testcase = 109 status("active") RC.push(["kid_star 1 2000", "last 1 1000"]) GRAB.compare(testcase) ############################ TestCase 110 ########################################## testcase = 110 status("active") RC.push(["3 1 5000", "kid_star 1 2000", "down 1 1000", "kid_ok 1 2000"]) GRAB.compare(testcase) ############################ TestCase 111 ########################################## testcase = 111 status("active") RC.push(["kid_up 1 2000"]) GRAB.compare(testcase) ############################ TestCase 112 ########################################## testcase = 112 status("active") RC.push(["kid_down 1 2000"]) GRAB.compare(testcase) ############################ TestCase 113 ########################################## testcase = 113 status("active") RC.push(["kid_left 1 1000"]) 
# --- TestCase 113 (continued) through TestCase 161 (setup) ---
GRAB.compare(testcase)
############################ TestCase 114 ##########################################
testcase = 114
status("active")
RC.push(["kid_up 1 1000", "kid_up 1 1000", "kid_up 1 1000", "kid_right 1 1000"])
GRAB.compare(testcase)
############################ TestCase 115 ##########################################
testcase = 115
status("active")
RC.push(["kid_down 1 1000", "kid_ChUp 1 1000"])
GRAB.compare(testcase)
############################ TestCase 116 ##########################################
testcase = 116
status("active")
RC.push(["kid_ChDown 1 2000"])
GRAB.compare(testcase)
# NOTE: testcases 279/280 are interleaved here (out of numerical order).
############################ TestCase 279 ##########################################
testcase = 279
status("active")
RC.push(["kid_VolUp 1 1000"])
GRAB.compare(testcase)
############################ TestCase 280 ##########################################
testcase = 280
status("active")
RC.push(["kid_VolDown 1 2000"])
GRAB.compare(testcase)
############################ TestCase 117 ##########################################
testcase = 117
status("active")
RC.push(["kid_standby 1 15000", "kid_standby 1 7000"])
GRAB.compare(testcase)
############################ TestCase 118 ##########################################
testcase = 118
status("active")
RC.push(["exit 1 1000", "kid_star 1 2000", "kid_star 1 1000"])
GRAB.compare(testcase)
############################ TestCase 119 ##########################################
testcase = 119
status("active")
UART.default_settings()
OPER.search()
RC.push(["kid_star 1 7000", "red 1 3000"])
RC.push(["OK 1 2000"])
GRAB.compare(testcase)
############################ TestCase 120 ##########################################
testcase = 120
status("active")
RC.push(["down 1 1000"])
GRAB.compare(testcase)
############################ TestCase 121 ##########################################
testcase = 121
status("active")
RC.push(["up 1 1000", "left 1 1000"])
GRAB.compare(testcase)
############################ TestCase 122 ##########################################
testcase = 122
status("active")
RC.push(["right 1 1000"])
GRAB.compare(testcase)
############################ TestCase 123 ##########################################
testcase = 123
status("active")
RC.push(["ChUp 1 1000"])
GRAB.compare(testcase)
############################ TestCase 124 ##########################################
testcase = 124
status("active")
RC.push(["ChDown 1 1000"])
GRAB.compare(testcase)
############################ TestCase 125 ##########################################
testcase = 125
status("active")
RC.push(["standby 1 15000", "standby 1 7000"])
GRAB.compare(testcase)
############################ TestCase 126 ##########################################
testcase = 126
status("active")
RC.push(["exit 1 1000", "red 1 2000", "mute 1 1000"])
GRAB.compare(testcase)
############################ TestCase 127 ##########################################
testcase = 127
status("active")
RC.push(["mute 1 1000", "red 1 1000"])
GRAB.compare(testcase)
############################ TestCase 128 ##########################################
testcase = 128
status("active")
RC.push(["red 1 1000", "menu 1 1000"])
GRAB.compare(testcase)
############################ TestCase 129 ##########################################
testcase = 129
status("active")
RC.push(["red 1 1000", "exit 1 1000"])
GRAB.compare(testcase)
############################ TestCase 130 ##########################################
testcase = 130
status("active")
RC.push(["red 1 1000", "last 1 1000"])
GRAB.compare(testcase)
############################ TestCase 131 ##########################################
testcase = 131
status("active")
RC.push(["red 1 1000", "kid_ok 1 1000"])
GRAB.compare(testcase)
############################ TestCase 132 ##########################################
testcase = 132
status("active")
RC.push(["kid_down 1 1000"])
GRAB.compare(testcase)
############################ TestCase 133 ##########################################
testcase = 133
status("active")
RC.push(["up 1 1000", "kid_left 1 1000"])
GRAB.compare(testcase)
############################ TestCase 134 ##########################################
testcase = 134
status("active")
RC.push(["kid_right 1 1000"])
GRAB.compare(testcase)
############################ TestCase 135 ##########################################
testcase = 135
status("active")
RC.push(["kid_ChUp 1 1000"])
GRAB.compare(testcase)
############################ TestCase 136 ##########################################
testcase = 136
status("active")
RC.push(["kid_ChDown 1 1000"])
GRAB.compare(testcase)
# NOTE: testcases 281/282 are interleaved here (out of numerical order).
############################ TestCase 281 ##########################################
testcase = 281
status("active")
RC.push(["kid_VolUp 1 1000"])
GRAB.compare(testcase)
############################ TestCase 282 ##########################################
testcase = 282
status("active")
RC.push(["kid_VolDown 1 2000"])
GRAB.compare(testcase)
############################ TestCase 137 ##########################################
testcase = 137
status("active")
RC.push(["kid_standby 1 15000", "kid_standby 1 7000"])
GRAB.compare(testcase)
############################ TestCase 138 ##########################################
testcase = 138
status("active")
RC.push(["exit 1 1000", "red 1 1000", "kid_star 1 1000"])
GRAB.compare(testcase)
############################ TestCase 139 ##########################################
testcase = 139
status("active")
# Re-provision with the main stream before the picture-format tests.
UART.default_settings()
MOD.stop(Modulator)
MOD.play_stream(Modulation, FEC, SR, Stream, Frequency, Modulator)
OPER.search()
RC.push(["kid_star 1 7000", "format 1 3000", "down 1 1000", "down 1 1000", "OK 1 1000"])
GRAB.compare(testcase)
############################ TestCase 140 ##########################################
testcase = 140
status("active")
RC.push(["format 1 3000", "up 1 1000", "up 1 1000", "OK 1 4000", "format 1 1000", "up 1 1000"])
GRAB.compare(testcase)
############################ TestCase 141 ##########################################
testcase = 141
status("active")
RC.push(["down 1 1000"])
GRAB.compare(testcase)
############################ TestCase 142 ##########################################
testcase = 142
status("active")
RC.push(["ChUp 1 1000"])
GRAB.compare(testcase)
############################ TestCase 143 ##########################################
testcase = 143
status("active")
RC.push(["ChDown 1 1000"])
GRAB.compare(testcase)
############################ TestCase 144 ##########################################
testcase = 144
status("active")
RC.push(["VolUp 1 1000"])
GRAB.compare(testcase)
############################ TestCase 145 ##########################################
testcase = 145
status("active")
RC.push(["VolDown 1 1000"])
GRAB.compare(testcase)
############################ TestCase 146 ##########################################
testcase = 146
status("active")
RC.push(["mute 1 1000"])
GRAB.compare(testcase)
############################ TestCase 147 ##########################################
testcase = 147
status("active")
RC.push(["mute 1 1000", "up 1 1000", "menu 1 1000"])
GRAB.compare(testcase)
############################ TestCase 148 ##########################################
testcase = 148
status("active")
RC.push(["format 1 1000", "up 1 1000", "exit 1 1000"])
GRAB.compare(testcase)
############################ TestCase 149 ##########################################
testcase = 149
status("active")
RC.push(["format 1 1000", "up 1 1000", "last 1 1000"])
GRAB.compare(testcase)
############################ TestCase 150 ##########################################
testcase = 150
status("active")
RC.push(["format 1 1000", "down 1 6000", "standby 1 15000", "standby 1 7000"])
GRAB.compare(testcase)
############################ TestCase 151 ##########################################
testcase = 151
status("active")
RC.push(["format 1 1000", "down 1 1000", "format 1 1000"])
GRAB.compare(testcase)
############################ TestCase 152 ##########################################
testcase = 152
status("active")
RC.push(["format 1 1000", "up 1 1000", "kid_ok 1 7000"])
GRAB.compare(testcase)
############################ TestCase 153 ##########################################
testcase = 153
status("active")
RC.push(["format 1 1000", "down 1 1000", "OK 1 1000", "format 1 1000", "kid_up 1 1000"])
GRAB.compare(testcase)
############################ TestCase 154 ##########################################
testcase = 154
status("active")
RC.push(["kid_down 1 7000"])
GRAB.compare(testcase)
############################ TestCase 155 ##########################################
testcase = 155
status("active")
RC.push(["kid_ChUp 1 7000"])
GRAB.compare(testcase)
############################ TestCase 156 ##########################################
testcase = 156
status("active")
RC.push(["kid_ChDown 1 7000"])
GRAB.compare(testcase)
############################ TestCase 157 ##########################################
testcase = 157
status("active")
RC.push(["kid_VolUp 1 1000"])
GRAB.compare(testcase)
############################ TestCase 158 ##########################################
testcase = 158
status("active")
RC.push(["kid_VolDown 1 1000"])
GRAB.compare(testcase)
############################ TestCase 159 ##########################################
testcase = 159
status("active")
RC.push(["kid_standby 1 15000", "kid_standby 1 7000"])
GRAB.compare(testcase)
############################ TestCase 160 ##########################################
testcase = 160
status("active")
# Every kid-remote key pressed in sequence while the format dialog is open.
RC.push(["exit 1 7000", "format 1 3000", "kid_0 1 500", "kid_1 1 500", "kid_2 1 500", "kid_3 1 500", "kid_4 1 500", "kid_5 1 500", "kid_6 1 500", "kid_7 1 500", "kid_8 1 500", "kid_9 1 500", "kid_right 1 500", "kid_left 1 500", "kid_star 1 1000"])
GRAB.compare(testcase)
############################ TestCase 161 ##########################################
testcase = 161
status("active")
UART.default_settings()
MOD.stop(Modulator)
# --- TestCase 161 (continued) through TestCase 190 (setup) ---
MOD.play_stream(Modulation, FEC, SR, Stream, Frequency, Modulator)
OPER.search()
RC.push(["5 1 6000", "kid_star 1 7000", "menu 1 1000"])
GRAB.compare(testcase)
############################ TestCase 162 ##########################################
testcase = 162
status("active")
RC.push(["exit 1 7000", "last 1 1000"])
GRAB.compare(testcase)
############################ TestCase 163 ##########################################
testcase = 163
status("active")
RC.push(["exit 1 7000", "exit 1 1000"])
GRAB.compare(testcase)
############################ TestCase 164 ##########################################
testcase = 164
status("active")
RC.push(["exit 1 2000", "red 1 1000"])
GRAB.compare(testcase)
############################ TestCase 165 ##########################################
testcase = 165
status("active")
RC.push(["exit 1 1000", "exit 1 1000", "right 1 1000", "left 1 1000"])
GRAB.compare(testcase)
############################ TestCase 166 ##########################################
testcase = 166
status("active")
RC.push(["right 1 1000"])
GRAB.compare(testcase)
############################ TestCase 167 ##########################################
testcase = 167
status("active")
#CANCEL
RC.push(["ok 1 1000", "red 1 2000"])
GRAB.compare(testcase)
############################ TestCase 168 ##########################################
testcase = 168
status("active")
#EXIT
RC.push(["exit 1 1000", "exit 1 1000", "left 1 1000", "OK 1 1000"])
GRAB.compare(testcase)
############################ TestCase 169 ##########################################
testcase = 169
status("active")
sleep(7)
RC.push(["kid_1 1 10000", "exit 1 3000", "standby 1 15000", "standby 1 7000"])
GRAB.compare(testcase)
############################ TestCase 170 ##########################################
testcase = 170
status("active")
RC.push(["exit 1 10000", "exit 1 1000", "right 1 1000", "kid_left 1 1000"])
GRAB.compare(testcase)
############################ TestCase 171 ##########################################
testcase = 171
status("active")
RC.push(["exit 1 1000", "exit 1 1000", "kid_right 1 1000"])
GRAB.compare(testcase)
############################ TestCase 172 ##########################################
testcase = 172 #CANCEL
status("active")
RC.push(["kid_ok 1 1000", "red 1 2000"])
GRAB.compare(testcase)
############################ TestCase 173 ##########################################
testcase = 173 #EXIT
status("active")
RC.push(["exit 1 1000", "exit 1 1000", "kid_left 1 1500", "kid_ok 1 1000"])
GRAB.compare(testcase)
############################ TestCase 174 ##########################################
testcase = 174
status("active")
RC.push(["kid_right 1 1000", "exit 1 1000", "kid_standby 1 15000", "kid_standby 1 5000"])
GRAB.compare(testcase)
############################ TestCase 175 ##########################################
testcase = 175
status("active")
# Re-provision and enable the parental PIN for tests 175-189.
UART.default_settings()
OPER.search()
OPER.set_pin()
RC.push(["5 1 6000", "kid_star 1 7000", "menu 1 1000"])
GRAB.compare(testcase)
############################ TestCase 176 ##########################################
testcase = 176
status("active")
RC.push(["exit 1 4000", "last 1 1000"])
GRAB.compare(testcase)
############################ TestCase 177 ##########################################
testcase = 177
status("active")
RC.push(["exit 1 4000", "exit 1 1000"])
GRAB.compare(testcase)
############################ TestCase 178 ##########################################
testcase = 178
status("active")
RC.push(["exit 1 4000", "red 1 2000"])
GRAB.compare(testcase)
############################ TestCase 179 ##########################################
testcase = 179
status("manual")
GRAB.compare(testcase)
############################ TestCase 180 ##########################################
testcase = 180
status("active")
RC.push(["exit 1 4000", "exit 1 4000", "0 1 1000", "1 1 1000", "2 1 1000", "left 1 1000"])
GRAB.compare(testcase)
############################ TestCase 181 ##########################################
testcase = 181
status("active")
RC.push(["exit 1 4000", "exit 1 1000", "standby 1 15000", "standby 1 7000"])
GRAB.compare(testcase)
############################ TestCase 182 ##########################################
testcase = 182
status("active")
RC.push(["exit 1 4000", "exit 1 1000", "1 1 1000", "2 1 1000", "3 1 1000", "4 1 2000"])
GRAB.compare(testcase)
############################ TestCase 183 ##########################################
testcase = 183
status("active")
RC.push(["0 1 1000", "0 1 1000", "0 1 1000", "0 1 3000"])
GRAB.compare(testcase)
############################ TestCase 184 ##########################################
testcase = 184
status("active")
RC.push(["kid_ok 1 7000", "exit 1 1000", "yellow 1 1000", "red 1 1000", "red 1 1000", "green 1 1000", "blue 1 2500"])
GRAB.compare(testcase)
############################ TestCase 185 ##########################################
testcase = 185
status("manual")
GRAB.compare(testcase)
############################ TestCase 186 ##########################################
testcase = 186
status("active")
RC.push(["kid_ok 1 7000", "exit 1 4000", "kid_0 1 1000", "kid_1 1 1000", "kid_7 1 1000", "kid_left 1 1000"])
GRAB.compare(testcase)
############################ TestCase 187 ##########################################
testcase = 187
status("active")
RC.push(["kid_standby 1 15000", "kid_standby 1 7000"])
GRAB.compare(testcase)
############################ TestCase 188 ##########################################
testcase = 188
status("active")
RC.push(["exit 1 4000", "exit 1 1000", "kid_0 1 1000", "kid_1 1 1000", "kid_5 1 1000", "kid_9 1 2000"])
# FIX: this compare was missing — every other test case verifies its screen
# with GRAB.compare(testcase); without it, TestCase 188 was never checked.
GRAB.compare(testcase)
############################ TestCase 189 ##########################################
testcase = 189
status("active")
RC.push(["kid_0 1 1000", "kid_0 1 1000", "kid_0 1 1000", "kid_0 1 1000"])
GRAB.compare(testcase)
############################ TestCase 190 ##########################################
testcase = 190
status("active")
UART.default_settings() OPER.search() RC.push(["kid_star 1 7000", "kid_star 1 2000"]) GRAB.compare(testcase) ############################ TestCase 191 ########################################## testcase = "191_1" status("active") RC.push(["exit 1 7000", "kid_3 1 2000", "kid_up 1 2000"]) GRAB.compare(testcase) testcase = "191_2" status("active") RC.push(["2 1 1000", "kid_down 1 2000"]) GRAB.compare(testcase) testcase = "191_3" status("active") RC.push(["3 1 1000", "kid_ChUp 1 2000"]) GRAB.compare(testcase) testcase = "191_4" status("active") RC.push(["2 1 1000", "kid_ChDown 1 2000"]) GRAB.compare(testcase) ############################ TestCase 192 ########################################## testcase = 192 status("manual") GRAB.compare(testcase) ############################ TestCase 193 ########################################## testcase = 193 status("manual") GRAB.compare(testcase) ############################ TestCase 194 ########################################## testcase = 194 status("manual") GRAB.compare(testcase) ############################ TestCase 195 ########################################## testcase = 195 status("manual") GRAB.compare(testcase) ############################ TestCase 196 ########################################## testcase = 196 status("manual") GRAB.compare(testcase) ############################ TestCase 197 ########################################## testcase = 197 status("manual") GRAB.compare(testcase) ############################ TestCase 198 ########################################## testcase = 198 status("manual") GRAB.compare(testcase) ############################ TestCase 199 ########################################## testcase = 199 status("manual") GRAB.compare(testcase) ############################ TestCase 200 ########################################## testcase = 200 status("inactive") UART.default_settings() MOD.stop(Modulator) MOD.play_stream(Modulation, FEC, SR, Stream, Frequency, Modulator) OPER.search() RC.push(["kid_OK 1 
7500", "Rec 1 3000"]) RC.push(["OK 1 2000"]) GRAB.compare(testcase) ############################ TestCase 201 ########################################## testcase = 201 status("inactive") RC.push(["exit 1 2000", "4 1 5000", "up 1 6000", "red 1 1000"]) GRAB.compare(testcase) ############################ TestCase 202 ########################################## testcase = 202 status("inactive") RC.push(["exit 1 1000", "down 1 6000", "red 1 1000"]) GRAB.compare(testcase) ############################ TestCase 203 ########################################## testcase = 203 status("inactive") RC.push(["exit 1 1000", "left 1 1000"]) GRAB.compare(testcase) ############################ TestCase 204 ########################################## testcase = 204 status("inactive") RC.push(["right 1 1500"]) GRAB.compare(testcase) ############################ TestCase 205 ########################################## testcase = 205 status("inactive") RC.push(["4 1 5000", "ChUp 1 2000"]) GRAB.compare(testcase) ############################ TestCase 206 ########################################## testcase = 206 status("inactive") RC.push(["ChDown 1 2000"]) GRAB.compare(testcase) ############################ TestCase 207 ########################################## testcase = 207 status("inactive") RC.push(["standby 1 12000", "standby 1 8000"]) GRAB.compare(testcase) ############################ TestCase 208 ########################################## testcase = 208 status("inactive") RC.push(["exit 1 1000", "mute 1 2000"]) GRAB.compare(testcase) ############################ TestCase 209 ########################################## testcase = 209 status("inactive") RC.push(["mute 1 1000", "guide 1 1000"]) GRAB.compare(testcase) ############################ TestCase 210 ########################################## testcase = 210 status("inactive") RC.push(["exit 1 1000", "menu 1 1000"]) GRAB.compare(testcase) ############################ TestCase 211 ########################################## testcase = 
211 status("inactive") RC.push(["exit 1 1000"]) GRAB.compare(testcase) ############################ TestCase 212 ########################################## testcase = 212 status("inactive") RC.push(["last 1 1000"]) GRAB.compare(testcase) ############################ TestCase 213 ########################################## testcase = 213 status("inactive") RC.push(["stop 1 1000"]) GRAB.compare(testcase) ############################ TestCase 214 ########################################## testcase = 214 status("inactive") RC.push(["exit 1 5000", "kid_ok 1 2000"]) GRAB.compare(testcase) ############################ TestCase 215 ########################################## testcase = 215 status("inactive") RC.push(["exit 1 5000", "kid_up 1 2000"]) GRAB.compare(testcase) ############################ TestCase 216 ########################################## testcase = 216 status("inactive") RC.push(["exit 1 5000", "kid_down 1 2000"]) GRAB.compare(testcase) ############################ TestCase 217 ########################################## testcase = 217 status("inactive") RC.push(["exit 1 5000", "kid_left 1 2000"]) GRAB.compare(testcase) ############################ TestCase 218 ########################################## testcase = 218 status("inactive") RC.push(["exit 1 5000", "kid_right 1 2000"]) GRAB.compare(testcase) RC.push(["stop 1 1000", "left 1 1000", "OK 1 1000"]) ############################ TestCase 219 ########################################## testcase = 219 status("inactive") RC.push(["exit 1 5000", "4 1 1000", "kid_ChUp 1 2000"]) GRAB.compare(testcase) ############################ TestCase 220 ########################################## testcase = 220 status("inactive") RC.push(["kid_ChDown 1 2000"]) GRAB.compare(testcase) ############################ TestCase 221 ########################################## testcase = 221 status("inactive") RC.push(["kid_standby 1 12000", "kid_standby 1 12000"]) GRAB.compare(testcase) ############################ TestCase 222 
########################################## testcase = 222 status("inactive") RC.push(["exit 1 5000", "kid_star 1 2000"]) GRAB.compare(testcase) ############################ TestCase 223 ########################################## testcase = 223 status("inactive") UART.default_settings() MOD.stop(Modulator) MOD.play_stream(Modulation, FEC, SR, Stream, Frequency, Modulator) OPER.search() RC.push(["kid_star 1 8000", "play/pause 1 1000"]) GRAB.compare(testcase) ############################ TestCase 224 ########################################## testcase = 224 status("inactive") RC.push(["stop 1 1000"]) GRAB.compare(testcase) ############################ TestCase 225 ########################################## testcase = 225 status("inactive") sleep(7) RC.push(["play/pause 1 1000", "OK 1 1000"]) GRAB.compare(testcase) ############################ TestCase 226 ########################################## testcase = 226 status("inactive") RC.push(["exit 1 1000", "stop 1 1000", "3 1 6000", "play/pause 1 5000", "up 1 1000"]) GRAB.compare(testcase) ############################ TestCase 227 ########################################## testcase = 227 status("inactive") RC.push(["2 1 6000", "play/pause 1 1000", "down 1 1000"]) GRAB.compare(testcase) ############################ TestCase 228 ########################################## testcase = 228 status("inactive") sleep(2) RC.push(["play/pause 1 5000", "left 1 1000"]) GRAB.compare(testcase) ############################ TestCase 229 ########################################## testcase = 229 status("inactive") RC.push(["right 1 1000"]) GRAB.compare(testcase) ############################ TestCase 230 ########################################## testcase = 230 status("inactive") RC.push(["stop 1 1000", "3 1 6000", "play/pause 1 1000", "ChUp 1 1000"]) GRAB.compare(testcase) ############################ TestCase 231 ########################################## testcase = 231 status("inactive") RC.push(["2 1 6000", "play/pause 1 1000", "ChDown 
1 1000"]) GRAB.compare(testcase) ############################ TestCase 232 ########################################## testcase = 232 status("inactive") RC.push(["play/pause 1 1000", "VolUp 1 1000"]) GRAB.compare(testcase) ############################ TestCase 233 ########################################## testcase = 233 status("inactive") RC.push(["VolDown 1 1000"]) GRAB.compare(testcase) ############################ TestCase 234 ########################################## testcase = "234_1" status("inactive") RC.push(["stop 1 1000", "2 1 6000", "play/pause 1 5000", "1 1 1800"]) GRAB.compare(testcase) testcase = "234_2" status("inactive") RC.push(["play/pause 1 5000", "2 1 1800"]) GRAB.compare(testcase) testcase = "234_3" status("inactive") RC.push(["play/pause 1 5000", "3 1 1800"]) GRAB.compare(testcase) testcase = "234_4" status("inactive") RC.push(["play/pause 1 5000", "4 1 1800"]) GRAB.compare(testcase) ############################ TestCase 235 ########################################## testcase = 235 status("inactive") sleep(5) RC.push(["play/pause 1 1000", "8 1 700"]) GRAB.compare(testcase) ############################ TestCase 236 ########################################## testcase = 236 status("inactive") RC.push(["standby 1 12000", "standby 1 10000"]) GRAB.compare(testcase) #------- ВНИМАНИЕ! ДАЛЬШЕ ТЕСТЫ ИДУТ НЕ ПО ПОРЯДКУ! 
----------- ############################ TestCase 237 ########################################## testcase = 237 status("inactive") RC.push(["exit 1 1000", "left 1 1000", "OK 1 1000"]) OPER.set_pin() RC.push(["kid_1 1 7000", "play/pause 1 1000", "menu 1 1000"]) GRAB.compare(testcase) ############################ TestCase 239 ########################################## testcase = 239 status("inactive") RC.push(["exit 1 1000", "exit 1 1000"]) GRAB.compare(testcase) ############################ TestCase 241 ########################################## testcase = 241 status("inactive") RC.push(["exit 1 1000", "last 1 1000"]) GRAB.compare(testcase) ############################ TestCase 238 ########################################## testcase = 238 status("inactive") RC.push(["0 1 1000", "0 1 1000", "0 1 1000", "0 1 1000"]) OPER.unset_pin() RC.push(["kid_1 1 7000", "play/pause 1 1000", "menu 1 1000"]) GRAB.compare(testcase) ############################ TestCase 240 ########################################## testcase = 240 status("inactive") RC.push(["exit 1 1000", "exit 1 1000"]) GRAB.compare(testcase) ############################ TestCase 242 ########################################## testcase = 242 status("inactive") RC.push(["exit 1 1000", "last 1 1000"]) GRAB.compare(testcase) #------- ВНИМАНИЕ! ДАЛЬШЕ ТЕСТЫ ИДУТ СНОВА ПО ПОРЯДКУ! 
----------- ############################ TestCase 243 ########################################## testcase = 243 status("inactive") RC.push(["exit 1 1000", "format 1 1000"]) GRAB.compare(testcase) ############################ TestCase 244 ########################################## testcase = 244 status("inactive") RC.push(["exit 1 6000", "mute 1 1000"]) GRAB.compare(testcase) ############################ TestCase 245 ########################################## testcase = 245 status("inactive") RC.push(["mute 1 1000", "blue 1 1000"]) GRAB.compare(testcase) ############################ TestCase 246 ########################################## testcase = 246 status("inactive") RC.push(["exit 1 1000", "red 1 2000"]) GRAB.compare(testcase) ############################ TestCase 247 ########################################## testcase = 247 status("inactive") RC.push(["exit 1 1000", "yellow 1 2000"]) GRAB.compare(testcase) ############################ TestCase 248 ########################################## testcase = 248 status("inactive") RC.push(["exit 1 1000", "green 1 2000"]) GRAB.compare(testcase) ############################ TestCase 249 ########################################## testcase = 249 status("inactive") RC.push(["exit 1 1000", "kid_ok 1 1000"]) GRAB.compare(testcase) ############################ TestCase 250 ########################################## testcase = 250 status("inactive") RC.push(["exit 1 1000", "stop 1 1000", "3 1 6000", "play/pause 1 1000", "kid_up 1 1000"]) GRAB.compare(testcase) ############################ TestCase 251 ########################################## testcase = 251 status("inactive") RC.push(["2 1 6000", "play/pause 1 1000", "kid_down 1 1000"]) GRAB.compare(testcase) ############################ TestCase 252 ########################################## testcase = 252 status("inactive") sleep(5) RC.push(["kid_left 1 1500"]) GRAB.compare(testcase) ############################ TestCase 253 ########################################## 
testcase = 253 status("inactive") RC.push(["kid_right 1 1500"]) GRAB.compare(testcase) ############################ TestCase 254 ########################################## testcase = 254 status("inactive") RC.push(["stop 1 1000", "3 1 6000", "play/pause 1 1000", "kid_ChUp 1 1500"]) GRAB.compare(testcase) ############################ TestCase 255 ########################################## testcase = 255 status("inactive") RC.push(["2 1 6000", "play/pause 1 1000", "kid_ChDown 1 1500"]) GRAB.compare(testcase) ############################ TestCase 256 ########################################## testcase = 256 status("inactive") sleep(7) RC.push(["play/pause 1 1000", "kid_VolUp 1 1500"]) GRAB.compare(testcase) ############################ TestCase 257 ########################################## testcase = 257 status("inactive") RC.push(["play/pause 1 1000", "kid_VolDown 1 1500"]) GRAB.compare(testcase) ############################ TestCase 258 ########################################## testcase = "258_1" status("inactive") RC.push(["stop 1 1000", "2 1 6000", "play/pause 1 5000", "kid_1 1 1800"]) GRAB.compare(testcase) testcase = "258_2" status("inactive") RC.push(["play/pause 1 5000", "kid_2 1 1800"]) GRAB.compare(testcase) testcase = "258_3" status("inactive") RC.push(["play/pause 1 5000", "kid_3 1 1800"]) GRAB.compare(testcase) testcase = "258_4" status("inactive") RC.push(["play/pause 1 5000", "kid_4 1 1800"]) GRAB.compare(testcase) ############################ TestCase 259 ########################################## testcase = 259 status("inactive") sleep(5) RC.push(["play/pause 1 1000", "kid_6 1 2000"]) GRAB.compare(testcase) ############################ TestCase 260 ########################################## testcase = 260 status("inactive") sleep(3) RC.push(["kid_star 1 1500"]) GRAB.compare(testcase) ############################ TestCase 261 ########################################## testcase = 261 status("inactive") RC.push(["exit 1 1000", "kid_standby 1 12000", 
"kid_standby 1 12000"]) GRAB.compare(testcase) ############################ TestCase 262 ########################################## testcase = 262 status("manual") GRAB.compare(testcase) ############################ TestCase 263 ########################################## testcase = 263 status("manual") GRAB.compare(testcase) ############################ TestCase 264 ########################################## testcase = 264 status("manual") GRAB.compare(testcase) ############################ TestCase 265 ########################################## testcase = 265 status("active") UART.default_settings() sleep(10) MOD.stop(Modulator) MOD.play_stream(Modulation, FEC, SR, Stream2, Frequency, Modulator) OPER.search() MOD.stop(Modulator) UART.reboot() MOD.play_stream(Modulation, FEC, SR, Stream2, Frequency, Modulator) RC.push(["exit 1 7000", "kid_1 1 7000"]) sleep(250) RC.push(["exit 1 1000", "left 1 1000", "OK 1 3000"]) GRAB.compare(testcase) ############################ TestCase 266 ########################################## testcase = 266 status("active") OPER.set_pin() UART.start_app("channelseditor") RC.push(["right 1 1000", "up 1 1000"]) OPER.channel_block() RC.push(["kid_0 1 7000"]) GRAB.compare(testcase) OPER.unset_pin() ############################ TestCase 267 ########################################## testcase = 267 status("manual") GRAB.compare(testcase) ############################ TestCase 268 ########################################## testcase = 268 status("active") MOD.stop(Modulator) UART.reboot() MOD.play_stream(Modulation, FEC, SR, Stream2, Frequency, Modulator) RC.push(["3 1 7000", "kid_1 1 7000"]) sleep(25) GRAB.compare(testcase) ############################ TestCase 269 ########################################## testcase = 269 status("manual") GRAB.compare(testcase) ############################ TestCase 270 ########################################## testcase = 270 status("active") UART.default_settings() MOD.stop(Modulator) MOD.play_stream(Modulation, 
FEC, SR, Stream2, Frequency, Modulator) OPER.search() MOD.stop(Modulator) MOD.play_stream(Modulation, FEC, SR, Stream2, Frequency, Modulator) RC.push(["3 1 7000", "kid_1 1 7000", "3 1 1000"]) sleep(500) GRAB.compare(testcase) ############################ TestCase 271 ########################################## testcase = 271 status("manual") GRAB.compare(testcase) ############################ TestCase 272 ########################################## testcase = 272 status("manual") GRAB.compare(testcase) ############################ TestCase 273 ########################################## testcase = 273 status("manual") GRAB.compare(testcase) ############################ TestCase 274 ########################################## testcase = 274 status("manual") GRAB.compare(testcase) ############################ TestCase 275 ########################################## testcase = 275 status("manual") GRAB.compare(testcase) ############################ TestCase 276 ########################################## testcase = 276 status("manual") GRAB.compare(testcase) ############################ TestCase 277 ########################################## testcase = 277 status("manual") GRAB.compare(testcase) ############################ TestCase 278 ########################################## """testcase = 278 status("active") UART.default_settings() UART.start_app("") RC.push([""]) sleep(0) GRAB.compare(testcase) ############################ TestCase 273 ########################################## testcase = 273 status("active") UART.default_settings() UART.start_app("") RC.push([""]) sleep(0) GRAB.compare(testcase) ############################ TestCase 274 ########################################## testcase = 274 status("active") UART.default_settings() UART.start_app("") RC.push([""]) sleep(0) GRAB.compare(testcase) ############################ TestCase 275 ########################################## testcase = 275 status("active") UART.default_settings() UART.start_app("") 
RC.push([""]) sleep(0) GRAB.compare(testcase) ############################ TestCase 276 ########################################## testcase = 276 status("active") UART.default_settings() UART.start_app("") RC.push([""]) sleep(0) GRAB.compare(testcase) ############################ TestCase 277 ########################################## testcase = 277 status("active") UART.default_settings() UART.start_app("") RC.push([""]) sleep(0) GRAB.compare(testcase) ############################ TestCase 278 ########################################## testcase = 278 status("active") UART.default_settings() UART.start_app("") RC.push([""]) sleep(0) GRAB.compare(testcase) ############################ TestCase 279 ########################################## testcase = 279 status("active") UART.default_settings() UART.start_app("") RC.push([""]) sleep(0) GRAB.compare(testcase) ############################ TestCase 280 ########################################## testcase = 280 status("active") UART.default_settings() UART.start_app("") RC.push([""]) sleep(0) GRAB.compare(testcase) ############################ TestCase 281 ########################################## testcase = 281 status("active") UART.default_settings() UART.start_app("") RC.push([""]) sleep(0) GRAB.compare(testcase) ############################ TestCase 282 ########################################## testcase = 282 status("active") UART.default_settings() UART.start_app("") RC.push([""]) sleep(0) GRAB.compare(testcase) ############################ TestCase 283 ########################################## testcase = 283 status("active") UART.default_settings() UART.start_app("") RC.push([""]) sleep(0) GRAB.compare(testcase) ############################ TestCase 284 ########################################## testcase = 284 status("active") UART.default_settings() UART.start_app("") RC.push([""]) sleep(0) GRAB.compare(testcase) ############################ TestCase 285 ########################################## testcase = 285 
status("active") UART.default_settings() UART.start_app("") RC.push([""]) sleep(0) GRAB.compare(testcase) ############################ TestCase 286 ########################################## testcase = 286 status("active") UART.default_settings() UART.start_app("") RC.push([""]) sleep(0) GRAB.compare(testcase) ############################ TestCase 287 ########################################## testcase = 287 status("active") UART.default_settings() UART.start_app("") RC.push([""]) sleep(0) GRAB.compare(testcase) ############################ TestCase 288 ########################################## testcase = 288 status("active") UART.default_settings() UART.start_app("") RC.push([""]) sleep(0) GRAB.compare(testcase) ############################ TestCase 289 ########################################## testcase = 289 status("active") UART.default_settings() UART.start_app("") RC.push([""]) sleep(0) GRAB.compare(testcase) ############################ TestCase 290 ########################################## testcase = 290 status("active") UART.default_settings() UART.start_app("") RC.push([""]) sleep(0) GRAB.compare(testcase)""" ################################################################################### status("active") MOD.stop(Modulator) GRAB.stop_capture()
35.316524
420
0.442834
7,958
78,438
4.306233
0.059563
0.074061
0.208468
0.289153
0.833552
0.812892
0.730106
0.580729
0.516239
0.459541
0
0.102511
0.178064
78,438
2,220
421
35.332432
0.429015
0.063694
0
0.623733
0
0
0.263581
0.002584
0
0
0
0
0
0
null
null
0
0.005963
null
null
0
0
0
0
null
0
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
7
9958604a53424f1b5fadbd3736b1f102c2b0bb12
49,069
py
Python
tests/scheduling/test_scheduler.py
AlirezaFarnia/PsyNeuLink
c66f8248d1391830e76c97df4b644e12a02c2b73
[ "Apache-2.0" ]
null
null
null
tests/scheduling/test_scheduler.py
AlirezaFarnia/PsyNeuLink
c66f8248d1391830e76c97df4b644e12a02c2b73
[ "Apache-2.0" ]
null
null
null
tests/scheduling/test_scheduler.py
AlirezaFarnia/PsyNeuLink
c66f8248d1391830e76c97df4b644e12a02c2b73
[ "Apache-2.0" ]
null
null
null
import logging import numpy as np import pytest from psyneulink.core.components.functions.statefulfunctions.integratorfunctions import DriftDiffusionIntegrator from psyneulink.core.components.functions.transferfunctions import Linear from psyneulink.core.components.mechanisms.processing.integratormechanism import IntegratorMechanism from psyneulink.core.components.mechanisms.processing.transfermechanism import TransferMechanism from psyneulink.core.components.process import Process from psyneulink.core.components.projections.pathway.mappingprojection import MappingProjection from psyneulink.core.components.system import System from psyneulink.core.compositions.composition import Composition from psyneulink.core.globals.context import Context from psyneulink.core.globals.keywords import VALUE from psyneulink.core.scheduling.condition import AfterNCalls, AfterNPasses, AfterNTrials, AfterPass, All, AllHaveRun, Always, Any, AtPass, BeforeNCalls, BeforePass, \ EveryNCalls, EveryNPasses, JustRan, WhenFinished from psyneulink.core.scheduling.scheduler import Scheduler from psyneulink.core.scheduling.time import TimeScale from psyneulink.library.components.mechanisms.processing.integrator.ddm import DDM logger = logging.getLogger(__name__) class TestScheduler: @classmethod def setup_class(self): self.orig_is_finished_flag = TransferMechanism.is_finished_flag self.orig_is_finished = TransferMechanism.is_finished TransferMechanism.is_finished_flag = True TransferMechanism.is_finished = lambda self, context: self.is_finished_flag @classmethod def teardown_class(self): del TransferMechanism.is_finished_flag del TransferMechanism.is_finished TransferMechanism.is_finished_flag = self.orig_is_finished_flag TransferMechanism.is_finished = self.orig_is_finished def test_copy(self): pass def test_deepcopy(self): pass def test_create_multiple_contexts(self): comp = Composition() A = TransferMechanism(function=Linear(slope=5.0, intercept=2.0), name='scheduler-pytests-A') 
comp.add_node(A) comp.scheduler.clock._increment_time(TimeScale.TRIAL) eid = 'eid' eid1 = 'eid1' comp.scheduler._init_counts(execution_id=eid) assert comp.scheduler.clocks[eid].time.trial == 0 comp.scheduler.clock._increment_time(TimeScale.TRIAL) assert comp.scheduler.clocks[eid].time.trial == 0 comp.scheduler._init_counts(execution_id=eid1, base_execution_id=comp.scheduler.default_execution_id) assert comp.scheduler.clocks[eid1].time.trial == 2 def test_two_compositions_one_scheduler(self): comp1 = Composition() comp2 = Composition() A = TransferMechanism(function=Linear(slope=5.0, intercept=2.0), name='scheduler-pytests-A') comp1.add_node(A) comp2.add_node(A) sched = Scheduler(composition=comp1) sched.add_condition(A, BeforeNCalls(A, 5, time_scale=TimeScale.LIFE)) termination_conds = {} termination_conds[TimeScale.RUN] = AfterNTrials(6) termination_conds[TimeScale.TRIAL] = AfterNPasses(1) comp1.run( inputs={A: [[0], [1], [2], [3], [4], [5]]}, scheduler=sched, termination_processing=termination_conds ) output = sched.execution_list[comp1.default_execution_id] expected_output = [ A, A, A, A, A, set() ] # pprint.pprint(output) assert output == pytest.helpers.setify_expected_output(expected_output) comp2.run( inputs={A: [[0], [1], [2], [3], [4], [5]]}, scheduler=sched, termination_processing=termination_conds ) output = sched.execution_list[comp2.default_execution_id] expected_output = [ A, A, A, A, A, set() ] # pprint.pprint(output) assert output == pytest.helpers.setify_expected_output(expected_output) def test_one_composition_two_contexts(self): comp = Composition() A = TransferMechanism(function=Linear(slope=5.0, intercept=2.0), name='scheduler-pytests-A') comp.add_node(A) sched = Scheduler(composition=comp) sched.add_condition(A, BeforeNCalls(A, 5, time_scale=TimeScale.LIFE)) termination_conds = {} termination_conds[TimeScale.RUN] = AfterNTrials(6) termination_conds[TimeScale.TRIAL] = AfterNPasses(1) eid = 'eid' comp.run( inputs={A: [[0], [1], [2], [3], [4], 
[5]]}, scheduler=sched, termination_processing=termination_conds, context=eid, ) output = sched.execution_list[eid] expected_output = [ A, A, A, A, A, set() ] # pprint.pprint(output) assert output == pytest.helpers.setify_expected_output(expected_output) comp.run( inputs={A: [[0], [1], [2], [3], [4], [5]]}, scheduler=sched, termination_processing=termination_conds, context=eid, ) output = sched.execution_list[eid] expected_output = [ A, A, A, A, A, set(), set(), set(), set(), set(), set(), set() ] # pprint.pprint(output) assert output == pytest.helpers.setify_expected_output(expected_output) eid = 'eid1' comp.run( inputs={A: [[0], [1], [2], [3], [4], [5]]}, scheduler=sched, termination_processing=termination_conds, context=eid, ) output = sched.execution_list[eid] expected_output = [ A, A, A, A, A, set() ] # pprint.pprint(output) assert output == pytest.helpers.setify_expected_output(expected_output) def test_change_termination_condition(self): D = DDM(function=DriftDiffusionIntegrator(threshold=10)) P = Process(pathway=[D]) S = System(processes=[P]) D.set_log_conditions(VALUE) def change_termination_processing(): if S.termination_processing is None: S.scheduler.termination_conds = {TimeScale.TRIAL: WhenFinished(D)} S.termination_processing = {TimeScale.TRIAL: WhenFinished(D)} elif isinstance(S.termination_processing[TimeScale.TRIAL], AllHaveRun): S.scheduler.termination_conds = {TimeScale.TRIAL: WhenFinished(D)} S.termination_processing = {TimeScale.TRIAL: WhenFinished(D)} else: S.scheduler.termination_conds = {TimeScale.TRIAL: AllHaveRun()} S.termination_processing = {TimeScale.TRIAL: AllHaveRun()} change_termination_processing() S.run(inputs={D: [[1.0], [2.0]]}, # termination_processing={TimeScale.TRIAL: WhenFinished(D)}, call_after_trial=change_termination_processing, num_trials=4) # Trial 0: # input = 1.0, termination condition = WhenFinished # 10 passes (value = 1.0, 2.0 ... 
9.0, 10.0) # Trial 1: # input = 2.0, termination condition = AllHaveRun # 1 pass (value = 2.0) expected_results = [[np.array([[10.]]), np.array([[10.]])], [np.array([[2.]]), np.array([[1.]])], [np.array([[10.]]), np.array([[10.]])], [np.array([[2.]]), np.array([[1.]])]] assert np.allclose(expected_results, np.asfarray(S.results)) class TestLinear: @classmethod def setup_class(self): self.orig_is_finished_flag = TransferMechanism.is_finished_flag self.orig_is_finished = TransferMechanism.is_finished TransferMechanism.is_finished_flag = True TransferMechanism.is_finished = lambda self, context: self.is_finished_flag @classmethod def teardown_class(self): del TransferMechanism.is_finished_flag del TransferMechanism.is_finished TransferMechanism.is_finished_flag = self.orig_is_finished_flag TransferMechanism.is_finished = self.orig_is_finished def test_no_termination_conds(self): comp = Composition() A = TransferMechanism(function=Linear(slope=5.0, intercept=2.0), name='scheduler-pytests-A') B = TransferMechanism(function=Linear(intercept=4.0), name='scheduler-pytests-B') C = TransferMechanism(function=Linear(intercept=1.5), name='scheduler-pytests-C') for m in [A, B, C]: comp.add_node(m) comp.add_projection(MappingProjection(), A, B) comp.add_projection(MappingProjection(), B, C) sched = Scheduler(composition=comp) sched.add_condition(A, EveryNPasses(1)) sched.add_condition(B, EveryNCalls(A, 2)) sched.add_condition(C, EveryNCalls(B, 3)) output = list(sched.run()) expected_output = [ A, A, B, A, A, B, A, A, B, C, ] # pprint.pprint(output) assert output == pytest.helpers.setify_expected_output(expected_output) # tests below are copied from old scheduler, need renaming def test_1(self): comp = Composition() A = TransferMechanism(function=Linear(slope=5.0, intercept=2.0), name='scheduler-pytests-A') B = TransferMechanism(function=Linear(intercept=4.0), name='scheduler-pytests-B') C = TransferMechanism(function=Linear(intercept=1.5), name='scheduler-pytests-C') for m in 
[A, B, C]: comp.add_node(m) comp.add_projection(MappingProjection(), A, B) comp.add_projection(MappingProjection(), B, C) sched = Scheduler(composition=comp) sched.add_condition(A, EveryNPasses(1)) sched.add_condition(B, EveryNCalls(A, 2)) sched.add_condition(C, EveryNCalls(B, 3)) termination_conds = {} termination_conds[TimeScale.RUN] = AfterNTrials(1) termination_conds[TimeScale.TRIAL] = AfterNCalls(C, 4, time_scale=TimeScale.TRIAL) output = list(sched.run(termination_conds=termination_conds)) expected_output = [ A, A, B, A, A, B, A, A, B, C, A, A, B, A, A, B, A, A, B, C, A, A, B, A, A, B, A, A, B, C, A, A, B, A, A, B, A, A, B, C, ] # pprint.pprint(output) assert output == pytest.helpers.setify_expected_output(expected_output) def test_1b(self): comp = Composition() A = TransferMechanism(function=Linear(slope=5.0, intercept=2.0), name='scheduler-pytests-A') B = TransferMechanism(function=Linear(intercept=4.0), name='scheduler-pytests-B') C = TransferMechanism(function=Linear(intercept=1.5), name='scheduler-pytests-C') for m in [A, B, C]: comp.add_node(m) comp.add_projection(MappingProjection(), A, B) comp.add_projection(MappingProjection(), B, C) sched = Scheduler(composition=comp) sched.add_condition(A, EveryNPasses(1)) sched.add_condition(B, Any(EveryNCalls(A, 2), AfterPass(1))) sched.add_condition(C, EveryNCalls(B, 3)) termination_conds = {} termination_conds[TimeScale.RUN] = AfterNTrials(1) termination_conds[TimeScale.TRIAL] = AfterNCalls(C, 4, time_scale=TimeScale.TRIAL) output = list(sched.run(termination_conds=termination_conds)) expected_output = [ A, A, B, A, B, A, B, C, A, B, A, B, A, B, C, A, B, A, B, A, B, C, A, B, A, B, A, B, C, ] # pprint.pprint(output) assert output == pytest.helpers.setify_expected_output(expected_output) def test_2(self): comp = Composition() A = TransferMechanism(function=Linear(slope=5.0, intercept=2.0), name='scheduler-pytests-A') B = TransferMechanism(function=Linear(intercept=4.0), name='scheduler-pytests-B') C = 
TransferMechanism(function=Linear(intercept=1.5), name='scheduler-pytests-C') for m in [A, B, C]: comp.add_node(m) comp.add_projection(MappingProjection(), A, B) comp.add_projection(MappingProjection(), B, C) sched = Scheduler(composition=comp) sched.add_condition(A, EveryNPasses(1)) sched.add_condition(B, EveryNCalls(A, 2)) sched.add_condition(C, EveryNCalls(B, 2)) termination_conds = {} termination_conds[TimeScale.RUN] = AfterNTrials(1) termination_conds[TimeScale.TRIAL] = AfterNCalls(C, 1, time_scale=TimeScale.TRIAL) output = list(sched.run(termination_conds=termination_conds)) expected_output = [A, A, B, A, A, B, C] assert output == pytest.helpers.setify_expected_output(expected_output) def test_3(self): comp = Composition() A = TransferMechanism(function=Linear(slope=5.0, intercept=2.0), name='scheduler-pytests-A') B = TransferMechanism(function=Linear(intercept=4.0), name='scheduler-pytests-B') C = TransferMechanism(function=Linear(intercept=1.5), name='scheduler-pytests-C') for m in [A, B, C]: comp.add_node(m) comp.add_projection(MappingProjection(), A, B) comp.add_projection(MappingProjection(), B, C) sched = Scheduler(composition=comp) sched.add_condition(A, EveryNPasses(1)) sched.add_condition(B, EveryNCalls(A, 2)) sched.add_condition(C, All(AfterNCalls(B, 2), EveryNCalls(B, 1))) termination_conds = {} termination_conds[TimeScale.RUN] = AfterNTrials(1) termination_conds[TimeScale.TRIAL] = AfterNCalls(C, 4, time_scale=TimeScale.TRIAL) output = list(sched.run(termination_conds=termination_conds)) expected_output = [ A, A, B, A, A, B, C, A, A, B, C, A, A, B, C, A, A, B, C ] # pprint.pprint(output) assert output == pytest.helpers.setify_expected_output(expected_output) def test_6(self): comp = Composition() A = TransferMechanism(function=Linear(slope=5.0, intercept=2.0), name='scheduler-pytests-A') B = TransferMechanism(function=Linear(intercept=4.0), name='scheduler-pytests-B') C = TransferMechanism(function=Linear(intercept=1.5), name='scheduler-pytests-C') 
for m in [A, B, C]: comp.add_node(m) comp.add_projection(MappingProjection(), A, B) comp.add_projection(MappingProjection(), B, C) sched = Scheduler(composition=comp) sched.add_condition(A, BeforePass(5)) sched.add_condition(B, AfterNCalls(A, 5)) sched.add_condition(C, AfterNCalls(B, 1)) termination_conds = {} termination_conds[TimeScale.RUN] = AfterNTrials(1) termination_conds[TimeScale.TRIAL] = AfterNCalls(C, 3) output = list(sched.run(termination_conds=termination_conds)) expected_output = [ A, A, A, A, A, B, C, B, C, B, C ] # pprint.pprint(output) assert output == pytest.helpers.setify_expected_output(expected_output) def test_6_two_trials(self): comp = Composition() A = TransferMechanism(function=Linear(slope=5.0, intercept=2.0), name='scheduler-pytests-A') B = TransferMechanism(function=Linear(intercept=4.0), name='scheduler-pytests-B') C = TransferMechanism(function=Linear(intercept=1.5), name='scheduler-pytests-C') for m in [A, B, C]: comp.add_node(m) comp.add_projection(MappingProjection(), A, B) comp.add_projection(MappingProjection(), B, C) sched = Scheduler(composition=comp) sched.add_condition(A, BeforePass(5)) sched.add_condition(B, AfterNCalls(A, 5)) sched.add_condition(C, AfterNCalls(B, 1)) termination_conds = {} termination_conds[TimeScale.RUN] = AfterNTrials(2) termination_conds[TimeScale.TRIAL] = AfterNCalls(C, 3) comp.run( inputs={A: [[0], [1], [2], [3], [4], [5]]}, scheduler=sched, termination_processing=termination_conds ) output = sched.execution_list[comp.default_execution_id] expected_output = [ A, A, A, A, A, B, C, B, C, B, C, A, A, A, A, A, B, C, B, C, B, C ] # pprint.pprint(output) assert output == pytest.helpers.setify_expected_output(expected_output) def test_7(self): comp = Composition() A = TransferMechanism(function=Linear(slope=5.0, intercept=2.0), name='scheduler-pytests-A') B = TransferMechanism(function=Linear(intercept=4.0), name='scheduler-pytests-B') for m in [A, B]: comp.add_node(m) comp.add_projection(MappingProjection(), 
A, B) sched = Scheduler(composition=comp) sched.add_condition(A, EveryNPasses(1)) sched.add_condition(B, EveryNCalls(A, 2)) termination_conds = {} termination_conds[TimeScale.RUN] = AfterNTrials(1) termination_conds[TimeScale.TRIAL] = Any(AfterNCalls(A, 1), AfterNCalls(B, 1)) output = list(sched.run(termination_conds=termination_conds)) expected_output = [A] assert output == pytest.helpers.setify_expected_output(expected_output) def test_8(self): comp = Composition() A = TransferMechanism(function=Linear(slope=5.0, intercept=2.0), name='scheduler-pytests-A') B = TransferMechanism(function=Linear(intercept=4.0), name='scheduler-pytests-B') for m in [A, B]: comp.add_node(m) comp.add_projection(MappingProjection(), A, B) sched = Scheduler(composition=comp) sched.add_condition(A, EveryNPasses(1)) sched.add_condition(B, EveryNCalls(A, 2)) termination_conds = {} termination_conds[TimeScale.RUN] = AfterNTrials(1) termination_conds[TimeScale.TRIAL] = All(AfterNCalls(A, 1), AfterNCalls(B, 1)) output = list(sched.run(termination_conds=termination_conds)) expected_output = [A, A, B] assert output == pytest.helpers.setify_expected_output(expected_output) def test_9(self): comp = Composition() A = TransferMechanism(function=Linear(slope=5.0, intercept=2.0), name='scheduler-pytests-A') B = TransferMechanism(function=Linear(intercept=4.0), name='scheduler-pytests-B') for m in [A, B]: comp.add_node(m) comp.add_projection(MappingProjection(), A, B) sched = Scheduler(composition=comp) sched.add_condition(A, EveryNPasses(1)) sched.add_condition(B, WhenFinished(A)) termination_conds = {} termination_conds[TimeScale.RUN] = AfterNTrials(1) termination_conds[TimeScale.TRIAL] = AfterNCalls(B, 2) output = [] i = 0 A.is_finished_flag = False for step in sched.run(termination_conds=termination_conds): if i == 3: A.is_finished_flag = True output.append(step) i += 1 expected_output = [A, A, A, A, B, A, B] assert output == pytest.helpers.setify_expected_output(expected_output) def 
test_9b(self): comp = Composition() A = TransferMechanism(function=Linear(slope=5.0, intercept=2.0), name='scheduler-pytests-A') A.is_finished_flag = False B = TransferMechanism(function=Linear(intercept=4.0), name='scheduler-pytests-B') for m in [A, B]: comp.add_node(m) comp.add_projection(MappingProjection(), A, B) sched = Scheduler(composition=comp) sched.add_condition(A, EveryNPasses(1)) sched.add_condition(B, WhenFinished(A)) termination_conds = {} termination_conds[TimeScale.RUN] = AfterNTrials(1) termination_conds[TimeScale.TRIAL] = AtPass(5) output = list(sched.run(termination_conds=termination_conds)) expected_output = [A, A, A, A, A] assert output == pytest.helpers.setify_expected_output(expected_output) def test_10(self): comp = Composition() A = TransferMechanism(function=Linear(slope=5.0, intercept=2.0), name='scheduler-pytests-A') A.is_finished_flag = True B = TransferMechanism(function=Linear(intercept=4.0), name='scheduler-pytests-B') for m in [A, B]: comp.add_node(m) comp.add_projection(MappingProjection(), A, B) sched = Scheduler(composition=comp) sched.add_condition(A, EveryNPasses(1)) sched.add_condition(B, Any(WhenFinished(A), AfterNCalls(A, 3))) termination_conds = {} termination_conds[TimeScale.RUN] = AfterNTrials(1) termination_conds[TimeScale.TRIAL] = AfterNCalls(B, 5) output = list(sched.run(termination_conds=termination_conds)) expected_output = [A, B, A, B, A, B, A, B, A, B] assert output == pytest.helpers.setify_expected_output(expected_output) def test_10b(self): comp = Composition() A = TransferMechanism(function=Linear(slope=5.0, intercept=2.0), name='scheduler-pytests-A') A.is_finished_flag = False B = TransferMechanism(function=Linear(intercept=4.0), name='scheduler-pytests-B') for m in [A, B]: comp.add_node(m) comp.add_projection(MappingProjection(), A, B) sched = Scheduler(composition=comp) sched.add_condition(A, EveryNPasses(1)) sched.add_condition(B, Any(WhenFinished(A), AfterNCalls(A, 3))) termination_conds = {} 
termination_conds[TimeScale.RUN] = AfterNTrials(1) termination_conds[TimeScale.TRIAL] = AfterNCalls(B, 4) output = list(sched.run(termination_conds=termination_conds)) expected_output = [A, A, A, B, A, B, A, B, A, B] assert output == pytest.helpers.setify_expected_output(expected_output) def test_10c(self): comp = Composition() A = TransferMechanism(function=Linear(slope=5.0, intercept=2.0), name='scheduler-pytests-A') A.is_finished_flag = True B = TransferMechanism(function=Linear(intercept=4.0), name='scheduler-pytests-B') for m in [A, B]: comp.add_node(m) comp.add_projection(MappingProjection(), A, B) sched = Scheduler(composition=comp) sched.add_condition(A, EveryNPasses(1)) sched.add_condition(B, All(WhenFinished(A), AfterNCalls(A, 3))) termination_conds = {} termination_conds[TimeScale.RUN] = AfterNTrials(1) termination_conds[TimeScale.TRIAL] = AfterNCalls(B, 4) output = list(sched.run(termination_conds=termination_conds)) expected_output = [A, A, A, B, A, B, A, B, A, B] assert output == pytest.helpers.setify_expected_output(expected_output) def test_10d(self): comp = Composition() A = TransferMechanism(function=Linear(slope=5.0, intercept=2.0), name='scheduler-pytests-A') A.is_finished_flag = False B = TransferMechanism(function=Linear(intercept=4.0), name='scheduler-pytests-B') for m in [A, B]: comp.add_node(m) comp.add_projection(MappingProjection(), A, B) sched = Scheduler(composition=comp) sched.add_condition(A, EveryNPasses(1)) sched.add_condition(B, All(WhenFinished(A), AfterNCalls(A, 3))) termination_conds = {} termination_conds[TimeScale.RUN] = AfterNTrials(1) termination_conds[TimeScale.TRIAL] = AtPass(10) output = list(sched.run(termination_conds=termination_conds)) expected_output = [A, A, A, A, A, A, A, A, A, A] assert output == pytest.helpers.setify_expected_output(expected_output) ######################################## # tests with linear compositions ######################################## def test_linear_AAB(self): comp = Composition() A = 
TransferMechanism(function=Linear(slope=5.0, intercept=2.0), name='scheduler-pytests-A') B = TransferMechanism(function=Linear(intercept=4.0), name='scheduler-pytests-B') for m in [A, B]: comp.add_node(m) comp.add_projection(MappingProjection(), A, B) sched = Scheduler(composition=comp) sched.add_condition(A, EveryNPasses(1)) sched.add_condition(B, EveryNCalls(A, 2)) termination_conds = {} termination_conds[TimeScale.RUN] = AfterNCalls(B, 2, time_scale=TimeScale.RUN) termination_conds[TimeScale.TRIAL] = AfterNCalls(B, 2, time_scale=TimeScale.TRIAL) output = list(sched.run(termination_conds=termination_conds)) expected_output = [A, A, B, A, A, B] assert output == pytest.helpers.setify_expected_output(expected_output) def test_linear_ABB(self): comp = Composition() A = TransferMechanism(function=Linear(slope=5.0, intercept=2.0), name='scheduler-pytests-A') B = TransferMechanism(function=Linear(intercept=4.0), name='scheduler-pytests-B') for m in [A, B]: comp.add_node(m) comp.add_projection(MappingProjection(), A, B) sched = Scheduler(composition=comp) sched.add_condition(A, Any(AtPass(0), EveryNCalls(B, 2))) sched.add_condition(B, Any(EveryNCalls(A, 1), EveryNCalls(B, 1))) termination_conds = {} termination_conds[TimeScale.RUN] = AfterNTrials(1) termination_conds[TimeScale.TRIAL] = AfterNCalls(B, 8, time_scale=TimeScale.TRIAL) output = list(sched.run(termination_conds=termination_conds)) expected_output = [A, B, B, A, B, B, A, B, B, A, B, B] assert output == pytest.helpers.setify_expected_output(expected_output) def test_linear_ABBCC(self): comp = Composition() A = TransferMechanism(function=Linear(slope=5.0, intercept=2.0), name='scheduler-pytests-A') B = TransferMechanism(function=Linear(intercept=4.0), name='scheduler-pytests-B') C = TransferMechanism(function=Linear(intercept=1.5), name='scheduler-pytests-C') for m in [A, B, C]: comp.add_node(m) comp.add_projection(MappingProjection(), A, B) comp.add_projection(MappingProjection(), B, C) sched = 
Scheduler(composition=comp) sched.add_condition(A, Any(AtPass(0), EveryNCalls(C, 2))) sched.add_condition(B, Any(JustRan(A), JustRan(B))) sched.add_condition(C, Any(EveryNCalls(B, 2), JustRan(C))) termination_conds = {} termination_conds[TimeScale.RUN] = AfterNTrials(1) termination_conds[TimeScale.TRIAL] = AfterNCalls(C, 4, time_scale=TimeScale.TRIAL) output = list(sched.run(termination_conds=termination_conds)) expected_output = [A, B, B, C, C, A, B, B, C, C] assert output == pytest.helpers.setify_expected_output(expected_output) def test_linear_ABCBC(self): comp = Composition() A = TransferMechanism(function=Linear(slope=5.0, intercept=2.0), name='scheduler-pytests-A') B = TransferMechanism(function=Linear(intercept=4.0), name='scheduler-pytests-B') C = TransferMechanism(function=Linear(intercept=1.5), name='scheduler-pytests-C') for m in [A, B, C]: comp.add_node(m) comp.add_projection(MappingProjection(), A, B) comp.add_projection(MappingProjection(), B, C) sched = Scheduler(composition=comp) sched.add_condition(A, Any(AtPass(0), EveryNCalls(C, 2))) sched.add_condition(B, Any(EveryNCalls(A, 1), EveryNCalls(C, 1))) sched.add_condition(C, EveryNCalls(B, 1)) termination_conds = {} termination_conds[TimeScale.RUN] = AfterNTrials(1) termination_conds[TimeScale.TRIAL] = AfterNCalls(C, 4, time_scale=TimeScale.TRIAL) output = list(sched.run(termination_conds=termination_conds)) expected_output = [A, B, C, B, C, A, B, C, B, C] assert output == pytest.helpers.setify_expected_output(expected_output) ######################################## # tests with small branching compositions ######################################## class TestBranching: @classmethod def setup_class(self): self.orig_is_finished_flag = TransferMechanism.is_finished_flag self.orig_is_finished = TransferMechanism.is_finished TransferMechanism.is_finished_flag = True TransferMechanism.is_finished = lambda self, context: self.is_finished_flag @classmethod def teardown_class(self): del 
TransferMechanism.is_finished_flag del TransferMechanism.is_finished TransferMechanism.is_finished_flag = self.orig_is_finished_flag TransferMechanism.is_finished = self.orig_is_finished # triangle: A # / \ # B C def test_triangle_1(self): comp = Composition() A = TransferMechanism(function=Linear(slope=5.0, intercept=2.0), name='scheduler-pytests-A') B = TransferMechanism(function=Linear(intercept=4.0), name='scheduler-pytests-B') C = TransferMechanism(function=Linear(intercept=1.5), name='scheduler-pytests-C') for m in [A, B, C]: comp.add_node(m) comp.add_projection(MappingProjection(), A, B) comp.add_projection(MappingProjection(), A, C) sched = Scheduler(composition=comp) sched.add_condition(A, EveryNPasses(1)) sched.add_condition(B, EveryNCalls(A, 1)) sched.add_condition(C, EveryNCalls(A, 1)) termination_conds = {} termination_conds[TimeScale.RUN] = AfterNTrials(1) termination_conds[TimeScale.TRIAL] = AfterNCalls(C, 3, time_scale=TimeScale.TRIAL) output = list(sched.run(termination_conds=termination_conds)) expected_output = [ A, set([B, C]), A, set([B, C]), A, set([B, C]), ] # pprint.pprint(output) assert output == pytest.helpers.setify_expected_output(expected_output) def test_triangle_2(self): comp = Composition() A = TransferMechanism(function=Linear(slope=5.0, intercept=2.0), name='scheduler-pytests-A') B = TransferMechanism(function=Linear(intercept=4.0), name='scheduler-pytests-B') C = TransferMechanism(function=Linear(intercept=1.5), name='scheduler-pytests-C') for m in [A, B, C]: comp.add_node(m) comp.add_projection(MappingProjection(), A, B) comp.add_projection(MappingProjection(), A, C) sched = Scheduler(composition=comp) sched.add_condition(A, EveryNPasses(1)) sched.add_condition(B, EveryNCalls(A, 1)) sched.add_condition(C, EveryNCalls(A, 2)) termination_conds = {} termination_conds[TimeScale.RUN] = AfterNTrials(1) termination_conds[TimeScale.TRIAL] = AfterNCalls(C, 3, time_scale=TimeScale.TRIAL) output = 
list(sched.run(termination_conds=termination_conds)) expected_output = [ A, B, A, set([B, C]), A, B, A, set([B, C]), A, B, A, set([B, C]), ] # pprint.pprint(output) assert output == pytest.helpers.setify_expected_output(expected_output) def test_triangle_3(self): comp = Composition() A = TransferMechanism(function=Linear(slope=5.0, intercept=2.0), name='scheduler-pytests-A') B = TransferMechanism(function=Linear(intercept=4.0), name='scheduler-pytests-B') C = TransferMechanism(function=Linear(intercept=1.5), name='scheduler-pytests-C') for m in [A, B, C]: comp.add_node(m) comp.add_projection(MappingProjection(), A, B) comp.add_projection(MappingProjection(), A, C) sched = Scheduler(composition=comp) sched.add_condition(A, EveryNPasses(1)) sched.add_condition(B, EveryNCalls(A, 2)) sched.add_condition(C, EveryNCalls(A, 3)) termination_conds = {} termination_conds[TimeScale.RUN] = AfterNTrials(1) termination_conds[TimeScale.TRIAL] = AfterNCalls(C, 2, time_scale=TimeScale.TRIAL) output = list(sched.run(termination_conds=termination_conds)) expected_output = [ A, A, B, A, C, A, B, A, A, set([B, C]) ] # pprint.pprint(output) assert output == pytest.helpers.setify_expected_output(expected_output) # this is test 11 of original constraint_scheduler.py def test_triangle_4(self): comp = Composition() A = TransferMechanism(function=Linear(slope=5.0, intercept=2.0), name='scheduler-pytests-A') B = TransferMechanism(function=Linear(intercept=4.0), name='scheduler-pytests-B') C = TransferMechanism(function=Linear(intercept=1.5), name='scheduler-pytests-C') for m in [A, B, C]: comp.add_node(m) comp.add_projection(MappingProjection(), A, B) comp.add_projection(MappingProjection(), A, C) sched = Scheduler(composition=comp) sched.add_condition(A, EveryNPasses(1)) sched.add_condition(B, EveryNCalls(A, 2)) sched.add_condition(C, All(WhenFinished(A), AfterNCalls(B, 3))) termination_conds = {} termination_conds[TimeScale.RUN] = AfterNTrials(1) termination_conds[TimeScale.TRIAL] = 
AfterNCalls(C, 1) output = [] i = 0 A.is_finished_flag = False for step in sched.run(termination_conds=termination_conds): if i == 3: A.is_finished_flag = True output.append(step) i += 1 expected_output = [A, A, B, A, A, B, A, A, set([B, C])] # pprint.pprint(output) assert output == pytest.helpers.setify_expected_output(expected_output) def test_triangle_4b(self): comp = Composition() A = TransferMechanism(function=Linear(slope=5.0, intercept=2.0), name='scheduler-pytests-A') B = TransferMechanism(function=Linear(intercept=4.0), name='scheduler-pytests-B') C = TransferMechanism(function=Linear(intercept=1.5), name='scheduler-pytests-C') for m in [A, B, C]: comp.add_node(m) comp.add_projection(MappingProjection(), A, B) comp.add_projection(MappingProjection(), A, C) sched = Scheduler(composition=comp) sched.add_condition(A, EveryNPasses(1)) sched.add_condition(B, EveryNCalls(A, 2)) sched.add_condition(C, All(WhenFinished(A), AfterNCalls(B, 3))) termination_conds = {} termination_conds[TimeScale.RUN] = AfterNTrials(1) termination_conds[TimeScale.TRIAL] = AfterNCalls(C, 1) output = [] i = 0 A.is_finished_flag = False for step in sched.run(termination_conds=termination_conds): if i == 10: A.is_finished_flag = True output.append(step) i += 1 expected_output = [A, A, B, A, A, B, A, A, B, A, A, set([B, C])] # pprint.pprint(output) assert output == pytest.helpers.setify_expected_output(expected_output) # inverted triangle: A B # \ / # C # this is test 4 of original constraint_scheduler.py # this test has an implicit priority set of A<B ! 
def test_invtriangle_1(self): comp = Composition() A = TransferMechanism(function=Linear(slope=5.0, intercept=2.0), name='scheduler-pytests-A') B = TransferMechanism(function=Linear(intercept=4.0), name='scheduler-pytests-B') C = TransferMechanism(function=Linear(intercept=1.5), name='scheduler-pytests-C') for m in [A, B, C]: comp.add_node(m) comp.add_projection(MappingProjection(), A, C) comp.add_projection(MappingProjection(), B, C) sched = Scheduler(composition=comp) sched.add_condition(A, EveryNPasses(1)) sched.add_condition(B, EveryNCalls(A, 2)) sched.add_condition(C, Any(AfterNCalls(A, 3), AfterNCalls(B, 3))) termination_conds = {} termination_conds[TimeScale.RUN] = AfterNTrials(1) termination_conds[TimeScale.TRIAL] = AfterNCalls(C, 4, time_scale=TimeScale.TRIAL) output = list(sched.run(termination_conds=termination_conds)) expected_output = [ A, set([A, B]), A, C, set([A, B]), C, A, C, set([A, B]), C ] # pprint.pprint(output) assert output == pytest.helpers.setify_expected_output(expected_output) # this is test 5 of original constraint_scheduler.py # this test has an implicit priority set of A<B ! 
def test_invtriangle_2(self): comp = Composition() A = TransferMechanism(function=Linear(slope=5.0, intercept=2.0), name='scheduler-pytests-A') B = TransferMechanism(function=Linear(intercept=4.0), name='scheduler-pytests-B') C = TransferMechanism(function=Linear(intercept=1.5), name='scheduler-pytests-C') for m in [A, B, C]: comp.add_node(m) comp.add_projection(MappingProjection(), A, C) comp.add_projection(MappingProjection(), B, C) sched = Scheduler(composition=comp) sched.add_condition(A, EveryNPasses(1)) sched.add_condition(B, EveryNCalls(A, 2)) sched.add_condition(C, All(AfterNCalls(A, 3), AfterNCalls(B, 3))) termination_conds = {} termination_conds[TimeScale.RUN] = AfterNTrials(1) termination_conds[TimeScale.TRIAL] = AfterNCalls(C, 2, time_scale=TimeScale.TRIAL) output = list(sched.run(termination_conds=termination_conds)) expected_output = [ A, set([A, B]), A, set([A, B]), A, set([A, B]), C, A, C ] assert output == pytest.helpers.setify_expected_output(expected_output) # checkmark: A # \ # B C # \ / # D # testing toposort def test_checkmark_1(self): comp = Composition() A = TransferMechanism(function=Linear(slope=5.0, intercept=2.0), name='scheduler-pytests-A') B = TransferMechanism(function=Linear(intercept=4.0), name='scheduler-pytests-B') C = TransferMechanism(function=Linear(intercept=1.5), name='scheduler-pytests-C') D = TransferMechanism(function=Linear(intercept=.5), name='scheduler-pytests-D') for m in [A, B, C, D]: comp.add_node(m) comp.add_projection(MappingProjection(), A, B) comp.add_projection(MappingProjection(), B, D) comp.add_projection(MappingProjection(), C, D) sched = Scheduler(composition=comp) sched.add_condition(A, Always()) sched.add_condition(B, Always()) sched.add_condition(C, Always()) sched.add_condition(D, Always()) termination_conds = {} termination_conds[TimeScale.RUN] = AfterNTrials(1) termination_conds[TimeScale.TRIAL] = AfterNCalls(D, 1, time_scale=TimeScale.TRIAL) output = 
list(sched.run(termination_conds=termination_conds)) expected_output = [ set([A, C]), B, D ] assert output == pytest.helpers.setify_expected_output(expected_output) def test_checkmark_2(self): comp = Composition() A = TransferMechanism(function=Linear(slope=5.0, intercept=2.0), name='scheduler-pytests-A') B = TransferMechanism(function=Linear(intercept=4.0), name='scheduler-pytests-B') C = TransferMechanism(function=Linear(intercept=1.5), name='scheduler-pytests-C') D = TransferMechanism(function=Linear(intercept=.5), name='scheduler-pytests-D') for m in [A, B, C, D]: comp.add_node(m) comp.add_projection(MappingProjection(), A, B) comp.add_projection(MappingProjection(), B, D) comp.add_projection(MappingProjection(), C, D) sched = Scheduler(composition=comp) sched.add_condition(A, EveryNPasses(1)) sched.add_condition(B, EveryNCalls(A, 2)) sched.add_condition(C, EveryNCalls(A, 2)) sched.add_condition(D, All(EveryNCalls(B, 2), EveryNCalls(C, 2))) termination_conds = {} termination_conds[TimeScale.RUN] = AfterNTrials(1) termination_conds[TimeScale.TRIAL] = AfterNCalls(D, 1, time_scale=TimeScale.TRIAL) output = list(sched.run(termination_conds=termination_conds)) expected_output = [ A, set([A, C]), B, A, set([A, C]), B, D ] assert output == pytest.helpers.setify_expected_output(expected_output) def test_checkmark2_1(self): comp = Composition() A = TransferMechanism(function=Linear(slope=5.0, intercept=2.0), name='scheduler-pytests-A') B = TransferMechanism(function=Linear(intercept=4.0), name='scheduler-pytests-B') C = TransferMechanism(function=Linear(intercept=1.5), name='scheduler-pytests-C') D = TransferMechanism(function=Linear(intercept=.5), name='scheduler-pytests-D') for m in [A, B, C, D]: comp.add_node(m) comp.add_projection(MappingProjection(), A, B) comp.add_projection(MappingProjection(), A, D) comp.add_projection(MappingProjection(), B, D) comp.add_projection(MappingProjection(), C, D) sched = Scheduler(composition=comp) sched.add_condition(A, 
EveryNPasses(1)) sched.add_condition(B, EveryNCalls(A, 2)) sched.add_condition(C, EveryNCalls(A, 2)) sched.add_condition(D, All(EveryNCalls(B, 2), EveryNCalls(C, 2))) termination_conds = {} termination_conds[TimeScale.RUN] = AfterNTrials(1) termination_conds[TimeScale.TRIAL] = AfterNCalls(D, 1, time_scale=TimeScale.TRIAL) output = list(sched.run(termination_conds=termination_conds)) expected_output = [ A, set([A, C]), B, A, set([A, C]), B, D ] assert output == pytest.helpers.setify_expected_output(expected_output) # multi source: A1 A2 # / \ / \ # B1 B2 B3 # \ / \ / # C1 C2 def test_multisource_1(self): comp = Composition() A1 = TransferMechanism(function=Linear(slope=5.0, intercept=2.0), name='A1') A2 = TransferMechanism(function=Linear(slope=5.0, intercept=2.0), name='A2') B1 = TransferMechanism(function=Linear(intercept=4.0), name='B1') B2 = TransferMechanism(function=Linear(intercept=4.0), name='B2') B3 = TransferMechanism(function=Linear(intercept=4.0), name='B3') C1 = TransferMechanism(function=Linear(intercept=1.5), name='C1') C2 = TransferMechanism(function=Linear(intercept=.5), name='C2') for m in [A1, A2, B1, B2, B3, C1, C2]: comp.add_node(m) comp.add_projection(MappingProjection(), A1, B1) comp.add_projection(MappingProjection(), A1, B2) comp.add_projection(MappingProjection(), A2, B1) comp.add_projection(MappingProjection(), A2, B2) comp.add_projection(MappingProjection(), A2, B3) comp.add_projection(MappingProjection(), B1, C1) comp.add_projection(MappingProjection(), B2, C1) comp.add_projection(MappingProjection(), B1, C2) comp.add_projection(MappingProjection(), B3, C2) sched = Scheduler(composition=comp) for m in comp.nodes: sched.add_condition(m, Always()) termination_conds = {} termination_conds[TimeScale.RUN] = AfterNTrials(1) termination_conds[TimeScale.TRIAL] = All(AfterNCalls(C1, 1), AfterNCalls(C2, 1)) output = list(sched.run(termination_conds=termination_conds)) expected_output = [ set([A1, A2]), set([B1, B2, B3]), set([C1, C2]) ] assert 
output == pytest.helpers.setify_expected_output(expected_output) def test_multisource_2(self): comp = Composition() A1 = TransferMechanism(function=Linear(slope=5.0, intercept=2.0), name='A1') A2 = TransferMechanism(function=Linear(slope=5.0, intercept=2.0), name='A2') B1 = TransferMechanism(function=Linear(intercept=4.0), name='B1') B2 = TransferMechanism(function=Linear(intercept=4.0), name='B2') B3 = TransferMechanism(function=Linear(intercept=4.0), name='B3') C1 = TransferMechanism(function=Linear(intercept=1.5), name='C1') C2 = TransferMechanism(function=Linear(intercept=.5), name='C2') for m in [A1, A2, B1, B2, B3, C1, C2]: comp.add_node(m) comp.add_projection(MappingProjection(), A1, B1) comp.add_projection(MappingProjection(), A1, B2) comp.add_projection(MappingProjection(), A2, B1) comp.add_projection(MappingProjection(), A2, B2) comp.add_projection(MappingProjection(), A2, B3) comp.add_projection(MappingProjection(), B1, C1) comp.add_projection(MappingProjection(), B2, C1) comp.add_projection(MappingProjection(), B1, C2) comp.add_projection(MappingProjection(), B3, C2) sched = Scheduler(composition=comp) sched.add_condition_set({ A1: Always(), A2: Always(), B1: EveryNCalls(A1, 2), B3: EveryNCalls(A2, 2), B2: All(EveryNCalls(A1, 4), EveryNCalls(A2, 4)), C1: Any(AfterNCalls(B1, 2), AfterNCalls(B2, 2)), C2: Any(AfterNCalls(B2, 2), AfterNCalls(B3, 2)), }) termination_conds = {} termination_conds[TimeScale.RUN] = AfterNTrials(1) termination_conds[TimeScale.TRIAL] = All(AfterNCalls(C1, 1), AfterNCalls(C2, 1)) output = list(sched.run(termination_conds=termination_conds)) expected_output = [ set([A1, A2]), set([A1, A2]), set([B1, B3]), set([A1, A2]), set([A1, A2]), set([B1, B2, B3]), set([C1, C2]) ] assert output == pytest.helpers.setify_expected_output(expected_output) class TestTermination: @classmethod def setup_class(self): self.orig_is_finished_flag = TransferMechanism.is_finished_flag self.orig_is_finished = TransferMechanism.is_finished 
TransferMechanism.is_finished_flag = True TransferMechanism.is_finished = lambda self, context: self.is_finished_flag @classmethod def teardown_class(self): del TransferMechanism.is_finished_flag del TransferMechanism.is_finished TransferMechanism.is_finished_flag = self.orig_is_finished_flag TransferMechanism.is_finished = self.orig_is_finished def test_termination_conditions_reset(self): comp = Composition() A = TransferMechanism(function=Linear(slope=5.0, intercept=2.0), name='scheduler-pytests-A') B = TransferMechanism(function=Linear(intercept=4.0), name='scheduler-pytests-B') for m in [A, B]: comp.add_node(m) comp.add_projection(MappingProjection(), A, B) sched = Scheduler(composition=comp) sched.add_condition(B, EveryNCalls(A, 2)) termination_conds = {} termination_conds[TimeScale.RUN] = AfterNTrials(1) termination_conds[TimeScale.TRIAL] = AfterNCalls(B, 2) output = list(sched.run(termination_conds=termination_conds)) expected_output = [A, A, B, A, A, B] assert output == pytest.helpers.setify_expected_output(expected_output) # reset the RUN because schedulers run TRIALs sched.clock._increment_time(TimeScale.RUN) sched._reset_counts_total(TimeScale.RUN, execution_id=sched.default_execution_id) output = list(sched.run()) expected_output = [A, A, B] assert output == pytest.helpers.setify_expected_output(expected_output) def test_partial_override_scheduler(self): comp = Composition() A = TransferMechanism(name='scheduler-pytests-A') B = TransferMechanism(name='scheduler-pytests-B') for m in [A, B]: comp.add_node(m) comp.add_projection(MappingProjection(), A, B) sched = Scheduler(composition=comp) sched.add_condition(B, EveryNCalls(A, 2)) termination_conds = {TimeScale.TRIAL: AfterNCalls(B, 2)} output = list(sched.run(termination_conds=termination_conds)) expected_output = [A, A, B, A, A, B] assert output == pytest.helpers.setify_expected_output(expected_output) def test_partial_override_composition(self): comp = Composition() A = 
TransferMechanism(name='scheduler-pytests-A') B = IntegratorMechanism(name='scheduler-pytests-B') for m in [A, B]: comp.add_node(m) comp.add_projection(MappingProjection(), A, B) termination_conds = {TimeScale.TRIAL: AfterNCalls(B, 2)} output = comp.run(inputs={A: 1}, termination_processing=termination_conds) # two executions of B assert output == [.75]
41.408439
166
0.634922
5,851
49,069
5.185438
0.037942
0.011734
0.101154
0.081806
0.909987
0.904845
0.890244
0.882828
0.879103
0.877653
0
0.018802
0.239092
49,069
1,184
167
41.443412
0.793797
0.033524
0
0.787778
0
0
0.036707
0
0
0
0
0
0.048889
1
0.054444
false
0.041111
0.018889
0
0.077778
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
41e336b686d7c14756c464c70f911d5528562ba4
3,615
py
Python
low_level_simulation/build/rosbridge_suite/rosbridge_library/cmake/rosbridge_library-genmsg-context.py
abiantorres/autonomous-vehicles-system-simulation
3f0112036b2b270f5055729c648a1310976df933
[ "Apache-2.0" ]
null
null
null
low_level_simulation/build/rosbridge_suite/rosbridge_library/cmake/rosbridge_library-genmsg-context.py
abiantorres/autonomous-vehicles-system-simulation
3f0112036b2b270f5055729c648a1310976df933
[ "Apache-2.0" ]
null
null
null
low_level_simulation/build/rosbridge_suite/rosbridge_library/cmake/rosbridge_library-genmsg-context.py
abiantorres/autonomous-vehicles-system-simulation
3f0112036b2b270f5055729c648a1310976df933
[ "Apache-2.0" ]
null
null
null
# generated from genmsg/cmake/pkg-genmsg.context.in messages_str = "/home/abiantorres/Documentos/tfg/autonomous-vehicles-system-simulation/low_level_simulation/src/rosbridge_suite/rosbridge_library/msg/Num.msg;/home/abiantorres/Documentos/tfg/autonomous-vehicles-system-simulation/low_level_simulation/src/rosbridge_suite/rosbridge_library/msg/TestChar.msg;/home/abiantorres/Documentos/tfg/autonomous-vehicles-system-simulation/low_level_simulation/src/rosbridge_suite/rosbridge_library/msg/TestDurationArray.msg;/home/abiantorres/Documentos/tfg/autonomous-vehicles-system-simulation/low_level_simulation/src/rosbridge_suite/rosbridge_library/msg/TestHeaderArray.msg;/home/abiantorres/Documentos/tfg/autonomous-vehicles-system-simulation/low_level_simulation/src/rosbridge_suite/rosbridge_library/msg/TestHeader.msg;/home/abiantorres/Documentos/tfg/autonomous-vehicles-system-simulation/low_level_simulation/src/rosbridge_suite/rosbridge_library/msg/TestHeaderTwo.msg;/home/abiantorres/Documentos/tfg/autonomous-vehicles-system-simulation/low_level_simulation/src/rosbridge_suite/rosbridge_library/msg/TestTimeArray.msg;/home/abiantorres/Documentos/tfg/autonomous-vehicles-system-simulation/low_level_simulation/src/rosbridge_suite/rosbridge_library/msg/TestUInt8.msg;/home/abiantorres/Documentos/tfg/autonomous-vehicles-system-simulation/low_level_simulation/src/rosbridge_suite/rosbridge_library/msg/TestUInt8FixedSizeArray16.msg" services_str = 
"/home/abiantorres/Documentos/tfg/autonomous-vehicles-system-simulation/low_level_simulation/src/rosbridge_suite/rosbridge_library/srv/AddTwoInts.srv;/home/abiantorres/Documentos/tfg/autonomous-vehicles-system-simulation/low_level_simulation/src/rosbridge_suite/rosbridge_library/srv/SendBytes.srv;/home/abiantorres/Documentos/tfg/autonomous-vehicles-system-simulation/low_level_simulation/src/rosbridge_suite/rosbridge_library/srv/TestArrayRequest.srv;/home/abiantorres/Documentos/tfg/autonomous-vehicles-system-simulation/low_level_simulation/src/rosbridge_suite/rosbridge_library/srv/TestEmpty.srv;/home/abiantorres/Documentos/tfg/autonomous-vehicles-system-simulation/low_level_simulation/src/rosbridge_suite/rosbridge_library/srv/TestMultipleRequestFields.srv;/home/abiantorres/Documentos/tfg/autonomous-vehicles-system-simulation/low_level_simulation/src/rosbridge_suite/rosbridge_library/srv/TestMultipleResponseFields.srv;/home/abiantorres/Documentos/tfg/autonomous-vehicles-system-simulation/low_level_simulation/src/rosbridge_suite/rosbridge_library/srv/TestNestedService.srv;/home/abiantorres/Documentos/tfg/autonomous-vehicles-system-simulation/low_level_simulation/src/rosbridge_suite/rosbridge_library/srv/TestRequestAndResponse.srv;/home/abiantorres/Documentos/tfg/autonomous-vehicles-system-simulation/low_level_simulation/src/rosbridge_suite/rosbridge_library/srv/TestRequestOnly.srv;/home/abiantorres/Documentos/tfg/autonomous-vehicles-system-simulation/low_level_simulation/src/rosbridge_suite/rosbridge_library/srv/TestResponseOnly.srv" pkg_name = "rosbridge_library" dependencies_str = "std_msgs;geometry_msgs" langs = "gencpp;geneus;genlisp;gennodejs;genpy" dep_include_paths_str = "rosbridge_library;/home/abiantorres/Documentos/tfg/autonomous-vehicles-system-simulation/low_level_simulation/src/rosbridge_suite/rosbridge_library/msg;std_msgs;/opt/ros/kinetic/share/std_msgs/cmake/../msg;geometry_msgs;/opt/ros/kinetic/share/geometry_msgs/cmake/../msg" PYTHON_EXECUTABLE = 
"/usr/bin/python" package_has_static_sources = 'TRUE' == 'TRUE' genmsg_check_deps_script = "/opt/ros/kinetic/share/genmsg/cmake/../../../lib/genmsg/genmsg_check_deps.py"
301.25
1,571
0.878838
459
3,615
6.694989
0.167756
0.114546
0.162707
0.182232
0.787829
0.773511
0.773511
0.773511
0.773511
0.773511
0
0.001117
0.009405
3,615
11
1,572
328.636364
0.857023
0.013555
0
0
1
0.333333
0.943042
0.931818
0
0
0
0
0
1
0
false
0
0
0
0
0
0
0
0
null
0
0
1
0
1
1
1
1
1
0
0
0
0
0
1
1
0
0
0
0
1
1
1
1
null
0
0
0
0
0
0
0
0
0
0
0
0
0
12
513987f48af8152e801e43ef06652a38bef6d361
56
py
Python
PASSta/__init__.py
dolevbas/PASSta
d98a62f4da3913e3f90a497817be57dfa03169c9
[ "MIT" ]
null
null
null
PASSta/__init__.py
dolevbas/PASSta
d98a62f4da3913e3f90a497817be57dfa03169c9
[ "MIT" ]
null
null
null
PASSta/__init__.py
dolevbas/PASSta
d98a62f4da3913e3f90a497817be57dfa03169c9
[ "MIT" ]
null
null
null
from PASSta.WED import WED from PASSta.ICED import ICED
28
28
0.821429
10
56
4.6
0.5
0.434783
0
0
0
0
0
0
0
0
0
0
0.142857
56
2
28
28
0.958333
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
1
1
0
1
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
1
1
0
1
0
0
7
852b4200ea5680750a119cba7639aec4066261c6
19,110
py
Python
nova/tests/functional/api_sample_tests/test_keypairs.py
bopopescu/nova-token
ec98f69dea7b3e2b9013b27fd55a2c1a1ac6bfb2
[ "Apache-2.0" ]
null
null
null
nova/tests/functional/api_sample_tests/test_keypairs.py
bopopescu/nova-token
ec98f69dea7b3e2b9013b27fd55a2c1a1ac6bfb2
[ "Apache-2.0" ]
null
null
null
nova/tests/functional/api_sample_tests/test_keypairs.py
bopopescu/nova-token
ec98f69dea7b3e2b9013b27fd55a2c1a1ac6bfb2
[ "Apache-2.0" ]
2
2017-07-20T17:31:34.000Z
2020-07-24T02:42:19.000Z
begin_unit comment|'# Copyright 2012 Nebula, Inc.' nl|'\n' comment|'# Copyright 2013 IBM Corp.' nl|'\n' comment|'#' nl|'\n' comment|'# Licensed under the Apache License, Version 2.0 (the "License"); you may' nl|'\n' comment|'# not use this file except in compliance with the License. You may obtain' nl|'\n' comment|'# a copy of the License at' nl|'\n' comment|'#' nl|'\n' comment|'# http://www.apache.org/licenses/LICENSE-2.0' nl|'\n' comment|'#' nl|'\n' comment|'# Unless required by applicable law or agreed to in writing, software' nl|'\n' comment|'# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT' nl|'\n' comment|'# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the' nl|'\n' comment|'# License for the specific language governing permissions and limitations' nl|'\n' comment|'# under the License.' nl|'\n' nl|'\n' name|'import' name|'uuid' newline|'\n' nl|'\n' name|'from' name|'oslo_config' name|'import' name|'cfg' newline|'\n' nl|'\n' name|'from' name|'nova' op|'.' name|'objects' name|'import' name|'keypair' name|'as' name|'keypair_obj' newline|'\n' name|'from' name|'nova' op|'.' name|'tests' op|'.' name|'functional' op|'.' name|'api_sample_tests' name|'import' name|'api_sample_base' newline|'\n' name|'from' name|'nova' op|'.' name|'tests' op|'.' name|'unit' name|'import' name|'fake_crypto' newline|'\n' nl|'\n' DECL|variable|CONF name|'CONF' op|'=' name|'cfg' op|'.' name|'CONF' newline|'\n' name|'CONF' op|'.' name|'import_opt' op|'(' string|"'osapi_compute_extension'" op|',' nl|'\n' string|"'nova.api.openstack.compute.legacy_v2.extensions'" op|')' newline|'\n' nl|'\n' nl|'\n' DECL|class|KeyPairsSampleJsonTest name|'class' name|'KeyPairsSampleJsonTest' op|'(' name|'api_sample_base' op|'.' 
name|'ApiSampleTestBaseV21' op|')' op|':' newline|'\n' DECL|variable|microversion indent|' ' name|'microversion' op|'=' name|'None' newline|'\n' DECL|variable|sample_dir name|'sample_dir' op|'=' string|'"keypairs"' newline|'\n' DECL|variable|expected_delete_status_code name|'expected_delete_status_code' op|'=' number|'202' newline|'\n' DECL|variable|expected_post_status_code name|'expected_post_status_code' op|'=' number|'200' newline|'\n' nl|'\n' DECL|member|_get_flags name|'def' name|'_get_flags' op|'(' name|'self' op|')' op|':' newline|'\n' indent|' ' name|'f' op|'=' name|'super' op|'(' name|'KeyPairsSampleJsonTest' op|',' name|'self' op|')' op|'.' name|'_get_flags' op|'(' op|')' newline|'\n' name|'f' op|'[' string|"'osapi_compute_extension'" op|']' op|'=' name|'CONF' op|'.' name|'osapi_compute_extension' op|'[' op|':' op|']' newline|'\n' name|'f' op|'[' string|"'osapi_compute_extension'" op|']' op|'.' name|'append' op|'(' nl|'\n' string|"'nova.api.openstack.compute.contrib.keypairs.Keypairs'" op|')' newline|'\n' name|'return' name|'f' newline|'\n' nl|'\n' DECL|member|setUp dedent|'' name|'def' name|'setUp' op|'(' name|'self' op|')' op|':' newline|'\n' indent|' ' name|'super' op|'(' name|'KeyPairsSampleJsonTest' op|',' name|'self' op|')' op|'.' name|'setUp' op|'(' op|')' newline|'\n' name|'self' op|'.' name|'api' op|'.' name|'microversion' op|'=' name|'self' op|'.' name|'microversion' newline|'\n' nl|'\n' comment|'# TODO(sdague): this is only needed because we randomly choose the' nl|'\n' comment|'# uuid each time.' 
nl|'\n' DECL|member|generalize_subs dedent|'' name|'def' name|'generalize_subs' op|'(' name|'self' op|',' name|'subs' op|',' name|'vanilla_regexes' op|')' op|':' newline|'\n' indent|' ' name|'subs' op|'[' string|"'keypair_name'" op|']' op|'=' string|"'keypair-[0-9a-f-]+'" newline|'\n' name|'return' name|'subs' newline|'\n' nl|'\n' DECL|member|test_keypairs_post dedent|'' name|'def' name|'test_keypairs_post' op|'(' name|'self' op|')' op|':' newline|'\n' indent|' ' name|'return' name|'self' op|'.' name|'_check_keypairs_post' op|'(' op|')' newline|'\n' nl|'\n' DECL|member|_check_keypairs_post dedent|'' name|'def' name|'_check_keypairs_post' op|'(' name|'self' op|',' op|'**' name|'kwargs' op|')' op|':' newline|'\n' indent|' ' string|'"""Get api sample of key pairs post request."""' newline|'\n' name|'key_name' op|'=' string|"'keypair-'" op|'+' name|'str' op|'(' name|'uuid' op|'.' name|'uuid4' op|'(' op|')' op|')' newline|'\n' name|'subs' op|'=' name|'dict' op|'(' name|'keypair_name' op|'=' name|'key_name' op|',' op|'**' name|'kwargs' op|')' newline|'\n' name|'response' op|'=' name|'self' op|'.' name|'_do_post' op|'(' string|"'os-keypairs'" op|',' string|"'keypairs-post-req'" op|',' name|'subs' op|')' newline|'\n' name|'subs' op|'=' op|'{' string|"'keypair_name'" op|':' name|'key_name' op|'}' newline|'\n' nl|'\n' name|'self' op|'.' name|'_verify_response' op|'(' string|"'keypairs-post-resp'" op|',' name|'subs' op|',' name|'response' op|',' nl|'\n' name|'self' op|'.' name|'expected_post_status_code' op|')' newline|'\n' comment|'# NOTE(maurosr): return the key_name is necessary cause the' nl|'\n' comment|'# verification returns the label of the last compared information in' nl|'\n' comment|'# the response, not necessarily the key name.' 
nl|'\n' name|'return' name|'key_name' newline|'\n' nl|'\n' DECL|member|test_keypairs_import_key_post dedent|'' name|'def' name|'test_keypairs_import_key_post' op|'(' name|'self' op|')' op|':' newline|'\n' indent|' ' name|'public_key' op|'=' name|'fake_crypto' op|'.' name|'get_ssh_public_key' op|'(' op|')' newline|'\n' name|'self' op|'.' name|'_check_keypairs_import_key_post' op|'(' name|'public_key' op|')' newline|'\n' nl|'\n' DECL|member|_check_keypairs_import_key_post dedent|'' name|'def' name|'_check_keypairs_import_key_post' op|'(' name|'self' op|',' name|'public_key' op|',' op|'**' name|'kwargs' op|')' op|':' newline|'\n' comment|"# Get api sample of key pairs post to import user's key." nl|'\n' indent|' ' name|'key_name' op|'=' string|"'keypair-'" op|'+' name|'str' op|'(' name|'uuid' op|'.' name|'uuid4' op|'(' op|')' op|')' newline|'\n' name|'subs' op|'=' op|'{' nl|'\n' string|"'keypair_name'" op|':' name|'key_name' op|',' nl|'\n' op|'}' newline|'\n' name|'params' op|'=' name|'subs' op|'.' name|'copy' op|'(' op|')' newline|'\n' name|'params' op|'[' string|"'public_key'" op|']' op|'=' name|'public_key' newline|'\n' name|'params' op|'.' name|'update' op|'(' op|'**' name|'kwargs' op|')' newline|'\n' name|'response' op|'=' name|'self' op|'.' name|'_do_post' op|'(' string|"'os-keypairs'" op|',' string|"'keypairs-import-post-req'" op|',' nl|'\n' name|'params' op|')' newline|'\n' name|'self' op|'.' name|'_verify_response' op|'(' string|"'keypairs-import-post-resp'" op|',' name|'subs' op|',' name|'response' op|',' nl|'\n' name|'self' op|'.' name|'expected_post_status_code' op|')' newline|'\n' nl|'\n' DECL|member|test_keypairs_list dedent|'' name|'def' name|'test_keypairs_list' op|'(' name|'self' op|')' op|':' newline|'\n' comment|'# Get api sample of key pairs list request.' nl|'\n' indent|' ' name|'key_name' op|'=' name|'self' op|'.' name|'test_keypairs_post' op|'(' op|')' newline|'\n' name|'response' op|'=' name|'self' op|'.' 
name|'_do_get' op|'(' string|"'os-keypairs'" op|')' newline|'\n' name|'subs' op|'=' op|'{' string|"'keypair_name'" op|':' name|'key_name' op|'}' newline|'\n' name|'self' op|'.' name|'_verify_response' op|'(' string|"'keypairs-list-resp'" op|',' name|'subs' op|',' name|'response' op|',' number|'200' op|')' newline|'\n' nl|'\n' DECL|member|test_keypairs_get dedent|'' name|'def' name|'test_keypairs_get' op|'(' name|'self' op|')' op|':' newline|'\n' comment|'# Get api sample of key pairs get request.' nl|'\n' indent|' ' name|'key_name' op|'=' name|'self' op|'.' name|'test_keypairs_post' op|'(' op|')' newline|'\n' name|'response' op|'=' name|'self' op|'.' name|'_do_get' op|'(' string|"'os-keypairs/%s'" op|'%' name|'key_name' op|')' newline|'\n' name|'subs' op|'=' op|'{' string|"'keypair_name'" op|':' name|'key_name' op|'}' newline|'\n' name|'self' op|'.' name|'_verify_response' op|'(' string|"'keypairs-get-resp'" op|',' name|'subs' op|',' name|'response' op|',' number|'200' op|')' newline|'\n' nl|'\n' DECL|member|test_keypairs_delete dedent|'' name|'def' name|'test_keypairs_delete' op|'(' name|'self' op|')' op|':' newline|'\n' comment|'# Get api sample of key pairs delete request.' nl|'\n' indent|' ' name|'key_name' op|'=' name|'self' op|'.' name|'test_keypairs_post' op|'(' op|')' newline|'\n' name|'response' op|'=' name|'self' op|'.' name|'_do_delete' op|'(' string|"'os-keypairs/%s'" op|'%' name|'key_name' op|')' newline|'\n' name|'self' op|'.' name|'assertEqual' op|'(' name|'self' op|'.' name|'expected_delete_status_code' op|',' nl|'\n' name|'response' op|'.' 
name|'status_code' op|')' newline|'\n' nl|'\n' nl|'\n' DECL|class|KeyPairsV22SampleJsonTest dedent|'' dedent|'' name|'class' name|'KeyPairsV22SampleJsonTest' op|'(' name|'KeyPairsSampleJsonTest' op|')' op|':' newline|'\n' DECL|variable|microversion indent|' ' name|'microversion' op|'=' string|"'2.2'" newline|'\n' DECL|variable|expected_post_status_code name|'expected_post_status_code' op|'=' number|'201' newline|'\n' DECL|variable|expected_delete_status_code name|'expected_delete_status_code' op|'=' number|'204' newline|'\n' comment|'# NOTE(gmann): microversion tests do not need to run for v2 API' nl|'\n' comment|'# so defining scenarios only for v2.2 which will run the original tests' nl|'\n' comment|"# by appending '(v2_2)' in test_id." nl|'\n' DECL|variable|scenarios name|'scenarios' op|'=' op|'[' op|'(' string|"'v2_2'" op|',' op|'{' string|"'api_major_version'" op|':' string|"'v2.1'" op|'}' op|')' op|']' newline|'\n' nl|'\n' DECL|member|test_keypairs_post name|'def' name|'test_keypairs_post' op|'(' name|'self' op|')' op|':' newline|'\n' comment|'# NOTE(claudiub): overrides the method with the same name in' nl|'\n' comment|'# KeypairsSampleJsonTest, as it is used by other tests.' nl|'\n' indent|' ' name|'return' name|'self' op|'.' name|'_check_keypairs_post' op|'(' nl|'\n' name|'keypair_type' op|'=' name|'keypair_obj' op|'.' name|'KEYPAIR_TYPE_SSH' op|')' newline|'\n' nl|'\n' DECL|member|test_keypairs_post_x509 dedent|'' name|'def' name|'test_keypairs_post_x509' op|'(' name|'self' op|')' op|':' newline|'\n' indent|' ' name|'return' name|'self' op|'.' name|'_check_keypairs_post' op|'(' nl|'\n' name|'keypair_type' op|'=' name|'keypair_obj' op|'.' name|'KEYPAIR_TYPE_X509' op|')' newline|'\n' nl|'\n' DECL|member|test_keypairs_post_invalid dedent|'' name|'def' name|'test_keypairs_post_invalid' op|'(' name|'self' op|')' op|':' newline|'\n' indent|' ' name|'key_name' op|'=' string|"'keypair-'" op|'+' name|'str' op|'(' name|'uuid' op|'.' 
name|'uuid4' op|'(' op|')' op|')' newline|'\n' name|'subs' op|'=' name|'dict' op|'(' name|'keypair_name' op|'=' name|'key_name' op|',' name|'keypair_type' op|'=' string|"'fakey_type'" op|')' newline|'\n' name|'response' op|'=' name|'self' op|'.' name|'_do_post' op|'(' string|"'os-keypairs'" op|',' string|"'keypairs-post-req'" op|',' name|'subs' op|')' newline|'\n' nl|'\n' name|'self' op|'.' name|'assertEqual' op|'(' number|'400' op|',' name|'response' op|'.' name|'status_code' op|')' newline|'\n' nl|'\n' DECL|member|test_keypairs_import_key_post dedent|'' name|'def' name|'test_keypairs_import_key_post' op|'(' name|'self' op|')' op|':' newline|'\n' comment|'# NOTE(claudiub): overrides the method with the same name in' nl|'\n' comment|'# KeypairsSampleJsonTest, since the API sample expects a keypair_type.' nl|'\n' indent|' ' name|'public_key' op|'=' name|'fake_crypto' op|'.' name|'get_ssh_public_key' op|'(' op|')' newline|'\n' name|'self' op|'.' name|'_check_keypairs_import_key_post' op|'(' nl|'\n' name|'public_key' op|',' name|'keypair_type' op|'=' name|'keypair_obj' op|'.' name|'KEYPAIR_TYPE_SSH' op|')' newline|'\n' nl|'\n' DECL|member|test_keypairs_import_key_post_x509 dedent|'' name|'def' name|'test_keypairs_import_key_post_x509' op|'(' name|'self' op|')' op|':' newline|'\n' indent|' ' name|'public_key' op|'=' name|'fake_crypto' op|'.' name|'get_x509_cert_and_fingerprint' op|'(' op|')' op|'[' number|'0' op|']' newline|'\n' name|'public_key' op|'=' name|'public_key' op|'.' name|'replace' op|'(' string|"'\\n'" op|',' string|"'\\\\n'" op|')' newline|'\n' name|'self' op|'.' name|'_check_keypairs_import_key_post' op|'(' nl|'\n' name|'public_key' op|',' name|'keypair_type' op|'=' name|'keypair_obj' op|'.' 
name|'KEYPAIR_TYPE_X509' op|')' newline|'\n' nl|'\n' DECL|member|_check_keypairs_import_key_post_invalid dedent|'' name|'def' name|'_check_keypairs_import_key_post_invalid' op|'(' name|'self' op|',' name|'keypair_type' op|')' op|':' newline|'\n' indent|' ' name|'key_name' op|'=' string|"'keypair-'" op|'+' name|'str' op|'(' name|'uuid' op|'.' name|'uuid4' op|'(' op|')' op|')' newline|'\n' name|'subs' op|'=' op|'{' nl|'\n' string|"'keypair_name'" op|':' name|'key_name' op|',' nl|'\n' string|"'keypair_type'" op|':' name|'keypair_type' op|',' nl|'\n' string|"'public_key'" op|':' name|'fake_crypto' op|'.' name|'get_ssh_public_key' op|'(' op|')' nl|'\n' op|'}' newline|'\n' name|'response' op|'=' name|'self' op|'.' name|'_do_post' op|'(' string|"'os-keypairs'" op|',' string|"'keypairs-import-post-req'" op|',' nl|'\n' name|'subs' op|')' newline|'\n' nl|'\n' name|'self' op|'.' name|'assertEqual' op|'(' number|'400' op|',' name|'response' op|'.' name|'status_code' op|')' newline|'\n' nl|'\n' DECL|member|test_keypairs_import_key_post_invalid_type dedent|'' name|'def' name|'test_keypairs_import_key_post_invalid_type' op|'(' name|'self' op|')' op|':' newline|'\n' indent|' ' name|'self' op|'.' name|'_check_keypairs_import_key_post_invalid' op|'(' nl|'\n' name|'keypair_type' op|'=' string|"'fakey_type'" op|')' newline|'\n' nl|'\n' DECL|member|test_keypairs_import_key_post_invalid_combination dedent|'' name|'def' name|'test_keypairs_import_key_post_invalid_combination' op|'(' name|'self' op|')' op|':' newline|'\n' indent|' ' name|'self' op|'.' name|'_check_keypairs_import_key_post_invalid' op|'(' nl|'\n' name|'keypair_type' op|'=' name|'keypair_obj' op|'.' 
name|'KEYPAIR_TYPE_X509' op|')' newline|'\n' nl|'\n' nl|'\n' DECL|class|KeyPairsV210SampleJsonTest dedent|'' dedent|'' name|'class' name|'KeyPairsV210SampleJsonTest' op|'(' name|'KeyPairsSampleJsonTest' op|')' op|':' newline|'\n' DECL|variable|ADMIN_API indent|' ' name|'ADMIN_API' op|'=' name|'True' newline|'\n' DECL|variable|microversion name|'microversion' op|'=' string|"'2.10'" newline|'\n' DECL|variable|expected_post_status_code name|'expected_post_status_code' op|'=' number|'201' newline|'\n' DECL|variable|expected_delete_status_code name|'expected_delete_status_code' op|'=' number|'204' newline|'\n' DECL|variable|scenarios name|'scenarios' op|'=' op|'[' op|'(' string|"'v2_10'" op|',' op|'{' string|"'api_major_version'" op|':' string|"'v2.1'" op|'}' op|')' op|']' newline|'\n' nl|'\n' DECL|member|test_keypair_create_for_user name|'def' name|'test_keypair_create_for_user' op|'(' name|'self' op|')' op|':' newline|'\n' indent|' ' name|'subs' op|'=' op|'{' nl|'\n' string|"'keypair_type'" op|':' name|'keypair_obj' op|'.' name|'KEYPAIR_TYPE_SSH' op|',' nl|'\n' string|"'public_key'" op|':' name|'fake_crypto' op|'.' name|'get_ssh_public_key' op|'(' op|')' op|',' nl|'\n' string|"'user_id'" op|':' string|'"fake"' nl|'\n' op|'}' newline|'\n' name|'self' op|'.' name|'_check_keypairs_post' op|'(' op|'**' name|'subs' op|')' newline|'\n' nl|'\n' DECL|member|test_keypairs_post dedent|'' name|'def' name|'test_keypairs_post' op|'(' name|'self' op|')' op|':' newline|'\n' indent|' ' name|'return' name|'self' op|'.' name|'_check_keypairs_post' op|'(' nl|'\n' name|'keypair_type' op|'=' name|'keypair_obj' op|'.' 
name|'KEYPAIR_TYPE_SSH' op|',' nl|'\n' name|'user_id' op|'=' string|'"admin"' op|')' newline|'\n' nl|'\n' DECL|member|test_keypairs_import_key_post dedent|'' name|'def' name|'test_keypairs_import_key_post' op|'(' name|'self' op|')' op|':' newline|'\n' comment|'# NOTE(claudiub): overrides the method with the same name in' nl|'\n' comment|'# KeypairsSampleJsonTest, since the API sample expects a keypair_type.' nl|'\n' indent|' ' name|'public_key' op|'=' name|'fake_crypto' op|'.' name|'get_ssh_public_key' op|'(' op|')' newline|'\n' name|'self' op|'.' name|'_check_keypairs_import_key_post' op|'(' nl|'\n' name|'public_key' op|',' name|'keypair_type' op|'=' name|'keypair_obj' op|'.' name|'KEYPAIR_TYPE_SSH' op|',' nl|'\n' name|'user_id' op|'=' string|'"fake"' op|')' newline|'\n' nl|'\n' DECL|member|test_keypairs_delete_for_user dedent|'' name|'def' name|'test_keypairs_delete_for_user' op|'(' name|'self' op|')' op|':' newline|'\n' comment|'# Delete a keypair on behalf of a user' nl|'\n' indent|' ' name|'subs' op|'=' op|'{' nl|'\n' string|"'keypair_type'" op|':' name|'keypair_obj' op|'.' name|'KEYPAIR_TYPE_SSH' op|',' nl|'\n' string|"'public_key'" op|':' name|'fake_crypto' op|'.' name|'get_ssh_public_key' op|'(' op|')' op|',' nl|'\n' string|"'user_id'" op|':' string|'"fake"' nl|'\n' op|'}' newline|'\n' name|'key_name' op|'=' name|'self' op|'.' name|'_check_keypairs_post' op|'(' op|'**' name|'subs' op|')' newline|'\n' name|'response' op|'=' name|'self' op|'.' name|'_do_delete' op|'(' string|"'os-keypairs/%s?user_id=fake'" op|'%' name|'key_name' op|')' newline|'\n' name|'self' op|'.' name|'assertEqual' op|'(' name|'self' op|'.' name|'expected_delete_status_code' op|',' nl|'\n' name|'response' op|'.' 
name|'status_code' op|')' newline|'\n' nl|'\n' nl|'\n' DECL|class|KeyPairsV210SampleJsonTestNotAdmin dedent|'' dedent|'' name|'class' name|'KeyPairsV210SampleJsonTestNotAdmin' op|'(' name|'KeyPairsV210SampleJsonTest' op|')' op|':' newline|'\n' DECL|variable|ADMIN_API indent|' ' name|'ADMIN_API' op|'=' name|'False' newline|'\n' nl|'\n' DECL|member|test_keypairs_post name|'def' name|'test_keypairs_post' op|'(' name|'self' op|')' op|':' newline|'\n' indent|' ' name|'return' name|'self' op|'.' name|'_check_keypairs_post' op|'(' nl|'\n' name|'keypair_type' op|'=' name|'keypair_obj' op|'.' name|'KEYPAIR_TYPE_SSH' op|',' nl|'\n' name|'user_id' op|'=' string|'"fake"' op|')' newline|'\n' nl|'\n' DECL|member|test_keypairs_post_for_other_user dedent|'' name|'def' name|'test_keypairs_post_for_other_user' op|'(' name|'self' op|')' op|':' newline|'\n' indent|' ' name|'key_name' op|'=' string|"'keypair-'" op|'+' name|'str' op|'(' name|'uuid' op|'.' name|'uuid4' op|'(' op|')' op|')' newline|'\n' name|'subs' op|'=' name|'dict' op|'(' name|'keypair_name' op|'=' name|'key_name' op|',' nl|'\n' name|'keypair_type' op|'=' name|'keypair_obj' op|'.' name|'KEYPAIR_TYPE_SSH' op|',' nl|'\n' name|'user_id' op|'=' string|"'fake1'" op|')' newline|'\n' name|'response' op|'=' name|'self' op|'.' name|'_do_post' op|'(' string|"'os-keypairs'" op|',' string|"'keypairs-post-req'" op|',' name|'subs' op|')' newline|'\n' nl|'\n' name|'self' op|'.' name|'assertEqual' op|'(' number|'403' op|',' name|'response' op|'.' name|'status_code' op|')' newline|'\n' dedent|'' dedent|'' endmarker|'' end_unit
14.357626
88
0.644793
2,877
19,110
4.119569
0.078554
0.109855
0.07678
0.046574
0.819271
0.782315
0.754978
0.727388
0.707138
0.675329
0
0.006884
0.095395
19,110
1,330
89
14.368421
0.678718
0
0
0.919549
0
0
0.406175
0.079069
0
0
0
0
0.003759
0
null
null
0
0.02406
null
null
0.000752
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
8
51800cf5417ca27e45106fdbce81f3ac78869f1d
41,674
py
Python
example/test/core/room.py
dmilos/IceRay
4e01f141363c0d126d3c700c1f5f892967e3d520
[ "MIT-0" ]
2
2020-09-04T12:27:15.000Z
2022-01-17T14:49:40.000Z
example/test/core/room.py
dmilos/IceRay
4e01f141363c0d126d3c700c1f5f892967e3d520
[ "MIT-0" ]
null
null
null
example/test/core/room.py
dmilos/IceRay
4e01f141363c0d126d3c700c1f5f892967e3d520
[ "MIT-0" ]
1
2020-09-04T12:27:52.000Z
2020-09-04T12:27:52.000Z
import IceRayCpp import math #from test.core import * import material.medium.linear import decoration def vacuum( P_geometry, P_light, P_surfaceExponat, P_surfaceRoom, P_medium = material.medium.linear.make() ): geometry_cargo={} geometry_cargo['this'] = IceRayCpp.GeometryRTSSGeometry( ) geometry_cargo['list'] = IceRayCpp.GeometryRTSSList() geometry_cargo['this'].rtss( geometry_cargo['list'] ) if( True ): objectVacuum_cargo = {} objectVacuum_cargo['this'] = IceRayCpp.GeometryVolumetricVacuum( ) geometry_cargo['this'].push( objectVacuum_cargo['this'] ) geometry_cargo['vacuum'] = objectVacuum_cargo objectExponat_cargo = {} objectExponat_cargo['geometry'] = P_geometry['this'] objectExponat_cargo['this'] = IceRayCpp.CoreObject() objectExponat_cargo['this'].geometry( objectExponat_cargo['geometry'] ); objectExponat_cargo['this'].surface( P_surfaceExponat['this'] ); objectExponat_cargo['surface'] = P_surfaceExponat #objectExponat_cargo['this'].medium( P_medium['this'] ); #objectExponat_cargo['medium'] = P_medium geometry_cargo['this'].push( objectExponat_cargo['this'] ) geometry_cargo['exponat'] = objectExponat_cargo object = IceRayCpp.CoreObject() object.geometry( geometry_cargo['this'] ) #geometry_cargo['medium'] = material.medium.linear.make( ) #object.medium( geometry_cargo['medium']['this'] ); P_light['this'].barrier( geometry_cargo['this'] ) return { 'this': object, 'geometry': geometry_cargo } def plane( P_geometry, P_light, P_surfaceExponat, P_surfaceRoom, P_medium = material.medium.linear.make() ): level = -1 point = IceRayCpp.MathTypeCoord3D().load( 0, 0, level ) normal = IceRayCpp.MathTypeCoord3D().load(0,0,1) geometry_cargo={} geometry_cargo['this'] = IceRayCpp.GeometryRTSSGeometry( ) geometry_cargo['list'] = IceRayCpp.GeometryRTSSList() geometry_cargo['this'].rtss( geometry_cargo['list'] ) if( True ): objectPlane_cargo = {} objectPlane_cargo['this'] = IceRayCpp.CoreObject() objectPlane_cargo['geometry'] = IceRayCpp.GeometrySimplePlane( point, normal ) 
objectPlane_cargo['this'].geometry( objectPlane_cargo['geometry'] ); objectPlane_cargo['this'].surface( P_surfaceRoom['floor']['this'] ); objectPlane_cargo['surface'] = P_surfaceRoom['floor'] geometry_cargo['this'].push( objectPlane_cargo['this'] ) geometry_cargo['plate'] = objectPlane_cargo objectExponat_cargo = {} objectExponat_cargo['geometry'] = P_geometry['this'] objectExponat_cargo['this'] = IceRayCpp.CoreObject() objectExponat_cargo['this'].geometry( objectExponat_cargo['geometry'] ); objectExponat_cargo['this'].surface( P_surfaceExponat['this'] ); objectExponat_cargo['surface'] = P_surfaceExponat #objectExponat_cargo['this'].medium( P_medium['this'] ); #objectExponat_cargo['medium'] = P_medium geometry_cargo['this'].push( objectExponat_cargo['this'] ) geometry_cargo['exponat'] = objectExponat_cargo object = IceRayCpp.CoreObject() object.geometry( geometry_cargo['this'] ) #geometry_cargo['medium'] = material.medium.linear.make( ) #object.medium( geometry_cargo['medium']['this'] ); P_light['this'].barrier( geometry_cargo['this'] ) return { 'this': object, 'geometry': geometry_cargo } def plate( P_geometry, P_light, P_surfaceExponat, P_surfaceRoom, P_medium = material.medium.linear.make() ): level = - 1; width = 5 depth =5 lo = IceRayCpp.MathTypeCoord3D().load( -width, -depth, level - 0.01 ) hi = IceRayCpp.MathTypeCoord3D().load( +width, +depth,level) geometry_cargo={} geometry_cargo['this'] = IceRayCpp.GeometryRTSSGeometry( ) geometry_cargo['list'] = IceRayCpp.GeometryRTSSList() geometry_cargo['this'].rtss( geometry_cargo['list'] ) if( True ): objectPlate_cargo = {} objectPlate_cargo['this'] = IceRayCpp.CoreObject() objectPlate_cargo['geometry'] = IceRayCpp.GeometrySimpleBox( lo, hi ) objectPlate_cargo['this'].geometry( objectPlate_cargo['geometry'] ); objectPlate_cargo['this'].surface( P_surfaceRoom['floor']['this'] ); objectPlate_cargo['surface'] = P_surfaceRoom['floor'] geometry_cargo['this'].push( objectPlate_cargo['this'] ) geometry_cargo['plate'] = 
objectPlate_cargo objectExponat_cargo = {} objectExponat_cargo['geometry'] = P_geometry['this'] objectExponat_cargo['this'] = IceRayCpp.CoreObject() objectExponat_cargo['this'].geometry( objectExponat_cargo['geometry'] ); objectExponat_cargo['this'].surface( P_surfaceExponat['this'] ); objectExponat_cargo['surface'] = P_surfaceExponat #objectExponat_cargo['this'].medium( P_medium['this'] ); #objectExponat_cargo['medium'] = P_medium geometry_cargo['this'].push( objectExponat_cargo['this'] ) geometry_cargo['exponat'] = objectExponat_cargo object = IceRayCpp.CoreObject() object.geometry( geometry_cargo['this'] ) #geometry_cargo['medium'] = material.medium.linear.make( ) #object.medium( geometry_cargo['medium']['this'] ); P_light['this'].barrier( geometry_cargo['this'] ) return { 'this': object, 'geometry': geometry_cargo } def cube( P_geometry, P_light, P_surfaceExponat, P_surfaceRoom, P_medium = material.medium.linear.make() ): I_size = 6 / 2 geometry_cargo={} geometry_cargo['this'] = IceRayCpp.GeometryRTSSGeometry( ) geometry_cargo['list'] = IceRayCpp.GeometryRTSSList() geometry_cargo['this'].rtss( geometry_cargo['list'] ) if( True ): objectBox_cargo = {} objectBox_cargo['this'] = IceRayCpp.CoreObject() objectBox_cargo['geometry'] = IceRayCpp.GeometrySimpleBox( IceRayCpp.MathTypeCoord3D().fill( -I_size ), IceRayCpp.MathTypeCoord3D().fill( I_size ) ) objectBox_cargo['this'].geometry( objectBox_cargo['geometry'] ); objectBox_cargo['this'].surface( P_surfaceRoom['box']['this'] ); objectBox_cargo['surface'] = P_surfaceRoom['box'] geometry_cargo['this'].push( objectBox_cargo['this'] ) geometry_cargo['box'] = objectBox_cargo objectExponat_cargo = {} objectExponat_cargo['geometry'] = P_geometry['this'] objectExponat_cargo['this'] = IceRayCpp.CoreObject() objectExponat_cargo['this'].geometry( objectExponat_cargo['geometry'] ); objectExponat_cargo['this'].surface( P_surfaceExponat['this'] ); objectExponat_cargo['surface'] = P_surfaceExponat 
#objectExponat_cargo['this'].medium( P_medium['this'] ); #objectExponat_cargo['medium'] = P_medium geometry_cargo['this'].push( objectExponat_cargo['this'] ) geometry_cargo['exponat'] = objectExponat_cargo object = IceRayCpp.CoreObject() object.geometry( geometry_cargo['this'] ) #geometry_cargo['medium'] = material.medium.linear.make( ) #object.medium( geometry_cargo['medium']['this'] ); P_light['this'].barrier( P_geometry['this'] ) return { 'this': object, 'geometry': geometry_cargo } def cylinder( P_geometry, P_light, P_surfaceExponat, P_surfaceRoom, P_medium = material.medium.linear.make() ): radius = 5 geometry_cargo={} geometry_cargo['this'] = IceRayCpp.GeometryRTSSGeometry( ) geometry_cargo['list'] = IceRayCpp.GeometryRTSSList() geometry_cargo['this'].rtss( geometry_cargo['list'] ) if( True ): objectFloor_cargo = {} objectFloor_cargo['this'] = IceRayCpp.CoreObject() objectFloor_cargo['geometry'] = IceRayCpp.GeometrySimpleDisc( IceRayCpp.MathTypeCoord3D().load( 0, 0, -radius ), IceRayCpp.MathTypeCoord3D().load( 0,0,1 ), radius ) objectFloor_cargo['this'].geometry( objectFloor_cargo['geometry'] ); objectFloor_cargo['this'].surface( P_surfaceRoom['floor']['this'] ); objectFloor_cargo['surface'] = P_surfaceRoom['floor'] geometry_cargo['this'].push( objectFloor_cargo['this'] ) geometry_cargo['floor'] = objectFloor_cargo if( True ): objectCeil_cargo = {} objectCeil_cargo['this'] = IceRayCpp.CoreObject() objectCeil_cargo['geometry'] = IceRayCpp.GeometrySimpleDisc( IceRayCpp.MathTypeCoord3D().load( 0, 0, radius), IceRayCpp.MathTypeCoord3D().load( 0,0,1 ), radius ) objectCeil_cargo['this'].geometry( objectCeil_cargo['geometry'] ); objectCeil_cargo['this'].surface( P_surfaceRoom['ceil']['this'] ); objectCeil_cargo['surface'] = P_surfaceRoom['ceil'] geometry_cargo['this'].push( objectCeil_cargo['this'] ) geometry_cargo['ceil'] = objectCeil_cargo if( True ): objectBuble_cargo = {} objectBuble_cargo['this'] = IceRayCpp.CoreObject() objectBuble_cargo['geometry'] = 
IceRayCpp.GeometrySimpleCylinder( radius, 2*radius ) objectBuble_cargo['this'].geometry( objectBuble_cargo['geometry'] ); objectBuble_cargo['this'].surface( P_surfaceRoom['fence']['this'] ); objectBuble_cargo['surface'] = P_surfaceRoom['fence'] geometry_cargo['this'].push( objectBuble_cargo['this'] ) geometry_cargo['fence'] = objectBuble_cargo if( False ): objectPlusX_cargo = {} objectPlusX_cargo['this'] = IceRayCpp.CoreObject() objectPlusX_cargo['geometry'] = IceRayCpp.GeometrySimpleBox( IceRayCpp.MathTypeCoord3D().load( 1.9,-0.4,-0.4), IceRayCpp.MathTypeCoord3D().load(2,0.4,0.4) ) objectPlusX_cargo['this'].geometry( objectPlusX_cargo['geometry'] ); objectPlusX_cargo['this'].surface( P_surfaceRoom['left']['this'] ); objectPlusX_cargo['surface'] = P_surfaceRoom['left'] geometry_cargo['this'].push( objectPlusX_cargo['this'] ) geometry_cargo['plusX'] = objectPlusX_cargo objectExponat_cargo = {} objectExponat_cargo['geometry'] = P_geometry['this'] objectExponat_cargo['this'] = IceRayCpp.CoreObject() objectExponat_cargo['this'].geometry( objectExponat_cargo['geometry'] ); objectExponat_cargo['this'].surface( P_surfaceExponat['this'] ); objectExponat_cargo['surface'] = P_surfaceExponat #objectExponat_cargo['this'].medium( P_medium['this'] ); #objectExponat_cargo['medium'] = P_medium geometry_cargo['this'].push( objectExponat_cargo['this'] ) geometry_cargo['exponat'] = objectExponat_cargo object = IceRayCpp.CoreObject() object.geometry( geometry_cargo['this'] ) #geometry_cargo['medium'] = material.medium.linear.make( ) #object.medium( geometry_cargo['medium']['this'] ); P_light['this'].barrier( P_geometry['this'] ) return { 'this': object, 'geometry': geometry_cargo } def sphere( P_geometry, P_light, P_surfaceExponat, P_surfaceRoom, P_medium = material.medium.linear.make() ): radius = 4 geometry_cargo={} geometry_cargo['this'] = IceRayCpp.GeometryRTSSGeometry( ) geometry_cargo['list'] = IceRayCpp.GeometryRTSSList() geometry_cargo['this'].rtss( geometry_cargo['list'] ) 
if( True ): objectBuble_cargo = {} objectBuble_cargo['this'] = IceRayCpp.CoreObject() objectBuble_cargo['geometry'] = IceRayCpp.GeometrySimpleSphere( IceRayCpp.MathTypeCoord3D().fill( 0 ), radius ) objectBuble_cargo['this'].geometry( objectBuble_cargo['geometry'] ); objectBuble_cargo['this'].surface( P_surfaceRoom['bubble']['this'] ); objectBuble_cargo['surface'] = P_surfaceRoom['bubble'] geometry_cargo['this'].push( objectBuble_cargo['this'] ) geometry_cargo['bubble'] = objectBuble_cargo decoration.coordinat_system( geometry_cargo, P_surfaceRoom, 3, 0.2, ) objectExponat_cargo = {} objectExponat_cargo['geometry'] = P_geometry['this'] objectExponat_cargo['this'] = IceRayCpp.CoreObject() objectExponat_cargo['this'].geometry( objectExponat_cargo['geometry'] ); objectExponat_cargo['this'].surface( P_surfaceExponat['this'] ); objectExponat_cargo['surface'] = P_surfaceExponat #objectExponat_cargo['this'].medium( P_medium['this'] ); #objectExponat_cargo['medium'] = P_medium geometry_cargo['this'].push( objectExponat_cargo['this'] ) geometry_cargo['exponat'] = objectExponat_cargo object = IceRayCpp.CoreObject() object.geometry( geometry_cargo['this'] ) #geometry_cargo['medium'] = material.medium.linear.make( ) #object.medium( geometry_cargo['medium']['this'] ); P_light['this'].barrier( geometry_cargo['this'] ) return { 'this': object, 'geometry': geometry_cargo } def cornell_close( P_geometry, P_light, P_surfaceExponat, P_surfaceRoom, P_medium = material.medium.linear.make() ): move = [ 0,0, 5 ] I_dimension = [ 12, 12, 12 ] lo = IceRayCpp.MathTypeCoord3D().load(123,123,123) lo[0] = -I_dimension[0]/2 + move[0] lo[1] = -I_dimension[1]/2 + move[1] lo[2] = -I_dimension[2]/2 + move[2] hi = IceRayCpp.MathTypeCoord3D().load(123,123,123) hi[0] = +I_dimension[0]/2 + move[0] hi[1] = +I_dimension[1]/2 + move[1] hi[2] = +I_dimension[2]/2 + move[2] wall = 0.1 geometry_cargo={} geometry_cargo['this'] = IceRayCpp.GeometryRTSSGeometry( ) geometry_cargo['list'] = 
IceRayCpp.GeometryRTSSList() geometry_cargo['this'].rtss( geometry_cargo['list'] ) if( True ): objectFloor_cargo = {} objectFloor_cargo['this'] = IceRayCpp.CoreObject() objectFloor_cargo['geometry'] = IceRayCpp.GeometrySimpleBox( IceRayCpp.MathTypeCoord3D().load( lo[0], lo[1], lo[2]-wall ), IceRayCpp.MathTypeCoord3D().load( hi[0], hi[0], lo[2] ) ) objectFloor_cargo['this'].geometry( objectFloor_cargo['geometry'] ); objectFloor_cargo['this'].surface( P_surfaceRoom['floor']['this'] ); objectFloor_cargo['surface'] = P_surfaceRoom['floor'] geometry_cargo['this'].push( objectFloor_cargo['this'] ) geometry_cargo['floor'] = objectFloor_cargo if( True ): objectCeil_cargo = {} objectCeil_cargo['this'] = IceRayCpp.CoreObject() objectCeil_cargo['geometry'] = IceRayCpp.GeometrySimpleBox( IceRayCpp.MathTypeCoord3D().load( lo[0], lo[1], hi[2] ), IceRayCpp.MathTypeCoord3D().load( hi[0], hi[1], hi[2] + wall ) ) objectCeil_cargo['this'].geometry( objectCeil_cargo['geometry'] ); objectCeil_cargo['this'].surface( P_surfaceRoom['ceil']['this'] ); objectCeil_cargo['surface'] = P_surfaceRoom['ceil'] geometry_cargo['this'].push( objectCeil_cargo['this'] ) geometry_cargo['ceil'] = objectCeil_cargo if( True ): objectBg_cargo = {} objectBg_cargo['this'] = IceRayCpp.CoreObject() objectBg_cargo['geometry'] = IceRayCpp.GeometrySimpleBox( IceRayCpp.MathTypeCoord3D().load( lo[0], lo[1]-wall, lo[2]), IceRayCpp.MathTypeCoord3D().load( hi[0], lo[1], hi[2]) ) objectBg_cargo['this'].geometry( objectBg_cargo['geometry'] ); objectBg_cargo['this'].surface( P_surfaceRoom['background']['this'] ); objectBg_cargo['surface'] = P_surfaceRoom['background'] geometry_cargo['this'].push( objectBg_cargo['this'] ) geometry_cargo['background'] = objectBg_cargo if( True ): objectFg_cargo = {} objectFg_cargo['this'] = IceRayCpp.CoreObject() objectFg_cargo['geometry'] = IceRayCpp.GeometrySimpleBox( IceRayCpp.MathTypeCoord3D().load( lo[0], hi[1], lo[2] ), IceRayCpp.MathTypeCoord3D().load( hi[0], hi[1] + wall, hi[2] ) ) 
objectFg_cargo['this'].geometry( objectFg_cargo['geometry'] ); objectFg_cargo['this'].surface( P_surfaceRoom['foreground']['this'] ); objectFg_cargo['surface'] = P_surfaceRoom['foreground'] geometry_cargo['this'].push( objectFg_cargo['this'] ) geometry_cargo['foreground'] = objectFg_cargo if( True ): objectLeft_cargo = {} objectLeft_cargo['this'] = IceRayCpp.CoreObject() objectLeft_cargo['geometry'] = IceRayCpp.GeometrySimpleBox( IceRayCpp.MathTypeCoord3D().load(lo[0]-wall, lo[1], lo[2]), IceRayCpp.MathTypeCoord3D().load(lo[0], hi[1], hi[2]) ) objectLeft_cargo['this'].geometry( objectLeft_cargo['geometry'] ); objectLeft_cargo['this'].surface( P_surfaceRoom['left']['this'] ); objectLeft_cargo['surface'] = P_surfaceRoom['left'] geometry_cargo['this'].push( objectLeft_cargo['this'] ) geometry_cargo['left'] = objectLeft_cargo if( True ): objectRight_cargo = {} objectRight_cargo['this'] = IceRayCpp.CoreObject() objectRight_cargo['geometry'] = IceRayCpp.GeometrySimpleBox( IceRayCpp.MathTypeCoord3D().load( hi[0], lo[1], lo[2] ), IceRayCpp.MathTypeCoord3D().load( hi[0]+ wall,hi[1], hi[2] ) ) objectRight_cargo['this'].geometry( objectRight_cargo['geometry'] ); objectRight_cargo['this'].surface( P_surfaceRoom['right']['this'] ); objectRight_cargo['surface'] = P_surfaceRoom['right'] geometry_cargo['this'].push( objectRight_cargo['this'] ) geometry_cargo['right'] = objectRight_cargo #decoration.coordinat_system( geometry_cargo, P_surfaceRoom, 2, 0.1 ) if( 'light' in P_surfaceRoom ): objectLight_cargo = {} objectLight_cargo['this'] = IceRayCpp.CoreObject() objectLight_cargo['geometry'] = IceRayCpp.GeometrySimpleBox( IceRayCpp.MathTypeCoord3D().load( -0.25-2,-0.25-2,2.9-1), IceRayCpp.MathTypeCoord3D().load(0.25-2,0.25-2,3-1) ) objectLight_cargo['this'].geometry( objectLight_cargo['geometry'] ); objectLight_cargo['this'].surface( P_surfaceRoom['light']['this'] ); objectLight_cargo['surface'] = P_surfaceRoom['left'] geometry_cargo['this'].push( objectLight_cargo['this'] ) 
geometry_cargo['light'] = objectLight_cargo objectExponat_cargo = {} objectExponat_cargo['geometry'] = P_geometry['this'] objectExponat_cargo['this'] = IceRayCpp.CoreObject() objectExponat_cargo['this'].geometry( objectExponat_cargo['geometry'] ); objectExponat_cargo['this'].surface( P_surfaceExponat['this'] ); objectExponat_cargo['surface'] = P_surfaceExponat #objectExponat_cargo['this'].medium( P_medium['this'] ); #objectExponat_cargo['medium'] = P_medium geometry_cargo['this'].push( objectExponat_cargo['this'] ) P_light['this'].barrier( geometry_cargo['this'] ) geometry_cargo['exponat'] = objectExponat_cargo object = IceRayCpp.CoreObject() object.geometry( geometry_cargo['this'] ) #geometry_cargo['medium'] = material.medium.linear.make( ) #object.medium( geometry_cargo['medium']['this'] ); return { 'this': object, 'geometry': geometry_cargo } def cornell_open( P_geometry, P_light, P_surfaceExponat, P_surfaceRoom, P_medium = material.medium.linear.make() ): move = [ 0, 0, (1+1+1 + (math.sqrt(5)-1)/2 )/2-1 ] I_dimension= [ 5, 5, (1+1+1 + (math.sqrt(5)-1)/2 ) ] I_dimension= [ 4*((math.sqrt(5)-1)/2+1), 4*((math.sqrt(5)-1)/2+1), 4 ] move = [ 0, 0, I_dimension[2]/2 - 1 ] lo = IceRayCpp.MathTypeCoord3D().load(123,123,123) lo[0] = -I_dimension[0]/2 + move[0] lo[1] = -I_dimension[1]/2 + move[1] lo[2] = -I_dimension[2]/2 + move[2] hi = IceRayCpp.MathTypeCoord3D().load(123,123,123) hi[0] = +I_dimension[0]/2 + move[0] hi[1] = +I_dimension[1]/2 + move[1] hi[2] = +I_dimension[2]/2 + move[2] wall = 0.1 geometry_cargo={} geometry_cargo['this'] = IceRayCpp.GeometryRTSSGeometry( ) geometry_cargo['list'] = IceRayCpp.GeometryRTSSList() geometry_cargo['this'].rtss( geometry_cargo['list'] ) if( True ): objectFloor_cargo = {} objectFloor_cargo['this'] = IceRayCpp.CoreObject() objectFloor_cargo['geometry'] = IceRayCpp.GeometrySimpleBox( IceRayCpp.MathTypeCoord3D().load( lo[0], lo[1], lo[2]-wall ), IceRayCpp.MathTypeCoord3D().load( hi[0], hi[0], lo[2] ) ) 
objectFloor_cargo['this'].geometry( objectFloor_cargo['geometry'] ); objectFloor_cargo['this'].surface( P_surfaceRoom['floor']['this'] ); objectFloor_cargo['surface'] = P_surfaceRoom['floor'] geometry_cargo['this'].push( objectFloor_cargo['this'] ) geometry_cargo['floor'] = objectFloor_cargo if( True ): objectCeil_cargo = {} objectCeil_cargo['this'] = IceRayCpp.CoreObject() objectCeil_cargo['geometry'] = IceRayCpp.GeometrySimpleBox( IceRayCpp.MathTypeCoord3D().load( lo[0], lo[1], hi[2] ), IceRayCpp.MathTypeCoord3D().load( hi[0], hi[1], hi[2] + wall ) ) objectCeil_cargo['this'].geometry( objectCeil_cargo['geometry'] ); objectCeil_cargo['this'].surface( P_surfaceRoom['ceil']['this'] ); objectCeil_cargo['surface'] = P_surfaceRoom['ceil'] geometry_cargo['this'].push( objectCeil_cargo['this'] ) geometry_cargo['ceil'] = objectCeil_cargo if( True ): objectBg_cargo = {} objectBg_cargo['this'] = IceRayCpp.CoreObject() objectBg_cargo['geometry'] = IceRayCpp.GeometrySimpleBox( IceRayCpp.MathTypeCoord3D().load( lo[0], lo[1]-wall, lo[2]), IceRayCpp.MathTypeCoord3D().load( hi[0], lo[1], hi[2]) ) objectBg_cargo['this'].geometry( objectBg_cargo['geometry'] ); objectBg_cargo['this'].surface( P_surfaceRoom['background']['this'] ); objectBg_cargo['surface'] = P_surfaceRoom['background'] geometry_cargo['this'].push( objectBg_cargo['this'] ) geometry_cargo['background'] = objectBg_cargo if( False ): objectFg_cargo = {} objectFg_cargo['this'] = IceRayCpp.CoreObject() objectFg_cargo['geometry'] = IceRayCpp.GeometrySimpleBox( IceRayCpp.MathTypeCoord3D().load( lo[0], hi[1], lo[2] ), IceRayCpp.MathTypeCoord3D().load( hi[0], hi[1] + wall, hi[2] ) ) objectFg_cargo['this'].geometry( objectFg_cargo['geometry'] ); objectFg_cargo['this'].surface( P_surfaceRoom['foreground']['this'] ); objectFg_cargo['surface'] = P_surfaceRoom['foreground'] geometry_cargo['this'].push( objectFg_cargo['this'] ) geometry_cargo['foreground'] = objectFg_cargo if( True ): objectLeft_cargo = {} objectLeft_cargo['this'] 
= IceRayCpp.CoreObject() objectLeft_cargo['geometry'] = IceRayCpp.GeometrySimpleBox( IceRayCpp.MathTypeCoord3D().load(lo[0]-wall, lo[1], lo[2]), IceRayCpp.MathTypeCoord3D().load(lo[0], hi[1], hi[2]) ) objectLeft_cargo['this'].geometry( objectLeft_cargo['geometry'] ); objectLeft_cargo['this'].surface( P_surfaceRoom['left']['this'] ); objectLeft_cargo['surface'] = P_surfaceRoom['left'] geometry_cargo['this'].push( objectLeft_cargo['this'] ) geometry_cargo['left'] = objectLeft_cargo if( True ): objectRight_cargo = {} objectRight_cargo['this'] = IceRayCpp.CoreObject() objectRight_cargo['geometry'] = IceRayCpp.GeometrySimpleBox( IceRayCpp.MathTypeCoord3D().load( hi[0], lo[1], lo[2] ), IceRayCpp.MathTypeCoord3D().load( hi[0]+ wall,hi[1], hi[2] ) ) objectRight_cargo['this'].geometry( objectRight_cargo['geometry'] ); objectRight_cargo['this'].surface( P_surfaceRoom['right']['this'] ); objectRight_cargo['surface'] = P_surfaceRoom['right'] geometry_cargo['this'].push( objectRight_cargo['this'] ) geometry_cargo['right'] = objectRight_cargo #decoration.coordinat_system( geometry_cargo, P_surfaceRoom, 2, 0.1 ) if( 'light' in P_surfaceRoom ): objectLight_cargo = {} objectLight_cargo['this'] = IceRayCpp.CoreObject() objectLight_cargo['geometry'] = IceRayCpp.GeometrySimpleBox( IceRayCpp.MathTypeCoord3D().load( -0.25-2,-0.25-2,2.9-1), IceRayCpp.MathTypeCoord3D().load(0.25-2,0.25-2,3-1) ) objectLight_cargo['this'].geometry( objectLight_cargo['geometry'] ); objectLight_cargo['this'].surface( P_surfaceRoom['light']['this'] ); objectLight_cargo['surface'] = P_surfaceRoom['left'] geometry_cargo['this'].push( objectLight_cargo['this'] ) geometry_cargo['light'] = objectLight_cargo objectExponat_cargo = {} objectExponat_cargo['geometry'] = P_geometry['this'] objectExponat_cargo['this'] = IceRayCpp.CoreObject() objectExponat_cargo['this'].geometry( objectExponat_cargo['geometry'] ); objectExponat_cargo['this'].surface( P_surfaceExponat['this'] ); objectExponat_cargo['surface'] = 
P_surfaceExponat #objectExponat_cargo['this'].medium( P_medium['this'] ); #objectExponat_cargo['medium'] = P_medium geometry_cargo['this'].push( objectExponat_cargo['this'] ) P_light['this'].barrier( geometry_cargo['this'] ) geometry_cargo['exponat'] = objectExponat_cargo object = IceRayCpp.CoreObject() object.geometry( geometry_cargo['this'] ) #geometry_cargo['medium'] = material.medium.linear.make( ) #object.medium( geometry_cargo['medium']['this'] ); return { 'this': object, 'geometry': geometry_cargo } def tetra( P_geometry, P_light, P_surfaceExponat, P_surfaceRoom, P_medium = material.medium.linear.make() ): radius = 12 geometry_cargo={} geometry_cargo['this'] = IceRayCpp.GeometryRTSSGeometry( ) geometry_cargo['list'] = IceRayCpp.GeometryRTSSList() geometry_cargo['this'].rtss( geometry_cargo['list'] ) v0 = radius * IceRayCpp.MathTypeCoord3D().load(0,0,1); v1 = radius * IceRayCpp.MathTypeCoord3D().load( 2*math.sqrt(2)/3, 0, -0.3333333 ); v2 = radius * IceRayCpp.MathTypeCoord3D().load( - math.sqrt(2)/3, math.sqrt(0.666666), -0.3333333 ); v3 = radius * IceRayCpp.MathTypeCoord3D().load( - math.sqrt(2)/3, -math.sqrt(0.666666), -0.3333333 ); if( True ): objectSide0_cargo = {} objectSide0_cargo['this'] = IceRayCpp.CoreObject() objectSide0_cargo['geometry'] = IceRayCpp.GeometrySimpleTriangle( v0, v1, v2 ) objectSide0_cargo['this'].geometry( objectSide0_cargo['geometry'] ); objectSide0_cargo['this'].surface( P_surfaceRoom['Side0']['this'] ); objectSide0_cargo['surface'] = P_surfaceRoom['Side0'] geometry_cargo['this'].push( objectSide0_cargo['this'] ) geometry_cargo['Side0'] = objectSide0_cargo if( True ): objectSide1_cargo = {} objectSide1_cargo['this'] = IceRayCpp.CoreObject() objectSide1_cargo['geometry'] = IceRayCpp.GeometrySimpleTriangle( v0, v1, v3 ) objectSide1_cargo['this'].geometry( objectSide1_cargo['geometry'] ); objectSide1_cargo['this'].surface( P_surfaceRoom['Side1']['this'] ); objectSide1_cargo['surface'] = P_surfaceRoom['Side1'] 
geometry_cargo['this'].push( objectSide1_cargo['this'] ) geometry_cargo['Side1'] = objectSide1_cargo if( True ): objectSide2_cargo = {} objectSide2_cargo['this'] = IceRayCpp.CoreObject() objectSide2_cargo['geometry'] = IceRayCpp.GeometrySimpleTriangle( v0, v2, v3 ) objectSide2_cargo['this'].geometry( objectSide2_cargo['geometry'] ); objectSide2_cargo['this'].surface( P_surfaceRoom['Side2']['this'] ); objectSide2_cargo['surface'] = P_surfaceRoom['Side2'] geometry_cargo['this'].push( objectSide2_cargo['this'] ) geometry_cargo['Side2'] = objectSide2_cargo if( True ): objectSide3_cargo = {} objectSide3_cargo['this'] = IceRayCpp.CoreObject() objectSide3_cargo['geometry'] = IceRayCpp.GeometrySimpleTriangle( v1, v2, v3 ) objectSide3_cargo['this'].geometry( objectSide3_cargo['geometry'] ); objectSide3_cargo['this'].surface( P_surfaceRoom['Side3']['this'] ); objectSide3_cargo['surface'] = P_surfaceRoom['Side3'] geometry_cargo['this'].push( objectSide3_cargo['this'] ) geometry_cargo['Side3'] = objectSide3_cargo decoration.coordinat_system( geometry_cargo, P_surfaceRoom, 3, 0.2 ) objectExponat_cargo = {} objectExponat_cargo['geometry'] = P_geometry['this'] objectExponat_cargo['this'] = IceRayCpp.CoreObject() objectExponat_cargo['this'].geometry( objectExponat_cargo['geometry'] ); objectExponat_cargo['this'].surface( P_surfaceExponat['this'] ); objectExponat_cargo['surface'] = P_surfaceExponat #objectExponat_cargo['this'].medium( P_medium['this'] ); #objectExponat_cargo['medium'] = P_medium geometry_cargo['this'].push( objectExponat_cargo['this'] ) geometry_cargo['exponat'] = objectExponat_cargo object = IceRayCpp.CoreObject() object.geometry( geometry_cargo['this'] ) #geometry_cargo['medium'] = material.medium.linear.make( ) #object.medium( geometry_cargo['medium']['this'] ); P_light['this'].barrier( geometry_cargo['this'] ) return { 'this': object, 'geometry': geometry_cargo } def octa( P_geometry, P_light, P_surfaceExponat, P_surfaceRoom, P_medium = 
material.medium.linear.make() ): #P_light['this'].barrier( P_geometry['this'] ) radius = 5 geometry_cargo={} geometry_cargo['this'] = IceRayCpp.GeometryRTSSGeometry( ) geometry_cargo['list'] = IceRayCpp.GeometryRTSSList() geometry_cargo['this'].rtss( geometry_cargo['list'] ) vXp = radius * IceRayCpp.MathTypeCoord3D().load( +1, 0, 0 ); vXm = radius * IceRayCpp.MathTypeCoord3D().load( -1, 0, 0 ); vYm = radius * IceRayCpp.MathTypeCoord3D().load( 0, +1, 0 ); vYp = radius * IceRayCpp.MathTypeCoord3D().load( 0, -1, 0 ); vZp = radius * IceRayCpp.MathTypeCoord3D().load( 0, 0, +1 ); vZm = radius * IceRayCpp.MathTypeCoord3D().load( 0, 0, -1 ); if( True ): # vZp, vXp, vYp objectSide0_cargo = {} objectSide0_cargo['this'] = IceRayCpp.CoreObject() objectSide0_cargo['geometry'] = IceRayCpp.GeometrySimpleTriangle( vZp, vXp, vYp ) objectSide0_cargo['this'].geometry( objectSide0_cargo['geometry'] ); objectSide0_cargo['this'].surface( P_surfaceRoom['Side0']['this'] ); objectSide0_cargo['surface'] = P_surfaceRoom['Side0'] geometry_cargo['this'].push( objectSide0_cargo['this'] ) geometry_cargo['Side0'] = objectSide0_cargo if( True ): # vZp, vXm, vYm objectSide1_cargo = {} objectSide1_cargo['this'] = IceRayCpp.CoreObject() objectSide1_cargo['geometry'] = IceRayCpp.GeometrySimpleTriangle( vZp, vXm, vYm ) objectSide1_cargo['this'].geometry( objectSide1_cargo['geometry'] ); objectSide1_cargo['this'].surface( P_surfaceRoom['Side1']['this'] ); objectSide1_cargo['surface'] = P_surfaceRoom['Side1'] geometry_cargo['this'].push( objectSide1_cargo['this'] ) geometry_cargo['Side1'] = objectSide1_cargo if( True ): # vZp, vXp, vYm objectSide2_cargo = {} objectSide2_cargo['this'] = IceRayCpp.CoreObject() objectSide2_cargo['geometry'] = IceRayCpp.GeometrySimpleTriangle( vZp, vXp, vYm ) objectSide2_cargo['this'].geometry( objectSide2_cargo['geometry'] ); objectSide2_cargo['this'].surface( P_surfaceRoom['Side2']['this'] ); objectSide2_cargo['surface'] = P_surfaceRoom['Side2'] 
geometry_cargo['this'].push( objectSide2_cargo['this'] ) geometry_cargo['Side2'] = objectSide2_cargo if( True ): # vZp, vYp, vXm objectSide3_cargo = {} objectSide3_cargo['this'] = IceRayCpp.CoreObject() objectSide3_cargo['geometry'] = IceRayCpp.GeometrySimpleTriangle( vZp, vYp, vXm ) objectSide3_cargo['this'].geometry( objectSide3_cargo['geometry'] ); objectSide3_cargo['this'].surface( P_surfaceRoom['Side3']['this'] ); objectSide3_cargo['surface'] = P_surfaceRoom['Side3'] geometry_cargo['this'].push( objectSide3_cargo['this'] ) geometry_cargo['Side3'] = objectSide3_cargo if( True ): # vZm, vXp, vYp objectSide4_cargo = {} objectSide4_cargo['this'] = IceRayCpp.CoreObject() objectSide4_cargo['geometry'] = IceRayCpp.GeometrySimpleTriangle( vZm, vXp, vYp ) objectSide4_cargo['this'].geometry( objectSide4_cargo['geometry'] ); objectSide4_cargo['this'].surface( P_surfaceRoom['Side4']['this'] ); objectSide4_cargo['surface'] = P_surfaceRoom['Side4'] geometry_cargo['this'].push( objectSide4_cargo['this'] ) geometry_cargo['Side4'] = objectSide4_cargo if( True ): # vZm, vXm, vYm objectSide5_cargo = {} objectSide5_cargo['this'] = IceRayCpp.CoreObject() objectSide5_cargo['geometry'] = IceRayCpp.GeometrySimpleTriangle( vZm, vXm, vYm ) objectSide5_cargo['this'].geometry( objectSide5_cargo['geometry'] ); objectSide5_cargo['this'].surface( P_surfaceRoom['Side5']['this'] ); objectSide5_cargo['surface'] = P_surfaceRoom['Side5'] geometry_cargo['this'].push( objectSide5_cargo['this'] ) geometry_cargo['Side5'] = objectSide5_cargo if( True ): # vZm, vYp, vXm objectSide6_cargo = {} objectSide6_cargo['this'] = IceRayCpp.CoreObject() objectSide6_cargo['geometry'] = IceRayCpp.GeometrySimpleTriangle( vZm, vXp, vYm ) objectSide6_cargo['this'].geometry( objectSide6_cargo['geometry'] ); objectSide6_cargo['this'].surface( P_surfaceRoom['Side6']['this'] ); objectSide6_cargo['surface'] = P_surfaceRoom['Side6'] geometry_cargo['this'].push( objectSide6_cargo['this'] ) geometry_cargo['Side6'] = 
objectSide6_cargo if( True ): # vZm, vYp, vXm objectSide7_cargo = {} objectSide7_cargo['this'] = IceRayCpp.CoreObject() objectSide7_cargo['geometry'] = IceRayCpp.GeometrySimpleTriangle( vZm, vYp, vXm ) objectSide7_cargo['this'].geometry( objectSide7_cargo['geometry'] ); objectSide7_cargo['this'].surface( P_surfaceRoom['Side7']['this'] ); objectSide7_cargo['surface'] = P_surfaceRoom['Side7'] geometry_cargo['this'].push( objectSide7_cargo['this'] ) geometry_cargo['Side7'] = objectSide7_cargo decoration.coordinat_system( geometry_cargo, P_surfaceRoom, 3, 0.2 ) objectExponat_cargo = {} objectExponat_cargo['geometry'] = P_geometry['this'] objectExponat_cargo['this'] = IceRayCpp.CoreObject() objectExponat_cargo['this'].geometry( objectExponat_cargo['geometry'] ); objectExponat_cargo['this'].surface( P_surfaceExponat['this'] ); objectExponat_cargo['surface'] = P_surfaceExponat #objectExponat_cargo['this'].medium( P_medium['this'] ); #objectExponat_cargo['medium'] = P_medium geometry_cargo['this'].push( objectExponat_cargo['this'] ) geometry_cargo['exponat'] = objectExponat_cargo object = IceRayCpp.CoreObject() object.geometry( geometry_cargo['this'] ) #geometry_cargo['medium'] = material.medium.linear.make( ) #object.medium( geometry_cargo['medium']['this'] ); P_light['this'].barrier( geometry_cargo['this'] ) return { 'this': object, 'geometry': geometry_cargo } def _icosahedron_triangle( P_surface, P_p0, P_p1, P_p2 ): object_cargo = {} object_cargo['this'] = IceRayCpp.CoreObject() object_cargo['geometry'] = IceRayCpp.GeometrySimpleTriangle( P_p0, P_p1, P_p2 ) object_cargo['this'].geometry( object_cargo['geometry'] ); object_cargo['this'].surface( P_surface['this'] ); object_cargo['surface'] = P_surface return object_cargo def icosa( P_geometry, P_light, P_surfaceExponat, P_surfaceRoom, P_medium = material.medium.linear.make() ): radius = 5 a = radius / math.sin( math.radians(72) ) height = math.sqrt( 0.2 ) width = math.sqrt( 1- height*height) move = 36 top = radius * 
IceRayCpp.MathTypeCoord3D().load(0, 0, +1) bottom = radius * IceRayCpp.MathTypeCoord3D().load(0, 0, -1) up = [0,0,0,0,0] up[0] = radius * IceRayCpp.MathTypeCoord3D().load( width * math.cos( math.radians(0 * 72) ), width * math.sin( math.radians(0 * 72)), height ) up[1] = radius * IceRayCpp.MathTypeCoord3D().load( width * math.cos( math.radians(1 * 72) ), width * math.sin( math.radians(1 * 72)), height ) up[2] = radius * IceRayCpp.MathTypeCoord3D().load( width * math.cos( math.radians(2 * 72) ), width * math.sin( math.radians(2 * 72)), height ) up[3] = radius * IceRayCpp.MathTypeCoord3D().load( width * math.cos( math.radians(3 * 72) ), width * math.sin( math.radians(3 * 72)), height ) up[4] = radius * IceRayCpp.MathTypeCoord3D().load( width * math.cos( math.radians(4 * 72) ), width * math.sin( math.radians(4 * 72)), height ) down=[0,0,0,0,0] down[0] = radius * IceRayCpp.MathTypeCoord3D().load( width * math.cos( math.radians(move + 0 * 72) ), width * math.sin(math.radians(move + 0 * 72)), -height ) down[1] = radius * IceRayCpp.MathTypeCoord3D().load( width * math.cos( math.radians(move + 1 * 72) ), width * math.sin(math.radians(move + 1 * 72)), -height ) down[2] = radius * IceRayCpp.MathTypeCoord3D().load( width * math.cos( math.radians(move + 2 * 72) ), width * math.sin(math.radians(move + 2 * 72)), -height ) down[3] = radius * IceRayCpp.MathTypeCoord3D().load( width * math.cos( math.radians(move + 3 * 72) ), width * math.sin(math.radians(move + 3 * 72)), -height ) down[4] = radius * IceRayCpp.MathTypeCoord3D().load( width * math.cos( math.radians(move + 4 * 72) ), width * math.sin(math.radians(move + 4 * 72)), -height ) #print( 'Edge= ', a ); #print("Norms") #print( IceRayCpp.MathLinearVector3DLength( top ) ) #print( IceRayCpp.MathLinearVector3DLength( bottom ) ) # #for index in range(0,5) : # print( IceRayCpp.MathLinearVector3DLength( up[index] ) ) # print( IceRayCpp.MathLinearVector3DLength( down[index] ) ) # print( IceRayCpp.MathLinearVector3DLength( top - 
up[index] ) ) # print( IceRayCpp.MathLinearVector3DLength( bottom - down[index] ) ) # #for i in range(0,5) : # for j in range(0,5) : # print( str( IceRayCpp.MathLinearVector3DLength( up[i] - down[j] ) ) + " ", end='' ) # print() geometry_cargo={} geometry_cargo['this'] = IceRayCpp.GeometryRTSSGeometry( ) geometry_cargo['rtss'] = IceRayCpp.GeometryRTSSList() geometry_cargo['this'].rtss( geometry_cargo['rtss'] ) for index in range(0,5): object = _icosahedron_triangle( P_surfaceRoom['Side' + str(index)], top, up[index], up[(index+1)%5] ) geometry_cargo['this'].push( object['this'] ) geometry_cargo['Side'+ str(index)] = object object = _icosahedron_triangle( P_surfaceRoom['Side' + str(5+index)], down[index], up[index], up[(index+1)%5 ] ) geometry_cargo['this'].push( object['this'] ) geometry_cargo['Side'+ str( 5 +index)] = object object = _icosahedron_triangle( P_surfaceRoom['Side' + str(10+index)], up[(index+1)%5], down[index], down[(index+1)%5 ] ) geometry_cargo['this'].push( object['this'] ) geometry_cargo['Side'+ str( 10 +index)] = object object = _icosahedron_triangle( P_surfaceRoom['Side' + str(15+index)], bottom, down[index], down[(index+1)%5] ) geometry_cargo['this'].push( object['this'] ) geometry_cargo['Side'+ str( 15 +index)] = object decoration.coordinat_system( geometry_cargo, P_surfaceRoom, 3, 0.2 ) objectExponat_cargo = {} objectExponat_cargo['geometry'] = P_geometry['this'] objectExponat_cargo['this'] = IceRayCpp.CoreObject() objectExponat_cargo['this'].geometry( objectExponat_cargo['geometry'] ); objectExponat_cargo['this'].surface( P_surfaceExponat['this'] ); objectExponat_cargo['surface'] = P_surfaceExponat #objectExponat_cargo['this'].medium( P_medium['this'] ); #objectExponat_cargo['medium'] = P_medium geometry_cargo['this'].push( objectExponat_cargo['this'] ) geometry_cargo['exponat'] = objectExponat_cargo object = IceRayCpp.CoreObject() object.geometry( geometry_cargo['this'] ) #geometry_cargo['medium'] = material.medium.linear.make( ) 
#object.medium( geometry_cargo['medium']['this'] ); P_light['this'].barrier( geometry_cargo['this'] ) return { 'this': object, 'geometry': geometry_cargo }
47.791284
181
0.643135
4,328
41,674
5.983826
0.038124
0.100085
0.067611
0.048421
0.876747
0.849757
0.832381
0.799019
0.793073
0.773457
0
0.022154
0.204972
41,674
871
182
47.846154
0.759507
0.074771
0
0.72562
0
0
0.089191
0
0
0
0
0
0
1
0.019835
false
0
0.006612
0
0.046281
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
518df96b3b2e5e25aaf208af01035bbd019d75fc
1,800
py
Python
15_logging.py
zexhan17/Intro-to-computing-using-python
0f040a4f3cfe08b15af317cb27eafa0a1a38c573
[ "MIT" ]
null
null
null
15_logging.py
zexhan17/Intro-to-computing-using-python
0f040a4f3cfe08b15af317cb27eafa0a1a38c573
[ "MIT" ]
null
null
null
15_logging.py
zexhan17/Intro-to-computing-using-python
0f040a4f3cfe08b15af317cb27eafa0a1a38c573
[ "MIT" ]
null
null
null
def sqrt(x, guess = 1.0): if x < 0: print("Got a request for square root of negative number.") raise ValueError print("Find sqrt of {} starting with guess {}".format(x, guess)) if good_enough(guess, x): return guess else: print("Guess isn't good enough. Improve ...") new_guess = improve_guess(guess, x) return sqrt(x, new_guess) def good_enough(guess, x): print("Checking if {} is a good enough guess.".format(guess)) if abs(guess * guess - x) < 0.1: return True else: return False def avg(a, b): return (a+b)/2.0 def improve_guess(guess, x): new_guess = avg(guess, x/guess) print("Improved guess to: {}".format(new_guess)) return new_guess print sqrt(36) ### Alternatively # import logging # logging.basicConfig(level=logging.DEBUG) # DEBUG, INFO, WARN, ERROR # # # add: filename='sqrt.log' # # def sqrt(x, guess = 1.0): # if x < 0: # logging.error("Got a request for square root of negative number.") # raise ValueError # # logging.info("Find sqrt of {} starting with guess {}".format(x, guess)) # if good_enough(guess, x): # return guess # else: # logging.debug("Guess isn't good enough. Improve ...") # new_guess = improve_guess(guess, x) # return sqrt(x, new_guess) # # def good_enough(guess, x): # logging.debug("Checking if {} is a good enough guess.".format(guess)) # if abs(guess * guess - x) < 0.1: # return True # else: # return False # # # # def avg(a, b): # return (a+b)/2.0 # # def improve_guess(guess, x): # new_guess = avg(guess, x/guess) # logging.debug("Improved guess to: {}".format(new_guess)) # return new_guess # # print sqrt(36) # try sending -36
25
77
0.598333
258
1,800
4.104651
0.217054
0.067989
0.084986
0.060434
0.817753
0.817753
0.817753
0.817753
0.817753
0.78187
0
0.014993
0.258889
1,800
71
78
25.352113
0.778861
0.525
0
0.083333
0
0
0.224138
0
0
0
0
0
0
0
null
null
0
0
null
null
0.25
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
7
5c601f4a61f3df4b5d976d5a0cf53b0e3e19892e
2,351
py
Python
python/tvm/tensor_graph/testing/models/lenet.py
QinHan-Erin/AMOS
634bf48edf4015e4a69a8c32d49b96bce2b5f16f
[ "Apache-2.0" ]
22
2022-03-18T07:29:31.000Z
2022-03-23T14:54:32.000Z
python/tvm/tensor_graph/testing/models/lenet.py
QinHan-Erin/AMOS
634bf48edf4015e4a69a8c32d49b96bce2b5f16f
[ "Apache-2.0" ]
null
null
null
python/tvm/tensor_graph/testing/models/lenet.py
QinHan-Erin/AMOS
634bf48edf4015e4a69a8c32d49b96bce2b5f16f
[ "Apache-2.0" ]
2
2022-03-18T08:26:34.000Z
2022-03-20T06:02:48.000Z
from tvm.tensor_graph.nn.layers import Layer, Conv2d, BatchNorm2d, ReLU, \ AvgPool2d, GlobalAvgPool2d, Linear, Sequential from tvm.tensor_graph.nn.functional import elementwise_add class LeNet5(Layer): def __init__(self): super(LeNet5, self).__init__() self.conv1 = Conv2d(1, 6, kernel_size=5, stride=1, padding=0, bias=False) self.s2 = AvgPool2d(kernel_size=2, stride=2, padding=0) self.conv3 = Conv2d(6, 16, kernel_size=5, stride=1, padding=0, bias=False) self.s4 = AvgPool2d(kernel_size=2, stride=2, padding=0) self.conv5 = Conv2d(16, 120, kernel_size=5, stride=1, padding=0, bias=False) self.global_pool = GlobalAvgPool2d(keep_dim=False) self.fc6 = Linear(120, 84, bias=False) self.output = Linear(84, 10, bias=False) self.relu = ReLU() def forward(self, inputs): x = self.conv1(inputs) x = self.relu(x) x = self.s2(x) x = self.conv3(x) x = self.relu(x) x = self.s4(x) x = self.conv5(x) x = self.relu(x) x = self.global_pool(x) x = self.fc6(x) x = self.relu(x) x = self.output(x) return x class LeNet5Repeat(Layer): def __init__(self): super(LeNet5Repeat, self).__init__() self.conv1 = Conv2d(1, 6, kernel_size=5, stride=1, padding=0, bias=False) self.conv1_1 = Conv2d(6, 6, kernel_size=3, padding=1, bias=False) self.conv1_2 = Conv2d(6, 6, kernel_size=3, padding=1, bias=False) self.s2 = AvgPool2d(kernel_size=2, stride=2, padding=0) self.conv3 = Conv2d(6, 16, kernel_size=5, stride=1, padding=0, bias=False) self.s4 = AvgPool2d(kernel_size=2, stride=2, padding=0) self.conv5 = Conv2d(16, 120, kernel_size=5, stride=1, padding=0, bias=False) self.global_pool = GlobalAvgPool2d(keep_dim=False) self.fc6 = Linear(120, 84, bias=False) self.output = Linear(84, 10, bias=False) self.relu = ReLU() def forward(self, inputs): x = self.conv1(inputs) x = self.conv1_1(x) x = self.conv1_2(x) x = self.relu(x) x = self.s2(x) x = self.conv3(x) x = self.relu(x) x = self.s4(x) x = self.conv5(x) x = self.relu(x) x = self.global_pool(x) x = self.fc6(x) x = self.relu(x) x = self.output(x) return x def 
lenet5(): model = LeNet5() return model def lenet5_repeat(): model = LeNet5Repeat() return model
30.532468
80
0.638877
375
2,351
3.893333
0.154667
0.089041
0.090411
0.054795
0.823288
0.767123
0.767123
0.766438
0.766438
0.766438
0
0.072589
0.214802
2,351
76
81
30.934211
0.71831
0
0
0.769231
0
0
0
0
0
0
0
0
0
1
0.092308
false
0
0.030769
0
0.215385
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
5c76da6015219e3f9a15e5f895cc90081a7fda98
5,159
py
Python
Crypto-program.py
softuser25/python-text-crypter
efa4fa6747973c6590b898f4ca2c9a44bbc1dd7c
[ "MIT" ]
1
2018-05-21T06:27:49.000Z
2018-05-21T06:27:49.000Z
Crypto-program.py
softuser25/python-text-crypter
efa4fa6747973c6590b898f4ca2c9a44bbc1dd7c
[ "MIT" ]
null
null
null
Crypto-program.py
softuser25/python-text-crypter
efa4fa6747973c6590b898f4ca2c9a44bbc1dd7c
[ "MIT" ]
null
null
null
import random import string import getpass def encryption(): inputpassword1 = getpass.getpass("Password: ") password1 = ("") if password1 == inputpassword1: encrypt = input("Type something for encryption: ") def add_str(lst): _letters = ("1","2","3","4","5","6","7","8","9","0","q","w","e","r","t","z","u","i","o","p","a","s","d","f","g","h","j","k","l","y","x","c","v","b","n","m","!","#","$","%","&","/","(",")","=","?","*","+","_","-",";"," ") return [''.join(random.sample(set(_letters), 2)) + letter + ''.join(random.sample(set(_letters), 2))for letter in lst] print(''.join(add_str(encrypt))) input("") else: print("Wrong password") input("") def generate(): passwdinput2 = getpass.getpass("Password: ") passwd2 = ("") if passwdinput2 == passwd2: karakteri=("1","2","3","4","5","6","7","8","9","0","q","w","e","r","t","z","u","i","o","p","a","s","d","f","g","h","j","k","l","y","x","c","v","b","n","m","!","#","$","%","&","/","(",")","=","?","*","+","_","-",";") user_input=input("How many characters do you want?(6-15): ") if user_input == "6": print(random.choice(karakteri)+ random.choice(karakteri)+ random.choice(karakteri)+ random.choice(karakteri)+ random.choice(karakteri)+ random.choice(karakteri)) elif user_input == "7": print(random.choice(karakteri)+ random.choice(karakteri)+ random.choice(karakteri)+ random.choice(karakteri)+ random.choice(karakteri)+ random.choice(karakteri)+ random.choice(karakteri)) elif user_input == "8": print(random.choice(karakteri)+ random.choice(karakteri)+ random.choice(karakteri)+ random.choice(karakteri)+ random.choice(karakteri)+ random.choice(karakteri)+ random.choice(karakteri)+ random.choice(karakteri)) elif user_input == "9": print(random.choice(karakteri)+ random.choice(karakteri)+ random.choice(karakteri)+ random.choice(karakteri)+ random.choice(karakteri)+ random.choice(karakteri)+ random.choice(karakteri)+ random.choice(karakteri)+ random.choice(karakteri)) elif user_input == "10": print(random.choice(karakteri)+ 
random.choice(karakteri)+ random.choice(karakteri)+ random.choice(karakteri)+ random.choice(karakteri)+ random.choice(karakteri)+ random.choice(karakteri)+ random.choice(karakteri)+ random.choice(karakteri)+ random.choice(karakteri)) elif user_input == "11": print(random.choice(karakteri)+ random.choice(karakteri)+ random.choice(karakteri)+ random.choice(karakteri)+ random.choice(karakteri)+ random.choice(karakteri)+ random.choice(karakteri)+ random.choice(karakteri)+ random.choice(karakteri)+ random.choice(karakteri)+ random.choice(karakteri)) elif user_input == "12": print(random.choice(karakteri)+ random.choice(karakteri)+ random.choice(karakteri)+ random.choice(karakteri)+ random.choice(karakteri)+ random.choice(karakteri)+ random.choice(karakteri)+ random.choice(karakteri)+ random.choice(karakteri)+ random.choice(karakteri)+ random.choice(karakteri)+ random.choice(karakteri)) elif user_input == "13": print(random.choice(karakteri)+ random.choice(karakteri)+ random.choice(karakteri)+ random.choice(karakteri)+ random.choice(karakteri)+ random.choice(karakteri)+ random.choice(karakteri)+ random.choice(karakteri)+ random.choice(karakteri)+ random.choice(karakteri)+ random.choice(karakteri)+ random.choice(karakteri)+ random.choice(karakteri)) elif user_input == "14": print(random.choice(karakteri)+ random.choice(karakteri)+ random.choice(karakteri)+ random.choice(karakteri)+ random.choice(karakteri)+ random.choice(karakteri)+ random.choice(karakteri)+ random.choice(karakteri)+ random.choice(karakteri)+ random.choice(karakteri)+ random.choice(karakteri)+ random.choice(karakteri)+ random.choice(karakteri)+ random.choice(karakteri)) elif user_input == "15": print(random.choice(karakteri)+ random.choice(karakteri)+ random.choice(karakteri)+ random.choice(karakteri)+ random.choice(karakteri)+ random.choice(karakteri)+ random.choice(karakteri)+ random.choice(karakteri)+ random.choice(karakteri)+ random.choice(karakteri)+ random.choice(karakteri)+ 
random.choice(karakteri)+ random.choice(karakteri)+ random.choice(karakteri)+ random.choice(karakteri)) else: print("Error,make sure its 6-15 characters") input("") else: print("Wrong password") input("") def decryption(): passwdinput = getpass.getpass("Password: ") passwd = ("") if passwd == passwdinput: s = input("Type something for decryption: ") print(s[2::5]) input("") print("Welcome!") choice = input("Choose:Encrypt(e),Decrypt(d),Generate password(p): ") if choice == ("e"): encryption() elif choice == ("d"): decryption() elif choice == ("p"): generate() input("") else: print("Error,choose e,d or p.") input("")
62.156627
408
0.633262
585
5,159
5.553846
0.160684
0.387812
0.67867
0.789474
0.790397
0.790397
0.773777
0.752231
0.752231
0.752231
0
0.012535
0.164954
5,159
82
409
62.914634
0.741643
0
0
0.19403
0
0
0.078455
0.007294
0.029851
0
0
0
0
1
0.059701
false
0.19403
0.044776
0
0.119403
0.253731
0
0
0
null
1
1
1
0
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
0
0
0
9
7a40e5bf1f1d27594c293df6014353beb8fa58e5
40,537
py
Python
networks/popnet.py
Charleshhy/Progressive-One-shot-Human-Parsing
04cd3c14859ea45722377fce5403b686f82269c9
[ "Apache-2.0" ]
39
2020-12-23T03:42:31.000Z
2022-03-10T07:06:31.000Z
networks/popnet.py
Charleshhy/Progressive-One-shot-Human-Parsing
04cd3c14859ea45722377fce5403b686f82269c9
[ "Apache-2.0" ]
5
2021-05-13T14:08:53.000Z
2022-03-26T12:10:49.000Z
networks/popnet.py
Charleshhy/Progressive-One-shot-Human-Parsing
04cd3c14859ea45722377fce5403b686f82269c9
[ "Apache-2.0" ]
6
2021-06-23T05:26:10.000Z
2021-12-13T08:58:15.000Z
import torch import torch.nn as nn import torch.nn.functional as F from networks import deeplab_xception_synBN # For code release def model_mask(mask, delete_class_set): new_mask = mask.clone() for i in delete_class_set: new_mask = torch.where(mask == i, torch.zeros(1).cuda(), new_mask) return new_mask class popnet_kway_dp( deeplab_xception_synBN.DeepLabv3_plus_v2): def __init__(self, nInputChannels=3, n_classes=7, os=16, hidden_layers=256, beta=0.001, scaler=10., feature_lvl='high', DML_mode='fixed'): super(popnet_kway_dp, self).__init__(nInputChannels=nInputChannels, n_classes=n_classes, os=os, pretrained=True) # Settings self.hidden_layers = hidden_layers self.DML_mode = DML_mode self.cos_similarity_func = nn.CosineSimilarity() self.feature_lvl = feature_lvl self.beta = beta # AGM layers self.classifier_6 = nn.Conv2d(256, 2, kernel_size=1) self.bg_att_fusion = nn.Conv2d(1, 1, kernel_size=1) # NCM layers self.after_sim_fg = nn.Conv2d(1, 1, kernel_size=1) self.after_sim_bg = nn.Conv2d(1, 1, kernel_size=1) self.bg_sim_fusion = nn.Conv2d(1, 1, kernel_size=1) self.scaler = nn.Parameter(torch.tensor(scaler), requires_grad=True) # Learnable scaler # KIM layers self.feature_fusion = nn.Sequential( deeplab_xception_synBN.Decoder_module(512, 256), deeplab_xception_synBN.Decoder_module(256, 256), ) self.prototype = torch.nn.Parameter(torch.zeros(n_classes, 256), requires_grad=False) def mask2map(self, mask, class_num): # Helper function for getting feature indexes for each class (gpu) n, h, w = mask.shape maskmap_ave = torch.zeros(n, class_num, h, w).cuda() for i in range(class_num): class_pix = torch.where(mask == i, torch.ones(1).cuda(), torch.zeros(1).cuda()) class_sum = torch.sum(class_pix.view(n, h * w), dim=1) class_sum = torch.where(class_sum == 0, torch.ones(1).cuda(), class_sum) class_pix_ave = class_pix / class_sum.view(n, 1, 1) maskmap_ave[:, i, :, :] = class_pix_ave return maskmap_ave def mask2map_cpu(self, mask, class_num): # Helper function for getting 
feature indexes for each class n, h, w = mask.shape maskmap_ave = torch.zeros(n, class_num, h, w) for i in range(class_num): class_pix = torch.where(mask == i, torch.ones(1), torch.zeros(1)) class_sum = torch.sum(class_pix.view(n, h * w), dim=1) class_sum = torch.where(class_sum == 0, torch.ones(1), class_sum) class_pix_ave = class_pix / class_sum.view(n, 1, 1) maskmap_ave[:, i, :, :] = class_pix_ave return maskmap_ave def forward(self, input, cate_num=17, proto_prev_stage=True, prev_qry_fea=None, prev_sup_fea=None): nclasses = cate_num img, support, support_mask = input # Encoder img_features = self.oneshot_flex_forward(img, feature_lvl=self.feature_lvl) sup_features = self.oneshot_flex_forward(support, feature_lvl=self.feature_lvl) # Knowledge infusion module if prev_qry_fea is not None and prev_sup_fea is not None: if prev_qry_fea.shape != img_features.shape: prev_qry_fea = F.upsample(prev_qry_fea, size=img_features.size()[2:], mode='bilinear', align_corners=True) prev_sup_fea = F.upsample(prev_sup_fea, size=sup_features.size()[2:], mode='bilinear', align_corners=True) img_features = self.feature_fusion(torch.cat([prev_qry_fea, img_features], dim=1)) sup_features = self.feature_fusion(torch.cat([prev_sup_fea, sup_features], dim=1)) batch_n, _, mask_h, mask_w = sup_features.size() support_mask = F.upsample(support_mask, size=(mask_h, mask_w), mode='nearest') # Get indexes for each class maskmap = self.mask2map(support_mask.squeeze(1), nclasses) # Compute average features using the indexes sp_ave_features = torch.matmul(maskmap.view(batch_n, nclasses, mask_h * mask_w), # batch * class_num * hw sup_features.permute(0, 2, 3, 1).view(batch_n, mask_h * mask_w, self.hidden_layers) # batch * hw * feature channels ) # batch * classnum * feature channels sup_classes = torch.unique(support_mask).long() if self.DML_mode == 'fixed': dml = self.dual_metric_fixed else: dml = self.dual_metric_ucs att_mask, sim_mask = dml(sp_ave_features, img_features, nclasses, 
proto_prev_stage, sup_classes) return F.upsample(att_mask, size=img.size()[2:], mode='bilinear', align_corners=True), F.upsample( self.scaler * sim_mask, size=img.size()[2:], mode='bilinear', align_corners=True) def dual_metric_ucs(self, sp_ave_features, features, nclasses, proto_prev_stage, sup_classes_bg): # This is the DML methods using unseen class screening (ucs), # where the background prototype is aggregated by # (sum human cls prototypes) / (# of human cls prototypes annotated in support image). # Get rid of the background index and remain the existing indexes # We only calculate max_classes = nclasses sup_classes = sup_classes_bg[sup_classes_bg != 0] screened_sp = sp_ave_features[:, sup_classes, :] # Features initialization: AGM (denoted as att) and NCM (denoted as sim) sem_lis = [] sim_bg_fea = torch.zeros(features.shape[0], 1, features.shape[2], features.shape[3]).cuda() att_lis = [] att_bg_fea = torch.zeros(features.shape[0], 1, features.shape[2], features.shape[3]).cuda() # To reduce memory usage, we use loops to process features for each batch in each class for i in range(len(sup_classes)): class_fea = screened_sp[:, i, :] # Generate class features and get similarity mask temp_list = [] for b_ind in range(features.shape[0]): batch_fea = class_fea[b_ind, :] # When we don't want this class to be parsed, prototype for this class is set to 0 if torch.sum(batch_fea) == 0: proto_fea = torch.zeros_like(batch_fea) else: if proto_prev_stage or torch.sum(self.prototype[i]) == 0: # When we want to parse this class but the prototype is 0, we use the batch_fea on the go proto_fea = batch_fea else: proto_num = sup_classes[i] if proto_prev_stage or torch.sum(self.prototype[proto_num]) == 0: # When we want to parse this class but the prototype is 0, we use batch_fea proto_fea = batch_fea else: # When batch_fea != 0, calculate proto_fea proto_fea = (1 - self.beta) * self.prototype[ proto_num].clone() + self.beta * batch_fea # If training, update prototype if 
self.training: self.prototype[i] = proto_fea.detach() batch_tmp_seg_map = self.cos_similarity_func(features[b_ind, :, :, :].unsqueeze(0), proto_fea.unsqueeze(-1).unsqueeze(-1).unsqueeze(0)) temp_list.append(batch_tmp_seg_map) # Aggregate features for AGM (denoted as att) and NCM (denoted as sim) tmp_seg_map = torch.stack(temp_list, dim=0).squeeze(1) bg_tmp_seg_map = torch.ones_like(tmp_seg_map.unsqueeze(1)).cuda() - tmp_seg_map.unsqueeze(1) sem_lis.append(self.after_sim_fg(tmp_seg_map.unsqueeze(1))) sim_bg_fea += self.after_sim_bg(bg_tmp_seg_map) att_fea = self.decoder3(self.decoder2(tmp_seg_map.unsqueeze(1) * features + features)) att_mask = self.classifier_6(att_fea) att_lis.append(att_mask[:, 0, :, :].unsqueeze(1)) att_bg_fea += att_mask[:, 1, :, :].unsqueeze(1) if len(sup_classes) != 0: sim_bg_fea = sim_bg_fea / len(sup_classes) att_bg_fea = att_bg_fea / len(sup_classes) else: sim_bg_fea = torch.ones_like(sim_bg_fea).cuda() att_bg_fea = torch.ones_like(att_bg_fea).cuda() sem_lis = [self.bg_sim_fusion(sim_bg_fea)] + sem_lis att_lis = [self.bg_att_fusion(att_bg_fea)] + att_lis # bg has to be added sim_mask_screened = torch.cat(sem_lis, 1) att_mask_screened = torch.cat(att_lis, 1) sim_mask, att_mask = torch.zeros(features.shape[0], max_classes, features.shape[2], features.shape[3]).cuda(), \ torch.zeros(features.shape[0], max_classes, features.shape[2], features.shape[3]).cuda() # Follow the previous indexes sim_mask[:, sup_classes_bg], att_mask[:, sup_classes_bg] = sim_mask_screened, att_mask_screened return att_mask, sim_mask def dual_metric_fixed(self, sp_ave_features, features, nclasses, proto_prev_stage, sup_classes_bg): # This is the DML methods not using unseen class screening (ucs), # where the background prototype is aggregated by (sum of human cls prototypes) / (a fixed term). # (Variable: sup_classes_bg) is not used in this function. 
# Features initialization: AGM (denoted as att) and NCM (denoted as sim) sem_lis = [] sim_bg_fea = torch.zeros(features.shape[0], 1, features.shape[2], features.shape[3]).cuda() att_lis = [] att_bg_fea = torch.zeros(features.shape[0], 1, features.shape[2], features.shape[3]).cuda() # To reduce memory usage, we use loops to process features for each batch in each class for i in range(1, nclasses): class_fea = sp_ave_features[:, i, :] # Generate class features and get similarity mask temp_list = [] for b_ind in range(features.shape[0]): batch_fea = class_fea[b_ind, :] # When we don't want this class to be parsed, prototype for this class is set to 0 if torch.sum(batch_fea) == 0: proto_fea = torch.zeros_like(batch_fea) else: if proto_prev_stage or torch.sum(self.prototype[i]) == 0: # When we want to parse this class but the prototype is 0, we use the batch_fea on the go proto_fea = batch_fea else: # If batch_fea != 0, we form dymanic proto_fea proto_fea = (1 - self.beta) * self.prototype[ i].clone() + self.beta * batch_fea # If training, update prototype if self.training: self.prototype[i] = proto_fea.detach() batch_tmp_seg_map = self.cos_similarity_func(features[b_ind, :, :, :].unsqueeze(0), proto_fea.unsqueeze(-1).unsqueeze(-1).unsqueeze(0)) temp_list.append(batch_tmp_seg_map) # Aggregate features for AGM (denoted as att) and NCM (denoted as sim) tmp_seg_map = torch.stack(temp_list, dim=0).squeeze(1) bg_tmp_seg_map = torch.ones_like(tmp_seg_map.unsqueeze(1)).cuda() - tmp_seg_map.unsqueeze(1) sem_lis.append(self.after_sim_fg(tmp_seg_map.unsqueeze(1))) sim_bg_fea += self.after_sim_bg(bg_tmp_seg_map) att_fea = self.decoder3(self.decoder2(tmp_seg_map.unsqueeze(1) * features + features)) att_mask = self.classifier_6(att_fea) att_lis.append(att_mask[:, 0, :, :].unsqueeze(1)) att_bg_fea += att_mask[:, 1, :, :].unsqueeze(1) sim_bg_fea = sim_bg_fea / (nclasses - 1) att_bg_fea = att_bg_fea / (nclasses - 1) sem_lis = [self.bg_sim_fusion(sim_bg_fea)] + sem_lis att_lis = 
[self.bg_att_fusion(att_bg_fea)] + att_lis sim_mask = torch.cat(sem_lis, 1) att_mask = torch.cat(att_lis, 1) return att_mask, sim_mask class popnet_kway_dp_more_dataset( deeplab_xception_synBN.DeepLabv3_plus_v2): def __init__(self, nInputChannels=3, os=16, hidden_layers=256, beta=0.001, scaler=10., feature_lvl='high', DML_mode='fixed', cate_num=17): super(popnet_kway_dp_more_dataset, self).__init__(nInputChannels=nInputChannels, n_classes=cate_num, os=os, pretrained=True) # Settings self.hidden_layers = hidden_layers self.DML_mode = DML_mode self.cos_similarity_func = nn.CosineSimilarity() self.feature_lvl = feature_lvl self.beta = beta self.cate_num = cate_num # AGM layers self.classifier_6 = nn.Conv2d(256, 2, kernel_size=1) self.bg_att_fusion = nn.Conv2d(1, 1, kernel_size=1) # NCM layers self.after_sim_fg = nn.Conv2d(1, 1, kernel_size=1) self.after_sim_bg = nn.Conv2d(1, 1, kernel_size=1) self.bg_sim_fusion = nn.Conv2d(1, 1, kernel_size=1) self.scaler = nn.Parameter(torch.tensor(scaler), requires_grad=True) # Learnable scaler # KIM layers self.feature_fusion = nn.Sequential( deeplab_xception_synBN.Decoder_module(512, 256), deeplab_xception_synBN.Decoder_module(256, 256), ) self.prototype = torch.nn.Parameter(torch.zeros(cate_num, 256), requires_grad=False) def mask2map(self, mask, class_num): # Helper function for getting feature indexes for each class (gpu) n, h, w = mask.shape maskmap_ave = torch.zeros(n, class_num, h, w).cuda() for i in range(class_num): class_pix = torch.where(mask == i, torch.ones(1).cuda(), torch.zeros(1).cuda()) class_sum = torch.sum(class_pix.view(n, h * w), dim=1) class_sum = torch.where(class_sum == 0, torch.ones(1).cuda(), class_sum) class_pix_ave = class_pix / class_sum.view(n, 1, 1) maskmap_ave[:, i, :, :] = class_pix_ave return maskmap_ave def mask2map_cpu(self, mask, class_num): # Helper function for getting feature indexes for each class n, h, w = mask.shape maskmap_ave = torch.zeros(n, class_num, h, w) for i in range(class_num): 
class_pix = torch.where(mask == i, torch.ones(1), torch.zeros(1)) class_sum = torch.sum(class_pix.view(n, h * w), dim=1) class_sum = torch.where(class_sum == 0, torch.ones(1), class_sum) class_pix_ave = class_pix / class_sum.view(n, 1, 1) maskmap_ave[:, i, :, :] = class_pix_ave return maskmap_ave def forward(self, input, proto_prev_stage=True, prev_qry_fea=None, prev_sup_fea=None, cate_mapping=None): img, support, support_mask = input # Encoder img_features, _, _ = self.oneshot_highlvl_forward(img) sup_features, _, _ = self.oneshot_highlvl_forward(support) # Knowledge infusion module if prev_qry_fea is not None and prev_sup_fea is not None: if prev_qry_fea.shape != img_features.shape: prev_qry_fea = F.upsample(prev_qry_fea, size=img_features.size()[2:], mode='bilinear', align_corners=True) prev_sup_fea = F.upsample(prev_sup_fea, size=sup_features.size()[2:], mode='bilinear', align_corners=True) img_features = self.feature_fusion(torch.cat([prev_qry_fea, img_features], dim=1)) sup_features = self.feature_fusion(torch.cat([prev_sup_fea, sup_features], dim=1)) batch_n, _, mask_h, mask_w = sup_features.size() support_mask = F.upsample(support_mask, size=(mask_h, mask_w), mode='nearest') # Get indexes for each class maskmap = self.mask2map(support_mask.squeeze(1), self.cate_num) # Compute average features using the indexes sp_ave_features = torch.matmul(maskmap.view(batch_n, self.cate_num, mask_h * mask_w), # batch * class_num * hw sup_features.permute(0, 2, 3, 1).view(batch_n, mask_h * mask_w, self.hidden_layers)) # batch * hw * feature channels # batch * classnum * feature channels sup_classes = torch.unique(support_mask).long() if self.DML_mode == 'fixed': dml = self.dual_metric_fixed else: dml = self.dual_metric_ucs att_mask, sim_mask = dml(sp_ave_features, img_features, self.cate_num, proto_prev_stage, sup_classes) return F.upsample(att_mask, size=img.size()[2:], mode='bilinear', align_corners=True), F.upsample( self.scaler * sim_mask, size=img.size()[2:], 
mode='bilinear', align_corners=True) def dual_metric_ucs(self, sp_ave_features, features, nclasses, proto_prev_stage, sup_classes_bg): # This is the DML methods using unseen class screening (ucs), # where the background prototype is aggregated by # (sum human cls prototypes) / (# of human cls prototypes annotated in support image). # Get rid of the background index and remain the existing indexes # We only calculate max_classes = nclasses sup_classes = sup_classes_bg[sup_classes_bg != 0] screened_sp = sp_ave_features[:, sup_classes, :] # Features initialization: AGM (denoted as att) and NCM (denoted as sim) sem_lis = [] sim_bg_fea = torch.zeros(features.shape[0], 1, features.shape[2], features.shape[3]).cuda() att_lis = [] att_bg_fea = torch.zeros(features.shape[0], 1, features.shape[2], features.shape[3]).cuda() # To reduce memory usage, we use loops to process features for each batch in each class for i in range(len(sup_classes)): class_fea = screened_sp[:, i, :] # Generate class features and get similarity mask temp_list = [] for b_ind in range(features.shape[0]): batch_fea = class_fea[b_ind, :] # When we don't want this class to be parsed, prototype for this class is set to 0 if torch.sum(batch_fea) == 0: proto_fea = torch.zeros_like(batch_fea) else: if proto_prev_stage or torch.sum(self.prototype[i]) == 0: # When we want to parse this class but the prototype is 0, we use the batch_fea on the go proto_fea = batch_fea else: proto_num = sup_classes[i] if proto_prev_stage or torch.sum(self.prototype[proto_num]) == 0: # When we want to parse this class but the prototype is 0, we use batch_fea proto_fea = batch_fea else: # When batch_fea != 0, calculate proto_fea proto_fea = (1 - self.beta) * self.prototype[ proto_num].clone() + self.beta * batch_fea # If training, update prototype if self.training: self.prototype[i] = proto_fea.detach() batch_tmp_seg_map = self.cos_similarity_func(features[b_ind, :, :, :].unsqueeze(0), 
proto_fea.unsqueeze(-1).unsqueeze(-1).unsqueeze(0)) temp_list.append(batch_tmp_seg_map) # Aggregate features for AGM (denoted as att) and NCM (denoted as sim) tmp_seg_map = torch.stack(temp_list, dim=0).squeeze(1) bg_tmp_seg_map = torch.ones_like(tmp_seg_map.unsqueeze(1)).cuda() - tmp_seg_map.unsqueeze(1) sem_lis.append(self.after_sim_fg(tmp_seg_map.unsqueeze(1))) sim_bg_fea += self.after_sim_bg(bg_tmp_seg_map) att_fea = self.decoder3(self.decoder2(tmp_seg_map.unsqueeze(1) * features + features)) att_mask = self.classifier_6(att_fea) att_lis.append(att_mask[:, 0, :, :].unsqueeze(1)) att_bg_fea += att_mask[:, 1, :, :].unsqueeze(1) if len(sup_classes) != 0: sim_bg_fea = sim_bg_fea / len(sup_classes) att_bg_fea = att_bg_fea / len(sup_classes) else: sim_bg_fea = torch.ones_like(sim_bg_fea).cuda() att_bg_fea = torch.ones_like(att_bg_fea).cuda() sem_lis = [self.bg_sim_fusion(sim_bg_fea)] + sem_lis att_lis = [self.bg_att_fusion(att_bg_fea)] + att_lis # bg has to be added sim_mask_screened = torch.cat(sem_lis, 1) att_mask_screened = torch.cat(att_lis, 1) sim_mask, att_mask = torch.zeros(features.shape[0], max_classes, features.shape[2], features.shape[3]).cuda(), \ torch.zeros(features.shape[0], max_classes, features.shape[2], features.shape[3]).cuda() # Follow the previous indexes sim_mask[:, sup_classes_bg], att_mask[:, sup_classes_bg] = sim_mask_screened, att_mask_screened return att_mask, sim_mask def dual_metric_fixed(self, sp_ave_features, features, nclasses, proto_prev_stage, sup_classes_bg): # This is the DML methods not using unseen class screening (ucs), # where the background prototype is aggregated by (sum of human cls prototypes) / (a fixed term). # (Variable: sup_classes_bg) is not used in this function. 
# Features initialization: AGM (denoted as att) and NCM (denoted as sim) sem_lis = [] sim_bg_fea = torch.zeros(features.shape[0], 1, features.shape[2], features.shape[3]).cuda() att_lis = [] att_bg_fea = torch.zeros(features.shape[0], 1, features.shape[2], features.shape[3]).cuda() # To reduce memory usage, we use loops to process features for each batch in each class for i in range(1, nclasses): class_fea = sp_ave_features[:, i, :] # Generate class features and get similarity mask temp_list = [] for b_ind in range(features.shape[0]): batch_fea = class_fea[b_ind, :] # When we don't want this class to be parsed, prototype for this class is set to 0 if torch.sum(batch_fea) == 0: proto_fea = torch.zeros_like(batch_fea) else: if proto_prev_stage or torch.sum(self.prototype[i]) == 0: # When we want to parse this class but the prototype is 0, we use the batch_fea on the go proto_fea = batch_fea else: # If batch_fea != 0, we form dymanic proto_fea proto_fea = (1 - self.beta) * self.prototype[ i].clone() + self.beta * batch_fea # If training, update prototype if self.training: self.prototype[i] = proto_fea.detach() batch_tmp_seg_map = self.cos_similarity_func(features[b_ind, :, :, :].unsqueeze(0), proto_fea.unsqueeze(-1).unsqueeze(-1).unsqueeze(0)) temp_list.append(batch_tmp_seg_map) # Aggregate features for AGM (denoted as att) and NCM (denoted as sim) tmp_seg_map = torch.stack(temp_list, dim=0).squeeze(1) bg_tmp_seg_map = torch.ones_like(tmp_seg_map.unsqueeze(1)).cuda() - tmp_seg_map.unsqueeze(1) sem_lis.append(self.after_sim_fg(tmp_seg_map.unsqueeze(1))) sim_bg_fea += self.after_sim_bg(bg_tmp_seg_map) att_fea = self.decoder3(self.decoder2(tmp_seg_map.unsqueeze(1) * features + features)) att_mask = self.classifier_6(att_fea) att_lis.append(att_mask[:, 0, :, :].unsqueeze(1)) att_bg_fea += att_mask[:, 1, :, :].unsqueeze(1) sim_bg_fea = sim_bg_fea / (nclasses - 1) att_bg_fea = att_bg_fea / (nclasses - 1) sem_lis = [self.bg_sim_fusion(sim_bg_fea)] + sem_lis att_lis = 
[self.bg_att_fusion(att_bg_fea)] + att_lis sim_mask = torch.cat(sem_lis, 1) att_mask = torch.cat(att_lis, 1) return att_mask, sim_mask # Test whether batch-wise same support works class popnet_kway_dp_samesup( deeplab_xception_synBN.DeepLabv3_plus_v2): def __init__(self, nInputChannels=3, n_classes=7, os=16, hidden_layers=256, beta=0.001, scaler=10., feature_lvl='high', DML_mode='fixed'): super(popnet_kway_dp_samesup, self).__init__(nInputChannels=nInputChannels, n_classes=n_classes, os=os, pretrained=True) # Settings self.hidden_layers = hidden_layers self.DML_mode = DML_mode self.cos_similarity_func = nn.CosineSimilarity() self.feature_lvl = feature_lvl self.beta = beta # AGM layers self.classifier_6 = nn.Conv2d(256, 2, kernel_size=1) self.bg_att_fusion = nn.Conv2d(1, 1, kernel_size=1) # NCM layers self.after_sim_fg = nn.Conv2d(1, 1, kernel_size=1) self.after_sim_bg = nn.Conv2d(1, 1, kernel_size=1) self.bg_sim_fusion = nn.Conv2d(1, 1, kernel_size=1) self.scaler = nn.Parameter(torch.tensor(scaler), requires_grad=True) # Learnable scaler # KIM layers self.feature_fusion = nn.Sequential( deeplab_xception_synBN.Decoder_module(512, 256), deeplab_xception_synBN.Decoder_module(256, 256), ) self.prototype = torch.nn.Parameter(torch.zeros(17, 256), requires_grad=False) def mask2map(self, mask, class_num): # Helper function for getting feature indexes for each class (gpu) n, h, w = mask.shape maskmap_ave = torch.zeros(n, class_num, h, w).cuda() for i in range(class_num): class_pix = torch.where(mask == i, torch.ones(1).cuda(), torch.zeros(1).cuda()) class_sum = torch.sum(class_pix.view(n, h * w), dim=1) class_sum = torch.where(class_sum == 0, torch.ones(1).cuda(), class_sum) class_pix_ave = class_pix / class_sum.view(n, 1, 1) maskmap_ave[:, i, :, :] = class_pix_ave return maskmap_ave def mask2map_cpu(self, mask, class_num): # Helper function for getting feature indexes for each class n, h, w = mask.shape maskmap_ave = torch.zeros(n, class_num, h, w) for i in 
range(class_num): class_pix = torch.where(mask == i, torch.ones(1), torch.zeros(1)) class_sum = torch.sum(class_pix.view(n, h * w), dim=1) class_sum = torch.where(class_sum == 0, torch.ones(1), class_sum) class_pix_ave = class_pix / class_sum.view(n, 1, 1) maskmap_ave[:, i, :, :] = class_pix_ave return maskmap_ave def forward(self, input, cate_num=17, proto_prev_stage=True, prev_qry_fea=None, prev_sup_fea=None): nclasses = cate_num img, support_difsup, support_mask_difsup = input support, support_mask = support_difsup[0], support_mask_difsup[0] support, support_mask = support.expand(support_difsup.shape), support_mask.expand(support_mask_difsup.shape) # print(support.shape, support_mask.shape) # Encoder img_features = self.oneshot_flex_forward(img, feature_lvl=self.feature_lvl) sup_features = self.oneshot_flex_forward(support, feature_lvl=self.feature_lvl) # Knowledge infusion module if prev_qry_fea is not None and prev_sup_fea is not None: if prev_qry_fea.shape != img_features.shape: prev_qry_fea = F.upsample(prev_qry_fea, size=img_features.size()[2:], mode='bilinear', align_corners=True) prev_sup_fea = F.upsample(prev_sup_fea, size=sup_features.size()[2:], mode='bilinear', align_corners=True) img_features = self.feature_fusion(torch.cat([prev_qry_fea, img_features], dim=1)) sup_features = self.feature_fusion(torch.cat([prev_sup_fea, sup_features], dim=1)) batch_n, _, mask_h, mask_w = sup_features.size() support_mask = F.upsample(support_mask, size=(mask_h, mask_w), mode='nearest') # Get indexes for each class maskmap = self.mask2map(support_mask.squeeze(1), nclasses) # Compute average features using the indexes sp_ave_features = torch.matmul(maskmap.view(batch_n, nclasses, mask_h * mask_w), # batch * class_num * hw sup_features.permute(0, 2, 3, 1).view(batch_n, mask_h * mask_w, self.hidden_layers) # batch * hw * feature channels ) # batch * classnum * feature channels sup_classes = torch.unique(support_mask).long() if self.DML_mode == 'fixed': dml = 
self.dual_metric_fixed else: dml = self.dual_metric_ucs att_mask, sim_mask = dml(sp_ave_features, img_features, nclasses, proto_prev_stage, sup_classes) return F.upsample(att_mask, size=img.size()[2:], mode='bilinear', align_corners=True), F.upsample( self.scaler * sim_mask, size=img.size()[2:], mode='bilinear', align_corners=True) def dual_metric_ucs(self, sp_ave_features, features, nclasses, proto_prev_stage, sup_classes_bg): # This is the DML methods using unseen class screening (ucs), # where the background prototype is aggregated by # (sum human cls prototypes) / (# of human cls prototypes annotated in support image). # Get rid of the background index and remain the existing indexes # We only calculate max_classes = nclasses sup_classes = sup_classes_bg[sup_classes_bg != 0] screened_sp = sp_ave_features[:, sup_classes, :] # Features initialization: AGM (denoted as att) and NCM (denoted as sim) sem_lis = [] sim_bg_fea = torch.zeros(features.shape[0], 1, features.shape[2], features.shape[3]).cuda() att_lis = [] att_bg_fea = torch.zeros(features.shape[0], 1, features.shape[2], features.shape[3]).cuda() # To reduce memory usage, we use loops to process features for each batch in each class for i in range(len(sup_classes)): class_fea = screened_sp[:, i, :] # Generate class features and get similarity mask temp_list = [] for b_ind in range(features.shape[0]): batch_fea = class_fea[b_ind, :] # When we don't want this class to be parsed, prototype for this class is set to 0 if torch.sum(batch_fea) == 0: proto_fea = torch.zeros_like(batch_fea) else: if proto_prev_stage or torch.sum(self.prototype[i]) == 0: # When we want to parse this class but the prototype is 0, we use the batch_fea on the go proto_fea = batch_fea else: proto_num = sup_classes[i] if proto_prev_stage or torch.sum(self.prototype[proto_num]) == 0: # When we want to parse this class but the prototype is 0, we use batch_fea proto_fea = batch_fea else: # When batch_fea != 0, calculate proto_fea proto_fea = 
(1 - self.beta) * self.prototype[ proto_num].clone() + self.beta * batch_fea # If training, update prototype if self.training: self.prototype[i] = proto_fea.detach() batch_tmp_seg_map = self.cos_similarity_func(features[b_ind, :, :, :].unsqueeze(0), proto_fea.unsqueeze(-1).unsqueeze(-1).unsqueeze(0)) temp_list.append(batch_tmp_seg_map) # Aggregate features for AGM (denoted as att) and NCM (denoted as sim) tmp_seg_map = torch.stack(temp_list, dim=0).squeeze(1) bg_tmp_seg_map = torch.ones_like(tmp_seg_map.unsqueeze(1)).cuda() - tmp_seg_map.unsqueeze(1) sem_lis.append(self.after_sim_fg(tmp_seg_map.unsqueeze(1))) sim_bg_fea += self.after_sim_bg(bg_tmp_seg_map) att_fea = self.decoder3(self.decoder2(tmp_seg_map.unsqueeze(1) * features + features)) att_mask = self.classifier_6(att_fea) att_lis.append(att_mask[:, 0, :, :].unsqueeze(1)) att_bg_fea += att_mask[:, 1, :, :].unsqueeze(1) if len(sup_classes) != 0: sim_bg_fea = sim_bg_fea / len(sup_classes) att_bg_fea = att_bg_fea / len(sup_classes) else: sim_bg_fea = torch.ones_like(sim_bg_fea).cuda() att_bg_fea = torch.ones_like(att_bg_fea).cuda() sem_lis = [self.bg_sim_fusion(sim_bg_fea)] + sem_lis att_lis = [self.bg_att_fusion(att_bg_fea)] + att_lis # bg has to be added sim_mask_screened = torch.cat(sem_lis, 1) att_mask_screened = torch.cat(att_lis, 1) sim_mask, att_mask = torch.zeros(features.shape[0], max_classes, features.shape[2], features.shape[3]).cuda(), \ torch.zeros(features.shape[0], max_classes, features.shape[2], features.shape[3]).cuda() # Follow the previous indexes sim_mask[:, sup_classes_bg], att_mask[:, sup_classes_bg] = sim_mask_screened, att_mask_screened return att_mask, sim_mask def dual_metric_fixed(self, sp_ave_features, features, nclasses, proto_prev_stage, sup_classes_bg): # This is the DML methods not using unseen class screening (ucs), # where the background prototype is aggregated by (sum of human cls prototypes) / (a fixed term). # (Variable: sup_classes_bg) is not used in this function. 
# Features initialization: AGM (denoted as att) and NCM (denoted as sim) sem_lis = [] sim_bg_fea = torch.zeros(features.shape[0], 1, features.shape[2], features.shape[3]).cuda() att_lis = [] att_bg_fea = torch.zeros(features.shape[0], 1, features.shape[2], features.shape[3]).cuda() # To reduce memory usage, we use loops to process features for each batch in each class for i in range(1, nclasses): class_fea = sp_ave_features[:, i, :] # Generate class features and get similarity mask temp_list = [] for b_ind in range(features.shape[0]): batch_fea = class_fea[b_ind, :] # When we don't want this class to be parsed, prototype for this class is set to 0 if torch.sum(batch_fea) == 0: proto_fea = torch.zeros_like(batch_fea) else: if proto_prev_stage or torch.sum(self.prototype[i]) == 0: # When we want to parse this class but the prototype is 0, we use the batch_fea on the go proto_fea = batch_fea else: # If batch_fea != 0, we form dymanic proto_fea proto_fea = (1 - self.beta) * self.prototype[ i].clone() + self.beta * batch_fea # If training, update prototype if self.training: self.prototype[i] = proto_fea.detach() batch_tmp_seg_map = self.cos_similarity_func(features[b_ind, :, :, :].unsqueeze(0), proto_fea.unsqueeze(-1).unsqueeze(-1).unsqueeze(0)) temp_list.append(batch_tmp_seg_map) # Aggregate features for AGM (denoted as att) and NCM (denoted as sim) tmp_seg_map = torch.stack(temp_list, dim=0).squeeze(1) bg_tmp_seg_map = torch.ones_like(tmp_seg_map.unsqueeze(1)).cuda() - tmp_seg_map.unsqueeze(1) sem_lis.append(self.after_sim_fg(tmp_seg_map.unsqueeze(1))) sim_bg_fea += self.after_sim_bg(bg_tmp_seg_map) att_fea = self.decoder3(self.decoder2(tmp_seg_map.unsqueeze(1) * features + features)) att_mask = self.classifier_6(att_fea) att_lis.append(att_mask[:, 0, :, :].unsqueeze(1)) att_bg_fea += att_mask[:, 1, :, :].unsqueeze(1) sim_bg_fea = sim_bg_fea / (nclasses - 1) att_bg_fea = att_bg_fea / (nclasses - 1) sem_lis = [self.bg_sim_fusion(sim_bg_fea)] + sem_lis att_lis = 
[self.bg_att_fusion(att_bg_fea)] + att_lis sim_mask = torch.cat(sem_lis, 1) att_mask = torch.cat(att_lis, 1) return att_mask, sim_mask class popnet_1way( deeplab_xception_synBN.DeepLabv3_plus_v2): def __init__(self, nInputChannels=3, n_classes=7, os=16, hidden_layers=256, alpha=20): super(popnet_1way, self).__init__(nInputChannels=nInputChannels, n_classes=n_classes, os=os, pretrained=True) # Settings self.hidden_layers = hidden_layers self.cos_similarity_func = nn.CosineSimilarity() # AGM layers self.classifier_6 = nn.Sequential( nn.Conv2d(256, 2, kernel_size=1), ) # NCM layers self.after_sim = nn.Sequential( nn.Conv2d(1, 2, kernel_size=1), ) # KIM layers self.feature_fusion = nn.Sequential( deeplab_xception_synBN.Decoder_module(512, 256), deeplab_xception_synBN.Decoder_module(256, 256), ) def cateogrory_feature(self, features, mask): # Helper function for getting feature indexes for the only class batch_n, _, mask_w, mask_h = features.size() pos_sum = torch.sum( mask.view(batch_n, mask_h * mask_w), dim=1).unsqueeze(1) pos_sum = torch.where(pos_sum == 0, torch.ones(1).cuda(), pos_sum) vec_pos = torch.sum(torch.sum(features * mask, dim=3), dim=2) / pos_sum return vec_pos def forward(self, input, cate_num=None, prev_qry_fea=None, prev_sup_fea=None): img, support, sup_known = input # Encoder img_features = self.oneshot_forward(img) sup_features = self.oneshot_forward(support) # Knowledge infusion module img_features = self.feature_fusion(torch.cat([prev_qry_fea, img_features], dim=1)) sup_features = self.feature_fusion(torch.cat([prev_sup_fea, sup_features], dim=1)) batch_n, _, mask_w, mask_h = sup_features.size() sup_known = F.upsample(sup_known, size=(mask_w, mask_h), mode='nearest') # Get indexes for the class sup_known_features = self.cateogrory_feature(sup_features, sup_known) # DML tmp_seg = self.cos_similarity_func(img_features, sup_known_features.unsqueeze(dim=2).unsqueeze(dim=3)) tmp_seg = tmp_seg.unsqueeze(dim=1) res_features = img_features + img_features * 
tmp_seg res_features = self.decoder3(self.decoder2(res_features)) sim_mask = tmp_seg sim_mask = self.after_sim(sim_mask) img_mask = self.classifier_6(res_features) return F.upsample(img_mask, size=img.size()[2:], mode='bilinear', align_corners=True), \ F.upsample(sim_mask, size=img.size()[2:], mode='bilinear', align_corners=True) def prev_forward(self, input, prev_qry_fea=None, prev_sup_fea=None): # prepare parent qry_features and parent sup_features for the next stage img, support = input img_features = self.oneshot_forward(img) sup_features = self.oneshot_forward(support) img_features = self.feature_fusion(torch.cat([prev_qry_fea, img_features], dim=1)) sup_features = self.feature_fusion(torch.cat([prev_sup_fea, sup_features], dim=1)) return img_features, sup_features class popnet_1way_proto( deeplab_xception_synBN.DeepLabv3_plus_v2): def __init__(self, nInputChannels=3, n_classes=7, os=16, hidden_layers=256, alpha=20, beta=0.001, scaler=10., cate_num=17): super(popnet_1way_proto, self).__init__(nInputChannels=nInputChannels, n_classes=n_classes, os=os, pretrained=False) # Settings self.hidden_layers = hidden_layers self.cos_similarity_func = nn.CosineSimilarity() self.scaler = nn.Parameter(torch.tensor(scaler), requires_grad=True) self.beta = beta # AGM layers self.classifier_6 = nn.Sequential( nn.Conv2d(256, 2, kernel_size=1), ) # NCM layers self.after_sim = nn.Sequential( nn.Conv2d(1, 2, kernel_size=1), ) # KIM layers self.feature_fusion = nn.Sequential( deeplab_xception_synBN.Decoder_module(512, 256), deeplab_xception_synBN.Decoder_module(256, 256), ) self.prototype = torch.nn.Parameter(torch.zeros(cate_num, 256), requires_grad=False) def cateogrory_feature(self, features, mask): batch_n, _, mask_w, mask_h = features.size() pos_sum = torch.sum( mask.view(batch_n, mask_h * mask_w), dim=1).unsqueeze(1) pos_sum = torch.where(pos_sum == 0, torch.ones(1).cuda(), pos_sum) # temp_sum = torch.sum(torch.sum(features * mask, dim=3), dim=2) # 
print(torch.sum(torch.sum(features * mask, dim=3), dim=2).shape, pos_sum.shape) vec_pos = torch.sum(torch.sum(features * mask, dim=3), dim=2) / pos_sum # print(torch.sum(temp_sum)) # assert not (temp_sum != temp_sum).any() # assert not (vec_pos != vec_pos).any() return vec_pos def forward(self, input, proto_prev_stage=True, prev_qry_fea=None, prev_sup_fea=None, cate_mapping=None): # In 1way OSHP, all the foreground classes are annotated as 1, # hence we track a mapping: (category_index -> prototype_index). # cate_mapping = [index_for_batch1, index_for_batch2] assert cate_mapping is not None img, support, support_mask = input img_high, _, _ = self.oneshot_highlvl_forward(img) sup_high, _, _ = self.oneshot_highlvl_forward(support) img_high = self.feature_fusion(torch.cat([prev_qry_fea, img_high], dim=1)) sup_high = self.feature_fusion(torch.cat([prev_sup_fea, sup_high], dim=1)) batch_n, _, mask_h, mask_w = sup_high.size() support_mask = F.upsample(support_mask, size=(mask_h, mask_w), mode='nearest') sp_ave_features = self.cateogrory_feature(sup_high, support_mask) # sem_lis = [bg, fg1, fg2, fg3...] 
att_mask, sim_mask = self.dual_metric(sp_ave_features, img_high, proto_prev_stage, cate_mapping) return F.upsample(att_mask, size=img.size()[2:], mode='bilinear', align_corners=True), F.upsample( self.scaler * sim_mask, size=img.size()[2:], mode='bilinear', align_corners=True) def prev_forward(self, input, prev_qry_fea=None, prev_sup_fea=None): # prepare parent qry_features and parent sup_features for the next stage img, support = input img_features = self.oneshot_forward(img) sup_features = self.oneshot_forward(support) img_features = self.feature_fusion(torch.cat([prev_qry_fea, img_features], dim=1)) sup_features = self.feature_fusion(torch.cat([prev_sup_fea, sup_features], dim=1)) return img_features, sup_features def dual_metric(self, sp_ave_features, features, proto_prev_stage, cate_mapping): # Return att_mask and sim_mask # Generate category features and get similarity mask temp_list = [] for b_ind in range(features.shape[0]): batch_fea = sp_ave_features[b_ind, :] i = cate_mapping[b_ind] # When we don't want this class to be parsed, prototype for this class is set to 0 if torch.sum(batch_fea) == 0: proto_fea = torch.zeros_like(batch_fea) else: if proto_prev_stage or torch.sum(self.prototype[i]) == 0: # When we want to parse this class but the prototype is 0, we use the batch_fea on the go proto_fea = batch_fea else: # If batch_fea != 0, we form dymanic proto_fea proto_fea = (1 - self.beta) * self.prototype[ i].clone() + self.beta * batch_fea # If training, update prototype if self.training: self.prototype[i] = proto_fea.detach() batch_tmp_seg_map = self.cos_similarity_func(features[b_ind, :, :, :].unsqueeze(0), proto_fea.unsqueeze(-1).unsqueeze(-1).unsqueeze(0)) temp_list.append(batch_tmp_seg_map) # Aggregate features for AGM (denoted as att) and NCM (denoted as sim) tmp_seg_map = torch.stack(temp_list, dim=0).squeeze(1) tmp_seg = tmp_seg_map.unsqueeze(dim=1) res_features = features * tmp_seg res_features = self.decoder3(self.decoder2(res_features)) sim_mask 
= tmp_seg sim_mask = self.after_sim(sim_mask) att_mask = self.classifier_6(res_features) return att_mask, sim_mask
39.664384
124
0.692676
6,330
40,537
4.167141
0.04297
0.013648
0.019789
0.01706
0.952688
0.936197
0.925051
0.918834
0.917772
0.913564
0
0.019463
0.192639
40,537
1,021
125
39.703232
0.786513
0.181637
0
0.881686
0
0
0.00621
0
0
0
0
0
0.001621
1
0.045381
false
0
0.006483
0
0.097245
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
7a4c8f7beb3c01d64f1bab80e2febac94ec74961
35,569
py
Python
test/drawCalibrationResults_test.py
tzolnai/tobii_calibration
768ff4e672186840c8de655799a6b943daf35ea7
[ "Apache-2.0" ]
null
null
null
test/drawCalibrationResults_test.py
tzolnai/tobii_calibration
768ff4e672186840c8de655799a6b943daf35ea7
[ "Apache-2.0" ]
null
null
null
test/drawCalibrationResults_test.py
tzolnai/tobii_calibration
768ff4e672186840c8de655799a6b943daf35ea7
[ "Apache-2.0" ]
null
null
null
# Authors: # Tamás Zolnai (zolnaitamas2000@gmail.com) # License: Apache License 2.0, see LICENSE.txt for more details. import unittest import sys # Add the local path of the calibrator module, # test that instead of the system installed one. sys.path = ["../tobii_calibration"] + ["../externals/psychopy_mock"] + sys.path import tobii_calibration as calibrator import tobii_research as tobii from psychopy import visual, event, logging import psychopy_visual_mock as pvm import collections # ignore warnings comming from psychopy logging.console.setLevel(logging.ERROR) class DummyClass: def leave_calibration_mode(): pass class drawCalibrationResultTest(unittest.TestCase): def setUp(self): print ("Current test: ", self.id()) self.calibWin = None def tearDown(self): if self.calibWin is not None: self.calibWin.close() def initAll(self, tobii_helper): tobii_helper.calibration = DummyClass tobii_helper.disableLogging() tobii_helper.setMonitor() calibration_point0 = tobii.CalibrationPoint((0.0, 0.0),( tobii.CalibrationSample(tobii.CalibrationEyeData((0.0, 0.0), True), tobii.CalibrationEyeData((0.0, 0.0), True)),)) calibration_point = tobii.CalibrationPoint((0.1, 0.1),( tobii.CalibrationSample(tobii.CalibrationEyeData((0.08, 0.08), True), tobii.CalibrationEyeData((0.09, 0.08), True)), tobii.CalibrationSample(tobii.CalibrationEyeData((0.12, 0.11), True), tobii.CalibrationEyeData((0.18, 0.12), True)), tobii.CalibrationSample(tobii.CalibrationEyeData((0.11, 0.12), True), tobii.CalibrationEyeData((0.10, 0.10), True)))) calibration_point2 = tobii.CalibrationPoint((0.9, 0.1),( tobii.CalibrationSample(tobii.CalibrationEyeData((0.98, 0.08), True), tobii.CalibrationEyeData((0.99, 0.09), True)), tobii.CalibrationSample(tobii.CalibrationEyeData((0.91, 0.12), True), tobii.CalibrationEyeData((0.90, 0.11), True)), tobii.CalibrationSample(tobii.CalibrationEyeData((0.89, 0.13), True), tobii.CalibrationEyeData((0.98, 0.09), True)))) calibration_point3 = tobii.CalibrationPoint((0.5, 0.5),( 
tobii.CalibrationSample(tobii.CalibrationEyeData((0.51, 0.51), True), tobii.CalibrationEyeData((0.53, 0.45), True)), tobii.CalibrationSample(tobii.CalibrationEyeData((0.43, 0.48), True), tobii.CalibrationEyeData((0.49, 0.49), True)), tobii.CalibrationSample(tobii.CalibrationEyeData((0.50, 0.53), True), tobii.CalibrationEyeData((0.49, 0.54), True)))) calibration_point4 = tobii.CalibrationPoint((0.1, 0.9),( tobii.CalibrationSample(tobii.CalibrationEyeData((0.08, 0.98), True), tobii.CalibrationEyeData((0.09, 0.8), True)), tobii.CalibrationSample(tobii.CalibrationEyeData((0.12, 0.91), True), tobii.CalibrationEyeData((0.18, 0.92), True)), tobii.CalibrationSample(tobii.CalibrationEyeData((0.11, 0.92), True), tobii.CalibrationEyeData((0.10, 0.90), True)))) calibration_point5 = tobii.CalibrationPoint((0.9, 0.9),( tobii.CalibrationSample(tobii.CalibrationEyeData((0.98, 0.98), True), tobii.CalibrationEyeData((0.99, 0.98), True)), tobii.CalibrationSample(tobii.CalibrationEyeData((0.92, 0.91), True), tobii.CalibrationEyeData((0.98, 0.92), True)), tobii.CalibrationSample(tobii.CalibrationEyeData((0.91, 0.92), True), tobii.CalibrationEyeData((0.90, 0.90), True)))) calibration_points = (calibration_point0, calibration_point, calibration_point2, calibration_point3, calibration_point4, calibration_point5) self.calibResult = tobii.CalibrationResult(tobii.CALIBRATION_STATUS_SUCCESS, calibration_points) self.calibWin = visual.Window(size = [1366, 768], pos = [0, 0], units = 'pix', fullscr = True, allowGUI = True, monitor = tobii_helper.win, winType = 'pyglet', color = [0.4, 0.4, 0.4]) pointList = [('1',(0.1, 0.1)), ('2',(0.9, 0.1)), ('3',(0.5, 0.5)), ('4',(0.1, 0.9)), ('5',(0.9, 0.9))] self.calibDict = collections.OrderedDict(pointList) def testNotInitedThingOrWrongParam(self): tobii_helper = calibrator.TobiiHelper() tobii_helper.disableLogging() tobii_helper.setMonitor() # no calibration with self.assertRaises(RuntimeError): tobii_helper._TobiiHelper__drawCalibrationResults(None, 
None, None) tobii_helper.calibration = "dummy" # no calibration results with self.assertRaises(TypeError): tobii_helper._TobiiHelper__drawCalibrationResults(None, None, None) calibration_point0 = tobii.CalibrationPoint((0.0, 0.0),( tobii.CalibrationSample(tobii.CalibrationEyeData((0.0, 0.0), True), tobii.CalibrationEyeData((0.0, 0.0), True)),)) calibration_point = tobii.CalibrationPoint((0.1, 0.1),( tobii.CalibrationSample(tobii.CalibrationEyeData((0.08, 0.08), True), tobii.CalibrationEyeData((0.09, 0.08), True)), tobii.CalibrationSample(tobii.CalibrationEyeData((0.12, 0.11), True), tobii.CalibrationEyeData((0.18, 0.12), True)), tobii.CalibrationSample(tobii.CalibrationEyeData((0.11, 0.12), True), tobii.CalibrationEyeData((0.10, 0.10), True)))) calibration_point2 = tobii.CalibrationPoint((0.9, 0.9),( tobii.CalibrationSample(tobii.CalibrationEyeData((0.98, 0.98), True), tobii.CalibrationEyeData((0.99, 0.98), True)), tobii.CalibrationSample(tobii.CalibrationEyeData((0.91, 0.90), True), tobii.CalibrationEyeData((0.90, 0.97), True)), tobii.CalibrationSample(tobii.CalibrationEyeData((0.89, 0.87), True), tobii.CalibrationEyeData((0.98, 0.99), True)))) calibration_points = (calibration_point0, calibration_point, calibration_point2) calibResult = tobii.CalibrationResult(tobii.CALIBRATION_STATUS_SUCCESS, calibration_points) # no window with self.assertRaises(TypeError): tobii_helper._TobiiHelper__drawCalibrationResults(calibResult, None, None) with visual.Window(size = [1366, 768], pos = [0, 0], units = 'pix', fullscr = True, allowGUI = True, monitor = tobii_helper.win, winType = 'pyglet', color = [0.4, 0.4, 0.4]) as calibWin: # no calib points with self.assertRaises(TypeError): tobii_helper._TobiiHelper__drawCalibrationResults(calibResult, calibWin, None) pointList = [('1',(0.1, 0.1))] calibDict = collections.OrderedDict(pointList) # inconsitent data: calibDict has less items as calibResult with self.assertRaises(ValueError): 
tobii_helper._TobiiHelper__drawCalibrationResults(calibResult, calibWin, calibDict) pointList = [('1',(0.1, 0.1)), ('2',(0.5, 0.5))] calibDict = collections.OrderedDict(pointList) # inconsitent data: calibDict has different items as calibResult with self.assertRaises(ValueError): tobii_helper._TobiiHelper__drawCalibrationResults(calibResult, calibWin, calibDict) pointList = [('1',(0.1, 0.1)), ('2',(0.9, 0.9))] calibDict = collections.OrderedDict(pointList) # we are good now visual_mock = pvm.PsychoPyVisualMock() visual_mock.setReturnKeyList(['c']) tobii_helper._TobiiHelper__drawCalibrationResults(calibResult, calibWin, calibDict) calibWin.close() def testTwoCalibPoints(self): tobii_helper = calibrator.TobiiHelper() self.initAll(tobii_helper) calibration_point0 = tobii.CalibrationPoint((0.0, 0.0),( tobii.CalibrationSample(tobii.CalibrationEyeData((0.0, 0.0), True), tobii.CalibrationEyeData((0.0, 0.0), True)),)) calibration_point = tobii.CalibrationPoint((0.1, 0.1),( tobii.CalibrationSample(tobii.CalibrationEyeData((0.08, 0.08), True), tobii.CalibrationEyeData((0.09, 0.08), True)), tobii.CalibrationSample(tobii.CalibrationEyeData((0.12, 0.11), True), tobii.CalibrationEyeData((0.18, 0.12), True)), tobii.CalibrationSample(tobii.CalibrationEyeData((0.11, 0.12), True), tobii.CalibrationEyeData((0.10, 0.10), True)))) calibration_point2 = tobii.CalibrationPoint((0.9, 0.9),( tobii.CalibrationSample(tobii.CalibrationEyeData((0.98, 0.98), True), tobii.CalibrationEyeData((0.99, 0.98), True)), tobii.CalibrationSample(tobii.CalibrationEyeData((0.91, 0.90), True), tobii.CalibrationEyeData((0.90, 0.97), True)), tobii.CalibrationSample(tobii.CalibrationEyeData((0.89, 0.87), True), tobii.CalibrationEyeData((0.98, 0.99), True)))) calibration_points = (calibration_point0, calibration_point, calibration_point2) self.calibResult = tobii.CalibrationResult(tobii.CALIBRATION_STATUS_SUCCESS, calibration_points) pointList = [('1',(0.1, 0.1)), ('2',(0.9, 0.9))] self.calibDict = 
collections.OrderedDict(pointList) visual_mock = pvm.PsychoPyVisualMock() visual_mock.setReturnKeyList(['c']) tobii_helper._TobiiHelper__drawCalibrationResults(self.calibResult, self.calibWin, self.calibDict) drawing_list = visual_mock.getListOfDrawings() self.assertEqual(10, len(drawing_list)) # first calib point's circle calibPoint1_circle = drawing_list[0] self.assertTrue(isinstance(calibPoint1_circle, pvm.Circle)) # size self.assertEqual(50, calibPoint1_circle.radius) # pos self.assertEqual(-546, calibPoint1_circle.pos[0]) self.assertEqual(307, calibPoint1_circle.pos[1]) # color self.assertEqual([0.4, 0.4, 0.4], calibPoint1_circle.fillColor.tolist()) self.assertEqual([1.0, 1.0, 1.0], calibPoint1_circle.lineColor.tolist()) # first calib point's text calibPoint1_text = drawing_list[1] self.assertTrue(isinstance(calibPoint1_text, pvm.TextStim)) # size self.assertEqual(60, calibPoint1_text.height) # pos self.assertEqual(-546, calibPoint1_text.pos[0]) self.assertEqual(307, calibPoint1_text.pos[1]) # color self.assertEqual([1.0, 1.0, 1.0], calibPoint1_text.color.tolist()) # text self.assertEqual(str("1") , calibPoint1_text.text) # first calib point's left eye line calibPoint1_left_eye = drawing_list[2] self.assertTrue(isinstance(calibPoint1_left_eye, pvm.Line)) # size self.assertEqual(20, calibPoint1_left_eye.lineWidth) # pos self.assertEqual(-546, calibPoint1_left_eye.start[0]) self.assertEqual(307, calibPoint1_left_eye.start[1]) self.assertEqual(-541, calibPoint1_left_eye.end[0]) self.assertEqual(304, calibPoint1_left_eye.end[1]) # color self.assertEqual("yellow", calibPoint1_left_eye.lineColor) # first calib point's right eye line calibPoint1_right_eye = drawing_list[3] self.assertTrue(isinstance(calibPoint1_right_eye, pvm.Line)) # size self.assertEqual(20, calibPoint1_right_eye.lineWidth) # pos self.assertEqual(-546, calibPoint1_right_eye.start[0]) self.assertEqual(307, calibPoint1_right_eye.start[1]) self.assertEqual(-514, calibPoint1_right_eye.end[0]) 
self.assertEqual(307, calibPoint1_right_eye.end[1]) # color self.assertEqual("red", calibPoint1_right_eye.lineColor) # second calib point's circle calibPoint2_circle = drawing_list[4] self.assertTrue(isinstance(calibPoint2_circle, pvm.Circle)) # size self.assertEqual(50, calibPoint2_circle.radius) # pos self.assertEqual(546, calibPoint2_circle.pos[0]) self.assertEqual(-307, calibPoint2_circle.pos[1]) # color self.assertEqual([0.4, 0.4, 0.4], calibPoint2_circle.fillColor.tolist()) self.assertEqual([1.0, 1.0, 1.0], calibPoint2_circle.lineColor.tolist()) # second calib point's text calibPoint2_text = drawing_list[5] self.assertTrue(isinstance(calibPoint2_text, pvm.TextStim)) # size self.assertEqual(60, calibPoint2_text.height) # pos self.assertEqual(546, calibPoint2_text.pos[0]) self.assertEqual(-307, calibPoint2_text.pos[1]) # color self.assertEqual([1.0, 1.0, 1.0], calibPoint2_text.color.tolist()) # text self.assertEqual(str("2") , calibPoint2_text.text) # second calib point's left eye line calibPoint2_left_eye = drawing_list[6] self.assertTrue(isinstance(calibPoint2_left_eye, pvm.Line)) # size self.assertEqual(20, calibPoint2_left_eye.lineWidth) # pos self.assertEqual(546, calibPoint2_left_eye.start[0]) self.assertEqual(-307, calibPoint2_left_eye.start[1]) self.assertEqual(582, calibPoint2_left_eye.end[0]) self.assertEqual(-320, calibPoint2_left_eye.end[1]) # color self.assertEqual("yellow", calibPoint2_left_eye.lineColor) # second calib point's right eye line calibPoint2_right_eye = drawing_list[7] self.assertTrue(isinstance(calibPoint2_right_eye, pvm.Line)) # size self.assertEqual(20, calibPoint2_right_eye.lineWidth) # pos self.assertEqual(546, calibPoint2_right_eye.start[0]) self.assertEqual(-307, calibPoint2_right_eye.start[1]) self.assertEqual(623, calibPoint2_right_eye.end[0]) self.assertEqual(-368, calibPoint2_right_eye.end[1]) # color self.assertEqual("red", calibPoint2_right_eye.lineColor) # text feedback_text = drawing_list[8] 
self.assertTrue(isinstance(feedback_text, pvm.TextStim)) self.assertEqual(str("Wait for the experimenter. \nUse number keys to select points for recalibration."), feedback_text.text) # text feedback_text = drawing_list[9] self.assertTrue(isinstance(feedback_text, pvm.TextStim)) self.assertEqual(str("Finished checking. Resuming calibration."), feedback_text.text) def testFiveCalibPoints(self): tobii_helper = calibrator.TobiiHelper() self.initAll(tobii_helper) visual_mock = pvm.PsychoPyVisualMock() visual_mock.setReturnKeyList(['c']) tobii_helper._TobiiHelper__drawCalibrationResults(self.calibResult, self.calibWin, self.calibDict) drawing_list = visual_mock.getListOfDrawings() self.assertEqual(22, len(drawing_list)) for i in range(0,5): index = i * 4 # calib point's circle calibPoint_circle = drawing_list[index] self.assertTrue(isinstance(calibPoint_circle, pvm.Circle)) if i is 0: self.assertEqual(-546, calibPoint_circle.pos[0]) self.assertEqual(307, calibPoint_circle.pos[1]) elif i is 1: self.assertEqual(546, calibPoint_circle.pos[0]) self.assertEqual(307, calibPoint_circle.pos[1]) elif i is 2: self.assertEqual(0, calibPoint_circle.pos[0]) self.assertEqual(0, calibPoint_circle.pos[1]) elif i is 3: self.assertEqual(-546, calibPoint_circle.pos[0]) self.assertEqual(-307, calibPoint_circle.pos[1]) elif i is 4: self.assertEqual(546, calibPoint_circle.pos[0]) self.assertEqual(-307, calibPoint_circle.pos[1]) # color self.assertEqual([1.0, 1.0, 1.0], calibPoint_circle.lineColor.tolist()) # calib point's text calibPoint_text = drawing_list[index + 1] self.assertTrue(isinstance(calibPoint_text, pvm.TextStim)) if i is 0: self.assertEqual(-546, calibPoint_text.pos[0]) self.assertEqual(307, calibPoint_text.pos[1]) # text self.assertEqual(str("1") , calibPoint_text.text) elif i is 1: self.assertEqual(546, calibPoint_text.pos[0]) self.assertEqual(307, calibPoint_text.pos[1]) # text self.assertEqual(str("2") , calibPoint_text.text) elif i is 2: self.assertEqual(0, 
calibPoint_text.pos[0]) self.assertEqual(0, calibPoint_text.pos[1]) # text self.assertEqual(str("3") , calibPoint_text.text) elif i is 3: self.assertEqual(-546, calibPoint_text.pos[0]) self.assertEqual(-307, calibPoint_text.pos[1]) # text self.assertEqual(str("4") , calibPoint_text.text) elif i is 5: self.assertEqual(546, calibPoint_text.pos[0]) self.assertEqual(-307, calibPoint_text.pos[1]) # text self.assertEqual(str("5") , calibPoint_text.text) # calib point's left eye line calibPoint_left_eye = drawing_list[index + 2] self.assertTrue(isinstance(calibPoint_left_eye, pvm.Line)) # pos if i is 0: self.assertEqual(-546, calibPoint_left_eye.start[0]) self.assertEqual(307, calibPoint_left_eye.start[1]) self.assertEqual(-541, calibPoint_left_eye.end[0]) self.assertEqual(304, calibPoint_left_eye.end[1]) elif i is 1: self.assertEqual(546, calibPoint_left_eye.start[0]) self.assertEqual(307, calibPoint_left_eye.start[1]) self.assertEqual(582, calibPoint_left_eye.end[0]) self.assertEqual(299, calibPoint_left_eye.end[1]) elif i is 2: self.assertEqual(0, calibPoint_left_eye.start[0]) self.assertEqual(0, calibPoint_left_eye.start[1]) self.assertEqual(-27, calibPoint_left_eye.end[0]) self.assertEqual(-5, calibPoint_left_eye.end[1]) elif i is 3: self.assertEqual(-546, calibPoint_left_eye.start[0]) self.assertEqual(-307, calibPoint_left_eye.start[1]) self.assertEqual(-541, calibPoint_left_eye.end[0]) self.assertEqual(-335, calibPoint_left_eye.end[1]) elif i is 4: self.assertEqual(546, calibPoint_left_eye.start[0]) self.assertEqual(-307, calibPoint_left_eye.start[1]) self.assertEqual(596, calibPoint_left_eye.end[0]) self.assertEqual(-335, calibPoint_left_eye.end[1]) # first calib point's right eye line calibPoint_right_eye = drawing_list[index + 3] self.assertTrue(isinstance(calibPoint_right_eye, pvm.Line)) if i is 0: self.assertEqual(-546, calibPoint_right_eye.start[0]) self.assertEqual(307, calibPoint_right_eye.start[1]) self.assertEqual(-514, calibPoint_right_eye.end[0]) 
self.assertEqual(307, calibPoint_right_eye.end[1]) elif i is 1: self.assertEqual(546, calibPoint_right_eye.start[0]) self.assertEqual(307, calibPoint_right_eye.start[1]) self.assertEqual(623, calibPoint_right_eye.end[0]) self.assertEqual(309, calibPoint_right_eye.end[1]) elif i is 2: self.assertEqual(0, calibPoint_right_eye.start[0]) self.assertEqual(0, calibPoint_right_eye.start[1]) self.assertEqual(4, calibPoint_right_eye.end[0]) self.assertEqual(5, calibPoint_right_eye.end[1]) elif i is 3: self.assertEqual(-546, calibPoint_right_eye.start[0]) self.assertEqual(-307, calibPoint_right_eye.start[1]) self.assertEqual(-514, calibPoint_right_eye.end[0]) self.assertEqual(-286, calibPoint_right_eye.end[1]) elif i is 4: self.assertEqual(546, calibPoint_right_eye.start[0]) self.assertEqual(-307, calibPoint_right_eye.start[1]) self.assertEqual(623, calibPoint_right_eye.end[0]) self.assertEqual(-332, calibPoint_right_eye.end[1]) # text feedback_text = drawing_list[index + 4] self.assertTrue(isinstance(feedback_text, pvm.TextStim)) self.assertEqual(str("Wait for the experimenter. 
\nUse number keys to select points for recalibration."), feedback_text.text) def testNoReturnedValues(self): tobii_helper = calibrator.TobiiHelper() self.initAll(tobii_helper) visual_mock = pvm.PsychoPyVisualMock() visual_mock.setReturnKeyList(['c']) result = tobii_helper._TobiiHelper__drawCalibrationResults(self.calibResult, self.calibWin, self.calibDict) self.assertEqual(0, len(result)) def testHasOneRedoPoint(self): tobii_helper = calibrator.TobiiHelper() self.initAll(tobii_helper) visual_mock = pvm.PsychoPyVisualMock() visual_mock.setReturnKeyList(['3', 'c']) result = tobii_helper._TobiiHelper__drawCalibrationResults(self.calibResult, self.calibWin, self.calibDict) self.assertEqual(1, len(result)) self.assertEqual(collections.OrderedDict([('3',(0.5, 0.5))]), result) def testHasSomeRedoPoints(self): tobii_helper = calibrator.TobiiHelper() self.initAll(tobii_helper) visual_mock = pvm.PsychoPyVisualMock() visual_mock.setReturnKeyList(['5', '1', '2', 'c']) result = tobii_helper._TobiiHelper__drawCalibrationResults(self.calibResult, self.calibWin, self.calibDict) self.assertEqual(3, len(result)) self.assertEqual(collections.OrderedDict([('5',(0.9, 0.9)), ('1',(0.1, 0.1)), ('2',(0.9, 0.1))]), result) def testRedundantRedoPoints(self): tobii_helper = calibrator.TobiiHelper() self.initAll(tobii_helper) visual_mock = pvm.PsychoPyVisualMock() visual_mock.setReturnKeyList(['1', '2', '1', '3', '3', '1', 'c']) result = tobii_helper._TobiiHelper__drawCalibrationResults(self.calibResult, self.calibWin, self.calibDict) self.assertEqual(2, len(result)) self.assertEqual(collections.OrderedDict([('2',(0.9, 0.1)), ('1',(0.1, 0.1))]), result) def testRedoPointDrawing(self): tobii_helper = calibrator.TobiiHelper() self.initAll(tobii_helper) visual_mock = pvm.PsychoPyVisualMock() visual_mock.setReturnKeyList(['1', '4', 'c']) tobii_helper._TobiiHelper__drawCalibrationResults(self.calibResult, self.calibWin, self.calibDict) drawing_list = visual_mock.getListOfDrawings() 
self.assertEqual(64, len(drawing_list)) # first calib point's color before pushing any button calibPoint_circle = drawing_list[0] self.assertTrue(isinstance(calibPoint_circle, pvm.Circle)) self.assertEqual([1.0, 1.0, 1.0], calibPoint_circle.lineColor.tolist()) # first calib point's color after first keyboard input calibPoint_circle = drawing_list[21] self.assertTrue(isinstance(calibPoint_circle, pvm.Circle)) self.assertEqual([-1.0, 1.0, -1.0], calibPoint_circle.lineColor.tolist()) # first calib point's color after second keyboard input calibPoint_circle = drawing_list[42] self.assertTrue(isinstance(calibPoint_circle, pvm.Circle)) self.assertEqual([-1.0, 1.0, -1.0], calibPoint_circle.lineColor.tolist()) # fifth calib point's color before pushing any button calibPoint_circle = drawing_list[12] self.assertTrue(isinstance(calibPoint_circle, pvm.Circle)) self.assertEqual([1.0, 1.0, 1.0], calibPoint_circle.lineColor.tolist()) # fifth calib point's color after first keyboard input calibPoint_circle = drawing_list[33] self.assertTrue(isinstance(calibPoint_circle, pvm.Circle)) self.assertEqual([1.0, 1.0, 1.0], calibPoint_circle.lineColor.tolist()) # fifth calib point's color after second keyboard input calibPoint_circle = drawing_list[54] self.assertTrue(isinstance(calibPoint_circle, pvm.Circle)) self.assertEqual([-1.0, 1.0, -1.0], calibPoint_circle.lineColor.tolist()) def testTwoCalibPointsWithoutNullItem(self): tobii_helper = calibrator.TobiiHelper() self.initAll(tobii_helper) calibration_point = tobii.CalibrationPoint((0.1, 0.1),( tobii.CalibrationSample(tobii.CalibrationEyeData((0.08, 0.08), True), tobii.CalibrationEyeData((0.09, 0.08), True)), tobii.CalibrationSample(tobii.CalibrationEyeData((0.12, 0.11), True), tobii.CalibrationEyeData((0.18, 0.12), True)), tobii.CalibrationSample(tobii.CalibrationEyeData((0.11, 0.12), True), tobii.CalibrationEyeData((0.10, 0.10), True)))) calibration_point2 = tobii.CalibrationPoint((0.9, 0.9),( 
tobii.CalibrationSample(tobii.CalibrationEyeData((0.98, 0.98), True), tobii.CalibrationEyeData((0.99, 0.98), True)), tobii.CalibrationSample(tobii.CalibrationEyeData((0.91, 0.90), True), tobii.CalibrationEyeData((0.90, 0.97), True)), tobii.CalibrationSample(tobii.CalibrationEyeData((0.89, 0.87), True), tobii.CalibrationEyeData((0.98, 0.99), True)))) calibration_points = (calibration_point, calibration_point2) self.calibResult = tobii.CalibrationResult(tobii.CALIBRATION_STATUS_SUCCESS, calibration_points) pointList = [('1',(0.1, 0.1)), ('2',(0.9, 0.9))] self.calibDict = collections.OrderedDict(pointList) visual_mock = pvm.PsychoPyVisualMock() visual_mock.setReturnKeyList(['c']) tobii_helper._TobiiHelper__drawCalibrationResults(self.calibResult, self.calibWin, self.calibDict) drawing_list = visual_mock.getListOfDrawings() self.assertEqual(10, len(drawing_list)) # first calib point's circle calibPoint1_circle = drawing_list[0] self.assertTrue(isinstance(calibPoint1_circle, pvm.Circle)) # size self.assertEqual(50, calibPoint1_circle.radius) # pos self.assertEqual(-546, calibPoint1_circle.pos[0]) self.assertEqual(307, calibPoint1_circle.pos[1]) # color self.assertEqual([0.4, 0.4, 0.4], calibPoint1_circle.fillColor.tolist()) self.assertEqual([1.0, 1.0, 1.0], calibPoint1_circle.lineColor.tolist()) # first calib point's text calibPoint1_text = drawing_list[1] self.assertTrue(isinstance(calibPoint1_text, pvm.TextStim)) # size self.assertEqual(60, calibPoint1_text.height) # pos self.assertEqual(-546, calibPoint1_text.pos[0]) self.assertEqual(307, calibPoint1_text.pos[1]) # color self.assertEqual([1.0, 1.0, 1.0], calibPoint1_text.color.tolist()) # text self.assertEqual(str("1") , calibPoint1_text.text) # first calib point's left eye line calibPoint1_left_eye = drawing_list[2] self.assertTrue(isinstance(calibPoint1_left_eye, pvm.Line)) # size self.assertEqual(20, calibPoint1_left_eye.lineWidth) # pos self.assertEqual(-546, calibPoint1_left_eye.start[0]) self.assertEqual(307, 
calibPoint1_left_eye.start[1]) self.assertEqual(-541, calibPoint1_left_eye.end[0]) self.assertEqual(304, calibPoint1_left_eye.end[1]) # color self.assertEqual("yellow", calibPoint1_left_eye.lineColor) # first calib point's right eye line calibPoint1_right_eye = drawing_list[3] self.assertTrue(isinstance(calibPoint1_right_eye, pvm.Line)) # size self.assertEqual(20, calibPoint1_right_eye.lineWidth) # pos self.assertEqual(-546, calibPoint1_right_eye.start[0]) self.assertEqual(307, calibPoint1_right_eye.start[1]) self.assertEqual(-514, calibPoint1_right_eye.end[0]) self.assertEqual(307, calibPoint1_right_eye.end[1]) # color self.assertEqual("red", calibPoint1_right_eye.lineColor) # second calib point's circle calibPoint2_circle = drawing_list[4] self.assertTrue(isinstance(calibPoint2_circle, pvm.Circle)) # size self.assertEqual(50, calibPoint2_circle.radius) # pos self.assertEqual(546, calibPoint2_circle.pos[0]) self.assertEqual(-307, calibPoint2_circle.pos[1]) # color self.assertEqual([0.4, 0.4, 0.4], calibPoint2_circle.fillColor.tolist()) self.assertEqual([1.0, 1.0, 1.0], calibPoint2_circle.lineColor.tolist()) # second calib point's text calibPoint2_text = drawing_list[5] self.assertTrue(isinstance(calibPoint2_text, pvm.TextStim)) # size self.assertEqual(60, calibPoint2_text.height) # pos self.assertEqual(546, calibPoint2_text.pos[0]) self.assertEqual(-307, calibPoint2_text.pos[1]) # color self.assertEqual([1.0, 1.0, 1.0], calibPoint2_text.color.tolist()) # text self.assertEqual(str("2") , calibPoint2_text.text) # second calib point's left eye line calibPoint2_left_eye = drawing_list[6] self.assertTrue(isinstance(calibPoint2_left_eye, pvm.Line)) # size self.assertEqual(20, calibPoint2_left_eye.lineWidth) # pos self.assertEqual(546, calibPoint2_left_eye.start[0]) self.assertEqual(-307, calibPoint2_left_eye.start[1]) self.assertEqual(582, calibPoint2_left_eye.end[0]) self.assertEqual(-320, calibPoint2_left_eye.end[1]) # color self.assertEqual("yellow", 
calibPoint2_left_eye.lineColor) # second calib point's right eye line calibPoint2_right_eye = drawing_list[7] self.assertTrue(isinstance(calibPoint2_right_eye, pvm.Line)) # size self.assertEqual(20, calibPoint2_right_eye.lineWidth) # pos self.assertEqual(546, calibPoint2_right_eye.start[0]) self.assertEqual(-307, calibPoint2_right_eye.start[1]) self.assertEqual(623, calibPoint2_right_eye.end[0]) self.assertEqual(-368, calibPoint2_right_eye.end[1]) # color self.assertEqual("red", calibPoint2_right_eye.lineColor) # text feedback_text = drawing_list[8] self.assertTrue(isinstance(feedback_text, pvm.TextStim)) self.assertEqual(str("Wait for the experimenter. \nUse number keys to select points for recalibration."), feedback_text.text) # text feedback_text = drawing_list[9] self.assertTrue(isinstance(feedback_text, pvm.TextStim)) self.assertEqual(str("Finished checking. Resuming calibration."), feedback_text.text) def testQuitByQ(self): tobii_helper = calibrator.TobiiHelper() self.initAll(tobii_helper) visual_mock = pvm.PsychoPyVisualMock() visual_mock.setReturnKeyList(['q']) with self.assertRaises(SystemExit): tobii_helper._TobiiHelper__drawCalibrationResults(self.calibResult, self.calibWin, self.calibDict) def testCustomAccuracy(self): tobii_helper = calibrator.TobiiHelper() self.initAll(tobii_helper) tobii_helper.setAccuracy(30) visual_mock = pvm.PsychoPyVisualMock() visual_mock.setReturnKeyList(['c']) tobii_helper._TobiiHelper__drawCalibrationResults(self.calibResult, self.calibWin, self.calibDict) drawing_list = visual_mock.getListOfDrawings() self.assertEqual(22, len(drawing_list)) for i in range(0,5): index = i * 4 # calib point's circle calibPoint_circle = drawing_list[index] self.assertTrue(isinstance(calibPoint_circle, pvm.Circle)) self.assertEqual(30, calibPoint_circle.radius) if __name__ == "__main__": unittest.main() # run all tests
50.596017
133
0.592623
3,730
35,569
5.499464
0.069973
0.130161
0.084239
0.00741
0.908643
0.899771
0.859504
0.834154
0.800322
0.777068
0
0.0564
0.299137
35,569
703
134
50.596017
0.766457
0.048104
0
0.739044
0
0
0.014784
0.00077
0
0
0
0
0.432271
1
0.02988
false
0.001992
0.013944
0
0.047809
0.001992
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
1
0
0
0
0
0
0
0
0
0
8
7a4f3591e95ec672859a2ff3ff2eb4015799adf2
2,993
py
Python
azure-kusto-ingest/tests/test_descriptors.py
cclauss/azure-kusto-python
92e2d5c97079b96f78cf6ef1619ebe89cb4d4985
[ "MIT" ]
null
null
null
azure-kusto-ingest/tests/test_descriptors.py
cclauss/azure-kusto-python
92e2d5c97079b96f78cf6ef1619ebe89cb4d4985
[ "MIT" ]
null
null
null
azure-kusto-ingest/tests/test_descriptors.py
cclauss/azure-kusto-python
92e2d5c97079b96f78cf6ef1619ebe89cb4d4985
[ "MIT" ]
null
null
null
import sys from os import path import unittest from azure.kusto.ingest import FileDescriptor class DescriptorsTest(unittest.TestCase): """Test class for FileDescriptor and BlobDescriptor.""" def test_unzipped_file_with_size(self): """Tests FileDescriptor with size and unzipped file.""" filePath = path.join(path.dirname(path.abspath(__file__)), "input", "dataset.csv") descriptor = FileDescriptor(filePath, 10) with descriptor.open(True) as stream: assert descriptor.size == 10 assert descriptor.stream_name.endswith(".csv.gz") if sys.version_info[0] >= 3: assert stream.readable() assert stream.tell() == 0 assert stream.closed == True def test_unzipped_file_without_size(self): """Tests FileDescriptor without size and unzipped file.""" filePath = path.join(path.dirname(path.abspath(__file__)), "input", "dataset.csv") descriptor = FileDescriptor(filePath, 0) with descriptor.open(True) as stream: assert descriptor.size > 0 assert descriptor.stream_name.endswith(".csv.gz") if sys.version_info[0] >= 3: assert stream.readable() assert stream.tell() == 0 assert stream.closed == True def test_zipped_file_with_size(self): """Tests FileDescriptor with size and zipped file.""" filePath = path.join(path.dirname(path.abspath(__file__)), "input", "dataset.csv.gz") descriptor = FileDescriptor(filePath, 10) with descriptor.open(False) as stream: assert descriptor.size > 10 assert descriptor.stream_name.endswith(".csv.gz") if sys.version_info[0] >= 3: assert stream.readable() assert stream.tell() == 0 assert stream.closed == True def test_zipped_file_without_size(self): """Tests FileDescriptor without size and zipped file.""" filePath = path.join(path.dirname(path.abspath(__file__)), "input", "dataset.csv.gz") descriptor = FileDescriptor(filePath, 0) with descriptor.open(False) as stream: assert descriptor.size == 5071 assert descriptor.stream_name.endswith(".csv.gz") if sys.version_info[0] >= 3: assert stream.readable() assert stream.tell() == 0 assert stream.closed == True def 
test_unzipped_file_dont_compress(self): """Tests FileDescriptor with size and unzipped file.""" filePath = path.join(path.dirname(path.abspath(__file__)), "input", "dataset.csv") descriptor = FileDescriptor(filePath, 10) with descriptor.open(False) as stream: assert descriptor.size == 10 assert descriptor.stream_name.endswith(".csv") if sys.version_info[0] >= 3: assert stream.readable() assert stream.tell() == 0 assert stream.closed == True
40.445946
93
0.628132
340
2,993
5.382353
0.161765
0.098361
0.062842
0.054645
0.896175
0.896175
0.896175
0.895082
0.895082
0.767213
0
0.015406
0.262613
2,993
73
94
41
0.813774
0.100568
0
0.745455
0
0
0.044344
0
0
0
0
0
0.454545
1
0.090909
false
0
0.072727
0
0.181818
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
1
0
0
0
0
0
0
0
0
0
8
7a6953687cf544b52f0ea5170461e7f1d14a1649
5,328
py
Python
Boards.py
mohamedgalia/tower-defense
1755f3a138c10e50997483c26aa5e76f925f4261
[ "MIT" ]
null
null
null
Boards.py
mohamedgalia/tower-defense
1755f3a138c10e50997483c26aa5e76f925f4261
[ "MIT" ]
null
null
null
Boards.py
mohamedgalia/tower-defense
1755f3a138c10e50997483c26aa5e76f925f4261
[ "MIT" ]
null
null
null
import random board0 = [[0, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0], [0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 0], [0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 1, 0], [0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 1, 0], [0, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 0], [0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 1, 1, 0, 0], [0, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 0, 1, 0, 0], [0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 0, 0], [0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 0, 0], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 3]] board1 = [[0, 0, 0, 0, 0, 0, 2, 0, 0, 0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 0], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0], [0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0], [0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0], [0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0], [0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0], [0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0], [0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 3]] board2 = [[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0], [0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 0], [0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 0], [0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 0, 1, 0, 1, 0], [0, 1, 0, 1, 0, 0, 0, 0, 0, 1, 0, 1, 0, 1, 0], [0, 1, 0, 1, 0, 1, 1, 3, 0, 1, 0, 1, 0, 1, 0], [0, 1, 0, 1, 0, 1, 0, 0, 0, 1, 0, 1, 0, 1, 0], [0, 1, 0, 1, 0, 1, 0, 0, 0, 1, 0, 1, 0, 1, 0], [0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 0, 1, 0, 1, 0], [0, 1, 0, 1, 0, 0, 0, 0, 0, 
0, 0, 1, 0, 1, 0], [0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1], [0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1], [0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1]] board3 = [[0, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 0], [0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0], [0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0], [0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0], [0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0], [0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 0], [0, 1, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 0], [0, 1, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 0], [0, 1, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 0], [0, 1, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 0], [0, 1, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 0], [0, 1, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 0], [0, 1, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 0], [0, 1, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 0], [0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 0]] board4 = [[0, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [0, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 0]] board5 = [[0, 2, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 3], [0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [0, 1, 0, 0, 1, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0], [0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [0, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]] boards = [board0, board1, board2, board3, board4, board5] class BoardsClass: @staticmethod def GetBoard(): board = random.choice(boards) return board
49.333333
57
0.283971
1,376
5,328
1.099564
0.015262
1.016523
1.278916
1.512227
0.890284
0.890284
0.888962
0.888962
0.888962
0.888962
0
0.443359
0.423423
5,328
107
58
49.794393
0.049154
0
0
0.57732
0
0
0
0
0
0
0
0
0
1
0.010309
false
0
0.010309
0
0.041237
0
0
0
1
null
1
1
1
1
1
1
1
1
1
0
1
0
0
0
0
0
1
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
12
8f8a1afa1e7e31045a63c846f55b0258860eaf25
116
py
Python
Desafios/exercicio115/biblioteca/interface.py
gustavodoamaral/115_Desafios_Python
8baa1c0353a40f7a63f442293bc0f6852fd94da0
[ "MIT" ]
1
2022-02-07T01:12:19.000Z
2022-02-07T01:12:19.000Z
Desafios/exercicio115/biblioteca/interface.py
gustavodoamaral/desafios_python_gustavo_guanabara
8baa1c0353a40f7a63f442293bc0f6852fd94da0
[ "MIT" ]
null
null
null
Desafios/exercicio115/biblioteca/interface.py
gustavodoamaral/desafios_python_gustavo_guanabara
8baa1c0353a40f7a63f442293bc0f6852fd94da0
[ "MIT" ]
null
null
null
def linha(tam = 42): return '-' * tam def cabecalho(txt): print(linha()) print(txt) print(linha())
16.571429
21
0.560345
15
116
4.333333
0.533333
0.246154
0.4
0
0
0
0
0
0
0
0
0.023256
0.258621
116
7
22
16.571429
0.732558
0
0
0.333333
0
0
0.008547
0
0
0
0
0
0
1
0.333333
false
0
0
0.166667
0.5
0.5
1
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
0
1
0
7
8fe6d319da618ec3ed3595e2dd6fd1a4ef55d8e1
22,643
py
Python
sdk/python/pulumi_cloudflare/zone.py
pulumi/pulumi-cloudflare
d444af2fab6101b388a15cf2e3933e45e9935cc6
[ "ECL-2.0", "Apache-2.0" ]
35
2019-03-14T21:29:29.000Z
2022-03-30T00:00:59.000Z
sdk/python/pulumi_cloudflare/zone.py
pulumi/pulumi-cloudflare
d444af2fab6101b388a15cf2e3933e45e9935cc6
[ "ECL-2.0", "Apache-2.0" ]
128
2019-03-08T23:45:58.000Z
2022-03-31T21:05:22.000Z
sdk/python/pulumi_cloudflare/zone.py
pulumi/pulumi-cloudflare
d444af2fab6101b388a15cf2e3933e45e9935cc6
[ "ECL-2.0", "Apache-2.0" ]
6
2019-05-10T12:52:56.000Z
2020-03-24T15:02:14.000Z
# coding=utf-8 # *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. *** # *** Do not edit by hand unless you're certain you know what you are doing! *** import warnings import pulumi import pulumi.runtime from typing import Any, Mapping, Optional, Sequence, Union, overload from . import _utilities __all__ = ['ZoneArgs', 'Zone'] @pulumi.input_type class ZoneArgs: def __init__(__self__, *, zone: pulumi.Input[str], jump_start: Optional[pulumi.Input[bool]] = None, paused: Optional[pulumi.Input[bool]] = None, plan: Optional[pulumi.Input[str]] = None, type: Optional[pulumi.Input[str]] = None): """ The set of arguments for constructing a Zone resource. :param pulumi.Input[str] zone: The DNS zone name which will be added. :param pulumi.Input[bool] jump_start: Boolean of whether to scan for DNS records on creation. Ignored after zone is created. Default: false. :param pulumi.Input[bool] paused: Boolean of whether this zone is paused (traffic bypasses Cloudflare). Default: false. :param pulumi.Input[str] plan: The name of the commercial plan to apply to the zone, can be updated once the zone is created; one of `free`, `pro`, `business`, `enterprise`. :param pulumi.Input[str] type: A full zone implies that DNS is hosted with Cloudflare. A partial zone is typically a partner-hosted zone or a CNAME setup. Valid values: `full`, `partial`. Default is `full`. """ pulumi.set(__self__, "zone", zone) if jump_start is not None: pulumi.set(__self__, "jump_start", jump_start) if paused is not None: pulumi.set(__self__, "paused", paused) if plan is not None: pulumi.set(__self__, "plan", plan) if type is not None: pulumi.set(__self__, "type", type) @property @pulumi.getter def zone(self) -> pulumi.Input[str]: """ The DNS zone name which will be added. 
""" return pulumi.get(self, "zone") @zone.setter def zone(self, value: pulumi.Input[str]): pulumi.set(self, "zone", value) @property @pulumi.getter(name="jumpStart") def jump_start(self) -> Optional[pulumi.Input[bool]]: """ Boolean of whether to scan for DNS records on creation. Ignored after zone is created. Default: false. """ return pulumi.get(self, "jump_start") @jump_start.setter def jump_start(self, value: Optional[pulumi.Input[bool]]): pulumi.set(self, "jump_start", value) @property @pulumi.getter def paused(self) -> Optional[pulumi.Input[bool]]: """ Boolean of whether this zone is paused (traffic bypasses Cloudflare). Default: false. """ return pulumi.get(self, "paused") @paused.setter def paused(self, value: Optional[pulumi.Input[bool]]): pulumi.set(self, "paused", value) @property @pulumi.getter def plan(self) -> Optional[pulumi.Input[str]]: """ The name of the commercial plan to apply to the zone, can be updated once the zone is created; one of `free`, `pro`, `business`, `enterprise`. """ return pulumi.get(self, "plan") @plan.setter def plan(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "plan", value) @property @pulumi.getter def type(self) -> Optional[pulumi.Input[str]]: """ A full zone implies that DNS is hosted with Cloudflare. A partial zone is typically a partner-hosted zone or a CNAME setup. Valid values: `full`, `partial`. Default is `full`. 
""" return pulumi.get(self, "type") @type.setter def type(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "type", value) @pulumi.input_type class _ZoneState: def __init__(__self__, *, jump_start: Optional[pulumi.Input[bool]] = None, meta: Optional[pulumi.Input[Mapping[str, pulumi.Input[bool]]]] = None, name_servers: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None, paused: Optional[pulumi.Input[bool]] = None, plan: Optional[pulumi.Input[str]] = None, status: Optional[pulumi.Input[str]] = None, type: Optional[pulumi.Input[str]] = None, vanity_name_servers: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None, verification_key: Optional[pulumi.Input[str]] = None, zone: Optional[pulumi.Input[str]] = None): """ Input properties used for looking up and filtering Zone resources. :param pulumi.Input[bool] jump_start: Boolean of whether to scan for DNS records on creation. Ignored after zone is created. Default: false. :param pulumi.Input[Sequence[pulumi.Input[str]]] name_servers: Cloudflare-assigned name servers. This is only populated for zones that use Cloudflare DNS. :param pulumi.Input[bool] paused: Boolean of whether this zone is paused (traffic bypasses Cloudflare). Default: false. :param pulumi.Input[str] plan: The name of the commercial plan to apply to the zone, can be updated once the zone is created; one of `free`, `pro`, `business`, `enterprise`. :param pulumi.Input[str] status: Status of the zone. Valid values: `active`, `pending`, `initializing`, `moved`, `deleted`, `deactivated`. :param pulumi.Input[str] type: A full zone implies that DNS is hosted with Cloudflare. A partial zone is typically a partner-hosted zone or a CNAME setup. Valid values: `full`, `partial`. Default is `full`. :param pulumi.Input[Sequence[pulumi.Input[str]]] vanity_name_servers: List of Vanity Nameservers (if set). * `meta.wildcard_proxiable` - Indicates whether wildcard DNS records can receive Cloudflare security and performance features. 
* `meta.phishing_detected` - Indicates if URLs on the zone have been identified as hosting phishing content. :param pulumi.Input[str] verification_key: Contains the TXT record value to validate domain ownership. This is only populated for zones of type `partial`. :param pulumi.Input[str] zone: The DNS zone name which will be added. """ if jump_start is not None: pulumi.set(__self__, "jump_start", jump_start) if meta is not None: pulumi.set(__self__, "meta", meta) if name_servers is not None: pulumi.set(__self__, "name_servers", name_servers) if paused is not None: pulumi.set(__self__, "paused", paused) if plan is not None: pulumi.set(__self__, "plan", plan) if status is not None: pulumi.set(__self__, "status", status) if type is not None: pulumi.set(__self__, "type", type) if vanity_name_servers is not None: pulumi.set(__self__, "vanity_name_servers", vanity_name_servers) if verification_key is not None: pulumi.set(__self__, "verification_key", verification_key) if zone is not None: pulumi.set(__self__, "zone", zone) @property @pulumi.getter(name="jumpStart") def jump_start(self) -> Optional[pulumi.Input[bool]]: """ Boolean of whether to scan for DNS records on creation. Ignored after zone is created. Default: false. """ return pulumi.get(self, "jump_start") @jump_start.setter def jump_start(self, value: Optional[pulumi.Input[bool]]): pulumi.set(self, "jump_start", value) @property @pulumi.getter def meta(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[bool]]]]: return pulumi.get(self, "meta") @meta.setter def meta(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[bool]]]]): pulumi.set(self, "meta", value) @property @pulumi.getter(name="nameServers") def name_servers(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]: """ Cloudflare-assigned name servers. This is only populated for zones that use Cloudflare DNS. 
""" return pulumi.get(self, "name_servers") @name_servers.setter def name_servers(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]): pulumi.set(self, "name_servers", value) @property @pulumi.getter def paused(self) -> Optional[pulumi.Input[bool]]: """ Boolean of whether this zone is paused (traffic bypasses Cloudflare). Default: false. """ return pulumi.get(self, "paused") @paused.setter def paused(self, value: Optional[pulumi.Input[bool]]): pulumi.set(self, "paused", value) @property @pulumi.getter def plan(self) -> Optional[pulumi.Input[str]]: """ The name of the commercial plan to apply to the zone, can be updated once the zone is created; one of `free`, `pro`, `business`, `enterprise`. """ return pulumi.get(self, "plan") @plan.setter def plan(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "plan", value) @property @pulumi.getter def status(self) -> Optional[pulumi.Input[str]]: """ Status of the zone. Valid values: `active`, `pending`, `initializing`, `moved`, `deleted`, `deactivated`. """ return pulumi.get(self, "status") @status.setter def status(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "status", value) @property @pulumi.getter def type(self) -> Optional[pulumi.Input[str]]: """ A full zone implies that DNS is hosted with Cloudflare. A partial zone is typically a partner-hosted zone or a CNAME setup. Valid values: `full`, `partial`. Default is `full`. """ return pulumi.get(self, "type") @type.setter def type(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "type", value) @property @pulumi.getter(name="vanityNameServers") def vanity_name_servers(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]: """ List of Vanity Nameservers (if set). * `meta.wildcard_proxiable` - Indicates whether wildcard DNS records can receive Cloudflare security and performance features. * `meta.phishing_detected` - Indicates if URLs on the zone have been identified as hosting phishing content. 
""" return pulumi.get(self, "vanity_name_servers") @vanity_name_servers.setter def vanity_name_servers(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]): pulumi.set(self, "vanity_name_servers", value) @property @pulumi.getter(name="verificationKey") def verification_key(self) -> Optional[pulumi.Input[str]]: """ Contains the TXT record value to validate domain ownership. This is only populated for zones of type `partial`. """ return pulumi.get(self, "verification_key") @verification_key.setter def verification_key(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "verification_key", value) @property @pulumi.getter def zone(self) -> Optional[pulumi.Input[str]]: """ The DNS zone name which will be added. """ return pulumi.get(self, "zone") @zone.setter def zone(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "zone", value) class Zone(pulumi.CustomResource): @overload def __init__(__self__, resource_name: str, opts: Optional[pulumi.ResourceOptions] = None, jump_start: Optional[pulumi.Input[bool]] = None, paused: Optional[pulumi.Input[bool]] = None, plan: Optional[pulumi.Input[str]] = None, type: Optional[pulumi.Input[str]] = None, zone: Optional[pulumi.Input[str]] = None, __props__=None): """ ## Import Zone resource can be imported using a zone ID, e.g. ```sh $ pulumi import cloudflare:index/zone:Zone example d41d8cd98f00b204e9800998ecf8427e ``` where* `d41d8cd98f00b204e9800998ecf8427e` - zone ID, as returned from [API](https://api.cloudflare.com/#zone-list-zones) :param str resource_name: The name of the resource. :param pulumi.ResourceOptions opts: Options for the resource. :param pulumi.Input[bool] jump_start: Boolean of whether to scan for DNS records on creation. Ignored after zone is created. Default: false. :param pulumi.Input[bool] paused: Boolean of whether this zone is paused (traffic bypasses Cloudflare). Default: false. 
:param pulumi.Input[str] plan: The name of the commercial plan to apply to the zone, can be updated once the zone is created; one of `free`, `pro`, `business`, `enterprise`. :param pulumi.Input[str] type: A full zone implies that DNS is hosted with Cloudflare. A partial zone is typically a partner-hosted zone or a CNAME setup. Valid values: `full`, `partial`. Default is `full`. :param pulumi.Input[str] zone: The DNS zone name which will be added. """ ... @overload def __init__(__self__, resource_name: str, args: ZoneArgs, opts: Optional[pulumi.ResourceOptions] = None): """ ## Import Zone resource can be imported using a zone ID, e.g. ```sh $ pulumi import cloudflare:index/zone:Zone example d41d8cd98f00b204e9800998ecf8427e ``` where* `d41d8cd98f00b204e9800998ecf8427e` - zone ID, as returned from [API](https://api.cloudflare.com/#zone-list-zones) :param str resource_name: The name of the resource. :param ZoneArgs args: The arguments to use to populate this resource's properties. :param pulumi.ResourceOptions opts: Options for the resource. """ ... 
def __init__(__self__, resource_name: str, *args, **kwargs): resource_args, opts = _utilities.get_resource_args_opts(ZoneArgs, pulumi.ResourceOptions, *args, **kwargs) if resource_args is not None: __self__._internal_init(resource_name, opts, **resource_args.__dict__) else: __self__._internal_init(resource_name, *args, **kwargs) def _internal_init(__self__, resource_name: str, opts: Optional[pulumi.ResourceOptions] = None, jump_start: Optional[pulumi.Input[bool]] = None, paused: Optional[pulumi.Input[bool]] = None, plan: Optional[pulumi.Input[str]] = None, type: Optional[pulumi.Input[str]] = None, zone: Optional[pulumi.Input[str]] = None, __props__=None): if opts is None: opts = pulumi.ResourceOptions() if not isinstance(opts, pulumi.ResourceOptions): raise TypeError('Expected resource options to be a ResourceOptions instance') if opts.version is None: opts.version = _utilities.get_version() if opts.id is None: if __props__ is not None: raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource') __props__ = ZoneArgs.__new__(ZoneArgs) __props__.__dict__["jump_start"] = jump_start __props__.__dict__["paused"] = paused __props__.__dict__["plan"] = plan __props__.__dict__["type"] = type if zone is None and not opts.urn: raise TypeError("Missing required property 'zone'") __props__.__dict__["zone"] = zone __props__.__dict__["meta"] = None __props__.__dict__["name_servers"] = None __props__.__dict__["status"] = None __props__.__dict__["vanity_name_servers"] = None __props__.__dict__["verification_key"] = None super(Zone, __self__).__init__( 'cloudflare:index/zone:Zone', resource_name, __props__, opts) @staticmethod def get(resource_name: str, id: pulumi.Input[str], opts: Optional[pulumi.ResourceOptions] = None, jump_start: Optional[pulumi.Input[bool]] = None, meta: Optional[pulumi.Input[Mapping[str, pulumi.Input[bool]]]] = None, name_servers: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None, paused: 
Optional[pulumi.Input[bool]] = None, plan: Optional[pulumi.Input[str]] = None, status: Optional[pulumi.Input[str]] = None, type: Optional[pulumi.Input[str]] = None, vanity_name_servers: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None, verification_key: Optional[pulumi.Input[str]] = None, zone: Optional[pulumi.Input[str]] = None) -> 'Zone': """ Get an existing Zone resource's state with the given name, id, and optional extra properties used to qualify the lookup. :param str resource_name: The unique name of the resulting resource. :param pulumi.Input[str] id: The unique provider ID of the resource to lookup. :param pulumi.ResourceOptions opts: Options for the resource. :param pulumi.Input[bool] jump_start: Boolean of whether to scan for DNS records on creation. Ignored after zone is created. Default: false. :param pulumi.Input[Sequence[pulumi.Input[str]]] name_servers: Cloudflare-assigned name servers. This is only populated for zones that use Cloudflare DNS. :param pulumi.Input[bool] paused: Boolean of whether this zone is paused (traffic bypasses Cloudflare). Default: false. :param pulumi.Input[str] plan: The name of the commercial plan to apply to the zone, can be updated once the zone is created; one of `free`, `pro`, `business`, `enterprise`. :param pulumi.Input[str] status: Status of the zone. Valid values: `active`, `pending`, `initializing`, `moved`, `deleted`, `deactivated`. :param pulumi.Input[str] type: A full zone implies that DNS is hosted with Cloudflare. A partial zone is typically a partner-hosted zone or a CNAME setup. Valid values: `full`, `partial`. Default is `full`. :param pulumi.Input[Sequence[pulumi.Input[str]]] vanity_name_servers: List of Vanity Nameservers (if set). * `meta.wildcard_proxiable` - Indicates whether wildcard DNS records can receive Cloudflare security and performance features. * `meta.phishing_detected` - Indicates if URLs on the zone have been identified as hosting phishing content. 
:param pulumi.Input[str] verification_key: Contains the TXT record value to validate domain ownership. This is only populated for zones of type `partial`. :param pulumi.Input[str] zone: The DNS zone name which will be added. """ opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id)) __props__ = _ZoneState.__new__(_ZoneState) __props__.__dict__["jump_start"] = jump_start __props__.__dict__["meta"] = meta __props__.__dict__["name_servers"] = name_servers __props__.__dict__["paused"] = paused __props__.__dict__["plan"] = plan __props__.__dict__["status"] = status __props__.__dict__["type"] = type __props__.__dict__["vanity_name_servers"] = vanity_name_servers __props__.__dict__["verification_key"] = verification_key __props__.__dict__["zone"] = zone return Zone(resource_name, opts=opts, __props__=__props__) @property @pulumi.getter(name="jumpStart") def jump_start(self) -> pulumi.Output[Optional[bool]]: """ Boolean of whether to scan for DNS records on creation. Ignored after zone is created. Default: false. """ return pulumi.get(self, "jump_start") @property @pulumi.getter def meta(self) -> pulumi.Output[Mapping[str, bool]]: return pulumi.get(self, "meta") @property @pulumi.getter(name="nameServers") def name_servers(self) -> pulumi.Output[Sequence[str]]: """ Cloudflare-assigned name servers. This is only populated for zones that use Cloudflare DNS. """ return pulumi.get(self, "name_servers") @property @pulumi.getter def paused(self) -> pulumi.Output[Optional[bool]]: """ Boolean of whether this zone is paused (traffic bypasses Cloudflare). Default: false. """ return pulumi.get(self, "paused") @property @pulumi.getter def plan(self) -> pulumi.Output[str]: """ The name of the commercial plan to apply to the zone, can be updated once the zone is created; one of `free`, `pro`, `business`, `enterprise`. """ return pulumi.get(self, "plan") @property @pulumi.getter def status(self) -> pulumi.Output[str]: """ Status of the zone. 
Valid values: `active`, `pending`, `initializing`, `moved`, `deleted`, `deactivated`. """ return pulumi.get(self, "status") @property @pulumi.getter def type(self) -> pulumi.Output[Optional[str]]: """ A full zone implies that DNS is hosted with Cloudflare. A partial zone is typically a partner-hosted zone or a CNAME setup. Valid values: `full`, `partial`. Default is `full`. """ return pulumi.get(self, "type") @property @pulumi.getter(name="vanityNameServers") def vanity_name_servers(self) -> pulumi.Output[Sequence[str]]: """ List of Vanity Nameservers (if set). * `meta.wildcard_proxiable` - Indicates whether wildcard DNS records can receive Cloudflare security and performance features. * `meta.phishing_detected` - Indicates if URLs on the zone have been identified as hosting phishing content. """ return pulumi.get(self, "vanity_name_servers") @property @pulumi.getter(name="verificationKey") def verification_key(self) -> pulumi.Output[str]: """ Contains the TXT record value to validate domain ownership. This is only populated for zones of type `partial`. """ return pulumi.get(self, "verification_key") @property @pulumi.getter def zone(self) -> pulumi.Output[str]: """ The DNS zone name which will be added. """ return pulumi.get(self, "zone")
45.929006
214
0.642185
2,785
22,643
5.051706
0.079354
0.08835
0.064681
0.050039
0.863174
0.832966
0.802118
0.785202
0.759827
0.732461
0
0.004996
0.248642
22,643
492
215
46.022358
0.82196
0.387581
0
0.652778
1
0
0.073402
0.002035
0
0
0
0
0
1
0.163194
false
0.003472
0.017361
0.006944
0.28125
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
64ee32f1fff760bdf8bd337d9872e663c2f3161f
7,670
py
Python
setup.py
senuraa/ReadDiabetesReports
df5470d9ac1b658707ff8b2facd9b463ab48cd6b
[ "MIT" ]
null
null
null
setup.py
senuraa/ReadDiabetesReports
df5470d9ac1b658707ff8b2facd9b463ab48cd6b
[ "MIT" ]
5
2021-04-30T20:40:57.000Z
2022-03-11T23:28:41.000Z
setup.py
senuraa/ReadDiabetesReports
df5470d9ac1b658707ff8b2facd9b463ab48cd6b
[ "MIT" ]
null
null
null
from setuptools import setup setup( name='diabetesReportReader', version='0.0.1', packages=['venv.lib.python3.7.site-packages.bs4', 'venv.lib.python3.7.site-packages.bs4.tests', 'venv.lib.python3.7.site-packages.bs4.builder', 'venv.lib.python3.7.site-packages.cv2', 'venv.lib.python3.7.site-packages.cv2.data', 'venv.lib.python3.7.site-packages.PIL', 'venv.lib.python3.7.site-packages.rsa', 'venv.lib.python3.7.site-packages.enum', 'venv.lib.python3.7.site-packages.grpc', 'venv.lib.python3.7.site-packages.grpc.beta', 'venv.lib.python3.7.site-packages.grpc._cython', 'venv.lib.python3.7.site-packages.grpc._cython._cygrpc', 'venv.lib.python3.7.site-packages.grpc.framework', 'venv.lib.python3.7.site-packages.grpc.framework.common', 'venv.lib.python3.7.site-packages.grpc.framework.foundation', 'venv.lib.python3.7.site-packages.grpc.framework.interfaces', 'venv.lib.python3.7.site-packages.grpc.framework.interfaces.base', 'venv.lib.python3.7.site-packages.grpc.framework.interfaces.face', 'venv.lib.python3.7.site-packages.grpc.experimental', 'venv.lib.python3.7.site-packages.idna', 'venv.lib.python3.7.site-packages.click', 'venv.lib.python3.7.site-packages.flask', 'venv.lib.python3.7.site-packages.flask.json', 'venv.lib.python3.7.site-packages.numpy', 'venv.lib.python3.7.site-packages.numpy.ma', 'venv.lib.python3.7.site-packages.numpy.ma.tests', 'venv.lib.python3.7.site-packages.numpy.doc', 'venv.lib.python3.7.site-packages.numpy.fft', 'venv.lib.python3.7.site-packages.numpy.fft.tests', 'venv.lib.python3.7.site-packages.numpy.lib', 'venv.lib.python3.7.site-packages.numpy.lib.tests', 'venv.lib.python3.7.site-packages.numpy.core', 'venv.lib.python3.7.site-packages.numpy.core.tests', 'venv.lib.python3.7.site-packages.numpy.f2py', 'venv.lib.python3.7.site-packages.numpy.f2py.tests', 'venv.lib.python3.7.site-packages.numpy.tests', 'venv.lib.python3.7.site-packages.numpy.compat', 'venv.lib.python3.7.site-packages.numpy.compat.tests', 'venv.lib.python3.7.site-packages.numpy.linalg', 
'venv.lib.python3.7.site-packages.numpy.linalg.tests', 'venv.lib.python3.7.site-packages.numpy.random', 'venv.lib.python3.7.site-packages.numpy.random.tests', 'venv.lib.python3.7.site-packages.numpy.testing', 'venv.lib.python3.7.site-packages.numpy.testing.tests', 'venv.lib.python3.7.site-packages.numpy.testing._private', 'venv.lib.python3.7.site-packages.numpy.distutils', 'venv.lib.python3.7.site-packages.numpy.distutils.tests', 'venv.lib.python3.7.site-packages.numpy.distutils.command', 'venv.lib.python3.7.site-packages.numpy.distutils.fcompiler', 'venv.lib.python3.7.site-packages.numpy.matrixlib', 'venv.lib.python3.7.site-packages.numpy.matrixlib.tests', 'venv.lib.python3.7.site-packages.numpy.polynomial', 'venv.lib.python3.7.site-packages.numpy.polynomial.tests', 'venv.lib.python3.7.site-packages.google.api', 'venv.lib.python3.7.site-packages.google.rpc', 'venv.lib.python3.7.site-packages.google.auth', 'venv.lib.python3.7.site-packages.google.auth.crypt', 'venv.lib.python3.7.site-packages.google.auth.transport', 'venv.lib.python3.7.site-packages.google.auth.compute_engine', 'venv.lib.python3.7.site-packages.google.type', 'venv.lib.python3.7.site-packages.google.cloud.vision_v1', 'venv.lib.python3.7.site-packages.google.cloud.vision_v1.gapic', 'venv.lib.python3.7.site-packages.google.cloud.vision_v1.gapic.transports', 'venv.lib.python3.7.site-packages.google.cloud.vision_v1.proto', 'venv.lib.python3.7.site-packages.google.cloud.vision_helpers', 'venv.lib.python3.7.site-packages.google.cloud.vision_v1p1beta1', 'venv.lib.python3.7.site-packages.google.cloud.vision_v1p1beta1.gapic', 'venv.lib.python3.7.site-packages.google.cloud.vision_v1p1beta1.gapic.transports', 'venv.lib.python3.7.site-packages.google.cloud.vision_v1p1beta1.proto', 'venv.lib.python3.7.site-packages.google.cloud.vision_v1p2beta1', 'venv.lib.python3.7.site-packages.google.cloud.vision_v1p2beta1.gapic', 'venv.lib.python3.7.site-packages.google.cloud.vision_v1p2beta1.gapic.transports', 
'venv.lib.python3.7.site-packages.google.cloud.vision_v1p2beta1.proto', 'venv.lib.python3.7.site-packages.google.cloud.vision_v1p3beta1', 'venv.lib.python3.7.site-packages.google.cloud.vision_v1p3beta1.gapic', 'venv.lib.python3.7.site-packages.google.cloud.vision_v1p3beta1.gapic.transports', 'venv.lib.python3.7.site-packages.google.cloud.vision_v1p3beta1.proto', 'venv.lib.python3.7.site-packages.google.oauth2', 'venv.lib.python3.7.site-packages.google.logging.type', 'venv.lib.python3.7.site-packages.google.api_core', 'venv.lib.python3.7.site-packages.google.api_core.future', 'venv.lib.python3.7.site-packages.google.api_core.gapic_v1', 'venv.lib.python3.7.site-packages.google.api_core.operations_v1', 'venv.lib.python3.7.site-packages.google.protobuf', 'venv.lib.python3.7.site-packages.google.protobuf.util', 'venv.lib.python3.7.site-packages.google.protobuf.pyext', 'venv.lib.python3.7.site-packages.google.protobuf.compiler', 'venv.lib.python3.7.site-packages.google.protobuf.internal', 'venv.lib.python3.7.site-packages.google.protobuf.internal.import_test_package', 'venv.lib.python3.7.site-packages.google.longrunning', 'venv.lib.python3.7.site-packages.pyasn1', 'venv.lib.python3.7.site-packages.pyasn1.type', 'venv.lib.python3.7.site-packages.pyasn1.codec', 'venv.lib.python3.7.site-packages.pyasn1.codec.ber', 'venv.lib.python3.7.site-packages.pyasn1.codec.cer', 'venv.lib.python3.7.site-packages.pyasn1.codec.der', 'venv.lib.python3.7.site-packages.pyasn1.codec.native', 'venv.lib.python3.7.site-packages.pyasn1.compat', 'venv.lib.python3.7.site-packages.certifi', 'venv.lib.python3.7.site-packages.chardet', 'venv.lib.python3.7.site-packages.chardet.cli', 'venv.lib.python3.7.site-packages.urllib3', 'venv.lib.python3.7.site-packages.urllib3.util', 'venv.lib.python3.7.site-packages.urllib3.contrib', 'venv.lib.python3.7.site-packages.urllib3.contrib._securetransport', 'venv.lib.python3.7.site-packages.urllib3.packages', 
'venv.lib.python3.7.site-packages.urllib3.packages.backports', 'venv.lib.python3.7.site-packages.urllib3.packages.ssl_match_hostname', 'venv.lib.python3.7.site-packages.requests', 'venv.lib.python3.7.site-packages.cachetools', 'venv.lib.python3.7.site-packages.concurrent', 'venv.lib.python3.7.site-packages.concurrent.futures', 'venv.lib.python3.7.site-packages.pytesseract', 'venv.lib.python3.7.site-packages.googlesearch', 'venv.lib.python3.7.site-packages.pyasn1_modules'], url='', license='', author='Senura Seneviratne', author_email='senuraa@msn.com', description='' )
81.595745
120
0.6691
1,028
7,670
4.958171
0.114786
0.157936
0.315872
0.338434
0.907789
0.907789
0.860114
0.688248
0.285462
0.18815
0
0.046909
0.16897
7,670
93
121
82.473118
0.752746
0
0
0
0
0.043478
0.770535
0.762973
0
0
0
0
0
1
0
true
0
0.021739
0
0.021739
0
0
0
0
null
0
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
1
null
0
0
0
0
0
0
1
0
0
0
0
0
0
8
8f3715781c4400409ef649a6e92614599f74fe55
169
py
Python
code-exercises-etc/section_02_(strings)/ajm.str-format-phone-ex.20151024.py
hannahkwarren/CLaG-Sp2016
a75862d187176d9f2f1778eb6300056364292b44
[ "MIT" ]
null
null
null
code-exercises-etc/section_02_(strings)/ajm.str-format-phone-ex.20151024.py
hannahkwarren/CLaG-Sp2016
a75862d187176d9f2f1778eb6300056364292b44
[ "MIT" ]
null
null
null
code-exercises-etc/section_02_(strings)/ajm.str-format-phone-ex.20151024.py
hannahkwarren/CLaG-Sp2016
a75862d187176d9f2f1778eb6300056364292b44
[ "MIT" ]
null
null
null
phone = "315-481-2904" print "Area Code: {0}".format(phone[0:3]) print "Local: {0}".format(phone[4:]) print "Different format: ({0}) {1}".format(phone[0:3], phone[4:])
28.166667
65
0.627219
29
169
3.655172
0.482759
0.311321
0.226415
0.245283
0
0
0
0
0
0
0
0.131579
0.100592
169
5
66
33.8
0.565789
0
0
0
0
0
0.372781
0
0
0
0
0
0
0
null
null
0
0
null
null
0.75
1
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
1
0
7
8f44df3f286af6e3dca7e0325338692050c2bf7f
14,128
py
Python
electrum/tests/test_storage_upgrade.py
traysi/electrum-raven
b2a64a459da32afd2987149460253cfadec03384
[ "MIT" ]
5
2018-10-31T18:47:54.000Z
2021-09-20T02:04:42.000Z
electrum/tests/test_storage_upgrade.py
project-mynt/electrum-mynt
ca1548e008854f2a3eff900a69365307cc20bd57
[ "MIT" ]
null
null
null
electrum/tests/test_storage_upgrade.py
project-mynt/electrum-mynt
ca1548e008854f2a3eff900a69365307cc20bd57
[ "MIT" ]
11
2018-10-31T19:46:05.000Z
2019-09-25T20:18:37.000Z
import shutil import tempfile from electrum.storage import WalletStorage from electrum.wallet import Wallet from .test_wallet import WalletTestCase # TODO add other wallet types: 2fa, xpub-only # TODO hw wallet with client version 2.6.x (single-, and multiacc) class TestStorageUpgrade(WalletTestCase): def test_upgrade_from_client_2_9_3_seeded(self): wallet_str = '{"addr_history": {"M86vvgYRgPBW3QfyUnxbKBeUzbGtgp6NXG": [], "M98MvBtibhTcnz8tLPNk3ooFDE5qpck83r": [], "M9caHU9o66Wvmc2gqqW2DHPcnshqdj36kG": [], "MABcFFZv7v3FRAEKU5kqoB6xt9VKupEt8K": [["4dc927c6174608d8a08a9e6182da88010a8bfd3d7ee1ee2e6ce1ab0750a66084", 1244811]], "MAeRNwBYorbvbeAwiJfp7cEiPHThPhcut2": [], "MBMpHGd7nKhFn7SPYkNb2SJd8WiGpTH5aB": [], "MBavRPSte8woScUxXQwybiwvvWNpEZ1WWy": [["63e0cc7890079c623d9dbec3b97c5f827ca31dbc17609a4d9c019deac56a471c", 1244771], ["c8f330a6a0daad48c4ef86cf509658313e7fe400932011bf279619d444b3f9f5", 1244799]], "MC9S8ipqNu4pjodYA7ns6sS15GEEUqeuU3": [], "MEhUENYZXhwuKtiCYep6AjpRGqLBLLxa4y": [], "MFbXqsdyVSnEGmGMpduND5kp93ExhYgtbu": [],"MFgJcgrbEnV1ANbPcAsAV7ADBF63NEpXK3": [], "MGUJMzZeA1PBUsv3rUNcjLzVnAjaWkS95n": [], "MGtCxT8kLcqqi66Spwr98JnYSYJHU7ShGZ": [], "MGwggCpjuwpCfbjTCbYoX1ngzNSUnTEtEn": [], "MLoyw8Bt5zVoVRcFxUPbKuW5FF3NNfLUVf": [], "MLy8usGiKXVqo4VtWAUq1jdgKASdN8JTij": [], "MPMNgAeu7B6PTr9WVM9mhRycmQVy14VUvL": [], "MPQzvFN6fvcdwZC1KsrjagZizdFx46yp1V": [], "MPbw4Njqk6neGMcBNmxHpDjxoBGbMPwJiL": [], "MQycguJjCxpW7qBGKHPptGqMVUP28VJhp6": [], "MRR8gRePoj33M9Sx4UDEAC1fdLhppit71k": [], "MSJTC3sSuVSyQ2Fehrxg5UkwpYQn2bkNjC": [], "MSeTEqGyJfAD6TxsFgfGwMtvWf3fbaRZeW": [], "MSi7o6WRptrcpQEZBvVX2rrZdKz5WHcXyJ": [], "MSjeTpS3Kcij8GvCnatG17mPdFd3FfF3Ra": [], "MTVWS92xnBVqopgTo3F7eYqQq24b3EQpNB": [], "MUuGCeLx98GQxq1Y2UTSQbZd7PMmc2iDAs": [], "MWfHXccPyKPeY5Nf5kSk4GsCKHhsBSymxU": []}, "addresses": {"change": ["MPbw4Njqk6neGMcBNmxHpDjxoBGbMPwJiL", "MPMNgAeu7B6PTr9WVM9mhRycmQVy14VUvL", "MC9S8ipqNu4pjodYA7ns6sS15GEEUqeuU3", "MGwggCpjuwpCfbjTCbYoX1ngzNSUnTEtEn", 
"MGtCxT8kLcqqi66Spwr98JnYSYJHU7ShGZ", "MRR8gRePoj33M9Sx4UDEAC1fdLhppit71k"], "receiving": ["MBavRPSte8woScUxXQwybiwvvWNpEZ1WWy", "MABcFFZv7v3FRAEKU5kqoB6xt9VKupEt8K", "MGUJMzZeA1PBUsv3rUNcjLzVnAjaWkS95n", "MQycguJjCxpW7qBGKHPptGqMVUP28VJhp6", "MWfHXccPyKPeY5Nf5kSk4GsCKHhsBSymxU", "MAeRNwBYorbvbeAwiJfp7cEiPHThPhcut2", "MSJTC3sSuVSyQ2Fehrxg5UkwpYQn2bkNjC", "M98MvBtibhTcnz8tLPNk3ooFDE5qpck83r", "M9caHU9o66Wvmc2gqqW2DHPcnshqdj36kG", "MBMpHGd7nKhFn7SPYkNb2SJd8WiGpTH5aB", "MSjeTpS3Kcij8GvCnatG17mPdFd3FfF3Ra", "MEhUENYZXhwuKtiCYep6AjpRGqLBLLxa4y", "MLy8usGiKXVqo4VtWAUq1jdgKASdN8JTij", "MFbXqsdyVSnEGmGMpduND5kp93ExhYgtbu", "MSi7o6WRptrcpQEZBvVX2rrZdKz5WHcXyJ", "MFgJcgrbEnV1ANbPcAsAV7ADBF63NEpXK3", "M86vvgYRgPBW3QfyUnxbKBeUzbGtgp6NXG", "MLoyw8Bt5zVoVRcFxUPbKuW5FF3NNfLUVf", "MPQzvFN6fvcdwZC1KsrjagZizdFx46yp1V", "MUuGCeLx98GQxq1Y2UTSQbZd7PMmc2iDAs", "MTVWS92xnBVqopgTo3F7eYqQq24b3EQpNB", "MSeTEqGyJfAD6TxsFgfGwMtvWf3fbaRZeW"]}, "keystore": {"seed": "cereal wise two govern top pet frog nut rule sketch bundle logic", "type": "bip32", "xprv": "xprv9s21ZrQH143K29XjRjUs6MnDB9wXjXbJP2kG1fnRk8zjdDYWqVkQYUqaDtgZp5zPSrH5PZQJs8sU25HrUgT1WdgsPU8GbifKurtMYg37d4v", "xpub": "xpub661MyMwAqRbcEdcCXm1sTViwjBn28zK9kFfrp4C3JUXiW1sfP34f6HA45B9yr7EH5XGzWuTfMTdqpt9XPrVQVUdgiYb5NW9m8ij1FSZgGBF"}, "pruned_txo": {}, "seed_type": "standard", "seed_version": 13, "stored_height": 1244820, "transactions": {"4dc927c6174608d8a08a9e6182da88010a8bfd3d7ee1ee2e6ce1ab0750a66084": "0100000001c2c9a2674e18b405a3da7c8b3517e9ee66d0e0eaea301826851a47b63f7b5892010000009200483045022100cebcf08aa10f780d4de0d615d54d74747a4e0a9d0112ae20d43da24cc65d102602201cc9e12c1f27fad3bf680005c49564460b2234de44b20e5c6814bcdb888ac9cc01475121021a52f873609007d4fbb3762146bcf392a38add103d158b85be84234a6deec95d2102df15712f8b35ca672cd0c7a5ad97581699449cd01a0b0a7ff1e4e8e52f43cb3452aefeffffff01e069f902000000001976a914190c9f5b8dad15b8daa7f1ef821968820980b48e88ac89fe1200", "63e0cc7890079c623d9dbec3b97c5f827ca31dbc17609a4d9c019deac56a471c": 
"010000000182dc3b573b4b93b02331f9ae63a9d4319d3aa6d408cba9da945559e28b90f1db000000006a47304402203e1826d088c0cd7e04b55392c70beaf14ce4e3f5d29d9d41cbc13492a1369cb102206da8ec16c5159c1a309e65d0cdc2889801e0f4a53e012c9f633c8ee9dc70780f012103581803a5795674e8ba65765d7d8bc4c89ce96835e19538437390b010a0e693f7feffffff01e50bf802000000001976a914286d7ca6a657816ceedb90eecb7afe8f361da39488ac62fe1200", "c8f330a6a0daad48c4ef86cf509658313e7fe400932011bf279619d444b3f9f5": "01000000011c476ac5ea9d019c4d9a6017bc1da37c825f7cb9c3be9d3d629c079078cce063000000006b483045022100fe2b1267e7897cff328a483389606ba74c23073d4a3b09aa1a6b41e66a43729f02206471fa10f6b2a0088bf68bd76432cc36bf6df917adb8bb968c8814e918f05fbc012102276b2411cf66966a9b6198f7e52347cd63a5adfc189cd7681649df4ce433058cfeffffff014585f602000000001976a914b2ca5937a6c75347d3c70a790802efd1b3f14d7d88ac7efe1200"}, "tx_fees": {"4dc927c6174608d8a08a9e6182da88010a8bfd3d7ee1ee2e6ce1ab0750a66084": 100000, "63e0cc7890079c623d9dbec3b97c5f827ca31dbc17609a4d9c019deac56a471c": 100000, "c8f330a6a0daad48c4ef86cf509658313e7fe400932011bf279619d444b3f9f5": 100000}, "txi": {"4dc927c6174608d8a08a9e6182da88010a8bfd3d7ee1ee2e6ce1ab0750a66084": {}, "63e0cc7890079c623d9dbec3b97c5f827ca31dbc17609a4d9c019deac56a471c": {}, "c8f330a6a0daad48c4ef86cf509658313e7fe400932011bf279619d444b3f9f5": {"MBavRPSte8woScUxXQwybiwvvWNpEZ1WWy": [["63e0cc7890079c623d9dbec3b97c5f827ca31dbc17609a4d9c019deac56a471c:0", 49810405]]}}, "txo": {"4dc927c6174608d8a08a9e6182da88010a8bfd3d7ee1ee2e6ce1ab0750a66084": {"MABcFFZv7v3FRAEKU5kqoB6xt9VKupEt8K": [[0, 49900000, false]]}, "63e0cc7890079c623d9dbec3b97c5f827ca31dbc17609a4d9c019deac56a471c": {"MBavRPSte8woScUxXQwybiwvvWNpEZ1WWy": [[0, 49810405, false]]}, "c8f330a6a0daad48c4ef86cf509658313e7fe400932011bf279619d444b3f9f5": {}}, "use_encryption": false, "verified_tx3": {"4dc927c6174608d8a08a9e6182da88010a8bfd3d7ee1ee2e6ce1ab0750a66084": [1244811, 1518548087, 2], "63e0cc7890079c623d9dbec3b97c5f827ca31dbc17609a4d9c019deac56a471c": [1244771, 1518545016, 
1], "c8f330a6a0daad48c4ef86cf509658313e7fe400932011bf279619d444b3f9f5": [1244799, 1518547429, 1]}, "wallet_type": "standard", "winpos-qt": [1016, 143, 840, 400]}' self._upgrade_storage(wallet_str) def test_upgrade_from_client_2_9_3_importedkeys(self): wallet_str = '{"addr_history": {"MJNDhNyzYPbcFE5uZAg2j6YyUQVdLDhuP3": []}, "addresses": {"change": [], "receiving": ["MJNDhNyzYPbcFE5uZAg2j6YyUQVdLDhuP3"]}, "keystore": {"keypairs": {"03c2725dae5de0cbf0101cf57a3aadfb301bc3b432fa8ea38515198e41df12199f": "TPxZYPTaBiwFVo5kVmBYuJctGVDMRaCLNEEu8nsxLednda1zmVGS"}, "type": "imported"}, "pruned_txo": {}, "seed_version": 13, "stored_height": 1244824, "transactions": {}, "tx_fees": {}, "txi": {}, "txo": {}, "use_encryption": false, "verified_tx3": {}, "wallet_type": "standard", "winpos-qt": [314, 230, 840, 400]}' self._upgrade_storage(wallet_str) def test_upgrade_from_client_2_9_3_watchaddresses(self): wallet_str = '{"addr_history": {"MFMy9FwJsV6HiN5eZDqDETw4pw52q3UGrb": []}, "addresses": ["MFMy9FwJsV6HiN5eZDqDETw4pw52q3UGrb"], "pruned_txo": {}, "seed_version": 13, "stored_height": 1244820, "transactions": {}, "tx_fees": {}, "txi": {}, "txo": {}, "verified_tx3": {}, "wallet_type": "imported", "winpos-qt": [100, 100, 840, 400]}' self._upgrade_storage(wallet_str) def test_upgrade_from_client_2_9_3_multisig(self): wallet_str = '{"addr_history": {"P8jgDdDDshff1QpvMvpBWvQ6eNzxCUtPzt": [], "P9gS5tcr9DrKJBXhoCsttz4z3f8Rc1JNsp": [], "PAh6JWaw5rhTjiuy3oA3qPxw9VMP7WHFG9": [["92587b3fb6471a85261830eaeae0d066eee917358b7cdaa305b4184e67a2c9c2", 1244811], ["4dc927c6174608d8a08a9e6182da88010a8bfd3d7ee1ee2e6ce1ab0750a66084", 1244811]], "PCBauNwA3Cv7pSX4w2Jo5gqeFAjmuSPzFz": [], "PDQnZfcGTT8NxjLxDnUBywmHTAiaUA1eaW": [], "PEB5ypNYcqahQR1pVZPKyvXV6D1fowzaWW": [], "PELNR4HeSFqaY175fBwfCtfRMc7xxEwzrA": [], "PFNhoTkyVt6pnwM8SPvKz7p1i31FT47TMf": [], "PFiuNDiPJ7KfswEHbt86Kgzmu1MyGEifAo": [], "PKUd3FD55GhWuB2YYXouv6tdyzMk868A99": [], "PMCQaLUfGqGjSk931W1DnqMakjN7BNphzd": [], 
"PMaHpXqBxb4t8ugM4Pck7cVTVG7c7LNVhm": [], "PPsdnu4wwnmzr8gvpcTJAfeCb1f1LdRT1p": [], "PQSVheR3xP32bnviHAoUdBivmupxqVrpkm": [], "PRwQwsMR1doV435PTAmEX4YiR4KPCpD5tW": [], "PSR9rkJGNN6sp3YfNcL4SVshM6urNyDbEN": [], "PSs5WAEBaS5LfVxUBDBGzaqEZvmwpbUaRq": [], "PSsxMiTNivb2bZhHYcXQEPQXiD2RRiikrW": [], "PTNCJhHMBPG3RVVJ6rGXfSnvLg9HMD7VA8": [], "PUGX2QjJBiq8CMTcf1MfgA2nMdYB8UZVib": [], "PUZdh3MNKX7qT9eWDmguoiDudU2SNhQbGd": [], "PUfNE6rBYjGewm6HT3U1t1QrgeDuJhTa4v": [], "PUgxpEyhTz5wXBgan6HoBDRxndkszkRzsw": [], "PUwsdVq7dyQGTdjWaqYxdVkP9kMtpoA2Uz": [], "PVderjbckPSqkN33hw5B4gBsrKD4tb7mAp": [], "PViDr7H8qhgvW8ajY8teTFYzFMWQ9C25CQ": [], "PXu1FBDpr7XVpoKUQ6HZDQzuNz8V3fCRww": []}, "addresses": {"change": ["PUwsdVq7dyQGTdjWaqYxdVkP9kMtpoA2Uz", "PUfNE6rBYjGewm6HT3U1t1QrgeDuJhTa4v", "PPsdnu4wwnmzr8gvpcTJAfeCb1f1LdRT1p", "PMCQaLUfGqGjSk931W1DnqMakjN7BNphzd", "PVderjbckPSqkN33hw5B4gBsrKD4tb7mAp", "P9gS5tcr9DrKJBXhoCsttz4z3f8Rc1JNsp"], "receiving": ["PAh6JWaw5rhTjiuy3oA3qPxw9VMP7WHFG9", "PKUd3FD55GhWuB2YYXouv6tdyzMk868A99", "PUZdh3MNKX7qT9eWDmguoiDudU2SNhQbGd", "PUGX2QjJBiq8CMTcf1MfgA2nMdYB8UZVib", "PRwQwsMR1doV435PTAmEX4YiR4KPCpD5tW", "PSsxMiTNivb2bZhHYcXQEPQXiD2RRiikrW", "PCBauNwA3Cv7pSX4w2Jo5gqeFAjmuSPzFz", "PDQnZfcGTT8NxjLxDnUBywmHTAiaUA1eaW", "PEB5ypNYcqahQR1pVZPKyvXV6D1fowzaWW", "PUgxpEyhTz5wXBgan6HoBDRxndkszkRzsw", "PQSVheR3xP32bnviHAoUdBivmupxqVrpkm", "PTNCJhHMBPG3RVVJ6rGXfSnvLg9HMD7VA8", "PViDr7H8qhgvW8ajY8teTFYzFMWQ9C25CQ", "PXu1FBDpr7XVpoKUQ6HZDQzuNz8V3fCRww", "PMaHpXqBxb4t8ugM4Pck7cVTVG7c7LNVhm", "PSs5WAEBaS5LfVxUBDBGzaqEZvmwpbUaRq", "PFiuNDiPJ7KfswEHbt86Kgzmu1MyGEifAo", "P8jgDdDDshff1QpvMvpBWvQ6eNzxCUtPzt", "PELNR4HeSFqaY175fBwfCtfRMc7xxEwzrA", "PSR9rkJGNN6sp3YfNcL4SVshM6urNyDbEN", "PFNhoTkyVt6pnwM8SPvKz7p1i31FT47TMf"]}, "pruned_txo": {}, "seed_version": 13, "stored_height": 1244820, "transactions": {"4dc927c6174608d8a08a9e6182da88010a8bfd3d7ee1ee2e6ce1ab0750a66084": 
"0100000001c2c9a2674e18b405a3da7c8b3517e9ee66d0e0eaea301826851a47b63f7b5892010000009200483045022100cebcf08aa10f780d4de0d615d54d74747a4e0a9d0112ae20d43da24cc65d102602201cc9e12c1f27fad3bf680005c49564460b2234de44b20e5c6814bcdb888ac9cc01475121021a52f873609007d4fbb3762146bcf392a38add103d158b85be84234a6deec95d2102df15712f8b35ca672cd0c7a5ad97581699449cd01a0b0a7ff1e4e8e52f43cb3452aefeffffff01e069f902000000001976a914190c9f5b8dad15b8daa7f1ef821968820980b48e88ac89fe1200", "92587b3fb6471a85261830eaeae0d066eee917358b7cdaa305b4184e67a2c9c2": "0200000001393444c30c55cbad286d4d23772d026b73c24865fa8dba894b611c21d95f077d010000006a47304402207f0df76f0105de92d02ec69f39b41682b6c5c984ea6c61b427a0f170ab5855cc022001b3369c652c367890744f3469b54e66f71db8c64182b16f45b0e0db63e5c2ac0121034db136fd5fe036ba9c95170947dc1302f98d48e4fec816d15f2ce35fa4a5ebe2feffffff0294512303000000001976a9142df179daddeea6fbc0bc582758bdcde728a6429288ac80f0fa020000000017a914170eb2491149d327a42f983fb6be32fe889c710f8789fe1200"}, "tx_fees": {"4dc927c6174608d8a08a9e6182da88010a8bfd3d7ee1ee2e6ce1ab0750a66084": 100000, "92587b3fb6471a85261830eaeae0d066eee917358b7cdaa305b4184e67a2c9c2": 23004}, "txi": {"4dc927c6174608d8a08a9e6182da88010a8bfd3d7ee1ee2e6ce1ab0750a66084": {"PAh6JWaw5rhTjiuy3oA3qPxw9VMP7WHFG9": [["92587b3fb6471a85261830eaeae0d066eee917358b7cdaa305b4184e67a2c9c2:1", 50000000]]}, "92587b3fb6471a85261830eaeae0d066eee917358b7cdaa305b4184e67a2c9c2": {}}, "txo": {"4dc927c6174608d8a08a9e6182da88010a8bfd3d7ee1ee2e6ce1ab0750a66084": {}, "92587b3fb6471a85261830eaeae0d066eee917358b7cdaa305b4184e67a2c9c2": {"PAh6JWaw5rhTjiuy3oA3qPxw9VMP7WHFG9": [[1, 50000000, false]]}}, "use_encryption": false, "verified_tx3": {"4dc927c6174608d8a08a9e6182da88010a8bfd3d7ee1ee2e6ce1ab0750a66084": [1244811, 1518548087, 2], "92587b3fb6471a85261830eaeae0d066eee917358b7cdaa305b4184e67a2c9c2": [1244811, 1518548087, 1]}, "wallet_type": "1of2", "winpos-qt": [201, 254, 840, 400], "x1/": {"seed": "speed cruise market wasp ability alarm hold essay grass 
coconut tissue recipe", "type": "bip32", "xprv": "xprv9s21ZrQH143K48ig2wcAuZoEKaYdNRaShKFR3hLrgwsNW13QYRhXH6gAG1khxim6dw2RtAzF8RWbQxr1vvWUJFfEu2SJZhYbv6pfreMpuLB", "xpub": "xpub661MyMwAqRbcGco98y9BGhjxscP7mtJJ4YB1r5kUFHQMNoNZ5y1mptze7J37JypkbrmBdnqTvSNzxL7cE1FrHg16qoj9S12MUpiYxVbTKQV"}, "x2/": {"type": "bip32", "xprv": null, "xpub": "xpub661MyMwAqRbcGrCDZaVs9VC7Z6579tsGvpqyDYZEHKg2MXoDkxhrWoukqvwDPXKdxVkYA6Hv9XHLETptfZfNpcJZmsUThdXXkTNGoBjQv1o"}}' self._upgrade_storage(wallet_str) ########## @classmethod def setUpClass(cls): super().setUpClass() from electrum.plugin import Plugins from electrum.simple_config import SimpleConfig cls.electrum_path = tempfile.mkdtemp() config = SimpleConfig({'electrum_path': cls.electrum_path}) gui_name = 'cmdline' # TODO it's probably wasteful to load all plugins... only need Trezor Plugins(config, gui_name) @classmethod def tearDownClass(cls): super().tearDownClass() shutil.rmtree(cls.electrum_path) def _upgrade_storage(self, wallet_json, accounts=1): storage = self._load_storage_from_json_string(wallet_json, manual_upgrades=True) if accounts == 1: self.assertFalse(storage.requires_split()) if storage.requires_upgrade(): storage.upgrade() self._sanity_check_upgraded_storage(storage) else: self.assertTrue(storage.requires_split()) new_paths = storage.split_accounts() self.assertEqual(accounts, len(new_paths)) for new_path in new_paths: new_storage = WalletStorage(new_path, manual_upgrades=False) self._sanity_check_upgraded_storage(new_storage) def _sanity_check_upgraded_storage(self, storage): self.assertFalse(storage.requires_split()) self.assertFalse(storage.requires_upgrade()) w = Wallet(storage) def _load_storage_from_json_string(self, wallet_json, manual_upgrades=True): with open(self.wallet_path, "w") as f: f.write(wallet_json) storage = WalletStorage(self.wallet_path, manual_upgrades=manual_upgrades) return storage
185.894737
5,778
0.832673
675
14,128
17.232593
0.361481
0.006878
0.004814
0.00619
0.184061
0.14314
0.14314
0.140217
0.137895
0.015475
0
0.284953
0.070003
14,128
75
5,779
188.373333
0.60035
0.012458
0
0.148148
0
0.074074
0.832903
0.678146
0
0
0
0.013333
0.092593
1
0.166667
false
0
0.185185
0
0.388889
0
0
0
1
null
0
0
0
0
0
0
0
0
0
0
1
0
0
0
1
1
0
0
0
0
0
0
1
1
null
0
0
1
0
0
0
0
0
0
0
0
0
0
7
56bd33196bad961947d1c07b0ad088051699ecd5
463
py
Python
content/code/python-i18n-gettext/demo.py
eduard1963/userpages
98a00c9ef3d82363dba1a89375574daaa5a86d14
[ "Unlicense" ]
77
2015-07-18T15:56:54.000Z
2022-03-10T06:22:06.000Z
content/code/python-i18n-gettext/demo.py
vicmanbrile/userpages
76d540b447ea0baccc91e1db5f9c22408fe420f4
[ "Unlicense" ]
5
2017-02-23T10:17:21.000Z
2022-01-13T19:51:15.000Z
content/code/python-i18n-gettext/demo.py
vicmanbrile/userpages
76d540b447ea0baccc91e1db5f9c22408fe420f4
[ "Unlicense" ]
35
2015-04-22T05:10:00.000Z
2022-01-21T12:34:50.000Z
#!/usr/bin/env python # -*- coding:utf-8 -*- import i18n if __name__ == '__main__': i18n.setLocale("zh_TW") print(i18n.gettext("Home")) print(i18n.gettext("Canon")) print(i18n.gettext("About")) print(i18n.gettext("Setting")) print(i18n.gettext("Translation")) i18n.setLocale("vi_VN") print(i18n.gettext("Home")) print(i18n.gettext("Canon")) print(i18n.gettext("About")) print(i18n.gettext("Setting")) print(i18n.gettext("Translation"))
23.15
36
0.676026
60
463
5.05
0.4
0.29703
0.528053
0.132013
0.739274
0.739274
0.739274
0.739274
0.739274
0.739274
0
0.065534
0.110151
463
19
37
24.368421
0.669903
0.088553
0
0.714286
0
0
0.195238
0
0
0
0
0
0
1
0
true
0
0.071429
0
0.071429
0.714286
0
0
0
null
1
1
0
0
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
1
0
10
56ded95cb533f77e49b0d0c3ef9d7b07674519ca
7,032
py
Python
tests/adapter/mongo/test_comment_handling.py
gmc-norr/scout
ea8eaaa079c63e4033af6216ec08da4a314f9b5c
[ "BSD-3-Clause" ]
111
2015-01-15T11:53:20.000Z
2022-03-26T19:55:24.000Z
tests/adapter/mongo/test_comment_handling.py
gmc-norr/scout
ea8eaaa079c63e4033af6216ec08da4a314f9b5c
[ "BSD-3-Clause" ]
2,995
2015-01-15T16:14:20.000Z
2022-03-31T13:36:32.000Z
tests/adapter/mongo/test_comment_handling.py
gmc-norr/scout
ea8eaaa079c63e4033af6216ec08da4a314f9b5c
[ "BSD-3-Clause" ]
55
2015-05-31T19:09:49.000Z
2021-11-01T10:50:31.000Z
""" tests/adapter/mongo/test_comment_handling.py We need to break out some tests from the event function. """ from pprint import pprint as pp def test_specific_comment(adapter, institute_obj, case_obj, user_obj, variant_obj): content = "specific comment for a variant" # GIVEN a populated database with a variant and no events adapter.variant_collection.insert_one(variant_obj) assert sum(1 for i in adapter.variant_collection.find()) == 1 assert sum(1 for i in adapter.event_collection.find()) == 0 # WHEN commenting a specific comment on a variant updated_variant = adapter.comment( institute=institute_obj, case=case_obj, user=user_obj, link="commentlink", variant=variant_obj, content=content, comment_level="specific", ) # THEN assert that the events function returns the correct event. comments = adapter.events( institute_obj, case=case_obj, variant_id=variant_obj["variant_id"], comments=True, ) for comment in comments: assert comment["content"] == content ## THEN assert that when looking for the comments for a variant in a different case it will not be found other_case = case_obj other_case["_id"] = "case2" comments = adapter.events( institute_obj, case=other_case, variant_id=variant_obj["variant_id"], comments=True, ) assert sum(1 for i in comments) == 0 def test_global_comment(adapter, institute_obj, case_obj, user_obj, variant_obj): content = "global comment for a variant" # GIVEN a populated database with two variants from two cases and no events adapter.variant_collection.insert_one(variant_obj) assert sum(1 for i in adapter.variant_collection.find()) == 1 assert sum(1 for i in adapter.event_collection.find()) == 0 # WHEN commenting a global comment on a variant updated_variant = adapter.comment( institute=institute_obj, case=case_obj, user=user_obj, link="commentlink", variant=variant_obj, content=content, comment_level="global", ) # THEN assert that the events function returns the correct event. 
comments = adapter.events( institute_obj, case=case_obj, variant_id=variant_obj["variant_id"], comments=True, ) for comment in comments: assert comment["content"] == content ## THEN assert that when looking for the comments for a variant in a different case one comment should be found other_case = case_obj other_case["_id"] = "case2" comments = adapter.events( institute_obj, case=other_case, variant_id=variant_obj["variant_id"], comments=True, ) assert sum(1 for i in comments) == 1 def test_global_and_specific_comments_one_case( adapter, institute_obj, case_obj, user_obj, variant_obj ): ## GIVEN an adapter with a variant and no events adapter.variant_collection.insert_one(variant_obj) ## WHEN adding a global and a specific comments global_content = "global" specific_content = "specific" global_comment = adapter.comment( institute=institute_obj, case=case_obj, user=user_obj, link="globalcommentlink", variant=variant_obj, content=global_content, comment_level="global", ) specific_comment = adapter.comment( institute=institute_obj, case=case_obj, user=user_obj, link="specificcommentlink", variant=variant_obj, content=specific_content, comment_level="specific", ) ## THEN assert that when fetching comments for a variant two events are returned comments = adapter.events( institute=institute_obj, case=case_obj, variant_id=variant_obj["variant_id"], comments=True, ) assert sum(1 for i in comments) == 2 def test_global_and_specific_comments_two_cases_same_institute( adapter, institute_obj, case_obj, user_obj, variant_obj ): ## GIVEN an adapter with a variant and no events adapter.variant_collection.insert_one(variant_obj) ## WHEN adding a global and a specific comments for the first variant global_content = "global" specific_content = "specific" # Add a global comment global_comment = adapter.comment( institute=institute_obj, case=case_obj, user=user_obj, link="globalcommentlink", variant=variant_obj, content=global_content, comment_level="global", ) # Add a specific 
comment specific_comment = adapter.comment( institute=institute_obj, case=case_obj, user=user_obj, link="globalcommentlink", variant=variant_obj, content=specific_content, comment_level="specific", ) other_case = case_obj other_case["case_id"] = other_case["_id"] = "case2" other_variant = variant_obj other_variant["case_id"] = other_case["case_id"] other_variant["_id"] = other_variant["document_id"] = "other_id" ## THEN assert that when fetching comments for other variant one global comment should be returned comments = adapter.events( institute=institute_obj, case=other_case, variant_id=other_variant["variant_id"], comments=True, ) assert sum(1 for i in comments) == 1 def test_global_and_specific_comments_two_cases_different_institutes( adapter, institute_obj, case_obj, user_obj, variant_obj ): ## GIVEN an adapter with a variant and no events adapter.variant_collection.insert_one(variant_obj) ## WHEN adding a global and a specific comments for the first variant global_content = "global" specific_content = "specific" # Add a global comment global_comment = adapter.comment( institute=institute_obj, case=case_obj, user=user_obj, link="globalcommentlink", variant=variant_obj, content=global_content, comment_level="global", ) # Add a specific comment specific_comment = adapter.comment( institute=institute_obj, case=case_obj, user=user_obj, link="globalcommentlink", variant=variant_obj, content=specific_content, comment_level="specific", ) other_case = case_obj other_case["case_id"] = other_case["_id"] = "case2" other_variant = variant_obj other_variant["case_id"] = other_case["case_id"] other_variant["_id"] = other_variant["document_id"] = "other_id" other_institute = institute_obj other_institute["_id"] = "inst2" ## THEN assert that when fetching comments for other variant one global comment should be returned comments = adapter.events( institute=other_institute, case=other_case, variant_id=other_variant["variant_id"], comments=True, ) assert sum(1 for i in comments) 
== 1
30.180258
115
0.674346
882
7,032
5.14059
0.102041
0.055139
0.067049
0.048522
0.9206
0.9206
0.911337
0.901412
0.879797
0.847817
0
0.004331
0.244881
7,032
232
116
30.310345
0.849529
0.1907
0
0.827381
0
0
0.087049
0
0
0
0
0
0.065476
1
0.029762
false
0
0.005952
0
0.035714
0.005952
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
7105c26d909d8086b882467139d052ab743c4ca1
44
py
Python
nuc_led/__init__.py
RedMoon32/intel_nuc_led_python_api
4a3de3458d38dd4fc12876063ceeb07d04744593
[ "Apache-2.0" ]
null
null
null
nuc_led/__init__.py
RedMoon32/intel_nuc_led_python_api
4a3de3458d38dd4fc12876063ceeb07d04744593
[ "Apache-2.0" ]
null
null
null
nuc_led/__init__.py
RedMoon32/intel_nuc_led_python_api
4a3de3458d38dd4fc12876063ceeb07d04744593
[ "Apache-2.0" ]
null
null
null
from nuc_led.nuc_led import RingLED,PowerLED
44
44
0.886364
8
44
4.625
0.75
0.324324
0
0
0
0
0
0
0
0
0
0
0.068182
44
1
44
44
0.902439
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
7
85b590dfca0766d1b28d312d2e6ddcc581939507
176
py
Python
sigla/__init__.py
mintyPT/sigla
f4f2a77d7da637f1dd9f6db97be8fd70af610bd0
[ "MIT" ]
7
2021-01-25T23:09:21.000Z
2021-05-27T02:37:32.000Z
sigla/__init__.py
mintyPT/sigla
f4f2a77d7da637f1dd9f6db97be8fd70af610bd0
[ "MIT" ]
null
null
null
sigla/__init__.py
mintyPT/sigla
f4f2a77d7da637f1dd9f6db97be8fd70af610bd0
[ "MIT" ]
null
null
null
from sigla.config import config # noqa F401 from sigla.utils.filters import register_filter # noqa F401 from sigla.main import load_node # noqa F401 __version__ = "0.0.63"
29.333333
60
0.772727
28
176
4.642857
0.571429
0.207692
0.184615
0.261538
0
0
0
0
0
0
0
0.087838
0.159091
176
5
61
35.2
0.790541
0.164773
0
0
0
0
0.041958
0
0
0
0
0
0
1
0
false
0
0.75
0
0.75
0
1
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
7
85b5f8c3c2efb1f2ae10e9a4b4e2ef45504bc0fc
140
py
Python
pydeep/nn/__init__.py
jytan17/deep_learning_framework
c0a55c0d9d201aacfe03e4d49b9f0d1b75278eb5
[ "MIT" ]
null
null
null
pydeep/nn/__init__.py
jytan17/deep_learning_framework
c0a55c0d9d201aacfe03e4d49b9f0d1b75278eb5
[ "MIT" ]
null
null
null
pydeep/nn/__init__.py
jytan17/deep_learning_framework
c0a55c0d9d201aacfe03e4d49b9f0d1b75278eb5
[ "MIT" ]
null
null
null
from pydeep.nn.Module import * from pydeep.layer import * from pydeep.optim import * from pydeep.loss import * from pydeep.function import *
28
30
0.785714
21
140
5.238095
0.428571
0.454545
0.581818
0
0
0
0
0
0
0
0
0
0.135714
140
5
31
28
0.909091
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
7
a42b672eb854ba40f1a5641e61af9fd68ceb8537
7,956
py
Python
util/write_json.py
lorenghoh/oh_tracker
13cc82e6a05029089f48bffee570726c8b25df92
[ "BSD-2-Clause" ]
1
2015-11-26T20:35:57.000Z
2015-11-26T20:35:57.000Z
util/write_json.py
lorenghoh/oh_tracker
13cc82e6a05029089f48bffee570726c8b25df92
[ "BSD-2-Clause" ]
null
null
null
util/write_json.py
lorenghoh/oh_tracker
13cc82e6a05029089f48bffee570726c8b25df92
[ "BSD-2-Clause" ]
1
2015-10-26T22:16:35.000Z
2015-10-26T22:16:35.000Z
import sys, json, pprint def main(case_name): json_dict = {} if case_name == 'BOMEX': #---- BOMEX json_dict = {} json_dict['config'] = {} stat_file = 'BOMEX_256x256x128_25m_25m_1s_stat.nc' location = '/newtera/loh/data/BOMEX' json_dict['case_name'] = case_name json_dict['location'] = location json_dict['condensed'] = '%s/condensed_entrain' % location json_dict['core'] = '%s/core_entrain' % location json_dict['stat_file'] = '%s/%s' % (location, stat_file) json_dict['tracking'] = '%s/tracking' % location json_dict['variables'] = '%s/variables' % location # Model parameters json_dict['config']['nx'] = 256 json_dict['config']['ny'] = 256 json_dict['config']['nz'] = 128 json_dict['config']['nt'] = 180 json_dict['config']['dx'] = 25 json_dict['config']['dy'] = 25 json_dict['config']['dz'] = 25 json_dict['config']['dt'] = 60 json_dict['config']['ug'] = -8. json_dict['config']['vg'] = 0. elif case_name == 'CGILS_CTL': #---- CGILS_CTL json_dict = {} json_dict['config'] = {} stat_file = 'ENT_CGILS_CTL_S6_3D_384x384x194_25m_1s_stat.nc' location = '/newtera/loh/data/CGILS_CTL' json_dict['case_name'] = case_name json_dict['location'] = location json_dict['condensed'] = '%s/condensed_entrain' % location json_dict['core'] = '%s/core_entrain' % location json_dict['stat_file'] = '%s/%s' % (location, stat_file) json_dict['tracking'] = '%s/tracking' % location json_dict['variables'] = '%s/variables' % location # Model parameters json_dict['config']['nt'] = 360 json_dict['config']['nx'] = 384 json_dict['config']['ny'] = 384 json_dict['config']['nz'] = 194 json_dict['config']['dx'] = 25 json_dict['config']['dy'] = 25 json_dict['config']['dz'] = 25 json_dict['config']['dt'] = 60 json_dict['config']['ug'] = 0. json_dict['config']['vg'] = 0. 
elif case_name == 'CGILS_300K': #---- CGILS_300K json_dict = {} json_dict['config'] = {} stat_file = 'ENT_CGILS_S6_IDEAL_3D_SST_300K_384x384x194_25m_1s_stat.nc' location = '/newtera/loh/data/CGILS_300K' json_dict['case_name'] = case_name json_dict['location'] = location json_dict['condensed'] = '%s/condensed_entrain' % location json_dict['core'] = '%s/core_entrain' % location json_dict['stat_file'] = '%s/%s' % (location, stat_file) json_dict['tracking'] = '%s/tracking' % location json_dict['variables'] = '%s/variables' % location # Model parameters json_dict['config']['nt'] = 360 json_dict['config']['nx'] = 384 json_dict['config']['ny'] = 384 json_dict['config']['nz'] = 194 json_dict['config']['dx'] = 25 json_dict['config']['dy'] = 25 json_dict['config']['dz'] = 25 json_dict['config']['dt'] = 60 json_dict['config']['ug'] = 0. json_dict['config']['vg'] = 0. elif case_name == 'CGILS_301K': #---- CGILS_301K json_dict = {} json_dict['config'] = {} stat_file = 'ENT_CGILS_S6_IDEAL_3D_SST_301K_384x384x194_25m_1s_stat.nc' location = '/newtera/loh/data/CGILS_301K' json_dict['case_name'] = case_name json_dict['location'] = location json_dict['condensed'] = '%s/condensed_entrain' % location json_dict['core'] = '%s/core_entrain' % location json_dict['stat_file'] = '%s/%s' % (location, stat_file) json_dict['tracking'] = '%s/tracking' % location json_dict['variables'] = '%s/variables' % location # Model parameters json_dict['config']['nt'] = 360 json_dict['config']['nx'] = 384 json_dict['config']['ny'] = 384 json_dict['config']['nz'] = 194 json_dict['config']['dx'] = 25 json_dict['config']['dy'] = 25 json_dict['config']['dz'] = 25 json_dict['config']['dt'] = 60 json_dict['config']['ug'] = 0. json_dict['config']['vg'] = 0. 
elif case_name == 'GCSSARM': #---- GCSSARM json_dict = {} json_dict['config'] = {} stat_file = 'GCSSARM_256x256x208_25m_25m_1s_stat.nc' location = '/newtera/loh/data/GCSSARM' json_dict['case_name'] = case_name json_dict['location'] = location json_dict['condensed'] = '%s/condensed_entrain' % location json_dict['core'] = '%s/core_entrain' % location json_dict['stat_file'] = '%s/%s' % (location, stat_file) json_dict['tracking'] = '%s/tracking' % location json_dict['variables'] = '%s/variables' % location # Model parameters json_dict['config']['nx'] = 256 json_dict['config']['ny'] = 256 json_dict['config']['nz'] = 128 json_dict['config']['nt'] = 510 json_dict['config']['dx'] = 25 json_dict['config']['dy'] = 25 json_dict['config']['dz'] = 25 json_dict['config']['dt'] = 60 json_dict['config']['ug'] = 10. json_dict['config']['vg'] = 0. elif case_name == 'GATE_BDL': #---- GATE_BDL json_dict = {} json_dict['config'] = {} stat_file = 'GATE_1920x1920x512_50m_1s_ent_stat.nc' location = '/newtera/loh/data/GATE_BDL' json_dict['case_name'] = case_name json_dict['location'] = location json_dict['condensed'] = '%s/condensed_entrain' % location json_dict['core'] = '%s/core_entrain' % location json_dict['stat_file'] = '%s/%s' % (location, stat_file) json_dict['tracking'] = '%s/tracking' % location json_dict['variables'] = '%s/variables' % location # Model parameters json_dict['config']['nx'] = 1728 json_dict['config']['ny'] = 1728 json_dict['config']['nz'] = 80 json_dict['config']['nt'] = 180 json_dict['config']['dx'] = 50 json_dict['config']['dy'] = 50 json_dict['config']['dz'] = 50 json_dict['config']['dt'] = 60 json_dict['config']['ug'] = -8. json_dict['config']['vg'] = 0. 
elif case_name == 'GATE': #---- GATE json_dict = {} json_dict['config'] = {} stat_file = 'GATE_1920x1920x512_50m_1s_ent_stat.nc' location = '/newtera/loh/data/GATE' json_dict['case_name'] = case_name json_dict['location'] = location json_dict['condensed'] = '%s/condensed_entrain' % location json_dict['core'] = '%s/core_entrain' % location json_dict['stat_file'] = '%s/%s' % (location, stat_file) json_dict['tracking'] = '/tera/loh/cloudtracker/cloudtracker/hdf5' json_dict['variables'] = '%s/variables' % location # Model parameters json_dict['config']['nx'] = 1728 json_dict['config']['ny'] = 1728 json_dict['config']['nz'] = 320 json_dict['config']['nt'] = 30 json_dict['config']['dx'] = 50 json_dict['config']['dy'] = 50 json_dict['config']['dz'] = 50 json_dict['config']['dt'] = 60 json_dict['config']['ug'] = -8. json_dict['config']['vg'] = 0. else: raise ValueError('Case name not found') with open('model_config.json','w') as f: json.dump(json_dict, f,indent=1) print('Wrote {} using util.write_json'.format('model_config.json')) pp = pprint.PrettyPrinter(indent=1) pp.pprint(json_dict) if __name__ == '__main__': if len(sys.argv) == 2: main(sys.argv[-1]) else: print("Missing parameter") print("For example, run python write_json.py BOMEX \n") raise ValueError('Case name is not given')
33.288703
79
0.557315
958
7,956
4.351775
0.115866
0.260974
0.258575
0.057568
0.861358
0.861358
0.861358
0.846966
0.846966
0.805229
0
0.048655
0.26634
7,956
238
80
33.428571
0.665582
0.026144
0
0.77193
0
0
0.282002
0.068141
0
0
0
0
0
1
0.005848
false
0
0.005848
0
0.011696
0.035088
0
0
0
null
1
1
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
9
8ef7fbac6c6d69791c8f3b49e5b5dd3b88edb328
151
py
Python
src/version.py
ESDLMapEditorESSIM/esdl-mapeditor
c17090e19de8ff3a0cc552f347639bac67840d22
[ "Apache-2.0" ]
null
null
null
src/version.py
ESDLMapEditorESSIM/esdl-mapeditor
c17090e19de8ff3a0cc552f347639bac67840d22
[ "Apache-2.0" ]
14
2020-09-30T21:16:46.000Z
2021-11-08T18:54:34.000Z
src/version.py
ESDLMapEditorESSIM/esdl-mapeditor
c17090e19de8ff3a0cc552f347639bac67840d22
[ "Apache-2.0" ]
1
2020-09-17T12:48:57.000Z
2020-09-17T12:48:57.000Z
__version__ = "21.10.0" __long_version__ = "21.10.0-gf7246ee-main" __git_commit__ = "f7246ee33528369704d4b6c90efa114ea5f4688d" __git_branch__ = "main"
30.2
59
0.801325
17
151
6
0.647059
0.176471
0.215686
0.235294
0
0
0
0
0
0
0
0.294964
0.07947
151
4
60
37.75
0.438849
0
0
0
0
0
0.476821
0.403974
0
0
0
0
0
1
0
false
0
0
0
0
0
1
0
0
null
0
1
1
0
0
0
0
0
0
0
1
0
0
1
0
0
1
0
0
0
0
0
0
1
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
8efc29f72a43bcc13482b162711b19bffd8ff609
90
py
Python
roman_numerals_webservice/roman_numerals/__init__.py
DerThorsten/roman_numerals_webservice
8d29c5141951cead9a15cd62131220bd402eb851
[ "MIT" ]
null
null
null
roman_numerals_webservice/roman_numerals/__init__.py
DerThorsten/roman_numerals_webservice
8d29c5141951cead9a15cd62131220bd402eb851
[ "MIT" ]
1
2019-07-23T20:54:02.000Z
2019-07-23T20:56:43.000Z
roman_numerals_webservice/roman_numerals/__init__.py
DerThorsten/roman_numerals_webservice
8d29c5141951cead9a15cd62131220bd402eb851
[ "MIT" ]
null
null
null
from .arabic_to_roman import arabic_to_roman from .roman_to_arabic import roman_to_arabic
30
44
0.888889
16
90
4.5
0.3125
0.222222
0.361111
0
0
0
0
0
0
0
0
0
0.088889
90
2
45
45
0.878049
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
7
f131e0bdb1a06993875010b521de510f57220040
21,035
py
Python
tests/gameforum/test_game_rights_cases.py
kozzztik/tulius
81b8f6484eefdc453047f62173a08f5e6f640cd6
[ "MIT" ]
1
2020-04-21T15:09:18.000Z
2020-04-21T15:09:18.000Z
tests/gameforum/test_game_rights_cases.py
kozzztik/tulius
81b8f6484eefdc453047f62173a08f5e6f640cd6
[ "MIT" ]
70
2019-04-10T22:32:32.000Z
2022-03-11T23:12:54.000Z
tests/gameforum/test_game_rights_cases.py
kozzztik/tulius
81b8f6484eefdc453047f62173a08f5e6f640cd6
[ "MIT" ]
1
2019-04-12T14:55:39.000Z
2019-04-12T14:55:39.000Z
from django.db import transaction from tulius.forum.threads import models from tulius.games import models as game_models from tulius.stories import models as story_models from tulius.gameforum import core def test_thread_with_wrong_variation( story, game, admin, variation_forum): variation = story_models.Variation(story=story, name='Variation2') variation.save() base_url = f'/api/game_forum/variation/{variation.pk}/' response = admin.get(base_url + f'thread/{variation_forum.pk}/') assert response.status_code == 403 def test_access_to_variation(variation, variation_forum, client, user): response = client.get(variation_forum.get_absolute_url()) assert response.status_code == 403 response = user.get(variation_forum.get_absolute_url()) assert response.status_code == 403 def test_guest_access_to_game(game, variation_forum, admin, game_guest): game.status = game_models.GAME_STATUS_IN_PROGRESS with transaction.atomic(): game.save() # create thread with "no read" and no role response = admin.put( variation_forum.get_absolute_url(), { 'title': 'thread', 'body': 'thread description', 'room': False, 'default_rights': models.NO_ACCESS, 'granted_rights': [], 'important': True, 'closed': True, 'media': {}}) assert response.status_code == 200 thread = response.json() # check guest can read it response = game_guest.get(thread['url']) assert response.status_code == 200 data = response.json() assert data['body'] == 'thread description' # create thread with no specified rights. 
There was a problem with # fail on it response = admin.put( variation_forum.get_absolute_url(), { 'title': 'thread', 'body': 'thread description', 'room': False, 'default_rights': None, 'granted_rights': [], 'important': True, 'closed': True, 'media': {}}) assert response.status_code == 200 thread = response.json() # check guest can read it response = game_guest.get(thread['url']) assert response.status_code == 200 def test_finishing_game_rights( game, variation_forum, admin, user, detective, client): game.status = game_models.GAME_STATUS_IN_PROGRESS with transaction.atomic(): game.save() # create thread with "no read" and no role response = admin.put( variation_forum.get_absolute_url(), { 'title': 'thread', 'body': 'thread description', 'room': False, 'default_rights': models.NO_ACCESS, 'granted_rights': [], 'important': False, 'media': {}}) assert response.status_code == 200 thread = response.json() # create own user thread response = user.put( variation_forum.get_absolute_url(), { 'title': 'thread', 'body': 'thread description', 'room': False, 'default_rights': models.NO_ACCESS, 'granted_rights': [], 'role_id': detective.pk, 'media': {}}) assert response.status_code == 200 thread2 = response.json() # check user can add comments response = user.post( thread2['url'] + 'comments_page/', { 'reply_id': thread2['first_comment_id'], 'title': 'Hello', 'body': 'my comment is awesome', 'media': {}, 'role_id': detective.pk, }) assert response.status_code == 200 data = response.json() assert len(data['comments']) == 2 # check user can't read first thread response = user.get(thread['url']) assert response.status_code == 403 # change game status game.status = game_models.GAME_STATUS_FINISHING with transaction.atomic(): game.save() # check user still can write response = user.post( thread2['url'] + 'comments_page/', { 'reply_id': thread2['first_comment_id'], 'title': 'Hello', 'body': 'my comment is awesome', 'media': {}, 'role_id': detective.pk, }) assert response.status_code 
== 200 data = response.json() assert len(data['comments']) == 3 # And thread is opened now response = user.get(thread['url']) assert response.status_code == 200 # Finish game game.status = game_models.GAME_STATUS_COMPLETED with transaction.atomic(): game.save() # check user can't write any more response = user.post( thread2['url'] + 'comments_page/', { 'reply_id': thread2['first_comment_id'], 'title': 'Hello', 'body': 'my comment is awesome', 'media': {}, 'role_id': detective.pk, }) assert response.status_code == 403 # Thread is still opened response = user.get(thread['url']) assert response.status_code == 200 # but still not for anonymous response = client.get(thread['url']) assert response.status_code == 403 # Open game game.status = game_models.GAME_STATUS_COMPLETED_OPEN with transaction.atomic(): game.save() # now anyone can read response = client.get(thread['url']) assert response.status_code == 200 def test_grant_moderator_rights(game, variation_forum, admin, user, detective): game.status = game_models.GAME_STATUS_IN_PROGRESS with transaction.atomic(): game.save() base_url = f'/api/game_forum/variation/{game.variation.pk}/' # create thread with "no read" and no role response = admin.put( base_url + f'thread/{variation_forum.id}/', { 'title': 'thread', 'body': 'thread description', 'room': False, 'default_rights': models.ACCESS_OPEN, 'granted_rights': [], 'important': False, 'media': {}}) assert response.status_code == 200 thread = response.json() # add a comment by admin response = admin.post( thread['url'] + 'comments_page/', { 'reply_id': thread['first_comment_id'], 'title': 'Hello', 'body': 'my comment is awesome', 'media': {}, 'role_id': detective.pk, }) assert response.status_code == 200 data = response.json() assert len(data['comments']) == 2 comment = data['comments'][1] # check user can read thread and cant edit comment response = user.get(thread['url']) assert response.status_code == 200 data = response.json() assert data['body'] == 'thread 
description' response = user.post(comment['url'], { 'title': 'Hello', 'body': 'my comment is awesome2', 'media': {}, 'role_id': detective.pk}) assert response.status_code == 403 # grant moderate rights response = admin.post( thread['url'] + 'granted_rights/', { 'user': {'id': detective.pk}, 'access_level': models.ACCESS_MODERATE } ) assert response.status_code == 200 # check we can update comment response = user.post(comment['url'], { 'title': 'Hello', 'body': 'my comment is awesome2', 'media': {}, 'role_id': detective.pk, 'edit_role_id': detective.pk}) assert response.status_code == 200 data = response.json() assert data['body'] == 'my comment is awesome2' def test_chain_strict_read( game, variation_forum, admin, user, detective, murderer): game.status = game_models.GAME_STATUS_IN_PROGRESS with transaction.atomic(): game.save() # create room with read limits response = admin.put( variation_forum.get_absolute_url(), { 'title': 'room', 'body': 'room description', 'room': True, 'default_rights': models.NO_ACCESS, 'granted_rights': [], 'media': {}}) assert response.status_code == 200 room = response.json() # create thread with "no read" and no role and detective grants response = admin.put( room['url'], { 'title': 'thread', 'body': 'thread description', 'room': False, 'default_rights': models.NO_ACCESS, 'granted_rights': [{ 'user': {'id': detective.pk}, 'access_level': models.ACCESS_READ }], 'important': False, 'media': {}}) assert response.status_code == 200 thread = response.json() # check user can read thread, because have exceptions, even have no access # to parent room. 
response = user.get(thread['url']) assert response.status_code == 200 # and user don't see room response = user.get(variation_forum.get_absolute_url()) assert response.status_code == 200 data = response.json() assert not data['rooms'] # but admin see it even if he play murderer.user = admin.user murderer.save() response = admin.get(variation_forum.get_absolute_url()) assert response.status_code == 200 data = response.json() assert len(data['rooms']) == 1 # grant read rights to room response = admin.post( room['url'] + 'granted_rights/', { 'user': {'id': detective.pk}, 'access_level': models.ACCESS_READ } ) assert response.status_code == 200 # check thread now response = user.get(thread['url']) assert response.status_code == 200 data = response.json() assert data['body'] == 'thread description' # check root response = user.get(variation_forum.get_absolute_url()) assert response.status_code == 200 data = response.json() assert len(data['rooms']) == 1 # check room response = user.get(room['url']) assert response.status_code == 200 data = response.json() assert data['threads'][0]['accessed_users'][0]['id'] == detective.pk assert data['threads'][0]['accessed_users'][0]['title'] == detective.name def test_chain_write_rights(game, variation_forum, admin, user, detective): game.status = game_models.GAME_STATUS_IN_PROGRESS with transaction.atomic(): game.save() # create thread room with read limits response = admin.put( variation_forum.get_absolute_url(), { 'title': 'room', 'body': 'room description', 'room': True, 'default_rights': models.ACCESS_READ, 'granted_rights': [], 'media': {}}) assert response.status_code == 200 room = response.json() # create middle room with "not set" type and write rights response = admin.put( room['url'], { 'title': 'room', 'body': 'room description', 'room': True, 'default_rights': None, 'granted_rights': [], 'media': {}}) assert response.status_code == 200 room2 = response.json() # create thread with "no read" and no role and detective 
grants response = admin.put( room2['url'], { 'title': 'thread', 'body': 'thread description', 'room': False, 'default_rights': models.NO_ACCESS, 'granted_rights': [{ 'user': {'id': detective.pk}, 'access_level': models.ACCESS_READ }], 'important': False, 'media': {}}) assert response.status_code == 200 thread = response.json() # check user can read thread response = user.get(thread['url']) assert response.status_code == 200 # but can't write response = user.post( thread['url'] + 'comments_page/', { 'reply_id': thread['first_comment_id'], 'title': 'Hello', 'body': 'my comment is awesome', 'media': {}, 'role_id': detective.pk, }) assert response.status_code == 403 # grant rights to middle room response = admin.post( room2['url'] + 'granted_rights/', { 'user': {'id': detective.pk}, 'access_level': models.ACCESS_WRITE } ) assert response.status_code == 200 # and now still cant write response = user.post( thread['url'] + 'comments_page/', { 'reply_id': thread['first_comment_id'], 'title': 'Hello', 'body': 'my comment is awesome', 'media': {}, 'role_id': detective.pk, }) assert response.status_code == 403 def test_broken_tree_rights(game, variation_forum, admin): game.status = game_models.GAME_STATUS_IN_PROGRESS with transaction.atomic(): game.save() base_url = f'/api/game_forum/variation/{game.variation.pk}/' # create thread room with read limits response = admin.put( base_url + f'thread/{variation_forum.id}/', { 'title': 'room', 'body': 'room description', 'room': True, 'default_rights': models.ACCESS_READ, 'granted_rights': [], 'media': {}}) assert response.status_code == 200 thread = response.json() # break forum tree game.variation.thread = core.create_game_forum(admin.user, game.variation) game.variation.save() # now get thread. Previously it caused 500 on tree rights check. 
response = admin.get(thread['url']) assert response.status_code == 200 def test_grant_rights_to_variation(variation, variation_forum, user): response = user.get(variation_forum.get_absolute_url()) assert response.status_code == 403 # grant rights admin = story_models.StoryAdmin(story=variation.story, user=user.user) admin.save() # check response = user.get(variation_forum.get_absolute_url()) assert response.status_code == 200 # delete admin.delete() response = user.get(variation_forum.get_absolute_url()) assert response.status_code == 403 def test_grant_rights_to_game(game, variation_forum, user): game.status = game_models.GAME_STATUS_IN_PROGRESS with transaction.atomic(): game.save() response = user.get(variation_forum.get_absolute_url()) assert response.status_code == 403 # grant rights admin = game_models.GameAdmin(game=game, user=user.user) admin.save() # check response = user.get(variation_forum.get_absolute_url()) assert response.status_code == 200 response = user.put( variation_forum.get_absolute_url(), { 'title': 'room', 'body': 'room description', 'room': True, 'default_rights': models.ACCESS_READ, 'granted_rights': [], 'media': {}}) assert response.status_code == 200 # delete admin.delete() response = user.get(variation_forum.get_absolute_url()) assert response.status_code == 403 # grant guest rights guest = game_models.GameGuest(game=game, user=user.user) guest.save() # check response = user.get(variation_forum.get_absolute_url()) assert response.status_code == 200 response = user.put( variation_forum.get_absolute_url(), { 'title': 'room', 'body': 'room description', 'room': True, 'default_rights': models.ACCESS_READ, 'granted_rights': [], 'media': {}}) assert response.status_code == 403 # delete guest.delete() response = user.get(variation_forum.get_absolute_url()) assert response.status_code == 403 def test_not_inherited_read_only_root( game, variation_forum, user, admin, detective): response = admin.put( variation_forum.get_absolute_url() + 
'granted_rights/', {'default_rights': models.ACCESS_READ + models.ACCESS_NO_INHERIT}) assert response.status_code == 200 # create sub room response = admin.put( variation_forum.get_absolute_url(), { 'title': 'room', 'body': 'room description', 'room': True, 'default_rights': None, 'granted_rights': [], 'media': {}}) assert response.status_code == 200 room = response.json() # start game. Reload game to update thread caches. game = game_models.Game.objects.get(pk=game.pk) game.status = game_models.GAME_STATUS_IN_PROGRESS with transaction.atomic(): game.save() # check user can read and can't write at root response = user.get(variation_forum.get_absolute_url()) assert response.status_code == 200 response = user.put( variation_forum.get_absolute_url(), { 'title': 'room', 'body': 'room description', 'room': True, 'default_rights': None, 'role_id': detective.pk, 'granted_rights': [], 'media': {}}) assert response.status_code == 403 # check we can read and write in sub room response = user.get(room['url']) assert response.status_code == 200 response = user.put( room['url'], { 'title': 'room', 'body': 'room description', 'room': True, 'default_rights': None, 'role_id': detective.pk, 'granted_rights': [], 'media': {}}) assert response.status_code == 200 def test_not_inherited_read_only_room( game, variation_forum, user, admin, detective): # create sub room response = admin.put( variation_forum.get_absolute_url(), { 'title': 'room', 'body': 'room description', 'room': True, 'default_rights': models.ACCESS_READ + models.ACCESS_NO_INHERIT, 'granted_rights': [], 'media': {}}) assert response.status_code == 200 room1 = response.json() # create sub sub room response = admin.put( variation_forum.get_absolute_url(), { 'title': 'room', 'body': 'room description', 'room': True, 'default_rights': None, 'granted_rights': [], 'media': {}}) assert response.status_code == 200 room2 = response.json() # start game game.status = game_models.GAME_STATUS_IN_PROGRESS with transaction.atomic(): 
game.save() # check we can read and write in root response = user.get(variation_forum.get_absolute_url()) assert response.status_code == 200 response = user.put( variation_forum.get_absolute_url(), { 'title': 'room', 'body': 'room description', 'room': True, 'default_rights': None, 'role_id': detective.pk, 'granted_rights': [], 'media': {}}) assert response.status_code == 200 # check user can read and can't write at room response = user.get(room1['url']) assert response.status_code == 200 response = user.put( room1['url'], { 'title': 'room', 'body': 'room description', 'room': True, 'default_rights': None, 'role_id': detective.pk, 'granted_rights': [], 'media': {}}) assert response.status_code == 403 # check we can read and write in sub room response = user.get(room2['url']) assert response.status_code == 200 response = user.put( room2['url'], { 'title': 'room', 'body': 'room description', 'room': True, 'default_rights': None, 'role_id': detective.pk, 'granted_rights': [], 'media': {}}) assert response.status_code == 200 def test_not_defined_rights_on_root( game, variation_forum, user, admin, detective): response = admin.put( variation_forum.get_absolute_url() + 'granted_rights/', {'default_rights': None}) assert response.status_code == 200 # start game. Reload game to update thread caches. 
game = game_models.Game.objects.get(pk=game.pk) game.status = game_models.GAME_STATUS_IN_PROGRESS with transaction.atomic(): game.save() # check user can read and write at root response = user.get(variation_forum.get_absolute_url()) assert response.status_code == 200 response = user.put( variation_forum.get_absolute_url(), { 'title': 'room', 'body': 'room description', 'room': True, 'default_rights': None, 'role_id': detective.pk, 'granted_rights': [], 'media': {}}) assert response.status_code == 200 def test_rights_override(game, variation_forum, user, admin, detective): game.status = game_models.GAME_STATUS_IN_PROGRESS with transaction.atomic(): game.save() response = admin.put( variation_forum.get_absolute_url(), { 'title': 'room', 'body': 'room description', 'room': True, 'default_rights': models.ACCESS_READ, 'role_id': None, 'granted_rights': [], 'media': {}}) assert response.status_code == 200 room1 = response.json() response = admin.put( room1['url'], { 'title': 'room2', 'body': 'room2 description', 'room': True, 'default_rights': models.ACCESS_OPEN, 'role_id': None, 'granted_rights': [], 'media': {}}) assert response.status_code == 200 room2 = response.json() # check no write room1 response = user.put( room1['url'], { 'title': 'thread', 'body': 'thread description', 'room': False, 'default_rights': None, 'role_id': detective.pk, 'granted_rights': [], 'media': {}}) assert response.status_code == 403 # check write room2 response = user.put( room2['url'], { 'title': 'thread', 'body': 'thread description', 'room': False, 'default_rights': None, 'role_id': detective.pk, 'granted_rights': [], 'media': {}}) assert response.status_code == 200
39.244403
79
0.622106
2,470
21,035
5.115789
0.068826
0.079772
0.11396
0.136752
0.875119
0.843701
0.813469
0.796455
0.779677
0.755461
0
0.016198
0.242786
21,035
535
80
39.317757
0.777122
0.087663
0
0.829545
0
0
0.16895
0.011347
0
0
0
0
0.190909
1
0.031818
false
0
0.025
0
0.056818
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
f1428aff09b43664f562ae1b28fb20d098c4e61b
63,662
py
Python
napalm_yang/models/openconfig/network_instances/network_instance/protocols/protocol/isis/levels/level/__init__.py
ckishimo/napalm-yang
8f2bd907bd3afcde3c2f8e985192de74748baf6c
[ "Apache-2.0" ]
64
2016-10-20T15:47:18.000Z
2021-11-11T11:57:32.000Z
napalm_yang/models/openconfig/network_instances/network_instance/protocols/protocol/isis/levels/level/__init__.py
ckishimo/napalm-yang
8f2bd907bd3afcde3c2f8e985192de74748baf6c
[ "Apache-2.0" ]
126
2016-10-05T10:36:14.000Z
2019-05-15T08:43:23.000Z
napalm_yang/models/openconfig/network_instances/network_instance/protocols/protocol/isis/levels/level/__init__.py
ckishimo/napalm-yang
8f2bd907bd3afcde3c2f8e985192de74748baf6c
[ "Apache-2.0" ]
63
2016-11-07T15:23:08.000Z
2021-09-22T14:41:16.000Z
# -*- coding: utf-8 -*- from operator import attrgetter from pyangbind.lib.yangtypes import RestrictedPrecisionDecimalType from pyangbind.lib.yangtypes import RestrictedClassType from pyangbind.lib.yangtypes import TypedListType from pyangbind.lib.yangtypes import YANGBool from pyangbind.lib.yangtypes import YANGListType from pyangbind.lib.yangtypes import YANGDynClass from pyangbind.lib.yangtypes import ReferenceType from pyangbind.lib.base import PybindBase from collections import OrderedDict from decimal import Decimal from bitarray import bitarray import six # PY3 support of some PY2 keywords (needs improved) if six.PY3: import builtins as __builtin__ long = int elif six.PY2: import __builtin__ from . import config from . import state from . import system_level_counters from . import link_state_database from . import traffic_engineering from . import route_preference from . import authentication class level(PybindBase): """ This class was auto-generated by the PythonClass plugin for PYANG from YANG module openconfig-network-instance - based on the path /network-instances/network-instance/protocols/protocol/isis/levels/level. Each member element of the container is represented as a class variable - with a specific YANG type. 
YANG Description: Configuration and operational state parameters related to a particular level within the IS-IS protocol instance """ __slots__ = ( "_path_helper", "_extmethods", "__level_number", "__config", "__state", "__system_level_counters", "__link_state_database", "__traffic_engineering", "__route_preference", "__authentication", ) _yang_name = "level" _pybind_generated_by = "container" def __init__(self, *args, **kwargs): self._path_helper = False self._extmethods = False self.__level_number = YANGDynClass( base=six.text_type, is_leaf=True, yang_name="level-number", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True, namespace="http://openconfig.net/yang/network-instance", defining_module="openconfig-network-instance", yang_type="leafref", is_config=True, ) self.__config = YANGDynClass( base=config.config, is_container="container", yang_name="config", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace="http://openconfig.net/yang/network-instance", defining_module="openconfig-network-instance", yang_type="container", is_config=True, ) self.__state = YANGDynClass( base=state.state, is_container="container", yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace="http://openconfig.net/yang/network-instance", defining_module="openconfig-network-instance", yang_type="container", is_config=True, ) self.__system_level_counters = YANGDynClass( base=system_level_counters.system_level_counters, is_container="container", yang_name="system-level-counters", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace="http://openconfig.net/yang/network-instance", defining_module="openconfig-network-instance", yang_type="container", is_config=True, ) self.__link_state_database = YANGDynClass( 
base=link_state_database.link_state_database, is_container="container", yang_name="link-state-database", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace="http://openconfig.net/yang/network-instance", defining_module="openconfig-network-instance", yang_type="container", is_config=True, ) self.__traffic_engineering = YANGDynClass( base=traffic_engineering.traffic_engineering, is_container="container", yang_name="traffic-engineering", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace="http://openconfig.net/yang/network-instance", defining_module="openconfig-network-instance", yang_type="container", is_config=True, ) self.__route_preference = YANGDynClass( base=route_preference.route_preference, is_container="container", yang_name="route-preference", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace="http://openconfig.net/yang/network-instance", defining_module="openconfig-network-instance", yang_type="container", is_config=True, ) self.__authentication = YANGDynClass( base=authentication.authentication, is_container="container", yang_name="authentication", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace="http://openconfig.net/yang/network-instance", defining_module="openconfig-network-instance", yang_type="container", is_config=True, ) load = kwargs.pop("load", None) if args: if len(args) > 1: raise TypeError("cannot create a YANG container with >1 argument") all_attr = True for e in self._pyangbind_elements: if not hasattr(args[0], e): all_attr = False break if not all_attr: raise ValueError("Supplied object did not have the correct attributes") for e in self._pyangbind_elements: nobj = getattr(args[0], e) if nobj._changed() is False: continue setmethod = getattr(self, "_set_%s" % e) 
if load is None: setmethod(getattr(args[0], e)) else: setmethod(getattr(args[0], e), load=load) def _path(self): if hasattr(self, "_parent"): return self._parent._path() + [self._yang_name] else: return [ "network-instances", "network-instance", "protocols", "protocol", "isis", "levels", "level", ] def _get_level_number(self): """ Getter method for level_number, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/level_number (leafref) YANG Description: Reference to ISIS level-number. """ return self.__level_number def _set_level_number(self, v, load=False): """ Setter method for level_number, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/level_number (leafref) If this variable is read-only (config: false) in the source YANG file, then _set_level_number is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_level_number() directly. YANG Description: Reference to ISIS level-number. 
""" parent = getattr(self, "_parent", None) if parent is not None and load is False: raise AttributeError( "Cannot set keys directly when" + " within an instantiated list" ) if hasattr(v, "_utype"): v = v._utype(v) try: t = YANGDynClass( v, base=six.text_type, is_leaf=True, yang_name="level-number", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True, namespace="http://openconfig.net/yang/network-instance", defining_module="openconfig-network-instance", yang_type="leafref", is_config=True, ) except (TypeError, ValueError): raise ValueError( { "error-string": """level_number must be of a type compatible with leafref""", "defined-type": "leafref", "generated-type": """YANGDynClass(base=six.text_type, is_leaf=True, yang_name="level-number", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='leafref', is_config=True)""", } ) self.__level_number = t if hasattr(self, "_set"): self._set() def _unset_level_number(self): self.__level_number = YANGDynClass( base=six.text_type, is_leaf=True, yang_name="level-number", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True, namespace="http://openconfig.net/yang/network-instance", defining_module="openconfig-network-instance", yang_type="leafref", is_config=True, ) def _get_config(self): """ Getter method for config, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/config (container) YANG Description: This container defines ISIS level based configuration. 
""" return self.__config def _set_config(self, v, load=False): """ Setter method for config, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/config (container) If this variable is read-only (config: false) in the source YANG file, then _set_config is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_config() directly. YANG Description: This container defines ISIS level based configuration. """ if hasattr(v, "_utype"): v = v._utype(v) try: t = YANGDynClass( v, base=config.config, is_container="container", yang_name="config", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace="http://openconfig.net/yang/network-instance", defining_module="openconfig-network-instance", yang_type="container", is_config=True, ) except (TypeError, ValueError): raise ValueError( { "error-string": """config must be of a type compatible with container""", "defined-type": "container", "generated-type": """YANGDynClass(base=config.config, is_container='container', yang_name="config", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=True)""", } ) self.__config = t if hasattr(self, "_set"): self._set() def _unset_config(self): self.__config = YANGDynClass( base=config.config, is_container="container", yang_name="config", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace="http://openconfig.net/yang/network-instance", defining_module="openconfig-network-instance", yang_type="container", is_config=True, ) def _get_state(self): """ Getter method for state, mapped from YANG variable 
/network_instances/network_instance/protocols/protocol/isis/levels/level/state (container) YANG Description: This container defines ISIS level state information. """ return self.__state def _set_state(self, v, load=False): """ Setter method for state, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/state (container) If this variable is read-only (config: false) in the source YANG file, then _set_state is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_state() directly. YANG Description: This container defines ISIS level state information. """ if hasattr(v, "_utype"): v = v._utype(v) try: t = YANGDynClass( v, base=state.state, is_container="container", yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace="http://openconfig.net/yang/network-instance", defining_module="openconfig-network-instance", yang_type="container", is_config=True, ) except (TypeError, ValueError): raise ValueError( { "error-string": """state must be of a type compatible with container""", "defined-type": "container", "generated-type": """YANGDynClass(base=state.state, is_container='container', yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=True)""", } ) self.__state = t if hasattr(self, "_set"): self._set() def _unset_state(self): self.__state = YANGDynClass( base=state.state, is_container="container", yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace="http://openconfig.net/yang/network-instance", defining_module="openconfig-network-instance", yang_type="container", is_config=True, ) def 
_get_system_level_counters(self): """ Getter method for system_level_counters, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/system_level_counters (container) YANG Description: This container defines ISIS system level counters. """ return self.__system_level_counters def _set_system_level_counters(self, v, load=False): """ Setter method for system_level_counters, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/system_level_counters (container) If this variable is read-only (config: false) in the source YANG file, then _set_system_level_counters is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_system_level_counters() directly. YANG Description: This container defines ISIS system level counters. """ if hasattr(v, "_utype"): v = v._utype(v) try: t = YANGDynClass( v, base=system_level_counters.system_level_counters, is_container="container", yang_name="system-level-counters", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace="http://openconfig.net/yang/network-instance", defining_module="openconfig-network-instance", yang_type="container", is_config=True, ) except (TypeError, ValueError): raise ValueError( { "error-string": """system_level_counters must be of a type compatible with container""", "defined-type": "container", "generated-type": """YANGDynClass(base=system_level_counters.system_level_counters, is_container='container', yang_name="system-level-counters", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=True)""", } ) self.__system_level_counters = t if hasattr(self, "_set"): self._set() def _unset_system_level_counters(self): 
self.__system_level_counters = YANGDynClass( base=system_level_counters.system_level_counters, is_container="container", yang_name="system-level-counters", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace="http://openconfig.net/yang/network-instance", defining_module="openconfig-network-instance", yang_type="container", is_config=True, ) def _get_link_state_database(self): """ Getter method for link_state_database, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database (container) YANG Description: This container defines ISIS LSDB. """ return self.__link_state_database def _set_link_state_database(self, v, load=False): """ Setter method for link_state_database, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database (container) If this variable is read-only (config: false) in the source YANG file, then _set_link_state_database is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_link_state_database() directly. YANG Description: This container defines ISIS LSDB. 
""" if hasattr(v, "_utype"): v = v._utype(v) try: t = YANGDynClass( v, base=link_state_database.link_state_database, is_container="container", yang_name="link-state-database", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace="http://openconfig.net/yang/network-instance", defining_module="openconfig-network-instance", yang_type="container", is_config=True, ) except (TypeError, ValueError): raise ValueError( { "error-string": """link_state_database must be of a type compatible with container""", "defined-type": "container", "generated-type": """YANGDynClass(base=link_state_database.link_state_database, is_container='container', yang_name="link-state-database", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=True)""", } ) self.__link_state_database = t if hasattr(self, "_set"): self._set() def _unset_link_state_database(self): self.__link_state_database = YANGDynClass( base=link_state_database.link_state_database, is_container="container", yang_name="link-state-database", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace="http://openconfig.net/yang/network-instance", defining_module="openconfig-network-instance", yang_type="container", is_config=True, ) def _get_traffic_engineering(self): """ Getter method for traffic_engineering, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/traffic_engineering (container) YANG Description: This container defines ISIS TE. 
""" return self.__traffic_engineering def _set_traffic_engineering(self, v, load=False): """ Setter method for traffic_engineering, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/traffic_engineering (container) If this variable is read-only (config: false) in the source YANG file, then _set_traffic_engineering is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_traffic_engineering() directly. YANG Description: This container defines ISIS TE. """ if hasattr(v, "_utype"): v = v._utype(v) try: t = YANGDynClass( v, base=traffic_engineering.traffic_engineering, is_container="container", yang_name="traffic-engineering", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace="http://openconfig.net/yang/network-instance", defining_module="openconfig-network-instance", yang_type="container", is_config=True, ) except (TypeError, ValueError): raise ValueError( { "error-string": """traffic_engineering must be of a type compatible with container""", "defined-type": "container", "generated-type": """YANGDynClass(base=traffic_engineering.traffic_engineering, is_container='container', yang_name="traffic-engineering", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=True)""", } ) self.__traffic_engineering = t if hasattr(self, "_set"): self._set() def _unset_traffic_engineering(self): self.__traffic_engineering = YANGDynClass( base=traffic_engineering.traffic_engineering, is_container="container", yang_name="traffic-engineering", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace="http://openconfig.net/yang/network-instance", 
defining_module="openconfig-network-instance", yang_type="container", is_config=True, ) def _get_route_preference(self): """ Getter method for route_preference, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/route_preference (container) YANG Description: This container defines Administrative Distance (or preference) assigned to ISIS routes (level1 internal, level2 internal, level1 external, level2 external). """ return self.__route_preference def _set_route_preference(self, v, load=False): """ Setter method for route_preference, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/route_preference (container) If this variable is read-only (config: false) in the source YANG file, then _set_route_preference is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_route_preference() directly. YANG Description: This container defines Administrative Distance (or preference) assigned to ISIS routes (level1 internal, level2 internal, level1 external, level2 external). 
""" if hasattr(v, "_utype"): v = v._utype(v) try: t = YANGDynClass( v, base=route_preference.route_preference, is_container="container", yang_name="route-preference", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace="http://openconfig.net/yang/network-instance", defining_module="openconfig-network-instance", yang_type="container", is_config=True, ) except (TypeError, ValueError): raise ValueError( { "error-string": """route_preference must be of a type compatible with container""", "defined-type": "container", "generated-type": """YANGDynClass(base=route_preference.route_preference, is_container='container', yang_name="route-preference", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=True)""", } ) self.__route_preference = t if hasattr(self, "_set"): self._set() def _unset_route_preference(self): self.__route_preference = YANGDynClass( base=route_preference.route_preference, is_container="container", yang_name="route-preference", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace="http://openconfig.net/yang/network-instance", defining_module="openconfig-network-instance", yang_type="container", is_config=True, ) def _get_authentication(self): """ Getter method for authentication, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/authentication (container) YANG Description: This container defines ISIS authentication. 
""" return self.__authentication def _set_authentication(self, v, load=False): """ Setter method for authentication, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/authentication (container) If this variable is read-only (config: false) in the source YANG file, then _set_authentication is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_authentication() directly. YANG Description: This container defines ISIS authentication. """ if hasattr(v, "_utype"): v = v._utype(v) try: t = YANGDynClass( v, base=authentication.authentication, is_container="container", yang_name="authentication", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace="http://openconfig.net/yang/network-instance", defining_module="openconfig-network-instance", yang_type="container", is_config=True, ) except (TypeError, ValueError): raise ValueError( { "error-string": """authentication must be of a type compatible with container""", "defined-type": "container", "generated-type": """YANGDynClass(base=authentication.authentication, is_container='container', yang_name="authentication", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=True)""", } ) self.__authentication = t if hasattr(self, "_set"): self._set() def _unset_authentication(self): self.__authentication = YANGDynClass( base=authentication.authentication, is_container="container", yang_name="authentication", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace="http://openconfig.net/yang/network-instance", defining_module="openconfig-network-instance", yang_type="container", is_config=True, ) 
level_number = __builtin__.property(_get_level_number, _set_level_number) config = __builtin__.property(_get_config, _set_config) state = __builtin__.property(_get_state, _set_state) system_level_counters = __builtin__.property( _get_system_level_counters, _set_system_level_counters ) link_state_database = __builtin__.property( _get_link_state_database, _set_link_state_database ) traffic_engineering = __builtin__.property( _get_traffic_engineering, _set_traffic_engineering ) route_preference = __builtin__.property( _get_route_preference, _set_route_preference ) authentication = __builtin__.property(_get_authentication, _set_authentication) _pyangbind_elements = OrderedDict( [ ("level_number", level_number), ("config", config), ("state", state), ("system_level_counters", system_level_counters), ("link_state_database", link_state_database), ("traffic_engineering", traffic_engineering), ("route_preference", route_preference), ("authentication", authentication), ] ) from . import config from . import state from . import system_level_counters from . import link_state_database from . import traffic_engineering from . import route_preference from . import authentication class level(PybindBase): """ This class was auto-generated by the PythonClass plugin for PYANG from YANG module openconfig-network-instance-l2 - based on the path /network-instances/network-instance/protocols/protocol/isis/levels/level. Each member element of the container is represented as a class variable - with a specific YANG type. 
YANG Description: Configuration and operational state parameters related to a particular level within the IS-IS protocol instance """ __slots__ = ( "_path_helper", "_extmethods", "__level_number", "__config", "__state", "__system_level_counters", "__link_state_database", "__traffic_engineering", "__route_preference", "__authentication", ) _yang_name = "level" _pybind_generated_by = "container" def __init__(self, *args, **kwargs): self._path_helper = False self._extmethods = False self.__level_number = YANGDynClass( base=six.text_type, is_leaf=True, yang_name="level-number", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True, namespace="http://openconfig.net/yang/network-instance", defining_module="openconfig-network-instance", yang_type="leafref", is_config=True, ) self.__config = YANGDynClass( base=config.config, is_container="container", yang_name="config", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace="http://openconfig.net/yang/network-instance", defining_module="openconfig-network-instance", yang_type="container", is_config=True, ) self.__state = YANGDynClass( base=state.state, is_container="container", yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace="http://openconfig.net/yang/network-instance", defining_module="openconfig-network-instance", yang_type="container", is_config=True, ) self.__system_level_counters = YANGDynClass( base=system_level_counters.system_level_counters, is_container="container", yang_name="system-level-counters", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace="http://openconfig.net/yang/network-instance", defining_module="openconfig-network-instance", yang_type="container", is_config=True, ) self.__link_state_database = YANGDynClass( 
base=link_state_database.link_state_database, is_container="container", yang_name="link-state-database", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace="http://openconfig.net/yang/network-instance", defining_module="openconfig-network-instance", yang_type="container", is_config=True, ) self.__traffic_engineering = YANGDynClass( base=traffic_engineering.traffic_engineering, is_container="container", yang_name="traffic-engineering", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace="http://openconfig.net/yang/network-instance", defining_module="openconfig-network-instance", yang_type="container", is_config=True, ) self.__route_preference = YANGDynClass( base=route_preference.route_preference, is_container="container", yang_name="route-preference", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace="http://openconfig.net/yang/network-instance", defining_module="openconfig-network-instance", yang_type="container", is_config=True, ) self.__authentication = YANGDynClass( base=authentication.authentication, is_container="container", yang_name="authentication", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace="http://openconfig.net/yang/network-instance", defining_module="openconfig-network-instance", yang_type="container", is_config=True, ) load = kwargs.pop("load", None) if args: if len(args) > 1: raise TypeError("cannot create a YANG container with >1 argument") all_attr = True for e in self._pyangbind_elements: if not hasattr(args[0], e): all_attr = False break if not all_attr: raise ValueError("Supplied object did not have the correct attributes") for e in self._pyangbind_elements: nobj = getattr(args[0], e) if nobj._changed() is False: continue setmethod = getattr(self, "_set_%s" % e) 
if load is None: setmethod(getattr(args[0], e)) else: setmethod(getattr(args[0], e), load=load) def _path(self): if hasattr(self, "_parent"): return self._parent._path() + [self._yang_name] else: return [ "network-instances", "network-instance", "protocols", "protocol", "isis", "levels", "level", ] def _get_level_number(self): """ Getter method for level_number, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/level_number (leafref) YANG Description: Reference to ISIS level-number. """ return self.__level_number def _set_level_number(self, v, load=False): """ Setter method for level_number, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/level_number (leafref) If this variable is read-only (config: false) in the source YANG file, then _set_level_number is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_level_number() directly. YANG Description: Reference to ISIS level-number. 
""" parent = getattr(self, "_parent", None) if parent is not None and load is False: raise AttributeError( "Cannot set keys directly when" + " within an instantiated list" ) if hasattr(v, "_utype"): v = v._utype(v) try: t = YANGDynClass( v, base=six.text_type, is_leaf=True, yang_name="level-number", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True, namespace="http://openconfig.net/yang/network-instance", defining_module="openconfig-network-instance", yang_type="leafref", is_config=True, ) except (TypeError, ValueError): raise ValueError( { "error-string": """level_number must be of a type compatible with leafref""", "defined-type": "leafref", "generated-type": """YANGDynClass(base=six.text_type, is_leaf=True, yang_name="level-number", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='leafref', is_config=True)""", } ) self.__level_number = t if hasattr(self, "_set"): self._set() def _unset_level_number(self): self.__level_number = YANGDynClass( base=six.text_type, is_leaf=True, yang_name="level-number", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True, namespace="http://openconfig.net/yang/network-instance", defining_module="openconfig-network-instance", yang_type="leafref", is_config=True, ) def _get_config(self): """ Getter method for config, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/config (container) YANG Description: This container defines ISIS level based configuration. 
""" return self.__config def _set_config(self, v, load=False): """ Setter method for config, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/config (container) If this variable is read-only (config: false) in the source YANG file, then _set_config is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_config() directly. YANG Description: This container defines ISIS level based configuration. """ if hasattr(v, "_utype"): v = v._utype(v) try: t = YANGDynClass( v, base=config.config, is_container="container", yang_name="config", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace="http://openconfig.net/yang/network-instance", defining_module="openconfig-network-instance", yang_type="container", is_config=True, ) except (TypeError, ValueError): raise ValueError( { "error-string": """config must be of a type compatible with container""", "defined-type": "container", "generated-type": """YANGDynClass(base=config.config, is_container='container', yang_name="config", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=True)""", } ) self.__config = t if hasattr(self, "_set"): self._set() def _unset_config(self): self.__config = YANGDynClass( base=config.config, is_container="container", yang_name="config", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace="http://openconfig.net/yang/network-instance", defining_module="openconfig-network-instance", yang_type="container", is_config=True, ) def _get_state(self): """ Getter method for state, mapped from YANG variable 
/network_instances/network_instance/protocols/protocol/isis/levels/level/state (container) YANG Description: This container defines ISIS level state information. """ return self.__state def _set_state(self, v, load=False): """ Setter method for state, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/state (container) If this variable is read-only (config: false) in the source YANG file, then _set_state is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_state() directly. YANG Description: This container defines ISIS level state information. """ if hasattr(v, "_utype"): v = v._utype(v) try: t = YANGDynClass( v, base=state.state, is_container="container", yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace="http://openconfig.net/yang/network-instance", defining_module="openconfig-network-instance", yang_type="container", is_config=True, ) except (TypeError, ValueError): raise ValueError( { "error-string": """state must be of a type compatible with container""", "defined-type": "container", "generated-type": """YANGDynClass(base=state.state, is_container='container', yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=True)""", } ) self.__state = t if hasattr(self, "_set"): self._set() def _unset_state(self): self.__state = YANGDynClass( base=state.state, is_container="container", yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace="http://openconfig.net/yang/network-instance", defining_module="openconfig-network-instance", yang_type="container", is_config=True, ) def 
_get_system_level_counters(self): """ Getter method for system_level_counters, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/system_level_counters (container) YANG Description: This container defines ISIS system level counters. """ return self.__system_level_counters def _set_system_level_counters(self, v, load=False): """ Setter method for system_level_counters, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/system_level_counters (container) If this variable is read-only (config: false) in the source YANG file, then _set_system_level_counters is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_system_level_counters() directly. YANG Description: This container defines ISIS system level counters. """ if hasattr(v, "_utype"): v = v._utype(v) try: t = YANGDynClass( v, base=system_level_counters.system_level_counters, is_container="container", yang_name="system-level-counters", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace="http://openconfig.net/yang/network-instance", defining_module="openconfig-network-instance", yang_type="container", is_config=True, ) except (TypeError, ValueError): raise ValueError( { "error-string": """system_level_counters must be of a type compatible with container""", "defined-type": "container", "generated-type": """YANGDynClass(base=system_level_counters.system_level_counters, is_container='container', yang_name="system-level-counters", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=True)""", } ) self.__system_level_counters = t if hasattr(self, "_set"): self._set() def _unset_system_level_counters(self): 
self.__system_level_counters = YANGDynClass( base=system_level_counters.system_level_counters, is_container="container", yang_name="system-level-counters", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace="http://openconfig.net/yang/network-instance", defining_module="openconfig-network-instance", yang_type="container", is_config=True, ) def _get_link_state_database(self): """ Getter method for link_state_database, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database (container) YANG Description: This container defines ISIS LSDB. """ return self.__link_state_database def _set_link_state_database(self, v, load=False): """ Setter method for link_state_database, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database (container) If this variable is read-only (config: false) in the source YANG file, then _set_link_state_database is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_link_state_database() directly. YANG Description: This container defines ISIS LSDB. 
""" if hasattr(v, "_utype"): v = v._utype(v) try: t = YANGDynClass( v, base=link_state_database.link_state_database, is_container="container", yang_name="link-state-database", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace="http://openconfig.net/yang/network-instance", defining_module="openconfig-network-instance", yang_type="container", is_config=True, ) except (TypeError, ValueError): raise ValueError( { "error-string": """link_state_database must be of a type compatible with container""", "defined-type": "container", "generated-type": """YANGDynClass(base=link_state_database.link_state_database, is_container='container', yang_name="link-state-database", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=True)""", } ) self.__link_state_database = t if hasattr(self, "_set"): self._set() def _unset_link_state_database(self): self.__link_state_database = YANGDynClass( base=link_state_database.link_state_database, is_container="container", yang_name="link-state-database", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace="http://openconfig.net/yang/network-instance", defining_module="openconfig-network-instance", yang_type="container", is_config=True, ) def _get_traffic_engineering(self): """ Getter method for traffic_engineering, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/traffic_engineering (container) YANG Description: This container defines ISIS TE. 
""" return self.__traffic_engineering def _set_traffic_engineering(self, v, load=False): """ Setter method for traffic_engineering, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/traffic_engineering (container) If this variable is read-only (config: false) in the source YANG file, then _set_traffic_engineering is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_traffic_engineering() directly. YANG Description: This container defines ISIS TE. """ if hasattr(v, "_utype"): v = v._utype(v) try: t = YANGDynClass( v, base=traffic_engineering.traffic_engineering, is_container="container", yang_name="traffic-engineering", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace="http://openconfig.net/yang/network-instance", defining_module="openconfig-network-instance", yang_type="container", is_config=True, ) except (TypeError, ValueError): raise ValueError( { "error-string": """traffic_engineering must be of a type compatible with container""", "defined-type": "container", "generated-type": """YANGDynClass(base=traffic_engineering.traffic_engineering, is_container='container', yang_name="traffic-engineering", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=True)""", } ) self.__traffic_engineering = t if hasattr(self, "_set"): self._set() def _unset_traffic_engineering(self): self.__traffic_engineering = YANGDynClass( base=traffic_engineering.traffic_engineering, is_container="container", yang_name="traffic-engineering", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace="http://openconfig.net/yang/network-instance", 
defining_module="openconfig-network-instance", yang_type="container", is_config=True, ) def _get_route_preference(self): """ Getter method for route_preference, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/route_preference (container) YANG Description: This container defines Administrative Distance (or preference) assigned to ISIS routes (level1 internal, level2 internal, level1 external, level2 external). """ return self.__route_preference def _set_route_preference(self, v, load=False): """ Setter method for route_preference, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/route_preference (container) If this variable is read-only (config: false) in the source YANG file, then _set_route_preference is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_route_preference() directly. YANG Description: This container defines Administrative Distance (or preference) assigned to ISIS routes (level1 internal, level2 internal, level1 external, level2 external). 
""" if hasattr(v, "_utype"): v = v._utype(v) try: t = YANGDynClass( v, base=route_preference.route_preference, is_container="container", yang_name="route-preference", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace="http://openconfig.net/yang/network-instance", defining_module="openconfig-network-instance", yang_type="container", is_config=True, ) except (TypeError, ValueError): raise ValueError( { "error-string": """route_preference must be of a type compatible with container""", "defined-type": "container", "generated-type": """YANGDynClass(base=route_preference.route_preference, is_container='container', yang_name="route-preference", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=True)""", } ) self.__route_preference = t if hasattr(self, "_set"): self._set() def _unset_route_preference(self): self.__route_preference = YANGDynClass( base=route_preference.route_preference, is_container="container", yang_name="route-preference", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace="http://openconfig.net/yang/network-instance", defining_module="openconfig-network-instance", yang_type="container", is_config=True, ) def _get_authentication(self): """ Getter method for authentication, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/authentication (container) YANG Description: This container defines ISIS authentication. 
""" return self.__authentication def _set_authentication(self, v, load=False): """ Setter method for authentication, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/authentication (container) If this variable is read-only (config: false) in the source YANG file, then _set_authentication is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_authentication() directly. YANG Description: This container defines ISIS authentication. """ if hasattr(v, "_utype"): v = v._utype(v) try: t = YANGDynClass( v, base=authentication.authentication, is_container="container", yang_name="authentication", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace="http://openconfig.net/yang/network-instance", defining_module="openconfig-network-instance", yang_type="container", is_config=True, ) except (TypeError, ValueError): raise ValueError( { "error-string": """authentication must be of a type compatible with container""", "defined-type": "container", "generated-type": """YANGDynClass(base=authentication.authentication, is_container='container', yang_name="authentication", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=True)""", } ) self.__authentication = t if hasattr(self, "_set"): self._set() def _unset_authentication(self): self.__authentication = YANGDynClass( base=authentication.authentication, is_container="container", yang_name="authentication", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace="http://openconfig.net/yang/network-instance", defining_module="openconfig-network-instance", yang_type="container", is_config=True, ) 
level_number = __builtin__.property(_get_level_number, _set_level_number) config = __builtin__.property(_get_config, _set_config) state = __builtin__.property(_get_state, _set_state) system_level_counters = __builtin__.property( _get_system_level_counters, _set_system_level_counters ) link_state_database = __builtin__.property( _get_link_state_database, _set_link_state_database ) traffic_engineering = __builtin__.property( _get_traffic_engineering, _set_traffic_engineering ) route_preference = __builtin__.property( _get_route_preference, _set_route_preference ) authentication = __builtin__.property(_get_authentication, _set_authentication) _pyangbind_elements = OrderedDict( [ ("level_number", level_number), ("config", config), ("state", state), ("system_level_counters", system_level_counters), ("link_state_database", link_state_database), ("traffic_engineering", traffic_engineering), ("route_preference", route_preference), ("authentication", authentication), ] )
41.827858
422
0.620401
6,494
63,662
5.819526
0.032492
0.065887
0.048158
0.054138
0.990077
0.984335
0.984335
0.984335
0.984335
0.984335
0
0.000751
0.288649
63,662
1,521
423
41.855358
0.833768
0.195297
0
0.886054
0
0.013605
0.264668
0.100331
0
0
0
0
0
1
0.044218
false
0
0.02466
0
0.107993
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
2d3e657fed8aa31cf660e876ba6e75ef7825f4b8
14,431
py
Python
src/dataload/contrib/exac/__init__.py
IsmailM/myvariant.info
5af6ad68fc2c1eb539ab9e683a34bafd51ed5cb1
[ "Apache-2.0" ]
null
null
null
src/dataload/contrib/exac/__init__.py
IsmailM/myvariant.info
5af6ad68fc2c1eb539ab9e683a34bafd51ed5cb1
[ "Apache-2.0" ]
null
null
null
src/dataload/contrib/exac/__init__.py
IsmailM/myvariant.info
5af6ad68fc2c1eb539ab9e683a34bafd51ed5cb1
[ "Apache-2.0" ]
1
2018-11-17T09:16:59.000Z
2018-11-17T09:16:59.000Z
from .exac_parser import load_data as _load_data

# Path to the ExAC VCF input; placeholder must be replaced with a real path
# before load_data() is called.
EXAC_INPUT_FILE = '<file_path_to_exac_vcf_file>'

__METADATA__ = {
    "src_name": 'exac',
    "src_url": 'ftp://ftp.broadinstitute.org/pub/ExAC_release/release0.3/ExAC.r0.3.nonpsych.sites.vcf.gz',
    "version": '0.3',
    "field": 'exac'
}


def load_data():
    """Parse the ExAC VCF file at EXAC_INPUT_FILE and return the variant docs."""
    exac_data = _load_data(EXAC_INPUT_FILE)
    return exac_data


def _string_field():
    """Elasticsearch mapping fragment for a lowercase-analyzed string field."""
    return {"type": "string", "analyzer": "string_lowercase"}


def _int_properties(names):
    """Mapping fragment with an integer sub-field for each name in *names*."""
    return {"properties": {name: {"type": "integer"} for name in names}}


def _variant_properties():
    """Build the per-variant property mapping shared by 'exac' and 'exac_nontcga'.

    The original module duplicated this ~100-key dict verbatim for both
    top-level fields; it is now defined once.  A fresh dict is built on each
    call so mutating one copy cannot silently affect the other.
    """
    return {
        "chrom": _string_field(),
        "pos": {"type": "long"},
        "ref": _string_field(),
        "alt": _string_field(),
        "alleles": _string_field(),
        "type": _string_field(),
        "qual": {"type": "float"},
        "filter": _string_field(),
        "ac": _int_properties([
            "ac", "ac_afr", "ac_amr", "ac_adj", "ac_eas", "ac_fin",
            "ac_nfe", "ac_oth", "ac_sas", "ac_male", "ac_female",
        ]),
        "af": {"type": "float"},
        "an": _int_properties([
            "an", "an_afr", "an_amr", "an_adj", "an_eas", "an_fin",
            "an_nfe", "an_oth", "an_sas", "an_female", "an_male",
        ]),
        "baseqranksum": {"type": "float"},
        "clippingranksum": {"type": "float"},
        "fs": {"type": "float"},
        "dp": {"type": "long"},
        "het": _int_properties([
            "het_afr", "het_amr", "het_eas", "het_fin", "het_nfe",
            "het_oth", "het_sas", "ac_het",
        ]),
        "hom": _int_properties([
            "hom_afr", "hom_amr", "hom_eas", "hom_fin", "hom_nfe",
            "hom_oth", "hom_sas", "ac_hom",
        ]),
        "inbreedingcoeff": {"type": "float"},
        "mq": {
            "properties": {
                "mq": {"type": "float"},
                "mq0": {"type": "integer"},
                "mqranksum": {"type": "float"},
            }
        },
        "ncc": {"type": "long"},
        "qd": {"type": "float"},
        "readposranksum": {"type": "float"},
        "vqslod": {"type": "float"},
        "culprit": _string_field(),
    }


def get_mapping():
    """Return the Elasticsearch mapping for the 'exac' and 'exac_nontcga' fields.

    Both fields use the identical variant schema (see _variant_properties);
    the returned dict is equal to the previous hand-duplicated version.
    """
    mapping = {
        "exac": {"properties": _variant_properties()},
        "exac_nontcga": {"properties": _variant_properties()},
    }
    return mapping
32.502252
106
0.19271
563
14,431
4.738899
0.138544
0.321589
0.116942
0.125937
0.907796
0.892054
0.892054
0.892054
0.892054
0.892054
0
0.001833
0.697526
14,431
443
107
32.575621
0.609393
0
0
0.631579
0
0.002288
0.182662
0.008038
0
0
0
0
0
1
0.004577
false
0
0.002288
0
0.011442
0
0
0
0
null
1
0
0
1
1
1
1
1
1
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
8
740fc30821c9d57dd3eb2269a9e3511642015b31
48
py
Python
models/__init__.py
shshnk94/autoencoding_vi_for_topic_models
015354a877703c5772e69fbc791a51e978612fce
[ "MIT" ]
null
null
null
models/__init__.py
shshnk94/autoencoding_vi_for_topic_models
015354a877703c5772e69fbc791a51e978612fce
[ "MIT" ]
null
null
null
models/__init__.py
shshnk94/autoencoding_vi_for_topic_models
015354a877703c5772e69fbc791a51e978612fce
[ "MIT" ]
2
2020-09-09T20:32:03.000Z
2020-09-09T22:14:36.000Z
from .prodlda import VAE
from .nvlda import VAE  # NOTE(review): re-binds the name -- shadows prodlda.VAE imported above, so only nvlda.VAE is exported from this package; confirm this is intended (an alias per model would keep both accessible)
16
24
0.791667
8
48
4.75
0.625
0.473684
0
0
0
0
0
0
0
0
0
0
0.166667
48
2
25
24
0.95
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
7
7479caa4674cb17f054b4f420631130b4ef529c1
347
py
Python
boardwatch_models/__init__.py
nathanalexanderpage/boardwatch_models
80d6047f0aebc66ddc04842531782f217f749510
[ "MIT" ]
null
null
null
boardwatch_models/__init__.py
nathanalexanderpage/boardwatch_models
80d6047f0aebc66ddc04842531782f217f749510
[ "MIT" ]
null
null
null
boardwatch_models/__init__.py
nathanalexanderpage/boardwatch_models
80d6047f0aebc66ddc04842531782f217f749510
[ "MIT" ]
null
null
null
from boardwatch_models.board import Board from boardwatch_models.game import Game from boardwatch_models.listing import Listing from boardwatch_models.platform import Platform from boardwatch_models.platform_edition import PlatformEdition from boardwatch_models.platform_name_group import PlatformNameGroup from boardwatch_models.user import User
43.375
67
0.899135
45
347
6.711111
0.311111
0.324503
0.463576
0.278146
0
0
0
0
0
0
0
0
0.080692
347
7
68
49.571429
0.946708
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
7
747ada228946fa059c601dc2fb8eb33d51a43240
43
py
Python
python/testData/quickdoc/EmptyTupleType.py
jnthn/intellij-community
8fa7c8a3ace62400c838e0d5926a7be106aa8557
[ "Apache-2.0" ]
2
2019-04-28T07:48:50.000Z
2020-12-11T14:18:08.000Z
python/testData/quickdoc/EmptyTupleType.py
Cyril-lamirand/intellij-community
60ab6c61b82fc761dd68363eca7d9d69663cfa39
[ "Apache-2.0" ]
173
2018-07-05T13:59:39.000Z
2018-08-09T01:12:03.000Z
python/testData/quickdoc/EmptyTupleType.py
Cyril-lamirand/intellij-community
60ab6c61b82fc761dd68363eca7d9d69663cfa39
[ "Apache-2.0" ]
2
2020-03-15T08:57:37.000Z
2020-04-07T04:48:14.000Z
def func(): return () fu<the_ref>nc()
8.6
15
0.55814
7
43
3.285714
1
0
0
0
0
0
0
0
0
0
0
0
0.232558
43
5
15
8.6
0.69697
0
0
0
0
0
0
0
0
0
0
0
0
1
0.333333
true
0
0
0.333333
0.666667
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
1
0
0
1
1
0
0
7
77c38b22047849e15665b181e2b8b10b713bc749
3,714
py
Python
tests/controller/authcontroller_test.py
rindow/skeleton-mini-flask
4a4124682ad9dcd6b30e95a5a745707cbe41c5a0
[ "BSD-3-Clause" ]
null
null
null
tests/controller/authcontroller_test.py
rindow/skeleton-mini-flask
4a4124682ad9dcd6b30e95a5a745707cbe41c5a0
[ "BSD-3-Clause" ]
null
null
null
tests/controller/authcontroller_test.py
rindow/skeleton-mini-flask
4a4124682ad9dcd6b30e95a5a745707cbe41c5a0
[ "BSD-3-Clause" ]
null
null
null
def test_register_success(client):
    """Registering a fresh username redirects to /login and flashes a confirmation.

    Note: sibling tests below are disabled by an ``off_`` prefix, which keeps
    pytest from collecting them (pytest only collects ``test_*`` names).
    """
    response = client.get('/register')
    assert response.status_code == 200
    assert b'>Register<' in response.data
    response = client.post('/register',
                           data={'username':'user2@demo.com', 'password':'password', 'confirm':'password'})
    # Successful registration responds with a redirect to the login page.
    assert response.status_code == 302
    assert response.headers['Location'] == 'http://localhost/login'
    response = client.get('/login')
    # The flash message is rendered on the page we were redirected to.
    assert b'User registered' in response.data


def off_test_register_confirm_error(client):
    """Disabled: mismatched password/confirm bounces back to /register with an error."""
    response = client.get('/register')
    assert response.status_code == 200
    assert b'>Register<' in response.data
    response = client.post('/register',
                           data={'username':'user@demo.com', 'password':'password', 'confirm':'wrong'})
    assert response.status_code == 302
    assert response.headers['Location'] == 'http://localhost/register'
    response = client.get('/register')
    assert response.status_code == 200
    assert b'password and confirm do not match' in response.data


def off_test_register_duplicate_error(client):
    """Disabled: registering an already-existing username is rejected."""
    response = client.get('/register')
    assert response.status_code == 200
    assert b'>Register<' in response.data
    response = client.post('/register',
                           data={'username':'user@demo.com', 'password':'password', 'confirm':'password'})
    assert response.status_code == 302
    assert response.headers['Location'] == 'http://localhost/register'
    response = client.get('/register')
    assert response.status_code == 200
    assert b'The user cannot register' in response.data


def off_test_login_success(client):
    """Disabled: anonymous access to /create redirects to login; valid login proceeds."""
    # redirect
    response = client.get('/create')
    assert response.status_code == 302
    assert response.headers['Location'] == 'http://localhost/login?next=%2Fcreate'
    response = client.get('/login?next=%2Fcreate')
    assert response.status_code == 200
    assert b'>Login<' in response.data
    # login
    response = client.post('/login?next=%2Fcreate',
                           data={'username':'user@demo.com', 'password':'password'})
    # A successful login honours the ?next= parameter.
    assert response.status_code == 302
    assert response.headers['Location'] == 'http://localhost/create'
    response = client.get('/create')
    assert response.status_code == 200
    assert b'Logged in successfully' in response.data


def off_test_login_password_incorect(client):
    """Disabled: a wrong password returns to /login with a generic error message."""
    # redirect
    response = client.get('/create')
    assert response.status_code == 302
    assert response.headers['Location'] == 'http://localhost/login?next=%2Fcreate'
    response = client.get('/login?next=%2Fcreate')
    assert response.status_code == 200
    assert b'>Login<' in response.data
    # login
    response = client.post('/login?next=%2Fcreate',
                           data={'username':'user@demo.com', 'password':'invalid'})
    assert response.status_code == 302
    assert response.headers['Location'] == 'http://localhost/login'
    response = client.get('/login')
    assert response.status_code == 200
    # Same message as the unknown-user case: the app does not reveal which part failed.
    assert b'Invalid username or password' in response.data


def off_test_login_user_notfound(client):
    """Disabled: an unknown username returns to /login with the same generic error."""
    # redirect
    response = client.get('/create')
    assert response.status_code == 302
    assert response.headers['Location'] == 'http://localhost/login?next=%2Fcreate'
    response = client.get('/login?next=%2Fcreate')
    assert response.status_code == 200
    assert b'>Login<' in response.data
    # login
    response = client.post('/login?next=%2Fcreate',
                           data={'username':'nouser@demo.com', 'password':'password'})
    assert response.status_code == 302
    assert response.headers['Location'] == 'http://localhost/login'
    response = client.get('/login')
    assert response.status_code == 200
    assert b'Invalid username or password' in response.data
39.935484
88
0.688207
447
3,714
5.626398
0.111857
0.161431
0.159046
0.190855
0.926044
0.926044
0.926044
0.867594
0.848907
0.848907
0
0.022588
0.16559
3,714
92
89
40.369565
0.788964
0.011847
0
0.77027
0
0
0.282163
0.034417
0
0
0
0
0.554054
1
0.081081
false
0.135135
0
0
0.081081
0
0
0
0
null
0
0
1
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
1
0
0
0
1
0
0
0
0
0
10
77dab682726a1f57ad22ac30e720388469847a5f
78
py
Python
aerosandbox/visualization/__init__.py
tackoo/AeroSandbox
eb31fe604c3f3299a685c98fee18a99106c800df
[ "MIT" ]
1
2021-04-07T08:59:31.000Z
2021-04-07T08:59:31.000Z
aerosandbox/visualization/__init__.py
tackoo/AeroSandbox
eb31fe604c3f3299a685c98fee18a99106c800df
[ "MIT" ]
null
null
null
aerosandbox/visualization/__init__.py
tackoo/AeroSandbox
eb31fe604c3f3299a685c98fee18a99106c800df
[ "MIT" ]
1
2021-09-11T03:28:45.000Z
2021-09-11T03:28:45.000Z
from .matplotlib import * from .plotly import * from .plotly_Figure3D import *
26
30
0.782051
10
78
6
0.5
0.333333
0.533333
0
0
0
0
0
0
0
0
0.014925
0.141026
78
3
30
26
0.880597
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
7
7ad23a2e1f5d551a446526a10354114084dd6177
42
py
Python
napari_io_test/__init__.py
sofroniewn/napari-io-test
221a4e5c763281a2d3c9e56ebd66b6e12564561c
[ "BSD-3-Clause" ]
null
null
null
napari_io_test/__init__.py
sofroniewn/napari-io-test
221a4e5c763281a2d3c9e56ebd66b6e12564561c
[ "BSD-3-Clause" ]
5
2020-03-15T17:46:25.000Z
2020-05-06T00:26:34.000Z
napari_io_test/__init__.py
sofroniewn/napari-io-test
221a4e5c763281a2d3c9e56ebd66b6e12564561c
[ "BSD-3-Clause" ]
null
null
null
from .get_reader import napari_get_reader
21
41
0.880952
7
42
4.857143
0.714286
0.529412
0
0
0
0
0
0
0
0
0
0
0.095238
42
1
42
42
0.894737
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
7
2483bc68be9f8ae2927cf51683c6759f7ad356dd
350
py
Python
tests/expectations/mr-x-cat-weighted-col-std-err.py
Crunch-io/crunch-cube
80986d5b2106c774f05176fb6c6a5ea0d840f09d
[ "MIT" ]
3
2021-01-22T20:42:31.000Z
2021-06-02T17:53:19.000Z
tests/expectations/mr-x-cat-weighted-col-std-err.py
Crunch-io/crunch-cube
80986d5b2106c774f05176fb6c6a5ea0d840f09d
[ "MIT" ]
331
2017-11-13T22:41:56.000Z
2021-12-02T21:59:43.000Z
tests/expectations/mr-x-cat-weighted-col-std-err.py
Crunch-io/crunch-cube
80986d5b2106c774f05176fb6c6a5ea0d840f09d
[ "MIT" ]
1
2021-02-19T02:49:00.000Z
2021-02-19T02:49:00.000Z
[ [ 0.00564723, 0.00585537, 0.00798203, 0.01164323, 0.01289225, 0.00573883, 0.02665202, 0.02114537, ], [ 0.00564723, 0.00585537, 0.00798203, 0.01164323, 0.01289225, 0.00573883, 0.02665202, 0.02114537, ], ]
15.217391
19
0.411429
32
350
4.5
0.28125
0.125
0.138889
0.25
1
1
1
1
1
1
0
0.782609
0.474286
350
22
20
15.909091
0
0
0
0.818182
0
0
0
0
0
0
0
0
0
1
0
true
0
0
0
0
0
0
0
0
null
0
0
1
1
1
1
1
1
1
0
1
0
0
0
0
0
1
0
0
1
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
11
24ca437371fcbed2a95d448bb9c8502862005b93
8,204
py
Python
findy/vendor/baostock/login/loginout.py
doncat99/FinanceDataCenter
1538c8347ed5bff9a99a3cca07507a7605108124
[ "MIT" ]
null
null
null
findy/vendor/baostock/login/loginout.py
doncat99/FinanceDataCenter
1538c8347ed5bff9a99a3cca07507a7605108124
[ "MIT" ]
null
null
null
findy/vendor/baostock/login/loginout.py
doncat99/FinanceDataCenter
1538c8347ed5bff9a99a3cca07507a7605108124
[ "MIT" ]
null
null
null
# -*- coding:utf-8 -*-
"""Login/logout operations for the baostock service.

@author: baostock.com
@group : baostock.com
@contact: baostock@163.com
"""
import datetime
import zlib

import findy.vendor.baostock.util.socketutil as sock
import findy.vendor.baostock.data.resultset as rs
import findy.vendor.baostock.common.contants as cons
import findy.vendor.baostock.data.messageheader as msgheader
import findy.vendor.baostock.common.context as conx


def login(user_id='anonymous', password='123456', options=0):
    """Log in to the system.

    :param user_id: user ID
    :param password: password
    :param options: optional flags, unused as of version 00.5.00
    :return: ResultData()
    """
    data = rs.ResultData()

    # User-facing validation messages are in Chinese by design; keep them as-is.
    if user_id is None or user_id == "":
        print("用户ID不能为空。")
        data.error_msg = "用户ID不能为空。"
        data.error_code = cons.BSERR_USERNAME_EMPTY
        return data
    setattr(conx, "user_id", user_id)

    if password is None or password == "":
        print("密码不能为空。")
        data.error_msg = "密码不能为空。"
        data.error_code = cons.BSERR_PASSWORD_EMPTY
        return data

    # Compose the message body.
    msg_body = "login" + cons.MESSAGE_SPLIT + user_id + cons.MESSAGE_SPLIT + \
        password + cons.MESSAGE_SPLIT + str(options)
    # Compose the message header.
    msg_header = msgheader.to_message_header(
        cons.MESSAGE_TYPE_LOGIN_REQUEST, len(msg_body))

    head_body = msg_header + msg_body
    # Trailing CRC32 lets the server detect transmission corruption.
    crc32str = zlib.crc32(bytes(head_body, encoding='utf-8'))

    # Open the connection, then send the request and receive the reply.
    mySocketUtil = sock.SocketUtil()
    mySocketUtil.connect()

    receive_data = sock.send_msg(
        head_body + cons.MESSAGE_SPLIT + str(crc32str))
    if receive_data is None or receive_data.strip() == "":
        data.error_code = cons.BSERR_RECVSOCK_FAIL
        data.error_msg = "网络接收错误。"
        return data

    # Reply layout: fixed-length header, then body, with a trailing byte dropped.
    msg_header = receive_data[0:cons.MESSAGE_HEADER_LENGTH]
    msg_body = receive_data[cons.MESSAGE_HEADER_LENGTH:-1]

    header_arr = msg_header.split(cons.MESSAGE_SPLIT)
    body_arr = msg_body.split(cons.MESSAGE_SPLIT)

    data.msg_type = header_arr[1]
    data.msg_body_length = header_arr[2]

    data.error_code = body_arr[0]
    data.error_msg = body_arr[1]

    if cons.BSERR_SUCCESS == data.error_code:
        data.method = body_arr[2]
        data.user_id = body_arr[3]
    else:
        print("login failed!")
        # NOTE(review): unlike login_real_time(), a failed login leaves
        # conx.user_id set -- confirm whether it should be removed here too.

    return data


def logout(user_id='anonymous'):
    """Log out of the system; the default user ID is 'anonymous'.

    :param user_id: user ID (overridden by conx.user_id when a login occurred)
    :return: ResultData(), or None when no user is logged in
    """
    now_time = datetime.datetime.now().strftime('%Y%m%d%H%M%S')
    if hasattr(conx, "user_id"):
        user_id = getattr(conx, "user_id")
    if user_id is None or user_id == "":
        print("you don't login, logout failed!")
        return

    # Compose the message body.
    msg_body = "logout" + cons.MESSAGE_SPLIT + \
        user_id + cons.MESSAGE_SPLIT + now_time
    # Compose the message header.
    msg_header = msgheader.to_message_header(
        cons.MESSAGE_TYPE_LOGOUT_REQUEST, len(msg_body))

    head_body = msg_header + msg_body
    crc32str = zlib.crc32(bytes(head_body, encoding='utf-8'))

    # Send the request and receive the reply.
    receive_data = sock.send_msg(
        head_body + cons.MESSAGE_SPLIT + str(crc32str))

    data = rs.ResultData()
    if receive_data is None or receive_data.strip() == "":
        data.error_code = cons.BSERR_RECVSOCK_FAIL
        data.error_msg = "网络接收错误。"
        return data

    msg_header = receive_data[0:cons.MESSAGE_HEADER_LENGTH]
    msg_body = receive_data[cons.MESSAGE_HEADER_LENGTH:-1]

    header_arr = msg_header.split(cons.MESSAGE_SPLIT)
    body_arr = msg_body.split(cons.MESSAGE_SPLIT)

    data.msg_type = header_arr[1]
    data.msg_body_length = header_arr[2]

    data.error_code = body_arr[0]
    data.error_msg = body_arr[1]

    if cons.BSERR_SUCCESS == data.error_code:
        print("logout success!")
        data.method = body_arr[2]
        data.user_id = body_arr[3]
    else:
        print("logout failed!")

    # BUGFIX: the guard previously tested the misspelled attribute name
    # "defallt_socket" while the body read "default_socket", so the default
    # socket was never closed.  Check the same name that is actually used.
    if hasattr(conx, "default_socket"):
        if getattr(conx, "default_socket") is not None:
            getattr(conx, "default_socket").close()
    return data


def login_real_time(user_id='anonymous', password='123456', options=0):
    """Log in to the real-time subscription system.

    :param user_id: user ID
    :param password: password
    :param options: optional flags, unused as of version 00.5.00
    :return: ResultData()
    """
    data = rs.ResultData()

    if user_id is None or user_id == "":
        print("用户ID不能为空。")
        data.error_msg = "用户ID不能为空。"
        data.error_code = cons.BSERR_USERNAME_EMPTY
        return data
    setattr(conx, "user_id", user_id)

    if password is None or password == "":
        print("密码不能为空。")
        data.error_msg = "密码不能为空。"
        data.error_code = cons.BSERR_PASSWORD_EMPTY
        return data

    # Compose the message body.
    msg_body = "login_real_time" + cons.MESSAGE_SPLIT + user_id + cons.MESSAGE_SPLIT + \
        password + cons.MESSAGE_SPLIT + str(options)
    # Compose the message header.
    msg_header = msgheader.to_message_header(
        cons.MESSAGE_TYPE_LOGIN_REAL_TIME_REQUEST, len(msg_body))

    head_body = msg_header + msg_body
    crc32str = zlib.crc32(bytes(head_body, encoding='utf-8'))

    # Open the real-time connection, then send the request and receive the reply.
    mySocketUtil = sock.SocketRealTimeUtil()
    mySocketUtil.connect()

    receive_data = sock.send_real_time_msg(
        head_body + cons.MESSAGE_SPLIT + str(crc32str))
    if receive_data is None or receive_data.strip() == "":
        data.error_code = cons.BSERR_RECVSOCK_FAIL
        data.error_msg = "网络接收错误。"
        return data

    msg_header = receive_data[0:cons.MESSAGE_HEADER_LENGTH]
    msg_body = receive_data[cons.MESSAGE_HEADER_LENGTH:-1]

    header_arr = msg_header.split(cons.MESSAGE_SPLIT)
    body_arr = msg_body.split(cons.MESSAGE_SPLIT)

    data.msg_type = header_arr[1]
    data.msg_body_length = header_arr[2]

    data.error_code = body_arr[0]
    data.error_msg = body_arr[1]

    if cons.BSERR_SUCCESS == data.error_code:
        data.method = body_arr[2]
        data.user_id = body_arr[3]
    else:
        print("login failed!")
        # A failed real-time login removes the cached user id again.
        delattr(conx, "user_id")

    return data


def logout_real_time(user_id='anonymous'):
    """Log out of the real-time subscription system; default user ID 'anonymous'.

    The server reply is deliberately not parsed (the original parsing code was
    disabled): the call always reports SUCCESS locally and closes the
    real-time socket if one is open.

    :param user_id: user ID (overridden by conx.user_id when a login occurred)
    :return: ResultData(), or None when no user is logged in
    """
    now_time = datetime.datetime.now().strftime('%Y%m%d%H%M%S')
    if hasattr(conx, "user_id"):
        user_id = getattr(conx, "user_id")
    if user_id is None or user_id == "":
        print("you don't login, logout failed!")
        return

    # Compose the message body.
    msg_body = "logout_real_time" + cons.MESSAGE_SPLIT + \
        user_id + cons.MESSAGE_SPLIT + now_time
    # Compose the message header.
    msg_header = msgheader.to_message_header(
        cons.MESSAGE_TYPE_LOGOUT_REAL_TIME_REQUEST, len(msg_body))

    head_body = msg_header + msg_body
    crc32str = zlib.crc32(bytes(head_body, encoding='utf-8'))

    # Send the request; the reply is intentionally ignored (see docstring).
    sock.send_real_time_msg(
        head_body + cons.MESSAGE_SPLIT + str(crc32str))

    data = rs.ResultData()
    data.error_code = cons.BSERR_SUCCESS
    data.error_msg = "SUCCESS"
    if hasattr(conx, "socket_real_time"):
        if getattr(conx, "socket_real_time") is not None:
            getattr(conx, "socket_real_time").close()
    return data
28.887324
89
0.632618
1,075
8,204
4.55907
0.113488
0.042848
0.071822
0.031218
0.90859
0.856968
0.856968
0.832483
0.832483
0.832483
0
0.014426
0.25646
8,204
283
90
28.989399
0.789016
0.15724
0
0.786667
0
0
0.074211
0
0
0
0
0
0
1
0.026667
false
0.053333
0.046667
0
0.16
0.066667
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
0
0
0
8
704d02dfaf6d9e28df899944a4c0a20fc5af8437
4,579
py
Python
tests/test_match.py
Michedev/shapeguard
751cdafeac29020c52676052704074ac2f2c76e3
[ "Apache-2.0" ]
5
2021-02-27T13:55:37.000Z
2021-09-04T22:29:31.000Z
tests/test_match.py
Michedev/shapeguard
751cdafeac29020c52676052704074ac2f2c76e3
[ "Apache-2.0" ]
null
null
null
tests/test_match.py
Michedev/shapeguard
751cdafeac29020c52676052704074ac2f2c76e3
[ "Apache-2.0" ]
null
null
null
# Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import tensorflow as tf import torch import numpy as np from tensorguard.guard import TensorGuard # ======== tensorflow ================= def test_matches_basic_numerical_tensorflow(): tg = TensorGuard() a = tf.ones([1, 2, 3]) assert tg.matches(a, "1, 2, 3") assert not tg.matches(a, "1, 2, 4") assert not tg.matches(a, "1, 2, 3, 4") assert not tg.matches(a, "1, 2") def test_matches_ignores_spaces_tensorflow(): tg = TensorGuard() a = tf.ones([1, 2, 3]) assert tg.matches(a, "1,2,3") assert tg.matches(a, "1 , 2, 3 ") assert tg.matches(a, "1, 2,3 ") def test_matches_named_dims_tensorflow(): tg = TensorGuard(dims={"N": 24, "Z": 16}) z = tf.ones([24, 16]) assert tg.matches(z, "N, Z") assert tg.matches(z, "24, Z") assert not tg.matches(z, "N, N") def test_matches_wildcards_tensorflow(): tg = TensorGuard() z = tf.ones([1, 2, 4, 8]) assert tg.matches(z, "1, 2, 4, *") assert tg.matches(z, "*, *, *, 8") assert not tg.matches(z, "*") assert not tg.matches(z, "*, *, *") # ================= pytorch ================== def test_matches_basic_numerical_pytorch(): tg = TensorGuard() a = torch.ones([1, 2, 3]) assert tg.matches(a, "1, 2, 3") assert not tg.matches(a, "1, 2, 4") assert not tg.matches(a, "1, 2, 3, 4") assert not tg.matches(a, "1, 2") def test_matches_ignores_spaces_pytorch(): tg = TensorGuard() a = torch.ones([1, 2, 3]) assert tg.matches(a, "1,2,3") assert tg.matches(a, "1 , 2, 3 ") assert tg.matches(a, "1, 2,3 ") def 
test_matches_named_dims_pytorch(): tg = TensorGuard(dims={"N": 24, "Z": 16}) z = torch.ones([24, 16]) assert tg.matches(z, "N, Z") assert tg.matches(z, "24, Z") assert not tg.matches(z, "N, N") def test_matches_wildcards_pytorch(): tg = TensorGuard() z = torch.ones([1, 2, 4, 8]) assert tg.matches(z, "1, 2, 4, *") assert tg.matches(z, "*, *, *, 8") assert not tg.matches(z, "*") assert not tg.matches(z, "*, *, *") # ================== numpy =================== def test_matches_basic_numerical_numpy(): tg = TensorGuard() a = np.ones([1, 2, 3]) assert tg.matches(a, "1, 2, 3") assert not tg.matches(a, "1, 2, 4") assert not tg.matches(a, "1, 2, 3, 4") assert not tg.matches(a, "1, 2") def test_matches_ignores_spaces_numpy(): tg = TensorGuard() a = np.ones([1, 2, 3]) assert tg.matches(a, "1,2,3") assert tg.matches(a, "1 , 2, 3 ") assert tg.matches(a, "1, 2,3 ") def test_matches_named_dims_numpy(): tg = TensorGuard(dims={"N": 24, "Z": 16}) z = np.ones([24, 16]) assert tg.matches(z, "N, Z") assert tg.matches(z, "24, Z") assert not tg.matches(z, "N, N") def test_matches_wildcards_numpy(): tg = TensorGuard() z = np.ones([1, 2, 4, 8]) assert tg.matches(z, "1, 2, 4, *") assert tg.matches(z, "*, *, *, 8") assert not tg.matches(z, "*") assert not tg.matches(z, "*, *, *") # ================ global ===================== def test_matches_basic_numerical_global(): import tensorguard as tg; tg.reset() a = np.ones([1, 2, 3]) assert tg.matches(a, "1, 2, 3") assert not tg.matches(a, "1, 2, 4") assert not tg.matches(a, "1, 2, 3, 4") assert not tg.matches(a, "1, 2") def test_matches_ignores_spaces_global(): import tensorguard as tg; tg.reset() a = np.ones([1, 2, 3]) assert tg.matches(a, "1,2,3") assert tg.matches(a, "1 , 2, 3 ") assert tg.matches(a, "1, 2,3 ") def test_matches_named_dims_global(): tg = TensorGuard(dims={"N": 24, "Z": 16}) z = np.ones([24, 16]) assert tg.matches(z, "N, Z") assert tg.matches(z, "24, Z") assert not tg.matches(z, "N, N") def test_matches_wildcards_global(): 
import tensorguard as tg; tg.reset() z = np.ones([1, 2, 4, 8]) assert tg.matches(z, "1, 2, 4, *") assert tg.matches(z, "*, *, *, 8") assert not tg.matches(z, "*") assert not tg.matches(z, "*, *, *")
28.61875
74
0.583752
729
4,579
3.584362
0.1262
0.192882
0.183697
0.117872
0.75775
0.714887
0.714887
0.701875
0.683506
0.683506
0
0.051368
0.217733
4,579
159
75
28.798742
0.678113
0.151998
0
0.777778
0
0
0.099302
0
0
0
0
0
0.518519
1
0.148148
false
0
0.064815
0
0.212963
0
0
0
0
null
0
1
0
0
1
1
1
0
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
1
0
0
0
0
0
0
0
0
0
7
705f280334835e99314d8edfc3680b0da7dd80e2
23,164
py
Python
sdk/python/pulumi_azure/core/tenant_template_deployment.py
ScriptBox99/pulumi-azure
1b8c6d5479ccabc39094741eac25a8ca44c8833a
[ "ECL-2.0", "Apache-2.0" ]
109
2018-06-18T00:19:44.000Z
2022-02-20T05:32:57.000Z
sdk/python/pulumi_azure/core/tenant_template_deployment.py
ScriptBox99/pulumi-azure
1b8c6d5479ccabc39094741eac25a8ca44c8833a
[ "ECL-2.0", "Apache-2.0" ]
663
2018-06-18T21:08:46.000Z
2022-03-31T20:10:11.000Z
sdk/python/pulumi_azure/core/tenant_template_deployment.py
ScriptBox99/pulumi-azure
1b8c6d5479ccabc39094741eac25a8ca44c8833a
[ "ECL-2.0", "Apache-2.0" ]
41
2018-07-19T22:37:38.000Z
2022-03-14T10:56:26.000Z
# coding=utf-8 # *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. *** # *** Do not edit by hand unless you're certain you know what you are doing! *** import warnings import pulumi import pulumi.runtime from typing import Any, Mapping, Optional, Sequence, Union, overload from .. import _utilities __all__ = ['TenantTemplateDeploymentArgs', 'TenantTemplateDeployment'] @pulumi.input_type class TenantTemplateDeploymentArgs: def __init__(__self__, *, debug_level: Optional[pulumi.Input[str]] = None, location: Optional[pulumi.Input[str]] = None, name: Optional[pulumi.Input[str]] = None, parameters_content: Optional[pulumi.Input[str]] = None, tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None, template_content: Optional[pulumi.Input[str]] = None, template_spec_version_id: Optional[pulumi.Input[str]] = None): """ The set of arguments for constructing a TenantTemplateDeployment resource. :param pulumi.Input[str] debug_level: The Debug Level which should be used for this Resource Group Template Deployment. Possible values are `none`, `requestContent`, `responseContent` and `requestContent, responseContent`. :param pulumi.Input[str] location: The Azure Region where the Template should exist. Changing this forces a new Template to be created. :param pulumi.Input[str] name: The name which should be used for this Template. Changing this forces a new Template to be created. :param pulumi.Input[str] parameters_content: The contents of the ARM Template parameters file - containing a JSON list of parameters. :param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: A mapping of tags which should be assigned to the Template. :param pulumi.Input[str] template_content: The contents of the ARM Template which should be deployed into this Resource Group. Cannot be specified with `template_spec_version_id`. :param pulumi.Input[str] template_spec_version_id: The ID of the Template Spec Version to deploy. 
Cannot be specified with `template_content`. """ if debug_level is not None: pulumi.set(__self__, "debug_level", debug_level) if location is not None: pulumi.set(__self__, "location", location) if name is not None: pulumi.set(__self__, "name", name) if parameters_content is not None: pulumi.set(__self__, "parameters_content", parameters_content) if tags is not None: pulumi.set(__self__, "tags", tags) if template_content is not None: pulumi.set(__self__, "template_content", template_content) if template_spec_version_id is not None: pulumi.set(__self__, "template_spec_version_id", template_spec_version_id) @property @pulumi.getter(name="debugLevel") def debug_level(self) -> Optional[pulumi.Input[str]]: """ The Debug Level which should be used for this Resource Group Template Deployment. Possible values are `none`, `requestContent`, `responseContent` and `requestContent, responseContent`. """ return pulumi.get(self, "debug_level") @debug_level.setter def debug_level(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "debug_level", value) @property @pulumi.getter def location(self) -> Optional[pulumi.Input[str]]: """ The Azure Region where the Template should exist. Changing this forces a new Template to be created. """ return pulumi.get(self, "location") @location.setter def location(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "location", value) @property @pulumi.getter def name(self) -> Optional[pulumi.Input[str]]: """ The name which should be used for this Template. Changing this forces a new Template to be created. """ return pulumi.get(self, "name") @name.setter def name(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "name", value) @property @pulumi.getter(name="parametersContent") def parameters_content(self) -> Optional[pulumi.Input[str]]: """ The contents of the ARM Template parameters file - containing a JSON list of parameters. 
""" return pulumi.get(self, "parameters_content") @parameters_content.setter def parameters_content(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "parameters_content", value) @property @pulumi.getter def tags(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]: """ A mapping of tags which should be assigned to the Template. """ return pulumi.get(self, "tags") @tags.setter def tags(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]): pulumi.set(self, "tags", value) @property @pulumi.getter(name="templateContent") def template_content(self) -> Optional[pulumi.Input[str]]: """ The contents of the ARM Template which should be deployed into this Resource Group. Cannot be specified with `template_spec_version_id`. """ return pulumi.get(self, "template_content") @template_content.setter def template_content(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "template_content", value) @property @pulumi.getter(name="templateSpecVersionId") def template_spec_version_id(self) -> Optional[pulumi.Input[str]]: """ The ID of the Template Spec Version to deploy. Cannot be specified with `template_content`. """ return pulumi.get(self, "template_spec_version_id") @template_spec_version_id.setter def template_spec_version_id(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "template_spec_version_id", value) @pulumi.input_type class _TenantTemplateDeploymentState: def __init__(__self__, *, debug_level: Optional[pulumi.Input[str]] = None, location: Optional[pulumi.Input[str]] = None, name: Optional[pulumi.Input[str]] = None, output_content: Optional[pulumi.Input[str]] = None, parameters_content: Optional[pulumi.Input[str]] = None, tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None, template_content: Optional[pulumi.Input[str]] = None, template_spec_version_id: Optional[pulumi.Input[str]] = None): """ Input properties used for looking up and filtering TenantTemplateDeployment resources. 
:param pulumi.Input[str] debug_level: The Debug Level which should be used for this Resource Group Template Deployment. Possible values are `none`, `requestContent`, `responseContent` and `requestContent, responseContent`. :param pulumi.Input[str] location: The Azure Region where the Template should exist. Changing this forces a new Template to be created. :param pulumi.Input[str] name: The name which should be used for this Template. Changing this forces a new Template to be created. :param pulumi.Input[str] output_content: The JSON Content of the Outputs of the ARM Template Deployment. :param pulumi.Input[str] parameters_content: The contents of the ARM Template parameters file - containing a JSON list of parameters. :param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: A mapping of tags which should be assigned to the Template. :param pulumi.Input[str] template_content: The contents of the ARM Template which should be deployed into this Resource Group. Cannot be specified with `template_spec_version_id`. :param pulumi.Input[str] template_spec_version_id: The ID of the Template Spec Version to deploy. Cannot be specified with `template_content`. 
""" if debug_level is not None: pulumi.set(__self__, "debug_level", debug_level) if location is not None: pulumi.set(__self__, "location", location) if name is not None: pulumi.set(__self__, "name", name) if output_content is not None: pulumi.set(__self__, "output_content", output_content) if parameters_content is not None: pulumi.set(__self__, "parameters_content", parameters_content) if tags is not None: pulumi.set(__self__, "tags", tags) if template_content is not None: pulumi.set(__self__, "template_content", template_content) if template_spec_version_id is not None: pulumi.set(__self__, "template_spec_version_id", template_spec_version_id) @property @pulumi.getter(name="debugLevel") def debug_level(self) -> Optional[pulumi.Input[str]]: """ The Debug Level which should be used for this Resource Group Template Deployment. Possible values are `none`, `requestContent`, `responseContent` and `requestContent, responseContent`. """ return pulumi.get(self, "debug_level") @debug_level.setter def debug_level(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "debug_level", value) @property @pulumi.getter def location(self) -> Optional[pulumi.Input[str]]: """ The Azure Region where the Template should exist. Changing this forces a new Template to be created. """ return pulumi.get(self, "location") @location.setter def location(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "location", value) @property @pulumi.getter def name(self) -> Optional[pulumi.Input[str]]: """ The name which should be used for this Template. Changing this forces a new Template to be created. """ return pulumi.get(self, "name") @name.setter def name(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "name", value) @property @pulumi.getter(name="outputContent") def output_content(self) -> Optional[pulumi.Input[str]]: """ The JSON Content of the Outputs of the ARM Template Deployment. 
""" return pulumi.get(self, "output_content") @output_content.setter def output_content(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "output_content", value) @property @pulumi.getter(name="parametersContent") def parameters_content(self) -> Optional[pulumi.Input[str]]: """ The contents of the ARM Template parameters file - containing a JSON list of parameters. """ return pulumi.get(self, "parameters_content") @parameters_content.setter def parameters_content(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "parameters_content", value) @property @pulumi.getter def tags(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]: """ A mapping of tags which should be assigned to the Template. """ return pulumi.get(self, "tags") @tags.setter def tags(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]): pulumi.set(self, "tags", value) @property @pulumi.getter(name="templateContent") def template_content(self) -> Optional[pulumi.Input[str]]: """ The contents of the ARM Template which should be deployed into this Resource Group. Cannot be specified with `template_spec_version_id`. """ return pulumi.get(self, "template_content") @template_content.setter def template_content(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "template_content", value) @property @pulumi.getter(name="templateSpecVersionId") def template_spec_version_id(self) -> Optional[pulumi.Input[str]]: """ The ID of the Template Spec Version to deploy. Cannot be specified with `template_content`. 
""" return pulumi.get(self, "template_spec_version_id") @template_spec_version_id.setter def template_spec_version_id(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "template_spec_version_id", value) class TenantTemplateDeployment(pulumi.CustomResource): @overload def __init__(__self__, resource_name: str, opts: Optional[pulumi.ResourceOptions] = None, debug_level: Optional[pulumi.Input[str]] = None, location: Optional[pulumi.Input[str]] = None, name: Optional[pulumi.Input[str]] = None, parameters_content: Optional[pulumi.Input[str]] = None, tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None, template_content: Optional[pulumi.Input[str]] = None, template_spec_version_id: Optional[pulumi.Input[str]] = None, __props__=None): """ ## Import Tenant Template Deployments can be imported using the `resource id`, e.g. ```sh $ pulumi import azure:core/tenantTemplateDeployment:TenantTemplateDeployment example /providers/Microsoft.Resources/deployments/deploy1 ``` :param str resource_name: The name of the resource. :param pulumi.ResourceOptions opts: Options for the resource. :param pulumi.Input[str] debug_level: The Debug Level which should be used for this Resource Group Template Deployment. Possible values are `none`, `requestContent`, `responseContent` and `requestContent, responseContent`. :param pulumi.Input[str] location: The Azure Region where the Template should exist. Changing this forces a new Template to be created. :param pulumi.Input[str] name: The name which should be used for this Template. Changing this forces a new Template to be created. :param pulumi.Input[str] parameters_content: The contents of the ARM Template parameters file - containing a JSON list of parameters. :param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: A mapping of tags which should be assigned to the Template. :param pulumi.Input[str] template_content: The contents of the ARM Template which should be deployed into this Resource Group. 
Cannot be specified with `template_spec_version_id`. :param pulumi.Input[str] template_spec_version_id: The ID of the Template Spec Version to deploy. Cannot be specified with `template_content`. """ ... @overload def __init__(__self__, resource_name: str, args: Optional[TenantTemplateDeploymentArgs] = None, opts: Optional[pulumi.ResourceOptions] = None): """ ## Import Tenant Template Deployments can be imported using the `resource id`, e.g. ```sh $ pulumi import azure:core/tenantTemplateDeployment:TenantTemplateDeployment example /providers/Microsoft.Resources/deployments/deploy1 ``` :param str resource_name: The name of the resource. :param TenantTemplateDeploymentArgs args: The arguments to use to populate this resource's properties. :param pulumi.ResourceOptions opts: Options for the resource. """ ... def __init__(__self__, resource_name: str, *args, **kwargs): resource_args, opts = _utilities.get_resource_args_opts(TenantTemplateDeploymentArgs, pulumi.ResourceOptions, *args, **kwargs) if resource_args is not None: __self__._internal_init(resource_name, opts, **resource_args.__dict__) else: __self__._internal_init(resource_name, *args, **kwargs) def _internal_init(__self__, resource_name: str, opts: Optional[pulumi.ResourceOptions] = None, debug_level: Optional[pulumi.Input[str]] = None, location: Optional[pulumi.Input[str]] = None, name: Optional[pulumi.Input[str]] = None, parameters_content: Optional[pulumi.Input[str]] = None, tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None, template_content: Optional[pulumi.Input[str]] = None, template_spec_version_id: Optional[pulumi.Input[str]] = None, __props__=None): if opts is None: opts = pulumi.ResourceOptions() if not isinstance(opts, pulumi.ResourceOptions): raise TypeError('Expected resource options to be a ResourceOptions instance') if opts.version is None: opts.version = _utilities.get_version() if opts.id is None: if __props__ is not None: raise TypeError('__props__ is only valid when passed 
in combination with a valid opts.id to get an existing resource') __props__ = TenantTemplateDeploymentArgs.__new__(TenantTemplateDeploymentArgs) __props__.__dict__["debug_level"] = debug_level __props__.__dict__["location"] = location __props__.__dict__["name"] = name __props__.__dict__["parameters_content"] = parameters_content __props__.__dict__["tags"] = tags __props__.__dict__["template_content"] = template_content __props__.__dict__["template_spec_version_id"] = template_spec_version_id __props__.__dict__["output_content"] = None super(TenantTemplateDeployment, __self__).__init__( 'azure:core/tenantTemplateDeployment:TenantTemplateDeployment', resource_name, __props__, opts) @staticmethod def get(resource_name: str, id: pulumi.Input[str], opts: Optional[pulumi.ResourceOptions] = None, debug_level: Optional[pulumi.Input[str]] = None, location: Optional[pulumi.Input[str]] = None, name: Optional[pulumi.Input[str]] = None, output_content: Optional[pulumi.Input[str]] = None, parameters_content: Optional[pulumi.Input[str]] = None, tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None, template_content: Optional[pulumi.Input[str]] = None, template_spec_version_id: Optional[pulumi.Input[str]] = None) -> 'TenantTemplateDeployment': """ Get an existing TenantTemplateDeployment resource's state with the given name, id, and optional extra properties used to qualify the lookup. :param str resource_name: The unique name of the resulting resource. :param pulumi.Input[str] id: The unique provider ID of the resource to lookup. :param pulumi.ResourceOptions opts: Options for the resource. :param pulumi.Input[str] debug_level: The Debug Level which should be used for this Resource Group Template Deployment. Possible values are `none`, `requestContent`, `responseContent` and `requestContent, responseContent`. :param pulumi.Input[str] location: The Azure Region where the Template should exist. Changing this forces a new Template to be created. 
:param pulumi.Input[str] name: The name which should be used for this Template. Changing this forces a new Template to be created. :param pulumi.Input[str] output_content: The JSON Content of the Outputs of the ARM Template Deployment. :param pulumi.Input[str] parameters_content: The contents of the ARM Template parameters file - containing a JSON list of parameters. :param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: A mapping of tags which should be assigned to the Template. :param pulumi.Input[str] template_content: The contents of the ARM Template which should be deployed into this Resource Group. Cannot be specified with `template_spec_version_id`. :param pulumi.Input[str] template_spec_version_id: The ID of the Template Spec Version to deploy. Cannot be specified with `template_content`. """ opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id)) __props__ = _TenantTemplateDeploymentState.__new__(_TenantTemplateDeploymentState) __props__.__dict__["debug_level"] = debug_level __props__.__dict__["location"] = location __props__.__dict__["name"] = name __props__.__dict__["output_content"] = output_content __props__.__dict__["parameters_content"] = parameters_content __props__.__dict__["tags"] = tags __props__.__dict__["template_content"] = template_content __props__.__dict__["template_spec_version_id"] = template_spec_version_id return TenantTemplateDeployment(resource_name, opts=opts, __props__=__props__) @property @pulumi.getter(name="debugLevel") def debug_level(self) -> pulumi.Output[Optional[str]]: """ The Debug Level which should be used for this Resource Group Template Deployment. Possible values are `none`, `requestContent`, `responseContent` and `requestContent, responseContent`. """ return pulumi.get(self, "debug_level") @property @pulumi.getter def location(self) -> pulumi.Output[str]: """ The Azure Region where the Template should exist. Changing this forces a new Template to be created. 
""" return pulumi.get(self, "location") @property @pulumi.getter def name(self) -> pulumi.Output[str]: """ The name which should be used for this Template. Changing this forces a new Template to be created. """ return pulumi.get(self, "name") @property @pulumi.getter(name="outputContent") def output_content(self) -> pulumi.Output[str]: """ The JSON Content of the Outputs of the ARM Template Deployment. """ return pulumi.get(self, "output_content") @property @pulumi.getter(name="parametersContent") def parameters_content(self) -> pulumi.Output[str]: """ The contents of the ARM Template parameters file - containing a JSON list of parameters. """ return pulumi.get(self, "parameters_content") @property @pulumi.getter def tags(self) -> pulumi.Output[Optional[Mapping[str, str]]]: """ A mapping of tags which should be assigned to the Template. """ return pulumi.get(self, "tags") @property @pulumi.getter(name="templateContent") def template_content(self) -> pulumi.Output[str]: """ The contents of the ARM Template which should be deployed into this Resource Group. Cannot be specified with `template_spec_version_id`. """ return pulumi.get(self, "template_content") @property @pulumi.getter(name="templateSpecVersionId") def template_spec_version_id(self) -> pulumi.Output[Optional[str]]: """ The ID of the Template Spec Version to deploy. Cannot be specified with `template_content`. """ return pulumi.get(self, "template_spec_version_id")
49.285106
230
0.673502
2,761
23,164
5.446215
0.062296
0.083394
0.092173
0.084857
0.873246
0.859014
0.84864
0.840194
0.836803
0.820509
0
0.000168
0.231091
23,164
469
231
49.390192
0.844085
0.361984
0
0.797834
1
0
0.098655
0.030171
0
0
0
0
0
1
0.162455
false
0.00361
0.018051
0
0.277978
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
8
706769e01afcf31b706f6275b099b287fbcdc73d
1,919
py
Python
tbs/helper/tests/packages_test.py
ODEX-TOS/tos-build-system
019de4d7b47a55123802f5aea4ac0c12d3660f2d
[ "MIT" ]
1
2021-05-06T23:06:41.000Z
2021-05-06T23:06:41.000Z
tbs/helper/tests/packages_test.py
ODEX-TOS/tos-build-system
019de4d7b47a55123802f5aea4ac0c12d3660f2d
[ "MIT" ]
null
null
null
tbs/helper/tests/packages_test.py
ODEX-TOS/tos-build-system
019de4d7b47a55123802f5aea4ac0c12d3660f2d
[ "MIT" ]
null
null
null
import unittest import tbs.helper.packages as packages # installed packages as a mock installed = ["base", "systemd", "zlib", "awesome-tos"] class TestPackage(unittest.TestCase): def test_package_not_installed(self): package = packages.MockPackage("git") package.setMockInstalled(installed) self.assertFalse(package.isInstalled()) self.assertTrue(package.Install()) self.assertTrue(package.isInstalled) def test_package_already_installed(self): package = packages.MockPackage(installed[0]) package.setMockInstalled(installed) self.assertTrue(package.isInstalled()) self.assertTrue(package.Install()) self.assertTrue(package.isInstalled) class TestPackages(unittest.TestCase): def test_packages_not_installed(self): package = packages.MockPackage(["git"]) package.setMockInstalled(installed) self.assertFalse(package.isInstalled()) self.assertTrue(package.Install()) self.assertTrue(package.isInstalled) def test_packages_already_installed(self): package = packages.MockPackage([installed[0]]) package.setMockInstalled(installed) self.assertTrue(package.isInstalled()) self.assertTrue(package.Install()) self.assertTrue(package.isInstalled) def test_packages_not_installed_multiple(self): package = packages.MockPackage(["git", "awesome-tos"]) package.setMockInstalled(installed) self.assertFalse(package.isInstalled()) self.assertTrue(package.Install()) self.assertTrue(package.isInstalled) def test_packages_already_installed(self): package = packages.MockPackage([installed[0], "gattlib"]) package.setMockInstalled(installed) self.assertFalse(package.isInstalled()) self.assertTrue(package.Install()) self.assertTrue(package.isInstalled)
37.627451
65
0.705055
184
1,919
7.25
0.184783
0.146927
0.22039
0.191904
0.838081
0.793103
0.793103
0.793103
0.793103
0.793103
0
0.001921
0.186034
1,919
50
66
38.38
0.852113
0.014591
0
0.634146
0
0
0.028072
0
0
0
0
0
0.439024
1
0.146341
false
0
0.04878
0
0.243902
0
0
0
0
null
0
1
1
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
1
0
0
0
0
0
0
0
0
0
9
70a10c0157ddcd03a1ba4abd6ac3651ade3461c9
9,242
py
Python
demo.py
JingkangZhang/SlidingPuzzle
f2fb38cc372072717b131c45318eb0a8c06e8a35
[ "MIT" ]
1
2021-01-31T04:32:08.000Z
2021-01-31T04:32:08.000Z
demo.py
JingkangZhang/SlidingPuzzle
f2fb38cc372072717b131c45318eb0a8c06e8a35
[ "MIT" ]
null
null
null
demo.py
JingkangZhang/SlidingPuzzle
f2fb38cc372072717b131c45318eb0a8c06e8a35
[ "MIT" ]
null
null
null
import base64 myscript ='''#################################
## Project: Sliding Puzzle     ##
## Copyright ReadyPython Sp19  ##

#################################

from copy import deepcopy # Used to copy boards represented as 2D lists
from random import choice # Used in shuffle() to choose randomly from legal actions
from time import sleep # Used in visualization
import os # useful for clear console screen utility

sample_board = [ #represented as a 2D list
    [1,2,3],
    [4,5,6],
    [7,8,0] # 0 stands for the empty slot
]
ACTIONS = ["up", "down", "left", "right"] #Actions are represented as ways to move the empty slot (0) around.

def print_board(board):
    '''Render BOARD as an ASCII grid, one dashed rule between rows:
    -------------
    | 1 | 2 | 3 |
    -------------
    The empty slot (0) is drawn as a blank cell.
    '''
    width = len(board[0])  # column count taken from the first row
    print("-------------")
    for row in board:
        cells = [" " if cell == 0 else cell for cell in row[:width]]
        line = "| " + "".join("{} | ".format(c) for c in cells)
        print(line)
        print("-------------")

def play():
    ''' Plays one full sliding puzzle game:
    1. Shuffle sample_board to get a random (always solvable) start state.
    2. Solve the puzzle with BFS, obtaining a list of actions.
    3. Visualize the solution step by step after the user presses Enter.
    '''
    board = shuffle(sample_board)
    print("This is the randomly shuffled initial state:")
    print_board(board)
    print("Solving...")
    actions = solve(board)  # either a list of actions or the "NO_SOLUTION" sentinel
    if actions == "NO_SOLUTION": # A correct shuffle function should NOT result in NO_SOLUTION.
        print("There is no solution from current state.")
        return
    print("Solved!")
    input("Press Enter to start visualization: ")
    visualize(board, actions)

def visualize(board, actions):
    ''' Visualize the transitions in BOARD by printing each state after taking
    each action in ACTIONS, then print a summary (step count, initial state,
    action list). Transition time interval: 1 second. The screen is cleared by
    calling cls() before each redraw. BOARD itself is not modified.'''
    copy = deepcopy(board)  # work on a copy so the caller's board survives for the summary
    for action in actions:
        copy = take(action, copy)
        cls()
        print_board(copy)
        sleep(1)
    print("Solved! Total steps:", len(actions))
    print("Initial state:")
    print_board(board)
    print("Actions took:", actions, "(Actions are defined as ways the empty slot is moved around.)")

def find_zero(board):
    '''Return the (row_number, column_number) coordinate of the empty
    slot (the 0 entry) in BOARD, scanning row by row.

    Returns None implicitly when BOARD contains no 0.
    '''
    cols = len(board[0])  # column count taken from the first row
    for r, row in enumerate(board):
        for c in range(cols):
            if row[c] == 0:
                return r, c

def get_legal_actions(board):
    '''Return the subset of ACTIONS legal in BOARD, in ACTIONS order.

    An action names the direction the empty slot (0) moves; a direction
    is illegal when the empty slot already sits on that edge of the board.
    '''
    row, col = find_zero(board)
    last_row = len(board) - 1
    last_col = len(board[0]) - 1
    blocked = set()
    if row == 0:
        blocked.add("up")
    if row == last_row:
        blocked.add("down")
    if col == 0:
        blocked.add("left")
    if col == last_col:
        blocked.add("right")
    return [a for a in ACTIONS if a not in blocked]

def take(action, board):
    '''Return a new board: the result of moving the empty slot (0) one
    cell in direction ACTION, which must be in ["up", "down", "left",
    "right"] and is assumed to be legal. BOARD itself is not modified.
    '''
    assert action in ACTIONS, "Invalid action: '{}'".format(action)
    # Row/column offset of the tile the empty slot swaps with.
    offsets = {"up": (-1, 0), "down": (1, 0), "left": (0, -1), "right": (0, 1)}
    row, col = find_zero(board)
    d_row, d_col = offsets[action]
    result = deepcopy(board)
    result[row][col], result[row + d_row][col + d_col] = (
        result[row + d_row][col + d_col],
        result[row][col],
    )
    return result

def shuffle(board):
    '''Return a copy of BOARD randomized by applying 50 randomly chosen
    legal actions. BOARD itself is not modified.'''
    state = deepcopy(board)
    for _ in range(50):
        state = take(choice(get_legal_actions(state)), state)
    return state

def is_goal(board):
    '''Return True iff BOARD is the solved configuration: tiles 1..8 in
    row-major order with the empty slot (0) in the bottom-right corner.'''
    goal = [[1, 2, 3], [4, 5, 6], [7, 8, 0]]
    return board == goal

def solve(board):
    '''Return a shortest list of actions which, taken on BOARD, turns it into
    -------------
    | 1 | 2 | 3 |
    -------------
    | 4 | 5 | 6 |
    -------------
    | 7 | 8 |   |
    -------------
    Returns "NO_SOLUTION" if there is no solution.
    '''
    from collections import deque  # local import: module import block is elsewhere

    # Breadth-first search over board states. Boards are 2D lists and thus
    # unhashable, so visited states are keyed by their str() form.
    # Fixes over the previous version:
    #   * deque.popleft() instead of list.pop(0)  -- O(1) per dequeue
    #   * store only the action list, not every intermediate board (was
    #     quadratic memory in path length)
    #   * mark states visited at enqueue time so duplicates never flood
    #     the queue (they were previously filtered only when popped)
    visited = {str(board)}
    queue = deque([(board, [])])  # (state, actions taken to reach it)
    while queue:
        state, actions = queue.popleft()
        if is_goal(state):
            return actions
        for action in get_legal_actions(state):
            successor = take(action, state)
            key = str(successor)
            if key not in visited:
                visited.add(key)
                queue.append((successor, actions + [action]))
    return "NO_SOLUTION"
    
def cls():
    '''Clears the terminal screen (Windows uses "cls", everything else "clear").'''
    command = 'cls' if os.name == 'nt' else 'clear'
    os.system(command)

# Entry point: only run the interactive game when executed as a script.
if __name__ == '__main__':
    print("====================================================")
    # BUG FIX: banner typo "Silding Puzzle" -> "Sliding Puzzle".
    print("Welcome to ReadyPython Project: Sliding Puzzle!")
    play()
''' eval(compile(base64.b64decode(myscript),'<string>','exec'))
3,080.666667
9,168
0.997079
11
9,242
837.727273
0.818182
0
0
0
0
0
0
0
0
0
0
0.114202
0.000433
9,242
3
9,169
3,080.666667
0.883308
0
0
0
0
0
0.991453
0.990155
0
1
0
0
0
1
0
false
0
0.333333
0
0.333333
0
1
0
1
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
1
1
0
0
0
0
0
0
1
1
null
1
0
0
0
0
0
0
0
1
0
0
0
0
9
70a12e75df9673e4055f04b9db595c38827057bc
14,800
py
Python
Large_areas/MIV.py
mapeee/Accessibility_Tools
f6eece284c11f7e30a78a130171ae1cccb024b88
[ "MIT" ]
null
null
null
Large_areas/MIV.py
mapeee/Accessibility_Tools
f6eece284c11f7e30a78a130171ae1cccb024b88
[ "MIT" ]
null
null
null
Large_areas/MIV.py
mapeee/Accessibility_Tools
f6eece284c11f7e30a78a130171ae1cccb024b88
[ "MIT" ]
null
null
null
# -*- coding: utf-8 -*- """ Created on Mon Jul 30 10:42:41 2018 @author: mape """ import psycopg2 import arcpy import time import pandas import os import gc from pathlib import Path path = Path.home() / 'python32' / 'python_dir.txt' f = open(path, mode='r') for i in f: path = i path = Path.joinpath(Path(r'C:'+path),'Accessibility_Tools','Large_areas','MIV.txt') f = path.read_text() f = f.split('\n') start_time = time.clock() print "start script seconds: "+str(int(time.clock() - start_time)) arcpy.env.overwriteOutput = True ##delete all layers #--Geodaten--# Raster_o = "C:"+f[0] Raster_d100 = "C:"+f[1] Raster_d500 = "C:"+f[2] Raster_d1 = "C:"+f[3] Network = "C:"+f[4] #--csv-files--# csv_line = 'C:'+f[5] csv_error = 'V:'+f[6] f = open(csv_error, "w") ##clear the error csv f.truncate() f.close() #--Parameter--# Kosten = "dist_miv" Max_Kosten = [5200,35500,61500,96000] loop = [50,30,50,50] ###--pgadmin database--### #--connection to database--# pgadmin = psycopg2.connect(f[7]) pgcur = pgadmin.cursor() #--Create table--# pgcur.execute('DROP TABLE IF EXISTS tmp_errbk."marcus_test"') pgcur.execute('CREATE TABLE tmp_errbk."marcus_test" (von integer, nach integer, meter integer, minuten integer,bezug integer)') pgadmin.commit() ###--------------------------### ###--ArcGIS routing problem--### ###--------------------------### arcpy.MakeFeatureLayer_management(Raster_o, 'R500_start') arcpy.MakeFeatureLayer_management(Raster_d100, 'R100_ziel','"MIV"=1') arcpy.MakeFeatureLayer_management(Raster_d500, 'R500_ziel','"MIV"=1') arcpy.MakeFeatureLayer_management(Raster_d1, 'R1_ziel','"MIV"=1') arcpy.MakeFeatureLayer_management(Raster_d1, 'R1_ziel_bew','"MIV"=1 and "bew"=1') #Gebiete = "C:\\Geodaten\\Niedersachsen.gdb\\Gebiete\\Nds_Bremen_Kreise_2013" #arcpy.MakeFeatureLayer_management(Gebiete,"Heidekreis",'"OBJECTID" = 3') #arcpy.SelectLayerByLocation_management('R500_start', 'intersect', 'Heidekreis') #arcpy.SelectLayerByLocation_management('R100_ziel', 'WITHIN_A_DISTANCE', 
'Heidekreis',Max_Kosten[0]) #arcpy.SelectLayerByLocation_management('R500_ziel', 'WITHIN_A_DISTANCE', 'Heidekreis',Max_Kosten[1]) ############# ##100m loop## ############# print "start ArcGIS OD-routing 100 meter seconds: "+str(int(time.clock() - start_time)) #--loops 100 meter--# length = int(arcpy.GetCount_management('R500_start')[0]) print "Zeilen: "+str(length) loops = (length/loop[0])+1 loops = range(loops) print "Anzahl loops: "+str(len(loops)) ##--Counter destinations raster--# ##Only select 100-meter destinations within certain distance to origins ##Do this every 1.000 origins for performance reasons ni = 0 ni_counter = 1000/loop[0] ##Number of loops per 1000 origins for i in loops: print "start loop "+str(i)+" seconds: "+str(int(time.clock() - start_time)) arcpy.Delete_management("ODMATRIX") ODLayer = arcpy.MakeODCostMatrixLayer_na(Network,"ODMATRIX",Kosten,Max_Kosten[0],"",[Kosten, "t_miv"],"","","","","NO_LINES") #--add origins centroids--# arcpy.Delete_management("origi") arcpy.MakeFeatureLayer_management('R500_start', "origi","OBJECTID >"+str(i*loop[0])+" and OBJECTID <="+str((i*loop[0])+loop[0])) field_mappings = arcpy.na.NAClassFieldMappings(ODLayer.getOutput(0),"Origins",True,arcpy.ListFields("ODMATRIX\Origins")) arcpy.AddLocations_na(ODLayer,"Origins","origi",field_mappings,"","","","","CLEAR") #--add destination centroids--# arcpy.Delete_management("desti") if i == ni: print "Select destinations: OBJECTID >"+str(ni*loop[0])+" and OBJECTID <="+str((ni*loop[0])+1000) arcpy.MakeFeatureLayer_management('R500_start', "ni_bezug","OBJECTID >"+str(ni*loop[0])+" and OBJECTID <="+str((ni*loop[0])+1000)) arcpy.SelectLayerByLocation_management('R100_ziel', 'WITHIN_A_DISTANCE', 'ni_bezug',Max_Kosten[0]) ni = ni + ni_counter print "Next selection loop: "+str(ni) arcpy.Delete_management("ni_bezug") arcpy.MakeFeatureLayer_management('R100_ziel', "desti") arcpy.AddLocations_na(ODLayer,"Destinations","desti",field_mappings,"") print "Vorbereitung Ende: 
"+str(int(time.clock() - start_time)) #--solving--# arcpy.CheckOutExtension("Network") try:arcpy.na.Solve(ODLayer) except: print "solver failed loop :"+str(i) f = open(csv_error, 'a') f.write("loop :"+str(i)+" 100 \n") f.close() continue print "solving seconds: "+str(int(time.clock() - start_time)) ###--Data preparation--### #--get lines--# Lines = arcpy.da.FeatureClassToNumPyArray("ODMATRIX\Lines",["Name","Total_"+Kosten,"Total_t_miv"]) print "Num lines: "+str(len(Lines)) Lines = pandas.DataFrame(Lines) a = pandas.DataFrame(Lines.Name.str.split(' - ').tolist(), columns = "Start Ziel".split()) #Lines = pandas.DataFrame.reset_index(Lines) Lines["Start"] = a["Start"] Lines["Ziel"] = a["Ziel"] Lines[["Start","Ziel"]] = Lines[["Start","Ziel"]].astype(int) Lines[["Total_dist_miv","Total_t_miv"]] = Lines[["Total_dist_miv","Total_t_miv"]].round(0).astype(int) ##rounding of numeric values Lines = Lines[["Start","Ziel","Total_dist_miv","Total_t_miv"]] ##new ordering as in postgresql table Lines['bezug'] = 100 #adding cell size try: os.remove("C:"+f[5]) except: pass Lines.to_csv("C:"+f[5], header=False, index=False) print "copy seconds: "+str(int(time.clock() - start_time)) ###--saving into database--### #--open and write data--# f = open(csv_line, 'r') pgcur.copy_from(f, 'tmp_errbk."marcus_test"', sep=',') print "pgadmin seconds: "+str(int(time.clock() - start_time)) pgadmin.commit() #--memory management--# del Lines, a, f gc.collect() arcpy.Delete_management("ODMATRIX") arcpy.Delete_management("desti") arcpy.Delete_management("origi") ############# ##500m loop## ############# print "start ArcGIS OD-routing 500 meter seconds: "+str(int(time.clock() - start_time)) #--loops 500 meter--# loops = (length/loop[1])+1 loops = range(loops) print "Anzahl loops: "+str(len(loops)) for i in loops: print "start loop (500) "+str(i)+" seconds: "+str(int(time.clock() - start_time)) arcpy.Delete_management("ODMATRIX") ODLayer = 
arcpy.MakeODCostMatrixLayer_na(Network,"ODMATRIX",Kosten,Max_Kosten[1],"",[Kosten, "t_miv"],"","","","","NO_LINES") #--add origins centroids--# arcpy.Delete_management("centroide") arcpy.Delete_management("origi") arcpy.MakeFeatureLayer_management('R500_start', "origi","OBJECTID >"+str(i*loop[1])+" and OBJECTID <="+str((i*loop[1])+loop[1])) field_mappings = arcpy.na.NAClassFieldMappings(ODLayer.getOutput(0),"Origins",True,arcpy.ListFields("ODMATRIX\Origins")) arcpy.AddLocations_na(ODLayer,"Origins","origi",field_mappings,"","","","","CLEAR") #--add destination centroids--# arcpy.Delete_management("desti") arcpy.MakeFeatureLayer_management('R500_ziel', "desti") arcpy.AddLocations_na(ODLayer,"Destinations","desti",field_mappings,"") print "Vorbereitung Ende: "+str(int(time.clock() - start_time)) #--solving--# arcpy.CheckOutExtension("Network") try:arcpy.na.Solve(ODLayer) except: print "solver failed loop :"+str(i) f = open(csv_error, 'a') f.write("loop :"+str(i)+" 500 \n") f.close() continue print "solving seconds: "+str(int(time.clock() - start_time)) ###--Data preparation--### #--get lines--# Lines = arcpy.da.FeatureClassToNumPyArray("ODMATRIX\Lines",["Name","Total_"+Kosten,"Total_t_miv"],"Total_"+Kosten+" >"+str(Max_Kosten[0]-400)) print "Num lines: "+str(len(Lines)) Lines = pandas.DataFrame(Lines) a = pandas.DataFrame(Lines.Name.str.split(' - ').tolist(), columns = "Start Ziel".split()) #Lines = pandas.DataFrame.reset_index(Lines) Lines["Start"] = a["Start"] Lines["Ziel"] = a["Ziel"] Lines[["Start","Ziel"]] = Lines[["Start","Ziel"]].astype(int) Lines[["Total_dist_miv","Total_t_miv"]] = Lines[["Total_dist_miv","Total_t_miv"]].round(0).astype(int) ##rounding of numeric values Lines = Lines[["Start","Ziel","Total_dist_miv","Total_t_miv"]] ##new ordering as in postgresql table Lines['bezug'] = 500 #adding cell size try: os.remove("C:"+f[5]) except: pass Lines.to_csv("C:"+f[5], header=False, index=False) print "copy seconds: "+str(int(time.clock() - start_time)) 
###--saving into database--### #--open and write data--# f = open(csv_line, 'r') pgcur.copy_from(f, 'tmp_errbk."marcus_test"', sep=',') print "pgadmin seconds: "+str(int(time.clock() - start_time)) pgadmin.commit() #--memory management--# del Lines, a, f gc.collect() ############# ##1km loop## ############# print "start ArcGIS OD-routing 500 meter seconds: "+str(int(time.clock() - start_time)) #--loops 1 kilometer--# loops = (length/loop[2])+1 loops = range(loops) print "Anzahl loops: "+str(len(loops)) for i in loops: print "start loop (1) "+str(i)+" seconds: "+str(int(time.clock() - start_time)) arcpy.Delete_management("ODMATRIX") ODLayer = arcpy.MakeODCostMatrixLayer_na(Network,"ODMATRIX",Kosten,Max_Kosten[2],"",[Kosten, "t_miv"],"","","","","NO_LINES") #--add destination centroids--# arcpy.Delete_management("desti") arcpy.MakeFeatureLayer_management('R1_ziel', "desti") field_mappings = arcpy.na.NAClassFieldMappings(ODLayer.getOutput(0),"Destinations",True,arcpy.ListFields("ODMATRIX\Destinations")) arcpy.AddLocations_na(ODLayer,"Destinations","desti",field_mappings,"") arcpy.Delete_management("centroide") arcpy.Delete_management("origi") arcpy.MakeFeatureLayer_management('R500_start', "origi","OBJECTID >"+str(i*loop[2])+" and OBJECTID <="+str((i*loop[2])+loop[2])) arcpy.AddLocations_na(ODLayer,"Origins","origi",field_mappings,"","","","","CLEAR") print "Vorbereitung Ende: "+str(int(time.clock() - start_time)) #--solving--# arcpy.CheckOutExtension("Network") try:arcpy.na.Solve(ODLayer) except: print "solver failed loop :"+str(i) f = open(csv_error, 'a') f.write("loop :"+str(i)+" 1km \n") f.close() continue print "solving seconds: "+str(int(time.clock() - start_time)) ###--Data preparation--### #--get lines--# Lines = arcpy.da.FeatureClassToNumPyArray("ODMATRIX\Lines",["Name","Total_"+Kosten,"Total_t_miv"],"Total_"+Kosten+" >"+str(Max_Kosten[1]-1000)) print "Num lines: "+str(len(Lines)) Lines = pandas.DataFrame(Lines) a = pandas.DataFrame(Lines.Name.str.split(' - 
').tolist(), columns = "Start Ziel".split()) #Lines = pandas.DataFrame.reset_index(Lines) Lines["Start"] = a["Start"] Lines["Ziel"] = a["Ziel"] Lines[["Start","Ziel"]] = Lines[["Start","Ziel"]].astype(int) Lines[["Total_dist_miv","Total_t_miv"]] = Lines[["Total_dist_miv","Total_t_miv"]].round(0).astype(int) ##rounding of numeric values Lines = Lines[["Start","Ziel","Total_dist_miv","Total_t_miv"]] ##new ordering as in postgresql table Lines['bezug'] = 1 #adding cell size try: os.remove("C:"+f[5]) except: pass Lines.to_csv("C:"+f[5], header=False, index=False) print "copy seconds: "+str(int(time.clock() - start_time)) ###--saving into database--### #--open and write data--# f = open(csv_line, 'r') pgcur.copy_from(f, 'tmp_errbk."marcus_test"', sep=',') print "pgadmin seconds: "+str(int(time.clock() - start_time)) pgadmin.commit() #--memory management--# del Lines, a, f gc.collect() ###################### ##1km loop populated## ###################### print "start ArcGIS OD-routing 500 meter seconds: "+str(int(time.clock() - start_time)) #--loops 1 kilometer--# loops = (length/loop[2])+1 loops = range(loops) print "Anzahl loops: "+str(len(loops)) for i in loops: print "start loop (1 populated) "+str(i)+" seconds: "+str(int(time.clock() - start_time)) arcpy.Delete_management("ODMATRIX") ODLayer = arcpy.MakeODCostMatrixLayer_na(Network,"ODMATRIX",Kosten,Max_Kosten[3],"",[Kosten, "t_miv"],"","","","","NO_LINES") #--add destination centroids--# arcpy.Delete_management("desti") arcpy.MakeFeatureLayer_management('R1_ziel_bew', "desti") field_mappings = arcpy.na.NAClassFieldMappings(ODLayer.getOutput(0),"Destinations",True,arcpy.ListFields("ODMATRIX\Destinations")) arcpy.AddLocations_na(ODLayer,"Destinations","desti",field_mappings,"") arcpy.Delete_management("centroide") arcpy.Delete_management("origi") arcpy.MakeFeatureLayer_management('R500_start', "origi","OBJECTID >"+str(i*loop[3])+" and OBJECTID <="+str((i*loop[3])+loop[3])) 
arcpy.AddLocations_na(ODLayer,"Origins","origi",field_mappings,"","","","","CLEAR") print "Vorbereitung Ende: "+str(int(time.clock() - start_time)) #--solving--# arcpy.CheckOutExtension("Network") try:arcpy.na.Solve(ODLayer) except: print "solver failed loop :"+str(i) f = open(csv_error, 'a') f.write("loop :"+str(i)+" 1km \n") f.close() continue print "solving seconds: "+str(int(time.clock() - start_time)) ###--Data preparation--### #--get lines--# Lines = arcpy.da.FeatureClassToNumPyArray("ODMATRIX\Lines",["Name","Total_"+Kosten,"Total_t_miv"],"Total_"+Kosten+" >"+str(Max_Kosten[2]-1000)) print "Num lines: "+str(len(Lines)) Lines = pandas.DataFrame(Lines) a = pandas.DataFrame(Lines.Name.str.split(' - ').tolist(), columns = "Start Ziel".split()) #Lines = pandas.DataFrame.reset_index(Lines) Lines["Start"] = a["Start"] Lines["Ziel"] = a["Ziel"] Lines[["Start","Ziel"]] = Lines[["Start","Ziel"]].astype(int) Lines[["Total_dist_miv","Total_t_miv"]] = Lines[["Total_dist_miv","Total_t_miv"]].round(0).astype(int) ##rounding of numeric values Lines = Lines[["Start","Ziel","Total_dist_miv","Total_t_miv"]] ##new ordering as in postgresql table Lines['bezug'] = 1 #adding cell size try: os.remove("C:"+f[5]) except: pass Lines.to_csv("C:"+f[5], header=False, index=False) print "copy seconds: "+str(int(time.clock() - start_time)) ###--saving into database--### #--open and write data--# f = open(csv_line, 'r') pgcur.copy_from(f, 'tmp_errbk."marcus_test"', sep=',') print "pgadmin seconds: "+str(int(time.clock() - start_time)) pgadmin.commit() #--memory management--# del Lines, a, f gc.collect() #--create index--# pgcur.execute('CREATE INDEX indexvon ON tmp_errbk."marcus_test" (von)') pgadmin.commit() ###--end--### pgcur.close() ##pgadmin pgadmin.close() ##pgadmin print "END seconds: "+str(int(time.clock() - start_time))
39.892183
147
0.649527
1,918
14,800
4.877477
0.12878
0.025975
0.027793
0.041689
0.80759
0.787066
0.783966
0.767076
0.751149
0.746766
0
0.020729
0.146014
14,800
371
148
39.892183
0.71944
0.142162
0
0.672199
0
0.004149
0.235643
0.016489
0
0
0
0
0
0
null
null
0.016598
0.029046
null
null
0.170124
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
7
3b84cda948c5de49521ccf87b7b0bf3c64ef1570
7,761
py
Python
Protheus_WebApp/Modules/SIGABI/BIXLinkTestCase.py
98llm/tir-script-samples
0bff8393b79356aa562e9e6512c11ee6e039b177
[ "MIT" ]
17
2018-09-24T17:27:08.000Z
2021-09-16T19:09:46.000Z
Protheus_WebApp/Modules/SIGABI/BIXLinkTestCase.py
98llm/tir-script-samples
0bff8393b79356aa562e9e6512c11ee6e039b177
[ "MIT" ]
4
2018-09-24T17:30:32.000Z
2022-01-03T11:39:30.000Z
Protheus_WebApp/Modules/SIGABI/BIXLinkTestCase.py
98llm/tir-script-samples
0bff8393b79356aa562e9e6512c11ee6e039b177
[ "MIT" ]
18
2019-06-07T17:41:34.000Z
2022-01-31T18:17:31.000Z
# -*- coding: utf-8 -*- import unittest from tir import Webapp class BIXLINK(unittest.TestCase): @classmethod def setUpClass(inst): inst.oHelper = Webapp() inst.oHelper.Setup('SIGACFG','11/12/2018','T1','D MG 01 ') inst.oHelper.SetLateralMenu('Ambiente > Extrator Bi') def test_BIXLINK_CT001(self): self.oHelper.SetLateralMenu('Configuração de Parâmetros') self.oHelper.SetButton('Avançar >>') # Bem vindo self.oHelper.SetValue("Servidor DBAccess", "10.171.67.220") self.oHelper.SetValue("Porta do Servidor DBAccess", "7920") self.oHelper.SetValue("Alias no DBAcess", "STAGE") self.oHelper.SetButton('Avançar >>') # Configuração de acesso self.oHelper.SetButton('Fechar') # Fechar self.oHelper.WaitShow("Selecione as áreas utilizadas na configuração dos extratores Protheus.") self.oHelper.SetButton('Avançar >>') # Definição das Áreas self.oHelper.WaitShow("Selecione as Moedas utilizadas nas fatos") self.oHelper.SetButton('Avançar >>') # Moedas utilizadas nas fatos self.oHelper.WaitShow("Configuração da Macrorregião") self.oHelper.SetButton('>>') # Seleciona todos os países self.oHelper.SetButton('<<') # Deseleciona todos os países self.oHelper.SetButton('Avançar >>') # Configuração de macroregião self.oHelper.WaitShow("Consolidação de Moedas") self.oHelper.SetButton('Avançar >>') # Consolidação de moedas self.oHelper.WaitShow("Caso seja necessário, configure o(s) parâmetro(s) abaixo para adequar a execução dos") self.oHelper.ClickLabel("Considerar dados do cliente na dimensão região geográfica?") self.oHelper.SetButton('Avançar >>') # Parâmetros Genéricos self.oHelper.WaitShow("Área Comercial") self.oHelper.ClickLabel("Indica se o representante será obtido através do cadastro do cliente, ao invés dos dados da nota/pedido.") self.oHelper.ClickLabel("Devolução de Vendas - Considerar documentos de entrada com cliente diferente do documento de saída.") self.oHelper.SetButton('Avançar >>') # Área Comercial self.oHelper.WaitShow("Área Comercial, Financeiro e Materiais.") 
self.oHelper.ClickLabel("Comercial, Financeiro, Materiais - Utilizar a Taxa Negociada na conversão de moeda.") self.oHelper.SetButton('Avançar >>') # Área Comercial, Financeiro e Materiais self.oHelper.WaitShow("Selecione os itens que serão utilizados como Contas de Resultado") self.oHelper.SetButton('Avançar >>') # Área Controladoria self.oHelper.WaitShow("Configuração de Parâmetros da área Controladoria") self.oHelper.ClickLabel("Considerar as Contas Bloqueadas no plano de contas?") # Marcar self.oHelper.SetButton('Avançar >>') # Considerar as contas bloqueadas self.oHelper.WaitShow("Área Materiais") self.oHelper.SetButton('Avançar >>') # Área Materiais self.oHelper.WaitShow("Área de Produção") self.oHelper.SetButton('Avançar >>') # Área Produção self.oHelper.WaitShow("Informe os dados utilizados pelo extrator para classificar as informações da área de RH.") self.oHelper.SetValue("Até", "3", grid=True) self.oHelper.LoadGrid() self.oHelper.SetButton('Avançar >>') # Área RH - Tempo de Cargo self.oHelper.WaitShow("Informe os dados utilizados pelo extrator para classificar as informações da área de RH.") self.oHelper.SetButton('Avançar >>') # Área RH - Tempo de Casa self.oHelper.WaitShow("Informe os dados utilizados pelo extrator para classificar as informações da área de RH.") self.oHelper.SetButton('Avançar >>') # Área RH - Faixas etárias self.oHelper.WaitShow("Informe os dados utilizados pelo extrator para classificar as informações da área de RH.") self.oHelper.SetButton('Avançar >>') # Área RH - Faixas salariais self.oHelper.WaitShow("Área de DL") self.oHelper.SetButton('Avançar >>') # Área de DL self.oHelper.WaitShow("Área de Varejo") self.oHelper.SetButton('Avançar >>') # Área de Varejo self.oHelper.WaitShow("Área CRM") self.oHelper.SetButton('Finalizar') self.oHelper.AssertTrue() def test_BIXLINK_CT002(self): self.oHelper.SetLateralMenu('Configuração de Parâmetros') self.oHelper.SetButton('Avançar >>') # Bem vindo self.oHelper.SetButton('Avançar >>') # 
Configuração de acesso self.oHelper.SetButton('Fechar') # Fechar self.oHelper.WaitShow("Selecione as áreas utilizadas na configuração dos extratores Protheus.") self.oHelper.SetButton('Avançar >>') # Definição das Áreas self.oHelper.WaitShow("Selecione as Moedas utilizadas nas fatos") self.oHelper.SetButton('Avançar >>') # Moedas utilizadas nas fatos self.oHelper.WaitShow("Configuração da Macrorregião") self.oHelper.SetButton('>>') # Seleciona todos os países self.oHelper.SetButton('<<') # Deseleciona todos os países self.oHelper.SetButton('Avançar >>') # Configuração de macroregião self.oHelper.WaitShow("Ao habilitar a opção de consolidação, será necessário informar pelo menos um código de moeda.") self.oHelper.SetButton('Avançar >>') # Consolidação de moedas self.oHelper.WaitShow("Caso seja necessário, configure o(s) parâmetro(s) abaixo para adequar a execução dos") self.oHelper.SetButton('Avançar >>') # Parâmetros Genéricos self.oHelper.WaitShow("Área Comercial") self.oHelper.SetButton('Avançar >>') # Área Comercial self.oHelper.WaitShow("Área Comercial, Financeiro e Materiais.") self.oHelper.SetButton('Avançar >>') # Área Comercial, Financeiro e Materiais self.oHelper.WaitShow("Selecione os itens que serão utilizados como Contas de Resultado") self.oHelper.SetButton('Avançar >>') # Área Controladoria self.oHelper.WaitShow("Configuração de Parâmetros da área Controladoria") self.oHelper.SetButton('Avançar >>') # Considerar as contas bloqueadas self.oHelper.WaitShow("Área Materiais") self.oHelper.SetButton('<< Voltar') # Área Materiais para Área Comercial, Financeiro e Materiais self.oHelper.WaitShow("Configuração de Parâmetros da área Controladoria") self.oHelper.SetButton('Avançar >>') # Considerar as contas bloqueadas self.oHelper.WaitShow("Área Materiais") self.oHelper.SetButton('Avançar >>') # Área Materiais para Área Comercial, Financeiro e Materiais self.oHelper.WaitShow("Área de Produção") self.oHelper.SetButton('Avançar >>') # Área Produção 
self.oHelper.WaitShow("Informe os dados utilizados pelo extrator para classificar as informações da área de RH.") self.oHelper.SetValue("Até", "3", grid=True) self.oHelper.LoadGrid() self.oHelper.SetButton('Avançar >>') # Área RH - Tempo de Cargo self.oHelper.WaitShow("Informe os dados utilizados pelo extrator para classificar as informações da área de RH.") self.oHelper.SetButton('<< Voltar') # Área RH self.oHelper.WaitShow("Informe os dados utilizados pelo extrator para classificar as informações da área de RH.") self.oHelper.SetButton('Avançar >>') self.oHelper.WaitShow("Informe os dados utilizados pelo extrator para classificar as informações da área de RH.") self.oHelper.SetButton('Avançar >>') self.oHelper.WaitShow("Informe os dados utilizados pelo extrator para classificar as informações da área de RH.") self.oHelper.SetButton('Avançar >>') # Área RH - Faixas etárias self.oHelper.WaitShow("Informe os dados utilizados pelo extrator para classificar as informações da área de RH.") self.oHelper.SetButton('Avançar >>') # Área RH - Faixas salariais # self.oHelper.WaitShow("Área de Varejo") self.oHelper.WaitShow("Área de DL") self.oHelper.SetButton('Avançar >>') # Área de DL self.oHelper.WaitShow("Área de Varejo") self.oHelper.SetButton('Avançar >>') # Área de Varejo self.oHelper.WaitShow("Área CRM") self.oHelper.SetButton('Finalizar') self.oHelper.AssertTrue() @classmethod def tearDownClass(inst): inst.oHelper.TearDown() if __name__ == '__main__': unittest.main()
44.603448
133
0.74913
975
7,761
5.950769
0.177436
0.202861
0.172354
0.186143
0.846605
0.8404
0.837987
0.837987
0.835746
0.835746
0
0.005054
0.13323
7,761
173
134
44.861272
0.85744
0.146759
0
0.809917
0
0.008264
0.457091
0
0
0
0
0.00578
0.016529
1
0.033058
false
0
0.016529
0
0.057851
0
0
0
0
null
1
0
1
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
9
8e8139a60e8e4afb99703e5eefcef51c07dbb279
6,571
py
Python
dtf/affiliates/migrations/0002_auto__add_partner__add_partnerproduct.py
WebPowerLabs/django-trainings
97f7a96c0fbeb85a001201c74713f7944cb77236
[ "BSD-3-Clause" ]
null
null
null
dtf/affiliates/migrations/0002_auto__add_partner__add_partnerproduct.py
WebPowerLabs/django-trainings
97f7a96c0fbeb85a001201c74713f7944cb77236
[ "BSD-3-Clause" ]
null
null
null
dtf/affiliates/migrations/0002_auto__add_partner__add_partnerproduct.py
WebPowerLabs/django-trainings
97f7a96c0fbeb85a001201c74713f7944cb77236
[ "BSD-3-Clause" ]
null
null
null
# -*- coding: utf-8 -*- from south.utils import datetime_utils as datetime from south.db import db from south.v2 import SchemaMigration from django.db import models class Migration(SchemaMigration): def forwards(self, orm): # Adding model 'Partner' db.create_table(u'affiliates_partner', ( (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)), ('name', self.gf('django.db.models.fields.CharField')(max_length=255)), ('slug', self.gf('django_extensions.db.fields.AutoSlugField')(allow_duplicates=False, max_length=50, separator=u'-', blank=True, populate_from='name', overwrite=False)), ('description', self.gf('django.db.models.fields.TextField')(blank=True)), ('thumbnail', self.gf('django.db.models.fields.files.ImageField')(max_length=100)), ('thumbnail_height', self.gf('django.db.models.fields.CharField')(max_length=255, blank=True)), ('thumbnail_width', self.gf('django.db.models.fields.CharField')(max_length=255, blank=True)), ('active', self.gf('django.db.models.fields.BooleanField')(default=True)), ('created', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, blank=True)), ('updated', self.gf('django.db.models.fields.DateTimeField')(auto_now=True, blank=True)), )) db.send_create_signal(u'affiliates', ['Partner']) # Adding model 'PartnerProduct' db.create_table(u'affiliates_partnerproduct', ( (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)), ('name', self.gf('django.db.models.fields.CharField')(max_length=255)), ('slug', self.gf('django_extensions.db.fields.AutoSlugField')(allow_duplicates=False, max_length=50, separator=u'-', blank=True, populate_from='name', overwrite=False)), ('description', self.gf('django.db.models.fields.TextField')(blank=True)), ('thumbnail', self.gf('django.db.models.fields.files.ImageField')(max_length=100)), ('thumbnail_height', self.gf('django.db.models.fields.CharField')(max_length=255, blank=True)), ('thumbnail_width', self.gf('django.db.models.fields.CharField')(max_length=255, 
blank=True)), ('active', self.gf('django.db.models.fields.BooleanField')(default=True)), ('created', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, blank=True)), ('updated', self.gf('django.db.models.fields.DateTimeField')(auto_now=True, blank=True)), ('partner', self.gf('django.db.models.fields.related.ForeignKey')(related_name='products', to=orm['affiliates.Partner'])), ('link', self.gf('django.db.models.fields.URLField')(max_length=200)), )) db.send_create_signal(u'affiliates', ['PartnerProduct']) def backwards(self, orm): # Deleting model 'Partner' db.delete_table(u'affiliates_partner') # Deleting model 'PartnerProduct' db.delete_table(u'affiliates_partnerproduct') models = { u'affiliates.affiliate': { 'Meta': {'object_name': 'Affiliate'}, 'code': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}) }, u'affiliates.partner': { 'Meta': {'object_name': 'Partner'}, 'active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}), 'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}), 'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}), 'slug': ('django_extensions.db.fields.AutoSlugField', [], {'allow_duplicates': 'False', 'max_length': '50', 'separator': "u'-'", 'blank': 'True', 'populate_from': "'name'", 'overwrite': 'False'}), 'thumbnail': ('django.db.models.fields.files.ImageField', [], {'max_length': '100'}), 'thumbnail_height': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}), 'thumbnail_width': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 
'True'}), 'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}) }, u'affiliates.partnerproduct': { 'Meta': {'object_name': 'PartnerProduct'}, 'active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}), 'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}), 'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'link': ('django.db.models.fields.URLField', [], {'max_length': '200'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}), 'partner': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'products'", 'to': u"orm['affiliates.Partner']"}), 'slug': ('django_extensions.db.fields.AutoSlugField', [], {'allow_duplicates': 'False', 'max_length': '50', 'separator': "u'-'", 'blank': 'True', 'populate_from': "'name'", 'overwrite': 'False'}), 'thumbnail': ('django.db.models.fields.files.ImageField', [], {'max_length': '100'}), 'thumbnail_height': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}), 'thumbnail_width': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}), 'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}) }, u'affiliates.zip': { 'Meta': {'object_name': 'Zip'}, 'affiliate': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['affiliates.Affiliate']"}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'postal_code': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '10'}) } } complete_apps = ['affiliates']
69.168421
208
0.605692
730
6,571
5.336986
0.126027
0.096509
0.165298
0.23614
0.840092
0.809292
0.781828
0.781828
0.751027
0.727926
0
0.013346
0.178968
6,571
95
209
69.168421
0.708804
0.019936
0
0.55
0
0
0.492929
0.296037
0
0
0
0
0
1
0.025
false
0
0.05
0
0.1125
0
0
0
0
null
0
0
1
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
d92502c49523f6311c0a671247d360ba15197b35
90
py
Python
alexia/api/v1/__init__.py
LaudateCorpus1/alexia-1
9c0d3c90c0ffe2237299a561b755b9c17905e354
[ "BSD-3-Clause" ]
8
2015-06-29T20:01:22.000Z
2020-10-19T13:49:38.000Z
alexia/api/v1/__init__.py
LaudateCorpus1/alexia-1
9c0d3c90c0ffe2237299a561b755b9c17905e354
[ "BSD-3-Clause" ]
67
2015-10-05T16:57:14.000Z
2022-03-28T19:57:36.000Z
alexia/api/v1/__init__.py
LaudateCorpus1/alexia-1
9c0d3c90c0ffe2237299a561b755b9c17905e354
[ "BSD-3-Clause" ]
6
2015-10-05T13:54:34.000Z
2021-11-30T05:11:58.000Z
from .config import * # NOQA from .methods import * # NOQA from .views import * # NOQA
22.5
30
0.666667
12
90
5
0.5
0.5
0.466667
0
0
0
0
0
0
0
0
0
0.233333
90
3
31
30
0.869565
0.155556
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
7
d935a1b2ad4fb4ff2eb8770a0b96a4eb1c76301b
10,500
py
Python
tests/tests_constraints/test_queryExtensions.py
victorgabr/pps
dfe3fae64fd4dedde85204643f9c797c0373f96c
[ "BSD-3-Clause" ]
7
2018-11-18T07:11:05.000Z
2021-05-06T21:53:40.000Z
tests/tests_constraints/test_queryExtensions.py
victorgabr/pps
dfe3fae64fd4dedde85204643f9c797c0373f96c
[ "BSD-3-Clause" ]
9
2019-09-23T16:34:09.000Z
2020-05-26T18:49:43.000Z
tests/tests_constraints/test_queryExtensions.py
victorgabr/pps
dfe3fae64fd4dedde85204643f9c797c0373f96c
[ "BSD-3-Clause" ]
2
2019-04-18T14:34:31.000Z
2019-06-19T19:34:33.000Z
# initializing the objects from pyplanscoring.constraints.query import QueryExtensions from pyplanscoring.core.types import VolumePresentation, DoseValue, DoseUnit def test_get_dose_presentation(): # Dose at % volume Gy rd = QueryExtensions() query = 'D90%[Gy]' mq = rd.read(query) assert rd.get_dose_presentation(mq) == 1 query = 'DC90%[Gy]' mq = rd.read(query) assert rd.get_dose_presentation(mq) == 1 # Dose at % volume cGy query = 'D90%[cGy]' mq = rd.read(query) assert rd.get_dose_presentation(mq) == 1 # Dose at cc volume cGy query = 'D0.1cc[cGy]' mq = rd.read(query) assert rd.get_dose_presentation(mq) == 1 # volume at % dose query = 'V95%[%]' mq = rd.read(query) assert rd.get_dose_presentation(mq) == 0 # volume at cGy dose query = 'V95%[cGy]' mq = rd.read(query) assert rd.get_dose_presentation(mq) == 0 # volume at cGy dose query = 'V20Gy[cGy]' mq = rd.read(query) assert rd.get_dose_presentation(mq) == 1 # mean dose query = 'Mean[cGy]' mq = rd.read(query) assert rd.get_dose_presentation(mq) == 1 # min dose query = 'Min[cGy]' mq = rd.read(query) assert rd.get_dose_presentation(mq) == 1 # max dose query = 'Max[cGy]' mq = rd.read(query) assert rd.get_dose_presentation(mq) == 1 # CI query = 'CI47Gy[]' mq = rd.read(query) assert rd.get_dose_presentation(mq) == 1 query = 'CI47.5Gy[]' mq = rd.read(query) assert rd.get_dose_presentation(mq) == 1 query = 'HI47.5Gy[]' mq = rd.read(query) assert rd.get_dose_presentation(mq) == 1 query = "GI47.5Gy[]" mq = rd.read(query) assert rd.get_dose_presentation(mq) == 1 def test_get_dose_unit(): # Dose at % volume Gy rd = QueryExtensions() query = 'D90%[Gy]' mq = rd.read(query) un = rd.get_dose_unit(mq) assert un.symbol == 'Gy' # Dose at % volume cGy query = 'D90%[cGy]' mq = rd.read(query) un = rd.get_dose_unit(mq) assert un.symbol == 'cGy' # Dose at cc volume cGy query = 'D0.1cc[cGy]' mq = rd.read(query) un = rd.get_dose_unit(mq) assert un.symbol == 'cGy' # volume at % dose query = 'V95%[%]' mq = rd.read(query) un = 
rd.get_dose_unit(mq) assert un.symbol == '%' # volume at cGy dose query = 'V95%[cc]' mq = rd.read(query) un = rd.get_dose_unit(mq) assert un.symbol == '%' # volume at cGy dose query = 'V20Gy[%]' mq = rd.read(query) un = rd.get_dose_unit(mq) assert un.symbol == 'Gy' # mean dose query = 'Mean[cGy]' mq = rd.read(query) un = rd.get_dose_unit(mq) assert un.symbol == 'cGy' # min dose query = 'Min[Gy]' mq = rd.read(query) un = rd.get_dose_unit(mq) assert un.symbol == 'Gy' # max dose query = 'Max[cGy]' mq = rd.read(query) un = rd.get_dose_unit(mq) assert un.symbol == 'cGy' # CI query = 'CI47Gy[]' mq = rd.read(query) un = rd.get_dose_unit(mq) assert un.symbol == 'Gy' # HI query = 'HI47Gy[]' mq = rd.read(query) un = rd.get_dose_unit(mq) assert un.symbol == 'Gy' # HI query = 'GI47Gy[]' mq = rd.read(query) un = rd.get_dose_unit(mq) assert un.symbol == 'Gy' def test_get_volume_presentation(): rd = QueryExtensions() query = 'D90%[Gy]' mq = rd.read(query) un = rd.get_volume_presentation(mq) assert un.symbol == '%' query = 'D90cc[Gy]' mq = rd.read(query) un = rd.get_volume_presentation(mq) assert un.symbol == 'cc' query = 'Min[Gy]' mq = rd.read(query) un = rd.get_volume_presentation(mq) assert un.symbol == 'dimensionless' query = 'V95%[cc]' mq = rd.read(query) un = rd.get_volume_presentation(mq) assert un.symbol == 'cc' query = 'V95%[cc]' mq = rd.read(query) un = rd.get_volume_presentation(mq) assert un.symbol == 'cc' query = 'V95%[%]' mq = rd.read(query) un = rd.get_volume_presentation(mq) assert un.symbol == '%' query = 'CI47Gy[]' mq = rd.read(query) un = rd.get_volume_presentation(mq) assert un.symbol == 'dimensionless' query = 'HI47Gy[]' mq = rd.read(query) un = rd.get_volume_presentation(mq) assert un.symbol == 'dimensionless' query = 'GI47Gy[]' mq = rd.read(query) un = rd.get_volume_presentation(mq) assert un.symbol == 'dimensionless' query = 'CV47Gy[cc]' mq = rd.read(query) un = rd.get_volume_presentation(mq) assert un.symbol == 'cc' query = 'CV47Gy[%]' mq = rd.read(query) 
un = rd.get_volume_presentation(mq) assert un.symbol == '%' def test_query_dose(test_case, dvh): rd = QueryExtensions() query_str = 'D95%[cGy]' # read query into que object rd.read(query_str) # execute the static method md = rd.query_dose(dvh, rd) test_case.assertAlmostEqual(md.value, 6103.854532025905) query_str = 'D95%[Gy]' # read query into que object rd.read(query_str) # execute the static method md = rd.query_dose(dvh, rd) test_case.assertAlmostEqual(md.value, 61.03854532025905) query_str = 'D100%[Gy]' # read query into que object rd.read(query_str) # execute the static method md = rd.query_dose(dvh, rd) test_case.assertAlmostEqual(md.value, 34.5701388876389) # absolute volume D100% query_str = 'D689.501173445633cc[Gy]' rd.read(query_str) # execute the static method md = rd.query_dose(dvh, rd) test_case.assertAlmostEqual(md.value, 34.5701388876389) # Do 95% from absolute values query_str = 'D655.0261147733513cc[Gy]' rd.read(query_str) # execute the static method md = rd.query_dose(dvh, rd) test_case.assertAlmostEqual(md.value, 61.03854532025905) # # TODO query percent doses normalized # query_str = 'D95%[%]' # # read query into que object # rd.read(query_str) # # execute the static method # md = rd.query_dose(dvh, rd) # test_case.assertAlmostEqual(md.value, 6103.854532025905) def test_query_dose_compliment(test_case, dvh): rd = QueryExtensions() query_str = 'DC95%[cGy]' # read query into que object rd.read(query_str) # execute the static method md = rd.query_dose_compliment(dvh, rd) test_case.assertAlmostEqual(md.value, 7401.78624315853) # Do 95% from absolute values query_str = 'DC655.0261147733513cc[cGy]' rd.read(query_str) # execute the static method md = rd.query_dose_compliment(dvh, rd) test_case.assertAlmostEqual(md.value, 7401.78624315853) query_str = 'DC95%[Gy]' # read query into que object rd.read(query_str) # execute the static method md = rd.query_dose_compliment(dvh, rd) test_case.assertAlmostEqual(md.value, 74.0178624315853) query_str = 
'DC100%[Gy]' # read query into que object rd.read(query_str) # execute the static method md = rd.query_dose_compliment(dvh, rd) test_case.assertAlmostEqual(md.value, 76.31604166666671) # absolute volume D100% query_str = 'DC689.501173445633cc[Gy]' rd.read(query_str) # execute the static method md = rd.query_dose_compliment(dvh, rd) test_case.assertAlmostEqual(md.value, 76.3160416666667) def test_query_max_dose(test_case, dvh): rd = QueryExtensions() query_str = 'Max[cGy]' # read query into que object rd.read(query_str) md = rd.query_max_dose(dvh, rd) test_case.assertAlmostEqual(md.value, 7631.604166666671) query_str = 'Max[Gy]' # read query into que object rd.read(query_str) md = rd.query_max_dose(dvh, rd) test_case.assertAlmostEqual(md.value, 76.31604166666671) def test_query_min_dose(test_case, dvh): rd = QueryExtensions() query_str = 'Min[cGy]' # read query into que object rd.read(query_str) # execute the static method md = rd.query_min_dose(dvh, rd) test_case.assertAlmostEqual(md.value, 3457.0138887638896) query_str = 'Min[Gy]' # read query into que object rd.read(query_str) md = rd.query_min_dose(dvh, rd) test_case.assertAlmostEqual(md.value, 34.570138887638896) def test_query_mean_dose(test_case, dvh): rd = QueryExtensions() query_str = 'Mean[cGy]' # read query into que object rd.read(query_str) # execute the static method md = rd.query_mean_dose(dvh, rd) test_case.assertAlmostEqual(md.value, 6949.34536891202) query_str = 'Mean[Gy]' # read query into que object rd.read(query_str) md = rd.query_mean_dose(dvh, rd) test_case.assertAlmostEqual(md.value, 69.4934536891202) # Todo test relative dose and volume ? 
def test_query_volume(test_case, dvh): rd = QueryExtensions() query_str = 'V6103.854532025905cGy[%]' # read query into que object rd.read(query_str) # execute the static method md = rd.query_volume(dvh, rd) test_case.assertAlmostEqual(md, 95.0 * VolumePresentation.relative) query_str = 'V6103.854532025905cGy[cc]' # read query into que object rd.read(query_str) # execute the static method md = rd.query_volume(dvh, rd) test_case.assertAlmostEqual(md, 655.0261147733513 * VolumePresentation.absolute_cm3) # Test Query volumes at relative doses local_dvh = dvh query_str = 'V87.1979218860843%[%]' # read query into que object rd.read(query_str) # execute the static method local_dvh.to_relative_dose(DoseValue(7000, DoseUnit.cGy)) md_pp = rd.query_volume(local_dvh, rd) test_case.assertAlmostEqual(md_pp, 95.0 * VolumePresentation.relative) query_str = 'V87.19792188608436%[cc]' # read query into que object rd.read(query_str) \ # execute the static method md_cc = rd.query_volume(local_dvh, rd) test_case.assertAlmostEqual(md_cc, 655.0261147733513 * VolumePresentation.absolute_cm3) def test_query_compliment_volume(test_case, dvh): rd = QueryExtensions() query_str = 'CV6103.854532025905cGy[%]' # read query into que object rd.read(query_str) # execute the static method md = rd.query_compliment_volume(dvh, rd) test_case.assertAlmostEqual(md, 5 * VolumePresentation.relative) query_str = 'CV6103.854532025905cGy[cc]' # read query into que object rd.read(query_str) # execute the static method md = rd.query_compliment_volume(dvh, rd) test_case.assertAlmostEqual(md, 34.47505867228165 * VolumePresentation.absolute_cm3)
28.767123
91
0.652476
1,474
10,500
4.493894
0.084803
0.107337
0.099638
0.072615
0.855676
0.834239
0.821105
0.804348
0.767512
0.740791
0
0.072396
0.226476
10,500
364
92
28.846154
0.743167
0.159429
0
0.739837
0
0
0.085371
0.027543
0
0
0
0.002747
0.239837
1
0.04065
false
0
0.00813
0
0.04878
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
d954e8cc621c3b4c46c94804f2861adb4e66f2a6
35,023
py
Python
web/handlers_web.py
PCNI/homeless-helper
2216199da2e4b41be6eb5c287d9c1a054fec3945
[ "MIT" ]
3
2015-05-04T14:52:10.000Z
2020-02-22T07:03:20.000Z
web/handlers_web.py
PCNI/homeless-helper
2216199da2e4b41be6eb5c287d9c1a054fec3945
[ "MIT" ]
1
2015-01-12T18:25:03.000Z
2015-01-12T18:53:33.000Z
web/handlers_web.py
PCNI/homeless-helper
2216199da2e4b41be6eb5c287d9c1a054fec3945
[ "MIT" ]
null
null
null
import time import logging from datetime import datetime, date import tornado.web from pprint import pprint import sys from pygeocoder import Geocoder import handlers_base BaseHandler = handlers_base.BaseHandler from HomelessHelper.config import Config from HomelessHelper.database import DbPrimary from HomelessHelper.system import System from HomelessHelper.resource import Resource from HomelessHelper.google_geo import GoogleGeo from HomelessHelper.outreach import Outreach config = Config() class MainHandler(BaseHandler): def get(self): self.get_current_user() kwargs = dict(self.application.static_kwargs) kwargs['page_name'] = 'home' kwargs['config'] = config self.write(self.application.loader.load('home.html').generate(**kwargs)) class JobNew(BaseHandler): def get(self): self.get_current_user() kwargs = dict(self.application.static_kwargs) kwargs['page_name'] = 'job_new' kwargs['config'] = config kwargs['done'] = self.get_argument('done', None) kwargs['job_id'] = self.get_argument('job_id', None) self.write(self.application.loader.load('job_new.html').generate(**kwargs)) def post(self): title = self.get_argument('title', None) description = self.get_argument('description', '') address = self.get_argument('address', '') city = self.get_argument('city', '') state = self.get_argument('state', '') zipcode = self.get_argument('zipcode', '') phone = self.get_argument('phone', '') url = self.get_argument('url', '') email = self.get_argument('email', '') opp_type = self.get_argument('opp_type', '') resource = Resource(self.db, config) request_result = resource.new_job_post(title, description, address, city, state, zipcode, phone, url, email, opp_type) if request_result['meta']['status'] == 'ERROR': self.get_current_user() kwargs = dict(self.application.static_kwargs) kwargs['page_name'] = 'ios_job_new' kwargs['config'] = config kwargs['done'] = 0 kwargs['job_id'] = None self.write(self.application.loader.load('job_new.html').generate(**kwargs)) else: next_url = 
'/job/new?done=1&job_id=%s' % (request_result['response']['resource_id']) self.redirect(next_url) class IOSJobNew(BaseHandler): def get(self): self.get_current_user() kwargs = dict(self.application.static_kwargs) kwargs['page_name'] = 'ios_job_new' kwargs['config'] = config kwargs['done'] = self.get_argument('done', None) kwargs['job_id'] = self.get_argument('job_id', None) self.write(self.application.loader.load('ios_job_new.html').generate(**kwargs)) def post(self): title = self.get_argument('title', None) description = self.get_argument('description', '') address = self.get_argument('address', '') city = self.get_argument('city', '') state = self.get_argument('state', '') zipcode = self.get_argument('zipcode', '') phone = self.get_argument('phone', '') url = self.get_argument('url', '') email = self.get_argument('email', '') opp_type = self.get_argument('opp_type', '') resource = Resource(self.db, config) request_result = resource.new_job_post(title, description, address, city, state, zipcode, phone, url, email, opp_type) if request_result['meta']['status'] == 'ERROR': self.get_current_user() kwargs = dict(self.application.static_kwargs) kwargs['page_name'] = 'ios_job_new' kwargs['config'] = config kwargs['done'] = 0 kwargs['job_id'] = None self.write(self.application.loader.load('ios_job_new.html').generate(**kwargs)) else: next_url = '/ios/job/new?done=1&job_id=%s' % (request_result['response']['resource_id']) self.redirect(next_url) class GeoFailed(BaseHandler): def get(self): self.get_current_user() kwargs = dict(self.application.static_kwargs) kwargs['page_name'] = 'Geolocation' kwargs['config'] = config self.write(self.application.loader.load('geo_failed.html').generate(**kwargs)) class Developer(BaseHandler): def get(self): self.get_current_user() kwargs = dict(self.application.static_kwargs) kwargs['page_name'] = 'developer' kwargs['config'] = config self.write(self.application.loader.load('developer.html').generate(**kwargs)) class 
DeveloperRegister(BaseHandler): def get(self): self.get_current_user() kwargs = dict(self.application.static_kwargs) kwargs['page_name'] = 'developer_register' kwargs['config'] = config kwargs['done'] = self.get_argument('done', None) self.write(self.application.loader.load('developer_register.html').generate(**kwargs)) def post(self): name = self.get_argument('name', None) email = self.get_argument('email', None) resource = Resource(self.db, config) request_result = resource.request_apikey(name, email) self.redirect('/developer/register?done=1') class Terms(BaseHandler): def get(self): self.get_current_user() kwargs = dict(self.application.static_kwargs) kwargs['page_name'] = 'terms' kwargs['config'] = config self.write(self.application.loader.load('tos.html').generate(**kwargs)) class Privacy(BaseHandler): def get(self): self.get_current_user() kwargs = dict(self.application.static_kwargs) kwargs['page_name'] = 'privacy' kwargs['config'] = config self.write(self.application.loader.load('privacy.html').generate(**kwargs)) class Faq(BaseHandler): def get(self): self.get_current_user() kwargs = dict(self.application.static_kwargs) kwargs['page_name'] = 'faq' kwargs['config'] = config self.write(self.application.loader.load('faq.html').generate(**kwargs)) class About(BaseHandler): def get(self): print time.time() self.get_current_user() kwargs = dict(self.application.static_kwargs) kwargs['page_name'] = 'about' kwargs['config'] = config self.write(self.application.loader.load('about.html').generate(**kwargs)) class Admin(BaseHandler): def get(self): self.get_current_user() kwargs = dict(self.application.static_kwargs) kwargs['page_name'] = 'admin' kwargs['config'] = config kwargs['access_level'] = None ## check for logout logout_value = self.get_argument('logout', None) if logout_value == '1': self.clear_cookie('admin_code') self.redirect('/admin') ## check admin code admin_code = self.get_argument('admin_code', None) if admin_code is None: admin_code = 
self.get_secure_cookie('admin_code') if admin_code is None: kwargs['admin_status'] = False self.write(self.application.loader.load('admin.html').generate(**kwargs)) return resource = Resource(self.db, config) results = resource.verify_admin(admin_code) if results['meta']['status'] == 'OK': kwargs['admin_status'] = True kwargs['access_level'] = results['access_level'] kwargs['resource_object'] = results['response'] self.set_secure_cookie('admin_code', admin_code) else: kwargs['admin_status'] = False kwargs['access_level'] = False ## bedcount if kwargs['admin_status'] is True: addbed = self.get_argument('addbed', None) delbed = self.get_argument('delbed', None) if addbed == '1': resource = Resource(self.db, config) results2 = resource.add_bed(admin_code) kwargs['resource_object'] = results['response'] if delbed == '1': resource = Resource(self.db, config) results2 = resource.del_bed(admin_code) kwargs['resource_object'] = results['response'] resource = Resource(self.db, config) results = resource.verify_admin(admin_code) kwargs['resource_object'] = results['response'] self.write(self.application.loader.load('admin.html').generate(**kwargs)) class AdminUpdate(BaseHandler): def get(self): self.get_current_user() kwargs = dict(self.application.static_kwargs) kwargs['page_name'] = 'admin_update' kwargs['config'] = config kwargs['access_level'] = None ## check for logout logout_value = self.get_argument('logout', None) if logout_value == '1': self.clear_cookie('admin_code') self.redirect('/admin') ## check admin code admin_code = self.get_argument('admin_code', None) if admin_code is None: admin_code = self.get_secure_cookie('admin_code') if admin_code is None: kwargs['admin_status'] = False self.write(self.application.loader.load('admin.html').generate(**kwargs)) return resource = Resource(self.db, config) results = resource.verify_admin(admin_code) if results['meta']['status'] == 'OK': kwargs['admin_status'] = True kwargs['access_level'] = results['access_level'] if 
results['response']['name_2'] is None: results['response']['name_2'] = '' if results['response']['street_2'] is None: results['response']['street_2'] = '' if results['response']['phone'] is None: results['response']['phone'] = '' if results['response']['url'] is None: results['response']['url'] = '' if results['response']['hours'] is None: results['response']['hours'] = '' if results['response']['notes'] is None: results['response']['notes'] = '' kwargs['resource_object'] = results['response'] self.set_secure_cookie('admin_code', admin_code) else: kwargs['admin_status'] = False kwargs['access_level'] = False self.write(self.application.loader.load('admin_update.html').generate(**kwargs)) def post(self): self.get_current_user() kwargs = dict(self.application.static_kwargs) kwargs['page_name'] = 'admin_update' kwargs['config'] = config kwargs['access_level'] = None ## check admin code admin_code = self.get_argument('admin_code', None) if admin_code is None: admin_code = self.get_secure_cookie('admin_code') if admin_code is None: kwargs['admin_status'] = False self.write(self.application.loader.load('admin.html').generate(**kwargs)) return resource = Resource(self.db, config) results = resource.verify_admin(admin_code) if results['meta']['status'] == 'OK': kwargs['admin_status'] = True kwargs['access_level'] = results['access_level'] kwargs['resource_object'] = results['response'] self.set_secure_cookie('admin_code', admin_code) else: kwargs['admin_status'] = False kwargs['access_level'] = results['access_level'] ## resource update if kwargs['admin_status'] is True: input_data = {} input_data['name_1'] = self.get_argument('name_1', None) input_data['name_2'] = self.get_argument('name_2', None) input_data['street_1'] = self.get_argument('street_1', None) input_data['street_2'] = self.get_argument('street_2', None) input_data['city'] = self.get_argument('city', None) input_data['state'] = self.get_argument('state', None) input_data['zipcode'] = 
self.get_argument('zipcode', None) input_data['phone'] = self.get_argument('phone', None) input_data['url'] = self.get_argument('url', None) input_data['hours'] = self.get_argument('hours', None) input_data['notes'] = self.get_argument('notes', None) input_data['va_status'] = self.get_argument('va_status', None) input_data['contact_name'] = self.get_argument('contact_name', None) input_data['email_address'] = self.get_argument('email_address', None) resource = Resource(self.db, config) results = resource.update_resource(admin_code, input_data) self.redirect('/admin/update') class IOSAdminHandler(BaseHandler): def get(self): self.get_current_user() kwargs = dict(self.application.static_kwargs) kwargs['page_name'] = 'ios_admin' kwargs['config'] = config kwargs['access_level'] = None ## check for logout logout_value = self.get_argument('logout', None) if logout_value == '1': self.clear_cookie('admin_code') self.redirect('/ios/admin') ## check admin code admin_code = self.get_argument('admin_code', None) if admin_code is None: admin_code = self.get_secure_cookie('admin_code') if admin_code is None: kwargs['admin_status'] = False self.write(self.application.loader.load('ios_admin.html').generate(**kwargs)) return resource = Resource(self.db, config) results = resource.verify_admin(admin_code) if results['meta']['status'] == 'OK': kwargs['admin_status'] = True kwargs['access_level'] = results['access_level'] kwargs['resource_object'] = results['response'] self.set_secure_cookie('admin_code', admin_code) else: kwargs['admin_status'] = False kwargs['access_level'] = False ## bedcount if kwargs['admin_status'] is True: addbed = self.get_argument('addbed', None) delbed = self.get_argument('delbed', None) if addbed == '1': resource = Resource(self.db, config) results = resource.add_bed(admin_code) kwargs['resource_object'] = results['response'] if delbed == '1': resource = Resource(self.db, config) results = resource.del_bed(admin_code) kwargs['resource_object'] = 
results['response'] resource = Resource(self.db, config) results = resource.verify_admin(admin_code) kwargs['resource_object'] = results['response'] self.write(self.application.loader.load('ios_admin.html').generate(**kwargs)) class IOSAdminUpdateHandler(BaseHandler): def get(self): self.get_current_user() kwargs = dict(self.application.static_kwargs) kwargs['page_name'] = 'ios/admin_update' kwargs['config'] = config kwargs['access_level'] = None ## check for logout logout_value = self.get_argument('logout', None) if logout_value == '1': self.clear_cookie('admin_code') self.redirect('/ios/admin') ## check admin code admin_code = self.get_argument('admin_code', None) if admin_code is None: admin_code = self.get_secure_cookie('admin_code') if admin_code is None: kwargs['admin_status'] = False self.write(self.application.loader.load('ios/admin.html').generate(**kwargs)) return resource = Resource(self.db, config) results = resource.verify_admin(admin_code) if results['meta']['status'] == 'OK': kwargs['admin_status'] = True kwargs['access_level'] = results['access_level'] if results['response']['name_2'] is None: results['response']['name_2'] = '' if results['response']['street_2'] is None: results['response']['street_2'] = '' if results['response']['phone'] is None: results['response']['phone'] = '' if results['response']['url'] is None: results['response']['url'] = '' if results['response']['hours'] is None: results['response']['hours'] = '' if results['response']['notes'] is None: results['response']['notes'] = '' kwargs['resource_object'] = results['response'] self.set_secure_cookie('admin_code', admin_code) else: kwargs['admin_status'] = False kwargs['access_level'] = results['access_level'] self.write(self.application.loader.load('ios_admin_update.html').generate(**kwargs)) def post(self): self.get_current_user() kwargs = dict(self.application.static_kwargs) kwargs['page_name'] = 'ios_admin_update' kwargs['config'] = config kwargs['access_level'] = None ## check 
admin code admin_code = self.get_argument('admin_code', None) if admin_code is None: admin_code = self.get_secure_cookie('admin_code') if admin_code is None: kwargs['admin_status'] = False self.write(self.application.loader.load('ios_admin.html').generate(**kwargs)) return resource = Resource(self.db, config) results = resource.verify_admin(admin_code) if results['meta']['status'] == 'OK': kwargs['admin_status'] = True kwargs['access_level'] = results['access_level'] kwargs['resource_object'] = results['response'] self.set_secure_cookie('admin_code', admin_code) else: kwargs['admin_status'] = False kwargs['access_level'] = results['access_level'] ## resource update if kwargs['admin_status'] is True: input_data = {} input_data['name_1'] = self.get_argument('name_1', None) input_data['name_2'] = self.get_argument('name_2', None) input_data['street_1'] = self.get_argument('street_1', None) input_data['street_2'] = self.get_argument('street_2', None) input_data['city'] = self.get_argument('city', None) input_data['state'] = self.get_argument('state', None) input_data['zipcode'] = self.get_argument('zipcode', None) input_data['phone'] = self.get_argument('phone', None) input_data['url'] = self.get_argument('url', None) input_data['hours'] = self.get_argument('hours', None) input_data['notes'] = self.get_argument('notes', None) input_data['va_status'] = self.get_argument('va_status', None) input_data['contact_name'] = self.get_argument('contact_name', None) input_data['email_address'] = self.get_argument('email_address', None) resource = Resource(self.db, config) results = resource.update_resource(admin_code, input_data) self.redirect('/ios/admin') class ShelterScrape(BaseHandler): def post(self): name_1 = self.get_argument('name_1', '') street_1 = self.get_argument('street_1', '') city = self.get_argument('city', '') state = self.get_argument('state', '') zipcode = self.get_argument('zipcode', '') phone = self.get_argument('phone', '') email = self.get_argument('email', 
'') url = self.get_argument('url', '') lat = self.get_argument('lat', '') lng = self.get_argument('lng', '') resource = Resource(self.db, config) results = resource.add_shelter_from_scrape(name_1, street_1, city, state, zipcode, phone, email, url, lat, lng) pprint(results) self.write(results) class MedicalScrape(BaseHandler): def post(self): name_1 = self.get_argument('name_1', '') street_1 = self.get_argument('street_1', '') city = self.get_argument('city', '') state = self.get_argument('state', '') zipcode = self.get_argument('zipcode', '') phone = self.get_argument('phone', '') url = self.get_argument('url', '') lat = self.get_argument('lat', '') lng = self.get_argument('lng', '') resource = Resource(self.db, config) results = resource.add_medical_from_scrape(name_1, street_1, city, state, zipcode, phone, url, lat, lng) pprint(results) self.write(results) class FoodScrape(BaseHandler): def post(self): name_1 = self.get_argument('name_1', '') street_1 = self.get_argument('street_1', '') city = self.get_argument('city', '') state = self.get_argument('state', '') zipcode = self.get_argument('zipcode', '') phone = self.get_argument('phone', '') lat = self.get_argument('lat', '') lng = self.get_argument('lng', '') resource = Resource(self.db, config) results = resource.add_food_from_scrape(name_1, street_1, city, state, zipcode, phone, lat, lng) pprint(results) self.write(results) class Resources(BaseHandler): def get(self): self.get_current_user() kwargs = dict(self.application.static_kwargs) kind = self.get_argument('kind') kwargs['page_name'] = kind kwargs['config'] = config query = self.get_argument('query', None) radius = self.get_argument('radius', None) logging.info("entered Resources line 504") if query is None or query == '': lat = self.get_argument('lat', None) lng = self.get_argument('lng', None) if lat is None or lng is None: lat = '40.050137' lng = '-74.221251' if radius is None: radius = '100' else: try: query = self.get_argument('query', None) google = 
GoogleGeo(self.db, config) query_results = google.address_search(query) query_array = query_results['response'] ## get 1st result lat = query_array[0]['geometry']['location']['lat'] lng = query_array[0]['geometry']['location']['lng'] if radius is None: radius = '100' except: self.write(self.application.loader.load('resources_none.html').generate(**kwargs)) return geo_results = Geocoder.reverse_geocode(float(lat), float(lng)) kwargs['full_address'] = geo_results[0] resource = Resource(self.db, config) results = resource.list(kind, lat, lng, radius) kwargs['resource_objects'] = results['response'] if int(len(results['response'])) == 0: if kind != 'employment': self.write(self.application.loader.load('resources_none.html').generate(**kwargs)) return else: jobs = resource.list('job', lat, lng, radius) kwargs['jobs'] = jobs['response'] job_gig = resource.list('job_gig', lat, lng, radius) kwargs['job_gig'] = job_gig['response'] self.write(self.application.loader.load('resources_job_only.html').generate(**kwargs)) return if kind == 'employment': jobs = resource.list('job', lat, lng, radius) kwargs['jobs'] = jobs['response'] job_gig = resource.list('job_gig', lat, lng, radius) kwargs['job_gig'] = job_gig['response'] self.write(self.application.loader.load('resources.html').generate(**kwargs)) class ResourceProfile(BaseHandler): def get(self): self.get_current_user() kwargs = dict(self.application.static_kwargs) kwargs['page_name'] = 'resource_profile' kwargs['config'] = config resource_id = self.get_argument('resource_id', None) try: resource = Resource(self.db, config) results = resource.get(resource_id) kwargs['resource_object'] = results['response'] self.write(self.application.loader.load('resource_profile.html').generate(**kwargs)) except: self.redirect('/') class ShelterRegister(BaseHandler): def get(self): logging.info("entered GET method") self.get_current_user() kwargs = dict(self.application.static_kwargs) kwargs['page_name'] = 'shelter_register' kwargs['config'] 
= config kwargs['done'] = self.get_argument('done', None) kwargs['resource_id'] = self.get_argument('resource_id', None) self.write(self.application.loader.load('shelter_register.html').generate(**kwargs)) def post(self): logging.info("entered POST method") resource_type = self.get_argument('resource_type', None) name = self.get_argument('name', '') email = self.get_argument('email', '') address = self.get_argument('address', '') city = self.get_argument('city', '') state = self.get_argument('state', '') zipcode = self.get_argument('zipcode', '') hours = self.get_argument('hours', '') notes = self.get_argument('notes', '') phone = self.get_argument('phone', '') url = self.get_argument('url', '') va_status = self.get_argument('va_status', '') resource = Resource(self.db, config) logging.info(email) request_result = resource.request_shelter(resource_type, name, email, address, city, state, zipcode, hours, notes, phone, url, va_status) if request_result['meta']['status'] == 'ERROR': self.get_current_user() kwargs = dict(self.application.static_kwargs) kwargs['page_name'] = 'shelter_register' kwargs['config'] = config kwargs['done'] = 0 kwargs['resource_id'] = None self.write(self.application.loader.load('shelter_register.html').generate(**kwargs)) else: next_url = '/shelter/register?done=1' self.redirect(next_url) class IOSShelterRegister(BaseHandler): def get(self): self.get_current_user() kwargs = dict(self.application.static_kwargs) kwargs['page_name'] = 'ios_shelter_register' kwargs['config'] = config kwargs['done'] = self.get_argument('done', None) self.write(self.application.loader.load('ios_shelter_register.html').generate(**kwargs)) def post(self): resource_type = self.get_argument('resource_type', None) name = self.get_argument('name', '') email = self.get_argument('email', '') address = self.get_argument('address', '') city = self.get_argument('city', '') state = self.get_argument('state', '') zipcode = self.get_argument('zipcode', '') hours = 
self.get_argument('hours', '') notes = self.get_argument('notes', '') phone = self.get_argument('phone', '') url = self.get_argument('url', '') va_status = self.get_argument('va_status', '') resource = Resource(self.db, config) request_result = resource.request_shelter(resource_type, name, email, address, city, state, zipcode, hours, notes, phone, url, va_status) self.redirect('/ios/shelter/register?done=1') ## OUTREACH class OutreachClient(BaseHandler): def get(self): username = self.get_secure_cookie('openhmis1') password = self.get_secure_cookie('openhmis2') kwargs = dict(self.application.static_kwargs) kwargs['page_name'] = 'outreach' kwargs['config'] = config kwargs['error'] = False outreach = Outreach(self.db, config) person = outreach.client_get(username, password, '6') if person['meta']['status'] != 'OK': kwargs['error'] = True self.write(self.application.loader.load('outreach_client.html').generate(**kwargs)) def post(self): username = self.get_argument('username', '') password = self.get_argument('password', '') kwargs = dict(self.application.static_kwargs) kwargs['page_name'] = 'outreach' kwargs['config'] = config kwargs['error'] = False outreach = Outreach(self.db, config) person = outreach.client_get(username, password, '6') if person['meta']['status'] != 'OK': kwargs['error'] = True self.set_secure_cookie('openhmis1', username) self.set_secure_cookie('openhmis2', password) self.write(self.application.loader.load('outreach_client.html').generate(**kwargs)) class OutreachClientGet(BaseHandler): def get(self): username = self.get_secure_cookie('openhmis1') password = self.get_secure_cookie('openhmis2') kwargs = dict(self.application.static_kwargs) kwargs['page_name'] = 'mobile_outreach_client_get' kwargs['config'] = config kwargs['client_id'] = self.get_argument('client_id', '') kwargs['error'] = '' if kwargs['client_id'] == '': client_id = '6' else: client_id = kwargs['client_id'] outreach = Outreach(self.db, config) person = 
outreach.client_get(username, password, client_id) kwargs['error'] = person['meta']['status'] try: kwargs['dob'] = person['response']['dob'] except: kwargs['dob'] = '' try: kwargs['ethnicity'] = person['response']['ethnicity'] except: kwargs['ethnicity'] = '' try: kwargs['gender'] = person['response']['gender'] except: kwargs['gender'] = '' try: kwargs['name_first'] = person['response']['name_first'] except: kwargs['name_first'] = '' try: kwargs['name_last'] = person['response']['name_last'] except: kwargs['name_last'] = '' try: kwargs['name_middle'] = person['response']['name_middle'] except: kwargs['name_middle'] = '' try: kwargs['race'] = person['response']['race'] except: kwargs['race'] = '' if person['meta']['status'] != 'OK': kwargs['error'] = True if person['meta']['status'] != 'OK': kwargs['error'] = True self.write(self.application.loader.load('outreach_client_get.html').generate(**kwargs)) def post(self): kwargs = dict(self.application.static_kwargs) kwargs['page_name'] = 'mobile_outreach_client_get' kwargs['config'] = config kwargs['client_id'] = '' kwargs['error'] = '' username = self.get_secure_cookie('openhmis1') password = self.get_secure_cookie('openhmis2') client_id = self.get_argument('client_id', None) outreach = Outreach(self.db, config) kwargs['client_id'] = client_id person = outreach.client_get(username, password, client_id) if person['meta']['status'] != 'OK': kwargs['error'] = True self.write(self.application.loader.load('outreach_client_get.html').generate(**kwargs)) kwargs['client_id'] = client_id kwargs['error'] = person['meta']['status'] try: kwargs['dob'] = person['response']['dob'] except: kwargs['dob'] = '' try: kwargs['ethnicity'] = person['response']['ethnicity'] except: kwargs['ethnicity'] = '' try: kwargs['gender'] = person['response']['gender'] except: kwargs['gender'] = '' try: kwargs['name_first'] = person['response']['name_first'] except: kwargs['name_first'] = '' try: kwargs['name_last'] = person['response']['name_last'] 
except: kwargs['name_last'] = '' try: kwargs['name_middle'] = person['response']['name_middle'] except: kwargs['name_middle'] = '' try: kwargs['race'] = person['response']['race'] except: kwargs['race'] = '' self.write(self.application.loader.load('outreach_client_get.html').generate(**kwargs)) class OutreachClientAdd(BaseHandler): def get(self): username = self.get_secure_cookie('openhmis1') password = self.get_secure_cookie('openhmis2') kwargs = dict(self.application.static_kwargs) kwargs['page_name'] = 'mobile_outreach_client_add' kwargs['config'] = config kwargs['client_id'] = self.get_argument('client_id', None) kwargs['error'] = '' client_id = '6' outreach = Outreach(self.db, config) person = outreach.client_get(username, password, client_id) if person['meta']['status'] != 'OK': kwargs['error'] = True self.write(self.application.loader.load('outreach_client_add.html').generate(**kwargs)) def post(self): username = self.get_secure_cookie('openhmis1') password = self.get_secure_cookie('openhmis2') kwargs = dict(self.application.static_kwargs) kwargs['page_name'] = 'mobile_outreach_client_add' kwargs['config'] = config kwargs['error'] = '' data = {} data['name_first'] = self.get_argument('name_first', None) data['name_middle'] = self.get_argument('name_middle', None) data['name_last'] = self.get_argument('name_last', None) data['dob'] = self.get_argument('dob', None) data['ethnicity'] = self.get_argument('ethnicity', None) data['gender'] = self.get_argument('gender', None) outreach = Outreach(self.db, config) response = outreach.client_add(username, password, data) if response['meta']['status'] != 'OK': kwargs['error'] = True print 'ERROR' pprint(response) self.redirect('/outreach/client/add') return url = '/outreach/client/get?client_id=%s' % (response['response']) self.redirect(url)
39.48478
145
0.585872
3,814
35,023
5.187205
0.05139
0.063688
0.106905
0.043672
0.872725
0.85928
0.848312
0.840831
0.822685
0.794531
0
0.004169
0.27402
35,023
886
146
39.529345
0.773932
0.007852
0
0.803051
0
0
0.155927
0.014979
0
0
0
0
0
0
null
null
0.018031
0.019417
null
null
0.009709
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
8
d98eae40852829262c6439e5d30a2febcb06352b
188
py
Python
python/ql/test/experimental/dataflow/ApiGraphs-py2/test.py
timoles/codeql
2d24387e9e300bf03be35694816b1e76ae88a50c
[ "MIT" ]
4,036
2020-04-29T00:09:57.000Z
2022-03-31T14:16:38.000Z
python/ql/test/experimental/dataflow/ApiGraphs-py2/test.py
timoles/codeql
2d24387e9e300bf03be35694816b1e76ae88a50c
[ "MIT" ]
2,970
2020-04-28T17:24:18.000Z
2022-03-31T22:40:46.000Z
python/ql/test/experimental/dataflow/ApiGraphs-py2/test.py
ScriptBox99/github-codeql
2ecf0d3264db8fb4904b2056964da469372a235c
[ "MIT" ]
794
2020-04-29T00:28:25.000Z
2022-03-30T08:21:46.000Z
def python2_style(): from __builtin__ import open #$ use=moduleImport("builtins").getMember("open") open("hello.txt") #$ use=moduleImport("builtins").getMember("open").getReturn()
47
83
0.712766
21
188
6.142857
0.666667
0.232558
0.356589
0.496124
0.55814
0
0
0
0
0
0
0.005917
0.101064
188
3
84
62.666667
0.757396
0.574468
0
0
0
0
0.115385
0
0
0
0
0
0
1
0.333333
true
0
0.333333
0
0.666667
0
1
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
1
0
1
0
1
0
0
9