hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
797d6da18d9ee6e4e226eb39ec9be670c5c92a63
| 46
|
py
|
Python
|
indra_network_search/autocomplete/__init__.py
|
kkaris/indra_network_service
|
4209f1c3ea010fd543bb2fc73905e9146e9a78fe
|
[
"BSD-2-Clause"
] | null | null | null |
indra_network_search/autocomplete/__init__.py
|
kkaris/indra_network_service
|
4209f1c3ea010fd543bb2fc73905e9146e9a78fe
|
[
"BSD-2-Clause"
] | 13
|
2021-08-17T13:43:54.000Z
|
2022-03-06T02:05:26.000Z
|
indra_network_search/autocomplete/__init__.py
|
kkaris/indra_network_service
|
4209f1c3ea010fd543bb2fc73905e9146e9a78fe
|
[
"BSD-2-Clause"
] | null | null | null |
from .autocomplete import NodesTrie, Prefixes
| 23
| 45
| 0.847826
| 5
| 46
| 7.8
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.108696
| 46
| 1
| 46
| 46
| 0.95122
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
8df9aaa81e271ab04f80377e0c39fdda66af9100
| 18,963
|
py
|
Python
|
EE_API_automation/pytest/src/test_planner.py
|
sunilk747/fabric8-test
|
e394343938cf9eec7ca995a0b3174da3a0923b9b
|
[
"Apache-2.0"
] | null | null | null |
EE_API_automation/pytest/src/test_planner.py
|
sunilk747/fabric8-test
|
e394343938cf9eec7ca995a0b3174da3a0923b9b
|
[
"Apache-2.0"
] | null | null | null |
EE_API_automation/pytest/src/test_planner.py
|
sunilk747/fabric8-test
|
e394343938cf9eec7ca995a0b3174da3a0923b9b
|
[
"Apache-2.0"
] | null | null | null |
import pytest
import requests as req
from support.constants import request_detail, launch_detail, dynamic_vars, workitem_constants
import support.helpers as helpers
class TestClass_Setup(object):
def test_setup(self, sut, offline_token, userid):
print "\n\nTest Setup Start....\n"
if sut is None:
launch_detail.base_url[launch_detail.base_wit] = r"https://api.openshift.io"
print "SUT (WIT Target) not provided!!! Using default production SUT = ", launch_detail.base_url[launch_detail.base_wit]
else:
launch_detail.base_url[launch_detail.base_wit] = sut
print "SUT set to = ", sut
if userid is None:
launch_detail.userid_primary = launch_detail.userid_prod_primary_default
print "USERID not provided! Going ahead with the default USERID = ", launch_detail.userid_prod_primary_default
else:
launch_detail.userid_primary = userid
print "USERID set to = ", launch_detail.userid_primary
if offline_token is None:
pytest.exit("OFFLINE_TOKEN not provided!!! Terminating the run!!!!!!!!!!!")
else:
launch_detail.offref_token_userid_primary = offline_token
launch_detail.token_userid_primary = launch_detail.get_access_token_from_refresh()
print "OFFLINE_TOKEN set to = A secret in Jenkins ;)"
#### Define Request Header, that includes Access Token
request_detail.headers_default = {request_detail.content_type_key_default:request_detail.content_type_default, request_detail.authorization_key_default:request_detail.authorization_carrier_default+launch_detail.token_userid_primary}
print "\nTest Setup Complete....\n"
print "+++++++++++++++++ Running API Tests ++++++++++++++++\n"
class TestClass_CreateSpace(object):
def test_get_user_details(self):
#Design the URL
api = "api/users?filter[username]=" + launch_detail.userid_primary
url = launch_detail.create_url(api)
##Make the request
r = req.get(url, headers=request_detail.headers_default)
##Analyze the response
global loggedin_user_id, loggedin_user_name
loggedin_user_id = helpers.extract_value("data[0].id", r)
loggedin_user_name = helpers.extract_value("data[0].attributes.username", r)
content_type_header = helpers.extract_header("Content-Type", r)
##Save and retain dynamic data for later use
dynamic_vars.username = loggedin_user_name
dynamic_vars.userfullname = helpers.extract_value("data[0].attributes.fullName", r)
dynamic_vars.userid = loggedin_user_id
##Validate the response
assert r.status_code == 200
assert content_type_header == request_detail.content_type_default
def test_create_new_space(self):
#Design the URL
api = "api/spaces"
url = launch_detail.create_url(api)
space_name = helpers.create_space_name()
f = helpers.read_post_data_file('create_space.json', replace={'$space_name_var':space_name, '$loggedin_user_id':dynamic_vars.userid})
##Make the request
r = req.post(url, headers=request_detail.headers_default, json=f)
##Analyze the response
global spaceid, spacename
spaceid = helpers.extract_value("data.id", r)
spacename = helpers.extract_value("data.attributes.name", r)
spacelink = helpers.extract_value("data.links.self", r)
content_type_header = helpers.extract_header("Content-Type", r)
##Save and retain dynamic data for later use
dynamic_vars.spaceid = spaceid
dynamic_vars.spacename = spacename
dynamic_vars.spacelink = spacelink
##Validate the response
assert r.status_code == 201
assert content_type_header == request_detail.content_type_default
assert helpers.extract_value("data.type", r) == 'spaces'
#
def test_get_space_details(self):
#Design the URL
api = "api/spaces/" + spaceid
url = launch_detail.create_url(api)
##Make the request
r = req.get(url, headers=request_detail.headers_default)
##Validate the response
assert r.status_code == 200
assert helpers.extract_header("Content-Type", r) == request_detail.content_type_default
assert helpers.extract_value("data.type", r) == "spaces"
assert helpers.extract_value("data.attributes.name", r) == spacename
def test_enable_experimental_features(self):
#Design the URL
api = "api/users"
url = launch_detail.create_url(api)
f = helpers.read_post_data_file('enable_experimental.json', replace={'$loggedin_user_id':dynamic_vars.userid})
##Make the request
r = req.patch(url, headers=request_detail.headers_default, json=f)
##Validate the response
assert r.status_code == 200
class TestClass_CreateAreas(object):
def test_get_parent_area(self):
#Design the URL
api = "api/spaces/" + spaceid + "/areas"
url = launch_detail.create_url(api)
##Make the request
r = req.get(url, headers=request_detail.headers_default)
##Analyze the response
dynamic_vars.parent_area_id = helpers.extract_value("data[0].id", r)
dynamic_vars.parent_area_name = helpers.extract_value("data[0].attributes.name", r)
##Validate the response
r.raise_for_status()
@pytest.mark.parametrize("area_name", helpers.generate_entity_names("Area", 5))
def test_create_child_areas(self, area_name):
#Design the URL
api = "api/areas/" + dynamic_vars.parent_area_id
url = launch_detail.create_url(api)
f = helpers.read_post_data_file('create_child_area.json', replace={'$area_name_generated':area_name})
##Make the request
r = req.post(url, headers=request_detail.headers_default, json=f)
##Analyze the response
dynamic_vars.area_names_to_ids[area_name] = helpers.extract_value("data.id", r)
##Validate the response
assert r.status_code == 201
class TestClass_CreateIterations(object):
def test_get_root_iter(self):
#Design the URL
api = "api/spaces/" + spaceid + "/iterations"
url = launch_detail.create_url(api)
##Make the request
r = req.get(url, headers=request_detail.headers_default)
##Analyze the response
dynamic_vars.parent_iteration_id = helpers.extract_value("data[0].id", r)
dynamic_vars.parent_iteration_name = helpers.extract_value("data[0].attributes.name", r)
##Validate the response
r.raise_for_status()
@pytest.mark.parametrize("iter_name", helpers.generate_entity_names("Iteration", 5, True, reset_counter = True))
def test_create_child_iters(self, iter_name):
#Design the URL
api = "api/iterations/" + dynamic_vars.parent_iteration_id
url = launch_detail.create_url(api)
f = helpers.read_post_data_file('create_child_iteration.json', replace={'$iteration_name_generated': iter_name, '$spaceid': spaceid})
##Make the request
r = req.post(url, headers=request_detail.headers_default, json=f)
##Analyze the response
dynamic_vars.iteration_names_to_ids[iter_name] = helpers.extract_value("data.id", r)
##Validate the response
assert r.status_code == 201
@pytest.mark.parametrize("iter_name", helpers.generate_entity_names("Iteration1", 5, True, reset_counter = True))
def test_create_nested_iters(self, iter_name):
#Design the URL
api = "api/iterations/" + dynamic_vars.iteration_names_to_ids[workitem_constants.iteration_1]
url = launch_detail.create_url(api)
f = helpers.read_post_data_file('create_child_iteration.json', replace={'$iteration_name_generated': iter_name, '$spaceid': spaceid})
##Make the request
r = req.post(url, headers=request_detail.headers_default, json=f)
##Analyze the response
dynamic_vars.nested_iters_names_to_ids[iter_name] = helpers.extract_value("data.id", r)
##Validate the response
assert r.status_code == 201
#### Workitem related tests follows::::::::
class TestClass_CreateWorkitems(object):
##Create workitems in Iteration-1
@pytest.mark.parametrize("wi_name", helpers.generate_entity_names("Workitem_Title", 2, reset_counter = True))
def test_create_iter1_workitems(self, wi_name):
r = helpers.create_workitem(title=wi_name, spaceid=dynamic_vars.spaceid, witype=workitem_constants.witypetask, iterationid=dynamic_vars.nested_iters_names_to_ids[workitem_constants.iteration1_1])
##Validate the response
assert r.status_code == 201
##Create workitems in Iteration-2
@pytest.mark.parametrize("wi_name", helpers.generate_entity_names("Workitem_Title", 2))
def test_create_iter2_workitems(self, wi_name):
r = helpers.create_workitem(title=wi_name, spaceid=dynamic_vars.spaceid, witype=workitem_constants.witypetask, iterationid=dynamic_vars.iteration_names_to_ids[workitem_constants.iteration_2])
##Validate the response
assert r.status_code == 201
@pytest.mark.parametrize("wi_name", helpers.generate_entity_names("Workitem_Title", 2))
def test_create_backlog_scenarios(self, wi_name):
r = helpers.create_workitem(title=wi_name, spaceid=dynamic_vars.spaceid, witype=workitem_constants.witypescenario)
## Add a couple of comments to the workitem
helpers.add_workitem_comment(dynamic_vars.wi_names_to_links[wi_name], workitem_constants.comment_1_text)
helpers.add_workitem_comment(dynamic_vars.wi_names_to_links[wi_name], workitem_constants.comment_2_text)
## Add a label to the workitem. If label doen't exist, add one
try:
unused = dynamic_vars.labels_names_to_ids[workitem_constants.label_1]
except KeyError:
r, dynamic_vars.labels_names_to_ids[workitem_constants.label_1] = helpers.add_workitem_label(workitem_link=dynamic_vars.wi_names_to_links[wi_name], label_text=workitem_constants.label_1, label_id=None)
r.raise_for_status()
@pytest.mark.parametrize("wi_name", helpers.generate_entity_names("Workitem_Title", 2))
def test_create_backlog_fundamentals(self, wi_name):
r = helpers.create_workitem(title=wi_name, spaceid=dynamic_vars.spaceid, witype=workitem_constants.witypefundamental)
## Add a couple of comments to the workitem
helpers.add_workitem_comment(dynamic_vars.wi_names_to_links[wi_name], workitem_constants.comment_1_text)
helpers.add_workitem_comment(dynamic_vars.wi_names_to_links[wi_name], workitem_constants.comment_2_text)
## Add a label to the workitem
try:
unused = dynamic_vars.labels_names_to_ids[workitem_constants.label_2]
except KeyError:
r, dynamic_vars.labels_names_to_ids[workitem_constants.label_2] = helpers.add_workitem_label(workitem_link=dynamic_vars.wi_names_to_links[wi_name], label_text=workitem_constants.label_2, label_id=None)
r.raise_for_status()
@pytest.mark.parametrize("wi_name", helpers.generate_entity_names("Workitem_Title", 2))
def test_create_backlog_papercuts(self, wi_name):
r = helpers.create_workitem(title=wi_name, spaceid=dynamic_vars.spaceid, witype=workitem_constants.witypepapercut)
## Add a couple of comments to the workitem
helpers.add_workitem_comment(dynamic_vars.wi_names_to_links[wi_name], workitem_constants.comment_1_text)
helpers.add_workitem_comment(dynamic_vars.wi_names_to_links[wi_name], workitem_constants.comment_2_text)
## Add a label to the workitem
try:
unused = dynamic_vars.labels_names_to_ids[workitem_constants.label_3]
except KeyError:
r, dynamic_vars.labels_names_to_ids[workitem_constants.label_3] = helpers.add_workitem_label(workitem_link=dynamic_vars.wi_names_to_links[wi_name], label_text=workitem_constants.label_3, label_id=None)
r.raise_for_status()
@pytest.mark.parametrize("wi_name", helpers.generate_entity_names("Workitem_Title", 2))
def test_create_backlog_valueprops(self, wi_name):
r = helpers.create_workitem(title=wi_name, spaceid=dynamic_vars.spaceid, witype=workitem_constants.witypevalue)
## Add a couple of comments to the workitem
helpers.add_workitem_comment(dynamic_vars.wi_names_to_links[wi_name], workitem_constants.comment_1_text)
helpers.add_workitem_comment(dynamic_vars.wi_names_to_links[wi_name], workitem_constants.comment_2_text)
## Add a label to the workitem
try:
unused = dynamic_vars.labels_names_to_ids[workitem_constants.label_4]
except KeyError:
r, dynamic_vars.labels_names_to_ids[workitem_constants.label_4] = helpers.add_workitem_label(workitem_link=dynamic_vars.wi_names_to_links[wi_name], label_text=workitem_constants.label_4, label_id=None)
r.raise_for_status()
@pytest.mark.parametrize("wi_name", helpers.generate_entity_names("Workitem_Title", 2))
def test_create_backlog_experiences(self, wi_name):
r = helpers.create_workitem(title=wi_name, spaceid=dynamic_vars.spaceid, witype=workitem_constants.witypeexperience)
## Add a couple of comments to the workitem
helpers.add_workitem_comment(dynamic_vars.wi_names_to_links[wi_name], workitem_constants.comment_1_text)
helpers.add_workitem_comment(dynamic_vars.wi_names_to_links[wi_name], workitem_constants.comment_2_text)
## Add a label to the workitem
try:
unused = dynamic_vars.labels_names_to_ids[workitem_constants.label_5]
except KeyError:
r, dynamic_vars.labels_names_to_ids[workitem_constants.label_5] = helpers.add_workitem_label(workitem_link=dynamic_vars.wi_names_to_links[wi_name], label_text=workitem_constants.label_5, label_id=None)
r.raise_for_status()
@pytest.mark.parametrize("wi_name", helpers.generate_entity_names("Workitem_Title", 2))
def test_create_backlog_bugs(self, wi_name):
r = helpers.create_workitem(title=wi_name, spaceid=dynamic_vars.spaceid, witype=workitem_constants.witypebug)
## Add a couple of comments to the workitem
helpers.add_workitem_comment(dynamic_vars.wi_names_to_links[wi_name], workitem_constants.comment_1_text)
helpers.add_workitem_comment(dynamic_vars.wi_names_to_links[wi_name], workitem_constants.comment_2_text)
## Add a few labels to the workitem
helpers.add_workitem_label(workitem_link=dynamic_vars.wi_names_to_links[wi_name], label_id=[dynamic_vars.labels_names_to_ids[workitem_constants.label_1], dynamic_vars.labels_names_to_ids[workitem_constants.label_2], dynamic_vars.labels_names_to_ids[workitem_constants.label_3]])
@pytest.mark.parametrize("wi_name", helpers.generate_entity_names("Workitem_Title", 2))
def test_create_backlog_features(self, wi_name):
r = helpers.create_workitem(title=wi_name, spaceid=dynamic_vars.spaceid, witype=workitem_constants.witypefeature)
## Add a couple of comments to the workitem
helpers.add_workitem_comment(dynamic_vars.wi_names_to_links[wi_name], workitem_constants.comment_1_text)
helpers.add_workitem_comment(dynamic_vars.wi_names_to_links[wi_name], workitem_constants.comment_2_text)
### Add a few labels to the workitem
helpers.add_workitem_label(workitem_link=dynamic_vars.wi_names_to_links[wi_name], label_id=[dynamic_vars.labels_names_to_ids[workitem_constants.label_1], dynamic_vars.labels_names_to_ids[workitem_constants.label_2], dynamic_vars.labels_names_to_ids[workitem_constants.label_3]])
@pytest.mark.parametrize("wi_name", helpers.generate_entity_names("Workitem_Title", 2))
def test_create_backlog_tasks(self, wi_name):
r = helpers.create_workitem(title=wi_name, spaceid=dynamic_vars.spaceid, witype=workitem_constants.witypetask)
## Add a couple of comments to the workitem
helpers.add_workitem_comment(dynamic_vars.wi_names_to_links[wi_name], workitem_constants.comment_1_text)
helpers.add_workitem_comment(dynamic_vars.wi_names_to_links[wi_name], workitem_constants.comment_2_text)
## Add a few labels to the workitem
helpers.add_workitem_label(workitem_link=dynamic_vars.wi_names_to_links[wi_name], label_id=[dynamic_vars.labels_names_to_ids[workitem_constants.label_1], dynamic_vars.labels_names_to_ids[workitem_constants.label_2], dynamic_vars.labels_names_to_ids[workitem_constants.label_3]])
def test_create_wi5_wi11_link(self):
r = helpers.add_workitem_parent_link("Workitem_Title_5", "Workitem_Title_11")
##Validate the response
assert r.status_code == 201
def test_create_wi11_wi17_link(self):
r = helpers.add_workitem_parent_link("Workitem_Title_11", "Workitem_Title_17")
##Validate the response
assert r.status_code == 201
def test_create_wi17_wi19_link(self):
r = helpers.add_workitem_parent_link("Workitem_Title_17", "Workitem_Title_19")
##Validate the response
assert r.status_code == 201
def test_create_wi7_wi13_link(self):
r = helpers.add_workitem_parent_link("Workitem_Title_7", "Workitem_Title_13")
##Validate the response
assert r.status_code == 201
def test_create_wi13_wi15_link(self):
r = helpers.add_workitem_parent_link("Workitem_Title_13", "Workitem_Title_15")
##Validate the response
assert r.status_code == 201
class TestClass_Teardown(object):
def test_teardown(self):
import os, json
launch_detail.launch_details_dict["space_name"] = dynamic_vars.spacename
launch_detail.launch_details_dict["user_fullname"] = dynamic_vars.userfullname
launch_detail.launch_details_dict["user_name"] = dynamic_vars.username
launch_detail.launch_details_dict["user_id"] = dynamic_vars.userid
launch_detail.launch_details_dict["token"] = launch_detail.token_userid_primary
launch_detail.launch_details_dict["offline_token"] = launch_detail.offref_token_userid_primary
try:
curr_dir = os.path.dirname(__file__)
filepath = os.path.join(curr_dir, '..' , 'launch_info_dump.json')
with open(filepath, 'w') as f:
json.dump(launch_detail.launch_details_dict, f, sort_keys=True, indent=4)
except:
print "Exception creating launch_info_dump.json"
print "\n+++++++++++++++++ API Tests Complete ++++++++++++++++\n"
| 59.074766
| 286
| 0.717924
| 2,515
| 18,963
| 5.048907
| 0.093837
| 0.06757
| 0.041109
| 0.034021
| 0.808553
| 0.783037
| 0.749173
| 0.727752
| 0.682942
| 0.65782
| 0
| 0.009626
| 0.189211
| 18,963
| 321
| 287
| 59.074766
| 0.81626
| 0.083267
| 0
| 0.387387
| 0
| 0
| 0.096228
| 0.018481
| 0
| 0
| 0
| 0
| 0.09009
| 0
| null | null | 0
| 0.022523
| null | null | 0.045045
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
5c2a183dc6e7d91824efbdf112ff6f22133cf7d0
| 140
|
py
|
Python
|
homeworks/alexei_rakhmanko/test_ex.py
|
tgrx/Z22
|
b2539682ff26c8b6d9f63a7670c8a9c6b614a8ff
|
[
"Apache-2.0"
] | null | null | null |
homeworks/alexei_rakhmanko/test_ex.py
|
tgrx/Z22
|
b2539682ff26c8b6d9f63a7670c8a9c6b614a8ff
|
[
"Apache-2.0"
] | 8
|
2019-11-15T18:15:56.000Z
|
2020-02-03T18:05:05.000Z
|
homeworks/alexei_rakhmanko/test_ex.py
|
tgrx/Z22
|
b2539682ff26c8b6d9f63a7670c8a9c6b614a8ff
|
[
"Apache-2.0"
] | null | null | null |
"""тест"""
from homeworks.alexei_rakhmanko import ex
def test_rev():
"""функция теста"""
assert ex.my_per([1, 2, 3]) == [3, 2, 1]
| 17.5
| 44
| 0.6
| 22
| 140
| 3.681818
| 0.818182
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.053097
| 0.192857
| 140
| 7
| 45
| 20
| 0.663717
| 0.128571
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.333333
| 1
| 0.333333
| true
| 0
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
5c3fa57b0904e9f2445101a7ef69bc7a1523d2d9
| 95
|
py
|
Python
|
petrel/predict/__init__.py
|
DanielMorton/Petrel
|
07aee79087bf071f0906f806258c0a32d9cd44c6
|
[
"Apache-2.0"
] | null | null | null |
petrel/predict/__init__.py
|
DanielMorton/Petrel
|
07aee79087bf071f0906f806258c0a32d9cd44c6
|
[
"Apache-2.0"
] | null | null | null |
petrel/predict/__init__.py
|
DanielMorton/Petrel
|
07aee79087bf071f0906f806258c0a32d9cd44c6
|
[
"Apache-2.0"
] | null | null | null |
from .eval import model_eval
from .predict import prediction, prediction_df, val_prediction_df
| 31.666667
| 65
| 0.852632
| 14
| 95
| 5.5
| 0.571429
| 0.311688
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.105263
| 95
| 2
| 66
| 47.5
| 0.905882
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
30b07202914716b9542ee84a8836cdd59e329f91
| 17,411
|
py
|
Python
|
my_scripts/fits_eg.py
|
ameya30/IMaX_pole_data_scripts
|
815f9b4cf3f7c827901daa8c90bcacaaaead0e66
|
[
"MIT"
] | null | null | null |
my_scripts/fits_eg.py
|
ameya30/IMaX_pole_data_scripts
|
815f9b4cf3f7c827901daa8c90bcacaaaead0e66
|
[
"MIT"
] | null | null | null |
my_scripts/fits_eg.py
|
ameya30/IMaX_pole_data_scripts
|
815f9b4cf3f7c827901daa8c90bcacaaaead0e66
|
[
"MIT"
] | null | null | null |
from astropy.io import fits
>>> imax = fits.open('imax_find_sun_input.fits')[0].data
>>> imax.shape
(4, 5, 936, 936)
>>>
>>> imax_i = imax[0,4,:,:]
>>> imax_i.shape
(936, 936)
>>> from matplotlib import pyplot as plt
>>> plt.imshow(imax_i, cmap='gray')
<matplotlib.image.AxesImage object at 0x7fceca9c7b70>
>>> plt.show()
>>> from scipy.fftpack import fftn
>>> i_fourier = fftn(imax_i)
Traceback (most recent call last):
File "/home/prabhu/anaconda3/lib/python3.6/site-packages/scipy/fftpack/basic.py", line 630, in _raw_fftn_dispatch
work_function = _DTYPE_TO_FFTN[tmp.dtype]
KeyError: dtype('>f4')
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "/home/prabhu/anaconda3/lib/python3.6/site-packages/scipy/fftpack/basic.py", line 623, in fftn
return _raw_fftn_dispatch(x, shape, axes, overwrite_x, 1)
File "/home/prabhu/anaconda3/lib/python3.6/site-packages/scipy/fftpack/basic.py", line 632, in _raw_fftn_dispatch
raise ValueError("type %s is not supported" % tmp.dtype)
ValueError: type >f4 is not supported
>>> imax_i.shape
(936, 936)
>>> imax_i.type
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
AttributeError: 'numpy.ndarray' object has no attribute 'type'
>>> imax_i.mean()
3484.7219
>>>
>>>
>>> from scipy import fftpack as fft
>>> fft.fftn(imax_i)
Traceback (most recent call last):
File "/home/prabhu/anaconda3/lib/python3.6/site-packages/scipy/fftpack/basic.py", line 630, in _raw_fftn_dispatch
work_function = _DTYPE_TO_FFTN[tmp.dtype]
KeyError: dtype('>f4')
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "/home/prabhu/anaconda3/lib/python3.6/site-packages/scipy/fftpack/basic.py", line 623, in fftn
return _raw_fftn_dispatch(x, shape, axes, overwrite_x, 1)
File "/home/prabhu/anaconda3/lib/python3.6/site-packages/scipy/fftpack/basic.py", line 632, in _raw_fftn_dispatch
raise ValueError("type %s is not supported" % tmp.dtype)
ValueError: type >f4 is not supported
>>> fft.fft2(imax_i)
Traceback (most recent call last):
File "/home/prabhu/anaconda3/lib/python3.6/site-packages/scipy/fftpack/basic.py", line 630, in _raw_fftn_dispatch
work_function = _DTYPE_TO_FFTN[tmp.dtype]
KeyError: dtype('>f4')
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "/home/prabhu/anaconda3/lib/python3.6/site-packages/scipy/fftpack/basic.py", line 675, in fft2
return fftn(x,shape,axes,overwrite_x)
File "/home/prabhu/anaconda3/lib/python3.6/site-packages/scipy/fftpack/basic.py", line 623, in fftn
return _raw_fftn_dispatch(x, shape, axes, overwrite_x, 1)
File "/home/prabhu/anaconda3/lib/python3.6/site-packages/scipy/fftpack/basic.py", line 632, in _raw_fftn_dispatch
raise ValueError("type %s is not supported" % tmp.dtype)
ValueError: type >f4 is not supported
>>> from scipy.fftpack import fftn, ifftn
>>>
>>>
>>>
>>> arr = np.zeros((100,100))
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
NameError: name 'np' is not defined
>>> import numpy as np
>>> arr = np.zeros((100,100))
>>>
>>>
>>> arr.shape
(100, 100)
>>>
>>>
>>> fftn(arr)
array([[ 0.+0.j, 0.+0.j, 0.+0.j, ..., 0.+0.j, 0.+0.j, 0.+0.j],
[ 0.+0.j, 0.+0.j, 0.+0.j, ..., 0.+0.j, 0.+0.j, 0.+0.j],
[ 0.+0.j, 0.+0.j, 0.+0.j, ..., 0.+0.j, 0.+0.j, 0.+0.j],
...,
[ 0.+0.j, 0.+0.j, 0.+0.j, ..., 0.+0.j, 0.+0.j, 0.+0.j],
[ 0.+0.j, 0.+0.j, 0.+0.j, ..., 0.+0.j, 0.+0.j, 0.+0.j],
[ 0.+0.j, 0.+0.j, 0.+0.j, ..., 0.+0.j, 0.+0.j, 0.+0.j]])
>>> imax_i.shape
(936, 936)
>>> dir(imax_i)
['T', '__abs__', '__add__', '__and__', '__array__', '__array_finalize__', '__array_interface__', '__array_prepare__', '__array_priority__', '__array_struct__', '__array_wrap__', '__bool__', '__class__', '__complex__', '__contains__', '__copy__', '__deepcopy__', '__delattr__', '__delitem__', '__dir__', '__divmod__', '__doc__', '__eq__', '__float__', '__floordiv__', '__format__', '__ge__', '__getattribute__', '__getitem__', '__gt__', '__hash__', '__iadd__', '__iand__', '__ifloordiv__', '__ilshift__', '__imatmul__', '__imod__', '__imul__', '__index__', '__init__', '__init_subclass__', '__int__', '__invert__', '__ior__', '__ipow__', '__irshift__', '__isub__', '__iter__', '__itruediv__', '__ixor__', '__le__', '__len__', '__lshift__', '__lt__', '__matmul__', '__mod__', '__mul__', '__ne__', '__neg__', '__new__', '__or__', '__pos__', '__pow__', '__radd__', '__rand__', '__rdivmod__', '__reduce__', '__reduce_ex__', '__repr__', '__rfloordiv__', '__rlshift__', '__rmatmul__', '__rmod__', '__rmul__', '__ror__', '__rpow__', '__rrshift__', '__rshift__', '__rsub__', '__rtruediv__', '__rxor__', '__setattr__', '__setitem__', '__setstate__', '__sizeof__', '__str__', '__sub__', '__subclasshook__', '__truediv__', '__xor__', 'all', 'any', 'argmax', 'argmin', 'argpartition', 'argsort', 'astype', 'base', 'byteswap', 'choose', 'clip', 'compress', 'conj', 'conjugate', 'copy', 'ctypes', 'cumprod', 'cumsum', 'data', 'diagonal', 'dot', 'dtype', 'dump', 'dumps', 'fill', 'flags', 'flat', 'flatten', 'getfield', 'imag', 'item', 'itemset', 'itemsize', 'max', 'mean', 'min', 'nbytes', 'ndim', 'newbyteorder', 'nonzero', 'partition', 'prod', 'ptp', 'put', 'ravel', 'real', 'repeat', 'reshape', 'resize', 'round', 'searchsorted', 'setfield', 'setflags', 'shape', 'size', 'sort', 'squeeze', 'std', 'strides', 'sum', 'swapaxes', 'take', 'tobytes', 'tofile', 'tolist', 'tostring', 'trace', 'transpose', 'var', 'view']
>>> dir(arr)
['T', '__abs__', '__add__', '__and__', '__array__', '__array_finalize__', '__array_interface__', '__array_prepare__', '__array_priority__', '__array_struct__', '__array_wrap__', '__bool__', '__class__', '__complex__', '__contains__', '__copy__', '__deepcopy__', '__delattr__', '__delitem__', '__dir__', '__divmod__', '__doc__', '__eq__', '__float__', '__floordiv__', '__format__', '__ge__', '__getattribute__', '__getitem__', '__gt__', '__hash__', '__iadd__', '__iand__', '__ifloordiv__', '__ilshift__', '__imatmul__', '__imod__', '__imul__', '__index__', '__init__', '__init_subclass__', '__int__', '__invert__', '__ior__', '__ipow__', '__irshift__', '__isub__', '__iter__', '__itruediv__', '__ixor__', '__le__', '__len__', '__lshift__', '__lt__', '__matmul__', '__mod__', '__mul__', '__ne__', '__neg__', '__new__', '__or__', '__pos__', '__pow__', '__radd__', '__rand__', '__rdivmod__', '__reduce__', '__reduce_ex__', '__repr__', '__rfloordiv__', '__rlshift__', '__rmatmul__', '__rmod__', '__rmul__', '__ror__', '__rpow__', '__rrshift__', '__rshift__', '__rsub__', '__rtruediv__', '__rxor__', '__setattr__', '__setitem__', '__setstate__', '__sizeof__', '__str__', '__sub__', '__subclasshook__', '__truediv__', '__xor__', 'all', 'any', 'argmax', 'argmin', 'argpartition', 'argsort', 'astype', 'base', 'byteswap', 'choose', 'clip', 'compress', 'conj', 'conjugate', 'copy', 'ctypes', 'cumprod', 'cumsum', 'data', 'diagonal', 'dot', 'dtype', 'dump', 'dumps', 'fill', 'flags', 'flat', 'flatten', 'getfield', 'imag', 'item', 'itemset', 'itemsize', 'max', 'mean', 'min', 'nbytes', 'ndim', 'newbyteorder', 'nonzero', 'partition', 'prod', 'ptp', 'put', 'ravel', 'real', 'repeat', 'reshape', 'resize', 'round', 'searchsorted', 'setfield', 'setflags', 'shape', 'size', 'sort', 'squeeze', 'std', 'strides', 'sum', 'swapaxes', 'take', 'tobytes', 'tofile', 'tolist', 'tostring', 'trace', 'transpose', 'var', 'view']
>>> imax_i = np.asarray(imax_i)
>>> fftn(imax_i)
Traceback (most recent call last):
File "/home/prabhu/anaconda3/lib/python3.6/site-packages/scipy/fftpack/basic.py", line 630, in _raw_fftn_dispatch
work_function = _DTYPE_TO_FFTN[tmp.dtype]
KeyError: dtype('>f4')
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "/home/prabhu/anaconda3/lib/python3.6/site-packages/scipy/fftpack/basic.py", line 623, in fftn
return _raw_fftn_dispatch(x, shape, axes, overwrite_x, 1)
File "/home/prabhu/anaconda3/lib/python3.6/site-packages/scipy/fftpack/basic.py", line 632, in _raw_fftn_dispatch
raise ValueError("type %s is not supported" % tmp.dtype)
ValueError: type >f4 is not supported
>>> imax_i = imax_i.astype('double')
>>> fftn(imax_i)
array([[ 3.05295120e+09 +0.00000000e+00j,
-4.45528321e+06 -3.97743810e+07j,
-2.30864281e+06 -2.24633673e+07j, ...,
8.97037882e+05 +7.55868541e+06j,
-2.30864281e+06 +2.24633673e+07j,
-4.45528321e+06 +3.97743810e+07j],
[ -2.74862921e+08 +4.59451277e+08j,
4.54499692e+05 -4.50381246e+07j,
1.51455374e+07 -4.92938199e+07j, ...,
3.17040833e+07 -3.94271849e+07j,
4.03006463e+07 -4.20544416e+07j,
5.80681573e+07 -4.04828348e+07j],
[ -3.45686068e+07 +3.41872591e+08j,
-8.42641488e+06 -1.25036175e+07j,
1.38791061e+06 -2.17912880e+07j, ...,
8.15512838e+06 -3.58171974e+07j,
5.38385485e+06 -4.43471742e+07j,
2.13552506e+07 -5.87010622e+07j],
...,
[ 9.24529851e+07 -1.94898063e+08j,
-2.13681331e+07 +4.82335392e+07j,
-1.86307292e+07 +3.30545584e+07j, ...,
-2.89143665e+06 +9.52290443e+06j,
-4.23619958e+06 +1.01733692e+07j,
6.22079190e+05 +3.52366233e+06j],
[ -3.45686068e+07 -3.41872591e+08j,
2.13552506e+07 +5.87010622e+07j,
5.38385485e+06 +4.43471742e+07j, ...,
2.73515014e+05 +2.50096257e+07j,
1.38791061e+06 +2.17912880e+07j,
-8.42641488e+06 +1.25036175e+07j],
[ -2.74862921e+08 -4.59451277e+08j,
5.80681573e+07 +4.04828348e+07j,
4.03006463e+07 +4.20544416e+07j, ...,
1.83588547e+07 +4.34432978e+07j,
1.51455374e+07 +4.92938199e+07j,
4.54499692e+05 +4.50381246e+07j]])
>>> i_fourier = fftn(imax_i)
>>> i_fourier.shape
(936, 936)
>>> plt.imshow(i_fourier.real, cmap='gray')
<matplotlib.image.AxesImage object at 0x7fceca5b66a0>
>>> plt.show()
>>> plt.imshow(i_fourier.real, cmap='gray', vmin=-10, vmax=10)
<matplotlib.image.AxesImage object at 0x7fced668d630>
>>> plt.show()
>>> plt.imshow(i_fourier.real, cmap='gray', vmin=-30, vmax=30)
<matplotlib.image.AxesImage object at 0x7fceca7d69b0>
>>> plt.show()
>>> plt.imshow(i_fourier.real, cmap='gray', vmin=-100, vmax=100)
<matplotlib.image.AxesImage object at 0x7fceca47b3c8>
>>> plt.show()
>>> plt.imshow(i_fourier, cmap='gray', vmin=-100, vmax=100)
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "/home/prabhu/anaconda3/lib/python3.6/site-packages/matplotlib/pyplot.py", line 3157, in imshow
**kwargs)
File "/home/prabhu/anaconda3/lib/python3.6/site-packages/matplotlib/__init__.py", line 1898, in inner
return func(ax, *args, **kwargs)
File "/home/prabhu/anaconda3/lib/python3.6/site-packages/matplotlib/axes/_axes.py", line 5124, in imshow
im.set_data(X)
File "/home/prabhu/anaconda3/lib/python3.6/site-packages/matplotlib/image.py", line 596, in set_data
raise TypeError("Image data can not convert to float")
TypeError: Image data can not convert to float
>>> plt.imshow(i_fourier.imag, cmap='gray', vmin=-100, vmax=100)
<matplotlib.image.AxesImage object at 0x7fceca4008d0>
>>> plt.show()
>>>
>>>
>>> plt.imshow(i_fourier.real, cmap='gray', vmin=-100, vmax=100)
<matplotlib.image.AxesImage object at 0x7fceca395c18>
>>> plt.clf()
>>> plt.imshow(i_fourier.real, cmap='gray', vmin=-200, vmax=200)
<matplotlib.image.AxesImage object at 0x7fcec8b17940>
>>> plt.show()
>>> plt.imshow(imax_i, cmap='gray')
<matplotlib.image.AxesImage object at 0x7fcec8459e80>
>>> plt.show()
>>> i_fourier = imax_i[400:800,200:600]
>>> imax_crop = imax_i[400:800,200:600]
>>> plt.imshow(imax_crop, cmap='gray')
<matplotlib.image.AxesImage object at 0x7fcec8364240>
>>> plt.show()
>>> fou_crop = fftn(imax_crop)
>>> plt.imshow(fou_crop.real, cmap='gray', vmin=-100,vmax=100)
<matplotlib.image.AxesImage object at 0x7fcec81ba080>
>>> plt.show()
>>> plt.imshow(fou_crop.real, cmap='gray')
<matplotlib.image.AxesImage object at 0x7fcec80d05f8>
>>> plt.show()
>>> plt.imshow(fou_crop.real, cmap='gray', vmin=-500,vmax=500)
<matplotlib.image.AxesImage object at 0x7fcec8077240>
>>> fou_crop.mean()
(3861.56689453125-4.6566128730773928e-14j)
>>> np.std(fou_crop.real)
1708928.9097907722
>>> np.mean(fou_crop.real)
3861.5668945312491
>>> plt.imshow(fou_crop.real, cmap='gray', vmin=-50000,vmax=50000)
<matplotlib.image.AxesImage object at 0x7fcec8077828>
>>> plt.show()
>>> power = np.abs(fou_crop)
>>> fou_crop.shape
(400, 400)
>>> plt.imshow(power)
<matplotlib.image.AxesImage object at 0x7fcec812ce10>
>>> power.mean()
31121.731802815866
>>> power.std()
1712433.617166939
>>> plt.clf()
>>> plt.imshow(power, cmap='gray', vmin=-100000,vmax=100000)
<matplotlib.image.AxesImage object at 0x7fcec818d7f0>
>>> plt.show()
>>> ymean = fou_crop[:,150:250]
>>> plt.imshow(ymean, cmap='gray', vmin=-100000, vmax=100000)
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "/home/prabhu/anaconda3/lib/python3.6/site-packages/matplotlib/pyplot.py", line 3157, in imshow
**kwargs)
File "/home/prabhu/anaconda3/lib/python3.6/site-packages/matplotlib/__init__.py", line 1898, in inner
return func(ax, *args, **kwargs)
File "/home/prabhu/anaconda3/lib/python3.6/site-packages/matplotlib/axes/_axes.py", line 5124, in imshow
im.set_data(X)
File "/home/prabhu/anaconda3/lib/python3.6/site-packages/matplotlib/image.py", line 596, in set_data
raise TypeError("Image data can not convert to float")
TypeError: Image data can not convert to float
>>> plt.imshow(ymean.real, cmap='gray', vmin=-100000, vmax=100000)
<matplotlib.image.AxesImage object at 0x7fceca400860>
>>> plt.show()
>>> ymean.shape
(400, 100)
>>> ym = np.mean(ymean, axis=0)
>>> ym.shape
(100,)
>>> ym = np.mean(ymean, axis=1)
>>> ym.shape
(400,)
>>>
>>>
>>> plt.plot(ym)
/home/prabhu/anaconda3/lib/python3.6/site-packages/numpy/core/numeric.py:531: ComplexWarning: Casting complex values to real discards the imaginary part
return array(a, dtype, copy=False, order=order)
[<matplotlib.lines.Line2D object at 0x7fceca5ae978>]
>>> plt.plot(ym.real)
[<matplotlib.lines.Line2D object at 0x7fceca5ae3c8>]
>>> ym.shape
(400,)
>>> plt.show()
>>> fou_crop.real /= ym.real
>>> xmean = fou_crop[150:250,:]
>>> xm = np.mean(xmean, axis=1)
>>> xm.shape
(100,)
>>> xm = np.mean(xmean, axis=0)
>>> xm.shape
(400,)
>>>
>>>
>>> fou_crop.real /= xm.real
>>> plt.imshow(fou_crop.real, cmap='gray', vmin=-100000, vmax=100000)
<matplotlib.image.AxesImage object at 0x7fced668d198>
>>> plt.show()
>>> fou_crop = fftn(imax_crop)
>>> plt.imshow(fou_crop.real, cmap='gray', vmin=-100000, vmax=100000)
<matplotlib.image.AxesImage object at 0x7fcec2cccc50>
>>> plt.show9)
File "<stdin>", line 1
plt.show9)
^
SyntaxError: invalid syntax
>>> plt.show()
>>> x = fou_crop[200,:]
>>> plt.plot(x.real)
[<matplotlib.lines.Line2D object at 0x7fcec8190780>]
>>> plt.show()
>>> fou2 = fou_crop.real/x.real
>>> fou2.shape
(400, 400)
>>>
>>>
>>> plt.imshow(fou2, cmap='gray', vmin=-100000, vmax=100000)
<matplotlib.image.AxesImage object at 0x7fcec8af18d0>
>>> plt.show()
>>> plt.imshow(fou2, cmap='gray', vmin=-100, vmax=100)
<matplotlib.image.AxesImage object at 0x7fcec837b668>
>>> plt.show()
>>> xmean = fou_crop[150:250,:]
>>> xmean.real = np.mean(xmean.real, axis=0)
>>> xmean.real.shape
(100, 400)
>>> xmean.shape
(100, 400)
>>>
>>>
>>> xm = xmean.real
>>> xm.shape
(100, 400)
>>>
>>>
>>> xm = np.mean(xm, axis=0)
>>> xm.shape
(400,)
>>> plt.plot(xm)
[<matplotlib.lines.Line2D object at 0x7fcec845ef60>]
>>> plt.show()
>>> fou2 = fou_crop.real/xm
>>> plt.imshow(fou2, cmap='gray', vmin=-100, vmax=100)
<matplotlib.image.AxesImage object at 0x7fcec36ed0b8>
>>> plt.show()
>>> xmean = fou_crop[:,:].real
>>> xmean.shape
(400, 400)
>>> xm = np.mean(xmean, axis=0)
>>> xm.shape
(400,)
>>> fou2 = fou_crop.real/xm
>>> plt.imshow(fou2, cmap='gray', vmin=-100, vmax=100)
<matplotlib.image.AxesImage object at 0x7fceca359668>
>>> plt.show()
>>> plt.show()
>>> plt.imshow(fou2, cmap='gray', vmin=-100, vmax=100)
<matplotlib.image.AxesImage object at 0x7fceca59c588>
>>> plt.show()
>>> from scipy.fftpack import ifftn
>>> image = ifftn(fou2)
>>> plt.imshow(image,cmap='gray')
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "/home/prabhu/anaconda3/lib/python3.6/site-packages/matplotlib/pyplot.py", line 3157, in imshow
**kwargs)
File "/home/prabhu/anaconda3/lib/python3.6/site-packages/matplotlib/__init__.py", line 1898, in inner
return func(ax, *args, **kwargs)
File "/home/prabhu/anaconda3/lib/python3.6/site-packages/matplotlib/axes/_axes.py", line 5124, in imshow
im.set_data(X)
File "/home/prabhu/anaconda3/lib/python3.6/site-packages/matplotlib/image.py", line 596, in set_data
raise TypeError("Image data can not convert to float")
TypeError: Image data can not convert to float
>>> type(image)
<class 'numpy.ndarray'>
>>> image.shape
(400, 400)
>>> plt.imshow(image.real,cmap='gray')
<matplotlib.image.AxesImage object at 0x7fceca315f60>
>>> plt.show()
| 47.70137
| 1,902
| 0.675378
| 2,393
| 17,411
| 4.550773
| 0.17593
| 0.006612
| 0.009917
| 0.012856
| 0.813958
| 0.764279
| 0.709826
| 0.684757
| 0.652893
| 0.641322
| 0
| 0.112784
| 0.126644
| 17,411
| 364
| 1,903
| 47.832418
| 0.60338
| 0
| 0
| 0.55618
| 0
| 0.070225
| 0.275286
| 0.10568
| 0
| 0
| 0.023319
| 0
| 0
| 0
| null | null | 0
| 0.019663
| null | null | 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
30cdd9a36bf23ba176b41159baab58a6cf134851
| 171
|
bzl
|
Python
|
rules/py/grpc_py_library.bzl
|
heartless-clown/rules_proto
|
99c0d0c7a00c1df7221afc3331b5d859a02c420f
|
[
"Apache-2.0"
] | 249
|
2018-10-24T21:11:08.000Z
|
2022-03-31T03:28:34.000Z
|
rules/py/grpc_py_library.bzl
|
heartless-clown/rules_proto
|
99c0d0c7a00c1df7221afc3331b5d859a02c420f
|
[
"Apache-2.0"
] | 147
|
2018-12-05T18:58:13.000Z
|
2022-03-26T15:41:07.000Z
|
rules/py/grpc_py_library.bzl
|
heartless-clown/rules_proto
|
99c0d0c7a00c1df7221afc3331b5d859a02c420f
|
[
"Apache-2.0"
] | 126
|
2018-11-20T22:34:48.000Z
|
2022-03-18T13:42:05.000Z
|
"grpc_py_library.bzl provides a py_library for grpc files."
load("@rules_python//python:defs.bzl", "py_library")
def grpc_py_library(**kwargs):
py_library(**kwargs)
| 24.428571
| 59
| 0.748538
| 27
| 171
| 4.444444
| 0.518519
| 0.375
| 0.216667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.105263
| 171
| 6
| 60
| 28.5
| 0.784314
| 0.333333
| 0
| 0
| 0
| 0
| 0.567251
| 0.175439
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| true
| 0
| 0
| 0
| 0.25
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
ebb75d81ecfd36f9e817bcf55888b8a7ce076ebb
| 34
|
py
|
Python
|
fathomlite/deepq/__init__.py
|
rdadolf/fathom-lite
|
abd56bda25f9b54d3b4a8f2b1a355e5bfd53ce9b
|
[
"Apache-2.0"
] | 2
|
2018-07-14T22:59:32.000Z
|
2018-10-05T16:10:29.000Z
|
fathomlite/deepq/__init__.py
|
rdadolf/fathom-lite
|
abd56bda25f9b54d3b4a8f2b1a355e5bfd53ce9b
|
[
"Apache-2.0"
] | null | null | null |
fathomlite/deepq/__init__.py
|
rdadolf/fathom-lite
|
abd56bda25f9b54d3b4a8f2b1a355e5bfd53ce9b
|
[
"Apache-2.0"
] | null | null | null |
from deepq import DeepQ, DeepQFwd
| 17
| 33
| 0.823529
| 5
| 34
| 5.6
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.147059
| 34
| 1
| 34
| 34
| 0.965517
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
ebe2756d655c3d68dd7817ffd4762d66fb8fdaab
| 156
|
py
|
Python
|
app/sovolo/context_processors.py
|
Sovol2018/sovolo
|
54250e42b4af3391d2f99690f45b93ab240563c2
|
[
"MIT"
] | 2
|
2017-06-06T11:34:49.000Z
|
2017-10-24T13:09:50.000Z
|
app/sovolo/context_processors.py
|
Sovol2018/sovolo
|
54250e42b4af3391d2f99690f45b93ab240563c2
|
[
"MIT"
] | 346
|
2016-08-09T20:50:57.000Z
|
2018-08-28T06:52:17.000Z
|
app/sovolo/context_processors.py
|
hejob/sovolo
|
8b73253d7bf0427c7ae0ebb6d8e3d70e118e8427
|
[
"MIT"
] | 3
|
2017-11-27T14:07:57.000Z
|
2018-08-13T15:51:01.000Z
|
from django.conf import settings
def google_analytics(request):
prop_id = settings.GOOGLE_ANALYTICS_PROP
return {"GOOGLE_ANALYTICS_PROP": prop_id}
| 26
| 45
| 0.794872
| 21
| 156
| 5.571429
| 0.571429
| 0.384615
| 0.324786
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.134615
| 156
| 5
| 46
| 31.2
| 0.866667
| 0
| 0
| 0
| 0
| 0
| 0.134615
| 0.134615
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0.25
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 6
|
69021b958f3c8793b3a40b5bca86c7534a0839c0
| 4,243
|
py
|
Python
|
tests/test_simple_email.py
|
hmiladhia/Dmail
|
386350331cc5973f79cd4ce42a9a23cfb3a8b87a
|
[
"MIT"
] | 6
|
2020-11-28T18:00:04.000Z
|
2021-07-07T02:19:48.000Z
|
tests/test_simple_email.py
|
hmiladhia/Dmail
|
386350331cc5973f79cd4ce42a9a23cfb3a8b87a
|
[
"MIT"
] | 1
|
2021-06-23T10:58:45.000Z
|
2021-07-24T22:11:44.000Z
|
tests/test_simple_email.py
|
hmiladhia/Dmail
|
386350331cc5973f79cd4ce42a9a23cfb3a8b87a
|
[
"MIT"
] | 1
|
2021-07-07T02:20:24.000Z
|
2021-07-07T02:20:24.000Z
|
import re
import pytest
from Dmail import SimpleEmail
from configDmanager import import_config
from tests.mock_helper import String
@pytest.fixture(scope='session')
def config():
return import_config('tests.EmailConfig')
@pytest.fixture(scope='session', autouse=True)
def dmail(config):
with SimpleEmail(**config.email) as email:
yield email
def test_email_base_send_text(dmail, config, mocker):
mocked_email = mocker.patch.object(dmail.server, 'sendmail')
message, subject = 'abc', 'subject'
dmail.send(message, config.receiver, subject)
expected_msg_regex = (r'Content-Type: multipart/alternative; boundary="===============\d+=="''\n'
r'MIME-Version: 1\.0' '\n'
f'From: {config.email.sender_email}' '\n'
f'Subject: {subject}' '\n'
f'To: {config.receiver}' '\n' '\n'
r'--===============\d+==' '\n'
'Content-Type: text/plain; charset="us-ascii"' '\n'
r'MIME-Version: 1\.0' '\n'
'Content-Transfer-Encoding: 7bit' '\n' '\n'
f'{message}' '\n'
r'--===============\d+==--' '\n')
mocked_email.assert_called_with(config.email.sender_email, [config.receiver], String(expected_msg_regex, re.S))
def test_email_base_send_html(dmail, config, mocker):
mocked_email = mocker.patch.object(dmail.server, 'sendmail')
message, subject = 'abc', 'subject'
html_msg = f'<strong>{message}</strong>'
dmail.send(html_msg, config.receiver, subject, subtype='html')
expected_msg_regex = (r'Content-Type: multipart/alternative; boundary="===============\d+=="''\n*'
r'MIME-Version: 1\.0' '\n*'
f'From: {config.email.sender_email}' '\n*'
f'Subject: {subject}' '\n*'
f'To: {config.receiver}' '\n*'
r'--===============\d+==' '\n'
'Content-Type: text/plain; charset="us-ascii"' '\n*'
r'MIME-Version: 1\.0' '\n*'
'Content-Transfer-Encoding: 7bit' '\n*'
f'{message}' '\n*'
r'--===============\d+==' '\n'
'Content-Type: text/html; charset="us-ascii"' '\n'
r'MIME-Version: 1\.0' '\n'
'Content-Transfer-Encoding: 7bit' '\n' '\n'
f'.*<strong\\s*.*>{message}</strong>.*' '\n'
r'--===============\d+==--' '\n')
mocked_email.assert_called_with(config.email.sender_email, [config.receiver], String(expected_msg_regex, re.S))
def test_email_base_send_html_sender(dmail, config, mocker):
mocked_email = mocker.patch.object(dmail.server, 'sendmail')
message, subject = 'abc', 'subject'
html_msg = f'<strong>{message}</strong>'
dmail.send(html_msg, config.receiver, subject, subtype='html', sender='cusom@email.com')
expected_msg_regex = (r'Content-Type: multipart/alternative; boundary="===============\d+=="''\n*'
r'MIME-Version: 1\.0' '\n*'
f'From: cusom@email.com' '\n*'
f'Subject: {subject}' '\n*'
f'To: {config.receiver}' '\n*'
r'--===============\d+==' '\n'
'Content-Type: text/plain; charset="us-ascii"' '\n*'
r'MIME-Version: 1\.0' '\n*'
'Content-Transfer-Encoding: 7bit' '\n*'
f'{message}' '\n*'
r'--===============\d+==' '\n'
'Content-Type: text/html; charset="us-ascii"' '\n'
r'MIME-Version: 1\.0' '\n'
'Content-Transfer-Encoding: 7bit' '\n' '\n'
f'.*<strong\\s*.*>{message}</strong>.*' '\n'
r'--===============\d+==--' '\n')
mocked_email.assert_called_with(config.email.sender_email, [config.receiver], String(expected_msg_regex, re.S))
| 48.770115
| 115
| 0.475607
| 444
| 4,243
| 4.434685
| 0.15991
| 0.016252
| 0.024378
| 0.052819
| 0.820721
| 0.810564
| 0.810564
| 0.807009
| 0.807009
| 0.807009
| 0
| 0.007307
| 0.322649
| 4,243
| 86
| 116
| 49.337209
| 0.677801
| 0
| 0
| 0.638889
| 0
| 0
| 0.348338
| 0.152958
| 0
| 0
| 0
| 0
| 0.041667
| 1
| 0.069444
| false
| 0
| 0.083333
| 0.013889
| 0.166667
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
69203837de061895a513a1304a71627dc310244c
| 12,202
|
py
|
Python
|
test/test__photos.py
|
maemori/photo_organizing
|
f65eaeda860b3715928fe5adeeef068434f54aff
|
[
"MIT"
] | 2
|
2016-10-22T00:26:49.000Z
|
2018-08-21T02:28:46.000Z
|
test/test__photos.py
|
maemori/photo_organizing
|
f65eaeda860b3715928fe5adeeef068434f54aff
|
[
"MIT"
] | 8
|
2016-09-28T22:09:03.000Z
|
2016-10-10T03:53:07.000Z
|
test/test__photos.py
|
maemori/photo_organizing
|
f65eaeda860b3715928fe5adeeef068434f54aff
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
import os
import sys
import shutil
import unittest
sys.path.append(os.pardir)
import organize.cleaning as photo
from photos import Photos
import organize.exception as exception
import test_setting
class PhotoTest(unittest.TestCase):
"""Test of photo organizing class."""
TEST_FILE_DIR = "./resource/test_input_photo/set3"
TEST_DIR = "test"
TEST_OUTPUT_DIR = "./photo/public/2016/08/2016-08-04"
TEST_OUTPUT_FILES = [
"20160804_021113_000.mp4",
"20160804_060848_000.jpg",
"20160804_060920_000.jpg",
"20160804_064317_000.jpg",
"20160804_064349_000.jpg",
"20160804_065149_000.jpg",
"20160804_065221_000.jpg",
"20160804_090506_000.jpg",
"20160804_090538_000.jpg",
"20160804_090610_000.jpg",
"thumbnail.png"]
@classmethod
def setUpClass(cls):
"""テスト初期化時に実行"""
pass
@classmethod
def tearDownClass(cls):
"""テスト終了時に実行"""
pass
def setUp(self):
"""テストメソッド前処理"""
self.setting = test_setting.Setting()
def tearDown(self):
"""テストメソッド後処理"""
# 設定の復元
self.setting.config_file_set(os.path.join(
self.setting.TEST_CONFIG_FILES_DIR, "config.ini"))
self.setting.config_organize_file_set(os.path.join(
self.setting.TEST_CONFIG_FILES_DIR, self.setting.TEST_ORGANIZE_CONFIG_DIR, "config.ini"))
photo.Cleaning._CONFIG = {}
photo.Cleaning.config()
photo.Cleaning._CASCADE = {}
photo.Cleaning.cascade()
def test_config_none(self):
"""テスト:設定ファイルなし"""
expected = "Failed to read the configuration file!"
try:
# テストディレクトリ初期化
self.setting.test_directory_initialization()
# テスト用入力ファイルの設置
shutil.copytree(self.TEST_FILE_DIR, os.path.join(self.setting.INPUT_DIR, self.TEST_DIR))
# からの設定ファイルを設置
self.setting.config_file_set(os.path.join(
self.setting.TEST_CONFIG_FILES_DIR, "config_ng_none.ini"))
photo.Cleaning.config()
# オブジェクトの生成
Photos()
except exception.Photo_setting_exception as ex:
actual = repr(ex)
self.assertEqual(expected, actual)
def test_move_files_func_exception(self):
"""テスト:指定ファイル移動処理例外"""
expected = "Photo organizing exception!"
try:
# オブジェクトの生成
target = Photos()
target._move_files_func("./test/test_delete_unneeded_func_exception.mp4")
except exception.Photo_exception as ex:
actual = repr(ex)
self.assertEqual(expected, actual)
def test_delete_unneeded_func_exception(self):
"""テスト:指定ファイル削除処理例外"""
expected = "Photo organizing exception!"
try:
# オブジェクトの生成
target = Photos()
target._delete_unneeded_func("./test/test_delete_unneeded_func_exception.json")
except exception.Photo_exception as ex:
actual = repr(ex)
self.assertEqual(expected, actual)
def test_input_dir_none(self):
"""テスト:設定ファイル・入力ディレクトリ指定なし"""
# テストディレクトリ初期化
self.setting.test_directory_initialization()
# テスト用入力ファイルの設置
shutil.copytree(self.TEST_FILE_DIR, os.path.join(self.setting.INPUT_DIR, self.TEST_DIR))
# テスト用の設定ファイルを設置
self.setting.config_file_set(os.path.join(
self.setting.TEST_CONFIG_FILES_DIR, "config_ng_input_none.ini"))
# 設定のクリア
photo.Cleaning._CONFIG = {}
# オブジェクトの生成
target = Photos()
# テストターゲットの実行
target.organize()
# 出力先が空であることを確認
result = False
if not os.path.isdir(self.TEST_OUTPUT_DIR):
result = True
self.assertTrue(result)
def test_output_dir_none(self):
"""テスト:設定ファイル・出力ディレクトリ指定なし"""
expected = "Photo organizing exception!"
try:
# テストディレクトリ初期化
self.setting.test_directory_initialization()
# テスト用入力ファイルの設置
shutil.copytree(self.TEST_FILE_DIR, os.path.join(self.setting.INPUT_DIR, self.TEST_DIR))
# テスト用の設定ファイルを設置
self.setting.config_file_set(os.path.join(
self.setting.TEST_CONFIG_FILES_DIR, "config_ng_output_none.ini"))
# 設定のクリア
photo.Cleaning._CONFIG = {}
# オブジェクトの生成
target = Photos()
# テストターゲットの実行
target.organize()
except exception.Photo_exception as ex:
actual = repr(ex)
self.assertEqual(expected, actual)
def test_organize_func_exception(self):
"""テスト:写真整理処理例外(設定値なし)"""
expected = "Failed to read the configuration file!"
try:
# テストディレクトリ初期化
self.setting.test_directory_initialization()
# テスト用入力ファイルの設置
shutil.copytree(self.TEST_FILE_DIR, os.path.join(self.setting.INPUT_DIR, self.TEST_DIR))
# 空の設定ファイルを設置
self.setting.config_organize_file_set(os.path.join(
self.setting.TEST_CONFIG_FILES_DIR, self.setting.TEST_ORGANIZE_CONFIG_DIR, "config_ng_none.ini"))
# 設定のクリア
photo.Cleaning._CONFIG = {}
photo.Cleaning._CASCADE = {}
# オブジェクトの生成
target = Photos()
# テストターゲットの実行
target.organize()
except exception.Photo_setting_exception as ex:
actual = repr(ex)
self.assertEqual(expected, actual)
def test_organize_func_mosaic_value_exception(self):
"""テスト:写真整理処理例外(カスケード設置値異常)"""
expected = "Fail to recognize the face!"
try:
# テストディレクトリ初期化
self.setting.test_directory_initialization()
# テスト用入力ファイルの設置
shutil.copytree(self.TEST_FILE_DIR, os.path.join(self.setting.INPUT_DIR, self.TEST_DIR))
# 値が不正なの設定ファイルを設置
self.setting.config_organize_file_set(os.path.join(
self.setting.TEST_CONFIG_FILES_DIR, self.setting.TEST_ORGANIZE_CONFIG_DIR, "config_ng_val.ini"))
# 設定のクリア
photo.Cleaning._CONFIG = {}
photo.Cleaning._CASCADE = {}
# オブジェクトの生成
target = Photos()
# テストターゲットの実行
target.organize()
except exception.Photo_cascade_exception as ex:
actual = repr(ex)
self.assertEqual(expected, actual)
def test_thumbnail_exception(self):
"""テスト:サムネイル処理例外"""
expected = "It failed to create a thumbnail image!"
try:
# テストディレクトリ初期化
self.setting.test_directory_initialization()
# テスト用入力ファイルの設置
shutil.copytree(self.TEST_FILE_DIR, os.path.join(self.setting.INPUT_DIR, self.TEST_DIR))
# 空の設定ファイルを設置
self.setting.config_file_set(os.path.join(
self.setting.TEST_CONFIG_FILES_DIR, "config_ng_thumbnail_exception.ini"))
# オブジェクトの生成
target = Photos()
# テストターゲットの実行
target.organize()
except exception.Photo_thumbnail_exception as ex:
actual = repr(ex)
self.assertEqual(expected, actual)
def test_base_flow(self):
"""テスト:基本フローのテスト"""
expected = set(self.TEST_OUTPUT_FILES)
# テストディレクトリ初期化
self.setting.test_directory_initialization()
# テスト用入力ファイルの設置
shutil.copytree(self.TEST_FILE_DIR, os.path.join(self.setting.INPUT_DIR, self.TEST_DIR))
# オブジェクトの生成
target = Photos()
# テストターゲットの実行
target.organize()
# 作成された写真の確認
actual = set(os.listdir(self.TEST_OUTPUT_DIR))
# 部分集合で結果を確認
compare = expected.issuperset(actual)
self.assertTrue(compare)
def test_base_flow_mosaic_off(self):
"""テスト:基本フローのテスト(モザイク指定なし)"""
expected = set(self.TEST_OUTPUT_FILES)
# テストディレクトリ初期化
self.setting.test_directory_initialization()
# テスト用入力ファイルの設置
shutil.copytree(self.TEST_FILE_DIR, os.path.join(self.setting.INPUT_DIR, self.TEST_DIR))
# テスト用の設定ファイルを設置
self.setting.config_file_set(os.path.join(
self.setting.TEST_CONFIG_FILES_DIR, "config_mosaic_off.ini"))
# オブジェクトの生成
target = Photos()
# テストターゲットの実行
target.organize()
# 作成された写真の確認
actual = set(os.listdir(self.TEST_OUTPUT_DIR))
# 部分集合で結果を確認
compare = expected.issuperset(actual)
self.assertTrue(compare)
def test_base_flow_thumbnail(self):
"""テスト:基本フローのテスト(サムネイル2段出力)"""
expected = set(self.TEST_OUTPUT_FILES)
# テストディレクトリ初期化
self.setting.test_directory_initialization()
# テスト用入力ファイルの設置
shutil.copytree(self.TEST_FILE_DIR, os.path.join(self.setting.INPUT_DIR, self.TEST_DIR))
# テスト用の設定ファイルを設置
self.setting.config_file_set(os.path.join(
self.setting.TEST_CONFIG_FILES_DIR, "config_thumbnail.ini"))
# オブジェクトの生成
target = Photos()
# テストターゲットの実行
target.organize()
# 作成された写真の確認
actual = set(os.listdir(self.TEST_OUTPUT_DIR))
# 部分集合で結果を確認
compare = expected.issuperset(actual)
self.assertTrue(compare)
def test_base_flow_backup_dir_none(self):
"""テスト:設定ファイル・バックアップディレクトリ指定なし"""
expected = set(self.TEST_OUTPUT_FILES)
# テストディレクトリ初期化
self.setting.test_directory_initialization()
# テスト用入力ファイルの設置
shutil.copytree(self.TEST_FILE_DIR, os.path.join(self.setting.INPUT_DIR, self.TEST_DIR))
# テスト用の設定ファイルを設置
self.setting.config_file_set(os.path.join(
self.setting.TEST_CONFIG_FILES_DIR, "config_backup_none.ini"))
# 設定のクリア
photo.Cleaning._CONFIG = {}
# オブジェクトの生成
target = Photos()
# テストターゲットの実行
target.organize()
# ターゲットのディクトリが空であること
result = False
if not os.path.isdir("./photo/backup"):
result = True
# 作成された写真の確認
actual = set(os.listdir(self.TEST_OUTPUT_DIR))
# 部分集合で結果を確認
compare = expected.issuperset(actual)
self.assertTrue(compare and result)
def test_base_flow_trash_dir_none(self):
"""テスト:設定ファイル・破棄ディレクトリ指定なし"""
expected = set(self.TEST_OUTPUT_FILES)
# テストディレクトリ初期化
self.setting.test_directory_initialization()
# テスト用入力ファイルの設置
shutil.copytree(self.TEST_FILE_DIR, os.path.join(self.setting.INPUT_DIR, self.TEST_DIR))
# テスト用の設定ファイルを設置
self.setting.config_file_set(os.path.join(
self.setting.TEST_CONFIG_FILES_DIR, "config_trash_none.ini"))
# オブジェクトの生成
target = Photos()
# テストターゲットの実行
target.organize()
# ターゲットのディクトリが空であること
result = False
if not os.path.isdir("./photo/trash"):
result = True
# 作成された写真の確認
actual = set(os.listdir(self.TEST_OUTPUT_DIR))
# 部分集合で結果を確認
compare = expected.issuperset(actual)
self.assertTrue(compare and result)
def test_base_flow_backup_trash_dir_none(self):
"""テスト:設定ファイル・バックアップ&破棄ディレクトリ指定なし"""
expected = set(self.TEST_OUTPUT_FILES)
# テストディレクトリ初期化
self.setting.test_directory_initialization()
# テスト用入力ファイルの設置
shutil.copytree(self.TEST_FILE_DIR, os.path.join(self.setting.INPUT_DIR, self.TEST_DIR))
# テスト用の設定ファイルを設置
self.setting.config_file_set(os.path.join(
self.setting.TEST_CONFIG_FILES_DIR, "config_backup_trash_none.ini"))
# オブジェクトの生成
target = Photos()
# テストターゲットの実行
target.organize()
# ターゲットのディクトリが空であること
result_backup = False
if not os.path.isdir("./photo/backup"):
result_backup = True
result_trash = False
if not os.path.isdir("./photo/trash"):
result_trash = True
# 作成された写真の確認
actual = set(os.listdir(self.TEST_OUTPUT_DIR))
# 部分集合で結果を確認
compare = expected.issuperset(actual)
self.assertTrue(compare and result_backup and result_trash)
| 36.315476
| 113
| 0.621865
| 1,306
| 12,202
| 5.567381
| 0.11562
| 0.081694
| 0.059827
| 0.048136
| 0.803466
| 0.790125
| 0.775547
| 0.753541
| 0.753541
| 0.715582
| 0
| 0.021551
| 0.281265
| 12,202
| 335
| 114
| 36.423881
| 0.806956
| 0.114326
| 0
| 0.660377
| 0
| 0
| 0.08919
| 0.052874
| 0
| 0
| 0
| 0
| 0.066038
| 1
| 0.084906
| false
| 0.009434
| 0.037736
| 0
| 0.146226
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
695200dba09115b8641d682abbb903d726c1c260
| 21
|
py
|
Python
|
tools/__init__.py
|
MichaelSaah/sps-percolation
|
46caebdfc79f240d3a92d1671e28df04965dc8dd
|
[
"MIT"
] | null | null | null |
tools/__init__.py
|
MichaelSaah/sps-percolation
|
46caebdfc79f240d3a92d1671e28df04965dc8dd
|
[
"MIT"
] | null | null | null |
tools/__init__.py
|
MichaelSaah/sps-percolation
|
46caebdfc79f240d3a92d1671e28df04965dc8dd
|
[
"MIT"
] | null | null | null |
from . import graphs
| 10.5
| 20
| 0.761905
| 3
| 21
| 5.333333
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.190476
| 21
| 1
| 21
| 21
| 0.941176
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
696e06de9b306bfffc9bc483106feaa210547819
| 91
|
py
|
Python
|
molecool/io/__init__.py
|
miahj1/molssi_best_practices_2021
|
1fb9ba08c23ff50d451e10f0b40f87fb38124b1b
|
[
"BSD-3-Clause"
] | null | null | null |
molecool/io/__init__.py
|
miahj1/molssi_best_practices_2021
|
1fb9ba08c23ff50d451e10f0b40f87fb38124b1b
|
[
"BSD-3-Clause"
] | null | null | null |
molecool/io/__init__.py
|
miahj1/molssi_best_practices_2021
|
1fb9ba08c23ff50d451e10f0b40f87fb38124b1b
|
[
"BSD-3-Clause"
] | null | null | null |
"""
init for IO package
"""
from .pdb import open_pdb
from .xyz import open_xyz, write_xyz
| 15.166667
| 36
| 0.736264
| 16
| 91
| 4
| 0.625
| 0.3125
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.164835
| 91
| 6
| 36
| 15.166667
| 0.842105
| 0.208791
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
15c6d3dce2609305eb37240795ec49009da48d73
| 21
|
py
|
Python
|
bones/aws/__init__.py
|
vishnubob/bones
|
78074581046664dd4d5ed9f77e5db44a1afb648f
|
[
"MIT"
] | 1
|
2018-12-21T02:09:52.000Z
|
2018-12-21T02:09:52.000Z
|
bones/aws/__init__.py
|
vishnubob/bones
|
78074581046664dd4d5ed9f77e5db44a1afb648f
|
[
"MIT"
] | null | null | null |
bones/aws/__init__.py
|
vishnubob/bones
|
78074581046664dd4d5ed9f77e5db44a1afb648f
|
[
"MIT"
] | 4
|
2016-11-04T17:58:13.000Z
|
2022-01-20T16:55:36.000Z
|
import s3
import ddb
| 7
| 10
| 0.809524
| 4
| 21
| 4.25
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.058824
| 0.190476
| 21
| 2
| 11
| 10.5
| 0.941176
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
15e8277603aca9b4cd75e10133ad3fc6baa7f0d6
| 20
|
py
|
Python
|
dac/algos/__init__.py
|
seungyulhan/dac
|
e2bf0e657bbc77412877e93468743844a0d0a6da
|
[
"MIT"
] | 4
|
2021-07-04T16:46:21.000Z
|
2021-12-10T03:12:00.000Z
|
dac/algos/__init__.py
|
seungyulhan/dac
|
e2bf0e657bbc77412877e93468743844a0d0a6da
|
[
"MIT"
] | null | null | null |
dac/algos/__init__.py
|
seungyulhan/dac
|
e2bf0e657bbc77412877e93468743844a0d0a6da
|
[
"MIT"
] | null | null | null |
from .dac import DAC
| 20
| 20
| 0.8
| 4
| 20
| 4
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.15
| 20
| 1
| 20
| 20
| 0.941176
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
c60c2172726b6cd2bae75b3c0d43095c9a4322e5
| 8,716
|
py
|
Python
|
search_engine/fuzzy_matching.py
|
lcsig/oeis-search
|
8ca3856501dd1bc6ae163f8519c02960a8175117
|
[
"MIT"
] | 1
|
2021-12-28T21:11:04.000Z
|
2021-12-28T21:11:04.000Z
|
search_engine/fuzzy_matching.py
|
lcsig/Sequence-Searcher
|
8ca3856501dd1bc6ae163f8519c02960a8175117
|
[
"MIT"
] | null | null | null |
search_engine/fuzzy_matching.py
|
lcsig/Sequence-Searcher
|
8ca3856501dd1bc6ae163f8519c02960a8175117
|
[
"MIT"
] | null | null | null |
from search_engine import seq_list_numeric
from search_engine import convert_str_to_list
from search_engine import seq_list
import search_engine.utils as utils
"""
This file contains the implementations for fuzzy match algorithm
"""
def fuzzy_match_two_seq_t1(seq1: list, seq2: list, max_off_terms: int):
    """Fuzzy-compare seq2 against seq1, allowing single-term drops.

    returns: The number of remaining terms to drop, Otherwise -1 is returned.
    """
    i = j = 0  # cursors into seq1 / seq2
    while True:
        if j >= len(seq2):
            # All of seq2 consumed: match succeeded.
            return max_off_terms
        if i >= len(seq1):
            # seq1 exhausted: succeed only if the remaining tail of seq2
            # fits inside the remaining drop budget.
            if max_off_terms > len(seq2) - 1 - j:
                return max_off_terms - (len(seq2) - j)
            return -1
        if seq1[i] == seq2[j]:
            i += 1
            j += 1
            continue
        # Mismatch: spend one drop allowance and try to realign one step ahead.
        if max_off_terms == 0:
            return -1
        max_off_terms -= 1
        # Avoid out-of-index errors on the one-term lookahead.
        if j + 1 >= len(seq2) or i + 1 >= len(seq1):
            return -1
        if seq1[i] == seq2[j + 1]:
            j += 1
        elif seq1[i + 1] == seq2[j]:
            i += 1
        elif seq1[i + 1] == seq2[j + 1]:
            i += 1
            j += 1
        else:
            return -1
def fuzzy_match_type1(num_list: str, max_off_terms: int):
    """
    This function will match a target sequence with the database while taking
    into consideration that the sequence may not contain some terms.
    num_list: The sequence that you are looking for
    max_off_terms: The maximum number of terms that are allowed to be dropped off while searching
    returns: A dictionary where the key value represents the rank and the
    dictionary value represents a list of the matched sequences
    """
    target = convert_str_to_list(num_list, True, False)
    results = {}
    total = len(seq_list_numeric)
    for idx in range(total):
        candidate = list(seq_list_numeric[idx])
        # Try every alignment offset that could still fit target in candidate.
        ranks = [
            fuzzy_match_two_seq_t1(candidate[offset:], target, max_off_terms)
            for offset in range(len(candidate) - len(target) + max_off_terms + 1)
        ]
        ranks = [r for r in ranks if r != -1]
        if ranks:
            # Keep only the best (highest) rank found within this candidate.
            results.setdefault(max(ranks), []).append(seq_list[idx])
        utils.waiting(idx, total)
    return results
def fuzzy_match_two_seq_t2(seq1: list, seq2: list, max_off_terms: int, max_gap_size: int):
    """
    returns: The number of remaining terms to drop, Otherwise -1 is returned.

    Like the type-1 comparison, but a mismatch may be bridged by skipping up
    to max_gap_size consecutive terms on either sequence.
    """
    seq1_idx, seq2_idx = 0, 0
    while True:
        # Success: all of seq2 has been consumed.
        if seq2_idx > len(seq2) - 1:
            return max_off_terms
        # seq1 exhausted, but the tail of seq2 fits in both budgets.
        elif seq1_idx > len(seq1) - 1 \
                and max_off_terms > len(seq2) - 1 - seq2_idx \
                and max_gap_size > len(seq2) - 1 - seq2_idx:
            return max_off_terms - ((len(seq2) - 1) - (seq2_idx - 1))
        elif seq1_idx > len(seq1) - 1:
            return -1
        if seq1[seq1_idx] == seq2[seq2_idx]:
            seq1_idx, seq2_idx = seq1_idx + 1, seq2_idx + 1
            continue
        else:
            # Clamp the lookahead windows to the remaining sequence lengths.
            i_gap_range = max_gap_size if seq1_idx + max_gap_size < len(seq1) else len(seq1) - seq1_idx - 1
            n_gap_range = max_gap_size if seq2_idx + max_gap_size < len(seq2) else len(seq2) - seq2_idx - 1
            break_flag = False
            # Scan the (i_gap, n_gap) window for the first realignment point.
            for i_gap in range(0, i_gap_range + 1):
                for n_gap in range(0, n_gap_range + 1):
                    if seq1[seq1_idx + i_gap] == seq2[seq2_idx + n_gap]:
                        seq1_idx, seq2_idx = seq1_idx + i_gap, seq2_idx + n_gap
                        # Charge the drop budget for the skipped terms.
                        if i_gap != 0 and n_gap != 0:
                            max_off_terms = max_off_terms - (i_gap if i_gap <= n_gap else n_gap)  # To be specified ...
                        else:
                            max_off_terms = max_off_terms - (i_gap if i_gap >= n_gap else n_gap)
                        break_flag = True
                        break
                if break_flag:
                    break
            # If the break is false, then there is no match between the next two parts of the sequences
            if not break_flag:
                return -1
            # If the number of the terms allowed to be dropped is below zero
            if max_off_terms < 0:
                return -1
            # NOTE(review): this returns -1 whenever either clamped window is
            # zero, even if a realignment at gap 0 just succeeded above --
            # confirm this early exit is intended.
            if i_gap_range == 0 or n_gap_range == 0:
                return -1
def fuzzy_match_type2(num_list: str, max_off_terms: int, max_gap_size: int):
    """
    This function will match a target sequence with the database while taking
    into consideration that the sequence may not contain some terms.
    num_list: The sequence that you are looking for
    max_off_terms: The maximum number of terms that are allowed to be dropped off while searching
    max_gap_size: The maximum number of terms that can be dropped at a time.
    returns: A dictionary where the key value represents the rank and the
    dictionary value represents a list of the matched sequences
    """
    target = convert_str_to_list(num_list, True, False)
    results = {}
    total = len(seq_list_numeric)
    for idx in range(total):
        candidate = list(seq_list_numeric[idx])
        # Try every alignment offset that could still fit target in candidate.
        ranks = [
            fuzzy_match_two_seq_t2(candidate[offset:], target, max_off_terms, max_gap_size)
            for offset in range(len(candidate) - len(target) + max_off_terms + 1)
        ]
        ranks = [r for r in ranks if r != -1]
        if ranks:
            # Keep only the best (highest) rank found within this candidate.
            results.setdefault(max(ranks), []).append(seq_list[idx])
        utils.waiting(idx, total)
    return results
def fuzzy_match_two_seq_t3(seq1: list, seq2: list, max_off_terms: int):
    """Subsequence-style fuzzy compare of seq2 against seq1.

    Each term of seq2 is searched for in the remaining tail of seq1; a term
    that is absent costs one drop allowance.

    returns: The number of remaining terms to drop, Otherwise -1 is returned.
    """
    seq1_idx, seq2_idx = 0, 0
    while True:
        if max_off_terms < 0:
            return -1  # drop budget exceeded
        if seq2_idx > len(seq2) - 1:
            return max_off_terms  # all of seq2 consumed: success
        if seq1_idx > len(seq1) - 1 and max_off_terms >= len(seq2) - seq2_idx:
            # seq1 exhausted but the rest of seq2 fits in the budget.
            return max_off_terms - ((len(seq2) - 1) - (seq2_idx - 1))
        if seq1_idx > len(seq1) - 1:
            return -1
        # FIX: the original did two linear membership scans (if not in /
        # elif in) plus dead no-op statements (seq1_idx -= 0, max_off_terms -= 0);
        # a single scan with a plain else is equivalent.
        if seq2[seq2_idx] not in seq1[seq1_idx:]:
            # Term missing from the remaining candidate: spend one allowance.
            seq2_idx += 1
            max_off_terms -= 1
        else:
            # Advance past the first occurrence of the matched term.
            seq1_idx = seq1_idx + seq1[seq1_idx:].index(seq2[seq2_idx]) + 1
            seq2_idx += 1
def fuzzy_match_type3(num_list: str, max_off_terms: int):
    """
    This function will match a target sequence with the database while taking
    into consideration that the sequence may not contain some terms.
    num_list: The sequence that you are looking for
    max_off_terms: The maximum number of terms that are allowed to be dropped off while searching
    returns: A dictionary where the key value represents the rank and the
    dictionary value represents a list of the matched sequences
    """
    target = convert_str_to_list(num_list, True, False)
    results = {}
    total = len(seq_list_numeric)
    for idx in range(total):
        candidate = list(seq_list_numeric[idx])
        # Try every alignment offset that could still fit target in candidate.
        ranks = [
            fuzzy_match_two_seq_t3(candidate[offset:], target, max_off_terms)
            for offset in range(len(candidate) - len(target) + max_off_terms + 1)
        ]
        ranks = [r for r in ranks if r != -1]
        if ranks:
            # Keep only the best (highest) rank found within this candidate.
            results.setdefault(max(ranks), []).append(seq_list[idx])
        utils.waiting(idx, total)
    return results
| 37.731602
| 119
| 0.59821
| 1,262
| 8,716
| 3.884311
| 0.102219
| 0.052836
| 0.078539
| 0.019992
| 0.860261
| 0.830681
| 0.765402
| 0.759078
| 0.732558
| 0.726234
| 0
| 0.042341
| 0.319872
| 8,716
| 231
| 120
| 37.731602
| 0.784582
| 0.242887
| 0
| 0.664286
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.042857
| false
| 0
| 0.028571
| 0
| 0.207143
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
d662f9780d1a4d82921e27cd772145bba017ed12
| 193
|
py
|
Python
|
models/init.py
|
LaudateCorpus1/learning-subspaces
|
240122f8a875f580eca92325fb06a98bf71bbf3e
|
[
"AML"
] | 105
|
2021-03-30T06:03:44.000Z
|
2022-03-03T15:50:41.000Z
|
models/init.py
|
LaudateCorpus1/learning-subspaces
|
240122f8a875f580eca92325fb06a98bf71bbf3e
|
[
"AML"
] | 3
|
2021-07-16T04:46:25.000Z
|
2022-01-29T04:47:26.000Z
|
models/init.py
|
LaudateCorpus1/learning-subspaces
|
240122f8a875f580eca92325fb06a98bf71bbf3e
|
[
"AML"
] | 12
|
2021-03-23T17:24:43.000Z
|
2022-03-26T11:43:12.000Z
|
#
# For licensing see accompanying LICENSE file.
# Copyright (C) 2020 Apple Inc. All Rights Reserved.
#
import torch.nn as nn
def kaiming_normal(weight):
nn.init.kaiming_normal_(weight,)
| 19.3
| 52
| 0.746114
| 28
| 193
| 5.035714
| 0.821429
| 0.184397
| 0.269504
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.024691
| 0.160622
| 193
| 9
| 53
| 21.444444
| 0.845679
| 0.492228
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
d6651407d80a3b1bbfa23b10b0cb55177311ad49
| 80
|
py
|
Python
|
micro-benchmark/snippets/returns/call/main.py
|
WenJinfeng/PyCG
|
b45e8e04fe697d8301cf27222a8f37646d69f168
|
[
"Apache-2.0"
] | 121
|
2020-12-16T20:31:37.000Z
|
2022-03-21T20:32:43.000Z
|
micro-benchmark/snippets/returns/call/main.py
|
WenJinfeng/PyCG
|
b45e8e04fe697d8301cf27222a8f37646d69f168
|
[
"Apache-2.0"
] | 24
|
2021-03-13T00:04:00.000Z
|
2022-03-21T17:28:11.000Z
|
micro-benchmark/snippets/returns/call/main.py
|
WenJinfeng/PyCG
|
b45e8e04fe697d8301cf27222a8f37646d69f168
|
[
"Apache-2.0"
] | 19
|
2021-03-23T10:58:47.000Z
|
2022-03-24T19:46:50.000Z
|
# Micro-benchmark fixture: a function object returned as a value and then
# invoked through a variable (exercises call-graph resolution of returns).
def return_func():
    pass
def func():
    # Returns the function object itself, not the result of calling it.
    return return_func
a = func()
a()  # calls return_func through the alias
| 8.888889
| 22
| 0.6
| 12
| 80
| 3.833333
| 0.416667
| 0.434783
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.2625
| 80
| 8
| 23
| 10
| 0.779661
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0.166667
| 0
| 0.166667
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
|
0
| 6
|
d671bae8cfe436a06506bc2409e9322469d21cfa
| 245
|
py
|
Python
|
test/generate/autotest/24_ADD_A_i.py
|
Aimini/hm-51
|
2d46323388a0679b2f99d1a33f5a0d55a5f838e6
|
[
"MIT"
] | null | null | null |
test/generate/autotest/24_ADD_A_i.py
|
Aimini/hm-51
|
2d46323388a0679b2f99d1a33f5a0d55a5f838e6
|
[
"MIT"
] | 20
|
2020-01-13T04:19:37.000Z
|
2020-02-12T14:25:44.000Z
|
test/generate/autotest/24_ADD_A_i.py
|
Aimini/hm-51
|
2d46323388a0679b2f99d1a33f5a0d55a5f838e6
|
[
"MIT"
] | null | null | null |
#########################################################
# 2020-01-22 09:32:49
# AI
# ins: ADD A, #immed
#########################################################
from .common.INS_XXX_A_i import INS_XXX_A_I
# Generate the auto-test program for "ADD A, #immed".
# NOTE(review): gen(0, 5503) arguments are presumably a seed/offset and a
# count -- confirm against INS_XXX_A_I.gen.
p = INS_XXX_A_I("ADD").gen(0,5503)
| 27.222222
| 57
| 0.363265
| 31
| 245
| 2.580645
| 0.645161
| 0.225
| 0.2625
| 0.3
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.084821
| 0.085714
| 245
| 9
| 58
| 27.222222
| 0.272321
| 0.163265
| 0
| 0
| 0
| 0
| 0.034884
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 0.5
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
d674322922d5dbbf1176f16e54010e75c5946d99
| 1,273
|
py
|
Python
|
tests/little_endian_baselines/modify_ascii_same_len_hex.py
|
TNThieding/exif
|
2e59701aec7416fbb3b2db76e7d090f166f1f132
|
[
"MIT"
] | 51
|
2018-12-28T19:48:40.000Z
|
2021-12-10T00:35:41.000Z
|
tests/little_endian_baselines/modify_ascii_same_len_hex.py
|
TNThieding/exif
|
2e59701aec7416fbb3b2db76e7d090f166f1f132
|
[
"MIT"
] | 33
|
2019-02-08T10:15:25.000Z
|
2022-02-11T18:37:45.000Z
|
tests/little_endian_baselines/modify_ascii_same_len_hex.py
|
TNThieding/exif
|
2e59701aec7416fbb3b2db76e7d090f166f1f132
|
[
"MIT"
] | 11
|
2019-10-24T14:03:02.000Z
|
2020-12-10T04:07:20.000Z
|
"""APP1 segment hexadecimal baseline for test_modify."""
from baseline import Baseline
LITTLE_ENDIAN_MODIFY_BASELINE = Baseline(
"""
FFE1021045786966000049492A000800000009001001020009000000940000001A01050001000000A20000001B
01050001000000AA0000002801030001000000020000001302030001000000020000001402050006000000B200
00006987040001000000100100002588040001000000960100000F0102002E000000E200000000000000000000
00000000000000000000000000000000000000000000004D6F646966696564000000000000C800000001000000
C8000000010000000000000001000000FF000000010000008000000001000000FF000000010000008000000001
000000FF0000000100000056616C756520666F72204D616B65205461672074686174206973204C6F6E67657220
7468616E204265666F726500090000900700040000003032323003900200140000008201000001910700040000
003B02030000A00700040000003031303001A00300010000000100000002A0030001000000D803000003A00300
010000006505000009A4030001000000010000000AA40300010000000100000000000000323031393A30323A30
382032313A34343A33350005000000010004000000020300000100020002000000530000000200050003000000
D80100000300020002000000570000000400050003000000F0010000000000004F000000010000002400000001
000000B96B0100A30600000C000000010000002200000001000000D5DD0000E8030000FFDB
"""
)
| 60.619048
| 94
| 0.919874
| 28
| 1,273
| 41.678571
| 0.821429
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.846672
| 0.067557
| 1,273
| 20
| 95
| 63.65
| 0.136479
| 0.039277
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 0.333333
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
d69555b13ab0c9ca62ef0716f339807ea76d39e8
| 282
|
py
|
Python
|
common/exceptions.py
|
VenmoTools/test-tool-box
|
d3db83eb82ce2eeb854435a4ce6f843401655fd4
|
[
"Apache-2.0"
] | 1
|
2020-06-25T18:55:22.000Z
|
2020-06-25T18:55:22.000Z
|
common/exceptions.py
|
VenmoTools/test-tool-box
|
d3db83eb82ce2eeb854435a4ce6f843401655fd4
|
[
"Apache-2.0"
] | null | null | null |
common/exceptions.py
|
VenmoTools/test-tool-box
|
d3db83eb82ce2eeb854435a4ce6f843401655fd4
|
[
"Apache-2.0"
] | null | null | null |
class AdbNoStartError(Exception):
    """Error for when adb is not started (name-derived; confirm at raise sites)."""
    pass


class NoSuchProcessNameError(Exception):
    """Error for when no process with the requested name exists (name-derived)."""
    pass


class CommandError(Exception):
    """Error for a failed command (name-derived; confirm at raise sites)."""
    pass


class ConfigError(Exception):
    """Error for configuration problems (name-derived; confirm at raise sites)."""
    pass


class PlatformNoSupport(Warning):
    """Warning that the current platform is not supported.

    FIX: the original docstring was copy-pasted from DeprecationWarning
    ("warnings about deprecated features") and did not match this class.
    """
    pass
| 14.842105
| 62
| 0.737589
| 27
| 282
| 7.703704
| 0.555556
| 0.25
| 0.346154
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.184397
| 282
| 18
| 63
| 15.666667
| 0.904348
| 0.177305
| 0
| 0.444444
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.555556
| 0
| 0
| 0.555556
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 0
|
0
| 6
|
d6de6d619f9d35cf5af045158166c0fbc599b8ad
| 42
|
py
|
Python
|
tests/__init__.py
|
VashonHu/MyWebsiteWithFlask
|
52d23a68e042ae2405e03964eb0dfbf07b1df8c4
|
[
"MIT"
] | null | null | null |
tests/__init__.py
|
VashonHu/MyWebsiteWithFlask
|
52d23a68e042ae2405e03964eb0dfbf07b1df8c4
|
[
"MIT"
] | null | null | null |
tests/__init__.py
|
VashonHu/MyWebsiteWithFlask
|
52d23a68e042ae2405e03964eb0dfbf07b1df8c4
|
[
"MIT"
] | null | null | null |
from . import test_basics, test_user_model
| 42
| 42
| 0.857143
| 7
| 42
| 4.714286
| 0.857143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.095238
| 42
| 1
| 42
| 42
| 0.868421
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
ba34ff21f9102d6d1e7ddbb6a0315018c1a27f74
| 9,089
|
py
|
Python
|
cogs/views.py
|
Joystickplays/GoMod
|
f48af46b08e095136cb048d9dbb76a5f539f4ea1
|
[
"Apache-2.0"
] | 1
|
2022-02-25T04:25:21.000Z
|
2022-02-25T04:25:21.000Z
|
cogs/views.py
|
Joystickplays/GoMod
|
f48af46b08e095136cb048d9dbb76a5f539f4ea1
|
[
"Apache-2.0"
] | null | null | null |
cogs/views.py
|
Joystickplays/GoMod
|
f48af46b08e095136cb048d9dbb76a5f539f4ea1
|
[
"Apache-2.0"
] | null | null | null |
import discord
class Caseactionsview(discord.ui.View):
    """Moderation-action buttons for a case (Ban / Kick / Delete / Ignore).

    The chosen action's one-letter code is stored in ``self.value``; only the
    command author may press the buttons.
    """

    def __init__(self, ctx):
        super().__init__()
        self.value = None  # one-letter action code, set on button press
        self.ctx = ctx

    async def _choose(self, interaction, code):
        """Record *code* and stop the view, rejecting non-author clicks."""
        if self.ctx.author != interaction.user:
            await interaction.response.send_message("You can't do this, sorry.", ephemeral=True)
            return
        self.value = code
        self.stop()

    @discord.ui.button(label='Ban', style=discord.ButtonStyle.red)
    async def ban(self, button: discord.ui.Button, interaction: discord.Interaction):
        await self._choose(interaction, "b")

    @discord.ui.button(label='Kick', style=discord.ButtonStyle.red)
    async def kick(self, button: discord.ui.Button, interaction: discord.Interaction):
        await self._choose(interaction, "k")

    @discord.ui.button(label='Delete', style=discord.ButtonStyle.green)
    async def delete(self, button: discord.ui.Button, interaction: discord.Interaction):
        await self._choose(interaction, "d")

    @discord.ui.button(label='Ignore', style=discord.ButtonStyle.gray)
    async def ignore(self, button: discord.ui.Button, interaction: discord.Interaction):
        await self._choose(interaction, "i")
class Helpview(discord.ui.View):
    """Help-menu category buttons; the chosen category code ends up in ``self.value``."""

    def __init__(self, ctx):
        super().__init__()
        self.value = None  # category code chosen by the author
        self.ctx = ctx
        self.timeout = 60  # stop listening after 60 seconds

    async def _pick(self, interaction, code):
        """Record *code* and stop the view, rejecting non-author clicks."""
        if self.ctx.author != interaction.user:
            await interaction.response.send_message("You can't do this, sorry.", ephemeral=True)
            return
        self.value = code
        self.stop()

    @discord.ui.button(label='Moderator', style=discord.ButtonStyle.gray)
    async def mod(self, button: discord.ui.Button, interaction: discord.Interaction):
        await self._pick(interaction, "m")

    @discord.ui.button(label='AiMod [BETA]', style=discord.ButtonStyle.gray)
    async def ai(self, button: discord.ui.Button, interaction: discord.Interaction):
        await self._pick(interaction, "a")

    @discord.ui.button(label='Server backups', style=discord.ButtonStyle.gray, disabled=True)
    async def server(self, button: discord.ui.Button, interaction: discord.Interaction):
        await self._pick(interaction, "s")

    @discord.ui.button(label='Logging', style=discord.ButtonStyle.gray, disabled=True)
    async def log(self, button: discord.ui.Button, interaction: discord.Interaction):
        await self._pick(interaction, "l")

    @discord.ui.button(label='ModRep', style=discord.ButtonStyle.gray)
    async def modrep(self, button: discord.ui.Button, interaction: discord.Interaction):
        await self._pick(interaction, "mr")

    @discord.ui.button(label='CC', style=discord.ButtonStyle.gray)
    async def cc(self, button: discord.ui.Button, interaction: discord.Interaction):
        await self._pick(interaction, "cc")

    @discord.ui.button(label='Others', style=discord.ButtonStyle.gray)
    async def other(self, button: discord.ui.Button, interaction: discord.Interaction):
        await self._pick(interaction, "o")

    @discord.ui.button(label='Exit', style=discord.ButtonStyle.gray)
    async def ex(self, button: discord.ui.Button, interaction: discord.Interaction):
        await self._pick(interaction, "x")
async def getvotes(self, member):
    """Build the 'Reputation' embed for *member* (upvotes minus downvotes).

    Module-level helper: *self* supplies ``.bot.db`` and is passed explicitly
    by the view callbacks below.
    """
    up_rows = await self.bot.db.fetch("SELECT COUNT(*) FROM repvotes WHERE who = $1 AND type = 'up'", member.id)
    down_rows = await self.bot.db.fetch("SELECT COUNT(*) FROM repvotes WHERE who = $1 AND type = 'down'", member.id)
    total = up_rows[0]["count"] - down_rows[0]["count"]
    return discord.Embed(title="Reputation", description=f"{member.mention} has {total} votes.", color=0x00b2ff)
class UpDownvote(discord.ui.View):
    """Upvote/Downvote buttons for a member's reputation.

    Each press inserts, cancels, or flips the presser's vote for ``self.mem``
    in the ``repvotes`` table, then refreshes the reputation message.
    """

    def __init__(self, bot, mem):
        super().__init__()
        self.value = None
        self.bot = bot
        self.mem = mem  # the member being voted on

    @discord.ui.button(label='Upvote', style=discord.ButtonStyle.green)
    async def upvote(self, button: discord.ui.Button, interaction: discord.Interaction):
        # Self-voting is not allowed.
        if self.mem.id == interaction.user.id:
            await interaction.response.send_message("You cannot upvote or downvote yourself.", ephemeral=True)
            return
        lookup = await self.bot.db.fetchrow("SELECT * FROM repvotes WHERE who = $1 AND voted = $2", self.mem.id, interaction.user.id)
        if lookup:
            if lookup["type"] == "down":
                # Flip an existing downvote into an upvote.
                await self.bot.db.execute("DELETE FROM repvotes WHERE who = $1 AND voted = $2 AND type = 'down'", self.mem.id, interaction.user.id)
                await self.bot.db.execute("INSERT INTO repvotes (who, voted, type) VALUES ($1, $2, 'up')", self.mem.id, interaction.user.id)
                embed = await getvotes(self, self.mem)
                await interaction.message.edit(content=f"Update: {interaction.user.mention} cancelled the downvote and upvote this member.", embed=embed)
                return
            elif lookup["type"] == "up":
                # Pressing again cancels the existing upvote.
                await self.bot.db.execute("DELETE FROM repvotes WHERE who = $1 AND voted = $2 AND type = 'up'", self.mem.id, interaction.user.id)
                embed = await getvotes(self, self.mem)
                # BUG FIX: Message.edit takes keyword-only arguments; the
                # content was passed positionally here and raised TypeError.
                await interaction.message.edit(content=f"Update: {interaction.user.mention} cancelled the upvote for this user.", embed=embed)
                return
        # No prior vote: record a fresh upvote.
        await self.bot.db.execute("INSERT INTO repvotes (who, voted, type) VALUES ($1, $2, 'up')", self.mem.id, interaction.user.id)
        embed = await getvotes(self, self.mem)
        await interaction.message.edit(content=f"Update: {interaction.user.mention} upvoted this member.", embed=embed)

    @discord.ui.button(label='Downvote', style=discord.ButtonStyle.red)
    async def downvote(self, button: discord.ui.Button, interaction: discord.Interaction):
        # Self-voting is not allowed.
        if self.mem.id == interaction.user.id:
            await interaction.response.send_message("You cannot upvote or downvote yourself.", ephemeral=True)
            return
        lookup = await self.bot.db.fetchrow("SELECT * FROM repvotes WHERE who = $1 AND voted = $2", self.mem.id, interaction.user.id)
        if lookup:
            if lookup["type"] == "down":
                # Pressing again cancels the existing downvote.
                await self.bot.db.execute("DELETE FROM repvotes WHERE who = $1 AND voted = $2 AND type = 'down'", self.mem.id, interaction.user.id)
                embed = await getvotes(self, self.mem)
                # BUG FIX: Message.edit takes keyword-only arguments; the
                # content was passed positionally here and raised TypeError.
                await interaction.message.edit(content=f"Update: {interaction.user.mention} cancelled the downvote for this user.", embed=embed)
                return
            elif lookup["type"] == "up":
                # Flip an existing upvote into a downvote.
                await self.bot.db.execute("DELETE FROM repvotes WHERE who = $1 AND voted = $2 AND type = 'up'", self.mem.id, interaction.user.id)
                await self.bot.db.execute("INSERT INTO repvotes (who, voted, type) VALUES ($1, $2, 'down')", self.mem.id, interaction.user.id)
                embed = await getvotes(self, self.mem)
                await interaction.message.edit(content=f"Update: {interaction.user.mention} cancelled the upvote and downvoted this member.", embed=embed)
                return
        # No prior vote: record a fresh downvote.
        await self.bot.db.execute("INSERT INTO repvotes (who, voted, type) VALUES ($1, $2, 'down')", self.mem.id, interaction.user.id)
        embed = await getvotes(self, self.mem)
        await interaction.message.edit(content=f"Update: {interaction.user.mention} downvoted this member.", embed=embed)
# class SingleConfirm(discord.ui.View):
# def __init__(self, bot, ):
# super().__init__()
# self.value = None
# self.bot = bot
| 47.586387
| 154
| 0.642205
| 1,147
| 9,089
| 5.048823
| 0.111595
| 0.049732
| 0.072526
| 0.048351
| 0.898809
| 0.888793
| 0.764289
| 0.754965
| 0.722846
| 0.722846
| 0
| 0.004286
| 0.229838
| 9,089
| 190
| 155
| 47.836842
| 0.823
| 0.015953
| 0
| 0.604027
| 0
| 0
| 0.19322
| 0.017454
| 0
| 0
| 0.000895
| 0
| 0
| 1
| 0.020134
| false
| 0
| 0.006711
| 0
| 0.174497
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
ba38ff3805280441c46057d993c1cb20ba01b8c4
| 288
|
py
|
Python
|
text2sql/training/metrics/__init__.py
|
inbaroren/improving-compgen-in-semparse
|
06463b94f3d1b291759c08783d5a8661e2960f2e
|
[
"MIT"
] | 15
|
2020-09-30T12:24:29.000Z
|
2021-12-24T13:45:25.000Z
|
text2sql/training/metrics/__init__.py
|
inbaroren/improving-compgen-in-semparse
|
06463b94f3d1b291759c08783d5a8661e2960f2e
|
[
"MIT"
] | 2
|
2021-04-21T14:07:41.000Z
|
2021-12-28T13:26:59.000Z
|
text2sql/training/metrics/__init__.py
|
inbaroren/improving-compgen-in-semparse
|
06463b94f3d1b291759c08783d5a8661e2960f2e
|
[
"MIT"
] | 2
|
2020-10-19T22:06:45.000Z
|
2021-02-05T22:08:23.000Z
|
from text2sql.training.metrics.bleu import BLEU
from text2sql.training.metrics.token_sequence_accuracy import TokenSequenceAccuracy
from text2sql.training.metrics.sql_kb_acc import KnowledgeBaseConstsAccuracy
from text2sql.training.metrics.sql_global_templ_acc import GlobalTemplAccuracy
| 57.6
| 83
| 0.902778
| 35
| 288
| 7.228571
| 0.485714
| 0.189723
| 0.316206
| 0.426877
| 0.237154
| 0
| 0
| 0
| 0
| 0
| 0
| 0.014706
| 0.055556
| 288
| 4
| 84
| 72
| 0.915441
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
ba4dd6e949394e23d86b5f0d9d421cd629e9b507
| 27,744
|
py
|
Python
|
statsmetrics/couchbase.py
|
toxot/statsmetrics-vertis
|
f1a9de18dc76753a593bca44f976b28a85d4950f
|
[
"MIT"
] | null | null | null |
statsmetrics/couchbase.py
|
toxot/statsmetrics-vertis
|
f1a9de18dc76753a593bca44f976b28a85d4950f
|
[
"MIT"
] | null | null | null |
statsmetrics/couchbase.py
|
toxot/statsmetrics-vertis
|
f1a9de18dc76753a593bca44f976b28a85d4950f
|
[
"MIT"
] | null | null | null |
# Couchbase metrics list
couchbase_metrics = {
'cluster': {
'url': '/pools/default/',
'metrics': [
{'name':'storageTotals.ram.total','id':'storageTotals.ram.total','suffix':'bytes','labels':['name']},
{'name':'storageTotals.ram.used','id':'storageTotals.ram.used','suffix':'bytes','labels':['name']},
{'name':'storageTotals.ram.usedByData','id':'storageTotals.ram.usedByData','suffix':'bytes','labels':['name']},
{'name':'storageTotals.ram.quotaTotal','id':'storageTotals.ram.quotaTotal','suffix':'bytes','labels':['name']},
{'name':'storageTotals.ram.quotaUsed','id':'storageTotals.ram.quotaUsed','suffix':'bytes','labels':['name']},
{'name':'storageTotals.ram.quotaUsedPerNode','id':'storageTotals.ram.quotaUsedPerNode','suffix':'bytes','labels':['name']},
{'name':'storageTotals.ram.quotaTotalPerNode','id':'storageTotals.ram.quotaTotalPerNode','suffix':'bytes','labels':['name']},
{'name':'storageTotals.hdd.total','id':'storageTotals.hdd.total','suffix':'bytes','labels':['name']},
{'name':'storageTotals.hdd.used','id':'storageTotals.hdd.used','suffix':'bytes','labels':['name']},
{'name':'storageTotals.hdd.usedByData','id':'storageTotals.hdd.usedByData','suffix':'bytes','labels':['name']},
{'name':'storageTotals.hdd.quotaTotal','id':'storageTotals.hdd.quotaTotal','suffix':'bytes','labels':['name']},
{'name':'storageTotals.hdd.free','id':'storageTotals.hdd.free','suffix':'bytes','labels':['name']},
{'name':'storageTotals.hdd.quotaUsed','id':'storageTotals.hdd.quotaUsed','suffix':'bytes','labels':['name']},
{'name':'storageTotals.hdd.quotaUsedPerNode','id':'storageTotals.hdd.quotaUsedPerNode','suffix':'bytes','labels':['name']},
{'name':'storageTotals.hdd.quotaTotalPerNode','id':'storageTotals.hdd.quotaTotalPerNode','suffix':'bytes','labels':['name']},
{'name':'counters.rebalance_success','id':'counters.rebalance_success','suffix':'count','labels':['name']},
{'name':'counters.rebalance_start','id':'counters.rebalance_start','suffix':'count','labels':['name']},
{'name':'counters.rebalance_fail','id':'counters.rebalance_fail','suffix':'count','labels':['name']},
{'name':'counters.rebalance_node','id':'counters.rebalance_node','suffix':'count','labels':['name']}
]
},
'nodes': {
'url': '/pools/nodes/',
'metrics': [
{'name':'systemStats.cpu_utilization_rate','id':'systemStats.cpu_utilization_rate','suffix':'percent','labels':['name','hostname']},
{'name':'systemStats.swap_total','id':'systemStats.swap_total','suffix':'bytes','labels':['name','hostname']},
{'name':'systemStats.swap_used','id':'systemStats.swap_used','suffix':'bytes','labels':['name','hostname']},
{'name':'systemStats.mem_total','id':'systemStats.mem_total','suffix':'bytes','labels':['name','hostname']},
{'name':'systemStats.mem_free','id':'systemStats.mem_free','suffix':'bytes','labels':['name','hostname']},
{'name':'interestingStats.couch_docs_actual_disk_size','id':'interestingStats.couch_docs_actual_disk_size','suffix':'bytes','labels':['name','hostname']},
{'name':'interestingStats.couch_docs_data_size','id':'interestingStats.couch_docs_data_size','suffix':'bytes','labels':['name','hostname']},
{'name':'interestingStats.couch_views_actual_disk_size','id':'interestingStats.couch_views_actual_disk_size','suffix':'bytes','labels':['name','hostname']},
{'name':'interestingStats.couch_views_data_size','id':'interestingStats.couch_views_data_size','suffix':'bytes','labels':['name','hostname']},
{'name':'interestingStats.mem_used','id':'interestingStats.mem_used','suffix':'bytes','labels':['name','hostname']},
{'name':'interestingStats.ops','id':'interestingStats.ops','suffix':'count','labels':['name','hostname']},
{'name':'interestingStats.curr_items','id':'interestingStats.curr_items','suffix':'count','labels':['name','hostname']},
{'name':'interestingStats.curr_items_tot','id':'interestingStats.curr_items_tot','suffix':'count','labels':['name','hostname']},
{'name':'interestingStats.vb_replica_curr_items','id':'interestingStats.vb_replica_curr_items','suffix':'count','labels':['name','hostname']},
{'name':'interestingStats.couch_spatial_disk_size','id':'interestingStats.couch_spatial_disk_size','suffix':'bytes','labels':['name','hostname']},
{'name':'interestingStats.couch_spatial_data_size','id':'interestingStats.couch_spatial_data_size','suffix':'bytes','labels':['name','hostname']},
{'name':'interestingStats.cmd_get','id':'interestingStats.cmd_get','suffix':'count','labels':['name','hostname']},
{'name':'interestingStats.get_hits','id':'interestingStats.get_hits','suffix':'count','labels':['name','hostname']},
{'name':'interestingStats.ep_bg_fetched','id':'interestingStats.ep_bg_fetched','suffix':'count','labels':['name','hostname']}
]
},
'buckets': {
'url': '/pools/default/buckets/',
'metrics': [
{'name':'basicStats.quotaPercentUsed','id':'basicStats.quotaPercentUsed','suffix':'percent','labels':['name','bucket']},
{'name':'basicStats.opsPerSec','id':'basicStats.opsPerSec','suffix':'count','labels':['name','bucket']},
{'name':'basicStats.diskFetches','id':'basicStats.diskFetches','suffix':'percent','labels':['name','bucket']},
{'name':'basicStats.itemCount','id':'basicStats.itemCount','suffix':'percent','labels':['name','bucket']},
{'name':'basicStats.diskUsed','id':'basicStats.diskUsed','suffix':'bytes','labels':['name','bucket']},
{'name':'basicStats.dataUsed','id':'basicStats.dataUsed','suffix':'bytes','labels':['name','bucket']},
{'name':'basicStats.memUsed','id':'basicStats.memUsed','suffix':'bytes','labels':['name','bucket']}
],
'bucket_stats': [
{'name':'avg_bg_wait_time','id':'avg_bg_wait_time','suffix':'seconds','labels':['name','bucket']},
{'name':'avg_disk_commit_time','id':'avg_disk_commit_time','suffix':'seconds','labels':['name','bucket']},
{'name':'avg_disk_update_time','id':'avg_disk_update_time','suffix':'seconds','labels':['name','bucket']},
{'name':'bg_wait_count','id':'bg_wait_count','suffix':'count','labels':['name','bucket']},
{'name':'bg_wait_total','id':'bg_wait_total','suffix':'count','labels':['name','bucket']},
{'name':'bytes_read','id':'bytes_read','suffix':'bytes','labels':['name','bucket']},
{'name':'bytes_written','id':'bytes_written','suffix':'bytes','labels':['name','bucket']},
{'name':'cas_badval','id':'cas_badval','suffix':'count','labels':['name','bucket']},
{'name':'cas_hits','id':'cas_hits','suffix':'count','labels':['name','bucket']},
{'name':'cas_misses','id':'cas_misses','suffix':'count','labels':['name','bucket']},
{'name':'cmd_get','id':'cmd_get','suffix':'count','labels':['name','bucket']},
{'name':'cmd_set','id':'cmd_set','suffix':'count','labels':['name','bucket']},
{'name':'couch_docs_data_size','id':'couch_docs_data_size','suffix':'bytes','labels':['name','bucket']},
{'name':'couch_docs_disk_size','id':'couch_docs_disk_size','suffix':'bytes','labels':['name','bucket']},
{'name':'cpu_idle_ms','id':'cpu_idle_ms','suffix':'milliseconds','labels':['name','bucket']},
{'name':'cpu_local_ms','id':'cpu_local_ms','suffix':'milliseconds','labels':['name','bucket']},
{'name':'cpu_utilization_rate','id':'cpu_utilization_rate','suffix':'percent','labels':['name','bucket']},
{'name':'curr_connections','id':'curr_connections','suffix':'count','labels':['name','bucket']},
{'name':'curr_items','id':'curr_items','suffix':'count','labels':['name','bucket']},
{'name':'curr_items_tot','id':'curr_items_tot','suffix':'count','labels':['name','bucket']},
{'name':'decr_hits','id':'decr_hits','suffix':'count','labels':['name','bucket']},
{'name':'decr_misses','id':'decr_misses','suffix':'count','labels':['name','bucket']},
{'name':'delete_hits','id':'delete_hits','suffix':'count','labels':['name','bucket']},
{'name':'delete_misses','id':'delete_misses','suffix':'count','labels':['name','bucket']},
{'name':'disk_commit_count','id':'disk_commit_count','suffix':'count','labels':['name','bucket']},
{'name':'disk_commit_total','id':'disk_commit_total','suffix':'count','labels':['name','bucket']},
{'name':'disk_update_count','id':'disk_update_count','suffix':'count','labels':['name','bucket']},
{'name':'disk_update_total','id':'disk_update_total','suffix':'count','labels':['name','bucket']},
{'name':'disk_write_queue','id':'disk_write_queue','suffix':'count','labels':['name','bucket']},
{'name':'ep_bg_fetched','id':'ep_bg_fetched','suffix':'fetches/second','labels':['name','bucket']},
{'name':'ep_cache_miss_rate','id':'ep_cache_miss_rate','suffix':'percent','labels':['name','bucket']},
{'name':'ep_dcp_2i_backoff','id':'ep_dcp_2i_backoff','suffix':'NA','labels':['name','bucket']},
{'name':'ep_dcp_2i_count','id':'ep_dcp_2i_count','suffix':'NA','labels':['name','bucket']},
{'name':'ep_dcp_2i_items_remaining','id':'ep_dcp_2i_items_remaining','suffix':'NA','labels':['name','bucket']},
{'name':'ep_dcp_2i_items_sent','id':'ep_dcp_2i_items_sent','suffix':'NA','labels':['name','bucket']},
{'name':'ep_dcp_2i_producer_count','id':'ep_dcp_2i_producer_count','suffix':'NA','labels':['name','bucket']},
{'name':'ep_dcp_2i_total_backlog_size','id':'ep_dcp_2i_total_backlog_size','suffix':'NA','labels':['name','bucket']},
{'name':'ep_dcp_2i_total_bytes','id':'ep_dcp_2i_total_bytes','suffix':'NA','labels':['name','bucket']},
{'name':'ep_dcp_other_backoff','id':'ep_dcp_other_backoff','suffix':'NA','labels':['name','bucket']},
{'name':'ep_dcp_other_count','id':'ep_dcp_other_count','suffix':'NA','labels':['name','bucket']},
{'name':'ep_dcp_other_items_remaining','id':'ep_dcp_other_items_remaining','suffix':'NA','labels':['name','bucket']},
{'name':'ep_dcp_other_items_sent','id':'ep_dcp_other_items_sent','suffix':'NA','labels':['name','bucket']},
{'name':'ep_dcp_other_producer_count','id':'ep_dcp_other_producer_count','suffix':'NA','labels':['name','bucket']},
{'name':'ep_dcp_other_total_backlog_size','id':'ep_dcp_other_total_backlog_size','suffix':'NA','labels':['name','bucket']},
{'name':'ep_dcp_other_total_bytes','id':'ep_dcp_other_total_bytes','suffix':'NA','labels':['name','bucket']},
{'name':'ep_dcp_replica_backoff','id':'ep_dcp_replica_backoff','suffix':'NA','labels':['name','bucket']},
{'name':'ep_dcp_replica_count','id':'ep_dcp_replica_count','suffix':'NA','labels':['name','bucket']},
{'name':'ep_dcp_replica_items_remaining','id':'ep_dcp_replica_items_remaining','suffix':'NA','labels':['name','bucket']},
{'name':'ep_dcp_replica_items_sent','id':'ep_dcp_replica_items_sent','suffix':'NA','labels':['name','bucket']},
{'name':'ep_dcp_replica_producer_count','id':'ep_dcp_replica_producer_count','suffix':'NA','labels':['name','bucket']},
{'name':'ep_dcp_replica_total_backlog_size','id':'ep_dcp_replica_total_backlog_size','suffix':'NA','labels':['name','bucket']},
{'name':'ep_dcp_replica_total_bytes','id':'ep_dcp_replica_total_bytes','suffix':'NA','labels':['name','bucket']},
{'name':'ep_dcp_views_backoff','id':'ep_dcp_views_backoff','suffix':'NA','labels':['name','bucket']},
{'name':'ep_dcp_views_count','id':'ep_dcp_views_count','suffix':'NA','labels':['name','bucket']},
{'name':'ep_dcp_views_items_remaining','id':'ep_dcp_views_items_remaining','suffix':'NA','labels':['name','bucket']},
{'name':'ep_dcp_views_items_sent','id':'ep_dcp_views_items_sent','suffix':'NA','labels':['name','bucket']},
{'name':'ep_dcp_views_producer_count','id':'ep_dcp_views_producer_count','suffix':'NA','labels':['name','bucket']},
{'name':'ep_dcp_views_total_backlog_size','id':'ep_dcp_views_total_backlog_size','suffix':'NA','labels':['name','bucket']},
{'name':'ep_dcp_views_total_bytes','id':'ep_dcp_views_total_bytes','suffix':'NA','labels':['name','bucket']},
{'name':'ep_dcp_xdcr_backoff','id':'ep_dcp_xdcr_backoff','suffix':'NA','labels':['name','bucket']},
{'name':'ep_dcp_xdcr_count','id':'ep_dcp_xdcr_count','suffix':'NA','labels':['name','bucket']},
{'name':'ep_dcp_xdcr_items_remaining','id':'ep_dcp_xdcr_items_remaining','suffix':'NA','labels':['name','bucket']},
{'name':'ep_dcp_xdcr_items_sent','id':'ep_dcp_xdcr_items_sent','suffix':'NA','labels':['name','bucket']},
{'name':'ep_dcp_xdcr_producer_count','id':'ep_dcp_xdcr_producer_count','suffix':'NA','labels':['name','bucket']},
{'name':'ep_dcp_xdcr_total_backlog_size','id':'ep_dcp_xdcr_total_backlog_size','suffix':'NA','labels':['name','bucket']},
{'name':'ep_dcp_xdcr_total_bytes','id':'ep_dcp_xdcr_total_bytes','suffix':'NA','labels':['name','bucket']},
{'name':'ep_diskqueue_drain','id':'ep_diskqueue_drain','suffix':'count','labels':['name','bucket']},
{'name':'ep_diskqueue_fill','id':'ep_diskqueue_fill','suffix':'count','labels':['name','bucket']},
{'name':'ep_diskqueue_items','id':'ep_diskqueue_items','suffix':'NA','labels':['name','bucket']},
{'name':'ep_flusher_todo','id':'ep_flusher_todo','suffix':'count','labels':['name','bucket']},
{'name':'ep_item_commit_failed','id':'ep_item_commit_failed','suffix':'count','labels':['name','bucket']},
{'name':'ep_kv_size','id':'ep_kv_size','suffix':'NA','labels':['name','bucket']},
{'name':'ep_max_size','id':'ep_max_size','suffix':'bytes','labels':['name','bucket']},
{'name':'ep_mem_high_wat','id':'ep_mem_high_wat','suffix':'bytes','labels':['name','bucket']},
{'name':'ep_mem_low_wat','id':'ep_mem_low_wat','suffix':'bytes','labels':['name','bucket']},
{'name':'ep_meta_data_memory','id':'ep_meta_data_memory','suffix':'NA','labels':['name','bucket']},
{'name':'ep_num_non_resident','id':'ep_num_non_resident','suffix':'count','labels':['name','bucket']},
{'name':'ep_num_ops_del_meta','id':'ep_num_ops_del_meta','suffix':'NA','labels':['name','bucket']},
{'name':'ep_num_ops_del_ret_meta','id':'ep_num_ops_del_ret_meta','suffix':'NA','labels':['name','bucket']},
{'name':'ep_num_ops_get_meta','id':'ep_num_ops_get_meta','suffix':'NA','labels':['name','bucket']},
{'name':'ep_num_ops_set_meta','id':'ep_num_ops_set_meta','suffix':'NA','labels':['name','bucket']},
{'name':'ep_num_ops_set_ret_meta','id':'ep_num_ops_set_ret_meta','suffix':'NA','labels':['name','bucket']},
{'name':'ep_num_value_ejects','id':'ep_num_value_ejects','suffix':'count','labels':['name','bucket']},
{'name':'ep_oom_errors','id':'ep_oom_errors','suffix':'count','labels':['name','bucket']},
{'name':'ep_ops_create','id':'ep_ops_create','suffix':'count','labels':['name','bucket']},
{'name':'ep_ops_update','id':'ep_ops_update','suffix':'count','labels':['name','bucket']},
{'name':'ep_overhead','id':'ep_overhead','suffix':'bytes','labels':['name','bucket']},
{'name':'ep_queue_size','id':'ep_queue_size','suffix':'count','labels':['name','bucket']},
{'name':'ep_resident_items_rate','id':'ep_resident_items_rate','suffix':'count','labels':['name','bucket']},
{'name':'ep_tap_rebalance_count','id':'ep_tap_rebalance_count','suffix':'NA','labels':['name','bucket']},
{'name':'ep_tap_rebalance_qlen','id':'ep_tap_rebalance_qlen','suffix':'NA','labels':['name','bucket']},
{'name':'ep_tap_rebalance_queue_backfillremaining','id':'ep_tap_rebalance_queue_backfillremaining','suffix':'NA','labels':['name','bucket']},
{'name':'ep_tap_rebalance_queue_backoff','id':'ep_tap_rebalance_queue_backoff','suffix':'NA','labels':['name','bucket']},
{'name':'ep_tap_rebalance_queue_drain','id':'ep_tap_rebalance_queue_drain','suffix':'NA','labels':['name','bucket']},
{'name':'ep_tap_rebalance_queue_fill','id':'ep_tap_rebalance_queue_fill','suffix':'NA','labels':['name','bucket']},
{'name':'ep_tap_rebalance_queue_itemondisk','id':'ep_tap_rebalance_queue_itemondisk','suffix':'NA','labels':['name','bucket']},
{'name':'ep_tap_rebalance_total_backlog_size','id':'ep_tap_rebalance_total_backlog_size','suffix':'NA','labels':['name','bucket']},
{'name':'ep_tap_replica_count','id':'ep_tap_replica_count','suffix':'NA','labels':['name','bucket']},
{'name':'ep_tap_replica_qlen','id':'ep_tap_replica_qlen','suffix':'NA','labels':['name','bucket']},
{'name':'ep_tap_replica_queue_backfillremaining','id':'ep_tap_replica_queue_backfillremaining','suffix':'NA','labels':['name','bucket']},
{'name':'ep_tap_replica_queue_backoff','id':'ep_tap_replica_queue_backoff','suffix':'NA','labels':['name','bucket']},
{'name':'ep_tap_replica_queue_drain','id':'ep_tap_replica_queue_drain','suffix':'count','labels':['name','bucket']},
{'name':'ep_tap_replica_queue_fill','id':'ep_tap_replica_queue_fill','suffix':'NA','labels':['name','bucket']},
{'name':'ep_tap_replica_queue_itemondisk','id':'ep_tap_replica_queue_itemondisk','suffix':'NA','labels':['name','bucket']},
{'name':'ep_tap_replica_total_backlog_size','id':'ep_tap_replica_total_backlog_size','suffix':'NA','labels':['name','bucket']},
{'name':'ep_tap_total_count','id':'ep_tap_total_count','suffix':'NA','labels':['name','bucket']},
{'name':'ep_tap_total_qlen','id':'ep_tap_total_qlen','suffix':'NA','labels':['name','bucket']},
{'name':'ep_tap_total_queue_backfillremaining','id':'ep_tap_total_queue_backfillremaining','suffix':'NA','labels':['name','bucket']},
{'name':'ep_tap_total_queue_backoff','id':'ep_tap_total_queue_backoff','suffix':'NA','labels':['name','bucket']},
{'name':'ep_tap_total_queue_drain','id':'ep_tap_total_queue_drain','suffix':'count','labels':['name','bucket']},
{'name':'ep_tap_total_queue_fill','id':'ep_tap_total_queue_fill','suffix':'count','labels':['name','bucket']},
{'name':'ep_tap_total_queue_itemondisk','id':'ep_tap_total_queue_itemondisk','suffix':'NA','labels':['name','bucket']},
{'name':'ep_tap_total_total_backlog_size','id':'ep_tap_total_total_backlog_size','suffix':'count','labels':['name','bucket']},
{'name':'ep_tap_user_count','id':'ep_tap_user_count','suffix':'NA','labels':['name','bucket']},
{'name':'ep_tap_user_qlen','id':'ep_tap_user_qlen','suffix':'NA','labels':['name','bucket']},
{'name':'ep_tap_user_queue_backfillremaining','id':'ep_tap_user_queue_backfillremaining','suffix':'NA','labels':['name','bucket']},
{'name':'ep_tap_user_queue_backoff','id':'ep_tap_user_queue_backoff','suffix':'NA','labels':['name','bucket']},
{'name':'ep_tap_user_queue_drain','id':'ep_tap_user_queue_drain','suffix':'NA','labels':['name','bucket']},
{'name':'ep_tap_user_queue_fill','id':'ep_tap_user_queue_fill','suffix':'NA','labels':['name','bucket']},
{'name':'ep_tap_user_queue_itemondisk','id':'ep_tap_user_queue_itemondisk','suffix':'NA','labels':['name','bucket']},
{'name':'ep_tap_user_total_backlog_size','id':'ep_tap_user_total_backlog_size','suffix':'NA','labels':['name','bucket']},
{'name':'ep_tmp_oom_errors','id':'ep_tmp_oom_errors','suffix':'count','labels':['name','bucket']},
{'name':'ep_vb_total','id':'ep_vb_total','suffix':'NA','labels':['name','bucket']},
{'name':'evictions','id':'evictions','suffix':'count','labels':['name','bucket']},
{'name':'get_hits','id':'get_hits','suffix':'count','labels':['name','bucket']},
{'name':'get_misses','id':'get_misses','suffix':'count','labels':['name','bucket']},
{'name':'hibernated_requests','id':'hibernated_requests','suffix':'NA','labels':['name','bucket']},
{'name':'hibernated_waked','id':'hibernated_waked','suffix':'NA','labels':['name','bucket']},
{'name':'hit_ratio','id':'hit_ratio','suffix':'percent','labels':['name','bucket']},
{'name':'incr_hits','id':'incr_hits','suffix':'count','labels':['name','bucket']},
{'name':'incr_misses','id':'incr_misses','suffix':'count','labels':['name','bucket']},
{'name':'mem_actual_free','id':'mem_actual_free','suffix':'NA','labels':['name','bucket']},
{'name':'mem_actual_used','id':'mem_actual_used','suffix':'NA','labels':['name','bucket']},
{'name':'mem_free','id':'mem_free','suffix':'bytes','labels':['name','bucket']},
{'name':'mem_total','id':'mem_total','suffix':'bytes','labels':['name','bucket']},
{'name':'mem_used','id':'mem_used','suffix':'bytes','labels':['name','bucket']},
{'name':'mem_used_sys','id':'mem_used_sys','suffix':'bytes','labels':['name','bucket']},
{'name':'misses','id':'misses','suffix':'count','labels':['name','bucket']},
{'name':'ops','id':'ops','suffix':'count','labels':['name','bucket']},
{'name':'rest_requests','id':'rest_requests','suffix':'NA','labels':['name','bucket']},
{'name':'swap_total','id':'swap_total','suffix':'NA','labels':['name','bucket']},
{'name':'swap_used','id':'swap_used','suffix':'NA','labels':['name','bucket']},
{'name':'timestamp','id':'timestamp','suffix':'NA','labels':['name','bucket']},
{'name':'vb_active_eject','id':'vb_active_eject','suffix':'NA','labels':['name','bucket']},
{'name':'vb_active_itm_memory','id':'vb_active_itm_memory','suffix':'NA','labels':['name','bucket']},
{'name':'vb_active_meta_data_memory','id':'vb_active_meta_data_memory','suffix':'NA','labels':['name','bucket']},
{'name':'vb_active_num','id':'vb_active_num','suffix':'count','labels':['name','bucket']},
{'name':'vb_active_num_non_resident','id':'vb_active_num_non_resident','suffix':'NA','labels':['name','bucket']},
{'name':'vb_active_ops_create','id':'vb_active_ops_create','suffix':'NA','labels':['name','bucket']},
{'name':'vb_active_ops_update','id':'vb_active_ops_update','suffix':'NA','labels':['name','bucket']},
{'name':'vb_active_queue_age','id':'vb_active_queue_age','suffix':'NA','labels':['name','bucket']},
{'name':'vb_active_queue_drain','id':'vb_active_queue_drain','suffix':'count','labels':['name','bucket']},
{'name':'vb_active_queue_fill','id':'vb_active_queue_fill','suffix':'NA','labels':['name','bucket']},
{'name':'vb_active_queue_size','id':'vb_active_queue_size','suffix':'count','labels':['name','bucket']},
{'name':'vb_active_resident_items_ratio','id':'vb_active_resident_items_ratio','suffix':'count','labels':['name','bucket']},
{'name':'vb_avg_active_queue_age','id':'vb_avg_active_queue_age','suffix':'NA','labels':['name','bucket']},
{'name':'vb_avg_pending_queue_age','id':'vb_avg_pending_queue_age','suffix':'NA','labels':['name','bucket']},
{'name':'vb_avg_replica_queue_age','id':'vb_avg_replica_queue_age','suffix':'NA','labels':['name','bucket']},
{'name':'vb_avg_total_queue_age','id':'vb_avg_total_queue_age','suffix':'seconds','labels':['name','bucket']},
{'name':'vb_pending_curr_items','id':'vb_pending_curr_items','suffix':'NA','labels':['name','bucket']},
{'name':'vb_pending_eject','id':'vb_pending_eject','suffix':'NA','labels':['name','bucket']},
{'name':'vb_pending_itm_memory','id':'vb_pending_itm_memory','suffix':'NA','labels':['name','bucket']},
{'name':'vb_pending_meta_data_memory','id':'vb_pending_meta_data_memory','suffix':'NA','labels':['name','bucket']},
{'name':'vb_pending_num','id':'vb_pending_num','suffix':'NA','labels':['name','bucket']},
{'name':'vb_pending_num_non_resident','id':'vb_pending_num_non_resident','suffix':'NA','labels':['name','bucket']},
{'name':'vb_pending_ops_create','id':'vb_pending_ops_create','suffix':'count','labels':['name','bucket']},
{'name':'vb_pending_ops_update','id':'vb_pending_ops_update','suffix':'NA','labels':['name','bucket']},
{'name':'vb_pending_queue_age','id':'vb_pending_queue_age','suffix':'NA','labels':['name','bucket']},
{'name':'vb_pending_queue_drain','id':'vb_pending_queue_drain','suffix':'NA','labels':['name','bucket']},
{'name':'vb_pending_queue_fill','id':'vb_pending_queue_fill','suffix':'count','labels':['name','bucket']},
{'name':'vb_pending_queue_size','id':'vb_pending_queue_size','suffix':'NA','labels':['name','bucket']},
{'name':'vb_pending_resident_items_ratio','id':'vb_pending_resident_items_ratio','suffix':'count','labels':['name','bucket']},
{'name':'vb_replica_curr_items','id':'vb_replica_curr_items','suffix':'count','labels':['name','bucket']},
{'name':'vb_replica_eject','id':'vb_replica_eject','suffix':'NA','labels':['name','bucket']},
{'name':'vb_replica_itm_memory','id':'vb_replica_itm_memory','suffix':'NA','labels':['name','bucket']},
{'name':'vb_replica_meta_data_memory','id':'vb_replica_meta_data_memory','suffix':'bytes','labels':['name','bucket']},
{'name':'vb_replica_num','id':'vb_replica_num','suffix':'bytes','labels':['name','bucket']},
{'name':'vb_replica_num_non_resident','id':'vb_replica_num_non_resident','suffix':'bytes','labels':['name','bucket']},
{'name':'vb_replica_ops_create','id':'vb_replica_ops_create','suffix':'NA','labels':['name','bucket']},
{'name':'vb_replica_ops_update','id':'vb_replica_ops_update','suffix':'NA','labels':['name','bucket']},
{'name':'vb_replica_queue_age','id':'vb_replica_queue_age','suffix':'NA','labels':['name','bucket']},
{'name':'vb_replica_queue_drain','id':'vb_replica_queue_drain','suffix':'NA','labels':['name','bucket']},
{'name':'vb_replica_queue_fill','id':'vb_replica_queue_fill','suffix':'NA','labels':['name','bucket']},
{'name':'vb_replica_queue_size','id':'vb_replica_queue_size','suffix':'count','labels':['name','bucket']},
{'name':'vb_replica_resident_items_ratio','id':'vb_replica_resident_items_ratio','suffix':'count','labels':['name','bucket']},
{'name':'vb_total_queue_age','id':'vb_total_queue_age','suffix':'NA','labels':['name','bucket']},
{'name':'xdc_ops','id':'xdc_ops','suffix':'count','labels':['name','bucket']}
]
}
}
"""
Return available metrics
:rtype: dictionary
"""
def get_metrics():
return couchbase_metrics
| 106.707692
| 168
| 0.620819
| 3,384
| 27,744
| 4.745863
| 0.04669
| 0.144458
| 0.193275
| 0.239103
| 0.805853
| 0.702677
| 0.645704
| 0.480075
| 0.372105
| 0.248443
| 0
| 0.00057
| 0.114872
| 27,744
| 259
| 169
| 107.119691
| 0.653419
| 0.000793
| 0
| 0.011858
| 0
| 0
| 0.623405
| 0.236398
| 0
| 0
| 0
| 0
| 0
| 1
| 0.003953
| false
| 0
| 0
| 0.003953
| 0.007905
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
79e0d9668e594b73835c74ef110cce2be52447f0
| 31
|
py
|
Python
|
test2.py
|
TK5QK/cs3240-labdemo
|
3b41990e21c9c03ba3026a5fd6e2d86e04b8ac01
|
[
"MIT"
] | null | null | null |
test2.py
|
TK5QK/cs3240-labdemo
|
3b41990e21c9c03ba3026a5fd6e2d86e04b8ac01
|
[
"MIT"
] | null | null | null |
test2.py
|
TK5QK/cs3240-labdemo
|
3b41990e21c9c03ba3026a5fd6e2d86e04b8ac01
|
[
"MIT"
] | null | null | null |
print("This is a second test")
| 15.5
| 30
| 0.709677
| 6
| 31
| 3.666667
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.16129
| 31
| 1
| 31
| 31
| 0.846154
| 0
| 0
| 0
| 0
| 0
| 0.677419
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 6
|
03112a5403db3eea8424625ce545e846bd29c314
| 24
|
py
|
Python
|
CMSIS/DSP/PythonWrapper/cmsisdsp/__init__.py
|
DavidLesnjak/CMSIS_5
|
e0848410d137758a3356a5ee94ca4501cea708a8
|
[
"Apache-2.0"
] | 2,293
|
2016-02-25T06:47:33.000Z
|
2022-03-29T16:44:02.000Z
|
CMSIS/DSP/PythonWrapper/cmsisdsp/__init__.py
|
DavidLesnjak/CMSIS_5
|
e0848410d137758a3356a5ee94ca4501cea708a8
|
[
"Apache-2.0"
] | 1,125
|
2016-02-27T09:56:01.000Z
|
2022-03-31T13:57:05.000Z
|
CMSIS/DSP/PythonWrapper/cmsisdsp/__init__.py
|
DavidLesnjak/CMSIS_5
|
e0848410d137758a3356a5ee94ca4501cea708a8
|
[
"Apache-2.0"
] | 1,160
|
2016-02-27T09:06:10.000Z
|
2022-03-31T19:06:24.000Z
|
from internal import *
| 8
| 22
| 0.75
| 3
| 24
| 6
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.208333
| 24
| 2
| 23
| 12
| 0.947368
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
031c23470f490e0d4d988e9eb586168cbcd654ca
| 46
|
py
|
Python
|
napari_properties_plotter/_dock_widget.py
|
brisvag/napari-properties-plotter
|
6d62b9a3b61fdca30c10a61474fd6ae8ad9d617a
|
[
"BSD-3-Clause"
] | 8
|
2021-05-21T20:58:09.000Z
|
2021-12-21T14:36:04.000Z
|
napari_properties_plotter/_dock_widget.py
|
brisvag/napari-properties-plotter
|
6d62b9a3b61fdca30c10a61474fd6ae8ad9d617a
|
[
"BSD-3-Clause"
] | 8
|
2021-06-02T07:26:06.000Z
|
2021-12-21T12:45:11.000Z
|
napari_properties_plotter/_dock_widget.py
|
brisvag/napari-properties-plotter
|
6d62b9a3b61fdca30c10a61474fd6ae8ad9d617a
|
[
"BSD-3-Clause"
] | 2
|
2021-07-07T09:06:26.000Z
|
2021-12-21T01:37:43.000Z
|
from .property_plotter import PropertyPlotter
| 23
| 45
| 0.891304
| 5
| 46
| 8
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.086957
| 46
| 1
| 46
| 46
| 0.952381
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
032351e89949b13d0d7e7e0b11e77e54570f3c23
| 34
|
py
|
Python
|
cotk/scripts/__init__.py
|
Cospui/cotk
|
9038420787f7251049534baf3b35eac538a82148
|
[
"Apache-2.0"
] | null | null | null |
cotk/scripts/__init__.py
|
Cospui/cotk
|
9038420787f7251049534baf3b35eac538a82148
|
[
"Apache-2.0"
] | null | null | null |
cotk/scripts/__init__.py
|
Cospui/cotk
|
9038420787f7251049534baf3b35eac538a82148
|
[
"Apache-2.0"
] | null | null | null |
from .report import main as report
| 34
| 34
| 0.823529
| 6
| 34
| 4.666667
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.147059
| 34
| 1
| 34
| 34
| 0.965517
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
ceed23eb19da9a54f88062041421bd749d870eee
| 28
|
py
|
Python
|
Libs/TeraStatus/__init__.py
|
Haato3o/Heeto-Bot
|
50f5fe4468cabd9ac61097cfdeb77411a0e61d16
|
[
"MIT"
] | 1
|
2019-09-09T23:52:53.000Z
|
2019-09-09T23:52:53.000Z
|
Libs/TeraStatus/__init__.py
|
Haato3o/Heeto-Bot
|
50f5fe4468cabd9ac61097cfdeb77411a0e61d16
|
[
"MIT"
] | 4
|
2020-01-03T06:40:18.000Z
|
2020-01-09T05:35:37.000Z
|
Libs/TeraStatus/__init__.py
|
Haato3o/Heeto-Bot
|
50f5fe4468cabd9ac61097cfdeb77411a0e61d16
|
[
"MIT"
] | null | null | null |
from .Servers import Servers
| 28
| 28
| 0.857143
| 4
| 28
| 6
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.107143
| 28
| 1
| 28
| 28
| 0.96
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
3045170f69d34e90050241e6da389decf4835f2c
| 46
|
py
|
Python
|
haloPropertyAnalysis/__init__.py
|
rbiswas4/HaloComparisons
|
33252412edfec78aad5564c92f7d4a67753da69a
|
[
"MIT"
] | null | null | null |
haloPropertyAnalysis/__init__.py
|
rbiswas4/HaloComparisons
|
33252412edfec78aad5564c92f7d4a67753da69a
|
[
"MIT"
] | null | null | null |
haloPropertyAnalysis/__init__.py
|
rbiswas4/HaloComparisons
|
33252412edfec78aad5564c92f7d4a67753da69a
|
[
"MIT"
] | null | null | null |
from . import comparisons
from . import utils
| 15.333333
| 25
| 0.782609
| 6
| 46
| 6
| 0.666667
| 0.555556
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.173913
| 46
| 2
| 26
| 23
| 0.947368
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
304a0fe9ef887ce64fad41a52197c5d753095c3c
| 34
|
py
|
Python
|
pyqt_timer_label/__init__.py
|
yjg30737/pyqt-timer-label
|
33c72e010f2af8833d4a22d85738d0947075c913
|
[
"MIT"
] | null | null | null |
pyqt_timer_label/__init__.py
|
yjg30737/pyqt-timer-label
|
33c72e010f2af8833d4a22d85738d0947075c913
|
[
"MIT"
] | null | null | null |
pyqt_timer_label/__init__.py
|
yjg30737/pyqt-timer-label
|
33c72e010f2af8833d4a22d85738d0947075c913
|
[
"MIT"
] | null | null | null |
from .timerLabel import TimerLabel
| 34
| 34
| 0.882353
| 4
| 34
| 7.5
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.088235
| 34
| 1
| 34
| 34
| 0.967742
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
305f65785718284ad93fc4c86b08d51377a27cec
| 42
|
py
|
Python
|
app/pyfatoora/__init__.py
|
NafieAlhilaly/api-fatoora
|
da9e0026943d9ed84b7954454e45ee38db76bc74
|
[
"MIT"
] | 13
|
2021-11-02T13:43:36.000Z
|
2022-01-12T13:02:10.000Z
|
app/pyfatoora/__init__.py
|
SudanAlhilali/api-fatoora
|
49ac5dc7be851d778b38998d3ea745df47aca0cf
|
[
"MIT"
] | 1
|
2022-01-07T20:23:52.000Z
|
2022-01-07T20:28:01.000Z
|
app/pyfatoora/__init__.py
|
SudanAlhilali/api-fatoora
|
49ac5dc7be851d778b38998d3ea745df47aca0cf
|
[
"MIT"
] | 10
|
2021-11-03T15:24:43.000Z
|
2021-12-27T16:36:08.000Z
|
from pyfatoora.pyfatoora import PyFatoora
| 21
| 41
| 0.880952
| 5
| 42
| 7.4
| 0.6
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.095238
| 42
| 1
| 42
| 42
| 0.973684
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
305ffe6bcbfa69dcc84690f4812251ef2de0bfac
| 20,792
|
py
|
Python
|
networking_cisco/tests/unit/cisco/cfg_agent/test_routing_svc_helper_aci.py
|
mail2nsrajesh/networking-cisco
|
e072b149d26e0a9dd9d68755c874dfc047dc9c91
|
[
"Apache-2.0"
] | null | null | null |
networking_cisco/tests/unit/cisco/cfg_agent/test_routing_svc_helper_aci.py
|
mail2nsrajesh/networking-cisco
|
e072b149d26e0a9dd9d68755c874dfc047dc9c91
|
[
"Apache-2.0"
] | null | null | null |
networking_cisco/tests/unit/cisco/cfg_agent/test_routing_svc_helper_aci.py
|
mail2nsrajesh/networking-cisco
|
e072b149d26e0a9dd9d68755c874dfc047dc9c91
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2016 Cisco Systems, Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
import mock
from oslo_config import cfg
from oslo_utils import uuidutils
from neutron.tests import base
from networking_cisco import backwards_compatibility as bc
from networking_cisco.plugins.cisco.cfg_agent import cfg_agent
from networking_cisco.plugins.cisco.cfg_agent.service_helpers import (
routing_svc_helper as svc_helper)
from networking_cisco.plugins.cisco.cfg_agent.service_helpers import (
routing_svc_helper_aci as aci_svc_helper)
from networking_cisco.tests.unit.cisco.cfg_agent import (
test_routing_svc_helper as helper)
_uuid = uuidutils.generate_uuid
TEST_PHYS_IF = 'GigabitEthernet0/1/0'
TEST_VLAN = '3333'
TEST_GW_IP = '4.3.2.1'
TEST_CIDR = '4.3.2.0/24'
TEST_NET1 = 'mynewnet'
TEST_VLAN2 = '4444'
TEST_GW_IP2 = '5.4.3.1'
TEST_CIDR2 = '5.4.3.0/24'
TEST_NET2 = 'myothernet'


def create_hosting_info(vrf=None, net_name=TEST_NET1,
                        vlan=TEST_VLAN, gw_ip=TEST_GW_IP,
                        cidr=TEST_CIDR, if_config=None,
                        global_config=None, snat_subnets=None):
    """Build a fake 'hosting_info' dict for router test data.

    :param vrf: VRF id; a fresh UUID is generated when None
    :param net_name: external network name
    :param vlan: segmentation id
    :param gw_ip: gateway IP of the exposed subnet
    :param cidr: CIDR of the exposed subnet
    :param if_config: optional interface config; added only if truthy
    :param global_config: optional global config; added only if truthy
    :param snat_subnets: optional SNAT subnets; added only if truthy
    :returns: dict shaped like the plugin-provided hosting_info
    """
    if vrf is None:
        vrf = _uuid()
    hosting_info = {
        'vrf_id': vrf,
        'physical_interface': TEST_PHYS_IF,
        'network_name': net_name,
        # NOTE: the original wrote this key twice with the same value;
        # the duplicate literal has been removed (behavior unchanged).
        'segmentation_id': vlan,
        'gateway_ip': gw_ip,
        'cidr_exposed': cidr,
    }
    if if_config:
        hosting_info['interface_config'] = if_config
    if global_config:
        hosting_info['global_config'] = global_config
    if snat_subnets:
        hosting_info['snat_subnets'] = snat_subnets
    return hosting_info
class TestBasicRoutingOperationsAci(helper.TestBasicRoutingOperations):
    """Re-run the base routing-helper tests against the ACI service helper.

    Inherits the generic test suite and swaps in RoutingServiceHelperAci,
    with the low-level network/gateway hooks mocked out in setUp.
    """
    def setUp(self):
        super(TestBasicRoutingOperationsAci, self).setUp()
        # Replace the helper built by the base setUp with the ACI variant.
        self.routing_helper = aci_svc_helper.RoutingServiceHelperAci(
            helper.HOST, self.conf, self.agent)
        # Stub out the device-facing hooks so tests only exercise the
        # helper's bookkeeping, not real device configuration.
        self.routing_helper._internal_network_added = mock.Mock()
        self.routing_helper._external_gateway_added = mock.Mock()
        self.routing_helper._internal_network_removed = mock.Mock()
        self.routing_helper._external_gateway_removed = mock.Mock()
        self.routing_helper._enable_router_interface = mock.Mock()
        self.routing_helper._disable_router_interface = mock.Mock()
        self.driver = self._mock_driver_and_hosting_device(
            self.routing_helper)
    def test_process_router_throw_multiple_ipv4_subnets_error(self):
        """Multiple IPv4 subnets must still raise with a gw_port present."""
        ri, router = (
            self._test_process_router_throw_multiple_ipv4_subnets_error())
        router['gw_port'] = {'id': ''}
        self.assertRaises(svc_helper.MultipleIPv4SubnetsException,
                          self.routing_helper._process_router, ri)
    def test_process_router(self):
        # ACI helper does not exercise the admin-state path of the base test.
        super(TestBasicRoutingOperationsAci,
              self).test_process_router(test_admin_state=False)
    def test_process_msn_router(self):
        self._test_process_msn_router(test_admin_state=False)
    def test_process_router_2_rids_1_vrf(self):
        """Two routers sharing one VRF: create once, remove on last user."""
        driver = self._mock_driver_and_hosting_device(self.routing_helper)
        router1, ports = helper.prepare_router_data()
        ri1 = svc_helper.RouterInfo(router1['id'], router=router1)
        # Router #2 is like #1, except with some different IDs
        router2 = copy.deepcopy(router1)
        router2['id'] = _uuid()
        ri2 = svc_helper.RouterInfo(router2['id'], router=router2)
        h_info1 = create_hosting_info()
        h_info2 = copy.deepcopy(h_info1)  # same vrf_id as h_info1
        ri1.router['hosting_info'] = h_info1
        ri2.router['hosting_info'] = h_info2
        driver._get_vrf_name = mock.Mock(
            return_value=ri1.router['hosting_info']['vrf_id'])
        # First router in the VRF triggers VRF creation.
        self.routing_helper._process_router(ri1)
        vrf = ri1.router['hosting_info']['vrf_id']
        driver._get_vrf_name.assert_called_with(ri1)
        driver._do_create_vrf.assert_called_with(vrf)
        self.assertEqual(1, len(self.routing_helper._router_ids_by_vrf))
        self.assertEqual(1, len(self.routing_helper._router_ids_by_vrf[vrf]))
        driver._get_vrf_name.reset_mock()
        driver._do_create_vrf.reset_mock()
        # Second router joins the existing VRF: no second create.
        self.routing_helper._process_router(ri2)
        driver._get_vrf_name.assert_called_with(ri2)
        driver._do_create_vrf.assert_not_called()
        self.assertEqual(1, len(self.routing_helper._router_ids_by_vrf))
        self.assertEqual(2, len(self.routing_helper._router_ids_by_vrf[vrf]))
        # Removing router 1's gateway must NOT remove the shared VRF.
        del ri1.router['gw_port']
        driver._get_vrf_name.reset_mock()
        self.routing_helper._process_router(ri1)
        driver._get_vrf_name.assert_called_with(ri1)
        driver._remove_vrf.assert_not_called()
        self.assertEqual(1, len(self.routing_helper._router_ids_by_vrf))
        self.assertEqual(1, len(self.routing_helper._router_ids_by_vrf[vrf]))
        # Removing the last router's gateway removes the VRF and empties
        # the bookkeeping map.
        del ri2.router['gw_port']
        driver._get_vrf_name.reset_mock()
        driver._remove_vrf.reset_mock()
        self.routing_helper._process_router(ri2)
        driver._get_vrf_name.assert_called_with(ri2)
        driver._remove_vrf.assert_called_with(ri2)
        self.assertEqual({}, self.routing_helper._router_ids_by_vrf)
def _mock_driver_and_hosting_device(svc_helper):
    """Stub device reachability and the driver manager on *svc_helper*.

    Installs a MagicMock as the routing driver (both get_driver and
    set_driver return it) and makes the hosting device always reachable.
    Returns the driver mock so tests can assert against it.
    """
    mock_driver = mock.MagicMock()
    svc_helper._dev_status.is_hosting_device_reachable = mock.MagicMock(
        return_value=True)
    svc_helper._drivermgr.get_driver = mock.Mock(return_value=mock_driver)
    svc_helper._drivermgr.set_driver = mock.Mock(return_value=mock_driver)
    return mock_driver
class TestNetworkRoutingOperationsAci(base.BaseTestCase):
    """Exercise per-(VRF, external network) bookkeeping of the ACI helper.

    The four tests walk every combination of {1 VRF, 2 VRFs} x
    {1 network, 2 networks} over two routers and check the
    _router_ids_by_vrf_and_ext_net map plus when interfaces/NAT are
    actually torn down (itfc_deleted flag).
    """
    def setUp(self):
        super(TestNetworkRoutingOperationsAci, self).setUp()
        self.agent = mock.Mock()
        self.conf = cfg.ConfigOpts()
        self.conf.register_opts(bc.core_opts)
        self.conf.register_opts(cfg_agent.OPTS, "cfg_agent")
        # Patch the RPC plugin API so no real neutron plugin is contacted.
        self.l3pluginApi_cls_p = mock.patch(
            'networking_cisco.plugins.cisco.cfg_agent.service_helpers.'
            'routing_svc_helper.CiscoRoutingPluginApi')
        l3plugin_api_cls = self.l3pluginApi_cls_p.start()
        self.plugin_api = mock.Mock()
        l3plugin_api_cls.return_value = self.plugin_api
        self.plugin_api.get_routers = mock.MagicMock()
        # Disable the periodic loop and RPC connection side effects.
        self.looping_call_p = mock.patch(
            'oslo_service.loopingcall.FixedIntervalLoopingCall')
        self.looping_call_p.start()
        mock.patch('neutron.common.rpc.create_connection').start()
        self.routing_helper = aci_svc_helper.RoutingServiceHelperAci(
            helper.HOST, self.conf, self.agent)
        self.routing_helper._external_gateway_added = mock.Mock()
        self.routing_helper._external_gateway_removed = mock.Mock()
    def _set_driver_port_mocks(self, driver):
        # Stub the four port-level driver entry points the tests assert on.
        driver.internal_network_added = mock.Mock()
        driver.internal_network_removed = mock.Mock()
        driver.enable_internal_network_NAT = mock.Mock()
        driver.disable_internal_network_NAT = mock.Mock()
    def test_process_router_2_rids_1_vrf_1_network(self):
        """Two routers, one VRF, one network: delete only on last user."""
        driver = _mock_driver_and_hosting_device(self.routing_helper)
        self._set_driver_port_mocks(driver)
        router1, ports = helper.prepare_router_data()
        ri1 = svc_helper.RouterInfo(router1['id'], router=router1)
        # Router #2 is like #1, except with some different IDs
        router2 = copy.deepcopy(router1)
        router2['id'] = _uuid()
        ri2 = svc_helper.RouterInfo(router2['id'], router=router2)
        h_info1 = create_hosting_info()
        h_info2 = copy.deepcopy(h_info1)  # same vrf_id and network_name
        ri1.router['hosting_info'] = h_info1
        ri2.router['hosting_info'] = h_info2
        ex_gw_port1 = ri1.router.get('gw_port')
        ex_gw_port2 = ri2.router.get('gw_port')
        ex_gw_port1['hosting_info'] = h_info1
        ex_gw_port2['hosting_info'] = h_info2
        vrf = ri1.router['hosting_info']['vrf_id']
        driver._get_vrf_name = mock.Mock(return_value=vrf)
        self.routing_helper._process_router(ri1)
        driver.internal_network_added.assert_called_with(
            ri1, ports[0])
        driver.enable_internal_network_NAT.assert_called_with(
            ri1, ports[0], ex_gw_port1)
        v_n_r_dict = self.routing_helper._router_ids_by_vrf_and_ext_net
        network_name = h_info1['network_name']
        self.assertEqual(1, len(v_n_r_dict))
        self.assertEqual(1, len(v_n_r_dict[vrf]))
        self.assertEqual(1, len(v_n_r_dict[vrf][network_name]))
        driver.internal_network_added.reset_mock()
        driver.enable_internal_network_NAT.reset_mock()
        self.routing_helper._process_router(ri2)
        driver.internal_network_added.assert_called_with(
            ri2, ports[0])
        driver.enable_internal_network_NAT.assert_called_with(
            ri2, ports[0], ex_gw_port2)
        network_name = h_info1['network_name']
        self.assertEqual(1, len(v_n_r_dict))
        self.assertEqual(1, len(v_n_r_dict[vrf]))
        self.assertEqual(2, len(v_n_r_dict[vrf][network_name]))
        # Router 2 still uses this (vrf, network), so removal of router 1
        # must pass itfc_deleted=False (interface kept on the device).
        del ri1.router[bc.constants.INTERFACE_KEY]
        self.routing_helper._process_router(ri1)
        driver.internal_network_removed.assert_called_with(
            ri1, ports[0], itfc_deleted=False)
        driver.disable_internal_network_NAT.assert_called_with(
            ri1, ports[0], ex_gw_port1, itfc_deleted=False)
        self.assertEqual(1, len(v_n_r_dict))
        self.assertEqual(1, len(v_n_r_dict[vrf]))
        self.assertEqual(1, len(v_n_r_dict[vrf][network_name]))
        driver.internal_network_removed.reset_mock()
        driver.disable_internal_network_NAT.reset_mock()
        # Last user gone: itfc_deleted=True and the map is emptied.
        del ri2.router[bc.constants.INTERFACE_KEY]
        self.routing_helper._process_router(ri2)
        driver.internal_network_removed.assert_called_with(
            ri2, ports[0], itfc_deleted=True)
        driver.disable_internal_network_NAT.assert_called_with(
            ri2, ports[0], ex_gw_port2, itfc_deleted=True)
        self.assertEqual({}, v_n_r_dict)
    def test_process_router_2_rids_2_vrfs_1_network(self):
        """Two routers in different VRFs: each removal deletes its own."""
        driver = _mock_driver_and_hosting_device(self.routing_helper)
        self._set_driver_port_mocks(driver)
        router1, ports = helper.prepare_router_data()
        ri1 = svc_helper.RouterInfo(router1['id'], router=router1)
        # Router #2 is like #1, except with some different IDs
        router2 = copy.deepcopy(router1)
        router2['id'] = _uuid()
        ri2 = svc_helper.RouterInfo(router2['id'], router=router2)
        h_info1 = create_hosting_info()
        h_info2 = copy.deepcopy(h_info1)
        h_info2['vrf_id'] = _uuid()  # second router gets its own VRF
        ri1.router['hosting_info'] = h_info1
        ri2.router['hosting_info'] = h_info2
        ex_gw_port1 = ri1.router.get('gw_port')
        ex_gw_port2 = ri2.router.get('gw_port')
        ex_gw_port1['hosting_info'] = h_info1
        ex_gw_port2['hosting_info'] = h_info2
        vrf1 = ri1.router['hosting_info']['vrf_id']
        vrf2 = ri2.router['hosting_info']['vrf_id']
        driver._get_vrf_name = mock.Mock(return_value=vrf1)
        self.routing_helper._process_router(ri1)
        driver.internal_network_added.assert_called_with(
            ri1, ports[0])
        driver.enable_internal_network_NAT.assert_called_with(
            ri1, ports[0], ex_gw_port1)
        v_n_r_dict = self.routing_helper._router_ids_by_vrf_and_ext_net
        network_name = h_info1['network_name']
        self.assertEqual(1, len(v_n_r_dict))
        self.assertEqual(1, len(v_n_r_dict[vrf1]))
        self.assertEqual(1, len(v_n_r_dict[vrf1][network_name]))
        driver.internal_network_added.reset_mock()
        driver.enable_internal_network_NAT.reset_mock()
        driver._get_vrf_name = mock.Mock(return_value=vrf2)
        self.routing_helper._process_router(ri2)
        driver.internal_network_added.assert_called_with(
            ri2, ports[0])
        driver.enable_internal_network_NAT.assert_called_with(
            ri2, ports[0], ex_gw_port2)
        network_name = h_info1['network_name']
        self.assertEqual(2, len(v_n_r_dict))
        self.assertEqual(1, len(v_n_r_dict[vrf1]))
        self.assertEqual(1, len(v_n_r_dict[vrf2]))
        self.assertEqual(1, len(v_n_r_dict[vrf1][network_name]))
        self.assertEqual(1, len(v_n_r_dict[vrf2][network_name]))
        # Each router is the sole user of its VRF, so removal always
        # deletes the interface (itfc_deleted=True).
        del ri1.router[bc.constants.INTERFACE_KEY]
        driver._get_vrf_name = mock.Mock(return_value=vrf1)
        self.routing_helper._process_router(ri1)
        driver.internal_network_removed.assert_called_with(
            ri1, ports[0], itfc_deleted=True)
        driver.disable_internal_network_NAT.assert_called_with(
            ri1, ports[0], ex_gw_port1, itfc_deleted=True)
        self.assertEqual(1, len(v_n_r_dict))
        self.assertFalse(v_n_r_dict.get(vrf1))
        self.assertEqual(1, len(v_n_r_dict[vrf2]))
        self.assertEqual(1, len(v_n_r_dict[vrf2][network_name]))
        driver.internal_network_removed.reset_mock()
        driver.disable_internal_network_NAT.reset_mock()
        del ri2.router[bc.constants.INTERFACE_KEY]
        driver._get_vrf_name = mock.Mock(return_value=vrf2)
        self.routing_helper._process_router(ri2)
        driver.internal_network_removed.assert_called_with(
            ri2, ports[0], itfc_deleted=True)
        driver.disable_internal_network_NAT.assert_called_with(
            ri2, ports[0], ex_gw_port2, itfc_deleted=True)
        self.assertEqual({}, v_n_r_dict)
    def test_process_router_2_rids_1_vrf_2_networks(self):
        """One VRF, two external networks tracked independently."""
        driver = _mock_driver_and_hosting_device(self.routing_helper)
        self._set_driver_port_mocks(driver)
        router1, ports = helper.prepare_router_data()
        ri1 = svc_helper.RouterInfo(router1['id'], router=router1)
        # Router #2 is like #1, except with different IDs and host info
        router2 = copy.deepcopy(router1)
        router2['id'] = _uuid()
        ri2 = svc_helper.RouterInfo(router2['id'], router=router2)
        h_info1 = create_hosting_info()
        # Same VRF as router 1, but a different external network.
        h_info2 = create_hosting_info(vrf=h_info1['vrf_id'],
            net_name=TEST_NET2, vlan=TEST_VLAN2, gw_ip=TEST_GW_IP2,
            cidr=TEST_CIDR2)
        ri1.router['hosting_info'] = h_info1
        ri2.router['hosting_info'] = h_info2
        ex_gw_port1 = ri1.router.get('gw_port')
        ex_gw_port2 = ri2.router.get('gw_port')
        ex_gw_port1['hosting_info'] = h_info1
        ex_gw_port2['hosting_info'] = h_info2
        network_name1 = h_info1['network_name']
        network_name2 = h_info2['network_name']
        vrf = ri1.router['hosting_info']['vrf_id']
        driver._get_vrf_name = mock.Mock(return_value=vrf)
        self.routing_helper._process_router(ri1)
        driver.internal_network_added.assert_called_with(
            ri1, ports[0])
        driver.enable_internal_network_NAT.assert_called_with(
            ri1, ports[0], ex_gw_port1)
        v_n_r_dict = self.routing_helper._router_ids_by_vrf_and_ext_net
        self.assertEqual(1, len(v_n_r_dict))
        self.assertEqual(1, len(v_n_r_dict[vrf]))
        self.assertEqual(1, len(v_n_r_dict[vrf][network_name1]))
        driver.internal_network_added.reset_mock()
        driver.enable_internal_network_NAT.reset_mock()
        self.routing_helper._process_router(ri2)
        driver.internal_network_added.assert_called_with(
            ri2, ports[0])
        driver.enable_internal_network_NAT.assert_called_with(
            ri2, ports[0], ex_gw_port2)
        # One VRF entry now holds two distinct network entries.
        self.assertEqual(1, len(v_n_r_dict))
        self.assertEqual(2, len(v_n_r_dict[vrf]))
        self.assertEqual(1, len(v_n_r_dict[vrf][network_name1]))
        self.assertEqual(1, len(v_n_r_dict[vrf][network_name2]))
        # Each router is the only user of its network, so removal deletes
        # the interface (itfc_deleted=True) even though the VRF is shared.
        del ri1.router[bc.constants.INTERFACE_KEY]
        self.routing_helper._process_router(ri1)
        driver.internal_network_removed.assert_called_with(
            ri1, ports[0], itfc_deleted=True)
        driver.disable_internal_network_NAT.assert_called_with(
            ri1, ports[0], ex_gw_port1, itfc_deleted=True)
        self.assertEqual(1, len(v_n_r_dict))
        self.assertEqual(1, len(v_n_r_dict[vrf]))
        self.assertFalse(v_n_r_dict[vrf].get(network_name1))
        self.assertEqual(1, len(v_n_r_dict[vrf][network_name2]))
        driver.internal_network_removed.reset_mock()
        driver.disable_internal_network_NAT.reset_mock()
        del ri2.router[bc.constants.INTERFACE_KEY]
        self.routing_helper._process_router(ri2)
        driver.internal_network_removed.assert_called_with(
            ri2, ports[0], itfc_deleted=True)
        driver.disable_internal_network_NAT.assert_called_with(
            ri2, ports[0], ex_gw_port2, itfc_deleted=True)
        self.assertEqual({}, v_n_r_dict)
    def test_process_router_2_rids_2_vrfs_2_networks(self):
        """Fully disjoint routers: separate VRFs and separate networks."""
        driver = _mock_driver_and_hosting_device(self.routing_helper)
        self._set_driver_port_mocks(driver)
        router1, ports = helper.prepare_router_data()
        ri1 = svc_helper.RouterInfo(router1['id'], router=router1)
        # Router #2 is like #1, except with different IDs and host info
        router2 = copy.deepcopy(router1)
        router2['id'] = _uuid()
        ri2 = svc_helper.RouterInfo(router2['id'], router=router2)
        h_info1 = create_hosting_info()
        # Fresh vrf_id (default) AND a different external network.
        h_info2 = create_hosting_info(net_name=TEST_NET2,
            vlan=TEST_VLAN2, gw_ip=TEST_GW_IP2, cidr=TEST_CIDR2)
        ri1.router['hosting_info'] = h_info1
        ri2.router['hosting_info'] = h_info2
        ex_gw_port1 = ri1.router.get('gw_port')
        ex_gw_port2 = ri2.router.get('gw_port')
        ex_gw_port1['hosting_info'] = h_info1
        ex_gw_port2['hosting_info'] = h_info2
        vrf1 = ri1.router['hosting_info']['vrf_id']
        vrf2 = ri2.router['hosting_info']['vrf_id']
        network_name1 = h_info1['network_name']
        network_name2 = h_info2['network_name']
        driver._get_vrf_name = mock.Mock(return_value=vrf1)
        self.routing_helper._process_router(ri1)
        driver.internal_network_added.assert_called_with(
            ri1, ports[0])
        driver.enable_internal_network_NAT.assert_called_with(
            ri1, ports[0], ex_gw_port1)
        v_n_r_dict = self.routing_helper._router_ids_by_vrf_and_ext_net
        self.assertEqual(1, len(v_n_r_dict))
        self.assertEqual(1, len(v_n_r_dict[vrf1]))
        self.assertEqual(1, len(v_n_r_dict[vrf1][network_name1]))
        driver.internal_network_added.reset_mock()
        driver.enable_internal_network_NAT.reset_mock()
        driver._get_vrf_name = mock.Mock(return_value=vrf2)
        self.routing_helper._process_router(ri2)
        driver.internal_network_added.assert_called_with(
            ri2, ports[0])
        driver.enable_internal_network_NAT.assert_called_with(
            ri2, ports[0], ex_gw_port2)
        self.assertEqual(2, len(v_n_r_dict))
        self.assertEqual(1, len(v_n_r_dict[vrf1]))
        self.assertEqual(1, len(v_n_r_dict[vrf2]))
        self.assertEqual(1, len(v_n_r_dict[vrf1][network_name1]))
        self.assertEqual(1, len(v_n_r_dict[vrf2][network_name2]))
        # Nothing is shared, so every removal tears down its interface.
        del ri1.router[bc.constants.INTERFACE_KEY]
        driver._get_vrf_name = mock.Mock(return_value=vrf1)
        self.routing_helper._process_router(ri1)
        driver.internal_network_removed.assert_called_with(
            ri1, ports[0], itfc_deleted=True)
        driver.disable_internal_network_NAT.assert_called_with(
            ri1, ports[0], ex_gw_port1, itfc_deleted=True)
        self.assertEqual(1, len(v_n_r_dict))
        self.assertEqual(1, len(v_n_r_dict[vrf2]))
        self.assertFalse(v_n_r_dict.get(vrf1))
        self.assertEqual(1, len(v_n_r_dict[vrf2][network_name2]))
        driver.internal_network_removed.reset_mock()
        driver.disable_internal_network_NAT.reset_mock()
        del ri2.router[bc.constants.INTERFACE_KEY]
        driver._get_vrf_name = mock.Mock(return_value=vrf2)
        self.routing_helper._process_router(ri2)
        driver.internal_network_removed.assert_called_with(
            ri2, ports[0], itfc_deleted=True)
        driver.disable_internal_network_NAT.assert_called_with(
            ri2, ports[0], ex_gw_port2, itfc_deleted=True)
        self.assertEqual({}, v_n_r_dict)
| 42.346232
| 78
| 0.694017
| 2,862
| 20,792
| 4.640112
| 0.084906
| 0.060994
| 0.011747
| 0.02741
| 0.813554
| 0.792395
| 0.775
| 0.764458
| 0.740738
| 0.728238
| 0
| 0.025565
| 0.206089
| 20,792
| 490
| 79
| 42.432653
| 0.778942
| 0.042276
| 0
| 0.69821
| 0
| 0
| 0.049135
| 0.009153
| 0
| 0
| 0
| 0
| 0.245524
| 1
| 0.033248
| false
| 0
| 0.025575
| 0
| 0.069054
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
06232f8c7620f03cf784aa1b5d24ec9984ebba4d
| 25
|
py
|
Python
|
src/playlistsRecomender/gaPlaylistGenerator/__init__.py
|
Drob-AI/music-queue-rec
|
96db3335b448ee6f4b09c58c305c390054cf61d6
|
[
"MIT"
] | null | null | null |
src/playlistsRecomender/gaPlaylistGenerator/__init__.py
|
Drob-AI/music-queue-rec
|
96db3335b448ee6f4b09c58c305c390054cf61d6
|
[
"MIT"
] | null | null | null |
src/playlistsRecomender/gaPlaylistGenerator/__init__.py
|
Drob-AI/music-queue-rec
|
96db3335b448ee6f4b09c58c305c390054cf61d6
|
[
"MIT"
] | null | null | null |
from ga_starters import *
| 25
| 25
| 0.84
| 4
| 25
| 5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.12
| 25
| 1
| 25
| 25
| 0.909091
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
0667bf527b4f2e25104cfb680c9c04f69ce8b75f
| 100
|
py
|
Python
|
kink/errors/service_error.py
|
dosisod/kink
|
3602a0ccca13759c574e8676ce27d5547b4b1173
|
[
"MIT"
] | 95
|
2020-04-11T09:23:04.000Z
|
2022-03-30T06:08:31.000Z
|
kink/errors/service_error.py
|
dosisod/kink
|
3602a0ccca13759c574e8676ce27d5547b4b1173
|
[
"MIT"
] | 14
|
2020-07-09T21:10:34.000Z
|
2022-03-28T07:27:48.000Z
|
kink/errors/service_error.py
|
dosisod/kink
|
3602a0ccca13759c574e8676ce27d5547b4b1173
|
[
"MIT"
] | 7
|
2021-04-27T07:29:41.000Z
|
2022-02-13T00:10:20.000Z
|
from .conainer_error import ContainerError
class ServiceError(ContainerError, KeyError):
    """Container error for failed service lookups.

    Inherits both ``ContainerError`` and ``KeyError`` so callers can
    catch it as either a container-level error or a mapping miss.
    """
    pass
| 16.666667
| 45
| 0.81
| 10
| 100
| 8
| 0.9
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.14
| 100
| 5
| 46
| 20
| 0.930233
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.333333
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 6
|
0679616ba76317aff463f70f8be503b6e7641d56
| 760
|
py
|
Python
|
__init__.py
|
guitarpoet/python-configurator
|
c470c2e4175c51f214fc0314a9324729dcbc3b5c
|
[
"Apache-2.0"
] | null | null | null |
__init__.py
|
guitarpoet/python-configurator
|
c470c2e4175c51f214fc0314a9324729dcbc3b5c
|
[
"Apache-2.0"
] | null | null | null |
__init__.py
|
guitarpoet/python-configurator
|
c470c2e4175c51f214fc0314a9324729dcbc3b5c
|
[
"Apache-2.0"
] | null | null | null |
################################################################################
# #
# Configurator project #
# #
# @author Jack <jack@thinkingcloud.info> #
# @version 0.0.1 #
# @date 2021-06-01 17:40:46 #
# #
################################################################################
from .configuratorpy import *
| 63.333333
| 80
| 0.134211
| 21
| 760
| 4.857143
| 0.904762
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.058419
| 0.617105
| 760
| 11
| 81
| 69.090909
| 0.292096
| 0.439474
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
234e61672d65647babffa7507a3c32f3eccc1c36
| 48
|
py
|
Python
|
TickerAnalysis/__init__.py
|
Andre-Ceschia/TickerAnalysis
|
7ade87729510c2bdebeec3264967fd135e18d8a5
|
[
"MIT"
] | null | null | null |
TickerAnalysis/__init__.py
|
Andre-Ceschia/TickerAnalysis
|
7ade87729510c2bdebeec3264967fd135e18d8a5
|
[
"MIT"
] | null | null | null |
TickerAnalysis/__init__.py
|
Andre-Ceschia/TickerAnalysis
|
7ade87729510c2bdebeec3264967fd135e18d8a5
|
[
"MIT"
] | null | null | null |
from TickerAnalysis.TickerAnalysis import Ticker
| 48
| 48
| 0.916667
| 5
| 48
| 8.8
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.0625
| 48
| 1
| 48
| 48
| 0.977778
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
00019a5ed4b162b067056d73d3dcabf5a6bc8c6e
| 39
|
py
|
Python
|
src/applications/authentication/forms/__init__.py
|
luisito666/Mt2Web.py-V2
|
86db949be98d824cf827e519958438e77022a42a
|
[
"MIT"
] | 1
|
2020-01-10T01:21:49.000Z
|
2020-01-10T01:21:49.000Z
|
src/applications/authentication/forms/__init__.py
|
luisito666/Mt2Web.py-V2
|
86db949be98d824cf827e519958438e77022a42a
|
[
"MIT"
] | 7
|
2020-06-05T20:00:41.000Z
|
2021-09-22T18:04:37.000Z
|
src/applications/authentication/forms/__init__.py
|
luisito666/Mt2Web.py-V2
|
86db949be98d824cf827e519958438e77022a42a
|
[
"MIT"
] | null | null | null |
from .forms import AccountCreationForm
| 19.5
| 38
| 0.871795
| 4
| 39
| 8.5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.102564
| 39
| 1
| 39
| 39
| 0.971429
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
00426a45d108d9efcc9dd77a7cda97292f1db709
| 2,165
|
py
|
Python
|
api/uploader/doc.py
|
StepaTa/vkbottle
|
3b04a5343380cbabe782151e7cb1c1645a9fa9ce
|
[
"MIT"
] | null | null | null |
api/uploader/doc.py
|
StepaTa/vkbottle
|
3b04a5343380cbabe782151e7cb1c1645a9fa9ce
|
[
"MIT"
] | null | null | null |
api/uploader/doc.py
|
StepaTa/vkbottle
|
3b04a5343380cbabe782151e7cb1c1645a9fa9ce
|
[
"MIT"
] | null | null | null |
import typing
from .base import Uploader
from io import BytesIO
class DocUploader(Uploader):
    """Uploader for VK document attachments.

    Each public method resolves the appropriate upload server for its
    target (wall, generic, or message), sends the file there and saves
    it via ``docs.save``. When ``self.gas`` is set, an attachment string
    built by ``generate_attachment_string`` is returned instead of the
    raw API response.
    """

    FILE_EXTENSIONS = [
        ".txt",
        ".docx",
        ".mp3",
    ]

    async def _upload_and_save(
        self, server, pathlike: typing.Union[str, BytesIO], params: dict
    ) -> typing.Union[str, dict]:
        """Shared tail of every upload flow: push the file to *server*,
        then persist it with ``docs.save``.

        :param server: upload-server response from the API
        :param pathlike: file path or in-memory buffer to upload
        :param params: extra parameters merged into the save request
        :returns: attachment string when ``self.gas`` is set, else the
            raw ``docs.save`` response dict
        """
        uploader = await self.upload(
            server, {"file": self.open_pathlike(pathlike)}, params
        )
        params = {**uploader, **params}
        doc = await self.api.request("docs.save", params)
        if self.gas:
            # Response is keyed by its own "type" field; unwrap before
            # building the attachment string.
            doc = doc[doc["type"]]
            return self.generate_attachment_string("doc", doc["owner_id"], doc["id"])
        return doc

    async def upload_doc_to_wall(
        self, pathlike: typing.Union[str, BytesIO], group_id: int = None, **params
    ) -> typing.Union[str, dict]:
        """Upload a document destined for a wall post."""
        server = await self.api.request(
            "docs.getWallUploadServer", {"group_id": group_id} if group_id else {}
        )
        return await self._upload_and_save(server, pathlike, params)

    async def upload_doc(
        self, pathlike: typing.Union[str, BytesIO], group_id: int = None, **params
    ) -> typing.Union[str, dict]:
        """Upload a generic document."""
        server = await self.api.request(
            "docs.getUploadServer", {"group_id": group_id} if group_id else {}
        )
        return await self._upload_and_save(server, pathlike, params)

    async def upload_doc_to_message(
        self,
        pathlike: typing.Union[str, BytesIO],
        peer_id: int,
        doc_type: str = "doc",
        **params
    ):
        """Upload a document (or e.g. audio message) for a conversation.

        :param peer_id: conversation the document is intended for
        :param doc_type: document type passed to the API (default "doc")
        """
        server = await self.api.request(
            "docs.getMessagesUploadServer", {"type": doc_type, "peer_id": peer_id}
        )
        return await self._upload_and_save(server, pathlike, params)
| 33.828125
| 85
| 0.568129
| 248
| 2,165
| 4.826613
| 0.205645
| 0.067669
| 0.06015
| 0.095238
| 0.816207
| 0.798663
| 0.746867
| 0.746867
| 0.746867
| 0.746867
| 0
| 0.000656
| 0.295612
| 2,165
| 63
| 86
| 34.365079
| 0.784262
| 0
| 0
| 0.534483
| 0
| 0
| 0.094688
| 0.024018
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.051724
| 0
| 0.189655
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
00463f7011d18537bcd3b6523f8aec34b2a80cd4
| 107
|
py
|
Python
|
rosbuild_ws/src/controllers/src/controllers/srv/__init__.py
|
Boberito25/ButlerBot
|
959f961bbc8c43be0ccb533dd2e2af5c55b0cc2a
|
[
"BSD-3-Clause"
] | null | null | null |
rosbuild_ws/src/controllers/src/controllers/srv/__init__.py
|
Boberito25/ButlerBot
|
959f961bbc8c43be0ccb533dd2e2af5c55b0cc2a
|
[
"BSD-3-Clause"
] | 1
|
2015-06-08T19:55:40.000Z
|
2015-06-08T19:55:40.000Z
|
rosbuild_ws/src/controllers/src/controllers/srv/__init__.py
|
Boberito25/ButlerBot
|
959f961bbc8c43be0ccb533dd2e2af5c55b0cc2a
|
[
"BSD-3-Clause"
] | null | null | null |
from ._armMove import *
from ._armAngles import *
from ._BasicArmPlan import *
from ._TestService import *
| 21.4
| 28
| 0.775701
| 12
| 107
| 6.583333
| 0.5
| 0.379747
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.149533
| 107
| 4
| 29
| 26.75
| 0.868132
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
cc4f2819ce11210e73495b38e1e5719a9efb8d16
| 49
|
py
|
Python
|
aat/core/exchange/__init__.py
|
mthomascarcamo/aat
|
fd86f513ccf79625516d2236be655498b24ec742
|
[
"Apache-2.0"
] | 305
|
2020-02-24T02:25:43.000Z
|
2022-03-26T22:53:43.000Z
|
aat/core/exchange/__init__.py
|
mthomascarcamo/aat
|
fd86f513ccf79625516d2236be655498b24ec742
|
[
"Apache-2.0"
] | 79
|
2020-02-20T21:00:58.000Z
|
2022-03-27T14:06:26.000Z
|
aat/core/exchange/__init__.py
|
mthomascarcamo/aat
|
fd86f513ccf79625516d2236be655498b24ec742
|
[
"Apache-2.0"
] | 71
|
2020-05-10T11:52:25.000Z
|
2022-03-29T07:51:48.000Z
|
from .exchange import ExchangeType # noqa: F401
| 24.5
| 48
| 0.77551
| 6
| 49
| 6.333333
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.073171
| 0.163265
| 49
| 1
| 49
| 49
| 0.853659
| 0.204082
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
cc72f1f9ebb5a0f23a9c5f046d93a248b0f7aa8a
| 117
|
py
|
Python
|
test/ok.py
|
EvidenceN/exp-flask
|
aac7780829ccee1358a91aec68545526d58badeb
|
[
"MIT"
] | null | null | null |
test/ok.py
|
EvidenceN/exp-flask
|
aac7780829ccee1358a91aec68545526d58badeb
|
[
"MIT"
] | null | null | null |
test/ok.py
|
EvidenceN/exp-flask
|
aac7780829ccee1358a91aec68545526d58badeb
|
[
"MIT"
] | null | null | null |
from flask import Flask
import requests
APP = Flask(__name__)
@APP.route("/")
def home():
    """Root endpoint: return a plain-text greeting."""
    # Plain literal — the original used an f-string with no placeholders
    # (same value at runtime; flagged by linters as F541).
    return "Hello World"
| 14.625
| 25
| 0.700855
| 17
| 117
| 4.588235
| 0.764706
| 0.282051
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.17094
| 117
| 8
| 25
| 14.625
| 0.804124
| 0
| 0
| 0
| 0
| 0
| 0.101695
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.166667
| false
| 0
| 0.333333
| 0.166667
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 6
|
cc95b57eab1323a39007febc86b022c464a1d375
| 7,211
|
py
|
Python
|
data/feature_extraction_test.py
|
googleinterns/cl_analysis
|
686cf49cd57ce61cba3cc11f0574b2a2cec596be
|
[
"Apache-2.0"
] | 5
|
2020-07-06T19:43:41.000Z
|
2020-09-27T02:29:37.000Z
|
data/feature_extraction_test.py
|
googleinterns/cl_analysis
|
686cf49cd57ce61cba3cc11f0574b2a2cec596be
|
[
"Apache-2.0"
] | 3
|
2020-07-30T20:35:25.000Z
|
2020-08-10T22:39:31.000Z
|
data/feature_extraction_test.py
|
googleinterns/cl_analysis
|
686cf49cd57ce61cba3cc11f0574b2a2cec596be
|
[
"Apache-2.0"
] | 3
|
2020-07-09T22:49:39.000Z
|
2020-09-26T07:36:16.000Z
|
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
from data.feature_extraction import *
from data.test_constants import *
import numpy as np
class FeatureExtractionTest(unittest.TestCase):
    """Unit tests for the ``FeatureExtractor`` aggregation helpers.

    Each helper is exercised three ways: with a populated input, with a
    ``np.nan`` input, and with an empty input. NaN and empty inputs are
    expected to yield 0.
    """

    def test_compute_count(self):
        self.assertEqual(FeatureExtractor.compute_count(LIST1), 7)

    def test_compute_count_on_nan_input(self):
        self.assertEqual(FeatureExtractor.compute_count(np.nan), 0)

    def test_compute_count_on_empty_lst(self):
        self.assertEqual(FeatureExtractor.compute_count(EMPTY_LIST), 0)

    def test_compute_avg(self):
        self.assertEqual(FeatureExtractor.compute_avg(LIST1), 20/7)

    def test_compute_avg_on_nan_input(self):
        self.assertEqual(FeatureExtractor.compute_avg(np.nan), 0)

    def test_compute_avg_on_empty_lst(self):
        self.assertEqual(FeatureExtractor.compute_avg(EMPTY_LIST), 0)

    def test_compute_sum(self):
        self.assertEqual(FeatureExtractor.compute_sum(LIST1), 20)

    def test_compute_sum_on_nan_input(self):
        self.assertEqual(FeatureExtractor.compute_sum(np.nan), 0)

    def test_compute_sum_on_empty_lst(self):
        self.assertEqual(FeatureExtractor.compute_sum(EMPTY_LIST), 0)

    def test_compute_nonzero_count(self):
        self.assertEqual(
            FeatureExtractor.compute_nonzero_count(LIST2), 5)

    def test_compute_nonzero_count_on_nan_input(self):
        self.assertEqual(FeatureExtractor.compute_nonzero_count(np.nan), 0)

    def test_compute_nonzero_count_on_empty_lst(self):
        self.assertEqual(FeatureExtractor.compute_nonzero_count(EMPTY_LIST), 0)

    def test_compute_nonzero_avg(self):
        self.assertEqual(
            FeatureExtractor.compute_nonzero_avg(LIST2), 12/5)

    def test_compute_nonzero_avg_on_nan_input(self):
        self.assertEqual(FeatureExtractor.compute_nonzero_avg(np.nan), 0)

    def test_compute_nonzero_avg_on_empty_lst(self):
        self.assertEqual(FeatureExtractor.compute_nonzero_avg(EMPTY_LIST), 0)

    def test_compute_nonzero_sum(self):
        self.assertEqual(
            FeatureExtractor.compute_nonzero_sum(LIST2), 12)

    def test_compute_nonzero_sum_on_nan_input(self):
        self.assertEqual(FeatureExtractor.compute_nonzero_sum(np.nan), 0)

    def test_compute_nonzero_sum_on_empty_lst(self):
        self.assertEqual(FeatureExtractor.compute_nonzero_sum(EMPTY_LIST), 0)

    def test_compute_avg_count(self):
        self.assertEqual(
            FeatureExtractor.compute_avg_count(
                COMMENT_LIST, SERIES), 2/5)

    def test_compute_avg_count_on_nan_input(self):
        self.assertEqual(
            FeatureExtractor.compute_avg_count(np.nan, SERIES), 0)

    def test_compute_avg_count_on_empty_lst(self):
        self.assertEqual(
            FeatureExtractor.compute_avg_count(EMPTY_LIST, SERIES), 0)

    def test_compute_avg_count_on_empty_series(self):
        self.assertEqual(
            FeatureExtractor.compute_avg_count(EMPTY_LIST, pd.Series([])), 0)

    def test_compute_total_check_runs(self):
        self.assertEqual(
            FeatureExtractor.compute_total_check_runs(CHECK_RUNS1, 0), 8)
        self.assertEqual(
            FeatureExtractor.compute_total_check_runs(CHECK_RUNS1, 1), 11)

    def test_compute_total_check_runs_on_nan_input(self):
        self.assertEqual(
            FeatureExtractor.compute_total_check_runs(np.nan, 0), 0)
        self.assertEqual(
            FeatureExtractor.compute_total_check_runs(np.nan, 1), 0)

    def test_compute_total_check_runs_on_empty_lst(self):
        # Removed an unused ``mock_lst = "[]"`` local left over from an
        # earlier revision of this test.
        self.assertEqual(
            FeatureExtractor.compute_total_check_runs(EMPTY_LIST, 0), 0)
        self.assertEqual(
            FeatureExtractor.compute_total_check_runs(EMPTY_LIST, 1), 0)

    def test_compute_avg_check_runs(self):
        self.assertEqual(
            FeatureExtractor.compute_avg_check_runs(CHECK_RUNS2, 0), 8/4)
        self.assertEqual(
            FeatureExtractor.compute_avg_check_runs(CHECK_RUNS2, 1), 11/4)

    def test_compute_avg_check_runs_on_nan_input(self):
        self.assertEqual(
            FeatureExtractor.compute_avg_check_runs(np.nan, 0), 0)
        self.assertEqual(
            FeatureExtractor.compute_avg_check_runs(np.nan, 1), 0)

    def test_compute_avg_check_runs_on_empty_lst(self):
        self.assertEqual(
            FeatureExtractor.compute_avg_check_runs(EMPTY_LIST, 0), 0)
        self.assertEqual(
            FeatureExtractor.compute_avg_check_runs(EMPTY_LIST, 1), 0)

    # NOTE(review): the four *file_changes* tests below exercise the
    # *check_runs* helpers on FILES_CHANGES data. This looks like a
    # copy-paste from the check_runs tests — confirm whether dedicated
    # compute_total_file_changes / compute_avg_file_changes helpers exist
    # in data.feature_extraction and should be called here instead.
    def test_compute_total_file_changes(self):
        self.assertEqual(
            FeatureExtractor.compute_total_check_runs(FILES_CHANGES, 0), 260)
        self.assertEqual(
            FeatureExtractor.compute_total_check_runs(FILES_CHANGES, 1), 100)
        self.assertEqual(
            FeatureExtractor.compute_total_check_runs(FILES_CHANGES, 2), 360)

    def test_compute_total_file_changes_on_nan_input(self):
        self.assertEqual(
            FeatureExtractor.compute_total_check_runs(np.nan, 0), 0)
        self.assertEqual(
            FeatureExtractor.compute_total_check_runs(np.nan, 1), 0)
        self.assertEqual(
            FeatureExtractor.compute_total_check_runs(np.nan, 2), 0)

    def test_compute_total_file_changes_on_empty_lst(self):
        # Removed an unused ``mock_lst = "[]"`` local here as well.
        self.assertEqual(
            FeatureExtractor.compute_total_check_runs(EMPTY_LIST, 0), 0)
        self.assertEqual(
            FeatureExtractor.compute_total_check_runs(EMPTY_LIST, 1), 0)
        self.assertEqual(
            FeatureExtractor.compute_total_check_runs(EMPTY_LIST, 2), 0)

    def test_compute_avg_file_changes(self):
        self.assertEqual(
            FeatureExtractor.compute_avg_check_runs(FILES_CHANGES, 0), 260/3)
        self.assertEqual(
            FeatureExtractor.compute_avg_check_runs(FILES_CHANGES, 1), 100/3)
        self.assertEqual(
            FeatureExtractor.compute_avg_check_runs(FILES_CHANGES, 2), 360/3)

    def test_compute_avg_file_changes_on_nan_input(self):
        self.assertEqual(
            FeatureExtractor.compute_avg_check_runs(np.nan, 0), 0)
        self.assertEqual(
            FeatureExtractor.compute_avg_check_runs(np.nan, 1), 0)
        self.assertEqual(
            FeatureExtractor.compute_avg_check_runs(np.nan, 2), 0)

    def test_compute_avg_file_changes_on_empty_lst(self):
        self.assertEqual(
            FeatureExtractor.compute_avg_check_runs(EMPTY_LIST, 0), 0)
        self.assertEqual(
            FeatureExtractor.compute_avg_check_runs(EMPTY_LIST, 1), 0)
        self.assertEqual(
            FeatureExtractor.compute_avg_check_runs(EMPTY_LIST, 2), 0)
# Run the full test suite when this file is executed directly.
if __name__ == '__main__':
    unittest.main()
| 38.768817
| 79
| 0.718208
| 921
| 7,211
| 5.254072
| 0.128122
| 0.16119
| 0.333127
| 0.408349
| 0.857615
| 0.847283
| 0.792106
| 0.645381
| 0.628022
| 0.306675
| 0
| 0.02183
| 0.199556
| 7,211
| 185
| 80
| 38.978378
| 0.816528
| 0.075995
| 0
| 0.413534
| 0
| 0
| 0.001805
| 0
| 0
| 0
| 0
| 0
| 0.390977
| 1
| 0.255639
| false
| 0
| 0.030075
| 0
| 0.293233
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
9da4ef91e3ec5ce57ebb0eadecc8c1ab4eaf21ab
| 143
|
py
|
Python
|
scarlet/lite/__init__.py
|
aboucaud/scarlet
|
8300d195a29c634e2c09f935ed5e7481d8934510
|
[
"MIT"
] | 16
|
2018-03-01T17:31:28.000Z
|
2019-06-27T13:15:28.000Z
|
scarlet/lite/__init__.py
|
aboucaud/scarlet
|
8300d195a29c634e2c09f935ed5e7481d8934510
|
[
"MIT"
] | 91
|
2017-12-15T17:59:23.000Z
|
2019-07-25T20:58:48.000Z
|
scarlet/lite/__init__.py
|
aboucaud/scarlet
|
8300d195a29c634e2c09f935ed5e7481d8934510
|
[
"MIT"
] | 9
|
2018-03-01T17:31:28.000Z
|
2019-05-04T18:32:19.000Z
|
from .initialization import *
from .models import *
from .parameters import *
from .utils import *
from .measure import *
from . import display
| 23.833333
| 29
| 0.762238
| 18
| 143
| 6.055556
| 0.444444
| 0.458716
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.160839
| 143
| 6
| 30
| 23.833333
| 0.908333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
9dbbf950cf2127ee1cea29e509fb8c69650d12ed
| 572
|
py
|
Python
|
tests/test_dkbuild-apacheconf_import.py
|
datakortet/dkbuild-apacheconf
|
740bed45a33631144c967af5e5fc4288b33d537d
|
[
"Apache-2.0"
] | null | null | null |
tests/test_dkbuild-apacheconf_import.py
|
datakortet/dkbuild-apacheconf
|
740bed45a33631144c967af5e5fc4288b33d537d
|
[
"Apache-2.0"
] | 1
|
2020-05-06T13:51:24.000Z
|
2020-05-10T10:46:46.000Z
|
tests/test_dkbuild-apacheconf_import.py
|
datakortet/dkbuild-apacheconf
|
740bed45a33631144c967af5e5fc4288b33d537d
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
"""Test that all modules are importable.
"""
import dkbuild_apacheconf
import dkbuild_apacheconf.__main__
import dkbuild_apacheconf.context
import dkbuild_apacheconf.defaults
import dkbuild_apacheconf.errors
import dkbuild_apacheconf.mergesettings
def test_import_():
    """Test that all modules are importable."""
    # Every submodule import already succeeded at file load; assert each
    # module object is truthy to make the dependency explicit.
    modules = (
        dkbuild_apacheconf,
        dkbuild_apacheconf.__main__,
        dkbuild_apacheconf.context,
        dkbuild_apacheconf.defaults,
        dkbuild_apacheconf.errors,
        dkbuild_apacheconf.mergesettings,
    )
    for module in modules:
        assert module
| 24.869565
| 43
| 0.802448
| 64
| 572
| 6.828125
| 0.296875
| 0.466819
| 0.315789
| 0.08238
| 0.141876
| 0.141876
| 0
| 0
| 0
| 0
| 0
| 0.002037
| 0.141608
| 572
| 22
| 44
| 26
| 0.887984
| 0.171329
| 0
| 0
| 0
| 0
| 0.073267
| 0
| 0
| 0
| 0
| 0
| 0.428571
| 1
| 0.071429
| true
| 0
| 0.571429
| 0
| 0.642857
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
9dbe28a3cd8e3c16c5d52b0bfb16dde5841ab5d0
| 1,656
|
py
|
Python
|
homepage/portfolio/migrations/0050_auto_20200706_1128.py
|
FabianVolkers/portfolio
|
b5008818b6ca7b2b10c587e2beab4265157bc694
|
[
"MIT"
] | null | null | null |
homepage/portfolio/migrations/0050_auto_20200706_1128.py
|
FabianVolkers/portfolio
|
b5008818b6ca7b2b10c587e2beab4265157bc694
|
[
"MIT"
] | 7
|
2021-03-30T13:55:39.000Z
|
2022-01-13T03:01:58.000Z
|
homepage/portfolio/migrations/0050_auto_20200706_1128.py
|
FabianVolkers/homepage
|
b5008818b6ca7b2b10c587e2beab4265157bc694
|
[
"MIT"
] | null | null | null |
# Generated by Django 3.0.7 on 2020-07-06 11:28
from django.db import migrations, models
class Migration(migrations.Migration):
    """Alter the ``position`` field on ``footerlink`` and ``navlink``.

    Both fields share one closed choice set: ten navbar slots followed by
    ten footer slots. The set is built once instead of duplicating two
    identical ~600-character literal lists (values are byte-identical to
    what ``makemigrations`` emitted).
    """

    # ('navbar-0', 'navbar 0') ... ('footer-9', 'footer 9')
    _POSITION_CHOICES = [
        ('{}-{}'.format(area, slot), '{} {}'.format(area, slot))
        for area in ('navbar', 'footer')
        for slot in range(10)
    ]

    dependencies = [
        ('portfolio', '0049_footerlink_navlink'),
    ]

    operations = [
        migrations.AlterField(
            model_name='footerlink',
            name='position',
            field=models.CharField(choices=_POSITION_CHOICES, max_length=8,
                                   null=True, unique=True),
        ),
        migrations.AlterField(
            model_name='navlink',
            name='position',
            field=models.CharField(choices=_POSITION_CHOICES, max_length=8,
                                   null=True, unique=True),
        ),
    ]
| 69
| 603
| 0.560386
| 224
| 1,656
| 4.116071
| 0.205357
| 0.030369
| 0.056399
| 0.062907
| 0.752712
| 0.752712
| 0.752712
| 0.752712
| 0.752712
| 0.752712
| 0
| 0.074047
| 0.176329
| 1,656
| 23
| 604
| 72
| 0.601906
| 0.027174
| 0
| 0.470588
| 1
| 0
| 0.43816
| 0.014295
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.058824
| 0
| 0.235294
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
d19e09ed9e497bb34776ac872472bb7a8c69d913
| 345
|
py
|
Python
|
gofer_multi_worked.py
|
Srinath-tr/Goferbot
|
0f734d01c6504c6c97dbdf45f5adf8b25c0f9fd9
|
[
"Apache-2.0",
"bzip2-1.0.6"
] | 1
|
2019-04-23T21:50:08.000Z
|
2019-04-23T21:50:08.000Z
|
gofer_multi_worked.py
|
Srinath-tr/Goferbot
|
0f734d01c6504c6c97dbdf45f5adf8b25c0f9fd9
|
[
"Apache-2.0",
"bzip2-1.0.6"
] | null | null | null |
gofer_multi_worked.py
|
Srinath-tr/Goferbot
|
0f734d01c6504c6c97dbdf45f5adf8b25c0f9fd9
|
[
"Apache-2.0",
"bzip2-1.0.6"
] | 2
|
2019-02-14T08:13:33.000Z
|
2019-04-23T21:47:48.000Z
|
"""Launch the rover controller with the 'come front' command."""
import subprocess
# Earlier commented-out experiments (voice assistant, face recognition,
# worker processes) were dead code and have been removed.
# Block until the controller process finishes handling the command.
# List-argument form keeps shell=False (no shell injection surface).
subprocess.call(['python', 'controlRover.py', 'come front'])
| 43.125
| 101
| 0.724638
| 43
| 345
| 5.72093
| 0.581395
| 0.243902
| 0.341463
| 0.186992
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.018127
| 0.04058
| 345
| 7
| 102
| 49.285714
| 0.725076
| 0.747826
| 0
| 0
| 0
| 0
| 0.373494
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
0614fa122f5783589c21c7928374ebd0d47f579d
| 40,172
|
py
|
Python
|
sqlathanor/declarative/_csv_support.py
|
edupo/sqlathanor
|
a5cfd349d092b25a3ffb3950b996b13878e1db17
|
[
"MIT"
] | 101
|
2018-07-21T00:20:59.000Z
|
2022-02-09T21:33:09.000Z
|
sqlathanor/declarative/_csv_support.py
|
edupo/sqlathanor
|
a5cfd349d092b25a3ffb3950b996b13878e1db17
|
[
"MIT"
] | 85
|
2018-06-16T02:15:08.000Z
|
2022-02-24T14:57:24.000Z
|
sqlathanor/declarative/_csv_support.py
|
edupo/sqlathanor
|
a5cfd349d092b25a3ffb3950b996b13878e1db17
|
[
"MIT"
] | 6
|
2018-07-25T09:51:02.000Z
|
2022-02-24T14:04:27.000Z
|
# -*- coding: utf-8 -*-
# The lack of a module docstring for this module is **INTENTIONAL**.
# The module is imported into the documentation using Sphinx's autodoc
# extension, and its member function documentation is automatically incorporated
# there as needed.
import csv
from validator_collection import checkers, validators
from sqlathanor._compat import StringIO, dict as dict_
from sqlathanor.utilities import read_csv_data, get_attribute_names
from sqlathanor.errors import SerializableAttributeError, \
UnsupportedSerializationError, CSVStructureError, DeserializationError
class CSVSupportMixin(object):
"""Mixin that provides CSV serialization/de-serialization support."""
@classmethod
def get_csv_column_names(cls,
deserialize = True,
serialize = True,
config_set = None):
"""Retrieve a list of CSV column names.
:param deserialize: If ``True``, returns columns that support
:term:`de-serialization`. If ``False``, returns columns that do *not*
support deserialization. If :obj:`None <python:None>`, does not take
deserialization into account. Defaults to ``True``.
:type deserialize: :class:`bool <python:bool>`
:param serialize: If ``True``, returns columns that support
:term:`serialization`. If ``False``, returns columns that do *not*
support serialization. If :obj:`None <python:None>`, does not take
serialization into account. Defaults to ``True``.
:type serialize: :class:`bool <python:bool>`
:param config_set: If not :obj:`None <python:None>`, the named configuration set
to use. Defaults to :obj:`None <python:None>`.
:type config_set: :class:`str <python:str>` / :obj:`None <python:None>`
:returns: List of CSV column names, sorted according to their configuration.
:rtype: :class:`list <python:list>` of :class:`str <python:str>`
"""
config = cls.get_csv_serialization_config(deserialize = deserialize,
serialize = serialize,
config_set = config_set)
attribute_names = [x.name for x in config]
display_names = [x.display_name for x in config]
return [x[0] or x[1] for x in zip(display_names, attribute_names)]
@classmethod
def _get_csv_attribute_names(cls,
deserialize = True,
serialize = True,
config_set = None):
"""Retrieve a list of the attribute names that are to be serialized to CSV.
:param deserialize: If ``True``, returns columns that support
:term:`de-serialization`. If ``False``, returns columns that do *not*
support deserialization. If :obj:`None <python:None>`, does not take
deserialization into account. Defaults to ``True``.
:type deserialize: :class:`bool <python:bool>`
:param serialize: If ``True``, returns columns that support
:term:`serialization`. If ``False``, returns columns that do *not*
support serialization. If :obj:`None <python:None>`, does not take
serialization into account. Defaults to ``True``.
:type serialize: :class:`bool <python:bool>`
:returns: List of attribute names, sorted according to their configuration.
:rtype: :class:`list <python:list>` of :class:`str <python:str>`
"""
config = cls.get_csv_serialization_config(deserialize = deserialize,
serialize = serialize,
config_set = config_set)
return [x.name for x in config]
@classmethod
def _get_attribute_csv_header(cls,
attributes,
delimiter = '|',
wrap_all_strings = False,
wrapper_character = "'",
double_wrapper_character_when_nested = False,
escape_character = "\\",
line_terminator = '\r\n'):
r"""Retrieve a header string for a CSV representation of the model.
:param attributes: List of :term:`model attributes <model attribute>` to
include.
:type attributes: :class:`list <python:list>` of :class:`str <python:str>`
:param delimiter: The character(s) to utilize between columns. Defaults to
a pipe (``|``).
:type delimiter: :class:`str <python:str>`
:param wrap_all_strings: If ``True``, wraps any string data in the
``wrapper_character``. If ``None``, only wraps string data if it contains
the ``delimiter``. Defaults to ``False``.
:type wrap_all_strings: :class:`bool <python:bool>`
:param wrapper_character: The string used to wrap string values when
wrapping is necessary. Defaults to ``'``.
:type wrapper_character: :class:`str <python:str>`
:param double_wrapper_character_when_nested: If ``True``, will double the
``wrapper_character`` when it is found inside a column value. If ``False``,
will precede the ``wrapper_character`` by the ``escape_character`` when
it is found inside a column value. Defaults to ``False``.
:type double_wrapper_character_when_nested: :class:`bool <python:bool>`
:param escape_character: The character to use when escaping nested wrapper
characters. Defaults to ``\``.
:type escape_character: :class:`str <python:str>`
:param line_terminator: The character used to mark the end of a line.
Defaults to ``\r\n``.
:type line_terminator: :class:`str <python:str>`
:returns: A string ending in ``line_terminator`` with the model's CSV column names
listed, separated by the ``delimiter``.
:rtype: :class:`str <python:str>`
"""
if not wrapper_character:
wrapper_character = '\''
if wrap_all_strings:
quoting = csv.QUOTE_NONNUMERIC
else:
quoting = csv.QUOTE_MINIMAL
if 'sqlathanor' in csv.list_dialects():
csv.unregister_dialect('sqlathanor')
csv.register_dialect('sqlathanor',
delimiter = delimiter,
doublequote = double_wrapper_character_when_nested,
escapechar = escape_character,
quotechar = wrapper_character,
quoting = quoting,
lineterminator = line_terminator)
output = StringIO()
csv_writer = csv.DictWriter(output,
fieldnames = attributes,
dialect = 'sqlathanor')
csv_writer.writeheader()
header_string = output.getvalue()
output.close()
csv.unregister_dialect('sqlathanor')
return header_string
def _get_attribute_csv_data(self,
attributes,
is_dumping = False,
delimiter = '|',
wrap_all_strings = False,
null_text = 'None',
wrapper_character = "'",
double_wrapper_character_when_nested = False,
escape_character = "\\",
line_terminator = '\r\n',
config_set = None):
r"""Return the CSV representation of ``attributes`` extracted from the
model instance (record).
:param attributes: Names of :term:`model attributes <model attribute>` to
include in the CSV output.
:type attributes: :class:`list <python:list>` of :class:`str <python:str>`
:param is_dumping: If ``True``, then allow
:exc:`UnsupportedSerializationError <sqlathanor.errors.UnsupportedSerializationError>`.
Defaults to ``False``.
:type is_dumping: :class:`bool <python:bool>`
:param delimiter: The delimiter used between columns. Defaults to ``|``.
:type delimiter: :class:`str <python:str>`
:param wrap_all_strings: If ``True``, wraps any string data in the
``wrapper_character``. If ``None``, only wraps string data if it contains
the ``delimiter``. Defaults to ``False``.
:type wrap_all_strings: :class:`bool <python:bool>`
:param null_text: The text value to use in place of empty values. Only
applies if ``wrap_empty_values`` is ``True``. Defaults to ``'None'``.
:type null_text: :class:`str <python:str>`
:param wrapper_character: The string used to wrap string values when
wrapping is necessary. Defaults to ``'``.
:type wrapper_character: :class:`str <python:str>`
:param double_wrapper_character_when_nested: If ``True``, will double the
``wrapper_character`` when it is found inside a column value. If ``False``,
will precede the ``wrapper_character`` by the ``escape_character`` when
it is found inside a column value. Defaults to ``False``.
:type double_wrapper_character_when_nested: :class:`bool <python:bool>`
:param escape_character: The character to use when escaping nested wrapper
characters. Defaults to ``\``.
:type escape_character: :class:`str <python:str>`
:param line_terminator: The character used to mark the end of a line.
Defaults to ``\r\n``.
:type line_terminator: :class:`str <python:str>`
:param config_set: If not :obj:`None <python:None>`, the named configuration set
to use. Defaults to :obj:`None <python:None>`.
:type config_set: :class:`str <python:str>` / :obj:`None <python:None>`
:returns: Data from the object in CSV format ending in ``line_terminator``.
:rtype: :class:`str <python:str>`
"""
if not wrapper_character:
wrapper_character = '\''
if not attributes:
raise SerializableAttributeError("attributes cannot be empty")
if wrap_all_strings:
quoting = csv.QUOTE_NONNUMERIC
else:
quoting = csv.QUOTE_MINIMAL
if 'sqlathanor' in csv.list_dialects():
csv.unregister_dialect('sqlathanor')
csv.register_dialect('sqlathanor',
delimiter = delimiter,
doublequote = double_wrapper_character_when_nested,
escapechar = escape_character,
quotechar = wrapper_character,
quoting = quoting,
lineterminator = line_terminator)
data = []
for item in attributes:
try:
value = self._get_serialized_value(format = 'csv',
attribute = item,
config_set = config_set)
except UnsupportedSerializationError as error:
if is_dumping:
value = getattr(self, item)
else:
raise error
data.append(value)
for index, item in enumerate(data):
if item == '' or item is None or item == 'None':
data[index] = null_text
elif not checkers.is_string(item) and not checkers.is_numeric(item):
data[index] = str(item)
data_dict = dict_()
for index, column_name in enumerate(attributes):
data_dict[column_name] = data[index]
output = StringIO()
csv_writer = csv.DictWriter(output,
fieldnames = attributes,
dialect = 'sqlathanor')
csv_writer.writerow(data_dict)
data_row = output.getvalue()
output.close()
csv.unregister_dialect('sqlathanor')
return data_row
@classmethod
def get_csv_header(cls,
deserialize = None,
serialize = True,
delimiter = '|',
wrap_all_strings = False,
wrapper_character = "'",
double_wrapper_character_when_nested = False,
escape_character = "\\",
line_terminator = '\r\n',
config_set = None):
r"""Retrieve a header string for a CSV representation of the model.
:param attributes: List of :term:`model attributes <model attribute>` to
include.
:type attributes: :class:`list <python:list>` of :class:`str <python:str>`
:param delimiter: The character(s) to utilize between columns. Defaults to
a pipe (``|``).
:type delimiter: :class:`str <python:str>`
:param wrap_all_strings: If ``True``, wraps any string data in the
``wrapper_character``. If ``None``, only wraps string data if it contains
the ``delimiter``. Defaults to ``False``.
:type wrap_all_strings: :class:`bool <python:bool>`
:param null_text: The text value to use in place of empty values. Only
applies if ``wrap_empty_values`` is ``True``. Defaults to ``'None'``.
:type null_text: :class:`str <python:str>`
:param null_text: The text value to use in place of empty values. Only
applies if ``wrap_empty_values`` is ``True``. Defaults to ``'None'``.
:type null_text: :class:`str <python:str>`
:param wrapper_character: The string used to wrap string values when
wrapping is necessary. Defaults to ``'``.
:type wrapper_character: :class:`str <python:str>`
:param double_wrapper_character_when_nested: If ``True``, will double the
``wrapper_character`` when it is found inside a column value. If ``False``,
will precede the ``wrapper_character`` by the ``escape_character`` when
it is found inside a column value. Defaults to ``False``.
:type double_wrapper_character_when_nested: :class:`bool <python:bool>`
:param escape_character: The character to use when escaping nested wrapper
characters. Defaults to ``\``.
:type escape_character: :class:`str <python:str>`
:param line_terminator: The character used to mark the end of a line.
Defaults to ``\r\n``.
:type line_terminator: :class:`str <python:str>`
:param config_set: If not :obj:`None <python:None>`, the named configuration set
to use. Defaults to :obj:`None <python:None>`.
:type config_set: :class:`str <python:str>` / :obj:`None <python:None>`
:returns: A string ending in ``line_terminator`` with the model's CSV column names
listed, separated by the ``delimiter``.
:rtype: :class:`str <python:str>`
"""
# pylint: disable=line-too-long
column_names = cls.get_csv_column_names(deserialize = deserialize,
serialize = serialize,
config_set = config_set)
header_string = cls._get_attribute_csv_header(column_names,
delimiter = delimiter,
wrap_all_strings = wrap_all_strings,
wrapper_character = wrapper_character,
double_wrapper_character_when_nested = double_wrapper_character_when_nested,
escape_character = escape_character,
line_terminator = line_terminator)
return header_string
def get_csv_data(self,
delimiter = '|',
wrap_all_strings = False,
null_text = 'None',
wrapper_character = "'",
double_wrapper_character_when_nested = False,
escape_character = "\\",
line_terminator = '\r\n',
config_set = None):
r"""Return the CSV representation of the model instance (record).
:param delimiter: The delimiter used between columns. Defaults to ``|``.
:type delimiter: :class:`str <python:str>`
:param wrap_all_strings: If ``True``, wraps any string data in the
``wrapper_character``. If ``None``, only wraps string data if it contains
the ``delimiter``. Defaults to ``False``.
:type wrap_all_strings: :class:`bool <python:bool>`
:param null_text: The text value to use in place of empty values. Only
applies if ``wrap_empty_values`` is ``True``. Defaults to ``'None'``.
:type null_text: :class:`str <python:str>`
:param wrapper_character: The string used to wrap string values when
wrapping is necessary. Defaults to ``'``.
:type wrapper_character: :class:`str <python:str>`
:param double_wrapper_character_when_nested: If ``True``, will double the
``wrapper_character`` when it is found inside a column value. If ``False``,
will precede the ``wrapper_character`` by the ``escape_character`` when
it is found inside a column value. Defaults to ``False``.
:type double_wrapper_character_when_nested: :class:`bool <python:bool>`
:param escape_character: The character to use when escaping nested wrapper
characters. Defaults to ``\``.
:type escape_character: :class:`str <python:str>`
:param line_terminator: The character used to mark the end of a line.
Defaults to ``\r\n``.
:type line_terminator: :class:`str <python:str>`
:param config_set: If not :obj:`None <python:None>`, the named configuration set
to use. Defaults to :obj:`None <python:None>`.
:type config_set: :class:`str <python:str>` / :obj:`None <python:None>`
:returns: Data from the object in CSV format ending in ``line_terminator``.
:rtype: :class:`str <python:str>`
"""
# pylint: disable=line-too-long
csv_attribute_names = [x
for x in self._get_csv_attribute_names(deserialize = None,
serialize = True,
config_set = config_set)
if hasattr(self, x)]
if not csv_attribute_names:
raise SerializableAttributeError("no 'csv' serializable attributes found")
data_row = self._get_attribute_csv_data(csv_attribute_names,
is_dumping = False,
delimiter = delimiter,
wrap_all_strings = wrap_all_strings,
null_text = null_text,
wrapper_character = wrapper_character,
double_wrapper_character_when_nested = double_wrapper_character_when_nested,
escape_character = escape_character,
line_terminator = line_terminator,
config_set = config_set)
return data_row
def to_csv(self,
include_header = False,
delimiter = '|',
wrap_all_strings = False,
null_text = 'None',
wrapper_character = "'",
double_wrapper_character_when_nested = False,
escape_character = "\\",
line_terminator = '\r\n',
config_set = None):
r"""Retrieve a CSV string with the object's data.
:param include_header: If ``True``, will include a header row with column
labels. If ``False``, will not include a header row. Defaults to ``True``.
:type include_header: :class:`bool <python:bool>`
:param delimiter: The delimiter used between columns. Defaults to ``|``.
:type delimiter: :class:`str <python:str>`
:param wrap_all_strings: If ``True``, wraps any string data in the
``wrapper_character``. If ``None``, only wraps string data if it contains
the ``delimiter``. Defaults to ``False``.
:type wrap_all_strings: :class:`bool <python:bool>`
:param null_text: The text value to use in place of empty values. Only
applies if ``wrap_empty_values`` is ``True``. Defaults to ``'None'``.
:type null_text: :class:`str <python:str>`
:param wrapper_character: The string used to wrap string values when
wrapping is necessary. Defaults to ``'``.
:type wrapper_character: :class:`str <python:str>`
:param double_wrapper_character_when_nested: If ``True``, will double the
``wrapper_character`` when it is found inside a column value. If ``False``,
will precede the ``wrapper_character`` by the ``escape_character`` when
it is found inside a column value. Defaults to ``False``.
:type double_wrapper_character_when_nested: :class:`bool <python:bool>`
:param escape_character: The character to use when escaping nested wrapper
characters. Defaults to ``\``.
:type escape_character: :class:`str <python:str>`
:param line_terminator: The character used to mark the end of a line.
Defaults to ``\r\n``.
:type line_terminator: :class:`str <python:str>`
:param config_set: If not :obj:`None <python:None>`, the named configuration set
to use. Defaults to :obj:`None <python:None>`.
:type config_set: :class:`str <python:str>` / :obj:`None <python:None>`
:returns: Data from the object in CSV format ending in a newline (``\n``).
:rtype: :class:`str <python:str>`
"""
if include_header:
return self.get_csv_header(delimiter = delimiter,
config_set = config_set) + \
self.get_csv_data(delimiter = delimiter,
wrap_all_strings = wrap_all_strings,
null_text = null_text,
wrapper_character = wrapper_character,
double_wrapper_character_when_nested = double_wrapper_character_when_nested, # pylint: disable=line-too-long
escape_character = escape_character,
line_terminator = line_terminator,
config_set = config_set)
return self.get_csv_data(delimiter = delimiter,
wrap_all_strings = wrap_all_strings,
null_text = null_text,
wrapper_character = wrapper_character,
double_wrapper_character_when_nested = double_wrapper_character_when_nested, # pylint: disable=line-too-long
escape_character = escape_character,
line_terminator = line_terminator,
config_set = config_set)
@classmethod
def _parse_csv(cls,
csv_data,
delimiter = '|',
wrap_all_strings = False,
null_text = 'None',
wrapper_character = "'",
double_wrapper_character_when_nested = False,
escape_character = "\\",
line_terminator = '\r\n',
config_set = None):
"""Generate a :class:`dict <python:dict>` from a CSV record.
.. tip::
Unwrapped empty column values are automatically interpreted as null
(:obj:`None <python:None>`).
:param csv_data: The CSV record. Should be a single row and should **not**
include column headers.
:type csv_data: :class:`str <python:str>`
:param delimiter: The delimiter used between columns. Defaults to ``|``.
:type delimiter: :class:`str <python:str>`
:param wrapper_character: The string used to wrap string values when
wrapping is applied. Defaults to ``'``.
:type wrapper_character: :class:`str <python:str>`
:param null_text: The string used to indicate an empty value if empty
values are wrapped. Defaults to `None`.
:type null_text: :class:`str <python:str>`
:param config_set: If not :obj:`None <python:None>`, the named configuration set
to use. Defaults to :obj:`None <python:None>`.
:type config_set: :class:`str <python:str>` / :obj:`None <python:None>`
:returns: A :class:`dict <python:dict>` representation of the CSV record.
:rtype: :class:`dict <python:dict>`
:raises DeserializationError: if ``csv_data`` is not a valid
:class:`str <python:str>`
:raises CSVStructureError: if the columns in ``csv_data`` do not match
the expected columns returned by
:func:`get_csv_column_names() <BaseModel.get_csv_column_names>`
:raises ValueDeserializationError: if a value extracted from the CSV
failed when executing its :term:`de-serialization function`.
"""
try:
csv_data = validators.string(csv_data, allow_empty = False)
except (ValueError, TypeError):
raise DeserializationError("csv_data expects a 'str', received '%s'" \
% type(csv_data))
if not wrapper_character:
wrapper_character = '\''
if wrap_all_strings:
quoting = csv.QUOTE_NONNUMERIC
else:
quoting = csv.QUOTE_MINIMAL
if 'sqlathanor' in csv.list_dialects():
csv.unregister_dialect('sqlathanor')
csv.register_dialect('sqlathanor',
delimiter = delimiter,
doublequote = double_wrapper_character_when_nested,
escapechar = escape_character,
quotechar = wrapper_character,
quoting = quoting,
lineterminator = line_terminator)
csv_column_names = [x
for x in cls.get_csv_column_names(deserialize = True,
serialize = None,
config_set = config_set)]
csv_reader = csv.DictReader([csv_data],
fieldnames = csv_column_names,
dialect = 'sqlathanor',
restkey = None,
restval = None)
rows = [x for x in csv_reader]
if len(rows) > 1:
raise CSVStructureError('expected 1 row of data, received %s' % len(csv_reader))
elif len(rows) == 0:
data = dict_()
for column_name in csv_column_names:
data[column_name] = None
else:
data = rows[0]
if data.get(None, None) is not None:
raise CSVStructureError('expected %s fields, found %s' % (len(csv_column_names),
len(data.keys())))
deserialized_data = dict_()
for key in data:
if data[key] == null_text:
deserialized_data[key] = None
continue
attribute_name = cls._get_attribute_name(key)
deserialized_value = cls._get_deserialized_value(data[key],
'csv',
key,
config_set = config_set)
deserialized_data[attribute_name] = deserialized_value
csv.unregister_dialect('sqlathanor')
return deserialized_data
def update_from_csv(self,
csv_data,
delimiter = '|',
wrap_all_strings = False,
null_text = 'None',
wrapper_character = "'",
double_wrapper_character_when_nested = False,
escape_character = "\\",
line_terminator = '\r\n',
config_set = None):
"""Update the model instance from a CSV record.
.. tip::
Unwrapped empty column values are automatically interpreted as null
(:obj:`None <python:None>`).
:param csv_data: The CSV data. If a Path-like object, will read the first
record from a file that is assumed to include a header row. If a
:class:`str <python:str>` and has more than one record (line), will assume
the first line is a header row.
:type csv_data: :class:`str <python:str>` / Path-like object
:param delimiter: The delimiter used between columns. Defaults to ``|``.
:type delimiter: :class:`str <python:str>`
:param wrapper_character: The string used to wrap string values when
wrapping is applied. Defaults to ``'``.
:type wrapper_character: :class:`str <python:str>`
:param null_text: The string used to indicate an empty value if empty
values are wrapped. Defaults to `None`.
:type null_text: :class:`str <python:str>`
:param config_set: If not :obj:`None <python:None>`, the named configuration set
to use. Defaults to :obj:`None <python:None>`.
:type config_set: :class:`str <python:str>` / :obj:`None <python:None>`
:raises DeserializationError: if ``csv_data`` is not a valid
:class:`str <python:str>`
:raises CSVStructureError: if the columns in ``csv_data`` do not match
the expected columns returned by
:func:`get_csv_column_names() <BaseModel.get_csv_column_names>`
:raises ValueDeserializationError: if a value extracted from the CSV
failed when executing its :term:`de-serialization function`.
"""
csv_data = read_csv_data(csv_data,
single_record = True)
data = self._parse_csv(csv_data,
delimiter = delimiter,
wrap_all_strings = wrap_all_strings,
null_text = null_text,
wrapper_character = wrapper_character,
double_wrapper_character_when_nested = double_wrapper_character_when_nested,
escape_character = escape_character,
line_terminator = line_terminator,
config_set = config_set)
for key in data:
setattr(self, key, data[key])
@classmethod
def new_from_csv(cls,
csv_data,
delimiter = '|',
wrap_all_strings = False,
null_text = 'None',
wrapper_character = "'",
double_wrapper_character_when_nested = False,
escape_character = "\\",
line_terminator = '\r\n',
config_set = None):
"""Create a new model instance from a CSV record.
.. tip::
Unwrapped empty column values are automatically interpreted as null
(:obj:`None <python:None>`).
:param csv_data: The CSV data. If a Path-like object, will read the first
record from a file that is assumed to include a header row. If a
:class:`str <python:str>` and has more than one record (line), will assume
the first line is a header row.
:type csv_data: :class:`str <python:str>` / Path-like object
:param delimiter: The delimiter used between columns. Defaults to ``|``.
:type delimiter: :class:`str <python:str>`
:param wrapper_character: The string used to wrap string values when
wrapping is applied. Defaults to ``'``.
:type wrapper_character: :class:`str <python:str>`
:param null_text: The string used to indicate an empty value if empty
values are wrapped. Defaults to `None`.
:type null_text: :class:`str <python:str>`
:param config_set: If not :obj:`None <python:None>`, the named configuration set
to use. Defaults to :obj:`None <python:None>`.
:type config_set: :class:`str <python:str>` / :obj:`None <python:None>`
:returns: A :term:`model instance` created from the record.
:rtype: model instance
:raises DeserializationError: if ``csv_data`` is not a valid
:class:`str <python:str>`
:raises CSVStructureError: if the columns in ``csv_data`` do not match
the expected columns returned by
:func:`get_csv_column_names() <BaseModel.get_csv_column_names>`
:raises ValueDeserializationError: if a value extracted from the CSV
failed when executing its :term:`de-serialization function`.
"""
csv_data = read_csv_data(csv_data,
single_record = True)
data = cls._parse_csv(csv_data,
delimiter = delimiter,
wrap_all_strings = wrap_all_strings,
null_text = null_text,
wrapper_character = wrapper_character,
double_wrapper_character_when_nested = double_wrapper_character_when_nested,
escape_character = escape_character,
line_terminator = line_terminator,
config_set = config_set)
return cls(**data)
def dump_to_csv(self,
include_header = False,
delimiter = '|',
wrap_all_strings = False,
null_text = 'None',
wrapper_character = "'",
double_wrapper_character_when_nested = False,
escape_character = "\\",
line_terminator = '\r\n',
config_set = None):
r"""Retrieve a :term:`CSV <Comma-Separated Value (CSV)>` representation of
the object, *with all attributes* serialized regardless of configuration.
.. caution::
Nested objects (such as :term:`relationships <relationship>` or
:term:`association proxies <association proxy>`) will **not**
be serialized.
.. note::
This method ignores any ``display_name`` contributed on the
:class:`AttributeConfiguration`.
:param include_header: If ``True``, will include a header row with column
labels. If ``False``, will not include a header row. Defaults to ``True``.
:type include_header: :class:`bool <python:bool>`
:param delimiter: The delimiter used between columns. Defaults to ``|``.
:type delimiter: :class:`str <python:str>`
:param wrap_all_strings: If ``True``, wraps any string data in the
``wrapper_character``. If ``None``, only wraps string data if it contains
the ``delimiter``. Defaults to ``False``.
:type wrap_all_strings: :class:`bool <python:bool>`
:param null_text: The text value to use in place of empty values. Only
applies if ``wrap_empty_values`` is ``True``. Defaults to ``'None'``.
:type null_text: :class:`str <python:str>`
:param wrapper_character: The string used to wrap string values when
wrapping is necessary. Defaults to ``'``.
:type wrapper_character: :class:`str <python:str>`
:param double_wrapper_character_when_nested: If ``True``, will double the
``wrapper_character`` when it is found inside a column value. If ``False``,
will precede the ``wrapper_character`` by the ``escape_character`` when
it is found inside a column value. Defaults to ``False``.
:type double_wrapper_character_when_nested: :class:`bool <python:bool>`
:param escape_character: The character to use when escaping nested wrapper
characters. Defaults to ``\``.
:type escape_character: :class:`str <python:str>`
:param line_terminator: The character used to mark the end of a line.
Defaults to ``\r\n``.
:type line_terminator: :class:`str <python:str>`
:param config_set: If not :obj:`None <python:None>`, the named configuration set
to use. Defaults to :obj:`None <python:None>`.
:type config_set: :class:`str <python:str>` / :obj:`None <python:None>`
:returns: Data from the object in CSV format ending in a newline (``\n``).
:rtype: :class:`str <python:str>`
"""
# pylint: disable=line-too-long
attributes = [x for x in get_attribute_names(self,
include_callable = False,
include_nested = False,
include_private = True,
include_special = False,
include_utilities = False)
if x[0:2] != '__']
if include_header:
return self._get_attribute_csv_header(attributes,
delimiter = delimiter) + \
self._get_attribute_csv_data(attributes,
is_dumping = True,
delimiter = delimiter,
wrap_all_strings = wrap_all_strings,
null_text = null_text,
wrapper_character = wrapper_character,
double_wrapper_character_when_nested = double_wrapper_character_when_nested,
escape_character = escape_character,
line_terminator = line_terminator,
config_set = config_set)
return self._get_attribute_csv_data(attributes,
is_dumping = True,
delimiter = delimiter,
wrap_all_strings = wrap_all_strings,
null_text = null_text,
wrapper_character = wrapper_character,
double_wrapper_character_when_nested = double_wrapper_character_when_nested,
escape_character = escape_character,
line_terminator = line_terminator,
config_set = config_set)
| 47.261176
| 149
| 0.550956
| 4,186
| 40,172
| 5.107979
| 0.068562
| 0.082312
| 0.043869
| 0.053269
| 0.833926
| 0.821018
| 0.814423
| 0.813114
| 0.806192
| 0.800486
| 0
| 0.000351
| 0.36157
| 40,172
| 849
| 150
| 47.316843
| 0.833353
| 0.465971
| 0
| 0.673295
| 0
| 0
| 0.022866
| 0
| 0
| 0
| 0
| 0.001178
| 0
| 1
| 0.03125
| false
| 0
| 0.014205
| 0
| 0.082386
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
ae2734936eeed6f997fcf96dd801eccf77bb1a41
| 97
|
py
|
Python
|
graphite_datadog/__init__.py
|
iksaif/graphite-datadog
|
64a43aaae5dedc7d2a5c646b7e7a2d09b3ed23d0
|
[
"Apache-2.0"
] | null | null | null |
graphite_datadog/__init__.py
|
iksaif/graphite-datadog
|
64a43aaae5dedc7d2a5c646b7e7a2d09b3ed23d0
|
[
"Apache-2.0"
] | null | null | null |
graphite_datadog/__init__.py
|
iksaif/graphite-datadog
|
64a43aaae5dedc7d2a5c646b7e7a2d09b3ed23d0
|
[
"Apache-2.0"
] | null | null | null |
from graphite_datadog.finder import DataDogFinder
from graphite_datadog.tags import DataDogTagDB
| 32.333333
| 49
| 0.896907
| 12
| 97
| 7.083333
| 0.666667
| 0.282353
| 0.447059
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.082474
| 97
| 2
| 50
| 48.5
| 0.955056
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
882596c30ce388aeb7e0ea8d7d3dd26be264c533
| 15,990
|
py
|
Python
|
autopandas_v2/generators/ml/training/cli.py
|
chyanju/autopandas
|
16080ad12f0e8e7b0a614671aea1ed57b3fed7fe
|
[
"BSD-3-Clause"
] | 16
|
2019-08-13T02:49:44.000Z
|
2022-02-08T03:14:34.000Z
|
autopandas_v2/generators/ml/training/cli.py
|
chyanju/autopandas
|
16080ad12f0e8e7b0a614671aea1ed57b3fed7fe
|
[
"BSD-3-Clause"
] | 2
|
2020-09-25T22:40:40.000Z
|
2022-02-09T23:42:53.000Z
|
autopandas_v2/generators/ml/training/cli.py
|
chyanju/autopandas
|
16080ad12f0e8e7b0a614671aea1ed57b3fed7fe
|
[
"BSD-3-Clause"
] | 3
|
2021-07-06T10:30:36.000Z
|
2022-01-11T23:21:31.000Z
|
import glob
import json
import os
import pandas as pd
from argparse import ArgumentParser
from autopandas_v2.utils import logger
from autopandas_v2.utils.cli import ArgNamespace, subcommand
def parse_args(parser: ArgumentParser):
    """Register the ``train-generators``, ``train-functions`` and ``analyze``
    subcommands (and their arguments) on ``parser``.

    The shared argument sets previously duplicated verbatim between the two
    training subcommands are now added by private helpers.

    :param parser: The top-level argument parser to attach subcommands to.
    """
    parser_common = ArgumentParser(add_help=False)
    parser_common.add_argument("--device", type=str, default=None,
                               help="ID of Device (GPU) to use")

    def _add_cache_args(subparser):
        # Flags controlling how processed graphs are cached.
        # BUGFIX: the implicitly-concatenated help strings were missing a
        # separating space ("easilyrun ...", "every timeSpeeds ...").
        subparser.add_argument("--use-memory", default=False, action="store_true",
                               help="Store all processed graphs in memory. Fastest processing, but can easily "
                                    "run out of memory")
        subparser.add_argument("--use-disk", default=False, action="store_true",
                               help="Use disk for storing processed graphs as opposed to computing them every time. "
                                    "Speeds things up a lot but can take a lot of space")

    def _add_training_args(subparser, modeldir_help):
        # Arguments common to both training subcommands.
        subparser.add_argument("modeldir", type=str, help=modeldir_help)
        subparser.add_argument("--config", type=str, default=None,
                               help="File containing hyper-parameter configuration (JSON format)")
        subparser.add_argument("--config-str", type=str, default=None,
                               help="String containing hyper-parameter configuration (JSON format)")
        _add_cache_args(subparser)
        subparser.add_argument("--train", default=None, type=str, required=True,
                               help="Path to train file")
        subparser.add_argument("--valid", default=None, type=str, required=True,
                               help="Path to validation file")
        subparser.add_argument("--restore-file", default=None, type=str,
                               help="File to restore weights from")
        subparser.add_argument("--restore-params", default=None, type=str,
                               help="File to restore params from (pkl)")
        subparser.add_argument("--freeze-graph-model", default=False, action="store_true",
                               help="Freeze graph model components")
        subparser.add_argument("--load-shuffle", default=False, action="store_true",
                               help="Shuffle data when loading. Useful when passing num-training-points")
        subparser.add_argument("--num-epochs", default=100, type=int,
                               help="Maximum number of epochs to run training for")
        subparser.add_argument("--patience", default=25, type=int,
                               help="Maximum number of epochs to wait for validation accuracy to increase")
        subparser.add_argument("--num-training-points", default=-1, type=int,
                               help="Number of training points to use. Default : -1 (all)")

    @subcommand(parser, cmd='train-generators', help='Perform Training for Generators', dest='training_subcommand',
                inherit_from=[parser_common])
    def cmd_train_generators(parser):
        _add_training_args(parser, "Path to the directory to save the model(s) in")
        # Extra options that only apply to per-generator training.
        parser.add_argument("--include", nargs="+", type=str, default=None,
                            help="fn:identifier tuples to include in training list")
        parser.add_argument("--restore-if-exists", default=False, action='store_true',
                            help="If a model already exists, pick up training from there")
        parser.add_argument("--ignore-if-exists", default=False, action="store_true",
                            help="If the model exists, skip.")

    @subcommand(parser, cmd='train-functions', help='Perform Training for Predicting Functions',
                dest='training_subcommand', inherit_from=[parser_common])
    def cmd_train_functions(parser):
        _add_training_args(parser, "Path to the directory to save the model in")

    @subcommand(parser, cmd='analyze', help='Perform Analysis of Model', dest='training_subcommand')
    def cmd_analyze(parser):
        parser.add_argument("modeldir", type=str,
                            help="Path to the directory to save the model(s) in")
        parser.add_argument("outfile", type=str,
                            help="Path to output file")
        parser.add_argument("--config", type=str, default=None,
                            help="File containing hyper-parameter configuration (JSON format)")
        parser.add_argument("--test", default=None, type=str, required=True,
                            help="Path to test")
        parser.add_argument("--top-k", default=1, type=int, required=True,
                            help="Top-k")
        _add_cache_args(parser)
        parser.add_argument("--include", nargs="+", type=str, default=None,
                            help="fn:identifier tuples to include in testing list")
def run_training_generators(cmd_args: ArgNamespace):
    """Run generator-model training for every (function, identifier) pair that
    has training data under ``cmd_args.train``, honoring ``cmd_args.include``.

    Failures in one pair are logged and do not stop the remaining pairs.
    """
    # Get the functions for which training data has been generated
    fnames = list(map(os.path.basename, glob.glob(cmd_args.train + '/*')))
    for fname in fnames:
        identifiers = list(map(os.path.basename, glob.glob(cmd_args.train + '/' + fname + '/*.pkl')))
        for identifier in identifiers:
            identifier = identifier[:-len(".pkl")]
            if cmd_args.include is not None and '{}:{}'.format(fname, identifier) not in cmd_args.include:
                continue

            logger.info("Performing training for {}:{}".format(fname, identifier))
            try:
                run_training_generators_helper(fname, identifier, cmd_args)
            except Exception as e:
                # BUGFIX: the original bare ``except: continue`` silently
                # swallowed *all* exceptions, including KeyboardInterrupt and
                # SystemExit, and hid the failure entirely. Narrow the catch
                # and log the failure before moving on to the next pair.
                logger.info("Training failed for {}:{} ({})".format(fname, identifier, e))
                continue
def run_training_generators_helper(fname: str, identifier: str, cmd_args: ArgNamespace):
    """Train the generator model for one ``fname``/``identifier`` pair.

    Builds the train/valid/model paths, assembles a ``ParamsNamespace`` from
    the config file and/or config string, selects the model class from the
    operator prefix of ``identifier``, and runs training.

    :raises Exception: if the training or validation data path does not exist.
    :raises NotImplementedError: if ``identifier`` does not start with a known
        operator prefix.
    """
    from autopandas_v2.generators.ml.networks.ggnn.ops.choice import ModelChoice
    from autopandas_v2.generators.ml.networks.ggnn.ops.chain import ModelChain
    from autopandas_v2.generators.ml.networks.ggnn.ops.select import ModelSelect
    from autopandas_v2.generators.ml.networks.ggnn.ops.subsets import ModelSubsets
    from autopandas_v2.generators.ml.networks.ggnn.ops.orderedsubsets import ModelOrderedSubsets
    from autopandas_v2.ml.networks.ggnn.utils import ParamsNamespace

    train_path = '{}/{}/{}.pkl'.format(cmd_args.train, fname, identifier)
    valid_path = '{}/{}/{}.pkl'.format(cmd_args.valid, fname, identifier)
    model_path = '{}/{}/{}'.format(cmd_args.modeldir, fname, identifier)

    if not os.path.exists(train_path):
        raise Exception("Training data path {} does not exist".format(train_path))

    if not os.path.exists(valid_path):
        raise Exception("Validation data path {} does not exist".format(valid_path))

    if cmd_args.ignore_if_exists and os.path.exists(model_path + '/model_best.pickle'):
        logger.info("Skipping training for {}:{} as model already exists".format(fname, identifier))
        return

    # IMPROVEMENT: create the directory in-process instead of shelling out to
    # ``mkdir -p`` — portable, and safe for paths with shell metacharacters.
    os.makedirs(model_path, exist_ok=True)

    ggnn_args = ArgNamespace.from_namespace(cmd_args)
    ggnn_args.train = train_path
    ggnn_args.valid = valid_path
    ggnn_args.outdir = model_path
    ggnn_args.mode = 'train'
    if cmd_args.restore_if_exists and os.path.exists(model_path + '/model_best.pickle'):
        ggnn_args.restore = model_path + '/model_best.pickle'

    params = ParamsNamespace()
    if cmd_args.config is not None:
        with open(cmd_args.config, 'r') as f:
            params.update(json.load(f))

    if cmd_args.config_str is not None:
        params.update(json.loads(cmd_args.config_str))

    params.args = ParamsNamespace()
    params.args.update(ggnn_args)
    params.use_directed_edges = True

    # The operator prefix of the identifier selects the model architecture.
    if identifier.startswith("choice"):
        model = ModelChoice.from_params(params)
    elif identifier.startswith("chain"):
        model = ModelChain.from_params(params)
    elif identifier.startswith("select"):
        model = ModelSelect.from_params(params)
    elif identifier.startswith("subsets"):
        model = ModelSubsets.from_params(params)
    elif identifier.startswith("orderedsubsets"):
        model = ModelOrderedSubsets.from_params(params)
    else:
        raise NotImplementedError("Model not defined for operator {}".format(identifier.split('_')[0]))

    model.run()
def run_training_functions(cmd_args: ArgNamespace):
    """Train the function-prediction (GGNN sequence) model described by
    ``cmd_args``, writing outputs under ``cmd_args.modeldir``.
    """
    from autopandas_v2.ml.networks.ggnn.models.sparse.seq.static_rnn import GGNNSeqStaticRNN
    from autopandas_v2.ml.networks.ggnn.utils import ParamsNamespace

    train_path = cmd_args.train
    valid_path = cmd_args.valid
    model_path = cmd_args.modeldir
    # IMPROVEMENT: replace ``os.system('mkdir -p ...')`` with an in-process,
    # portable directory creation (also avoids shell-quoting issues).
    if not os.path.exists(model_path):
        os.makedirs(model_path, exist_ok=True)

    ggnn_args = ArgNamespace.from_namespace(cmd_args)
    ggnn_args.train = train_path
    ggnn_args.valid = valid_path
    ggnn_args.outdir = model_path
    ggnn_args.mode = 'train'

    params = ParamsNamespace()
    if cmd_args.config is not None:
        with open(cmd_args.config, 'r') as f:
            params.update(json.load(f))

    if cmd_args.config_str is not None:
        params.update(json.loads(cmd_args.config_str))

    params.args = ParamsNamespace()
    params.args.update(ggnn_args)
    params.use_directed_edges = True

    model = GGNNSeqStaticRNN.from_params(params)
    model.run()
def run_training_next_function(cmd_args: ArgNamespace):
    """Train the next-function (sparse GGNN) model described by ``cmd_args``,
    writing outputs under ``cmd_args.modeldir``.
    """
    from autopandas_v2.ml.networks.ggnn.models.sparse.base import SparseGGNN
    from autopandas_v2.ml.networks.ggnn.utils import ParamsNamespace

    train_path = cmd_args.train
    valid_path = cmd_args.valid
    model_path = cmd_args.modeldir
    # IMPROVEMENT: replace ``os.system('mkdir -p ...')`` with an in-process,
    # portable directory creation (also avoids shell-quoting issues).
    if not os.path.exists(model_path):
        os.makedirs(model_path, exist_ok=True)

    ggnn_args = ArgNamespace.from_namespace(cmd_args)
    ggnn_args.train = train_path
    ggnn_args.valid = valid_path
    ggnn_args.outdir = model_path
    ggnn_args.mode = 'train'

    params = ParamsNamespace()
    if cmd_args.config is not None:
        with open(cmd_args.config, 'r') as f:
            params.update(json.load(f))

    if cmd_args.config_str is not None:
        params.update(json.loads(cmd_args.config_str))

    params.args = ParamsNamespace()
    params.args.update(ggnn_args)
    params.use_directed_edges = True

    model = SparseGGNN.from_params(params)
    model.run()
def run_analysis(cmd_args: ArgNamespace):
    """Run analysis for every (function, identifier) pair with test data under
    ``cmd_args.test`` and write all results as CSV to ``cmd_args.outfile``.
    """
    # Get the functions for which training data has been generated
    function_names = [os.path.basename(path) for path in glob.glob(cmd_args.test + '/*')]
    results = []
    for function_name in function_names:
        pickle_paths = glob.glob(cmd_args.test + '/' + function_name + '/*.pkl')
        for pickle_path in pickle_paths:
            identifier = os.path.basename(pickle_path)[:-len(".pkl")]
            label = '{}:{}'.format(function_name, identifier)
            if cmd_args.include is not None and label not in cmd_args.include:
                continue

            logger.info("Performing Analysis for {}:{}".format(function_name, identifier))
            result = run_analysis_helper(function_name, identifier, cmd_args)
            result['Name'] = label
            results.append(result)

    with open(cmd_args.outfile, 'w') as f:
        print(pd.DataFrame(results).to_csv(), file=f)
def run_analysis_helper(fname: str, identifier: str, cmd_args: ArgNamespace):
    """Run analysis for one ``fname``/``identifier`` pair and return the
    model's analysis result.

    Selects the model class from the operator prefix of ``identifier`` and
    calls its ``run_analysis`` on the pair's test pickle.

    :raises Exception: if the test data path does not exist.
    :raises NotImplementedError: if ``identifier`` does not start with a known
        operator prefix.
    """
    from autopandas_v2.generators.ml.networks.ggnn.ops.choice import ModelChoice
    from autopandas_v2.generators.ml.networks.ggnn.ops.chain import ModelChain
    from autopandas_v2.generators.ml.networks.ggnn.ops.select import ModelSelect
    from autopandas_v2.generators.ml.networks.ggnn.ops.subsets import ModelSubsets
    from autopandas_v2.generators.ml.networks.ggnn.ops.orderedsubsets import ModelOrderedSubsets
    from autopandas_v2.ml.networks.ggnn.utils import ParamsNamespace

    test_path = '{}/{}/{}.pkl'.format(cmd_args.test, fname, identifier)
    model_path = '{}/{}/{}'.format(cmd_args.modeldir, fname, identifier)
    if not os.path.exists(test_path):
        raise Exception("Test data path {} does not exist".format(test_path))

    # IMPROVEMENT: create the directory in-process instead of shelling out to
    # ``mkdir -p`` — portable, and safe for paths with shell metacharacters.
    os.makedirs(model_path, exist_ok=True)

    ggnn_args = ArgNamespace.from_namespace(cmd_args)
    ggnn_args.test = test_path
    ggnn_args.outdir = model_path
    # NOTE(review): mode stays 'train' even though this is analysis —
    # presumably required by the GGNN argument handling; confirm before changing.
    ggnn_args.mode = 'train'
    ggnn_args.model = model_path

    params = ParamsNamespace()
    if cmd_args.config is not None:
        with open(cmd_args.config, 'r') as f:
            params.update(json.load(f))

    params.args = ParamsNamespace()
    params.args.update(ggnn_args)
    params.use_directed_edges = True

    # The operator prefix of the identifier selects the model architecture.
    if identifier.startswith("choice"):
        model = ModelChoice.from_params(params)
    elif identifier.startswith("chain"):
        model = ModelChain.from_params(params)
    elif identifier.startswith("select"):
        model = ModelSelect.from_params(params)
    elif identifier.startswith("subsets"):
        model = ModelSubsets.from_params(params)
    elif identifier.startswith("orderedsubsets"):
        model = ModelOrderedSubsets.from_params(params)
    else:
        raise NotImplementedError("Model not defined for operator {}".format(identifier.split('_')[0]))

    return model.run_analysis(test_path)
def run(cmd_args: ArgNamespace):
    """Dispatch to the handler for the selected training subcommand.

    Unknown subcommands are silently ignored, matching the original
    if/elif chain's fall-through behavior.
    """
    handlers = {
        'train-generators': run_training_generators,
        'train-functions': run_training_functions,
        'analyze': run_analysis,
    }
    handler = handlers.get(cmd_args.training_subcommand)
    if handler is not None:
        handler(cmd_args)
| 45.426136
| 115
| 0.649406
| 1,950
| 15,990
| 5.185128
| 0.114872
| 0.038077
| 0.065572
| 0.027297
| 0.854416
| 0.817822
| 0.810108
| 0.797053
| 0.797053
| 0.784987
| 0
| 0.002888
| 0.241964
| 15,990
| 351
| 116
| 45.555556
| 0.831285
| 0.00763
| 0
| 0.7
| 0
| 0
| 0.215772
| 0.002648
| 0
| 0
| 0
| 0
| 0
| 1
| 0.040741
| false
| 0.007407
| 0.085185
| 0
| 0.133333
| 0.003704
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
8838aab3ee2a3c99700dbfc0d21ec355b307ecd5
| 28
|
py
|
Python
|
bot/code/main.py
|
luckydonald/docker-telegram-bot
|
df1bcd46b7005276486ea9de95e8067e8ffd0bf9
|
[
"MIT"
] | 10
|
2018-07-31T14:51:34.000Z
|
2021-01-17T12:41:24.000Z
|
bot/code/main.py
|
luckydonald/docker-telegram-bot
|
df1bcd46b7005276486ea9de95e8067e8ffd0bf9
|
[
"MIT"
] | 1
|
2019-04-08T13:50:12.000Z
|
2019-05-06T09:23:06.000Z
|
bot/code/main.py
|
luckydonald/docker-telegram-bot
|
df1bcd46b7005276486ea9de95e8067e8ffd0bf9
|
[
"MIT"
] | 6
|
2018-12-15T19:50:44.000Z
|
2021-06-02T23:25:43.000Z
|
from example.main import app
| 28
| 28
| 0.857143
| 5
| 28
| 4.8
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.107143
| 28
| 1
| 28
| 28
| 0.96
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
88af2289542f490d45b93ac4ab80daec18c133c3
| 156
|
py
|
Python
|
tests/mods/bad_import/bad_import.py
|
Akm0d/pop
|
77d9f6e6de8e02aa2ee5520d0aa0052fabd53243
|
[
"Apache-2.0"
] | 48
|
2019-05-21T16:10:49.000Z
|
2021-12-04T18:02:20.000Z
|
tests/mods/bad_import/bad_import.py
|
Akm0d/pop
|
77d9f6e6de8e02aa2ee5520d0aa0052fabd53243
|
[
"Apache-2.0"
] | 43
|
2019-05-21T22:39:44.000Z
|
2020-02-07T16:37:29.000Z
|
tests/mods/bad_import/bad_import.py
|
Akm0d/pop
|
77d9f6e6de8e02aa2ee5520d0aa0052fabd53243
|
[
"Apache-2.0"
] | 18
|
2019-05-21T16:10:42.000Z
|
2019-12-13T16:28:36.000Z
|
# -*- coding: utf-8 -*-
'''
Fail to load to test load errors
'''
# pylint: disable=unused-import
import foobar123456foobar
def func():
    """Return a fixed placeholder string.

    This module exists only to exercise load-error handling (its top-level
    import is deliberately broken), so the function body is trivial.
    """
    message = 'wha?'
    return message
| 13
| 32
| 0.647436
| 20
| 156
| 5.05
| 0.85
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.055556
| 0.192308
| 156
| 11
| 33
| 14.181818
| 0.746032
| 0.544872
| 0
| 0
| 0
| 0
| 0.064516
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 1
| 1
| 0
|
0
| 6
|
ee0c0fa1540d08a13eb2e616ab4a6ce4421688a1
| 42
|
py
|
Python
|
randm/__init__.py
|
Bejjox/BejjoLib
|
ba0e595d40b49a025ef37ca3122c7b25d1a7ecca
|
[
"MIT"
] | 1
|
2020-07-14T17:09:19.000Z
|
2020-07-14T17:09:19.000Z
|
randm/__init__.py
|
Bejjox/BejjoLib
|
ba0e595d40b49a025ef37ca3122c7b25d1a7ecca
|
[
"MIT"
] | 1
|
2020-11-01T15:24:36.000Z
|
2020-11-01T15:25:09.000Z
|
randm/__init__.py
|
Bejjox/BejjoLib
|
ba0e595d40b49a025ef37ca3122c7b25d1a7ecca
|
[
"MIT"
] | 1
|
2020-07-14T17:09:31.000Z
|
2020-07-14T17:09:31.000Z
|
from .string import *
from .char import *
| 14
| 21
| 0.714286
| 6
| 42
| 5
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.190476
| 42
| 2
| 22
| 21
| 0.882353
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
ee5332b519d2301568e935b122ad7a38590f63cb
| 2,643
|
py
|
Python
|
project/dataset/download_dataset.py
|
shashank2000/JeopardyContrastive
|
6cfbcc78d1f1e3adff87f6854d71c33ae6a9a7f0
|
[
"Apache-2.0"
] | null | null | null |
project/dataset/download_dataset.py
|
shashank2000/JeopardyContrastive
|
6cfbcc78d1f1e3adff87f6854d71c33ae6a9a7f0
|
[
"Apache-2.0"
] | 2
|
2020-11-11T13:58:43.000Z
|
2021-01-19T12:41:44.000Z
|
project/dataset/download_dataset.py
|
shashank2000/JeopardyContrastive
|
6cfbcc78d1f1e3adff87f6854d71c33ae6a9a7f0
|
[
"Apache-2.0"
] | null | null | null |
# download the VQA questions and answers (annotations)
import os
from os import path as osp
dir_raw = '/data5/shashank2000/' # enter directory here
dir_zip = osp.join(dir_raw, 'zipfiles')
os.system('mkdir -p ' + dir_zip)
dir_ann = osp.join(dir_raw, 'final_json')
os.system('mkdir -p ' + dir_ann)
os.system('wget https://s3.amazonaws.com/cvmlp/vqa/mscoco/vqa/v2_Questions_Train_mscoco.zip -P '+dir_zip)
os.system('wget https://s3.amazonaws.com/cvmlp/vqa/mscoco/vqa/v2_Questions_Val_mscoco.zip -P '+dir_zip)
os.system('wget https://s3.amazonaws.com/cvmlp/vqa/mscoco/vqa/v2_Questions_Test_mscoco.zip -P '+dir_zip)
os.system('wget https://s3.amazonaws.com/cvmlp/vqa/mscoco/vqa/v2_Annotations_Train_mscoco.zip -P '+dir_zip)
os.system('wget https://s3.amazonaws.com/cvmlp/vqa/mscoco/vqa/v2_Annotations_Val_mscoco.zip -P '+dir_zip)
os.system('wget https://s3.amazonaws.com/cvmlp/vqa/mscoco/vqa/v2_Complementary_Pairs_Train_mscoco.zip -P '+dir_zip)
os.system('wget https://s3.amazonaws.com/cvmlp/vqa/mscoco/vqa/v2_Complementary_Pairs_Val_mscoco.zip -P '+dir_zip)
os.system('unzip '+osp.join(dir_zip, 'v2_Questions_Train_mscoco.zip')+' -d '+dir_ann)
os.system('unzip '+osp.join(dir_zip, 'v2_Questions_Val_mscoco.zip')+' -d '+dir_ann)
os.system('unzip '+osp.join(dir_zip, 'v2_Questions_Test_mscoco.zip')+' -d '+dir_ann)
os.system('unzip '+osp.join(dir_zip, 'v2_Annotations_Train_mscoco.zip')+' -d '+dir_ann)
os.system('unzip '+osp.join(dir_zip, 'v2_Annotations_Val_mscoco.zip')+' -d '+dir_ann)
os.system('unzip '+osp.join(dir_zip, 'v2_Complementary_Pairs_Train_mscoco.zip')+' -d '+dir_ann)
os.system('unzip '+osp.join(dir_zip, 'v2_Complementary_Pairs_Val_mscoco.zip')+' -d '+dir_ann)
os.system('mv '+osp.join(dir_ann, 'v2_mscoco_train2014_annotations.json')+' '
+osp.join(dir_ann, 'mscoco_train2014_annotations.json'))
os.system('mv '+osp.join(dir_ann, 'v2_mscoco_val2014_annotations.json')+' '
+osp.join(dir_ann, 'mscoco_val2014_annotations.json'))
os.system('mv '+osp.join(dir_ann, 'v2_OpenEnded_mscoco_train2014_questions.json')+' '
+osp.join(dir_ann, 'OpenEnded_mscoco_train2014_questions.json'))
os.system('mv '+osp.join(dir_ann, 'v2_OpenEnded_mscoco_val2014_questions.json')+' '
+osp.join(dir_ann, 'OpenEnded_mscoco_val2014_questions.json'))
os.system('mv '+osp.join(dir_ann, 'v2_OpenEnded_mscoco_test2015_questions.json')+' '
+osp.join(dir_ann, 'OpenEnded_mscoco_test2015_questions.json'))
os.system('mv '+osp.join(dir_ann, 'v2_OpenEnded_mscoco_test-dev2015_questions.json')+' '
+osp.join(dir_ann, 'OpenEnded_mscoco_test-dev2015_questions.json'))
| 69.552632
| 115
| 0.744608
| 421
| 2,643
| 4.377672
| 0.118765
| 0.095496
| 0.113945
| 0.084645
| 0.887683
| 0.83885
| 0.819859
| 0.782963
| 0.673359
| 0.633207
| 0
| 0.033278
| 0.090428
| 2,643
| 37
| 116
| 71.432432
| 0.733361
| 0.02762
| 0
| 0
| 0
| 0.212121
| 0.564472
| 0.270355
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.060606
| 0
| 0.060606
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
ee5e65718623153d5888b17161aba386c0688d22
| 99
|
py
|
Python
|
imbalanced/predictors/__init__.py
|
markalexander/imbalanced
|
3bc72887dc4a0142ba713c17db576879483b7fb3
|
[
"MIT"
] | 3
|
2018-07-06T08:50:50.000Z
|
2021-05-25T07:34:19.000Z
|
imbalanced/predictors/__init__.py
|
markalexander/imbalanced
|
3bc72887dc4a0142ba713c17db576879483b7fb3
|
[
"MIT"
] | null | null | null |
imbalanced/predictors/__init__.py
|
markalexander/imbalanced
|
3bc72887dc4a0142ba713c17db576879483b7fb3
|
[
"MIT"
] | 2
|
2018-08-12T22:53:09.000Z
|
2019-12-05T10:29:20.000Z
|
from .regressors import FeedForwardRegressor, HurdleRegressor, IntermediateClassificationRegressor
| 49.5
| 98
| 0.909091
| 6
| 99
| 15
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.060606
| 99
| 2
| 98
| 49.5
| 0.967742
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
ee853061187af7ff0fc10b246e4b6905da1fc949
| 48
|
py
|
Python
|
tohe/db/__init__.py
|
lagerfeuer/tohe
|
424e8d62283cbb4f54c34e35654b28fe19d5e8c0
|
[
"MIT"
] | null | null | null |
tohe/db/__init__.py
|
lagerfeuer/tohe
|
424e8d62283cbb4f54c34e35654b28fe19d5e8c0
|
[
"MIT"
] | null | null | null |
tohe/db/__init__.py
|
lagerfeuer/tohe
|
424e8d62283cbb4f54c34e35654b28fe19d5e8c0
|
[
"MIT"
] | null | null | null |
from .db import ToheDB, adapt_list, convert_tags
| 48
| 48
| 0.833333
| 8
| 48
| 4.75
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.104167
| 48
| 1
| 48
| 48
| 0.883721
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
c9f279b396cb0c22ee0203a61dc314b44e99a808
| 105
|
py
|
Python
|
scenes/Room/action.py
|
ZachQin/qqspeed_mobile_auto
|
98d821491fd9dd5565018bc7a989a6f0a72a1d84
|
[
"MIT"
] | 1
|
2020-09-21T01:31:30.000Z
|
2020-09-21T01:31:30.000Z
|
scenes/Room/action.py
|
ZachQin/qqspeed_mobile_auto
|
98d821491fd9dd5565018bc7a989a6f0a72a1d84
|
[
"MIT"
] | null | null | null |
scenes/Room/action.py
|
ZachQin/qqspeed_mobile_auto
|
98d821491fd9dd5565018bc7a989a6f0a72a1d84
|
[
"MIT"
] | null | null | null |
import mobile_action
def action(img):
"""在这里填写需要执行的代码"""
mobile_action.tap_screen((1747, 1016))
| 17.5
| 42
| 0.704762
| 13
| 105
| 5.461538
| 0.769231
| 0.338028
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.089888
| 0.152381
| 105
| 5
| 43
| 21
| 0.707865
| 0.114286
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
c9f67ba42b9eb2988ea28510c9b6c3f6ad26dcea
| 65
|
py
|
Python
|
mobilenet_segment/config/__init__.py
|
riven314/capstone_dash_interface
|
5eab25f4c15ad09aa889554820231175b0a3ed28
|
[
"CC0-1.0"
] | 1
|
2021-05-05T11:13:27.000Z
|
2021-05-05T11:13:27.000Z
|
mobilenet_segment/config/__init__.py
|
riven314/capstone_dash_interface
|
5eab25f4c15ad09aa889554820231175b0a3ed28
|
[
"CC0-1.0"
] | null | null | null |
mobilenet_segment/config/__init__.py
|
riven314/capstone_dash_interface
|
5eab25f4c15ad09aa889554820231175b0a3ed28
|
[
"CC0-1.0"
] | 1
|
2020-01-01T12:24:51.000Z
|
2020-01-01T12:24:51.000Z
|
from .defaults import _C as cfg
print('defaults.py is imported')
| 21.666667
| 32
| 0.769231
| 11
| 65
| 4.454545
| 0.909091
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.138462
| 65
| 2
| 33
| 32.5
| 0.875
| 0
| 0
| 0
| 0
| 0
| 0.353846
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0.5
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 1
|
0
| 6
|
c9fc3c3a0fdb00729165881f62eef511cdf482bb
| 29
|
py
|
Python
|
schematable/__init__.py
|
json2d/schematable
|
4820fe2efd8897c626f891c06d8c2e005b0eaecc
|
[
"MIT"
] | null | null | null |
schematable/__init__.py
|
json2d/schematable
|
4820fe2efd8897c626f891c06d8c2e005b0eaecc
|
[
"MIT"
] | 1
|
2020-10-07T23:33:01.000Z
|
2020-10-07T23:33:01.000Z
|
schematable/__init__.py
|
json2d/schematable
|
4820fe2efd8897c626f891c06d8c2e005b0eaecc
|
[
"MIT"
] | null | null | null |
from .core import SchemaTable
| 29
| 29
| 0.862069
| 4
| 29
| 6.25
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.103448
| 29
| 1
| 29
| 29
| 0.961538
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
a00dd74f7a51e94234df5415a4d160bc37d0ecb3
| 68
|
py
|
Python
|
cpp/demo/run_nlos.py
|
BostonUniversitySeniorDesign/corner-camera
|
48bd550d14277c257ea9d96b446990629a783987
|
[
"MIT"
] | null | null | null |
cpp/demo/run_nlos.py
|
BostonUniversitySeniorDesign/corner-camera
|
48bd550d14277c257ea9d96b446990629a783987
|
[
"MIT"
] | null | null | null |
cpp/demo/run_nlos.py
|
BostonUniversitySeniorDesign/corner-camera
|
48bd550d14277c257ea9d96b446990629a783987
|
[
"MIT"
] | null | null | null |
import nlos
assert 'main' in dir(nlos)
assert callable(nlos.run)
| 9.714286
| 26
| 0.735294
| 11
| 68
| 4.545455
| 0.727273
| 0.4
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.161765
| 68
| 6
| 27
| 11.333333
| 0.877193
| 0
| 0
| 0
| 0
| 0
| 0.059701
| 0
| 0
| 0
| 0
| 0
| 0.666667
| 1
| 0
| true
| 0
| 0.333333
| 0
| 0.333333
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
14f14775ceb759988ddf53763354123a6549f627
| 8,282
|
py
|
Python
|
tests/test_grep.py
|
ksamuel/grin
|
3cac404090f566301328394a47b7a3e2e764312c
|
[
"BSD-3-Clause"
] | null | null | null |
tests/test_grep.py
|
ksamuel/grin
|
3cac404090f566301328394a47b7a3e2e764312c
|
[
"BSD-3-Clause"
] | null | null | null |
tests/test_grep.py
|
ksamuel/grin
|
3cac404090f566301328394a47b7a3e2e764312c
|
[
"BSD-3-Clause"
] | null | null | null |
# Doctests are a bit easier to write for these tests.
from __future__ import unicode_literals
from io import BytesIO
import re
import grin
all_foo = b"""\
foo
foo
foo
foo
foo
"""
first_foo = b"""\
foo
bar
bar
bar
bar
"""
last_foo = b"""\
bar
bar
bar
bar
foo
"""
second_foo = b"""\
bar
foo
bar
bar
bar
"""
second_last_foo = b"""\
bar
bar
bar
foo
bar
"""
middle_foo = b"""\
bar
bar
foo
bar
bar
"""
small_gap = b"""\
bar
bar
foo
bar
foo
bar
bar
"""
no_eol = b"foo"
middle_of_line = b"""\
bar
bar
barfoobar
bar
bar
"""
def test_basic_defaults_with_no_context():
gt_default = grin.GrepText(re.compile(b"foo"))
assert gt_default.do_grep(BytesIO(all_foo)) == [
(0, 0, b"foo\n", [(0, 3)]),
(1, 0, b"foo\n", [(0, 3)]),
(2, 0, b"foo\n", [(0, 3)]),
(3, 0, b"foo\n", [(0, 3)]),
(4, 0, b"foo\n", [(0, 3)]),
]
assert gt_default.do_grep(BytesIO(first_foo)) == [(0, 0, b"foo\n", [(0, 3)])]
assert gt_default.do_grep(BytesIO(last_foo)) == [(4, 0, b"foo\n", [(0, 3)])]
assert gt_default.do_grep(BytesIO(second_foo)) == [(1, 0, b"foo\n", [(0, 3)])]
assert gt_default.do_grep(BytesIO(second_last_foo)) == [(3, 0, b"foo\n", [(0, 3)])]
assert gt_default.do_grep(BytesIO(middle_foo)) == [(2, 0, b"foo\n", [(0, 3)])]
assert gt_default.do_grep(BytesIO(small_gap)) == [
(2, 0, b"foo\n", [(0, 3)]),
(4, 0, b"foo\n", [(0, 3)]),
]
assert gt_default.do_grep(BytesIO(no_eol)) == [(0, 0, b"foo", [(0, 3)])]
assert gt_default.do_grep(BytesIO(middle_of_line)) == [
(2, 0, b"barfoobar\n", [(3, 6)])
]
def test_symetric_1_line_context():
gt_context_1 = grin.GrepText(
re.compile(b"foo"), options=grin.Options(before_context=1, after_context=1)
)
assert gt_context_1.do_grep(BytesIO(all_foo)) == [
(0, 0, b"foo\n", [(0, 3)]),
(1, 0, b"foo\n", [(0, 3)]),
(2, 0, b"foo\n", [(0, 3)]),
(3, 0, b"foo\n", [(0, 3)]),
(4, 0, b"foo\n", [(0, 3)]),
]
assert gt_context_1.do_grep(BytesIO(first_foo)) == [
(0, 0, b"foo\n", [(0, 3)]),
(1, 1, b"bar\n", None),
]
assert gt_context_1.do_grep(BytesIO(last_foo)) == [
(3, -1, b"bar\n", None),
(4, 0, b"foo\n", [(0, 3)]),
]
assert gt_context_1.do_grep(BytesIO(second_foo)) == [
(0, -1, b"bar\n", None),
(1, 0, b"foo\n", [(0, 3)]),
(2, 1, b"bar\n", None),
]
assert gt_context_1.do_grep(BytesIO(second_last_foo)) == [
(2, -1, b"bar\n", None),
(3, 0, b"foo\n", [(0, 3)]),
(4, 1, b"bar\n", None),
]
assert gt_context_1.do_grep(BytesIO(middle_foo)) == [
(1, -1, b"bar\n", None),
(2, 0, b"foo\n", [(0, 3)]),
(3, 1, b"bar\n", None),
]
assert gt_context_1.do_grep(BytesIO(small_gap)) == [
(1, -1, b"bar\n", None),
(2, 0, b"foo\n", [(0, 3)]),
(3, 1, b"bar\n", None),
(4, 0, b"foo\n", [(0, 3)]),
(5, 1, b"bar\n", None),
]
assert gt_context_1.do_grep(BytesIO(no_eol)) == [(0, 0, b"foo", [(0, 3)])]
assert gt_context_1.do_grep(BytesIO(middle_of_line)) == [
(1, -1, b"bar\n", None),
(2, 0, b"barfoobar\n", [(3, 6)]),
(3, 1, b"bar\n", None),
]
def test_symmetric_2_line_context():
gt_context_2 = grin.GrepText(
re.compile(b"foo"), options=grin.Options(before_context=2, after_context=2)
)
assert gt_context_2.do_grep(BytesIO(all_foo)) == [
(0, 0, b"foo\n", [(0, 3)]),
(1, 0, b"foo\n", [(0, 3)]),
(2, 0, b"foo\n", [(0, 3)]),
(3, 0, b"foo\n", [(0, 3)]),
(4, 0, b"foo\n", [(0, 3)]),
]
assert gt_context_2.do_grep(BytesIO(first_foo)) == [
(0, 0, b"foo\n", [(0, 3)]),
(1, 1, b"bar\n", None),
(2, 1, b"bar\n", None),
]
assert gt_context_2.do_grep(BytesIO(last_foo)) == [
(2, -1, b"bar\n", None),
(3, -1, b"bar\n", None),
(4, 0, b"foo\n", [(0, 3)]),
]
assert gt_context_2.do_grep(BytesIO(second_foo)) == [
(0, -1, b"bar\n", None),
(1, 0, b"foo\n", [(0, 3)]),
(2, 1, b"bar\n", None),
(3, 1, b"bar\n", None),
]
assert gt_context_2.do_grep(BytesIO(second_last_foo)) == [
(1, -1, b"bar\n", None),
(2, -1, b"bar\n", None),
(3, 0, b"foo\n", [(0, 3)]),
(4, 1, b"bar\n", None),
]
assert gt_context_2.do_grep(BytesIO(middle_foo)) == [
(0, -1, b"bar\n", None),
(1, -1, b"bar\n", None),
(2, 0, b"foo\n", [(0, 3)]),
(3, 1, b"bar\n", None),
(4, 1, b"bar\n", None),
]
assert gt_context_2.do_grep(BytesIO(small_gap)) == [
(0, -1, b"bar\n", None),
(1, -1, b"bar\n", None),
(2, 0, b"foo\n", [(0, 3)]),
(3, 1, b"bar\n", None),
(4, 0, b"foo\n", [(0, 3)]),
(5, 1, b"bar\n", None),
(6, 1, b"bar\n", None),
]
assert gt_context_2.do_grep(BytesIO(no_eol)) == [(0, 0, b"foo", [(0, 3)])]
assert gt_context_2.do_grep(BytesIO(middle_of_line)) == [
(0, -1, b"bar\n", None),
(1, -1, b"bar\n", None),
(2, 0, b"barfoobar\n", [(3, 6)]),
(3, 1, b"bar\n", None),
(4, 1, b"bar\n", None),
]
def test_1_line_of_before_context_no_lines_after():
gt_before_context_1 = grin.GrepText(
re.compile(b"foo"), options=grin.Options(before_context=1, after_context=0)
)
assert gt_before_context_1.do_grep(BytesIO(all_foo)) == [
(0, 0, b"foo\n", [(0, 3)]),
(1, 0, b"foo\n", [(0, 3)]),
(2, 0, b"foo\n", [(0, 3)]),
(3, 0, b"foo\n", [(0, 3)]),
(4, 0, b"foo\n", [(0, 3)]),
]
assert gt_before_context_1.do_grep(BytesIO(first_foo)) == [
(0, 0, b"foo\n", [(0, 3)])
]
assert gt_before_context_1.do_grep(BytesIO(last_foo)) == [
(3, -1, b"bar\n", None),
(4, 0, b"foo\n", [(0, 3)]),
]
assert gt_before_context_1.do_grep(BytesIO(second_foo)) == [
(0, -1, b"bar\n", None),
(1, 0, b"foo\n", [(0, 3)]),
]
assert gt_before_context_1.do_grep(BytesIO(second_last_foo)) == [
(2, -1, b"bar\n", None),
(3, 0, b"foo\n", [(0, 3)]),
]
assert gt_before_context_1.do_grep(BytesIO(middle_foo)) == [
(1, -1, b"bar\n", None),
(2, 0, b"foo\n", [(0, 3)]),
]
assert gt_before_context_1.do_grep(BytesIO(small_gap)) == [
(1, -1, b"bar\n", None),
(2, 0, b"foo\n", [(0, 3)]),
(3, -1, b"bar\n", None),
(4, 0, b"foo\n", [(0, 3)]),
]
assert gt_before_context_1.do_grep(BytesIO(no_eol)) == [(0, 0, b"foo", [(0, 3)])]
assert gt_before_context_1.do_grep(BytesIO(middle_of_line)) == [
(1, -1, b"bar\n", None),
(2, 0, b"barfoobar\n", [(3, 6)]),
]
def test_1_line_of_before_context_no_lines_before():
gt_after_context_1 = grin.GrepText(
re.compile(b"foo"), options=grin.Options(before_context=0, after_context=1)
)
assert gt_after_context_1.do_grep(BytesIO(all_foo)) == [
(0, 0, b"foo\n", [(0, 3)]),
(1, 0, b"foo\n", [(0, 3)]),
(2, 0, b"foo\n", [(0, 3)]),
(3, 0, b"foo\n", [(0, 3)]),
(4, 0, b"foo\n", [(0, 3)]),
]
assert gt_after_context_1.do_grep(BytesIO(first_foo)) == [
(0, 0, b"foo\n", [(0, 3)]),
(1, 1, b"bar\n", None),
]
assert gt_after_context_1.do_grep(BytesIO(last_foo)) == [(4, 0, b"foo\n", [(0, 3)])]
assert gt_after_context_1.do_grep(BytesIO(second_foo)) == [
(1, 0, b"foo\n", [(0, 3)]),
(2, 1, b"bar\n", None),
]
assert gt_after_context_1.do_grep(BytesIO(second_last_foo)) == [
(3, 0, b"foo\n", [(0, 3)]),
(4, 1, b"bar\n", None),
]
assert gt_after_context_1.do_grep(BytesIO(middle_foo)) == [
(2, 0, b"foo\n", [(0, 3)]),
(3, 1, b"bar\n", None),
]
assert gt_after_context_1.do_grep(BytesIO(small_gap)) == [
(2, 0, b"foo\n", [(0, 3)]),
(3, 1, b"bar\n", None),
(4, 0, b"foo\n", [(0, 3)]),
(5, 1, b"bar\n", None),
]
assert gt_after_context_1.do_grep(BytesIO(no_eol)) == [(0, 0, b"foo", [(0, 3)])]
assert gt_after_context_1.do_grep(BytesIO(middle_of_line)) == [
(2, 0, b"barfoobar\n", [(3, 6)]),
(3, 1, b"bar\n", None),
]
| 28.460481
| 88
| 0.496257
| 1,425
| 8,282
| 2.699649
| 0.043509
| 0.075903
| 0.084481
| 0.093579
| 0.893943
| 0.881466
| 0.859631
| 0.851573
| 0.838316
| 0.811541
| 0
| 0.070681
| 0.262014
| 8,282
| 290
| 89
| 28.558621
| 0.558737
| 0.006158
| 0
| 0.576208
| 0
| 0
| 0.10062
| 0
| 0
| 0
| 0
| 0
| 0.167286
| 1
| 0.018587
| false
| 0
| 0.01487
| 0
| 0.033457
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
14fc95d9c536940d21e06c555d724bdb62effa59
| 18
|
py
|
Python
|
2019/python/day1/__init__.py
|
lindseyeggleston/advent_of_code
|
246940f1acaf08773fab826f5f36518129569ab6
|
[
"Unlicense"
] | null | null | null |
2019/python/day1/__init__.py
|
lindseyeggleston/advent_of_code
|
246940f1acaf08773fab826f5f36518129569ab6
|
[
"Unlicense"
] | null | null | null |
2019/python/day1/__init__.py
|
lindseyeggleston/advent_of_code
|
246940f1acaf08773fab826f5f36518129569ab6
|
[
"Unlicense"
] | null | null | null |
from day1 import *
| 18
| 18
| 0.777778
| 3
| 18
| 4.666667
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.066667
| 0.166667
| 18
| 1
| 18
| 18
| 0.866667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
0921465b6299ea7ba0a10661f61c16347ed6076f
| 115
|
py
|
Python
|
Week01/Problem01/ylu_01.py.py
|
nkruyer/SkillsWorkshop2018
|
2201255ff63eca111635789267d0600a95854c38
|
[
"BSD-3-Clause"
] | 1
|
2020-04-18T03:30:46.000Z
|
2020-04-18T03:30:46.000Z
|
Week01/Problem01/ylu_01.py.py
|
nkruyer/SkillsWorkshop2018
|
2201255ff63eca111635789267d0600a95854c38
|
[
"BSD-3-Clause"
] | 21
|
2018-07-12T19:12:23.000Z
|
2018-08-10T13:52:45.000Z
|
Week01/Problem01/ylu_01.py.py
|
nkruyer/SkillsWorkshop2018
|
2201255ff63eca111635789267d0600a95854c38
|
[
"BSD-3-Clause"
] | 60
|
2018-05-08T16:59:20.000Z
|
2018-08-01T14:28:28.000Z
|
m3 = range(0,1000,3)
m5 = range(0,1000,5)
m15 = range(0,1000,15)
answer = sum(m3)+sum(m5)-sum(m15)
print (answer)
| 16.428571
| 33
| 0.643478
| 24
| 115
| 3.083333
| 0.5
| 0.243243
| 0.405405
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.27
| 0.130435
| 115
| 6
| 34
| 19.166667
| 0.47
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.2
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
0924cb821bf0cd00a6b7aa4d8824c0bb8d3291fe
| 84
|
py
|
Python
|
hola.py
|
JoseCordobaEAN/Flisol2018
|
f31c7d08dda48b49ed89fe766d1144a4a0e0f279
|
[
"MIT"
] | null | null | null |
hola.py
|
JoseCordobaEAN/Flisol2018
|
f31c7d08dda48b49ed89fe766d1144a4a0e0f279
|
[
"MIT"
] | null | null | null |
hola.py
|
JoseCordobaEAN/Flisol2018
|
f31c7d08dda48b49ed89fe766d1144a4a0e0f279
|
[
"MIT"
] | null | null | null |
# TODO Saludar al usuario
# TODO Preguntar su nombre
# TODO Saludar por el nombre
| 14
| 28
| 0.75
| 13
| 84
| 4.846154
| 0.692308
| 0.349206
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.214286
| 84
| 5
| 29
| 16.8
| 0.954545
| 0.892857
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0.2
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
09350646b99b61201bb300a56ca5306046844158
| 31
|
py
|
Python
|
test/__init__.py
|
JhoneM/DynamicUrl
|
06abe3b9fbdebd20b035f7bf8b599161a02a26e3
|
[
"MIT"
] | null | null | null |
test/__init__.py
|
JhoneM/DynamicUrl
|
06abe3b9fbdebd20b035f7bf8b599161a02a26e3
|
[
"MIT"
] | null | null | null |
test/__init__.py
|
JhoneM/DynamicUrl
|
06abe3b9fbdebd20b035f7bf8b599161a02a26e3
|
[
"MIT"
] | null | null | null |
from . import dynamic_url_test
| 15.5
| 30
| 0.83871
| 5
| 31
| 4.8
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.129032
| 31
| 1
| 31
| 31
| 0.888889
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
093f163021543c7ed9ed987e4a4a2337e4fb8784
| 76
|
py
|
Python
|
cook.py
|
aguai/docgen
|
5147658e2eaae3c8232ae66edea5b90cd8933d94
|
[
"MIT"
] | null | null | null |
cook.py
|
aguai/docgen
|
5147658e2eaae3c8232ae66edea5b90cd8933d94
|
[
"MIT"
] | null | null | null |
cook.py
|
aguai/docgen
|
5147658e2eaae3c8232ae66edea5b90cd8933d94
|
[
"MIT"
] | null | null | null |
import lib.latex2svg.latex2svg as tex
import lib.mistletoe.mistletoe as rdr
| 25.333333
| 37
| 0.842105
| 12
| 76
| 5.333333
| 0.583333
| 0.28125
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.029412
| 0.105263
| 76
| 2
| 38
| 38
| 0.911765
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
095d887075b63819caa91bb8b78fd94d51cb32b3
| 36
|
py
|
Python
|
src/models/ops/deform_v2/__init__.py
|
FHellmann/Deformable_Dilated_Faster-RCNN
|
53e7ddcd6b3b8c7c38451cf08529d2792494c658
|
[
"MIT"
] | 1
|
2021-10-09T03:05:16.000Z
|
2021-10-09T03:05:16.000Z
|
src/models/ops/deform_v2/__init__.py
|
FHellmann/Deformable_Dilated_Faster-RCNN
|
53e7ddcd6b3b8c7c38451cf08529d2792494c658
|
[
"MIT"
] | null | null | null |
src/models/ops/deform_v2/__init__.py
|
FHellmann/Deformable_Dilated_Faster-RCNN
|
53e7ddcd6b3b8c7c38451cf08529d2792494c658
|
[
"MIT"
] | 2
|
2021-03-02T12:06:14.000Z
|
2021-11-20T16:02:43.000Z
|
from .dcn_v2 import DCN, DCNPooling
| 18
| 35
| 0.805556
| 6
| 36
| 4.666667
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.032258
| 0.138889
| 36
| 1
| 36
| 36
| 0.870968
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
11b00dbf2b483ad1515b9cc7e9e06c4d714bb963
| 111
|
py
|
Python
|
python-module/tissue_recognition/utils.py
|
bioinfoacademy/st_tissue_recognition
|
db739d30b79c209399c51bd845e808fb75ee19ee
|
[
"MIT"
] | null | null | null |
python-module/tissue_recognition/utils.py
|
bioinfoacademy/st_tissue_recognition
|
db739d30b79c209399c51bd845e808fb75ee19ee
|
[
"MIT"
] | 3
|
2018-06-22T15:13:36.000Z
|
2019-11-16T17:35:59.000Z
|
python-module/tissue_recognition/utils.py
|
bioinfoacademy/st_tissue_recognition
|
db739d30b79c209399c51bd845e808fb75ee19ee
|
[
"MIT"
] | 2
|
2021-02-07T12:39:02.000Z
|
2021-05-07T07:43:16.000Z
|
import ctypes as ct
def get_val(ptr, ptr_type):
return ct.cast(ptr, ct.POINTER(ptr_type)).contents.value
| 18.5
| 60
| 0.738739
| 20
| 111
| 3.95
| 0.7
| 0.177215
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.144144
| 111
| 5
| 61
| 22.2
| 0.831579
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 6
|
11bd34d07e304fd6a9afd8b21fafa93dc340b96f
| 91
|
py
|
Python
|
redisco/models/key.py
|
iamteem/redisco
|
a7ba19ff3c38061d6d8bc0c10fa754baadcfeb91
|
[
"MIT"
] | 110
|
2015-01-16T06:38:00.000Z
|
2022-02-02T05:12:57.000Z
|
redisco/models/key.py
|
daodaoliang/redisco
|
a7ba19ff3c38061d6d8bc0c10fa754baadcfeb91
|
[
"MIT"
] | 7
|
2015-04-28T15:40:02.000Z
|
2018-05-07T16:14:31.000Z
|
redisco/models/key.py
|
daodaoliang/redisco
|
a7ba19ff3c38061d6d8bc0c10fa754baadcfeb91
|
[
"MIT"
] | 29
|
2015-01-22T05:30:57.000Z
|
2021-05-18T08:25:33.000Z
|
class Key(str):
def __getitem__(self, key):
return Key("%s:%s" % (self, key,))
| 22.75
| 42
| 0.549451
| 13
| 91
| 3.538462
| 0.615385
| 0.304348
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.241758
| 91
| 3
| 43
| 30.333333
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0.054945
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 6
|
eea3cd4e03f84b1b7d5577ee852f8c30ef130129
| 21
|
py
|
Python
|
Modules/vms/iodef/iodef.py
|
vmssoftware/cpython
|
b5d2c7f578d33963798a02ca32f0c151c908aa7c
|
[
"0BSD"
] | 2
|
2021-10-06T15:46:53.000Z
|
2022-01-26T02:58:54.000Z
|
Modules/vms/iodef/iodef.py
|
vmssoftware/cpython
|
b5d2c7f578d33963798a02ca32f0c151c908aa7c
|
[
"0BSD"
] | null | null | null |
Modules/vms/iodef/iodef.py
|
vmssoftware/cpython
|
b5d2c7f578d33963798a02ca32f0c151c908aa7c
|
[
"0BSD"
] | null | null | null |
from _iodef import *
| 10.5
| 20
| 0.761905
| 3
| 21
| 5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.190476
| 21
| 1
| 21
| 21
| 0.882353
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
e12db14df9970468cdad19adb6371b9d14c098e9
| 1,724
|
py
|
Python
|
tests/test_hdfe_funcs.py
|
ma-schmidt/hdfe
|
ff169487b8c25c2a10e1404c8702dcf5c182f2e2
|
[
"MIT"
] | 21
|
2016-07-14T20:10:34.000Z
|
2022-03-11T21:43:57.000Z
|
tests/test_hdfe_funcs.py
|
ma-schmidt/hdfe
|
ff169487b8c25c2a10e1404c8702dcf5c182f2e2
|
[
"MIT"
] | 5
|
2019-04-11T16:22:39.000Z
|
2020-03-01T03:10:42.000Z
|
tests/test_hdfe_funcs.py
|
ma-schmidt/hdfe
|
ff169487b8c25c2a10e1404c8702dcf5c182f2e2
|
[
"MIT"
] | 5
|
2018-10-12T07:51:25.000Z
|
2020-07-20T17:48:51.000Z
|
import pandas as pd
import numpy as np
from hdfe.hdfe import make_dummies, get_all_dummies
def test_make_dummies_arr() -> None:
x = np.array([1, 0, 0])
results = make_dummies(x, False)
expected = np.array([[0, 1], [1, 0], [1, 0]], dtype=float)
np.testing.assert_almost_equal(results.A, expected)
def test_make_dummies_ser() -> None:
x = pd.Series([1, 0, 0])
results = make_dummies(x, False)
expected = np.array([[0, 1], [1, 0], [1, 0]], dtype=float)
np.testing.assert_almost_equal(results.A, expected)
def test_make_dummies_cat() -> None:
x = pd.Series(["horse", "cat", "cat"]).astype("category")
results = make_dummies(x, False)
expected = np.array([[0, 1], [1, 0], [1, 0]], dtype=float)
np.testing.assert_almost_equal(results.A, expected)
def test_make_dummies_arr_drop() -> None:
x = np.array([1, 0, 0])
results = make_dummies(x, True)
expected = np.array([[0], [1], [1]], dtype=float)
np.testing.assert_almost_equal(results.A, expected)
def test_make_dummies_ser_drop() -> None:
x = pd.Series([1, 0, 0])
results = make_dummies(x, True)
expected = np.array([[0], [1], [1]], dtype=float)
np.testing.assert_almost_equal(results.A, expected)
def test_make_dummies_cat_drop() -> None:
x = pd.Series(["horse", "cat", "cat"]).astype("category")
results = make_dummies(x, True)
expected = np.array([[0], [1], [1]], dtype=float)
np.testing.assert_almost_equal(results.A, expected)
def test_get_all_dummies() -> None:
x = np.array([[0, 0], [1, 0], [0, 1]])
result = get_all_dummies(x)
expected = np.array([[1, 0, 1], [0, 1, 1], [1, 0, 0]], dtype=float)
np.testing.assert_almost_equal(result.A, expected)
| 32.528302
| 71
| 0.638631
| 273
| 1,724
| 3.85348
| 0.142857
| 0.026616
| 0.09981
| 0.126426
| 0.842205
| 0.811787
| 0.811787
| 0.811787
| 0.776616
| 0.776616
| 0
| 0.038001
| 0.175754
| 1,724
| 52
| 72
| 33.153846
| 0.702322
| 0
| 0
| 0.631579
| 0
| 0
| 0.022042
| 0
| 0
| 0
| 0
| 0
| 0.184211
| 1
| 0.184211
| false
| 0
| 0.078947
| 0
| 0.263158
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
012898812be1eaee86b4d68076f7196ae67deae3
| 22
|
py
|
Python
|
tests/pipfile/setup-py-implicit/dummy/__init__.py
|
davidjsherman/repo2docker
|
4da2e1e71565b26a4bf1d0f0d26ae7c2373a1fd7
|
[
"BSD-3-Clause"
] | 1,047
|
2017-05-25T03:37:21.000Z
|
2020-08-09T19:36:56.000Z
|
tests/pipfile/setup-py-implicit/dummy/__init__.py
|
davidjsherman/repo2docker
|
4da2e1e71565b26a4bf1d0f0d26ae7c2373a1fd7
|
[
"BSD-3-Clause"
] | 810
|
2017-05-24T20:50:49.000Z
|
2020-08-05T15:56:38.000Z
|
tests/pipfile/setup-py-implicit/dummy/__init__.py
|
davidjsherman/repo2docker
|
4da2e1e71565b26a4bf1d0f0d26ae7c2373a1fd7
|
[
"BSD-3-Clause"
] | 253
|
2017-06-02T20:23:05.000Z
|
2020-08-04T17:23:22.000Z
|
def dummy():
pass
| 7.333333
| 12
| 0.545455
| 3
| 22
| 4
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.318182
| 22
| 2
| 13
| 11
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| true
| 0.5
| 0
| 0
| 0.5
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 6
|
01291eb8dbf515042811936e9e4335de13453ed0
| 188
|
py
|
Python
|
marshmallow_dataclass/typing.py
|
shadchin/marshmallow_dataclass
|
fa6c28980ccfe45742cdc9430bfb3b737690935f
|
[
"MIT"
] | 342
|
2019-02-06T04:30:49.000Z
|
2022-03-25T18:19:25.000Z
|
marshmallow_dataclass/typing.py
|
shadchin/marshmallow_dataclass
|
fa6c28980ccfe45742cdc9430bfb3b737690935f
|
[
"MIT"
] | 164
|
2019-02-06T18:01:38.000Z
|
2022-03-30T09:18:37.000Z
|
marshmallow_dataclass/typing.py
|
shadchin/marshmallow_dataclass
|
fa6c28980ccfe45742cdc9430bfb3b737690935f
|
[
"MIT"
] | 78
|
2019-02-06T18:25:24.000Z
|
2022-03-14T09:13:55.000Z
|
import marshmallow.fields
from . import NewType
Url = NewType("Url", str, field=marshmallow.fields.Url)
Email = NewType("Email", str, field=marshmallow.fields.Email)
# Aliases
URL = Url
| 20.888889
| 61
| 0.75
| 25
| 188
| 5.64
| 0.4
| 0.361702
| 0.269504
| 0.35461
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.12234
| 188
| 8
| 62
| 23.5
| 0.854545
| 0.037234
| 0
| 0
| 0
| 0
| 0.044693
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.4
| 0
| 0.4
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
6d6d34b7dd3f678c55f539c94442434f43a2411e
| 96
|
py
|
Python
|
inlines/admin.py
|
BroadwayDev/django-inlineobjects
|
977543e03be93d684f97ea4fcee9116ff73e94bd
|
[
"BSD-3-Clause"
] | 3
|
2015-08-12T12:44:57.000Z
|
2020-05-15T17:27:35.000Z
|
inlines/admin.py
|
BroadwayDev/django-inlineobjects
|
977543e03be93d684f97ea4fcee9116ff73e94bd
|
[
"BSD-3-Clause"
] | null | null | null |
inlines/admin.py
|
BroadwayDev/django-inlineobjects
|
977543e03be93d684f97ea4fcee9116ff73e94bd
|
[
"BSD-3-Clause"
] | 1
|
2020-06-09T20:26:04.000Z
|
2020-06-09T20:26:04.000Z
|
from django.contrib import admin
from inlines.models import *
admin.site.register(InlineType)
| 16
| 32
| 0.8125
| 13
| 96
| 6
| 0.769231
| 0.282051
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.114583
| 96
| 5
| 33
| 19.2
| 0.917647
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
6d95b4b2ae9b0f79acef0967a8c7716a2f05d4cf
| 34
|
py
|
Python
|
ditat_etl/utils/enrichment/peopledatalabs/__init__.py
|
ditat-llc/ditat_etl
|
3d4846ecb9663f9d3de2473aaf4bbcf52f735beb
|
[
"MIT"
] | 4
|
2021-08-11T23:05:37.000Z
|
2022-03-22T18:43:35.000Z
|
ditat_etl/utils/enrichment/peopledatalabs/__init__.py
|
ditat-llc/ditat_etl
|
3d4846ecb9663f9d3de2473aaf4bbcf52f735beb
|
[
"MIT"
] | null | null | null |
ditat_etl/utils/enrichment/peopledatalabs/__init__.py
|
ditat-llc/ditat_etl
|
3d4846ecb9663f9d3de2473aaf4bbcf52f735beb
|
[
"MIT"
] | null | null | null |
from .main import PeopleDataLabs
| 11.333333
| 32
| 0.823529
| 4
| 34
| 7
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.147059
| 34
| 2
| 33
| 17
| 0.965517
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
6df0f2f8ad3a8e0a608d0c976ba0bb614e49463c
| 33
|
py
|
Python
|
source/shared/__init__.py
|
joshhills/dissertation-project
|
0a2ef09a98aa3e6921384a1768b051e02d8dffb1
|
[
"MIT"
] | null | null | null |
source/shared/__init__.py
|
joshhills/dissertation-project
|
0a2ef09a98aa3e6921384a1768b051e02d8dffb1
|
[
"MIT"
] | null | null | null |
source/shared/__init__.py
|
joshhills/dissertation-project
|
0a2ef09a98aa3e6921384a1768b051e02d8dffb1
|
[
"MIT"
] | null | null | null |
import database
import messaging
| 11
| 16
| 0.878788
| 4
| 33
| 7.25
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.121212
| 33
| 2
| 17
| 16.5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
09783c0856d1f4b610e86d37a4a29469fbd99b5f
| 268
|
py
|
Python
|
atomic/__init__.py
|
kr-g/pyatomic
|
8f9908b4a4a374a6643b28d97ace1bfa97ac0939
|
[
"MIT"
] | 3
|
2020-04-23T00:24:48.000Z
|
2022-01-16T14:52:05.000Z
|
atomic/__init__.py
|
kr-g/pyatomic
|
8f9908b4a4a374a6643b28d97ace1bfa97ac0939
|
[
"MIT"
] | null | null | null |
atomic/__init__.py
|
kr-g/pyatomic
|
8f9908b4a4a374a6643b28d97ace1bfa97ac0939
|
[
"MIT"
] | null | null | null |
from .atomic import Atomic
from .atomic import AtomicTimeoutException
from .atomic import AtomicRefCountException
from .atomic import AtomicNotLockedException
from .atomic import AtomicBaseClassException
from .atomic import AtomicOwnerException
VERSION = "0.0.3-rc1"
| 29.777778
| 44
| 0.850746
| 29
| 268
| 7.862069
| 0.413793
| 0.263158
| 0.421053
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.016667
| 0.104478
| 268
| 8
| 45
| 33.5
| 0.933333
| 0
| 0
| 0
| 0
| 0
| 0.033582
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.857143
| 0
| 0.857143
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
09aa1a105dbfc583b31d571fc3ce9422cc7cbbfd
| 13,005
|
py
|
Python
|
pybold/padding.py
|
hcherkaoui/pybold
|
54e5b074a614ea11fe439707263351d93340499a
|
[
"BSD-3-Clause"
] | 4
|
2019-11-03T10:12:11.000Z
|
2020-10-10T23:42:02.000Z
|
pybold/padding.py
|
CherkaouiHamza/pybold
|
54e5b074a614ea11fe439707263351d93340499a
|
[
"BSD-3-Clause"
] | null | null | null |
pybold/padding.py
|
CherkaouiHamza/pybold
|
54e5b074a614ea11fe439707263351d93340499a
|
[
"BSD-3-Clause"
] | 1
|
2021-05-18T04:52:10.000Z
|
2021-05-18T04:52:10.000Z
|
# coding: utf-8
""" This module provide the padding functions used with the Fourier
implementation for the HRF operator.
"""
import numpy as np
def _padd_symetric(arrays, p, c, paddtype):
""" Helper function to c-padd in a symetric way arrays.
Parameters
----------
arrays : np.ndarray or list of np.ndarray
array or list of arrays to padd.
p : int,
length of padding.
c : float,
value to padd with.
paddtype : ['left', 'right'],
where to place the padding.
Results
-------
arrays : np.ndarray or list of np.ndarray
the padded array or list of arrays
"""
# case list of arrays (symm padd for list of arr)
if isinstance(arrays, list):
if paddtype == "center":
p_left = int(p / 2)
p_right = int(p / 2) + (p % 2)
return [np.hstack([c*np.ones(p_left), a, c*np.ones(p_right)])
for a in arrays]
elif paddtype == "left":
return [np.hstack([c*np.ones(p), a]) for a in arrays]
elif paddtype == "right":
return [np.hstack([a, c*np.ones(p)]) for a in arrays]
else:
raise ValueError("paddtype should be ['left', "
"'center', 'right']")
# case one arrays (symm padd for one of arr)
else:
if paddtype == "center":
p_left = int(p / 2)
p_right = int(p / 2) + (p % 2)
return np.hstack([c*np.ones(p_left), arrays,
c*np.ones(p_right)])
elif paddtype == "left":
return np.hstack([c*np.ones(p), arrays])
elif paddtype == "right":
return np.hstack([arrays, c*np.ones(p)])
else:
raise ValueError("paddtype should be ['left', "
"'center', 'right']")
def _padd_assymetric(arrays, p, c, paddtype):
""" Helper function to c-padd in an assymetric way arrays.
Parameters
----------
arrays : np.ndarray or list of np.ndarray
array or list of arrays to padd.
p : tuple of int,
length of padding.
c : float,
value to padd with.
paddtype : ['center'],
where to place the padding.
Note:
-----
Will raise a ValueError if paddtype is not 'center'.
Results
-------
arrays : np.ndarray or list of np.ndarray
the padded array or list of arrays
"""
# case list of arrays (assymm padd for one of arr)
if isinstance(arrays, list):
if paddtype == "center":
return [np.hstack([c*np.ones(p[0]), a, c*np.ones(p[1])])
for a in arrays]
elif paddtype == "left":
raise ValueError("Can't have 'left' paddtype with a tuple padd "
" provided")
elif paddtype == "right":
raise ValueError("Can't have 'right' paddtype with a tuple padd "
" provided")
else:
raise ValueError("paddtype should be ['left', "
"'center', 'right']")
# case one arrays (assymm padd for one of arr)
else:
if paddtype == "center":
p_a = np.hstack([c*np.ones(p[0]), arrays, c*np.ones(p[1])])
return p_a
elif paddtype == "left":
raise ValueError("Can't have 'left' paddtype with a tuple padd "
" provided")
elif paddtype == "right":
raise ValueError("Can't have 'right' paddtype with a tuple padd "
" provided")
else:
raise ValueError("paddtype should be ['left', "
"'center', 'right']")
def padd(arrays, p, c=0.0, paddtype="center"):
""" Padd a list of arrays.
Parameters
----------
arrays : np.ndarray or list of np.ndarray
array or list of arrays to padd.
p : int or tuple of int,
length of padding.
c : float,
value to padd with.
paddtype : ['left', 'right' or 'center'],
where to place the padding.
Results
-------
arrays : np.ndarray or list of np.ndarray
the unpadded array or list of arrays.
"""
# case of assymetric padding
if isinstance(p, int):
if p < 1: # no padding array
return arrays
else:
return _padd_symetric(arrays, p=p, c=c, paddtype=paddtype)
# case of symetric padding
else:
return _padd_assymetric(arrays, p=p, c=c, paddtype=paddtype)
def _unpadd_symetric(arrays, p, paddtype):
""" Helper function to unpadd in an assymetric way arrays.
Parameters
----------
arrays : np.ndarray or list of np.ndarray
array or list of arrays to padd.
p : int,
length of padding.
paddtype : ['left', 'right'],
where to place the padding.
Results
-------
arrays : np.ndarray or list of np.ndarray
the unpadded array or list of arrays.
"""
# case list of arrays (symm padd for list of arr)
if isinstance(arrays, list):
if paddtype == "center":
p_left = int(p / 2)
p_right = int(p / 2) + (p % 2)
return [a[p_left:-p_right] for a in arrays]
elif paddtype == "left":
return [a[p:] for a in arrays]
elif paddtype == "right":
return [a[:-p] for a in arrays]
else:
raise ValueError("paddtype should be ['left', "
"'center', 'right']")
# case one array (symm padd for one of arr)
else:
if paddtype == "center":
p_left = int(p / 2)
p_right = int(p / 2) + (p % 2)
return arrays[p_left:-p_right]
elif paddtype == "left":
return arrays[p:]
elif paddtype == "right":
return arrays[:-p]
else:
raise ValueError("paddtype should be ['left', "
"'center', 'right']")
def _unpadd_assymetric(arrays, p, paddtype):
""" Helper function to unpadd in a symetric way arrays.
Parameters
----------
arrays : np.ndarray or list of np.ndarray
array or list of arrays to padd.
p : tuple of int,
length of padding.
paddtype : ['center'],
where to place the padding.
Note:
-----
Will raise a ValueError if paddtype is not 'center'.
Results
-------
arrays : np.ndarray or list of np.ndarray
the unpadded array or list of arrays.
"""
# case list of arrays (assymm padd for list of arr)
if isinstance(arrays, list):
if paddtype == "center":
return [a[p[0]:-p[1]] for a in arrays]
elif paddtype == "left":
raise ValueError("Can't have 'left' paddtype with a tuple padd "
"provided")
elif paddtype == "right":
raise ValueError("Can't have 'right' paddtype with a tuple padd "
" provided")
else:
raise ValueError("paddtype should be ['left', "
"'center', 'right']")
# case one array (assymm padd for one of arr)
else:
if paddtype == "center":
return arrays[p[0]:-p[1]]
elif paddtype == "left":
raise ValueError("Can't have 'left' paddtype with a tuple padd "
"provided")
elif paddtype == "right":
raise ValueError("Can't have 'right' paddtype with a tuple "
"provided")
else:
raise ValueError("paddtype should be ['left', "
"'center', 'right']")
def unpadd(arrays, p, paddtype="center"):
""" Unpadd a list of arrays.
Parameters
----------
arrays : np.ndarray or list of np.ndarray
array or list of arrays to padd.
p : int or tuple of int,
length of padding.
paddtype : ['left', 'right' or 'center'],
where to place the padding.
Results
-------
arrays : np.ndarray or list of np.ndarray
the unpadded array or list of arrays.
"""
# case of assymetric padding
if isinstance(p, int):
if p < 1: # no padding case
return arrays
else:
return _unpadd_symetric(arrays, p, paddtype)
# case of symetric padding
else:
return _unpadd_assymetric(arrays, p, paddtype)
def _custom_padd(a, min_power_of_2=1024, min_zero_padd=50,
zero_padd_ratio=0.5):
""" Private helper to make a zeros-mirror-zeros padd to the next power of
two of a.
Parameters
----------
arrays : np.ndarray,
array to padd.
min_power_of_2 : int (default=512),
min length (power of two) for the padded array.
zero_padd_ratio : float (default=0.5),
determine the ratio of the length of zero padds (either for the first
or the second zero-padd) w.r.t the array length.
min_zero_padd : int (default=50)
min zero padd, either for the first or the second zero-padd.
Note:
-----
Having a signal close to ~200 can make trouble.
Results
-------
arrays : np.ndarray or list of np.ndarray
the unpadded array.
p : tuple of int,
the applied padd.
"""
if not np.log2(min_power_of_2).is_integer():
raise ValueError("min_power_of_2 should be a power of two, "
"got {0}".format(min_power_of_2))
nextpow2 = int(np.power(2, np.ceil(np.log2(len(a)))))
nextpow2 = min_power_of_2 if nextpow2 < min_power_of_2 else nextpow2
diff = nextpow2 - len(a)
# define the three possible padding
zero_padd_len = int(zero_padd_ratio * len(a))
too_short = zero_padd_len < min_zero_padd
zero_padd_len = min_zero_padd if too_short else zero_padd_len
p_zeros = (zero_padd_len, zero_padd_len)
len_padd_left = int(diff / 2)
len_padd_right = int(diff / 2) + (len(a) % 2)
p_total = (len_padd_left, len_padd_right)
if diff == 0:
# [ s ]
p_total = 0
return a, p_total
elif (0 < diff) and (diff < 2 * zero_padd_len):
# [ /zeros | s | zeros/ ]
a = padd(a, p_total)
return a, p_total
elif (2 * zero_padd_len < diff) and (diff < 4 * zero_padd_len):
# [ zeros | mirror-signal | s | mirror-signal | zeros ]
len_reflect_padd_left = len_padd_left - zero_padd_len
len_reflect_padd_right = len_padd_right - zero_padd_len
p_reflect = (len_reflect_padd_left, len_reflect_padd_right)
# padding
a = np.pad(a, p_reflect, mode='reflect')
a = padd(a, p_zeros)
return a, p_total
else:
# [ zeros | mirror-signal | zeros | s | zeros | mirror-signal | zeros ]
len_reflect_padd_left = len_padd_left - 2 * zero_padd_len
len_reflect_padd_right = len_padd_right - 2 * zero_padd_len
p_reflect = (len_reflect_padd_left, len_reflect_padd_right)
# padding
a = padd(a, p_zeros)
a = np.pad(a, p_reflect, mode='reflect')
a = padd(a, p_zeros)
return a, p_total
def custom_padd(arrays, min_power_of_2=1024, min_zero_padd=50,
zero_padd_ratio=0.5):
""" Zeros-mirror-zeros padding function to the next power of two of arrays.
Parameters
----------
arrays : np.ndarray or list of np.ndarray,
array or list of arrays to padd.
min_power_of_2 : int (default=512),
min length (power of two) for the padded array.
zero_padd_ratio : float (default=0.5),
determine the ratio of the length of zero padds (either for the first
or the second zero-padd) w.r.t the array length.
min_zero_padd : int (default=50)
min zero padd, either for the first or the second zero-padd.
Note:
-----
Having a signal close to ~200 can make trouble.
Results
-------
arrays : np.ndarray or list of np.ndarray
the unpadded array or list of arrays.
p : tuple of int,
the applied padd (might not be the same for all the arrays).
"""
if isinstance(arrays, list):
_, padd = _custom_padd(arrays[0],
min_power_of_2=min_power_of_2,
min_zero_padd=min_zero_padd,
zero_padd_ratio=zero_padd_ratio)
padd_arrays = [_custom_padd(a,
min_power_of_2=min_power_of_2,
min_zero_padd=min_zero_padd,
zero_padd_ratio=zero_padd_ratio)[0]
for a in arrays]
return padd_arrays, padd
else:
return _custom_padd(arrays,
min_power_of_2=min_power_of_2,
min_zero_padd=min_zero_padd,
zero_padd_ratio=zero_padd_ratio)
| 30.67217
| 79
| 0.548635
| 1,717
| 13,005
| 4.026791
| 0.078626
| 0.048597
| 0.033555
| 0.036882
| 0.841626
| 0.809228
| 0.797946
| 0.74718
| 0.726352
| 0.726352
| 0
| 0.011532
| 0.346559
| 13,005
| 423
| 80
| 30.744681
| 0.802071
| 0.356786
| 0
| 0.662857
| 0
| 0
| 0.127904
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.045714
| false
| 0
| 0.005714
| 0
| 0.211429
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
09eca240fc081309ef0df8accb895ace0da96ef2
| 7,295
|
py
|
Python
|
src/tests/test_db_ops_modeldataadder.py
|
shinyshoes404/covid-py-ml
|
5efa605d93870fe32e9d7cef8aa211256fb12e31
|
[
"MIT"
] | null | null | null |
src/tests/test_db_ops_modeldataadder.py
|
shinyshoes404/covid-py-ml
|
5efa605d93870fe32e9d7cef8aa211256fb12e31
|
[
"MIT"
] | null | null | null |
src/tests/test_db_ops_modeldataadder.py
|
shinyshoes404/covid-py-ml
|
5efa605d93870fe32e9d7cef8aa211256fb12e31
|
[
"MIT"
] | null | null | null |
import unittest, mock, os, platform, sqlite3
from datetime import datetime
from test_data import test_model_data_df, test_predict_data_df, test_predictions
# import class to test
from db_ops.db_ops import ModelDataAdder, DbBuilder
@mock.patch('db_ops.db_ops.DbConfig') # create mock DbConfig for entire class
class TestModelDataAdderAddData(unittest.TestCase):
def setUp(self):
# set a new db dir and file path for testing
self.TEST_DIR = os.path.dirname(os.path.abspath(__file__))
if platform.system() == "Windows":
self.TEMP_DB_DIR = os.path.join(self.TEST_DIR, ".\\data_test")
self.DB_PATH = os.path.join(self.TEMP_DB_DIR, ".\\test.db")
else:
self.TEMP_DB_DIR = os.path.join(self.TEST_DIR, "./data_test")
self.DB_PATH = os.path.join(self.TEMP_DB_DIR, "./test.db")
def tearDown(self):
# clean up any dirs or files created during testing
if os.path.isfile(self.DB_PATH):
os.remove(self.DB_PATH)
if os.path.isdir(self.TEMP_DB_DIR):
os.rmdir(self.TEMP_DB_DIR)
def test_integ_verify_models_tbl(self, mock_db_config):
# add the properties to our mock_db_config object
type(mock_db_config).db_dir = mock.PropertyMock(return_value=self.TEMP_DB_DIR)
type(mock_db_config).db_path = mock.PropertyMock(return_value=self.DB_PATH)
# create the database to use for testing
db_build = DbBuilder()
db_build.create_db()
# intantiate the ModelDataAdder class to create our check object
check_obj = ModelDataAdder(model_date=datetime(2021, 9, 3, 0, 0, 0),
model_score=0.65,
model_data=test_model_data_df[['date','casecount-mv-avg','pos-test-mv-avg', 'icu-top16-hosp-total-util']],
icu_predictions=test_predictions,
icu_prediction_dates=test_predict_data_df['predict-date'])
# add data to test db
check_obj.add_data()
# assert that db exists before we try to check for data, if there was no db file, creating
# the connection in the next step would make one
self.assertEqual(os.path.isfile(self.DB_PATH), True, "A test db file should already exist, expecting True")
# make sure our data is in the database
sql_check = "SELECT * FROM models;"
conn = sqlite3.connect(self.DB_PATH)
cur = conn.cursor()
cur.execute(sql_check)
check_results = cur.fetchall()
cur.close()
conn.close()
# verify that we have one row of data
self.assertEqual(len(check_results), 1,"Should be one model stored in db, expecting 1")
def test_integ_verify_model_prediction_tbl(self, mock_db_config):
# add the properties to our mock_db_config object
type(mock_db_config).db_dir = mock.PropertyMock(return_value=self.TEMP_DB_DIR)
type(mock_db_config).db_path = mock.PropertyMock(return_value=self.DB_PATH)
# create the database to use for testing
db_build = DbBuilder()
db_build.create_db()
# intantiate the ModelDataAdder class to create our check object
check_obj = ModelDataAdder(model_date=datetime(2021, 9, 3, 0, 0, 0),
model_score=0.65,
model_data=test_model_data_df[['date','casecount-mv-avg','pos-test-mv-avg', 'icu-top16-hosp-total-util']],
icu_predictions=test_predictions,
icu_prediction_dates=test_predict_data_df['predict-date'])
# add data to test db
check_obj.add_data()
# assert that db exists before we try to check for data, if there was no db file, creating
# the connection in the next step would make one
self.assertEqual(os.path.isfile(self.DB_PATH), True, "A test db file should already exist, expecting True")
# make sure our data is in the database
sql_check = "SELECT * FROM model_prediction;"
conn = sqlite3.connect(self.DB_PATH)
cur = conn.cursor()
cur.execute(sql_check)
check_results = cur.fetchall()
cur.close()
conn.close()
# verify that we have one row of data
self.assertEqual(len(check_results), 2,"Should be two predictions stored in db, expecting 2")
def test_integ_verify_model_data_tbl(self, mock_db_config):
# add the properties to our mock_db_config object
type(mock_db_config).db_dir = mock.PropertyMock(return_value=self.TEMP_DB_DIR)
type(mock_db_config).db_path = mock.PropertyMock(return_value=self.DB_PATH)
# create the database to use for testing
db_build = DbBuilder()
db_build.create_db()
# intantiate the ModelDataAdder class to create our check object
check_obj = ModelDataAdder(model_date=datetime(2021, 9, 3, 0, 0, 0),
model_score=0.65,
model_data=test_model_data_df[['date','casecount-mv-avg','pos-test-mv-avg', 'icu-top16-hosp-total-util']],
icu_predictions=test_predictions,
icu_prediction_dates=test_predict_data_df['predict-date'])
# add data to test db
check_obj.add_data()
# assert that db exists before we try to check for data, if there was no db file, creating
# the connection in the next step would make one
self.assertEqual(os.path.isfile(self.DB_PATH), True, "A test db file should already exist, expecting True")
# make sure our data is in the database
sql_check = "SELECT * FROM model_data;"
conn = sqlite3.connect(self.DB_PATH)
cur = conn.cursor()
cur.execute(sql_check)
check_results = cur.fetchall()
cur.close()
conn.close()
# verify that we have one row of data
self.assertEqual(len(check_results), 2,"Should be two model data points in db, expecting 2")
def test_integ_verify_except_rollback(self, mock_db_config):
# add the properties to our mock_db_config object
type(mock_db_config).db_dir = mock.PropertyMock(return_value=self.TEMP_DB_DIR)
type(mock_db_config).db_path = mock.PropertyMock(return_value=self.DB_PATH)
# create the database to use for testing
db_build = DbBuilder()
db_build.create_db()
# intantiate the ModelDataAdder class to create our check object
check_obj = ModelDataAdder(model_date=datetime(2021, 9, 3, 0, 0, 0),
model_score=None, # will cause an integrity error
model_data=test_model_data_df[['date','casecount-mv-avg','pos-test-mv-avg', 'icu-top16-hosp-total-util']],
icu_predictions=test_predictions,
icu_prediction_dates=test_predict_data_df['predict-date'])
check_obj.add_data()
self.assertEqual(check_obj.rollback, True, "Should cause integrity error in sqlite, expecting True")
if __name__ == "__main__":
unittest.main()
| 47.679739
| 142
| 0.635641
| 1,000
| 7,295
| 4.417
| 0.153
| 0.024451
| 0.043468
| 0.029432
| 0.819108
| 0.809373
| 0.804392
| 0.804392
| 0.789903
| 0.789903
| 0
| 0.011936
| 0.276491
| 7,295
| 153
| 143
| 47.679739
| 0.824934
| 0.201645
| 0
| 0.645161
| 0
| 0
| 0.137604
| 0.021064
| 0
| 0
| 0
| 0
| 0.075269
| 1
| 0.064516
| false
| 0
| 0.043011
| 0
| 0.11828
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
61eee6afb6071b39a59540a0d7a0736b8eb3a066
| 37
|
py
|
Python
|
app/piki/__init__.py
|
mudream4869/imgcity
|
b12d8c941eb2cc3e619f0a8414f14349d5bf60fb
|
[
"Apache-2.0"
] | null | null | null |
app/piki/__init__.py
|
mudream4869/imgcity
|
b12d8c941eb2cc3e619f0a8414f14349d5bf60fb
|
[
"Apache-2.0"
] | 3
|
2021-03-14T01:43:48.000Z
|
2021-03-27T03:44:54.000Z
|
app/piki/__init__.py
|
mudream4869/imgcity
|
b12d8c941eb2cc3e619f0a8414f14349d5bf60fb
|
[
"Apache-2.0"
] | null | null | null |
from app.piki.piki import PikiReader
| 18.5
| 36
| 0.837838
| 6
| 37
| 5.166667
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.108108
| 37
| 1
| 37
| 37
| 0.939394
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
61f5213d8190932fa2e527de621828c04a8b45a8
| 1,689
|
py
|
Python
|
database_creation/verses/bible_book_regex.py
|
davidhansonc/ftta_nt_greek_study_tool
|
703aefa44d4bc226babd7fc9a5a98f7851762297
|
[
"MIT"
] | null | null | null |
database_creation/verses/bible_book_regex.py
|
davidhansonc/ftta_nt_greek_study_tool
|
703aefa44d4bc226babd7fc9a5a98f7851762297
|
[
"MIT"
] | null | null | null |
database_creation/verses/bible_book_regex.py
|
davidhansonc/ftta_nt_greek_study_tool
|
703aefa44d4bc226babd7fc9a5a98f7851762297
|
[
"MIT"
] | null | null | null |
nt_regex = {
'Matthew': '.*(M[a-z]*t[a-z]*)[\s\.](\d+):(\d+)[\s]*(.*)',
'Mark': '(M[a-z]*k[a-z]*)[\s\.](\d+):(\d+)[\s]*(.+)',
'Luke': '(L[a-z]*k[a-z]*)[\s\.](\d+):(\d+)[\s]*(.+)',
'John': '(^J[a-z]*n[a-z]*)[\s\.](\d+):(\d+)[\s]*(.+)',
'Acts': '(A[a-z]*c[a-z]*)[\s\.](\d+):(\d+)[\s]*(.+)',
'Romans': '(R[a-z]*m[a-z]*)[\s\.](\d+):(\d+)[\s]*(.+)',
'First Corinthians': '(1C[a-z]*o[a-z]*)[\s\.](\d+):(\d+)[\s]*(.+)',
'Second Corinthians': '(2C[a-z]*o[a-z]*)[\s\.](\d+):(\d+)[\s]*(.+)',
'Galatians': '(G[a-z]*a[a-z]*)[\s\.](\d+):(\d+)[\s]*(.+)',
'Ephesians': '(E[a-z]*p[a-z]*)[\s\.](\d+):(\d+)[\s]*(.+)',
'Philippians': '(P[a-ln-z]+)[\s\.](\d+):(\d+)[\s]*(.+)',
'Colossians': '(C[a-z]*o[a-z]*)[\s\.](\d+):(\d+)[\s]*(.+)',
'First Thessalonians': '(1Th[a-z]*)[\s\.](\d+):(\d+)[\s]*(.+)',
'Second Thessalonians': '(2Th[a-z]*)[\s\.](\d+):(\d+)[\s]*(.+)',
'First Timothy': '(1Ti[a-z]*)[\s\.](\d+):(\d+)[\s]*(.+)',
'Second Timothy': '(2Ti[a-z]*)[\s\.](\d+):(\d+)[\s]*(.+)',
'Titus': '(T[a-z]*t[a-z]*)[\s\.](\d+):(\d+)[\s]*(.+)',
'Philemon': '(P[a-z]*m[a-z]*)[\s\.](\d+):?(\d+)[\s]*(.+)',
'Hebrews': '(H[a-z]*e[a-z]*)[\s\.](\d+):(\d+)[\s]*(.+)',
'James': '(J[a-z]*a[a-z]*)[\s\.](\d+):(\d+)[\s]*(.+)',
'First Peter': '(1P[a-z]*)[\s\.](\d+):(\d+)[\s]*(.+)',
'Second Peter': '(2P[a-z]*)[\s\.](\d+):(\d+)[\s]*(.+)',
'First John': '(1J[a-z]*)[\s\.](\d+):(\d+)[\s]*(.+)',
'Second John': '(2J[a-z]*)[\s\.](\d+):?(\d+)[\s]*(.+)',
'Third John': '(3J[a-z]*)[\s\.](\d+):?(\d+)[\s]*(.+)',
'Jude': '(J[a-z]*d[a-z]*)[\s\.](\d+):?(\d+)[\s]*(.+)',
'Revelation': '(R[a-z]*v[a-z]*)[\s\.](\d+):(\d+)[\s]*(.+)',
}
| 58.241379
| 72
| 0.305506
| 281
| 1,689
| 1.83274
| 0.19573
| 0.16699
| 0.157282
| 0.209709
| 0.483495
| 0.473786
| 0.368932
| 0.192233
| 0.192233
| 0
| 0
| 0.00727
| 0.104204
| 1,689
| 29
| 73
| 58.241379
| 0.333113
| 0
| 0
| 0
| 0
| 0.586207
| 0.799408
| 0.644379
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
110ef7a06c03485b3d50c0e992b2c22ba894c64d
| 30
|
py
|
Python
|
src/westpa/oldtools/__init__.py
|
burntyellow/adelman_ci
|
cca251a51b34843faed0275cce01d7a307829993
|
[
"MIT"
] | 140
|
2015-01-07T23:30:36.000Z
|
2022-03-28T17:15:30.000Z
|
src/oldtools/__init__.py
|
burntyellow/westpa
|
9dc62478fcef0001b9c038cd56a40b6be1b9d64a
|
[
"MIT"
] | 157
|
2015-01-03T03:38:36.000Z
|
2022-03-31T14:12:16.000Z
|
src/oldtools/__init__.py
|
burntyellow/westpa
|
9dc62478fcef0001b9c038cd56a40b6be1b9d64a
|
[
"MIT"
] | 56
|
2015-01-02T21:21:40.000Z
|
2022-03-03T16:27:54.000Z
|
from . import aframe, miscfn
| 10
| 28
| 0.733333
| 4
| 30
| 5.5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.2
| 30
| 2
| 29
| 15
| 0.916667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
1114e9f15c6011ee32ee6bde7bf5d9f3b6f4f947
| 437
|
py
|
Python
|
PogDjango/pages/views.py
|
Cotton0419/PogDjango
|
4baaa9abc9f0e115292d9b1298be95270519825d
|
[
"MIT"
] | null | null | null |
PogDjango/pages/views.py
|
Cotton0419/PogDjango
|
4baaa9abc9f0e115292d9b1298be95270519825d
|
[
"MIT"
] | null | null | null |
PogDjango/pages/views.py
|
Cotton0419/PogDjango
|
4baaa9abc9f0e115292d9b1298be95270519825d
|
[
"MIT"
] | null | null | null |
from django.shortcuts import render
from django.http import HttpResponse
def index(request):
return render(request, "index.html", {})
def champions(request):
return render(request, "champions.html", {})
def clubs(request):
return render(request, "clubs.html", {})
def leaderboards(request):
return render(request, "leaderboards.html", {})
def summoner (request):
return render(request, 'summoner.html', {})
| 24.277778
| 51
| 0.700229
| 50
| 437
| 6.12
| 0.32
| 0.212418
| 0.310458
| 0.424837
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.157895
| 437
| 18
| 52
| 24.277778
| 0.831522
| 0
| 0
| 0
| 0
| 0
| 0.146119
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.416667
| false
| 0
| 0.166667
| 0.416667
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 6
|
3a464c3f024bca5a78f7c9616a9b41bc860522c9
| 215
|
py
|
Python
|
pts/feature/__init__.py
|
PandoraLS/pytorch-ts
|
a21427277082bc0ce980837e431c40296d07aa3f
|
[
"Apache-2.0",
"MIT"
] | 647
|
2020-03-16T16:47:50.000Z
|
2022-03-31T23:29:40.000Z
|
pts/feature/__init__.py
|
PandoraLS/pytorch-ts
|
a21427277082bc0ce980837e431c40296d07aa3f
|
[
"Apache-2.0",
"MIT"
] | 71
|
2020-03-17T10:58:14.000Z
|
2022-03-23T08:53:12.000Z
|
pts/feature/__init__.py
|
PandoraLS/pytorch-ts
|
a21427277082bc0ce980837e431c40296d07aa3f
|
[
"Apache-2.0",
"MIT"
] | 110
|
2020-03-16T17:39:45.000Z
|
2022-03-25T22:26:39.000Z
|
from .holiday import (
CustomDateFeatureSet,
CustomHolidayFeatureSet,
)
from .fourier_date_feature import fourier_time_features_from_frequency
from .lags import lags_for_fourier_time_features_from_frequency
| 30.714286
| 70
| 0.860465
| 25
| 215
| 6.92
| 0.52
| 0.127168
| 0.219653
| 0.265896
| 0.369942
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.106977
| 215
| 6
| 71
| 35.833333
| 0.901042
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
3a4ae90ff0d69a145df6cb15fcaace521e25cd46
| 214
|
py
|
Python
|
server/chalk/todos/admin.py
|
wcjordan/chalk
|
0b6ca6f2456a14bdca8fba1eba6ff9ee76f31c8e
|
[
"MIT"
] | 1
|
2020-05-30T00:47:17.000Z
|
2020-05-30T00:47:17.000Z
|
server/chalk/todos/admin.py
|
wcjordan/chalk
|
f710c6a75d8a04375af254c79f5f44612c1deead
|
[
"MIT"
] | 3
|
2021-03-30T13:39:41.000Z
|
2021-08-05T01:20:14.000Z
|
server/chalk/todos/admin.py
|
wcjordan/chalk
|
f710c6a75d8a04375af254c79f5f44612c1deead
|
[
"MIT"
] | null | null | null |
"""
Django admin config for Todos
"""
from django.contrib import admin
from simple_history.admin import SimpleHistoryAdmin
from chalk.todos import models
admin.site.register(models.TodoModel, SimpleHistoryAdmin)
| 21.4
| 57
| 0.82243
| 27
| 214
| 6.481481
| 0.592593
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.107477
| 214
| 9
| 58
| 23.777778
| 0.91623
| 0.135514
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.111111
| 0
| 1
| 0
| true
| 0
| 0.75
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
3a56caf63e32090fda3804379095fe523cd379b7
| 253,518
|
py
|
Python
|
modules/api/src/test/functional/tests/batch/create_batch_change_test.py
|
nspadaccino/vinyldns
|
1c2635a4414cfa5e8b28987f12a90ba8c6a09044
|
[
"Apache-2.0"
] | null | null | null |
modules/api/src/test/functional/tests/batch/create_batch_change_test.py
|
nspadaccino/vinyldns
|
1c2635a4414cfa5e8b28987f12a90ba8c6a09044
|
[
"Apache-2.0"
] | null | null | null |
modules/api/src/test/functional/tests/batch/create_batch_change_test.py
|
nspadaccino/vinyldns
|
1c2635a4414cfa5e8b28987f12a90ba8c6a09044
|
[
"Apache-2.0"
] | null | null | null |
import datetime
from typing import Optional, Union
import pytest
from utils import *
def does_not_contain(x):
is_not(contains_exactly(x))
def validate_change_error_response_basics(input_json, change_type, input_name, record_type, ttl, record_data):
assert_that(input_json["changeType"], is_(change_type))
assert_that(input_json["inputName"], is_(input_name))
assert_that(input_json["type"], is_(record_type))
assert_that(record_type, is_in(["A", "AAAA", "CNAME", "PTR", "TXT", "MX"]))
if change_type == "Add":
assert_that(input_json["ttl"], is_(ttl))
if record_type in ["A", "AAAA"]:
assert_that(input_json["record"]["address"], is_(record_data))
elif record_type == "CNAME":
assert_that(input_json["record"]["cname"], is_(record_data))
elif record_type == "PTR":
assert_that(input_json["record"]["ptrdname"], is_(record_data))
elif record_type == "TXT":
assert_that(input_json["record"]["text"], is_(record_data))
elif record_type == "MX":
assert_that(input_json["record"]["preference"], is_(record_data["preference"]))
assert_that(input_json["record"]["exchange"], is_(record_data["exchange"]))
return
def assert_failed_change_in_error_response(input_json, change_type="Add", input_name="fqdn.", record_type="A", ttl=200,
record_data: Optional[Union[str, dict]] = "1.1.1.1", error_messages=[]):
validate_change_error_response_basics(input_json, change_type, input_name, record_type, ttl, record_data)
assert_error(input_json, error_messages)
return
def assert_successful_change_in_error_response(input_json, change_type="Add", input_name="fqdn.", record_type="A", ttl=200,
record_data: Optional[Union[str, dict]] = "1.1.1.1"):
validate_change_error_response_basics(input_json, change_type, input_name, record_type, ttl, record_data)
assert_that("errors" in input_json, is_(False))
return
def assert_change_success(changes_json, zone, index, record_name, input_name, record_data, ttl=200,
record_type="A", change_type="Add"):
assert_that(changes_json[index]["zoneId"], is_(zone["id"]))
assert_that(changes_json[index]["zoneName"], is_(zone["name"]))
assert_that(changes_json[index]["recordName"], is_(record_name))
assert_that(changes_json[index]["inputName"], is_(input_name))
if change_type == "Add":
assert_that(changes_json[index]["ttl"], is_(ttl))
assert_that(changes_json[index]["type"], is_(record_type))
assert_that(changes_json[index]["id"], is_not(none()))
assert_that(changes_json[index]["changeType"], is_(change_type))
assert_that(record_type, is_in(["A", "AAAA", "CNAME", "PTR", "TXT", "MX"]))
if record_type in ["A", "AAAA"] and change_type == "Add":
assert_that(changes_json[index]["record"]["address"], is_(record_data))
elif record_type == "CNAME" and change_type == "Add":
assert_that(changes_json[index]["record"]["cname"], is_(record_data))
elif record_type == "PTR" and change_type == "Add":
assert_that(changes_json[index]["record"]["ptrdname"], is_(record_data))
elif record_type == "TXT" and change_type == "Add":
assert_that(changes_json[index]["record"]["text"], is_(record_data))
elif record_type == "MX" and change_type == "Add":
assert_that(changes_json[index]["record"]["preference"], is_(record_data["preference"]))
assert_that(changes_json[index]["record"]["exchange"], is_(record_data["exchange"]))
return
def assert_error(input_json, error_messages):
    """Assert the response contains exactly the expected error messages (no more, no fewer)."""
    errors = input_json["errors"]
    for expected in error_messages:
        assert_that(errors, has_item(expected))
    # length check guards against extra, unexpected errors
    assert_that(len(errors), is_(len(error_messages)))
@pytest.mark.serial
def test_create_batch_change_with_adds_success(shared_zone_test_context):
    """
    Test successfully creating a batch change with adds
    """
    client = shared_zone_test_context.ok_vinyldns_client
    parent_zone = shared_zone_test_context.parent_zone
    ok_zone = shared_zone_test_context.ok_zone
    classless_delegation_zone = shared_zone_test_context.classless_zone_delegation_zone
    classless_base_zone = shared_zone_test_context.classless_base_zone
    ip6_reverse_zone = shared_zone_test_context.ip6_16_nibble_zone
    partition_id = shared_zone_test_context.partition_id
    ok_zone_name = shared_zone_test_context.ok_zone["name"]
    parent_zone_name = shared_zone_test_context.parent_zone["name"]
    ip4_zone_name = shared_zone_test_context.classless_base_zone["name"]
    ip4_prefix = shared_zone_test_context.ip4_classless_prefix
    ip6_prefix = shared_zone_test_context.ip6_prefix
    # One "Add" per supported record type (A, AAAA, CNAME, PTR, TXT, MX), spread across
    # forward, classless-delegation, and ip4/ip6 reverse zones. The order of entries here
    # fixes the index used by every assert_change_success/get_recordset call below.
    batch_change_input = {
        "comments": "this is optional",
        "changes": [
            get_change_A_AAAA_json(f"{parent_zone_name}", address="4.5.6.7"),
            get_change_A_AAAA_json(f"{ok_zone_name}", record_type="AAAA", address=f"{ip6_prefix}::60"),
            get_change_A_AAAA_json(f"relative.{parent_zone_name}"),
            get_change_CNAME_json(f"CNAME.PARENT.COM{partition_id}", cname="nice.parent.com"),
            get_change_CNAME_json(f"_2cname.{parent_zone_name}", cname="nice.parent.com"),
            get_change_CNAME_json(f"4.{ip4_zone_name}", cname=f"4.4/30.{ip4_zone_name}"),
            get_change_PTR_json(f"{ip4_prefix}.193", ptrdname="www.vinyldns"),
            get_change_PTR_json(f"{ip4_prefix}.44"),
            get_change_PTR_json(f"{ip6_prefix}:1000::60", ptrdname="www.vinyldns"),
            get_change_TXT_json(f"txt.{ok_zone_name}"),
            get_change_TXT_json(f"{ok_zone_name}"),
            get_change_TXT_json(f"txt-unique-characters.{ok_zone_name}", text='a\\\\`=` =\\"Cat\\"\nattr=val'),
            get_change_TXT_json(f"txt.{ip4_zone_name}"),
            get_change_MX_json(f"mx.{ok_zone_name}", preference=0),
            get_change_MX_json(f"{ok_zone_name}", preference=1000, exchange="bar.foo.")
        ]
    }
    to_delete = []
    try:
        result = client.create_batch_change(batch_change_input, status=202)
        completed_batch = client.wait_until_batch_change_completed(result)
        record_set_list = [(change["zoneId"], change["recordSetId"]) for change in completed_batch["changes"]]
        to_delete = set(record_set_list)  # set here because multiple items in the batch combine to one RS
        # validate initial response
        assert_that(result["comments"], is_("this is optional"))
        assert_that(result["userName"], is_("ok"))
        assert_that(result["userId"], is_("ok"))
        assert_that(result["id"], is_not(none()))
        assert_that(completed_batch["status"], is_("Complete"))
        # one assertion per change, in submission order; inputs without a trailing dot
        # come back dotted, and relative names are expanded against their zone
        assert_change_success(result["changes"], zone=parent_zone, index=0,
                              record_name=f"{parent_zone_name}", input_name=f"{parent_zone_name}", record_data="4.5.6.7")
        assert_change_success(result["changes"], zone=ok_zone, index=1,
                              record_name=f"{ok_zone_name}", input_name=f"{ok_zone_name}", record_data=f"{ip6_prefix}::60", record_type="AAAA")
        assert_change_success(result["changes"], zone=parent_zone, index=2,
                              record_name="relative", input_name=f"relative.{parent_zone_name}", record_data="1.1.1.1")
        assert_change_success(result["changes"], zone=parent_zone, index=3,
                              record_name="CNAME", input_name=f"CNAME.PARENT.COM{partition_id}.", record_data="nice.parent.com.", record_type="CNAME")
        assert_change_success(result["changes"], zone=parent_zone, index=4,
                              record_name="_2cname", input_name=f"_2cname.{parent_zone_name}", record_data="nice.parent.com.", record_type="CNAME")
        assert_change_success(result["changes"], zone=classless_base_zone, index=5,
                              record_name="4", input_name=f"4.{ip4_zone_name}", record_data=f"4.4/30.{ip4_zone_name}", record_type="CNAME")
        assert_change_success(result["changes"], zone=classless_delegation_zone, index=6,
                              record_name="193", input_name=f"{ip4_prefix}.193", record_data="www.vinyldns.", record_type="PTR")
        assert_change_success(result["changes"], zone=classless_base_zone, index=7,
                              record_name="44", input_name=f"{ip4_prefix}.44", record_data="test.com.", record_type="PTR")
        assert_change_success(result["changes"], zone=ip6_reverse_zone, index=8,
                              record_name="0.6.0.0.0.0.0.0.0.0.0.0.0.0.0.0", input_name=f"{ip6_prefix}:1000::60", record_data="www.vinyldns.", record_type="PTR")
        assert_change_success(result["changes"], zone=ok_zone, index=9,
                              record_name="txt", input_name=f"txt.{ok_zone_name}", record_data="test", record_type="TXT")
        assert_change_success(result["changes"], zone=ok_zone, index=10,
                              record_name=f"{ok_zone_name}", input_name=f"{ok_zone_name}", record_data="test", record_type="TXT")
        assert_change_success(result["changes"], zone=ok_zone, index=11,
                              record_name="txt-unique-characters", input_name=f"txt-unique-characters.{ok_zone_name}", record_data='a\\\\`=` =\\"Cat\\"\nattr=val', record_type="TXT")
        assert_change_success(result["changes"], zone=classless_base_zone, index=12,
                              record_name="txt", input_name=f"txt.{ip4_zone_name}", record_data="test", record_type="TXT")
        assert_change_success(result["changes"], zone=ok_zone, index=13,
                              record_name="mx", input_name=f"mx.{ok_zone_name}", record_data={"preference": 0, "exchange": "foo.bar."}, record_type="MX")
        assert_change_success(result["changes"], zone=ok_zone, index=14,
                              record_name=f"{ok_zone_name}", input_name=f"{ok_zone_name}", record_data={"preference": 1000, "exchange": "bar.foo."}, record_type="MX")
        completed_status = [change["status"] == "Complete" for change in completed_batch["changes"]]
        assert_that(all(completed_status), is_(True))
        # get all the recordsets created by this batch, validate
        rs1 = client.get_recordset(record_set_list[0][0], record_set_list[0][1])["recordSet"]
        expected1 = {"name": parent_zone_name,
                     "zoneId": parent_zone["id"],
                     "type": "A",
                     "ttl": 200,
                     "records": [{"address": "4.5.6.7"}]}
        verify_recordset(rs1, expected1)
        rs3 = client.get_recordset(record_set_list[1][0], record_set_list[1][1])["recordSet"]
        expected3 = {"name": ok_zone_name,
                     "zoneId": ok_zone["id"],
                     "type": "AAAA",
                     "ttl": 200,
                     "records": [{"address": f"{ip6_prefix}::60"}]}
        verify_recordset(rs3, expected3)
        rs4 = client.get_recordset(record_set_list[2][0], record_set_list[2][1])["recordSet"]
        expected4 = {"name": "relative",
                     "zoneId": parent_zone["id"],
                     "type": "A",
                     "ttl": 200,
                     "records": [{"address": "1.1.1.1"}]}
        verify_recordset(rs4, expected4)
        rs5 = client.get_recordset(record_set_list[3][0], record_set_list[3][1])["recordSet"]
        expected5 = {"name": "CNAME",
                     "zoneId": parent_zone["id"],
                     "type": "CNAME",
                     "ttl": 200,
                     "records": [{"cname": "nice.parent.com."}]}
        verify_recordset(rs5, expected5)
        rs6 = client.get_recordset(record_set_list[4][0], record_set_list[4][1])["recordSet"]
        expected6 = {"name": "_2cname",
                     "zoneId": parent_zone["id"],
                     "type": "CNAME",
                     "ttl": 200,
                     "records": [{"cname": "nice.parent.com."}]}
        verify_recordset(rs6, expected6)
        rs7 = client.get_recordset(record_set_list[5][0], record_set_list[5][1])["recordSet"]
        expected7 = {"name": "4",
                     "zoneId": classless_base_zone["id"],
                     "type": "CNAME",
                     "ttl": 200,
                     "records": [{"cname": f"4.4/30.{ip4_zone_name}"}]}
        verify_recordset(rs7, expected7)
        rs8 = client.get_recordset(record_set_list[6][0], record_set_list[6][1])["recordSet"]
        expected8 = {"name": "193",
                     "zoneId": classless_delegation_zone["id"],
                     "type": "PTR",
                     "ttl": 200,
                     "records": [{"ptrdname": "www.vinyldns."}]}
        verify_recordset(rs8, expected8)
        rs9 = client.get_recordset(record_set_list[7][0], record_set_list[7][1])["recordSet"]
        expected9 = {"name": "44",
                     "zoneId": classless_base_zone["id"],
                     "type": "PTR",
                     "ttl": 200,
                     "records": [{"ptrdname": "test.com."}]}
        verify_recordset(rs9, expected9)
        rs10 = client.get_recordset(record_set_list[8][0], record_set_list[8][1])["recordSet"]
        expected10 = {"name": "0.6.0.0.0.0.0.0.0.0.0.0.0.0.0.0",
                      "zoneId": ip6_reverse_zone["id"],
                      "type": "PTR",
                      "ttl": 200,
                      "records": [{"ptrdname": "www.vinyldns."}]}
        verify_recordset(rs10, expected10)
        rs11 = client.get_recordset(record_set_list[9][0], record_set_list[9][1])["recordSet"]
        expected11 = {"name": "txt",
                      "zoneId": ok_zone["id"],
                      "type": "TXT",
                      "ttl": 200,
                      "records": [{"text": "test"}]}
        verify_recordset(rs11, expected11)
        rs12 = client.get_recordset(record_set_list[10][0], record_set_list[10][1])["recordSet"]
        expected12 = {"name": f"{ok_zone_name}",
                      "zoneId": ok_zone["id"],
                      "type": "TXT",
                      "ttl": 200,
                      "records": [{"text": "test"}]}
        verify_recordset(rs12, expected12)
        rs13 = client.get_recordset(record_set_list[11][0], record_set_list[11][1])["recordSet"]
        expected13 = {"name": "txt-unique-characters",
                      "zoneId": ok_zone["id"],
                      "type": "TXT",
                      "ttl": 200,
                      "records": [{"text": 'a\\\\`=` =\\"Cat\\"\nattr=val'}]}
        verify_recordset(rs13, expected13)
        rs14 = client.get_recordset(record_set_list[12][0], record_set_list[12][1])["recordSet"]
        expected14 = {"name": "txt",
                      "zoneId": classless_base_zone["id"],
                      "type": "TXT",
                      "ttl": 200,
                      "records": [{"text": "test"}]}
        verify_recordset(rs14, expected14)
        rs15 = client.get_recordset(record_set_list[13][0], record_set_list[13][1])["recordSet"]
        expected15 = {"name": "mx",
                      "zoneId": ok_zone["id"],
                      "type": "MX",
                      "ttl": 200,
                      "records": [{"preference": 0, "exchange": "foo.bar."}]}
        verify_recordset(rs15, expected15)
        rs16 = client.get_recordset(record_set_list[14][0], record_set_list[14][1])["recordSet"]
        expected16 = {"name": f"{ok_zone_name}",
                      "zoneId": ok_zone["id"],
                      "type": "MX",
                      "ttl": 200,
                      "records": [{"preference": 1000, "exchange": "bar.foo."}]}
        verify_recordset(rs16, expected16)
    finally:
        # best-effort cleanup of every recordset the batch created
        clear_zoneid_rsid_tuple_list(to_delete, client)
@pytest.mark.manual_batch_review
def test_create_batch_change_with_scheduled_time_and_owner_group_succeeds(shared_zone_test_context):
    """
    Test successfully creating a batch change with scheduled time and owner group set
    """
    client = shared_zone_test_context.ok_vinyldns_client
    # schedule one day in the future so the change lands in manual review instead of processing
    dt = (datetime.datetime.now() + datetime.timedelta(days=1)).strftime("%Y-%m-%dT%H:%M:%SZ")
    ok_zone_name = shared_zone_test_context.ok_zone["name"]
    batch_change_input = {
        "comments": "this is optional",
        "changes": [
            get_change_A_AAAA_json(generate_record_name(ok_zone_name), address="4.5.6.7"),
        ],
        "scheduledTime": dt,
        "ownerGroupId": shared_zone_test_context.ok_group["id"]
    }
    result = None
    try:
        result = client.create_batch_change(batch_change_input, status=202)
        # BUG FIX: the original called assert_that(value, "string"); hamcrest treats a
        # non-matcher second argument as the assertion *reason*, so both checks passed
        # vacuously for any truthy value. Wrap the expected values in is_() matchers.
        assert_that(result["status"], is_("Scheduled"))
        assert_that(result["scheduledTime"], is_(dt))
    finally:
        if result:
            # reject the pending change so it does not linger in the review queue
            rejecter = shared_zone_test_context.support_user_client
            rejecter.reject_batch_change(result["id"], status=200)
@pytest.mark.manual_batch_review
def test_create_scheduled_batch_change_with_zone_discovery_error_without_owner_group_fails(shared_zone_test_context):
    """
    Test creating a scheduled batch without owner group ID fails if there is a zone discovery error
    """
    ok_client = shared_zone_test_context.ok_vinyldns_client
    tomorrow = (datetime.datetime.now() + datetime.timedelta(days=1)).strftime("%Y-%m-%dT%H:%M:%SZ")
    # the target zone does not exist, which forces a zone discovery error -> manual review
    payload = {
        "comments": "this is optional",
        "changes": [get_change_A_AAAA_json("zone-discovery.failure.", address="4.5.6.7")],
        "scheduledTime": tomorrow,
    }
    response = ok_client.create_batch_change(payload, status=400)
    assert_that(response, is_("Batch change requires owner group for manual review."))
@pytest.mark.manual_batch_review
def test_create_scheduled_batch_change_with_scheduled_time_in_the_past_fails(shared_zone_test_context):
    """
    Test creating a scheduled batch with a scheduled time in the past
    """
    client = shared_zone_test_context.ok_vinyldns_client
    one_day_ago = (datetime.datetime.now() - datetime.timedelta(days=1)).strftime("%Y-%m-%dT%H:%M:%SZ")
    zone_name = shared_zone_test_context.ok_zone["name"]
    payload = {
        "comments": "this is optional",
        "changes": [get_change_A_AAAA_json(generate_record_name(zone_name), address="4.5.6.7")],
        "ownerGroupId": shared_zone_test_context.ok_group["id"],
        # scheduling in the past must be rejected outright
        "scheduledTime": one_day_ago,
    }
    response = client.create_batch_change(payload, status=400)
    assert_that(response, is_("Scheduled time must be in the future."))
@pytest.mark.manual_batch_review
def test_create_batch_change_with_soft_failures_scheduled_time_and_allow_manual_review_disabled_fails(
        shared_zone_test_context):
    """
    Test creating a batch change with soft errors, scheduled time, and allowManualReview disabled results in hard failure
    """
    client = shared_zone_test_context.ok_vinyldns_client
    scheduled = (datetime.datetime.now() + datetime.timedelta(days=1)).strftime("%Y-%m-%dT%H:%M:%SZ")
    payload = {
        "comments": "this is optional",
        "changes": [get_change_A_AAAA_json("non.existent", address="4.5.6.7")],
        "scheduledTime": scheduled,
        "ownerGroupId": shared_zone_test_context.ok_group["id"],
    }
    # second positional arg (False) disables allowManualReview, turning the soft
    # zone-discovery failure into a hard 400
    response = client.create_batch_change(payload, False, status=400)
    expected_error = ("Zone Discovery Failed: zone for \"non.existent.\" does not exist in VinylDNS. "
                      "If zone exists, then it must be connected to in VinylDNS.")
    assert_failed_change_in_error_response(response[0], input_name="non.existent.", record_type="A",
                                           record_data="4.5.6.7", error_messages=[expected_error])
def test_create_batch_change_without_scheduled_time_succeeds(shared_zone_test_context):
    """
    Test successfully creating a batch change without scheduled time set
    """
    client = shared_zone_test_context.ok_vinyldns_client
    zone_name = shared_zone_test_context.ok_zone["name"]
    payload = {
        "comments": "this is optional",
        "changes": [get_change_A_AAAA_json(generate_record_name(zone_name), address="4.5.6.7")],
    }
    created = []
    try:
        submitted = client.create_batch_change(payload, status=202)
        finished = client.wait_until_batch_change_completed(submitted)
        created = {(change["zoneId"], change["recordSetId"]) for change in finished["changes"]}
        # no scheduledTime was supplied, so the completed batch must not echo one back
        assert_that(finished, is_not(has_key("scheduledTime")))
    finally:
        clear_zoneid_rsid_tuple_list(created, client)
@pytest.mark.manual_batch_review
def test_create_batch_change_with_zone_discovery_error_without_owner_group_fails(shared_zone_test_context):
    """
    Test creating a batch change with zone discovery error fails if no owner group ID is provided
    """
    ok_client = shared_zone_test_context.ok_vinyldns_client
    # unknown zone -> zone discovery error -> manual review required -> owner group mandatory
    payload = {"changes": [get_change_A_AAAA_json("some.non-existent.zone.")]}
    response = ok_client.create_batch_change(payload, status=400)
    assert_that(response, is_("Batch change requires owner group for manual review."))
@pytest.mark.serial
def test_create_batch_change_with_updates_deletes_success(shared_zone_test_context):
    """
    Test successfully creating a batch change with updates and deletes
    """
    ok_client = shared_zone_test_context.ok_vinyldns_client
    dummy_client = shared_zone_test_context.dummy_vinyldns_client
    dummy_zone = shared_zone_test_context.dummy_zone
    ok_zone = shared_zone_test_context.ok_zone
    classless_zone_delegation_zone = shared_zone_test_context.classless_zone_delegation_zone
    # ACL rules grant the dummy group just enough access in zones it does not own:
    # delete CNAMEs in ok_zone, write PTRs in the classless delegation zone
    ok_zone_acl = generate_acl_rule("Delete", groupId=shared_zone_test_context.dummy_group["id"], recordMask=".*", recordTypes=["CNAME"])
    classless_zone_delegation_zone_acl = generate_acl_rule("Write", groupId=shared_zone_test_context.dummy_group["id"], recordTypes=["PTR"])
    # pre-existing recordsets the batch will update or delete
    rs_delete_dummy = create_recordset(dummy_zone, "delete", "AAAA", [{"address": "1:2:3:4:5:6:7:8"}])
    rs_update_dummy = create_recordset(dummy_zone, "update", "A", [{"address": "1.2.3.4"}])
    rs_delete_ok = create_recordset(ok_zone, "delete", "CNAME", [{"cname": "delete.cname."}])
    rs_update_classless = create_recordset(classless_zone_delegation_zone, "193", "PTR", [{"ptrdname": "will.change."}])
    txt_delete_dummy = create_recordset(dummy_zone, "delete-txt", "TXT", [{"text": "test"}])
    mx_delete_dummy = create_recordset(dummy_zone, "delete-mx", "MX", [{"preference": 1, "exchange": "foo.bar."}])
    mx_update_dummy = create_recordset(dummy_zone, "update-mx", "MX", [{"preference": 1, "exchange": "foo.bar."}])
    ok_zone_name = shared_zone_test_context.ok_zone["name"]
    dummy_zone_name = shared_zone_test_context.dummy_zone["name"]
    ip4_prefix = shared_zone_test_context.ip4_classless_prefix
    # an "update" is expressed as a DeleteRecordSet + Add pair on the same FQDN;
    # entry order fixes the indices asserted below
    batch_change_input = {
        "comments": "this is optional",
        "changes": [
            get_change_A_AAAA_json(f"delete.{dummy_zone_name}", record_type="AAAA", change_type="DeleteRecordSet"),
            get_change_A_AAAA_json(f"update.{dummy_zone_name}", ttl=300, address="1.2.3.4"),
            get_change_A_AAAA_json(f"Update.{dummy_zone_name}", change_type="DeleteRecordSet"),
            get_change_CNAME_json(f"delete.{ok_zone_name}", change_type="DeleteRecordSet"),
            get_change_PTR_json(f"{ip4_prefix}.193", ttl=300, ptrdname="has.changed."),
            get_change_PTR_json(f"{ip4_prefix}.193", change_type="DeleteRecordSet"),
            get_change_TXT_json(f"delete-txt.{dummy_zone_name}", change_type="DeleteRecordSet"),
            get_change_MX_json(f"delete-mx.{dummy_zone_name}", change_type="DeleteRecordSet"),
            get_change_MX_json(f"update-mx.{dummy_zone_name}", change_type="DeleteRecordSet"),
            get_change_MX_json(f"update-mx.{dummy_zone_name}", preference=1000)
        ]
    }
    to_create = [rs_delete_dummy, rs_update_dummy, rs_delete_ok, rs_update_classless, txt_delete_dummy, mx_delete_dummy, mx_update_dummy]
    to_delete = []
    try:
        # create each fixture recordset with the client that owns its zone
        for rs in to_create:
            if rs["zoneId"] == dummy_zone["id"]:
                create_client = dummy_client
            else:
                create_client = ok_client
            create_rs = create_client.create_recordset(rs, status=202)
            create_client.wait_until_recordset_change_status(create_rs, "Complete")
        # Configure ACL rules
        add_ok_acl_rules(shared_zone_test_context, [ok_zone_acl])
        add_classless_acl_rules(shared_zone_test_context, [classless_zone_delegation_zone_acl])
        result = dummy_client.create_batch_change(batch_change_input, status=202)
        completed_batch = dummy_client.wait_until_batch_change_completed(result)
        record_set_list = [(change["zoneId"], change["recordSetId"]) for change in completed_batch["changes"]]
        to_delete = set(record_set_list)  # set here because multiple items in the batch combine to one RS
        ## validate initial response
        assert_that(result["comments"], is_("this is optional"))
        assert_that(result["userName"], is_("dummy"))
        assert_that(result["userId"], is_("dummy"))
        assert_that(result["id"], is_not(none()))
        assert_that(completed_batch["status"], is_("Complete"))
        assert_change_success(result["changes"], zone=dummy_zone, index=0, record_name="delete",
                              input_name=f"delete.{dummy_zone_name}", record_data=None, record_type="AAAA", change_type="DeleteRecordSet")
        assert_change_success(result["changes"], zone=dummy_zone, index=1, record_name="update", ttl=300,
                              input_name=f"update.{dummy_zone_name}", record_data="1.2.3.4")
        assert_change_success(result["changes"], zone=dummy_zone, index=2, record_name="Update",
                              input_name=f"Update.{dummy_zone_name}", record_data=None, change_type="DeleteRecordSet")
        assert_change_success(result["changes"], zone=ok_zone, index=3, record_name="delete",
                              input_name=f"delete.{ok_zone_name}", record_data=None, record_type="CNAME", change_type="DeleteRecordSet")
        assert_change_success(result["changes"], zone=classless_zone_delegation_zone, index=4, record_name="193", ttl=300,
                              input_name=f"{ip4_prefix}.193", record_data="has.changed.", record_type="PTR")
        assert_change_success(result["changes"], zone=classless_zone_delegation_zone, index=5, record_name="193",
                              input_name=f"{ip4_prefix}.193", record_data=None, record_type="PTR", change_type="DeleteRecordSet")
        assert_change_success(result["changes"], zone=dummy_zone, index=6, record_name="delete-txt",
                              input_name=f"delete-txt.{dummy_zone_name}", record_data=None, record_type="TXT", change_type="DeleteRecordSet")
        assert_change_success(result["changes"], zone=dummy_zone, index=7, record_name="delete-mx",
                              input_name=f"delete-mx.{dummy_zone_name}", record_data=None, record_type="MX", change_type="DeleteRecordSet")
        assert_change_success(result["changes"], zone=dummy_zone, index=8, record_name="update-mx",
                              input_name=f"update-mx.{dummy_zone_name}", record_data=None, record_type="MX", change_type="DeleteRecordSet")
        assert_change_success(result["changes"], zone=dummy_zone, index=9, record_name="update-mx",
                              input_name=f"update-mx.{dummy_zone_name}", record_data={"preference": 1000, "exchange": "foo.bar."}, record_type="MX")
        # deleted recordsets should now 404; updated ones should reflect the new data
        rs1 = dummy_client.get_recordset(record_set_list[0][0], record_set_list[0][1], status=404)
        assert_that(rs1, is_("RecordSet with id " + record_set_list[0][1] + " does not exist."))
        rs2 = dummy_client.get_recordset(record_set_list[1][0], record_set_list[1][1])["recordSet"]
        expected2 = {"name": "update",
                     "zoneId": dummy_zone["id"],
                     "type": "A",
                     "ttl": 300,
                     "records": [{"address": "1.2.3.4"}]}
        verify_recordset(rs2, expected2)
        # since this is an update, record_set_list[1] and record_set_list[2] are the same record
        rs3 = dummy_client.get_recordset(record_set_list[2][0], record_set_list[2][1])["recordSet"]
        verify_recordset(rs3, expected2)
        rs4 = dummy_client.get_recordset(record_set_list[3][0], record_set_list[3][1], status=404)
        assert_that(rs4, is_("RecordSet with id " + record_set_list[3][1] + " does not exist."))
        rs5 = dummy_client.get_recordset(record_set_list[4][0], record_set_list[4][1])["recordSet"]
        expected5 = {"name": "193",
                     "zoneId": classless_zone_delegation_zone["id"],
                     "type": "PTR",
                     "ttl": 300,
                     "records": [{"ptrdname": "has.changed."}]}
        verify_recordset(rs5, expected5)
        # since this is an update, record_set_list[5] and record_set_list[4] are the same record
        rs6 = dummy_client.get_recordset(record_set_list[5][0], record_set_list[5][1])["recordSet"]
        verify_recordset(rs6, expected5)
        rs7 = dummy_client.get_recordset(record_set_list[6][0], record_set_list[6][1], status=404)
        assert_that(rs7, is_("RecordSet with id " + record_set_list[6][1] + " does not exist."))
        rs8 = dummy_client.get_recordset(record_set_list[7][0], record_set_list[7][1], status=404)
        assert_that(rs8, is_("RecordSet with id " + record_set_list[7][1] + " does not exist."))
        rs9 = dummy_client.get_recordset(record_set_list[8][0], record_set_list[8][1])["recordSet"]
        expected9 = {"name": "update-mx",
                     "zoneId": dummy_zone["id"],
                     "type": "MX",
                     "ttl": 200,
                     "records": [{"preference": 1000, "exchange": "foo.bar."}]}
        verify_recordset(rs9, expected9)
    finally:
        # Clean up updates
        dummy_deletes = [rs for rs in to_delete if rs[0] == dummy_zone["id"]]
        ok_deletes = [rs for rs in to_delete if rs[0] != dummy_zone["id"]]
        clear_zoneid_rsid_tuple_list(dummy_deletes, dummy_client)
        clear_zoneid_rsid_tuple_list(ok_deletes, ok_client)
        # Clean up ACL rules
        clear_ok_acl_rules(shared_zone_test_context)
        clear_classless_acl_rules(shared_zone_test_context)
def test_create_batch_change_without_comments_succeeds(shared_zone_test_context):
    """
    Test successfully creating a batch change without comments
    Test successfully creating a batch using inputName without a trailing dot, and that the
    returned inputName is dotted
    """
    client = shared_zone_test_context.ok_vinyldns_client
    parent_zone = shared_zone_test_context.parent_zone
    record_name = generate_record_name()
    record_fqdn = f"{record_name}.{parent_zone['name']}"
    # no "comments" key on purpose — it is optional
    payload = {"changes": [get_change_A_AAAA_json(record_fqdn, address="4.5.6.7")]}
    created = []
    try:
        result = client.create_batch_change(payload, status=202)
        completed = client.wait_until_batch_change_completed(result)
        created = [(change["zoneId"], change["recordSetId"]) for change in completed["changes"]]
        assert_change_success(result["changes"], zone=parent_zone, index=0, record_name=record_name, input_name=record_fqdn, record_data="4.5.6.7")
    finally:
        clear_zoneid_rsid_tuple_list(created, client)
def test_create_batch_change_with_owner_group_id_succeeds(shared_zone_test_context):
    """
    Test successfully creating a batch change with owner group ID specified
    """
    client = shared_zone_test_context.ok_vinyldns_client
    ok_zone = shared_zone_test_context.ok_zone
    owner_group_id = shared_zone_test_context.ok_group["id"]
    record_name = generate_record_name()
    record_fqdn = f"{record_name}.{ok_zone['name']}"
    payload = {
        "changes": [get_change_A_AAAA_json(record_fqdn, address="4.3.2.1")],
        "ownerGroupId": owner_group_id,
    }
    created = []
    try:
        result = client.create_batch_change(payload, status=202)
        completed = client.wait_until_batch_change_completed(result)
        created = [(change["zoneId"], change["recordSetId"]) for change in completed["changes"]]
        assert_change_success(result["changes"], zone=ok_zone, index=0, record_name=record_name, input_name=record_fqdn, record_data="4.3.2.1")
        # the owner group we submitted must be echoed on the completed batch
        assert_that(completed["ownerGroupId"], is_(owner_group_id))
    finally:
        clear_zoneid_rsid_tuple_list(created, client)
def test_create_batch_change_without_owner_group_id_succeeds(shared_zone_test_context):
    """
    Test successfully creating a batch change without owner group ID specified
    """
    client = shared_zone_test_context.ok_vinyldns_client
    ok_zone = shared_zone_test_context.ok_zone
    record_name = generate_record_name()
    record_fqdn = f"{record_name}.{ok_zone['name']}"
    payload = {"changes": [get_change_A_AAAA_json(record_fqdn, address="4.3.2.1")]}
    created = []
    try:
        result = client.create_batch_change(payload, status=202)
        completed = client.wait_until_batch_change_completed(result)
        created = [(change["zoneId"], change["recordSetId"]) for change in completed["changes"]]
        assert_change_success(result["changes"], zone=ok_zone, index=0, record_name=record_name, input_name=record_fqdn, record_data="4.3.2.1")
        # no ownerGroupId was supplied, so none should come back
        assert_that(completed, is_not(has_key("ownerGroupId")))
    finally:
        clear_zoneid_rsid_tuple_list(created, client)
@pytest.mark.skip_production
def test_create_batch_change_with_missing_ttl_returns_default_or_existing(shared_zone_test_context):
    """
    Test creating a batch change without a ttl returns the default or existing value
    """
    client = shared_zone_test_context.ok_vinyldns_client
    ok_zone = shared_zone_test_context.ok_zone
    update_name = generate_record_name()
    update_fqdn = "{0}.{1}".format(update_name, ok_zone["name"])
    # pre-existing record carries a non-default TTL of 300
    rs_update = create_recordset(ok_zone, update_name, "CNAME", [{"cname": "old-ttl.cname."}], ttl=300)
    # delete + re-add of the same FQDN (an update) with no ttl specified
    delete_existing = {
        "changeType": "DeleteRecordSet",
        "inputName": update_fqdn,
        "type": "CNAME",
    }
    re_add_without_ttl = {
        "changeType": "Add",
        "inputName": update_fqdn,
        "type": "CNAME",
        "record": {
            "cname": "updated-ttl.cname."
        }
    }
    # brand-new record, also with no ttl specified
    brand_new_without_ttl = {
        "changeType": "Add",
        "inputName": generate_record_name(ok_zone["name"]),
        "type": "CNAME",
        "record": {
            "cname": "new-ttl-record.cname."
        }
    }
    batch_change_input = {
        "comments": "this is optional",
        "changes": [delete_existing, re_add_without_ttl, brand_new_without_ttl]
    }
    to_delete = []
    try:
        create_rs = client.create_recordset(rs_update, status=202)
        client.wait_until_recordset_change_status(create_rs, "Complete")
        to_delete = [(create_rs["zone"]["id"], create_rs["recordSet"]["id"])]
        result = client.create_batch_change(batch_change_input, status=202)
        completed_batch = client.wait_until_batch_change_completed(result)
        record_set_list = [(change["zoneId"], change["recordSetId"]) for change in completed_batch["changes"]]
        to_delete = set(record_set_list)
        # the update keeps the existing TTL of 300
        updated_record = client.get_recordset(record_set_list[0][0], record_set_list[0][1])["recordSet"]
        assert_that(updated_record["ttl"], is_(300))
        # the new record falls back to the server default of 7200
        new_record = client.get_recordset(record_set_list[2][0], record_set_list[2][1])["recordSet"]
        assert_that(new_record["ttl"], is_(7200))
    finally:
        clear_zoneid_rsid_tuple_list(to_delete, client)
def test_create_batch_change_partial_failure(shared_zone_test_context):
    """
    Test batch change status with partial failures
    """
    client = shared_zone_test_context.ok_vinyldns_client
    ok_zone = shared_zone_test_context.ok_zone
    payload = {
        "comments": "this is optional",
        "changes": [
            get_change_A_AAAA_json(f"will-succeed.{ok_zone['name']}", address="4.5.6.7"),
            # this record will fail in processing
            get_change_A_AAAA_json(f"direct-to-backend.{ok_zone['name']}", address="4.5.6.7")
        ]
    }
    created = []
    try:
        # plant a conflicting record directly in the DNS backend so the second add fails there
        dns_add(shared_zone_test_context.ok_zone, "direct-to-backend", 200, "A", "1.2.3.4")
        result = client.create_batch_change(payload, status=202)
        completed = client.wait_until_batch_change_completed(result)
        # set here because multiple items in the batch combine to one RS
        created = {(change["zoneId"], change["recordSetId"])
                   for change in completed["changes"] if change["status"] == "Complete"}
        assert_that(completed["status"], is_("PartialFailure"))
    finally:
        clear_zoneid_rsid_tuple_list(created, client)
        dns_delete(shared_zone_test_context.ok_zone, "direct-to-backend", "A")
def test_create_batch_change_failed(shared_zone_test_context):
    """
    Test batch change status with all failures
    """
    client = shared_zone_test_context.ok_vinyldns_client
    zone_name = shared_zone_test_context.ok_zone["name"]
    payload = {
        "comments": "this is optional",
        "changes": [
            get_change_A_AAAA_json(f"backend-foo.{zone_name}", address="4.5.6.7"),
            get_change_A_AAAA_json(f"backend-already-exists.{zone_name}", address="4.5.6.7")
        ]
    }
    try:
        # these records already exist in the backend, but are not synced in zone
        dns_add(shared_zone_test_context.ok_zone, "backend-foo", 200, "A", "1.2.3.4")
        dns_add(shared_zone_test_context.ok_zone, "backend-already-exists", 200, "A", "1.2.3.4")
        result = client.create_batch_change(payload, status=202)
        completed = client.wait_until_batch_change_completed(result)
        # every change collides with the backend, so the whole batch fails
        assert_that(completed["status"], is_("Failed"))
    finally:
        dns_delete(shared_zone_test_context.ok_zone, "backend-foo", "A")
        dns_delete(shared_zone_test_context.ok_zone, "backend-already-exists", "A")
def test_empty_batch_fails(shared_zone_test_context):
    """
    Test creating batch without any changes fails with
    """
    empty_input = {
        "comments": "this should fail processing",
        "changes": []
    }
    client = shared_zone_test_context.ok_vinyldns_client
    errors = client.create_batch_change(empty_input, status=400)["errors"]
    # the message ends with a server-configured maximum, so match only the fixed prefix
    assert_that(errors[0], contains_string(
        "Batch change contained no changes. Batch change must have at least one change, up to a maximum of"))
def test_create_batch_change_without_changes_fails(shared_zone_test_context):
    """
    Test creating a batch change with missing changes fails
    """
    client = shared_zone_test_context.ok_vinyldns_client
    # "changes" key omitted entirely
    payload = {"comments": "this is optional"}
    response = client.create_batch_change(payload, status=400)
    assert_error(response, error_messages=["Missing BatchChangeInput.changes"])
def test_create_batch_change_with_missing_change_type_fails(shared_zone_test_context):
    """
    Test creating a batch change with missing change type fails
    """
    client = shared_zone_test_context.ok_vinyldns_client
    # a well-formed change except for the absent "changeType" field
    change_without_type = {
        "inputName": "thing.thing.com.",
        "type": "A",
        "ttl": 200,
        "record": {
            "address": "4.5.6.7"
        }
    }
    payload = {
        "comments": "this is optional",
        "changes": [change_without_type]
    }
    response = client.create_batch_change(payload, status=400)
    assert_error(response, error_messages=["Missing BatchChangeInput.changes.changeType"])
def test_create_batch_change_with_invalid_change_type_fails(shared_zone_test_context):
    """
    Test creating a batch change with invalid change type fails
    """
    client = shared_zone_test_context.ok_vinyldns_client
    # changeType holds a value outside the Add/DeleteRecordSet enum
    invalid_change = {
        "changeType": "InvalidChangeType",
        "data": {
            "inputName": "thing.thing.com.",
            "type": "A",
            "ttl": 200,
            "record": {
                "address": "4.5.6.7"
            }
        }
    }
    payload = {
        "comments": "this is optional",
        "changes": [invalid_change]
    }
    response = client.create_batch_change(payload, status=400)
    assert_error(response, error_messages=["Invalid ChangeInputType"])
def test_create_batch_change_with_missing_input_name_fails(shared_zone_test_context):
    """
    Test creating a batch change without an inputName fails
    """
    client = shared_zone_test_context.ok_vinyldns_client
    # a well-formed Add except for the absent "inputName" field
    change_without_input_name = {
        "changeType": "Add",
        "type": "A",
        "ttl": 200,
        "record": {
            "address": "4.5.6.7"
        }
    }
    payload = {
        "comments": "this is optional",
        "changes": [change_without_input_name]
    }
    response = client.create_batch_change(payload, status=400)
    assert_error(response, error_messages=["Missing BatchChangeInput.changes.inputName"])
def test_create_batch_change_with_unsupported_record_type_fails(shared_zone_test_context):
    """
    A record type outside the supported set for batch changes is rejected with a 400.
    """
    api = shared_zone_test_context.ok_vinyldns_client
    unsupported_change = {
        "changeType": "Add",
        "inputName": "thing.thing.com.",
        "type": "UNKNOWN",
        "ttl": 200,
        "record": {"address": "4.5.6.7"}
    }
    payload = {"comments": "this is optional", "changes": [unsupported_change]}
    result = api.create_batch_change(payload, status=400)
    assert_error(result,
                 error_messages=["Unsupported type UNKNOWN, valid types include: A, AAAA, CNAME, PTR, TXT, and MX"])
def test_create_batch_change_with_high_value_domain_fails(shared_zone_test_context):
    """
    Every change touching a high-value-domain name (add, update, delete, forward
    and reverse) fails with a 400; the one untouched name passes validation.
    """
    client = shared_zone_test_context.ok_vinyldns_client
    ok_zone_name = shared_zone_test_context.ok_zone["name"]
    ip4_prefix = shared_zone_test_context.ip4_classless_prefix
    ip6_prefix = shared_zone_test_context.ip6_prefix
    batch_change_input = {
        "comments": "this is optional",
        "changes": [
            get_change_A_AAAA_json(f"high-value-domain-add.{ok_zone_name}"),
            get_change_A_AAAA_json(f"high-value-domain-update.{ok_zone_name}", change_type="DeleteRecordSet"),
            get_change_A_AAAA_json(f"high-value-domain-update.{ok_zone_name}"),
            get_change_A_AAAA_json(f"high-value-domain-delete.{ok_zone_name}", change_type="DeleteRecordSet"),
            get_change_PTR_json(f"{ip4_prefix}.252"),
            get_change_PTR_json(f"{ip4_prefix}.253", change_type="DeleteRecordSet"),  # 253 exists already
            get_change_PTR_json(f"{ip4_prefix}.253"),
            get_change_PTR_json(f"{ip4_prefix}.253", change_type="DeleteRecordSet"),
            get_change_PTR_json(f"{ip6_prefix}:0:0:0:0:ffff"),
            get_change_PTR_json(f"{ip6_prefix}:0:0:0:ffff:0", change_type="DeleteRecordSet"),  # ffff:0 exists already
            get_change_PTR_json(f"{ip6_prefix}:0:0:0:ffff:0"),
            get_change_PTR_json(f"{ip6_prefix}:0:0:0:ffff:0", change_type="DeleteRecordSet"),
            get_change_A_AAAA_json(f"i-can-be-touched.{ok_zone_name}")
        ]
    }
    response = client.create_batch_change(batch_change_input, status=400)
    # Record name expected in the error for each change, in submission order.
    blocked_names = [
        f"high-value-domain-add.{ok_zone_name}",
        f"high-value-domain-update.{ok_zone_name}",
        f"high-value-domain-update.{ok_zone_name}",
        f"high-value-domain-delete.{ok_zone_name}",
        f"{ip4_prefix}.252",
        f"{ip4_prefix}.253",
        f"{ip4_prefix}.253",
        f"{ip4_prefix}.253",
        f"{ip6_prefix}:0:0:0:0:ffff",
        f"{ip6_prefix}:0:0:0:ffff:0",
        f"{ip6_prefix}:0:0:0:ffff:0",
        f"{ip6_prefix}:0:0:0:ffff:0",
    ]
    for idx, name in enumerate(blocked_names):
        assert_error(response[idx], error_messages=[f'Record name "{name}" is configured as a High Value Domain, so it cannot be modified.'])
    # The final change does not hit a high value domain, so it carries no errors.
    assert_that(response[12], is_not(has_key("errors")))
@pytest.mark.manual_batch_review
def test_create_batch_change_with_domains_requiring_review_succeeds(shared_zone_test_context):
    """
    Test creating a batch change with an input name requiring review is accepted.

    All twelve changes with review-requiring names (indices 0-11) should be
    flagged "NeedsReview"; the final change (index 12) should carry no
    validation errors. The batch is rejected afterwards so shared data is not
    mutated.
    """
    rejecter = shared_zone_test_context.support_user_client
    client = shared_zone_test_context.ok_vinyldns_client
    ok_zone_name = shared_zone_test_context.ok_zone["name"]
    ip4_prefix = shared_zone_test_context.ip4_classless_prefix
    ip6_prefix = shared_zone_test_context.ip6_prefix
    batch_change_input = {
        "ownerGroupId": shared_zone_test_context.ok_group["id"],
        "comments": "this is optional",
        "changes": [
            get_change_A_AAAA_json(f"needs-review-add.{ok_zone_name}"),
            get_change_A_AAAA_json(f"needs-review-update.{ok_zone_name}", change_type="DeleteRecordSet"),
            get_change_A_AAAA_json(f"needs-review-update.{ok_zone_name}"),
            get_change_A_AAAA_json(f"needs-review-delete.{ok_zone_name}", change_type="DeleteRecordSet"),
            get_change_PTR_json(f"{ip4_prefix}.254"),
            get_change_PTR_json(f"{ip4_prefix}.255", change_type="DeleteRecordSet"),  # 255 exists already
            get_change_PTR_json(f"{ip4_prefix}.255"),
            get_change_PTR_json(f"{ip4_prefix}.255", change_type="DeleteRecordSet"),
            get_change_PTR_json(f"{ip6_prefix}:0:0:0:ffff:1"),
            get_change_PTR_json(f"{ip6_prefix}:0:0:0:ffff:2", change_type="DeleteRecordSet"),  # ffff:2 exists already
            get_change_PTR_json(f"{ip6_prefix}:0:0:0:ffff:2"),
            get_change_PTR_json(f"{ip6_prefix}:0:0:0:ffff:2", change_type="DeleteRecordSet"),
            get_change_A_AAAA_json(f"i-can-be-touched.{ok_zone_name}")
        ]
    }
    response = None
    try:
        response = client.create_batch_change(batch_change_input, status=202)
        get_batch = client.get_batch_change(response["id"])
        assert_that(get_batch["status"], is_("PendingReview"))
        assert_that(get_batch["approvalStatus"], is_("PendingReview"))
        # Fix: was range(1, 11), which skipped changes 0 and 11 even though all
        # twelve review-requiring changes should be checked (mirrors the
        # high-value-domain test, which asserts on indices 0-11).
        for i in range(12):
            assert_that(get_batch["changes"][i]["status"], is_("NeedsReview"))
            assert_that(get_batch["changes"][i]["validationErrors"][0]["errorType"], is_("RecordRequiresManualReview"))
        assert_that(get_batch["changes"][12]["validationErrors"], empty())
    finally:
        # Clean up so data doesn't change
        if response:
            rejecter.reject_batch_change(response["id"], status=200)
@pytest.mark.manual_batch_review
def test_create_batch_change_with_soft_failures_and_allow_manual_review_disabled_fails(shared_zone_test_context):
    """
    Test creating a batch change with soft errors and allowManualReview disabled results in hard failure.

    The second positional argument to create_batch_change is False here
    (presumably disabling manual review for this request — verify against the
    client implementation), which turns the zone-discovery soft error into a
    hard 400 failure instead of queueing the batch for review.
    """
    client = shared_zone_test_context.ok_vinyldns_client
    # Removed an unused local (`dt`, a formatted tomorrow-timestamp) that was
    # computed but never referenced.
    batch_change_input = {
        "comments": "this is optional",
        "changes": [
            get_change_A_AAAA_json("non.existent", address="4.5.6.7"),
        ],
        "ownerGroupId": shared_zone_test_context.ok_group["id"]
    }
    response = client.create_batch_change(batch_change_input, False, status=400)
    assert_failed_change_in_error_response(response[0], input_name="non.existent.", record_type="A",
                                           record_data="4.5.6.7",
                                           error_messages=["Zone Discovery Failed: zone for \"non.existent.\" does not exist in VinylDNS. "
                                                           "If zone exists, then it must be connected to in VinylDNS."])
def test_create_batch_change_with_invalid_record_type_fails(shared_zone_test_context):
    """
    An unparseable record type ("B") in a batch change is rejected with a 400.
    """
    api = shared_zone_test_context.ok_vinyldns_client
    payload = {
        "comments": "this is optional",
        "changes": [get_change_A_AAAA_json("thing.thing.com.", "B", address="4.5.6.7")]
    }
    result = api.create_batch_change(payload, status=400)
    assert_error(result, error_messages=["Invalid RecordType"])
def test_create_batch_change_with_missing_record_fails(shared_zone_test_context):
    """
    An Add change entry without a "record" field is rejected with a 400.
    """
    api = shared_zone_test_context.ok_vinyldns_client
    recordless_change = {
        "changeType": "Add",
        "inputName": "thing.thing.com.",
        "type": "A",
        "ttl": 200
    }
    payload = {"comments": "this is optional", "changes": [recordless_change]}
    result = api.create_batch_change(payload, status=400)
    assert_error(result, error_messages=["Missing BatchChangeInput.changes.record.address"])
def test_create_batch_change_with_empty_record_fails(shared_zone_test_context):
    """
    An Add change entry whose "record" object is empty is rejected with a 400.
    """
    api = shared_zone_test_context.ok_vinyldns_client
    empty_record_change = {
        "changeType": "Add",
        "inputName": "thing.thing.com.",
        "type": "A",
        "ttl": 200,
        "record": {}
    }
    payload = {"comments": "this is optional", "changes": [empty_record_change]}
    result = api.create_batch_change(payload, status=400)
    assert_error(result, error_messages=["Missing A.address"])
def test_create_batch_change_with_bad_A_record_data_fails(shared_zone_test_context):
    """
    A malformed IPv4 address in an A record is rejected (400) for both Add and
    DeleteRecordSet changes.
    """
    api = shared_zone_test_context.ok_vinyldns_client
    # First iteration exercises the default Add change, second DeleteRecordSet.
    for extra_kwargs in ({}, {"change_type": "DeleteRecordSet"}):
        request = {
            "comments": "this is optional",
            "changes": [
                get_change_A_AAAA_json("thing.thing.com.", address="bad address", **extra_kwargs)
            ]
        }
        error = api.create_batch_change(request, status=400)
        assert_error(error, error_messages=["A must be a valid IPv4 Address"])
def test_create_batch_change_with_bad_AAAA_record_data_fails(shared_zone_test_context):
    """
    A malformed IPv6 address in an AAAA record is rejected (400) for both Add
    and DeleteRecordSet changes.
    """
    api = shared_zone_test_context.ok_vinyldns_client
    # First iteration exercises the default Add change, second DeleteRecordSet.
    for extra_kwargs in ({}, {"change_type": "DeleteRecordSet"}):
        request = {
            "comments": "this is optional",
            "changes": [
                get_change_A_AAAA_json("thing.thing.com.", record_type="AAAA", address="bad address", **extra_kwargs)
            ]
        }
        error = api.create_batch_change(request, status=400)
        assert_error(error, error_messages=["AAAA must be a valid IPv6 Address"])
def test_create_batch_change_with_incorrect_CNAME_record_attribute_fails(shared_zone_test_context):
    """
    Supplying "address" instead of "cname" in a CNAME record payload is
    rejected with a 400.
    """
    api = shared_zone_test_context.ok_vinyldns_client
    wrong_attribute_change = {
        "changeType": "Add",
        "inputName": "bizz.bazz.",
        "type": "CNAME",
        "ttl": 200,
        "record": {"address": "buzz."}
    }
    payload = {"comments": "this is optional", "changes": [wrong_attribute_change]}
    response = api.create_batch_change(payload, status=400)
    assert_that(response["errors"], contains_exactly("Missing CNAME.cname"))
def test_create_batch_change_with_incorrect_PTR_record_attribute_fails(shared_zone_test_context):
    """
    Supplying "address" instead of "ptrdname" in a PTR record payload is
    rejected with a 400.
    """
    api = shared_zone_test_context.ok_vinyldns_client
    wrong_attribute_change = {
        "changeType": "Add",
        "inputName": "4.5.6.7",
        "type": "PTR",
        "ttl": 200,
        "record": {"address": "buzz."}
    }
    payload = {"comments": "this is optional", "changes": [wrong_attribute_change]}
    response = api.create_batch_change(payload, status=400)
    assert_that(response["errors"], contains_exactly("Missing PTR.ptrdname"))
def test_create_batch_change_with_bad_CNAME_record_attribute_fails(shared_zone_test_context):
    """
    A CNAME target exceeding the 255-character domain-name limit is rejected
    (400) for both Add and DeleteRecordSet changes.
    """
    api = shared_zone_test_context.ok_vinyldns_client
    oversized_target = "s." + "s" * 256
    # First iteration exercises the default Add change, second DeleteRecordSet.
    for extra_kwargs in ({}, {"change_type": "DeleteRecordSet"}):
        request = {
            "comments": "this is optional",
            "changes": [
                get_change_CNAME_json(input_name="bizz.baz.", cname=oversized_target, **extra_kwargs)
            ]
        }
        error = api.create_batch_change(request, status=400)
        assert_error(error, error_messages=["CNAME domain name must not exceed 255 characters"])
def test_create_batch_change_with_bad_PTR_record_attribute_fails(shared_zone_test_context):
    """
    Test creating a batch change with malformed PTR record fails.

    A 256-character ptrdname exceeds the limit and must be rejected for both
    Add and DeleteRecordSet changes.
    """
    client = shared_zone_test_context.ok_vinyldns_client
    bad_PTR_data_request_add = {
        "comments": "this is optional",
        "changes": [
            get_change_PTR_json("4.5.6.7", ptrdname="s" * 256),
        ]
    }
    bad_PTR_data_request_delete_record_set = {
        "comments": "this is optional",
        "changes": [
            # Fix: change_type was missing, so this request duplicated the Add
            # case instead of covering DeleteRecordSet like the sibling
            # A/AAAA/CNAME tests do.
            get_change_PTR_json("4.5.6.7", ptrdname="s" * 256, change_type="DeleteRecordSet"),
        ]
    }
    error1 = client.create_batch_change(bad_PTR_data_request_add, status=400)
    error2 = client.create_batch_change(bad_PTR_data_request_delete_record_set, status=400)
    assert_error(error1, error_messages=["PTR must be less than 255 characters"])
    assert_error(error2, error_messages=["PTR must be less than 255 characters"])
def test_create_batch_change_with_missing_input_name_for_delete_fails(shared_zone_test_context):
    """
    A DeleteRecordSet change entry without "inputName" is rejected with a 400.
    """
    api = shared_zone_test_context.ok_vinyldns_client
    nameless_delete = {
        "changeType": "DeleteRecordSet",
        "type": "A"
    }
    payload = {"comments": "this is optional", "changes": [nameless_delete]}
    result = api.create_batch_change(payload, status=400)
    assert_error(result, error_messages=["Missing BatchChangeInput.changes.inputName"])
def test_create_batch_change_with_missing_record_type_for_delete_fails(shared_zone_test_context):
    """
    A DeleteRecordSet change entry without "type" is rejected with a 400.
    """
    api = shared_zone_test_context.ok_vinyldns_client
    typeless_delete = {
        "changeType": "DeleteRecordSet",
        "inputName": "thing.thing.com."
    }
    payload = {"comments": "this is optional", "changes": [typeless_delete]}
    result = api.create_batch_change(payload, status=400)
    assert_error(result, error_messages=["Missing BatchChangeInput.changes.type"])
def test_mx_recordtype_cannot_have_invalid_preference(shared_zone_test_context):
    """
    MX preference values outside the unsigned 16-bit range (-1 and 65536) are
    rejected with a 400 for both Add and DeleteRecordSet batch changes.
    """
    ok_client = shared_zone_test_context.ok_vinyldns_client
    ok_zone_name = shared_zone_test_context.ok_zone["name"]
    # One change per (boundary violation, change type) combination.
    invalid_changes = [
        get_change_MX_json(f"too-small.{ok_zone_name}", preference=-1),
        get_change_MX_json(f"too-big.{ok_zone_name}", preference=65536),
        get_change_MX_json(f"too-small.{ok_zone_name}", preference=-1, change_type="DeleteRecordSet"),
        get_change_MX_json(f"too-big.{ok_zone_name}", preference=65536, change_type="DeleteRecordSet"),
    ]
    for change in invalid_changes:
        request = {"comments": "this is optional", "changes": [change]}
        error = ok_client.create_batch_change(request, status=400)
        assert_error(error, error_messages=["MX.preference must be a 16 bit integer"])
def test_create_batch_change_with_invalid_duplicate_record_names_fails(shared_zone_test_context):
    """
    Test creating a batch change that contains a CNAME record and another record with the same name fails
    """
    client = shared_zone_test_context.ok_vinyldns_client
    ok_zone_name: str = shared_zone_test_context.ok_zone["name"]
    # Pre-existing records the batch pairs with same-name CNAME changes.
    rs_A_delete = create_recordset(shared_zone_test_context.ok_zone, "delete1", "A", [{"address": "10.1.1.1"}])
    rs_CNAME_delete = create_recordset(shared_zone_test_context.ok_zone, "delete-this1", "CNAME",
                                       [{"cname": "cname."}])
    to_create = [rs_A_delete, rs_CNAME_delete]
    to_delete = []
    # Zone name without trailing dot(s), used to submit names without the final dot.
    # NOTE(review): rstrip('.') removes ALL trailing dots, not just one — fine
    # here since zone names end with a single dot.
    bare_ok_zone_name = ok_zone_name.rstrip('.')
    batch_change_input = {
        "comments": "this is optional",
        "changes": [
            # Add A + add CNAME with the same name: only the CNAME should fail.
            get_change_A_AAAA_json(f"thing1.{ok_zone_name}", address="4.5.6.7"),
            get_change_CNAME_json(f"thing1.{bare_ok_zone_name}"),
            # Delete existing A, add CNAME of the same name: both should pass.
            get_change_A_AAAA_json(f"delete1.{bare_ok_zone_name}", change_type="DeleteRecordSet"),
            get_change_CNAME_json(f"delete1.{bare_ok_zone_name}"),
            # Add A while deleting the existing same-name CNAME: both should pass.
            get_change_A_AAAA_json(f"delete-this1.{bare_ok_zone_name}", address="4.5.6.7"),
            get_change_CNAME_json(f"delete-this1.{bare_ok_zone_name}", change_type="DeleteRecordSet")
        ]
    }
    try:
        for create_json in to_create:
            create_result = client.create_recordset(create_json, status=202)
            to_delete.append(client.wait_until_recordset_change_status(create_result, "Complete"))
        response = client.create_batch_change(batch_change_input, status=400)
        assert_successful_change_in_error_response(response[0], input_name=f"thing1.{ok_zone_name}", record_data="4.5.6.7")
        # The duplicate-name failure is attached to the CNAME change, not the A.
        assert_failed_change_in_error_response(response[1], input_name=f"thing1.{ok_zone_name}", record_type="CNAME", record_data="test.com.",
                                               error_messages=[f'Record Name "thing1.{ok_zone_name}" Not Unique In Batch Change: '
                                                               f'cannot have multiple "CNAME" records with the same name.'])
        assert_successful_change_in_error_response(response[2], input_name=f"delete1.{ok_zone_name}", change_type="DeleteRecordSet")
        assert_successful_change_in_error_response(response[3], input_name=f"delete1.{ok_zone_name}", record_type="CNAME", record_data="test.com.")
        assert_successful_change_in_error_response(response[4], input_name=f"delete-this1.{ok_zone_name}", record_data="4.5.6.7")
        assert_successful_change_in_error_response(response[5], input_name=f"delete-this1.{ok_zone_name}", change_type="DeleteRecordSet", record_type="CNAME")
    finally:
        clear_recordset_list(to_delete, client)
def test_create_batch_change_with_readonly_user_fails(shared_zone_test_context):
    """
    Test creating a batch change with a read-only user fails (acl rules on zone)
    """
    dummy_client = shared_zone_test_context.dummy_vinyldns_client
    ok_client = shared_zone_test_context.ok_vinyldns_client
    ok_zone_name = shared_zone_test_context.ok_zone["name"]
    ok_group_name = shared_zone_test_context.ok_group["name"]
    # Grant the dummy group Read-only access to all A/AAAA records in the ok zone.
    acl_rule = generate_acl_rule("Read", groupId=shared_zone_test_context.dummy_group["id"], recordMask=".*",
                                 recordTypes=["A", "AAAA"])
    delete_rs = create_recordset(shared_zone_test_context.ok_zone, "delete", "A", [{"address": "127.0.0.1"}], 300)
    update_rs = create_recordset(shared_zone_test_context.ok_zone, "update", "A", [{"address": "127.0.0.1"}], 300)
    batch_change_input = {
        "comments": "this is optional",
        "changes": [
            get_change_A_AAAA_json(f"relative.{ok_zone_name}", address="4.5.6.7"),
            get_change_A_AAAA_json(f"delete.{ok_zone_name}", change_type="DeleteRecordSet"),
            get_change_A_AAAA_json(f"update.{ok_zone_name}", address="1.2.3.4"),
            get_change_A_AAAA_json(f"update.{ok_zone_name}", change_type="DeleteRecordSet")
        ]
    }
    to_delete = []
    try:
        add_ok_acl_rules(shared_zone_test_context, acl_rule)
        for rs in [delete_rs, update_rs]:
            create_result = ok_client.create_recordset(rs, status=202)
            to_delete.append(ok_client.wait_until_recordset_change_status(create_result, "Complete"))
        # Read access alone is insufficient: every change is rejected as unauthorized.
        errors = dummy_client.create_batch_change(batch_change_input, status=400)
        assert_failed_change_in_error_response(errors[0], input_name=f"relative.{ok_zone_name}", record_data="4.5.6.7",
                                               error_messages=[f'User \"dummy\" is not authorized. Contact zone owner group: {ok_group_name} at test@test.com to make DNS changes.'])
        assert_failed_change_in_error_response(errors[1], input_name=f"delete.{ok_zone_name}", change_type="DeleteRecordSet",
                                               record_data="4.5.6.7",
                                               error_messages=[f'User "dummy" is not authorized. Contact zone owner group: {ok_group_name} at test@test.com to make DNS changes.'])
        assert_failed_change_in_error_response(errors[2], input_name=f"update.{ok_zone_name}", record_data="1.2.3.4",
                                               error_messages=[f'User \"dummy\" is not authorized. Contact zone owner group: {ok_group_name} at test@test.com to make DNS changes.'])
        assert_failed_change_in_error_response(errors[3], input_name=f"update.{ok_zone_name}", change_type="DeleteRecordSet",
                                               record_data=None,
                                               error_messages=[f'User \"dummy\" is not authorized. Contact zone owner group: {ok_group_name} at test@test.com to make DNS changes.'])
    finally:
        # Teardown order matters: remove the ACL rule first, then the records.
        clear_ok_acl_rules(shared_zone_test_context)
        clear_recordset_list(to_delete, ok_client)
def test_a_recordtype_add_checks(shared_zone_test_context):
    """
    Test all add validations performed on A records submitted in batch changes
    """
    client = shared_zone_test_context.ok_vinyldns_client
    dummy_zone_name = shared_zone_test_context.dummy_zone["name"]
    dummy_group_name = shared_zone_test_context.dummy_group["name"]
    parent_zone_name = shared_zone_test_context.parent_zone["name"]
    # Pre-existing A record: adding it again must fail as a duplicate.
    existing_a_name = generate_record_name()
    existing_a_fqdn = "{0}.{1}".format(existing_a_name, shared_zone_test_context.parent_zone["name"])
    existing_a = create_recordset(shared_zone_test_context.parent_zone, existing_a_name, "A", [{"address": "10.1.1.1"}],
                                  100)
    # Pre-existing CNAME: adding an A with the same name must be a CNAME conflict.
    existing_cname_name = generate_record_name()
    existing_cname_fqdn = "{0}.{1}".format(existing_cname_name, shared_zone_test_context.parent_zone["name"])
    existing_cname = create_recordset(shared_zone_test_context.parent_zone, existing_cname_name, "CNAME",
                                      [{"cname": "cname.data."}], 100)
    good_record_name = generate_record_name()
    good_record_fqdn = "{0}.{1}".format(good_record_name, shared_zone_test_context.parent_zone["name"])
    batch_change_input = {
        "changes": [
            # valid changes
            get_change_A_AAAA_json(good_record_fqdn, address="1.2.3.4"),
            # input validation failures
            get_change_A_AAAA_json(f"bad-ttl-and-invalid-name$.{parent_zone_name}", ttl=29, address="1.2.3.4"),
            get_change_A_AAAA_json("reverse-zone.10.10.in-addr.arpa.", address="1.2.3.4"),
            # zone discovery failures
            get_change_A_AAAA_json(f"no.subzone.{parent_zone_name}", address="1.2.3.4"),
            get_change_A_AAAA_json("no.zone.at.all.", address="1.2.3.4"),
            # context validation failures
            get_change_CNAME_json(f"cname-duplicate.{parent_zone_name}"),
            get_change_A_AAAA_json(f"cname-duplicate.{parent_zone_name}", address="1.2.3.4"),
            get_change_A_AAAA_json(existing_a_fqdn, address="1.2.3.4"),
            get_change_A_AAAA_json(existing_cname_fqdn, address="1.2.3.4"),
            get_change_A_AAAA_json(f"user-add-unauthorized.{dummy_zone_name}", address="1.2.3.4")
        ]
    }
    to_create = [existing_a, existing_cname]
    to_delete = []
    try:
        for create_json in to_create:
            create_result = client.create_recordset(create_json, status=202)
            to_delete.append(client.wait_until_recordset_change_status(create_result, "Complete"))
        response = client.create_batch_change(batch_change_input, status=400)
        # successful changes
        assert_successful_change_in_error_response(response[0], input_name=good_record_fqdn, record_data="1.2.3.4")
        # ttl, domain name, reverse zone input validations
        assert_failed_change_in_error_response(response[1], input_name=f"bad-ttl-and-invalid-name$.{parent_zone_name}", ttl=29,
                                               record_data="1.2.3.4",
                                               error_messages=['Invalid TTL: "29", must be a number between 30 and 2147483647.',
                                                               f'Invalid domain name: "bad-ttl-and-invalid-name$.{parent_zone_name}", '
                                                               "valid domain names must be letters, numbers, underscores, and hyphens, joined by dots, and terminated with a dot."])
        assert_failed_change_in_error_response(response[2], input_name="reverse-zone.10.10.in-addr.arpa.",
                                               record_data="1.2.3.4",
                                               error_messages=["Invalid Record Type In Reverse Zone: record with name \"reverse-zone.10.10.in-addr.arpa.\" and "
                                                               "type \"A\" is not allowed in a reverse zone."])
        # zone discovery failure
        assert_failed_change_in_error_response(response[3], input_name=f"no.subzone.{parent_zone_name}", record_data="1.2.3.4",
                                               error_messages=[f'Zone Discovery Failed: zone for "no.subzone.{parent_zone_name}" does not exist in VinylDNS. '
                                                               f'If zone exists, then it must be connected to in VinylDNS.'])
        assert_failed_change_in_error_response(response[4], input_name="no.zone.at.all.", record_data="1.2.3.4",
                                               error_messages=['Zone Discovery Failed: zone for "no.zone.at.all." does not exist in VinylDNS. '
                                                               'If zone exists, then it must be connected to in VinylDNS.'])
        # context validations: duplicate name failure is always on the cname
        assert_failed_change_in_error_response(response[5], input_name=f"cname-duplicate.{parent_zone_name}",
                                               record_type="CNAME", record_data="test.com.",
                                               error_messages=[f"Record Name \"cname-duplicate.{parent_zone_name}\" Not Unique In Batch Change: "
                                                               f"cannot have multiple \"CNAME\" records with the same name."])
        assert_successful_change_in_error_response(response[6], input_name=f"cname-duplicate.{parent_zone_name}",
                                                   record_data="1.2.3.4")
        # context validations: conflicting recordsets, unauthorized error
        assert_failed_change_in_error_response(response[7], input_name=existing_a_fqdn, record_data="1.2.3.4",
                                               error_messages=[f"Record \"{existing_a_fqdn}\" Already Exists: "
                                                               f"cannot add an existing record; to update it, issue a DeleteRecordSet then an Add."])
        assert_failed_change_in_error_response(response[8], input_name=existing_cname_fqdn,
                                               record_data="1.2.3.4",
                                               error_messages=[f'CNAME Conflict: CNAME record names must be unique. '
                                                               f'Existing record with name "{existing_cname_fqdn}" and type \"CNAME\" conflicts with this record.'])
        assert_failed_change_in_error_response(response[9], input_name=f"user-add-unauthorized.{dummy_zone_name}",
                                               record_data="1.2.3.4",
                                               error_messages=[f"User \"ok\" is not authorized. Contact zone owner group: {dummy_group_name} at test@test.com to make DNS changes."])
    finally:
        clear_recordset_list(to_delete, client)
def test_a_recordtype_update_delete_checks(shared_zone_test_context):
    """
    Test all update and delete validations performed on A records submitted in batch changes
    """
    ok_client = shared_zone_test_context.ok_vinyldns_client
    dummy_client = shared_zone_test_context.dummy_vinyldns_client
    ok_zone = shared_zone_test_context.ok_zone
    dummy_zone = shared_zone_test_context.dummy_zone
    ok_zone_name = ok_zone["name"]
    dummy_zone_name = dummy_zone["name"]
    dummy_group_name = shared_zone_test_context.dummy_group["name"]
    group_to_delete = {}
    # Temporary record-owner group, created in the try block and deleted in finally.
    temp_group = {
        "name": "test-group-for-record-in-private-zone",
        "email": "test@test.com",
        "description": "for testing that a get batch change still works when record owner group is deleted",
        "members": [{"id": "ok"}, {"id": "dummy"}],
        "admins": [{"id": "ok"}, {"id": "dummy"}]
    }
    # Records in the ok zone the "ok" user may legitimately delete/update.
    rs_delete_name = generate_record_name()
    rs_delete_fqdn = rs_delete_name + f".{ok_zone_name}"
    rs_delete_ok = create_recordset(ok_zone, rs_delete_name, "A", [{"address": "1.1.1.1"}])
    rs_update_name = generate_record_name()
    rs_update_fqdn = rs_update_name + f".{ok_zone_name}"
    rs_update_ok = create_recordset(ok_zone, rs_update_name, "A", [{"address": "1.1.1.1"}])
    # Records in the dummy zone the "ok" user is NOT authorized to touch.
    rs_delete_dummy_name = generate_record_name()
    rs_delete_dummy_fqdn = rs_delete_dummy_name + f".{dummy_zone_name}"
    rs_delete_dummy = create_recordset(dummy_zone, rs_delete_dummy_name, "A", [{"address": "1.1.1.1"}])
    rs_update_dummy_name = generate_record_name()
    rs_update_dummy_fqdn = rs_update_dummy_name + f".{dummy_zone_name}"
    rs_update_dummy = create_recordset(dummy_zone, rs_update_dummy_name, "A", [{"address": "1.1.1.1"}])
    # NOTE(review): both "delete" and "update" owner-group FQDNs are built from
    # the same generated name, so they are identical strings — presumably
    # intentional (delete + re-add of the owned record); confirm.
    rs_dummy_with_owner_name = generate_record_name()
    rs_delete_dummy_with_owner_fqdn = rs_dummy_with_owner_name + f".{dummy_zone_name}"
    rs_update_dummy_with_owner_fqdn = rs_dummy_with_owner_name + f".{dummy_zone_name}"
    batch_change_input = {
        "comments": "this is optional",
        "changes": [
            # valid changes
            get_change_A_AAAA_json(rs_delete_fqdn, change_type="DeleteRecordSet"),
            get_change_A_AAAA_json(rs_update_fqdn, change_type="DeleteRecordSet"),
            get_change_A_AAAA_json(rs_update_fqdn, ttl=300),
            # input validations failures
            get_change_A_AAAA_json("$invalid.host.name.", change_type="DeleteRecordSet"),
            get_change_A_AAAA_json("reverse.zone.in-addr.arpa.", change_type="DeleteRecordSet"),
            get_change_A_AAAA_json("$another.invalid.host.name.", ttl=300),
            get_change_A_AAAA_json("$another.invalid.host.name.", change_type="DeleteRecordSet"),
            get_change_A_AAAA_json("another.reverse.zone.in-addr.arpa.", ttl=10),
            get_change_A_AAAA_json("another.reverse.zone.in-addr.arpa.", change_type="DeleteRecordSet"),
            # zone discovery failures
            get_change_A_AAAA_json("zone.discovery.error.", change_type="DeleteRecordSet"),
            # context validation failures: record does not exist, not authorized
            get_change_A_AAAA_json(f"non-existent.{ok_zone_name}", change_type="DeleteRecordSet"),
            get_change_A_AAAA_json(rs_delete_dummy_fqdn, change_type="DeleteRecordSet"),
            get_change_A_AAAA_json(rs_update_dummy_fqdn, change_type="DeleteRecordSet"),
            get_change_A_AAAA_json(rs_update_dummy_fqdn, ttl=300),
            get_change_A_AAAA_json(rs_delete_dummy_with_owner_fqdn, change_type="DeleteRecordSet"),
            get_change_A_AAAA_json(rs_update_dummy_with_owner_fqdn, ttl=300)
        ]
    }
    to_create = [rs_delete_ok, rs_update_ok, rs_delete_dummy, rs_update_dummy]
    to_delete = []
    try:
        group_to_delete = dummy_client.create_group(temp_group, status=200)
        # The owner-group record must be created after the group exists.
        rs_update_dummy_with_owner = create_recordset(dummy_zone, rs_dummy_with_owner_name, "A", [{"address": "1.1.1.1"}], 100, group_to_delete["id"])
        create_rs_update_dummy_with_owner = dummy_client.create_recordset(rs_update_dummy_with_owner, status=202)
        to_delete.append(dummy_client.wait_until_recordset_change_status(create_rs_update_dummy_with_owner, "Complete"))
        for rs in to_create:
            # Each record must be created by a client authorized for its zone.
            if rs["zoneId"] == dummy_zone["id"]:
                create_client = dummy_client
            else:
                create_client = ok_client
            create_rs = create_client.create_recordset(rs, status=202)
            to_delete.append(create_client.wait_until_recordset_change_status(create_rs, "Complete"))
        # Confirm that record set doesn't already exist
        ok_client.get_recordset(ok_zone["id"], "non-existent", status=404)
        response = ok_client.create_batch_change(batch_change_input, status=400)
        # valid changes
        assert_successful_change_in_error_response(response[0], input_name=rs_delete_fqdn, change_type="DeleteRecordSet")
        assert_successful_change_in_error_response(response[1], input_name=rs_update_fqdn, change_type="DeleteRecordSet")
        assert_successful_change_in_error_response(response[2], input_name=rs_update_fqdn, ttl=300)
        # input validations failures
        assert_failed_change_in_error_response(response[3], input_name="$invalid.host.name.",
                                               change_type="DeleteRecordSet",
                                               error_messages=['Invalid domain name: "$invalid.host.name.", valid domain names must be letters, '
                                                               'numbers, underscores, and hyphens, joined by dots, and terminated with a dot.'])
        assert_failed_change_in_error_response(response[4], input_name="reverse.zone.in-addr.arpa.",
                                               change_type="DeleteRecordSet",
                                               error_messages=['Invalid Record Type In Reverse Zone: record with name "reverse.zone.in-addr.arpa." and type "A" '
                                                               'is not allowed in a reverse zone.'])
        assert_failed_change_in_error_response(response[5], input_name="$another.invalid.host.name.", ttl=300,
                                               error_messages=['Invalid domain name: "$another.invalid.host.name.", valid domain names must be letters, '
                                                               'numbers, underscores, and hyphens, joined by dots, and terminated with a dot.'])
        assert_failed_change_in_error_response(response[6], input_name="$another.invalid.host.name.",
                                               change_type="DeleteRecordSet",
                                               error_messages=['Invalid domain name: "$another.invalid.host.name.", valid domain names must be letters, '
                                                               'numbers, underscores, and hyphens, joined by dots, and terminated with a dot.'])
        # This change accumulates two errors: reverse-zone type AND bad TTL.
        assert_failed_change_in_error_response(response[7], input_name="another.reverse.zone.in-addr.arpa.", ttl=10,
                                               error_messages=['Invalid Record Type In Reverse Zone: record with name "another.reverse.zone.in-addr.arpa." '
                                                               'and type "A" is not allowed in a reverse zone.',
                                                               'Invalid TTL: "10", must be a number between 30 and 2147483647.'])
        assert_failed_change_in_error_response(response[8], input_name="another.reverse.zone.in-addr.arpa.",
                                               change_type="DeleteRecordSet",
                                               error_messages=['Invalid Record Type In Reverse Zone: record with name "another.reverse.zone.in-addr.arpa." '
                                                               'and type "A" is not allowed in a reverse zone.'])
        # zone discovery failure
        assert_failed_change_in_error_response(response[9], input_name="zone.discovery.error.",
                                               change_type="DeleteRecordSet",
                                               error_messages=['Zone Discovery Failed: zone for "zone.discovery.error." does not exist in VinylDNS. '
                                                               'If zone exists, then it must be connected to in VinylDNS.'])
        # context validation failures: record does not exist, not authorized
        assert_failed_change_in_error_response(response[10], input_name=f"non-existent.{ok_zone_name}",
                                               change_type="DeleteRecordSet",
                                               error_messages=[
                                                   f'Record "non-existent.{ok_zone_name}" Does Not Exist: cannot delete a record that does not exist.'])
        assert_failed_change_in_error_response(response[11], input_name=rs_delete_dummy_fqdn,
                                               change_type="DeleteRecordSet",
                                               error_messages=[f'User \"ok\" is not authorized. Contact zone owner group: {dummy_group_name} at test@test.com to make DNS changes.'])
        assert_failed_change_in_error_response(response[12], input_name=rs_update_dummy_fqdn,
                                               change_type="DeleteRecordSet",
                                               error_messages=[f'User \"ok\" is not authorized. Contact zone owner group: {dummy_group_name} at test@test.com to make DNS changes.'])
        assert_failed_change_in_error_response(response[13], input_name=rs_update_dummy_fqdn, ttl=300,
                                               error_messages=[f'User \"ok\" is not authorized. Contact zone owner group: {dummy_group_name} at test@test.com to make DNS changes.'])
        assert_failed_change_in_error_response(response[14], input_name=rs_update_dummy_with_owner_fqdn, change_type="DeleteRecordSet",
                                               error_messages=[f'User \"ok\" is not authorized. Contact zone owner group: {dummy_group_name} at test@test.com to make DNS changes.'])
        assert_failed_change_in_error_response(response[15], input_name=rs_update_dummy_with_owner_fqdn, ttl=300,
                                               error_messages=[f'User \"ok\" is not authorized. Contact zone owner group: {dummy_group_name} at test@test.com to make DNS changes.'])
    finally:
        # Clean up updates
        # Route each deletion through the client authorized for its zone.
        dummy_deletes = [rs for rs in to_delete if rs["zone"]["id"] == dummy_zone["id"]]
        ok_deletes = [rs for rs in to_delete if rs["zone"]["id"] != dummy_zone["id"]]
        clear_recordset_list(dummy_deletes, dummy_client)
        clear_recordset_list(ok_deletes, ok_client)
        dummy_client.delete_group(group_to_delete["id"], status=200)
def test_aaaa_recordtype_add_checks(shared_zone_test_context):
    """
    Test all add validations performed on AAAA records submitted in batch changes.

    Submits one batch containing a valid add plus every category of failure
    (input validation, zone discovery, context validation) and asserts the
    per-change errors. The response indices asserted below mirror the order of
    the entries in batch_change_input["changes"].
    """
    client = shared_zone_test_context.ok_vinyldns_client
    dummy_zone_name = shared_zone_test_context.dummy_zone["name"]
    parent_zone_name = shared_zone_test_context.parent_zone["name"]
    dummy_group_name = shared_zone_test_context.dummy_group["name"]
    # Pre-existing AAAA record: used to trigger the "Already Exists" error.
    existing_aaaa_name = generate_record_name()
    existing_aaaa_fqdn = existing_aaaa_name + "." + shared_zone_test_context.parent_zone["name"]
    existing_aaaa = create_recordset(shared_zone_test_context.parent_zone, existing_aaaa_name, "AAAA", [{"address": "1::1"}], 100)
    # Pre-existing CNAME record: used to trigger the CNAME-conflict error.
    existing_cname_name = generate_record_name()
    existing_cname_fqdn = existing_cname_name + "." + shared_zone_test_context.parent_zone["name"]
    existing_cname = create_recordset(shared_zone_test_context.parent_zone, existing_cname_name, "CNAME", [{"cname": "cname.data."}], 100)
    good_record_name = generate_record_name()
    good_record_fqdn = good_record_name + "." + shared_zone_test_context.parent_zone["name"]
    batch_change_input = {
        "changes": [
            # valid changes
            get_change_A_AAAA_json(good_record_fqdn, record_type="AAAA", address="1::1"),
            # input validation failures
            get_change_A_AAAA_json(f"bad-ttl-and-invalid-name$.{parent_zone_name}", ttl=29, record_type="AAAA", address="1::1"),
            get_change_A_AAAA_json("reverse-zone.1.2.3.ip6.arpa.", record_type="AAAA", address="1::1"),
            # zone discovery failures
            get_change_A_AAAA_json(f"no.subzone.{parent_zone_name}", record_type="AAAA", address="1::1"),
            get_change_A_AAAA_json("no.zone.at.all.", record_type="AAAA", address="1::1"),
            # context validation failures
            get_change_CNAME_json(f"cname-duplicate.{parent_zone_name}"),
            get_change_A_AAAA_json(f"cname-duplicate.{parent_zone_name}", record_type="AAAA", address="1::1"),
            get_change_A_AAAA_json(existing_aaaa_fqdn, record_type="AAAA", address="1::1"),
            get_change_A_AAAA_json(existing_cname_fqdn, record_type="AAAA", address="1::1"),
            get_change_A_AAAA_json(f"user-add-unauthorized.{dummy_zone_name}", record_type="AAAA", address="1::1")
        ]
    }
    to_create = [existing_aaaa, existing_cname]
    to_delete = []
    try:
        # Create the fixture records and wait until they are live before
        # submitting the batch, so the conflict checks see them.
        for create_json in to_create:
            create_result = client.create_recordset(create_json, status=202)
            to_delete.append(client.wait_until_recordset_change_status(create_result, "Complete"))
        # The whole batch is rejected (400) because at least one change fails validation.
        response = client.create_batch_change(batch_change_input, status=400)
        # successful changes
        assert_successful_change_in_error_response(response[0], input_name=good_record_fqdn, record_type="AAAA", record_data="1::1")
        # ttl, domain name, reverse zone input validations
        assert_failed_change_in_error_response(response[1], input_name=f"bad-ttl-and-invalid-name$.{parent_zone_name}", ttl=29,
                                               record_type="AAAA", record_data="1::1",
                                               error_messages=['Invalid TTL: "29", must be a number between 30 and 2147483647.',
                                                               f'Invalid domain name: "bad-ttl-and-invalid-name$.{parent_zone_name}", '
                                                               "valid domain names must be letters, numbers, underscores, and hyphens, joined by dots, and terminated with a dot."])
        assert_failed_change_in_error_response(response[2], input_name="reverse-zone.1.2.3.ip6.arpa.",
                                               record_type="AAAA", record_data="1::1",
                                               error_messages=["Invalid Record Type In Reverse Zone: record with name \"reverse-zone.1.2.3.ip6.arpa.\" "
                                                               "and type \"AAAA\" is not allowed in a reverse zone."])
        # zone discovery failures
        assert_failed_change_in_error_response(response[3], input_name=f"no.subzone.{parent_zone_name}", record_type="AAAA",
                                               record_data="1::1",
                                               error_messages=[f'Zone Discovery Failed: zone for \"no.subzone.{parent_zone_name}\" does not exist in VinylDNS. '
                                                               f'If zone exists, then it must be connected to in VinylDNS.'])
        assert_failed_change_in_error_response(response[4], input_name="no.zone.at.all.", record_type="AAAA",
                                               record_data="1::1",
                                               error_messages=["Zone Discovery Failed: zone for \"no.zone.at.all.\" does not exist in VinylDNS. "
                                                               "If zone exists, then it must be connected to in VinylDNS."])
        # context validations: duplicate name failure (always on the cname), conflicting recordsets, unauthorized error
        assert_failed_change_in_error_response(response[5], input_name=f"cname-duplicate.{parent_zone_name}",
                                               record_type="CNAME", record_data="test.com.",
                                               error_messages=[f"Record Name \"cname-duplicate.{parent_zone_name}\" Not Unique In Batch Change: "
                                                               f"cannot have multiple \"CNAME\" records with the same name."])
        # NOTE: the AAAA half of the duplicate pair passes — only the CNAME is flagged.
        assert_successful_change_in_error_response(response[6], input_name=f"cname-duplicate.{parent_zone_name}",
                                                   record_type="AAAA", record_data="1::1")
        assert_failed_change_in_error_response(response[7], input_name=existing_aaaa_fqdn, record_type="AAAA",
                                               record_data="1::1",
                                               error_messages=[f"Record \"{existing_aaaa_fqdn}\" Already Exists: cannot add an existing record; "
                                                               f"to update it, issue a DeleteRecordSet then an Add."])
        assert_failed_change_in_error_response(response[8], input_name=existing_cname_fqdn, record_type="AAAA",
                                               record_data="1::1",
                                               error_messages=[f"CNAME Conflict: CNAME record names must be unique. Existing record with name \"{existing_cname_fqdn}\" "
                                                               f"and type \"CNAME\" conflicts with this record."])
        assert_failed_change_in_error_response(response[9], input_name=f"user-add-unauthorized.{dummy_zone_name}",
                                               record_type="AAAA", record_data="1::1",
                                               error_messages=[f"User \"ok\" is not authorized. Contact zone owner group: {dummy_group_name} at test@test.com to make DNS changes."])
    finally:
        # Always remove the fixture records, even if an assertion failed.
        clear_recordset_list(to_delete, client)
def test_aaaa_recordtype_update_delete_checks(shared_zone_test_context):
    """
    Test all update and delete validations performed on AAAA records submitted in batch changes.

    Fixture records live in two zones: the "ok" zone (authorized for the ok
    user) and the "dummy" zone (unauthorized for the ok user). The response
    indices asserted below mirror the order of batch_change_input["changes"].
    """
    ok_client = shared_zone_test_context.ok_vinyldns_client
    dummy_client = shared_zone_test_context.dummy_vinyldns_client
    ok_zone = shared_zone_test_context.ok_zone
    dummy_zone = shared_zone_test_context.dummy_zone
    ok_zone_name = shared_zone_test_context.ok_zone["name"]
    dummy_zone_name = shared_zone_test_context.dummy_zone["name"]
    dummy_group_name = shared_zone_test_context.dummy_group["name"]
    # Records in the ok zone: one to delete, one to update (delete + re-add).
    rs_delete_name = generate_record_name()
    rs_delete_fqdn = rs_delete_name + f".{ok_zone_name}"
    rs_delete_ok = create_recordset(ok_zone, rs_delete_name, "AAAA", [{"address": "1::4:5:6:7:8"}], 200)
    rs_update_name = generate_record_name()
    rs_update_fqdn = rs_update_name + f".{ok_zone_name}"
    rs_update_ok = create_recordset(ok_zone, rs_update_name, "AAAA", [{"address": "1:1:1:1:1:1:1:1"}], 200)
    # Records in the dummy zone: the ok user is not authorized to touch these.
    rs_delete_dummy_name = generate_record_name()
    rs_delete_dummy_fqdn = rs_delete_dummy_name + f".{dummy_zone_name}"
    rs_delete_dummy = create_recordset(dummy_zone, rs_delete_dummy_name, "AAAA", [{"address": "1::1"}], 200)
    rs_update_dummy_name = generate_record_name()
    rs_update_dummy_fqdn = rs_update_dummy_name + f".{dummy_zone_name}"
    rs_update_dummy = create_recordset(dummy_zone, rs_update_dummy_name, "AAAA", [{"address": "1:2:3:4:5:6:7:8"}], 200)
    batch_change_input = {
        "comments": "this is optional",
        "changes": [
            # valid changes
            get_change_A_AAAA_json(rs_delete_fqdn, record_type="AAAA", change_type="DeleteRecordSet", address="1:0::4:5:6:7:8"),
            get_change_A_AAAA_json(rs_update_fqdn, record_type="AAAA", ttl=300, address="1:2:3:4:5:6:7:8"),
            get_change_A_AAAA_json(rs_update_fqdn, record_type="AAAA", change_type="DeleteRecordSet"),
            # input validations failures
            get_change_A_AAAA_json(f"invalid-name$.{ok_zone_name}", record_type="AAAA", change_type="DeleteRecordSet"),
            get_change_A_AAAA_json("reverse.zone.in-addr.arpa.", record_type="AAAA", change_type="DeleteRecordSet"),
            get_change_A_AAAA_json(f"bad-ttl-and-invalid-name$-update.{ok_zone_name}", record_type="AAAA", change_type="DeleteRecordSet"),
            get_change_A_AAAA_json(f"bad-ttl-and-invalid-name$-update.{ok_zone_name}", ttl=29, record_type="AAAA", address="1:2:3:4:5:6:7:8"),
            # zone discovery failure
            get_change_A_AAAA_json("no.zone.at.all.", record_type="AAAA", change_type="DeleteRecordSet"),
            # context validation failures
            get_change_A_AAAA_json(f"delete-nonexistent.{ok_zone_name}", record_type="AAAA", change_type="DeleteRecordSet"),
            get_change_A_AAAA_json(f"update-nonexistent.{ok_zone_name}", record_type="AAAA", change_type="DeleteRecordSet"),
            get_change_A_AAAA_json(f"update-nonexistent.{ok_zone_name}", record_type="AAAA", address="1::1"),
            get_change_A_AAAA_json(rs_delete_dummy_fqdn, record_type="AAAA", change_type="DeleteRecordSet"),
            get_change_A_AAAA_json(rs_update_dummy_fqdn, record_type="AAAA", address="1::1"),
            get_change_A_AAAA_json(rs_update_dummy_fqdn, record_type="AAAA", change_type="DeleteRecordSet")
        ]
    }
    to_create = [rs_delete_ok, rs_update_ok, rs_delete_dummy, rs_update_dummy]
    to_delete = []
    try:
        # Create each fixture record with the client that owns its zone.
        for rs in to_create:
            if rs["zoneId"] == dummy_zone["id"]:
                create_client = dummy_client
            else:
                create_client = ok_client
            create_rs = create_client.create_recordset(rs, status=202)
            to_delete.append(create_client.wait_until_recordset_change_status(create_rs, "Complete"))
        # Confirm that record set doesn't already exist
        ok_client.get_recordset(ok_zone["id"], "delete-nonexistent", status=404)
        # The whole batch is rejected (400) because at least one change fails validation.
        response = ok_client.create_batch_change(batch_change_input, status=400)
        # successful changes
        assert_successful_change_in_error_response(response[0], input_name=rs_delete_fqdn, record_type="AAAA",
                                                   record_data=None, change_type="DeleteRecordSet")
        assert_successful_change_in_error_response(response[1], ttl=300, input_name=rs_update_fqdn, record_type="AAAA",
                                                   record_data="1:2:3:4:5:6:7:8")
        assert_successful_change_in_error_response(response[2], input_name=rs_update_fqdn, record_type="AAAA",
                                                   record_data=None, change_type="DeleteRecordSet")
        # input validations failures: invalid input name, reverse zone error, invalid ttl
        assert_failed_change_in_error_response(response[3], input_name=f"invalid-name$.{ok_zone_name}", record_type="AAAA",
                                               record_data=None, change_type="DeleteRecordSet",
                                               error_messages=[f'Invalid domain name: "invalid-name$.{ok_zone_name}", '
                                                               f'valid domain names must be letters, numbers, underscores, and hyphens, joined by dots, and terminated with a dot.'])
        assert_failed_change_in_error_response(response[4], input_name="reverse.zone.in-addr.arpa.", record_type="AAAA",
                                               record_data=None, change_type="DeleteRecordSet",
                                               error_messages=["Invalid Record Type In Reverse Zone: record with name \"reverse.zone.in-addr.arpa.\" and "
                                                               "type \"AAAA\" is not allowed in a reverse zone."])
        assert_failed_change_in_error_response(response[5], input_name=f"bad-ttl-and-invalid-name$-update.{ok_zone_name}",
                                               record_type="AAAA", record_data=None, change_type="DeleteRecordSet",
                                               error_messages=[f'Invalid domain name: "bad-ttl-and-invalid-name$-update.{ok_zone_name}", '
                                                               f'valid domain names must be letters, numbers, underscores, and hyphens, joined by dots, and terminated with a dot.'])
        assert_failed_change_in_error_response(response[6], input_name=f"bad-ttl-and-invalid-name$-update.{ok_zone_name}", ttl=29,
                                               record_type="AAAA", record_data="1:2:3:4:5:6:7:8",
                                               error_messages=['Invalid TTL: "29", must be a number between 30 and 2147483647.',
                                                               f'Invalid domain name: "bad-ttl-and-invalid-name$-update.{ok_zone_name}", '
                                                               f'valid domain names must be letters, numbers, underscores, and hyphens, joined by dots, and terminated with a dot.'])
        # zone discovery failure
        assert_failed_change_in_error_response(response[7], input_name="no.zone.at.all.", record_type="AAAA",
                                               record_data=None, change_type="DeleteRecordSet",
                                               error_messages=["Zone Discovery Failed: zone for \"no.zone.at.all.\" does not exist in VinylDNS. "
                                                               "If zone exists, then it must be connected to in VinylDNS."])
        # context validation failures: record does not exist, not authorized
        assert_failed_change_in_error_response(response[8], input_name=f"delete-nonexistent.{ok_zone_name}", record_type="AAAA",
                                               record_data=None, change_type="DeleteRecordSet",
                                               error_messages=[f"Record \"delete-nonexistent.{ok_zone_name}\" Does Not Exist: cannot delete a record that does not exist."])
        assert_failed_change_in_error_response(response[9], input_name=f"update-nonexistent.{ok_zone_name}", record_type="AAAA",
                                               record_data=None, change_type="DeleteRecordSet",
                                               error_messages=[f"Record \"update-nonexistent.{ok_zone_name}\" Does Not Exist: cannot delete a record that does not exist."])
        # NOTE: the add half of the nonexistent "update" succeeds — only the delete half fails.
        assert_successful_change_in_error_response(response[10], input_name=f"update-nonexistent.{ok_zone_name}", record_type="AAAA", record_data="1::1")
        assert_failed_change_in_error_response(response[11], input_name=rs_delete_dummy_fqdn,
                                               record_type="AAAA", record_data=None, change_type="DeleteRecordSet",
                                               error_messages=[f"User \"ok\" is not authorized. Contact zone owner group: {dummy_group_name} at test@test.com to make DNS changes."])
        assert_failed_change_in_error_response(response[12], input_name=rs_update_dummy_fqdn,
                                               record_type="AAAA", record_data="1::1",
                                               error_messages=[f"User \"ok\" is not authorized. Contact zone owner group: {dummy_group_name} at test@test.com to make DNS changes."])
        assert_failed_change_in_error_response(response[13], input_name=rs_update_dummy_fqdn,
                                               record_type="AAAA", record_data=None, change_type="DeleteRecordSet",
                                               error_messages=[f"User \"ok\" is not authorized. Contact zone owner group: {dummy_group_name} at test@test.com to make DNS changes."])
    finally:
        # Clean up updates
        # Split the created records by zone so each is deleted by the client
        # that is authorized for that zone.
        dummy_deletes = [rs for rs in to_delete if rs["zone"]["id"] == dummy_zone["id"]]
        ok_deletes = [rs for rs in to_delete if rs["zone"]["id"] != dummy_zone["id"]]
        clear_recordset_list(dummy_deletes, dummy_client)
        clear_recordset_list(ok_deletes, ok_client)
def test_cname_recordtype_add_checks(shared_zone_test_context):
    """
    Test all add validations performed on CNAME records submitted in batch changes.

    Covers forward and reverse zones, the CNAME-at-apex restriction, duplicate
    names within a batch, and conflicts with pre-existing records. The response
    indices asserted below mirror the order of batch_change_input["changes"].
    """
    client = shared_zone_test_context.ok_vinyldns_client
    ok_zone = shared_zone_test_context.ok_zone
    ok_zone_name = shared_zone_test_context.ok_zone["name"]
    dummy_zone_name = shared_zone_test_context.dummy_zone["name"]
    dummy_group_name = shared_zone_test_context.dummy_group["name"]
    ip4_prefix = shared_zone_test_context.ip4_classless_prefix
    ip4_zone_name = shared_zone_test_context.classless_base_zone["name"]
    ip4_reverse_zone_name = shared_zone_test_context.ip4_reverse_zone["name"]
    parent_zone_name = shared_zone_test_context.parent_zone["name"]
    # Pre-existing A record: a CNAME add with the same name must conflict.
    existing_forward_name = generate_record_name()
    existing_forward_fqdn = existing_forward_name + "." + shared_zone_test_context.parent_zone["name"]
    existing_forward = create_recordset(shared_zone_test_context.parent_zone, existing_forward_name, "A",
                                        [{"address": "1.2.3.4"}], 100)
    # Pre-existing PTR record in the reverse zone (trailing space in ptrdname is
    # part of the fixture data — do not "fix" it).
    existing_reverse_fqdn = "0." + shared_zone_test_context.classless_base_zone["name"]
    existing_reverse = create_recordset(shared_zone_test_context.classless_base_zone, "0", "PTR",
                                        [{"ptrdname": "test.com. "}], 100)
    # Pre-existing CNAME record: adding it again must fail on both "already
    # exists" and CNAME conflict.
    existing_cname_name = generate_record_name()
    existing_cname_fqdn = existing_cname_name + "." + shared_zone_test_context.parent_zone["name"]
    existing_cname = create_recordset(shared_zone_test_context.parent_zone, existing_cname_name, "CNAME",
                                      [{"cname": "cname.data. "}], 100)
    # Records used to verify delete+add of the same name with a different type.
    rs_a_to_cname_ok_name = generate_record_name()
    rs_a_to_cname_ok_fqdn = rs_a_to_cname_ok_name + f".{ok_zone_name}"
    rs_a_to_cname_ok = create_recordset(ok_zone, rs_a_to_cname_ok_name, "A", [{"address": "1.1.1.1"}])
    rs_cname_to_A_ok_name = generate_record_name()
    rs_cname_to_A_ok_fqdn = rs_cname_to_A_ok_name + f".{ok_zone_name}"
    rs_cname_to_A_ok = create_recordset(ok_zone, rs_cname_to_A_ok_name, "CNAME", [{"cname": "test.com."}])
    forward_fqdn = generate_record_name(parent_zone_name)
    reverse_fqdn = generate_record_name(ip4_reverse_zone_name)
    batch_change_input = {
        "changes": [
            # valid change
            get_change_CNAME_json(forward_fqdn),
            get_change_CNAME_json(reverse_fqdn),
            # valid changes - delete and add of same record name but different type
            get_change_A_AAAA_json(rs_a_to_cname_ok_fqdn, change_type="DeleteRecordSet"),
            get_change_CNAME_json(rs_a_to_cname_ok_fqdn),
            get_change_A_AAAA_json(rs_cname_to_A_ok_fqdn),
            get_change_CNAME_json(rs_cname_to_A_ok_fqdn, change_type="DeleteRecordSet"),
            # input validations failures
            get_change_CNAME_json(f"bad-ttl-and-invalid-name$.{parent_zone_name}", ttl=29, cname="also$bad.name"),
            # zone discovery failure
            get_change_CNAME_json("no.zone.com."),
            # cant be apex
            get_change_CNAME_json(parent_zone_name),
            # context validation failures
            get_change_PTR_json(f"{ip4_prefix}.15"),
            get_change_CNAME_json(f"15.{ip4_zone_name}", cname="duplicate.other.type.within.batch."),
            get_change_CNAME_json(f"cname-duplicate.{parent_zone_name}"),
            get_change_CNAME_json(f"cname-duplicate.{parent_zone_name}", cname="duplicate.cname.type.within.batch."),
            get_change_CNAME_json(existing_forward_fqdn),
            get_change_CNAME_json(existing_cname_fqdn),
            get_change_CNAME_json(f"0.{ip4_zone_name}", cname="duplicate.in.db."),
            get_change_CNAME_json(f"user-add-unauthorized.{dummy_zone_name}")
        ]
    }
    to_create = [existing_forward, existing_reverse, existing_cname, rs_a_to_cname_ok, rs_cname_to_A_ok]
    to_delete = []
    try:
        # Create the fixture records and wait until they are live before
        # submitting the batch, so the conflict checks see them.
        for create_json in to_create:
            create_result = client.create_recordset(create_json, status=202)
            to_delete.append(client.wait_until_recordset_change_status(create_result, "Complete"))
        # The whole batch is rejected (400) because at least one change fails validation.
        response = client.create_batch_change(batch_change_input, status=400)
        # successful changes
        assert_successful_change_in_error_response(response[0], input_name=forward_fqdn, record_type="CNAME", record_data="test.com.")
        assert_successful_change_in_error_response(response[1], input_name=reverse_fqdn, record_type="CNAME", record_data="test.com.")
        # successful changes - delete and add of same record name but different type
        assert_successful_change_in_error_response(response[2], input_name=rs_a_to_cname_ok_fqdn, change_type="DeleteRecordSet")
        assert_successful_change_in_error_response(response[3], input_name=rs_a_to_cname_ok_fqdn, record_type="CNAME", record_data="test.com.")
        assert_successful_change_in_error_response(response[4], input_name=rs_cname_to_A_ok_fqdn)
        assert_successful_change_in_error_response(response[5], input_name=rs_cname_to_A_ok_fqdn, record_type="CNAME", change_type="DeleteRecordSet")
        # ttl, domain name, data
        assert_failed_change_in_error_response(response[6], input_name=f"bad-ttl-and-invalid-name$.{parent_zone_name}", ttl=29,
                                               record_type="CNAME", record_data="also$bad.name.",
                                               error_messages=['Invalid TTL: "29", must be a number between 30 and 2147483647.',
                                                               f'Invalid domain name: "bad-ttl-and-invalid-name$.{parent_zone_name}", '
                                                               "valid domain names must be letters, numbers, underscores, and hyphens, joined by dots, and terminated with a dot.",
                                                               'Invalid domain name: "also$bad.name.", valid domain names must be letters, numbers, underscores, and hyphens, '
                                                               "joined by dots, and terminated with a dot."])
        # zone discovery failure
        assert_failed_change_in_error_response(response[7], input_name="no.zone.com.", record_type="CNAME",
                                               record_data="test.com.",
                                               error_messages=["Zone Discovery Failed: zone for \"no.zone.com.\" does not exist in VinylDNS. "
                                                               "If zone exists, then it must be connected to in VinylDNS."])
        # CNAME cant be apex
        assert_failed_change_in_error_response(response[8], input_name=parent_zone_name, record_type="CNAME",
                                               record_data="test.com.",
                                               error_messages=[f"CNAME cannot be the same name as zone \"{parent_zone_name}\"."])
        # context validations: duplicates in batch
        # NOTE: the PTR half of the duplicate pair passes — only the CNAME is flagged.
        assert_successful_change_in_error_response(response[9], input_name=f"{ip4_prefix}.15", record_type="PTR",
                                                   record_data="test.com.")
        assert_failed_change_in_error_response(response[10], input_name=f"15.{ip4_zone_name}", record_type="CNAME",
                                               record_data="duplicate.other.type.within.batch.",
                                               error_messages=[f"Record Name \"15.{ip4_zone_name}\" Not Unique In Batch Change: "
                                                               f"cannot have multiple \"CNAME\" records with the same name."])
        assert_failed_change_in_error_response(response[11], input_name=f"cname-duplicate.{parent_zone_name}",
                                               record_type="CNAME", record_data="test.com.",
                                               error_messages=[f"Record Name \"cname-duplicate.{parent_zone_name}\" Not Unique In Batch Change: "
                                                               f"cannot have multiple \"CNAME\" records with the same name."])
        assert_failed_change_in_error_response(response[12], input_name=f"cname-duplicate.{parent_zone_name}",
                                               record_type="CNAME", record_data="duplicate.cname.type.within.batch.",
                                               error_messages=[f"Record Name \"cname-duplicate.{parent_zone_name}\" Not Unique In Batch Change: "
                                                               f"cannot have multiple \"CNAME\" records with the same name."])
        # context validations: existing recordsets pre-request, unauthorized, failure on duplicate add
        assert_failed_change_in_error_response(response[13], input_name=existing_forward_fqdn,
                                               record_type="CNAME", record_data="test.com.",
                                               error_messages=[f"CNAME Conflict: CNAME record names must be unique. "
                                                               f"Existing record with name \"{existing_forward_fqdn}\" and type \"A\" conflicts with this record."])
        assert_failed_change_in_error_response(response[14], input_name=existing_cname_fqdn,
                                               record_type="CNAME", record_data="test.com.",
                                               error_messages=[f"Record \"{existing_cname_fqdn}\" Already Exists: cannot add an existing record; to update it, "
                                                               f"issue a DeleteRecordSet then an Add.",
                                                               f"CNAME Conflict: CNAME record names must be unique. "
                                                               f"Existing record with name \"{existing_cname_fqdn}\" and type \"CNAME\" conflicts with this record."])
        assert_failed_change_in_error_response(response[15], input_name=existing_reverse_fqdn, record_type="CNAME",
                                               record_data="duplicate.in.db.",
                                               error_messages=["CNAME Conflict: CNAME record names must be unique. "
                                                               f"Existing record with name \"{existing_reverse_fqdn}\" and type \"PTR\" conflicts with this record."])
        assert_failed_change_in_error_response(response[16], input_name=f"user-add-unauthorized.{dummy_zone_name}",
                                               record_type="CNAME", record_data="test.com.",
                                               error_messages=[f"User \"ok\" is not authorized. Contact zone owner group: {dummy_group_name} at test@test.com to make DNS changes."])
    finally:
        # Always remove the fixture records, even if an assertion failed.
        clear_recordset_list(to_delete, client)
def test_cname_recordtype_update_delete_checks(shared_zone_test_context):
    """
    Test all update and delete validations performed on CNAME records submitted in batch changes.

    Exercises forward and reverse zones, authorization failures against the
    dummy zone, and the rule that an update (delete + add) cannot be combined
    with extra adds of the same CNAME name. The response indices asserted
    below mirror the order of batch_change_input["changes"].
    """
    ok_client = shared_zone_test_context.ok_vinyldns_client
    dummy_client = shared_zone_test_context.dummy_vinyldns_client
    ok_zone = shared_zone_test_context.ok_zone
    dummy_zone = shared_zone_test_context.dummy_zone
    classless_base_zone = shared_zone_test_context.classless_base_zone
    dummy_zone_name = shared_zone_test_context.dummy_zone["name"]
    dummy_group_name = shared_zone_test_context.dummy_group["name"]
    ok_zone_name = shared_zone_test_context.ok_zone["name"]
    ip4_zone_name = shared_zone_test_context.classless_base_zone["name"]
    parent_zone_name = shared_zone_test_context.parent_zone["name"]
    # Fixture records: forward-zone delete/update targets, unauthorized
    # dummy-zone targets, reverse-zone delete/update targets, and a CNAME used
    # for the update-with-duplicate-adds case (trailing space in cname data is
    # part of the fixture — do not "fix" it).
    rs_delete_ok = create_recordset(ok_zone, "delete3", "CNAME", [{"cname": "test.com."}])
    rs_update_ok = create_recordset(ok_zone, "update3", "CNAME", [{"cname": "test.com."}])
    rs_delete_dummy = create_recordset(dummy_zone, "delete-unauthorized3", "CNAME", [{"cname": "test.com."}])
    rs_update_dummy = create_recordset(dummy_zone, "update-unauthorized3", "CNAME", [{"cname": "test.com."}])
    rs_delete_base = create_recordset(classless_base_zone, "200", "CNAME",
                                      [{"cname": f"200.192/30.{ip4_zone_name}"}])
    rs_update_base = create_recordset(classless_base_zone, "201", "CNAME",
                                      [{"cname": f"201.192/30.{ip4_zone_name}"}])
    rs_update_duplicate_add = create_recordset(shared_zone_test_context.parent_zone, "Existing-Cname2", "CNAME",
                                               [{"cname": "cname.data. "}], 100)
    batch_change_input = {
        "comments": "this is optional",
        "changes": [
            # valid changes - forward zone
            get_change_CNAME_json(f"delete3.{ok_zone_name}", change_type="DeleteRecordSet"),
            get_change_CNAME_json(f"update3.{ok_zone_name}", change_type="DeleteRecordSet"),
            get_change_CNAME_json(f"update3.{ok_zone_name}", ttl=300),
            # valid changes - reverse zone
            get_change_CNAME_json(f"200.{ip4_zone_name}", change_type="DeleteRecordSet"),
            get_change_CNAME_json(f"201.{ip4_zone_name}", change_type="DeleteRecordSet"),
            get_change_CNAME_json(f"201.{ip4_zone_name}", ttl=300),
            # input validation failures
            get_change_CNAME_json("$invalid.host.name.", change_type="DeleteRecordSet"),
            get_change_CNAME_json("$another.invalid.host.name", change_type="DeleteRecordSet"),
            get_change_CNAME_json("$another.invalid.host.name", ttl=20, cname="$another.invalid.cname."),
            # zone discovery failures
            get_change_CNAME_json("zone.discovery.error.", change_type="DeleteRecordSet"),
            # context validation failures: record does not exist, not authorized, failure on update with multiple adds
            get_change_CNAME_json(f"non-existent-delete.{ok_zone_name}", change_type="DeleteRecordSet"),
            get_change_CNAME_json(f"non-existent-update.{ok_zone_name}", change_type="DeleteRecordSet"),
            get_change_CNAME_json(f"non-existent-update.{ok_zone_name}"),
            get_change_CNAME_json(f"delete-unauthorized3.{dummy_zone_name}", change_type="DeleteRecordSet"),
            get_change_CNAME_json(f"update-unauthorized3.{dummy_zone_name}", change_type="DeleteRecordSet"),
            get_change_CNAME_json(f"update-unauthorized3.{dummy_zone_name}", ttl=300),
            get_change_CNAME_json(f"existing-cname2.{parent_zone_name}", change_type="DeleteRecordSet"),
            get_change_CNAME_json(f"existing-cname2.{parent_zone_name}"),
            get_change_CNAME_json(f"existing-cname2.{parent_zone_name}", cname="test2.com.")
        ]
    }
    to_create = [rs_delete_ok, rs_update_ok, rs_delete_dummy, rs_update_dummy, rs_delete_base, rs_update_base,
                 rs_update_duplicate_add]
    to_delete = []
    try:
        # Create each fixture record with the client that owns its zone.
        for rs in to_create:
            if rs["zoneId"] == dummy_zone["id"]:
                create_client = dummy_client
            else:
                create_client = ok_client
            create_rs = create_client.create_recordset(rs, status=202)
            to_delete.append(create_client.wait_until_recordset_change_status(create_rs, "Complete"))
        # Confirm that record set doesn't already exist
        ok_client.get_recordset(ok_zone["id"], "non-existent", status=404)
        # The whole batch is rejected (400) because at least one change fails validation.
        response = ok_client.create_batch_change(batch_change_input, status=400)
        # valid changes - forward zone
        assert_successful_change_in_error_response(response[0], input_name=f"delete3.{ok_zone_name}", record_type="CNAME",
                                                   change_type="DeleteRecordSet")
        assert_successful_change_in_error_response(response[1], input_name=f"update3.{ok_zone_name}", record_type="CNAME",
                                                   change_type="DeleteRecordSet")
        assert_successful_change_in_error_response(response[2], input_name=f"update3.{ok_zone_name}", record_type="CNAME", ttl=300,
                                                   record_data="test.com.")
        # valid changes - reverse zone
        assert_successful_change_in_error_response(response[3], input_name=f"200.{ip4_zone_name}",
                                                   record_type="CNAME", change_type="DeleteRecordSet")
        assert_successful_change_in_error_response(response[4], input_name=f"201.{ip4_zone_name}",
                                                   record_type="CNAME", change_type="DeleteRecordSet")
        assert_successful_change_in_error_response(response[5], input_name=f"201.{ip4_zone_name}",
                                                   record_type="CNAME", ttl=300, record_data="test.com.")
        # ttl, domain name, data
        assert_failed_change_in_error_response(response[6], input_name="$invalid.host.name.", record_type="CNAME",
                                               change_type="DeleteRecordSet",
                                               error_messages=['Invalid domain name: "$invalid.host.name.", valid domain names must be letters, numbers, '
                                                               'underscores, and hyphens, joined by dots, and terminated with a dot.'])
        # NOTE: the input "$another.invalid.host.name" has no trailing dot; the
        # asserted input_name includes one — the API normalizes the name before echoing it.
        assert_failed_change_in_error_response(response[7], input_name="$another.invalid.host.name.",
                                               record_type="CNAME", change_type="DeleteRecordSet",
                                               error_messages=['Invalid domain name: "$another.invalid.host.name.", valid domain names must be letters, numbers, '
                                                               'underscores, and hyphens, joined by dots, and terminated with a dot.'])
        assert_failed_change_in_error_response(response[8], input_name="$another.invalid.host.name.", ttl=20,
                                               record_type="CNAME", record_data="$another.invalid.cname.",
                                               error_messages=['Invalid TTL: "20", must be a number between 30 and 2147483647.',
                                                               'Invalid domain name: "$another.invalid.host.name.", valid domain names must be letters, numbers, '
                                                               'underscores, and hyphens, joined by dots, and terminated with a dot.',
                                                               'Invalid domain name: "$another.invalid.cname.", valid domain names must be letters, numbers, '
                                                               'underscores, and hyphens, joined by dots, and terminated with a dot.'])
        # zone discovery failure
        assert_failed_change_in_error_response(response[9], input_name="zone.discovery.error.", record_type="CNAME",
                                               change_type="DeleteRecordSet",
                                               error_messages=[
                                                   'Zone Discovery Failed: zone for "zone.discovery.error." does not exist in VinylDNS. If zone exists, then it must be connected to in VinylDNS.'])
        # context validation failures: record does not exist, not authorized
        assert_failed_change_in_error_response(response[10], input_name=f"non-existent-delete.{ok_zone_name}", record_type="CNAME",
                                               change_type="DeleteRecordSet",
                                               error_messages=[
                                                   f'Record "non-existent-delete.{ok_zone_name}" Does Not Exist: cannot delete a record that does not exist.'])
        assert_failed_change_in_error_response(response[11], input_name=f"non-existent-update.{ok_zone_name}", record_type="CNAME",
                                               change_type="DeleteRecordSet",
                                               error_messages=[
                                                   f'Record "non-existent-update.{ok_zone_name}" Does Not Exist: cannot delete a record that does not exist.'])
        assert_successful_change_in_error_response(response[12], input_name=f"non-existent-update.{ok_zone_name}",
                                                   record_type="CNAME", record_data="test.com.")
        assert_failed_change_in_error_response(response[13], input_name=f"delete-unauthorized3.{dummy_zone_name}",
                                               record_type="CNAME", change_type="DeleteRecordSet",
                                               error_messages=[f'User "ok" is not authorized. Contact zone owner group: {dummy_group_name} at test@test.com to make DNS changes.'])
        assert_failed_change_in_error_response(response[14], input_name=f"update-unauthorized3.{dummy_zone_name}",
                                               record_type="CNAME", change_type="DeleteRecordSet",
                                               error_messages=[f'User "ok" is not authorized. Contact zone owner group: {dummy_group_name} at test@test.com to make DNS changes.'])
        assert_failed_change_in_error_response(response[15], input_name=f"update-unauthorized3.{dummy_zone_name}",
                                               record_type="CNAME", ttl=300, record_data="test.com.",
                                               error_messages=[f'User "ok" is not authorized. Contact zone owner group: {dummy_group_name} at test@test.com to make DNS changes.'])
        # update with multiple adds: the delete succeeds, but both adds are
        # rejected as non-unique CNAME names within the batch.
        assert_successful_change_in_error_response(response[16], input_name=f"existing-cname2.{parent_zone_name}",
                                                   record_type="CNAME", change_type="DeleteRecordSet")
        assert_failed_change_in_error_response(response[17], input_name=f"existing-cname2.{parent_zone_name}",
                                               record_type="CNAME", record_data="test.com.",
                                               error_messages=[f"Record Name \"existing-cname2.{parent_zone_name}\" Not Unique In Batch Change: "
                                                               f"cannot have multiple \"CNAME\" records with the same name."])
        assert_failed_change_in_error_response(response[18], input_name=f"existing-cname2.{parent_zone_name}",
                                               record_type="CNAME", record_data="test2.com.",
                                               error_messages=[f"Record Name \"existing-cname2.{parent_zone_name}\" Not Unique In Batch Change: "
                                                               f"cannot have multiple \"CNAME\" records with the same name."])
    finally:
        # Clean up updates
        # Split the created records by zone so each is deleted by the client
        # that is authorized for that zone.
        dummy_deletes = [rs for rs in to_delete if rs["zone"]["id"] == dummy_zone["id"]]
        ok_deletes = [rs for rs in to_delete if rs["zone"]["id"] != dummy_zone["id"]]
        clear_recordset_list(dummy_deletes, dummy_client)
        clear_recordset_list(ok_deletes, ok_client)
@pytest.mark.serial
def test_ptr_recordtype_auth_checks(shared_zone_test_context):
    """
    Test all authorization validations performed on PTR records submitted in batch changes
    """
    dummy_client = shared_zone_test_context.dummy_vinyldns_client
    ok_client = shared_zone_test_context.ok_vinyldns_client
    ip4_prefix = shared_zone_test_context.ip4_classless_prefix
    ip6_prefix = shared_zone_test_context.ip6_prefix
    ok_group_name = shared_zone_test_context.ok_group["name"]

    # Every change in the batch is expected to be rejected with this same ownership error.
    not_authorized_error = f'User "dummy" is not authorized. Contact zone owner group: {ok_group_name} at test@test.com to make DNS changes.'

    # Pre-existing records owned by the ok user; the dummy user has no rights on their zones.
    no_auth_ipv4 = create_recordset(shared_zone_test_context.classless_base_zone, "25", "PTR",
                                    [{"ptrdname": "ptrdname.data."}], 200)
    no_auth_ipv6 = create_recordset(shared_zone_test_context.ip6_16_nibble_zone, "4.3.2.1.0.0.0.0.0.0.0.0.0.0.0.0",
                                    "PTR", [{"ptrdname": "ptrdname.data."}], 200)

    batch_change_input = {
        "changes": [
            get_change_PTR_json(f"{ip4_prefix}.5", ptrdname="not.authorized.ipv4.ptr.base."),
            get_change_PTR_json(f"{ip4_prefix}.193", ptrdname="not.authorized.ipv4.ptr.classless.delegation."),
            get_change_PTR_json(f"{ip6_prefix}:1000::1234", ptrdname="not.authorized.ipv6.ptr."),
            get_change_PTR_json(f"{ip4_prefix}.25", change_type="DeleteRecordSet"),
            get_change_PTR_json(f"{ip6_prefix}:1000::1234", change_type="DeleteRecordSet")
        ]
    }

    cleanup_records = []
    try:
        # Set up the pre-existing records as the ok user.
        for record_json in [no_auth_ipv4, no_auth_ipv6]:
            pending_change = ok_client.create_recordset(record_json, status=202)
            cleanup_records.append(ok_client.wait_until_recordset_change_status(pending_change, "Complete"))

        # Submit the whole batch as the dummy user; the request is rejected outright (400).
        failed_changes = dummy_client.create_batch_change(batch_change_input, status=400)

        # Adds (indices 0-2): each fails authorization, echoing its submitted record data.
        for index, name, data in [(0, f"{ip4_prefix}.5", "not.authorized.ipv4.ptr.base."),
                                  (1, f"{ip4_prefix}.193", "not.authorized.ipv4.ptr.classless.delegation."),
                                  (2, f"{ip6_prefix}:1000::1234", "not.authorized.ipv6.ptr.")]:
            assert_failed_change_in_error_response(failed_changes[index], input_name=name, record_type="PTR",
                                                   record_data=data,
                                                   error_messages=[not_authorized_error])

        # Deletes (indices 3-4): same authorization failure, with no record data attached.
        for index, name in [(3, f"{ip4_prefix}.25"), (4, f"{ip6_prefix}:1000::1234")]:
            assert_failed_change_in_error_response(failed_changes[index], input_name=name, record_type="PTR",
                                                   record_data=None, change_type="DeleteRecordSet",
                                                   error_messages=[not_authorized_error])
    finally:
        clear_recordset_list(cleanup_records, ok_client)
@pytest.mark.serial
def test_ipv4_ptr_recordtype_add_checks(shared_zone_test_context):
    """
    Perform all add, non-authorization validations performed on IPv4 PTR records submitted in batch changes

    Each assertion below is tied by index to the corresponding entry in
    batch_change_input["changes"]; keep the two lists in the same order when editing.
    """
    client = shared_zone_test_context.ok_vinyldns_client
    ip4_prefix = shared_zone_test_context.ip4_classless_prefix
    ip4_zone_name = shared_zone_test_context.classless_base_zone["name"]
    # Pre-existing records that drive the "already exists" and CNAME-conflict failures below.
    # NOTE(review): unlike the other create_recordset calls in this file, no TTL argument is
    # passed for the PTR — confirm the helper's default is intended here.
    existing_ipv4 = create_recordset(shared_zone_test_context.classless_zone_delegation_zone, "193", "PTR", [{"ptrdname": "ptrdname.data."}])
    # NOTE(review): the trailing space in "cname.data. " looks accidental; it is harmless for
    # this test since only the record name participates in the conflict check — confirm.
    existing_cname = create_recordset(shared_zone_test_context.classless_base_zone, "199", "CNAME", [{"cname": "cname.data. "}], 300)
    batch_change_input = {
        "changes": [
            # valid change
            get_change_PTR_json(f"{ip4_prefix}.44", ptrdname="base.vinyldns"),
            get_change_PTR_json(f"{ip4_prefix}.198", ptrdname="delegated.vinyldns"),
            # input validation failures
            get_change_PTR_json("invalidip.111."),
            get_change_PTR_json("4.5.6.7", ttl=29, ptrdname="-1.2.3.4"),
            # delegated and non-delegated PTR duplicate name checks
            get_change_PTR_json(f"{ip4_prefix}.196"),  # delegated zone
            get_change_CNAME_json(f"196.{ip4_zone_name}"),  # non-delegated zone
            get_change_CNAME_json(f"196.192/30.{ip4_zone_name}"),  # delegated zone
            get_change_PTR_json(f"{ip4_prefix}.55"),  # non-delegated zone
            get_change_CNAME_json(f"55.{ip4_zone_name}"),  # non-delegated zone
            get_change_CNAME_json(f"55.192/30.{ip4_zone_name}"),  # delegated zone
            # zone discovery failure
            get_change_PTR_json(f"192.1.1.100"),
            # context validation failures
            get_change_PTR_json(f"{ip4_prefix}.193", ptrdname="existing-ptr."),
            get_change_PTR_json(f"{ip4_prefix}.199", ptrdname="existing-cname.")
        ]
    }
    to_create = [existing_ipv4, existing_cname]
    to_delete = []
    try:
        # make sure 196 is cleared before continuing
        delete_recordset_by_name(shared_zone_test_context.classless_zone_delegation_zone["id"], "196", client)
        for create_json in to_create:
            create_result = client.create_recordset(create_json, status=202)
            to_delete.append(client.wait_until_recordset_change_status(create_result, "Complete"))
        # The batch as a whole is rejected (400); individual change outcomes are inspected below.
        response = client.create_batch_change(batch_change_input, status=400)
        # successful changes
        assert_successful_change_in_error_response(response[0], input_name=f"{ip4_prefix}.44", record_type="PTR", record_data="base.vinyldns.")
        assert_successful_change_in_error_response(response[1], input_name=f"{ip4_prefix}.198", record_type="PTR", record_data="delegated.vinyldns.")
        # input validation failures: invalid ip, ttl, data
        assert_failed_change_in_error_response(response[2], input_name="invalidip.111.", record_type="PTR", record_data="test.com.",
                                               error_messages=['Invalid IP address: "invalidip.111.".'])
        assert_failed_change_in_error_response(response[3], input_name="4.5.6.7", ttl=29, record_type="PTR", record_data="-1.2.3.4.",
                                               error_messages=['Invalid TTL: "29", must be a number between 30 and 2147483647.',
                                                               'Invalid domain name: "-1.2.3.4.", '
                                                               "valid domain names must be letters, numbers, underscores, and hyphens, joined by dots, and terminated with a dot."])
        # delegated and non-delegated PTR duplicate name checks:
        # the CNAME that lands in the same zone as the PTR fails; the one in a different zone succeeds.
        assert_successful_change_in_error_response(response[4], input_name=f"{ip4_prefix}.196", record_type="PTR", record_data="test.com.")
        assert_successful_change_in_error_response(response[5], input_name=f"196.{ip4_zone_name}", record_type="CNAME", record_data="test.com.")
        assert_failed_change_in_error_response(response[6], input_name=f"196.192/30.{ip4_zone_name}", record_type="CNAME", record_data="test.com.",
                                               error_messages=[f'Record Name "196.192/30.{ip4_zone_name}" Not Unique In Batch Change: cannot have multiple "CNAME" records with the same name.'])
        assert_successful_change_in_error_response(response[7], input_name=f"{ip4_prefix}.55", record_type="PTR", record_data="test.com.")
        assert_failed_change_in_error_response(response[8], input_name=f"55.{ip4_zone_name}", record_type="CNAME", record_data="test.com.",
                                               error_messages=[f'Record Name "55.{ip4_zone_name}" Not Unique In Batch Change: cannot have multiple "CNAME" records with the same name.'])
        assert_successful_change_in_error_response(response[9], input_name=f"55.192/30.{ip4_zone_name}", record_type="CNAME", record_data="test.com.")
        # zone discovery failure
        assert_failed_change_in_error_response(response[10], input_name="192.1.1.100", record_type="PTR", record_data="test.com.",
                                               error_messages=['Zone Discovery Failed: zone for "192.1.1.100" does not exist in VinylDNS. '
                                                               'If zone exists, then it must be connected to in VinylDNS.'])
        # context validations: existing cname recordset
        assert_failed_change_in_error_response(response[11], input_name=f"{ip4_prefix}.193", record_type="PTR", record_data="existing-ptr.",
                                               error_messages=[f'Record "{ip4_prefix}.193" Already Exists: cannot add an existing record; to update it, issue a DeleteRecordSet then an Add.'])
        assert_failed_change_in_error_response(response[12], input_name=f"{ip4_prefix}.199", record_type="PTR", record_data="existing-cname.",
                                               error_messages=[
                                                   f'CNAME Conflict: CNAME record names must be unique. Existing record with name "{ip4_prefix}.199" and type "CNAME" conflicts with this record.'])
    finally:
        clear_recordset_list(to_delete, client)
# TODO: Commenting this out as it deletes a zone that is used by other tests and recreates it which is messed up
# @pytest.mark.serial
# def test_ipv4_ptr_record_when_zone_discovery_only_finds_mismatched_delegated_zone_fails(shared_zone_test_context):
# """
# Test IPv4 PTR record discovery for only delegated zones that do not match the record name fails
# """
# # TODO: This is really strange, deleting a classless base zone and then re-creating it?
# ok_client = shared_zone_test_context.ok_vinyldns_client
# classless_base_zone = shared_zone_test_context.classless_base_zone
#
# batch_change_input = {
# "changes": [
# get_change_PTR_json(f"{ip4_prefix}.1"),
# # dummy change with too big TTL so ZD failure wont go to pending if enabled
# get_change_A_AAAA_json("this.change.will.fail.", ttl=99999999999, address="1.1.1.1")
# ]
# }
#
# try:
# # delete classless base zone (2.0.192.in-addr.arpa); only remaining zone is delegated zone (192/30.2.0.192.in-addr.arpa)
# ok_client.delete_zone(classless_base_zone["id"], status=202)
# ok_client.wait_until_zone_deleted(classless_base_zone["id"])
# response = ok_client.create_batch_change(batch_change_input, status=400)
# assert_failed_change_in_error_response(response[0], input_name=f"{ip4_prefix}.1", record_type="PTR",
# record_data="test.com.",
# error_messages=[
# f'Zone Discovery Failed: zone for "{ip4_prefix}.1" does not exist in VinylDNS. If zone exists, then it must be connected to in VinylDNS.'])
#
# finally:
# # re-create classless base zone and update zone info in shared_zone_test_context for use in future tests
# zone_create_change = ok_client.create_zone(shared_zone_test_context.classless_base_zone_json, status=202)
# shared_zone_test_context.classless_base_zone = zone_create_change["zone"]
# ok_client.wait_until_zone_active(zone_create_change[u"zone"][u"id"])
@pytest.mark.serial
def test_ipv4_ptr_recordtype_update_delete_checks(shared_zone_test_context):
    """
    Test all update and delete validations performed on ipv4 PTR records submitted in batch changes

    Assertions below are matched by index to batch_change_input["changes"];
    keep the two lists in the same order when editing.
    """
    ok_client = shared_zone_test_context.ok_vinyldns_client
    base_zone = shared_zone_test_context.classless_base_zone
    delegated_zone = shared_zone_test_context.classless_zone_delegation_zone
    ip4_prefix = shared_zone_test_context.ip4_classless_prefix
    ip4_zone_name = shared_zone_test_context.classless_base_zone["name"]
    # Prerequisite records: targets for the delete/update/replace scenarios below.
    rs_delete_ipv4 = create_recordset(base_zone, "25", "PTR", [{"ptrdname": "delete.ptr."}], 200)
    rs_update_ipv4 = create_recordset(delegated_zone, "193", "PTR", [{"ptrdname": "update.ptr."}], 200)
    rs_replace_cname = create_recordset(base_zone, "21", "CNAME", [{"cname": "replace.cname."}], 200)
    rs_replace_ptr = create_recordset(base_zone, "17", "PTR", [{"ptrdname": "replace.ptr."}], 200)
    rs_update_ipv4_fail = create_recordset(base_zone, "9", "PTR", [{"ptrdname": "failed-update.ptr."}], 200)
    batch_change_input = {
        "comments": "this is optional",
        "changes": [
            # valid changes ipv4
            get_change_PTR_json(f"{ip4_prefix}.25", change_type="DeleteRecordSet"),
            get_change_PTR_json(f"{ip4_prefix}.193", ttl=300, ptrdname="has-updated.ptr."),
            get_change_PTR_json(f"{ip4_prefix}.193", change_type="DeleteRecordSet"),
            # valid changes: delete and add of same record name but different type
            get_change_CNAME_json(f"21.{ip4_zone_name}", change_type="DeleteRecordSet"),
            get_change_PTR_json(f"{ip4_prefix}.21", ptrdname="replace-cname.ptr."),
            get_change_CNAME_json(f"17.{ip4_zone_name}", cname="replace-ptr.cname."),
            get_change_PTR_json(f"{ip4_prefix}.17", change_type="DeleteRecordSet"),
            # input validations failures
            get_change_PTR_json("1.1.1", change_type="DeleteRecordSet"),
            get_change_PTR_json("192.0.2.", change_type="DeleteRecordSet"),
            get_change_PTR_json("192.0.2.", ttl=29, ptrdname="failed-update$.ptr"),
            # zone discovery failure
            get_change_PTR_json("192.1.1.25", change_type="DeleteRecordSet"),
            # context validation failures
            get_change_PTR_json(f"{ip4_prefix}.199", change_type="DeleteRecordSet"),
            get_change_PTR_json(f"{ip4_prefix}.200", ttl=300, ptrdname="has-updated.ptr."),
            get_change_PTR_json(f"{ip4_prefix}.200", change_type="DeleteRecordSet"),
        ]
    }
    to_create = [rs_delete_ipv4, rs_update_ipv4, rs_replace_cname, rs_replace_ptr, rs_update_ipv4_fail]
    to_delete = []
    try:
        for rs in to_create:
            create_rs = ok_client.create_recordset(rs, status=202)
            to_delete.append(ok_client.wait_until_recordset_change_status(create_rs, "Complete"))
        # The batch as a whole is rejected (400); individual change outcomes are inspected below.
        response = ok_client.create_batch_change(batch_change_input, status=400)
        # successful changes
        assert_successful_change_in_error_response(response[0], input_name=f"{ip4_prefix}.25", record_type="PTR",
                                                   record_data=None, change_type="DeleteRecordSet")
        assert_successful_change_in_error_response(response[1], ttl=300, input_name=f"{ip4_prefix}.193", record_type="PTR",
                                                   record_data="has-updated.ptr.")
        assert_successful_change_in_error_response(response[2], input_name=f"{ip4_prefix}.193", record_type="PTR",
                                                   record_data=None, change_type="DeleteRecordSet")
        # successful changes: add and delete of same record name but different type
        assert_successful_change_in_error_response(response[3], input_name=f"21.{ip4_zone_name}",
                                                   record_type="CNAME", record_data=None, change_type="DeleteRecordSet")
        assert_successful_change_in_error_response(response[4], input_name=f"{ip4_prefix}.21", record_type="PTR",
                                                   record_data="replace-cname.ptr.")
        assert_successful_change_in_error_response(response[5], input_name=f"17.{ip4_zone_name}",
                                                   record_type="CNAME", record_data="replace-ptr.cname.")
        assert_successful_change_in_error_response(response[6], input_name=f"{ip4_prefix}.17", record_type="PTR",
                                                   record_data=None, change_type="DeleteRecordSet")
        # input validations failures: invalid IP, ttl, and record data
        assert_failed_change_in_error_response(response[7], input_name="1.1.1", record_type="PTR", record_data=None,
                                               change_type="DeleteRecordSet",
                                               error_messages=['Invalid IP address: "1.1.1".'])
        assert_failed_change_in_error_response(response[8], input_name="192.0.2.", record_type="PTR", record_data=None,
                                               change_type="DeleteRecordSet",
                                               error_messages=['Invalid IP address: "192.0.2.".'])
        assert_failed_change_in_error_response(response[9], ttl=29, input_name="192.0.2.", record_type="PTR",
                                               record_data="failed-update$.ptr.",
                                               error_messages=['Invalid TTL: "29", must be a number between 30 and 2147483647.',
                                                               'Invalid IP address: "192.0.2.".',
                                                               'Invalid domain name: "failed-update$.ptr.", valid domain names must be letters, numbers, underscores, and hyphens, '
                                                               'joined by dots, and terminated with a dot.'])
        # zone discovery failure
        assert_failed_change_in_error_response(response[10], input_name="192.1.1.25", record_type="PTR",
                                               record_data=None, change_type="DeleteRecordSet",
                                               error_messages=["Zone Discovery Failed: zone for \"192.1.1.25\" does not exist in VinylDNS. If zone exists, "
                                                               "then it must be connected to in VinylDNS."])
        # context validation failures: record does not exist
        assert_failed_change_in_error_response(response[11], input_name=f"{ip4_prefix}.199", record_type="PTR",
                                               record_data=None, change_type="DeleteRecordSet",
                                               error_messages=[f"Record \"{ip4_prefix}.199\" Does Not Exist: cannot delete a record that does not exist."])
        # the add half of the ".200" update is valid on its own...
        assert_successful_change_in_error_response(response[12], ttl=300, input_name=f"{ip4_prefix}.200", record_type="PTR", record_data="has-updated.ptr.")
        # ...but its delete half fails because no ".200" record exists yet
        assert_failed_change_in_error_response(response[13], input_name=f"{ip4_prefix}.200", record_type="PTR",
                                               record_data=None, change_type="DeleteRecordSet",
                                               error_messages=[f"Record \"{ip4_prefix}.200\" Does Not Exist: cannot delete a record that does not exist."])
    finally:
        clear_recordset_list(to_delete, ok_client)
def test_ipv6_ptr_recordtype_add_checks(shared_zone_test_context):
    """
    Test all add, non-authorization validations performed on IPv6 PTR records submitted in batch changes
    """
    client = shared_zone_test_context.ok_vinyldns_client
    ip6_prefix = shared_zone_test_context.ip6_prefix

    # Pre-existing PTR used to trigger the "already exists" validation.
    existing_ptr = create_recordset(shared_zone_test_context.ip6_16_nibble_zone, "b.b.b.b.0.0.0.0.0.0.0.0.0.0.0.0",
                                    "PTR", [{"ptrdname": "test.com."}], 100)

    batch_change_input = {
        "changes": [
            # valid change
            get_change_PTR_json(f"{ip6_prefix}:1000::1234"),
            # input validation failures
            get_change_PTR_json(f"{ip6_prefix}:1000::abe", ttl=29),
            get_change_PTR_json(f"{ip6_prefix}:1000::bae", ptrdname="$malformed.hostname."),
            get_change_PTR_json("fd69:27cc:fe91de::ab", ptrdname="malformed.ip.address."),
            # zone discovery failure
            get_change_PTR_json("fedc:ba98:7654::abc", ptrdname="zone.discovery.error."),
            # context validation failures
            get_change_PTR_json(f"{ip6_prefix}:1000::bbbb", ptrdname="existing.ptr.")
        ]
    }

    cleanup_records = []
    try:
        # Create the prerequisite record and wait for it to apply.
        pending_change = client.create_recordset(existing_ptr, status=202)
        cleanup_records.append(client.wait_until_recordset_change_status(pending_change, "Complete"))

        # The batch as a whole is rejected (400); individual outcomes are checked by index.
        response = client.create_batch_change(batch_change_input, status=400)

        # index 0: the only well-formed change is accepted
        assert_successful_change_in_error_response(response[0], input_name=f"{ip6_prefix}:1000::1234",
                                                   record_type="PTR", record_data="test.com.")
        # index 1: TTL below the allowed minimum
        assert_failed_change_in_error_response(response[1], input_name=f"{ip6_prefix}:1000::abe", ttl=29,
                                               record_type="PTR", record_data="test.com.",
                                               error_messages=['Invalid TTL: "29", must be a number between 30 and 2147483647.'])
        # index 2: malformed ptrdname
        assert_failed_change_in_error_response(response[2], input_name=f"{ip6_prefix}:1000::bae", record_type="PTR",
                                               record_data="$malformed.hostname.",
                                               error_messages=['Invalid domain name: "$malformed.hostname.", valid domain names must be letters, numbers, underscores, and hyphens, joined by dots, and terminated with a dot.'])
        # index 3: malformed IPv6 address
        assert_failed_change_in_error_response(response[3], input_name="fd69:27cc:fe91de::ab", record_type="PTR",
                                               record_data="malformed.ip.address.",
                                               error_messages=['Invalid IP address: "fd69:27cc:fe91de::ab".'])
        # index 4: no connected zone covers the address
        assert_failed_change_in_error_response(response[4], input_name="fedc:ba98:7654::abc", record_type="PTR",
                                               record_data="zone.discovery.error.",
                                               error_messages=['Zone Discovery Failed: zone for "fedc:ba98:7654::abc" does not exist in VinylDNS. If zone exists, then it must be connected to in VinylDNS.'])
        # index 5: record already exists
        assert_failed_change_in_error_response(response[5], input_name=f"{ip6_prefix}:1000::bbbb", record_type="PTR",
                                               record_data="existing.ptr.",
                                               error_messages=[f'Record "{ip6_prefix}:1000::bbbb" Already Exists: cannot add an existing record; to update it, issue a DeleteRecordSet then an Add.'])
    finally:
        clear_recordset_list(cleanup_records, client)
def test_ipv6_ptr_recordtype_update_delete_checks(shared_zone_test_context):
    """
    Test all update and delete validations performed on ipv6 PTR records submitted in batch changes

    Assertions below are matched by index to batch_change_input["changes"];
    keep the two lists in the same order when editing.
    """
    ok_client = shared_zone_test_context.ok_vinyldns_client
    ip6_reverse_zone = shared_zone_test_context.ip6_16_nibble_zone
    ip6_prefix = shared_zone_test_context.ip6_prefix
    # Prerequisite records: targets for the delete and update scenarios below.
    rs_delete_ipv6 = create_recordset(ip6_reverse_zone, "a.a.a.a.0.0.0.0.0.0.0.0.0.0.0.0", "PTR",
                                      [{"ptrdname": "delete.ptr."}], 200)
    rs_update_ipv6 = create_recordset(ip6_reverse_zone, "2.6.0.0.0.0.0.0.0.0.0.0.0.0.0.0", "PTR",
                                      [{"ptrdname": "update.ptr."}], 200)
    rs_update_ipv6_fail = create_recordset(ip6_reverse_zone, "8.1.0.0.0.0.0.0.0.0.0.0.0.0.0.0", "PTR",
                                           [{"ptrdname": "failed-update.ptr."}], 200)
    batch_change_input = {
        "comments": "this is optional",
        "changes": [
            # valid changes ipv6
            get_change_PTR_json(f"{ip6_prefix}:1000::aaaa", change_type="DeleteRecordSet"),
            get_change_PTR_json(f"{ip6_prefix}:1000::62", ttl=300, ptrdname="has-updated.ptr."),
            get_change_PTR_json(f"{ip6_prefix}:1000::62", change_type="DeleteRecordSet"),
            # input validations failures
            get_change_PTR_json("fd69:27cc:fe91de::ab", change_type="DeleteRecordSet"),
            get_change_PTR_json("fd69:27cc:fe91de::ba", change_type="DeleteRecordSet"),
            get_change_PTR_json("fd69:27cc:fe91de::ba", ttl=29, ptrdname="failed-update$.ptr"),
            # zone discovery failures
            get_change_PTR_json("fedc:ba98:7654::abc", change_type="DeleteRecordSet"),
            # context validation failures
            get_change_PTR_json(f"{ip6_prefix}:1000::60", change_type="DeleteRecordSet"),
            get_change_PTR_json(f"{ip6_prefix}:1000::65", ttl=300, ptrdname="has-updated.ptr."),
            get_change_PTR_json(f"{ip6_prefix}:1000::65", change_type="DeleteRecordSet")
        ]
    }
    to_create = [rs_delete_ipv6, rs_update_ipv6, rs_update_ipv6_fail]
    to_delete = []
    try:
        for rs in to_create:
            create_rs = ok_client.create_recordset(rs, status=202)
            to_delete.append(ok_client.wait_until_recordset_change_status(create_rs, "Complete"))
        # The batch as a whole is rejected (400); individual change outcomes are inspected below.
        response = ok_client.create_batch_change(batch_change_input, status=400)
        # successful changes
        assert_successful_change_in_error_response(response[0], input_name=f"{ip6_prefix}:1000::aaaa",
                                                   record_type="PTR", record_data=None, change_type="DeleteRecordSet")
        assert_successful_change_in_error_response(response[1], ttl=300, input_name=f"{ip6_prefix}:1000::62",
                                                   record_type="PTR", record_data="has-updated.ptr.")
        assert_successful_change_in_error_response(response[2], input_name=f"{ip6_prefix}:1000::62", record_type="PTR",
                                                   record_data=None, change_type="DeleteRecordSet")
        # input validations failures: invalid IP, ttl, and record data
        assert_failed_change_in_error_response(response[3], input_name="fd69:27cc:fe91de::ab", record_type="PTR",
                                               record_data=None, change_type="DeleteRecordSet",
                                               error_messages=['Invalid IP address: "fd69:27cc:fe91de::ab".'])
        assert_failed_change_in_error_response(response[4], input_name="fd69:27cc:fe91de::ba", record_type="PTR",
                                               record_data=None, change_type="DeleteRecordSet",
                                               error_messages=['Invalid IP address: "fd69:27cc:fe91de::ba".'])
        assert_failed_change_in_error_response(response[5], ttl=29, input_name="fd69:27cc:fe91de::ba",
                                               record_type="PTR", record_data="failed-update$.ptr.",
                                               error_messages=['Invalid TTL: "29", must be a number between 30 and 2147483647.',
                                                               'Invalid IP address: "fd69:27cc:fe91de::ba".',
                                                               'Invalid domain name: "failed-update$.ptr.", valid domain names must be letters, numbers, underscores, '
                                                               'and hyphens, joined by dots, and terminated with a dot.'])
        # zone discovery failure
        assert_failed_change_in_error_response(response[6], input_name="fedc:ba98:7654::abc", record_type="PTR",
                                               record_data=None, change_type="DeleteRecordSet",
                                               error_messages=["Zone Discovery Failed: zone for \"fedc:ba98:7654::abc\" does not exist in VinylDNS. "
                                                               "If zone exists, then it must be connected to in VinylDNS."])
        # context validation failures: record does not exist, failure on update with double add
        assert_failed_change_in_error_response(response[7], input_name=f"{ip6_prefix}:1000::60", record_type="PTR",
                                               record_data=None, change_type="DeleteRecordSet",
                                               error_messages=[f"Record \"{ip6_prefix}:1000::60\" Does Not Exist: cannot delete a record that does not exist."])
        # the add half of the "::65" update is valid on its own...
        assert_successful_change_in_error_response(response[8], ttl=300, input_name=f"{ip6_prefix}:1000::65",
                                                   record_type="PTR", record_data="has-updated.ptr.")
        # ...but its delete half fails because no "::65" record exists yet
        assert_failed_change_in_error_response(response[9], input_name=f"{ip6_prefix}:1000::65", record_type="PTR",
                                               record_data=None, change_type="DeleteRecordSet",
                                               error_messages=[f"Record \"{ip6_prefix}:1000::65\" Does Not Exist: cannot delete a record that does not exist."])
    finally:
        clear_recordset_list(to_delete, ok_client)
def test_txt_recordtype_add_checks(shared_zone_test_context):
    """
    Test all add validations performed on TXT records submitted in batch changes

    Assertions below are matched by index to batch_change_input["changes"];
    keep the two lists in the same order when editing.
    """
    client = shared_zone_test_context.ok_vinyldns_client
    dummy_zone_name = shared_zone_test_context.dummy_zone["name"]
    dummy_group_name = shared_zone_test_context.dummy_group["name"]
    ok_zone_name = shared_zone_test_context.ok_zone["name"]
    # Random record names avoid collisions across test runs against shared zones.
    existing_txt_name = generate_record_name()
    existing_txt_fqdn = existing_txt_name + f".{ok_zone_name}"
    existing_txt = create_recordset(shared_zone_test_context.ok_zone, existing_txt_name, "TXT", [{"text": "test"}], 100)
    existing_cname_name = generate_record_name()
    existing_cname_fqdn = existing_cname_name + f".{ok_zone_name}"
    existing_cname = create_recordset(shared_zone_test_context.ok_zone, existing_cname_name, "CNAME",
                                      [{"cname": "test."}], 100)
    good_record_fqdn = generate_record_name(ok_zone_name)
    batch_change_input = {
        "changes": [
            # valid change
            get_change_TXT_json(good_record_fqdn),
            # input validation failures
            get_change_TXT_json(f"bad-ttl-and-invalid-name$.{ok_zone_name}", ttl=29),
            # zone discovery failures
            get_change_TXT_json("no.zone.at.all."),
            # context validation failures
            get_change_CNAME_json(f"cname-duplicate.{ok_zone_name}"),
            get_change_TXT_json(f"cname-duplicate.{ok_zone_name}"),
            get_change_TXT_json(existing_txt_fqdn),
            get_change_TXT_json(existing_cname_fqdn),
            get_change_TXT_json(f"user-add-unauthorized.{dummy_zone_name}")
        ]
    }
    to_create = [existing_txt, existing_cname]
    to_delete = []
    try:
        for create_json in to_create:
            create_result = client.create_recordset(create_json, status=202)
            to_delete.append(client.wait_until_recordset_change_status(create_result, "Complete"))
        # The batch as a whole is rejected (400); individual change outcomes are inspected below.
        response = client.create_batch_change(batch_change_input, status=400)
        # successful changes
        assert_successful_change_in_error_response(response[0], input_name=good_record_fqdn, record_type="TXT",
                                                   record_data="test")
        # ttl, domain name, record data
        assert_failed_change_in_error_response(response[1], input_name=f"bad-ttl-and-invalid-name$.{ok_zone_name}", ttl=29,
                                               record_type="TXT", record_data="test",
                                               error_messages=[
                                                   'Invalid TTL: "29", must be a number between 30 and 2147483647.',
                                                   f'Invalid domain name: "bad-ttl-and-invalid-name$.{ok_zone_name}", '
                                                   "valid domain names must be letters, numbers, underscores, and hyphens, joined by dots, and terminated with a dot."])
        # zone discovery failure
        assert_failed_change_in_error_response(response[2], input_name="no.zone.at.all.", record_type="TXT",
                                               record_data="test",
                                               error_messages=['Zone Discovery Failed: zone for "no.zone.at.all." does not exist in VinylDNS. '
                                                               'If zone exists, then it must be connected to in VinylDNS.'])
        # context validations: cname duplicate
        assert_failed_change_in_error_response(response[3], input_name=f"cname-duplicate.{ok_zone_name}", record_type="CNAME",
                                               record_data="test.com.",
                                               error_messages=[f"Record Name \"cname-duplicate.{ok_zone_name}\" Not Unique In Batch Change: "
                                                               f"cannot have multiple \"CNAME\" records with the same name."])
        # NOTE(review): response[4] (the TXT half of the duplicate-name pair) is never asserted —
        # confirm whether skipping it is intentional or an assertion is missing here.
        # context validations: conflicting recordsets, unauthorized error
        assert_failed_change_in_error_response(response[5], input_name=existing_txt_fqdn, record_type="TXT",
                                               record_data="test",
                                               error_messages=[f"Record \"{existing_txt_fqdn}\" Already Exists: "
                                                               f"cannot add an existing record; to update it, issue a DeleteRecordSet then an Add."])
        assert_failed_change_in_error_response(response[6], input_name=existing_cname_fqdn, record_type="TXT",
                                               record_data="test",
                                               error_messages=[f"CNAME Conflict: CNAME record names must be unique. "
                                                               f"Existing record with name \"{existing_cname_fqdn}\" and type \"CNAME\" conflicts with this record."])
        assert_failed_change_in_error_response(response[7], input_name=f"user-add-unauthorized.{dummy_zone_name}",
                                               record_type="TXT", record_data="test",
                                               error_messages=[f"User \"ok\" is not authorized. Contact zone owner group: {dummy_group_name} at test@test.com to make DNS changes."])
    finally:
        clear_recordset_list(to_delete, client)
def test_txt_recordtype_update_delete_checks(shared_zone_test_context):
    """
    Test all update and delete validations performed on TXT records submitted in batch changes

    Assertions below are matched by index to batch_change_input["changes"];
    keep the two lists in the same order when editing.
    """
    ok_client = shared_zone_test_context.ok_vinyldns_client
    dummy_client = shared_zone_test_context.dummy_vinyldns_client
    ok_zone = shared_zone_test_context.ok_zone
    dummy_zone = shared_zone_test_context.dummy_zone
    ok_zone_name = shared_zone_test_context.ok_zone["name"]
    dummy_zone_name = shared_zone_test_context.dummy_zone["name"]
    dummy_group_name = shared_zone_test_context.dummy_group["name"]
    # Random record names avoid collisions across test runs against shared zones.
    rs_delete_name = generate_record_name()
    rs_delete_fqdn = rs_delete_name + f".{ok_zone_name}"
    rs_delete_ok = create_recordset(ok_zone, rs_delete_name, "TXT", [{"text": "test"}], 200)
    rs_update_name = generate_record_name()
    rs_update_fqdn = rs_update_name + f".{ok_zone_name}"
    rs_update_ok = create_recordset(ok_zone, rs_update_name, "TXT", [{"text": "test"}], 200)
    # Records in the dummy zone exist only to trigger authorization failures for the ok user.
    rs_delete_dummy_name = generate_record_name()
    rs_delete_dummy_fqdn = rs_delete_dummy_name + f".{dummy_zone_name}"
    rs_delete_dummy = create_recordset(dummy_zone, rs_delete_dummy_name, "TXT", [{"text": "test"}], 200)
    rs_update_dummy_name = generate_record_name()
    rs_update_dummy_fqdn = rs_update_dummy_name + f".{dummy_zone_name}"
    rs_update_dummy = create_recordset(dummy_zone, rs_update_dummy_name, "TXT", [{"text": "test"}], 200)
    batch_change_input = {
        "comments": "this is optional",
        "changes": [
            # valid changes
            get_change_TXT_json(rs_delete_fqdn, change_type="DeleteRecordSet"),
            get_change_TXT_json(rs_update_fqdn, change_type="DeleteRecordSet"),
            get_change_TXT_json(rs_update_fqdn, ttl=300),
            # input validations failures
            get_change_TXT_json(f"invalid-name$.{ok_zone_name}", change_type="DeleteRecordSet"),
            get_change_TXT_json(f"invalid-ttl.{ok_zone_name}", ttl=29, text="bad-ttl"),
            # zone discovery failure
            get_change_TXT_json("no.zone.at.all.", change_type="DeleteRecordSet"),
            # context validation failures
            get_change_TXT_json(f"delete-nonexistent.{ok_zone_name}", change_type="DeleteRecordSet"),
            get_change_TXT_json(f"update-nonexistent.{ok_zone_name}", change_type="DeleteRecordSet"),
            get_change_TXT_json(f"update-nonexistent.{ok_zone_name}", text="test"),
            get_change_TXT_json(rs_delete_dummy_fqdn, change_type="DeleteRecordSet"),
            get_change_TXT_json(rs_update_dummy_fqdn, text="test"),
            get_change_TXT_json(rs_update_dummy_fqdn, change_type="DeleteRecordSet")
        ]
    }
    to_create = [rs_delete_ok, rs_update_ok, rs_delete_dummy, rs_update_dummy]
    to_delete = []
    try:
        # Create each prerequisite record with the client that owns its zone.
        for rs in to_create:
            if rs["zoneId"] == dummy_zone["id"]:
                create_client = dummy_client
            else:
                create_client = ok_client
            create_rs = create_client.create_recordset(rs, status=202)
            to_delete.append(create_client.wait_until_recordset_change_status(create_rs, "Complete"))
        # Confirm that record set doesn't already exist
        ok_client.get_recordset(ok_zone["id"], "delete-nonexistent", status=404)
        # The batch as a whole is rejected (400); individual change outcomes are inspected below.
        response = ok_client.create_batch_change(batch_change_input, status=400)
        # successful changes
        assert_successful_change_in_error_response(response[0], input_name=rs_delete_fqdn, record_type="TXT", record_data=None, change_type="DeleteRecordSet")
        assert_successful_change_in_error_response(response[1], input_name=rs_update_fqdn, record_type="TXT", record_data=None, change_type="DeleteRecordSet")
        assert_successful_change_in_error_response(response[2], ttl=300, input_name=rs_update_fqdn, record_type="TXT", record_data="test")
        # input validations failures: invalid input name, reverse zone error, invalid ttl
        assert_failed_change_in_error_response(response[3], input_name=f"invalid-name$.{ok_zone_name}", record_type="TXT", record_data="test", change_type="DeleteRecordSet",
                                               error_messages=[f'Invalid domain name: "invalid-name$.{ok_zone_name}", valid domain names must be '
                                                               f'letters, numbers, underscores, and hyphens, joined by dots, and terminated with a dot.'])
        assert_failed_change_in_error_response(response[4], input_name=f"invalid-ttl.{ok_zone_name}", ttl=29, record_type="TXT", record_data="bad-ttl",
                                               error_messages=['Invalid TTL: "29", must be a number between 30 and 2147483647.'])
        # zone discovery failure
        assert_failed_change_in_error_response(response[5], input_name="no.zone.at.all.", record_type="TXT", record_data=None, change_type="DeleteRecordSet",
                                               error_messages=[
                                                   "Zone Discovery Failed: zone for \"no.zone.at.all.\" does not exist in VinylDNS. "
                                                   "If zone exists, then it must be connected to in VinylDNS."])
        # context validation failures: record does not exist, not authorized
        assert_failed_change_in_error_response(response[6], input_name=f"delete-nonexistent.{ok_zone_name}", record_type="TXT", record_data=None, change_type="DeleteRecordSet",
                                               error_messages=[f"Record \"delete-nonexistent.{ok_zone_name}\" Does Not Exist: cannot delete a record that does not exist."])
        assert_failed_change_in_error_response(response[7], input_name=f"update-nonexistent.{ok_zone_name}", record_type="TXT", record_data=None, change_type="DeleteRecordSet",
                                               error_messages=[f"Record \"update-nonexistent.{ok_zone_name}\" Does Not Exist: cannot delete a record that does not exist."])
        # the add half of the nonexistent "update" is valid on its own
        assert_successful_change_in_error_response(response[8], input_name=f"update-nonexistent.{ok_zone_name}", record_type="TXT", record_data="test")
        assert_failed_change_in_error_response(response[9], input_name=rs_delete_dummy_fqdn, record_type="TXT", record_data=None, change_type="DeleteRecordSet",
                                               error_messages=[f"User \"ok\" is not authorized. Contact zone owner group: {dummy_group_name} at test@test.com to make DNS changes."])
        assert_failed_change_in_error_response(response[10], input_name=rs_update_dummy_fqdn, record_type="TXT", record_data="test",
                                               error_messages=[f"User \"ok\" is not authorized. Contact zone owner group: {dummy_group_name} at test@test.com to make DNS changes."])
        assert_failed_change_in_error_response(response[11], input_name=rs_update_dummy_fqdn, record_type="TXT", record_data=None, change_type="DeleteRecordSet",
                                               error_messages=[f"User \"ok\" is not authorized. Contact zone owner group: {dummy_group_name} at test@test.com to make DNS changes."])
    finally:
        # Clean up updates
        dummy_deletes = [rs for rs in to_delete if rs["zone"]["id"] == dummy_zone["id"]]
        ok_deletes = [rs for rs in to_delete if rs["zone"]["id"] != dummy_zone["id"]]
        clear_recordset_list(dummy_deletes, dummy_client)
        clear_recordset_list(ok_deletes, ok_client)
def test_mx_recordtype_add_checks(shared_zone_test_context):
    """
    Test all add validations performed on MX records submitted in batch changes

    Submits one batch containing a single valid MX add plus adds designed to
    fail each validation stage (input validation, zone discovery, context
    validation), then checks every change's individual result in the 400
    error response by positional index.
    """
    client = shared_zone_test_context.ok_vinyldns_client
    ok_zone_name = shared_zone_test_context.ok_zone["name"]
    dummy_zone_name = shared_zone_test_context.dummy_zone["name"]
    dummy_group_name = shared_zone_test_context.dummy_group["name"]
    ip4_zone_name = shared_zone_test_context.classless_base_zone["name"]
    # Pre-existing records used to trigger "already exists" and CNAME-conflict errors.
    existing_mx_name = generate_record_name()
    existing_mx_fqdn = f"{existing_mx_name}.{ok_zone_name}"
    existing_mx = create_recordset(shared_zone_test_context.ok_zone, existing_mx_name, "MX", [{"preference": 1, "exchange": "foo.bar."}], 100)
    existing_cname_name = generate_record_name()
    existing_cname_fqdn = f"{existing_cname_name}.{ok_zone_name}"
    existing_cname = create_recordset(shared_zone_test_context.ok_zone, existing_cname_name, "CNAME", [{"cname": "test."}], 100)
    good_record_fqdn = generate_record_name(ok_zone_name)
    # The order of entries here fixes the response[i] indices asserted below.
    batch_change_input = {
        "changes": [
            # valid change
            get_change_MX_json(good_record_fqdn),
            # input validation failures
            get_change_MX_json(f"bad-ttl-and-invalid-name$.{ok_zone_name}", ttl=29),
            get_change_MX_json(f"bad-exchange.{ok_zone_name}", exchange="foo$.bar."),
            get_change_MX_json(f"mx.{ip4_zone_name}"),
            # zone discovery failures
            get_change_MX_json(f"no.subzone.{ok_zone_name}"),
            get_change_MX_json("no.zone.at.all."),
            # context validation failures
            get_change_CNAME_json(f"cname-duplicate.{ok_zone_name}"),
            get_change_MX_json(f"cname-duplicate.{ok_zone_name}"),
            get_change_MX_json(existing_mx_fqdn),
            get_change_MX_json(existing_cname_fqdn),
            get_change_MX_json(f"user-add-unauthorized.{dummy_zone_name}")
        ]
    }
    to_create = [existing_mx, existing_cname]
    to_delete = []
    try:
        # Seed the conflicting records before submitting the batch.
        for create_json in to_create:
            create_result = client.create_recordset(create_json, status=202)
            to_delete.append(client.wait_until_recordset_change_status(create_result, "Complete"))
        # The whole batch is rejected (400); per-change outcomes are inspected below.
        response = client.create_batch_change(batch_change_input, status=400)
        # successful changes
        assert_successful_change_in_error_response(response[0], input_name=good_record_fqdn, record_type="MX", record_data={"preference": 1, "exchange": "foo.bar."})
        # ttl, domain name, record data
        assert_failed_change_in_error_response(response[1], input_name=f"bad-ttl-and-invalid-name$.{ok_zone_name}", ttl=29, record_type="MX",
                                               record_data={"preference": 1, "exchange": "foo.bar."},
                                               error_messages=['Invalid TTL: "29", must be a number between 30 and 2147483647.',
                                                               f'Invalid domain name: "bad-ttl-and-invalid-name$.{ok_zone_name}", '
                                                               "valid domain names must be letters, numbers, underscores, and hyphens, joined by dots, and terminated with a dot."])
        assert_failed_change_in_error_response(response[2], input_name=f"bad-exchange.{ok_zone_name}", record_type="MX",
                                               record_data={"preference": 1, "exchange": "foo$.bar."},
                                               error_messages=['Invalid domain name: "foo$.bar.", valid domain names must be letters, numbers, underscores, and hyphens, '
                                                               'joined by dots, and terminated with a dot.'])
        assert_failed_change_in_error_response(response[3], input_name=f"mx.{ip4_zone_name}", record_type="MX",
                                               record_data={"preference": 1, "exchange": "foo.bar."},
                                               error_messages=[f'Invalid Record Type In Reverse Zone: record with name "mx.{ip4_zone_name}" and type "MX" is not allowed in a reverse zone.'])
        # zone discovery failures
        assert_failed_change_in_error_response(response[4], input_name=f"no.subzone.{ok_zone_name}", record_type="MX",
                                               record_data={"preference": 1, "exchange": "foo.bar."},
                                               error_messages=[f'Zone Discovery Failed: zone for "no.subzone.{ok_zone_name}" does not exist in VinylDNS. '
                                                               f'If zone exists, then it must be connected to in VinylDNS.'])
        assert_failed_change_in_error_response(response[5], input_name="no.zone.at.all.", record_type="MX",
                                               record_data={"preference": 1, "exchange": "foo.bar."},
                                               error_messages=['Zone Discovery Failed: zone for "no.zone.at.all." does not exist in VinylDNS. '
                                                               'If zone exists, then it must be connected to in VinylDNS.'])
        # context validations: cname duplicate
        assert_failed_change_in_error_response(response[6], input_name=f"cname-duplicate.{ok_zone_name}", record_type="CNAME",
                                               record_data="test.com.",
                                               error_messages=[f"Record Name \"cname-duplicate.{ok_zone_name}\" Not Unique In Batch Change: "
                                                               f"cannot have multiple \"CNAME\" records with the same name."])
        # NOTE(review): response[7] (the MX half of the cname-duplicate pair) is
        # never asserted — confirm whether that omission is intentional.
        # context validations: conflicting recordsets, unauthorized error
        assert_failed_change_in_error_response(response[8], input_name=existing_mx_fqdn, record_type="MX",
                                               record_data={"preference": 1, "exchange": "foo.bar."},
                                               error_messages=[f"Record \"{existing_mx_fqdn}\" Already Exists: cannot add an existing record; to update it, "
                                                               f"issue a DeleteRecordSet then an Add."])
        assert_failed_change_in_error_response(response[9], input_name=existing_cname_fqdn, record_type="MX",
                                               record_data={"preference": 1, "exchange": "foo.bar."},
                                               error_messages=["CNAME Conflict: CNAME record names must be unique. "
                                                               f"Existing record with name \"{existing_cname_fqdn}\" and type \"CNAME\" conflicts with this record."])
        assert_failed_change_in_error_response(response[10], input_name=f"user-add-unauthorized.{dummy_zone_name}", record_type="MX",
                                               record_data={"preference": 1, "exchange": "foo.bar."},
                                               error_messages=[f"User \"ok\" is not authorized. Contact zone owner group: {dummy_group_name} at test@test.com to make DNS changes."])
    finally:
        clear_recordset_list(to_delete, client)
def test_mx_recordtype_update_delete_checks(shared_zone_test_context):
    """
    Test all update and delete validations performed on MX records submitted in batch changes

    Seeds MX records in both the ok zone and the dummy zone, then submits one
    batch of updates/deletes covering valid changes, input-validation failures,
    a zone-discovery failure, and context-validation failures (nonexistent
    records, unauthorized zone). Results are asserted by positional index in
    the 400 error response.
    """
    ok_client = shared_zone_test_context.ok_vinyldns_client
    dummy_client = shared_zone_test_context.dummy_vinyldns_client
    ok_zone = shared_zone_test_context.ok_zone
    dummy_zone = shared_zone_test_context.dummy_zone
    dummy_zone_name = shared_zone_test_context.dummy_zone["name"]
    dummy_group_name = shared_zone_test_context.dummy_group["name"]
    ok_zone_name = shared_zone_test_context.ok_zone["name"]
    ip4_zone_name = shared_zone_test_context.classless_base_zone["name"]
    # Records in the ok zone: one to delete, one to update (delete + re-add pair).
    rs_delete_name = generate_record_name()
    rs_delete_fqdn = rs_delete_name + f".{ok_zone_name}"
    rs_delete_ok = create_recordset(ok_zone, rs_delete_name, "MX", [{"preference": 1, "exchange": "foo.bar."}], 200)
    rs_update_name = generate_record_name()
    rs_update_fqdn = rs_update_name + f".{ok_zone_name}"
    rs_update_ok = create_recordset(ok_zone, rs_update_name, "MX", [{"preference": 1, "exchange": "foo.bar."}], 200)
    # Records in the dummy zone: the ok user is not authorized to touch these.
    rs_delete_dummy_name = generate_record_name()
    rs_delete_dummy_fqdn = rs_delete_dummy_name + f".{dummy_zone_name}"
    rs_delete_dummy = create_recordset(dummy_zone, rs_delete_dummy_name, "MX", [{"preference": 1, "exchange": "foo.bar."}], 200)
    rs_update_dummy_name = generate_record_name()
    rs_update_dummy_fqdn = rs_update_dummy_name + f".{dummy_zone_name}"
    rs_update_dummy = create_recordset(dummy_zone, rs_update_dummy_name, "MX", [{"preference": 1, "exchange": "foo.bar."}], 200)
    # The order of entries here fixes the response[i] indices asserted below.
    batch_change_input = {
        "comments": "this is optional",
        "changes": [
            # valid changes
            get_change_MX_json(rs_delete_fqdn, change_type="DeleteRecordSet"),
            get_change_MX_json(rs_update_fqdn, change_type="DeleteRecordSet"),
            get_change_MX_json(rs_update_fqdn, ttl=300),
            # input validations failures
            get_change_MX_json(f"invalid-name$.{ok_zone_name}", change_type="DeleteRecordSet"),
            get_change_MX_json(f"delete.{ok_zone_name}", ttl=29),
            get_change_MX_json(f"bad-exchange.{ok_zone_name}", exchange="foo$.bar."),
            get_change_MX_json(f"mx.{ip4_zone_name}"),
            # zone discovery failures
            get_change_MX_json("no.zone.at.all.", change_type="DeleteRecordSet"),
            # context validation failures
            get_change_MX_json(f"delete-nonexistent.{ok_zone_name}", change_type="DeleteRecordSet"),
            get_change_MX_json(f"update-nonexistent.{ok_zone_name}", change_type="DeleteRecordSet"),
            get_change_MX_json(f"update-nonexistent.{ok_zone_name}", preference=1000, exchange="foo.bar."),
            get_change_MX_json(rs_delete_dummy_fqdn, change_type="DeleteRecordSet"),
            get_change_MX_json(rs_update_dummy_fqdn, preference=1000, exchange="foo.bar."),
            get_change_MX_json(rs_update_dummy_fqdn, change_type="DeleteRecordSet")
        ]
    }
    to_create = [rs_delete_ok, rs_update_ok, rs_delete_dummy, rs_update_dummy]
    to_delete = []
    try:
        # Each record must be created by a client authorized for its zone.
        for rs in to_create:
            if rs["zoneId"] == dummy_zone["id"]:
                create_client = dummy_client
            else:
                create_client = ok_client
            create_rs = create_client.create_recordset(rs, status=202)
            to_delete.append(create_client.wait_until_recordset_change_status(create_rs, "Complete"))
        # Confirm that record set doesn't already exist
        ok_client.get_recordset(ok_zone["id"], "delete-nonexistent", status=404)
        response = ok_client.create_batch_change(batch_change_input, status=400)
        # successful changes
        assert_successful_change_in_error_response(response[0], input_name=rs_delete_fqdn, record_type="MX", record_data=None, change_type="DeleteRecordSet")
        assert_successful_change_in_error_response(response[1], input_name=rs_update_fqdn, record_type="MX", record_data=None, change_type="DeleteRecordSet")
        assert_successful_change_in_error_response(response[2], ttl=300, input_name=rs_update_fqdn, record_type="MX", record_data={"preference": 1, "exchange": "foo.bar."})
        # input validations failures: invalid input name, reverse zone error, invalid ttl
        assert_failed_change_in_error_response(response[3], input_name=f"invalid-name$.{ok_zone_name}", record_type="MX", record_data={"preference": 1, "exchange": "foo.bar."},
                                               change_type="DeleteRecordSet",
                                               error_messages=[f'Invalid domain name: "invalid-name$.{ok_zone_name}", valid domain names must be letters, '
                                                               f'numbers, underscores, and hyphens, joined by dots, and terminated with a dot.'])
        assert_failed_change_in_error_response(response[4], input_name=f"delete.{ok_zone_name}", ttl=29, record_type="MX",
                                               record_data={"preference": 1, "exchange": "foo.bar."},
                                               error_messages=['Invalid TTL: "29", must be a number between 30 and 2147483647.'])
        assert_failed_change_in_error_response(response[5], input_name=f"bad-exchange.{ok_zone_name}", record_type="MX",
                                               record_data={"preference": 1, "exchange": "foo$.bar."},
                                               error_messages=['Invalid domain name: "foo$.bar.", valid domain names must be letters, numbers, '
                                                               'underscores, and hyphens, joined by dots, and terminated with a dot.'])
        assert_failed_change_in_error_response(response[6], input_name=f"mx.{ip4_zone_name}", record_type="MX",
                                               record_data={"preference": 1, "exchange": "foo.bar."},
                                               error_messages=[f'Invalid Record Type In Reverse Zone: record with name "mx.{ip4_zone_name}" '
                                                               f'and type "MX" is not allowed in a reverse zone.'])
        # zone discovery failure
        assert_failed_change_in_error_response(response[7], input_name="no.zone.at.all.", record_type="MX",
                                               record_data=None, change_type="DeleteRecordSet",
                                               error_messages=["Zone Discovery Failed: zone for \"no.zone.at.all.\" does not exist in VinylDNS. "
                                                               "If zone exists, then it must be connected to in VinylDNS."])
        # context validation failures: record does not exist, not authorized
        assert_failed_change_in_error_response(response[8], input_name=f"delete-nonexistent.{ok_zone_name}", record_type="MX",
                                               record_data=None, change_type="DeleteRecordSet",
                                               error_messages=[f"Record \"delete-nonexistent.{ok_zone_name}\" Does Not Exist: cannot delete a record that does not exist."])
        assert_failed_change_in_error_response(response[9], input_name=f"update-nonexistent.{ok_zone_name}", record_type="MX",
                                               record_data=None, change_type="DeleteRecordSet",
                                               error_messages=[f"Record \"update-nonexistent.{ok_zone_name}\" Does Not Exist: cannot delete a record that does not exist."])
        # The add half of the nonexistent update is itself valid, so it reports success.
        assert_successful_change_in_error_response(response[10], input_name=f"update-nonexistent.{ok_zone_name}", record_type="MX",
                                                   record_data={"preference": 1000, "exchange": "foo.bar."})
        assert_failed_change_in_error_response(response[11], input_name=rs_delete_dummy_fqdn, record_type="MX",
                                               record_data=None, change_type="DeleteRecordSet",
                                               error_messages=[f"User \"ok\" is not authorized. Contact zone owner group: {dummy_group_name} at test@test.com to make DNS changes."])
        assert_failed_change_in_error_response(response[12], input_name=rs_update_dummy_fqdn, record_type="MX",
                                               record_data={"preference": 1000, "exchange": "foo.bar."},
                                               error_messages=[f"User \"ok\" is not authorized. Contact zone owner group: {dummy_group_name} at test@test.com to make DNS changes."])
        assert_failed_change_in_error_response(response[13], input_name=rs_update_dummy_fqdn, record_type="MX",
                                               record_data=None, change_type="DeleteRecordSet",
                                               error_messages=[f"User \"ok\" is not authorized. Contact zone owner group: {dummy_group_name} at test@test.com to make DNS changes."])
    finally:
        # Clean up updates
        dummy_deletes = [rs for rs in to_delete if rs["zone"]["id"] == dummy_zone["id"]]
        ok_deletes = [rs for rs in to_delete if rs["zone"]["id"] != dummy_zone["id"]]
        clear_recordset_list(dummy_deletes, dummy_client)
        clear_recordset_list(ok_deletes, ok_client)
def test_create_batch_change_does_not_save_owner_group_id_for_non_shared_zone(shared_zone_test_context):
    """
    Test successfully creating a batch change with owner group ID doesn't save value for records in non-shared zone

    Creates one new record and updates an existing one in a private (non-shared)
    zone via a batch change carrying an ownerGroupId, then verifies that while
    the batch itself records the owner group ID, none of the resulting record
    sets carry an "ownerGroupId" field.
    """
    ok_client = shared_zone_test_context.ok_vinyldns_client
    ok_zone = shared_zone_test_context.ok_zone
    ok_group = shared_zone_test_context.ok_group
    ok_zone_name = shared_zone_test_context.ok_zone["name"]
    update_name = generate_record_name()
    update_fqdn = update_name + f".{ok_zone_name}"
    update_rs = create_recordset(ok_zone, update_name, "A", [{"address": "127.0.0.1"}], 300)
    batch_change_input = {
        "changes": [
            get_change_A_AAAA_json(f"no-owner-group-id.{ok_zone_name}", address="4.3.2.1"),
            get_change_A_AAAA_json(update_fqdn, address="1.2.3.4"),
            get_change_A_AAAA_json(update_fqdn, change_type="DeleteRecordSet")
        ],
        "ownerGroupId": ok_group["id"]
    }
    to_delete = []
    try:
        create_result = ok_client.create_recordset(update_rs, status=202)
        completed_create = ok_client.wait_until_recordset_change_status(create_result, "Complete")
        # Bug fix: track the seeded record as a (zoneId, recordSetId) tuple from
        # the start. Previously the raw change dict was appended, so if the batch
        # call below failed before `to_delete` was rebuilt, the tuple-based
        # cleanup in `finally` would receive the wrong shape and fail.
        to_delete.append((completed_create["recordSet"]["zoneId"], completed_create["recordSet"]["id"]))
        result = ok_client.create_batch_change(batch_change_input, status=202)
        completed_batch = ok_client.wait_until_batch_change_completed(result)
        assert_that(completed_batch["ownerGroupId"], is_(batch_change_input["ownerGroupId"]))
        # Rebuild from the batch outcome: the update's delete+add pair collapses
        # onto the same record set, and the new create is added.
        to_delete = [(change["zoneId"], change["recordSetId"]) for change in completed_batch["changes"]]
        assert_change_success(result["changes"], zone=ok_zone, index=0, record_name="no-owner-group-id",
                              input_name=f"no-owner-group-id.{ok_zone_name}", record_data="4.3.2.1")
        assert_change_success(result["changes"], zone=ok_zone, index=1, record_name=update_name,
                              input_name=update_fqdn, record_data="1.2.3.4")
        assert_change_success(result["changes"], zone=ok_zone, index=2, record_name=update_name,
                              input_name=update_fqdn, change_type="DeleteRecordSet", record_data=None)
        # In a non-shared zone the batch's ownerGroupId must not be saved on records.
        for (zoneId, recordSetId) in to_delete:
            get_recordset = ok_client.get_recordset(zoneId, recordSetId, status=200)
            assert_that(get_recordset["recordSet"], is_not(has_key("ownerGroupId")))
    finally:
        clear_zoneid_rsid_tuple_list(to_delete, ok_client)
def test_create_batch_change_for_shared_zone_owner_group_applied_logic(shared_zone_test_context):
    """
    Test successfully creating a batch change with owner group ID in shared zone succeeds and sets owner group ID
    on creates and only updates without a pre-existing owner group ID
    """
    shared_client = shared_zone_test_context.shared_zone_vinyldns_client
    shared_zone = shared_zone_test_context.shared_zone
    shared_zone_name = shared_zone_test_context.shared_zone["name"]
    shared_record_group = shared_zone_test_context.shared_record_group
    # One record with no owner group (should inherit the batch's group on update)
    # and one already owned (should keep its existing group).
    without_group_name = generate_record_name()
    without_group_fqdn = f"{without_group_name}.{shared_zone_name}"
    update_rs_without_owner_group = create_recordset(shared_zone, without_group_name, "A", [{"address": "127.0.0.1"}], 300)
    with_group_name = generate_record_name()
    with_group_fqdn = f"{with_group_name}.{shared_zone_name}"
    update_rs_with_owner_group = create_recordset(shared_zone, with_group_name, "A", [{"address": "127.0.0.1"}], 300, shared_record_group["id"])
    create_name = generate_record_name()
    create_fqdn = f"{create_name}.{shared_zone_name}"
    batch_change_input = {
        "changes": [
            get_change_A_AAAA_json(create_fqdn, address="4.3.2.1"),
            get_change_A_AAAA_json(without_group_fqdn, address="1.2.3.4"),
            get_change_A_AAAA_json(without_group_fqdn, change_type="DeleteRecordSet"),
            get_change_A_AAAA_json(with_group_fqdn, address="1.2.3.4"),
            get_change_A_AAAA_json(with_group_fqdn, change_type="DeleteRecordSet")
        ],
        "ownerGroupId": "shared-zone-group"
    }
    to_delete = []
    try:
        # Create first record for updating and verify that owner group ID is not set
        create_result = shared_client.create_recordset(update_rs_without_owner_group, status=202)
        completed = shared_client.wait_until_recordset_change_status(create_result, "Complete")
        # Bug fix: track seeded records as (zoneId, recordSetId) tuples from the
        # start. Previously the raw change dicts were appended, so if the batch
        # call below failed before `to_delete` was rebuilt, the tuple-based
        # cleanup in `finally` would receive the wrong shape and fail.
        to_delete.append((completed["recordSet"]["zoneId"], completed["recordSet"]["id"]))
        create_result = shared_client.get_recordset(create_result["recordSet"]["zoneId"], create_result["recordSet"]["id"], status=200)
        assert_that(create_result["recordSet"], is_not(has_key("ownerGroupId")))
        # Create second record for updating and verify that owner group ID is set
        create_result = shared_client.create_recordset(update_rs_with_owner_group, status=202)
        completed = shared_client.wait_until_recordset_change_status(create_result, "Complete")
        to_delete.append((completed["recordSet"]["zoneId"], completed["recordSet"]["id"]))
        create_result = shared_client.get_recordset(create_result["recordSet"]["zoneId"], create_result["recordSet"]["id"], status=200)
        assert_that(create_result["recordSet"]["ownerGroupId"], is_(shared_record_group["id"]))
        # Create batch
        result = shared_client.create_batch_change(batch_change_input, status=202)
        completed_batch = shared_client.wait_until_batch_change_completed(result)
        assert_that(completed_batch["ownerGroupId"], is_(batch_change_input["ownerGroupId"]))
        # Rebuild from the batch outcome (updates collapse onto existing record sets).
        to_delete = [(change["zoneId"], change["recordSetId"]) for change in completed_batch["changes"]]
        assert_that(result["ownerGroupId"], is_("shared-zone-group"))
        assert_change_success(result["changes"], zone=shared_zone, index=0, record_name=create_name, input_name=create_fqdn, record_data="4.3.2.1")
        assert_change_success(result["changes"], zone=shared_zone, index=1, record_name=without_group_name, input_name=without_group_fqdn, record_data="1.2.3.4")
        assert_change_success(result["changes"], zone=shared_zone, index=2, record_name=without_group_name, input_name=without_group_fqdn, change_type="DeleteRecordSet", record_data=None)
        assert_change_success(result["changes"], zone=shared_zone, index=3, record_name=with_group_name, input_name=with_group_fqdn, record_data="1.2.3.4")
        assert_change_success(result["changes"], zone=shared_zone, index=4, record_name=with_group_name, input_name=with_group_fqdn, change_type="DeleteRecordSet", record_data=None)
        # Pre-owned record keeps its group; the others take the batch's group.
        for (zoneId, recordSetId) in to_delete:
            get_recordset = shared_client.get_recordset(zoneId, recordSetId, status=200)
            if get_recordset["recordSet"]["name"] == with_group_name:
                assert_that(get_recordset["recordSet"]["ownerGroupId"], is_(shared_record_group["id"]))
            else:
                assert_that(get_recordset["recordSet"]["ownerGroupId"], is_(batch_change_input["ownerGroupId"]))
    finally:
        clear_zoneid_rsid_tuple_list(to_delete, shared_client)
def test_create_batch_change_for_shared_zone_with_invalid_owner_group_id_fails(shared_zone_test_context):
    """
    Test creating a batch change with invalid owner group ID fails

    A batch referencing an owner group that does not exist is rejected with a
    400 and a group-not-found error.
    """
    client = shared_zone_test_context.shared_zone_vinyldns_client
    zone_name = shared_zone_test_context.shared_zone["name"]
    change_input = {
        "changes": [get_change_A_AAAA_json(f"no-owner-group-id.{zone_name}", address="4.3.2.1")],
        "ownerGroupId": "non-existent-owner-group-id"
    }
    response_errors = client.create_batch_change(change_input, status=400)["errors"]
    assert_that(response_errors, contains_exactly('Group with ID "non-existent-owner-group-id" was not found'))
def test_create_batch_change_for_shared_zone_with_unauthorized_owner_group_id_fails(shared_zone_test_context):
    """
    Test creating a batch change with unauthorized owner group ID fails

    The shared-zone user is not a member of the ok group, so a batch naming
    that group as owner is rejected with a 400 membership error.
    """
    client = shared_zone_test_context.shared_zone_vinyldns_client
    zone_name = shared_zone_test_context.shared_zone["name"]
    foreign_group_id = shared_zone_test_context.ok_group["id"]
    change_input = {
        "changes": [get_change_A_AAAA_json(f"no-owner-group-id.{zone_name}", address="4.3.2.1")],
        "ownerGroupId": foreign_group_id
    }
    response_errors = client.create_batch_change(change_input, status=400)["errors"]
    expected_error = f'User "sharedZoneUser" must be a member of group "{foreign_group_id}" to apply this group to batch changes.'
    assert_that(response_errors, contains_exactly(expected_error))
def test_create_batch_change_validation_with_owner_group_id(shared_zone_test_context):
    """
    Test creating a batch change should properly set owner group ID in the following circumstances:
    - create in shared zone
    - update in shared zone without existing owner group ID
    Owner group ID will be ignored in the following circumstances:
    - create in private zone
    - update in private zone
    - update in shared zone with pre-existing owner group ID
    - delete in either private or shared zone
    """
    shared_client = shared_zone_test_context.shared_zone_vinyldns_client
    ok_client = shared_zone_test_context.ok_vinyldns_client
    shared_group = shared_zone_test_context.shared_record_group
    ok_group = shared_zone_test_context.ok_group
    shared_zone = shared_zone_test_context.shared_zone
    ok_zone = shared_zone_test_context.ok_zone
    ok_zone_name = shared_zone_test_context.ok_zone["name"]
    shared_zone_name = shared_zone_test_context.shared_zone["name"]
    # record sets to setup
    private_update_name = generate_record_name()
    private_update_fqdn = f"{private_update_name}.{ok_zone_name}"
    private_update = create_recordset(ok_zone, private_update_name, "A", [{"address": "1.1.1.1"}], 200)
    shared_update_no_group_name = generate_record_name()
    shared_update_no_group_fqdn = f"{shared_update_no_group_name}.{shared_zone_name}"
    shared_update_no_owner_group = create_recordset(shared_zone, shared_update_no_group_name, "A", [{"address": "1.1.1.1"}], 200)
    shared_update_group_name = generate_record_name()
    shared_update_group_fqdn = f"{shared_update_group_name}.{shared_zone_name}"
    shared_update_existing_owner_group = create_recordset(shared_zone, shared_update_group_name, "A", [{"address": "1.1.1.1"}], 200, shared_group["id"])
    private_delete_name = generate_record_name()
    private_delete_fqdn = f"{private_delete_name}.{ok_zone_name}"
    private_delete = create_recordset(ok_zone, private_delete_name, "A", [{"address": "1.1.1.1"}], 200)
    shared_delete_name = generate_record_name()
    shared_delete_fqdn = f"{shared_delete_name}.{shared_zone_name}"
    shared_delete = create_recordset(shared_zone, shared_delete_name, "A", [{"address": "1.1.1.1"}], 200)
    # NOTE(review): initialized as dicts but reassigned to sets below; an empty
    # dict iterates to nothing, so the `finally` cleanup is safe if setup fails.
    to_delete_ok = {}
    to_delete_shared = {}
    private_create_name = generate_record_name()
    private_create_fqdn = f"{private_create_name}.{ok_zone_name}"
    shared_create_name = generate_record_name()
    shared_create_fqdn = f"{shared_create_name}.{shared_zone_name}"
    # The order of entries here fixes the index= values asserted below.
    batch_change_input = {
        "changes": [
            get_change_A_AAAA_json(private_create_fqdn),
            get_change_A_AAAA_json(shared_create_fqdn),
            get_change_A_AAAA_json(private_update_fqdn, ttl=300),
            get_change_A_AAAA_json(private_update_fqdn, change_type="DeleteRecordSet"),
            get_change_A_AAAA_json(shared_update_no_group_fqdn, ttl=300),
            get_change_A_AAAA_json(shared_update_no_group_fqdn, change_type="DeleteRecordSet"),
            get_change_A_AAAA_json(shared_update_group_fqdn, ttl=300),
            get_change_A_AAAA_json(shared_update_group_fqdn, change_type="DeleteRecordSet"),
            get_change_A_AAAA_json(private_delete_fqdn, change_type="DeleteRecordSet"),
            get_change_A_AAAA_json(shared_delete_fqdn, change_type="DeleteRecordSet")
        ],
        "ownerGroupId": ok_group["id"]
    }
    try:
        # Seed records: each zone's records are created by the client authorized for it.
        for rs in [private_update, private_delete]:
            create_rs = ok_client.create_recordset(rs, status=202)
            ok_client.wait_until_recordset_change_status(create_rs, "Complete")
        for rs in [shared_update_no_owner_group, shared_update_existing_owner_group, shared_delete]:
            create_rs = shared_client.create_recordset(rs, status=202)
            shared_client.wait_until_recordset_change_status(create_rs, "Complete")
        result = ok_client.create_batch_change(batch_change_input, status=202)
        completed_batch = ok_client.wait_until_batch_change_completed(result)
        assert_that(completed_batch["ownerGroupId"], is_(ok_group["id"]))
        # set here because multiple items in the batch combine to one RS
        record_set_list = [(change["zoneId"], change["recordSetId"]) for change in completed_batch["changes"] if
                           private_delete_name not in change["recordName"] and change["zoneId"] == ok_zone["id"]]
        to_delete_ok = set(record_set_list)
        record_set_list = [(change["zoneId"], change["recordSetId"]) for change in completed_batch["changes"] if
                           shared_delete_name not in change["recordName"] and change["zoneId"] == shared_zone["id"]]
        to_delete_shared = set(record_set_list)
        # Every change in the batch should have succeeded.
        assert_change_success(completed_batch["changes"], zone=ok_zone, index=0,
                              record_name=private_create_name,
                              input_name=private_create_fqdn, record_data="1.1.1.1")
        assert_change_success(completed_batch["changes"], zone=shared_zone, index=1,
                              record_name=shared_create_name,
                              input_name=shared_create_fqdn, record_data="1.1.1.1")
        assert_change_success(completed_batch["changes"], zone=ok_zone, index=2,
                              record_name=private_update_name,
                              input_name=private_update_fqdn, record_data="1.1.1.1", ttl=300)
        assert_change_success(completed_batch["changes"], zone=ok_zone, index=3,
                              record_name=private_update_name,
                              input_name=private_update_fqdn, record_data=None,
                              change_type="DeleteRecordSet")
        assert_change_success(completed_batch["changes"], zone=shared_zone, index=4,
                              record_name=shared_update_no_group_name,
                              input_name=shared_update_no_group_fqdn, record_data="1.1.1.1",
                              ttl=300)
        assert_change_success(completed_batch["changes"], zone=shared_zone, index=5,
                              record_name=shared_update_no_group_name,
                              input_name=shared_update_no_group_fqdn, record_data=None,
                              change_type="DeleteRecordSet")
        assert_change_success(completed_batch["changes"], zone=shared_zone, index=6,
                              record_name=shared_update_group_name,
                              input_name=shared_update_group_fqdn,
                              record_data="1.1.1.1", ttl=300)
        assert_change_success(completed_batch["changes"], zone=shared_zone, index=7,
                              record_name=shared_update_group_name,
                              input_name=shared_update_group_fqdn, record_data=None,
                              change_type="DeleteRecordSet")
        assert_change_success(completed_batch["changes"], zone=ok_zone, index=8,
                              record_name=private_delete_name,
                              input_name=private_delete_fqdn, record_data=None,
                              change_type="DeleteRecordSet")
        assert_change_success(completed_batch["changes"], zone=shared_zone, index=9,
                              record_name=shared_delete_name,
                              input_name=shared_delete_fqdn, record_data=None,
                              change_type="DeleteRecordSet")
        # verify record set owner group
        for result_rs in to_delete_ok:
            rs_result = ok_client.get_recordset(result_rs[0], result_rs[1], status=200)
            assert_that(rs_result["recordSet"], is_not(has_key("ownerGroupId")))
        for result_rs in to_delete_shared:
            rs_result = shared_client.get_recordset(result_rs[0], result_rs[1], status=200)
            if rs_result["recordSet"]["name"] == shared_update_group_name:
                assert_that(rs_result["recordSet"]["ownerGroupId"], is_(shared_group["id"]))
            else:
                assert_that(rs_result["recordSet"]["ownerGroupId"], is_(ok_group["id"]))
    finally:
        for tup in to_delete_ok:
            delete_result = ok_client.delete_recordset(tup[0], tup[1], status=202)
            ok_client.wait_until_recordset_change_status(delete_result, "Complete")
        for tup in to_delete_shared:
            delete_result = shared_client.delete_recordset(tup[0], tup[1], status=202)
            shared_client.wait_until_recordset_change_status(delete_result, "Complete")
def test_create_batch_change_validation_without_owner_group_id(shared_zone_test_context):
    """
    Test creating a batch change without owner group ID should validate changes properly

    Without an ownerGroupId on the batch, creates and un-owned updates in the
    shared zone must fail with a "shared zone, so owner group ID must be
    specified" error, while private-zone changes, deletes, and updates of
    already-owned shared records remain valid.
    """
    shared_client = shared_zone_test_context.shared_zone_vinyldns_client
    ok_client = shared_zone_test_context.ok_vinyldns_client
    shared_group = shared_zone_test_context.shared_record_group
    shared_zone = shared_zone_test_context.shared_zone
    ok_zone = shared_zone_test_context.ok_zone
    ok_zone_name = shared_zone_test_context.ok_zone["name"]
    shared_zone_name = shared_zone_test_context.shared_zone["name"]
    # record sets to setup
    private_update_name = generate_record_name()
    private_update_fqdn = f"{private_update_name}.{ok_zone_name}"
    private_update = create_recordset(ok_zone, private_update_name, "A", [{"address": "1.1.1.1"}], 200)
    shared_update_no_group_name = generate_record_name()
    shared_update_no_group_fqdn = f"{shared_update_no_group_name}.{shared_zone_name}"
    shared_update_no_owner_group = create_recordset(shared_zone, shared_update_no_group_name, "A", [{"address": "1.1.1.1"}], 200)
    shared_update_group_name = generate_record_name()
    shared_update_group_fqdn = f"{shared_update_group_name}.{shared_zone_name}"
    shared_update_existing_owner_group = create_recordset(shared_zone, shared_update_group_name, "A", [{"address": "1.1.1.1"}], 200, shared_group["id"])
    private_delete_name = generate_record_name()
    private_delete_fqdn = private_delete_name + f".{ok_zone_name}"
    private_delete = create_recordset(ok_zone, private_delete_name, "A", [{"address": "1.1.1.1"}], 200)
    shared_delete_name = generate_record_name()
    shared_delete_fqdn = f"{shared_delete_name}.{shared_zone_name}"
    shared_delete = create_recordset(shared_zone, shared_delete_name, "A", [{"address": "1.1.1.1"}], 200)
    to_delete_ok = []
    to_delete_shared = []
    private_create_name = generate_record_name()
    private_create_fqdn = f"{private_create_name}.{ok_zone_name}"
    shared_create_name = generate_record_name()
    shared_create_fqdn = f"{shared_create_name}.{shared_zone_name}"
    # No "ownerGroupId" key here — that omission is the behavior under test.
    # The order of entries fixes the response[i] indices asserted below.
    batch_change_input = {
        "changes": [
            get_change_A_AAAA_json(private_create_fqdn),
            get_change_A_AAAA_json(shared_create_fqdn),
            get_change_A_AAAA_json(private_update_fqdn, ttl=300),
            get_change_A_AAAA_json(private_update_fqdn, change_type="DeleteRecordSet"),
            get_change_A_AAAA_json(shared_update_no_group_fqdn, ttl=300),
            get_change_A_AAAA_json(shared_update_no_group_fqdn, change_type="DeleteRecordSet"),
            get_change_A_AAAA_json(shared_update_group_fqdn, ttl=300),
            get_change_A_AAAA_json(shared_update_group_fqdn, change_type="DeleteRecordSet"),
            get_change_A_AAAA_json(private_delete_fqdn, change_type="DeleteRecordSet"),
            get_change_A_AAAA_json(shared_delete_fqdn, change_type="DeleteRecordSet")
        ]
    }
    try:
        # Seed records; track their IDs for cleanup (the batch itself is expected
        # to be rejected with 400, so nothing is actually changed).
        for rs in [private_update, private_delete]:
            create_rs = ok_client.create_recordset(rs, status=202)
            to_delete_ok.append(ok_client.wait_until_recordset_change_status(create_rs, "Complete")["recordSet"]["id"])
        for rs in [shared_update_no_owner_group, shared_update_existing_owner_group, shared_delete]:
            create_rs = shared_client.create_recordset(rs, status=202)
            to_delete_shared.append(shared_client.wait_until_recordset_change_status(create_rs, "Complete")["recordSet"]["id"])
        response = ok_client.create_batch_change(batch_change_input, status=400)
        assert_successful_change_in_error_response(response[0], input_name=private_create_fqdn)
        assert_failed_change_in_error_response(response[1], input_name=shared_create_fqdn,
                                               error_messages=[f"Zone \"{shared_zone_name}\" is a shared zone, so owner group ID must be specified for record \"{shared_create_name}\"."])
        assert_successful_change_in_error_response(response[2], input_name=private_update_fqdn, ttl=300)
        assert_successful_change_in_error_response(response[3], change_type="DeleteRecordSet", input_name=private_update_fqdn)
        assert_failed_change_in_error_response(response[4], input_name=shared_update_no_group_fqdn,
                                               error_messages=[f"Zone \"{shared_zone_name}\" is a shared zone, so owner group ID must be specified for record \"{shared_update_no_group_name}\"."],
                                               ttl=300)
        assert_successful_change_in_error_response(response[5], change_type="DeleteRecordSet", input_name=shared_update_no_group_fqdn)
        assert_successful_change_in_error_response(response[6], input_name=shared_update_group_fqdn, ttl=300)
        assert_successful_change_in_error_response(response[7], change_type="DeleteRecordSet", input_name=shared_update_group_fqdn)
        assert_successful_change_in_error_response(response[8], change_type="DeleteRecordSet", input_name=private_delete_fqdn)
        assert_successful_change_in_error_response(response[9], change_type="DeleteRecordSet", input_name=shared_delete_fqdn)
    finally:
        for rsId in to_delete_ok:
            delete_result = ok_client.delete_recordset(ok_zone["id"], rsId, status=202)
            ok_client.wait_until_recordset_change_status(delete_result, "Complete")
        for rsId in to_delete_shared:
            delete_result = shared_client.delete_recordset(shared_zone["id"], rsId, status=202)
            shared_client.wait_until_recordset_change_status(delete_result, "Complete")
def test_create_batch_delete_recordset_for_unassociated_user_in_owner_group_succeeds(shared_zone_test_context):
    """
    An unassociated user who belongs to a record's owner group can delete that
    record from a shared zone via a batch change.
    """
    shared_client = shared_zone_test_context.shared_zone_vinyldns_client
    ok_client = shared_zone_test_context.ok_vinyldns_client
    shared_zone = shared_zone_test_context.shared_zone
    owner_group = shared_zone_test_context.shared_record_group
    zone_name = shared_zone_test_context.shared_zone["name"]

    record_name = generate_record_name()
    record_fqdn = f"{record_name}.{zone_name}"
    # Created by the shared-zone admin, but owned by a group the ok user is in.
    record_to_delete = create_recordset(shared_zone, record_name, "A", [{"address": "1.1.1.1"}], 200, owner_group["id"])

    batch_change_input = {
        "changes": [
            get_change_A_AAAA_json(record_fqdn, change_type="DeleteRecordSet")
        ]
    }

    creation = shared_client.create_recordset(record_to_delete, status=202)
    shared_client.wait_until_recordset_change_status(creation, "Complete")

    # Owner-group membership alone authorizes the delete for the ok user.
    submitted = ok_client.create_batch_change(batch_change_input, status=202)
    completed = ok_client.wait_until_batch_change_completed(submitted)
    assert_change_success(completed["changes"], zone=shared_zone, index=0,
                          record_name=record_name,
                          input_name=record_fqdn, record_data=None,
                          change_type="DeleteRecordSet")
def test_create_batch_delete_recordset_for_unassociated_user_not_in_owner_group_fails(shared_zone_test_context):
    """
    An unassociated user outside a record's owner group cannot delete that
    record from a shared zone via a batch change.
    """
    shared_client = shared_zone_test_context.shared_zone_vinyldns_client
    unassociated_client = shared_zone_test_context.unassociated_client
    shared_zone = shared_zone_test_context.shared_zone
    owner_group = shared_zone_test_context.shared_record_group
    zone_name = shared_zone_test_context.shared_zone["name"]
    owner_group_name = owner_group["name"]

    record_name = generate_record_name()
    record_fqdn = f"{record_name}.{zone_name}"
    record_to_delete = create_recordset(shared_zone, record_name, "A", [{"address": "1.1.1.1"}], 200, owner_group["id"])
    batch_change_input = {
        "changes": [
            get_change_A_AAAA_json(record_fqdn, change_type="DeleteRecordSet")
        ]
    }

    creation = None
    try:
        creation = shared_client.create_recordset(record_to_delete, status=202)
        shared_client.wait_until_recordset_change_status(creation, "Complete")

        # Whole batch is rejected (400); the change carries an authorization error.
        response = unassociated_client.create_batch_change(batch_change_input, status=400)
        assert_failed_change_in_error_response(response[0], input_name=record_fqdn,
                                               change_type="DeleteRecordSet",
                                               error_messages=[f'User "list-group-user" is not authorized. Contact record owner group: '
                                                               f'{owner_group_name} at test@test.com to make DNS changes.'])
    finally:
        # The batch never ran, so the record still exists and must be cleaned up.
        if creation:
            cleanup = shared_client.delete_recordset(shared_zone["id"], creation["recordSet"]["id"], status=202)
            shared_client.wait_until_recordset_change_status(cleanup, "Complete")
def test_create_batch_delete_recordset_for_zone_admin_not_in_owner_group_succeeds(shared_zone_test_context):
    """
    A zone admin can delete a record from a shared zone via a batch change even
    when the record is owned by a group the admin does not belong to.
    """
    shared_client = shared_zone_test_context.shared_zone_vinyldns_client
    ok_client = shared_zone_test_context.ok_vinyldns_client
    shared_zone = shared_zone_test_context.shared_zone
    ok_group = shared_zone_test_context.ok_group
    zone_name = shared_zone_test_context.shared_zone["name"]

    record_name = generate_record_name()
    record_fqdn = f"{record_name}.{zone_name}"
    # Owned by the ok group; the shared-zone admin relies on zone-admin rights instead.
    record_to_delete = create_recordset(shared_zone, record_name, "A", [{"address": "1.1.1.1"}], 200, ok_group["id"])
    batch_change_input = {
        "changes": [
            get_change_A_AAAA_json(record_fqdn, change_type="DeleteRecordSet")
        ]
    }

    creation = ok_client.create_recordset(record_to_delete, status=202)
    shared_client.wait_until_recordset_change_status(creation, "Complete")

    submitted = shared_client.create_batch_change(batch_change_input, status=202)
    completed = shared_client.wait_until_batch_change_completed(submitted)
    assert_change_success(completed["changes"], zone=shared_zone, index=0,
                          record_name=record_name,
                          input_name=record_fqdn, record_data=None,
                          change_type="DeleteRecordSet")
def test_create_batch_update_record_in_shared_zone_for_unassociated_user_in_owner_group_succeeds(
        shared_zone_test_context):
    """
    A user in a record's owner group can update that record in a shared zone
    via a batch change (add the new data, delete the old record set).
    """
    shared_client = shared_zone_test_context.shared_zone_vinyldns_client
    ok_client = shared_zone_test_context.ok_vinyldns_client
    shared_zone = shared_zone_test_context.shared_zone
    owner_group = shared_zone_test_context.shared_record_group
    zone_name = shared_zone_test_context.shared_zone["name"]

    record_name = generate_record_name()
    record_fqdn = f"{record_name}.{zone_name}"
    mx_record = create_recordset(shared_zone, record_name, "MX", [{"preference": 1, "exchange": "foo.bar."}], 200,
                                 owner_group["id"])
    # Add + DeleteRecordSet on the same FQDN together form an update.
    batch_change_input = {
        "changes": [
            get_change_MX_json(record_fqdn, ttl=300),
            get_change_MX_json(record_fqdn, change_type="DeleteRecordSet")
        ]
    }

    creation = None
    try:
        creation = shared_client.create_recordset(mx_record, status=202)
        shared_client.wait_until_recordset_change_status(creation, "Complete")

        submitted = ok_client.create_batch_change(batch_change_input, status=202)
        completed = ok_client.wait_until_batch_change_completed(submitted)
        assert_change_success(completed["changes"], zone=shared_zone, index=0, record_name=record_name,
                              ttl=300,
                              record_type="MX", input_name=record_fqdn,
                              record_data={"preference": 1, "exchange": "foo.bar."})
        assert_change_success(completed["changes"], zone=shared_zone, index=1, record_name=record_name,
                              record_type="MX", input_name=record_fqdn, record_data=None,
                              change_type="DeleteRecordSet")
    finally:
        if creation:
            cleanup = shared_client.delete_recordset(shared_zone["id"], creation["recordSet"]["id"], status=202)
            shared_client.wait_until_recordset_change_status(cleanup, "Complete")
def test_create_batch_with_global_acl_rule_applied_succeeds(shared_zone_test_context):
    """
    Test that a user with a relevant global acl rule can update forward and reverse records, regardless of their current ownership

    Also verifies the batch does not transfer ownership: the A record keeps its
    original owner group, and the previously unowned PTR record gains none.
    """
    shared_client = shared_zone_test_context.shared_zone_vinyldns_client
    dummy_client = shared_zone_test_context.dummy_vinyldns_client
    shared_zone = shared_zone_test_context.shared_zone
    ok_client = shared_zone_test_context.ok_vinyldns_client
    classless_base_zone = shared_zone_test_context.classless_base_zone
    create_a_rs = None
    create_ptr_rs = None
    dummy_group_id = shared_zone_test_context.dummy_group["id"]
    ip4_prefix = shared_zone_test_context.ip4_classless_prefix
    shared_zone_name = shared_zone_test_context.shared_zone["name"]
    a_name = generate_record_name()
    a_fqdn = f"{a_name}.{shared_zone_name}"
    # A record owned by a group the dummy user is NOT in; PTR record has no owner.
    a_record = create_recordset(shared_zone, a_name, "A", [{"address": "1.1.1.1"}], 200, "shared-zone-group")
    ptr_record = create_recordset(classless_base_zone, "44", "PTR", [{"ptrdname": "foo."}], 200, None)
    batch_change_input = {
        "ownerGroupId": dummy_group_id,
        "changes": [
            get_change_A_AAAA_json(a_fqdn, record_type="A", ttl=200, address=f"{ip4_prefix}.44"),
            get_change_PTR_json(f"{ip4_prefix}.44", ptrdname=a_fqdn),
            get_change_A_AAAA_json(a_fqdn, record_type="A", address="1.1.1.1", change_type="DeleteRecordSet"),
            get_change_PTR_json(f"{ip4_prefix}.44", change_type="DeleteRecordSet")
        ]
    }
    try:
        create_a_rs = shared_client.create_recordset(a_record, status=202)
        shared_client.wait_until_recordset_change_status(create_a_rs, "Complete")
        create_ptr_rs = ok_client.create_recordset(ptr_record, status=202)
        ok_client.wait_until_recordset_change_status(create_ptr_rs, "Complete")
        # The dummy user has no ownership or zone access — only the global ACL rule authorizes this.
        result = dummy_client.create_batch_change(batch_change_input, status=202)
        completed_batch = dummy_client.wait_until_batch_change_completed(result)
        assert_change_success(completed_batch["changes"], zone=shared_zone, index=0,
                              record_name=a_name, ttl=200,
                              record_type="A", input_name=a_fqdn, record_data=f"{ip4_prefix}.44")
        assert_change_success(completed_batch["changes"], zone=classless_base_zone, index=1,
                              record_name="44",
                              record_type="PTR", input_name=f"{ip4_prefix}.44",
                              record_data=a_fqdn)
        assert_change_success(completed_batch["changes"], zone=shared_zone, index=2,
                              record_name=a_name, ttl=200,
                              record_type="A", input_name=a_fqdn, record_data=None,
                              change_type="DeleteRecordSet")
        assert_change_success(completed_batch["changes"], zone=classless_base_zone, index=3,
                              record_name="44",
                              record_type="PTR", input_name=f"{ip4_prefix}.44", record_data=None,
                              change_type="DeleteRecordSet")
    finally:
        if create_a_rs:
            # Ownership must be unchanged by the batch: still the original group.
            retrieved = shared_client.get_recordset(shared_zone["id"], create_a_rs["recordSet"]["id"])
            retrieved_rs = retrieved["recordSet"]
            assert_that(retrieved_rs["ownerGroupId"], is_("shared-zone-group"))
            assert_that(retrieved_rs["ownerGroupName"], is_("testSharedZoneGroup"))
            delete_a_rs = shared_client.delete_recordset(shared_zone["id"], create_a_rs["recordSet"]["id"], status=202)
            shared_client.wait_until_recordset_change_status(delete_a_rs, "Complete")
        if create_ptr_rs:
            # BUG FIX: the PTR record lives in classless_base_zone (it was created
            # and is deleted there), so look it up with that zone's id, not the
            # shared zone's.
            retrieved = dummy_client.get_recordset(classless_base_zone["id"], create_ptr_rs["recordSet"]["id"])
            retrieved_rs = retrieved["recordSet"]
            # BUG FIX: has_key expects a key name; the original passed a set literal
            # ({dummy_group_name}), which can never be a dict key, so the assertion
            # was vacuous. Intent: the unowned PTR record gained no owner fields.
            assert_that(retrieved_rs, is_not(has_key("ownerGroupId")))
            assert_that(retrieved_rs, is_not(has_key("ownerGroupName")))
            delete_ptr_rs = ok_client.delete_recordset(classless_base_zone["id"], create_ptr_rs["recordSet"]["id"],
                                                       status=202)
            ok_client.wait_until_recordset_change_status(delete_ptr_rs, "Complete")
def test_create_batch_with_irrelevant_global_acl_rule_applied_fails(shared_zone_test_context):
    """
    A global ACL rule that does not apply to the user grants no access: the
    batch change against an owned record is rejected.
    """
    test_user_client = shared_zone_test_context.test_user_client
    shared_client = shared_zone_test_context.shared_zone_vinyldns_client
    shared_zone = shared_zone_test_context.shared_zone
    ip4_prefix = shared_zone_test_context.ip4_classless_prefix
    zone_name = shared_zone_test_context.shared_zone["name"]

    a_name = generate_record_name()
    a_fqdn = f"{a_name}.{zone_name}"
    owned_record = create_recordset(shared_zone, a_name, "A", [{"address": "1.1.1.1"}], 200, "shared-zone-group")
    batch_change_input = {
        "changes": [
            get_change_A_AAAA_json(a_fqdn, record_type="A", address=f"{ip4_prefix}.45"),
            get_change_A_AAAA_json(a_fqdn, record_type="A", change_type="DeleteRecordSet"),
        ]
    }

    creation = None
    try:
        creation = shared_client.create_recordset(owned_record, status=202)
        shared_client.wait_until_recordset_change_status(creation, "Complete")

        # Rejected with 400; the error points at the record owner group.
        response = test_user_client.create_batch_change(batch_change_input, status=400)
        assert_failed_change_in_error_response(response[0], input_name=a_fqdn, record_type="A",
                                               change_type="Add", record_data=f"{ip4_prefix}.45",
                                               error_messages=['User "testuser" is not authorized. Contact record owner group: testSharedZoneGroup at email to make DNS changes.'])
    finally:
        if creation:
            cleanup = shared_client.delete_recordset(shared_zone["id"], creation["recordSet"]["id"], status=202)
            shared_client.wait_until_recordset_change_status(cleanup, "Complete")
@pytest.mark.manual_batch_review
def test_create_batch_with_zone_name_requiring_manual_review(shared_zone_test_context):
    """
    Confirm that individual changes matching zone names requiring review get correctly flagged for manual review
    """
    rejecter = shared_zone_test_context.support_user_client
    client = shared_zone_test_context.ok_vinyldns_client
    review_zone_name = shared_zone_test_context.requires_review_zone["name"]
    batch_change_input = {
        "changes": [
            get_change_A_AAAA_json(f"add-test-batch.{review_zone_name}"),
            get_change_A_AAAA_json(f"update-test-batch.{review_zone_name}", change_type="DeleteRecordSet"),
            get_change_A_AAAA_json(f"update-test-batch.{review_zone_name}"),
            get_change_A_AAAA_json(f"delete-test-batch.{review_zone_name}", change_type="DeleteRecordSet")
        ],
        "ownerGroupId": shared_zone_test_context.ok_group["id"]
    }
    response = None
    try:
        # Accepted (202) but parked for review, not executed.
        response = client.create_batch_change(batch_change_input, status=202)
        get_batch = client.get_batch_change(response["id"])
        assert_that(get_batch["status"], is_("PendingReview"))
        assert_that(get_batch["approvalStatus"], is_("PendingReview"))
        # NOTE(review): only changes 0-2 of the 4 submitted are checked here —
        # confirm whether leaving the 4th change unverified is intentional.
        for i in range(0, 3):
            assert_that(get_batch["changes"][i]["status"], is_("NeedsReview"))
            assert_that(get_batch["changes"][i]["validationErrors"][0]["errorType"], is_("RecordRequiresManualReview"))
    finally:
        # Clean up so data doesn't change
        if response:
            rejecter.reject_batch_change(response["id"], status=200)
def test_create_batch_delete_record_for_invalid_record_data_fails(shared_zone_test_context):
    """
    Test delete record set fails for non-existent record and non-existent record data
    """
    client = shared_zone_test_context.ok_vinyldns_client
    ok_zone_name = shared_zone_test_context.ok_zone["name"]
    a_delete_name = generate_record_name()
    a_delete_fqdn = a_delete_name + f".{ok_zone_name}"
    # NOTE(review): sibling tests pass the bare record name to create_recordset,
    # but this one passes the FQDN — presumably equivalent because the zone name
    # is absolute; confirm this is intentional.
    a_delete = create_recordset(shared_zone_test_context.ok_zone, a_delete_fqdn, "A", [{"address": "1.1.1.1"}])
    batch_change_input = {
        "comments": "test delete record failures",
        "changes": [
            # Full delete of a record that was never created.
            get_change_A_AAAA_json(f"delete-non-existent-record.{ok_zone_name}", change_type="DeleteRecordSet"),
            # Single-entry delete whose address is not present in the existing recordset.
            get_change_A_AAAA_json(a_delete_fqdn, address="4.5.6.7", change_type="DeleteRecordSet")
        ]
    }
    to_delete = []
    try:
        create_rs = client.create_recordset(a_delete, status=202)
        to_delete.append(client.wait_until_recordset_change_status(create_rs, "Complete"))
        # Whole batch is rejected (400); each change carries its own error message.
        errors = client.create_batch_change(batch_change_input, status=400)
        assert_failed_change_in_error_response(errors[0], input_name=f"delete-non-existent-record.{ok_zone_name}", record_data="1.1.1.1", change_type="DeleteRecordSet",
                                               error_messages=[f'Record "delete-non-existent-record.{ok_zone_name}" Does Not Exist: cannot delete a record that does not exist.'])
        assert_failed_change_in_error_response(errors[1], input_name=a_delete_fqdn, record_data="4.5.6.7", change_type="DeleteRecordSet",
                                               error_messages=["Record data 4.5.6.7 does not exist for \"" + a_delete_fqdn + "\"."])
    finally:
        clear_recordset_list(to_delete, client)
@pytest.mark.serial
def test_create_batch_delete_record_access_checks(shared_zone_test_context):
    """
    Test access for full-delete DeleteRecord (delete) and non-full-delete DeleteRecord (update)
    """
    ok_client = shared_zone_test_context.ok_vinyldns_client
    ok_zone = shared_zone_test_context.ok_zone
    dummy_client = shared_zone_test_context.dummy_vinyldns_client
    dummy_group_id = shared_zone_test_context.dummy_group["id"]
    ok_zone_name = shared_zone_test_context.ok_zone["name"]
    ok_group_name = shared_zone_test_context.ok_group["name"]
    # ACLs granted to the dummy group: Delete on all A records, Write (but not
    # Delete) on all TXT records.
    a_delete_acl = generate_acl_rule("Delete", groupId=dummy_group_id, recordMask=".*", recordTypes=["A"])
    txt_write_acl = generate_acl_rule("Write", groupId=dummy_group_id, recordMask=".*", recordTypes=["TXT"])
    a_update_name = generate_record_name()
    a_update_fqdn = a_update_name + f".{ok_zone_name}"
    a_update = create_recordset(ok_zone, a_update_name, "A", [{"address": "1.1.1.1"}])
    a_delete_name = generate_record_name()
    a_delete_fqdn = a_delete_name + f".{ok_zone_name}"
    a_delete = create_recordset(ok_zone, a_delete_name, "A", [{"address": "1.1.1.1"}])
    txt_update_name = generate_record_name()
    txt_update_fqdn = txt_update_name + f".{ok_zone_name}"
    txt_update = create_recordset(ok_zone, txt_update_name, "TXT", [{"text": "test"}])
    txt_delete_name = generate_record_name()
    txt_delete_fqdn = txt_delete_name + f".{ok_zone_name}"
    txt_delete = create_recordset(ok_zone, txt_delete_name, "TXT", [{"text": "test"}])
    batch_change_input = {
        "comments": "Testing DeleteRecord access levels",
        "changes": [
            # A: delete+add (an update) and a standalone full delete — both
            # permitted by the Delete ACL.
            get_change_A_AAAA_json(a_update_fqdn, change_type="DeleteRecordSet"),
            get_change_A_AAAA_json(a_update_fqdn, address="4.5.6.7"),
            get_change_A_AAAA_json(a_delete_fqdn, change_type="DeleteRecordSet"),
            # TXT: delete+add (update) is allowed by Write; the standalone full
            # delete is not.
            get_change_TXT_json(txt_update_fqdn, change_type="DeleteRecordSet"),
            get_change_TXT_json(txt_update_fqdn, text="updated text"),
            get_change_TXT_json(txt_delete_fqdn, change_type="DeleteRecordSet")
        ]
    }
    to_delete = []
    try:
        add_ok_acl_rules(shared_zone_test_context, [a_delete_acl, txt_write_acl])
        for create_json in [a_update, a_delete, txt_update, txt_delete]:
            create_result = ok_client.create_recordset(create_json, status=202)
            to_delete.append(ok_client.wait_until_recordset_change_status(create_result, "Complete"))
        # One failing change fails the whole batch with 400; the other changes
        # are reported as would-be-successful inside the error response.
        response = dummy_client.create_batch_change(batch_change_input, status=400)
        assert_successful_change_in_error_response(response[0], input_name=a_update_fqdn, record_data="1.1.1.1", change_type="DeleteRecordSet")
        assert_successful_change_in_error_response(response[1], input_name=a_update_fqdn, record_data="4.5.6.7")
        assert_successful_change_in_error_response(response[2], input_name=a_delete_fqdn, record_data="1.1.1.1", change_type="DeleteRecordSet")
        assert_successful_change_in_error_response(response[3], input_name=txt_update_fqdn, record_type="TXT", record_data="test", change_type="DeleteRecordSet")
        assert_successful_change_in_error_response(response[4], input_name=txt_update_fqdn, record_type="TXT", record_data="updated text")
        # Only the TXT full delete is rejected: Write access does not permit
        # deleting a whole record set.
        assert_failed_change_in_error_response(response[5], input_name=txt_delete_fqdn, record_type="TXT", record_data="test", change_type="DeleteRecordSet",
                                               error_messages=[f'User "dummy" is not authorized. Contact zone owner group: {ok_group_name} at test@test.com to make DNS changes.'])
    finally:
        clear_ok_acl_rules(shared_zone_test_context)
        clear_recordset_list(to_delete, ok_client)
@pytest.mark.skip_production
def test_create_batch_multi_record_update_succeeds(shared_zone_test_context):
    """
    Test record sets with multiple records can be added, updated and deleted in batch (relies on skip-prod)
    """
    client = shared_zone_test_context.ok_vinyldns_client
    ok_zone = shared_zone_test_context.ok_zone
    ok_zone_name = shared_zone_test_context.ok_zone["name"]
    # record sets to setup
    # Fixture naming convention:
    #   *_record_set  - whole-recordset delete (no record data in the change)
    #   *_record_full - every individual entry deleted one by one (full delete)
    #   *_record      - one individual entry deleted alongside adds
    #   *_record_only - one individual entry deleted, nothing added
    a_update_record_set_name = generate_record_name()
    a_update_record_set_fqdn = a_update_record_set_name + f".{ok_zone_name}"
    a_update_record_set = create_recordset(ok_zone, a_update_record_set_name, "A", [{"address": "1.1.1.1"}, {"address": "1.1.1.2"}], 200)
    txt_update_record_set_name = generate_record_name()
    txt_update_record_set_fqdn = txt_update_record_set_name + f".{ok_zone_name}"
    txt_update_record_set = create_recordset(ok_zone, txt_update_record_set_name, "TXT", [{"text": "hello"}, {"text": "again"}], 200)
    a_update_record_full_name = generate_record_name()
    a_update_record_full_fqdn = a_update_record_full_name + f".{ok_zone_name}"
    a_update_record_full = create_recordset(ok_zone, a_update_record_full_name, "A", [{"address": "1.1.1.1"}, {"address": "1.1.1.2"}], 200)
    txt_update_record_full_name = generate_record_name()
    txt_update_record_full_fqdn = txt_update_record_full_name + f".{ok_zone_name}"
    txt_update_record_full = create_recordset(ok_zone, txt_update_record_full_name, "TXT", [{"text": "hello"}, {"text": "again"}], 200)
    a_update_record_name = generate_record_name()
    a_update_record_fqdn = a_update_record_name + f".{ok_zone_name}"
    a_update_record = create_recordset(ok_zone, a_update_record_name, "A", [{"address": "1.1.1.1"}, {"address": "1.1.1.2"}], 200)
    txt_update_record_name = generate_record_name()
    txt_update_record_fqdn = txt_update_record_name + f".{ok_zone_name}"
    txt_update_record = create_recordset(ok_zone, txt_update_record_name, "TXT", [{"text": "hello"}, {"text": "again"}], 200)
    a_update_record_only_name = generate_record_name()
    a_update_record_only_fqdn = a_update_record_only_name + f".{ok_zone_name}"
    a_update_record_only = create_recordset(ok_zone, a_update_record_only_name, "A", [{"address": "1.1.1.1"}, {"address": "1.1.1.2"}], 200)
    txt_update_record_only_name = generate_record_name()
    txt_update_record_only_fqdn = txt_update_record_only_name + f".{ok_zone_name}"
    txt_update_record_only = create_recordset(ok_zone, txt_update_record_only_name, "TXT", [{"text": "hello"}, {"text": "again"}], 200)
    a_delete_record_set_name = generate_record_name()
    a_delete_record_set_fqdn = a_delete_record_set_name + f".{ok_zone_name}"
    a_delete_record_set = create_recordset(ok_zone, a_delete_record_set_name, "A", [{"address": "1.1.1.1"}, {"address": "1.1.1.2"}], 200)
    txt_delete_record_set_name = generate_record_name()
    txt_delete_record_set_fqdn = txt_delete_record_set_name + f".{ok_zone_name}"
    txt_delete_record_set = create_recordset(ok_zone, txt_delete_record_set_name, "TXT", [{"text": "hello"}, {"text": "again"}], 200)
    a_delete_record_name = generate_record_name()
    a_delete_record_fqdn = a_delete_record_name + f".{ok_zone_name}"
    a_delete_record = create_recordset(ok_zone, a_delete_record_name, "A", [{"address": "1.1.1.1"}, {"address": "1.1.1.2"}], 200)
    txt_delete_record_name = generate_record_name()
    txt_delete_record_fqdn = txt_delete_record_name + f".{ok_zone_name}"
    txt_delete_record = create_recordset(ok_zone, txt_delete_record_name, "TXT", [{"text": "hello"}, {"text": "again"}], 200)
    cname_delete_record_name = generate_record_name()
    cname_delete_record_fqdn = cname_delete_record_name + f".{ok_zone_name}"
    # Mixed-case CNAME data: the delete below uses lowercase to prove matching
    # is case-insensitive.
    cname_delete_record = create_recordset(ok_zone, cname_delete_record_name, "CNAME", [{"cname": "cAsEiNSeNsItIve.cNaMe."}], 200)
    a_delete_record_and_record_set_name = generate_record_name()
    a_delete_record_and_record_set_fqdn = a_delete_record_and_record_set_name + f".{ok_zone_name}"
    a_delete_record_and_record_set = create_recordset(ok_zone, a_delete_record_and_record_set_name, "A", [{"address": "1.1.1.1"}, {"address": "1.1.1.2"}], 200)
    txt_delete_record_and_record_set_name = generate_record_name()
    txt_delete_record_and_record_set_fqdn = txt_delete_record_and_record_set_name + f".{ok_zone_name}"
    txt_delete_record_and_record_set = create_recordset(ok_zone, txt_delete_record_and_record_set_name, "TXT", [{"text": "hello"}, {"text": "again"}], 200)
    batch_change_input = {
        "comments": "this is optional",
        "changes": [
            ## Updates
            # Add + DeleteRRSet
            get_change_A_AAAA_json(a_update_record_set_fqdn, change_type="DeleteRecordSet"),
            get_change_A_AAAA_json(a_update_record_set_fqdn, address="1.2.3.4"),
            get_change_A_AAAA_json(a_update_record_set_fqdn, address="4.5.6.7"),
            get_change_TXT_json(txt_update_record_set_fqdn, change_type="DeleteRecordSet"),
            get_change_TXT_json(txt_update_record_set_fqdn, text="some-multi-text"),
            get_change_TXT_json(txt_update_record_set_fqdn, text="more-multi-text"),
            # Add + DeleteRecord (full delete)
            get_change_A_AAAA_json(a_update_record_full_fqdn, address="1.1.1.1", change_type="DeleteRecordSet"),
            get_change_A_AAAA_json(a_update_record_full_fqdn, address="1.1.1.2", change_type="DeleteRecordSet"),
            get_change_A_AAAA_json(a_update_record_full_fqdn, address="1.2.3.4"),
            get_change_A_AAAA_json(a_update_record_full_fqdn, address="4.5.6.7"),
            get_change_TXT_json(txt_update_record_full_fqdn, text="hello", change_type="DeleteRecordSet"),
            get_change_TXT_json(txt_update_record_full_fqdn, text="again", change_type="DeleteRecordSet"),
            get_change_TXT_json(txt_update_record_full_fqdn, text="some-multi-text"),
            get_change_TXT_json(txt_update_record_full_fqdn, text="more-multi-text"),
            # Add + single DeleteRecord
            get_change_A_AAAA_json(a_update_record_fqdn, address="1.1.1.1", change_type="DeleteRecordSet"),
            get_change_A_AAAA_json(a_update_record_fqdn, address="1.2.3.4"),
            get_change_A_AAAA_json(a_update_record_fqdn, address="4.5.6.7"),
            get_change_TXT_json(txt_update_record_fqdn, text="hello", change_type="DeleteRecordSet"),
            get_change_TXT_json(txt_update_record_fqdn, text="some-multi-text"),
            get_change_TXT_json(txt_update_record_fqdn, text="more-multi-text"),
            # Single DeleteRecord
            get_change_A_AAAA_json(a_update_record_only_fqdn, address="1.1.1.1", change_type="DeleteRecordSet"),
            get_change_TXT_json(txt_update_record_only_fqdn, text="hello", change_type="DeleteRecordSet"),
            ## Full deletes
            # Delete RRSet
            get_change_A_AAAA_json(a_delete_record_set_fqdn, change_type="DeleteRecordSet"),
            get_change_TXT_json(txt_delete_record_set_fqdn, change_type="DeleteRecordSet"),
            # DeleteRecord (full delete)
            get_change_A_AAAA_json(a_delete_record_fqdn, address="1.1.1.1", change_type="DeleteRecordSet"),
            get_change_A_AAAA_json(a_delete_record_fqdn, address="1.1.1.2", change_type="DeleteRecordSet"),
            get_change_TXT_json(txt_delete_record_fqdn, text="hello", change_type="DeleteRecordSet"),
            get_change_TXT_json(txt_delete_record_fqdn, text="again", change_type="DeleteRecordSet"),
            get_change_CNAME_json(cname_delete_record_fqdn, cname="caseinsensitive.cname.", change_type="DeleteRecordSet"),
            # DeleteRecord + DeleteRRSet
            get_change_A_AAAA_json(a_delete_record_and_record_set_fqdn, address="1.1.1.1", change_type="DeleteRecordSet"),
            get_change_A_AAAA_json(a_delete_record_and_record_set_fqdn, change_type="DeleteRecordSet"),
            get_change_TXT_json(txt_delete_record_and_record_set_fqdn, text="hello", change_type="DeleteRecordSet"),
            get_change_TXT_json(txt_delete_record_and_record_set_fqdn, change_type="DeleteRecordSet"),
        ]
    }
    to_delete = []
    try:
        for rs in [a_update_record_set, txt_update_record_set, a_update_record_full, txt_update_record_full, a_update_record, txt_update_record, a_update_record_only, txt_update_record_only,
                   a_delete_record_set, txt_delete_record_set, a_delete_record, txt_delete_record, cname_delete_record, a_delete_record_and_record_set, txt_delete_record_and_record_set]:
            create_rs = client.create_recordset(rs, status=202)
            to_delete.append(client.wait_until_recordset_change_status(create_rs, "Complete"))
        initial_result = client.create_batch_change(batch_change_input, status=202)
        result = client.wait_until_batch_change_completed(initial_result)
        assert_that(result["status"], is_("Complete"))
        # Check batch change response
        # Indices below mirror the order of batch_change_input["changes"].
        assert_change_success(result["changes"], zone=ok_zone, index=0, input_name=a_update_record_set_fqdn, record_name=a_update_record_set_name, record_data=None,
                              change_type="DeleteRecordSet")
        assert_change_success(result["changes"], zone=ok_zone, index=1, input_name=a_update_record_set_fqdn, record_name=a_update_record_set_name, record_data="1.2.3.4")
        assert_change_success(result["changes"], zone=ok_zone, index=2, input_name=a_update_record_set_fqdn, record_name=a_update_record_set_name, record_data="4.5.6.7")
        assert_change_success(result["changes"], zone=ok_zone, index=3, input_name=txt_update_record_set_fqdn, record_name=txt_update_record_set_name, record_type="TXT",
                              record_data=None, change_type="DeleteRecordSet")
        assert_change_success(result["changes"], zone=ok_zone, index=4, input_name=txt_update_record_set_fqdn, record_name=txt_update_record_set_name, record_type="TXT",
                              record_data="some-multi-text")
        assert_change_success(result["changes"], zone=ok_zone, index=5, input_name=txt_update_record_set_fqdn, record_name=txt_update_record_set_name, record_type="TXT",
                              record_data="more-multi-text")
        assert_change_success(result["changes"], zone=ok_zone, index=6, input_name=a_update_record_full_fqdn, record_name=a_update_record_full_name, record_data="1.1.1.1",
                              change_type="DeleteRecordSet")
        assert_change_success(result["changes"], zone=ok_zone, index=7, input_name=a_update_record_full_fqdn, record_name=a_update_record_full_name, record_data="1.1.1.2",
                              change_type="DeleteRecordSet")
        assert_change_success(result["changes"], zone=ok_zone, index=8, input_name=a_update_record_full_fqdn, record_name=a_update_record_full_name, record_data="1.2.3.4")
        assert_change_success(result["changes"], zone=ok_zone, index=9, input_name=a_update_record_full_fqdn, record_name=a_update_record_full_name, record_data="4.5.6.7")
        assert_change_success(result["changes"], zone=ok_zone, index=10, input_name=txt_update_record_full_fqdn, record_name=txt_update_record_full_name, record_type="TXT",
                              record_data="hello", change_type="DeleteRecordSet")
        assert_change_success(result["changes"], zone=ok_zone, index=11, input_name=txt_update_record_full_fqdn, record_name=txt_update_record_full_name, record_type="TXT",
                              record_data="again", change_type="DeleteRecordSet")
        assert_change_success(result["changes"], zone=ok_zone, index=12, input_name=txt_update_record_full_fqdn, record_name=txt_update_record_full_name, record_type="TXT",
                              record_data="some-multi-text")
        assert_change_success(result["changes"], zone=ok_zone, index=13, input_name=txt_update_record_full_fqdn, record_name=txt_update_record_full_name, record_type="TXT",
                              record_data="more-multi-text")
        assert_change_success(result["changes"], zone=ok_zone, index=14, input_name=a_update_record_fqdn, record_name=a_update_record_name, record_data="1.1.1.1",
                              change_type="DeleteRecordSet")
        assert_change_success(result["changes"], zone=ok_zone, index=15, input_name=a_update_record_fqdn, record_name=a_update_record_name, record_data="1.2.3.4")
        assert_change_success(result["changes"], zone=ok_zone, index=16, input_name=a_update_record_fqdn, record_name=a_update_record_name, record_data="4.5.6.7")
        assert_change_success(result["changes"], zone=ok_zone, index=17, input_name=txt_update_record_fqdn, record_name=txt_update_record_name, record_type="TXT", record_data="hello",
                              change_type="DeleteRecordSet")
        assert_change_success(result["changes"], zone=ok_zone, index=18, input_name=txt_update_record_fqdn, record_name=txt_update_record_name, record_type="TXT",
                              record_data="some-multi-text")
        assert_change_success(result["changes"], zone=ok_zone, index=19, input_name=txt_update_record_fqdn, record_name=txt_update_record_name, record_type="TXT",
                              record_data="more-multi-text")
        assert_change_success(result["changes"], zone=ok_zone, index=20, input_name=a_update_record_only_fqdn, record_name=a_update_record_only_name, record_data="1.1.1.1",
                              change_type="DeleteRecordSet")
        assert_change_success(result["changes"], zone=ok_zone, index=21, input_name=txt_update_record_only_fqdn, record_name=txt_update_record_only_name, record_type="TXT",
                              record_data="hello", change_type="DeleteRecordSet")
        assert_change_success(result["changes"], zone=ok_zone, index=22, input_name=a_delete_record_set_fqdn, record_name=a_delete_record_set_name, record_data=None,
                              change_type="DeleteRecordSet")
        assert_change_success(result["changes"], zone=ok_zone, index=23, input_name=txt_delete_record_set_fqdn, record_name=txt_delete_record_set_name, record_type="TXT",
                              record_data=None, change_type="DeleteRecordSet")
        assert_change_success(result["changes"], zone=ok_zone, index=24, input_name=a_delete_record_fqdn, record_name=a_delete_record_name, record_data="1.1.1.1",
                              change_type="DeleteRecordSet")
        assert_change_success(result["changes"], zone=ok_zone, index=25, input_name=a_delete_record_fqdn, record_name=a_delete_record_name, record_data="1.1.1.2",
                              change_type="DeleteRecordSet")
        assert_change_success(result["changes"], zone=ok_zone, index=26, input_name=txt_delete_record_fqdn, record_name=txt_delete_record_name, record_type="TXT", record_data="hello",
                              change_type="DeleteRecordSet")
        assert_change_success(result["changes"], zone=ok_zone, index=27, input_name=txt_delete_record_fqdn, record_name=txt_delete_record_name, record_type="TXT", record_data="again",
                              change_type="DeleteRecordSet")
        assert_change_success(result["changes"], zone=ok_zone, index=28, input_name=cname_delete_record_fqdn, record_name=cname_delete_record_name, record_type="CNAME",
                              record_data="caseinsensitive.cname.", change_type="DeleteRecordSet")
        assert_change_success(result["changes"], zone=ok_zone, index=29, input_name=a_delete_record_and_record_set_fqdn, record_name=a_delete_record_and_record_set_name,
                              record_data="1.1.1.1", change_type="DeleteRecordSet")
        assert_change_success(result["changes"], zone=ok_zone, index=30, input_name=a_delete_record_and_record_set_fqdn, record_name=a_delete_record_and_record_set_name,
                              record_data=None, change_type="DeleteRecordSet")
        assert_change_success(result["changes"], zone=ok_zone, index=31, input_name=txt_delete_record_and_record_set_fqdn, record_name=txt_delete_record_and_record_set_name,
                              record_type="TXT", record_data="hello", change_type="DeleteRecordSet")
        assert_change_success(result["changes"], zone=ok_zone, index=32, input_name=txt_delete_record_and_record_set_fqdn, record_name=txt_delete_record_and_record_set_name,
                              record_type="TXT", record_data=None, change_type="DeleteRecordSet")
        # Perform look up to verify record set data
        for rs in to_delete:
            rs_name = rs["recordSet"]["name"]
            rs_id = rs["recordSet"]["id"]
            zone_id = rs["zone"]["id"]
            # deletes should not exist
            if rs_name in [a_delete_record_set_name, txt_delete_record_set_name, a_delete_record_name,
                           txt_delete_record_name, cname_delete_record_name, a_delete_record_and_record_set_name, txt_delete_record_and_record_set_name]:
                client.get_recordset(zone_id, rs_id, status=404)
            else:
                result_rs = client.get_recordset(zone_id, rs_id, status=200)
                records = result_rs["recordSet"]["records"]
                # full deletes with updates
                if rs_name in [a_update_record_set_name, a_update_record_full_name]:
                    assert_that(records, contains_exactly({"address": "1.2.3.4"}, {"address": "4.5.6.7"}))
                    assert_that(records, is_not(contains_exactly({"address": "1.1.1.1"}, {"address": "1.1.1.2"})))
                elif rs_name in [txt_update_record_set_name, txt_update_record_full_name]:
                    assert_that(records, contains_exactly({"text": "some-multi-text"}, {"text": "more-multi-text"}))
                    assert_that(records, is_not(contains_exactly({"text": "hello"}, {"text": "again"})))
                # single entry delete with adds
                elif rs_name == a_update_record_name:
                    assert_that(records, contains_exactly({"address": "1.1.1.2"}, {"address": "1.2.3.4"}, {"address": "4.5.6.7"}))
                    assert_that(records, is_not(contains_exactly({"address": "1.1.1.1"})))
                elif rs_name == txt_update_record_name:
                    assert_that(records, contains_exactly({"text": "again"}, {"text": "some-multi-text"}, {"text": "more-multi-text"}))
                    assert_that(records, is_not(contains_exactly({"text": "hello"})))
                elif rs_name == a_update_record_only_name:
                    assert_that(records, contains_exactly({"address": "1.1.1.2"}))
                    assert_that(records, is_not(contains_exactly({"address": "1.1.1.1"})))
                elif rs_name == txt_update_record_only_name:
                    assert_that(records, contains_exactly({"text": "again"}))
                    assert_that(records, is_not(contains_exactly({"text": "hello"})))
    finally:
        clear_recordset_list(to_delete, client)
def test_create_batch_deletes_succeeds(shared_zone_test_context):
    """
    Test creating batch change with DeleteRecordSet with valid record data succeeds
    """
    client = shared_zone_test_context.ok_vinyldns_client
    ok_zone = shared_zone_test_context.ok_zone
    ok_group = shared_zone_test_context.ok_group
    ok_zone_name = shared_zone_test_context.ok_zone["name"]

    # Two single-record A recordsets and two multi-record A recordsets.
    single_name = generate_record_name()
    single_name_2 = generate_record_name()
    multi_name = generate_record_name()
    multi_name_2 = generate_record_name()
    single_fqdn = f"{single_name}.{ok_zone_name}"
    single_fqdn_2 = f"{single_name_2}.{ok_zone_name}"
    multi_fqdn = f"{multi_name}.{ok_zone_name}"
    multi_fqdn_2 = f"{multi_name_2}.{ok_zone_name}"

    pending_creates = [
        create_recordset(ok_zone, single_name, "A", [{"address": "1.2.3.4"}], 200, ok_group["id"]),
        create_recordset(ok_zone, single_name_2, "A", [{"address": "1.2.3.4"}], 200, ok_group["id"]),
        create_recordset(ok_zone, multi_name, "A", [{"address": "1.2.3.4"}, {"address": "1.1.1.1"}], 200, ok_group["id"]),
        create_recordset(ok_zone, multi_name_2, "A", [{"address": "1.2.3.4"}, {"address": "1.1.1.1"}], 200, ok_group["id"])
    ]
    batch_change_input = {
        "comments": "this is optional",
        "changes": [
            # Delete with explicit record data, delete without data (full delete),
            # partial delete from a multi-record set, full multi-record delete.
            get_change_A_AAAA_json(single_fqdn, address="1.2.3.4", change_type="DeleteRecordSet"),
            get_change_A_AAAA_json(single_fqdn_2, change_type="DeleteRecordSet"),
            get_change_A_AAAA_json(multi_fqdn, address="1.2.3.4", change_type="DeleteRecordSet"),
            get_change_A_AAAA_json(multi_fqdn_2, change_type="DeleteRecordSet")
        ]
    }
    to_delete = []
    try:
        creates = [client.create_recordset(rs, status=202) for rs in pending_creates]
        for change in creates:
            to_delete.append(client.wait_until_recordset_change_status(change, "Complete"))

        result = client.create_batch_change(batch_change_input, status=202)
        client.wait_until_batch_change_completed(result)

        single_create, single_create_2, multi_create, multi_create_2 = creates
        # Fully-deleted recordsets must be gone.
        client.get_recordset(single_create["zone"]["id"], single_create["recordSet"]["id"], status=404)
        client.get_recordset(single_create_2["zone"]["id"], single_create_2["recordSet"]["id"], status=404)
        client.get_recordset(multi_create_2["zone"]["id"], multi_create_2["recordSet"]["id"], status=404)
        # Partial delete leaves the record that was not named in the change.
        updated_rs = client.get_recordset(multi_create["zone"]["id"], multi_create["recordSet"]["id"], status=200)["recordSet"]
        assert_that(updated_rs["records"], is_([{"address": "1.1.1.1"}]))
    finally:
        clear_recordset_list(to_delete, client)
@pytest.mark.serial
@pytest.mark.skip_production
def test_create_batch_change_with_multi_record_adds_with_multi_record_support(shared_zone_test_context):
    """
    Test new recordsets with multiple records can be added in batch, but existing recordsets cannot be added to
    """
    client = shared_zone_test_context.ok_vinyldns_client
    ok_zone = shared_zone_test_context.ok_zone
    ok_group = shared_zone_test_context.ok_group
    ok_zone_name = shared_zone_test_context.ok_zone["name"]
    ip4_prefix = shared_zone_test_context.ip4_classless_prefix
    to_delete = []
    rs_name = generate_record_name()
    rs_fqdn = rs_name + f".{ok_zone_name}"
    # Pre-existing single-record A recordset; the batch later tries to add to it
    # and that change is expected to be rejected.
    rs_to_create = create_recordset(ok_zone, rs_name, "A", [{"address": "1.2.3.4"}], 200, ok_group["id"])
    batch_change_input = {
        "comments": "this is optional",
        "changes": [
            # Pairs of adds targeting the same NEW fqdn exercise multi-record
            # creation across A, PTR, TXT and MX types (all expected to succeed).
            get_change_A_AAAA_json(f"multi.{ok_zone_name}", address="1.2.3.4"),
            get_change_A_AAAA_json(f"multi.{ok_zone_name}", address="4.5.6.7"),
            get_change_PTR_json(f"{ip4_prefix}.44", ptrdname="multi.test"),
            get_change_PTR_json(f"{ip4_prefix}.44", ptrdname="multi2.test"),
            get_change_TXT_json(f"multi-txt.{ok_zone_name}", text="some-multi-text"),
            get_change_TXT_json(f"multi-txt.{ok_zone_name}", text="more-multi-text"),
            get_change_MX_json(f"multi-mx.{ok_zone_name}", preference=0),
            get_change_MX_json(f"multi-mx.{ok_zone_name}", preference=1000, exchange="bar.foo."),
            # Add to the EXISTING recordset created above — expected to fail.
            get_change_A_AAAA_json(rs_fqdn, address="1.1.1.1")
        ]
    }
    try:
        create_rs = client.create_recordset(rs_to_create, status=202)
        to_delete.append(client.wait_until_recordset_change_status(create_rs, "Complete"))

        # The single failed change makes the whole batch return 400; responses
        # are inspected positionally, matching the order of "changes" above.
        response = client.create_batch_change(batch_change_input, status=400)
        assert_successful_change_in_error_response(response[0], input_name=f"multi.{ok_zone_name}", record_data="1.2.3.4")
        assert_successful_change_in_error_response(response[1], input_name=f"multi.{ok_zone_name}", record_data="4.5.6.7")
        assert_successful_change_in_error_response(response[2], input_name=f"{ip4_prefix}.44", record_type="PTR", record_data="multi.test.")
        assert_successful_change_in_error_response(response[3], input_name=f"{ip4_prefix}.44", record_type="PTR", record_data="multi2.test.")
        assert_successful_change_in_error_response(response[4], input_name=f"multi-txt.{ok_zone_name}", record_type="TXT", record_data="some-multi-text")
        assert_successful_change_in_error_response(response[5], input_name=f"multi-txt.{ok_zone_name}", record_type="TXT", record_data="more-multi-text")
        assert_successful_change_in_error_response(response[6], input_name=f"multi-mx.{ok_zone_name}", record_type="MX", record_data={"preference": 0, "exchange": "foo.bar."})
        assert_successful_change_in_error_response(response[7], input_name=f"multi-mx.{ok_zone_name}", record_type="MX", record_data={"preference": 1000, "exchange": "bar.foo."})
        assert_failed_change_in_error_response(response[8], input_name=rs_fqdn, record_data="1.1.1.1",
                                               error_messages=["Record \"" + rs_fqdn + "\" Already Exists: cannot add an existing record; to update it, issue a DeleteRecordSet then an Add."])
    finally:
        clear_recordset_list(to_delete, client)
| 60.476622
| 196
| 0.653464
| 32,449
| 253,518
| 4.71919
| 0.018336
| 0.026748
| 0.034375
| 0.051563
| 0.92912
| 0.901628
| 0.869519
| 0.832583
| 0.795413
| 0.755494
| 0
| 0.021832
| 0.238808
| 253,518
| 4,191
| 197
| 60.491052
| 0.771702
| 0.057231
| 0
| 0.479474
| 0
| 0.02694
| 0.213719
| 0.046547
| 0.000641
| 0
| 0
| 0.000239
| 0.14272
| 1
| 0.024375
| false
| 0
| 0.001283
| 0
| 0.02694
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
e910129c125bd598d3282438f15607fe0c3103a9
| 99
|
py
|
Python
|
tests/test_trigger.py
|
seyfahni/desky
|
abaea13a7b89a8f11864155cbe3d3ff37b0950a9
|
[
"MIT"
] | null | null | null |
tests/test_trigger.py
|
seyfahni/desky
|
abaea13a7b89a8f11864155cbe3d3ff37b0950a9
|
[
"MIT"
] | null | null | null |
tests/test_trigger.py
|
seyfahni/desky
|
abaea13a7b89a8f11864155cbe3d3ff37b0950a9
|
[
"MIT"
] | null | null | null |
import unittest
from desktop_buddy import trigger
class TriggerTest(unittest.TestCase):
    """Placeholder test case for desktop_buddy.trigger; no tests implemented yet."""
    pass
| 14.142857
| 37
| 0.808081
| 12
| 99
| 6.583333
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.151515
| 99
| 6
| 38
| 16.5
| 0.940476
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.25
| 0.5
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 6
|
3a610680ebaeb4fcf844ee7d82ed815978a4f526
| 31
|
py
|
Python
|
www/main/models/__init__.py
|
Rabyss/BeyondTheLines
|
67ca7d336ab9dc4d31b49faf667960e6deece2a5
|
[
"MIT"
] | 39
|
2019-01-24T10:45:23.000Z
|
2022-03-18T09:37:59.000Z
|
www/main/models/__init__.py
|
Rabyss/BeyondTheLines
|
67ca7d336ab9dc4d31b49faf667960e6deece2a5
|
[
"MIT"
] | 260
|
2018-11-27T12:56:33.000Z
|
2022-03-31T16:08:59.000Z
|
www/main/models/__init__.py
|
Rabyss/BeyondTheLines
|
67ca7d336ab9dc4d31b49faf667960e6deece2a5
|
[
"MIT"
] | 13
|
2018-11-30T16:49:05.000Z
|
2022-01-21T17:39:29.000Z
|
from .analysis import Analysis
| 15.5
| 30
| 0.83871
| 4
| 31
| 6.5
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.129032
| 31
| 1
| 31
| 31
| 0.962963
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
3ac1831a44385e9f08153ca2fb7077bdffd4e945
| 161
|
py
|
Python
|
src/unicef_security/tasks.py
|
unicef/unicef-security
|
cc51ba52cddb845b8174cf3dc94706f0334453b2
|
[
"Apache-2.0"
] | null | null | null |
src/unicef_security/tasks.py
|
unicef/unicef-security
|
cc51ba52cddb845b8174cf3dc94706f0334453b2
|
[
"Apache-2.0"
] | 10
|
2019-04-24T14:33:49.000Z
|
2020-12-19T01:07:06.000Z
|
src/unicef_security/tasks.py
|
unicef/unicef-security
|
cc51ba52cddb845b8174cf3dc94706f0334453b2
|
[
"Apache-2.0"
] | 1
|
2019-04-11T15:34:18.000Z
|
2019-04-11T15:34:18.000Z
|
from celery.app import default_app
from unicef_security.sync import load_business_area
@default_app.task()
def sync_business_area():
    """Celery task: synchronize business areas by delegating to load_business_area().

    Takes no arguments and returns nothing; all work (and any side effects)
    happens inside load_business_area().
    """
    load_business_area()
| 17.888889
| 51
| 0.813665
| 24
| 161
| 5.083333
| 0.541667
| 0.295082
| 0.262295
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.118012
| 161
| 8
| 52
| 20.125
| 0.859155
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.2
| true
| 0
| 0.4
| 0
| 0.6
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
3adb57604ab592952c0aaaf39d0d0e17cba4df50
| 108
|
py
|
Python
|
addons/hr_employee_transfer/models/__init__.py
|
gleis44/stellwerk
|
1fc4145eac6bbb76134ef9ebb22f2441a69d093f
|
[
"MIT"
] | null | null | null |
addons/hr_employee_transfer/models/__init__.py
|
gleis44/stellwerk
|
1fc4145eac6bbb76134ef9ebb22f2441a69d093f
|
[
"MIT"
] | null | null | null |
addons/hr_employee_transfer/models/__init__.py
|
gleis44/stellwerk
|
1fc4145eac6bbb76134ef9ebb22f2441a69d093f
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
from . import employee_transfer
from . import hr_contract
from . import res_company
| 21.6
| 31
| 0.731481
| 15
| 108
| 5.066667
| 0.733333
| 0.394737
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.010989
| 0.157407
| 108
| 4
| 32
| 27
| 0.824176
| 0.194444
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
3ae6646083c48c2b00ebb9cc8c4a6f79c9816866
| 6,143
|
py
|
Python
|
vaccine.py
|
sujitmandal/Covid-19-Vaccine-Available-India
|
94ccb602eaaac6212fa202a7e3de2300ca4b277b
|
[
"MIT"
] | null | null | null |
vaccine.py
|
sujitmandal/Covid-19-Vaccine-Available-India
|
94ccb602eaaac6212fa202a7e3de2300ca4b277b
|
[
"MIT"
] | null | null | null |
vaccine.py
|
sujitmandal/Covid-19-Vaccine-Available-India
|
94ccb602eaaac6212fa202a7e3de2300ca4b277b
|
[
"MIT"
] | null | null | null |
# Author : Sujit Mandal
import os
import argparse
import requests
import pandas as pd #pip install pandas
from httpRespons import data_info
from httpRespons import userAgent
from httpRespons import httpResponseStatusCodes
def bothDose(request, date, output):
    """Write vaccine-session availability (both doses) for *date* to a report file.

    Parameters:
        request: HTTP response from the CoWIN sessions API — an object exposing
            ``.status_code`` and ``.json()`` (presumably a requests.Response).
        date: date string used as the output file-name prefix.
        output: report format, "html"/"HTML" or "csv"/"CSV"; any other value
            produces no file (matches original behavior).

    Side effects: creates ``<date>_both.html`` / ``<date>_both.csv`` in the
    current directory; prints the HTTP status description on non-200 responses.
    """
    if request.status_code != 200:
        print('HTTP Response Status : {}'.format(httpResponseStatusCodes.get(request.status_code)))
        return

    # Guard against a payload without 'sessions' (original crashed iterating None).
    sessions = request.json().get('sessions') or []
    # Drop columns that are not part of the report.
    unwanted = ('available_capacity', 'session_id', 'fee_type', 'lat', 'long', 'center_id')
    for session in sessions:
        for key in unwanted:
            session.pop(key, None)
    sessions.sort(key=lambda s: s['min_age_limit'])

    # Fall back to the placeholder rows (data_info) when no sessions exist;
    # the writing logic is shared instead of duplicated per branch.
    frame = pd.json_normalize(sessions if sessions else data_info)
    if output in ('html', 'HTML'):
        frame.to_html(date + '_both.html', index=False)
    if output in ('csv', 'CSV'):
        frame.to_csv(date + '_both.csv', index=False)
def doseOne(request, date, output):
    """Write dose-one vaccine availability for *date* to a report file.

    Parameters:
        request: HTTP response from the CoWIN sessions API — an object exposing
            ``.status_code`` and ``.json()`` (presumably a requests.Response).
        date: date string used as the output file-name prefix.
        output: report format, "html"/"HTML" or "csv"/"CSV"; any other value
            produces no file (matches original behavior).

    Side effects: creates ``<date>_dose_one.html`` / ``<date>_dose_one.csv`` in
    the current directory; prints the HTTP status description on non-200 responses.
    """
    if request.status_code != 200:
        print('HTTP Response Status : {}'.format(httpResponseStatusCodes.get(request.status_code)))
        return

    # Guard against a payload without 'sessions' (original crashed iterating None).
    sessions = request.json().get('sessions') or []
    # Dose-one report: hide dose-two capacity and non-report columns.
    unwanted = ('available_capacity_dose2', 'available_capacity', 'session_id',
                'fee_type', 'lat', 'long', 'center_id')
    for session in sessions:
        for key in unwanted:
            session.pop(key, None)
    sessions.sort(key=lambda s: s['min_age_limit'])

    # Fall back to the placeholder rows (data_info) when no sessions exist.
    frame = pd.json_normalize(sessions if sessions else data_info)
    if output in ('html', 'HTML'):
        frame.to_html(date + '_dose_one.html', index=False)
    if output in ('csv', 'CSV'):
        frame.to_csv(date + '_dose_one.csv', index=False)
def doseTwo(request, date, output):
    """Write dose-two vaccine availability for *date* to a report file.

    Parameters:
        request: HTTP response from the CoWIN sessions API — an object exposing
            ``.status_code`` and ``.json()`` (presumably a requests.Response).
        date: date string used as the output file-name prefix.
        output: report format, "html"/"HTML" or "csv"/"CSV"; any other value
            produces no file (matches original behavior).

    Side effects: creates ``<date>_dose_two.html`` / ``<date>_dose_two.csv`` in
    the current directory; prints the HTTP status description on non-200 responses.
    """
    if request.status_code != 200:
        print('HTTP Response Status : {}'.format(httpResponseStatusCodes.get(request.status_code)))
        return

    # Guard against a payload without 'sessions' (original crashed iterating None).
    sessions = request.json().get('sessions') or []
    # Dose-two report: hide dose-one capacity and non-report columns.
    unwanted = ('available_capacity_dose1', 'available_capacity', 'session_id',
                'fee_type', 'lat', 'long', 'center_id')
    for session in sessions:
        for key in unwanted:
            session.pop(key, None)
    sessions.sort(key=lambda s: s['min_age_limit'])

    # Fall back to the placeholder rows (data_info) when no sessions exist.
    frame = pd.json_normalize(sessions if sessions else data_info)
    if output in ('html', 'HTML'):
        frame.to_html(date + '_dose_two.html', index=False)
    if output in ('csv', 'CSV'):
        frame.to_csv(date + '_dose_two.csv', index=False)
def main():
    """Command-line entry point: fetch CoWIN vaccine sessions and write a report.

    CLI arguments: -dose both|one|two, -d <date>, -p <pincode>,
    -o <html|csv> (optional, defaults to csv).

    Side effects: one HTTP GET to the public CoWIN API; the dose handler writes
    the report file; prints the created file name and current directory.
    """
    my_parser = argparse.ArgumentParser(description='Covid-19 Vaccine Information')
    my_parser.add_argument("-dose", "-dose", required=True, help="Vaccine Dose")
    my_parser.add_argument("-d", "-d", required=True, help="Date")
    my_parser.add_argument("-p", "-p", required=True, help="PinCode")
    my_parser.add_argument("-o", "-o", required=False, help="File Extention")
    args = vars(my_parser.parse_args())

    dose = args['dose']
    date = args['d']
    pincode = args['p']
    # -o is optional; without a default, the file-name concatenation below
    # raised TypeError on None. Fall back to csv.
    output = args["o"] or 'csv'

    api_url = 'https://cdn-api.co-vin.in/api/v2/appointment/sessions/public/findByPin?pincode={}&date={}'.format(pincode, date)
    headers = {'user-agent': userAgent}
    API = requests.get(api_url, headers=headers)

    # Dispatch table replaces three duplicated if-blocks; lower() still accepts
    # the originally supported 'both'/'BOTH' (etc.) spellings.
    handlers = {
        'both': (bothDose, '_both.'),
        'one': (doseOne, '_dose_one.'),
        'two': (doseTwo, '_dose_two.'),
    }
    selected = handlers.get(dose.lower())
    if selected is not None:
        handler, suffix = selected
        handler(API, date, output)
        print('\n')
        print('{}'.format(date + suffix + output) + ' file is created.')
        print('Directory : {}'.format(os.getcwd()))


if __name__ == "__main__":
    main()
| 29.676329
| 127
| 0.533778
| 754
| 6,143
| 4.176393
| 0.153846
| 0.019054
| 0.038107
| 0.044459
| 0.745951
| 0.745951
| 0.730708
| 0.730708
| 0.719911
| 0.695776
| 0
| 0.004618
| 0.330295
| 6,143
| 206
| 128
| 29.820388
| 0.760817
| 0.006349
| 0
| 0.717391
| 0
| 0.007246
| 0.184888
| 0.015735
| 0
| 0
| 0
| 0
| 0
| 1
| 0.028986
| false
| 0
| 0.050725
| 0
| 0.07971
| 0.086957
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
aaed70d6a49fd3d2d784b37c5da74c50612427bd
| 102
|
py
|
Python
|
tests/data_utils.py
|
xoeye/typecats
|
0b97262bdef0e36232b7849187586c9bb7e82198
|
[
"MIT"
] | 14
|
2019-08-15T02:19:28.000Z
|
2021-12-28T19:44:56.000Z
|
tests/data_utils.py
|
xoeye/typecats
|
0b97262bdef0e36232b7849187586c9bb7e82198
|
[
"MIT"
] | 4
|
2019-08-22T21:11:37.000Z
|
2022-02-17T15:42:12.000Z
|
tests/data_utils.py
|
xoeye/typecats
|
0b97262bdef0e36232b7849187586c9bb7e82198
|
[
"MIT"
] | 2
|
2019-12-31T08:53:23.000Z
|
2021-09-03T20:26:21.000Z
|
import os
def ld(name: str) -> str:
    """Return the path of test-data file *name* inside the sibling ``data`` directory.

    Uses os.path.join instead of manual ``os.sep`` concatenation so separator
    handling is delegated to the standard library.
    """
    return os.path.join(os.path.dirname(__file__), "data", name)
| 17
| 70
| 0.637255
| 17
| 102
| 3.588235
| 0.705882
| 0.163934
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.205882
| 102
| 5
| 71
| 20.4
| 0.753086
| 0
| 0
| 0
| 0
| 0
| 0.039216
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 6
|
c93601418c7751f3a8913d9c070ec2228be0044a
| 28,433
|
py
|
Python
|
test_network.py
|
HaoZhang1018/STDFusionNet
|
e4d6dbb2d995032ed93880e90262834e1edaa65f
|
[
"MIT"
] | 12
|
2021-03-27T05:47:40.000Z
|
2022-03-06T14:38:20.000Z
|
test_network.py
|
Melon-Xu/STDFusionNet-1
|
8bc8ba4f94d8caded932655c6dbd0ba9e4f590d5
|
[
"MIT"
] | 1
|
2021-05-21T07:01:35.000Z
|
2021-05-25T14:44:55.000Z
|
test_network.py
|
Melon-Xu/STDFusionNet-1
|
8bc8ba4f94d8caded932655c6dbd0ba9e4f590d5
|
[
"MIT"
] | 7
|
2021-04-01T07:25:44.000Z
|
2022-03-28T13:28:50.000Z
|
import tensorflow as tf
from utils import weights_spectral_norm
class STDFusionNet():
def feature_padding(self, x, kernel=3, stride=1, pad=1):
    """Zero-pad the two middle dimensions of 4-D tensor x (height/width for
    NHWC input — assumed, confirm at call sites).

    When (kernel - stride) is even, *pad* is applied on all four sides;
    otherwise the bottom/right sides receive the remainder (kernel - stride - pad).
    """
    if (kernel - stride) % 2 == 0:
        pad_top = pad_bottom = pad
        pad_left = pad_right = pad
    else:
        pad_top = pad
        pad_left = pad
        pad_bottom = kernel - stride - pad
        pad_right = kernel - stride - pad
    paddings = [[0, 0], [pad_top, pad_bottom], [pad_left, pad_right], [0, 0]]
    return tf.pad(x, paddings)
def vi_feature_extraction_network(self, vi_image, reader):
    """Build the visible-image feature-extraction branch of the fusion network.

    All conv weights/biases are restored as constants from checkpoint tensors
    named 'STMFusion_model/vi_extraction_network/...' via *reader* (an object
    exposing get_tensor — presumably a TF checkpoint reader; confirm at caller).
    Returns the encoded feature map produced by three residual blocks.
    """
    with tf.compat.v1.variable_scope('vi_extraction_network'):
        # Stem: 5x5 conv; manual pad=2 + padding='VALID' preserves spatial size.
        with tf.compat.v1.variable_scope('conv1'):
            weights = tf.compat.v1.get_variable("w", initializer=tf.constant(
                reader.get_tensor('STMFusion_model/vi_extraction_network/conv1/w')))
            #weights = weights_spectral_norm(weights)
            bias = tf.compat.v1.get_variable("b", initializer=tf.constant(reader.get_tensor(
                'STMFusion_model/vi_extraction_network/conv1/b')))
            input = self.feature_padding(vi_image, kernel=5, stride=1, pad=2)
            conv1 = tf.nn.conv2d(input, weights, strides=[1, 1, 1, 1], padding='VALID') + bias
            # conv1 = tf.contrib.layers.batch_norm(conv1, decay=0.9, updates_collections=None, epsilon=1e-5, scale=True)
            conv1 = tf.nn.leaky_relu(conv1)
        block1_input = conv1
        print("block1_input shape: ", block1_input.get_shape().as_list())
        # state size: 16
        # Residual block 1: conv1 -> padded conv2 -> conv3, plain skip add.
        with tf.compat.v1.variable_scope('block1'):
            with tf.compat.v1.variable_scope('conv1'):
                weights = tf.compat.v1.get_variable("w", initializer=tf.constant(
                    reader.get_tensor('STMFusion_model/vi_extraction_network/block1/conv1/w')))
                #weights = weights_spectral_norm(weights)
                bias = tf.compat.v1.get_variable("b", initializer=tf.constant(reader.get_tensor(
                    'STMFusion_model/vi_extraction_network/block1/conv1/b')))
                conv1 = tf.nn.conv2d(block1_input, weights, strides=[1, 1, 1, 1], padding='VALID') + bias
                conv1 = tf.nn.leaky_relu(conv1)
            with tf.compat.v1.variable_scope('conv2'):
                weights = tf.compat.v1.get_variable("w", initializer=tf.constant(
                    reader.get_tensor('STMFusion_model/vi_extraction_network/block1/conv2/w')))
                #weights = weights_spectral_norm(weights)
                bias = tf.compat.v1.get_variable("b", initializer=tf.constant(reader.get_tensor(
                    'STMFusion_model/vi_extraction_network/block1/conv2/b')))
                input = self.feature_padding(conv1)
                conv2 = tf.nn.conv2d(input, weights, strides=[1, 1, 1, 1], padding='VALID') + bias
                conv2 = tf.nn.leaky_relu(conv2)
            with tf.compat.v1.variable_scope('conv3'):
                weights = tf.compat.v1.get_variable("w", initializer=tf.constant(
                    reader.get_tensor('STMFusion_model/vi_extraction_network/block1/conv3/w')))
                #weights = weights_spectral_norm(weights)
                bias = tf.compat.v1.get_variable("b", initializer=tf.constant(reader.get_tensor(
                    'STMFusion_model/vi_extraction_network/block1/conv3/b')))
                conv3 = tf.nn.conv2d(conv2, weights, strides=[1, 1, 1, 1], padding='VALID') + bias
            print("conv3 shape: ", conv3.get_shape().as_list())
            block1_output = tf.nn.leaky_relu(conv3 + block1_input)
        block2_input = block1_output
        # Residual block 2: the skip path goes through its own 'identity_conv'
        # projection before the add.
        with tf.compat.v1.variable_scope('block2'):
            with tf.compat.v1.variable_scope('conv1'):
                weights = tf.compat.v1.get_variable("w", initializer=tf.constant(
                    reader.get_tensor('STMFusion_model/vi_extraction_network/block2/conv1/w')))
                #weights = weights_spectral_norm(weights)
                bias = tf.compat.v1.get_variable("b", initializer=tf.constant(reader.get_tensor(
                    'STMFusion_model/vi_extraction_network/block2/conv1/b')))
                conv1 = tf.nn.conv2d(block2_input, weights, strides=[1, 1, 1, 1], padding='VALID') + bias
                conv1 = tf.nn.leaky_relu(conv1)
            with tf.compat.v1.variable_scope('conv2'):
                weights = tf.compat.v1.get_variable("w", initializer=tf.constant(
                    reader.get_tensor('STMFusion_model/vi_extraction_network/block2/conv2/w')))
                #weights = weights_spectral_norm(weights)
                bias = tf.compat.v1.get_variable("b", initializer=tf.constant(reader.get_tensor(
                    'STMFusion_model/vi_extraction_network/block2/conv2/b')))
                input = self.feature_padding(conv1)
                conv2 = tf.nn.conv2d(input, weights, strides=[1, 1, 1, 1], padding='VALID') + bias
                conv2 = tf.nn.leaky_relu(conv2)
            with tf.compat.v1.variable_scope('conv3'):
                weights = tf.compat.v1.get_variable("w", initializer=tf.constant(
                    reader.get_tensor('STMFusion_model/vi_extraction_network/block2/conv3/w')))
                #weights = weights_spectral_norm(weights)
                bias = tf.compat.v1.get_variable("b", initializer=tf.constant(reader.get_tensor(
                    'STMFusion_model/vi_extraction_network/block2/conv3/b')))
                conv3 = tf.nn.conv2d(conv2, weights, strides=[1, 1, 1, 1], padding='VALID') + bias
            # NOTE(review): plain tf.variable_scope (not compat.v1) here — confirm intentional.
            with tf.variable_scope('identity_conv'):
                weights = tf.compat.v1.get_variable("w", initializer=tf.constant(
                    reader.get_tensor('STMFusion_model/vi_extraction_network/block2/identity_conv/w')))
                #weights = weights_spectral_norm(weights)
                identity_conv = tf.nn.conv2d(block2_input, weights, strides=[1, 1, 1, 1], padding='VALID')
            block2_output = tf.nn.leaky_relu(conv3 + identity_conv)
        block3_input = block2_output
        # Residual block 3: same structure as block 2.
        with tf.compat.v1.variable_scope('block3'):
            with tf.compat.v1.variable_scope('conv1'):
                weights = tf.compat.v1.get_variable("w", initializer=tf.constant(
                    reader.get_tensor('STMFusion_model/vi_extraction_network/block3/conv1/w')))
                #weights = weights_spectral_norm(weights)
                bias = tf.compat.v1.get_variable("b", initializer=tf.constant(reader.get_tensor(
                    'STMFusion_model/vi_extraction_network/block3/conv1/b')))
                conv1 = tf.nn.conv2d(block3_input, weights, strides=[1, 1, 1, 1], padding='VALID') + bias
                conv1 = tf.nn.leaky_relu(conv1)
            with tf.compat.v1.variable_scope('conv2'):
                weights = tf.compat.v1.get_variable("w", initializer=tf.constant(
                    reader.get_tensor('STMFusion_model/vi_extraction_network/block3/conv2/w')))
                #weights = weights_spectral_norm(weights)
                bias = tf.compat.v1.get_variable("b", initializer=tf.constant(reader.get_tensor(
                    'STMFusion_model/vi_extraction_network/block3/conv2/b')))
                input = self.feature_padding(conv1)
                conv2 = tf.nn.conv2d(input, weights, strides=[1, 1, 1, 1], padding='VALID') + bias
                conv2 = tf.nn.leaky_relu(conv2)
            with tf.compat.v1.variable_scope('conv3'):
                weights = tf.compat.v1.get_variable("w", initializer=tf.constant(
                    reader.get_tensor('STMFusion_model/vi_extraction_network/block3/conv3/w')))
                #weights = weights_spectral_norm(weights)
                bias = tf.compat.v1.get_variable("b", initializer=tf.constant(reader.get_tensor(
                    'STMFusion_model/vi_extraction_network/block3/conv3/b')))
                conv3 = tf.nn.conv2d(conv2, weights, strides=[1, 1, 1, 1], padding='VALID') + bias
            # NOTE(review): plain tf.variable_scope (not compat.v1) here — confirm intentional.
            with tf.variable_scope('identity_conv'):
                weights = tf.compat.v1.get_variable("w", initializer=tf.constant(
                    reader.get_tensor('STMFusion_model/vi_extraction_network/block3/identity_conv/w')))
                #weights = weights_spectral_norm(weights)
                identity_conv = tf.nn.conv2d(block3_input, weights, strides=[1, 1, 1, 1], padding='VALID')
            block3_output = tf.nn.leaky_relu(conv3 + identity_conv)
        encoding_feature = block3_output
    return encoding_feature
def ir_feature_extraction_network(self, ir_image, reader):
    """Build the infrared-image feature-extraction branch of the fusion network.

    Mirrors vi_feature_extraction_network but restores weights from checkpoint
    tensors named 'STMFusion_model/ir_extraction_network/...' via *reader*
    (an object exposing get_tensor — presumably a TF checkpoint reader).
    Returns the encoded feature map produced by three residual blocks.
    """
    with tf.compat.v1.variable_scope('ir_extraction_network'):
        # Stem: 5x5 conv; manual pad=2 + padding='VALID' preserves spatial size.
        with tf.compat.v1.variable_scope('conv1'):
            weights = tf.compat.v1.get_variable("w", initializer=tf.constant(
                reader.get_tensor('STMFusion_model/ir_extraction_network/conv1/w')))
            #weights = weights_spectral_norm(weights)
            bias = tf.compat.v1.get_variable("b", initializer=tf.constant(reader.get_tensor(
                'STMFusion_model/ir_extraction_network/conv1/b')))
            input = self.feature_padding(ir_image, kernel=5, stride=1, pad=2)
            conv1 = tf.nn.conv2d(input, weights, strides=[1, 1, 1, 1], padding='VALID') + bias
            # conv1 = tf.contrib.layers.batch_norm(conv1, decay=0.9, updates_collections=None, epsilon=1e-5, scale=True)
            conv1 = tf.nn.leaky_relu(conv1)
        block1_input = conv1
        # state size: 16
        # Residual block 1: conv1 -> padded conv2 -> conv3, plain skip add.
        with tf.compat.v1.variable_scope('block1'):
            with tf.compat.v1.variable_scope('conv1'):
                weights = tf.compat.v1.get_variable("w", initializer=tf.constant(
                    reader.get_tensor('STMFusion_model/ir_extraction_network/block1/conv1/w')))
                #weights = weights_spectral_norm(weights)
                bias = tf.compat.v1.get_variable("b", initializer=tf.constant(reader.get_tensor(
                    'STMFusion_model/ir_extraction_network/block1/conv1/b')))
                conv1 = tf.nn.conv2d(block1_input, weights, strides=[1, 1, 1, 1], padding='VALID') + bias
                conv1 = tf.nn.leaky_relu(conv1)
            with tf.compat.v1.variable_scope('conv2'):
                weights = tf.compat.v1.get_variable("w", initializer=tf.constant(
                    reader.get_tensor('STMFusion_model/ir_extraction_network/block1/conv2/w')))
                #weights = weights_spectral_norm(weights)
                bias = tf.compat.v1.get_variable("b", initializer=tf.constant(reader.get_tensor(
                    'STMFusion_model/ir_extraction_network/block1/conv2/b')))
                input = self.feature_padding(conv1)
                conv2 = tf.nn.conv2d(input, weights, strides=[1, 1, 1, 1], padding='VALID') + bias
                conv2 = tf.nn.leaky_relu(conv2)
            with tf.compat.v1.variable_scope('conv3'):
                weights = tf.compat.v1.get_variable("w", initializer=tf.constant(
                    reader.get_tensor('STMFusion_model/ir_extraction_network/block1/conv3/w')))
                #weights = weights_spectral_norm(weights)
                bias = tf.compat.v1.get_variable("b", initializer=tf.constant(reader.get_tensor(
                    'STMFusion_model/ir_extraction_network/block1/conv3/b')))
                conv3 = tf.nn.conv2d(conv2, weights, strides=[1, 1, 1, 1], padding='VALID') + bias
            block1_output = tf.nn.leaky_relu(conv3 + block1_input)
        block2_input = block1_output
        # Residual block 2: the skip path goes through its own 'identity_conv'
        # projection before the add.
        with tf.compat.v1.variable_scope('block2'):
            with tf.compat.v1.variable_scope('conv1'):
                weights = tf.compat.v1.get_variable("w", initializer=tf.constant(
                    reader.get_tensor('STMFusion_model/ir_extraction_network/block2/conv1/w')))
                #weights = weights_spectral_norm(weights)
                bias = tf.compat.v1.get_variable("b", initializer=tf.constant(reader.get_tensor(
                    'STMFusion_model/ir_extraction_network/block2/conv1/b')))
                conv1 = tf.nn.conv2d(block2_input, weights, strides=[1, 1, 1, 1], padding='VALID') + bias
                conv1 = tf.nn.leaky_relu(conv1)
            with tf.compat.v1.variable_scope('conv2'):
                weights = tf.compat.v1.get_variable("w", initializer=tf.constant(
                    reader.get_tensor('STMFusion_model/ir_extraction_network/block2/conv2/w')))
                #weights = weights_spectral_norm(weights)
                bias = tf.compat.v1.get_variable("b", initializer=tf.constant(reader.get_tensor(
                    'STMFusion_model/ir_extraction_network/block2/conv2/b')))
                input = self.feature_padding(conv1)
                conv2 = tf.nn.conv2d(input, weights, strides=[1, 1, 1, 1], padding='VALID') + bias
                conv2 = tf.nn.leaky_relu(conv2)
            with tf.compat.v1.variable_scope('conv3'):
                weights = tf.compat.v1.get_variable("w", initializer=tf.constant(
                    reader.get_tensor('STMFusion_model/ir_extraction_network/block2/conv3/w')))
                #weights = weights_spectral_norm(weights)
                bias = tf.compat.v1.get_variable("b", initializer=tf.constant(reader.get_tensor(
                    'STMFusion_model/ir_extraction_network/block2/conv3/b')))
                conv3 = tf.nn.conv2d(conv2, weights, strides=[1, 1, 1, 1], padding='VALID') + bias
            # NOTE(review): plain tf.variable_scope (not compat.v1) here — confirm intentional.
            with tf.variable_scope('identity_conv'):
                weights = tf.compat.v1.get_variable("w", initializer=tf.constant(
                    reader.get_tensor('STMFusion_model/ir_extraction_network/block2/identity_conv/w')))
                #weights = weights_spectral_norm(weights)
                identity_conv = tf.nn.conv2d(block2_input, weights, strides=[1, 1, 1, 1], padding='VALID')
            block2_output = tf.nn.leaky_relu(conv3 + identity_conv)
        block3_input = block2_output
        # Residual block 3: same structure as block 2.
        with tf.compat.v1.variable_scope('block3'):
            with tf.compat.v1.variable_scope('conv1'):
                weights = tf.compat.v1.get_variable("w", initializer=tf.constant(
                    reader.get_tensor('STMFusion_model/ir_extraction_network/block3/conv1/w')))
                #weights = weights_spectral_norm(weights)
                bias = tf.compat.v1.get_variable("b", initializer=tf.constant(reader.get_tensor(
                    'STMFusion_model/ir_extraction_network/block3/conv1/b')))
                conv1 = tf.nn.conv2d(block3_input, weights, strides=[1, 1, 1, 1], padding='VALID') + bias
                conv1 = tf.nn.leaky_relu(conv1)
            with tf.compat.v1.variable_scope('conv2'):
                weights = tf.compat.v1.get_variable("w", initializer=tf.constant(
                    reader.get_tensor('STMFusion_model/ir_extraction_network/block3/conv2/w')))
                #weights = weights_spectral_norm(weights)
                bias = tf.compat.v1.get_variable("b", initializer=tf.constant(reader.get_tensor(
                    'STMFusion_model/ir_extraction_network/block3/conv2/b')))
                input = self.feature_padding(conv1)
                conv2 = tf.nn.conv2d(input, weights, strides=[1, 1, 1, 1], padding='VALID') + bias
                conv2 = tf.nn.leaky_relu(conv2)
            with tf.compat.v1.variable_scope('conv3'):
                weights = tf.compat.v1.get_variable("w", initializer=tf.constant(
                    reader.get_tensor('STMFusion_model/ir_extraction_network/block3/conv3/w')))
                #weights = weights_spectral_norm(weights)
                bias = tf.compat.v1.get_variable("b", initializer=tf.constant(reader.get_tensor(
                    'STMFusion_model/ir_extraction_network/block3/conv3/b')))
                conv3 = tf.nn.conv2d(conv2, weights, strides=[1, 1, 1, 1], padding='VALID') + bias
            # NOTE(review): plain tf.variable_scope (not compat.v1) here — confirm intentional.
            with tf.variable_scope('identity_conv'):
                weights = tf.compat.v1.get_variable("w", initializer=tf.constant(
                    reader.get_tensor('STMFusion_model/ir_extraction_network/block3/identity_conv/w')))
                #weights = weights_spectral_norm(weights)
                identity_conv = tf.nn.conv2d(block3_input, weights, strides=[1, 1, 1, 1], padding='VALID')
            block3_output = tf.nn.leaky_relu(conv3 + identity_conv)
        encoding_feature = block3_output
    return encoding_feature
def feature_reconstruction_network(self, feature, reader):
    """Decode a fused feature map back into an image via four residual blocks.

    Each block is conv1 -> conv2 -> conv3 plus a 1x1 identity shortcut
    (``identity_conv``), with every weight restored as a constant from the
    checkpoint ``reader`` instead of being trained.  Blocks 1 and 2 end in
    ELU, block 3 in leaky ReLU, and block 4 in tanh.

    Args:
        feature: input feature tensor, NHWC layout assumed — TODO confirm.
        reader: checkpoint reader exposing ``get_tensor(name)``.

    Returns:
        fusion_image: the reconstructed (fused) image tensor.
    """
    prefix = 'STMFusion_model/reconstruction_network'

    def _restored(var_name, ckpt_name):
        # Variable constant-initialized from the checkpoint tensor.
        return tf.compat.v1.get_variable(
            var_name, initializer=tf.constant(reader.get_tensor(ckpt_name)))

    def _conv(x, w, b=None):
        # All convolutions in this network are stride-1, VALID padding.
        out = tf.nn.conv2d(x, w, strides=[1, 1, 1, 1], padding='VALID')
        return out if b is None else out + b

    def _residual_block(block_input, block_name, activation):
        # conv1 -> pad -> conv2 -> conv3, summed with a 1x1 identity shortcut.
        base = '%s/%s' % (prefix, block_name)
        with tf.compat.v1.variable_scope(block_name):
            with tf.compat.v1.variable_scope('conv1'):
                w = _restored("w", base + '/conv1/w')
                b = _restored("b", base + '/conv1/b')
                conv1 = tf.nn.leaky_relu(_conv(block_input, w, b))
            with tf.compat.v1.variable_scope('conv2'):
                w = _restored("w", base + '/conv2/w')
                b = _restored("b", base + '/conv2/b')
                # Pad so the VALID conv keeps the spatial size of conv1.
                padded = self.feature_padding(conv1)
                conv2 = tf.nn.leaky_relu(_conv(padded, w, b))
            with tf.compat.v1.variable_scope('conv3'):
                w = _restored("w", base + '/conv3/w')
                b = _restored("b", base + '/conv3/b')
                conv3 = _conv(conv2, w, b)
            # Originally `tf.variable_scope`; unified on tf.compat.v1 for
            # consistency with the rest of the file.
            with tf.compat.v1.variable_scope('identity_conv'):
                w = _restored("w", base + '/identity_conv/w')
                identity = _conv(block_input, w)
            return activation(conv3 + identity)

    x = _residual_block(feature, 'block1', tf.nn.elu)
    x = _residual_block(x, 'block2', tf.nn.elu)
    x = _residual_block(x, 'block3', tf.nn.leaky_relu)
    # tanh keeps the reconstructed image in [-1, 1].
    fusion_image = _residual_block(x, 'block4', tf.nn.tanh)
    return fusion_image
def STDFusion_model(self, vi_image, ir_image, reader):
    """Fuse a visible and an infrared image into one image.

    Extracts a feature map from each modality, concatenates them along the
    channel axis, and decodes the result with the reconstruction network.
    All weights come from the checkpoint ``reader``.

    Args:
        vi_image: visible-light input image tensor.
        ir_image: infrared input image tensor.
        reader: checkpoint reader exposing ``get_tensor(name)``.

    Returns:
        Tuple ``(f_image, feature)``: the fused image and the concatenated
        encoding features.
    """
    # Originally bare `tf.variable_scope`; unified on tf.compat.v1 for
    # consistency with the rest of the file.
    # NOTE(review): scope is "STDFusion_model" while checkpoint keys use
    # "STMFusion_model" — looks intentional (weights are constants), confirm.
    with tf.compat.v1.variable_scope("STDFusion_model"):
        vi_encoding_feature = self.vi_feature_extraction_network(vi_image, reader)
        ir_encoding_feature = self.ir_feature_extraction_network(ir_image, reader)
        feature = tf.concat([vi_encoding_feature, ir_encoding_feature], axis=-1)
        f_image = self.feature_reconstruction_network(feature, reader)
        return f_image, feature
| 73.280928
| 125
| 0.58302
| 3,207
| 28,433
| 4.96227
| 0.029623
| 0.015081
| 0.07352
| 0.058816
| 0.946085
| 0.940241
| 0.939864
| 0.933643
| 0.925851
| 0.925851
| 0
| 0.034509
| 0.301868
| 28,433
| 387
| 126
| 73.470284
| 0.767204
| 0.064854
| 0
| 0.655385
| 0
| 0
| 0.172643
| 0.148024
| 0
| 0
| 0
| 0
| 0
| 1
| 0.015385
| false
| 0
| 0.006154
| 0
| 0.04
| 0.006154
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
c939b24b1498978a3a7bb9f2c23dabb0c08e92fa
| 923
|
py
|
Python
|
Pyllurium/SubAtomic.py
|
kajchang/Pyllerium
|
8e42218bcdbc19b9868e3a971826c64119f815eb
|
[
"MIT"
] | null | null | null |
Pyllurium/SubAtomic.py
|
kajchang/Pyllerium
|
8e42218bcdbc19b9868e3a971826c64119f815eb
|
[
"MIT"
] | null | null | null |
Pyllurium/SubAtomic.py
|
kajchang/Pyllerium
|
8e42218bcdbc19b9868e3a971826c64119f815eb
|
[
"MIT"
] | null | null | null |
from Pyllurium.Particle import Particle
class SubAtomic(Particle):
    """Base class for subatomic particles.

    Stores a reference to the owning container (presumably an Atom) in
    ``parent``; ``None`` means the particle is free.
    """

    def __init__(self, parent=None):
        # NOTE(review): does not call super().__init__() — confirm Particle
        # requires no initialization of its own.
        self.parent = parent
class Electron(SubAtomic):
    """Electron: charge -1, mass expressed relative to a proton."""
    # __init__ removed: the original only forwarded parent to super() with
    # an identical signature, so the inherited SubAtomic.__init__ suffices.

    @property
    def symbol(self):
        return 'e'

    @property
    def mass(self):
        # Electron/proton mass ratio as used throughout this file (~1/1836
        # physically; the project uses 1/1840).
        return 1 / 1840

    @property
    def charge(self):
        return -1
class Proton(SubAtomic):
    """Proton: charge +1, unit mass (masses are in proton-mass units)."""
    # __init__ removed: the original only forwarded parent to super() with
    # an identical signature, so the inherited SubAtomic.__init__ suffices.

    @property
    def symbol(self):
        return 'p'

    @property
    def mass(self):
        return 1

    @property
    def charge(self):
        return 1
class Neutron(SubAtomic):
    """Neutron: neutral charge, unit mass (masses are in proton-mass units)."""
    # __init__ removed: the original only forwarded parent to super() with
    # an identical signature, so the inherited SubAtomic.__init__ suffices.

    @property
    def symbol(self):
        return 'n'

    @property
    def mass(self):
        return 1

    @property
    def charge(self):
        return 0
| 15.913793
| 39
| 0.588299
| 104
| 923
| 4.951923
| 0.25
| 0.192233
| 0.106796
| 0.132039
| 0.792233
| 0.751456
| 0.700971
| 0.625243
| 0.625243
| 0.625243
| 0
| 0.015699
| 0.309859
| 923
| 57
| 40
| 16.192982
| 0.792779
| 0
| 0
| 0.7
| 0
| 0
| 0.00325
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.325
| false
| 0
| 0.025
| 0.225
| 0.675
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 6
|
c95f9b3a99426d8124d1f4fdade0417241063d32
| 146
|
py
|
Python
|
exercicios-Python/desaf112/pythonteste.py
|
marcelo-py/Exercicios-Python
|
d654d54821983897dbc377a2d3db97671dd75b5b
|
[
"MIT"
] | null | null | null |
exercicios-Python/desaf112/pythonteste.py
|
marcelo-py/Exercicios-Python
|
d654d54821983897dbc377a2d3db97671dd75b5b
|
[
"MIT"
] | null | null | null |
exercicios-Python/desaf112/pythonteste.py
|
marcelo-py/Exercicios-Python
|
d654d54821983897dbc377a2d3db97671dd75b5b
|
[
"MIT"
] | null | null | null |
# Exercise 112: read a price from the user and print a money summary
# (20% raise, 12% discount) using the course's utility package.
from desaf112.utilidadescev import moeda
from desaf112.utilidadescev import dado

preco = dado.dinheiro('Digite um preço: R$')
moeda.resumo(preco, 20, 12)
| 29.2
| 40
| 0.780822
| 22
| 146
| 5.181818
| 0.681818
| 0.210526
| 0.438596
| 0.54386
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.077519
| 0.116438
| 146
| 4
| 41
| 36.5
| 0.806202
| 0
| 0
| 0
| 0
| 0
| 0.130137
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
a3954fdd4f981c73fb84a56f5bcae6f8736b1bbf
| 148
|
py
|
Python
|
website/user_list.py
|
megaraph/johnian-network
|
1289323d17444d56efa3c92cbe49f9b2df010e9f
|
[
"MIT"
] | 1
|
2021-12-15T09:39:57.000Z
|
2021-12-15T09:39:57.000Z
|
website/user_list.py
|
megaraph/johnian-network
|
1289323d17444d56efa3c92cbe49f9b2df010e9f
|
[
"MIT"
] | null | null | null |
website/user_list.py
|
megaraph/johnian-network
|
1289323d17444d56efa3c92cbe49f9b2df010e9f
|
[
"MIT"
] | null | null | null |
''' Contains dictionaries of user associations '''

# Maps a user's email address to whether they are a teacher (a bool).
# The original file shipped literal placeholders wrapped in typographic
# quotes (‘<email>’:<is_teacher>), which is a SyntaxError; the entries are
# kept below as a commented template so the module imports cleanly.
users = {
    # '<email>': <is_teacher>,   e.g. 'jane@school.edu': True
}
| 18.5
| 50
| 0.594595
| 15
| 148
| 5.666667
| 0.6
| 0.247059
| 0.494118
| 0.447059
| 0.494118
| 0.494118
| 0
| 0
| 0
| 0
| 0
| 0
| 0.182432
| 148
| 7
| 51
| 21.142857
| 0.702479
| 0
| 0
| 0.6
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
a399fb48b4c6a41d21085eb5f12a7fc8765bd98e
| 114
|
py
|
Python
|
discoin/__init__.py
|
Discoin/discoin.py
|
4a3459dfaab6695fe88d05290465a1b7842b3606
|
[
"MIT"
] | 2
|
2020-07-26T11:29:47.000Z
|
2021-09-08T22:38:35.000Z
|
discoin/__init__.py
|
Discoin/discoin.py
|
4a3459dfaab6695fe88d05290465a1b7842b3606
|
[
"MIT"
] | 8
|
2020-02-11T14:23:38.000Z
|
2021-04-16T21:38:15.000Z
|
discoin/__init__.py
|
Discoin/discoin.py
|
4a3459dfaab6695fe88d05290465a1b7842b3606
|
[
"MIT"
] | null | null | null |
from .client import Client
from .classes import *
from .errors import *
from .config import VERSION as __version__
| 28.5
| 42
| 0.798246
| 16
| 114
| 5.4375
| 0.5
| 0.229885
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.149123
| 114
| 4
| 42
| 28.5
| 0.896907
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
6e84f7267a76420227c7d7b9042b7278b234726d
| 33
|
py
|
Python
|
chrome_webstore_download/__init__.py
|
jaymoulin/docker-google-chrome-webstore-download
|
343807e839934df20df0135249ba22cfb7575a9c
|
[
"MIT"
] | 3
|
2017-11-15T23:40:14.000Z
|
2019-10-22T01:46:43.000Z
|
chrome_webstore_download/__init__.py
|
jaymoulin/docker-google-chrome-webstore-download
|
343807e839934df20df0135249ba22cfb7575a9c
|
[
"MIT"
] | 1
|
2017-11-19T10:20:17.000Z
|
2017-11-22T08:00:58.000Z
|
chrome_webstore_download/__init__.py
|
jaymoulin/docker-google-chrome-webstore-download
|
343807e839934df20df0135249ba22cfb7575a9c
|
[
"MIT"
] | 1
|
2020-07-16T17:32:25.000Z
|
2020-07-16T17:32:25.000Z
|
"""
"""
from .download import *
| 6.6
| 23
| 0.545455
| 3
| 33
| 6
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.212121
| 33
| 4
| 24
| 8.25
| 0.692308
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
6e91b62fc6f394ffd4ad5be3b618ba384d6577e1
| 59
|
py
|
Python
|
test/test_import.py
|
GassiusODude/py-boring-stuff
|
501e134bdf2b84b29dd775bdf83f5c7f89c1c28d
|
[
"MIT"
] | null | null | null |
test/test_import.py
|
GassiusODude/py-boring-stuff
|
501e134bdf2b84b29dd775bdf83f5c7f89c1c28d
|
[
"MIT"
] | 10
|
2020-06-28T21:23:27.000Z
|
2021-08-13T11:52:14.000Z
|
test/test_import.py
|
GassiusODude/py-boring-stuff
|
501e134bdf2b84b29dd775bdf83f5c7f89c1c28d
|
[
"MIT"
] | null | null | null |
import boring_stuff
def test_imports():
import inspect
| 14.75
| 19
| 0.779661
| 8
| 59
| 5.5
| 0.875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.169492
| 59
| 4
| 20
| 14.75
| 0.897959
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 1
| 0
| 1.333333
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
6ec6b07fdeac970f7d4064790c9f7ed029192247
| 48
|
py
|
Python
|
store_locator/forms.py
|
waustin/django-store-locator
|
2cbc31af068494401e93360a2560a7162031bfae
|
[
"MIT"
] | 1
|
2021-12-18T12:45:58.000Z
|
2021-12-18T12:45:58.000Z
|
store_locator/forms.py
|
waustin/django-store-locator
|
2cbc31af068494401e93360a2560a7162031bfae
|
[
"MIT"
] | null | null | null |
store_locator/forms.py
|
waustin/django-store-locator
|
2cbc31af068494401e93360a2560a7162031bfae
|
[
"MIT"
] | null | null | null |
from django import forms
# Add your forms here
| 12
| 24
| 0.770833
| 8
| 48
| 4.625
| 0.875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.208333
| 48
| 3
| 25
| 16
| 0.973684
| 0.395833
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.