hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
08c1ce03203727ebf7c8655e35027b9c40964b06
| 11,404
|
py
|
Python
|
opentsp/reducers.py
|
james-langbein/OpenTSP
|
20b8cef4dd0800ad032842d2caa6c6bddafa79e3
|
[
"MIT"
] | null | null | null |
opentsp/reducers.py
|
james-langbein/OpenTSP
|
20b8cef4dd0800ad032842d2caa6c6bddafa79e3
|
[
"MIT"
] | null | null | null |
opentsp/reducers.py
|
james-langbein/OpenTSP
|
20b8cef4dd0800ad032842d2caa6c6bddafa79e3
|
[
"MIT"
] | null | null | null |
from opentsp import helpers
def diamond_prune(instance):
# def list_prune(ls):
# var = True # may not be needed
# while var is True: # may not be needed
# bad_count = 0 # may not be needed
# semi_pruned_ls = [i for i in ls if i.fitness == 'good'] # may not be needed
# last_good_index = 0
# for index, edge in enumerate(semi_pruned_ls):
# if index == 0 or index == len(semi_pruned_ls) - 1:
# pass
# elif edge.length_ > semi_pruned_ls[last_good_index].length_ \
# or edge.length_ > semi_pruned_ls[index + 1].length_:
# edge.fitness = 'bad'
# # for i in instance.edges.values():
# # if i == edge:
# # i.fitness = 'bad'
# bad_count += 1 # may not be needed
# else:
# last_good_index = index
# if bad_count == 0: # may not be needed
# var = False # may not be needed
# pruned_ls = semi_pruned_ls # may not be needed
# return pruned_ls
#
# def list_prune_v2(ls, n): # changed 'bad' count check to a while loop less than three check, added another test
# loop = 1 # may not be needed
# while loop < n + 1: # may not be needed
# semi_pruned_ls = [i for i in ls if i.fitness == 'good'] # may not be needed
# if len(semi_pruned_ls) <= 2:
# break
# last_good_index = 0
# for index, edge in enumerate(semi_pruned_ls):
# if index == 0 or index == len(semi_pruned_ls) - 1:
# pass
# elif edge.length_ > semi_pruned_ls[last_good_index].length_ or \
# edge.length_ > semi_pruned_ls[index + 1].length_:
# edge.fitness = 'bad'
# else:
# last_good_index = index
# loop += 1
# if len(semi_pruned_ls) == 3:
# if semi_pruned_ls[1].length_ < semi_pruned_ls[0].length_ and \
# semi_pruned_ls[1].length_ < semi_pruned_ls[2].length_:
# if abs(helpers.angle(semi_pruned_ls[1].node_one, semi_pruned_ls[1].node_two,
# semi_pruned_ls[0].node_two)) < \
# abs(helpers.angle(semi_pruned_ls[1].node_one, semi_pruned_ls[1].node_two,
# semi_pruned_ls[2].node_two)):
# del semi_pruned_ls[0]
# else:
# del semi_pruned_ls[2]
# # add an else here in version 3 for when the middle edge is the middle length
# pruned_ls = semi_pruned_ls # may not be needed
# return pruned_ls
#
# def list_prune_v3(ls):
# loop = 1
# while loop < 3:
# # sp_ls stands for semi-pruned-ls
# sp_ls = [i for i in ls if i.fitness == 'good']
# if len(sp_ls) <= 3:
# break
# last_good_index = 0
# for index, edge in enumerate(sp_ls):
# if index == 0 or index == len(sp_ls) - 1:
# pass
# elif edge.length_ > sp_ls[last_good_index].length_ or edge.length_ > sp_ls[index + 1].length_:
# edge.fitness = 'bad'
# # for e in inst.edges.values():
# # if e == edge:
# # e.fitness = 'bad'
# else:
# last_good_index = index
# loop += 1
# if len(sp_ls) == 3:
# ang_to_0 = helpers.angle(sp_ls[1].node_one, sp_ls[1].node_two, sp_ls[0].node_two)
# ang_to_2 = helpers.angle(sp_ls[1].node_one, sp_ls[1].node_two, sp_ls[2].node_two)
# # if middle edge of remaining 3 is the shortest:
# if sp_ls[1].length_ < sp_ls[0].length_ and sp_ls[1].length_ < sp_ls[2].length_:
# # if angle to edge_0 in sp_ls is less then angle to edge_2 in sp_ls:
# if abs(ang_to_0) < abs(ang_to_2):
# del sp_ls[0]
# else:
# del sp_ls[2]
# elif sp_ls[1].length_ > sp_ls[0].length_ or sp_ls[1].length_ > sp_ls[2].length_:
# if abs(ang_to_0) < abs(ang_to_2) and sp_ls[0].length_ < sp_ls[2].length_:
# del sp_ls[1]
# elif abs(ang_to_0) < abs(ang_to_2) and sp_ls[0].length_ > sp_ls[2].length_:
# del sp_ls[0]
# elif abs(ang_to_0) > abs(ang_to_2) and sp_ls[0].length_ > sp_ls[2].length_:
# del sp_ls[1]
# elif abs(ang_to_0) > abs(ang_to_2) and sp_ls[0].length_ < sp_ls[2].length_:
# del sp_ls[2]
# else:
# print('The middle was the shortest but no edge was removed.')
# if len(sp_ls) > 3:
# print(f'The semi-pruned list had more three edges remaining on this iteration.')
# pruned_ls = sp_ls # may not be needed
# return pruned_ls
#
# def list_prune_v4(ls):
# while True:
# # sp_ls stands for semi-pruned-ls
# sp_ls = [i for i in ls if i.fitness == 'good']
# if len(sp_ls) <= 3:
# break
# last_good_index = 0
# for index, edge in enumerate(sp_ls):
# if index == 0 or index == len(sp_ls) - 1:
# pass
# elif edge.length_ > sp_ls[last_good_index].length_ or edge.length_ > sp_ls[index + 1].length_:
# edge.fitness = 'bad'
# # for e in inst.edges.values():
# # if e == edge:
# # e.fitness = 'bad'
# else:
# last_good_index = index
# if len(sp_ls) == 3:
# ang_to_0 = helpers.angle(sp_ls[1].node_one, sp_ls[1].node_two, sp_ls[0].node_two)
# ang_to_2 = helpers.angle(sp_ls[1].node_one, sp_ls[1].node_two, sp_ls[2].node_two)
# # if middle edge is the shortest:
# if sp_ls[1].length_ < sp_ls[0].length_ and sp_ls[1].length_ < sp_ls[2].length_:
# # if angle to edge_0 in sp_ls is less then angle to edge_2 in sp_ls:
# if abs(ang_to_0) < abs(ang_to_2):
# del sp_ls[0]
# else:
# del sp_ls[2]
# # else if middle edge is the middle length:
# elif sp_ls[1].length_ > sp_ls[0].length_ or sp_ls[1].length_ > sp_ls[2].length_:
# # comment what these ifs represent
# if abs(ang_to_0) < abs(ang_to_2) and sp_ls[0].length_ < sp_ls[2].length_:
# del sp_ls[1]
# elif abs(ang_to_0) < abs(ang_to_2) and sp_ls[0].length_ > sp_ls[2].length_:
# del sp_ls[0]
# elif abs(ang_to_0) > abs(ang_to_2) and sp_ls[0].length_ > sp_ls[2].length_:
# del sp_ls[1]
# elif abs(ang_to_0) > abs(ang_to_2) and sp_ls[0].length_ < sp_ls[2].length_:
# del sp_ls[2]
# else:
# print('The middle was the shortest but no edge was removed.')
# if len(sp_ls) > 3:
# print('The semi-pruned list had more three edges remaining on this iteration.')
# elif len(sp_ls) == 2:
# print('The semi-pruned list had two edges remaining on this iteration.')
# pruned_ls = sp_ls # may not be needed
# return pruned_ls
def list_prune_v5(ls):
while True:
# sp_ls stands for semi-pruned-ls
sp_ls = [i for i in ls if i.fitness == 'good']
if len(sp_ls) <= 4:
break
last_good_index = 0
for index, edge in enumerate(sp_ls):
if index == 0 or index == len(sp_ls) - 1:
pass
# if the edge is the middle in length:
elif edge.length_ > sp_ls[last_good_index].length_ or edge.length_ > sp_ls[index + 1].length_:
# print('Calculating angles.')
# if max(abs(sp_ls[last_good_index].angle), abs(sp_ls[index + 1].length)) - \
# min(abs(sp_ls[last_good_index].angle), abs(sp_ls[index + 1].length)) > 120:
edge.fitness = 'bad'
else:
last_good_index = index
if len(sp_ls) == 3:
ang_to_0 = helpers.angle(sp_ls[1].node_one, sp_ls[1].node_two, sp_ls[0].node_two)
ang_to_2 = helpers.angle(sp_ls[1].node_one, sp_ls[1].node_two, sp_ls[2].node_two)
# if middle edge is the shortest:
if sp_ls[1].length_ < sp_ls[0].length_ and sp_ls[1].length_ < sp_ls[2].length_:
# if angle to edge_0 in sp_ls is less then angle to edge_2 in sp_ls:
if abs(ang_to_0) < abs(ang_to_2):
del sp_ls[0]
else:
del sp_ls[2]
# else if middle edge is the middle length:
elif sp_ls[1].length_ > sp_ls[0].length_ or sp_ls[1].length_ > sp_ls[2].length_:
# comment what these ifs represent
if abs(ang_to_0) < abs(ang_to_2) and sp_ls[0].length_ < sp_ls[2].length_:
del sp_ls[1]
elif abs(ang_to_0) < abs(ang_to_2) and sp_ls[0].length_ > sp_ls[2].length_:
del sp_ls[0]
elif abs(ang_to_0) > abs(ang_to_2) and sp_ls[0].length_ > sp_ls[2].length_:
del sp_ls[1]
elif abs(ang_to_0) > abs(ang_to_2) and sp_ls[0].length_ < sp_ls[2].length_:
del sp_ls[2]
else:
print('The middle was the shortest but no edge was removed.')
if len(sp_ls) > 3:
print('The semi-pruned list had more three edges remaining on this iteration.')
elif len(sp_ls) == 2:
print('The semi-pruned list had two edges remaining on this iteration.')
pruned_ls = sp_ls # may not be needed
return pruned_ls
# populate edge angles
# eap_start = time.time()
# edg_count = 0
for edge in instance.edges.values():
# edg_count += 1
# print(f'Populating edge angle: {edg_count}')
edge.angle = helpers.angle(edge.node_one, instance.average_node, edge.node_two)
# eap_end = time.time()
# for each node, list the edges for which it is the origin, and which have good fitness
good_edges = []
# node_count = 0
# pn_start = time.time()
for node in instance.nodes.values():
# node_count += 1
# print(node_count)
ls = [i for i in instance.edges.values() if i.node_one == node]
ls.sort(key=lambda x: x.angle)
# n_start = time.time()
good = list_prune_v5(ls) # prune the list of edges
# n_end = time.time()
# print(f'Time taken for this node: {n_end - n_start}')
# print(instance.edges)
good_edges.extend(good) # this may not be needed...
# pn_end = time.time()
# print(f'Populating edge angles took: {eap_end - eap_start}')
# print(f'Processing the nodes took: {pn_end - pn_start}')
| 50.460177
| 118
| 0.514293
| 1,651
| 11,404
| 3.288916
| 0.079346
| 0.092818
| 0.030387
| 0.036464
| 0.819153
| 0.79116
| 0.781768
| 0.775875
| 0.753407
| 0.753407
| 0
| 0.02768
| 0.379078
| 11,404
| 225
| 119
| 50.684444
| 0.739161
| 0.683795
| 0
| 0.1875
| 0
| 0
| 0.056504
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.041667
| false
| 0.020833
| 0.020833
| 0
| 0.083333
| 0.0625
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
3e9b47c6499dae36c99eebabc156e1b8d2ff70e5
| 132
|
py
|
Python
|
raiden_contracts/tests/utils/__init__.py
|
karlb/raiden-contracts
|
944eb6aa4cc0189caab5b735b46bb6fb72ad5658
|
[
"MIT"
] | 49
|
2018-03-18T07:25:46.000Z
|
2022-03-11T14:07:18.000Z
|
raiden_contracts/tests/utils/__init__.py
|
karlb/raiden-contracts
|
944eb6aa4cc0189caab5b735b46bb6fb72ad5658
|
[
"MIT"
] | 1,378
|
2018-03-13T03:41:06.000Z
|
2022-03-28T23:19:12.000Z
|
raiden_contracts/tests/utils/__init__.py
|
karlb/raiden-contracts
|
944eb6aa4cc0189caab5b735b46bb6fb72ad5658
|
[
"MIT"
] | 55
|
2018-03-21T14:37:27.000Z
|
2022-02-07T10:31:59.000Z
|
# flake8: noqa
from .address import *
from .channel import *
from .constants import *
from .contracts import *
from .mock import *
| 16.5
| 24
| 0.727273
| 17
| 132
| 5.647059
| 0.529412
| 0.416667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.009259
| 0.181818
| 132
| 7
| 25
| 18.857143
| 0.87963
| 0.090909
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
3ec4e9bd554a490aa01de9a036092a04cfc87f0a
| 29
|
py
|
Python
|
setup.py
|
anqurvanillapy/presentaski
|
d375b888879abe464e70fe1fe7c1082a2a5e0785
|
[
"MIT"
] | 24
|
2019-07-20T22:37:09.000Z
|
2021-07-07T07:13:56.000Z
|
setup.py
|
anqurvanillapy/presentaski
|
d375b888879abe464e70fe1fe7c1082a2a5e0785
|
[
"MIT"
] | 3
|
2021-05-10T05:29:59.000Z
|
2022-02-10T00:15:05.000Z
|
setup.py
|
anqurvanillapy/presentaski
|
d375b888879abe464e70fe1fe7c1082a2a5e0785
|
[
"MIT"
] | 8
|
2019-08-09T17:30:20.000Z
|
2021-12-01T13:27:46.000Z
|
from setuptools import setup
| 14.5
| 28
| 0.862069
| 4
| 29
| 6.25
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.137931
| 29
| 1
| 29
| 29
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
4110b685b7c3efb41ed2c84c1dc208a8344732bb
| 76
|
py
|
Python
|
src/quicknlp/callbacks.py
|
jalajthanaki/quick-nlp
|
861a54c9e30de076a2316cb6712d934de4058cc5
|
[
"MIT"
] | 287
|
2018-04-10T10:58:09.000Z
|
2022-03-22T02:05:40.000Z
|
src/quicknlp/callbacks.py
|
scutcyr/quick-nlp
|
861a54c9e30de076a2316cb6712d934de4058cc5
|
[
"MIT"
] | 1
|
2018-07-03T17:10:03.000Z
|
2018-07-03T17:10:03.000Z
|
src/quicknlp/callbacks.py
|
scutcyr/quick-nlp
|
861a54c9e30de076a2316cb6712d934de4058cc5
|
[
"MIT"
] | 51
|
2018-04-10T11:38:02.000Z
|
2021-10-17T06:23:43.000Z
|
from fastai.sgdr import Callback
class CVAELossCallback(Callback):
pass
| 15.2
| 33
| 0.802632
| 9
| 76
| 6.777778
| 0.888889
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.144737
| 76
| 5
| 34
| 15.2
| 0.938462
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.333333
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 6
|
f5f41b04bdef9371c3382cf0095ebebb32a8196c
| 169
|
py
|
Python
|
airline/flights/admin.py
|
VToropov1337/django_airline
|
295a0f97a65edb0c76a38a5aa903665bf0c01765
|
[
"MIT"
] | null | null | null |
airline/flights/admin.py
|
VToropov1337/django_airline
|
295a0f97a65edb0c76a38a5aa903665bf0c01765
|
[
"MIT"
] | null | null | null |
airline/flights/admin.py
|
VToropov1337/django_airline
|
295a0f97a65edb0c76a38a5aa903665bf0c01765
|
[
"MIT"
] | null | null | null |
from django.contrib import admin
from .models import Airport, Flight, Passenger
admin.site.register(Airport)
admin.site.register(Flight)
admin.site.register(Passenger)
| 24.142857
| 46
| 0.822485
| 23
| 169
| 6.043478
| 0.478261
| 0.194245
| 0.366906
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.08284
| 169
| 6
| 47
| 28.166667
| 0.896774
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.4
| 0.4
| 0
| 0.4
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
|
0
| 6
|
eb18f87f2a6ef17d2166fa2c4f51f0de8e68836c
| 143
|
py
|
Python
|
test/com/facebook/buck/cli/testdata/run-command/cmd/echo_var.py
|
Unknoob/buck
|
2dfc734354b326f2f66896dde7746a11965d5a13
|
[
"Apache-2.0"
] | 8,027
|
2015-01-02T05:31:44.000Z
|
2022-03-31T07:08:09.000Z
|
test/com/facebook/buck/cli/testdata/run-command/cmd/echo_var.py
|
Unknoob/buck
|
2dfc734354b326f2f66896dde7746a11965d5a13
|
[
"Apache-2.0"
] | 2,355
|
2015-01-01T15:30:53.000Z
|
2022-03-30T20:21:16.000Z
|
test/com/facebook/buck/cli/testdata/run-command/cmd/echo_var.py
|
Unknoob/buck
|
2dfc734354b326f2f66896dde7746a11965d5a13
|
[
"Apache-2.0"
] | 1,280
|
2015-01-09T03:29:04.000Z
|
2022-03-30T15:14:14.000Z
|
from __future__ import absolute_import, division, print_function, unicode_literals
import os
print("VAR is '{}'".format(os.environ["VAR"]))
| 20.428571
| 82
| 0.762238
| 19
| 143
| 5.368421
| 0.736842
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.104895
| 143
| 6
| 83
| 23.833333
| 0.796875
| 0
| 0
| 0
| 0
| 0
| 0.097902
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0.666667
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 1
|
0
| 6
|
de8042d584231abcb103312139a99500ecfab225
| 100
|
py
|
Python
|
demo/api/jobs/__init__.py
|
benranderson/demo
|
b27834c79b19b478c917edced8e170122a0f7113
|
[
"MIT"
] | 1
|
2019-11-01T09:43:19.000Z
|
2019-11-01T09:43:19.000Z
|
demo/api/jobs/__init__.py
|
benranderson/demo
|
b27834c79b19b478c917edced8e170122a0f7113
|
[
"MIT"
] | 12
|
2019-09-30T22:35:20.000Z
|
2019-10-12T23:39:01.000Z
|
demo/api/jobs/__init__.py
|
benranderson/demo
|
b27834c79b19b478c917edced8e170122a0f7113
|
[
"MIT"
] | 1
|
2019-11-13T12:19:17.000Z
|
2019-11-13T12:19:17.000Z
|
from flask import Blueprint
jobs_bp = Blueprint("jobs", __name__)
from demo.api.jobs import views
| 16.666667
| 37
| 0.78
| 15
| 100
| 4.866667
| 0.666667
| 0.356164
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.14
| 100
| 5
| 38
| 20
| 0.848837
| 0
| 0
| 0
| 0
| 0
| 0.04
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.666667
| 0
| 0.666667
| 0.666667
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 1
|
0
| 6
|
dec164630cfbc43375279b1efaac08306afb6ace
| 303
|
py
|
Python
|
src/python/WMComponent/DBS3Buffer/Oracle/UpdateAlgo.py
|
khurtado/WMCore
|
f74e252412e49189a92962945a94f93bec81cd1e
|
[
"Apache-2.0"
] | 21
|
2015-11-19T16:18:45.000Z
|
2021-12-02T18:20:39.000Z
|
src/python/WMComponent/DBS3Buffer/Oracle/UpdateAlgo.py
|
khurtado/WMCore
|
f74e252412e49189a92962945a94f93bec81cd1e
|
[
"Apache-2.0"
] | 5,671
|
2015-01-06T14:38:52.000Z
|
2022-03-31T22:11:14.000Z
|
src/python/WMComponent/DBS3Buffer/Oracle/UpdateAlgo.py
|
khurtado/WMCore
|
f74e252412e49189a92962945a94f93bec81cd1e
|
[
"Apache-2.0"
] | 67
|
2015-01-21T15:55:38.000Z
|
2022-02-03T19:53:13.000Z
|
#!/usr/bin/env python
"""
_DBSBuffer.UpdateAlgo_
Add PSetHash to Algo in DBS Buffer
"""
from WMComponent.DBS3Buffer.MySQL.UpdateAlgo import UpdateAlgo as MySQLUpdateAlgo
class UpdateAlgo(MySQLUpdateAlgo):
"""
_DBSBuffer.UpdateAlgo_
Add PSetHash to Algo in DBS Buffer
"""
pass
| 15.15
| 81
| 0.726073
| 35
| 303
| 6.171429
| 0.628571
| 0.175926
| 0.203704
| 0.277778
| 0.435185
| 0.435185
| 0.435185
| 0.435185
| 0.435185
| 0
| 0
| 0.004082
| 0.191419
| 303
| 19
| 82
| 15.947368
| 0.877551
| 0.455446
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.333333
| 0.333333
| 0
| 0.666667
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 6
|
dee8087bc4133c328f5c4ece67a40882bd7c4250
| 342
|
py
|
Python
|
silversaucer/services/today_service.py
|
prcutler/silversaucer
|
aff67757da934c0fe7a8c71c6b239356d737f701
|
[
"MIT"
] | 2
|
2020-06-27T13:55:19.000Z
|
2021-12-10T17:40:39.000Z
|
silversaucer/services/today_service.py
|
prcutler/silversaucer
|
aff67757da934c0fe7a8c71c6b239356d737f701
|
[
"MIT"
] | 23
|
2019-06-20T13:45:34.000Z
|
2022-03-10T10:23:21.000Z
|
silversaucer/services/today_service.py
|
prcutler/silversaucer
|
aff67757da934c0fe7a8c71c6b239356d737f701
|
[
"MIT"
] | null | null | null |
import requests
import silversaucer.data.config as config
class AlbumInfo:
def album_release():
pass
def album_parent_release():
pass
def first_release():
pass
def album_anniversary():
pass
class ArtistInfo:
def artist_birthday():
pass
def artist_death():
pass
| 13.153846
| 41
| 0.616959
| 37
| 342
| 5.513514
| 0.513514
| 0.137255
| 0.205882
| 0.186275
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.318713
| 342
| 25
| 42
| 13.68
| 0.875536
| 0
| 0
| 0.375
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.375
| true
| 0.375
| 0.125
| 0
| 0.625
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 1
| 0
|
0
| 6
|
723949c7a3257f7818be3e37d8ccd1dd402ec79f
| 105,843
|
py
|
Python
|
nfv/nfv-vim/nfv_vim/event_log/_instance.py
|
SidneyAn/nfv
|
5f0262a5b6ea4be59f977b9c587c483cbe0e373d
|
[
"Apache-2.0"
] | 2
|
2020-02-07T19:01:36.000Z
|
2022-02-23T01:41:46.000Z
|
nfv/nfv-vim/nfv_vim/event_log/_instance.py
|
SidneyAn/nfv
|
5f0262a5b6ea4be59f977b9c587c483cbe0e373d
|
[
"Apache-2.0"
] | 1
|
2021-01-14T12:02:25.000Z
|
2021-01-14T12:02:25.000Z
|
nfv/nfv-vim/nfv_vim/event_log/_instance.py
|
SidneyAn/nfv
|
5f0262a5b6ea4be59f977b9c587c483cbe0e373d
|
[
"Apache-2.0"
] | 2
|
2021-01-13T08:39:21.000Z
|
2022-02-09T00:21:55.000Z
|
#
# Copyright (c) 2015-2016 Wind River Systems, Inc.
#
# SPDX-License-Identifier: Apache-2.0
#
import six
from nfv_common import event_log
# Log Template Definitions
# *** Don't add a period to the end of reason_text, these are not sentences.
_event_templates = {
event_log.EVENT_ID.INSTANCE_RENAMED: {
'entity_type': "instance",
'entity': "instance=%(instance_uuid)s",
'event_type': event_log.EVENT_TYPE.ACTION_EVENT,
'importance': event_log.EVENT_IMPORTANCE.HIGH,
'reason_text': "Instance %(instance_name)s has been renamed to "
"%(additional_text)s",
'exclude_event_context': [],
'event_context_data': {
event_log.EVENT_CONTEXT.ADMIN: {
'entity_type': "tenant.instance",
'entity': "tenant=%(tenant_uuid)s.instance=%(instance_uuid)s",
'reason_text': "Instance %(instance_name)s has been renamed "
"to %(additional_text)s owned by %(tenant_name)s "
"on host %(host_name)s",
}
}
},
event_log.EVENT_ID.INSTANCE_ENABLED: {
'entity_type': "instance",
'entity': "instance=%(instance_uuid)s",
'event_type': event_log.EVENT_TYPE.ACTION_EVENT,
'importance': event_log.EVENT_IMPORTANCE.HIGH,
'reason_text': "Instance %(instance_name)s is enabled",
'exclude_event_context': [],
'event_context_data': {
event_log.EVENT_CONTEXT.ADMIN: {
'entity_type': "tenant.instance",
'entity': "tenant=%(tenant_uuid)s.instance=%(instance_uuid)s",
'reason_text': "Instance %(instance_name)s is enabled on host "
"%(host_name)s",
}
}
},
event_log.EVENT_ID.INSTANCE_FAILED: {
'entity_type': "instance",
'entity': "instance=%(instance_uuid)s",
'event_type': event_log.EVENT_TYPE.ACTION_EVENT,
'importance': event_log.EVENT_IMPORTANCE.HIGH,
'reason_text': "Instance %(instance_name)s has failed",
'exclude_event_context': [],
'event_context_data': {
event_log.EVENT_CONTEXT.ADMIN: {
'entity_type': "tenant.instance",
'entity': "tenant=%(tenant_uuid)s.instance=%(instance_uuid)s",
'reason_text': "Instance %(instance_name)s owned by "
"%(tenant_name)s has failed on host "
"%(host_name)s%(reason)s",
}
}
},
event_log.EVENT_ID.INSTANCE_SCHEDULING_FAILED: {
'entity_type': "instance",
'entity': "instance=%(instance_uuid)s",
'event_type': event_log.EVENT_TYPE.ACTION_EVENT,
'importance': event_log.EVENT_IMPORTANCE.HIGH,
'reason_text': "Instance %(instance_name)s has failed to schedule",
'exclude_event_context': [],
'event_context_data': {
event_log.EVENT_CONTEXT.ADMIN: {
'entity_type': "tenant.instance",
'entity': "tenant=%(tenant_uuid)s.instance=%(instance_uuid)s",
'reason_text': "Instance %(instance_name)s owned by "
"%(tenant_name)s has failed to schedule%(reason)s",
}
}
},
event_log.EVENT_ID.INSTANCE_CREATE_BEGIN: {
'entity_type': "instance",
'entity': "instance=%(instance_uuid)s",
'event_type': event_log.EVENT_TYPE.ACTION_EVENT,
'importance': event_log.EVENT_IMPORTANCE.HIGH,
'reason_text': "Create issued against instance %(instance_name)s",
'exclude_event_context': [],
'event_context_data': {
event_log.EVENT_CONTEXT.ADMIN: {
'entity_type': "tenant.instance",
'entity': "tenant=%(tenant_uuid)s.instance=%(instance_uuid)s",
'reason_text': "Create issued %(initiated_text)s against instance "
"%(instance_name)s owned by %(tenant_name)s",
}
}
},
event_log.EVENT_ID.INSTANCE_CREATING: {
'entity_type': "instance",
'entity': "instance=%(instance_uuid)s",
'event_type': event_log.EVENT_TYPE.ACTION_EVENT,
'importance': event_log.EVENT_IMPORTANCE.HIGH,
'reason_text': "Creating instance %(instance_name)s",
'exclude_event_context': [],
'event_context_data': {
event_log.EVENT_CONTEXT.ADMIN: {
'entity_type': "tenant.instance",
'entity': "tenant=%(tenant_uuid)s.instance=%(instance_uuid)s",
'reason_text': "Creating instance %(instance_name)s owned by "
"%(tenant_name)s",
}
}
},
event_log.EVENT_ID.INSTANCE_CREATE_REJECTED: {
'entity_type': "instance",
'entity': "instance=%(instance_uuid)s",
'event_type': event_log.EVENT_TYPE.ACTION_EVENT,
'importance': event_log.EVENT_IMPORTANCE.HIGH,
'reason_text': "Create rejected for instance %(instance_name)s%(reason)s",
'exclude_event_context': [],
'event_context_data': {
event_log.EVENT_CONTEXT.ADMIN: {
'entity_type': "tenant.instance",
'entity': "tenant=%(tenant_uuid)s.instance=%(instance_uuid)s",
'reason_text': "Create rejected for instance %(instance_name)s"
"%(reason)s",
}
}
},
event_log.EVENT_ID.INSTANCE_CREATE_CANCELLED: {
'entity_type': "instance",
'entity': "instance=%(instance_uuid)s",
'event_type': event_log.EVENT_TYPE.ACTION_EVENT,
'importance': event_log.EVENT_IMPORTANCE.HIGH,
'reason_text': "Create cancelled for instance %(instance_name)s",
'exclude_event_context': [],
'event_context_data': {
event_log.EVENT_CONTEXT.ADMIN: {
'entity_type': "tenant.instance",
'entity': "tenant=%(tenant_uuid)s.instance=%(instance_uuid)s",
'reason_text': "Create cancelled for instance %(instance_name)s"
"%(reason)s",
}
}
},
event_log.EVENT_ID.INSTANCE_CREATE_FAILED: {
'entity_type': "instance",
'entity': "instance=%(instance_uuid)s",
'event_type': event_log.EVENT_TYPE.ACTION_EVENT,
'importance': event_log.EVENT_IMPORTANCE.HIGH,
'reason_text': "Create failed for instance %(instance_name)s",
'exclude_event_context': [],
'event_context_data': {
event_log.EVENT_CONTEXT.ADMIN: {
'entity_type': "tenant.instance",
'entity': "tenant=%(tenant_uuid)s.instance=%(instance_uuid)s",
'reason_text': "Create failed for instance %(instance_name)s"
"%(reason)s",
}
}
},
event_log.EVENT_ID.INSTANCE_CREATED: {
'entity_type': "instance",
'entity': "instance=%(instance_uuid)s",
'event_type': event_log.EVENT_TYPE.ACTION_EVENT,
'importance': event_log.EVENT_IMPORTANCE.HIGH,
'reason_text': "Instance %(instance_name)s has been created",
'exclude_event_context': [],
'event_context_data': {
event_log.EVENT_CONTEXT.ADMIN: {
'entity_type': "tenant.instance",
'entity': "tenant=%(tenant_uuid)s.instance=%(instance_uuid)s",
'reason_text': "Instance %(instance_name)s owned by "
"%(tenant_name)s has been created",
}
}
},
event_log.EVENT_ID.INSTANCE_DELETE_BEGIN: {
'entity_type': "instance",
'entity': "instance=%(instance_uuid)s",
'event_type': event_log.EVENT_TYPE.ACTION_EVENT,
'importance': event_log.EVENT_IMPORTANCE.HIGH,
'reason_text': "Delete issued against instance %(instance_name)s",
'exclude_event_context': [],
'event_context_data': {
event_log.EVENT_CONTEXT.ADMIN: {
'entity_type': "tenant.instance",
'entity': "tenant=%(tenant_uuid)s.instance=%(instance_uuid)s",
'reason_text': "Delete issued %(initiated_text)s against instance "
"%(instance_name)s owned by %(tenant_name)s on "
"host %(host_name)s%(reason)s",
}
}
},
event_log.EVENT_ID.INSTANCE_DELETING: {
'entity_type': "instance",
'entity': "instance=%(instance_uuid)s",
'event_type': event_log.EVENT_TYPE.ACTION_EVENT,
'importance': event_log.EVENT_IMPORTANCE.HIGH,
'reason_text': "Deleting instance %(instance_name)s",
'exclude_event_context': [],
'event_context_data': {
event_log.EVENT_CONTEXT.ADMIN: {
'entity_type': "tenant.instance",
'entity': "tenant=%(tenant_uuid)s.instance=%(instance_uuid)s",
'reason_text': "Deleting instance %(instance_name)s owned by "
"%(tenant_name)s",
}
}
},
event_log.EVENT_ID.INSTANCE_DELETE_REJECTED: {
'entity_type': "instance",
'entity': "instance=%(instance_uuid)s",
'event_type': event_log.EVENT_TYPE.ACTION_EVENT,
'importance': event_log.EVENT_IMPORTANCE.HIGH,
'reason_text': "Delete rejected for instance %(instance_name)s%(reason)s",
'exclude_event_context': [],
'event_context_data': {
event_log.EVENT_CONTEXT.ADMIN: {
'entity_type': "tenant.instance",
'entity': "tenant=%(tenant_uuid)s.instance=%(instance_uuid)s",
'reason_text': "Delete rejected for instance %(instance_name)s "
"on host %(host_name)s%(reason)s",
}
}
},
event_log.EVENT_ID.INSTANCE_DELETE_CANCELLED: {
'entity_type': "instance",
'entity': "instance=%(instance_uuid)s",
'event_type': event_log.EVENT_TYPE.ACTION_EVENT,
'importance': event_log.EVENT_IMPORTANCE.HIGH,
'reason_text': "Delete cancelled for instance %(instance_name)s",
'exclude_event_context': [],
'event_context_data': {
event_log.EVENT_CONTEXT.ADMIN: {
'entity_type': "tenant.instance",
'entity': "tenant=%(tenant_uuid)s.instance=%(instance_uuid)s",
'reason_text': "Delete cancelled for instance %(instance_name)s "
"on host %(host_name)s%(reason)s",
}
}
},
event_log.EVENT_ID.INSTANCE_DELETE_FAILED: {
'entity_type': "instance",
'entity': "instance=%(instance_uuid)s",
'event_type': event_log.EVENT_TYPE.ACTION_EVENT,
'importance': event_log.EVENT_IMPORTANCE.HIGH,
'reason_text': "Delete failed for instance %(instance_name)s",
'exclude_event_context': [],
'event_context_data': {
event_log.EVENT_CONTEXT.ADMIN: {
'entity_type': "tenant.instance",
'entity': "tenant=%(tenant_uuid)s.instance=%(instance_uuid)s",
'reason_text': "Delete failed for instance %(instance_name)s "
"on host %(host_name)s%(reason)s",
}
}
},
event_log.EVENT_ID.INSTANCE_DELETED: {
'entity_type': "instance",
'entity': "instance=%(instance_uuid)s",
'event_type': event_log.EVENT_TYPE.ACTION_EVENT,
'importance': event_log.EVENT_IMPORTANCE.HIGH,
'reason_text': "Deleted instance %(instance_name)s",
'exclude_event_context': [],
'event_context_data': {
event_log.EVENT_CONTEXT.ADMIN: {
'entity_type': "tenant.instance",
'entity': "tenant=%(tenant_uuid)s.instance=%(instance_uuid)s",
'reason_text': "Deleted instance %(instance_name)s owned by "
"%(tenant_name)s",
}
}
},
event_log.EVENT_ID.INSTANCE_PAUSE_BEGIN: {
'entity_type': "instance",
'entity': "instance=%(instance_uuid)s",
'event_type': event_log.EVENT_TYPE.ACTION_EVENT,
'importance': event_log.EVENT_IMPORTANCE.HIGH,
'reason_text': "Pause issued against instance %(instance_name)s",
'exclude_event_context': [],
'event_context_data': {
event_log.EVENT_CONTEXT.ADMIN: {
'entity_type': "tenant.instance",
'entity': "tenant=%(tenant_uuid)s.instance=%(instance_uuid)s",
'reason_text': "Pause issued %(initiated_text)s against instance "
"%(instance_name)s owned by %(tenant_name)s on "
"host %(host_name)s%(reason)s",
}
}
},
event_log.EVENT_ID.INSTANCE_PAUSING: {
'entity_type': "instance",
'entity': "instance=%(instance_uuid)s",
'event_type': event_log.EVENT_TYPE.ACTION_EVENT,
'importance': event_log.EVENT_IMPORTANCE.HIGH,
'reason_text': "Pause inprogress for instance %(instance_name)s",
'exclude_event_context': [],
'event_context_data': {
event_log.EVENT_CONTEXT.ADMIN: {
'entity_type': "tenant.instance",
'entity': "tenant=%(tenant_uuid)s.instance=%(instance_uuid)s",
'reason_text': "Pause inprogress for instance %(instance_name)s "
"on host %(host_name)s",
}
}
},
event_log.EVENT_ID.INSTANCE_PAUSE_REJECTED: {
'entity_type': "instance",
'entity': "instance=%(instance_uuid)s",
'event_type': event_log.EVENT_TYPE.ACTION_EVENT,
'importance': event_log.EVENT_IMPORTANCE.HIGH,
'reason_text': "Pause rejected for instance %(instance_name)s%(reason)s",
'exclude_event_context': [],
'event_context_data': {
event_log.EVENT_CONTEXT.ADMIN: {
'entity_type': "tenant.instance",
'entity': "tenant=%(tenant_uuid)s.instance=%(instance_uuid)s",
'reason_text': "Pause rejected for instance %(instance_name)s "
"enabled on host %(host_name)s%(reason)s",
}
}
},
event_log.EVENT_ID.INSTANCE_PAUSE_CANCELLED: {
'entity_type': "instance",
'entity': "instance=%(instance_uuid)s",
'event_type': event_log.EVENT_TYPE.ACTION_EVENT,
'importance': event_log.EVENT_IMPORTANCE.HIGH,
'reason_text': "Pause cancelled for instance %(instance_name)s",
'exclude_event_context': [],
'event_context_data': {
event_log.EVENT_CONTEXT.ADMIN: {
'entity_type': "tenant.instance",
'entity': "tenant=%(tenant_uuid)s.instance=%(instance_uuid)s",
'reason_text': "Pause cancelled for instance %(instance_name)s "
"on host %(host_name)s%(reason)s",
}
}
},
event_log.EVENT_ID.INSTANCE_PAUSE_FAILED: {
'entity_type': "instance",
'entity': "instance=%(instance_uuid)s",
'event_type': event_log.EVENT_TYPE.ACTION_EVENT,
'importance': event_log.EVENT_IMPORTANCE.HIGH,
'reason_text': "Pause failed for instance %(instance_name)s",
'exclude_event_context': [],
'event_context_data': {
event_log.EVENT_CONTEXT.ADMIN: {
'entity_type': "tenant.instance",
'entity': "tenant=%(tenant_uuid)s.instance=%(instance_uuid)s",
'reason_text': "Pause failed for instance %(instance_name)s "
"on host %(host_name)s%(reason)s",
}
}
},
event_log.EVENT_ID.INSTANCE_PAUSED: {
'entity_type': "instance",
'entity': "instance=%(instance_uuid)s",
'event_type': event_log.EVENT_TYPE.ACTION_EVENT,
'importance': event_log.EVENT_IMPORTANCE.HIGH,
'reason_text': "Pause complete for instance %(instance_name)s",
'exclude_event_context': [],
'event_context_data': {
event_log.EVENT_CONTEXT.ADMIN: {
'entity_type': "tenant.instance",
'entity': "tenant=%(tenant_uuid)s.instance=%(instance_uuid)s",
'reason_text': "Pause complete for instance %(instance_name)s "
"now paused on host %(host_name)s",
}
}
},
event_log.EVENT_ID.INSTANCE_UNPAUSE_BEGIN: {
'entity_type': "instance",
'entity': "instance=%(instance_uuid)s",
'event_type': event_log.EVENT_TYPE.ACTION_EVENT,
'importance': event_log.EVENT_IMPORTANCE.HIGH,
'reason_text': "Unpause issued against instance %(instance_name)s",
'exclude_event_context': [],
'event_context_data': {
event_log.EVENT_CONTEXT.ADMIN: {
'entity_type': "tenant.instance",
'entity': "tenant=%(tenant_uuid)s.instance=%(instance_uuid)s",
'reason_text': "Unpause issued %(initiated_text)s against instance "
"%(instance_name)s owned by %(tenant_name)s on "
"host %(host_name)s%(reason)s",
}
}
},
event_log.EVENT_ID.INSTANCE_UNPAUSING: {
'entity_type': "instance",
'entity': "instance=%(instance_uuid)s",
'event_type': event_log.EVENT_TYPE.ACTION_EVENT,
'importance': event_log.EVENT_IMPORTANCE.HIGH,
'reason_text': "Unpause inprogress for instance %(instance_name)s",
'exclude_event_context': [],
'event_context_data': {
event_log.EVENT_CONTEXT.ADMIN: {
'entity_type': "tenant.instance",
'entity': "tenant=%(tenant_uuid)s.instance=%(instance_uuid)s",
'reason_text': "Unpause inprogress for instance %(instance_name)s "
"on host %(host_name)s",
}
}
},
event_log.EVENT_ID.INSTANCE_UNPAUSE_REJECTED: {
'entity_type': "instance",
'entity': "instance=%(instance_uuid)s",
'event_type': event_log.EVENT_TYPE.ACTION_EVENT,
'importance': event_log.EVENT_IMPORTANCE.HIGH,
'reason_text': "Unpause rejected for instance %(instance_name)s%(reason)s",
'exclude_event_context': [],
'event_context_data': {
event_log.EVENT_CONTEXT.ADMIN: {
'entity_type': "tenant.instance",
'entity': "tenant=%(tenant_uuid)s.instance=%(instance_uuid)s",
'reason_text': "Unpause rejected for instance %(instance_name)s "
"paused on host %(host_name)s%(reason)s",
}
}
},
event_log.EVENT_ID.INSTANCE_UNPAUSE_CANCELLED: {
'entity_type': "instance",
'entity': "instance=%(instance_uuid)s",
'event_type': event_log.EVENT_TYPE.ACTION_EVENT,
'importance': event_log.EVENT_IMPORTANCE.HIGH,
'reason_text': "Unpause cancelled for instance %(instance_name)s",
'exclude_event_context': [],
'event_context_data': {
event_log.EVENT_CONTEXT.ADMIN: {
'entity_type': "tenant.instance",
'entity': "tenant=%(tenant_uuid)s.instance=%(instance_uuid)s",
'reason_text': "Unpause cancelled for instance %(instance_name)s "
"on host %(host_name)s%(reason)s",
}
}
},
event_log.EVENT_ID.INSTANCE_UNPAUSE_FAILED: {
'entity_type': "instance",
'entity': "instance=%(instance_uuid)s",
'event_type': event_log.EVENT_TYPE.ACTION_EVENT,
'importance': event_log.EVENT_IMPORTANCE.HIGH,
'reason_text': "Unpause failed for instance %(instance_name)s",
'exclude_event_context': [],
'event_context_data': {
event_log.EVENT_CONTEXT.ADMIN: {
'entity_type': "tenant.instance",
'entity': "tenant=%(tenant_uuid)s.instance=%(instance_uuid)s",
'reason_text': "Unpause failed for instance %(instance_name)s "
"on host %(host_name)s%(reason)s",
}
}
},
event_log.EVENT_ID.INSTANCE_UNPAUSED: {
'entity_type': "instance",
'entity': "instance=%(instance_uuid)s",
'event_type': event_log.EVENT_TYPE.ACTION_EVENT,
'importance': event_log.EVENT_IMPORTANCE.HIGH,
'reason_text': "Unpause complete for instance %(instance_name)s",
'exclude_event_context': [],
'event_context_data': {
event_log.EVENT_CONTEXT.ADMIN: {
'entity_type': "tenant.instance",
'entity': "tenant=%(tenant_uuid)s.instance=%(instance_uuid)s",
'reason_text': "Unpause complete for instance %(instance_name)s "
"now enabled on host %(host_name)s",
}
}
},
event_log.EVENT_ID.INSTANCE_SUSPEND_BEGIN: {
'entity_type': "instance",
'entity': "instance=%(instance_uuid)s",
'event_type': event_log.EVENT_TYPE.ACTION_EVENT,
'importance': event_log.EVENT_IMPORTANCE.HIGH,
'reason_text': "Suspend issued against instance %(instance_name)s",
'exclude_event_context': [],
'event_context_data': {
event_log.EVENT_CONTEXT.ADMIN: {
'entity_type': "tenant.instance",
'entity': "tenant=%(tenant_uuid)s.instance=%(instance_uuid)s",
'reason_text': "Suspend issued %(initiated_text)s against instance "
"%(instance_name)s owned by %(tenant_name)s on "
"host %(host_name)s%(reason)s",
}
}
},
event_log.EVENT_ID.INSTANCE_SUSPENDING: {
'entity_type': "instance",
'entity': "instance=%(instance_uuid)s",
'event_type': event_log.EVENT_TYPE.ACTION_EVENT,
'importance': event_log.EVENT_IMPORTANCE.HIGH,
'reason_text': "Suspend inprogress for instance %(instance_name)s",
'exclude_event_context': [],
'event_context_data': {
event_log.EVENT_CONTEXT.ADMIN: {
'entity_type': "tenant.instance",
'entity': "tenant=%(tenant_uuid)s.instance=%(instance_uuid)s",
'reason_text': "Suspend inprogress for instance %(instance_name)s "
"on host %(host_name)s",
}
}
},
event_log.EVENT_ID.INSTANCE_SUSPEND_REJECTED: {
'entity_type': "instance",
'entity': "instance=%(instance_uuid)s",
'event_type': event_log.EVENT_TYPE.ACTION_EVENT,
'importance': event_log.EVENT_IMPORTANCE.HIGH,
'reason_text': "Suspend rejected for instance %(instance_name)s%(reason)s",
'exclude_event_context': [],
'event_context_data': {
event_log.EVENT_CONTEXT.ADMIN: {
'entity_type': "tenant.instance",
'entity': "tenant=%(tenant_uuid)s.instance=%(instance_uuid)s",
'reason_text': "Suspend rejected for instance %(instance_name)s "
"enabled on host %(host_name)s%(reason)s",
}
}
},
event_log.EVENT_ID.INSTANCE_SUSPEND_CANCELLED: {
'entity_type': "instance",
'entity': "instance=%(instance_uuid)s",
'event_type': event_log.EVENT_TYPE.ACTION_EVENT,
'importance': event_log.EVENT_IMPORTANCE.HIGH,
'reason_text': "Suspend cancelled for instance %(instance_name)s",
'exclude_event_context': [],
'event_context_data': {
event_log.EVENT_CONTEXT.ADMIN: {
'entity_type': "tenant.instance",
'entity': "tenant=%(tenant_uuid)s.instance=%(instance_uuid)s",
'reason_text': "Suspend cancelled for instance %(instance_name)s "
"on host %(host_name)s%(reason)s",
}
}
},
event_log.EVENT_ID.INSTANCE_SUSPEND_FAILED: {
'entity_type': "instance",
'entity': "instance=%(instance_uuid)s",
'event_type': event_log.EVENT_TYPE.ACTION_EVENT,
'importance': event_log.EVENT_IMPORTANCE.HIGH,
'reason_text': "Suspend failed for instance %(instance_name)s",
'exclude_event_context': [],
'event_context_data': {
event_log.EVENT_CONTEXT.ADMIN: {
'entity_type': "tenant.instance",
'entity': "tenant=%(tenant_uuid)s.instance=%(instance_uuid)s",
'reason_text': "Suspend failed for instance %(instance_name)s "
"on host %(host_name)s%(reason)s",
}
}
},
event_log.EVENT_ID.INSTANCE_SUSPENDED: {
'entity_type': "instance",
'entity': "instance=%(instance_uuid)s",
'event_type': event_log.EVENT_TYPE.ACTION_EVENT,
'importance': event_log.EVENT_IMPORTANCE.HIGH,
'reason_text': "Suspend complete for instance %(instance_name)s",
'exclude_event_context': [],
'event_context_data': {
event_log.EVENT_CONTEXT.ADMIN: {
'entity_type': "tenant.instance",
'entity': "tenant=%(tenant_uuid)s.instance=%(instance_uuid)s",
'reason_text': "Suspend complete for instance %(instance_name)s "
"now suspended on host %(host_name)s",
}
}
},
event_log.EVENT_ID.INSTANCE_RESUME_BEGIN: {
'entity_type': "instance",
'entity': "instance=%(instance_uuid)s",
'event_type': event_log.EVENT_TYPE.ACTION_EVENT,
'importance': event_log.EVENT_IMPORTANCE.HIGH,
'reason_text': "Resume issued against instance %(instance_name)s",
'exclude_event_context': [],
'event_context_data': {
event_log.EVENT_CONTEXT.ADMIN: {
'entity_type': "tenant.instance",
'entity': "tenant=%(tenant_uuid)s.instance=%(instance_uuid)s",
'reason_text': "Resume issued %(initiated_text)s against instance "
"%(instance_name)s owned by %(tenant_name)s on "
"host %(host_name)s%(reason)s",
}
}
},
event_log.EVENT_ID.INSTANCE_RESUMING: {
'entity_type': "instance",
'entity': "instance=%(instance_uuid)s",
'event_type': event_log.EVENT_TYPE.ACTION_EVENT,
'importance': event_log.EVENT_IMPORTANCE.HIGH,
'reason_text': "Resume inprogress for instance %(instance_name)s",
'exclude_event_context': [],
'event_context_data': {
event_log.EVENT_CONTEXT.ADMIN: {
'entity_type': "tenant.instance",
'entity': "tenant=%(tenant_uuid)s.instance=%(instance_uuid)s",
'reason_text': "Resume inprogress for instance %(instance_name)s "
"on host %(host_name)s",
}
}
},
event_log.EVENT_ID.INSTANCE_RESUME_REJECTED: {
'entity_type': "instance",
'entity': "instance=%(instance_uuid)s",
'event_type': event_log.EVENT_TYPE.ACTION_EVENT,
'importance': event_log.EVENT_IMPORTANCE.HIGH,
'reason_text': "Resume rejected for instance %(instance_name)s%(reason)s",
'exclude_event_context': [],
'event_context_data': {
event_log.EVENT_CONTEXT.ADMIN: {
'entity_type': "tenant.instance",
'entity': "tenant=%(tenant_uuid)s.instance=%(instance_uuid)s",
'reason_text': "Resume rejected for instance %(instance_name)s "
"suspended on host %(host_name)s%(reason)s",
}
}
},
event_log.EVENT_ID.INSTANCE_RESUME_CANCELLED: {
'entity_type': "instance",
'entity': "instance=%(instance_uuid)s",
'event_type': event_log.EVENT_TYPE.ACTION_EVENT,
'importance': event_log.EVENT_IMPORTANCE.HIGH,
'reason_text': "Resume cancelled for instance %(instance_name)s",
'exclude_event_context': [],
'event_context_data': {
event_log.EVENT_CONTEXT.ADMIN: {
'entity_type': "tenant.instance",
'entity': "tenant=%(tenant_uuid)s.instance=%(instance_uuid)s",
'reason_text': "Resume cancelled for instance %(instance_name)s "
"on host %(host_name)s%(reason)s",
}
}
},
event_log.EVENT_ID.INSTANCE_RESUME_FAILED: {
'entity_type': "instance",
'entity': "instance=%(instance_uuid)s",
'event_type': event_log.EVENT_TYPE.ACTION_EVENT,
'importance': event_log.EVENT_IMPORTANCE.HIGH,
'reason_text': "Resume failed for instance %(instance_name)s",
'exclude_event_context': [],
'event_context_data': {
event_log.EVENT_CONTEXT.ADMIN: {
'entity_type': "tenant.instance",
'entity': "tenant=%(tenant_uuid)s.instance=%(instance_uuid)s",
'reason_text': "Resume failed for instance %(instance_name)s "
"on host %(host_name)s%(reason)s",
}
}
},
event_log.EVENT_ID.INSTANCE_RESUMED: {
'entity_type': "instance",
'entity': "instance=%(instance_uuid)s",
'event_type': event_log.EVENT_TYPE.ACTION_EVENT,
'importance': event_log.EVENT_IMPORTANCE.HIGH,
'reason_text': "Resume complete for instance %(instance_name)s",
'exclude_event_context': [],
'event_context_data': {
event_log.EVENT_CONTEXT.ADMIN: {
'entity_type': "tenant.instance",
'entity': "tenant=%(tenant_uuid)s.instance=%(instance_uuid)s",
'reason_text': "Resume complete for instance %(instance_name)s "
"now enabled on host %(host_name)s",
}
}
},
event_log.EVENT_ID.INSTANCE_LIVE_MIGRATE_BEGIN: {
'entity_type': "instance",
'entity': "instance=%(instance_uuid)s",
'event_type': event_log.EVENT_TYPE.ACTION_EVENT,
'importance': event_log.EVENT_IMPORTANCE.HIGH,
'reason_text': "Live-Migrate issued against instance %(instance_name)s",
'exclude_event_context': [],
'event_context_data': {
event_log.EVENT_CONTEXT.ADMIN: {
'entity_type': "tenant.instance",
'entity': "tenant=%(tenant_uuid)s.instance=%(instance_uuid)s",
'reason_text': "Live-Migrate issued %(initiated_text)s against "
"instance %(instance_name)s owned by "
"%(tenant_name)s from host %(host_name)s%(reason)s",
}
}
},
event_log.EVENT_ID.INSTANCE_LIVE_MIGRATING: {
'entity_type': "instance",
'entity': "instance=%(instance_uuid)s",
'event_type': event_log.EVENT_TYPE.ACTION_EVENT,
'importance': event_log.EVENT_IMPORTANCE.HIGH,
'reason_text': "Live-Migrate inprogress for instance %(instance_name)s",
'exclude_event_context': [],
'event_context_data': {
event_log.EVENT_CONTEXT.ADMIN: {
'entity_type': "tenant.instance",
'entity': "tenant=%(tenant_uuid)s.instance=%(instance_uuid)s",
'reason_text': "Live-Migrate inprogress for instance "
"%(instance_name)s from host %(from_host_name)s",
}
}
},
event_log.EVENT_ID.INSTANCE_LIVE_MIGRATE_REJECTED: {
'entity_type': "instance",
'entity': "instance=%(instance_uuid)s",
'event_type': event_log.EVENT_TYPE.ACTION_EVENT,
'importance': event_log.EVENT_IMPORTANCE.HIGH,
'reason_text': "Live-Migrate rejected for instance %(instance_name)s"
"%(reason)s",
'exclude_event_context': [],
'event_context_data': {
event_log.EVENT_CONTEXT.ADMIN: {
'entity_type': "tenant.instance",
'entity': "tenant=%(tenant_uuid)s.instance=%(instance_uuid)s",
'reason_text': "Live-Migrate rejected for instance "
"%(instance_name)s now on host %(host_name)s"
"%(reason)s",
}
}
},
event_log.EVENT_ID.INSTANCE_LIVE_MIGRATE_CANCELLED: {
'entity_type': "instance",
'entity': "instance=%(instance_uuid)s",
'event_type': event_log.EVENT_TYPE.ACTION_EVENT,
'importance': event_log.EVENT_IMPORTANCE.HIGH,
'reason_text': "Live-Migrate cancelled for instance %(instance_name)s"
"%(reason)s",
'exclude_event_context': [],
'event_context_data': {
event_log.EVENT_CONTEXT.ADMIN: {
'entity_type': "tenant.instance",
'entity': "tenant=%(tenant_uuid)s.instance=%(instance_uuid)s",
'reason_text': "Live-Migrate cancelled for instance "
"%(instance_name)s now on host %(host_name)s"
"%(reason)s",
}
}
},
event_log.EVENT_ID.INSTANCE_LIVE_MIGRATE_FAILED: {
'entity_type': "instance",
'entity': "instance=%(instance_uuid)s",
'event_type': event_log.EVENT_TYPE.ACTION_EVENT,
'importance': event_log.EVENT_IMPORTANCE.HIGH,
'reason_text': "Live-Migrate failed for instance %(instance_name)s",
'exclude_event_context': [],
'event_context_data': {
event_log.EVENT_CONTEXT.ADMIN: {
'entity_type': "tenant.instance",
'entity': "tenant=%(tenant_uuid)s.instance=%(instance_uuid)s",
'reason_text': "Live-Migrate failed for instance "
"%(instance_name)s now on host %(host_name)s"
"%(reason)s",
}
}
},
event_log.EVENT_ID.INSTANCE_LIVE_MIGRATED: {
'entity_type': "instance",
'entity': "instance=%(instance_uuid)s",
'event_type': event_log.EVENT_TYPE.ACTION_EVENT,
'importance': event_log.EVENT_IMPORTANCE.HIGH,
'reason_text': "Live-Migrate complete for instance %(instance_name)s",
'exclude_event_context': [],
'event_context_data': {
event_log.EVENT_CONTEXT.ADMIN: {
'entity_type': "tenant.instance",
'entity': "tenant=%(tenant_uuid)s.instance=%(instance_uuid)s",
'reason_text': "Live-Migrate complete for instance "
"%(instance_name)s now enabled on host "
"%(host_name)s",
}
}
},
event_log.EVENT_ID.INSTANCE_COLD_MIGRATE_BEGIN: {
'entity_type': "instance",
'entity': "instance=%(instance_uuid)s",
'event_type': event_log.EVENT_TYPE.ACTION_EVENT,
'importance': event_log.EVENT_IMPORTANCE.HIGH,
'reason_text': "Cold-Migrate issued against instance %(instance_name)s",
'exclude_event_context': [],
'event_context_data': {
event_log.EVENT_CONTEXT.ADMIN: {
'entity_type': "tenant.instance",
'entity': "tenant=%(tenant_uuid)s.instance=%(instance_uuid)s",
'reason_text': "Cold-Migrate issued %(initiated_text)s against "
"instance %(instance_name)s owned by "
"%(tenant_name)s from host %(host_name)s%(reason)s",
}
}
},
event_log.EVENT_ID.INSTANCE_COLD_MIGRATING: {
'entity_type': "instance",
'entity': "instance=%(instance_uuid)s",
'event_type': event_log.EVENT_TYPE.ACTION_EVENT,
'importance': event_log.EVENT_IMPORTANCE.HIGH,
'reason_text': "Cold-Migrate inprogress for instance %(instance_name)s",
'exclude_event_context': [],
'event_context_data': {
event_log.EVENT_CONTEXT.ADMIN: {
'entity_type': "tenant.instance",
'entity': "tenant=%(tenant_uuid)s.instance=%(instance_uuid)s",
'reason_text': "Cold-Migrate inprogress for instance "
"%(instance_name)s from host %(from_host_name)s",
}
}
},
event_log.EVENT_ID.INSTANCE_COLD_MIGRATE_REJECTED: {
'entity_type': "instance",
'entity': "instance=%(instance_uuid)s",
'event_type': event_log.EVENT_TYPE.ACTION_EVENT,
'importance': event_log.EVENT_IMPORTANCE.HIGH,
'reason_text': "Cold-Migrate rejected for instance %(instance_name)s"
"%(reason)s",
'exclude_event_context': [],
'event_context_data': {
event_log.EVENT_CONTEXT.ADMIN: {
'entity_type': "tenant.instance",
'entity': "tenant=%(tenant_uuid)s.instance=%(instance_uuid)s",
'reason_text': "Cold-Migrate rejected for instance "
"%(instance_name)s now on host %(host_name)s"
"%(reason)s",
}
}
},
event_log.EVENT_ID.INSTANCE_COLD_MIGRATE_CANCELLED: {
'entity_type': "instance",
'entity': "instance=%(instance_uuid)s",
'event_type': event_log.EVENT_TYPE.ACTION_EVENT,
'importance': event_log.EVENT_IMPORTANCE.HIGH,
'reason_text': "Cold-Migrate cancelled for instance %(instance_name)s",
'exclude_event_context': [],
'event_context_data': {
event_log.EVENT_CONTEXT.ADMIN: {
'entity_type': "tenant.instance",
'entity': "tenant=%(tenant_uuid)s.instance=%(instance_uuid)s",
'reason_text': "Cold-Migrate cancelled for instance "
"%(instance_name)s on host %(host_name)s%(reason)s",
}
}
},
event_log.EVENT_ID.INSTANCE_COLD_MIGRATE_FAILED: {
'entity_type': "instance",
'entity': "instance=%(instance_uuid)s",
'event_type': event_log.EVENT_TYPE.ACTION_EVENT,
'importance': event_log.EVENT_IMPORTANCE.HIGH,
'reason_text': "Cold-Migrate failed for instance %(instance_name)s",
'exclude_event_context': [],
'event_context_data': {
event_log.EVENT_CONTEXT.ADMIN: {
'entity_type': "tenant.instance",
'entity': "tenant=%(tenant_uuid)s.instance=%(instance_uuid)s",
'reason_text': "Cold-Migrate failed for instance "
"%(instance_name)s now on host %(host_name)s"
"%(reason)s",
}
}
},
event_log.EVENT_ID.INSTANCE_COLD_MIGRATED: {
'entity_type': "instance",
'entity': "instance=%(instance_uuid)s",
'event_type': event_log.EVENT_TYPE.ACTION_EVENT,
'importance': event_log.EVENT_IMPORTANCE.HIGH,
'reason_text': "Cold-Migrate complete for instance %(instance_name)s "
"%(additional_text)s",
'exclude_event_context': [],
'event_context_data': {
event_log.EVENT_CONTEXT.ADMIN: {
'entity_type': "tenant.instance",
'entity': "tenant=%(tenant_uuid)s.instance=%(instance_uuid)s",
'reason_text': "Cold-Migrate complete for instance "
"%(instance_name)s now enabled on host "
"%(host_name)s %(additional_text)s",
}
}
},
event_log.EVENT_ID.INSTANCE_COLD_MIGRATE_CONFIRM_BEGIN: {
'entity_type': "instance",
'entity': "instance=%(instance_uuid)s",
'event_type': event_log.EVENT_TYPE.ACTION_EVENT,
'importance': event_log.EVENT_IMPORTANCE.HIGH,
'reason_text': "Cold-Migrate-Confirm issued against instance "
"%(instance_name)s",
'exclude_event_context': [],
'event_context_data': {
event_log.EVENT_CONTEXT.ADMIN: {
'entity_type': "tenant.instance",
'entity': "tenant=%(tenant_uuid)s.instance=%(instance_uuid)s",
'reason_text': "Cold-Migrate-Confirm issued %(initiated_text)s "
"against instance %(instance_name)s owned by "
"%(tenant_name)s on host %(host_name)s%(reason)s",
}
}
},
event_log.EVENT_ID.INSTANCE_COLD_MIGRATE_CONFIRMING: {
'entity_type': "instance",
'entity': "instance=%(instance_uuid)s",
'event_type': event_log.EVENT_TYPE.ACTION_EVENT,
'importance': event_log.EVENT_IMPORTANCE.HIGH,
'reason_text': "Cold-Migrate-Confirm inprogress for instance "
"%(instance_name)s",
'exclude_event_context': [],
'event_context_data': {
event_log.EVENT_CONTEXT.ADMIN: {
'entity_type': "tenant.instance",
'entity': "tenant=%(tenant_uuid)s.instance=%(instance_uuid)s",
'reason_text': "Cold-Migrate-Confirm inprogress for instance "
"%(instance_name)s on host %(host_name)s",
}
}
},
event_log.EVENT_ID.INSTANCE_COLD_MIGRATE_CONFIRM_REJECTED: {
'entity_type': "instance",
'entity': "instance=%(instance_uuid)s",
'event_type': event_log.EVENT_TYPE.ACTION_EVENT,
'importance': event_log.EVENT_IMPORTANCE.HIGH,
'reason_text': ("Cold-Migrate-Confirm rejected for instance "
"%(instance_name)s%(reason)s"),
'exclude_event_context': [],
'event_context_data': {
event_log.EVENT_CONTEXT.ADMIN: {
'entity_type': "tenant.instance",
'entity': "tenant=%(tenant_uuid)s.instance=%(instance_uuid)s",
'reason_text': "Cold-Migrate-Confirm rejected for instance "
"%(instance_name)s now enabled on host "
"%(host_name)s%(reason)s",
}
}
},
event_log.EVENT_ID.INSTANCE_COLD_MIGRATE_CONFIRM_CANCELLED: {
'entity_type': "instance",
'entity': "instance=%(instance_uuid)s",
'event_type': event_log.EVENT_TYPE.ACTION_EVENT,
'importance': event_log.EVENT_IMPORTANCE.HIGH,
'reason_text': "Cold-Migrate-Confirm cancelled for instance "
"%(instance_name)s",
'exclude_event_context': [],
'event_context_data': {
event_log.EVENT_CONTEXT.ADMIN: {
'entity_type': "tenant.instance",
'entity': "tenant=%(tenant_uuid)s.instance=%(instance_uuid)s",
'reason_text': "Cold-Migrate-Confirm cancelled for instance "
"%(instance_name)s on host %(host_name)s%(reason)s",
}
}
},
event_log.EVENT_ID.INSTANCE_COLD_MIGRATE_CONFIRM_FAILED: {
'entity_type': "instance",
'entity': "instance=%(instance_uuid)s",
'event_type': event_log.EVENT_TYPE.ACTION_EVENT,
'importance': event_log.EVENT_IMPORTANCE.HIGH,
'reason_text': "Cold-Migrate-Confirm failed for instance %(instance_name)s",
'exclude_event_context': [],
'event_context_data': {
event_log.EVENT_CONTEXT.ADMIN: {
'entity_type': "tenant.instance",
'entity': "tenant=%(tenant_uuid)s.instance=%(instance_uuid)s",
'reason_text': "Cold-Migrate-Confirm failed for instance "
"%(instance_name)s on host %(host_name)s%(reason)s",
}
}
},
event_log.EVENT_ID.INSTANCE_COLD_MIGRATE_CONFIRMED: {
'entity_type': "instance",
'entity': "instance=%(instance_uuid)s",
'event_type': event_log.EVENT_TYPE.ACTION_EVENT,
'importance': event_log.EVENT_IMPORTANCE.HIGH,
'reason_text': ("Cold-Migrate-Confirm complete for instance "
"%(instance_name)s"),
'exclude_event_context': [],
'event_context_data': {
event_log.EVENT_CONTEXT.ADMIN: {
'entity_type': "tenant.instance",
'entity': "tenant=%(tenant_uuid)s.instance=%(instance_uuid)s",
'reason_text': "Cold-Migrate-Confirm complete for instance "
"%(instance_name)s enabled on host %(host_name)s",
}
}
},
event_log.EVENT_ID.INSTANCE_COLD_MIGRATE_REVERT_BEGIN: {
'entity_type': "instance",
'entity': "instance=%(instance_uuid)s",
'event_type': event_log.EVENT_TYPE.ACTION_EVENT,
'importance': event_log.EVENT_IMPORTANCE.HIGH,
'reason_text': "Cold-Migrate-Revert issued against instance "
"%(instance_name)s",
'exclude_event_context': [],
'event_context_data': {
event_log.EVENT_CONTEXT.ADMIN: {
'entity_type': "tenant.instance",
'entity': "tenant=%(tenant_uuid)s.instance=%(instance_uuid)s",
'reason_text': "Cold-Migrate-Revert issued %(initiated_text)s "
"against instance %(instance_name)s owned by "
"%(tenant_name)s on host %(host_name)s%(reason)s",
}
}
},
event_log.EVENT_ID.INSTANCE_COLD_MIGRATE_REVERTING: {
'entity_type': "instance",
'entity': "instance=%(instance_uuid)s",
'event_type': event_log.EVENT_TYPE.ACTION_EVENT,
'importance': event_log.EVENT_IMPORTANCE.HIGH,
'reason_text': "Cold-Migrate-Revert inprogress for instance "
"%(instance_name)s",
'exclude_event_context': [],
'event_context_data': {
event_log.EVENT_CONTEXT.ADMIN: {
'entity_type': "tenant.instance",
'entity': "tenant=%(tenant_uuid)s.instance=%(instance_uuid)s",
'reason_text': "Cold-Migrate-Revert inprogress for instance "
"%(instance_name)s from host %(from_host_name)s",
}
}
},
event_log.EVENT_ID.INSTANCE_COLD_MIGRATE_REVERT_REJECTED: {
'entity_type': "instance",
'entity': "instance=%(instance_uuid)s",
'event_type': event_log.EVENT_TYPE.ACTION_EVENT,
'importance': event_log.EVENT_IMPORTANCE.HIGH,
'reason_text': ("Cold-Migrate-Revert rejected for instance "
"%(instance_name)s, reason = %(additional_text)s"),
'exclude_event_context': [],
'event_context_data': {
event_log.EVENT_CONTEXT.ADMIN: {
'entity_type': "tenant.instance",
'entity': "tenant=%(tenant_uuid)s.instance=%(instance_uuid)s",
'reason_text': "Cold-Migrate-Revert rejected for instance "
"%(instance_name)s now on host %(host_name)s"
"%(reason)s",
}
}
},
event_log.EVENT_ID.INSTANCE_COLD_MIGRATE_REVERT_CANCELLED: {
'entity_type': "instance",
'entity': "instance=%(instance_uuid)s",
'event_type': event_log.EVENT_TYPE.ACTION_EVENT,
'importance': event_log.EVENT_IMPORTANCE.HIGH,
'reason_text': "Cold-Migrate-Revert cancelled for instance "
"%(instance_name)s",
'exclude_event_context': [],
'event_context_data': {
event_log.EVENT_CONTEXT.ADMIN: {
'entity_type': "tenant.instance",
'entity': "tenant=%(tenant_uuid)s.instance=%(instance_uuid)s",
'reason_text': "Cold-Migrate-Revert cancelled for instance "
"%(instance_name)s on host %(host_name)s%(reason)s",
}
}
},
event_log.EVENT_ID.INSTANCE_COLD_MIGRATE_REVERT_FAILED: {
'entity_type': "instance",
'entity': "instance=%(instance_uuid)s",
'event_type': event_log.EVENT_TYPE.ACTION_EVENT,
'importance': event_log.EVENT_IMPORTANCE.HIGH,
'reason_text': "Cold-Migrate-Revert failed for instance %(instance_name)s",
'exclude_event_context': [],
'event_context_data': {
event_log.EVENT_CONTEXT.ADMIN: {
'entity_type': "tenant.instance",
'entity': "tenant=%(tenant_uuid)s.instance=%(instance_uuid)s",
'reason_text': "Cold-Migrate-Revert failed for instance "
"%(instance_name)s on host %(host_name)s%(reason)s",
}
}
},
event_log.EVENT_ID.INSTANCE_COLD_MIGRATE_REVERTED: {
'entity_type': "instance",
'entity': "instance=%(instance_uuid)s",
'event_type': event_log.EVENT_TYPE.ACTION_EVENT,
'importance': event_log.EVENT_IMPORTANCE.HIGH,
'reason_text': "Cold-Migrate-Revert complete for instance "
"%(instance_name)s",
'exclude_event_context': [],
'event_context_data': {
event_log.EVENT_CONTEXT.ADMIN: {
'entity_type': "tenant.instance",
'entity': "tenant=%(tenant_uuid)s.instance=%(instance_uuid)s",
'reason_text': "Cold-Migrate-Revert complete for instance "
"%(instance_name)s now enabled on host "
"%(host_name)s",
}
}
},
event_log.EVENT_ID.INSTANCE_RESIZE_BEGIN: {
'entity_type': "instance",
'entity': "instance=%(instance_uuid)s",
'event_type': event_log.EVENT_TYPE.ACTION_EVENT,
'importance': event_log.EVENT_IMPORTANCE.HIGH,
'reason_text': "Resize issued against instance %(instance_name)s to "
"instance-type %(additional_text)s",
'exclude_event_context': [],
'event_context_data': {
event_log.EVENT_CONTEXT.ADMIN: {
'entity_type': "tenant.instance",
'entity': "tenant=%(tenant_uuid)s.instance=%(instance_uuid)s",
'reason_text': "Resize issued %(initiated_text)s against instance "
"%(instance_name)s owned by %(tenant_name)s to "
"instance-type %(additional_text)s on host "
"%(host_name)s%(reason)s",
}
}
},
event_log.EVENT_ID.INSTANCE_RESIZING: {
'entity_type': "instance",
'entity': "instance=%(instance_uuid)s",
'event_type': event_log.EVENT_TYPE.ACTION_EVENT,
'importance': event_log.EVENT_IMPORTANCE.HIGH,
'reason_text': "Resize inprogress for instance %(instance_name)s",
'exclude_event_context': [],
'event_context_data': {
event_log.EVENT_CONTEXT.ADMIN: {
'entity_type': "tenant.instance",
'entity': "tenant=%(tenant_uuid)s.instance=%(instance_uuid)s",
'reason_text': "Resize inprogress for instance "
"%(instance_name)s on host %(host_name)s",
}
}
},
event_log.EVENT_ID.INSTANCE_RESIZE_REJECTED: {
'entity_type': "instance",
'entity': "instance=%(instance_uuid)s",
'event_type': event_log.EVENT_TYPE.ACTION_EVENT,
'importance': event_log.EVENT_IMPORTANCE.HIGH,
'reason_text': "Resize rejected for instance %(instance_name)s%(reason)s",
'exclude_event_context': [],
'event_context_data': {
event_log.EVENT_CONTEXT.ADMIN: {
'entity_type': "tenant.instance",
'entity': "tenant=%(tenant_uuid)s.instance=%(instance_uuid)s",
'reason_text': "Resize rejected for instance %(instance_name)s "
"on host %(host_name)s%(reason)s",
}
}
},
event_log.EVENT_ID.INSTANCE_RESIZE_CANCELLED: {
'entity_type': "instance",
'entity': "instance=%(instance_uuid)s",
'event_type': event_log.EVENT_TYPE.ACTION_EVENT,
'importance': event_log.EVENT_IMPORTANCE.HIGH,
'reason_text': "Resize cancelled for instance %(instance_name)s",
'exclude_event_context': [],
'event_context_data': {
event_log.EVENT_CONTEXT.ADMIN: {
'entity_type': "tenant.instance",
'entity': "tenant=%(tenant_uuid)s.instance=%(instance_uuid)s",
'reason_text': "Resize cancelled for instance %(instance_name)s "
"on host %(host_name)s%(reason)s",
}
}
},
event_log.EVENT_ID.INSTANCE_RESIZE_FAILED: {
'entity_type': "instance",
'entity': "instance=%(instance_uuid)s",
'event_type': event_log.EVENT_TYPE.ACTION_EVENT,
'importance': event_log.EVENT_IMPORTANCE.HIGH,
'reason_text': "Resize failed for instance %(instance_name)s",
'exclude_event_context': [],
'event_context_data': {
event_log.EVENT_CONTEXT.ADMIN: {
'entity_type': "tenant.instance",
'entity': "tenant=%(tenant_uuid)s.instance=%(instance_uuid)s",
'reason_text': "Resize failed for instance %(instance_name)s "
"on host %(host_name)s%(reason)s",
}
}
},
event_log.EVENT_ID.INSTANCE_RESIZED: {
'entity_type': "instance",
'entity': "instance=%(instance_uuid)s",
'event_type': event_log.EVENT_TYPE.ACTION_EVENT,
'importance': event_log.EVENT_IMPORTANCE.HIGH,
'reason_text': "Resize complete for instance %(instance_name)s",
'exclude_event_context': [],
'event_context_data': {
event_log.EVENT_CONTEXT.ADMIN: {
'entity_type': "tenant.instance",
'entity': "tenant=%(tenant_uuid)s.instance=%(instance_uuid)s",
'reason_text': "Resize complete for instance %(instance_name)s "
"enabled on host %(host_name)s waiting for "
"confirmation",
}
}
},
event_log.EVENT_ID.INSTANCE_RESIZE_CONFIRM_BEGIN: {
'entity_type': "instance",
'entity': "instance=%(instance_uuid)s",
'event_type': event_log.EVENT_TYPE.ACTION_EVENT,
'importance': event_log.EVENT_IMPORTANCE.HIGH,
'reason_text': "Resize-Confirm issued against instance %(instance_name)s",
'exclude_event_context': [],
'event_context_data': {
event_log.EVENT_CONTEXT.ADMIN: {
'entity_type': "tenant.instance",
'entity': "tenant=%(tenant_uuid)s.instance=%(instance_uuid)s",
'reason_text': "Resize-Confirm issued %(initiated_text)s against "
"instance %(instance_name)s owned by "
"%(tenant_name)s on host %(host_name)s%(reason)s",
}
}
},
event_log.EVENT_ID.INSTANCE_RESIZE_CONFIRMING: {
'entity_type': "instance",
'entity': "instance=%(instance_uuid)s",
'event_type': event_log.EVENT_TYPE.ACTION_EVENT,
'importance': event_log.EVENT_IMPORTANCE.HIGH,
'reason_text': "Resize-Confirm inprogress for instance %(instance_name)s",
'exclude_event_context': [],
'event_context_data': {
event_log.EVENT_CONTEXT.ADMIN: {
'entity_type': "tenant.instance",
'entity': "tenant=%(tenant_uuid)s.instance=%(instance_uuid)s",
'reason_text': "Resize-Confirm inprogress for instance "
"%(instance_name)s on host %(from_host_name)s",
}
}
},
event_log.EVENT_ID.INSTANCE_RESIZE_CONFIRM_REJECTED: {
'entity_type': "instance",
'entity': "instance=%(instance_uuid)s",
'event_type': event_log.EVENT_TYPE.ACTION_EVENT,
'importance': event_log.EVENT_IMPORTANCE.HIGH,
'reason_text': "Resize-Confirm rejected for instance %(instance_name)s"
"%(reason)s",
'exclude_event_context': [],
'event_context_data': {
event_log.EVENT_CONTEXT.ADMIN: {
'entity_type': "tenant.instance",
'entity': "tenant=%(tenant_uuid)s.instance=%(instance_uuid)s",
'reason_text': "Resize-Confirm rejected for instance "
"%(instance_name)s owned by %(tenant_name)s on "
"host %(host_name)s%(reason)s",
}
}
},
event_log.EVENT_ID.INSTANCE_RESIZE_CONFIRM_CANCELLED: {
'entity_type': "instance",
'entity': "instance=%(instance_uuid)s",
'event_type': event_log.EVENT_TYPE.ACTION_EVENT,
'importance': event_log.EVENT_IMPORTANCE.HIGH,
'reason_text': "Resize-Confirm cancelled for instance %(instance_name)s",
'exclude_event_context': [],
'event_context_data': {
event_log.EVENT_CONTEXT.ADMIN: {
'entity_type': "tenant.instance",
'entity': "tenant=%(tenant_uuid)s.instance=%(instance_uuid)s",
'reason_text': "Resize-Confirm cancelled for instance "
"%(instance_name)s on host %(host_name)s%(reason)s",
}
}
},
event_log.EVENT_ID.INSTANCE_RESIZE_CONFIRM_FAILED: {
'entity_type': "instance",
'entity': "instance=%(instance_uuid)s",
'event_type': event_log.EVENT_TYPE.ACTION_EVENT,
'importance': event_log.EVENT_IMPORTANCE.HIGH,
'reason_text': "Resize-Confirm failed for instance %(instance_name)s",
'exclude_event_context': [],
'event_context_data': {
event_log.EVENT_CONTEXT.ADMIN: {
'entity_type': "tenant.instance",
'entity': "tenant=%(tenant_uuid)s.instance=%(instance_uuid)s",
'reason_text': "Resize-Confirm failed for instance "
"%(instance_name)s on host %(host_name)s%(reason)s",
}
}
},
event_log.EVENT_ID.INSTANCE_RESIZE_CONFIRMED: {
'entity_type': "instance",
'entity': "instance=%(instance_uuid)s",
'event_type': event_log.EVENT_TYPE.ACTION_EVENT,
'importance': event_log.EVENT_IMPORTANCE.HIGH,
'reason_text': "Resize-Confirm complete for instance %(instance_name)s",
'exclude_event_context': [],
'event_context_data': {
event_log.EVENT_CONTEXT.ADMIN: {
'entity_type': "tenant.instance",
'entity': "tenant=%(tenant_uuid)s.instance=%(instance_uuid)s",
'reason_text': "Resize-Confirm complete for instance "
"%(instance_name)s enabled on host %(host_name)s",
}
}
},
event_log.EVENT_ID.INSTANCE_RESIZE_REVERT_BEGIN: {
'entity_type': "instance",
'entity': "instance=%(instance_uuid)s",
'event_type': event_log.EVENT_TYPE.ACTION_EVENT,
'importance': event_log.EVENT_IMPORTANCE.HIGH,
'reason_text': "Resize-Revert issued against instance %(instance_name)s",
'exclude_event_context': [],
'event_context_data': {
event_log.EVENT_CONTEXT.ADMIN: {
'entity_type': "tenant.instance",
'entity': "tenant=%(tenant_uuid)s.instance=%(instance_uuid)s",
'reason_text': "Resize-Revert issued %(initiated_text)s against "
"instance %(instance_name)s owned by "
"%(tenant_name)s on host %(host_name)s%(reason)s",
}
}
},
event_log.EVENT_ID.INSTANCE_RESIZE_REVERTING: {
'entity_type': "instance",
'entity': "instance=%(instance_uuid)s",
'event_type': event_log.EVENT_TYPE.ACTION_EVENT,
'importance': event_log.EVENT_IMPORTANCE.HIGH,
'reason_text': "Resize-Revert inprogress for instance %(instance_name)s",
'exclude_event_context': [],
'event_context_data': {
event_log.EVENT_CONTEXT.ADMIN: {
'entity_type': "tenant.instance",
'entity': "tenant=%(tenant_uuid)s.instance=%(instance_uuid)s",
'reason_text': "Resize-Revert inprogress for instance "
"%(instance_name)s on host %(from_host_name)s",
}
}
},
event_log.EVENT_ID.INSTANCE_RESIZE_REVERT_REJECTED: {
'entity_type': "instance",
'entity': "instance=%(instance_uuid)s",
'event_type': event_log.EVENT_TYPE.ACTION_EVENT,
'importance': event_log.EVENT_IMPORTANCE.HIGH,
'reason_text': "Resize-Revert rejected for instance %(instance_name)s"
"%(reason)s",
'exclude_event_context': [],
'event_context_data': {
event_log.EVENT_CONTEXT.ADMIN: {
'entity_type': "tenant.instance",
'entity': "tenant=%(tenant_uuid)s.instance=%(instance_uuid)s",
'reason_text': "Resize-Revert rejected for instance "
"%(instance_name)s owned by %(tenant_name)s on "
"host %(host_name)s%(reason)s",
}
}
},
event_log.EVENT_ID.INSTANCE_RESIZE_REVERT_CANCELLED: {
'entity_type': "instance",
'entity': "instance=%(instance_uuid)s",
'event_type': event_log.EVENT_TYPE.ACTION_EVENT,
'importance': event_log.EVENT_IMPORTANCE.HIGH,
'reason_text': "Resize-Revert cancelled for instance %(instance_name)s",
'exclude_event_context': [],
'event_context_data': {
event_log.EVENT_CONTEXT.ADMIN: {
'entity_type': "tenant.instance",
'entity': "tenant=%(tenant_uuid)s.instance=%(instance_uuid)s",
'reason_text': "Resize-Revert cancelled for instance "
"%(instance_name)s on host %(host_name)s%(reason)s",
}
}
},
event_log.EVENT_ID.INSTANCE_RESIZE_REVERT_FAILED: {
'entity_type': "instance",
'entity': "instance=%(instance_uuid)s",
'event_type': event_log.EVENT_TYPE.ACTION_EVENT,
'importance': event_log.EVENT_IMPORTANCE.HIGH,
'reason_text': "Resize-Revert failed for instance %(instance_name)s",
'exclude_event_context': [],
'event_context_data': {
event_log.EVENT_CONTEXT.ADMIN: {
'entity_type': "tenant.instance",
'entity': "tenant=%(tenant_uuid)s.instance=%(instance_uuid)s",
'reason_text': "Resize-Revert failed for instance "
"%(instance_name)s on host %(host_name)s%(reason)s",
}
}
},
event_log.EVENT_ID.INSTANCE_RESIZE_REVERTED: {
'entity_type': "instance",
'entity': "instance=%(instance_uuid)s",
'event_type': event_log.EVENT_TYPE.ACTION_EVENT,
'importance': event_log.EVENT_IMPORTANCE.HIGH,
'reason_text': "Resize-Revert complete for instance %(instance_name)s",
'exclude_event_context': [],
'event_context_data': {
event_log.EVENT_CONTEXT.ADMIN: {
'entity_type': "tenant.instance",
'entity': "tenant=%(tenant_uuid)s.instance=%(instance_uuid)s",
'reason_text': "Resize-Revert complete for instance "
"%(instance_name)s enabled on host %(host_name)s",
}
}
},
event_log.EVENT_ID.INSTANCE_EVACUATE_BEGIN: {
'entity_type': "instance",
'entity': "instance=%(instance_uuid)s",
'event_type': event_log.EVENT_TYPE.ACTION_EVENT,
'importance': event_log.EVENT_IMPORTANCE.HIGH,
'reason_text': "Evacuate issued against instance %(instance_name)s",
'exclude_event_context': [],
'event_context_data': {
event_log.EVENT_CONTEXT.ADMIN: {
'entity_type': "tenant.instance",
'entity': "tenant=%(tenant_uuid)s.instance=%(instance_uuid)s",
'reason_text': "Evacuate issued %(initiated_text)s against "
"instance %(instance_name)s owned by "
"%(tenant_name)s on host %(host_name)s%(reason)s",
}
}
},
event_log.EVENT_ID.INSTANCE_EVACUATING: {
'entity_type': "instance",
'entity': "instance=%(instance_uuid)s",
'event_type': event_log.EVENT_TYPE.ACTION_EVENT,
'importance': event_log.EVENT_IMPORTANCE.HIGH,
'reason_text': "Evacuating instance %(instance_name)s",
'exclude_event_context': [],
'event_context_data': {
event_log.EVENT_CONTEXT.ADMIN: {
'entity_type': "tenant.instance",
'entity': "tenant=%(tenant_uuid)s.instance=%(instance_uuid)s",
'reason_text': "Evacuating instance %(instance_name)s owned "
"by %(tenant_name)s from host %(from_host_name)s",
}
}
},
event_log.EVENT_ID.INSTANCE_EVACUATE_REJECTED: {
'entity_type': "instance",
'entity': "instance=%(instance_uuid)s",
'event_type': event_log.EVENT_TYPE.ACTION_EVENT,
'importance': event_log.EVENT_IMPORTANCE.HIGH,
'reason_text': "Evacuate rejected for instance %(instance_name)s"
"%(reason)s",
'exclude_event_context': [],
'event_context_data': {
event_log.EVENT_CONTEXT.ADMIN: {
'entity_type': "tenant.instance",
'entity': "tenant=%(tenant_uuid)s.instance=%(instance_uuid)s",
'reason_text': "Evacuate rejected for instance %(instance_name)s "
"owned by %(tenant_name)s on host %(host_name)s"
"%(reason)s",
}
}
},
event_log.EVENT_ID.INSTANCE_EVACUATE_CANCELLED: {
'entity_type': "instance",
'entity': "instance=%(instance_uuid)s",
'event_type': event_log.EVENT_TYPE.ACTION_EVENT,
'importance': event_log.EVENT_IMPORTANCE.HIGH,
'reason_text': "Evacuate cancelled for instance %(instance_name)s",
'exclude_event_context': [],
'event_context_data': {
event_log.EVENT_CONTEXT.ADMIN: {
'entity_type': "tenant.instance",
'entity': "tenant=%(tenant_uuid)s.instance=%(instance_uuid)s",
'reason_text': "Evacuate cancelled for instance %(instance_name)s "
"on host %(host_name)s%(reason)s",
}
}
},
event_log.EVENT_ID.INSTANCE_EVACUATE_FAILED: {
'entity_type': "instance",
'entity': "instance=%(instance_uuid)s",
'event_type': event_log.EVENT_TYPE.ACTION_EVENT,
'importance': event_log.EVENT_IMPORTANCE.HIGH,
'reason_text': "Evacuate failed for instance %(instance_name)s",
'exclude_event_context': [],
'event_context_data': {
event_log.EVENT_CONTEXT.ADMIN: {
'entity_type': "tenant.instance",
'entity': "tenant=%(tenant_uuid)s.instance=%(instance_uuid)s",
'reason_text': "Evacuate failed for instance %(instance_name)s "
"on host %(host_name)s%(reason)s",
}
}
},
event_log.EVENT_ID.INSTANCE_EVACUATED: {
'entity_type': "instance",
'entity': "instance=%(instance_uuid)s",
'event_type': event_log.EVENT_TYPE.ACTION_EVENT,
'importance': event_log.EVENT_IMPORTANCE.HIGH,
'reason_text': "Evacuate complete for instance %(instance_name)s",
'exclude_event_context': [],
'event_context_data': {
event_log.EVENT_CONTEXT.ADMIN: {
'entity_type': "tenant.instance",
'entity': "tenant=%(tenant_uuid)s.instance=%(instance_uuid)s",
'reason_text': "Evacuate complete for instance %(instance_name)s "
"now enabled on host %(host_name)s",
}
}
},
event_log.EVENT_ID.INSTANCE_START_BEGIN: {
'entity_type': "instance",
'entity': "instance=%(instance_uuid)s",
'event_type': event_log.EVENT_TYPE.ACTION_EVENT,
'importance': event_log.EVENT_IMPORTANCE.HIGH,
'reason_text': "Start issued against instance %(instance_name)s",
'exclude_event_context': [],
'event_context_data': {
event_log.EVENT_CONTEXT.ADMIN: {
'entity_type': "tenant.instance",
'entity': "tenant=%(tenant_uuid)s.instance=%(instance_uuid)s",
'reason_text': "Start issued %(initiated_text)s against instance "
"%(instance_name)s owned by %(tenant_name)s on "
"host %(host_name)s%(reason)s",
}
}
},
event_log.EVENT_ID.INSTANCE_STARTING: {
'entity_type': "instance",
'entity': "instance=%(instance_uuid)s",
'event_type': event_log.EVENT_TYPE.ACTION_EVENT,
'importance': event_log.EVENT_IMPORTANCE.HIGH,
'reason_text': "Start inprogress for instance %(instance_name)s",
'exclude_event_context': [],
'event_context_data': {
event_log.EVENT_CONTEXT.ADMIN: {
'entity_type': "tenant.instance",
'entity': "tenant=%(tenant_uuid)s.instance=%(instance_uuid)s",
'reason_text': "Start inprogress for instance %(instance_name)s "
"on host %(host_name)s",
}
}
},
event_log.EVENT_ID.INSTANCE_START_REJECTED: {
'entity_type': "instance",
'entity': "instance=%(instance_uuid)s",
'event_type': event_log.EVENT_TYPE.ACTION_EVENT,
'importance': event_log.EVENT_IMPORTANCE.HIGH,
'reason_text': "Start rejected for instance %(instance_name)s%(reason)s",
'exclude_event_context': [],
'event_context_data': {
event_log.EVENT_CONTEXT.ADMIN: {
'entity_type': "tenant.instance",
'entity': "tenant=%(tenant_uuid)s.instance=%(instance_uuid)s",
'reason_text': "Start rejected for instance %(instance_name)s "
"on host %(host_name)s%(reason)s",
}
}
},
event_log.EVENT_ID.INSTANCE_START_CANCELLED: {
'entity_type': "instance",
'entity': "instance=%(instance_uuid)s",
'event_type': event_log.EVENT_TYPE.ACTION_EVENT,
'importance': event_log.EVENT_IMPORTANCE.HIGH,
'reason_text': "Start cancelled for instance %(instance_name)s",
'exclude_event_context': [],
'event_context_data': {
event_log.EVENT_CONTEXT.ADMIN: {
'entity_type': "tenant.instance",
'entity': "tenant=%(tenant_uuid)s.instance=%(instance_uuid)s",
'reason_text': "Start cancelled for instance %(instance_name)s "
"on host %(host_name)s%(reason)s",
}
}
},
event_log.EVENT_ID.INSTANCE_START_FAILED: {
'entity_type': "instance",
'entity': "instance=%(instance_uuid)s",
'event_type': event_log.EVENT_TYPE.ACTION_EVENT,
'importance': event_log.EVENT_IMPORTANCE.HIGH,
'reason_text': "Start failed for instance %(instance_name)s",
'exclude_event_context': [],
'event_context_data': {
event_log.EVENT_CONTEXT.ADMIN: {
'entity_type': "tenant.instance",
'entity': "tenant=%(tenant_uuid)s.instance=%(instance_uuid)s",
'reason_text': "Start failed for instance %(instance_name)s "
"on host %(host_name)s%(reason)s",
}
}
},
event_log.EVENT_ID.INSTANCE_STARTED: {
'entity_type': "instance",
'entity': "instance=%(instance_uuid)s",
'event_type': event_log.EVENT_TYPE.ACTION_EVENT,
'importance': event_log.EVENT_IMPORTANCE.HIGH,
'reason_text': "Start complete for instance %(instance_name)s",
'exclude_event_context': [],
'event_context_data': {
event_log.EVENT_CONTEXT.ADMIN: {
'entity_type': "tenant.instance",
'entity': "tenant=%(tenant_uuid)s.instance=%(instance_uuid)s",
'reason_text': "Start complete for instance %(instance_name)s "
"now enabled on host %(host_name)s",
}
}
},
event_log.EVENT_ID.INSTANCE_STOP_BEGIN: {
'entity_type': "instance",
'entity': "instance=%(instance_uuid)s",
'event_type': event_log.EVENT_TYPE.ACTION_EVENT,
'importance': event_log.EVENT_IMPORTANCE.HIGH,
'reason_text': "Stop issued against instance %(instance_name)s",
'exclude_event_context': [],
'event_context_data': {
event_log.EVENT_CONTEXT.ADMIN: {
'entity_type': "tenant.instance",
'entity': "tenant=%(tenant_uuid)s.instance=%(instance_uuid)s",
'reason_text': "Stop issued %(initiated_text)s against instance "
"%(instance_name)s owned by %(tenant_name)s on "
"host %(host_name)s%(reason)s",
}
}
},
event_log.EVENT_ID.INSTANCE_STOPPING: {
'entity_type': "instance",
'entity': "instance=%(instance_uuid)s",
'event_type': event_log.EVENT_TYPE.ACTION_EVENT,
'importance': event_log.EVENT_IMPORTANCE.HIGH,
'reason_text': "Stop inprogress for instance %(instance_name)s",
'exclude_event_context': [],
'event_context_data': {
event_log.EVENT_CONTEXT.ADMIN: {
'entity_type': "tenant.instance",
'entity': "tenant=%(tenant_uuid)s.instance=%(instance_uuid)s",
'reason_text': "Stop inprogress for instance %(instance_name)s "
"on host %(host_name)s",
}
}
},
event_log.EVENT_ID.INSTANCE_STOP_REJECTED: {
'entity_type': "instance",
'entity': "instance=%(instance_uuid)s",
'event_type': event_log.EVENT_TYPE.ACTION_EVENT,
'importance': event_log.EVENT_IMPORTANCE.HIGH,
'reason_text': "Stop rejected for instance %(instance_name)s%(reason)s",
'exclude_event_context': [],
'event_context_data': {
event_log.EVENT_CONTEXT.ADMIN: {
'entity_type': "tenant.instance",
'entity': "tenant=%(tenant_uuid)s.instance=%(instance_uuid)s",
'reason_text': "Stop rejected for instance %(instance_name)s "
"enabled on host %(host_name)s%(reason)s",
}
}
},
event_log.EVENT_ID.INSTANCE_STOP_CANCELLED: {
'entity_type': "instance",
'entity': "instance=%(instance_uuid)s",
'event_type': event_log.EVENT_TYPE.ACTION_EVENT,
'importance': event_log.EVENT_IMPORTANCE.HIGH,
'reason_text': "Stop cancelled for instance %(instance_name)s",
'exclude_event_context': [],
'event_context_data': {
event_log.EVENT_CONTEXT.ADMIN: {
'entity_type': "tenant.instance",
'entity': "tenant=%(tenant_uuid)s.instance=%(instance_uuid)s",
'reason_text': "Stop cancelled for instance %(instance_name)s "
"on host %(host_name)s%(reason)s",
}
}
},
event_log.EVENT_ID.INSTANCE_STOP_FAILED: {
'entity_type': "instance",
'entity': "instance=%(instance_uuid)s",
'event_type': event_log.EVENT_TYPE.ACTION_EVENT,
'importance': event_log.EVENT_IMPORTANCE.HIGH,
'reason_text': "Stop failed for instance %(instance_name)s",
'exclude_event_context': [],
'event_context_data': {
event_log.EVENT_CONTEXT.ADMIN: {
'entity_type': "tenant.instance",
'entity': "tenant=%(tenant_uuid)s.instance=%(instance_uuid)s",
'reason_text': "Stop failed for instance %(instance_name)s "
"on host %(host_name)s%(reason)s",
}
}
},
event_log.EVENT_ID.INSTANCE_STOPPED: {
'entity_type': "instance",
'entity': "instance=%(instance_uuid)s",
'event_type': event_log.EVENT_TYPE.ACTION_EVENT,
'importance': event_log.EVENT_IMPORTANCE.HIGH,
'reason_text': "Stop complete for instance %(instance_name)s",
'exclude_event_context': [],
'event_context_data': {
event_log.EVENT_CONTEXT.ADMIN: {
'entity_type': "tenant.instance",
'entity': "tenant=%(tenant_uuid)s.instance=%(instance_uuid)s",
'reason_text': "Stop complete for instance %(instance_name)s "
"now disabled on host %(host_name)s",
}
}
},
event_log.EVENT_ID.INSTANCE_REBOOT_BEGIN: {
'entity_type': "instance",
'entity': "instance=%(instance_uuid)s",
'event_type': event_log.EVENT_TYPE.ACTION_EVENT,
'importance': event_log.EVENT_IMPORTANCE.HIGH,
'reason_text': "Reboot %(additional_text)s issued against instance "
"%(instance_name)s",
'exclude_event_context': [],
'event_context_data': {
event_log.EVENT_CONTEXT.ADMIN: {
'entity_type': "tenant.instance",
'entity': "tenant=%(tenant_uuid)s.instance=%(instance_uuid)s",
'reason_text': "Reboot %(additional_text)s issued "
"%(initiated_text)s against instance "
"%(instance_name)s owned by %(tenant_name)s on "
"host %(host_name)s%(reason)s",
}
}
},
event_log.EVENT_ID.INSTANCE_REBOOTING: {
'entity_type': "instance",
'entity': "instance=%(instance_uuid)s",
'event_type': event_log.EVENT_TYPE.ACTION_EVENT,
'importance': event_log.EVENT_IMPORTANCE.HIGH,
'reason_text': "Reboot inprogress for instance %(instance_name)s",
'exclude_event_context': [],
'event_context_data': {
event_log.EVENT_CONTEXT.ADMIN: {
'entity_type': "tenant.instance",
'entity': "tenant=%(tenant_uuid)s.instance=%(instance_uuid)s",
'reason_text': "Reboot inprogress for instance %(instance_name)s "
"on host %(host_name)s",
}
}
},
event_log.EVENT_ID.INSTANCE_REBOOT_REJECTED: {
'entity_type': "instance",
'entity': "instance=%(instance_uuid)s",
'event_type': event_log.EVENT_TYPE.ACTION_EVENT,
'importance': event_log.EVENT_IMPORTANCE.HIGH,
'reason_text': "Reboot rejected for instance %(instance_name)s%(reason)s",
'exclude_event_context': [],
'event_context_data': {
event_log.EVENT_CONTEXT.ADMIN: {
'entity_type': "tenant.instance",
'entity': "tenant=%(tenant_uuid)s.instance=%(instance_uuid)s",
'reason_text': "Reboot rejected for instance %(instance_name)s "
"on host %(host_name)s%(reason)s",
}
}
},
event_log.EVENT_ID.INSTANCE_REBOOT_CANCELLED: {
'entity_type': "instance",
'entity': "instance=%(instance_uuid)s",
'event_type': event_log.EVENT_TYPE.ACTION_EVENT,
'importance': event_log.EVENT_IMPORTANCE.HIGH,
'reason_text': "Reboot cancelled for instance %(instance_name)s",
'exclude_event_context': [],
'event_context_data': {
event_log.EVENT_CONTEXT.ADMIN: {
'entity_type': "tenant.instance",
'entity': "tenant=%(tenant_uuid)s.instance=%(instance_uuid)s",
'reason_text': "Reboot cancelled for instance %(instance_name)s "
"on host %(host_name)s%(reason)s",
}
}
},
event_log.EVENT_ID.INSTANCE_REBOOT_FAILED: {
'entity_type': "instance",
'entity': "instance=%(instance_uuid)s",
'event_type': event_log.EVENT_TYPE.ACTION_EVENT,
'importance': event_log.EVENT_IMPORTANCE.HIGH,
'reason_text': "Reboot failed for instance %(instance_name)s",
'exclude_event_context': [],
'event_context_data': {
event_log.EVENT_CONTEXT.ADMIN: {
'entity_type': "tenant.instance",
'entity': "tenant=%(tenant_uuid)s.instance=%(instance_uuid)s",
'reason_text': "Reboot failed for instance %(instance_name)s "
"on host %(host_name)s%(reason)s",
}
}
},
event_log.EVENT_ID.INSTANCE_REBOOTED: {
'entity_type': "instance",
'entity': "instance=%(instance_uuid)s",
'event_type': event_log.EVENT_TYPE.ACTION_EVENT,
'importance': event_log.EVENT_IMPORTANCE.HIGH,
'reason_text': "Reboot complete for instance %(instance_name)s",
'exclude_event_context': [],
'event_context_data': {
event_log.EVENT_CONTEXT.ADMIN: {
'entity_type': "tenant.instance",
'entity': "tenant=%(tenant_uuid)s.instance=%(instance_uuid)s",
'reason_text': "Reboot complete for instance %(instance_name)s "
"now enabled on host %(host_name)s",
}
}
},
event_log.EVENT_ID.INSTANCE_REBUILD_BEGIN: {
'entity_type': "instance",
'entity': "instance=%(instance_uuid)s",
'event_type': event_log.EVENT_TYPE.ACTION_EVENT,
'importance': event_log.EVENT_IMPORTANCE.HIGH,
'reason_text': "Rebuild issued against instance %(instance_name)s "
"using image %(additional_text)s",
'exclude_event_context': [],
'event_context_data': {
event_log.EVENT_CONTEXT.ADMIN: {
'entity_type': "tenant.instance",
'entity': "tenant=%(tenant_uuid)s.instance=%(instance_uuid)s",
'reason_text': "Rebuild issued %(initiated_text)s against "
"instance %(instance_name)s owned by "
"%(tenant_name)s using image %(additional_text)s "
"on host %(host_name)s%(reason)s",
}
}
},
event_log.EVENT_ID.INSTANCE_REBUILDING: {
'entity_type': "instance",
'entity': "instance=%(instance_uuid)s",
'event_type': event_log.EVENT_TYPE.ACTION_EVENT,
'importance': event_log.EVENT_IMPORTANCE.HIGH,
'reason_text': "Rebuild inprogress for instance %(instance_name)s",
'exclude_event_context': [],
'event_context_data': {
event_log.EVENT_CONTEXT.ADMIN: {
'entity_type': "tenant.instance",
'entity': "tenant=%(tenant_uuid)s.instance=%(instance_uuid)s",
'reason_text': "Rebuild inprogress for instance %(instance_name)s "
"on host %(host_name)s",
}
}
},
event_log.EVENT_ID.INSTANCE_REBUILD_REJECTED: {
'entity_type': "instance",
'entity': "instance=%(instance_uuid)s",
'event_type': event_log.EVENT_TYPE.ACTION_EVENT,
'importance': event_log.EVENT_IMPORTANCE.HIGH,
'reason_text': "Rebuild rejected for instance %(instance_name)s%(reason)s",
'exclude_event_context': [],
'event_context_data': {
event_log.EVENT_CONTEXT.ADMIN: {
'entity_type': "tenant.instance",
'entity': "tenant=%(tenant_uuid)s.instance=%(instance_uuid)s",
'reason_text': "Rebuild rejected for instance %(instance_name)s "
"on host %(host_name)s%(reason)s",
}
}
},
event_log.EVENT_ID.INSTANCE_REBUILD_CANCELLED: {
'entity_type': "instance",
'entity': "instance=%(instance_uuid)s",
'event_type': event_log.EVENT_TYPE.ACTION_EVENT,
'importance': event_log.EVENT_IMPORTANCE.HIGH,
'reason_text': "Rebuild cancelled for instance %(instance_name)s",
'exclude_event_context': [],
'event_context_data': {
event_log.EVENT_CONTEXT.ADMIN: {
'entity_type': "tenant.instance",
'entity': "tenant=%(tenant_uuid)s.instance=%(instance_uuid)s",
'reason_text': "Rebuild cancelled for instance %(instance_name)s "
"on host %(host_name)s%(reason)s",
}
}
},
event_log.EVENT_ID.INSTANCE_REBUILD_FAILED: {
'entity_type': "instance",
'entity': "instance=%(instance_uuid)s",
'event_type': event_log.EVENT_TYPE.ACTION_EVENT,
'importance': event_log.EVENT_IMPORTANCE.HIGH,
'reason_text': "Rebuild failed for instance %(instance_name)s",
'exclude_event_context': [],
'event_context_data': {
event_log.EVENT_CONTEXT.ADMIN: {
'entity_type': "tenant.instance",
'entity': "tenant=%(tenant_uuid)s.instance=%(instance_uuid)s",
'reason_text': "Rebuild failed for instance %(instance_name)s "
"on host %(host_name)s%(reason)s",
}
}
},
event_log.EVENT_ID.INSTANCE_REBUILT: {
'entity_type': "instance",
'entity': "instance=%(instance_uuid)s",
'event_type': event_log.EVENT_TYPE.ACTION_EVENT,
'importance': event_log.EVENT_IMPORTANCE.HIGH,
'reason_text': "Rebuild complete for instance %(instance_name)s",
'exclude_event_context': [],
'event_context_data': {
event_log.EVENT_CONTEXT.ADMIN: {
'entity_type': "tenant.instance",
'entity': "tenant=%(tenant_uuid)s.instance=%(instance_uuid)s",
'reason_text': "Rebuild complete for instance %(instance_name)s "
"now enabled on host %(host_name)s",
}
}
},
event_log.EVENT_ID.INSTANCE_GUEST_HEARTBEAT_ESTABLISHED: {
'entity_type': "instance",
'entity': "instance=%(instance_uuid)s",
'event_type': event_log.EVENT_TYPE.ACTION_EVENT,
'importance': event_log.EVENT_IMPORTANCE.MEDIUM,
'reason_text': "Guest Heartbeat established for instance "
"%(instance_name)s",
'exclude_event_context': [],
'event_context_data': {
event_log.EVENT_CONTEXT.ADMIN: {
'entity_type': "tenant.instance",
'entity': "tenant=%(tenant_uuid)s.instance=%(instance_uuid)s",
'reason_text': "Guest Heartbeat established for instance "
"%(instance_name)s on host %(host_name)s",
}
}
},
event_log.EVENT_ID.INSTANCE_GUEST_HEARTBEAT_DISCONNECTED: {
'entity_type': "instance",
'entity': "instance=%(instance_uuid)s",
'event_type': event_log.EVENT_TYPE.ACTION_EVENT,
'importance': event_log.EVENT_IMPORTANCE.MEDIUM,
'reason_text': "Guest Heartbeat disconnected for instance "
"%(instance_name)s",
'exclude_event_context': [],
'event_context_data': {
event_log.EVENT_CONTEXT.ADMIN: {
'entity_type': "tenant.instance",
'entity': "tenant=%(tenant_uuid)s.instance=%(instance_uuid)s",
'reason_text': "Guest Heartbeat disconnected for instance "
"%(instance_name)s on host %(host_name)s",
}
}
},
event_log.EVENT_ID.INSTANCE_GUEST_HEARTBEAT_FAILED: {
'entity_type': "instance",
'entity': "instance=%(instance_uuid)s",
'event_type': event_log.EVENT_TYPE.ACTION_EVENT,
'importance': event_log.EVENT_IMPORTANCE.HIGH,
'reason_text': "Guest Heartbeat failed for instance %(instance_name)s"
"%(additional_text)s",
'exclude_event_context': [],
'event_context_data': {
event_log.EVENT_CONTEXT.ADMIN: {
'entity_type': "tenant.instance",
'entity': "tenant=%(tenant_uuid)s.instance=%(instance_uuid)s",
'reason_text': "Guest Heartbeat failed for instance "
"%(instance_name)s on host %(host_name)s"
"%(repair_action)s",
}
}
},
event_log.EVENT_ID.INSTANCE_GUEST_HEALTH_CHECK_FAILED: {
'entity_type': "instance",
'entity': "instance=%(instance_uuid)s",
'event_type': event_log.EVENT_TYPE.ACTION_EVENT,
'importance': event_log.EVENT_IMPORTANCE.HIGH,
'reason_text': "Guest Health Check failed for instance %(instance_name)s"
"%(additional_text)s",
'exclude_event_context': [],
'event_context_data': {
event_log.EVENT_CONTEXT.ADMIN: {
'entity_type': "tenant.instance",
'entity': "tenant=%(tenant_uuid)s.instance=%(instance_uuid)s",
'reason_text': "Guest Health Check failed for instance "
"%(instance_name)s on host %(host_name)s"
"%(repair_action)s",
}
}
},
}
def _event_template_get(event_id, event_context):
    """
    Return the event template for event_id, customized for event_context.

    Returns None when the event id is unknown or when the given context
    is listed in the template's excluded contexts.
    """
    if event_id not in _event_templates:
        return None
    base = _event_templates[event_id]
    if event_context in base['exclude_event_context']:
        return None
    # Start from the generic (context-independent) template fields.
    template = {field: base[field]
                for field in ('entity_type', 'entity', 'event_type',
                              'importance', 'reason_text')}
    # Overlay any per-context overrides, field by field.
    context_data = base.get('event_context_data') or {}
    overrides = context_data.get(event_context)
    if overrides is not None:
        for field in template:
            if field in overrides:
                template[field] = overrides[field]
    return template
def _event_issue(event_id, event_context, template, data):
    """
    Build, log and return an event from a template and substitution data.

    The template's entity and reason_text are %-formatted with the given
    data mapping before the event is logged.
    """
    entity = template['entity'] % data
    reason = template['reason_text'] % data
    event_data = event_log.EventLogData(
        event_id, template['event_type'], event_context,
        template['entity_type'], entity, reason, template['importance'])
    event_log.event_log(event_data)
    return event_data
def instance_issue_log(instance, event_id, additional_text=None,
                       event_context=None, initiated_by=None, reason=None,
                       repair_action=None):
    """
    Issue an event log for an instance.

    Builds the template substitution data from the instance attributes and
    the optional text arguments, then issues the event for each applicable
    event context.

    Returns the list of issued event data objects (possibly empty when no
    template applies for the event id / context).
    """
    data = dict()
    data['tenant_uuid'] = instance.tenant_uuid
    data['tenant_name'] = instance.tenant_name
    data['instance_uuid'] = instance.uuid
    data['instance_name'] = instance.name
    data['host_name'] = instance.host_name
    data['from_host_name'] = instance.from_host_name

    # Trailing punctuation/whitespace is stripped so the fragments compose
    # cleanly into the template sentences.
    if additional_text is None:
        data['additional_text'] = ""
    else:
        data['additional_text'] = six.text_type(
            additional_text).rstrip('. \t\n\r')

    if initiated_by is None:
        data['initiated_text'] = ''
    elif event_log.EVENT_INITIATED_BY.TENANT == initiated_by:
        # When the tenant name was not resolved it equals the uuid; label
        # it explicitly as a tenant in that case.
        if instance.tenant_uuid == instance.tenant_name:
            data['initiated_text'] = "by tenant %s" % instance.tenant_uuid
        else:
            data['initiated_text'] = "by %s" % instance.tenant_name
    elif event_log.EVENT_INITIATED_BY.INSTANCE == initiated_by:
        data['initiated_text'] = "by the instance"
    elif event_log.EVENT_INITIATED_BY.INSTANCE_DIRECTOR == initiated_by:
        data['initiated_text'] = "by the system"
    else:
        data['initiated_text'] = ""

    if reason is None or '' == reason:
        data['reason'] = ""
    else:
        data['reason'] = (", reason = %s"
                          % six.text_type(reason).rstrip('. \t\n\r'))

    if repair_action is None or '' == repair_action:
        data['repair_action'] = ""
    else:
        data['repair_action'] = (", %s" % six.text_type(
            repair_action).rstrip('. \t\n\r'))

    event_list = list()

    # For now, override event context to be the admin only.
    # NOTE(review): this override makes the event_context parameter and the
    # all-contexts branch below unreachable until it is removed.
    event_context = event_log.EVENT_CONTEXT.ADMIN

    if event_context is None:
        contexts = list(event_log.EVENT_CONTEXT)
    else:
        contexts = [event_context]

    # Issue the event once per applicable context.
    for context in contexts:
        template = _event_template_get(event_id, context)
        if template is not None:
            event_list.append(_event_issue(event_id, context, template, data))
    return event_list
def instance_last_event(instance, event_id):
    """
    Return True when the given event id is among the instance's events.

    An empty (or None) event list yields False.
    """
    recorded = instance.events
    if not recorded:
        return False
    return any(event.event_id == event_id for event in recorded)
def instance_manage_events(instance, enabling=False):
    """
    Generate events associated with the given instance

    Inspects the instance's current state, running actions and previously
    recorded events (via instance_last_event) and issues up to three kinds
    of event logs in order: action-failure events, state-change events and
    action-progress/completion events.  Newly issued events replace the
    instance's recorded events at the end.

    The elif chains below are order-sensitive: the first matching "last
    event" determines which follow-up event is issued.
    """
    def last_event(ev_id):
        # True when ev_id is among the instance's currently recorded events.
        return instance_last_event(instance, ev_id)
    # Action (inprogress -> finished) Events
    event_id = None
    additional_text = ''
    reason = None
    events = list()
    if instance.is_failed() and not instance.is_action_running():
        if last_event(event_log.EVENT_ID.INSTANCE_LIVE_MIGRATING):
            # Only counts as a migrate failure when the instance did not
            # move (source and current host are the same).
            if instance.from_host_name == instance.host_name:
                event_id = event_log.EVENT_ID.INSTANCE_LIVE_MIGRATE_FAILED
        elif last_event(event_log.EVENT_ID.INSTANCE_COLD_MIGRATING):
            if instance.from_host_name == instance.host_name:
                event_id = event_log.EVENT_ID.INSTANCE_COLD_MIGRATE_FAILED
        elif last_event(event_log.EVENT_ID.INSTANCE_COLD_MIGRATE_CONFIRM_BEGIN):
            event_id = event_log.EVENT_ID.INSTANCE_COLD_MIGRATE_CONFIRM_FAILED
        elif last_event(event_log.EVENT_ID.INSTANCE_COLD_MIGRATE_REVERT_BEGIN):
            event_id = event_log.EVENT_ID.INSTANCE_COLD_MIGRATE_REVERT_FAILED
        elif last_event(event_log.EVENT_ID.INSTANCE_COLD_MIGRATE_REVERTING):
            event_id = event_log.EVENT_ID.INSTANCE_COLD_MIGRATE_REVERT_FAILED
        elif last_event(event_log.EVENT_ID.INSTANCE_RESIZE_CONFIRM_BEGIN):
            event_id = event_log.EVENT_ID.INSTANCE_RESIZE_CONFIRM_FAILED
        elif last_event(event_log.EVENT_ID.INSTANCE_RESIZE_REVERT_BEGIN):
            event_id = event_log.EVENT_ID.INSTANCE_RESIZE_REVERT_FAILED
        elif last_event(event_log.EVENT_ID.INSTANCE_RESIZE_REVERTING):
            event_id = event_log.EVENT_ID.INSTANCE_RESIZE_REVERT_FAILED
        elif last_event(event_log.EVENT_ID.INSTANCE_UNPAUSE_BEGIN):
            event_id = event_log.EVENT_ID.INSTANCE_UNPAUSE_FAILED
        elif last_event(event_log.EVENT_ID.INSTANCE_RESUME_BEGIN):
            event_id = event_log.EVENT_ID.INSTANCE_RESUME_FAILED
        elif last_event(event_log.EVENT_ID.INSTANCE_REBOOT_BEGIN):
            event_id = event_log.EVENT_ID.INSTANCE_REBOOT_FAILED
        elif last_event(event_log.EVENT_ID.INSTANCE_REBOOTING):
            event_id = event_log.EVENT_ID.INSTANCE_REBOOT_FAILED
        elif last_event(event_log.EVENT_ID.INSTANCE_START_BEGIN):
            event_id = event_log.EVENT_ID.INSTANCE_START_FAILED
        elif last_event(event_log.EVENT_ID.INSTANCE_EVACUATE_BEGIN):
            event_id = event_log.EVENT_ID.INSTANCE_EVACUATE_FAILED
        elif last_event(event_log.EVENT_ID.INSTANCE_EVACUATING):
            event_id = event_log.EVENT_ID.INSTANCE_EVACUATE_FAILED
        elif last_event(event_log.EVENT_ID.INSTANCE_REBUILD_BEGIN):
            event_id = event_log.EVENT_ID.INSTANCE_REBUILD_FAILED
        elif last_event(event_log.EVENT_ID.INSTANCE_REBUILDING):
            event_id = event_log.EVENT_ID.INSTANCE_REBUILD_FAILED
        elif last_event(event_log.EVENT_ID.INSTANCE_DELETING):
            event_id = event_log.EVENT_ID.INSTANCE_DELETE_FAILED
    # Avoid re-issuing an event that is already the last recorded one.
    if event_id is not None and not last_event(event_id):
        events = instance_issue_log(instance, event_id,
                                    additional_text=additional_text,
                                    reason=reason)
    # State Events
    event_id = None
    additional_text = ''
    reason = None
    if instance.is_locked() and not instance.was_locked():
        event_id = event_log.EVENT_ID.INSTANCE_STOPPED
    elif instance.is_failed() and not instance.was_failed():
        # An empty host name on failure means the instance was never
        # scheduled onto a host.
        if instance.host_name is None or '' == instance.host_name:
            event_id = event_log.EVENT_ID.INSTANCE_SCHEDULING_FAILED
        elif instance.is_action_running():
            if last_event(event_log.EVENT_ID.INSTANCE_DELETING):
                event_id = event_log.EVENT_ID.INSTANCE_DELETE_FAILED
            else:
                event_id = event_log.EVENT_ID.INSTANCE_FAILED
        else:
            event_id = event_log.EVENT_ID.INSTANCE_FAILED
            reason = instance.fail_reason
    elif instance.is_paused() and not instance.was_paused():
        event_id = event_log.EVENT_ID.INSTANCE_PAUSED
    elif instance.is_suspended() and not instance.was_suspended():
        event_id = event_log.EVENT_ID.INSTANCE_SUSPENDED
    if event_id is not None and not last_event(event_id):
        events.extend(instance_issue_log(instance, event_id,
                                         additional_text=additional_text,
                                         reason=reason))
    # Action Events
    event_id = None
    additional_text = ''
    reason = None
    if instance.is_rebooting():
        event_id = event_log.EVENT_ID.INSTANCE_REBOOTING
    elif instance.is_rebuilding():
        # A rebuild may be part of an evacuation; the prior BEGIN event
        # disambiguates which in-progress event to issue.
        if last_event(event_log.EVENT_ID.INSTANCE_EVACUATE_BEGIN):
            event_id = event_log.EVENT_ID.INSTANCE_EVACUATING
        elif last_event(event_log.EVENT_ID.INSTANCE_REBUILD_BEGIN):
            event_id = event_log.EVENT_ID.INSTANCE_REBUILDING
    elif instance.is_migrating():
        event_id = event_log.EVENT_ID.INSTANCE_LIVE_MIGRATING
    elif instance.is_resizing():
        # Resizing is shared between cold-migrate and resize flows; the
        # prior BEGIN event determines which one is in progress.
        if last_event(event_log.EVENT_ID.INSTANCE_COLD_MIGRATE_BEGIN):
            event_id = event_log.EVENT_ID.INSTANCE_COLD_MIGRATING
        elif last_event(event_log.EVENT_ID.INSTANCE_COLD_MIGRATE_REVERT_BEGIN):
            event_id = event_log.EVENT_ID.INSTANCE_COLD_MIGRATE_REVERTING
        elif last_event(event_log.EVENT_ID.INSTANCE_RESIZE_BEGIN):
            event_id = event_log.EVENT_ID.INSTANCE_RESIZING
        elif last_event(event_log.EVENT_ID.INSTANCE_RESIZE_REVERT_BEGIN):
            event_id = event_log.EVENT_ID.INSTANCE_RESIZE_REVERTING
    elif instance.is_resized():
        if last_event(event_log.EVENT_ID.INSTANCE_COLD_MIGRATE_BEGIN):
            event_id = event_log.EVENT_ID.INSTANCE_COLD_MIGRATING
        elif last_event(event_log.EVENT_ID.INSTANCE_COLD_MIGRATING):
            # A CLI-initiated cold migrate that has moved hosts is done
            # but still needs the user's confirmation.
            if instance.action_data.initiated_from_cli():
                if instance.from_host_name != instance.host_name:
                    event_id = event_log.EVENT_ID.INSTANCE_COLD_MIGRATED
                    additional_text = "waiting for confirmation"
        elif last_event(event_log.EVENT_ID.INSTANCE_COLD_MIGRATE_REVERT_BEGIN):
            event_id = event_log.EVENT_ID.INSTANCE_COLD_MIGRATE_REVERTING
        elif last_event(event_log.EVENT_ID.INSTANCE_RESIZE_BEGIN):
            event_id = event_log.EVENT_ID.INSTANCE_RESIZING
        elif last_event(event_log.EVENT_ID.INSTANCE_RESIZING):
            event_id = event_log.EVENT_ID.INSTANCE_RESIZED
    elif instance.is_enabled() and not instance.is_action_running():
        if last_event(event_log.EVENT_ID.INSTANCE_LIVE_MIGRATING):
            # Moved hosts means the migration succeeded.
            if instance.from_host_name != instance.host_name:
                event_id = event_log.EVENT_ID.INSTANCE_LIVE_MIGRATED
        elif last_event(event_log.EVENT_ID.INSTANCE_COLD_MIGRATING):
            if instance.from_host_name != instance.host_name:
                event_id = event_log.EVENT_ID.INSTANCE_COLD_MIGRATED
        elif last_event(event_log.EVENT_ID.INSTANCE_COLD_MIGRATE_CONFIRM_BEGIN):
            event_id = event_log.EVENT_ID.INSTANCE_COLD_MIGRATE_CONFIRMED
        elif last_event(event_log.EVENT_ID.INSTANCE_COLD_MIGRATE_REVERT_BEGIN):
            event_id = event_log.EVENT_ID.INSTANCE_COLD_MIGRATE_REVERTED
        elif last_event(event_log.EVENT_ID.INSTANCE_COLD_MIGRATE_REVERTING):
            event_id = event_log.EVENT_ID.INSTANCE_COLD_MIGRATE_REVERTED
        elif last_event(event_log.EVENT_ID.INSTANCE_RESIZING):
            # Note: This isn't going to work, because the unversioned
            # notifications we get from nova do not include the flavor details.
            # When we switch to use the versioned notifications, they will
            # include the flavor. However, I have verified that the original
            # reason for this clause no longer needs this code -
            # nova will explicitly fail a resize if the disk size in the new
            # flavor is smaller than the old flavor (instead of silently
            # failing). I am leaving this code here in case there are some
            # other silent failures we want to catch in the future.
            if instance.from_instance_type_original_name == \
                    instance.instance_type_original_name:
                event_id = event_log.EVENT_ID.INSTANCE_RESIZE_FAILED
        elif last_event(event_log.EVENT_ID.INSTANCE_RESIZE_CONFIRM_BEGIN):
            event_id = event_log.EVENT_ID.INSTANCE_RESIZE_CONFIRMED
        elif last_event(event_log.EVENT_ID.INSTANCE_RESIZE_REVERT_BEGIN):
            event_id = event_log.EVENT_ID.INSTANCE_RESIZE_REVERTED
        elif last_event(event_log.EVENT_ID.INSTANCE_RESIZE_REVERTING):
            event_id = event_log.EVENT_ID.INSTANCE_RESIZE_REVERTED
        elif last_event(event_log.EVENT_ID.INSTANCE_UNPAUSE_BEGIN):
            event_id = event_log.EVENT_ID.INSTANCE_UNPAUSED
        elif last_event(event_log.EVENT_ID.INSTANCE_RESUME_BEGIN):
            event_id = event_log.EVENT_ID.INSTANCE_RESUMED
        elif last_event(event_log.EVENT_ID.INSTANCE_REBOOT_BEGIN):
            event_id = event_log.EVENT_ID.INSTANCE_REBOOTED
        elif last_event(event_log.EVENT_ID.INSTANCE_REBOOTING):
            event_id = event_log.EVENT_ID.INSTANCE_REBOOTED
        elif last_event(event_log.EVENT_ID.INSTANCE_START_BEGIN):
            event_id = event_log.EVENT_ID.INSTANCE_STARTED
        elif last_event(event_log.EVENT_ID.INSTANCE_EVACUATE_BEGIN):
            event_id = event_log.EVENT_ID.INSTANCE_EVACUATED
        elif last_event(event_log.EVENT_ID.INSTANCE_EVACUATING):
            event_id = event_log.EVENT_ID.INSTANCE_EVACUATED
        elif last_event(event_log.EVENT_ID.INSTANCE_REBUILD_BEGIN):
            event_id = event_log.EVENT_ID.INSTANCE_REBUILT
        elif last_event(event_log.EVENT_ID.INSTANCE_REBUILDING):
            event_id = event_log.EVENT_ID.INSTANCE_REBUILT
        elif enabling:
            # Suppress the generic ENABLED event when a more specific
            # completion event was just recorded.
            if not (last_event(event_log.EVENT_ID.INSTANCE_LIVE_MIGRATED) or
                    last_event(event_log.EVENT_ID.INSTANCE_LIVE_MIGRATE_FAILED) or
                    last_event(event_log.EVENT_ID.INSTANCE_COLD_MIGRATED) or
                    last_event(event_log.EVENT_ID.INSTANCE_COLD_MIGRATE_FAILED) or
                    last_event(event_log.EVENT_ID.INSTANCE_COLD_MIGRATE_REVERTED) or
                    last_event(event_log.EVENT_ID.INSTANCE_RESIZE_REVERTED)):
                event_id = event_log.EVENT_ID.INSTANCE_ENABLED
    if event_id is not None and not last_event(event_id):
        events.extend(instance_issue_log(instance, event_id,
                                         additional_text=additional_text,
                                         reason=reason))
    # Record the newly issued events on the instance (only when something
    # was actually issued).
    if events:
        instance.events = events
| 45.700777
| 84
| 0.588353
| 11,424
| 105,843
| 5.108193
| 0.020746
| 0.080197
| 0.129652
| 0.083488
| 0.943913
| 0.937007
| 0.928731
| 0.919083
| 0.911286
| 0.905357
| 0
| 0.000133
| 0.290298
| 105,843
| 2,315
| 85
| 45.720518
| 0.776735
| 0.010242
| 0
| 0.613022
| 0
| 0
| 0.399058
| 0.123318
| 0
| 0
| 0
| 0
| 0
| 1
| 0.002751
| false
| 0
| 0.055938
| 0.000459
| 0.062357
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
a0fbd694aac5708b8c216e49c04e1d5622cf0bfb
| 36
|
py
|
Python
|
sparkler/__init__.py
|
boazjohn/pyspark-job-server
|
bda2fa454b7875494869be81c9d75802df194feb
|
[
"BSD-3-Clause"
] | null | null | null |
sparkler/__init__.py
|
boazjohn/pyspark-job-server
|
bda2fa454b7875494869be81c9d75802df194feb
|
[
"BSD-3-Clause"
] | null | null | null |
sparkler/__init__.py
|
boazjohn/pyspark-job-server
|
bda2fa454b7875494869be81c9d75802df194feb
|
[
"BSD-3-Clause"
] | null | null | null |
from context import SparklerContext
| 18
| 35
| 0.888889
| 4
| 36
| 8
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.111111
| 36
| 1
| 36
| 36
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
19d953509b92dd564bf07109682800cdd1f4832c
| 27
|
py
|
Python
|
src/euler_python_package/euler_python/medium/p239.py
|
wilsonify/euler
|
5214b776175e6d76a7c6d8915d0e062d189d9b79
|
[
"MIT"
] | null | null | null |
src/euler_python_package/euler_python/medium/p239.py
|
wilsonify/euler
|
5214b776175e6d76a7c6d8915d0e062d189d9b79
|
[
"MIT"
] | null | null | null |
src/euler_python_package/euler_python/medium/p239.py
|
wilsonify/euler
|
5214b776175e6d76a7c6d8915d0e062d189d9b79
|
[
"MIT"
] | null | null | null |
def problem239():
pass
| 9
| 17
| 0.62963
| 3
| 27
| 5.666667
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.15
| 0.259259
| 27
| 2
| 18
| 13.5
| 0.7
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| true
| 0.5
| 0
| 0
| 0.5
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 6
|
dfab125bfcfb2993c00b0d0f0cb075a88a8e8909
| 30
|
py
|
Python
|
avorion/__init__.py
|
rawbby/Avorion-Toolkit
|
a90616dd930d96ec1c7fd0035c5036e1c3b35f86
|
[
"MIT"
] | null | null | null |
avorion/__init__.py
|
rawbby/Avorion-Toolkit
|
a90616dd930d96ec1c7fd0035c5036e1c3b35f86
|
[
"MIT"
] | null | null | null |
avorion/__init__.py
|
rawbby/Avorion-Toolkit
|
a90616dd930d96ec1c7fd0035c5036e1c3b35f86
|
[
"MIT"
] | null | null | null |
import avorion.block as block
| 15
| 29
| 0.833333
| 5
| 30
| 5
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.133333
| 30
| 1
| 30
| 30
| 0.961538
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
dfae5107d8c66460f156fd7304483461711c6404
| 36
|
py
|
Python
|
app_verifications/__init__.py
|
kskarbinski/threads-api
|
c144c1cb51422095922310d278f80e4996c10ea0
|
[
"MIT"
] | null | null | null |
app_verifications/__init__.py
|
kskarbinski/threads-api
|
c144c1cb51422095922310d278f80e4996c10ea0
|
[
"MIT"
] | null | null | null |
app_verifications/__init__.py
|
kskarbinski/threads-api
|
c144c1cb51422095922310d278f80e4996c10ea0
|
[
"MIT"
] | null | null | null |
from .base_checks import BaseChecks
| 18
| 35
| 0.861111
| 5
| 36
| 6
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.111111
| 36
| 1
| 36
| 36
| 0.9375
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
dfca0ff5a5ba58366fb925847d2dc6620830e737
| 1,291
|
py
|
Python
|
dnm_cohorts/de_novos/__init__.py
|
jeremymcrae/dnm_cohorts
|
e968357797d2d370b44904129c32c2e74b36b903
|
[
"MIT"
] | 1
|
2020-12-10T05:17:21.000Z
|
2020-12-10T05:17:21.000Z
|
dnm_cohorts/de_novos/__init__.py
|
jeremymcrae/dnm_cohorts
|
e968357797d2d370b44904129c32c2e74b36b903
|
[
"MIT"
] | null | null | null |
dnm_cohorts/de_novos/__init__.py
|
jeremymcrae/dnm_cohorts
|
e968357797d2d370b44904129c32c2e74b36b903
|
[
"MIT"
] | null | null | null |
from dnm_cohorts.de_novos.de_ligt_nejm import de_ligt_nejm_de_novos
from dnm_cohorts.de_novos.de_rubeis_nature import de_rubeis_nature_de_novos
from dnm_cohorts.de_novos.epi4k_ajhg import epi4k_ajhg_de_novos
from dnm_cohorts.de_novos.gilissen_nature import gilissen_nature_de_novos
from dnm_cohorts.de_novos.iossifov_neuron import iossifov_neuron_de_novos
from dnm_cohorts.de_novos.iossifov_nature import iossifov_nature_de_novos
from dnm_cohorts.de_novos.lelieveld_nn import lelieveld_nn_de_novos
from dnm_cohorts.de_novos.mcrae_nature import mcrae_nature_de_novos
from dnm_cohorts.de_novos.oroak_nature import oroak_nature_de_novos
from dnm_cohorts.de_novos.rauch_lancet import rauch_lancet_de_novos
from dnm_cohorts.de_novos.sanders_nature import sanders_nature_de_novos
from dnm_cohorts.de_novos.sanders_neuron import sanders_neuron_de_novos
from dnm_cohorts.de_novos.homsy_science import homsy_science_de_novos
from dnm_cohorts.de_novos.jonsson_nature import jonsson_nature_de_novos
from dnm_cohorts.de_novos.jin_nature_genetics import jin_nature_genetics_de_novos
from dnm_cohorts.de_novos.an_science import an_science_de_novos
from dnm_cohorts.de_novos.kaplanis_nature import kaplanis_nature_de_novos
from dnm_cohorts.de_novos.halldorsson_science import halldorsson_science_de_novos
| 64.55
| 81
| 0.915569
| 222
| 1,291
| 4.810811
| 0.135135
| 0.235955
| 0.235955
| 0.269663
| 0.566479
| 0.566479
| 0.543071
| 0.438202
| 0
| 0
| 0
| 0.001642
| 0.056545
| 1,291
| 19
| 82
| 67.947368
| 0.875205
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
5f12a9bdecf1655636126fd3e97971afea085ad6
| 49
|
py
|
Python
|
run.py
|
pblan/matse-stundenplan
|
5f642d1b0c549407fd4742aa09a6c08f9b222fa2
|
[
"MIT"
] | 7
|
2020-09-10T17:31:12.000Z
|
2021-09-16T09:06:06.000Z
|
run.py
|
pblan/matse-stundenplan
|
5f642d1b0c549407fd4742aa09a6c08f9b222fa2
|
[
"MIT"
] | 5
|
2020-09-10T13:50:49.000Z
|
2021-06-18T09:53:00.000Z
|
run.py
|
pblan/matse-stundenplan
|
5f642d1b0c549407fd4742aa09a6c08f9b222fa2
|
[
"MIT"
] | null | null | null |
import matse_stundenplan
matse_stundenplan.run()
| 16.333333
| 24
| 0.877551
| 6
| 49
| 6.833333
| 0.666667
| 0.780488
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.061224
| 49
| 3
| 25
| 16.333333
| 0.891304
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 1
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
a06bebf4bb3329532d28e07a48f9f119d7cbf502
| 835
|
py
|
Python
|
PyFTBot/PyFTBot.py
|
BlackRouter/PYFTBot
|
e7cdca979980183ca72c3c33e7c5734440eb031d
|
[
"MIT"
] | 2
|
2020-01-27T16:31:29.000Z
|
2020-03-11T07:57:49.000Z
|
PyFTBot/PyFTBot.py
|
BlackRouter/PYFTBot
|
e7cdca979980183ca72c3c33e7c5734440eb031d
|
[
"MIT"
] | null | null | null |
PyFTBot/PyFTBot.py
|
BlackRouter/PYFTBot
|
e7cdca979980183ca72c3c33e7c5734440eb031d
|
[
"MIT"
] | null | null | null |
import requests
def postbot(token,method,arg):
url = "https://script.google.com/macros/s/AKfycbzxgX8puIgB5uXelJ2wNzxa8VbheV463rBm6_SpEau-D2v4g0q1/exec?bot_token=" + token + "&method=" + method + "&args=" + arg
payload = {}
headers = {
'Content-Type': 'application/x-www-form-urlencoded'
}
response = requests.request("POST", url, headers=headers, data = payload)
return response.text.encode('utf8')
def getbot(token,method):
url = "https://script.google.com/macros/s/AKfycbzxgX8puIgB5uXelJ2wNzxa8VbheV463rBm6_SpEau-D2v4g0q1/exec?bot_token=" + token +"&method=" + method
payload = {}
headers = {
'Content-Type': 'application/x-www-form-urlencoded'
}
response = requests.request("GET", url, headers=headers, data = payload)
return response.text.encode('utf8')
| 36.304348
| 166
| 0.68024
| 91
| 835
| 6.197802
| 0.428571
| 0.078014
| 0.049645
| 0.070922
| 0.87234
| 0.87234
| 0.87234
| 0.87234
| 0.87234
| 0.87234
| 0
| 0.037518
| 0.17006
| 835
| 22
| 167
| 37.954545
| 0.776335
| 0
| 0
| 0.470588
| 0
| 0
| 0.408383
| 0.079042
| 0
| 0
| 0
| 0
| 0
| 1
| 0.117647
| false
| 0
| 0.058824
| 0
| 0.294118
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
a0757c706515e6945e21794bdedac1f4db254814
| 40
|
py
|
Python
|
problems/pairsum/__init__.py
|
Benezivas/algobattle-problems
|
b00b85413893bd1618001a4cdaa0dd7442f4e481
|
[
"MIT"
] | null | null | null |
problems/pairsum/__init__.py
|
Benezivas/algobattle-problems
|
b00b85413893bd1618001a4cdaa0dd7442f4e481
|
[
"MIT"
] | null | null | null |
problems/pairsum/__init__.py
|
Benezivas/algobattle-problems
|
b00b85413893bd1618001a4cdaa0dd7442f4e481
|
[
"MIT"
] | null | null | null |
from .problem import Pairsum as Problem
| 20
| 39
| 0.825
| 6
| 40
| 5.5
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.15
| 40
| 1
| 40
| 40
| 0.970588
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
2672fc2cfba5eefb2f25005cdfafe340368b1fdb
| 390
|
py
|
Python
|
testing.py
|
Programmer-RD-AI/project-sructure-artificial-intelligence
|
48ed7f95589173183fb198ea7dd52fae97966b83
|
[
"Apache-2.0"
] | 1
|
2021-05-21T18:13:18.000Z
|
2021-05-21T18:13:18.000Z
|
testing.py
|
Programmer-RD-AI/project-sructure
|
48ed7f95589173183fb198ea7dd52fae97966b83
|
[
"Apache-2.0"
] | null | null | null |
testing.py
|
Programmer-RD-AI/project-sructure
|
48ed7f95589173183fb198ea7dd52fae97966b83
|
[
"Apache-2.0"
] | null | null | null |
from data_loading.data_loader import *
from data_loading.transforming import *
from models.baseline_model import *
from models.final_model import *
from models.test_model import *
from models.testing_models import *
from models.transfer_learning_models import *
def data_loading():
returned_info = load_all_data()
return returned_info
def modelling():
pass
data_loading()
| 19.5
| 45
| 0.789744
| 53
| 390
| 5.528302
| 0.415094
| 0.204778
| 0.273038
| 0.215017
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.146154
| 390
| 19
| 46
| 20.526316
| 0.87988
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.153846
| false
| 0.076923
| 0.538462
| 0
| 0.769231
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
|
0
| 6
|
26873dfb7a701ba315258308bc0c30d903686db2
| 33
|
py
|
Python
|
dqo/query_generator/rl/envs/__init__.py
|
danield137/deep_query_optimzation
|
01a25c966338007f15d14dea1b37e388e47bcfe3
|
[
"MIT"
] | null | null | null |
dqo/query_generator/rl/envs/__init__.py
|
danield137/deep_query_optimzation
|
01a25c966338007f15d14dea1b37e388e47bcfe3
|
[
"MIT"
] | null | null | null |
dqo/query_generator/rl/envs/__init__.py
|
danield137/deep_query_optimzation
|
01a25c966338007f15d14dea1b37e388e47bcfe3
|
[
"MIT"
] | null | null | null |
from .db_env import DatabaseEnvV1
| 33
| 33
| 0.878788
| 5
| 33
| 5.6
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.033333
| 0.090909
| 33
| 1
| 33
| 33
| 0.9
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
268e3c09b92531fc6760b8b6d2be506f79cafe02
| 367
|
py
|
Python
|
backend/userapp/serializers.py
|
Lenend-KPU/LBS-Platform
|
75ba24db8969248e74e9d974638977de1c0bc36a
|
[
"MIT"
] | 15
|
2020-12-23T13:56:49.000Z
|
2021-12-10T11:04:23.000Z
|
backend/userapp/serializers.py
|
Lenend-KPU/LBS-Platform
|
75ba24db8969248e74e9d974638977de1c0bc36a
|
[
"MIT"
] | 41
|
2021-03-19T07:51:48.000Z
|
2021-11-22T09:45:46.000Z
|
backend/userapp/serializers.py
|
Lenend-KPU/LBS-Platform
|
75ba24db8969248e74e9d974638977de1c0bc36a
|
[
"MIT"
] | 3
|
2021-03-24T15:18:24.000Z
|
2021-09-11T14:51:35.000Z
|
# For Swagger Documentation
from rest_framework import serializers
class UserBodySerializer(serializers.Serializer):
username = serializers.CharField(help_text="사용자 이름")
email = serializers.CharField(help_text="이메일, 유니크 값, 해당 컬럼으로 로그인")
password = serializers.CharField(help_text="비밀번호, 해당 컬럼으로 로그인")
address = serializers.CharField(help_text="주소")
| 36.7
| 70
| 0.768392
| 45
| 367
| 6.155556
| 0.622222
| 0.288809
| 0.34657
| 0.404332
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.133515
| 367
| 9
| 71
| 40.777778
| 0.871069
| 0.06812
| 0
| 0
| 0
| 0
| 0.141176
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.166667
| 0.166667
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 6
|
cd3f64e3bb017d34c9843830e808ad0ae81361b9
| 104
|
py
|
Python
|
dizoo/atari/envs/__init__.py
|
konnase/DI-engine
|
f803499cad191e9277b10e194132d74757bcfc8e
|
[
"Apache-2.0"
] | 2
|
2021-07-30T15:55:45.000Z
|
2021-07-30T16:35:10.000Z
|
dizoo/atari/envs/__init__.py
|
konnase/DI-engine
|
f803499cad191e9277b10e194132d74757bcfc8e
|
[
"Apache-2.0"
] | null | null | null |
dizoo/atari/envs/__init__.py
|
konnase/DI-engine
|
f803499cad191e9277b10e194132d74757bcfc8e
|
[
"Apache-2.0"
] | null | null | null |
from .atari_env import AtariEnv, AtariEnvMR
from .atari_multi_discrete_env import AtariMultiDiscreteEnv
| 34.666667
| 59
| 0.884615
| 13
| 104
| 6.769231
| 0.692308
| 0.204545
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.086538
| 104
| 2
| 60
| 52
| 0.926316
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
cd67e1fdd7a149b2a552c34bb798b67786cdd05e
| 49
|
py
|
Python
|
tests/s3bot_/test_freeze.py
|
jackstanek/s3bot
|
a0853cdf6de1f022aaa4bb795fc014d077ce76e9
|
[
"MIT"
] | null | null | null |
tests/s3bot_/test_freeze.py
|
jackstanek/s3bot
|
a0853cdf6de1f022aaa4bb795fc014d077ce76e9
|
[
"MIT"
] | 6
|
2017-08-15T17:43:32.000Z
|
2018-08-10T17:00:03.000Z
|
tests/s3bot_/test_freeze.py
|
jackstanek/s3bot
|
a0853cdf6de1f022aaa4bb795fc014d077ce76e9
|
[
"MIT"
] | null | null | null |
"""Tests for freeze/unfreeze"""
import unittest
| 12.25
| 31
| 0.734694
| 6
| 49
| 6
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.122449
| 49
| 3
| 32
| 16.333333
| 0.837209
| 0.510204
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
f813b33c4e4700e1c7a577d46b2db90949721c79
| 1,179
|
py
|
Python
|
code/test_trend.py
|
baolintian/Timeseries_feature_test
|
05b77b743ba268a2985503966fe7cbb02780e24b
|
[
"MIT"
] | null | null | null |
code/test_trend.py
|
baolintian/Timeseries_feature_test
|
05b77b743ba268a2985503966fe7cbb02780e24b
|
[
"MIT"
] | null | null | null |
code/test_trend.py
|
baolintian/Timeseries_feature_test
|
05b77b743ba268a2985503966fe7cbb02780e24b
|
[
"MIT"
] | null | null | null |
import pytest
from feature_judge import *
from util import *
def test_monotone_increase():
timeseries_name = "root.CNNP.QF.1#.QF1RCP604MP"
config_path = "../config/" + timeseries_name
image_path = "../images/" + timeseries_name
timeseries_path = "../data/" + timeseries_name + ".csv"
trend_config, threshold_config, resample_frequency = read_config(config_path)
timeseries = read_timeseries(timeseries_path, str(resample_frequency) + "min")
Dplot = 'yes'
s_tf = trend_features(timeseries, timeseries_name + ".numvalue", trend_config, image_path, Dplot)
assert s_tf == [0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0]
def test_wave():
timeseries_name = "wave_test"
config_path = "../config/" + timeseries_name
image_path = "../images/" + timeseries_name
timeseries_path = "../data/" + timeseries_name + ".csv"
trend_config, threshold_config, resample_frequency = read_config(config_path)
timeseries = read_timeseries(timeseries_path, str(resample_frequency) + "min")
Dplot = 'yes'
s_tf = trend_features(timeseries, timeseries_name, trend_config, image_path, Dplot)
assert s_tf == [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1]
| 43.666667
| 101
| 0.694656
| 156
| 1,179
| 4.948718
| 0.25
| 0.049223
| 0.062176
| 0.067358
| 0.80829
| 0.80829
| 0.797927
| 0.797927
| 0.797927
| 0.797927
| 0
| 0.029713
| 0.17218
| 1,179
| 26
| 102
| 45.346154
| 0.76127
| 0
| 0
| 0.521739
| 0
| 0
| 0.102629
| 0.022901
| 0
| 0
| 0
| 0
| 0.086957
| 1
| 0.086957
| false
| 0
| 0.130435
| 0
| 0.217391
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
f869380f817123dbe1b50e8517ed8be79ab95bfe
| 4,546
|
py
|
Python
|
test/unit/test_fix_4.py
|
dlpezbel/SDS
|
43b64744d8011af6ccd62fee394d6af2b11cac68
|
[
"MIT"
] | 4
|
2020-05-22T09:42:32.000Z
|
2020-09-11T08:00:48.000Z
|
test/unit/test_fix_4.py
|
dlpezbel/SDS
|
43b64744d8011af6ccd62fee394d6af2b11cac68
|
[
"MIT"
] | 24
|
2020-07-11T07:36:26.000Z
|
2020-08-30T19:49:10.000Z
|
test/unit/test_fix_4.py
|
dlpezbel/SDS
|
43b64744d8011af6ccd62fee394d6af2b11cac68
|
[
"MIT"
] | null | null | null |
import unittest
from project.fix_4 import Fix_4_9
class Check_4_9_Test(unittest.TestCase):
    """Unit tests for Fix_4_9.fix_dockerfile.

    The fixer receives a check result (rule 4.9: prefer COPY over ADD) plus the
    parsed Dockerfile instructions, and is expected to return one replacement
    COPY instruction per offending ADD line listed in check_result['line'].
    """

    def test_given_result_with_wrong_add_usage_when_fix_then_copy_instruction_returned(self):
        """One flagged ADD (Dockerfile line 6) yields one COPY replacement."""
        # Check result as produced by the 4.9 checker: a single offending line.
        check_result = {'evaluation': 'KO',
                        'code': 'DOCKERFILE_WITH_ADD_INSTRUCTION_NOT_PROPER_USED',
                        'description': 'You should use COPY rather than ADD instructions in Dockerfiles.',
                        'line': [6]}
        # Parsed Dockerfile; only the last instruction (startline 6) is an ADD.
        instructions = [{'instruction': 'FROM', 'startline': 0, 'endline': 0, 'content': 'FROM alpine\n', 'value': 'alpine'},
                        {'instruction': 'ENV', 'startline': 1, 'endline': 1, 'content': 'ENV ADMIN_USER="mark"\n', 'value': 'ADMIN_USER="mark"'},
                        {'instruction': 'RUN', 'startline': 2, 'endline': 2, 'content': 'RUN echo $ADMIN_USER > '
                         './mark\n', 'value': 'echo '
                         '$ADMIN_USER > ./mark'},
                        {'instruction': 'RUN', 'startline': 3, 'endline': 3, 'content': 'RUN unset ADMIN_USER\n', 'value': 'unset ADMIN_USER'},
                        {'instruction': 'COPY', 'startline': 4, 'endline': 4, 'content': 'COPY requirements.txt /tmp/\n', 'value': 'requirements.txt /tmp/'},
                        {'instruction': 'RUN', 'startline': 5, 'endline': 5, 'content': 'RUN pip install '
                         '--requirement '
                         '/tmp/requirements.txt\n',
                         'value': 'pip install --requirement /tmp/requirements.txt'},
                        {'instruction': 'ADD', 'startline': 6, 'endline': 6, 'content': 'ADD . /tmp/', 'value': '. '
                         '/tmp/'}]
        # NOTE(review): fix_dockerfile is called unbound with the test case as
        # `self`; presumably the fixer does not use instance state — confirm.
        result = Fix_4_9.fix_dockerfile(self,check_result,instructions)
        self.assertEqual(len(result), 1)
        self.assertEqual(result[0]['instruction'],'COPY')

    def test_given_result_with_two_wrong_add_usage_when_fix_then_copy_instruction_returned(self):
        """Two flagged ADD lines (4 and 6) yield two COPY replacements."""
        check_result = {'evaluation': 'KO',
                        'code': 'DOCKERFILE_WITH_ADD_INSTRUCTION_NOT_PROPER_USED',
                        'description': 'You should use COPY rather than ADD instructions in Dockerfiles.',
                        'line': [4,6]}
        # Parsed Dockerfile with two ADD instructions (startlines 4 and 6).
        instructions = [{'instruction': 'FROM', 'startline': 0, 'endline': 0, 'content': 'FROM alpine\n', 'value': 'alpine'},
                        {'instruction': 'ENV', 'startline': 1, 'endline': 1, 'content': 'ENV ADMIN_USER="mark"\n', 'value': 'ADMIN_USER="mark"'},
                        {'instruction': 'RUN', 'startline': 2, 'endline': 2, 'content': 'RUN echo $ADMIN_USER > '
                         './mark\n', 'value': 'echo '
                         '$ADMIN_USER > ./mark'},
                        {'instruction': 'RUN', 'startline': 3, 'endline': 3, 'content': 'RUN unset ADMIN_USER\n', 'value': 'unset ADMIN_USER'},
                        {'instruction': 'ADD', 'startline': 4, 'endline': 6, 'content': 'ADD . /tmp/', 'value': '. '
                         '/tmp/'},
                        {'instruction': 'RUN', 'startline': 5, 'endline': 5, 'content': 'RUN pip install '
                         '--requirement '
                         '/tmp/requirements.txt\n',
                         'value': 'pip install --requirement /tmp/requirements.txt'},
                        {'instruction': 'ADD', 'startline': 6, 'endline': 6, 'content': 'ADD . /tmp/', 'value': '. '
                         '/tmp/'}]
        result = Fix_4_9.fix_dockerfile(self,check_result,instructions)
        self.assertEqual(len(result), 2)
        self.assertEqual(result[0]['instruction'],'COPY')
        self.assertEqual(result[1]['instruction'], 'COPY')
# Allow running this test module directly: `python test_fix_4.py`.
if __name__ == '__main__':
    unittest.main()
| 78.37931
| 157
| 0.432248
| 371
| 4,546
| 5.09973
| 0.19407
| 0.057082
| 0.054968
| 0.029598
| 0.882664
| 0.859408
| 0.820296
| 0.806554
| 0.806554
| 0.806554
| 0
| 0.017308
| 0.428069
| 4,546
| 57
| 158
| 79.754386
| 0.710385
| 0
| 0
| 0.693878
| 0
| 0
| 0.338539
| 0.040035
| 0
| 0
| 0
| 0
| 0.102041
| 1
| 0.040816
| false
| 0
| 0.040816
| 0
| 0.102041
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
f8816b77dbeebf8f665669c0185ad76a0af851fd
| 218
|
py
|
Python
|
gym_brt/quanser/__init__.py
|
Data-Science-in-Mechanical-Engineering/vision-based-furuta-pendulum
|
84bfc5a089a2a8ace250f030f0298d45a3f9772f
|
[
"MIT"
] | 10
|
2018-08-02T20:01:13.000Z
|
2021-09-07T18:09:20.000Z
|
gym_brt/quanser/__init__.py
|
Data-Science-in-Mechanical-Engineering/vision-based-furuta-pendulum
|
84bfc5a089a2a8ace250f030f0298d45a3f9772f
|
[
"MIT"
] | 4
|
2019-05-20T18:38:34.000Z
|
2022-01-24T19:49:42.000Z
|
gym_brt/quanser/__init__.py
|
Data-Science-in-Mechanical-Engineering/vision-based-furuta-pendulum
|
84bfc5a089a2a8ace250f030f0298d45a3f9772f
|
[
"MIT"
] | 12
|
2019-04-09T03:56:50.000Z
|
2022-02-02T19:01:31.000Z
|
# The simulator backend has no special runtime requirements and must always import.
from gym_brt.quanser.qube_interfaces import QubeSimulator
# The hardware backend is optional: its import can raise ImportError
# (presumably when hardware-specific dependencies are missing — verify),
# so it is wrapped to keep the package importable on machines without it.
try:
    from gym_brt.quanser.qube_interfaces import QubeHardware
except ImportError:
    print("Warning: Can not import QubeHardware in quanser/__init__.py")
| 31.142857
| 72
| 0.816514
| 29
| 218
| 5.862069
| 0.655172
| 0.082353
| 0.117647
| 0.2
| 0.435294
| 0.435294
| 0.435294
| 0
| 0
| 0
| 0
| 0
| 0.123853
| 218
| 6
| 73
| 36.333333
| 0.890052
| 0
| 0
| 0
| 0
| 0
| 0.270642
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.8
| 0
| 0.8
| 0.2
| 1
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
3e05ab55dc1476e96fcfbbb7524815995b45b067
| 45
|
py
|
Python
|
lib/dataset/__init__.py
|
sunset1995/ExampleResearchProject
|
c17438163b272647c1e2fcfce6007bd78018ad65
|
[
"MIT"
] | null | null | null |
lib/dataset/__init__.py
|
sunset1995/ExampleResearchProject
|
c17438163b272647c1e2fcfce6007bd78018ad65
|
[
"MIT"
] | null | null | null |
lib/dataset/__init__.py
|
sunset1995/ExampleResearchProject
|
c17438163b272647c1e2fcfce6007bd78018ad65
|
[
"MIT"
] | 1
|
2021-12-06T09:10:23.000Z
|
2021-12-06T09:10:23.000Z
|
from .dataset_example import ExampleDataset
| 15
| 43
| 0.866667
| 5
| 45
| 7.6
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.111111
| 45
| 2
| 44
| 22.5
| 0.95
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
3e3932c9a9340ffb69e35319cd52b1460bc35836
| 30
|
py
|
Python
|
cosmosis/runtime/julia_modules/__init__.py
|
annis/cosmosis
|
55efc1bc2260ca39298c584ae809fa2a8e72a38e
|
[
"BSD-2-Clause"
] | 2
|
2021-06-18T14:11:59.000Z
|
2022-02-23T19:19:36.000Z
|
cosmosis/runtime/julia_modules/__init__.py
|
annis/cosmosis
|
55efc1bc2260ca39298c584ae809fa2a8e72a38e
|
[
"BSD-2-Clause"
] | 2
|
2021-11-02T12:44:24.000Z
|
2022-03-30T15:09:48.000Z
|
cosmosis/runtime/julia_modules/__init__.py
|
annis/cosmosis
|
55efc1bc2260ca39298c584ae809fa2a8e72a38e
|
[
"BSD-2-Clause"
] | 2
|
2022-03-25T21:26:27.000Z
|
2022-03-29T06:37:46.000Z
|
from .julia import JuliaModule
| 30
| 30
| 0.866667
| 4
| 30
| 6.5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.1
| 30
| 1
| 30
| 30
| 0.962963
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
3e554313354911582e7fe5703208f0715c44c977
| 28
|
py
|
Python
|
pyutilx/__init__.py
|
sarmadgulzar/pyutilx
|
ca7c61d17fc03dfcad0f9bd14859f59db6fd8c17
|
[
"MIT"
] | null | null | null |
pyutilx/__init__.py
|
sarmadgulzar/pyutilx
|
ca7c61d17fc03dfcad0f9bd14859f59db6fd8c17
|
[
"MIT"
] | null | null | null |
pyutilx/__init__.py
|
sarmadgulzar/pyutilx
|
ca7c61d17fc03dfcad0f9bd14859f59db6fd8c17
|
[
"MIT"
] | null | null | null |
from pyutilx.utils import *
| 14
| 27
| 0.785714
| 4
| 28
| 5.5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.142857
| 28
| 1
| 28
| 28
| 0.916667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
3e917a918a2b4335601e2146e095c888305ef904
| 470
|
py
|
Python
|
temboo/core/Library/PagerDuty/Events/__init__.py
|
jordanemedlock/psychtruths
|
52e09033ade9608bd5143129f8a1bfac22d634dd
|
[
"Apache-2.0"
] | 7
|
2016-03-07T02:07:21.000Z
|
2022-01-21T02:22:41.000Z
|
temboo/core/Library/PagerDuty/Events/__init__.py
|
jordanemedlock/psychtruths
|
52e09033ade9608bd5143129f8a1bfac22d634dd
|
[
"Apache-2.0"
] | null | null | null |
temboo/core/Library/PagerDuty/Events/__init__.py
|
jordanemedlock/psychtruths
|
52e09033ade9608bd5143129f8a1bfac22d634dd
|
[
"Apache-2.0"
] | 8
|
2016-06-14T06:01:11.000Z
|
2020-04-22T09:21:44.000Z
|
from temboo.Library.PagerDuty.Events.AcknowledgeEvent import AcknowledgeEvent, AcknowledgeEventInputSet, AcknowledgeEventResultSet, AcknowledgeEventChoreographyExecution
from temboo.Library.PagerDuty.Events.ResolveEvent import ResolveEvent, ResolveEventInputSet, ResolveEventResultSet, ResolveEventChoreographyExecution
from temboo.Library.PagerDuty.Events.TriggerEvent import TriggerEvent, TriggerEventInputSet, TriggerEventResultSet, TriggerEventChoreographyExecution
| 117.5
| 169
| 0.910638
| 33
| 470
| 12.969697
| 0.545455
| 0.070093
| 0.119159
| 0.182243
| 0.224299
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.044681
| 470
| 3
| 170
| 156.666667
| 0.953229
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 1
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
e44128af074f6e50ae7363ee14705c600acb8473
| 19,490
|
py
|
Python
|
2020/029_DaVinci17/davinci17.py
|
toru-ver4/sample_code
|
9165b4cb07a3cb1b3b5a7f6b3a329be081bddabe
|
[
"BSD-3-Clause"
] | 19
|
2019-11-12T23:34:35.000Z
|
2022-03-08T13:21:03.000Z
|
2020/029_DaVinci17/davinci17.py
|
toru-ver4/sample_code
|
9165b4cb07a3cb1b3b5a7f6b3a329be081bddabe
|
[
"BSD-3-Clause"
] | 101
|
2019-08-12T01:20:13.000Z
|
2022-03-18T12:17:01.000Z
|
2020/029_DaVinci17/davinci17.py
|
toru-ver4/sample_code
|
9165b4cb07a3cb1b3b5a7f6b3a329be081bddabe
|
[
"BSD-3-Clause"
] | 3
|
2020-06-08T09:48:08.000Z
|
2022-03-09T15:35:51.000Z
|
# -*- coding: utf-8 -*-
"""
==========
"""
# import standard libraries
import os
# import third-party libraries
import numpy as np
import matplotlib.pyplot as plt
from colour import write_image, read_image
# import my libraries
import test_pattern_generator2 as tpg
import transfer_functions as tf
import plot_utility as pu
# information
__author__ = 'Toru Yoshihara'
__copyright__ = 'Copyright (C) 2020 - Toru Yoshihara'
__license__ = 'New BSD License - https://opensource.org/licenses/BSD-3-Clause'
__maintainer__ = 'Toru Yoshihara'
__email__ = 'toru.ver.11 at-sign gmail.com'
__all__ = []
def create_ramp():
    """Write a 1920x1080 horizontal 0-1 gray ramp to "test_src.tif" (uint16)."""
    width, height = 1920, 1080
    gradient = np.linspace(0, 1, width).reshape((1, width, 1))
    # Broadcast the single ramp row over all rows and the three channels.
    img = gradient * np.ones((height, width, 3))
    write_image(img, "test_src.tif", bit_depth='uint16')
def create_exr_ramp(min_exposure=-12, max_exposure=12):
    """Write a log2-shaped linear-light ramp to an exr file.

    The 0-1 ramp is mapped through the log2-to-linear shaper over
    [min_exposure, max_exposure] stops, then saved as float32 under ./img/.
    """
    ramp_row = np.linspace(0, 1, 1920).reshape((1, 1920, 1))
    linear_row = tpg.shaper_func_log2_to_linear(
        ramp_row, min_exposure=min_exposure, max_exposure=max_exposure)
    img = np.ones((1080, 1920, 3)) * linear_row
    fname = f"./img/test_src_exp_{min_exposure}_{max_exposure}.exr"
    write_image(img, fname, bit_depth='float32')
def plot_input_drt():
    """Driver: plot the input-DRT curves for a chosen set of rendered ramps.

    The commented-out blocks below are earlier measurement sets kept for
    reference; only the last HDR-ER set is currently plotted.
    """
    # file_list = [
    #     ['./img/old/test_out_sdr100.tif', 'SDR 100'],
    #     ['./img/old/test_out_hdr500.tif', 'HDR 500'],
    #     ['./img/old/test_out_hdr1000.tif', 'HDR 1000'],
    #     ['./img/old/test_out_hdr2000.tif', 'HDR 2000'],
    #     ['./img/old/test_out_hdr4000.tif', 'HDR 4000'],
    #     ['./img/old/test_out_off.tif', 'DRT OFF']
    # ]
    # check_input_drt_test(
    #     file_list=file_list, graph_name="Input_DRT_Characteristics_w_SDR")
    # file_list = [
    #     ['./img/old/test_out_hdr500.tif', 'HDR 500'],
    #     ['./img/old/test_out_hdr1000.tif', 'HDR 1000'],
    #     ['./img/old/test_out_hdr2000.tif', 'HDR 2000'],
    #     ['./img/old/test_out_hdr4000.tif', 'HDR 4000'],
    #     ['./img/old/test_out_off.tif', 'DRT OFF']
    # ]
    # check_input_drt_test(
    #     file_list=file_list, graph_name="Input_DRT_Characteristics_wo_SDR")
    # file_list = [
    #     ['./img/old/test_out_sdr_er_100-200.tif', 'SDR ER 100/200'],
    #     ['./img/old/test_out_hdr_er_1000-2000.tif', 'HDR ER 1000/2000'],
    #     ['./img/old/test_out_hdr_er_1000-4000.tif', 'HDR ER 1000/4000'],
    #     ['./img/old/test_out_hdr_er_1000-10000.tif', 'HDR ER 1000/10000'],
    #     ['./img/old/test_out_hdr_er_4000-10000.tif', 'HDR ER 4000/10000'],
    #     ['./img/old/test_out_off.tif', 'DRT OFF']
    # ]
    # check_input_drt_test(
    #     file_list=file_list, graph_name="Input_DRT_Characteristics_ER_w_SDR")
    # Each entry is [filename, legend label, matplotlib linestyle].
    file_list = [
        ['./img/old/test_out_hdr_er_1000-2000.tif', 'HDR ER 1000/2000', '-.'],
        ['./img/old/test_out_hdr_er_1000-4000.tif', 'HDR ER 1000/4000', '--'],
        ['./img/old/test_out_hdr_er_1000-10000.tif', 'HDR ER 1000/10000', '-'],
        ['./img/old/test_out_hdr_er_4000-10000.tif', 'HDR ER 4000/10000', '-'],
        # ['./img/old/test_out_off.tif', 'DRT OFF']
    ]
    check_input_drt_test(
        file_list=file_list, graph_name="Input_DRT_Characteristics_ER_wo_SDR")
    # check_input_drt_test_sdr_only()
def check_input_drt_test(file_list, graph_name):
    """Plot input-DRT curves (ST2084 in, ST2084 out) on a log-log graph.

    Args:
        file_list: list of [filename, legend label, linestyle] entries;
            each file holds a ramp rendered through the DRT, decoded as ST2084.
        graph_name: stem of the output png written to ./img/.
    """
    create_ramp()
    x = np.linspace(0, 1, 1920)
    x_luminance = tf.eotf_to_luminance(x, tf.ST2084)
    fig, ax1 = pu.plot_1_graph(
        fontsize=20,
        figsize=(10, 8),
        graph_title="DaVinci17 Input DRT Characteristics",
        graph_title_size=None,
        xlabel="Input Luminance [cd/m2]",
        ylabel="Output Luminance [cd/m2]",
        axis_label_size=None,
        legend_size=17,
        xlim=[0.009, 15000],
        ylim=[0.009, 15000],
        xtick=None,
        ytick=None,
        xtick_size=None,
        ytick_size=None,
        linewidth=3,
        minor_xtick_num=None,
        minor_ytick_num=None,
        return_figure=True)
    pu.log_scale_settings(ax1, grid_alpha=0.5, bg_color="#E0E0E0")
    # Plot in reverse so the first entry of file_list ends up drawn on top.
    for idx in range(len(file_list))[::-1]:
        # First row, red channel only — the ramp is gray so one channel suffices.
        img = read_image(file_list[idx][0])[0, :, 0]
        label = file_list[idx][1]
        ls = file_list[idx][2]
        y_luminance = tf.eotf_to_luminance(img, tf.ST2084)
        ax1.plot(x_luminance, y_luminance, ls, label=label)
    plt.legend(loc='upper left')
    fname_full = f"./img/{graph_name}.png"
    plt.savefig(fname_full, bbox_inches='tight', pad_inches=0.1)
    # plt.show()
    plt.close(fig)
def check_input_drt_test_sdr_only():
    """Plot the SDR-100 input-DRT curve (ST2084 in, Gamma2.4 out).

    Commented-out blocks are alternative sources/encodings tried during the
    investigation and kept for reference.
    """
    create_ramp()
    x = np.linspace(0, 1, 1920)
    fig, ax1 = pu.plot_1_graph(
        fontsize=20,
        figsize=(10, 8),
        graph_title="DaVinci17 Input DRT Characteristics",
        graph_title_size=None,
        xlabel="Input Luminance [cd/m2]",
        ylabel="Output Luminance [cd/m2]",
        axis_label_size=None,
        legend_size=17,
        xlim=[0.009, 15000],
        ylim=[0.009, 15000],
        xtick=None,
        ytick=None,
        xtick_size=None,
        ytick_size=None,
        linewidth=3,
        minor_xtick_num=None,
        minor_ytick_num=None,
        return_figure=True)
    pu.log_scale_settings(ax1, grid_alpha=0.5, bg_color="#E0E0E0")
    # img = read_image("./img/test_out_sdr100_on_gm24.tif")[0, :, 0]
    # label = "DRT OFF(ST2084 to Gamma2.4 (.tif))"
    # x_luminance = tf.eotf_to_luminance(x, tf.ST2084)
    # y_luminance = tf.eotf_to_luminance(img, tf.GAMMA24)
    # ax1.plot(x_luminance, y_luminance, label=label)
    # img = read_image("./img/test_out_sdr100_on_gm24_203nits.tif")[0, :, 0]
    # label = "DRT OFF(ST2084 to Gamma2.4 (.tif) 203nits)"
    # x_luminance = tf.eotf_to_luminance(x, tf.ST2084)
    # y_luminance = tf.eotf_to_luminance(img, tf.GAMMA24)
    # ax1.plot(x_luminance, y_luminance, label=label)
    # Input axis is decoded as ST2084, output axis as Gamma2.4.
    img = read_image("./img/old/test_out_sdr100_on_gm24.tif")[0, :, 0]
    label = 'SDR 100 (Output color space is Gamma2.4)'
    x_luminance = tf.eotf_to_luminance(x, tf.ST2084)
    y_luminance = tf.eotf_to_luminance(img, tf.GAMMA24)
    ax1.plot(x_luminance, y_luminance, label=label)
    # img = read_image("./img/test_out_exp_-12_12_sdr_drt-off_gm24.tif")[0, :, 0]
    # label = "DRT OFF(Gamma2.4 to Gamma2.4 (.tif))"
    # x_luminance = tf.eotf_to_luminance(x, tf.GAMMA24)
    # y_luminance = tf.eotf_to_luminance(img, tf.GAMMA24)
    # ax1.plot(x_luminance, y_luminance, label=label)
    # img = read_image("./img/test_out_exp_-12_12_sdr_drt-off.tif")[0, :, 0]
    # label = "DRT OFF(Linear to Gamma2.4 (.exr))"
    # y_luminance = tf.eotf_to_luminance(img, tf.GAMMA24)
    # x = np.linspace(0, 1, 1920)
    # x_luminance = tpg.shaper_func_log2_to_linear(
    #     x, min_exposure=-12, max_exposure=12)
    # ax1.plot(
    #     x_luminance * 100, y_luminance, '--', color=pu.SKY, label=label)
    plt.legend(loc='upper left')
    fname_full = "./img/input_drt_sdr_only.png"
    plt.savefig(fname_full, bbox_inches='tight', pad_inches=0.1)
    # plt.show()
    plt.close(fig)
def check_100nits_code_value_on_st2084():
    """Print the ST2084 code value of 100 nits, normalized and as 10-bit."""
    normalized_cv = tf.oetf_from_luminance(100, tf.ST2084)
    print(normalized_cv)
    print(normalized_cv * 1023)
def plot_forum_fig1():
    """Plot figure 1 for a forum post: HDR to SDR conversion comparison.

    Compares the SDR output produced from an ST2084 tiff source against the
    expected result produced from a linear exr source.
    """
    x = np.linspace(0, 1, 1920)
    fig, ax1 = pu.plot_1_graph(
        fontsize=20,
        figsize=(10, 8),
        graph_title="HDR to SDR conversion",
        graph_title_size=None,
        xlabel="Input Luminance [cd/m2]",
        ylabel="Output Luminance [cd/m2]",
        axis_label_size=None,
        legend_size=17,
        xlim=[0.009, 15000],
        ylim=[0.009, 15000],
        xtick=None,
        ytick=None,
        xtick_size=None,
        ytick_size=None,
        linewidth=3,
        minor_xtick_num=None,
        minor_ytick_num=None,
        return_figure=True)
    pu.log_scale_settings(ax1, grid_alpha=0.5, bg_color="#E0E0E0")
    # Curve (a): ST2084-encoded tiff source, SDR (Gamma2.4) output.
    img = read_image("./img/dv17_fig1_sdr_out_st2084.tif")[0, :, 0]
    label = "(a) src: ST2084(.tif)"
    x_luminance = tf.eotf_to_luminance(x, tf.ST2084)
    y_luminance = tf.eotf_to_luminance(img, tf.GAMMA24)
    ax1.plot(x_luminance, y_luminance, color=pu.BLUE, label=label)
    # img = read_image("./img/dv17_fig1_203_sdr_out_st2084.tif")[0, :, 0]
    # label = "(b) src: ST2084(.tif), ref-white: 203nits"
    # x_luminance = tf.eotf_to_luminance(x, tf.ST2084)
    # y_luminance = tf.eotf_to_luminance(img, tf.GAMMA24)
    # ax1.plot(x_luminance, y_luminance, label=label)
    # Curve (b): linear exr source; its x-axis follows the log2 shaper
    # used by create_exr_ramp (scaled by 100 to get cd/m2).
    img = read_image("./img/dv17_fig1_sdr_out_linear.tif")[0, :, 0]
    label = "(b) src: Linear(.exr), This is the expected result."
    y_luminance = tf.eotf_to_luminance(img, tf.GAMMA24)
    x = np.linspace(0, 1, 1920)
    x_luminance = tpg.shaper_func_log2_to_linear(
        x, min_exposure=-12, max_exposure=12)
    ax1.plot(
        x_luminance * 100, y_luminance, '--', color=pu.RED, label=label)
    # img = read_image("./img/dv17_fig1_203_sdr_out_linear.tif")[0, :, 0]
    # label = "src=Linear(.exr), ref-white=203nits"
    # y_luminance = tf.eotf_to_luminance(img, tf.GAMMA24)
    # x = np.linspace(0, 1, 1920)
    # x_luminance = tpg.shaper_func_log2_to_linear(
    #     x, min_exposure=-12, max_exposure=12)
    # ax1.plot(
    #     x_luminance * 100, y_luminance, label=label)
    plt.legend(loc='upper left')
    fname_full = "./img/fig1.png"
    plt.savefig(fname_full, bbox_inches='tight', pad_inches=0.1)
    # plt.show()
    plt.close(fig)
def plot_output_drt():
    """Driver: plot output-DRT curves for two measurement sets.

    The commented-out blocks are earlier measurement sets kept for reference.
    """
    # file_list = [
    #     # ['./img/Output_DRT_SDR_ER_100-200.tif', 'SDR ER 100/200', '-'],
    #     ['./img/old/Output_DRT_HDR_ER_1000-2000.tif', 'HDR ER 1000/2000', '-'],
    #     ['./img/old/Output_DRT_HDR_ER_1000-4000.tif', 'HDR ER 1000/4000', '-'],
    #     ['./img/old/Output_DRT_HDR_ER_1000-10000.tif', 'HDR ER 1000/10000', '-'],
    #     ['./img/old/Output_DRT_HDR_ER_4000-10000.tif', 'HDR ER 4000/10000', '--'],
    # ]
    # check_output_drt_test(
    #     file_list=file_list,
    #     graph_name="DaVinci17 Output DRT ER 無印ST2084")
    # file_list = [
    #     # ['./img/Output_DRT_SDR_ER_100-200.tif', 'SDR ER 100/200', '-'],
    #     ['./img/Output_DRT_HDR_ER_1000-2000.tif', 'HDR ER 1000/2000', '-'],
    #     ['./img/Output_DRT_HDR_ER_1000-4000.tif', 'HDR ER 1000/4000', '-'],
    #     ['./img/Output_DRT_HDR_ER_1000-10000.tif', 'HDR ER 1000/10000', '-'],
    #     ['./img/Output_DRT_HDR_ER_4000-10000.tif', 'HDR ER 4000/10000', '--'],
    # ]
    # check_output_drt_test(
    #     file_list=file_list,
    #     graph_name="DaVinci17 Output DRT Characteristics ER")
    # file_list = [
    #     # ['./img/Output_DRT_SDR_100.tif', 'SDR 100', '-'],
    #     ['./img/old/Output_DRT_HDR_500.tif', 'HDR 500', '-'],
    #     ['./img/old/Output_DRT_HDR_1000.tif', 'HDR 1000', '-'],
    #     ['./img/old/Output_DRT_HDR_2000.tif', 'HDR 2000', '-'],
    #     ['./img/old/Output_DRT_HDR_4000.tif', 'HDR 4000', '-']
    # ]
    # check_output_drt_test(
    #     file_list=file_list,
    #     graph_name="DaVinci17 Output DRT 無印 ST2084")
    # Each entry is [filename, legend label, matplotlib linestyle].
    file_list = [
        # ['./img/Output_DRT_SDR_100.tif', 'SDR 100', '-'],
        ['./img/Output_DRT_HDR_500.tif', 'HDR 500', '-'],
        ['./img/Output_DRT_HDR_1000.tif', 'HDR 1000', '-'],
        ['./img/Output_DRT_HDR_2000.tif', 'HDR 2000', '-'],
        ['./img/Output_DRT_HDR_4000.tif', 'HDR 4000', '-'],
        ['./img/Output_DRT_HDR_10000.tif', 'Custom (10000 nit)', '--']
    ]
    check_output_drt_test(
        file_list=file_list,
        graph_name="DaVinci17 Output DRT Characteristics")
    # Second set: timeline fixed at HDR 1000, varying ST2084 output nit level.
    file_list = [
        ['./img/DRT_In_None_HDR1000-500.tif', 'HDR 1000, ST2084 500 nit', '-'],
        ['./img/DRT_In_None_HDR1000-1000.tif', 'HDR 1000, ST2084 1000 nit', '-'],
        ['./img/DRT_In_None_HDR1000-2000.tif', 'HDR 1000, ST2084 2000 nit', '-'],
        ['./img/DRT_In_None_HDR1000-4000.tif', 'HDR 1000, ST2084 4000 nit', '-'],
        ['./img/DRT_In_None_HDR1000-10000.tif', 'HDR 1000, ST2084 10000 nit', '-'],
    ]
    check_output_drt_test(
        file_list=file_list,
        graph_name="DaVinci17 Out DRT Characteristics_fix_HDR1000")
def check_output_drt_test(file_list, graph_name):
    """Plot output-DRT curves (ST2084 in, ST2084 out) on a log-log graph.

    Args:
        file_list: list of [filename, legend label, linestyle] entries.
        graph_name: stem of the output png; spaces become underscores.
    """
    x = np.linspace(0, 1, 1920)
    x_luminance = tf.eotf_to_luminance(x, tf.ST2084)
    fig, ax1 = pu.plot_1_graph(
        fontsize=20,
        figsize=(10, 8),
        graph_title="DaVinci17 Output DRT Characteristics",
        graph_title_size=None,
        xlabel="Input Luminance [cd/m2]",
        ylabel="Output Luminance [cd/m2]",
        axis_label_size=None,
        legend_size=17,
        xlim=[0.009, 15000],
        ylim=[0.009, 15000],
        xtick=None,
        ytick=None,
        xtick_size=None,
        ytick_size=None,
        linewidth=3,
        minor_xtick_num=None,
        minor_ytick_num=None,
        return_figure=True)
    pu.log_scale_settings(ax1, grid_alpha=0.5, bg_color="#E0E0E0")
    for idx in range(len(file_list)):
        # First row, red channel only — the ramp is gray so one channel suffices.
        img = read_image(file_list[idx][0])[0, :, 0]
        label = file_list[idx][1]
        ls = file_list[idx][2]
        y_luminance = tf.eotf_to_luminance(img, tf.ST2084)
        ax1.plot(x_luminance, y_luminance, ls, label=label)
    plt.legend(loc='upper left')
    fname_full = f"./img/{graph_name}.png".replace(' ', "_")
    plt.savefig(fname_full, bbox_inches='tight', pad_inches=0.1)
    # plt.show()
    plt.close(fig)
def check_output_drt_test_exr(file_list, graph_name):
    """Plot output-DRT curves for linear exr outputs (ST2084 in, linear out).

    Unlike check_output_drt_test, the y-axis values are taken directly from
    the image and scaled by 10000 (linear 1.0 == 10000 cd/m2), and the y
    limits are auto-scaled.

    Args:
        file_list: list of [filename, legend label, linestyle] entries.
        graph_name: graph title and stem of the output png (spaces -> "_").
    """
    x = np.linspace(0, 1, 1920)
    x_luminance = tf.eotf_to_luminance(x, tf.ST2084)
    fig, ax1 = pu.plot_1_graph(
        fontsize=20,
        figsize=(10, 8),
        graph_title=graph_name,
        graph_title_size=None,
        xlabel="Input Luminance [cd/m2]",
        ylabel="Output Luminance [cd/m2]",
        axis_label_size=None,
        legend_size=17,
        xlim=[0.009, 15000],
        ylim=None,
        xtick=None,
        ytick=None,
        xtick_size=None,
        ytick_size=None,
        linewidth=3,
        minor_xtick_num=None,
        minor_ytick_num=None,
        return_figure=True)
    pu.log_scale_settings(ax1, grid_alpha=0.5, bg_color="#E0E0E0")
    for idx in range(len(file_list)):
        img = read_image(file_list[idx][0])[0, :, 0]
        label = file_list[idx][1]
        ls = file_list[idx][2]
        # Linear image data: 1.0 corresponds to 10000 cd/m2.
        y_luminance = img * 10000
        ax1.plot(x_luminance, y_luminance, ls, label=label)
    plt.legend(loc='upper left')
    fname_full = f"./img/{graph_name}.png".replace(' ', "_")
    plt.savefig(fname_full, bbox_inches='tight', pad_inches=0.1)
    # plt.show()
    plt.close(fig)
def plot_total_drt():
    """Driver: plot end-to-end (input+output) DRT curves for three sets.

    Each file_list entry is [filename, legend label, matplotlib linestyle].
    """
    file_list = [
        ['./img/DRT_Total_HDR_500.tif', 'HDR 500', '-'],
        ['./img/DRT_Total_HDR_1000.tif', 'HDR 1000', '-'],
        ['./img/DRT_Total_HDR_2000.tif', 'HDR 2000', '-'],
        ['./img/DRT_Total_HDR_4000.tif', 'HDR 4000', '-'],
        ['./img/DRT_Total_HDR_10000.tif', 'Custom (10000 nit)', '-'],
    ]
    check_total_drt_test(
        file_list=file_list,
        graph_name="Input-Output_DRT_Characteristics")
    # Timeline fixed at HDR 1000, varying the ST2084 output nit level.
    file_list = [
        ['./img/Output_DRT_HDR1000-500.tif', 'HDR 1000, ST2084 500 nit', '-'],
        ['./img/Output_DRT_HDR1000-1000.tif', 'HDR 1000, ST2084 1000 nit', '-'],
        ['./img/Output_DRT_HDR1000-2000.tif', 'HDR 1000, ST2084 2000 nit', '-'],
        ['./img/Output_DRT_HDR1000-4000.tif', 'HDR 1000, ST2084 4000 nit', '-'],
        ['./img/Output_DRT_HDR1000-10000.tif','HDR 1000, ST2084 10000 nit', '-'],
    ]
    check_total_drt_test(
        file_list=file_list,
        graph_name="DaVinci17 In-Out DRT Characteristics_fix_HDR1000")
    # Extended-range (ER) variants.
    file_list = [
        ['./img/DRT_Total_HDR_ER_1000-2000.tif', 'HDR ER 1000/2000', '-'],
        ['./img/DRT_Total_HDR_ER_1000-4000.tif', 'HDR ER 1000/4000', '-'],
        ['./img/DRT_Total_HDR_ER_1000-10000.tif', 'HDR ER 1000/10000', '-'],
        ['./img/DRT_Total_HDR_ER_4000-10000.tif', 'HDR ER 4000/10000', '-'],
    ]
    check_total_drt_test(
        file_list=file_list,
        graph_name="Input-Output_DRT_Characteristics_ER")
def check_total_drt_test(file_list, graph_name):
    """Plot end-to-end DRT curves (ST2084 in, ST2084 out) on a log-log graph.

    Args:
        file_list: list of [filename, legend label, linestyle] entries.
        graph_name: stem of the output png; spaces become underscores.
    """
    x = np.linspace(0, 1, 1920)
    x_luminance = tf.eotf_to_luminance(x, tf.ST2084)
    fig, ax1 = pu.plot_1_graph(
        fontsize=20,
        figsize=(10, 8),
        graph_title="DaVinci17 Input-Output DRT Characteristics",
        graph_title_size=None,
        xlabel="Input Luminance [cd/m2]",
        ylabel="Output Luminance [cd/m2]",
        axis_label_size=None,
        legend_size=17,
        xlim=[0.009, 15000],
        ylim=[0.009, 15000],
        xtick=None,
        ytick=None,
        xtick_size=None,
        ytick_size=None,
        linewidth=3,
        minor_xtick_num=None,
        minor_ytick_num=None,
        return_figure=True)
    pu.log_scale_settings(ax1, grid_alpha=0.5, bg_color="#E0E0E0")
    for idx in range(len(file_list)):
        img = read_image(file_list[idx][0])[0, :, 0]
        label = file_list[idx][1]
        ls = file_list[idx][2]
        y_luminance = tf.eotf_to_luminance(img, tf.ST2084)
        ax1.plot(x_luminance, y_luminance, ls, label=label)
    plt.legend(loc='upper left')
    fname_full = f"./img/{graph_name}.png".replace(' ', "_")
    plt.savefig(fname_full, bbox_inches='tight', pad_inches=0.1)
    # plt.show()
    plt.close(fig)
def plot_inv_drt():
    """Driver: plot inverse-DRT (SDR to HDR) curves for selected nit targets."""
    # Each entry is [filename, legend label, matplotlib linestyle].
    file_list = [
        # ['./img/Inverse_DRT_to_HDR500.tif', 'SDR to HDR 500 nit', '-'],
        ['./img/Inverse_DRT_to_HDR1000.tif', 'SDR to HDR 1000 nit', '-'],
        # ['./img/Inverse_DRT_to_HDR2000.tif', 'SDR to HDR 2000 nit', '-'],
        ['./img/Inverse_DRT_to_HDR4000.tif', 'SDR to HDR 4000 nit', '-'],
        ['./img/Inverse_DRT_to_HDR10000.tif', 'SDR to HDR 10000 nit', '-'],
    ]
    check_inv_drt_test(
        file_list=file_list,
        graph_name="Inverse_DRT_Characteristics")
def check_inv_drt_test(file_list, graph_name):
    """Plot inverse-DRT curves (Gamma2.4 in, ST2084 out) on a log-log graph.

    Args:
        file_list: list of [filename, legend label, linestyle] entries.
        graph_name: stem of the output png; spaces become underscores.
    """
    x = np.linspace(0, 1, 1920)
    # Input axis is SDR, so decode it with Gamma2.4 rather than ST2084.
    x_luminance = tf.eotf_to_luminance(x, tf.GAMMA24)
    fig, ax1 = pu.plot_1_graph(
        fontsize=20,
        figsize=(10, 8),
        graph_title="DaVinci17 Inverse DRT for SDR to HDR Conversion",
        graph_title_size=None,
        xlabel="Input Luminance [cd/m2]",
        ylabel="Output Luminance [cd/m2]",
        axis_label_size=None,
        legend_size=17,
        xlim=[0.009, 15000],
        ylim=[0.009, 15000],
        xtick=None,
        ytick=None,
        xtick_size=None,
        ytick_size=None,
        linewidth=3,
        minor_xtick_num=None,
        minor_ytick_num=None,
        return_figure=True)
    pu.log_scale_settings(ax1, grid_alpha=0.5, bg_color="#E0E0E0")
    # Plot in reverse so the first entry of file_list ends up drawn on top.
    for idx in range(len(file_list))[::-1]:
        img = read_image(file_list[idx][0])[0, :, 0]
        label = file_list[idx][1]
        ls = file_list[idx][2]
        y_luminance = tf.eotf_to_luminance(img, tf.ST2084)
        ax1.plot(x_luminance, y_luminance, ls, label=label)
    plt.legend(loc='upper left')
    fname_full = f"./img/{graph_name}.png".replace(' ', "_")
    plt.savefig(fname_full, bbox_inches='tight', pad_inches=0.1)
    # plt.show()
    plt.close(fig)
def conv_st2084_to_linear():
    """Decode the ST2084 clip-checker png to linear light and save it as exr.

    The decoded values are multiplied by 100, so linear 1.0 maps to 100 nits.
    """
    src_file = "./ST2084_vs_Linear/st2084_clip_checker_st2084.png"
    dst_file = "./ST2084_vs_Linear/st2084_clip_checker_linear.exr"
    encoded = read_image(src_file)
    linear = tf.eotf(encoded, tf.ST2084) * 100
    write_image(linear, dst_file)
def main_func():
    """Entry point: run the currently-enabled analysis step.

    Earlier steps are left commented out so any one of them can be re-enabled.
    """
    # create_exr_ramp()
    # plot_input_drt()
    # plot_output_drt()
    # check_100nits_code_value_on_st2084()
    # plot_forum_fig1()
    # plot_total_drt()
    # plot_inv_drt()
    conv_st2084_to_linear()
if __name__ == '__main__':
    # Run from the script's own directory so the relative "./img" paths resolve.
    os.chdir(os.path.dirname(os.path.abspath(__file__)))
    main_func()
| 35.436364
| 84
| 0.621601
| 2,833
| 19,490
| 3.969643
| 0.081186
| 0.045527
| 0.024009
| 0.03628
| 0.870799
| 0.847057
| 0.805264
| 0.781789
| 0.756536
| 0.714921
| 0
| 0.10165
| 0.219651
| 19,490
| 549
| 85
| 35.500911
| 0.63778
| 0.257825
| 0
| 0.656069
| 0
| 0
| 0.22785
| 0.111413
| 0
| 0
| 0
| 0
| 0
| 1
| 0.046243
| false
| 0
| 0.020231
| 0
| 0.066474
| 0.00578
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
e45aae8cb9dd8fd2f61809c0abf8fff4d2adb469
| 5,212
|
py
|
Python
|
nabu/neuralnetworks/loss_computers/pit_loss.py
|
Darleen2019/Nabu-MSSS
|
5e862cbf846d45b8a317f87588533f3fde9f0726
|
[
"MIT"
] | 18
|
2017-10-16T13:12:46.000Z
|
2022-02-15T01:20:00.000Z
|
nabu/neuralnetworks/loss_computers/pit_loss.py
|
Darleen2019/Nabu-MSSS
|
5e862cbf846d45b8a317f87588533f3fde9f0726
|
[
"MIT"
] | null | null | null |
nabu/neuralnetworks/loss_computers/pit_loss.py
|
Darleen2019/Nabu-MSSS
|
5e862cbf846d45b8a317f87588533f3fde9f0726
|
[
"MIT"
] | 9
|
2017-10-03T18:10:10.000Z
|
2020-11-13T08:26:31.000Z
|
"""@file pit_loss.py
contains the PITLoss"""
import loss_computer
from nabu.neuralnetworks.components import ops
import tensorflow as tf
import warnings
class PITLoss(loss_computer.LossComputer):
    """A loss computer that calculates the Permutation Invariant Training loss."""

    def __call__(self, targets, logits, seq_length):
        """
        Compute the loss

        Creates the operation to compute the Permutation Invariant Training loss

        Args:
            targets: a dictionary of [batch_size x time x ...] tensor containing
                the targets
            logits: a dictionary of [batch_size x time x ...] tensors containing the logits
            seq_length: a dictionary of [batch_size] vectors containing
                the sequence lengths

        Returns:
            loss: a scalar value containing the loss
            norm: a scalar value indicating how to normalize the loss
        """
        # Optional settings from the loss section of the config.
        # Defaults match the previous if-key-in-dict behaviour exactly:
        # boolean flags are only True when the config value is the string 'True'.
        activation = self.lossconf.get('activation', 'softmax')
        rescale_recs = self.lossconf.get('rescale_recs') == 'True'
        overspeakerized = self.lossconf.get('overspeakerized') == 'True'
        no_perm = self.lossconf.get('no_perm') == 'True'
        logits_name = self.lossconf.get('logits_name', 'bin_est')

        # 'transpose_order' is stored as space-separated ints (e.g. "1 0 2");
        # when absent, no transpose is applied.
        transpose_conf = self.lossconf.get('transpose_order')
        if transpose_conf:
            transpose_order = [int(dim) for dim in transpose_conf.split(' ')]
        else:
            transpose_order = False

        multi_targets = targets['multi_targets']
        mix_to_mask = targets['mix_to_mask']
        seq_length = seq_length[logits_name]
        logits = logits[logits_name]
        if transpose_order:
            logits = tf.transpose(logits, transpose_order)

        loss, norm = ops.pit_loss(
            multi_targets, logits, mix_to_mask, seq_length, self.batch_size, activation=activation,
            rescale_recs=rescale_recs, overspeakerized=overspeakerized, no_perm=no_perm)

        return loss, norm
class PITLossSigmoid(loss_computer.LossComputer):
    """A loss computer that calculates the loss"""

    def __call__(self, targets, logits, seq_length):
        """
        Compute the loss

        Creates the operation to compute the Permudation Invariant Training loss,
        with a fixed sigmoid activation on the estimates.

        Args:
            targets: a dictionary of [batch_size x time x ...] tensor containing
                the targets
            logits: a dictionary of [batch_size x time x ...] tensors containing the logits
            seq_length: a dictionary of [batch_size] vectors containing
                the sequence lengths

        Returns:
            loss: a scalar value containing the loss
            norm: a scalar value indicating how to normalize the loss
        """
        warnings.warn('In following versions it will be required to use the PITLoss', Warning)

        # Gather the tensors under their fixed names ('bin_est' estimates).
        spk_targets = targets['multi_targets']
        mixture = targets['mix_to_mask']
        lengths = seq_length['bin_est']
        estimates = logits['bin_est']

        loss, norm = ops.pit_loss(
            spk_targets, estimates, mixture, lengths, self.batch_size, activation='sigmoid')
        return loss, norm
class PITLossSigmoidScaled(loss_computer.LossComputer):
    """A loss computer that calculates the loss"""

    def __call__(self, targets, logits, seq_length):
        """
        Compute the loss

        Creates the operation to compute the Permudation Invariant Training loss,
        with a fixed sigmoid activation and rescaled reconstructions.

        Args:
            targets: a dictionary of [batch_size x time x ...] tensor containing
                the targets
            logits: a dictionary of [batch_size x time x ...] tensors containing the logits
            seq_length: a dictionary of [batch_size] vectors containing
                the sequence lengths

        Returns:
            loss: a scalar value containing the loss
            norm: a scalar value indicating how to normalize the loss
        """
        warnings.warn('In following versions it will be required to use the PITLoss', Warning)

        # Gather the tensors under their fixed names ('bin_est' estimates).
        spk_targets = targets['multi_targets']
        mixture = targets['mix_to_mask']
        lengths = seq_length['bin_est']
        estimates = logits['bin_est']

        loss, norm = ops.pit_loss(
            spk_targets, estimates, mixture, lengths, self.batch_size, activation='sigmoid', rescale_recs=True)
        return loss, norm
class PITLossOverspeakerized(loss_computer.LossComputer):
    """A loss computer that calculates the loss"""

    def __call__(self, targets, logits, seq_length):
        """
        Compute the loss

        Creates the operation to compute the Permudation Invariant Training loss

        Args:
            targets: a dictionary of [batch_size x time x ...] tensor containing
                the targets
            logits: a dictionary of [nrS x batch_size x time x ...] tensors containing the logits
            seq_length: a dictionary of [batch_size] vectors containing
                the sequence lengths

        Returns:
            loss: a scalar value containing the loss
            norm: a scalar value indicating how to normalize the loss
        """
        warnings.warn('In following versions it will be required to use the PITLoss', Warning)

        multi_targets = targets['multi_targets']
        mix_to_mask = targets['mix_to_mask']
        seq_length = seq_length['bin_est']
        logits = logits['bin_est']
        # NOTE: Python 2 print statement — this module targets Python 2.
        # activation=None below relies on the estimates being pre-activated.
        print 'Assuming "bin_est" is already activated with sigmoid'

        loss, norm = ops.pit_loss(
            multi_targets, logits, mix_to_mask, seq_length, self.batch_size, activation=None, rescale_recs=False,
            overspeakerized=True)

        return loss, norm
| 30.658824
| 114
| 0.739256
| 725
| 5,212
| 5.132414
| 0.142069
| 0.048374
| 0.041924
| 0.053212
| 0.720505
| 0.720505
| 0.720505
| 0.720505
| 0.720505
| 0.720505
| 0
| 0
| 0.17594
| 5,212
| 169
| 115
| 30.840237
| 0.866356
| 0
| 0
| 0.478873
| 0
| 0
| 0.182571
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.056338
| null | null | 0.014085
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
e48b74217337855e4e9559d5b305851358c2de2b
| 4,007
|
py
|
Python
|
examples/competition_example.py
|
gcollic/ffai
|
bb3f6707f86d3c540dca47caf4c594a93f5eac43
|
[
"Apache-2.0"
] | null | null | null |
examples/competition_example.py
|
gcollic/ffai
|
bb3f6707f86d3c540dca47caf4c594a93f5eac43
|
[
"Apache-2.0"
] | null | null | null |
examples/competition_example.py
|
gcollic/ffai
|
bb3f6707f86d3c540dca47caf4c594a93f5eac43
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python3
"""Run a series of FFAI bot competitions pairing different example bots
(scripted, random, grodbot) against misbehaving ones (idle, violator,
just-in-time, init-crash, crash, manipulator) under various time limits."""

from ffai.ai.competition import Competition
import examples.scripted_bot_example
import examples.grodbot
from copy import deepcopy
from ffai.core.load import get_team, get_rule_set, get_config

# Load competition configuration for the bot bowl
config = get_config('ff-11-bot-bowl-i.json')

# scripted vs. random
competition = Competition('MyCompetition', competitor_a_team_id='human-1', competitor_b_team_id='human-2', competitor_a_name='scripted', competitor_b_name='random', config=config)
results = competition.run(num_games=20)
results.print()

# Random vs. idle
config.time_limits.game = 10  # 10 second time limit per game
config.time_limits.turn = 1  # 1 second time limit per turn
competition = Competition('MyCompetition', competitor_a_team_id='human-1', competitor_b_team_id='human-2', competitor_a_name='random', competitor_b_name='idle', config=config)
results = competition.run(num_games=2)
results.print()

# Random vs. violator
config.time_limits.game = 60  # 60 second time limit per game
# Fixed: was `config.time_limits.turn_ = 1`, which silently set a nonexistent
# attribute so the intended turn limit was never applied.
config.time_limits.turn = 1  # 1 second time limit per turn
config.time_limits.secondary = 1  # 1 second time limit for secondary choices
config.time_limits.disqualification = 1  # 1 second disqualification limit
competition = Competition('MyCompetition', competitor_a_team_id='human-1', competitor_b_team_id='human-2', competitor_a_name='random', competitor_b_name='violator', config=config)
results = competition.run(num_games=2)
results.print()

# Random vs. just-in-time
config.time_limits.game = 600  # 600 second time limit per game (comment fixed to match value)
config.time_limits.turn = 1  # 1 second time limit per turn
config.time_limits.secondary = 1  # 1 second time limit for secondary choices
config.time_limits.disqualification = 1  # 1 second disqualification limit
#config.debug_mode = True
competition = Competition('MyCompetition', competitor_a_team_id='human-1', competitor_b_team_id='human-2', competitor_a_name='random', competitor_b_name='just-in-time', config=config)
results = competition.run(num_games=2)
results.print()

# Random vs. init crash
config.time_limits.game = 60  # 60 second time limit per game
config.time_limits.turn = 1  # 1 second time limit per turn
config.time_limits.secondary = 1  # 1 second time limit for secondary choices
config.time_limits.disqualification = 1  # 1 second disqualification threshold
config.time_limits.init = 20  # 20 second init limit (comment fixed to match value)
competition = Competition('MyCompetition', competitor_a_team_id='human-1', competitor_b_team_id='human-2', competitor_a_name='random', competitor_b_name='init-crash', config=config)
results = competition.run(num_games=2)
results.print()

# Random vs. crash
config.time_limits.game = 32  # 32 second time limit per game
config.time_limits.turn = 1  # 1 second time limit per turn
competition = Competition('MyCompetition', competitor_a_team_id='human-1', competitor_b_team_id='human-2', competitor_a_name='random', competitor_b_name='crash', config=config)
results = competition.run(num_games=2)
results.print()

# Random vs. manipulator
config.time_limits.game = 32  # 32 second time limit per game
config.time_limits.turn = 1  # 1 second time limit per turn
competition = Competition('MyCompetition', competitor_a_team_id='human-1', competitor_b_team_id='human-2', competitor_a_name='random', competitor_b_name='manipulator', config=config)
results = competition.run(num_games=2)
results.print()

# Scripted vs. grodbot (fresh config resets the time limits set above)
config = get_config('ff-11-bot-bowl-i.json')
competition = Competition('MyCompetition', competitor_a_team_id='human-1', competitor_b_team_id='human-2', competitor_a_name='scripted', competitor_b_name='grodbot', config=config)
results = competition.run(num_games=2)
results.print()

# Random vs. grodbot
config = get_config('ff-11-bot-bowl-i.json')
competition = Competition('MyCompetition', competitor_a_team_id='human-1', competitor_b_team_id='human-2', competitor_a_name='random', competitor_b_name='grodbot', config=config)
results = competition.run(num_games=2)
results.print()
| 50.0875
| 183
| 0.788121
| 605
| 4,007
| 5.006612
| 0.120661
| 0.062727
| 0.100363
| 0.071311
| 0.855398
| 0.852096
| 0.852096
| 0.838561
| 0.838561
| 0.828326
| 0
| 0.02416
| 0.101323
| 4,007
| 79
| 184
| 50.721519
| 0.816995
| 0.218118
| 0
| 0.648148
| 0
| 0
| 0.13991
| 0.020309
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.092593
| 0
| 0.092593
| 0.166667
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
e4e970e8f332bf8a0fc817bda2a03accf3973017
| 28
|
py
|
Python
|
qrshare/config/__init__.py
|
mensch272/qrshare
|
335f8a96e4fdecc1e520d5dd2900f9860b3a70e8
|
[
"Apache-2.0"
] | 3
|
2020-12-23T22:30:39.000Z
|
2021-02-17T20:50:28.000Z
|
qrshare/config/__init__.py
|
mHaisham/qrshare
|
335f8a96e4fdecc1e520d5dd2900f9860b3a70e8
|
[
"Apache-2.0"
] | null | null | null |
qrshare/config/__init__.py
|
mHaisham/qrshare
|
335f8a96e4fdecc1e520d5dd2900f9860b3a70e8
|
[
"Apache-2.0"
] | null | null | null |
from .user import UserConfig
| 28
| 28
| 0.857143
| 4
| 28
| 6
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.107143
| 28
| 1
| 28
| 28
| 0.96
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
5f7934e36a83f68750ea5e21300c261a523a77a3
| 2,940
|
py
|
Python
|
tests/test_uu_events/test_gsm_sms_submit.py
|
matan1008/srsran-controller
|
8389a78976efb7dfe3ef5dc17f5ac14adcae732c
|
[
"MIT"
] | null | null | null |
tests/test_uu_events/test_gsm_sms_submit.py
|
matan1008/srsran-controller
|
8389a78976efb7dfe3ef5dc17f5ac14adcae732c
|
[
"MIT"
] | null | null | null |
tests/test_uu_events/test_gsm_sms_submit.py
|
matan1008/srsran-controller
|
8389a78976efb7dfe3ef5dc17f5ac14adcae732c
|
[
"MIT"
] | null | null | null |
import datetime
from pyshark import FileCapture
from srsran_controller.uu_events.factory import EventsFactory
from srsran_controller.uu_events.gsm_sms_submit import GSM_SMS_SUBMIT_NAME
# Hex-encoded capture file used as the test fixture below; decoded with
# bytes.fromhex() and written to disk before being parsed by pyshark.
# Do not edit the hex — the test asserts on the exact decoded contents.
GSM_SMS_SUBMIT_PCAP_DATA = (
    '0a0d0d0ab80000004d3c2b1a01000000ffffffffffffffff02003500496e74656c28522920436f726528544d292069372d37373030204350'
    '55204020332e363047487a20287769746820535345342e3229000000030017004c696e757820352e31312e302d32352d67656e6572696300'
    '04003a0044756d70636170202857697265736861726b2920332e322e3320284769742076332e322e33207061636b6167656420617320332e'
    '322e332d3129000000000000b80000000100000060000000010000000000040002000b006c74652d6e6574776f726b000900010009000000'
    '0b000e000075647020706f7274203538343700000c0017004c696e757820352e31312e302d32352d67656e65726963000000000060000000'
    '060000007c0200000000000042c0a016b6cea54859020000590200000242c3b919f70242c0a8340208004500024bb4b7400040119999c0a8'
    '3402c0a834fe163716d70237ec996d61632d6c746501000302004a0300000433d007010a000f00013d3a223523461f8000a00000480564e0'
    'e28e80e040ec644d2023e0038000d02a7081200ce28021e1922f2a468902acc00000f886f91f8fd26020552504870043806b45000042cb32'
    '00004011f356ac10000208080808ef7f0035002e7efdd987010000010000000000000a696e69742d7030316d64056170706c6503636f6d00'
    '0041000100000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000'
    '0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000'
    '0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000'
    '0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000'
    '0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000'
    '0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000'
    '0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000'
    '000000000000000000000000000000007c020000050000006c00000000000000f1ca05008b4328be01001c00436f756e746572732070726f'
    '76696465642062792064756d7063617002000800f1ca0500f83dc6b103000800f1ca0500ef4228be04000800b30000000000000005000800'
    '0000000000000000000000006c000000'
)
def test_parsing_gsm_sms_submit(tmp_path):
    """Write the fixture capture to disk, parse its first packet, and check
    that the factory yields the expected GSM SMS-SUBMIT event dict."""
    pcap_path = tmp_path / 'gsm_sms_submit.pcap'
    pcap_path.write_bytes(bytes.fromhex(GSM_SMS_SUBMIT_PCAP_DATA))
    with FileCapture(str(pcap_path)) as capture:
        first_packet = list(capture)[0]
        events = list(EventsFactory().from_packet(first_packet))
    expected = {
        'event': GSM_SMS_SUBMIT_NAME,
        'data': {
            'rp_da': '3548900076',
            'content': 'Do food',
            'tp_da': '972543845166',
        },
        'rnti': 74,
        'time': datetime.datetime(2021, 9, 1, 19, 40, 56, 27320),
    }
    assert events[0] == expected
| 63.913043
| 118
| 0.867007
| 120
| 2,940
| 20.975
| 0.55
| 0.016687
| 0.033373
| 0.01907
| 0.305125
| 0
| 0
| 0
| 0
| 0
| 0
| 0.715247
| 0.089796
| 2,940
| 45
| 119
| 65.333333
| 0.225336
| 0
| 0
| 0.15
| 0
| 0
| 0.72449
| 0.696599
| 0
| 1
| 0
| 0
| 0.025
| 1
| 0.025
| false
| 0
| 0.1
| 0
| 0.125
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
5fd3c753cecd5d2a0610dfd86aaa1b3b41a1e198
| 24
|
py
|
Python
|
strategies/__init__.py
|
arrdguez/trade_signals
|
d81f75d4a62196ef764fd4ef66baeebaacc8999b
|
[
"MIT"
] | 2
|
2017-08-11T14:38:34.000Z
|
2017-08-11T19:36:06.000Z
|
strategies/__init__.py
|
arrdguez/trade_signals
|
d81f75d4a62196ef764fd4ef66baeebaacc8999b
|
[
"MIT"
] | null | null | null |
strategies/__init__.py
|
arrdguez/trade_signals
|
d81f75d4a62196ef764fd4ef66baeebaacc8999b
|
[
"MIT"
] | 1
|
2019-01-22T22:05:36.000Z
|
2019-01-22T22:05:36.000Z
|
from . import strategies
| 24
| 24
| 0.833333
| 3
| 24
| 6.666667
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.125
| 24
| 1
| 24
| 24
| 0.952381
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
3964547e6a0629004643a3325ae5f82d19c2ef23
| 134
|
py
|
Python
|
app/admin/__init__.py
|
quanpower/sitp
|
082f244dd35c5e881b332a624d4808f3e9e81a96
|
[
"Apache-2.0"
] | null | null | null |
app/admin/__init__.py
|
quanpower/sitp
|
082f244dd35c5e881b332a624d4808f3e9e81a96
|
[
"Apache-2.0"
] | 4
|
2020-03-24T15:46:19.000Z
|
2022-03-08T21:09:16.000Z
|
app/admin/__init__.py
|
quanpower/sitp
|
082f244dd35c5e881b332a624d4808f3e9e81a96
|
[
"Apache-2.0"
] | null | null | null |
from .user_admin import UserAdminView, UserModelView
from .test_admin import TestAdminView
from .daq_admin import TemperatureModelView
| 44.666667
| 52
| 0.880597
| 16
| 134
| 7.1875
| 0.625
| 0.286957
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.089552
| 134
| 3
| 53
| 44.666667
| 0.942623
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
399a727e24c644c8ae9cc63eaac653fb13b61329
| 350
|
py
|
Python
|
anet/tasks/mnist/envs/__init__.py
|
thomasaunger/Anet
|
1d353f280a30c3207fa6d09af91a85c4955bbda4
|
[
"BSD-3-Clause"
] | null | null | null |
anet/tasks/mnist/envs/__init__.py
|
thomasaunger/Anet
|
1d353f280a30c3207fa6d09af91a85c4955bbda4
|
[
"BSD-3-Clause"
] | null | null | null |
anet/tasks/mnist/envs/__init__.py
|
thomasaunger/Anet
|
1d353f280a30c3207fa6d09af91a85c4955bbda4
|
[
"BSD-3-Clause"
] | null | null | null |
from anet.tasks.mnist.envs.mnist_env import MNISTEnv
from anet.tasks.mnist.envs.mnist_env_binary import MNISTEnvBinary
from anet.tasks.mnist.envs.mnist_env_quaternary import MNISTEnvQuaternary
from anet.tasks.mnist.envs.mnist_env_senary import MNISTEnvSenary
from anet.tasks.mnist.envs.mnist_env_octonary import MNISTEnvOctonary
| 58.333333
| 73
| 0.825714
| 49
| 350
| 5.714286
| 0.326531
| 0.142857
| 0.232143
| 0.321429
| 0.535714
| 0.535714
| 0.535714
| 0
| 0
| 0
| 0
| 0
| 0.117143
| 350
| 5
| 74
| 70
| 0.906149
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
39a74f0331b6f192088f410af1d510fb310d89d6
| 569
|
py
|
Python
|
addons/iap/__init__.py
|
SHIVJITH/Odoo_Machine_Test
|
310497a9872db7844b521e6dab5f7a9f61d365a4
|
[
"Apache-2.0"
] | null | null | null |
addons/iap/__init__.py
|
SHIVJITH/Odoo_Machine_Test
|
310497a9872db7844b521e6dab5f7a9f61d365a4
|
[
"Apache-2.0"
] | null | null | null |
addons/iap/__init__.py
|
SHIVJITH/Odoo_Machine_Test
|
310497a9872db7844b521e6dab5f7a9f61d365a4
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from . import models
from . import tools
# compatibility imports
from odoo.addons.iap.tools.iap_tools import iap_jsonrpc as jsonrpc
from odoo.addons.iap.tools.iap_tools import iap_authorize as authorize
from odoo.addons.iap.tools.iap_tools import iap_cancel as cancel
from odoo.addons.iap.tools.iap_tools import iap_capture as capture
from odoo.addons.iap.tools.iap_tools import iap_charge as charge
from odoo.addons.iap.tools.iap_tools import InsufficientCreditError
| 40.642857
| 74
| 0.817223
| 92
| 569
| 4.934783
| 0.326087
| 0.211454
| 0.185022
| 0.22467
| 0.508811
| 0.508811
| 0.508811
| 0.508811
| 0.429515
| 0
| 0
| 0.001984
| 0.114236
| 569
| 13
| 75
| 43.769231
| 0.89881
| 0.203866
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
39e2423c1092a361c17f988ec8504186eff2db0c
| 37
|
py
|
Python
|
sonorus/experimental/datasets/__init__.py
|
imbesat-rizvi/sonorus
|
38698d55b00c67fb3bcff4e4349b6c214a29e6f5
|
[
"MIT"
] | null | null | null |
sonorus/experimental/datasets/__init__.py
|
imbesat-rizvi/sonorus
|
38698d55b00c67fb3bcff4e4349b6c214a29e6f5
|
[
"MIT"
] | null | null | null |
sonorus/experimental/datasets/__init__.py
|
imbesat-rizvi/sonorus
|
38698d55b00c67fb3bcff4e4349b6c214a29e6f5
|
[
"MIT"
] | 2
|
2021-01-17T22:53:02.000Z
|
2021-03-03T01:11:43.000Z
|
from .CommonVoice import CommonVoice
| 18.5
| 36
| 0.864865
| 4
| 37
| 8
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.108108
| 37
| 1
| 37
| 37
| 0.969697
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
39e9519a6eff46cac49cf76a44d9d7574984452b
| 474
|
py
|
Python
|
mettler_toledo_device/__init__.py
|
MNE-collab/mettler_toledo_device_python
|
f681ab42073d54eb5e9aff43f1c10928b3e26c6d
|
[
"BSD-3-Clause"
] | 19
|
2016-03-21T18:13:00.000Z
|
2022-01-19T04:06:44.000Z
|
mettler_toledo_device/__init__.py
|
MNE-collab/mettler_toledo_device_python
|
f681ab42073d54eb5e9aff43f1c10928b3e26c6d
|
[
"BSD-3-Clause"
] | 4
|
2015-10-29T18:40:51.000Z
|
2021-11-08T14:32:45.000Z
|
mettler_toledo_device/__init__.py
|
peterpolidoro/mettler_toledo_device_python
|
8d068e53d6176434414bfbe4e1cb6e42bfb4fd66
|
[
"BSD-3-Clause"
] | 12
|
2015-09-01T21:18:10.000Z
|
2022-03-13T20:14:27.000Z
|
'''
This Python package (mettler_toledo_device) creates a class named
MettlerToledoDevice, which contains an instance of
serial_device2.SerialDevice and adds methods to it to interface to
Mettler Toledo balances and scales that use the Mettler Toledo
Standard Interface Command Set (MT-SICS).
'''
from .mettler_toledo_device import MettlerToledoDevice, MettlerToledoDevices, MettlerToledoError, find_mettler_toledo_device_ports, find_mettler_toledo_device_port, __version__
| 52.666667
| 176
| 0.85443
| 62
| 474
| 6.258065
| 0.693548
| 0.201031
| 0.195876
| 0.118557
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.002353
| 0.103376
| 474
| 8
| 177
| 59.25
| 0.910588
| 0.607595
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
f2f3e72aef64ecb0b8ef1b952d5c90f43e1e57f3
| 49
|
py
|
Python
|
ultrafeedparser/__init__.py
|
kkszysiu/ultrafeedparser
|
f3f9a53013049a29771743b5e4ec97fb7c39080e
|
[
"MIT"
] | null | null | null |
ultrafeedparser/__init__.py
|
kkszysiu/ultrafeedparser
|
f3f9a53013049a29771743b5e4ec97fb7c39080e
|
[
"MIT"
] | null | null | null |
ultrafeedparser/__init__.py
|
kkszysiu/ultrafeedparser
|
f3f9a53013049a29771743b5e4ec97fb7c39080e
|
[
"MIT"
] | null | null | null |
from ultrafeedparser.libultrafeedparser import *
| 24.5
| 48
| 0.877551
| 4
| 49
| 10.75
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.081633
| 49
| 1
| 49
| 49
| 0.955556
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
8454d18afa2b2a515f5bc9966bd931bed0d45184
| 693
|
py
|
Python
|
test.py
|
johnngnky/luhn
|
7b79485b4748e183050998514ea430db3fb5f9e6
|
[
"MIT"
] | 40
|
2016-08-06T16:01:12.000Z
|
2022-02-21T13:28:09.000Z
|
test.py
|
johnngnky/luhn
|
7b79485b4748e183050998514ea430db3fb5f9e6
|
[
"MIT"
] | 6
|
2017-12-12T22:51:51.000Z
|
2021-06-29T15:17:09.000Z
|
test.py
|
johnngnky/luhn
|
7b79485b4748e183050998514ea430db3fb5f9e6
|
[
"MIT"
] | 17
|
2017-02-24T19:47:05.000Z
|
2022-03-23T17:41:50.000Z
|
import luhn
def test_checksum_len1():
    """Checksum of a single-digit string."""
    result = luhn.checksum('7')
    assert result == 7
def test_checksum_len2():
    """Checksum of a two-digit string."""
    result = luhn.checksum('13')
    assert result == 5
def test_checksum_len3():
    """Checksum of a three-digit string."""
    result = luhn.checksum('383')
    assert result == 3
def test_checksum_len4():
    """Checksum of a four-digit string."""
    result = luhn.checksum('2827')
    assert result == 3
def test_checksum_len13():
    """Checksum of a 13-digit string."""
    result = luhn.checksum('4346537657597')
    assert result == 9
def test_checksum_len14():
    """Checksum of a 14-digit string."""
    result = luhn.checksum('27184931073326')
    assert result == 1
def test_valid():
    """A number with a correct check digit verifies."""
    is_valid = luhn.verify('356938035643809')
    assert is_valid
def test_invalid():
    """A number with a wrong check digit fails verification."""
    is_valid = luhn.verify('4222222222222222')
    assert not is_valid
def test_generate():
    """Generate the check digit for a partial number."""
    check_digit = luhn.generate('7992739871')
    assert check_digit == 3
def test_append():
    """Appending the check digit yields the full valid number."""
    full_number = luhn.append('53461861341123')
    assert full_number == '534618613411234'
| 21.65625
| 60
| 0.702742
| 87
| 693
| 5.413793
| 0.37931
| 0.14862
| 0.191083
| 0.067941
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.208547
| 0.155844
| 693
| 31
| 61
| 22.354839
| 0.596581
| 0
| 0
| 0
| 0
| 0
| 0.154401
| 0
| 0
| 0
| 0
| 0
| 0.47619
| 1
| 0.47619
| true
| 0
| 0.047619
| 0
| 0.52381
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| 1
| 0
|
0
| 6
|
84629e4d247436e994dec74bd2b6a3ce52eae863
| 45
|
py
|
Python
|
dgp/genera/load/__init__.py
|
dataspot/dgp
|
553a255a4884b935cf2efecdc761050232f0f066
|
[
"MIT"
] | 1
|
2019-07-17T11:34:27.000Z
|
2019-07-17T11:34:27.000Z
|
dgp/genera/load/__init__.py
|
datahq/dgp
|
f39592ce20ba67b73b08188f14585b6eb3d43f96
|
[
"MIT"
] | 2
|
2019-04-30T12:32:32.000Z
|
2019-04-30T12:35:26.000Z
|
dgp/genera/load/__init__.py
|
dataspot/dgp
|
553a255a4884b935cf2efecdc761050232f0f066
|
[
"MIT"
] | null | null | null |
from .loader import LoaderDGP, PostLoaderDGP
| 22.5
| 44
| 0.844444
| 5
| 45
| 7.6
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.111111
| 45
| 1
| 45
| 45
| 0.95
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
ffc5644871bbea822c49b55163e4af186877387e
| 12,145
|
py
|
Python
|
apiserver/apiserver/api/tests.py
|
protodarkstar/spaceport
|
ce28444765208f6c90bd32dcafed2aa4404e76a0
|
[
"MIT"
] | null | null | null |
apiserver/apiserver/api/tests.py
|
protodarkstar/spaceport
|
ce28444765208f6c90bd32dcafed2aa4404e76a0
|
[
"MIT"
] | null | null | null |
apiserver/apiserver/api/tests.py
|
protodarkstar/spaceport
|
ce28444765208f6c90bd32dcafed2aa4404e76a0
|
[
"MIT"
] | null | null | null |
import django, sys, os
# Configure Django before importing anything that touches models; this module
# is meant to be runnable directly (outside the usual test runner).
os.environ['DJANGO_SETTINGS_MODULE'] = 'apiserver.settings'
django.setup()
from django.test import TestCase
import datetime
from dateutil import relativedelta
from rest_framework.exceptions import ValidationError
from apiserver.api import utils, utils_paypal, models
# Shared fixture member, created once at import time and reused (and mutated)
# by the test cases below.
testing_member, _ = models.Member.objects.get_or_create(
    first_name='unittest',
    preferred_name='unittest',
    last_name='tester',
)
class TestMonthsSpanned(TestCase):
    """Tests for utils.num_months_spanned."""

    def test_num_months_spanned_one_month(self):
        later = datetime.date(2020, 2, 10)
        earlier = datetime.date(2020, 1, 10)
        self.assertEqual(utils.num_months_spanned(later, earlier), 1)

    def test_num_months_spanned_one_week(self):
        later = datetime.date(2020, 2, 5)
        earlier = datetime.date(2020, 1, 28)
        self.assertEqual(utils.num_months_spanned(later, earlier), 1)

    def test_num_months_spanned_two_days(self):
        # Two adjacent days that straddle a month boundary.
        later = datetime.date(2020, 2, 1)
        earlier = datetime.date(2020, 1, 31)
        self.assertEqual(utils.num_months_spanned(later, earlier), 1)

    def test_num_months_spanned_two_years(self):
        later = datetime.date(2022, 1, 18)
        earlier = datetime.date(2020, 1, 18)
        self.assertEqual(utils.num_months_spanned(later, earlier), 24)

    def test_num_months_spanned_same_month(self):
        later = datetime.date(2020, 1, 31)
        earlier = datetime.date(2020, 1, 1)
        self.assertEqual(utils.num_months_spanned(later, earlier), 0)
class TestMonthsDifference(TestCase):
    """Tests for utils.num_months_difference."""

    def test_num_months_difference_one_month(self):
        later = datetime.date(2020, 2, 10)
        earlier = datetime.date(2020, 1, 10)
        self.assertEqual(utils.num_months_difference(later, earlier), 1)

    def test_num_months_difference_one_week(self):
        later = datetime.date(2020, 2, 5)
        earlier = datetime.date(2020, 1, 28)
        self.assertEqual(utils.num_months_difference(later, earlier), 0)

    def test_num_months_difference_two_days(self):
        later = datetime.date(2020, 2, 1)
        earlier = datetime.date(2020, 1, 31)
        self.assertEqual(utils.num_months_difference(later, earlier), 0)

    def test_num_months_difference_two_years(self):
        later = datetime.date(2022, 1, 18)
        earlier = datetime.date(2020, 1, 18)
        self.assertEqual(utils.num_months_difference(later, earlier), 24)

    def test_num_months_difference_same_month(self):
        later = datetime.date(2020, 1, 31)
        earlier = datetime.date(2020, 1, 1)
        self.assertEqual(utils.num_months_difference(later, earlier), 0)
class TestAddMonths(TestCase):
    """Tests for utils.add_months."""

    def test_add_months_one_month(self):
        result = utils.add_months(datetime.date(2020, 1, 18), 1)
        self.assertEqual(result, datetime.date(2020, 2, 18))

    def test_add_months_february(self):
        # Jan 31 + 1 month clamps to the last day of February (leap year).
        result = utils.add_months(datetime.date(2020, 1, 31), 1)
        self.assertEqual(result, datetime.date(2020, 2, 29))

    def test_add_months_february_leap(self):
        # Feb 29 + 12 months lands in a non-leap year and clamps to Feb 28.
        result = utils.add_months(datetime.date(2020, 2, 29), 12)
        self.assertEqual(result, datetime.date(2021, 2, 28))

    def test_add_months_hundred_years(self):
        result = utils.add_months(datetime.date(2020, 1, 31), 1200)
        self.assertEqual(result, datetime.date(2120, 1, 31))
class TestCalcStatus(TestCase):
    """Tests for utils.calc_member_status.

    Each test checks the (status, former) pair returned for an expiry date
    a given number of days away from today.
    """

    def _check(self, days_from_today, expected_status, expected_former):
        """Assert calc_member_status for an expiry `days_from_today` days from now.

        Negative values mean the membership expired in the past. Extracted to
        remove six near-identical test bodies.
        """
        expire_date = datetime.date.today() + datetime.timedelta(days=days_from_today)
        status, former = utils.calc_member_status(expire_date)
        self.assertEqual(status, expected_status)
        self.assertEqual(former, expected_former)

    def test_calc_member_status_14_days(self):
        self._check(14, 'Current', False)

    def test_calc_member_status_90_days(self):
        self._check(90, 'Prepaid', False)

    def test_calc_member_status_tomorrow(self):
        self._check(1, 'Current', False)

    def test_calc_member_status_today(self):
        self._check(0, 'Due', False)

    def test_calc_member_status_yesterday(self):
        self._check(-1, 'Due', False)

    def test_calc_member_status_85_days_ago(self):
        self._check(-85, 'Overdue', False)

    def test_calc_member_status_95_days_ago(self):
        # ~3 months past expiry: still Overdue, but now flagged as a former member.
        self._check(-95, 'Overdue', True)
class TestFakeMonths(TestCase):
    """Tests for utils.fake_missing_membership_months.

    NOTE: these tests mutate the shared module-level `testing_member`.
    """

    def test_fake_missing_membership_months_one_month(self):
        testing_member.current_start_date = datetime.date(2018, 6, 6)
        testing_member.expire_date = datetime.date(2018, 7, 6)
        _, count = utils.fake_missing_membership_months(testing_member)
        self.assertEqual(count, 1)

    def test_fake_missing_membership_months_one_and_half_month(self):
        testing_member.current_start_date = datetime.date(2018, 6, 1)
        testing_member.expire_date = datetime.date(2018, 7, 15)
        _, count = utils.fake_missing_membership_months(testing_member)
        self.assertEqual(count, 1)

    def test_fake_missing_membership_months_one_year(self):
        testing_member.current_start_date = datetime.date(2018, 6, 6)
        testing_member.expire_date = datetime.date(2019, 6, 6)
        _, count = utils.fake_missing_membership_months(testing_member)
        self.assertEqual(count, 12)

    def test_fake_missing_membership_months_same_month(self):
        testing_member.current_start_date = datetime.date(2018, 6, 6)
        testing_member.expire_date = datetime.date(2018, 6, 16)
        _, count = utils.fake_missing_membership_months(testing_member)
        self.assertEqual(count, 0)
class TestTallyMembership(TestCase):
    """Tests for utils.tally_membership_months.

    NOTE: these tests mutate the shared module-level `testing_member`.
    """

    def get_member_clear_transactions(self):
        """Return the shared test member with its date fields cleared."""
        member = testing_member
        member.paused_date = None
        member.expire_date = None
        return member

    def _tally_after_months(self, test_num_months):
        """Start the member ~6.5 months ago, record `test_num_months` one-month
        transactions, run the tally, and return (member, expected expire date).

        Extracted to remove five near-identical test bodies; only the month
        count and the asserted status/paused_date differ per test.
        """
        member = self.get_member_clear_transactions()
        start_date = datetime.date.today() - relativedelta.relativedelta(months=6, days=14)
        end_date = start_date + relativedelta.relativedelta(months=test_num_months)
        member.current_start_date = start_date
        member.save()
        for _ in range(test_num_months):
            models.Transaction.objects.create(
                amount=0,
                member_id=member.id,
                number_of_membership_months=1,
            )
        utils.tally_membership_months(member)
        return member, end_date

    def test_tally_membership_months_prepaid(self):
        member, end_date = self._tally_after_months(8)
        self.assertEqual(member.expire_date, end_date)
        self.assertEqual(member.status, 'Prepaid')

    def test_tally_membership_months_current(self):
        member, end_date = self._tally_after_months(7)
        self.assertEqual(member.expire_date, end_date)
        self.assertEqual(member.status, 'Current')

    def test_tally_membership_months_due(self):
        member, end_date = self._tally_after_months(6)
        self.assertEqual(member.expire_date, end_date)
        self.assertEqual(member.status, 'Due')

    def test_tally_membership_months_overdue(self):
        member, end_date = self._tally_after_months(5)
        self.assertEqual(member.expire_date, end_date)
        self.assertEqual(member.status, 'Overdue')

    def test_tally_membership_months_overdue_pause(self):
        # Far enough overdue that the tally also records a paused_date.
        member, end_date = self._tally_after_months(1)
        self.assertEqual(member.expire_date, end_date)
        self.assertEqual(member.paused_date, end_date)
        self.assertEqual(member.status, 'Overdue')

    def test_tally_membership_months_dont_run(self):
        # An already-paused member should be skipped (tally returns False).
        member = self.get_member_clear_transactions()
        start_date = datetime.date.today()
        member.current_start_date = start_date
        member.paused_date = start_date
        member.save()
        result = utils.tally_membership_months(member)
        self.assertEqual(result, False)
class TestParsePayPalDate(TestCase):
    """Tests for utils_paypal.parse_paypal_date."""

    def test_parse(self):
        # PST input; result rendered in UTC.
        parsed = utils_paypal.parse_paypal_date('20:12:59 Jan 13, 2009 PST')
        self.assertEqual(str(parsed), '2009-01-14 04:12:59+00:00')

    def test_parse_dst(self):
        # PDT (daylight saving) input; one hour closer to UTC than PST.
        parsed = utils_paypal.parse_paypal_date('20:12:59 Jul 13, 2009 PDT')
        self.assertEqual(str(parsed), '2009-07-14 03:12:59+00:00')

    def test_parse_bad_tz(self):
        bad_tz = '20:12:59 Jul 13, 2009 QOT'
        self.assertRaises(ValidationError, utils_paypal.parse_paypal_date, bad_tz)

    def test_parse_bad_string(self):
        not_a_date = 'ave satanas'
        self.assertRaises(ValidationError, utils_paypal.parse_paypal_date, not_a_date)
| 32.214854
| 91
| 0.679374
| 1,512
| 12,145
| 5.178571
| 0.10582
| 0.075096
| 0.059259
| 0.032567
| 0.877522
| 0.839591
| 0.809962
| 0.790294
| 0.771903
| 0.737931
| 0
| 0.045348
| 0.228324
| 12,145
| 376
| 92
| 32.300532
| 0.79012
| 0
| 0
| 0.561265
| 0
| 0
| 0.022231
| 0.001811
| 0
| 0
| 0
| 0
| 0.189723
| 1
| 0.142292
| false
| 0
| 0.023715
| 0
| 0.197628
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
ffd2ddce31fd10d3479bdbfc3d26fa6f568b338f
| 38
|
py
|
Python
|
monosloth/factory/__init__.py
|
monosloth/framework
|
7121f91aefc14c9b7ee088152282d07ee300ad8f
|
[
"MIT"
] | null | null | null |
monosloth/factory/__init__.py
|
monosloth/framework
|
7121f91aefc14c9b7ee088152282d07ee300ad8f
|
[
"MIT"
] | null | null | null |
monosloth/factory/__init__.py
|
monosloth/framework
|
7121f91aefc14c9b7ee088152282d07ee300ad8f
|
[
"MIT"
] | null | null | null |
from . factory import AbstractFactory
| 19
| 37
| 0.842105
| 4
| 38
| 8
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.131579
| 38
| 1
| 38
| 38
| 0.969697
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
ffe8643f0f4144b08f6866b18cd401e32143731e
| 33
|
py
|
Python
|
plato_pylib/parseOther/__init__.py
|
RFogarty1/plato_pylib
|
b0ab65bfe489c4bb1fd321cc102580bef2b6ff68
|
[
"MIT"
] | null | null | null |
plato_pylib/parseOther/__init__.py
|
RFogarty1/plato_pylib
|
b0ab65bfe489c4bb1fd321cc102580bef2b6ff68
|
[
"MIT"
] | null | null | null |
plato_pylib/parseOther/__init__.py
|
RFogarty1/plato_pylib
|
b0ab65bfe489c4bb1fd321cc102580bef2b6ff68
|
[
"MIT"
] | null | null | null |
from . import parse_castep_files
| 16.5
| 32
| 0.848485
| 5
| 33
| 5.2
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.121212
| 33
| 1
| 33
| 33
| 0.896552
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
fff0e9f5686b72564862947b9ec0132e5d71e610
| 45
|
py
|
Python
|
wrktoolbox/goals/__init__.py
|
kishorekumar-kk/wrktoolbox
|
20ba73a6dc04c4c1436ed6e3d37095246b3c7392
|
[
"MIT"
] | 3
|
2020-04-08T08:54:26.000Z
|
2021-07-27T16:29:39.000Z
|
wrktoolbox/goals/__init__.py
|
kishorekumar-kk/wrktoolbox
|
20ba73a6dc04c4c1436ed6e3d37095246b3c7392
|
[
"MIT"
] | 2
|
2019-07-08T13:19:41.000Z
|
2021-01-24T21:06:06.000Z
|
wrktoolbox/goals/__init__.py
|
kishorekumar-kk/wrktoolbox
|
20ba73a6dc04c4c1436ed6e3d37095246b3c7392
|
[
"MIT"
] | 2
|
2020-11-03T07:54:53.000Z
|
2021-01-22T11:59:05.000Z
|
from .common import *
from .latency import *
| 15
| 22
| 0.733333
| 6
| 45
| 5.5
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.177778
| 45
| 2
| 23
| 22.5
| 0.891892
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
0819dd9ae08806203eee263f6f9ff9e454f6c3ae
| 72
|
py
|
Python
|
selfsupervised3d/dataset/__init__.py
|
jcreinhold/selfsupervised3d
|
735f0b8d0e5344fd7692649523a13a7c04ac2584
|
[
"Apache-2.0"
] | 5
|
2020-05-01T15:54:05.000Z
|
2021-11-24T11:37:23.000Z
|
selfsupervised3d/dataset/__init__.py
|
jcreinhold/selfsupervised3d
|
735f0b8d0e5344fd7692649523a13a7c04ac2584
|
[
"Apache-2.0"
] | 1
|
2022-01-25T15:05:11.000Z
|
2022-01-25T15:05:11.000Z
|
selfsupervised3d/dataset/__init__.py
|
jcreinhold/selfsupervised3d
|
735f0b8d0e5344fd7692649523a13a7c04ac2584
|
[
"Apache-2.0"
] | null | null | null |
from .blendowski import *
from .context import *
from .doersch import *
| 18
| 25
| 0.75
| 9
| 72
| 6
| 0.555556
| 0.37037
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.166667
| 72
| 3
| 26
| 24
| 0.9
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
f260c5bafba124a2ba06f128524f19e1b2f89fd0
| 87
|
py
|
Python
|
kcwidrp/tests/test_import_pipeline.py
|
MNBrod/KCWI_DRP
|
9331a545879f647ed83ceb9c7d925770b254a8eb
|
[
"BSD-3-Clause"
] | 5
|
2020-04-09T20:05:52.000Z
|
2021-08-04T18:04:28.000Z
|
kcwidrp/tests/test_import_pipeline.py
|
MNBrod/KCWI_DRP
|
9331a545879f647ed83ceb9c7d925770b254a8eb
|
[
"BSD-3-Clause"
] | 80
|
2020-03-19T00:35:27.000Z
|
2022-03-07T20:08:23.000Z
|
kcwidrp/tests/test_import_pipeline.py
|
MNBrod/KCWI_DRP
|
9331a545879f647ed83ceb9c7d925770b254a8eb
|
[
"BSD-3-Clause"
] | 9
|
2021-01-22T02:00:32.000Z
|
2022-02-08T19:43:16.000Z
|
import pytest
def test_import_pipeline():
import kcwidrp.pipelines.kcwi_pipeline
| 14.5
| 42
| 0.804598
| 11
| 87
| 6.090909
| 0.727273
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.137931
| 87
| 5
| 43
| 17.4
| 0.893333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 1
| 0
| 1.333333
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
4b31fd1cb2734311d191d0b2050d510bb40ced15
| 159
|
py
|
Python
|
auto_ts/models/ar_based/__init__.py
|
barrosm/Auto_TS
|
9fe2fcaae92209664deaf70b800865eb5e26ece1
|
[
"Apache-2.0"
] | 423
|
2020-05-11T10:47:49.000Z
|
2022-03-30T14:14:20.000Z
|
auto_ts/models/ar_based/__init__.py
|
Moehrenbaum/Auto_TS
|
e0a6634a727e44b4d5bbf6fbfefde99b6b3e8f86
|
[
"Apache-2.0"
] | 70
|
2020-06-05T13:38:49.000Z
|
2022-03-17T11:42:25.000Z
|
auto_ts/models/ar_based/__init__.py
|
Moehrenbaum/Auto_TS
|
e0a6634a727e44b4d5bbf6fbfefde99b6b3e8f86
|
[
"Apache-2.0"
] | 75
|
2020-02-16T00:55:20.000Z
|
2022-03-22T03:55:09.000Z
|
from .build_arima import BuildArima
from .build_sarimax import BuildSarimax
from .build_autoarimax import BuildAutoSarimax
from .build_var import BuildVAR
| 31.8
| 47
| 0.849057
| 20
| 159
| 6.55
| 0.55
| 0.274809
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.125786
| 159
| 4
| 48
| 39.75
| 0.942446
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
4b67e31ed9929a8d5e10c397b9885043f24920c7
| 170
|
py
|
Python
|
install/super_prove/lib/pyliveness/__init__.py
|
ljbrooks/superkb_release
|
cd8c476ba687dea3cdd979eb4b1a7bd9471ece66
|
[
"MIT"
] | null | null | null |
install/super_prove/lib/pyliveness/__init__.py
|
ljbrooks/superkb_release
|
cd8c476ba687dea3cdd979eb4b1a7bd9471ece66
|
[
"MIT"
] | null | null | null |
install/super_prove/lib/pyliveness/__init__.py
|
ljbrooks/superkb_release
|
cd8c476ba687dea3cdd979eb4b1a7bd9471ece66
|
[
"MIT"
] | null | null | null |
from stabilizing_constraints import extract_stabilizing_constraints
from liveness_to_safety import extract_liveness_as_safety
from utils import fold_fairness_constraints
| 42.5
| 67
| 0.929412
| 22
| 170
| 6.727273
| 0.545455
| 0.297297
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.070588
| 170
| 3
| 68
| 56.666667
| 0.936709
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
4b9afaa36b8bb43a70fce83537138bc71d4c2082
| 11,476
|
py
|
Python
|
py_client/algorithm_interface_test/trains/test_update_trajectory.py
|
sma-software/openviriato.algorithm-platform.py-client
|
73d4cf89aa6f4d02ab15b5504d92107848742325
|
[
"Apache-2.0"
] | 2
|
2021-06-21T06:50:29.000Z
|
2021-06-30T15:58:02.000Z
|
py_client/algorithm_interface_test/trains/test_update_trajectory.py
|
sma-software/openviriato.algorithm-platform.py-client
|
73d4cf89aa6f4d02ab15b5504d92107848742325
|
[
"Apache-2.0"
] | null | null | null |
py_client/algorithm_interface_test/trains/test_update_trajectory.py
|
sma-software/openviriato.algorithm-platform.py-client
|
73d4cf89aa6f4d02ab15b5504d92107848742325
|
[
"Apache-2.0"
] | null | null | null |
import datetime
import unittest
from unittest import mock
import py_client.algorithm_interface_test.test_helper.SessionMockFactory as SessionMockFactory
from py_client.aidm import UpdateStopTimesTrainPathNode, AlgorithmTrain, AlgorithmTrainPathNode, StopStatus, \
UpdateRunTimesTrainPathSegment
from py_client.algorithm_interface import algorithm_interface_factory
from py_client.algorithm_interface_test.test_helper.SessionMockTestBase import get_api_url, SessionMockTestBase
class TestUpdateTrajectory(unittest.TestCase):
class UpdateTrajectoryTestMockSession(SessionMockTestBase):
def put(self, request, json):
self._last_body = json
self._last_request = request
json_string = ("{ \n"
" \"id\": 2060,\n"
" \"code\": \"TestUpdateTrajectory\","
" \"trainPathNodes\": [\n"
" {\n"
" \"id\": 1332,\n"
" \"sectionTrackId\": null,\n"
" \"nodeId\": 18,\n"
" \"nodeTrackId\": null,\n"
" \"FormationId\": 1187,\n"
" \"arrivalTime\": \"2003-05-01T00:04:00\",\n"
" \"departureTime\": \"2003-05-01T00:05:30\",\n"
" \"minimumRunTime\": null,\n"
" \"minimumStopTime\": \"P0D\",\n"
" \"stopStatus\": \"operationalStop\",\n"
" \"sequenceNumber\": 0\n"
" },\n"
" {\n"
" \"id\": 1696,\n"
" \"sectionTrackId\": 1172,\n"
" \"nodeId\": 10,\n"
" \"nodeTrackId\": null,\n"
" \"FormationId\": null,\n"
" \"arrivalTime\": \"2003-05-01T00:10:30\",\n"
" \"departureTime\": \"2003-05-01T00:10:30\",\n"
" \"minimumRunTime\": \"PT5M\",\n"
" \"minimumStopTime\": \"P0D\",\n"
" \"stopStatus\": \"commercialStop\",\n"
" \"sequenceNumber\": 1\n"
" }\n"
" ],\n"
" \"debugString\": \"Mocked RVZH_1_1_J03 tt_(G)\"\n"
"}")
return SessionMockFactory.create_response_mock(json_string, 200)
@mock.patch('requests.Session', side_effect=UpdateTrajectoryTestMockSession)
def setUp(self, mocked_get_obj):
self.interface_to_viriato = algorithm_interface_factory.create(get_api_url())
@mock.patch('requests.Session', side_effect=UpdateTrajectoryTestMockSession)
def test_update_trajectory_request(self, mocked_get_obj):
train_id = 2060
update_train_stop_time_node = UpdateStopTimesTrainPathNode(train_path_node_id=1332,
arrival_time=datetime.datetime(2003, 5, 1, 0, 4),
departure_time=datetime.datetime(2003, 5, 1, 0, 5),
stop_status=StopStatus.operational_stop,
minimum_stop_time=datetime.timedelta(seconds=30))
self.interface_to_viriato.update_train_trajectory_stop_times(train_id, update_train_stop_time_node)
session_obj = self.interface_to_viriato._AlgorithmInterface__communication_layer.currentSession
self.assertEqual(session_obj.last_request,
get_api_url() + '/trains/2060/train-path-nodes:update-trajectory-stop-times')
self.assertDictEqual(session_obj.last_body,
dict(trainPathNodeId=1332,
arrivalTime="2003-05-01T00:04:00",
departureTime="2003-05-01T00:05:00",
minimumStopTime="PT30S",
stopStatus="operationalStop"))
@mock.patch('requests.Session', side_effect=UpdateTrajectoryTestMockSession)
def test_update_trajectory_response(self, mocked_get_obj):
train_id = 2060
update_train_stop_time_node = UpdateStopTimesTrainPathNode(train_path_node_id=1332,
arrival_time=datetime.datetime(2003, 5, 1, 0, 4),
departure_time=datetime.datetime(2003, 5, 1, 0, 5),
stop_status=StopStatus.operational_stop,
minimum_stop_time=datetime.timedelta(seconds=30))
updated_algorithm_train = self.interface_to_viriato.update_train_trajectory_stop_times(train_id, update_train_stop_time_node)
self.assertIsInstance(updated_algorithm_train, AlgorithmTrain)
self.assertEqual(updated_algorithm_train.debug_string, 'Mocked RVZH_1_1_J03 tt_(G)')
self.assertEqual(updated_algorithm_train.code, "TestUpdateTrajectory")
self.assertEqual(updated_algorithm_train.id, 2060)
self.assertIsInstance(updated_algorithm_train.train_path_nodes, list)
self.assertIsInstance(updated_algorithm_train.train_path_nodes[0], AlgorithmTrainPathNode)
self.assertEqual(updated_algorithm_train.train_path_nodes[0].id, 1332)
self.assertEqual(updated_algorithm_train.train_path_nodes[0].section_track_id, None)
self.assertEqual(updated_algorithm_train.train_path_nodes[0].node_track_id, None)
self.assertEqual(updated_algorithm_train.train_path_nodes[0].formation_id, 1187)
self.assertEqual(updated_algorithm_train.train_path_nodes[0].arrival_time, datetime.datetime(2003, 5, 1, 0, 4))
self.assertEqual(
updated_algorithm_train.train_path_nodes[0].departure_time,
datetime.datetime(2003, 5, 1, 0, 5, 30))
self.assertEqual(updated_algorithm_train.train_path_nodes[0].minimum_run_time, None)
self.assertEqual(updated_algorithm_train.train_path_nodes[0].minimum_stop_time, datetime.timedelta(0))
self.assertEqual(updated_algorithm_train.train_path_nodes[0].stop_status, StopStatus.operational_stop)
@mock.patch('requests.Session', side_effect=UpdateTrajectoryTestMockSession)
def test_update_trajectory_request_with_update_train_path_segment(self, mocked_get_obj):
train_id = 20610
update_train_path_segment = UpdateRunTimesTrainPathSegment(
to_train_path_node_id=1332,
to_node_arrival_time=datetime.datetime(2003, 5, 1, 0, 4),
from_node_departure_time=datetime.datetime(2003, 5, 1, 0, 5),
minimum_run_time=datetime.timedelta(seconds=120))
self.interface_to_viriato.update_train_trajectory_run_times(train_id, update_train_path_segment)
session_obj = self.interface_to_viriato._AlgorithmInterface__communication_layer.currentSession
self.assertEqual(session_obj.last_request,
get_api_url() + '/trains/20610/train-path-nodes:update-trajectory-run-times')
self.assertDictEqual(session_obj.last_body,
dict(toTrainPathNodeId=1332,
toNodeArrivalTime="2003-05-01T00:04:00",
fromNodeDepartureTime="2003-05-01T00:05:00",
minimumRunTime="PT2M"))
@mock.patch('requests.Session', side_effect=UpdateTrajectoryTestMockSession)
def test_update_trajectory_request_with_update_train_path_segment_minimum_run_time_none(self, mocked_get_obj):
train_id = 2062
update_train_path_segment = UpdateRunTimesTrainPathSegment(
to_train_path_node_id=1332,
to_node_arrival_time=datetime.datetime(2003, 5, 1, 0, 4),
from_node_departure_time=datetime.datetime(2003, 5, 1, 0, 5),
minimum_run_time=None)
self.interface_to_viriato.update_train_trajectory_run_times(train_id, update_train_path_segment)
session_obj = self.interface_to_viriato._AlgorithmInterface__communication_layer.currentSession
self.assertEqual(session_obj.last_request,
get_api_url() + '/trains/2062/train-path-nodes:update-trajectory-run-times')
self.assertDictEqual(session_obj.last_body,
dict(toTrainPathNodeId=1332,
toNodeArrivalTime="2003-05-01T00:04:00",
fromNodeDepartureTime="2003-05-01T00:05:00",
minimumRunTime=None))
@mock.patch('requests.Session', side_effect=UpdateTrajectoryTestMockSession)
def test_update_trajectory_response_with_update_train_path_segment(self, mocked_get_obj):
train_id = 2060
update_train_path_segment = UpdateRunTimesTrainPathSegment(
to_train_path_node_id=1332,
to_node_arrival_time=datetime.datetime(2003, 5, 1, 0, 4),
from_node_departure_time=datetime.datetime(2003, 5, 1, 0, 5),
minimum_run_time=None)
updated_algorithm_train = self.interface_to_viriato.update_train_trajectory_run_times(
train_id,
update_train_path_segment)
self.assertIsInstance(updated_algorithm_train, AlgorithmTrain)
self.assertEqual(updated_algorithm_train.debug_string, 'Mocked RVZH_1_1_J03 tt_(G)')
self.assertEqual(updated_algorithm_train.code, "TestUpdateTrajectory")
self.assertEqual(updated_algorithm_train.id, 2060)
self.assertIsInstance(updated_algorithm_train.train_path_nodes, list)
self.assertIsInstance(updated_algorithm_train.train_path_nodes[0], AlgorithmTrainPathNode)
self.assertEqual(updated_algorithm_train.train_path_nodes[0].id, 1332)
self.assertEqual(updated_algorithm_train.train_path_nodes[0].section_track_id, None)
self.assertEqual(updated_algorithm_train.train_path_nodes[0].node_track_id, None)
self.assertEqual(updated_algorithm_train.train_path_nodes[0].formation_id, 1187)
self.assertEqual(updated_algorithm_train.train_path_nodes[0].arrival_time, datetime.datetime(2003, 5, 1, 0, 4))
self.assertEqual(
updated_algorithm_train.train_path_nodes[0].departure_time,
datetime.datetime(2003, 5, 1, 0, 5, 30))
self.assertEqual(updated_algorithm_train.train_path_nodes[0].minimum_run_time, None)
self.assertEqual(updated_algorithm_train.train_path_nodes[0].minimum_stop_time, datetime.timedelta(0))
self.assertEqual(updated_algorithm_train.train_path_nodes[0].stop_status, StopStatus.operational_stop)
@mock.patch('requests.Session', side_effect=UpdateTrajectoryTestMockSession)
def tearDown(self, mocked_get_obj) -> None:
self.interface_to_viriato.__exit__(None, None, None)
| 63.403315
| 134
| 0.610491
| 1,142
| 11,476
| 5.78021
| 0.119089
| 0.053174
| 0.101803
| 0.11271
| 0.850174
| 0.800182
| 0.783972
| 0.769126
| 0.751856
| 0.75125
| 0
| 0.052416
| 0.298449
| 11,476
| 180
| 135
| 63.755556
| 0.767482
| 0
| 0
| 0.572327
| 0
| 0
| 0.07994
| 0.015315
| 0
| 0
| 0
| 0
| 0.226415
| 1
| 0.050314
| false
| 0
| 0.044025
| 0
| 0.113208
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
29a020686cc5c5a57c7c2d502cd9da8c6bf64ffe
| 3,008
|
py
|
Python
|
pykg2vec/test/test_hp_loader.py
|
baxtree/pykg2vec
|
59498ed5aae7cbe44f881b2c807fb02f1b53999d
|
[
"MIT"
] | 430
|
2019-04-17T19:04:25.000Z
|
2022-03-31T12:20:18.000Z
|
pykg2vec/test/test_hp_loader.py
|
KonstantinKlepikov/pykg2vec
|
658b70a54a371f79252550b0cad7e19578198505
|
[
"MIT"
] | 102
|
2019-05-11T04:29:57.000Z
|
2022-02-16T12:56:28.000Z
|
pykg2vec/test/test_hp_loader.py
|
KonstantinKlepikov/pykg2vec
|
658b70a54a371f79252550b0cad7e19578198505
|
[
"MIT"
] | 102
|
2019-06-11T08:40:38.000Z
|
2022-03-27T09:36:13.000Z
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
This module is for testing unit functions of the hyperparameter loader
"""
import os
import pytest
from pykg2vec.common import KGEArgParser
from pykg2vec.common import HyperparameterLoader
def test_load_default_hyperparameter_file():
hp_loader = HyperparameterLoader(KGEArgParser().get_args([]))
hyperparams = hp_loader.load_hyperparameter("freebase15k", "analogy")
search_space = hp_loader.load_search_space("analogy")
assert hyperparams["learning_rate"] == 0.1
assert hyperparams["hidden_size"] == 200
assert str(search_space["epochs"].inputs()[1]) == "0 Literal{10}"
def test_load_custom_hyperparameter_file():
custom_hyperparamter_file = os.path.join(os.path.dirname(__file__), "resource", "custom_hyperparams", "custom_hpf.yaml")
custom_ss_file = os.path.join(os.path.dirname(__file__), "resource", "custom_hyperparams", "custom_ssf.yaml")
hp_loader = HyperparameterLoader(KGEArgParser().get_args(["-hpf", custom_hyperparamter_file, "-ssf", custom_ss_file]))
hyperparams = hp_loader.load_hyperparameter("freebase15k", "analogy")
search_space = hp_loader.load_search_space("analogy")
assert hyperparams["learning_rate"] == 0.01
assert hyperparams["hidden_size"] == 200
assert str(search_space["epochs"].inputs()[1]) == "0 Literal{100}"
def test_exception_on_hyperparameter_file_not_exist():
with pytest.raises(FileNotFoundError) as e:
hp_loader = HyperparameterLoader(KGEArgParser().get_args(["-hpf", "not_exist_file"]))
hp_loader.load_hyperparameter("freebase15k", "analogy")
assert str(e.value) == "Cannot find configuration file not_exist_file"
def test_exception_on_search_space_file_not_exist():
with pytest.raises(FileNotFoundError) as e:
hp_loader = HyperparameterLoader(KGEArgParser().get_args(["-ssf", "not_exist_file"]))
hp_loader.load_search_space("analogy")
assert str(e.value) == "Cannot find configuration file not_exist_file"
def test_exception_on_hyperparameter_file_with_wrong_extension():
custom_hyperparamter_file = os.path.join(os.path.dirname(__file__), "resource", "custom_hyperparams", "custom.txt")
with pytest.raises(ValueError) as e:
hp_loader = HyperparameterLoader(KGEArgParser().get_args(["-hpf", custom_hyperparamter_file]))
hp_loader.load_hyperparameter("freebase15k", "analogy")
assert str(e.value) == "Configuration file must have .yaml or .yml extension: %s" % custom_hyperparamter_file
def test_exception_on_search_space_file_with_wrong_extension():
custom_hyperparamter_file = os.path.join(os.path.dirname(__file__), "resource", "custom_hyperparams", "custom.txt")
with pytest.raises(ValueError) as e:
hp_loader = HyperparameterLoader(KGEArgParser().get_args(["-ssf", custom_hyperparamter_file]))
hp_loader.load_search_space("analogy")
assert str(e.value) == "Configuration file must have .yaml or .yml extension: %s" % custom_hyperparamter_file
| 50.133333
| 124
| 0.751995
| 376
| 3,008
| 5.680851
| 0.228723
| 0.052434
| 0.044944
| 0.11236
| 0.859082
| 0.859082
| 0.799157
| 0.799157
| 0.780431
| 0.776217
| 0
| 0.011756
| 0.123338
| 3,008
| 59
| 125
| 50.983051
| 0.798256
| 0.037566
| 0
| 0.47619
| 0
| 0
| 0.206168
| 0
| 0
| 0
| 0
| 0
| 0.238095
| 1
| 0.142857
| false
| 0
| 0.095238
| 0
| 0.238095
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
29b26df998f0970a50ae05d98bf6fa7a56ec0af0
| 44
|
py
|
Python
|
Core/__init__.py
|
Erosion2020/SpaceCore
|
ba81bf1913461a200f9e88acb7d0d91d7deda8e8
|
[
"MIT"
] | 4
|
2022-03-22T08:21:52.000Z
|
2022-03-23T12:58:17.000Z
|
Core/__init__.py
|
Erosion2020/SpaceCore
|
ba81bf1913461a200f9e88acb7d0d91d7deda8e8
|
[
"MIT"
] | null | null | null |
Core/__init__.py
|
Erosion2020/SpaceCore
|
ba81bf1913461a200f9e88acb7d0d91d7deda8e8
|
[
"MIT"
] | null | null | null |
import Core.Start
start = Core.Start.start
| 11
| 24
| 0.772727
| 7
| 44
| 4.857143
| 0.428571
| 0.529412
| 0.823529
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.136364
| 44
| 3
| 25
| 14.666667
| 0.894737
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 1
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
29b817621419b23c57b1f8ee7a56ed180b4a0dcf
| 2,148
|
py
|
Python
|
benchmarks/test_headless_time.py
|
TheRakeshPurohit/wasmer-python
|
2375974d9dc50a2caf29fdd9e07d49fd94537e03
|
[
"MIT"
] | 900
|
2019-04-11T01:52:10.000Z
|
2020-09-02T11:09:14.000Z
|
benchmarks/test_headless_time.py
|
TheRakeshPurohit/wasmer-python
|
2375974d9dc50a2caf29fdd9e07d49fd94537e03
|
[
"MIT"
] | 172
|
2019-04-15T18:04:55.000Z
|
2020-09-01T15:20:06.000Z
|
benchmarks/test_headless_time.py
|
TheRakeshPurohit/wasmer-python
|
2375974d9dc50a2caf29fdd9e07d49fd94537e03
|
[
"MIT"
] | 28
|
2019-04-11T02:49:04.000Z
|
2020-08-27T09:47:49.000Z
|
from wasmer import engine, Store, Module, Instance
from wasmer_compiler_cranelift import Compiler as Cranelift
from wasmer_compiler_llvm import Compiler as LLVM
from wasmer_compiler_singlepass import Compiler as Singlepass
TEST_BYTES = open('benchmarks/nbody.wasm', 'rb').read()
def test_benchmark_headless_time_nbody_cranelift_jit(benchmark):
store = Store(engine.JIT(Cranelift))
module = Module(store, TEST_BYTES)
serialized = module.serialize()
@benchmark
def bench():
deserialized = Module.deserialize(store, serialized)
_ = Instance(deserialized)
def test_benchmark_headless_time_nbody_cranelift_native(benchmark):
store = Store(engine.Native(Cranelift))
module = Module(store, TEST_BYTES)
serialized = module.serialize()
@benchmark
def bench():
deserialized = Module.deserialize(store, serialized)
_ = Instance(deserialized)
def test_benchmark_headless_time_nbody_llvm_jit(benchmark):
store = Store(engine.JIT(LLVM))
module = Module(store, TEST_BYTES)
serialized = module.serialize()
@benchmark
def bench():
deserialized = Module.deserialize(store, serialized)
_ = Instance(deserialized)
def test_benchmark_headless_time_nbody_llvm_native(benchmark):
store = Store(engine.Native(LLVM))
module = Module(store, TEST_BYTES)
serialized = module.serialize()
@benchmark
def bench():
deserialized = Module.deserialize(store, serialized)
_ = Instance(deserialized)
def test_benchmark_headless_time_nbody_singlepass_jit(benchmark):
store = Store(engine.JIT(Singlepass))
module = Module(store, TEST_BYTES)
serialized = module.serialize()
@benchmark
def bench():
deserialized = Module.deserialize(store, serialized)
_ = Instance(deserialized)
def test_benchmark_headless_time_nbody_singlepass_native(benchmark):
store = Store(engine.Native(Singlepass))
module = Module(store, TEST_BYTES)
serialized = module.serialize()
@benchmark
def bench():
deserialized = Module.deserialize(store, serialized)
_ = Instance(deserialized)
| 32.059701
| 68
| 0.729981
| 231
| 2,148
| 6.549784
| 0.134199
| 0.041639
| 0.06345
| 0.095175
| 0.849967
| 0.849967
| 0.715135
| 0.681428
| 0.681428
| 0.681428
| 0
| 0
| 0.181564
| 2,148
| 66
| 69
| 32.545455
| 0.860637
| 0
| 0
| 0.679245
| 0
| 0
| 0.010708
| 0.009777
| 0
| 0
| 0
| 0
| 0
| 1
| 0.226415
| false
| 0.09434
| 0.075472
| 0
| 0.301887
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 6
|
29d8a001d6a7ae114b79cc77fcb1a2d992438b85
| 620
|
py
|
Python
|
bk_monitor/utils/data_name_builder.py
|
qqqqqie/bk-log
|
1765f1901aafaa6fb6a57b8db5d35dd32b3cb5c1
|
[
"MIT"
] | 75
|
2021-07-14T09:32:36.000Z
|
2022-03-31T15:26:53.000Z
|
bk_monitor/utils/data_name_builder.py
|
qqqqqie/bk-log
|
1765f1901aafaa6fb6a57b8db5d35dd32b3cb5c1
|
[
"MIT"
] | 561
|
2021-07-14T07:45:47.000Z
|
2022-03-31T11:41:28.000Z
|
bk_monitor/utils/data_name_builder.py
|
qqqqqie/bk-log
|
1765f1901aafaa6fb6a57b8db5d35dd32b3cb5c1
|
[
"MIT"
] | 41
|
2021-07-14T07:39:50.000Z
|
2022-03-25T09:22:18.000Z
|
# -*- coding: utf-8 -*-
class DataNameBuilder(object):
"""
data_name等拼接工具
"""
def __init__(self, data_name, bk_biz_id, data_name_prefix):
self.data_name = data_name
self.bk_biz_id = bk_biz_id
self.data_name_prefix = data_name_prefix
@property
def name(self):
return f"{self.data_name_prefix}_{self.data_name}"
@property
def time_series_group_name(self):
return f"{self.data_name_prefix}_{self.data_name}"
@property
def table_id(self):
return f"{self.bk_biz_id}_{self.data_name_prefix}_{self.data_name}.base".replace("-", "_")
| 24.8
| 98
| 0.656452
| 87
| 620
| 4.218391
| 0.287356
| 0.26158
| 0.294278
| 0.196185
| 0.544959
| 0.544959
| 0.474114
| 0.305177
| 0.305177
| 0.305177
| 0
| 0.002058
| 0.216129
| 620
| 24
| 99
| 25.833333
| 0.753086
| 0.059677
| 0
| 0.357143
| 0
| 0
| 0.253968
| 0.250441
| 0
| 0
| 0
| 0
| 0
| 1
| 0.285714
| false
| 0
| 0
| 0.214286
| 0.571429
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 6
|
4b1b4518f4b6df57a383c8b9ad9886d33d8473c4
| 35
|
py
|
Python
|
WeatherCrawler/__init__.py
|
Venivedivici/WeatherCrawler
|
0070b3a7555551f5fae04cfc22251b8b2761b9ca
|
[
"MIT"
] | 1
|
2019-11-18T09:33:22.000Z
|
2019-11-18T09:33:22.000Z
|
WeatherCrawler/__init__.py
|
Venivedivici/WeatherCrawler
|
0070b3a7555551f5fae04cfc22251b8b2761b9ca
|
[
"MIT"
] | null | null | null |
WeatherCrawler/__init__.py
|
Venivedivici/WeatherCrawler
|
0070b3a7555551f5fae04cfc22251b8b2761b9ca
|
[
"MIT"
] | 1
|
2019-11-18T09:33:07.000Z
|
2019-11-18T09:33:07.000Z
|
from .Crawler import WeatherCrawler
| 35
| 35
| 0.885714
| 4
| 35
| 7.75
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.085714
| 35
| 1
| 35
| 35
| 0.96875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
d99a377f59eb4b1df89ef9046ac2390c669714ea
| 162
|
py
|
Python
|
virtual_finance_api/compat/yfinance/endpoints/__init__.py
|
hootnot/virtual-yahoofinance-REST-API
|
3246d3f4c14821e4ef0f9de57dd759cf03f42681
|
[
"Apache-2.0"
] | 1
|
2022-03-18T08:27:34.000Z
|
2022-03-18T08:27:34.000Z
|
virtual_finance_api/compat/yfinance/endpoints/__init__.py
|
hootnot/virtual-yahoofinance-REST-API
|
3246d3f4c14821e4ef0f9de57dd759cf03f42681
|
[
"Apache-2.0"
] | null | null | null |
virtual_finance_api/compat/yfinance/endpoints/__init__.py
|
hootnot/virtual-yahoofinance-REST-API
|
3246d3f4c14821e4ef0f9de57dd759cf03f42681
|
[
"Apache-2.0"
] | 1
|
2021-06-18T02:14:03.000Z
|
2021-06-18T02:14:03.000Z
|
# -*- coding: utf-8 -*-
from .bundle import Financials, History, Holders, Options, Profile
__all__ = ("Financials", "History", "Holders", "Options", "Profile")
| 27
| 68
| 0.67284
| 17
| 162
| 6.176471
| 0.705882
| 0.32381
| 0.457143
| 0.590476
| 0.72381
| 0
| 0
| 0
| 0
| 0
| 0
| 0.007143
| 0.135802
| 162
| 5
| 69
| 32.4
| 0.742857
| 0.12963
| 0
| 0
| 0
| 0
| 0.273381
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
d9a2b76266ce4dfcd0a606870c00ff37bc7cd936
| 6,436
|
py
|
Python
|
tests/test_layers.py
|
lauxley/kraken
|
091e902d5c2c20066a2fe25a4df656268a3c928b
|
[
"Apache-2.0"
] | null | null | null |
tests/test_layers.py
|
lauxley/kraken
|
091e902d5c2c20066a2fe25a4df656268a3c928b
|
[
"Apache-2.0"
] | null | null | null |
tests/test_layers.py
|
lauxley/kraken
|
091e902d5c2c20066a2fe25a4df656268a3c928b
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
import unittest
from nose.tools import raises
import torch
from kraken.lib import layers
class TestLayers(unittest.TestCase):
"""
Testing custom layer implementations.
"""
def setUp(self):
torch.set_grad_enabled(False)
def test_maxpool(self):
"""
Test maximum pooling layer.
"""
mp = layers.MaxPool((3, 3), (2, 2))
o = mp(torch.randn(1, 2, 32, 64))
self.assertEqual(o[0].shape, (1, 2, 15, 31))
def test_1d_dropout(self):
"""
Test 1d dropout layer.
"""
do = layers.Dropout(0.2, 1)
o = do(torch.randn(1, 2, 32, 64))
self.assertEqual(o[0].shape, (1, 2, 32, 64))
def test_2d_dropout(self):
"""
Test 2d dropout layer.
"""
do = layers.Dropout(0.2, 2)
o = do(torch.randn(1, 2, 32, 64))
self.assertEqual(o[0].shape, (1, 2, 32, 64))
def test_forward_rnn_layer_x(self):
"""
Test unidirectional RNN layer in x-dimension.
"""
rnn = layers.TransposedSummarizingRNN(10, 2, 'f', False, False)
o = rnn(torch.randn(1, 10, 32, 64))
self.assertEqual(o[0].shape, (1, 2, 32, 64))
def test_forward_rnn_layer_y(self):
"""
Test unidirectional RNN layer in y-dimension.
"""
rnn = layers.TransposedSummarizingRNN(10, 2, 'f', True, False)
o = rnn(torch.randn(1, 10, 32, 64))
self.assertEqual(o[0].shape, (1, 2, 32, 64))
def test_forward_rnn_layer_x_summarize(self):
"""
Test unidirectional summarizing RNN layer in x-dimension.
"""
rnn = layers.TransposedSummarizingRNN(10, 2, 'f', False, True)
o = rnn(torch.randn(1, 10, 32, 64))
self.assertEqual(o[0].shape, (1, 2, 32, 1))
def test_forward_rnn_layer_y_summarize(self):
"""
Test unidirectional summarizing RNN layer in y-dimension.
"""
rnn = layers.TransposedSummarizingRNN(10, 2, 'f', True, True)
o = rnn(torch.randn(1, 10, 32, 64))
self.assertEqual(o[0].shape, (1, 2, 1, 64))
def test_bidi_rnn_layer_x(self):
"""
Test bidirectional RNN layer in x-dimension.
"""
rnn = layers.TransposedSummarizingRNN(10, 2, 'b', False, False)
o = rnn(torch.randn(1, 10, 32, 64))
self.assertEqual(o[0].shape, (1, 4, 32, 64))
def test_bidi_rnn_layer_y(self):
"""
Test bidirectional RNN layer in y-dimension.
"""
rnn = layers.TransposedSummarizingRNN(10, 2, 'b', True, False)
o = rnn(torch.randn(1, 10, 32, 64))
self.assertEqual(o[0].shape, (1, 4, 32, 64))
def test_bidi_rnn_layer_x_summarize(self):
"""
Test bidirectional summarizing RNN layer in x-dimension.
"""
rnn = layers.TransposedSummarizingRNN(10, 2, 'b', False, True)
o = rnn(torch.randn(1, 10, 32, 64))
self.assertEqual(o[0].shape, (1, 4, 32, 1))
def test_bidi_rnn_layer_y_summarize(self):
"""
Test bidirectional summarizing RNN layer in y-dimension.
"""
rnn = layers.TransposedSummarizingRNN(10, 2, 'b', True, True)
o = rnn(torch.randn(1, 10, 32, 64))
self.assertEqual(o[0].shape, (1, 4, 1, 64))
def test_linsoftmax(self):
"""
Test basic function of linear layer.
"""
lin = layers.LinSoftmax(20, 10)
o = lin(torch.randn(1, 20, 12, 24))
self.assertEqual(o[0].shape, (1, 10, 12, 24))
def test_linsoftmax_train(self):
"""
Test function of linear layer in training mode (log_softmax)
"""
lin = layers.LinSoftmax(20, 10).train()
o = lin(torch.randn(1, 20, 12, 24))
self.assertLess(o[0].max(), 0)
def test_linsoftmax_test(self):
"""
Test function of linear layer in eval mode (softmax)
"""
lin = layers.LinSoftmax(20, 10).eval()
o = lin(torch.randn(1, 20, 12, 24))
self.assertGreaterEqual(o[0].min(), 0)
def test_linsoftmax_aug(self):
"""
Test basic function of linear layer with 1-augmentation.
"""
lin = layers.LinSoftmax(20, 10, True)
o = lin(torch.randn(1, 20, 12, 24))
self.assertEqual(o[0].shape, (1, 10, 12, 24))
def test_linsoftmax_aug_train(self):
"""
Test function of linear layer in training mode (log_softmax) with 1-augmentation
"""
lin = layers.LinSoftmax(20, 10, True).train()
o = lin(torch.randn(1, 20, 12, 24))
self.assertLess(o[0].max(), 0)
def test_linsoftmax_aug_test(self):
"""
Test function of linear layer in eval mode (softmax) with 1-augmentation
"""
lin = layers.LinSoftmax(20, 10, True).eval()
o = lin(torch.randn(1, 20, 12, 24))
self.assertGreaterEqual(o[0].min(), 0)
def test_actconv2d_lin(self):
"""
Test convolutional layer without activation.
"""
conv = layers.ActConv2D(5, 12, (3, 3), (1, 1), 'l')
o = conv(torch.randn(1, 5, 24, 12))
self.assertEqual(o[0].shape, (1, 12, 24, 12))
def test_actconv2d_sigmoid(self):
    """
    Sigmoid-activated convolution keeps every output inside [0, 1].
    """
    layer = layers.ActConv2D(5, 12, (3, 3), (1, 1), 's')
    out = layer(torch.randn(1, 5, 24, 12))[0]
    self.assertTrue(0 <= out.min() <= 1)
    self.assertTrue(0 <= out.max() <= 1)
def test_actconv2d_tanh(self):
    """
    Tanh-activated convolution keeps every output inside [-1, 1].
    """
    layer = layers.ActConv2D(5, 12, (3, 3), (1, 1), 't')
    out = layer(torch.randn(1, 5, 24, 12))[0]
    self.assertTrue(-1 <= out.min() <= 1)
    self.assertTrue(-1 <= out.max() <= 1)
def test_actconv2d_softmax(self):
    """
    Softmax-activated convolution keeps every output inside [0, 1].
    """
    layer = layers.ActConv2D(5, 12, (3, 3), (1, 1), 'm')
    out = layer(torch.randn(1, 5, 24, 12))[0]
    self.assertTrue(0 <= out.min() <= 1)
    self.assertTrue(0 <= out.max() <= 1)
def test_actconv2d_relu(self):
    """
    ReLU-activated convolution never produces negative values.
    """
    layer = layers.ActConv2D(5, 12, (3, 3), (1, 1), 'r')
    out = layer(torch.randn(1, 5, 24, 12))[0]
    self.assertLessEqual(0, out.min())
    self.assertLessEqual(0, out.max())
| 32.670051
| 88
| 0.555469
| 873
| 6,436
| 4.017182
| 0.117984
| 0.014827
| 0.069005
| 0.067864
| 0.872541
| 0.815227
| 0.753636
| 0.704876
| 0.670088
| 0.643285
| 0
| 0.084211
| 0.291485
| 6,436
| 196
| 89
| 32.836735
| 0.684868
| 0.178216
| 0
| 0.373737
| 0
| 0
| 0.002727
| 0
| 0
| 0
| 0
| 0
| 0.262626
| 1
| 0.232323
| false
| 0
| 0.040404
| 0
| 0.282828
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
d9dec55abc25d0667a679e0acc3dfaf2c34915e1
| 51,951
|
py
|
Python
|
model_docsum.py
|
EdinburghNLP/Refresh
|
8c2d25e9f770529e0ceb8909b452a080e94fc7cd
|
[
"BSD-3-Clause"
] | 265
|
2018-04-23T13:13:11.000Z
|
2021-12-08T11:24:56.000Z
|
model_docsum.py
|
trivago/Refresh
|
ef721d36e3e700c76fa1afd20116c800c83bc20d
|
[
"BSD-3-Clause"
] | 38
|
2018-04-25T12:05:32.000Z
|
2021-06-11T09:20:03.000Z
|
model_docsum.py
|
shashiongithub/Refresh
|
8c2d25e9f770529e0ceb8909b452a080e94fc7cd
|
[
"BSD-3-Clause"
] | 58
|
2018-06-08T13:20:14.000Z
|
2021-11-16T15:24:05.000Z
|
####################################
# Author: Shashi Narayan
# Date: September 2016
# Project: Document Summarization
# H2020 Summa Project
####################################
"""
Document Summarization Modules and Models
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
import tensorflow as tf
from tensorflow.python.ops import variable_scope
from tensorflow.python.ops import seq2seq
from tensorflow.python.ops import math_ops
# from tf.nn import variable_scope
from my_flags import FLAGS
from model_utils import *
### Various types of extractor
def sentence_extractor_nonseqrnn_noatt(sents_ext, encoder_state):
    """Sentence extractor: non-sequential RNN, no attention.

    Args:
      sents_ext: Embedding of sentences to label for extraction
      encoder_state: encoder_state

    Returns:
      extractor output and logits (no softmax applied)
    """
    # Projection parameters turning RNN states into per-sentence label scores.
    weight = variable_on_cpu('weight', [FLAGS.size, FLAGS.target_label_size], tf.random_normal_initializer())
    bias = variable_on_cpu('bias', [FLAGS.target_label_size], tf.random_normal_initializer())
    # Plain RNN over the sentence list, seeded with the document encoder state.
    rnn_extractor_output, _ = simple_rnn(sents_ext, initial_state=encoder_state)
    with variable_scope.variable_scope("Reshape-Out"):
        # List of [batch, size] steps -> [batch, max_doc_length, size].
        rnn_extractor_output = reshape_list2tensor(rnn_extractor_output, FLAGS.max_doc_length, FLAGS.size)
        # Flatten, project to label scores, restore the per-document axis.
        flat_states = tf.reshape(rnn_extractor_output, [-1, FLAGS.size])
        logits = tf.reshape(tf.matmul(flat_states, weight) + bias,
                            [-1, FLAGS.max_doc_length, FLAGS.target_label_size])
    return rnn_extractor_output, logits
def sentence_extractor_nonseqrnn_titimgatt(sents_ext, encoder_state, titleimages):
    """Sentence extractor: non-sequential RNN attending over title/image embeddings.

    Args:
      sents_ext: Embedding of sentences to label for extraction
      encoder_state: encoder_state
      titleimages: Embeddings of title and images in the document

    Returns:
      extractor output and logits (no softmax applied)
    """
    # Projection parameters turning RNN states into per-sentence label scores.
    weight = variable_on_cpu('weight', [FLAGS.size, FLAGS.target_label_size], tf.random_normal_initializer())
    bias = variable_on_cpu('bias', [FLAGS.target_label_size], tf.random_normal_initializer())
    # Attentional RNN: each step may attend over the title/image embeddings.
    rnn_extractor_output, _ = simple_attentional_rnn(sents_ext, titleimages, initial_state=encoder_state)
    with variable_scope.variable_scope("Reshape-Out"):
        # List of [batch, size] steps -> [batch, max_doc_length, size].
        rnn_extractor_output = reshape_list2tensor(rnn_extractor_output, FLAGS.max_doc_length, FLAGS.size)
        # Flatten, project to label scores, restore the per-document axis.
        flat_states = tf.reshape(rnn_extractor_output, [-1, FLAGS.size])
        logits = tf.reshape(tf.matmul(flat_states, weight) + bias,
                            [-1, FLAGS.max_doc_length, FLAGS.target_label_size])
    return rnn_extractor_output, logits
def sentence_extractor_seqrnn_docatt(sents_ext, encoder_outputs, encoder_state, sents_labels):
    """Implements Sentence Extractor: Sequential RNN with attention over sentences during encoding
    Args:
      sents_ext: Embedding of sentences to label for extraction
      encoder_outputs, encoder_state: outputs/final state of the document encoder
      sents_labels: Gold sent labels for training
    Returns:
      extractor output and logits (no softmax applied)
    """
    # Define MLP Variables
    # 'h1' takes 2*FLAGS.size inputs — presumably the decoder state concatenated
    # with an attention context; the exact concatenation happens inside
    # jporg_attentional_seqrnn_decoder (TODO confirm against model_utils).
    weights = {
        'h1': variable_on_cpu('weight_1', [2*FLAGS.size, FLAGS.size], tf.random_normal_initializer()),
        'h2': variable_on_cpu('weight_2', [FLAGS.size, FLAGS.size], tf.random_normal_initializer()),
        'out': variable_on_cpu('weight_out', [FLAGS.size, FLAGS.target_label_size], tf.random_normal_initializer())
    }
    biases = {
        'b1': variable_on_cpu('bias_1', [FLAGS.size], tf.random_normal_initializer()),
        'b2': variable_on_cpu('bias_2', [FLAGS.size], tf.random_normal_initializer()),
        'out': variable_on_cpu('bias_out', [FLAGS.target_label_size], tf.random_normal_initializer())
    }
    # Shift sents_ext for RNN: the decoder input at step t is sentence t-1.
    with variable_scope.variable_scope("Shift-SentExt"):
        # Create embeddings for special symbol (lets assume all 0) and put in the front by shifting by one
        special_tensor = tf.zeros_like(sents_ext[0])  # tf.ones_like(sents_ext[0])
        sents_ext_shifted = [special_tensor] + sents_ext[:-1]
    # Reshape sents_labels for RNN (Only used for cross entropy training)
    with variable_scope.variable_scope("Reshape-Label"):
        # only used for training
        sents_labels = reshape_tensor2list(sents_labels, FLAGS.max_doc_length, FLAGS.target_label_size)
    # Define Sequential Decoder: per-step outputs and logits as Python lists.
    extractor_outputs, logits = jporg_attentional_seqrnn_decoder(sents_ext_shifted, encoder_outputs, encoder_state, sents_labels, weights, biases)
    # Final logits without softmax
    with variable_scope.variable_scope("Reshape-Out"):
        logits = reshape_list2tensor(logits, FLAGS.max_doc_length, FLAGS.target_label_size)
        # Decoder states are 2*FLAGS.size wide, matching the 'h1' weight above.
        extractor_outputs = reshape_list2tensor(extractor_outputs, FLAGS.max_doc_length, 2*FLAGS.size)
    return extractor_outputs, logits
def policy_network(vocab_embed_variable, document_placeholder, label_placeholder):
    """Build the policy core network.

    Args:
      vocab_embed_variable: [vocab_size, FLAGS.wordembed_size], embeddings without PAD and UNK
      document_placeholder: [None, (FLAGS.max_doc_length + FLAGS.max_title_length + FLAGS.max_image_length), FLAGS.max_sent_length]
      label_placeholder: Gold label [None, FLAGS.max_doc_length, FLAGS.target_label_size], only used during cross entropy training of JP's model.

    Returns:
      Outputs of sentence extractor and logits without softmax
    """
    with tf.variable_scope('PolicyNetwork') as scope:
        ### Full Word embedding Lookup Variable
        # PADDING embedding non-trainable
        pad_embed_variable = variable_on_cpu("pad_embed", [1, FLAGS.wordembed_size], tf.constant_initializer(0), trainable=False)
        # UNK embedding trainable
        unk_embed_variable = variable_on_cpu("unk_embed", [1, FLAGS.wordembed_size], tf.constant_initializer(0), trainable=True)
        # Full table rows are ordered [PAD, UNK, vocab...] so placeholder ids 0/1
        # hit PAD/UNK respectively.
        fullvocab_embed_variable = tf.concat(0, [pad_embed_variable, unk_embed_variable, vocab_embed_variable])

        ### Lookup layer: word ids -> word embeddings
        with tf.variable_scope('Lookup') as scope:
            document_placeholder_flat = tf.reshape(document_placeholder, [-1])
            document_word_embedding = tf.nn.embedding_lookup(fullvocab_embed_variable, document_placeholder_flat, name="Lookup")
            document_word_embedding = tf.reshape(document_word_embedding,
                                                 [-1, (FLAGS.max_doc_length + FLAGS.max_title_length + FLAGS.max_image_length),
                                                  FLAGS.max_sent_length, FLAGS.wordembed_size])

        ### Convolution Layer: word embeddings -> one embedding per sentence
        with tf.variable_scope('ConvLayer') as scope:
            document_word_embedding = tf.reshape(document_word_embedding, [-1, FLAGS.max_sent_length, FLAGS.wordembed_size])
            document_sent_embedding = conv1d_layer_sentence_representation(document_word_embedding)  # [None, sentembed_size]
            document_sent_embedding = tf.reshape(document_sent_embedding,
                                                 [-1, (FLAGS.max_doc_length + FLAGS.max_title_length + FLAGS.max_image_length),
                                                  FLAGS.sentembed_size])

        ### Reshape Tensor to List: [-1, total_length, sentembed_size] -> List of [-1, sentembed_size]
        with variable_scope.variable_scope("ReshapeDoc_TensorToList"):
            document_sent_embedding = reshape_tensor2list(document_sent_embedding, (FLAGS.max_doc_length + FLAGS.max_title_length + FLAGS.max_image_length), FLAGS.sentembed_size)

        # Encoder input (optionally fed in reverse order).
        document_sents_enc = document_sent_embedding[:FLAGS.max_doc_length]
        if FLAGS.doc_encoder_reverse:
            document_sents_enc = document_sents_enc[::-1]
        # Extractor input (always original order).
        document_sents_ext = document_sent_embedding[:FLAGS.max_doc_length]
        # Title/image embeddings, empty when the data has none.
        document_sents_titimg = document_sent_embedding[FLAGS.max_doc_length:]

        ### Document Encoder
        with tf.variable_scope('DocEnc') as scope:
            encoder_outputs, encoder_state = simple_rnn(document_sents_enc)

        ### Sentence Label Extractor: pick one of four extractor variants.
        with tf.variable_scope('SentExt') as scope:
            if (FLAGS.attend_encoder) and (len(document_sents_titimg) != 0):
                # Multiple decoder: not implemented — hard stop (NOTE: exits with
                # status 0, so callers/scripts will not see a failure code).
                print("Multiple decoder is not implement yet.")
                exit(0)
            elif (not FLAGS.attend_encoder) and (len(document_sents_titimg) != 0):
                # Attend only titimages during decoding
                extractor_output, logits = sentence_extractor_nonseqrnn_titimgatt(document_sents_ext, encoder_state, document_sents_titimg)
            elif (FLAGS.attend_encoder) and (len(document_sents_titimg) == 0):
                # JP model: attend encoder
                # BUGFIX: this branch assigned 'extractor_outputs' (plural) while
                # the function returns 'extractor_output', raising NameError
                # whenever this path was taken.
                extractor_output, logits = sentence_extractor_seqrnn_docatt(document_sents_ext, encoder_outputs, encoder_state, label_placeholder)
            else:
                # Attend nothing
                extractor_output, logits = sentence_extractor_nonseqrnn_noatt(document_sents_ext, encoder_state)
    return extractor_output, logits
def baseline_future_reward_estimator(extractor_output):
    """Linear-regression baseline predicting a future reward per sentence.

    Args:
      extractor_output: [FLAGS.batch_size, FLAGS.max_doc_length, FLAGS.size or 2*FLAGS.size]
    Output:
      rewards: [FLAGS.batch_size, FLAGS.max_doc_length]
    """
    with tf.variable_scope('FutureRewardEstimator') as scope:
        feat_size = extractor_output.get_shape()[2].value
        # Single linear unit over the extractor features.
        weight = variable_on_cpu('weight', [feat_size, 1], tf.random_normal_initializer())
        bias = variable_on_cpu('bias', [1], tf.random_normal_initializer())
        flat_features = tf.reshape(extractor_output, [-1, feat_size])
        predictions = tf.matmul(flat_features, weight) + bias
        # [batch*doc_len, 1] -> [batch, doc_len, 1], then drop the unit dim.
        predictions = tf.reshape(predictions, [-1, FLAGS.max_doc_length, 1])
        # NOTE(review): axis-less squeeze would also drop the batch dim when
        # batch_size == 1 — TODO confirm that is never the case here.
        return tf.squeeze(predictions)
def baseline_single_future_reward_estimator(extractor_output):
    """Linear-regression baseline predicting one future reward per document.

    Args:
      extractor_output: [FLAGS.batch_size, FLAGS.max_doc_length, FLAGS.size or 2*FLAGS.size]
    Output:
      rewards: [FLAGS.batch_size]
    """
    with tf.variable_scope('FutureRewardEstimator') as scope:
        feat_size = extractor_output.get_shape()[2].value
        # One linear unit over the concatenation of all sentence features.
        weight = variable_on_cpu('weight', [FLAGS.max_doc_length*feat_size, 1], tf.random_normal_initializer())
        bias = variable_on_cpu('bias', [1], tf.random_normal_initializer())
        # [batch, doc_len, feat] -> [batch, doc_len*feat]
        doc_features = tf.reshape(extractor_output, [-1, FLAGS.max_doc_length*feat_size])
        predictions = tf.matmul(doc_features, weight) + bias  # [batch, 1]
        # Drop the trailing unit dimension -> [batch]
        return tf.squeeze(predictions)
### Loss Functions
def mean_square_loss_doclevel(future_rewards, actual_reward):
    """Mean squared error between predicted and observed document rewards.

    args:
      future_rewards: [FLAGS.batch_size]
      actual_reward: [FLAGS.batch_size]
    Output
      Float Value
    """
    with tf.variable_scope('MeanSquareLoss') as scope:
        # Per-document squared error, averaged over the batch.
        mean_sq_loss = tf.reduce_mean(tf.square(future_rewards - actual_reward))
        tf.add_to_collection('mean_square_loss', mean_sq_loss)
        return mean_sq_loss
def mean_square_loss(future_rewards, actual_reward, weights):
    """Mean squared error between per-sentence predictions and the document reward.

    args:
      future_rewards: [FLAGS.batch_size, FLAGS.max_doc_length]
      actual_reward: [FLAGS.batch_size]
      weights: Weights to avoid padded part [FLAGS.batch_size, FLAGS.max_doc_length]
    Output
      Float Value
    """
    with tf.variable_scope('MeanSquareLoss') as scope:
        # Broadcast the scalar document reward across every sentence slot.
        broadcast_reward = tf.expand_dims(actual_reward, 1)  # [batch, 1]
        per_sent_error = tf.square(future_rewards - broadcast_reward)  # [batch, doc_len]
        if FLAGS.weighted_loss:
            # Mask out padded sentences and average over the valid ones only.
            masked_error = tf.mul(per_sent_error, weights)
            mean_sq_loss = tf.reduce_sum(masked_error) / tf.reduce_sum(weights)
        else:
            mean_sq_loss = tf.reduce_mean(per_sent_error)
        tf.add_to_collection('mean_square_loss', mean_sq_loss)
        return mean_sq_loss
def cross_entropy_loss(logits, labels, weights):
    """Estimate cost of predictions
    Add summary for "cost" and "cost/avg".
    Args:
      logits: Logits from inference(). [FLAGS.batch_size, FLAGS.max_doc_length, FLAGS.target_label_size]
      labels: Sentence extraction gold levels [FLAGS.batch_size, FLAGS.max_doc_length, FLAGS.target_label_size]
      weights: Weights to avoid padded part [FLAGS.batch_size, FLAGS.max_doc_length]
    Returns:
      Cross-entropy Cost
    """
    with tf.variable_scope('CrossEntropyLoss') as scope:
        # Reshape logits and labels to match the requirement of softmax_cross_entropy_with_logits
        logits = tf.reshape(logits, [-1, FLAGS.target_label_size])  # [FLAGS.batch_size*FLAGS.max_doc_length, FLAGS.target_label_size]
        labels = tf.reshape(labels, [-1, FLAGS.target_label_size])  # [FLAGS.batch_size*FLAGS.max_doc_length, FLAGS.target_label_size]
        cross_entropy = tf.nn.softmax_cross_entropy_with_logits(logits, labels)  # [FLAGS.batch_size*FLAGS.max_doc_length]
        cross_entropy = tf.reshape(cross_entropy, [-1, FLAGS.max_doc_length])  # [FLAGS.batch_size, FLAGS.max_doc_length]
        if FLAGS.weighted_loss:
            # Zero the contribution of padded sentences before summing.
            cross_entropy = tf.mul(cross_entropy, weights)
        # Cross entroy / document: sum over sentences, then average over batch.
        cross_entropy = tf.reduce_sum(cross_entropy, reduction_indices=1)  # [FLAGS.batch_size]
        cross_entropy_mean = tf.reduce_mean(cross_entropy, name='crossentropy')
        # ## Cross entroy / sentence (alternative normalization, kept for reference)
        # cross_entropy_sum = tf.reduce_sum(cross_entropy)
        # valid_sentences = tf.reduce_sum(weights)
        # cross_entropy_mean = cross_entropy_sum / valid_sentences
        # cross_entropy = -tf.reduce_sum(labels * tf.log(logits), reduction_indices=1)
        # cross_entropy_mean = tf.reduce_mean(cross_entropy, name='crossentropy')
        tf.add_to_collection('cross_entropy_loss', cross_entropy_mean)
        # # # The total loss is defined as the cross entropy loss plus all of
        # # # the weight decay terms (L2 loss).
        # # return tf.add_n(tf.get_collection('losses'), name='total_loss')
        return cross_entropy_mean
def predict_labels(logits):
    """Turn extractor logits into predicted one-hot labels.

    logits: Logits from inference(). [FLAGS.batch_size, FLAGS.max_doc_length, FLAGS.target_label_size]
    Return [FLAGS.batch_size, FLAGS.max_doc_length, FLAGS.target_label_size]
    """
    with tf.variable_scope('PredictLabels') as scope:
        # Flatten so argmax/argmin run over the label axis.
        flat_logits = tf.reshape(logits, [-1, FLAGS.target_label_size])  # [batch*doc_len, target_label_size]
        # Index of the winning class per sentence -> [batch, doc_len, 1].
        winner = tf.expand_dims(tf.reshape(tf.argmax(flat_logits, 1), [-1, FLAGS.max_doc_length]), 2)
        # Index of the losing class per sentence -> [batch, doc_len, 1].
        loser = tf.expand_dims(tf.reshape(tf.argmin(flat_logits, 1), [-1, FLAGS.max_doc_length]), 2)
        # Stacking (loser, winner) yields a one-hot label pair; this only
        # works when FLAGS.target_label_size == 2.
        predicted = tf.concat(2, [loser, winner])  # [batch, doc_len, target_label_size]
        dtype = tf.float16 if FLAGS.use_fp16 else tf.float32
        return tf.cast(predicted, dtype)
def estimate_ltheta_ot(logits, labels, future_rewards, actual_rewards, weights):
    """Compute the REINFORCE-style gradient weight d(L_theta)/d(o_t).

    Args:
      logits: Logits from inference(). [FLAGS.batch_size, FLAGS.max_doc_length, FLAGS.target_label_size]
      labels: Label placeholdr for self prediction [FLAGS.batch_size, FLAGS.max_doc_length, FLAGS.target_label_size]
      future_rewards: [FLAGS.batch_size, FLAGS.max_doc_length]
      actual_reward: [FLAGS.batch_size]
      weights: Weights to avoid padded part [FLAGS.batch_size, FLAGS.max_doc_length]
    Returns:
      [FLAGS.batch_size, FLAGS.max_doc_length, FLAGS.target_label_size]
    """
    with tf.variable_scope('LTheta_Ot') as scope:
        # Get Reward Weights: External reward - Predicted reward
        # tile + reshape + transpose broadcasts the per-document reward to every
        # sentence slot: [a,b] -> [[a,a,...],[b,b,...]].
        actual_rewards = tf.tile(actual_rewards, [FLAGS.max_doc_length])  # [FLAGS.batch_size * FLAGS.max_doc_length] , [a,b] * 3 = [a, b, a, b, a, b]
        actual_rewards = tf.reshape(actual_rewards, [FLAGS.max_doc_length, -1])  # [FLAGS.max_doc_length, FLAGS.batch_size], # [[a,b], [a,b], [a,b]]
        actual_rewards = tf.transpose(actual_rewards)  # [FLAGS.batch_size, FLAGS.max_doc_length] # [[a,a,a], [b,b,b]]
        diff_act_pred = actual_rewards - future_rewards  # [FLAGS.batch_size, FLAGS.max_doc_length]
        diff_act_pred = tf.expand_dims(diff_act_pred, 2)  # [FLAGS.batch_size, FLAGS.max_doc_length, 1]
        # Convert (FLAGS.target_label_size = 2): duplicate along the label axis.
        diff_act_pred = tf.concat(2, [diff_act_pred, diff_act_pred])  # [FLAGS.batch_size, FLAGS.max_doc_length, FLAGS.target_label_size]
        # Reshape logits and labels to match the requirement of softmax_cross_entropy_with_logits
        logits = tf.reshape(logits, [-1, FLAGS.target_label_size])  # [FLAGS.batch_size*FLAGS.max_doc_length, FLAGS.target_label_size]
        logits = tf.nn.softmax(logits)
        logits = tf.reshape(logits, [-1, FLAGS.max_doc_length, FLAGS.target_label_size])  # [FLAGS.batch_size, FLAGS.max_doc_length, FLAGS.target_label_size]
        # Get the difference: softmax probabilities minus the indicator labels.
        diff_logits_indicator = logits - labels  # [FLAGS.batch_size, FLAGS.max_doc_length, FLAGS.target_label_size]
        # Multiply with reward
        d_ltheta_ot = tf.mul(diff_act_pred, diff_logits_indicator)  # [FLAGS.batch_size, FLAGS.max_doc_length, FLAGS.target_label_size]
        # Multiply with weight: zero out padded sentence positions.
        weights = tf.expand_dims(weights, 2)  # [FLAGS.batch_size, FLAGS.max_doc_length, 1]
        weights = tf.concat(2, [weights, weights])  # [FLAGS.batch_size, FLAGS.max_doc_length, FLAGS.target_label_size]
        d_ltheta_ot = tf.mul(d_ltheta_ot, weights)  # [FLAGS.batch_size, FLAGS.max_doc_length, FLAGS.target_label_size]
        return d_ltheta_ot
# def estimate_ltheta_ot_mixer(logits, labels_gold, labels_pred, future_rewards, actual_rewards, weights, annealing_step):
# """
# Args:
# logits: Logits from inference(). [FLAGS.batch_size, FLAGS.max_doc_length, FLAGS.target_label_size]
# labels_gold: Label placeholdr for gold labels [FLAGS.batch_size, FLAGS.max_doc_length, FLAGS.target_label_size]
# labels_pred: Label placeholdr for self prediction [FLAGS.batch_size, FLAGS.max_doc_length, FLAGS.target_label_size]
# future_rewards: [FLAGS.batch_size, FLAGS.max_doc_length]
# actual_reward: [FLAGS.batch_size]
# weights: Weights to avoid padded part [FLAGS.batch_size, FLAGS.max_doc_length]
# annealing_step: [1], single value but in tensor form
# Returns:
# [FLAGS.batch_size, FLAGS.max_doc_length, FLAGS.target_label_size]
# """
# with tf.variable_scope('LTheta_Ot_Mixer') as scope:
# print(annealing_step)
# policygradloss_length = tf.reduce_sum(annealing_step) * FLAGS.annealing_step_delta
# crossentryloss_length = FLAGS.max_doc_length - policygradloss_length
# # Reshape logits and partition
# logits = tf.reshape(logits, [-1, FLAGS.target_label_size]) # [FLAGS.batch_size*FLAGS.max_doc_length, FLAGS.target_label_size]
# logits = tf.nn.softmax(logits)
# logits = tf.reshape(logits, [-1, FLAGS.max_doc_length, FLAGS.target_label_size]) # [FLAGS.batch_size, FLAGS.max_doc_length, FLAGS.target_label_size]
# logits_list = reshape_tensor2list(logits, FLAGS.max_doc_length, FLAGS.target_label_size)
# logits_ce_gold_list = logits_list[0:crossentryloss_length]
# logits_ce_gold = reshape_list2tensor(logits_ce_gold_list, crossentryloss_length, FLAGS.target_label_size) # [FLAGS.batch_size, crossentryloss_length, FLAGS.target_label_size]
# logits_reward_list = logits_list[crossentryloss_length:]
# logits_reward = reshape_list2tensor(logits_reward_list, policygradloss_length, FLAGS.target_label_size) # [FLAGS.batch_size, policygradloss_length, FLAGS.target_label_size]
# # Crossentropy loss with gold labels: partition gold_labels
# labels_gold_list = reshape_tensor2list(labels_gold, FLAGS.max_doc_length, FLAGS.target_label_size)
# labels_gold_used_list = labels_gold_list[0:crossentryloss_length]
# labels_gold_used = reshape_list2tensor(labels_gold_used_list, crossentryloss_length, FLAGS.target_label_size) # [FLAGS.batch_size, crossentryloss_length, FLAGS.target_label_size]
# # d_ltheta_ot : cross entropy
# diff_logits_goldlabels = logits_ce_gold - labels_gold_used # [FLAGS.batch_size, crossentryloss_length, FLAGS.target_label_size]
# # Policy gradient for rest
# # Get Reward Weights: External reward - Predicted reward
# actual_rewards = tf.tile(actual_rewards, [FLAGS.max_doc_length]) # [FLAGS.batch_size * FLAGS.max_doc_length] , [a,b] * 3 = [a, b, a, b, a, b]
# actual_rewards = tf.reshape(actual_rewards, [FLAGS.max_doc_length, -1]) # [FLAGS.max_doc_length, FLAGS.batch_size], # [[a,b], [a,b], [a,b]]
# actual_rewards = tf.transpose(actual_rewards) # [FLAGS.batch_size, FLAGS.max_doc_length] # [[a,a,a], [b,b,b]]
# diff_act_pred = actual_rewards - future_rewards # [FLAGS.batch_size, FLAGS.max_doc_length]
# diff_act_pred = tf.expand_dims(diff_act_pred, 2) # [FLAGS.batch_size, FLAGS.max_doc_length, 1]
# # Convert (FLAGS.target_label_size = 2)
# diff_act_pred = tf.concat(2, [diff_act_pred, diff_act_pred]) # [FLAGS.batch_size, FLAGS.max_doc_length, FLAGS.target_label_size]
# # Get used reward diff
# diff_act_pred_list = reshape_tensor2list(diff_act_pred, FLAGS.max_doc_length, FLAGS.target_label_size)
# diff_reward_act_pred_used_list = diff_act_pred_list[crossentryloss_length:]
# diff_reward_act_pred_used = reshape_list2tensor(diff_reward_act_pred_used_list, policygradloss_length, FLAGS.target_label_size) # [FLAGS.batch_size, policygradloss_length, FLAGS.target_label_size]
# # Partition predicted labels
# labels_pred_list = reshape_tensor2list(labels_pred, FLAGS.max_doc_length, FLAGS.target_label_size)
# labels_pred_used_list = labels_pred_list[crossentryloss_length:]
# labels_pred_used = reshape_list2tensor(labels_pred_used_list, policygradloss_length, FLAGS.target_label_size) # [FLAGS.batch_size, policygradloss_length, FLAGS.target_label_size]
# # d_ltheta_ot : reward weighted
# diff_logits_predlabels = logits_reward - labels_pred_used # [FLAGS.batch_size, policygradloss_length, FLAGS.target_label_size]
# # Multiply with reward
# reward_weighted_diff_logits_predlabels = tf.mul(diff_reward_act_pred_used, diff_logits_predlabels) # [FLAGS.batch_size, policygradloss_length, FLAGS.target_label_size]
# # Concat both part
# d_ltheta_ot_mixer = tf.concat(1, [diff_logits_goldlabels, reward_weighted_diff_logits_predlabels]) # [FLAGS.batch_size, FLAGS.max_doc_length, FLAGS.target_label_size]
# # Multiply with weight
# weights = tf.expand_dims(weights, 2) # [FLAGS.batch_size, FLAGS.max_doc_length, 1]
# weights = tf.concat(2, [weights, weights]) # [FLAGS.batch_size, FLAGS.max_doc_length, FLAGS.target_label_size]
# d_ltheta_ot_mixer = tf.mul(d_ltheta_ot_mixer, weights) # [FLAGS.batch_size, FLAGS.max_doc_length, FLAGS.target_label_size]
# return d_ltheta_ot_mixer
def reward_weighted_cross_entropy_loss_multisample(logits, labels, actual_rewards, weights):
    """Estimate cost of predictions
    Add summary for "cost" and "cost/avg".
    Args:
      logits: Logits from inference(). [FLAGS.batch_size, FLAGS.max_doc_length, FLAGS.target_label_size]
      labels: Label placeholdr for multiple sampled prediction [FLAGS.batch_size, 1, FLAGS.max_doc_length, FLAGS.target_label_size]
      actual_rewards: [FLAGS.batch_size, 1]
      weights: Weights to avoid padded part [FLAGS.batch_size, FLAGS.max_doc_length]
    Returns:
      Cross-entropy Cost
    """
    with tf.variable_scope('RWCELossMultiSample') as scope:
        # Expand logits and weights for roll outs
        # (the sample axis is currently fixed at 1; the commented loop below
        # shows how multiple rollouts were meant to be stacked).
        logits_temp = tf.expand_dims(logits, 1)  # [FLAGS.batch_size, 1, FLAGS.max_doc_length, FLAGS.target_label_size]
        weights_temp = tf.expand_dims(weights, 1)  # [FLAGS.batch_size, 1, FLAGS.max_doc_length]
        logits_expanded = logits_temp
        weights_expanded = weights_temp
        # for ridx in range(1,FLAGS.num_sample_rollout):
        #     logits_expanded = tf.concat(1, [logits_expanded, logits_temp])  # [FLAGS.batch_size, n++, FLAGS.max_doc_length, FLAGS.target_label_size]
        #     weights_expanded = tf.concat(1, [weights_expanded, weights_temp])  # [FLAGS.batch_size, n++, FLAGS.max_doc_length]
        # Reshape logits and labels to match the requirement of softmax_cross_entropy_with_logits
        logits_expanded = tf.reshape(logits_expanded, [-1, FLAGS.target_label_size])  # [FLAGS.batch_size*1*FLAGS.max_doc_length, FLAGS.target_label_size]
        labels = tf.reshape(labels, [-1, FLAGS.target_label_size])  # [FLAGS.batch_size*1*FLAGS.max_doc_length, FLAGS.target_label_size]
        cross_entropy = tf.nn.softmax_cross_entropy_with_logits(logits_expanded, labels)  # [FLAGS.batch_size*1*FLAGS.max_doc_length]
        cross_entropy = tf.reshape(cross_entropy, [-1, 1, FLAGS.max_doc_length])  # [FLAGS.batch_size, 1, FLAGS.max_doc_length]
        if FLAGS.weighted_loss:
            # Zero out padded sentence positions.
            cross_entropy = tf.mul(cross_entropy, weights_expanded)  # [FLAGS.batch_size, 1, FLAGS.max_doc_length]
        # Reshape actual rewards: broadcast one reward per (doc, sample) to
        # every sentence slot via tile + reshape + transpose.
        actual_rewards = tf.reshape(actual_rewards, [-1])  # [FLAGS.batch_size*1]
        # [[a, b], [c, d], [e, f]] 3x2 => [a, b, c, d, e, f] [6]
        actual_rewards = tf.tile(actual_rewards, [FLAGS.max_doc_length])  # [FLAGS.batch_size * 1 * FLAGS.max_doc_length]
        # [a, b, c, d, e, f] * 2 = [a, b, c, d, e, f, a, b, c, d, e, f] [12]
        actual_rewards = tf.reshape(actual_rewards, [FLAGS.max_doc_length, -1])  # [FLAGS.max_doc_length, FLAGS.batch_size*1]
        # [[a, b, c, d, e, f], [a, b, c, d, e, f]] [2, 6]
        actual_rewards = tf.transpose(actual_rewards)  # [FLAGS.batch_size*1, FLAGS.max_doc_length]
        # [[a,a], [b,b], [c,c], [d,d], [e,e], [f,f]] [6 x 2]
        actual_rewards = tf.reshape(actual_rewards, [-1, 1, FLAGS.max_doc_length])  # [FLAGS.batch_size, 1, FLAGS.max_doc_length],
        # [[[a,a], [b,b]], [[c,c], [d,d]], [[e,e], [f,f]]] [3 x 2 x 2]
        # Multiply with reward
        reward_weighted_cross_entropy = tf.mul(cross_entropy, actual_rewards)  # [FLAGS.batch_size, 1, FLAGS.max_doc_length]
        # Cross entroy / sample / document: sum over sentences, mean over the rest.
        reward_weighted_cross_entropy = tf.reduce_sum(reward_weighted_cross_entropy, reduction_indices=2)  # [FLAGS.batch_size, 1]
        reward_weighted_cross_entropy_mean = tf.reduce_mean(reward_weighted_cross_entropy, name='rewardweightedcemultisample')
        tf.add_to_collection('reward_cross_entropy_loss_multisample', reward_weighted_cross_entropy_mean)
        return reward_weighted_cross_entropy_mean
def reward_weighted_cross_entropy_loss(logits, labels, actual_rewards, weights):
  """Reward-weighted cross-entropy cost over whole documents.

  Args:
    logits: Logits from inference().
      [FLAGS.batch_size, FLAGS.max_doc_length, FLAGS.target_label_size]
    labels: Label placeholder for self prediction.
      [FLAGS.batch_size, FLAGS.max_doc_length, FLAGS.target_label_size]
    actual_rewards: One scalar reward per document. [FLAGS.batch_size]
    weights: Mask that zeroes out padded sentences.
      [FLAGS.batch_size, FLAGS.max_doc_length]

  Returns:
    Scalar: batch mean of the reward-weighted cross-entropy cost.
  """
  with tf.variable_scope('RewardWeightedCrossEntropyLoss') as scope:
    # Flatten so each sentence is one row, as softmax_cross_entropy_with_logits expects.
    flat_logits = tf.reshape(logits, [-1, FLAGS.target_label_size])  # [batch*doc_len, label]
    flat_labels = tf.reshape(labels, [-1, FLAGS.target_label_size])  # [batch*doc_len, label]
    ce = tf.nn.softmax_cross_entropy_with_logits(flat_logits, flat_labels)  # [batch*doc_len]
    # Back to one row per document.
    ce = tf.reshape(ce, [-1, FLAGS.max_doc_length])  # [batch, doc_len]
    if FLAGS.weighted_loss:
      ce = tf.mul(ce, weights)  # zero out padded sentences
    # Replicate each document's reward across the sentence axis:
    # tile: [a,b] -> [a,b,a,b,a,b]; reshape -> [[a,b],[a,b],[a,b]];
    # transpose -> [[a,a,a],[b,b,b]]  i.e. [batch, doc_len].
    tiled_rewards = tf.tile(actual_rewards, [FLAGS.max_doc_length])
    tiled_rewards = tf.reshape(tiled_rewards, [FLAGS.max_doc_length, -1])
    tiled_rewards = tf.transpose(tiled_rewards)
    # Scale every sentence's cross-entropy by its document reward.
    weighted_ce = tf.mul(ce, tiled_rewards)  # [batch, doc_len]
    # Sum over sentences -> one cost per document, then average the batch.
    per_doc_cost = tf.reduce_sum(weighted_ce, reduction_indices=1)  # [batch]
    loss_mean = tf.reduce_mean(per_doc_cost, name='rewardweightedcrossentropy')
    tf.add_to_collection('reward_cross_entropy_loss', loss_mean)
    return loss_mean
# def reward_weighted_cross_entropy_loss(logits, labels, future_rewards, actual_rewards, weights):
# """Estimate cost of predictions
# Add summary for "cost" and "cost/avg".
# Args:
# logits: Logits from inference(). [FLAGS.batch_size, FLAGS.max_doc_length, FLAGS.target_label_size]
# labels: Label placeholdr for self prediction [FLAGS.batch_size, FLAGS.max_doc_length, FLAGS.target_label_size]
# future_rewards: [FLAGS.batch_size, FLAGS.max_doc_length]
# actual_reward: [FLAGS.batch_size]
# weights: Weights to avoid padded part [FLAGS.batch_size, FLAGS.max_doc_length]
# Returns:
# Cross-entropy Cost
# """
# with tf.variable_scope('RewardWeightedCrossEntropyLoss') as scope:
# # Get Reward Weights: External reward - Predicted reward
# actual_rewards = tf.tile(actual_rewards, [FLAGS.max_doc_length]) # [FLAGS.batch_size * FLAGS.max_doc_length] , [a,b] * 3 = [a, b, a, b, a, b]
# actual_rewards = tf.reshape(actual_rewards, [FLAGS.max_doc_length, -1]) # [FLAGS.max_doc_length, FLAGS.batch_size], # [[a,b], [a,b], [a,b]]
# actual_rewards = tf.transpose(actual_rewards) # [FLAGS.batch_size, FLAGS.max_doc_length] # [[a,a,a], [b,b,b]]
# # Error: actual_rewards = tf.reshape(tf.tile(actual_rewards, [FLAGS.max_doc_length]),[-1, FLAGS.max_doc_length]) # [FLAGS.batch_size, FLAGS.max_doc_length]
# diff_act_pred = future_rewards - actual_rewards # actual_rewards - future_rewards # [FLAGS.batch_size, FLAGS.max_doc_length]
# # Reshape logits and labels to match the requirement of softmax_cross_entropy_with_logits
# logits = tf.reshape(logits, [-1, FLAGS.target_label_size]) # [FLAGS.batch_size*FLAGS.max_doc_length, FLAGS.target_label_size]
# labels = tf.reshape(labels, [-1, FLAGS.target_label_size]) # [FLAGS.batch_size*FLAGS.max_doc_length, FLAGS.target_label_size]
# cross_entropy = tf.nn.softmax_cross_entropy_with_logits(logits, labels) # [FLAGS.batch_size*FLAGS.max_doc_length]
# cross_entropy = tf.reshape(cross_entropy, [-1, FLAGS.max_doc_length]) # [FLAGS.batch_size, FLAGS.max_doc_length]
# if FLAGS.weighted_loss:
# cross_entropy = tf.mul(cross_entropy, weights) # [FLAGS.batch_size, FLAGS.max_doc_length]
# # Multiply with reward
# reward_weighted_cross_entropy = tf.mul(cross_entropy, diff_act_pred) # [FLAGS.batch_size, FLAGS.max_doc_length]
# # Cross entroy / document
# reward_weighted_cross_entropy = tf.reduce_sum(reward_weighted_cross_entropy, reduction_indices=1) # [FLAGS.batch_size]
# reward_weighted_cross_entropy_mean = tf.reduce_mean(reward_weighted_cross_entropy, name='rewardweightedcrossentropy')
# tf.add_to_collection('reward_cross_entropy_loss', reward_weighted_cross_entropy_mean)
# return reward_weighted_cross_entropy_mean
# def temp_reward_weighted_cross_entropy_loss(logits, labels, future_rewards, actual_rewards, weights):
# """Estimate cost of predictions
# Add summary for "cost" and "cost/avg".
# Args:
# logits: Logits from inference(). [FLAGS.batch_size, FLAGS.max_doc_length, FLAGS.target_label_size]
# labels: Label placeholdr for self prediction [FLAGS.batch_size, FLAGS.max_doc_length, FLAGS.target_label_size]
# future_rewards: [FLAGS.batch_size, FLAGS.max_doc_length]
# actual_reward: [FLAGS.batch_size]
# weights: Weights to avoid padded part [FLAGS.batch_size, FLAGS.max_doc_length]
# Returns:
# Cross-entropy Cost
# """
# with tf.variable_scope('TempRewardWeightedCrossEntropyLoss') as scope:
# # Get Reward Weights: External reward - Predicted reward
# actual_rewards = tf.tile(actual_rewards, [FLAGS.max_doc_length]) # [FLAGS.batch_size * FLAGS.max_doc_length] , [a,b] * 3 = [a, b, a, b, a, b]
# actual_rewards = tf.reshape(actual_rewards, [FLAGS.max_doc_length, -1]) # [FLAGS.max_doc_length, FLAGS.batch_size], # [[a,b], [a,b], [a,b]]
# actual_rewards = tf.transpose(actual_rewards) # [FLAGS.batch_size, FLAGS.max_doc_length] # [[a,a,a], [b,b,b]]
# diff_act_pred = future_rewards - actual_rewards # actual_rewards - future_rewards # [FLAGS.batch_size, FLAGS.max_doc_length]
# # Reshape logits and labels to match the requirement of softmax_cross_entropy_with_logits
# logits = tf.reshape(logits, [-1, FLAGS.target_label_size]) # [FLAGS.batch_size*FLAGS.max_doc_length, FLAGS.target_label_size]
# labels = tf.reshape(labels, [-1, FLAGS.target_label_size]) # [FLAGS.batch_size*FLAGS.max_doc_length, FLAGS.target_label_size]
# cross_entropy = tf.nn.softmax_cross_entropy_with_logits(logits, labels) # [FLAGS.batch_size*FLAGS.max_doc_length]
# cross_entropy = tf.reshape(cross_entropy, [-1, FLAGS.max_doc_length]) # [FLAGS.batch_size, FLAGS.max_doc_length]
# if FLAGS.weighted_loss:
# cross_entropy = tf.mul(cross_entropy, weights) # [FLAGS.batch_size, FLAGS.max_doc_length]
# # Multiply with reward
# reward_weighted_cross_entropy = tf.mul(cross_entropy, diff_act_pred) # [FLAGS.batch_size, FLAGS.max_doc_length]
# # Cross entroy / document
# reward_weighted_cross_entropy = tf.reduce_sum(reward_weighted_cross_entropy, reduction_indices=1) # [FLAGS.batch_size]
# reward_weighted_cross_entropy_mean = tf.reduce_mean(reward_weighted_cross_entropy, name='rewardweightedcrossentropy')
# optimizer = tf.train.AdamOptimizer(learning_rate=FLAGS.learning_rate, name='adam')
# # Compute gradients of policy network
# policy_network_variables = tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES, scope="PolicyNetwork")
# # print(policy_network_variables)
# # Compute gradients of policy network
# grads_and_vars = optimizer.compute_gradients(reward_weighted_cross_entropy_mean, var_list=policy_network_variables)
# # print(grads_and_vars)
# return actual_rewards, cross_entropy, diff_act_pred, reward_weighted_cross_entropy, reward_weighted_cross_entropy_mean, grads_and_vars
# def cross_entropy_loss_selfprediction(logits, weights):
# """Optimizing expected reward: Weighted cross entropy
# args:
# logits: Logits without softmax. [FLAGS.batch_size, FLAGS.max_doc_length, FLAGS.target_label_size]
# weights: Weights to avoid padded part [FLAGS.batch_size, FLAGS.max_doc_length]
# return:
# [FLAGS.batch_size, FLAGS.max_doc_length]
# """
# with tf.variable_scope('SelfPredCrossEntropyLoss') as scope:
# # Reshape logits for argmax and argmin
# logits = tf.reshape(logits, [-1, FLAGS.target_label_size]) # [FLAGS.batch_size*FLAGS.max_doc_length, FLAGS.target_label_size]
# # Get labels if predicted using these logits
# logits_argmax = tf.argmax(logits, 1) # [FLAGS.batch_size*FLAGS.max_doc_length]
# logits_argmax = tf.reshape(logits_argmax, [-1, FLAGS.max_doc_length]) # [FLAGS.batch_size, FLAGS.max_doc_length]
# logits_argmax = tf.expand_dims(logits_argmax, 2) # [FLAGS.batch_size, FLAGS.max_doc_length, 1]
# logits_argmin = tf.argmin(logits, 1) # [FLAGS.batch_size*FLAGS.max_doc_length]
# logits_argmin = tf.reshape(logits_argmin, [-1, FLAGS.max_doc_length]) # [FLAGS.batch_size, FLAGS.max_doc_length]
# logits_argmin = tf.expand_dims(logits_argmin, 2) # [FLAGS.batch_size, FLAGS.max_doc_length, 1]
# # Convert argmin and argmax to labels, works only if FLAGS.target_label_size = 2
# labels = tf.concat(2, [logits_argmin, logits_argmax]) # [FLAGS.batch_size, FLAGS.max_doc_length, FLAGS.target_label_size]
# dtype = tf.float16 if FLAGS.use_fp16 else tf.float32
# labels = tf.cast(labels, dtype)
# labels = tf.reshape(labels, [-1, FLAGS.target_label_size]) # [FLAGS.batch_size*FLAGS.max_doc_length, FLAGS.target_label_size]
# # softmax_cross_entropy_with_logits
# cross_entropy = tf.nn.softmax_cross_entropy_with_logits(logits, labels) # [FLAGS.batch_size*FLAGS.max_doc_length]
# cross_entropy = tf.reshape(cross_entropy, [-1, FLAGS.max_doc_length]) # [FLAGS.batch_size, FLAGS.max_doc_length]
# if FLAGS.weighted_loss:
# cross_entropy = tf.mul(cross_entropy, weights)
# return cross_entropy
# def weighted_cross_entropy_loss(logits, future_rewards, actual_reward, weights):
# """Optimizing expected reward: Weighted cross entropy
# args:
# logits: Logits without softmax. [FLAGS.batch_size, FLAGS.max_doc_length, FLAGS.target_label_size]
# future_rewards: [FLAGS.batch_size, FLAGS.max_doc_length]
# actual_reward: [FLAGS.batch_size]
# weights: Weights to avoid padded part [FLAGS.batch_size, FLAGS.max_doc_length]
# """
# with tf.variable_scope('WeightedCrossEntropyLoss') as scope:
# # Get Weights: External reward - Predicted reward
# actual_reward = tf.reshape(tf.tile(actual_reward, [FLAGS.max_doc_length]),[-1, FLAGS.max_doc_length]) # [FLAGS.batch_size, FLAGS.max_doc_length]
# diff_act_pred = future_rewards - actual_reward # actual_reward - future_rewards # [FLAGS.batch_size, FLAGS.max_doc_length]
# # Reshape logits for argmax and argmin
# logits = tf.reshape(logits, [-1, FLAGS.target_label_size]) # [FLAGS.batch_size*FLAGS.max_doc_length, FLAGS.target_label_size]
# # Get labels if predicted using these logits
# logits_argmax = tf.argmax(logits, 1) # [FLAGS.batch_size*FLAGS.max_doc_length]
# logits_argmax = tf.reshape(logits_argmax, [-1, FLAGS.max_doc_length]) # [FLAGS.batch_size, FLAGS.max_doc_length]
# logits_argmax = tf.expand_dims(logits_argmax, 2) # [FLAGS.batch_size, FLAGS.max_doc_length, 1]
# logits_argmin = tf.argmin(logits, 1) # [FLAGS.batch_size*FLAGS.max_doc_length]
# logits_argmin = tf.reshape(logits_argmin, [-1, FLAGS.max_doc_length]) # [FLAGS.batch_size, FLAGS.max_doc_length]
# logits_argmin = tf.expand_dims(logits_argmin, 2) # [FLAGS.batch_size, FLAGS.max_doc_length, 1]
# # Convert argmin and argmax to labels, works only if FLAGS.target_label_size = 2
# labels = tf.concat(2, [logits_argmin, logits_argmax]) # [FLAGS.batch_size, FLAGS.max_doc_length, FLAGS.target_label_size]
# dtype = tf.float16 if FLAGS.use_fp16 else tf.float32
# labels = tf.cast(labels, dtype)
# labels = tf.reshape(labels, [-1, FLAGS.target_label_size]) # [FLAGS.batch_size*FLAGS.max_doc_length, FLAGS.target_label_size]
# # softmax_cross_entropy_with_logits
# cross_entropy = tf.nn.softmax_cross_entropy_with_logits(logits, labels) # [FLAGS.batch_size*FLAGS.max_doc_length]
# cross_entropy = tf.reshape(cross_entropy, [-1, FLAGS.max_doc_length]) # [FLAGS.batch_size, FLAGS.max_doc_length]
# if FLAGS.weighted_loss:
# cross_entropy = tf.mul(cross_entropy, weights)
# # Multiply with reward
# cross_entropy = tf.mul(cross_entropy, diff_act_pred)
# # Cross entroy / document
# cross_entropy = tf.reduce_sum(cross_entropy, reduction_indices=1) # [FLAGS.batch_size]
# cross_entropy_mean = tf.reduce_mean(cross_entropy, name='crossentropy')
# tf.add_to_collection('reward_cross_entropy_loss', cross_entropy_mean)
# # # # The total loss is defined as the cross entropy loss plus all of
# # # # the weight decay terms (L2 loss).
# # # return tf.add_n(tf.get_collection('losses'), name='total_loss')
# return cross_entropy_mean
### Training functions
def train_cross_entropy_loss(cross_entropy_loss):
  """Build the training op for supervised (gold-label) pretraining.

  Pretraining the network this way starts policy-gradient training from a
  better policy.

  Args:
    cross_entropy_loss: Scalar cross-entropy cost to minimize.

  Returns:
    An op that applies one Adam update to the PolicyNetwork variables.
  """
  with tf.variable_scope('TrainCrossEntropyLoss') as scope:
    adam = tf.train.AdamOptimizer(learning_rate=FLAGS.learning_rate, name='adam')
    # Only the policy network's variables are updated by this loss.
    policy_vars = tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES, scope="PolicyNetwork")
    gradients = adam.compute_gradients(cross_entropy_loss, var_list=policy_vars)
    return adam.apply_gradients(gradients)
def train_meansq_loss(futreward_meansq_loss):
  """Build the training op for the future-reward estimator.

  Args:
    futreward_meansq_loss: Scalar mean-squared cost to minimize.

  Returns:
    An op that applies one Adam update to the FutureRewardEstimator variables.
  """
  with tf.variable_scope('TrainMeanSqLoss') as scope:
    adam = tf.train.AdamOptimizer(learning_rate=FLAGS.learning_rate, name='adam')
    # Only the reward-estimator network's variables are updated by this loss.
    estimator_vars = tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES,
                                       scope="FutureRewardEstimator")
    gradients = adam.compute_gradients(futreward_meansq_loss, var_list=estimator_vars)
    return adam.apply_gradients(gradients)
def train_neg_expectedreward(reward_weighted_cross_entropy_loss_multisample):
  """Training with Policy Gradient: Optimizing expected reward.

  Args:
    reward_weighted_cross_entropy_loss_multisample: Scalar policy-gradient
      surrogate loss (reward-weighted cross-entropy over sampled labels).

  Returns:
    An op that applies one gradient-clipped Adam update to the
    PolicyNetwork variables.
  """
  with tf.variable_scope('TrainExpReward') as scope:
    optimizer = tf.train.AdamOptimizer(learning_rate=FLAGS.learning_rate, name='adam')
    # Compute gradients of policy network only.
    policy_network_variables = tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES,
                                                 scope="PolicyNetwork")
    grads_and_vars = optimizer.compute_gradients(
        reward_weighted_cross_entropy_loss_multisample,
        var_list=policy_network_variables)
    # Clip gradient norm: Pascanu et al. 2013, exploding gradient problem.
    # BUGFIX: compute_gradients() yields grad=None for any variable the loss
    # does not depend on, and tf.clip_by_norm(None, ...) raises; pass those
    # pairs through unclipped instead of crashing at graph-build time.
    grads_and_vars_capped_norm = [
        (grad if grad is None else tf.clip_by_norm(grad, 5.0), var)
        for grad, var in grads_and_vars]
    # Apply Gradients
    return optimizer.apply_gradients(grads_and_vars_capped_norm)
# def train_neg_expectedreward(reward_weighted_cross_entropy_loss):
# """Training with Policy Gradient: Optimizing expected reward
# args:
# reward_weighted_cross_entropy_loss
# """
# with tf.variable_scope('TrainExpReward') as scope:
# optimizer = tf.train.AdamOptimizer(learning_rate=FLAGS.learning_rate, name='adam')
# # Compute gradients of policy network
# policy_network_variables = tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES, scope="PolicyNetwork")
# # print(policy_network_variables)
# # Compute gradients of policy network
# grads_and_vars = optimizer.compute_gradients(reward_weighted_cross_entropy_loss, var_list=policy_network_variables)
# # print(grads_and_vars)
# # Clip gradient: Pascanu et al. 2013, Exploding gradient problem
# grads_and_vars_capped_norm = [(tf.clip_by_norm(grad, 5.0), var) for grad, var in grads_and_vars]
# # Apply Gradients
# # return optimizer.apply_gradients(grads_and_vars)
# return optimizer.apply_gradients(grads_and_vars_capped_norm)
# def train_neg_expectedreward(logits, d_ltheta_ot):
# """Training with Policy Gradient: Optimizing expected reward
# args:
# logits: Logits without softmax. [FLAGS.batch_size, FLAGS.max_doc_length, FLAGS.target_label_size]
# d_ltheta_ot: Placeholder [FLAGS.batch_size, FLAGS.max_doc_length, FLAGS.target_label_size]
# """
# with tf.variable_scope('TrainExpReward') as scope:
# optimizer = tf.train.AdamOptimizer(learning_rate=FLAGS.learning_rate, name='adam')
# # Modify logits with d_ltheta_ot
# logits = tf.mul(logits, d_ltheta_ot)
# # Compute gradients of policy network
# policy_network_variables = tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES, scope="PolicyNetwork")
# # print(policy_network_variables)
# # Compute gradients of policy network
# grads_and_vars = optimizer.compute_gradients(logits, var_list=policy_network_variables)
# # print(grads_and_vars)
# # Clip gradient: Pascanu et al. 2013, Exploding gradient problem
# grads_and_vars_capped_norm = [(tf.clip_by_norm(grad, 5.0), var) for grad, var in grads_and_vars]
# # Apply Gradients
# # return optimizer.apply_gradients(grads_and_vars)
# return optimizer.apply_gradients(grads_and_vars_capped_norm)
# def temp_train_neg_expectedreward(logits, d_ltheta_ot):
# with tf.variable_scope('TempTrainExpReward') as scope:
# optimizer = tf.train.AdamOptimizer(learning_rate=FLAGS.learning_rate, name='adam')
# # Modify logits with d_ltheta_ot
# logits = tf.mul(logits, d_ltheta_ot)
# # Compute gradients of policy network
# policy_network_variables = tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES, scope="PolicyNetwork")
# # print(policy_network_variables)
# # Compute gradients of policy network
# grads_and_vars = optimizer.compute_gradients(logits, var_list=policy_network_variables)
# grads_and_vars_capped_norm = [(tf.clip_by_norm(grad, 5.0), var) for grad, var in grads_and_vars]
# grads_and_vars_capped_val = [(tf.clip_by_value(grad, -1., 1.), var) for grad, var in grads_and_vars]
# # tf.clip_by_norm(t, clip_norm, axes=None, name=None)
# # https://www.tensorflow.org/versions/r0.11/api_docs/python/train/gradient_clipping
# return grads_and_vars, grads_and_vars_capped_norm, grads_and_vars_capped_val
### Accuracy Calculations
def accuracy(logits, labels, weights):
  """Estimate accuracy of predictions.

  Args:
    logits: Logits from inference(). [FLAGS.batch_size, FLAGS.max_doc_length, FLAGS.target_label_size]
    labels: Sentence extraction gold labels [FLAGS.batch_size, FLAGS.max_doc_length, FLAGS.target_label_size]
    weights: Weights to avoid padded part [FLAGS.batch_size, FLAGS.max_doc_length]

  Returns:
    Accuracy: average accuracy per sentence. When FLAGS.weighted_loss is set,
    padded sentences are excluded and each document is normalized by its own
    true length before averaging over the batch.
  """
  with tf.variable_scope('Accuracy') as scope:
    logits = tf.reshape(logits, [-1, FLAGS.target_label_size])  # [batch*doc_len, label]
    labels = tf.reshape(labels, [-1, FLAGS.target_label_size])  # [batch*doc_len, label]
    correct_pred = tf.equal(tf.argmax(logits, 1), tf.argmax(labels, 1))  # [batch*doc_len]
    correct_pred = tf.reshape(correct_pred, [-1, FLAGS.max_doc_length])  # [batch, doc_len]
    correct_pred = tf.cast(correct_pred, tf.float32)
    # FIX: the original built an unconditional reduce_mean(name='accuracy')
    # and then overwrote it in the weighted branch, leaving a dead graph node;
    # build exactly one 'accuracy' op in either branch instead.
    if FLAGS.weighted_loss:
      # Mask padded sentences, then normalize per document by its real length.
      correct_pred = tf.mul(correct_pred, weights)
      correct_pred = tf.reduce_sum(correct_pred, reduction_indices=1)  # [batch]
      doc_lengths = tf.reduce_sum(weights, reduction_indices=1)  # [batch]
      correct_pred_avg = tf.div(correct_pred, doc_lengths)
      accuracy = tf.reduce_mean(correct_pred_avg, name='accuracy')
    else:
      # Unweighted: plain mean over every (document, sentence) cell.
      accuracy = tf.reduce_mean(correct_pred, name='accuracy')
    return accuracy
# Improve it to show exact accuracy (top three ranked ones), not all.
| 57.277839
| 206
| 0.713018
| 6,857
| 51,951
| 5.058918
| 0.058918
| 0.052351
| 0.075067
| 0.105855
| 0.816051
| 0.78486
| 0.757618
| 0.741474
| 0.72536
| 0.698463
| 0
| 0.006259
| 0.188139
| 51,951
| 906
| 207
| 57.34106
| 0.816203
| 0.616408
| 0
| 0.343096
| 0
| 0
| 0.040117
| 0.013444
| 0
| 0
| 0
| 0
| 0
| 1
| 0.07113
| false
| 0
| 0.041841
| 0
| 0.1841
| 0.008368
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
8a4a8691170a529ee8a70b8a44c2472636ed9e1a
| 34
|
py
|
Python
|
nxtools/tools/__init__.py
|
aonghus/nxtools
|
d21a1b26c3116bf2b580a59f82a1690278f4bc7b
|
[
"BSD-3-Clause"
] | 1
|
2020-01-12T12:04:39.000Z
|
2020-01-12T12:04:39.000Z
|
nxtools/tools/__init__.py
|
aonghus/nxtools
|
d21a1b26c3116bf2b580a59f82a1690278f4bc7b
|
[
"BSD-3-Clause"
] | null | null | null |
nxtools/tools/__init__.py
|
aonghus/nxtools
|
d21a1b26c3116bf2b580a59f82a1690278f4bc7b
|
[
"BSD-3-Clause"
] | null | null | null |
from nxtools.tools.tools import *
| 17
| 33
| 0.794118
| 5
| 34
| 5.4
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.117647
| 34
| 1
| 34
| 34
| 0.9
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
8a7fac079894ebd49d1428005f8826de59161c66
| 81
|
py
|
Python
|
conv/__init__.py
|
snj830526/py_invest_helper
|
ae3240acbb68465b8987e5dda015ca020951ce6d
|
[
"BSD-2-Clause"
] | null | null | null |
conv/__init__.py
|
snj830526/py_invest_helper
|
ae3240acbb68465b8987e5dda015ca020951ce6d
|
[
"BSD-2-Clause"
] | null | null | null |
conv/__init__.py
|
snj830526/py_invest_helper
|
ae3240acbb68465b8987e5dda015ca020951ce6d
|
[
"BSD-2-Clause"
] | null | null | null |
from .constants import *
from .slack_send_message import *
from .invest import *
| 20.25
| 33
| 0.777778
| 11
| 81
| 5.545455
| 0.636364
| 0.327869
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.148148
| 81
| 3
| 34
| 27
| 0.884058
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
8a852fb32f2b21c43efaa22d9ec4c57feb00e533
| 20,925
|
py
|
Python
|
src/cogs/moderation.py
|
Arslee-Develop/openmod
|
322548212995d1b2fea5defa6c6637767f744a97
|
[
"MIT"
] | 5
|
2020-11-07T04:54:58.000Z
|
2021-08-31T20:31:22.000Z
|
src/cogs/moderation.py
|
Arslee-Develop/openmod
|
322548212995d1b2fea5defa6c6637767f744a97
|
[
"MIT"
] | 7
|
2021-01-03T08:54:11.000Z
|
2021-08-31T02:27:15.000Z
|
src/cogs/moderation.py
|
Arslee-Develop/openmod
|
322548212995d1b2fea5defa6c6637767f744a97
|
[
"MIT"
] | 4
|
2021-04-08T14:33:15.000Z
|
2021-06-01T16:21:12.000Z
|
import asyncio
from typing import NoReturn
import discord
from discord import Member, User
from discord.ext import commands
from discord.ext.commands import Bot, Context, Greedy
from discord_components import Button, ButtonStyle, DiscordComponents
from cogs.utils import Config, Logger, Settings, Strings, Utils
CONFIG = Config()
class Moderation(commands.Cog, name="Moderation"):
    def __init__(self, bot: Bot) -> None:
        """Store the bot instance so commands can call wait_for() etc."""
        self.bot = bot
        # Human-readable cog name, mirrors the name= passed to commands.Cog.
        self.name = "Moderation"
@commands.command()
@commands.guild_only()
@commands.bot_has_permissions(ban_members=True)
@commands.has_permissions(ban_members=True)
@commands.cooldown(1, 5, commands.BucketType.user)
async def ban(self,
ctx: Context,
member: Member,
*,
reason: str = "N/A") -> NoReturn:
"""Bans the user.
Attributes:
-----------
- `member` - user
- `reason` - ban reason
"""
s = await Settings(ctx.guild.id)
lang = await s.get_field("locale", CONFIG["default_locale"])
STRINGS = Strings(lang)
select_components = [[
Button(style=ButtonStyle.green, label="✓"),
Button(style=ButtonStyle.red, label="X"),
]]
done_components = [[
Button(style=ButtonStyle.grey, label="·", disabled=True),
]]
embedconfirm = discord.Embed(
title="Ban Command",
description="```Do you want to ban this member?```",
)
await ctx.send(embed=embedconfirm, components=select_components)
response = await self.bot.wait_for(
"button_click", check=lambda message: message.author == ctx.author)
try:
if response.component.label == "✓":
await response.respond(
type=7,
embed=discord.Embed(
title="Action confirmed",
description=f"Banning {member} for {reason}",
color=0xFF8000,
),
components=done_components,
)
if not member.bot:
embed = Utils.error_embed(
STRINGS["moderation"]["dm_kick"].format(
ctx.guild, reason))
await member.send(embed=embed)
await asyncio.sleep(5)
await member.ban(reason=reason)
else:
await response.respond(
type=7,
embed=discord.Embed(
title="Action Aborted",
description="The action was aborted by clicking the no button",
color=0xDD2E44,
),
components=done_components,
)
except discord.Forbidden:
await ctx.message.add_reaction(CONFIG["no_emoji"])
embed = Utils.error_embed(STRINGS["error"]["ban_fail"])
msg = await ctx.send(embed=embed)
await asyncio.sleep(5)
await msg.delete()
else:
try:
embed = Utils.error_embed(
STRINGS["moderation"]["dm_ban"].format(
ctx.guild.name, reason))
await member.send(embed=embed)
except:
pass
await ctx.message.add_reaction(CONFIG["yes_emoji"])
@commands.command()
@commands.guild_only()
@commands.bot_has_permissions(ban_members=True)
@commands.has_permissions(ban_members=True)
@commands.cooldown(1, 5, commands.BucketType.user)
async def unban(self, ctx, *, member) -> NoReturn:
"""Unbans the user.
Attributes:
-----------
- `member` - user tag. Example: `name#1234`
"""
s = await Settings(ctx.guild.id)
lang = await s.get_field("locale", CONFIG["default_locale"])
STRINGS = Strings(lang)
select_components = [[
Button(style=ButtonStyle.green, label="✓"),
Button(style=ButtonStyle.red, label="X"),
]]
done_components = [[
Button(style=ButtonStyle.grey, label="·", disabled=True),
]]
embedconfirm = discord.Embed(
title="Unban Command",
description="```Do you want to unban this member?```",
)
await ctx.send(embed=embedconfirm, components=select_components)
response = await self.bot.wait_for(
"button_click", check=lambda message: message.author == ctx.author)
if "#" in ctx.message.content and response.component.label == "✓":
banned_users = await ctx.guild.bans()
for ban_entry in banned_users:
member_name, member_discriminator = member.split("#")
user = ban_entry.user
if (user.name, user.discriminator) == (
member_name,
member_discriminator,
):
await ctx.guild.unban(user)
await response.respond(
type=7,
embed=discord.Embed(
title="Action confirmed",
description=f"Unbanned {user}",
color=0xFF8000,
),
components=done_components,
)
return
elif response.component.label == "✓":
member = await self.client.fetch_user(int(member))
await ctx.guild.unban(member)
await response.respond(
type=7,
embed=discord.Embed(
title="Action confirmed",
description=f"Unbanned {member}",
color=0xFF8000,
),
components=done_components,
)
else:
await response.respond(
type=7,
embed=discord.Embed(
title="Action Aborted",
description="The action was aborted by clicking the no button",
color=0xDD2E44,
),
components=done_components,
)
await ctx.message.add_reaction(CONFIG["no_emoji"])
embed = Utils.error_embed(STRINGS["error"]["user_not_found"])
await ctx.send(embed=embed)
    @commands.command()
    @commands.guild_only()
    @commands.bot_has_permissions(ban_members=True)
    @commands.has_permissions(ban_members=True)
    @commands.cooldown(1, 5, commands.BucketType.user)
    async def multiban(self,
                       ctx: Context,
                       members: Greedy[Member],
                       *,
                       reason: str = "N/A") -> NoReturn:
        """Bans multiple users in one command; collects the ones it could not ban.

        Attributes:
        -----------
        - `members` - any number of user mentions/IDs (Greedy converter)
        - `reason` - ban reason applied to every member
        """
        s = await Settings(ctx.guild.id)
        lang = await s.get_field("locale", CONFIG["default_locale"])
        STRINGS = Strings(lang)
        # Mentions of members the bot lacked permission to ban.
        not_banned_members = []
        for member in members:
            try:
                await member.ban(reason=reason)
                # NOTE(review): this sends "Members banned" once per member,
                # inside the loop — possibly meant to run once after it.
                await ctx.send("Members banned")
            except discord.Forbidden:
                not_banned_members.append(member.mention)
            else:
                # Ban succeeded: try to DM the member; best-effort since DMs
                # may be closed (hence the deliberate bare swallow below).
                try:
                    embed = Utils.error_embed(
                        STRINGS["moderation"]["dm_ban"].format(
                            ctx.guild.name, reason))
                    await member.send(embed=embed)
                except:
                    pass
        if not not_banned_members:
            # Everyone banned: simple success reaction.
            await ctx.message.add_reaction(CONFIG["yes_emoji"])
        else:
            # Partial failure: warn and list who could not be banned, then
            # clean the warning up after 30 seconds.
            await ctx.message.add_reaction(CONFIG["warn_emoji"])
            msg = await ctx.send(
                Utils.warn_embed(
                    STRINGS["moderation"]["on_not_full_multiban"].format(
                        ", ".join(not_banned_members))))
            await asyncio.sleep(30)
            await msg.delete()
@commands.command()
@commands.guild_only()
@commands.bot_has_permissions(kick_members=True)
@commands.has_permissions(kick_members=True)
@commands.cooldown(1, 5, commands.BucketType.user)
async def kick(self,
ctx: Context,
member: Member,
*,
reason: str = "N/A") -> NoReturn:
"""Kicks the user.
Attributes:
-----------
- `member` - user
- `reason` - kick reason
"""
s = await Settings(ctx.guild.id)
lang = await s.get_field("locale", CONFIG["default_locale"])
STRINGS = Strings(lang)
select_components = [[
Button(style=ButtonStyle.green, label="✓"),
Button(style=ButtonStyle.red, label="X"),
]]
done_components = [[
Button(style=ButtonStyle.grey, label="·", disabled=True),
]]
embedconfirm = discord.Embed(
title="Kick Command",
description="```Do you want to kick this member?```",
)
await ctx.send(embed=embedconfirm, components=select_components)
response = await self.bot.wait_for(
"button_click", check=lambda message: message.author == ctx.author)
if response.component.label == "✓":
await response.respond(
type=7,
embed=discord.Embed(
title="Action Completed",
description=f"Kicked {member} for {reason}",
color=0xDD2E44,
),
components=done_components,
)
if not member.bot:
embed = Utils.error_embed(
STRINGS["moderation"]["dm_kick"].format(ctx.guild, reason))
await member.send(embed=embed)
await asyncio.sleep(5)
await member.kick()
await ctx.message.add_reaction(CONFIG["yes_emoji"])
else:
await response.respond(
type=7,
embed=discord.Embed(
title="Action Aborted",
description="The action was aborted by clicking the no button",
color=0xDD2E44,
),
components=done_components,
)
return
@commands.command(aliases=["clear"])
@commands.guild_only()
@commands.bot_has_permissions(manage_messages=True)
@commands.has_permissions(manage_messages=True)
@commands.cooldown(1, 5, commands.BucketType.user)
async def purge(self, ctx: Context, number: int) -> NoReturn:
"""Deletes a specified number of messages in the current channel.
Attributes:
-----------
- `number` - The number of messages to be deleted.
"""
s = await Settings(ctx.guild.id)
lang = await s.get_field("locale", CONFIG["default_locale"])
STRINGS = Strings(lang)
select_components = [[
Button(style=ButtonStyle.green, label="✓"),
Button(style=ButtonStyle.red, label="X"),
]]
done_components = [[
Button(style=ButtonStyle.grey, label="·", disabled=True),
]]
embedconfirm = discord.Embed(
title="Clear Command",
description=f"```Do you want to remove {number} messages?```",
)
await ctx.send(embed=embedconfirm, components=select_components)
response = await self.bot.wait_for(
"button_click", check=lambda message: message.author == ctx.author)
if response.component.label == "✓":
await response.respond(
type=7,
embed=discord.Embed(
title="Action Completed",
description=f"Purging {number} messages",
color=0xDD2E44,
),
components=done_components,
)
await asyncio.sleep(10)
deleted = await ctx.channel.purge(limit=number + 1)
else:
await response.respond(
type=7,
embed=discord.Embed(
title="Action Aborted",
description="The action was aborted by clicking the no button",
color=0xDD2E44,
),
components=done_components,
)
return
# BUG FIX: aliases was `["setnick, setname"]` — a single comma-joined string,
# so neither alias worked; "setname" also duplicates the command name itself.
@commands.command(aliases=["setnick"])
@commands.guild_only()
@commands.bot_has_permissions(manage_nicknames=True)
@commands.has_permissions(manage_roles=True)
@commands.cooldown(1, 5, commands.BucketType.user)
async def setname(self, ctx: Context, member: Member, *,
                  name: str) -> NoReturn:
    """Sets a new nickname for the given member.

    Attributes:
    -----------
    - `member` - The member whose nickname is changed.
    - `name` - The new nickname (max. 32 characters, Discord's limit).
    """
    s = await Settings(ctx.guild.id)
    lang = await s.get_field("locale", CONFIG["default_locale"])
    STRINGS = Strings(lang)
    if len(name) > 32:
        # Discord rejects nicknames longer than 32 characters.
        embed = Utils.error_embed(STRINGS["error"]["too_long_name"])
        await ctx.send(embed=embed)
    elif (ctx.message.author.guild_permissions.manage_nicknames
          or member == ctx.message.author):
        await member.edit(nick=name)
        await ctx.message.add_reaction(CONFIG["yes_emoji"])
    else:
        embed = Utils.error_embed(STRINGS["error"]["missing_perms"])
        await ctx.send(embed=embed)
@commands.command()
@commands.guild_only()
@commands.bot_has_permissions(manage_roles=True)
@commands.cooldown(1, 5, commands.BucketType.user)
async def mute(self,
               ctx: Context,
               member: Member,
               *,
               reason: str = "N/A") -> NoReturn:
    """Mutes a member by assigning the guild's mute role.

    Creates the mute role (and stores its id in the guild settings) on
    first use, then denies send/speak permissions for it in every text
    channel before assigning it.

    Attributes:
    -----------
    - `member` - The member to mute.
    - `reason` - Reason for the mute (currently unused).
    """
    # NOTE(review): unlike `unmute`, this command carries no
    # `has_permissions(manage_roles=True)` check — confirm this is intended.
    s = await Settings(ctx.guild.id)
    lang = await s.get_field("locale", CONFIG["default_locale"])
    STRINGS = Strings(lang)
    mute_role_id = await s.get_field("mute_role_id")
    if (mute_role_id is None or
            discord.utils.get(ctx.guild.roles, id=mute_role_id) is None):
        embed = Utils.done_embed(
            STRINGS["moderation"]["on_mute_role_create"])
        await ctx.send(embed=embed)
        mute_role = await ctx.guild.create_role(name="Muted")
        await s.set_field("mute_role_id", mute_role.id)
        # (Removed a redundant re-read of "mute_role_id" right after
        # writing it — the value was never used afterwards.)
    else:
        mute_role = discord.utils.get(ctx.guild.roles, id=mute_role_id)
        # Membership test replaces the manual loop over member.roles.
        if mute_role in member.roles:
            embed = Utils.error_embed(
                STRINGS["error"]["already_muted"])
            await ctx.send(embed=embed)
            return
    for channel in ctx.guild.text_channels:
        await channel.set_permissions(mute_role,
                                      read_messages=True,
                                      send_messages=False,
                                      speak=False)
    await member.add_roles(mute_role)
    await ctx.message.add_reaction(CONFIG["yes_emoji"])
@commands.command()
@commands.guild_only()
@commands.bot_has_permissions(manage_roles=True)
@commands.has_permissions(manage_roles=True)
@commands.cooldown(1, 5, commands.BucketType.user)
async def unmute(self,
                 ctx: Context,
                 member: Member,
                 *,
                 reason: str = "N/A") -> NoReturn:
    """Unmutes a member by removing the guild's mute role.

    Attributes:
    -----------
    - `member` - The member to unmute.
    - `reason` - Reason for the unmute (currently unused).
    """
    # NOTE(review): passes None as `self` to Utils.get_mute_role —
    # presumably the helper ignores its first argument; confirm.
    mute_role = discord.utils.get(ctx.guild.roles,
                                  id=Utils.get_mute_role(
                                      None, ctx.message))
    if mute_role is None:
        # FIXME
        # Hard-coded, untranslated Russian fallback message
        # (roughly: "there is no mute role, ok"). Should go through STRINGS.
        await ctx.send("нету роли мута ок да\n\n\nок")
    else:
        await member.remove_roles(mute_role)
        await ctx.message.add_reaction(CONFIG["yes_emoji"])
@commands.command()
@commands.guild_only()
@commands.bot_has_permissions(manage_roles=True)
@commands.has_permissions(manage_roles=True)
@commands.cooldown(1, 30, commands.BucketType.user)
# `RoleConverter` will automatically convert it to a `discord.Role` instance
async def lockdownrole(self, ctx, role: discord.Role):
    """Denies `send_messages` for the given role in every guild channel."""
    settings = await Settings(ctx.guild.id)
    locale = await settings.get_field("locale", CONFIG["default_locale"])
    STRINGS = Strings(locale)
    for target_channel in ctx.guild.channels:
        await target_channel.set_permissions(role, send_messages=False)
    notice = discord.Embed(
        title=STRINGS["moderation"]["lockdowntitleone"],
        description=STRINGS["moderation"]["lockdowndescone"],
    )
    await ctx.send(embed=notice)
@commands.command()
@commands.guild_only()
@commands.bot_has_permissions(manage_roles=True)
@commands.has_permissions(manage_roles=True)
@commands.cooldown(1, 30, commands.BucketType.user)
async def unlockrole(self, ctx, role: discord.Role):
    """Re-allows `send_messages` for the given role in every guild channel."""
    settings = await Settings(ctx.guild.id)
    locale = await settings.get_field("locale", CONFIG["default_locale"])
    STRINGS = Strings(locale)
    for target_channel in ctx.guild.channels:
        await target_channel.set_permissions(role, send_messages=True)
    notice = discord.Embed(
        title=STRINGS["moderation"]["lockdownliftedtitleone"],
        description=STRINGS["moderation"]["lockdownlifteddescone"],
        color=0x6E8F5D,
    )
    await ctx.send(embed=notice)
@commands.command()
@commands.guild_only()
@commands.bot_has_permissions(manage_roles=True)
@commands.has_permissions(manage_roles=True)
@commands.cooldown(1, 30, commands.BucketType.user)
async def lockdown(self, ctx):
    """Denies `send_messages` for @everyone in every guild channel."""
    settings = await Settings(ctx.guild.id)
    locale = await settings.get_field("locale", CONFIG["default_locale"])
    STRINGS = Strings(locale)
    everyone = ctx.guild.default_role
    for target_channel in ctx.guild.channels:
        await target_channel.set_permissions(everyone, send_messages=False)
    notice = discord.Embed(
        title=STRINGS["moderation"]["lockdowntitleone"],
        description=STRINGS["moderation"]["lockdowndescone"],
    )
    await ctx.send(embed=notice)
@commands.command()
@commands.guild_only()
@commands.bot_has_permissions(manage_roles=True)
@commands.has_permissions(manage_roles=True)
@commands.cooldown(1, 30, commands.BucketType.user)
async def unlock(self, ctx):
    """Re-allows `send_messages` for @everyone in every guild channel."""
    settings = await Settings(ctx.guild.id)
    locale = await settings.get_field("locale", CONFIG["default_locale"])
    STRINGS = Strings(locale)
    everyone = ctx.guild.default_role
    for target_channel in ctx.guild.channels:
        await target_channel.set_permissions(everyone, send_messages=True)
    notice = discord.Embed(
        title=STRINGS["moderation"]["lockdownliftedtitleone"],
        description=STRINGS["moderation"]["lockdownlifteddescone"],
        color=0x6E8F5D,
    )
    await ctx.send(embed=notice)
@commands.command()
@commands.guild_only()
@commands.bot_has_permissions(manage_roles=True)
@commands.has_permissions(manage_roles=True)
@commands.cooldown(1, 30, commands.BucketType.user)
async def channellock(self, ctx):
    """Denies `send_messages` for @everyone in the current channel only."""
    settings = await Settings(ctx.guild.id)
    locale = await settings.get_field("locale", CONFIG["default_locale"])
    STRINGS = Strings(locale)
    await ctx.channel.set_permissions(ctx.guild.default_role,
                                      send_messages=False)
    notice = discord.Embed(
        title=STRINGS["moderation"]["channellockdowntitle"],
        description=STRINGS["moderation"]["channellockdowndesc"],
        color=0x000000,
    )
    await ctx.send(embed=notice)
@commands.command()
@commands.guild_only()
@commands.bot_has_permissions(manage_roles=True)
@commands.has_permissions(manage_roles=True)
@commands.cooldown(1, 30, commands.BucketType.user)
async def channelunlock(self, ctx):
    """Re-allows `send_messages` for @everyone in the current channel only."""
    settings = await Settings(ctx.guild.id)
    locale = await settings.get_field("locale", CONFIG["default_locale"])
    STRINGS = Strings(locale)
    await ctx.channel.set_permissions(ctx.guild.default_role,
                                      send_messages=True)
    notice = discord.Embed(
        title=STRINGS["moderation"]["channellockdownliftedtitle"],
        description=STRINGS["moderation"]["channellockdownlifteddesc"],
        color=0x6E8F5D,
    )
    await ctx.send(embed=notice)
def setup(bot: Bot) -> NoReturn:
    """Cog entry point called by `bot.load_extension`."""
    bot.add_cog(Moderation(bot))
    # Log that the cog finished loading, using its registered display name.
    Logger.cog_loaded(bot.get_cog("Moderation").name)
| 37.5
| 87
| 0.553548
| 2,099
| 20,925
| 5.398285
| 0.109576
| 0.024711
| 0.028506
| 0.024005
| 0.79031
| 0.759951
| 0.724561
| 0.71856
| 0.706822
| 0.693849
| 0
| 0.008295
| 0.337491
| 20,925
| 557
| 88
| 37.567325
| 0.808122
| 0.003823
| 0
| 0.703625
| 0
| 0
| 0.088265
| 0.006801
| 0
| 0
| 0.005163
| 0.001795
| 0
| 1
| 0.004264
| false
| 0.004264
| 0.017058
| 0
| 0.031983
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
8aafb55d5256078a3136eb2437c10173b73784b3
| 91
|
py
|
Python
|
examplesFromForkedLibraries/SchusterLabQoc1/quantum_optimal_control/__init__.py
|
rayonde/yarn
|
a8259292791b3332e8521baeb6c7ee78afb53ae2
|
[
"MIT"
] | 1
|
2020-07-09T13:31:21.000Z
|
2020-07-09T13:31:21.000Z
|
yarn/SchusterLabQoc1/__init__.py
|
rayonde/yarn
|
a8259292791b3332e8521baeb6c7ee78afb53ae2
|
[
"MIT"
] | null | null | null |
yarn/SchusterLabQoc1/__init__.py
|
rayonde/yarn
|
a8259292791b3332e8521baeb6c7ee78afb53ae2
|
[
"MIT"
] | null | null | null |
# IMPORTS
# Re-export the package's public API at the top level so callers can do
# `from quantum_optimal_control import ...` directly.
from .core import *
from .helper_functions import *
from .main_grape import *
| 22.75
| 32
| 0.736264
| 12
| 91
| 5.416667
| 0.666667
| 0.307692
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.186813
| 91
| 4
| 33
| 22.75
| 0.878378
| 0.076923
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
8ab38a7279b9da97ec8c4bfe2bf466de1d0a4e4b
| 22,627
|
py
|
Python
|
colour/models/rgb/ictcp.py
|
colour-science/colour
|
6d9b1b8b9e96b5a3c3e3b64d9954be808e4e37a8
|
[
"BSD-3-Clause"
] | 1,380
|
2015-01-10T12:30:33.000Z
|
2022-03-30T10:19:57.000Z
|
colour/models/rgb/ictcp.py
|
colour-science/colour
|
6d9b1b8b9e96b5a3c3e3b64d9954be808e4e37a8
|
[
"BSD-3-Clause"
] | 638
|
2015-01-02T10:49:05.000Z
|
2022-03-29T10:16:22.000Z
|
colour/models/rgb/ictcp.py
|
colour-science/colour
|
6d9b1b8b9e96b5a3c3e3b64d9954be808e4e37a8
|
[
"BSD-3-Clause"
] | 250
|
2015-01-21T15:27:19.000Z
|
2022-03-30T10:23:58.000Z
|
# -*- coding: utf-8 -*-
"""
:math:`IC_TC_P` Colour Encoding
===============================
Defines the :math:`IC_TC_P` colour encoding related transformations:
- :func:`colour.RGB_to_ICtCp`
- :func:`colour.ICtCp_to_RGB`
- :func:`colour.XYZ_to_ICtCp`
- :func:`colour.ICtCp_to_XYZ`
References
----------
- :cite:`Dolby2016a` : Dolby. (2016). WHAT IS ICtCp? - INTRODUCTION.
https://www.dolby.com/us/en/technologies/dolby-vision/ICtCp-white-paper.pdf
- :cite:`InternationalTelecommunicationUnion2018` : International
Telecommunication Union. (2018). Recommendation ITU-R BT.2100-2 - Image
parameter values for high dynamic range television for use in production
and international programme exchange.
https://www.itu.int/dms_pubrec/itu-r/rec/bt/\
R-REC-BT.2100-2-201807-I!!PDF-E.pdf
- :cite:`Lu2016c` : Lu, T., Pu, F., Yin, P., Chen, T., Husak, W., Pytlarz,
J., Atkins, R., Froehlich, J., & Su, G.-M. (2016). ITP Colour Space and Its
Compression Performance for High Dynamic Range and Wide Colour Gamut Video
Distribution. ZTE Communications, 14(1), 32-38.
"""
import numpy as np
from colour.algebra import vector_dot
from colour.colorimetry import CCS_ILLUMINANTS
from colour.models.rgb import RGB_COLOURSPACES, RGB_to_XYZ, XYZ_to_RGB
from colour.models.rgb.transfer_functions import (
eotf_ST2084, eotf_inverse_ST2084, oetf_HLG_BT2100, oetf_inverse_HLG_BT2100)
from colour.utilities import (domain_range_scale, from_range_1, to_domain_1,
validate_method)
__author__ = 'Colour Developers'
__copyright__ = 'Copyright (C) 2013-2021 - Colour Developers'
__license__ = 'New BSD License - https://opensource.org/licenses/BSD-3-Clause'
__maintainer__ = 'Colour Developers'
__email__ = 'colour-developers@colour-science.org'
__status__ = 'Production'
__all__ = [
'MATRIX_ICTCP_RGB_TO_LMS', 'MATRIX_ICTCP_LMS_TO_RGB',
'MATRIX_ICTCP_LMS_P_TO_ICTCP', 'MATRIX_ICTCP_ICTCP_TO_LMS_P',
'MATRIX_ICTCP_LMS_P_TO_ICTCP_HLG_BT2100_2',
'MATRIX_ICTCP_ICTCP_TO_LMS_P_HLG_BT2100_2', 'RGB_to_ICtCp', 'ICtCp_to_RGB',
'XYZ_to_ICtCp', 'ICtCp_to_XYZ'
]
MATRIX_ICTCP_RGB_TO_LMS = np.array([
[1688, 2146, 262],
[683, 2951, 462],
[99, 309, 3688],
]) / 4096
"""
*ITU-R BT.2020* colourspace to normalised cone responses matrix.
MATRIX_ICTCP_RGB_TO_LMS : array_like, (3, 3)
"""
MATRIX_ICTCP_LMS_TO_RGB = np.linalg.inv(MATRIX_ICTCP_RGB_TO_LMS)
"""
:math:`IC_TC_P` colourspace normalised cone responses to *ITU-R BT.2020*
colourspace matrix.
MATRIX_ICTCP_LMS_TO_RGB : array_like, (3, 3)
"""
MATRIX_ICTCP_LMS_P_TO_ICTCP = np.array([
[2048, 2048, 0],
[6610, -13613, 7003],
[17933, -17390, -543],
]) / 4096
"""
:math:`LMS_p` *SMPTE ST 2084:2014* encoded normalised cone responses to
:math:`IC_TC_P` colour encoding matrix.
MATRIX_ICTCP_LMS_P_TO_ICTCP : array_like, (3, 3)
"""
MATRIX_ICTCP_ICTCP_TO_LMS_P = np.linalg.inv(MATRIX_ICTCP_LMS_P_TO_ICTCP)
"""
:math:`IC_TC_P` colour encoding to :math:`LMS_p` *SMPTE ST 2084:2014* encoded
normalised cone responses matrix.
MATRIX_ICTCP_ICTCP_TO_LMS_P : array_like, (3, 3)
"""
MATRIX_ICTCP_LMS_P_TO_ICTCP_HLG_BT2100_2 = np.array([
[2048, 2048, 0],
[3625, -7465, 3840],
[9500, -9212, -288],
]) / 4096
"""
:math:`LMS_p` *SMPTE ST 2084:2014* encoded normalised cone responses to
:math:`IC_TC_P` colour encoding matrix as given in *ITU-R BT.2100-2*.
MATRIX_ICTCP_LMS_P_TO_ICTCP_HLG_BT2100_2 : array_like, (3, 3)
"""
MATRIX_ICTCP_ICTCP_TO_LMS_P_HLG_BT2100_2 = np.linalg.inv(
MATRIX_ICTCP_LMS_P_TO_ICTCP_HLG_BT2100_2)
"""
:math:`IC_TC_P` colour encoding to :math:`LMS_p` *SMPTE ST 2084:2014* encoded
normalised cone responses matrix as given in *ITU-R BT.2100-2*.
MATRIX_ICTCP_ICTCP_TO_LMS_P_HLG_BT2100_2 : array_like, (3, 3)
"""
def RGB_to_ICtCp(RGB, method='Dolby 2016', L_p=10000):
    """
    Converts from *ITU-R BT.2020* colourspace to :math:`IC_TC_P` colour
    encoding.

    Parameters
    ----------
    RGB : array_like
        *ITU-R BT.2020* colourspace array.
    method : unicode, optional
        **{'Dolby 2016', 'ITU-R BT.2100-1 HLG', 'ITU-R BT.2100-1 PQ',
        'ITU-R BT.2100-2 HLG', 'ITU-R BT.2100-2 PQ'}**,
        Computation method. The *PQ* variants are aliases for *Dolby 2016*
        and use the *SMPTE ST 2084:2014* inverse EOTF with the
        :cite:`Dolby2016a` matrix; the *HLG* variants use the
        *Recommendation ITU-R BT.2100* *Reference HLG* OETF, with
        *ITU-R BT.2100-2 HLG* applying the custom matrix from
        :cite:`InternationalTelecommunicationUnion2018`.
    L_p : numeric, optional
        Display peak luminance :math:`cd/m^2` for *SMPTE ST 2084:2014*
        non-linear encoding; keep the default :math:`10000 cd/m^2` for
        practical applications.

    Returns
    -------
    ndarray
        :math:`IC_TC_P` colour encoding array.

    Warnings
    --------
    The underlying *SMPTE ST 2084:2014* transfer function is an absolute
    transfer function; its effective domain is [0.0001, 10000].

    References
    ----------
    :cite:`Dolby2016a`, :cite:`Lu2016c`

    Examples
    --------
    >>> RGB = np.array([0.45620519, 0.03081071, 0.04091952])
    >>> RGB_to_ICtCp(RGB)  # doctest: +ELLIPSIS
    array([ 0.0735136...,  0.0047525...,  0.0935159...])
    >>> RGB_to_ICtCp(RGB, method='ITU-R BT.2100-2 HLG')  # doctest: +ELLIPSIS
    array([ 0.6256789..., -0.0198449...,  0.3591125...])
    """

    RGB = to_domain_1(RGB)
    method = validate_method(method, [
        'Dolby 2016', 'ITU-R BT.2100-1 HLG', 'ITU-R BT.2100-1 PQ',
        'ITU-R BT.2100-2 HLG', 'ITU-R BT.2100-2 PQ'
    ])

    uses_hlg = 'hlg' in method
    uses_BT2100_2 = '2100-2' in method

    LMS = vector_dot(MATRIX_ICTCP_RGB_TO_LMS, RGB)

    # Non-linear encoding of the cone responses: HLG OETF for the HLG
    # methods, ST 2084 inverse EOTF otherwise.
    with domain_range_scale('ignore'):
        if uses_hlg:
            LMS_p = oetf_HLG_BT2100(LMS)
        else:
            LMS_p = eotf_inverse_ST2084(LMS, L_p)

    # Only the BT.2100-2 HLG variant uses the revised encoding matrix.
    if uses_hlg and uses_BT2100_2:
        encoding_matrix = MATRIX_ICTCP_LMS_P_TO_ICTCP_HLG_BT2100_2
    else:
        encoding_matrix = MATRIX_ICTCP_LMS_P_TO_ICTCP
    ICtCp = vector_dot(encoding_matrix, LMS_p)

    return from_range_1(ICtCp)
def ICtCp_to_RGB(ICtCp, method='Dolby 2016', L_p=10000):
    """
    Converts from :math:`IC_TC_P` colour encoding to *ITU-R BT.2020*
    colourspace.

    Parameters
    ----------
    ICtCp : array_like
        :math:`IC_TC_P` colour encoding array.
    method : unicode, optional
        **{'Dolby 2016', 'ITU-R BT.2100-1 HLG', 'ITU-R BT.2100-1 PQ',
        'ITU-R BT.2100-2 HLG', 'ITU-R BT.2100-2 PQ'}**,
        Computation method. The *PQ* variants are aliases for *Dolby 2016*
        and use the *SMPTE ST 2084:2014* EOTF with the :cite:`Dolby2016a`
        matrix; the *HLG* variants use the *Recommendation ITU-R BT.2100*
        *Reference HLG* inverse OETF, with *ITU-R BT.2100-2 HLG* applying
        the custom matrix from
        :cite:`InternationalTelecommunicationUnion2018`.
    L_p : numeric, optional
        Display peak luminance :math:`cd/m^2` for *SMPTE ST 2084:2014*
        non-linear encoding; keep the default :math:`10000 cd/m^2` for
        practical applications.

    Returns
    -------
    ndarray
        *ITU-R BT.2020* colourspace array.

    Warnings
    --------
    The underlying *SMPTE ST 2084:2014* transfer function is an absolute
    transfer function.

    References
    ----------
    :cite:`Dolby2016a`, :cite:`Lu2016c`

    Examples
    --------
    >>> ICtCp = np.array([0.07351364, 0.00475253, 0.09351596])
    >>> ICtCp_to_RGB(ICtCp)  # doctest: +ELLIPSIS
    array([ 0.4562052...,  0.0308107...,  0.0409195...])
    >>> ICtCp = np.array([0.62567899, -0.01984490, 0.35911259])
    >>> ICtCp_to_RGB(ICtCp, method='ITU-R BT.2100-2 HLG')  # doctest: +ELLIPSIS
    array([ 0.4562052...,  0.0308107...,  0.0409195...])
    """

    ICtCp = to_domain_1(ICtCp)
    method = validate_method(method, [
        'Dolby 2016', 'ITU-R BT.2100-1 HLG', 'ITU-R BT.2100-1 PQ',
        'ITU-R BT.2100-2 HLG', 'ITU-R BT.2100-2 PQ'
    ])

    uses_hlg = 'hlg' in method
    uses_BT2100_2 = '2100-2' in method

    # Only the BT.2100-2 HLG variant uses the revised decoding matrix.
    if uses_hlg and uses_BT2100_2:
        decoding_matrix = MATRIX_ICTCP_ICTCP_TO_LMS_P_HLG_BT2100_2
    else:
        decoding_matrix = MATRIX_ICTCP_ICTCP_TO_LMS_P
    LMS_p = vector_dot(decoding_matrix, ICtCp)

    # Non-linear decoding of the cone responses: HLG inverse OETF for the
    # HLG methods, ST 2084 EOTF otherwise.
    with domain_range_scale('ignore'):
        if uses_hlg:
            LMS = oetf_inverse_HLG_BT2100(LMS_p)
        else:
            LMS = eotf_ST2084(LMS_p, L_p)

    RGB = vector_dot(MATRIX_ICTCP_LMS_TO_RGB, LMS)

    return from_range_1(RGB)
def XYZ_to_ICtCp(XYZ,
                 illuminant=CCS_ILLUMINANTS[
                     'CIE 1931 2 Degree Standard Observer']['D65'],
                 chromatic_adaptation_transform='CAT02',
                 method='Dolby 2016',
                 L_p=10000):
    """
    Converts from *CIE XYZ* tristimulus values to :math:`IC_TC_P` colour
    encoding.

    Parameters
    ----------
    XYZ : array_like
        *CIE XYZ* tristimulus values.
    illuminant : array_like, optional
        Source illuminant chromaticity coordinates.
    chromatic_adaptation_transform : unicode, optional
        **{'CAT02', 'XYZ Scaling', 'Von Kries', 'Bradford', 'Sharp',
        'Fairchild', 'CMCCAT97', 'CMCCAT2000', 'CAT02 Brill 2008', 'CAT16',
        'Bianco 2010', 'Bianco PC 2010'}**,
        *Chromatic adaptation* transform.
    method : unicode, optional
        **{'Dolby 2016', 'ITU-R BT.2100-1 HLG', 'ITU-R BT.2100-1 PQ',
        'ITU-R BT.2100-2 HLG', 'ITU-R BT.2100-2 PQ'}**,
        Computation method. The *PQ* variants are aliases for *Dolby 2016*
        and use the *SMPTE ST 2084:2014* inverse EOTF with the
        :cite:`Dolby2016a` matrix; the *HLG* variants use the
        *Recommendation ITU-R BT.2100* *Reference HLG* OETF, with
        *ITU-R BT.2100-2 HLG* applying the custom matrix from
        :cite:`InternationalTelecommunicationUnion2018`.
    L_p : numeric, optional
        Display peak luminance :math:`cd/m^2` for *SMPTE ST 2084:2014*
        non-linear encoding; keep the default :math:`10000 cd/m^2` for
        practical applications.

    Returns
    -------
    ndarray
        :math:`IC_TC_P` colour encoding array.

    Warnings
    --------
    The underlying *SMPTE ST 2084:2014* transfer function is an absolute
    transfer function.

    Notes
    -----
    -   The *ITU-R BT.2100-1 PQ* and *ITU-R BT.2100-2 PQ* methods are aliases
        for the *Dolby 2016* method.
    -   The underlying *SMPTE ST 2084:2014* transfer function is an absolute
        transfer function, thus the domain and range values for the
        *Reference* and *1* scales are only indicative that the data is not
        affected by scale transformations. The effective domain of
        *SMPTE ST 2084:2014* inverse electro-optical transfer function
        (EOTF / EOCF) is [0.0001, 10000].

    +------------+-----------------------+------------------+
    | **Domain** | **Scale - Reference** | **Scale - 1**    |
    +============+=======================+==================+
    | ``XYZ``    | ``UN``                | ``UN``           |
    +------------+-----------------------+------------------+

    +------------+-----------------------+------------------+
    | **Range**  | **Scale - Reference** | **Scale - 1**    |
    +============+=======================+==================+
    | ``ICtCp``  | ``I``  : [0, 1]       | ``I``  : [0, 1]  |
    |            |                       |                  |
    |            | ``CT`` : [-1, 1]      | ``CT`` : [-1, 1] |
    |            |                       |                  |
    |            | ``CP`` : [-1, 1]      | ``CP`` : [-1, 1] |
    +------------+-----------------------+------------------+

    References
    ----------
    :cite:`Dolby2016a`, :cite:`Lu2016c`

    Examples
    --------
    >>> XYZ = np.array([0.20654008, 0.12197225, 0.05136952])
    >>> XYZ_to_ICtCp(XYZ)  # doctest: +ELLIPSIS
    array([ 0.0685809..., -0.0028384...,  0.0602098...])
    >>> XYZ_to_ICtCp(XYZ, method='ITU-R BT.2100-2 HLG')  # doctest: +ELLIPSIS
    array([ 0.5924279..., -0.0374073...,  0.2512267...])
    """

    # Adapt XYZ to the BT.2020 whitepoint, convert to linear BT.2020 RGB,
    # then delegate the ICtCp encoding proper to RGB_to_ICtCp.
    BT2020 = RGB_COLOURSPACES['ITU-R BT.2020']

    RGB = XYZ_to_RGB(
        XYZ,
        illuminant,
        BT2020.whitepoint,
        BT2020.matrix_XYZ_to_RGB,
        chromatic_adaptation_transform,
    )

    return RGB_to_ICtCp(RGB, method, L_p)
def ICtCp_to_XYZ(ICtCp,
                 illuminant=CCS_ILLUMINANTS[
                     'CIE 1931 2 Degree Standard Observer']['D65'],
                 chromatic_adaptation_transform='CAT02',
                 method='Dolby 2016',
                 L_p=10000):
    """
    Converts from :math:`IC_TC_P` colour encoding to *CIE XYZ* tristimulus
    values.

    Parameters
    ----------
    ICtCp : array_like
        :math:`IC_TC_P` colour encoding array.
    illuminant : array_like, optional
        Source illuminant chromaticity coordinates.
    chromatic_adaptation_transform : unicode, optional
        **{'CAT02', 'XYZ Scaling', 'Von Kries', 'Bradford', 'Sharp',
        'Fairchild', 'CMCCAT97', 'CMCCAT2000', 'CAT02 Brill 2008', 'CAT16',
        'Bianco 2010', 'Bianco PC 2010'}**,
        *Chromatic adaptation* transform.
    method : unicode, optional
        **{'Dolby 2016', 'ITU-R BT.2100-1 HLG', 'ITU-R BT.2100-1 PQ',
        'ITU-R BT.2100-2 HLG', 'ITU-R BT.2100-2 PQ'}**,
        Computation method. The *PQ* variants are aliases for *Dolby 2016*
        and use the *SMPTE ST 2084:2014* EOTF with the :cite:`Dolby2016a`
        matrix; the *HLG* variants use the *Recommendation ITU-R BT.2100*
        *Reference HLG* inverse OETF, with *ITU-R BT.2100-2 HLG* applying
        the custom matrix from
        :cite:`InternationalTelecommunicationUnion2018`.
    L_p : numeric, optional
        Display peak luminance :math:`cd/m^2` for *SMPTE ST 2084:2014*
        non-linear encoding; keep the default :math:`10000 cd/m^2` for
        practical applications.

    Returns
    -------
    ndarray
        *CIE XYZ* tristimulus values.

    Warnings
    --------
    The underlying *SMPTE ST 2084:2014* transfer function is an absolute
    transfer function.

    References
    ----------
    :cite:`Dolby2016a`, :cite:`Lu2016c`

    Examples
    --------
    >>> ICtCp = np.array([0.06858097, -0.00283842, 0.06020983])
    >>> ICtCp_to_XYZ(ICtCp)  # doctest: +ELLIPSIS
    array([ 0.2065400...,  0.1219722...,  0.0513695...])
    >>> ICtCp = np.array([0.59242792, -0.03740730, 0.25122675])
    >>> ICtCp_to_XYZ(ICtCp, method='ITU-R BT.2100-2 HLG')  # doctest: +ELLIPSIS
    array([ 0.2065400...,  0.1219722...,  0.0513695...])
    """

    # Decode ICtCp into linear BT.2020 RGB, then convert those values to
    # CIE XYZ, adapting from the BT.2020 whitepoint to `illuminant`.
    colourspace = RGB_COLOURSPACES['ITU-R BT.2020']

    RGB = ICtCp_to_RGB(ICtCp, method, L_p)

    return RGB_to_XYZ(
        RGB,
        colourspace.whitepoint,
        illuminant,
        colourspace.matrix_RGB_to_XYZ,
        chromatic_adaptation_transform,
    )
| 39.147059
| 79
| 0.544571
| 2,786
| 22,627
| 4.268126
| 0.11809
| 0.030948
| 0.045917
| 0.069801
| 0.811286
| 0.784038
| 0.747288
| 0.735851
| 0.723741
| 0.700278
| 0
| 0.101353
| 0.265258
| 22,627
| 577
| 80
| 39.214905
| 0.613895
| 0.709241
| 0
| 0.378641
| 0
| 0
| 0.179487
| 0.050812
| 0
| 0
| 0
| 0
| 0
| 1
| 0.038835
| false
| 0
| 0.058252
| 0
| 0.135922
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
8ad0e06c8b83d2326eeb7a88b1b69f1b8e6d4b97
| 131
|
py
|
Python
|
tests/test_linux.py
|
iot-spectator/iot-health
|
ff5cf5b3613d47fb990751259fab68ad8940b1c4
|
[
"MIT"
] | null | null | null |
tests/test_linux.py
|
iot-spectator/iot-health
|
ff5cf5b3613d47fb990751259fab68ad8940b1c4
|
[
"MIT"
] | 22
|
2020-10-05T00:31:31.000Z
|
2021-05-15T06:37:37.000Z
|
tests/test_linux.py
|
iot-spectator/iot-health
|
ff5cf5b3613d47fb990751259fab68ad8940b1c4
|
[
"MIT"
] | null | null | null |
"""Unit tests for Linux module."""
from iothealth import linux
def test_basic():
assert linux.Linux().summary() is not None
| 16.375
| 46
| 0.70229
| 19
| 131
| 4.789474
| 0.842105
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.175573
| 131
| 7
| 47
| 18.714286
| 0.842593
| 0.21374
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.333333
| 1
| 0.333333
| true
| 0
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
8ad61fc7c648eaf74f1d1b7775a1ad9aa2c945d6
| 23,952
|
py
|
Python
|
tf_agents/bandits/agents/neural_linucb_agent_test.py
|
Mdlglobal-atlassian-net/agents
|
43c5d4e0b924c45b33291dc73a305d4a8d79c170
|
[
"Apache-2.0"
] | 1
|
2020-06-07T06:34:12.000Z
|
2020-06-07T06:34:12.000Z
|
tf_agents/bandits/agents/neural_linucb_agent_test.py
|
yj1990/agents
|
ba3817ea48d574d314017542e1e4858566f953f4
|
[
"Apache-2.0"
] | null | null | null |
tf_agents/bandits/agents/neural_linucb_agent_test.py
|
yj1990/agents
|
ba3817ea48d574d314017542e1e4858566f953f4
|
[
"Apache-2.0"
] | 2
|
2020-06-05T18:38:16.000Z
|
2020-07-08T14:41:42.000Z
|
# coding=utf-8
# Copyright 2018 The TF-Agents Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for tf_agents.bandits.agents.neural_linucb_agent."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
from absl.testing import parameterized
import numpy as np
import tensorflow as tf # pylint: disable=g-explicit-tensorflow-version-import
import tensorflow_probability as tfp
from tf_agents.bandits.agents import neural_linucb_agent
from tf_agents.bandits.agents import utils as bandit_utils
from tf_agents.bandits.drivers import driver_utils
from tf_agents.bandits.networks import global_and_arm_feature_network
from tf_agents.bandits.policies import policy_utilities
from tf_agents.bandits.specs import utils as bandit_spec_utils
from tf_agents.networks import network
from tf_agents.specs import tensor_spec
from tf_agents.trajectories import policy_step
from tf_agents.trajectories import time_step
from tf_agents.utils import common
from tensorflow.python.framework import test_util # pylint: disable=g-direct-tensorflow-import # TF internal
tfd = tfp.distributions
class DummyNet(network.Network):
  """Minimal encoder network: one dense layer with constant initial weights.

  Weights are all ones and biases all zeros, so the initial output is fully
  determined by the input — convenient for deterministic agent tests.
  """

  def __init__(self, observation_spec, encoding_dim=10):
    super(DummyNet, self).__init__(
        observation_spec, state_spec=(), name='DummyNet')
    context_dim = observation_spec.shape[0]

    # Store custom layers that can be serialized through the Checkpointable API.
    self._dummy_layers = [
        tf.keras.layers.Dense(
            encoding_dim,
            kernel_initializer=tf.compat.v1.initializers.constant(
                np.ones([context_dim, encoding_dim])),
            bias_initializer=tf.compat.v1.initializers.constant(
                np.zeros([encoding_dim])))
    ]

  def call(self, inputs, step_type=None, network_state=()):
    """Applies the dense layer(s) to `inputs`; `step_type` is ignored."""
    del step_type
    # Cast up-front so integer observations are accepted.
    inputs = tf.cast(inputs, tf.float32)
    for layer in self._dummy_layers:
      inputs = layer(inputs)
    return inputs, network_state
def test_cases():
  """Returns the named parameter sets shared by tests in this module."""
  cases = [
      {
          'testcase_name': '_batch1_contextdim10',
          'batch_size': 1,
          'context_dim': 10,
      },
      {
          'testcase_name': '_batch4_contextdim5',
          'batch_size': 4,
          'context_dim': 5,
      },
  ]
  return parameterized.named_parameters(*cases)
def _get_initial_and_final_steps(batch_size, context_dim):
  """Builds matching FIRST/LAST `TimeStep`s for a one-step bandit episode.

  The FIRST step observes `range(batch_size * context_dim)` reshaped to
  `[batch_size, context_dim]` with zero reward; the LAST step observes the
  same values shifted by 100 and carries a uniformly random reward in [0, 1).
  """
  observation = np.arange(batch_size * context_dim).reshape(
      [batch_size, context_dim])
  reward = np.random.uniform(0.0, 1.0, [batch_size])

  def _make_step(step_type, step_reward, step_observation):
    # Discount is constant 1.0 in both steps.
    return time_step.TimeStep(
        tf.constant(
            step_type, dtype=tf.int32, shape=[batch_size], name='step_type'),
        tf.constant(
            step_reward, dtype=tf.float32, shape=[batch_size], name='reward'),
        tf.constant(
            1.0, dtype=tf.float32, shape=[batch_size], name='discount'),
        tf.constant(
            step_observation,
            dtype=tf.float32,
            shape=[batch_size, context_dim],
            name='observation'))

  initial_step = _make_step(time_step.StepType.FIRST, 0.0, observation)
  final_step = _make_step(time_step.StepType.LAST, reward, observation + 100.0)
  return initial_step, final_step
def _get_initial_and_final_steps_with_action_mask(batch_size,
                                                  context_dim,
                                                  num_actions=None):
  """Like `_get_initial_and_final_steps`, but observations carry a mask.

  Each observation is a `(context, mask)` tuple where the mask is all ones
  except that action `i` is disallowed in batch row `i` (a flipped identity).
  """
  observation = tf.constant(
      np.arange(batch_size * context_dim).reshape([batch_size, context_dim]),
      dtype=tf.float32)
  mask = 1 - tf.eye(batch_size, num_columns=num_actions, dtype=tf.int32)
  reward = np.random.uniform(0.0, 1.0, [batch_size])

  def _make_step(step_type, step_reward, masked_observation):
    return time_step.TimeStep(
        tf.constant(
            step_type, dtype=tf.int32, shape=[batch_size], name='step_type'),
        tf.constant(
            step_reward, dtype=tf.float32, shape=[batch_size], name='reward'),
        tf.constant(
            1.0, dtype=tf.float32, shape=[batch_size], name='discount'),
        masked_observation)

  initial_step = _make_step(time_step.StepType.FIRST, 0.0, (observation, mask))
  final_step = _make_step(
      time_step.StepType.LAST, reward, (observation + 100.0, mask))
  return initial_step, final_step
def _get_action_step(action):
  """Wraps `action` in a `PolicyStep` with empty policy info."""
  action_tensor = tf.convert_to_tensor(action)
  return policy_step.PolicyStep(
      action=action_tensor, info=policy_utilities.PolicyInfo())
def _get_experience(initial_step, action_step, final_step):
  """Builds a single-step bandit trajectory with an added time dimension."""
  single_experience = driver_utils.trajectory_for_bandit(
      initial_step, action_step, final_step)

  def _add_time_dimension(component):
    # Inserts a 'time' axis of size 1 after the batch axis.
    return tf.expand_dims(tf.convert_to_tensor(component), 1)

  return tf.nest.map_structure(_add_time_dimension, single_experience)
@test_util.run_all_in_graph_and_eager_modes
class NeuralLinUCBAgentTest(tf.test.TestCase, parameterized.TestCase):
  """Tests for `neural_linucb_agent.NeuralLinUCBAgent`.

  Covers agent construction, LinUCB-style and epsilon-greedy-style training,
  action masking, per-arm features, and variable-collection checkpointing.
  """

  def setUp(self):
    super(NeuralLinUCBAgentTest, self).setUp()
    # The agent's covariance/data-vector updates rely on resource variables.
    tf.compat.v1.enable_resource_variables()

  @test_cases()
  def testInitializeAgentNumTrainSteps0(self, batch_size, context_dim):
    """Agent builds and initializes with zero encoder training steps."""
    num_actions = 5
    observation_spec = tensor_spec.TensorSpec([context_dim], tf.float32)
    time_step_spec = time_step.time_step_spec(observation_spec)
    action_spec = tensor_spec.BoundedTensorSpec(
        dtype=tf.int32, shape=(), minimum=0, maximum=num_actions - 1)
    encoder = DummyNet(observation_spec)
    # No optimizer is needed because the encoder is never trained.
    agent = neural_linucb_agent.NeuralLinUCBAgent(
        time_step_spec=time_step_spec,
        action_spec=action_spec,
        encoding_network=encoder,
        encoding_network_num_train_steps=0,
        encoding_dim=10,
        optimizer=None)
    self.evaluate(agent.initialize())

  @test_cases()
  def testInitializeAgentNumTrainSteps10(self, batch_size, context_dim):
    """Agent builds and initializes with 10 encoder training steps."""
    num_actions = 5
    observation_spec = tensor_spec.TensorSpec([context_dim], tf.float32)
    time_step_spec = time_step.time_step_spec(observation_spec)
    action_spec = tensor_spec.BoundedTensorSpec(
        dtype=tf.int32, shape=(), minimum=0, maximum=num_actions - 1)
    encoder = DummyNet(observation_spec)
    agent = neural_linucb_agent.NeuralLinUCBAgent(
        time_step_spec=time_step_spec,
        action_spec=action_spec,
        encoding_network=encoder,
        encoding_network_num_train_steps=10,
        encoding_dim=10,
        optimizer=None)
    self.evaluate(agent.initialize())

  @test_cases()
  def testNeuralLinUCBUpdateNumTrainSteps0(self, batch_size=1, context_dim=10):
    """Check NeuralLinUCBAgent updates when behaving like LinUCB."""
    # Construct a `Trajectory` for the given action, observation, reward.
    num_actions = 5
    initial_step, final_step = _get_initial_and_final_steps(
        batch_size, context_dim)
    action = np.random.randint(num_actions, size=batch_size, dtype=np.int32)
    action_step = _get_action_step(action)
    experience = _get_experience(initial_step, action_step, final_step)
    # Construct an agent and perform the update.
    observation_spec = tensor_spec.TensorSpec([context_dim], tf.float32)
    time_step_spec = time_step.time_step_spec(observation_spec)
    action_spec = tensor_spec.BoundedTensorSpec(
        dtype=tf.int32, shape=(), minimum=0, maximum=num_actions - 1)
    encoder = DummyNet(observation_spec)
    encoding_dim = 10
    agent = neural_linucb_agent.NeuralLinUCBAgent(
        time_step_spec=time_step_spec,
        action_spec=action_spec,
        encoding_network=encoder,
        encoding_network_num_train_steps=0,
        encoding_dim=encoding_dim,
        optimizer=tf.compat.v1.train.AdamOptimizer(learning_rate=1e-2))
    # Build the train op before initialization so it works in graph mode.
    loss_info = agent.train(experience)
    self.evaluate(agent.initialize())
    self.evaluate(tf.compat.v1.global_variables_initializer())
    self.evaluate(loss_info)
    final_a = self.evaluate(agent.cov_matrix)
    final_b = self.evaluate(agent.data_vector)
    # Compute the expected updated estimates.
    # Group observations and rewards by the arm that was chosen for them.
    observations_list = tf.dynamic_partition(
        data=tf.reshape(tf.cast(experience.observation, tf.float64),
                        [batch_size, context_dim]),
        partitions=tf.convert_to_tensor(action),
        num_partitions=num_actions)
    rewards_list = tf.dynamic_partition(
        data=tf.reshape(tf.cast(experience.reward, tf.float64), [batch_size]),
        partitions=tf.convert_to_tensor(action),
        num_partitions=num_actions)
    expected_a_updated_list = []
    expected_b_updated_list = []
    for _, (observations_for_arm, rewards_for_arm) in enumerate(zip(
        observations_list, rewards_list)):
      encoded_observations_for_arm, _ = encoder(observations_for_arm)
      encoded_observations_for_arm = tf.cast(
          encoded_observations_for_arm, dtype=tf.float64)
      num_samples_for_arm_current = tf.cast(
          tf.shape(rewards_for_arm)[0], tf.float64)
      num_samples_for_arm_total = num_samples_for_arm_current
      # pylint: disable=cell-var-from-loop
      # A/b are only updated for arms that received at least one sample;
      # otherwise they stay at zero.
      def true_fn():
        a_new = tf.matmul(
            encoded_observations_for_arm,
            encoded_observations_for_arm,
            transpose_a=True)
        b_new = bandit_utils.sum_reward_weighted_observations(
            rewards_for_arm, encoded_observations_for_arm)
        return a_new, b_new
      def false_fn():
        return (tf.zeros([encoding_dim, encoding_dim], dtype=tf.float64),
                tf.zeros([encoding_dim], dtype=tf.float64))
      a_new, b_new = tf.cond(
          tf.squeeze(num_samples_for_arm_total) > 0,
          true_fn,
          false_fn)
      expected_a_updated_list.append(self.evaluate(a_new))
      expected_b_updated_list.append(self.evaluate(b_new))
    # Check that the actual updated estimates match the expectations.
    self.assertAllClose(expected_a_updated_list, final_a)
    self.assertAllClose(expected_b_updated_list, final_b)

  @test_cases()
  def testNeuralLinUCBUpdateDistributed(self, batch_size=1, context_dim=10):
    """Same as above but with distributed LinUCB updates."""
    # Construct a `Trajectory` for the given action, observation, reward.
    num_actions = 5
    initial_step, final_step = _get_initial_and_final_steps(
        batch_size, context_dim)
    action = np.random.randint(num_actions, size=batch_size, dtype=np.int32)
    action_step = _get_action_step(action)
    experience = _get_experience(initial_step, action_step, final_step)
    # Construct an agent and perform the update.
    observation_spec = tensor_spec.TensorSpec([context_dim], tf.float32)
    time_step_spec = time_step.time_step_spec(observation_spec)
    action_spec = tensor_spec.BoundedTensorSpec(
        dtype=tf.int32, shape=(), minimum=0, maximum=num_actions - 1)
    encoder = DummyNet(observation_spec)
    encoding_dim = 10
    agent = neural_linucb_agent.NeuralLinUCBAgent(
        time_step_spec=time_step_spec,
        action_spec=action_spec,
        encoding_network=encoder,
        encoding_network_num_train_steps=0,
        encoding_dim=encoding_dim,
        optimizer=tf.compat.v1.train.AdamOptimizer(learning_rate=1e-2))
    self.evaluate(agent.initialize())
    self.evaluate(tf.compat.v1.global_variables_initializer())
    # Call the distributed LinUCB training instead of agent.train().
    train_fn = common.function_in_tf1()(
        agent.compute_loss_using_linucb_distributed)
    # NOTE(review): reads the agent's private `_dtype` attribute; presumably
    # intentional white-box access for casting the reward — confirm.
    reward = tf.cast(experience.reward, agent._dtype)
    loss_info = train_fn(
        experience.observation, action, reward, weights=None)
    self.evaluate(loss_info)
    final_a = self.evaluate(agent.cov_matrix)
    final_b = self.evaluate(agent.data_vector)
    # Compute the expected updated estimates.
    observations_list = tf.dynamic_partition(
        data=tf.reshape(tf.cast(experience.observation, tf.float64),
                        [batch_size, context_dim]),
        partitions=tf.convert_to_tensor(action),
        num_partitions=num_actions)
    rewards_list = tf.dynamic_partition(
        data=tf.reshape(tf.cast(experience.reward, tf.float64), [batch_size]),
        partitions=tf.convert_to_tensor(action),
        num_partitions=num_actions)
    expected_a_updated_list = []
    expected_b_updated_list = []
    for _, (observations_for_arm, rewards_for_arm) in enumerate(zip(
        observations_list, rewards_list)):
      encoded_observations_for_arm, _ = encoder(observations_for_arm)
      encoded_observations_for_arm = tf.cast(
          encoded_observations_for_arm, dtype=tf.float64)
      num_samples_for_arm_current = tf.cast(
          tf.shape(rewards_for_arm)[0], tf.float64)
      num_samples_for_arm_total = num_samples_for_arm_current
      # pylint: disable=cell-var-from-loop
      # Same per-arm expectation as in testNeuralLinUCBUpdateNumTrainSteps0.
      def true_fn():
        a_new = tf.matmul(
            encoded_observations_for_arm,
            encoded_observations_for_arm,
            transpose_a=True)
        b_new = bandit_utils.sum_reward_weighted_observations(
            rewards_for_arm, encoded_observations_for_arm)
        return a_new, b_new
      def false_fn():
        return (tf.zeros([encoding_dim, encoding_dim], dtype=tf.float64),
                tf.zeros([encoding_dim], dtype=tf.float64))
      a_new, b_new = tf.cond(
          tf.squeeze(num_samples_for_arm_total) > 0,
          true_fn,
          false_fn)
      expected_a_updated_list.append(self.evaluate(a_new))
      expected_b_updated_list.append(self.evaluate(b_new))
    # Check that the actual updated estimates match the expectations.
    self.assertAllClose(expected_a_updated_list, final_a)
    self.assertAllClose(expected_b_updated_list, final_b)

  @test_cases()
  def testNeuralLinUCBUpdateNumTrainSteps10(self, batch_size=1, context_dim=10):
    """Check NeuralLinUCBAgent updates when behaving like eps-greedy."""
    # Construct a `Trajectory` for the given action, observation, reward.
    num_actions = 5
    initial_step, final_step = _get_initial_and_final_steps(
        batch_size, context_dim)
    action = np.random.randint(num_actions, size=batch_size, dtype=np.int32)
    action_step = _get_action_step(action)
    experience = _get_experience(initial_step, action_step, final_step)
    # Construct an agent and perform the update.
    observation_spec = tensor_spec.TensorSpec([context_dim], tf.float32)
    time_step_spec = time_step.time_step_spec(observation_spec)
    action_spec = tensor_spec.BoundedTensorSpec(
        dtype=tf.int32, shape=(), minimum=0, maximum=num_actions - 1)
    encoder = DummyNet(observation_spec)
    encoding_dim = 10
    # Pass an explicit variable collection to exercise that code path.
    variable_collection = neural_linucb_agent.NeuralLinUCBVariableCollection(
        num_actions, encoding_dim)
    agent = neural_linucb_agent.NeuralLinUCBAgent(
        time_step_spec=time_step_spec,
        action_spec=action_spec,
        encoding_network=encoder,
        encoding_network_num_train_steps=10,
        encoding_dim=encoding_dim,
        variable_collection=variable_collection,
        optimizer=tf.compat.v1.train.AdamOptimizer(learning_rate=0.001))
    loss_info, _ = agent.train(experience)
    self.evaluate(agent.initialize())
    self.evaluate(tf.compat.v1.global_variables_initializer())
    loss_value = self.evaluate(loss_info)
    # The encoder training loss should be strictly positive.
    self.assertGreater(loss_value, 0.0)

  @test_cases()
  def testNeuralLinUCBUpdateNumTrainSteps10MaskedActions(
      self, batch_size=1, context_dim=10):
    """Check updates when behaving like eps-greedy and using masked actions."""
    # Construct a `Trajectory` for the given action, observation, reward.
    num_actions = 5
    initial_step, final_step = _get_initial_and_final_steps_with_action_mask(
        batch_size, context_dim, num_actions)
    action = np.random.randint(num_actions, size=batch_size, dtype=np.int32)
    action_step = _get_action_step(action)
    experience = _get_experience(initial_step, action_step, final_step)
    # Construct an agent and perform the update.
    # The observation is a (context, mask) pair; the splitter below separates
    # the two for the agent.
    observation_spec = (tensor_spec.TensorSpec([context_dim], tf.float32),
                        tensor_spec.TensorSpec([num_actions], tf.int32))
    time_step_spec = time_step.time_step_spec(observation_spec)
    action_spec = tensor_spec.BoundedTensorSpec(
        dtype=tf.int32, shape=(), minimum=0, maximum=num_actions - 1)
    encoder = DummyNet(observation_spec[0])
    encoding_dim = 10
    agent = neural_linucb_agent.NeuralLinUCBAgent(
        time_step_spec=time_step_spec,
        action_spec=action_spec,
        encoding_network=encoder,
        encoding_network_num_train_steps=10,
        encoding_dim=encoding_dim,
        optimizer=tf.compat.v1.train.AdamOptimizer(learning_rate=0.001),
        observation_and_action_constraint_splitter=lambda x: (x[0], x[1]))
    loss_info, _ = agent.train(experience)
    self.evaluate(agent.initialize())
    self.evaluate(tf.compat.v1.global_variables_initializer())
    loss_value = self.evaluate(loss_info)
    self.assertGreater(loss_value, 0.0)

  def testInitializeRestoreVariableCollection(self):
    """Variable collection round-trips through a checkpoint."""
    if not tf.executing_eagerly():
      self.skipTest('Test only works in eager mode.')
    num_actions = 5
    encoding_dim = 7
    variable_collection = neural_linucb_agent.NeuralLinUCBVariableCollection(
        num_actions=num_actions, encoding_dim=encoding_dim)
    self.evaluate(tf.compat.v1.global_variables_initializer())
    self.evaluate(variable_collection.num_samples_list)
    checkpoint = tf.train.Checkpoint(variable_collection=variable_collection)
    checkpoint_dir = self.get_temp_dir()
    checkpoint_prefix = os.path.join(checkpoint_dir, 'checkpoint')
    checkpoint.save(file_prefix=checkpoint_prefix)
    # Flip the variable after saving; the restore below must undo this.
    variable_collection.actions_from_reward_layer.assign(False)
    latest_checkpoint = tf.train.latest_checkpoint(checkpoint_dir)
    checkpoint_load_status = checkpoint.restore(latest_checkpoint)
    self.evaluate(checkpoint_load_status.initialize_or_restore())
    self.assertEqual(
        self.evaluate(variable_collection.actions_from_reward_layer), True)

  def testTrainPerArmAgentWithMask(self):
    """Training works with per-arm features combined with an action mask."""
    num_actions = 5
    obs_spec = bandit_spec_utils.create_per_arm_observation_spec(
        2, 3, num_actions, add_action_mask=True)
    time_step_spec = time_step.time_step_spec(obs_spec)
    action_spec = tensor_spec.BoundedTensorSpec(
        dtype=tf.int32, shape=(), minimum=0, maximum=num_actions - 1)
    encoding_dim = 10
    encoder = (
        global_and_arm_feature_network.create_feed_forward_common_tower_network(
            obs_spec[0], (4, 3), (3, 4), (4, 2), encoding_dim))
    agent = neural_linucb_agent.NeuralLinUCBAgent(
        time_step_spec=time_step_spec,
        action_spec=action_spec,
        encoding_network=encoder,
        encoding_network_num_train_steps=10,
        encoding_dim=encoding_dim,
        observation_and_action_constraint_splitter=lambda x: (x[0], x[1]),
        accepts_per_arm_features=True,
        optimizer=tf.compat.v1.train.AdamOptimizer(learning_rate=0.001))
    # Batch of 2: global features, per-arm features, and an all-ones mask.
    observations = ({
        bandit_spec_utils.GLOBAL_FEATURE_KEY:
            tf.constant([[1, 2], [3, 4]], dtype=tf.float32),
        bandit_spec_utils.PER_ARM_FEATURE_KEY:
            tf.cast(
                tf.reshape(tf.range(30), shape=[2, 5, 3]), dtype=tf.float32)
    }, tf.ones(shape=(2, num_actions), dtype=tf.int32))
    actions = np.array([0, 3], dtype=np.int32)
    rewards = np.array([0.5, 3.0], dtype=np.float32)
    initial_step = time_step.TimeStep(
        tf.constant(
            time_step.StepType.FIRST,
            dtype=tf.int32,
            shape=[2],
            name='step_type'),
        tf.constant(0.0, dtype=tf.float32, shape=[2], name='reward'),
        tf.constant(1.0, dtype=tf.float32, shape=[2], name='discount'),
        observations)
    final_step = time_step.TimeStep(
        tf.constant(
            time_step.StepType.LAST,
            dtype=tf.int32,
            shape=[2],
            name='step_type'),
        tf.constant(rewards, dtype=tf.float32, name='reward'),
        tf.constant(1.0, dtype=tf.float32, shape=[2], name='discount'),
        observations)
    # Per-arm agents need the chosen arm's features recorded in policy info.
    action_step = policy_step.PolicyStep(
        action=tf.convert_to_tensor(actions),
        info=policy_utilities.PerArmPolicyInfo(
            chosen_arm_features=np.array([[1, 2, 3], [3, 2, 1]],
                                         dtype=np.float32)))
    experience = _get_experience(initial_step, action_step, final_step)
    loss_info, _ = agent.train(experience, None)
    self.evaluate(tf.compat.v1.initialize_all_variables())
    loss_value = self.evaluate(loss_info)
    self.assertGreater(loss_value, 0.0)

  def testTrainPerArmAgentVariableActions(self):
    """Training works when the number of available actions varies per row."""
    num_actions = 5
    obs_spec = bandit_spec_utils.create_per_arm_observation_spec(
        2, 3, num_actions, add_num_actions_feature=True)
    time_step_spec = time_step.time_step_spec(obs_spec)
    action_spec = tensor_spec.BoundedTensorSpec(
        dtype=tf.int32, shape=(), minimum=0, maximum=num_actions - 1)
    encoding_dim = 10
    encoder = (
        global_and_arm_feature_network.create_feed_forward_common_tower_network(
            obs_spec, (4, 3), (3, 4), (4, 2), encoding_dim))
    agent = neural_linucb_agent.NeuralLinUCBAgent(
        time_step_spec=time_step_spec,
        action_spec=action_spec,
        encoding_network=encoder,
        encoding_network_num_train_steps=10,
        encoding_dim=encoding_dim,
        accepts_per_arm_features=True,
        optimizer=tf.compat.v1.train.AdamOptimizer(learning_rate=0.001))
    # The NUM_ACTIONS feature says rows 0 and 1 have 3 and 4 valid arms.
    observations = {
        bandit_spec_utils.GLOBAL_FEATURE_KEY:
            tf.constant([[1, 2], [3, 4]], dtype=tf.float32),
        bandit_spec_utils.PER_ARM_FEATURE_KEY:
            tf.cast(
                tf.reshape(tf.range(30), shape=[2, 5, 3]), dtype=tf.float32),
        bandit_spec_utils.NUM_ACTIONS_FEATURE_KEY:
            tf.constant([3, 4], dtype=tf.int32)
    }
    actions = np.array([0, 3], dtype=np.int32)
    rewards = np.array([0.5, 3.0], dtype=np.float32)
    initial_step = time_step.TimeStep(
        tf.constant(
            time_step.StepType.FIRST,
            dtype=tf.int32,
            shape=[2],
            name='step_type'),
        tf.constant(0.0, dtype=tf.float32, shape=[2], name='reward'),
        tf.constant(1.0, dtype=tf.float32, shape=[2], name='discount'),
        observations)
    final_step = time_step.TimeStep(
        tf.constant(
            time_step.StepType.LAST,
            dtype=tf.int32,
            shape=[2],
            name='step_type'),
        tf.constant(rewards, dtype=tf.float32, name='reward'),
        tf.constant(1.0, dtype=tf.float32, shape=[2], name='discount'),
        observations)
    action_step = policy_step.PolicyStep(
        action=tf.convert_to_tensor(actions),
        info=policy_utilities.PerArmPolicyInfo(
            chosen_arm_features=np.array([[1, 2, 3], [3, 2, 1]],
                                         dtype=np.float32)))
    experience = _get_experience(initial_step, action_step, final_step)
    loss_info, _ = agent.train(experience, None)
    self.evaluate(tf.compat.v1.initialize_all_variables())
    loss_value = self.evaluate(loss_info)
    self.assertGreater(loss_value, 0.0)
# Run the test suite when executed as a script.
if __name__ == '__main__':
  tf.test.main()
| 41.728223
| 110
| 0.7042
| 3,081
| 23,952
| 5.163259
| 0.115547
| 0.028665
| 0.024139
| 0.01911
| 0.784762
| 0.771499
| 0.753395
| 0.742457
| 0.725673
| 0.723032
| 0
| 0.021254
| 0.196602
| 23,952
| 573
| 111
| 41.801047
| 0.805436
| 0.077948
| 0
| 0.719409
| 0
| 0
| 0.016753
| 0
| 0
| 0
| 0
| 0
| 0.018987
| 1
| 0.044304
| false
| 0
| 0.042194
| 0.008439
| 0.111814
| 0.00211
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
76ef8af8e493241c02c12f7735007f17682a26cd
| 1,507
|
py
|
Python
|
client/tests/test_authentication.py
|
uktrade/directory-forms-api
|
078e38ddf7a761d2d34a0e1ab2dc3f20cd32e6aa
|
[
"MIT"
] | null | null | null |
client/tests/test_authentication.py
|
uktrade/directory-forms-api
|
078e38ddf7a761d2d34a0e1ab2dc3f20cd32e6aa
|
[
"MIT"
] | 77
|
2018-10-29T14:38:37.000Z
|
2022-03-23T14:20:39.000Z
|
client/tests/test_authentication.py
|
uktrade/directory-forms-api
|
078e38ddf7a761d2d34a0e1ab2dc3f20cd32e6aa
|
[
"MIT"
] | 1
|
2021-08-05T10:20:17.000Z
|
2021-08-05T10:20:17.000Z
|
import pytest
import sigauth
from client import authentication
from client.tests import factories
@pytest.mark.django_db
def test_client_sender_authentication_ok(rf):
    """A request signed via the X-Signature header authenticates the client."""
    client_model_instance = factories.ClientFactory(
        name='test', access_key='test-key')
    signer = sigauth.helpers.RequestSigner(
        secret='test-key', sender_id=str(client_model_instance.identifier))
    signature_headers = signer.get_signature_headers(
        url='/', body=None, method='get', content_type='text/plain')
    request = rf.get('/', HTTP_X_SIGNATURE=signature_headers[signer.header_name])

    authenticator = authentication.ClientSenderIdAuthentication()
    authenticated_client, _ = authenticator.authenticate(request)

    assert authenticated_client == client_model_instance
@pytest.mark.django_db
def test_client_sender_authentication_authorisation_ok(rf):
    """A signature in the Authorization header also authenticates the client."""
    client_model_instance = factories.ClientFactory(
        name='test', access_key='test-key')
    signer = sigauth.helpers.RequestSigner(
        secret='test-key', sender_id=str(client_model_instance.identifier))
    signature_headers = signer.get_signature_headers(
        url='/', body=None, method='get', content_type='text/plain')
    request = rf.get(
        '/', HTTP_AUTHORIZATION=signature_headers[signer.header_name])

    authenticator = authentication.ClientSenderIdAuthentication()
    authenticated_client, _ = authenticator.authenticate(request)

    assert authenticated_client == client_model_instance
| 25.982759
| 73
| 0.692104
| 157
| 1,507
| 6.388535
| 0.318471
| 0.065803
| 0.113659
| 0.035892
| 0.879362
| 0.879362
| 0.879362
| 0.879362
| 0.879362
| 0.777667
| 0
| 0
| 0.205043
| 1,507
| 57
| 74
| 26.438596
| 0.837229
| 0
| 0
| 0.681818
| 0
| 0
| 0.04645
| 0
| 0
| 0
| 0
| 0
| 0.045455
| 1
| 0.045455
| false
| 0
| 0.090909
| 0
| 0.136364
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
76f03734b197bf0cd5dd50c3c53794ba74f5f546
| 20,562
|
py
|
Python
|
mpf/devices/segment_display/transitions.py
|
haggispinball/mpf_fathom_fast
|
1035c3fb90bb279de84cc3ed4aa1e1df38d0d563
|
[
"MIT"
] | 163
|
2015-01-25T02:19:50.000Z
|
2022-03-26T12:00:28.000Z
|
mpf/devices/segment_display/transitions.py
|
haggispinball/mpf_fathom_fast
|
1035c3fb90bb279de84cc3ed4aa1e1df38d0d563
|
[
"MIT"
] | 1,086
|
2015-03-23T19:53:17.000Z
|
2022-03-24T20:46:11.000Z
|
mpf/devices/segment_display/transitions.py
|
haggispinball/mpf_fathom_fast
|
1035c3fb90bb279de84cc3ed4aa1e1df38d0d563
|
[
"MIT"
] | 148
|
2015-01-28T02:31:39.000Z
|
2022-03-22T13:54:01.000Z
|
"""Text transitions used for segment displays."""
import abc
from typing import Optional, List
from mpf.core.placeholder_manager import TextTemplate
from mpf.core.rgb_color import RGBColor
from mpf.devices.segment_display.segment_display_text import SegmentDisplayText, UncoloredSegmentDisplayText
# Messages used in the AssertionErrors raised by the transition classes below.
STEP_OUT_OF_RANGE_ERROR = "Step is out of range"
TRANSITION_DIRECTION_UNKNOWN_ERROR = "Transition uses an unknown direction value"
class TransitionBase(metaclass=abc.ABCMeta):
    """Base class for text transitions in segment displays."""

    __slots__ = ["output_length", "config", "collapse_dots", "collapse_commas"]

    def __init__(self, output_length: int, collapse_dots: bool, collapse_commas: bool, config: dict) -> None:
        """Initialize the transition.

        Any config entry whose key matches an existing attribute overrides
        that attribute; unknown keys are silently ignored.
        """
        self.output_length = output_length
        self.config = config
        self.collapse_dots = collapse_dots
        self.collapse_commas = collapse_commas
        for attribute_name, attribute_value in config.items():
            if not hasattr(self, attribute_name):
                continue
            setattr(self, attribute_name, attribute_value)

    @abc.abstractmethod
    def get_step_count(self):
        """Return the total number of steps required for the transition."""
        raise NotImplementedError

    # pylint: disable=too-many-arguments
    @abc.abstractmethod
    def get_transition_step(self, step: int, current_text: str, new_text: str,
                            current_colors: Optional[List[RGBColor]] = None,
                            new_colors: Optional[List[RGBColor]] = None) -> SegmentDisplayText:
        """Return the display text for the given step of the transition."""
        raise NotImplementedError
class TransitionRunner:
    """Class to run/execute transitions using an iterator."""

    __slots__ = ["_transition", "_step", "_current_placeholder", "_new_placeholder", "_current_colors", "_new_colors"]

    # pylint: disable=too-many-arguments
    def __init__(self, machine, transition: TransitionBase, current_text: str, new_text: str,
                 current_colors: Optional[List[RGBColor]] = None,
                 new_colors: Optional[List[RGBColor]] = None) -> None:
        """Class initializer."""
        self._transition = transition
        self._step = 0
        # Texts are stored as templates so placeholders are re-evaluated on
        # every step.
        self._current_placeholder = TextTemplate(machine, current_text)
        self._new_placeholder = TextTemplate(machine, new_text)
        self._current_colors = current_colors
        self._new_colors = new_colors

    def __iter__(self):
        """Return the iterator."""
        return self

    def __next__(self):
        """Evaluate and return the next transition step."""
        step_index = self._step
        if step_index >= self._transition.get_step_count():
            raise StopIteration
        self._step = step_index + 1
        return self._transition.get_transition_step(
            step_index,
            self._current_placeholder.evaluate({}),
            self._new_placeholder.evaluate({}),
            self._current_colors,
            self._new_colors)
class NoTransition(TransitionBase):
    """Segment display no transition effect."""

    def get_step_count(self):
        """Return the total number of steps required for the transition."""
        # The new text is shown immediately, so there is exactly one step.
        return 1

    # pylint: disable=too-many-arguments
    def get_transition_step(self, step: int, current_text: str, new_text: str,
                            current_colors: Optional[List[RGBColor]] = None,
                            new_colors: Optional[List[RGBColor]] = None) -> SegmentDisplayText:
        """Return the new text directly (single-step transition)."""
        if not 0 <= step < self.get_step_count():
            raise AssertionError(STEP_OUT_OF_RANGE_ERROR)
        return SegmentDisplayText.from_str(
            new_text, self.output_length, self.collapse_dots,
            self.collapse_commas, new_colors)
class PushTransition(TransitionBase):
    """Segment display push transition effect.

    The new text pushes the current text off the display, optionally with a
    separator `text` travelling between the two.
    """

    def __init__(self, output_length: int, collapse_dots: bool, collapse_commas: bool, config: dict) -> None:
        """Class initializer."""
        # Defaults; overridden by matching keys in `config` via
        # TransitionBase.__init__.
        self.direction = 'right'
        self.text = None
        self.text_color = None
        super().__init__(output_length, collapse_dots, collapse_commas, config)
        if self.text is None:
            self.text = ''

    def get_step_count(self):
        """Return the total number of steps required for the transition."""
        # One step per display column plus one per separator character.
        return self.output_length + len(self.text)

    # pylint: disable=too-many-arguments
    def get_transition_step(self, step: int, current_text: str, new_text: str,
                            current_colors: Optional[List[RGBColor]] = None,
                            new_colors: Optional[List[RGBColor]] = None) -> SegmentDisplayText:
        """Calculate all the steps in the transition."""
        if step < 0 or step >= self.get_step_count():
            raise AssertionError(STEP_OUT_OF_RANGE_ERROR)
        current_display_text = SegmentDisplayText.from_str(current_text, self.output_length, self.collapse_dots,
                                                           self.collapse_commas, current_colors)
        new_display_text = SegmentDisplayText.from_str(new_text, self.output_length, self.collapse_dots,
                                                       self.collapse_commas, new_colors)
        if self.text:
            # The separator inherits the first new color unless an explicit
            # text_color was configured.
            if new_colors and not self.text_color:
                text_color = [new_colors[0]]
            else:
                text_color = self.text_color
            transition_text = SegmentDisplayText.from_str(self.text, len(self.text), self.collapse_dots,
                                                          self.collapse_commas, text_color)
        else:
            transition_text = UncoloredSegmentDisplayText([], self.collapse_dots, self.collapse_commas)
        if self.direction == 'right':
            # Concatenate [new | separator | current] and slide a window of
            # output_length characters across it, one position per step.
            temp_list = new_display_text
            temp_list.extend(transition_text)
            temp_list.extend(current_display_text)
            return temp_list[
                self.output_length + len(self.text) - (step + 1):2 * self.output_length + len(
                    self.text) - (step + 1)]
        if self.direction == 'left':
            # Mirror image: [current | separator | new], window moves forward.
            temp_list = current_display_text
            temp_list.extend(transition_text)
            temp_list.extend(new_display_text)
            return temp_list[step + 1:step + 1 + self.output_length]
        raise AssertionError(TRANSITION_DIRECTION_UNKNOWN_ERROR)
class CoverTransition(TransitionBase):
    """Segment display cover transition effect.

    The new text slides in over the current text, which stays in place until
    covered. An optional leading `text` precedes the new text.
    """

    def __init__(self, output_length: int, collapse_dots: bool, collapse_commas: bool, config: dict) -> None:
        """Class initializer."""
        # Defaults; overridden by matching keys in `config` via
        # TransitionBase.__init__.
        self.direction = 'right'
        self.text = None
        self.text_color = None
        super().__init__(output_length, collapse_dots, collapse_commas, config)
        if self.text is None:
            self.text = ''

    def get_step_count(self):
        """Return the total number of steps required for the transition."""
        return self.output_length + len(self.text)

    # pylint: disable=too-many-arguments
    def get_transition_step(self, step: int, current_text: str, new_text: str,
                            current_colors: Optional[List[RGBColor]] = None,
                            new_colors: Optional[List[RGBColor]] = None) -> SegmentDisplayText:
        """Calculate all the steps in the transition."""
        if step < 0 or step >= self.get_step_count():
            raise AssertionError(STEP_OUT_OF_RANGE_ERROR)
        current_display_text = SegmentDisplayText.from_str(current_text, self.output_length, self.collapse_dots,
                                                           self.collapse_commas, current_colors)
        new_display_text = SegmentDisplayText.from_str(new_text, self.output_length, self.collapse_dots,
                                                       self.collapse_commas, new_colors)
        if self.text:
            # The lead-in text inherits the first new color unless an explicit
            # text_color was configured.
            if new_colors and not self.text_color:
                text_color = [new_colors[0]]
            else:
                text_color = self.text_color
            transition_text = SegmentDisplayText.from_str(self.text, len(self.text), self.collapse_dots,
                                                          self.collapse_commas, text_color)
        else:
            transition_text = UncoloredSegmentDisplayText([], self.collapse_dots, self.collapse_commas)
        if self.direction == 'right':
            new_extended_display_text = new_display_text
            new_extended_display_text.extend(transition_text)
            if step < self.output_length:
                # Incoming text covers the leftmost step+1 columns.
                temp_text = new_extended_display_text[-(step + 1):]
                temp_text.extend(current_display_text[step + 1:])
            else:
                # NOTE(review): this slices new_display_text rather than
                # new_extended_display_text; because extend() above mutated
                # new_display_text in place they are presumably the same
                # object — confirm that is intended.
                temp_text = new_display_text[-(step + 1):-(step + 1) + self.output_length]
            return temp_text
        if self.direction == 'left':
            new_extended_display_text = transition_text
            new_extended_display_text.extend(new_display_text)
            if step < self.output_length:
                # Current text shrinks from the right as the cover advances.
                temp_text = current_display_text[:self.output_length - (step + 1)]
                temp_text.extend(new_extended_display_text[:step + 1])
            else:
                temp_text = new_extended_display_text[step - self.output_length + 1:step + 1]
            return temp_text
        raise AssertionError(TRANSITION_DIRECTION_UNKNOWN_ERROR)
class UncoverTransition(TransitionBase):
    """Segment display uncover transition effect.

    The current text slides off the display, revealing the new text beneath.
    An optional trailing `text` follows the departing current text.
    """

    def __init__(self, output_length: int, collapse_dots: bool, collapse_commas: bool, config: dict) -> None:
        """Class initializer."""
        # Defaults; overridden by matching keys in `config` via
        # TransitionBase.__init__.
        self.direction = 'right'
        self.text = None
        self.text_color = None
        super().__init__(output_length, collapse_dots, collapse_commas, config)
        if self.text is None:
            self.text = ''

    def get_step_count(self):
        """Return the total number of steps required for the transition."""
        return self.output_length + len(self.text)

    # pylint: disable=too-many-arguments
    def get_transition_step(self, step: int, current_text: str, new_text: str,
                            current_colors: Optional[List[RGBColor]] = None,
                            new_colors: Optional[List[RGBColor]] = None) -> SegmentDisplayText:
        """Calculate all the steps in the transition."""
        if step < 0 or step >= self.get_step_count():
            raise AssertionError(STEP_OUT_OF_RANGE_ERROR)
        current_display_text = SegmentDisplayText.from_str(current_text, self.output_length, self.collapse_dots,
                                                           self.collapse_commas, current_colors)
        new_display_text = SegmentDisplayText.from_str(new_text, self.output_length, self.collapse_dots,
                                                       self.collapse_commas, new_colors)
        if self.text:
            # The trailing text inherits the first new color unless an explicit
            # text_color was configured.
            if new_colors and not self.text_color:
                text_color = [new_colors[0]]
            else:
                text_color = self.text_color
            transition_text = SegmentDisplayText.from_str(self.text, len(self.text), self.collapse_dots,
                                                          self.collapse_commas, text_color)
        else:
            transition_text = UncoloredSegmentDisplayText([], self.collapse_dots, self.collapse_commas)
        if self.direction == 'right':
            current_extended_display_text = transition_text
            current_extended_display_text.extend(current_display_text)
            if step < len(self.text):
                # Still scrolling the trailing text onto the display.
                temp_text = current_extended_display_text[
                    len(self.text) - step - 1:len(self.text) - step - 1 + self.output_length]
            else:
                # New text is revealed on the left; remainder still departing.
                temp_text = new_display_text[:step - len(self.text) + 1]
                temp_text.extend(current_extended_display_text[:self.output_length - len(temp_text)])
            return temp_text
        if self.direction == 'left':
            current_extended_display_text = current_display_text
            current_extended_display_text.extend(transition_text)
            if step < len(self.text):
                temp_text = current_extended_display_text[step + 1:step + 1 + self.output_length]
            else:
                temp_text = current_display_text[step + 1:]
                temp_text.extend(new_display_text[-(self.output_length - len(temp_text)):])
            return temp_text
        raise AssertionError(TRANSITION_DIRECTION_UNKNOWN_ERROR)
class WipeTransition(TransitionBase):
"""Segment display wipe transition effect."""
    def __init__(self, output_length: int, collapse_dots: bool, collapse_commas: bool, config: dict) -> None:
        """Class initializer."""
        # Defaults; overridden by matching keys in `config` via
        # TransitionBase.__init__.
        self.direction = 'right'
        self.text = None
        self.text_color = None
        super().__init__(output_length, collapse_dots, collapse_commas, config)
        if self.text is None:
            self.text = ''
def get_step_count(self):
"""Return the total number of steps required for the transition."""
return self.output_length + len(self.text)
# pylint: disable=too-many-arguments,too-many-branches,too-many-return-statements
def get_transition_step(self, step: int, current_text: str, new_text: str,
current_colors: Optional[List[RGBColor]] = None,
new_colors: Optional[List[RGBColor]] = None) -> SegmentDisplayText:
"""Calculate all the steps in the transition."""
if step < 0 or step >= self.get_step_count():
raise AssertionError(STEP_OUT_OF_RANGE_ERROR)
current_display_text = SegmentDisplayText.from_str(current_text, self.output_length, self.collapse_dots,
self.collapse_commas, current_colors)
new_display_text = SegmentDisplayText.from_str(new_text, self.output_length, self.collapse_dots,
self.collapse_commas, new_colors)
if self.text:
if new_colors and not self.text_color:
text_color = [new_colors[0]]
else:
text_color = self.text_color
transition_text = SegmentDisplayText.from_str(self.text, len(self.text), self.collapse_dots,
self.collapse_commas, text_color)
else:
transition_text = UncoloredSegmentDisplayText([], self.collapse_dots, self.collapse_commas)
if self.direction == 'right':
if step < len(self.text):
temp_text = transition_text[-(step + 1):]
temp_text.extend(current_display_text[step + 1:])
elif step < self.output_length:
temp_text = new_display_text[:step - len(self.text) + 1]
temp_text.extend(transition_text)
temp_text.extend(current_display_text[len(temp_text):])
else:
temp_text = new_display_text[:step - len(self.text) + 1]
temp_text.extend(transition_text[:self.output_length - len(temp_text)])
return temp_text
if self.direction == 'left':
if step < len(self.text):
temp_text = current_display_text[:self.output_length - (step + 1)]
temp_text.extend(transition_text[:step + 1])
elif step < self.output_length:
temp_text = current_display_text[:self.output_length - (step + 1)]
temp_text.extend(transition_text)
temp_text.extend(new_display_text[len(temp_text):])
elif step < self.output_length + len(self.text) - 1:
temp_text = transition_text[step - (self.output_length + len(self.text)) + 1:]
temp_text.extend(new_display_text[-(self.output_length - len(temp_text)):])
else:
temp_text = new_display_text
return temp_text
raise AssertionError(TRANSITION_DIRECTION_UNKNOWN_ERROR)
class SplitTransition(TransitionBase):

    """Segment display split transition effect.

    The new text is revealed (or pushed) symmetrically around the display's
    center, either growing outward ('out') or closing inward ('in').
    """

    def __init__(self, output_length: int, collapse_dots: bool, collapse_commas: bool, config: dict) -> None:
        """Class initializer."""
        # Defaults; presumably overridden from ``config`` by the base-class
        # initializer — TODO confirm against TransitionBase.
        self.direction = 'out'
        self.mode = 'push'
        super().__init__(output_length, collapse_dots, collapse_commas, config)

    def get_step_count(self):
        """Return the total number of steps required for the transition."""
        # Half the display width, rounded up (both halves move simultaneously).
        return int((self.output_length + 1) / 2)

    # pylint: disable=too-many-arguments,too-many-branches,too-many-return-statements
    def get_transition_step(self, step: int, current_text: str, new_text: str,
                            current_colors: Optional[List[RGBColor]] = None,
                            new_colors: Optional[List[RGBColor]] = None) -> SegmentDisplayText:
        """Calculate the display content for one step of the transition.

        Args:
            step: Zero-based step index; must be < get_step_count().
            current_text: Text currently shown on the display.
            new_text: Text being transitioned in.
            current_colors: Optional per-character colors for current_text.
            new_colors: Optional per-character colors for new_text.

        Returns:
            The SegmentDisplayText to show at this step.

        Raises:
            AssertionError: If ``step`` is out of range, ``direction`` is not
                'out'/'in', or ``mode`` is not 'push'/'wipe'.
        """
        if step < 0 or step >= self.get_step_count():
            raise AssertionError(STEP_OUT_OF_RANGE_ERROR)
        current_display_text = SegmentDisplayText.from_str(current_text, self.output_length, self.collapse_dots,
                                                           self.collapse_commas, current_colors)
        new_display_text = SegmentDisplayText.from_str(new_text, self.output_length, self.collapse_dots,
                                                       self.collapse_commas, new_colors)
        if self.mode == 'push':
            if self.direction == 'out':
                if step == self.get_step_count() - 1:
                    return new_display_text
                # ``characters`` = chars of current text kept on each side of
                # the split point; it shrinks by one each step as the center
                # of the new text pushes the current text outward.
                characters = int(self.output_length / 2)
                split_point = characters
                if characters * 2 == self.output_length:
                    characters -= 1
                else:
                    # Odd width: the extra column sits right of the split.
                    split_point += 1
                characters -= step
                temp_text = current_display_text[split_point - characters:split_point]
                temp_text.extend(new_display_text[characters:characters + (self.output_length - 2 * characters)])
                temp_text.extend(current_display_text[split_point:split_point + characters])
                return temp_text
            if self.direction == 'in':
                if step == self.get_step_count() - 1:
                    return new_display_text
                # ``characters`` = chars taken from each side of the new
                # text's split point and shown at the display edges; it grows
                # each step, squeezing the middle of the current text.
                split_point = int(self.output_length / 2)
                characters = 1
                if split_point * 2 < self.output_length:
                    split_point += 1
                characters += step
                temp_text = new_display_text[split_point - characters:split_point]
                temp_text.extend(current_display_text[characters:characters + (self.output_length - 2 * characters)])
                temp_text.extend(new_display_text[split_point:split_point + characters])
                return temp_text
            raise AssertionError(TRANSITION_DIRECTION_UNKNOWN_ERROR)
        if self.mode == 'wipe':
            if self.direction == 'out':
                if step == self.get_step_count() - 1:
                    return new_display_text
                # Reveal the center of the new text in place, keeping
                # ``characters`` chars of the current text at each edge.
                characters = int(self.output_length / 2)
                if characters * 2 == self.output_length:
                    characters -= 1
                characters -= step
                temp_text = current_display_text[:characters]
                temp_text.extend(new_display_text[characters:characters + (self.output_length - 2 * characters)])
                temp_text.extend(current_display_text[-characters:])
                return temp_text
            if self.direction == 'in':
                if step == self.get_step_count() - 1:
                    return new_display_text
                # Reveal the new text in place from both edges inward.
                temp_text = new_display_text[:step + 1]
                temp_text.extend(current_display_text[step + 1:step + 1 + (self.output_length - 2 * len(temp_text))])
                temp_text.extend(new_display_text[-(step + 1):])
                return temp_text
            raise AssertionError(TRANSITION_DIRECTION_UNKNOWN_ERROR)
        raise AssertionError("Transition uses an unknown mode value")
| 45.191209
| 118
| 0.608112
| 2,250
| 20,562
| 5.258222
| 0.057778
| 0.059505
| 0.071676
| 0.040571
| 0.83577
| 0.79951
| 0.787085
| 0.761051
| 0.725552
| 0.70459
| 0
| 0.004615
| 0.304494
| 20,562
| 454
| 119
| 45.290749
| 0.82267
| 0.08292
| 0
| 0.678344
| 0
| 0
| 0.01632
| 0
| 0
| 0
| 0
| 0
| 0.041401
| 1
| 0.073248
| false
| 0
| 0.015924
| 0
| 0.200637
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
0a0053fabf01c386213e86f68926dd263ff678d7
| 24,835
|
py
|
Python
|
tools/notebook/extensions/wstl/magics/wstl_test.py
|
nwo-strap/healthcare-data-harmonization
|
377c316d5ade4a13a8f1b5d2fdd904484d26fb3a
|
[
"Apache-2.0"
] | 1
|
2022-03-18T16:43:18.000Z
|
2022-03-18T16:43:18.000Z
|
tools/notebook/extensions/wstl/magics/wstl_test.py
|
nwo-strap/healthcare-data-harmonization
|
377c316d5ade4a13a8f1b5d2fdd904484d26fb3a
|
[
"Apache-2.0"
] | null | null | null |
tools/notebook/extensions/wstl/magics/wstl_test.py
|
nwo-strap/healthcare-data-harmonization
|
377c316d5ade4a13a8f1b5d2fdd904484d26fb3a
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2020 Google LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Tests for wstl.magics.wstl."""
import json
from unittest import mock

from absl.testing import absltest
from fakefs import fake_filesystem
from google.cloud import storage
from googleapiclient.http import HttpError
import grpc
import grpc_testing
from IPython.core import error
from IPython.display import JSON
from IPython.terminal import interactiveshell
from IPython.testing import tools

from google3.google.rpc import code_pb2
from google3.google.rpc import status_pb2
from wstl.magics import wstl
from wstl.proto import wstlservice_pb2
from wstl.proto import wstlservice_pb2_grpc
# pylint: disable=invalid-name
class WstlTest(absltest.TestCase):
  """Unit tests for the wstl cell magic and the HL7v2/FHIR line magics."""

  # TODO(): look into x86_64-grtev4-linux-gnu-driver_is_not_gcc error
  # when merging with test cases from load_hl7
  def setUp(self):
    """Create an IPython terminal shell and a fake gRPC channel per test."""
    super(WstlTest, self).setUp()
    self.config = tools.default_config()
    self.config.TerminalInteractiveShell.simple_prompt = True
    self.shell = interactiveshell.TerminalInteractiveShell.instance(
        config=self.config)
    self._time = grpc_testing.strict_real_time()
    self._channel = grpc_testing.channel(
        wstlservice_pb2.DESCRIPTOR.services_by_name.values(), self._time)
    # Minimal HL7v2 ADT_A01 message used as the blob/datastore payload
    # fixture throughout the GCS tests.
    self.sample_hl7v2 = json.dumps({
        "ADT_A01": {
            "ACC": None,
            "AL1": [{
                "0": "AL1",
                "1": "0",
                "2": {
                    "1": "AA"
                },
                "3": {
                    "1": "Z88.0",
                    "2": "Personal history of allergy to penicillin",
                    "3": "ZAL"
                },
                "4": {
                    "1": "SEVERE"
                },
                "5": ["Shortness of breath"],
                "6": None
            }],
            "ARV_1": None,
            "ARV_2": None,
            "DB1": None,
            "DRG": None
        }
    })

  def test_wstl_magic_is_correctly_defined(self):
    """Registering the magic classes exposes the expected magic names."""
    with self.shell.builtin_trap:
      ip = self.shell.get_ipython()
      failure = ip.magics_manager.register(wstl.WSTLMagics)
      self.assertIsNone(failure)
      magic = ip.find_cell_magic("wstl")
      self.assertIsNotNone(magic)
      failure = ip.magics_manager.register(wstl.LoadHL7Magics)
      self.assertIsNone(failure)
      magic = ip.find_line_magic("load_hl7v2_datastore")
      self.assertIsNotNone(magic)
      magic = ip.find_line_magic("load_hl7v2_gcs")
      self.assertIsNotNone(magic)

  # TODO (): add additional unit tests using mock gRPC server.
  def test_wstl_magic_invoke_no_connection(self):
    """Invoking %%wstl without a server connection raises."""
    with self.shell.builtin_trap:
      ip = self.shell.get_ipython()
      failure = ip.magics_manager.register(wstl.WSTLMagics)
      self.assertIsNone(failure)
      # No mock server connection has been established so the magic command
      # should raise an exception.
      with self.assertRaises(NotImplementedError):
        ip.run_cell_magic("wstl", "", "TopLevelField: $ToUpper(\"a\")")

  @mock.patch.object(wstl, "_get_message_from_hl7v2_store", autospec=True)
  def test_load_hl7v2_from_datastore_success(self, mocked_client):
    """%load_hl7v2_datastore returns the parsed store payload."""
    with self.shell.builtin_trap:
      mocked_client.return_value = '{"content":"some hl7v2 message"}'
      # TODO () investigate get_ipython returns null issue
      ip = self.shell.get_ipython()
      failure = ip.magics_manager.register(wstl.LoadHL7Magics)
      self.assertIsNone(failure)
      result = ip.run_line_magic(
          "load_hl7v2_datastore", """--project_id=project --region=us
          --dataset_id=ds --hl7v2_store_id=store""")
      self.assertEqual(result.data, json.loads(mocked_client.return_value))

  @mock.patch.object(wstl, "_get_message_from_hl7v2_store", autospec=True)
  def test_load_hl7v2_from_datastore_failure(self, mocked_client):
    """HTTP errors from the store client propagate to the caller."""
    with self.shell.builtin_trap:
      mocked_client.side_effect = HttpError(
          mock.Mock(status=403), bytes("permission denied", "utf-8"))
      ip = self.shell.get_ipython()
      failure = ip.magics_manager.register(wstl.LoadHL7Magics)
      self.assertIsNone(failure)
      with self.assertRaises(HttpError):
        ip.run_line_magic(
            "load_hl7v2_datastore", """--project_id=project --region=us
            --dataset_id=dsi --hl7v2_store_id=store""")

  @mock.patch.object(storage, "Client", autospec=True)
  @mock.patch.object(storage, "Blob", autospec=True)
  def test_load_hl7v2_from_gcs_success_direct_return(self, mock_blob,
                                                     mock_client):
    """%load_hl7v2_gcs returns the parsed blob contents."""
    with self.shell.builtin_trap:
      mock_blob.download_as_string.return_value = self.sample_hl7v2
      mock_blob.content_encoding = None
      mock_client.return_value.bucket.return_value.get_blob.return_value = mock_blob
      ip = self.shell.get_ipython()
      failure = ip.magics_manager.register(wstl.LoadHL7Magics)
      self.assertIsNone(failure)
      result = ip.run_line_magic(
          "load_hl7v2_gcs", """--bucket_name=foo
          --source_blob_name=bar""")
      self.assertEqual(result.data, json.loads(self.sample_hl7v2))

  @mock.patch.object(storage, "Client", autospec=True)
  @mock.patch.object(storage, "Blob", autospec=True)
  def test_load_hl7v2_from_gcs_success_output_file_create(
      self, mock_blob, mock_client):
    """--dest_file_name opens the destination file for writing."""
    with self.shell.builtin_trap:
      mock_blob.download_as_string.return_value = self.sample_hl7v2
      mock_blob.content_encoding = None
      mock_client.return_value.bucket.return_value.get_blob.return_value = mock_blob
      ip = self.shell.get_ipython()
      failure = ip.magics_manager.register(wstl.LoadHL7Magics)
      self.assertIsNone(failure)
      with mock.patch("builtins.open", autospec=True) as mock_open:
        ip.run_line_magic(
            "load_hl7v2_gcs", """--bucket_name=foo
            --source_blob_name=bar --dest_file_name=some_file.txt""")
        mock_open.assert_called_once_with("some_file.txt", "w")

  @mock.patch.object(storage, "Client", autospec=True)
  @mock.patch.object(storage, "Blob", autospec=True)
  def test_load_hl7v2_from_gcs_success_output_file_content(
      self, mock_blob, mock_client):
    """--dest_file_name writes the blob content to the destination file."""
    with self.shell.builtin_trap:
      fs = fake_filesystem.FakeFilesystem()
      fake_open = fake_filesystem.FakeFileOpen(fs)
      mock_blob.download_as_string.return_value = self.sample_hl7v2
      mock_blob.content_encoding = None
      mock_client.return_value.bucket.return_value.get_blob.return_value = mock_blob
      ip = self.shell.get_ipython()
      failure = ip.magics_manager.register(wstl.LoadHL7Magics)
      self.assertIsNone(failure)
      with mock.patch.multiple(wstl, open=fake_open):
        tmp_filename = "fake.txt"
        ip.run_line_magic(
            "load_hl7v2_gcs", """--bucket_name=foo
            --source_blob_name=bar --dest_file_name={}""".format(tmp_filename))
        self.assertEqual(
            fs.GetObject(tmp_filename).contents.decode("UTF-8"),
            self.sample_hl7v2)

  @mock.patch.object(storage, "Bucket", autospec=True)
  @mock.patch.object(storage, "Client", autospec=True)
  def test_load_hl7v2_from_gcs_not_found(self, mock_client, mock_bucket):
    """A missing bucket raises ValueError."""
    with self.shell.builtin_trap:
      mock_bucket.exists.return_value = False
      mock_client.return_value.bucket.return_value = mock_bucket
      ip = self.shell.get_ipython()
      failure = ip.magics_manager.register(wstl.LoadHL7Magics)
      self.assertIsNone(failure)
      with self.assertRaises(ValueError):
        ip.run_line_magic("load_hl7v2_gcs",
                          """--bucket_name=foo --source_blob_name=bar""")

  @mock.patch.object(storage, "Client", autospec=True)
  @mock.patch.object(storage, "Blob", autospec=True)
  def test_load_hl7v2_from_gcs_wrong_data(self, mock_blob, mock_client):
    """Non-JSON blob content raises json.JSONDecodeError."""
    with self.shell.builtin_trap:
      mock_blob.download_as_string.return_value = "some invalid json".encode(
          "UTF-8")
      mock_blob.content_encoding = None
      mock_client.return_value.bucket.return_value.get_blob.return_value = mock_blob
      ip = self.shell.get_ipython()
      failure = ip.magics_manager.register(wstl.LoadHL7Magics)
      self.assertIsNone(failure)
      with self.assertRaises(json.JSONDecodeError):
        ip.run_line_magic("load_hl7v2_gcs",
                          """--bucket_name=foo --source_blob_name=bar""")

  def test_fhir_validate_magic_is_correctly_defined(self):
    """%fhir_validate is registered and comes from the wstl module."""
    ip = self.shell.get_ipython()
    failure = ip.magics_manager.register(wstl.WSTLMagics)
    self.assertIsNone(failure)
    magic = ip.find_line_magic("fhir_validate")
    self.assertIsNotNone(magic)
    # we cannot test object identity because decorators return wrapped versions.
    self.assertEqual(wstl.__name__, magic.__module__)

  @mock.patch.object(grpc, "insecure_channel", autospec=True)
  @mock.patch.object(wstlservice_pb2_grpc, "WhistleServiceStub", autospec=True)
  def test_fhir_validate_magic_inline_json(self, mock_stub, mock_channel):
    """Inline json:// inputs produce the expected validation requests."""

    class FakeChannel:
      """Context manager wrapping the grpc_testing channel."""

      def __init__(self, channel):
        self.channel = channel

      def __enter__(self):
        return self.channel

      def __exit__(self, exc_type, exc_val, exc_tb):
        self.channel._close()
        return False

    class FakeService:
      """Minimal stand-in for the Whistle service stub."""

      def __init__(self, res):
        self.resp = res

      def FhirValidate(self, req):
        del req
        return self.resp

    mock_channel.return_value = FakeChannel(self._channel)
    ip = self.shell.get_ipython()
    failure = ip.magics_manager.register(wstl.WSTLMagics)
    self.assertIsNone(failure)
    # Magic invocations under test: default version, r4, stu3 valid, stu3
    # invalid resourceType.
    lines = [
        "--input=json://{'id':'example','resourceType':" +
        "'Device','udi':{'carrierHRF':'test'}}",
        "--version=r4 --input=json://{'id':'example','resourceType':" +
        "'Device','udi':{'carrierHRF':'test'}}",
        "--version=stu3 --input=json://{'id':'example','resourceType':" +
        "'Device','udi':{'carrierHRF':'test'}}",
        "--version=stu3 --input=json://{'id':'example','resourceType':" +
        "'3','udi':{'carrierHRF':'test'}}"
    ]
    results = []
    # Canned service responses, index-aligned with ``lines``.
    resps = [
        wstlservice_pb2.ValidationResponse(status=[
            status_pb2.Status(code=code_pb2.OK, message="Validation Success")
        ]),
        wstlservice_pb2.ValidationResponse(status=[
            status_pb2.Status(code=code_pb2.OK, message="Validation Success")
        ]),
        wstlservice_pb2.ValidationResponse(status=[
            status_pb2.Status(code=code_pb2.OK, message="Validation Success")
        ]),
        wstlservice_pb2.ValidationResponse(status=[
            status_pb2.Status(
                code=code_pb2.INVALID_ARGUMENT, message="invalid FHIR resource")
        ])
    ]
    # Expected requests the magic should send, index-aligned with ``lines``.
    reqs = [
        wstlservice_pb2.ValidationRequest(
            fhir_version=wstlservice_pb2.ValidationRequest.FhirVersion.R4,
            input=[
                wstlservice_pb2.Location(
                    inline_json="{'id':'example','resourceType':" +
                    "'Device','udi':{'carrierHRF':'test'}}")
            ]),
        wstlservice_pb2.ValidationRequest(
            fhir_version=wstlservice_pb2.ValidationRequest.FhirVersion.R4,
            input=[
                wstlservice_pb2.Location(
                    inline_json="{'id':'example','resourceType':" +
                    "'Device','udi':{'carrierHRF':'test'}}")
            ]),
        wstlservice_pb2.ValidationRequest(
            fhir_version=wstlservice_pb2.ValidationRequest.FhirVersion.STU3,
            input=[
                wstlservice_pb2.Location(
                    inline_json="{'id':'example','resourceType':" +
                    "'Device','udi':{'carrierHRF':'test'}}")
            ]),
        wstlservice_pb2.ValidationRequest(
            fhir_version=wstlservice_pb2.ValidationRequest.FhirVersion.STU3,
            input=[
                wstlservice_pb2.Location(
                    inline_json="{'id':'example','resourceType':" +
                    "'3','udi':{'carrierHRF':'test'}}")
            ])
    ]
    for i in range(len(lines)):
      mock_service = mock.create_autospec(FakeService)
      mock_service.FhirValidate.return_value = resps[i]
      mock_stub.return_value = mock_service
      result = ip.run_line_magic("fhir_validate", lines[i])
      results.append(result)
      mock_service.FhirValidate.assert_called_once_with(reqs[i])
    # Expected JSON payloads rendered back to the notebook.
    wants = [
        {
            "status": [{
                "message": "Validation Success"
            }]
        },
        {
            "status": [{
                "message": "Validation Success"
            }]
        },
        {
            "status": [{
                "message": "Validation Success"
            }]
        },
        {
            "status": [{
                "code": 3,
                "message": "invalid FHIR resource"
            }]
        },
    ]
    for j in range(len(wants)):
      result = results[j]
      want = JSON(json.dumps(wants[j]))
      self.assertEqual(
          result.data,
          want.data,
          msg="JSON.data mismatch on input {}".format(lines[j]))
      self.assertEqual(
          result.url,
          want.url,
          msg="JSON.url mismatch on input {}".format(lines[j]))
      self.assertEqual(
          result.filename,
          want.filename,
          msg="JSON.filename mismatch on input {}".format(lines[j]))

  @mock.patch.object(grpc, "insecure_channel", autospec=True)
  @mock.patch.object(wstlservice_pb2_grpc, "WhistleServiceStub", autospec=True)
  def test_fhir_validate_magic_ipython(self, mock_stub, mock_channel):
    """py:// and pylist:// inputs resolve IPython variables correctly."""

    class FakeChannel:
      """Context manager wrapping the grpc_testing channel."""

      def __init__(self, channel):
        self.channel = channel

      def __enter__(self):
        return self.channel

      def __exit__(self, exc_type, exc_val, exc_tb):
        self.channel._close()
        return False

    class FakeService:
      """Minimal stand-in for the Whistle service stub."""

      def __init__(self, res):
        self.resp = res

      def FhirValidate(self, req):
        del req
        return self.resp

    mock_channel.return_value = FakeChannel(self._channel)
    ip = self.shell.get_ipython()
    failure = ip.magics_manager.register(wstl.WSTLMagics)
    self.assertIsNone(failure)
    st1 = "{'id':'example','resourceType':'Device','udi':{'carrierHRF':'test'}}"
    st2 = "{'id':'example','resourceType':'3','udi':{'carrierHRF':'test'}}"
    # NOTE(review): stList repeats st1 twice; the expected requests below do
    # encode two copies of the valid Device resource, so this looks
    # intentional — confirm st2 was not meant to be included.
    stList = [st1, st1]
    ip.push("st1")
    ip.push("st2")
    ip.push("stList")
    lines = [
        "--version=stu3 --input=py://st1", "--version=stu3 --input=py://st2",
        "--version=stu3 --input=py://stList",
        "--version=stu3 --input=pylist://stList"
    ]
    results = []
    # Canned service responses, index-aligned with ``lines``.
    resps = [
        wstlservice_pb2.ValidationResponse(status=[
            status_pb2.Status(code=code_pb2.OK, message="Validation Success")
        ]),
        wstlservice_pb2.ValidationResponse(status=[
            status_pb2.Status(
                code=code_pb2.INVALID_ARGUMENT, message="invalid FHIR resource")
        ]),
        wstlservice_pb2.ValidationResponse(status=[
            status_pb2.Status(
                code=code_pb2.INVALID_ARGUMENT, message="invalid FHIR resource")
        ]),
        wstlservice_pb2.ValidationResponse(status=[
            status_pb2.Status(code=code_pb2.OK, message="Validation Success")
        ]),
    ]
    # Expected requests: note py:// on a list serializes it as one JSON
    # string while pylist:// expands it into multiple Locations.
    reqs = [
        wstlservice_pb2.ValidationRequest(
            fhir_version=wstlservice_pb2.ValidationRequest.FhirVersion.STU3,
            input=[
                wstlservice_pb2.Location(
                    inline_json="{'id':'example','resourceType':" +
                    "'Device','udi':{'carrierHRF':'test'}}")
            ]),
        wstlservice_pb2.ValidationRequest(
            fhir_version=wstlservice_pb2.ValidationRequest.FhirVersion.STU3,
            input=[
                wstlservice_pb2.Location(
                    inline_json="{'id':'example','resourceType':" +
                    "'3','udi':{'carrierHRF':'test'}}")
            ]),
        wstlservice_pb2.ValidationRequest(
            fhir_version=wstlservice_pb2.ValidationRequest.FhirVersion.STU3,
            input=[
                wstlservice_pb2.Location(
                    inline_json="[\"{'id':'example','resourceType':" +
                    "'Device','udi':{'carrierHRF':'test'}}\", " +
                    "\"{'id':'example','resourceType':" +
                    "'Device','udi':{'carrierHRF':'test'}}\"]")
            ]),
        wstlservice_pb2.ValidationRequest(
            fhir_version=wstlservice_pb2.ValidationRequest.FhirVersion.STU3,
            input=[
                wstlservice_pb2.Location(
                    inline_json="{'id':'example','resourceType':" +
                    "'Device','udi':{'carrierHRF':'test'}}"),
                wstlservice_pb2.Location(
                    inline_json="{'id':'example','resourceType':" +
                    "'Device','udi':{'carrierHRF':'test'}}"),
            ]),
    ]
    for i in range(len(lines)):
      mock_service = mock.create_autospec(FakeService)
      mock_service.FhirValidate.return_value = resps[i]
      mock_stub.return_value = mock_service
      result = ip.run_line_magic("fhir_validate", lines[i])
      results.append(result)
      mock_service.FhirValidate.assert_called_once_with(reqs[i])
    # Expected JSON payloads rendered back to the notebook.
    wants = [
        {
            "status": [{
                "message": "Validation Success"
            }]
        },
        {
            "status": [{
                "code": 3,
                "message": "invalid FHIR resource"
            }]
        },
        {
            "status": [{
                "code": 3,
                "message": "invalid FHIR resource"
            }]
        },
        {
            "status": [{
                "message": "Validation Success"
            }]
        },
    ]
    for j in range(len(wants)):
      result = results[j]
      want = JSON(json.dumps(wants[j]))
      self.assertEqual(
          result.data,
          want.data,
          msg="JSON.data mismatch on input {}".format(lines[j]))
      self.assertEqual(
          result.url,
          want.url,
          msg="JSON.url mismatch on input {}".format(lines[j]))
      self.assertEqual(
          result.filename,
          want.filename,
          msg="JSON.filename mismatch on input {}".format(lines[j]))
    # Delete created variables to suppress the unused-variable linter warning.
    del st1
    del st2
    del stList

  @mock.patch.object(grpc, "insecure_channel", autospec=True)
  @mock.patch.object(wstlservice_pb2_grpc, "WhistleServiceStub", autospec=True)
  @mock.patch.object(storage, "Client", autospec=True)
  @mock.patch.object(storage, "Bucket", autospec=True)
  def test_fhir_validate_magic_gcs(self, mock_bucket, mock_client, mock_stub,
                                   mock_channel):
    """A single gs:// input is forwarded as a gcs_location request."""

    class FakeChannel:
      """Context manager wrapping the grpc_testing channel."""

      def __init__(self, channel):
        self.channel = channel

      def __enter__(self):
        return self.channel

      def __exit__(self, exc_type, exc_val, exc_tb):
        self.channel._close()
        return False

    class FakeService:
      """Stub whose FhirValidate always signals UNIMPLEMENTED."""

      def __init__(self, res):
        self.resp = res

      def FhirValidate(self, req):
        del req
        raise grpc.RpcError(code_pb2.UNIMPLEMENTED,
                            "GCS source not implemented yet")

    class Item(object):
      """Fake GCS blob listing entry."""

      def __init__(self, bucket, name):
        self.bucket = bucket
        self.name = name

    class FakeBucket(object):
      """Fake GCS bucket carrying only a name."""

      def __init__(self, bucket_name):
        self.name = bucket_name

    mock_channel.return_value = FakeChannel(self._channel)
    bucket = FakeBucket("fake_bucket")
    items = [Item(bucket, "file.wstl")]
    mock_bucket.list_blobs.return_value = items
    mock_client.return_value.get_bucket.return_value = mock_bucket
    ip = self.shell.get_ipython()
    failure = ip.magics_manager.register(wstl.WSTLMagics)
    self.assertIsNone(failure)
    with mock.patch.object(FakeService, "FhirValidate") as mock_method:
      mock_stub.return_value = FakeService(None)
      mock_method.side_effect = grpc.RpcError
      result = ip.run_line_magic(
          "fhir_validate", "--version=stu3 --input=gs://fake_bucket/file.wstl")
      self.assertIsInstance(result, grpc.RpcError)
      req_gs = wstlservice_pb2.ValidationRequest(
          fhir_version=wstlservice_pb2.ValidationRequest.FhirVersion.STU3,
          input=[
              wstlservice_pb2.Location(
                  gcs_location="gs://fake_bucket/file.wstl")
          ])
      mock_method.assert_called_once_with(req_gs)

  @mock.patch.object(grpc, "insecure_channel", autospec=True)
  @mock.patch.object(wstlservice_pb2_grpc, "WhistleServiceStub", autospec=True)
  @mock.patch.object(storage, "Client", autospec=True)
  @mock.patch.object(storage, "Bucket", autospec=True)
  def test_fhir_validate_magic_gcs_wildcard(self, mock_bucket, mock_client,
                                            mock_stub, mock_channel):
    """A gs:// wildcard expands only to the blobs matching the pattern."""

    class FakeChannel:
      """Context manager wrapping the grpc_testing channel."""

      def __init__(self, channel):
        self.channel = channel

      def __enter__(self):
        return self.channel

      def __exit__(self, exc_type, exc_val, exc_tb):
        self.channel._close()
        return False

    class FakeService:
      """Stub whose FhirValidate always signals UNIMPLEMENTED."""

      def __init__(self):
        pass

      def FhirValidate(self, req):
        del req
        raise grpc.RpcError(code_pb2.UNIMPLEMENTED,
                            "GCS source not implemented yet")

    class Item(object):
      """Fake GCS blob listing entry."""

      def __init__(self, bucket, name):
        self.bucket = bucket
        self.name = name

    class FakeBucket(object):
      """Fake GCS bucket carrying only a name."""

      def __init__(self, bucket_name):
        self.name = bucket_name

    mock_channel.return_value = FakeChannel(self._channel)
    bucket = FakeBucket("fake_bucket")
    # Mixed listing: only the two *.txt blobs should match the wildcard.
    items = [
        Item(bucket, "file1.txt"),
        Item(bucket, "lib_folder/file2.wstl"),
        Item(bucket, "lib_folder/file3.txt"),
        Item(bucket, "lib_folder/file4.json"),
        Item(bucket, "input.json")
    ]
    mock_bucket.list_blobs.return_value = iter(items)
    mock_client.return_value.get_bucket.return_value = mock_bucket
    ip = self.shell.get_ipython()
    failure = ip.magics_manager.register(wstl.WSTLMagics)
    self.assertIsNone(failure)
    with mock.patch.object(FakeService, "FhirValidate") as mock_method:
      mock_stub.return_value = FakeService()
      mock_method.side_effect = grpc.RpcError
      result = ip.run_line_magic(
          "fhir_validate", "--version=stu3 --input=gs://fake_bucket/*.txt")
      self.assertIsInstance(result, grpc.RpcError)
      req_gs = wstlservice_pb2.ValidationRequest(
          fhir_version=wstlservice_pb2.ValidationRequest.FhirVersion.STU3,
          input=[
              wstlservice_pb2.Location(
                  gcs_location="gs://fake_bucket/file1.txt"),
              wstlservice_pb2.Location(
                  gcs_location="gs://fake_bucket/lib_folder/file3.txt")
          ])
      mock_method.assert_called_once_with(req_gs)

  @mock.patch.object(grpc, "insecure_channel", autospec=True)
  def test_fhir_validate_magic_invalid_input(self, mock_channel):
    """Malformed --input/--version arguments raise the expected errors."""

    class FakeChannel:
      """Context manager wrapping the grpc_testing channel."""

      def __init__(self, channel):
        self.channel = channel

      def __enter__(self):
        return self.channel

      def __exit__(self, exc_type, exc_val, exc_tb):
        self.channel._close()
        return False

    mock_channel.return_value = FakeChannel(self._channel)
    ip = self.shell.get_ipython()
    failure = ip.magics_manager.register(wstl.WSTLMagics)
    self.assertIsNone(failure)
    # Missing scheme prefixes and upper-case version strings are rejected.
    lines = [
        "--input={'id':'example','resourceType':" +
        "'Device','udi':{'carrierHRF':'test'}}",
        "--version=r4 --input={'id':'example','resourceType':" +
        "'Device','udi':{'carrierHRF':'test'}}",
        "--version=R4 --input=json://{'id':'example','resourceType':" +
        "'Device','udi':{'carrierHRF':'test'}}",
        "--version=stu3 --input={'id':'example','resourceType':" +
        "'Device','udi':{'carrierHRF':'test'}}",
        "--version=STU3 --input=json://{'id':'example','resourceType':" +
        "'Device','udi':{'carrierHRF':'test'}}"
    ]
    errors = [
        ValueError, ValueError, error.UsageError, ValueError, error.UsageError
    ]
    for i in range(len(lines)):
      self.assertRaises(errors[i], ip.run_line_magic, "fhir_validate", lines[i])
# Run the absltest runner when executed as a script.
if __name__ == "__main__":
  absltest.main()
| 36.956845
| 84
| 0.625851
| 2,739
| 24,835
| 5.431544
| 0.136546
| 0.044229
| 0.027223
| 0.030853
| 0.798212
| 0.782752
| 0.766552
| 0.751496
| 0.738724
| 0.733616
| 0
| 0.011816
| 0.250292
| 24,835
| 671
| 85
| 37.011923
| 0.787207
| 0.041675
| 0
| 0.686957
| 0
| 0
| 0.165152
| 0.079405
| 0
| 0
| 0
| 0.00149
| 0.073043
| 0
| null | null | 0.001739
| 0.029565
| null | null | 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
0a33a0665fd59fb4638092b0f5df73d65d20b6c6
| 948
|
py
|
Python
|
config.py
|
itechnotion/flask-boilerplate
|
cd9c0f469254d81c363e7b142496bf1f1dbdc371
|
[
"Apache-2.0"
] | null | null | null |
config.py
|
itechnotion/flask-boilerplate
|
cd9c0f469254d81c363e7b142496bf1f1dbdc371
|
[
"Apache-2.0"
] | null | null | null |
config.py
|
itechnotion/flask-boilerplate
|
cd9c0f469254d81c363e7b142496bf1f1dbdc371
|
[
"Apache-2.0"
] | null | null | null |
class Config(object):
    """Base Flask configuration; environment-specific classes override it."""

    # Debug/testing features stay off unless a subclass enables them.
    DEBUG = False
    TESTING = False
    SESSION_COOKIE_SECURE = False
    # NOTE(review): secret key is hard-coded in source control — should be
    # loaded from the environment for real deployments.
    SECRET_KEY = "B\xb2?.\xdf\x9f\xa7m\xf8\x8a%,\xf7\xc4\xfa\x91"
    MONGO_URI = "mongodb://localhost:27017/test"
    IMAGE_UPLOADS = "/home/username/projects/my_app/app/static/images/uploads"
class ProductionConfig(Config):
    """Production settings: secure cookies, DB selected via DB_NAME."""

    DEBUG = False
    SESSION_COOKIE_SECURE = True
    MONGO_URI = "mongodb://localhost:27017/"
    DB_NAME = "test"
    IMAGE_UPLOADS = "/home/username/projects/my_app/app/static/images/uploads"
class DevelopmentConfig(Config):
    """Development settings: debug on, insecure cookies, local MongoDB."""

    DEBUG = True
    SESSION_COOKIE_SECURE = False
    MONGO_URI = "mongodb://localhost:27017/"
    DB_NAME = "test"
    IMAGE_UPLOADS = "/home/username/projects/my_app/app/static/images/uploads"
class TestingConfig(Config):
    """Test-run settings: TESTING flag on, test database baked into the URI."""

    TESTING = True
    SESSION_COOKIE_SECURE = False
    MONGO_URI = "mongodb://localhost:27017/test"
    IMAGE_UPLOADS = "/home/username/projects/my_app/app/static/images/uploads"
| 35.111111
| 78
| 0.724684
| 123
| 948
| 5.398374
| 0.349594
| 0.048193
| 0.090361
| 0.144578
| 0.736446
| 0.736446
| 0.724398
| 0.724398
| 0.724398
| 0.724398
| 0
| 0.035936
| 0.148734
| 948
| 27
| 79
| 35.111111
| 0.786865
| 0
| 0
| 0.625
| 0
| 0.041667
| 0.410959
| 0.402529
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
0a51f1ff9a22ea232f2fbcffcef0d5952cde5b50
| 34,097
|
py
|
Python
|
cinder/tests/unit/api/contrib/test_qos_specs_manage.py
|
ilay09/cinder
|
86f084d42f18bd5971cc7a0df3e6d815543a472d
|
[
"Apache-2.0"
] | null | null | null |
cinder/tests/unit/api/contrib/test_qos_specs_manage.py
|
ilay09/cinder
|
86f084d42f18bd5971cc7a0df3e6d815543a472d
|
[
"Apache-2.0"
] | null | null | null |
cinder/tests/unit/api/contrib/test_qos_specs_manage.py
|
ilay09/cinder
|
86f084d42f18bd5971cc7a0df3e6d815543a472d
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2013 eBay Inc.
# Copyright 2013 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import ddt
import mock
from six.moves import http_client
import webob
from cinder.api.contrib import qos_specs_manage
from cinder import context
from cinder import db
from cinder import exception
from cinder import objects
from cinder import test
from cinder.tests.unit.api import fakes
from cinder.tests.unit import fake_constants as fake
from cinder.tests.unit import fake_notifier
def stub_qos_specs(id):
    """Return a fake QualityOfServiceSpecs object for the given id."""
    # ``id`` shadows the builtin but is kept for caller compatibility.
    return objects.QualityOfServiceSpecs(
        name='qos_specs_' + str(id),
        consumer='back-end',
        id=str(id),
        specs={"key1": "value1",
               "key2": "value2",
               "key3": "value3",
               "key4": "value4",
               "key5": "value5"})
def stub_qos_associates(id):
    """Return a single fake volume-type association for the given id."""
    association = {
        'association_type': 'volume_type',
        'name': 'FakeVolTypeName',
        'id': fake.VOLUME_TYPE_ID,
    }
    return [association]
def return_qos_specs_get_all(context, filters=None, marker=None, limit=None,
                             offset=None, sort_keys=None, sort_dirs=None):
    """Stub get_all: return three fake qos specs, ignoring all filter args."""
    return [stub_qos_specs(spec_id)
            for spec_id in (fake.QOS_SPEC_ID,
                            fake.QOS_SPEC2_ID,
                            fake.QOS_SPEC3_ID)]
def return_qos_specs_get_qos_specs(context, id):
    """Stub get: return a fake spec, raising for the not-found sentinel id."""
    if id != fake.WILL_NOT_BE_FOUND_ID:
        return stub_qos_specs(id)
    raise exception.QoSSpecsNotFound(specs_id=id)
def return_qos_specs_delete(context, id, force):
    """Stub delete: raise for the not-found and in-use sentinel ids.

    Any other id succeeds (implicit None return).
    """
    if id == fake.WILL_NOT_BE_FOUND_ID:
        raise exception.QoSSpecsNotFound(specs_id=id)
    elif id == fake.IN_USE_ID:
        raise exception.QoSSpecsInUse(specs_id=id)
    # Dead trailing ``pass`` removed; fall-through returns None as before.
def return_qos_specs_delete_keys(context, id, keys):
    """Stub delete_keys: raise for the not-found id or the unknown 'foo' key."""
    if id == fake.WILL_NOT_BE_FOUND_ID:
        raise exception.QoSSpecsNotFound(specs_id=id)
    if 'foo' in keys:
        raise exception.QoSSpecsKeyNotFound(specs_id=id, specs_key='foo')
def return_qos_specs_update(context, id, specs):
    """Stub for qos_specs.update; raises for the sentinel error ids."""
    if id == fake.WILL_NOT_BE_FOUND_ID:
        raise exception.QoSSpecsNotFound(specs_id=id)
    if id == fake.INVALID_ID:
        raise exception.InvalidQoSSpecs(reason=id)
    if id == fake.UPDATE_FAILED_ID:
        raise exception.QoSSpecsUpdateFailed(specs_id=id,
                                             qos_specs=specs)
def return_qos_specs_create(context, name, specs):
    """Stub for qos_specs.create.

    Raises the exception matching the sentinel embedded in ``name``;
    otherwise returns a fake QualityOfServiceSpecs object.
    """
    if name == 'qos_spec_%s' % fake.ALREADY_EXISTS_ID:
        raise exception.QoSSpecsExists(specs_id=name)
    elif name == 'qos_spec_%s' % fake.ACTION_FAILED_ID:
        # Bug fix: this previously passed ``name=id`` -- the *builtin*
        # ``id`` function -- so the exception message contained
        # "<built-in function id>" instead of the spec name.
        raise exception.QoSSpecsCreateFailed(name=name, qos_specs=specs)
    elif name == 'qos_spec_%s' % fake.INVALID_ID:
        raise exception.InvalidQoSSpecs(reason=name)
    return objects.QualityOfServiceSpecs(name=name,
                                         specs=specs,
                                         consumer='back-end',
                                         id=fake.QOS_SPEC_ID)
def return_get_qos_associations(context, id):
    """Stub for qos_specs.get_associations."""
    if id == fake.WILL_NOT_BE_FOUND_ID:
        raise exception.QoSSpecsNotFound(specs_id=id)
    if id == fake.RAISE_ID:
        raise exception.CinderException()
    return stub_qos_associates(id)
def return_associate_qos_specs(context, id, type_id):
    """Stub for associate/disassociate; raises for sentinel ids.

    Spec-id sentinels are checked before the volume-type sentinel,
    matching the order the real API validates arguments.
    """
    if id == fake.WILL_NOT_BE_FOUND_ID:
        raise exception.QoSSpecsNotFound(specs_id=id)
    if id == fake.ACTION_FAILED_ID:
        raise exception.QoSSpecsAssociateFailed(specs_id=id,
                                                type_id=type_id)
    if id == fake.ACTION2_FAILED_ID:
        raise exception.QoSSpecsDisassociateFailed(specs_id=id,
                                                   type_id=type_id)
    if type_id == fake.WILL_NOT_BE_FOUND_ID:
        raise exception.VolumeTypeNotFound(volume_type_id=type_id)
def return_disassociate_all(context, id):
    """Stub for qos_specs.disassociate_all; raises for sentinel ids."""
    if id == fake.WILL_NOT_BE_FOUND_ID:
        raise exception.QoSSpecsNotFound(specs_id=id)
    if id == fake.ACTION2_FAILED_ID:
        raise exception.QoSSpecsDisassociateFailed(specs_id=id,
                                                   type_id=None)
@ddt.ddt
class QoSSpecManageApiTest(test.TestCase):
    """Tests for the qos-specs manage API extension controller.

    Most tests stub out the cinder.volume.qos_specs module functions and
    verify the controller's HTTP behavior (status codes, raised webob
    exceptions) plus the number of notifications emitted.
    """

    def _create_qos_specs(self, name, values=None):
        """Create a qos specs entry in the DB and return its id."""
        if values:
            specs = dict(name=name, qos_specs=values)
        else:
            specs = {'name': name,
                     'consumer': 'back-end',
                     'specs': {
                         'key1': 'value1',
                         'key2': 'value2'}}
        return db.qos_specs_create(self.ctxt, specs)['id']

    def setUp(self):
        super(QoSSpecManageApiTest, self).setUp()
        self.flags(host='fake')
        self.controller = qos_specs_manage.QoSSpecsController()
        # Admin context is used for the DB fixtures below; a plain user
        # context is also kept for non-admin request paths.
        self.ctxt = context.RequestContext(user_id=fake.USER_ID,
                                           project_id=fake.PROJECT_ID,
                                           is_admin=True)
        self.user_ctxt = context.RequestContext(
            fake.USER_ID, fake.PROJECT_ID, auth_token=True)
        # Four real DB rows so the index/pagination tests have data.
        self.qos_id1 = self._create_qos_specs("Qos_test_1")
        self.qos_id2 = self._create_qos_specs("Qos_test_2")
        self.qos_id3 = self._create_qos_specs("Qos_test_3")
        self.qos_id4 = self._create_qos_specs("Qos_test_4")

    @mock.patch('cinder.volume.qos_specs.get_all_specs',
                side_effect=return_qos_specs_get_all)
    def test_index(self, mock_get_all_specs):
        req = fakes.HTTPRequest.blank('/v2/%s/qos-specs' % fake.PROJECT_ID)
        res = self.controller.index(req)
        self.assertEqual(3, len(res['qos_specs']))
        names = set()
        for item in res['qos_specs']:
            self.assertEqual('value1', item['specs']['key1'])
            names.add(item['name'])
        expected_names = ['qos_specs_%s' % fake.QOS_SPEC_ID,
                          'qos_specs_%s' % fake.QOS_SPEC2_ID,
                          'qos_specs_%s' % fake.QOS_SPEC3_ID]
        self.assertEqual(set(expected_names), names)

    def test_index_with_limit(self):
        url = '/v2/%s/qos-specs?limit=2' % fake.PROJECT_ID
        req = fakes.HTTPRequest.blank(url, use_admin_context=True)
        res = self.controller.index(req)
        # Default sort is newest-first, so qos_id4 comes before qos_id3.
        self.assertEqual(2, len(res['qos_specs']))
        self.assertEqual(self.qos_id4, res['qos_specs'][0]['id'])
        self.assertEqual(self.qos_id3, res['qos_specs'][1]['id'])
        expect_next_link = ('http://localhost/v2/%s/qos-specs?limit'
                            '=2&marker=%s') % (
                                fake.PROJECT_ID, res['qos_specs'][1]['id'])
        self.assertEqual(expect_next_link, res['qos_specs_links'][0]['href'])

    def test_index_with_offset(self):
        url = '/v2/%s/qos-specs?offset=1' % fake.PROJECT_ID
        req = fakes.HTTPRequest.blank(url, use_admin_context=True)
        res = self.controller.index(req)
        self.assertEqual(3, len(res['qos_specs']))

    def test_index_with_offset_out_of_range(self):
        url = '/v2/%s/qos-specs?offset=356576877698707' % fake.PROJECT_ID
        req = fakes.HTTPRequest.blank(url, use_admin_context=True)
        self.assertRaises(webob.exc.HTTPBadRequest, self.controller.index,
                          req)

    def test_index_with_limit_and_offset(self):
        url = '/v2/%s/qos-specs?limit=2&offset=1' % fake.PROJECT_ID
        req = fakes.HTTPRequest.blank(url, use_admin_context=True)
        res = self.controller.index(req)
        self.assertEqual(2, len(res['qos_specs']))
        self.assertEqual(self.qos_id3, res['qos_specs'][0]['id'])
        self.assertEqual(self.qos_id2, res['qos_specs'][1]['id'])

    def test_index_with_marker(self):
        url = '/v2/%s/qos-specs?marker=%s' % (fake.PROJECT_ID, self.qos_id4)
        req = fakes.HTTPRequest.blank(url, use_admin_context=True)
        res = self.controller.index(req)
        self.assertEqual(3, len(res['qos_specs']))

    def test_index_with_filter(self):
        url = '/v2/%s/qos-specs?id=%s' % (fake.PROJECT_ID, self.qos_id4)
        req = fakes.HTTPRequest.blank(url, use_admin_context=True)
        res = self.controller.index(req)
        self.assertEqual(1, len(res['qos_specs']))
        self.assertEqual(self.qos_id4, res['qos_specs'][0]['id'])

    def test_index_with_sort_keys(self):
        url = '/v2/%s/qos-specs?sort=id' % fake.PROJECT_ID
        req = fakes.HTTPRequest.blank(url, use_admin_context=True)
        res = self.controller.index(req)
        self.assertEqual(4, len(res['qos_specs']))
        expect_result = [self.qos_id1, self.qos_id2,
                         self.qos_id3, self.qos_id4]
        # Sort direction defaults to descending when only a key is given.
        expect_result.sort(reverse=True)
        self.assertEqual(expect_result[0], res['qos_specs'][0]['id'])
        self.assertEqual(expect_result[1], res['qos_specs'][1]['id'])
        self.assertEqual(expect_result[2], res['qos_specs'][2]['id'])
        self.assertEqual(expect_result[3], res['qos_specs'][3]['id'])

    def test_index_with_sort_keys_and_sort_dirs(self):
        url = '/v2/%s/qos-specs?sort=id:asc' % fake.PROJECT_ID
        req = fakes.HTTPRequest.blank(url, use_admin_context=True)
        res = self.controller.index(req)
        self.assertEqual(4, len(res['qos_specs']))
        expect_result = [self.qos_id1, self.qos_id2,
                         self.qos_id3, self.qos_id4]
        expect_result.sort()
        self.assertEqual(expect_result[0], res['qos_specs'][0]['id'])
        self.assertEqual(expect_result[1], res['qos_specs'][1]['id'])
        self.assertEqual(expect_result[2], res['qos_specs'][2]['id'])
        self.assertEqual(expect_result[3], res['qos_specs'][3]['id'])

    @mock.patch('cinder.volume.qos_specs.get_qos_specs',
                side_effect=return_qos_specs_get_qos_specs)
    @mock.patch('cinder.volume.qos_specs.delete',
                side_effect=return_qos_specs_delete)
    def test_qos_specs_delete(self, mock_qos_delete, mock_qos_get_specs):
        req = fakes.HTTPRequest.blank('/v2/%s/qos-specs/%s' % (
            fake.PROJECT_ID, fake.QOS_SPEC_ID))
        notifier = fake_notifier.get_fake_notifier()
        with mock.patch('cinder.rpc.get_notifier', return_value=notifier):
            self.controller.delete(req, fake.QOS_SPEC_ID)
            self.assertEqual(1, notifier.get_notification_count())

    @mock.patch('cinder.volume.qos_specs.get_qos_specs',
                side_effect=return_qos_specs_get_qos_specs)
    @mock.patch('cinder.volume.qos_specs.delete',
                side_effect=return_qos_specs_delete)
    def test_qos_specs_delete_not_found(self, mock_qos_delete,
                                        mock_qos_get_specs):
        notifier = fake_notifier.get_fake_notifier()
        with mock.patch('cinder.rpc.get_notifier', return_value=notifier):
            req = fakes.HTTPRequest.blank('/v2/%s/qos-specs/%s' %
                                          (fake.PROJECT_ID,
                                           fake.WILL_NOT_BE_FOUND_ID))
            self.assertRaises(exception.QoSSpecsNotFound,
                              self.controller.delete, req,
                              fake.WILL_NOT_BE_FOUND_ID)
            # An error notification is still emitted on failure.
            self.assertEqual(1, notifier.get_notification_count())

    @mock.patch('cinder.volume.qos_specs.get_qos_specs',
                side_effect=return_qos_specs_get_qos_specs)
    @mock.patch('cinder.volume.qos_specs.delete',
                side_effect=return_qos_specs_delete)
    def test_qos_specs_delete_inuse(self, mock_qos_delete,
                                    mock_qos_get_specs):
        req = fakes.HTTPRequest.blank('/v2/%s/qos-specs/%s' % (
            fake.PROJECT_ID, fake.IN_USE_ID))
        notifier = fake_notifier.get_fake_notifier()
        with mock.patch('cinder.rpc.get_notifier', return_value=notifier):
            self.assertRaises(webob.exc.HTTPBadRequest, self.controller.delete,
                              req, fake.IN_USE_ID)
            self.assertEqual(1, notifier.get_notification_count())

    @mock.patch('cinder.volume.qos_specs.get_qos_specs',
                side_effect=return_qos_specs_get_qos_specs)
    @mock.patch('cinder.volume.qos_specs.delete',
                side_effect=return_qos_specs_delete)
    def test_qos_specs_delete_inuse_force(self, mock_qos_delete,
                                          mock_qos_get_specs):
        # The stub raises QoSSpecsInUse even with force=True, which the
        # controller maps to a 500.
        req = fakes.HTTPRequest.blank('/v2/%s/qos-specs/%s?force=True' %
                                      (fake.PROJECT_ID, fake.IN_USE_ID))
        notifier = fake_notifier.get_fake_notifier()
        with mock.patch('cinder.rpc.get_notifier', return_value=notifier):
            self.assertRaises(webob.exc.HTTPInternalServerError,
                              self.controller.delete,
                              req, fake.IN_USE_ID)
            self.assertEqual(1, notifier.get_notification_count())

    def test_qos_specs_delete_with_invalid_force(self):
        invalid_force = "invalid_bool"
        req = fakes.HTTPRequest.blank(
            '/v2/%s/qos-specs/%s/delete_keys?force=%s' %
            (fake.PROJECT_ID, fake.QOS_SPEC_ID, invalid_force))
        self.assertRaises(exception.InvalidParameterValue,
                          self.controller.delete,
                          req, fake.QOS_SPEC_ID)

    @mock.patch('cinder.volume.qos_specs.delete_keys',
                side_effect=return_qos_specs_delete_keys)
    def test_qos_specs_delete_keys(self, mock_qos_delete_keys):
        body = {"keys": ['bar', 'zoo']}
        req = fakes.HTTPRequest.blank('/v2/%s/qos-specs/%s/delete_keys' %
                                      (fake.PROJECT_ID, fake.IN_USE_ID))
        notifier = fake_notifier.get_fake_notifier()
        with mock.patch('cinder.rpc.get_notifier', return_value=notifier):
            self.controller.delete_keys(req, fake.IN_USE_ID, body)
            self.assertEqual(1, notifier.get_notification_count())

    @mock.patch('cinder.volume.qos_specs.delete_keys',
                side_effect=return_qos_specs_delete_keys)
    def test_qos_specs_delete_keys_qos_notfound(self, mock_qos_specs_delete):
        body = {"keys": ['bar', 'zoo']}
        req = fakes.HTTPRequest.blank('/v2/%s/qos-specs/%s/delete_keys' %
                                      (fake.PROJECT_ID,
                                       fake.WILL_NOT_BE_FOUND_ID))
        notifier = fake_notifier.get_fake_notifier()
        with mock.patch('cinder.rpc.get_notifier', return_value=notifier):
            self.assertRaises(exception.QoSSpecsNotFound,
                              self.controller.delete_keys,
                              req, fake.WILL_NOT_BE_FOUND_ID, body)
            self.assertEqual(1, notifier.get_notification_count())

    @mock.patch('cinder.volume.qos_specs.delete_keys',
                side_effect=return_qos_specs_delete_keys)
    def test_qos_specs_delete_keys_badkey(self, mock_qos_specs_delete):
        req = fakes.HTTPRequest.blank('/v2/%s/qos-specs/%s/delete_keys' %
                                      (fake.PROJECT_ID, fake.IN_USE_ID))
        # 'foo' triggers QoSSpecsKeyNotFound in the delete_keys stub.
        body = {"keys": ['foo', 'zoo']}
        notifier = fake_notifier.get_fake_notifier()
        with mock.patch('cinder.rpc.get_notifier', return_value=notifier):
            self.assertRaises(exception.QoSSpecsKeyNotFound,
                              self.controller.delete_keys,
                              req, fake.IN_USE_ID, body)
            self.assertEqual(1, notifier.get_notification_count())

    @mock.patch('cinder.volume.qos_specs.delete_keys',
                side_effect=return_qos_specs_delete_keys)
    def test_qos_specs_delete_keys_get_notifier(self, mock_qos_delete_keys):
        body = {"keys": ['bar', 'zoo']}
        req = fakes.HTTPRequest.blank('/v2/%s/qos-specs/%s/delete_keys' %
                                      (fake.PROJECT_ID, fake.IN_USE_ID))
        notifier = fake_notifier.get_fake_notifier()
        with mock.patch('cinder.rpc.get_notifier', return_value=notifier,
                        autospec=True) as mock_get_notifier:
            self.controller.delete_keys(req, fake.IN_USE_ID, body)
            mock_get_notifier.assert_called_once_with('QoSSpecs')

    @mock.patch('cinder.volume.qos_specs.create',
                side_effect=return_qos_specs_create)
    @mock.patch('cinder.utils.validate_dictionary_string_length')
    def test_create(self, mock_validate, mock_qos_spec_create):
        body = {"qos_specs": {"name": "qos_specs_%s" % fake.QOS_SPEC_ID,
                              "key1": "value1"}}
        req = fakes.HTTPRequest.blank('/v2/%s/qos-specs' % fake.PROJECT_ID)
        notifier = fake_notifier.get_fake_notifier()
        with mock.patch('cinder.rpc.get_notifier', return_value=notifier):
            res_dict = self.controller.create(req, body)
            self.assertEqual(1, notifier.get_notification_count())
            self.assertEqual('qos_specs_%s' % fake.QOS_SPEC_ID,
                             res_dict['qos_specs']['name'])
            self.assertTrue(mock_validate.called)

    @mock.patch('cinder.volume.qos_specs.create',
                side_effect=return_qos_specs_create)
    def test_create_invalid_input(self, mock_qos_get_specs):
        body = {"qos_specs": {"name": 'qos_spec_%s' % fake.INVALID_ID,
                              "consumer": "invalid_consumer"}}
        req = fakes.HTTPRequest.blank('/v2/%s/qos-specs' % fake.PROJECT_ID)
        notifier = fake_notifier.get_fake_notifier()
        with mock.patch('cinder.rpc.get_notifier', return_value=notifier):
            self.assertRaises(webob.exc.HTTPBadRequest,
                              self.controller.create, req, body)
            self.assertEqual(1, notifier.get_notification_count())

    @mock.patch('cinder.volume.qos_specs.create',
                side_effect=return_qos_specs_create)
    def test_create_conflict(self, mock_qos_spec_create):
        body = {"qos_specs": {"name": 'qos_spec_%s' % fake.ALREADY_EXISTS_ID,
                              "key1": "value1"}}
        req = fakes.HTTPRequest.blank('/v2/%s/qos-specs' % fake.PROJECT_ID)
        notifier = fake_notifier.get_fake_notifier()
        with mock.patch('cinder.rpc.get_notifier', return_value=notifier):
            self.assertRaises(webob.exc.HTTPConflict,
                              self.controller.create, req, body)
            self.assertEqual(1, notifier.get_notification_count())

    @mock.patch('cinder.volume.qos_specs.create',
                side_effect=return_qos_specs_create)
    def test_create_failed(self, mock_qos_spec_create):
        body = {"qos_specs": {"name": 'qos_spec_%s' % fake.ACTION_FAILED_ID,
                              "key1": "value1"}}
        req = fakes.HTTPRequest.blank('/v2/%s/qos-specs' % fake.PROJECT_ID)
        notifier = fake_notifier.get_fake_notifier()
        with mock.patch('cinder.rpc.get_notifier', return_value=notifier):
            self.assertRaises(webob.exc.HTTPInternalServerError,
                              self.controller.create, req, body)
            self.assertEqual(1, notifier.get_notification_count())

    @ddt.data({'foo': {'a': 'b'}},
              {'qos_specs': {'a': 'b'}},
              {'qos_specs': 'string'},
              None)
    def test_create_invalid_body_bad_request(self, body):
        req = fakes.HTTPRequest.blank('/v2/%s/qos-specs' % fake.PROJECT_ID,
                                      use_admin_context=True)
        req.method = 'POST'
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self.controller.create, req, body)

    @ddt.data({'name': 'fake_name', 'a' * 256: 'a'},
              {'name': 'fake_name', 'a': 'a' * 256},
              {'name': 'fake_name', '': 'a'})
    def test_create_qos_with_invalid_specs(self, value):
        # Over-long or empty spec keys/values must be rejected.
        body = {'qos_specs': value}
        req = fakes.HTTPRequest.blank('/v2/%s/qos-specs' % fake.PROJECT_ID,
                                      use_admin_context=True)
        req.method = 'POST'
        self.assertRaises(exception.InvalidInput,
                          self.controller.create, req, body)

    @ddt.data({'name': None},
              {'name': 'n' * 256},
              {'name': ''},
              {'name': ' '})
    def test_create_qos_with_invalid_spec_name(self, value):
        body = {'qos_specs': value}
        req = fakes.HTTPRequest.blank('/v2/%s/qos-specs' % fake.PROJECT_ID,
                                      use_admin_context=True)
        req.method = 'POST'
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self.controller.create, req, body)

    @mock.patch('cinder.volume.qos_specs.update',
                side_effect=return_qos_specs_update)
    def test_update(self, mock_qos_update):
        notifier = fake_notifier.get_fake_notifier()
        with mock.patch('cinder.rpc.get_notifier', return_value=notifier):
            req = fakes.HTTPRequest.blank('/v2/%s/qos-specs/%s' %
                                          (fake.PROJECT_ID, fake.QOS_SPEC_ID))
            body = {'qos_specs': {'key1': 'value1',
                                  'key2': 'value2'}}
            res = self.controller.update(req, fake.QOS_SPEC_ID, body)
            self.assertDictEqual(body, res)
            self.assertEqual(1, notifier.get_notification_count())

    @mock.patch('cinder.volume.qos_specs.update',
                side_effect=return_qos_specs_update)
    def test_update_not_found(self, mock_qos_update):
        notifier = fake_notifier.get_fake_notifier()
        with mock.patch('cinder.rpc.get_notifier', return_value=notifier):
            req = fakes.HTTPRequest.blank('/v2/%s/qos-specs/%s' %
                                          (fake.PROJECT_ID,
                                           fake.WILL_NOT_BE_FOUND_ID))
            body = {'qos_specs': {'key1': 'value1',
                                  'key2': 'value2'}}
            self.assertRaises(exception.QoSSpecsNotFound,
                              self.controller.update,
                              req, fake.WILL_NOT_BE_FOUND_ID, body)
            self.assertEqual(1, notifier.get_notification_count())

    @mock.patch('cinder.volume.qos_specs.update',
                side_effect=return_qos_specs_update)
    def test_update_invalid_input(self, mock_qos_update):
        notifier = fake_notifier.get_fake_notifier()
        with mock.patch('cinder.rpc.get_notifier', return_value=notifier):
            req = fakes.HTTPRequest.blank('/v2/%s/qos-specs/%s' %
                                          (fake.PROJECT_ID, fake.INVALID_ID))
            body = {'qos_specs': {'key1': 'value1',
                                  'key2': 'value2'}}
            self.assertRaises(exception.InvalidQoSSpecs,
                              self.controller.update,
                              req, fake.INVALID_ID, body)
            self.assertEqual(1, notifier.get_notification_count())

    @mock.patch('cinder.volume.qos_specs.update',
                side_effect=return_qos_specs_update)
    def test_update_failed(self, mock_qos_update):
        notifier = fake_notifier.get_fake_notifier()
        with mock.patch('cinder.rpc.get_notifier', return_value=notifier):
            req = fakes.HTTPRequest.blank('/v2/%s/qos-specs/%s' %
                                          (fake.PROJECT_ID,
                                           fake.UPDATE_FAILED_ID))
            body = {'qos_specs': {'key1': 'value1',
                                  'key2': 'value2'}}
            self.assertRaises(webob.exc.HTTPInternalServerError,
                              self.controller.update,
                              req, fake.UPDATE_FAILED_ID, body)
            self.assertEqual(1, notifier.get_notification_count())

    @mock.patch('cinder.volume.qos_specs.get_qos_specs',
                side_effect=return_qos_specs_get_qos_specs)
    def test_show(self, mock_get_qos_specs):
        req = fakes.HTTPRequest.blank('/v2/%s/qos-specs/%s' % (
            fake.PROJECT_ID, fake.QOS_SPEC_ID))
        res_dict = self.controller.show(req, fake.QOS_SPEC_ID)
        self.assertEqual(fake.QOS_SPEC_ID, res_dict['qos_specs']['id'])
        self.assertEqual('qos_specs_%s' % fake.QOS_SPEC_ID,
                         res_dict['qos_specs']['name'])

    @mock.patch('cinder.volume.qos_specs.get_associations',
                side_effect=return_get_qos_associations)
    def test_get_associations(self, mock_get_assciations):
        req = fakes.HTTPRequest.blank(
            '/v2/%s/qos-specs/%s/associations' % (
                fake.PROJECT_ID, fake.QOS_SPEC_ID))
        res = self.controller.associations(req, fake.QOS_SPEC_ID)
        self.assertEqual('FakeVolTypeName',
                         res['qos_associations'][0]['name'])
        self.assertEqual(fake.VOLUME_TYPE_ID,
                         res['qos_associations'][0]['id'])

    @mock.patch('cinder.volume.qos_specs.get_associations',
                side_effect=return_get_qos_associations)
    def test_get_associations_not_found(self, mock_get_assciations):
        req = fakes.HTTPRequest.blank(
            '/v2/%s/qos-specs/%s/associations' %
            (fake.PROJECT_ID, fake.WILL_NOT_BE_FOUND_ID))
        self.assertRaises(exception.QoSSpecsNotFound,
                          self.controller.associations,
                          req, fake.WILL_NOT_BE_FOUND_ID)

    @mock.patch('cinder.volume.qos_specs.get_associations',
                side_effect=return_get_qos_associations)
    def test_get_associations_failed(self, mock_get_associations):
        # RAISE_ID makes the stub raise a generic CinderException -> 500.
        req = fakes.HTTPRequest.blank(
            '/v2/%s/qos-specs/%s/associations' % (
                fake.PROJECT_ID, fake.RAISE_ID))
        self.assertRaises(webob.exc.HTTPInternalServerError,
                          self.controller.associations,
                          req, fake.RAISE_ID)

    @mock.patch('cinder.volume.qos_specs.get_qos_specs',
                side_effect=return_qos_specs_get_qos_specs)
    @mock.patch('cinder.volume.qos_specs.associate_qos_with_type',
                side_effect=return_associate_qos_specs)
    def test_associate(self, mock_associate, mock_get_qos):
        req = fakes.HTTPRequest.blank(
            '/v2/%s/qos-specs/%s/associate?vol_type_id=%s' %
            (fake.PROJECT_ID, fake.QOS_SPEC_ID, fake.VOLUME_TYPE_ID))
        res = self.controller.associate(req, fake.QOS_SPEC_ID)
        self.assertEqual(http_client.ACCEPTED, res.status_int)

    @mock.patch('cinder.volume.qos_specs.get_qos_specs',
                side_effect=return_qos_specs_get_qos_specs)
    @mock.patch('cinder.volume.qos_specs.associate_qos_with_type',
                side_effect=return_associate_qos_specs)
    def test_associate_no_type(self, mock_associate, mock_get_qos):
        req = fakes.HTTPRequest.blank('/v2/%s/qos-specs/%s/associate' %
                                      (fake.PROJECT_ID, fake.QOS_SPEC_ID))
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self.controller.associate, req, fake.QOS_SPEC_ID)

    @mock.patch('cinder.volume.qos_specs.get_qos_specs',
                side_effect=return_qos_specs_get_qos_specs)
    @mock.patch('cinder.volume.qos_specs.associate_qos_with_type',
                side_effect=return_associate_qos_specs)
    def test_associate_not_found(self, mock_associate, mock_get_qos):
        # Unknown qos spec id.
        req = fakes.HTTPRequest.blank(
            '/v2/%s/qos-specs/%s/associate?vol_type_id=%s' % (
                fake.PROJECT_ID, fake.WILL_NOT_BE_FOUND_ID,
                fake.VOLUME_TYPE_ID))
        self.assertRaises(exception.QoSSpecsNotFound,
                          self.controller.associate, req,
                          fake.WILL_NOT_BE_FOUND_ID)
        # Unknown volume type id.
        req = fakes.HTTPRequest.blank(
            '/v2/%s/qos-specs/%s/associate?vol_type_id=%s' %
            (fake.PROJECT_ID, fake.QOS_SPEC_ID, fake.WILL_NOT_BE_FOUND_ID))
        self.assertRaises(exception.VolumeTypeNotFound,
                          self.controller.associate, req, fake.QOS_SPEC_ID)

    @mock.patch('cinder.volume.qos_specs.get_qos_specs',
                side_effect=return_qos_specs_get_qos_specs)
    @mock.patch('cinder.volume.qos_specs.associate_qos_with_type',
                side_effect=return_associate_qos_specs)
    def test_associate_fail(self, mock_associate, mock_get_qos):
        req = fakes.HTTPRequest.blank(
            '/v2/%s/qos-specs/%s/associate?vol_type_id=%s' %
            (fake.PROJECT_ID, fake.ACTION_FAILED_ID, fake.VOLUME_TYPE_ID))
        self.assertRaises(webob.exc.HTTPInternalServerError,
                          self.controller.associate, req,
                          fake.ACTION_FAILED_ID)

    @mock.patch('cinder.volume.qos_specs.get_qos_specs',
                side_effect=return_qos_specs_get_qos_specs)
    @mock.patch('cinder.volume.qos_specs.disassociate_qos_specs',
                side_effect=return_associate_qos_specs)
    def test_disassociate(self, mock_disassociate, mock_get_qos):
        req = fakes.HTTPRequest.blank(
            '/v2/%s/qos-specs/%s/disassociate?vol_type_id=%s' % (
                fake.PROJECT_ID, fake.QOS_SPEC_ID, fake.VOLUME_TYPE_ID))
        res = self.controller.disassociate(req, fake.QOS_SPEC_ID)
        self.assertEqual(http_client.ACCEPTED, res.status_int)

    @mock.patch('cinder.volume.qos_specs.get_qos_specs',
                side_effect=return_qos_specs_get_qos_specs)
    @mock.patch('cinder.volume.qos_specs.disassociate_qos_specs',
                side_effect=return_associate_qos_specs)
    def test_disassociate_no_type(self, mock_disassociate, mock_get_qos):
        req = fakes.HTTPRequest.blank(
            '/v2/%s/qos-specs/%s/disassociate' % (
                fake.PROJECT_ID, fake.QOS_SPEC_ID))
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self.controller.disassociate, req, fake.QOS_SPEC_ID)

    @mock.patch('cinder.volume.qos_specs.get_qos_specs',
                side_effect=return_qos_specs_get_qos_specs)
    @mock.patch('cinder.volume.qos_specs.disassociate_qos_specs',
                side_effect=return_associate_qos_specs)
    def test_disassociate_not_found(self, mock_disassociate, mock_get_qos):
        # Unknown qos spec id.
        req = fakes.HTTPRequest.blank(
            '/v2/%s/qos-specs/%s/disassociate?vol_type_id=%s' % (
                fake.PROJECT_ID, fake.WILL_NOT_BE_FOUND_ID,
                fake.VOLUME_TYPE_ID))
        self.assertRaises(exception.QoSSpecsNotFound,
                          self.controller.disassociate, req,
                          fake.WILL_NOT_BE_FOUND_ID)
        # Unknown volume type id.
        req = fakes.HTTPRequest.blank(
            '/v2/%s/qos-specs/%s/disassociate?vol_type_id=%s' %
            (fake.PROJECT_ID, fake.VOLUME_TYPE_ID, fake.WILL_NOT_BE_FOUND_ID))
        self.assertRaises(exception.VolumeTypeNotFound,
                          self.controller.disassociate, req,
                          fake.VOLUME_TYPE_ID)

    @mock.patch('cinder.volume.qos_specs.get_qos_specs',
                side_effect=return_qos_specs_get_qos_specs)
    @mock.patch('cinder.volume.qos_specs.disassociate_qos_specs',
                side_effect=return_associate_qos_specs)
    def test_disassociate_failed(self, mock_disassociate, mock_get_qos):
        req = fakes.HTTPRequest.blank(
            '/v2/%s/qos-specs/%s/disassociate?vol_type_id=%s' % (
                fake.PROJECT_ID, fake.ACTION2_FAILED_ID, fake.VOLUME_TYPE_ID))
        self.assertRaises(webob.exc.HTTPInternalServerError,
                          self.controller.disassociate, req,
                          fake.ACTION2_FAILED_ID)

    @mock.patch('cinder.volume.qos_specs.get_qos_specs',
                side_effect=return_qos_specs_get_qos_specs)
    @mock.patch('cinder.volume.qos_specs.disassociate_all',
                side_effect=return_disassociate_all)
    def test_disassociate_all(self, mock_disassociate, mock_get_qos):
        req = fakes.HTTPRequest.blank(
            '/v2/%s/qos-specs/%s/disassociate_all' % (
                fake.PROJECT_ID, fake.QOS_SPEC_ID))
        res = self.controller.disassociate_all(req, fake.QOS_SPEC_ID)
        self.assertEqual(http_client.ACCEPTED, res.status_int)

    @mock.patch('cinder.volume.qos_specs.get_qos_specs',
                side_effect=return_qos_specs_get_qos_specs)
    @mock.patch('cinder.volume.qos_specs.disassociate_all',
                side_effect=return_disassociate_all)
    def test_disassociate_all_not_found(self, mock_disassociate, mock_get):
        req = fakes.HTTPRequest.blank(
            '/v2/%s/qos-specs/%s/disassociate_all' % (
                fake.PROJECT_ID, fake.WILL_NOT_BE_FOUND_ID))
        self.assertRaises(exception.QoSSpecsNotFound,
                          self.controller.disassociate_all, req,
                          fake.WILL_NOT_BE_FOUND_ID)

    @mock.patch('cinder.volume.qos_specs.get_qos_specs',
                side_effect=return_qos_specs_get_qos_specs)
    @mock.patch('cinder.volume.qos_specs.disassociate_all',
                side_effect=return_disassociate_all)
    def test_disassociate_all_failed(self, mock_disassociate, mock_get):
        req = fakes.HTTPRequest.blank(
            '/v2/%s/qos-specs/%s/disassociate_all' % (
                fake.PROJECT_ID, fake.ACTION2_FAILED_ID))
        self.assertRaises(webob.exc.HTTPInternalServerError,
                          self.controller.disassociate_all, req,
                          fake.ACTION2_FAILED_ID)
| 46.900963
| 79
| 0.622723
| 4,164
| 34,097
| 4.791066
| 0.065562
| 0.101454
| 0.04812
| 0.049474
| 0.831128
| 0.794135
| 0.76802
| 0.737895
| 0.70807
| 0.696241
| 0
| 0.008263
| 0.265331
| 34,097
| 726
| 80
| 46.965565
| 0.788144
| 0.019298
| 0
| 0.59204
| 0
| 0
| 0.139352
| 0.095724
| 0
| 0
| 0
| 0
| 0.129353
| 1
| 0.094527
| false
| 0.004975
| 0.021559
| 0.003317
| 0.129353
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
0a5f301c1cf4a7a4ab22403ddc020d23f8909148
| 3,801
|
py
|
Python
|
test-framework/test-suites/integration/tests/list/test_list_host_firmware_mapping.py
|
anooprajendra/stacki
|
5e3f51c928ff5367a7441f07bf28f0121e7abdff
|
[
"BSD-3-Clause"
] | 123
|
2015-05-12T23:36:45.000Z
|
2017-07-05T23:26:57.000Z
|
test-framework/test-suites/integration/tests/list/test_list_host_firmware_mapping.py
|
anooprajendra/stacki
|
5e3f51c928ff5367a7441f07bf28f0121e7abdff
|
[
"BSD-3-Clause"
] | 177
|
2015-06-05T19:17:47.000Z
|
2017-07-07T17:57:24.000Z
|
test-framework/test-suites/integration/tests/list/test_list_host_firmware_mapping.py
|
anooprajendra/stacki
|
5e3f51c928ff5367a7441f07bf28f0121e7abdff
|
[
"BSD-3-Clause"
] | 32
|
2015-06-07T02:25:03.000Z
|
2017-06-23T07:35:35.000Z
|
import json
import pytest
# Parametrized over host filter arguments: no filter returns all mappings,
# a hostname returns only that host's mapping.
@pytest.mark.parametrize(
	"hosts, expected_results",
	(
		(
			"",
			[
				{"host": "backend-0-0", "version": "1.2.3", "make": "mellanox", "model": "m7800"},
				{"host": "backend-0-1", "version": "1.2.3.4", "make": "dell", "model": "x1052-software"},
			],
		),
		("backend-0-0", [{"host": "backend-0-0", "version": "1.2.3", "make": "mellanox", "model": "m7800"}]),
		("backend-0-1", [{"host": "backend-0-1", "version": "1.2.3.4", "make": "dell", "model": "x1052-software"}]),
	),
)
def test_list_host_firmware_mapping_host_filter(
	host,
	add_host_with_net,
	fake_local_firmware_file,
	revert_firmware,
	hosts,
	expected_results,
):
	"""Test that list host firmware mapping filters correctly based on provided arguments."""
	# NOTE(review): backend-0-0 is presumably created by a shared fixture;
	# only backend-0-1 is added here -- confirm against conftest.
	# Add a backend-0-1
	add_host_with_net(
		hostname = "backend-0-1",
		rack = 0,
		rank = 1,
		appliance = "backend",
		interface = "eth0",
		ip = "192.168.1.1",
		network = "fake_net",
		address = "192.168.1.0",
		pxe = True,
	)
	# Add a piece of mellanox firmware to backend-0-0.
	result = host.run(f"stack add firmware 1.2.3 make=mellanox model=m7800 source={fake_local_firmware_file} hosts=backend-0-0")
	assert result.rc == 0
	# Add a piece of dell firmware to backend-0-1
	result = host.run(f"stack add firmware 1.2.3.4 make=dell model=x1052-software source={fake_local_firmware_file} hosts=backend-0-1")
	assert result.rc == 0
	# List the firmware mappings and compare the parsed JSON output.
	result = host.run(f"stack list host firmware mapping {hosts} output-format=json")
	assert result.rc == 0
	assert expected_results == json.loads(result.stdout)
# Parametrized over make/model/version filters with increasing specificity;
# empty strings mean "no filter for that field".
@pytest.mark.parametrize(
	"make, model, versions, expected_results",
	(
		(
			"",
			"",
			"",
			[
				{"host": "backend-0-0", "version": "1.2.3", "make": "mellanox", "model": "m7800"},
				{"host": "backend-0-1", "version": "1.2.3.4", "make": "dell", "model": "x1052-software"},
			],
		),
		("mellanox", "", "", [{"host": "backend-0-0", "version": "1.2.3", "make": "mellanox", "model": "m7800"}]),
		("mellanox", "m7800", "", [{"host": "backend-0-0", "version": "1.2.3", "make": "mellanox", "model": "m7800"}]),
		("mellanox", "m7800", "1.2.3", [{"host": "backend-0-0", "version": "1.2.3", "make": "mellanox", "model": "m7800"}]),
		("dell", "", "", [{"host": "backend-0-1", "version": "1.2.3.4", "make": "dell", "model": "x1052-software"}]),
		("dell", "x1052-software", "", [{"host": "backend-0-1", "version": "1.2.3.4", "make": "dell", "model": "x1052-software"}]),
		("dell", "x1052-software", "1.2.3.4", [{"host": "backend-0-1", "version": "1.2.3.4", "make": "dell", "model": "x1052-software"}]),
	),
)
def test_list_host_firmware_mapping_non_host_filter(
	host,
	add_host_with_net,
	fake_local_firmware_file,
	revert_firmware,
	make,
	model,
	versions,
	expected_results,
):
	"""Test that list host firmware mapping filters correctly based on provided arguments."""
	# Add a backend-0-1
	add_host_with_net(
		hostname = "backend-0-1",
		rack = 0,
		rank = 1,
		appliance = "backend",
		interface = "eth0",
		ip = "192.168.1.1",
		network = "fake_net",
		address = "192.168.1.0",
		pxe = True,
	)
	# Add a piece of mellanox firmware to backend-0-0.
	result = host.run(f"stack add firmware 1.2.3 make=mellanox model=m7800 source={fake_local_firmware_file} hosts=backend-0-0")
	assert result.rc == 0
	# Add a piece of dell firmware to backend-0-1
	result = host.run(f"stack add firmware 1.2.3.4 make=dell model=x1052-software source={fake_local_firmware_file} hosts=backend-0-1")
	assert result.rc == 0
	# List the firmware mappings, passing only the filters that are set.
	result = host.run(
		f"stack list host firmware mapping {f'make={make}' if make else ''} {f'model={model}' if model else ''} "
		f"{f'versions={versions}' if versions else ''} output-format=json"
	)
	assert result.rc == 0
	assert expected_results == json.loads(result.stdout)
| 35.194444
| 132
| 0.639042
| 568
| 3,801
| 4.183099
| 0.135563
| 0.087542
| 0.022727
| 0.050505
| 0.919192
| 0.904882
| 0.904882
| 0.904882
| 0.904882
| 0.904882
| 0
| 0.073879
| 0.148908
| 3,801
| 107
| 133
| 35.523364
| 0.660587
| 0.116811
| 0
| 0.645161
| 0
| 0.053763
| 0.464211
| 0.046421
| 0
| 0
| 0
| 0
| 0.086022
| 1
| 0.021505
| false
| 0
| 0.021505
| 0
| 0.043011
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
0a6fa82e8ed05dcf794259b504bf39d910129947
| 158
|
py
|
Python
|
testcontainers/google/__init__.py
|
singerjess/testcontainers-python
|
24eafa31c785a29877cbf874019adc9fb0e7b02d
|
[
"Apache-2.0"
] | 465
|
2018-10-09T13:09:40.000Z
|
2022-03-31T15:33:23.000Z
|
testcontainers/google/__init__.py
|
singerjess/testcontainers-python
|
24eafa31c785a29877cbf874019adc9fb0e7b02d
|
[
"Apache-2.0"
] | 142
|
2018-10-23T14:36:48.000Z
|
2022-03-31T17:00:51.000Z
|
testcontainers/google/__init__.py
|
singerjess/testcontainers-python
|
24eafa31c785a29877cbf874019adc9fb0e7b02d
|
[
"Apache-2.0"
] | 119
|
2018-11-16T21:13:05.000Z
|
2022-03-31T14:12:39.000Z
|
"""
Google Cloud Emulators
======================
Allows to spin up google cloud emulators, such as PubSub.
"""
from .pubsub import PubSubContainer # noqa
| 17.555556
| 57
| 0.639241
| 18
| 158
| 5.611111
| 0.777778
| 0.217822
| 0.39604
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.158228
| 158
| 8
| 58
| 19.75
| 0.759399
| 0.696203
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
6a51a3c53ab512c3347b2873b0066a5e7a74d674
| 158
|
py
|
Python
|
configs/lxmert/lxmert_pretrain.py
|
inspur-hsslab/iMIX
|
99898de97ef8b45462ca1d6bf2542e423a73d769
|
[
"Apache-2.0"
] | 23
|
2021-06-26T08:45:19.000Z
|
2022-03-02T02:13:33.000Z
|
configs/lxmert/lxmert_pretrain.py
|
XChuanLee/iMIX
|
99898de97ef8b45462ca1d6bf2542e423a73d769
|
[
"Apache-2.0"
] | null | null | null |
configs/lxmert/lxmert_pretrain.py
|
XChuanLee/iMIX
|
99898de97ef8b45462ca1d6bf2542e423a73d769
|
[
"Apache-2.0"
] | 9
|
2021-06-10T02:36:20.000Z
|
2021-11-09T02:18:16.000Z
|
# Config-inheritance list: model definition, pretraining dataset, and
# default runtime settings are composed from these base config files.
_base_ = [
    '../_base_/models/lxmert/lxmert_pretrain_config.py',
    '../_base_/datasets/lxmert/lxmert_pretrain.py',
    '../_base_/default_runtime.py',
]
| 26.333333
| 56
| 0.683544
| 18
| 158
| 5.333333
| 0.5
| 0.25
| 0.416667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.120253
| 158
| 5
| 57
| 31.6
| 0.690647
| 0
| 0
| 0
| 0
| 0
| 0.765823
| 0.765823
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
6a9e4640b30d1ea9e1e970937fa940851128eee9
| 35
|
py
|
Python
|
pathfinder/terminal/phybeast/utils/extract_rate/__init__.py
|
pf-core/pf-core
|
0caf8abde968b959be2284518f7dc951ba680202
|
[
"MIT"
] | null | null | null |
pathfinder/terminal/phybeast/utils/extract_rate/__init__.py
|
pf-core/pf-core
|
0caf8abde968b959be2284518f7dc951ba680202
|
[
"MIT"
] | null | null | null |
pathfinder/terminal/phybeast/utils/extract_rate/__init__.py
|
pf-core/pf-core
|
0caf8abde968b959be2284518f7dc951ba680202
|
[
"MIT"
] | null | null | null |
from .commands import extract_rate
| 17.5
| 34
| 0.857143
| 5
| 35
| 5.8
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.114286
| 35
| 1
| 35
| 35
| 0.935484
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
0ac2ba7ab3515acd4609d17e088cd120fdd0e6f0
| 202
|
py
|
Python
|
sanic_openapi/swagger.py
|
artcg/sanic-openapi
|
ad6064427ca7310dc17d52729b516184d65ab1e3
|
[
"MIT"
] | null | null | null |
sanic_openapi/swagger.py
|
artcg/sanic-openapi
|
ad6064427ca7310dc17d52729b516184d65ab1e3
|
[
"MIT"
] | 1
|
2021-03-16T06:45:56.000Z
|
2021-03-16T06:45:56.000Z
|
sanic_openapi/swagger.py
|
artcg/sanic-openapi
|
ad6064427ca7310dc17d52729b516184d65ab1e3
|
[
"MIT"
] | null | null | null |
from .oas3.blueprint import blueprint_factory as oas3_factory
from .swagger2.blueprint import blueprint_factory as swagger_factory
swagger_blueprint = swagger_factory()
oas3_blueprint = oas3_factory()
| 33.666667
| 68
| 0.856436
| 26
| 202
| 6.346154
| 0.307692
| 0.157576
| 0.290909
| 0.375758
| 0.4
| 0
| 0
| 0
| 0
| 0
| 0
| 0.027322
| 0.094059
| 202
| 5
| 69
| 40.4
| 0.874317
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 0.5
| 1
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
|
0
| 6
|
0ac414cdd888f20f89112783a1a04f36d217480a
| 96
|
py
|
Python
|
venv/lib/python3.8/site-packages/parso/cache.py
|
Retraces/UkraineBot
|
3d5d7f8aaa58fa0cb8b98733b8808e5dfbdb8b71
|
[
"MIT"
] | 2
|
2022-03-13T01:58:52.000Z
|
2022-03-31T06:07:54.000Z
|
venv/lib/python3.8/site-packages/parso/cache.py
|
DesmoSearch/Desmobot
|
b70b45df3485351f471080deb5c785c4bc5c4beb
|
[
"MIT"
] | 19
|
2021-11-20T04:09:18.000Z
|
2022-03-23T15:05:55.000Z
|
venv/lib/python3.8/site-packages/parso/cache.py
|
DesmoSearch/Desmobot
|
b70b45df3485351f471080deb5c785c4bc5c4beb
|
[
"MIT"
] | null | null | null |
/home/runner/.cache/pip/pool/f5/c1/0f/e7b8b80a368c9841621dc7d1939541c14648fb37079b8f125b2fcda6ba
| 96
| 96
| 0.895833
| 9
| 96
| 9.555556
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.427083
| 0
| 96
| 1
| 96
| 96
| 0.46875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
0afc6d83fc7801ed7075cc2cca70b88791ed9ebd
| 107,025
|
py
|
Python
|
google/cloud/managedidentities/v1beta1/managedidentities-v1beta1-py/tests/unit/gapic/managedidentities_v1beta1/test_managed_identities_service.py
|
googleapis/googleapis-gen
|
d84824c78563d59b0e58d5664bfaa430e9ad7e7a
|
[
"Apache-2.0"
] | 7
|
2021-02-21T10:39:41.000Z
|
2021-12-07T07:31:28.000Z
|
google/cloud/managedidentities/v1beta1/managedidentities-v1beta1-py/tests/unit/gapic/managedidentities_v1beta1/test_managed_identities_service.py
|
googleapis/googleapis-gen
|
d84824c78563d59b0e58d5664bfaa430e9ad7e7a
|
[
"Apache-2.0"
] | 6
|
2021-02-02T23:46:11.000Z
|
2021-11-15T01:46:02.000Z
|
google/cloud/managedidentities/v1beta1/managedidentities-v1beta1-py/tests/unit/gapic/managedidentities_v1beta1/test_managed_identities_service.py
|
googleapis/googleapis-gen
|
d84824c78563d59b0e58d5664bfaa430e9ad7e7a
|
[
"Apache-2.0"
] | 4
|
2021-01-28T23:25:45.000Z
|
2021-08-30T01:55:16.000Z
|
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import os
import mock
import packaging.version
import grpc
from grpc.experimental import aio
import math
import pytest
from proto.marshal.rules.dates import DurationRule, TimestampRule
from google.api_core import client_options
from google.api_core import exceptions as core_exceptions
from google.api_core import future
from google.api_core import gapic_v1
from google.api_core import grpc_helpers
from google.api_core import grpc_helpers_async
from google.api_core import operation_async # type: ignore
from google.api_core import operations_v1
from google.api_core import path_template
from google.auth import credentials as ga_credentials
from google.auth.exceptions import MutualTLSChannelError
from google.cloud.managedidentities_v1beta1.services.managed_identities_service import ManagedIdentitiesServiceAsyncClient
from google.cloud.managedidentities_v1beta1.services.managed_identities_service import ManagedIdentitiesServiceClient
from google.cloud.managedidentities_v1beta1.services.managed_identities_service import pagers
from google.cloud.managedidentities_v1beta1.services.managed_identities_service import transports
from google.cloud.managedidentities_v1beta1.services.managed_identities_service.transports.base import _GOOGLE_AUTH_VERSION
from google.cloud.managedidentities_v1beta1.types import managed_identities_service
from google.cloud.managedidentities_v1beta1.types import resource
from google.longrunning import operations_pb2
from google.oauth2 import service_account
from google.protobuf import field_mask_pb2 # type: ignore
from google.protobuf import timestamp_pb2 # type: ignore
import google.auth
# TODO(busunkim): Once google-auth >= 1.25.0 is required transitively
# through google-api-core:
# - Delete the auth "less than" test cases
# - Delete these pytest markers (Make the "greater than or equal to" tests the default).
requires_google_auth_lt_1_25_0 = pytest.mark.skipif(
packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"),
reason="This test requires google-auth < 1.25.0",
)
requires_google_auth_gte_1_25_0 = pytest.mark.skipif(
packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"),
reason="This test requires google-auth >= 1.25.0",
)
def client_cert_source_callback():
return b"cert bytes", b"key bytes"
# If default endpoint is localhost, then default mtls endpoint will be the same.
# This method modifies the default endpoint so the client can produce a different
# mtls endpoint for endpoint testing purposes.
def modify_default_endpoint(client):
return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT
def test__get_default_mtls_endpoint():
api_endpoint = "example.googleapis.com"
api_mtls_endpoint = "example.mtls.googleapis.com"
sandbox_endpoint = "example.sandbox.googleapis.com"
sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com"
non_googleapi = "api.example.com"
assert ManagedIdentitiesServiceClient._get_default_mtls_endpoint(None) is None
assert ManagedIdentitiesServiceClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint
assert ManagedIdentitiesServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint
assert ManagedIdentitiesServiceClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint
assert ManagedIdentitiesServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint
assert ManagedIdentitiesServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi
@pytest.mark.parametrize("client_class", [
ManagedIdentitiesServiceClient,
ManagedIdentitiesServiceAsyncClient,
])
def test_managed_identities_service_client_from_service_account_info(client_class):
creds = ga_credentials.AnonymousCredentials()
with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory:
factory.return_value = creds
info = {"valid": True}
client = client_class.from_service_account_info(info)
assert client.transport._credentials == creds
assert isinstance(client, client_class)
assert client.transport._host == 'managedidentities.googleapis.com:443'
@pytest.mark.parametrize("transport_class,transport_name", [
(transports.ManagedIdentitiesServiceGrpcTransport, "grpc"),
(transports.ManagedIdentitiesServiceGrpcAsyncIOTransport, "grpc_asyncio"),
])
def test_managed_identities_service_client_service_account_always_use_jwt(transport_class, transport_name):
with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt:
creds = service_account.Credentials(None, None, None)
transport = transport_class(credentials=creds, always_use_jwt_access=True)
use_jwt.assert_called_once_with(True)
with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt:
creds = service_account.Credentials(None, None, None)
transport = transport_class(credentials=creds, always_use_jwt_access=False)
use_jwt.assert_not_called()
@pytest.mark.parametrize("client_class", [
ManagedIdentitiesServiceClient,
ManagedIdentitiesServiceAsyncClient,
])
def test_managed_identities_service_client_from_service_account_file(client_class):
creds = ga_credentials.AnonymousCredentials()
with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory:
factory.return_value = creds
client = client_class.from_service_account_file("dummy/file/path.json")
assert client.transport._credentials == creds
assert isinstance(client, client_class)
client = client_class.from_service_account_json("dummy/file/path.json")
assert client.transport._credentials == creds
assert isinstance(client, client_class)
assert client.transport._host == 'managedidentities.googleapis.com:443'
def test_managed_identities_service_client_get_transport_class():
transport = ManagedIdentitiesServiceClient.get_transport_class()
available_transports = [
transports.ManagedIdentitiesServiceGrpcTransport,
]
assert transport in available_transports
transport = ManagedIdentitiesServiceClient.get_transport_class("grpc")
assert transport == transports.ManagedIdentitiesServiceGrpcTransport
@pytest.mark.parametrize("client_class,transport_class,transport_name", [
(ManagedIdentitiesServiceClient, transports.ManagedIdentitiesServiceGrpcTransport, "grpc"),
(ManagedIdentitiesServiceAsyncClient, transports.ManagedIdentitiesServiceGrpcAsyncIOTransport, "grpc_asyncio"),
])
@mock.patch.object(ManagedIdentitiesServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(ManagedIdentitiesServiceClient))
@mock.patch.object(ManagedIdentitiesServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(ManagedIdentitiesServiceAsyncClient))
def test_managed_identities_service_client_client_options(client_class, transport_class, transport_name):
# Check that if channel is provided we won't create a new one.
with mock.patch.object(ManagedIdentitiesServiceClient, 'get_transport_class') as gtc:
transport = transport_class(
credentials=ga_credentials.AnonymousCredentials()
)
client = client_class(transport=transport)
gtc.assert_not_called()
# Check that if channel is provided via str we will create a new one.
with mock.patch.object(ManagedIdentitiesServiceClient, 'get_transport_class') as gtc:
client = client_class(transport=transport_name)
gtc.assert_called()
# Check the case api_endpoint is provided.
options = client_options.ClientOptions(api_endpoint="squid.clam.whelk")
with mock.patch.object(transport_class, '__init__') as patched:
patched.return_value = None
client = client_class(client_options=options)
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
host="squid.clam.whelk",
scopes=None,
client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
always_use_jwt_access=True,
)
# Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
# "never".
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
with mock.patch.object(transport_class, '__init__') as patched:
patched.return_value = None
client = client_class()
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
host=client.DEFAULT_ENDPOINT,
scopes=None,
client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
always_use_jwt_access=True,
)
# Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
# "always".
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
with mock.patch.object(transport_class, '__init__') as patched:
patched.return_value = None
client = client_class()
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
host=client.DEFAULT_MTLS_ENDPOINT,
scopes=None,
client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
always_use_jwt_access=True,
)
# Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has
# unsupported value.
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}):
with pytest.raises(MutualTLSChannelError):
client = client_class()
# Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value.
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}):
with pytest.raises(ValueError):
client = client_class()
# Check the case quota_project_id is provided
options = client_options.ClientOptions(quota_project_id="octopus")
with mock.patch.object(transport_class, '__init__') as patched:
patched.return_value = None
client = client_class(client_options=options)
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
host=client.DEFAULT_ENDPOINT,
scopes=None,
client_cert_source_for_mtls=None,
quota_project_id="octopus",
client_info=transports.base.DEFAULT_CLIENT_INFO,
always_use_jwt_access=True,
)
@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [
(ManagedIdentitiesServiceClient, transports.ManagedIdentitiesServiceGrpcTransport, "grpc", "true"),
(ManagedIdentitiesServiceAsyncClient, transports.ManagedIdentitiesServiceGrpcAsyncIOTransport, "grpc_asyncio", "true"),
(ManagedIdentitiesServiceClient, transports.ManagedIdentitiesServiceGrpcTransport, "grpc", "false"),
(ManagedIdentitiesServiceAsyncClient, transports.ManagedIdentitiesServiceGrpcAsyncIOTransport, "grpc_asyncio", "false"),
])
@mock.patch.object(ManagedIdentitiesServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(ManagedIdentitiesServiceClient))
@mock.patch.object(ManagedIdentitiesServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(ManagedIdentitiesServiceAsyncClient))
@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"})
def test_managed_identities_service_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env):
# This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default
# mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists.
# Check the case client_cert_source is provided. Whether client cert is used depends on
# GOOGLE_API_USE_CLIENT_CERTIFICATE value.
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}):
options = client_options.ClientOptions(client_cert_source=client_cert_source_callback)
with mock.patch.object(transport_class, '__init__') as patched:
patched.return_value = None
client = client_class(client_options=options)
if use_client_cert_env == "false":
expected_client_cert_source = None
expected_host = client.DEFAULT_ENDPOINT
else:
expected_client_cert_source = client_cert_source_callback
expected_host = client.DEFAULT_MTLS_ENDPOINT
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
host=expected_host,
scopes=None,
client_cert_source_for_mtls=expected_client_cert_source,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
always_use_jwt_access=True,
)
# Check the case ADC client cert is provided. Whether client cert is used depends on
# GOOGLE_API_USE_CLIENT_CERTIFICATE value.
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}):
with mock.patch.object(transport_class, '__init__') as patched:
with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True):
with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback):
if use_client_cert_env == "false":
expected_host = client.DEFAULT_ENDPOINT
expected_client_cert_source = None
else:
expected_host = client.DEFAULT_MTLS_ENDPOINT
expected_client_cert_source = client_cert_source_callback
patched.return_value = None
client = client_class()
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
host=expected_host,
scopes=None,
client_cert_source_for_mtls=expected_client_cert_source,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
always_use_jwt_access=True,
)
# Check the case client_cert_source and ADC client cert are not provided.
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}):
with mock.patch.object(transport_class, '__init__') as patched:
with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False):
patched.return_value = None
client = client_class()
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
host=client.DEFAULT_ENDPOINT,
scopes=None,
client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
always_use_jwt_access=True,
)
@pytest.mark.parametrize("client_class,transport_class,transport_name", [
(ManagedIdentitiesServiceClient, transports.ManagedIdentitiesServiceGrpcTransport, "grpc"),
(ManagedIdentitiesServiceAsyncClient, transports.ManagedIdentitiesServiceGrpcAsyncIOTransport, "grpc_asyncio"),
])
def test_managed_identities_service_client_client_options_scopes(client_class, transport_class, transport_name):
# Check the case scopes are provided.
options = client_options.ClientOptions(
scopes=["1", "2"],
)
with mock.patch.object(transport_class, '__init__') as patched:
patched.return_value = None
client = client_class(client_options=options)
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
host=client.DEFAULT_ENDPOINT,
scopes=["1", "2"],
client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
always_use_jwt_access=True,
)
@pytest.mark.parametrize("client_class,transport_class,transport_name", [
(ManagedIdentitiesServiceClient, transports.ManagedIdentitiesServiceGrpcTransport, "grpc"),
(ManagedIdentitiesServiceAsyncClient, transports.ManagedIdentitiesServiceGrpcAsyncIOTransport, "grpc_asyncio"),
])
def test_managed_identities_service_client_client_options_credentials_file(client_class, transport_class, transport_name):
# Check the case credentials file is provided.
options = client_options.ClientOptions(
credentials_file="credentials.json"
)
with mock.patch.object(transport_class, '__init__') as patched:
patched.return_value = None
client = client_class(client_options=options)
patched.assert_called_once_with(
credentials=None,
credentials_file="credentials.json",
host=client.DEFAULT_ENDPOINT,
scopes=None,
client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
always_use_jwt_access=True,
)
def test_managed_identities_service_client_client_options_from_dict():
with mock.patch('google.cloud.managedidentities_v1beta1.services.managed_identities_service.transports.ManagedIdentitiesServiceGrpcTransport.__init__') as grpc_transport:
grpc_transport.return_value = None
client = ManagedIdentitiesServiceClient(
client_options={'api_endpoint': 'squid.clam.whelk'}
)
grpc_transport.assert_called_once_with(
credentials=None,
credentials_file=None,
host="squid.clam.whelk",
scopes=None,
client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
always_use_jwt_access=True,
)
def test_create_microsoft_ad_domain(transport: str = 'grpc', request_type=managed_identities_service.CreateMicrosoftAdDomainRequest):
client = ManagedIdentitiesServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.create_microsoft_ad_domain),
'__call__') as call:
# Designate an appropriate return value for the call.
call.return_value = operations_pb2.Operation(name='operations/spam')
response = client.create_microsoft_ad_domain(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == managed_identities_service.CreateMicrosoftAdDomainRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, future.Future)
def test_create_microsoft_ad_domain_from_dict():
test_create_microsoft_ad_domain(request_type=dict)
def test_create_microsoft_ad_domain_empty_call():
# This test is a coverage failsafe to make sure that totally empty calls,
# i.e. request == None and no flattened fields passed, work.
client = ManagedIdentitiesServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
transport='grpc',
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.create_microsoft_ad_domain),
'__call__') as call:
client.create_microsoft_ad_domain()
call.assert_called()
_, args, _ = call.mock_calls[0]
assert args[0] == managed_identities_service.CreateMicrosoftAdDomainRequest()
@pytest.mark.asyncio
async def test_create_microsoft_ad_domain_async(transport: str = 'grpc_asyncio', request_type=managed_identities_service.CreateMicrosoftAdDomainRequest):
client = ManagedIdentitiesServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.create_microsoft_ad_domain),
'__call__') as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
operations_pb2.Operation(name='operations/spam')
)
response = await client.create_microsoft_ad_domain(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == managed_identities_service.CreateMicrosoftAdDomainRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, future.Future)
@pytest.mark.asyncio
async def test_create_microsoft_ad_domain_async_from_dict():
await test_create_microsoft_ad_domain_async(request_type=dict)
def test_create_microsoft_ad_domain_field_headers():
client = ManagedIdentitiesServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = managed_identities_service.CreateMicrosoftAdDomainRequest()
request.parent = 'parent/value'
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.create_microsoft_ad_domain),
'__call__') as call:
call.return_value = operations_pb2.Operation(name='operations/op')
client.create_microsoft_ad_domain(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert (
'x-goog-request-params',
'parent=parent/value',
) in kw['metadata']
@pytest.mark.asyncio
async def test_create_microsoft_ad_domain_field_headers_async():
client = ManagedIdentitiesServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = managed_identities_service.CreateMicrosoftAdDomainRequest()
request.parent = 'parent/value'
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.create_microsoft_ad_domain),
'__call__') as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op'))
await client.create_microsoft_ad_domain(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert (
'x-goog-request-params',
'parent=parent/value',
) in kw['metadata']
def test_reset_admin_password(transport: str = 'grpc', request_type=managed_identities_service.ResetAdminPasswordRequest):
client = ManagedIdentitiesServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.reset_admin_password),
'__call__') as call:
# Designate an appropriate return value for the call.
call.return_value = managed_identities_service.ResetAdminPasswordResponse(
password='password_value',
)
response = client.reset_admin_password(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == managed_identities_service.ResetAdminPasswordRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, managed_identities_service.ResetAdminPasswordResponse)
assert response.password == 'password_value'
def test_reset_admin_password_from_dict():
test_reset_admin_password(request_type=dict)
def test_reset_admin_password_empty_call():
# This test is a coverage failsafe to make sure that totally empty calls,
# i.e. request == None and no flattened fields passed, work.
client = ManagedIdentitiesServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
transport='grpc',
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.reset_admin_password),
'__call__') as call:
client.reset_admin_password()
call.assert_called()
_, args, _ = call.mock_calls[0]
assert args[0] == managed_identities_service.ResetAdminPasswordRequest()
@pytest.mark.asyncio
async def test_reset_admin_password_async(transport: str = 'grpc_asyncio', request_type=managed_identities_service.ResetAdminPasswordRequest):
client = ManagedIdentitiesServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.reset_admin_password),
'__call__') as call:
# Designate an appropriate return value for the call.
call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(managed_identities_service.ResetAdminPasswordResponse(
password='password_value',
))
response = await client.reset_admin_password(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == managed_identities_service.ResetAdminPasswordRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, managed_identities_service.ResetAdminPasswordResponse)
assert response.password == 'password_value'
@pytest.mark.asyncio
async def test_reset_admin_password_async_from_dict():
await test_reset_admin_password_async(request_type=dict)
def test_reset_admin_password_field_headers():
client = ManagedIdentitiesServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = managed_identities_service.ResetAdminPasswordRequest()
request.name = 'name/value'
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.reset_admin_password),
'__call__') as call:
call.return_value = managed_identities_service.ResetAdminPasswordResponse()
client.reset_admin_password(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert (
'x-goog-request-params',
'name=name/value',
) in kw['metadata']
@pytest.mark.asyncio
async def test_reset_admin_password_field_headers_async():
client = ManagedIdentitiesServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = managed_identities_service.ResetAdminPasswordRequest()
request.name = 'name/value'
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.reset_admin_password),
'__call__') as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(managed_identities_service.ResetAdminPasswordResponse())
await client.reset_admin_password(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert (
'x-goog-request-params',
'name=name/value',
) in kw['metadata']
def test_list_domains(transport: str = 'grpc', request_type=managed_identities_service.ListDomainsRequest):
client = ManagedIdentitiesServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.list_domains),
'__call__') as call:
# Designate an appropriate return value for the call.
call.return_value = managed_identities_service.ListDomainsResponse(
next_page_token='next_page_token_value',
unreachable=['unreachable_value'],
)
response = client.list_domains(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == managed_identities_service.ListDomainsRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, pagers.ListDomainsPager)
assert response.next_page_token == 'next_page_token_value'
assert response.unreachable == ['unreachable_value']
def test_list_domains_from_dict():
test_list_domains(request_type=dict)
def test_list_domains_empty_call():
# This test is a coverage failsafe to make sure that totally empty calls,
# i.e. request == None and no flattened fields passed, work.
client = ManagedIdentitiesServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
transport='grpc',
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.list_domains),
'__call__') as call:
client.list_domains()
call.assert_called()
_, args, _ = call.mock_calls[0]
assert args[0] == managed_identities_service.ListDomainsRequest()
@pytest.mark.asyncio
async def test_list_domains_async(transport: str = 'grpc_asyncio', request_type=managed_identities_service.ListDomainsRequest):
    """Async variant: list_domains over a mocked async gRPC stub."""
    client = ManagedIdentitiesServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Empty request is fine; proto3 fields are optional and the API is mocked.
    request = request_type()

    with mock.patch.object(type(client.transport.list_domains), '__call__') as rpc:
        rpc.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            managed_identities_service.ListDomainsResponse(
                next_page_token='next_page_token_value',
                unreachable=['unreachable_value'],
            )
        )
        response = await client.list_domains(request)

        # The stub was invoked with the default request proto.
        assert len(rpc.mock_calls)
        _, args, _ = rpc.mock_calls[0]
        assert args[0] == managed_identities_service.ListDomainsRequest()

    # The response surfaces as the async pager with the mocked fields.
    assert isinstance(response, pagers.ListDomainsAsyncPager)
    assert response.next_page_token == 'next_page_token_value'
    assert response.unreachable == ['unreachable_value']
@pytest.mark.asyncio
async def test_list_domains_async_from_dict():
    """Async list_domains test exercised with a dict-typed request."""
    await test_list_domains_async(request_type=dict)
def test_list_domains_field_headers():
    """URI-bound request fields must be echoed as x-goog-request-params metadata."""
    client = ManagedIdentitiesServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Populate the routing field with a non-empty value.
    request = managed_identities_service.ListDomainsRequest()
    request.parent = 'parent/value'

    with mock.patch.object(type(client.transport.list_domains), '__call__') as rpc:
        rpc.return_value = managed_identities_service.ListDomainsResponse()
        client.list_domains(request)

        # One stub invocation, with the original request object.
        assert len(rpc.mock_calls) == 1
        _, args, kw = rpc.mock_calls[0]
        assert args[0] == request

        # The routing header must appear in the call metadata.
        assert (
            'x-goog-request-params',
            'parent=parent/value',
        ) in kw['metadata']
@pytest.mark.asyncio
async def test_list_domains_field_headers_async():
    """Async variant: routing fields propagate to x-goog-request-params."""
    client = ManagedIdentitiesServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Populate the routing field with a non-empty value.
    request = managed_identities_service.ListDomainsRequest()
    request.parent = 'parent/value'

    with mock.patch.object(type(client.transport.list_domains), '__call__') as rpc:
        rpc.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            managed_identities_service.ListDomainsResponse()
        )
        await client.list_domains(request)

        # The stub received the original request object.
        assert len(rpc.mock_calls)
        _, args, kw = rpc.mock_calls[0]
        assert args[0] == request

        # The routing header must appear in the call metadata.
        assert (
            'x-goog-request-params',
            'parent=parent/value',
        ) in kw['metadata']
def test_list_domains_pager():
    """Iterating the pager must walk every Domain across all pages.

    Fix: pass a credentials *instance* (``AnonymousCredentials()``) rather
    than the class object, consistent with every other test in this module.
    """
    client = ManagedIdentitiesServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    with mock.patch.object(type(client.transport.list_domains), '__call__') as rpc:
        # Four pages of results (3 + 0 + 1 + 2 domains) followed by a
        # RuntimeError to guarantee iteration terminates.
        rpc.side_effect = (
            managed_identities_service.ListDomainsResponse(
                domains=[
                    resource.Domain(),
                    resource.Domain(),
                    resource.Domain(),
                ],
                next_page_token='abc',
            ),
            managed_identities_service.ListDomainsResponse(
                domains=[],
                next_page_token='def',
            ),
            managed_identities_service.ListDomainsResponse(
                domains=[
                    resource.Domain(),
                ],
                next_page_token='ghi',
            ),
            managed_identities_service.ListDomainsResponse(
                domains=[
                    resource.Domain(),
                    resource.Domain(),
                ],
            ),
            RuntimeError,
        )

        # The pager should carry routing metadata for the (empty) parent.
        metadata = ()
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ('parent', ''),
            )),
        )
        pager = client.list_domains(request={})

        assert pager._metadata == metadata

        # All 6 domains come back, each of the expected type.
        results = [i for i in pager]
        assert len(results) == 6
        assert all(isinstance(i, resource.Domain)
                   for i in results)
def test_list_domains_pages():
    """Page-wise iteration must expose each page's next_page_token.

    Fix: pass a credentials *instance* (``AnonymousCredentials()``) rather
    than the class object, consistent with every other test in this module.
    """
    client = ManagedIdentitiesServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    with mock.patch.object(type(client.transport.list_domains), '__call__') as rpc:
        # Four pages followed by a RuntimeError sentinel.
        rpc.side_effect = (
            managed_identities_service.ListDomainsResponse(
                domains=[
                    resource.Domain(),
                    resource.Domain(),
                    resource.Domain(),
                ],
                next_page_token='abc',
            ),
            managed_identities_service.ListDomainsResponse(
                domains=[],
                next_page_token='def',
            ),
            managed_identities_service.ListDomainsResponse(
                domains=[
                    resource.Domain(),
                ],
                next_page_token='ghi',
            ),
            managed_identities_service.ListDomainsResponse(
                domains=[
                    resource.Domain(),
                    resource.Domain(),
                ],
            ),
            RuntimeError,
        )
        pages = list(client.list_domains(request={}).pages)
        # Each raw page carries its token; the final page's token is empty.
        for page_, token in zip(pages, ['abc', 'def', 'ghi', '']):
            assert page_.raw_page.next_page_token == token
@pytest.mark.asyncio
async def test_list_domains_async_pager():
    """Async iteration over the pager must yield every Domain.

    Fix: pass a credentials *instance* (``AnonymousCredentials()``) rather
    than the class object, consistent with every other test in this module.
    """
    client = ManagedIdentitiesServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    with mock.patch.object(
            type(client.transport.list_domains),
            '__call__', new_callable=mock.AsyncMock) as rpc:
        # Four pages followed by a RuntimeError sentinel.
        rpc.side_effect = (
            managed_identities_service.ListDomainsResponse(
                domains=[
                    resource.Domain(),
                    resource.Domain(),
                    resource.Domain(),
                ],
                next_page_token='abc',
            ),
            managed_identities_service.ListDomainsResponse(
                domains=[],
                next_page_token='def',
            ),
            managed_identities_service.ListDomainsResponse(
                domains=[
                    resource.Domain(),
                ],
                next_page_token='ghi',
            ),
            managed_identities_service.ListDomainsResponse(
                domains=[
                    resource.Domain(),
                    resource.Domain(),
                ],
            ),
            RuntimeError,
        )
        async_pager = await client.list_domains(request={},)
        assert async_pager.next_page_token == 'abc'

        # Collect every item yielded by async iteration.
        responses = []
        async for response in async_pager:
            responses.append(response)

        assert len(responses) == 6
        assert all(isinstance(i, resource.Domain)
                   for i in responses)
@pytest.mark.asyncio
async def test_list_domains_async_pages():
    """Async page-wise iteration must expose each page's next_page_token.

    Fix: pass a credentials *instance* (``AnonymousCredentials()``) rather
    than the class object, consistent with every other test in this module.
    """
    client = ManagedIdentitiesServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    with mock.patch.object(
            type(client.transport.list_domains),
            '__call__', new_callable=mock.AsyncMock) as rpc:
        # Four pages followed by a RuntimeError sentinel.
        rpc.side_effect = (
            managed_identities_service.ListDomainsResponse(
                domains=[
                    resource.Domain(),
                    resource.Domain(),
                    resource.Domain(),
                ],
                next_page_token='abc',
            ),
            managed_identities_service.ListDomainsResponse(
                domains=[],
                next_page_token='def',
            ),
            managed_identities_service.ListDomainsResponse(
                domains=[
                    resource.Domain(),
                ],
                next_page_token='ghi',
            ),
            managed_identities_service.ListDomainsResponse(
                domains=[
                    resource.Domain(),
                    resource.Domain(),
                ],
            ),
            RuntimeError,
        )
        pages = []
        async for page_ in (await client.list_domains(request={})).pages:
            pages.append(page_)
        # Each raw page carries its token; the final page's token is empty.
        for page_, token in zip(pages, ['abc', 'def', 'ghi', '']):
            assert page_.raw_page.next_page_token == token
def test_get_domain(transport: str = 'grpc', request_type=managed_identities_service.GetDomainRequest):
    """Invoke get_domain over a mocked gRPC stub and verify the Domain response."""
    client = ManagedIdentitiesServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Empty request suffices; proto3 fields are optional and the API is mocked.
    request = request_type()

    with mock.patch.object(type(client.transport.get_domain), '__call__') as rpc:
        rpc.return_value = resource.Domain(
            name='name_value',
            authorized_networks=['authorized_networks_value'],
            reserved_ip_range='reserved_ip_range_value',
            locations=['locations_value'],
            admin='admin_value',
            fqdn='fqdn_value',
            state=resource.Domain.State.CREATING,
            status_message='status_message_value',
        )
        response = client.get_domain(request)

        # Exactly one stub invocation with the default request proto.
        assert len(rpc.mock_calls) == 1
        _, args, _ = rpc.mock_calls[0]
        assert args[0] == managed_identities_service.GetDomainRequest()

    # Every mocked field must round-trip through the response.
    assert isinstance(response, resource.Domain)
    assert response.name == 'name_value'
    assert response.authorized_networks == ['authorized_networks_value']
    assert response.reserved_ip_range == 'reserved_ip_range_value'
    assert response.locations == ['locations_value']
    assert response.admin == 'admin_value'
    assert response.fqdn == 'fqdn_value'
    assert response.state == resource.Domain.State.CREATING
    assert response.status_message == 'status_message_value'
def test_get_domain_from_dict():
    """Re-run the get_domain test with a plain dict as the request type."""
    test_get_domain(request_type=dict)
def test_get_domain_empty_call():
    """Coverage failsafe: a call with no request and no flattened fields
    must still send a default request proto to the stub."""
    client = ManagedIdentitiesServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='grpc',
    )

    with mock.patch.object(type(client.transport.get_domain), '__call__') as rpc:
        client.get_domain()
        rpc.assert_called()
        _, args, _ = rpc.mock_calls[0]
        assert args[0] == managed_identities_service.GetDomainRequest()
@pytest.mark.asyncio
async def test_get_domain_async(transport: str = 'grpc_asyncio', request_type=managed_identities_service.GetDomainRequest):
    """Async variant: get_domain over a mocked async gRPC stub."""
    client = ManagedIdentitiesServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Empty request suffices; proto3 fields are optional and the API is mocked.
    request = request_type()

    with mock.patch.object(type(client.transport.get_domain), '__call__') as rpc:
        rpc.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            resource.Domain(
                name='name_value',
                authorized_networks=['authorized_networks_value'],
                reserved_ip_range='reserved_ip_range_value',
                locations=['locations_value'],
                admin='admin_value',
                fqdn='fqdn_value',
                state=resource.Domain.State.CREATING,
                status_message='status_message_value',
            )
        )
        response = await client.get_domain(request)

        # The stub was invoked with the default request proto.
        assert len(rpc.mock_calls)
        _, args, _ = rpc.mock_calls[0]
        assert args[0] == managed_identities_service.GetDomainRequest()

    # Every mocked field must round-trip through the response.
    assert isinstance(response, resource.Domain)
    assert response.name == 'name_value'
    assert response.authorized_networks == ['authorized_networks_value']
    assert response.reserved_ip_range == 'reserved_ip_range_value'
    assert response.locations == ['locations_value']
    assert response.admin == 'admin_value'
    assert response.fqdn == 'fqdn_value'
    assert response.state == resource.Domain.State.CREATING
    assert response.status_message == 'status_message_value'
@pytest.mark.asyncio
async def test_get_domain_async_from_dict():
    """Async get_domain test exercised with a dict-typed request."""
    await test_get_domain_async(request_type=dict)
def test_get_domain_field_headers():
    """URI-bound request fields must be echoed as x-goog-request-params metadata."""
    client = ManagedIdentitiesServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Populate the routing field with a non-empty value.
    request = managed_identities_service.GetDomainRequest()
    request.name = 'name/value'

    with mock.patch.object(type(client.transport.get_domain), '__call__') as rpc:
        rpc.return_value = resource.Domain()
        client.get_domain(request)

        # One stub invocation, with the original request object.
        assert len(rpc.mock_calls) == 1
        _, args, kw = rpc.mock_calls[0]
        assert args[0] == request

        # The routing header must appear in the call metadata.
        assert (
            'x-goog-request-params',
            'name=name/value',
        ) in kw['metadata']
@pytest.mark.asyncio
async def test_get_domain_field_headers_async():
    """Async variant: routing fields propagate to x-goog-request-params."""
    client = ManagedIdentitiesServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Populate the routing field with a non-empty value.
    request = managed_identities_service.GetDomainRequest()
    request.name = 'name/value'

    with mock.patch.object(type(client.transport.get_domain), '__call__') as rpc:
        rpc.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resource.Domain())
        await client.get_domain(request)

        # The stub received the original request object.
        assert len(rpc.mock_calls)
        _, args, kw = rpc.mock_calls[0]
        assert args[0] == request

        # The routing header must appear in the call metadata.
        assert (
            'x-goog-request-params',
            'name=name/value',
        ) in kw['metadata']
def test_update_domain(transport: str = 'grpc', request_type=managed_identities_service.UpdateDomainRequest):
    """update_domain must issue one RPC and return a long-running-operation future."""
    client = ManagedIdentitiesServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Empty request suffices; proto3 fields are optional and the API is mocked.
    request = request_type()

    with mock.patch.object(type(client.transport.update_domain), '__call__') as rpc:
        rpc.return_value = operations_pb2.Operation(name='operations/spam')
        response = client.update_domain(request)

        # Exactly one stub invocation with the default request proto.
        assert len(rpc.mock_calls) == 1
        _, args, _ = rpc.mock_calls[0]
        assert args[0] == managed_identities_service.UpdateDomainRequest()

    # The client wraps the raw Operation in a future.
    assert isinstance(response, future.Future)
def test_update_domain_from_dict():
    """Re-run the update_domain test with a plain dict as the request type."""
    test_update_domain(request_type=dict)
def test_update_domain_empty_call():
    """Coverage failsafe: a call with no request and no flattened fields
    must still send a default request proto to the stub."""
    client = ManagedIdentitiesServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='grpc',
    )

    with mock.patch.object(type(client.transport.update_domain), '__call__') as rpc:
        client.update_domain()
        rpc.assert_called()
        _, args, _ = rpc.mock_calls[0]
        assert args[0] == managed_identities_service.UpdateDomainRequest()
@pytest.mark.asyncio
async def test_update_domain_async(transport: str = 'grpc_asyncio', request_type=managed_identities_service.UpdateDomainRequest):
    """Async variant: update_domain returns a long-running-operation future."""
    client = ManagedIdentitiesServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Empty request suffices; proto3 fields are optional and the API is mocked.
    request = request_type()

    with mock.patch.object(type(client.transport.update_domain), '__call__') as rpc:
        rpc.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            operations_pb2.Operation(name='operations/spam')
        )
        response = await client.update_domain(request)

        # The stub was invoked with the default request proto.
        assert len(rpc.mock_calls)
        _, args, _ = rpc.mock_calls[0]
        assert args[0] == managed_identities_service.UpdateDomainRequest()

    # The client wraps the raw Operation in a future.
    assert isinstance(response, future.Future)
@pytest.mark.asyncio
async def test_update_domain_async_from_dict():
    """Async update_domain test exercised with a dict-typed request."""
    await test_update_domain_async(request_type=dict)
def test_update_domain_field_headers():
    """URI-bound request fields must be echoed as x-goog-request-params metadata."""
    client = ManagedIdentitiesServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Populate the nested routing field with a non-empty value.
    request = managed_identities_service.UpdateDomainRequest()
    request.domain.name = 'domain.name/value'

    with mock.patch.object(type(client.transport.update_domain), '__call__') as rpc:
        rpc.return_value = operations_pb2.Operation(name='operations/op')
        client.update_domain(request)

        # One stub invocation, with the original request object.
        assert len(rpc.mock_calls) == 1
        _, args, kw = rpc.mock_calls[0]
        assert args[0] == request

        # The routing header must appear in the call metadata.
        assert (
            'x-goog-request-params',
            'domain.name=domain.name/value',
        ) in kw['metadata']
@pytest.mark.asyncio
async def test_update_domain_field_headers_async():
    """Async variant: routing fields propagate to x-goog-request-params."""
    client = ManagedIdentitiesServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Populate the nested routing field with a non-empty value.
    request = managed_identities_service.UpdateDomainRequest()
    request.domain.name = 'domain.name/value'

    with mock.patch.object(type(client.transport.update_domain), '__call__') as rpc:
        rpc.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            operations_pb2.Operation(name='operations/op')
        )
        await client.update_domain(request)

        # The stub received the original request object.
        assert len(rpc.mock_calls)
        _, args, kw = rpc.mock_calls[0]
        assert args[0] == request

        # The routing header must appear in the call metadata.
        assert (
            'x-goog-request-params',
            'domain.name=domain.name/value',
        ) in kw['metadata']
def test_delete_domain(transport: str = 'grpc', request_type=managed_identities_service.DeleteDomainRequest):
    """delete_domain must issue one RPC and return a long-running-operation future."""
    client = ManagedIdentitiesServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Empty request suffices; proto3 fields are optional and the API is mocked.
    request = request_type()

    with mock.patch.object(type(client.transport.delete_domain), '__call__') as rpc:
        rpc.return_value = operations_pb2.Operation(name='operations/spam')
        response = client.delete_domain(request)

        # Exactly one stub invocation with the default request proto.
        assert len(rpc.mock_calls) == 1
        _, args, _ = rpc.mock_calls[0]
        assert args[0] == managed_identities_service.DeleteDomainRequest()

    # The client wraps the raw Operation in a future.
    assert isinstance(response, future.Future)
def test_delete_domain_from_dict():
    """Re-run the delete_domain test with a plain dict as the request type."""
    test_delete_domain(request_type=dict)
def test_delete_domain_empty_call():
    """Coverage failsafe: a call with no request and no flattened fields
    must still send a default request proto to the stub."""
    client = ManagedIdentitiesServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='grpc',
    )

    with mock.patch.object(type(client.transport.delete_domain), '__call__') as rpc:
        client.delete_domain()
        rpc.assert_called()
        _, args, _ = rpc.mock_calls[0]
        assert args[0] == managed_identities_service.DeleteDomainRequest()
@pytest.mark.asyncio
async def test_delete_domain_async(transport: str = 'grpc_asyncio', request_type=managed_identities_service.DeleteDomainRequest):
    """Async variant: delete_domain returns a long-running-operation future."""
    client = ManagedIdentitiesServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Empty request suffices; proto3 fields are optional and the API is mocked.
    request = request_type()

    with mock.patch.object(type(client.transport.delete_domain), '__call__') as rpc:
        rpc.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            operations_pb2.Operation(name='operations/spam')
        )
        response = await client.delete_domain(request)

        # The stub was invoked with the default request proto.
        assert len(rpc.mock_calls)
        _, args, _ = rpc.mock_calls[0]
        assert args[0] == managed_identities_service.DeleteDomainRequest()

    # The client wraps the raw Operation in a future.
    assert isinstance(response, future.Future)
@pytest.mark.asyncio
async def test_delete_domain_async_from_dict():
    """Async delete_domain test exercised with a dict-typed request."""
    await test_delete_domain_async(request_type=dict)
def test_delete_domain_field_headers():
    """URI-bound request fields must be echoed as x-goog-request-params metadata."""
    client = ManagedIdentitiesServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Populate the routing field with a non-empty value.
    request = managed_identities_service.DeleteDomainRequest()
    request.name = 'name/value'

    with mock.patch.object(type(client.transport.delete_domain), '__call__') as rpc:
        rpc.return_value = operations_pb2.Operation(name='operations/op')
        client.delete_domain(request)

        # One stub invocation, with the original request object.
        assert len(rpc.mock_calls) == 1
        _, args, kw = rpc.mock_calls[0]
        assert args[0] == request

        # The routing header must appear in the call metadata.
        assert (
            'x-goog-request-params',
            'name=name/value',
        ) in kw['metadata']
@pytest.mark.asyncio
async def test_delete_domain_field_headers_async():
    """Async variant: routing fields propagate to x-goog-request-params."""
    client = ManagedIdentitiesServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Populate the routing field with a non-empty value.
    request = managed_identities_service.DeleteDomainRequest()
    request.name = 'name/value'

    with mock.patch.object(type(client.transport.delete_domain), '__call__') as rpc:
        rpc.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            operations_pb2.Operation(name='operations/op')
        )
        await client.delete_domain(request)

        # The stub received the original request object.
        assert len(rpc.mock_calls)
        _, args, kw = rpc.mock_calls[0]
        assert args[0] == request

        # The routing header must appear in the call metadata.
        assert (
            'x-goog-request-params',
            'name=name/value',
        ) in kw['metadata']
def test_attach_trust(transport: str = 'grpc', request_type=managed_identities_service.AttachTrustRequest):
    """attach_trust must issue one RPC and return a long-running-operation future."""
    client = ManagedIdentitiesServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Empty request suffices; proto3 fields are optional and the API is mocked.
    request = request_type()

    with mock.patch.object(type(client.transport.attach_trust), '__call__') as rpc:
        rpc.return_value = operations_pb2.Operation(name='operations/spam')
        response = client.attach_trust(request)

        # Exactly one stub invocation with the default request proto.
        assert len(rpc.mock_calls) == 1
        _, args, _ = rpc.mock_calls[0]
        assert args[0] == managed_identities_service.AttachTrustRequest()

    # The client wraps the raw Operation in a future.
    assert isinstance(response, future.Future)
def test_attach_trust_from_dict():
    """Re-run the attach_trust test with a plain dict as the request type."""
    test_attach_trust(request_type=dict)
def test_attach_trust_empty_call():
    """Coverage failsafe: a call with no request and no flattened fields
    must still send a default request proto to the stub."""
    client = ManagedIdentitiesServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='grpc',
    )

    with mock.patch.object(type(client.transport.attach_trust), '__call__') as rpc:
        client.attach_trust()
        rpc.assert_called()
        _, args, _ = rpc.mock_calls[0]
        assert args[0] == managed_identities_service.AttachTrustRequest()
@pytest.mark.asyncio
async def test_attach_trust_async(transport: str = 'grpc_asyncio', request_type=managed_identities_service.AttachTrustRequest):
    """Async variant: attach_trust returns a long-running-operation future."""
    client = ManagedIdentitiesServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Empty request suffices; proto3 fields are optional and the API is mocked.
    request = request_type()

    with mock.patch.object(type(client.transport.attach_trust), '__call__') as rpc:
        rpc.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            operations_pb2.Operation(name='operations/spam')
        )
        response = await client.attach_trust(request)

        # The stub was invoked with the default request proto.
        assert len(rpc.mock_calls)
        _, args, _ = rpc.mock_calls[0]
        assert args[0] == managed_identities_service.AttachTrustRequest()

    # The client wraps the raw Operation in a future.
    assert isinstance(response, future.Future)
@pytest.mark.asyncio
async def test_attach_trust_async_from_dict():
    """Async attach_trust test exercised with a dict-typed request."""
    await test_attach_trust_async(request_type=dict)
def test_attach_trust_field_headers():
    """URI-bound request fields must be echoed as x-goog-request-params metadata."""
    client = ManagedIdentitiesServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Populate the routing field with a non-empty value.
    request = managed_identities_service.AttachTrustRequest()
    request.name = 'name/value'

    with mock.patch.object(type(client.transport.attach_trust), '__call__') as rpc:
        rpc.return_value = operations_pb2.Operation(name='operations/op')
        client.attach_trust(request)

        # One stub invocation, with the original request object.
        assert len(rpc.mock_calls) == 1
        _, args, kw = rpc.mock_calls[0]
        assert args[0] == request

        # The routing header must appear in the call metadata.
        assert (
            'x-goog-request-params',
            'name=name/value',
        ) in kw['metadata']
@pytest.mark.asyncio
async def test_attach_trust_field_headers_async():
    """Async variant: routing fields propagate to x-goog-request-params."""
    client = ManagedIdentitiesServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Populate the routing field with a non-empty value.
    request = managed_identities_service.AttachTrustRequest()
    request.name = 'name/value'

    with mock.patch.object(type(client.transport.attach_trust), '__call__') as rpc:
        rpc.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            operations_pb2.Operation(name='operations/op')
        )
        await client.attach_trust(request)

        # The stub received the original request object.
        assert len(rpc.mock_calls)
        _, args, kw = rpc.mock_calls[0]
        assert args[0] == request

        # The routing header must appear in the call metadata.
        assert (
            'x-goog-request-params',
            'name=name/value',
        ) in kw['metadata']
def test_reconfigure_trust(transport: str = 'grpc', request_type=managed_identities_service.ReconfigureTrustRequest):
    """reconfigure_trust must issue one RPC and return a long-running-operation future."""
    client = ManagedIdentitiesServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Empty request suffices; proto3 fields are optional and the API is mocked.
    request = request_type()

    with mock.patch.object(type(client.transport.reconfigure_trust), '__call__') as rpc:
        rpc.return_value = operations_pb2.Operation(name='operations/spam')
        response = client.reconfigure_trust(request)

        # Exactly one stub invocation with the default request proto.
        assert len(rpc.mock_calls) == 1
        _, args, _ = rpc.mock_calls[0]
        assert args[0] == managed_identities_service.ReconfigureTrustRequest()

    # The client wraps the raw Operation in a future.
    assert isinstance(response, future.Future)
def test_reconfigure_trust_from_dict():
    """Re-run the reconfigure_trust test with a plain dict as the request type."""
    test_reconfigure_trust(request_type=dict)
def test_reconfigure_trust_empty_call():
    """Coverage failsafe: a call with no request and no flattened fields
    must still send a default request proto to the stub."""
    client = ManagedIdentitiesServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='grpc',
    )

    with mock.patch.object(type(client.transport.reconfigure_trust), '__call__') as rpc:
        client.reconfigure_trust()
        rpc.assert_called()
        _, args, _ = rpc.mock_calls[0]
        assert args[0] == managed_identities_service.ReconfigureTrustRequest()
@pytest.mark.asyncio
async def test_reconfigure_trust_async(transport: str = 'grpc_asyncio', request_type=managed_identities_service.ReconfigureTrustRequest):
    """Async variant: reconfigure_trust returns a long-running-operation future."""
    client = ManagedIdentitiesServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Empty request suffices; proto3 fields are optional and the API is mocked.
    request = request_type()

    with mock.patch.object(type(client.transport.reconfigure_trust), '__call__') as rpc:
        rpc.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            operations_pb2.Operation(name='operations/spam')
        )
        response = await client.reconfigure_trust(request)

        # The stub was invoked with the default request proto.
        assert len(rpc.mock_calls)
        _, args, _ = rpc.mock_calls[0]
        assert args[0] == managed_identities_service.ReconfigureTrustRequest()

    # The client wraps the raw Operation in a future.
    assert isinstance(response, future.Future)
@pytest.mark.asyncio
async def test_reconfigure_trust_async_from_dict():
    """Async reconfigure_trust test exercised with a dict-typed request."""
    await test_reconfigure_trust_async(request_type=dict)
def test_reconfigure_trust_field_headers():
    """URI-bound request fields must be echoed as x-goog-request-params metadata."""
    client = ManagedIdentitiesServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Populate the routing field with a non-empty value.
    request = managed_identities_service.ReconfigureTrustRequest()
    request.name = 'name/value'

    with mock.patch.object(type(client.transport.reconfigure_trust), '__call__') as rpc:
        rpc.return_value = operations_pb2.Operation(name='operations/op')
        client.reconfigure_trust(request)

        # One stub invocation, with the original request object.
        assert len(rpc.mock_calls) == 1
        _, args, kw = rpc.mock_calls[0]
        assert args[0] == request

        # The routing header must appear in the call metadata.
        assert (
            'x-goog-request-params',
            'name=name/value',
        ) in kw['metadata']
@pytest.mark.asyncio
async def test_reconfigure_trust_field_headers_async():
    """Async variant: routing fields propagate to x-goog-request-params."""
    client = ManagedIdentitiesServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Populate the routing field with a non-empty value.
    request = managed_identities_service.ReconfigureTrustRequest()
    request.name = 'name/value'

    with mock.patch.object(type(client.transport.reconfigure_trust), '__call__') as rpc:
        rpc.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            operations_pb2.Operation(name='operations/op')
        )
        await client.reconfigure_trust(request)

        # The stub received the original request object.
        assert len(rpc.mock_calls)
        _, args, kw = rpc.mock_calls[0]
        assert args[0] == request

        # The routing header must appear in the call metadata.
        assert (
            'x-goog-request-params',
            'name=name/value',
        ) in kw['metadata']
def test_detach_trust(transport: str = 'grpc', request_type=managed_identities_service.DetachTrustRequest):
    """Exercise DetachTrust end-to-end against a mocked gRPC stub."""
    client = ManagedIdentitiesServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.detach_trust),
            '__call__') as call:
        # Designate an appropriate return value for the call.
        call.return_value = operations_pb2.Operation(name='operations/spam')
        response = client.detach_trust(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == managed_identities_service.DetachTrustRequest()

    # Establish that the response is the type that we expect.
    assert isinstance(response, future.Future)
def test_detach_trust_from_dict():
    """Re-run the DetachTrust test with a dict-typed request."""
    test_detach_trust(request_type=dict)
def test_detach_trust_empty_call():
    """Calling DetachTrust with no arguments sends a default-constructed request."""
    # This test is a coverage failsafe to make sure that totally empty calls,
    # i.e. request == None and no flattened fields passed, work.
    client = ManagedIdentitiesServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='grpc',
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.detach_trust),
            '__call__') as call:
        client.detach_trust()
        call.assert_called()
        _, args, _ = call.mock_calls[0]
        assert args[0] == managed_identities_service.DetachTrustRequest()
@pytest.mark.asyncio
async def test_detach_trust_async(transport: str = 'grpc_asyncio', request_type=managed_identities_service.DetachTrustRequest):
    """Exercise async DetachTrust end-to-end against a mocked gRPC stub."""
    client = ManagedIdentitiesServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.detach_trust),
            '__call__') as call:
        # Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            operations_pb2.Operation(name='operations/spam')
        )
        response = await client.detach_trust(request)

        # Establish that the underlying gRPC stub method was called exactly
        # once.  The bare `assert len(call.mock_calls)` used previously only
        # checked "at least one call"; `== 1` matches the sync variant.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == managed_identities_service.DetachTrustRequest()

    # Establish that the response is the type that we expect.
    assert isinstance(response, future.Future)
@pytest.mark.asyncio
async def test_detach_trust_async_from_dict():
    """Re-run the async DetachTrust test with a dict-typed request."""
    await test_detach_trust_async(request_type=dict)
def test_detach_trust_field_headers():
    """Verify DetachTrust forwards the request's resource name as a routing header."""
    client = ManagedIdentitiesServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = managed_identities_service.DetachTrustRequest()
    request.name = 'name/value'

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.detach_trust),
            '__call__') as call:
        call.return_value = operations_pb2.Operation(name='operations/op')
        client.detach_trust(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert (
        'x-goog-request-params',
        'name=name/value',
    ) in kw['metadata']
@pytest.mark.asyncio
async def test_detach_trust_field_headers_async():
    """Verify async DetachTrust forwards the request's resource name as a routing header."""
    client = ManagedIdentitiesServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = managed_identities_service.DetachTrustRequest()
    request.name = 'name/value'

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.detach_trust),
            '__call__') as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op'))
        await client.detach_trust(request)

        # Establish that the underlying gRPC stub method was called exactly
        # once.  The bare `assert len(call.mock_calls)` used previously only
        # checked "at least one call"; `== 1` matches the sync variant.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert (
        'x-goog-request-params',
        'name=name/value',
    ) in kw['metadata']
def test_validate_trust(transport: str = 'grpc', request_type=managed_identities_service.ValidateTrustRequest):
    """Exercise ValidateTrust end-to-end against a mocked gRPC stub."""
    client = ManagedIdentitiesServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.validate_trust),
            '__call__') as call:
        # Designate an appropriate return value for the call.
        call.return_value = operations_pb2.Operation(name='operations/spam')
        response = client.validate_trust(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == managed_identities_service.ValidateTrustRequest()

    # Establish that the response is the type that we expect.
    assert isinstance(response, future.Future)
def test_validate_trust_from_dict():
    """Re-run the ValidateTrust test with a dict-typed request."""
    test_validate_trust(request_type=dict)
def test_validate_trust_empty_call():
    """Calling ValidateTrust with no arguments sends a default-constructed request."""
    # This test is a coverage failsafe to make sure that totally empty calls,
    # i.e. request == None and no flattened fields passed, work.
    client = ManagedIdentitiesServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='grpc',
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.validate_trust),
            '__call__') as call:
        client.validate_trust()
        call.assert_called()
        _, args, _ = call.mock_calls[0]
        assert args[0] == managed_identities_service.ValidateTrustRequest()
@pytest.mark.asyncio
async def test_validate_trust_async(transport: str = 'grpc_asyncio', request_type=managed_identities_service.ValidateTrustRequest):
    """Exercise async ValidateTrust end-to-end against a mocked gRPC stub."""
    client = ManagedIdentitiesServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.validate_trust),
            '__call__') as call:
        # Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            operations_pb2.Operation(name='operations/spam')
        )
        response = await client.validate_trust(request)

        # Establish that the underlying gRPC stub method was called exactly
        # once.  The bare `assert len(call.mock_calls)` used previously only
        # checked "at least one call"; `== 1` matches the sync variant.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == managed_identities_service.ValidateTrustRequest()

    # Establish that the response is the type that we expect.
    assert isinstance(response, future.Future)
@pytest.mark.asyncio
async def test_validate_trust_async_from_dict():
    """Re-run the async ValidateTrust test with a dict-typed request."""
    await test_validate_trust_async(request_type=dict)
def test_validate_trust_field_headers():
    """Verify ValidateTrust forwards the request's resource name as a routing header."""
    client = ManagedIdentitiesServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = managed_identities_service.ValidateTrustRequest()
    request.name = 'name/value'

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.validate_trust),
            '__call__') as call:
        call.return_value = operations_pb2.Operation(name='operations/op')
        client.validate_trust(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert (
        'x-goog-request-params',
        'name=name/value',
    ) in kw['metadata']
@pytest.mark.asyncio
async def test_validate_trust_field_headers_async():
    """Verify async ValidateTrust forwards the request's resource name as a routing header."""
    client = ManagedIdentitiesServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = managed_identities_service.ValidateTrustRequest()
    request.name = 'name/value'

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.validate_trust),
            '__call__') as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op'))
        await client.validate_trust(request)

        # Establish that the underlying gRPC stub method was called exactly
        # once.  The bare `assert len(call.mock_calls)` used previously only
        # checked "at least one call"; `== 1` matches the sync variant.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert (
        'x-goog-request-params',
        'name=name/value',
    ) in kw['metadata']
def test_credentials_transport_error():
    """Passing a transport instance together with credentials/options raises ValueError."""
    # It is an error to provide credentials and a transport instance.
    transport = transports.ManagedIdentitiesServiceGrpcTransport(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    with pytest.raises(ValueError):
        client = ManagedIdentitiesServiceClient(
            credentials=ga_credentials.AnonymousCredentials(),
            transport=transport,
        )

    # It is an error to provide a credentials file and a transport instance.
    transport = transports.ManagedIdentitiesServiceGrpcTransport(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    with pytest.raises(ValueError):
        client = ManagedIdentitiesServiceClient(
            client_options={"credentials_file": "credentials.json"},
            transport=transport,
        )

    # It is an error to provide scopes and a transport instance.
    transport = transports.ManagedIdentitiesServiceGrpcTransport(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    with pytest.raises(ValueError):
        client = ManagedIdentitiesServiceClient(
            client_options={"scopes": ["1", "2"]},
            transport=transport,
        )
def test_transport_instance():
    """A client accepts a pre-built transport instance and exposes it unchanged."""
    custom_transport = transports.ManagedIdentitiesServiceGrpcTransport(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    client = ManagedIdentitiesServiceClient(transport=custom_transport)
    assert client.transport is custom_transport
def test_transport_get_channel():
    """Both gRPC transports expose a usable channel when constructed directly."""
    for transport_cls in (
        transports.ManagedIdentitiesServiceGrpcTransport,
        transports.ManagedIdentitiesServiceGrpcAsyncIOTransport,
    ):
        transport = transport_cls(
            credentials=ga_credentials.AnonymousCredentials(),
        )
        assert transport.grpc_channel
@pytest.mark.parametrize("transport_class", [
    transports.ManagedIdentitiesServiceGrpcTransport,
    transports.ManagedIdentitiesServiceGrpcAsyncIOTransport,
])
def test_transport_adc(transport_class):
    """Transports fall back to application default credentials when none are given."""
    # Test default credentials are used if not provided.
    with mock.patch.object(google.auth, 'default') as adc:
        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
        transport_class()
        adc.assert_called_once()
def test_transport_grpc_default():
    """The client falls back to the synchronous gRPC transport when none is given."""
    client = ManagedIdentitiesServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    assert isinstance(
        client.transport,
        transports.ManagedIdentitiesServiceGrpcTransport,
    )
def test_managed_identities_service_base_transport_error():
    """Supplying both credentials and a credentials file raises DuplicateCredentialArgs."""
    # Passing both a credentials object and credentials_file should raise an error
    with pytest.raises(core_exceptions.DuplicateCredentialArgs):
        transport = transports.ManagedIdentitiesServiceTransport(
            credentials=ga_credentials.AnonymousCredentials(),
            credentials_file="credentials.json"
        )
def test_managed_identities_service_base_transport():
    """Every RPC and the LRO client on the abstract base transport raises NotImplementedError."""
    # Instantiate the base transport.
    with mock.patch('google.cloud.managedidentities_v1beta1.services.managed_identities_service.transports.ManagedIdentitiesServiceTransport.__init__') as Transport:
        Transport.return_value = None
        transport = transports.ManagedIdentitiesServiceTransport(
            credentials=ga_credentials.AnonymousCredentials(),
        )

    # Every method on the transport should just blindly
    # raise NotImplementedError.
    methods = (
        'create_microsoft_ad_domain',
        'reset_admin_password',
        'list_domains',
        'get_domain',
        'update_domain',
        'delete_domain',
        'attach_trust',
        'reconfigure_trust',
        'detach_trust',
        'validate_trust',
    )
    for method in methods:
        with pytest.raises(NotImplementedError):
            getattr(transport, method)(request=object())

    with pytest.raises(NotImplementedError):
        transport.close()

    # Additionally, the LRO client (a property) should
    # also raise NotImplementedError
    with pytest.raises(NotImplementedError):
        transport.operations_client
@requires_google_auth_gte_1_25_0
def test_managed_identities_service_base_transport_with_credentials_file():
    """A credentials file is loaded with default scopes (google-auth >= 1.25)."""
    # Instantiate the base transport with a credentials file
    with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.managedidentities_v1beta1.services.managed_identities_service.transports.ManagedIdentitiesServiceTransport._prep_wrapped_messages') as Transport:
        Transport.return_value = None
        load_creds.return_value = (ga_credentials.AnonymousCredentials(), None)
        transport = transports.ManagedIdentitiesServiceTransport(
            credentials_file="credentials.json",
            quota_project_id="octopus",
        )
        load_creds.assert_called_once_with("credentials.json",
            scopes=None,
            default_scopes=(
                'https://www.googleapis.com/auth/cloud-platform',
            ),
            quota_project_id="octopus",
        )
@requires_google_auth_lt_1_25_0
def test_managed_identities_service_base_transport_with_credentials_file_old_google_auth():
    """A credentials file is loaded with explicit scopes (google-auth < 1.25 lacks default_scopes)."""
    # Instantiate the base transport with a credentials file
    with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.managedidentities_v1beta1.services.managed_identities_service.transports.ManagedIdentitiesServiceTransport._prep_wrapped_messages') as Transport:
        Transport.return_value = None
        load_creds.return_value = (ga_credentials.AnonymousCredentials(), None)
        transport = transports.ManagedIdentitiesServiceTransport(
            credentials_file="credentials.json",
            quota_project_id="octopus",
        )
        load_creds.assert_called_once_with("credentials.json", scopes=(
            'https://www.googleapis.com/auth/cloud-platform',
        ),
        quota_project_id="octopus",
        )
def test_managed_identities_service_base_transport_with_adc():
    """The base transport falls back to ADC when neither credentials nor a file is given."""
    # Test the default credentials are used if credentials and credentials_file are None.
    with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.managedidentities_v1beta1.services.managed_identities_service.transports.ManagedIdentitiesServiceTransport._prep_wrapped_messages') as Transport:
        Transport.return_value = None
        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
        transport = transports.ManagedIdentitiesServiceTransport()
        adc.assert_called_once()
@requires_google_auth_gte_1_25_0
def test_managed_identities_service_auth_adc():
    """The client requests ADC with default scopes (google-auth >= 1.25)."""
    # If no credentials are provided, we should use ADC credentials.
    with mock.patch.object(google.auth, 'default', autospec=True) as adc:
        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
        ManagedIdentitiesServiceClient()
        adc.assert_called_once_with(
            scopes=None,
            default_scopes=(
                'https://www.googleapis.com/auth/cloud-platform',
            ),
            quota_project_id=None,
        )
@requires_google_auth_lt_1_25_0
def test_managed_identities_service_auth_adc_old_google_auth():
    """The client requests ADC with explicit scopes (google-auth < 1.25 lacks default_scopes)."""
    # If no credentials are provided, we should use ADC credentials.
    with mock.patch.object(google.auth, 'default', autospec=True) as adc:
        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
        ManagedIdentitiesServiceClient()
        adc.assert_called_once_with(
            scopes=( 'https://www.googleapis.com/auth/cloud-platform',),
            quota_project_id=None,
        )
@pytest.mark.parametrize(
    "transport_class",
    [
        transports.ManagedIdentitiesServiceGrpcTransport,
        transports.ManagedIdentitiesServiceGrpcAsyncIOTransport,
    ],
)
@requires_google_auth_gte_1_25_0
def test_managed_identities_service_transport_auth_adc(transport_class):
    """Transports forward user scopes and quota project to ADC (google-auth >= 1.25)."""
    # If credentials and host are not provided, the transport class should use
    # ADC credentials.
    with mock.patch.object(google.auth, 'default', autospec=True) as adc:
        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
        transport_class(quota_project_id="octopus", scopes=["1", "2"])
        adc.assert_called_once_with(
            scopes=["1", "2"],
            default_scopes=( 'https://www.googleapis.com/auth/cloud-platform',),
            quota_project_id="octopus",
        )
@pytest.mark.parametrize(
    "transport_class",
    [
        transports.ManagedIdentitiesServiceGrpcTransport,
        transports.ManagedIdentitiesServiceGrpcAsyncIOTransport,
    ],
)
@requires_google_auth_lt_1_25_0
def test_managed_identities_service_transport_auth_adc_old_google_auth(transport_class):
    """Transports pass explicit scopes to ADC (google-auth < 1.25 lacks default_scopes)."""
    # If credentials and host are not provided, the transport class should use
    # ADC credentials.
    with mock.patch.object(google.auth, "default", autospec=True) as adc:
        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
        transport_class(quota_project_id="octopus")
        adc.assert_called_once_with(scopes=(
            'https://www.googleapis.com/auth/cloud-platform',
        ),
        quota_project_id="octopus",
        )
@pytest.mark.parametrize(
    "transport_class,grpc_helpers",
    [
        (transports.ManagedIdentitiesServiceGrpcTransport, grpc_helpers),
        (transports.ManagedIdentitiesServiceGrpcAsyncIOTransport, grpc_helpers_async)
    ],
)
def test_managed_identities_service_transport_create_channel(transport_class, grpc_helpers):
    """Channel creation receives the expected host, credentials, scopes, and gRPC options."""
    # If credentials and host are not provided, the transport class should use
    # ADC credentials.
    with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object(
        grpc_helpers, "create_channel", autospec=True
    ) as create_channel:
        creds = ga_credentials.AnonymousCredentials()
        adc.return_value = (creds, None)
        transport_class(
            quota_project_id="octopus",
            scopes=["1", "2"]
        )

        create_channel.assert_called_with(
            "managedidentities.googleapis.com:443",
            credentials=creds,
            credentials_file=None,
            quota_project_id="octopus",
            default_scopes=(
                'https://www.googleapis.com/auth/cloud-platform',
            ),
            scopes=["1", "2"],
            default_host="managedidentities.googleapis.com",
            ssl_credentials=None,
            options=[
                ("grpc.max_send_message_length", -1),
                ("grpc.max_receive_message_length", -1),
            ],
        )
@pytest.mark.parametrize("transport_class", [transports.ManagedIdentitiesServiceGrpcTransport, transports.ManagedIdentitiesServiceGrpcAsyncIOTransport])
def test_managed_identities_service_grpc_transport_client_cert_source_for_mtls(
    transport_class
):
    """mTLS channel credentials come from ssl_channel_credentials or client_cert_source_for_mtls."""
    cred = ga_credentials.AnonymousCredentials()

    # Check ssl_channel_credentials is used if provided.
    with mock.patch.object(transport_class, "create_channel") as mock_create_channel:
        mock_ssl_channel_creds = mock.Mock()
        transport_class(
            host="squid.clam.whelk",
            credentials=cred,
            ssl_channel_credentials=mock_ssl_channel_creds
        )
        mock_create_channel.assert_called_once_with(
            "squid.clam.whelk:443",
            credentials=cred,
            credentials_file=None,
            scopes=None,
            ssl_credentials=mock_ssl_channel_creds,
            quota_project_id=None,
            options=[
                ("grpc.max_send_message_length", -1),
                ("grpc.max_receive_message_length", -1),
            ],
        )

    # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls
    # is used.
    with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()):
        with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred:
            transport_class(
                credentials=cred,
                client_cert_source_for_mtls=client_cert_source_callback
            )
            expected_cert, expected_key = client_cert_source_callback()
            mock_ssl_cred.assert_called_once_with(
                certificate_chain=expected_cert,
                private_key=expected_key
            )
def test_managed_identities_service_host_no_port():
    """An endpoint without a port gets the default :443 appended."""
    endpoint = 'managedidentities.googleapis.com'
    client = ManagedIdentitiesServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        client_options=client_options.ClientOptions(api_endpoint=endpoint),
    )
    assert client.transport._host == endpoint + ':443'
def test_managed_identities_service_host_with_port():
    """An endpoint with an explicit port is used verbatim."""
    endpoint = 'managedidentities.googleapis.com:8000'
    client = ManagedIdentitiesServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        client_options=client_options.ClientOptions(api_endpoint=endpoint),
    )
    assert client.transport._host == endpoint
def test_managed_identities_service_grpc_transport_channel():
    """A user-supplied channel is adopted as-is by the sync gRPC transport."""
    channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials())

    # Check that channel is used if provided.
    transport = transports.ManagedIdentitiesServiceGrpcTransport(
        host="squid.clam.whelk",
        channel=channel,
    )
    assert transport.grpc_channel == channel
    assert transport._host == "squid.clam.whelk:443"
    # `is None` instead of `== None`: identity comparison per PEP 8 (E711).
    assert transport._ssl_channel_credentials is None
def test_managed_identities_service_grpc_asyncio_transport_channel():
    """A user-supplied channel is adopted as-is by the asyncio gRPC transport."""
    channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials())

    # Check that channel is used if provided.
    transport = transports.ManagedIdentitiesServiceGrpcAsyncIOTransport(
        host="squid.clam.whelk",
        channel=channel,
    )
    assert transport.grpc_channel == channel
    assert transport._host == "squid.clam.whelk:443"
    # `is None` instead of `== None`: identity comparison per PEP 8 (E711).
    assert transport._ssl_channel_credentials is None
# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
# removed from grpc/grpc_asyncio transport constructor.
@pytest.mark.parametrize("transport_class", [transports.ManagedIdentitiesServiceGrpcTransport, transports.ManagedIdentitiesServiceGrpcAsyncIOTransport])
def test_managed_identities_service_transport_channel_mtls_with_client_cert_source(
    transport_class
):
    """Deprecated api_mtls_endpoint + client_cert_source still build an mTLS channel (with a DeprecationWarning)."""
    with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred:
        with mock.patch.object(transport_class, "create_channel") as grpc_create_channel:
            mock_ssl_cred = mock.Mock()
            grpc_ssl_channel_cred.return_value = mock_ssl_cred

            mock_grpc_channel = mock.Mock()
            grpc_create_channel.return_value = mock_grpc_channel

            cred = ga_credentials.AnonymousCredentials()
            with pytest.warns(DeprecationWarning):
                with mock.patch.object(google.auth, 'default') as adc:
                    adc.return_value = (cred, None)
                    transport = transport_class(
                        host="squid.clam.whelk",
                        api_mtls_endpoint="mtls.squid.clam.whelk",
                        client_cert_source=client_cert_source_callback,
                    )
                    adc.assert_called_once()

            grpc_ssl_channel_cred.assert_called_once_with(
                certificate_chain=b"cert bytes", private_key=b"key bytes"
            )
            grpc_create_channel.assert_called_once_with(
                "mtls.squid.clam.whelk:443",
                credentials=cred,
                credentials_file=None,
                scopes=None,
                ssl_credentials=mock_ssl_cred,
                quota_project_id=None,
                options=[
                    ("grpc.max_send_message_length", -1),
                    ("grpc.max_receive_message_length", -1),
                ],
            )
            assert transport.grpc_channel == mock_grpc_channel
            assert transport._ssl_channel_credentials == mock_ssl_cred
# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
# removed from grpc/grpc_asyncio transport constructor.
@pytest.mark.parametrize("transport_class", [transports.ManagedIdentitiesServiceGrpcTransport, transports.ManagedIdentitiesServiceGrpcAsyncIOTransport])
def test_managed_identities_service_transport_channel_mtls_with_adc(
    transport_class
):
    """Deprecated api_mtls_endpoint without client_cert_source falls back to ADC SslCredentials."""
    mock_ssl_cred = mock.Mock()
    with mock.patch.multiple(
        "google.auth.transport.grpc.SslCredentials",
        __init__=mock.Mock(return_value=None),
        ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred),
    ):
        with mock.patch.object(transport_class, "create_channel") as grpc_create_channel:
            mock_grpc_channel = mock.Mock()
            grpc_create_channel.return_value = mock_grpc_channel
            mock_cred = mock.Mock()

            with pytest.warns(DeprecationWarning):
                transport = transport_class(
                    host="squid.clam.whelk",
                    credentials=mock_cred,
                    api_mtls_endpoint="mtls.squid.clam.whelk",
                    client_cert_source=None,
                )

            grpc_create_channel.assert_called_once_with(
                "mtls.squid.clam.whelk:443",
                credentials=mock_cred,
                credentials_file=None,
                scopes=None,
                ssl_credentials=mock_ssl_cred,
                quota_project_id=None,
                options=[
                    ("grpc.max_send_message_length", -1),
                    ("grpc.max_receive_message_length", -1),
                ],
            )
            assert transport.grpc_channel == mock_grpc_channel
def test_managed_identities_service_grpc_lro_client():
    """The sync transport exposes a cached operations_v1.OperationsClient."""
    client = ManagedIdentitiesServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='grpc',
    )
    transport = client.transport

    # Ensure that we have a api-core operations client.
    assert isinstance(
        transport.operations_client,
        operations_v1.OperationsClient,
    )

    # Ensure that subsequent calls to the property send the exact same object.
    assert transport.operations_client is transport.operations_client
def test_managed_identities_service_grpc_lro_async_client():
    """The asyncio transport exposes a cached operations_v1.OperationsAsyncClient."""
    client = ManagedIdentitiesServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='grpc_asyncio',
    )
    transport = client.transport

    # Ensure that we have a api-core operations client.
    assert isinstance(
        transport.operations_client,
        operations_v1.OperationsAsyncClient,
    )

    # Ensure that subsequent calls to the property send the exact same object.
    assert transport.operations_client is transport.operations_client
def test_common_billing_account_path():
    """The billing-account helper renders the canonical resource path."""
    rendered = ManagedIdentitiesServiceClient.common_billing_account_path("squid")
    assert rendered == "billingAccounts/squid"
def test_parse_common_billing_account_path():
    """Parsing a billing-account path inverts its construction."""
    fields = {"billing_account": "clam"}
    rendered = ManagedIdentitiesServiceClient.common_billing_account_path(**fields)
    # Check that the path construction is reversible.
    assert ManagedIdentitiesServiceClient.parse_common_billing_account_path(rendered) == fields
def test_common_folder_path():
    """The folder helper renders the canonical resource path."""
    rendered = ManagedIdentitiesServiceClient.common_folder_path("whelk")
    assert rendered == "folders/whelk"
def test_parse_common_folder_path():
    """Parsing a folder path inverts its construction."""
    fields = {"folder": "octopus"}
    rendered = ManagedIdentitiesServiceClient.common_folder_path(**fields)
    # Check that the path construction is reversible.
    assert ManagedIdentitiesServiceClient.parse_common_folder_path(rendered) == fields
def test_common_organization_path():
    """The organization helper renders the canonical resource path."""
    rendered = ManagedIdentitiesServiceClient.common_organization_path("oyster")
    assert rendered == "organizations/oyster"
def test_parse_common_organization_path():
    """Parsing an organization path inverts its construction."""
    fields = {"organization": "nudibranch"}
    rendered = ManagedIdentitiesServiceClient.common_organization_path(**fields)
    # Check that the path construction is reversible.
    assert ManagedIdentitiesServiceClient.parse_common_organization_path(rendered) == fields
def test_common_project_path():
    """The project helper renders the canonical resource path."""
    rendered = ManagedIdentitiesServiceClient.common_project_path("cuttlefish")
    assert rendered == "projects/cuttlefish"
def test_parse_common_project_path():
    """Parsing a project path inverts its construction."""
    fields = {"project": "mussel"}
    rendered = ManagedIdentitiesServiceClient.common_project_path(**fields)
    # Check that the path construction is reversible.
    assert ManagedIdentitiesServiceClient.parse_common_project_path(rendered) == fields
def test_common_location_path():
    """The location helper renders the canonical resource path."""
    rendered = ManagedIdentitiesServiceClient.common_location_path("winkle", "nautilus")
    assert rendered == "projects/winkle/locations/nautilus"
def test_parse_common_location_path():
    """Parsing a location path inverts its construction."""
    fields = {
        "project": "scallop",
        "location": "abalone",
    }
    rendered = ManagedIdentitiesServiceClient.common_location_path(**fields)
    # Check that the path construction is reversible.
    assert ManagedIdentitiesServiceClient.parse_common_location_path(rendered) == fields
def test_client_withDEFAULT_CLIENT_INFO():
    """client_info is forwarded to _prep_wrapped_messages for both client and transport construction."""
    client_info = gapic_v1.client_info.ClientInfo()

    with mock.patch.object(transports.ManagedIdentitiesServiceTransport, '_prep_wrapped_messages') as prep:
        client = ManagedIdentitiesServiceClient(
            credentials=ga_credentials.AnonymousCredentials(),
            client_info=client_info,
        )
        prep.assert_called_once_with(client_info)

    with mock.patch.object(transports.ManagedIdentitiesServiceTransport, '_prep_wrapped_messages') as prep:
        transport_class = ManagedIdentitiesServiceClient.get_transport_class()
        transport = transport_class(
            credentials=ga_credentials.AnonymousCredentials(),
            client_info=client_info,
        )
        prep.assert_called_once_with(client_info)
@pytest.mark.asyncio
async def test_transport_close_async():
    """Leaving the async client context manager closes the gRPC channel exactly once."""
    client = ManagedIdentitiesServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="grpc_asyncio",
    )
    with mock.patch.object(type(getattr(client.transport, "grpc_channel")), "close") as close:
        async with client:
            close.assert_not_called()
        close.assert_called_once()
def test_transport_close():
    """Leaving the sync client context manager closes the transport channel exactly once."""
    # Map transport name -> attribute holding the channel whose close() we watch.
    # (Renamed from `transports`, which shadowed the module-level `transports`
    # import inside this function.)
    channel_attrs = {
        "grpc": "_grpc_channel",
    }
    for transport_name, close_name in channel_attrs.items():
        client = ManagedIdentitiesServiceClient(
            credentials=ga_credentials.AnonymousCredentials(),
            transport=transport_name
        )
        with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close:
            with client:
                close.assert_not_called()
            close.assert_called_once()
def test_client_ctx():
    """Using the client as a context manager closes its transport on exit."""
    # Renamed from `transports`, which shadowed the module-level `transports`
    # import inside this function.
    transport_names = [
        'grpc',
    ]
    for transport_name in transport_names:
        client = ManagedIdentitiesServiceClient(
            credentials=ga_credentials.AnonymousCredentials(),
            transport=transport_name
        )
        # Test client calls underlying transport.
        with mock.patch.object(type(client.transport), "close") as close:
            close.assert_not_called()
            with client:
                pass
            close.assert_called()
| 39.37638
| 270
| 0.694548
| 12,048
| 107,025
| 5.92007
| 0.038015
| 0.032177
| 0.045426
| 0.023442
| 0.890655
| 0.863835
| 0.84307
| 0.807964
| 0.787844
| 0.763
| 0
| 0.00461
| 0.22576
| 107,025
| 2,717
| 271
| 39.390872
| 0.856148
| 0.184798
| 0
| 0.684794
| 0
| 0
| 0.084845
| 0.037353
| 0
| 0
| 0
| 0.000368
| 0.133052
| 1
| 0.048046
| false
| 0.019007
| 0.016367
| 0.001056
| 0.06547
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
7c1398bb26de687c5ca421fdb5515e5b1eabd34b
| 29
|
py
|
Python
|
app/Voltage/__init__.py
|
gizmo-cda/g2x
|
841364b8ef4ef4197bbb3682f33ff4ddd539619f
|
[
"MIT"
] | null | null | null |
app/Voltage/__init__.py
|
gizmo-cda/g2x
|
841364b8ef4ef4197bbb3682f33ff4ddd539619f
|
[
"MIT"
] | null | null | null |
app/Voltage/__init__.py
|
gizmo-cda/g2x
|
841364b8ef4ef4197bbb3682f33ff4ddd539619f
|
[
"MIT"
] | null | null | null |
from .voltage import Voltage
| 14.5
| 28
| 0.827586
| 4
| 29
| 6
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.137931
| 29
| 1
| 29
| 29
| 0.96
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
7c68708a9d822811f9df6d4ccf901a82123e48dd
| 19,327
|
py
|
Python
|
3QVeryMuch/lib/lib_creat3Qreport.py
|
philip-shen/note_python
|
db0ad84af25464a22ac52e348960107c81e74a56
|
[
"MIT"
] | null | null | null |
3QVeryMuch/lib/lib_creat3Qreport.py
|
philip-shen/note_python
|
db0ad84af25464a22ac52e348960107c81e74a56
|
[
"MIT"
] | 11
|
2021-02-08T20:45:23.000Z
|
2022-03-12T01:00:11.000Z
|
3QVeryMuch/lib/lib_creat3Qreport.py
|
philip-shen/note_python
|
db0ad84af25464a22ac52e348960107c81e74a56
|
[
"MIT"
] | null | null | null |
# 3/28/2020 Convert Nested JSON to Pandas DataFrame and Flatten List in a Column
# https://gist.github.com/rafaan/4ddc91ae47ea46a46c0b
# 6/25/2020 Initial
# 7/7/2020 Merge test_stort3Qdb.py and test_query3Qtable.py
########################################################
import json
from pandas.io.json import json_normalize
import pandas as pd
import os,sys,time,platform
strabspath=os.path.abspath(__file__)
strdirname=os.path.dirname(strabspath)
str_split=os.path.split(strdirname)
prevdirname=str_split[0]
dirnamelib=os.path.join(prevdirname,"lib")
dirnamelog=os.path.join(prevdirname,"logs")
sys.path.append(dirnamelib)
from logger import logger
from libCSV import *
import csvdataAnalysis as csvdata_analysis
import db_sqlite as db_sqlite
#import func_split_3channel as func_split_3ch
def trim_all_noise_wav(data,opt_verbose='OFF'):
ref_fpath_16K = data["trim_ref_info"]['ref_fpath_16K']
ref_fpath_48K = data["trim_ref_info"]['ref_fpath_48K']
add_key = 'dut'
msg = 'data["trim_ref_info"][\'ref_fpath_16K\']: {}'
logger.info(msg.format(data["trim_ref_info"]['ref_fpath_16K']))
msg = 'data["trim_ref_info"][\'ref_fpath_48K\']: {}'
logger.info(msg.format(data["trim_ref_info"]['ref_fpath_48K']))
for i,_3quest in enumerate(data["3Quest"]):
if (data["3Quest"][i]['label_dut'] != '' and data["3Quest"][i]['label_standmic'] != ''\
and os.path.isfile(data["3Quest"][i]['mic_dut']) \
and os.path.isfile(data["3Quest"][i]['mic_standmic'])):#bypass without labels and dut, standmic wav file
#opt_verbose='ON'
#opt_verbose='OFF'
func_split_3ch.mkdir_folder(data["3Quest"][i]['path_dut'])
msg = 'data["3Quest"][{}][\'mic_dut\']: {}'
logger.info(msg.format(i,data["3Quest"][i]['mic_dut']))
msg = 'data["3Quest"][{}][\'label_dut\']: {}'
logger.info(msg.format(i,data["3Quest"][i]['label_dut']))
start_time, end_time, label = func_split_3ch.load_label_file(data["3Quest"][i]['label_dut'])
msg = 'data["3Quest"][{}][\'gain_dut\']: {}'
logger.info(msg.format(i,data["3Quest"][i]['gain_dut']))
msg = 'data["3Quest"][{}][\'channel_dut\']: {}'
logger.info(msg.format(i,data["3Quest"][i]['channel_dut']))
if (data["3Quest"][i]['channel_dut'] == 1):
func_split_3ch.func_gen_dut_wav_from_mono(data["3Quest"][i]['path_dut'], \
ref_fpath_16K, ref_fpath_48K, \
data["3Quest"][i]['mic_dut'], \
start_time, label, \
data["3Quest"][i]['gain_dut'], \
add_key, opt_verbose)
elif (data["3Quest"][i]['channel_dut'] == 2):
func_split_3ch.func_gen_dut_wav_from_stereo(data["3Quest"][i]['path_dut'], \
ref_fpath_16K, ref_fpath_48K, \
data["3Quest"][i]['mic_dut'], \
start_time, label, \
data["3Quest"][i]['gain_dut'], \
add_key, opt_verbose)
#msg = 'data["3Quest"][{}][\'path_standmic\']: {}'
#logger.info(msg.format(i,data["3Quest"][i]['path_standmic']))
func_split_3ch.mkdir_folder(data["3Quest"][i]['path_standmic'])
msg = 'data["3Quest"][{}][\'mic_standmic\']: {}'
logger.info(msg.format(i,data["3Quest"][i]['mic_standmic']))
msg = 'data["3Quest"][{}][\'label_standmic\']: {}'
logger.info(msg.format(i,data["3Quest"][i]['label_standmic']))
msg = 'data["3Quest"][{}][\'gain_standmic\']: {}'
logger.info(msg.format(i,data["3Quest"][i]['gain_standmic']))
start_time, end_time, label = func_split_3ch.load_label_file(data["3Quest"][i]['label_standmic'])
func_split_3ch.func_gen_standmic_wav(data["3Quest"][i]['path_standmic'], \
ref_fpath_16K, ref_fpath_48K, \
data["3Quest"][i]['mic_standmic'], \
start_time, label, \
data["3Quest"][i]['gain_standmic'], \
opt_verbose)
else:
msg = 'Please check data["3Quest"][{}][\'mic_dut\']:{} if exist or not?'
logger.info(msg.format(i, data["3Quest"][i]['mic_dut']))
msg = 'Please check data["3Quest"][{}][\'mic_standmic\']:{} if exist or not?'
logger.info(msg.format(i, data["3Quest"][i]['mic_standmic']))
def create3Qreport(data, local_time, opt_verbose='OFF'):
for i,_3quest in enumerate(data["3Quest"]):
# Check path if exists or not
if(os.path.isdir(os.path.join(data["3Quest"][i]['path_dut']+'.3quest', 'Results'))):
'''
0th path_dut_3quest:..\logs\boommic_SWout\dut.3quest\Results
1th path_dut_3quest:..\logs\Intermic_SWin\dut.3quest\Results
'''
path_dut_3quest_results = os.path.join(data["3Quest"][i]['path_dut']+'.3quest', 'Results')
msg = '{}th path_dut_3quest_results:{}'
logger.info(msg.format(i, path_dut_3quest_results) )
file_type="*.csv"
ret_list_3questFolder_CsvFiles = walk_in_dir(path_dut_3quest_results,file_type)
local_csvdata_analysis = csvdata_analysis.CSVDataAnalysis(dirnamelog,\
path_dut_3quest_results,\
ret_list_3questFolder_CsvFiles
)
local_csvdata_analysis.read_CSVFile()
tmp_csv=local_csvdata_analysis.write_CSVFile_del1strow()
# copy tmp.csv to output.csv of 3Quest Result Path
local_csvdata_analysis.copy_CSVFile_to3questResultPath(tmp_csv,\
local_csvdata_analysis._3questfolder_csvfiles)
local_csvdata_analysis = csvdata_analysis.PandasDataAnalysis(dirnamelog,\
path_dut_3quest_results,\
ret_list_3questFolder_CsvFiles
)
# get list of all background noise 3Quest value
list_allnoises_3quest_values = local_csvdata_analysis.parse_CSVFile_02()
# prepare dut_foldername, insert_date, insert_time
path_dut = os.path.dirname(data["3Quest"][i]['path_dut'])
str_split=os.path.split(path_dut)
dut_foldername=str_split[1]
insert_date = str(local_time.tm_year)+str("{:02d}".format(local_time.tm_mon) )+str("{:02d}".format(local_time.tm_mday))
insert_time = str("{:02d}".format(local_time.tm_hour))+':'+str("{:02d}".format(local_time.tm_min))+':'+str("{:02d}".format(local_time.tm_sec))
# Ready to store 3Quest data to DB
if platform.system().lower() == 'windows': db_name_3quest = '3QuestDB.db'
if platform.system().lower() == 'linux': db_name_3quest = '3QuestDB_tensor4.db'
path_db = os.path.join(dirnamelog,db_name_3quest)
if opt_verbose.lower() == "on":
msg = "path_db: {}"
logger.info(msg.format(path_db))
localdb_sqlite = db_sqlite.DB_sqlite(path_db,\
dut_foldername,insert_date,insert_time,\
path_dut,\
opt_verbose)
# create a database connection
conn = localdb_sqlite.create_connection()
if conn is not None:
# create projects table
localdb_sqlite.create_all_tables_3Quest(conn)
else:
print("Error! cannot create the database connection.")
# Insert noise type data to DB
localdb_sqlite.insert_noise_file_tosqlite(localdb_sqlite, conn)
# Insert dut path data to DB to prevent 3Quest data redundancy
number_of_rows_3Quest_path = localdb_sqlite.insert_3quest_path_tosqlite(localdb_sqlite, conn)
if number_of_rows_3Quest_path < 1:# Insert if not exists
for list_noises_3quest_values in list_allnoises_3quest_values:
'''
INFO: list_noises_3quest_values:[['pub', 'pub', 'pub', 'pub'], ['SMOS', 'NMOS', 'GMOS', 'delta_SNR'], ['2.840550', '4.154481', '2.914813', '29.453750']]
INFO: list_noises_3quest_values:[['AVG', 'AVG', 'AVG', 'AVG'], ['SMOS', 'NMOS', 'GMOS', 'delta_SNR'], ['3.358136', '4.220144', '3.328679', '24.638061']]
'''
#Insert list_noises_3quest_values data into sqlite
localdb_sqlite.insert_csv_data_tosqlite(list_noises_3quest_values, \
localdb_sqlite, \
conn)
# create dataframe by SQL for excel report
localdb_sqlite.query_3quest_table(localdb_sqlite, conn)
# write dataframe to excel
localdb_sqlite.write_to_excel()
# We can also close the connection if we are done with it.
# Just be sure any changes have been committed or they will be lost.
conn.close()
def test_create3Qreport_wonobgn_reAverage(data, local_time, opt_verbose='OFF'):
for i,_ in enumerate(data["3Quest"]):
# Check path if exists or not
if(os.path.isdir(os.path.join(data["3Quest"][i]['path_dut']+'.3quest', 'Results'))):
# prepare dut_foldername, insert_date, insert_time
path_dut = os.path.dirname(data["3Quest"][i]['path_dut'])
str_split=os.path.split(path_dut)
dut_foldername=str_split[1]
#insert_date = str(local_time.tm_year)+str("{:02d}".format(local_time.tm_mon) )+str("{:02d}".format(local_time.tm_mday))
insert_date = '20200713'
insert_time = str("{:02d}".format(local_time.tm_hour))+':'+str("{:02d}".format(local_time.tm_min))+':'+str("{:02d}".format(local_time.tm_sec))
# Ready to store 3Quest data to DB
if platform.system().lower() == 'windows': db_name_3quest = '3QuestDB.db'
if platform.system().lower() == 'linux': db_name_3quest = '3QuestDB_tensor4.db'
path_db = os.path.join(dirnamelog,db_name_3quest)
if opt_verbose.lower() == "on":
msg = "path_db: {}"
logger.info(msg.format(path_db))
localdb_sqlite = db_sqlite.DB_sqlite(path_db,\
dut_foldername,insert_date,insert_time,\
path_dut,\
opt_verbose)
# create a database connection
conn = localdb_sqlite.create_connection()
if conn is not None:
# create projects table
localdb_sqlite.create_all_tables_3Quest(conn)
else:
print("Error! cannot create the database connection.")
# Insert noise type data to DB
#localdb_sqlite.insert_noise_file_tosqlite(localdb_sqlite, conn)
# Insert dut path data to DB to prevent 3Quest data redundancy
#number_of_rows_3Quest_path = localdb_sqlite.insert_3quest_path_tosqlite(localdb_sqlite, conn)
#if number_of_rows_3Quest_path < 1:# Insert if not exists
# for list_noises_3quest_values in list_allnoises_3quest_values:
# '''
# INFO: list_noises_3quest_values:[['pub', 'pub', 'pub', 'pub'], ['SMOS', 'NMOS', 'GMOS', 'delta_SNR'], ['2.840550', '4.154481', '2.914813', '29.453750']]
# INFO: list_noises_3quest_values:[['AVG', 'AVG', 'AVG', 'AVG'], ['SMOS', 'NMOS', 'GMOS', 'delta_SNR'], ['3.358136', '4.220144', '3.328679', '24.638061']]
# '''
#Insert list_noises_3quest_values data into sqlite
# localdb_sqlite.insert_csv_data_tosqlite(list_noises_3quest_values, \
# localdb_sqlite, \
# conn)
# create dataframe by SQL for excel report
# localdb_sqlite.query_3quest_table_nobgnOnly(localdb_sqlite, conn)
# localdb_sqlite.query_3quest_table_withoutnobgn(localdb_sqlite, conn)
# write dataframe to excel
#localdb_sqlite.write_to_excel()
# test purpose
localdb_sqlite.query_3quest_table_nobgnOnly(localdb_sqlite, conn)
localdb_sqlite.query_3quest_table_withoutnobgn(localdb_sqlite, conn)
path_report_excel = os.path.join(path_dut, dut_foldername+'.xlsx')
df_3quest_table_excel= localdb_sqlite.df_query_3quest_table_noise.iloc [0:11, 1:8]
localdb_sqlite.write_to_excel_fromdata(path_report_excel,df_3quest_table_excel)
# We can also close the connection if we are done with it.
# Just be sure any changes have been committed or they will be lost.
conn.close()
def create3Qreport_wonobgn_reAverage(data, local_time, opt_verbose='OFF'):
for i,_3quest in enumerate(data["3Quest"]):
# Check path if exists or not
if(os.path.isdir(os.path.join(data["3Quest"][i]['path_dut']+'.3quest', 'Results'))):
'''
0th path_dut_3quest:..\logs\boommic_SWout\dut.3quest\Results
1th path_dut_3quest:..\logs\Intermic_SWin\dut.3quest\Results
'''
path_dut_3quest_results = os.path.join(data["3Quest"][i]['path_dut']+'.3quest', 'Results')
msg = '{}th path_dut_3quest_results:{}'
logger.info(msg.format(i, path_dut_3quest_results) )
file_type="*.csv"
ret_list_3questFolder_CsvFiles = walk_in_dir(path_dut_3quest_results,file_type)
local_csvdata_analysis = csvdata_analysis.CSVDataAnalysis(dirnamelog,\
path_dut_3quest_results,\
ret_list_3questFolder_CsvFiles
)
local_csvdata_analysis.read_CSVFile()
tmp_csv=local_csvdata_analysis.write_CSVFile_del1strow()
# copy tmp.csv to output.csv of 3Quest Result Path
local_csvdata_analysis.copy_CSVFile_to3questResultPath(tmp_csv,\
local_csvdata_analysis._3questfolder_csvfiles)
local_csvdata_analysis = csvdata_analysis.PandasDataAnalysis(dirnamelog,\
path_dut_3quest_results,\
ret_list_3questFolder_CsvFiles
)
# get list of all background noise 3Quest value
list_allnoises_3quest_values = local_csvdata_analysis.parse_CSVFile_02()
# prepare dut_foldername, insert_date, insert_time
path_dut = os.path.dirname(data["3Quest"][i]['path_dut'])
str_split=os.path.split(path_dut)
dut_foldername=str_split[1]
insert_date = str(local_time.tm_year)+str("{:02d}".format(local_time.tm_mon) )+str("{:02d}".format(local_time.tm_mday))
insert_time = str("{:02d}".format(local_time.tm_hour))+':'+str("{:02d}".format(local_time.tm_min))+':'+str("{:02d}".format(local_time.tm_sec))
# Ready to store 3Quest data to DB
if platform.system().lower() == 'windows': db_name_3quest = '3QuestDB.db'
if platform.system().lower() == 'linux': db_name_3quest = '3QuestDB_tensor4.db'
path_db = os.path.join(dirnamelog,db_name_3quest)
if opt_verbose.lower() == "on":
msg = "path_db: {}"
logger.info(msg.format(path_db))
localdb_sqlite = db_sqlite.DB_sqlite(path_db,\
dut_foldername,insert_date,insert_time,\
path_dut,\
opt_verbose)
# create a database connection
conn = localdb_sqlite.create_connection()
if conn is not None:
# create projects table
localdb_sqlite.create_all_tables_3Quest(conn)
else:
print("Error! cannot create the database connection.")
# Insert noise type data to DB
localdb_sqlite.insert_noise_file_tosqlite(localdb_sqlite, conn)
# Insert dut path data to DB to prevent 3Quest data redundancy
number_of_rows_3Quest_path = localdb_sqlite.insert_3quest_path_tosqlite(localdb_sqlite, conn)
if number_of_rows_3Quest_path < 1:# Insert if not exists
for list_noises_3quest_values in list_allnoises_3quest_values:
'''
INFO: list_noises_3quest_values:[['pub', 'pub', 'pub', 'pub'], ['SMOS', 'NMOS', 'GMOS', 'delta_SNR'], ['2.840550', '4.154481', '2.914813', '29.453750']]
INFO: list_noises_3quest_values:[['AVG', 'AVG', 'AVG', 'AVG'], ['SMOS', 'NMOS', 'GMOS', 'delta_SNR'], ['3.358136', '4.220144', '3.328679', '24.638061']]
'''
#Insert list_noises_3quest_values data into sqlite
localdb_sqlite.insert_csv_data_tosqlite(list_noises_3quest_values, \
localdb_sqlite, \
conn)
# create dataframe by SQL for excel report
localdb_sqlite.query_3quest_table_nobgnOnly(localdb_sqlite, conn)
localdb_sqlite.query_3quest_table_withoutnobgn(localdb_sqlite, conn)
path_report_excel = os.path.join(path_dut, dut_foldername+'.xlsx')
# write dataframe to excel
df_3quest_table_excel= localdb_sqlite.df_query_3quest_table_noise.iloc [0:11, 1:8]
localdb_sqlite.write_to_excel_fromdata(path_report_excel,df_3quest_table_excel)
# We can also close the connection if we are done with it.
# Just be sure any changes have been committed or they will be lost.
conn.close()
| 51.265252
| 173
| 0.550008
| 2,151
| 19,327
| 4.635053
| 0.116225
| 0.051153
| 0.040822
| 0.032397
| 0.905015
| 0.880542
| 0.875426
| 0.85667
| 0.845035
| 0.812538
| 0
| 0.036768
| 0.331557
| 19,327
| 376
| 174
| 51.401596
| 0.734964
| 0.175092
| 0
| 0.696078
| 0
| 0
| 0.10842
| 0.006649
| 0
| 0
| 0
| 0
| 0
| 1
| 0.019608
| false
| 0
| 0.039216
| 0
| 0.058824
| 0.014706
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
7cad683cac5ccd06ecc87e158c9648b31fcaeb5c
| 25,629
|
py
|
Python
|
test/test_ip_mcast.py
|
LabNConsulting/vpp-marvell
|
427808849f9db408ef76b1b96df3c56b1c3d0bbf
|
[
"Apache-2.0"
] | 9
|
2018-07-25T07:43:09.000Z
|
2022-03-11T09:55:03.000Z
|
test/test_ip_mcast.py
|
LabNConsulting/vpp-marvell
|
427808849f9db408ef76b1b96df3c56b1c3d0bbf
|
[
"Apache-2.0"
] | null | null | null |
test/test_ip_mcast.py
|
LabNConsulting/vpp-marvell
|
427808849f9db408ef76b1b96df3c56b1c3d0bbf
|
[
"Apache-2.0"
] | 15
|
2018-05-07T04:56:40.000Z
|
2021-11-21T09:06:29.000Z
|
#!/usr/bin/env python
import unittest
from framework import VppTestCase, VppTestRunner
from vpp_sub_interface import VppSubInterface, VppDot1QSubint, VppDot1ADSubint
from vpp_ip_route import VppIpMRoute, VppMRoutePath, VppMFibSignal, \
MRouteItfFlags, MRouteEntryFlags, VppIpTable
from scapy.packet import Raw
from scapy.layers.l2 import Ether
from scapy.layers.inet import IP, UDP, getmacbyip, ICMP
from scapy.layers.inet6 import IPv6, getmacbyip6
from util import ppp
#
# The number of packets sent is set to 90 so that when we replicate more than 3
# times, which we do for some entries, we will generate more than 256 packets
# to the next node in the VLIB graph. Thus we are testing the code's
# correctness handling this over-flow
#
N_PKTS_IN_STREAM = 90
class TestMFIB(VppTestCase):
""" MFIB Test Case """
def setUp(self):
super(TestMFIB, self).setUp()
def test_mfib(self):
""" MFIB Unit Tests """
error = self.vapi.cli("test mfib")
if error:
self.logger.critical(error)
self.assertEqual(error.find("Failed"), -1)
class TestIPMcast(VppTestCase):
""" IP Multicast Test Case """
def setUp(self):
super(TestIPMcast, self).setUp()
# create 8 pg interfaces
self.create_pg_interfaces(range(9))
# setup interfaces
for i in self.pg_interfaces[:8]:
i.admin_up()
i.config_ip4()
i.config_ip6()
i.resolve_arp()
i.resolve_ndp()
# one more in a vrf
tbl4 = VppIpTable(self, 10)
tbl4.add_vpp_config()
self.pg8.set_table_ip4(10)
self.pg8.config_ip4()
tbl6 = VppIpTable(self, 10, is_ip6=1)
tbl6.add_vpp_config()
self.pg8.set_table_ip6(10)
self.pg8.config_ip6()
def tearDown(self):
for i in self.pg_interfaces:
i.unconfig_ip4()
i.unconfig_ip6()
i.admin_down()
self.pg8.set_table_ip4(0)
self.pg8.set_table_ip6(0)
super(TestIPMcast, self).tearDown()
def create_stream_ip4(self, src_if, src_ip, dst_ip, payload_size=0):
pkts = []
# default to small packet sizes
p = (Ether(dst=src_if.local_mac, src=src_if.remote_mac) /
IP(src=src_ip, dst=dst_ip) /
UDP(sport=1234, dport=1234))
if not payload_size:
payload_size = 64 - len(p)
p = p / Raw('\xa5' * payload_size)
for i in range(0, N_PKTS_IN_STREAM):
pkts.append(p)
return pkts
def create_stream_ip6(self, src_if, src_ip, dst_ip):
pkts = []
for i in range(0, N_PKTS_IN_STREAM):
info = self.create_packet_info(src_if, src_if)
payload = self.info_to_payload(info)
p = (Ether(dst=src_if.local_mac, src=src_if.remote_mac) /
IPv6(src=src_ip, dst=dst_ip) /
UDP(sport=1234, dport=1234) /
Raw(payload))
info.data = p.copy()
pkts.append(p)
return pkts
def verify_filter(self, capture, sent):
if not len(capture) == len(sent):
# filter out any IPv6 RAs from the captur
for p in capture:
if (p.haslayer(IPv6)):
capture.remove(p)
return capture
def verify_capture_ip4(self, rx_if, sent):
rxd = rx_if.get_capture(len(sent))
try:
capture = self.verify_filter(rxd, sent)
self.assertEqual(len(capture), len(sent))
for i in range(len(capture)):
tx = sent[i]
rx = capture[i]
eth = rx[Ether]
self.assertEqual(eth.type, 0x800)
tx_ip = tx[IP]
rx_ip = rx[IP]
# check the MAC address on the RX'd packet is correctly formed
self.assertEqual(eth.dst, getmacbyip(rx_ip.dst))
self.assertEqual(rx_ip.src, tx_ip.src)
self.assertEqual(rx_ip.dst, tx_ip.dst)
# IP processing post pop has decremented the TTL
self.assertEqual(rx_ip.ttl + 1, tx_ip.ttl)
except:
raise
def verify_capture_ip6(self, rx_if, sent):
capture = rx_if.get_capture(len(sent))
self.assertEqual(len(capture), len(sent))
for i in range(len(capture)):
tx = sent[i]
rx = capture[i]
eth = rx[Ether]
self.assertEqual(eth.type, 0x86DD)
tx_ip = tx[IPv6]
rx_ip = rx[IPv6]
# check the MAC address on the RX'd packet is correctly formed
self.assertEqual(eth.dst, getmacbyip6(rx_ip.dst))
self.assertEqual(rx_ip.src, tx_ip.src)
self.assertEqual(rx_ip.dst, tx_ip.dst)
# IP processing post pop has decremented the TTL
self.assertEqual(rx_ip.hlim + 1, tx_ip.hlim)
def test_ip_mcast(self):
""" IP Multicast Replication """
#
# a stream that matches the default route. gets dropped.
#
self.vapi.cli("clear trace")
tx = self.create_stream_ip4(self.pg0, "1.1.1.1", "232.1.1.1")
self.pg0.add_stream(tx)
self.pg_enable_capture(self.pg_interfaces)
self.pg_start()
self.pg0.assert_nothing_captured(
remark="IP multicast packets forwarded on default route")
#
# A (*,G).
# one accepting interface, pg0, 7 forwarding interfaces
# many forwarding interfaces test the case where the replicare DPO
# needs to use extra cache lines for the buckets.
#
route_232_1_1_1 = VppIpMRoute(
self,
"0.0.0.0",
"232.1.1.1", 32,
MRouteEntryFlags.MFIB_ENTRY_FLAG_NONE,
[VppMRoutePath(self.pg0.sw_if_index,
MRouteItfFlags.MFIB_ITF_FLAG_ACCEPT),
VppMRoutePath(self.pg1.sw_if_index,
MRouteItfFlags.MFIB_ITF_FLAG_FORWARD),
VppMRoutePath(self.pg2.sw_if_index,
MRouteItfFlags.MFIB_ITF_FLAG_FORWARD),
VppMRoutePath(self.pg3.sw_if_index,
MRouteItfFlags.MFIB_ITF_FLAG_FORWARD),
VppMRoutePath(self.pg4.sw_if_index,
MRouteItfFlags.MFIB_ITF_FLAG_FORWARD),
VppMRoutePath(self.pg5.sw_if_index,
MRouteItfFlags.MFIB_ITF_FLAG_FORWARD),
VppMRoutePath(self.pg6.sw_if_index,
MRouteItfFlags.MFIB_ITF_FLAG_FORWARD),
VppMRoutePath(self.pg7.sw_if_index,
MRouteItfFlags.MFIB_ITF_FLAG_FORWARD)])
route_232_1_1_1.add_vpp_config()
#
# An (S,G).
# one accepting interface, pg0, 2 forwarding interfaces
#
route_1_1_1_1_232_1_1_1 = VppIpMRoute(
self,
"1.1.1.1",
"232.1.1.1", 64,
MRouteEntryFlags.MFIB_ENTRY_FLAG_NONE,
[VppMRoutePath(self.pg0.sw_if_index,
MRouteItfFlags.MFIB_ITF_FLAG_ACCEPT),
VppMRoutePath(self.pg1.sw_if_index,
MRouteItfFlags.MFIB_ITF_FLAG_FORWARD),
VppMRoutePath(self.pg2.sw_if_index,
MRouteItfFlags.MFIB_ITF_FLAG_FORWARD)])
route_1_1_1_1_232_1_1_1.add_vpp_config()
#
# An (*,G/m).
# one accepting interface, pg0, 1 forwarding interfaces
#
route_232 = VppIpMRoute(
self,
"0.0.0.0",
"232.0.0.0", 8,
MRouteEntryFlags.MFIB_ENTRY_FLAG_NONE,
[VppMRoutePath(self.pg0.sw_if_index,
MRouteItfFlags.MFIB_ITF_FLAG_ACCEPT),
VppMRoutePath(self.pg1.sw_if_index,
MRouteItfFlags.MFIB_ITF_FLAG_FORWARD)])
route_232.add_vpp_config()
#
# a stream that matches the route for (1.1.1.1,232.1.1.1)
# small packets
#
self.vapi.cli("clear trace")
tx = self.create_stream_ip4(self.pg0, "1.1.1.1", "232.1.1.1")
self.pg0.add_stream(tx)
self.pg_enable_capture(self.pg_interfaces)
self.pg_start()
# We expect replications on Pg1->7
self.verify_capture_ip4(self.pg1, tx)
self.verify_capture_ip4(self.pg2, tx)
# no replications on Pg0
self.pg0.assert_nothing_captured(
remark="IP multicast packets forwarded on PG0")
self.pg3.assert_nothing_captured(
remark="IP multicast packets forwarded on PG3")
#
# a stream that matches the route for (1.1.1.1,232.1.1.1)
# large packets
#
self.vapi.cli("clear trace")
tx = self.create_stream_ip4(self.pg0, "1.1.1.1", "232.1.1.1",
payload_size=1024)
self.pg0.add_stream(tx)
self.pg_enable_capture(self.pg_interfaces)
self.pg_start()
# We expect replications on Pg1->7
self.verify_capture_ip4(self.pg1, tx)
self.verify_capture_ip4(self.pg2, tx)
# no replications on Pg0
self.pg0.assert_nothing_captured(
remark="IP multicast packets forwarded on PG0")
self.pg3.assert_nothing_captured(
remark="IP multicast packets forwarded on PG3")
#
# a stream that matches the route for (*,232.0.0.0/8)
# Send packets with the 9th bit set so we test the correct clearing
# of that bit in the mac rewrite
#
self.vapi.cli("clear trace")
tx = self.create_stream_ip4(self.pg0, "1.1.1.1", "232.255.255.255")
self.pg0.add_stream(tx)
self.pg_enable_capture(self.pg_interfaces)
self.pg_start()
# We expect replications on Pg1 only
self.verify_capture_ip4(self.pg1, tx)
# no replications on Pg0, Pg2 not Pg3
self.pg0.assert_nothing_captured(
remark="IP multicast packets forwarded on PG0")
self.pg2.assert_nothing_captured(
remark="IP multicast packets forwarded on PG2")
self.pg3.assert_nothing_captured(
remark="IP multicast packets forwarded on PG3")
#
# a stream that matches the route for (*,232.1.1.1)
#
self.vapi.cli("clear trace")
tx = self.create_stream_ip4(self.pg0, "1.1.1.2", "232.1.1.1")
self.pg0.add_stream(tx)
self.pg_enable_capture(self.pg_interfaces)
self.pg_start()
# We expect replications on Pg1, 2, 3.
self.verify_capture_ip4(self.pg1, tx)
self.verify_capture_ip4(self.pg2, tx)
self.verify_capture_ip4(self.pg3, tx)
self.verify_capture_ip4(self.pg4, tx)
self.verify_capture_ip4(self.pg5, tx)
self.verify_capture_ip4(self.pg6, tx)
self.verify_capture_ip4(self.pg7, tx)
route_232_1_1_1.remove_vpp_config()
route_1_1_1_1_232_1_1_1.remove_vpp_config()
route_232.remove_vpp_config()
def test_ip6_mcast(self):
""" IPv6 Multicast Replication """
#
# a stream that matches the default route. gets dropped.
#
self.vapi.cli("clear trace")
tx = self.create_stream_ip6(self.pg0, "2001::1", "ff01::1")
self.pg0.add_stream(tx)
self.pg_enable_capture(self.pg_interfaces)
self.pg_start()
self.pg0.assert_nothing_captured(
remark="IPv6 multicast packets forwarded on default route")
#
# A (*,G).
# one accepting interface, pg0, 3 forwarding interfaces
#
route_ff01_1 = VppIpMRoute(
self,
"::",
"ff01::1", 128,
MRouteEntryFlags.MFIB_ENTRY_FLAG_NONE,
[VppMRoutePath(self.pg0.sw_if_index,
MRouteItfFlags.MFIB_ITF_FLAG_ACCEPT),
VppMRoutePath(self.pg1.sw_if_index,
MRouteItfFlags.MFIB_ITF_FLAG_FORWARD),
VppMRoutePath(self.pg2.sw_if_index,
MRouteItfFlags.MFIB_ITF_FLAG_FORWARD),
VppMRoutePath(self.pg3.sw_if_index,
MRouteItfFlags.MFIB_ITF_FLAG_FORWARD)],
is_ip6=1)
route_ff01_1.add_vpp_config()
#
# An (S,G).
# one accepting interface, pg0, 2 forwarding interfaces
#
route_2001_ff01_1 = VppIpMRoute(
self,
"2001::1",
"ff01::1", 256,
MRouteEntryFlags.MFIB_ENTRY_FLAG_NONE,
[VppMRoutePath(self.pg0.sw_if_index,
MRouteItfFlags.MFIB_ITF_FLAG_ACCEPT),
VppMRoutePath(self.pg1.sw_if_index,
MRouteItfFlags.MFIB_ITF_FLAG_FORWARD),
VppMRoutePath(self.pg2.sw_if_index,
MRouteItfFlags.MFIB_ITF_FLAG_FORWARD)],
is_ip6=1)
route_2001_ff01_1.add_vpp_config()
#
# An (*,G/m).
# one accepting interface, pg0, 1 forwarding interface
#
route_ff01 = VppIpMRoute(
self,
"::",
"ff01::", 16,
MRouteEntryFlags.MFIB_ENTRY_FLAG_NONE,
[VppMRoutePath(self.pg0.sw_if_index,
MRouteItfFlags.MFIB_ITF_FLAG_ACCEPT),
VppMRoutePath(self.pg1.sw_if_index,
MRouteItfFlags.MFIB_ITF_FLAG_FORWARD)],
is_ip6=1)
route_ff01.add_vpp_config()
#
# a stream that matches the route for (*, ff01::/16)
#
self.vapi.cli("clear trace")
tx = self.create_stream_ip6(self.pg0, "2002::1", "ff01:2::255")
self.pg0.add_stream(tx)
self.pg_enable_capture(self.pg_interfaces)
self.pg_start()
# We expect replications on Pg1
self.verify_capture_ip6(self.pg1, tx)
# no replications on Pg0, Pg3
self.pg0.assert_nothing_captured(
remark="IP multicast packets forwarded on PG0")
self.pg2.assert_nothing_captured(
remark="IP multicast packets forwarded on PG2")
self.pg3.assert_nothing_captured(
remark="IP multicast packets forwarded on PG3")
#
# Bounce the interface and it should still work
#
self.pg1.admin_down()
self.pg0.add_stream(tx)
self.pg_enable_capture(self.pg_interfaces)
self.pg_start()
self.pg1.assert_nothing_captured(
remark="IP multicast packets forwarded on down PG1")
self.pg1.admin_up()
self.pg0.add_stream(tx)
self.pg_enable_capture(self.pg_interfaces)
self.pg_start()
self.verify_capture_ip6(self.pg1, tx)
#
# a stream that matches the route for (*,ff01::1)
#
self.vapi.cli("clear trace")
tx = self.create_stream_ip6(self.pg0, "2002::2", "ff01::1")
self.pg0.add_stream(tx)
self.pg_enable_capture(self.pg_interfaces)
self.pg_start()
# We expect replications on Pg1, 2, 3.
self.verify_capture_ip6(self.pg1, tx)
self.verify_capture_ip6(self.pg2, tx)
self.verify_capture_ip6(self.pg3, tx)
# no replications on Pg0
self.pg0.assert_nothing_captured(
remark="IPv6 multicast packets forwarded on PG0")
#
# a stream that matches the route for (2001::1, ff00::1)
#
self.vapi.cli("clear trace")
tx = self.create_stream_ip6(self.pg0, "2001::1", "ff01::1")
self.pg0.add_stream(tx)
self.pg_enable_capture(self.pg_interfaces)
self.pg_start()
# We expect replications on Pg1, 2,
self.verify_capture_ip6(self.pg1, tx)
self.verify_capture_ip6(self.pg2, tx)
# no replications on Pg0, Pg3
self.pg0.assert_nothing_captured(
remark="IP multicast packets forwarded on PG0")
self.pg3.assert_nothing_captured(
remark="IP multicast packets forwarded on PG3")
route_ff01.remove_vpp_config()
route_ff01_1.remove_vpp_config()
route_2001_ff01_1.remove_vpp_config()
def _mcast_connected_send_stream(self, dst_ip):
self.vapi.cli("clear trace")
tx = self.create_stream_ip4(self.pg0,
self.pg0.remote_ip4,
dst_ip)
self.pg0.add_stream(tx)
self.pg_enable_capture(self.pg_interfaces)
self.pg_start()
# We expect replications on Pg1.
self.verify_capture_ip4(self.pg1, tx)
return tx
def test_ip_mcast_connected(self):
""" IP Multicast Connected Source check """
#
# A (*,G).
# one accepting interface, pg0, 1 forwarding interfaces
#
route_232_1_1_1 = VppIpMRoute(
self,
"0.0.0.0",
"232.1.1.1", 32,
MRouteEntryFlags.MFIB_ENTRY_FLAG_NONE,
[VppMRoutePath(self.pg0.sw_if_index,
MRouteItfFlags.MFIB_ITF_FLAG_ACCEPT),
VppMRoutePath(self.pg1.sw_if_index,
MRouteItfFlags.MFIB_ITF_FLAG_FORWARD)])
route_232_1_1_1.add_vpp_config()
route_232_1_1_1.update_entry_flags(
MRouteEntryFlags.MFIB_ENTRY_FLAG_CONNECTED)
#
# Now the (*,G) is present, send from connected source
#
tx = self._mcast_connected_send_stream("232.1.1.1")
#
# Constrct a representation of the signal we expect on pg0
#
signal_232_1_1_1_itf_0 = VppMFibSignal(self,
route_232_1_1_1,
self.pg0.sw_if_index,
tx[0])
#
# read the only expected signal
#
signals = self.vapi.mfib_signal_dump()
self.assertEqual(1, len(signals))
signal_232_1_1_1_itf_0.compare(signals[0])
#
# reading the signal allows for the generation of another
# so send more packets and expect the next signal
#
tx = self._mcast_connected_send_stream("232.1.1.1")
signals = self.vapi.mfib_signal_dump()
self.assertEqual(1, len(signals))
signal_232_1_1_1_itf_0.compare(signals[0])
#
# A Second entry with connected check
# one accepting interface, pg0, 1 forwarding interfaces
#
route_232_1_1_2 = VppIpMRoute(
self,
"0.0.0.0",
"232.1.1.2", 32,
MRouteEntryFlags.MFIB_ENTRY_FLAG_NONE,
[VppMRoutePath(self.pg0.sw_if_index,
MRouteItfFlags.MFIB_ITF_FLAG_ACCEPT),
VppMRoutePath(self.pg1.sw_if_index,
MRouteItfFlags.MFIB_ITF_FLAG_FORWARD)])
route_232_1_1_2.add_vpp_config()
route_232_1_1_2.update_entry_flags(
MRouteEntryFlags.MFIB_ENTRY_FLAG_CONNECTED)
#
# Send traffic to both entries. One read should net us two signals
#
signal_232_1_1_2_itf_0 = VppMFibSignal(self,
route_232_1_1_2,
self.pg0.sw_if_index,
tx[0])
tx = self._mcast_connected_send_stream("232.1.1.1")
tx2 = self._mcast_connected_send_stream("232.1.1.2")
#
# read the only expected signal
#
signals = self.vapi.mfib_signal_dump()
self.assertEqual(2, len(signals))
signal_232_1_1_1_itf_0.compare(signals[1])
signal_232_1_1_2_itf_0.compare(signals[0])
route_232_1_1_1.remove_vpp_config()
route_232_1_1_2.remove_vpp_config()
def test_ip_mcast_signal(self):
""" IP Multicast Signal """
#
# A (*,G).
# one accepting interface, pg0, 1 forwarding interfaces
#
route_232_1_1_1 = VppIpMRoute(
self,
"0.0.0.0",
"232.1.1.1", 32,
MRouteEntryFlags.MFIB_ENTRY_FLAG_NONE,
[VppMRoutePath(self.pg0.sw_if_index,
MRouteItfFlags.MFIB_ITF_FLAG_ACCEPT),
VppMRoutePath(self.pg1.sw_if_index,
MRouteItfFlags.MFIB_ITF_FLAG_FORWARD)])
route_232_1_1_1.add_vpp_config()
route_232_1_1_1.update_entry_flags(
MRouteEntryFlags.MFIB_ENTRY_FLAG_SIGNAL)
#
# Now the (*,G) is present, send from connected source
#
tx = self._mcast_connected_send_stream("232.1.1.1")
#
# Constrct a representation of the signal we expect on pg0
#
signal_232_1_1_1_itf_0 = VppMFibSignal(self,
route_232_1_1_1,
self.pg0.sw_if_index,
tx[0])
#
# read the only expected signal
#
signals = self.vapi.mfib_signal_dump()
self.assertEqual(1, len(signals))
signal_232_1_1_1_itf_0.compare(signals[0])
#
# reading the signal allows for the generation of another
# so send more packets and expect the next signal
#
tx = self._mcast_connected_send_stream("232.1.1.1")
signals = self.vapi.mfib_signal_dump()
self.assertEqual(1, len(signals))
signal_232_1_1_1_itf_0.compare(signals[0])
#
# Set the negate-signal on the accepting interval - the signals
# should stop
#
route_232_1_1_1.update_path_flags(
self.pg0.sw_if_index,
(MRouteItfFlags.MFIB_ITF_FLAG_ACCEPT |
MRouteItfFlags.MFIB_ITF_FLAG_NEGATE_SIGNAL))
self.vapi.cli("clear trace")
tx = self._mcast_connected_send_stream("232.1.1.1")
signals = self.vapi.mfib_signal_dump()
self.assertEqual(0, len(signals))
#
# Clear the SIGNAL flag on the entry and the signals should
# come back since the interface is still NEGATE-SIGNAL
#
route_232_1_1_1.update_entry_flags(
MRouteEntryFlags.MFIB_ENTRY_FLAG_NONE)
tx = self._mcast_connected_send_stream("232.1.1.1")
signals = self.vapi.mfib_signal_dump()
self.assertEqual(1, len(signals))
signal_232_1_1_1_itf_0.compare(signals[0])
#
# Lastly remove the NEGATE-SIGNAL from the interface and the
# signals should stop
#
route_232_1_1_1.update_path_flags(self.pg0.sw_if_index,
MRouteItfFlags.MFIB_ITF_FLAG_ACCEPT)
tx = self._mcast_connected_send_stream("232.1.1.1")
signals = self.vapi.mfib_signal_dump()
self.assertEqual(0, len(signals))
#
# Cleanup
#
route_232_1_1_1.remove_vpp_config()
def test_ip_mcast_vrf(self):
    """ IP Multicast Replication in non-default table"""
    #
    # An (S,G) entry in table 10:
    # accept on pg8, replicate out of pg1 and pg2.
    #
    fwd_flag = MRouteItfFlags.MFIB_ITF_FLAG_FORWARD
    paths = [VppMRoutePath(self.pg8.sw_if_index,
                           MRouteItfFlags.MFIB_ITF_FLAG_ACCEPT)]
    paths.extend(VppMRoutePath(itf.sw_if_index, fwd_flag)
                 for itf in (self.pg1, self.pg2))
    mroute = VppIpMRoute(
        self,
        "1.1.1.1",
        "232.1.1.1", 64,
        MRouteEntryFlags.MFIB_ENTRY_FLAG_NONE,
        paths,
        table_id=10)
    mroute.add_vpp_config()

    #
    # a stream of small packets matching (1.1.1.1, 232.1.1.1),
    # sent in on the accepting interface
    #
    self.vapi.cli("clear trace")
    tx = self.create_stream_ip4(self.pg8, "1.1.1.1", "232.1.1.1")
    self.pg8.add_stream(tx)

    self.pg_enable_capture(self.pg_interfaces)
    self.pg_start()

    # one replica expected on each forwarding interface
    self.verify_capture_ip4(self.pg1, tx)
    self.verify_capture_ip4(self.pg2, tx)
def test_ip6_mcast_vrf(self):
    """ IPv6 Multicast Replication in non-default table"""
    #
    # An (S,G) entry in table 10:
    # accept on pg8, replicate out of pg1 and pg2.
    #
    fwd_flag = MRouteItfFlags.MFIB_ITF_FLAG_FORWARD
    paths = [VppMRoutePath(self.pg8.sw_if_index,
                           MRouteItfFlags.MFIB_ITF_FLAG_ACCEPT)]
    paths.extend(VppMRoutePath(itf.sw_if_index, fwd_flag)
                 for itf in (self.pg1, self.pg2))
    mroute = VppIpMRoute(
        self,
        "2001::1",
        "ff01::1", 256,
        MRouteEntryFlags.MFIB_ENTRY_FLAG_NONE,
        paths,
        table_id=10,
        is_ip6=1)
    mroute.add_vpp_config()

    #
    # a stream matching the (2001::1, ff01::1) route,
    # sent in on the accepting interface
    #
    self.vapi.cli("clear trace")
    tx = self.create_stream_ip6(self.pg8, "2001::1", "ff01::1")
    self.pg8.add_stream(tx)

    self.pg_enable_capture(self.pg_interfaces)
    self.pg_start()

    # one replica expected on each forwarding interface
    self.verify_capture_ip6(self.pg1, tx)
    self.verify_capture_ip6(self.pg2, tx)
# Allow the test module to be executed directly under the VPP test runner.
if __name__ == '__main__':
    unittest.main(testRunner=VppTestRunner)
| 33.678055
| 79
| 0.578329
| 3,240
| 25,629
| 4.307099
| 0.093827
| 0.022644
| 0.017628
| 0.021928
| 0.804156
| 0.792476
| 0.768685
| 0.743891
| 0.730276
| 0.712003
| 0
| 0.057963
| 0.330875
| 25,629
| 760
| 80
| 33.722368
| 0.755788
| 0.150572
| 0
| 0.662971
| 0
| 0
| 0.055916
| 0
| 0
| 0
| 0.00051
| 0
| 0.08204
| 1
| 0.035477
| false
| 0
| 0.019956
| 0
| 0.068736
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
7ce22399ba8b23097688953de356af8ae6fa91fc
| 9,351
|
py
|
Python
|
martypy/ClientGeneric.py
|
robotical/martypy
|
afc1f89d471875ca1beb775f375438f97fc33679
|
[
"Apache-2.0"
] | 8
|
2017-08-02T11:31:50.000Z
|
2022-01-05T14:36:53.000Z
|
martypy/ClientGeneric.py
|
robotical/martypy
|
afc1f89d471875ca1beb775f375438f97fc33679
|
[
"Apache-2.0"
] | 17
|
2017-07-24T22:39:43.000Z
|
2022-01-05T14:41:20.000Z
|
martypy/ClientGeneric.py
|
robotical/martypy
|
afc1f89d471875ca1beb775f375438f97fc33679
|
[
"Apache-2.0"
] | 5
|
2017-11-12T08:51:18.000Z
|
2020-11-27T09:28:46.000Z
|
from abc import ABC, abstractmethod
from typing import Callable, Dict, List, Optional, Union, Tuple
from warnings import warn
class ClientGeneric(ABC):
    """
    Abstract command surface shared by all Marty client implementations.

    Concrete subclasses provide the transport-specific behaviour (serial,
    socket, ...); this class defines the API every client must implement,
    shared lookup tables, and the blocking-mode bookkeeping.

    Note: the abstract method bodies below are inert placeholders; their
    return values now match their annotations so accidental ``super()``
    calls yield a value of the advertised type.
    """

    # Mapping of user-facing direction names to protocol side codes.
    SIDE_CODES = {
        'left'    : 0,
        'right'   : 1,
        'forward' : 2,
        'back'    : 3,
        'auto'    : 0,
    }

    # Mapping of friendly eye-pose names to the robot's animation identifiers.
    EYE_POSES = {
        'angry'   : 'eyesAngry',
        'excited' : 'eyesExcited',
        'normal'  : 'eyesNormal',
        'wide'    : 'eyesWide',
        'wiggle'  : 'wiggleEyes'
    }

    # Standard message for features a given Marty hardware revision lacks.
    NOT_IMPLEMENTED = "Unfortunately this Marty doesn't do that"

    def __init__(self, blocking: Union[bool, None], *args, **kwargs):
        """
        Initialise the client.

        Args:
            blocking: default blocking mode for commands; ``None`` means
                blocking (``True``).
            *args, **kwargs: unexpected extras are warned about and ignored
                (stacklevel=4 points the warning at the user's call site).
        """
        super().__init__()
        if len(args) > 0:
            warn(f"Ignoring unexpected constructor argument(s): {args}", stacklevel=4)
        if len(kwargs) > 0:
            warn(f"Ignoring unexpected constructor argument(s): {kwargs}", stacklevel=4)
        self._is_blocking: bool = True if blocking is None else blocking

    @classmethod
    def dict_merge(cls, *dicts):
        '''
        Merge all provided dicts into one dict

        Later dicts override earlier ones on key collisions.

        Raises:
            ValueError: if any argument is not a dict.
        '''
        merged = {}
        for d in dicts:
            if not isinstance(d, dict):
                raise ValueError('Value should be a dict')
            merged.update(d)
        return merged

    # --- connection lifecycle -------------------------------------------

    @abstractmethod
    def start(self):
        pass

    @abstractmethod
    def close(self):
        pass

    # --- blocking-mode handling -----------------------------------------

    def is_blocking(self, local_override: Optional[bool] = None) -> bool:
        """
        Check if this client is blocking, optionally taking into account a local
        blocking override flag.
        """
        if local_override is not None:
            return local_override
        return self._is_blocking

    def set_blocking(self, blocking: bool):
        """Set the client-wide default blocking mode."""
        self._is_blocking = blocking

    @abstractmethod
    def wait_if_required(self, expected_wait_ms: int, blocking_override: Union[bool, None]):
        raise NotImplementedError()

    # --- motion commands -------------------------------------------------

    @abstractmethod
    def hello(self) -> bool:
        return False

    @abstractmethod
    def get_ready(self) -> bool:
        return False

    @abstractmethod
    def stand_straight(self, move_time: int) -> bool:
        return False

    @abstractmethod
    def discover(self) -> List[str]:
        return []

    @abstractmethod
    def stop(self, stop_type: str, stopCode: int) -> bool:
        return False

    @abstractmethod
    def resume(self) -> bool:
        return False

    @abstractmethod
    def hold_position(self, hold_time: int) -> bool:
        return False

    @abstractmethod
    def move_joint(self, joint_id: int, position: int, move_time: int) -> bool:
        return False

    @abstractmethod
    def get_joint_position(self, joint_id: Union[int, str]) -> float:
        return 0

    @abstractmethod
    def get_joint_current(self, joint_id: Union[int, str]) -> float:
        return 0

    @abstractmethod
    def get_joint_status(self, joint_id: Union[int, str]) -> int:
        return 0

    @abstractmethod
    def lean(self, direction: str, amount: Optional[int], move_time: int) -> bool:
        return False

    @abstractmethod
    def walk(self, num_steps: int = 2, start_foot: str = 'auto', turn: int = 0,
             step_length: int = 25, move_time: int = 1500) -> bool:
        return False

    @abstractmethod
    def eyes(self, joint_id: int, pose_or_angle: Union[str, int], move_time: int = 1000) -> bool:
        return False

    @abstractmethod
    def kick(self, side: str = 'right', twist: int = 0, move_time: int = 2500) -> bool:
        return False

    @abstractmethod
    def arms(self, left_angle: int, right_angle: int, move_time: int) -> bool:
        return False

    @abstractmethod
    def celebrate(self, move_time: int = 4000) -> bool:
        return False

    @abstractmethod
    def circle_dance(self, side: str = 'right', move_time: int = 2500) -> bool:
        return False

    @abstractmethod
    def dance(self, side: str = 'right', move_time: int = 3000) -> bool:
        return False

    @abstractmethod
    def wiggle(self, move_time: int = 5000) -> bool:
        return False

    @abstractmethod
    def sidestep(self, side: str, steps: int = 1, step_length: int = 50,
                 move_time: int = 1000) -> bool:
        return False

    # --- sound ------------------------------------------------------------

    @abstractmethod
    def play_sound(self, name_or_freq_start: Union[str, float],
                   freq_end: Optional[float] = None,
                   duration: Optional[int] = None) -> bool:
        return False

    # --- GPIO / I2C -------------------------------------------------------

    @abstractmethod
    def pinmode_gpio(self, gpio: int, mode: str) -> bool:
        return False

    @abstractmethod
    def write_gpio(self, gpio: int, value: int) -> bool:
        return False

    @abstractmethod
    def digitalread_gpio(self, gpio: int) -> bool:
        return False

    @abstractmethod
    def i2c_write(self, *byte_array: int) -> bool:
        return False

    @abstractmethod
    def i2c_write_to_ric(self, address: int, byte_array: bytes) -> bool:
        return False

    # --- sensors ----------------------------------------------------------

    @abstractmethod
    def get_battery_voltage(self) -> float:
        return 0

    @abstractmethod
    def get_battery_remaining(self) -> float:
        return 0

    @abstractmethod
    def get_distance_sensor(self) -> Union[int, float]:
        return 0

    @abstractmethod
    def get_accelerometer(self, axis: Optional[str] = None, axisCode: int = 0) -> float:
        return 0

    # --- safeties and behaviour toggles ----------------------------------

    @abstractmethod
    def enable_motors(self, enable: bool = True, clear_queue: bool = True) -> bool:
        return False

    @abstractmethod
    def enable_safeties(self, enable: bool = True) -> bool:
        return False

    @abstractmethod
    def fall_protection(self, enable: bool = True) -> bool:
        return False

    @abstractmethod
    def motor_protection(self, enable: bool = True) -> bool:
        return False

    @abstractmethod
    def battery_protection(self, enable: bool = True) -> bool:
        return False

    @abstractmethod
    def buzz_prevention(self, enable: bool = True) -> bool:
        return False

    @abstractmethod
    def lifelike_behaviour(self, enable: bool = True) -> bool:
        return False

    # --- calibration / parameters ----------------------------------------

    @abstractmethod
    def set_parameter(self, *byte_array: int) -> bool:
        return False

    @abstractmethod
    def save_calibration(self) -> bool:
        return False

    @abstractmethod
    def clear_calibration(self) -> bool:
        return False

    @abstractmethod
    def is_calibrated(self) -> bool:
        return False

    # --- ROS / low-level serial ------------------------------------------

    @abstractmethod
    def ros_command(self, *byte_array: int) -> bool:
        return False

    @abstractmethod
    def keyframe(self, time: float, num_of_msgs: int, msgs) -> List[bytes]:
        # Fixed: placeholder return now matches the List[bytes] annotation.
        return []

    @abstractmethod
    def get_chatter(self) -> bytes:
        # Fixed: placeholder return now matches the bytes annotation.
        return b""

    @abstractmethod
    def get_firmware_version(self) -> bool:
        return False

    @abstractmethod
    def _mute_serial(self) -> bool:
        return False

    @abstractmethod
    def ros_serial_formatter(self, topicID: int, send: bool = False, *message: int) -> List[int]:
        # Fixed: placeholder return now matches the List[int] annotation.
        return []

    # --- status queries ---------------------------------------------------

    @abstractmethod
    def is_moving(self) -> bool:
        return False

    @abstractmethod
    def is_paused(self) -> bool:
        return False

    @abstractmethod
    def get_robot_status(self) -> Dict:
        return {}

    @abstractmethod
    def get_joints(self) -> Dict:
        return {}

    @abstractmethod
    def get_power_status(self) -> Dict:
        return {}

    @abstractmethod
    def get_add_ons_status(self) -> Dict:
        return {}

    @abstractmethod
    def get_add_on_status(self, add_on_name_or_id: Union[int, str]) -> Dict:
        return {}

    @abstractmethod
    def add_on_query(self, add_on_name: str, data_to_write: bytes, num_bytes_to_read: int) -> Dict:
        return {}

    @abstractmethod
    def get_system_info(self) -> Dict:
        return {}

    # --- naming -----------------------------------------------------------

    @abstractmethod
    def set_marty_name(self, name: str) -> bool:
        return False

    @abstractmethod
    def get_marty_name(self) -> str:
        return ""

    @abstractmethod
    def is_marty_name_set(self) -> bool:
        return False

    @abstractmethod
    def get_hw_elems_list(self) -> List:
        return []

    # --- RIC REST interface ----------------------------------------------

    @abstractmethod
    def send_ric_rest_cmd(self, ricRestCmd: str) -> None:
        pass

    @abstractmethod
    def send_ric_rest_cmd_sync(self, ricRestCmd: str) -> Dict:
        return {}

    # --- disco (LED) add-ons ---------------------------------------------

    @abstractmethod
    def disco_off(self, add_on: str) -> bool:
        return False

    @abstractmethod
    def disco_pattern(self, pattern: int, add_on: str) -> bool:
        return False

    @abstractmethod
    def disco_color(self, color: Union[str, Tuple[int, int, int]], add_on: str, region: Union[int, str]) -> bool:
        return False

    @abstractmethod
    def disco_group_operation(self, disco_operation: Callable, whoami_type_codes: set, operation_kwargs: dict) -> bool:
        return False

    # --- diagnostics ------------------------------------------------------

    @abstractmethod
    def register_logging_callback(self, loggingCallback: Callable[[str], None]) -> None:
        pass

    @abstractmethod
    def get_interface_stats(self) -> Dict:
        return {}

    @abstractmethod
    def preException(self, isFatal: bool) -> None:
        pass

    @abstractmethod
    def get_test_output(self) -> dict:
        # Fixed: placeholder return now matches the dict annotation.
        return {}

    @abstractmethod
    def is_conn_ready(self) -> bool:
        return False
| 25.831492
| 119
| 0.617902
| 1,079
| 9,351
| 5.191844
| 0.23355
| 0.227597
| 0.214209
| 0.239914
| 0.521064
| 0.428418
| 0.337201
| 0.237237
| 0.179936
| 0.053195
| 0
| 0.008817
| 0.284355
| 9,351
| 361
| 120
| 25.903047
| 0.828302
| 0.014437
| 0
| 0.557196
| 0
| 0
| 0.03111
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.291513
| false
| 0.01845
| 0.01107
| 0.254613
| 0.583026
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 6
|
7ce6853b6ad2d42cc2fc010dd74ccd6b3fc38a0a
| 37
|
py
|
Python
|
tests/_support/has_modules.py
|
daobook/invoke
|
577faf1c016a69392583046613bfb42356855e8f
|
[
"BSD-2-Clause"
] | null | null | null |
tests/_support/has_modules.py
|
daobook/invoke
|
577faf1c016a69392583046613bfb42356855e8f
|
[
"BSD-2-Clause"
] | null | null | null |
tests/_support/has_modules.py
|
daobook/invoke
|
577faf1c016a69392583046613bfb42356855e8f
|
[
"BSD-2-Clause"
] | null | null | null |
# Not picklable!
import os # noqa
| 12.333333
| 18
| 0.648649
| 5
| 37
| 4.8
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.27027
| 37
| 2
| 19
| 18.5
| 0.888889
| 0.513514
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
6b0a6cd1eccb677c6bf9760f19a138a800366ac7
| 6,463
|
py
|
Python
|
recipes/LibriMix/prepare_data.py
|
anonymspeechbrain/speechbrain
|
9a0632ddb066f5bceffb71fb971552fb542f7b7e
|
[
"Apache-2.0"
] | null | null | null |
recipes/LibriMix/prepare_data.py
|
anonymspeechbrain/speechbrain
|
9a0632ddb066f5bceffb71fb971552fb542f7b7e
|
[
"Apache-2.0"
] | null | null | null |
recipes/LibriMix/prepare_data.py
|
anonymspeechbrain/speechbrain
|
9a0632ddb066f5bceffb71fb971552fb542f7b7e
|
[
"Apache-2.0"
] | null | null | null |
"""
The functions to create the .csv files for LibriMix
Author
* Anonymous
"""
import os
import csv
def prepare_librimix(
    datapath,
    savepath,
    n_spks=2,
    skip_prep=False,
    librimix_addnoise=False,
    fs=8000,
):
    """
    Prepare .csv files for librimix

    Arguments:
    ----------
    datapath (str) : path for the wsj0-mix dataset.
    savepath (str) : path where we save the csv file.
    n_spks (int): number of speakers
    skip_prep (bool): If True, skip data preparation
    librimix_addnoise: If True, add whamnoise to librimix datasets
    """
    # Nothing to do when preparation is explicitly skipped.
    if skip_prep:
        return

    # Only the Libri 2/3Mix datasets are handled here.
    if "Libri" not in datapath:
        raise ValueError("Unsupported Dataset")

    if n_spks == 2:
        assert (
            "Libri2Mix" in datapath
        ), "Inconsistent number of speakers and datapath"
        create_libri2mix_csv(datapath, savepath, addnoise=librimix_addnoise)
    elif n_spks == 3:
        assert (
            "Libri3Mix" in datapath
        ), "Inconsistent number of speakers and datapath"
        create_libri3mix_csv(datapath, savepath, addnoise=librimix_addnoise)
    else:
        raise ValueError("Unsupported Number of Speakers")
def create_libri2mix_csv(
    datapath,
    savepath,
    addnoise=False,
    version="wav8k/min/",
    set_types=("train-360", "dev", "test"),
):
    """
    This functions creates the .csv file for the libri2mix dataset

    Arguments:
    ----------
    datapath (str): root folder of the Libri2Mix dataset.
    savepath (str): directory where the .csv files are written.
    addnoise (bool): if True, index the noisy mixtures ("mix_both")
        instead of the clean ones ("mix_clean").
    version (str): sample-rate / mode subfolder of the dataset.
    set_types (iterable of str): dataset splits to process.
        (Now a tuple default — the previous list default was a mutable
        default argument.)
    """
    for set_type in set_types:
        # The mixture folder depends on whether WHAM! noise was added.
        if addnoise:
            mix_path = os.path.join(datapath, version, set_type, "mix_both/")
        else:
            mix_path = os.path.join(datapath, version, set_type, "mix_clean/")

        s1_path = os.path.join(datapath, version, set_type, "s1/")
        s2_path = os.path.join(datapath, version, set_type, "s2/")
        noise_path = os.path.join(datapath, version, set_type, "noise/")

        # Every source folder mirrors the mixture folder's file names.
        files = os.listdir(mix_path)

        mix_fl_paths = [mix_path + fl for fl in files]
        s1_fl_paths = [s1_path + fl for fl in files]
        s2_fl_paths = [s2_path + fl for fl in files]
        noise_fl_paths = [noise_path + fl for fl in files]

        csv_columns = [
            "ID",
            "duration",
            "mix_wav",
            "mix_wav_format",
            "mix_wav_opts",
            "s1_wav",
            "s1_wav_format",
            "s1_wav_opts",
            "s2_wav",
            "s2_wav_format",
            "s2_wav_opts",
            "noise_wav",
            "noise_wav_format",
            "noise_wav_opts",
        ]

        csv_path = os.path.join(savepath, "libri2mix_" + set_type + ".csv")
        # newline="" is required by the csv module; without it the writer
        # emits blank lines on platforms with "\r\n" line endings.
        with open(csv_path, "w", newline="") as csvfile:
            writer = csv.DictWriter(csvfile, fieldnames=csv_columns)
            writer.writeheader()
            # Loop variables renamed so they no longer shadow the
            # *_path folder variables above.
            for i, (mix_fl, s1_fl, s2_fl, noise_fl) in enumerate(
                zip(mix_fl_paths, s1_fl_paths, s2_fl_paths, noise_fl_paths)
            ):
                row = {
                    "ID": i,
                    # NOTE(review): duration is a 1.0 placeholder, not the
                    # real clip length — confirm downstream expectations.
                    "duration": 1.0,
                    "mix_wav": mix_fl,
                    "mix_wav_format": "wav",
                    "mix_wav_opts": None,
                    "s1_wav": s1_fl,
                    "s1_wav_format": "wav",
                    "s1_wav_opts": None,
                    "s2_wav": s2_fl,
                    "s2_wav_format": "wav",
                    "s2_wav_opts": None,
                    "noise_wav": noise_fl,
                    "noise_wav_format": "wav",
                    "noise_wav_opts": None,
                }
                writer.writerow(row)
def create_libri3mix_csv(
    datapath,
    savepath,
    addnoise=False,
    version="wav8k/min/",
    set_types=("train-360", "dev", "test"),
):
    """
    This functions creates the .csv file for the libri3mix dataset

    Arguments:
    ----------
    datapath (str): root folder of the Libri3Mix dataset.
    savepath (str): directory where the .csv files are written.
    addnoise (bool): if True, index the noisy mixtures ("mix_both")
        instead of the clean ones ("mix_clean").
    version (str): sample-rate / mode subfolder of the dataset.
    set_types (iterable of str): dataset splits to process.
        (Now a tuple default — the previous list default was a mutable
        default argument.)
    """
    for set_type in set_types:
        # The mixture folder depends on whether WHAM! noise was added.
        if addnoise:
            mix_path = os.path.join(datapath, version, set_type, "mix_both/")
        else:
            mix_path = os.path.join(datapath, version, set_type, "mix_clean/")

        s1_path = os.path.join(datapath, version, set_type, "s1/")
        s2_path = os.path.join(datapath, version, set_type, "s2/")
        s3_path = os.path.join(datapath, version, set_type, "s3/")
        noise_path = os.path.join(datapath, version, set_type, "noise/")

        # Every source folder mirrors the mixture folder's file names.
        files = os.listdir(mix_path)

        mix_fl_paths = [mix_path + fl for fl in files]
        s1_fl_paths = [s1_path + fl for fl in files]
        s2_fl_paths = [s2_path + fl for fl in files]
        s3_fl_paths = [s3_path + fl for fl in files]
        noise_fl_paths = [noise_path + fl for fl in files]

        csv_columns = [
            "ID",
            "duration",
            "mix_wav",
            "mix_wav_format",
            "mix_wav_opts",
            "s1_wav",
            "s1_wav_format",
            "s1_wav_opts",
            "s2_wav",
            "s2_wav_format",
            "s2_wav_opts",
            "s3_wav",
            "s3_wav_format",
            "s3_wav_opts",
            "noise_wav",
            "noise_wav_format",
            "noise_wav_opts",
        ]

        csv_path = os.path.join(savepath, "libri3mix_" + set_type + ".csv")
        # newline="" is required by the csv module; without it the writer
        # emits blank lines on platforms with "\r\n" line endings.
        with open(csv_path, "w", newline="") as csvfile:
            writer = csv.DictWriter(csvfile, fieldnames=csv_columns)
            writer.writeheader()
            # Loop variables renamed so they no longer shadow the
            # *_path folder variables above.
            for i, (mix_fl, s1_fl, s2_fl, s3_fl, noise_fl) in enumerate(
                zip(
                    mix_fl_paths,
                    s1_fl_paths,
                    s2_fl_paths,
                    s3_fl_paths,
                    noise_fl_paths,
                )
            ):
                row = {
                    "ID": i,
                    # NOTE(review): duration is a 1.0 placeholder, not the
                    # real clip length — confirm downstream expectations.
                    "duration": 1.0,
                    "mix_wav": mix_fl,
                    "mix_wav_format": "wav",
                    "mix_wav_opts": None,
                    "s1_wav": s1_fl,
                    "s1_wav_format": "wav",
                    "s1_wav_opts": None,
                    "s2_wav": s2_fl,
                    "s2_wav_format": "wav",
                    "s2_wav_opts": None,
                    "s3_wav": s3_fl,
                    "s3_wav_format": "wav",
                    "s3_wav_opts": None,
                    "noise_wav": noise_fl,
                    "noise_wav_format": "wav",
                    "noise_wav_opts": None,
                }
                writer.writerow(row)
| 30.77619
| 80
| 0.504564
| 730
| 6,463
| 4.172603
| 0.146575
| 0.041366
| 0.036113
| 0.050558
| 0.782994
| 0.782994
| 0.739002
| 0.739002
| 0.727183
| 0.69107
| 0
| 0.024632
| 0.390685
| 6,463
| 209
| 81
| 30.923445
| 0.748857
| 0.083398
| 0
| 0.709877
| 0
| 0
| 0.165012
| 0
| 0
| 0
| 0
| 0
| 0.012346
| 1
| 0.018519
| false
| 0
| 0.012346
| 0
| 0.037037
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
6b233b0618af2c13245a4c15d93d6022540aded8
| 157
|
py
|
Python
|
Udemy/GeekUniversity/secao_4/ex2.py
|
SandboxGTASA/Python-1
|
bbb5f8bdf7d5110528e457b2a9ebdb2d67e40805
|
[
"MIT"
] | null | null | null |
Udemy/GeekUniversity/secao_4/ex2.py
|
SandboxGTASA/Python-1
|
bbb5f8bdf7d5110528e457b2a9ebdb2d67e40805
|
[
"MIT"
] | null | null | null |
Udemy/GeekUniversity/secao_4/ex2.py
|
SandboxGTASA/Python-1
|
bbb5f8bdf7d5110528e457b2a9ebdb2d67e40805
|
[
"MIT"
] | null | null | null |
# Write a program that reads a real number and prints it.
# (Prompt strings are intentionally left in Portuguese — they are runtime output.)
num_real = float(input('Entre com um numero real: '))  # raises ValueError on non-numeric input
print(f'O numero real digitado foi: {num_real}')
| 31.4
| 53
| 0.732484
| 28
| 157
| 4.035714
| 0.678571
| 0.265487
| 0.212389
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.165605
| 157
| 4
| 54
| 39.25
| 0.862595
| 0.318471
| 0
| 0
| 0
| 0
| 0.609524
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.5
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 6
|
6b48f807504030b86fdf539f7cc0b007735576fe
| 25,630
|
py
|
Python
|
tests.py
|
mthh/smoomapy
|
a603a62e76592e84509591fddcde8bfb1e826b84
|
[
"MIT"
] | 6
|
2017-01-10T16:01:17.000Z
|
2021-07-06T12:52:37.000Z
|
tests.py
|
mthh/smoomapy
|
a603a62e76592e84509591fddcde8bfb1e826b84
|
[
"MIT"
] | null | null | null |
tests.py
|
mthh/smoomapy
|
a603a62e76592e84509591fddcde8bfb1e826b84
|
[
"MIT"
] | 1
|
2020-02-29T05:08:19.000Z
|
2020-02-29T05:08:19.000Z
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import unittest
import numpy as np
import random
import sys
from geopandas import GeoDataFrame
from io import StringIO
from smoomapy import (
quick_stewart, quick_idw, SmoothIdw, SmoothStewart,
head_tail_breaks, maximal_breaks, get_opt_nb_class)
from smoomapy.helpers_classif import _chain
class TestSmoothIdw(unittest.TestCase):
    # Tests for the inverse-distance-weighting (IDW) smoothing API: the
    # one-shot `quick_idw` helper and the `SmoothIdw` class.
    # NOTE(review): these tests read "misc/nuts3_data.geojson" relative to
    # the working directory — presumably run from the repository root.

    def setUp(self):
        pass

    def test_one_shot_idw(self):
        # Exports correctly to `bytes`:
        res = quick_idw(
            "misc/nuts3_data.geojson", "pop2008",
            power=1, resolution=80000, nb_class=8,
            disc_func='jenks', mask="misc/nuts3_data.geojson")
        self.assertIsInstance(res, bytes)

        # Exports correctly to `GeoDataFrame`
        # and respects the choosen number of class:
        res = quick_idw(
            "misc/nuts3_data.geojson", "pop2008",
            power=1, nb_pts=8000,
            nb_class=8, disc_func="jenks",
            mask="misc/nuts3_data.geojson",
            output="GeoDataFrame")
        self.assertIsInstance(res, GeoDataFrame)
        self.assertEqual(len(res), 8)

    def test_object_idw(self):
        # Test the OO approach for building smoothed map with stewart potentials
        idw = SmoothIdw("misc/nuts3_data.geojson", "pop2008",
                        power=2,
                        resolution=90000,
                        mask="misc/nuts3_data.geojson")

        # Test using percentiles :
        result = idw.render(nb_class=10,
                            disc_func="percentiles",
                            output="geodataframe")
        self.assertIsInstance(result, GeoDataFrame)
        self.assertEqual(len(result), 10)

        # Test using somes already choosed break values :
        my_breaks = [0, 250000, 375000, 500000, 870000, 1850000, 4250000]
        result = idw.render(
            nb_class=48,  # bogus values as `nb_class` and
            disc_func="foobar",  # ... disc_func should be overrided
            user_defined_breaks=my_breaks,  # ... by the `user_defined_breaks` params
            output="geodataframe")  # ... and this is what we are testing here
        self.assertIsInstance(result, GeoDataFrame)
        self.assertEqual(len(result), len(my_breaks) - 1)

        # Assert these break values were actually used :
        for wanted_break, obtained_break in zip(my_breaks[1:-1], result["max"][:-1]):
            self.assertAlmostEqual(wanted_break, obtained_break)

        # Test again using another discretization method : "head tail breaks"
        # (should define automatically the number of class)
        result = idw.render(nb_class=None,
                            disc_func="head_tail",
                            output="geodataframe")
        self.assertIsInstance(result, GeoDataFrame)

        # Test that the object has a nice representation :
        a = str(idw)
        b = repr(idw)
        self.assertEqual(a, b)
        self.assertIn("SmoothIdw - variable :", a)
        self.assertIn("{} features".format(len(idw.gdf)), a)

        # `.properties` prints to stdout; capture and inspect it.
        if sys.version_info >= (3, 0):
            sys.stdout = StringIO()
            idw.properties
            printed = sys.stdout.getvalue()
            sys.stdout = sys.__stdout__
            self.assertIn("SmoothIdw - variable :", printed)

    # def test_object_idw_two_var(self):
    #     # Test the OO approach with two variables :
    #     idw = SmoothIdw("misc/nuts3_data.geojson", "gdppps2008",
    #                     power=0.7, resolution=80000,
    #                     variable_name2="pop2008",
    #                     mask="misc/nuts3_data.geojson")
    #     result = idw.render(8, "equal_interval", output="Geodataframe")
    #     self.assertIsInstance(result, GeoDataFrame)
    #     self.assertEqual(len(result), 8)

    def test_distance_not_geo(self):
        # First whith one variable :
        idw = SmoothIdw("misc/nuts3_data.geojson",
                        "gdppps2008",
                        nb_pts=7200,
                        power=3,
                        mask="misc/nuts3_data.geojson",
                        distGeo=False)
        result = idw.render(8, "jenks", output="Geodataframe")
        self.assertIsInstance(result, GeoDataFrame)
        self.assertEqual(len(result), 8)

        # # Then with two variables and a custom projection to use :
        # idw = SmoothIdw("misc/nuts3_data.geojson",
        #                 "gdppps2008",
        #                 power=1.5,
        #                 variable_name2="pop2008",
        #                 mask="misc/nuts3_data.geojson",
        #                 distGeo=False,
        #                 projDistance={"init": "epsg:3035"})
        # result = idw.render(8, "equal_interval", output="Geodataframe")
        # self.assertIsInstance(result, GeoDataFrame)
        # self.assertEqual(len(result), 8)
        # self.assertEqual(result.crs, {'init': 'epsg:3035'})

    def test_from_gdf_with_new_mask(self):
        gdf = GeoDataFrame.from_file("misc/nuts3_data.geojson")
        idw = SmoothIdw(gdf, "gdppps2008", power=1, nb_pts=2800, mask=None)
        result = idw.render(6, "percentiles", output="Geodataframe")
        self.assertIsInstance(result, GeoDataFrame)
        self.assertEqual(len(result), 6)

        # Finally, use a mask (from a file) :
        result = idw.render(5, "percentiles",
                            output="Geodataframe",
                            new_mask="misc/nuts3_data.geojson")
        self.assertIsInstance(result, GeoDataFrame)
        self.assertEqual(idw.use_mask, True)
        self.assertEqual(len(result), 5)

        # Or from a GeoDataFrame :
        result = idw.render(6, "percentiles",
                            output="Geodataframe",
                            new_mask=gdf)
        self.assertIsInstance(result, GeoDataFrame)
        self.assertEqual(idw.use_mask, True)
        self.assertEqual(len(result), 6)

        # # Nope, no mask :
        # result = idw.render(5, "percentiles",
        #                     output="Geodataframe",
        #                     new_mask=None)
        # self.assertIsInstance(result, GeoDataFrame)
        # self.assertEqual(idw.use_mask, False)
        # self.assertEqual(len(result), 5)

        # Test that it skips the mask parameter if the layer provided as a mask
        # is not a Polygon/MultiPolygon layer :
        gdf_mask = gdf[1:50].copy()
        gdf_mask.geometry = gdf_mask.geometry.centroid
        result = idw.render(5, "percentiles",
                            output="Geodataframe",
                            new_mask=gdf_mask)
        self.assertIsInstance(result, GeoDataFrame)
        self.assertEqual(idw.use_mask, False)
        self.assertEqual(len(result), 5)

    def test_input_with_missing_values(self):
        # NaN values in the target variable must be tolerated.
        gdf = GeoDataFrame.from_file("misc/nuts3_data.geojson")
        gdf.loc[12:18, "gdppps2008"] = np.NaN
        idw = SmoothIdw(gdf, "gdppps2008", power=1, nb_pts=2600, mask=gdf)
        result = idw.render(9, "jenks", output="Geodataframe")
        self.assertIsInstance(result, GeoDataFrame)
        self.assertEqual(len(result), 9)

        # Empty strings (object dtype) must also be treated as missing.
        gdf2 = GeoDataFrame.from_file('misc/nuts3_data.geojson').to_crs({"init": "epsg:3035"})
        gdf2.loc[:, 'gdppps2008'] = gdf2['gdppps2008'].astype(object)
        gdf2.loc[15:20, 'gdppps2008'] = ""
        gdf2.loc[75:78, 'gdppps2008'] = ""
        idw = SmoothIdw(gdf2, 'gdppps2008', power=1, nb_pts=1200, mask=gdf2)
        result = idw.render(9, 'jenks', output="GeoDataFrame")
        self.assertIsInstance(result, GeoDataFrame)
        self.assertEqual(len(result), 9)

    def test_wrong_dtype_missing_values(self):
        # Values stored as strings with NaN holes should still be usable.
        gdf = GeoDataFrame.from_file("misc/nuts3_data.geojson")
        gdf.loc[12:18, "gdppps2008"] = np.NaN
        gdf.loc[25:35, "pop2008"] = np.NaN
        gdf.loc[0:len(gdf)-1, "pop2008"] = gdf["pop2008"].astype(str)
        idw = SmoothIdw(gdf, "gdppps2008", power=1, nb_pts=2600,
                        mask="misc/nuts3_data.geojson")
        result = idw.render(9, "jenks", output="Geodataframe")
        self.assertIsInstance(result, GeoDataFrame)
        self.assertEqual(len(result), 9)

        # idw = SmoothIdw(gdf, "gdppps2008", variable_name2="pop2008",
        #                 power=1, nb_pts=1200, mask="misc/nuts3_data.geojson")
        # result = idw.render(9, "equal_interval", output="Geodataframe")
        # self.assertIsInstance(result, GeoDataFrame)
        # self.assertEqual(len(result), 9)

    def test_from_point_layer_and_maximal_breaks(self):
        gdf = GeoDataFrame.from_file("misc/nuts3_data.geojson").to_crs({"init": "epsg:4326"})
        # Convert the input layer to a point layer :
        gdf.geometry = gdf.geometry.centroid
        idw = SmoothIdw(gdf, "gdppps2008", power=1, nb_pts=7600,
                        mask="misc/nuts3_data.geojson")
        # Use equal interval :
        result = idw.render(3, "equal_interval", output="Geodataframe")
        self.assertIsInstance(result, GeoDataFrame)
        self.assertEqual(len(result), 3)
        # Use maximal breaks discretisation method:
        result = idw.render(9, "maximal_breaks", output="Geodataframe")
        self.assertIsInstance(result, GeoDataFrame)

    def test_from_polygon_layer_no_crs(self):
        gdf = GeoDataFrame.from_file("misc/nuts3_data.geojson")
        gdf.crs = ''
        # Convert the input layer to a polygon layer (instead of multipolygon):
        gdf.geometry = gdf.geometry.union(gdf.geometry)
        idw = SmoothIdw(gdf, "gdppps2008", power=1, nb_pts=2600,
                        mask="misc/nuts3_data.geojson")
        # Use equal interval :
        result = idw.render(8, "jenks", output="Geodataframe")
        self.assertIsInstance(result, GeoDataFrame)
        self.assertEqual(len(result), 8)

    def test_errors(self):
        idw = SmoothIdw("misc/nuts3_data.geojson", "gdppps2008",
                        power=2, nb_pts=1000)
        # Test with a wrong discretization function name :
        with self.assertRaises(ValueError):
            idw.render(9, "foo", output="Geodataframe")
        # Test with a sizelimit and a high number of points
        # (the nuts3 layer contains 1448 features)
        with self.assertRaises(ValueError):
            idw = SmoothIdw("misc/nuts3_data.geojson", "gdppps2008",
                            power=2, nb_pts=100000, sizelimit=10000000)
class TestSmoothStewart(unittest.TestCase):
def setUp(self):
pass
def test_one_shot_stewart(self):
# Exports correctly to `bytes`:
res = quick_stewart(
"misc/nuts3_data.geojson", "pop2008",
span=65000, beta=2, resolution=80000, nb_class=8,
mask="misc/nuts3_data.geojson")
self.assertIsInstance(res, bytes)
# Exports correctly to `GeoDataFrame`
# and respects the choosen number of class:
res = quick_stewart(
"misc/nuts3_data.geojson", "pop2008",
span=65000, beta=2, nb_pts=8000, nb_class=8,
mask="misc/nuts3_data.geojson", output="GeoDataFrame")
self.assertIsInstance(res, GeoDataFrame)
self.assertEqual(len(res), 8)
# Test that it works without specifying without `nb_pts`,
# `nb_class` and `resolution`:
res = quick_stewart(
"misc/nuts3_data.geojson", "pop2008",
span=65000,
beta=2,
mask="misc/nuts3_data.geojson",
output="GeoDataFrame")
self.assertIsInstance(res, GeoDataFrame)
# Test with user defined breaks values :
my_breaks = [0, 197000, 1295000, 2093000, 3091000,
5888000, 10186000, 13500000]
res = quick_stewart(
"misc/nuts3_data.geojson",
"pop2008",
span=65000,
beta=2,
resolution=80000,
user_defined_breaks=my_breaks,
mask="misc/nuts3_data.geojson",
output="GeoDataFrame")
self.assertIsInstance(res, GeoDataFrame)
self.assertEqual(len(res), 7)
# Assert these break values were actually used :
for wanted_break, obtained_break in zip(my_breaks[1:-1], res["max"][:-1]):
self.assertAlmostEqual(wanted_break, obtained_break)
# Test with user defined breaks values
# (the maximum value is volontarily low, and the minimum volontarily high,
# two new class will be created,
# respectively between the minimum and the first break value
# and between the last break value and the maximum)
my_breaks = [1295000, 2093000, 3091000, 5888000, 10186000]
nb_interval = len(my_breaks) - 1
res2 = quick_stewart(
"misc/nuts3_data.geojson",
"pop2008",
span=65000,
beta=2,
resolution=80000,
user_defined_breaks=my_breaks,
mask="misc/nuts3_data.geojson",
output="GeoDataFrame")
self.assertIsInstance(res2, GeoDataFrame)
# We can test that there is no hole by comparing the area of theses polygons
# and the area of the previously computed resultat :
self.assertAlmostEqual(res2.area.sum(), res.area.sum(), 2)
# And by the fact that there is two extra class compared to our break values :
self.assertEqual(len(res2), nb_interval + 2)
# Test with break values non-unique (likely due to the discretization choosed):
# + Not correctly ordered values
# They should be reorderer and duplicates should be removed ...
my_breaks = [0, 0, 197000, 1295000, 3091000, 2093000,
5888000, 10186000, 13500000]
res3 = quick_stewart(
"misc/nuts3_data.geojson",
"pop2008",
span=65000,
beta=2,
resolution=80000,
user_defined_breaks=my_breaks,
mask="misc/nuts3_data.geojson",
output="GeoDataFrame")
self.assertIsInstance(res3, GeoDataFrame)
# ... so we should have the same class number than `res` :
self.assertEqual(len(res3), len(res))
def test_object_stewart(self):
# Test the OO approach for building smoothed map with stewart potentials
StePot = SmoothStewart("misc/nuts3_data.geojson", "pop2008",
span=65000, beta=2, resolution=90000,
mask="misc/nuts3_data.geojson")
# Test using percentiles :
result = StePot.render(nb_class=10,
disc_func="percentiles",
output="geodataframe")
self.assertIsInstance(result, GeoDataFrame)
self.assertEqual(len(result), 10)
# Test using somes already choosed break values :
my_breaks = [0, 197000, 1295000, 2093000, 3091000,
5888000, 10186000, 12000000]
result = StePot.render(
nb_class=48, # bogus values as `nb_class` and
disc_func="foobar", # ... disc_func should be overrided
user_defined_breaks=my_breaks, # ... by the `user_defined_breaks` params
output="geodataframe") # ... and this is what we are testing here
self.assertIsInstance(result, GeoDataFrame)
self.assertEqual(len(result), 7)
# Assert these break values were actually used :
for wanted_break, obtained_break in zip(my_breaks[1:-1], result["max"][:-1]):
self.assertAlmostEqual(wanted_break, obtained_break)
# Test again using another discretization method : "head tail breaks"
# (should define automatically the number of class)
result = StePot.render(nb_class=None,
disc_func="head_tail",
output="geodataframe")
self.assertIsInstance(result, GeoDataFrame)
# Test that the object has a nice representation :
a = str(StePot)
b = repr(StePot)
self.assertEqual(a, b)
self.assertIn("SmoothStewart - variable :", a)
self.assertIn("{} features".format(len(StePot.gdf)), a)
def test_object_stewart_two_var(self):
# Test the OO approach with two variables :
StePot = SmoothStewart("misc/nuts3_data.geojson", "gdppps2008",
span=65000, beta=2, resolution=80000,
variable_name2="pop2008",
mask="misc/nuts3_data.geojson")
result = StePot.render(8, "equal_interval", output="Geodataframe")
self.assertIsInstance(result, GeoDataFrame)
self.assertEqual(len(result), 8)
def test_distance_not_geo(self):
    """Use Euclidean distance (distGeo=False) instead of geodesic distance."""
    # First with one variable:
    StePot = SmoothStewart("misc/nuts3_data.geojson",
                           "gdppps2008",
                           resolution=100000,
                           span=65000, beta=3,
                           mask="misc/nuts3_data.geojson",
                           distGeo=False)
    result = StePot.render(8, "equal_interval", output="Geodataframe")
    self.assertIsInstance(result, GeoDataFrame)
    self.assertEqual(len(result), 8)
    # Then with two variables and a custom projection to use:
    StePot = SmoothStewart("misc/nuts3_data.geojson",
                           "gdppps2008",
                           span=65000, beta=2,
                           resolution=80000,
                           variable_name2="pop2008",
                           mask="misc/nuts3_data.geojson",
                           distGeo=False,
                           projDistance={"init": "epsg:3035"})
    result = StePot.render(8, "equal_interval", output="Geodataframe")
    self.assertIsInstance(result, GeoDataFrame)
    self.assertEqual(len(result), 8)
    # The output layer carries the CRS given via `projDistance`:
    self.assertEqual(result.crs, {'init': 'epsg:3035'})
def test_from_gdf_with_new_mask(self):
    """Build from a GeoDataFrame, then change the mask via `new_mask`."""
    gdf = GeoDataFrame.from_file("misc/nuts3_data.geojson")
    # Let's use the pareto interaction function for this one:
    StePot = SmoothStewart(gdf, "gdppps2008", typefct="pareto",
                           span=65000, beta=2.33, resolution=80000,
                           mask=None)
    result = StePot.render(6, output="Geodataframe")
    self.assertIsInstance(result, GeoDataFrame)
    self.assertEqual(len(result), 6)
    # Use a mask given as a path to a file:
    result = StePot.render(5, output="Geodataframe",
                           new_mask="misc/nuts3_data.geojson")
    self.assertIsInstance(result, GeoDataFrame)
    self.assertEqual(StePot.use_mask, True)
    self.assertEqual(len(result), 5)
    # Or a mask given as a GeoDataFrame:
    result = StePot.render(6, output="Geodataframe",
                           new_mask=gdf)
    self.assertIsInstance(result, GeoDataFrame)
    self.assertEqual(StePot.use_mask, True)
    self.assertEqual(len(result), 6)
    # The mask parameter is skipped (use_mask falls back to False) when the
    # layer provided as a mask is not a Polygon/MultiPolygon layer:
    gdf_mask = gdf[1:50].copy()
    gdf_mask.geometry = gdf_mask.geometry.centroid
    result = StePot.render(5, output="Geodataframe",
                           new_mask=gdf_mask)
    self.assertIsInstance(result, GeoDataFrame)
    self.assertEqual(StePot.use_mask, False)
    self.assertEqual(len(result), 5)
def test_input_with_missing_values(self):
    """Missing values (NaN or empty strings) in the variable column must
    not prevent the computation of the smoothed map."""
    gdf = GeoDataFrame.from_file("misc/nuts3_data.geojson")
    # NaN values — use `np.nan`: the `np.NaN` alias was removed in NumPy 2.0.
    gdf.loc[12:18, "gdppps2008"] = np.nan
    StePot = SmoothStewart(gdf, "gdppps2008",
                           span=65000, beta=2, resolution=100000,
                           mask=gdf)
    result = StePot.render(9, "equal_interval", output="Geodataframe")
    self.assertIsInstance(result, GeoDataFrame)
    self.assertEqual(len(result), 9)
    # Empty strings in an object-dtype column count as missing too:
    gdf2 = GeoDataFrame.from_file('misc/nuts3_data.geojson').to_crs({"init": "epsg:3035"})
    gdf2.loc[:, 'gdppps2008'] = gdf2['gdppps2008'].astype(object)
    gdf2.loc[15:20, 'gdppps2008'] = ""
    gdf2.loc[75:78, 'gdppps2008'] = ""
    StePot = SmoothStewart(gdf2, 'gdppps2008', span=65000, beta=2,
                           resolution=80000, mask=gdf2)
    result = StePot.render(9, 'equal_interval', output="GeoDataFrame")
    self.assertIsInstance(result, GeoDataFrame)
    self.assertEqual(len(result), 9)
def test_wrong_dtype_missing_values(self):
    """Columns with a wrong dtype (numbers stored as str) and NaN values
    must still be usable, alone or as the second variable."""
    gdf = GeoDataFrame.from_file("misc/nuts3_data.geojson")
    # Use `np.nan`: the `np.NaN` alias was removed in NumPy 2.0.
    gdf.loc[12:18, "gdppps2008"] = np.nan
    gdf.loc[25:35, "pop2008"] = np.nan
    # Force a string dtype on the second variable (.loc slices are inclusive,
    # so 0:len(gdf)-1 covers every row):
    gdf.loc[0:len(gdf)-1, "pop2008"] = gdf["pop2008"].astype(str)
    StePot = SmoothStewart(gdf, "gdppps2008",
                           span=65000, beta=2, resolution=100000,
                           mask="misc/nuts3_data.geojson")
    result = StePot.render(9, "equal_interval", output="Geodataframe")
    self.assertIsInstance(result, GeoDataFrame)
    self.assertEqual(len(result), 9)
    # Same with the mis-typed column used as the second variable:
    StePot = SmoothStewart(gdf, "gdppps2008", variable_name2="pop2008",
                           span=65000, beta=2, resolution=100000,
                           mask="misc/nuts3_data.geojson")
    result = StePot.render(9, "equal_interval", output="Geodataframe")
    self.assertIsInstance(result, GeoDataFrame)
    self.assertEqual(len(result), 9)
def test_from_point_layer_and_maximal_breaks(self):
    """Accept a point layer as input; also exercise `maximal_breaks`."""
    gdf = GeoDataFrame.from_file("misc/nuts3_data.geojson").to_crs({"init": "epsg:4326"})
    # Convert the input layer to a point layer:
    gdf.geometry = gdf.geometry.centroid
    StePot = SmoothStewart(gdf, "gdppps2008",
                           span=65000, beta=2, resolution=80000,
                           mask="misc/nuts3_data.geojson")
    # Use equal interval:
    result = StePot.render(9, "equal_interval", output="Geodataframe")
    self.assertIsInstance(result, GeoDataFrame)
    self.assertEqual(len(result), 9)
    # Use the maximal breaks discretization method
    # (no assertion on len(result): the class count is data-driven):
    result = StePot.render(9, "maximal_breaks", output="Geodataframe")
    self.assertIsInstance(result, GeoDataFrame)
def test_from_polygon_layer_no_crs(self):
    """Accept a (single-)Polygon layer with no CRS defined."""
    gdf = GeoDataFrame.from_file("misc/nuts3_data.geojson")
    gdf.crs = ''
    # Convert the input layer to a polygon layer (instead of multipolygon):
    gdf.geometry = gdf.geometry.union(gdf.geometry)
    StePot = SmoothStewart(gdf, "gdppps2008",
                           span=65000, beta=2, resolution=100000,
                           mask="misc/nuts3_data.geojson")
    # Use equal interval:
    result = StePot.render(8, "equal_interval", output="Geodataframe")
    self.assertIsInstance(result, GeoDataFrame)
    self.assertEqual(len(result), 8)
def test_errors(self):
    """Invalid parameters must raise ValueError."""
    # Unknown interaction function name:
    with self.assertRaises(ValueError):
        StePot = SmoothStewart("misc/nuts3_data.geojson", "gdppps2008",
                               span=65000, beta=2,
                               typefct="abcdefg")
    StePot = SmoothStewart("misc/nuts3_data.geojson", "gdppps2008",
                           span=65000, beta=2, resolution=90000)
    # Unknown discretization function name:
    with self.assertRaises(ValueError):
        StePot.render(9, "foo", output="Geodataframe")
    # A `sizelimit` combined with a high number of points
    # (the nuts3 layer contains 1448 features):
    with self.assertRaises(ValueError):
        StePot = SmoothStewart(
            "misc/nuts3_data.geojson", "gdppps2008",
            span=65000, beta=2, typefct='pareto',
            nb_pts=100000, sizelimit=10000000)
class TestHelpers(unittest.TestCase):
    """Unit tests for the module-level discretization helper functions."""

    def setUp(self):
        # 1200 pseudo-random values in [0, 1000).
        self.li = [random.random() * 1000 for _ in range(1200)]

    def test_head_tail_breaks(self):
        """head_tail_breaks returns ascending breaks; "head" is the default."""
        breaks = head_tail_breaks(self.li)
        self.assertIsInstance(breaks, list)
        breaks2 = head_tail_breaks(self.li, direction="head")
        # Fixed: the two isinstance checks below previously re-asserted
        # `breaks` instead of `breaks2` / `breaks3`; list comparisons now
        # use assertEqual (assertAlmostEqual has no meaning for lists).
        self.assertIsInstance(breaks2, list)
        self.assertEqual(breaks2, sorted(breaks2))
        # "head" is the default direction, so both results are identical:
        self.assertEqual(breaks, breaks2)
        breaks3 = head_tail_breaks(self.li, direction="tail")
        self.assertIsInstance(breaks3, list)
        self.assertEqual(breaks3, sorted(breaks3))
        # Any other direction name is rejected:
        with self.assertRaises(ValueError):
            head_tail_breaks(self.li, direction="nope")

    def test_maximal_breaks(self):
        """maximal_breaks returns k+1 break values when k is given."""
        breaks = maximal_breaks(self.li)
        self.assertIsInstance(breaks, list)
        breaks = maximal_breaks(self.li, k=6)
        self.assertIsInstance(breaks, list)
        self.assertEqual(len(breaks), 7)

    def test_get_opt_nb_class(self):
        """1200 features yield 11 classes."""
        nb_class = get_opt_nb_class(len(self.li))
        self.assertEqual(nb_class, 11)

    def test_chain_list(self):
        """_chain flattens several iterables into one flat list."""
        _list = list(_chain([789, 45], [78, 96], [7878, 789, 36]))
        self.assertEqual(_list, [789, 45, 78, 96, 7878, 789, 36])
if __name__ == "__main__":
    # Allow running this test module directly (e.g. `python test_file.py`).
    unittest.main()
| 43.514431
| 94
| 0.595786
| 2,793
| 25,630
| 5.346939
| 0.11672
| 0.071783
| 0.051359
| 0.079014
| 0.854359
| 0.83628
| 0.795902
| 0.769385
| 0.751373
| 0.732691
| 0
| 0.061321
| 0.296918
| 25,630
| 588
| 95
| 43.588435
| 0.767425
| 0.207686
| 0
| 0.677582
| 0
| 0
| 0.13404
| 0.061522
| 0
| 0
| 0
| 0
| 0.267003
| 1
| 0.065491
| false
| 0.005038
| 0.020151
| 0
| 0.093199
| 0.005038
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
86e127ca6d855b0f3a3f25fb179cc231634a14ba
| 22
|
py
|
Python
|
python/__init__.py
|
ksloataamp/libplanes
|
632fb3bf52838f9bd0531fc2040b3b2ec448f70f
|
[
"MIT"
] | 2
|
2018-03-13T15:13:16.000Z
|
2019-04-23T14:10:46.000Z
|
python/__init__.py
|
ksloataamp/libplanes
|
632fb3bf52838f9bd0531fc2040b3b2ec448f70f
|
[
"MIT"
] | 4
|
2019-10-22T11:29:11.000Z
|
2021-02-17T17:55:41.000Z
|
python/__init__.py
|
ksloataamp/libplanes
|
632fb3bf52838f9bd0531fc2040b3b2ec448f70f
|
[
"MIT"
] | 5
|
2018-10-23T06:04:18.000Z
|
2021-02-15T02:46:12.000Z
|
from .planes import *
| 11
| 21
| 0.727273
| 3
| 22
| 5.333333
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.181818
| 22
| 1
| 22
| 22
| 0.888889
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
86e4b4ac35f055169037bd9f25c316ec04298868
| 65
|
py
|
Python
|
src/oxint/__init__.py
|
joaquinOnSoft/oxint
|
c90dc25e0ccd5bb3576c9e9cb61e3a74d695e16a
|
[
"Apache-2.0"
] | null | null | null |
src/oxint/__init__.py
|
joaquinOnSoft/oxint
|
c90dc25e0ccd5bb3576c9e9cb61e3a74d695e16a
|
[
"Apache-2.0"
] | null | null | null |
src/oxint/__init__.py
|
joaquinOnSoft/oxint
|
c90dc25e0ccd5bb3576c9e9cb61e3a74d695e16a
|
[
"Apache-2.0"
] | null | null | null |
from . import ingest
from . import scraping
from . import utils
| 13
| 22
| 0.753846
| 9
| 65
| 5.444444
| 0.555556
| 0.612245
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.2
| 65
| 4
| 23
| 16.25
| 0.942308
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
86e500f8999d6ac4ba9d8d6bcf1b9628b7615017
| 44
|
py
|
Python
|
generate_face/__init__.py
|
evoreign/generate_face
|
94cbc89b202c0bc6d2b82e75ec1d414049a90b0b
|
[
"MIT"
] | null | null | null |
generate_face/__init__.py
|
evoreign/generate_face
|
94cbc89b202c0bc6d2b82e75ec1d414049a90b0b
|
[
"MIT"
] | null | null | null |
generate_face/__init__.py
|
evoreign/generate_face
|
94cbc89b202c0bc6d2b82e75ec1d414049a90b0b
|
[
"MIT"
] | null | null | null |
from generate_face.main import FaceGenerator
| 44
| 44
| 0.909091
| 6
| 44
| 6.5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.068182
| 44
| 1
| 44
| 44
| 0.95122
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
8106e4008358d96852b8c8e65f0e924699d60203
| 70
|
py
|
Python
|
enthought/pyface/workbench/action/workbench_action.py
|
enthought/etsproxy
|
4aafd628611ebf7fe8311c9d1a0abcf7f7bb5347
|
[
"BSD-3-Clause"
] | 3
|
2016-12-09T06:05:18.000Z
|
2018-03-01T13:00:29.000Z
|
enthought/pyface/workbench/action/workbench_action.py
|
enthought/etsproxy
|
4aafd628611ebf7fe8311c9d1a0abcf7f7bb5347
|
[
"BSD-3-Clause"
] | 1
|
2020-12-02T00:51:32.000Z
|
2020-12-02T08:48:55.000Z
|
enthought/pyface/workbench/action/workbench_action.py
|
enthought/etsproxy
|
4aafd628611ebf7fe8311c9d1a0abcf7f7bb5347
|
[
"BSD-3-Clause"
] | null | null | null |
# proxy module
from pyface.workbench.action.workbench_action import *
| 23.333333
| 54
| 0.828571
| 9
| 70
| 6.333333
| 0.777778
| 0.526316
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.1
| 70
| 2
| 55
| 35
| 0.904762
| 0.171429
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
8140583e64e4fe933558cbb7974508986fb8fa9d
| 36
|
py
|
Python
|
mantraml/models/__init__.py
|
cclauss/mantra
|
19e2f72960da8314f11768d9acfe7836629b817c
|
[
"Apache-2.0"
] | 330
|
2018-09-04T19:07:51.000Z
|
2021-09-14T11:21:05.000Z
|
mantraml/models/__init__.py
|
cclauss/mantra
|
19e2f72960da8314f11768d9acfe7836629b817c
|
[
"Apache-2.0"
] | 13
|
2018-09-06T06:08:16.000Z
|
2018-12-01T17:04:38.000Z
|
mantraml/models/__init__.py
|
cclauss/mantra
|
19e2f72960da8314f11768d9acfe7836629b817c
|
[
"Apache-2.0"
] | 20
|
2018-09-06T11:56:07.000Z
|
2021-12-03T19:48:21.000Z
|
from .MantraModel import MantraModel
| 36
| 36
| 0.888889
| 4
| 36
| 8
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.083333
| 36
| 1
| 36
| 36
| 0.969697
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
d48ecb509746ea46df7886601f21149e83979eb2
| 3,834
|
py
|
Python
|
mak/libs/pyxx/cxx/grammar/declaration/declarator/name.py
|
motor-dev/Motor
|
98cb099fe1c2d31e455ed868cc2a25eae51e79f0
|
[
"BSD-3-Clause"
] | null | null | null |
mak/libs/pyxx/cxx/grammar/declaration/declarator/name.py
|
motor-dev/Motor
|
98cb099fe1c2d31e455ed868cc2a25eae51e79f0
|
[
"BSD-3-Clause"
] | null | null | null |
mak/libs/pyxx/cxx/grammar/declaration/declarator/name.py
|
motor-dev/Motor
|
98cb099fe1c2d31e455ed868cc2a25eae51e79f0
|
[
"BSD-3-Clause"
] | null | null | null |
"""
type-id:
type-specifier-seq abstract-declarator?
defining-type-id:
defining-type-specifier-seq abstract-declarator?
abstract-declarator:
ptr-abstract-declarator
noptr-abstract-declarator? parameters-and-qualifiers trailing-return-type
abstract-pack-declarator
ptr-abstract-declarator:
noptr-abstract-declarator
ptr-operator ptr-abstract-declarator?
noptr-abstract-declarator:
noptr-abstract-declarator? parameters-and-qualifiers
noptr-abstract-declarator? [ constant-expression? ] attribute-specifier-seq?
( ptr-abstract-declarator )
abstract-pack-declarator:
noptr-abstract-pack-declarator
ptr-operator abstract-pack-declarator
noptr-abstract-pack-declarator:
noptr-abstract-pack-declarator parameters-and-qualifiers
noptr-abstract-pack-declarator [ constant-expression? ] attribute-specifier-seq?
...
"""
import glrp
from ....parser import cxx98, cxx11
from motor_typing import TYPE_CHECKING
@glrp.rule('type-id : type-specifier-seq abstract-declarator?')
@cxx98
def type_id(self, p):
    # type: (CxxParser, glrp.Production) -> None
    """Reduction hook for 'type-id'; no semantic action yet."""
    pass
@glrp.rule('defining-type-id : defining-type-specifier-seq abstract-declarator?')
@cxx98
def defining_type_id(self, p):
    # type: (CxxParser, glrp.Production) -> None
    """Reduction hook for 'defining-type-id'; no semantic action yet."""
    pass
@glrp.rule('abstract-declarator? : ptr-abstract-declarator')
@glrp.rule('abstract-declarator? : [split:declarator_end]')
@cxx98
def abstract_declarator_opt(self, p):
    # type: (CxxParser, glrp.Production) -> None
    """Reduction hook for the optional abstract-declarator (C++98 forms)."""
    pass
@glrp.rule('abstract-declarator? : parameters-and-qualifiers trailing-return-type')
@glrp.rule('abstract-declarator? : noptr-abstract-declarator parameters-and-qualifiers trailing-return-type')
#@glrp.rule('abstract-declarator? : abstract-pack-declarator')
@cxx11
def abstract_declarator_opt_cxx11(self, p):
    # type: (CxxParser, glrp.Production) -> None
    """C++11-only abstract-declarator forms (trailing return types)."""
    pass
@glrp.rule('ptr-abstract-declarator : noptr-abstract-declarator[split:declarator_end]')
@glrp.rule('ptr-abstract-declarator : ptr-operator[split:declarator_end]')
@glrp.rule('ptr-abstract-declarator : ptr-operator ptr-abstract-declarator')
@cxx98
def ptr_abstract_declarator(self, p):
    # type: (CxxParser, glrp.Production) -> None
    """Reduction hook for 'ptr-abstract-declarator'; no semantic action yet."""
    pass
@glrp.rule('noptr-abstract-declarator : parameters-and-qualifiers')
@glrp.rule('noptr-abstract-declarator : "[" constant-expression? "]" attribute-specifier-seq?')
@glrp.rule('noptr-abstract-declarator : noptr-abstract-declarator parameters-and-qualifiers')
@glrp.rule(
    'noptr-abstract-declarator : noptr-abstract-declarator "[" constant-expression? "]" attribute-specifier-seq?'
)
@glrp.rule(
    'noptr-abstract-declarator : [split:declarator_continue]"(" noptr-abstract-declarator-disambiguation ptr-abstract-declarator ")"'
)
@cxx98
def noptr_abstract_declarator(self, p):
    # type: (CxxParser, glrp.Production) -> None
    """Reduction hook for 'noptr-abstract-declarator'; no semantic action yet."""
    pass
@glrp.rule('noptr-abstract-declarator-disambiguation[split:noptr_abstract_declarator] :')
@cxx98
def noptr_abstract_declarator_disambiguation(self, p):
    # type: (CxxParser, glrp.Production) -> None
    """Empty production used only to split ambiguous parses; no action."""
    pass
@glrp.rule('abstract-pack-declarator : noptr-abstract-pack-declarator')
@glrp.rule('abstract-pack-declarator : ptr-operator abstract-pack-declarator')
@cxx11
def abstract_pack_declarator(self, p):
    # type: (CxxParser, glrp.Production) -> None
    """Reduction hook for 'abstract-pack-declarator' (C++11); no action yet."""
    pass
@glrp.rule('noptr-abstract-pack-declarator : noptr-abstract-pack-declarator parameters-and-qualifiers')
@glrp.rule(
    'noptr-abstract-pack-declarator : noptr-abstract-pack-declarator "[" constant-expression? "]" attribute-specifier-seq?'
)
@glrp.rule('noptr-abstract-pack-declarator : [split:pack_declarator]"..."')
@cxx11
def noptr_abstract_pack_declarator(self, p):
    # type: (CxxParser, glrp.Production) -> None
    """Reduction hook for 'noptr-abstract-pack-declarator' (C++11); no action."""
    pass
if TYPE_CHECKING:
from ....parser import CxxParser
| 32.218487
| 134
| 0.750391
| 453
| 3,834
| 6.284768
| 0.092715
| 0.265543
| 0.153495
| 0.10432
| 0.890411
| 0.849315
| 0.812785
| 0.684932
| 0.624517
| 0.537759
| 0
| 0.007038
| 0.110589
| 3,834
| 119
| 135
| 32.218487
| 0.827859
| 0.343505
| 0
| 0.362069
| 0
| 0.051724
| 0.590564
| 0.435026
| 0
| 0
| 0
| 0
| 0
| 1
| 0.155172
| false
| 0.155172
| 0.068966
| 0
| 0.224138
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 6
|
be09500e8b13301e1ed4ad270ce2ac522fb93798
| 202
|
py
|
Python
|
polyaxon_k8s/exceptions.py
|
gideonbros/polyaxon-k8s
|
60bf1f97b276e84a17462ba904a82aed652f19fe
|
[
"MIT"
] | 5
|
2017-11-22T21:45:35.000Z
|
2020-02-14T19:51:48.000Z
|
polyaxon_k8s/exceptions.py
|
gideonbros/polyaxon-k8s
|
60bf1f97b276e84a17462ba904a82aed652f19fe
|
[
"MIT"
] | 3
|
2017-12-18T15:42:03.000Z
|
2019-11-19T10:34:39.000Z
|
polyaxon_k8s/exceptions.py
|
gideonbros/polyaxon-k8s
|
60bf1f97b276e84a17462ba904a82aed652f19fe
|
[
"MIT"
] | 5
|
2017-12-11T12:49:28.000Z
|
2021-12-03T07:11:38.000Z
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function
class PolyaxonK8SError(Exception):
    """Exception class to raise in case of a Kubernetes issue."""
| 22.444444
| 65
| 0.722772
| 25
| 202
| 5.6
| 0.88
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.011976
| 0.173267
| 202
| 8
| 66
| 25.25
| 0.826347
| 0.386139
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.333333
| 0.333333
| 0
| 0.666667
| 0.333333
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 6
|
07d36e66b0ac5221ed421225df73d5d0e847b95f
| 154,101
|
py
|
Python
|
cinder/tests/unit/volume/drivers/dell_emc/powermax/test_powermax_common.py
|
stackhpc/cinder
|
93f0ca4dc9eedee10df2f03dad834a31b7f09847
|
[
"Apache-2.0"
] | null | null | null |
cinder/tests/unit/volume/drivers/dell_emc/powermax/test_powermax_common.py
|
stackhpc/cinder
|
93f0ca4dc9eedee10df2f03dad834a31b7f09847
|
[
"Apache-2.0"
] | null | null | null |
cinder/tests/unit/volume/drivers/dell_emc/powermax/test_powermax_common.py
|
stackhpc/cinder
|
93f0ca4dc9eedee10df2f03dad834a31b7f09847
|
[
"Apache-2.0"
] | null | null | null |
# Copyright (c) 2017-2019 Dell Inc. or its subsidiaries.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import ast
from copy import deepcopy
import mock
import six
from cinder import exception
from cinder.objects import fields
from cinder import test
from cinder.tests.unit import fake_snapshot
from cinder.tests.unit import fake_volume
from cinder.tests.unit.volume.drivers.dell_emc.powermax import (
powermax_data as tpd)
from cinder.tests.unit.volume.drivers.dell_emc.powermax import (
powermax_fake_objects as tpfo)
from cinder.volume.drivers.dell_emc.powermax import common
from cinder.volume.drivers.dell_emc.powermax import fc
from cinder.volume.drivers.dell_emc.powermax import masking
from cinder.volume.drivers.dell_emc.powermax import provision
from cinder.volume.drivers.dell_emc.powermax import rest
from cinder.volume.drivers.dell_emc.powermax import utils
from cinder.volume import volume_utils
class PowerMaxCommonTest(test.TestCase):
def setUp(self):
    """Build a PowerMax FC driver wired to fake REST/config objects."""
    self.data = tpd.PowerMaxData()
    super(PowerMaxCommonTest, self).setUp()
    self.mock_object(volume_utils, 'get_max_over_subscription_ratio',
                     return_value=1.0)
    configuration = tpfo.FakeConfiguration(
        None, 'CommonTests', 1, 1, san_ip='1.1.1.1', san_login='smc',
        vmax_array=self.data.array, vmax_srp='SRP_1', san_password='smc',
        san_api_port=8443, vmax_port_groups=[self.data.port_group_name_f])
    # Avoid real HTTP traffic: the REST session is replaced by a fake.
    rest.PowerMaxRest._establish_rest_session = mock.Mock(
        return_value=tpfo.FakeRequestsSession())
    driver = fc.PowerMaxFCDriver(configuration=configuration)
    # Convenience handles used throughout the tests below.
    self.driver = driver
    self.common = self.driver.common
    self.masking = self.common.masking
    self.provision = self.common.provision
    self.rest = self.common.rest
    self.utils = self.common.utils
    self.utils.get_volumetype_extra_specs = (
        mock.Mock(return_value=self.data.vol_type_extra_specs))
@mock.patch.object(rest.PowerMaxRest, 'get_array_ucode_version',
return_value=tpd.PowerMaxData.next_gen_ucode)
@mock.patch.object(rest.PowerMaxRest, 'get_array_model_info',
return_value=('PowerMax 2000', True))
@mock.patch.object(rest.PowerMaxRest, 'set_rest_credentials')
@mock.patch.object(common.PowerMaxCommon, '_get_slo_workload_combinations',
return_value=[])
@mock.patch.object(common.PowerMaxCommon,
'get_attributes_from_cinder_config',
side_effect=[[], tpd.PowerMaxData.array_info_wl])
def test_gather_info_tests(self, mck_parse, mck_combo, mck_rest,
mck_nextgen, mck_ucode):
# Use-Case 1: Gather info no-opts
configuration = tpfo.FakeConfiguration(
None, 'config_group', None, None)
fc.PowerMaxFCDriver(configuration=configuration)
# Use-Case 2: Gather info next-gen with ucode/version
self.common._gather_info()
self.assertTrue(self.common.next_gen)
self.assertEqual(self.common.ucode_level, self.data.next_gen_ucode)
def test_get_slo_workload_combinations_powermax(self):
array_info = self.common.get_attributes_from_cinder_config()
finalarrayinfolist = self.common._get_slo_workload_combinations(
array_info)
self.assertTrue(len(finalarrayinfolist) > 1)
@mock.patch.object(
rest.PowerMaxRest, 'get_vmax_model',
return_value=(tpd.PowerMaxData.vmax_model_details['model']))
@mock.patch.object(
rest.PowerMaxRest, 'get_slo_list',
return_value=(tpd.PowerMaxData.vmax_slo_details['sloId']))
def test_get_slo_workload_combinations_vmax(self, mck_slo, mck_model):
array_info = self.common.get_attributes_from_cinder_config()
finalarrayinfolist = self.common._get_slo_workload_combinations(
array_info)
self.assertTrue(len(finalarrayinfolist) > 1)
@mock.patch.object(
rest.PowerMaxRest, 'get_vmax_model',
return_value=tpd.PowerMaxData.powermax_model_details['model'])
@mock.patch.object(rest.PowerMaxRest, 'get_workload_settings',
return_value=[])
@mock.patch.object(
rest.PowerMaxRest, 'get_slo_list',
return_value=tpd.PowerMaxData.powermax_slo_details['sloId'])
def test_get_slo_workload_combinations_next_gen(self, mck_slo, mck_wl,
mck_model):
self.common.next_gen = True
self.common.array_model = 'PowerMax 2000'
finalarrayinfolist = self.common._get_slo_workload_combinations(
self.data.array_info_no_wl)
self.assertTrue(len(finalarrayinfolist) == 14)
@mock.patch.object(
rest.PowerMaxRest, 'get_vmax_model',
return_value=tpd.PowerMaxData.vmax_model_details['model'])
@mock.patch.object(rest.PowerMaxRest, 'get_workload_settings',
return_value=[])
@mock.patch.object(
rest.PowerMaxRest, 'get_slo_list',
return_value=tpd.PowerMaxData.powermax_slo_details['sloId'])
def test_get_slo_workload_combinations_next_gen_vmax(
self, mck_slo, mck_wl, mck_model):
self.common.next_gen = True
finalarrayinfolist = self.common._get_slo_workload_combinations(
self.data.array_info_no_wl)
self.assertTrue(len(finalarrayinfolist) == 18)
def test_get_slo_workload_combinations_failed(self):
    """An empty array_info dict raises VolumeBackendAPIException."""
    array_info = {}
    self.assertRaises(
        exception.VolumeBackendAPIException,
        self.common._get_slo_workload_combinations, array_info)
@mock.patch.object(
common.PowerMaxCommon, 'get_volume_metadata',
return_value={'device-meta-key-1': 'device-meta-value-1',
'device-meta-key-2': 'device-meta-value-2'})
def test_create_volume(self, mck_meta):
ref_model_update = (
{'provider_location': six.text_type(self.data.provider_location),
'metadata': {'device-meta-key-1': 'device-meta-value-1',
'device-meta-key-2': 'device-meta-value-2',
'user-meta-key-1': 'user-meta-value-1',
'user-meta-key-2': 'user-meta-value-2'}})
volume = deepcopy(self.data.test_volume)
volume.metadata = {'user-meta-key-1': 'user-meta-value-1',
'user-meta-key-2': 'user-meta-value-2'}
model_update = self.common.create_volume(volume)
self.assertEqual(ref_model_update, model_update)
@mock.patch.object(common.PowerMaxCommon, 'get_volume_metadata',
return_value='')
def test_create_volume_qos(self, mck_meta):
ref_model_update = (
{'provider_location': six.text_type(self.data.provider_location),
'metadata': ''})
extra_specs = deepcopy(self.data.extra_specs_intervals_set)
extra_specs['qos'] = {
'total_iops_sec': '4000', 'DistributionType': 'Always'}
with mock.patch.object(self.utils, 'get_volumetype_extra_specs',
return_value=extra_specs):
model_update = self.common.create_volume(self.data.test_volume)
self.assertEqual(ref_model_update, model_update)
@mock.patch.object(common.PowerMaxCommon, '_clone_check')
@mock.patch.object(common.PowerMaxCommon, 'get_volume_metadata',
return_value='')
def test_create_volume_from_snapshot(self, mck_meta, mck_clone_chk):
ref_model_update = ({'provider_location': six.text_type(
deepcopy(self.data.provider_location_snapshot))})
model_update = self.common.create_volume_from_snapshot(
self.data.test_clone_volume, self.data.test_snapshot)
self.assertEqual(
ast.literal_eval(ref_model_update['provider_location']),
ast.literal_eval(model_update['provider_location']))
# Test from legacy snapshot
ref_model_update = (
{'provider_location': six.text_type(
deepcopy(self.data.provider_location_clone))})
model_update = self.common.create_volume_from_snapshot(
self.data.test_clone_volume, self.data.test_legacy_snapshot)
self.assertEqual(
ast.literal_eval(ref_model_update['provider_location']),
ast.literal_eval(model_update['provider_location']))
@mock.patch.object(common.PowerMaxCommon, '_clone_check')
@mock.patch.object(common.PowerMaxCommon, 'get_volume_metadata',
return_value='')
def test_cloned_volume(self, mck_meta, mck_clone_chk):
ref_model_update = ({'provider_location': six.text_type(
self.data.provider_location_clone)})
model_update = self.common.create_cloned_volume(
self.data.test_clone_volume, self.data.test_volume)
self.assertEqual(
ast.literal_eval(ref_model_update['provider_location']),
ast.literal_eval(model_update['provider_location']))
def test_delete_volume(self):
    """delete_volume delegates to the internal _delete_volume helper."""
    with mock.patch.object(self.common, '_delete_volume') as mock_delete:
        self.common.delete_volume(self.data.test_volume)
        mock_delete.assert_called_once_with(self.data.test_volume)
@mock.patch.object(common.PowerMaxCommon, '_clone_check')
@mock.patch.object(
common.PowerMaxCommon, 'get_snapshot_metadata',
return_value={'snap-meta-key-1': 'snap-meta-value-1',
'snap-meta-key-2': 'snap-meta-value-2'})
def test_create_snapshot(self, mck_meta, mck_clone_chk):
ref_model_update = (
{'provider_location': six.text_type(self.data.snap_location),
'metadata': {'snap-meta-key-1': 'snap-meta-value-1',
'snap-meta-key-2': 'snap-meta-value-2',
'user-meta-key-1': 'user-meta-value-1',
'user-meta-key-2': 'user-meta-value-2'}})
snapshot = deepcopy(self.data.test_snapshot_manage)
snapshot.metadata = {'user-meta-key-1': 'user-meta-value-1',
'user-meta-key-2': 'user-meta-value-2'}
model_update = self.common.create_snapshot(
snapshot, self.data.test_volume)
self.assertEqual(ref_model_update, model_update)
def test_delete_snapshot(self):
snap_name = self.data.snap_location['snap_name']
sourcedevice_id = self.data.snap_location['source_id']
generation = 0
with mock.patch.object(
self.provision, 'delete_volume_snap') as mock_delete_snap:
self.common.delete_snapshot(
self.data.test_snapshot, self.data.test_volume)
mock_delete_snap.assert_called_once_with(
self.data.array, snap_name, [sourcedevice_id],
restored=False, generation=generation)
def test_delete_snapshot_not_found(self):
with mock.patch.object(self.common, '_parse_snap_info',
return_value=(None, 'Something')):
with mock.patch.object(
self.provision, 'delete_volume_snap') as mock_delete_snap:
self.common.delete_snapshot(self.data.test_snapshot,
self.data.test_volume)
mock_delete_snap.assert_not_called()
def test_delete_legacy_snap(self):
with mock.patch.object(self.common, '_delete_volume') as mock_del:
self.common.delete_snapshot(self.data.test_legacy_snapshot,
self.data.test_legacy_vol)
mock_del.assert_called_once_with(self.data.test_legacy_snapshot)
@mock.patch.object(masking.PowerMaxMasking,
                   'return_volume_to_fast_managed_group')
@mock.patch.object(masking.PowerMaxMasking, 'remove_and_reset_members')
def test_remove_members(self, mock_rm, mock_return):
    """_remove_members forwards to masking.remove_and_reset_members
    (non-multiattach case: last positional argument is False)."""
    array = self.data.array
    device_id = self.data.device_id
    volume = self.data.test_volume
    volume_name = self.data.test_volume.name
    extra_specs = self.data.extra_specs
    self.common._remove_members(
        array, volume, device_id, extra_specs, self.data.connector, False)
    mock_rm.assert_called_once_with(
        array, volume, device_id, volume_name,
        extra_specs, True, self.data.connector, async_grp=None)
@mock.patch.object(masking.PowerMaxMasking,
'return_volume_to_fast_managed_group')
@mock.patch.object(masking.PowerMaxMasking, 'remove_and_reset_members')
def test_remove_members_multiattach_case(self, mock_rm, mock_return):
array = self.data.array
device_id = self.data.device_id
volume = self.data.test_volume
volume_name = self.data.test_volume.name
extra_specs = self.data.extra_specs
self.common._remove_members(
array, volume, device_id, extra_specs, self.data.connector, True)
mock_rm.assert_called_once_with(
array, volume, device_id, volume_name,
extra_specs, False, self.data.connector, async_grp=None)
mock_return.assert_called_once()
def test_unmap_lun(self):
    """_unmap_lun resolves extra specs and calls _remove_members once."""
    array = self.data.array
    device_id = self.data.device_id
    volume = self.data.test_volume
    extra_specs = deepcopy(self.data.extra_specs_intervals_set)
    extra_specs[utils.PORTGROUPNAME] = self.data.port_group_name_f
    connector = self.data.connector
    with mock.patch.object(self.common, '_remove_members') as mock_remove:
        self.common._unmap_lun(volume, connector)
        mock_remove.assert_called_once_with(
            array, volume, device_id, extra_specs,
            connector, False, async_grp=None)
@mock.patch.object(common.PowerMaxCommon, '_remove_members')
def test_unmap_lun_attachments(self, mock_rm):
volume1 = deepcopy(self.data.test_volume)
volume1.volume_attachment.objects = [self.data.test_volume_attachment]
connector = self.data.connector
self.common._unmap_lun(volume1, connector)
mock_rm.assert_called_once()
mock_rm.reset_mock()
volume2 = deepcopy(volume1)
volume2.volume_attachment.objects.append(
self.data.test_volume_attachment)
self.common._unmap_lun(volume2, connector)
mock_rm.assert_not_called()
def test_unmap_lun_qos(self):
array = self.data.array
device_id = self.data.device_id
volume = self.data.test_volume
extra_specs = deepcopy(self.data.extra_specs_intervals_set)
extra_specs[utils.PORTGROUPNAME] = self.data.port_group_name_f
extra_specs['qos'] = {
'total_iops_sec': '4000', 'DistributionType': 'Always'}
connector = self.data.connector
with mock.patch.object(self.common, '_remove_members') as mock_remove:
with mock.patch.object(self.utils, 'get_volumetype_extra_specs',
return_value=extra_specs):
self.common._unmap_lun(volume, connector)
mock_remove.assert_called_once_with(
array, volume, device_id, extra_specs,
connector, False, async_grp=None)
def test_unmap_lun_not_mapped(self):
volume = self.data.test_volume
connector = self.data.connector
with mock.patch.object(self.common, 'find_host_lun_id',
return_value=({}, False)):
with mock.patch.object(
self.common, '_remove_members') as mock_remove:
self.common._unmap_lun(volume, connector)
mock_remove.assert_not_called()
    def test_unmap_lun_connector_is_none(self):
        """Test _unmap_lun when the connector is None."""
        array = self.data.array
        device_id = self.data.device_id
        volume = self.data.test_volume
        extra_specs = deepcopy(self.data.extra_specs_intervals_set)
        extra_specs['storagetype:portgroupname'] = (
            self.data.port_group_name_f)
        with mock.patch.object(self.common, '_remove_members') as mock_remove:
            self.common._unmap_lun(volume, None)
            mock_remove.assert_called_once_with(
                array, volume, device_id, extra_specs, None,
                False, async_grp=None)
def test_initialize_connection_already_mapped(self):
volume = self.data.test_volume
connector = self.data.connector
host_lun = (self.data.maskingview[0]['maskingViewConnection'][0][
'host_lun_address'])
ref_dict = {'hostlunid': int(host_lun, 16),
'maskingview': self.data.masking_view_name_f,
'array': self.data.array,
'device_id': self.data.device_id}
device_info_dict = self.common.initialize_connection(volume, connector)
self.assertEqual(ref_dict, device_info_dict)
    def test_initialize_connection_already_mapped_next_gen(self):
        """Test initialize_connection already mapped on a next-gen array."""
        with mock.patch.object(self.rest, 'is_next_gen_array',
                               return_value=True):
            volume = self.data.test_volume
            connector = self.data.connector
            host_lun = (self.data.maskingview[0]['maskingViewConnection'][0][
                'host_lun_address'])
            ref_dict = {'hostlunid': int(host_lun, 16),
                        'maskingview': self.data.masking_view_name_f,
                        'array': self.data.array,
                        'device_id': self.data.device_id}
            device_info_dict = self.common.initialize_connection(volume,
                                                                 connector)
            self.assertEqual(ref_dict, device_info_dict)
    @mock.patch.object(common.PowerMaxCommon, 'find_host_lun_id',
                       return_value=({}, False))
    @mock.patch.object(
        common.PowerMaxCommon, '_attach_volume',
        return_value=({}, tpd.PowerMaxData.port_group_name_f))
    def test_initialize_connection_not_mapped(self, mock_attach, mock_id):
        """Test initialize_connection when the volume is not yet mapped."""
        volume = self.data.test_volume
        connector = self.data.connector
        extra_specs = deepcopy(self.data.extra_specs_intervals_set)
        extra_specs[utils.PORTGROUPNAME] = self.data.port_group_name_f
        masking_view_dict = self.common._populate_masking_dict(
            volume, connector, extra_specs)
        masking_view_dict[utils.IS_MULTIATTACH] = False
        device_info_dict = self.common.initialize_connection(
            volume, connector)
        self.assertEqual({}, device_info_dict)
        mock_attach.assert_called_once_with(
            volume, connector, extra_specs, masking_view_dict)
    @mock.patch.object(rest.PowerMaxRest, 'is_next_gen_array',
                       return_value=True)
    @mock.patch.object(common.PowerMaxCommon, 'find_host_lun_id',
                       return_value=({}, False))
    @mock.patch.object(
        common.PowerMaxCommon, '_attach_volume',
        return_value=({}, tpd.PowerMaxData.port_group_name_f))
    def test_initialize_connection_not_mapped_next_gen(self, mock_attach,
                                                       mock_id, mck_gen):
        """Test initialize_connection not mapped on a next-gen array."""
        volume = self.data.test_volume
        connector = self.data.connector
        device_info_dict = self.common.initialize_connection(
            volume, connector)
        self.assertEqual({}, device_info_dict)
    @mock.patch.object(
        masking.PowerMaxMasking, 'pre_multiattach',
        return_value=tpd.PowerMaxData.masking_view_dict_multiattach)
    @mock.patch.object(common.PowerMaxCommon, 'find_host_lun_id',
                       return_value=({}, True))
    @mock.patch.object(
        common.PowerMaxCommon, '_attach_volume',
        return_value=({}, tpd.PowerMaxData.port_group_name_f))
    def test_initialize_connection_multiattach_case(
            self, mock_attach, mock_id, mock_pre):
        """Test initialize_connection for the multiattach case."""
        volume = self.data.test_volume
        connector = self.data.connector
        self.common.initialize_connection(volume, connector)
        mock_attach.assert_called_once()
        mock_pre.assert_called_once()
    def test_attach_volume_success(self):
        """Test _attach_volume success path."""
        volume = self.data.test_volume
        connector = self.data.connector
        extra_specs = deepcopy(self.data.extra_specs)
        extra_specs[utils.PORTGROUPNAME] = self.data.port_group_name_f
        masking_view_dict = self.common._populate_masking_dict(
            volume, connector, extra_specs)
        host_lun = (self.data.maskingview[0]['maskingViewConnection'][0][
            'host_lun_address'])
        ref_dict = {'hostlunid': int(host_lun, 16),
                    'maskingview': self.data.masking_view_name_f,
                    'array': self.data.array,
                    'device_id': self.data.device_id}
        with mock.patch.object(self.masking, 'setup_masking_view',
                               return_value={
                                   utils.PORTGROUPNAME:
                                       self.data.port_group_name_f}):
            device_info_dict, pg = self.common._attach_volume(
                volume, connector, extra_specs, masking_view_dict)
        self.assertEqual(ref_dict, device_info_dict)
    @mock.patch.object(masking.PowerMaxMasking,
                       'check_if_rollback_action_for_masking_required')
    @mock.patch.object(masking.PowerMaxMasking, 'setup_masking_view',
                       return_value={})
    @mock.patch.object(common.PowerMaxCommon, 'find_host_lun_id',
                       return_value=({}, False))
    def test_attach_volume_failed(self, mock_lun, mock_setup, mock_rollback):
        """Test _attach_volume raises and rolls back when setup fails."""
        volume = self.data.test_volume
        connector = self.data.connector
        extra_specs = deepcopy(self.data.extra_specs)
        extra_specs[utils.PORTGROUPNAME] = self.data.port_group_name_f
        masking_view_dict = self.common._populate_masking_dict(
            volume, connector, extra_specs)
        self.assertRaises(exception.VolumeBackendAPIException,
                          self.common._attach_volume, volume,
                          connector, extra_specs, masking_view_dict)
        device_id = self.data.device_id
        (mock_rollback.assert_called_once_with(
            self.data.array, volume, device_id, {}))
def test_terminate_connection(self):
volume = self.data.test_volume
connector = self.data.connector
with mock.patch.object(self.common, '_unmap_lun') as mock_unmap:
self.common.terminate_connection(volume, connector)
mock_unmap.assert_called_once_with(
volume, connector)
    @mock.patch.object(provision.PowerMaxProvision, 'extend_volume')
    @mock.patch.object(common.PowerMaxCommon, '_array_ode_capabilities_check',
                       return_value=[True] * 4)
    @mock.patch.object(common.PowerMaxCommon, '_extend_vol_validation_checks')
    def test_extend_vol_no_rep_success(self, mck_val_chk, mck_ode_chk,
                                       mck_extend):
        """Test extend_volume success with no replication."""
        volume = self.data.test_volume
        array = self.data.array
        device_id = self.data.device_id
        new_size = self.data.test_volume.size
        ref_extra_specs = deepcopy(self.data.extra_specs_intervals_set)
        ref_extra_specs[utils.PORTGROUPNAME] = self.data.port_group_name_f
        self.common.extend_volume(volume, new_size)
        mck_extend.assert_called_once_with(
            array, device_id, new_size, ref_extra_specs, None)
    @mock.patch.object(provision.PowerMaxProvision, 'extend_volume')
    @mock.patch.object(common.PowerMaxCommon, 'get_rdf_details',
                       return_value=(10, None))
    @mock.patch.object(common.PowerMaxCommon, '_array_ode_capabilities_check',
                       return_value=[True] * 4)
    @mock.patch.object(common.PowerMaxCommon, '_extend_vol_validation_checks')
    def test_extend_vol_rep_success(self, mck_val_chk, mck_ode_chk,
                                    mck_get_rdf, mck_extend):
        """Test extend_volume success on a replicated volume."""
        volume = self.data.test_volume
        array = self.data.array
        device_id = self.data.device_id
        new_size = self.data.test_volume.size
        ref_extra_specs = deepcopy(self.data.rep_extra_specs_ode)
        with mock.patch.object(self.common, '_initial_setup',
                               return_value=self.data.rep_extra_specs_ode):
            self.common.next_gen = True
            self.common.rep_config = deepcopy(ref_extra_specs)
            self.common.extend_volume(volume, new_size)
            mck_extend.assert_called_with(
                array, device_id, new_size, ref_extra_specs, 10)
    @mock.patch.object(common.PowerMaxCommon, '_sync_check')
    def test_extend_volume_failed_snap_src(self, mck_sync):
        """Test extend_volume fails when the volume is a snapvx source."""
        volume = self.data.test_volume
        new_size = self.data.test_volume.size
        with mock.patch.object(self.rest, 'is_vol_in_rep_session',
                               return_value=(False, True, None)):
            self.assertRaises(exception.VolumeBackendAPIException,
                              self.common.extend_volume, volume, new_size)
def test_extend_volume_failed_no_device_id(self):
volume = self.data.test_volume
new_size = self.data.test_volume.size
with mock.patch.object(self.common, '_find_device_on_array',
return_value=None):
self.assertRaises(exception.VolumeBackendAPIException,
self.common.extend_volume, volume, new_size)
@mock.patch.object(common.PowerMaxCommon, '_sync_check')
def test_extend_volume_failed_wrong_size(self, mck_sync):
volume = self.data.test_volume
new_size = 1
self.assertRaises(exception.VolumeBackendAPIException,
self.common.extend_volume, volume, new_size)
def test_update_volume_stats(self):
data = self.common.update_volume_stats()
self.assertEqual('CommonTests', data['volume_backend_name'])
    def test_update_volume_stats_no_wlp(self):
        """Test update_volume_stats when srp stats have no workload info."""
        with mock.patch.object(self.common, '_update_srp_stats',
                               return_value=('123s#SRP_1#None#None',
                                             100, 90, 90, 10)):
            data = self.common.update_volume_stats()
            self.assertEqual('CommonTests', data['volume_backend_name'])
def test_update_srp_stats_with_wl(self):
with mock.patch.object(self.rest, 'get_srp_by_name',
return_value=self.data.srp_details):
location_info, __, __, __, __ = self.common._update_srp_stats(
self.data.array_info_wl)
self.assertEqual(location_info, '000197800123#SRP_1#Diamond#OLTP')
    def test_update_srp_stats_no_wl(self):
        """Test _update_srp_stats location info with no workload."""
        with mock.patch.object(self.rest, 'get_srp_by_name',
                               return_value=self.data.srp_details):
            location_info, __, __, __, __ = self.common._update_srp_stats(
                self.data.array_info_no_wl)
            self.assertEqual(location_info, '000197800123#SRP_1#Diamond')
def test_find_device_on_array_success(self):
volume = self.data.test_volume
extra_specs = self.data.extra_specs
ref_device_id = self.data.device_id
founddevice_id = self.common._find_device_on_array(volume, extra_specs)
self.assertEqual(ref_device_id, founddevice_id)
    def test_find_device_on_array_provider_location_not_string(self):
        """Test _find_device_on_array when provider_location is not set."""
        volume = fake_volume.fake_volume_obj(
            context='cxt', provider_location=None)
        extra_specs = self.data.extra_specs
        founddevice_id = self.common._find_device_on_array(
            volume, extra_specs)
        self.assertIsNone(founddevice_id)
def test_find_legacy_device_on_array(self):
volume = self.data.test_legacy_vol
extra_specs = self.data.extra_specs
ref_device_id = self.data.device_id
founddevice_id = self.common._find_device_on_array(volume, extra_specs)
self.assertEqual(ref_device_id, founddevice_id)
    def test_find_host_lun_id_attached(self):
        """Test find_host_lun_id for an attached volume."""
        volume = self.data.test_volume
        extra_specs = self.data.extra_specs
        host = 'HostX'
        host_lun = (
            self.data.maskingview[0]['maskingViewConnection'][0][
                'host_lun_address'])
        ref_masked = {'hostlunid': int(host_lun, 16),
                      'maskingview': self.data.masking_view_name_f,
                      'array': self.data.array,
                      'device_id': self.data.device_id}
        maskedvols, __ = self.common.find_host_lun_id(volume, host,
                                                      extra_specs)
        self.assertEqual(ref_masked, maskedvols)
    def test_find_host_lun_id_not_attached(self):
        """Test find_host_lun_id when the volume is not attached."""
        volume = self.data.test_volume
        extra_specs = self.data.extra_specs
        host = 'HostX'
        with mock.patch.object(self.rest, 'find_mv_connections_for_vol',
                               return_value=None):
            maskedvols, __ = self.common.find_host_lun_id(
                volume, host, extra_specs)
            self.assertEqual({}, maskedvols)
@mock.patch.object(
common.PowerMaxCommon, '_get_masking_views_from_volume',
return_value=([], [tpd.PowerMaxData.masking_view_name_f]))
def test_find_host_lun_id_multiattach(self, mock_mask):
volume = self.data.test_volume
extra_specs = self.data.extra_specs
__, is_multiattach = self.common.find_host_lun_id(
volume, 'HostX', extra_specs)
self.assertTrue(is_multiattach)
    @mock.patch.object(common.PowerMaxCommon, 'get_remote_target_device',
                       return_value=tpd.PowerMaxData.device_id2)
    @mock.patch.object(rest.PowerMaxRest, 'get_volume',
                       return_value=tpd.PowerMaxData.volume_details[0])
    def test_find_host_lun_id_rep_extra_specs(self, mock_vol, mock_tgt):
        """Test find_host_lun_id with rep extra specs gets remote target."""
        self.common.find_host_lun_id(
            self.data.test_volume, 'HostX',
            self.data.extra_specs, self.data.rep_extra_specs)
        mock_tgt.assert_called_once()
    def test_get_masking_views_from_volume(self):
        """Test get_masking_views_from_volume, including a metro device."""
        array = self.data.array
        device_id = self.data.device_id
        host = 'HostX'
        ref_mv_list = [self.data.masking_view_name_f]
        maskingview_list, __ = self.common.get_masking_views_from_volume(
            array, self.data.test_volume, device_id, host)
        self.assertEqual(ref_mv_list, maskingview_list)
        # is metro
        with mock.patch.object(self.utils, 'is_metro_device',
                               return_value=True):
            __, is_metro = self.common.get_masking_views_from_volume(
                array, self.data.test_volume, device_id, host)
            self.assertTrue(is_metro)
def test_get_masking_views_from_volume_wrong_host(self):
array = self.data.array
device_id = self.data.device_id
host = 'DifferentHost'
maskingview_list, __ = self.common.get_masking_views_from_volume(
array, self.data.test_volume, device_id, host)
self.assertEqual([], maskingview_list)
    def test_find_host_lun_id_no_host_check(self):
        """Test find_host_lun_id with no host supplied."""
        volume = self.data.test_volume
        extra_specs = self.data.extra_specs
        host_lun = (self.data.maskingview[0]['maskingViewConnection'][0][
            'host_lun_address'])
        ref_masked = {'hostlunid': int(host_lun, 16),
                      'maskingview': self.data.masking_view_name_f,
                      'array': self.data.array,
                      'device_id': self.data.device_id}
        maskedvols, __ = self.common.find_host_lun_id(
            volume, None, extra_specs)
        self.assertEqual(ref_masked, maskedvols)
def test_initial_setup_success(self):
volume = self.data.test_volume
ref_extra_specs = deepcopy(self.data.extra_specs_intervals_set)
ref_extra_specs[utils.PORTGROUPNAME] = self.data.port_group_name_f
extra_specs = self.common._initial_setup(volume)
self.assertEqual(ref_extra_specs, extra_specs)
    def test_initial_setup_failed(self):
        """Test _initial_setup raises when cinder config is unavailable."""
        volume = self.data.test_volume
        with mock.patch.object(
                self.common, 'get_attributes_from_cinder_config',
                return_value=None):
            self.assertRaises(exception.VolumeBackendAPIException,
                              self.common._initial_setup, volume)
    @mock.patch.object(common.PowerMaxCommon, 'get_remote_target_device',
                       return_value=tpd.PowerMaxData.device_id2)
    def test_populate_masking_dict(self, mock_tgt):
        """Test _populate_masking_dict base, metro and error cases."""
        volume = self.data.test_volume
        connector = self.data.connector
        extra_specs = deepcopy(self.data.extra_specs)
        extra_specs[utils.PORTGROUPNAME] = self.data.port_group_name_f
        extra_specs[utils.WORKLOAD] = self.data.workload
        ref_mv_dict = self.data.masking_view_dict
        self.common.next_gen = False
        masking_view_dict = self.common._populate_masking_dict(
            volume, connector, extra_specs)
        self.assertEqual(ref_mv_dict, masking_view_dict)
        # Metro volume, pass in rep_extra_specs and retrieve target device
        rep_extra_specs = deepcopy(self.data.rep_extra_specs)
        rep_extra_specs[utils.PORTGROUPNAME] = self.data.port_group_name_f
        self.common._populate_masking_dict(
            volume, connector, extra_specs, rep_extra_specs)
        mock_tgt.assert_called_once()
        # device_id is None
        with mock.patch.object(self.common, '_find_device_on_array',
                               return_value=None):
            self.assertRaises(exception.VolumeBackendAPIException,
                              self.common._populate_masking_dict,
                              volume, connector, extra_specs)
    def test_populate_masking_dict_no_slo(self):
        """Test _populate_masking_dict with no slo."""
        volume = self.data.test_volume
        connector = self.data.connector
        extra_specs = {'slo': None, 'workload': None, 'srp': self.data.srp,
                       'array': self.data.array,
                       utils.PORTGROUPNAME: self.data.port_group_name_f}
        ref_mv_dict = self.data.masking_view_dict_no_slo
        masking_view_dict = self.common._populate_masking_dict(
            volume, connector, extra_specs)
        self.assertEqual(ref_mv_dict, masking_view_dict)
    def test_populate_masking_dict_compr_disabled(self):
        """Test _populate_masking_dict with compression disabled."""
        volume = self.data.test_volume
        connector = self.data.connector
        extra_specs = deepcopy(self.data.extra_specs)
        extra_specs[utils.PORTGROUPNAME] = self.data.port_group_name_f
        extra_specs[utils.DISABLECOMPRESSION] = "true"
        ref_mv_dict = self.data.masking_view_dict_compression_disabled
        extra_specs[utils.WORKLOAD] = self.data.workload
        masking_view_dict = self.common._populate_masking_dict(
            volume, connector, extra_specs)
        self.assertEqual(ref_mv_dict, masking_view_dict)
def test_populate_masking_dict_next_gen(self):
volume = self.data.test_volume
connector = self.data.connector
extra_specs = deepcopy(self.data.extra_specs)
extra_specs[utils.PORTGROUPNAME] = self.data.port_group_name_f
self.common.next_gen = True
masking_view_dict = self.common._populate_masking_dict(
volume, connector, extra_specs)
self.assertEqual('NONE', masking_view_dict[utils.WORKLOAD])
@mock.patch.object(common.PowerMaxCommon, '_clone_check')
def test_create_cloned_volume(self, mck_clone_chk):
volume = self.data.test_clone_volume
source_volume = self.data.test_volume
extra_specs = self.data.extra_specs
ref_dict = self.data.provider_location_clone
clone_dict = self.common._create_cloned_volume(
volume, source_volume, extra_specs)
self.assertEqual(ref_dict, clone_dict)
    @mock.patch.object(common.PowerMaxCommon, '_clone_check')
    def test_create_cloned_volume_is_snapshot(self, mck_clone_chk):
        """Test _create_cloned_volume when the target is a snapshot."""
        volume = self.data.test_snapshot
        source_volume = self.data.test_volume
        extra_specs = self.data.extra_specs
        ref_dict = self.data.snap_location
        clone_dict = self.common._create_cloned_volume(
            volume, source_volume, extra_specs, True, False)
        self.assertEqual(ref_dict, clone_dict)
    @mock.patch.object(common.PowerMaxCommon, '_clone_check')
    def test_create_cloned_volume_from_snapshot(self, mck_clone_chk):
        """Test _create_cloned_volume from a snapshot source."""
        volume = self.data.test_clone_volume
        source_volume = self.data.test_snapshot
        extra_specs = self.data.extra_specs
        ref_dict = self.data.provider_location_snapshot
        clone_dict = self.common._create_cloned_volume(
            volume, source_volume, extra_specs, False, True)
        self.assertEqual(ref_dict, clone_dict)
    def test_create_cloned_volume_not_licenced(self):
        """Test _create_cloned_volume when snapvx is not licensed."""
        volume = self.data.test_clone_volume
        source_volume = self.data.test_volume
        extra_specs = self.data.extra_specs
        with mock.patch.object(self.rest, 'is_snapvx_licensed',
                               return_value=False):
            self.assertRaises(exception.VolumeBackendAPIException,
                              self.common._create_cloned_volume,
                              volume, source_volume, extra_specs)
    @mock.patch.object(common.PowerMaxCommon,
                       '_find_device_on_array')
    def test_create_cloned_volume_not_licenced_2(self, mock_device):
        """Test unlicensed snapvx skips the device lookup entirely."""
        volume = self.data.test_clone_volume
        source_volume = self.data.test_volume
        extra_specs = self.data.extra_specs
        with mock.patch.object(self.rest, 'is_snapvx_licensed',
                               return_value=False):
            self.assertRaises(exception.VolumeBackendAPIException,
                              self.common._create_cloned_volume,
                              volume, source_volume, extra_specs,
                              False, False)
            mock_device.assert_not_called()
    @mock.patch.object(common.PowerMaxCommon,
                       '_find_device_on_array',
                       return_value=None)
    @mock.patch.object(common.PowerMaxCommon,
                       '_clone_check')
    def test_create_cloned_volume_source_not_found(
            self, mock_check, mock_device):
        """Test _create_cloned_volume when the source device is missing."""
        volume = self.data.test_clone_volume
        source_volume = self.data.test_volume
        extra_specs = self.data.extra_specs
        with mock.patch.object(self.rest, 'is_snapvx_licensed',
                               return_value=True):
            self.assertRaises(exception.VolumeBackendAPIException,
                              self.common._create_cloned_volume,
                              volume, source_volume, extra_specs,
                              False, False)
            mock_check.assert_not_called()
def test_parse_snap_info_found(self):
ref_device_id = self.data.device_id
ref_snap_name = self.data.snap_location['snap_name']
sourcedevice_id, foundsnap_name = self.common._parse_snap_info(
self.data.array, self.data.test_snapshot)
self.assertEqual(ref_device_id, sourcedevice_id)
self.assertEqual(ref_snap_name, foundsnap_name)
def test_parse_snap_info_not_found(self):
ref_snap_name = None
with mock.patch.object(self.rest, 'get_volume_snap',
return_value=None):
__, foundsnap_name = self.common._parse_snap_info(
self.data.array, self.data.test_snapshot)
self.assertIsNone(ref_snap_name, foundsnap_name)
    def test_parse_snap_info_exception(self):
        """Test _parse_snap_info when the REST lookup raises."""
        with mock.patch.object(
                self.rest, 'get_volume_snap',
                side_effect=exception.VolumeBackendAPIException):
            __, foundsnap_name = self.common._parse_snap_info(
                self.data.array, self.data.test_snapshot)
            self.assertIsNone(foundsnap_name)
def test_parse_snap_info_provider_location_not_string(self):
snapshot = fake_snapshot.fake_snapshot_obj(
context='ctxt', provider_loaction={'not': 'string'})
sourcedevice_id, foundsnap_name = self.common._parse_snap_info(
self.data.array, snapshot)
self.assertIsNone(foundsnap_name)
def test_create_snapshot_success(self):
array = self.data.array
snapshot = self.data.test_snapshot
source_device_id = self.data.device_id
extra_specs = self.data.extra_specs
ref_dict = {'snap_name': self.data.test_snapshot_snap_name,
'source_id': self.data.device_id}
snap_dict = self.common._create_snapshot(
array, snapshot, source_device_id, extra_specs)
self.assertEqual(ref_dict, snap_dict)
    def test_create_snapshot_exception(self):
        """Test _create_snapshot when the provision call raises."""
        array = self.data.array
        snapshot = self.data.test_snapshot
        source_device_id = self.data.device_id
        extra_specs = self.data.extra_specs
        with mock.patch.object(
                self.provision, 'create_volume_snapvx',
                side_effect=exception.VolumeBackendAPIException):
            self.assertRaises(exception.VolumeBackendAPIException,
                              self.common._create_snapshot,
                              array, snapshot, source_device_id, extra_specs)
    @mock.patch.object(masking.PowerMaxMasking,
                       'remove_vol_from_storage_group')
    def test_delete_volume_from_srp(self, mock_rm):
        """Test _delete_volume calls _delete_from_srp with expected args."""
        array = self.data.array
        device_id = self.data.device_id
        volume_name = self.data.test_volume.name
        ref_extra_specs = self.data.extra_specs_intervals_set
        ref_extra_specs[utils.PORTGROUPNAME] = self.data.port_group_name_f
        volume = self.data.test_volume
        with mock.patch.object(self.common, '_sync_check'):
            with mock.patch.object(
                    self.common, '_delete_from_srp') as mock_delete:
                self.common._delete_volume(volume)
                mock_delete.assert_called_once_with(
                    array, device_id, volume_name, ref_extra_specs)
def test_delete_volume_not_found(self):
volume = self.data.test_volume
with mock.patch.object(self.common, '_find_device_on_array',
return_value=None):
with mock.patch.object(
self.common, '_delete_from_srp') as mock_delete:
self.common._delete_volume(volume)
mock_delete.assert_not_called()
    def test_create_volume_success(self):
        """Test _create_volume success returns the provider location."""
        volume_name = '1'
        volume_size = self.data.test_volume.size
        extra_specs = self.data.extra_specs
        ref_dict = self.data.provider_location
        with mock.patch.object(self.rest, 'get_volume',
                               return_value=self.data.volume_details[0]):
            volume_dict = self.common._create_volume(
                volume_name, volume_size, extra_specs)
        self.assertEqual(ref_dict, volume_dict)
    def test_create_volume_success_next_gen(self):
        """Test _create_volume on a next-gen array."""
        volume_name = '1'
        volume_size = self.data.test_volume.size
        extra_specs = self.data.extra_specs
        self.common.next_gen = True
        with mock.patch.object(
                self.utils, 'is_compression_disabled', return_value=True):
            with mock.patch.object(
                    self.rest, 'get_array_model_info',
                    return_value=('PowerMax 2000', True)):
                with mock.patch.object(
                        self.masking,
                        'get_or_create_default_storage_group') as mock_get:
                    self.common._create_volume(
                        volume_name, volume_size, extra_specs)
                    mock_get.assert_called_once_with(
                        extra_specs['array'], extra_specs[utils.SRP],
                        extra_specs[utils.SLO], 'NONE', extra_specs, True,
                        False, None)
    def test_create_volume_failed(self):
        """Test _create_volume failure paths for storage group cleanup."""
        volume_name = self.data.test_volume.name
        volume_size = self.data.test_volume.size
        extra_specs = self.data.extra_specs
        with mock.patch.object(
                self.masking, 'get_or_create_default_storage_group',
                return_value=self.data.failed_resource):
            with mock.patch.object(
                    self.rest, 'delete_storage_group') as mock_delete:
                # path 1: not last vol in sg
                with mock.patch.object(
                        self.rest, 'get_num_vols_in_sg', return_value=2):
                    self.assertRaises(exception.VolumeBackendAPIException,
                                      self.common._create_volume,
                                      volume_name, volume_size, extra_specs)
                    mock_delete.assert_not_called()
                # path 2: last vol in sg, delete sg
                with mock.patch.object(self.rest, 'get_num_vols_in_sg',
                                       return_value=0):
                    self.assertRaises(exception.VolumeBackendAPIException,
                                      self.common._create_volume,
                                      volume_name, volume_size, extra_specs)
                    mock_delete.assert_called_once_with(
                        self.data.array, self.data.failed_resource)
def test_create_volume_incorrect_slo(self):
volume_name = self.data.test_volume.name
volume_size = self.data.test_volume.size
extra_specs = {'slo': 'Diamondz',
'workload': 'DSSSS',
'srp': self.data.srp,
'array': self.data.array}
self.assertRaises(
exception.VolumeBackendAPIException,
self.common._create_volume,
volume_name, volume_size, extra_specs)
    @mock.patch.object(rest.PowerMaxRest, 'is_next_gen_array',
                       return_value=False)
    @mock.patch.object(provision.PowerMaxProvision, 'verify_slo_workload',
                       return_value=(True, True))
    @mock.patch.object(provision.PowerMaxProvision, 'create_volume_from_sg')
    def test_create_volume_in_use_replication_enabled(self, mock_create,
                                                      mock_verify,
                                                      mock_nextgen):
        """Test _create_volume for an in-use replication-enabled volume."""
        volume_name = '1'
        volume_size = self.data.test_volume.size
        rep_extra_specs = self.data.rep_extra_specs3
        with mock.patch.object(
                self.masking,
                'get_or_create_default_storage_group') as mck_sg:
            self.common._create_volume(
                volume_name, volume_size, rep_extra_specs, in_use=True)
            mck_sg.assert_called_once_with(
                rep_extra_specs['array'], rep_extra_specs['srp'],
                rep_extra_specs['slo'], rep_extra_specs['workload'],
                rep_extra_specs, False, True, rep_extra_specs['rep_mode'])
def test_set_vmax_extra_specs(self):
srp_record = self.common.get_attributes_from_cinder_config()
extra_specs = self.common._set_vmax_extra_specs(
self.data.vol_type_extra_specs, srp_record)
ref_extra_specs = deepcopy(self.data.extra_specs_intervals_set)
ref_extra_specs[utils.PORTGROUPNAME] = self.data.port_group_name_f
self.assertEqual(ref_extra_specs, extra_specs)
    def test_set_vmax_extra_specs_no_srp_name(self):
        """Test _set_vmax_extra_specs when no slo list is available."""
        srp_record = self.common.get_attributes_from_cinder_config()
        with mock.patch.object(self.rest, 'get_slo_list',
                               return_value=[]):
            extra_specs = self.common._set_vmax_extra_specs({}, srp_record)
            self.assertIsNone(extra_specs['slo'])
    def test_set_vmax_extra_specs_compr_disabled(self):
        """Test _set_vmax_extra_specs with compression disabled."""
        with mock.patch.object(self.rest, 'is_compression_capable',
                               return_value=True):
            srp_record = self.common.get_attributes_from_cinder_config()
            extra_specs = self.common._set_vmax_extra_specs(
                self.data.vol_type_extra_specs_compr_disabled, srp_record)
            ref_extra_specs = deepcopy(self.data.extra_specs_intervals_set)
            ref_extra_specs[utils.PORTGROUPNAME] = self.data.port_group_name_f
            ref_extra_specs[utils.DISABLECOMPRESSION] = "true"
            self.assertEqual(ref_extra_specs, extra_specs)
    def test_set_vmax_extra_specs_compr_disabled_not_compr_capable(self):
        """Test compression-disabled spec on a non-capable array."""
        srp_record = self.common.get_attributes_from_cinder_config()
        extra_specs = self.common._set_vmax_extra_specs(
            self.data.vol_type_extra_specs_compr_disabled, srp_record)
        # DISABLECOMPRESSION is absent from the reference specs: the flag
        # is not expected to survive on a non-compression-capable array.
        ref_extra_specs = deepcopy(self.data.extra_specs_intervals_set)
        ref_extra_specs[utils.PORTGROUPNAME] = self.data.port_group_name_f
        self.assertEqual(ref_extra_specs, extra_specs)
def test_set_vmax_extra_specs_portgroup_as_spec(self):
srp_record = self.common.get_attributes_from_cinder_config()
extra_specs = self.common._set_vmax_extra_specs(
{utils.PORTGROUPNAME: 'extra_spec_pg'}, srp_record)
self.assertEqual('extra_spec_pg', extra_specs[utils.PORTGROUPNAME])
    def test_set_vmax_extra_specs_no_portgroup_set(self):
        """Test _set_vmax_extra_specs raises when no port group is set."""
        srp_record = {
            'srpName': 'SRP_1', 'RestServerIp': '1.1.1.1',
            'RestPassword': 'smc', 'SSLCert': None, 'RestServerPort': 8443,
            'SSLVerify': False, 'RestUserName': 'smc',
            'SerialNumber': '000197800123'}
        self.assertRaises(exception.VolumeBackendAPIException,
                          self.common._set_vmax_extra_specs,
                          {}, srp_record)
def test_set_vmax_extra_specs_next_gen(self):
srp_record = self.common.get_attributes_from_cinder_config()
self.common.next_gen = True
extra_specs = self.common._set_vmax_extra_specs(
self.data.vol_type_extra_specs, srp_record)
ref_extra_specs = deepcopy(self.data.extra_specs_intervals_set)
ref_extra_specs[utils.PORTGROUPNAME] = self.data.port_group_name_f
self.assertEqual('NONE', extra_specs[utils.WORKLOAD])
    def test_delete_volume_from_srp_success(self):
        """Test _delete_from_srp delegates to the provision layer."""
        array = self.data.array
        device_id = self.data.device_id
        volume_name = self.data.test_volume.name
        extra_specs = self.data.extra_specs
        with mock.patch.object(
                self.provision, 'delete_volume_from_srp') as mock_del:
            self.common._delete_from_srp(array, device_id, volume_name,
                                         extra_specs)
            mock_del.assert_called_once_with(array, device_id, volume_name)
    def test_delete_volume_from_srp_failed(self):
        """Test _delete_from_srp failure does not re-add to the default SG."""
        array = self.data.array
        device_id = self.data.failed_resource
        volume_name = self.data.test_volume.name
        extra_specs = self.data.extra_specs
        with mock.patch.object(
                self.masking,
                'add_volume_to_default_storage_group') as mock_add:
            self.assertRaises(exception.VolumeBackendAPIException,
                              self.common._delete_from_srp, array,
                              device_id, volume_name, extra_specs)
            mock_add.assert_not_called()
    @mock.patch.object(utils.PowerMaxUtils, 'is_replication_enabled',
                       side_effect=[False, True])
    def test_remove_vol_and_cleanup_replication(self, mock_rep_enabled):
        """Test cleanup both with and without replication enabled."""
        array = self.data.array
        device_id = self.data.device_id
        volume = self.data.test_volume
        volume_name = self.data.test_volume.name
        extra_specs = self.data.extra_specs
        with mock.patch.object(
                self.masking, 'remove_and_reset_members') as mock_rm:
            with mock.patch.object(
                    self.common, 'cleanup_lun_replication') as mock_clean:
                # First pass: replication disabled, no cleanup expected.
                self.common._remove_vol_and_cleanup_replication(
                    array, device_id, volume_name, extra_specs, volume)
                mock_rm.assert_called_once_with(
                    array, volume, device_id, volume_name, extra_specs, False)
                mock_clean.assert_not_called()
                # Second pass: replication enabled, cleanup expected.
                self.common._remove_vol_and_cleanup_replication(
                    array, device_id, volume_name, extra_specs, volume)
                mock_clean.assert_called_once_with(
                    volume, volume_name, device_id, extra_specs)
@mock.patch.object(utils.PowerMaxUtils, 'is_volume_failed_over',
side_effect=[True, False])
@mock.patch.object(common.PowerMaxCommon, '_get_replication_extra_specs',
return_value=tpd.PowerMaxData.rep_extra_specs)
def test_get_target_wwns_from_masking_view(self, mock_rep_specs, mock_fo):
ref_wwns = [self.data.wwnn1]
for x in range(0, 2):
target_wwns = self.common._get_target_wwns_from_masking_view(
self.data.device_id, self.data.connector['host'],
self.data.extra_specs)
self.assertEqual(ref_wwns, target_wwns)
    def test_get_target_wwns_from_masking_view_no_mv(self):
        """Test _get_target_wwns_from_masking_view with no masking view."""
        with mock.patch.object(self.common, '_get_masking_views_from_volume',
                               return_value=([], None)):
            target_wwns = self.common._get_target_wwns_from_masking_view(
                self.data.device_id, self.data.connector['host'],
                self.data.extra_specs)
            self.assertEqual([], target_wwns)
    @mock.patch.object(common.PowerMaxCommon, '_get_replication_extra_specs',
                       return_value=tpd.PowerMaxData.rep_extra_specs)
    @mock.patch.object(common.PowerMaxCommon, 'get_remote_target_device',
                       return_value=(tpd.PowerMaxData.device_id2,))
    @mock.patch.object(utils.PowerMaxUtils, 'is_metro_device',
                       side_effect=[False, True])
    def test_get_target_wwns(self, mock_metro, mock_tgt, mock_specs):
        """Test get_target_wwns_from_masking_view metro and non-metro."""
        __, metro_wwns = self.common.get_target_wwns_from_masking_view(
            self.data.test_volume, self.data.connector)
        self.assertEqual([], metro_wwns)
        # Is metro volume
        __, metro_wwns = self.common.get_target_wwns_from_masking_view(
            self.data.test_volume, self.data.connector)
        self.assertEqual([self.data.wwnn1], metro_wwns)
def test_get_port_group_from_masking_view(self):
array = self.data.array
maskingview_name = self.data.masking_view_name_f
with mock.patch.object(self.rest,
'get_element_from_masking_view') as mock_get:
self.common.get_port_group_from_masking_view(
array, maskingview_name)
mock_get.assert_called_once_with(
array, maskingview_name, portgroup=True)
    def test_get_initiator_group_from_masking_view(self):
        """Test get_initiator_group_from_masking_view delegates to REST."""
        array = self.data.array
        maskingview_name = self.data.masking_view_name_f
        with mock.patch.object(
                self.rest, 'get_element_from_masking_view') as mock_get:
            self.common.get_initiator_group_from_masking_view(
                array, maskingview_name)
            mock_get.assert_called_once_with(
                array, maskingview_name, host=True)
    def test_get_common_masking_views(self):
        """Test get_common_masking_views delegates to REST."""
        array = self.data.array
        portgroup_name = self.data.port_group_name_f
        initiator_group_name = self.data.initiatorgroup_name_f
        with mock.patch.object(
                self.rest, 'get_common_masking_views') as mock_get:
            self.common.get_common_masking_views(
                array, portgroup_name, initiator_group_name)
            mock_get.assert_called_once_with(
                array, portgroup_name, initiator_group_name)
def test_get_ip_and_iqn(self):
ref_ip_iqn = [{'iqn': self.data.initiator,
'ip': self.data.ip}]
director = self.data.portgroup[1]['symmetrixPortKey'][0]['directorId']
port = self.data.portgroup[1]['symmetrixPortKey'][0]['portId']
dirport = "%s:%s" % (director, port)
ip_iqn_list = self.common._get_ip_and_iqn(self.data.array, dirport)
self.assertEqual(ref_ip_iqn, ip_iqn_list)
    def test_find_ip_and_iqns(self):
        """_find_ip_and_iqns resolves ip/iqn pairs from a port group."""
        ref_ip_iqn = [{'iqn': self.data.initiator,
                       'ip': self.data.ip}]
        ip_iqn_list = self.common._find_ip_and_iqns(
            self.data.array, self.data.port_group_name_i)
        self.assertEqual(ref_ip_iqn, ip_iqn_list)
    def test_create_replica_snap_name(self):
        """_create_replica with an explicit snap name returns clone info."""
        array = self.data.array
        clone_volume = self.data.test_clone_volume
        source_device_id = self.data.device_id
        snap_name = self.data.snap_location['snap_name']
        ref_dict = self.data.provider_location_snapshot
        clone_dict = self.common._create_replica(
            array, clone_volume, source_device_id,
            self.data.extra_specs, snap_name)
        self.assertEqual(ref_dict, clone_dict)
    def test_create_replica_no_snap_name(self):
        """_create_replica generates a temp snap name when none is given."""
        array = self.data.array
        clone_volume = self.data.test_clone_volume
        source_device_id = self.data.device_id
        # Expected generated name; get_temp_snap_name is mocked to match.
        snap_name = "temp-" + source_device_id + "-snapshot_for_clone"
        ref_dict = self.data.provider_location_clone
        with mock.patch.object(
                self.utils, 'get_temp_snap_name',
                return_value=snap_name) as mock_get_snap:
            clone_dict = self.common._create_replica(
                array, clone_volume, source_device_id,
                self.data.extra_specs)
            self.assertEqual(ref_dict, clone_dict)
            mock_get_snap.assert_called_once_with(source_device_id)
    def test_create_replica_failed_cleanup_target(self):
        """A failed replica create cleans up the target device."""
        array = self.data.array
        clone_volume = self.data.test_clone_volume
        device_id = self.data.device_id
        snap_name = self.data.failed_resource
        clone_name = 'OS-' + clone_volume.id
        extra_specs = self.data.extra_specs
        with mock.patch.object(
                self.common, '_cleanup_target') as mock_cleanup:
            self.assertRaises(
                exception.VolumeBackendAPIException,
                self.common._create_replica, array, clone_volume, device_id,
                self.data.extra_specs, snap_name)
            # Target and source are the same device in this fixture.
            mock_cleanup.assert_called_once_with(
                array, device_id, device_id, clone_name, snap_name,
                extra_specs, target_volume=clone_volume)
    def test_create_replica_failed_no_target(self):
        """No cleanup is attempted when no target device was created."""
        array = self.data.array
        clone_volume = self.data.test_clone_volume
        source_device_id = self.data.device_id
        snap_name = self.data.failed_resource
        # _create_volume reports no device_id, so there is nothing to clean.
        with mock.patch.object(self.common, '_create_volume',
                               return_value={'device_id': None}):
            with mock.patch.object(
                    self.common, '_cleanup_target') as mock_cleanup:
                self.assertRaises(
                    exception.VolumeBackendAPIException,
                    self.common._create_replica, array, clone_volume,
                    source_device_id, self.data.extra_specs, snap_name)
                mock_cleanup.assert_not_called()
    @mock.patch.object(
        utils.PowerMaxUtils,
        'compare_cylinders',
        side_effect=exception.VolumeBackendAPIException)
    def test_create_replica_cylinder_mismatch(self, mock_cyl):
        """A cylinder-compare failure triggers target cleanup."""
        array = self.data.array
        clone_volume = self.data.test_clone_volume
        source_device_id = self.data.device_id
        snap_name = self.data.snap_location['snap_name']
        clone_name = 'OS-' + clone_volume.id
        with mock.patch.object(
                self.common, '_cleanup_target') as mock_cleanup:
            self.assertRaises(
                Exception, self.common._create_replica, array,
                clone_volume, source_device_id,
                self.data.extra_specs, snap_name)  # noqa: ignore=H202
            mock_cleanup.assert_called_once_with(
                array, source_device_id, source_device_id,
                clone_name, snap_name, self.data.extra_specs,
                target_volume=clone_volume)
    @mock.patch.object(
        masking.PowerMaxMasking,
        'remove_and_reset_members')
    def test_cleanup_target_sync_present(self, mock_remove):
        """_cleanup_target breaks replication when a sync session exists."""
        array = self.data.array
        clone_volume = self.data.test_clone_volume
        source_device_id = self.data.device_id
        target_device_id = self.data.device_id2
        snap_name = self.data.failed_resource
        clone_name = clone_volume.name
        extra_specs = self.data.extra_specs
        # Expected snapVX generation passed to the break call.
        generation = 0
        with mock.patch.object(self.rest, 'get_sync_session',
                               return_value='session'):
            with mock.patch.object(
                    self.provision,
                    'break_replication_relationship') as mock_break:
                self.common._cleanup_target(
                    array, target_device_id, source_device_id,
                    clone_name, snap_name, extra_specs)
                mock_break.assert_called_with(
                    array, target_device_id, source_device_id,
                    snap_name, extra_specs, generation)
    @mock.patch.object(masking.PowerMaxMasking, 'remove_volume_from_sg')
    def test_cleanup_target_no_sync(self, mock_remove):
        """_cleanup_target deletes the target when no sync session exists."""
        array = self.data.array
        clone_volume = self.data.test_clone_volume
        source_device_id = self.data.device_id
        target_device_id = self.data.device_id2
        snap_name = self.data.failed_resource
        clone_name = clone_volume.name
        extra_specs = self.data.extra_specs
        with mock.patch.object(self.rest, 'get_sync_session',
                               return_value=None):
            with mock.patch.object(
                    self.common, '_delete_from_srp') as mock_delete:
                self.common._cleanup_target(
                    array, target_device_id, source_device_id,
                    clone_name, snap_name, extra_specs)
                mock_delete.assert_called_once_with(
                    array, target_device_id, clone_name,
                    extra_specs)
    @mock.patch.object(
        common.PowerMaxCommon, 'get_volume_metadata',
        return_value={'device-meta-key-1': 'device-meta-value-1',
                      'device-meta-key-2': 'device-meta-value-2'})
    def test_manage_existing_success(self, mck_meta):
        """manage_existing merges device metadata with user metadata."""
        external_ref = {u'source-name': u'00002'}
        provider_location = {'device_id': u'00002', 'array': u'000197800123'}
        # The model update must contain both the mocked device metadata
        # and the user metadata set on the volume below.
        ref_update = {'provider_location': six.text_type(provider_location),
                      'metadata': {'device-meta-key-1': 'device-meta-value-1',
                                   'device-meta-key-2': 'device-meta-value-2',
                                   'user-meta-key-1': 'user-meta-value-1',
                                   'user-meta-key-2': 'user-meta-value-2'}}
        volume = deepcopy(self.data.test_volume)
        volume.metadata = {'user-meta-key-1': 'user-meta-value-1',
                           'user-meta-key-2': 'user-meta-value-2'}
        with mock.patch.object(
                self.common, '_check_lun_valid_for_cinder_management',
                return_value=('vol1', 'test_sg')):
            model_update = self.common.manage_existing(volume, external_ref)
            self.assertEqual(ref_update, model_update)
@mock.patch.object(
rest.PowerMaxRest, 'get_masking_views_from_storage_group',
return_value=None)
@mock.patch.object(rest.PowerMaxRest, 'is_vol_in_rep_session',
return_value=(False, False, None))
def test_check_lun_valid_for_cinder_management(self, mock_rep, mock_mv):
external_ref = {u'source-name': u'00003'}
vol, source_sg = self.common._check_lun_valid_for_cinder_management(
self.data.array, self.data.device_id3,
self.data.test_volume.id, external_ref)
self.assertEqual(vol, '123')
self.assertIsNone(source_sg)
    @mock.patch.object(
        rest.PowerMaxRest, 'get_masking_views_from_storage_group',
        return_value=None)
    @mock.patch.object(rest.PowerMaxRest, 'is_vol_in_rep_session',
                       return_value=(False, False, None))
    def test_check_lun_valid_for_cinder_management_multiple_sg_exception(
            self, mock_rep, mock_mv):
        """A lun in multiple storage groups cannot be managed."""
        external_ref = {u'source-name': u'00004'}
        self.assertRaises(
            exception.ManageExistingInvalidReference,
            self.common._check_lun_valid_for_cinder_management,
            self.data.array, self.data.device_id4,
            self.data.test_volume.id, external_ref)
    @mock.patch.object(rest.PowerMaxRest, 'get_volume',
                       side_effect=[None,
                                    tpd.PowerMaxData.volume_details[2],
                                    tpd.PowerMaxData.volume_details[2],
                                    tpd.PowerMaxData.volume_details[1]])
    @mock.patch.object(
        rest.PowerMaxRest, 'get_masking_views_from_storage_group',
        side_effect=[tpd.PowerMaxData.sg_details[1]['maskingview'],
                     None])
    @mock.patch.object(
        rest.PowerMaxRest, 'get_storage_groups_from_volume',
        return_value=([tpd.PowerMaxData.defaultstoragegroup_name]))
    @mock.patch.object(rest.PowerMaxRest, 'is_vol_in_rep_session',
                       side_effect=[(True, False, []), (False, False, None)])
    def test_check_lun_valid_for_cinder_management_exception(
            self, mock_rep, mock_sg, mock_mvs, mock_get_vol):
        """Each invalid-lun condition raises the appropriate exception.

        The side_effect sequences above drive a different failure on each
        loop iteration; the order of the mocked responses is significant.
        """
        external_ref = {u'source-name': u'00003'}
        for x in range(0, 3):
            self.assertRaises(
                exception.ManageExistingInvalidReference,
                self.common._check_lun_valid_for_cinder_management,
                self.data.array, self.data.device_id3,
                self.data.test_volume.id, external_ref)
        # The fourth call hits a volume that is already managed.
        self.assertRaises(exception.ManageExistingAlreadyManaged,
                          self.common._check_lun_valid_for_cinder_management,
                          self.data.array, self.data.device_id3,
                          self.data.test_volume.id, external_ref)
    def test_manage_existing_get_size(self):
        """manage_existing_get_size returns the device size in GB."""
        external_ref = {u'source-name': u'00001'}
        size = self.common.manage_existing_get_size(
            self.data.test_volume, external_ref)
        self.assertEqual(2, size)
    def test_manage_existing_get_size_exception(self):
        """A non-integer device size raises ManageExistingInvalidReference."""
        external_ref = {u'source-name': u'00001'}
        # 3.5 GB cannot be represented as a whole-GB Cinder volume.
        with mock.patch.object(self.rest, 'get_size_of_device_on_array',
                               return_value=3.5):
            self.assertRaises(exception.ManageExistingInvalidReference,
                              self.common.manage_existing_get_size,
                              self.data.test_volume, external_ref)
    @mock.patch.object(common.PowerMaxCommon,
                       '_remove_vol_and_cleanup_replication')
    @mock.patch.object(common.PowerMaxCommon, '_sync_check')
    def test_unmanage_success(self, mck_sync, mock_rm):
        """unmanage renames the device even if SG creation fails."""
        volume = self.data.test_volume
        with mock.patch.object(self.rest, 'rename_volume') as mock_rename:
            self.common.unmanage(volume)
            mock_rename.assert_called_once_with(
                self.data.array, self.data.device_id,
                self.data.test_volume.id)
        # Test for success when create storage group fails
        with mock.patch.object(self.rest, 'rename_volume') as mock_rename:
            with mock.patch.object(
                    self.provision, 'create_storage_group',
                    side_effect=exception.VolumeBackendAPIException):
                self.common.unmanage(volume)
                mock_rename.assert_called_once_with(
                    self.data.array, self.data.device_id,
                    self.data.test_volume.id)
    def test_unmanage_device_not_found(self):
        """unmanage is a no-op when the device is not on the array."""
        volume = self.data.test_volume
        with mock.patch.object(self.common, '_find_device_on_array',
                               return_value=None):
            with mock.patch.object(self.rest, 'rename_volume') as mock_rename:
                self.common.unmanage(volume)
                mock_rename.assert_not_called()
    @mock.patch.object(common.PowerMaxCommon, '_slo_workload_migration')
    def test_retype(self, mock_migrate):
        """retype triggers migration, and returns False for a missing lun."""
        device_id = self.data.device_id
        volume_name = self.data.test_volume.name
        extra_specs = self.data.extra_specs_intervals_set
        # NOTE(review): this mutates the shared fixture dict in place —
        # later tests see the added PORTGROUPNAME key; confirm intent.
        extra_specs[utils.PORTGROUPNAME] = self.data.port_group_name_f
        volume = self.data.test_volume
        new_type = {'extra_specs': {}}
        host = {'host': self.data.new_host}
        self.common.retype(volume, new_type, host)
        mock_migrate.assert_called_once_with(
            device_id, volume, host, volume_name, new_type, extra_specs)
        with mock.patch.object(
                self.common, '_find_device_on_array', return_value=None):
            self.assertFalse(self.common.retype(volume, new_type, host))
    def test_retype_attached_vol(self):
        """retype of an attached volume still calls the migration path."""
        host = {'host': self.data.new_host}
        new_type = {'extra_specs': {}}
        with mock.patch.object(
                self.common, '_find_device_on_array', return_value=True):
            with mock.patch.object(self.common,
                                   '_slo_workload_migration') as mock_retype:
                self.common.retype(self.data.test_attached_volume,
                                   new_type, host)
                mock_retype.assert_called_once()
    @mock.patch.object(
        rest.PowerMaxRest, 'get_volume',
        return_value=tpd.PowerMaxData.volume_details_attached)
    @mock.patch.object(rest.PowerMaxRest, 'get_storage_group',
                       return_value=tpd.PowerMaxData.sg_details[1])
    @mock.patch.object(utils.PowerMaxUtils, 'get_child_sg_name',
                       return_value=('OS-Test-SG', '', '', ''))
    @mock.patch.object(rest.PowerMaxRest, 'is_child_sg_in_parent_sg',
                       return_value=True)
    @mock.patch.object(masking.PowerMaxMasking,
                       'move_volume_between_storage_groups')
    @mock.patch.object(rest.PowerMaxRest, 'is_volume_in_storagegroup',
                       return_value=True)
    def test_retype_inuse_volume_tgt_sg_exist(self, mck_vol_in_sg, mck_sg_move,
                                              mck_child_sg_in_sg,
                                              mck_get_sg_name,
                                              mck_get_sg, mck_get_vol):
        """In-use retype succeeds when the target SG already exists."""
        array = self.data.array
        srp = self.data.srp
        slo = self.data.slo
        workload = self.data.workload
        device_id = self.data.device_id
        volume = self.data.test_attached_volume
        rep_mode = 'Synchronous'
        src_extra_specs = self.data.extra_specs_migrate
        interval = src_extra_specs['interval']
        retries = src_extra_specs['retries']
        tgt_extra_specs = {
            'srp': srp, 'array': array, 'slo': slo, 'workload': workload,
            'interval': interval, 'retries': retries, 'rep_mode': rep_mode}
        # [0] is the success flag of the returned tuple.
        success = self.common._retype_inuse_volume(
            array, srp, volume, device_id, src_extra_specs, slo, workload,
            tgt_extra_specs, False)[0]
        self.assertTrue(success)
        mck_sg_move.assert_called()
        mck_vol_in_sg.assert_called()
    @mock.patch.object(
        rest.PowerMaxRest, 'get_volume',
        return_value=tpd.PowerMaxData.volume_details_attached)
    @mock.patch.object(utils.PowerMaxUtils, 'get_child_sg_name',
                       return_value=('OS-Test-SG', '', '', ''))
    @mock.patch.object(provision.PowerMaxProvision, 'create_storage_group')
    @mock.patch.object(masking.PowerMaxMasking, 'add_child_sg_to_parent_sg')
    @mock.patch.object(rest.PowerMaxRest, 'is_child_sg_in_parent_sg',
                       return_value=True)
    @mock.patch.object(masking.PowerMaxMasking,
                       'move_volume_between_storage_groups')
    @mock.patch.object(rest.PowerMaxRest, 'is_volume_in_storagegroup',
                       return_value=True)
    def test_retype_inuse_volume_no_tgt_sg(self, mck_vol_in_sg, mck_move_vol,
                                           mck_sg_in_sg, mck_add_sg_to_sg,
                                           mck_create_sg, mck_get_csg_name,
                                           mck_get_vol):
        """In-use retype creates the target SG when it does not exist."""
        array = self.data.array
        srp = self.data.srp
        slo = self.data.slo
        workload = self.data.workload
        device_id = self.data.device_id
        volume = self.data.test_attached_volume
        rep_mode = 'Synchronous'
        src_extra_specs = self.data.extra_specs_migrate
        interval = src_extra_specs['interval']
        retries = src_extra_specs['retries']
        tgt_extra_specs = {
            'srp': srp, 'array': array, 'slo': slo, 'workload': workload,
            'interval': interval, 'retries': retries, 'rep_mode': rep_mode}
        # Second get_storage_group call returns None => SG must be created.
        with mock.patch.object(self.rest, 'get_storage_group',
                               side_effect=[self.data.sg_details[1], None,
                                            self.data.sg_details[1]]):
            success = self.common._retype_inuse_volume(
                array, srp, volume, device_id, src_extra_specs, slo, workload,
                tgt_extra_specs, False)[0]
            mck_create_sg.assert_called()
            mck_add_sg_to_sg.assert_called()
            self.assertTrue(success)
    @mock.patch.object(provision.PowerMaxProvision, 'create_storage_group',
                       return_value=None)
    @mock.patch.object(rest.PowerMaxRest, 'get_volume',
                       return_value=tpd.PowerMaxData.volume_details_attached)
    @mock.patch.object(rest.PowerMaxRest, 'get_storage_group',
                       side_effect=[tpd.PowerMaxData.sg_details[1], None])
    @mock.patch.object(utils.PowerMaxUtils, 'get_child_sg_name',
                       return_value=('OS-Test-SG', '', '', ''))
    @mock.patch.object(rest.PowerMaxRest, 'is_child_sg_in_parent_sg',
                       return_value=False)
    @mock.patch.object(masking.PowerMaxMasking,
                       'move_volume_between_storage_groups')
    @mock.patch.object(rest.PowerMaxRest, 'is_volume_in_storagegroup',
                       return_value=False)
    def test_retype_inuse_volume_fail(self, mck_vol_in_sg, mck_sg_move,
                                      mck_child_sg_in_sg, mck_get_sg_name,
                                      mck_get_sg, mck_get_vol, mck_create_sg):
        """In-use retype fails when SG creation yields no usable group."""
        array = self.data.array
        srp = self.data.srp
        slo = self.data.slo
        workload = self.data.workload
        device_id = self.data.device_id
        volume = self.data.test_attached_volume
        rep_mode = 'Synchronous'
        src_extra_specs = self.data.extra_specs_migrate
        interval = src_extra_specs['interval']
        retries = src_extra_specs['retries']
        tgt_extra_specs = {
            'srp': srp, 'array': array, 'slo': slo, 'workload': workload,
            'interval': interval, 'retries': retries, 'rep_mode': rep_mode}
        success = self.common._retype_inuse_volume(
            array, srp, volume, device_id, src_extra_specs, slo, workload,
            tgt_extra_specs, False)[0]
        self.assertFalse(success)
        # The failure short-circuits before any volume movement.
        mck_vol_in_sg.assert_not_called()
        mck_sg_move.assert_not_called()
    @mock.patch.object(
        rest.PowerMaxRest, 'get_volume',
        return_value=tpd.PowerMaxData.volume_details_attached)
    @mock.patch.object(rest.PowerMaxRest, 'get_storage_group',
                       return_value=tpd.PowerMaxData.sg_details[1])
    @mock.patch.object(utils.PowerMaxUtils, 'get_volume_attached_hostname',
                       return_value=None)
    def test_retype_inuse_volume_fail_no_attached_host(self, mck_get_hostname,
                                                       mck_get_sg,
                                                       mck_get_vol):
        """In-use retype fails when the attached hostname cannot be found."""
        array = self.data.array
        srp = self.data.srp
        slo = self.data.slo
        workload = self.data.workload
        device_id = self.data.device_id
        volume = self.data.test_attached_volume
        rep_mode = 'Synchronous'
        src_extra_specs = self.data.extra_specs_migrate
        interval = src_extra_specs['interval']
        retries = src_extra_specs['retries']
        tgt_extra_specs = {
            'srp': srp, 'array': array, 'slo': slo, 'workload': workload,
            'interval': interval, 'retries': retries, 'rep_mode': rep_mode}
        success = self.common._retype_inuse_volume(
            array, srp, volume, device_id, src_extra_specs, slo, workload,
            tgt_extra_specs, False)[0]
        self.assertFalse(success)
    def test_slo_workload_migration_valid(self):
        """A valid SLO/workload change hands off to _migrate_volume."""
        device_id = self.data.device_id
        volume_name = self.data.test_volume.name
        extra_specs = self.data.extra_specs
        new_type = {'extra_specs': {}}
        volume = self.data.test_volume
        host = {'host': self.data.new_host}
        with mock.patch.object(self.common, '_migrate_volume') as mock_migrate:
            self.common._slo_workload_migration(
                device_id, volume, host, volume_name, new_type, extra_specs)
            # 'Silver'/'OLTP' are parsed from the target host string.
            mock_migrate.assert_called_once_with(
                extra_specs[utils.ARRAY], volume, device_id,
                extra_specs[utils.SRP], 'Silver',
                'OLTP', volume_name, new_type, extra_specs)
    def test_slo_workload_migration_not_valid(self):
        """Migration returns False when validity checking fails."""
        device_id = self.data.device_id
        volume_name = self.data.test_volume.name
        extra_specs = self.data.extra_specs
        volume = self.data.test_volume
        new_type = {'extra_specs': {}}
        host = {'host': self.data.new_host}
        with mock.patch.object(
                self.common, '_is_valid_for_storage_assisted_migration',
                return_value=(False, 'Silver', 'OLTP')):
            migrate_status = self.common._slo_workload_migration(
                device_id, volume, host, volume_name, new_type, extra_specs)
            self.assertFalse(migrate_status)
    def test_slo_workload_migration_same_hosts(self):
        """No migration occurs when source and target hosts match."""
        device_id = self.data.device_id
        volume_name = self.data.test_volume.name
        extra_specs = self.data.extra_specs
        volume = self.data.test_volume
        host = {'host': self.data.fake_host}
        new_type = {'extra_specs': {'slo': 'Bronze'}}
        migrate_status = self.common._slo_workload_migration(
            device_id, volume, host, volume_name, new_type, extra_specs)
        self.assertFalse(migrate_status)
    def test_slo_workload_migration_same_host_change_compression(self):
        """A compression change on the same host still migrates."""
        device_id = self.data.device_id
        volume_name = self.data.test_volume.name
        extra_specs = self.data.extra_specs
        volume = self.data.test_volume
        host = {'host': self.data.fake_host}
        new_type = {'extra_specs': {utils.DISABLECOMPRESSION: "true"}}
        with mock.patch.object(
                self.common, '_is_valid_for_storage_assisted_migration',
                return_value=(True, self.data.slo, self.data.workload)):
            with mock.patch.object(
                    self.common, '_migrate_volume') as mock_migrate:
                migrate_status = self.common._slo_workload_migration(
                    device_id, volume, host, volume_name, new_type,
                    extra_specs)
                self.assertTrue(bool(migrate_status))
                mock_migrate.assert_called_once_with(
                    extra_specs[utils.ARRAY], volume, device_id,
                    extra_specs[utils.SRP], self.data.slo,
                    self.data.workload, volume_name, new_type, extra_specs)
    @mock.patch.object(masking.PowerMaxMasking, 'remove_and_reset_members')
    @mock.patch.object(common.PowerMaxCommon, 'get_volume_metadata',
                       return_value='')
    def test_migrate_volume_success(self, mck_meta, mock_remove):
        """_migrate_volume succeeds and resets SG members appropriately."""
        with mock.patch.object(self.rest, 'is_volume_in_storagegroup',
                               return_value=True):
            device_id = self.data.device_id
            volume_name = self.data.test_volume.name
            extra_specs = self.data.extra_specs
            volume = self.data.test_volume
            new_type = {'extra_specs': {}}
            migrate_status = self.common._migrate_volume(
                self.data.array, volume, device_id, self.data.srp,
                self.data.slo, self.data.workload, volume_name,
                new_type, extra_specs)[0]
            self.assertTrue(migrate_status)
            target_extra_specs = {
                'array': self.data.array, 'interval': 3,
                'retries': 120, 'slo': self.data.slo,
                'srp': self.data.srp, 'workload': self.data.workload}
            mock_remove.assert_called_once_with(
                self.data.array, volume, device_id, volume_name,
                target_extra_specs, reset=True)
            mock_remove.reset_mock()
            # No storage groups on the volume => nothing to remove.
            with mock.patch.object(
                    self.rest, 'get_storage_groups_from_volume',
                    return_value=[]):
                migrate_status = self.common._migrate_volume(
                    self.data.array, volume, device_id, self.data.srp,
                    self.data.slo, self.data.workload, volume_name,
                    new_type, extra_specs)[0]
                self.assertTrue(migrate_status)
                mock_remove.assert_not_called()
    @mock.patch.object(common.PowerMaxCommon, 'cleanup_lun_replication')
    @mock.patch.object(common.PowerMaxCommon, '_retype_inuse_volume',
                       return_value=(True, 'Test'))
    @mock.patch.object(common.PowerMaxCommon,
                       'setup_inuse_volume_replication',
                       return_value=('Status', 'Data', 'Info'))
    @mock.patch.object(common.PowerMaxCommon, '_retype_remote_volume',
                       return_value=True)
    @mock.patch.object(common.PowerMaxCommon, 'get_volume_metadata',
                       return_value='')
    @mock.patch.object(utils.PowerMaxUtils, 'get_async_rdf_managed_grp_name')
    @mock.patch.object(rest.PowerMaxRest, 'get_storage_group',
                       return_value=True)
    @mock.patch.object(masking.PowerMaxMasking, 'add_volume_to_storage_group')
    def test_migrate_in_use_volume(
            self, mck_add_vol, mck_get_sg, mck_get_rdf_name, mck_meta,
            mck_remote_retype, mck_setup, mck_retype, mck_cleanup):
        """Exercise all four replication transitions for in-use migration.

        The is_replication_enabled side_effect pair encodes the
        (source, target) replication state for each scenario; the ordering
        of the mocked calls is significant.
        """
        # Array/Volume info
        array = self.data.array
        srp = self.data.srp
        slo = self.data.slo
        workload = self.data.workload
        device_id = self.data.device_id
        volume = self.data.test_attached_volume
        volume_name = self.data.test_attached_volume.name
        # Rep Config
        rep_mode = 'Synchronous'
        self.common.rep_config = {'mode': rep_mode, 'metro_use_bias': True}
        # Extra Specs
        new_type = {'extra_specs': {}}
        src_extra_specs = self.data.extra_specs_migrate
        interval = src_extra_specs['interval']
        retries = src_extra_specs['retries']
        tgt_extra_specs = {
            'srp': srp, 'array': array, 'slo': slo, 'workload': workload,
            'interval': interval, 'retries': retries, 'rep_mode': rep_mode}

        # Helper to clear call history between scenarios.
        def _reset_mocks():
            mck_cleanup.reset_mock()
            mck_setup.reset_mock()
            mck_retype.reset_mock()
            mck_remote_retype.reset_mock()
        # Scenario 1: no_rep => no_rep
        with mock.patch.object(self.utils, 'is_replication_enabled',
                               side_effect=[False, False]):
            success = self.common._migrate_volume(
                array, volume, device_id, srp, slo, workload, volume_name,
                new_type, src_extra_specs)[0]
            mck_retype.assert_called_once_with(
                array, srp, volume, device_id, src_extra_specs, slo, workload,
                tgt_extra_specs, False)
            mck_cleanup.assert_not_called()
            mck_setup.assert_not_called()
            mck_remote_retype.assert_not_called()
            self.assertTrue(success)
            _reset_mocks()
        # Scenario 2: rep => no_rep
        with mock.patch.object(self.utils, 'is_replication_enabled',
                               side_effect=[True, False]):
            success = self.common._migrate_volume(
                array, volume, device_id, srp, slo, workload, volume_name,
                new_type, src_extra_specs)[0]
            # NOTE(review): cleanup_specs aliases src_extra_specs (no copy),
            # so 'force_vol_add' is added to the shared dict — confirm the
            # later scenarios intentionally see that key.
            cleanup_specs = src_extra_specs
            cleanup_specs['force_vol_add'] = True
            mck_cleanup.assert_called_once_with(
                volume, volume_name, device_id, cleanup_specs)
            mck_retype.assert_called_once_with(
                array, srp, volume, device_id, src_extra_specs, slo, workload,
                tgt_extra_specs, False)
            mck_setup.assert_not_called()
            mck_remote_retype.assert_not_called()
            self.assertTrue(success)
            _reset_mocks()
        # Scenario 3: no_rep => rep
        with mock.patch.object(self.utils, 'is_replication_enabled',
                               side_effect=[False, True]):
            tgt_extra_specs['rep_mode'] = utils.REP_METRO
            self.common.rep_config['mode'] = utils.REP_METRO
            success = self.common._migrate_volume(
                array, volume, device_id, srp, slo, workload, volume_name,
                new_type, src_extra_specs)[0]
            mck_setup_specs = src_extra_specs
            mck_setup_specs[utils.METROBIAS] = self.common.rep_config[
                'metro_use_bias']
            mck_setup.assert_called_once_with(
                self.data.array, volume, device_id, mck_setup_specs)
            mck_retype.assert_called_once_with(
                array, srp, volume, device_id, src_extra_specs, slo,
                workload, tgt_extra_specs, False)
            mck_add_vol.assert_called_once()
            mck_get_sg.assert_called_once()
            mck_get_rdf_name.assert_called_once()
            mck_cleanup.assert_not_called()
            mck_remote_retype.assert_not_called()
            self.assertTrue(success)
            _reset_mocks()
        # Scenario 4: rep => rep
        with mock.patch.object(self.utils, 'is_replication_enabled',
                               side_effect=[True, True]):
            success = self.common._migrate_volume(
                array, volume, device_id, srp, slo, workload, volume_name,
                new_type, src_extra_specs)[0]
            mck_retype.assert_called_once_with(
                array, srp, volume, device_id, src_extra_specs, slo, workload,
                tgt_extra_specs, False)
            mck_remote_retype.assert_called_once_with(
                array, volume, device_id, volume_name, utils.REP_METRO, True,
                tgt_extra_specs)
            mck_cleanup.assert_not_called()
            mck_setup.assert_not_called()
            self.assertTrue(success)
    @mock.patch.object(common.PowerMaxCommon, 'setup_volume_replication',
                       return_value=('Status', 'Data', 'Info'))
    @mock.patch.object(common.PowerMaxCommon, '_retype_volume',
                       return_value=True)
    @mock.patch.object(common.PowerMaxCommon, 'cleanup_lun_replication')
    @mock.patch.object(common.PowerMaxCommon, '_retype_inuse_volume',
                       return_value=(True, 'test'))
    @mock.patch.object(common.PowerMaxCommon,
                       'setup_inuse_volume_replication',
                       return_value=('Status', 'Data', 'Info'))
    @mock.patch.object(common.PowerMaxCommon, '_retype_remote_volume',
                       return_value=True)
    @mock.patch.object(common.PowerMaxCommon, 'get_volume_metadata',
                       return_value='')
    def test_migrate_volume_attachment_path(
            self, mck_meta, mck_remote_retype, mck_setup_use, mck_inuse_retype,
            mck_cleanup, mck_retype, mck_setup):
        """_migrate_volume picks the in-use or detached retype path."""
        # Array/Volume info
        array = self.data.array
        srp = self.data.srp
        slo = self.data.slo
        workload = self.data.workload
        device_id = self.data.device_id
        volume_attached = self.data.test_attached_volume
        volume_attached_name = self.data.test_attached_volume.name
        volume_not_attached = self.data.test_volume
        volume_not_attached_name = self.data.test_volume.name
        # Extra Specs
        new_type = {'extra_specs': {}}
        self.common.rep_config = {'mode': None}
        src_extra_specs = self.data.extra_specs_migrate
        # Scenario 1: Volume attached
        with mock.patch.object(self.utils, 'is_replication_enabled',
                               side_effect=[False, False]):
            success = self.common._migrate_volume(
                array, volume_attached, device_id, srp, slo, workload,
                volume_attached_name, new_type, src_extra_specs)[0]
            mck_inuse_retype.assert_called_once()
            self.assertTrue(success)
        mck_cleanup.reset_mock()
        mck_setup_use.reset_mock()
        # Scenario 2: Volume not attached
        with mock.patch.object(self.utils, 'is_replication_enabled',
                               side_effect=[False, False]):
            success = self.common._migrate_volume(
                array, volume_not_attached, device_id, srp, slo, workload,
                volume_not_attached_name, new_type, src_extra_specs)[0]
            mck_retype.assert_called_once()
            self.assertTrue(success)
        # Scenario 3: Volume not attached, enable RDF
        tgt_extra_specs = {
            'srp': srp, 'array': array, 'slo': slo, 'workload': workload,
            'interval': src_extra_specs['interval'],
            'retries': src_extra_specs['retries'],
            utils.METROBIAS: True}
        self.common.rep_config[utils.METROBIAS] = True
        with mock.patch.object(self.utils, 'is_replication_enabled',
                               side_effect=[False, True]):
            success = self.common._migrate_volume(
                array, volume_not_attached, device_id, srp, slo, workload,
                volume_not_attached_name, new_type, src_extra_specs)[0]
            mck_setup.assert_called_once_with(array, volume_not_attached,
                                              device_id, tgt_extra_specs)
            mck_retype.assert_called_once()
            self.assertTrue(success)
    @mock.patch.object(masking.PowerMaxMasking, 'remove_and_reset_members')
    def test_migrate_volume_failed_get_new_sg_failed(self, mock_remove):
        """Migration fails when the default SG cannot be obtained."""
        device_id = self.data.device_id
        volume_name = self.data.test_volume.name
        extra_specs = self.data.extra_specs
        new_type = {'extra_specs': {}}
        with mock.patch.object(
                self.masking, 'get_or_create_default_storage_group',
                side_effect=exception.VolumeBackendAPIException):
            migrate_status = self.common._migrate_volume(
                self.data.array, self.data.test_volume, device_id,
                self.data.srp, self.data.slo,
                self.data.workload, volume_name, new_type, extra_specs)
            self.assertFalse(migrate_status)
    def test_migrate_volume_failed_vol_not_added(self):
        """Migration fails when the volume never lands in the target SG."""
        device_id = self.data.device_id
        volume_name = self.data.test_volume.name
        extra_specs = self.data.extra_specs
        new_type = {'extra_specs': {}}
        with mock.patch.object(
                self.rest, 'is_volume_in_storagegroup',
                return_value=False):
            migrate_status = self.common._migrate_volume(
                self.data.array, self.data.test_volume, device_id,
                self.data.srp, self.data.slo,
                self.data.workload, volume_name, new_type, extra_specs)[0]
            self.assertFalse(migrate_status)
    def test_is_valid_for_storage_assisted_migration_true(self):
        """Valid hosts yield (True, slo, workload) from the check."""
        device_id = self.data.device_id
        host = {'host': self.data.new_host}
        volume_name = self.data.test_volume.name
        ref_return = (True, 'Silver', 'OLTP')
        return_val = self.common._is_valid_for_storage_assisted_migration(
            device_id, host, self.data.array,
            self.data.srp, volume_name, False, False)
        self.assertEqual(ref_return, return_val)
        # No current sgs found
        with mock.patch.object(self.rest, 'get_storage_groups_from_volume',
                               return_value=None):
            return_val = self.common._is_valid_for_storage_assisted_migration(
                device_id, host, self.data.array, self.data.srp,
                volume_name, False, False)
            self.assertEqual(ref_return, return_val)
        # Host string without a workload component defaults to 'NONE'.
        host = {'host': 'HostX@Backend#Silver+SRP_1+000197800123'}
        ref_return = (True, 'Silver', 'NONE')
        return_val = self.common._is_valid_for_storage_assisted_migration(
            device_id, host, self.data.array,
            self.data.srp, volume_name, False, False)
        self.assertEqual(ref_return, return_val)
    def test_is_valid_for_storage_assisted_migration_false(self):
        """Invalid hosts yield (False, None, None) from the check."""
        device_id = self.data.device_id
        volume_name = self.data.test_volume.name
        ref_return = (False, None, None)
        # IndexError
        host = {'host': 'HostX@Backend#Silver+SRP_1+000197800123+dummy+data'}
        return_val = self.common._is_valid_for_storage_assisted_migration(
            device_id, host, self.data.array,
            self.data.srp, volume_name, False, False)
        self.assertEqual(ref_return, return_val)
        # Wrong array
        host2 = {'host': 'HostX@Backend#Silver+OLTP+SRP_1+00012345678'}
        return_val = self.common._is_valid_for_storage_assisted_migration(
            device_id, host2, self.data.array,
            self.data.srp, volume_name, False, False)
        self.assertEqual(ref_return, return_val)
        # Wrong srp
        host3 = {'host': 'HostX@Backend#Silver+OLTP+SRP_2+000197800123'}
        return_val = self.common._is_valid_for_storage_assisted_migration(
            device_id, host3, self.data.array,
            self.data.srp, volume_name, False, False)
        self.assertEqual(ref_return, return_val)
        # Already in correct sg
        host4 = {'host': self.data.fake_host}
        return_val = self.common._is_valid_for_storage_assisted_migration(
            device_id, host4, self.data.array,
            self.data.srp, volume_name, False, False)
        self.assertEqual(ref_return, return_val)
    def test_is_valid_for_storage_assisted_migration_next_gen(self):
        """On a next-gen array the workload resolves to 'NONE'."""
        device_id = self.data.device_id
        host = {'host': self.data.new_host}
        volume_name = self.data.test_volume.name
        ref_return = (True, 'Silver', 'NONE')
        with mock.patch.object(self.rest, 'is_next_gen_array',
                               return_value=True):
            return_val = self.common._is_valid_for_storage_assisted_migration(
                device_id, host, self.data.array,
                self.data.srp, volume_name, False, False)
            self.assertEqual(ref_return, return_val)
    def test_find_volume_group(self):
        """_find_volume_group returns the matching SG details."""
        group = self.data.test_group_1
        array = self.data.array
        volume_group = self.common._find_volume_group(array, group)
        ref_group = self.data.sg_details_rep[0]
        self.assertEqual(ref_group, volume_group)
    def test_get_volume_device_ids(self):
        """_get_volume_device_ids maps volumes to their device ids."""
        array = self.data.array
        volumes = [self.data.test_volume]
        ref_device_ids = [self.data.device_id]
        device_ids = self.common._get_volume_device_ids(volumes, array)
        self.assertEqual(ref_device_ids, device_ids)
    def test_get_members_of_volume_group(self):
        """_get_members_of_volume_group lists device ids in the group."""
        array = self.data.array
        group_name = self.data.storagegroup_name_source
        ref_volumes = [self.data.device_id, self.data.device_id2]
        member_device_ids = self.common._get_members_of_volume_group(
            array, group_name)
        self.assertEqual(ref_volumes, member_device_ids)
    def test_get_members_of_volume_group_empty(self):
        """An empty storage group yields None rather than a list."""
        array = self.data.array
        group_name = self.data.storagegroup_name_source
        with mock.patch.object(
                self.rest, 'get_volumes_in_storage_group',
                return_value=None):
            member_device_ids = self.common._get_members_of_volume_group(
                array, group_name
            )
        self.assertIsNone(member_device_ids)
    @mock.patch.object(volume_utils, 'is_group_a_cg_snapshot_type',
                       return_value=True)
    def test_create_group_replica(self, mock_check):
        """Verify _create_group_replica is invoked with group and snap name.

        NOTE(review): this patches ``_create_group_replica`` and then calls
        that same name, so only the mock is exercised — confirm the intended
        entry point was not a different public wrapper.
        """
        source_group = self.data.test_group_1
        snap_name = self.data.group_snapshot_name
        with mock.patch.object(
                self.common,
                '_create_group_replica') as mock_create_replica:
            self.common._create_group_replica(
                source_group, snap_name)
            mock_create_replica.assert_called_once_with(
                source_group, snap_name)
    def test_create_group_replica_exception(self):
        """A failing group fixture makes _create_group_replica raise."""
        source_group = self.data.test_group_failed
        snap_name = self.data.group_snapshot_name
        with mock.patch.object(
                volume_utils, 'is_group_a_cg_snapshot_type',
                return_value=True):
            self.assertRaises(exception.VolumeBackendAPIException,
                              self.common._create_group_replica,
                              source_group,
                              snap_name)
    def test_create_group_snapshot(self):
        """Test create_group_snapshot reports an available snapshot."""
        context = None
        group_snapshot = self.data.test_group_snapshot_1
        snapshots = []
        ref_model_update = {'status': fields.GroupStatus.AVAILABLE}
        with mock.patch.object(
                volume_utils, 'is_group_a_cg_snapshot_type',
                return_value=True):
            model_update, snapshots_model_update = (
                self.common.create_group_snapshot(
                    context, group_snapshot, snapshots))
            self.assertEqual(ref_model_update, model_update)
    def test_create_group_snapshot_exception(self):
        """Test create_group_snapshot raises for a failing group snapshot."""
        context = None
        group_snapshot = self.data.test_group_snapshot_failed
        snapshots = []
        with mock.patch.object(
                volume_utils, 'is_group_a_cg_snapshot_type',
                return_value=True):
            self.assertRaises(exception.VolumeBackendAPIException,
                              self.common.create_group_snapshot,
                              context,
                              group_snapshot,
                              snapshots)
@mock.patch.object(volume_utils, 'is_group_a_cg_snapshot_type',
return_value=True)
@mock.patch.object(volume_utils, 'is_group_a_type', return_value=False)
def test_create_group(self, mock_type, mock_cg_type):
ref_model_update = {'status': fields.GroupStatus.AVAILABLE}
model_update = self.common.create_group(None, self.data.test_group_1)
self.assertEqual(ref_model_update, model_update)
    @mock.patch.object(provision.PowerMaxProvision, 'create_volume_group',
                       side_effect=exception.CinderException)
    @mock.patch.object(volume_utils, 'is_group_a_type', return_value=False)
    def test_create_group_exception(self, mock_type, mock_create):
        """Test create_group wraps provision failures in a backend error."""
        context = None
        group = self.data.test_group_failed
        with mock.patch.object(
                volume_utils, 'is_group_a_cg_snapshot_type',
                return_value=True):
            self.assertRaises(exception.VolumeBackendAPIException,
                              self.common.create_group,
                              context, group)
    def test_delete_group_snapshot(self):
        """Test delete_group_snapshot reports the snapshot as deleted."""
        group_snapshot = self.data.test_group_snapshot_1
        snapshots = []
        context = None
        ref_model_update = {'status': fields.GroupSnapshotStatus.DELETED}
        with mock.patch.object(volume_utils, 'is_group_a_cg_snapshot_type',
                               return_value=True):
            model_update, snapshots_model_update = (
                self.common.delete_group_snapshot(context,
                                                  group_snapshot, snapshots))
            self.assertEqual(ref_model_update, model_update)
    def test_delete_group_snapshot_success(self):
        """Test _delete_group_snapshot success path."""
        group_snapshot = self.data.test_group_snapshot_1
        snapshots = []
        ref_model_update = {'status': fields.GroupSnapshotStatus.DELETED}
        with mock.patch.object(volume_utils, 'is_group_a_cg_snapshot_type',
                               return_value=True):
            model_update, snapshots_model_update = (
                self.common._delete_group_snapshot(group_snapshot,
                                                   snapshots))
            self.assertEqual(ref_model_update, model_update)
    def test_delete_group_snapshot_failed(self):
        """Test _delete_group_snapshot reports error_deleting on failure."""
        group_snapshot = self.data.test_group_snapshot_failed
        snapshots = []
        ref_model_update = (
            {'status': fields.GroupSnapshotStatus.ERROR_DELETING})
        with mock.patch.object(volume_utils, 'is_group_a_cg_snapshot_type',
                               return_value=True):
            model_update, snapshots_model_update = (
                self.common._delete_group_snapshot(group_snapshot,
                                                   snapshots))
            self.assertEqual(ref_model_update, model_update)
@mock.patch.object(volume_utils, 'is_group_a_type',
return_value=False)
@mock.patch.object(volume_utils, 'is_group_a_cg_snapshot_type',
return_value=True)
def test_update_group(self, mock_cg_type, mock_type_check):
group = self.data.test_group_1
add_vols = [self.data.test_volume]
remove_vols = []
ref_model_update = {'status': fields.GroupStatus.AVAILABLE}
model_update, __, __ = self.common.update_group(group,
add_vols,
remove_vols)
self.assertEqual(ref_model_update, model_update)
@mock.patch.object(common.PowerMaxCommon, '_find_volume_group',
return_value=None)
@mock.patch.object(volume_utils, 'is_group_a_cg_snapshot_type',
return_value=True)
def test_update_group_not_found(self, mock_check, mock_grp):
self.assertRaises(exception.GroupNotFound, self.common.update_group,
self.data.test_group_1, [], [])
@mock.patch.object(common.PowerMaxCommon, '_find_volume_group',
side_effect=exception.VolumeBackendAPIException)
@mock.patch.object(volume_utils, 'is_group_a_cg_snapshot_type',
return_value=True)
def test_update_group_exception(self, mock_check, mock_grp):
self.assertRaises(exception.VolumeBackendAPIException,
self.common.update_group,
self.data.test_group_1, [], [])
    @mock.patch.object(volume_utils, 'is_group_a_type', return_value=False)
    def test_delete_group(self, mock_check):
        """Test delete_group reports the group as deleted."""
        group = self.data.test_group_1
        volumes = [self.data.test_volume]
        context = None
        ref_model_update = {'status': fields.GroupStatus.DELETED}
        # Storage group is reported empty so deletion can proceed.
        with mock.patch.object(
                volume_utils, 'is_group_a_cg_snapshot_type',
                return_value=True), mock.patch.object(
                    self.rest, 'get_volumes_in_storage_group',
                    return_value=[]):
            model_update, __ = self.common.delete_group(
                context, group, volumes)
            self.assertEqual(ref_model_update, model_update)
    @mock.patch.object(volume_utils, 'is_group_a_type', return_value=False)
    def test_delete_group_success(self, mock_check):
        """Test _delete_group success path with an empty storage group."""
        group = self.data.test_group_1
        volumes = []
        ref_model_update = {'status': fields.GroupStatus.DELETED}
        with mock.patch.object(
                volume_utils, 'is_group_a_cg_snapshot_type',
                return_value=True), mock.patch.object(
                    self.rest, 'get_volumes_in_storage_group',
                    return_value=[]):
            model_update, __ = self.common._delete_group(group, volumes)
            self.assertEqual(ref_model_update, model_update)
    def test_delete_group_already_deleted(self):
        """Test _delete_group still reports deleted for a missing group."""
        group = self.data.test_group_failed
        ref_model_update = {'status': fields.GroupStatus.DELETED}
        volumes = []
        with mock.patch.object(volume_utils, 'is_group_a_cg_snapshot_type',
                               return_value=True):
            model_update, __ = self.common._delete_group(group, volumes)
            self.assertEqual(ref_model_update, model_update)
    @mock.patch.object(volume_utils, 'is_group_a_type', return_value=False)
    @mock.patch.object(volume_utils, 'is_group_a_cg_snapshot_type',
                       return_value=True)
    def test_delete_group_failed(self, mock_check, mock_type_check):
        """Test _delete_group reports error_deleting when the REST call fails."""
        group = self.data.test_group_1
        volumes = []
        ref_model_update = {'status': fields.GroupStatus.ERROR_DELETING}
        with mock.patch.object(
                self.rest, 'delete_storage_group',
                side_effect=exception.VolumeBackendAPIException):
            model_update, __ = self.common._delete_group(
                group, volumes)
        self.assertEqual(ref_model_update, model_update)
    @mock.patch.object(
        common.PowerMaxCommon, '_get_clone_vol_info',
        return_value=(tpd.PowerMaxData.device_id,
                      tpd.PowerMaxData.extra_specs, 1, 'tgt_vol'))
    @mock.patch.object(volume_utils, 'is_group_a_cg_snapshot_type',
                       return_value=True)
    @mock.patch.object(volume_utils, 'is_group_a_type',
                       return_value=False)
    @mock.patch.object(common.PowerMaxCommon, 'get_volume_metadata',
                       return_value='')
    def test_create_group_from_src_success(self, mck_meta, mock_type,
                                           mock_cg_type, mock_info):
        """Test create_group_from_src from a group snapshot succeeds."""
        ref_model_update = {'status': fields.GroupStatus.AVAILABLE}
        model_update, volumes_model_update = (
            self.common.create_group_from_src(
                None, self.data.test_group_1, [self.data.test_volume],
                self.data.test_group_snapshot_1, [], None, []))
        self.assertEqual(ref_model_update, model_update)
    @mock.patch.object(
        common.PowerMaxCommon, '_remove_vol_and_cleanup_replication')
    @mock.patch.object(
        masking.PowerMaxMasking, 'remove_volumes_from_storage_group')
    def test_rollback_create_group_from_src(
            self, mock_rm, mock_clean):
        """Test _rollback_create_group_from_src removes volumes each call."""
        rollback_dict = {
            'target_group_name': self.data.target_group_name,
            'snap_name': 'snap1', 'source_group_name': 'src_grp',
            'volumes': (self.data.device_id, self.data.extra_specs,
                        self.data.test_volume),
            'device_ids': [self.data.device_id],
            'interval_retries_dict': self.data.extra_specs}
        # Invoke twice to verify the rollback is repeatable (idempotent
        # with respect to the removal call count).
        for x in range(0, 2):
            self.common._rollback_create_group_from_src(
                self.data.array, rollback_dict)
        self.assertEqual(2, mock_rm.call_count)
def test_get_snap_src_dev_list(self):
src_dev_ids = self.common._get_snap_src_dev_list(
self.data.array, [self.data.test_snapshot])
ref_dev_ids = [self.data.device_id]
self.assertEqual(ref_dev_ids, src_dev_ids)
    def test_get_clone_vol_info(self):
        """Test _get_clone_vol_info from source volumes and snapshots.

        Both the volume-sourced and the snapshot-sourced paths should
        resolve to the same source device id.
        """
        ref_dev_id = self.data.device_id
        source_vols = [self.data.test_volume,
                       self.data.test_attached_volume]
        src_snapshots = [self.data.test_snapshot]
        src_dev_id1, extra_specs1, vol_size1, tgt_vol_name1 = (
            self.common._get_clone_vol_info(
                self.data.test_clone_volume, source_vols, []))
        src_dev_id2, extra_specs2, vol_size2, tgt_vol_name2 = (
            self.common._get_clone_vol_info(
                self.data.test_clone_volume, [], src_snapshots))
        self.assertEqual(ref_dev_id, src_dev_id1)
        self.assertEqual(ref_dev_id, src_dev_id2)
    def test_get_attributes_from_cinder_config_new_and_old(self):
        """Test get_attributes_from_cinder_config with and without config.

        A fully populated configuration yields the kwargs dict; a bare
        configuration yields None.
        """
        kwargs_expected = (
            {'RestServerIp': '1.1.1.1', 'RestServerPort': 8443,
             'RestUserName': 'smc', 'RestPassword': 'smc', 'SSLVerify': False,
             'SerialNumber': self.data.array, 'srpName': 'SRP_1',
             'PortGroup': self.data.port_group_name_i})
        old_conf = tpfo.FakeConfiguration(None, 'CommonTests', 1, 1)
        configuration = tpfo.FakeConfiguration(
            None, 'CommonTests', 1, 1, san_ip='1.1.1.1', san_login='smc',
            vmax_array=self.data.array, vmax_srp='SRP_1', san_password='smc',
            san_api_port=8443, vmax_port_groups=[self.data.port_group_name_i])
        self.common.configuration = configuration
        kwargs_returned = self.common.get_attributes_from_cinder_config()
        self.assertEqual(kwargs_expected, kwargs_returned)
        self.common.configuration = old_conf
        kwargs = self.common.get_attributes_from_cinder_config()
        self.assertIsNone(kwargs)
    def test_get_attributes_from_cinder_config_with_port(self):
        """Test get_attributes_from_cinder_config with a custom REST port."""
        kwargs_expected = (
            {'RestServerIp': '1.1.1.1', 'RestServerPort': 3448,
             'RestUserName': 'smc', 'RestPassword': 'smc', 'SSLVerify': False,
             'SerialNumber': self.data.array, 'srpName': 'SRP_1',
             'PortGroup': self.data.port_group_name_i})
        configuration = tpfo.FakeConfiguration(
            None, 'CommonTests', 1, 1, san_ip='1.1.1.1', san_login='smc',
            vmax_array=self.data.array, vmax_srp='SRP_1', san_password='smc',
            san_api_port=3448, vmax_port_groups=[self.data.port_group_name_i])
        self.common.configuration = configuration
        kwargs_returned = self.common.get_attributes_from_cinder_config()
        self.assertEqual(kwargs_expected, kwargs_returned)
    def test_get_attributes_from_cinder_config_no_port(self):
        """Test get_attributes_from_cinder_config defaults the REST port."""
        kwargs_expected = (
            {'RestServerIp': '1.1.1.1', 'RestServerPort': 8443,
             'RestUserName': 'smc', 'RestPassword': 'smc', 'SSLVerify': False,
             'SerialNumber': self.data.array, 'srpName': 'SRP_1',
             'PortGroup': self.data.port_group_name_i})
        configuration = tpfo.FakeConfiguration(
            None, 'CommonTests', 1, 1, san_ip='1.1.1.1', san_login='smc',
            vmax_array=self.data.array, vmax_srp='SRP_1', san_password='smc',
            vmax_port_groups=[self.data.port_group_name_i])
        self.common.configuration = configuration
        kwargs_returned = self.common.get_attributes_from_cinder_config()
        self.assertEqual(kwargs_expected, kwargs_returned)
    def test_get_ssl_attributes_from_cinder_config(self):
        """Test SSLVerify resolution from driver SSL config.

        verify=True with a cert path yields the path; verify=True without
        a path yields True; verify=False yields False.
        """
        conf = tpfo.FakeConfiguration(
            None, 'CommonTests', 1, 1, san_ip='1.1.1.1', san_login='smc',
            vmax_array=self.data.array, vmax_srp='SRP_1', san_password='smc',
            vmax_port_groups=[self.data.port_group_name_i],
            driver_ssl_cert_verify=True,
            driver_ssl_cert_path='/path/to/cert')
        self.common.configuration = conf
        conf_returned = self.common.get_attributes_from_cinder_config()
        self.assertEqual('/path/to/cert', conf_returned['SSLVerify'])
        conf.driver_ssl_cert_verify = True
        conf.driver_ssl_cert_path = None
        conf_returned = self.common.get_attributes_from_cinder_config()
        self.assertTrue(conf_returned['SSLVerify'])
        conf.driver_ssl_cert_verify = False
        conf.driver_ssl_cert_path = None
        conf_returned = self.common.get_attributes_from_cinder_config()
        self.assertFalse(conf_returned['SSLVerify'])
@mock.patch.object(rest.PowerMaxRest, 'get_size_of_device_on_array',
return_value=2.0)
def test_manage_snapshot_get_size_success(self, mock_get_size):
size = self.common.manage_existing_snapshot_get_size(
self.data.test_snapshot)
self.assertEqual(2, size)
    @mock.patch.object(rest.PowerMaxRest, 'get_volume_snap',
                       return_value={'snap_name': 'snap_name'})
    @mock.patch.object(
        common.PowerMaxCommon, 'get_snapshot_metadata',
        return_value={'snap-meta-key-1': 'snap-meta-value-1',
                      'snap-meta-key-2': 'snap-meta-value-2'})
    def test_manage_snapshot_success(self, mck_meta, mock_snap):
        """Test manage_existing_snapshot merges snap and user metadata.

        The returned model update carries the OS- prefixed snap name in
        provider_location plus the combined metadata dict.
        """
        snapshot = deepcopy(self.data.test_snapshot_manage)
        snapshot.metadata = {'user-meta-key-1': 'user-meta-value-1',
                             'user-meta-key-2': 'user-meta-value-2'}
        existing_ref = {u'source-name': u'test_snap'}
        updates_response = self.common.manage_existing_snapshot(
            snapshot, existing_ref)
        prov_loc = {'source_id': self.data.device_id,
                    'snap_name': 'OS-%s' % existing_ref['source-name']}
        updates = {'display_name': 'my_snap',
                   'provider_location': six.text_type(prov_loc),
                   'metadata': {'snap-meta-key-1': 'snap-meta-value-1',
                                'snap-meta-key-2': 'snap-meta-value-2',
                                'user-meta-key-1': 'user-meta-value-1',
                                'user-meta-key-2': 'user-meta-value-2'}}
        self.assertEqual(updates_response, updates)
def test_manage_snapshot_fail_already_managed(self):
snapshot = self.data.test_snapshot_manage
existing_ref = {u'source-name': u'OS-test_snap'}
self.assertRaises(exception.VolumeBackendAPIException,
self.common.manage_existing_snapshot,
snapshot, existing_ref)
@mock.patch.object(utils.PowerMaxUtils, 'is_volume_failed_over',
return_value=True)
def test_manage_snapshot_fail_vol_failed_over(self, mock_failed):
snapshot = self.data.test_snapshot_manage
existing_ref = {u'source-name': u'test_snap'}
self.assertRaises(exception.VolumeBackendAPIException,
self.common.manage_existing_snapshot,
snapshot, existing_ref)
@mock.patch.object(rest.PowerMaxRest, 'get_volume_snap',
return_value=False)
def test_manage_snapshot_fail_vol_not_snap_src(self, mock_snap):
snapshot = self.data.test_snapshot_manage
existing_ref = {u'source-name': u'test_snap'}
self.assertRaises(exception.VolumeBackendAPIException,
self.common.manage_existing_snapshot,
snapshot, existing_ref)
@mock.patch.object(utils.PowerMaxUtils, 'modify_snapshot_prefix',
side_effect=exception.VolumeBackendAPIException)
def test_manage_snapshot_fail_add_prefix(self, mock_mod):
snapshot = self.data.test_snapshot_manage
existing_ref = {u'source-name': u'test_snap'}
self.assertRaises(exception.VolumeBackendAPIException,
self.common.manage_existing_snapshot,
snapshot, existing_ref)
@mock.patch.object(rest.PowerMaxRest, 'modify_volume_snap')
def test_unmanage_snapshot_success(self, mock_mod, ):
self.common.unmanage_snapshot(self.data.test_snapshot_manage)
mock_mod.assert_called_once()
    @mock.patch.object(common.PowerMaxCommon, '_sync_check')
    @mock.patch.object(rest.PowerMaxRest, 'modify_volume_snap')
    def test_unmanage_snapshot_no_sync_check(self, mock_mod, mock_sync):
        """Test unmanage_snapshot does not trigger a sync check."""
        self.common.unmanage_snapshot(self.data.test_snapshot_manage)
        mock_mod.assert_called_once()
        mock_sync.assert_not_called()
@mock.patch.object(utils.PowerMaxUtils, 'is_volume_failed_over',
return_value=True)
def test_unmanage_snapshot_fail_failover(self, mock_failed):
self.assertRaises(exception.VolumeBackendAPIException,
self.common.unmanage_snapshot,
self.data.test_snapshot_manage)
@mock.patch.object(rest.PowerMaxRest, 'modify_volume_snap',
side_effect=exception.VolumeBackendAPIException)
def test_unmanage_snapshot_fail_rename(self, mock_snap):
self.assertRaises(exception.VolumeBackendAPIException,
self.common.unmanage_snapshot,
self.data.test_snapshot_manage)
    @mock.patch.object(provision.PowerMaxProvision, 'delete_volume_snap')
    @mock.patch.object(provision.PowerMaxProvision, 'is_restore_complete',
                       return_value=True)
    @mock.patch.object(common.PowerMaxCommon, '_clone_check')
    @mock.patch.object(provision.PowerMaxProvision, 'revert_volume_snapshot')
    def test_revert_to_snapshot(self, mock_revert, mock_clone,
                                mock_complete, mock_delete):
        """Test revert_to_snapshot drives the full revert sequence.

        Verifies revert, clone check, restore-complete wait and final
        snap deletion are each called once with the expected arguments.
        """
        volume = self.data.test_volume
        snapshot = self.data.test_snapshot
        array = self.data.array
        device_id = self.data.device_id
        snap_name = self.data.snap_location['snap_name']
        extra_specs = deepcopy(self.data.extra_specs_intervals_set)
        extra_specs['storagetype:portgroupname'] = (
            self.data.port_group_name_f)
        self.common.revert_to_snapshot(volume, snapshot)
        mock_revert.assert_called_once_with(
            array, device_id, snap_name, extra_specs)
        mock_clone.assert_called_once_with(array, device_id, extra_specs)
        mock_complete.assert_called_once_with(array, device_id,
                                              snap_name, extra_specs)
        mock_delete.assert_called_once_with(array, snap_name, device_id,
                                            restored=True, generation=0)
@mock.patch.object(utils.PowerMaxUtils, 'is_replication_enabled',
return_value=True)
def test_revert_to_snapshot_replicated(self, mock_rep):
volume = self.data.test_volume
snapshot = self.data.test_snapshot
self.assertRaises(exception.VolumeDriverException,
self.common.revert_to_snapshot, volume, snapshot)
def test_get_initiator_check_flag(self):
self.common.configuration.initiator_check = False
initiator_check = self.common._get_initiator_check_flag()
self.assertFalse(initiator_check)
def test_get_initiator_check_flag_true(self):
self.common.configuration.initiator_check = True
initiator_check = self.common._get_initiator_check_flag()
self.assertTrue(initiator_check)
    def test_get_manageable_volumes_success(self):
        """Test get_manageable_volumes lists a single manageable volume."""
        marker = limit = offset = sort_keys = sort_dirs = None
        with mock.patch.object(
                self.rest, 'get_private_volume_list',
                return_value=self.data.priv_vol_func_response_single):
            vols_lists = self.common.get_manageable_volumes(
                marker, limit, offset, sort_keys, sort_dirs)
        expected_response = [
            {'reference': {'source-id': '00001'}, 'safe_to_manage': True,
             'size': 1.0, 'reason_not_safe': None, 'cinder_id': None,
             'extra_info': {'config': 'TDEV', 'emulation': 'FBA'}}]
        self.assertEqual(vols_lists, expected_response)
    def test_get_manageable_volumes_filters_set(self):
        """Test get_manageable_volumes honours marker/limit/offset/sort."""
        marker, limit, offset = '00002', 2, 1
        sort_keys, sort_dirs = 'size', 'desc'
        with mock.patch.object(
                self.rest, 'get_private_volume_list',
                return_value=self.data.priv_vol_func_response_multi):
            vols_lists = self.common.get_manageable_volumes(
                marker, limit, offset, sort_keys, sort_dirs)
        expected_response = [
            {'reference': {'source-id': '00003'}, 'safe_to_manage': True,
             'size': 300, 'reason_not_safe': None, 'cinder_id': None,
             'extra_info': {'config': 'TDEV', 'emulation': 'FBA'}},
            {'reference': {'source-id': '00004'}, 'safe_to_manage': True,
             'size': 400, 'reason_not_safe': None, 'cinder_id': None,
             'extra_info': {'config': 'TDEV', 'emulation': 'FBA'}}]
        self.assertEqual(vols_lists, expected_response)
    def test_get_manageable_volumes_fail_no_vols(self):
        """Test get_manageable_volumes with no volumes on the array."""
        marker = limit = offset = sort_keys = sort_dirs = None
        with mock.patch.object(
                self.rest, 'get_private_volume_list',
                return_value=[]):
            expected_response = []
            vol_list = self.common.get_manageable_volumes(
                marker, limit, offset, sort_keys, sort_dirs)
            self.assertEqual(vol_list, expected_response)
    def test_get_manageable_volumes_fail_no_valid_vols(self):
        """Test get_manageable_volumes when no volume is manageable."""
        marker = limit = offset = sort_keys = sort_dirs = None
        with mock.patch.object(
                self.rest, 'get_private_volume_list',
                return_value=self.data.priv_vol_func_response_multi_invalid):
            expected_response = []
            vol_list = self.common.get_manageable_volumes(
                marker, limit, offset, sort_keys, sort_dirs)
            self.assertEqual(vol_list, expected_response)
    def test_get_manageable_snapshots_success(self):
        """Test get_manageable_snapshots lists a single manageable snap."""
        marker = limit = offset = sort_keys = sort_dirs = None
        with mock.patch.object(
                self.rest, 'get_private_volume_list',
                return_value=self.data.priv_vol_func_response_single):
            snap_list = self.common.get_manageable_snapshots(
                marker, limit, offset, sort_keys, sort_dirs)
        expected_response = [{
            'reference': {'source-name': 'testSnap1'},
            'safe_to_manage': True, 'size': 1,
            'reason_not_safe': None, 'cinder_id': None,
            'extra_info': {
                'generation': 0, 'secured': False, 'timeToLive': 'N/A',
                'timestamp': mock.ANY},
            'source_reference': {'source-id': '00001'}}]
        self.assertEqual(snap_list, expected_response)
    def test_get_manageable_snapshots_filters_set(self):
        """Test get_manageable_snapshots honours marker/limit/offset/sort."""
        marker, limit, offset = 'testSnap2', 2, 1
        sort_keys, sort_dirs = 'size', 'desc'
        with mock.patch.object(
                self.rest, 'get_private_volume_list',
                return_value=self.data.priv_vol_func_response_multi):
            vols_lists = self.common.get_manageable_snapshots(
                marker, limit, offset, sort_keys, sort_dirs)
        expected_response = [
            {'reference': {'source-name': 'testSnap3'},
             'safe_to_manage': True, 'size': 300, 'reason_not_safe': None,
             'cinder_id': None, 'extra_info': {
                 'generation': 0, 'secured': False, 'timeToLive': 'N/A',
                 'timestamp': mock.ANY},
             'source_reference': {'source-id': '00003'}},
            {'reference': {'source-name': 'testSnap4'},
             'safe_to_manage': True, 'size': 400, 'reason_not_safe': None,
             'cinder_id': None, 'extra_info': {
                 'generation': 0, 'secured': False, 'timeToLive': 'N/A',
                 'timestamp': mock.ANY},
             'source_reference': {'source-id': '00004'}}]
        self.assertEqual(vols_lists, expected_response)
    def test_get_manageable_snapshots_fail_no_snaps(self):
        """Test get_manageable_snapshots with no snapshots on the array."""
        marker = limit = offset = sort_keys = sort_dirs = None
        with mock.patch.object(self.rest, 'get_private_volume_list',
                               return_value=[]):
            expected_response = []
            vols_lists = self.common.get_manageable_snapshots(
                marker, limit, offset, sort_keys, sort_dirs)
            self.assertEqual(vols_lists, expected_response)
    def test_get_manageable_snapshots_fail_no_valid_snaps(self):
        """Test get_manageable_snapshots when no snapshot is manageable."""
        marker = limit = offset = sort_keys = sort_dirs = None
        with mock.patch.object(
                self.rest, 'get_private_volume_list',
                return_value=self.data.priv_vol_func_response_multi_invalid):
            expected_response = []
            vols_lists = self.common.get_manageable_snapshots(
                marker, limit, offset, sort_keys, sort_dirs)
            self.assertEqual(vols_lists, expected_response)
    def test_get_slo_workload_combo_from_cinder_conf(self):
        """Test service level / workload combinations from cinder config.

        SLO+workload and SLO-only are returned in the kwargs; with no SLO
        set the ServiceLevel/Workload keys are absent entirely.
        """
        self.common.configuration.vmax_service_level = 'Diamond'
        self.common.configuration.vmax_workload = 'DSS'
        response1 = self.common.get_attributes_from_cinder_config()
        self.assertEqual('Diamond', response1['ServiceLevel'])
        self.assertEqual('DSS', response1['Workload'])
        self.common.configuration.vmax_service_level = 'Diamond'
        self.common.configuration.vmax_workload = None
        response2 = self.common.get_attributes_from_cinder_config()
        self.assertEqual(self.common.configuration.vmax_service_level,
                         response2['ServiceLevel'])
        self.assertIsNone(response2['Workload'])
        expected_response = {
            'RestServerIp': '1.1.1.1', 'RestServerPort': 8443,
            'RestUserName': 'smc', 'RestPassword': 'smc', 'SSLVerify': False,
            'SerialNumber': '000197800123', 'srpName': 'SRP_1',
            'PortGroup': 'OS-fibre-PG'}
        self.common.configuration.vmax_service_level = None
        self.common.configuration.vmax_workload = 'DSS'
        response3 = self.common.get_attributes_from_cinder_config()
        self.assertEqual(expected_response, response3)
        self.common.configuration.vmax_service_level = None
        self.common.configuration.vmax_workload = None
        response4 = self.common.get_attributes_from_cinder_config()
        self.assertEqual(expected_response, response4)
    def test_get_u4p_failover_info(self):
        """Test _get_u4p_failover_info enables failover from config."""
        configuration = tpfo.FakeConfiguration(
            None, 'CommonTests', 1, 1, san_ip='1.1.1.1', san_login='test',
            san_password='test', san_api_port=8443,
            driver_ssl_cert_verify='/path/to/cert',
            u4p_failover_target=(self.data.u4p_failover_config[
                'u4p_failover_targets']), u4p_failover_backoff_factor='2',
            u4p_failover_retries='3', u4p_failover_timeout='10',
            u4p_primary='10.10.10.10')
        self.common.configuration = configuration
        self.common._get_u4p_failover_info()
        self.assertTrue(self.rest.u4p_failover_enabled)
        self.assertIsNotNone(self.rest.u4p_failover_targets)
    def test_update_vol_stats_retest_u4p(self):
        """Test update_volume_stats retests primary U4P only on autofailback."""
        self.rest.u4p_in_failover = True
        self.rest.u4p_failover_autofailback = True
        with mock.patch.object(
                self.common, 'retest_primary_u4p') as mock_retest:
            self.common.update_volume_stats()
            mock_retest.assert_called_once()
        self.rest.u4p_in_failover = True
        self.rest.u4p_failover_autofailback = False
        with mock.patch.object(
                self.common, 'retest_primary_u4p') as mock_retest:
            self.common.update_volume_stats()
            mock_retest.assert_not_called()
@mock.patch.object(rest.PowerMaxRest, 'request', return_value=[200, None])
@mock.patch.object(
common.PowerMaxCommon, 'get_attributes_from_cinder_config',
return_value=tpd.PowerMaxData.u4p_failover_target[0])
def test_retest_primary_u4p(self, mock_primary_u4p, mock_request):
self.common.retest_primary_u4p()
self.assertFalse(self.rest.u4p_in_failover)
    @mock.patch.object(rest.PowerMaxRest, 'is_vol_in_rep_session',
                       return_value=(None, False, None))
    @mock.patch.object(common.PowerMaxCommon, '_sync_check')
    def test_extend_vol_validation_checks_success(self, mck_sync, mck_rep):
        """Test _extend_vol_validation_checks passes for a valid extend."""
        volume = self.data.test_volume
        array = self.data.array
        device_id = self.data.device_id
        new_size = self.data.test_volume.size + 1
        extra_specs = deepcopy(self.data.extra_specs)
        self.common._extend_vol_validation_checks(
            array, device_id, volume.name, extra_specs, volume.size, new_size)
    @mock.patch.object(rest.PowerMaxRest, 'is_vol_in_rep_session',
                       return_value=(None, False, None))
    @mock.patch.object(common.PowerMaxCommon, '_sync_check')
    def test_extend_vol_val_check_no_device(self, mck_sync, mck_rep):
        """Test _extend_vol_validation_checks fails with no device id."""
        volume = self.data.test_volume
        array = self.data.array
        device_id = None
        new_size = self.data.test_volume.size + 1
        extra_specs = deepcopy(self.data.extra_specs)
        self.assertRaises(
            exception.VolumeBackendAPIException,
            self.common._extend_vol_validation_checks,
            array, device_id, volume.name, extra_specs, volume.size, new_size)
    @mock.patch.object(rest.PowerMaxRest, 'is_vol_in_rep_session',
                       return_value=(None, True, None))
    @mock.patch.object(common.PowerMaxCommon, '_sync_check')
    def test_extend_vol_val_check_snap_src(self, mck_sync, mck_rep):
        """Test _extend_vol_validation_checks fails for a snapvx source.

        On non next gen arrays a snap source volume cannot be extended.
        """
        volume = self.data.test_volume
        array = self.data.array
        device_id = self.data.device_id
        new_size = self.data.test_volume.size + 1
        extra_specs = deepcopy(self.data.extra_specs)
        self.common.next_gen = False
        self.assertRaises(
            exception.VolumeBackendAPIException,
            self.common._extend_vol_validation_checks,
            array, device_id, volume.name, extra_specs, volume.size, new_size)
    @mock.patch.object(rest.PowerMaxRest, 'is_vol_in_rep_session',
                       return_value=(None, False, None))
    @mock.patch.object(common.PowerMaxCommon, '_sync_check')
    def test_extend_vol_val_check_wrong_size(self, mck_sync, mck_rep):
        """Test _extend_vol_validation_checks fails when shrinking."""
        volume = self.data.test_volume
        array = self.data.array
        device_id = self.data.device_id
        new_size = volume.size - 1
        extra_specs = deepcopy(self.data.extra_specs)
        self.assertRaises(
            exception.VolumeBackendAPIException,
            self.common._extend_vol_validation_checks,
            array, device_id, volume.name, extra_specs, volume.size, new_size)
    def test_array_ode_capabilities_check_non_next_gen_local(self):
        """Rep enabled, neither array next gen, returns F,F,F,F.

        ODE (presumably online device expansion -- confirm against the
        driver) capability flags for (r1, r1_metro, r2, r2_metro).
        """
        array = self.data.powermax_model_details['symmetrixId']
        self.common.next_gen = False
        (r1_ode, r1_ode_metro,
         r2_ode, r2_ode_metro) = self.common._array_ode_capabilities_check(
            array, True)
        self.assertFalse(r1_ode)
        self.assertFalse(r1_ode_metro)
        self.assertFalse(r2_ode)
        self.assertFalse(r2_ode_metro)
    @mock.patch.object(rest.PowerMaxRest, 'get_array_detail',
                       return_value={'ucode': '5977.1.1'})
    @mock.patch.object(common.PowerMaxCommon, 'get_rdf_details',
                       return_value=(10, tpd.PowerMaxData.remote_array))
    def test_array_ode_capabilities_check_next_gen_non_rep_pre_elm(
            self, mock_rdf, mock_det):
        """Rep disabled, local array next gen, pre elm, returns T,F,F,F"""
        array = self.data.powermax_model_details['symmetrixId']
        self.common.ucode_level = '5978.1.1'
        self.common.next_gen = True
        (r1_ode, r1_ode_metro,
         r2_ode, r2_ode_metro) = self.common._array_ode_capabilities_check(
            array, False)
        self.assertTrue(r1_ode)
        self.assertFalse(r1_ode_metro)
        self.assertFalse(r2_ode)
        self.assertFalse(r2_ode_metro)
    @mock.patch.object(rest.PowerMaxRest, 'get_array_detail',
                       return_value={'ucode': '5977.1.1'})
    @mock.patch.object(common.PowerMaxCommon, 'get_rdf_details',
                       return_value=(10, tpd.PowerMaxData.remote_array))
    def test_array_ode_capabilities_check_next_gen_remote_rep(
            self, mock_rdf, mock_det):
        """Rep enabled, remote not next gen, returns T,T,F,F"""
        array = self.data.powermax_model_details['symmetrixId']
        self.common.ucode_level = self.data.powermax_model_details['ucode']
        self.common.next_gen = True
        (r1_ode, r1_ode_metro,
         r2_ode, r2_ode_metro) = self.common._array_ode_capabilities_check(
            array, True)
        self.assertTrue(r1_ode)
        self.assertTrue(r1_ode_metro)
        self.assertFalse(r2_ode)
        self.assertFalse(r2_ode_metro)
    @mock.patch.object(rest.PowerMaxRest, 'get_array_detail',
                       return_value={'ucode': '5978.1.1'})
    @mock.patch.object(common.PowerMaxCommon, 'get_rdf_details',
                       return_value=(10, tpd.PowerMaxData.remote_array))
    def test_array_ode_capabilities_check_next_gen_pre_elm_rep(
            self, mock_rdf, mock_det):
        """Rep enabled, both array next gen, tgt<5978.221, returns T,T,T,F"""
        array = self.data.powermax_model_details['symmetrixId']
        self.common.ucode_level = self.data.powermax_model_details['ucode']
        self.common.next_gen = True
        (r1_ode, r1_ode_metro,
         r2_ode, r2_ode_metro) = self.common._array_ode_capabilities_check(
            array, True)
        self.assertTrue(r1_ode)
        self.assertTrue(r1_ode_metro)
        self.assertTrue(r2_ode)
        self.assertFalse(r2_ode_metro)
    @mock.patch.object(rest.PowerMaxRest, 'get_array_detail',
                       return_value=tpd.PowerMaxData.ucode_5978_foxtail)
    @mock.patch.object(common.PowerMaxCommon, 'get_rdf_details',
                       return_value=(10, tpd.PowerMaxData.remote_array))
    def test_array_ode_capabilities_check_next_gen_post_elm_rep(
            self, mock_rdf, mock_det):
        """Rep enabled, both array next gen, tgt>5978.221 returns T,T,T,T"""
        array = self.data.powermax_model_details['symmetrixId']
        self.common.ucode_level = self.data.powermax_model_details['ucode']
        self.common.next_gen = True
        (r1_ode, r1_ode_metro,
         r2_ode, r2_ode_metro) = self.common._array_ode_capabilities_check(
            array, True)
        self.assertTrue(r1_ode)
        self.assertTrue(r1_ode_metro)
        self.assertTrue(r2_ode)
        self.assertTrue(r2_ode_metro)
    @mock.patch.object(common.PowerMaxCommon,
                       '_add_new_volume_to_volume_group')
    @mock.patch.object(common.PowerMaxCommon, 'setup_volume_replication')
    @mock.patch.object(provision.PowerMaxProvision, 'extend_volume')
    @mock.patch.object(rest.PowerMaxRest, 'get_size_of_device_on_array',
                       return_value=tpd.PowerMaxData.test_volume.size)
    @mock.patch.object(provision.PowerMaxProvision, 'break_rdf_relationship')
    @mock.patch.object(masking.PowerMaxMasking, 'remove_and_reset_members')
    @mock.patch.object(
        common.PowerMaxCommon, '_get_replication_extra_specs',
        return_value=tpd.PowerMaxData.rep_extra_specs)
    @mock.patch.object(
        common.PowerMaxCommon, 'get_remote_target_device',
        return_value=(
            tpd.PowerMaxData.device_id2, tpd.PowerMaxData.remote_array,
            tpd.PowerMaxData.rdf_group_vol_details['localRdfGroupNumber'],
            tpd.PowerMaxData.rdf_group_vol_details['localVolumeState'],
            tpd.PowerMaxData.rdf_group_vol_details['rdfpairState']))
    def test_extend_legacy_replicated_vol(self, mck_get_tgt, mck_rdf_specs,
                                          mck_reset, mck_break_rdf, mck_size,
                                          mck_extend, mck_set_rep, mck_add):
        """Test _extend_legacy_replicated_vol happy path.

        The whole break-RDF / extend / re-setup-replication pipeline is
        mocked out; the test verifies the call completes without error.
        """
        volume = self.data.test_volume_group_member
        array = self.data.array
        device_id = self.data.device_id
        new_size = volume.size + 1
        extra_specs = deepcopy(self.data.extra_specs)
        self.common._extend_legacy_replicated_vol(
            array, volume, device_id, volume.name, new_size, extra_specs)
@mock.patch.object(
common.PowerMaxCommon, 'get_remote_target_device',
return_value=(None, None, None, None, None))
@mock.patch.object(common.PowerMaxCommon, '_sync_check')
def test_extend_legacy_replicated_vol_fail(self, mck_sync, mck_get_tgt):
volume = self.data.test_volume_group_member
array = self.data.array
device_id = self.data.device_id
new_size = volume.size + 1
extra_specs = deepcopy(self.data.extra_specs)
self.assertRaises(
exception.VolumeBackendAPIException,
self.common._extend_vol_validation_checks,
array, device_id, volume.name, extra_specs, volume.size, new_size)
    def test_get_unisphere_port(self):
        """Test _get_unisphere_port with explicit and default ports."""
        # Test user set port ID
        configuration = tpfo.FakeConfiguration(
            None, 'CommonTests', 1, 1, san_ip='1.1.1.1', san_login='smc',
            vmax_array=self.data.array, vmax_srp='SRP_1', san_password='smc',
            san_api_port=1234, vmax_port_groups=[self.data.port_group_name_i])
        self.common.configuration = configuration
        port = self.common._get_unisphere_port()
        self.assertEqual(1234, port)
        # Test no set port ID, use default port
        configuration = tpfo.FakeConfiguration(
            None, 'CommonTests', 1, 1, san_ip='1.1.1.1', san_login='smc',
            vmax_array=self.data.array, vmax_srp='SRP_1', san_password='smc',
            vmax_port_groups=[self.data.port_group_name_i])
        self.common.configuration = configuration
        ref_port = utils.DEFAULT_PORT
        port = self.common._get_unisphere_port()
        self.assertEqual(ref_port, port)
    @mock.patch.object(utils.PowerMaxUtils,
                       'get_replication_config')
    def test_get_replication_info(self, mock_config):
        """_get_replication_info must not parse a replication config here."""
        self.common._get_replication_info()
        mock_config.assert_not_called()
    @mock.patch.object(common.PowerMaxCommon,
                       '_do_sync_check')
    def test_sync_check_no_source_device_on_array(self, mock_check):
        """Sync check is skipped when the source device no longer exists.

        A 404 from get_volume for the given source device must be
        swallowed and _do_sync_check must not be invoked.
        """
        with mock.patch.object(self.rest, 'get_volume',
                               side_effect=exception.VolumeBackendAPIException(
                                   "404 00123 does not exist")):
            array = self.data.array
            device_id = self.data.device_id
            extra_specs = self.data.extra_specs
            self.common._sync_check(array, device_id, extra_specs,
                                    source_device_id='00123')
            mock_check.assert_not_called()
    def test_sync_check(self):
        """_sync_check forwards the correct device to _do_sync_check.

        Three paths: explicit source_device_id, tgt_only resolution via
        _get_target_source_device, and the plain device_id default.
        """
        array = self.data.array
        device_id = self.data.device_id
        extra_specs = self.data.extra_specs
        with mock.patch.object(self.common, '_do_sync_check') as mck_sync:
            # Explicit source device id is used directly.
            self.common._sync_check(array, device_id, extra_specs, False,
                                    self.data.device_id2)
            mck_sync.assert_called_with(array, self.data.device_id2,
                                        extra_specs, False)
            mck_sync.reset_mock()
            # tgt_only=True resolves the source from the target device.
            with mock.patch.object(self.common, '_get_target_source_device',
                                   return_value=self.data.device_id3):
                self.common._sync_check(array, device_id, extra_specs, True)
                mck_sync.assert_called_with(array, self.data.device_id3,
                                            extra_specs, True)
            mck_sync.reset_mock()
            # Default: the supplied device id is checked.
            self.common._sync_check(array, device_id, extra_specs)
            mck_sync.assert_called_with(array, device_id, extra_specs, False)
    @mock.patch.object(common.PowerMaxCommon,
                       '_unlink_targets_and_delete_temp_snapvx')
    @mock.patch.object(rest.PowerMaxRest, 'find_snap_vx_sessions',
                       return_value=(tpd.PowerMaxData.snap_src_sessions,
                                     tpd.PowerMaxData.snap_tgt_session))
    @mock.patch.object(rest.PowerMaxRest, 'is_vol_in_rep_session',
                       return_value=(True, True, False))
    def test_do_sync_check(self, mck_rep, mck_find, mck_unlink):
        """_do_sync_check unlinks once per snapvx session found.

        With this fixture data (source sessions plus one target session)
        the unlink helper is expected to run three times.
        """
        array = self.data.array
        device_id = self.data.device_id
        extra_specs = self.data.extra_specs
        self.common._do_sync_check(array, device_id, extra_specs)
        self.assertEqual(3, mck_unlink.call_count)
    @mock.patch.object(provision.PowerMaxProvision, 'delete_temp_volume_snap')
    @mock.patch.object(provision.PowerMaxProvision,
                       'break_replication_relationship')
    def test_unlink_targets_and_delete_temp_snapvx(self, mck_break, mck_del):
        """Verify snapvx unlink and conditional temp-snapshot deletion.

        First pass (copy-mode-enabled session): break with copy=True and
        delete the temp snapshot. Second pass (copy_mode False, expired
        True): break with copy=False and leave the snapshot alone.
        """
        array = self.data.array
        extra_specs = self.data.extra_specs
        session = self.data.snap_tgt_session_cm_enabled
        snap_name = session['snap_name']
        source = session['source_vol_id']
        generation = session['generation']
        target = session['target_vol_id']
        self.common._unlink_targets_and_delete_temp_snapvx(
            session, array, extra_specs)
        mck_break.assert_called_with(array, target, source, snap_name,
                                     extra_specs, generation, True)
        mck_del.assert_called_once_with(array, snap_name, source, generation)
        mck_break.reset_mock()
        mck_del.reset_mock()
        # Flip the session to non-copy-mode and expired for the second pass.
        session['copy_mode'] = False
        session['expired'] = True
        self.common._unlink_targets_and_delete_temp_snapvx(
            session, array, extra_specs)
        mck_break.assert_called_with(array, target, source, snap_name,
                                     extra_specs, generation, False)
        mck_del.assert_not_called()
@mock.patch.object(rest.PowerMaxRest, 'find_snap_vx_sessions',
return_value=(None, tpd.PowerMaxData.snap_tgt_session))
@mock.patch.object(rest.PowerMaxRest, 'is_vol_in_rep_session',
return_value=(True, False, False))
def test_get_target_source_device(self, mck_rep, mck_find):
array = self.data.array
tgt_device = self.data.device_id2
src_device = self.common._get_target_source_device(array, tgt_device)
self.assertEqual(src_device, self.data.device_id)
    @mock.patch.object(common.PowerMaxCommon, '_delete_valid_snapshot')
    @mock.patch.object(rest.PowerMaxRest, 'find_snap_vx_sessions',
                       return_value=(tpd.PowerMaxData.snap_src_sessions,
                                     tpd.PowerMaxData.snap_tgt_session))
    @mock.patch.object(rest.PowerMaxRest, 'is_vol_in_rep_session',
                       return_value=(True, True, False))
    def test_clone_check(self, mck_rep, mck_find, mck_del):
        """_clone_check attempts snapshot deletion up to the unlink limit.

        With the mocked sessions and snapvx_unlink_limit of 3,
        _delete_valid_snapshot is expected to be invoked three times.
        """
        array = self.data.array
        device_id = self.data.device_id
        extra_specs = self.data.extra_specs
        self.common.snapvx_unlink_limit = 3
        self.common._clone_check(array, device_id, extra_specs)
        self.assertEqual(3, mck_del.call_count)
    @mock.patch.object(common.PowerMaxCommon,
                       '_unlink_targets_and_delete_temp_snapvx')
    def test_delete_valid_snapshot(self, mck_unlink):
        """Only 'EMC_SMI'-named or expired temp snapshots get deleted.

        Cases: EMC_SMI-prefixed name (deleted), expired temp snapshot
        (deleted), unexpired temp snapshot (left alone).
        """
        array = self.data.array
        extra_specs = self.data.extra_specs
        # EMC_SMI-prefixed snapshot name: eligible for deletion.
        session = {'snap_name': 'EMC_SMI_TEST', 'expired': False}
        self.common._delete_valid_snapshot(array, session, extra_specs)
        mck_unlink.assert_called_with(session, array, extra_specs)
        mck_unlink.reset_mock()
        # Expired temp snapshot: eligible for deletion.
        session = {'snap_name': 'temp-000AA-snapshot_for_clone',
                   'expired': True}
        self.common._delete_valid_snapshot(array, session, extra_specs)
        mck_unlink.assert_called_with(session, array, extra_specs)
        mck_unlink.reset_mock()
        # Unexpired temp snapshot: must not be touched.
        session = {'snap_name': 'temp-000AA-snapshot_for_clone',
                   'expired': False}
        self.common._delete_valid_snapshot(array, session, extra_specs)
        mck_unlink.assert_not_called()
def test_delete_valid_snapshot_exception(self):
array = self.data.array
extra_specs = self.data.extra_specs
session = {'snap_name': 'temp-000AA-snapshot_for_clone',
'expired': True}
with mock.patch.object(
self.common, '_unlink_targets_and_delete_temp_snapvx',
side_effect=exception.VolumeBackendAPIException(
"404 temp-000AA-snapshot_for_clone does not exist")
) as mck_unlink:
self.common._delete_valid_snapshot(array, session, extra_specs)
mck_unlink.assert_called_with(session, array, extra_specs)
with mock.patch.object(
self.common, '_unlink_targets_and_delete_temp_snapvx',
side_effect=exception.VolumeBackendAPIException(
"500 internal server error")):
self.assertRaises(
exception.VolumeBackendAPIException,
self.common._unlink_targets_and_delete_temp_snapvx,
array, session, extra_specs)
    @mock.patch.object(rest.PowerMaxRest, '_get_private_volume',
                       return_value=tpd.PowerMaxData.priv_vol_response_rep)
    @mock.patch.object(rest.PowerMaxRest, 'get_array_model_info',
                       return_value=(tpd.PowerMaxData.array_model, None))
    @mock.patch.object(rest.PowerMaxRest, 'get_rdf_group',
                       return_value=(tpd.PowerMaxData.rdf_group_details))
    def test_get_volume_metadata_rep(self, mck_rdf, mck_model, mck_priv):
        """Metadata for a synchronously replicated volume includes R2/RDF
        fields (remote device, remote array, mode, RDF group numbers)."""
        ref_metadata = {
            'DeviceID': self.data.device_id,
            'DeviceLabel': self.data.device_label, 'ArrayID': self.data.array,
            'ArrayModel': self.data.array_model, 'ServiceLevel': 'None',
            'Workload': 'None', 'Emulation': 'FBA', 'Configuration': 'TDEV',
            'CompressionDisabled': 'True', 'ReplicationEnabled': 'True',
            'R2-DeviceID': self.data.device_id2,
            'R2-ArrayID': self.data.remote_array,
            'R2-ArrayModel': self.data.array_model,
            'ReplicationMode': 'Synchronized',
            'RDFG-Label': self.data.rdf_group_name,
            'R1-RDFG': 1, 'R2-RDFG': 1}
        array = self.data.array
        device_id = self.data.device_id
        act_metadata = self.common.get_volume_metadata(array, device_id)
        self.assertEqual(ref_metadata, act_metadata)
    @mock.patch.object(rest.PowerMaxRest, '_get_private_volume',
                       return_value=tpd.PowerMaxData.
                       priv_vol_response_metro_active_rep)
    @mock.patch.object(rest.PowerMaxRest, 'get_array_model_info',
                       return_value=(tpd.PowerMaxData.array_model, None))
    @mock.patch.object(rest.PowerMaxRest, 'get_rdf_group',
                       return_value=(tpd.PowerMaxData.rdf_group_details))
    def test_get_volume_metadata_metro_active_rep(self, mck_rdf,
                                                  mck_model, mck_priv):
        """Metro active replication reports ReplicationMode 'Metro'."""
        ref_metadata = {
            'DeviceID': self.data.device_id,
            'DeviceLabel': self.data.device_label, 'ArrayID': self.data.array,
            'ArrayModel': self.data.array_model, 'ServiceLevel': 'None',
            'Workload': 'None', 'Emulation': 'FBA', 'Configuration': 'TDEV',
            'CompressionDisabled': 'True', 'ReplicationEnabled': 'True',
            'R2-DeviceID': self.data.device_id2,
            'R2-ArrayID': self.data.remote_array,
            'R2-ArrayModel': self.data.array_model,
            'ReplicationMode': 'Metro',
            'RDFG-Label': self.data.rdf_group_name,
            'R1-RDFG': 1, 'R2-RDFG': 1}
        array = self.data.array
        device_id = self.data.device_id
        act_metadata = self.common.get_volume_metadata(array, device_id)
        self.assertEqual(ref_metadata, act_metadata)
    @mock.patch.object(rest.PowerMaxRest, '_get_private_volume',
                       return_value=tpd.PowerMaxData.priv_vol_response_no_rep)
    @mock.patch.object(rest.PowerMaxRest, 'get_array_model_info',
                       return_value=(tpd.PowerMaxData.array_model, None))
    def test_get_volume_metadata_no_rep(self, mck_model, mck_priv):
        """Metadata for an unreplicated volume carries no R2/RDF fields."""
        ref_metadata = {
            'DeviceID': self.data.device_id,
            'DeviceLabel': self.data.device_label, 'ArrayID': self.data.array,
            'ArrayModel': self.data.array_model, 'ServiceLevel': 'None',
            'Workload': 'None', 'Emulation': 'FBA', 'Configuration': 'TDEV',
            'CompressionDisabled': 'True', 'ReplicationEnabled': 'False'}
        array = self.data.array
        device_id = self.data.device_id
        act_metadata = self.common.get_volume_metadata(array, device_id)
        self.assertEqual(ref_metadata, act_metadata)
@mock.patch.object(rest.PowerMaxRest, 'get_volume_snap_info',
return_value=tpd.PowerMaxData.priv_snap_response)
def test_get_snapshot_metadata(self, mck_snap):
array = self.data.array
device_id = self.data.device_id
device_label = self.data.managed_snap_id
snap_name = self.data.test_snapshot_snap_name
ref_metadata = {'SnapshotLabel': snap_name,
'SourceDeviceID': device_id,
'SourceDeviceLabel': device_label}
act_metadata = self.common.get_snapshot_metadata(
array, device_id, snap_name)
self.assertEqual(ref_metadata, act_metadata)
def test_update_metadata(self):
model_update = {'provider_location': six.text_type(
self.data.provider_location)}
ref_model_update = (
{'provider_location': six.text_type(self.data.provider_location),
'metadata': {'device-meta-key-1': 'device-meta-value-1',
'device-meta-key-2': 'device-meta-value-2',
'user-meta-key-1': 'user-meta-value-1',
'user-meta-key-2': 'user-meta-value-2'}})
existing_metadata = {'user-meta-key-1': 'user-meta-value-1',
'user-meta-key-2': 'user-meta-value-2'}
object_metadata = {'device-meta-key-1': 'device-meta-value-1',
'device-meta-key-2': 'device-meta-value-2'}
model_update = self.common.update_metadata(
model_update, existing_metadata, object_metadata)
self.assertEqual(ref_model_update, model_update)
def test_update_metadata_no_model(self):
model_update = None
ref_model_update = (
{'metadata': {'device-meta-key-1': 'device-meta-value-1',
'device-meta-key-2': 'device-meta-value-2',
'user-meta-key-1': 'user-meta-value-1',
'user-meta-key-2': 'user-meta-value-2'}})
existing_metadata = {'user-meta-key-1': 'user-meta-value-1',
'user-meta-key-2': 'user-meta-value-2'}
object_metadata = {'device-meta-key-1': 'device-meta-value-1',
'device-meta-key-2': 'device-meta-value-2'}
model_update = self.common.update_metadata(
model_update, existing_metadata, object_metadata)
self.assertEqual(ref_model_update, model_update)
def test_update_metadata_no_existing_metadata(self):
model_update = {'provider_location': six.text_type(
self.data.provider_location)}
ref_model_update = (
{'provider_location': six.text_type(self.data.provider_location),
'metadata': {'device-meta-key-1': 'device-meta-value-1',
'device-meta-key-2': 'device-meta-value-2'}})
existing_metadata = None
object_metadata = {'device-meta-key-1': 'device-meta-value-1',
'device-meta-key-2': 'device-meta-value-2'}
model_update = self.common.update_metadata(
model_update, existing_metadata, object_metadata)
self.assertEqual(ref_model_update, model_update)
def test_update_metadata_model_list_exception(self):
model_update = [{'provider_location': six.text_type(
self.data.provider_location)}]
existing_metadata = None
object_metadata = {'device-meta-key-1': 'device-meta-value-1',
'device-meta-key-2': 'device-meta-value-2'}
self.assertRaises(
exception.VolumeBackendAPIException,
self.common.update_metadata, model_update, existing_metadata,
object_metadata)
| 49.249281
| 79
| 0.639723
| 18,133
| 154,101
| 5.073843
| 0.03353
| 0.061302
| 0.053802
| 0.023673
| 0.872408
| 0.833116
| 0.78051
| 0.744652
| 0.707947
| 0.677231
| 0
| 0.00752
| 0.266501
| 154,101
| 3,128
| 80
| 49.265026
| 0.806437
| 0.010889
| 0
| 0.639701
| 0
| 0
| 0.095678
| 0.032654
| 0
| 0
| 0
| 0
| 0.112616
| 1
| 0.07484
| false
| 0.004633
| 0.006415
| 0
| 0.081611
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
07d6e38ec524eff4b861d23fedf8a0b0307489a0
| 20
|
py
|
Python
|
tests/parser/good/multiple-and.py
|
Nakrez/RePy
|
057db55a99eac2c5cb3d622fa1f2e29f6083d8d6
|
[
"MIT"
] | 1
|
2020-11-24T05:24:26.000Z
|
2020-11-24T05:24:26.000Z
|
tests/parser/good/multiple-and.py
|
Nakrez/RePy
|
057db55a99eac2c5cb3d622fa1f2e29f6083d8d6
|
[
"MIT"
] | null | null | null |
tests/parser/good/multiple-and.py
|
Nakrez/RePy
|
057db55a99eac2c5cb3d622fa1f2e29f6083d8d6
|
[
"MIT"
] | null | null | null |
1 and 2 and 3 and 4
| 10
| 19
| 0.65
| 7
| 20
| 1.857143
| 0.714286
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.307692
| 0.35
| 20
| 1
| 20
| 20
| 0.692308
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
ed02cac010039e9e725a076a45c86316363a8106
| 22,660
|
py
|
Python
|
tests/datasource/data_connector/test_runtime_data_connector.py
|
rpanai/great_expectations
|
82c686088c0652a1b2e8e5eb95b5851efed32551
|
[
"Apache-2.0"
] | 1
|
2021-07-07T00:22:09.000Z
|
2021-07-07T00:22:09.000Z
|
tests/datasource/data_connector/test_runtime_data_connector.py
|
rpanai/great_expectations
|
82c686088c0652a1b2e8e5eb95b5851efed32551
|
[
"Apache-2.0"
] | null | null | null |
tests/datasource/data_connector/test_runtime_data_connector.py
|
rpanai/great_expectations
|
82c686088c0652a1b2e8e5eb95b5851efed32551
|
[
"Apache-2.0"
] | null | null | null |
from typing import List
import pandas as pd
import pytest
from ruamel.yaml import YAML
import great_expectations.exceptions as ge_exceptions
from great_expectations.core.batch import (
BatchDefinition,
BatchRequest,
BatchSpec,
RuntimeBatchRequest,
)
from great_expectations.core.batch_spec import (
PathBatchSpec,
RuntimeDataBatchSpec,
RuntimeQueryBatchSpec,
S3BatchSpec,
)
from great_expectations.core.id_dict import IDDict
from great_expectations.datasource.data_connector import RuntimeDataConnector
yaml = YAML()
def test_self_check(basic_datasource):
    """A fresh RuntimeDataConnector reports an empty self_check payload."""
    connector: RuntimeDataConnector = (
        basic_datasource.data_connectors["test_runtime_data_connector"]
    )
    expected = {
        "class_name": "RuntimeDataConnector",
        "data_asset_count": 0,
        "example_data_asset_names": [],
        "data_assets": {},
        "unmatched_data_reference_count": 0,
        "example_unmatched_data_references": [],
    }
    assert connector.self_check() == expected
def test_error_checking(basic_datasource):
    """Invalid batch requests raise the expected error types.

    Covers: unknown datasource name (ValueError), unknown data_connector
    name (ValueError), missing runtime_parameters (DataConnectorError),
    and too many runtime_parameters keys (InvalidBatchRequestError).
    """
    test_df: pd.DataFrame = pd.DataFrame(data={"col1": [1, 2], "col2": [3, 4]})
    test_runtime_data_connector: RuntimeDataConnector = (
        basic_datasource.data_connectors["test_runtime_data_connector"]
    )
    # Test for an unknown datasource
    with pytest.raises(ValueError):
        # noinspection PyUnusedLocal
        batch_definition_list: List[
            BatchDefinition
        ] = test_runtime_data_connector.get_batch_definition_list_from_batch_request(
            batch_request=RuntimeBatchRequest(
                datasource_name="non_existent_datasource",
                data_connector_name="test_runtime_data_connector",
                data_asset_name="my_data_asset",
                runtime_parameters={"batch_data": test_df},
            )
        )
    # Test for an unknown data_connector
    with pytest.raises(ValueError):
        # noinspection PyUnusedLocal
        batch_definition_list: List[
            BatchDefinition
        ] = test_runtime_data_connector.get_batch_definition_list_from_batch_request(
            batch_request=RuntimeBatchRequest(
                datasource_name=basic_datasource.name,
                data_connector_name="non_existent_data_connector",
                data_asset_name="my_data_asset",
                runtime_parameters={"batch_data": test_df},
            )
        )
    # test for missing runtime_parameters arg
    with pytest.raises(ge_exceptions.DataConnectorError):
        # noinspection PyUnusedLocal
        batch_definition_list: List[
            BatchDefinition
        ] = test_runtime_data_connector.get_batch_definition_list_from_batch_request(
            batch_request=RuntimeBatchRequest(
                datasource_name=basic_datasource.name,
                data_connector_name="test_runtime_data_connector",
                data_asset_name="my_data_asset",
                batch_identifiers={"pipeline_stage_name": "munge"},
            )
        )
    # test for too many runtime_parameters keys
    with pytest.raises(ge_exceptions.InvalidBatchRequestError):
        # noinspection PyUnusedLocal
        batch_definition_list: List[
            BatchDefinition
        ] = test_runtime_data_connector.get_batch_definition_list_from_batch_request(
            batch_request=RuntimeBatchRequest(
                datasource_name=basic_datasource.name,
                data_connector_name="test_runtime_data_connector",
                data_asset_name="my_data_asset",
                runtime_parameters={"batch_data": test_df, "path": "my_path"},
                batch_identifiers={"pipeline_stage_name": "munge"},
            )
        )
def test_batch_identifiers_and_batch_identifiers_success_all_keys_present(
    basic_datasource,
):
    """All configured batch_identifier keys are accepted, yielding 1 batch."""
    test_df: pd.DataFrame = pd.DataFrame(data={"col1": [1, 2], "col2": [3, 4]})
    batch_identifiers: dict
    batch_identifiers = {
        "pipeline_stage_name": "core_processing",
        "airflow_run_id": 1234567890,
        "custom_key_0": "custom_value_0",
    }
    test_runtime_data_connector: RuntimeDataConnector = (
        basic_datasource.data_connectors["test_runtime_data_connector"]
    )
    # Verify that all keys in batch_identifiers are acceptable as batch_identifiers (using batch count).
    batch_request: dict = {
        "datasource_name": basic_datasource.name,
        "data_connector_name": test_runtime_data_connector.name,
        "data_asset_name": "IN_MEMORY_DATA_ASSET",
        "runtime_parameters": {"batch_data": test_df},
        "batch_identifiers": batch_identifiers,
    }
    batch_request: RuntimeBatchRequest = RuntimeBatchRequest(**batch_request)
    batch_definition_list: List[
        BatchDefinition
    ] = test_runtime_data_connector.get_batch_definition_list_from_batch_request(
        batch_request=batch_request
    )
    assert len(batch_definition_list) == 1
def test_batch_identifiers_and_batch_identifiers_error_illegal_keys(
    basic_datasource,
):
    """Batch identifier keys not declared in config raise DataConnectorError.

    First with all legal keys plus one illegal key; then with a single
    illegal key only.
    """
    test_df: pd.DataFrame = pd.DataFrame(data={"col1": [1, 2], "col2": [3, 4]})
    batch_identifiers: dict
    batch_identifiers = {
        "pipeline_stage_name": "core_processing",
        "airflow_run_id": 1234567890,
        "custom_key_0": "custom_value_0",
        "custom_key_1": "custom_value_1",
    }
    test_runtime_data_connector: RuntimeDataConnector = (
        basic_datasource.data_connectors["test_runtime_data_connector"]
    )
    # Ensure that keys in batch_identifiers["batch_identifiers"] that are not among batch_identifiers declared in
    # configuration
    # are not accepted. In this test, all legal keys plus a single illegal key are present.
    batch_request: dict = {
        "datasource_name": basic_datasource.name,
        "data_connector_name": test_runtime_data_connector.name,
        "data_asset_name": "my_data_asset_name",
        "runtime_parameters": {"batch_data": test_df},
        "batch_identifiers": batch_identifiers,
    }
    batch_request: BatchRequest = RuntimeBatchRequest(**batch_request)
    with pytest.raises(ge_exceptions.DataConnectorError):
        # noinspection PyUnusedLocal
        batch_definition_list: List[
            BatchDefinition
        ] = test_runtime_data_connector.get_batch_definition_list_from_batch_request(
            batch_request=batch_request
        )
    batch_identifiers = {"batch_identifiers": {"unknown_key": "some_value"}}
    test_runtime_data_connector: RuntimeDataConnector = (
        basic_datasource.data_connectors["test_runtime_data_connector"]
    )
    # Ensure that keys in batch_identifiers["batch_identifiers"] that are not among batch_identifiers declared in
    # configuration
    # are not accepted. In this test, a single illegal key is present.
    batch_request: dict = {
        "datasource_name": basic_datasource.name,
        "data_connector_name": test_runtime_data_connector.name,
        "data_asset_name": "IN_MEMORY_DATA_ASSET",
        "runtime_parameters": {"batch_data": test_df},
        "batch_identifiers": batch_identifiers,
    }
    batch_request: BatchRequest = RuntimeBatchRequest(**batch_request)
    with pytest.raises(ge_exceptions.DataConnectorError):
        # noinspection PyUnusedLocal
        batch_definition_list: List[
            BatchDefinition
        ] = test_runtime_data_connector.get_batch_definition_list_from_batch_request(
            batch_request=batch_request
        )
def test_get_available_data_asset_names(basic_datasource):
    """An unused connector exposes no available data asset names."""
    connector: RuntimeDataConnector = (
        basic_datasource.data_connectors["test_runtime_data_connector"]
    )
    names: List[str] = connector.get_available_data_asset_names()
    assert names == []
def test_get_available_data_asset_names_updating_after_batch_request(basic_datasource):
    """Available asset names grow as batch requests introduce new assets."""
    test_runtime_data_connector: RuntimeDataConnector = (
        basic_datasource.data_connectors["test_runtime_data_connector"]
    )
    test_df: pd.DataFrame = pd.DataFrame(data={"col1": [1, 2], "col2": [3, 4]})
    # empty if data_connector has not been used
    assert test_runtime_data_connector.get_available_data_asset_names() == []
    batch_identifiers = {
        "airflow_run_id": 1234567890,
    }
    batch_request: dict = {
        "datasource_name": basic_datasource.name,
        "data_connector_name": test_runtime_data_connector.name,
        "data_asset_name": "my_data_asset_1",
        "runtime_parameters": {
            "batch_data": test_df,
        },
        "batch_identifiers": batch_identifiers,
    }
    batch_request: RuntimeBatchRequest = RuntimeBatchRequest(**batch_request)
    # run with my_data_asset_1
    test_runtime_data_connector.get_batch_definition_list_from_batch_request(
        batch_request=batch_request
    )
    # updated to my_data_asset_1
    assert test_runtime_data_connector.get_available_data_asset_names() == [
        "my_data_asset_1"
    ]
    batch_identifiers = {
        "airflow_run_id": 1234567890,
    }
    batch_request: dict = {
        "datasource_name": basic_datasource.name,
        "data_connector_name": test_runtime_data_connector.name,
        "data_asset_name": "my_data_asset_2",
        "runtime_parameters": {
            "batch_data": test_df,
        },
        "batch_identifiers": batch_identifiers,
    }
    batch_request: RuntimeBatchRequest = RuntimeBatchRequest(**batch_request)
    # run with my_data_asset_2
    test_runtime_data_connector.get_batch_definition_list_from_batch_request(
        batch_request=batch_request
    )
    # updated to my_data_asset_1 and my_data_asset_2
    assert test_runtime_data_connector.get_available_data_asset_names() == [
        "my_data_asset_1",
        "my_data_asset_2",
    ]
def test_data_references_cache_updating_after_batch_request(
    basic_datasource,
):
    """The internal _data_references_cache accumulates per-asset batches.

    Sequence: new batch for my_data_asset_1, a second batch for the same
    asset under a different identifier, then a batch for a second asset.
    The cache is keyed by asset name, then by the data-reference name
    derived from the batch identifiers.
    """
    test_runtime_data_connector: RuntimeDataConnector = (
        basic_datasource.data_connectors["test_runtime_data_connector"]
    )
    test_df: pd.DataFrame = pd.DataFrame(data={"col1": [1, 2], "col2": [3, 4]})
    # empty if data_connector has not been used
    assert test_runtime_data_connector.get_available_data_asset_names() == []
    batch_identifiers = {
        "airflow_run_id": 1234567890,
    }
    batch_request: dict = {
        "datasource_name": basic_datasource.name,
        "data_connector_name": test_runtime_data_connector.name,
        "data_asset_name": "my_data_asset_1",
        "runtime_parameters": {
            "batch_data": test_df,
        },
        "batch_identifiers": batch_identifiers,
    }
    batch_request: RuntimeBatchRequest = RuntimeBatchRequest(**batch_request)
    # run with my_data_asset_1
    test_runtime_data_connector.get_batch_definition_list_from_batch_request(
        batch_request=batch_request
    )
    assert test_runtime_data_connector._data_references_cache == {
        "my_data_asset_1": {
            "1234567890": [
                BatchDefinition(
                    datasource_name="my_datasource",
                    data_connector_name="test_runtime_data_connector",
                    data_asset_name="my_data_asset_1",
                    batch_identifiers=IDDict({"airflow_run_id": 1234567890}),
                )
            ],
        }
    }
    # update with a new batch for the same asset
    test_df_new: pd.DataFrame = pd.DataFrame(data={"col1": [5, 6], "col2": [7, 8]})
    batch_identifiers = {
        "airflow_run_id": 987654321,
    }
    batch_request: dict = {
        "datasource_name": basic_datasource.name,
        "data_connector_name": test_runtime_data_connector.name,
        "data_asset_name": "my_data_asset_1",
        "runtime_parameters": {
            "batch_data": test_df_new,
        },
        "batch_identifiers": batch_identifiers,
    }
    batch_request: RuntimeBatchRequest = RuntimeBatchRequest(**batch_request)
    # run with the same data asset but a new batch
    test_runtime_data_connector.get_batch_definition_list_from_batch_request(
        batch_request=batch_request
    )
    assert test_runtime_data_connector._data_references_cache == {
        "my_data_asset_1": {
            "1234567890": [
                BatchDefinition(
                    datasource_name="my_datasource",
                    data_connector_name="test_runtime_data_connector",
                    data_asset_name="my_data_asset_1",
                    batch_identifiers=IDDict({"airflow_run_id": 1234567890}),
                )
            ],
            "987654321": [
                BatchDefinition(
                    datasource_name="my_datasource",
                    data_connector_name="test_runtime_data_connector",
                    data_asset_name="my_data_asset_1",
                    batch_identifiers=IDDict({"airflow_run_id": 987654321}),
                )
            ],
        },
    }
    # new data_asset_name
    test_df_new_asset: pd.DataFrame = pd.DataFrame(
        data={"col1": [9, 10], "col2": [11, 12]}
    )
    batch_identifiers = {
        "airflow_run_id": 5555555,
    }
    batch_request: dict = {
        "datasource_name": basic_datasource.name,
        "data_connector_name": test_runtime_data_connector.name,
        "data_asset_name": "my_data_asset_2",
        "runtime_parameters": {
            "batch_data": test_df_new_asset,
        },
        "batch_identifiers": batch_identifiers,
    }
    batch_request: RuntimeBatchRequest = RuntimeBatchRequest(**batch_request)
    # run with a new data asset
    test_runtime_data_connector.get_batch_definition_list_from_batch_request(
        batch_request=batch_request
    )
    assert test_runtime_data_connector._data_references_cache == {
        "my_data_asset_1": {
            "1234567890": [
                BatchDefinition(
                    datasource_name="my_datasource",
                    data_connector_name="test_runtime_data_connector",
                    data_asset_name="my_data_asset_1",
                    batch_identifiers=IDDict({"airflow_run_id": 1234567890}),
                )
            ],
            "987654321": [
                BatchDefinition(
                    datasource_name="my_datasource",
                    data_connector_name="test_runtime_data_connector",
                    data_asset_name="my_data_asset_1",
                    batch_identifiers=IDDict({"airflow_run_id": 987654321}),
                )
            ],
        },
        "my_data_asset_2": {
            "5555555": [
                BatchDefinition(
                    datasource_name="my_datasource",
                    data_connector_name="test_runtime_data_connector",
                    data_asset_name="my_data_asset_2",
                    batch_identifiers=IDDict({"airflow_run_id": 5555555}),
                )
            ]
        },
    }
    assert test_runtime_data_connector.get_available_data_asset_names() == [
        "my_data_asset_1",
        "my_data_asset_2",
    ]
    assert test_runtime_data_connector.get_data_reference_list_count() == 3
def test_get_batch_definition_list_from_batch_request_length_one(
    basic_datasource,
):
    """A single runtime batch request yields exactly one BatchDefinition."""
    test_df: pd.DataFrame = pd.DataFrame(data={"col1": [1, 2], "col2": [3, 4]})
    batch_identifiers: dict = {
        "airflow_run_id": 1234567890,
    }
    test_runtime_data_connector: RuntimeDataConnector = (
        basic_datasource.data_connectors["test_runtime_data_connector"]
    )
    batch_request: dict = {
        "datasource_name": basic_datasource.name,
        "data_connector_name": test_runtime_data_connector.name,
        "data_asset_name": "my_data_asset",
        "runtime_parameters": {"batch_data": test_df},
        "batch_identifiers": batch_identifiers,
    }
    batch_request: RuntimeBatchRequest = RuntimeBatchRequest(**batch_request)
    expected_batch_definition_list: List[BatchDefinition] = [
        BatchDefinition(
            datasource_name="my_datasource",
            data_connector_name="test_runtime_data_connector",
            data_asset_name="my_data_asset",
            batch_identifiers=IDDict(batch_identifiers),
        )
    ]
    batch_definition_list: List[
        BatchDefinition
    ] = test_runtime_data_connector.get_batch_definition_list_from_batch_request(
        batch_request=batch_request
    )
    assert batch_definition_list == expected_batch_definition_list
def test_get_batch_definition_list_from_batch_request_with_and_without_data_asset_name(
    basic_datasource,
):
    """data_asset_name is required: omitting it raises TypeError; supplying
    it produces one BatchDefinition carrying that name."""
    test_df: pd.DataFrame = pd.DataFrame(data={"col1": [1, 2], "col2": [3, 4]})
    batch_identifiers = {
        "airflow_run_id": 1234567890,
    }
    test_runtime_data_connector: RuntimeDataConnector = (
        basic_datasource.data_connectors["test_runtime_data_connector"]
    )
    # data_asset_name is missing
    batch_request: dict = {
        "datasource_name": basic_datasource.name,
        "data_connector_name": test_runtime_data_connector.name,
        "runtime_parameters": {
            "batch_data": test_df,
        },
        "batch_identifiers": batch_identifiers,
    }
    with pytest.raises(TypeError):
        batch_request: RuntimeBatchRequest = RuntimeBatchRequest(**batch_request)
    # test that name can be set as "my_data_asset"
    batch_request: dict = {
        "datasource_name": basic_datasource.name,
        "data_connector_name": test_runtime_data_connector.name,
        "data_asset_name": "my_data_asset",
        "runtime_parameters": {
            "batch_data": test_df,
        },
        "batch_identifiers": batch_identifiers,
    }
    batch_request: RuntimeBatchRequest = RuntimeBatchRequest(**batch_request)
    batch_definition_list: List[
        BatchDefinition
    ] = test_runtime_data_connector.get_batch_definition_list_from_batch_request(
        batch_request=batch_request
    )
    assert len(batch_definition_list) == 1
    # check that default value has been set
    assert batch_definition_list[0]["data_asset_name"] == "my_data_asset"
def test__get_data_reference_list(basic_datasource):
    """With no batches loaded the data reference list is empty."""
    connector: RuntimeDataConnector = (
        basic_datasource.data_connectors["test_runtime_data_connector"]
    )
    # noinspection PyProtectedMember
    refs: List[str] = connector._get_data_reference_list()
    assert refs == []
def test_refresh_data_references_cache(basic_datasource):
    """A new connector starts with an empty data-references cache."""
    connector: RuntimeDataConnector = (
        basic_datasource.data_connectors["test_runtime_data_connector"]
    )
    assert not len(connector._data_references_cache)
def test__generate_batch_spec_parameters_from_batch_definition(
    basic_datasource,
):
    """Batch-spec parameters derived from a BatchDefinition contain only
    the data_asset_name (identifiers are not carried over)."""
    batch_identifiers = {
        "custom_key_0": "staging",
        "airflow_run_id": 1234567890,
    }
    test_runtime_data_connector: RuntimeDataConnector = (
        basic_datasource.data_connectors["test_runtime_data_connector"]
    )
    expected_batch_spec_parameters: dict = {"data_asset_name": "my_data_asset"}
    # noinspection PyProtectedMember
    batch_spec_parameters: dict = test_runtime_data_connector._generate_batch_spec_parameters_from_batch_definition(
        batch_definition=BatchDefinition(
            datasource_name="my_datasource",
            data_connector_name="test_runtime_data_connector",
            data_asset_name="my_data_asset",
            batch_identifiers=IDDict(batch_identifiers),
        )
    )
    assert batch_spec_parameters == expected_batch_spec_parameters
def test__build_batch_spec(basic_datasource):
    """build_batch_spec picks the spec subclass from runtime_parameters.

    batch_data -> RuntimeDataBatchSpec, query -> RuntimeQueryBatchSpec,
    plain path -> PathBatchSpec, s3:// or s3a:// path -> S3BatchSpec.
    """
    batch_identifiers = {
        "custom_key_0": "staging",
        "airflow_run_id": 1234567890,
    }
    test_runtime_data_connector: RuntimeDataConnector = (
        basic_datasource.data_connectors["test_runtime_data_connector"]
    )
    batch_definition = BatchDefinition(
        datasource_name="my_datasource",
        data_connector_name="test_runtime_data_connector",
        data_asset_name="my_data_asset",
        batch_identifiers=IDDict(batch_identifiers),
    )
    # In-memory DataFrame -> RuntimeDataBatchSpec
    batch_spec: BatchSpec = test_runtime_data_connector.build_batch_spec(
        batch_definition=batch_definition,
        runtime_parameters={
            "batch_data": pd.DataFrame({"x": range(10)}),
        },
    )
    assert type(batch_spec) == RuntimeDataBatchSpec
    assert set(batch_spec.keys()) == {"batch_data", "data_asset_name"}
    assert batch_spec["batch_data"].shape == (10, 1)
    # SQL query -> RuntimeQueryBatchSpec
    batch_spec: BatchSpec = test_runtime_data_connector.build_batch_spec(
        batch_definition=batch_definition,
        runtime_parameters={
            "query": "my_query",
        },
    )
    assert type(batch_spec) == RuntimeQueryBatchSpec
    # Filesystem path -> PathBatchSpec
    batch_spec: BatchSpec = test_runtime_data_connector.build_batch_spec(
        batch_definition=batch_definition, runtime_parameters={"path": "my_path"}
    )
    assert type(batch_spec) == PathBatchSpec
    # s3:// path -> S3BatchSpec
    batch_spec: BatchSpec = test_runtime_data_connector.build_batch_spec(
        batch_definition=batch_definition,
        runtime_parameters={"path": "s3://my.s3.path"},
    )
    assert type(batch_spec) == S3BatchSpec
    # s3a:// path -> S3BatchSpec as well
    batch_spec: BatchSpec = test_runtime_data_connector.build_batch_spec(
        batch_definition=batch_definition,
        runtime_parameters={"path": "s3a://my.s3.path"},
    )
    assert type(batch_spec) == S3BatchSpec
def test__get_data_reference_name(basic_datasource):
    """_get_data_reference_name joins the batch-identifier values into a
    single "-"-delimited string (one value -> no delimiter).

    Fix: the original fetched the same connector with an identical 3-line
    assignment twice; the connector is now looked up once.
    """
    test_runtime_data_connector: RuntimeDataConnector = (
        basic_datasource.data_connectors["test_runtime_data_connector"]
    )

    # Single identifier: the reference name is just its stringified value.
    data_connector_query: dict = {
        "batch_filter_parameters": {
            "airflow_run_id": 1234567890,
        }
    }
    batch_identifiers = IDDict(data_connector_query["batch_filter_parameters"])
    assert (
        test_runtime_data_connector._get_data_reference_name(batch_identifiers)
        == "1234567890"
    )

    # Multiple identifiers: values joined with "-" in insertion order.
    data_connector_query = {
        "batch_filter_parameters": {
            "run_id_1": 1234567890,
            "run_id_2": 1111111111,
        }
    }
    batch_identifiers = IDDict(data_connector_query["batch_filter_parameters"])
    assert (
        test_runtime_data_connector._get_data_reference_name(batch_identifiers)
        == "1234567890-1111111111"
    )
| 34.969136
| 116
| 0.684598
| 2,419
| 22,660
| 5.906986
| 0.069864
| 0.111904
| 0.094478
| 0.151165
| 0.865421
| 0.840857
| 0.818672
| 0.790328
| 0.78074
| 0.770943
| 0
| 0.022286
| 0.231686
| 22,660
| 647
| 117
| 35.023184
| 0.798449
| 0.058782
| 0
| 0.624266
| 0
| 0
| 0.166948
| 0.047243
| 0
| 0
| 0
| 0
| 0.052838
| 1
| 0.027397
| false
| 0
| 0.017613
| 0
| 0.04501
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
ed30117dd50cc97124e6711f817037435b61a9e1
| 15,503
|
py
|
Python
|
tests/test_sklearn_one_vs_rest_classifier_converter.py
|
elcolie/sklearn-onnx
|
004fa39cb995ada0cde232ae8e30341018f450ac
|
[
"MIT"
] | 1
|
2021-04-12T12:38:20.000Z
|
2021-04-12T12:38:20.000Z
|
tests/test_sklearn_one_vs_rest_classifier_converter.py
|
elcolie/sklearn-onnx
|
004fa39cb995ada0cde232ae8e30341018f450ac
|
[
"MIT"
] | null | null | null |
tests/test_sklearn_one_vs_rest_classifier_converter.py
|
elcolie/sklearn-onnx
|
004fa39cb995ada0cde232ae8e30341018f450ac
|
[
"MIT"
] | null | null | null |
from distutils.version import StrictVersion
import unittest
from numpy.testing import assert_almost_equal
from onnxruntime import InferenceSession, __version__ as ort_version
from sklearn.ensemble import (
GradientBoostingClassifier,
GradientBoostingRegressor,
)
from sklearn.linear_model import LogisticRegression, LinearRegression
from sklearn.multiclass import OneVsRestClassifier
from sklearn.neural_network import MLPClassifier, MLPRegressor
from skl2onnx import convert_sklearn
from skl2onnx.common.data_types import (
FloatTensorType,
Int64TensorType,
onnx_built_with_ml,
)
from test_utils import (
dump_data_and_model,
dump_multiple_classification,
fit_classification_model,
TARGET_OPSET
)
class TestOneVsRestClassifierConverter(unittest.TestCase):
    """ONNX conversion tests for sklearn's OneVsRestClassifier wrapping
    various base estimators (LogisticRegression, MLP, gradient boosting,
    LinearRegression), over float and int64 inputs, binary and multiclass
    targets, with and without ZipMap and raw decision scores.

    NOTE(review): indentation reconstructed from a whitespace-mangled dump;
    all code tokens are unchanged from the original.
    """

    @unittest.skipIf(not onnx_built_with_ml(),
                     reason="Requires ONNX-ML extension.")
    def test_ovr(self):
        """Round-trip a basic OVR(LogisticRegression) multiclass model."""
        model = OneVsRestClassifier(LogisticRegression())
        dump_multiple_classification(
            model,
            allow_failure="StrictVersion(onnxruntime.__version__)"
            " <= StrictVersion('0.2.1')",
            target_opset=TARGET_OPSET
        )

    @unittest.skipIf(not onnx_built_with_ml(),
                     reason="Requires ONNX-ML extension.")
    def test_ovr_02(self):
        """Same as test_ovr but with class labels starting at 2."""
        model = OneVsRestClassifier(LogisticRegression())
        dump_multiple_classification(
            model,
            first_class=2,
            suffix="F2",
            allow_failure="StrictVersion(onnxruntime.__version__)"
            " <= StrictVersion('0.2.1')",
            target_opset=TARGET_OPSET
        )

    @unittest.skipIf(not onnx_built_with_ml(),
                     reason="Requires ONNX-ML extension.")
    def test_ovr_string(self):
        """OVR with string class labels."""
        model = OneVsRestClassifier(LogisticRegression())
        dump_multiple_classification(
            model,
            verbose=False,
            label_string=True,
            suffix="String",
            allow_failure="StrictVersion(onnxruntime.__version__)"
            " <= StrictVersion('0.2.1')",
            target_opset=TARGET_OPSET
        )

    @unittest.skipIf(not onnx_built_with_ml(),
                     reason="Requires ONNX-ML extension.")
    def test_ovr_classification_float(self):
        """3-class OVR over float inputs: convert, then compare sklearn
        and onnxruntime outputs."""
        model, X = fit_classification_model(
            OneVsRestClassifier(LogisticRegression(solver='liblinear')), 3)
        model_onnx = convert_sklearn(
            model,
            "ovr classification",
            [("input", FloatTensorType([None, X.shape[1]]))],
            target_opset=TARGET_OPSET
        )
        self.assertIsNotNone(model_onnx)
        dump_data_and_model(
            X,
            model,
            model_onnx,
            basename="SklearnOVRClassificationFloat",
            allow_failure="StrictVersion(onnxruntime.__version__)"
            "<= StrictVersion('0.2.1')",
        )

    @unittest.skipIf(not onnx_built_with_ml(),
                     reason="Requires ONNX-ML extension.")
    def test_ovr_classification_decision_function(self):
        """Raw decision-function scores (raw_scores option), 4 classes.

        With onnxruntime >= 1.0.0 additionally checks the raw scores
        numerically against sklearn's decision_function; zipmap is
        disabled there so the second output is a plain tensor.
        """
        model, X = fit_classification_model(
            OneVsRestClassifier(LogisticRegression()), 4)
        options = {id(model): {'raw_scores': True}}
        model_onnx = convert_sklearn(
            model,
            "ovr classification",
            [("input", FloatTensorType([None, X.shape[1]]))],
            options=options,
            target_opset=TARGET_OPSET
        )
        self.assertIsNotNone(model_onnx)
        dump_data_and_model(
            X,
            model,
            model_onnx,
            basename="SklearnOVRClassificationDecisionFunction",
            allow_failure="StrictVersion(onnxruntime.__version__)"
            "<= StrictVersion('0.2.1')",
            methods=['predict', 'decision_function'],
        )
        # Older runtimes lack the outputs needed below.
        if StrictVersion(ort_version) < StrictVersion("1.0.0"):
            return
        options = {id(model): {'raw_scores': True, 'zipmap': False}}
        model_onnx = convert_sklearn(
            model, "ovr classification",
            [("input", FloatTensorType([None, X.shape[1]]))],
            options=options, target_opset=TARGET_OPSET)
        sess = InferenceSession(model_onnx.SerializeToString())
        got = sess.run(None, {'input': X})[1]
        dec = model.decision_function(X)
        assert_almost_equal(got, dec, decimal=4)

    @unittest.skipIf(not onnx_built_with_ml(),
                     reason="Requires ONNX-ML extension.")
    def test_ovr_classification_decision_function_binary(self):
        """Binary raw scores: the ONNX model emits two columns, +/- the
        single sklearn decision_function score."""
        model, X = fit_classification_model(
            OneVsRestClassifier(LogisticRegression()), 2)
        options = {id(model): {'raw_scores': True}}
        model_onnx = convert_sklearn(
            model,
            "ovr classification",
            [("input", FloatTensorType([None, X.shape[1]]))],
            options=options,
            target_opset=TARGET_OPSET
        )
        self.assertIsNotNone(model_onnx)
        dump_data_and_model(
            X,
            model,
            model_onnx,
            basename="SklearnOVRClassificationDecisionFunctionBinary",
            allow_failure="StrictVersion(onnxruntime.__version__)"
            "<= StrictVersion('0.2.1')",
            methods=['predict', 'decision_function_binary'],
        )
        if StrictVersion(ort_version) < StrictVersion("1.0.0"):
            return
        options = {id(model): {'raw_scores': True, 'zipmap': False}}
        model_onnx = convert_sklearn(
            model, "ovr classification",
            [("input", FloatTensorType([None, X.shape[1]]))],
            options=options, target_opset=TARGET_OPSET)
        sess = InferenceSession(model_onnx.SerializeToString())
        got = sess.run(None, {'input': X})[1]
        dec = model.decision_function(X)
        # Column 1 carries the positive-class score, column 0 its negation.
        assert_almost_equal(got[:, 1], dec, decimal=4)
        assert_almost_equal(-got[:, 0], dec, decimal=4)

    @unittest.skipIf(not onnx_built_with_ml(),
                     reason="Requires ONNX-ML extension.")
    def test_ovr_classification_int(self):
        """5-class OVR over int64 inputs."""
        model, X = fit_classification_model(
            OneVsRestClassifier(LogisticRegression()), 5, is_int=True)
        model_onnx = convert_sklearn(
            model,
            "ovr classification",
            [("input", Int64TensorType([None, X.shape[1]]))],
            target_opset=TARGET_OPSET
        )
        self.assertIsNotNone(model_onnx)
        dump_data_and_model(
            X,
            model,
            model_onnx,
            basename="SklearnOVRClassificationInt",
            allow_failure="StrictVersion(onnxruntime.__version__)"
            "<= StrictVersion('0.2.1')",
        )

    @unittest.skipIf(not onnx_built_with_ml(),
                     reason="Requires ONNX-ML extension.")
    def test_ovr_classification_float_binary(self):
        """Binary OVR over float inputs."""
        model, X = fit_classification_model(
            OneVsRestClassifier(LogisticRegression()), 2)
        model_onnx = convert_sklearn(
            model,
            "ovr classification",
            [("input", FloatTensorType([None, X.shape[1]]))],
            target_opset=TARGET_OPSET
        )
        self.assertIsNotNone(model_onnx)
        dump_data_and_model(
            X,
            model,
            model_onnx,
            basename="SklearnOVRClassificationFloatBin",
            allow_failure="StrictVersion(onnxruntime.__version__)"
            "<= StrictVersion('0.2.1')",
        )

    @unittest.skipIf(not onnx_built_with_ml(),
                     reason="Requires ONNX-ML extension.")
    def test_ovr_classification_float_binary_nozipmap(self):
        """Binary OVR with the ZipMap post-processing node disabled."""
        model, X = fit_classification_model(
            OneVsRestClassifier(LogisticRegression()), 2)
        model_onnx = convert_sklearn(
            model, "ovr classification",
            [("input", FloatTensorType([None, X.shape[1]]))],
            target_opset=TARGET_OPSET,
            options={id(model): {'zipmap': False}})
        self.assertIsNotNone(model_onnx)
        dump_data_and_model(
            X, model, model_onnx,
            basename="SklearnOVRClassificationFloatBinNoZipMap",
            allow_failure="StrictVersion(onnxruntime.__version__)"
            "<= StrictVersion('0.2.1')")

    @unittest.skipIf(not onnx_built_with_ml(),
                     reason="Requires ONNX-ML extension.")
    def test_ovr_classification_int_binary(self):
        """Binary OVR over int64 inputs."""
        model, X = fit_classification_model(
            OneVsRestClassifier(LogisticRegression()), 2, is_int=True)
        model_onnx = convert_sklearn(
            model,
            "ovr classification",
            [("input", Int64TensorType([None, X.shape[1]]))],
            target_opset=TARGET_OPSET
        )
        self.assertIsNotNone(model_onnx)
        dump_data_and_model(
            X,
            model,
            model_onnx,
            basename="SklearnOVRClassificationIntBin",
            allow_failure="StrictVersion(onnxruntime.__version__)"
            "<= StrictVersion('0.2.1')",
        )

    @unittest.skipIf(not onnx_built_with_ml(),
                     reason="Requires ONNX-ML extension.")
    def test_ovr_classification_float_mlp(self):
        """4-class OVR with an MLPClassifier base estimator."""
        model, X = fit_classification_model(
            OneVsRestClassifier(MLPClassifier()), 4)
        model_onnx = convert_sklearn(
            model,
            "ovr classification",
            [("input", FloatTensorType([None, X.shape[1]]))],
            target_opset=TARGET_OPSET
        )
        self.assertIsNotNone(model_onnx)
        dump_data_and_model(
            X,
            model,
            model_onnx,
            basename="SklearnOVRClassificationFloatMLP",
            allow_failure="StrictVersion(onnxruntime.__version__)"
            "<= StrictVersion('0.2.1')",
        )

    @unittest.skipIf(not onnx_built_with_ml(),
                     reason="Requires ONNX-ML extension.")
    def test_ovr_classification_int_ensemble(self):
        """5-class OVR with a GradientBoostingClassifier, int64 inputs."""
        model, X = fit_classification_model(
            OneVsRestClassifier(GradientBoostingClassifier()), 5, is_int=True)
        model_onnx = convert_sklearn(
            model,
            "ovr classification",
            [("input", Int64TensorType([None, X.shape[1]]))],
            target_opset=TARGET_OPSET
        )
        self.assertIsNotNone(model_onnx)
        dump_data_and_model(
            X,
            model,
            model_onnx,
            basename="SklearnOVRClassificationIntEnsemble",
            allow_failure="StrictVersion(onnxruntime.__version__)"
            "<= StrictVersion('0.2.1')",
        )

    @unittest.skipIf(not onnx_built_with_ml(),
                     reason="Requires ONNX-ML extension.")
    def test_ovr_classification_float_binary_ensemble(self):
        """Binary OVR with a GradientBoostingClassifier, float inputs."""
        model, X = fit_classification_model(
            OneVsRestClassifier(GradientBoostingClassifier()), 2)
        model_onnx = convert_sklearn(
            model,
            "ovr classification",
            [("input", FloatTensorType([None, X.shape[1]]))],
            target_opset=TARGET_OPSET
        )
        self.assertIsNotNone(model_onnx)
        dump_data_and_model(
            X,
            model,
            model_onnx,
            basename="SklearnOVRClassificationFloatBinEnsemble",
            allow_failure="StrictVersion(onnxruntime.__version__)"
            "<= StrictVersion('0.2.1')",
        )

    @unittest.skipIf(not onnx_built_with_ml(),
                     reason="Requires ONNX-ML extension.")
    def test_ovr_classification_int_binary_mlp(self):
        """Binary OVR with an MLPClassifier, int64 inputs."""
        model, X = fit_classification_model(
            OneVsRestClassifier(MLPClassifier()), 2, is_int=True)
        model_onnx = convert_sklearn(
            model,
            "ovr classification",
            [("input", Int64TensorType([None, X.shape[1]]))],
            target_opset=TARGET_OPSET
        )
        self.assertIsNotNone(model_onnx)
        dump_data_and_model(
            X,
            model,
            model_onnx,
            basename="SklearnOVRClassificationIntBinMLP",
            allow_failure="StrictVersion(onnxruntime.__version__)"
            "<= StrictVersion('0.2.1')",
        )

    @unittest.skipIf(not onnx_built_with_ml(),
                     reason="Requires ONNX-ML extension.")
    def test_ovr_regression_float(self):
        """The test is unstable, some observations
        are equidistant to more than one class,
        the chosen is difficult to predict. So we
        check only probabilities."""
        rs = 11
        model, X = fit_classification_model(
            OneVsRestClassifier(
                LinearRegression()), 3, random_state=rs)
        model_onnx = convert_sklearn(
            model,
            "ovr regression",
            [("input", FloatTensorType([None, X.shape[1]]))],
            target_opset=TARGET_OPSET
        )
        self.assertIsNotNone(model_onnx)
        # "-Out0" basename suffix: presumably restricts the comparison to
        # the first output only (see test_utils) — TODO confirm.
        dump_data_and_model(
            X[:5],
            model,
            model_onnx,
            basename="SklearnOVRRegressionFloat-Out0",
            allow_failure="StrictVersion(onnxruntime.__version__)"
            "<= StrictVersion('0.2.1')",
        )

    @unittest.skipIf(not onnx_built_with_ml(),
                     reason="Requires ONNX-ML extension.")
    def test_ovr_regression_int(self):
        """10-class OVR with a LinearRegression base estimator, int64
        inputs; only the first output is compared ("-Out0")."""
        model, X = fit_classification_model(
            OneVsRestClassifier(LinearRegression()), 10, is_int=True)
        model_onnx = convert_sklearn(
            model,
            "ovr regression",
            [("input", Int64TensorType([None, X.shape[1]]))],
            target_opset=TARGET_OPSET
        )
        self.assertIsNotNone(model_onnx)
        dump_data_and_model(
            X,
            model,
            model_onnx,
            basename="SklearnOVRRegressionInt-Out0",
            allow_failure="StrictVersion(onnxruntime.__version__)"
            "<= StrictVersion('0.2.1')",
        )

    @unittest.skipIf(not onnx_built_with_ml(),
                     reason="Requires ONNX-ML extension.")
    def test_ovr_regression_float_mlp(self):
        """5-class OVR with an MLPRegressor base estimator."""
        model, X = fit_classification_model(
            OneVsRestClassifier(MLPRegressor()), 5)
        model_onnx = convert_sklearn(
            model,
            "ovr regression",
            [("input", FloatTensorType([None, X.shape[1]]))],
            target_opset=TARGET_OPSET
        )
        self.assertIsNotNone(model_onnx)
        dump_data_and_model(
            X,
            model,
            model_onnx,
            basename="SklearnOVRRegressionFloatMLP-Out0",
            allow_failure="StrictVersion(onnxruntime.__version__)"
            "<= StrictVersion('0.2.1')",
        )

    @unittest.skipIf(not onnx_built_with_ml(),
                     reason="Requires ONNX-ML extension.")
    def test_ovr_regression_int_ensemble(self):
        """4-class OVR with a GradientBoostingRegressor, int64 inputs."""
        model, X = fit_classification_model(
            OneVsRestClassifier(GradientBoostingRegressor()), 4, is_int=True)
        model_onnx = convert_sklearn(
            model,
            "ovr regression",
            [("input", Int64TensorType([None, X.shape[1]]))],
            target_opset=TARGET_OPSET
        )
        self.assertIsNotNone(model_onnx)
        dump_data_and_model(
            X,
            model,
            model_onnx,
            basename="SklearnOVRRegressionIntEnsemble-Out0",
            allow_failure="StrictVersion(onnxruntime.__version__)"
            "<= StrictVersion('0.2.1')",
        )
# Allow running this test module directly (outside the pytest runner).
if __name__ == "__main__":
    unittest.main()
| 37.44686
| 78
| 0.596723
| 1,429
| 15,503
| 6.156753
| 0.104269
| 0.050125
| 0.038645
| 0.050011
| 0.827347
| 0.827347
| 0.827347
| 0.818368
| 0.762787
| 0.71539
| 0
| 0.011716
| 0.30078
| 15,503
| 413
| 79
| 37.53753
| 0.799908
| 0.009482
| 0
| 0.693299
| 0
| 0
| 0.174018
| 0.105376
| 0
| 0
| 0
| 0
| 0.048969
| 1
| 0.046392
| false
| 0
| 0.028351
| 0
| 0.082474
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
ed40bd9562304312050c9da848873a5eb1d3ae09
| 641
|
py
|
Python
|
hyperion_sat/__init__.py
|
Stemonitis/Hyperion
|
9620ea2a4ebf18e32863e0b2115be1d5e50016ce
|
[
"MIT"
] | null | null | null |
hyperion_sat/__init__.py
|
Stemonitis/Hyperion
|
9620ea2a4ebf18e32863e0b2115be1d5e50016ce
|
[
"MIT"
] | null | null | null |
hyperion_sat/__init__.py
|
Stemonitis/Hyperion
|
9620ea2a4ebf18e32863e0b2115be1d5e50016ce
|
[
"MIT"
] | null | null | null |
from .reading.metadata import *
from .reading.hdf import *
from .atmospheric_correction.making_masks import *
from .atmospheric_correction.putting_together_and_calculating_look_up_tables import *
from .atmospheric_correction.spectral_polishing import *
from .atmospheric_correction.spectral_smile import *
from .atmospheric_correction.surface_reflectance_retrieval import *
from .atmospheric_correction.water_vapor_retrieval import *
from .compiling_with_the_sun_data import *
from .display.display_as_jpeg import *
from .ecological_niche_estimation import *
from .preprocessing.preprocessing import *
from .retrieve_data_from_usgs import *
| 45.785714
| 85
| 0.862715
| 79
| 641
| 6.620253
| 0.481013
| 0.229446
| 0.240918
| 0.355641
| 0.14914
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.081123
| 641
| 13
| 86
| 49.307692
| 0.887946
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.